From de288e0cf2ad3bda67a2950899282430b42bba6e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 26 Nov 2021 14:51:35 -0500 Subject: [PATCH 0001/1761] Remove 'run_setup' from easy_install command. --- setuptools/command/easy_install.py | 24 +------- setuptools/tests/test_easy_install.py | 79 --------------------------- 2 files changed, 2 insertions(+), 101 deletions(-) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index fc848d0d1c..d7ea033b0d 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -46,13 +46,12 @@ from setuptools import SetuptoolsDeprecationWarning from setuptools import Command -from setuptools.sandbox import run_setup from setuptools.command import setopt from setuptools.archive_util import unpack_archive from setuptools.package_index import ( PackageIndex, parse_requirement_arg, URL_SCHEME, ) -from setuptools.command import bdist_egg, egg_info +from setuptools.command import bdist_egg from setuptools.wheel import Wheel from pkg_resources import ( yield_lines, normalize_path, resource_string, ensure_directory, @@ -1134,26 +1133,7 @@ def report_editable(self, spec, setup_script): return '\n' + self.__editable_msg % locals() def run_setup(self, setup_script, setup_base, args): - sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) - sys.modules.setdefault('distutils.command.egg_info', egg_info) - - args = list(args) - if self.verbose > 2: - v = 'v' * (self.verbose - 1) - args.insert(0, '-' + v) - elif self.verbose < 2: - args.insert(0, '-q') - if self.dry_run: - args.insert(0, '-n') - log.info( - "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) - ) - try: - run_setup(setup_script, args) - except SystemExit as v: - raise DistutilsError( - "Setup script exited with %s" % (v.args[0],) - ) from v + raise NotImplementedError("easy_install support has been removed") def build_and_install(self, setup_script, setup_base): args = ['bdist_egg', '--dist-dir'] diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index 6840d03b32..5e8240e4aa 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -15,7 +15,6 @@ import mock import time import re -import subprocess import pathlib import pytest @@ -32,7 +31,6 @@ from pkg_resources import normalize_path, working_set from pkg_resources import Distribution as PRDistribution from setuptools.tests.server import MockServer, path_to_url -from setuptools.tests import fail_on_ascii import pkg_resources from . import contexts @@ -162,24 +160,6 @@ def sdist_unicode(self, tmpdir): sdist_zip.close() return str(sdist) - @fail_on_ascii - def test_unicode_filename_in_sdist( - self, sdist_unicode, tmpdir, monkeypatch): - """ - The install command should execute correctly even if - the package has unicode filenames. 
- """ - dist = Distribution({'script_args': ['easy_install']}) - target = (tmpdir / 'target').ensure_dir() - cmd = ei.easy_install( - dist, - install_dir=str(target), - args=['x'], - ) - monkeypatch.setitem(os.environ, 'PYTHONPATH', str(target)) - cmd.ensure_finalized() - cmd.easy_install(sdist_unicode) - @pytest.fixture def sdist_unicode_in_script(self, tmpdir): files = [ @@ -219,20 +199,6 @@ def sdist_unicode_in_script(self, tmpdir): sdist_zip.close() return str(sdist) - @fail_on_ascii - def test_unicode_content_in_sdist( - self, sdist_unicode_in_script, tmpdir, monkeypatch): - """ - The install command should execute correctly even if - the package has unicode in scripts. - """ - dist = Distribution({"script_args": ["easy_install"]}) - target = (tmpdir / "target").ensure_dir() - cmd = ei.easy_install(dist, install_dir=str(target), args=["x"]) - monkeypatch.setitem(os.environ, "PYTHONPATH", str(target)) - cmd.ensure_finalized() - cmd.easy_install(sdist_unicode_in_script) - @pytest.fixture def sdist_script(self, tmpdir): files = [ @@ -260,24 +226,6 @@ def sdist_script(self, tmpdir): make_sdist(sdist, files) return sdist - @pytest.mark.skipif(not sys.platform.startswith('linux'), - reason="Test can only be run on Linux") - def test_script_install(self, sdist_script, tmpdir, monkeypatch): - """ - Check scripts are installed. - """ - dist = Distribution({'script_args': ['easy_install']}) - target = (tmpdir / 'target').ensure_dir() - cmd = ei.easy_install( - dist, - install_dir=str(target), - args=['x'], - ) - monkeypatch.setitem(os.environ, 'PYTHONPATH', str(target)) - cmd.ensure_finalized() - cmd.easy_install(sdist_script) - assert (target / 'mypkg_script').exists() - def test_dist_get_script_args_deprecated(self): with pytest.warns(EasyInstallDeprecationWarning): ScriptWriter.get_script_args(None, None) @@ -448,33 +396,6 @@ def test_bdist_egg_available_on_distutils_pkg(self, distutils_package): class TestSetupRequires: - def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch): - """ - When easy_install installs a source distribution which specifies - setup_requires, it should honor the fetch parameters (such as - index-url, and find-links). - """ - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) - monkeypatch.setenv('PIP_NO_INDEX', 'false') - with contexts.quiet(): - # create an sdist that has a build-time dependency. - with TestSetupRequires.create_sdist() as dist_file: - with contexts.tempdir() as temp_install_dir: - with contexts.environment(PYTHONPATH=temp_install_dir): - cmd = [ - sys.executable, - '-m', 'setup', - 'easy_install', - '--index-url', mock_index.url, - '--exclude-scripts', - '--install-dir', temp_install_dir, - dist_file, - ] - subprocess.Popen(cmd).wait() - # there should have been one requests to the server - assert [r.path for r in mock_index.requests] == ['/does-not-exist/'] - @staticmethod @contextlib.contextmanager def create_sdist(): From b19d64f715d589137666adad46a75dd8b06eb6e2 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 26 Nov 2021 14:59:35 -0500 Subject: [PATCH 0002/1761] Remove patched_setup_context, unused. 
--- setuptools/tests/test_easy_install.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index 5e8240e4aa..380e92c443 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -20,7 +20,6 @@ import pytest from jaraco import path -from setuptools import sandbox from setuptools.sandbox import run_setup import setuptools.command.easy_install as ei from setuptools.command.easy_install import ( @@ -345,25 +344,6 @@ def test_local_index(self, foo_package, install_target): expected = os.path.normcase(os.path.realpath(foo_package)) assert actual == expected - @contextlib.contextmanager - def user_install_setup_context(self, *args, **kwargs): - """ - Wrap sandbox.setup_context to patch easy_install in that context to - appear as user-installed. - """ - with self.orig_context(*args, **kwargs): - import setuptools.command.easy_install as ei - ei.__file__ = site.USER_SITE - yield - - def patched_setup_context(self): - self.orig_context = sandbox.setup_context - - return mock.patch( - 'setuptools.sandbox.setup_context', - self.user_install_setup_context, - ) - @pytest.fixture def distutils_package(): From 1da50ce2509ae3a826fca15f82e274ed4cafa728 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 26 Nov 2021 15:04:48 -0500 Subject: [PATCH 0003/1761] Remove tests reliant on sandbox.run_setup --- setuptools/tests/test_easy_install.py | 366 +------------------------- 1 file changed, 2 insertions(+), 364 deletions(-) diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index 380e92c443..c7026852fa 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -5,7 +5,6 @@ import os import tempfile import site -import contextlib import tarfile import logging import itertools @@ -15,21 +14,18 @@ import mock import time import re -import pathlib import pytest -from jaraco import path -from setuptools.sandbox import run_setup import setuptools.command.easy_install as ei from setuptools.command.easy_install import ( EasyInstallDeprecationWarning, ScriptWriter, PthDistributions, WindowsScriptWriter, ) from setuptools.dist import Distribution -from pkg_resources import normalize_path, working_set +from pkg_resources import normalize_path from pkg_resources import Distribution as PRDistribution -from setuptools.tests.server import MockServer, path_to_url +from setuptools.tests.server import MockServer import pkg_resources from . 
import contexts @@ -369,364 +365,6 @@ def mock_index(): return p_index -class TestDistutilsPackage: - def test_bdist_egg_available_on_distutils_pkg(self, distutils_package): - run_setup('setup.py', ['bdist_egg']) - - -class TestSetupRequires: - - @staticmethod - @contextlib.contextmanager - def create_sdist(): - """ - Return an sdist with a setup_requires dependency (of something that - doesn't exist) - """ - with contexts.tempdir() as dir: - dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') - make_sdist(dist_path, [ - ('setup.py', DALS(""" - import setuptools - setuptools.setup( - name="setuptools-test-fetcher", - version="1.0", - setup_requires = ['does-not-exist'], - ) - """)), - ('setup.cfg', ''), - ]) - yield dist_path - - use_setup_cfg = ( - (), - ('dependency_links',), - ('setup_requires',), - ('dependency_links', 'setup_requires'), - ) - - @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg) - def test_setup_requires_overrides_version_conflict(self, use_setup_cfg): - """ - Regression test for distribution issue 323: - https://bitbucket.org/tarek/distribute/issues/323 - - Ensures that a distribution's setup_requires requirements can still be - installed and used locally even if a conflicting version of that - requirement is already on the path. - """ - - fake_dist = PRDistribution('does-not-matter', project_name='foobar', - version='0.0') - working_set.add(fake_dist) - - with contexts.save_pkg_resources_state(): - with contexts.tempdir() as temp_dir: - test_pkg = create_setup_requires_package( - temp_dir, use_setup_cfg=use_setup_cfg) - test_setup_py = os.path.join(test_pkg, 'setup.py') - with contexts.quiet() as (stdout, stderr): - # Don't even need to install the package, just - # running the setup.py at all is sufficient - run_setup(test_setup_py, [str('--name')]) - - lines = stdout.readlines() - assert len(lines) > 0 - assert lines[-1].strip() == 'test_pkg' - - @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg) - def test_setup_requires_override_nspkg(self, use_setup_cfg): - """ - Like ``test_setup_requires_overrides_version_conflict`` but where the - ``setup_requires`` package is part of a namespace package that has - *already* been imported. 
- """ - - with contexts.save_pkg_resources_state(): - with contexts.tempdir() as temp_dir: - foobar_1_archive = os.path.join(temp_dir, 'foo.bar-0.1.tar.gz') - make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1') - # Now actually go ahead an extract to the temp dir and add the - # extracted path to sys.path so foo.bar v0.1 is importable - foobar_1_dir = os.path.join(temp_dir, 'foo.bar-0.1') - os.mkdir(foobar_1_dir) - with tarfile.open(foobar_1_archive) as tf: - tf.extractall(foobar_1_dir) - sys.path.insert(1, foobar_1_dir) - - dist = PRDistribution(foobar_1_dir, project_name='foo.bar', - version='0.1') - working_set.add(dist) - - template = DALS("""\ - import foo # Even with foo imported first the - # setup_requires package should override - import setuptools - setuptools.setup(**%r) - - if not (hasattr(foo, '__path__') and - len(foo.__path__) == 2): - print('FAIL') - - if 'foo.bar-0.2' not in foo.__path__[0]: - print('FAIL') - """) - - test_pkg = create_setup_requires_package( - temp_dir, 'foo.bar', '0.2', make_nspkg_sdist, template, - use_setup_cfg=use_setup_cfg) - - test_setup_py = os.path.join(test_pkg, 'setup.py') - - with contexts.quiet() as (stdout, stderr): - try: - # Don't even need to install the package, just - # running the setup.py at all is sufficient - run_setup(test_setup_py, [str('--name')]) - except pkg_resources.VersionConflict: - self.fail( - 'Installing setup.py requirements ' - 'caused a VersionConflict') - - assert 'FAIL' not in stdout.getvalue() - lines = stdout.readlines() - assert len(lines) > 0 - assert lines[-1].strip() == 'test_pkg' - - @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg) - def test_setup_requires_with_attr_version(self, use_setup_cfg): - def make_dependency_sdist(dist_path, distname, version): - files = [( - 'setup.py', - DALS(""" - import setuptools - setuptools.setup( - name={name!r}, - version={version!r}, - py_modules=[{name!r}], - ) - """.format(name=distname, version=version)), - ), ( - distname + '.py', - DALS(""" - version = 42 - """), - )] - make_sdist(dist_path, files) - with contexts.save_pkg_resources_state(): - with contexts.tempdir() as temp_dir: - test_pkg = create_setup_requires_package( - temp_dir, setup_attrs=dict(version='attr: foobar.version'), - make_package=make_dependency_sdist, - use_setup_cfg=use_setup_cfg + ('version',), - ) - test_setup_py = os.path.join(test_pkg, 'setup.py') - with contexts.quiet() as (stdout, stderr): - run_setup(test_setup_py, [str('--version')]) - lines = stdout.readlines() - assert len(lines) > 0 - assert lines[-1].strip() == '42' - - def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch): - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) - monkeypatch.setenv('PIP_NO_INDEX', 'false') - monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url) - with contexts.save_pkg_resources_state(): - with contexts.tempdir() as temp_dir: - test_pkg = create_setup_requires_package( - temp_dir, 'python-xlib', '0.19', - setup_attrs=dict(dependency_links=[])) - test_setup_cfg = os.path.join(test_pkg, 'setup.cfg') - with open(test_setup_cfg, 'w') as fp: - fp.write(DALS( - ''' - [easy_install] - index_url = https://pypi.org/legacy/ - ''')) - test_setup_py = os.path.join(test_pkg, 'setup.py') - with pytest.raises(distutils.errors.DistutilsError): - run_setup(test_setup_py, [str('--version')]) - assert len(mock_index.requests) == 1 - assert mock_index.requests[0].path == '/python-xlib/' - - def test_setup_requires_with_pep508_url(self, mock_index, 
monkeypatch): - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) - monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url) - with contexts.save_pkg_resources_state(): - with contexts.tempdir() as temp_dir: - dep_sdist = os.path.join(temp_dir, 'dep.tar.gz') - make_trivial_sdist(dep_sdist, 'dependency', '42') - dep_url = path_to_url(dep_sdist, authority='localhost') - test_pkg = create_setup_requires_package( - temp_dir, - # Ignored (overridden by setup_attrs) - 'python-xlib', '0.19', - setup_attrs=dict( - setup_requires='dependency @ %s' % dep_url)) - test_setup_py = os.path.join(test_pkg, 'setup.py') - run_setup(test_setup_py, [str('--version')]) - assert len(mock_index.requests) == 0 - - def test_setup_requires_with_allow_hosts(self, mock_index): - ''' The `allow-hosts` option in not supported anymore. ''' - files = { - 'test_pkg': { - 'setup.py': DALS(''' - from setuptools import setup - setup(setup_requires='python-xlib') - '''), - 'setup.cfg': DALS(''' - [easy_install] - allow_hosts = * - '''), - } - } - with contexts.save_pkg_resources_state(): - with contexts.tempdir() as temp_dir: - path.build(files, prefix=temp_dir) - setup_py = str(pathlib.Path(temp_dir, 'test_pkg', 'setup.py')) - with pytest.raises(distutils.errors.DistutilsError): - run_setup(setup_py, [str('--version')]) - assert len(mock_index.requests) == 0 - - def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir): - ''' Check `python_requires` is honored. ''' - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) - monkeypatch.setenv(str('PIP_NO_INDEX'), str('1')) - monkeypatch.setenv(str('PIP_VERBOSE'), str('1')) - dep_1_0_sdist = 'dep-1.0.tar.gz' - dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist)) - dep_1_0_python_requires = '>=2.7' - make_python_requires_sdist( - str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires) - dep_2_0_sdist = 'dep-2.0.tar.gz' - dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist)) - dep_2_0_python_requires = '!=' + '.'.join( - map(str, sys.version_info[:2])) + '.*' - make_python_requires_sdist( - str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires) - index = tmpdir / 'index.html' - index.write_text(DALS( - ''' - - Links for dep - -
-                    <h1>Links for dep</h1>
-                    <a href="{dep_1_0_url}" data-requires-python="{dep_1_0_python_requires}">{dep_1_0_sdist}</a><br/>
-                    <a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/>
- - - ''').format( # noqa - dep_1_0_url=dep_1_0_url, - dep_1_0_sdist=dep_1_0_sdist, - dep_1_0_python_requires=dep_1_0_python_requires, - dep_2_0_url=dep_2_0_url, - dep_2_0_sdist=dep_2_0_sdist, - dep_2_0_python_requires=dep_2_0_python_requires, - ), 'utf-8') - index_url = path_to_url(str(index)) - with contexts.save_pkg_resources_state(): - test_pkg = create_setup_requires_package( - str(tmpdir), - 'python-xlib', '0.19', # Ignored (overridden by setup_attrs). - setup_attrs=dict( - setup_requires='dep', dependency_links=[index_url])) - test_setup_py = os.path.join(test_pkg, 'setup.py') - run_setup(test_setup_py, [str('--version')]) - eggs = list(map(str, pkg_resources.find_distributions( - os.path.join(test_pkg, '.eggs')))) - assert eggs == ['dep 1.0'] - - @pytest.mark.parametrize( - 'with_dependency_links_in_setup_py', - (False, True)) - def test_setup_requires_with_find_links_in_setup_cfg( - self, monkeypatch, - with_dependency_links_in_setup_py): - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) - with contexts.save_pkg_resources_state(): - with contexts.tempdir() as temp_dir: - make_trivial_sdist( - os.path.join(temp_dir, 'python-xlib-42.tar.gz'), - 'python-xlib', - '42') - test_pkg = os.path.join(temp_dir, 'test_pkg') - test_setup_py = os.path.join(test_pkg, 'setup.py') - test_setup_cfg = os.path.join(test_pkg, 'setup.cfg') - os.mkdir(test_pkg) - with open(test_setup_py, 'w') as fp: - if with_dependency_links_in_setup_py: - dependency_links = [os.path.join(temp_dir, 'links')] - else: - dependency_links = [] - fp.write(DALS( - ''' - from setuptools import installer, setup - setup(setup_requires='python-xlib==42', - dependency_links={dependency_links!r}) - ''').format( - dependency_links=dependency_links)) - with open(test_setup_cfg, 'w') as fp: - fp.write(DALS( - ''' - [easy_install] - index_url = {index_url} - find_links = {find_links} - ''').format(index_url=os.path.join(temp_dir, 'index'), - find_links=temp_dir)) - run_setup(test_setup_py, [str('--version')]) - - def test_setup_requires_with_transitive_extra_dependency( - self, monkeypatch): - # Use case: installing a package with a build dependency on - # an already installed `dep[extra]`, which in turn depends - # on `extra_dep` (whose is not already installed). - with contexts.save_pkg_resources_state(): - with contexts.tempdir() as temp_dir: - # Create source distribution for `extra_dep`. - make_trivial_sdist( - os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'), - 'extra_dep', '1.0') - # Create source tree for `dep`. - dep_pkg = os.path.join(temp_dir, 'dep') - os.mkdir(dep_pkg) - path.build({ - 'setup.py': - DALS(""" - import setuptools - setuptools.setup( - name='dep', version='2.0', - extras_require={'extra': ['extra_dep']}, - ) - """), - 'setup.cfg': '', - }, prefix=dep_pkg) - # "Install" dep. - run_setup( - os.path.join(dep_pkg, 'setup.py'), [str('dist_info')]) - working_set.add_entry(dep_pkg) - # Create source tree for test package. - test_pkg = os.path.join(temp_dir, 'test_pkg') - test_setup_py = os.path.join(test_pkg, 'setup.py') - os.mkdir(test_pkg) - with open(test_setup_py, 'w') as fp: - fp.write(DALS( - ''' - from setuptools import installer, setup - setup(setup_requires='dep[extra]') - ''')) - # Check... 
- monkeypatch.setenv(str('PIP_FIND_LINKS'), str(temp_dir)) - monkeypatch.setenv(str('PIP_NO_INDEX'), str('1')) - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) - run_setup(test_setup_py, [str('--version')]) - - def make_trivial_sdist(dist_path, distname, version): """ Create a simple sdist tarball at dist_path, containing just a simple From 8d12d6196c369c7cf0164a1202e968dd68a2cb6c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 26 Nov 2021 15:18:28 -0500 Subject: [PATCH 0004/1761] Remove sandbox module and tests --- setuptools/sandbox.py | 530 ------------------------------- setuptools/tests/test_sandbox.py | 134 -------- 2 files changed, 664 deletions(-) delete mode 100644 setuptools/sandbox.py delete mode 100644 setuptools/tests/test_sandbox.py diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py deleted file mode 100644 index 034fc80d20..0000000000 --- a/setuptools/sandbox.py +++ /dev/null @@ -1,530 +0,0 @@ -import os -import sys -import tempfile -import operator -import functools -import itertools -import re -import contextlib -import pickle -import textwrap -import builtins - -import pkg_resources -from distutils.errors import DistutilsError -from pkg_resources import working_set - -if sys.platform.startswith('java'): - import org.python.modules.posix.PosixModule as _os -else: - _os = sys.modules[os.name] -try: - _file = file -except NameError: - _file = None -_open = open - - -__all__ = [ - "AbstractSandbox", - "DirectorySandbox", - "SandboxViolation", - "run_setup", -] - - -def _execfile(filename, globals, locals=None): - """ - Python 3 implementation of execfile. - """ - mode = 'rb' - with open(filename, mode) as stream: - script = stream.read() - if locals is None: - locals = globals - code = compile(script, filename, 'exec') - exec(code, globals, locals) - - -@contextlib.contextmanager -def save_argv(repl=None): - saved = sys.argv[:] - if repl is not None: - sys.argv[:] = repl - try: - yield saved - finally: - sys.argv[:] = saved - - -@contextlib.contextmanager -def save_path(): - saved = sys.path[:] - try: - yield saved - finally: - sys.path[:] = saved - - -@contextlib.contextmanager -def override_temp(replacement): - """ - Monkey-patch tempfile.tempdir with replacement, ensuring it exists - """ - os.makedirs(replacement, exist_ok=True) - - saved = tempfile.tempdir - - tempfile.tempdir = replacement - - try: - yield - finally: - tempfile.tempdir = saved - - -@contextlib.contextmanager -def pushd(target): - saved = os.getcwd() - os.chdir(target) - try: - yield saved - finally: - os.chdir(saved) - - -class UnpickleableException(Exception): - """ - An exception representing another Exception that could not be pickled. - """ - - @staticmethod - def dump(type, exc): - """ - Always return a dumped (pickled) type and exc. If exc can't be pickled, - wrap it in UnpickleableException first. - """ - try: - return pickle.dumps(type), pickle.dumps(exc) - except Exception: - # get UnpickleableException inside the sandbox - from setuptools.sandbox import UnpickleableException as cls - - return cls.dump(cls, cls(repr(exc))) - - -class ExceptionSaver: - """ - A Context Manager that will save an exception, serialized, and restore it - later. 
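-
-    Usage sketch (illustrative; reconstructed from the tests removed later
-    in this patch, not a line of the original module)::
-
-        with ExceptionSaver() as saved_exc:
-            raise ValueError('details')
-
-        saved_exc.resume()  # re-raises the saved ValueError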
- """ - - def __enter__(self): - return self - - def __exit__(self, type, exc, tb): - if not exc: - return - - # dump the exception - self._saved = UnpickleableException.dump(type, exc) - self._tb = tb - - # suppress the exception - return True - - def resume(self): - "restore and re-raise any exception" - - if '_saved' not in vars(self): - return - - type, exc = map(pickle.loads, self._saved) - raise exc.with_traceback(self._tb) - - -@contextlib.contextmanager -def save_modules(): - """ - Context in which imported modules are saved. - - Translates exceptions internal to the context into the equivalent exception - outside the context. - """ - saved = sys.modules.copy() - with ExceptionSaver() as saved_exc: - yield saved - - sys.modules.update(saved) - # remove any modules imported since - del_modules = ( - mod_name - for mod_name in sys.modules - if mod_name not in saved - # exclude any encodings modules. See #285 - and not mod_name.startswith('encodings.') - ) - _clear_modules(del_modules) - - saved_exc.resume() - - -def _clear_modules(module_names): - for mod_name in list(module_names): - del sys.modules[mod_name] - - -@contextlib.contextmanager -def save_pkg_resources_state(): - saved = pkg_resources.__getstate__() - try: - yield saved - finally: - pkg_resources.__setstate__(saved) - - -@contextlib.contextmanager -def setup_context(setup_dir): - temp_dir = os.path.join(setup_dir, 'temp') - with save_pkg_resources_state(): - with save_modules(): - with save_path(): - hide_setuptools() - with save_argv(): - with override_temp(temp_dir): - with pushd(setup_dir): - # ensure setuptools commands are available - __import__('setuptools') - yield - - -_MODULES_TO_HIDE = { - 'setuptools', - 'distutils', - 'pkg_resources', - 'Cython', - '_distutils_hack', -} - - -def _needs_hiding(mod_name): - """ - >>> _needs_hiding('setuptools') - True - >>> _needs_hiding('pkg_resources') - True - >>> _needs_hiding('setuptools_plugin') - False - >>> _needs_hiding('setuptools.__init__') - True - >>> _needs_hiding('distutils') - True - >>> _needs_hiding('os') - False - >>> _needs_hiding('Cython') - True - """ - base_module = mod_name.split('.', 1)[0] - return base_module in _MODULES_TO_HIDE - - -def hide_setuptools(): - """ - Remove references to setuptools' modules from sys.modules to allow the - invocation to import the most appropriate setuptools. This technique is - necessary to avoid issues such as #315 where setuptools upgrading itself - would fail to find a function declared in the metadata. 
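-
-    Intended effect, roughly (an illustrative sketch, not part of the
-    original docstring)::
-
-        hide_setuptools()
-        assert 'setuptools' not in sys.modules
-        import setuptools  # re-imported from whatever is first on sys.path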
- """ - _distutils_hack = sys.modules.get('_distutils_hack', None) - if _distutils_hack is not None: - _distutils_hack.remove_shim() - - modules = filter(_needs_hiding, sys.modules) - _clear_modules(modules) - - -def run_setup(setup_script, args): - """Run a distutils setup script, sandboxed in its directory""" - setup_dir = os.path.abspath(os.path.dirname(setup_script)) - with setup_context(setup_dir): - try: - sys.argv[:] = [setup_script] + list(args) - sys.path.insert(0, setup_dir) - # reset to include setup dir, w/clean callback list - working_set.__init__() - working_set.callbacks.append(lambda dist: dist.activate()) - - with DirectorySandbox(setup_dir): - ns = dict(__file__=setup_script, __name__='__main__') - _execfile(setup_script, ns) - except SystemExit as v: - if v.args and v.args[0]: - raise - # Normal exit, just return - - -class AbstractSandbox: - """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" - - _active = False - - def __init__(self): - self._attrs = [ - name - for name in dir(_os) - if not name.startswith('_') and hasattr(self, name) - ] - - def _copy(self, source): - for name in self._attrs: - setattr(os, name, getattr(source, name)) - - def __enter__(self): - self._copy(self) - if _file: - builtins.file = self._file - builtins.open = self._open - self._active = True - - def __exit__(self, exc_type, exc_value, traceback): - self._active = False - if _file: - builtins.file = _file - builtins.open = _open - self._copy(_os) - - def run(self, func): - """Run 'func' under os sandboxing""" - with self: - return func() - - def _mk_dual_path_wrapper(name): - original = getattr(_os, name) - - def wrap(self, src, dst, *args, **kw): - if self._active: - src, dst = self._remap_pair(name, src, dst, *args, **kw) - return original(src, dst, *args, **kw) - - return wrap - - for name in ["rename", "link", "symlink"]: - if hasattr(_os, name): - locals()[name] = _mk_dual_path_wrapper(name) - - def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os, name) - - def wrap(self, path, *args, **kw): - if self._active: - path = self._remap_input(name, path, *args, **kw) - return original(path, *args, **kw) - - return wrap - - if _file: - _file = _mk_single_path_wrapper('file', _file) - _open = _mk_single_path_wrapper('open', _open) - for name in [ - "stat", - "listdir", - "chdir", - "open", - "chmod", - "chown", - "mkdir", - "remove", - "unlink", - "rmdir", - "utime", - "lchown", - "chroot", - "lstat", - "startfile", - "mkfifo", - "mknod", - "pathconf", - "access", - ]: - if hasattr(_os, name): - locals()[name] = _mk_single_path_wrapper(name) - - def _mk_single_with_return(name): - original = getattr(_os, name) - - def wrap(self, path, *args, **kw): - if self._active: - path = self._remap_input(name, path, *args, **kw) - return self._remap_output(name, original(path, *args, **kw)) - return original(path, *args, **kw) - - return wrap - - for name in ['readlink', 'tempnam']: - if hasattr(_os, name): - locals()[name] = _mk_single_with_return(name) - - def _mk_query(name): - original = getattr(_os, name) - - def wrap(self, *args, **kw): - retval = original(*args, **kw) - if self._active: - return self._remap_output(name, retval) - return retval - - return wrap - - for name in ['getcwd', 'tmpnam']: - if hasattr(_os, name): - locals()[name] = _mk_query(name) - - def _validate_path(self, path): - """Called to remap or validate any path, whether input or output""" - return path - - def _remap_input(self, operation, path, *args, **kw): - """Called 
for path inputs""" - return self._validate_path(path) - - def _remap_output(self, operation, path): - """Called for path outputs""" - return self._validate_path(path) - - def _remap_pair(self, operation, src, dst, *args, **kw): - """Called for path pairs like rename, link, and symlink operations""" - return ( - self._remap_input(operation + '-from', src, *args, **kw), - self._remap_input(operation + '-to', dst, *args, **kw), - ) - - -if hasattr(os, 'devnull'): - _EXCEPTIONS = [os.devnull] -else: - _EXCEPTIONS = [] - - -class DirectorySandbox(AbstractSandbox): - """Restrict operations to a single subdirectory - pseudo-chroot""" - - write_ops = dict.fromkeys( - [ - "open", - "chmod", - "chown", - "mkdir", - "remove", - "unlink", - "rmdir", - "utime", - "lchown", - "chroot", - "mkfifo", - "mknod", - "tempnam", - ] - ) - - _exception_patterns = [] - "exempt writing to paths that match the pattern" - - def __init__(self, sandbox, exceptions=_EXCEPTIONS): - self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox, '') - self._exceptions = [ - os.path.normcase(os.path.realpath(path)) for path in exceptions - ] - AbstractSandbox.__init__(self) - - def _violation(self, operation, *args, **kw): - from setuptools.sandbox import SandboxViolation - - raise SandboxViolation(operation, args, kw) - - if _file: - - def _file(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("file", path, mode, *args, **kw) - return _file(path, mode, *args, **kw) - - def _open(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("open", path, mode, *args, **kw) - return _open(path, mode, *args, **kw) - - def tmpnam(self): - self._violation("tmpnam") - - def _ok(self, path): - active = self._active - try: - self._active = False - realpath = os.path.normcase(os.path.realpath(path)) - return ( - self._exempted(realpath) - or realpath == self._sandbox - or realpath.startswith(self._prefix) - ) - finally: - self._active = active - - def _exempted(self, filepath): - start_matches = ( - filepath.startswith(exception) for exception in self._exceptions - ) - pattern_matches = ( - re.match(pattern, filepath) for pattern in self._exception_patterns - ) - candidates = itertools.chain(start_matches, pattern_matches) - return any(candidates) - - def _remap_input(self, operation, path, *args, **kw): - """Called for path inputs""" - if operation in self.write_ops and not self._ok(path): - self._violation(operation, os.path.realpath(path), *args, **kw) - return path - - def _remap_pair(self, operation, src, dst, *args, **kw): - """Called for path pairs like rename, link, and symlink operations""" - if not self._ok(src) or not self._ok(dst): - self._violation(operation, src, dst, *args, **kw) - return (src, dst) - - def open(self, file, flags, mode=0o777, *args, **kw): - """Called for low-level os.open()""" - if flags & WRITE_FLAGS and not self._ok(file): - self._violation("os.open", file, flags, mode, *args, **kw) - return _os.open(file, flags, mode, *args, **kw) - - -WRITE_FLAGS = functools.reduce( - operator.or_, - [ - getattr(_os, a, 0) - for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split() - ], -) - - -class SandboxViolation(DistutilsError): - """A setup script attempted to modify the filesystem outside the sandbox""" - - tmpl = textwrap.dedent( - """ - SandboxViolation: {cmd}{args!r} {kwargs} - - The package setup script has 
attempted to modify files on your system - that are not within the EasyInstall build area, and has been aborted. - - This package cannot be safely installed by EasyInstall, and may not - support alternate installation locations even if you run its setup - script by hand. Please inform the package's author and the EasyInstall - maintainers to find out if a fix or workaround is available. - """ - ).lstrip() - - def __str__(self): - cmd, args, kwargs = self.args - return self.tmpl.format(**locals()) diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py deleted file mode 100644 index 99398cdb93..0000000000 --- a/setuptools/tests/test_sandbox.py +++ /dev/null @@ -1,134 +0,0 @@ -"""develop tests -""" -import os -import types - -import pytest - -import pkg_resources -import setuptools.sandbox - - -class TestSandbox: - def test_devnull(self, tmpdir): - with setuptools.sandbox.DirectorySandbox(str(tmpdir)): - self._file_writer(os.devnull) - - @staticmethod - def _file_writer(path): - def do_write(): - with open(path, 'w') as f: - f.write('xxx') - - return do_write - - def test_setup_py_with_BOM(self): - """ - It should be possible to execute a setup.py with a Byte Order Mark - """ - target = pkg_resources.resource_filename( - __name__, - 'script-with-bom.py') - namespace = types.ModuleType('namespace') - setuptools.sandbox._execfile(target, vars(namespace)) - assert namespace.result == 'passed' - - def test_setup_py_with_CRLF(self, tmpdir): - setup_py = tmpdir / 'setup.py' - with setup_py.open('wb') as stream: - stream.write(b'"degenerate script"\r\n') - setuptools.sandbox._execfile(str(setup_py), globals()) - - -class TestExceptionSaver: - def test_exception_trapped(self): - with setuptools.sandbox.ExceptionSaver(): - raise ValueError("details") - - def test_exception_resumed(self): - with setuptools.sandbox.ExceptionSaver() as saved_exc: - raise ValueError("details") - - with pytest.raises(ValueError) as caught: - saved_exc.resume() - - assert isinstance(caught.value, ValueError) - assert str(caught.value) == 'details' - - def test_exception_reconstructed(self): - orig_exc = ValueError("details") - - with setuptools.sandbox.ExceptionSaver() as saved_exc: - raise orig_exc - - with pytest.raises(ValueError) as caught: - saved_exc.resume() - - assert isinstance(caught.value, ValueError) - assert caught.value is not orig_exc - - def test_no_exception_passes_quietly(self): - with setuptools.sandbox.ExceptionSaver() as saved_exc: - pass - - saved_exc.resume() - - def test_unpickleable_exception(self): - class CantPickleThis(Exception): - "This Exception is unpickleable because it's not in globals" - def __repr__(self): - return 'CantPickleThis%r' % (self.args,) - - with setuptools.sandbox.ExceptionSaver() as saved_exc: - raise CantPickleThis('detail') - - with pytest.raises(setuptools.sandbox.UnpickleableException) as caught: - saved_exc.resume() - - assert str(caught.value) == "CantPickleThis('detail',)" - - def test_unpickleable_exception_when_hiding_setuptools(self): - """ - As revealed in #440, an infinite recursion can occur if an unpickleable - exception while setuptools is hidden. Ensure this doesn't happen. - """ - - class ExceptionUnderTest(Exception): - """ - An unpickleable exception (not in globals). 
- """ - - with pytest.raises(setuptools.sandbox.UnpickleableException) as caught: - with setuptools.sandbox.save_modules(): - setuptools.sandbox.hide_setuptools() - raise ExceptionUnderTest() - - msg, = caught.value.args - assert msg == 'ExceptionUnderTest()' - - def test_sandbox_violation_raised_hiding_setuptools(self, tmpdir): - """ - When in a sandbox with setuptools hidden, a SandboxViolation - should reflect a proper exception and not be wrapped in - an UnpickleableException. - """ - - def write_file(): - "Trigger a SandboxViolation by writing outside the sandbox" - with open('/etc/foo', 'w'): - pass - - with pytest.raises(setuptools.sandbox.SandboxViolation) as caught: - with setuptools.sandbox.save_modules(): - setuptools.sandbox.hide_setuptools() - with setuptools.sandbox.DirectorySandbox(str(tmpdir)): - write_file() - - cmd, args, kwargs = caught.value.args - assert cmd == 'open' - assert args == ('/etc/foo', 'w') - assert kwargs == {} - - msg = str(caught.value) - assert 'open' in msg - assert "('/etc/foo', 'w')" in msg From d10ffc7ccf3db87abe5771becc9ffc02d03a356b Mon Sep 17 00:00:00 2001 From: Mohit Saxena Date: Sat, 3 Dec 2022 20:49:08 +0530 Subject: [PATCH 0005/1761] sync with remote --- .github/workflows/main.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7413fa7de4..edac6187ef 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -40,6 +40,31 @@ env: jobs: + check_changed_folders: + name: Check files + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Get changed files in the _distutils folder + id: changed-files-specific-distutils + uses: tj-actions/changed-files@v34 + with: + files: | + setuptools/_distutils/** + - name: Get changed files in the _vendor folder + id: changed-files-specific-vendor + uses: tj-actions/changed-files@v34 + with: + files: | + setuptools/_vendor/** + - name: Run step if any file(s) in the docs folder change + if: steps.changed-files-specific-distutils.outputs.any_changed == 'true' || steps.changed-files-specific-vendor.outputs.any_changed == 'true' + run: | + echo "One or more files in the setuptools/_distutils or setuptools/_vendor folder has changed." 
+ exit 1 test: strategy: matrix: @@ -111,6 +136,7 @@ jobs: - integration-test - test - test_cygwin + - check_changed_folders runs-on: ubuntu-latest From f04ba1f8201d61d4f8341bae94145eb65c944958 Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Sat, 17 Dec 2022 03:26:07 +0530 Subject: [PATCH 0006/1761] Update main.yml --- .github/workflows/main.yml | 51 +++++++++++++++++++------------------- 1 file changed, 26 insertions(+), 25 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index edac6187ef..da320cbafb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -40,31 +40,6 @@ env: jobs: - check_changed_folders: - name: Check files - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Get changed files in the _distutils folder - id: changed-files-specific-distutils - uses: tj-actions/changed-files@v34 - with: - files: | - setuptools/_distutils/** - - name: Get changed files in the _vendor folder - id: changed-files-specific-vendor - uses: tj-actions/changed-files@v34 - with: - files: | - setuptools/_vendor/** - - name: Run step if any file(s) in the docs folder change - if: steps.changed-files-specific-distutils.outputs.any_changed == 'true' || steps.changed-files-specific-vendor.outputs.any_changed == 'true' - run: | - echo "One or more files in the setuptools/_distutils or setuptools/_vendor folder has changed." - exit 1 test: strategy: matrix: @@ -199,6 +174,32 @@ jobs: OS-${{ runner.os }}, VM-${{ matrix.platform }}, Py-${{ steps.python-install.outputs.python-version }} + + check_changed_folders: + name: Check files + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Get changed files in the _distutils folder + id: changed-files-specific-distutils + uses: tj-actions/changed-files@v34 + with: + files: | + setuptools/_distutils/** + - name: Get changed files in the _vendor folder + id: changed-files-specific-vendor + uses: tj-actions/changed-files@v34 + with: + files: | + setuptools/_vendor/** + - name: Run step if any file(s) in the docs folder change + if: steps.changed-files-specific-distutils.outputs.any_changed == 'true' || steps.changed-files-specific-vendor.outputs.any_changed == 'true' + run: | + echo "One or more files in the setuptools/_distutils or setuptools/_vendor folder has changed." 
+ exit 1 integration-test: needs: test From 5293c30b7a8e795273f6fe7a132851ff365f0251 Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Thu, 22 Dec 2022 23:35:18 +0530 Subject: [PATCH 0007/1761] Update .github/workflows/main.yml Co-authored-by: Sviatoslav Sydorenko --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index da320cbafb..558391bb80 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -108,10 +108,10 @@ jobs: if: always() needs: + - check_changed_folders - integration-test - test - test_cygwin - - check_changed_folders runs-on: ubuntu-latest From 01df0f6aff1ddeae5cb6e44c9e3511b3fdd0a030 Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Thu, 22 Dec 2022 23:35:28 +0530 Subject: [PATCH 0008/1761] Update .github/workflows/main.yml Co-authored-by: Sviatoslav Sydorenko --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 558391bb80..64c210eb2c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -175,7 +175,7 @@ jobs: VM-${{ matrix.platform }}, Py-${{ steps.python-install.outputs.python-version }} - check_changed_folders: + check-changed-folders: name: Check files runs-on: ubuntu-latest steps: From 8b704e647b906d4e87734673491129c346c2c398 Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Thu, 22 Dec 2022 23:35:38 +0530 Subject: [PATCH 0009/1761] Update .github/workflows/main.yml Co-authored-by: Sviatoslav Sydorenko --- .github/workflows/main.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 64c210eb2c..be6afcb52c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -196,7 +196,9 @@ jobs: files: | setuptools/_vendor/** - name: Run step if any file(s) in the docs folder change - if: steps.changed-files-specific-distutils.outputs.any_changed == 'true' || steps.changed-files-specific-vendor.outputs.any_changed == 'true' + if: >- + steps.changed-files-specific-distutils.outputs.any_changed == 'true' + || steps.changed-files-specific-vendor.outputs.any_changed == 'true' run: | echo "One or more files in the setuptools/_distutils or setuptools/_vendor folder has changed." 
exit 1 From e8c52c81d86e370e53a1ef42f8e765d004db53b4 Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Fri, 23 Dec 2022 00:34:43 +0530 Subject: [PATCH 0010/1761] Update main.yml --- .github/workflows/main.yml | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index be6afcb52c..c777f2465f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -195,13 +195,16 @@ jobs: with: files: | setuptools/_vendor/** - - name: Run step if any file(s) in the docs folder change - if: >- - steps.changed-files-specific-distutils.outputs.any_changed == 'true' - || steps.changed-files-specific-vendor.outputs.any_changed == 'true' + - name: Run step if any file(s) in the _distutils folder change + if: steps.changed-files-specific-distutils.outputs.any_changed == 'true' run: | - echo "One or more files in the setuptools/_distutils or setuptools/_vendor folder has changed." - exit 1 + echo "One or more files in the setuptools/_distutils folder has changed." >> $GITHUB_STEP_SUMMARY + exit 1 + - name: Run step if any file(s) in the _vendor folder change + if: steps.changed-files-specific-vendor.outputs.any_changed == 'true' + run: | + echo "One or more files in the setuptools/_vendor folder has changed." >> $GITHUB_STEP_SUMMARY + exit 1 integration-test: needs: test From 47b0b08de6af29beafadcdb2eb1a9f19d5ceeedd Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Fri, 23 Dec 2022 01:34:07 +0530 Subject: [PATCH 0011/1761] Update .github/workflows/main.yml Co-authored-by: Sviatoslav Sydorenko --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c777f2465f..ba393a8302 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -108,7 +108,7 @@ jobs: if: always() needs: - - check_changed_folders + - check-changed-folders - integration-test - test - test_cygwin From c86f5ec73a1f513064d797119897241b0087d16c Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Fri, 23 Dec 2022 13:45:19 +0530 Subject: [PATCH 0012/1761] Update .github/workflows/main.yml Co-authored-by: Sviatoslav Sydorenko --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ba393a8302..d16e25100a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -198,7 +198,7 @@ jobs: - name: Run step if any file(s) in the _distutils folder change if: steps.changed-files-specific-distutils.outputs.any_changed == 'true' run: | - echo "One or more files in the setuptools/_distutils folder has changed." >> $GITHUB_STEP_SUMMARY + echo "One or more files in the setuptools/_distutils folder has changed." 
| tee "${GITHUB_STEP_SUMMARY}" exit 1 - name: Run step if any file(s) in the _vendor folder change if: steps.changed-files-specific-vendor.outputs.any_changed == 'true' From a4893f7c94e9bf28dbeb5667bbfdf11fe8eb71ee Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Fri, 23 Dec 2022 13:45:43 +0530 Subject: [PATCH 0013/1761] Update main.yml --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d16e25100a..de0b00046a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -203,7 +203,7 @@ jobs: - name: Run step if any file(s) in the _vendor folder change if: steps.changed-files-specific-vendor.outputs.any_changed == 'true' run: | - echo "One or more files in the setuptools/_vendor folder has changed." >> $GITHUB_STEP_SUMMARY + echo "One or more files in the setuptools/_vendor folder has changed." | tee "${GITHUB_STEP_SUMMARY}" exit 1 integration-test: From c9526f1ca85d404d372b0dce1cca979d59586998 Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Mon, 26 Dec 2022 21:51:56 +0530 Subject: [PATCH 0014/1761] Update main.yml --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index de0b00046a..6b88a1feb8 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -177,6 +177,7 @@ jobs: check-changed-folders: name: Check files + if: github.event_name == 'pull_request' runs-on: ubuntu-latest steps: - name: Checkout code From 3ee732000feb686cbd265b5ce5bab3056872d8df Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Wed, 28 Dec 2022 01:22:12 +0530 Subject: [PATCH 0015/1761] Update main.yml --- .github/workflows/main.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6b88a1feb8..8c5b3672a9 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -176,7 +176,6 @@ jobs: Py-${{ steps.python-install.outputs.python-version }} check-changed-folders: - name: Check files if: github.event_name == 'pull_request' runs-on: ubuntu-latest steps: From 8d5633b409e26ec993fd2fa85b5d67080cef1bea Mon Sep 17 00:00:00 2001 From: Mohit Saxena <76725454+mohitsaxenaknoldus@users.noreply.github.com> Date: Wed, 28 Dec 2022 02:10:53 +0530 Subject: [PATCH 0016/1761] Update main.yml --- .github/workflows/main.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8c5b3672a9..8e518c3e5f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -176,6 +176,7 @@ jobs: Py-${{ steps.python-install.outputs.python-version }} check-changed-folders: + name: Fail the job if files changed under _disutils/_vendor folders if: github.event_name == 'pull_request' runs-on: ubuntu-latest steps: @@ -183,24 +184,24 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 0 - - name: Get changed files in the _distutils folder + - name: Check if files changed in the _distutils folder id: changed-files-specific-distutils uses: tj-actions/changed-files@v34 with: files: | setuptools/_distutils/** - - name: Get changed files in the _vendor folder + - name: Check if files changed in the _vendor folder id: changed-files-specific-vendor uses: tj-actions/changed-files@v34 with: files: | 
setuptools/_vendor/** - - name: Run step if any file(s) in the _distutils folder change + - name: Fail the job if any file(s) in the _distutils folder change if: steps.changed-files-specific-distutils.outputs.any_changed == 'true' run: | echo "One or more files in the setuptools/_distutils folder has changed." | tee "${GITHUB_STEP_SUMMARY}" exit 1 - - name: Run step if any file(s) in the _vendor folder change + - name: Fail the job if any file(s) in the _vendor folder change if: steps.changed-files-specific-vendor.outputs.any_changed == 'true' run: | echo "One or more files in the setuptools/_vendor folder has changed." | tee "${GITHUB_STEP_SUMMARY}" From e649e936e79fb5cbbf45f63475934faa3cb0f4bc Mon Sep 17 00:00:00 2001 From: Lisandro Dalcin Date: Tue, 28 Feb 2023 08:55:11 +0300 Subject: [PATCH 0017/1761] Fix accumulating flags after compile/link --- distutils/ccompiler.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py index 1818fce901..ae60578ac2 100644 --- a/distutils/ccompiler.py +++ b/distutils/ccompiler.py @@ -382,7 +382,7 @@ def _fix_compile_args(self, output_dir, macros, include_dirs): raise TypeError("'output_dir' must be a string or None") if macros is None: - macros = self.macros + macros = list(self.macros) elif isinstance(macros, list): macros = macros + (self.macros or []) else: @@ -441,14 +441,14 @@ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): fixed versions of all arguments. """ if libraries is None: - libraries = self.libraries + libraries = list(self.libraries) elif isinstance(libraries, (list, tuple)): libraries = list(libraries) + (self.libraries or []) else: raise TypeError("'libraries' (if supplied) must be a list of strings") if library_dirs is None: - library_dirs = self.library_dirs + library_dirs = list(self.library_dirs) elif isinstance(library_dirs, (list, tuple)): library_dirs = list(library_dirs) + (self.library_dirs or []) else: @@ -458,7 +458,7 @@ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): library_dirs += self.__class__.library_dirs if runtime_library_dirs is None: - runtime_library_dirs = self.runtime_library_dirs + runtime_library_dirs = list(self.runtime_library_dirs) elif isinstance(runtime_library_dirs, (list, tuple)): runtime_library_dirs = list(runtime_library_dirs) + ( self.runtime_library_dirs or [] From b42197ceb9ac8a0cd95b53092b30c4f51c0c0057 Mon Sep 17 00:00:00 2001 From: DWesl <22566757+DWesl@users.noreply.github.com> Date: Sun, 14 May 2023 07:00:04 -0400 Subject: [PATCH 0018/1761] Port code from CygwinCCompiler to UnixCCompiler https://github.com/python-pillow/Pillow/issues/7158#issuecomment-1546746716 suggests that Cygwin uses UnixCCompiler rather than CygwinCCompiler by default, so UnixCCompiler would need to know how to find shared libraries, import libraries, and static libraries on Cygwin. 
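
As a rough sketch of what these attributes control (illustrative only, not
part of the change itself): CCompiler.library_filename() combines the
matching *_lib_format and *_lib_extension class attributes, so on a Cygwin
host the values added here would resolve roughly as:

    from distutils.unixccompiler import UnixCCompiler

    compiler = UnixCCompiler()
    compiler.library_filename('foo', lib_type='shared')  # 'libfoo.dll.a'
    compiler.library_filename('foo', lib_type='dylib')   # 'cygfoo.dll'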
--- distutils/unixccompiler.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py index 6ca2332ae1..b1139cfbd8 100644 --- a/distutils/unixccompiler.py +++ b/distutils/unixccompiler.py @@ -141,6 +141,10 @@ class UnixCCompiler(CCompiler): xcode_stub_lib_format = dylib_lib_format if sys.platform == "cygwin": exe_extension = ".exe" + shared_lib_extension = ".dll.a" + dylib_lib_extension = ".dll" + static_lib_format = shared_lib_format = "lib%s%s" + dylib_lib_format = "cyg%s%s" def preprocess( self, From 5a8ca1b0f362968a29f2fb9c107cc0d4d79c3263 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 1 Jul 2023 18:20:05 -0400 Subject: [PATCH 0019/1761] Rely on pytest as found in pytest-dev/pytest#11155. Fixes pypa/distutils#186. --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index c42004831a..06657e4eaa 100644 --- a/tox.ini +++ b/tox.ini @@ -5,8 +5,8 @@ toxworkdir={env:TOX_WORK_DIR:.tox} [testenv] deps = - # < 7.2 due to pypa/distutils#186 - pytest < 7.2 + # pypa/distutils#186; workaround for pytest-dev/pytest#10447 + pytest @ git+https://github.com/RonnyPfannschmidt/pytest@fix-10447-maker-mro-order-needs-reverse pytest-flake8 # workaround for tholo/pytest-flake8#87 From ef9a76640ab0c64a502377e2c345d34d052fb48d Mon Sep 17 00:00:00 2001 From: DWesl <22566757+DWesl@users.noreply.github.com> Date: Sun, 6 Aug 2023 18:45:55 -0400 Subject: [PATCH 0020/1761] CI: Install git on Cygwin CI runner Cygwin pip now has a chance to resolve everything on the command line. It won't be able to resolve dependencies, due to something pulling in Rust, but it'll get to the point where pip points out that it is not pip's fault that CI doesn't have Rust compilers for Cygwin --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 60801acecd..dbba53e2b7 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -58,6 +58,7 @@ jobs: gcc-core, gcc-g++, ncompress + git - name: Run tests shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0} run: tox From aa3a9968c9c6944645b2bf5e5e714c82d3c392b9 Mon Sep 17 00:00:00 2001 From: DWesl <22566757+DWesl@users.noreply.github.com> Date: Sun, 6 Aug 2023 19:11:02 -0400 Subject: [PATCH 0021/1761] CI: Try to fix Cygwin tox configuration. jaraco.text depends on inflect; inflect>=6.0.0 depends on Rust. Add an additional rule installing a version of the dependency that will actually install. 
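
For context (illustrative, not part of the change): the pin added below
relies on a PEP 508 environment marker so it only applies on Cygwin, i.e.
a requirement of the general form:

    somepackage<6.0.0; sys_platform == "cygwin"

where ``somepackage`` is a placeholder. The patch itself spells the marker
``sys.platform``, an older spelling of the canonical ``sys_platform``.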
--- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 06657e4eaa..fd858d182e 100644 --- a/tox.ini +++ b/tox.ini @@ -16,6 +16,7 @@ deps = pytest-cov pytest-enabler >= 1.3 + inflect<6.0.0; sys.platform=="cygwin" jaraco.envs>=2.4 jaraco.path jaraco.text From 222b249f4f7ee9c1b2fae7f483db88c031fe4302 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 30 Aug 2023 19:19:22 +0100 Subject: [PATCH 0022/1761] Improve test_rfc822_escape, capturing interoperability requirements --- distutils/tests/test_util.py | 59 ++++++++++++++++++++++++++++++++---- 1 file changed, 53 insertions(+), 6 deletions(-) diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py index 070a277069..22a003d8ca 100644 --- a/distutils/tests/test_util.py +++ b/distutils/tests/test_util.py @@ -1,4 +1,8 @@ """Tests for distutils.util.""" +import email +import email.policy +import email.generator +import io import os import sys import sysconfig as stdlib_sysconfig @@ -184,12 +188,55 @@ def test_strtobool(self): for n in no: assert not strtobool(n) - def test_rfc822_escape(self): - header = 'I am a\npoor\nlonesome\nheader\n' - res = rfc822_escape(header) - wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s' 'header%(8s)s') % { - '8s': '\n' + 8 * ' ' - } + indent = 8 * ' ' + + @pytest.mark.parametrize( + "given,wanted", + [ + # 0x0b, 0x0c, ..., etc are also considered a line break by Python + ("hello\x0b\nworld\n", f"hello\x0b{indent}\n{indent}world\n{indent}"), + ("hello\x1eworld", f"hello\x1e{indent}world"), + ("", ""), + ( + "I am a\npoor\nlonesome\nheader\n", + f"I am a\n{indent}poor\n{indent}lonesome\n{indent}header\n{indent}", + ), + ], + ) + def test_rfc822_escape(self, given, wanted): + """ + We want to ensure a multi-line header parses correctly. + + For interoperability, the escaped value should also "round-trip" over + `email.generator.Generator.flatten` and `email.message_from_*` + (see pypa/setuptools#4033). + + The main issue is that internally `email.policy.EmailPolicy` uses + `splitlines` which will split on some control chars. If all the new lines + are not prefixed with spaces, the parser will interrupt reading + the current header and produce an incomplete value, while + incorrectly interpreting the rest of the headers as part of the payload. 
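+
+        For instance (an illustrative aside, not part of the original
+        docstring), ``str.splitlines`` treats ``\x0b`` as a line boundary::
+
+            >>> 'hello\x0bworld'.splitlines()
+            ['hello', 'world']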
+ """ + res = rfc822_escape(given) + + policy = email.policy.EmailPolicy( + utf8=True, + mangle_from_=False, + max_line_length=0, + ) + with io.StringIO() as buffer: + raw = f"header: {res}\nother-header: 42\n\npayload\n" + orig = email.message_from_string(raw) + email.generator.Generator(buffer, policy=policy).flatten(orig) + buffer.seek(0) + regen = email.message_from_file(buffer) + + for msg in (orig, regen): + assert msg.get_payload() == "payload\n" + assert msg["other-header"] == "42" + # Generator may replace control chars with `\n` + assert set(msg["header"].splitlines()) == set(res.splitlines()) + assert res == wanted def test_dont_write_bytecode(self): From 157fbfed51a405866c9f63cc75c69cfac6b8735e Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 30 Aug 2023 19:24:13 +0100 Subject: [PATCH 0023/1761] Improve TestMetadata, capturing interoperability requirements --- distutils/tests/test_dist.py | 41 ++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/distutils/tests/test_dist.py b/distutils/tests/test_dist.py index 30a6f9ff2e..694bf02a60 100644 --- a/distutils/tests/test_dist.py +++ b/distutils/tests/test_dist.py @@ -1,6 +1,9 @@ """Tests for distutils.dist.""" import os import io +import email +import email.policy +import email.generator import sys import warnings import textwrap @@ -510,3 +513,41 @@ def test_read_metadata(self): assert metadata.platforms is None assert metadata.obsoletes is None assert metadata.requires == ['foo'] + + def test_round_trip_through_email_generator(self): + """ + In pypa/setuptools#4033, it was shown that once PKG-INFO is + re-generated using ``email.generator.Generator``, some control + characters might cause problems. + """ + # Given a PKG-INFO file ... + attrs = { + "name": "package", + "version": "1.0", + "long_description": "hello\x0b\nworld\n", + } + dist = Distribution(attrs) + metadata = dist.metadata + + with io.StringIO() as buffer: + metadata.write_pkg_file(buffer) + msg = buffer.getvalue() + + # ... when it is read and re-written using stdlib's email library, + orig = email.message_from_string(msg) + policy = email.policy.EmailPolicy( + utf8=True, + mangle_from_=False, + max_line_length=0, + ) + with io.StringIO() as buffer: + email.generator.Generator(buffer, policy=policy).flatten(orig) + + buffer.seek(0) + regen = email.message_from_file(buffer) + + # ... then it should be the same as the original + # (except for the specific line break characters) + orig_desc = set(orig["Description"].splitlines()) + regen_desc = set(regen["Description"].splitlines()) + assert regen_desc == orig_desc From 0ece9871247625ed3541b66529ca654039a5d8b5 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 30 Aug 2023 19:26:11 +0100 Subject: [PATCH 0024/1761] Fix interoperability of rfc822_escape with stblib's email library --- distutils/util.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/distutils/util.py b/distutils/util.py index 7ef47176e2..4f94e587e2 100644 --- a/distutils/util.py +++ b/distutils/util.py @@ -508,6 +508,12 @@ def rfc822_escape(header): """Return a version of the string escaped for inclusion in an RFC-822 header, by ensuring there are 8 spaces space after each newline. 
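+
+    For example, the header 'I am a\npoor\nlonesome\nheader\n' becomes
+    'I am a\n        poor\n        lonesome\n        header\n        ',
+    i.e. every newline is followed by the 8-space continuation indent
+    (the same round-trip case exercised in test_util.test_rfc822_escape).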
""" - lines = header.split('\n') - sep = '\n' + 8 * ' ' - return sep.join(lines) + indent = 8 * " " + lines = header.splitlines(keepends=True) + + # Emulate the behaviour of `str.split` + # (the terminal line break in `splitlines` does not result in an extra line): + ends_in_newline = lines and lines[-1].splitlines()[0] != lines[-1] + suffix = indent if ends_in_newline else "" + + return indent.join(lines) + suffix From 0e2032c4754c598ba75e467c64009ba4490ddea9 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 31 Aug 2023 18:42:14 -0400 Subject: [PATCH 0025/1761] Pin against sphinx 7.2.5 as workaround for sphinx/sphinx-doc#11662. Closes jaraco/skeleton#88. --- setup.cfg | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.cfg b/setup.cfg index 46f7bdf799..4f184c7ec5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -45,6 +45,8 @@ testing = docs = # upstream sphinx >= 3.5 + # workaround for sphinx/sphinx-doc#11662 + sphinx < 7.2.5 jaraco.packaging >= 9.3 rst.linker >= 1.9 furo From 92d2d8e1aff997f3877239230c9490ed9cdd1222 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 1 Sep 2023 18:46:27 -0400 Subject: [PATCH 0026/1761] Allow GITHUB_* settings to pass through to tests. --- .github/workflows/main.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b822409927..67d9d3bc40 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -36,6 +36,10 @@ env: # Must be "1". TOX_PARALLEL_NO_SPINNER: 1 + # Ensure tests can sense settings about the environment + TOX_OVERRIDE: >- + testenv.pass_env+=GITHUB_* + jobs: test: From f3dc1f4776c94a9a4a7c0e8c5b49c532b0a7d411 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 1 Sep 2023 18:49:13 -0400 Subject: [PATCH 0027/1761] Remove spinner disablement. If it's not already fixed upstream, that's where it should be fixed. --- .github/workflows/main.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 67d9d3bc40..30c9615d16 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -32,10 +32,6 @@ env: PIP_NO_PYTHON_VERSION_WARNING: 'true' PIP_NO_WARN_SCRIPT_LOCATION: 'true' - # Disable the spinner, noise in GHA; TODO(webknjaz): Fix this upstream - # Must be "1". - TOX_PARALLEL_NO_SPINNER: 1 - # Ensure tests can sense settings about the environment TOX_OVERRIDE: >- testenv.pass_env+=GITHUB_* From 0484daa8a6f72c9ad4e1784f9181c2488a191d8e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 1 Sep 2023 18:53:55 -0400 Subject: [PATCH 0028/1761] Clean up 'color' environment variables. The TOX_TESTENV_PASSENV hasn't been useful for some time and by its mere presence wasted a lot of time today under the assumption that it's doing something. Instead, just rely on one variable FORCE_COLOR. If it's not honored, then that should be the fix upstream. --- .github/workflows/main.yml | 20 +------------------- 1 file changed, 1 insertion(+), 19 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 30c9615d16..f302854902 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,26 +6,8 @@ permissions: contents: read env: - # Environment variables to support color support (jaraco/skeleton#66): - # Request colored output from CLI tools supporting it. Different tools - # interpret the value differently. For some, just being set is sufficient. - # For others, it must be a non-zero integer. 
For yet others, being set - # to a non-empty value is sufficient. For tox, it must be one of - # , 0, 1, false, no, off, on, true, yes. The only enabling value - # in common is "1". + # Environment variable to support color support (jaraco/skeleton#66) FORCE_COLOR: 1 - # MyPy's color enforcement (must be a non-zero number) - MYPY_FORCE_COLOR: -42 - # Recognized by the `py` package, dependency of `pytest` (must be "1") - PY_COLORS: 1 - # Make tox-wrapped tools see color requests - TOX_TESTENV_PASSENV: >- - FORCE_COLOR - MYPY_FORCE_COLOR - NO_COLOR - PY_COLORS - PYTEST_THEME - PYTEST_THEME_MODE # Suppress noisy pip warnings PIP_DISABLE_PIP_VERSION_CHECK: 'true' From 63f93a4db6a3419139a623ee3e23b5f3aae7809c Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 4 Sep 2023 14:44:17 +0100 Subject: [PATCH 0029/1761] Reactivate sphinx-notfound --- docs/conf.py | 2 +- setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 29f2c8bb10..bd4ffdbadb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -232,7 +232,7 @@ html_static_path = ['images'] # should contain the folder with icons # Add support for nice Not Found 404 pages -# extensions += ['notfound.extension'] # readthedocs/sphinx-notfound-page#219 +extensions += ['notfound.extension'] # List of dicts with HTML attributes # static-file points to files in the html_static_path (href is computed) diff --git a/setup.cfg b/setup.cfg index 358f302bd8..f3cec16401 100644 --- a/setup.cfg +++ b/setup.cfg @@ -108,7 +108,7 @@ docs = sphinx-inline-tabs sphinx-reredirects sphinxcontrib-towncrier - sphinx-notfound-page == 0.8.3 + sphinx-notfound-page >=1,<2 sphinx-hoverxref < 2 ssl = From 94aee535999facca9a594d48e311e4e70265c6cf Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 5 Sep 2023 10:33:18 +0100 Subject: [PATCH 0030/1761] Avoid repeating namespace names in '*-nspkg.pth' file --- setuptools/namespaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py index 5402f120b8..8774432a7e 100644 --- a/setuptools/namespaces.py +++ b/setuptools/namespaces.py @@ -75,7 +75,7 @@ def _gen_nspkg_line(self, pkg): def _get_all_ns_packages(self): """Return sorted list of all package namespaces""" pkgs = self.distribution.namespace_packages or [] - return sorted(flatten(map(self._pkg_names, pkgs))) + return sorted(set(flatten(map(self._pkg_names, pkgs)))) @staticmethod def _pkg_names(pkg): From faf88c7e4035b6e0e034d6a95368a5934ed8d021 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 5 Sep 2023 18:28:47 +0100 Subject: [PATCH 0031/1761] Modify tests/namespaces to allow nested namespaces --- setuptools/tests/namespaces.py | 31 +++++++++++++++++++++---------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py index 20efc485d2..ecddf73b6e 100644 --- a/setuptools/tests/namespaces.py +++ b/setuptools/tests/namespaces.py @@ -1,29 +1,40 @@ import textwrap +def iter_namespace_pkgs(namespace): + parts = namespace.split(".") + for i in range(len(parts)): + yield ".".join(parts[:i+1]) + + def build_namespace_package(tmpdir, name): src_dir = tmpdir / name src_dir.mkdir() setup_py = src_dir / 'setup.py' - namespace, sep, rest = name.partition('.') + namespace, _, rest = name.rpartition('.') + namespaces = list(iter_namespace_pkgs(namespace)) script = textwrap.dedent( """ import setuptools setuptools.setup( name={name!r}, version="1.0", - 
namespace_packages=[{namespace!r}], - packages=[{namespace!r}], + namespace_packages={namespaces!r}, + packages={namespaces!r}, ) """ ).format(**locals()) setup_py.write_text(script, encoding='utf-8') - ns_pkg_dir = src_dir / namespace - ns_pkg_dir.mkdir() - pkg_init = ns_pkg_dir / '__init__.py' - tmpl = '__import__("pkg_resources").declare_namespace({namespace!r})' - decl = tmpl.format(**locals()) - pkg_init.write_text(decl, encoding='utf-8') + + ns_pkg_dir = src_dir / namespace.replace(".", "/") + ns_pkg_dir.mkdir(parents=True) + + for ns in namespaces: + pkg_init = src_dir / ns.replace(".", "/") / '__init__.py' + tmpl = '__import__("pkg_resources").declare_namespace(__name__)' + decl = tmpl.format(**locals()) + pkg_init.write_text(decl, encoding='utf-8') + pkg_mod = ns_pkg_dir / (rest + '.py') some_functionality = 'name = {rest!r}'.format(**locals()) pkg_mod.write_text(some_functionality, encoding='utf-8') @@ -34,7 +45,7 @@ def build_pep420_namespace_package(tmpdir, name): src_dir = tmpdir / name src_dir.mkdir() pyproject = src_dir / "pyproject.toml" - namespace, sep, rest = name.rpartition(".") + namespace, _, rest = name.rpartition(".") script = f"""\ [build-system] requires = ["setuptools"] From 233e048afbe95348f4a5fcd9d85f556e03a2bcf6 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 5 Sep 2023 18:52:51 +0100 Subject: [PATCH 0032/1761] Add version parameter to build_namespace_package test helper --- setuptools/tests/namespaces.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py index ecddf73b6e..276157f3a9 100644 --- a/setuptools/tests/namespaces.py +++ b/setuptools/tests/namespaces.py @@ -7,7 +7,7 @@ def iter_namespace_pkgs(namespace): yield ".".join(parts[:i+1]) -def build_namespace_package(tmpdir, name): +def build_namespace_package(tmpdir, name, version="1.0"): src_dir = tmpdir / name src_dir.mkdir() setup_py = src_dir / 'setup.py' @@ -18,7 +18,7 @@ def build_namespace_package(tmpdir, name): import setuptools setuptools.setup( name={name!r}, - version="1.0", + version={version!r}, namespace_packages={namespaces!r}, packages={namespaces!r}, ) From b526de032b3470cceedb5dbc6497254f9fa6b683 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 5 Sep 2023 18:54:41 +0100 Subject: [PATCH 0033/1761] Test *-nspkg.pth files have unique names in editable_wheel --- setuptools/tests/test_editable_install.py | 32 ++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index e58168b0cf..5ef04efe7e 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -11,6 +11,8 @@ from unittest.mock import Mock from uuid import uuid4 +from distutils.core import run_setup + import jaraco.envs import jaraco.path import pytest @@ -31,6 +33,7 @@ ) from setuptools.dist import Distribution from setuptools.extension import Extension +from setuptools.warnings import SetuptoolsDeprecationWarning @pytest.fixture(params=["strict", "lenient"]) @@ -230,7 +233,33 @@ def test_editable_with_single_module(tmp_path, venv, editable_opts): class TestLegacyNamespaces: - """Ported from test_develop""" + # legacy => pkg_resources.declare_namespace(...) + setup(namespace_packages=...) 
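+    # Each package in such a legacy namespace ships an __init__.py whose
+    # only content is:
+    #     __import__("pkg_resources").declare_namespace(__name__)
+    # (the same template these tests generate via
+    # namespaces.build_namespace_package)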
+ + def test_nspkg_file_is_unique(self, tmp_path, monkeypatch): + deprecation = pytest.warns( + SetuptoolsDeprecationWarning, match=".*namespace_packages parameter.*" + ) + installation_dir = tmp_path / ".installation_dir" + installation_dir.mkdir() + examples = ( + "myns.pkgA", + "myns.pkgB", + "myns.n.pkgA", + "myns.n.pkgB", + ) + + for name in examples: + pkg = namespaces.build_namespace_package(tmp_path, name, version="42") + with deprecation, monkeypatch.context() as ctx: + ctx.chdir(pkg) + dist = run_setup("setup.py", stop_after="config") + cmd = editable_wheel(dist) + cmd.finalize_options() + editable_name = cmd.get_finalized_command("dist_info").name + cmd._install_namespaces(installation_dir, editable_name) + + files = list(installation_dir.glob("*-nspkg.pth")) + assert len(files) == len(examples) def test_namespace_package_importable(self, venv, tmp_path, editable_opts): """ @@ -238,6 +267,7 @@ def test_namespace_package_importable(self, venv, tmp_path, editable_opts): naturally using pip or `--single-version-externally-managed` and the other installed in editable mode should leave the namespace intact and both packages reachable by import. + (Ported from test_develop). """ build_system = """\ [build-system] From 39245fc7503a82d7ba7f1706b2e599c5aecb1cdf Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 5 Sep 2023 18:57:45 +0100 Subject: [PATCH 0034/1761] Customise *-nspkg.pth files for unique names in editable_wheel --- setuptools/command/editable_wheel.py | 6 +++--- setuptools/namespaces.py | 10 ++++++---- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 7f66f7e3e9..f0d6f4575a 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -341,7 +341,7 @@ def _create_wheel_file(self, bdist_wheel): with unpacked_wheel as unpacked, build_lib as lib, build_tmp as tmp: unpacked_dist_info = Path(unpacked, Path(self.dist_info_dir).name) shutil.copytree(self.dist_info_dir, unpacked_dist_info) - self._install_namespaces(unpacked, dist_info.name) + self._install_namespaces(unpacked, dist_name) files, mapping = self._run_build_commands(dist_name, unpacked, lib, tmp) strategy = self._select_strategy(dist_name, tag, lib) with strategy, WheelFile(wheel_path, "w") as wheel_obj: @@ -752,9 +752,9 @@ def __init__(self, distribution, installation_dir, editable_name, src_root): self.outputs = [] self.dry_run = False - def _get_target(self): + def _get_nspkg_file(self): """Installation target.""" - return os.path.join(self.installation_dir, self.editable_name) + return os.path.join(self.installation_dir, self.editable_name + self.nspkg_ext) def _get_root(self): """Where the modules/packages should be loaded from.""" diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py index 8774432a7e..3332f864ae 100644 --- a/setuptools/namespaces.py +++ b/setuptools/namespaces.py @@ -13,8 +13,7 @@ def install_namespaces(self): nsp = self._get_all_ns_packages() if not nsp: return - filename, ext = os.path.splitext(self._get_target()) - filename += self.nspkg_ext + filename = self._get_nspkg_file() self.outputs.append(filename) log.info("Installing %s", filename) lines = map(self._gen_nspkg_line, nsp) @@ -28,13 +27,16 @@ def install_namespaces(self): f.writelines(lines) def uninstall_namespaces(self): - filename, ext = os.path.splitext(self._get_target()) - filename += self.nspkg_ext + filename = self._get_nspkg_file() if not os.path.exists(filename): return 
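+        # the *-nspkg.pth file for this distribution exists; remove it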
log.info("Removing %s", filename) os.remove(filename) + def _get_nspkg_file(self): + filename, _ = os.path.splitext(self._get_target()) + return filename + self.nspkg_ext + def _get_target(self): return self.target From 9785782ceab5e1ab99f1e76c6d453e5cf9ea9205 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 5 Sep 2023 19:06:46 +0100 Subject: [PATCH 0035/1761] Test nested legacy namespaces for editable installs --- setuptools/tests/test_editable_install.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index 5ef04efe7e..165d93ed2f 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -261,7 +261,8 @@ def test_nspkg_file_is_unique(self, tmp_path, monkeypatch): files = list(installation_dir.glob("*-nspkg.pth")) assert len(files) == len(examples) - def test_namespace_package_importable(self, venv, tmp_path, editable_opts): + @pytest.mark.parametrize("ns", ("myns.n",)) + def test_namespace_package_importable(self, venv, tmp_path, ns, editable_opts): """ Installing two packages sharing the same namespace, one installed naturally using pip or `--single-version-externally-managed` @@ -274,8 +275,8 @@ def test_namespace_package_importable(self, venv, tmp_path, editable_opts): requires = ["setuptools"] build-backend = "setuptools.build_meta" """ - pkg_A = namespaces.build_namespace_package(tmp_path, 'myns.pkgA') - pkg_B = namespaces.build_namespace_package(tmp_path, 'myns.pkgB') + pkg_A = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgA") + pkg_B = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgB") (pkg_A / "pyproject.toml").write_text(build_system, encoding="utf-8") (pkg_B / "pyproject.toml").write_text(build_system, encoding="utf-8") # use pip to install to the target directory @@ -283,7 +284,7 @@ def test_namespace_package_importable(self, venv, tmp_path, editable_opts): opts.append("--no-build-isolation") # force current version of setuptools venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts]) venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts]) - venv.run(["python", "-c", "import myns.pkgA; import myns.pkgB"]) + venv.run(["python", "-c", f"import {ns}.pkgA; import {ns}.pkgB"]) # additionally ensure that pkg_resources import works venv.run(["python", "-c", "import pkg_resources"]) From 6818948314c2c748177d5ffaede730fd72155474 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Sep 2023 10:12:32 +0100 Subject: [PATCH 0036/1761] Consider extra path in editable finder --- setuptools/command/editable_wheel.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index f0d6f4575a..24acb6446d 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -777,6 +777,8 @@ def _get_root(self): class _EditableFinder: # MetaPathFinder @classmethod def find_spec(cls, fullname, path=None, target=None): + extra_path = [] + # Top-level packages and modules (we know these exist in the FS) if fullname in MAPPING: pkg_path = MAPPING[fullname] @@ -787,7 +789,7 @@ def find_spec(cls, fullname, path=None, target=None): # to the importlib.machinery implementation. 
parent, _, child = fullname.rpartition(".") if parent and parent in MAPPING: - return PathFinder.find_spec(fullname, path=[MAPPING[parent]]) + return PathFinder.find_spec(fullname, path=[MAPPING[parent], *extra_path]) # Other levels of nesting should be handled automatically by importlib # using the parent path. From d6513447ba74b9c3e341485d4aacd7dd6f129c8a Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Sep 2023 11:16:35 +0100 Subject: [PATCH 0037/1761] Explicitly add legacy namespaces to package mapping in editable_wheel This seems to be necessary to ensure modules inside legacy namespaces import correctly even if another distribution that shares the same namespace is installed conventionally. --- setuptools/command/editable_wheel.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 24acb6446d..79c839f8f0 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -505,9 +505,19 @@ def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str] ) ) + legacy_namespaces = { + pkg: find_package_path(pkg, roots, self.dist.src_root or "") + for pkg in self.dist.namespace_packages or [] + } + + mapping = {**roots, **legacy_namespaces} + # ^-- We need to explicitly add the legacy_namespaces to the mapping to be + # able to import their modules even if another package sharing the same + # namespace is installed in a conventional (non-editable) way. + name = f"__editable__.{self.name}.finder" finder = _normalization.safe_identifier(name) - content = bytes(_finder_template(name, roots, namespaces_), "utf-8") + content = bytes(_finder_template(name, mapping, namespaces_), "utf-8") wheel.writestr(f"{finder}.py", content) content = _encode_pth(f"import {finder}; {finder}.install()") From 88cc6c5b98090b4e1232cd628f79f52d85913e3b Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Sep 2023 13:22:40 +0100 Subject: [PATCH 0038/1761] Allow tests to build pkgutil legacy namespaces --- setuptools/tests/namespaces.py | 37 +++++++++++++++-------- setuptools/tests/test_editable_install.py | 15 +++++++-- 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py index 276157f3a9..85540d3b82 100644 --- a/setuptools/tests/namespaces.py +++ b/setuptools/tests/namespaces.py @@ -1,3 +1,5 @@ +import ast +import json import textwrap @@ -7,23 +9,36 @@ def iter_namespace_pkgs(namespace): yield ".".join(parts[:i+1]) -def build_namespace_package(tmpdir, name, version="1.0"): +def build_namespace_package(tmpdir, name, version="1.0", impl="pkg_resources"): src_dir = tmpdir / name src_dir.mkdir() setup_py = src_dir / 'setup.py' namespace, _, rest = name.rpartition('.') namespaces = list(iter_namespace_pkgs(namespace)) + setup_args = { + "name": name, + "version": version, + "packages": namespaces, + } + + if impl == "pkg_resources": + tmpl = '__import__("pkg_resources").declare_namespace(__name__)' + setup_args["namespace_packages"] = namespaces + elif impl == "pkgutil": + tmpl = '__path__ = __import__("pkgutil").extend_path(__path__, __name__)' + else: + raise ValueError(f"Cannot recognise {impl=} when creating namespaces") + + args = json.dumps(setup_args, indent=4) + assert ast.literal_eval(args) # ensure it is valid Python + script = textwrap.dedent( - """ + """\ import setuptools - setuptools.setup( - name={name!r}, - version={version!r}, - 
namespace_packages={namespaces!r}, - packages={namespaces!r}, - ) + args = {args} + setuptools.setup(**args) """ - ).format(**locals()) + ).format(args=args) setup_py.write_text(script, encoding='utf-8') ns_pkg_dir = src_dir / namespace.replace(".", "/") @@ -31,9 +46,7 @@ def build_namespace_package(tmpdir, name, version="1.0"): for ns in namespaces: pkg_init = src_dir / ns.replace(".", "/") / '__init__.py' - tmpl = '__import__("pkg_resources").declare_namespace(__name__)' - decl = tmpl.format(**locals()) - pkg_init.write_text(decl, encoding='utf-8') + pkg_init.write_text(tmpl, encoding='utf-8') pkg_mod = ns_pkg_dir / (rest + '.py') some_functionality = 'name = {rest!r}'.format(**locals()) diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index 165d93ed2f..12716fd5e1 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -261,8 +261,17 @@ def test_nspkg_file_is_unique(self, tmp_path, monkeypatch): files = list(installation_dir.glob("*-nspkg.pth")) assert len(files) == len(examples) + @pytest.mark.parametrize( + "impl", + ( + "pkg_resources", + # "pkgutil", => does not work + ) + ) @pytest.mark.parametrize("ns", ("myns.n",)) - def test_namespace_package_importable(self, venv, tmp_path, ns, editable_opts): + def test_namespace_package_importable( + self, venv, tmp_path, ns, impl, editable_opts + ): """ Installing two packages sharing the same namespace, one installed naturally using pip or `--single-version-externally-managed` @@ -275,8 +284,8 @@ def test_namespace_package_importable(self, venv, tmp_path, ns, editable_opts): requires = ["setuptools"] build-backend = "setuptools.build_meta" """ - pkg_A = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgA") - pkg_B = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgB") + pkg_A = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgA", impl=impl) + pkg_B = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgB", impl=impl) (pkg_A / "pyproject.toml").write_text(build_system, encoding="utf-8") (pkg_B / "pyproject.toml").write_text(build_system, encoding="utf-8") # use pip to install to the target directory From 96b3d3d1412f456e635d8306f286ae013e89d0ff Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Sep 2023 14:42:08 +0100 Subject: [PATCH 0039/1761] Use pathlib in tests/namespaces to ensure Path API --- setuptools/tests/namespaces.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py index 85540d3b82..3b610d5c24 100644 --- a/setuptools/tests/namespaces.py +++ b/setuptools/tests/namespaces.py @@ -1,6 +1,7 @@ import ast import json import textwrap +from pathlib import Path def iter_namespace_pkgs(namespace): @@ -41,7 +42,7 @@ def build_namespace_package(tmpdir, name, version="1.0", impl="pkg_resources"): ).format(args=args) setup_py.write_text(script, encoding='utf-8') - ns_pkg_dir = src_dir / namespace.replace(".", "/") + ns_pkg_dir = Path(src_dir, namespace.replace(".", "/")) ns_pkg_dir.mkdir(parents=True) for ns in namespaces: @@ -69,7 +70,7 @@ def build_pep420_namespace_package(tmpdir, name): version = "3.14159" """ pyproject.write_text(textwrap.dedent(script), encoding='utf-8') - ns_pkg_dir = src_dir / namespace.replace(".", "/") + ns_pkg_dir = Path(src_dir, namespace.replace(".", "/")) ns_pkg_dir.mkdir(parents=True) pkg_mod = ns_pkg_dir / (rest + ".py") some_functionality = f"name = {rest!r}" From 
2ee98fd35bc67643a8333aadf5928376840d382c Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Sep 2023 15:49:18 +0100 Subject: [PATCH 0040/1761] Fix lint issues --- setuptools/tests/namespaces.py | 2 +- setuptools/tests/test_editable_install.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py index 3b610d5c24..248db98f97 100644 --- a/setuptools/tests/namespaces.py +++ b/setuptools/tests/namespaces.py @@ -7,7 +7,7 @@ def iter_namespace_pkgs(namespace): parts = namespace.split(".") for i in range(len(parts)): - yield ".".join(parts[:i+1]) + yield ".".join(parts[: i + 1]) def build_namespace_package(tmpdir, name, version="1.0", impl="pkg_resources"): diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index 12716fd5e1..ef71147adf 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -266,7 +266,7 @@ def test_nspkg_file_is_unique(self, tmp_path, monkeypatch): ( "pkg_resources", # "pkgutil", => does not work - ) + ), ) @pytest.mark.parametrize("ns", ("myns.n",)) def test_namespace_package_importable( From 8c740e5569ee73fa8356e84ba72f62db941e4598 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Sep 2023 23:14:05 +0100 Subject: [PATCH 0041/1761] Add news fragment --- newsfragments/4041.bugfix.1.rst | 2 ++ newsfragments/4041.bugfix.2.rst | 2 ++ 2 files changed, 4 insertions(+) create mode 100644 newsfragments/4041.bugfix.1.rst create mode 100644 newsfragments/4041.bugfix.2.rst diff --git a/newsfragments/4041.bugfix.1.rst b/newsfragments/4041.bugfix.1.rst new file mode 100644 index 0000000000..b464efcb71 --- /dev/null +++ b/newsfragments/4041.bugfix.1.rst @@ -0,0 +1,2 @@ +Fix the name given to the ``*-nspkg.pth`` files in editable installs, +ensuring they are unique per distribution. diff --git a/newsfragments/4041.bugfix.2.rst b/newsfragments/4041.bugfix.2.rst new file mode 100644 index 0000000000..a497fa789b --- /dev/null +++ b/newsfragments/4041.bugfix.2.rst @@ -0,0 +1,2 @@ +Workaround some limitations on ``pkg_resources``-style legacy namespaces in +the meta path finder for editable installations. 
From c95ddc47a4b63488fdb003528bc6dbece85be1e2 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Sep 2023 23:45:55 +0100 Subject: [PATCH 0042/1761] =?UTF-8?q?Bump=20version:=2068.1.2=20=E2=86=92?= =?UTF-8?q?=2068.2.0b1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- setup.cfg | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index bed73ed9ee..94d601aa03 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 68.1.2 +current_version = 68.2.0b1 commit = True tag = True diff --git a/setup.cfg b/setup.cfg index f3cec16401..38a853ecdb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 68.1.2 +version = 68.2.0 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From f45acf160a7a245aca65a1d6b51582748e3bd9a3 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Sep 2023 02:37:15 +0100 Subject: [PATCH 0043/1761] Update news fragment --- NEWS.rst | 30 ++++++++++++++++++++++++++++++ newsfragments/3903.feature.rst | 2 -- newsfragments/3904.feature.1.rst | 3 --- newsfragments/3904.feature.2.rst | 2 -- newsfragments/4023.misc.rst | 2 -- newsfragments/4027.misc.1.rst | 1 - newsfragments/4027.misc.2.rst | 1 - newsfragments/4041.bugfix.1.rst | 2 -- newsfragments/4041.bugfix.2.rst | 2 -- 9 files changed, 30 insertions(+), 15 deletions(-) delete mode 100644 newsfragments/3903.feature.rst delete mode 100644 newsfragments/3904.feature.1.rst delete mode 100644 newsfragments/3904.feature.2.rst delete mode 100644 newsfragments/4023.misc.rst delete mode 100644 newsfragments/4027.misc.1.rst delete mode 100644 newsfragments/4027.misc.2.rst delete mode 100644 newsfragments/4041.bugfix.1.rst delete mode 100644 newsfragments/4041.bugfix.2.rst diff --git a/NEWS.rst b/NEWS.rst index 2c5e02ff43..82cd7bf5ff 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,33 @@ +v68.2.0 +======= + +Features +-------- + +- Rework how ``setuptools`` internally handles ``dependencies/install_requires`` + and ``optional-dependencies/extras_require``. (#3903) +- Improve the generated ``PKG-INFO`` files, by adding ``Requires-Dist`` fields. + Previously, these fields would be omitted in favour of a non-standard + ``*.egg-info/requires.txt`` file (which is still generated for the time being). (#3904) +- Improve atomicity when writing ``PKG-INFO`` files to avoid race + conditions with ``importlib.metadata``. (#3904) + + +Bugfixes +-------- + +- Fix the name given to the ``*-nspkg.pth`` files in editable installs, + ensuring they are unique per distribution. (#4041) +- Workaround some limitations on ``pkg_resources``-style legacy namespaces in + the meta path finder for editable installations. (#4041) + + +Misc +---- + +- #4023, #4027, #4027 + + v68.1.2 ======= diff --git a/newsfragments/3903.feature.rst b/newsfragments/3903.feature.rst deleted file mode 100644 index ac6c00a2fb..0000000000 --- a/newsfragments/3903.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Rework how ``setuptools`` internally handles ``dependencies/install_requires`` -and ``optional-dependencies/extras_require``. 
diff --git a/newsfragments/3904.feature.1.rst b/newsfragments/3904.feature.1.rst deleted file mode 100644 index 114553846e..0000000000 --- a/newsfragments/3904.feature.1.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improve the generated ``PKG-INFO`` files, by adding ``Requires-Dist`` fields. -Previously, these fields would be omitted in favour of a non-standard -``*.egg-info/requires.txt`` file (which is still generated for the time being). diff --git a/newsfragments/3904.feature.2.rst b/newsfragments/3904.feature.2.rst deleted file mode 100644 index 0c80d33c06..0000000000 --- a/newsfragments/3904.feature.2.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve atomicity when writing ``PKG-INFO`` files to avoid race -conditions with ``importlib.metadata``. diff --git a/newsfragments/4023.misc.rst b/newsfragments/4023.misc.rst deleted file mode 100644 index 9a0c2e7736..0000000000 --- a/newsfragments/4023.misc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Avoid circular imports (particularly between ``setuptools/{__init__,dist,monkey}.py``), -or at least delay them, so tools like ``sphinx`` don't have problems analysing the codebase. diff --git a/newsfragments/4027.misc.1.rst b/newsfragments/4027.misc.1.rst deleted file mode 100644 index 2c84450f0f..0000000000 --- a/newsfragments/4027.misc.1.rst +++ /dev/null @@ -1 +0,0 @@ -Removed ``pandas`` from integration tests. diff --git a/newsfragments/4027.misc.2.rst b/newsfragments/4027.misc.2.rst deleted file mode 100644 index d33b85a6c6..0000000000 --- a/newsfragments/4027.misc.2.rst +++ /dev/null @@ -1 +0,0 @@ -Added ``pyyaml``, ``charset-normalizer`` and ``protobuf`` to integration tests. diff --git a/newsfragments/4041.bugfix.1.rst b/newsfragments/4041.bugfix.1.rst deleted file mode 100644 index b464efcb71..0000000000 --- a/newsfragments/4041.bugfix.1.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix the name given to the ``*-nspkg.pth`` files in editable installs, -ensuring they are unique per distribution. diff --git a/newsfragments/4041.bugfix.2.rst b/newsfragments/4041.bugfix.2.rst deleted file mode 100644 index a497fa789b..0000000000 --- a/newsfragments/4041.bugfix.2.rst +++ /dev/null @@ -1,2 +0,0 @@ -Workaround some limitations on ``pkg_resources``-style legacy namespaces in -the meta path finder for editable installations. From 06600a0e22ade61be25b5f9794d783e7de9b9e4f Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 8 Sep 2023 11:13:21 +0100 Subject: [PATCH 0044/1761] Add lru_cache to requirement parsing --- setuptools/_reqs.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py index 5d5b927fd8..7d7130d50e 100644 --- a/setuptools/_reqs.py +++ b/setuptools/_reqs.py @@ -1,3 +1,4 @@ +from functools import lru_cache from typing import Callable, Iterable, Iterator, TypeVar, Union, overload import setuptools.extern.jaraco.text as text @@ -7,6 +8,12 @@ _StrOrIter = Union[str, Iterable[str]] +parse_req: Callable[[str], Requirement] = lru_cache()(Requirement) +# Setuptools parses the same requirement many times +# (e.g. first for validation than for normalisation), +# so it might be worth to cache. + + def parse_strings(strs: _StrOrIter) -> Iterator[str]: """ Yield requirement strings for each specification in `strs`. @@ -26,7 +33,7 @@ def parse(strs: _StrOrIter, parser: Callable[[str], _T]) -> Iterator[_T]: ... -def parse(strs, parser=Requirement): +def parse(strs, parser=parse_req): """ Replacement for ``pkg_resources.parse_requirements`` that uses ``packaging``. 
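+
+    For example (illustrative), ``list(parse(["pkgA>=1.0", "pkgB"]))`` yields
+    two ``packaging`` ``Requirement`` objects; because ``parse_req`` wraps
+    ``Requirement`` in ``lru_cache``, parsing the same string twice returns
+    the same cached object.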
""" From 3aa774465e475eccc807dc55ec65471e0b92cb8e Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 8 Sep 2023 11:13:41 +0100 Subject: [PATCH 0045/1761] Revert "Record normalized requirements in dist.metadata object" This reverts commit a641c78c7c4e91d85a8772434d046db56abd1576. --- setuptools/dist.py | 21 ++++++--------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/setuptools/dist.py b/setuptools/dist.py index f1d361f1c3..445c3a6856 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -263,11 +263,8 @@ class Distribution(_Distribution): 'provides_extras': OrderedSet, 'license_file': lambda: None, 'license_files': lambda: None, - # Both install_requires and extras_require are needed to write PKG-INFO, - # So we take this opportunity to cache parsed requirement objects. - # These attributes are not part of the public API and intended for internal use. - '_normalized_install_requires': dict, # Dict[str, Requirement] - '_normalized_extras_require': dict, # Dict[str, Dict[str, Requirement]] + 'install_requires': list, + 'extras_require': dict, } _patched_dist = None @@ -394,6 +391,8 @@ def _finalize_requires(self): self.metadata.python_requires = self.python_requires self._normalize_requires() + self.metadata.install_requires = self.install_requires + self.metadata.extras_require = self.extras_require if self.extras_require: for extra in self.extras_require.keys(): @@ -406,17 +405,9 @@ def _normalize_requires(self): """Make sure requirement-related attributes exist and are normalized""" install_requires = getattr(self, "install_requires", None) or [] extras_require = getattr(self, "extras_require", None) or {} - meta = self.metadata - meta._normalized_install_requires = { - str(r): r for r in _reqs.parse(install_requires) - } - meta._normalized_extras_require = { - k: {str(r): r for r in _reqs.parse(v or [])} - for k, v in extras_require.items() - } - self.install_requires = list(meta._normalized_install_requires) + self.install_requires = list(map(str, _reqs.parse(install_requires))) self.extras_require = { - k: list(v) for k, v in meta._normalized_extras_require.items() + k: list(map(str, _reqs.parse(v or []))) for k, v in extras_require.items() } def _finalize_license_files(self): From 11edfe600fcd89ec535be292f9b41668b9c302ce Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 8 Sep 2023 11:23:41 +0100 Subject: [PATCH 0046/1761] Revert "Re-use pre-parsed requirements when writing requires.txt" This reverts commit 3c9d6ac96bb76670adc48535816fa3331b027c80. --- setuptools/command/_requirestxt.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py index d223737fd4..435d71dca5 100644 --- a/setuptools/command/_requirestxt.py +++ b/setuptools/command/_requirestxt.py @@ -11,6 +11,7 @@ from itertools import filterfalse from typing import Dict, List, Tuple, Mapping, TypeVar +from .. 
import _reqs from ..extern.jaraco.text import yield_lines from ..extern.packaging.requirements import Requirement @@ -19,11 +20,11 @@ _T = TypeVar("_T") _Ordered = Dict[_T, None] _ordered = dict +_StrOrIter = _reqs._StrOrIter def _prepare( - install_requires: Dict[str, Requirement], - extras_require: Mapping[str, Dict[str, Requirement]], + install_requires: _StrOrIter, extras_require: Mapping[str, _StrOrIter] ) -> Tuple[List[str], Dict[str, List[str]]]: """Given values for ``install_requires`` and ``extras_require`` create modified versions in a way that can be written in ``requires.txt`` @@ -33,7 +34,7 @@ def _prepare( def _convert_extras_requirements( - extras_require: Dict[str, Dict[str, Requirement]], + extras_require: _StrOrIter, ) -> Mapping[str, _Ordered[Requirement]]: """ Convert requirements in `extras_require` of the form @@ -44,15 +45,14 @@ def _convert_extras_requirements( for section, v in extras_require.items(): # Do not strip empty sections. output[section] - for r in v.values(): + for r in _reqs.parse(v): output[section + _suffix_for(r)].setdefault(r) return output def _move_install_requirements_markers( - install_requires: Dict[str, Requirement], - extras_require: Mapping[str, _Ordered[Requirement]], + install_requires: _StrOrIter, extras_require: Mapping[str, _Ordered[Requirement]] ) -> Tuple[List[str], Dict[str, List[str]]]: """ The ``requires.txt`` file has an specific format: @@ -66,7 +66,7 @@ def _move_install_requirements_markers( # divide the install_requires into two sets, simple ones still # handled by install_requires and more complex ones handled by extras_require. - inst_reqs = install_requires.values() + inst_reqs = list(_reqs.parse(install_requires)) simple_reqs = filter(_no_marker, inst_reqs) complex_reqs = filterfalse(_no_marker, inst_reqs) simple_install_requires = list(map(str, simple_reqs)) @@ -90,9 +90,8 @@ def _suffix_for(req): def _clean_req(req): """Given a Requirement, remove environment markers and return it""" - r = Requirement(str(req)) # create a copy before modifying. 
- r.marker = None - return r + req.marker = None + return req def _no_marker(req): @@ -111,10 +110,9 @@ def append_cr(line): def write_requirements(cmd, basename, filename): dist = cmd.distribution - meta = dist.metadata data = io.StringIO() install_requires, extras_require = _prepare( - meta._normalized_install_requires, meta._normalized_extras_require + dist.install_requires or (), dist.extras_require or {} ) _write_requirements(data, install_requires) for extra in sorted(extras_require): From 43e24f1d27a25fa8d28a00e8e9f2507569948c5d Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 8 Sep 2023 11:40:49 +0100 Subject: [PATCH 0047/1761] Avoid modifying requirements object in-place --- setuptools/command/_requirestxt.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py index 435d71dca5..32bae2c4b4 100644 --- a/setuptools/command/_requirestxt.py +++ b/setuptools/command/_requirestxt.py @@ -90,8 +90,9 @@ def _suffix_for(req): def _clean_req(req): """Given a Requirement, remove environment markers and return it""" - req.marker = None - return req + r = Requirement(str(req)) # create a copy before modifying + r.marker = None + return r def _no_marker(req): From d25d03cf04c63bfc63a63a11454b5915d9c904a7 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 8 Sep 2023 11:41:36 +0100 Subject: [PATCH 0048/1761] Avoid relying on pre-parsed requirements --- setuptools/_core_metadata.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py index c1d41c6680..6c904c3c77 100644 --- a/setuptools/_core_metadata.py +++ b/setuptools/_core_metadata.py @@ -13,7 +13,7 @@ from distutils.util import rfc822_escape -from . import _normalization +from . 
import _normalization, _reqs from .extern.packaging.markers import Marker from .extern.packaging.requirements import Requirement from .extern.packaging.version import Version @@ -211,11 +211,11 @@ def write_field(key, value): def _write_requirements(self, file): - for req in self._normalized_install_requires: + for req in _reqs.parse(self.install_requires): file.write(f"Requires-Dist: {req}\n") processed_extras = {} - for augmented_extra, reqs in self._normalized_extras_require.items(): + for augmented_extra, reqs in self.extras_require.items(): # Historically, setuptools allows "augmented extras": `:` unsafe_extra, _, condition = augmented_extra.partition(":") unsafe_extra = unsafe_extra.strip() @@ -223,7 +223,7 @@ def _write_requirements(self, file): if extra: _write_provides_extra(file, processed_extras, extra, unsafe_extra) - for req in reqs: + for req in _reqs.parse_strings(reqs): r = _include_extra(req, extra, condition.strip()) file.write(f"Requires-Dist: {r}\n") @@ -231,7 +231,7 @@ def _write_requirements(self, file): def _include_extra(req: str, extra: str, condition: str) -> Requirement: - r = Requirement(req) + r = Requirement(req) # create a fresh object that can be modified parts = ( f"({r.marker})" if r.marker else None, f"({condition})" if condition else None, From c16a54766069fb3546d683529895cd548fecd830 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 8 Sep 2023 18:36:25 +0100 Subject: [PATCH 0049/1761] Prevent install_requires and extras_require to not be set in dist --- setuptools/config/_apply_pyprojecttoml.py | 2 +- setuptools/dist.py | 13 +++++-------- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index 4b8f803c1b..b6443308bf 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -125,7 +125,7 @@ def _set_config(dist: "Distribution", field: str, value: Any): setter(value) elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES: setattr(dist.metadata, field, value) - else: + if hasattr(dist, field): setattr(dist, field, value) diff --git a/setuptools/dist.py b/setuptools/dist.py index 445c3a6856..2672f928d5 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -296,14 +296,11 @@ def __init__(self, attrs=None): self.setup_requires = attrs.pop('setup_requires', []) for ep in metadata.entry_points(group='distutils.setup_keywords'): vars(self).setdefault(ep.name, None) - _Distribution.__init__( - self, - { - k: v - for k, v in attrs.items() - if k not in self._DISTUTILS_UNSUPPORTED_METADATA - }, - ) + + metadata_only = set(self._DISTUTILS_UNSUPPORTED_METADATA) + metadata_only -= {"install_requires", "extras_require"} + dist_attrs = {k: v for k, v in attrs.items() if k not in metadata_only} + _Distribution.__init__(self, dist_attrs) # Private API (setuptools-use only, not restricted to Distribution) # Stores files that are referenced by the configuration and need to be in the From dc69ce33459f98285423d623077cdb10e268df8b Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 8 Sep 2023 19:14:22 +0100 Subject: [PATCH 0050/1761] Add news fragment --- newsfragments/4043.bugfix.rst | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 newsfragments/4043.bugfix.rst diff --git a/newsfragments/4043.bugfix.rst b/newsfragments/4043.bugfix.rst new file mode 100644 index 0000000000..e32728fe24 --- /dev/null +++ b/newsfragments/4043.bugfix.rst @@ -0,0 +1,4 @@ +Avoid using caching attributes in 
``Distribution.metadata`` for requirements. +This is done for backwards compatibility with customizations that attempt to +modify ``install_requires`` or ``extras_require`` at a late point (still not +recommended). From b02bf32bae729d53bdb7c9649d6ec36afdb793ee Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 10 Sep 2023 13:27:03 -0400 Subject: [PATCH 0051/1761] Add diff-cover check to Github Actions CI. Closes jaraco/skeleton#90. --- .github/workflows/main.yml | 18 ++++++++++++++++++ tox.ini | 8 ++++++++ 2 files changed, 26 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f302854902..fa326a26df 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -53,6 +53,24 @@ jobs: - name: Run run: tox + diffcov: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: 3.x + - name: Install tox + run: | + python -m pip install tox + - name: Evaluate coverage + run: tox + env: + TOXENV: diffcov + docs: runs-on: ubuntu-latest env: diff --git a/tox.ini b/tox.ini index e51d652d7f..3b4414b40d 100644 --- a/tox.ini +++ b/tox.ini @@ -8,6 +8,14 @@ usedevelop = True extras = testing +[testenv:diffcov] +deps = + diff-cover +commands = + pytest {posargs} --cov-report xml + diff-cover coverage.xml --compare-branch=origin/main --html-report diffcov.html + diff-cover coverage.xml --compare-branch=origin/main --fail-under=100 + [testenv:docs] extras = docs From a6256e2935468b72a61aa7fda1e036faef3bfb3d Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 10 Sep 2023 13:59:47 -0400 Subject: [PATCH 0052/1761] Add descriptions to the tox environments. Closes jaraco/skeleton#91. --- tox.ini | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tox.ini b/tox.ini index 3b4414b40d..1950b4ef99 100644 --- a/tox.ini +++ b/tox.ini @@ -1,4 +1,5 @@ [testenv] +description = perform primary checks (tests, style, types, coverage) deps = setenv = PYTHONWARNDEFAULTENCODING = 1 @@ -9,6 +10,7 @@ extras = testing [testenv:diffcov] +description = run tests and check that diff from main is covered deps = diff-cover commands = @@ -17,6 +19,7 @@ commands = diff-cover coverage.xml --compare-branch=origin/main --fail-under=100 [testenv:docs] +description = build the documentation extras = docs testing @@ -26,6 +29,7 @@ commands = python -m sphinxlint [testenv:finalize] +description = assemble changelog and tag a release skip_install = True deps = towncrier @@ -36,6 +40,7 @@ commands = [testenv:release] +description = publish the package to PyPI and GitHub skip_install = True deps = build From 928e9a86d61d3a660948bcba7689f90216cc8243 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 10 Sep 2023 14:10:31 -0400 Subject: [PATCH 0053/1761] Add FORCE_COLOR to the TOX_OVERRIDE for GHA. Requires tox 4.11.1. Closes jaraco/skeleton#89. 
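For illustration, a (hypothetical) check inside a tox-run test session can
now sense both kinds of variables, because tox passes them through under
this override:

    import os

    # Visible inside the tox testenv thanks to the pass_env override.
    running_in_gha = any(name.startswith('GITHUB_') for name in os.environ)
    color_requested = bool(os.environ.get('FORCE_COLOR'))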
--- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index fa326a26df..28e3678679 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -16,7 +16,7 @@ env: # Ensure tests can sense settings about the environment TOX_OVERRIDE: >- - testenv.pass_env+=GITHUB_* + testenv.pass_env+=GITHUB_*,FORCE_COLOR jobs: From c2608c5befb7c5c9d10b4b73944e5b9404dc5612 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 11 Sep 2023 11:05:35 +0100 Subject: [PATCH 0054/1761] Update test dependency on build --- setup.cfg | 2 +- tox.ini | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index 38a853ecdb..f01c0122bd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -87,7 +87,7 @@ testing-integration = wheel jaraco.path>=3.2.0 jaraco.envs>=2.2 - build[virtualenv] + build[virtualenv]>=1.0.3 filelock>=3.4.0 packaging diff --git a/tox.ini b/tox.ini index 783827d26e..166f3e3909 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,6 @@ [testenv] deps = # Ideally all the dependencies should be set as "extras" - build[virtualenv] @ git+https://github.com/pypa/build@59c1f87 - # ^-- pypa/build#630, use dev version while we wait for the new release packaging @ git+https://github.com/pypa/packaging@7e68d82 # ^-- use dev version while we wait for the new release setenv = From dca98f8b6f04bcf73db8f7c1d3eb2ecdb8ceebc9 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 11 Sep 2023 11:19:58 +0100 Subject: [PATCH 0055/1761] Add fallback for old packaging --- setup.cfg | 2 +- setuptools/tests/_packaging_compat.py | 9 +++++++++ setuptools/tests/config/test_apply_pyprojecttoml.py | 4 +++- setuptools/tests/test_core_metadata.py | 3 ++- 4 files changed, 15 insertions(+), 3 deletions(-) create mode 100644 setuptools/tests/_packaging_compat.py diff --git a/setup.cfg b/setup.cfg index f01c0122bd..5e86f65960 100644 --- a/setup.cfg +++ b/setup.cfg @@ -89,7 +89,7 @@ testing-integration = jaraco.envs>=2.2 build[virtualenv]>=1.0.3 filelock>=3.4.0 - packaging + packaging>=23.1 # TODO: update once packaging 23.2 is available docs = # upstream diff --git a/setuptools/tests/_packaging_compat.py b/setuptools/tests/_packaging_compat.py new file mode 100644 index 0000000000..5bdcc554d5 --- /dev/null +++ b/setuptools/tests/_packaging_compat.py @@ -0,0 +1,9 @@ +from packaging import __version__ as packaging_version + +if tuple(packaging_version.split(".")) >= ("23", "2"): + from packaging.metadata import Metadata +else: + # Just pretend it exists while waiting for release... 
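+    # (MagicMock tolerates arbitrary attribute access and calls, so modules
+    # importing ``Metadata`` keep importing cleanly, just without the
+    # validation behaviour that packaging 23.2 provides)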
+ from unittest.mock import MagicMock + + Metadata = MagicMock() diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index ffcbd318a2..c0c6b13392 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -13,7 +13,9 @@ import pytest from ini2toml.api import Translator -from packaging.metadata import Metadata + +# TODO: replace with `from packaging.metadata import Metadata` in future versions +from .._packaging_compat import Metadata import setuptools # noqa ensure monkey patch to metadata from setuptools.dist import Distribution diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py index 8c2483f26e..92717bd8bf 100644 --- a/setuptools/tests/test_core_metadata.py +++ b/setuptools/tests/test_core_metadata.py @@ -5,7 +5,8 @@ import pytest -from packaging.metadata import Metadata +# TODO: replace with `from packaging.metadata import Metadata` in future versions: +from ._packaging_compat import Metadata from setuptools import sic, _reqs from setuptools.dist import Distribution From 6981c042e63fc117cfb5a9add28dab3f15dfc79b Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 11 Sep 2023 11:29:18 +0100 Subject: [PATCH 0056/1761] Add news fragment --- newsfragments/4045.misc.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 newsfragments/4045.misc.rst diff --git a/newsfragments/4045.misc.rst b/newsfragments/4045.misc.rst new file mode 100644 index 0000000000..ce3de929ae --- /dev/null +++ b/newsfragments/4045.misc.rst @@ -0,0 +1,2 @@ +Update test dependency on ``build==1.0.3`` and +add fallback for ``packaging==23.1`` (regarding ``Metadata`` validation). From 6e1bd6b2f38e51ec0f2f877480f672506839b578 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 11 Sep 2023 13:00:45 +0100 Subject: [PATCH 0057/1761] Fix bumpversion state --- .bumpversion.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 94d601aa03..ee8f0fb6b4 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 68.2.0b1 +current_version = 68.2.0 commit = True tag = True From 0120605db966af66207d996b82320ee676fabbf1 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 11 Sep 2023 13:28:58 +0100 Subject: [PATCH 0058/1761] =?UTF-8?q?Bump=20version:=2068.2.0=20=E2=86=92?= =?UTF-8?q?=2068.2.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 18 ++++++++++++++++++ newsfragments/4043.bugfix.rst | 4 ---- newsfragments/4045.misc.rst | 2 -- setup.cfg | 2 +- 5 files changed, 20 insertions(+), 8 deletions(-) delete mode 100644 newsfragments/4043.bugfix.rst delete mode 100644 newsfragments/4045.misc.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index ee8f0fb6b4..aa6e23b6d9 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 68.2.0 +current_version = 68.2.1 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 82cd7bf5ff..5b86e1c43f 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,21 @@ +v68.2.1 +======= + +Bugfixes +-------- + +- Avoid using caching attributes in ``Distribution.metadata`` for requirements. + This is done for backwards compatibility with customizations that attempt to + modify ``install_requires`` or ``extras_require`` at a late point (still not + recommended). 
(#4043) + + +Misc +---- + +- #4045 + + v68.2.0 ======= diff --git a/newsfragments/4043.bugfix.rst b/newsfragments/4043.bugfix.rst deleted file mode 100644 index e32728fe24..0000000000 --- a/newsfragments/4043.bugfix.rst +++ /dev/null @@ -1,4 +0,0 @@ -Avoid using caching attributes in ``Distribution.metadata`` for requirements. -This is done for backwards compatibility with customizations that attempt to -modify ``install_requires`` or ``extras_require`` at a late point (still not -recommended). diff --git a/newsfragments/4045.misc.rst b/newsfragments/4045.misc.rst deleted file mode 100644 index ce3de929ae..0000000000 --- a/newsfragments/4045.misc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update test dependency on ``build==1.0.3`` and -add fallback for ``packaging==23.1`` (regarding ``Metadata`` validation). diff --git a/setup.cfg b/setup.cfg index 5e86f65960..7d718377a7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 68.2.0 +version = 68.2.1 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From 566f2948d2fc452109da4e0cf20ac4e113e07809 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 12 Sep 2023 13:31:55 +0100 Subject: [PATCH 0059/1761] Test command line parsing of '--version' interoperates with pyproject.toml --- .../tests/config/test_apply_pyprojecttoml.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index c0c6b13392..294947a00a 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -423,6 +423,25 @@ def test_example_file_not_in_wheel(self, setuptools_wheel): assert not any(name.endswith(EXAMPLES_FILE) for name in zipfile.namelist()) +class TestInteropCommandLineParsing: + def test_version(self, tmp_path, monkeypatch, capsys): + # See pypa/setuptools#4047 + # This test can be removed once the CLI interface of setup.py is removed + monkeypatch.chdir(tmp_path) + toml_config = """ + [project] + name = "test" + version = "42.0" + """ + pyproject = Path(tmp_path, "pyproject.toml") + pyproject.write_text(cleandoc(toml_config), encoding="utf-8") + opts = {"script_args": ["--version"]} + dist = pyprojecttoml.apply_configuration(Distribution(opts), pyproject) + dist.parse_command_line() # <-- there should be no exception here. 
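+        # distutils handles display options such as --version inside
+        # parse_command_line(), printing the value straight to stdout,
+        # which is why capsys can assert on it below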
+ captured = capsys.readouterr() + assert "42.0" in captured.out + + # --- Auxiliary Functions --- From 7b7971e6b08c2a9586e4699312371bb5ae19df9b Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 12 Sep 2023 13:41:29 +0100 Subject: [PATCH 0060/1761] Set requirements attributes directly into dist when parsing pyproject.toml --- setuptools/config/_apply_pyprojecttoml.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index b6443308bf..4489d22437 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -125,7 +125,7 @@ def _set_config(dist: "Distribution", field: str, value: Any): setter(value) elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES: setattr(dist.metadata, field, value) - if hasattr(dist, field): + else: setattr(dist, field, value) @@ -212,12 +212,12 @@ def _dependencies(dist: "Distribution", val: list, _root_dir): if getattr(dist, "install_requires", []): msg = "`install_requires` overwritten in `pyproject.toml` (dependencies)" SetuptoolsWarning.emit(msg) - _set_config(dist, "install_requires", val) + dist.install_requires = val def _optional_dependencies(dist: "Distribution", val: dict, _root_dir): existing = getattr(dist, "extras_require", None) or {} - _set_config(dist, "extras_require", {**existing, **val}) + dist.extras_require = {**existing, **val} def _unify_entry_points(project_table: dict): From facbb757dbef0641fb12b550f07d1da2edd6ed6a Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 12 Sep 2023 13:51:53 +0100 Subject: [PATCH 0061/1761] Add news fragment --- newsfragments/4048.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4048.bugfix.rst diff --git a/newsfragments/4048.bugfix.rst b/newsfragments/4048.bugfix.rst new file mode 100644 index 0000000000..cb8407ed5d --- /dev/null +++ b/newsfragments/4048.bugfix.rst @@ -0,0 +1 @@ +Improve backwards compatibility with deprecated CLI practices. From 5f29762b9f4933d8b4dfbfd8a2d470d68c8adf1f Mon Sep 17 00:00:00 2001 From: Ryan Routsong Date: Tue, 12 Sep 2023 09:21:21 -0600 Subject: [PATCH 0062/1761] fix import statement on quickstart guide --- docs/userguide/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 2afab9e54c..f0426293a4 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -229,7 +229,7 @@ found, as shown in the example below: .. code-block:: python - from setuptools import find_packages # or find_namespace_packages + from setuptools import setup, find_packages # or find_namespace_packages setup( # ... 
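For reference, the corrected quickstart snippet expanded into a runnable
``setup.py`` sketch (the project name and ``src`` layout are illustrative
additions, not part of the patch):

.. code-block:: python

    from setuptools import setup, find_packages  # or find_namespace_packages

    setup(
        name="mypkg",  # hypothetical name, for illustration only
        version="0.1",
        packages=find_packages(where="src"),
        package_dir={"": "src"},
    )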
From 2255e6366c70b9813d115ae0a0bba329affbd0ac Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 12 Sep 2023 17:37:58 +0100 Subject: [PATCH 0063/1761] =?UTF-8?q?Bump=20version:=2068.2.1=20=E2=86=92?= =?UTF-8?q?=2068.2.2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 9 +++++++++ newsfragments/4048.bugfix.rst | 1 - setup.cfg | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 newsfragments/4048.bugfix.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index aa6e23b6d9..9e6872031e 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 68.2.1 +current_version = 68.2.2 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 5b86e1c43f..295b7b8ce6 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,12 @@ +v68.2.2 +======= + +Bugfixes +-------- + +- Improve backwards compatibility with deprecated CLI practices. (#4048) + + v68.2.1 ======= diff --git a/newsfragments/4048.bugfix.rst b/newsfragments/4048.bugfix.rst deleted file mode 100644 index cb8407ed5d..0000000000 --- a/newsfragments/4048.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Improve backwards compatibility with deprecated CLI practices. diff --git a/setup.cfg b/setup.cfg index 7d718377a7..d6c1f6a07d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 68.2.1 +version = 68.2.2 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From ca1831c2148fe5ddbffd001de76ff5f6005f812c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 18 Sep 2023 11:05:36 -0400 Subject: [PATCH 0064/1761] Prefer ``pass_env`` in tox config. Preferred failure mode for tox-dev/tox#3127 and closes jaraco/skeleton#92. 
--- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 1950b4ef99..33da3deb08 100644 --- a/tox.ini +++ b/tox.ini @@ -34,7 +34,7 @@ skip_install = True deps = towncrier jaraco.develop >= 7.23 -passenv = * +pass_env = * commands = python -m jaraco.develop.finalize @@ -46,7 +46,7 @@ deps = build twine>=3 jaraco.develop>=7.1 -passenv = +pass_env = TWINE_PASSWORD GITHUB_TOKEN setenv = From b1d5ae4f1450fd67e4fc8a754103d3dbca3b3a50 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 23 Sep 2023 20:59:07 +0200 Subject: [PATCH 0065/1761] Fix typo found by codespell --- setuptools/tests/test_core_metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py index 92717bd8bf..14fa508a88 100644 --- a/setuptools/tests/test_core_metadata.py +++ b/setuptools/tests/test_core_metadata.py @@ -357,7 +357,7 @@ def test_parity_with_metadata_from_pypa_wheel(tmp_path): for line in expected: assert line in pkg_info - # Generate a METADATA file using pypa/wheel for comparisson + # Generate a METADATA file using pypa/wheel for comparison wheel_metadata = importlib.import_module("wheel.metadata") pkginfo_to_metadata = getattr(wheel_metadata, "pkginfo_to_metadata", None) From bae02cf059b44b680e574860b3f61c48a4fe09ec Mon Sep 17 00:00:00 2001 From: Matthias Koeppe Date: Sun, 24 Sep 2023 11:29:36 -0700 Subject: [PATCH 0066/1761] CI: Update ci-sage.yml after move of Sage development to GitHub --- .github/workflows/ci-sage.yml | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci-sage.yml b/.github/workflows/ci-sage.yml index 8cb9e3827f..81ee101528 100644 --- a/.github/workflows/ci-sage.yml +++ b/.github/workflows/ci-sage.yml @@ -58,7 +58,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out ${{ env.SPKG }} - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: path: build/pkgs/${{ env.SPKG }}/src - name: Install prerequisites @@ -80,8 +80,7 @@ jobs: linux: # https://github.com/sagemath/sage/blob/develop/.github/workflows/docker.yml - # Use branch of ticket https://trac.sagemath.org/ticket/33288 - uses: sagemath/sagetrac-mirror/.github/workflows/docker.yml@u/mkoeppe/setuptools_ci_target + uses: sagemath/sage/.github/workflows/docker.yml@develop with: # Sage distribution packages to build targets: setuptools pyzmq @@ -89,11 +88,6 @@ jobs: sage_repo: sagemath/sage sage_ref: develop upstream_artifact: upstream - sage_trac_git: https://github.com/sagemath/sagetrac-mirror.git - # Test with the branch from https://trac.sagemath.org/ticket/33288 - # This may provide hotfixes for the CI that have not been merged into - # the sage develop branch yet. - sage_trac_ticket: 33288 # We prefix the image name with the SPKG name ("setuptools-") to avoid the error # 'Package "sage-docker-..." is already associated with another repository.' docker_push_repository: ghcr.io/${{ github.repository }}/setuptools- From a131f83e2967514e2973fb36f2ca64e3ac8efc3c Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 26 Sep 2023 11:23:08 +0200 Subject: [PATCH 0067/1761] GNU: use -Wl,-rpath, instead of -Wl,-R The latter is supported in binutils for backwards compatibility, but in general `-R` is equivalent to `--just-symbols=` when `path` is a file; only when it's a directory, it's treated as `-rpath=`. Better avoid that ambiguity and use `-rpath`. 
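As a sketch of the behavior this change produces on a GNU-ld system (the
expected return value comes from the updated test below; other platforms
return different options):

.. code-block:: python

    from distutils.unixccompiler import UnixCCompiler

    cc = UnixCCompiler()
    # when sysconfig reports GNULD == "yes", the option is now a list of
    # two separate linker arguments instead of one fused -Wl,... string:
    print(cc.runtime_library_dir_option("/foo"))
    # expected: ['-Wl,--enable-new-dtags', '-Wl,-rpath,/foo']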
Also split `-Wl,--enable-new-dtags` and `-Wl,-rpath,...` into two separate arguments, which is more common, and more likely to be parsed correctly by compiler wrappers. This commit does not attempt to add `--enable-new-dtags` to other linkers than binutils ld/gold that support the flag. --- distutils/tests/test_unixccompiler.py | 5 ++++- distutils/unixccompiler.py | 13 +++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py index a018442459..23b4eb5a4c 100644 --- a/distutils/tests/test_unixccompiler.py +++ b/distutils/tests/test_unixccompiler.py @@ -186,7 +186,10 @@ def gcv(v): return 'yes' sysconfig.get_config_var = gcv - assert self.cc.rpath_foo() == '-Wl,--enable-new-dtags,-R/foo' + assert self.cc.rpath_foo() == [ + '-Wl,--enable-new-dtags', + '-Wl,-rpath,/foo' + ] # non-GCC GNULD sys.platform = 'bar' diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py index 6ca2332ae1..d5c245969d 100644 --- a/distutils/unixccompiler.py +++ b/distutils/unixccompiler.py @@ -311,13 +311,14 @@ def runtime_library_dir_option(self, dir): "-L" + dir, ] - # For all compilers, `-Wl` is the presumed way to - # pass a compiler option to the linker and `-R` is - # the way to pass an RPATH. + # For all compilers, `-Wl` is the presumed way to pass a + # compiler option to the linker if sysconfig.get_config_var("GNULD") == "yes": - # GNU ld needs an extra option to get a RUNPATH - # instead of just an RPATH. - return "-Wl,--enable-new-dtags,-R" + dir + return [ + # Force RUNPATH instead of RPATH + "-Wl,--enable-new-dtags", + "-Wl,-rpath," + dir + ] else: return "-Wl,-R" + dir From ac212a7d329167be4293e2cb4e3ac225ce33581c Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 17:44:34 +0100 Subject: [PATCH 0068/1761] Temporarily disable overdue deprecations --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 166f3e3909..ee6edac799 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,8 @@ deps = # ^-- use dev version while we wait for the new release setenv = PYTHONWARNDEFAULTENCODING = 1 - SETUPTOOLS_ENFORCE_DEPRECATION = 1 + SETUPTOOLS_ENFORCE_DEPRECATION = 0 + # ^-- Temporarily disable, until overdue deprecations are handled commands = pytest {posargs} usedevelop = True From b5c9f374bab7855d8d450925bcad4207ceada864 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 14:20:07 +0100 Subject: [PATCH 0069/1761] Remove sphinx-hoverxref This change attempts to address an usability problem in the docs, that arise from the use of `sphinx-hoverxref`. The approach used is to simply remove it, since it is not fundamental for the website. Closes #4064. 
--- docs/conf.py | 13 ------------- setup.cfg | 1 - 2 files changed, 14 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index bd4ffdbadb..fdd2e67130 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -114,19 +114,6 @@ } ) -# Support tooltips on references -extensions += ['hoverxref.extension'] -hoverxref_auto_ref = True -hoverxref_intersphinx = [ - 'python', - 'pip', - 'build', - 'PyPUG', - 'packaging', - 'twine', - 'importlib-resources', -] - # Add support for linking usernames github_url = 'https://github.com' github_repo_org = 'pypa' diff --git a/setup.cfg b/setup.cfg index d6c1f6a07d..04050dce0c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -109,7 +109,6 @@ docs = sphinx-reredirects sphinxcontrib-towncrier sphinx-notfound-page >=1,<2 - sphinx-hoverxref < 2 ssl = From 7a34047999ffcc40fd791add66b3b571314da413 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 1 Oct 2023 15:12:51 -0400 Subject: [PATCH 0070/1761] Re-export `distutils.dep_util` in `setuptools.dep_util` --- docs/deprecated/distutils-legacy.rst | 2 ++ setuptools/dep_util.py | 6 +++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/deprecated/distutils-legacy.rst b/docs/deprecated/distutils-legacy.rst index 8112f12eae..85bda3d88a 100644 --- a/docs/deprecated/distutils-legacy.rst +++ b/docs/deprecated/distutils-legacy.rst @@ -28,6 +28,8 @@ As Distutils is deprecated, any usage of functions or objects from distutils is ``distutils.command.{build_clib,build_ext,build_py,sdist}`` → ``setuptools.command.*`` +``distutils.dep_util`` → ``setuptools.dep_util`` + ``distutils.log`` → :mod:`logging` (standard library) ``distutils.version.*`` → :doc:`packaging.version.* ` diff --git a/setuptools/dep_util.py b/setuptools/dep_util.py index dc9ccf62c2..79597c75f0 100644 --- a/setuptools/dep_util.py +++ b/setuptools/dep_util.py @@ -1,4 +1,8 @@ -from distutils.dep_util import newer_group +from distutils.dep_util import ( + newer as newer, + newer_pairwise as newer_pairwise, + newer_group as newer_group, +) # yes, this is was almost entirely copy-pasted from From 9a8a0b271260ec06a76b57e22f1da0d088310390 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 1 Oct 2023 15:33:11 -0400 Subject: [PATCH 0071/1761] Added towncrier file --- newsfragments/4069.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4069.feature.rst diff --git a/newsfragments/4069.feature.rst b/newsfragments/4069.feature.rst new file mode 100644 index 0000000000..786861e5c6 --- /dev/null +++ b/newsfragments/4069.feature.rst @@ -0,0 +1 @@ +Exported `distutils.dep_util` through `setuptools.dep_util` -- by :user:`github-username` From 3a3413ec3defb035b503f87a5139bd3775f1ffd7 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 1 Oct 2023 15:33:11 -0400 Subject: [PATCH 0072/1761] Added towncrier file --- newsfragments/4069.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4069.feature.rst diff --git a/newsfragments/4069.feature.rst b/newsfragments/4069.feature.rst new file mode 100644 index 0000000000..831e2c0df4 --- /dev/null +++ b/newsfragments/4069.feature.rst @@ -0,0 +1 @@ +Exported `distutils.dep_util` through `setuptools.dep_util` -- by :user:`Avasam` From e76f55b8eaf896e1868cb00b41940151a3d829b4 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 12 Oct 2023 11:55:19 +0100 Subject: [PATCH 0073/1761] Unify passenv with pass_env in tox.ini so we don't have trouble with TOX_OVERRIDE --- tox.ini | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tox.ini b/tox.ini index 
6020647e18..b7f50ea89f 100644 --- a/tox.ini +++ b/tox.ini @@ -13,7 +13,7 @@ commands = usedevelop = True extras = testing -passenv = +pass_env = SETUPTOOLS_USE_DISTUTILS PRE_BUILT_SETUPTOOLS_WHEEL PRE_BUILT_SETUPTOOLS_SDIST @@ -25,8 +25,8 @@ passenv = [testenv:integration] deps = {[testenv]deps} extras = testing-integration -passenv = - {[testenv]passenv} +pass_env = + {[testenv]pass_env} DOWNLOAD_PATH setenv = PROJECT_ROOT = {toxinidir} From ae7e38472249bfc40e9c76381b39e877d5574414 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 12 Oct 2023 18:25:51 +0100 Subject: [PATCH 0074/1761] Docs: use Python 3.11 inventory to avoid errors with distutils references --- docs/conf.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index fdd2e67130..f305029833 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -200,7 +200,12 @@ # Allow linking objects on other Sphinx sites seamlessly: intersphinx_mapping.update( - python=('https://docs.python.org/3', None), + # python=('https://docs.python.org/3', None), + python=('https://docs.python.org/3.11/', None), + # ^-- Python 3.11 is required because it still contains `distutils`. + # Just leaving it as `3` would imply 3.12+, but that causes an + # error with the cross references to disutils functions. + # Inventory cache may cause errors, deleting it solves the problem. ) # Add support for the unreleased "next-version" change notes From b9a3255ac5fe2a3a37829d24685f14fcc90889ef Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Thu, 17 Aug 2023 18:57:51 +0100 Subject: [PATCH 0075/1761] Include type information by default --- setuptools/command/build_py.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 5709eb6d8c..de9762da78 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -302,6 +302,19 @@ def _get_platform_patterns(spec, package, src_dir): for pattern in raw_patterns ) + def get_source_files(self): + py_files = [module[-1] for module in self.find_all_modules()] + + possible_stub_files = set(os.path.splitext(f)[0] + ".pyi" for f in py_files) + stub_files = [f for f in possible_stub_files if os.path.isfile(f)] + + possible_py_typed_files = set( + os.path.join(os.path.dirname(f), "py.typed") for f in py_files + ) + py_typed_files = [f for f in possible_py_typed_files if os.path.isfile(f)] + + return py_files + stub_files + py_typed_files + def assert_relative(path): if not os.path.isabs(path): From 88556f9f07012fe9c434f54d3e3e50d2db755ce2 Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Thu, 17 Aug 2023 19:10:35 +0100 Subject: [PATCH 0076/1761] Add newsfragment --- newsfragments/3136.feat.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/3136.feat.rst diff --git a/newsfragments/3136.feat.rst b/newsfragments/3136.feat.rst new file mode 100644 index 0000000000..d87124cc0f --- /dev/null +++ b/newsfragments/3136.feat.rst @@ -0,0 +1 @@ +Include type information (py.typed, *.pyi) by default (#3136) -- by :user:`Danie-1` From 9e13c09cc59f4ec6c9281365354637257c2100d5 Mon Sep 17 00:00:00 2001 From: Danie-1 <63882624+Danie-1@users.noreply.github.com> Date: Fri, 18 Aug 2023 11:37:42 +0100 Subject: [PATCH 0077/1761] Use generators See https://github.com/pypa/setuptools/pull/4021#discussion_r1298221625 --- setuptools/command/build_py.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/setuptools/command/build_py.py 
b/setuptools/command/build_py.py index de9762da78..2c05790568 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -305,15 +305,13 @@ def _get_platform_patterns(spec, package, src_dir): def get_source_files(self): py_files = [module[-1] for module in self.find_all_modules()] - possible_stub_files = set(os.path.splitext(f)[0] + ".pyi" for f in py_files) + possible_stub_files = (os.path.splitext(f)[0] + ".pyi" for f in py_files) stub_files = [f for f in possible_stub_files if os.path.isfile(f)] - possible_py_typed_files = set( - os.path.join(os.path.dirname(f), "py.typed") for f in py_files - ) - py_typed_files = [f for f in possible_py_typed_files if os.path.isfile(f)] + possible_py_typed_files = (os.path.join(os.path.dirname(f), "py.typed") for f in py_files) + py_typed_files = set(f for f in possible_py_typed_files if os.path.isfile(f)) - return py_files + stub_files + py_typed_files + return py_files + stub_files + list(py_typed_files) def assert_relative(path): From 909a2bcb6d7b2f0c305b3ce22831579e7cacaea4 Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Sun, 3 Sep 2023 11:10:27 +0100 Subject: [PATCH 0078/1761] Revert "Use generators" This reverts commit fa06b37e9621e00827e2febf8452078ce0aa0345. --- setuptools/command/build_py.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 2c05790568..de9762da78 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -305,13 +305,15 @@ def _get_platform_patterns(spec, package, src_dir): def get_source_files(self): py_files = [module[-1] for module in self.find_all_modules()] - possible_stub_files = (os.path.splitext(f)[0] + ".pyi" for f in py_files) + possible_stub_files = set(os.path.splitext(f)[0] + ".pyi" for f in py_files) stub_files = [f for f in possible_stub_files if os.path.isfile(f)] - possible_py_typed_files = (os.path.join(os.path.dirname(f), "py.typed") for f in py_files) - py_typed_files = set(f for f in possible_py_typed_files if os.path.isfile(f)) + possible_py_typed_files = set( + os.path.join(os.path.dirname(f), "py.typed") for f in py_files + ) + py_typed_files = [f for f in possible_py_typed_files if os.path.isfile(f)] - return py_files + stub_files + list(py_typed_files) + return py_files + stub_files + py_typed_files def assert_relative(path): From c272dcf618858d0ba40539751eb0c18f54ead218 Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Sun, 3 Sep 2023 11:11:02 +0100 Subject: [PATCH 0079/1761] Revert "Include type information by default" This reverts commit 2684ba24ce2eaf9d9ab5543b32fd9b33acf911d2. 
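Although reverted here, the feature is re-landed later in this series via
``extra_patterns`` in ``build_py``, settling on the implicit patterns
``*.pyi`` and ``py.typed``. A self-contained sketch of what those patterns
select (the file names are made up):

.. code-block:: python

    import fnmatch
    import os

    _IMPLICIT_DATA_FILES = ("*.pyi", "py.typed")  # as added further below

    files = ["foo/__init__.py", "foo/__init__.pyi", "foo/py.typed", "foo/data.txt"]
    matched = [
        f
        for f in files
        if any(fnmatch.fnmatch(os.path.basename(f), p) for p in _IMPLICIT_DATA_FILES)
    ]
    print(matched)  # ['foo/__init__.pyi', 'foo/py.typed']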
--- setuptools/command/build_py.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index de9762da78..5709eb6d8c 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -302,19 +302,6 @@ def _get_platform_patterns(spec, package, src_dir): for pattern in raw_patterns ) - def get_source_files(self): - py_files = [module[-1] for module in self.find_all_modules()] - - possible_stub_files = set(os.path.splitext(f)[0] + ".pyi" for f in py_files) - stub_files = [f for f in possible_stub_files if os.path.isfile(f)] - - possible_py_typed_files = set( - os.path.join(os.path.dirname(f), "py.typed") for f in py_files - ) - py_typed_files = [f for f in possible_py_typed_files if os.path.isfile(f)] - - return py_files + stub_files + py_typed_files - def assert_relative(path): if not os.path.isabs(path): From 66a36ae55f848ba8d6040b153ef6fe761ccb0f6b Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Sun, 3 Sep 2023 11:41:44 +0100 Subject: [PATCH 0080/1761] Rename newsfragment file --- newsfragments/{3136.feat.rst => 3136.feature.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename newsfragments/{3136.feat.rst => 3136.feature.rst} (100%) diff --git a/newsfragments/3136.feat.rst b/newsfragments/3136.feature.rst similarity index 100% rename from newsfragments/3136.feat.rst rename to newsfragments/3136.feature.rst From e59ce9402ce59d3c5b2295db676bc8d049b2d884 Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Mon, 4 Sep 2023 18:16:27 +0100 Subject: [PATCH 0081/1761] Add tests to check type information is included by default --- setuptools/tests/test_build_meta.py | 91 +++++++++++++++++++++++++++++ 1 file changed, 91 insertions(+) diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index fd7cf168ce..cf6f3b62b5 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -4,6 +4,7 @@ import signal import tarfile import importlib +import itertools import contextlib from concurrent import futures import re @@ -981,3 +982,93 @@ def test_system_exit_in_setuppy(monkeypatch, tmp_path): with pytest.raises(SystemExit, match="some error"): backend = BuildBackend(backend_name="setuptools.build_meta") backend.get_requires_for_build_wheel() + + +class TestTypeInformationIncludedByDefault(): + dont_include_package_data = """ + [project] + name = "foo" + version = "1" + [tools.setuptools] + include-package-data = false + """ + + exclude_type_info = """ + [tool.setuptools.exclude-package-data] + "*" = ["py.typed", "*.pyi"] + """ + + package_1 = { + "foo": { + "bar.pyi": "", + "py.typed": "", + } + } + + package_2 = { + "foo": { + "bar": { + "py.typed": "", + "mod.pyi": "", + } + } + } + + package_3 = { + "foo": { + "namespace": { + "foo.pyi": "", + }, + "__init__.pyi": "", + "py.typed": "" + } + } + + packages_to_test = [package_1, package_2, package_3] + + def is_type_information_file(self, filename): + basename = os.path.basename(filename) + return basename.endswith(".pyi") or basename == "py.typed" + + def get_type_files(self, file_spec): + output = set() + for key in file_spec.keys(): + if isinstance(file_spec[key], str): + if self.is_type_information_file(key): + output.add(key) + else: + output.update(key + "/" + f for f in self.get_type_files(file_spec[key])) + return output + + @pytest.fixture(params=itertools.product(packages_to_test, [True, False])) + def file_spec_and_expected(self, request): + file_spec, exclude_type_information = 
request.param + pyproject = self.dont_include_package_data + if exclude_type_information: + pyproject += self.exclude_type_info + file_spec["pyproject.toml"] = pyproject + + if exclude_type_information: + expected = set() + else: + expected = self.get_type_files(file_spec) + + yield file_spec, expected + + def test_type_information_always_included(self, monkeypatch, tmp_path, file_spec_and_expected): + """Setuptools should include type information in the wheel (py.typed, *.pyi).""" + file_spec, expected = file_spec_and_expected + monkeypatch.chdir(tmp_path) + dist_dir = os.path.abspath('pip-wheel') + os.makedirs(dist_dir) + path.build(file_spec) + build_backend = BuildBackend(backend_name="setuptools.build_meta") + wheel_name = build_backend.build_wheel(dist_dir) + + wheel_file = os.path.join(dist_dir, wheel_name) + assert os.path.isfile(wheel_file) + + with ZipFile(wheel_file) as zipfile: + wheel_contents = set(filter(self.is_type_information_file, zipfile.namelist())) + + assert wheel_contents == expected From 418eb189032486f742b99d2e8591b2c8cc624f97 Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Wed, 6 Sep 2023 22:56:20 +0100 Subject: [PATCH 0082/1761] Format test with black --- setuptools/tests/test_build_meta.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index cf6f3b62b5..43f3100a83 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -984,7 +984,7 @@ def test_system_exit_in_setuppy(monkeypatch, tmp_path): backend.get_requires_for_build_wheel() -class TestTypeInformationIncludedByDefault(): +class TestTypeInformationIncludedByDefault: dont_include_package_data = """ [project] name = "foo" @@ -1020,7 +1020,7 @@ class TestTypeInformationIncludedByDefault(): "foo.pyi": "", }, "__init__.pyi": "", - "py.typed": "" + "py.typed": "", } } @@ -1037,7 +1037,9 @@ def get_type_files(self, file_spec): if self.is_type_information_file(key): output.add(key) else: - output.update(key + "/" + f for f in self.get_type_files(file_spec[key])) + output.update( + key + "/" + f for f in self.get_type_files(file_spec[key]) + ) return output @pytest.fixture(params=itertools.product(packages_to_test, [True, False])) @@ -1055,7 +1057,9 @@ def file_spec_and_expected(self, request): yield file_spec, expected - def test_type_information_always_included(self, monkeypatch, tmp_path, file_spec_and_expected): + def test_type_information_always_included( + self, monkeypatch, tmp_path, file_spec_and_expected + ): """Setuptools should include type information in the wheel (py.typed, *.pyi).""" file_spec, expected = file_spec_and_expected monkeypatch.chdir(tmp_path) @@ -1069,6 +1073,8 @@ def test_type_information_always_included(self, monkeypatch, tmp_path, file_spec assert os.path.isfile(wheel_file) with ZipFile(wheel_file) as zipfile: - wheel_contents = set(filter(self.is_type_information_file, zipfile.namelist())) + wheel_contents = set( + filter(self.is_type_information_file, zipfile.namelist()) + ) assert wheel_contents == expected From df6b3b676a6ce7b4edf6e12a338ced358843ab7d Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Wed, 6 Sep 2023 22:59:42 +0100 Subject: [PATCH 0083/1761] Include type information by default --- setuptools/command/build_py.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 5709eb6d8c..8a201a270c 100644 --- a/setuptools/command/build_py.py +++ 
b/setuptools/command/build_py.py @@ -116,6 +116,7 @@ def find_data_files(self, package, src_dir): self.package_data, package, src_dir, + extra_patterns=['*.pyi', 'py.typed'], ) globs_expanded = map(partial(glob, recursive=True), patterns) # flatten the expanded globs into an iterable of matches @@ -285,7 +286,7 @@ def exclude_data_files(self, package, src_dir, files): return list(unique_everseen(keepers)) @staticmethod - def _get_platform_patterns(spec, package, src_dir): + def _get_platform_patterns(spec, package, src_dir, extra_patterns=[]): """ yield platform-specific path patterns (suitable for glob or fn_match) from a glob-based spec (such as @@ -293,6 +294,7 @@ def _get_platform_patterns(spec, package, src_dir): matching package in src_dir. """ raw_patterns = itertools.chain( + extra_patterns, spec.get('', []), spec.get(package, []), ) From 35b9fa0d8ccd6394b1f2e2a5be51e2a068768839 Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Sun, 17 Sep 2023 16:05:42 +0100 Subject: [PATCH 0084/1761] Move tests from test_build_meta to test_build_py --- setuptools/command/build_py.py | 5 +- setuptools/tests/test_build_meta.py | 103 ++----------------- setuptools/tests/test_build_py.py | 152 ++++++++++++++++++++++++++++ 3 files changed, 162 insertions(+), 98 deletions(-) diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 8a201a270c..242d60f011 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -16,6 +16,9 @@ from ..warnings import SetuptoolsDeprecationWarning +_IMPLICIT_DATA_FILES = ('*.pyi', 'py.typed') + + def make_writable(target): os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE) @@ -116,7 +119,7 @@ def find_data_files(self, package, src_dir): self.package_data, package, src_dir, - extra_patterns=['*.pyi', 'py.typed'], + extra_patterns=_IMPLICIT_DATA_FILES, ) globs_expanded = map(partial(glob, recursive=True), patterns) # flatten the expanded globs into an iterable of matches diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 43f3100a83..5ce4714393 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -4,7 +4,6 @@ import signal import tarfile import importlib -import itertools import contextlib from concurrent import futures import re @@ -373,8 +372,10 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script): "src": { "foo": { "__init__.py": "__version__ = '0.1'", + "__init__.pyi": "__version__: str", "cli.py": "def main(): print('hello world')", "data.txt": "def main(): print('hello world')", + "py.typed": "", } }, } @@ -407,8 +408,10 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script): 'foo-0.1/src', 'foo-0.1/src/foo', 'foo-0.1/src/foo/__init__.py', + 'foo-0.1/src/foo/__init__.pyi', 'foo-0.1/src/foo/cli.py', 'foo-0.1/src/foo/data.txt', + 'foo-0.1/src/foo/py.typed', 'foo-0.1/src/foo.egg-info', 'foo-0.1/src/foo.egg-info/PKG-INFO', 'foo-0.1/src/foo.egg-info/SOURCES.txt', @@ -420,8 +423,10 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script): } assert wheel_contents == { "foo/__init__.py", + "foo/__init__.pyi", # include type information by default "foo/cli.py", "foo/data.txt", # include_package_data defaults to True + "foo/py.typed", # include type information by default "foo-0.1.dist-info/LICENSE.txt", "foo-0.1.dist-info/METADATA", "foo-0.1.dist-info/WHEEL", @@ -982,99 +987,3 @@ def test_system_exit_in_setuppy(monkeypatch, tmp_path): with pytest.raises(SystemExit, match="some error"): backend = 
BuildBackend(backend_name="setuptools.build_meta") backend.get_requires_for_build_wheel() - - -class TestTypeInformationIncludedByDefault: - dont_include_package_data = """ - [project] - name = "foo" - version = "1" - [tools.setuptools] - include-package-data = false - """ - - exclude_type_info = """ - [tool.setuptools.exclude-package-data] - "*" = ["py.typed", "*.pyi"] - """ - - package_1 = { - "foo": { - "bar.pyi": "", - "py.typed": "", - } - } - - package_2 = { - "foo": { - "bar": { - "py.typed": "", - "mod.pyi": "", - } - } - } - - package_3 = { - "foo": { - "namespace": { - "foo.pyi": "", - }, - "__init__.pyi": "", - "py.typed": "", - } - } - - packages_to_test = [package_1, package_2, package_3] - - def is_type_information_file(self, filename): - basename = os.path.basename(filename) - return basename.endswith(".pyi") or basename == "py.typed" - - def get_type_files(self, file_spec): - output = set() - for key in file_spec.keys(): - if isinstance(file_spec[key], str): - if self.is_type_information_file(key): - output.add(key) - else: - output.update( - key + "/" + f for f in self.get_type_files(file_spec[key]) - ) - return output - - @pytest.fixture(params=itertools.product(packages_to_test, [True, False])) - def file_spec_and_expected(self, request): - file_spec, exclude_type_information = request.param - pyproject = self.dont_include_package_data - if exclude_type_information: - pyproject += self.exclude_type_info - file_spec["pyproject.toml"] = pyproject - - if exclude_type_information: - expected = set() - else: - expected = self.get_type_files(file_spec) - - yield file_spec, expected - - def test_type_information_always_included( - self, monkeypatch, tmp_path, file_spec_and_expected - ): - """Setuptools should include type information in the wheel (py.typed, *.pyi).""" - file_spec, expected = file_spec_and_expected - monkeypatch.chdir(tmp_path) - dist_dir = os.path.abspath('pip-wheel') - os.makedirs(dist_dir) - path.build(file_spec) - build_backend = BuildBackend(backend_name="setuptools.build_meta") - wheel_name = build_backend.build_wheel(dist_dir) - - wheel_file = os.path.join(dist_dir, wheel_name) - assert os.path.isfile(wheel_file) - - with ZipFile(wheel_file) as zipfile: - wheel_contents = set( - filter(self.is_type_information_file, zipfile.namelist()) - ) - - assert wheel_contents == expected diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index ca50ce634a..ca10645b0c 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -319,3 +319,155 @@ def test_get_outputs(tmpdir_cwd): f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py", f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py", } + + +PYPROJECTS_FOR_TYPE_INFO_TEST = { + "default_pyproject": DALS( + """ + [project] + name = "foo" + version = "1" + """ + ), + "dont_include_package_data": DALS( + """ + [project] + name = "foo" + version = "1" + + [tools.setuptools] + include-package-data = false + """ + ), + "exclude_type_info": DALS( + """ + [project] + name = "foo" + version = "1" + + [tools.setuptools] + include-package-data = false + + [tool.setuptools.exclude-package-data] + "*" = ["py.typed", "*.pyi"] + """ + ), +} + +EXAMPLES_FOR_TYPE_INFO_TEST = { + "simple_namespace": { + "directory_structure": { + "foo": { + "bar.pyi": "", + "py.typed": "", + "__init__.py": "", + } + }, + "expected_type_files": {"foo/bar.pyi", "foo/py.typed"}, + }, + "nested_inside_namespace": { + "directory_structure": { + "foo": { + "bar": { + "py.typed": 
"", + "mod.pyi": "", + } + } + }, + "expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"}, + }, + "namespace_nested_inside_regular": { + "directory_structure": { + "foo": { + "namespace": { + "foo.pyi": "", + }, + "__init__.pyi": "", + "py.typed": "", + } + }, + "expected_type_files": { + "foo/namespace/foo.pyi", + "foo/__init__.pyi", + "foo/py.typed", + }, + }, +} + + +@pytest.mark.parametrize( + "pyproject", ["default_pyproject", "dont_include_package_data"] +) +@pytest.mark.parametrize("example", EXAMPLES_FOR_TYPE_INFO_TEST.keys()) +def test_type_files_included_by_default(tmpdir_cwd, pyproject, example): + structure = EXAMPLES_FOR_TYPE_INFO_TEST[example]["directory_structure"] + expected_type_files = EXAMPLES_FOR_TYPE_INFO_TEST[example]["expected_type_files"] + jaraco.path.build(structure) + pyproject_contents = PYPROJECTS_FOR_TYPE_INFO_TEST[pyproject] + with open("pyproject.toml", "w") as pyproject_file: + pyproject_file.write(pyproject_contents) + + dist = Distribution({"script_name": "%PEP 517%"}) + dist.parse_config_files() + build_py = dist.get_command_obj("build_py") + build_py.finalize_options() + build_py.run() + + build_dir = Path(dist.get_command_obj("build_py").build_lib) + outputs = { + os.path.relpath(x.replace(os.sep, "/"), build_dir) + for x in build_py.get_outputs() + } + assert expected_type_files <= outputs + + +@pytest.mark.parametrize("pyproject", ["exclude_type_info"]) +@pytest.mark.parametrize("example", EXAMPLES_FOR_TYPE_INFO_TEST.keys()) +def test_type_files_can_be_excluded(tmpdir_cwd, pyproject, example): + structure = EXAMPLES_FOR_TYPE_INFO_TEST[example]["directory_structure"] + expected_type_files = EXAMPLES_FOR_TYPE_INFO_TEST[example]["expected_type_files"] + jaraco.path.build(structure) + pyproject_contents = PYPROJECTS_FOR_TYPE_INFO_TEST[pyproject] + with open("pyproject.toml", "w") as pyproject_file: + pyproject_file.write(pyproject_contents) + + dist = Distribution({"script_name": "%PEP 517%"}) + dist.parse_config_files() + build_py = dist.get_command_obj("build_py") + build_py.finalize_options() + build_py.run() + + build_dir = Path(dist.get_command_obj("build_py").build_lib) + outputs = { + os.path.relpath(x.replace(os.sep, "/"), build_dir) + for x in build_py.get_outputs() + } + assert expected_type_files.isdisjoint(outputs) + + +def test_stub_only_package(tmpdir_cwd): + structure = {"foo-stubs": {"__init__.pyi": "", "bar.pyi": ""}} + expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"} + jaraco.path.build(structure) + pyproject_contents = DALS( + """ + [project] + name = "foo-stubs" + version = "1" + """ + ) + with open("pyproject.toml", "w") as pyproject_file: + pyproject_file.write(pyproject_contents) + + dist = Distribution({"script_name": "%PEP 517%"}) + dist.parse_config_files() + build_py = dist.get_command_obj("build_py") + build_py.finalize_options() + build_py.run() + + build_dir = Path(dist.get_command_obj("build_py").build_lib) + outputs = { + os.path.relpath(x.replace(os.sep, "/"), build_dir) + for x in build_py.get_outputs() + } + assert expected_type_files <= outputs From 6857643df8ec201c308eb32f6d1467f7417377a7 Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Tue, 19 Sep 2023 20:28:11 +0100 Subject: [PATCH 0085/1761] Fix test on windows by replacing os.sep --- setuptools/tests/test_build_py.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index ca10645b0c..d86beab486 100644 --- a/setuptools/tests/test_build_py.py 
+++ b/setuptools/tests/test_build_py.py @@ -415,7 +415,7 @@ def test_type_files_included_by_default(tmpdir_cwd, pyproject, example): build_dir = Path(dist.get_command_obj("build_py").build_lib) outputs = { - os.path.relpath(x.replace(os.sep, "/"), build_dir) + os.path.relpath(x, build_dir).replace(os.sep, "/") for x in build_py.get_outputs() } assert expected_type_files <= outputs @@ -439,7 +439,7 @@ def test_type_files_can_be_excluded(tmpdir_cwd, pyproject, example): build_dir = Path(dist.get_command_obj("build_py").build_lib) outputs = { - os.path.relpath(x.replace(os.sep, "/"), build_dir) + os.path.relpath(x, build_dir).replace(os.sep, "/") for x in build_py.get_outputs() } assert expected_type_files.isdisjoint(outputs) @@ -467,7 +467,7 @@ def test_stub_only_package(tmpdir_cwd): build_dir = Path(dist.get_command_obj("build_py").build_lib) outputs = { - os.path.relpath(x.replace(os.sep, "/"), build_dir) + os.path.relpath(x, build_dir).replace(os.sep, "/") for x in build_py.get_outputs() } assert expected_type_files <= outputs From 67c071c409f393982efcc88e1e8134118bb11465 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 13 Oct 2023 14:53:05 +0100 Subject: [PATCH 0086/1761] Refactor type files tests in test_build_py --- setuptools/tests/test_build_py.py | 250 ++++++++++++++---------------- 1 file changed, 117 insertions(+), 133 deletions(-) diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index d86beab486..c6ddc09dd8 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -321,153 +321,137 @@ def test_get_outputs(tmpdir_cwd): } -PYPROJECTS_FOR_TYPE_INFO_TEST = { - "default_pyproject": DALS( - """ - [project] - name = "foo" - version = "1" - """ - ), - "dont_include_package_data": DALS( - """ - [project] - name = "foo" - version = "1" - - [tools.setuptools] - include-package-data = false - """ - ), - "exclude_type_info": DALS( - """ - [project] - name = "foo" - version = "1" - - [tools.setuptools] - include-package-data = false - - [tool.setuptools.exclude-package-data] - "*" = ["py.typed", "*.pyi"] - """ - ), -} +class TestTypeInfoFiles: + PYPROJECTS = { + "default_pyproject": DALS( + """ + [project] + name = "foo" + version = "1" + """ + ), + "dont_include_package_data": DALS( + """ + [project] + name = "foo" + version = "1" + + [tools.setuptools] + include-package-data = false + """ + ), + "exclude_type_info": DALS( + """ + [project] + name = "foo" + version = "1" + + [tools.setuptools] + include-package-data = false + + [tool.setuptools.exclude-package-data] + "*" = ["py.typed", "*.pyi"] + """ + ), + } -EXAMPLES_FOR_TYPE_INFO_TEST = { - "simple_namespace": { - "directory_structure": { - "foo": { - "bar.pyi": "", - "py.typed": "", - "__init__.py": "", - } - }, - "expected_type_files": {"foo/bar.pyi", "foo/py.typed"}, - }, - "nested_inside_namespace": { - "directory_structure": { - "foo": { - "bar": { + EXAMPLES = { + "simple_namespace": { + "directory_structure": { + "foo": { + "bar.pyi": "", "py.typed": "", - "mod.pyi": "", + "__init__.py": "", } - } + }, + "expected_type_files": {"foo/bar.pyi", "foo/py.typed"}, }, - "expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"}, - }, - "namespace_nested_inside_regular": { - "directory_structure": { - "foo": { - "namespace": { - "foo.pyi": "", - }, - "__init__.pyi": "", - "py.typed": "", - } + "nested_inside_namespace": { + "directory_structure": { + "foo": { + "bar": { + "py.typed": "", + "mod.pyi": "", + } + } + }, + "expected_type_files": 
{"foo/bar/mod.pyi", "foo/bar/py.typed"}, }, - "expected_type_files": { - "foo/namespace/foo.pyi", - "foo/__init__.pyi", - "foo/py.typed", + "namespace_nested_inside_regular": { + "directory_structure": { + "foo": { + "namespace": { + "foo.pyi": "", + }, + "__init__.pyi": "", + "py.typed": "", + } + }, + "expected_type_files": { + "foo/namespace/foo.pyi", + "foo/__init__.pyi", + "foo/py.typed", + }, }, - }, -} - - -@pytest.mark.parametrize( - "pyproject", ["default_pyproject", "dont_include_package_data"] -) -@pytest.mark.parametrize("example", EXAMPLES_FOR_TYPE_INFO_TEST.keys()) -def test_type_files_included_by_default(tmpdir_cwd, pyproject, example): - structure = EXAMPLES_FOR_TYPE_INFO_TEST[example]["directory_structure"] - expected_type_files = EXAMPLES_FOR_TYPE_INFO_TEST[example]["expected_type_files"] - jaraco.path.build(structure) - pyproject_contents = PYPROJECTS_FOR_TYPE_INFO_TEST[pyproject] - with open("pyproject.toml", "w") as pyproject_file: - pyproject_file.write(pyproject_contents) - - dist = Distribution({"script_name": "%PEP 517%"}) - dist.parse_config_files() - build_py = dist.get_command_obj("build_py") - build_py.finalize_options() - build_py.run() - - build_dir = Path(dist.get_command_obj("build_py").build_lib) - outputs = { - os.path.relpath(x, build_dir).replace(os.sep, "/") - for x in build_py.get_outputs() - } - assert expected_type_files <= outputs - - -@pytest.mark.parametrize("pyproject", ["exclude_type_info"]) -@pytest.mark.parametrize("example", EXAMPLES_FOR_TYPE_INFO_TEST.keys()) -def test_type_files_can_be_excluded(tmpdir_cwd, pyproject, example): - structure = EXAMPLES_FOR_TYPE_INFO_TEST[example]["directory_structure"] - expected_type_files = EXAMPLES_FOR_TYPE_INFO_TEST[example]["expected_type_files"] - jaraco.path.build(structure) - pyproject_contents = PYPROJECTS_FOR_TYPE_INFO_TEST[pyproject] - with open("pyproject.toml", "w") as pyproject_file: - pyproject_file.write(pyproject_contents) - - dist = Distribution({"script_name": "%PEP 517%"}) - dist.parse_config_files() - build_py = dist.get_command_obj("build_py") - build_py.finalize_options() - build_py.run() - - build_dir = Path(dist.get_command_obj("build_py").build_lib) - outputs = { - os.path.relpath(x, build_dir).replace(os.sep, "/") - for x in build_py.get_outputs() } - assert expected_type_files.isdisjoint(outputs) - -def test_stub_only_package(tmpdir_cwd): - structure = {"foo-stubs": {"__init__.pyi": "", "bar.pyi": ""}} - expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"} - jaraco.path.build(structure) - pyproject_contents = DALS( - """ - [project] - name = "foo-stubs" - version = "1" - """ + @pytest.mark.parametrize( + "pyproject", ["default_pyproject", "dont_include_package_data"] ) - with open("pyproject.toml", "w") as pyproject_file: - pyproject_file.write(pyproject_contents) - - dist = Distribution({"script_name": "%PEP 517%"}) + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example): + structure = self.EXAMPLES[example]["directory_structure"] + expected_type_files = self.EXAMPLES[example]["expected_type_files"] + structure["pyproject.toml"] = self.PYPROJECTS[pyproject] + jaraco.path.build(structure) + + build_py = run_build_py() + outputs = get_outputs(build_py) + assert expected_type_files <= outputs + + @pytest.mark.parametrize("pyproject", ["exclude_type_info"]) + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example): + 
structure = self.EXAMPLES[example]["directory_structure"] + expected_type_files = self.EXAMPLES[example]["expected_type_files"] + structure["pyproject.toml"] = self.PYPROJECTS[pyproject] + jaraco.path.build(structure) + + build_py = run_build_py() + outputs = get_outputs(build_py) + assert expected_type_files.isdisjoint(outputs) + + def test_stub_only_package(self, tmpdir_cwd): + structure = { + "pyproject.toml": DALS( + """ + [project] + name = "foo-stubs" + version = "1" + """ + ), + "foo-stubs": {"__init__.pyi": "", "bar.pyi": ""}, + } + expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"} + jaraco.path.build(structure) + + build_py = run_build_py() + outputs = get_outputs(build_py) + assert expected_type_files <= outputs + + +def run_build_py(script_name="%build_meta%"): + dist = Distribution({"script_name": script_name}) dist.parse_config_files() build_py = dist.get_command_obj("build_py") build_py.finalize_options() build_py.run() + return build_py - build_dir = Path(dist.get_command_obj("build_py").build_lib) - outputs = { + +def get_outputs(build_py): + build_dir = Path(build_py.build_lib) + return { os.path.relpath(x, build_dir).replace(os.sep, "/") for x in build_py.get_outputs() } - assert expected_type_files <= outputs From cbd4f51d5a2136a262c8b84ffdf413f57c2ccdd3 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 13 Oct 2023 15:01:19 +0100 Subject: [PATCH 0087/1761] Avoid running build_py in tests to speed up process --- setuptools/tests/test_build_py.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index c6ddc09dd8..39a044dd54 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -405,7 +405,7 @@ def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example): structure["pyproject.toml"] = self.PYPROJECTS[pyproject] jaraco.path.build(structure) - build_py = run_build_py() + build_py = get_finalized_build_py() outputs = get_outputs(build_py) assert expected_type_files <= outputs @@ -417,7 +417,7 @@ def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example): structure["pyproject.toml"] = self.PYPROJECTS[pyproject] jaraco.path.build(structure) - build_py = run_build_py() + build_py = get_finalized_build_py() outputs = get_outputs(build_py) assert expected_type_files.isdisjoint(outputs) @@ -435,17 +435,16 @@ def test_stub_only_package(self, tmpdir_cwd): expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"} jaraco.path.build(structure) - build_py = run_build_py() + build_py = get_finalized_build_py() outputs = get_outputs(build_py) assert expected_type_files <= outputs -def run_build_py(script_name="%build_meta%"): +def get_finalized_build_py(script_name="%build_py-test%"): dist = Distribution({"script_name": script_name}) dist.parse_config_files() build_py = dist.get_command_obj("build_py") build_py.finalize_options() - build_py.run() return build_py From 111b05f4e63011ebb3a5425eba19c397cc7dcfc5 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 13 Oct 2023 15:22:22 +0100 Subject: [PATCH 0088/1761] Mark feature as experimental in docs --- docs/userguide/miscellaneous.rst | 13 +++++++++++++ newsfragments/3136.feature.rst | 3 ++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst index 19908e05ad..bba3928aa5 100644 --- a/docs/userguide/miscellaneous.rst +++ 
b/docs/userguide/miscellaneous.rst @@ -10,6 +10,19 @@ These include all :term:`pure Python modules ` in the headers) listed as part of extensions when creating a :term:`source distribution (or "sdist")`. +.. note:: + .. versionadded:: v68.3.0 + ``setuptools`` will attempt to include type information files + by default in the distribution + (``.pyi`` and ``py.typed``, as specified in :pep:`561`). + + *Please note however that this feature is* **EXPERIMENTAL** *and my change in + the future.* + + If you have ``.pyi`` and ``py.typed`` files in your project, but do not + wish to distribute them, you can opt out by setting + :doc:`exclude-package-data ` to remove them. + However, when building more complex packages (e.g. packages that include non-Python files, or that need to use custom C headers), you might find that not all files present in your project folder are included in package diff --git a/newsfragments/3136.feature.rst b/newsfragments/3136.feature.rst index d87124cc0f..a57a8f4e57 100644 --- a/newsfragments/3136.feature.rst +++ b/newsfragments/3136.feature.rst @@ -1 +1,2 @@ -Include type information (py.typed, *.pyi) by default (#3136) -- by :user:`Danie-1` +Include type information (``py.typed``, ``*.pyi``) by default (#3136) -- by :user:`Danie-1`, +**EXPERIMENTAL**. From 63f82edcb5365a6734c3e1c0e1c95302ca876f73 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 13 Oct 2023 15:34:28 +0100 Subject: [PATCH 0089/1761] Avoid modifying dict in test_build_py --- setuptools/tests/test_build_py.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index 39a044dd54..500a9ab6f3 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -400,9 +400,11 @@ class TestTypeInfoFiles: ) @pytest.mark.parametrize("example", EXAMPLES.keys()) def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example): - structure = self.EXAMPLES[example]["directory_structure"] + structure = { + **self.EXAMPLES[example]["directory_structure"], + "pyproject.toml": self.PYPROJECTS[pyproject], + } expected_type_files = self.EXAMPLES[example]["expected_type_files"] - structure["pyproject.toml"] = self.PYPROJECTS[pyproject] jaraco.path.build(structure) build_py = get_finalized_build_py() @@ -412,9 +414,11 @@ def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example): @pytest.mark.parametrize("pyproject", ["exclude_type_info"]) @pytest.mark.parametrize("example", EXAMPLES.keys()) def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example): - structure = self.EXAMPLES[example]["directory_structure"] + structure = { + **self.EXAMPLES[example]["directory_structure"], + "pyproject.toml": self.PYPROJECTS[pyproject], + } expected_type_files = self.EXAMPLES[example]["expected_type_files"] - structure["pyproject.toml"] = self.PYPROJECTS[pyproject] jaraco.path.build(structure) build_py = get_finalized_build_py() From 9e4a18f1eb0a1518635c8bf99e898250ea33992c Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 13 Oct 2023 14:04:24 -0400 Subject: [PATCH 0090/1761] Update newsfragments/4069.feature.rst --- newsfragments/4069.feature.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/newsfragments/4069.feature.rst b/newsfragments/4069.feature.rst index 831e2c0df4..d2b79a0c85 100644 --- a/newsfragments/4069.feature.rst +++ b/newsfragments/4069.feature.rst @@ -1 +1 @@ -Exported `distutils.dep_util` through `setuptools.dep_util` -- by 
:user:`Avasam` +Exported `distutils.dep_util` through ``setuptools.dep_util`` -- by :user:`Avasam` From 42fc47eb5714b8d34cae669a772792b42ca05cb1 Mon Sep 17 00:00:00 2001 From: Daniel Naylor Date: Sat, 14 Oct 2023 22:54:19 +0100 Subject: [PATCH 0091/1761] Fix typo in docs --- docs/userguide/miscellaneous.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst index bba3928aa5..ea0a58845e 100644 --- a/docs/userguide/miscellaneous.rst +++ b/docs/userguide/miscellaneous.rst @@ -16,7 +16,7 @@ distribution (or "sdist")`. by default in the distribution (``.pyi`` and ``py.typed``, as specified in :pep:`561`). - *Please note however that this feature is* **EXPERIMENTAL** *and my change in + *Please note however that this feature is* **EXPERIMENTAL** *and may change in the future.* If you have ``.pyi`` and ``py.typed`` files in your project, but do not From 29f052da470ac281f15d87d4a40511b5cd1e4834 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= Date: Thu, 26 Oct 2023 14:55:12 +0200 Subject: [PATCH 0092/1761] Fix setuptools.depends:get_module_constant() on Python 3.13.0a1 Don't hardcode opcode numbers, look them up instead. Fixes https://github.com/pypa/setuptools/issues/4090 --- setuptools/depends.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/setuptools/depends.py b/setuptools/depends.py index 180e820459..42907d9bd4 100644 --- a/setuptools/depends.py +++ b/setuptools/depends.py @@ -142,9 +142,9 @@ def extract_constant(code, symbol, default=-1): name_idx = list(code.co_names).index(symbol) - STORE_NAME = 90 - STORE_GLOBAL = 97 - LOAD_CONST = 100 + STORE_NAME = dis.opmap['STORE_NAME'] + STORE_GLOBAL = dis.opmap['STORE_GLOBAL'] + LOAD_CONST = dis.opmap['LOAD_CONST'] const = default From cc01c94bc14c58184e6722c96f1b983a149b846e Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 27 Oct 2023 17:14:18 -0400 Subject: [PATCH 0093/1761] Remove possibly redundant importlib_machinery code --- pkg_resources/__init__.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 3baa1f3c24..3c1f1875c5 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -41,6 +41,7 @@ import ntpath import posixpath import importlib +import importlib.machinery from pkgutil import get_importer try: @@ -68,13 +69,9 @@ from os import open as os_open from os.path import isdir, split -try: - import importlib.machinery as importlib_machinery - # access attribute to force import under delayed import mechanisms. - importlib_machinery.__name__ -except ImportError: - importlib_machinery = None +# access attribute to force import under delayed import mechanisms. 
+importlib.machinery.__name__ from pkg_resources.extern.jaraco.text import ( yield_lines, @@ -1734,7 +1731,7 @@ def _register(cls): 'SourcelessFileLoader', ) for name in loader_names: - loader_cls = getattr(importlib_machinery, name, type(None)) + loader_cls = getattr(importlib.machinery, name, type(None)) register_loader_type(loader_cls, cls) @@ -2231,7 +2228,7 @@ def resolve_egg_link(path): if hasattr(pkgutil, 'ImpImporter'): register_finder(pkgutil.ImpImporter, find_on_path) -register_finder(importlib_machinery.FileFinder, find_on_path) +register_finder(importlib.machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) @@ -2398,7 +2395,7 @@ def file_ns_handler(importer, path_item, packageName, module): register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) register_namespace_handler(zipimport.zipimporter, file_ns_handler) -register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) +register_namespace_handler(importlib.machinery.FileFinder, file_ns_handler) def null_ns_handler(importer, path_item, packageName, module): From 4a7033164d5bd4fe7ee4d96dae1c0cbfb122df9e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 4 Nov 2023 11:45:37 -0400 Subject: [PATCH 0094/1761] Clean up docstrings and remove crufty comments. Replace integer literals with booleans. --- distutils/dep_util.py | 91 ++++++++++++++++++++----------------------- 1 file changed, 43 insertions(+), 48 deletions(-) diff --git a/distutils/dep_util.py b/distutils/dep_util.py index 48da8641c6..3b3c830c13 100644 --- a/distutils/dep_util.py +++ b/distutils/dep_util.py @@ -1,45 +1,45 @@ -"""distutils.dep_util +"""Timestamp comparison of files and groups of files.""" -Utility functions for simple, timestamp-based dependency of files -and groups of files; also, function based entirely on such -timestamp dependency analysis.""" +import os.path +import stat -import os from .errors import DistutilsFileError def newer(source, target): - """Return true if 'source' exists and is more recently modified than - 'target', or if 'source' exists and 'target' doesn't. Return false if - both exist and 'target' is the same age or younger than 'source'. - Raise DistutilsFileError if 'source' does not exist. + """ + Is source modified more recently than target. + + Returns True if 'source' is modified more recently than + 'target' or if 'target' does not exist. + + Raises DistutilsFileError if 'source' does not exist. """ if not os.path.exists(source): raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source)) - if not os.path.exists(target): - return 1 - from stat import ST_MTIME + if not os.path.exists(target): + return True - mtime1 = os.stat(source)[ST_MTIME] - mtime2 = os.stat(target)[ST_MTIME] + mtime1 = os.stat(source)[stat.ST_MTIME] + mtime2 = os.stat(target)[stat.ST_MTIME] return mtime1 > mtime2 -# newer () - - def newer_pairwise(sources, targets): - """Walk two filename lists in parallel, testing if each source is newer - than its corresponding target. Return a pair of lists (sources, + """ + Filter filenames where sources are newer than targets. + + Walk two filename lists in parallel, testing if each source is newer + than its corresponding target. Returns a pair of lists (sources, targets) where source is newer than target, according to the semantics of 'newer()'. 
""" if len(sources) != len(targets): raise ValueError("'sources' and 'targets' must be same length") - # build a pair of lists (sources, targets) where source is newer + # build a pair of lists (sources, targets) where source is newer n_sources = [] n_targets = [] for i in range(len(sources)): @@ -50,33 +50,31 @@ def newer_pairwise(sources, targets): return (n_sources, n_targets) -# newer_pairwise () - - def newer_group(sources, target, missing='error'): - """Return true if 'target' is out-of-date with respect to any file - listed in 'sources'. In other words, if 'target' exists and is newer - than every file in 'sources', return false; otherwise return true. - 'missing' controls what we do when a source file is missing; the - default ("error") is to blow up with an OSError from inside 'stat()'; - if it is "ignore", we silently drop any missing source files; if it is - "newer", any missing source files make us assume that 'target' is - out-of-date (this is handy in "dry-run" mode: it'll make you pretend to - carry out commands that wouldn't work because inputs are missing, but - that doesn't matter because you're not actually going to run the - commands). + """ + Is target out-of-date with respect to any file in sources. + + Return True if 'target' is out-of-date with respect to any file + listed in 'sources'. In other words, if 'target' exists and is newer + than every file in 'sources', return False; otherwise return True. + ``missing`` controls how to handle a missing source file: + + - error (default): allow the ``stat()`` call to fail. + - ignore: silently disregard any missing source files. + - newer: treat missing source files as "target out of date". This + mode is handy in "dry-run" mode: it will pretend to carry out + commands that wouldn't work because inputs are missing, but + that doesn't matter because dry-run won't run the commands. """ # If the target doesn't even exist, then it's definitely out-of-date. if not os.path.exists(target): - return 1 + return True - # Otherwise we have to find out the hard way: if *any* source file + # If *any* source file # is more recent than 'target', then 'target' is out-of-date and - # we can immediately return true. If we fall through to the end - # of the loop, then 'target' is up-to-date and we return false. - from stat import ST_MTIME - - target_mtime = os.stat(target)[ST_MTIME] + # we can immediately return True. If the loop completes, then + # 'target' is up-to-date. + target_mtime = os.stat(target)[stat.ST_MTIME] for source in sources: if not os.path.exists(source): if missing == 'error': # blow up when we stat() the file @@ -84,13 +82,10 @@ def newer_group(sources, target, missing='error'): elif missing == 'ignore': # missing source dropped from continue # target's dependency list elif missing == 'newer': # missing source means target is - return 1 # out-of-date + return True # out-of-date - source_mtime = os.stat(source)[ST_MTIME] + source_mtime = os.stat(source)[stat.ST_MTIME] if source_mtime > target_mtime: - return 1 + return True else: - return 0 - - -# newer_group () + return False From c4e27db944fc8ef08b215e593bbd328ce17bfff5 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 4 Nov 2023 12:35:52 -0400 Subject: [PATCH 0095/1761] "Refactor to newer_group to utilize higher level constructs ("any"), re-use _newer logic, and avoid complexity in branching." 
--- distutils/dep_util.py | 48 +++++++++++++++++-------------------------- 1 file changed, 19 insertions(+), 29 deletions(-) diff --git a/distutils/dep_util.py b/distutils/dep_util.py index 3b3c830c13..9250e937a1 100644 --- a/distutils/dep_util.py +++ b/distutils/dep_util.py @@ -6,6 +6,16 @@ from .errors import DistutilsFileError +def _newer(source, target): + if not os.path.exists(target): + return True + + mtime1 = os.stat(source)[stat.ST_MTIME] + mtime2 = os.stat(target)[stat.ST_MTIME] + + return mtime1 > mtime2 + + def newer(source, target): """ Is source modified more recently than target. @@ -18,13 +28,7 @@ def newer(source, target): if not os.path.exists(source): raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source)) - if not os.path.exists(target): - return True - - mtime1 = os.stat(source)[stat.ST_MTIME] - mtime2 = os.stat(target)[stat.ST_MTIME] - - return mtime1 > mtime2 + return _newer(source, target) def newer_pairwise(sources, targets): @@ -66,26 +70,12 @@ def newer_group(sources, target, missing='error'): commands that wouldn't work because inputs are missing, but that doesn't matter because dry-run won't run the commands. """ - # If the target doesn't even exist, then it's definitely out-of-date. - if not os.path.exists(target): - return True - # If *any* source file - # is more recent than 'target', then 'target' is out-of-date and - # we can immediately return True. If the loop completes, then - # 'target' is up-to-date. - target_mtime = os.stat(target)[stat.ST_MTIME] - for source in sources: - if not os.path.exists(source): - if missing == 'error': # blow up when we stat() the file - pass - elif missing == 'ignore': # missing source dropped from - continue # target's dependency list - elif missing == 'newer': # missing source means target is - return True # out-of-date - - source_mtime = os.stat(source)[stat.ST_MTIME] - if source_mtime > target_mtime: - return True - else: - return False + def missing_as_newer(source): + return missing == 'newer' and not os.path.exists(source) + + ignored = os.path.exists if missing == 'ignore' else None + return any( + missing_as_newer(source) or _newer(source, target) + for source in filter(ignored, sources) + ) From d7aa1884989cb8e57382553d4c39b7e2a48b12f8 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 4 Nov 2023 12:38:06 -0400 Subject: [PATCH 0096/1761] Prefer os.path.getmtime --- distutils/dep_util.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/distutils/dep_util.py b/distutils/dep_util.py index 9250e937a1..f4f006c728 100644 --- a/distutils/dep_util.py +++ b/distutils/dep_util.py @@ -1,7 +1,6 @@ """Timestamp comparison of files and groups of files.""" import os.path -import stat from .errors import DistutilsFileError @@ -10,10 +9,7 @@ def _newer(source, target): if not os.path.exists(target): return True - mtime1 = os.stat(source)[stat.ST_MTIME] - mtime2 = os.stat(target)[stat.ST_MTIME] - - return mtime1 > mtime2 + return os.path.getmtime(source) > os.path.getmtime(target) def newer(source, target): From dfc8e609c9ca359d2c73815af511c2f286d3a92c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 4 Nov 2023 12:38:57 -0400 Subject: [PATCH 0097/1761] Inline check for target presence. 
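
The inlined form leans on 'or' short-circuiting: when the target does not
exist, the os.path.getmtime() calls on the right are never evaluated, so a
missing target cannot raise. A tiny sketch (the target name is made up):

    import os

    def _newer(source, target):
        return not os.path.exists(target) or (
            os.path.getmtime(source) > os.path.getmtime(target)
        )

    # Missing target: short-circuits to True before getmtime() runs.
    print(_newer(__file__, 'no-such-target.o'))  # True, and no OSError
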
--- distutils/dep_util.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/distutils/dep_util.py b/distutils/dep_util.py index f4f006c728..eec76c3c1e 100644 --- a/distutils/dep_util.py +++ b/distutils/dep_util.py @@ -6,10 +6,9 @@ def _newer(source, target): - if not os.path.exists(target): - return True - - return os.path.getmtime(source) > os.path.getmtime(target) + return not os.path.exists(target) or ( + os.path.getmtime(source) > os.path.getmtime(target) + ) def newer(source, target): From bdffb48680406e6a8033f35cc68b061f7765d2be Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 4 Nov 2023 12:59:15 -0400 Subject: [PATCH 0098/1761] Add test for newer_pairwise, bringing coverage in dep_util to 100%. --- distutils/tests/test_dep_util.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/distutils/tests/test_dep_util.py b/distutils/tests/test_dep_util.py index e5dcad9464..759772d2b9 100644 --- a/distutils/tests/test_dep_util.py +++ b/distutils/tests/test_dep_util.py @@ -27,7 +27,7 @@ def test_newer(self): # than 'new_file'. assert not newer(old_file, new_file) - def test_newer_pairwise(self): + def _setup_1234(self): tmpdir = self.mkdtemp() sources = os.path.join(tmpdir, 'sources') targets = os.path.join(tmpdir, 'targets') @@ -40,9 +40,22 @@ def test_newer_pairwise(self): self.write_file(one) self.write_file(two) self.write_file(four) + return one, two, three, four + + def test_newer_pairwise(self): + one, two, three, four = self._setup_1234() assert newer_pairwise([one, two], [three, four]) == ([one], [three]) + def test_newer_pairwise_mismatch(self): + one, two, three, four = self._setup_1234() + + with pytest.raises(ValueError): + newer_pairwise([one], [three, four]) + + with pytest.raises(ValueError): + newer_pairwise([one, two], [three]) + def test_newer_group(self): tmpdir = self.mkdtemp() sources = os.path.join(tmpdir, 'sources') From 0720b98908e0a6143c4fe260f3b154cf4426c8bc Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 4 Nov 2023 13:07:05 -0400 Subject: [PATCH 0099/1761] Replace for/append loop with a filter function (newer_pair). --- distutils/dep_util.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/distutils/dep_util.py b/distutils/dep_util.py index eec76c3c1e..18aeae462f 100644 --- a/distutils/dep_util.py +++ b/distutils/dep_util.py @@ -38,15 +38,11 @@ def newer_pairwise(sources, targets): if len(sources) != len(targets): raise ValueError("'sources' and 'targets' must be same length") - # build a pair of lists (sources, targets) where source is newer - n_sources = [] - n_targets = [] - for i in range(len(sources)): - if newer(sources[i], targets[i]): - n_sources.append(sources[i]) - n_targets.append(targets[i]) - - return (n_sources, n_targets) + def newer_pair(pair): + return newer(*pair) + + newer_pairs = filter(newer_pair, zip(sources, targets)) + return tuple(map(list, zip(*newer_pairs))) def newer_group(sources, target, missing='error'): From 131eff757c51fa8781404a8f1d46c358804a0ce7 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 4 Nov 2023 13:14:24 -0400 Subject: [PATCH 0100/1761] Replace explicit list check with zip(strict=True). Allows inputs to be iterables. 
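
On Python 3.10+ the shim resolves to the builtin zip(strict=True); older
interpreters get the more_itertools-derived fallback added below. A short
sketch of the behavior newer_pairwise now relies on (run on 3.10+ as
written; the filenames are made up):

    # Unequal lengths raise ValueError, much like the removed explicit
    # length check did:
    try:
        list(zip(['a.c', 'b.c'], ['a.o'], strict=True))
    except ValueError as exc:
        print(exc)  # zip() argument 2 is shorter than argument 1

    # And arbitrary (unsized) iterables are accepted, not just lists:
    sources = iter(['a.c', 'b.c'])
    targets = iter(['a.o', 'b.o'])
    print(list(zip(sources, targets, strict=True)))  # [('a.c', 'a.o'), ('b.c', 'b.o')]
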
--- distutils/dep_util.py | 7 +++---- distutils/py39compat.py | 46 ++++++++++++++++++++++++++++++++++++++++- 2 files changed, 48 insertions(+), 5 deletions(-) diff --git a/distutils/dep_util.py b/distutils/dep_util.py index 18aeae462f..d8538b5001 100644 --- a/distutils/dep_util.py +++ b/distutils/dep_util.py @@ -3,6 +3,7 @@ import os.path from .errors import DistutilsFileError +from .py39compat import zip_strict def _newer(source, target): @@ -30,18 +31,16 @@ def newer_pairwise(sources, targets): """ Filter filenames where sources are newer than targets. - Walk two filename lists in parallel, testing if each source is newer + Walk two filename iterables in parallel, testing if each source is newer than its corresponding target. Returns a pair of lists (sources, targets) where source is newer than target, according to the semantics of 'newer()'. """ - if len(sources) != len(targets): - raise ValueError("'sources' and 'targets' must be same length") def newer_pair(pair): return newer(*pair) - newer_pairs = filter(newer_pair, zip(sources, targets)) + newer_pairs = filter(newer_pair, zip_strict(sources, targets)) return tuple(map(list, zip(*newer_pairs))) diff --git a/distutils/py39compat.py b/distutils/py39compat.py index c43e5f10fd..1b436d7658 100644 --- a/distutils/py39compat.py +++ b/distutils/py39compat.py @@ -1,5 +1,7 @@ -import sys +import functools +import itertools import platform +import sys def add_ext_suffix_39(vars): @@ -20,3 +22,45 @@ def add_ext_suffix_39(vars): needs_ext_suffix = sys.version_info < (3, 10) and platform.system() == 'Windows' add_ext_suffix = add_ext_suffix_39 if needs_ext_suffix else lambda vars: None + + +# from more_itertools +class UnequalIterablesError(ValueError): + def __init__(self, details=None): + msg = 'Iterables have different lengths' + if details is not None: + msg += (': index 0 has length {}; index {} has length {}').format(*details) + + super().__init__(msg) + + +# from more_itertools +def _zip_equal_generator(iterables): + _marker = object() + for combo in itertools.zip_longest(*iterables, fillvalue=_marker): + for val in combo: + if val is _marker: + raise UnequalIterablesError() + yield combo + + +# from more_itertools +def _zip_equal(*iterables): + # Check whether the iterables are all the same size. + try: + first_size = len(iterables[0]) + for i, it in enumerate(iterables[1:], 1): + size = len(it) + if size != first_size: + raise UnequalIterablesError(details=(first_size, i, size)) + # All sizes are equal, we can use the built-in zip. + return zip(*iterables) + # If any one of the iterables didn't have a length, start reading + # them until one runs out. + except TypeError: + return _zip_equal_generator(iterables) + + +zip_strict = ( + _zip_equal if sys.version_info < (3, 10) else functools.partial(zip, strict=True) +) From 4d82dc4a053c7e8b7a5720b5a4db7da2ca2ea912 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 4 Nov 2023 13:24:00 -0400 Subject: [PATCH 0101/1761] Extract a 'starfilter', similar to itertools.starmap, to generalize the concept of filtering results over a sequence of tuples. --- distutils/dep_util.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/distutils/dep_util.py b/distutils/dep_util.py index d8538b5001..c1ae3297e8 100644 --- a/distutils/dep_util.py +++ b/distutils/dep_util.py @@ -27,6 +27,13 @@ def newer(source, target): return _newer(source, target) +def _starfilter(pred, iterables): + """ + Like itertools.starmap but for filter. 
+ """ + return filter(lambda x: pred(*x), iterables) + + def newer_pairwise(sources, targets): """ Filter filenames where sources are newer than targets. @@ -36,11 +43,7 @@ def newer_pairwise(sources, targets): targets) where source is newer than target, according to the semantics of 'newer()'. """ - - def newer_pair(pair): - return newer(*pair) - - newer_pairs = filter(newer_pair, zip_strict(sources, targets)) + newer_pairs = _starfilter(newer, zip_strict(sources, targets)) return tuple(map(list, zip(*newer_pairs))) From 5deb5ac17329a44b720c55b9f006858607cfbb3f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 4 Nov 2023 21:47:23 -0400 Subject: [PATCH 0102/1761] Replace '_starfilter' with 'jaraco.functools.splat'. --- distutils/_functools.py | 53 +++++++++++++++++++++++++++++++++++++++++ distutils/dep_util.py | 10 ++------ 2 files changed, 55 insertions(+), 8 deletions(-) diff --git a/distutils/_functools.py b/distutils/_functools.py index e7053bac12..e03365eafa 100644 --- a/distutils/_functools.py +++ b/distutils/_functools.py @@ -1,3 +1,4 @@ +import collections.abc import functools @@ -18,3 +19,55 @@ def wrapper(param, *args, **kwargs): return func(param, *args, **kwargs) return wrapper + + +# from jaraco.functools 4.0 +@functools.singledispatch +def _splat_inner(args, func): + """Splat args to func.""" + return func(*args) + + +@_splat_inner.register +def _(args: collections.abc.Mapping, func): + """Splat kargs to func as kwargs.""" + return func(**args) + + +def splat(func): + """ + Wrap func to expect its parameters to be passed positionally in a tuple. + + Has a similar effect to that of ``itertools.starmap`` over + simple ``map``. + + >>> import itertools, operator + >>> pairs = [(-1, 1), (0, 2)] + >>> _ = tuple(itertools.starmap(print, pairs)) + -1 1 + 0 2 + >>> _ = tuple(map(splat(print), pairs)) + -1 1 + 0 2 + + The approach generalizes to other iterators that don't have a "star" + equivalent, such as a "starfilter". + + >>> list(filter(splat(operator.add), pairs)) + [(0, 2)] + + Splat also accepts a mapping argument. + + >>> def is_nice(msg, code): + ... return "smile" in msg or code == 0 + >>> msgs = [ + ... dict(msg='smile!', code=20), + ... dict(msg='error :(', code=1), + ... dict(msg='unknown', code=0), + ... ] + >>> for msg in filter(splat(is_nice), msgs): + ... print(msg) + {'msg': 'smile!', 'code': 20} + {'msg': 'unknown', 'code': 0} + """ + return functools.wraps(func)(functools.partial(_splat_inner, func=func)) diff --git a/distutils/dep_util.py b/distutils/dep_util.py index c1ae3297e8..18a4f2b224 100644 --- a/distutils/dep_util.py +++ b/distutils/dep_util.py @@ -4,6 +4,7 @@ from .errors import DistutilsFileError from .py39compat import zip_strict +from ._functools import splat def _newer(source, target): @@ -27,13 +28,6 @@ def newer(source, target): return _newer(source, target) -def _starfilter(pred, iterables): - """ - Like itertools.starmap but for filter. - """ - return filter(lambda x: pred(*x), iterables) - - def newer_pairwise(sources, targets): """ Filter filenames where sources are newer than targets. @@ -43,7 +37,7 @@ def newer_pairwise(sources, targets): targets) where source is newer than target, according to the semantics of 'newer()'. """ - newer_pairs = _starfilter(newer, zip_strict(sources, targets)) + newer_pairs = filter(splat(newer), zip_strict(sources, targets)) return tuple(map(list, zip(*newer_pairs))) From 94942032878d431cee55adaab12a8bd83549a833 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 5 Nov 2023 05:59:02 -0500 Subject: [PATCH 0103/1761] Move dep_util to _modified and mark dep_util as deprecated. --- distutils/_modified.py | 68 +++++++++++++++++ distutils/bcppcompiler.py | 2 +- distutils/ccompiler.py | 2 +- distutils/cmd.py | 4 +- distutils/command/build_ext.py | 2 +- distutils/command/build_scripts.py | 2 +- distutils/dep_util.py | 74 +++---------------- distutils/file_util.py | 2 +- .../{test_dep_util.py => test_modified.py} | 4 +- distutils/unixccompiler.py | 2 +- distutils/util.py | 2 +- 11 files changed, 89 insertions(+), 75 deletions(-) create mode 100644 distutils/_modified.py rename distutils/tests/{test_dep_util.py => test_modified.py} (96%) diff --git a/distutils/_modified.py b/distutils/_modified.py new file mode 100644 index 0000000000..18a4f2b224 --- /dev/null +++ b/distutils/_modified.py @@ -0,0 +1,68 @@ +"""Timestamp comparison of files and groups of files.""" + +import os.path + +from .errors import DistutilsFileError +from .py39compat import zip_strict +from ._functools import splat + + +def _newer(source, target): + return not os.path.exists(target) or ( + os.path.getmtime(source) > os.path.getmtime(target) + ) + + +def newer(source, target): + """ + Is source modified more recently than target. + + Returns True if 'source' is modified more recently than + 'target' or if 'target' does not exist. + + Raises DistutilsFileError if 'source' does not exist. + """ + if not os.path.exists(source): + raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source)) + + return _newer(source, target) + + +def newer_pairwise(sources, targets): + """ + Filter filenames where sources are newer than targets. + + Walk two filename iterables in parallel, testing if each source is newer + than its corresponding target. Returns a pair of lists (sources, + targets) where source is newer than target, according to the semantics + of 'newer()'. + """ + newer_pairs = filter(splat(newer), zip_strict(sources, targets)) + return tuple(map(list, zip(*newer_pairs))) + + +def newer_group(sources, target, missing='error'): + """ + Is target out-of-date with respect to any file in sources. + + Return True if 'target' is out-of-date with respect to any file + listed in 'sources'. In other words, if 'target' exists and is newer + than every file in 'sources', return False; otherwise return True. + ``missing`` controls how to handle a missing source file: + + - error (default): allow the ``stat()`` call to fail. + - ignore: silently disregard any missing source files. + - newer: treat missing source files as "target out of date". This + mode is handy in "dry-run" mode: it will pretend to carry out + commands that wouldn't work because inputs are missing, but + that doesn't matter because dry-run won't run the commands. 
+ """ + + def missing_as_newer(source): + return missing == 'newer' and not os.path.exists(source) + + ignored = os.path.exists if missing == 'ignore' else None + return any( + missing_as_newer(source) or _newer(source, target) + for source in filter(ignored, sources) + ) diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py index ba45ea2b95..3c2ba15410 100644 --- a/distutils/bcppcompiler.py +++ b/distutils/bcppcompiler.py @@ -24,7 +24,7 @@ ) from .ccompiler import CCompiler, gen_preprocess_options from .file_util import write_file -from .dep_util import newer +from ._modified import newer from ._log import log diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py index 1818fce901..c1c7d5476e 100644 --- a/distutils/ccompiler.py +++ b/distutils/ccompiler.py @@ -18,7 +18,7 @@ from .spawn import spawn from .file_util import move_file from .dir_util import mkpath -from .dep_util import newer_group +from ._modified import newer_group from .util import split_quoted, execute from ._log import log diff --git a/distutils/cmd.py b/distutils/cmd.py index 3860c3ff1e..8fdcbc0ea2 100644 --- a/distutils/cmd.py +++ b/distutils/cmd.py @@ -10,7 +10,7 @@ import logging from .errors import DistutilsOptionError -from . import util, dir_util, file_util, archive_util, dep_util +from . import util, dir_util, file_util, archive_util, _modified from ._log import log @@ -428,7 +428,7 @@ def make_file( # If 'outfile' must be regenerated (either because it doesn't # exist, is out-of-date, or the 'force' flag is true) then # perform the action that presumably regenerates it - if self.force or dep_util.newer_group(infiles, outfile): + if self.force or _modified.newer_group(infiles, outfile): self.execute(func, args, exec_msg, level) # Otherwise, print the "skip" message else: diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py index fbeec342c0..b48f462626 100644 --- a/distutils/command/build_ext.py +++ b/distutils/command/build_ext.py @@ -19,7 +19,7 @@ ) from ..sysconfig import customize_compiler, get_python_version from ..sysconfig import get_config_h_filename -from ..dep_util import newer_group +from .._modified import newer_group from ..extension import Extension from ..util import get_platform from distutils._log import log diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py index ce222f1e52..1a4d67f492 100644 --- a/distutils/command/build_scripts.py +++ b/distutils/command/build_scripts.py @@ -7,7 +7,7 @@ from stat import ST_MODE from distutils import sysconfig from ..core import Command -from ..dep_util import newer +from .._modified import newer from ..util import convert_path from distutils._log import log import tokenize diff --git a/distutils/dep_util.py b/distutils/dep_util.py index 18a4f2b224..09a8a2e126 100644 --- a/distutils/dep_util.py +++ b/distutils/dep_util.py @@ -1,68 +1,14 @@ -"""Timestamp comparison of files and groups of files.""" +import warnings -import os.path +from . import _modified -from .errors import DistutilsFileError -from .py39compat import zip_strict -from ._functools import splat - -def _newer(source, target): - return not os.path.exists(target) or ( - os.path.getmtime(source) > os.path.getmtime(target) - ) - - -def newer(source, target): - """ - Is source modified more recently than target. - - Returns True if 'source' is modified more recently than - 'target' or if 'target' does not exist. - - Raises DistutilsFileError if 'source' does not exist. 
- """ - if not os.path.exists(source): - raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source)) - - return _newer(source, target) - - -def newer_pairwise(sources, targets): - """ - Filter filenames where sources are newer than targets. - - Walk two filename iterables in parallel, testing if each source is newer - than its corresponding target. Returns a pair of lists (sources, - targets) where source is newer than target, according to the semantics - of 'newer()'. - """ - newer_pairs = filter(splat(newer), zip_strict(sources, targets)) - return tuple(map(list, zip(*newer_pairs))) - - -def newer_group(sources, target, missing='error'): - """ - Is target out-of-date with respect to any file in sources. - - Return True if 'target' is out-of-date with respect to any file - listed in 'sources'. In other words, if 'target' exists and is newer - than every file in 'sources', return False; otherwise return True. - ``missing`` controls how to handle a missing source file: - - - error (default): allow the ``stat()`` call to fail. - - ignore: silently disregard any missing source files. - - newer: treat missing source files as "target out of date". This - mode is handy in "dry-run" mode: it will pretend to carry out - commands that wouldn't work because inputs are missing, but - that doesn't matter because dry-run won't run the commands. - """ - - def missing_as_newer(source): - return missing == 'newer' and not os.path.exists(source) - - ignored = os.path.exists if missing == 'ignore' else None - return any( - missing_as_newer(source) or _newer(source, target) - for source in filter(ignored, sources) +def __getattr__(name): + if name not in ['newer', 'newer_group', 'newer_pairwise']: + raise AttributeError(name) + warnings.warn( + "dep_util is Deprecated. Use functions from setuptools instead.", + DeprecationWarning, + stacklevel=2, ) + return getattr(_modified, name) diff --git a/distutils/file_util.py b/distutils/file_util.py index 7c69906646..3f3e21b567 100644 --- a/distutils/file_util.py +++ b/distutils/file_util.py @@ -108,7 +108,7 @@ def copy_file( # noqa: C901 # changing it (ie. it's not already a hard/soft link to src OR # (not update) and (src newer than dst). - from distutils.dep_util import newer + from distutils._modified import newer from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE if not os.path.isfile(src): diff --git a/distutils/tests/test_dep_util.py b/distutils/tests/test_modified.py similarity index 96% rename from distutils/tests/test_dep_util.py rename to distutils/tests/test_modified.py index 759772d2b9..eae7a7fa04 100644 --- a/distutils/tests/test_dep_util.py +++ b/distutils/tests/test_modified.py @@ -1,7 +1,7 @@ -"""Tests for distutils.dep_util.""" +"""Tests for distutils._modified.""" import os -from distutils.dep_util import newer, newer_pairwise, newer_group +from distutils._modified import newer, newer_pairwise, newer_group from distutils.errors import DistutilsFileError from distutils.tests import support import pytest diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py index 6ca2332ae1..bd8db9ac3f 100644 --- a/distutils/unixccompiler.py +++ b/distutils/unixccompiler.py @@ -20,7 +20,7 @@ import itertools from . 
import sysconfig -from .dep_util import newer +from ._modified import newer from .ccompiler import CCompiler, gen_preprocess_options, gen_lib_options from .errors import DistutilsExecError, CompileError, LibError, LinkError from ._log import log diff --git a/distutils/util.py b/distutils/util.py index 7ef47176e2..7ae914f7ee 100644 --- a/distutils/util.py +++ b/distutils/util.py @@ -14,7 +14,7 @@ import functools from .errors import DistutilsPlatformError, DistutilsByteCompileError -from .dep_util import newer +from ._modified import newer from .spawn import spawn from ._log import log From ce9efc41ec587d2f111fe09a4d855ffad15f95fc Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 06:19:43 -0500 Subject: [PATCH 0104/1761] Extend tests for newer_pairwise and fix failed expectation when no files are newer. --- distutils/_modified.py | 2 +- distutils/tests/test_modified.py | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/distutils/_modified.py b/distutils/_modified.py index 18a4f2b224..41ab1df39e 100644 --- a/distutils/_modified.py +++ b/distutils/_modified.py @@ -38,7 +38,7 @@ def newer_pairwise(sources, targets): of 'newer()'. """ newer_pairs = filter(splat(newer), zip_strict(sources, targets)) - return tuple(map(list, zip(*newer_pairs))) + return tuple(map(list, zip(*newer_pairs))) or ([], []) def newer_group(sources, target, missing='error'): diff --git a/distutils/tests/test_modified.py b/distutils/tests/test_modified.py index eae7a7fa04..34ced95624 100644 --- a/distutils/tests/test_modified.py +++ b/distutils/tests/test_modified.py @@ -56,6 +56,14 @@ def test_newer_pairwise_mismatch(self): with pytest.raises(ValueError): newer_pairwise([one, two], [three]) + def test_newer_pairwise_empty(self): + assert newer_pairwise([], []) == ([], []) + + def test_newer_pairwise_fresh(self): + one, two, three, four = self._setup_1234() + + assert newer_pairwise([one, three], [two, four]) == ([], []) + def test_newer_group(self): tmpdir = self.mkdtemp() sources = os.path.join(tmpdir, 'sources') From 2972e29ad43eb08241fd8ebebff1437b8d8dafb9 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 06:05:59 -0500 Subject: [PATCH 0105/1761] Add newer_pairwise_group (inspired by setuptools.dep_util). --- distutils/_modified.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/distutils/_modified.py b/distutils/_modified.py index 41ab1df39e..fbb95a8f27 100644 --- a/distutils/_modified.py +++ b/distutils/_modified.py @@ -1,5 +1,6 @@ """Timestamp comparison of files and groups of files.""" +import functools import os.path from .errors import DistutilsFileError @@ -28,7 +29,7 @@ def newer(source, target): return _newer(source, target) -def newer_pairwise(sources, targets): +def newer_pairwise(sources, targets, newer=newer): """ Filter filenames where sources are newer than targets. @@ -66,3 +67,6 @@ def missing_as_newer(source): missing_as_newer(source) or _newer(source, target) for source in filter(ignored, sources) ) + + +newer_pairwise_group = functools.partial(newer_pairwise, newer=newer_group) From 378d0d5ab16baa75acc6bb91ce7eb64f5f6ea91a Mon Sep 17 00:00:00 2001 From: Daniel Nunes Date: Sun, 15 Jan 2017 01:36:02 +0000 Subject: [PATCH 0106/1761] Added tests for newer_pairwise_group(). Cherry-picked from pypa/setuptools@a40114a442e18cd29271bd3c37dbfcaf6a2ec817. 
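
For reference, a self-contained sketch of the semantics these tests cover.
It re-declares simplified stand-ins for the helpers from earlier in this
series (plain zip() and a lambda in place of zip_strict() and splat()), and
the file layout is made up to mirror the fixture below.

    import functools
    import os
    import tempfile

    def _newer(source, target):
        return not os.path.exists(target) or (
            os.path.getmtime(source) > os.path.getmtime(target)
        )

    def newer_group(sources, target, missing='error'):
        def missing_as_newer(source):
            return missing == 'newer' and not os.path.exists(source)

        ignored = os.path.exists if missing == 'ignore' else None
        return any(
            missing_as_newer(source) or _newer(source, target)
            for source in filter(ignored, sources)
        )

    def newer_pairwise(sources, targets, newer=_newer):
        newer_pairs = filter(lambda pair: newer(*pair), zip(sources, targets))
        return tuple(map(list, zip(*newer_pairs))) or ([], [])

    newer_pairwise_group = functools.partial(newer_pairwise, newer=newer_group)

    with tempfile.TemporaryDirectory() as tmp:
        names = ['older.c', 'older.h', 'target.o', 'newer.c', 'newer.h']
        paths = [os.path.join(tmp, name) for name in names]
        for mtime, path in enumerate(paths):
            open(path, 'w').close()
            os.utime(path, (mtime, mtime))  # strictly increasing mtimes
        older, target, newer_srcs = paths[:2], paths[2], paths[3:]

        print(newer_pairwise_group([older], [target]))      # ([], []): target up to date
        print(newer_pairwise_group([newer_srcs], [target])) # group needing rebuild
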
--- distutils/tests/test_modified.py | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/distutils/tests/test_modified.py b/distutils/tests/test_modified.py index 34ced95624..87b3ecded0 100644 --- a/distutils/tests/test_modified.py +++ b/distutils/tests/test_modified.py @@ -1,7 +1,7 @@ """Tests for distutils._modified.""" import os -from distutils._modified import newer, newer_pairwise, newer_group +from distutils._modified import newer, newer_pairwise, newer_group, newer_pairwise_group from distutils.errors import DistutilsFileError from distutils.tests import support import pytest @@ -89,3 +89,30 @@ def test_newer_group(self): assert not newer_group([one, two, old_file], three, missing='ignore') assert newer_group([one, two, old_file], three, missing='newer') + + +@pytest.fixture +def groups_target(tmpdir): + """Sets up some older sources, a target and newer sources. + Returns a 3-tuple in this order. + """ + creation_order = ['older.c', 'older.h', 'target.o', 'newer.c', 'newer.h'] + mtime = 0 + + for i in range(len(creation_order)): + creation_order[i] = os.path.join(str(tmpdir), creation_order[i]) + with open(creation_order[i], 'w'): + pass + + # make sure modification times are sequential + os.utime(creation_order[i], (mtime, mtime)) + mtime += 1 + + return creation_order[:2], creation_order[2], creation_order[3:] + + +def test_newer_pairwise_group(groups_target): + older = newer_pairwise_group([groups_target[0]], [groups_target[1]]) + newer = newer_pairwise_group([groups_target[2]], [groups_target[1]]) + assert older == ([], []) + assert newer == ([groups_target[2]], [groups_target[1]]) From 501b753d153d5e6ca51a55d7f9b256bc3518c98a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 06:35:18 -0500 Subject: [PATCH 0107/1761] Modernize test_newer_pairwise_group by using tmp_path and a SimpleNamespace. --- distutils/tests/test_modified.py | 33 ++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/distutils/tests/test_modified.py b/distutils/tests/test_modified.py index 87b3ecded0..ca07c7e853 100644 --- a/distutils/tests/test_modified.py +++ b/distutils/tests/test_modified.py @@ -1,10 +1,12 @@ """Tests for distutils._modified.""" import os +import types + +import pytest from distutils._modified import newer, newer_pairwise, newer_group, newer_pairwise_group from distutils.errors import DistutilsFileError from distutils.tests import support -import pytest class TestDepUtil(support.TempdirManager): @@ -92,27 +94,26 @@ def test_newer_group(self): @pytest.fixture -def groups_target(tmpdir): - """Sets up some older sources, a target and newer sources. - Returns a 3-tuple in this order. +def groups_target(tmp_path): + """ + Set up some older sources, a target, and newer sources. + + Returns a simple namespace with these values. 
""" - creation_order = ['older.c', 'older.h', 'target.o', 'newer.c', 'newer.h'] - mtime = 0 + filenames = ['older.c', 'older.h', 'target.o', 'newer.c', 'newer.h'] + paths = [tmp_path / name for name in filenames] - for i in range(len(creation_order)): - creation_order[i] = os.path.join(str(tmpdir), creation_order[i]) - with open(creation_order[i], 'w'): - pass + for mtime, path in enumerate(paths): + path.write_text('', encoding='utf-8') # make sure modification times are sequential - os.utime(creation_order[i], (mtime, mtime)) - mtime += 1 + os.utime(path, (mtime, mtime)) - return creation_order[:2], creation_order[2], creation_order[3:] + return types.SimpleNamespace(older=paths[:2], target=paths[2], newer=paths[3:]) def test_newer_pairwise_group(groups_target): - older = newer_pairwise_group([groups_target[0]], [groups_target[1]]) - newer = newer_pairwise_group([groups_target[2]], [groups_target[1]]) + older = newer_pairwise_group([groups_target.older], [groups_target.target]) + newer = newer_pairwise_group([groups_target.newer], [groups_target.target]) assert older == ([], []) - assert newer == ([groups_target[2]], [groups_target[1]]) + assert newer == ([groups_target.newer], [groups_target.target]) From 603932219176de7449af496b724dd8e58d4589d1 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 09:01:38 -0500 Subject: [PATCH 0108/1761] Remove latent references in docs. --- docs/distutils/configfile.rst | 7 ------- docs/distutils/packageindex.rst | 13 ++++++------- docs/distutils/uploading.rst | 5 +++-- 3 files changed, 9 insertions(+), 16 deletions(-) diff --git a/docs/distutils/configfile.rst b/docs/distutils/configfile.rst index bdd7c4550a..30cccd71c0 100644 --- a/docs/distutils/configfile.rst +++ b/docs/distutils/configfile.rst @@ -131,13 +131,6 @@ Note that the ``doc_files`` option is simply a whitespace-separated string split across multiple lines for readability. -.. seealso:: - - :ref:`inst-config-syntax` in "Installing Python Modules" - More information on the configuration files is available in the manual for - system administrators. - - .. rubric:: Footnotes .. [#] This ideal probably won't be achieved until auto-configuration is fully diff --git a/docs/distutils/packageindex.rst b/docs/distutils/packageindex.rst index ccb9a598b2..27ea717a78 100644 --- a/docs/distutils/packageindex.rst +++ b/docs/distutils/packageindex.rst @@ -6,11 +6,10 @@ The Python Package Index (PyPI) ******************************* -The `Python Package Index (PyPI)`_ stores metadata describing distributions -packaged with distutils and other publishing tools, as well the distribution -archives themselves. +The `Python Package Index (PyPI) `_ stores +metadata describing distributions packaged with distutils and +other publishing tools, as well the distribution archives +themselves. -References to up to date PyPI documentation can be found at -:ref:`publishing-python-packages`. - -.. _Python Package Index (PyPI): https://pypi.org +The best resource for working with PyPI is the +`Python Packaging User Guide `_. diff --git a/docs/distutils/uploading.rst b/docs/distutils/uploading.rst index 4c391cab07..f5c4c619ab 100644 --- a/docs/distutils/uploading.rst +++ b/docs/distutils/uploading.rst @@ -4,5 +4,6 @@ Uploading Packages to the Package Index *************************************** -References to up to date PyPI documentation can be found at -:ref:`publishing-python-packages`. +See the +`Python Packaging User Guide `_ +for the best guidance on uploading packages. 
From 03f03e7802b0842b41f70b2b1c17ab26551a7533 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 09:43:46 -0500 Subject: [PATCH 0109/1761] Limit sphinxlint jobs to 1. Workaround for sphinx-contrib/sphinx-lint#83. --- tox.ini | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 33da3deb08..331eeed93f 100644 --- a/tox.ini +++ b/tox.ini @@ -26,7 +26,9 @@ extras = changedir = docs commands = python -m sphinx -W --keep-going . {toxinidir}/build/html - python -m sphinxlint + python -m sphinxlint \ + # workaround for sphinx-contrib/sphinx-lint#83 + --jobs 1 [testenv:finalize] description = assemble changelog and tag a release From 41b9cce3d3ee81e929610ab95b928dfd08bbba22 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 09:58:16 -0500 Subject: [PATCH 0110/1761] Replace git version with released version. Ref #186. --- tox.ini | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 06657e4eaa..6a224b52d1 100644 --- a/tox.ini +++ b/tox.ini @@ -5,8 +5,9 @@ toxworkdir={env:TOX_WORK_DIR:.tox} [testenv] deps = - # pypa/distutils#186; workaround for pytest-dev/pytest#10447 - pytest @ git+https://github.com/RonnyPfannschmidt/pytest@fix-10447-maker-mro-order-needs-reverse + pytest \ + # required for #186 + >= 7.4.3 pytest-flake8 # workaround for tholo/pytest-flake8#87 From d3e5de05f6afe958d0fde20945ed0f7a2dfef270 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 10 May 2023 21:38:33 -0400 Subject: [PATCH 0111/1761] Disable cygwin tests for now. Ref pypa/setuptools#3921 --- .github/workflows/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 60801acecd..c420a97654 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -37,6 +37,8 @@ jobs: run: tox test_cygwin: + # disabled due to lack of Rust support pypa/setuptools#3921 + if: ${{ false }} strategy: matrix: python: From d23e28a03a2c120e204c4c788ecd316e0dfe8fbb Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 10:54:57 -0500 Subject: [PATCH 0112/1761] Disable integration test due to known breakage from deprecation warnings. --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c420a97654..cb85ffe6ab 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -66,6 +66,7 @@ jobs: ci_setuptools: # Integration testing with setuptools + if: ${{ false }} # disabled for deprecation warnings strategy: matrix: python: From 03c6392c21800f51010d805b98aee7eb406f9c79 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 13:08:21 -0500 Subject: [PATCH 0113/1761] Allow diffcov to fail also, as it requires the tests to pass on the latest Python to succeed. --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 158814e597..4d9b8a3ede 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -58,6 +58,7 @@ jobs: diffcov: runs-on: ubuntu-latest + continue-on-error: ${{ matrix.python == '3.12' }} steps: - uses: actions/checkout@v3 with: From 6e6ee9759da3e71c9e90920c2bb91b2a27df3dfc Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 5 Nov 2023 13:10:07 -0500 Subject: [PATCH 0114/1761] Remove newsfragment --- newsfragments/+drop-py37.feature.rst | 1 - 1 file changed, 1 deletion(-) delete mode 100644 newsfragments/+drop-py37.feature.rst diff --git a/newsfragments/+drop-py37.feature.rst b/newsfragments/+drop-py37.feature.rst deleted file mode 100644 index ccabdaa355..0000000000 --- a/newsfragments/+drop-py37.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Require Python 3.8 or later. From 7a04cbda0fc71487af84e1d35055b736e339a6d6 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 15:57:33 -0500 Subject: [PATCH 0115/1761] Copy concurrency setting from setuptools --- .github/workflows/main.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4d9b8a3ede..e36084a3e2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -2,6 +2,13 @@ name: tests on: [push, pull_request] +concurrency: + group: >- + ${{ github.workflow }}- + ${{ github.ref_type }}- + ${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + permissions: contents: read @@ -18,10 +25,6 @@ env: TOX_OVERRIDE: >- testenv.pass_env+=GITHUB_*,FORCE_COLOR -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} - cancel-in-progress: true - jobs: test: strategy: From 0296279b68c7a29dbafd62f5c2d96220767bb4b6 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 5 Nov 2023 16:19:31 -0500 Subject: [PATCH 0116/1761] Rely on distutils._modified and deprecated setuptools.dep_util. --- setuptools/command/build_clib.py | 2 +- setuptools/dep_util.py | 32 +++++++++++-------------------- setuptools/tests/test_dep_util.py | 30 ----------------------------- 3 files changed, 12 insertions(+), 52 deletions(-) delete mode 100644 setuptools/tests/test_dep_util.py diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py index 5f4229b276..4679cd9432 100644 --- a/setuptools/command/build_clib.py +++ b/setuptools/command/build_clib.py @@ -1,7 +1,7 @@ import distutils.command.build_clib as orig from distutils.errors import DistutilsSetupError from distutils import log -from setuptools.dep_util import newer_pairwise_group +from distutils._modified import newer_pairwise_group class build_clib(orig.build_clib): diff --git a/setuptools/dep_util.py b/setuptools/dep_util.py index dc9ccf62c2..2d8cc5217d 100644 --- a/setuptools/dep_util.py +++ b/setuptools/dep_util.py @@ -1,24 +1,14 @@ -from distutils.dep_util import newer_group +import warnings +from ._distutils import _modified -# yes, this is was almost entirely copy-pasted from -# 'newer_pairwise()', this is just another convenience -# function. -def newer_pairwise_group(sources_groups, targets): - """Walk both arguments in parallel, testing if each source group is newer - than its corresponding target. Returns a pair of lists (sources_groups, - targets) where sources is newer than target, according to the semantics - of 'newer_group()'. 
- """ - if len(sources_groups) != len(targets): - raise ValueError("'sources_group' and 'targets' must be the same length") - # build a pair of lists (sources_groups, targets) where source is newer - n_sources = [] - n_targets = [] - for i in range(len(sources_groups)): - if newer_group(sources_groups[i], targets[i]): - n_sources.append(sources_groups[i]) - n_targets.append(targets[i]) - - return n_sources, n_targets +def __getattr__(name): + if name not in ['newer_pairwise_group']: + raise AttributeError(name) + warnings.warn( + "dep_util is Deprecated. Use functions from setuptools instead.", + DeprecationWarning, + stacklevel=2, + ) + return getattr(_modified, name) diff --git a/setuptools/tests/test_dep_util.py b/setuptools/tests/test_dep_util.py deleted file mode 100644 index e5027c1020..0000000000 --- a/setuptools/tests/test_dep_util.py +++ /dev/null @@ -1,30 +0,0 @@ -from setuptools.dep_util import newer_pairwise_group -import os -import pytest - - -@pytest.fixture -def groups_target(tmpdir): - """Sets up some older sources, a target and newer sources. - Returns a 3-tuple in this order. - """ - creation_order = ['older.c', 'older.h', 'target.o', 'newer.c', 'newer.h'] - mtime = 0 - - for i in range(len(creation_order)): - creation_order[i] = os.path.join(str(tmpdir), creation_order[i]) - with open(creation_order[i], 'w'): - pass - - # make sure modification times are sequential - os.utime(creation_order[i], (mtime, mtime)) - mtime += 1 - - return creation_order[:2], creation_order[2], creation_order[3:] - - -def test_newer_pairwise_group(groups_target): - older = newer_pairwise_group([groups_target[0]], [groups_target[1]]) - newer = newer_pairwise_group([groups_target[2]], [groups_target[1]]) - assert older == ([], []) - assert newer == ([groups_target[2]], [groups_target[1]]) From 0ace7f4de19305092e5961c1ce13cbfbdc8522b9 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 6 Nov 2023 21:15:31 -0500 Subject: [PATCH 0117/1761] Fix Ruff test --- pkg_resources/__init__.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 3c1f1875c5..aaf9304e9b 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -69,10 +69,6 @@ from os import open as os_open from os.path import isdir, split - -# access attribute to force import under delayed import mechanisms. 
-importlib.machinery.__name__ - from pkg_resources.extern.jaraco.text import ( yield_lines, drop_comment, From d97640a1955be86585f6cd5ac2fa4a070983c19e Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 6 Nov 2023 22:39:07 -0500 Subject: [PATCH 0118/1761] Update Python < 3.8 (3.2 to 3.7) obsolete code and comments --- _distutils_hack/__init__.py | 7 ------- conftest.py | 5 ----- docs/userguide/declarative_config.rst | 7 +++---- docs/userguide/entry_point.rst | 7 ++++--- docs/userguide/pyproject_config.rst | 3 +-- docs/userguide/quickstart.rst | 3 --- newsfragments/4096.doc.rst | 1 + newsfragments/4096.feature.rst | 1 + pkg_resources/__init__.py | 18 +++++------------- pkg_resources/tests/test_pkg_resources.py | 16 +--------------- pkg_resources/tests/test_resources.py | 2 +- setuptools/command/bdist_egg.py | 5 +---- setuptools/command/build.py | 11 +---------- setuptools/command/dist_info.py | 8 +------- setuptools/command/easy_install.py | 3 ++- setuptools/command/editable_wheel.py | 8 +------- setuptools/command/sdist.py | 8 -------- setuptools/monkey.py | 18 +----------------- setuptools/namespaces.py | 7 +++---- setuptools/tests/test_distutils_adoption.py | 6 +----- setuptools/tests/test_editable_install.py | 8 +------- setuptools/tests/test_find_packages.py | 13 +------------ setuptools/tests/test_namespaces.py | 6 ------ setuptools/tests/test_packageindex.py | 10 ---------- 24 files changed, 30 insertions(+), 151 deletions(-) create mode 100644 newsfragments/4096.doc.rst create mode 100644 newsfragments/4096.feature.rst diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py index b951c2defd..c9d1e24790 100644 --- a/_distutils_hack/__init__.py +++ b/_distutils_hack/__init__.py @@ -3,16 +3,9 @@ import os -is_pypy = '__pypy__' in sys.builtin_module_names - - def warn_distutils_present(): if 'distutils' not in sys.modules: return - if is_pypy and sys.version_info < (3, 7): - # PyPy for 3.6 unconditionally imports distutils, so bypass the warning - # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 - return import warnings warnings.warn( diff --git a/conftest.py b/conftest.py index 94d5cdd8b5..07c323cf05 100644 --- a/conftest.py +++ b/conftest.py @@ -40,11 +40,6 @@ def pytest_configure(config): ] -if sys.version_info < (3, 6): - collect_ignore.append('docs/conf.py') # uses f-strings - collect_ignore.append('pavement.py') - - if sys.version_info < (3, 9) or sys.platform == 'cygwin': collect_ignore.append('tools/finalize.py') diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst index fa104b10e3..a5896849ac 100644 --- a/docs/userguide/declarative_config.rst +++ b/docs/userguide/declarative_config.rst @@ -39,10 +39,9 @@ boilerplate code in some cases. zip_safe = False include_package_data = True packages = find: - python_requires = >=3.7 + python_requires = >=3.8 install_requires = requests - importlib-metadata; python_version<"3.8" [options.package_data] * = *.txt, *.rst @@ -259,11 +258,11 @@ data_files section 40.6.0 [# [options] install_requires = - importlib-metadata; python_version<"3.8" + requests [options.extras_require] all = - importlib-metadata; python_version < "3.8" + requests .. [#opt-3] The ``find:`` and ``find_namespace:`` directive can be further configured in a dedicated subsection ``options.packages.find``. 
This subsection accepts the diff --git a/docs/userguide/entry_point.rst b/docs/userguide/entry_point.rst index 4aa7f9a27c..c8022c3d4f 100644 --- a/docs/userguide/entry_point.rst +++ b/docs/userguide/entry_point.rst @@ -486,9 +486,10 @@ entry points remains the same as for console/GUI scripts, and is discussed in th .. tip:: The recommended approach for loading and importing entry points is the :mod:`importlib.metadata` module, - which is a part of the standard library since Python 3.8. For older versions of - Python, its backport :pypi:`importlib_metadata` should be used. While using the - backport, the only change that has to be made is to replace ``importlib.metadata`` + which is a part of the standard library since Python 3.8 and is non-provisional + since Python 3.10. For older versions of Python, its backport + :pypi:`importlib_metadata` should be used. While using the backport, the only + change that has to be made is to replace ``importlib.metadata`` with ``importlib_metadata``, i.e. .. code-block:: python diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst index 103d10ede2..9335105917 100644 --- a/docs/userguide/pyproject_config.rst +++ b/docs/userguide/pyproject_config.rst @@ -47,7 +47,7 @@ The ``project`` table contains metadata fields as described by ] description = "My package description" readme = "README.rst" - requires-python = ">=3.7" + requires-python = ">=3.8" keywords = ["one", "two"] license = {text = "BSD-3-Clause"} classifiers = [ @@ -56,7 +56,6 @@ The ``project`` table contains metadata fields as described by ] dependencies = [ "requests", - 'importlib-metadata; python_version<"3.8"', ] dynamic = ["version"] diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index f0426293a4..dde4620819 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -85,7 +85,6 @@ The following example demonstrates a minimum configuration version = "0.0.1" dependencies = [ "requests", - 'importlib-metadata; python_version<"3.8"', ] See :doc:`/userguide/pyproject_config` for more information. @@ -101,7 +100,6 @@ The following example demonstrates a minimum configuration [options] install_requires = requests - importlib-metadata; python_version < "3.8" See :doc:`/userguide/declarative_config` for more information. @@ -116,7 +114,6 @@ The following example demonstrates a minimum configuration version='0.0.1', install_requires=[ 'requests', - 'importlib-metadata; python_version == "3.8"', ], ) diff --git a/newsfragments/4096.doc.rst b/newsfragments/4096.doc.rst new file mode 100644 index 0000000000..b5b5206704 --- /dev/null +++ b/newsfragments/4096.doc.rst @@ -0,0 +1 @@ +Updated documentation referencing obsolete Python 3.7 code. -- by :user:`Avasam` diff --git a/newsfragments/4096.feature.rst b/newsfragments/4096.feature.rst new file mode 100644 index 0000000000..89b3a465bb --- /dev/null +++ b/newsfragments/4096.feature.rst @@ -0,0 +1 @@ +Updated and removed obsolete Python < 3.8 code and comments. 
-- by :user:`Avasam` diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 3baa1f3c24..2909000553 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -18,6 +18,10 @@ """ import sys + +if sys.version_info < (3, 8): + raise RuntimeError("Python 3.8 or later is required") + import os import io import time @@ -43,16 +47,7 @@ import importlib from pkgutil import get_importer -try: - import _imp -except ImportError: - # Python 3.2 compatibility - import imp as _imp - -try: - FileExistsError -except NameError: - FileExistsError = OSError +import _imp # capture these to bypass sandboxing from os import utime @@ -91,9 +86,6 @@ __import__('pkg_resources.extern.packaging.markers') __import__('pkg_resources.extern.packaging.utils') -if sys.version_info < (3, 5): - raise RuntimeError("Python 3.5 or later is required") - # declare some globals that will be defined later to # satisfy the linters. require = None diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py index a05aeb2603..6f3d28467b 100644 --- a/pkg_resources/tests/test_pkg_resources.py +++ b/pkg_resources/tests/test_pkg_resources.py @@ -22,17 +22,6 @@ import pkg_resources -def timestamp(dt): - """ - Return a timestamp for a local, naive datetime instance. - """ - try: - return dt.timestamp() - except AttributeError: - # Python 3.2 and earlier - return time.mktime(dt.timetuple()) - - class EggRemover(str): def __call__(self): if self in sys.path: @@ -123,7 +112,7 @@ def test_resource_filename_rewrites_on_change(self): f = open(filename, 'w') f.write('hello, world?') f.close() - ts = timestamp(self.ref_time) + ts = self.ref_time.timestamp() os.utime(filename, (ts, ts)) filename = zp.get_resource_filename(manager, 'data.dat') with open(filename) as f: @@ -241,9 +230,6 @@ def make_distribution_no_version(tmpdir, basename): # will detect it and yield it. dist_dir.join('temp.txt').ensure() - if sys.version_info < (3, 6): - dist_dir = str(dist_dir) - dists = list(pkg_resources.distributions_from_metadata(dist_dir)) assert len(dists) == 1 (dist,) = dists diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py index 608c67aeeb..e6a6873f1b 100644 --- a/pkg_resources/tests/test_resources.py +++ b/pkg_resources/tests/test_resources.py @@ -21,7 +21,7 @@ ) -# from Python 3.6 docs. +# from Python 3.6 docs. Available from itertools on Python 3.10 def pairwise(iterable): "s -> (s0,s1), (s1,s2), (s2, s3), ..." a, b = itertools.tee(iterable) diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index bdece56bc9..9494f1dacd 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -368,10 +368,7 @@ def scan_module(egg_dir, base, name, stubs): return True # Extension module pkg = base[len(egg_dir) + 1 :].replace(os.sep, '.') module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0] - if sys.version_info < (3, 7): - skip = 12 # skip magic & date & file size - else: - skip = 16 # skip magic & reserved? & date & file size + skip = 16 # skip magic & reserved? 
& date & file size f = open(filename, 'rb') f.read(skip) code = marshal.load(f) diff --git a/setuptools/command/build.py b/setuptools/command/build.py index 0f1d688e17..afda7e3be9 100644 --- a/setuptools/command/build.py +++ b/setuptools/command/build.py @@ -1,17 +1,8 @@ -import sys -from typing import TYPE_CHECKING, List, Dict +from typing import Dict, List, Protocol from distutils.command.build import build as _build from ..warnings import SetuptoolsDeprecationWarning -if sys.version_info >= (3, 8): - from typing import Protocol -elif TYPE_CHECKING: - from typing_extensions import Protocol -else: - from abc import ABC as Protocol - - _ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"} diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py index 9df625cee7..28f0ff5c69 100644 --- a/setuptools/command/dist_info.py +++ b/setuptools/command/dist_info.py @@ -93,7 +93,7 @@ def _maybe_bkp_dir(self, dir_path: str, requires_bkp: bool): if requires_bkp: bkp_name = f"{dir_path}.__bkp__" _rm(bkp_name, ignore_errors=True) - _copy(dir_path, bkp_name, dirs_exist_ok=True, symlinks=True) + shutil.copytree(dir_path, bkp_name, dirs_exist_ok=True, symlinks=True) try: yield finally: @@ -119,9 +119,3 @@ def run(self): def _rm(dir_name, **opts): if os.path.isdir(dir_name): shutil.rmtree(dir_name, **opts) - - -def _copy(src, dst, **opts): - if sys.version_info < (3, 8): - opts.pop("dirs_exist_ok", None) - shutil.copytree(src, dst, **opts) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 8ba4f094de..a51b1c5d19 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -257,8 +257,9 @@ def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME 'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}', 'sys_prefix': self.config_vars['prefix'], 'sys_exec_prefix': self.config_vars['exec_prefix'], - # Only python 3.2+ has abiflags + # Only POSIX systems have abiflags 'abiflags': getattr(sys, 'abiflags', ''), + # Only python 3.9+ has platlibdir 'platlibdir': getattr(sys, 'platlibdir', 'lib'), } ) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 79c839f8f0..440a3bbe1c 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -30,6 +30,7 @@ List, Mapping, Optional, + Protocol, Tuple, TypeVar, Union, @@ -54,13 +55,6 @@ if TYPE_CHECKING: from wheel.wheelfile import WheelFile # noqa -if sys.version_info >= (3, 8): - from typing import Protocol -elif TYPE_CHECKING: - from typing_extensions import Protocol -else: - from abc import ABC as Protocol - _Path = Union[str, Path] _P = TypeVar("_P", bound=_Path) _logger = logging.getLogger(__name__) diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index c04823c159..da5f65e44d 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -73,14 +73,6 @@ def run(self): def initialize_options(self): orig.sdist.initialize_options(self) - self._default_to_gztar() - - def _default_to_gztar(self): - # only needed on Python prior to 3.6. 
- if sys.version_info >= (3, 6, 0, 'beta', 1): - return - self.formats = ['gztar'] - def make_distribution(self): """ Workaround for #516 diff --git a/setuptools/monkey.py b/setuptools/monkey.py index 2ab98c178a..2ff6c5fbfc 100644 --- a/setuptools/monkey.py +++ b/setuptools/monkey.py @@ -66,21 +66,6 @@ def patch_all(): # we can't patch distutils.cmd, alas distutils.core.Command = setuptools.Command - has_issue_12885 = sys.version_info <= (3, 5, 3) - - if has_issue_12885: - # fix findall bug in distutils (http://bugs.python.org/issue12885) - distutils.filelist.findall = setuptools.findall - - needs_warehouse = (3, 4) < sys.version_info < (3, 4, 6) or ( - 3, - 5, - ) < sys.version_info <= (3, 5, 3) - - if needs_warehouse: - warehouse = 'https://upload.pypi.org/legacy/' - distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse - _patch_distribution_metadata() # Install Distribution throughout the distutils @@ -138,8 +123,7 @@ def patch_for_msvc_specialized_compiler(): Patch functions in distutils to use standalone Microsoft Visual C++ compilers. """ - # import late to avoid circular imports on Python < 3.5 - msvc = import_module('setuptools.msvc') + from . import msvc if platform.system() != 'Windows': # Compilers only available on Microsoft Windows diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py index 3332f864ae..e8f2941d45 100644 --- a/setuptools/namespaces.py +++ b/setuptools/namespaces.py @@ -42,12 +42,11 @@ def _get_target(self): _nspkg_tmpl = ( "import sys, types, os", - "has_mfs = sys.version_info > (3, 5)", "p = os.path.join(%(root)s, *%(pth)r)", - "importlib = has_mfs and __import__('importlib.util')", - "has_mfs and __import__('importlib.machinery')", + "importlib = __import__('importlib.util')", + "__import__('importlib.machinery')", ( - "m = has_mfs and " + "m = " "sys.modules.setdefault(%(pkg)r, " "importlib.util.module_from_spec(" "importlib.machinery.PathFinder.find_spec(%(pkg)r, " diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py index b371a5d353..3053fdc0b7 100644 --- a/setuptools/tests/test_distutils_adoption.py +++ b/setuptools/tests/test_distutils_adoption.py @@ -14,11 +14,7 @@ def popen_text(call): """ Augment the Popen call with the parameters to ensure unicode text. 
""" - return ( - functools.partial(call, universal_newlines=True) - if sys.version_info < (3, 7) - else functools.partial(call, text=True) - ) + return functools.partial(call, text=True) def win_sr(env): diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index ef71147adf..fd22a2d334 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -90,11 +90,7 @@ def editable_opts(request): "__init__.py": dedent( """\ import sys - - if sys.version_info[:2] >= (3, 8): - from importlib.metadata import PackageNotFoundError, version - else: - from importlib_metadata import PackageNotFoundError, version + from importlib.metadata import PackageNotFoundError, version try: __version__ = version(__name__) @@ -439,8 +435,6 @@ def test_editable_with_prefix(tmp_path, sample_project, editable_opts): # now run 'sample' with the prefix on the PYTHONPATH bin = 'Scripts' if platform.system() == 'Windows' else 'bin' exe = prefix / bin / 'sample' - if sys.version_info < (3, 8) and platform.system() == 'Windows': - exe = str(exe) subprocess.check_call([exe], env=env) diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py index 92da882d09..e54266a587 100644 --- a/setuptools/tests/test_find_packages.py +++ b/setuptools/tests/test_find_packages.py @@ -1,9 +1,7 @@ """Tests for automatic package discovery""" import os -import sys import shutil import tempfile -import platform import pytest @@ -27,15 +25,6 @@ def can_symlink(): return can -def has_symlink(): - bad_symlink = ( - # Windows symlink directory detection is broken on Python 3.2 - platform.system() == 'Windows' - and sys.version_info[:2] == (3, 2) - ) - return can_symlink() and not bad_symlink - - class TestFindPackages: def setup_method(self, method): self.dist_dir = tempfile.mkdtemp() @@ -134,7 +123,7 @@ def test_dir_with_packages_in_subdir_is_excluded(self): packages = find_packages(self.dist_dir) assert 'build.pkg' not in packages - @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') + @pytest.mark.skipif(not can_symlink(), reason='Symlink support required') def test_symlinked_packages_are_included(self): """ A symbolically-linked directory should be treated like any other diff --git a/setuptools/tests/test_namespaces.py b/setuptools/tests/test_namespaces.py index cc54cc9f73..76b5af296a 100644 --- a/setuptools/tests/test_namespaces.py +++ b/setuptools/tests/test_namespaces.py @@ -1,17 +1,11 @@ import sys import subprocess -import pytest - from . 
import namespaces from setuptools.command import test class TestNamespaces: - @pytest.mark.skipif( - sys.version_info < (3, 5), - reason="Requires importlib.util.module_from_spec", - ) def test_mixed_site_and_non_site(self, tmpdir): """ Installing two packages sharing the same namespace, one installed diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py index 02870639d3..de64c08fa9 100644 --- a/setuptools/tests/test_packageindex.py +++ b/setuptools/tests/test_packageindex.py @@ -88,16 +88,6 @@ def test_bad_url_double_scheme(self): return raise RuntimeError("Did not raise") - def test_bad_url_screwy_href(self): - index = setuptools.package_index.PackageIndex(hosts=('www.example.com',)) - - # issue #160 - if sys.version_info[0] == 2 and sys.version_info[1] == 7: - # this should not fail - url = 'http://example.com' - page = '' - index.process_index(url, page) - def test_url_ok(self): index = setuptools.package_index.PackageIndex(hosts=('www.example.com',)) url = 'file:///tmp/test_package_index' From 7620221fc709416ed1518b3cabbe3f0be96ed30f Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 7 Nov 2023 18:59:52 -0500 Subject: [PATCH 0119/1761] Restore better config examples --- docs/userguide/declarative_config.rst | 5 +++-- docs/userguide/pyproject_config.rst | 1 + docs/userguide/quickstart.rst | 4 ++++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst index a5896849ac..d8a1b095b8 100644 --- a/docs/userguide/declarative_config.rst +++ b/docs/userguide/declarative_config.rst @@ -42,6 +42,7 @@ boilerplate code in some cases. python_requires = >=3.8 install_requires = requests + importlib-metadata; python_version<"3.10" [options.package_data] * = *.txt, *.rst @@ -258,11 +259,11 @@ data_files section 40.6.0 [# [options] install_requires = - requests + importlib-metadata; python_version<"3.10" [options.extras_require] all = - requests + importlib-metadata; python_version<"3.10" .. [#opt-3] The ``find:`` and ``find_namespace:`` directive can be further configured in a dedicated subsection ``options.packages.find``. This subsection accepts the diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst index 9335105917..5d22979605 100644 --- a/docs/userguide/pyproject_config.rst +++ b/docs/userguide/pyproject_config.rst @@ -56,6 +56,7 @@ The ``project`` table contains metadata fields as described by ] dependencies = [ "requests", + 'importlib-metadata; python_version<"3.10"', ] dynamic = ["version"] diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index dde4620819..1ecc606926 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -85,6 +85,7 @@ The following example demonstrates a minimum configuration version = "0.0.1" dependencies = [ "requests", + 'importlib-metadata; python_version<"3.10"', ] See :doc:`/userguide/pyproject_config` for more information. @@ -100,6 +101,8 @@ The following example demonstrates a minimum configuration [options] install_requires = requests + importlib-metadata; python_version<"3.10" + See :doc:`/userguide/declarative_config` for more information. 
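+
+.. tip::
+   The ``; python_version<"3.10"`` suffix used in these examples is a
+   :pep:`508` *environment marker*: the requirement is installed only when
+   the marker evaluates to true for the target interpreter. A minimal
+   sketch of how such a marker behaves, assuming the third-party
+   :pypi:`packaging` library is installed:
+
+   .. code-block:: python
+
+      from packaging.requirements import Requirement
+
+      req = Requirement('importlib-metadata; python_version<"3.10"')
+      # True only on interpreters older than Python 3.10
+      print(req.name, req.marker.evaluate())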
@@ -114,6 +117,7 @@ The following example demonstrates a minimum configuration version='0.0.1', install_requires=[ 'requests', + 'importlib-metadata; python_version<"3.10"', ], ) From ca12a0bf2ececdc74738ce713a3edaa1b906d503 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 7 Nov 2023 19:15:07 -0500 Subject: [PATCH 0120/1761] Add newsfragment --- newsfragments/4097.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4097.feature.rst diff --git a/newsfragments/4097.feature.rst b/newsfragments/4097.feature.rst new file mode 100644 index 0000000000..129e128f21 --- /dev/null +++ b/newsfragments/4097.feature.rst @@ -0,0 +1 @@ +Updated `pkg_resources` to use stdlib `importlib.machinery` instead of `importlib_machinery` -- by :user:`Avasam` From 987bc92b333f6aa67e307107f297e7ad464bab7a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 8 Nov 2023 09:15:50 -0500 Subject: [PATCH 0121/1761] Add news fragment. --- newsfragments/+f8383dcd.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/+f8383dcd.feature.rst diff --git a/newsfragments/+f8383dcd.feature.rst b/newsfragments/+f8383dcd.feature.rst new file mode 100644 index 0000000000..c8f0e82e55 --- /dev/null +++ b/newsfragments/+f8383dcd.feature.rst @@ -0,0 +1 @@ +Merged with pypa/distutils@7a04cbda0fc714. \ No newline at end of file From 8d47bb44c6a217507c0ae037713e1536efbbc4d0 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 8 Nov 2023 09:21:35 -0500 Subject: [PATCH 0122/1761] Use 'setuptools.modified' as the consolidated module for modified file functions. --- docs/deprecated/distutils-legacy.rst | 2 +- newsfragments/4069.feature.rst | 2 +- setuptools/modified.py | 8 ++++++++ 3 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 setuptools/modified.py diff --git a/docs/deprecated/distutils-legacy.rst b/docs/deprecated/distutils-legacy.rst index 85bda3d88a..166fcd584a 100644 --- a/docs/deprecated/distutils-legacy.rst +++ b/docs/deprecated/distutils-legacy.rst @@ -28,7 +28,7 @@ As Distutils is deprecated, any usage of functions or objects from distutils is ``distutils.command.{build_clib,build_ext,build_py,sdist}`` → ``setuptools.command.*`` -``distutils.dep_util`` → ``setuptools.dep_util`` +``distutils.dep_util`` → ``setuptools.modified`` ``distutils.log`` → :mod:`logging` (standard library) diff --git a/newsfragments/4069.feature.rst b/newsfragments/4069.feature.rst index d2b79a0c85..64a0af5d41 100644 --- a/newsfragments/4069.feature.rst +++ b/newsfragments/4069.feature.rst @@ -1 +1 @@ -Exported `distutils.dep_util` through ``setuptools.dep_util`` -- by :user:`Avasam` +Exported ``distutils.dep_util`` and ``setuptools.dep_util`` through ``setuptools.modified`` -- by :user:`Avasam` diff --git a/setuptools/modified.py b/setuptools/modified.py new file mode 100644 index 0000000000..af6ceeac89 --- /dev/null +++ b/setuptools/modified.py @@ -0,0 +1,8 @@ +from ._distutils._modified import ( + newer, + newer_pairwise, + newer_group, + newer_pairwise_group, +) + +__all__ = ['newer', 'newer_pairwise', 'newer_group', 'newer_pairwise_group'] From c614ef584bb2abe660875c91cad50ca23a06ab34 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Wed, 8 Nov 2023 09:58:57 -0500 Subject: [PATCH 0123/1761] Fallback when SETUPTOOLS_USE_DISTUTILS=stdlib --- setuptools/command/build_clib.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py index 4679cd9432..acd4d1d3ba 100644 --- a/setuptools/command/build_clib.py +++ b/setuptools/command/build_clib.py @@ -1,7 +1,12 @@ import distutils.command.build_clib as orig from distutils.errors import DistutilsSetupError from distutils import log -from distutils._modified import newer_pairwise_group + +try: + from distutils._modified import newer_pairwise_group +except ImportError: + # fallback for SETUPTOOLS_USE_DISTUTILS=stdlib + from .._distutils._modified import newer_pairwise_group class build_clib(orig.build_clib): From f5930953a70b62720cda4c80dde63fcb2ce8231f Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 8 Nov 2023 12:48:47 -0500 Subject: [PATCH 0124/1761] Fix accidental indentation change --- docs/userguide/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 1ecc606926..e6a09b6191 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -101,7 +101,7 @@ The following example demonstrates a minimum configuration [options] install_requires = requests - importlib-metadata; python_version<"3.10" + importlib-metadata; python_version<"3.10" See :doc:`/userguide/declarative_config` for more information. From 4cbdbd2b85b0cbd8d271d95fcc40c22e1fb02313 Mon Sep 17 00:00:00 2001 From: Jean Abou Samra Date: Sat, 11 Nov 2023 20:23:20 +0100 Subject: [PATCH 0125/1761] Import PUG guide to MANIFEST.in --- docs/setuptools.rst | 10 ++---- docs/userguide/datafiles.rst | 18 +++++------ docs/userguide/miscellaneous.rst | 54 ++++++++++++++++++++++++++++++-- docs/userguide/quickstart.rst | 7 ++--- 4 files changed, 65 insertions(+), 24 deletions(-) diff --git a/docs/setuptools.rst b/docs/setuptools.rst index 5317058945..ef70fe61cc 100644 --- a/docs/setuptools.rst +++ b/docs/setuptools.rst @@ -21,9 +21,9 @@ Feature Highlights: individually in setup.py * Automatically include all relevant files in your source distributions, - without needing to create a |MANIFEST.in|_ file, and without having to force - regeneration of the ``MANIFEST`` file when your source tree changes - [#manifest]_. + without needing to create a :ref:`MANIFEST.in ` file, + and without having to force regeneration of the ``MANIFEST`` file when your + source tree changes [#manifest]_. * Automatically generate wrapper scripts or Windows (console and GUI) .exe files for any number of "main" functions in your project. (Note: this is not @@ -221,7 +221,3 @@ set of steps to reproduce. any special C header). See :ref:`Controlling files in the distribution` and :doc:`userguide/datafiles` for more information about complex scenarios, if you want to include other types of files. - - -.. |MANIFEST.in| replace:: ``MANIFEST.in`` -.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/ diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst index 5cd576e0b7..9bd2efd863 100644 --- a/docs/userguide/datafiles.rst +++ b/docs/userguide/datafiles.rst @@ -72,7 +72,8 @@ and you supply this configuration: then all the ``.txt`` and ``.rst`` files will be automatically installed with your package, provided: -1. These files are included via the |MANIFEST.in|_ file, like so:: +1. 
These files are included via the :ref:`MANIFEST.in ` file, + like so:: include src/mypkg/*.txt include src/mypkg/*.rst @@ -149,8 +150,8 @@ data files: The ``package_data`` argument is a dictionary that maps from package names to lists of glob patterns. Note that the data files specified using the ``package_data`` -option neither require to be included within a |MANIFEST.in|_ file, nor -require to be added by a revision control system plugin. +option neither require to be included within a :ref:`MANIFEST.in ` +file, nor require to be added by a revision control system plugin. .. note:: If your glob patterns use paths, you *must* use a forward slash (``/``) as @@ -426,13 +427,14 @@ Summary In summary, the three options allow you to: ``include_package_data`` - Accept all data files and directories matched by |MANIFEST.in|_ or added by + Accept all data files and directories matched by + :ref:`MANIFEST.in ` or added by a :ref:`plugin `. ``package_data`` Specify additional patterns to match files that may or may - not be matched by |MANIFEST.in|_ or added by - a :ref:`plugin `. + not be matched by :ref:`MANIFEST.in ` + or added by a :ref:`plugin `. ``exclude_package_data`` Specify patterns for data files and directories that should *not* be @@ -537,7 +539,3 @@ run time be included **inside the package**. .. [#files_api] Reference: https://importlib-resources.readthedocs.io/en/latest/using.html#migrating-from-legacy .. [#namespace_support] Reference: https://github.com/python/importlib_resources/pull/196#issuecomment-734520374 - - -.. |MANIFEST.in| replace:: ``MANIFEST.in`` -.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/ diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst index ea0a58845e..42f43e19b0 100644 --- a/docs/userguide/miscellaneous.rst +++ b/docs/userguide/miscellaneous.rst @@ -43,8 +43,6 @@ to specify any files that the default file location algorithm doesn't catch. This file contains instructions that tell ``setuptools`` which files exactly should be part of the ``sdist`` (or not). -A comprehensive guide to ``MANIFEST.in`` syntax is available at the -:doc:`PyPA's Packaging User Guide `. .. attention:: Please note that ``setuptools`` supports the ``MANIFEST.in``, @@ -61,6 +59,58 @@ A comprehensive guide to ``MANIFEST.in`` syntax is available at the files inside a set of directories) and then fine tune the file selection by removing the excess or adding isolated files. + +A :file:`MANIFEST.in` file consists of commands, one per line, instructing +setuptools to add or remove some set of files from the sdist. 
The commands
+are:
+
+========================================================= ==================================================================================================
+Command                                                   Description
+========================================================= ==================================================================================================
+:samp:`include {pat1} {pat2} ...`                         Add all files matching any of the listed patterns
+                                                          (Files must be given as paths relative to the root of the project)
+:samp:`exclude {pat1} {pat2} ...`                         Remove all files matching any of the listed patterns
+                                                          (Files must be given as paths relative to the root of the project)
+:samp:`recursive-include {dir-pattern} {pat1} {pat2} ...` Add all files under directories matching ``dir-pattern`` that match any of the listed patterns
+:samp:`recursive-exclude {dir-pattern} {pat1} {pat2} ...` Remove all files under directories matching ``dir-pattern`` that match any of the listed patterns
+:samp:`global-include {pat1} {pat2} ...`                  Add all files anywhere in the source tree matching any of the listed patterns
+:samp:`global-exclude {pat1} {pat2} ...`                  Remove all files anywhere in the source tree matching any of the listed patterns
+:samp:`graft {dir-pattern}`                               Add all files under directories matching ``dir-pattern``
+:samp:`prune {dir-pattern}`                               Remove all files under directories matching ``dir-pattern``
+========================================================= ==================================================================================================
+
+The patterns here are glob-style patterns: ``*`` matches zero or more regular
+filename characters (on Unix, everything except forward slash; on Windows,
+everything except backslash and colon); ``?`` matches a single regular filename
+character, and ``[chars]`` matches any one of the characters between the square
+brackets (which may contain character ranges, e.g., ``[a-z]`` or
+``[a-fA-F0-9]``). Setuptools also has support for ``**`` matching
+zero or more characters including forward slash, backslash, and colon.
+
+Directory patterns are relative to the root of the project directory; e.g.,
+``graft example*`` will include a directory named :file:`examples` in the
+project root but will not include :file:`docs/examples/`.
+
+File & directory names in :file:`MANIFEST.in` should be ``/``-separated;
+setuptools will automatically convert the slashes to the local platform's
+appropriate directory separator.
+
+Commands are processed in the order they appear in the :file:`MANIFEST.in`
+file. For example, given the commands:
+
+.. code-block:: bash
+
+    graft tests
+    global-exclude *.py[cod]
+
+the contents of the directory tree :file:`tests` will first be added to the
+sdist, and then after that all files in the sdist with a ``.pyc``, ``.pyo``, or
+``.pyd`` extension will be removed from the sdist. If the commands were in the
+opposite order, then ``*.pyc`` files etc. would only be removed from what
+was already in the sdist before adding :file:`tests`, and if :file:`tests`
+happened to contain any ``*.pyc`` files, they would end up included in the
+sdist because the exclusion happened before they were included.
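+
+After editing :file:`MANIFEST.in`, it is worth checking which files actually
+end up in the sdist. A minimal sketch, assuming a freshly built archive at
+the placeholder path ``dist/sample-0.0.1.tar.gz`` (substitute the name of
+your own build output):
+
+.. code-block:: python
+
+    import tarfile
+
+    # Print every member recorded in the source distribution archive
+    with tarfile.open("dist/sample-0.0.1.tar.gz") as sdist:
+        for name in sdist.getnames():
+            print(name)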
+ An example of ``MANIFEST.in`` for a simple project that organized according to a :ref:`src-layout` is: diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index f0426293a4..b4e8bf00e1 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -390,8 +390,8 @@ For the simplest use, you can simply use the ``include_package_data`` keyword: ) This tells setuptools to install any data files it finds in your packages. -The data files must be specified via the |MANIFEST.in|_ file -or automatically added by a :ref:`Revision Control System plugin +The data files must be specified via the :ref:`MANIFEST.in ` +file or automatically added by a :ref:`Revision Control System plugin `. For more details, see :doc:`datafiles`. @@ -459,9 +459,6 @@ Packaging in Python can be hard and is constantly evolving. up-to-date references that can help you when it is time to distribute your work. -.. |MANIFEST.in| replace:: ``MANIFEST.in`` -.. _MANIFEST.in: https://packaging.python.org/en/latest/guides/using-manifest-in/ - ---- From 8256cd5410c3b03545bc1c365f4c544e18d7d07f Mon Sep 17 00:00:00 2001 From: Jean Abou Samra Date: Tue, 14 Nov 2023 12:21:10 +0100 Subject: [PATCH 0126/1761] Add the more complete list --- docs/userguide/miscellaneous.rst | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst index 42f43e19b0..805f4dbb31 100644 --- a/docs/userguide/miscellaneous.rst +++ b/docs/userguide/miscellaneous.rst @@ -4,11 +4,30 @@ Controlling files in the distribution ===================================== For the most common use cases, ``setuptools`` will automatically find out which -files are necessary for distributing the package. -These include all :term:`pure Python modules ` in the -``py_modules`` or ``packages`` configuration, and the C sources (but not C -headers) listed as part of extensions when creating a :term:`source -distribution (or "sdist")`. +files are necessary for distributing the package. More precisely, the following +files are included in a source distribution by default: + +- All Python source files implied by the ``py_modules`` and ``packages`` + ``setup()`` arguments +- All C source files mentioned in the ``ext_modules`` or ``libraries`` + ``setup()`` arguments +- Scripts specified by the ``scripts`` ``setup()`` argument +- All files specified by the ``package_data`` and ``data_files`` + ``setup()`` arguments +- The file specified by the ``license_file`` option in ``setup.cfg`` + (setuptools 40.8.0+) +- All files specified by the ``license_files`` option in ``setup.cfg`` + (setuptools 42.0.0+) +- All files matching the pattern ``test/test*.py`` +- ``setup.py`` (or whatever you called your setup script) +- ``setup.cfg`` +- ``README`` +- ``README.txt`` +- ``README.rst`` (Python 3.7+ or setuptools 0.6.27+) +- ``README.md`` (setuptools 36.4.0+) +- ``pyproject.toml`` (setuptools 43.0.0+) +- ``MANIFEST.in`` + .. note:: .. 
versionadded:: v68.3.0

From fc8c5d4f87fdb9199e3b95908ec03c38d53cc240 Mon Sep 17 00:00:00 2001
From: Jean Abou Samra
Date: Tue, 14 Nov 2023 14:52:08 +0100
Subject: [PATCH 0127/1761] Revise list of default included files

---
 docs/userguide/miscellaneous.rst | 39 +++++++++++++++++---------------
 1 file changed, 21 insertions(+), 18 deletions(-)

diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst
index 805f4dbb31..357e2a6c1c 100644
--- a/docs/userguide/miscellaneous.rst
+++ b/docs/userguide/miscellaneous.rst
@@ -7,25 +7,28 @@ For the most common use cases, ``setuptools`` will automatically find out which
 files are necessary for distributing the package. More precisely, the following
 files are included in a source distribution by default:
 
-- All Python source files implied by the ``py_modules`` and ``packages``
-  ``setup()`` arguments
+- All Python source files implied by the ``py-modules`` and ``packages``
+  configuration parameters in ``pyproject.toml`` and/or equivalent
+  in ``setup.cfg``/``setup.py``;
 - All C source files mentioned in the ``ext_modules`` or ``libraries``
-  ``setup()`` arguments
-- Scripts specified by the ``scripts`` ``setup()`` argument
-- All files specified by the ``package_data`` and ``data_files``
-  ``setup()`` arguments
-- The file specified by the ``license_file`` option in ``setup.cfg``
-  (setuptools 40.8.0+)
-- All files specified by the ``license_files`` option in ``setup.cfg``
-  (setuptools 42.0.0+)
-- All files matching the pattern ``test/test*.py``
-- ``setup.py`` (or whatever you called your setup script)
-- ``setup.cfg``
-- ``README``
-- ``README.txt``
-- ``README.rst`` (Python 3.7+ or setuptools 0.6.27+)
-- ``README.md`` (setuptools 36.4.0+)
-- ``pyproject.toml`` (setuptools 43.0.0+)
+  ``setup()`` arguments;
+- Files that match the following glob patterns: ``tests/test*.py``,
+  ``test/test*.py``;
+- Scripts specified by the ``script-files`` configuration parameter
+  in ``pyproject.toml`` or ``scripts`` in ``setup.py``/``setup.cfg``;
+- All files specified by the ``package-data`` and ``data-files``
+  configuration parameters in ``pyproject.toml`` and/or equivalent
+  in ``setup.cfg``/``setup.py``;
+- The file specified by the ``license_file`` option in ``setup.cfg``;
+- All files specified by the ``license-files`` configuration parameter
+  in ``pyproject.toml`` and/or equivalent in ``setup.cfg``/``setup.py``;
+  note that if you don't explicitly set this parameter, ``setuptools``
+  will include any files that match the following glob patterns:
+  ``LICENSE*``, ``LICENCE*``, ``COPYING*``, ``NOTICE*``, ``AUTHORS*``;
+- ``pyproject.toml``;
+- ``setup.cfg``;
+- ``setup.py``;
+- ``README``, ``README.txt``, ``README.rst`` or ``README.md``;
 - ``MANIFEST.in``

From 0102dc1ce256e7ac60d0b10a11af9c20691f5f75 Mon Sep 17 00:00:00 2001
From: Jean Abou Samra
Date: Tue, 14 Nov 2023 14:54:25 +0100
Subject: [PATCH 0128/1761] Add warning about other setuptools versions

---
 docs/userguide/miscellaneous.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst
index 357e2a6c1c..3678374e3f 100644
--- a/docs/userguide/miscellaneous.rst
+++ b/docs/userguide/miscellaneous.rst
@@ -31,6 +31,8 @@ files are included in a source distribution by default:
 - ``README``, ``README.txt``, ``README.rst`` or ``README.md``;
 - ``MANIFEST.in``
 
+Please note that the list above is guaranteed to work with the latest stable version
+of ``setuptools``. The behavior of older versions might differ.
 
 .. note::
    ..
versionadded:: v68.3.0 From a29645e447aaf8b56decd2086195d0fab8ad8bcf Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 14 Nov 2023 16:34:23 +0000 Subject: [PATCH 0129/1761] Apply suggestions from code review --- docs/userguide/miscellaneous.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst index 3678374e3f..9d0b040a18 100644 --- a/docs/userguide/miscellaneous.rst +++ b/docs/userguide/miscellaneous.rst @@ -7,10 +7,10 @@ For the most common use cases, ``setuptools`` will automatically find out which files are necessary for distributing the package. More precisely, the following files are included in a source distribution by default: -- All Python source files implied by the ``py-modules`` and ``packages`` +- :term:`pure Python module ` files implied by the ``py-modules`` and ``packages`` configuration parameters in ``pyproject.toml`` and/or equivalent in ``setup.cfg``/``setup.py``; -- All C source files mentioned in the ``ext_modules`` or ``libraries`` +- C source files mentioned in the ``ext_modules`` or ``libraries`` ``setup()`` arguments; - Files that match the following glob patterns: ``tests/test*.py``, ``test/test*.py``; From 75d9cc1b7cb6f84e7a16a83ec3abb9a478fdb130 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 15 Nov 2023 19:57:45 +0600 Subject: [PATCH 0130/1761] Upgrade GitHub Actions checkout (jaraco/skeleton#94) Also, upgrade from `pypy3.9` to `pypy3.10` and remove the `continue-on-error` for Python 3.12. As recommended at jaraco/cssutils#41 --- .github/workflows/main.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 28e3678679..108286674e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -36,12 +36,12 @@ jobs: platform: ubuntu-latest - python: "3.10" platform: ubuntu-latest - - python: pypy3.9 + - python: pypy3.10 platform: ubuntu-latest runs-on: ${{ matrix.platform }} - continue-on-error: ${{ matrix.python == '3.12' }} + continue-on-error: ${{ matrix.python == '3.13' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: @@ -56,7 +56,7 @@ jobs: diffcov: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Setup Python @@ -76,7 +76,7 @@ jobs: env: TOXENV: docs steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 - name: Install tox @@ -109,7 +109,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: From 5732ebeeaa9480f8cd80c96a3183d7b247f27214 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 15 Nov 2023 20:08:10 +0600 Subject: [PATCH 0131/1761] GitHub Actions: Combine tox jobs diffcov and docs (jaraco/skeleton#95) Code reuse Co-authored-by: Jason R. 
Coombs --- .github/workflows/main.yml | 37 +++++++++++-------------------------- 1 file changed, 11 insertions(+), 26 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 108286674e..9682985c82 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -48,12 +48,15 @@ jobs: python-version: ${{ matrix.python }} allow-prereleases: true - name: Install tox - run: | - python -m pip install tox + run: python -m pip install tox - name: Run run: tox - diffcov: + collateral: + strategy: + fail-fast: false + matrix: + job: [diffcov, docs] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -64,33 +67,16 @@ jobs: with: python-version: 3.x - name: Install tox - run: | - python -m pip install tox - - name: Evaluate coverage - run: tox - env: - TOXENV: diffcov - - docs: - runs-on: ubuntu-latest - env: - TOXENV: docs - steps: - - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v4 - - name: Install tox - run: | - python -m pip install tox - - name: Run - run: tox + run: python -m pip install tox + - name: Eval ${{ matrix.job }} + run: tox -e ${{ matrix.job }} check: # This job does nothing and is only used for the branch protection if: always() needs: - test - - docs + - collateral runs-on: ubuntu-latest @@ -115,8 +101,7 @@ jobs: with: python-version: 3.x - name: Install tox - run: | - python -m pip install tox + run: python -m pip install tox - name: Run run: tox -e release env: From 3f437ab2420c3f47bbdb55deec2d00df0a574a2b Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 16 Nov 2023 15:22:26 +0000 Subject: [PATCH 0132/1761] Fix failing CI (#4110) Use pre-build distributions for tests in GitHub actions --- .github/workflows/main.yml | 8 ++++++++ setuptools/tests/fixtures.py | 18 ++++++++++-------- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3f1fa622e8..2e0e55cf43 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -73,6 +73,14 @@ jobs: if: steps.cache.outputs.cache-hit != 'true' working-directory: setuptools/tests/config run: python -m downloads.preload setupcfg_examples.txt + - name: Pre-build distributions for test + shell: bash + run: | + rm -rf dist + pipx run build + echo "PRE_BUILT_SETUPTOOLS_SDIST=$(ls dist/*.tar.gz)" >> $GITHUB_ENV + echo "PRE_BUILT_SETUPTOOLS_WHEEL=$(ls dist/*.whl)" >> $GITHUB_ENV + rm -rf setuptools.egg-info # Avoid interfering with the other tests - name: Install tox run: | python -m pip install tox diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py index 524c6cbd24..c2dd9df2fb 100644 --- a/setuptools/tests/fixtures.py +++ b/setuptools/tests/fixtures.py @@ -66,17 +66,18 @@ def sample_project(tmp_path): @pytest.fixture(scope="session") def setuptools_sdist(tmp_path_factory, request): - if os.getenv("PRE_BUILT_SETUPTOOLS_SDIST"): - return Path(os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")).resolve() + prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST") + if prebuilt and os.path.exists(prebuilt): # pragma: no cover + return Path(prebuilt).resolve() with contexts.session_locked_tmp_dir( request, tmp_path_factory, "sdist_build" - ) as tmp: + ) as tmp: # pragma: no cover dist = next(tmp.glob("*.tar.gz"), None) if dist: return dist - subprocess.check_call( + subprocess.check_output( [ sys.executable, "-m", @@ -92,17 +93,18 @@ def setuptools_sdist(tmp_path_factory, request): @pytest.fixture(scope="session") def setuptools_wheel(tmp_path_factory, request): - if 
os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL"): - return Path(os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")).resolve() + prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL") + if prebuilt and os.path.exists(prebuilt): # pragma: no cover + return Path(prebuilt).resolve() with contexts.session_locked_tmp_dir( request, tmp_path_factory, "wheel_build" - ) as tmp: + ) as tmp: # pragma: no cover dist = next(tmp.glob("*.whl"), None) if dist: return dist - subprocess.check_call( + subprocess.check_output( [ sys.executable, "-m", From c5eaec49f6dea61525738e5a4a59b40ee0714bc1 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 16 Nov 2023 14:40:47 +0100 Subject: [PATCH 0133/1761] Fix a couple typos found by codespell --- docs/conf.py | 2 +- setuptools/tests/test_build_meta.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index f305029833..664ad0883d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -204,7 +204,7 @@ python=('https://docs.python.org/3.11/', None), # ^-- Python 3.11 is required because it still contains `distutils`. # Just leaving it as `3` would imply 3.12+, but that causes an - # error with the cross references to disutils functions. + # error with the cross references to distutils functions. # Inventory cache may cause errors, deleting it solves the problem. ) diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 5ce4714393..f36119eb9c 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -968,7 +968,7 @@ def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd): @pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning") def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path): - """Setuptools should be resilent to setup.py with ``sys.exit(0)`` (#3973).""" + """Setuptools should be resilient to setup.py with ``sys.exit(0)`` (#3973).""" monkeypatch.chdir(tmp_path) setuppy = """ import sys, setuptools From ee263dc58a6a65f60220b9ba222adc2bbe55f198 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 16 Nov 2023 20:01:37 +0100 Subject: [PATCH 0134/1761] =?UTF-8?q?Update=20URLs=20in=20documentation:?= =?UTF-8?q?=20http://=20=E2=86=92=20https://?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update link to an old MSDN article and point to a newer article. 
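
A quick way to audit the tree for any remaining plain-HTTP links is a short
ad-hoc script (a sketch only, not part of this change; the ``docs`` directory
and ``*.rst`` glob are assumptions about which files to check):

    import pathlib
    import re

    HTTP_LINK = re.compile(r"http://\S+")

    for path in pathlib.Path("docs").rglob("*.rst"):
        for lineno, line in enumerate(path.read_text(encoding="utf-8").splitlines(), 1):
            for match in HTTP_LINK.finditer(line):
                print(f"{path}:{lineno}: {match.group()}")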
--- distutils/command/bdist_rpm.py | 2 +- distutils/msvc9compiler.py | 6 +++--- distutils/tests/test_bdist_rpm.py | 2 +- distutils/tests/test_build_scripts.py | 2 +- distutils/tests/test_sdist.py | 2 +- distutils/unixccompiler.py | 3 +-- docs/distutils/apiref.rst | 2 +- docs/distutils/examples.rst | 2 +- docs/distutils/setupscript.rst | 2 +- 9 files changed, 11 insertions(+), 12 deletions(-) diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py index 3ed608b479..696f26751f 100644 --- a/distutils/command/bdist_rpm.py +++ b/distutils/command/bdist_rpm.py @@ -435,7 +435,7 @@ def _make_spec_file(self): # noqa: C901 fixed = "brp-python-bytecompile %{__python} \\\n" fixed_hook = vendor_hook.replace(problem, fixed) if fixed_hook != vendor_hook: - spec_file.append('# Workaround for http://bugs.python.org/issue14443') + spec_file.append('# Workaround for https://bugs.python.org/issue14443') spec_file.append('%define __os_install_post ' + fixed_hook + '\n') # put locale summaries into spec file diff --git a/distutils/msvc9compiler.py b/distutils/msvc9compiler.py index f9f9f2d844..724986d89d 100644 --- a/distutils/msvc9compiler.py +++ b/distutils/msvc9compiler.py @@ -698,8 +698,8 @@ def link( # noqa: C901 def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): # If we need a manifest at all, an embedded manifest is recommended. # See MSDN article titled - # "How to: Embed a Manifest Inside a C/C++ Application" - # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx) + # "Understanding manifest generation for C/C++ programs" + # (currently at https://learn.microsoft.com/en-us/cpp/build/understanding-manifest-generation-for-c-cpp-programs) # Ask the linker to generate the manifest in the temp dir, so # we can check it, and possibly embed it, later. temp_manifest = os.path.join( @@ -710,7 +710,7 @@ def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): def manifest_get_embed_info(self, target_desc, ld_args): # If a manifest should be embedded, return a tuple of # (manifest_filename, resource_id). Returns None if no manifest - # should be embedded. See http://bugs.python.org/issue7833 for why + # should be embedded. 
See https://bugs.python.org/issue7833 for why # we want to avoid any manifest for extension modules if we can) for arg in ld_args: if arg.startswith("/MANIFESTFILE:"): diff --git a/distutils/tests/test_bdist_rpm.py b/distutils/tests/test_bdist_rpm.py index 4a702fb913..3fd2c7e2ac 100644 --- a/distutils/tests/test_bdist_rpm.py +++ b/distutils/tests/test_bdist_rpm.py @@ -89,7 +89,7 @@ def test_quiet(self): @mac_woes @requires_zlib() - # http://bugs.python.org/issue1533164 + # https://bugs.python.org/issue1533164 @pytest.mark.skipif("not find_executable('rpm')") @pytest.mark.skipif("not find_executable('rpmbuild')") def test_no_optimize_flag(self): diff --git a/distutils/tests/test_build_scripts.py b/distutils/tests/test_build_scripts.py index 1a5753c772..28cc5632a3 100644 --- a/distutils/tests/test_build_scripts.py +++ b/distutils/tests/test_build_scripts.py @@ -88,7 +88,7 @@ def test_version_int(self): ) cmd.finalize_options() - # http://bugs.python.org/issue4524 + # https://bugs.python.org/issue4524 # # On linux-g++-32 with command line `./configure --enable-ipv6 # --with-suffix=3`, python is compiled okay but the build scripts diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py index fdb768e73f..a3fa290275 100644 --- a/distutils/tests/test_sdist.py +++ b/distutils/tests/test_sdist.py @@ -162,7 +162,7 @@ def test_make_distribution(self): @pytest.mark.usefixtures('needs_zlib') def test_add_defaults(self): - # http://bugs.python.org/issue2279 + # https://bugs.python.org/issue2279 # add_default should also include # data_files and package_data diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py index bd8db9ac3f..294a16b7f4 100644 --- a/distutils/unixccompiler.py +++ b/distutils/unixccompiler.py @@ -283,8 +283,7 @@ def _is_gcc(self): def runtime_library_dir_option(self, dir): # XXX Hackish, at the very least. See Python bug #445902: - # http://sourceforge.net/tracker/index.php - # ?func=detail&aid=445902&group_id=5470&atid=105470 + # https://bugs.python.org/issue445902 # Linkers on different platforms need different options to # specify that directories need to be added to the list of # directories searched for dependencies when a dynamic library diff --git a/docs/distutils/apiref.rst b/docs/distutils/apiref.rst index 83b8ef5d52..beb17bc3fc 100644 --- a/docs/distutils/apiref.rst +++ b/docs/distutils/apiref.rst @@ -1021,7 +1021,7 @@ directories. Files in *src* that begin with :file:`.nfs` are skipped (more information on these files is available in answer D2 of the `NFS FAQ page - `_). + `_). .. versionchanged:: 3.3.1 NFS files are ignored. diff --git a/docs/distutils/examples.rst b/docs/distutils/examples.rst index 28582bab36..d758a8105e 100644 --- a/docs/distutils/examples.rst +++ b/docs/distutils/examples.rst @@ -335,4 +335,4 @@ loads its values:: .. % \section{Putting it all together} -.. _docutils: http://docutils.sourceforge.net +.. _docutils: https://docutils.sourceforge.io diff --git a/docs/distutils/setupscript.rst b/docs/distutils/setupscript.rst index 3c8e1ab1b3..71d2439f7e 100644 --- a/docs/distutils/setupscript.rst +++ b/docs/distutils/setupscript.rst @@ -642,7 +642,7 @@ Notes: 'long string' Multiple lines of plain text in reStructuredText format (see - http://docutils.sourceforge.net/). + https://docutils.sourceforge.io/). 'list of strings' See below. 
From b9d6cb4d46d35065169a46425bb24166a8be0827 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 16 Nov 2023 20:20:44 +0100 Subject: [PATCH 0135/1761] =?UTF-8?q?Update=20URLs=20in=20documentation:?= =?UTF-8?q?=20http://=20=E2=86=92=20https://?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/conf.py | 6 +++--- docs/development/developer-guide.rst | 2 +- docs/index.rst | 2 +- docs/pkg_resources.rst | 2 +- setuptools/monkey.py | 2 +- setuptools/tests/environment.py | 2 +- setuptools/tests/test_editable_install.py | 2 +- setuptools/windows_support.py | 2 +- 8 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index f305029833..ba183e593b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -45,15 +45,15 @@ ), dict( pattern=r'Old Setuptools #(?P\d+)', - url='http://bugs.python.org/setuptools/issue{old_setuptools}', + url='https://bugs.python.org/setuptools/issue{old_setuptools}', ), dict( pattern=r'Jython #(?P\d+)', - url='http://bugs.jython.org/issue{jython}', + url='https://bugs.jython.org/issue{jython}', ), dict( pattern=r'(Python #|bpo-)(?P\d+)', - url='http://bugs.python.org/issue{python}', + url='https://bugs.python.org/issue{python}', ), dict( pattern=r'Interop #(?P\d+)', diff --git a/docs/development/developer-guide.rst b/docs/development/developer-guide.rst index 88ac282a87..8c9142fc30 100644 --- a/docs/development/developer-guide.rst +++ b/docs/development/developer-guide.rst @@ -109,7 +109,7 @@ To build the docs locally, use tox:: $ tox -e docs -.. _Sphinx: http://www.sphinx-doc.org/en/master/ +.. _Sphinx: https://www.sphinx-doc.org/en/master/ .. _published documentation: https://setuptools.pypa.io/en/latest/ --------------------- diff --git a/docs/index.rst b/docs/index.rst index 3e6b021648..2d4089d52b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -9,7 +9,7 @@ designed to facilitate packaging Python projects. It helps developers to easily share reusable code (in the form of a library) and programs (e.g., CLI/GUI tools implemented in Python), that can be installed -with :pypi:`pip` and uploaded to `PyPI `_. +with :pypi:`pip` and uploaded to `PyPI `_. .. sidebar-links:: :home: diff --git a/docs/pkg_resources.rst b/docs/pkg_resources.rst index d5ebffab55..e999a37e2e 100644 --- a/docs/pkg_resources.rst +++ b/docs/pkg_resources.rst @@ -108,7 +108,7 @@ eggs (For more information about these terms and concepts, see also this `architectural overview`_ of ``pkg_resources`` and Python Eggs in general.) -.. _architectural overview: http://mail.python.org/pipermail/distutils-sig/2005-June/004652.html +.. _architectural overview: https://mail.python.org/pipermail/distutils-sig/2005-June/004652.html .. 
----------------- diff --git a/setuptools/monkey.py b/setuptools/monkey.py index 2ab98c178a..6c8a2f12f6 100644 --- a/setuptools/monkey.py +++ b/setuptools/monkey.py @@ -69,7 +69,7 @@ def patch_all(): has_issue_12885 = sys.version_info <= (3, 5, 3) if has_issue_12885: - # fix findall bug in distutils (http://bugs.python.org/issue12885) + # fix findall bug in distutils (https://bugs.python.org/issue12885) distutils.filelist.findall = setuptools.findall needs_warehouse = (3, 4) < sys.version_info < (3, 4, 6) or ( diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py index 78d73fb705..df2bd37ff6 100644 --- a/setuptools/tests/environment.py +++ b/setuptools/tests/environment.py @@ -66,7 +66,7 @@ def run_setup_py(cmd, pypath=None, path=None, data_stream=0, env=None): cmd = [sys.executable, "setup.py"] + list(cmd) - # http://bugs.python.org/issue8557 + # https://bugs.python.org/issue8557 shell = sys.platform == 'win32' try: diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index ef71147adf..eeffcf1962 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -60,7 +60,7 @@ def editable_opts(request): "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers" ] - urls = {Homepage = "http://github.com"} + urls = {Homepage = "https://github.com"} dependencies = ['importlib-metadata; python_version<"3.8"'] [tool.setuptools] diff --git a/setuptools/windows_support.py b/setuptools/windows_support.py index fdadeb597c..8299ac1cdf 100644 --- a/setuptools/windows_support.py +++ b/setuptools/windows_support.py @@ -12,7 +12,7 @@ def hide_file(path): """ Set the hidden attribute on a file or directory. - From http://stackoverflow.com/questions/19622133/ + From https://stackoverflow.com/questions/19622133/ `path` must be text. 
""" From 60e01c75692879fbf0fc2bed23316a93442b1f7e Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 17:52:37 +0100 Subject: [PATCH 0136/1761] Re-enable deprecation warning enforcement --- tox.ini | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index b7f50ea89f..ced3e00ddf 100644 --- a/tox.ini +++ b/tox.ini @@ -6,8 +6,7 @@ deps = # ^-- use dev version while we wait for the new release setenv = PYTHONWARNDEFAULTENCODING = 1 - SETUPTOOLS_ENFORCE_DEPRECATION = 0 - # ^-- Temporarily disable, until overdue deprecations are handled + SETUPTOOLS_ENFORCE_DEPRECATION = 1 commands = pytest {posargs} usedevelop = True From 81a3547847fb8baa25c6413ab6ad596c04bc02e2 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 17:13:24 +0100 Subject: [PATCH 0137/1761] Remove deprecation warning from _normalization.best_effort_version --- setuptools/_normalization.py | 41 +++++++++++++++++++----------------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index 3e94e662ef..e1a3080617 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -7,7 +7,6 @@ from typing import Union from .extern import packaging -from .warnings import SetuptoolsDeprecationWarning _Path = Union[str, Path] @@ -15,6 +14,7 @@ _VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) _UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9.]+", re.I) _NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I) +_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I) def safe_identifier(name: str) -> str: @@ -65,32 +65,35 @@ def safe_version(version: str) -> str: def best_effort_version(version: str) -> str: """Convert an arbitrary string into a version-like string. + Fallback when ``safe_version`` is not safe enough. >>> best_effort_version("v0.2 beta") '0.2b0' - - >>> import warnings - >>> warnings.simplefilter("ignore", category=SetuptoolsDeprecationWarning) >>> best_effort_version("ubuntu lts") - 'ubuntu.lts' + '0.dev0+sanitized.ubuntu.lts' + >>> best_effort_version("0.23ubuntu1") + '0.23.dev0+sanitized.ubuntu1' + >>> best_effort_version("0.23-") + '0.23.dev0+sanitized' + >>> best_effort_version("0.-_") + '0.dev0+sanitized' + >>> best_effort_version("42.+?1") + '42.dev0+sanitized.1' """ - # See pkg_resources.safe_version + # See pkg_resources._forgiving_version try: return safe_version(version) except packaging.version.InvalidVersion: - SetuptoolsDeprecationWarning.emit( - f"Invalid version: {version!r}.", - f""" - Version {version!r} is not valid according to PEP 440. - - Please make sure to specify a valid version for your package. - Also note that future releases of setuptools may halt the build process - if an invalid version is given. 
- """, - see_url="https://peps.python.org/pep-0440/", - due_date=(2023, 9, 26), # See setuptools/dist _validate_version - ) v = version.replace(' ', '.') - return safe_name(v) + match = _PEP440_FALLBACK.search(v) + if match: + safe = match["safe"] + rest = v[len(safe) :] + else: + safe = "0" + rest = version + safe_rest = _NON_ALPHANUMERIC.sub(".", rest).strip(".") + local = f"sanitized.{safe_rest}".strip(".") + return safe_version(f"{safe}.dev0+{local}") def safe_extra(extra: str) -> str: From 7760a7a1d4c2be4b397ecf7716d3e4b7f4aae810 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 17:13:56 +0100 Subject: [PATCH 0138/1761] Remove egg_base option from dist_info --- setuptools/command/dist_info.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py index 9df625cee7..5ef322168c 100644 --- a/setuptools/command/dist_info.py +++ b/setuptools/command/dist_info.py @@ -12,7 +12,6 @@ from pathlib import Path from .. import _normalization -from ..warnings import SetuptoolsDeprecationWarning class dist_info(Command): @@ -24,13 +23,6 @@ class dist_info(Command): description = "DO NOT CALL DIRECTLY, INTERNAL ONLY: create .dist-info directory" user_options = [ - ( - 'egg-base=', - 'e', - "directory containing .egg-info directories" - " (default: top of the source tree)" - " DEPRECATED: use --output-dir.", - ), ( 'output-dir=', 'o', @@ -47,7 +39,6 @@ class dist_info(Command): negative_opt = {'no-date': 'tag-date'} def initialize_options(self): - self.egg_base = None self.output_dir = None self.name = None self.dist_info_dir = None @@ -56,13 +47,6 @@ def initialize_options(self): self.keep_egg_info = False def finalize_options(self): - if self.egg_base: - msg = "--egg-base is deprecated for dist_info command. Use --output-dir." - SetuptoolsDeprecationWarning.emit(msg, due_date=(2023, 9, 26)) - # This command is internal to setuptools, therefore it should be safe - # to remove the deprecated support soon. - self.output_dir = self.egg_base or self.output_dir - dist = self.distribution project_dir = dist.src_root or os.curdir self.output_dir = Path(self.output_dir or project_dir) From 2c96f00660b7710542e4a6c34e15a7aaa2522c70 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 17:14:34 +0100 Subject: [PATCH 0139/1761] Ensure tags generated by egg_info are valid --- setuptools/command/egg_info.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index a5199deb33..7c7f57aaf8 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -127,7 +127,7 @@ def name(self): def tagged_version(self): tagged = self._maybe_tag(self.distribution.get_version()) - return _normalization.best_effort_version(tagged) + return _normalization.safe_version(tagged) def _maybe_tag(self, version): """ @@ -148,7 +148,10 @@ def _already_tagged(self, version: str) -> bool: def _safe_tags(self) -> str: # To implement this we can rely on `safe_version` pretending to be version 0 # followed by tags. 
Then we simply discard the starting 0 (fake version number) - return _normalization.best_effort_version(f"0{self.vtags}")[1:] + try: + return _normalization.safe_version(f"0{self.vtags}")[1:] + except packaging.version.InvalidVersion: + return _normalization.safe_name(self.vtags.replace(' ', '.')) def tags(self) -> str: version = '' From db987aa6aace6df20abb0be6699c9492d60de5e4 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 17:18:07 +0100 Subject: [PATCH 0140/1761] Remove 'requires' and 'license_file' from setup.cfg A search in [`grep.app`](https://grep.app/search?q=%5Erequires%5Cs%2A%3D®exp=true&filter%5Bpath.pattern%5D%5B0%5D=setup.cfg) suggests that `requires` is not utilised in `setup.cfg`, so it should be safe to remove. `license_file` is not fundamental to make distributions work, so it should be safe to remove. --- setuptools/config/setupcfg.py | 14 -------------- setuptools/tests/config/test_setupcfg.py | 17 ----------------- 2 files changed, 31 deletions(-) diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index bb35559069..1a0e4154b9 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -556,23 +556,9 @@ def parsers(self): 'platforms': parse_list, 'keywords': parse_list, 'provides': parse_list, - 'requires': self._deprecated_config_handler( - parse_list, - "The requires parameter is deprecated, please use " - "install_requires for runtime dependencies.", - due_date=(2023, 10, 30), - # Warning introduced in 27 Oct 2018 - ), 'obsoletes': parse_list, 'classifiers': self._get_parser_compound(parse_file, parse_list), 'license': exclude_files_parser('license'), - 'license_file': self._deprecated_config_handler( - exclude_files_parser('license_file'), - "The license_file parameter is deprecated, " - "use license_files instead.", - due_date=(2023, 10, 30), - # Warning introduced in 23 May 2021 - ), 'license_files': parse_list, 'description': parse_file, 'long_description': parse_file, diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py index fa16728ea7..23fc0d0b47 100644 --- a/setuptools/tests/config/test_setupcfg.py +++ b/setuptools/tests/config/test_setupcfg.py @@ -388,23 +388,6 @@ def test_classifiers(self, tmpdir): with get_dist(tmpdir) as dist: assert set(dist.metadata.classifiers) == expected - def test_deprecated_config_handlers(self, tmpdir): - fake_env( - tmpdir, - '[metadata]\n' - 'version = 10.1.1\n' - 'description = Some description\n' - 'requires = some, requirement\n', - ) - - with pytest.warns(SetuptoolsDeprecationWarning, match="requires"): - with get_dist(tmpdir) as dist: - metadata = dist.metadata - - assert metadata.version == '10.1.1' - assert metadata.description == 'Some description' - assert metadata.requires == ['some', 'requirement'] - def test_interpolation(self, tmpdir): fake_env(tmpdir, '[metadata]\n' 'description = %(message)s\n') with pytest.raises(configparser.InterpolationMissingOptionError): From 091008425fc5a22edf31d1eabeb06fd1bdbd4873 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 17:21:15 +0100 Subject: [PATCH 0141/1761] Improve explanation of difference between safe_version and best_effort_version --- setuptools/_normalization.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index e1a3080617..eee4fb7746 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -42,6 +42,8 @@ def safe_name(component: str) -> str: def 
safe_version(version: str) -> str: """Convert an arbitrary string into a valid version string. + Can still raise an ``InvalidVersion`` exception. + To avoid exceptions use ``best_effort_version``. >>> safe_version("1988 12 25") '1988.12.25' >>> safe_version("v0.2.1") From e247d21b271f9daa8a5103306bed2da6434ebc1a Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 17:22:04 +0100 Subject: [PATCH 0142/1761] Remove deprecation warning for config_settings --global-option This interface is not super stable/usable yet, so it should be fine to remove. --- setuptools/build_meta.py | 14 +++----------- setuptools/tests/test_build_meta.py | 19 ------------------- 2 files changed, 3 insertions(+), 30 deletions(-) diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index 9267cf312f..3696658f4d 100644 --- a/setuptools/build_meta.py +++ b/setuptools/build_meta.py @@ -284,10 +284,9 @@ def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]: ['foo'] >>> list(fn({'--build-option': 'foo bar'})) ['foo', 'bar'] - >>> warnings.simplefilter('error', SetuptoolsDeprecationWarning) >>> list(fn({'--global-option': 'foo'})) # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): - SetuptoolsDeprecationWarning: ...arguments given via `--global-option`... + ValueError: Incompatible .config_settings. ...'foo'... """ args = self._get_config("--global-option", config_settings) global_opts = self._valid_global_options() @@ -301,15 +300,8 @@ def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]: yield from self._get_config("--build-option", config_settings) if bad_args: - SetuptoolsDeprecationWarning.emit( - "Incompatible `config_settings` passed to build backend.", - f""" - The arguments {bad_args!r} were given via `--global-option`. - Please use `--build-option` instead, - `--global-option` is reserved for flags like `--verbose` or `--quiet`. - """, - due_date=(2023, 9, 26), # Warning introduced in v64.0.1, 11/Aug/2022. - ) + msg = f"Incompatible `config_settings`: {bad_args!r} ({config_settings!r})" + raise ValueError(msg) class _BuildMetaBackend(_ConfigSettingsTranslator): diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index f36119eb9c..778aedf573 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -706,25 +706,6 @@ def _assert_link_tree(self, parent_dir): for file in files: assert file.is_symlink() or os.stat(file).st_nlink > 0 - @pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning") - # Since the backend is running via a process pool, in some operating systems - # we may have problems to make assertions based on warnings/stdout/stderr... - # So the best is to ignore them for the time being. - def test_editable_with_global_option_still_works(self, tmpdir_cwd): - """The usage of --global-option is now discouraged in favour of --build-option. - This is required to make more sense of the provided scape hatch and align with - previous pip behaviour. See pypa/setuptools#1928. 
-        """
-        path.build({**self._simple_pyproject_example, '_meta': {}})
-        build_backend = self.get_build_backend()
-        assert not Path("build").exists()
-
-        cfg = {"--global-option": ["--mode", "strict"]}
-        build_backend.prepare_metadata_for_build_editable("_meta", cfg)
-        build_backend.build_editable("temp", cfg, "_meta")
-
-        self._assert_link_tree(next(Path("build").glob("__editable__.*")))
-
     def test_editable_without_config_settings(self, tmpdir_cwd):
         """
         Sanity check to ensure tests with --mode=strict are different from the ones

From 9433e907ca4a8a45e72ff877b939d88e09807ff8 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Thu, 28 Sep 2023 17:24:53 +0100
Subject: [PATCH 0143/1761] Remove deprecation warning for invalid versions in
 setuptools.dist
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

… and postpone deprecations for `setup.cfg` regarding hyphens instead of
underscores and upper-cased fields.
---
 setuptools/dist.py | 42 ++++++++----------------------------------
 1 file changed, 8 insertions(+), 34 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 2672f928d5..fce534d4e2 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -26,7 +26,7 @@
 from .extern.ordered_set import OrderedSet
 from .extern.packaging.markers import InvalidMarker, Marker
 from .extern.packaging.specifiers import InvalidSpecifier, SpecifierSet
-from .extern.packaging.version import InvalidVersion, Version
+from .extern.packaging.version import Version
 
 from . import _entry_points
 from . import _normalization
@@ -311,9 +311,7 @@ def __init__(self, attrs=None):
 
         self._set_metadata_defaults(attrs)
 
-        self.metadata.version = self._normalize_version(
-            self._validate_version(self.metadata.version)
-        )
+        self.metadata.version = self._normalize_version(self.metadata.version)
         self._finalize_requires()
 
     def _validate_metadata(self):
@@ -343,7 +341,10 @@ def _set_metadata_defaults(self, attrs):
     def _normalize_version(version):
         from . import sic
 
-        if isinstance(version, sic) or version is None:
+        if isinstance(version, numbers.Number):
+            # Some people apparently take "version number" too literally :)
+            version = str(version)
+        elif isinstance(version, sic) or version is None:
             return version
 
         normalized = str(Version(version))
@@ -352,33 +353,6 @@ def _normalize_version(version):
             return normalized
         return version
 
-    @staticmethod
-    def _validate_version(version):
-        if isinstance(version, numbers.Number):
-            # Some people apparently take "version number" too literally :)
-            version = str(version)
-
-        if version is not None:
-            try:
-                Version(version)
-            except (InvalidVersion, TypeError):
-                from . import sic
-
-                SetuptoolsDeprecationWarning.emit(
-                    f"Invalid version: {version!r}.",
-                    """
-                    The version specified is not a valid version according to PEP 440.
-                    This may not work as expected with newer versions of
-                    setuptools, pip, and PyPI.
-                    """,
-                    see_url="https://peps.python.org/pep-0440/",
-                    due_date=(2023, 9, 26),
-                    # Warning initially introduced in 26 Sept 2014
-                    # pypa/packaging already removed legacy versions.
-                )
-                return sic(version)
-        return version
-
     def _finalize_requires(self):
         """
         Set `metadata.python_requires` and fix environment markers
@@ -550,7 +524,7 @@ def warn_dash_deprecation(self, opt, section):
                 versions. Please use the underscore name {underscore_opt!r} instead.
""", see_docs="userguide/declarative_config.html", - due_date=(2023, 9, 26), + due_date=(2024, 9, 26), # Warning initially introduced in 3 Mar 2021 ) return underscore_opt @@ -574,7 +548,7 @@ def make_option_lowercase(self, opt, section): future versions. Please use lowercase {lowercase_opt!r} instead. """, see_docs="userguide/declarative_config.html", - due_date=(2023, 9, 26), + due_date=(2024, 9, 26), # Warning initially introduced in 6 Mar 2021 ) return lowercase_opt From 6cc69fe05659ebf0f2250397b173eed910065d0c Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 28 Sep 2023 18:07:42 +0100 Subject: [PATCH 0144/1761] Add news fragments --- newsfragments/4066.removal.1.rst | 2 ++ newsfragments/4066.removal.2.rst | 4 ++++ newsfragments/4066.removal.3.rst | 4 ++++ newsfragments/4066.removal.4.rst | 2 ++ 4 files changed, 12 insertions(+) create mode 100644 newsfragments/4066.removal.1.rst create mode 100644 newsfragments/4066.removal.2.rst create mode 100644 newsfragments/4066.removal.3.rst create mode 100644 newsfragments/4066.removal.4.rst diff --git a/newsfragments/4066.removal.1.rst b/newsfragments/4066.removal.1.rst new file mode 100644 index 0000000000..40cfd976ec --- /dev/null +++ b/newsfragments/4066.removal.1.rst @@ -0,0 +1,2 @@ +Configuring project ``version`` and ``egg_info.tag_*`` in such a way that +results in invalid version strings (according to :pep:`440`) is no longer permitted. diff --git a/newsfragments/4066.removal.2.rst b/newsfragments/4066.removal.2.rst new file mode 100644 index 0000000000..0209f5248d --- /dev/null +++ b/newsfragments/4066.removal.2.rst @@ -0,0 +1,4 @@ +Remove deprecated ``egg_base`` option from ``dist_info``. +Note that the ``dist_info`` command is considered internal to the way +``setuptools`` build backend works and not intended for +public usage. diff --git a/newsfragments/4066.removal.3.rst b/newsfragments/4066.removal.3.rst new file mode 100644 index 0000000000..7d4048b785 --- /dev/null +++ b/newsfragments/4066.removal.3.rst @@ -0,0 +1,4 @@ +The parsing of the deprecated ``metadata.license_file`` and +``metadata.requires`` fields in ``setup.cfg`` is no longer supported. +Users are expected to move to ``metadata.license_files`` and +``options.install_requires`` (respectively). diff --git a/newsfragments/4066.removal.4.rst b/newsfragments/4066.removal.4.rst new file mode 100644 index 0000000000..d599450ac2 --- /dev/null +++ b/newsfragments/4066.removal.4.rst @@ -0,0 +1,2 @@ +Passing ``config_settings`` to ``setuptools.build_meta`` with +deprecated values for ``--global-option`` is no longer allowed. From 4f60770c244be776e381dca5b3f624dc177cdc05 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 17 Nov 2023 12:38:26 +0000 Subject: [PATCH 0145/1761] Xfail on deprecated bdist_rpm tests --- setuptools/tests/test_bdist_deprecations.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py index 1b69c41858..61f4e9a4cb 100644 --- a/setuptools/tests/test_bdist_deprecations.py +++ b/setuptools/tests/test_bdist_deprecations.py @@ -10,6 +10,7 @@ @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only') +@pytest.mark.xfail(reason="bdist_rpm is long deprecated, should we remove it? 
#1988") @mock.patch('distutils.command.bdist_rpm.bdist_rpm') def test_bdist_rpm_warning(distutils_cmd, tmpdir_cwd): dist = Distribution( From e4b6fc964c887838b9cec546e053cf5ceee391c7 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 17 Nov 2023 12:39:05 +0000 Subject: [PATCH 0146/1761] Be strict on missing 'dynamic' in pyproject.toml --- setuptools/config/_apply_pyprojecttoml.py | 49 +++++++++++++------ setuptools/config/pyprojecttoml.py | 6 +-- .../tests/config/test_apply_pyprojecttoml.py | 11 ++--- setuptools/tests/config/test_pyprojecttoml.py | 11 +---- 4 files changed, 42 insertions(+), 35 deletions(-) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index 4489d22437..9d2e77c8f5 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -105,13 +105,13 @@ def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path): def _handle_missing_dynamic(dist: "Distribution", project_table: dict): """Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``""" - # TODO: Set fields back to `None` once the feature stabilizes dynamic = set(project_table.get("dynamic", [])) for field, getter in _PREVIOUSLY_DEFINED.items(): if not (field in project_table or field in dynamic): value = getter(dist) if value: - _WouldIgnoreField.emit(field=field, value=value) + _MissingDynamic.emit(field=field, value=value) + project_table[field] = _RESET_PREVIOUSLY_DEFINED.get(field) def json_compatible_key(key: str) -> str: @@ -226,14 +226,18 @@ def _unify_entry_points(project_table: dict): renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"} for key, value in list(project.items()): # eager to allow modifications norm_key = json_compatible_key(key) - if norm_key in renaming and value: + if norm_key in renaming: + # Don't skip even if value is empty (reason: reset missing `dynamic`) entry_points[renaming[norm_key]] = project.pop(key) if entry_points: project["entry-points"] = { name: [f"{k} = {v}" for k, v in group.items()] for name, group in entry_points.items() + if group # now we can skip empty groups } + # Sometimes this will set `project["entry-points"] = {}`, and that is + # intentional (for reseting configurations that are missing `dynamic`). def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path): @@ -388,14 +392,27 @@ def _acessor(obj): } -class _WouldIgnoreField(SetuptoolsDeprecationWarning): - _SUMMARY = "`{field}` defined outside of `pyproject.toml` would be ignored." +_RESET_PREVIOUSLY_DEFINED: dict = { + # Fix improper setting: given in `setup.py`, but not listed in `dynamic` + # dict: pyproject name => value to which reset + "license": {}, + "authors": [], + "maintainers": [], + "keywords": [], + "classifiers": [], + "urls": {}, + "entry-points": {}, + "scripts": {}, + "gui-scripts": {}, + "dependencies": [], + "optional-dependencies": [], +} - _DETAILS = """ - ########################################################################## - # configuration would be ignored/result in error due to `pyproject.toml` # - ########################################################################## +class _MissingDynamic(SetuptoolsWarning): + _SUMMARY = "`{field}` defined outside of `pyproject.toml` is ignored." 
+ + _DETAILS = """ The following seems to be defined outside of `pyproject.toml`: `{field} = {value!r}` @@ -405,12 +422,14 @@ class _WouldIgnoreField(SetuptoolsDeprecationWarning): https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ - For the time being, `setuptools` will still consider the given value (as a - **transitional** measure), but please note that future releases of setuptools will - follow strictly the standard. - - To prevent this warning, you can list `{field}` under `dynamic` or alternatively + To prevent this problem, you can list `{field}` under `dynamic` or alternatively remove the `[project]` table from your file and rely entirely on other means of configuration. """ - _DUE_DATE = (2023, 10, 30) # Initially introduced in 27 May 2022 + # TODO: Consider removing this check in the future? + # There is a trade-off here between improving "debug-ability" and the cost + # of running/testing/maintaining these unnecessary checks... + + @classmethod + def details(cls, field: str, value: Any) -> str: + return cls._DETAILS.format(field=field, value=value) diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index 93dbd9f559..5b7884fe4f 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -17,7 +17,7 @@ from ..errors import FileError, OptionError from ..warnings import SetuptoolsWarning from . import expand as _expand -from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _WouldIgnoreField +from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _MissingDynamic from ._apply_pyprojecttoml import apply as _apply if TYPE_CHECKING: @@ -330,9 +330,7 @@ def _set_scripts(field: str, group: str): if group in groups: value = groups.pop(group) if field not in self.dynamic: - _WouldIgnoreField.emit(field=field, value=value) - # TODO: Don't set field when support for pyproject.toml stabilizes - # instead raise an error as specified in PEP 621 + raise OptionError(_MissingDynamic.details(field, value)) expanded[field] = value _set_scripts("scripts", "console_scripts") diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index 294947a00a..ba36416be1 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -21,7 +21,7 @@ from setuptools.dist import Distribution from setuptools.config import setupcfg, pyprojecttoml from setuptools.config import expand -from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField, _some_attrgetter +from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter from setuptools.command.egg_info import write_requirements from setuptools.warnings import SetuptoolsDeprecationWarning @@ -339,18 +339,15 @@ def pyproject(self, tmp_path, dynamic, extra_content=""): ], ) def test_not_listed_in_dynamic(self, tmp_path, attr, field, value): - """For the time being we just warn if the user pre-set values (e.g. via - ``setup.py``) but do not include them in ``dynamic``. 
- """ + """Setuptools cannot set a field if not listed in ``dynamic``""" pyproject = self.pyproject(tmp_path, []) dist = makedist(tmp_path, **{attr: value}) msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S) - with pytest.warns(_WouldIgnoreField, match=msg): + with pytest.warns(_MissingDynamic, match=msg): dist = pyprojecttoml.apply_configuration(dist, pyproject) - # TODO: Once support for pyproject.toml config stabilizes attr should be None dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist) - assert dist_value == value + assert not dist_value @pytest.mark.parametrize( "attr, field, value", diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py index 81ec949a42..318885a6bd 100644 --- a/setuptools/tests/config/test_pyprojecttoml.py +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -6,7 +6,6 @@ import tomli_w from path import Path as _Path -from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField from setuptools.config.pyprojecttoml import ( read_configuration, expand_configuration, @@ -200,14 +199,8 @@ def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic): dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic} msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}" - with pytest.warns(_WouldIgnoreField, match=re.compile(msg, re.S)): - expanded = expand_configuration(self.pyproject(dynamic), tmp_path) - - expanded_project = expanded["project"] - assert dynamic < set(expanded_project) - assert len(expanded_project["entry-points"]) == 1 - # TODO: Test the following when pyproject.toml support stabilizes: - # >>> assert missing_dynamic not in expanded_project + with pytest.raises(OptionError, match=re.compile(msg, re.S)): + expand_configuration(self.pyproject(dynamic), tmp_path) class TestClassifiers: From 9c35cd935aeae08596cf80f327da5b0b40a2cb7f Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 17 Nov 2023 13:17:45 +0000 Subject: [PATCH 0147/1761] Enforce namespace-packages are not used in pyproject.toml --- setuptools/config/_apply_pyprojecttoml.py | 25 +++++++++++-------- setuptools/errors.py | 4 +++ .../tests/config/test_apply_pyprojecttoml.py | 4 +-- 3 files changed, 20 insertions(+), 13 deletions(-) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index 9d2e77c8f5..ba0284e879 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -12,6 +12,7 @@ from collections.abc import Mapping from email.headerregistry import Address from functools import partial, reduce +from inspect import cleandoc from itertools import chain from types import MappingProxyType from typing import ( @@ -28,6 +29,7 @@ cast, ) +from ..errors import RemovedConfigError from ..warnings import SetuptoolsWarning, SetuptoolsDeprecationWarning if TYPE_CHECKING: @@ -90,12 +92,13 @@ def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path): for field, value in tool_table.items(): norm_key = json_compatible_key(field) - if norm_key in TOOL_TABLE_DEPRECATIONS: - suggestion, kwargs = TOOL_TABLE_DEPRECATIONS[norm_key] - msg = f"The parameter `{norm_key}` is deprecated, {suggestion}" - SetuptoolsDeprecationWarning.emit( - "Deprecated config", msg, **kwargs # type: ignore - ) + if norm_key in TOOL_TABLE_REMOVALS: + suggestion = cleandoc(TOOL_TABLE_REMOVALS[norm_key]) + msg = f""" + The parameter `tool.setuptools.{field}` was long deprecated + and has been removed 
from `pyproject.toml`. + """ + raise RemovedConfigError("\n".join([cleandoc(msg), suggestion])) norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key) _set_config(dist, norm_key, value) @@ -357,11 +360,11 @@ def _acessor(obj): } TOOL_TABLE_RENAMES = {"script_files": "scripts"} -TOOL_TABLE_DEPRECATIONS = { - "namespace_packages": ( - "consider using implicit namespaces instead (PEP 420).", - {"due_date": (2023, 10, 30)}, # warning introduced in May 2022 - ) +TOOL_TABLE_REMOVALS = { + "namespace_packages": """ + Please migrate to implicit native namespaces instead. + See https://packaging.python.org/en/latest/guides/packaging-namespace-packages/. + """, } SETUPTOOLS_PATCHES = { diff --git a/setuptools/errors.py b/setuptools/errors.py index ec7fb3b6c4..855875d0ed 100644 --- a/setuptools/errors.py +++ b/setuptools/errors.py @@ -29,6 +29,10 @@ BaseError = _distutils_errors.DistutilsError +class RemovedConfigError(OptionError): + """Error used for configurations that were deprecated and removed.""" + + class RemovedCommandError(BaseError, RuntimeError): """Error used for commands that have been removed in setuptools. diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index ba36416be1..7905aa9ab6 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -23,7 +23,7 @@ from setuptools.config import expand from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter from setuptools.command.egg_info import write_requirements -from setuptools.warnings import SetuptoolsDeprecationWarning +from setuptools.errors import RemovedConfigError from .downloads import retrieve_file, urls_from_file @@ -316,7 +316,7 @@ def test_namespace_packages(self, tmp_path): namespace-packages = ["myproj.pkg"] """ pyproject.write_text(cleandoc(config), encoding="utf-8") - with pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages"): + with pytest.raises(RemovedConfigError, match="namespace-packages"): pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) From 4a34a1ca341dc32a3a9603e3fecebdc4ae4e5d75 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 17 Nov 2023 13:18:17 +0000 Subject: [PATCH 0148/1761] Use custom class for InvalidConfigError This way we don't have to see `DistutilsOptionError` in the error message. --- setuptools/config/pyprojecttoml.py | 6 +++--- setuptools/errors.py | 4 ++++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index 5b7884fe4f..379ef222f9 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -14,7 +14,7 @@ from functools import partial from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Set, Union -from ..errors import FileError, OptionError +from ..errors import FileError, InvalidConfigError from ..warnings import SetuptoolsWarning from . import expand as _expand from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _MissingDynamic @@ -265,7 +265,7 @@ def _ensure_previously_set(self, dist: "Distribution", field: str): "Some dynamic fields need to be specified via `tool.setuptools.dynamic`" "\nothers must be specified via the equivalent attribute in `setup.py`." 
        )
-        raise OptionError(msg)
+        raise InvalidConfigError(msg)
 
     def _expand_directive(
         self, specifier: str, directive, package_dir: Mapping[str, str]
@@ -330,7 +330,7 @@ def _set_scripts(field: str, group: str):
             if group in groups:
                 value = groups.pop(group)
                 if field not in self.dynamic:
-                    raise OptionError(_MissingDynamic.details(field, value))
+                    raise InvalidConfigError(_MissingDynamic.details(field, value))
                 expanded[field] = value
 
         _set_scripts("scripts", "console_scripts")
diff --git a/setuptools/errors.py b/setuptools/errors.py
index 855875d0ed..67a5a1df10 100644
--- a/setuptools/errors.py
+++ b/setuptools/errors.py
@@ -29,6 +29,10 @@
 BaseError = _distutils_errors.DistutilsError
 
 
+class InvalidConfigError(OptionError):
+    """Error used for invalid configurations."""
+
+
 class RemovedConfigError(OptionError):
     """Error used for configurations that were deprecated and removed."""
 

From b44faba4a8a7598a15e2fb2f0c487c7224967a14 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Fri, 17 Nov 2023 13:33:59 +0000
Subject: [PATCH 0149/1761] Add newsfragments for latest removals

---
 newsfragments/4066.removal.5.rst | 4 ++++
 newsfragments/4066.removal.6.rst | 4 ++++
 2 files changed, 8 insertions(+)
 create mode 100644 newsfragments/4066.removal.5.rst
 create mode 100644 newsfragments/4066.removal.6.rst

diff --git a/newsfragments/4066.removal.5.rst b/newsfragments/4066.removal.5.rst
new file mode 100644
index 0000000000..99f77a0965
--- /dev/null
+++ b/newsfragments/4066.removal.5.rst
@@ -0,0 +1,4 @@
+Removed deprecated ``namespace-packages`` from ``pyproject.toml``.
+Users are asked to use
+:doc:`implicit namespace packages `
+(as defined in :pep:`420`).
diff --git a/newsfragments/4066.removal.6.rst b/newsfragments/4066.removal.6.rst
new file mode 100644
index 0000000000..350ea60eb6
--- /dev/null
+++ b/newsfragments/4066.removal.6.rst
@@ -0,0 +1,4 @@
+Added strict enforcement for ``project.dynamic`` in ``pyproject.toml``.
+This removes the transitional ability for users to configure certain parameters
+via ``setup.py`` without making the necessary changes to ``pyproject.toml``
+(as mandated by :pep:`621`).

From b802c1be4c83ab1bca762999bcbdf87dd2850372 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Fri, 17 Nov 2023 13:34:56 +0000
Subject: [PATCH 0150/1761] Update newsfragments

---
 newsfragments/4066.removal.2.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4066.removal.2.rst b/newsfragments/4066.removal.2.rst
index 0209f5248d..ff3c7c2885 100644
--- a/newsfragments/4066.removal.2.rst
+++ b/newsfragments/4066.removal.2.rst
@@ -1,4 +1,4 @@
-Remove deprecated ``egg_base`` option from ``dist_info``.
+Removed deprecated ``egg_base`` option from ``dist_info``.
 Note that the ``dist_info`` command is considered internal to the way
 ``setuptools`` build backend works and not intended for
 public usage.
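A quick illustration (editor's sketch, not part of the patch series; it assumes
only what the errors.py and pyprojecttoml.py hunks above show): both new
exception types derive from the pre-existing ``OptionError``, so existing
handlers keep working while users stop seeing distutils-flavoured class names.

    # Hedged sketch: class names taken from the errors.py hunks above.
    from setuptools.errors import InvalidConfigError, OptionError, RemovedConfigError

    assert issubclass(InvalidConfigError, OptionError)
    assert issubclass(RemovedConfigError, OptionError)

    # Per the pyprojecttoml.py hunk, a field missing from `project.dynamic`
    # now raises InvalidConfigError instead of emitting a transitional warning:
    try:
        raise InvalidConfigError("`scripts` defined outside of `pyproject.toml`")
    except OptionError as exc:  # still catchable through the old base class
        print(type(exc).__name__)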
From b2438188f124774dce67b3186c8ce8e433597849 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 17 Nov 2023 13:44:53 +0000 Subject: [PATCH 0151/1761] Fix lint errors --- setuptools/config/_apply_pyprojecttoml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index ba0284e879..80318d5d0b 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -30,7 +30,7 @@ ) from ..errors import RemovedConfigError -from ..warnings import SetuptoolsWarning, SetuptoolsDeprecationWarning +from ..warnings import SetuptoolsWarning if TYPE_CHECKING: from setuptools._importlib import metadata # noqa From b97814ade36e48143202b6a56949faeda0c143ea Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 17 Nov 2023 14:28:31 +0000 Subject: [PATCH 0152/1761] Add workaround for unreleased PyNaCl --- .github/workflows/main.yml | 3 +++ tox.ini | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2e0e55cf43..f214cb148d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -81,6 +81,9 @@ jobs: echo "PRE_BUILT_SETUPTOOLS_SDIST=$(ls dist/*.tar.gz)" >> $GITHUB_ENV echo "PRE_BUILT_SETUPTOOLS_WHEEL=$(ls dist/*.whl)" >> $GITHUB_ENV rm -rf setuptools.egg-info # Avoid interfering with the other tests + - name: Workaround for unreleased PyNaCl (pyca/pynacl#805) + if: contains(matrix.python, 'pypy') + run: echo "SETUPTOOLS_ENFORCE_DEPRECATION=0" >> $GITHUB_ENV - name: Install tox run: | python -m pip install tox diff --git a/tox.ini b/tox.ini index ced3e00ddf..5381358e6d 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ deps = # ^-- use dev version while we wait for the new release setenv = PYTHONWARNDEFAULTENCODING = 1 - SETUPTOOLS_ENFORCE_DEPRECATION = 1 + SETUPTOOLS_ENFORCE_DEPRECATION = {env:SETUPTOOLS_ENFORCE_DEPRECATION:1} commands = pytest {posargs} usedevelop = True @@ -14,6 +14,7 @@ extras = testing pass_env = SETUPTOOLS_USE_DISTUTILS + SETUPTOOLS_ENFORCE_DEPRECATION PRE_BUILT_SETUPTOOLS_WHEEL PRE_BUILT_SETUPTOOLS_SDIST TIMEOUT_BACKEND_TEST # timeout (in seconds) for test_build_meta From 74445d3a555582f36750145d6e1b12ef46e4c9d7 Mon Sep 17 00:00:00 2001 From: Jean Abou Samra Date: Fri, 17 Nov 2023 21:25:15 +0100 Subject: [PATCH 0153/1761] Link to the new pyproject.toml guide the PUG The PUG now has a pyproject.toml guide, meant for users, distinct from the specification, meant for tool developers. --- docs/userguide/pyproject_config.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst index 103d10ede2..8f9d5f3745 100644 --- a/docs/userguide/pyproject_config.rst +++ b/docs/userguide/pyproject_config.rst @@ -29,8 +29,8 @@ be used with ``setuptools``. It contains two TOML tables (identified by the The ``build-system`` table is used to tell the build frontend (e.g. :pypi:`build` or :pypi:`pip`) to use ``setuptools`` and any other plugins (e.g. ``setuptools-scm``) to build the package. -The ``project`` table contains metadata fields as described by -:doc:`PyPUG:specifications/declaring-project-metadata` guide. +The ``project`` table contains metadata fields as described by the +:doc:`PyPUG:guides/writing-pyproject-toml` guide. .. 
_example-pyproject-config: @@ -67,8 +67,8 @@ The ``project`` table contains metadata fields as described by [project.scripts] my-script = "my_package.module:function" - # ... other project metadata fields as specified in: - # https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ + # ... other project metadata fields as listed in: + # https://packaging.python.org/en/latest/guides/writing-pyproject-toml/ .. _setuptools-table: From f2a45e8b6996a0e9c8af5a07df1cc01ae4d551c9 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 18 Nov 2023 15:23:50 -0500 Subject: [PATCH 0154/1761] Make the example pyproject.toml valid toml so it doesn't crash docs builds. Closes #4118. --- docs/userguide/ext_modules.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/userguide/ext_modules.rst b/docs/userguide/ext_modules.rst index 8c193856ac..eabc2c0ab3 100644 --- a/docs/userguide/ext_modules.rst +++ b/docs/userguide/ext_modules.rst @@ -143,7 +143,10 @@ your ``pyproject.toml``: .. code-block:: toml [build-system] - requires = [..., "cython"] + requires = [ + # ..., + "cython", + ] Alternatively, you can include the ``.c`` code that is pre-compiled by Cython into your source distribution, alongside the original ``.pyx`` files (this From 546e5b838a3972c3ad0671522f29f792e9278fe1 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 19 Nov 2023 11:26:41 -0500 Subject: [PATCH 0155/1761] Exclude setuptools.modified from test collection to avoid early import. --- conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/conftest.py b/conftest.py index 94d5cdd8b5..8dcf7e6805 100644 --- a/conftest.py +++ b/conftest.py @@ -37,6 +37,7 @@ def pytest_configure(config): 'setuptools/_vendor', 'pkg_resources/_vendor', 'setuptools/config/_validate_pyproject', + 'setuptools/modified.py', ] From 109c42603af395efde43ae92bf695692faa8a546 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 13 Oct 2023 11:19:27 +0100 Subject: [PATCH 0156/1761] Only pass `--build-option` to `bdist_wheel` in build_meta In https://github.com/pypa/setuptools/issues/2491#issuecomment-1742859314 the discussion seems to lead to the idea that it is better for now to avoid passing any `--build-option` for commands that are not `bdist_wheel` in `setuptools.build_meta`. --- setuptools/build_meta.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index 9267cf312f..e8d2866639 100644 --- a/setuptools/build_meta.py +++ b/setuptools/build_meta.py @@ -318,7 +318,6 @@ def _get_build_requires(self, config_settings, requirements): *sys.argv[:1], *self._global_args(config_settings), "egg_info", - *self._arbitrary_args(config_settings), ] try: with Distribution.patch(): @@ -406,6 +405,7 @@ def _build_with_temp_dir( # Build in a temporary directory, then copy to the target. 
os.makedirs(result_directory, exist_ok=True) temp_opts = {"prefix": ".tmp-", "dir": result_directory} + with tempfile.TemporaryDirectory(**temp_opts) as tmp_dist_dir: sys.argv = [ *sys.argv[:1], @@ -413,7 +413,6 @@ def _build_with_temp_dir( *setup_command, "--dist-dir", tmp_dist_dir, - *self._arbitrary_args(config_settings), ] with no_install_setup_requires(): self.run_setup() @@ -432,7 +431,10 @@ def build_wheel( ): with suppress_known_deprecation(): return self._build_with_temp_dir( - ['bdist_wheel'], '.whl', wheel_directory, config_settings + ['bdist_wheel', *self._arbitrary_args(config_settings)], + '.whl', + wheel_directory, + config_settings, ) def build_sdist(self, sdist_directory, config_settings=None): From 04671312ff01d35547ad17f859bd259e754d155e Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 13 Oct 2023 11:54:10 +0100 Subject: [PATCH 0157/1761] Remove no longer valid tests for config_settings and editable installs --- setuptools/tests/test_build_meta.py | 27 +-------------------------- 1 file changed, 1 insertion(+), 26 deletions(-) diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index f36119eb9c..5fb6dc5e50 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -706,25 +706,6 @@ def _assert_link_tree(self, parent_dir): for file in files: assert file.is_symlink() or os.stat(file).st_nlink > 0 - @pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning") - # Since the backend is running via a process pool, in some operating systems - # we may have problems to make assertions based on warnings/stdout/stderr... - # So the best is to ignore them for the time being. - def test_editable_with_global_option_still_works(self, tmpdir_cwd): - """The usage of --global-option is now discouraged in favour of --build-option. - This is required to make more sense of the provided scape hatch and align with - previous pip behaviour. See pypa/setuptools#1928. 
-        """
-        path.build({**self._simple_pyproject_example, '_meta': {}})
-        build_backend = self.get_build_backend()
-        assert not Path("build").exists()
-
-        cfg = {"--global-option": ["--mode", "strict"]}
-        build_backend.prepare_metadata_for_build_editable("_meta", cfg)
-        build_backend.build_editable("temp", cfg, "_meta")
-
-        self._assert_link_tree(next(Path("build").glob("__editable__.*")))
-
     def test_editable_without_config_settings(self, tmpdir_cwd):
         """
         Sanity check to ensure tests with --mode=strict are different from the ones
@@ -739,13 +720,7 @@ def test_editable_without_config_settings(self, tmpdir_cwd):
         build_backend.build_editable("temp")
         assert not Path("build").exists()
 
-    @pytest.mark.parametrize(
-        "config_settings",
-        [
-            {"--build-option": ["--mode", "strict"]},
-            {"editable-mode": "strict"},
-        ],
-    )
+    @pytest.mark.parametrize("config_settings", [{"editable-mode": "strict"}])
     def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
         path.build({**self._simple_pyproject_example, '_meta': {}})
         assert not Path("build").exists()

From b599a453a14af634ac00a776e8e06e709d3472d3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Mon, 20 Nov 2023 10:02:47 +0000
Subject: [PATCH 0158/1761] Add news fragment

---
 newsfragments/4079.removal.rst | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 newsfragments/4079.removal.rst

diff --git a/newsfragments/4079.removal.rst b/newsfragments/4079.removal.rst
new file mode 100644
index 0000000000..e3d779288c
--- /dev/null
+++ b/newsfragments/4079.removal.rst
@@ -0,0 +1,4 @@
+Removed handling of ``--config-settings["--build-option"]`` in ``setuptools.build_meta``
+from build-backend API hooks *other than* ``build_wheel``.
+This was motivated by `errors caused when passing this option
+`_.

From 218e1af2abbacb88377912e95bcce7ac6841dd5e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Mon, 20 Nov 2023 10:39:38 +0000
Subject: [PATCH 0159/1761] Update guides on datafiles

Attempts to solve common user doubts/problems and make it clear how to
use the configurations.

``pyproject.toml`` tabs were moved to be the 1st tab, following other
pages like the Quickstart guide.
---
 docs/userguide/datafiles.rst | 194 ++++++++++++++++++++---------------
 1 file changed, 112 insertions(+), 82 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 9bd2efd863..f641605778 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -30,6 +30,19 @@ For example, if the package tree looks like this::
 
 and you supply this configuration:
 
+.. tab:: pyproject.toml
+
+    .. code-block:: toml
+
+        [tool.setuptools]
+        # ...
+        # By default, include-package-data is true in pyproject.toml, so you do
+        # NOT have to specify this line.
+        include-package-data = true
+
+        [tool.setuptools.packages.find]
+        where = ["src"]
+
 .. tab:: setup.cfg
 
     .. code-block:: ini
@@ -56,19 +69,6 @@ and you supply this configuration:
 
         include_package_data=True
     )
 
-.. tab:: pyproject.toml
-
-    .. code-block:: toml
-
-        [tool.setuptools]
-        # ...
-        # By default, include-package-data is true in pyproject.toml, so you do
-        # NOT have to specify this line.
-        include-package-data = true
-
-        [tool.setuptools.packages.find]
-        where = ["src"]
-
 then all the ``.txt`` and ``.rst`` files will be automatically installed with
 your package, provided:
 
@@ -84,6 +84,14 @@ your package, provided:
   (See the section below on :ref:`Adding Support for Revision
   Control Systems` for information on how to write such plugins.)
 
+.. note::
+   .. versionadded:: v61.0.0
+      The default value for ``tool.setuptools.include-package-data`` is ``True``
+      when projects are configured via ``pyproject.toml``.
+      This behaviour differs from ``setup.cfg`` and ``setup.py``
+      (where ``include_package_data=False`` by default), which was not changed
+      to ensure backwards compatibility with existing projects.
+
 
 package_data
 ============
@@ -108,6 +116,16 @@ For example, if the package tree looks like this::
 then you can use the following configuration to capture the ``.txt`` and
 ``.rst`` files as data files:
 
+.. tab:: pyproject.toml
+
+    .. code-block:: toml
+
+        [tool.setuptools.packages.find]
+        where = ["src"]
+
+        [tool.setuptools.package-data]
+        mypkg = ["*.txt", "*.rst"]
+
 .. tab:: setup.cfg
 
     .. code-block:: ini
@@ -138,16 +156,6 @@ data files:
 
         package_data={"mypkg": ["*.txt", "*.rst"]}
     )
 
-.. tab:: pyproject.toml
-
-    .. code-block:: toml
-
-        [tool.setuptools.packages.find]
-        where = ["src"]
-
-        [tool.setuptools.package-data]
-        mypkg = ["*.txt", "*.rst"]
-
 The ``package_data`` argument is a dictionary that maps from package names to
 lists of glob patterns. Note that the data files specified using the ``package_data``
 option neither require to be included within a :ref:`MANIFEST.in `
 file, nor require to be added by a revision control system plugin.
 
@@ -158,9 +166,9 @@ file, nor require to be added by a revision control system plugin.
     the path separator, even if you are on Windows. Setuptools automatically
     converts slashes to appropriate platform-specific separators at build time.
 
-.. note::
-    Glob patterns do not automatically match dotfiles (directory or file names
-    starting with a dot (``.``)). To include such files, you must explicitly start
+.. important::
+    Glob patterns do not automatically match dotfiles, i.e., directory or file names
+    starting with a dot (``.``). To include such files, you must explicitly start
     the pattern with a dot, e.g. ``.*`` to match ``.gitignore``.
 
 If you have multiple top-level packages and a common pattern of data files for all these
 packages, for example::
@@ -181,6 +189,17 @@ Here, both packages ``mypkg1`` and ``mypkg2`` share a common pattern of having `
 data files. However, only ``mypkg1`` has ``.rst`` data files. In such a case, if you want to
 use the ``package_data`` option, the following configuration will work:
 
+.. tab:: pyproject.toml
+
+    .. code-block:: toml
+
+        [tool.setuptools.packages.find]
+        where = ["src"]
+
+        [tool.setuptools.package-data]
+        "*" = ["*.txt"]
+        mypkg1 = ["data1.rst"]
+
 .. tab:: setup.cfg
 
     .. code-block:: ini
@@ -211,28 +230,35 @@ use the ``package_data`` option, the following configuration will work:
 
         package_data={"": ["*.txt"], "mypkg1": ["data1.rst"]},
     )
 
-.. tab:: pyproject.toml
-
-    .. code-block:: toml
-
-        [tool.setuptools.packages.find]
-        where = ["src"]
-
-        [tool.setuptools.package-data]
-        "*" = ["*.txt"]
-        mypkg1 = ["data1.rst"]
-
 Notice that if you list patterns in ``package_data`` under the empty string ``""`` in
 ``setup.py``, and the asterisk ``*`` in ``setup.cfg`` and ``pyproject.toml``, these
 patterns are used to find files in every package. For example, we use ``""``
 or ``*`` to indicate that the ``.txt`` files from all packages should be
 captured as data files.
+These placeholders are treated as a special case, ``setuptools`` **does not**
+support glob patterns on package names for this configuration
+(patterns are only supported on the file paths).
 Also note how we can continue to specify patterns for individual packages, i.e.
 we specify that ``data1.rst`` from ``mypkg1`` alone should be captured as well.
 
 .. note::
-    When building an ``sdist``, the datafiles are also drawn from the
-    ``package_name.egg-info/SOURCES.txt`` file, so make sure that this is removed if
-    the ``setup.py`` ``package_data`` list is updated before calling ``setup.py``.
+    When building an ``sdist``, the data files are also drawn from the
+    ``package_name.egg-info/SOURCES.txt`` file which works as a form of cache.
+    So make sure that this file is removed if ``package_data`` is updated,
+    before re-building the package.
+
+.. attention::
+   In Python any directory is considered a package
+   (even if it does not contain ``__init__.py``,
+   see *native namespace packages* on :doc:`PyPUG:guides/packaging-namespace-packages`).
+   Therefore, if you are not relying on :doc:`automatic discovery `,
+   you *SHOULD* ensure that **all** packages (including the ones that don't
+   contain any Python files) are included in the ``packages`` configuration
+   (see :doc:`/userguide/package_discovery` for more information).
+
+   Moreover, it is advisable to use full package names using dot
+   notation instead of a nested path, to avoid error-prone configurations.
+   Please check :ref:`section subdirectories ` below.
+
 
 exclude_package_data
 ====================
@@ -250,6 +276,16 @@ Supposing you want to prevent these files from being included in the
 installation (they are not relevant to Python or the package), then you could
 use the ``exclude_package_data`` option:
 
+.. tab:: pyproject.toml
+
+    .. code-block:: toml
+
+        [tool.setuptools.packages.find]
+        where = ["src"]
+
+        [tool.setuptools.exclude-package-data]
+        mypkg = [".gitattributes"]
+
 .. tab:: setup.cfg
 
     .. code-block:: ini
@@ -281,16 +317,6 @@ use the ``exclude_package_data`` option:
 
         exclude_package_data={"mypkg": [".gitattributes"]},
     )
 
-.. tab:: pyproject.toml
-
-    .. code-block:: toml
-
-        [tool.setuptools.packages.find]
-        where = ["src"]
-
-        [tool.setuptools.exclude-package-data]
-        mypkg = [".gitattributes"]
-
 The ``exclude_package_data`` option is a dictionary mapping package names to
 lists of wildcard patterns, just like the ``package_data`` option.  And, just
 as with that option, you can use the empty string key ``""`` in ``setup.py`` and the
@@ -300,6 +326,9 @@ Any files that match these patterns will be *excluded* from installation,
 even if they were listed in ``package_data`` or
 were included as a result of using ``include_package_data``.
 
+
+.. _subdir-data-files:
+
 Subdirectory for Data Files
 ===========================
 
@@ -324,6 +353,21 @@ In this case, the recommended approach is to treat ``data`` as a namespace packa
 (refer :pep:`420`). With ``package_data``,
 the configuration might look like this:
 
+.. tab:: pyproject.toml
+
+    .. code-block:: toml
+
+        # Scanning for namespace packages in the ``src`` directory is true by
+        # default in pyproject.toml, so you do NOT need to include the
+        # `tool.setuptools.packages.find` if it looks like the following:
+        # [tool.setuptools.packages.find]
+        # namespaces = true
+        # where = ["src"]
+
+        [tool.setuptools.package-data]
+        mypkg = ["*.txt"]
+        "mypkg.data" = ["*.rst"]
+
 .. tab:: setup.cfg
 
     .. code-block:: ini
@@ -358,28 +402,30 @@ the configuration might look like this:
 
         }
     )
 
+In other words, we allow Setuptools to scan for namespace packages in the ``src`` directory,
+which enables the ``data`` directory to be identified, and then, we separately specify data
+files for the root package ``mypkg``, and the namespace package ``data`` under the package
+``mypkg``.
+
+With ``include_package_data`` the configuration is simpler: you simply need to enable
+scanning of namespace packages in the ``src`` directory and the rest is handled by Setuptools.
+
+.. tab:: pyproject.toml
+
+    .. code-block:: toml
+
+        [tool.setuptools]
+        # ...
+        # By default, include-package-data is true in pyproject.toml, so you do
+        # NOT have to specify this line.
+        include-package-data = true
+
+        [tool.setuptools.packages.find]
+        # scanning for namespace packages is true by default in pyproject.toml, so
+        # you need NOT include the following line.
+        namespaces = true
+        where = ["src"]
+
 .. tab:: setup.cfg
 
     .. code-block:: ini
@@ -405,22 +451,6 @@ scanning of namespace packages in the ``src`` directory and the rest is handled
 
         include_package_data=True,
     )
 
-.. tab:: pyproject.toml
-
-    .. code-block:: toml
-
-        [tool.setuptools]
-        # ...
-        # By default, include-package-data is true in pyproject.toml, so you do
-        # NOT have to specify this line.
-        include-package-data = true
-
-        [tool.setuptools.packages.find]
-        # scanning for namespace packages is true by default in pyproject.toml, so
-        # you need NOT include the following line.
-        namespaces = true
-        where = ["src"]
-
 Summary
 =======
@@ -444,11 +474,11 @@ In summary, the three options allow you to:
 
 .. note::
    Due to the way the build process works, a data file that you include in your
    project and then stop including may be "orphaned" in your
-   project's build directories, requiring you to run ``setup.py clean --all`` to
-   fully remove them. This may also be important for your users and contributors
+   project's build directories, requiring you to manually delete them.
+   This may also be important for your users and contributors
    if they track intermediate revisions of your project using Subversion; be sure
    to let them know when you make changes that remove files from inclusion so they
-   can run ``setup.py clean --all``.
+   can also manually delete them.
 
 .. _Accessing Data Files at Runtime:

From ad4c4a3cdeb453b72d431b58bcb2df9a118879ff Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Mon, 20 Nov 2023 10:58:43 +0000
Subject: [PATCH 0160/1761] Add note about using namespace packages for data
 files

---
 docs/userguide/datafiles.rst | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index f641605778..2e37289d5f 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -548,6 +548,20 @@ See :doc:`importlib-resources:using` for detailed instructions.
    pre-existing file is found.
 
 
+Data Files from Plugins and Extensions
+======================================
+
+You can resort to a :doc:`native/implicit namespace package
+` (as a container for files)
+if you want plugins and extensions to your package to contribute package data files.
+This way, all files will be listed during runtime
+when :doc:`using importlib.resources `.
+Note that, although not strictly guaranteed, mainstream Python package managers,
+like :pypi:`pip` and derived tools, will install files belonging to multiple distributions
+that share the same namespace into the same directory in the file system.
+This means that the overhead for :mod:`importlib.resources` will be minimal.
+
+
 Non-Package Data Files
 ======================
 

From 7676a4365a49d6f7ab094666ed076f9237764eb8 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Mon, 20 Nov 2023 11:14:11 +0000
Subject: [PATCH 0161/1761] Add note about dynamic configs via attr and
 imports

---
 docs/userguide/declarative_config.rst | 22 +++++++++++++++++-----
 docs/userguide/pyproject_config.rst   | 14 ++++++++++++++
 2 files changed, 31 insertions(+), 5 deletions(-)

diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst
index fa104b10e3..047e08c6ef 100644
--- a/docs/userguide/declarative_config.rst
+++ b/docs/userguide/declarative_config.rst
@@ -155,13 +155,25 @@ Type names used below:
 
 Special directives:
 
-* ``attr:`` - Value is read from a module attribute. ``attr:`` supports
-  callables and iterables; unsupported types are cast using ``str()``.
+* ``attr:`` - Value is read from a module attribute.
+
+  It is advisable to use literal values together with ``attr:`` (e.g. ``str``,
+  ``tuple[str]``, see :func:`ast.literal_eval`). This is recommended
+  in order to support the common case of a literal value assigned to a variable
+  in a module containing (directly or indirectly) third-party imports.
 
-  In order to support the common case of a literal value assigned to a variable
-  in a module containing (directly or indirectly) third-party imports,
   ``attr:`` first tries to read the value from the module by examining the
-  module's AST. If that fails, ``attr:`` falls back to importing the module.
+  module's AST. If that fails, ``attr:`` falls back to importing the module,
+  using the :func:`importlib.util.spec_from_file_location` recommended recipe
+  (see :ref:`example on Python docs `
+  about "Importing a source file directly").
+  Note however that importing the module is error prone since your package is
+  not installed yet. You may also need to manually add the project directory to
+  ``sys.path`` (via ``setup.py``) in order to be able to do that.
+
+  When the module is imported, ``attr:`` supports
+  callables and iterables; unsupported types are cast using ``str()``.
+
 
 * ``file:`` - Value is read from a list of files and then concatenated
diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 8f9d5f3745..2529bf1ba8 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -242,6 +242,20 @@ however please keep in mind that all non-comment lines must conform with :pep:`5
 .. versionchanged:: 66.1.0
    Newer versions of ``setuptools`` will automatically add these files to the ``sdist``.
 
+It is advisable to use literal values together with ``attr`` (e.g. ``str``,
+``tuple[str]``, see :func:`ast.literal_eval`). This is recommended
+in order to support the common case of a literal value assigned to a variable
+in a module containing (directly or indirectly) third-party imports.
+
+``attr`` first tries to read the value from the module by examining the
+module's AST. If that fails, ``attr`` falls back to importing the module,
+using the :func:`importlib.util.spec_from_file_location` recommended recipe
+(see :ref:`example on Python docs `
+about "Importing a source file directly").
+Note however that importing the module is error prone since your package is +not installed yet. You may also need to manually add the project directory to +``sys.path`` (via ``setup.py``) in order to be able to do that. + ---- .. rubric:: Notes From 11c7ece67a321b1e51eaa07c687c0cf69f2539f9 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 23 Sep 2023 22:51:47 +0200 Subject: [PATCH 0162/1761] =?UTF-8?q?io.open()=20=E2=86=92=20open()?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In Python 3, io.open() is an alias for the builtin open() function. https://docs.python.org/3/library/io.html#io.open This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#open-alias --- pkg_resources/__init__.py | 2 +- setuptools/command/build_py.py | 3 +-- setuptools/command/develop.py | 3 +-- setuptools/command/easy_install.py | 2 +- setuptools/command/sdist.py | 3 +-- setuptools/config/expand.py | 3 +-- setuptools/dist.py | 2 +- setuptools/tests/test_core_metadata.py | 2 +- setuptools/tests/test_install_scripts.py | 9 ++++----- setuptools/tests/test_sdist.py | 2 +- setuptools/tests/test_setopt.py | 5 ++--- 11 files changed, 15 insertions(+), 21 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 3baa1f3c24..9fd419efce 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -2001,7 +2001,7 @@ def get_metadata(self, name): if name != 'PKG-INFO': raise KeyError("No metadata except PKG-INFO is available") - with io.open(self.path, encoding='utf-8', errors="replace") as f: + with open(self.path, encoding='utf-8', errors="replace") as f: metadata = f.read() self._warn_on_replacement(metadata) return metadata diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 242d60f011..cbdd05aab0 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -5,7 +5,6 @@ import os import fnmatch import textwrap -import io import distutils.errors import itertools import stat @@ -249,7 +248,7 @@ def check_package(self, package, package_dir): else: return init_py - with io.open(init_py, 'rb') as f: + with open(init_py, 'rb') as f: contents = f.read() if b'declare_namespace' not in contents: raise distutils.errors.DistutilsError( diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py index 27bcf9eb5b..ea3e48e55c 100644 --- a/setuptools/command/develop.py +++ b/setuptools/command/develop.py @@ -3,7 +3,6 @@ from distutils.errors import DistutilsOptionError import os import glob -import io from setuptools.command.easy_install import easy_install from setuptools import _path @@ -154,7 +153,7 @@ def install_egg_scripts(self, dist): for script_name in self.distribution.scripts or []: script_path = os.path.abspath(convert_path(script_name)) script_name = os.path.basename(script_path) - with io.open(script_path) as strm: + with open(script_path) as strm: script_text = strm.read() self.install_script(dist, script_name, script_text, script_path) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 8ba4f094de..6d42047d7f 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -1995,7 +1995,7 @@ def is_python(text, filename=''): def is_sh(executable): """Determine if the specified executable is a .sh (contains a #! 
line)""" try: - with io.open(executable, encoding='latin-1') as fp: + with open(executable, encoding='latin-1') as fp: magic = fp.read(2) except (OSError, IOError): return executable diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index c04823c159..5f45fb5dee 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -2,7 +2,6 @@ import distutils.command.sdist as orig import os import sys -import io import contextlib from itertools import chain @@ -189,7 +188,7 @@ def _manifest_is_not_generated(self): if not os.path.isfile(self.manifest): return False - with io.open(self.manifest, 'rb') as fp: + with open(self.manifest, 'rb') as fp: first_line = fp.readline() return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode() diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 518f5ac260..1bc71de546 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -19,7 +19,6 @@ """ import ast import importlib -import io import os import pathlib import sys @@ -147,7 +146,7 @@ def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]: def _read_file(filepath: Union[bytes, _Path]) -> str: - with io.open(filepath, encoding='utf-8') as f: + with open(filepath, encoding='utf-8') as f: return f.read() diff --git a/setuptools/dist.py b/setuptools/dist.py index 2672f928d5..6e31bd7c27 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -482,7 +482,7 @@ def _parse_config_files(self, filenames=None): # noqa: C901 parser = ConfigParser() parser.optionxform = str for filename in filenames: - with io.open(filename, encoding='utf-8') as reader: + with open(filename, encoding='utf-8') as reader: if DEBUG: self.announce(" reading {filename}".format(**locals())) parser.read_file(reader) diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py index 14fa508a88..fe9d4c5c31 100644 --- a/setuptools/tests/test_core_metadata.py +++ b/setuptools/tests/test_core_metadata.py @@ -286,7 +286,7 @@ def test_maintainer_author(name, attrs, tmpdir): dist.metadata.write_pkg_info(fn_s) - with io.open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f: + with open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f: pkg_info = f.read() assert _valid_metadata(pkg_info) diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py index 4338c792f1..c1ca7af3a1 100644 --- a/setuptools/tests/test_install_scripts.py +++ b/setuptools/tests/test_install_scripts.py @@ -1,7 +1,6 @@ """install_scripts tests """ -import io import sys import pytest @@ -43,7 +42,7 @@ def test_sys_executable_escaping_unix(self, tmpdir, monkeypatch): monkeypatch.setattr('sys.executable', self.unix_exe) with tmpdir.as_cwd(): self._run_install_scripts(str(tmpdir)) - with io.open(str(tmpdir.join('foo')), 'r') as f: + with open(str(tmpdir.join('foo')), 'r') as f: actual = f.readline() assert actual == expected @@ -57,7 +56,7 @@ def test_sys_executable_escaping_win32(self, tmpdir, monkeypatch): monkeypatch.setattr('sys.executable', self.win32_exe) with tmpdir.as_cwd(): self._run_install_scripts(str(tmpdir)) - with io.open(str(tmpdir.join('foo-script.py')), 'r') as f: + with open(str(tmpdir.join('foo-script.py')), 'r') as f: actual = f.readline() assert actual == expected @@ -71,7 +70,7 @@ def test_executable_with_spaces_escaping_unix(self, tmpdir): expected = '#!%s\n' % self.unix_spaces_exe with tmpdir.as_cwd(): self._run_install_scripts(str(tmpdir), self.unix_spaces_exe) - with 
io.open(str(tmpdir.join('foo')), 'r') as f: + with open(str(tmpdir.join('foo')), 'r') as f: actual = f.readline() assert actual == expected @@ -85,6 +84,6 @@ def test_executable_arg_escaping_win32(self, tmpdir): expected = '#!"%s"\n' % self.win32_exe with tmpdir.as_cwd(): self._run_install_scripts(str(tmpdir), '"' + self.win32_exe + '"') - with io.open(str(tmpdir.join('foo-script.py')), 'r') as f: + with open(str(tmpdir.join('foo-script.py')), 'r') as f: actual = f.readline() assert actual == expected diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py index 2cd7482792..ead2f34e91 100644 --- a/setuptools/tests/test_sdist.py +++ b/setuptools/tests/test_sdist.py @@ -86,7 +86,7 @@ def decompose(path): def read_all_bytes(filename): - with io.open(filename, 'rb') as fp: + with open(filename, 'rb') as fp: return fp.read() diff --git a/setuptools/tests/test_setopt.py b/setuptools/tests/test_setopt.py index 3600863214..ccf25618a5 100644 --- a/setuptools/tests/test_setopt.py +++ b/setuptools/tests/test_setopt.py @@ -1,4 +1,3 @@ -import io import configparser from setuptools.command import setopt @@ -8,13 +7,13 @@ class TestEdit: @staticmethod def parse_config(filename): parser = configparser.ConfigParser() - with io.open(filename, encoding='utf-8') as reader: + with open(filename, encoding='utf-8') as reader: parser.read_file(reader) return parser @staticmethod def write_text(file, content): - with io.open(file, 'wb') as strm: + with open(file, 'wb') as strm: strm.write(content.encode('utf-8')) def test_utf8_encoding_retained(self, tmpdir): From 0a640d7c57be28005f5523c355e7e954a29a0472 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 23 Sep 2023 22:57:53 +0200 Subject: [PATCH 0163/1761] =?UTF-8?q?IOError=20=E2=86=92=20OSError?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In Python ≥ 3.3, IOError is an alias of OSError. 
https://docs.python.org/3/library/exceptions.html#IOError This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#oserror-aliases --- pkg_resources/__init__.py | 4 ++-- setuptools/command/easy_install.py | 8 ++++---- setuptools/msvc.py | 14 +++++++------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 9fd419efce..0d92832583 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -1874,7 +1874,7 @@ def _extract_resource(self, manager, zip_path): # noqa: C901 timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not WRITE_SUPPORT: - raise IOError( + raise OSError( '"os.rename" and "os.unlink" are not supported ' 'on this platform' ) try: @@ -3243,7 +3243,7 @@ def ensure_directory(path): def _bypass_ensure_directory(path): """Sandbox-bypassing version of ensure_directory()""" if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') + raise OSError('"os.mkdir" not supported on this platform.') dirname, filename = split(path) if dirname and filename and not isdir(dirname): _bypass_ensure_directory(dirname) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 6d42047d7f..5d6fd5ca71 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -480,7 +480,7 @@ def check_site_dir(self): # noqa: C901 # is too complex (12) # FIXME if not os.path.exists(instdir): try: os.makedirs(instdir) - except (OSError, IOError): + except OSError: self.cant_write_to_target() # Is it a configured, PYTHONPATH, implicit, or explicit site dir? @@ -498,7 +498,7 @@ def check_site_dir(self): # noqa: C901 # is too complex (12) # FIXME os.unlink(testfile) open(testfile, 'w').close() os.unlink(testfile) - except (OSError, IOError): + except OSError: self.cant_write_to_target() if not is_site_dir and not self.multi_version: @@ -594,7 +594,7 @@ def check_pth_processing(self): dirname = os.path.dirname(ok_file) os.makedirs(dirname, exist_ok=True) f = open(pth_file, 'w') - except (OSError, IOError): + except OSError: self.cant_write_to_target() else: try: @@ -1997,7 +1997,7 @@ def is_sh(executable): try: with open(executable, encoding='latin-1') as fp: magic = fp.read(2) - except (OSError, IOError): + except OSError: return executable return magic == '#!' 
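A quick sanity check of the alias claim above -- a minimal sketch, runnable on any
Python ≥ 3.3 interpreter and independent of the patch itself:

    # IOError (and socket.error, EnvironmentError) collapsed into OSError in
    # Python 3.3, so `except (OSError, IOError)` is redundant with `except OSError`.
    import socket

    assert IOError is OSError
    assert EnvironmentError is OSError
    assert socket.error is OSError

    try:
        open('/nonexistent/path')
    except OSError as exc:  # also catches what used to be raised as IOError
        print(type(exc).__name__, exc)
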
diff --git a/setuptools/msvc.py b/setuptools/msvc.py index 5785c16945..a910a64b68 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -567,17 +567,17 @@ def lookup(self, key, name): bkey = None try: bkey = openkey(hkey, ms(key), 0, key_read) - except (OSError, IOError): + except OSError: if not self.pi.current_is_x86(): try: bkey = openkey(hkey, ms(key, True), 0, key_read) - except (OSError, IOError): + except OSError: continue else: continue try: return winreg.QueryValueEx(bkey, name)[0] - except (OSError, IOError): + except OSError: pass finally: if bkey: @@ -646,7 +646,7 @@ def find_reg_vs_vers(self): for hkey, key in itertools.product(self.ri.HKEYS, vckeys): try: bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ) - except (OSError, IOError): + except OSError: continue with bkey: subkeys, values, _ = winreg.QueryInfoKey(bkey) @@ -678,7 +678,7 @@ def find_programdata_vs_vers(self): try: hashed_names = listdir(instances_dir) - except (OSError, IOError): + except OSError: # Directory not exists with all Visual Studio versions return vs_versions @@ -698,7 +698,7 @@ def find_programdata_vs_vers(self): self._as_float_version(state['installationVersion']) ] = vs_path - except (OSError, IOError, KeyError): + except (OSError, KeyError): # Skip if "state.json" file is missing or bad format continue @@ -784,7 +784,7 @@ def _guess_vc(self): vc_ver = listdir(guess_vc)[-1] self.vc_ver = self._as_float_version(vc_ver) return join(guess_vc, vc_ver) - except (OSError, IOError, IndexError): + except (OSError, IndexError): return '' def _guess_vc_legacy(self): From 5069f6c1ebfa2cfc70382c6505aded7538618904 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 23 Sep 2023 23:06:30 +0200 Subject: [PATCH 0164/1761] "yield from", instead of "yield" in a loop This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#yield--yield-from --- pkg_resources/__init__.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 0d92832583..ab6afe955d 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -2095,8 +2095,7 @@ def find_eggs_in_zip(importer, path_item, only=False): if _is_egg_path(subitem): subpath = os.path.join(path_item, subitem) dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) - for dist in dists: - yield dist + yield from dists elif subitem.lower().endswith(('.dist-info', '.egg-info')): subpath = os.path.join(path_item, subitem) submeta = EggMetadata(zipimport.zipimporter(subpath)) @@ -2131,8 +2130,7 @@ def find_on_path(importer, path_item, only=False): for entry in sorted(entries): fullpath = os.path.join(path_item, entry) factory = dist_factory(path_item, entry, only) - for dist in factory(fullpath): - yield dist + yield from factory(fullpath) def dist_factory(path_item, entry, only): @@ -2850,8 +2848,7 @@ def _get_metadata_path_for_display(self, name): def _get_metadata(self, name): if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line + yield from self.get_metadata_lines(name) def _get_version(self): lines = self._get_metadata(self.PKG_INFO) From 7a8511043ce663cbf25f464ba166435fe83c2747 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 17 Nov 2023 13:27:31 +0100 Subject: [PATCH 0165/1761] =?UTF-8?q?socket.error=20=E2=86=92=20OSError?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit In Python ≥ 3.3, socket.error is an alias of OSError. https://docs.python.org/3/library/socket.html#socket.error This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#oserror-aliases --- setuptools/command/upload_docs.py | 3 +-- setuptools/package_index.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py index 27c98b7c30..32c9abd796 100644 --- a/setuptools/command/upload_docs.py +++ b/setuptools/command/upload_docs.py @@ -8,7 +8,6 @@ from distutils import log from distutils.errors import DistutilsOptionError import os -import socket import zipfile import tempfile import shutil @@ -201,7 +200,7 @@ def upload_file(self, filename): conn.putheader('Authorization', auth) conn.endheaders() conn.send(body) - except socket.error as e: + except OSError as e: self.announce(str(e), log.ERROR) return diff --git a/setuptools/package_index.py b/setuptools/package_index.py index 7095585008..3cedd5105c 100644 --- a/setuptools/package_index.py +++ b/setuptools/package_index.py @@ -806,7 +806,7 @@ def open_url(self, url, warning=None): # noqa: C901 # is too complex (12) '%s returned a bad status line. The server might be ' 'down, %s' % (url, v.line) ) from v - except (http.client.HTTPException, socket.error) as v: + except (http.client.HTTPException, OSError) as v: if warning: self.warn(warning, v) else: From 3aec0a3dbeb471e8631478e1fbf0436e54b0cf95 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 17 Nov 2023 20:59:05 +0100 Subject: [PATCH 0166/1761] =?UTF-8?q?@functools.lru=5Fcache()=20=E2=86=92?= =?UTF-8?q?=20@functools.lru=5Fcache?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Starting with Python ≥ 3.8, lru_cache can be used as a straight decorator with no arguments: https://bugs.python.org/issue36772 This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#remove-parentheses-from-functoolslru_cache --- tools/build_launchers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/build_launchers.py b/tools/build_launchers.py index 26d3913e00..8d832b9c24 100644 --- a/tools/build_launchers.py +++ b/tools/build_launchers.py @@ -86,7 +86,7 @@ def build_cmake_project_with_msbuild(build_arena, msbuild_parameters): subprocess.check_call(cmd, cwd=build_arena) -@functools.lru_cache() +@functools.lru_cache def get_cmake(): """Find CMake using registry.""" import winreg @@ -96,7 +96,7 @@ def get_cmake(): return root / 'bin\\CMake.exe' -@functools.lru_cache() +@functools.lru_cache def get_msbuild(): """Use VSWhere to find MSBuild.""" vswhere = pathlib.Path( From 7c111b812d5ee0916b1a15cc956f3099f3cd19f6 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 20 Nov 2023 13:04:08 +0000 Subject: [PATCH 0167/1761] Use InvalidConfigError instead of ValueError in build_meta --- setuptools/build_meta.py | 2 +- setuptools/tests/test_build_meta.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index 3696658f4d..b8bc80a4de 100644 --- a/setuptools/build_meta.py +++ b/setuptools/build_meta.py @@ -301,7 +301,7 @@ def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]: if bad_args: msg = f"Incompatible `config_settings`: {bad_args!r} ({config_settings!r})" - raise ValueError(msg) + raise errors.InvalidConfigError(msg) class 
_BuildMetaBackend(_ConfigSettingsTranslator):
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 778aedf573..22e3b4303a 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -901,7 +901,8 @@ def test_build_with_empty_setuppy(self, build_backend, build_hook):
         files = {'setup.py': ''}
         path.build(files)

-        with pytest.raises(ValueError, match=re.escape('No distribution was found.')):
+        msg = re.escape('No distribution was found.')
+        with pytest.raises(ValueError, match=msg):
             getattr(build_backend, build_hook)("temp")


From f43196216b38133848f9df2db9229ad864910900 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Mon, 20 Nov 2023 13:10:02 +0000
Subject: [PATCH 0168/1761] Remove deprecated handling of build-option passed
 as global-option

---
 setuptools/build_meta.py | 18 ++----------------
 1 file changed, 2 insertions(+), 16 deletions(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index b8bc80a4de..4a60397d69 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -284,25 +284,11 @@ def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
         ['foo']
         >>> list(fn({'--build-option': 'foo bar'}))
         ['foo', 'bar']
-        >>> list(fn({'--global-option': 'foo'})) # doctest: +IGNORE_EXCEPTION_DETAIL
-        Traceback (most recent call last):
-        ValueError: Incompatible .config_settings. ...'foo'...
+        >>> list(fn({'--global-option': 'foo'}))
+        []
         """
-        args = self._get_config("--global-option", config_settings)
-        global_opts = self._valid_global_options()
-        bad_args = []
-
-        for arg in args:
-            if arg.strip("-") not in global_opts:
-                bad_args.append(arg)
-                yield arg
-
         yield from self._get_config("--build-option", config_settings)

-        if bad_args:
-            msg = f"Incompatible `config_settings`: {bad_args!r} ({config_settings!r})"
-            raise errors.InvalidConfigError(msg)
-

 class _BuildMetaBackend(_ConfigSettingsTranslator):
     def _get_build_requires(self, config_settings, requirements):

From a5a75051eeef2e3b3dc938fb246bef3f6bbc6ca7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Mon, 20 Nov 2023 13:12:38 +0000
Subject: [PATCH 0169/1761] Relax validation of --global-option in build_meta

In `pip` docs, the following example is mentioned:

> python -m pip wheel --global-option bdist_ext --global-option -DFOO wheel

Since both `--global-option` and `--build-option` are supposed to be
compatible with `pip` options, we should not prevent this abuse...
(Although it is still error-prone, and the user is responsible for
understanding in detail what is going on and how this accidentally works).
---
 setuptools/build_meta.py | 9 +--------
 1 file changed, 1 insertion(+), 8 deletions(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 4a60397d69..ff1d7eaee2 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -185,11 +185,6 @@ def _get_config(self, key: str, config_settings: _ConfigSettings) -> List[str]:
         opts = cfg.get(key) or []
         return shlex.split(opts) if isinstance(opts, str) else opts

-    def _valid_global_options(self):
-        """Global options accepted by setuptools (e.g. quiet or verbose)."""
quiet or verbose).""" - options = (opt[:2] for opt in setuptools.dist.Distribution.global_options) - return {flag for long_and_short in options for flag in long_and_short if flag} - def _global_args(self, config_settings: _ConfigSettings) -> Iterator[str]: """ Let the user specify ``verbose`` or ``quiet`` + escape hatch via @@ -220,9 +215,7 @@ def _global_args(self, config_settings: _ConfigSettings) -> Iterator[str]: level = str(cfg.get("quiet") or cfg.get("--quiet") or "1") yield ("-v" if level.lower() in falsey else "-q") - valid = self._valid_global_options() - args = self._get_config("--global-option", config_settings) - yield from (arg for arg in args if arg.strip("-") in valid) + yield from self._get_config("--global-option", config_settings) def __dist_info_args(self, config_settings: _ConfigSettings) -> Iterator[str]: """ From beb7931bc9e0b362c1256b647394fc567456d5de Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 20 Nov 2023 15:16:37 +0000 Subject: [PATCH 0170/1761] Add newsfragment --- newsfragments/4094.bugfix.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 newsfragments/4094.bugfix.rst diff --git a/newsfragments/4094.bugfix.rst b/newsfragments/4094.bugfix.rst new file mode 100644 index 0000000000..8c73eaedbf --- /dev/null +++ b/newsfragments/4094.bugfix.rst @@ -0,0 +1,2 @@ +Replaced hardcoded numeric values with :obj:`dis.opmap`, +fixing problem with 3.13.0a1. From 6d952d71ff76971c75a7ecc09ee184813740d536 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 20 Nov 2023 15:48:11 +0000 Subject: [PATCH 0171/1761] Mark flaky test on with xfail --- setuptools/tests/test_logging.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py index 39e67bab38..9381ab14c2 100644 --- a/setuptools/tests/test_logging.py +++ b/setuptools/tests/test_logging.py @@ -1,10 +1,14 @@ import inspect import logging import os +import sys import pytest +IS_PYPY = '__pypy__' in sys.builtin_module_names + + setup_py = """\ from setuptools import setup @@ -38,16 +42,29 @@ def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level): assert log_level_name == expected_level +def _flaky_on_pypy(func): + try: + func() + except AssertionError: + if IS_PYPY: + msg = "Flaky monkeypatch on PyPy" + pytest.xfail(f"{msg}. Original discussion in #3707, #3709.") + raise + + +@_flaky_on_pypy def test_patching_does_not_cause_problems(): # Ensure `dist.log` is only patched if necessary + import _distutils_hack import setuptools.logging from distutils import dist setuptools.logging.configure() - if os.getenv("SETUPTOOLS_USE_DISTUTILS", "local").lower() == "local": + if _distutils_hack.enabled(): # Modern logging infra, no problematic patching. 
+ assert dist.__file__ is None or "setuptools" in dist.__file__ assert isinstance(dist.log, logging.Logger) else: assert inspect.ismodule(dist.log) From 73165026fd11fff521be8b5dea19440e8f9e103e Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 20 Nov 2023 16:11:21 +0000 Subject: [PATCH 0172/1761] Add pragma for xfail condition --- setuptools/tests/test_logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py index 9381ab14c2..e28a050ead 100644 --- a/setuptools/tests/test_logging.py +++ b/setuptools/tests/test_logging.py @@ -45,7 +45,7 @@ def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level): def _flaky_on_pypy(func): try: func() - except AssertionError: + except AssertionError: # pragma: no cover if IS_PYPY: msg = "Flaky monkeypatch on PyPy" pytest.xfail(f"{msg}. Original discussion in #3707, #3709.") From 2a3902a7e57826849738e2dec57bd288f6f07a98 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 20 Nov 2023 16:13:00 +0000 Subject: [PATCH 0173/1761] Add reference to PR number in xfail message --- setuptools/tests/test_logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py index e28a050ead..fb369dd9e3 100644 --- a/setuptools/tests/test_logging.py +++ b/setuptools/tests/test_logging.py @@ -47,7 +47,7 @@ def _flaky_on_pypy(func): func() except AssertionError: # pragma: no cover if IS_PYPY: - msg = "Flaky monkeypatch on PyPy" + msg = "Flaky monkeypatch on PyPy (#4124)" pytest.xfail(f"{msg}. Original discussion in #3707, #3709.") raise From 6173290acd32d8a395e62b9c69745ab5a710816a Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 20 Nov 2023 16:18:06 +0000 Subject: [PATCH 0174/1761] Fix lint error --- setuptools/tests/test_logging.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py index fb369dd9e3..eab30b9d14 100644 --- a/setuptools/tests/test_logging.py +++ b/setuptools/tests/test_logging.py @@ -1,6 +1,5 @@ import inspect import logging -import os import sys import pytest From e4bca36f45f9fc9ebb445d783a8a62c00ad7a6f1 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 20 Nov 2023 16:24:55 +0000 Subject: [PATCH 0175/1761] Add missing contextmanager to decorator --- setuptools/tests/test_logging.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py index eab30b9d14..beff309362 100644 --- a/setuptools/tests/test_logging.py +++ b/setuptools/tests/test_logging.py @@ -1,6 +1,7 @@ import inspect import logging import sys +from contextlib import contextmanager import pytest @@ -41,7 +42,8 @@ def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level): assert log_level_name == expected_level -def _flaky_on_pypy(func): +@contextmanager +def flaky_on_pypy(func): try: func() except AssertionError: # pragma: no cover @@ -51,7 +53,7 @@ def _flaky_on_pypy(func): raise -@_flaky_on_pypy +@flaky_on_pypy def test_patching_does_not_cause_problems(): # Ensure `dist.log` is only patched if necessary From f53d7ad7018905a4a1fbe4ac1e13ba79434922c5 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 20 Nov 2023 16:33:40 +0000 Subject: [PATCH 0176/1761] Use wrapper for decorator instead of contextlib --- setuptools/tests/test_logging.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) 
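Background for the decorator churn in the last few patches: ``contextlib.contextmanager``
produces a context-manager *factory*, so applying it directly as a test decorator does
not leave a plain function behind. A minimal sketch of the pitfall being fixed here
(``flaky``/``test_demo`` are illustrative names only):

    import contextlib

    @contextlib.contextmanager
    def flaky(func):   # written as if it were a plain wrapper, note: no yield
        try:
            func()
        except AssertionError:
            pass

    @flaky             # binds test_demo to a _GeneratorContextManager;
    def test_demo():   # the body already ran, at decoration time
        print("ran at import, not at test time")

    print(type(test_demo))  # <class 'contextlib._GeneratorContextManager'>,
                            # so pytest no longer treats it as a test function

The ``functools.wraps`` version in the diff below restores a real callable and
preserves the test's metadata.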
diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py index beff309362..7a9a33f1ea 100644 --- a/setuptools/tests/test_logging.py +++ b/setuptools/tests/test_logging.py @@ -1,7 +1,7 @@ +import functools import inspect import logging import sys -from contextlib import contextmanager import pytest @@ -42,15 +42,18 @@ def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level): assert log_level_name == expected_level -@contextmanager def flaky_on_pypy(func): - try: - func() - except AssertionError: # pragma: no cover - if IS_PYPY: - msg = "Flaky monkeypatch on PyPy (#4124)" - pytest.xfail(f"{msg}. Original discussion in #3707, #3709.") - raise + @functools.wraps(func) + def _func(): + try: + func() + except AssertionError: # pragma: no cover + if IS_PYPY: + msg = "Flaky monkeypatch on PyPy (#4124)" + pytest.xfail(f"{msg}. Original discussion in #3707, #3709.") + raise + + return _func @flaky_on_pypy From 536d4a82059747873cab87654567a721489e1cd3 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 20 Nov 2023 17:26:14 +0000 Subject: [PATCH 0177/1761] =?UTF-8?q?Bump=20version:=2068.2.2=20=E2=86=92?= =?UTF-8?q?=2069.0.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 48 +++++++++++++++++++++++++++++ newsfragments/+f8383dcd.feature.rst | 1 - newsfragments/3136.feature.rst | 2 -- newsfragments/4066.removal.1.rst | 2 -- newsfragments/4066.removal.2.rst | 4 --- newsfragments/4066.removal.3.rst | 4 --- newsfragments/4066.removal.4.rst | 2 -- newsfragments/4066.removal.5.rst | 4 --- newsfragments/4066.removal.6.rst | 4 --- newsfragments/4069.feature.rst | 1 - newsfragments/4079.removal.rst | 4 --- newsfragments/4094.bugfix.rst | 2 -- setup.cfg | 2 +- 14 files changed, 50 insertions(+), 32 deletions(-) delete mode 100644 newsfragments/+f8383dcd.feature.rst delete mode 100644 newsfragments/3136.feature.rst delete mode 100644 newsfragments/4066.removal.1.rst delete mode 100644 newsfragments/4066.removal.2.rst delete mode 100644 newsfragments/4066.removal.3.rst delete mode 100644 newsfragments/4066.removal.4.rst delete mode 100644 newsfragments/4066.removal.5.rst delete mode 100644 newsfragments/4066.removal.6.rst delete mode 100644 newsfragments/4069.feature.rst delete mode 100644 newsfragments/4079.removal.rst delete mode 100644 newsfragments/4094.bugfix.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 9e6872031e..420d8297a2 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 68.2.2 +current_version = 69.0.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 295b7b8ce6..96231696f0 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,51 @@ +v69.0.0 +======= + +Features +-------- + +- Include type information (``py.typed``, ``*.pyi``) by default (#3136) -- by :user:`Danie-1`, + **EXPERIMENTAL**. (#3136) +- Exported ``distutils.dep_util`` and ``setuptools.dep_util`` through ``setuptools.modified`` -- by :user:`Avasam` (#4069) +- Merged with pypa/distutils@7a04cbda0fc714. + + +Bugfixes +-------- + +- Replaced hardcoded numeric values with :obj:`dis.opmap`, + fixing problem with 3.13.0a1. (#4094) + + +Deprecations and Removals +------------------------- + +- Configuring project ``version`` and ``egg_info.tag_*`` in such a way that + results in invalid version strings (according to :pep:`440`) is no longer permitted. (#4066) +- Removed deprecated ``egg_base`` option from ``dist_info``. 
+ Note that the ``dist_info`` command is considered internal to the way + ``setuptools`` build backend works and not intended for + public usage. (#4066) +- The parsing of the deprecated ``metadata.license_file`` and + ``metadata.requires`` fields in ``setup.cfg`` is no longer supported. + Users are expected to move to ``metadata.license_files`` and + ``options.install_requires`` (respectively). (#4066) +- Passing ``config_settings`` to ``setuptools.build_meta`` with + deprecated values for ``--global-option`` is no longer allowed. (#4066) +- Removed deprecated ``namespace-packages`` from ``pyproject.toml``. + Users are asked to use + :doc:`implicit namespace packages ` + (as defined in :pep:`420`). (#4066) +- Added strict enforcement for ``project.dynamic`` in ``pyproject.toml``. + This removes the transitional ability of users configuring certain parameters + via ``setup.py`` without making the necessary changes to ``pyproject.toml`` + (as mandated by :pep:`612`). (#4066) +- Removed handling of ``--config-settings["--build-option"]`` in ``setuptools.build_meta`` + from build-backend API hooks *other than* ``build_wheel``. + This was motivate by `errors caused when passing this option + `_. (#4079) + + v68.2.2 ======= diff --git a/newsfragments/+f8383dcd.feature.rst b/newsfragments/+f8383dcd.feature.rst deleted file mode 100644 index c8f0e82e55..0000000000 --- a/newsfragments/+f8383dcd.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Merged with pypa/distutils@7a04cbda0fc714. \ No newline at end of file diff --git a/newsfragments/3136.feature.rst b/newsfragments/3136.feature.rst deleted file mode 100644 index a57a8f4e57..0000000000 --- a/newsfragments/3136.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Include type information (``py.typed``, ``*.pyi``) by default (#3136) -- by :user:`Danie-1`, -**EXPERIMENTAL**. diff --git a/newsfragments/4066.removal.1.rst b/newsfragments/4066.removal.1.rst deleted file mode 100644 index 40cfd976ec..0000000000 --- a/newsfragments/4066.removal.1.rst +++ /dev/null @@ -1,2 +0,0 @@ -Configuring project ``version`` and ``egg_info.tag_*`` in such a way that -results in invalid version strings (according to :pep:`440`) is no longer permitted. diff --git a/newsfragments/4066.removal.2.rst b/newsfragments/4066.removal.2.rst deleted file mode 100644 index ff3c7c2885..0000000000 --- a/newsfragments/4066.removal.2.rst +++ /dev/null @@ -1,4 +0,0 @@ -Removed deprecated ``egg_base`` option from ``dist_info``. -Note that the ``dist_info`` command is considered internal to the way -``setuptools`` build backend works and not intended for -public usage. diff --git a/newsfragments/4066.removal.3.rst b/newsfragments/4066.removal.3.rst deleted file mode 100644 index 7d4048b785..0000000000 --- a/newsfragments/4066.removal.3.rst +++ /dev/null @@ -1,4 +0,0 @@ -The parsing of the deprecated ``metadata.license_file`` and -``metadata.requires`` fields in ``setup.cfg`` is no longer supported. -Users are expected to move to ``metadata.license_files`` and -``options.install_requires`` (respectively). diff --git a/newsfragments/4066.removal.4.rst b/newsfragments/4066.removal.4.rst deleted file mode 100644 index d599450ac2..0000000000 --- a/newsfragments/4066.removal.4.rst +++ /dev/null @@ -1,2 +0,0 @@ -Passing ``config_settings`` to ``setuptools.build_meta`` with -deprecated values for ``--global-option`` is no longer allowed. 
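For reference, this is roughly how a ``config_settings`` dictionary reaches
``setuptools.build_meta`` through the PEP 517 hooks -- a hedged sketch that assumes
it is run from a project root containing a buildable ``pyproject.toml``/``setup.py``:

    # CLI equivalent (approximately):
    #   pip wheel . --config-settings "--build-option=--python-tag py3"
    from setuptools import build_meta

    wheel_name = build_meta.build_wheel(
        wheel_directory='dist',
        config_settings={'--build-option': '--python-tag py3'},
    )
    print(wheel_name)
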
diff --git a/newsfragments/4066.removal.5.rst b/newsfragments/4066.removal.5.rst deleted file mode 100644 index 99f77a0965..0000000000 --- a/newsfragments/4066.removal.5.rst +++ /dev/null @@ -1,4 +0,0 @@ -Removed deprecated ``namespace-packages`` from ``pyproject.toml``. -Users are asked to use -:doc:`implicit namespace packages ` -(as defined in :pep:`420`). diff --git a/newsfragments/4066.removal.6.rst b/newsfragments/4066.removal.6.rst deleted file mode 100644 index 350ea60eb6..0000000000 --- a/newsfragments/4066.removal.6.rst +++ /dev/null @@ -1,4 +0,0 @@ -Added strict enforcement for ``project.dynamic`` in ``pyproject.toml``. -This removes the transitional ability of users configuring certain parameters -via ``setup.py`` without making the necessary changes to ``pyproject.toml`` -(as mandated by :pep:`612`). diff --git a/newsfragments/4069.feature.rst b/newsfragments/4069.feature.rst deleted file mode 100644 index 64a0af5d41..0000000000 --- a/newsfragments/4069.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Exported ``distutils.dep_util`` and ``setuptools.dep_util`` through ``setuptools.modified`` -- by :user:`Avasam` diff --git a/newsfragments/4079.removal.rst b/newsfragments/4079.removal.rst deleted file mode 100644 index e3d779288c..0000000000 --- a/newsfragments/4079.removal.rst +++ /dev/null @@ -1,4 +0,0 @@ -Removed handling of ``--config-settings["--build-option"]`` in ``setuptools.build_meta`` -from build-backend API hooks *other than* ``build_wheel``. -This was motivate by `errors caused when passing this option -`_. diff --git a/newsfragments/4094.bugfix.rst b/newsfragments/4094.bugfix.rst deleted file mode 100644 index 8c73eaedbf..0000000000 --- a/newsfragments/4094.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Replaced hardcoded numeric values with :obj:`dis.opmap`, -fixing problem with 3.13.0a1. diff --git a/setup.cfg b/setup.cfg index c22f452f16..4854e35c13 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 68.2.2 +version = 69.0.0 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From e1f8783a6956216a12f98e690e4b10486b093013 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 21 Nov 2023 09:57:44 +0000 Subject: [PATCH 0178/1761] Allow imports of setuptools.dep_util.newer_group with deprecation warning --- setuptools/dep_util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/dep_util.py b/setuptools/dep_util.py index e30cd41b49..c8ab14c8f2 100644 --- a/setuptools/dep_util.py +++ b/setuptools/dep_util.py @@ -4,7 +4,7 @@ def __getattr__(name): - if name not in ['newer_pairwise_group']: + if name not in ['newer_group', 'newer_pairwise_group']: raise AttributeError(name) warnings.warn( "dep_util is Deprecated. Use functions from setuptools.modified instead.", From 7d90e9fa01dbcf05f1ab2f15627effcb70df930c Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 21 Nov 2023 09:59:32 +0000 Subject: [PATCH 0179/1761] Add newsfragment --- newsfragments/4126.bugfix.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 newsfragments/4126.bugfix.rst diff --git a/newsfragments/4126.bugfix.rst b/newsfragments/4126.bugfix.rst new file mode 100644 index 0000000000..467a94887a --- /dev/null +++ b/newsfragments/4126.bugfix.rst @@ -0,0 +1,2 @@ +Fixed imports of ``setuptools.dep_util.newer_group``. +A deprecation warning is issued instead of a hard failure. 
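The ``dep_util`` shim patched above is the stock PEP 562 recipe for deprecating
module-level names while forwarding them to their new home. A self-contained sketch
with hypothetical ``old_api``/``new_api`` module names:

    # old_api.py -- keep legacy imports working, but warn
    import warnings

    import new_api as _new_api

    _MOVED = {'newer_group', 'newer_pairwise_group'}

    def __getattr__(name):
        if name not in _MOVED:
            raise AttributeError(name)
        warnings.warn(
            "old_api is deprecated; use new_api instead",
            DeprecationWarning,
            stacklevel=2,  # point at the caller, not this shim
        )
        return getattr(_new_api, name)

Both ``import old_api; old_api.newer_group`` and ``from old_api import newer_group``
go through the hook, which is why extending the allow-list by one name was enough to
unbreak the ``from setuptools.dep_util import newer_group`` spelling.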
From d148d9e9b530a80c61f15ba64910f51e4fd0e574 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 21 Nov 2023 10:26:41 +0000 Subject: [PATCH 0180/1761] =?UTF-8?q?Bump=20version:=2069.0.0=20=E2=86=92?= =?UTF-8?q?=2069.0.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 10 ++++++++++ newsfragments/4126.bugfix.rst | 2 -- setup.cfg | 2 +- 4 files changed, 12 insertions(+), 4 deletions(-) delete mode 100644 newsfragments/4126.bugfix.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 420d8297a2..020017925c 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 69.0.0 +current_version = 69.0.1 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 96231696f0..3fb6505431 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,13 @@ +v69.0.1 +======= + +Bugfixes +-------- + +- Fixed imports of ``setuptools.dep_util.newer_group``. + A deprecation warning is issued instead of a hard failure. (#4126) + + v69.0.0 ======= diff --git a/newsfragments/4126.bugfix.rst b/newsfragments/4126.bugfix.rst deleted file mode 100644 index 467a94887a..0000000000 --- a/newsfragments/4126.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed imports of ``setuptools.dep_util.newer_group``. -A deprecation warning is issued instead of a hard failure. diff --git a/setup.cfg b/setup.cfg index 4854e35c13..f1be2415ce 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 69.0.0 +version = 69.0.1 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From c836172e42822144ac4823e94fa1b0ed65c7ca54 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 21 Nov 2023 16:59:40 +0000 Subject: [PATCH 0181/1761] Improve warning visibility with due date and reference url --- setuptools/dep_util.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/setuptools/dep_util.py b/setuptools/dep_util.py index c8ab14c8f2..998ffa206e 100644 --- a/setuptools/dep_util.py +++ b/setuptools/dep_util.py @@ -1,14 +1,16 @@ -import warnings - from ._distutils import _modified +from .warnings import SetuptoolsDeprecationWarning def __getattr__(name): if name not in ['newer_group', 'newer_pairwise_group']: raise AttributeError(name) - warnings.warn( + SetuptoolsDeprecationWarning.emit( "dep_util is Deprecated. Use functions from setuptools.modified instead.", - DeprecationWarning, - stacklevel=2, + "Please use `setuptools.modified` instead of `setuptools.dep_util`.", + see_url="https://github.com/pypa/setuptools/pull/4069", + due_date=(2024, 5, 21), + # Warning added in v69.0.0 on 2023/11/20, + # See https://github.com/pypa/setuptools/discussions/4128 ) return getattr(_modified, name) From 14957384f9e408b2569e5caba85fd2090a8944c9 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 21 Nov 2023 18:12:23 +0000 Subject: [PATCH 0182/1761] Add news fragment --- newsfragments/4131.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4131.bugfix.rst diff --git a/newsfragments/4131.bugfix.rst b/newsfragments/4131.bugfix.rst new file mode 100644 index 0000000000..615c73963d --- /dev/null +++ b/newsfragments/4131.bugfix.rst @@ -0,0 +1 @@ +Added missing estimated date for removing ``setuptools.dep_util`` (deprecated in v69.0.0). 
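For downstream code, the supported spelling once this deprecation window closes is
the one named in the warning above:

    # no warning, and keeps working after setuptools.dep_util is removed
    from setuptools.modified import newer_group, newer_pairwise_group
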
From 4f6449f409322fa63408881860ed9097c2ab0af8 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 21 Nov 2023 18:19:02 +0000 Subject: [PATCH 0183/1761] =?UTF-8?q?Bump=20version:=2069.0.1=20=E2=86=92?= =?UTF-8?q?=2069.0.2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 9 +++++++++ newsfragments/4131.bugfix.rst | 1 - setup.cfg | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 newsfragments/4131.bugfix.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 020017925c..40430258a1 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 69.0.1 +current_version = 69.0.2 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 3fb6505431..46f64a41b1 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,12 @@ +v69.0.2 +======= + +Bugfixes +-------- + +- Added missing estimated date for removing ``setuptools.dep_util`` (deprecated in v69.0.0). (#4131) + + v69.0.1 ======= diff --git a/newsfragments/4131.bugfix.rst b/newsfragments/4131.bugfix.rst deleted file mode 100644 index 615c73963d..0000000000 --- a/newsfragments/4131.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Added missing estimated date for removing ``setuptools.dep_util`` (deprecated in v69.0.0). diff --git a/setup.cfg b/setup.cfg index f1be2415ce..b06d9b924d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 69.0.1 +version = 69.0.2 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From dc5dee8f15bec2c380a8ae00e11c685d8edd8519 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 21 Nov 2023 19:09:01 -0500 Subject: [PATCH 0184/1761] Fix RuffError post-merge --- pkg_resources/tests/test_pkg_resources.py | 1 - setuptools/command/dist_info.py | 1 - setuptools/command/sdist.py | 1 - 3 files changed, 3 deletions(-) diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py index 6f3d28467b..e316cd18ad 100644 --- a/pkg_resources/tests/test_pkg_resources.py +++ b/pkg_resources/tests/test_pkg_resources.py @@ -3,7 +3,6 @@ import os import zipfile import datetime -import time import subprocess import stat import distutils.dist diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py index 997695d001..f5061afaaf 100644 --- a/setuptools/command/dist_info.py +++ b/setuptools/command/dist_info.py @@ -5,7 +5,6 @@ import os import shutil -import sys from contextlib import contextmanager from distutils import log from distutils.core import Command diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index ac09f0fd0b..dc85161196 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -1,7 +1,6 @@ from distutils import log import distutils.command.sdist as orig import os -import sys import contextlib from itertools import chain From bfae7bee8e05de307e6b3093656577d7819324f5 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 21 Nov 2023 19:23:48 -0500 Subject: [PATCH 0185/1761] Fix import error post-merge --- setuptools/tests/test_find_py_modules.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py index dc7ff41edc..4c349e1b54 100644 --- a/setuptools/tests/test_find_py_modules.py +++ b/setuptools/tests/test_find_py_modules.py @@ -5,7 +5,7 @@ from 
setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder -from .test_find_packages import ensure_files, has_symlink +from .test_find_packages import can_symlink, ensure_files class TestModuleFinder: @@ -38,7 +38,7 @@ def test_finder(self, tmp_path, example): ensure_files(tmp_path, files) assert self.find(tmp_path, **kwargs) == set(expected_modules) - @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') + @pytest.mark.skipif(not can_symlink(), reason='Symlink support required') def test_symlinked_packages_are_included(self, tmp_path): src = "_myfiles/file.py" ensure_files(tmp_path, [src]) From 7362e570ac8fa157a451ee1d3f205839ee89aed9 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 21 Nov 2023 20:03:29 -0500 Subject: [PATCH 0186/1761] Update newsfragments/4097.feature.rst --- newsfragments/4097.feature.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/newsfragments/4097.feature.rst b/newsfragments/4097.feature.rst index 129e128f21..3380658e8c 100644 --- a/newsfragments/4097.feature.rst +++ b/newsfragments/4097.feature.rst @@ -1 +1 @@ -Updated `pkg_resources` to use stdlib `importlib.machinery` instead of `importlib_machinery` -- by :user:`Avasam` +Updated `pkg_resources` to use stdlib `importlib.machinery` instead of ``importlib_machinery`` -- by :user:`Avasam` From 907487d3cf523d588934faa956fbd223b47e44d4 Mon Sep 17 00:00:00 2001 From: Alex Hedges Date: Wed, 22 Nov 2023 12:51:12 -0500 Subject: [PATCH 0187/1761] Fix incorrect PEP reference in news entry for 69.0.0 The entry referenced PEP 612 (Parameter Specification Variables), but it should have referenced PEP 621 (Storing project metadata in pyproject.toml) instead. --- NEWS.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NEWS.rst b/NEWS.rst index 46f64a41b1..35d11c94b7 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -58,7 +58,7 @@ Deprecations and Removals - Added strict enforcement for ``project.dynamic`` in ``pyproject.toml``. This removes the transitional ability of users configuring certain parameters via ``setup.py`` without making the necessary changes to ``pyproject.toml`` - (as mandated by :pep:`612`). (#4066) + (as mandated by :pep:`621`). (#4066) - Removed handling of ``--config-settings["--build-option"]`` in ``setuptools.build_meta`` from build-backend API hooks *other than* ``build_wheel``. This was motivate by `errors caused when passing this option From 38f656064bc7ebb81d5b58b98dd62013ee9cb0f6 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 24 Nov 2023 14:25:07 -0500 Subject: [PATCH 0188/1761] Add warning when SETUPTOOLS_USE_DISTUTILS=stdlib. Ref #4137. --- _distutils_hack/__init__.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py index b951c2defd..0b47d17435 100644 --- a/_distutils_hack/__init__.py +++ b/_distutils_hack/__init__.py @@ -45,6 +45,15 @@ def enabled(): Allow selection of distutils by environment variable. """ which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') + if which == 'stdlib': + import warnings + + warnings.warn( + "Reliance on distutils from stdlib is deprecated. Users " + "must rely on setuptools to provide the distutils module. " + "Avoid importing distutils or import setuptools first, " + "and avoid setting SETUPTOOLS_USE_DISTUTILS=stdlib." + ) return which == 'local' From 2832216ada28985a9a972be9c7e8adf93e724a3b Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Fri, 24 Nov 2023 14:44:38 -0500 Subject: [PATCH 0189/1761] Announce the deprecation of imported distutils from stdlib. Ref #4137. --- _distutils_hack/__init__.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py index 0b47d17435..b8322fc182 100644 --- a/_distutils_hack/__init__.py +++ b/_distutils_hack/__init__.py @@ -30,7 +30,11 @@ def clear_distutils(): return import warnings - warnings.warn("Setuptools is replacing distutils.") + warnings.warn( + "Setuptools is replacing distutils. Support for replacing " + "an already imported distutils is deprecated. In the future, " + "this condition will fail.", + ) mods = [ name for name in sys.modules From 6c9cf919758f4d76f8d42d5d9756c1db43f74a20 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 28 Nov 2023 15:51:38 +0000 Subject: [PATCH 0190/1761] Temporarily disable deprecation enforcement --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index e352869f39..c4f10cd667 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,8 @@ deps = # ^-- use dev version while we wait for the new release setenv = PYTHONWARNDEFAULTENCODING = 1 - SETUPTOOLS_ENFORCE_DEPRECATION = {env:SETUPTOOLS_ENFORCE_DEPRECATION:1} + SETUPTOOLS_ENFORCE_DEPRECATION = {env:SETUPTOOLS_ENFORCE_DEPRECATION:0} + # ^-- Temporarily disable enforcement so CI don't fail on due dates commands = pytest {posargs} usedevelop = True From 26f420a97e73a2ab695023f6cc21f5c786d2b289 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 28 Nov 2023 11:43:20 -0500 Subject: [PATCH 0191/1761] Remove news fragment after allowing time to be processed downstream. --- newsfragments/+drop-py37.feature.rst | 1 - 1 file changed, 1 deletion(-) delete mode 100644 newsfragments/+drop-py37.feature.rst diff --git a/newsfragments/+drop-py37.feature.rst b/newsfragments/+drop-py37.feature.rst deleted file mode 100644 index ccabdaa355..0000000000 --- a/newsfragments/+drop-py37.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Require Python 3.8 or later. From 6a744c5fdca0a93cde2d059e5e92a34029f71bc4 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 28 Nov 2023 12:02:26 -0500 Subject: [PATCH 0192/1761] Update missed redundant code. --- setuptools/tests/test_distutils_adoption.py | 18 +++++------------- setuptools/tests/test_editable_install.py | 1 - 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py index 3053fdc0b7..e180547f0d 100644 --- a/setuptools/tests/test_distutils_adoption.py +++ b/setuptools/tests/test_distutils_adoption.py @@ -1,6 +1,5 @@ import os import sys -import functools import platform import textwrap @@ -10,13 +9,6 @@ IS_PYPY = '__pypy__' in sys.builtin_module_names -def popen_text(call): - """ - Augment the Popen call with the parameters to ensure unicode text. 
- """ - return functools.partial(call, text=True) - - def win_sr(env): """ On Windows, SYSTEMROOT must be present to avoid @@ -34,7 +26,7 @@ def win_sr(env): def find_distutils(venv, imports='distutils', env=None, **kwargs): py_cmd = 'import {imports}; print(distutils.__file__)'.format(**locals()) cmd = ['python', '-c', py_cmd] - return popen_text(venv.run)(cmd, env=win_sr(env), **kwargs) + return venv.run(cmd, env=win_sr(env), text=True, **kwargs) def count_meta_path(venv, env=None): @@ -46,7 +38,7 @@ def count_meta_path(venv, env=None): """ ) cmd = ['python', '-c', py_cmd] - return int(popen_text(venv.run)(cmd, env=win_sr(env))) + return int(venv.run(cmd, env=win_sr(env), text=True)) skip_without_stdlib_distutils = pytest.mark.skipif( @@ -92,7 +84,7 @@ def test_pip_import(venv): Regression test for #3002. """ cmd = ['python', '-c', 'import pip'] - popen_text(venv.run)(cmd) + venv.run(cmd, text=True) def test_distutils_has_origin(): @@ -140,7 +132,7 @@ def test_modules_are_not_duplicated_on_import( env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version) script = ENSURE_IMPORTS_ARE_NOT_DUPLICATED.format(imported_module=imported_module) cmd = ['python', '-c', script] - output = popen_text(venv.run)(cmd, env=win_sr(env)).strip() + output = venv.run(cmd, env=win_sr(env), text=True).strip() assert output == "success" @@ -164,5 +156,5 @@ def test_modules_are_not_duplicated_on_import( def test_log_module_is_not_duplicated_on_import(distutils_version, tmpdir_cwd, venv): env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version) cmd = ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED] - output = popen_text(venv.run)(cmd, env=win_sr(env)).strip() + output = venv.run(cmd, env=win_sr(env), text=True).strip() assert output == "success" diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index 48623279eb..08bac24f4b 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -61,7 +61,6 @@ def editable_opts(request): "Intended Audience :: Developers" ] urls = {Homepage = "https://github.com"} - dependencies = ['importlib-metadata; python_version<"3.8"'] [tool.setuptools] package-dir = {"" = "src"} From f73efcc9291e69304522c696a4b5433ab2458acc Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 28 Nov 2023 15:48:34 -0500 Subject: [PATCH 0193/1761] Mark interfaces as Protocols and add missing `self` argument --- newsfragments/4144.bugfix.rst | 1 + pkg_resources/__init__.py | 29 +++++++++++++++-------------- 2 files changed, 16 insertions(+), 14 deletions(-) create mode 100644 newsfragments/4144.bugfix.rst diff --git a/newsfragments/4144.bugfix.rst b/newsfragments/4144.bugfix.rst new file mode 100644 index 0000000000..9c4709b737 --- /dev/null +++ b/newsfragments/4144.bugfix.rst @@ -0,0 +1 @@ +Explicitely marked as ``Protocol`` and fixed missing ``self`` argument in interfaces ``pkg_resources.IMetadataProvider`` and ``pkg_resources.IResourceProvider`` -- by :user:`Avasam` diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index ab6afe955d..211b7561c9 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -23,6 +23,7 @@ import time import re import types +from typing import Protocol import zipfile import zipimport import warnings @@ -546,54 +547,54 @@ def get_entry_info(dist, group, name): return get_distribution(dist).get_entry_info(group, name) -class IMetadataProvider: - def has_metadata(name): +class IMetadataProvider(Protocol): + def has_metadata(self, name): """Does the package's 
distribution contain the named metadata?""" - def get_metadata(name): + def get_metadata(self, name): """The named metadata resource as a string""" - def get_metadata_lines(name): + def get_metadata_lines(self, name): """Yield named metadata resource as list of non-blank non-comment lines Leading and trailing whitespace is stripped from each line, and lines with ``#`` as the first non-blank character are omitted.""" - def metadata_isdir(name): + def metadata_isdir(self, name): """Is the named metadata a directory? (like ``os.path.isdir()``)""" - def metadata_listdir(name): + def metadata_listdir(self, name): """List of metadata names in the directory (like ``os.listdir()``)""" - def run_script(script_name, namespace): + def run_script(self, script_name, namespace): """Execute the named script in the supplied namespace dictionary""" -class IResourceProvider(IMetadataProvider): +class IResourceProvider(IMetadataProvider, Protocol): """An object that provides access to package resources""" - def get_resource_filename(manager, resource_name): + def get_resource_filename(self, manager, resource_name): """Return a true filesystem path for `resource_name` `manager` must be an ``IResourceManager``""" - def get_resource_stream(manager, resource_name): + def get_resource_stream(self, manager, resource_name): """Return a readable file-like object for `resource_name` `manager` must be an ``IResourceManager``""" - def get_resource_string(manager, resource_name): + def get_resource_string(self, manager, resource_name): """Return a string containing the contents of `resource_name` `manager` must be an ``IResourceManager``""" - def has_resource(resource_name): + def has_resource(self, resource_name): """Does the package contain the named resource?""" - def resource_isdir(resource_name): + def resource_isdir(self, resource_name): """Is the named resource a directory? (like ``os.path.isdir()``)""" - def resource_listdir(resource_name): + def resource_listdir(self, resource_name): """List of resource names in the directory (like ``os.listdir()``)""" From 33dd01267b6a886217bae3ebd5df5b689e2ab722 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 29 Nov 2023 13:21:17 -0500 Subject: [PATCH 0194/1761] Suppress deprecation warning in dateutil. Workaround for dateutil/dateutil#1284. --- pytest.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pytest.ini b/pytest.ini index d9a15ed19a..f9533b5721 100644 --- a/pytest.ini +++ b/pytest.ini @@ -24,4 +24,7 @@ filterwarnings= # pypa/build#615 ignore:'encoding' argument not specified::build.env + # dateutil/dateutil#1284 + ignore:datetime.datetime.utcfromtimestamp:DeprecationWarning:dateutil.tz.tz + ## end upstream From 97a5f44787ac5a928534cdf724210c429621435c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 4 Dec 2023 15:53:37 -0500 Subject: [PATCH 0195/1761] Update Github Actions badge per actions/starter-workflows#1525. --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index b703d4904d..41bcfbe887 100644 --- a/README.rst +++ b/README.rst @@ -3,7 +3,7 @@ .. image:: https://img.shields.io/pypi/pyversions/PROJECT.svg -.. image:: https://github.com/PROJECT_PATH/workflows/tests/badge.svg +.. 
image:: https://github.com/PROJECT_PATH/actions/workflows/main.yml/badge.svg :target: https://github.com/PROJECT_PATH/actions?query=workflow%3A%22tests%22 :alt: tests From 8bff8b034a0bbf0273a38f0a0cc41e3a52b26864 Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Tue, 5 Dec 2023 15:48:52 +0100 Subject: [PATCH 0196/1761] Enable testing merge queues @ GitHub Actions CI/CD (jaraco/skeleton#93) This allows org-hosted projects to start enabling merge queues in the repository settings. With that, GitHub would trigger a separate event against a merge commit derived from merging several pull requests with the target branch. --- .github/workflows/main.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9682985c82..387d01aa40 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,6 +1,11 @@ name: tests -on: [push, pull_request] +on: + merge_group: + push: + branches-ignore: + - gh-readonly-queue/** # Temporary merge queue-related GH-made branches + pull_request: permissions: contents: read From dd5f15a600a071ba00859e84fa497a9d1a25f521 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 5 Dec 2023 20:19:50 -0500 Subject: [PATCH 0197/1761] Add attribution for #3659. --- NEWS.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NEWS.rst b/NEWS.rst index 35d11c94b7..42bbedc0e7 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -529,7 +529,7 @@ v65.5.1 Misc ---- * #3638: Drop a test dependency on the ``mock`` package, always use :external+python:py:mod:`unittest.mock` -- by :user:`hroncok` -* #3659: Fixed REDoS vector in package_index. +* #3659: Fixed REDoS vector in package_index -- by :user:`SCH227` v65.5.0 From e4bd6091a1fbe26fe113051f0f47875d627c7ed2 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 11 Dec 2023 10:46:32 -0500 Subject: [PATCH 0198/1761] Separate collateral jobs on different lines for easier override/extension. --- .github/workflows/main.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 387d01aa40..a079bbfbe3 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -61,7 +61,9 @@ jobs: strategy: fail-fast: false matrix: - job: [diffcov, docs] + job: + - diffcov + - docs runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 From d0b0a4d5a40a6f94de55af58ecc6e542ab962323 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 14 Dec 2023 10:24:03 -0500 Subject: [PATCH 0199/1761] Retain valid names with underscores in egg_info. Closes #2522. --- newsfragments/+d4a9206f.bugfix.rst | 1 + setuptools/_normalization.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 newsfragments/+d4a9206f.bugfix.rst diff --git a/newsfragments/+d4a9206f.bugfix.rst b/newsfragments/+d4a9206f.bugfix.rst new file mode 100644 index 0000000000..b066641029 --- /dev/null +++ b/newsfragments/+d4a9206f.bugfix.rst @@ -0,0 +1 @@ +Retain valid names with underscores in egg_info. 
\ No newline at end of file diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index eee4fb7746..aa9274f093 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -12,7 +12,7 @@ # https://packaging.python.org/en/latest/specifications/core-metadata/#name _VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) -_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9.]+", re.I) +_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9._-]+", re.I) _NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I) _PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I) @@ -35,6 +35,8 @@ def safe_name(component: str) -> str: 'hello-world' >>> safe_name("hello?world") 'hello-world' + >>> safe_name("hello_world") + 'hello_world' """ # See pkg_resources.safe_name return _UNSAFE_NAME_CHARS.sub("-", component) From e92440ad6210b21f3ede64d7eb69dead94b37d3a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 14 Dec 2023 10:46:47 -0500 Subject: [PATCH 0200/1761] =?UTF-8?q?Bump=20version:=2069.0.2=20=E2=86=92?= =?UTF-8?q?=2069.0.3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 9 +++++++++ newsfragments/+d4a9206f.bugfix.rst | 1 - setup.cfg | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 newsfragments/+d4a9206f.bugfix.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 40430258a1..e436630663 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 69.0.2 +current_version = 69.0.3 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 42bbedc0e7..f0c3f037b9 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,12 @@ +v69.0.3 +======= + +Bugfixes +-------- + +- Retain valid names with underscores in egg_info. + + v69.0.2 ======= diff --git a/newsfragments/+d4a9206f.bugfix.rst b/newsfragments/+d4a9206f.bugfix.rst deleted file mode 100644 index b066641029..0000000000 --- a/newsfragments/+d4a9206f.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Retain valid names with underscores in egg_info. \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index b06d9b924d..2ecf34470e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 69.0.2 +version = 69.0.3 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From 596e6834c8a037c935338afe92e0b9c5ffa1768f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 19 Dec 2023 18:29:16 -0500 Subject: [PATCH 0201/1761] Drop minimum requirement on pytest-mypy as most environments are already running much later. Closes jaraco/skeleton#96. 
--- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 4f184c7ec5..20c5dd7638 100644 --- a/setup.cfg +++ b/setup.cfg @@ -34,7 +34,7 @@ testing = # workaround for jaraco/skeleton#22 python_implementation != "PyPy" pytest-cov - pytest-mypy >= 0.9.1; \ + pytest-mypy; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" pytest-enabler >= 2.2 From b8c6c1530ef937521b60aabb0ecd98a8b5dca761 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 23 Dec 2023 00:25:02 +0100 Subject: [PATCH 0202/1761] Use the ruff formatter (jaraco/skeleton#99) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Use the ruff formatter, instead of black Based on: - ruff-pre-commit README.md | Using Ruff with pre-commit https://github.com/astral-sh/ruff-pre-commit/blob/main/README.md - The Ruff Formatter | Conflicting lint rules https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules Support for the ruff formatter was added to pytest-ruff by commits from October 2023, released the same day as versions 0.2 and 0.2.1. Hence, it makes sense to require pytest-ruff ≥ 0.2.1 now. Support for `quote-style = "preserve"` was added to ruff in the last couple of weeks, therefore require the latest version, ruff ≥ 0.1.8. This option is equivalent to `skip-string-normalization` in black. Closes jaraco/skeleton#101. --------- Co-authored-by: Jason R. Coombs --- .pre-commit-config.yaml | 7 ++++--- README.rst | 4 ---- pyproject.toml | 3 --- pytest.ini | 8 -------- ruff.toml | 22 ++++++++++++++++++++++ setup.cfg | 5 +---- 6 files changed, 27 insertions(+), 22 deletions(-) create mode 100644 ruff.toml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index af50201060..5a4a7e9166 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,6 @@ repos: -- repo: https://github.com/psf/black - rev: 22.6.0 +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.8 hooks: - - id: black + - id: ruff + - id: ruff-format diff --git a/README.rst b/README.rst index 41bcfbe887..2fabcf3334 100644 --- a/README.rst +++ b/README.rst @@ -11,10 +11,6 @@ :target: https://github.com/astral-sh/ruff :alt: Ruff -.. image:: https://img.shields.io/badge/code%20style-black-000000.svg - :target: https://github.com/psf/black - :alt: Code style: Black - .. .. image:: https://readthedocs.org/projects/PROJECT_RTD/badge/?version=latest .. :target: https://PROJECT_RTD.readthedocs.io/en/latest/?badge=latest diff --git a/pyproject.toml b/pyproject.toml index dce944dfc3..a853c5789e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,4 @@ requires = ["setuptools>=56", "setuptools_scm[toml]>=3.4.1"] build-backend = "setuptools.build_meta" -[tool.black] -skip-string-normalization = true - [tool.setuptools_scm] diff --git a/pytest.ini b/pytest.ini index f9533b5721..022a723e7e 100644 --- a/pytest.ini +++ b/pytest.ini @@ -7,14 +7,6 @@ filterwarnings= # Ensure ResourceWarnings are emitted default::ResourceWarning - # shopkeep/pytest-black#55 - ignore: is not using a cooperative constructor:pytest.PytestDeprecationWarning - ignore:The \(fspath. 
py.path.local\) argument to BlackItem is deprecated.:pytest.PytestDeprecationWarning - ignore:BlackItem is an Item subclass and should not be a collector:pytest.PytestWarning - - # shopkeep/pytest-black#67 - ignore:'encoding' argument not specified::pytest_black - # realpython/pytest-mypy#152 ignore:'encoding' argument not specified::pytest_mypy diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000000..7ed133b790 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,22 @@ +[lint] +extend-ignore = [ + # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules + "W191", + "E111", + "E114", + "E117", + "D206", + "D300", + "Q000", + "Q001", + "Q002", + "Q003", + "COM812", + "COM819", + "ISC001", + "ISC002", +] + +[format] +# https://docs.astral.sh/ruff/settings/#format-quote-style +quote-style = "preserve" diff --git a/setup.cfg b/setup.cfg index 20c5dd7638..1d2729beb2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -30,15 +30,12 @@ testing = # upstream pytest >= 6 pytest-checkdocs >= 2.4 - pytest-black >= 0.3.7; \ - # workaround for jaraco/skeleton#22 - python_implementation != "PyPy" pytest-cov pytest-mypy; \ # workaround for jaraco/skeleton#22 python_implementation != "PyPy" pytest-enabler >= 2.2 - pytest-ruff + pytest-ruff >= 0.2.1 # local From 1eb79646ed76f1237a7db0c9f2868522eebba1ed Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 23 Dec 2023 09:41:49 -0500 Subject: [PATCH 0203/1761] Exclude ruff on vendored directories. --- pkg_resources/_vendor/ruff.toml | 1 + setuptools/_vendor/ruff.toml | 1 + 2 files changed, 2 insertions(+) create mode 100644 pkg_resources/_vendor/ruff.toml create mode 100644 setuptools/_vendor/ruff.toml diff --git a/pkg_resources/_vendor/ruff.toml b/pkg_resources/_vendor/ruff.toml new file mode 100644 index 0000000000..00fee625a5 --- /dev/null +++ b/pkg_resources/_vendor/ruff.toml @@ -0,0 +1 @@ +exclude = ["*"] diff --git a/setuptools/_vendor/ruff.toml b/setuptools/_vendor/ruff.toml new file mode 100644 index 0000000000..00fee625a5 --- /dev/null +++ b/setuptools/_vendor/ruff.toml @@ -0,0 +1 @@ +exclude = ["*"] From b07d2f58233f9a99a820901924e263645c57a7c6 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 23 Dec 2023 09:49:47 -0500 Subject: [PATCH 0204/1761] Fix broken reference from changelog to PyPUG. --- NEWS.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NEWS.rst b/NEWS.rst index f0c3f037b9..3c99148834 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1161,7 +1161,7 @@ Changes .. warning:: Please note that future releases of setuptools will halt the build process if a ``pyproject.toml`` file that does not match doc:`the PyPA Specification - ` is given. + ` is given. * #3215: Updated ``pyproject.toml`` validation, as generated by ``validate-pyproject==0.6.1``. * #3218: Prevented builds from erroring if the project specifies metadata via ``pyproject.toml``, but uses other files (e.g. ``setup.py``) to complement it, From 7f8d898f8b0da8521895b41f78a2d1c1953ff846 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 23 Dec 2023 09:57:29 -0500 Subject: [PATCH 0205/1761] Exclude distutils from ruff formatting/linting. 
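As an illustration of the `quote-style = "preserve"` setting introduced with the ruff formatter in PATCH 0202 above (a sketch for this document, not part of any patch): ruff's formatter normally normalizes string literals to double quotes, and `preserve` keeps the authored quoting instead, mirroring black's `skip-string-normalization`:

    # Under ruff's default quote-style ("double"), `ruff format` would rewrite
    # the first assignment to use double quotes:
    a = 'spam'
    b = "eggs"
    # With quote-style = "preserve" (supported in ruff >= 0.1.8, as required in
    # the commit message above), both lines keep their original quotes.

This is why the large "Rebuild the project using ruff style" patch that follows touches many files yet leaves most single-quoted strings alone; docstrings, as its diffs show, are still normalized to triple double quotes.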
--- setuptools/_distutils/ruff.toml | 1 + 1 file changed, 1 insertion(+) create mode 100644 setuptools/_distutils/ruff.toml diff --git a/setuptools/_distutils/ruff.toml b/setuptools/_distutils/ruff.toml new file mode 100644 index 0000000000..00fee625a5 --- /dev/null +++ b/setuptools/_distutils/ruff.toml @@ -0,0 +1 @@ +exclude = ["*"] From aa98d7870d2fc89bd9a66e1fcab2c0520fbf2d5b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 23 Dec 2023 09:59:38 -0500 Subject: [PATCH 0206/1761] Rebuild the project using ruff style. --- docs/conf.py | 24 +- pkg_resources/tests/test_resources.py | 86 ++--- pkg_resources/tests/test_working_set.py | 76 ++-- setuptools/_core_metadata.py | 1 + setuptools/_normalization.py | 1 + setuptools/_reqs.py | 6 +- setuptools/build_meta.py | 2 +- setuptools/command/_requirestxt.py | 1 + setuptools/command/bdist_egg.py | 8 +- setuptools/command/build_ext.py | 57 ++- setuptools/command/easy_install.py | 86 ++--- setuptools/command/editable_wheel.py | 14 +- setuptools/command/egg_info.py | 6 +- setuptools/config/__init__.py | 1 + setuptools/config/_apply_pyprojecttoml.py | 1 + setuptools/config/expand.py | 1 + setuptools/config/pyprojecttoml.py | 1 + setuptools/config/setupcfg.py | 7 +- setuptools/dist.py | 2 +- setuptools/glob.py | 2 +- setuptools/msvc.py | 32 +- setuptools/sandbox.py | 32 +- setuptools/tests/config/downloads/preload.py | 1 + .../tests/config/test_apply_pyprojecttoml.py | 1 + setuptools/tests/config/test_setupcfg.py | 66 ++-- setuptools/tests/fixtures.py | 40 +- setuptools/tests/integration/helpers.py | 3 +- .../integration/test_pip_install_sdist.py | 1 + setuptools/tests/script-with-bom.py | 2 +- setuptools/tests/test_bdist_deprecations.py | 1 + setuptools/tests/test_bdist_egg.py | 1 + setuptools/tests/test_build_ext.py | 12 +- setuptools/tests/test_core_metadata.py | 2 +- setuptools/tests/test_dist.py | 12 +- setuptools/tests/test_dist_info.py | 1 + setuptools/tests/test_easy_install.py | 54 ++- setuptools/tests/test_editable_install.py | 16 +- setuptools/tests/test_egg_info.py | 185 +++++----- setuptools/tests/test_find_packages.py | 4 +- setuptools/tests/test_find_py_modules.py | 1 + setuptools/tests/test_glob.py | 8 +- setuptools/tests/test_manifest.py | 102 +++-- setuptools/tests/test_packageindex.py | 4 +- setuptools/tests/test_sandbox.py | 1 + setuptools/tests/test_sdist.py | 9 +- setuptools/tests/test_virtualenv.py | 8 +- setuptools/tests/test_wheel.py | 348 +++++++++--------- setuptools/tests/test_windows_wrappers.py | 1 + setuptools/wheel.py | 8 +- 49 files changed, 635 insertions(+), 704 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 38899e5334..0a82ff2fe2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -100,19 +100,17 @@ # Preserve authored syntax for defaults autodoc_preserve_defaults = True -intersphinx_mapping.update( - { - 'pip': ('https://pip.pypa.io/en/latest', None), - 'build': ('https://pypa-build.readthedocs.io/en/latest', None), - 'PyPUG': ('https://packaging.python.org/en/latest/', None), - 'packaging': ('https://packaging.pypa.io/en/latest/', None), - 'twine': ('https://twine.readthedocs.io/en/stable/', None), - 'importlib-resources': ( - 'https://importlib-resources.readthedocs.io/en/latest', - None, - ), - } -) +intersphinx_mapping.update({ + 'pip': ('https://pip.pypa.io/en/latest', None), + 'build': ('https://pypa-build.readthedocs.io/en/latest', None), + 'PyPUG': ('https://packaging.python.org/en/latest/', None), + 'packaging': ('https://packaging.pypa.io/en/latest/', None), + 'twine': 
('https://twine.readthedocs.io/en/stable/', None), + 'importlib-resources': ( + 'https://importlib-resources.readthedocs.io/en/latest', + None, + ), +}) # Add support for linking usernames github_url = 'https://github.com' diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py index 608c67aeeb..c90584a882 100644 --- a/pkg_resources/tests/test_resources.py +++ b/pkg_resources/tests/test_resources.py @@ -255,12 +255,10 @@ def test_marker_evaluation_with_extras(self): ws = WorkingSet([]) Foo = Distribution.from_filename( "/foo_dir/Foo-1.2.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: baz\n" "Requires-Dist: quux; extra=='baz'", - ) - ), + metadata=Metadata(( + "METADATA", + "Provides-Extra: baz\n" "Requires-Dist: quux; extra=='baz'", + )), ) ad.add(Foo) assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] @@ -275,13 +273,11 @@ def test_marker_evaluation_with_extras_normlized(self): ws = WorkingSet([]) Foo = Distribution.from_filename( "/foo_dir/Foo-1.2.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: baz-lightyear\n" - "Requires-Dist: quux; extra=='baz-lightyear'", - ) - ), + metadata=Metadata(( + "METADATA", + "Provides-Extra: baz-lightyear\n" + "Requires-Dist: quux; extra=='baz-lightyear'", + )), ) ad.add(Foo) assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo] @@ -295,15 +291,13 @@ def test_marker_evaluation_with_multiple_extras(self): ws = WorkingSet([]) Foo = Distribution.from_filename( "/foo_dir/Foo-1.2.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: baz\n" - "Requires-Dist: quux; extra=='baz'\n" - "Provides-Extra: bar\n" - "Requires-Dist: fred; extra=='bar'\n", - ) - ), + metadata=Metadata(( + "METADATA", + "Provides-Extra: baz\n" + "Requires-Dist: quux; extra=='baz'\n" + "Provides-Extra: bar\n" + "Requires-Dist: fred; extra=='bar'\n", + )), ) ad.add(Foo) quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info") @@ -326,15 +320,13 @@ def test_marker_evaluation_with_extras_loop(self): ) c = Distribution.from_filename( "/foo_dir/c-1.0.dist-info", - metadata=Metadata( - ( - "METADATA", - "Provides-Extra: a\n" - "Requires-Dist: b;extra=='a'\n" - "Provides-Extra: b\n" - "Requires-Dist: foo;extra=='b'", - ) - ), + metadata=Metadata(( + "METADATA", + "Provides-Extra: a\n" + "Requires-Dist: b;extra=='a'\n" + "Provides-Extra: b\n" + "Requires-Dist: foo;extra=='b'", + )), ) foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info") for dist in (a, b, c, foo): @@ -572,26 +564,22 @@ def testOptionsAndHashing(self): assert set(r1.extras) == set(("foo", "bar")) assert set(r2.extras) == set(("foo", "bar")) assert hash(r1) == hash(r2) - assert hash(r1) == hash( - ( - "twisted", - None, - packaging.specifiers.SpecifierSet(">=1.2"), - frozenset(["foo", "bar"]), - None, - ) - ) + assert hash(r1) == hash(( + "twisted", + None, + packaging.specifiers.SpecifierSet(">=1.2"), + frozenset(["foo", "bar"]), + None, + )) assert hash( Requirement.parse("Twisted @ https://localhost/twisted.zip") - ) == hash( - ( - "twisted", - "https://localhost/twisted.zip", - packaging.specifiers.SpecifierSet(), - frozenset(), - None, - ) - ) + ) == hash(( + "twisted", + "https://localhost/twisted.zip", + packaging.specifiers.SpecifierSet(), + frozenset(), + None, + )) def testVersionEquality(self): r1 = Requirement.parse("foo==0.3a2") diff --git a/pkg_resources/tests/test_working_set.py b/pkg_resources/tests/test_working_set.py index 435d3c6beb..f8e60e752a 100644 --- 
a/pkg_resources/tests/test_working_set.py +++ b/pkg_resources/tests/test_working_set.py @@ -19,7 +19,7 @@ def strip_comments(s): def parse_distributions(s): - ''' + """ Parse a series of distribution specs of the form: {project_name}-{version} [optional, indented requirements specification] @@ -36,7 +36,7 @@ def parse_distributions(s): - project_name=foo, version=0.2 - project_name=bar, version=1.0, requires=['foo>=3.0', 'baz; extra=="feature"'] - ''' + """ s = s.strip() for spec in re.split(r'\n(?=[^\s])', s): if not spec: @@ -113,7 +113,7 @@ def parametrize_test_working_set_resolve(*test_list): @parametrize_test_working_set_resolve( - ''' + """ # id noop @@ -126,8 +126,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved # resolved [replace conflicting] - ''', - ''' + """, + """ # id already_installed @@ -144,8 +144,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] foo-3.0 - ''', - ''' + """, + """ # id installable_not_installed @@ -163,8 +163,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] foo-3.0 - ''', - ''' + """, + """ # id not_installable @@ -180,8 +180,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] DistributionNotFound - ''', - ''' + """, + """ # id no_matching_version @@ -198,8 +198,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] DistributionNotFound - ''', - ''' + """, + """ # id installable_with_installed_conflict @@ -217,8 +217,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] foo-3.5 - ''', - ''' + """, + """ # id not_installable_with_installed_conflict @@ -235,8 +235,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] DistributionNotFound - ''', - ''' + """, + """ # id installed_with_installed_require @@ -257,8 +257,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] foo-3.9 baz-0.1 - ''', - ''' + """, + """ # id installed_with_conflicting_installed_require @@ -277,8 +277,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] DistributionNotFound - ''', - ''' + """, + """ # id installed_with_installable_conflicting_require @@ -299,8 +299,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] baz-0.1 foo-2.9 - ''', - ''' + """, + """ # id installed_with_installable_require @@ -321,8 +321,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] foo-3.9 baz-0.1 - ''', - ''' + """, + """ # id installable_with_installed_require @@ -343,8 +343,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] foo-3.9 baz-0.1 - ''', - ''' + """, + """ # id installable_with_installable_require @@ -365,8 +365,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] foo-3.9 baz-0.1 - ''', - ''' + """, + """ # id installable_with_conflicting_installable_require @@ -387,8 +387,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] baz-0.1 foo-2.9 - ''', - ''' + """, + """ # id conflicting_installables @@ -407,8 +407,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] VersionConflict - ''', - ''' + """, + """ # id installables_with_conflicting_requires @@ -431,8 +431,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved 
[replace conflicting] VersionConflict - ''', - ''' + """, + """ # id installables_with_conflicting_nested_requires @@ -459,8 +459,8 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] VersionConflict - ''', - ''' + """, + """ # id wanted_normalized_name_installed_canonical @@ -477,7 +477,7 @@ def parametrize_test_working_set_resolve(*test_list): # resolved [replace conflicting] foo.bar-3.6 - ''', + """, ) def test_working_set_resolve( installed_dists, diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py index 6c904c3c77..4bf3c7c947 100644 --- a/setuptools/_core_metadata.py +++ b/setuptools/_core_metadata.py @@ -3,6 +3,7 @@ See: https://packaging.python.org/en/latest/specifications/core-metadata/ """ + import os import stat import textwrap diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index aa9274f093..8d4731eb60 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -2,6 +2,7 @@ Helpers for normalization as expected in wheel/sdist/module file names and core metadata """ + import re from pathlib import Path from typing import Union diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py index 7d7130d50e..9f83437033 100644 --- a/setuptools/_reqs.py +++ b/setuptools/_reqs.py @@ -24,13 +24,11 @@ def parse_strings(strs: _StrOrIter) -> Iterator[str]: @overload -def parse(strs: _StrOrIter) -> Iterator[Requirement]: - ... +def parse(strs: _StrOrIter) -> Iterator[Requirement]: ... @overload -def parse(strs: _StrOrIter, parser: Callable[[str], _T]) -> Iterator[_T]: - ... +def parse(strs: _StrOrIter, parser: Callable[[str], _T]) -> Iterator[_T]: ... def parse(strs, parser=parse_req): diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index 6da80d70b8..80ccceff3c 100644 --- a/setuptools/build_meta.py +++ b/setuptools/build_meta.py @@ -128,7 +128,7 @@ def _file_with_extension(directory, extension): def _open_setup_script(setup_script): if not os.path.exists(setup_script): # Supply a default setup.py - return io.StringIO(u"from setuptools import setup; setup()") + return io.StringIO("from setuptools import setup; setup()") return getattr(tokenize, 'open', open)(setup_script) diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py index 32bae2c4b4..7b732b11ab 100644 --- a/setuptools/command/_requirestxt.py +++ b/setuptools/command/_requirestxt.py @@ -6,6 +6,7 @@ See https://setuptools.pypa.io/en/latest/deprecated/python_eggs.html#requires-txt """ + import io from collections import defaultdict from itertools import filterfalse diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index bdece56bc9..e0947c6624 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -232,9 +232,11 @@ def run(self): # noqa: C901 # is too complex (14) # FIXME remove_tree(self.bdist_dir, dry_run=self.dry_run) # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution, 'dist_files', []).append( - ('bdist_egg', get_python_version(), self.egg_output) - ) + getattr(self.distribution, 'dist_files', []).append(( + 'bdist_egg', + get_python_version(), + self.egg_output, + )) def zap_pyfiles(self): log.info("Removing .py files from temporary directory") diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 9a80781cf4..2e1954ab0f 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -37,9 +37,9 @@ def 
_customize_compiler_for_shlib(compiler): tmp = _CONFIG_VARS.copy() try: # XXX Help! I don't have any idea whether these are right... - _CONFIG_VARS[ - 'LDSHARED' - ] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup" + _CONFIG_VARS['LDSHARED'] = ( + "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup" + ) _CONFIG_VARS['CCSHARED'] = " -dynamiclib" _CONFIG_VARS['SO'] = ".dylib" customize_compiler(compiler) @@ -341,33 +341,30 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False): if not self.dry_run: f = open(stub_file, 'w') f.write( - '\n'.join( - [ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, importlib.util" - + if_dl(", dl"), - " __file__ = pkg_resources.resource_filename" - "(__name__,%r)" % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " spec = importlib.util.spec_from_file_location(", - " __name__, __file__)", - " mod = importlib.util.module_from_spec(spec)", - " spec.loader.exec_module(mod)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "", # terminal \n - ] - ) + '\n'.join([ + "def __bootstrap__():", + " global __bootstrap__, __file__, __loader__", + " import sys, os, pkg_resources, importlib.util" + if_dl(", dl"), + " __file__ = pkg_resources.resource_filename" + "(__name__,%r)" % os.path.basename(ext._file_name), + " del __bootstrap__", + " if '__loader__' in globals():", + " del __loader__", + if_dl(" old_flags = sys.getdlopenflags()"), + " old_dir = os.getcwd()", + " try:", + " os.chdir(os.path.dirname(__file__))", + if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), + " spec = importlib.util.spec_from_file_location(", + " __name__, __file__)", + " mod = importlib.util.module_from_spec(spec)", + " spec.loader.exec_module(mod)", + " finally:", + if_dl(" sys.setdlopenflags(old_flags)"), + " os.chdir(old_dir)", + "__bootstrap__()", + "", # terminal \n + ]) ) f.close() if compile: diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 5d6fd5ca71..897ec6ad9b 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -245,31 +245,25 @@ def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME self.config_vars = dict(sysconfig.get_config_vars()) - self.config_vars.update( - { - 'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': ( - f'{sys.version_info.major}.{sys.version_info.minor}' - ), - 'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}', - 'sys_prefix': self.config_vars['prefix'], - 'sys_exec_prefix': self.config_vars['exec_prefix'], - # Only python 3.2+ has abiflags - 'abiflags': getattr(sys, 'abiflags', ''), - 'platlibdir': getattr(sys, 'platlibdir', 'lib'), - } - ) + self.config_vars.update({ + 'dist_name': self.distribution.get_name(), + 'dist_version': self.distribution.get_version(), + 'dist_fullname': self.distribution.get_fullname(), + 'py_version': py_version, + 'py_version_short': (f'{sys.version_info.major}.{sys.version_info.minor}'), + 'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}', + 'sys_prefix': self.config_vars['prefix'], + 
'sys_exec_prefix': self.config_vars['exec_prefix'], + # Only python 3.2+ has abiflags + 'abiflags': getattr(sys, 'abiflags', ''), + 'platlibdir': getattr(sys, 'platlibdir', 'lib'), + }) with contextlib.suppress(AttributeError): # only for distutils outside stdlib - self.config_vars.update( - { - 'implementation_lower': install._get_implementation().lower(), - 'implementation': install._get_implementation(), - } - ) + self.config_vars.update({ + 'implementation_lower': install._get_implementation().lower(), + 'implementation': install._get_implementation(), + }) # pypa/distutils#113 Python 3.9 compat self.config_vars.setdefault( @@ -668,7 +662,7 @@ def check_editable(self, spec): @contextlib.contextmanager def _tmpdir(self): - tmpdir = tempfile.mkdtemp(prefix=u"easy_install-") + tmpdir = tempfile.mkdtemp(prefix="easy_install-") try: # cast to str as workaround for #709 and #710 and #712 yield str(tmpdir) @@ -1028,9 +1022,9 @@ def install_exe(self, dist_filename, tmpdir): f.close() script_dir = os.path.join(_egg_info, 'scripts') # delete entry-point scripts to avoid duping - self.delete_blockers( - [os.path.join(script_dir, args[0]) for args in ScriptWriter.get_args(dist)] - ) + self.delete_blockers([ + os.path.join(script_dir, args[0]) for args in ScriptWriter.get_args(dist) + ]) # Build .egg file from tmpdir bdist_egg.make_zipfile( egg_path, @@ -1433,24 +1427,20 @@ def get_site_dirs(): if sys.platform in ('os2emx', 'riscos'): sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) elif os.sep == '/': - sitedirs.extend( - [ - os.path.join( - prefix, - "lib", - "python{}.{}".format(*sys.version_info), - "site-packages", - ), - os.path.join(prefix, "lib", "site-python"), - ] - ) - else: - sitedirs.extend( - [ + sitedirs.extend([ + os.path.join( prefix, - os.path.join(prefix, "lib", "site-packages"), - ] - ) + "lib", + "python{}.{}".format(*sys.version_info), + "site-packages", + ), + os.path.join(prefix, "lib", "site-python"), + ]) + else: + sitedirs.extend([ + prefix, + os.path.join(prefix, "lib", "site-packages"), + ]) if sys.platform != 'darwin': continue @@ -1678,9 +1668,11 @@ def save(self): last_paths.remove(path) # also, re-check that all paths are still valid before saving them for path in self.paths[:]: - if path not in last_paths and not path.startswith( - ('import ', 'from ', '#') - ): + if path not in last_paths and not path.startswith(( + 'import ', + 'from ', + '#', + )): absolute_path = os.path.join(self.basedir, path) if not os.path.exists(absolute_path): self.paths.remove(path) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 79c839f8f0..73fa9fff52 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -384,14 +384,13 @@ def _select_strategy( class EditableStrategy(Protocol): - def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]): - ... + def __call__( + self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str] + ): ... - def __enter__(self): - ... + def __enter__(self): ... - def __exit__(self, _exc_type, _exc_value, _traceback): - ... + def __exit__(self, _exc_type, _exc_value, _traceback): ... class _StaticPth: @@ -413,8 +412,7 @@ def __enter__(self): _logger.warning(msg + _LENIENT_WARNING) return self - def __exit__(self, _exc_type, _exc_value, _traceback): - ... + def __exit__(self, _exc_type, _exc_value, _traceback): ... 
class _LinkTree(_StaticPth): diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index 7c7f57aaf8..7169f33535 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -700,9 +700,9 @@ def warn_depends_obsolete(cmd, basename, filename): def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names()] - ) + pkgs = dict.fromkeys([ + k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names() + ]) cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n') diff --git a/setuptools/config/__init__.py b/setuptools/config/__init__.py index ffea394436..fcc7d008d6 100644 --- a/setuptools/config/__init__.py +++ b/setuptools/config/__init__.py @@ -1,6 +1,7 @@ """For backward compatibility, expose main functions from ``setuptools.config.setupcfg`` """ + from functools import wraps from typing import Callable, TypeVar, cast diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index 80318d5d0b..b562f91759 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -7,6 +7,7 @@ **PRIVATE MODULE**: API reserved for setuptools internal usage only. """ + import logging import os from collections.abc import Mapping diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 1bc71de546..b48fc1187e 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -17,6 +17,7 @@ **PRIVATE MODULE**: API reserved for setuptools internal usage only. """ + import ast import importlib import os diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index 379ef222f9..52040be49a 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -8,6 +8,7 @@ For simple scenarios, you can also try parsing the file directly with the help of ``tomllib`` or ``tomli``. """ + import logging import os from contextlib import contextmanager diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index 1a0e4154b9..44a2876c06 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -8,6 +8,7 @@ For simple scenarios, you can also try parsing the file directly with the help of ``configparser``. """ + import contextlib import functools import os @@ -694,9 +695,9 @@ def parse_section_packages__find(self, section_options): valid_keys = ['where', 'include', 'exclude'] - find_kwargs = dict( - [(k, v) for k, v in section_data.items() if k in valid_keys and v] - ) + find_kwargs = dict([ + (k, v) for k, v in section_data.items() if k in valid_keys and v + ]) where = find_kwargs.get('where') if where is not None: diff --git a/setuptools/dist.py b/setuptools/dist.py index 222e8a7623..c9c8c77515 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -87,7 +87,7 @@ def check_nsp(dist, attr, value): SetuptoolsDeprecationWarning.emit( "The namespace_packages parameter is deprecated.", "Please replace its usage with implicit namespaces (PEP 420).", - see_docs="references/keywords.html#keyword-namespace-packages" + see_docs="references/keywords.html#keyword-namespace-packages", # TODO: define due_date, it may break old packages that are no longer # maintained (e.g. sphinxcontrib extensions) when installed from source. 
# Warning officially introduced in May 2022, however the deprecation diff --git a/setuptools/glob.py b/setuptools/glob.py index 647b9bc6ed..8dbf34972d 100644 --- a/setuptools/glob.py +++ b/setuptools/glob.py @@ -160,7 +160,7 @@ def escape(pathname): # Metacharacters do not work in the drive part and shouldn't be escaped. drive, pathname = os.path.splitdrive(pathname) if isinstance(pathname, bytes): - pathname = magic_check_bytes.sub(br'[\1]', pathname) + pathname = magic_check_bytes.sub(rb'[\1]', pathname) else: pathname = magic_check.sub(r'[\1]', pathname) return drive + pathname diff --git a/setuptools/msvc.py b/setuptools/msvc.py index a910a64b68..be373d176e 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -93,21 +93,17 @@ def _msvc14_find_vc2017(): # Workaround for `-requiresAny` (only available on VS 2017 > 15.6) with contextlib.suppress(CalledProcessError, OSError, UnicodeDecodeError): path = ( - subprocess.check_output( - [ - join( - root, "Microsoft Visual Studio", "Installer", "vswhere.exe" - ), - "-latest", - "-prerelease", - "-requires", - component, - "-property", - "installationPath", - "-products", - "*", - ] - ) + subprocess.check_output([ + join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), + "-latest", + "-prerelease", + "-requires", + component, + "-property", + "installationPath", + "-products", + "*", + ]) .decode(encoding="mbcs", errors="strict") .strip() ) @@ -694,9 +690,9 @@ def find_programdata_vs_vers(self): listdir(join(vs_path, r'VC\Tools\MSVC')) # Store version and path - vs_versions[ - self._as_float_version(state['installationVersion']) - ] = vs_path + vs_versions[self._as_float_version(state['installationVersion'])] = ( + vs_path + ) except (OSError, KeyError): # Skip if "state.json" file is missing or bad format diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index 017c897b86..757074166a 100644 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -408,23 +408,21 @@ def _remap_pair(self, operation, src, dst, *args, **kw): class DirectorySandbox(AbstractSandbox): """Restrict operations to a single subdirectory - pseudo-chroot""" - write_ops = dict.fromkeys( - [ - "open", - "chmod", - "chown", - "mkdir", - "remove", - "unlink", - "rmdir", - "utime", - "lchown", - "chroot", - "mkfifo", - "mknod", - "tempnam", - ] - ) + write_ops = dict.fromkeys([ + "open", + "chmod", + "chown", + "mkdir", + "remove", + "unlink", + "rmdir", + "utime", + "lchown", + "chroot", + "mkfifo", + "mknod", + "tempnam", + ]) _exception_patterns = [] "exempt writing to paths that match the pattern" diff --git a/setuptools/tests/config/downloads/preload.py b/setuptools/tests/config/downloads/preload.py index 64b3f1c8d5..d559beff33 100644 --- a/setuptools/tests/config/downloads/preload.py +++ b/setuptools/tests/config/downloads/preload.py @@ -7,6 +7,7 @@ to make sure the `setup.cfg` examples are downloaded before starting the tests. 
""" + import sys from pathlib import Path diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index 7905aa9ab6..6935523987 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -3,6 +3,7 @@ To run these tests offline, please have a look on ``./downloads/preload.py`` """ + import io import re import tarfile diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py index 23fc0d0b47..d2bb1212dc 100644 --- a/setuptools/tests/config/test_setupcfg.py +++ b/setuptools/tests/config/test_setupcfg.py @@ -357,13 +357,11 @@ def test_usupported_section(self, tmpdir): dist.parse_config_files() def test_classifiers(self, tmpdir): - expected = set( - [ - 'Framework :: Django', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - ] - ) + expected = set([ + 'Framework :: Django', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + ]) # From file. _, config = fake_env(tmpdir, '[metadata]\n' 'classifiers = file: classifiers\n') @@ -488,15 +486,20 @@ def test_basic(self, tmpdir): assert dist.packages == ['pack_a', 'pack_b.subpack'] assert dist.namespace_packages == ['pack1', 'pack2'] assert dist.scripts == ['bin/one.py', 'bin/two.py'] - assert dist.dependency_links == ( - ['http://some.com/here/1', 'http://some.com/there/2'] - ) - assert dist.install_requires == ( - ['docutils>=0.3', 'pack==1.1,==1.3', 'hey'] - ) - assert dist.setup_requires == ( - ['docutils>=0.3', 'spack ==1.1, ==1.3', 'there'] - ) + assert dist.dependency_links == ([ + 'http://some.com/here/1', + 'http://some.com/there/2', + ]) + assert dist.install_requires == ([ + 'docutils>=0.3', + 'pack==1.1,==1.3', + 'hey', + ]) + assert dist.setup_requires == ([ + 'docutils>=0.3', + 'spack ==1.1, ==1.3', + 'there', + ]) assert dist.tests_require == ['mock==0.7.2', 'pytest'] assert dist.python_requires == '>=1.0, !=2.8' assert dist.py_modules == ['module1', 'module2'] @@ -541,15 +544,20 @@ def test_multiline(self, tmpdir): assert dist.packages == ['pack_a', 'pack_b.subpack'] assert dist.namespace_packages == ['pack1', 'pack2'] assert dist.scripts == ['bin/one.py', 'bin/two.py'] - assert dist.dependency_links == ( - ['http://some.com/here/1', 'http://some.com/there/2'] - ) - assert dist.install_requires == ( - ['docutils>=0.3', 'pack==1.1,==1.3', 'hey'] - ) - assert dist.setup_requires == ( - ['docutils>=0.3', 'spack ==1.1, ==1.3', 'there'] - ) + assert dist.dependency_links == ([ + 'http://some.com/here/1', + 'http://some.com/there/2', + ]) + assert dist.install_requires == ([ + 'docutils>=0.3', + 'pack==1.1,==1.3', + 'hey', + ]) + assert dist.setup_requires == ([ + 'docutils>=0.3', + 'spack ==1.1, ==1.3', + 'there', + ]) assert dist.tests_require == ['mock==0.7.2', 'pytest'] def test_package_dir_fail(self, tmpdir): @@ -593,9 +601,11 @@ def test_find_directive(self, tmpdir): dir_sub_two, _ = make_package_dir('sub_two', dir_package) with get_dist(tmpdir) as dist: - assert set(dist.packages) == set( - ['fake_package', 'fake_package.sub_two', 'fake_package.sub_one'] - ) + assert set(dist.packages) == set([ + 'fake_package', + 'fake_package.sub_two', + 'fake_package.sub_one', + ]) config.write( '[options]\n' diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py index c2dd9df2fb..629daf93d4 100644 --- a/setuptools/tests/fixtures.py +++ b/setuptools/tests/fixtures.py @@ -77,17 +77,15 @@ def 
setuptools_sdist(tmp_path_factory, request): if dist: return dist - subprocess.check_output( - [ - sys.executable, - "-m", - "build", - "--sdist", - "--outdir", - str(tmp), - str(request.config.rootdir), - ] - ) + subprocess.check_output([ + sys.executable, + "-m", + "build", + "--sdist", + "--outdir", + str(tmp), + str(request.config.rootdir), + ]) return next(tmp.glob("*.tar.gz")) @@ -104,17 +102,15 @@ def setuptools_wheel(tmp_path_factory, request): if dist: return dist - subprocess.check_output( - [ - sys.executable, - "-m", - "build", - "--wheel", - "--outdir", - str(tmp), - str(request.config.rootdir), - ] - ) + subprocess.check_output([ + sys.executable, + "-m", + "build", + "--wheel", + "--outdir", + str(tmp), + str(request.config.rootdir), + ]) return next(tmp.glob("*.whl")) diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py index d7d43bd606..824dfdfe1a 100644 --- a/setuptools/tests/integration/helpers.py +++ b/setuptools/tests/integration/helpers.py @@ -4,6 +4,7 @@ with setuptools, and ``run`` will always try to be as verbose as possible to facilitate debugging. """ + import os import subprocess import tarfile @@ -17,7 +18,7 @@ def run(cmd, env=None): stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, - env={**os.environ, **(env or {})} + env={**os.environ, **(env or {})}, # ^-- allow overwriting instead of discarding the current env ) diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py index e0e892cdb8..3467a5ec07 100644 --- a/setuptools/tests/integration/test_pip_install_sdist.py +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -10,6 +10,7 @@ and the associated maintenance cost (changes in the way these packages define their build process may require changes in the tests). 
""" + import json import os import shutil diff --git a/setuptools/tests/script-with-bom.py b/setuptools/tests/script-with-bom.py index 93d28f1600..c074d263c4 100644 --- a/setuptools/tests/script-with-bom.py +++ b/setuptools/tests/script-with-bom.py @@ -1 +1 @@ -result = 'passed' +result = 'passed' diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py index 61f4e9a4cb..9690e2bf50 100644 --- a/setuptools/tests/test_bdist_deprecations.py +++ b/setuptools/tests/test_bdist_deprecations.py @@ -1,5 +1,6 @@ """develop tests """ + import sys from unittest import mock diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index 45dd070967..8f11a51b2d 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -1,5 +1,6 @@ """develop tests """ + import os import re import zipfile diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py index 7fd09687e1..ed3bb6f665 100644 --- a/setuptools/tests/test_build_ext.py +++ b/setuptools/tests/test_build_ext.py @@ -98,13 +98,11 @@ def dist_with_example(self): ext3 = Extension("ext3", ["c-extension/ext3.c"]) path.build(files) - dist = Distribution( - { - "script_name": "%test%", - "ext_modules": [ext1, ext2, ext3], - "package_dir": {"": "src"}, - } - ) + dist = Distribution({ + "script_name": "%test%", + "ext_modules": [ext1, ext2, ext3], + "package_dir": {"": "src"}, + }) return dist def test_get_outputs(self, tmpdir_cwd, monkeypatch): diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py index fe9d4c5c31..6a52130112 100644 --- a/setuptools/tests/test_core_metadata.py +++ b/setuptools/tests/test_core_metadata.py @@ -334,7 +334,7 @@ def test_parity_with_metadata_from_pypa_wheel(tmp_path): ini2toml[lite]>=0.9 """, "other": [], - } + }, ) # Generate a PKG-INFO file using setuptools dist = Distribution(attrs) diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py index 0caaef6578..609932a9b3 100644 --- a/setuptools/tests/test_dist.py +++ b/setuptools/tests/test_dist.py @@ -31,11 +31,11 @@ def sdist_with_index(distname, version): with dist_dir.join('index.html').open('w') as fp: fp.write( DALS( - ''' + """ {dist_sdist}
- ''' + """ ).format(dist_sdist=dist_sdist) ) @@ -44,16 +44,16 @@ def sdist_with_index(distname, version): with tmpdir.join('setup.cfg').open('w') as fp: fp.write( DALS( - ''' + """ [easy_install] index_url = {index_url} - ''' + """ ).format(index_url=index_url) ) - reqs = ''' + reqs = """ barbazquux-runner barbazquux - '''.split() + """.split() with tmpdir.as_cwd(): dist = Distribution() dist.parse_config_files() diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py index a76dbeb3f2..dbc35ba0b7 100644 --- a/setuptools/tests/test_dist_info.py +++ b/setuptools/tests/test_dist_info.py @@ -1,5 +1,6 @@ """Test .dist-info style distributions. """ + import pathlib import re import shutil diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index d71e01586c..d0b95e09ea 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -294,9 +294,9 @@ def test_script_install(self, sdist_script, tmpdir, monkeypatch): @pytest.mark.filterwarnings('ignore:Unbuilt egg') class TestPTHFileWriter: def test_add_from_cwd_site_sets_dirty(self): - '''a pth file manager should set dirty + """a pth file manager should set dirty if a distribution is in site but also the cwd - ''' + """ pth = PthDistributions('does-not_exist', [os.getcwd()]) assert not pth.dirty pth.add(PRDistribution(os.getcwd())) @@ -742,9 +742,7 @@ def make_dependency_sdist(dist_path, distname, version): version={version!r}, py_modules=[{name!r}], ) - """.format( - name=distname, version=version - ) + """.format(name=distname, version=version) ), ), ( @@ -790,10 +788,10 @@ def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch): with open(test_setup_cfg, 'w') as fp: fp.write( DALS( - ''' + """ [easy_install] index_url = https://pypi.org/legacy/ - ''' + """ ) ) test_setup_py = os.path.join(test_pkg, 'setup.py') @@ -823,20 +821,20 @@ def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch): assert len(mock_index.requests) == 0 def test_setup_requires_with_allow_hosts(self, mock_index): - '''The `allow-hosts` option in not supported anymore.''' + """The `allow-hosts` option in not supported anymore.""" files = { 'test_pkg': { 'setup.py': DALS( - ''' + """ from setuptools import setup setup(setup_requires='python-xlib') - ''' + """ ), 'setup.cfg': DALS( - ''' + """ [easy_install] allow_hosts = * - ''' + """ ), } } @@ -849,7 +847,7 @@ def test_setup_requires_with_allow_hosts(self, mock_index): assert len(mock_index.requests) == 0 def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir): - '''Check `python_requires` is honored.''' + """Check `python_requires` is honored.""" monkeypatch.setenv(str('PIP_RETRIES'), str('0')) monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) monkeypatch.setenv(str('PIP_NO_INDEX'), str('1')) @@ -869,7 +867,7 @@ def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir): index = tmpdir / 'index.html' index.write_text( DALS( - ''' + """ Links for dep @@ -880,7 +878,7 @@ def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir): data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}
- ''' + """ ).format( dep_1_0_url=dep_1_0_url, dep_1_0_sdist=dep_1_0_sdist, @@ -928,21 +926,21 @@ def test_setup_requires_with_find_links_in_setup_cfg( dependency_links = [] fp.write( DALS( - ''' + """ from setuptools import installer, setup setup(setup_requires='python-xlib==42', dependency_links={dependency_links!r}) - ''' + """ ).format(dependency_links=dependency_links) ) with open(test_setup_cfg, 'w') as fp: fp.write( DALS( - ''' + """ [easy_install] index_url = {index_url} find_links = {find_links} - ''' + """ ).format( index_url=os.path.join(temp_dir, 'index'), find_links=temp_dir, @@ -951,11 +949,11 @@ def test_setup_requires_with_find_links_in_setup_cfg( run_setup(test_setup_py, [str('--version')]) def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch): - ''' + """ Use case: installing a package with a build dependency on an already installed `dep[extra]`, which in turn depends on `extra_dep` (whose is not already installed). - ''' + """ with contexts.save_pkg_resources_state(): with contexts.tempdir() as temp_dir: # Create source distribution for `extra_dep`. @@ -990,10 +988,10 @@ def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch): with open(test_setup_py, 'w') as fp: fp.write( DALS( - ''' + """ from setuptools import installer, setup setup(setup_requires='dep[extra]') - ''' + """ ) ) # Check... @@ -1004,10 +1002,10 @@ def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch): run_setup(test_setup_py, [str('--version')]) def test_setup_requires_with_distutils_command_dep(self, monkeypatch): - ''' + """ Use case: ensure build requirements' extras are properly installed and activated. - ''' + """ with contexts.save_pkg_resources_state(): with contexts.tempdir() as temp_dir: # Create source distribution for `extra_dep`. @@ -1074,10 +1072,10 @@ class epcmd(build_py): with open(test_setup_py, 'w') as fp: fp.write( DALS( - ''' + """ from setuptools import installer, setup setup(setup_requires='dep[extra]') - ''' + """ ) ) # Check... 
@@ -1395,7 +1393,7 @@ def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch): def test_editable_user_and_build_isolation(setup_context, monkeypatch, tmp_path): - '''`setup.py develop` should honor `--user` even under build isolation''' + """`setup.py develop` should honor `--user` even under build isolation""" # == Arrange == # Pretend that build isolation was enabled diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index eeffcf1962..43beb2954b 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -638,19 +638,19 @@ def test_case_sensitivity(self, tmp_path): sys.modules.pop("foo", None) self.install_finder(template) - with pytest.raises(ImportError, match="\'FOO\'"): + with pytest.raises(ImportError, match="'FOO'"): import_module("FOO") - with pytest.raises(ImportError, match="\'foo\\.LOWERCASE\'"): + with pytest.raises(ImportError, match="'foo\\.LOWERCASE'"): import_module("foo.LOWERCASE") - with pytest.raises(ImportError, match="\'foo\\.bar\\.Lowercase\'"): + with pytest.raises(ImportError, match="'foo\\.bar\\.Lowercase'"): import_module("foo.bar.Lowercase") - with pytest.raises(ImportError, match="\'foo\\.BAR\'"): + with pytest.raises(ImportError, match="'foo\\.BAR'"): import_module("foo.BAR.lowercase") - with pytest.raises(ImportError, match="\'FOO\'"): + with pytest.raises(ImportError, match="'FOO'"): import_module("FOO.bar.lowercase") mod = import_module("foo.lowercase") @@ -691,13 +691,13 @@ def test_namespace_case_sensitivity(self, tmp_path): bar = import_module("ns.othername.foo.bar") assert bar.c == 42 - with pytest.raises(ImportError, match="\'NS\'"): + with pytest.raises(ImportError, match="'NS'"): import_module("NS.othername.foo") - with pytest.raises(ImportError, match="\'ns\\.othername\\.FOO\\'"): + with pytest.raises(ImportError, match="'ns\\.othername\\.FOO\\'"): import_module("ns.othername.FOO") - with pytest.raises(ImportError, match="\'ns\\.othername\\.foo\\.BAR\\'"): + with pytest.raises(ImportError, match="'ns\\.othername\\.foo\\.BAR\\'"): import_module("ns.othername.foo.BAR") def test_intermediate_packages(self, tmp_path): diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py index 614fca7a23..af7d2f8295 100644 --- a/setuptools/tests/test_egg_info.py +++ b/setuptools/tests/test_egg_info.py @@ -37,19 +37,17 @@ def env(): subs = 'home', 'lib', 'scripts', 'data', 'egg-base' env.paths = dict((dirname, os.path.join(env_dir, dirname)) for dirname in subs) list(map(os.mkdir, env.paths.values())) - path.build( - { - env.paths['home']: { - '.pydistutils.cfg': DALS( - """ + path.build({ + env.paths['home']: { + '.pydistutils.cfg': DALS( + """ [egg_info] egg-base = %(egg-base)s """ - % env.paths - ) - } + % env.paths + ) } - ) + }) yield env @@ -68,17 +66,15 @@ class TestEggInfo: ) def _create_project(self): - path.build( - { - 'setup.py': self.setup_script, - 'hello.py': DALS( - """ + path.build({ + 'setup.py': self.setup_script, + 'hello.py': DALS( + """ def run(): print('hello') """ - ), - } - ) + ), + }) @staticmethod def _extract_mv_version(pkg_info_lines: List[str]) -> Tuple[int, int]: @@ -128,17 +124,15 @@ def test_egg_info_save_version_info_setup_defaults(self, tmpdir_cwd, env): the file should remain unchanged. 
""" setup_cfg = os.path.join(env.paths['home'], 'setup.cfg') - path.build( - { - setup_cfg: DALS( - """ + path.build({ + setup_cfg: DALS( + """ [egg_info] tag_build = tag_date = 0 """ - ), - } - ) + ), + }) dist = Distribution() ei = egg_info(dist) ei.initialize_options() @@ -207,12 +201,10 @@ def test_license_is_a_string(self, tmpdir_cwd, env): """ ) - path.build( - { - 'setup.py': setup_script, - 'setup.cfg': setup_config, - } - ) + path.build({ + 'setup.py': setup_script, + 'setup.cfg': setup_config, + }) # This command should fail with a ValueError, but because it's # currently configured to use a subprocess, the actual traceback @@ -245,18 +237,16 @@ def test_rebuilt(self, tmpdir_cwd, env): def test_manifest_template_is_read(self, tmpdir_cwd, env): self._create_project() - path.build( - { - 'MANIFEST.in': DALS( - """ + path.build({ + 'MANIFEST.in': DALS( + """ recursive-include docs *.rst """ - ), - 'docs': { - 'usage.rst': "Run 'hi'", - }, - } - ) + ), + 'docs': { + 'usage.rst': "Run 'hi'", + }, + }) self._run_egg_info_command(tmpdir_cwd, env) egg_info_dir = os.path.join('.', 'foo.egg-info') sources_txt = os.path.join(egg_info_dir, 'SOURCES.txt') @@ -264,23 +254,18 @@ def test_manifest_template_is_read(self, tmpdir_cwd, env): assert 'docs/usage.rst' in f.read().split('\n') def _setup_script_with_requires(self, requires, use_setup_cfg=False): - setup_script = ( - DALS( - ''' + setup_script = DALS( + """ from setuptools import setup setup(name='foo', zip_safe=False, %s) - ''' - ) - % ('' if use_setup_cfg else requires) - ) + """ + ) % ('' if use_setup_cfg else requires) setup_config = requires if use_setup_cfg else '' - path.build( - { - 'setup.py': setup_script, - 'setup.cfg': setup_config, - } - ) + path.build({ + 'setup.py': setup_script, + 'setup.cfg': setup_config, + }) mismatch_marker = "python_version<'{this_ver}'".format( this_ver=sys.version_info[0], @@ -343,7 +328,7 @@ def parametrize(*test_list, **format_dict): # requires block (when used in setup.cfg) # # expected contents of requires.txt - ''' + """ install_requires_deterministic install_requires=["wheel>=0.5", "pytest"] @@ -355,8 +340,8 @@ def parametrize(*test_list, **format_dict): wheel>=0.5 pytest - ''', - ''' + """, + """ install_requires_ordered install_requires=["pytest>=3.0.2,!=10.9999"] @@ -366,8 +351,8 @@ def parametrize(*test_list, **format_dict): pytest>=3.0.2,!=10.9999 pytest!=10.9999,>=3.0.2 - ''', - ''' + """, + """ install_requires_with_marker install_requires=["barbazquux;{mismatch_marker}"], @@ -378,8 +363,8 @@ def parametrize(*test_list, **format_dict): [:{mismatch_marker_alternate}] barbazquux - ''', - ''' + """, + """ install_requires_with_extra {'cmd': ['egg_info']} @@ -390,8 +375,8 @@ def parametrize(*test_list, **format_dict): barbazquux [test] barbazquux[test] - ''', - ''' + """, + """ install_requires_with_extra_and_marker install_requires=["barbazquux [test]; {mismatch_marker}"], @@ -402,8 +387,8 @@ def parametrize(*test_list, **format_dict): [:{mismatch_marker_alternate}] barbazquux[test] - ''', - ''' + """, + """ setup_requires_with_markers setup_requires=["barbazquux;{mismatch_marker}"], @@ -412,8 +397,8 @@ def parametrize(*test_list, **format_dict): setup_requires = barbazquux; {mismatch_marker} - ''', - ''' + """, + """ tests_require_with_markers {'cmd': ['test'], 'output': "Ran 0 tests in"} @@ -423,8 +408,8 @@ def parametrize(*test_list, **format_dict): tests_require = barbazquux; {mismatch_marker} - ''', - ''' + """, + """ extras_require_with_extra {'cmd': ['egg_info']} @@ -435,8 +420,8 @@ 
def parametrize(*test_list, **format_dict): [extra] barbazquux[test] - ''', - ''' + """, + """ extras_require_with_extra_and_marker_in_req extras_require={{"extra": ["barbazquux [test]; {mismatch_marker}"]}}, @@ -449,9 +434,9 @@ def parametrize(*test_list, **format_dict): [extra:{mismatch_marker_alternate}] barbazquux[test] - ''', + """, # FIXME: ConfigParser does not allow : in key names! - ''' + """ extras_require_with_marker extras_require={{":{mismatch_marker}": ["barbazquux"]}}, @@ -462,8 +447,8 @@ def parametrize(*test_list, **format_dict): [:{mismatch_marker}] barbazquux - ''', - ''' + """, + """ extras_require_with_marker_in_req extras_require={{"extra": ["barbazquux; {mismatch_marker}"]}}, @@ -476,8 +461,8 @@ def parametrize(*test_list, **format_dict): [extra:{mismatch_marker_alternate}] barbazquux - ''', - ''' + """, + """ extras_require_with_empty_section extras_require={{"empty": []}}, @@ -486,7 +471,7 @@ def parametrize(*test_list, **format_dict): empty = [empty] - ''', + """, # Format arguments. invalid_marker=invalid_marker, mismatch_marker=mismatch_marker, @@ -559,7 +544,7 @@ def test_provides_extra(self, tmpdir_cwd, env): def test_doesnt_provides_extra(self, tmpdir_cwd, env): self._setup_script_with_requires( - '''install_requires=["spam ; python_version<'3.6'"]''' + """install_requires=["spam ; python_version<'3.6'"]""" ) environ = os.environ.copy().update( HOME=env.paths['home'], @@ -788,7 +773,7 @@ def test_setup_cfg_license_file(self, tmpdir_cwd, env, files, license_in_sources ), 'MANIFEST.in': "exclude LICENSE-XYZ", 'LICENSE-ABC': "ABC license", - 'LICENSE-XYZ': "XYZ license" + 'LICENSE-XYZ': "XYZ license", # manifest is overwritten by license_files }, ['LICENSE-ABC', 'LICENSE-XYZ'], @@ -901,7 +886,7 @@ def test_setup_cfg_license_files( """ ), 'LICENSE-ABC': "ABC license", - 'LICENSE-XYZ': "XYZ license" + 'LICENSE-XYZ': "XYZ license", # license_file is still singular }, [], @@ -939,7 +924,7 @@ def test_setup_cfg_license_files( ), 'LICENSE-ABC': "ABC license", 'LICENSE-PQR': "PQR license", - 'LICENSE-XYZ': "XYZ license" + 'LICENSE-XYZ': "XYZ license", # duplicate license }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], @@ -957,7 +942,7 @@ def test_setup_cfg_license_files( ), 'LICENSE-ABC': "ABC license", 'LICENSE-PQR': "PQR license", - 'LICENSE-XYZ': "XYZ license" + 'LICENSE-XYZ': "XYZ license", # combined subset }, ['LICENSE-ABC', 'LICENSE-XYZ'], @@ -974,7 +959,7 @@ def test_setup_cfg_license_files( LICENSE-PQR """ ), - 'LICENSE-PQR': "Test license" + 'LICENSE-PQR': "Test license", # with invalid licenses }, ['LICENSE-PQR'], @@ -994,7 +979,7 @@ def test_setup_cfg_license_files( 'MANIFEST.in': "exclude LICENSE-ABC\nexclude LICENSE-PQR", 'LICENSE-ABC': "ABC license", 'LICENSE-PQR': "PQR license", - 'LICENSE-XYZ': "XYZ license" + 'LICENSE-XYZ': "XYZ license", # manifest is overwritten }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], @@ -1059,22 +1044,20 @@ def test_setup_cfg_license_file_license_files( def test_license_file_attr_pkg_info(self, tmpdir_cwd, env): """All matched license files should have a corresponding License-File.""" self._create_project() - path.build( - { - "setup.cfg": DALS( - """ + path.build({ + "setup.cfg": DALS( + """ [metadata] license_files = NOTICE* LICENSE* """ - ), - "LICENSE-ABC": "ABC license", - "LICENSE-XYZ": "XYZ license", - "NOTICE": "included", - "IGNORE": "not include", - } - ) + ), + "LICENSE-ABC": "ABC license", + "LICENSE-XYZ": "XYZ license", + "NOTICE": "included", + "IGNORE": "not include", + }) environment.run_setup_py( 
cmd=['egg_info'], @@ -1281,18 +1264,16 @@ def _run_egg_info_command(self, tmpdir_cwd, env, cmd=None, output=None): def test_egg_info_tag_only_once(self, tmpdir_cwd, env): self._create_project() - path.build( - { - 'setup.cfg': DALS( - """ + path.build({ + 'setup.cfg': DALS( + """ [egg_info] tag_build = dev tag_date = 0 tag_svn_revision = 0 """ - ), - } - ) + ), + }) self._run_egg_info_command(tmpdir_cwd, env) egg_info_dir = os.path.join('.', 'foo.egg-info') with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py index 92da882d09..7af5bb76cb 100644 --- a/setuptools/tests/test_find_packages.py +++ b/setuptools/tests/test_find_packages.py @@ -1,4 +1,5 @@ """Tests for automatic package discovery""" + import os import sys import shutil @@ -30,8 +31,7 @@ def can_symlink(): def has_symlink(): bad_symlink = ( # Windows symlink directory detection is broken on Python 3.2 - platform.system() == 'Windows' - and sys.version_info[:2] == (3, 2) + platform.system() == 'Windows' and sys.version_info[:2] == (3, 2) ) return can_symlink() and not bad_symlink diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py index dc7ff41edc..9d4b0edb61 100644 --- a/setuptools/tests/test_find_py_modules.py +++ b/setuptools/tests/test_find_py_modules.py @@ -1,4 +1,5 @@ """Tests for automatic discovery of modules""" + import os import pytest diff --git a/setuptools/tests/test_glob.py b/setuptools/tests/test_glob.py index 42b3c43a21..bdccba6c24 100644 --- a/setuptools/tests/test_glob.py +++ b/setuptools/tests/test_glob.py @@ -10,7 +10,7 @@ ('', b'', []), ('', '', []), ( - ''' + """ appveyor.yml CHANGES.rst LICENSE @@ -19,12 +19,12 @@ README.rst setup.cfg setup.py - ''', + """, '*.rst', ('CHANGES.rst', 'README.rst'), ), ( - ''' + """ appveyor.yml CHANGES.rst LICENSE @@ -33,7 +33,7 @@ README.rst setup.cfg setup.py - ''', + """, b'*.rst', (b'CHANGES.rst', b'README.rst'), ), diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py index 33b85d0644..33d3250893 100644 --- a/setuptools/tests/test_manifest.py +++ b/setuptools/tests/test_manifest.py @@ -256,44 +256,38 @@ def test_include_multiple(self): """Include with multiple patterns.""" ml = make_local_path self.make_manifest("include app/*.txt app/static/*") - files = default_files | set( - [ - ml('app/a.txt'), - ml('app/b.txt'), - ml('app/static/app.js'), - ml('app/static/app.js.map'), - ml('app/static/app.css'), - ml('app/static/app.css.map'), - ] - ) + files = default_files | set([ + ml('app/a.txt'), + ml('app/b.txt'), + ml('app/static/app.js'), + ml('app/static/app.js.map'), + ml('app/static/app.css'), + ml('app/static/app.css.map'), + ]) assert files == self.get_files() def test_graft(self): """Include the whole app/static/ directory.""" ml = make_local_path self.make_manifest("graft app/static") - files = default_files | set( - [ - ml('app/static/app.js'), - ml('app/static/app.js.map'), - ml('app/static/app.css'), - ml('app/static/app.css.map'), - ] - ) + files = default_files | set([ + ml('app/static/app.js'), + ml('app/static/app.js.map'), + ml('app/static/app.css'), + ml('app/static/app.css.map'), + ]) assert files == self.get_files() def test_graft_glob_syntax(self): """Include the whole app/static/ directory.""" ml = make_local_path self.make_manifest("graft */static") - files = default_files | set( - [ - ml('app/static/app.js'), - ml('app/static/app.js.map'), - ml('app/static/app.css'), - 
ml('app/static/app.css.map'), - ] - ) + files = default_files | set([ + ml('app/static/app.js'), + ml('app/static/app.js.map'), + ml('app/static/app.css'), + ml('app/static/app.css.map'), + ]) assert files == self.get_files() def test_graft_global_exclude(self): @@ -316,15 +310,13 @@ def test_global_include(self): global-include *.rst *.js *.css """ ) - files = default_files | set( - [ - '.hidden.rst', - 'testing.rst', - ml('app/c.rst'), - ml('app/static/app.js'), - ml('app/static/app.css'), - ] - ) + files = default_files | set([ + '.hidden.rst', + 'testing.rst', + ml('app/c.rst'), + ml('app/static/app.js'), + ml('app/static/app.css'), + ]) assert files == self.get_files() def test_graft_prune(self): @@ -384,27 +376,25 @@ def test_process_template_line(self): ml = make_local_path # simulated file list - self.make_files( - [ - 'foo.tmp', - 'ok', - 'xo', - 'four.txt', - 'buildout.cfg', - # filelist does not filter out VCS directories, - # it's sdist that does - ml('.hg/last-message.txt'), - ml('global/one.txt'), - ml('global/two.txt'), - ml('global/files.x'), - ml('global/here.tmp'), - ml('f/o/f.oo'), - ml('dir/graft-one'), - ml('dir/dir2/graft2'), - ml('dir3/ok'), - ml('dir3/sub/ok.txt'), - ] - ) + self.make_files([ + 'foo.tmp', + 'ok', + 'xo', + 'four.txt', + 'buildout.cfg', + # filelist does not filter out VCS directories, + # it's sdist that does + ml('.hg/last-message.txt'), + ml('global/one.txt'), + ml('global/two.txt'), + ml('global/files.x'), + ml('global/here.tmp'), + ml('f/o/f.oo'), + ml('dir/graft-one'), + ml('dir/dir2/graft2'), + ml('dir3/ok'), + ml('dir3/sub/ok.txt'), + ]) MANIFEST_IN = DALS( """\ diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py index 02870639d3..704a07f61d 100644 --- a/setuptools/tests/test_packageindex.py +++ b/setuptools/tests/test_packageindex.py @@ -21,9 +21,7 @@ def test_regex(self): Name (md5) - """.lstrip().format( - **locals() - ) + """.lstrip().format(**locals()) assert setuptools.package_index.PYPI_MD5.match(doc) def test_bad_url_bad_port(self): diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py index 4cbae2b679..4b3cfa9ad2 100644 --- a/setuptools/tests/test_sandbox.py +++ b/setuptools/tests/test_sandbox.py @@ -1,5 +1,6 @@ """develop tests """ + import os import types diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py index ead2f34e91..0adec70f04 100644 --- a/setuptools/tests/test_sdist.py +++ b/setuptools/tests/test_sdist.py @@ -765,14 +765,11 @@ def test_build_subcommand_source_files(self, source_dir): build.sub_commands = [*build.sub_commands, ("build_custom", None)] class build_custom(Command): - def initialize_options(self): - ... + def initialize_options(self): ... - def finalize_options(self): - ... + def finalize_options(self): ... - def run(self): - ... + def run(self): ... 
def get_source_files(self): return ['.myfile~'] diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py index d02993fd6b..b84702aa70 100644 --- a/setuptools/tests/test_virtualenv.py +++ b/setuptools/tests/test_virtualenv.py @@ -155,22 +155,20 @@ def sdist(distname, version): """, }} ) - '''.format( - dependency_links=dependency_links - ) + '''.format(dependency_links=dependency_links) ) ) with tmpdir.join('test.py').open('w') as fp: fp.write( DALS( - ''' + """ import foobar import bits import bobs import pieces open('success', 'w').close() - ''' + """ ) ) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index 114b2e91f7..03fb05d2f4 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -92,12 +92,12 @@ def build_wheel(extra_file_defs=None, **kwargs): file_defs = { 'setup.py': ( DALS( - ''' + """ # -*- coding: utf-8 -*- from setuptools import setup import setuptools setup(**%r) - ''' + """ ) % kwargs ).encode('utf-8'), @@ -184,14 +184,12 @@ def __repr__(self): setup_kwargs=dict( packages=['foo'], ), - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}.egg': { - 'EGG-INFO': ['PKG-INFO', 'RECORD', 'WHEEL', 'top_level.txt'], - 'foo': ['__init__.py'], - } + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': ['PKG-INFO', 'RECORD', 'WHEEL', 'top_level.txt'], + 'foo': ['__init__.py'], } - ), + }), ), dict( id='utf-8', @@ -203,28 +201,26 @@ def __repr__(self): id='data', file_defs={ 'data.txt': DALS( - ''' + """ Some data... - ''' + """ ), }, setup_kwargs=dict( data_files=[('data_dir', ['data.txt'])], ), - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}.egg': { - 'EGG-INFO': ['PKG-INFO', 'RECORD', 'WHEEL', 'top_level.txt'], - 'data_dir': ['data.txt'], - } + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': ['PKG-INFO', 'RECORD', 'WHEEL', 'top_level.txt'], + 'data_dir': ['data.txt'], } - ), + }), ), dict( id='extension', file_defs={ 'extension.c': DALS( - ''' + """ #include "Python.h" #if PY_MAJOR_VERSION >= 3 @@ -264,7 +260,7 @@ def __repr__(self): return module; #endif } - ''' + """ ), }, setup_kwargs=dict( @@ -274,140 +270,132 @@ def __repr__(self): ) ], ), - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}-{platform}.egg': [ - 'extension{shlib_ext}', - { - 'EGG-INFO': [ - 'PKG-INFO', - 'RECORD', - 'WHEEL', - 'top_level.txt', - ] - }, - ] - } - ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}-{platform}.egg': [ + 'extension{shlib_ext}', + { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + ] + }, + ] + }), ), dict( id='header', file_defs={ 'header.h': DALS( - ''' - ''' + """ + """ ), }, setup_kwargs=dict( headers=['header.h'], ), - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}.egg': [ - 'header.h', - { - 'EGG-INFO': [ - 'PKG-INFO', - 'RECORD', - 'WHEEL', - 'top_level.txt', - ] - }, - ] - } - ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': [ + 'header.h', + { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + ] + }, + ] + }), ), dict( id='script', file_defs={ 'script.py': DALS( - ''' + """ #/usr/bin/python print('hello world!') - ''' + """ ), 'script.sh': DALS( - ''' + """ #/bin/sh echo 'hello world!' 
- ''' + """ ), }, setup_kwargs=dict( scripts=['script.py', 'script.sh'], ), - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}.egg': { - 'EGG-INFO': [ - 'PKG-INFO', - 'RECORD', - 'WHEEL', - 'top_level.txt', - {'scripts': ['script.py', 'script.sh']}, - ] - } + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + {'scripts': ['script.py', 'script.sh']}, + ] } - ), + }), ), dict( id='requires1', install_requires='foobar==2.0', - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}.egg': { - 'EGG-INFO': [ - 'PKG-INFO', - 'RECORD', - 'WHEEL', - 'requires.txt', - 'top_level.txt', - ] - } + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'requires.txt', + 'top_level.txt', + ] } - ), + }), requires_txt=DALS( - ''' + """ foobar==2.0 - ''' + """ ), ), dict( id='requires2', - install_requires=''' + install_requires=""" bar foo<=2.0; %r in sys_platform - ''' + """ % sys.platform, requires_txt=DALS( - ''' + """ bar foo<=2.0 - ''' + """ ), ), dict( id='requires3', - install_requires=''' + install_requires=""" bar; %r != sys_platform - ''' + """ % sys.platform, ), dict( id='requires4', - install_requires=''' + install_requires=""" foo - ''', + """, extras_require={ 'extra': 'foobar>3', }, requires_txt=DALS( - ''' + """ foo [extra] foobar>3 - ''' + """ ), ), dict( @@ -416,29 +404,29 @@ def __repr__(self): 'extra': 'foobar; %r != sys_platform' % sys.platform, }, requires_txt=DALS( - ''' + """ [extra] - ''' + """ ), ), dict( id='requires_ensure_order', - install_requires=''' + install_requires=""" foo bar baz qux - ''', + """, extras_require={ - 'extra': ''' + 'extra': """ foobar>3 barbaz>4 bazqux>5 quxzap>6 - ''', + """, }, requires_txt=DALS( - ''' + """ foo bar baz @@ -449,7 +437,7 @@ def __repr__(self): barbaz>4 bazqux>5 quxzap>6 - ''' + """ ), ), dict( @@ -463,28 +451,26 @@ def __repr__(self): namespace_packages=['foo'], packages=['foo.bar'], ), - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}.egg': [ - 'foo-1.0-py{py_version}-nspkg.pth', - { - 'EGG-INFO': [ - 'PKG-INFO', - 'RECORD', - 'WHEEL', - 'namespace_packages.txt', - 'top_level.txt', - ] - }, - { - 'foo': [ - '__init__.py', - {'bar': ['__init__.py']}, - ] - }, - ] - } - ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': [ + 'foo-1.0-py{py_version}-nspkg.pth', + { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'namespace_packages.txt', + 'top_level.txt', + ] + }, + { + 'foo': [ + '__init__.py', + {'bar': ['__init__.py']}, + ] + }, + ] + }), ), dict( id='empty_namespace_package', @@ -499,27 +485,25 @@ def __repr__(self): namespace_packages=['foobar'], packages=['foobar'], ), - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}.egg': [ - 'foo-1.0-py{py_version}-nspkg.pth', - { - 'EGG-INFO': [ - 'PKG-INFO', - 'RECORD', - 'WHEEL', - 'namespace_packages.txt', - 'top_level.txt', - ] - }, - { - 'foobar': [ - '__init__.py', - ] - }, - ] - } - ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': [ + 'foo-1.0-py{py_version}-nspkg.pth', + { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'namespace_packages.txt', + 'top_level.txt', + ] + }, + { + 'foobar': [ + '__init__.py', + ] + }, + ] + }), ), dict( id='data_in_package', @@ -528,9 +512,9 @@ def __repr__(self): '__init__.py': '', 'data_dir': { 'data.txt': DALS( - ''' + """ Some data... 
- ''' + """ ), }, } @@ -539,26 +523,24 @@ def __repr__(self): packages=['foo'], data_files=[('foo/data_dir', ['foo/data_dir/data.txt'])], ), - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}.egg': { - 'EGG-INFO': [ - 'PKG-INFO', - 'RECORD', - 'WHEEL', - 'top_level.txt', - ], - 'foo': [ - '__init__.py', - { - 'data_dir': [ - 'data.txt', - ] - }, - ], - } + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + ], + 'foo': [ + '__init__.py', + { + 'data_dir': [ + 'data.txt', + ] + }, + ], } - ), + }), ), ) @@ -583,7 +565,7 @@ def test_wheel_install(params): install_requires=install_requires, extras_require=extras_require, extra_file_defs=file_defs, - **setup_kwargs + **setup_kwargs, ) as filename, tempdir() as install_dir: _check_wheel_install( filename, install_dir, install_tree, project_name, version, requires_txt @@ -641,12 +623,12 @@ def build_wheel(extra_file_defs=None, **kwargs): file_defs = { 'setup.py': ( DALS( - ''' + """ # -*- coding: utf-8 -*- from setuptools import setup import setuptools setup(**%r) - ''' + """ ) % kwargs ).encode('utf-8'), @@ -666,34 +648,32 @@ def build_wheel(extra_file_defs=None, **kwargs): id='script', file_defs={ 'script.py': DALS( - ''' + """ #/usr/bin/python print('hello world!') - ''' + """ ), 'script.sh': DALS( - ''' + """ #/bin/sh echo 'hello world!' - ''' + """ ), }, setup_kwargs=dict( scripts=['script.py', 'script.sh'], ), - install_tree=flatten_tree( - { - 'foo-1.0-py{py_version}.egg': { - 'EGG-INFO': [ - 'PKG-INFO', - 'RECORD', - 'WHEEL', - 'top_level.txt', - {'scripts': ['script.py', 'script.sh']}, - ] - } + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + {'scripts': ['script.py', 'script.sh']}, + ] } - ), + }), ) project_name = params.get('name', 'foo') @@ -708,7 +688,7 @@ def build_wheel(extra_file_defs=None, **kwargs): install_requires=[], extras_require={}, extra_file_defs=file_defs, - **setup_kwargs + **setup_kwargs, ) as filename, tempdir() as install_dir: _check_wheel_install( filename, install_dir, install_tree, project_name, version, None diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py index 4089634213..3f321386f1 100644 --- a/setuptools/tests/test_windows_wrappers.py +++ b/setuptools/tests/test_windows_wrappers.py @@ -11,6 +11,7 @@ the script they are to wrap and with the same name as the script they are to wrap. 
""" + import pathlib import sys import platform diff --git a/setuptools/wheel.py b/setuptools/wheel.py index c6eabddc1f..9861b5cf1c 100644 --- a/setuptools/wheel.py +++ b/setuptools/wheel.py @@ -38,7 +38,7 @@ def _get_supported_tags(): def unpack(src_dir, dst_dir): - '''Move everything under `src_dir` to `dst_dir`, and delete the former.''' + """Move everything under `src_dir` to `dst_dir`, and delete the former.""" for dirpath, dirnames, filenames in os.walk(src_dir): subdir = os.path.relpath(dirpath, src_dir) for f in filenames: @@ -83,7 +83,7 @@ def __init__(self, filename): setattr(self, k, v) def tags(self): - '''List tags (py_version, abi, platform) supported by this wheel.''' + """List tags (py_version, abi, platform) supported by this wheel.""" return itertools.product( self.py_version.split('.'), self.abi.split('.'), @@ -91,7 +91,7 @@ def tags(self): ) def is_compatible(self): - '''Is the wheel compatible with the current platform?''' + """Is the wheel compatible with the current platform?""" return next((True for t in self.tags() if t in _get_supported_tags()), False) def egg_name(self): @@ -115,7 +115,7 @@ def get_dist_info(self, zf): raise ValueError("unsupported wheel format. .dist-info not found") def install_as_egg(self, destination_eggdir): - '''Install wheel as an egg directory.''' + """Install wheel as an egg directory.""" with zipfile.ZipFile(self.filename) as zf: self._install_as_egg(destination_eggdir, zf) From a9c5dd5a4eab9f4132d62344cdbad24e077c650e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 24 Dec 2023 12:08:46 -0500 Subject: [PATCH 0207/1761] Remove sole entry for branches-ignore. Workaround for and closes jaraco/skeleton#103. --- .github/workflows/main.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a079bbfbe3..cf94f7d816 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -4,7 +4,8 @@ on: merge_group: push: branches-ignore: - - gh-readonly-queue/** # Temporary merge queue-related GH-made branches + # disabled for jaraco/skeleton#103 + # - gh-readonly-queue/** # Temporary merge queue-related GH-made branches pull_request: permissions: From db0d581685d4fc2a16d392d4dedffe622e9a355c Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 26 Dec 2023 15:58:23 +0100 Subject: [PATCH 0208/1761] =?UTF-8?q?ruff:=20extended-ignore=20=E2=86=92?= =?UTF-8?q?=20ignore=20(jaraco/skeleton#105)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Applies Repo-Review suggestion: RF201: Avoid using deprecated config settings extend-ignore deprecated, use ignore instead (identical) --- ruff.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ruff.toml b/ruff.toml index 7ed133b790..795cca162a 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,5 +1,5 @@ [lint] -extend-ignore = [ +ignore = [ # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules "W191", "E111", From f6d9e107365ca270ec843898c05bb8e43dc6987a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 2 Jan 2024 17:56:53 -0500 Subject: [PATCH 0209/1761] Bump year on badge --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 2fabcf3334..efabeee4f9 100644 --- a/README.rst +++ b/README.rst @@ -14,5 +14,5 @@ .. .. image:: https://readthedocs.org/projects/PROJECT_RTD/badge/?version=latest .. 
:target: https://PROJECT_RTD.readthedocs.io/en/latest/?badge=latest -.. image:: https://img.shields.io/badge/skeleton-2023-informational +.. image:: https://img.shields.io/badge/skeleton-2024-informational :target: https://blog.jaraco.com/skeleton From 25c1b27813eda67e9c48a622bbca99eaa24cd5d4 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 4 Jan 2024 12:13:39 +0000 Subject: [PATCH 0210/1761] Add regression test for pkg_resources._mac_vers --- pkg_resources/tests/test_pkg_resources.py | 28 +++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py index a05aeb2603..0dd9c3c105 100644 --- a/pkg_resources/tests/test_pkg_resources.py +++ b/pkg_resources/tests/test_pkg_resources.py @@ -1,9 +1,11 @@ +import builtins import sys import tempfile import os import zipfile import datetime import time +import plistlib import subprocess import stat import distutils.dist @@ -323,6 +325,32 @@ def test_dist_info_is_not_dir(tmp_path, only): assert not pkg_resources.dist_factory(str(tmp_path), str(dist_info), only) +@pytest.mark.skipif(sys.version_info >= (3, 9), reason="requires Python < 3.9") +@pytest.mark.filterwarnings("ignore::DeprecationWarning") +def test_macos_vers_fallback(monkeypatch, tmp_path): + """Regression test for pkg_resources._macos_vers""" + orig_open = builtins.open + + # Pretend we need to use the plist file + monkeypatch.setattr('platform.mac_ver', mock.Mock(return_value=('', (), ''))) + + # Create fake content for the fake plist file + with open(tmp_path / 'fake.plist', 'wb') as fake_file: + plistlib.dump({"ProductVersion": "11.4"}, fake_file) + + # Pretend the fake file exists + monkeypatch.setattr('os.path.exists', mock.Mock(return_value=True)) + + def fake_open(file, *args, **kwargs): + return orig_open(tmp_path / 'fake.plist', *args, **kwargs) + + # Ensure that the _macos_vers works correctly + with mock.patch('builtins.open', mock.Mock(side_effect=fake_open)) as m: + assert pkg_resources._macos_vers([]) == ["11", "4"] + + m.assert_called() + + class TestDeepVersionLookupDistutils: @pytest.fixture def env(self, tmpdir): From b3935708f1b8857c45bb63abac390b013c450694 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 3 Jan 2024 22:29:11 +0100 Subject: [PATCH 0211/1761] =?UTF-8?q?`plistlib.readPlist()`=20is=20missing?= =?UTF-8?q?=20from=20Python=20=E2=89=A5=203.9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This function has been deprecated since Python 3.4, replaced by `plistlib.load()`. 
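For illustration only (an editorial sketch, not part of this patch): the modern
API parses from an open binary file object, which is exactly the shape the new
code below takes; the plist path is the one the code already consults on macOS.

    import plistlib

    plist = '/System/Library/CoreServices/SystemVersion.plist'
    with open(plist, 'rb') as fh:        # plistlib.load() wants a binary file object
        content = plistlib.load(fh)      # the deprecated readPlist() took a path too
    version = content.get('ProductVersion', '')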
--- pkg_resources/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index ab6afe955d..584ce5bc4a 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -414,10 +414,10 @@ def _macos_vers(_cache=[]): if version == '': plist = '/System/Library/CoreServices/SystemVersion.plist' if os.path.exists(plist): - if hasattr(plistlib, 'readPlist'): - plist_content = plistlib.readPlist(plist) - if 'ProductVersion' in plist_content: - version = plist_content['ProductVersion'] + with open(plist, 'rb') as fh: + plist_content = plistlib.load(fh) + if 'ProductVersion' in plist_content: + version = plist_content['ProductVersion'] _cache.append(version.split('.')) return _cache[0] From 38fe69e54f30a7b67713ec19a0f856f433560147 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 4 Jan 2024 12:25:14 +0000 Subject: [PATCH 0212/1761] Remove pytest marks no longer necessary in regression test --- pkg_resources/tests/test_pkg_resources.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py index 0dd9c3c105..77d650a7d0 100644 --- a/pkg_resources/tests/test_pkg_resources.py +++ b/pkg_resources/tests/test_pkg_resources.py @@ -325,8 +325,6 @@ def test_dist_info_is_not_dir(tmp_path, only): assert not pkg_resources.dist_factory(str(tmp_path), str(dist_info), only) -@pytest.mark.skipif(sys.version_info >= (3, 9), reason="requires Python < 3.9") -@pytest.mark.filterwarnings("ignore::DeprecationWarning") def test_macos_vers_fallback(monkeypatch, tmp_path): """Regression test for pkg_resources._macos_vers""" orig_open = builtins.open From c534f2a4ecb90eda92dde7055d6b52618c75c5a6 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 5 Jan 2024 13:13:48 +0000 Subject: [PATCH 0213/1761] Update README removing broken link to PyPUG page --- README.rst | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index c7387deb9f..f9f497d462 100644 --- a/README.rst +++ b/README.rst @@ -27,10 +27,9 @@ :target: https://discord.com/channels/803025117553754132/815945031150993468 :alt: Discord -See the `Installation Instructions -`_ in the Python Packaging -User's Guide for instructions on installing, upgrading, and uninstalling -Setuptools. +See the `Quickstart `_ +and the `User's Guide `_ for +instructions on how to use Setuptools. Questions and comments should be directed to `GitHub Discussions `_. From 0ff881e5516aad02bfb2351639853d050fa60335 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:22:44 +0100 Subject: [PATCH 0214/1761] Fix flake8-bugbear warning B015 Pointless comparison. Did you mean to assign a value? Otherwise, prepend `assert` or remove it. 
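A minimal sketch of what B015 flags (hypothetical names, for illustration only;
the real fixes are in the diffs below):

    files = ['a.txt', 'b.txt']
    'c.txt' not in files           # B015: the boolean result is silently discarded
    assert 'c.txt' not in files    # prepending assert makes the check effective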
--- setuptools/tests/test_sdist.py | 2 +- setuptools/tests/test_setuptools.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py index 0adec70f04..5d597709ed 100644 --- a/setuptools/tests/test_sdist.py +++ b/setuptools/tests/test_sdist.py @@ -658,7 +658,7 @@ def test_sdist_with_latin1_encoded_filename(self): else: # The Latin-1 filename should have been skipped filename = filename.decode('latin-1') - filename not in cmd.filelist.files + assert filename not in cmd.filelist.files _EXAMPLE_DIRECTIVES = { "setup.cfg - long_description and version": """ diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py index 1ca5523d20..0dc4769b93 100644 --- a/setuptools/tests/test_setuptools.py +++ b/setuptools/tests/test_setuptools.py @@ -61,13 +61,13 @@ def f1(): assert dep.extract_constant(fc, 'q', -1) is None # constant assigned - dep.extract_constant(fc, 'x', -1) == "test" + assert dep.extract_constant(fc, 'x', -1) == "test" # expression assigned - dep.extract_constant(fc, 'y', -1) == -1 + assert dep.extract_constant(fc, 'y', -1) == -1 # recognized name, not assigned - dep.extract_constant(fc, 'z', -1) is None + assert dep.extract_constant(fc, 'z', -1) is None def testFindModule(self): with pytest.raises(ImportError): From b320f39ff24dbf02eee2d58dc39a8328ea5b8982 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:26:46 +0100 Subject: [PATCH 0215/1761] Fix flake8-bugbear warning B010 Do not call `setattr` with a constant attribute value. It is not any safer than normal property access. --- setuptools/command/sdist.py | 2 +- setuptools/tests/config/test_setupcfg.py | 16 ++++++---------- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 5f45fb5dee..f0ffeba876 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -106,7 +106,7 @@ class NoValue: yield finally: if orig_val is not NoValue: - setattr(os, 'link', orig_val) + os.link = orig_val def add_defaults(self): super().add_defaults() diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py index d2bb1212dc..d235478f7e 100644 --- a/setuptools/tests/config/test_setupcfg.py +++ b/setuptools/tests/config/test_setupcfg.py @@ -957,17 +957,13 @@ class TestExternalSetters: # pbr or something else setting these values. def _fake_distribution_init(self, dist, attrs): saved_dist_init(dist, attrs) - # see self._DISTUTUILS_UNSUPPORTED_METADATA - setattr(dist.metadata, 'long_description_content_type', 'text/something') + # see self._DISTUTILS_UNSUPPORTED_METADATA + dist.metadata.long_description_content_type = 'text/something' # Test overwrite setup() args - setattr( - dist.metadata, - 'project_urls', - { - 'Link One': 'https://example.com/one/', - 'Link Two': 'https://example.com/two/', - }, - ) + dist.metadata.project_urls = { + 'Link One': 'https://example.com/one/', + 'Link Two': 'https://example.com/two/', + } return None @patch.object(_Distribution, '__init__', autospec=True) From 4a351f5ad383029c83437355c6f04b62ce7e6a22 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:59:40 +0100 Subject: [PATCH 0216/1761] Fix flake8-bugbear warning B009 Do not call `getattr` with a constant attribute value. It is not any safer than normal property access. 
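A minimal sketch of the equivalence (illustrative only; the real call site is the
py_limited_api access in the diff below):

    class Extension:
        py_limited_api = True

    ext = Extension()
    use_abi3 = getattr(ext, 'py_limited_api')  # B009: constant attribute name
    use_abi3 = ext.py_limited_api              # identical, plain attribute access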
--- setuptools/command/build_ext.py | 2 +- setuptools/monkey.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 2e1954ab0f..ef2a4da84d 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -157,7 +157,7 @@ def get_ext_filename(self, fullname): if fullname in self.ext_map: ext = self.ext_map[fullname] - use_abi3 = getattr(ext, 'py_limited_api') and get_abi3_suffix() + use_abi3 = ext.py_limited_api and get_abi3_suffix() if use_abi3: filename = filename[: -len(so_ext)] so_ext = get_abi3_suffix() diff --git a/setuptools/monkey.py b/setuptools/monkey.py index 6c8a2f12f6..3d5edb6bd2 100644 --- a/setuptools/monkey.py +++ b/setuptools/monkey.py @@ -130,7 +130,7 @@ def patch_func(replacement, target_mod, func_name): def get_unpatched_function(candidate): - return getattr(candidate, 'unpatched') + return candidate.unpatched def patch_for_msvc_specialized_compiler(): From ca04bfc413135d5b5e774bc8c62ace8c9845889a Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 19:05:32 +0100 Subject: [PATCH 0217/1761] Fix flake8-bugbear warning B033 Sets should not contain duplicate item `"anyOf"` --- setuptools/config/_validate_pyproject/error_reporting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/config/_validate_pyproject/error_reporting.py b/setuptools/config/_validate_pyproject/error_reporting.py index f78e4838fb..b6ce79f851 100644 --- a/setuptools/config/_validate_pyproject/error_reporting.py +++ b/setuptools/config/_validate_pyproject/error_reporting.py @@ -24,7 +24,7 @@ "must not be there", ) -_NEED_DETAILS = {"anyOf", "oneOf", "anyOf", "contains", "propertyNames", "not", "items"} +_NEED_DETAILS = {"anyOf", "oneOf", "contains", "propertyNames", "not", "items"} _CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)") _IDENTIFIER = re.compile(r"^[\w_]+$", re.I) From f57229617de5a49750355e2ac55beaa71c266e1a Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 19:35:46 +0100 Subject: [PATCH 0218/1761] Fix flake8-bugbear warning B006 Do not use mutable data structures for argument defaults --- pkg_resources/__init__.py | 7 ++++--- setuptools/command/build_py.py | 2 +- setuptools/command/test.py | 2 +- setuptools/tests/test_build_meta.py | 4 ++-- setuptools/tests/test_easy_install.py | 5 +++-- 5 files changed, 11 insertions(+), 9 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 584ce5bc4a..7ce314d9cc 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -407,7 +407,7 @@ def get_provider(moduleOrReq): return _find_adapter(_provider_factories, loader)(module) -def _macos_vers(_cache=[]): +def _macos_vers(_cache=None): if not _cache: version = platform.mac_ver()[0] # fallback for MacPorts @@ -419,7 +419,7 @@ def _macos_vers(_cache=[]): if 'ProductVersion' in plist_content: version = plist_content['ProductVersion'] - _cache.append(version.split('.')) + _cache = [version.split('.')] return _cache[0] @@ -2422,7 +2422,8 @@ def _cygwin_patch(filename): # pragma: nocover return os.path.abspath(filename) if sys.platform == 'cygwin' else filename -def _normalize_cached(filename, _cache={}): +def _normalize_cached(filename, _cache=None): + _cache = _cache or {} try: return _cache[filename] except KeyError: diff --git a/setuptools/command/build_py.py 
b/setuptools/command/build_py.py index cbdd05aab0..3f40b060b3 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -288,7 +288,7 @@ def exclude_data_files(self, package, src_dir, files): return list(unique_everseen(keepers)) @staticmethod - def _get_platform_patterns(spec, package, src_dir, extra_patterns=[]): + def _get_platform_patterns(spec, package, src_dir, extra_patterns=()): """ yield platform-specific path patterns (suitable for glob or fn_match) from a glob-based spec (such as diff --git a/setuptools/command/test.py b/setuptools/command/test.py index 5fce6660c0..0a128f2a7a 100644 --- a/setuptools/command/test.py +++ b/setuptools/command/test.py @@ -132,7 +132,7 @@ def with_project_on_sys_path(self, func): func() @contextlib.contextmanager - def project_on_sys_path(self, include_dists=[]): + def project_on_sys_path(self, include_dists=()): self.run_command('egg_info') # Build extensions in-place diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 429533229d..696e22c956 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -30,9 +30,9 @@ class BuildBackendBase: - def __init__(self, cwd='.', env={}, backend_name='setuptools.build_meta'): + def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'): self.cwd = cwd - self.env = env + self.env = env or {} self.backend_name = backend_name diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index d0b95e09ea..e0ea1e8c99 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -1195,7 +1195,7 @@ def create_setup_requires_package( version='0.1', make_package=make_trivial_sdist, setup_py_template=None, - setup_attrs={}, + setup_attrs=None, use_setup_cfg=(), ): """Creates a source tree under path for a trivial test package that has a @@ -1213,7 +1213,8 @@ def create_setup_requires_package( 'setup_requires': ['%s==%s' % (distname, version)], 'dependency_links': [os.path.abspath(path)], } - test_setup_attrs.update(setup_attrs) + if setup_attrs: + test_setup_attrs.update(setup_attrs) test_pkg = os.path.join(path, 'test_pkg') os.mkdir(test_pkg) From 5a17b0e6f111861fce31c88f41ed5c0cce6b58aa Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 19:58:01 +0100 Subject: [PATCH 0219/1761] Fix flake8-bugbear warning B904 Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling --- setuptools/build_meta.py | 4 ++-- setuptools/command/egg_info.py | 4 ++-- setuptools/config/setupcfg.py | 8 ++++---- tools/build_launchers.py | 4 ++-- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index 80ccceff3c..b656f47950 100644 --- a/setuptools/build_meta.py +++ b/setuptools/build_meta.py @@ -117,11 +117,11 @@ def _file_with_extension(directory, extension): matching = (f for f in os.listdir(directory) if f.endswith(extension)) try: (file,) = matching - except ValueError: + except ValueError as e: raise ValueError( 'No distribution was found. Ensure that `setup.py` ' 'is not empty and that it calls `setup()`.' 
-        )
+        ) from e
     return file
 
 
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 7169f33535..286e97a6b3 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -384,10 +384,10 @@ def process_template_line(self, line):
 
         try:
             process_action = action_map[action]
-        except KeyError:
+        except KeyError as e:
             raise DistutilsInternalError(
                 "this cannot happen: invalid action '{action!s}'".format(action=action),
-            )
+            ) from e
 
         # OK, now we know that the action is valid and we have the
         # right number of words on the line for that action -- so we
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 44a2876c06..a7f02714cb 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -283,8 +283,8 @@ def __setitem__(self, option_name, value):
 
         try:
             current_value = getattr(target_obj, option_name)
-        except AttributeError:
-            raise KeyError(option_name)
+        except AttributeError as e:
+            raise KeyError(option_name) from e
 
         if current_value:
             # Already inhabited. Skipping.
@@ -582,11 +582,11 @@ def _parse_version(self, value):
         # accidentally include newlines and other unintended content
         try:
             Version(version)
-        except InvalidVersion:
+        except InvalidVersion as e:
             raise OptionError(
                 f'Version loaded from {value} does not '
                 f'comply with PEP 440: {version}'
-            )
+            ) from e
 
         return version
 
diff --git a/tools/build_launchers.py b/tools/build_launchers.py
index 8d832b9c24..c673445365 100644
--- a/tools/build_launchers.py
+++ b/tools/build_launchers.py
@@ -118,8 +118,8 @@ def get_msbuild():
     ]
     try:
         return subprocess.check_output(cmd, encoding='utf-8', text=True).strip()
-    except subprocess.CalledProcessError:
-        raise SystemExit("Unable to find MSBuild; check Visual Studio install")
+    except subprocess.CalledProcessError as e:
+        raise SystemExit("Unable to find MSBuild; check Visual Studio install") from e
 
 
 def do_build(arena, platform, target):

From 17655dce0cfa5581fa653bce1f73ad9d024b30e4 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 3 Jan 2024 16:18:40 +0100
Subject: [PATCH 0220/1761] Update setuptools/monkey.py

Co-authored-by: Anderson Bravalheri
---
 setuptools/monkey.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index 3d5edb6bd2..f3148e6832 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -130,7 +130,7 @@ def patch_func(replacement, target_mod, func_name):
 
 
 def get_unpatched_function(candidate):
-    return candidate.unpatched
+    return getattr(candidate, 'unpatched', None)
 
 
 def patch_for_msvc_specialized_compiler():

From d988bc590b2ab7de9ef65025e7557c0307d97956 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 3 Jan 2024 17:40:12 +0100
Subject: [PATCH 0221/1761] Better error message in setuptools/command/egg_info.py

Co-authored-by: Anderson Bravalheri
---
 setuptools/command/egg_info.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 286e97a6b3..ff1864241a 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -386,7 +386,7 @@ def process_template_line(self, line):
             process_action = action_map[action]
         except KeyError as e:
             raise DistutilsInternalError(
-                "this cannot happen: invalid action '{action!s}'".format(action=action),
+                f"Invalid MANIFEST.in: unknown action {action!r}
in {line!r}"
             ) from e
 
         # OK, now we know that the action is valid and we have the
         # right number of words on the line for that action -- so we

From 10a21ec7887f32bb666d801be851ad958aeb34f9 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 3 Jan 2024 17:40:36 +0100
Subject: [PATCH 0222/1761] Update setuptools/config/_validate_pyproject/error_reporting.py

Co-authored-by: Anderson Bravalheri
---
 setuptools/config/_validate_pyproject/error_reporting.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/config/_validate_pyproject/error_reporting.py b/setuptools/config/_validate_pyproject/error_reporting.py
index b6ce79f851..d44e290e36 100644
--- a/setuptools/config/_validate_pyproject/error_reporting.py
+++ b/setuptools/config/_validate_pyproject/error_reporting.py
@@ -24,7 +24,7 @@
     "must not be there",
 )
 
-_NEED_DETAILS = {"anyOf", "oneOf", "contains", "propertyNames", "not", "items"}
+_NEED_DETAILS = {"anyOf", "oneOf", "allOf", "contains", "propertyNames", "not", "items"}
 
 _CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)")
 _IDENTIFIER = re.compile(r"^[\w_]+$", re.I)

From f7b7dfabf42486e51478b095ea1a85ab95e9b27c Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 3 Jan 2024 22:23:41 +0100
Subject: [PATCH 0223/1761] Proper fix for flake8-bugbear warning B006

The whole idea was to cache the result of a couple of functions; do that
explicitly with @functools.cache.
---
 pkg_resources/__init__.py | 36 +++++++++++++++---------------------
 1 file changed, 15 insertions(+), 21 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 7ce314d9cc..db7a38981e 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -407,20 +407,18 @@ def get_provider(moduleOrReq):
     return _find_adapter(_provider_factories, loader)(module)
 
 
-def _macos_vers(_cache=None):
-    if not _cache:
-        version = platform.mac_ver()[0]
-        # fallback for MacPorts
-        if version == '':
-            plist = '/System/Library/CoreServices/SystemVersion.plist'
-            if os.path.exists(plist):
-                with open(plist, 'rb') as fh:
-                    plist_content = plistlib.load(fh)
-                if 'ProductVersion' in plist_content:
-                    version = plist_content['ProductVersion']
-
-        _cache = [version.split('.')]
-    return _cache[0]
+@functools.cache
+def _macos_vers():
+    version = platform.mac_ver()[0]
+    # fallback for MacPorts
+    if version == '':
+        plist = '/System/Library/CoreServices/SystemVersion.plist'
+        if os.path.exists(plist):
+            with open(plist, 'rb') as fh:
+                plist_content = plistlib.load(fh)
+            if 'ProductVersion' in plist_content:
+                version = plist_content['ProductVersion']
+    return version.split('.')
 
 
 def _macos_arch(machine):
@@ -2422,13 +2420,9 @@ def _cygwin_patch(filename):  # pragma: nocover
     return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
 
 
-def _normalize_cached(filename, _cache=None):
-    _cache = _cache or {}
-    try:
-        return _cache[filename]
-    except KeyError:
-        _cache[filename] = result = normalize_path(filename)
-        return result
+@functools.cache
+def _normalize_cached(filename):
+    return normalize_path(filename)
 
 
 def _is_egg_path(path):

From 111d4ad94d6ca456cfae579dc16fe619cf792644 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 3 Jan 2024 23:02:02 +0100
Subject: [PATCH 0224/1761] Fix refurb warning FURB140

Use `itertools.starmap` instead of the generator
---
 setuptools/command/editable_wheel.py | 4 ++--
 1 file changed, 2 insertions(+), 2
deletions(-)

diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 73fa9fff52..76b7606ffc 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -19,7 +19,7 @@
 from contextlib import suppress
 from enum import Enum
 from inspect import cleandoc
-from itertools import chain
+from itertools import chain, starmap
 from pathlib import Path
 from tempfile import TemporaryDirectory
 from typing import (
@@ -606,7 +606,7 @@ def _simple_layout(
     layout = {pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages}
     if not layout:
         return set(package_dir) in ({}, {""})
-    parent = os.path.commonpath([_parent_path(k, v) for k, v in layout.items()])
+    parent = os.path.commonpath([*starmap(_parent_path, layout.items())])
    return all(
         _path.same_path(Path(parent, *key.split('.')), value)
         for key, value in layout.items()

From 88df648f41ff712ab17ef575886f1232fca572ce Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 3 Jan 2024 23:04:10 +0100
Subject: [PATCH 0225/1761] Fix refurb warning FURB171

Membership test against single-item container
---
 setuptools/tests/test_msvc14.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_msvc14.py b/setuptools/tests/test_msvc14.py
index 619293cada..4b8344539f 100644
--- a/setuptools/tests/test_msvc14.py
+++ b/setuptools/tests/test_msvc14.py
@@ -57,7 +57,7 @@ def test_get_vc2017(self):
         # This function cannot be mocked, so pass it if we find VS 2017
         # and mark it skipped if we do not.
         version, path = _msvccompiler._msvc14_find_vc2017()
-        if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') in ['Visual Studio 2017']:
+        if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') == 'Visual Studio 2017':
             assert version
         if version:
             assert version >= 15

From 6020b7fd73100d50b0d4364e8715fbc53e5bee61 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Fri, 5 Jan 2024 14:47:08 +0000
Subject: [PATCH 0226/1761] Ensure test does not fail due to functools.lru_cache

---
 pkg_resources/tests/test_pkg_resources.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
@@ -344,7 +344,9 @@ def fake_open(file, *args, **kwargs):
 
     # Ensure that the _macos_vers works correctly
     with mock.patch('builtins.open', mock.Mock(side_effect=fake_open)) as m:
-        assert pkg_resources._macos_vers([]) == ["11", "4"]
+        pkg_resources._macos_vers.cache_clear()
+        assert pkg_resources._macos_vers() == ["11", "4"]
+        pkg_resources._macos_vers.cache_clear()
 
     m.assert_called()

From 0f08db8eaecb9ebcd5c6db7129e82f714ca4770f Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 22 Nov 2023 01:31:37 +0100
Subject: [PATCH 0227/1761] Add pyupgrade rules to ruff config

---
 ruff.toml | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/ruff.toml b/ruff.toml
index 795cca162a..791cfdc0a5 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -16,7 +16,25 @@ ignore = [
     "ISC001",
     "ISC002",
 ]
+extend-select = [
+    "UP", # pyupgrade
+]
+extend-ignore = [
+    "UP015", # redundant-open-modes, explicit is preferred
+    "UP030", # temporarily disabled
+    "UP031", # temporarily disabled
+    "UP032", # temporarily disabled
+    "UP036", # temporarily disabled
+]
+extend-exclude = [ + "**/_vendor", + "setuptools/_distutils", +] [format] +extend-exclude = [ + "**/_vendor", + "setuptools/_distutils", +] # https://docs.astral.sh/ruff/settings/#format-quote-style quote-style = "preserve" From ac3bf62dbca0d2284b990de7ae3cf02001f54542 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Mon, 20 Nov 2023 22:42:34 +0100 Subject: [PATCH 0228/1761] "yield from", instead of "yield" in a loop This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#yield--yield-from --- setuptools/command/bdist_egg.py | 9 +++------ setuptools/command/easy_install.py | 6 ++---- setuptools/command/sdist.py | 3 +-- setuptools/dist.py | 6 ++---- setuptools/glob.py | 3 +-- setuptools/package_index.py | 8 +++----- 6 files changed, 12 insertions(+), 23 deletions(-) diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index e0947c6624..a4b683f66e 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -321,8 +321,7 @@ def walk_egg(egg_dir): if 'EGG-INFO' in dirs: dirs.remove('EGG-INFO') yield base, dirs, files - for bdf in walker: - yield bdf + yield from walker def analyze_egg(egg_dir, stubs): @@ -406,14 +405,12 @@ def scan_module(egg_dir, base, name, stubs): def iter_symbols(code): """Yield names and strings used by `code` and its nested code objects""" - for name in code.co_names: - yield name + yield from code.co_names for const in code.co_consts: if isinstance(const, str): yield const elif isinstance(const, CodeType): - for name in iter_symbols(const): - yield name + yield from iter_symbols(const) def can_scan(): diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 897ec6ad9b..3231650c5c 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -1743,8 +1743,7 @@ class RewritePthDistributions(PthDistributions): @classmethod def _wrap_lines(cls, lines): yield cls.prelude - for line in lines: - yield line + yield from lines yield cls.postlude prelude = _one_liner( @@ -2180,8 +2179,7 @@ def get_args(cls, dist, header=None): cls._ensure_safe_name(name) script_text = cls.template % locals() args = cls._get_script_args(type_, name, header, script_text) - for res in args: - yield res + yield from args @staticmethod def _ensure_safe_name(name): diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 5f45fb5dee..95d325717a 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -14,8 +14,7 @@ def walk_revctrl(dirname=''): """Find all files under revision control""" for ep in metadata.entry_points(group='setuptools.file_finders'): - for item in ep.load()(dirname): - yield item + yield from ep.load()(dirname) class sdist(orig.sdist): diff --git a/setuptools/dist.py b/setuptools/dist.py index c9c8c77515..0d35583dbc 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -912,11 +912,9 @@ def get_cmdline_options(self): def iter_distribution_names(self): """Yield all packages, modules, and extension names in distribution""" - for pkg in self.packages or (): - yield pkg + yield from self.packages or () - for module in self.py_modules or (): - yield module + yield from self.py_modules or () for ext in self.ext_modules or (): if isinstance(ext, tuple): diff --git a/setuptools/glob.py b/setuptools/glob.py index 8dbf34972d..ac901d9253 100644 --- a/setuptools/glob.py +++ b/setuptools/glob.py @@ -113,8 +113,7 @@ def glob0(dirname, basename): def 
glob2(dirname, pattern): assert _isrecursive(pattern) yield pattern[:0] - for x in _rlistdir(dirname): - yield x + yield from _rlistdir(dirname) # Recursively yields relative pathnames inside a literal directory. diff --git a/setuptools/package_index.py b/setuptools/package_index.py index 3cedd5105c..c491ddb3cc 100644 --- a/setuptools/package_index.py +++ b/setuptools/package_index.py @@ -112,15 +112,13 @@ def egg_info_for_url(url): def distros_for_url(url, metadata=None): """Yield egg or source distribution objects that might be found at a URL""" base, fragment = egg_info_for_url(url) - for dist in distros_for_location(url, base, metadata): - yield dist + yield from distros_for_location(url, base, metadata) if fragment: match = EGG_FRAGMENT.match(fragment) if match: - for dist in interpret_distro_name( + yield from interpret_distro_name( url, match.group(1), metadata, precedence=CHECKOUT_DIST - ): - yield dist + ) def distros_for_location(location, basename, metadata=None): From af41360fab5256b7d59118fdcda9b3246e000d02 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Mon, 20 Nov 2023 22:44:03 +0100 Subject: [PATCH 0229/1761] =?UTF-8?q?io.open()=20=E2=86=92=20open()?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In Python 3, io.open() is an alias for the builtin open() function. https://docs.python.org/3/library/io.html#io.open This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#open-alias --- setuptools/msvc.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setuptools/msvc.py b/setuptools/msvc.py index be373d176e..aa69db5810 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -12,7 +12,6 @@ """ import json -from io import open from os import listdir, pathsep from os.path import join, isfile, isdir, dirname from subprocess import CalledProcessError From bba79a0a3f1dd450eca23192810acf12d0f758c0 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Mon, 20 Nov 2023 22:48:49 +0100 Subject: [PATCH 0230/1761] Simplify super() calls See PEP 3135: https://peps.python.org/pep-3135/ This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#super-calls --- pkg_resources/__init__.py | 4 ++-- setuptools/build_meta.py | 2 +- setuptools/package_index.py | 2 +- setuptools/tests/test_build_meta.py | 4 ++-- setuptools/tests/test_manifest.py | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index ab6afe955d..9adc214676 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -2901,7 +2901,7 @@ def __getattr__(self, attr): def __dir__(self): return list( - set(super(Distribution, self).__dir__()) + set(super().__dir__()) | set(attr for attr in self._provider.__dir__() if not attr.startswith('_')) ) @@ -3168,7 +3168,7 @@ class RequirementParseError(packaging.requirements.InvalidRequirement): class Requirement(packaging.requirements.Requirement): def __init__(self, requirement_string): """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - super(Requirement, self).__init__(requirement_string) + super().__init__(requirement_string) self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index 80ccceff3c..18ebb75c24 100644 --- a/setuptools/build_meta.py +++ 
b/setuptools/build_meta.py @@ -477,7 +477,7 @@ def run_setup(self, setup_script='setup.py'): sys.argv[0] = setup_script try: - super(_BuildMetaLegacyBackend, self).run_setup(setup_script=setup_script) + super().run_setup(setup_script=setup_script) finally: # While PEP 517 frontends should be calling each hook in a fresh # subprocess according to the standard (and thus it should not be diff --git a/setuptools/package_index.py b/setuptools/package_index.py index c491ddb3cc..1e535bc747 100644 --- a/setuptools/package_index.py +++ b/setuptools/package_index.py @@ -514,7 +514,7 @@ def obtain(self, requirement, installer=None): if dist in requirement: return dist self.debug("%s does not match %s", requirement, dist) - return super(PackageIndex, self).obtain(requirement, installer) + return super().obtain(requirement, installer) def check_hash(self, checker, filename, tfp): """ diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 429533229d..1281eb52a2 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -40,7 +40,7 @@ class BuildBackend(BuildBackendBase): """PEP 517 Build Backend""" def __init__(self, *args, **kwargs): - super(BuildBackend, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.pool = futures.ProcessPoolExecutor(max_workers=1) def __getattr__(self, name): @@ -73,7 +73,7 @@ def _kill(self, pid): class BuildBackendCaller(BuildBackendBase): def __init__(self, *args, **kwargs): - super(BuildBackendCaller, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) (self.backend_name, _, self.backend_obj) = self.backend_name.partition(':') diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py index 33d3250893..fbd21b1976 100644 --- a/setuptools/tests/test_manifest.py +++ b/setuptools/tests/test_manifest.py @@ -171,7 +171,7 @@ def teardown_method(self, method): class TestManifestTest(TempDirTestCase): def setup_method(self, method): - super(TestManifestTest, self).setup_method(method) + super().setup_method(method) f = open(os.path.join(self.temp_dir, 'setup.py'), 'w') f.write(SETUP_PY) From 1ddffe4946be0f6f08ea3c73b4efc5ebea74ad4b Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 22 Nov 2023 01:39:47 +0100 Subject: [PATCH 0231/1761] Don't cast string literals with str() Fixed by running `ruff --select UP018 --fix .`: UP018 [*] Unnecessary `str` call (rewrite as a literal) Original suggestions from pyupgrade: https://github.com/asottile/pyupgrade#forced-strnative-literals --- setuptools/tests/test_dist.py | 4 +- setuptools/tests/test_easy_install.py | 70 +++++++++++++-------------- setuptools/tests/test_integration.py | 2 +- 3 files changed, 38 insertions(+), 38 deletions(-) diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py index 609932a9b3..99cd582501 100644 --- a/setuptools/tests/test_dist.py +++ b/setuptools/tests/test_dist.py @@ -116,7 +116,7 @@ def test_provides_extras_deterministic_order(): # Invalid value type. 
( { - 'hello': str('*.msg'), + 'hello': '*.msg', }, ( "\"values of 'package_data' dict\" " @@ -142,7 +142,7 @@ def test_check_package_data(package_data, expected_message): assert check_package_data(None, 'package_data', package_data) is None else: with pytest.raises(DistutilsSetupError, match=re.escape(expected_message)): - check_package_data(None, str('package_data'), package_data) + check_package_data(None, 'package_data', package_data) def test_check_specifier(): diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index d0b95e09ea..c6e3bf039a 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -53,7 +53,7 @@ class FakeDist: def get_entry_map(self, group): if group != 'console_scripts': return {} - return {str('name'): 'ep'} + return {'name': 'ep'} def as_requirement(self): return 'spec' @@ -567,8 +567,8 @@ def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch): setup_requires, it should honor the fetch parameters (such as index-url, and find-links). """ - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + monkeypatch.setenv('PIP_RETRIES', '0') + monkeypatch.setenv('PIP_TIMEOUT', '0') monkeypatch.setenv('PIP_NO_INDEX', 'false') with contexts.quiet(): # create an sdist that has a build-time dependency. @@ -653,7 +653,7 @@ def test_setup_requires_overrides_version_conflict(self, use_setup_cfg): with contexts.quiet() as (stdout, stderr): # Don't even need to install the package, just # running the setup.py at all is sufficient - run_setup(test_setup_py, [str('--name')]) + run_setup(test_setup_py, ['--name']) lines = stdout.readlines() assert len(lines) > 0 @@ -716,7 +716,7 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg): try: # Don't even need to install the package, just # running the setup.py at all is sufficient - run_setup(test_setup_py, [str('--name')]) + run_setup(test_setup_py, ['--name']) except pkg_resources.VersionConflict: self.fail( 'Installing setup.py requirements ' @@ -766,16 +766,16 @@ def make_dependency_sdist(dist_path, distname, version): ) test_setup_py = os.path.join(test_pkg, 'setup.py') with contexts.quiet() as (stdout, stderr): - run_setup(test_setup_py, [str('--version')]) + run_setup(test_setup_py, ['--version']) lines = stdout.readlines() assert len(lines) > 0 assert lines[-1].strip() == '42' def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch): - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + monkeypatch.setenv('PIP_RETRIES', '0') + monkeypatch.setenv('PIP_TIMEOUT', '0') monkeypatch.setenv('PIP_NO_INDEX', 'false') - monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url) + monkeypatch.setenv('PIP_INDEX_URL', mock_index.url) with contexts.save_pkg_resources_state(): with contexts.tempdir() as temp_dir: test_pkg = create_setup_requires_package( @@ -796,14 +796,14 @@ def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch): ) test_setup_py = os.path.join(test_pkg, 'setup.py') with pytest.raises(distutils.errors.DistutilsError): - run_setup(test_setup_py, [str('--version')]) + run_setup(test_setup_py, ['--version']) assert len(mock_index.requests) == 1 assert mock_index.requests[0].path == '/python-xlib/' def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch): - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) - 
monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url) + monkeypatch.setenv('PIP_RETRIES', '0') + monkeypatch.setenv('PIP_TIMEOUT', '0') + monkeypatch.setenv('PIP_INDEX_URL', mock_index.url) with contexts.save_pkg_resources_state(): with contexts.tempdir() as temp_dir: dep_sdist = os.path.join(temp_dir, 'dep.tar.gz') @@ -817,7 +817,7 @@ def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch): setup_attrs=dict(setup_requires='dependency @ %s' % dep_url), ) test_setup_py = os.path.join(test_pkg, 'setup.py') - run_setup(test_setup_py, [str('--version')]) + run_setup(test_setup_py, ['--version']) assert len(mock_index.requests) == 0 def test_setup_requires_with_allow_hosts(self, mock_index): @@ -843,15 +843,15 @@ def test_setup_requires_with_allow_hosts(self, mock_index): path.build(files, prefix=temp_dir) setup_py = str(pathlib.Path(temp_dir, 'test_pkg', 'setup.py')) with pytest.raises(distutils.errors.DistutilsError): - run_setup(setup_py, [str('--version')]) + run_setup(setup_py, ['--version']) assert len(mock_index.requests) == 0 def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir): """Check `python_requires` is honored.""" - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) - monkeypatch.setenv(str('PIP_NO_INDEX'), str('1')) - monkeypatch.setenv(str('PIP_VERBOSE'), str('1')) + monkeypatch.setenv('PIP_RETRIES', '0') + monkeypatch.setenv('PIP_TIMEOUT', '0') + monkeypatch.setenv('PIP_NO_INDEX', '1') + monkeypatch.setenv('PIP_VERBOSE', '1') dep_1_0_sdist = 'dep-1.0.tar.gz' dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist)) dep_1_0_python_requires = '>=2.7' @@ -898,7 +898,7 @@ def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir): setup_attrs=dict(setup_requires='dep', dependency_links=[index_url]), ) test_setup_py = os.path.join(test_pkg, 'setup.py') - run_setup(test_setup_py, [str('--version')]) + run_setup(test_setup_py, ['--version']) eggs = list( map(str, pkg_resources.find_distributions(os.path.join(test_pkg, '.eggs'))) ) @@ -908,8 +908,8 @@ def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir): def test_setup_requires_with_find_links_in_setup_cfg( self, monkeypatch, with_dependency_links_in_setup_py ): - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + monkeypatch.setenv('PIP_RETRIES', '0') + monkeypatch.setenv('PIP_TIMEOUT', '0') with contexts.save_pkg_resources_state(): with contexts.tempdir() as temp_dir: make_trivial_sdist( @@ -946,7 +946,7 @@ def test_setup_requires_with_find_links_in_setup_cfg( find_links=temp_dir, ) ) - run_setup(test_setup_py, [str('--version')]) + run_setup(test_setup_py, ['--version']) def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch): """ @@ -979,7 +979,7 @@ def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch): prefix=dep_pkg, ) # "Install" dep. - run_setup(os.path.join(dep_pkg, 'setup.py'), [str('dist_info')]) + run_setup(os.path.join(dep_pkg, 'setup.py'), ['dist_info']) working_set.add_entry(dep_pkg) # Create source tree for test package. test_pkg = os.path.join(temp_dir, 'test_pkg') @@ -995,11 +995,11 @@ def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch): ) ) # Check... 
- monkeypatch.setenv(str('PIP_FIND_LINKS'), str(temp_dir)) - monkeypatch.setenv(str('PIP_NO_INDEX'), str('1')) - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) - run_setup(test_setup_py, [str('--version')]) + monkeypatch.setenv('PIP_FIND_LINKS', str(temp_dir)) + monkeypatch.setenv('PIP_NO_INDEX', '1') + monkeypatch.setenv('PIP_RETRIES', '0') + monkeypatch.setenv('PIP_TIMEOUT', '0') + run_setup(test_setup_py, ['--version']) def test_setup_requires_with_distutils_command_dep(self, monkeypatch): """ @@ -1063,7 +1063,7 @@ class epcmd(build_py): prefix=dep_pkg, ) # "Install" dep. - run_setup(os.path.join(dep_pkg, 'setup.py'), [str('dist_info')]) + run_setup(os.path.join(dep_pkg, 'setup.py'), ['dist_info']) working_set.add_entry(dep_pkg) # Create source tree for test package. test_pkg = os.path.join(temp_dir, 'test_pkg') @@ -1079,10 +1079,10 @@ class epcmd(build_py): ) ) # Check... - monkeypatch.setenv(str('PIP_FIND_LINKS'), str(temp_dir)) - monkeypatch.setenv(str('PIP_NO_INDEX'), str('1')) - monkeypatch.setenv(str('PIP_RETRIES'), str('0')) - monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + monkeypatch.setenv('PIP_FIND_LINKS', str(temp_dir)) + monkeypatch.setenv('PIP_NO_INDEX', '1') + monkeypatch.setenv('PIP_RETRIES', '0') + monkeypatch.setenv('PIP_TIMEOUT', '0') run_setup(test_setup_py, ['epcmd']) @@ -1286,7 +1286,7 @@ def test_get_script_header_non_ascii_exe(self): actual = ei.ScriptWriter.get_header( '#!/usr/bin/python', executable=self.non_ascii_exe ) - expected = str('#!%s -x\n') % self.non_ascii_exe + expected = '#!%s -x\n' % self.non_ascii_exe assert actual == expected def test_get_script_header_exe_with_spaces(self): diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py index e17ffc5d5c..1aa16172b5 100644 --- a/setuptools/tests/test_integration.py +++ b/setuptools/tests/test_integration.py @@ -64,7 +64,7 @@ def fin(): monkeypatch.setattr('site.USER_BASE', user_base.strpath) monkeypatch.setattr('site.USER_SITE', user_site.strpath) monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath]) - monkeypatch.setenv(str('PYTHONPATH'), str(os.path.pathsep.join(sys.path))) + monkeypatch.setenv('PYTHONPATH', str(os.path.pathsep.join(sys.path))) # Set up the command for performing the installation. 
dist = Distribution() From 8da168860bb7aa9d6209b10d5db6d5e61feabc81 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 22 Nov 2023 01:44:35 +0100 Subject: [PATCH 0232/1761] =?UTF-8?q?os.error=20=E2=86=92=20OSError?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is an alias for the built-in OSError exception: https://docs.python.org/3/library/os.html#os.error Fixed by running `ruff --select UP024 --fix .`: UP024 [*] Replace aliased errors with `OSError` --- pkg_resources/__init__.py | 4 ++-- setuptools/command/easy_install.py | 2 +- setuptools/glob.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 9adc214676..dc51076c3a 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -1895,7 +1895,7 @@ def _extract_resource(self, manager, zip_path): # noqa: C901 try: rename(tmpnam, real_path) - except os.error: + except OSError: if os.path.isfile(real_path): if self._is_current(real_path, zip_path): # the file became current since it was checked above, @@ -1908,7 +1908,7 @@ def _extract_resource(self, manager, zip_path): # noqa: C901 return real_path raise - except os.error: + except OSError: # report a user-friendly error manager.extraction_error() diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 3231650c5c..4de28bb98f 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -2023,7 +2023,7 @@ def chmod(path, mode): log.debug("changing mode of %s to %o", path, mode) try: _chmod(path, mode) - except os.error as e: + except OSError as e: log.debug("chmod failed: %s", e) diff --git a/setuptools/glob.py b/setuptools/glob.py index ac901d9253..a184c0b643 100644 --- a/setuptools/glob.py +++ b/setuptools/glob.py @@ -125,7 +125,7 @@ def _rlistdir(dirname): dirname = os.curdir try: names = os.listdir(dirname) - except os.error: + except OSError: return for x in names: yield x From a628dc5dfb83f950c8810e65afe019b3760a7c1d Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 22 Nov 2023 01:54:00 +0100 Subject: [PATCH 0233/1761] Use `capture_output` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In Python ≥ 3.7, `capture_output` can be used instead of `stdout=PIPE` / `stderr=PIPE`. 
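For illustration only (not part of the patch), a minimal standalone sketch of the equivalence this rule relies on; the command being run is an arbitrary example:

    import subprocess

    # Before: wire up both streams through explicit pipes.
    old = subprocess.run(
        ["python", "--version"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )

    # After (Python >= 3.7): capture_output=True implies both PIPEs.
    new = subprocess.run(
        ["python", "--version"],
        capture_output=True,
        universal_newlines=True,
    )

    assert (old.stdout, old.stderr) == (new.stdout, new.stderr)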
Fixed by running `ruff --select UP022 --fix --unsafe-fixes .`:

UP022 Sending `stdout` and `stderr` to `PIPE` is deprecated, use `capture_output`
---
 setuptools/tests/integration/helpers.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py
index 824dfdfe1a..62076bdf7d 100644
--- a/setuptools/tests/integration/helpers.py
+++ b/setuptools/tests/integration/helpers.py
@@ -15,8 +15,7 @@ def run(cmd, env=None):
     r = subprocess.run(
         cmd,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
+        capture_output=True,
         universal_newlines=True,
         env={**os.environ, **(env or {})},
         # ^-- allow overwriting instead of discarding the current env

From b5f07c9b52b3d35b72f4ab7f29ba574b60b24139 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 22 Nov 2023 01:58:24 +0100
Subject: [PATCH 0234/1761] Remove extraneous pair of parentheses

Fixed by running `ruff --select UP034 --fix .`:

UP034 [*] Avoid extraneous parentheses
---
 setuptools/command/editable_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 73fa9fff52..57e35d53e4 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -400,7 +400,7 @@ def __init__(self, dist: Distribution, name: str, path_entries: List[Path]):
         self.path_entries = path_entries

     def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
-        entries = "\n".join((str(p.resolve()) for p in self.path_entries))
+        entries = "\n".join(str(p.resolve()) for p in self.path_entries)
         contents = _encode_pth(f"{entries}\n")
         wheel.writestr(f"__editable__.{self.name}.pth", contents)

From 86bb681be5e5e4e08b42f1bdf1bf48f1d41d8e0e Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 22 Nov 2023 02:05:57 +0100
Subject: [PATCH 0235/1761] Get rid of one last `coding: utf-8`

In Python 3, the source encoding is implicit, UTF-8 by default.
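As a standalone illustration (not part of the patch), the declaration being removed only ever mattered on Python 2:

    # On Python 2, using a non-ASCII literal required a header comment:
    #
    #     # -*- coding: utf-8 -*-
    #
    # On Python 3 the source encoding already defaults to UTF-8, so this
    # works in a file with no declaration at all:
    greeting = "café"
    assert greeting == "caf\xe9"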
This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#-coding--comment Fixed by running `ruff --select UP009 --fix .`: UP009 [*] UTF-8 encoding declaration is unnecessary --- setuptools/tests/test_archive_util.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/setuptools/tests/test_archive_util.py b/setuptools/tests/test_archive_util.py index 7f9962440c..06d7f05aa0 100644 --- a/setuptools/tests/test_archive_util.py +++ b/setuptools/tests/test_archive_util.py @@ -1,5 +1,3 @@ -# coding: utf-8 - import tarfile import io From 5ba5c2ed29ab165b8677aaa66e45e5b13c4abfba Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 22 Nov 2023 02:08:47 +0100 Subject: [PATCH 0236/1761] Use byte literals instead of .encode() This is a suggestion from pyupgrade: https://github.com/asottile/pyupgrade#encode-to-bytes-literals Fixed by running `ruff --select UP012 --fix .`: UP012 [*] Unnecessary call to `encode` as UTF-8 --- setuptools/command/sdist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 95d325717a..71f695fd36 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -189,7 +189,7 @@ def _manifest_is_not_generated(self): with open(self.manifest, 'rb') as fp: first_line = fp.readline() - return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode() + return first_line != b'# file GENERATED by distutils, do NOT edit\n' def read_manifest(self): """Read the manifest file (named by 'self.manifest') and use it to From a14299108147d551cba219908dc180e2a2b0eeb1 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Wed, 22 Nov 2023 02:11:04 +0100 Subject: [PATCH 0237/1761] Use generator instead of list Fixed by running `ruff --select UP027 --fix .`: UP027 [*] Replace unpacked list comprehension with a generator expression --- pkg_resources/tests/test_working_set.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg_resources/tests/test_working_set.py b/pkg_resources/tests/test_working_set.py index f8e60e752a..57f62b5492 100644 --- a/pkg_resources/tests/test_working_set.py +++ b/pkg_resources/tests/test_working_set.py @@ -76,10 +76,10 @@ def parametrize_test_working_set_resolve(*test_list): requirements, expected1, expected2, - ) = [ + ) = ( strip_comments(s.lstrip()) for s in textwrap.dedent(test).lstrip().split('\n\n', 5) - ] + ) installed_dists = list(parse_distributions(installed_dists)) installable_dists = list(parse_distributions(installable_dists)) requirements = list(pkg_resources.parse_requirements(requirements)) From dc59f1882f2dcf1c28ba864ed248a97be5b4f716 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 5 Jan 2024 14:59:25 +0000 Subject: [PATCH 0238/1761] Use functools.lru_cache for 3.8 compatibility --- pkg_resources/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index db7a38981e..d8a915a5bb 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -407,7 +407,7 @@ def get_provider(moduleOrReq): return _find_adapter(_provider_factories, loader)(module) -@functools.cache +@functools.lru_cache(maxsize=None) def _macos_vers(): version = platform.mac_ver()[0] # fallback for MacPorts @@ -2420,7 +2420,7 @@ def _cygwin_patch(filename): # pragma: nocover return os.path.abspath(filename) if sys.platform == 
'cygwin' else filename


-@functools.cache
+@functools.lru_cache(maxsize=None)
 def _normalize_cached(filename):
     return normalize_path(filename)

From 356ea1b3b40bfd5191a025acae467c775f01e0fe Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Fri, 5 Jan 2024 19:11:26 +0000
Subject: [PATCH 0239/1761] Remove unnecessary unpacking of iterator into list

---
 setuptools/command/editable_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 76b7606ffc..39ff4c7323 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -606,7 +606,7 @@ def _simple_layout(
     layout = {pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages}
     if not layout:
         return set(package_dir) in ({}, {""})
-    parent = os.path.commonpath([*starmap(_parent_path, layout.items())])
+    parent = os.path.commonpath(starmap(_parent_path, layout.items()))
     return all(
         _path.same_path(Path(parent, *key.split('.')), value)
         for key, value in layout.items()
     )

From baaef281d529ea430e5a20264d916807dbb1baae Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Fri, 5 Jan 2024 19:29:26 +0000
Subject: [PATCH 0240/1761] Hide unnecessary parent exception

---
 setuptools/build_meta.py | 4 ++--
 setuptools/command/egg_info.py | 7 +++----
 2 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index b656f47950..24210ded93 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -117,11 +117,11 @@ def _file_with_extension(directory, extension):
     matching = (f for f in os.listdir(directory) if f.endswith(extension))
     try:
         (file,) = matching
-    except ValueError as e:
+    except ValueError:
         raise ValueError(
             'No distribution was found. Ensure that `setup.py` '
             'is not empty and that it calls `setup()`.'
-        ) from e
+        ) from None
     return file

diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index ff1864241a..62d2feea9b 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -384,10 +384,9 @@ def process_template_line(self, line):

         try:
             process_action = action_map[action]
-        except KeyError as e:
-            raise DistutilsInternalError(
-                f"Invalid MANIFEST.in: unknow action {action!r} in {line!r}"
-            ) from e
+        except KeyError:
+            msg = f"Invalid MANIFEST.in: unknown action {action!r} in {line!r}"
+            raise DistutilsInternalError(msg) from None

         # OK, now we know that the action is valid and we have the
         # right number of words on the line for that action -- so we

From 2975a39609a6341d7cecc5018af26ccf99529e77 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Fri, 5 Jan 2024 19:45:51 +0000
Subject: [PATCH 0241/1761] Promote a cleaner error message if a function was never patched

---
 setuptools/monkey.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index f3148e6832..3d5edb6bd2 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -130,7 +130,7 @@ def patch_func(replacement, target_mod, func_name):


 def get_unpatched_function(candidate):
-    return getattr(candidate, 'unpatched', None)
+    return candidate.unpatched


 def patch_for_msvc_specialized_compiler():

From 07774d965eccd43bc2d46d24c7264276ca2bba3b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Fri, 5 Jan 2024 19:55:10 +0000
Subject: [PATCH 0242/1761] Ignore paths that are automatically generated

---
 ruff.toml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/ruff.toml b/ruff.toml
index 791cfdc0a5..89c2910997 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -29,12 +29,14 @@ extend-ignore = [
 extend-exclude = [
 	"**/_vendor",
 	"setuptools/_distutils",
+	"setuptools/config/_validate_pyproject",
 ]

 [format]
 extend-exclude = [
 	"**/_vendor",
 	"setuptools/_distutils",
+	"setuptools/config/_validate_pyproject",
 ]
 # https://docs.astral.sh/ruff/settings/#format-quote-style
 quote-style = "preserve"

From eec302354da892ef140eb444903d215d74452fde Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 5 Jan 2024 21:40:19 +0100
Subject: [PATCH 0243/1761] =?UTF-8?q?ruff:=20extend-exclude=20=E2=86=92=20?=
 =?UTF-8?q?exclude?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

There is no such thing as extend-exclude, at least in ruff 0.1.11:

	ruff failed
	  Cause: Failed to parse /path/to/ruff.toml
	  Cause: TOML parse error at line 1, column 1
	    |
	  1 | [lint]
	    | ^^^^^^
	  unknown field `extend-exclude`
---
 ruff.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 89c2910997..18776ab60a 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -26,14 +26,14 @@ extend-ignore = [
 	"UP032", # temporarily disabled
 	"UP036", # temporarily disabled
 ]
-extend-exclude = [
+exclude = [
 	"**/_vendor",
 	"setuptools/_distutils",
 	"setuptools/config/_validate_pyproject",
 ]

 [format]
-extend-exclude = [
+exclude = [
 	"**/_vendor",
 	"setuptools/_distutils",
 	"setuptools/config/_validate_pyproject",
 ]

From ff32ae0b43340341719b6b1b0ff15b7598a8644f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sat, 6 Jan 2024 16:57:08 -0500
Subject: [PATCH 0244/1761] Copy 'missing_compiler_executable' from Python 3.12 and customize it for compatibility with distutils.
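A hypothetical test module sketching how the copied helper is meant to be used (mirroring the call sites updated in this patch; the test name and body are illustrative only):

    import pytest

    from distutils.tests import missing_compiler_executable

    def test_something_that_needs_a_compiler():
        cmd = missing_compiler_executable()
        if cmd is not None:
            pytest.skip('The %r command is not found' % cmd)
        ...  # proceed to build a test extension with the configured compiler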
--- distutils/tests/__init__.py | 32 ++++++++++++++++++++++++++++++ distutils/tests/test_build_clib.py | 4 +--- distutils/tests/test_build_ext.py | 5 +++-- distutils/tests/test_config_cmd.py | 3 +-- distutils/tests/test_install.py | 5 ++--- 5 files changed, 39 insertions(+), 10 deletions(-) diff --git a/distutils/tests/__init__.py b/distutils/tests/__init__.py index 27e73393a0..fdec5a9650 100644 --- a/distutils/tests/__init__.py +++ b/distutils/tests/__init__.py @@ -6,3 +6,35 @@ distutils.command.tests package, since command identification is done by import rather than matching pre-defined names. """ + +def missing_compiler_executable(cmd_names=[]): + """Check if the compiler components used to build the interpreter exist. + + Check for the existence of the compiler executables whose names are listed + in 'cmd_names' or all the compiler executables when 'cmd_names' is empty + and return the first missing executable or None when none is found + missing. + + """ + from distutils import ccompiler, sysconfig, spawn + from distutils import errors + + compiler = ccompiler.new_compiler() + sysconfig.customize_compiler(compiler) + if compiler.compiler_type == "msvc": + # MSVC has no executables, so check whether initialization succeeds + try: + compiler.initialize() + except errors.PlatformError: + return "msvc" + for name in compiler.executables: + if cmd_names and name not in cmd_names: + continue + cmd = getattr(compiler, name) + if cmd_names: + assert cmd is not None, \ + "the '%s' executable is not configured" % name + elif not cmd: + continue + if spawn.find_executable(cmd[0]) is None: + return cmd[0] diff --git a/distutils/tests/test_build_clib.py b/distutils/tests/test_build_clib.py index b5a392a85f..98ab0b171f 100644 --- a/distutils/tests/test_build_clib.py +++ b/distutils/tests/test_build_clib.py @@ -1,13 +1,11 @@ """Tests for distutils.command.build_clib.""" import os -from test.support import missing_compiler_executable - import pytest from distutils.command.build_clib import build_clib from distutils.errors import DistutilsSetupError -from distutils.tests import support +from distutils.tests import support, missing_compiler_executable class TestBuildCLib(support.TempdirManager): diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py index cb61ad7455..3c83cca4d2 100644 --- a/distutils/tests/test_build_ext.py +++ b/distutils/tests/test_build_ext.py @@ -16,6 +16,7 @@ from distutils.core import Distribution from distutils.command.build_ext import build_ext from distutils import sysconfig +from distutils.tests import missing_compiler_executable from distutils.tests.support import ( TempdirManager, copy_xxmodule_c, @@ -89,7 +90,7 @@ def build_ext(self, *args, **kwargs): return build_ext(*args, **kwargs) def test_build_ext(self): - cmd = support.missing_compiler_executable() + missing_compiler_executable() copy_xxmodule_c(self.tmp_dir) xx_c = os.path.join(self.tmp_dir, 'xxmodule.c') xx_ext = Extension('xx', [xx_c]) @@ -359,7 +360,7 @@ def test_compiler_option(self): assert cmd.compiler == 'unix' def test_get_outputs(self): - cmd = support.missing_compiler_executable() + missing_compiler_executable() tmp_dir = self.mkdtemp() c_file = os.path.join(tmp_dir, 'foo.c') self.write_file(c_file, 'void PyInit_foo(void) {}\n') diff --git a/distutils/tests/test_config_cmd.py b/distutils/tests/test_config_cmd.py index e72a7c5ff8..ecb8510246 100644 --- a/distutils/tests/test_config_cmd.py +++ b/distutils/tests/test_config_cmd.py @@ -1,12 +1,11 @@ """Tests for 
distutils.command.config.""" import os import sys -from test.support import missing_compiler_executable import pytest from distutils.command.config import dump_file, config -from distutils.tests import support +from distutils.tests import support, missing_compiler_executable from distutils._log import log diff --git a/distutils/tests/test_install.py b/distutils/tests/test_install.py index 3f525db42a..082ee1d349 100644 --- a/distutils/tests/test_install.py +++ b/distutils/tests/test_install.py @@ -17,8 +17,7 @@ from distutils.errors import DistutilsOptionError from distutils.extension import Extension -from distutils.tests import support -from test import support as test_support +from distutils.tests import support, missing_compiler_executable def _make_ext_name(modname): @@ -213,7 +212,7 @@ def test_record(self): assert found == expected def test_record_extensions(self): - cmd = test_support.missing_compiler_executable() + cmd = missing_compiler_executable() if cmd is not None: pytest.skip('The %r command is not found' % cmd) install_dir = self.mkdtemp() From 5b6638da22121aa215fa5b762379ff4a4d98d09a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 6 Jan 2024 20:09:59 -0500 Subject: [PATCH 0245/1761] Remove build and dist from excludes. It appears they are not needed and their presence blocks the names of packages like 'builder' and 'distutils'. Ref pypa/distutils#224. --- setup.cfg | 2 -- 1 file changed, 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 574ffc28e6..68c38ac901 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,8 +20,6 @@ install_requires = [options.packages.find] exclude = - build* - dist* docs* tests* From dbcb0747110d074112f27e2699856acfc4ba8ea3 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 6 Jan 2024 20:09:59 -0500 Subject: [PATCH 0246/1761] Remove build and dist from excludes. It appears they are not needed and their presence blocks the names of packages like 'builder' and 'distutils'. Ref pypa/distutils#224. --- setup.cfg | 2 -- 1 file changed, 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 1d2729beb2..c2e82875e6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,8 +20,6 @@ install_requires = [options.packages.find] exclude = - build* - dist* docs* tests* From 0148d7dcd08077e5fb849edc9b8235240a6e6771 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 6 Jan 2024 20:21:58 -0500 Subject: [PATCH 0247/1761] Mark this function as uncovered. --- distutils/tests/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/tests/__init__.py b/distutils/tests/__init__.py index fdec5a9650..85293cbb5b 100644 --- a/distutils/tests/__init__.py +++ b/distutils/tests/__init__.py @@ -7,7 +7,7 @@ by import rather than matching pre-defined names. """ -def missing_compiler_executable(cmd_names=[]): +def missing_compiler_executable(cmd_names=[]): # pragma: no cover """Check if the compiler components used to build the interpreter exist. Check for the existence of the compiler executables whose names are listed From 107eff1920a39ab46be57bced32fb1eb23aa5797 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sat, 6 Jan 2024 20:27:59 -0500 Subject: [PATCH 0248/1761] Also disable the check --- .github/workflows/main.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7b9cc6927b..213558aac4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -159,7 +159,8 @@ jobs: needs: - test - collateral - - test_cygwin + # disabled due to disabled job + # - test_cygwin runs-on: ubuntu-latest From c5a16ac3f66c1281354e9d23556905417250c019 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 6 Jan 2024 21:00:22 -0500 Subject: [PATCH 0249/1761] Remove pin on inflect as it's insufficient to avoid the Rust dependency. --- setup.cfg | 3 --- 1 file changed, 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index ff2aade085..68c38ac901 100644 --- a/setup.cfg +++ b/setup.cfg @@ -45,9 +45,6 @@ testing = docutils pyfakefs more_itertools - # workaround for lack of Rust support: pypa/setuptools#3921 - inflect<6.0.0; sys.platform=="cygwin" - docs = # upstream From d27890573088a6a0292139c5e30466debd7dc1dd Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 7 Jan 2024 12:26:16 -0500 Subject: [PATCH 0250/1761] Exclude docs and tests directories properly per Setuptools behavior. --- setup.cfg | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index c2e82875e6..c5aa1af9a1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,8 +20,11 @@ install_requires = [options.packages.find] exclude = - docs* - tests* + # duplicate exclusions for pypa/setuptools#2688 + docs + docs.* + tests + tests.* [options.extras_require] testing = From 63535c6efd3516a7ef35c862c24ef5b6d43c8494 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 7 Jan 2024 12:49:05 -0500 Subject: [PATCH 0251/1761] Rely on default discovery for good heuristics for finding packages. 
---
 setup.cfg | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index c5aa1af9a1..fe99eaf6e5 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -13,19 +13,10 @@ classifiers =
 	Programming Language :: Python :: 3 :: Only

 [options]
-packages = find_namespace:
 include_package_data = true
 python_requires = >=3.8
 install_requires =

-[options.packages.find]
-exclude =
-	# duplicate exclusions for pypa/setuptools#2688
-	docs
-	docs.*
-	tests
-	tests.*
-
 [options.extras_require]
 testing =
 	# upstream

From 23389e6d556e5588dd6cb7657e2b863ecd85d9fa Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Mon, 8 Jan 2024 14:58:08 +0000
Subject: [PATCH 0252/1761] Remove unnecessary 'getattr' for tokenize.open

---
 setuptools/build_meta.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 62954b3b77..0a0abfdae0 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -130,7 +130,7 @@ def _open_setup_script(setup_script):
         # Supply a default setup.py
         return io.StringIO("from setuptools import setup; setup()")

-    return getattr(tokenize, 'open', open)(setup_script)
+    return tokenize.open(setup_script)


 @contextlib.contextmanager

From 99cfb83f327eb31620f285c4f43d1e550211e519 Mon Sep 17 00:00:00 2001
From: Avasam
Date: Mon, 8 Jan 2024 18:37:41 -0500
Subject: [PATCH 0253/1761] post-merge fix

---
 pkg_resources/tests/test_pkg_resources.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index c667215c37..0883642080 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -4,6 +4,7 @@ import os
 import zipfile
 import datetime
+import plistlib
 import subprocess
 import stat
 import distutils.dist

From 02dace2de5ed08f5e8d2818ffe5d90a97cd8e657 Mon Sep 17 00:00:00 2001
From: Eli Schwartz
Date: Thu, 30 Nov 2023 17:01:47 -0500
Subject: [PATCH 0254/1761] Use the stdlib toml library on sufficiently new python

Although tomli is vendored and always available, it is cleaner to
avoid using the backport on python 3.11. This makes it easier to
automatically drop outdated branches in the future, and is a
micro-optimization when user code imports a toml library in the same
process space.

Additionally, setuptools.extern.VendorImporter explicitly supports
removing vendored modules as long as they are globally installed and
available. For distributors that rely on this, importing tomllib first
permits those distributors to avoid packaging tomli on versions of
python that already have tomllib.
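The guarded-import idiom in question, shown standalone (this sketch assumes the `tomli` backport is importable on interpreters older than 3.11; the function name mirrors the one in the patch):

    import sys

    if sys.version_info >= (3, 11):
        import tomllib  # standard library since Python 3.11
    else:  # pragma: no cover
        import tomli as tomllib  # backport exposing the same API

    def load_file(filepath):
        with open(filepath, "rb") as file:
            return tomllib.load(file)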
--- setuptools/config/pyprojecttoml.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index 52040be49a..2eae6a725d 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -11,6 +11,7 @@ import logging import os +import sys from contextlib import contextmanager from functools import partial from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Set, Union @@ -29,10 +30,13 @@ def load_file(filepath: _Path) -> dict: - from setuptools.extern import tomli # type: ignore + if sys.version_info >= (3, 11): + import tomllib + else: # pragma: no cover + from setuptools.extern import tomli as tomllib with open(filepath, "rb") as file: - return tomli.load(file) + return tomllib.load(file) def validate(config: dict, filepath: _Path) -> bool: From efdd01233792a6f3166a8994307c2ae8c6f5147d Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 9 Jan 2024 12:38:54 +0000 Subject: [PATCH 0255/1761] Move tomllib import to py311compat This approach follows pre-existing practices in the setuptools repository. It makes it easier in the future to recognise code that can be updated and fallbacks that can be removed. --- setuptools/config/pyprojecttoml.py | 6 +----- setuptools/py311compat.py | 7 +++++++ 2 files changed, 8 insertions(+), 5 deletions(-) create mode 100644 setuptools/py311compat.py diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index 2eae6a725d..321e106e40 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -11,7 +11,6 @@ import logging import os -import sys from contextlib import contextmanager from functools import partial from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Set, Union @@ -30,10 +29,7 @@ def load_file(filepath: _Path) -> dict: - if sys.version_info >= (3, 11): - import tomllib - else: # pragma: no cover - from setuptools.extern import tomli as tomllib + from ..py311compat import tomllib with open(filepath, "rb") as file: return tomllib.load(file) diff --git a/setuptools/py311compat.py b/setuptools/py311compat.py new file mode 100644 index 0000000000..9231cbb290 --- /dev/null +++ b/setuptools/py311compat.py @@ -0,0 +1,7 @@ +import sys + + +if sys.version_info >= (3, 11): + import tomllib +else: # pragma: no cover + from setuptools.extern import tomli as tomllib From 2ad8784dfeb816829995613fb5fd9818f3e88922 Mon Sep 17 00:00:00 2001 From: Naveen M K Date: Tue, 4 Oct 2022 13:49:49 +0530 Subject: [PATCH 0256/1761] Add support for building extensions using MinGW compilers --- distutils/ccompiler.py | 6 +++++- distutils/command/build_ext.py | 8 ++++---- distutils/cygwinccompiler.py | 4 ++-- distutils/sysconfig.py | 9 ++++++--- distutils/util.py | 9 +++++++++ 5 files changed, 26 insertions(+), 10 deletions(-) diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py index c1c7d5476e..dba2e61551 100644 --- a/distutils/ccompiler.py +++ b/distutils/ccompiler.py @@ -19,7 +19,7 @@ from .file_util import move_file from .dir_util import mkpath from ._modified import newer_group -from .util import split_quoted, execute +from .util import split_quoted, execute, is_mingw from ._log import log @@ -1076,6 +1076,10 @@ def get_default_compiler(osname=None, platform=None): osname = os.name if platform is None: platform = sys.platform + # Mingw is a special case where sys.platform is 'win32' but we + # want to use the 'mingw32' compiler, so check it first + 
if is_mingw(): + return 'mingw32' for pattern, compiler in _default_compilers: if ( re.match(pattern, platform) is not None diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py index b48f462626..4a69e9c113 100644 --- a/distutils/command/build_ext.py +++ b/distutils/command/build_ext.py @@ -21,7 +21,7 @@ from ..sysconfig import get_config_h_filename from .._modified import newer_group from ..extension import Extension -from ..util import get_platform +from ..util import get_platform, is_mingw from distutils._log import log from . import py37compat @@ -189,7 +189,7 @@ def finalize_options(self): # noqa: C901 # for extensions under windows use different directories # for Release and Debug builds. # also Python's library directory must be appended to library_dirs - if os.name == 'nt': + if os.name == 'nt' and not is_mingw(): # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. @@ -742,7 +742,7 @@ def get_libraries(self, ext): # noqa: C901 # pyconfig.h that MSVC groks. The other Windows compilers all seem # to need it mentioned explicitly, though, so that's what we do. # Append '_d' to the python import library on debug builds. - if sys.platform == "win32": + if sys.platform == "win32" and not is_mingw(): from .._msvccompiler import MSVCCompiler if not isinstance(self.compiler, MSVCCompiler): @@ -772,7 +772,7 @@ def get_libraries(self, ext): # noqa: C901 # A native build on an Android device or on Cygwin if hasattr(sys, 'getandroidapilevel'): link_libpython = True - elif sys.platform == 'cygwin': + elif sys.platform == 'cygwin' or is_mingw(): link_libpython = True elif '_PYTHON_HOST_PLATFORM' in os.environ: # We are cross-compiling for one of the relevant platforms diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py index 47efa377c5..7ed169f330 100644 --- a/distutils/cygwinccompiler.py +++ b/distutils/cygwinccompiler.py @@ -57,7 +57,7 @@ def get_msvcr(): try: msc_ver = int(match.group(1)) except AttributeError: - return + return [] try: return _msvcr_lookup[msc_ver] except KeyError: @@ -277,7 +277,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): self.set_executables( compiler='%s -O -Wall' % self.cc, - compiler_so='%s -mdll -O -Wall' % self.cc, + compiler_so='%s -shared -O -Wall' % self.cc, compiler_cxx='%s -O -Wall' % self.cxx, linker_exe='%s' % self.cc, linker_so='{} {}'.format(self.linker_dll, shared_option), diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index a40a7231b3..166d85437a 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -18,6 +18,7 @@ from .errors import DistutilsPlatformError from . 
import py39compat from ._functools import pass_none +from .util import is_mingw IS_PYPY = '__pypy__' in sys.builtin_module_names @@ -120,8 +121,10 @@ def get_python_inc(plat_specific=0, prefix=None): """ default_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX resolved_prefix = prefix if prefix is not None else default_prefix + # MinGW imitates posix like layout, but os.name != posix + os_name = "posix" if is_mingw() else os.name try: - getter = globals()[f'_get_python_inc_{os.name}'] + getter = globals()[f'_get_python_inc_{os_name}'] except KeyError: raise DistutilsPlatformError( "I don't know where Python installs its C header files " @@ -244,7 +247,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): else: prefix = plat_specific and EXEC_PREFIX or PREFIX - if os.name == "posix": + if os.name == "posix" or is_mingw(): if plat_specific or standard_lib: # Platform-specific modules (any module from a non-pure-Python # module distribution) or standard Python library modules. @@ -273,7 +276,7 @@ def customize_compiler(compiler): # noqa: C901 Mainly needed on Unix, so we can plug in the information that varies across Unices and is stored in Python's Makefile. """ - if compiler.compiler_type == "unix": + if compiler.compiler_type in ["unix", "cygwin", "mingw32"]: if sys.platform == "darwin": # Perform first-time customization of compiler-related # config vars on OS X now that we know we need a compiler. diff --git a/distutils/util.py b/distutils/util.py index 5408b16032..bec979d7da 100644 --- a/distutils/util.py +++ b/distutils/util.py @@ -517,3 +517,12 @@ def rfc822_escape(header): suffix = indent if ends_in_newline else "" return indent.join(lines) + suffix + + +def is_mingw(): + """Returns True if the current platform is mingw. + + Python compiled with Mingw-w64 has sys.platform == 'win32' and + get_platform() starts with 'mingw'. + """ + return sys.platform == 'win32' and get_platform().startswith('mingw') From e69a19e59768b08a4a29bb8be09c09974d625ff5 Mon Sep 17 00:00:00 2001 From: Naveen M K Date: Sat, 8 Oct 2022 11:26:48 +0530 Subject: [PATCH 0257/1761] Fix tests for `get_msvcr` function --- distutils/tests/test_cygwinccompiler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distutils/tests/test_cygwinccompiler.py b/distutils/tests/test_cygwinccompiler.py index 6fb449a6c2..3cb95e1231 100644 --- a/distutils/tests/test_cygwinccompiler.py +++ b/distutils/tests/test_cygwinccompiler.py @@ -71,12 +71,12 @@ def test_check_config_h(self): assert check_config_h()[0] == CONFIG_H_OK def test_get_msvcr(self): - # none + # [] sys.version = ( '2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' '\n[GCC 4.0.1 (Apple Computer, Inc. 
build 5370)]'
         )
-        assert get_msvcr() is None
+        assert get_msvcr() == []

         # MSVC 7.0
         sys.version = (

From 571f761ab313aa0e8171da5956c090cb7d417764 Mon Sep 17 00:00:00 2001
From: Naveen M K
Date: Thu, 17 Nov 2022 13:25:32 +0530
Subject: [PATCH 0258/1761] Make `test_customize_compiler` run on mingw

Simply, run it for the subclasses of `UnixCCompiler`
---
 distutils/tests/test_sysconfig.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index bfeaf9a6b9..c7af690bcc 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -12,7 +12,7 @@ import distutils
 from distutils import sysconfig
-from distutils.ccompiler import get_default_compiler  # noqa: F401
+from distutils.ccompiler import new_compiler  # noqa: F401
 from distutils.unixccompiler import UnixCCompiler
 from test.support import swap_item

@@ -109,7 +109,7 @@ def set_executables(self, **kw):

         return comp

-    @pytest.mark.skipif("get_default_compiler() != 'unix'")
+    @pytest.mark.skipif("not isinstance(new_compiler(), UnixCCompiler)")
     def test_customize_compiler(self):
         # Make sure that sysconfig._config_vars is initialized
         sysconfig.get_config_vars()

From 9e707f07e9794a209ee49f23c3a8890b57c5e9e7 Mon Sep 17 00:00:00 2001
From: Naveen M K
Date: Thu, 17 Nov 2022 13:54:18 +0530
Subject: [PATCH 0259/1761] CI: add msys2 mingw test

---
 .github/workflows/main.yml | 39 ++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 45c66794f0..402363d636 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -115,6 +115,45 @@ jobs:
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
         run: tox

+  test_msys2_mingw:
+    strategy:
+      matrix:
+        include:
+        - { sys: mingw64, env: x86_64 }
+        - { sys: mingw32, env: i686 }
+        - { sys: ucrt64, env: ucrt-x86_64 }
+        - { sys: clang64, env: clang-x86_64 }
+    runs-on: windows-latest
+    steps:
+    - uses: actions/checkout@v4
+    - uses: msys2/setup-msys2@v2
+      with:
+        msystem: ${{matrix.sys}}
+        install: |
+          mingw-w64-${{matrix.env}}-toolchain
+          mingw-w64-${{matrix.env}}-python
+          mingw-w64-${{matrix.env}}-python-pip
+          mingw-w64-${{matrix.env}}-python-virtualenv
+          mingw-w64-${{matrix.env}}-cc
+          git
+    - name: Install Dependencies
+      shell: msys2 {0}
+      run: |
+        export VIRTUALENV_NO_SETUPTOOLS=1
+
+        python -m virtualenv venv
+        source venv/bin/activate
+
+        # python-ruff doesn't work without rust
+        sed -i '/pytest-ruff/d' setup.cfg
+
+        pip install -e .[testing]
+    - name: Run tests
+      shell: msys2 {0}
+      run: |
+        source venv/bin/activate
+        pytest distutils/tests
+
   ci_setuptools:
     # Integration testing with setuptools
     if: ${{ false }}  # disabled for deprecation warnings

From 36a2941e558126364900a6fd4ad0ab3c0d21a95b Mon Sep 17 00:00:00 2001
From: Naveen M K
Date: Wed, 8 Nov 2023 20:30:29 +0530
Subject: [PATCH 0260/1761] Fix path separator issue in change_root function

Use `os.sep` instead of hardcoding `\\`; also fix the appropriate tests.
---
 distutils/tests/test_util.py | 1 +
 distutils/util.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index 22a003d8ca..2b340a3b27 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -109,6 +109,7 @@ def _join(*path):

         # windows
         os.name = 'nt'
+        os.sep = '\\'

         def _isabs(path):
             return path.startswith('c:\\')

diff --git
a/distutils/util.py b/distutils/util.py
index bec979d7da..b291ba3ed7 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -165,7 +165,7 @@ def change_root(new_root, pathname):

     elif os.name == 'nt':
         (drive, path) = os.path.splitdrive(pathname)
-        if path[0] == '\\':
+        if path[0] == os.sep:
             path = path[1:]
         return os.path.join(new_root, path)

From c9770264f7af6f08d98bbd6928a921d6b14eb198 Mon Sep 17 00:00:00 2001
From: Naveen M K
Date: Wed, 8 Nov 2023 20:39:37 +0530
Subject: [PATCH 0261/1761] test_install: fix an issue specific to mingw

---
 distutils/tests/test_install.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/distutils/tests/test_install.py b/distutils/tests/test_install.py
index 082ee1d349..c74bf8cb21 100644
--- a/distutils/tests/test_install.py
+++ b/distutils/tests/test_install.py
@@ -16,6 +16,7 @@ from distutils.core import Distribution
 from distutils.errors import DistutilsOptionError
 from distutils.extension import Extension
+from distutils.util import is_mingw
 from distutils.tests import support, missing_compiler_executable

@@ -120,7 +121,7 @@ def _expanduser(path):
         assert 'usersite' in cmd.config_vars

         actual_headers = os.path.relpath(cmd.install_headers, site.USER_BASE)
-        if os.name == 'nt':
+        if os.name == 'nt' and not is_mingw():
             site_path = os.path.relpath(os.path.dirname(orig_site), orig_base)
             include = os.path.join(site_path, 'Include')
         else:

From 79830a81664feee1a9ae4d031909d261e363320e Mon Sep 17 00:00:00 2001
From: Naveen M K
Date: Wed, 8 Nov 2023 23:23:42 +0530
Subject: [PATCH 0262/1761] Remove testing dependency on jaraco.text

It depends on pydantic-core, which requires Rust to work and takes a
few minutes to build.
---
 distutils/tests/test_sysconfig.py | 6 +++++-
 setup.cfg | 1 -
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index c7af690bcc..c4e1648fab 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -8,7 +8,7 @@ import pytest
 import jaraco.envs
 import path
-from jaraco.text import trim
+from textwrap import dedent

 import distutils
 from distutils import sysconfig
@@ -19,6 +19,10 @@
 from .
import py37compat +def trim(s): + return dedent(s).strip() + + @pytest.mark.usefixtures('save_env') class TestSysconfig: def test_get_config_h_filename(self): diff --git a/setup.cfg b/setup.cfg index 68c38ac901..7a748f49b5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -40,7 +40,6 @@ testing = pytest >= 7.4.3 #186 jaraco.envs>=2.4 jaraco.path - jaraco.text path docutils pyfakefs From e4e16870510edbe59f834693ff62857ddd14481d Mon Sep 17 00:00:00 2001 From: Naveen M K Date: Thu, 9 Nov 2023 18:48:16 +0530 Subject: [PATCH 0263/1761] Add test for dll_libraries attribute in CygwinCCompiler class --- distutils/tests/test_cygwinccompiler.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/distutils/tests/test_cygwinccompiler.py b/distutils/tests/test_cygwinccompiler.py index 3cb95e1231..ffbaf1ea4d 100644 --- a/distutils/tests/test_cygwinccompiler.py +++ b/distutils/tests/test_cygwinccompiler.py @@ -114,3 +114,10 @@ def test_get_msvcr(self): ) with pytest.raises(ValueError): get_msvcr() + + @pytest.mark.skipif('sys.platform != "cygwin"') + def test_dll_libraries_not_none(self): + from distutils.cygwinccompiler import CygwinCCompiler + + compiler = CygwinCCompiler() + assert compiler.dll_libraries is not None From bbe7b64f4eedbeee9d71e197dd4b5d3175f4d04a Mon Sep 17 00:00:00 2001 From: Naveen M K Date: Thu, 9 Nov 2023 18:48:50 +0530 Subject: [PATCH 0264/1761] Add some tests for Mingw32CCompiler class --- distutils/tests/test_mingwccompiler.py | 45 ++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 distutils/tests/test_mingwccompiler.py diff --git a/distutils/tests/test_mingwccompiler.py b/distutils/tests/test_mingwccompiler.py new file mode 100644 index 0000000000..d81360e782 --- /dev/null +++ b/distutils/tests/test_mingwccompiler.py @@ -0,0 +1,45 @@ +import pytest + +from distutils.util import split_quoted, is_mingw +from distutils.errors import DistutilsPlatformError, CCompilerError + + +class TestMingw32CCompiler: + @pytest.mark.skipif(not is_mingw(), reason='not on mingw') + def test_compiler_type(self): + from distutils.cygwinccompiler import Mingw32CCompiler + + compiler = Mingw32CCompiler() + assert compiler.compiler_type == 'mingw32' + + @pytest.mark.skipif(not is_mingw(), reason='not on mingw') + def test_set_executables(self, monkeypatch): + from distutils.cygwinccompiler import Mingw32CCompiler + + monkeypatch.setenv('CC', 'cc') + monkeypatch.setenv('CXX', 'c++') + + compiler = Mingw32CCompiler() + + assert compiler.compiler == split_quoted('cc -O -Wall') + assert compiler.compiler_so == split_quoted('cc -shared -O -Wall') + assert compiler.compiler_cxx == split_quoted('c++ -O -Wall') + assert compiler.linker_exe == split_quoted('cc') + assert compiler.linker_so == split_quoted('cc -shared') + + @pytest.mark.skipif(not is_mingw(), reason='not on mingw') + def test_runtime_library_dir_option(self): + from distutils.cygwinccompiler import Mingw32CCompiler + + compiler = Mingw32CCompiler() + with pytest.raises(DistutilsPlatformError): + compiler.runtime_library_dir_option('/usr/lib') + + @pytest.mark.skipif(not is_mingw(), reason='not on mingw') + def test_cygwincc_error(self, monkeypatch): + import distutils.cygwinccompiler + + monkeypatch.setattr(distutils.cygwinccompiler, 'is_cygwincc', lambda _: True) + + with pytest.raises(CCompilerError): + distutils.cygwinccompiler.Mingw32CCompiler() From 97639640873788726339d407dc7f6881d2a5804b Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 10 Jan 2024 22:27:48 -0500 Subject: [PATCH 0265/1761] Fix "type information 
included by default"'s added version

---
 docs/userguide/miscellaneous.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst
index ea0a58845e..b175391b25 100644
--- a/docs/userguide/miscellaneous.rst
+++ b/docs/userguide/miscellaneous.rst
@@ -11,7 +11,7 @@ headers) listed as part of extensions when creating a :term:`source
 distribution (or "sdist")`.

 .. note::
-   .. versionadded:: v68.3.0
+   .. versionadded:: v69.0.0

    ``setuptools`` will attempt to include type information files
    by default in the distribution
    (``.pyi`` and ``py.typed``, as specified in :pep:`561`).

From e2b977c90760ce5ca0efc17febfe2bda4c0242ae Mon Sep 17 00:00:00 2001
From: Avasam
Date: Wed, 10 Jan 2024 22:34:07 -0500
Subject: [PATCH 0266/1761] Create 4182.doc.rst

---
 newsfragments/4182.doc.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4182.doc.rst

diff --git a/newsfragments/4182.doc.rst b/newsfragments/4182.doc.rst
new file mode 100644
index 0000000000..53f63138f6
--- /dev/null
+++ b/newsfragments/4182.doc.rst
@@ -0,0 +1 @@
+Changed ``versionadded`` for "Type information included by default" feature from ``v68.3.0`` to ``v69.0.0`` -- by :user:Avasam`

From 2707fbd418deec4248ff32f82894d59d96c09712 Mon Sep 17 00:00:00 2001
From: Tim Paine <3105306+timkpaine@users.noreply.github.com>
Date: Sun, 14 Jan 2024 09:19:32 -0500
Subject: [PATCH 0267/1761] Default optional-dependencies to dict if missing in pyproject, fixes #3801

---
 setuptools/config/_apply_pyprojecttoml.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index b562f91759..4aec5f1377 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -409,7 +409,7 @@ def _acessor(obj):
     "scripts": {},
     "gui-scripts": {},
     "dependencies": [],
-    "optional-dependencies": [],
+    "optional-dependencies": {},
 }

From ca45ba2741017d5789ec028237d4ed75473498be Mon Sep 17 00:00:00 2001
From: Karolina Surma
Date: Mon, 22 Jan 2024 15:24:33 +0100
Subject: [PATCH 0268/1761] Point to live documentation

---
 setuptools/config/_apply_pyprojecttoml.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 4aec5f1377..4261f3e218 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -424,7 +424,7 @@ class _MissingDynamic(SetuptoolsWarning):
     According to the spec (see the link below), however, setuptools CANNOT
     consider this value unless `{field}` is listed as `dynamic`.
- https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ + https://packaging.python.org/en/latest/specifications/pyproject-toml/#declaring-project-metadata-the-project-table To prevent this problem, you can list `{field}` under `dynamic` or alternatively remove the `[project]` table from your file and rely entirely on other means of From 97b53f9f12b52b226c7d64d289d7d3d4d912b637 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 23 Jan 2024 22:06:55 +0100 Subject: [PATCH 0269/1761] Fix a couple typos found by codespell --- ruff.toml | 2 +- setuptools/config/_apply_pyprojecttoml.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ruff.toml b/ruff.toml index 18776ab60a..ef4c4ddd45 100644 --- a/ruff.toml +++ b/ruff.toml @@ -20,7 +20,7 @@ extend-select = [ "UP", # pyupgrade ] extend-ignore = [ - "UP015", # redundant-open-modes, explicit is prefered + "UP015", # redundant-open-modes, explicit is preferred "UP030", # temporarily disabled "UP031", # temporarily disabled "UP032", # temporarily disabled diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index 4261f3e218..32fb00131e 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -241,7 +241,7 @@ def _unify_entry_points(project_table: dict): if group # now we can skip empty groups } # Sometimes this will set `project["entry-points"] = {}`, and that is - # intentional (for reseting configurations that are missing `dynamic`). + # intentional (for resetting configurations that are missing `dynamic`). def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path): From 79c89279d4daa5154793fe64e32e362eb74cd7b0 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 3 Oct 2023 11:18:12 -0400 Subject: [PATCH 0270/1761] Rely on pytest-home for the alternate home directory. --- setup.cfg | 1 + setuptools/tests/test_packageindex.py | 18 +++--------------- 2 files changed, 4 insertions(+), 15 deletions(-) diff --git a/setup.cfg b/setup.cfg index 27d771ad09..00a9be71e3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -74,6 +74,7 @@ testing = sys_platform != "cygwin" # for tools/finalize.py jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin" + pytest-home testing-integration = pytest diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py index 704a07f61d..5cdb141d0c 100644 --- a/setuptools/tests/test_packageindex.py +++ b/setuptools/tests/test_packageindex.py @@ -266,22 +266,10 @@ def test_report(self): assert rep == 'My message about md5' -@pytest.fixture -def temp_home(tmpdir, monkeypatch): - key = ( - 'USERPROFILE' - if platform.system() == 'Windows' and sys.version_info > (3, 8) - else 'HOME' - ) - - monkeypatch.setitem(os.environ, key, str(tmpdir)) - return tmpdir - - class TestPyPIConfig: - def test_percent_in_password(self, temp_home): - pypirc = temp_home / '.pypirc' - pypirc.write( + def test_percent_in_password(self, alt_home): + pypirc = alt_home / '.pypirc' + pypirc.write_text( DALS( """ [pypi] From 5c71837209ff335fac0c3329caa59bbe29286e87 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 3 Oct 2023 11:21:29 -0400 Subject: [PATCH 0271/1761] Rely on pytest-home for the alternate home directory. 
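A hypothetical test sketching the fixture these two patches switch to (`tmp_home_dir` is the pytest-home fixture used in the diff below; it points the home directory at a temporary path for the duration of the test):

    def test_reads_config_from_home(tmp_home_dir):
        # Files written here are picked up as if they lived in the real ~
        pypirc = tmp_home_dir / '.pypirc'
        pypirc.write_text('[pypi]\nusername: user\n')
        ...  # exercise code that reads ~/.pypirc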
--- newsfragments/4072.bugfix.rst | 1 + setup.cfg | 2 +- setuptools/tests/test_packageindex.py | 6 ++---- 3 files changed, 4 insertions(+), 5 deletions(-) create mode 100644 newsfragments/4072.bugfix.rst diff --git a/newsfragments/4072.bugfix.rst b/newsfragments/4072.bugfix.rst new file mode 100644 index 0000000000..d7115ecc74 --- /dev/null +++ b/newsfragments/4072.bugfix.rst @@ -0,0 +1 @@ +In tests, rely on pytest-home for reusable fixture. \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index 00a9be71e3..d5653c4501 100644 --- a/setup.cfg +++ b/setup.cfg @@ -74,7 +74,7 @@ testing = sys_platform != "cygwin" # for tools/finalize.py jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin" - pytest-home + pytest-home >= 0.5 testing-integration = pytest diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py index 5cdb141d0c..0ec2f85a4c 100644 --- a/setuptools/tests/test_packageindex.py +++ b/setuptools/tests/test_packageindex.py @@ -1,7 +1,5 @@ import sys -import os import distutils.errors -import platform import urllib.request import urllib.error import http.client @@ -267,8 +265,8 @@ def test_report(self): class TestPyPIConfig: - def test_percent_in_password(self, alt_home): - pypirc = alt_home / '.pypirc' + def test_percent_in_password(self, tmp_home_dir): + pypirc = tmp_home_dir / '.pypirc' pypirc.write_text( DALS( """ From 3db29879e958622000fa84c4517155fa5023c296 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Thu, 1 Feb 2024 17:51:08 +0100 Subject: [PATCH 0272/1761] [Docs] #4198 acknowledged the side effects of running said commands --- docs/userguide/quickstart.rst | 41 +++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 3a8e2a3ed4..df3e7bb965 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -433,6 +433,47 @@ See :doc:`development_mode` for more information. :doc:`pyproject.toml ` and/or :doc:`setup.cfg ` +.. note:: + + - Each of ``pip install -e .``, ``pip install .`` and ``python -m build`` create + a folder "src/PACKAGE_NAME.egg-info/" (path written assuming an :ref:`src-layout`) + + - ``pip install .`` also creates a "build/" folder + + - ``python -m build`` also creates a "dist/" folder + + .. note 1 + + These folders usually shouldn't be tracked in version control, so you can add such + `patterns `_ + for them in ``.gitignore``:: + + build/ + dist/ + *.egg-info/ + + These match: + + - only directories (not files) + - at any sub-levels + - ``*`` means any name + + .. note 2 + + These patterns are already included in the + `python .gitignore template + `_ + + .. note 3 + + Some developers simply ignore "PACKAGE_NAME.egg-info/" and "build/" folders, + others delete them. + + We suspect these 2 folders aren't needed after the command that created them + has finished execution. + Maybe they are an "artifact from the past", but maybe not. + If you know - add explanations, or help delete them automatically! 
+ Uploading your package to PyPI ------------------------------ From e2de68c9a4b0cec799730a7fbb2a01a3a74d7176 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Thu, 1 Feb 2024 18:24:26 +0100 Subject: [PATCH 0273/1761] add a news fragment --- newsfragments/4198.doc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4198.doc.rst diff --git a/newsfragments/4198.doc.rst b/newsfragments/4198.doc.rst new file mode 100644 index 0000000000..396361a20b --- /dev/null +++ b/newsfragments/4198.doc.rst @@ -0,0 +1 @@ +Described the auto-generated files -- by :user:`VladimirFokow` \ No newline at end of file From 7c2d6ce7bb74d96dd30ef3acc24ab89a725fac15 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Sat, 3 Feb 2024 13:12:55 +0100 Subject: [PATCH 0274/1761] update the note - added the link to the issue - text formatting & rewording --- docs/userguide/quickstart.rst | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index df3e7bb965..9ac9f28a68 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -435,14 +435,15 @@ See :doc:`development_mode` for more information. .. note:: - - Each of ``pip install -e .``, ``pip install .`` and ``python -m build`` create - a folder "src/PACKAGE_NAME.egg-info/" (path written assuming an :ref:`src-layout`) + - Currently, each of ``pip install -e .``, ``pip install .`` and ``python -m build`` + creates a folder "src/PACKAGE_NAME.egg-info/" + (the path is for the :ref:`src-layout`) - ``pip install .`` also creates a "build/" folder - ``python -m build`` also creates a "dist/" folder - .. note 1 + .. note 1: These folders usually shouldn't be tracked in version control, so you can add such `patterns `_ @@ -452,27 +453,28 @@ See :doc:`development_mode` for more information. dist/ *.egg-info/ - These match: + This will match: - only directories (not files) - at any sub-levels - ``*`` means any name - .. note 2 + .. note 2: - These patterns are already included in the + (These patterns are already included in the `python .gitignore template - `_ + `_) - .. note 3 + .. note 3: - Some developers simply ignore "PACKAGE_NAME.egg-info/" and "build/" folders, - others delete them. + - Some developers simply ignore "PACKAGE_NAME.egg-info/" and "build/" folders, + others delete them manually. - We suspect these 2 folders aren't needed after the command that created them - has finished execution. - Maybe they are an "artifact from the past", but maybe not. - If you know - add explanations, or help delete them automatically! + - We suspect these 2 folders aren't needed after the command that created them + has finished execution - maybe they are "artifacts from the past", but we aren't + sure. If you know - please + `contribute! `_ - + you can add explanations or help write code to delete them automatically. Uploading your package to PyPI From 29e5d34af962e59e92c501ebb988dcaf192b114e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 4 Feb 2024 10:15:04 -0500 Subject: [PATCH 0275/1761] Enable preview to enable preserving quotes. 
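For illustration (hypothetical input, not from the patch), `quote-style = "preserve"` makes the formatter leave existing string quotes alone instead of normalizing them:

    # With quote-style = "preserve", both spellings survive formatting as-is:
    single = 'kept as single quotes'
    double = "kept as double quotes"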
--- ruff.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ruff.toml b/ruff.toml index 795cca162a..e61ca8b0d6 100644 --- a/ruff.toml +++ b/ruff.toml @@ -18,5 +18,7 @@ ignore = [ ] [format] +# Enable preview, required for quote-style = "preserve" +preview = true # https://docs.astral.sh/ruff/settings/#format-quote-style quote-style = "preserve" From db174f01c4ac6438b89fea0fccba41a66813cbb6 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Sun, 4 Feb 2024 21:21:59 +0100 Subject: [PATCH 0276/1761] update the section with the current info - from issues #3214 , #3518 and #3683 --- docs/userguide/quickstart.rst | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 3a8e2a3ed4..44b7c88f51 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -442,14 +442,24 @@ distribution so others can use it. This functionality is provided by `. -Transitioning from ``setup.py`` to ``setup.cfg`` ------------------------------------------------- +Transitioning from ``setup.py`` to declarative config +----------------------------------------------------- To avoid executing arbitrary scripts and boilerplate code, we are transitioning -into a full-fledged ``setup.cfg`` to declare your package information instead -of running ``setup()``. This inevitably brings challenges due to a different -syntax. :doc:`Here ` we provide a quick guide to -understanding how ``setup.cfg`` is parsed by ``setuptools`` to ease the pain of -transition. +from defining all your package information by running ``setup()`` to doing this +declaratively - using ``setup.cfg`` or ``pyproject.toml``. + +To ease the challenges of transitioning, :doc:`here ` +we provide a quick guide to understanding how ``setup.cfg`` is parsed by +``setuptools``. Alternatively, :doc:`here ` +is the guide for ``pyproject.toml``. + +The approach ``setuptools`` would like to take is to eventually use a single +declarative format (``pyproject.toml``) instead of maintaining 2 +(``pyproject.toml`` / ``setup.cfg``). Chances are, ``setup.cfg`` will +continue to be maintained for a long time. Currently, ``pyproject.toml`` still +has some `limitations ` for certain users. + + .. _packaging-resources: From 9c2554624c81c3e45220563ae949f8bc01a9e8ce Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Sun, 4 Feb 2024 21:38:57 +0100 Subject: [PATCH 0277/1761] fix link IMO this is a better link - this page lists more issues, and actually compares setup.cfg to pyproject.toml in this sense --- docs/userguide/quickstart.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 44b7c88f51..017c03611c 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -457,8 +457,7 @@ The approach ``setuptools`` would like to take is to eventually use a single declarative format (``pyproject.toml``) instead of maintaining 2 (``pyproject.toml`` / ``setup.cfg``). Chances are, ``setup.cfg`` will continue to be maintained for a long time. Currently, ``pyproject.toml`` still -has some `limitations ` for certain users. - +has some `limitations `. .. 
_packaging-resources: From 52d74ad85fb45cc961079f6b8e300d5b496af409 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Sun, 4 Feb 2024 21:55:24 +0100 Subject: [PATCH 0278/1761] add a news fragment --- newsfragments/4200.doc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4200.doc.rst diff --git a/newsfragments/4200.doc.rst b/newsfragments/4200.doc.rst new file mode 100644 index 0000000000..4b368683cc --- /dev/null +++ b/newsfragments/4200.doc.rst @@ -0,0 +1 @@ +Updated "Quickstart" to describe the current status of ``setup.cfg`` and ``pyproject.toml`` -- by :user:`VladimirFokow` \ No newline at end of file From 8c36b61aa0de7f3f306c0ca2e9af6b620c332efc Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Sun, 4 Feb 2024 21:57:40 +0100 Subject: [PATCH 0279/1761] fix link syntax --- docs/userguide/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 017c03611c..bd544e7fe1 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -457,7 +457,7 @@ The approach ``setuptools`` would like to take is to eventually use a single declarative format (``pyproject.toml``) instead of maintaining 2 (``pyproject.toml`` / ``setup.cfg``). Chances are, ``setup.cfg`` will continue to be maintained for a long time. Currently, ``pyproject.toml`` still -has some `limitations `. +has some `limitations `_. .. _packaging-resources: From 66ed3a50a778c799a48a314b9ff7ac12b35c5314 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Sun, 4 Feb 2024 22:38:04 +0100 Subject: [PATCH 0280/1761] some fixes after seeing the Preview - fix link - make last paragraph a note (for easier readability) - change language from "still has" to "still may have" to be easier to maintain --- docs/userguide/quickstart.rst | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index bd544e7fe1..4b7b5de294 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -450,15 +450,16 @@ declaratively - using ``setup.cfg`` or ``pyproject.toml``. To ease the challenges of transitioning, :doc:`here ` we provide a quick guide to understanding how ``setup.cfg`` is parsed by -``setuptools``. Alternatively, :doc:`here ` -is the guide for ``pyproject.toml``. +``setuptools``. Alternatively, :doc:`here ` is the +guide for ``pyproject.toml``. -The approach ``setuptools`` would like to take is to eventually use a single -declarative format (``pyproject.toml``) instead of maintaining 2 -(``pyproject.toml`` / ``setup.cfg``). Chances are, ``setup.cfg`` will -continue to be maintained for a long time. Currently, ``pyproject.toml`` still -has some `limitations `_. +.. note:: + The approach ``setuptools`` would like to take is to eventually use a single + declarative format (``pyproject.toml``) instead of maintaining 2 + (``pyproject.toml`` / ``setup.cfg``). Chances are, ``setup.cfg`` will + continue to be maintained for a long time. Currently, ``pyproject.toml`` may + still have some `limitations `_. ..
_packaging-resources: From c5535f6247650ae596e0d5046b3d73c6179e2c7c Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Sun, 4 Feb 2024 22:42:03 +0100 Subject: [PATCH 0281/1761] finally fix link --- docs/userguide/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 4b7b5de294..58e99f94f7 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -450,7 +450,7 @@ declaratively - using ``setup.cfg`` or ``pyproject.toml``. To ease the challenges of transitioning, :doc:`here ` we provide a quick guide to understanding how ``setup.cfg`` is parsed by -``setuptools``. Alternatively, :doc:`here ` is the +``setuptools``. Alternatively, :doc:`here ` is the guide for ``pyproject.toml``. .. note:: From 601fac92b345e49906670eeda91bb48d4bbdab3c Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Mon, 5 Feb 2024 11:48:33 +0100 Subject: [PATCH 0282/1761] reorder: pyproject.toml first, setup.cfg second --- docs/userguide/quickstart.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 58e99f94f7..f8554da797 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -446,12 +446,12 @@ Transitioning from ``setup.py`` to declarative config ----------------------------------------------------- To avoid executing arbitrary scripts and boilerplate code, we are transitioning from defining all your package information by running ``setup()`` to doing this -declaratively - using ``setup.cfg`` or ``pyproject.toml``. +declaratively - using ``pyproject.toml`` (or older ``setup.cfg``). -To ease the challenges of transitioning, :doc:`here ` -we provide a quick guide to understanding how ``setup.cfg`` is parsed by -``setuptools``. Alternatively, :doc:`here ` is the -guide for ``pyproject.toml``. +To ease the challenges of transitioning, we provide a quick +:doc:`guide ` to understanding how ``pyproject.toml`` +is parsed by ``setuptools``. (Alternatively, :doc:`here ` +is the guide for ``setup.cfg``). .. note:: From 81e5ea81aaf2e8c4791297c081741b866ffbf368 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Mon, 5 Feb 2024 12:00:37 +0100 Subject: [PATCH 0283/1761] remove the last sentence (it wasn't good) --- docs/userguide/quickstart.rst | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index f8554da797..8f62cd2096 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -446,7 +446,7 @@ Transitioning from ``setup.py`` to declarative config ----------------------------------------------------- To avoid executing arbitrary scripts and boilerplate code, we are transitioning from defining all your package information by running ``setup()`` to doing this -declaratively - using ``pyproject.toml`` (or older ``setup.cfg``). +declaratively - by using ``pyproject.toml`` (or older ``setup.cfg``). To ease the challenges of transitioning, we provide a quick :doc:`guide ` to understanding how ``pyproject.toml`` @@ -457,9 +457,8 @@ is the guide for ``setup.cfg``). The approach ``setuptools`` would like to take is to eventually use a single declarative format (``pyproject.toml``) instead of maintaining 2 - (``pyproject.toml`` / ``setup.cfg``). 
Chances are, ``setup.cfg`` will - continue to be maintained for a long time. Currently, ``pyproject.toml`` may - still have some `limitations `_. + (``pyproject.toml`` / ``setup.cfg``). Yet chances are, ``setup.cfg`` will + continue to be maintained for a long time. .. _packaging-resources: From 2bd9897facf4aad3e2c7bb5aaf436fb37efc59c5 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Mon, 5 Feb 2024 12:08:43 +0100 Subject: [PATCH 0284/1761] add a comma --- docs/userguide/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 8f62cd2096..fcc203294a 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -457,7 +457,7 @@ is the guide for ``setup.cfg``). The approach ``setuptools`` would like to take is to eventually use a single declarative format (``pyproject.toml``) instead of maintaining 2 - (``pyproject.toml`` / ``setup.cfg``). Yet chances are, ``setup.cfg`` will + (``pyproject.toml`` / ``setup.cfg``). Yet, chances are, ``setup.cfg`` will continue to be maintained for a long time. .. _packaging-resources: From fc2bc7393021f3fac4b68b17693f8239a9780591 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Mon, 5 Feb 2024 12:32:34 +0100 Subject: [PATCH 0285/1761] replace "usually" with "for example" setup.cfg --- docs/userguide/declarative_config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst index c297c4d35f..1d5bf6ae42 100644 --- a/docs/userguide/declarative_config.rst +++ b/docs/userguide/declarative_config.rst @@ -11,7 +11,7 @@ Configuring setuptools using ``setup.cfg`` files build API) is desired, a ``setup.py`` file containing a ``setup()`` function call is still required even if your configuration resides in ``setup.cfg``. -``Setuptools`` allows using configuration files (usually :file:`setup.cfg`) +``Setuptools`` allows using configuration files (for example, :file:`setup.cfg`) to define a package’s metadata and other options that are normally supplied to the ``setup()`` function (declarative config). From 87b86a5a6993cff7394f7d6bb601d39d9c278186 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Mon, 5 Feb 2024 12:46:26 +0100 Subject: [PATCH 0286/1761] make links easier to click - by placing them farther apart --- docs/userguide/quickstart.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index fcc203294a..fb1564b8c5 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -450,8 +450,8 @@ declaratively - by using ``pyproject.toml`` (or older ``setup.cfg``). To ease the challenges of transitioning, we provide a quick :doc:`guide ` to understanding how ``pyproject.toml`` -is parsed by ``setuptools``. (Alternatively, :doc:`here ` -is the guide for ``setup.cfg``). +is parsed by ``setuptools``. (Alternatively, here is the +:doc:`guide ` for ``setup.cfg``). .. 
note:: From 77836fe70fa689cd9a2dc5d4e10ec8f85e1ab9be Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Mon, 5 Feb 2024 14:14:49 +0100 Subject: [PATCH 0287/1761] implement suggestions from code review --- docs/userguide/quickstart.rst | 53 ++++++++--------------------------- 1 file changed, 12 insertions(+), 41 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 9ac9f28a68..9680eaa99a 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -418,7 +418,7 @@ See :doc:`development_mode` for more information. If you have a version of ``pip`` older than v21.1 or is using a different packaging-related tool that does not support :pep:`660`, you might need to keep a - ``setup.py`` file in file in your repository if you want to use editable + ``setup.py`` file in your repository if you want to use editable installs. A simple script will suffice, for example: @@ -435,46 +435,12 @@ See :doc:`development_mode` for more information. .. note:: - - Currently, each of ``pip install -e .``, ``pip install .`` and ``python -m build`` - creates a folder "src/PACKAGE_NAME.egg-info/" - (the path is for the :ref:`src-layout`) - - - ``pip install .`` also creates a "build/" folder - - - ``python -m build`` also creates a "dist/" folder - - .. note 1: - - These folders usually shouldn't be tracked in version control, so you can add such - `patterns `_ - for them in ``.gitignore``:: - - build/ - dist/ - *.egg-info/ - - This will match: - - - only directories (not files) - - at any sub-levels - - ``*`` means any name - - .. note 2: - - (These patterns are already included in the - `python .gitignore template - `_) - - .. note 3: - - - Some developers simply ignore "PACKAGE_NAME.egg-info/" and "build/" folders, - others delete them manually. - - - We suspect these 2 folders aren't needed after the command that created them - has finished execution - maybe they are "artifacts from the past", but we aren't - sure. If you know - please - `contribute! `_ - - you can add explanations or help write code to delete them automatically. + During the build process - which is also triggered when you install the project from source code, + e.g. ``pip install -e .`` - some directories hosting build artefacts and cache files may be + created, such as ``build``, ``dist``, ``*.egg-info`` [#cache]_. + You can configure your version control system to ignore them + (see `GitHub's .gitignore template `_ + for an example). Uploading your package to PyPI @@ -519,4 +485,9 @@ up-to-date references that can help you when it is time to distribute your work. supported in those files (e.g. C extensions). See :ref:`note `. +.. [#cache] + If you feel that caching is causing problems to your build specially after changes in the + configuration files, consider removing ``build``, ``dist``, ``*.egg-info`` before + rebuilding or installing your project. + .. 
_PyPI: https://pypi.org From 353eaa70181d982593bfdb2c0cc8597a96165bed Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Mon, 5 Feb 2024 14:33:48 +0100 Subject: [PATCH 0288/1761] reword the note --- docs/userguide/quickstart.rst | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 9680eaa99a..d3fc22a106 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -435,12 +435,14 @@ See :doc:`development_mode` for more information. .. note:: - During the build process - which is also triggered when you install the project from source code, - e.g. ``pip install -e .`` - some directories hosting build artefacts and cache files may be - created, such as ``build``, ``dist``, ``*.egg-info`` [#cache]_. - You can configure your version control system to ignore them - (see `GitHub's .gitignore template `_ - for an example). + During the build process from source code (for example, by + ``pip install -e .`` or ``python -m build``) + some directories hosting build artefacts and cache files may be + created, such as ``build``, ``dist``, ``*.egg-info`` [#cache]_. + You can configure your version control system to ignore them + (see `GitHub's .gitignore template + `_ + for an example). Uploading your package to PyPI From fb5e10198e4ae20cb8232a668e10ac4f649cfc93 Mon Sep 17 00:00:00 2001 From: Vladimir Fokow <57260995+VladimirFokow@users.noreply.github.com> Date: Mon, 5 Feb 2024 14:40:16 +0100 Subject: [PATCH 0289/1761] implement suggested changes - reword - add a comma --- docs/userguide/quickstart.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index d3fc22a106..9f573f4a04 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -435,8 +435,8 @@ See :doc:`development_mode` for more information. .. note:: - During the build process from source code (for example, by - ``pip install -e .`` or ``python -m build``) + When building from source code (for example, by ``python -m build`` + or ``pip install -e .``) some directories hosting build artefacts and cache files may be created, such as ``build``, ``dist``, ``*.egg-info`` [#cache]_. You can configure your version control system to ignore them @@ -488,7 +488,7 @@ up-to-date references that can help you when it is time to distribute your work. See :ref:`note `. .. [#cache] - If you feel that caching is causing problems to your build specially after changes in the + If you feel that caching is causing problems to your build, specially after changes in the configuration files, consider removing ``build``, ``dist``, ``*.egg-info`` before rebuilding or installing your project. 
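
The cleanup recommended by the ``[#cache]`` footnote above can be scripted. A minimal sketch, assuming it runs from the project root; the ``clean_build_artifacts`` helper and the extra ``src/*.egg-info`` pattern are illustrative assumptions, while the other glob patterns mirror the directories named in the note::

    import pathlib
    import shutil

    def clean_build_artifacts(root: str = ".") -> None:
        """Remove the build/, dist/ and *.egg-info/ directories named above."""
        base = pathlib.Path(root)
        # "src/*.egg-info" additionally covers projects using the src-layout.
        for pattern in ("build", "dist", "*.egg-info", "src/*.egg-info"):
            for path in base.glob(pattern):
                if path.is_dir():
                    shutil.rmtree(path)

    clean_build_artifacts()
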
From bcf6759394f4d3e8efcb87a5a5b8d04a84a0f5e5 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 5 Feb 2024 18:17:03 +0000 Subject: [PATCH 0290/1761] Ignore side-warning in test --- setuptools/tests/config/test_apply_pyprojecttoml.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index 6935523987..47c329157e 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -339,6 +339,7 @@ def pyproject(self, tmp_path, dynamic, extra_content=""): ("entry_points", "gui-scripts", {"gui_scripts": ["bazquux=bazquux:main"]}), ], ) + @pytest.mark.filterwarnings("ignore:.*install_requires. overwritten") def test_not_listed_in_dynamic(self, tmp_path, attr, field, value): """Setuptools cannot set a field if not listed in ``dynamic``""" pyproject = self.pyproject(tmp_path, []) From a21c72a5a08584d75c6e5871405e483ac9bb282c Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 5 Feb 2024 18:58:40 +0000 Subject: [PATCH 0291/1761] Fix warning message matching --- setuptools/tests/test_editable_install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index 987c2fd67c..862f8172cd 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -1180,7 +1180,7 @@ def test_debugging_tips(tmpdir_cwd, monkeypatch): simulated_failure = Mock(side_effect=SimulatedErr()) monkeypatch.setattr(cmd, "get_finalized_command", simulated_failure) - expected_msg = "following steps are recommended to help debugging" + expected_msg = "following steps are recommended to help debug" with pytest.raises(SimulatedErr), pytest.warns(_DebuggingTips, match=expected_msg): cmd.run() From 514417c3cd273774b1d18081788475744abc6b14 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 5 Feb 2024 19:06:21 +0000 Subject: [PATCH 0292/1761] Refactor pytest.mark to be more specific --- setuptools/tests/config/test_apply_pyprojecttoml.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index 47c329157e..c0ad12c3cb 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -333,13 +333,19 @@ def pyproject(self, tmp_path, dynamic, extra_content=""): @pytest.mark.parametrize( "attr, field, value", [ - ("install_requires", "dependencies", ["six"]), ("classifiers", "classifiers", ["Private :: Classifier"]), ("entry_points", "scripts", {"console_scripts": ["foobar=foobar:main"]}), ("entry_points", "gui-scripts", {"gui_scripts": ["bazquux=bazquux:main"]}), + pytest.param( + "install_requires", + "dependencies", + ["six"], + marks=[ + pytest.mark.filterwarnings("ignore:.*install_requires. overwritten") + ], + ), ], ) - @pytest.mark.filterwarnings("ignore:.*install_requires. 
overwritten") def test_not_listed_in_dynamic(self, tmp_path, attr, field, value): """Setuptools cannot set a field if not listed in ``dynamic``""" pyproject = self.pyproject(tmp_path, []) From f9298537ca6bde6f345a0143bff92fed218e3a75 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 5 Feb 2024 19:09:31 +0000 Subject: [PATCH 0293/1761] Improve formatting for consistency --- setuptools/tests/config/test_apply_pyprojecttoml.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index c0ad12c3cb..cffc779b94 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -337,9 +337,7 @@ def pyproject(self, tmp_path, dynamic, extra_content=""): ("entry_points", "scripts", {"console_scripts": ["foobar=foobar:main"]}), ("entry_points", "gui-scripts", {"gui_scripts": ["bazquux=bazquux:main"]}), pytest.param( - "install_requires", - "dependencies", - ["six"], + *("install_requires", "dependencies", ["six"]), marks=[ pytest.mark.filterwarnings("ignore:.*install_requires. overwritten") ], From ce4cb8c056c644ead2f1318d6f8833b62bac512e Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 5 Feb 2024 16:13:02 +0000 Subject: [PATCH 0294/1761] Remove trailling spaces --- docs/userguide/quickstart.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index 0588bc03bb..a3f285f010 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -435,12 +435,12 @@ See :doc:`development_mode` for more information. .. note:: - When building from source code (for example, by ``python -m build`` + When building from source code (for example, by ``python -m build`` or ``pip install -e .``) some directories hosting build artefacts and cache files may be created, such as ``build``, ``dist``, ``*.egg-info`` [#cache]_. You can configure your version control system to ignore them - (see `GitHub's .gitignore template + (see `GitHub's .gitignore template `_ for an example). @@ -459,9 +459,9 @@ To avoid executing arbitrary scripts and boilerplate code, we are transitioning from defining all your package information by running ``setup()`` to doing this declaratively - by using ``pyproject.toml`` (or older ``setup.cfg``). -To ease the challenges of transitioning, we provide a quick +To ease the challenges of transitioning, we provide a quick :doc:`guide ` to understanding how ``pyproject.toml`` -is parsed by ``setuptools``. (Alternatively, here is the +is parsed by ``setuptools``. (Alternatively, here is the :doc:`guide ` for ``setup.cfg``). .. note:: @@ -500,5 +500,5 @@ up-to-date references that can help you when it is time to distribute your work. If you feel that caching is causing problems to your build, specially after changes in the configuration files, consider removing ``build``, ``dist``, ``*.egg-info`` before rebuilding or installing your project. - + .. 
_PyPI: https://pypi.org From 0bab5579efd4b86c579dd948818f48decb7d213a Mon Sep 17 00:00:00 2001 From: Matthias Koeppe Date: Sat, 3 Feb 2024 20:22:57 -0800 Subject: [PATCH 0295/1761] .github/workflows/ci-sage.yml: Update for changed spkg source type --- .github/workflows/ci-sage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-sage.yml b/.github/workflows/ci-sage.yml index 81ee101528..88cc75cabb 100644 --- a/.github/workflows/ci-sage.yml +++ b/.github/workflows/ci-sage.yml @@ -70,7 +70,7 @@ jobs: run: | (cd build/pkgs/${{ env.SPKG }}/src && python3 -m build --sdist) \ && mkdir -p upstream && cp build/pkgs/${{ env.SPKG }}/src/dist/*.tar.gz upstream/${{ env.SPKG }}-git.tar.gz \ - && echo "sage-package create ${{ env.SPKG }} --version git --tarball ${{ env.SPKG }}-git.tar.gz --type=standard" > upstream/update-pkgs.sh \ + && echo "sage-package create ${{ env.SPKG }} --pypi --source normal --type standard; sage-package create ${{ env.SPKG }} --version git --tarball ${{ env.SPKG }}-git.tar.gz --type=standard" > upstream/update-pkgs.sh \ && if [ -n "${{ env.REMOVE_PATCHES }}" ]; then echo "(cd ../build/pkgs/${{ env.SPKG }}/patches && rm -f ${{ env.REMOVE_PATCHES }}; :)" >> upstream/update-pkgs.sh; fi \ && ls -l upstream/ - uses: actions/upload-artifact@v2 From 96876a957b2b47c7a0654837fce90ab9a497ee8d Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 6 Feb 2024 09:27:12 +0000 Subject: [PATCH 0296/1761] Update pyproject_config.rst Fix misspelling in docs --- docs/userguide/pyproject_config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst index 379d41b03b..749afe2344 100644 --- a/docs/userguide/pyproject_config.rst +++ b/docs/userguide/pyproject_config.rst @@ -230,7 +230,7 @@ some of them dynamically. Also note that the file format for specifying dependencies resembles a ``requirements.txt`` file, however please keep in mind that all non-comment lines must conform with :pep:`508` -(``pip``-specify syntaxes, e.g. ``-c/-r/-e`` flags, are not supported). +(``pip`` specific syntaxes, e.g. ``-c/-r/-e`` and other flags, are not supported). .. note:: From 2a402a39f154d9a6cf4621e8c5d22bace749b55e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartosz=20S=C5=82awecki?= Date: Tue, 6 Feb 2024 23:01:32 +0100 Subject: [PATCH 0297/1761] Tweak coverage configuration for type checking (jaraco/skeleton#97) * Tweak coverage configuration for type checking * Use `exclude_also` instead of `exclude_lines` Co-authored-by: Sviatoslav Sydorenko * Add reference to the issue. --------- Co-authored-by: Sviatoslav Sydorenko Co-authored-by: Jason R. Coombs --- .coveragerc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.coveragerc b/.coveragerc index 02879483d7..35b98b1df9 100644 --- a/.coveragerc +++ b/.coveragerc @@ -7,3 +7,7 @@ disable_warnings = [report] show_missing = True +exclude_also = + # jaraco/skeleton#97 + @overload + if TYPE_CHECKING: From 68ac292eb37ce92e992e6fab05a44ad86f32e8f1 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 6 Feb 2024 16:53:46 -0500 Subject: [PATCH 0298/1761] Use latest versions in RTD boilerplate.
--- .readthedocs.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 053c728715..6848906374 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -7,6 +7,6 @@ python: # required boilerplate readthedocs/readthedocs.org#10401 build: - os: ubuntu-22.04 + os: ubuntu-lts-latest tools: - python: "3" + python: latest From 63222415e1876e3a5fb9f6eca85439fc29633855 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 20:22:08 +0100 Subject: [PATCH 0299/1761] Fix flake8-return warning RET502 Do not implicitly `return None` in function able to return non-`None` value --- setuptools/discovery.py | 2 +- setuptools/tests/config/test_setupcfg.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/setuptools/discovery.py b/setuptools/discovery.py index 25962863b9..50a948750f 100644 --- a/setuptools/discovery.py +++ b/setuptools/discovery.py @@ -485,7 +485,7 @@ def analyse_name(self): """ if self.dist.metadata.name or self.dist.name: # get_name() is not reliable (can return "UNKNOWN") - return None + return log.debug("No `name` configuration, performing automatic discovery") diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py index d235478f7e..7f93858bd4 100644 --- a/setuptools/tests/config/test_setupcfg.py +++ b/setuptools/tests/config/test_setupcfg.py @@ -964,7 +964,6 @@ def _fake_distribution_init(self, dist, attrs): 'Link One': 'https://example.com/one/', 'Link Two': 'https://example.com/two/', } - return None @patch.object(_Distribution, '__init__', autospec=True) def test_external_setters(self, mock_parent_init, tmpdir): From b76097b841ea82648fa7847e520b9d942326b569 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 20:28:51 +0100 Subject: [PATCH 0300/1761] Fix flake8-return warning RET502 Do not implicitly `return None` in function able to return non-`None` value --- _distutils_hack/__init__.py | 6 +++--- setuptools/package_index.py | 2 +- setuptools/sandbox.py | 4 ++-- setuptools/tests/test_distutils_adoption.py | 4 +--- 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py index c9d1e24790..4d3f09b0ae 100644 --- a/_distutils_hack/__init__.py +++ b/_distutils_hack/__init__.py @@ -83,7 +83,7 @@ def find_spec(self, fullname, path, target=None): # optimization: only consider top level modules and those # found in the CPython test suite. if path is not None and not fullname.startswith('test.'): - return + return None method_name = 'spec_for_{fullname}'.format(**locals()) method = getattr(self, method_name, lambda: None) @@ -91,7 +91,7 @@ def find_spec(self, fullname, path, target=None): def spec_for_distutils(self): if self.is_cpython(): - return + return None import importlib import importlib.abc @@ -108,7 +108,7 @@ def spec_for_distutils(self): # setuptools from the path but only after the hook # has been loaded. Ref #2980. # In either case, fall back to stdlib behavior. 
- return + return None class DistutilsLoader(importlib.abc.Loader): def create_module(self, spec): diff --git a/setuptools/package_index.py b/setuptools/package_index.py index 1e535bc747..099b9263d2 100644 --- a/setuptools/package_index.py +++ b/setuptools/package_index.py @@ -319,7 +319,7 @@ def add(self, dist): try: parse_version(dist.version) except Exception: - return + return None return super().add(dist) # FIXME: 'PackageIndex.process_url' is too complex (14) diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index 757074166a..7634b1320b 100644 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -115,7 +115,7 @@ def dump(type, exc): class ExceptionSaver: """ - A Context Manager that will save an exception, serialized, and restore it + A Context Manager that will save an exception, serialize, and restore it later. """ @@ -124,7 +124,7 @@ def __enter__(self): def __exit__(self, type, exc, tb): if not exc: - return + return False # dump the exception self._saved = UnpickleableException.dump(type, exc) diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py index e180547f0d..eb7feba637 100644 --- a/setuptools/tests/test_distutils_adoption.py +++ b/setuptools/tests/test_distutils_adoption.py @@ -16,9 +16,7 @@ def win_sr(env): > Fatal Python error: _Py_HashRandomization_Init: failed to > get random numbers to initialize Python """ - if env is None: - return - if platform.system() == 'Windows': + if env and platform.system() == 'Windows': env['SYSTEMROOT'] = os.environ['SYSTEMROOT'] return env From a6f6503114153b9c9af5f3b6bd7729d08aafe3fa Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 21:06:16 +0100 Subject: [PATCH 0301/1761] Fix flake8-return warning RET503 Missing explicit `return` at the end of function able to return non-`None` value --- pkg_resources/__init__.py | 4 ++-- setuptools/command/bdist_egg.py | 1 + setuptools/command/build_ext.py | 1 + setuptools/command/develop.py | 2 ++ setuptools/command/easy_install.py | 1 + setuptools/command/install.py | 5 +++++ setuptools/depends.py | 1 + setuptools/dist.py | 2 ++ setuptools/msvc.py | 6 ++++++ setuptools/package_index.py | 5 +++++ setuptools/unicode_utils.py | 4 +++- 11 files changed, 29 insertions(+), 3 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index d6847448eb..7734bce6e4 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -1127,8 +1127,7 @@ def obtain(self, requirement, installer=None): None is returned instead. 
This method is a hook that allows subclasses to attempt other ways of obtaining a distribution before falling back to the `installer` argument.""" - if installer is not None: - return installer(requirement) + return installer(requirement) if installer else None def __iter__(self): """Yield the unique project names of the available distributions""" @@ -3210,6 +3209,7 @@ def _find_adapter(registry, ob): for t in types: if t in registry: return registry[t] + return None def ensure_directory(path): diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index 8aaf155833..3687efdf9c 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -419,6 +419,7 @@ def can_scan(): "Please ask the author to include a 'zip_safe'" " setting (either True or False) in the package's setup.py" ) + return False # Attribute names of options for commands that might need to be convinced to diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index ef2a4da84d..780afe3aec 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -76,6 +76,7 @@ def get_abi3_suffix(): return suffix elif suffix == '.pyd': # Windows return suffix + return None class build_ext(_build_ext): diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py index ea3e48e55c..583e8cf5f2 100644 --- a/setuptools/command/develop.py +++ b/setuptools/command/develop.py @@ -157,6 +157,8 @@ def install_egg_scripts(self, dist): script_text = strm.read() self.install_script(dist, script_name, script_text, script_path) + return None + def install_wrapper_scripts(self, dist): dist = VersionlessRequirement(dist) return easy_install.install_wrapper_scripts(self, dist) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index f73d857f08..ac5264f90d 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -741,6 +741,7 @@ def install_item(self, spec, download, tmpdir, deps, install_needed=False): for dist in dists: if dist in spec: return dist + return None def select_scheme(self, name): try: diff --git a/setuptools/command/install.py b/setuptools/command/install.py index 606cce9d89..b97a9b4713 100644 --- a/setuptools/command/install.py +++ b/setuptools/command/install.py @@ -71,6 +71,7 @@ def handle_extra_path(self): # command without --root or --single-version-externally-managed self.path_file = None self.extra_dirs = '' + return None def run(self): # Explicit request for old-style install? 
Just do it @@ -83,6 +84,8 @@ def run(self): else: self.do_egg_install() + return None + @staticmethod def _called_from_setup(run_frame): """ @@ -114,6 +117,8 @@ def _called_from_setup(run_frame): return caller_module == 'distutils.dist' and info.function == 'run_commands' + return False + def do_egg_install(self): easy_install = self.distribution.get_command_class('easy_install') diff --git a/setuptools/depends.py b/setuptools/depends.py index 42907d9bd4..03e116c4a1 100644 --- a/setuptools/depends.py +++ b/setuptools/depends.py @@ -159,6 +159,7 @@ def extract_constant(code, symbol, default=-1): else: const = default + return None def _update_globals(): """ diff --git a/setuptools/dist.py b/setuptools/dist.py index 0d35583dbc..d5787ed474 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -778,6 +778,8 @@ def has_contents_for(self, package): if p == package or p.startswith(pfx): return True + return False + def _exclude_misc(self, name, value): """Handle 'exclude()' for list/tuple attrs without a special handler""" if not isinstance(value, sequence): diff --git a/setuptools/msvc.py b/setuptools/msvc.py index aa69db5810..53fe7b0de1 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -577,6 +577,7 @@ def lookup(self, key, name): finally: if bkey: closekey(bkey) + return None class SystemInfo: @@ -823,6 +824,7 @@ def WindowsSdkVersion(self): return '8.1', '8.1a' elif self.vs_ver >= 14.0: return '10.0', '8.1' + return None @property def WindowsSdkLastVersion(self): @@ -914,6 +916,8 @@ def WindowsSDKExecutablePath(self): if execpath: return execpath + return None + @property def FSharpInstallDir(self): """ @@ -946,6 +950,8 @@ def UniversalCRTSdkDir(self): if sdkdir: return sdkdir or '' + return None + @property def UniversalCRTSdkLastVersion(self): """ diff --git a/setuptools/package_index.py b/setuptools/package_index.py index 099b9263d2..271aa97f71 100644 --- a/setuptools/package_index.py +++ b/setuptools/package_index.py @@ -406,6 +406,7 @@ def url_ok(self, url, fatal=False): raise DistutilsError(msg % url) else: self.warn(msg, url) + return False def scan_egg_links(self, search_path): dirs = filter(os.path.isdir, search_path) @@ -648,6 +649,8 @@ def find(req, env=None): if os.path.exists(dist.download_location): return dist + return None + if force_scan: self.prescan() self.find_packages(requirement) @@ -671,6 +674,7 @@ def find(req, env=None): (source and "a source distribution of " or ""), requirement, ) + return None else: self.info("Best match: %s", dist) return dist.clone(location=dist.download_location) @@ -1034,6 +1038,7 @@ def find_credential(self, url): for repository, cred in self.creds_by_repository.items(): if url.startswith(repository): return cred + return None def open_with_auth(url, opener=urllib.request.urlopen): diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py index e84e65e3e1..d43dcc11f9 100644 --- a/setuptools/unicode_utils.py +++ b/setuptools/unicode_utils.py @@ -18,7 +18,7 @@ def decompose(path): def filesys_decode(path): """ Ensure that the given path is decoded, - NONE when no expected encoding works + ``None`` when no expected encoding works """ if isinstance(path, str): @@ -33,6 +33,8 @@ def filesys_decode(path): except UnicodeDecodeError: continue + return None + def try_encode(string, enc): "turn unicode encoding into a functional routine" From 7ce6f39d75971f337cf3dd4200199b3588f1d5cc Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 2 Jan 2024 21:12:03 +0100 
Subject: [PATCH 0302/1761] Fix flake8-return warning RET504 Unnecessary assignment to `...` before `return` statement --- pkg_resources/__init__.py | 4 +--- setuptools/command/bdist_rpm.py | 3 +-- setuptools/command/easy_install.py | 4 +--- setuptools/installer.py | 3 +-- setuptools/tests/server.py | 3 +-- setuptools/tests/test_build_ext.py | 3 +-- setuptools/tests/test_core_metadata.py | 8 ++------ 7 files changed, 8 insertions(+), 20 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 7734bce6e4..10c6a9cd06 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -2832,9 +2832,7 @@ def _get_metadata(self, name): def _get_version(self): lines = self._get_metadata(self.PKG_INFO) - version = _version_from_file(lines) - - return version + return _version_from_file(lines) def activate(self, path=None, replace=False): """Ensure distribution is importable on `path` (default=sys.path)""" diff --git a/setuptools/command/bdist_rpm.py b/setuptools/command/bdist_rpm.py index 30b7c23385..70ed6b6097 100644 --- a/setuptools/command/bdist_rpm.py +++ b/setuptools/command/bdist_rpm.py @@ -30,11 +30,10 @@ def run(self): def _make_spec_file(self): spec = orig.bdist_rpm._make_spec_file(self) - spec = [ + return [ line.replace( "setup.py install ", "setup.py install --single-version-externally-managed ", ).replace("%setup", "%setup -n %{name}-%{unmangled_version}") for line in spec ] - return spec diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index ac5264f90d..76b3adfb34 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -1474,9 +1474,7 @@ def get_site_dirs(): with contextlib.suppress(AttributeError): sitedirs.extend(site.getsitepackages()) - sitedirs = list(map(normalize_path, sitedirs)) - - return sitedirs + return list(map(normalize_path, sitedirs)) def expand_paths(inputs): # noqa: C901 # is too complex (11) # FIXME diff --git a/setuptools/installer.py b/setuptools/installer.py index e83f959a1b..a6aff723c2 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -107,10 +107,9 @@ def _fetch_build_egg_no_warn(dist, req): # noqa: C901 # is too complex (16) # dist_metadata = pkg_resources.PathMetadata( dist_location, os.path.join(dist_location, 'EGG-INFO') ) - dist = pkg_resources.Distribution.from_filename( + return pkg_resources.Distribution.from_filename( dist_location, metadata=dist_metadata ) - return dist def strip_marker(req): diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py index 6b2787c0b6..2caae6dca9 100644 --- a/setuptools/tests/server.py +++ b/setuptools/tests/server.py @@ -84,5 +84,4 @@ def path_to_url(path, authority=None): base = 'file:' if authority is not None: base += '//' + authority - url = urllib.parse.urljoin(base, urllib.request.pathname2url(path)) - return url + return urllib.parse.urljoin(base, urllib.request.pathname2url(path)) diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py index ed3bb6f665..0482fb5d5c 100644 --- a/setuptools/tests/test_build_ext.py +++ b/setuptools/tests/test_build_ext.py @@ -98,12 +98,11 @@ def dist_with_example(self): ext3 = Extension("ext3", ["c-extension/ext3.c"]) path.build(files) - dist = Distribution({ + return Distribution({ "script_name": "%test%", "ext_modules": [ext1, ext2, ext3], "package_dir": {"": "src"}, }) - return dist def test_get_outputs(self, tmpdir_cwd, monkeypatch): monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3') # make test OS-independent 
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py index 6a52130112..4857093206 100644 --- a/setuptools/tests/test_core_metadata.py +++ b/setuptools/tests/test_core_metadata.py @@ -64,7 +64,7 @@ def __read_test_cases(): params = functools.partial(dict, base) - test_cases = [ + return [ ('Metadata version 1.0', params()), ( 'Metadata Version 1.0: Short long description', @@ -156,8 +156,6 @@ def __read_test_cases(): ), ] - return test_cases - @pytest.mark.parametrize('name,attrs', __read_test_cases()) def test_read_metadata(name, attrs): @@ -209,7 +207,7 @@ def merge_dicts(d1, d2): return d1 - test_cases = [ + return [ ('No author, no maintainer', attrs.copy()), ( 'Author (no e-mail), no maintainer', @@ -267,8 +265,6 @@ def merge_dicts(d1, d2): ('Maintainer unicode', merge_dicts(attrs, {'maintainer': 'Jan Łukasiewicz'})), ] - return test_cases - @pytest.mark.parametrize('name,attrs', __maintainer_test_cases()) def test_maintainer_author(name, attrs, tmpdir): From b3e86795be3d0e74009592b3b05ac3485656b88c Mon Sep 17 00:00:00 2001 From: graingert-coef <151018808+graingert-coef@users.noreply.github.com> Date: Wed, 7 Feb 2024 16:20:02 +0000 Subject: [PATCH 0303/1761] fix data_files kwarg documentation --- docs/references/keywords.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst index 6173e3c228..0624677a65 100644 --- a/docs/references/keywords.rst +++ b/docs/references/keywords.rst @@ -186,7 +186,7 @@ extensions). ``data_files`` is deprecated. It does not work with wheels, so it should be avoided. - A list of strings specifying the data files to install. + A sequence of (*directory*, *files*) pairs. .. _keyword/package_dir: From b02951882d6d82851dfa09a2a87e857efba7e0ac Mon Sep 17 00:00:00 2001 From: graingert-coef <151018808+graingert-coef@users.noreply.github.com> Date: Wed, 7 Feb 2024 16:31:03 +0000 Subject: [PATCH 0304/1761] remove claim that data_files does not work with wheels --- docs/references/keywords.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst index 6173e3c228..dcdc21aef3 100644 --- a/docs/references/keywords.rst +++ b/docs/references/keywords.rst @@ -183,8 +183,7 @@ extensions). ``data_files`` .. warning:: - ``data_files`` is deprecated. It does not work with wheels, so it - should be avoided. + ``data_files`` is deprecated. A list of strings specifying the data files to install. From 5e504d69a6311ac99be5096b11e8aff4622115bc Mon Sep 17 00:00:00 2001 From: Thomas Grainger Date: Wed, 7 Feb 2024 17:07:28 +0000 Subject: [PATCH 0305/1761] expand on what data_files' directory and files do --- docs/references/keywords.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst index 0624677a65..86099e5800 100644 --- a/docs/references/keywords.rst +++ b/docs/references/keywords.rst @@ -186,7 +186,9 @@ extensions). ``data_files`` is deprecated. It does not work with wheels, so it should be avoided. - A sequence of (*directory*, *files*) pairs. + A sequence of (*directory*, *files*) pairs specifying the data files to install. + *directory* is a str, *files* is a sequence of files. + Each (*directory*, *files*) pair in the sequence specifies the installation directory and the files to install there. .. 
_keyword/package_dir: From 37786f4e3ccad455404c96a7bc02a3e771e72d76 Mon Sep 17 00:00:00 2001 From: Thomas Grainger Date: Wed, 7 Feb 2024 19:48:31 +0000 Subject: [PATCH 0306/1761] update data_files docs from PR suggestion --- docs/references/keywords.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst index b071117d9f..0b23fc4e15 100644 --- a/docs/references/keywords.rst +++ b/docs/references/keywords.rst @@ -183,7 +183,12 @@ extensions). ``data_files`` .. warning:: - ``data_files`` is deprecated. + This is an advanced feature and it is *not intended to work with absolute installation paths*. + All files listed in ``data_files`` will be installed in paths relative to a directory + decided by the package installer (e.g. `pip`), + which usually results in nesting under a virtual environment. + See :docs:`userguide/datafiles` for an alternative placing inside the package directory. + Please do not use this setting for things like man-pages, application launchers or anything that requires system-wide installation. A sequence of (*directory*, *files*) pairs specifying the data files to install. *directory* is a str, *files* is a sequence of files. From 782bf39c497d486345244cd195f5aa35ad4b25fa Mon Sep 17 00:00:00 2001 From: graingert-coef <151018808+graingert-coef@users.noreply.github.com> Date: Wed, 7 Feb 2024 20:11:21 +0000 Subject: [PATCH 0307/1761] fix typo in interpreted text role "docs" -> doc --- docs/references/keywords.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst index 0b23fc4e15..d3120be0c8 100644 --- a/docs/references/keywords.rst +++ b/docs/references/keywords.rst @@ -187,7 +187,7 @@ extensions). All files listed in ``data_files`` will be installed in paths relative to a directory decided by the package installer (e.g. `pip`), which usually results in nesting under a virtual environment. - See :docs:`userguide/datafiles` for an alternative placing inside the package directory. + See :doc:`userguide/datafiles` for an alternative placing inside the package directory. Please do not use this setting for things like man-pages, application launchers or anything that requires system-wide installation. A sequence of (*directory*, *files*) pairs specifying the data files to install. From 2a4a482435a8fd55dea81f175963968655f522c1 Mon Sep 17 00:00:00 2001 From: graingert-coef <151018808+graingert-coef@users.noreply.github.com> Date: Wed, 7 Feb 2024 20:17:09 +0000 Subject: [PATCH 0308/1761] fix path to document userguide/datafiles -> /userguide/datafiles --- docs/references/keywords.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst index d3120be0c8..22e4031abd 100644 --- a/docs/references/keywords.rst +++ b/docs/references/keywords.rst @@ -187,7 +187,7 @@ extensions). All files listed in ``data_files`` will be installed in paths relative to a directory decided by the package installer (e.g. `pip`), which usually results in nesting under a virtual environment. - See :doc:`userguide/datafiles` for an alternative placing inside the package directory. + See :doc:`/userguide/datafiles` for an alternative placing inside the package directory. Please do not use this setting for things like man-pages, application launchers or anything that requires system-wide installation. A sequence of (*directory*, *files*) pairs specifying the data files to install. 
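
For reference, a minimal sketch of the ``(directory, files)`` shape documented above; the project name and file paths here are hypothetical::

    from setuptools import setup

    setup(
        name="example",
        version="0.1",
        # Each (directory, files) pair installs *files* into *directory*,
        # resolved relative to a prefix chosen by the package installer.
        data_files=[
            ("share/example", ["conf/app.cfg"]),
            ("share/example/data", ["data/a.dat", "data/b.dat"]),
        ],
    )
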
From db956402bed532be1af493612faa1cda865e1b52 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 8 Feb 2024 12:58:24 +0000 Subject: [PATCH 0309/1761] Expand on warning note for data_files --- docs/references/keywords.rst | 42 +++++++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 10 deletions(-) diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst index 22e4031abd..09e1f0b82c 100644 --- a/docs/references/keywords.rst +++ b/docs/references/keywords.rst @@ -182,17 +182,32 @@ extensions). .. _keyword/data_files: ``data_files`` - .. warning:: - This is an advanced feature and it is *not intended to work with absolute installation paths*. - All files listed in ``data_files`` will be installed in paths relative to a directory - decided by the package installer (e.g. `pip`), - which usually results in nesting under a virtual environment. - See :doc:`/userguide/datafiles` for an alternative placing inside the package directory. - Please do not use this setting for things like man-pages, application launchers or anything that requires system-wide installation. + .. attention:: + **DISCOURAGED** - This is an advanced feature and it is + *not intended to work with absolute paths*. + All files listed in ``data_files`` will be installed in paths relative + to a directory decided by the package installer (e.g. `pip`). + This usually results in nesting under a virtual environment. + We **STRONGLY ADVISE AGAINST** using this setting for things like + application launchers, desktop files or anything that requires + system-wide installation [#manpages]_, unless you have extensive + experience in Python packaging and has carefully considered all the + drawbacks, limitations and problems of this method. + Also note that this feature is provided *as is* with no plans of + further changes. + + .. tip:: + See :doc:`/userguide/datafiles` for an alternative method that uses the + package directory itself and works well with :mod:`importlib.resources`, + or consider using libraries such as :pypi:`platformdirs` for creating + and managing files at runtime (i.e., **not** during the installation). - A sequence of (*directory*, *files*) pairs specifying the data files to install. - *directory* is a str, *files* is a sequence of files. - Each (*directory*, *files*) pair in the sequence specifies the installation directory and the files to install there. + A sequence of ``(directory, files)`` pairs specifying the data files to install + (``directory`` is a :class:`str`, ``files`` is a sequence of :class:`str`). + Each ``(directory, files)`` pair in the sequence specifies the installation directory + and the files to install there. + +.. _discussion in Python discourse: https://discuss.python.org/t/should-there-be-a-new-standard-for-installing-arbitrary-data-files/7853/63 .. _keyword/package_dir: @@ -505,3 +520,10 @@ extensions). An arbitrary map of URL names to hyperlinks, allowing more extensible documentation of where various resources can be found than the simple ``url`` and ``download_url`` options provide. + + +.. [#manpages] It is common for developers to attempt using ``data_files`` for manpages. + Please note however that depending on the installation directory, this will + not work out of the box - often the final user is required to change the + ``MANPATH`` environment variable. + See the `discussion in Python discourse`_ for more details. 
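
By contrast, the alternative recommended in the note above keeps the files inside the package and reads them with :mod:`importlib.resources` (Python 3.9+). A minimal sketch, assuming a hypothetical ``example`` package that ships an ``app.cfg`` file::

    from importlib import resources

    def read_default_config() -> str:
        # "app.cfg" lives inside the "example" package (e.g. declared via
        # package_data / include_package_data) rather than in data_files.
        return resources.files("example").joinpath("app.cfg").read_text()
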
From fd66b75dabde6d5a582de2aaaccb6f056828d249 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 8 Feb 2024 13:06:32 +0000 Subject: [PATCH 0310/1761] Grammar fixes --- docs/references/keywords.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/references/keywords.rst b/docs/references/keywords.rst index 09e1f0b82c..41d30c33d4 100644 --- a/docs/references/keywords.rst +++ b/docs/references/keywords.rst @@ -191,7 +191,7 @@ extensions). We **STRONGLY ADVISE AGAINST** using this setting for things like application launchers, desktop files or anything that requires system-wide installation [#manpages]_, unless you have extensive - experience in Python packaging and has carefully considered all the + experience in Python packaging and have carefully considered all the drawbacks, limitations and problems of this method. Also note that this feature is provided *as is* with no plans of further changes. From eb62c845022825a11881ebde9b71a036eea757bf Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 8 Feb 2024 12:16:59 -0500 Subject: [PATCH 0311/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?= =?UTF-8?q?s=20(delint).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Reformatted with ruff v0.2.0. --- setuptools/depends.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setuptools/depends.py b/setuptools/depends.py index 03e116c4a1..c0ca84d404 100644 --- a/setuptools/depends.py +++ b/setuptools/depends.py @@ -161,6 +161,7 @@ def extract_constant(code, symbol, default=-1): return None + def _update_globals(): """ Patch the globals to remove the objects not available on some platforms. From 45670893ceda3ad21685d786931600f82ef7ea0e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 8 Feb 2024 12:23:29 -0500 Subject: [PATCH 0312/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?= =?UTF-8?q?s=20(delint).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Reformatted with ruff 0.2.1. 
--- setuptools/tests/server.py | 3 +-- setuptools/tests/test_bdist_deprecations.py | 3 +-- setuptools/tests/test_bdist_egg.py | 3 +-- setuptools/tests/test_develop.py | 3 +-- setuptools/tests/test_dist_info.py | 3 +-- setuptools/tests/test_easy_install.py | 3 +-- setuptools/tests/test_install_scripts.py | 3 +-- setuptools/tests/test_sandbox.py | 3 +-- setuptools/tests/test_wheel.py | 3 +-- 9 files changed, 9 insertions(+), 18 deletions(-) diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py index 2caae6dca9..8cb735f1e6 100644 --- a/setuptools/tests/server.py +++ b/setuptools/tests/server.py @@ -1,5 +1,4 @@ -"""Basic http server for tests to simulate PyPI or custom indexes -""" +"""Basic http server for tests to simulate PyPI or custom indexes""" import os import time diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py index 9690e2bf50..f2b99b053b 100644 --- a/setuptools/tests/test_bdist_deprecations.py +++ b/setuptools/tests/test_bdist_deprecations.py @@ -1,5 +1,4 @@ -"""develop tests -""" +"""develop tests""" import sys from unittest import mock diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index 8f11a51b2d..0e473d168b 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -1,5 +1,4 @@ -"""develop tests -""" +"""develop tests""" import os import re diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py index 3ed276a460..bdb4bec888 100644 --- a/setuptools/tests/test_develop.py +++ b/setuptools/tests/test_develop.py @@ -1,5 +1,4 @@ -"""develop tests -""" +"""develop tests""" import os import sys diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py index dbc35ba0b7..ad6cebad0b 100644 --- a/setuptools/tests/test_dist_info.py +++ b/setuptools/tests/test_dist_info.py @@ -1,5 +1,4 @@ -"""Test .dist-info style distributions. -""" +"""Test .dist-info style distributions.""" import pathlib import re diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index ef85978ecb..a001ae27c5 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -1,5 +1,4 @@ -"""Easy install Tests -""" +"""Easy install Tests""" import sys import os diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py index c1ca7af3a1..a783459730 100644 --- a/setuptools/tests/test_install_scripts.py +++ b/setuptools/tests/test_install_scripts.py @@ -1,5 +1,4 @@ -"""install_scripts tests -""" +"""install_scripts tests""" import sys diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py index 4b3cfa9ad2..9b4937e213 100644 --- a/setuptools/tests/test_sandbox.py +++ b/setuptools/tests/test_sandbox.py @@ -1,5 +1,4 @@ -"""develop tests -""" +"""develop tests""" import os import types diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index 03fb05d2f4..cdfd9d1a5f 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -1,5 +1,4 @@ -"""wheel tests -""" +"""wheel tests""" from distutils.sysconfig import get_config_var from distutils.util import get_platform From 898ab1aa40290962cbfe557f1dccefd7a4456c3a Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Thu, 8 Feb 2024 12:27:54 -0500 Subject: [PATCH 0313/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?= =?UTF-8?q?s=20(delint).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- setuptools/py311compat.py | 3 +++ setuptools/tests/integration/helpers.py | 2 +- setuptools/tests/test_packageindex.py | 1 - 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/setuptools/py311compat.py b/setuptools/py311compat.py index 9231cbb290..f7d53d6de9 100644 --- a/setuptools/py311compat.py +++ b/setuptools/py311compat.py @@ -1,6 +1,9 @@ import sys +__all__ = ['tomllib'] + + if sys.version_info >= (3, 11): import tomllib else: # pragma: no cover diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py index 62076bdf7d..82cb36a2e4 100644 --- a/setuptools/tests/integration/helpers.py +++ b/setuptools/tests/integration/helpers.py @@ -16,7 +16,7 @@ def run(cmd, env=None): r = subprocess.run( cmd, capture_output=True, - universal_newlines=True, + text=True, env={**os.environ, **(env or {})}, # ^-- allow overwriting instead of discarding the current env ) diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py index f27a5c63e9..41b96614f8 100644 --- a/setuptools/tests/test_packageindex.py +++ b/setuptools/tests/test_packageindex.py @@ -1,4 +1,3 @@ -import sys import distutils.errors import urllib.request import urllib.error From 384f6f2574a083551f42eae88fb2d2cf64ca6f97 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 8 Feb 2024 12:52:26 -0500 Subject: [PATCH 0314/1761] Create compat module. --- setuptools/compat/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 setuptools/compat/__init__.py diff --git a/setuptools/compat/__init__.py b/setuptools/compat/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From c4156a94667220d1e3dc0b3a2b3763fb03cbad9f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 8 Feb 2024 12:46:33 -0500 Subject: [PATCH 0315/1761] Move Python 3.10 compatibility into compat module. --- setuptools/{py311compat.py => compat/py310.py} | 0 setuptools/config/pyprojecttoml.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename setuptools/{py311compat.py => compat/py310.py} (100%) diff --git a/setuptools/py311compat.py b/setuptools/compat/py310.py similarity index 100% rename from setuptools/py311compat.py rename to setuptools/compat/py310.py diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index 321e106e40..9b9788eff4 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -29,7 +29,7 @@ def load_file(filepath: _Path) -> dict: - from ..py311compat import tomllib + from ..compat.py310 import tomllib with open(filepath, "rb") as file: return tomllib.load(file) From 5d6120a339a587dbfb4181288d28a7d477b4ed02 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 8 Feb 2024 12:48:52 -0500 Subject: [PATCH 0316/1761] Move Python 3.11 compatibility into compat module. 
--- setuptools/command/easy_install.py | 4 ++-- setuptools/{py312compat.py => compat/py311.py} | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename setuptools/{py312compat.py => compat/py311.py} (100%) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 76b3adfb34..cc0c409123 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -74,7 +74,7 @@ DEVELOP_DIST, ) import pkg_resources -from .. import py312compat +from ..compat import py311 from .._path import ensure_directory from ..extern.jaraco.text import yield_lines @@ -2329,7 +2329,7 @@ def load_launcher_manifest(name): def _rmtree(path, ignore_errors=False, onexc=auto_chmod): - return py312compat.shutil_rmtree(path, ignore_errors, onexc) + return py311.shutil_rmtree(path, ignore_errors, onexc) def current_umask(): diff --git a/setuptools/py312compat.py b/setuptools/compat/py311.py similarity index 100% rename from setuptools/py312compat.py rename to setuptools/compat/py311.py From 9cbe157c15980dcb326b1ecec3d9ffb5a2efb5ef Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 10 Feb 2024 09:42:22 -0500 Subject: [PATCH 0317/1761] Restore exclusion of docs from the build. Closes #4213. --- setup.cfg | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.cfg b/setup.cfg index be2082fb9a..21f3aa9e85 100644 --- a/setup.cfg +++ b/setup.cfg @@ -36,6 +36,8 @@ exclude = debian* launcher* newsfragments* + docs + docs.* [options.extras_require] testing = From f291ee0b199e0ea2df28a3386474795a95e27954 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 Feb 2024 14:23:33 -0500 Subject: [PATCH 0318/1761] Restore expectation that egg-link files will be named with dashes and not underscores for compatibility with older pips. --- setuptools/_normalization.py | 15 +++++++++++++++ setuptools/command/develop.py | 5 ++++- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index 8d4731eb60..8f211b8bfb 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -120,6 +120,21 @@ def filename_component(value: str) -> str: return value.replace("-", "_").strip("_") +def filename_component_broken(value: str) -> str: + """ + Produce the incorrect filename component for compatibility. + + See pypa/setuptools#4167 for detailed analysis. + + TODO: replace this with filename_component after pip 24 is + nearly-ubiquitous. 
+ + >>> filename_component_broken('foo_bar-baz') + 'foo-bar-baz' + """ + return value.replace('_', '-') + + def safer_name(value: str) -> str: """Like ``safe_name`` but can be used as filename component for wheel""" # See bdist_wheel.safer_name diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py index 583e8cf5f2..d8c1b49b3d 100644 --- a/setuptools/command/develop.py +++ b/setuptools/command/develop.py @@ -5,6 +5,7 @@ import glob from setuptools.command.easy_install import easy_install +from setuptools import _normalization from setuptools import _path from setuptools import namespaces import setuptools @@ -52,7 +53,9 @@ def finalize_options(self): # pick up setup-dir .egg files only: no .egg-info self.package_index.scan(glob.glob('*.egg')) - egg_link_fn = ei.egg_name + '.egg-link' + egg_link_fn = ( + _normalization.filename_component_broken(ei.egg_name) + '.egg-link' + ) self.egg_link = os.path.join(self.install_dir, egg_link_fn) self.egg_base = ei.egg_base if self.egg_path is None: From 58af37ca8baf898695b16defc52c6b1e01ef2564 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 Feb 2024 14:27:55 -0500 Subject: [PATCH 0319/1761] Replace the incorrect phrase about 'unescaping' and instead clarify that the transformations are irreversible. --- docs/deprecated/python_eggs.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/deprecated/python_eggs.rst b/docs/deprecated/python_eggs.rst index 59d1adc55b..8519d27e3e 100644 --- a/docs/deprecated/python_eggs.rst +++ b/docs/deprecated/python_eggs.rst @@ -133,10 +133,10 @@ egg filename is as follows:: The "name" and "version" should be escaped using the ``to_filename()`` function provided by ``pkg_resources``, after first processing them with -``safe_name()`` and ``safe_version()`` respectively. These latter two -functions can also be used to later "unescape" these parts of the -filename. (For a detailed description of these transformations, please -see the "Parsing Utilities" section of the ``pkg_resources`` manual.) +``safe_name()`` and ``safe_version()`` respectively. Note that the escaping is +irreversible and the original name can only be retrieved from the distribution +metadata. For a detailed description of these transformations, please see the +"Parsing Utilities" section of the ``pkg_resources`` manual. The "pyver" string is the Python major version, as found in the first 3 characters of ``sys.version``. "required_platform" is essentially From 4aa55b84ba2d919ef7d7d409a533da4cf947e11f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 Feb 2024 14:29:40 -0500 Subject: [PATCH 0320/1761] Rearrange escaping functions to describe them in the order they should be used. --- docs/deprecated/python_eggs.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/deprecated/python_eggs.rst b/docs/deprecated/python_eggs.rst index 8519d27e3e..97548f183a 100644 --- a/docs/deprecated/python_eggs.rst +++ b/docs/deprecated/python_eggs.rst @@ -131,12 +131,12 @@ egg filename is as follows:: name ["-" version ["-py" pyver ["-" required_platform]]] "." ext -The "name" and "version" should be escaped using the ``to_filename()`` -function provided by ``pkg_resources``, after first processing them with -``safe_name()`` and ``safe_version()`` respectively. Note that the escaping is -irreversible and the original name can only be retrieved from the distribution -metadata. 
For a detailed description of these transformations, please see the -"Parsing Utilities" section of the ``pkg_resources`` manual. +The "name" and "version" should be escaped using ``pkg_resources`` functions +``safe_name()`` and ``safe_version()`` respectively then using +``to_filename()``. Note that the escaping is irreversible and the original +name can only be retrieved from the distribution metadata. For a detailed +description of these transformations, please see the "Parsing Utilities" +section of the ``pkg_resources`` manual. The "pyver" string is the Python major version, as found in the first 3 characters of ``sys.version``. "required_platform" is essentially From f61e452d4f47277d1215047d2aa3c779fb586c0e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 Feb 2024 14:36:10 -0500 Subject: [PATCH 0321/1761] Document the incorrect behavior revealed by #4167. --- docs/deprecated/python_eggs.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/deprecated/python_eggs.rst b/docs/deprecated/python_eggs.rst index 97548f183a..4bb099e01b 100644 --- a/docs/deprecated/python_eggs.rst +++ b/docs/deprecated/python_eggs.rst @@ -193,6 +193,14 @@ Python version, or platform information is included. When the runtime searches for available eggs, ``.egg-link`` files are opened and the actual egg file/directory name is read from them. +Note: Due to `pypa/setuptools#4167 +`_, the name in the egg-link +filename does not match the filename components used in similar files, but +instead presents with dash separators instead of underscore separators. For +compatibility with pip prior to version 24.0, these dash separators are +retained. In a future release, pip 24 or later will be required and the +underscore separators will be used. + Each ``.egg-link`` file should contain a single file or directory name, with no newlines. This filename should be the base location of one or more eggs. That is, the name must either end in ``.egg``, or else it From 0c804aa02a2c32dc1210b32e75d1a997274f06d4 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 Feb 2024 14:49:23 -0500 Subject: [PATCH 0322/1761] Add a test capturing the desired expectation, marked as xfail for now. --- setuptools/tests/test_develop.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py index bdb4bec888..d36447edbb 100644 --- a/setuptools/tests/test_develop.py +++ b/setuptools/tests/test_develop.py @@ -3,6 +3,7 @@ import os import sys import subprocess +import pathlib import platform from setuptools.command import test @@ -82,6 +83,18 @@ def test_console_scripts(self, tmpdir): cmd.run() # assert '0.0' not in foocmd_text + @pytest.mark.xfail(reason="legacy behavior retained for compatibility #4167") + def test_egg_link_filename(self): + settings = dict( + name='Foo $$$ Bar_baz-bing', + ) + dist = Distribution(settings) + cmd = develop(dist) + cmd.ensure_finalized() + link = pathlib.Path(cmd.egg_link) + assert link.suffix == '.egg-link' + assert link.stem == 'Foo_Bar_baz_bing' + class TestResolver: """ From ac20e42c2c12070089a8e0188c8d46b3ad403fdd Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 Feb 2024 14:52:01 -0500 Subject: [PATCH 0323/1761] Add news fragment. 
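To make the dash/underscore distinction in the egg-link commits above concrete, here is a small self-contained sketch contrasting the two normalization helpers; it mirrors the `_normalization.py` hunk and its doctest, and the sample name is hypothetical:

    def filename_component(value: str) -> str:
        # Desired behavior: dashes are normalized to underscores.
        return value.replace("-", "_").strip("_")


    def filename_component_broken(value: str) -> str:
        # Legacy behavior retained for pip < 24: underscores become dashes.
        return value.replace('_', '-')


    name = 'foo_bar-baz'  # hypothetical egg name
    print(filename_component(name) + '.egg-link')         # foo_bar_baz.egg-link
    print(filename_component_broken(name) + '.egg-link')  # foo-bar-baz.egg-link

The xfail test above pins the desired underscore form; `develop.finalize_options` keeps producing the dash-separated form via `filename_component_broken` because pip versions before 24 look the egg-link file up by that name.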
--- newsfragments/4167.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4167.bugfix.rst diff --git a/newsfragments/4167.bugfix.rst b/newsfragments/4167.bugfix.rst new file mode 100644 index 0000000000..c61a5525bc --- /dev/null +++ b/newsfragments/4167.bugfix.rst @@ -0,0 +1 @@ +Restored expectation that egg-link files would be named with dash separators for compatibility with pip prior to version 24. \ No newline at end of file From bc6b031574efae1be4e0940bc8f262755149e216 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 Feb 2024 19:59:03 -0500 Subject: [PATCH 0324/1761] =?UTF-8?q?Bump=20version:=2069.0.3=20=E2=86=92?= =?UTF-8?q?=2069.1.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 27 +++++++++++++++++++++++++++ newsfragments/4072.bugfix.rst | 1 - newsfragments/4096.doc.rst | 1 - newsfragments/4096.feature.rst | 1 - newsfragments/4097.feature.rst | 1 - newsfragments/4144.bugfix.rst | 1 - newsfragments/4167.bugfix.rst | 1 - newsfragments/4182.doc.rst | 1 - newsfragments/4198.doc.rst | 1 - newsfragments/4200.doc.rst | 1 - setup.cfg | 2 +- 12 files changed, 29 insertions(+), 11 deletions(-) delete mode 100644 newsfragments/4072.bugfix.rst delete mode 100644 newsfragments/4096.doc.rst delete mode 100644 newsfragments/4096.feature.rst delete mode 100644 newsfragments/4097.feature.rst delete mode 100644 newsfragments/4144.bugfix.rst delete mode 100644 newsfragments/4167.bugfix.rst delete mode 100644 newsfragments/4182.doc.rst delete mode 100644 newsfragments/4198.doc.rst delete mode 100644 newsfragments/4200.doc.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e436630663..4c2928ffff 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 69.0.3 +current_version = 69.1.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 3c99148834..7fc8f4fa87 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,30 @@ +v69.1.0 +======= + +Features +-------- + +- Updated and removed obsolete Python < 3.8 code and comments. -- by :user:`Avasam` (#4096) +- Updated `pkg_resources` to use stdlib `importlib.machinery` instead of ``importlib_machinery`` -- by :user:`Avasam` (#4097) + + +Bugfixes +-------- + +- In tests, rely on pytest-home for reusable fixture. (#4072) +- Explicitly marked as ``Protocol`` and fixed missing ``self`` argument in interfaces ``pkg_resources.IMetadataProvider`` and ``pkg_resources.IResourceProvider`` -- by :user:`Avasam` (#4144) +- Restored expectation that egg-link files would be named with dash separators for compatibility with pip prior to version 24. (#4167) + + +Improved Documentation +---------------------- + +- Updated documentation referencing obsolete Python 3.7 code. -- by :user:`Avasam` (#4096) +- Changed ``versionadded`` for "Type information included by default" feature from ``v68.3.0`` to ``v69.0.0`` -- by :user:`Avasam` (#4182) +- Described the auto-generated files -- by :user:`VladimirFokow` (#4198) +- Updated "Quickstart" to describe the current status of ``setup.cfg`` and ``pyproject.toml`` -- by :user:`VladimirFokow` (#4200) + + v69.0.3 ======= diff --git a/newsfragments/4072.bugfix.rst b/newsfragments/4072.bugfix.rst deleted file mode 100644 index d7115ecc74..0000000000 --- a/newsfragments/4072.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -In tests, rely on pytest-home for reusable fixture.
\ No newline at end of file diff --git a/newsfragments/4096.doc.rst b/newsfragments/4096.doc.rst deleted file mode 100644 index b5b5206704..0000000000 --- a/newsfragments/4096.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Updated documentation referencing obsolete Python 3.7 code. -- by :user:`Avasam` diff --git a/newsfragments/4096.feature.rst b/newsfragments/4096.feature.rst deleted file mode 100644 index 89b3a465bb..0000000000 --- a/newsfragments/4096.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Updated and removed obsolete Python < 3.8 code and comments. -- by :user:`Avasam` diff --git a/newsfragments/4097.feature.rst b/newsfragments/4097.feature.rst deleted file mode 100644 index 3380658e8c..0000000000 --- a/newsfragments/4097.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Updated `pkg_resources` to use stdlib `importlib.machinery` instead of ``importlib_machinery`` -- by :user:`Avasam` diff --git a/newsfragments/4144.bugfix.rst b/newsfragments/4144.bugfix.rst deleted file mode 100644 index 9c4709b737..0000000000 --- a/newsfragments/4144.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Explicitly marked as ``Protocol`` and fixed missing ``self`` argument in interfaces ``pkg_resources.IMetadataProvider`` and ``pkg_resources.IResourceProvider`` -- by :user:`Avasam` diff --git a/newsfragments/4167.bugfix.rst b/newsfragments/4167.bugfix.rst deleted file mode 100644 index c61a5525bc..0000000000 --- a/newsfragments/4167.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Restored expectation that egg-link files would be named with dash separators for compatibility with pip prior to version 24. \ No newline at end of file diff --git a/newsfragments/4182.doc.rst b/newsfragments/4182.doc.rst deleted file mode 100644 index 53f63138f6..0000000000 --- a/newsfragments/4182.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Changed ``versionadded`` for "Type information included by default" feature from ``v68.3.0`` to ``v69.0.0`` -- by :user:`Avasam` diff --git a/newsfragments/4198.doc.rst b/newsfragments/4198.doc.rst deleted file mode 100644 index 396361a20b..0000000000 --- a/newsfragments/4198.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Described the auto-generated files -- by :user:`VladimirFokow` \ No newline at end of file diff --git a/newsfragments/4200.doc.rst b/newsfragments/4200.doc.rst deleted file mode 100644 index 4b368683cc..0000000000 --- a/newsfragments/4200.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Updated "Quickstart" to describe the current status of ``setup.cfg`` and ``pyproject.toml`` -- by :user:`VladimirFokow` \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index 21f3aa9e85..a42439145e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 69.0.3 +version = 69.1.0 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From 6f1da2b3cc650038345381955a6564ad2e6d4c44 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 12 Feb 2024 10:45:15 +0000 Subject: [PATCH 0325/1761] Update bug report template --- .github/ISSUE_TEMPLATE/bug-report.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index 1c42bf5a94..5d7ba25664 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -45,7 +45,12 @@ body: - type: input attributes: label: setuptools version - placeholder: For example, setuptools===60.4.2 + placeholder: For example, setuptools===69.1.0
+ description: >- + Please also test with the **latest version** of `setuptools`. + Typically, this involves modifying `requires` in `[build-system]` of + [`pyproject.toml`](https://setuptools.pypa.io/en/latest/userguide/quickstart.html#basic-use), + not just updating `setuptools` using `pip`. validations: required: true - type: input From e4cca0e16d4080289bff6719e5e8ffc82164148a Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 12 Feb 2024 10:49:58 +0000 Subject: [PATCH 0326/1761] Remove arbitrary equality operator from bug report template. According to PEP 440: > Use of this operator is heavily discouraged and tooling MAY display a warning when it is used. --- .github/ISSUE_TEMPLATE/bug-report.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index 5d7ba25664..d449ba04c9 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -45,9 +45,10 @@ body: - type: input attributes: label: setuptools version - placeholder: For example, setuptools===69.1.0 + placeholder: For example, setuptools==69.1.0 description: >- Please also test with the **latest version** of `setuptools`. + Typically, this involves modifying `requires` in `[build-system]` of [`pyproject.toml`](https://setuptools.pypa.io/en/latest/userguide/quickstart.html#basic-use), not just updating `setuptools` using `pip`. From 178d254379ed260eb537f48722703f819eaa8235 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 12 Feb 2024 16:02:29 -0500 Subject: [PATCH 0327/1761] Remove Sphinx pin. Ref sphinx-doc/sphinx#11662. --- setup.cfg | 2 -- 1 file changed, 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index fe99eaf6e5..400a72a5ed 100644 --- a/setup.cfg +++ b/setup.cfg @@ -34,8 +34,6 @@ testing = docs = # upstream sphinx >= 3.5 - # workaround for sphinx/sphinx-doc#11662 - sphinx < 7.2.5 jaraco.packaging >= 9.3 rst.linker >= 1.9 furo From d9b441939046e965b1bfb8035f907be56c0836fc Mon Sep 17 00:00:00 2001 From: Steve Dower Date: Tue, 12 Dec 2023 00:13:38 +0000 Subject: [PATCH 0328/1761] Fixes pypa/distutils#219 Use sysconfig.get_config_h_filename() to locate pyconfig.h --- distutils/sysconfig.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index a40a7231b3..c89fff4be1 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -195,12 +195,11 @@ def _get_python_inc_posix_prefix(prefix): def _get_python_inc_nt(prefix, spec_prefix, plat_specific): if python_build: - # Include both the include and PC dir to ensure we can find - # pyconfig.h + # Include both include dirs to ensure we can find pyconfig.h return ( os.path.join(prefix, "include") + os.path.pathsep - + os.path.join(prefix, "PC") + + os.path.dirname(sysconfig.get_config_h_filename()) ) return os.path.join(prefix, "include") From d2ddf06d4afd255ae992b4ebfdc3d18e50206152 Mon Sep 17 00:00:00 2001 From: Steve Dower Date: Mon, 18 Dec 2023 17:46:10 +0000 Subject: [PATCH 0329/1761] Also use sysconfig.get_config_h_filename() to implement distutils.sysconfig version --- distutils/sysconfig.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index c89fff4be1..fac3259f88 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -360,14 +360,7 @@ def customize_compiler(compiler): # noqa: C901 def get_config_h_filename(): """Return full pathname of installed pyconfig.h 
file.""" - if python_build: - if os.name == "nt": - inc_dir = os.path.join(_sys_home or project_base, "PC") - else: - inc_dir = _sys_home or project_base - return os.path.join(inc_dir, 'pyconfig.h') - else: - return sysconfig.get_config_h_filename() + return sysconfig.get_config_h_filename() def get_makefile_filename(): From 7f70d7d3173f744cdbf37fdb353492bbe7ae089a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 12 Feb 2024 16:39:11 -0500 Subject: [PATCH 0330/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?= =?UTF-8?q?s=20(delint).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Ran `ruff --format` on the code. --- conftest.py | 8 +-- distutils/bcppcompiler.py | 1 - distutils/ccompiler.py | 6 +- distutils/command/_framework_compat.py | 1 - distutils/command/bdist.py | 20 +++--- distutils/command/bdist_rpm.py | 84 +++++++++++-------------- distutils/command/build_py.py | 6 +- distutils/command/check.py | 10 ++- distutils/command/install.py | 35 ++++++----- distutils/command/register.py | 16 ++--- distutils/config.py | 1 + distutils/cygwinccompiler.py | 2 +- distutils/extension.py | 2 +- distutils/tests/__init__.py | 4 +- distutils/tests/support.py | 1 + distutils/tests/test_archive_util.py | 1 + distutils/tests/test_bdist.py | 1 + distutils/tests/test_bdist_dumb.py | 18 +++--- distutils/tests/test_bdist_rpm.py | 36 +++++------ distutils/tests/test_build.py | 1 + distutils/tests/test_build_clib.py | 1 + distutils/tests/test_build_ext.py | 4 +- distutils/tests/test_build_py.py | 12 ++-- distutils/tests/test_check.py | 1 + distutils/tests/test_clean.py | 1 + distutils/tests/test_cmd.py | 1 + distutils/tests/test_config.py | 1 + distutils/tests/test_config_cmd.py | 1 + distutils/tests/test_cygwinccompiler.py | 1 + distutils/tests/test_dir_util.py | 1 + distutils/tests/test_dist.py | 31 ++++----- distutils/tests/test_extension.py | 1 + distutils/tests/test_file_util.py | 1 + distutils/tests/test_filelist.py | 1 + distutils/tests/test_install_data.py | 1 + distutils/tests/test_install_headers.py | 1 + distutils/tests/test_install_lib.py | 1 + distutils/tests/test_modified.py | 1 + distutils/tests/test_msvc9compiler.py | 1 + distutils/tests/test_msvccompiler.py | 1 + distutils/tests/test_register.py | 1 + distutils/tests/test_sdist.py | 1 + distutils/tests/test_spawn.py | 1 + distutils/tests/test_sysconfig.py | 1 + distutils/tests/test_text_file.py | 1 + distutils/tests/test_unixccompiler.py | 1 + distutils/tests/test_upload.py | 1 + distutils/tests/test_util.py | 13 +++- distutils/tests/test_version.py | 1 + distutils/version.py | 2 - distutils/versionpredicate.py | 4 +- 51 files changed, 181 insertions(+), 164 deletions(-) diff --git a/conftest.py b/conftest.py index b01b313085..ca808a6ab7 100644 --- a/conftest.py +++ b/conftest.py @@ -12,11 +12,9 @@ if platform.system() != 'Windows': - collect_ignore.extend( - [ - 'distutils/msvc9compiler.py', - ] - ) + collect_ignore.extend([ + 'distutils/msvc9compiler.py', + ]) @pytest.fixture diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py index 3c2ba15410..14d51472f2 100644 --- a/distutils/bcppcompiler.py +++ b/distutils/bcppcompiler.py @@ -11,7 +11,6 @@ # someone should sit down and factor out the common code as # WindowsCCompiler! 
--GPW - import os import warnings diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py index c1c7d5476e..6935e2c37f 100644 --- a/distutils/ccompiler.py +++ b/distutils/ccompiler.py @@ -1004,7 +1004,11 @@ def executable_filename(self, basename, strip_dir=0, output_dir=''): return os.path.join(output_dir, basename + (self.exe_extension or '')) def library_filename( - self, libname, lib_type='static', strip_dir=0, output_dir='' # or 'shared' + self, + libname, + lib_type='static', + strip_dir=0, + output_dir='', # or 'shared' ): assert output_dir is not None expected = '"static", "shared", "dylib", "xcode_stub"' diff --git a/distutils/command/_framework_compat.py b/distutils/command/_framework_compat.py index cffa27cb08..b4228299f4 100644 --- a/distutils/command/_framework_compat.py +++ b/distutils/command/_framework_compat.py @@ -2,7 +2,6 @@ Backward compatibility for homebrew builds on macOS. """ - import sys import os import functools diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py index 6329039ce4..237b14656f 100644 --- a/distutils/command/bdist.py +++ b/distutils/command/bdist.py @@ -76,17 +76,15 @@ class bdist(Command): default_format = {'posix': 'gztar', 'nt': 'zip'} # Define commands in preferred order for the --help-formats option - format_commands = ListCompat( - { - 'rpm': ('bdist_rpm', "RPM distribution"), - 'gztar': ('bdist_dumb', "gzip'ed tar file"), - 'bztar': ('bdist_dumb', "bzip2'ed tar file"), - 'xztar': ('bdist_dumb', "xz'ed tar file"), - 'ztar': ('bdist_dumb', "compressed tar file"), - 'tar': ('bdist_dumb', "tar file"), - 'zip': ('bdist_dumb', "ZIP file"), - } - ) + format_commands = ListCompat({ + 'rpm': ('bdist_rpm', "RPM distribution"), + 'gztar': ('bdist_dumb', "gzip'ed tar file"), + 'bztar': ('bdist_dumb', "bzip2'ed tar file"), + 'xztar': ('bdist_dumb', "xz'ed tar file"), + 'ztar': ('bdist_dumb', "compressed tar file"), + 'tar': ('bdist_dumb', "tar file"), + 'zip': ('bdist_dumb', "ZIP file"), + }) # for compatibility until consumers only reference format_commands format_command = format_commands diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py index 696f26751f..e96db22bed 100644 --- a/distutils/command/bdist_rpm.py +++ b/distutils/command/bdist_rpm.py @@ -401,9 +401,11 @@ def run(self): # noqa: C901 if os.path.exists(rpm): self.move_file(rpm, self.dist_dir) filename = os.path.join(self.dist_dir, os.path.basename(rpm)) - self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename) - ) + self.distribution.dist_files.append(( + 'bdist_rpm', + pyversion, + filename, + )) def _dist_path(self, path): return os.path.join(self.dist_dir, os.path.basename(path)) @@ -428,9 +430,9 @@ def _make_spec_file(self): # noqa: C901 # Generate a potential replacement value for __os_install_post (whilst # normalizing the whitespace to simplify the test for whether the # invocation of brp-python-bytecompile passes in __python): - vendor_hook = '\n'.join( - [' %s \\' % line.strip() for line in vendor_hook.splitlines()] - ) + vendor_hook = '\n'.join([ + ' %s \\' % line.strip() for line in vendor_hook.splitlines() + ]) problem = "brp-python-bytecompile \\\n" fixed = "brp-python-bytecompile %{__python} \\\n" fixed_hook = vendor_hook.replace(problem, fixed) @@ -445,13 +447,11 @@ def _make_spec_file(self): # noqa: C901 # spec_file.append('Summary(%s): %s' % (locale, # self.summaries[locale])) - spec_file.extend( - [ - 'Name: %{name}', - 'Version: %{version}', - 'Release: %{release}', - ] - ) + spec_file.extend([ + 'Name: 
%{name}', + 'Version: %{version}', + 'Release: %{release}', + ]) # XXX yuck! this filename is available from the "sdist" command, # but only after it has run: and we create the spec file before @@ -461,14 +461,12 @@ def _make_spec_file(self): # noqa: C901 else: spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz') - spec_file.extend( - [ - 'License: ' + (self.distribution.get_license() or "UNKNOWN"), - 'Group: ' + self.group, - 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', - 'Prefix: %{_prefix}', - ] - ) + spec_file.extend([ + 'License: ' + (self.distribution.get_license() or "UNKNOWN"), + 'Group: ' + self.group, + 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', + 'Prefix: %{_prefix}', + ]) if not self.force_arch: # noarch if no extension modules @@ -506,13 +504,11 @@ def _make_spec_file(self): # noqa: C901 if self.no_autoreq: spec_file.append('AutoReq: 0') - spec_file.extend( - [ - '', - '%description', - self.distribution.get_long_description() or "", - ] - ) + spec_file.extend([ + '', + '%description', + self.distribution.get_long_description() or "", + ]) # put locale descriptions into spec file # XXX again, suppressed because config file syntax doesn't @@ -558,12 +554,10 @@ def _make_spec_file(self): # noqa: C901 # use 'default' as contents of script val = getattr(self, attr) if val or default: - spec_file.extend( - [ - '', - '%' + rpm_opt, - ] - ) + spec_file.extend([ + '', + '%' + rpm_opt, + ]) if val: with open(val) as f: spec_file.extend(f.read().split('\n')) @@ -571,24 +565,20 @@ def _make_spec_file(self): # noqa: C901 spec_file.append(default) # files section - spec_file.extend( - [ - '', - '%files -f INSTALLED_FILES', - '%defattr(-,root,root)', - ] - ) + spec_file.extend([ + '', + '%files -f INSTALLED_FILES', + '%defattr(-,root,root)', + ]) if self.doc_files: spec_file.append('%doc ' + ' '.join(self.doc_files)) if self.changelog: - spec_file.extend( - [ - '', - '%changelog', - ] - ) + spec_file.extend([ + '', + '%changelog', + ]) spec_file.extend(self.changelog) return spec_file diff --git a/distutils/command/build_py.py b/distutils/command/build_py.py index d9df95922f..e16011d46a 100644 --- a/distutils/command/build_py.py +++ b/distutils/command/build_py.py @@ -129,9 +129,9 @@ def find_data_files(self, package, src_dir): os.path.join(glob.escape(src_dir), convert_path(pattern)) ) # Files that match more than one pattern are only added once - files.extend( - [fn for fn in filelist if fn not in files and os.path.isfile(fn)] - ) + files.extend([ + fn for fn in filelist if fn not in files and os.path.isfile(fn) + ]) return files def build_package_data(self): diff --git a/distutils/command/check.py b/distutils/command/check.py index 575e49fb4b..b59cc23731 100644 --- a/distutils/command/check.py +++ b/distutils/command/check.py @@ -2,6 +2,7 @@ Implements the Distutils 'check' command. """ + import contextlib from ..core import Command @@ -144,8 +145,11 @@ def _check_rst_data(self, data): try: parser.parse(data, document) except AttributeError as e: - reporter.messages.append( - (-1, 'Could not finish the parsing: %s.' % e, '', {}) - ) + reporter.messages.append(( + -1, + 'Could not finish the parsing: %s.' 
% e, + '', + {}, + )) return reporter.messages diff --git a/distutils/command/install.py b/distutils/command/install.py index a7ac4e6077..927c3ed3a2 100644 --- a/distutils/command/install.py +++ b/distutils/command/install.py @@ -245,9 +245,11 @@ class install(Command): boolean_options = ['compile', 'force', 'skip-build'] if HAS_USER_SITE: - user_options.append( - ('user', None, "install in user site-package '%s'" % USER_SITE) - ) + user_options.append(( + 'user', + None, + "install in user site-package '%s'" % USER_SITE, + )) boolean_options.append('user') negative_opt = {'no-compile': 'compile'} @@ -432,9 +434,12 @@ def finalize_options(self): # noqa: C901 local_vars['userbase'] = self.install_userbase local_vars['usersite'] = self.install_usersite - self.config_vars = _collections.DictStack( - [fw.vars(), compat_vars, sysconfig.get_config_vars(), local_vars] - ) + self.config_vars = _collections.DictStack([ + fw.vars(), + compat_vars, + sysconfig.get_config_vars(), + local_vars, + ]) self.expand_basedirs() @@ -620,16 +625,14 @@ def expand_basedirs(self): def expand_dirs(self): """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs( - [ - 'install_purelib', - 'install_platlib', - 'install_lib', - 'install_headers', - 'install_scripts', - 'install_data', - ] - ) + self._expand_attrs([ + 'install_purelib', + 'install_platlib', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + ]) def convert_paths(self, *names): """Call `convert_path` over `names`.""" diff --git a/distutils/command/register.py b/distutils/command/register.py index c19aabb91f..cf1afc8c1f 100644 --- a/distutils/command/register.py +++ b/distutils/command/register.py @@ -77,7 +77,7 @@ def check_metadata(self): check.run() def _set_config(self): - '''Reads the configuration file and set attributes.''' + """Reads the configuration file and set attributes.""" config = self._read_pypirc() if config != {}: self.username = config['username'] @@ -93,19 +93,19 @@ def _set_config(self): self.has_config = False def classifiers(self): - '''Fetch the list of classifiers from the server.''' + """Fetch the list of classifiers from the server.""" url = self.repository + '?:action=list_classifiers' response = urllib.request.urlopen(url) log.info(self._read_pypi_response(response)) def verify_metadata(self): - '''Send the metadata to the package index server to be checked.''' + """Send the metadata to the package index server to be checked.""" # send the info to the server and report the result (code, result) = self.post_to_server(self.build_post_data('verify')) log.info('Server response (%s): %s', code, result) def send_metadata(self): # noqa: C901 - '''Send the metadata to the package index server. + """Send the metadata to the package index server. Well, do the following: 1. figure who the user is, and then @@ -131,7 +131,7 @@ def send_metadata(self): # noqa: C901 2. register as a new user, or 3. set the password to a random string and email the user. - ''' + """ # see if we can short-cut and get the username/password from the # config if self.has_config: @@ -146,13 +146,13 @@ def send_metadata(self): # noqa: C901 choices = '1 2 3 4'.split() while choice not in choices: self.announce( - '''\ + """\ We need to know who you are, so please choose either: 1. use your existing login, 2. register as a new user, 3. have the server generate a new password for you (and email it to you), or 4. 
quit -Your selection [default 1]: ''', +Your selection [default 1]: """, logging.INFO, ) choice = input() @@ -262,7 +262,7 @@ def build_post_data(self, action): return data def post_to_server(self, data, auth=None): # noqa: C901 - '''Post a query to the server, and return a string response.''' + """Post a query to the server, and return a string response.""" if 'name' in data: self.announce( 'Registering {} to {}'.format(data['name'], self.repository), diff --git a/distutils/config.py b/distutils/config.py index 9a4044adaf..a55951ed7c 100644 --- a/distutils/config.py +++ b/distutils/config.py @@ -3,6 +3,7 @@ Provides the PyPIRCCommand class, the base class for the command classes that uses .pypirc in the distutils.command package. """ + import os from configparser import RawConfigParser diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py index 47efa377c5..b3dbc3be15 100644 --- a/distutils/cygwinccompiler.py +++ b/distutils/cygwinccompiler.py @@ -344,7 +344,7 @@ def check_config_h(): def is_cygwincc(cc): - '''Try to determine if the compiler that would be used is from cygwin.''' + """Try to determine if the compiler that would be used is from cygwin.""" out_string = check_output(shlex.split(cc) + ['-dumpmachine']) return out_string.strip().endswith(b'cygwin') diff --git a/distutils/extension.py b/distutils/extension.py index 6b8575de29..8f186b72ff 100644 --- a/distutils/extension.py +++ b/distutils/extension.py @@ -102,7 +102,7 @@ def __init__( depends=None, language=None, optional=None, - **kw # To catch unknown keywords + **kw, # To catch unknown keywords ): if not isinstance(name, str): raise AssertionError("'name' must be a string") diff --git a/distutils/tests/__init__.py b/distutils/tests/__init__.py index 85293cbb5b..aad8edb242 100644 --- a/distutils/tests/__init__.py +++ b/distutils/tests/__init__.py @@ -7,6 +7,7 @@ by import rather than matching pre-defined names. """ + def missing_compiler_executable(cmd_names=[]): # pragma: no cover """Check if the compiler components used to build the interpreter exist. 
@@ -32,8 +33,7 @@ def missing_compiler_executable(cmd_names=[]): # pragma: no cover continue cmd = getattr(compiler, name) if cmd_names: - assert cmd is not None, \ - "the '%s' executable is not configured" % name + assert cmd is not None, "the '%s' executable is not configured" % name elif not cmd: continue if spawn.find_executable(cmd[0]) is None: diff --git a/distutils/tests/support.py b/distutils/tests/support.py index fd4b11bf75..2080604982 100644 --- a/distutils/tests/support.py +++ b/distutils/tests/support.py @@ -1,4 +1,5 @@ """Support code for distutils test cases.""" + import os import sys import shutil diff --git a/distutils/tests/test_archive_util.py b/distutils/tests/test_archive_util.py index 89c415d761..2b5eafd27e 100644 --- a/distutils/tests/test_archive_util.py +++ b/distutils/tests/test_archive_util.py @@ -1,4 +1,5 @@ """Tests for distutils.archive_util.""" + import os import sys import tarfile diff --git a/distutils/tests/test_bdist.py b/distutils/tests/test_bdist.py index af330a06e7..1804807752 100644 --- a/distutils/tests/test_bdist.py +++ b/distutils/tests/test_bdist.py @@ -1,4 +1,5 @@ """Tests for distutils.command.bdist.""" + from distutils.command.bdist import bdist from distutils.tests import support diff --git a/distutils/tests/test_bdist_dumb.py b/distutils/tests/test_bdist_dumb.py index 6fb50c4b8e..95532e83b9 100644 --- a/distutils/tests/test_bdist_dumb.py +++ b/distutils/tests/test_bdist_dumb.py @@ -38,16 +38,14 @@ def test_simple_built(self): self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') self.write_file((pkg_dir, 'README'), '') - dist = Distribution( - { - 'name': 'foo', - 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', - 'author': 'xxx', - 'author_email': 'xxx', - } - ) + dist = Distribution({ + 'name': 'foo', + 'version': '0.1', + 'py_modules': ['foo'], + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + }) dist.script_name = 'setup.py' os.chdir(pkg_dir) diff --git a/distutils/tests/test_bdist_rpm.py b/distutils/tests/test_bdist_rpm.py index 3fd2c7e2ac..e6804088da 100644 --- a/distutils/tests/test_bdist_rpm.py +++ b/distutils/tests/test_bdist_rpm.py @@ -58,16 +58,14 @@ def test_quiet(self): self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') self.write_file((pkg_dir, 'README'), '') - dist = Distribution( - { - 'name': 'foo', - 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', - 'author': 'xxx', - 'author_email': 'xxx', - } - ) + dist = Distribution({ + 'name': 'foo', + 'version': '0.1', + 'py_modules': ['foo'], + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + }) dist.script_name = 'setup.py' os.chdir(pkg_dir) @@ -103,16 +101,14 @@ def test_no_optimize_flag(self): self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') self.write_file((pkg_dir, 'README'), '') - dist = Distribution( - { - 'name': 'foo', - 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', - 'author': 'xxx', - 'author_email': 'xxx', - } - ) + dist = Distribution({ + 'name': 'foo', + 'version': '0.1', + 'py_modules': ['foo'], + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + }) dist.script_name = 'setup.py' os.chdir(pkg_dir) diff --git a/distutils/tests/test_build.py b/distutils/tests/test_build.py index 66d8af50ac..c2cff44523 100644 --- a/distutils/tests/test_build.py +++ b/distutils/tests/test_build.py @@ -1,4 +1,5 @@ """Tests for distutils.command.build.""" + import os import sys diff --git a/distutils/tests/test_build_clib.py b/distutils/tests/test_build_clib.py index 98ab0b171f..f855454256 100644 --- 
a/distutils/tests/test_build_clib.py +++ b/distutils/tests/test_build_clib.py @@ -1,4 +1,5 @@ """Tests for distutils.command.build_clib.""" + import os import pytest diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py index 3c83cca4d2..537959fed6 100644 --- a/distutils/tests/test_build_ext.py +++ b/distutils/tests/test_build_ext.py @@ -501,7 +501,7 @@ def _try_compile_deployment_target(self, operator, target): with open(deptarget_c, 'w') as fp: fp.write( textwrap.dedent( - '''\ + """\ #include int dummy; @@ -511,7 +511,7 @@ def _try_compile_deployment_target(self, operator, target): #error "Unexpected target" #endif - ''' + """ % operator ) ) diff --git a/distutils/tests/test_build_py.py b/distutils/tests/test_build_py.py index 3bef9d79ec..77c9ad7573 100644 --- a/distutils/tests/test_build_py.py +++ b/distutils/tests/test_build_py.py @@ -69,13 +69,11 @@ def test_empty_package_dir(self): open(os.path.join(testdir, "testfile"), "w").close() os.chdir(sources) - dist = Distribution( - { - "packages": ["pkg"], - "package_dir": {"pkg": ""}, - "package_data": {"pkg": ["doc/*"]}, - } - ) + dist = Distribution({ + "packages": ["pkg"], + "package_dir": {"pkg": ""}, + "package_data": {"pkg": ["doc/*"]}, + }) # script_name need not exist, it just need to be initialized dist.script_name = os.path.join(sources, "setup.py") dist.script_args = ["build"] diff --git a/distutils/tests/test_check.py b/distutils/tests/test_check.py index 6d240b8b2b..8215300b97 100644 --- a/distutils/tests/test_check.py +++ b/distutils/tests/test_check.py @@ -1,4 +1,5 @@ """Tests for distutils.command.check.""" + import os import textwrap diff --git a/distutils/tests/test_clean.py b/distutils/tests/test_clean.py index 157b60a1e9..e2459aa0c1 100644 --- a/distutils/tests/test_clean.py +++ b/distutils/tests/test_clean.py @@ -1,4 +1,5 @@ """Tests for distutils.command.clean.""" + import os from distutils.command.clean import clean diff --git a/distutils/tests/test_cmd.py b/distutils/tests/test_cmd.py index cc740d1a8b..684662d32e 100644 --- a/distutils/tests/test_cmd.py +++ b/distutils/tests/test_cmd.py @@ -1,4 +1,5 @@ """Tests for distutils.cmd.""" + import os from distutils.cmd import Command diff --git a/distutils/tests/test_config.py b/distutils/tests/test_config.py index 1ae615db95..11c23d837e 100644 --- a/distutils/tests/test_config.py +++ b/distutils/tests/test_config.py @@ -1,4 +1,5 @@ """Tests for distutils.pypirc.pypirc.""" + import os import pytest diff --git a/distutils/tests/test_config_cmd.py b/distutils/tests/test_config_cmd.py index ecb8510246..2519ed6a10 100644 --- a/distutils/tests/test_config_cmd.py +++ b/distutils/tests/test_config_cmd.py @@ -1,4 +1,5 @@ """Tests for distutils.command.config.""" + import os import sys diff --git a/distutils/tests/test_cygwinccompiler.py b/distutils/tests/test_cygwinccompiler.py index 6fb449a6c2..fc67d75f82 100644 --- a/distutils/tests/test_cygwinccompiler.py +++ b/distutils/tests/test_cygwinccompiler.py @@ -1,4 +1,5 @@ """Tests for distutils.cygwinccompiler.""" + import sys import os diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py index 72aca4ee55..0738b7c877 100644 --- a/distutils/tests/test_dir_util.py +++ b/distutils/tests/test_dir_util.py @@ -1,4 +1,5 @@ """Tests for distutils.dir_util.""" + import os import stat import unittest.mock as mock diff --git a/distutils/tests/test_dist.py b/distutils/tests/test_dist.py index 694bf02a60..fe979efed5 100644 --- a/distutils/tests/test_dist.py +++ 
b/distutils/tests/test_dist.py @@ -1,4 +1,5 @@ """Tests for distutils.dist.""" + import os import io import email @@ -69,14 +70,12 @@ def test_command_packages_unspecified(self, clear_argv): def test_command_packages_cmdline(self, clear_argv): from distutils.tests.test_dist import test_dist - sys.argv.extend( - [ - "--command-packages", - "foo.bar,distutils.tests", - "test_dist", - "-Ssometext", - ] - ) + sys.argv.extend([ + "--command-packages", + "foo.bar,distutils.tests", + "test_dist", + "-Ssometext", + ]) d = self.create_distribution() # let's actually try to load our test command: assert d.get_command_packages() == [ @@ -98,9 +97,8 @@ def test_venv_install_options(self, tmp_path): fakepath = '/somedir' - jaraco.path.build( - { - file: f""" + jaraco.path.build({ + file: f""" [install] install-base = {fakepath} install-platbase = {fakepath} @@ -116,8 +114,7 @@ def test_venv_install_options(self, tmp_path): user = {fakepath} root = {fakepath} """, - } - ) + }) # Base case: Not in a Virtual Environment with mock.patch.multiple(sys, prefix='/a', base_prefix='/a'): @@ -158,14 +155,12 @@ def test_venv_install_options(self, tmp_path): def test_command_packages_configfile(self, tmp_path, clear_argv): sys.argv.append("build") file = str(tmp_path / "file") - jaraco.path.build( - { - file: """ + jaraco.path.build({ + file: """ [global] command_packages = foo.bar, splat """, - } - ) + }) d = self.create_distribution([file]) assert d.get_command_packages() == ["distutils.command", "foo.bar", "splat"] diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py index f86af07376..297ae44bfe 100644 --- a/distutils/tests/test_extension.py +++ b/distutils/tests/test_extension.py @@ -1,4 +1,5 @@ """Tests for distutils.extension.""" + import os import warnings diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py index 9f44f91dfa..3b9f82b71e 100644 --- a/distutils/tests/test_file_util.py +++ b/distutils/tests/test_file_util.py @@ -1,4 +1,5 @@ """Tests for distutils.file_util.""" + import os import errno import unittest.mock as mock diff --git a/distutils/tests/test_filelist.py b/distutils/tests/test_filelist.py index 2cee42cddd..bfffbb1da0 100644 --- a/distutils/tests/test_filelist.py +++ b/distutils/tests/test_filelist.py @@ -1,4 +1,5 @@ """Tests for distutils.filelist.""" + import os import re import logging diff --git a/distutils/tests/test_install_data.py b/distutils/tests/test_install_data.py index 9badbc264f..198c10da8d 100644 --- a/distutils/tests/test_install_data.py +++ b/distutils/tests/test_install_data.py @@ -1,4 +1,5 @@ """Tests for distutils.command.install_data.""" + import os import pytest diff --git a/distutils/tests/test_install_headers.py b/distutils/tests/test_install_headers.py index 1e8ccf7991..8b86b6eaed 100644 --- a/distutils/tests/test_install_headers.py +++ b/distutils/tests/test_install_headers.py @@ -1,4 +1,5 @@ """Tests for distutils.command.install_headers.""" + import os import pytest diff --git a/distutils/tests/test_install_lib.py b/distutils/tests/test_install_lib.py index 0bd67cd04d..0efe39fe86 100644 --- a/distutils/tests/test_install_lib.py +++ b/distutils/tests/test_install_lib.py @@ -1,4 +1,5 @@ """Tests for distutils.command.install_data.""" + import sys import os import importlib.util diff --git a/distutils/tests/test_modified.py b/distutils/tests/test_modified.py index ca07c7e853..5fde7a5971 100644 --- a/distutils/tests/test_modified.py +++ b/distutils/tests/test_modified.py @@ -1,4 +1,5 @@ """Tests for 
distutils._modified.""" + import os import types diff --git a/distutils/tests/test_msvc9compiler.py b/distutils/tests/test_msvc9compiler.py index fe5693e1d8..dfb34122bc 100644 --- a/distutils/tests/test_msvc9compiler.py +++ b/distutils/tests/test_msvc9compiler.py @@ -1,4 +1,5 @@ """Tests for distutils.msvc9compiler.""" + import sys import os diff --git a/distutils/tests/test_msvccompiler.py b/distutils/tests/test_msvccompiler.py index f63537b8e5..f65a5a25a3 100644 --- a/distutils/tests/test_msvccompiler.py +++ b/distutils/tests/test_msvccompiler.py @@ -1,4 +1,5 @@ """Tests for distutils._msvccompiler.""" + import sys import os import threading diff --git a/distutils/tests/test_register.py b/distutils/tests/test_register.py index 34e593244e..5d3826a1b7 100644 --- a/distutils/tests/test_register.py +++ b/distutils/tests/test_register.py @@ -1,4 +1,5 @@ """Tests for distutils.command.register.""" + import os import getpass import urllib diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py index a3fa290275..00718a37bd 100644 --- a/distutils/tests/test_sdist.py +++ b/distutils/tests/test_sdist.py @@ -1,4 +1,5 @@ """Tests for distutils.command.sdist.""" + import os import tarfile import warnings diff --git a/distutils/tests/test_spawn.py b/distutils/tests/test_spawn.py index 08a34ee2b8..57cf1a525c 100644 --- a/distutils/tests/test_spawn.py +++ b/distutils/tests/test_spawn.py @@ -1,4 +1,5 @@ """Tests for distutils.spawn.""" + import os import stat import sys diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py index bfeaf9a6b9..6cbf51681b 100644 --- a/distutils/tests/test_sysconfig.py +++ b/distutils/tests/test_sysconfig.py @@ -1,4 +1,5 @@ """Tests for distutils.sysconfig.""" + import contextlib import os import subprocess diff --git a/distutils/tests/test_text_file.py b/distutils/tests/test_text_file.py index 7c8dc5be54..4a721b691c 100644 --- a/distutils/tests/test_text_file.py +++ b/distutils/tests/test_text_file.py @@ -1,4 +1,5 @@ """Tests for distutils.text_file.""" + import os from distutils.text_file import TextFile from distutils.tests import support diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py index a018442459..c1e57a016f 100644 --- a/distutils/tests/test_unixccompiler.py +++ b/distutils/tests/test_unixccompiler.py @@ -1,4 +1,5 @@ """Tests for distutils.unixccompiler.""" + import os import sys import unittest.mock as mock diff --git a/distutils/tests/test_upload.py b/distutils/tests/test_upload.py index af113b8b6e..5c5bc59a40 100644 --- a/distutils/tests/test_upload.py +++ b/distutils/tests/test_upload.py @@ -1,4 +1,5 @@ """Tests for distutils.command.upload.""" + import os import unittest.mock as mock from urllib.request import HTTPError diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py index 22a003d8ca..c632b3910f 100644 --- a/distutils/tests/test_util.py +++ b/distutils/tests/test_util.py @@ -1,4 +1,5 @@ """Tests for distutils.util.""" + import email import email.policy import email.generator @@ -155,9 +156,15 @@ def test_check_environ_getpwuid(self): import pwd # only set pw_dir field, other fields are not used - result = pwd.struct_passwd( - (None, None, None, None, None, '/home/distutils', None) - ) + result = pwd.struct_passwd(( + None, + None, + None, + None, + None, + '/home/distutils', + None, + )) with mock.patch.object(pwd, 'getpwuid', return_value=result): check_environ() assert os.environ['HOME'] == '/home/distutils' diff --git 
a/distutils/tests/test_version.py b/distutils/tests/test_version.py index ff52ea4683..900edafa7c 100644 --- a/distutils/tests/test_version.py +++ b/distutils/tests/test_version.py @@ -1,4 +1,5 @@ """Tests for distutils.version.""" + import pytest import distutils diff --git a/distutils/version.py b/distutils/version.py index 74c40d7bfd..18385cfef2 100644 --- a/distutils/version.py +++ b/distutils/version.py @@ -111,7 +111,6 @@ def __ge__(self, other): class StrictVersion(Version): - """Version numbering for anal retentives and software idealists. Implements the standard interface for version number classes as described above. A version number consists of two or three @@ -286,7 +285,6 @@ def _cmp(self, other): # noqa: C901 class LooseVersion(Version): - """Version numbering for anarchists and software realists. Implements the standard interface for version number classes as described above. A version number consists of a series of numbers, diff --git a/distutils/versionpredicate.py b/distutils/versionpredicate.py index d6c0c007aa..c75e49486f 100644 --- a/distutils/versionpredicate.py +++ b/distutils/versionpredicate.py @@ -1,5 +1,5 @@ -"""Module for parsing and testing package version predicate strings. -""" +"""Module for parsing and testing package version predicate strings.""" + import re from . import version import operator From a55a44168cfedfb4f52ad3aa93728d91ca218880 Mon Sep 17 00:00:00 2001 From: Steven Pitman Date: Mon, 2 Oct 2023 11:10:34 -0400 Subject: [PATCH 0331/1761] Add support for z/OS compilers; Fixes pypa/distutils#215 --- distutils/ccompiler.py | 2 + distutils/command/build_ext.py | 11 +- distutils/zosccompiler.py | 228 +++++++++++++++++++++++++++++++++ 3 files changed, 239 insertions(+), 2 deletions(-) create mode 100644 distutils/zosccompiler.py diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py index 6935e2c37f..d5ca761f5a 100644 --- a/distutils/ccompiler.py +++ b/distutils/ccompiler.py @@ -1060,6 +1060,7 @@ def mkpath(self, name, mode=0o777): # on a cygwin built python we can use gcc like an ordinary UNIXish # compiler ('cygwin.*', 'unix'), + ('zos', 'zos'), # OS name mappings ('posix', 'unix'), ('nt', 'msvc'), @@ -1107,6 +1108,7 @@ def get_default_compiler(osname=None, platform=None): "Mingw32 port of GNU C Compiler for Win32", ), 'bcpp': ('bcppcompiler', 'BCPPCompiler', "Borland C++ Compiler"), + 'zos': ('zosccompiler', 'zOSCCompiler', 'IBM XL C/C++ Compilers'), } diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py index b48f462626..98938babd0 100644 --- a/distutils/command/build_ext.py +++ b/distutils/command/build_ext.py @@ -236,8 +236,15 @@ def finalize_options(self): # noqa: C901 # See Issues: #1600860, #4366 if sysconfig.get_config_var('Py_ENABLE_SHARED'): if not sysconfig.python_build: - # building third party extensions - self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) + if sys.platform == 'zos': + # On z/OS, a user is not required to install Python to + # a predetermined path, but can use Python portably + installed_dir = sysconfig.get_config_var('base') + lib_dir = sysconfig.get_config_var('platlibdir') + self.library_dirs.append(os.path.join(installed_dir, lib_dir)) + else: + # building third party extensions + self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) else: # building python standard extensions self.library_dirs.append('.') diff --git a/distutils/zosccompiler.py b/distutils/zosccompiler.py new file mode 100644 index 0000000000..6d70b7f04f --- /dev/null +++ b/distutils/zosccompiler.py @@ 
-0,0 +1,228 @@ +"""distutils.zosccompiler + +Contains the selection of the c & c++ compilers on z/OS. There are several +different c compilers on z/OS, all of them are optional, so the correct +one needs to be chosen based on the users input. This is compatible with +the following compilers: + +IBM C/C++ For Open Enterprise Languages on z/OS 2.0 +IBM Open XL C/C++ 1.1 for z/OS +IBM XL C/C++ V2.4.1 for z/OS 2.4 and 2.5 +IBM z/OS XL C/C++ +""" + +import os +from .unixccompiler import UnixCCompiler +from . import sysconfig +from .errors import DistutilsExecError, CompileError + +_cc_args = { + 'ibm-openxl': [ + '-m64', + '-fvisibility=default', + '-fzos-le-char-mode=ascii', + '-fno-short-enums', + ], + 'ibm-xlclang': [ + '-q64', + '-qexportall', + '-qascii', + '-qstrict', + '-qnocsect', + '-Wa,asa,goff', + '-Wa,xplink', + '-qgonumber', + '-qenum=int', + '-Wc,DLL', + ], + 'ibm-xlc': [ + '-q64', + '-qexportall', + '-qascii', + '-qstrict', + '-qnocsect', + '-Wa,asa,goff', + '-Wa,xplink', + '-qgonumber', + '-qenum=int', + '-Wc,DLL', + '-qlanglvl=extc99', + ], +} + +_cxx_args = { + 'ibm-openxl': [ + '-m64', + '-fvisibility=default', + '-fzos-le-char-mode=ascii', + '-fno-short-enums', + ], + 'ibm-xlclang': [ + '-q64', + '-qexportall', + '-qascii', + '-qstrict', + '-qnocsect', + '-Wa,asa,goff', + '-Wa,xplink', + '-qgonumber', + '-qenum=int', + '-Wc,DLL', + ], + 'ibm-xlc': [ + '-q64', + '-qexportall', + '-qascii', + '-qstrict', + '-qnocsect', + '-Wa,asa,goff', + '-Wa,xplink', + '-qgonumber', + '-qenum=int', + '-Wc,DLL', + '-qlanglvl=extended0x', + ], +} + +_asm_args = { + 'ibm-openxl': ['-fasm', '-fno-integrated-as', '-Wa,--ASA', '-Wa,--GOFF'], + 'ibm-xlclang': [], + 'ibm-xlc': [], +} + +_ld_args = { + 'ibm-openxl': [], + 'ibm-xlclang': ['-Wl,dll', '-q64'], + 'ibm-xlc': ['-Wl,dll', '-q64'], +} + + +# Python on z/OS is built with no compiler specific options in it's CFLAGS. 
+# But each compiler requires its own specific options to build successfully,
+# though some of the options are common between them.
+class zOSCCompiler(UnixCCompiler):
+    src_extensions = ['.c', '.C', '.cc', '.cxx', '.cpp', '.m', '.s']
+    _cpp_extensions = ['.cc', '.cpp', '.cxx', '.C']
+    _asm_extensions = ['.s']
+
+    def _get_zos_compiler_name(self):
+        zos_compiler_names = [
+            os.path.basename(binary)
+            for envvar in ('CC', 'CXX', 'LDSHARED')
+            if (binary := os.environ.get(envvar, None))
+        ]
+        if len(zos_compiler_names) == 0:
+            return 'ibm-openxl'
+
+        zos_compilers = {}
+        for compiler in (
+            'ibm-clang',
+            'ibm-clang64',
+            'ibm-clang++',
+            'ibm-clang++64',
+            'clang',
+            'clang++',
+            'clang-14',
+        ):
+            zos_compilers[compiler] = 'ibm-openxl'
+
+        for compiler in ('xlclang', 'xlclang++', 'njsc', 'njsc++'):
+            zos_compilers[compiler] = 'ibm-xlclang'
+
+        for compiler in ('xlc', 'xlC', 'xlc++'):
+            zos_compilers[compiler] = 'ibm-xlc'
+
+        return zos_compilers.get(zos_compiler_names[0], 'ibm-openxl')
+
+    def __init__(self, verbose=0, dry_run=0, force=0):
+        super().__init__(verbose, dry_run, force)
+        self.zos_compiler = self._get_zos_compiler_name()
+        sysconfig.customize_compiler(self)
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        local_args = []
+        if ext in self._cpp_extensions:
+            compiler = self.compiler_cxx
+            local_args.extend(_cxx_args[self.zos_compiler])
+        elif ext in self._asm_extensions:
+            compiler = self.compiler_so
+            local_args.extend(_cc_args[self.zos_compiler])
+            local_args.extend(_asm_args[self.zos_compiler])
+        else:
+            compiler = self.compiler_so
+            local_args.extend(_cc_args[self.zos_compiler])
+        local_args.extend(cc_args)
+
+        try:
+            self.spawn(compiler + local_args + [src, '-o', obj] + extra_postargs)
+        except DistutilsExecError as msg:
+            raise CompileError(msg)
+
+    def runtime_library_dir_option(self, dir):
+        return '-L' + dir
+
+    def link(
+        self,
+        target_desc,
+        objects,
+        output_filename,
+        output_dir=None,
+        libraries=None,
+        library_dirs=None,
+        runtime_library_dirs=None,
+        export_symbols=None,
+        debug=0,
+        extra_preargs=None,
+        extra_postargs=None,
+        build_temp=None,
+        target_lang=None,
+    ):
+        # For a built module to use functions from CPython, it needs to use
+        # Python's side deck file. The side deck is located beside the
+        # libpython3.xx.so
+        ldversion = sysconfig.get_config_var('LDVERSION')
+        if sysconfig.python_build:
+            side_deck_path = os.path.join(
+                sysconfig.get_config_var('abs_builddir'),
+                f'libpython{ldversion}.x',
+            )
+        else:
+            side_deck_path = os.path.join(
+                sysconfig.get_config_var('installed_base'),
+                sysconfig.get_config_var('platlibdir'),
+                f'libpython{ldversion}.x',
+            )
+
+        if os.path.exists(side_deck_path):
+            if extra_postargs:
+                extra_postargs.append(side_deck_path)
+            else:
+                extra_postargs = [side_deck_path]
+
+        # Check for side deck files shipped with the requested libraries and,
+        # where found, link against the side deck instead of the library
+        if runtime_library_dirs:
+            for dir in runtime_library_dirs:
+                for library in libraries[:]:
+                    library_side_deck = os.path.join(dir, f'{library}.x')
+                    if os.path.exists(library_side_deck):
+                        libraries.remove(library)
+                        extra_postargs.append(library_side_deck)
+                        break
+
+        # Any required ld args for the given compiler
+        extra_postargs.extend(_ld_args[self.zos_compiler])
+
+        super().link(
+            target_desc,
+            objects,
+            output_filename,
+            output_dir,
+            libraries,
+            library_dirs,
+            runtime_library_dirs,
+            export_symbols,
+            debug,
+            extra_preargs,
+            extra_postargs,
+            build_temp,
+            target_lang,
+        )
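The selection rule in _get_zos_compiler_name above is easy to state on its
own: the basename of the first of CC/CXX/LDSHARED set in the environment
picks the flag family, with the clang-based Open XL toolchain as the
fallback. A condensed sketch (the mapping table is abridged, and the sample
path is hypothetical):

    import os

    _FAMILIES = {
        'xlclang': 'ibm-xlclang',   # XL C/C++ with the xlclang front end
        'xlc': 'ibm-xlc',           # classic XL C/C++
        'ibm-clang': 'ibm-openxl',  # Open XL (clang-based)
    }

    def guess_family():
        # First of CC/CXX/LDSHARED that is set wins; unknown names and an
        # empty environment both fall back to 'ibm-openxl'.
        for envvar in ('CC', 'CXX', 'LDSHARED'):
            binary = os.environ.get(envvar)
            if binary:
                return _FAMILIES.get(os.path.basename(binary), 'ibm-openxl')
        return 'ibm-openxl'

    # e.g. CC=/usr/bin/xlc selects the 'ibm-xlc' flag set.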
Coombs" Date: Sun, 15 Oct 2023 14:16:55 -0400 Subject: [PATCH 0332/1761] Extracted method for resolving python lib dir. --- distutils/command/build_ext.py | 43 ++++++++++++++++++++-------------- 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py index 98938babd0..ba6580c71e 100644 --- a/distutils/command/build_ext.py +++ b/distutils/command/build_ext.py @@ -130,6 +130,31 @@ def initialize_options(self): self.user = None self.parallel = None + @staticmethod + def _python_lib_dir(sysconfig): + """ + Resolve Python's library directory for building extensions + that rely on a shared Python library. + + See python/cpython#44264 and python/cpython#48686 + """ + if not sysconfig.get_config_var('Py_ENABLE_SHARED'): + return + + if sysconfig.python_build: + yield '.' + return + + if sys.platform == 'zos': + # On z/OS, a user is not required to install Python to + # a predetermined path, but can use Python portably + installed_dir = sysconfig.get_config_var('base') + lib_dir = sysconfig.get_config_var('platlibdir') + yield os.path.join(installed_dir, lib_dir) + else: + # building third party extensions + yield sysconfig.get_config_var('LIBDIR') + def finalize_options(self): # noqa: C901 from distutils import sysconfig @@ -231,23 +256,7 @@ def finalize_options(self): # noqa: C901 # building python standard extensions self.library_dirs.append('.') - # For building extensions with a shared Python library, - # Python's library directory must be appended to library_dirs - # See Issues: #1600860, #4366 - if sysconfig.get_config_var('Py_ENABLE_SHARED'): - if not sysconfig.python_build: - if sys.platform == 'zos': - # On z/OS, a user is not required to install Python to - # a predetermined path, but can use Python portably - installed_dir = sysconfig.get_config_var('base') - lib_dir = sysconfig.get_config_var('platlibdir') - self.library_dirs.append(os.path.join(installed_dir, lib_dir)) - else: - # building third party extensions - self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) - else: - # building python standard extensions - self.library_dirs.append('.') + self.library_dirs.extend(self._python_lib_dir(sysconfig)) # The argument parsing will result in self.define being a string, but # it has to be a list of 2-tuples. All the preprocessor symbols From 0136c373d4be1a7cfee4683d77d659a7a5dff832 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Tue, 13 Feb 2024 11:01:51 -0500 Subject: [PATCH 0333/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?= =?UTF-8?q?s=20(delint).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- distutils/tests/test_unixccompiler.py | 2 +- distutils/unixccompiler.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py index e8c34ce63e..62efce436f 100644 --- a/distutils/tests/test_unixccompiler.py +++ b/distutils/tests/test_unixccompiler.py @@ -189,7 +189,7 @@ def gcv(v): sysconfig.get_config_var = gcv assert self.cc.rpath_foo() == [ '-Wl,--enable-new-dtags', - '-Wl,-rpath,/foo' + '-Wl,-rpath,/foo', ] # non-GCC GNULD diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py index b676a6a8af..d749fe2529 100644 --- a/distutils/unixccompiler.py +++ b/distutils/unixccompiler.py @@ -316,7 +316,7 @@ def runtime_library_dir_option(self, dir): return [ # Force RUNPATH instead of RPATH "-Wl,--enable-new-dtags", - "-Wl,-rpath," + dir + "-Wl,-rpath," + dir, ] else: return "-Wl,-R" + dir From 91cb3279ec9c17d00c5d8b823aa8f3b65bd9f76e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 13 Feb 2024 13:15:51 -0500 Subject: [PATCH 0334/1761] Update more tests to match the new expectation. --- distutils/tests/test_unixccompiler.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py index 62efce436f..a313da3e75 100644 --- a/distutils/tests/test_unixccompiler.py +++ b/distutils/tests/test_unixccompiler.py @@ -153,7 +153,10 @@ def gcv(v): return 'yes' sysconfig.get_config_var = gcv - assert self.cc.rpath_foo() == '-Wl,--enable-new-dtags,-R/foo' + assert self.cc.rpath_foo() == [ + '-Wl,--enable-new-dtags', + '-Wl,-rpath,/foo', + ] def gcv(v): if v == 'CC': @@ -162,7 +165,10 @@ def gcv(v): return 'yes' sysconfig.get_config_var = gcv - assert self.cc.rpath_foo() == '-Wl,--enable-new-dtags,-R/foo' + assert self.cc.rpath_foo() == [ + '-Wl,--enable-new-dtags', + '-Wl,-rpath,/foo', + ] # GCC non-GNULD sys.platform = 'bar' @@ -202,7 +208,10 @@ def gcv(v): return 'yes' sysconfig.get_config_var = gcv - assert self.cc.rpath_foo() == '-Wl,--enable-new-dtags,-R/foo' + assert self.cc.rpath_foo() == [ + '-Wl,--enable-new-dtags', + '-Wl,-rpath,/foo', + ] # non-GCC non-GNULD sys.platform = 'bar' From 0f23a0e35f960ffe5da7f52a36e5080e0cb6aa9d Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 13 Feb 2024 11:20:20 -0500 Subject: [PATCH 0335/1761] Rely on always_iterable to conditionally extend the lib_opts. 
--- distutils/_itertools.py | 52 +++++++++++++++++++++++++++++++++++++++++ distutils/ccompiler.py | 7 ++---- 2 files changed, 54 insertions(+), 5 deletions(-) create mode 100644 distutils/_itertools.py diff --git a/distutils/_itertools.py b/distutils/_itertools.py new file mode 100644 index 0000000000..85b2951186 --- /dev/null +++ b/distutils/_itertools.py @@ -0,0 +1,52 @@ +# from more_itertools 10.2 +def always_iterable(obj, base_type=(str, bytes)): + """If *obj* is iterable, return an iterator over its items:: + + >>> obj = (1, 2, 3) + >>> list(always_iterable(obj)) + [1, 2, 3] + + If *obj* is not iterable, return a one-item iterable containing *obj*:: + + >>> obj = 1 + >>> list(always_iterable(obj)) + [1] + + If *obj* is ``None``, return an empty iterable: + + >>> obj = None + >>> list(always_iterable(None)) + [] + + By default, binary and text strings are not considered iterable:: + + >>> obj = 'foo' + >>> list(always_iterable(obj)) + ['foo'] + + If *base_type* is set, objects for which ``isinstance(obj, base_type)`` + returns ``True`` won't be considered iterable. + + >>> obj = {'a': 1} + >>> list(always_iterable(obj)) # Iterate over the dict's keys + ['a'] + >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit + [{'a': 1}] + + Set *base_type* to ``None`` to avoid any special handling and treat objects + Python considers iterable as iterable: + + >>> obj = 'foo' + >>> list(always_iterable(obj, base_type=None)) + ['f', 'o', 'o'] + """ + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py index d5ca761f5a..28d2da5c58 100644 --- a/distutils/ccompiler.py +++ b/distutils/ccompiler.py @@ -21,6 +21,7 @@ from ._modified import newer_group from .util import split_quoted, execute from ._log import log +from ._itertools import always_iterable class CCompiler: @@ -1233,11 +1234,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries): lib_opts.append(compiler.library_dir_option(dir)) for dir in runtime_library_dirs: - opt = compiler.runtime_library_dir_option(dir) - if isinstance(opt, list): - lib_opts = lib_opts + opt - else: - lib_opts.append(opt) + lib_opts.extend(always_iterable(compiler.runtime_library_dir_option(dir))) # XXX it's important that we *not* remove redundant library mentions! # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to From dcd70baa3bdeba64d2072dc06cc50e52501de7aa Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 13 Feb 2024 17:30:38 -0500 Subject: [PATCH 0336/1761] Restore integration test with Setuptools --- .github/workflows/main.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 45c66794f0..473c2e0fcc 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -117,7 +117,6 @@ jobs: ci_setuptools: # Integration testing with setuptools - if: ${{ false }} # disabled for deprecation warnings strategy: matrix: python: From 779219ce3ecbf4477da062658a1d0b2d5bf4f77f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 18 Feb 2024 10:38:06 -0500 Subject: [PATCH 0337/1761] Include deps from the base config in diffcov. 
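For context, tox expands {[testenv]deps} by splicing the base environment's
deps into the referencing environment, so diffcov inherits them rather than
installing diff-cover alone. A minimal sketch (the pytest entry here is
illustrative, not the project's actual base deps):

    [testenv]
    deps =
        pytest

    [testenv:diffcov]
    deps =
        {[testenv]deps}
        diff-cover
    # diffcov now resolves to pytest + diff-cover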
--- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 331eeed93f..4c39a5b139 100644 --- a/tox.ini +++ b/tox.ini @@ -12,6 +12,7 @@ extras = [testenv:diffcov] description = run tests and check that diff from main is covered deps = + {[testenv]deps} diff-cover commands = pytest {posargs} --cov-report xml From 569fd7b0b587409f4043f127a766131a25b366dd Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 19 Feb 2024 13:45:46 +0000 Subject: [PATCH 0338/1761] Add comments about Python version to bug-report.yml --- .github/ISSUE_TEMPLATE/bug-report.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index d449ba04c9..f7dd51c57f 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -58,6 +58,12 @@ body: attributes: label: Python version placeholder: For example, Python 3.10 + description: >- + Please ensure you are using a [supported version of Python](https://devguide.python.org/versions/#supported-versions). + + Setuptools does not support versions that have reached [`end-of-life`](https://devguide.python.org/versions/#unsupported-versions). + + Support for versions of Python under development (i.e. without a stable release) is experimental. validations: required: true - type: input From 73466ded2636ef8199a65bac5c2bcfa78720e632 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Feb 2024 00:00:27 -0500 Subject: [PATCH 0339/1761] Bump packaging in tests to 23.2 --- newsfragments/xxxx.misc.rst | 1 + setup.cfg | 2 +- setuptools/tests/_packaging_compat.py | 9 --------- setuptools/tests/config/test_apply_pyprojecttoml.py | 3 +-- setuptools/tests/test_core_metadata.py | 5 ++--- tox.ini | 3 +-- 6 files changed, 6 insertions(+), 17 deletions(-) create mode 100644 newsfragments/xxxx.misc.rst delete mode 100644 setuptools/tests/_packaging_compat.py diff --git a/newsfragments/xxxx.misc.rst b/newsfragments/xxxx.misc.rst new file mode 100644 index 0000000000..87b9a05c31 --- /dev/null +++ b/newsfragments/xxxx.misc.rst @@ -0,0 +1 @@ +Bump ``packaging`` to version 23.2 in tests -- by :user:`Avasam` diff --git a/setup.cfg b/setup.cfg index a42439145e..1d80f04517 100644 --- a/setup.cfg +++ b/setup.cfg @@ -85,7 +85,7 @@ testing-integration = jaraco.envs>=2.2 build[virtualenv]>=1.0.3 filelock>=3.4.0 - packaging>=23.1 # TODO: update once packaging 23.2 is available + packaging>=23.2 docs = # upstream diff --git a/setuptools/tests/_packaging_compat.py b/setuptools/tests/_packaging_compat.py deleted file mode 100644 index 5bdcc554d5..0000000000 --- a/setuptools/tests/_packaging_compat.py +++ /dev/null @@ -1,9 +0,0 @@ -from packaging import __version__ as packaging_version - -if tuple(packaging_version.split(".")) >= ("23", "2"): - from packaging.metadata import Metadata -else: - # Just pretend it exists while waiting for release... 
- from unittest.mock import MagicMock - - Metadata = MagicMock() diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index cffc779b94..e63a89b0b4 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -15,8 +15,7 @@ import pytest from ini2toml.api import Translator -# TODO: replace with `from packaging.metadata import Metadata` in future versions -from .._packaging_compat import Metadata +from packaging.metadata import Metadata import setuptools # noqa ensure monkey patch to metadata from setuptools.dist import Distribution diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py index 4857093206..68002cc51b 100644 --- a/setuptools/tests/test_core_metadata.py +++ b/setuptools/tests/test_core_metadata.py @@ -5,8 +5,7 @@ import pytest -# TODO: replace with `from packaging.metadata import Metadata` in future versions: -from ._packaging_compat import Metadata +from packaging.metadata import Metadata from setuptools import sic, _reqs from setuptools.dist import Distribution @@ -312,7 +311,7 @@ def test_parity_with_metadata_from_pypa_wheel(tmp_path): # Example with complex requirement definition python_requires=">=3.8", install_requires=""" - packaging==23.0 + packaging==23.2 ordered-set==3.1.1 more-itertools==8.8.0; extra == "other" jaraco.text==3.7.0 diff --git a/tox.ini b/tox.ini index c4f10cd667..18b2b4d800 100644 --- a/tox.ini +++ b/tox.ini @@ -2,8 +2,7 @@ description = perform primary checks (tests, style, types, coverage) deps = # Ideally all the dependencies should be set as "extras" - packaging @ git+https://github.com/pypa/packaging@7e68d82 - # ^-- use dev version while we wait for the new release + packaging >= 23.2 setenv = PYTHONWARNDEFAULTENCODING = 1 SETUPTOOLS_ENFORCE_DEPRECATION = {env:SETUPTOOLS_ENFORCE_DEPRECATION:0} From adc89991b7de4836a6e56189bfbc2bab7878205d Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Feb 2024 00:03:04 -0500 Subject: [PATCH 0340/1761] PR number to newsfragment --- newsfragments/{xxxx.misc.rst => 4231.misc.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename newsfragments/{xxxx.misc.rst => 4231.misc.rst} (100%) diff --git a/newsfragments/xxxx.misc.rst b/newsfragments/4231.misc.rst similarity index 100% rename from newsfragments/xxxx.misc.rst rename to newsfragments/4231.misc.rst From 86a28998b1f28023fa3462f5ab9a4fc091773a48 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 22 Feb 2024 10:45:59 +0000 Subject: [PATCH 0341/1761] Use setup.cfg to specify test dependency on packaging --- setup.cfg | 1 + tox.ini | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 1d80f04517..283ceabf3e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -59,6 +59,7 @@ testing = virtualenv>=13.0.0 wheel pip>=19.1 # For proper file:// URLs support. 
+	packaging>=23.2
 	jaraco.envs>=2.2
 	pytest-xdist
 	jaraco.path>=3.2.0
 	build[virtualenv]
 	filelock>=3.4.0
diff --git a/tox.ini b/tox.ini
index 18b2b4d800..2cbff92490 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,7 +2,6 @@
 description = perform primary checks (tests, style, types, coverage)
 deps =
 	# Ideally all the dependencies should be set as "extras"
-	packaging >= 23.2
 setenv =
 	PYTHONWARNDEFAULTENCODING = 1
 	SETUPTOOLS_ENFORCE_DEPRECATION = {env:SETUPTOOLS_ENFORCE_DEPRECATION:0}
From 97aae46711b6a7e0045dec3a17ebec6ec60062ee Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Wed, 21 Feb 2024 16:03:38 +0000
Subject: [PATCH 0342/1761] Further clarify include_package_data

The fact that the `Subdirectory for Data Files` section in
`userguide/datafiles` repeats examples for `include_package_data` without
mentioning `MANIFEST.in` may be confusing for users who skip previous parts
of the document.
---
 docs/userguide/datafiles.rst     | 100 +++++++++++++++++++------------
 docs/userguide/miscellaneous.rst |  18 ++++++
 2 files changed, 80 insertions(+), 38 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 2e37289d5f..4eca7e4303 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -10,12 +10,19 @@
 by including the data files **inside the package directory**.
 
 Setuptools focuses on this most common type of data files and offers three ways
 of specifying which files should be included in your packages, as described in
-the following sections.
+the following section.
+
+
+Configuration Options
+=====================
+
+
+.. _include-package-data:
 
 include_package_data
-====================
+--------------------
 
-First, you can simply use the ``include_package_data`` keyword.
+First, you can use the ``include_package_data`` keyword.
 For example, if the package tree looks like this::
 
     project_root_directory
@@ -92,8 +99,10 @@
 your package, provided:
 (where ``include_package_data=False`` by default), which was not changed
 to ensure backwards compatibility with existing projects.
 
+.. _package-data:
+
 package_data
-============
+------------
 
 By default, ``include_package_data`` considers **all** non ``.py`` files found
 inside the package directory (``src/mypkg`` in this case) as data files, and
 includes those that
@@ -260,8 +269,10 @@
 we specify that ``data1.rst`` from ``mypkg1`` alone should be captured as well.
 
 Please check :ref:`section subdirectories ` below.
 
+.. _exclude-package-data:
+
 exclude_package_data
-====================
+--------------------
 
 Sometimes, the ``include_package_data`` or ``package_data`` options alone
 aren't sufficient to precisely define what files you want included. For example,
@@ -327,6 +338,38 @@
 even if they were listed in ``package_data`` or were included as a result of using
 ``include_package_data``.
 
+
+Summary
+-------
+
+In summary, the three options allow you to:
+
+``include_package_data``
+    Accept all data files and directories matched by
+    :ref:`MANIFEST.in ` or added by
+    a :ref:`plugin `.
+
+``package_data``
+    Specify additional patterns to match files that may or may
+    not be matched by :ref:`MANIFEST.in `
+    or added by a :ref:`plugin `.
+
+``exclude_package_data``
+    Specify patterns for data files and directories that should *not* be
+    included when a package is installed, even if they would otherwise have
+    been included due to the use of the preceding options.
+
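A minimal sketch of how the three options above combine in a pyproject.toml
(the package name and patterns are hypothetical, not taken from this patch):

    [tool.setuptools]
    include-package-data = true  # accept whatever MANIFEST.in/plugins track

    [tool.setuptools.package-data]
    mypkg = ["*.json"]  # additionally match these data files

    [tool.setuptools.exclude-package-data]
    mypkg = ["*.tmp"]  # never install these, even if matched above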
+.. note::
+    Due to the way the build process works, a data file that you
+    include in your project and then stop including may be "orphaned" in your
+    project's build directories, requiring you to delete them manually.
+    This may also be important for your users and contributors
+    if they track intermediate revisions of your project using Subversion; be sure
+    to let them know when you make changes that remove files from inclusion so they
+    can also manually delete them.
+
+    See also troubleshooting information in :ref:`Caching and Troubleshooting`.
+
 
 .. _subdir-data-files:
 
 Subdirectory for Data Files
@@ -350,8 +393,13 @@
 Here, the ``.rst`` files are placed under a ``data`` subdirectory inside ``mypkg``,
 while the ``.txt`` files are directly under ``mypkg``.
 
 In this case, the recommended approach is to treat ``data`` as a namespace package
-(refer :pep:`420`). With ``package_data``,
-the configuration might look like this:
+(refer :pep:`420`). This way, you can rely on the same methods described above,
+using either :ref:`package-data` or :ref:`include-package-data`.
+For the sake of completeness, we include below configuration examples
+for the subdirectory structure, but please refer to the detailed
+information in the previous sections of this document.
+
+With :ref:`package-data`, the configuration might look like this:
 
 .. tab:: pyproject.toml
 
@@ -407,8 +455,9 @@
 which enables the ``data`` directory to be identified, and then, we separately specify files
 for the root package ``mypkg``, and the namespace package ``data`` under the package
 ``mypkg``.
 
-With ``include_package_data`` the configuration is simpler: you simply need to enable
-scanning of namespace packages in the ``src`` directory and the rest is handled by Setuptools.
+Alternatively, you can also rely on :ref:`include-package-data`.
+Note that this is the default behaviour in ``pyproject.toml``, but you need to
+manually enable scanning of namespace packages in ``setup.cfg`` or ``setup.py``:
 
 .. tab:: pyproject.toml
 
@@ -422,7 +471,7 @@
     [tool.setuptools.packages.find]
     # scanning for namespace packages is true by default in pyproject.toml, so
-    # you need NOT include the following line.
+    # you need NOT include this configuration.
     namespaces = true
     where = ["src"]
 
@@ -451,34 +500,9 @@
         include_package_data=True,
     )
 
-Summary
-=======
-
-In summary, the three options allow you to:
-
-``include_package_data``
-    Accept all data files and directories matched by
-    :ref:`MANIFEST.in ` or added by
-    a :ref:`plugin `.
-
-``package_data``
-    Specify additional patterns to match files that may or may
-    not be matched by :ref:`MANIFEST.in `
-    or added by a :ref:`plugin `.
-
-``exclude_package_data``
-    Specify patterns for data files and directories that should *not* be
-    included when a package is installed, even if they would otherwise have
-    been included due to the use of the preceding options.
-
-.. note::
-    Due to the way the build process works, a data file that you
-    include in your project and then stop including may be "orphaned" in your
-    project's build directories, requiring you to manually deleting them.
-    This may also be important for your users and contributors
-    if they track intermediate revisions of your project using Subversion; be sure
-    to let them know when you make changes that remove files from inclusion so they
-    can also manually delete them.
+To avoid common mistakes with :ref:`include-package-data`,
+please ensure :ref:`MANIFEST.in ` is properly set
+or use a revision control system plugin (see :doc:`/userguide/miscellaneous`).
 
 .. _Accessing Data Files at Runtime:
 
diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst
index 5f15ff6053..7d841f6661 100644
--- a/docs/userguide/miscellaneous.rst
+++ b/docs/userguide/miscellaneous.rst
@@ -168,6 +168,20 @@
 binary extensions during the build process, or included in the final
 See :doc:`/userguide/datafiles` for more information.
 
+
+.. _Caching and Troubleshooting:
+
+Caching and Troubleshooting
+===========================
+
+Setuptools automatically creates a few directories to host build artefacts and
+cache files, such as ``build``, ``dist``, ``*.egg-info``. While the cache is
+useful to speed up incremental builds, in some edge cases it might become
+stale. If you feel that caching is causing problems for your build, especially
+after changes in configuration or in the directory/file structure, consider
+removing ``build``, ``dist``, ``*.egg-info`` [#PKG-INFO]_ before rebuilding or
+reinstalling your project.
+
 ----
 
 .. [#build-process]
 :term:`Virtual Environment`. Therefore it only contains items that are
 required during runtime.
 
+.. [#PKG-INFO]
+    When working from an extracted sdist (e.g. for patching), you might also consider removing
+    the ``PKG-INFO`` file to force its recreation.
+
 .. _git: https://git-scm.com
 .. _mercurial: https://www.mercurial-scm.org
From 520d5efb7a6ee4cb27477b2ab4a62064808b0bd5 Mon Sep 17 00:00:00 2001
From: Avasam
Date: Thu, 22 Feb 2024 10:45:16 -0500
Subject: [PATCH 0343/1761] Remove .idea/ from .gitignore

---
 .gitignore | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index 90ae80505e..311c9b269e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,6 +17,5 @@
 setuptools.egg-info
 *~
 .hg*
 .cache
-.idea/
 .pytest_cache/
 .mypy_cache/
From 1719601cb5dc667e57b9c2078b2012d22537f55a Mon Sep 17 00:00:00 2001
From: James Hilliard
Date: Mon, 12 Feb 2024 18:15:50 -0700
Subject: [PATCH 0344/1761] Append arbitrary args instead of prepending.

---
 newsfragments/4217.bugfix.rst       |  2 ++
 setuptools/build_meta.py            | 11 +++++++++--
 setuptools/tests/test_build_meta.py | 10 ++++++++++
 3 files changed, 21 insertions(+), 2 deletions(-)
 create mode 100644 newsfragments/4217.bugfix.rst

diff --git a/newsfragments/4217.bugfix.rst b/newsfragments/4217.bugfix.rst
new file mode 100644
index 0000000000..7b9146dd50
--- /dev/null
+++ b/newsfragments/4217.bugfix.rst
@@ -0,0 +1,2 @@
+Fix argument order of ``--config-settings["--build-option"]`` arguments.
+This was broken by `.
\ No newline at end of file
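The regression and the fix are easiest to see at the build-backend level; a
usage sketch mirroring the new test below (the output directory is
hypothetical):

    from setuptools import build_meta

    # --build-option arguments are now appended after the backend's own
    # arguments, so the effective command line keeps their relative order:
    #   setup.py bdist_wheel --dist-dir <tmp> build_ext --inplace
    build_meta.build_wheel(
        "dist",
        config_settings={"--build-option": ["build_ext", "--inplace"]},
    )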
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 0a0abfdae0..2decd2d214 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -369,7 +369,12 @@ def prepare_metadata_for_build_wheel(
         return self._bubble_up_info_directory(metadata_directory, ".dist-info")
 
     def _build_with_temp_dir(
-        self, setup_command, result_extension, result_directory, config_settings
+        self,
+        setup_command,
+        result_extension,
+        result_directory,
+        config_settings,
+        arbitrary_args=(),
     ):
         result_directory = os.path.abspath(result_directory)
 
@@ -384,6 +389,7 @@ def _build_with_temp_dir(
                 *setup_command,
                 "--dist-dir",
                 tmp_dist_dir,
+                *arbitrary_args,
             ]
             with no_install_setup_requires():
                 self.run_setup()
@@ -402,10 +408,11 @@ def build_wheel(
     ):
         with suppress_known_deprecation():
             return self._build_with_temp_dir(
-                ['bdist_wheel', *self._arbitrary_args(config_settings)],
+                ['bdist_wheel'],
                 '.whl',
                 wheel_directory,
                 config_settings,
+                self._arbitrary_args(config_settings),
             )
 
     def build_sdist(self, sdist_directory, config_settings=None):
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index b912194805..9332781764 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -720,6 +720,16 @@ def test_editable_without_config_settings(self, tmpdir_cwd):
         build_backend.build_editable("temp")
         assert not Path("build").exists()
 
+    def test_build_wheel_inplace(self, tmpdir_cwd):
+        config_settings = {"--build-option": ["build_ext", "--inplace"]}
+        path.build(self._simple_pyproject_example)
+        build_backend = self.get_build_backend()
+        assert not Path("build").exists()
+        Path("build").mkdir()
+        build_backend.prepare_metadata_for_build_wheel("build", config_settings)
+        build_backend.build_wheel("build", config_settings)
+        assert Path("build/proj-42-py3-none-any.whl").exists()
+
     @pytest.mark.parametrize("config_settings", [{"editable-mode": "strict"}])
     def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
         path.build({**self._simple_pyproject_example, '_meta': {}})
From 367559f00ea13492332d00379468217afe590cec Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri
Date: Fri, 23 Feb 2024 11:01:07 +0000
Subject: [PATCH 0345/1761] Cleanup whitespace in GitHub issue template

---
 .github/ISSUE_TEMPLATE/bug-report.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml
index f7dd51c57f..ff45e687c9 100644
--- a/.github/ISSUE_TEMPLATE/bug-report.yml
+++ b/.github/ISSUE_TEMPLATE/bug-report.yml
@@ -48,7 +48,7 @@ body:
       placeholder: For example, setuptools==69.1.0
       description: >-
         Please also test with the **latest version** of `setuptools`.
-
+
         Typically, this involves modifying `requires` in `[build-system]` of
         [`pyproject.toml`](https://setuptools.pypa.io/en/latest/userguide/quickstart.html#basic-use),
         not just updating `setuptools` using `pip`.
@@ -60,9 +60,9 @@ body:
       placeholder: For example, Python 3.10
       description: >-
         Please ensure you are using a [supported version of Python](https://devguide.python.org/versions/#supported-versions).
-
+
         Setuptools does not support versions that have reached [`end-of-life`](https://devguide.python.org/versions/#unsupported-versions).
-
+
         Support for versions of Python under development (i.e. without a stable release) is experimental.
validations: required: true @@ -107,7 +107,7 @@ body: label: How to Reproduce description: >- Describe the steps to reproduce this bug. - + Please try to create a [minimal reproducer](https://stackoverflow.com/help/minimal-reproducible-example), and avoid things like "see the steps in the CI logs". placeholder: | From 8c45d6e445a8ca5f5a1fd724a80a5c418fe36780 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 23 Feb 2024 11:04:20 +0000 Subject: [PATCH 0346/1761] =?UTF-8?q?Bump=20version:=2069.1.0=20=E2=86=92?= =?UTF-8?q?=2069.1.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 16 ++++++++++++++++ newsfragments/4217.bugfix.rst | 2 -- newsfragments/4231.misc.rst | 1 - setup.cfg | 2 +- 5 files changed, 18 insertions(+), 5 deletions(-) delete mode 100644 newsfragments/4217.bugfix.rst delete mode 100644 newsfragments/4231.misc.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 4c2928ffff..8d101ab5af 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 69.1.0 +current_version = 69.1.1 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 7fc8f4fa87..abc4bb3f04 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,19 @@ +v69.1.1 +======= + +Bugfixes +-------- + +- Fix argument order of ``--config-settings["--build-option"]`` arguments. + This was broken by `. (#4217) + + +Misc +---- + +- #4231 + + v69.1.0 ======= diff --git a/newsfragments/4217.bugfix.rst b/newsfragments/4217.bugfix.rst deleted file mode 100644 index 7b9146dd50..0000000000 --- a/newsfragments/4217.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix argument order of ``--config-settings["--build-option"]`` arguments. -This was broken by `. \ No newline at end of file diff --git a/newsfragments/4231.misc.rst b/newsfragments/4231.misc.rst deleted file mode 100644 index 87b9a05c31..0000000000 --- a/newsfragments/4231.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Bump ``packaging`` to version 23.2 in tests -- by :user:`Avasam` diff --git a/setup.cfg b/setup.cfg index 283ceabf3e..0699bc72e7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 69.1.0 +version = 69.1.1 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From e8e7b1b4fae50db7508183a2cd3c7e0eaffe01d5 Mon Sep 17 00:00:00 2001 From: Tommy Date: Sun, 25 Feb 2024 15:20:26 +0700 Subject: [PATCH 0347/1761] small grammar fix --- docs/userguide/dependency_management.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/dependency_management.rst b/docs/userguide/dependency_management.rst index 0feb346dc5..a2c0c890f3 100644 --- a/docs/userguide/dependency_management.rst +++ b/docs/userguide/dependency_management.rst @@ -19,7 +19,7 @@ Build system requirement After organizing all the scripts and files and getting ready for packaging, there needs to be a way to specify what programs and libraries are actually needed -do the packaging (in our case, ``setuptools`` of course). +to do the packaging (in our case, ``setuptools`` of course). 
This needs to be specified in your ``pyproject.toml`` file (if you have forgot what this is, go to :doc:`/userguide/quickstart` or :doc:`/build_meta`): From c6ebe3d95f3f8720c39e01b6db6764743308ce26 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 25 Feb 2024 20:58:49 -0500 Subject: [PATCH 0348/1761] Drop dependency on `py` --- newsfragments/4237.misc.rst | 1 + .../tests/test_find_distributions.py | 21 ++++++++++++++----- setup.cfg | 2 +- 3 files changed, 18 insertions(+), 6 deletions(-) create mode 100644 newsfragments/4237.misc.rst diff --git a/newsfragments/4237.misc.rst b/newsfragments/4237.misc.rst new file mode 100644 index 0000000000..995bee20e1 --- /dev/null +++ b/newsfragments/4237.misc.rst @@ -0,0 +1 @@ +Drop dependency on `py`. Bump ``pytest-xdist`` to ``>=3`` and use `pathlib` instead in tests -- by :user:`Avasam` diff --git a/pkg_resources/tests/test_find_distributions.py b/pkg_resources/tests/test_find_distributions.py index 4ffcdf3b58..8263ca6c41 100644 --- a/pkg_resources/tests/test_find_distributions.py +++ b/pkg_resources/tests/test_find_distributions.py @@ -1,9 +1,10 @@ -import py +from pathlib import Path +import shutil import pytest import pkg_resources -TESTS_DATA_DIR = py.path.local(__file__).dirpath('data') +TESTS_DATA_DIR = Path(__file__).parent / 'data' class TestFindDistributions: @@ -19,21 +20,31 @@ def test_non_egg_dir_named_egg(self, target_dir): assert not list(dists) def test_standalone_egg_directory(self, target_dir): - (TESTS_DATA_DIR / 'my-test-package_unpacked-egg').copy(target_dir) + shutil.copytree( + TESTS_DATA_DIR / 'my-test-package_unpacked-egg', + target_dir, + dirs_exist_ok=True, + ) dists = pkg_resources.find_distributions(str(target_dir)) assert [dist.project_name for dist in dists] == ['my-test-package'] dists = pkg_resources.find_distributions(str(target_dir), only=True) assert not list(dists) def test_zipped_egg(self, target_dir): - (TESTS_DATA_DIR / 'my-test-package_zipped-egg').copy(target_dir) + shutil.copytree( + TESTS_DATA_DIR / 'my-test-package_zipped-egg', + target_dir, + dirs_exist_ok=True, + ) dists = pkg_resources.find_distributions(str(target_dir)) assert [dist.project_name for dist in dists] == ['my-test-package'] dists = pkg_resources.find_distributions(str(target_dir), only=True) assert not list(dists) def test_zipped_sdist_one_level_removed(self, target_dir): - (TESTS_DATA_DIR / 'my-test-package-zip').copy(target_dir) + shutil.copytree( + TESTS_DATA_DIR / 'my-test-package-zip', target_dir, dirs_exist_ok=True + ) dists = pkg_resources.find_distributions( str(target_dir / "my-test-package.zip") ) diff --git a/setup.cfg b/setup.cfg index 0699bc72e7..f40fcd8265 100644 --- a/setup.cfg +++ b/setup.cfg @@ -61,7 +61,7 @@ testing = pip>=19.1 # For proper file:// URLs support. 
packaging>=23.2 jaraco.envs>=2.2 - pytest-xdist + pytest-xdist>=3 # Dropped dependency on pytest-fork and py jaraco.path>=3.2.0 build[virtualenv] filelock>=3.4.0 From 3d7e0bf126397ddb1ada6aa893fa27ac61d9d2da Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 25 Feb 2024 22:19:18 -0500 Subject: [PATCH 0349/1761] Replace Flake8-2020 by Ruff --- newsfragments/4238.misc.rst | 1 + ruff.toml | 1 + setup.cfg | 1 - setuptools/tests/test_core_metadata.py | 8 ++++---- 4 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 newsfragments/4238.misc.rst diff --git a/newsfragments/4238.misc.rst b/newsfragments/4238.misc.rst new file mode 100644 index 0000000000..a7ccfc911e --- /dev/null +++ b/newsfragments/4238.misc.rst @@ -0,0 +1 @@ +Drop dependency on Flake8 by using Ruff's YTT rules instead of flake8-2020 -- by :user:`Avasam` diff --git a/ruff.toml b/ruff.toml index 597d516bc4..e520746944 100644 --- a/ruff.toml +++ b/ruff.toml @@ -18,6 +18,7 @@ ignore = [ ] extend-select = [ "UP", # pyupgrade + "YTT", # flake8-2020 ] extend-ignore = [ "UP015", # redundant-open-modes, explicit is preferred diff --git a/setup.cfg b/setup.cfg index 0699bc72e7..b544ac72f0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,7 +55,6 @@ testing = pytest-ruff >= 0.2.1; sys_platform != "cygwin" # local - flake8-2020 virtualenv>=13.0.0 wheel pip>=19.1 # For proper file:// URLs support. diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py index 68002cc51b..eaabf20a9d 100644 --- a/setuptools/tests/test_core_metadata.py +++ b/setuptools/tests/test_core_metadata.py @@ -323,9 +323,9 @@ def test_parity_with_metadata_from_pypa_wheel(tmp_path): "testing": """ pytest >= 6 pytest-checkdocs >= 2.4 - pytest-flake8 ; \\ - # workaround for tholo/pytest-flake8#87 - python_version < "3.12" + tomli ; \\ + # Using stdlib when possible + python_version < "3.11" ini2toml[lite]>=0.9 """, "other": [], @@ -345,7 +345,7 @@ def test_parity_with_metadata_from_pypa_wheel(tmp_path): 'Requires-Python: >=3.8', 'Provides-Extra: other', 'Provides-Extra: testing', - 'Requires-Dist: pytest-flake8; python_version < "3.12" and extra == "testing"', + 'Requires-Dist: tomli; python_version < "3.11" and extra == "testing"', 'Requires-Dist: more-itertools==8.8.0; extra == "other"', 'Requires-Dist: ini2toml[lite]>=0.9; extra == "testing"', ] From 2d8ab8600477d788cb68187118d73154d9746963 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 Feb 2024 17:31:05 -0500 Subject: [PATCH 0350/1761] Standardize and centralize StrPath TypeAlias --- newsfragments/4241.misc.rst | 1 + setuptools/_normalization.py | 4 --- setuptools/_path.py | 9 ++++-- setuptools/command/editable_wheel.py | 25 ++++++++------- setuptools/config/_apply_pyprojecttoml.py | 19 ++++++------ setuptools/config/expand.py | 31 +++++++++---------- setuptools/config/pyprojecttoml.py | 16 +++++----- setuptools/config/setupcfg.py | 16 +++++----- setuptools/discovery.py | 29 +++++++++-------- setuptools/tests/config/test_pyprojecttoml.py | 4 +-- setuptools/tests/test_config_discovery.py | 10 +++--- tools/generate_validation_code.py | 4 ++- 12 files changed, 85 insertions(+), 83 deletions(-) create mode 100644 newsfragments/4241.misc.rst diff --git a/newsfragments/4241.misc.rst b/newsfragments/4241.misc.rst new file mode 100644 index 0000000000..ef6da2c323 --- /dev/null +++ b/newsfragments/4241.misc.rst @@ -0,0 +1 @@ +Improvements to `Path`-related type annotations when it could be ``str | PathLike`` -- by :user:`Avasam` diff --git a/setuptools/_normalization.py 
b/setuptools/_normalization.py index 8f211b8bfb..e858052ccd 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -4,13 +4,9 @@ """ import re -from pathlib import Path -from typing import Union from .extern import packaging -_Path = Union[str, Path] - # https://packaging.python.org/en/latest/specifications/core-metadata/#name _VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) _UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9._-]+", re.I) diff --git a/setuptools/_path.py b/setuptools/_path.py index b99d9dadcf..fb8ef0e198 100644 --- a/setuptools/_path.py +++ b/setuptools/_path.py @@ -2,7 +2,10 @@ import sys from typing import Union -_Path = Union[str, os.PathLike] +if sys.version_info >= (3, 9): + StrPath = Union[str, os.PathLike[str]] # Same as _typeshed.StrPath +else: + StrPath = Union[str, os.PathLike] def ensure_directory(path): @@ -11,7 +14,7 @@ def ensure_directory(path): os.makedirs(dirname, exist_ok=True) -def same_path(p1: _Path, p2: _Path) -> bool: +def same_path(p1: StrPath, p2: StrPath) -> bool: """Differs from os.path.samefile because it does not require paths to exist. Purely string based (no comparison between i-nodes). >>> same_path("a/b", "./a/b") @@ -30,7 +33,7 @@ def same_path(p1: _Path, p2: _Path) -> bool: return normpath(p1) == normpath(p2) -def normpath(filename: _Path) -> str: +def normpath(filename: StrPath) -> str: """Normalize a file/dir name for comparison purposes.""" # See pkg_resources.normalize_path for notes about cygwin file = os.path.abspath(filename) if sys.platform == 'cygwin' else filename diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 8a4ae7928f..a81fcd5bf9 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -33,7 +33,6 @@ Protocol, Tuple, TypeVar, - Union, ) from .. 
import ( @@ -43,6 +42,7 @@ errors, namespaces, ) +from .._path import StrPath from ..discovery import find_package_path from ..dist import Distribution from ..warnings import ( @@ -55,8 +55,7 @@ if TYPE_CHECKING: from wheel.wheelfile import WheelFile # noqa -_Path = Union[str, Path] -_P = TypeVar("_P", bound=_Path) +_P = TypeVar("_P", bound=StrPath) _logger = logging.getLogger(__name__) @@ -181,7 +180,7 @@ def _find_egg_info_dir(self) -> Optional[str]: return next(candidates, None) def _configure_build( - self, name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path + self, name: str, unpacked_wheel: StrPath, build_lib: StrPath, tmp_dir: StrPath ): """Configure commands to behave in the following ways: @@ -256,7 +255,11 @@ def _collect_build_outputs(self) -> Tuple[List[str], Dict[str, str]]: return files, mapping def _run_build_commands( - self, dist_name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path + self, + dist_name: str, + unpacked_wheel: StrPath, + build_lib: StrPath, + tmp_dir: StrPath, ) -> Tuple[List[str], Dict[str, str]]: self._configure_build(dist_name, unpacked_wheel, build_lib, tmp_dir) self._run_build_subcommands() @@ -354,7 +357,7 @@ def _select_strategy( self, name: str, tag: str, - build_lib: _Path, + build_lib: StrPath, ) -> "EditableStrategy": """Decides which strategy to use to implement an editable installation.""" build_name = f"__editable__.{name}-{tag}" @@ -424,8 +427,8 @@ def __init__( self, dist: Distribution, name: str, - auxiliary_dir: _Path, - build_lib: _Path, + auxiliary_dir: StrPath, + build_lib: StrPath, ): self.auxiliary_dir = Path(auxiliary_dir) self.build_lib = Path(build_lib).resolve() @@ -567,7 +570,7 @@ def _can_symlink_files(base_dir: Path) -> bool: def _simple_layout( - packages: Iterable[str], package_dir: Dict[str, str], project_dir: Path + packages: Iterable[str], package_dir: Dict[str, str], project_dir: StrPath ) -> bool: """Return ``True`` if: - all packages are contained by the same parent directory, **and** @@ -649,7 +652,7 @@ def _find_top_level_modules(dist: Distribution) -> Iterator[str]: def _find_package_roots( packages: Iterable[str], package_dir: Mapping[str, str], - src_root: _Path, + src_root: StrPath, ) -> Dict[str, str]: pkg_roots: Dict[str, str] = { pkg: _absolute_root(find_package_path(pkg, package_dir, src_root)) @@ -659,7 +662,7 @@ def _find_package_roots( return _remove_nested(pkg_roots) -def _absolute_root(path: _Path) -> str: +def _absolute_root(path: StrPath) -> str: """Works for packages and top-level modules""" path_ = Path(path) parent = path_.parent diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index 32fb00131e..7301bc65c1 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -29,7 +29,7 @@ Union, cast, ) - +from .._path import StrPath from ..errors import RemovedConfigError from ..warnings import SetuptoolsWarning @@ -38,15 +38,14 @@ from setuptools.dist import Distribution # noqa EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like -_Path = Union[os.PathLike, str] _DictOrStr = Union[dict, str] -_CorrespFn = Callable[["Distribution", Any, _Path], None] +_CorrespFn = Callable[["Distribution", Any, StrPath], None] _Correspondence = Union[str, _CorrespFn] _logger = logging.getLogger(__name__) -def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution": +def apply(dist: "Distribution", config: dict, filename: StrPath) -> "Distribution": """Apply configuration dict 
read with :func:`read_configuration`""" if not config: @@ -68,7 +67,7 @@ def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution" return dist -def _apply_project_table(dist: "Distribution", config: dict, root_dir: _Path): +def _apply_project_table(dist: "Distribution", config: dict, root_dir: StrPath): project_table = config.get("project", {}).copy() if not project_table: return # short-circuit @@ -85,7 +84,7 @@ def _apply_project_table(dist: "Distribution", config: dict, root_dir: _Path): _set_config(dist, corresp, value) -def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path): +def _apply_tool_table(dist: "Distribution", config: dict, filename: StrPath): tool_table = config.get("tool", {}).get("setuptools", {}) if not tool_table: return # short-circuit @@ -153,7 +152,7 @@ def _guess_content_type(file: str) -> Optional[str]: raise ValueError(f"Undefined content type for {file}, {msg}") -def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path): +def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: StrPath): from setuptools.config import expand if isinstance(val, str): @@ -174,7 +173,7 @@ def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path): dist._referenced_files.add(cast(str, file)) -def _license(dist: "Distribution", val: dict, root_dir: _Path): +def _license(dist: "Distribution", val: dict, root_dir: StrPath): from setuptools.config import expand if "file" in val: @@ -184,7 +183,7 @@ def _license(dist: "Distribution", val: dict, root_dir: _Path): _set_config(dist, "license", val["text"]) -def _people(dist: "Distribution", val: List[dict], _root_dir: _Path, kind: str): +def _people(dist: "Distribution", val: List[dict], _root_dir: StrPath, kind: str): field = [] email_field = [] for person in val: @@ -244,7 +243,7 @@ def _unify_entry_points(project_table: dict): # intentional (for resetting configurations that are missing `dynamic`). -def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path): +def _copy_command_options(pyproject: dict, dist: "Distribution", filename: StrPath): tool_table = pyproject.get("tool", {}) cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {}) valid_options = _valid_command_options(cmdclass) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index b48fc1187e..e23a762cf5 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -46,7 +46,7 @@ from distutils.errors import DistutilsOptionError -from .._path import same_path as _same_path +from .._path import same_path as _same_path, StrPath from ..warnings import SetuptoolsWarning if TYPE_CHECKING: @@ -55,7 +55,6 @@ from distutils.dist import DistributionMetadata # noqa chain_iter = chain.from_iterable -_Path = Union[str, os.PathLike] _K = TypeVar("_K") _V = TypeVar("_V", covariant=True) @@ -88,7 +87,7 @@ def __getattr__(self, attr): def glob_relative( - patterns: Iterable[str], root_dir: Optional[_Path] = None + patterns: Iterable[str], root_dir: Optional[StrPath] = None ) -> List[str]: """Expand the list of glob patterns, but preserving relative paths. 
@@ -120,7 +119,7 @@ def glob_relative( return expanded_values -def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) -> str: +def read_files(filepaths: Union[str, bytes, Iterable[StrPath]], root_dir=None) -> str: """Return the content of the files concatenated using ``\n`` as str This function is sandboxed and won't reach anything outside ``root_dir`` @@ -138,7 +137,7 @@ def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) -> ) -def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]: +def _filter_existing_files(filepaths: Iterable[StrPath]) -> Iterator[StrPath]: for path in filepaths: if os.path.isfile(path): yield path @@ -146,12 +145,12 @@ def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]: SetuptoolsWarning.emit(f"File {path!r} cannot be found") -def _read_file(filepath: Union[bytes, _Path]) -> str: +def _read_file(filepath: Union[bytes, StrPath]) -> str: with open(filepath, encoding='utf-8') as f: return f.read() -def _assert_local(filepath: _Path, root_dir: str): +def _assert_local(filepath: StrPath, root_dir: str): if Path(os.path.abspath(root_dir)) not in Path(os.path.abspath(filepath)).parents: msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})" raise DistutilsOptionError(msg) @@ -162,7 +161,7 @@ def _assert_local(filepath: _Path, root_dir: str): def read_attr( attr_desc: str, package_dir: Optional[Mapping[str, str]] = None, - root_dir: Optional[_Path] = None, + root_dir: Optional[StrPath] = None, ): """Reads the value of an attribute from a module. @@ -197,7 +196,7 @@ def read_attr( return getattr(module, attr_name) -def _find_spec(module_name: str, module_path: Optional[_Path]) -> ModuleSpec: +def _find_spec(module_name: str, module_path: Optional[StrPath]) -> ModuleSpec: spec = importlib.util.spec_from_file_location(module_name, module_path) spec = spec or importlib.util.find_spec(module_name) @@ -218,8 +217,8 @@ def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType: def _find_module( - module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: _Path -) -> Tuple[_Path, Optional[str], str]: + module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: StrPath +) -> Tuple[StrPath, Optional[str], str]: """Given a module (that could normally be imported by ``module_name`` after the build is complete), find the path to the parent directory where it is contained and the canonical name that could be used to import it @@ -254,7 +253,7 @@ def _find_module( def resolve_class( qualified_class_name: str, package_dir: Optional[Mapping[str, str]] = None, - root_dir: Optional[_Path] = None, + root_dir: Optional[StrPath] = None, ) -> Callable: """Given a qualified class name, return the associated class object""" root_dir = root_dir or os.getcwd() @@ -270,7 +269,7 @@ def resolve_class( def cmdclass( values: Dict[str, str], package_dir: Optional[Mapping[str, str]] = None, - root_dir: Optional[_Path] = None, + root_dir: Optional[StrPath] = None, ) -> Dict[str, Callable]: """Given a dictionary mapping command names to strings for qualified class names, apply :func:`resolve_class` to the dict values. 
@@ -282,7 +281,7 @@ def find_packages( *, namespaces=True, fill_package_dir: Optional[Dict[str, str]] = None, - root_dir: Optional[_Path] = None, + root_dir: Optional[StrPath] = None, **kwargs, ) -> List[str]: """Works similarly to :func:`setuptools.find_packages`, but with all @@ -331,7 +330,7 @@ def find_packages( return packages -def _nest_path(parent: _Path, path: _Path) -> str: +def _nest_path(parent: StrPath, path: StrPath) -> str: path = parent if path in {".", ""} else os.path.join(parent, path) return os.path.normpath(path) @@ -361,7 +360,7 @@ def canonic_package_data(package_data: dict) -> dict: def canonic_data_files( - data_files: Union[list, dict], root_dir: Optional[_Path] = None + data_files: Union[list, dict], root_dir: Optional[StrPath] = None ) -> List[Tuple[str, List[str]]]: """For compatibility with ``setup.py``, ``data_files`` should be a list of pairs instead of a dict. diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index 9b9788eff4..5eb9421f1f 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -13,8 +13,9 @@ import os from contextlib import contextmanager from functools import partial -from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Set, Union +from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Set +from .._path import StrPath from ..errors import FileError, InvalidConfigError from ..warnings import SetuptoolsWarning from . import expand as _expand @@ -24,18 +25,17 @@ if TYPE_CHECKING: from setuptools.dist import Distribution # noqa -_Path = Union[str, os.PathLike] _logger = logging.getLogger(__name__) -def load_file(filepath: _Path) -> dict: +def load_file(filepath: StrPath) -> dict: from ..compat.py310 import tomllib with open(filepath, "rb") as file: return tomllib.load(file) -def validate(config: dict, filepath: _Path) -> bool: +def validate(config: dict, filepath: StrPath) -> bool: from . 
import _validate_pyproject as validator trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier") @@ -58,7 +58,7 @@ def validate(config: dict, filepath: _Path) -> bool: def apply_configuration( dist: "Distribution", - filepath: _Path, + filepath: StrPath, ignore_option_errors=False, ) -> "Distribution": """Apply the configuration from a ``pyproject.toml`` file into an existing @@ -69,7 +69,7 @@ def apply_configuration( def read_configuration( - filepath: _Path, + filepath: StrPath, expand=True, ignore_option_errors=False, dist: Optional["Distribution"] = None, @@ -136,7 +136,7 @@ def read_configuration( def expand_configuration( config: dict, - root_dir: Optional[_Path] = None, + root_dir: Optional[StrPath] = None, ignore_option_errors: bool = False, dist: Optional["Distribution"] = None, ) -> dict: @@ -161,7 +161,7 @@ class _ConfigExpander: def __init__( self, config: dict, - root_dir: Optional[_Path] = None, + root_dir: Optional[StrPath] = None, ignore_option_errors: bool = False, dist: Optional["Distribution"] = None, ): diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index a7f02714cb..cfa43a57b5 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -30,6 +30,7 @@ Union, ) +from .._path import StrPath from ..errors import FileError, OptionError from ..extern.packaging.markers import default_environment as marker_env from ..extern.packaging.requirements import InvalidRequirement, Requirement @@ -43,7 +44,6 @@ from setuptools.dist import Distribution # noqa -_Path = Union[str, os.PathLike] SingleCommandOptions = Dict["str", Tuple["str", Any]] """Dict that associate the name of the options of a particular command to a tuple. The first element of the tuple indicates the origin of the option value @@ -55,7 +55,7 @@ def read_configuration( - filepath: _Path, find_others=False, ignore_option_errors=False + filepath: StrPath, find_others=False, ignore_option_errors=False ) -> dict: """Read given configuration file and returns options from it as a dict. @@ -80,7 +80,7 @@ def read_configuration( return configuration_to_dict(handlers) -def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution": +def apply_configuration(dist: "Distribution", filepath: StrPath) -> "Distribution": """Apply the configuration from a ``setup.cfg`` file into an existing distribution object. """ @@ -91,8 +91,8 @@ def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution" def _apply( dist: "Distribution", - filepath: _Path, - other_files: Iterable[_Path] = (), + filepath: StrPath, + other_files: Iterable[StrPath] = (), ignore_option_errors: bool = False, ) -> Tuple["ConfigHandler", ...]: """Read configuration from ``filepath`` and applies to the ``dist`` object.""" @@ -371,7 +371,7 @@ def parser(value): return parser - def _parse_file(self, value, root_dir: _Path): + def _parse_file(self, value, root_dir: StrPath): """Represents value as a string, allowing including text from nearest files using `file:` directive. @@ -397,7 +397,7 @@ def _parse_file(self, value, root_dir: _Path): self._referenced_files.update(filepaths) return expand.read_files(filepaths, root_dir) - def _parse_attr(self, value, package_dir, root_dir: _Path): + def _parse_attr(self, value, package_dir, root_dir: StrPath): """Represents value as a module attribute. 
Examples: @@ -539,7 +539,7 @@ def __init__( ignore_option_errors: bool, ensure_discovered: expand.EnsurePackagesDiscovered, package_dir: Optional[dict] = None, - root_dir: _Path = os.curdir, + root_dir: StrPath = os.curdir, ): super().__init__(target_obj, options, ignore_option_errors, ensure_discovered) self.package_dir = package_dir diff --git a/setuptools/discovery.py b/setuptools/discovery.py index 50a948750f..571be12bf4 100644 --- a/setuptools/discovery.py +++ b/setuptools/discovery.py @@ -51,15 +51,14 @@ Mapping, Optional, Tuple, - Union, ) import _distutils_hack.override # noqa: F401 +from ._path import StrPath from distutils import log from distutils.util import convert_path -_Path = Union[str, os.PathLike] StrIter = Iterator[str] chain_iter = itertools.chain.from_iterable @@ -68,7 +67,7 @@ from setuptools import Distribution # noqa -def _valid_name(path: _Path) -> bool: +def _valid_name(path: StrPath) -> bool: # Ignore invalid names that cannot be imported directly return os.path.basename(path).isidentifier() @@ -98,7 +97,7 @@ class _Finder: @classmethod def find( cls, - where: _Path = '.', + where: StrPath = '.', exclude: Iterable[str] = (), include: Iterable[str] = ('*',), ) -> List[str]: @@ -131,7 +130,7 @@ def find( ) @classmethod - def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter: + def _find_iter(cls, where: StrPath, exclude: _Filter, include: _Filter) -> StrIter: raise NotImplementedError @@ -143,7 +142,7 @@ class PackageFinder(_Finder): ALWAYS_EXCLUDE = ("ez_setup", "*__pycache__") @classmethod - def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter: + def _find_iter(cls, where: StrPath, exclude: _Filter, include: _Filter) -> StrIter: """ All the packages found in 'where' that pass the 'include' filter, but not the 'exclude' filter. 
@@ -175,14 +174,14 @@ def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter dirs.append(dir) @staticmethod - def _looks_like_package(path: _Path, _package_name: str) -> bool: + def _looks_like_package(path: StrPath, _package_name: str) -> bool: """Does a directory look like a package?""" return os.path.isfile(os.path.join(path, '__init__.py')) class PEP420PackageFinder(PackageFinder): @staticmethod - def _looks_like_package(_path: _Path, _package_name: str) -> bool: + def _looks_like_package(_path: StrPath, _package_name: str) -> bool: return True @@ -192,7 +191,7 @@ class ModuleFinder(_Finder): """ @classmethod - def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter: + def _find_iter(cls, where: StrPath, exclude: _Filter, include: _Filter) -> StrIter: for file in glob(os.path.join(where, "*.py")): module, _ext = os.path.splitext(os.path.basename(file)) @@ -255,7 +254,7 @@ class FlatLayoutPackageFinder(PEP420PackageFinder): """Reserved package names""" @staticmethod - def _looks_like_package(_path: _Path, package_name: str) -> bool: + def _looks_like_package(_path: StrPath, package_name: str) -> bool: names = package_name.split('.') # Consider PEP 561 root_pkg_is_valid = names[0].isidentifier() or names[0].endswith("-stubs") @@ -292,7 +291,7 @@ class FlatLayoutModuleFinder(ModuleFinder): """Reserved top-level module names""" -def _find_packages_within(root_pkg: str, pkg_dir: _Path) -> List[str]: +def _find_packages_within(root_pkg: str, pkg_dir: StrPath) -> List[str]: nested = PEP420PackageFinder.find(pkg_dir) return [root_pkg] + [".".join((root_pkg, n)) for n in nested] @@ -325,7 +324,7 @@ def _ignore_ext_modules(self): self._skip_ext_modules = True @property - def _root_dir(self) -> _Path: + def _root_dir(self) -> StrPath: # The best is to wait until `src_root` is set in dist, before using _root_dir. return self.dist.src_root or os.curdir @@ -551,7 +550,7 @@ def remove_stubs(packages: List[str]) -> List[str]: def find_parent_package( - packages: List[str], package_dir: Mapping[str, str], root_dir: _Path + packages: List[str], package_dir: Mapping[str, str], root_dir: StrPath ) -> Optional[str]: """Find the parent package that is not a namespace.""" packages = sorted(packages, key=len) @@ -575,7 +574,7 @@ def find_parent_package( def find_package_path( - name: str, package_dir: Mapping[str, str], root_dir: _Path + name: str, package_dir: Mapping[str, str], root_dir: StrPath ) -> str: """Given a package name, return the path where it should be found on disk, considering the ``package_dir`` option. 
@@ -608,7 +607,7 @@ def find_package_path( return os.path.join(root_dir, *parent.split("/"), *parts) -def construct_package_dir(packages: List[str], package_path: _Path) -> Dict[str, str]: +def construct_package_dir(packages: List[str], package_path: StrPath) -> Dict[str, str]: parent_pkgs = remove_nested_packages(packages) prefix = Path(package_path).parts return {pkg: "/".join([*prefix, *pkg.split(".")]) for pkg in parent_pkgs} diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py index 318885a6bd..6a40f3bfd7 100644 --- a/setuptools/tests/config/test_pyprojecttoml.py +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -4,7 +4,7 @@ import pytest import tomli_w -from path import Path as _Path +from path import Path from setuptools.config.pyprojecttoml import ( read_configuration, @@ -352,7 +352,7 @@ def test_include_package_data_in_setuppy(tmp_path): setuppy = tmp_path / "setup.py" setuppy.write_text("__import__('setuptools').setup(include_package_data=False)") - with _Path(tmp_path): + with Path(tmp_path): dist = distutils.core.run_setup("setup.py", {}, stop_after="config") assert dist.get_name() == "myproj" diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py index 85cb09730c..ef2979d4f5 100644 --- a/setuptools/tests/test_config_discovery.py +++ b/setuptools/tests/test_config_discovery.py @@ -13,7 +13,7 @@ import pytest import jaraco.path -from path import Path as _Path +from path import Path from .contexts import quiet from .integration.helpers import get_sdist_members, get_wheel_members, run @@ -304,7 +304,7 @@ def test_setupcfg_metadata(self, tmp_path, folder, opts): assert dist.package_dir package_path = find_package_path("pkg", dist.package_dir, tmp_path) assert os.path.exists(package_path) - assert folder in _Path(package_path).parts() + assert folder in Path(package_path).parts() _run_build(tmp_path, "--sdist") dist_file = tmp_path / "dist/pkg-42.tar.gz" @@ -607,14 +607,14 @@ def _get_dist(dist_path, attrs): script = dist_path / 'setup.py' if script.exists(): - with _Path(dist_path): + with Path(dist_path): dist = distutils.core.run_setup("setup.py", {}, stop_after="init") else: dist = Distribution(attrs) dist.src_root = root dist.script_name = "setup.py" - with _Path(dist_path): + with Path(dist_path): dist.parse_config_files() dist.set_defaults() @@ -627,7 +627,7 @@ def _run_sdist_programatically(dist_path, attrs): cmd.ensure_finalized() assert cmd.distribution.packages or cmd.distribution.py_modules - with quiet(), _Path(dist_path): + with quiet(), Path(dist_path): cmd.run() return dist, cmd diff --git a/tools/generate_validation_code.py b/tools/generate_validation_code.py index 53bc8ad650..b575fb1e1c 100644 --- a/tools/generate_validation_code.py +++ b/tools/generate_validation_code.py @@ -1,10 +1,12 @@ +from os import PathLike import subprocess import sys from pathlib import Path +from typing import Union -def generate_pyproject_validation(dest: Path): +def generate_pyproject_validation(dest: Union[str, PathLike]): """ Generates validation code for ``pyproject.toml`` based on JSON schemas and the ``validate-pyproject`` library. 
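
The hunks above replace each module-local ``_Path = Union[str, os.PathLike]`` alias with a single ``StrPath`` imported from ``setuptools._path`` (``tools/generate_validation_code.py`` spells the union out inline instead). A minimal sketch of the pattern, assuming a definition along these lines in ``setuptools/_path.py`` (the file itself is not quoted in these hunks, so the alias body here is an assumption):

    # _path.py -- assumed, minimal form of the shared alias
    import os
    from typing import Union

    StrPath = Union[str, "os.PathLike[str]"]  # plain strings or PathLike paths

    # consumers annotate once and accept both str and pathlib.Path arguments
    def read_bytes(filepath: StrPath) -> bytes:
        with open(filepath, "rb") as file:
            return file.read()

Centralizing the alias keeps ``expand``, ``pyprojecttoml``, ``setupcfg`` and ``discovery`` in agreement without each module re-declaring the same union.
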
From 2118a5521581feb79538ac9253d21302208b53c9 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 Feb 2024 23:07:09 -0500 Subject: [PATCH 0351/1761] pkg_resources: Clarify some methods return `bytes`, not `str` --- pkg_resources/__init__.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 10c6a9cd06..aa77d4a3f0 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -566,8 +566,8 @@ def get_resource_stream(self, manager, resource_name): `manager` must be an ``IResourceManager``""" - def get_resource_string(self, manager, resource_name): - """Return a string containing the contents of `resource_name` + def get_resource_string(self, manager, resource_name) -> bytes: + """Return a bytes string containing the contents of `resource_name` `manager` must be an ``IResourceManager``""" @@ -1203,8 +1203,8 @@ def resource_stream(self, package_or_requirement, resource_name): self, resource_name ) - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" + def resource_string(self, package_or_requirement, resource_name) -> bytes: + """Return specified resource as a bytes string""" return get_provider(package_or_requirement).get_resource_string( self, resource_name ) @@ -1479,7 +1479,7 @@ def get_resource_filename(self, manager, resource_name): def get_resource_stream(self, manager, resource_name): return io.BytesIO(self.get_resource_string(manager, resource_name)) - def get_resource_string(self, manager, resource_name): + def get_resource_string(self, manager, resource_name) -> bytes: return self._get(self._fn(self.module_path, resource_name)) def has_resource(self, resource_name): @@ -1649,7 +1649,7 @@ def _validate_resource_path(path): DeprecationWarning, ) - def _get(self, path): + def _get(self, path) -> bytes: if hasattr(self.loader, 'get_data'): return self.loader.get_data(path) raise NotImplementedError( @@ -1706,7 +1706,7 @@ def _listdir(self, path): def get_resource_stream(self, manager, resource_name): return open(self._fn(self.module_path, resource_name), 'rb') - def _get(self, path): + def _get(self, path) -> bytes: with open(path, 'rb') as stream: return stream.read() @@ -1731,8 +1731,8 @@ class EmptyProvider(NullProvider): _isdir = _has = lambda self, path: False - def _get(self, path): - return '' + def _get(self, path) -> bytes: + return b'' def _listdir(self, path): return [] From 5f00f07838c35a35db0c898507de3bbde4176c0d Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 Feb 2024 23:11:27 -0500 Subject: [PATCH 0352/1761] Create 4243.bugfix.rst --- newsfragments/4243.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4243.bugfix.rst diff --git a/newsfragments/4243.bugfix.rst b/newsfragments/4243.bugfix.rst new file mode 100644 index 0000000000..40dbdc3730 --- /dev/null +++ b/newsfragments/4243.bugfix.rst @@ -0,0 +1 @@ +Clarify some `pkg_resources` methods return `bytes`, not `str`. Also return an empty `bytes` in `EmptyProvider._get` -- by :user:`Avasam` From f93a094256d4c2946768a7ed38302a35944a4f88 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 Feb 2024 23:19:18 -0500 Subject: [PATCH 0353/1761] Return an empty list by default in `ResourceManager.cleanup_resources` Subclasses are expected to return a list. There's no mention of this method potentially returning `None` in the docstring. 
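
Taken together with the ``bytes`` clarifications above, the caller-facing effect can be sketched like this (the resource name is hypothetical):

    import pkg_resources

    # resource_string has always produced raw file contents; the annotations
    # now say so: the result is bytes, and callers decode it themselves.
    text = pkg_resources.resource_string(__name__, 'data.txt').decode('utf-8')

    # cleanup_resources now always hands back a list, so the result can be
    # iterated unconditionally instead of being guarded against None.
    for name in pkg_resources.cleanup_resources():
        print('could not remove:', name)
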
--- pkg_resources/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 10c6a9cd06..f2e5bb3645 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -1339,7 +1339,7 @@ def set_extraction_path(self, path): self.extraction_path = path - def cleanup_resources(self, force=False): + def cleanup_resources(self, force=False) -> list[str]: """ Delete all extracted resource files and directories, returning a list of the file and directory names that could not be successfully removed. @@ -1351,6 +1351,7 @@ def cleanup_resources(self, force=False): directory used for extractions. """ # XXX + return [] def get_default_cache(): From 19f57d0652c1bed18558c8d552e10a497d07cc01 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 Feb 2024 23:21:27 -0500 Subject: [PATCH 0354/1761] Update newsfragments/4243.bugfix.rst --- newsfragments/4243.bugfix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/newsfragments/4243.bugfix.rst b/newsfragments/4243.bugfix.rst index 40dbdc3730..e8212721f3 100644 --- a/newsfragments/4243.bugfix.rst +++ b/newsfragments/4243.bugfix.rst @@ -1 +1 @@ -Clarify some `pkg_resources` methods return `bytes`, not `str`. Also return an empty `bytes` in `EmptyProvider._get` -- by :user:`Avasam` +Clarify some `pkg_resources` methods return `bytes`, not `str`. Also return an empty `bytes` in ``EmptyProvider._get`` -- by :user:`Avasam` From 29b36fa20c0350c3c998511890e6f578d91f4a87 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 Feb 2024 23:24:08 -0500 Subject: [PATCH 0355/1761] Create 4244.bugfix.rst --- newsfragments/4244.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4244.bugfix.rst diff --git a/newsfragments/4244.bugfix.rst b/newsfragments/4244.bugfix.rst new file mode 100644 index 0000000000..5d606de718 --- /dev/null +++ b/newsfragments/4244.bugfix.rst @@ -0,0 +1 @@ +Return an empty `list` by default in ``pkg_resources.ResourceManager.cleanup_resources`` -- by :user:`Avasam` From 5b538e1879edaa574a3c4cb7119bbddf3ee6cc9d Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 Feb 2024 23:38:06 -0500 Subject: [PATCH 0356/1761] Update __init__.py --- pkg_resources/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index f2e5bb3645..67883b11f1 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -27,7 +27,7 @@ import time import re import types -from typing import Protocol +from typing import List, Protocol import zipfile import zipimport import warnings @@ -1339,7 +1339,7 @@ def set_extraction_path(self, path): self.extraction_path = path - def cleanup_resources(self, force=False) -> list[str]: + def cleanup_resources(self, force=False) -> List[str]: """ Delete all extracted resource files and directories, returning a list of the file and directory names that could not be successfully removed. 
From 219a000b299f0d610dbb1b766286a379e4d2ba06 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 Feb 2024 23:53:17 -0500 Subject: [PATCH 0357/1761] Prevent `pkg_resources._find_adapter` from ever returning `None` --- pkg_resources/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 10c6a9cd06..5e9036564a 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -3207,7 +3207,9 @@ def _find_adapter(registry, ob): for t in types: if t in registry: return registry[t] - return None + # _find_adapter would previously return None, and immediatly be called. + # So we're raising a TypeError to keep backward compatibility if anyone depended on that behaviour. + raise TypeError(f"Could not find adapter for {registry} and {ob}") def ensure_directory(path): From d1c5444126aeacefee3949b30136446ab99979d8 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 10:33:21 -0500 Subject: [PATCH 0358/1761] Enable complexity check and pycodestyle warnings. Closes jaraco/skeleton#110. --- ruff.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ruff.toml b/ruff.toml index e61ca8b0d6..6c5b00092e 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,4 +1,8 @@ [lint] +select = [ + "C901", + "W", +] ignore = [ # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules "W191", From 853d0f5feffb01abc3f190c55f48e76ae8a4d24c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 09:52:04 -0500 Subject: [PATCH 0359/1761] Extract a method for customizing the compiler for macOS. --- distutils/sysconfig.py | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index fac3259f88..a88fd021df 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -266,6 +266,27 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): ) +def _customize_macos(): + if sys.platform != "darwin": + return + + # Perform first-time customization of compiler-related + # config vars on OS X now that we know we need a compiler. + # This is primarily to support Pythons from binary + # installers. The kind and paths to build tools on + # the user system may vary significantly from the system + # that Python itself was built on. Also the user OS + # version and build tools may not support the same set + # of CPU architectures for universal builds. + global _config_vars + # Use get_config_var() to ensure _config_vars is initialized. + if not get_config_var('CUSTOMIZED_OSX_COMPILER'): + import _osx_support + + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + + def customize_compiler(compiler): # noqa: C901 """Do any platform-specific customization of a CCompiler instance. @@ -273,22 +294,7 @@ def customize_compiler(compiler): # noqa: C901 varies across Unices and is stored in Python's Makefile. """ if compiler.compiler_type == "unix": - if sys.platform == "darwin": - # Perform first-time customization of compiler-related - # config vars on OS X now that we know we need a compiler. - # This is primarily to support Pythons from binary - # installers. The kind and paths to build tools on - # the user system may vary significantly from the system - # that Python itself was built on. Also the user OS - # version and build tools may not support the same set - # of CPU architectures for universal builds. 
- global _config_vars - # Use get_config_var() to ensure _config_vars is initialized. - if not get_config_var('CUSTOMIZED_OSX_COMPILER'): - import _osx_support - - _osx_support.customize_compiler(_config_vars) - _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + _customize_macos() ( cc, From 9ce8a1088bb0053550debabb73fb92c763f4e7b3 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 10:03:33 -0500 Subject: [PATCH 0360/1761] Convert comment to docstring; update wording. --- distutils/sysconfig.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index a88fd021df..b1d8e7c7ae 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -267,17 +267,20 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): def _customize_macos(): + """ + Perform first-time customization of compiler-related + config vars on macOS. Use after a compiler is known + to be needed. This customization exists primarily to support Pythons + from binary installers. The kind and paths to build tools on + the user system may vary significantly from the system + that Python itself was built on. Also the user OS + version and build tools may not support the same set + of CPU architectures for universal builds. + """ + if sys.platform != "darwin": return - # Perform first-time customization of compiler-related - # config vars on OS X now that we know we need a compiler. - # This is primarily to support Pythons from binary - # installers. The kind and paths to build tools on - # the user system may vary significantly from the system - # that Python itself was built on. Also the user OS - # version and build tools may not support the same set - # of CPU architectures for universal builds. global _config_vars # Use get_config_var() to ensure _config_vars is initialized. if not get_config_var('CUSTOMIZED_OSX_COMPILER'): From e58492bee26dbe58c600a72871144dd1a2a45f26 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 10:14:16 -0500 Subject: [PATCH 0361/1761] Create a fixture to patch-out compiler customization on macOS. 
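
The extracted ``_customize_macos`` hook above still guards itself with the manual ``CUSTOMIZED_OSX_COMPILER`` flag; a patch further below in this series swaps that flag for ``functools.lru_cache``, which gives the same run-once guarantee with less global state. A minimal sketch of that idiom, with hypothetical names:

    import functools

    @functools.lru_cache()
    def one_time_setup():
        # The body executes on the first call only; later calls return the
        # cached result without re-running the side effects.
        print('customizing...')
        return True

    one_time_setup()  # prints 'customizing...'
    one_time_setup()  # silent: answered from the cache
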
--- conftest.py | 7 +++++++ distutils/tests/test_sysconfig.py | 3 +-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/conftest.py b/conftest.py index ca808a6ab7..06ce3bc6c8 100644 --- a/conftest.py +++ b/conftest.py @@ -152,3 +152,10 @@ def temp_home(tmp_path, monkeypatch): def fake_home(fs, monkeypatch): home = fs.create_dir('/fakehome') return _set_home(monkeypatch, pathlib.Path(home.path)) + + +@pytest.fixture +def disable_macos_customization(monkeypatch): + from distutils import sysconfig + + monkeypatch.setattr(sysconfig, '_customize_macos', lambda: None) diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py index 6cbf51681b..f656be6089 100644 --- a/distutils/tests/test_sysconfig.py +++ b/distutils/tests/test_sysconfig.py @@ -98,8 +98,6 @@ def set_executables(self, **kw): 'CCSHARED': '--sc-ccshared', 'LDSHARED': 'sc_ldshared', 'SHLIB_SUFFIX': 'sc_shutil_suffix', - # On macOS, disable _osx_support.customize_compiler() - 'CUSTOMIZED_OSX_COMPILER': 'True', } comp = compiler() @@ -111,6 +109,7 @@ def set_executables(self, **kw): return comp @pytest.mark.skipif("get_default_compiler() != 'unix'") + @pytest.mark.usefixtures('disable_macos_customization') def test_customize_compiler(self): # Make sure that sysconfig._config_vars is initialized sysconfig.get_config_vars() From cc455d09fb862d4827e4efd7f6ae858fa5dde4ff Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 10:14:54 -0500 Subject: [PATCH 0362/1761] Utilize the fixture for disabling compiler customization on macOS for cxx test. Closes #231. --- distutils/tests/test_unixccompiler.py | 1 + 1 file changed, 1 insertion(+) diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py index a313da3e75..2763db9c02 100644 --- a/distutils/tests/test_unixccompiler.py +++ b/distutils/tests/test_unixccompiler.py @@ -248,6 +248,7 @@ def gcvs(*args, _orig=sysconfig.get_config_vars): assert self.cc.linker_so[0] == 'my_cc' @pytest.mark.skipif('platform.system == "Windows"') + @pytest.mark.usefixtures('disable_macos_customization') def test_cc_overrides_ldshared_for_cxx_correctly(self): """ Ensure that setting CC env variable also changes default linker From 9e83319a786cf55e6c3f8d3b45acba1f577924fe Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 10:25:15 -0500 Subject: [PATCH 0363/1761] Limit mutating global state and simply rely on functools.lru_cache to limit the behavior to a single invocation. --- distutils/sysconfig.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index b1d8e7c7ae..5fb811c406 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -10,6 +10,7 @@ """ import os +import functools import re import sys import sysconfig @@ -266,6 +267,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): ) +@functools.lru_cache() def _customize_macos(): """ Perform first-time customization of compiler-related @@ -278,16 +280,9 @@ def _customize_macos(): of CPU architectures for universal builds. """ - if sys.platform != "darwin": - return - - global _config_vars - # Use get_config_var() to ensure _config_vars is initialized. 
- if not get_config_var('CUSTOMIZED_OSX_COMPILER'): - import _osx_support - - _osx_support.customize_compiler(_config_vars) - _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + sys.platform == "darwin" and __import__('_osx_support').customize_compiler( + get_config_vars() + ) def customize_compiler(compiler): # noqa: C901 From 2c937116cc0dcd9b26b6070e89a3dc5dcbedc2ae Mon Sep 17 00:00:00 2001 From: Samuel Cormier-Iijima Date: Thu, 14 Dec 2023 09:24:14 -0500 Subject: [PATCH 0364/1761] Distutils C++ support Upstreamed fix from nix, see patch here: https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/python-modules/setuptools/setuptools-distutils-C%2B%2B.patch --- distutils/cygwinccompiler.py | 17 ++++++++++++++--- distutils/sysconfig.py | 20 ++++++++++++++++++-- distutils/unixccompiler.py | 15 ++++++++++++--- 3 files changed, 44 insertions(+), 8 deletions(-) diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py index b3dbc3be15..c683e3eeac 100644 --- a/distutils/cygwinccompiler.py +++ b/distutils/cygwinccompiler.py @@ -101,14 +101,19 @@ def __init__(self, verbose=0, dry_run=0, force=0): self.cxx = os.environ.get('CXX', 'g++') self.linker_dll = self.cc + self.linker_dll_cxx = self.cxx shared_option = "-shared" self.set_executables( compiler='%s -mcygwin -O -Wall' % self.cc, compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, compiler_cxx='%s -mcygwin -O -Wall' % self.cxx, + compiler_so_cxx='%s -mcygwin -mdll -O -Wall' % self.cxx, linker_exe='%s -mcygwin' % self.cc, linker_so=('{} -mcygwin {}'.format(self.linker_dll, shared_option)), + linker_exe_cxx='%s -mcygwin' % self.cxx, + linker_so_cxx=('%s -mcygwin %s' % + (self.linker_dll_cxx, shared_option)), ) # Include the appropriate MSVC runtime library if Python was built @@ -140,9 +145,12 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): raise CompileError(msg) else: # for other files use the C-compiler try: - self.spawn( - self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs - ) + if self.detect_language(src) == 'c++': + self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + + extra_postargs) + else: + self.spawn( + self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs) except DistutilsExecError as msg: raise CompileError(msg) @@ -278,9 +286,12 @@ def __init__(self, verbose=0, dry_run=0, force=0): self.set_executables( compiler='%s -O -Wall' % self.cc, compiler_so='%s -mdll -O -Wall' % self.cc, + compiler_so_cxx='%s -mdll -O -Wall' % self.cxx, compiler_cxx='%s -O -Wall' % self.cxx, linker_exe='%s' % self.cc, linker_so='{} {}'.format(self.linker_dll, shared_option), + linker_exe_cxx='%s' % self.cxx, + linker_so_cxx='%s %s' % (self.linker_dll_cxx, shared_option) ) def runtime_library_dir_option(self, dir): diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index 5fb811c406..bab4ab693a 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -300,6 +300,7 @@ def customize_compiler(compiler): # noqa: C901 cflags, ccshared, ldshared, + ldcxxshared, shlib_suffix, ar, ar_flags, @@ -309,11 +310,14 @@ def customize_compiler(compiler): # noqa: C901 'CFLAGS', 'CCSHARED', 'LDSHARED', + 'LDCXXSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS', ) + cxxflags = cflags + if 'CC' in os.environ: newcc = os.environ['CC'] if 'LDSHARED' not in os.environ and ldshared.startswith(cc): @@ -325,19 +329,27 @@ def customize_compiler(compiler): # noqa: C901 cxx = os.environ['CXX'] if 'LDSHARED' in os.environ: ldshared = os.environ['LDSHARED'] + if 'LDCXXSHARED' in os.environ: + 
ldcxxshared = os.environ['LDCXXSHARED'] if 'CPP' in os.environ: cpp = os.environ['CPP'] else: cpp = cc + " -E" # not always if 'LDFLAGS' in os.environ: ldshared = ldshared + ' ' + os.environ['LDFLAGS'] + ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] if 'CFLAGS' in os.environ: - cflags = cflags + ' ' + os.environ['CFLAGS'] + cflags = os.environ['CFLAGS'] ldshared = ldshared + ' ' + os.environ['CFLAGS'] + if 'CXXFLAGS' in os.environ: + cxxflags = os.environ['CXXFLAGS'] + ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] if 'CPPFLAGS' in os.environ: cpp = cpp + ' ' + os.environ['CPPFLAGS'] cflags = cflags + ' ' + os.environ['CPPFLAGS'] + cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] + ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] if 'AR' in os.environ: ar = os.environ['AR'] if 'ARFLAGS' in os.environ: @@ -346,13 +358,17 @@ def customize_compiler(compiler): # noqa: C901 archiver = ar + ' ' + ar_flags cc_cmd = cc + ' ' + cflags + cxx_cmd = cxx + ' ' + cxxflags compiler.set_executables( preprocessor=cpp, compiler=cc_cmd, compiler_so=cc_cmd + ' ' + ccshared, - compiler_cxx=cxx, + compiler_cxx=cxx_cmd, + compiler_so_cxx=cxx_cmd + ' ' + ccshared, linker_so=ldshared, + linker_so_cxx=ldcxxshared, linker_exe=cc, + linker_exe_cxx=cxx, archiver=archiver, ) diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py index d749fe2529..0919868af6 100644 --- a/distutils/unixccompiler.py +++ b/distutils/unixccompiler.py @@ -115,9 +115,12 @@ class UnixCCompiler(CCompiler): 'preprocessor': None, 'compiler': ["cc"], 'compiler_so': ["cc"], - 'compiler_cxx': ["cc"], + 'compiler_cxx': ["c++"], + 'compiler_so_cxx': ["c++"], 'linker_so': ["cc", "-shared"], + 'linker_so_cxx': ["c++", "-shared"], 'linker_exe': ["cc"], + 'linker_exe_cxx': ["c++", "-shared"], 'archiver': ["ar", "-cr"], 'ranlib': None, } @@ -181,8 +184,13 @@ def preprocess( def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): compiler_so = compiler_fixup(self.compiler_so, cc_args + extra_postargs) + compiler_so_cxx = compiler_fixup(self.compiler_so_cxx, cc_args + extra_postargs) try: - self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) + if self.detect_language(src) == 'c++': + self.spawn(compiler_so_cxx + cc_args + [ src, '-o', obj] + + extra_postargs) + else: + self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) except DistutilsExecError as msg: raise CompileError(msg) @@ -250,7 +258,8 @@ def link( # building an executable or linker_so (with shared options) # when building a shared library. building_exe = target_desc == CCompiler.EXECUTABLE - linker = (self.linker_exe if building_exe else self.linker_so)[:] + linker = (self.linker_exe if building_exe else (self.linker_so_cxx if + target_lang == "c++" else self.linker_so))[:] if target_lang == "c++" and self.compiler_cxx: env, linker_ne = _split_env(linker) From 26f4723dbcbc32f44be33e849eca86f15317612a Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Tue, 13 Feb 2024 13:29:18 -0500 Subject: [PATCH 0365/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?= =?UTF-8?q?s=20(delint).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- distutils/cygwinccompiler.py | 16 ++++++++++------ distutils/unixccompiler.py | 14 ++++++++++---- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py index c683e3eeac..d767c6d305 100644 --- a/distutils/cygwinccompiler.py +++ b/distutils/cygwinccompiler.py @@ -112,8 +112,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): linker_exe='%s -mcygwin' % self.cc, linker_so=('{} -mcygwin {}'.format(self.linker_dll, shared_option)), linker_exe_cxx='%s -mcygwin' % self.cxx, - linker_so_cxx=('%s -mcygwin %s' % - (self.linker_dll_cxx, shared_option)), + linker_so_cxx=('%s -mcygwin %s' % (self.linker_dll_cxx, shared_option)), ) # Include the appropriate MSVC runtime library if Python was built @@ -146,11 +145,16 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): else: # for other files use the C-compiler try: if self.detect_language(src) == 'c++': - self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + - extra_postargs) + self.spawn( + self.compiler_so_cxx + + cc_args + + [src, '-o', obj] + + extra_postargs + ) else: self.spawn( - self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs) + self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs + ) except DistutilsExecError as msg: raise CompileError(msg) @@ -291,7 +295,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): linker_exe='%s' % self.cc, linker_so='{} {}'.format(self.linker_dll, shared_option), linker_exe_cxx='%s' % self.cxx, - linker_so_cxx='%s %s' % (self.linker_dll_cxx, shared_option) + linker_so_cxx='%s %s' % (self.linker_dll_cxx, shared_option), ) def runtime_library_dir_option(self, dir): diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py index 0919868af6..2b1bbacef2 100644 --- a/distutils/unixccompiler.py +++ b/distutils/unixccompiler.py @@ -187,8 +187,9 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): compiler_so_cxx = compiler_fixup(self.compiler_so_cxx, cc_args + extra_postargs) try: if self.detect_language(src) == 'c++': - self.spawn(compiler_so_cxx + cc_args + [ src, '-o', obj] + - extra_postargs) + self.spawn( + compiler_so_cxx + cc_args + [src, '-o', obj] + extra_postargs + ) else: self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) except DistutilsExecError as msg: @@ -258,8 +259,13 @@ def link( # building an executable or linker_so (with shared options) # when building a shared library. building_exe = target_desc == CCompiler.EXECUTABLE - linker = (self.linker_exe if building_exe else (self.linker_so_cxx if - target_lang == "c++" else self.linker_so))[:] + linker = ( + self.linker_exe + if building_exe + else ( + self.linker_so_cxx if target_lang == "c++" else self.linker_so + ) + )[:] if target_lang == "c++" and self.compiler_cxx: env, linker_ne = _split_env(linker) From fbac766736c5a3807968ec4ad9f5a0ca9b96a458 Mon Sep 17 00:00:00 2001 From: Samuel Cormier-Iijima Date: Tue, 13 Feb 2024 19:33:55 -0500 Subject: [PATCH 0366/1761] Adjust tests further to accommodate C++ support. 
--- distutils/sysconfig.py | 2 +- distutils/tests/test_sysconfig.py | 4 ++-- distutils/tests/test_unixccompiler.py | 6 +++++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index bab4ab693a..ab80e8216f 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -339,7 +339,7 @@ def customize_compiler(compiler): # noqa: C901 ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] if 'CFLAGS' in os.environ: - cflags = os.environ['CFLAGS'] + cflags = cflags + ' ' + os.environ['CFLAGS'] ldshared = ldshared + ' ' + os.environ['CFLAGS'] if 'CXXFLAGS' in os.environ: cxxflags = os.environ['CXXFLAGS'] diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py index f656be6089..4879d72039 100644 --- a/distutils/tests/test_sysconfig.py +++ b/distutils/tests/test_sysconfig.py @@ -132,7 +132,7 @@ def test_customize_compiler(self): assert comp.exes['compiler_so'] == ( 'env_cc --sc-cflags ' '--env-cflags ' '--env-cppflags --sc-ccshared' ) - assert comp.exes['compiler_cxx'] == 'env_cxx --env-cxx-flags' + assert comp.exes['compiler_cxx'] == 'env_cxx --env-cxx-flags --sc-cflags --env-cppflags' assert comp.exes['linker_exe'] == 'env_cc' assert comp.exes['linker_so'] == ( 'env_ldshared --env-ldflags --env-cflags' ' --env-cppflags' @@ -160,7 +160,7 @@ def test_customize_compiler(self): assert comp.exes['preprocessor'] == 'sc_cc -E' assert comp.exes['compiler'] == 'sc_cc --sc-cflags' assert comp.exes['compiler_so'] == 'sc_cc --sc-cflags --sc-ccshared' - assert comp.exes['compiler_cxx'] == 'sc_cxx' + assert comp.exes['compiler_cxx'] == 'sc_cxx --sc-cflags' assert comp.exes['linker_exe'] == 'sc_cc' assert comp.exes['linker_so'] == 'sc_ldshared' assert comp.shared_lib_extension == 'sc_shutil_suffix' diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py index 2763db9c02..ad08a173e2 100644 --- a/distutils/tests/test_unixccompiler.py +++ b/distutils/tests/test_unixccompiler.py @@ -260,9 +260,13 @@ def test_cc_overrides_ldshared_for_cxx_correctly(self): def gcv(v): if v == 'LDSHARED': return 'gcc-4.2 -bundle -undefined dynamic_lookup ' + elif v == 'LDCXXSHARED': + return 'g++-4.2 -bundle -undefined dynamic_lookup ' elif v == 'CXX': return 'g++-4.2' - return 'gcc-4.2' + elif v == 'CC': + return 'gcc-4.2' + return '' def gcvs(*args, _orig=sysconfig.get_config_vars): if args: From 9f176ac9ea2688e05aa4cb50a75da9bebf7e174a Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Tue, 13 Feb 2024 19:39:55 -0500 Subject: [PATCH 0367/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?= =?UTF-8?q?s=20(delint).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- distutils/tests/test_sysconfig.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py index 4879d72039..60a4ba3099 100644 --- a/distutils/tests/test_sysconfig.py +++ b/distutils/tests/test_sysconfig.py @@ -132,7 +132,10 @@ def test_customize_compiler(self): assert comp.exes['compiler_so'] == ( 'env_cc --sc-cflags ' '--env-cflags ' '--env-cppflags --sc-ccshared' ) - assert comp.exes['compiler_cxx'] == 'env_cxx --env-cxx-flags --sc-cflags --env-cppflags' + assert ( + comp.exes['compiler_cxx'] + == 'env_cxx --env-cxx-flags --sc-cflags --env-cppflags' + ) assert comp.exes['linker_exe'] == 'env_cc' assert comp.exes['linker_so'] == ( 'env_ldshared --env-ldflags --env-cflags' ' --env-cppflags' From 51749270038f117f8f3103523bb37fd73bb4fe29 Mon Sep 17 00:00:00 2001 From: Samuel Cormier-Iijima Date: Fri, 1 Mar 2024 22:16:58 -0500 Subject: [PATCH 0368/1761] Ignore sysconfig variables for LDCXXSHARED as it appears not to exist on PyPy. --- distutils/sysconfig.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index ab80e8216f..3c72989347 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -300,7 +300,6 @@ def customize_compiler(compiler): # noqa: C901 cflags, ccshared, ldshared, - ldcxxshared, shlib_suffix, ar, ar_flags, @@ -310,13 +309,13 @@ def customize_compiler(compiler): # noqa: C901 'CFLAGS', 'CCSHARED', 'LDSHARED', - 'LDCXXSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS', ) cxxflags = cflags + ldcxxshared = "" if 'CC' in os.environ: newcc = os.environ['CC'] From b434f69238b4ee517ae20978afa19f3cd1ed8f1f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:05:46 -0500 Subject: [PATCH 0369/1761] Use 'extend-select' to avoid disabling the default config. Ref jaraco/skeleton#110. --- ruff.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ruff.toml b/ruff.toml index 6c5b00092e..70612985a7 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,5 +1,5 @@ [lint] -select = [ +extend-select = [ "C901", "W", ] From bdbe5e385a282d30611e95c3e252c9a123ade331 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:02:41 -0500 Subject: [PATCH 0370/1761] In test_build_ext, expose Path objects and use a path builder to build content. Fixes some EncodingWarnings. Ref pypa/distutils#232. 
--- distutils/tests/test_build_ext.py | 36 +++++++++++++++---------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py index 537959fed6..51e5cd00cc 100644 --- a/distutils/tests/test_build_ext.py +++ b/distutils/tests/test_build_ext.py @@ -4,6 +4,7 @@ import textwrap import site import contextlib +import pathlib import platform import tempfile import importlib @@ -12,6 +13,7 @@ import path import pytest +import jaraco.path from distutils.core import Distribution from distutils.command.build_ext import build_ext @@ -38,6 +40,7 @@ def user_site_dir(request): self = request.instance self.tmp_dir = self.mkdtemp() + self.tmp_path = path.Path(self.tmp_dir) from distutils.command import build_ext orig_user_base = site.USER_BASE @@ -48,7 +51,7 @@ def user_site_dir(request): # bpo-30132: On Windows, a .pdb file may be created in the current # working directory. Create a temporary working directory to cleanup # everything at the end of the test. - with path.Path(self.tmp_dir): + with self.tmp_path: yield site.USER_BASE = orig_user_base @@ -496,25 +499,22 @@ def _try_compile_deployment_target(self, operator, target): else: os.environ['MACOSX_DEPLOYMENT_TARGET'] = target - deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c') + jaraco.path.build( + { + 'deptargetmodule.c': textwrap.dedent(f"""\ + #include - with open(deptarget_c, 'w') as fp: - fp.write( - textwrap.dedent( - """\ - #include + int dummy; - int dummy; + #if TARGET {operator} MAC_OS_X_VERSION_MIN_REQUIRED + #else + #error "Unexpected target" + #endif - #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED - #else - #error "Unexpected target" - #endif - - """ - % operator - ) - ) + """), + }, + self.tmp_path, + ) # get the deployment target that the interpreter was built with target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') @@ -534,7 +534,7 @@ def _try_compile_deployment_target(self, operator, target): target = '%02d0000' % target deptarget_ext = Extension( 'deptarget', - [deptarget_c], + [self.tmp_path / 'deptargetmodule.c'], extra_compile_args=['-DTARGET={}'.format(target)], ) dist = Distribution({'name': 'deptarget', 'ext_modules': [deptarget_ext]}) From 536553507947698491bc0e64a29491a6d2f8442b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:05:25 -0500 Subject: [PATCH 0371/1761] In support, specify encoding. Ref pypa/distutils#232. --- distutils/tests/support.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/tests/support.py b/distutils/tests/support.py index 2080604982..ddf7bf1dba 100644 --- a/distutils/tests/support.py +++ b/distutils/tests/support.py @@ -34,7 +34,7 @@ def write_file(self, path, content='xxx'): path can be a string or a sequence. """ - pathlib.Path(*always_iterable(path)).write_text(content) + pathlib.Path(*always_iterable(path)).write_text(content, encoding='utf-8') def create_dist(self, pkg_name='foo', **kw): """Will generate a test environment. From ba09295a480ec95569c393084c2e0a7846ffa384 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:14:54 -0500 Subject: [PATCH 0372/1761] In test_build_py, rely on tree builder to build trees. Ref pypa/distutils#232. 
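
The encoding fixes in this stretch all target CPython's opt-in ``EncodingWarning`` (PEP 597, Python 3.10+): any text-mode open that relies on the locale default gets flagged. A sketch of how the warning surfaces and how the explicit argument silences it (file name hypothetical):

    # run as: python -X warn_default_encoding demo.py
    import pathlib

    p = pathlib.Path('demo.txt')
    p.write_text('xxx')                     # EncodingWarning: default used
    p.write_text('xxx', encoding='utf-8')   # explicit encoding, no warning
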
--- distutils/tests/test_build_py.py | 51 ++++++++++++++------------------ 1 file changed, 23 insertions(+), 28 deletions(-) diff --git a/distutils/tests/test_build_py.py b/distutils/tests/test_build_py.py index 77c9ad7573..6730878e96 100644 --- a/distutils/tests/test_build_py.py +++ b/distutils/tests/test_build_py.py @@ -4,6 +4,7 @@ import sys import pytest +import jaraco.path from distutils.command.build_py import build_py from distutils.core import Distribution @@ -16,16 +17,13 @@ class TestBuildPy(support.TempdirManager): def test_package_data(self): sources = self.mkdtemp() - f = open(os.path.join(sources, "__init__.py"), "w") - try: - f.write("# Pretend this is a package.") - finally: - f.close() - f = open(os.path.join(sources, "README.txt"), "w") - try: - f.write("Info about this package") - finally: - f.close() + jaraco.path.build( + { + '__init__.py': "# Pretend this is a package.", + 'README.txt': 'Info about this package', + }, + sources, + ) destination = self.mkdtemp() @@ -62,11 +60,7 @@ def test_package_data(self): def test_empty_package_dir(self): # See bugs #1668596/#1720897 sources = self.mkdtemp() - open(os.path.join(sources, "__init__.py"), "w").close() - - testdir = os.path.join(sources, "doc") - os.mkdir(testdir) - open(os.path.join(testdir, "testfile"), "w").close() + jaraco.path.build({'__init__.py': '', 'doc': {'testfile': ''}}, sources) os.chdir(sources) dist = Distribution({ @@ -124,17 +118,19 @@ def test_dir_in_package_data(self): """ # See bug 19286 sources = self.mkdtemp() - pkg_dir = os.path.join(sources, "pkg") - - os.mkdir(pkg_dir) - open(os.path.join(pkg_dir, "__init__.py"), "w").close() - - docdir = os.path.join(pkg_dir, "doc") - os.mkdir(docdir) - open(os.path.join(docdir, "testfile"), "w").close() - - # create the directory that could be incorrectly detected as a file - os.mkdir(os.path.join(docdir, 'otherdir')) + jaraco.path.build( + { + 'pkg': { + '__init__.py': '', + 'doc': { + 'testfile': '', + # create a directory that could be incorrectly detected as a file + 'otherdir': {}, + }, + } + }, + sources, + ) os.chdir(sources) dist = Distribution({"packages": ["pkg"], "package_data": {"pkg": ["doc/*"]}}) @@ -174,9 +170,8 @@ def test_namespace_package_does_not_warn(self, caplog): """ # Create a fake project structure with a package namespace: tmp = self.mkdtemp() + jaraco.path.build({'ns': {'pkg': {'module.py': ''}}}, tmp) os.chdir(tmp) - os.makedirs("ns/pkg") - open("ns/pkg/module.py", "w").close() # Configure the package: attrs = { From f5bc9d2abfd66f3e95dcf9dcfd9aab4203ed7428 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:20:44 -0500 Subject: [PATCH 0373/1761] Specify encoding in util.byte_compile. Ref pypa/distutils#232. --- distutils/util.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distutils/util.py b/distutils/util.py index 5408b16032..aa0c90cfcd 100644 --- a/distutils/util.py +++ b/distutils/util.py @@ -423,9 +423,9 @@ def byte_compile( # noqa: C901 log.info("writing byte-compilation script '%s'", script_name) if not dry_run: if script_fd is not None: - script = os.fdopen(script_fd, "w") + script = os.fdopen(script_fd, "w", encoding='utf-8') else: - script = open(script_name, "w") + script = open(script_name, "w", encoding='utf-8') with script: script.write( From 66d9341ddd33d363a7fdeafa065811ba73b8077f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:28:01 -0500 Subject: [PATCH 0374/1761] Rely on tree builder in test_build_scripts. Ref pypa/distutils#232. 
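
The rewrite below leans on the ``textwrap.dedent(...).lstrip()`` pattern, so sample scripts can be written at a readable indentation while still starting with their sh-bang on the first line. A small sketch:

    import textwrap

    script = textwrap.dedent("""
        #!/bin/sh
        # bogus shell script w/ sh-bang
        exit 0
        """).lstrip()

    # dedent strips the common leading indentation; lstrip drops the
    # leading newline so the sh-bang lands on line one.
    assert script.startswith('#!/bin/sh')
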
--- distutils/tests/test_build_scripts.py | 53 +++++++++++---------------- 1 file changed, 22 insertions(+), 31 deletions(-) diff --git a/distutils/tests/test_build_scripts.py b/distutils/tests/test_build_scripts.py index 28cc5632a3..8005b81c64 100644 --- a/distutils/tests/test_build_scripts.py +++ b/distutils/tests/test_build_scripts.py @@ -1,6 +1,9 @@ """Tests for distutils.command.build_scripts.""" import os +import textwrap + +import jaraco.path from distutils.command.build_scripts import build_scripts from distutils.core import Distribution @@ -46,37 +49,25 @@ def get_build_scripts_cmd(self, target, scripts): return build_scripts(dist) def write_sample_scripts(self, dir): - expected = [] - expected.append("script1.py") - self.write_script( - dir, - "script1.py", - ( - "#! /usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n" - ), - ) - expected.append("script2.py") - self.write_script( - dir, - "script2.py", - ("#!/usr/bin/python\n" "# bogus script w/ Python sh-bang\n" "pass\n"), - ) - expected.append("shell.sh") - self.write_script( - dir, - "shell.sh", - ("#!/bin/sh\n" "# bogus shell script w/ sh-bang\n" "exit 0\n"), - ) - return expected - - def write_script(self, dir, name, text): - f = open(os.path.join(dir, name), "w") - try: - f.write(text) - finally: - f.close() + spec = { + 'script1.py': textwrap.dedent(""" + #! /usr/bin/env python2.3 + # bogus script w/ Python sh-bang + pass + """).lstrip(), + 'script2.py': textwrap.dedent(""" + #!/usr/bin/python + # bogus script w/ Python sh-bang + pass + """).lstrip(), + 'shell.sh': textwrap.dedent(""" + #!/bin/sh + # bogus shell script w/ sh-bang + exit 0 + """).lstrip(), + } + jaraco.path.build(spec, dir) + return list(spec) def test_version_int(self): source = self.mkdtemp() From b11410214a9c7398cfd3c0d6c9129f6a8f9d7599 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:39:02 -0500 Subject: [PATCH 0375/1761] Rely on Path object to replace the suffix, open the file, and count the lines. Ref pypa/distutils#232. 
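
Two small library conveniences do the work here: ``path.Path.with_suffix`` (the reason ``setup.cfg`` grows a ``path >= 10.6`` pin below) and ``more_itertools.ilen``, which counts an iterable without materializing it. A sketch:

    import more_itertools
    import path

    this_file = path.Path(__file__).with_suffix('.py')  # e.g. .pyc -> .py
    with this_file.open(encoding='utf-8') as f:
        numlines = more_itertools.ilen(f)  # line count, no intermediate list
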
--- distutils/tests/test_ccompiler.py | 2 +- distutils/tests/test_config_cmd.py | 11 +++++------ setup.cfg | 2 +- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/distutils/tests/test_ccompiler.py b/distutils/tests/test_ccompiler.py index 49691d4b9b..b6512e6d77 100644 --- a/distutils/tests/test_ccompiler.py +++ b/distutils/tests/test_ccompiler.py @@ -36,7 +36,7 @@ def c_file(tmp_path): .lstrip() .replace('#headers', headers) ) - c_file.write_text(payload) + c_file.write_text(payload, encoding='utf-8') return c_file diff --git a/distutils/tests/test_config_cmd.py b/distutils/tests/test_config_cmd.py index 2519ed6a10..90c8f90679 100644 --- a/distutils/tests/test_config_cmd.py +++ b/distutils/tests/test_config_cmd.py @@ -3,6 +3,8 @@ import os import sys +import more_itertools +import path import pytest from distutils.command.config import dump_file, config @@ -24,12 +26,9 @@ def _info(self, msg, *args): self._logs.append(line) def test_dump_file(self): - this_file = os.path.splitext(__file__)[0] + '.py' - f = open(this_file) - try: - numlines = len(f.readlines()) - finally: - f.close() + this_file = path.Path(__file__).with_suffix('.py') + with this_file.open(encoding='utf-8') as f: + numlines = more_itertools.ilen(f) dump_file(this_file, 'I am the header') assert len(self._logs) == numlines + 1 diff --git a/setup.cfg b/setup.cfg index ba2d659984..d1c9855450 100644 --- a/setup.cfg +++ b/setup.cfg @@ -35,7 +35,7 @@ testing = jaraco.envs>=2.4 jaraco.path jaraco.text - path + path >= 10.6 docutils pyfakefs more_itertools From 3dcd43668abc4d7156eada8f63b076067fe5322b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:40:50 -0500 Subject: [PATCH 0376/1761] Fix EncodingWarnings in test_core. Ref pypa/distutils#232. --- distutils/tests/test_core.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/distutils/tests/test_core.py b/distutils/tests/test_core.py index 2c11ff769e..95aa299889 100644 --- a/distutils/tests/test_core.py +++ b/distutils/tests/test_core.py @@ -70,20 +70,20 @@ class TestCore: def test_run_setup_provides_file(self, temp_file): # Make sure the script can use __file__; if that's missing, the test # setup.py script will raise NameError. - temp_file.write_text(setup_using___file__) + temp_file.write_text(setup_using___file__, encoding='utf-8') distutils.core.run_setup(temp_file) def test_run_setup_preserves_sys_argv(self, temp_file): # Make sure run_setup does not clobber sys.argv argv_copy = sys.argv.copy() - temp_file.write_text(setup_does_nothing) + temp_file.write_text(setup_does_nothing, encoding='utf-8') distutils.core.run_setup(temp_file) assert sys.argv == argv_copy def test_run_setup_defines_subclass(self, temp_file): # Make sure the script can use __file__; if that's missing, the test # setup.py script will raise NameError. 
- temp_file.write_text(setup_defines_subclass) + temp_file.write_text(setup_defines_subclass, encoding='utf-8') dist = distutils.core.run_setup(temp_file) install = dist.get_command_obj('install') assert 'cmd' in install.sub_commands @@ -98,7 +98,7 @@ def test_run_setup_uses_current_dir(self, tmp_path): # Create a directory and write the setup.py file there: setup_py = tmp_path / 'setup.py' - setup_py.write_text(setup_prints_cwd) + setup_py.write_text(setup_prints_cwd, encoding='utf-8') distutils.core.run_setup(setup_py) output = sys.stdout.getvalue() @@ -107,14 +107,14 @@ def test_run_setup_uses_current_dir(self, tmp_path): assert cwd == output def test_run_setup_within_if_main(self, temp_file): - temp_file.write_text(setup_within_if_main) + temp_file.write_text(setup_within_if_main, encoding='utf-8') dist = distutils.core.run_setup(temp_file, stop_after="config") assert isinstance(dist, Distribution) assert dist.get_name() == "setup_within_if_main" def test_run_commands(self, temp_file): sys.argv = ['setup.py', 'build'] - temp_file.write_text(setup_within_if_main) + temp_file.write_text(setup_within_if_main, encoding='utf-8') dist = distutils.core.run_setup(temp_file, stop_after="commandline") assert 'build' not in dist.have_run distutils.core.run_commands(dist) From cae489b96c3ebeadcee4f0efda008d25f7623516 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:43:36 -0500 Subject: [PATCH 0377/1761] Ran pyupgrade for Python 3.8+ followed by ruff format. --- distutils/bcppcompiler.py | 6 ++---- distutils/ccompiler.py | 4 +--- distutils/cmd.py | 8 +++----- distutils/command/_framework_compat.py | 4 ++-- distutils/command/bdist_rpm.py | 4 ++-- distutils/command/build.py | 2 +- distutils/command/build_ext.py | 2 +- distutils/command/check.py | 2 +- distutils/command/register.py | 2 +- distutils/command/upload.py | 8 +++----- distutils/core.py | 6 +++--- distutils/cygwinccompiler.py | 10 ++++------ distutils/dir_util.py | 10 +++------- distutils/dist.py | 8 ++++---- distutils/fancy_getopt.py | 6 +++--- distutils/file_util.py | 26 ++++++++------------------ distutils/filelist.py | 4 ++-- distutils/msvc9compiler.py | 14 +++++--------- distutils/msvccompiler.py | 6 ++---- distutils/py38compat.py | 2 +- distutils/spawn.py | 8 ++------ distutils/sysconfig.py | 2 +- distutils/tests/test_bdist_dumb.py | 2 +- distutils/tests/test_build.py | 2 +- distutils/tests/test_build_ext.py | 2 +- distutils/tests/test_dir_util.py | 2 +- distutils/tests/test_file_util.py | 4 ++-- distutils/tests/test_version.py | 4 ++-- distutils/util.py | 6 +++--- distutils/version.py | 2 +- 30 files changed, 67 insertions(+), 101 deletions(-) diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py index 14d51472f2..d496d5d452 100644 --- a/distutils/bcppcompiler.py +++ b/distutils/bcppcompiler.py @@ -238,7 +238,7 @@ def link( # noqa: C901 def_file = os.path.join(temp_dir, '%s.def' % modname) contents = ['EXPORTS'] for sym in export_symbols or []: - contents.append(' {}=_{}'.format(sym, sym)) + contents.append(f' {sym}=_{sym}') self.execute(write_file, (def_file, contents), "writing %s" % def_file) # Borland C++ has problems with '/' in paths @@ -348,9 +348,7 @@ def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): # use normcase to make sure '.rc' is really '.rc' and not '.RC' (base, ext) = os.path.splitext(os.path.normcase(src_name)) if ext not in (self.src_extensions + ['.rc', '.res']): - raise UnknownFileError( - "unknown file type '{}' (from '{}')".format(ext, src_name) - ) + 
raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')") if strip_dir: base = os.path.basename(base) if ext == '.res': diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py index 67feb16486..6faf546cfe 100644 --- a/distutils/ccompiler.py +++ b/distutils/ccompiler.py @@ -973,9 +973,7 @@ def _make_out_path(self, output_dir, strip_dir, src_name): try: new_ext = self.out_extensions[ext] except LookupError: - raise UnknownFileError( - "unknown file type '{}' (from '{}')".format(ext, src_name) - ) + raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')") if strip_dir: base = os.path.basename(base) return os.path.join(output_dir, base + new_ext) diff --git a/distutils/cmd.py b/distutils/cmd.py index 8fdcbc0ea2..8849474cd7 100644 --- a/distutils/cmd.py +++ b/distutils/cmd.py @@ -165,7 +165,7 @@ def dump_options(self, header=None, indent=""): if option[-1] == "=": option = option[:-1] value = getattr(self, option) - self.announce(indent + "{} = {}".format(option, value), level=logging.INFO) + self.announce(indent + f"{option} = {value}", level=logging.INFO) def run(self): """A command's raison d'etre: carry out the action it exists to @@ -213,9 +213,7 @@ def _ensure_stringlike(self, option, what, default=None): setattr(self, option, default) return default elif not isinstance(val, str): - raise DistutilsOptionError( - "'{}' must be a {} (got `{}`)".format(option, what, val) - ) + raise DistutilsOptionError(f"'{option}' must be a {what} (got `{val}`)") return val def ensure_string(self, option, default=None): @@ -242,7 +240,7 @@ def ensure_string_list(self, option): ok = False if not ok: raise DistutilsOptionError( - "'{}' must be a list of strings (got {!r})".format(option, val) + f"'{option}' must be a list of strings (got {val!r})" ) def _ensure_tested_string(self, option, tester, what, error_fmt, default=None): diff --git a/distutils/command/_framework_compat.py b/distutils/command/_framework_compat.py index b4228299f4..397ebf823e 100644 --- a/distutils/command/_framework_compat.py +++ b/distutils/command/_framework_compat.py @@ -9,7 +9,7 @@ import sysconfig -@functools.lru_cache() +@functools.lru_cache def enabled(): """ Only enabled for Python 3.9 framework homebrew builds @@ -37,7 +37,7 @@ def enabled(): ) -@functools.lru_cache() +@functools.lru_cache def vars(): if not enabled(): return {} diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py index e96db22bed..675bcebdad 100644 --- a/distutils/command/bdist_rpm.py +++ b/distutils/command/bdist_rpm.py @@ -487,7 +487,7 @@ def _make_spec_file(self): # noqa: C901 if isinstance(val, list): spec_file.append('{}: {}'.format(field, ' '.join(val))) elif val is not None: - spec_file.append('{}: {}'.format(field, val)) + spec_file.append(f'{field}: {val}') if self.distribution.get_url(): spec_file.append('Url: ' + self.distribution.get_url()) @@ -522,7 +522,7 @@ def _make_spec_file(self): # noqa: C901 # rpm scripts # figure out default build script - def_setup_call = "{} {}".format(self.python, os.path.basename(sys.argv[0])) + def_setup_call = f"{self.python} {os.path.basename(sys.argv[0])}" def_build = "%s build" % def_setup_call if self.use_rpm_opt_flags: def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build diff --git a/distutils/command/build.py b/distutils/command/build.py index cc9b367ef9..d8704e3583 100644 --- a/distutils/command/build.py +++ b/distutils/command/build.py @@ -78,7 +78,7 @@ def finalize_options(self): # noqa: C901 "using './configure --help' on your platform)" ) - 
plat_specifier = ".{}-{}".format(self.plat_name, sys.implementation.cache_tag) + plat_specifier = f".{self.plat_name}-{sys.implementation.cache_tag}" # Make it so Python 2.x and Python 2.x with --with-pydebug don't # share the same build directories. Doing so confuses the build diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py index ba6580c71e..a15781f28a 100644 --- a/distutils/command/build_ext.py +++ b/distutils/command/build_ext.py @@ -515,7 +515,7 @@ def _filter_build_errors(self, ext): except (CCompilerError, DistutilsError, CompileError) as e: if not ext.optional: raise - self.warn('building extension "{}" failed: {}'.format(ext.name, e)) + self.warn(f'building extension "{ext.name}" failed: {e}') def build_extension(self, ext): sources = ext.sources diff --git a/distutils/command/check.py b/distutils/command/check.py index b59cc23731..28f55fb914 100644 --- a/distutils/command/check.py +++ b/distutils/command/check.py @@ -116,7 +116,7 @@ def check_restructuredtext(self): if line is None: warning = warning[1] else: - warning = '{} (line {})'.format(warning[1], line) + warning = f'{warning[1]} (line {line})' self.warn(warning) def _check_rst_data(self, data): diff --git a/distutils/command/register.py b/distutils/command/register.py index cf1afc8c1f..5a24246ccb 100644 --- a/distutils/command/register.py +++ b/distutils/command/register.py @@ -174,7 +174,7 @@ def send_metadata(self): # noqa: C901 auth.add_password(self.realm, host, username, password) # send the info to the server and report the result code, result = self.post_to_server(self.build_post_data('submit'), auth) - self.announce('Server response ({}): {}'.format(code, result), logging.INFO) + self.announce(f'Server response ({code}): {result}', logging.INFO) # possibly save the login if code == 200: diff --git a/distutils/command/upload.py b/distutils/command/upload.py index caf15f04a6..a9124f2b71 100644 --- a/distutils/command/upload.py +++ b/distutils/command/upload.py @@ -169,7 +169,7 @@ def upload_file(self, command, pyversion, filename): # noqa: C901 body.write(end_boundary) body = body.getvalue() - msg = "Submitting {} to {}".format(filename, self.repository) + msg = f"Submitting {filename} to {self.repository}" self.announce(msg, logging.INFO) # build the Request @@ -193,14 +193,12 @@ def upload_file(self, command, pyversion, filename): # noqa: C901 raise if status == 200: - self.announce( - 'Server response ({}): {}'.format(status, reason), logging.INFO - ) + self.announce(f'Server response ({status}): {reason}', logging.INFO) if self.show_response: text = self._read_pypi_response(result) msg = '\n'.join(('-' * 75, text, '-' * 75)) self.announce(msg, logging.INFO) else: - msg = 'Upload failed ({}): {}'.format(status, reason) + msg = f'Upload failed ({status}): {reason}' self.announce(msg, logging.ERROR) raise DistutilsError(msg) diff --git a/distutils/core.py b/distutils/core.py index 05d2971994..799de9489c 100644 --- a/distutils/core.py +++ b/distutils/core.py @@ -203,10 +203,10 @@ def run_commands(dist): raise SystemExit("interrupted") except OSError as exc: if DEBUG: - sys.stderr.write("error: {}\n".format(exc)) + sys.stderr.write(f"error: {exc}\n") raise else: - raise SystemExit("error: {}".format(exc)) + raise SystemExit(f"error: {exc}") except (DistutilsError, CCompilerError) as msg: if DEBUG: @@ -249,7 +249,7 @@ def run_setup(script_name, script_args=None, stop_after="run"): used to drive the Distutils. 
""" if stop_after not in ('init', 'config', 'commandline', 'run'): - raise ValueError("invalid value for 'stop_after': {!r}".format(stop_after)) + raise ValueError(f"invalid value for 'stop_after': {stop_after!r}") global _setup_stop_after, _setup_distribution _setup_stop_after = stop_after diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py index b3dbc3be15..84151b7eb9 100644 --- a/distutils/cygwinccompiler.py +++ b/distutils/cygwinccompiler.py @@ -87,9 +87,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): super().__init__(verbose, dry_run, force) status, details = check_config_h() - self.debug_print( - "Python's GCC status: {} (details: {})".format(status, details) - ) + self.debug_print(f"Python's GCC status: {status} (details: {details})") if status is not CONFIG_H_OK: self.warn( "Python's pyconfig.h doesn't seem to support your compiler. " @@ -108,7 +106,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, compiler_cxx='%s -mcygwin -O -Wall' % self.cxx, linker_exe='%s -mcygwin' % self.cc, - linker_so=('{} -mcygwin {}'.format(self.linker_dll, shared_option)), + linker_so=(f'{self.linker_dll} -mcygwin {shared_option}'), ) # Include the appropriate MSVC runtime library if Python was built @@ -280,7 +278,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): compiler_so='%s -mdll -O -Wall' % self.cc, compiler_cxx='%s -O -Wall' % self.cxx, linker_exe='%s' % self.cc, - linker_so='{} {}'.format(self.linker_dll, shared_option), + linker_so=f'{self.linker_dll} {shared_option}', ) def runtime_library_dir_option(self, dir): @@ -340,7 +338,7 @@ def check_config_h(): finally: config_h.close() except OSError as exc: - return (CONFIG_H_UNCERTAIN, "couldn't read '{}': {}".format(fn, exc.strerror)) + return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}") def is_cygwincc(cc): diff --git a/distutils/dir_util.py b/distutils/dir_util.py index 23dc3392a2..819fe56f6d 100644 --- a/distutils/dir_util.py +++ b/distutils/dir_util.py @@ -33,9 +33,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901 # Detect a common bug -- name is None if not isinstance(name, str): - raise DistutilsInternalError( - "mkpath: 'name' must be a string (got {!r})".format(name) - ) + raise DistutilsInternalError(f"mkpath: 'name' must be a string (got {name!r})") # XXX what's the better way to handle verbosity? 
print as we create # each directory in the path (the current behaviour), or only announce @@ -76,7 +74,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901 except OSError as exc: if not (exc.errno == errno.EEXIST and os.path.isdir(head)): raise DistutilsFileError( - "could not create '{}': {}".format(head, exc.args[-1]) + f"could not create '{head}': {exc.args[-1]}" ) created_dirs.append(head) @@ -143,9 +141,7 @@ def copy_tree( # noqa: C901 if dry_run: names = [] else: - raise DistutilsFileError( - "error listing files in '{}': {}".format(src, e.strerror) - ) + raise DistutilsFileError(f"error listing files in '{src}': {e.strerror}") if not dry_run: mkpath(dst, verbose=verbose) diff --git a/distutils/dist.py b/distutils/dist.py index 7c0f0e5b78..659583943b 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -821,7 +821,7 @@ def get_command_class(self, command): return klass for pkgname in self.get_command_packages(): - module_name = "{}.{}".format(pkgname, command) + module_name = f"{pkgname}.{command}" klass_name = command try: @@ -889,7 +889,7 @@ def _set_command_options(self, command_obj, option_dict=None): # noqa: C901 self.announce(" setting options for '%s' command:" % command_name) for option, (source, value) in option_dict.items(): if DEBUG: - self.announce(" {} = {} (from {})".format(option, value, source)) + self.announce(f" {option} = {value} (from {source})") try: bool_opts = [translate_longopt(o) for o in command_obj.boolean_options] except AttributeError: @@ -1178,7 +1178,7 @@ def maybe_write(header, val): def _write_list(self, file, name, values): values = values or [] for value in values: - file.write('{}: {}\n'.format(name, value)) + file.write(f'{name}: {value}\n') # -- Metadata query methods ---------------------------------------- @@ -1189,7 +1189,7 @@ def get_version(self): return self.version or "0.0.0" def get_fullname(self): - return "{}-{}".format(self.get_name(), self.get_version()) + return f"{self.get_name()}-{self.get_version()}" def get_author(self): return self.author diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py index 3b887dc5a4..c025f12062 100644 --- a/distutils/fancy_getopt.py +++ b/distutils/fancy_getopt.py @@ -22,7 +22,7 @@ longopt_re = re.compile(r'^%s$' % longopt_pat) # For recognizing "negative alias" options, eg. "quiet=!verbose" -neg_alias_re = re.compile("^({})=!({})$".format(longopt_pat, longopt_pat)) +neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$") # This is used to translate long options to legitimate Python identifiers # (for use as attributes of some object). 
@@ -157,7 +157,7 @@ def _grok_option_table(self): # noqa: C901 else: # the option table is part of the code, so simply # assert that it is correct - raise ValueError("invalid option tuple: {!r}".format(option)) + raise ValueError(f"invalid option tuple: {option!r}") # Type- and value-check the option names if not isinstance(long, str) or len(long) < 2: @@ -359,7 +359,7 @@ def generate_help(self, header=None): # noqa: C901 # Case 2: we have a short option, so we have to include it # just after the long option else: - opt_names = "{} (-{})".format(long, short) + opt_names = f"{long} (-{short})" if text: lines.append(" --%-*s %s" % (max_opt, opt_names, text[0])) else: diff --git a/distutils/file_util.py b/distutils/file_util.py index 3f3e21b567..8ebd2a790f 100644 --- a/distutils/file_util.py +++ b/distutils/file_util.py @@ -26,30 +26,24 @@ def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901 try: fsrc = open(src, 'rb') except OSError as e: - raise DistutilsFileError("could not open '{}': {}".format(src, e.strerror)) + raise DistutilsFileError(f"could not open '{src}': {e.strerror}") if os.path.exists(dst): try: os.unlink(dst) except OSError as e: - raise DistutilsFileError( - "could not delete '{}': {}".format(dst, e.strerror) - ) + raise DistutilsFileError(f"could not delete '{dst}': {e.strerror}") try: fdst = open(dst, 'wb') except OSError as e: - raise DistutilsFileError( - "could not create '{}': {}".format(dst, e.strerror) - ) + raise DistutilsFileError(f"could not create '{dst}': {e.strerror}") while True: try: buf = fsrc.read(buffer_size) except OSError as e: - raise DistutilsFileError( - "could not read from '{}': {}".format(src, e.strerror) - ) + raise DistutilsFileError(f"could not read from '{src}': {e.strerror}") if not buf: break @@ -57,9 +51,7 @@ def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901 try: fdst.write(buf) except OSError as e: - raise DistutilsFileError( - "could not write to '{}': {}".format(dst, e.strerror) - ) + raise DistutilsFileError(f"could not write to '{dst}': {e.strerror}") finally: if fdst: fdst.close() @@ -199,12 +191,12 @@ def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901 dst = os.path.join(dst, basename(src)) elif exists(dst): raise DistutilsFileError( - "can't move '{}': destination '{}' already exists".format(src, dst) + f"can't move '{src}': destination '{dst}' already exists" ) if not isdir(dirname(dst)): raise DistutilsFileError( - "can't move '{}': destination '{}' not a valid path".format(src, dst) + f"can't move '{src}': destination '{dst}' not a valid path" ) copy_it = False @@ -215,9 +207,7 @@ def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901 if num == errno.EXDEV: copy_it = True else: - raise DistutilsFileError( - "couldn't move '{}' to '{}': {}".format(src, dst, msg) - ) + raise DistutilsFileError(f"couldn't move '{src}' to '{dst}': {msg}") if copy_it: copy_file(src, dst, verbose=verbose) diff --git a/distutils/filelist.py b/distutils/filelist.py index 6dadf923d7..3205762654 100644 --- a/distutils/filelist.py +++ b/distutils/filelist.py @@ -363,9 +363,9 @@ def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0): if os.sep == '\\': sep = r'\\' pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)] - pattern_re = r'{}\A{}{}.*{}{}'.format(start, prefix_re, sep, pattern_re, end) + pattern_re = rf'{start}\A{prefix_re}{sep}.*{pattern_re}{end}' else: # no prefix -- respect anchor flag if anchor: - pattern_re = r'{}\A{}'.format(start, pattern_re[len(start) :]) + pattern_re 
= rf'{start}\A{pattern_re[len(start) :]}' return re.compile(pattern_re) diff --git a/distutils/msvc9compiler.py b/distutils/msvc9compiler.py index 724986d89d..402c0c0620 100644 --- a/distutils/msvc9compiler.py +++ b/distutils/msvc9compiler.py @@ -175,7 +175,7 @@ def load_macros(self, version): except RegError: continue key = RegEnumKey(h, 0) - d = Reg.get_value(base, r"{}\{}".format(p, key)) + d = Reg.get_value(base, rf"{p}\{key}") self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): @@ -281,7 +281,7 @@ def query_vcvarsall(version, arch="x86"): raise DistutilsPlatformError("Unable to find vcvarsall.bat") log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) popen = subprocess.Popen( - '"{}" {} & set'.format(vcvarsall, arch), + f'"{vcvarsall}" {arch} & set', stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) @@ -370,9 +370,7 @@ def initialize(self, plat_name=None): # noqa: C901 # sanity check for platforms to prevent obscure errors later. ok_plats = 'win32', 'win-amd64' if plat_name not in ok_plats: - raise DistutilsPlatformError( - "--plat-name must be one of {}".format(ok_plats) - ) + raise DistutilsPlatformError(f"--plat-name must be one of {ok_plats}") if ( "DISTUTILS_USE_SDK" in os.environ @@ -564,9 +562,7 @@ def compile( # noqa: C901 continue else: # how to handle this file? - raise CompileError( - "Don't know how to compile {} to {}".format(src, obj) - ) + raise CompileError(f"Don't know how to compile {src} to {obj}") output_opt = "/Fo" + obj try: @@ -687,7 +683,7 @@ def link( # noqa: C901 mfinfo = self.manifest_get_embed_info(target_desc, ld_args) if mfinfo is not None: mffilename, mfid = mfinfo - out_arg = '-outputresource:{};{}'.format(output_filename, mfid) + out_arg = f'-outputresource:{output_filename};{mfid}' try: self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg]) except DistutilsExecError as msg: diff --git a/distutils/msvccompiler.py b/distutils/msvccompiler.py index c3823e257e..1a07746bc7 100644 --- a/distutils/msvccompiler.py +++ b/distutils/msvccompiler.py @@ -159,7 +159,7 @@ def load_macros(self, version): except RegError: continue key = RegEnumKey(h, 0) - d = read_values(base, r"{}\{}".format(p, key)) + d = read_values(base, rf"{p}\{key}") self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): @@ -454,9 +454,7 @@ def compile( # noqa: C901 continue else: # how to handle this file? 
- raise CompileError( - "Don't know how to compile {} to {}".format(src, obj) - ) + raise CompileError(f"Don't know how to compile {src} to {obj}") output_opt = "/Fo" + obj try: diff --git a/distutils/py38compat.py b/distutils/py38compat.py index 59224e71e5..ab12119fa5 100644 --- a/distutils/py38compat.py +++ b/distutils/py38compat.py @@ -5,4 +5,4 @@ def aix_platform(osname, version, release): return _aix_support.aix_platform() except ImportError: pass - return "{}-{}.{}".format(osname, version, release) + return f"{osname}-{version}.{release}" diff --git a/distutils/spawn.py b/distutils/spawn.py index afefe525ef..48adceb114 100644 --- a/distutils/spawn.py +++ b/distutils/spawn.py @@ -60,16 +60,12 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): # noqa: C901 except OSError as exc: if not DEBUG: cmd = cmd[0] - raise DistutilsExecError( - "command {!r} failed: {}".format(cmd, exc.args[-1]) - ) from exc + raise DistutilsExecError(f"command {cmd!r} failed: {exc.args[-1]}") from exc if exitcode: if not DEBUG: cmd = cmd[0] - raise DistutilsExecError( - "command {!r} failed with exit code {}".format(cmd, exitcode) - ) + raise DistutilsExecError(f"command {cmd!r} failed with exit code {exitcode}") def find_executable(executable, path=None): diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index 5fb811c406..40215b8347 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -267,7 +267,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): ) -@functools.lru_cache() +@functools.lru_cache def _customize_macos(): """ Perform first-time customization of compiler-related diff --git a/distutils/tests/test_bdist_dumb.py b/distutils/tests/test_bdist_dumb.py index 95532e83b9..cb4db4e192 100644 --- a/distutils/tests/test_bdist_dumb.py +++ b/distutils/tests/test_bdist_dumb.py @@ -61,7 +61,7 @@ def test_simple_built(self): # see what we have dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) - base = "{}.{}.zip".format(dist.get_fullname(), cmd.plat_name) + base = f"{dist.get_fullname()}.{cmd.plat_name}.zip" assert dist_created == [base] diff --git a/distutils/tests/test_build.py b/distutils/tests/test_build.py index c2cff44523..8617fa9919 100644 --- a/distutils/tests/test_build.py +++ b/distutils/tests/test_build.py @@ -24,7 +24,7 @@ def test_finalize_options(self): # build_platlib is 'build/lib.platform-cache_tag[-pydebug]' # examples: # build/lib.macosx-10.3-i386-cpython39 - plat_spec = '.{}-{}'.format(cmd.plat_name, sys.implementation.cache_tag) + plat_spec = f'.{cmd.plat_name}-{sys.implementation.cache_tag}' if hasattr(sys, 'gettotalrefcount'): assert cmd.build_platlib.endswith('-pydebug') plat_spec += '-pydebug' diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py index 51e5cd00cc..e24dea3603 100644 --- a/distutils/tests/test_build_ext.py +++ b/distutils/tests/test_build_ext.py @@ -535,7 +535,7 @@ def _try_compile_deployment_target(self, operator, target): deptarget_ext = Extension( 'deptarget', [self.tmp_path / 'deptargetmodule.c'], - extra_compile_args=['-DTARGET={}'.format(target)], + extra_compile_args=[f'-DTARGET={target}'], ) dist = Distribution({'name': 'deptarget', 'ext_modules': [deptarget_ext]}) dist.package_dir = self.tmp_dir diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py index 0738b7c877..e7d69bb6ef 100644 --- a/distutils/tests/test_dir_util.py +++ b/distutils/tests/test_dir_util.py @@ -75,7 +75,7 @@ def test_copy_tree_verbosity(self, caplog): with open(a_file, 'w') as 
f: f.write('some content') - wanted = ['copying {} -> {}'.format(a_file, self.target2)] + wanted = [f'copying {a_file} -> {self.target2}'] copy_tree(self.target, self.target2, verbose=1) assert caplog.messages == wanted diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py index 3b9f82b71e..e441186e3a 100644 --- a/distutils/tests/test_file_util.py +++ b/distutils/tests/test_file_util.py @@ -35,7 +35,7 @@ def test_move_file_verbosity(self, caplog): move_file(self.target, self.source, verbose=0) move_file(self.source, self.target, verbose=1) - wanted = ['moving {} -> {}'.format(self.source, self.target)] + wanted = [f'moving {self.source} -> {self.target}'] assert caplog.messages == wanted # back to original state @@ -45,7 +45,7 @@ def test_move_file_verbosity(self, caplog): # now the target is a dir os.mkdir(self.target_dir) move_file(self.source, self.target_dir, verbose=1) - wanted = ['moving {} -> {}'.format(self.source, self.target_dir)] + wanted = [f'moving {self.source} -> {self.target_dir}'] assert caplog.messages == wanted def test_move_file_exception_unpacking_rename(self): diff --git a/distutils/tests/test_version.py b/distutils/tests/test_version.py index 900edafa7c..0aaf0a534c 100644 --- a/distutils/tests/test_version.py +++ b/distutils/tests/test_version.py @@ -62,7 +62,7 @@ def test_cmp_strict(self): res = StrictVersion(v1)._cmp(object()) assert ( res is NotImplemented - ), 'cmp({}, {}) should be NotImplemented, got {}'.format(v1, v2, res) + ), f'cmp({v1}, {v2}) should be NotImplemented, got {res}' def test_cmp(self): versions = ( @@ -88,4 +88,4 @@ def test_cmp(self): res = LooseVersion(v1)._cmp(object()) assert ( res is NotImplemented - ), 'cmp({}, {}) should be NotImplemented, got {}'.format(v1, v2, res) + ), f'cmp({v1}, {v2}) should be NotImplemented, got {res}' diff --git a/distutils/util.py b/distutils/util.py index aa0c90cfcd..c26e61ab4a 100644 --- a/distutils/util.py +++ b/distutils/util.py @@ -172,7 +172,7 @@ def change_root(new_root, pathname): raise DistutilsPlatformError(f"nothing known about platform '{os.name}'") -@functools.lru_cache() +@functools.lru_cache def check_environ(): """Ensure that 'os.environ' has all the environment variables we guarantee that users can use in config files, command-line options, @@ -328,7 +328,7 @@ def execute(func, args, msg=None, verbose=0, dry_run=0): print. """ if msg is None: - msg = "{}{!r}".format(func.__name__, args) + msg = f"{func.__name__}{args!r}" if msg[-2:] == ',)': # correct for singleton tuple msg = msg[0:-2] + ')' @@ -350,7 +350,7 @@ def strtobool(val): elif val in ('n', 'no', 'f', 'false', 'off', '0'): return 0 else: - raise ValueError("invalid truth value {!r}".format(val)) + raise ValueError(f"invalid truth value {val!r}") def byte_compile( # noqa: C901 diff --git a/distutils/version.py b/distutils/version.py index 18385cfef2..8ab76ddef4 100644 --- a/distutils/version.py +++ b/distutils/version.py @@ -60,7 +60,7 @@ def __init__(self, vstring=None): ) def __repr__(self): - return "{} ('{}')".format(self.__class__.__name__, str(self)) + return f"{self.__class__.__name__} ('{str(self)}')" def __eq__(self, other): c = self._cmp(other) From b060f26530bb8570f1577b8b4ff562760c336cdf Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:47:11 -0500 Subject: [PATCH 0378/1761] Rely on tree builder in test_dir_util. Ref pypa/distutils#232. 
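The "tree builder" here is jaraco.path.build, which materializes files from a dict in
one call, replacing the open()/write() boilerplate the old test used. A minimal sketch
of the pattern; the root directory name is illustrative, not taken from this patch:

    import pathlib
    import jaraco.path

    root = pathlib.Path('target')  # illustrative root directory
    root.mkdir(exist_ok=True)
    # Keys name the files to create; string values become their contents.
    jaraco.path.build({'ok.txt': 'some content'}, root)
    assert (root / 'ok.txt').read_text(encoding='utf-8') == 'some content'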
--- distutils/tests/test_dir_util.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py index e7d69bb6ef..6fc9ed0883 100644 --- a/distutils/tests/test_dir_util.py +++ b/distutils/tests/test_dir_util.py @@ -4,6 +4,10 @@ import stat import unittest.mock as mock +import jaraco.path +import path +import pytest + from distutils import dir_util, errors from distutils.dir_util import ( mkpath, @@ -14,7 +18,6 @@ ) from distutils.tests import support -import pytest @pytest.fixture(autouse=True) @@ -71,9 +74,8 @@ def test_copy_tree_verbosity(self, caplog): remove_tree(self.root_target, verbose=0) mkpath(self.target, verbose=0) - a_file = os.path.join(self.target, 'ok.txt') - with open(a_file, 'w') as f: - f.write('some content') + a_file = path.Path(self.target) / 'ok.txt' + jaraco.path.build({'ok.txt': 'some content'}, self.target) wanted = [f'copying {a_file} -> {self.target2}'] copy_tree(self.target, self.target2, verbose=1) @@ -85,11 +87,7 @@ def test_copy_tree_verbosity(self, caplog): def test_copy_tree_skips_nfs_temp_files(self): mkpath(self.target, verbose=0) - a_file = os.path.join(self.target, 'ok.txt') - nfs_file = os.path.join(self.target, '.nfs123abc') - for f in a_file, nfs_file: - with open(f, 'w') as fh: - fh.write('some content') + jaraco.path.build({'ok.txt': 'some content', '.nfs123abc': ''}, self.target) copy_tree(self.target, self.target2) assert os.listdir(self.target2) == ['ok.txt'] From 438b37afae271c08dad74e96f59a5b68a80e333c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:56:46 -0500 Subject: [PATCH 0379/1761] Rely on tree builder and path objects. Ref pypa/distutils#232. --- distutils/tests/test_file_util.py | 41 +++++++++++++------------------ 1 file changed, 17 insertions(+), 24 deletions(-) diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py index e441186e3a..888e27b5b5 100644 --- a/distutils/tests/test_file_util.py +++ b/distutils/tests/test_file_util.py @@ -4,29 +4,28 @@ import errno import unittest.mock as mock +import jaraco.path +import path +import pytest + from distutils.file_util import move_file, copy_file from distutils.tests import support from distutils.errors import DistutilsFileError from .py38compat import unlink -import pytest @pytest.fixture(autouse=True) def stuff(request, monkeypatch, distutils_managed_tempdir): self = request.instance - tmp_dir = self.mkdtemp() - self.source = os.path.join(tmp_dir, 'f1') - self.target = os.path.join(tmp_dir, 'f2') - self.target_dir = os.path.join(tmp_dir, 'd1') + tmp_dir = path.Path(self.mkdtemp()) + self.source = tmp_dir / 'f1' + self.target = tmp_dir / 'f2' + self.target_dir = tmp_dir / 'd1' class TestFileUtil(support.TempdirManager): def test_move_file_verbosity(self, caplog): - f = open(self.source, 'w') - try: - f.write('some content') - finally: - f.close() + jaraco.path.build({self.source: 'some content'}) move_file(self.source, self.target, verbose=0) assert not caplog.messages @@ -53,8 +52,7 @@ def test_move_file_exception_unpacking_rename(self): with mock.patch("os.rename", side_effect=OSError("wrong", 1)), pytest.raises( DistutilsFileError ): - with open(self.source, 'w') as fobj: - fobj.write('spam eggs') + jaraco.path.build({self.source: 'spam eggs'}) move_file(self.source, self.target, verbose=0) def test_move_file_exception_unpacking_unlink(self): @@ -64,36 +62,32 @@ def test_move_file_exception_unpacking_unlink(self): ), mock.patch("os.unlink", 
side_effect=OSError("wrong", 1)), pytest.raises( DistutilsFileError ): - with open(self.source, 'w') as fobj: - fobj.write('spam eggs') + jaraco.path.build({self.source: 'spam eggs'}) move_file(self.source, self.target, verbose=0) def test_copy_file_hard_link(self): - with open(self.source, 'w') as f: - f.write('some content') + jaraco.path.build({self.source: 'some content'}) # Check first that copy_file() will not fall back on copying the file # instead of creating the hard link. try: - os.link(self.source, self.target) + self.source.link(self.target) except OSError as e: self.skipTest('os.link: %s' % e) else: - unlink(self.target) + self.target.unlink() st = os.stat(self.source) copy_file(self.source, self.target, link='hard') st2 = os.stat(self.source) st3 = os.stat(self.target) assert os.path.samestat(st, st2), (st, st2) assert os.path.samestat(st2, st3), (st2, st3) - with open(self.source) as f: - assert f.read() == 'some content' + assert self.source.read_text(encoding='utf-8') == 'some content' def test_copy_file_hard_link_failure(self): # If hard linking fails, copy_file() falls back on copying file # (some special filesystems don't support hard linking even under # Unix, see issue #8876). - with open(self.source, 'w') as f: - f.write('some content') + jaraco.path.build({self.source: 'some content'}) st = os.stat(self.source) with mock.patch("os.link", side_effect=OSError(0, "linking unsupported")): copy_file(self.source, self.target, link='hard') @@ -102,5 +96,4 @@ def test_copy_file_hard_link_failure(self): assert os.path.samestat(st, st2), (st, st2) assert not os.path.samestat(st2, st3), (st2, st3) for fn in (self.source, self.target): - with open(fn) as f: - assert f.read() == 'some content' + assert fn.read_text(encoding='utf-8') == 'some content' From 43ee1e22f58c36d26851a779ea00aa6ec72839a0 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 13:59:11 -0500 Subject: [PATCH 0380/1761] Remove reliance on TempdirManager in test_file_util. --- distutils/tests/test_file_util.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py index 888e27b5b5..27796d9fd5 100644 --- a/distutils/tests/test_file_util.py +++ b/distutils/tests/test_file_util.py @@ -15,15 +15,15 @@ @pytest.fixture(autouse=True) -def stuff(request, monkeypatch, distutils_managed_tempdir): +def stuff(request, tmp_path): self = request.instance - tmp_dir = path.Path(self.mkdtemp()) + tmp_dir = path.Path(tmp_path) self.source = tmp_dir / 'f1' self.target = tmp_dir / 'f2' self.target_dir = tmp_dir / 'd1' -class TestFileUtil(support.TempdirManager): +class TestFileUtil: def test_move_file_verbosity(self, caplog): jaraco.path.build({self.source: 'some content'}) From 5c998067eb1ab64befb831abe891ab67f69ca143 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:01:40 -0500 Subject: [PATCH 0381/1761] Rely on tmp_path fixture directly. 
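pytest's built-in tmp_path fixture already hands each test a unique pathlib.Path, so
the path.Path wrapper dropped below added nothing. A minimal sketch of the idiom (the
test body is illustrative):

    import pathlib

    def test_roundtrip(tmp_path: pathlib.Path):
        # tmp_path is a fresh temporary directory managed by pytest
        source = tmp_path / 'f1'
        source.write_text('some content', encoding='utf-8')
        assert source.read_text(encoding='utf-8') == 'some content'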
--- distutils/tests/test_file_util.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py index 27796d9fd5..08f9e19fac 100644 --- a/distutils/tests/test_file_util.py +++ b/distutils/tests/test_file_util.py @@ -5,7 +5,6 @@ import unittest.mock as mock import jaraco.path -import path import pytest from distutils.file_util import move_file, copy_file @@ -17,10 +16,9 @@ @pytest.fixture(autouse=True) def stuff(request, tmp_path): self = request.instance - tmp_dir = path.Path(tmp_path) - self.source = tmp_dir / 'f1' - self.target = tmp_dir / 'f2' - self.target_dir = tmp_dir / 'd1' + self.source = tmp_path / 'f1' + self.target = tmp_path / 'f2' + self.target_dir = tmp_path / 'd1' class TestFileUtil: @@ -70,7 +68,7 @@ def test_copy_file_hard_link(self): # Check first that copy_file() will not fall back on copying the file # instead of creating the hard link. try: - self.source.link(self.target) + os.link(self.source, self.target) except OSError as e: self.skipTest('os.link: %s' % e) else: From e2c4a88b6f4f31c7c8cc205917aa6d71496e97c9 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:06:19 -0500 Subject: [PATCH 0382/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?= =?UTF-8?q?s=20(delint).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- distutils/tests/test_build_ext.py | 1 - distutils/tests/test_file_util.py | 2 -- 2 files changed, 3 deletions(-) diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py index e24dea3603..4ae81a22e4 100644 --- a/distutils/tests/test_build_ext.py +++ b/distutils/tests/test_build_ext.py @@ -4,7 +4,6 @@ import textwrap import site import contextlib -import pathlib import platform import tempfile import importlib diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py index 08f9e19fac..6c7019140e 100644 --- a/distutils/tests/test_file_util.py +++ b/distutils/tests/test_file_util.py @@ -8,9 +8,7 @@ import pytest from distutils.file_util import move_file, copy_file -from distutils.tests import support from distutils.errors import DistutilsFileError -from .py38compat import unlink @pytest.fixture(autouse=True) From 1e3fe05c6b02b6ff7dffa8bd902a8643ce2bca20 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:13:59 -0500 Subject: [PATCH 0383/1761] Rely on tree builder. Ref pypa/distutils#232. --- distutils/tests/test_filelist.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/distutils/tests/test_filelist.py b/distutils/tests/test_filelist.py index bfffbb1da0..bf1a9d9b45 100644 --- a/distutils/tests/test_filelist.py +++ b/distutils/tests/test_filelist.py @@ -322,14 +322,18 @@ def test_non_local_discovery(self, tmp_path): When findall is called with another path, the full path name should be returned. 
""" - filename = tmp_path / 'file1.txt' - filename.write_text('') - expected = [str(filename)] + jaraco.path.build({'file1.txt': ''}, tmp_path) + expected = [str(tmp_path / 'file1.txt')] assert filelist.findall(tmp_path) == expected @os_helper.skip_unless_symlink def test_symlink_loop(self, tmp_path): - tmp_path.joinpath('link-to-parent').symlink_to('.') - tmp_path.joinpath('somefile').write_text('') + jaraco.path.build( + { + 'link-to-parent': jaraco.path.Symlink('.'), + 'somefile': '', + }, + tmp_path, + ) files = filelist.findall(tmp_path) assert len(files) == 1 From acff48deeb93775bbf7fa90750baf53f4e99cf42 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:16:27 -0500 Subject: [PATCH 0384/1761] Specify encoding in test_install. Ref pypa/distutils#232. --- distutils/tests/test_install.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/distutils/tests/test_install.py b/distutils/tests/test_install.py index 082ee1d349..16ac5ca746 100644 --- a/distutils/tests/test_install.py +++ b/distutils/tests/test_install.py @@ -196,13 +196,9 @@ def test_record(self): cmd.ensure_finalized() cmd.run() - f = open(cmd.record) - try: - content = f.read() - finally: - f.close() + content = pathlib.Path(cmd.record).read_text(encoding='utf-8') - found = [os.path.basename(line) for line in content.splitlines()] + found = [pathlib.Path(line).name for line in content.splitlines()] expected = [ 'hello.py', 'hello.%s.pyc' % sys.implementation.cache_tag, @@ -234,9 +230,9 @@ def test_record_extensions(self): cmd.ensure_finalized() cmd.run() - content = pathlib.Path(cmd.record).read_text() + content = pathlib.Path(cmd.record).read_text(encoding='utf-8') - found = [os.path.basename(line) for line in content.splitlines()] + found = [pathlib.Path(line).name for line in content.splitlines()] expected = [ _make_ext_name('xx'), 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2], From d3f79e28842d4fd798d0d98eb82460dc7c3e9f8f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:20:13 -0500 Subject: [PATCH 0385/1761] Re-use write_sample_scripts in test_install_scripts. Ref pypa/distutils#232. --- distutils/tests/test_build_scripts.py | 3 ++- distutils/tests/test_install_scripts.py | 26 ++----------------------- 2 files changed, 4 insertions(+), 25 deletions(-) diff --git a/distutils/tests/test_build_scripts.py b/distutils/tests/test_build_scripts.py index 8005b81c64..7e05ec5f9a 100644 --- a/distutils/tests/test_build_scripts.py +++ b/distutils/tests/test_build_scripts.py @@ -48,7 +48,8 @@ def get_build_scripts_cmd(self, target, scripts): ) return build_scripts(dist) - def write_sample_scripts(self, dir): + @staticmethod + def write_sample_scripts(dir): spec = { 'script1.py': textwrap.dedent(""" #! /usr/bin/env python2.3 diff --git a/distutils/tests/test_install_scripts.py b/distutils/tests/test_install_scripts.py index 58313f2864..4da2acb6a8 100644 --- a/distutils/tests/test_install_scripts.py +++ b/distutils/tests/test_install_scripts.py @@ -6,6 +6,7 @@ from distutils.core import Distribution from distutils.tests import support +from . import test_build_scripts class TestInstallScripts(support.TempdirManager): @@ -32,31 +33,8 @@ def test_default_settings(self): def test_installation(self): source = self.mkdtemp() - expected = [] - def write_script(name, text): - expected.append(name) - f = open(os.path.join(source, name), "w") - try: - f.write(text) - finally: - f.close() - - write_script( - "script1.py", - ( - "#! 
/usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n" - ), - ) - write_script( - "script2.py", - ("#!/usr/bin/python\n" "# bogus script w/ Python sh-bang\n" "pass\n"), - ) - write_script( - "shell.sh", ("#!/bin/sh\n" "# bogus shell script w/ sh-bang\n" "exit 0\n") - ) + expected = test_build_scripts.TestBuildScripts.write_sample_scripts(source) target = self.mkdtemp() dist = Distribution() From 8b7cee81ac5651691a5d92a6fa805f06fa33fb21 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:24:56 -0500 Subject: [PATCH 0386/1761] Use Path objects in test_register. Ref pypa/distutils#232. --- distutils/tests/test_register.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/distutils/tests/test_register.py b/distutils/tests/test_register.py index 5d3826a1b7..591c5ce0ad 100644 --- a/distutils/tests/test_register.py +++ b/distutils/tests/test_register.py @@ -1,7 +1,8 @@ """Tests for distutils.command.register.""" -import os import getpass +import os +import pathlib import urllib from distutils.command import register as register_module @@ -126,16 +127,8 @@ def test_create_pypirc(self): finally: del register_module.input - # we should have a brand new .pypirc file - assert os.path.exists(self.rc) - - # with the content similar to WANTED_PYPIRC - f = open(self.rc) - try: - content = f.read() - assert content == WANTED_PYPIRC - finally: - f.close() + # A new .pypirc file should contain WANTED_PYPIRC + assert pathlib.Path(self.rc).read_text(encoding='utf-8') == WANTED_PYPIRC # now let's make sure the .pypirc file generated # really works : we shouldn't be asked anything From 5377c3311b5c89cfdd53a044d4ad65688af77802 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:36:10 -0500 Subject: [PATCH 0387/1761] Specify encoding in test_sdist. Ref pypa/distutils#232. 
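The change funnels all manifest reads through one clean_lines() helper (stripped,
non-empty lines, decoded explicitly as UTF-8) and counts entries lazily with
more_itertools.ilen. The same idea in isolation; the sample manifest below is invented
for illustration:

    import pathlib
    from more_itertools import ilen

    def clean_lines(filepath):
        # Yield stripped, non-empty lines, reading explicitly as UTF-8.
        with pathlib.Path(filepath).open(encoding='utf-8') as f:
            yield from filter(None, map(str.strip, f))

    pathlib.Path('MANIFEST').write_text(
        '# comment\n\nREADME\nsetup.py\n', encoding='utf-8'
    )
    assert ilen(clean_lines('MANIFEST')) == 3  # blank line filtered out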
--- distutils/tests/test_sdist.py | 54 ++++++++++------------------------- 1 file changed, 15 insertions(+), 39 deletions(-) diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py index 00718a37bd..450f68c993 100644 --- a/distutils/tests/test_sdist.py +++ b/distutils/tests/test_sdist.py @@ -1,6 +1,7 @@ """Tests for distutils.command.sdist.""" import os +import pathlib import tarfile import warnings import zipfile @@ -11,6 +12,7 @@ import pytest import path import jaraco.path +from more_itertools import ilen from .py38compat import check_warnings @@ -62,6 +64,11 @@ def project_dir(request, pypirc): yield +def clean_lines(filepath): + with pathlib.Path(filepath).open(encoding='utf-8') as f: + yield from filter(None, map(str.strip, f)) + + class TestSDist(BasePyPIRCCommandTestCase): def get_cmd(self, metadata=None): """Returns a cmd""" @@ -243,11 +250,7 @@ def test_add_defaults(self): assert sorted(content) == ['fake-1.0/' + x for x in expected] # checking the MANIFEST - f = open(join(self.tmp_dir, 'MANIFEST')) - try: - manifest = f.read() - finally: - f.close() + manifest = pathlib.Path(self.tmp_dir, 'MANIFEST').read_text(encoding='utf-8') assert manifest == MANIFEST % {'sep': os.sep} @staticmethod @@ -352,15 +355,7 @@ def test_get_file_list(self): cmd.ensure_finalized() cmd.run() - f = open(cmd.manifest) - try: - manifest = [ - line.strip() for line in f.read().split('\n') if line.strip() != '' - ] - finally: - f.close() - - assert len(manifest) == 5 + assert ilen(clean_lines(cmd.manifest)) == 5 # adding a file self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#') @@ -372,13 +367,7 @@ def test_get_file_list(self): cmd.run() - f = open(cmd.manifest) - try: - manifest2 = [ - line.strip() for line in f.read().split('\n') if line.strip() != '' - ] - finally: - f.close() + manifest2 = list(clean_lines(cmd.manifest)) # do we have the new file in MANIFEST ? assert len(manifest2) == 6 @@ -391,15 +380,10 @@ def test_manifest_marker(self): cmd.ensure_finalized() cmd.run() - f = open(cmd.manifest) - try: - manifest = [ - line.strip() for line in f.read().split('\n') if line.strip() != '' - ] - finally: - f.close() - - assert manifest[0] == '# file GENERATED by distutils, do NOT edit' + assert ( + next(clean_lines(cmd.manifest)) + == '# file GENERATED by distutils, do NOT edit' + ) @pytest.mark.usefixtures('needs_zlib') def test_manifest_comments(self): @@ -434,15 +418,7 @@ def test_manual_manifest(self): cmd.run() assert cmd.filelist.files == ['README.manual'] - f = open(cmd.manifest) - try: - manifest = [ - line.strip() for line in f.read().split('\n') if line.strip() != '' - ] - finally: - f.close() - - assert manifest == ['README.manual'] + assert list(clean_lines(cmd.manifest)) == ['README.manual'] archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') archive = tarfile.open(archive_name) From deb159392d3e925e5d250046c33810b8c7f034e7 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:44:10 -0500 Subject: [PATCH 0388/1761] Fix EncodingWarning in test_spawn. Ref pypa/distutils#232. 
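Background: PEP 597 (Python 3.10) added EncodingWarning, emitted under
python -X warn_default_encoding whenever text I/O silently falls back on the locale's
default encoding. Passing encoding explicitly, as the one-line change below does,
avoids the warning; a sketch with an invented path:

    import pathlib

    program_path = pathlib.Path('program.exe')  # illustrative name
    # Bare write_text("") would trigger EncodingWarning under
    # python -X warn_default_encoding on 3.10+; be explicit instead.
    program_path.write_text('', encoding='utf-8')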
--- distutils/tests/test_spawn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/tests/test_spawn.py b/distutils/tests/test_spawn.py index 57cf1a525c..ec4c9982ad 100644 --- a/distutils/tests/test_spawn.py +++ b/distutils/tests/test_spawn.py @@ -54,7 +54,7 @@ def test_find_executable(self, tmp_path): program = program_noeext + ".exe" program_path = tmp_path / program - program_path.write_text("") + program_path.write_text("", encoding='utf-8') program_path.chmod(stat.S_IXUSR) filename = str(program_path) tmp_dir = path.Path(tmp_path) From 433bb4a67460ae2cf130c9f641b515fcda2e827a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:51:23 -0500 Subject: [PATCH 0389/1761] Fix EncodingWarnings in test_sdist. Ref pypa/distutils#232. --- distutils/tests/test_sysconfig.py | 62 ++++++++++++++++--------------- 1 file changed, 32 insertions(+), 30 deletions(-) diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py index f656be6089..131c1344bb 100644 --- a/distutils/tests/test_sysconfig.py +++ b/distutils/tests/test_sysconfig.py @@ -20,6 +20,11 @@ from . import py37compat +def _gen_makefile(root, contents): + jaraco.path.build({'Makefile': trim(contents)}, root) + return root / 'Makefile' + + @pytest.mark.usefixtures('save_env') class TestSysconfig: def test_get_config_h_filename(self): @@ -167,29 +172,25 @@ def test_customize_compiler(self): assert 'ranlib' not in comp.exes def test_parse_makefile_base(self, tmp_path): - makefile = tmp_path / 'Makefile' - makefile.write_text( - trim( - """ - CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB' - VAR=$OTHER - OTHER=foo - """ - ) + makefile = _gen_makefile( + tmp_path, + """ + CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB' + VAR=$OTHER + OTHER=foo + """, ) d = sysconfig.parse_makefile(makefile) assert d == {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", 'OTHER': 'foo'} def test_parse_makefile_literal_dollar(self, tmp_path): - makefile = tmp_path / 'Makefile' - makefile.write_text( - trim( - """ - CONFIG_ARGS= '--arg1=optarg1' 'ENV=\\$$LIB' - VAR=$OTHER - OTHER=foo - """ - ) + makefile = _gen_makefile( + tmp_path, + """ + CONFIG_ARGS= '--arg1=optarg1' 'ENV=\\$$LIB' + VAR=$OTHER + OTHER=foo + """, ) d = sysconfig.parse_makefile(makefile) assert d == {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", 'OTHER': 'foo'} @@ -238,23 +239,24 @@ def test_customize_compiler_before_get_config_vars(self, tmp_path): # Issue #21923: test that a Distribution compiler # instance can be called without an explicit call to # get_config_vars(). - file = tmp_path / 'file' - file.write_text( - trim( - """ - from distutils.core import Distribution - config = Distribution().get_command_obj('config') - # try_compile may pass or it may fail if no compiler - # is found but it should not raise an exception. - rc = config.try_compile('int x;') - """ - ) + jaraco.path.build( + { + 'file': trim(""" + from distutils.core import Distribution + config = Distribution().get_command_obj('config') + # try_compile may pass or it may fail if no compiler + # is found but it should not raise an exception. 
+ rc = config.try_compile('int x;') + """) + }, + tmp_path, ) p = subprocess.Popen( - py37compat.subprocess_args(sys.executable, file), + py37compat.subprocess_args(sys.executable, tmp_path / 'file'), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, + encoding='utf-8', ) outs, errs = p.communicate() assert 0 == p.returncode, "Subprocess failed: " + outs From b6f0ec38c1db2b750b32866ef8a02d5df5a9406c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 14:54:08 -0500 Subject: [PATCH 0390/1761] Rely on tree builder. Ref pypa/distutils#232. --- distutils/tests/test_text_file.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/distutils/tests/test_text_file.py b/distutils/tests/test_text_file.py index 4a721b691c..fe787f44c8 100644 --- a/distutils/tests/test_text_file.py +++ b/distutils/tests/test_text_file.py @@ -1,6 +1,8 @@ """Tests for distutils.text_file.""" -import os +import jaraco.path +import path + from distutils.text_file import TextFile from distutils.tests import support @@ -53,13 +55,9 @@ def test_input(count, description, file, expected_result): result = file.readlines() assert result == expected_result - tmpdir = self.mkdtemp() - filename = os.path.join(tmpdir, "test.txt") - out_file = open(filename, "w") - try: - out_file.write(TEST_DATA) - finally: - out_file.close() + tmp_path = path.Path(self.mkdtemp()) + filename = tmp_path / 'test.txt' + jaraco.path.build({filename.name: TEST_DATA}, tmp_path) in_file = TextFile( filename, strip_comments=0, skip_blanks=0, lstrip_ws=0, rstrip_ws=0 From 826d6fd72e146e2719048003e831de68d64e156b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:00:03 -0500 Subject: [PATCH 0391/1761] Ran pyupgrade for Python 3.8+ followed by ruff format. 
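The rewrites below are mechanical. A representative before/after of the two most
common shapes, as an illustrative snippet rather than an excerpt from the diff:

    import functools

    def describe(option, what):
        # pyupgrade turns "'{}' must be a {}".format(option, what)
        # into the equivalent f-string:
        return f"'{option}' must be a {what}"

    @functools.lru_cache  # pyupgrade drops the empty parentheses (valid on 3.8+)
    def enabled():
        return True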
--- distutils/bcppcompiler.py | 6 ++---- distutils/ccompiler.py | 4 +--- distutils/cmd.py | 8 +++----- distutils/command/_framework_compat.py | 4 ++-- distutils/command/bdist_rpm.py | 4 ++-- distutils/command/build.py | 2 +- distutils/command/build_ext.py | 2 +- distutils/command/check.py | 2 +- distutils/command/register.py | 2 +- distutils/command/upload.py | 8 +++----- distutils/core.py | 6 +++--- distutils/cygwinccompiler.py | 10 ++++------ distutils/dir_util.py | 10 +++------- distutils/dist.py | 8 ++++---- distutils/fancy_getopt.py | 6 +++--- distutils/file_util.py | 26 ++++++++------------------ distutils/filelist.py | 4 ++-- distutils/msvc9compiler.py | 14 +++++--------- distutils/msvccompiler.py | 6 ++---- distutils/py38compat.py | 2 +- distutils/spawn.py | 8 ++------ distutils/sysconfig.py | 2 +- distutils/tests/test_bdist_dumb.py | 2 +- distutils/tests/test_build.py | 2 +- distutils/tests/test_build_ext.py | 2 +- distutils/tests/test_dir_util.py | 2 +- distutils/tests/test_file_util.py | 4 ++-- distutils/tests/test_version.py | 4 ++-- distutils/util.py | 6 +++--- distutils/version.py | 2 +- 30 files changed, 67 insertions(+), 101 deletions(-) diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py index 14d51472f2..d496d5d452 100644 --- a/distutils/bcppcompiler.py +++ b/distutils/bcppcompiler.py @@ -238,7 +238,7 @@ def link( # noqa: C901 def_file = os.path.join(temp_dir, '%s.def' % modname) contents = ['EXPORTS'] for sym in export_symbols or []: - contents.append(' {}=_{}'.format(sym, sym)) + contents.append(f' {sym}=_{sym}') self.execute(write_file, (def_file, contents), "writing %s" % def_file) # Borland C++ has problems with '/' in paths @@ -348,9 +348,7 @@ def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): # use normcase to make sure '.rc' is really '.rc' and not '.RC' (base, ext) = os.path.splitext(os.path.normcase(src_name)) if ext not in (self.src_extensions + ['.rc', '.res']): - raise UnknownFileError( - "unknown file type '{}' (from '{}')".format(ext, src_name) - ) + raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')") if strip_dir: base = os.path.basename(base) if ext == '.res': diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py index 67feb16486..6faf546cfe 100644 --- a/distutils/ccompiler.py +++ b/distutils/ccompiler.py @@ -973,9 +973,7 @@ def _make_out_path(self, output_dir, strip_dir, src_name): try: new_ext = self.out_extensions[ext] except LookupError: - raise UnknownFileError( - "unknown file type '{}' (from '{}')".format(ext, src_name) - ) + raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')") if strip_dir: base = os.path.basename(base) return os.path.join(output_dir, base + new_ext) diff --git a/distutils/cmd.py b/distutils/cmd.py index 8fdcbc0ea2..8849474cd7 100644 --- a/distutils/cmd.py +++ b/distutils/cmd.py @@ -165,7 +165,7 @@ def dump_options(self, header=None, indent=""): if option[-1] == "=": option = option[:-1] value = getattr(self, option) - self.announce(indent + "{} = {}".format(option, value), level=logging.INFO) + self.announce(indent + f"{option} = {value}", level=logging.INFO) def run(self): """A command's raison d'etre: carry out the action it exists to @@ -213,9 +213,7 @@ def _ensure_stringlike(self, option, what, default=None): setattr(self, option, default) return default elif not isinstance(val, str): - raise DistutilsOptionError( - "'{}' must be a {} (got `{}`)".format(option, what, val) - ) + raise DistutilsOptionError(f"'{option}' must be a {what} (got 
`{val}`)") return val def ensure_string(self, option, default=None): @@ -242,7 +240,7 @@ def ensure_string_list(self, option): ok = False if not ok: raise DistutilsOptionError( - "'{}' must be a list of strings (got {!r})".format(option, val) + f"'{option}' must be a list of strings (got {val!r})" ) def _ensure_tested_string(self, option, tester, what, error_fmt, default=None): diff --git a/distutils/command/_framework_compat.py b/distutils/command/_framework_compat.py index b4228299f4..397ebf823e 100644 --- a/distutils/command/_framework_compat.py +++ b/distutils/command/_framework_compat.py @@ -9,7 +9,7 @@ import sysconfig -@functools.lru_cache() +@functools.lru_cache def enabled(): """ Only enabled for Python 3.9 framework homebrew builds @@ -37,7 +37,7 @@ def enabled(): ) -@functools.lru_cache() +@functools.lru_cache def vars(): if not enabled(): return {} diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py index e96db22bed..675bcebdad 100644 --- a/distutils/command/bdist_rpm.py +++ b/distutils/command/bdist_rpm.py @@ -487,7 +487,7 @@ def _make_spec_file(self): # noqa: C901 if isinstance(val, list): spec_file.append('{}: {}'.format(field, ' '.join(val))) elif val is not None: - spec_file.append('{}: {}'.format(field, val)) + spec_file.append(f'{field}: {val}') if self.distribution.get_url(): spec_file.append('Url: ' + self.distribution.get_url()) @@ -522,7 +522,7 @@ def _make_spec_file(self): # noqa: C901 # rpm scripts # figure out default build script - def_setup_call = "{} {}".format(self.python, os.path.basename(sys.argv[0])) + def_setup_call = f"{self.python} {os.path.basename(sys.argv[0])}" def_build = "%s build" % def_setup_call if self.use_rpm_opt_flags: def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build diff --git a/distutils/command/build.py b/distutils/command/build.py index cc9b367ef9..d8704e3583 100644 --- a/distutils/command/build.py +++ b/distutils/command/build.py @@ -78,7 +78,7 @@ def finalize_options(self): # noqa: C901 "using './configure --help' on your platform)" ) - plat_specifier = ".{}-{}".format(self.plat_name, sys.implementation.cache_tag) + plat_specifier = f".{self.plat_name}-{sys.implementation.cache_tag}" # Make it so Python 2.x and Python 2.x with --with-pydebug don't # share the same build directories. 
Doing so confuses the build diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py index ba6580c71e..a15781f28a 100644 --- a/distutils/command/build_ext.py +++ b/distutils/command/build_ext.py @@ -515,7 +515,7 @@ def _filter_build_errors(self, ext): except (CCompilerError, DistutilsError, CompileError) as e: if not ext.optional: raise - self.warn('building extension "{}" failed: {}'.format(ext.name, e)) + self.warn(f'building extension "{ext.name}" failed: {e}') def build_extension(self, ext): sources = ext.sources diff --git a/distutils/command/check.py b/distutils/command/check.py index b59cc23731..28f55fb914 100644 --- a/distutils/command/check.py +++ b/distutils/command/check.py @@ -116,7 +116,7 @@ def check_restructuredtext(self): if line is None: warning = warning[1] else: - warning = '{} (line {})'.format(warning[1], line) + warning = f'{warning[1]} (line {line})' self.warn(warning) def _check_rst_data(self, data): diff --git a/distutils/command/register.py b/distutils/command/register.py index cf1afc8c1f..5a24246ccb 100644 --- a/distutils/command/register.py +++ b/distutils/command/register.py @@ -174,7 +174,7 @@ def send_metadata(self): # noqa: C901 auth.add_password(self.realm, host, username, password) # send the info to the server and report the result code, result = self.post_to_server(self.build_post_data('submit'), auth) - self.announce('Server response ({}): {}'.format(code, result), logging.INFO) + self.announce(f'Server response ({code}): {result}', logging.INFO) # possibly save the login if code == 200: diff --git a/distutils/command/upload.py b/distutils/command/upload.py index caf15f04a6..a9124f2b71 100644 --- a/distutils/command/upload.py +++ b/distutils/command/upload.py @@ -169,7 +169,7 @@ def upload_file(self, command, pyversion, filename): # noqa: C901 body.write(end_boundary) body = body.getvalue() - msg = "Submitting {} to {}".format(filename, self.repository) + msg = f"Submitting {filename} to {self.repository}" self.announce(msg, logging.INFO) # build the Request @@ -193,14 +193,12 @@ def upload_file(self, command, pyversion, filename): # noqa: C901 raise if status == 200: - self.announce( - 'Server response ({}): {}'.format(status, reason), logging.INFO - ) + self.announce(f'Server response ({status}): {reason}', logging.INFO) if self.show_response: text = self._read_pypi_response(result) msg = '\n'.join(('-' * 75, text, '-' * 75)) self.announce(msg, logging.INFO) else: - msg = 'Upload failed ({}): {}'.format(status, reason) + msg = f'Upload failed ({status}): {reason}' self.announce(msg, logging.ERROR) raise DistutilsError(msg) diff --git a/distutils/core.py b/distutils/core.py index 05d2971994..799de9489c 100644 --- a/distutils/core.py +++ b/distutils/core.py @@ -203,10 +203,10 @@ def run_commands(dist): raise SystemExit("interrupted") except OSError as exc: if DEBUG: - sys.stderr.write("error: {}\n".format(exc)) + sys.stderr.write(f"error: {exc}\n") raise else: - raise SystemExit("error: {}".format(exc)) + raise SystemExit(f"error: {exc}") except (DistutilsError, CCompilerError) as msg: if DEBUG: @@ -249,7 +249,7 @@ def run_setup(script_name, script_args=None, stop_after="run"): used to drive the Distutils. 
""" if stop_after not in ('init', 'config', 'commandline', 'run'): - raise ValueError("invalid value for 'stop_after': {!r}".format(stop_after)) + raise ValueError(f"invalid value for 'stop_after': {stop_after!r}") global _setup_stop_after, _setup_distribution _setup_stop_after = stop_after diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py index b3dbc3be15..84151b7eb9 100644 --- a/distutils/cygwinccompiler.py +++ b/distutils/cygwinccompiler.py @@ -87,9 +87,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): super().__init__(verbose, dry_run, force) status, details = check_config_h() - self.debug_print( - "Python's GCC status: {} (details: {})".format(status, details) - ) + self.debug_print(f"Python's GCC status: {status} (details: {details})") if status is not CONFIG_H_OK: self.warn( "Python's pyconfig.h doesn't seem to support your compiler. " @@ -108,7 +106,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, compiler_cxx='%s -mcygwin -O -Wall' % self.cxx, linker_exe='%s -mcygwin' % self.cc, - linker_so=('{} -mcygwin {}'.format(self.linker_dll, shared_option)), + linker_so=(f'{self.linker_dll} -mcygwin {shared_option}'), ) # Include the appropriate MSVC runtime library if Python was built @@ -280,7 +278,7 @@ def __init__(self, verbose=0, dry_run=0, force=0): compiler_so='%s -mdll -O -Wall' % self.cc, compiler_cxx='%s -O -Wall' % self.cxx, linker_exe='%s' % self.cc, - linker_so='{} {}'.format(self.linker_dll, shared_option), + linker_so=f'{self.linker_dll} {shared_option}', ) def runtime_library_dir_option(self, dir): @@ -340,7 +338,7 @@ def check_config_h(): finally: config_h.close() except OSError as exc: - return (CONFIG_H_UNCERTAIN, "couldn't read '{}': {}".format(fn, exc.strerror)) + return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}") def is_cygwincc(cc): diff --git a/distutils/dir_util.py b/distutils/dir_util.py index 23dc3392a2..819fe56f6d 100644 --- a/distutils/dir_util.py +++ b/distutils/dir_util.py @@ -33,9 +33,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901 # Detect a common bug -- name is None if not isinstance(name, str): - raise DistutilsInternalError( - "mkpath: 'name' must be a string (got {!r})".format(name) - ) + raise DistutilsInternalError(f"mkpath: 'name' must be a string (got {name!r})") # XXX what's the better way to handle verbosity? 
print as we create # each directory in the path (the current behaviour), or only announce @@ -76,7 +74,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901 except OSError as exc: if not (exc.errno == errno.EEXIST and os.path.isdir(head)): raise DistutilsFileError( - "could not create '{}': {}".format(head, exc.args[-1]) + f"could not create '{head}': {exc.args[-1]}" ) created_dirs.append(head) @@ -143,9 +141,7 @@ def copy_tree( # noqa: C901 if dry_run: names = [] else: - raise DistutilsFileError( - "error listing files in '{}': {}".format(src, e.strerror) - ) + raise DistutilsFileError(f"error listing files in '{src}': {e.strerror}") if not dry_run: mkpath(dst, verbose=verbose) diff --git a/distutils/dist.py b/distutils/dist.py index 7c0f0e5b78..659583943b 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -821,7 +821,7 @@ def get_command_class(self, command): return klass for pkgname in self.get_command_packages(): - module_name = "{}.{}".format(pkgname, command) + module_name = f"{pkgname}.{command}" klass_name = command try: @@ -889,7 +889,7 @@ def _set_command_options(self, command_obj, option_dict=None): # noqa: C901 self.announce(" setting options for '%s' command:" % command_name) for option, (source, value) in option_dict.items(): if DEBUG: - self.announce(" {} = {} (from {})".format(option, value, source)) + self.announce(f" {option} = {value} (from {source})") try: bool_opts = [translate_longopt(o) for o in command_obj.boolean_options] except AttributeError: @@ -1178,7 +1178,7 @@ def maybe_write(header, val): def _write_list(self, file, name, values): values = values or [] for value in values: - file.write('{}: {}\n'.format(name, value)) + file.write(f'{name}: {value}\n') # -- Metadata query methods ---------------------------------------- @@ -1189,7 +1189,7 @@ def get_version(self): return self.version or "0.0.0" def get_fullname(self): - return "{}-{}".format(self.get_name(), self.get_version()) + return f"{self.get_name()}-{self.get_version()}" def get_author(self): return self.author diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py index 3b887dc5a4..c025f12062 100644 --- a/distutils/fancy_getopt.py +++ b/distutils/fancy_getopt.py @@ -22,7 +22,7 @@ longopt_re = re.compile(r'^%s$' % longopt_pat) # For recognizing "negative alias" options, eg. "quiet=!verbose" -neg_alias_re = re.compile("^({})=!({})$".format(longopt_pat, longopt_pat)) +neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$") # This is used to translate long options to legitimate Python identifiers # (for use as attributes of some object). 
@@ -157,7 +157,7 @@ def _grok_option_table(self): # noqa: C901 else: # the option table is part of the code, so simply # assert that it is correct - raise ValueError("invalid option tuple: {!r}".format(option)) + raise ValueError(f"invalid option tuple: {option!r}") # Type- and value-check the option names if not isinstance(long, str) or len(long) < 2: @@ -359,7 +359,7 @@ def generate_help(self, header=None): # noqa: C901 # Case 2: we have a short option, so we have to include it # just after the long option else: - opt_names = "{} (-{})".format(long, short) + opt_names = f"{long} (-{short})" if text: lines.append(" --%-*s %s" % (max_opt, opt_names, text[0])) else: diff --git a/distutils/file_util.py b/distutils/file_util.py index 3f3e21b567..8ebd2a790f 100644 --- a/distutils/file_util.py +++ b/distutils/file_util.py @@ -26,30 +26,24 @@ def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901 try: fsrc = open(src, 'rb') except OSError as e: - raise DistutilsFileError("could not open '{}': {}".format(src, e.strerror)) + raise DistutilsFileError(f"could not open '{src}': {e.strerror}") if os.path.exists(dst): try: os.unlink(dst) except OSError as e: - raise DistutilsFileError( - "could not delete '{}': {}".format(dst, e.strerror) - ) + raise DistutilsFileError(f"could not delete '{dst}': {e.strerror}") try: fdst = open(dst, 'wb') except OSError as e: - raise DistutilsFileError( - "could not create '{}': {}".format(dst, e.strerror) - ) + raise DistutilsFileError(f"could not create '{dst}': {e.strerror}") while True: try: buf = fsrc.read(buffer_size) except OSError as e: - raise DistutilsFileError( - "could not read from '{}': {}".format(src, e.strerror) - ) + raise DistutilsFileError(f"could not read from '{src}': {e.strerror}") if not buf: break @@ -57,9 +51,7 @@ def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901 try: fdst.write(buf) except OSError as e: - raise DistutilsFileError( - "could not write to '{}': {}".format(dst, e.strerror) - ) + raise DistutilsFileError(f"could not write to '{dst}': {e.strerror}") finally: if fdst: fdst.close() @@ -199,12 +191,12 @@ def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901 dst = os.path.join(dst, basename(src)) elif exists(dst): raise DistutilsFileError( - "can't move '{}': destination '{}' already exists".format(src, dst) + f"can't move '{src}': destination '{dst}' already exists" ) if not isdir(dirname(dst)): raise DistutilsFileError( - "can't move '{}': destination '{}' not a valid path".format(src, dst) + f"can't move '{src}': destination '{dst}' not a valid path" ) copy_it = False @@ -215,9 +207,7 @@ def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901 if num == errno.EXDEV: copy_it = True else: - raise DistutilsFileError( - "couldn't move '{}' to '{}': {}".format(src, dst, msg) - ) + raise DistutilsFileError(f"couldn't move '{src}' to '{dst}': {msg}") if copy_it: copy_file(src, dst, verbose=verbose) diff --git a/distutils/filelist.py b/distutils/filelist.py index 6dadf923d7..3205762654 100644 --- a/distutils/filelist.py +++ b/distutils/filelist.py @@ -363,9 +363,9 @@ def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0): if os.sep == '\\': sep = r'\\' pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)] - pattern_re = r'{}\A{}{}.*{}{}'.format(start, prefix_re, sep, pattern_re, end) + pattern_re = rf'{start}\A{prefix_re}{sep}.*{pattern_re}{end}' else: # no prefix -- respect anchor flag if anchor: - pattern_re = r'{}\A{}'.format(start, pattern_re[len(start) :]) + pattern_re 
= rf'{start}\A{pattern_re[len(start) :]}' return re.compile(pattern_re) diff --git a/distutils/msvc9compiler.py b/distutils/msvc9compiler.py index 724986d89d..402c0c0620 100644 --- a/distutils/msvc9compiler.py +++ b/distutils/msvc9compiler.py @@ -175,7 +175,7 @@ def load_macros(self, version): except RegError: continue key = RegEnumKey(h, 0) - d = Reg.get_value(base, r"{}\{}".format(p, key)) + d = Reg.get_value(base, rf"{p}\{key}") self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): @@ -281,7 +281,7 @@ def query_vcvarsall(version, arch="x86"): raise DistutilsPlatformError("Unable to find vcvarsall.bat") log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) popen = subprocess.Popen( - '"{}" {} & set'.format(vcvarsall, arch), + f'"{vcvarsall}" {arch} & set', stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) @@ -370,9 +370,7 @@ def initialize(self, plat_name=None): # noqa: C901 # sanity check for platforms to prevent obscure errors later. ok_plats = 'win32', 'win-amd64' if plat_name not in ok_plats: - raise DistutilsPlatformError( - "--plat-name must be one of {}".format(ok_plats) - ) + raise DistutilsPlatformError(f"--plat-name must be one of {ok_plats}") if ( "DISTUTILS_USE_SDK" in os.environ @@ -564,9 +562,7 @@ def compile( # noqa: C901 continue else: # how to handle this file? - raise CompileError( - "Don't know how to compile {} to {}".format(src, obj) - ) + raise CompileError(f"Don't know how to compile {src} to {obj}") output_opt = "/Fo" + obj try: @@ -687,7 +683,7 @@ def link( # noqa: C901 mfinfo = self.manifest_get_embed_info(target_desc, ld_args) if mfinfo is not None: mffilename, mfid = mfinfo - out_arg = '-outputresource:{};{}'.format(output_filename, mfid) + out_arg = f'-outputresource:{output_filename};{mfid}' try: self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg]) except DistutilsExecError as msg: diff --git a/distutils/msvccompiler.py b/distutils/msvccompiler.py index c3823e257e..1a07746bc7 100644 --- a/distutils/msvccompiler.py +++ b/distutils/msvccompiler.py @@ -159,7 +159,7 @@ def load_macros(self, version): except RegError: continue key = RegEnumKey(h, 0) - d = read_values(base, r"{}\{}".format(p, key)) + d = read_values(base, rf"{p}\{key}") self.macros["$(FrameworkVersion)"] = d["version"] def sub(self, s): @@ -454,9 +454,7 @@ def compile( # noqa: C901 continue else: # how to handle this file? 
- raise CompileError( - "Don't know how to compile {} to {}".format(src, obj) - ) + raise CompileError(f"Don't know how to compile {src} to {obj}") output_opt = "/Fo" + obj try: diff --git a/distutils/py38compat.py b/distutils/py38compat.py index 59224e71e5..ab12119fa5 100644 --- a/distutils/py38compat.py +++ b/distutils/py38compat.py @@ -5,4 +5,4 @@ def aix_platform(osname, version, release): return _aix_support.aix_platform() except ImportError: pass - return "{}-{}.{}".format(osname, version, release) + return f"{osname}-{version}.{release}" diff --git a/distutils/spawn.py b/distutils/spawn.py index afefe525ef..48adceb114 100644 --- a/distutils/spawn.py +++ b/distutils/spawn.py @@ -60,16 +60,12 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): # noqa: C901 except OSError as exc: if not DEBUG: cmd = cmd[0] - raise DistutilsExecError( - "command {!r} failed: {}".format(cmd, exc.args[-1]) - ) from exc + raise DistutilsExecError(f"command {cmd!r} failed: {exc.args[-1]}") from exc if exitcode: if not DEBUG: cmd = cmd[0] - raise DistutilsExecError( - "command {!r} failed with exit code {}".format(cmd, exitcode) - ) + raise DistutilsExecError(f"command {cmd!r} failed with exit code {exitcode}") def find_executable(executable, path=None): diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py index 5fb811c406..40215b8347 100644 --- a/distutils/sysconfig.py +++ b/distutils/sysconfig.py @@ -267,7 +267,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): ) -@functools.lru_cache() +@functools.lru_cache def _customize_macos(): """ Perform first-time customization of compiler-related diff --git a/distutils/tests/test_bdist_dumb.py b/distutils/tests/test_bdist_dumb.py index 95532e83b9..cb4db4e192 100644 --- a/distutils/tests/test_bdist_dumb.py +++ b/distutils/tests/test_bdist_dumb.py @@ -61,7 +61,7 @@ def test_simple_built(self): # see what we have dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) - base = "{}.{}.zip".format(dist.get_fullname(), cmd.plat_name) + base = f"{dist.get_fullname()}.{cmd.plat_name}.zip" assert dist_created == [base] diff --git a/distutils/tests/test_build.py b/distutils/tests/test_build.py index c2cff44523..8617fa9919 100644 --- a/distutils/tests/test_build.py +++ b/distutils/tests/test_build.py @@ -24,7 +24,7 @@ def test_finalize_options(self): # build_platlib is 'build/lib.platform-cache_tag[-pydebug]' # examples: # build/lib.macosx-10.3-i386-cpython39 - plat_spec = '.{}-{}'.format(cmd.plat_name, sys.implementation.cache_tag) + plat_spec = f'.{cmd.plat_name}-{sys.implementation.cache_tag}' if hasattr(sys, 'gettotalrefcount'): assert cmd.build_platlib.endswith('-pydebug') plat_spec += '-pydebug' diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py index 537959fed6..da4663076b 100644 --- a/distutils/tests/test_build_ext.py +++ b/distutils/tests/test_build_ext.py @@ -535,7 +535,7 @@ def _try_compile_deployment_target(self, operator, target): deptarget_ext = Extension( 'deptarget', [deptarget_c], - extra_compile_args=['-DTARGET={}'.format(target)], + extra_compile_args=[f'-DTARGET={target}'], ) dist = Distribution({'name': 'deptarget', 'ext_modules': [deptarget_ext]}) dist.package_dir = self.tmp_dir diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py index 0738b7c877..e7d69bb6ef 100644 --- a/distutils/tests/test_dir_util.py +++ b/distutils/tests/test_dir_util.py @@ -75,7 +75,7 @@ def test_copy_tree_verbosity(self, caplog): with open(a_file, 'w') as f: f.write('some 
content') - wanted = ['copying {} -> {}'.format(a_file, self.target2)] + wanted = [f'copying {a_file} -> {self.target2}'] copy_tree(self.target, self.target2, verbose=1) assert caplog.messages == wanted diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py index 3b9f82b71e..e441186e3a 100644 --- a/distutils/tests/test_file_util.py +++ b/distutils/tests/test_file_util.py @@ -35,7 +35,7 @@ def test_move_file_verbosity(self, caplog): move_file(self.target, self.source, verbose=0) move_file(self.source, self.target, verbose=1) - wanted = ['moving {} -> {}'.format(self.source, self.target)] + wanted = [f'moving {self.source} -> {self.target}'] assert caplog.messages == wanted # back to original state @@ -45,7 +45,7 @@ def test_move_file_verbosity(self, caplog): # now the target is a dir os.mkdir(self.target_dir) move_file(self.source, self.target_dir, verbose=1) - wanted = ['moving {} -> {}'.format(self.source, self.target_dir)] + wanted = [f'moving {self.source} -> {self.target_dir}'] assert caplog.messages == wanted def test_move_file_exception_unpacking_rename(self): diff --git a/distutils/tests/test_version.py b/distutils/tests/test_version.py index 900edafa7c..0aaf0a534c 100644 --- a/distutils/tests/test_version.py +++ b/distutils/tests/test_version.py @@ -62,7 +62,7 @@ def test_cmp_strict(self): res = StrictVersion(v1)._cmp(object()) assert ( res is NotImplemented - ), 'cmp({}, {}) should be NotImplemented, got {}'.format(v1, v2, res) + ), f'cmp({v1}, {v2}) should be NotImplemented, got {res}' def test_cmp(self): versions = ( @@ -88,4 +88,4 @@ def test_cmp(self): res = LooseVersion(v1)._cmp(object()) assert ( res is NotImplemented - ), 'cmp({}, {}) should be NotImplemented, got {}'.format(v1, v2, res) + ), f'cmp({v1}, {v2}) should be NotImplemented, got {res}' diff --git a/distutils/util.py b/distutils/util.py index 5408b16032..a2ba1fc961 100644 --- a/distutils/util.py +++ b/distutils/util.py @@ -172,7 +172,7 @@ def change_root(new_root, pathname): raise DistutilsPlatformError(f"nothing known about platform '{os.name}'") -@functools.lru_cache() +@functools.lru_cache def check_environ(): """Ensure that 'os.environ' has all the environment variables we guarantee that users can use in config files, command-line options, @@ -328,7 +328,7 @@ def execute(func, args, msg=None, verbose=0, dry_run=0): print. """ if msg is None: - msg = "{}{!r}".format(func.__name__, args) + msg = f"{func.__name__}{args!r}" if msg[-2:] == ',)': # correct for singleton tuple msg = msg[0:-2] + ')' @@ -350,7 +350,7 @@ def strtobool(val): elif val in ('n', 'no', 'f', 'false', 'off', '0'): return 0 else: - raise ValueError("invalid truth value {!r}".format(val)) + raise ValueError(f"invalid truth value {val!r}") def byte_compile( # noqa: C901 diff --git a/distutils/version.py b/distutils/version.py index 18385cfef2..8ab76ddef4 100644 --- a/distutils/version.py +++ b/distutils/version.py @@ -60,7 +60,7 @@ def __init__(self, vstring=None): ) def __repr__(self): - return "{} ('{}')".format(self.__class__.__name__, str(self)) + return f"{self.__class__.__name__} ('{str(self)}')" def __eq__(self, other): c = self._cmp(other) From 592b0d80d781369a2c622ccc73fb8f48ba906f5b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:05:19 -0500 Subject: [PATCH 0392/1761] Suppress diffcov error. 
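This quiets the project's diff-coverage gate, which fails a change when lines
it touches are not executed by the test suite. coverage.py excludes any line
carrying "# pragma: no cover" by default, so marking the untestable else
branch drops it from that calculation. A minimal sketch of the pattern, with
hypothetical names rather than the distutils code itself:

    import os

    def open_script(script_fd=None, script_name='script.py'):
        if script_fd is not None:
            return os.fdopen(script_fd, 'w', encoding='utf-8')
        else:  # pragma: no cover - not exercised by the tests
            return open(script_name, 'w', encoding='utf-8')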
--- distutils/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/util.py b/distutils/util.py index c26e61ab4a..bfd30700fa 100644 --- a/distutils/util.py +++ b/distutils/util.py @@ -424,7 +424,7 @@ def byte_compile( # noqa: C901 if not dry_run: if script_fd is not None: script = os.fdopen(script_fd, "w", encoding='utf-8') - else: + else: # pragma: no cover script = open(script_name, "w", encoding='utf-8') with script: From 7a7531b9addbf7fc46280d8d4a629f98c193b01d Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:25:25 -0500 Subject: [PATCH 0393/1761] Suppress more diffcov errors. --- distutils/tests/test_build_ext.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py index 4ae81a22e4..ae66bc4eb8 100644 --- a/distutils/tests/test_build_ext.py +++ b/distutils/tests/test_build_ext.py @@ -479,7 +479,7 @@ def test_deployment_target_too_low(self): @pytest.mark.skipif('platform.system() != "Darwin"') @pytest.mark.usefixtures('save_env') - def test_deployment_target_higher_ok(self): + def test_deployment_target_higher_ok(self): # pragma: no cover # Issue 9516: Test that an extension module can be compiled with a # deployment target higher than that of the interpreter: the ext # module may depend on some newer OS feature. @@ -491,7 +491,7 @@ def test_deployment_target_higher_ok(self): deptarget = '.'.join(str(i) for i in deptarget) self._try_compile_deployment_target('<', deptarget) - def _try_compile_deployment_target(self, operator, target): + def _try_compile_deployment_target(self, operator, target): # pragma: no cover if target is None: if os.environ.get('MACOSX_DEPLOYMENT_TARGET'): del os.environ['MACOSX_DEPLOYMENT_TARGET'] From 4fd512859b234179879cd9a213bd6288363ff26f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:31:22 -0500 Subject: [PATCH 0394/1761] Address EncodingWarning in ccompiler. Ref pypa/distutils#232. --- distutils/ccompiler.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py index 6faf546cfe..bcf9580c7a 100644 --- a/distutils/ccompiler.py +++ b/distutils/ccompiler.py @@ -858,8 +858,7 @@ def has_function( # noqa: C901 if library_dirs is None: library_dirs = [] fd, fname = tempfile.mkstemp(".c", funcname, text=True) - f = os.fdopen(fd, "w") - try: + with os.fdopen(fd, "w", encoding='utf-8') as f: for incl in includes: f.write("""#include "%s"\n""" % incl) if not includes: @@ -888,8 +887,7 @@ def has_function( # noqa: C901 """ % funcname ) - finally: - f.close() + try: objects = self.compile([fname], include_dirs=include_dirs) except CompileError: From 03ec237712b26d926362a349f837f9cc65e3b547 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:38:01 -0500 Subject: [PATCH 0395/1761] Fix EncodingWarnings in distutils/command/config.py. Ref pypa/distutils#232. 
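EncodingWarning is the opt-in check added by PEP 597 in Python 3.10: when the
interpreter runs with -X warn_default_encoding (or
PYTHONWARNDEFAULTENCODING=1), any text-mode open() that omits encoding= is
flagged. A hypothetical reproduction, not part of this patch:

    $ python -X warn_default_encoding -c "open('setup.cfg')"
    -c:1: EncodingWarning: 'encoding' argument not specified.

Passing encoding='utf-8' explicitly, as this series does throughout, silences
the warning and pins the behavior on platforms whose locale encoding is not
UTF-8.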
--- distutils/command/config.py | 20 +++++--------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/distutils/command/config.py b/distutils/command/config.py index 494d97d16f..573741d772 100644 --- a/distutils/command/config.py +++ b/distutils/command/config.py @@ -10,6 +10,7 @@ """ import os +import pathlib import re from ..core import Command @@ -102,7 +103,7 @@ def _check_compiler(self): def _gen_temp_sourcefile(self, body, headers, lang): filename = "_configtest" + LANG_EXT[lang] - with open(filename, "w") as file: + with open(filename, "w", encoding='utf-8') as file: if headers: for header in headers: file.write("#include <%s>\n" % header) @@ -199,15 +200,8 @@ def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang=" if isinstance(pattern, str): pattern = re.compile(pattern) - with open(out) as file: - match = False - while True: - line = file.readline() - if line == '': - break - if pattern.search(line): - match = True - break + with open(out, encoding='utf-8') as file: + match = any(pattern.search(line) for line in file) self._clean() return match @@ -369,8 +363,4 @@ def dump_file(filename, head=None): log.info('%s', filename) else: log.info(head) - file = open(filename) - try: - log.info(file.read()) - finally: - file.close() + log.info(pathlib.Path(filename).read_text(encoding='utf-8')) From b894d6f341b626b289c4d50dc00909606d1bd164 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:40:06 -0500 Subject: [PATCH 0396/1761] Fix EncodingWarnings in distutils/config.py. Ref pypa/distutils#232. --- distutils/config.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/distutils/config.py b/distutils/config.py index a55951ed7c..f92ecb9638 100644 --- a/distutils/config.py +++ b/distutils/config.py @@ -42,7 +42,8 @@ def _get_rc_file(self): def _store_pypirc(self, username, password): """Creates a default .pypirc file.""" rc = self._get_rc_file() - with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f: + raw = os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600) + with os.fdopen(raw, 'w', encoding='utf-8') as f: f.write(DEFAULT_PYPIRC % (username, password)) def _read_pypirc(self): # noqa: C901 @@ -53,7 +54,7 @@ def _read_pypirc(self): # noqa: C901 repository = self.repository or self.DEFAULT_REPOSITORY config = RawConfigParser() - config.read(rc) + config.read(rc, encoding='utf-8') sections = config.sections() if 'distutils' in sections: # let's get the list of servers From f0692cf4ccdec21debcfef57202f4af97043f135 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:47:35 -0500 Subject: [PATCH 0397/1761] Fix EncodingWarnings in sdist.py. Ref pypa/distutils#232. 
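The rewritten read_manifest() expresses the old line-by-line loop as a lazy
pipeline: strip every line, drop comments with filterfalse(), then drop the
now-empty strings with filter(None, ...). A quick equivalence check on
made-up manifest data:

    from itertools import filterfalse

    def is_comment(line):
        return line.startswith('#')

    lines = ['# file GENERATED by distutils\n', 'README\n', '   \n', 'setup.py\n']
    cleaned = filter(None, filterfalse(is_comment, map(str.strip, lines)))
    assert list(cleaned) == ['README', 'setup.py']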
--- distutils/command/sdist.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py index ac489726ca..b76cb9bc73 100644 --- a/distutils/command/sdist.py +++ b/distutils/command/sdist.py @@ -6,6 +6,7 @@ import sys from glob import glob from warnings import warn +from itertools import filterfalse from ..core import Command from distutils import dir_util @@ -429,11 +430,8 @@ def _manifest_is_not_generated(self): if not os.path.isfile(self.manifest): return False - fp = open(self.manifest) - try: - first_line = fp.readline() - finally: - fp.close() + with open(self.manifest, encoding='utf-8') as fp: + first_line = next(fp) return first_line != '# file GENERATED by distutils, do NOT edit\n' def read_manifest(self): @@ -442,13 +440,11 @@ def read_manifest(self): distribution. """ log.info("reading manifest file '%s'", self.manifest) - with open(self.manifest) as manifest: - for line in manifest: + with open(self.manifest, encoding='utf-8') as lines: + self.filelist.extend( # ignore comments and blank lines - line = line.strip() - if line.startswith('#') or not line: - continue - self.filelist.append(line) + filter(None, filterfalse(is_comment, map(str.strip, lines))) + ) def make_release_tree(self, base_dir, files): """Create the directory tree that will become the source @@ -528,3 +524,7 @@ def get_archive_files(self): was run, or None if the command hasn't run yet. """ return self.archive_files + + +def is_comment(line): + return line.startswith('#') From b420f2dd8ed44251faa2880e791c113f8ea7823c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:49:07 -0500 Subject: [PATCH 0398/1761] Fix EncodingWarnings in text_file.py. Ref pypa/distutils#232. --- distutils/text_file.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/text_file.py b/distutils/text_file.py index 36f947e51c..6f90cfe21d 100644 --- a/distutils/text_file.py +++ b/distutils/text_file.py @@ -115,7 +115,7 @@ def open(self, filename): """Open a new file named 'filename'. This overrides both the 'filename' and 'file' arguments to the constructor.""" self.filename = filename - self.file = open(self.filename, errors=self.errors) + self.file = open(self.filename, errors=self.errors, encoding='utf-8') self.current_line = 0 def close(self): From 559a4f355fadc8017a9ebdf31afed06ce4e03445 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:50:06 -0500 Subject: [PATCH 0399/1761] Fix EncodingWarnings in dist.py. Ref pypa/distutils#232. --- distutils/dist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/dist.py b/distutils/dist.py index 659583943b..c4d2a45dc2 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -395,7 +395,7 @@ def parse_config_files(self, filenames=None): # noqa: C901 for filename in filenames: if DEBUG: self.announce(" reading %s" % filename) - parser.read(filename) + parser.read(filename, encoding='utf-8') for section in parser.sections(): options = parser.options(section) opt_dict = self.get_option_dict(section) From 61d103fba380d5e56a4081b11a6680a4a0ba319a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 15:59:16 -0500 Subject: [PATCH 0400/1761] Fix EncodingWarning in cygwinccompiler. Ref pypa/distutils#232. 
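Besides the encoding fix, the try/finally bookkeeping collapses into a single
pathlib.Path.read_text() call, and the two near-duplicate return statements
into one computed message. The shape of the new logic as a standalone sketch
(simplified signature, illustrative only):

    import pathlib

    def describe_config_h(fn, substring='__GNUC__'):
        text = pathlib.Path(fn).read_text(encoding='utf-8')
        verb = 'mentions' if substring in text else 'does not mention'
        return substring in text, f"{fn!r} {verb} {substring!r}"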
--- distutils/cygwinccompiler.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py index 84151b7eb9..2060950415 100644 --- a/distutils/cygwinccompiler.py +++ b/distutils/cygwinccompiler.py @@ -7,6 +7,7 @@ """ import os +import pathlib import re import sys import copy @@ -329,14 +330,15 @@ def check_config_h(): # let's see if __GNUC__ is mentioned in python.h fn = sysconfig.get_config_h_filename() try: - config_h = open(fn) - try: - if "__GNUC__" in config_h.read(): - return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn - else: - return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn - finally: - config_h.close() + config_h = pathlib.Path(fn).read_text(encoding='utf-8') + substring = '__GNUC__' + if substring in config_h: + code = CONFIG_H_OK + mention_inflected = 'mentions' + else: + code = CONFIG_H_NOTOK + mention_inflected = 'does not mention' + return code, f"{fn!r} {mention_inflected} {substring!r}" except OSError as exc: return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}") From 2b93ccc7e3b7561ef90bac952f52de33ad46735e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 16:06:30 -0500 Subject: [PATCH 0401/1761] Fix EncodingWarning in file_util. Ref pypa/distutils#232. --- distutils/file_util.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/distutils/file_util.py b/distutils/file_util.py index 8ebd2a790f..0eb9b86107 100644 --- a/distutils/file_util.py +++ b/distutils/file_util.py @@ -230,9 +230,5 @@ def write_file(filename, contents): """Create a file with the specified name and write 'contents' (a sequence of strings without line terminators) to it. """ - f = open(filename, "w") - try: - for line in contents: - f.write(line + "\n") - finally: - f.close() + with open(filename, 'w', encoding='utf-8') as f: + f.writelines(line + '\n' for line in contents) From 9508489953a84a1412ad24e6613650351369462c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 16:10:53 -0500 Subject: [PATCH 0402/1761] Suppress EncodingWarnings in pyfakefs. Ref pypa/distutils#232. Workaround for pytest-dev/pyfakefs#957. --- pytest.ini | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pytest.ini b/pytest.ini index 3ee2f886ba..42820fc7ed 100644 --- a/pytest.ini +++ b/pytest.ini @@ -34,3 +34,7 @@ filterwarnings= # suppress well know deprecation warning ignore:distutils.log.Log is deprecated + + # pytest-dev/pyfakefs#957 + ignore:UTF-8 Mode affects locale.getpreferredencoding::pyfakefs.fake_file + ignore:'encoding' argument not specified::pyfakefs.helpers From 57d567de0ab8798d418e0b2e48d4048bb86713b8 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 16:19:04 -0500 Subject: [PATCH 0403/1761] Replaced deprecated cgi module with email module. Ref pypa/distutils#232. 
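cgi.parse_header() was this module's last use of cgi, which PEP 594
deprecates in Python 3.11 and removes in 3.13. The modern email API parses
the same parameterized header; the replacement hinges on EmailMessage
defaulting to email.policy.default, whose structured Content-Type header
exposes .params (the legacy Message class returns a plain string instead).
In isolation:

    import email.message

    msg = email.message.EmailMessage()
    msg['content-type'] = 'text/html; charset="utf8"'
    assert msg['content-type'].params.get('charset', 'ascii') == 'utf8'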
--- distutils/config.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/distutils/config.py b/distutils/config.py index f92ecb9638..e0defd77e6 100644 --- a/distutils/config.py +++ b/distutils/config.py @@ -5,6 +5,7 @@ """ import os +import email.message from configparser import RawConfigParser from .cmd import Command @@ -121,11 +122,8 @@ def _read_pypirc(self): # noqa: C901 def _read_pypi_response(self, response): """Read and decode a PyPI HTTP response.""" - import cgi - content_type = response.getheader('content-type', 'text/plain') - encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii') - return response.read().decode(encoding) + return response.read().decode(_extract_encoding(content_type)) def initialize_options(self): """Initialize options.""" @@ -139,3 +137,15 @@ def finalize_options(self): self.repository = self.DEFAULT_REPOSITORY if self.realm is None: self.realm = self.DEFAULT_REALM + + +def _extract_encoding(content_type): + """ + >>> _extract_encoding('text/plain') + 'ascii' + >>> _extract_encoding('text/html; charset="utf8"') + 'utf8' + """ + msg = email.message.EmailMessage() + msg['content-type'] = content_type + return msg['content-type'].params.get('charset', 'ascii') From 3ff7b64b324cdbf7a12dd406b9bdddcf4add860e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 20:05:30 -0500 Subject: [PATCH 0404/1761] Fix exception reference in missing_compiler_executable. Ref pypa/distutils#225. Closes pypa/distutils#238. --- distutils/tests/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/tests/__init__.py b/distutils/tests/__init__.py index aad8edb242..6d9b853215 100644 --- a/distutils/tests/__init__.py +++ b/distutils/tests/__init__.py @@ -26,7 +26,7 @@ def missing_compiler_executable(cmd_names=[]): # pragma: no cover # MSVC has no executables, so check whether initialization succeeds try: compiler.initialize() - except errors.PlatformError: + except errors.DistutilsPlatformError: return "msvc" for name in compiler.executables: if cmd_names and name not in cmd_names: From fa6e2183cdc2d09db9b18f1cb9619116f104dd0e Mon Sep 17 00:00:00 2001 From: Mridul Seth Date: Thu, 29 Feb 2024 13:07:44 +0530 Subject: [PATCH 0405/1761] ENH: Extension should be able to accept PathLike sources objects --- distutils/extension.py | 11 ++++++++--- distutils/tests/test_extension.py | 5 ++++- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/distutils/extension.py b/distutils/extension.py index 8f186b72ff..2d6950fe53 100644 --- a/distutils/extension.py +++ b/distutils/extension.py @@ -26,7 +26,7 @@ class Extension: name : string the full name of the extension, including any packages -- ie. *not* a filename or pathname, but Python dotted name - sources : [string] + sources : [string | os.PathLike] list of source filenames, relative to the distribution root (where the setup script lives), in Unix form (slash-separated) for portability. Source files may be C, C++, SWIG (.i), @@ -106,8 +106,13 @@ def __init__( ): if not isinstance(name, str): raise AssertionError("'name' must be a string") - if not (isinstance(sources, list) and all(isinstance(v, str) for v in sources)): - raise AssertionError("'sources' must be a list of strings") + if not ( + isinstance(sources, list) + and all(isinstance(v, (str, os.PathLike)) for v in sources) + ): + raise AssertionError( + "'sources' must be a list of strings or PathLike objects." 
+ ) self.name = name self.sources = sources diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py index 297ae44bfe..3fbea21e28 100644 --- a/distutils/tests/test_extension.py +++ b/distutils/tests/test_extension.py @@ -2,6 +2,7 @@ import os import warnings +from pathlib import Path from distutils.extension import read_setup_file, Extension @@ -68,13 +69,15 @@ def test_extension_init(self): assert ext.name == 'name' # the second argument, which is the list of files, must - # be a list of strings + # be a list of strings or PathLike objects with pytest.raises(AssertionError): Extension('name', 'file') with pytest.raises(AssertionError): Extension('name', ['file', 1]) ext = Extension('name', ['file1', 'file2']) assert ext.sources == ['file1', 'file2'] + ext = Extension('name', [Path('file1'), Path('file2')]) + assert ext.sources == ['file1', 'file2'] # others arguments have defaults for attr in ( From 45a232ab88198c488bcb0abf0acb9bfd1ecb518c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 16:42:03 -0500 Subject: [PATCH 0406/1761] Adjust expectation to match behavior. --- distutils/tests/test_extension.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py index 3fbea21e28..9a53787333 100644 --- a/distutils/tests/test_extension.py +++ b/distutils/tests/test_extension.py @@ -77,7 +77,7 @@ def test_extension_init(self): ext = Extension('name', ['file1', 'file2']) assert ext.sources == ['file1', 'file2'] ext = Extension('name', [Path('file1'), Path('file2')]) - assert ext.sources == ['file1', 'file2'] + assert ext.sources == [Path('file1'), Path('file2')] # others arguments have defaults for attr in ( From 9cc0c93cf19dfbb737a3ae96c08034203aee0a88 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 16:45:05 -0500 Subject: [PATCH 0407/1761] For consistency, ensure Extension.sources is always a pathlib.Path object and adjust expectations in tests. 
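With this normalization the constructor converts every entry, so plain str
inputs come back out as pathlib.Path objects. Illustrative behavior, using a
hypothetical extension:

    from pathlib import Path
    from distutils.extension import Extension

    ext = Extension('demo', ['src/demo.c', Path('src/helper.c')])
    assert ext.sources == [Path('src/demo.c'), Path('src/helper.c')]

Downstream code that assumed str sources (string slicing, concatenation,
regex matching) now needs an explicit str() coercion, which is what the next
two patches retrofit in _msvccompiler and filelist.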
--- distutils/extension.py | 3 ++- distutils/tests/test_build_ext.py | 3 ++- distutils/tests/test_extension.py | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/distutils/extension.py b/distutils/extension.py index 2d6950fe53..3f950d5a73 100644 --- a/distutils/extension.py +++ b/distutils/extension.py @@ -4,6 +4,7 @@ modules in setup scripts.""" import os +import pathlib import warnings # This class is really only used by the "build_ext" command, so it might @@ -115,7 +116,7 @@ def __init__( ) self.name = name - self.sources = sources + self.sources = list(map(pathlib.Path, sources)) self.include_dirs = include_dirs or [] self.define_macros = define_macros or [] self.undef_macros = undef_macros or [] diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py index ae66bc4eb8..7e8a4ddb29 100644 --- a/distutils/tests/test_build_ext.py +++ b/distutils/tests/test_build_ext.py @@ -4,6 +4,7 @@ import textwrap import site import contextlib +import pathlib import platform import tempfile import importlib @@ -335,7 +336,7 @@ def test_get_source_files(self): dist = Distribution({'name': 'xx', 'ext_modules': modules}) cmd = self.build_ext(dist) cmd.ensure_finalized() - assert cmd.get_source_files() == ['xxx'] + assert cmd.get_source_files() == [pathlib.Path('xxx')] def test_unicode_module_names(self): modules = [ diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py index 9a53787333..023c7f9fc3 100644 --- a/distutils/tests/test_extension.py +++ b/distutils/tests/test_extension.py @@ -75,7 +75,7 @@ def test_extension_init(self): with pytest.raises(AssertionError): Extension('name', ['file', 1]) ext = Extension('name', ['file1', 'file2']) - assert ext.sources == ['file1', 'file2'] + assert ext.sources == [Path('file1'), Path('file2')] ext = Extension('name', [Path('file1'), Path('file2')]) assert ext.sources == [Path('file1'), Path('file2')] From c489cdc9ac46aeab6f94e3f510c50ac3983782a5 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 20:53:25 -0500 Subject: [PATCH 0408/1761] When computing input_opt, ensure src is a string (when it could be a pathlib Path). --- distutils/_msvccompiler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index 4f081c7e92..8c6fb5b5e6 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -367,9 +367,9 @@ def compile( # noqa: C901 src = os.path.abspath(src) if ext in self._c_extensions: - input_opt = "/Tc" + src + input_opt = "/Tc" + str(src) elif ext in self._cpp_extensions: - input_opt = "/Tp" + src + input_opt = "/Tp" + str(src) add_cpp_opts = True elif ext in self._rc_extensions: # compile .RC to .RES file From f2a85c1c290f75415c9df7dba473756ee62f8581 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 2 Mar 2024 21:47:38 -0500 Subject: [PATCH 0409/1761] In filelist, allow for self.files to be pathlib objects. 
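Needed because compiled regexes reject path objects outright; the coercion
happens only at the match site, leaving the stored entries as the caller
provided them. For example, with a hypothetical file name:

    import re
    from pathlib import Path

    pattern = re.compile(r'\.pyc$')
    try:
        pattern.search(Path('mod.pyc'))
    except TypeError as exc:
        print(exc)  # expected string or bytes-like object
    assert pattern.search(str(Path('mod.pyc')))

Note that the neighboring debug_print call still evaluates
" removing " + self.files[i] eagerly, so a Path entry that matches an exclude
pattern would appear to raise TypeError there as well; presumably a
follow-up.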
--- distutils/filelist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/filelist.py b/distutils/filelist.py index 3205762654..d0ad54aba6 100644 --- a/distutils/filelist.py +++ b/distutils/filelist.py @@ -251,7 +251,7 @@ def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern) for i in range(len(self.files) - 1, -1, -1): - if pattern_re.search(self.files[i]): + if pattern_re.search(str(self.files[i])): self.debug_print(" removing " + self.files[i]) del self.files[i] files_found = True From 38b58a5b3fc343aebdb08f46089049780de4dc44 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 3 Mar 2024 06:21:04 -0500 Subject: [PATCH 0410/1761] Satisfy EncodingWarning by passing the encoding. --- distutils/tests/test_dist.py | 2 +- pytest.ini | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/distutils/tests/test_dist.py b/distutils/tests/test_dist.py index fe979efed5..8e52873dce 100644 --- a/distutils/tests/test_dist.py +++ b/distutils/tests/test_dist.py @@ -257,7 +257,7 @@ def test_find_config_files_permission_error(self, fake_home): """ Finding config files should not fail when directory is inaccessible. """ - fake_home.joinpath(pydistutils_cfg).write_text('') + fake_home.joinpath(pydistutils_cfg).write_text('', encoding='utf-8') fake_home.chmod(0o000) Distribution().find_config_files() diff --git a/pytest.ini b/pytest.ini index 42820fc7ed..fa31fb33dc 100644 --- a/pytest.ini +++ b/pytest.ini @@ -37,4 +37,3 @@ filterwarnings= # pytest-dev/pyfakefs#957 ignore:UTF-8 Mode affects locale.getpreferredencoding::pyfakefs.fake_file - ignore:'encoding' argument not specified::pyfakefs.helpers From dbc6471dbacb5a43ed60625cda8b9fb4d8f0aa34 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 3 Mar 2024 12:19:58 -0500 Subject: [PATCH 0411/1761] Avoid removing ruff.toml during vendoring. Closes #4252. --- tools/vendored.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/vendored.py b/tools/vendored.py index 9b7cf729ba..f339497fa1 100644 --- a/tools/vendored.py +++ b/tools/vendored.py @@ -105,7 +105,8 @@ def clean(vendor): Remove all files out of the vendor directory except the meta data (as pip uninstall doesn't support -t). """ - remove_all(path for path in vendor.glob('*') if path.basename() != 'vendored.txt') + ignored = ['vendored.txt', 'ruff.toml'] + remove_all(path for path in vendor.glob('*') if path.basename() not in ignored) def install(vendor): From 56bd481f5e515d70f825d397372f6ba020ebcccb Mon Sep 17 00:00:00 2001 From: Avasam Date: Sun, 3 Mar 2024 19:58:07 -0500 Subject: [PATCH 0412/1761] Made `pkg_resoursces.NullProvider`'s `has_metadata` and `metadata_isdir` methods return actual booleans like all other Providers --- newsfragments/4254.bugfix.rst | 1 + pkg_resources/__init__.py | 26 +++++++++++++------------- pkg_resources/tests/test_resources.py | 2 +- 3 files changed, 15 insertions(+), 14 deletions(-) create mode 100644 newsfragments/4254.bugfix.rst diff --git a/newsfragments/4254.bugfix.rst b/newsfragments/4254.bugfix.rst new file mode 100644 index 0000000000..e944fcfb49 --- /dev/null +++ b/newsfragments/4254.bugfix.rst @@ -0,0 +1 @@ +Made ``pkg_resoursces.NullProvider``'s ``has_metadata`` and ``metadata_isdir`` methods return actual booleans like all other Providers. 
-- by :user:`Avasam` diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 10c6a9cd06..a810bf0082 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -531,7 +531,7 @@ def get_entry_info(dist, group, name): class IMetadataProvider(Protocol): - def has_metadata(self, name): + def has_metadata(self, name) -> bool: """Does the package's distribution contain the named metadata?""" def get_metadata(self, name): @@ -543,7 +543,7 @@ def get_metadata_lines(self, name): Leading and trailing whitespace is stripped from each line, and lines with ``#`` as the first non-blank character are omitted.""" - def metadata_isdir(self, name): + def metadata_isdir(self, name) -> bool: """Is the named metadata a directory? (like ``os.path.isdir()``)""" def metadata_listdir(self, name): @@ -1488,9 +1488,9 @@ def has_resource(self, resource_name): def _get_metadata_path(self, name): return self._fn(self.egg_info, name) - def has_metadata(self, name): + def has_metadata(self, name) -> bool: if not self.egg_info: - return self.egg_info + return False path = self._get_metadata_path(name) return self._has(path) @@ -1514,8 +1514,8 @@ def get_metadata_lines(self, name): def resource_isdir(self, resource_name): return self._isdir(self._fn(self.module_path, resource_name)) - def metadata_isdir(self, name): - return self.egg_info and self._isdir(self._fn(self.egg_info, name)) + def metadata_isdir(self, name) -> bool: + return bool(self.egg_info and self._isdir(self._fn(self.egg_info, name))) def resource_listdir(self, resource_name): return self._listdir(self._fn(self.module_path, resource_name)) @@ -1554,12 +1554,12 @@ def run_script(self, script_name, namespace): script_code = compile(script_text, script_filename, 'exec') exec(script_code, namespace, namespace) - def _has(self, path): + def _has(self, path) -> bool: raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) - def _isdir(self, path): + def _isdir(self, path) -> bool: raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) @@ -1694,10 +1694,10 @@ def _set_egg(self, path): class DefaultProvider(EggProvider): """Provides access to package resources in the filesystem""" - def _has(self, path): + def _has(self, path) -> bool: return os.path.exists(path) - def _isdir(self, path): + def _isdir(self, path) -> bool: return os.path.isdir(path) def _listdir(self, path): @@ -1939,11 +1939,11 @@ def _index(self): self._dirindex = ind return ind - def _has(self, fspath): + def _has(self, fspath) -> bool: zip_path = self._zipinfo_name(fspath) return zip_path in self.zipinfo or zip_path in self._index() - def _isdir(self, fspath): + def _isdir(self, fspath) -> bool: return self._zipinfo_name(fspath) in self._index() def _listdir(self, fspath): @@ -1977,7 +1977,7 @@ def __init__(self, path): def _get_metadata_path(self, name): return self.path - def has_metadata(self, name): + def has_metadata(self, name) -> bool: return name == 'PKG-INFO' and os.path.isfile(self.path) def get_metadata(self, name): diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py index 5b2308aea7..b0a319e60f 100644 --- a/pkg_resources/tests/test_resources.py +++ b/pkg_resources/tests/test_resources.py @@ -35,7 +35,7 @@ class Metadata(pkg_resources.EmptyProvider): def __init__(self, *pairs): self.metadata = dict(pairs) - def has_metadata(self, name): + def has_metadata(self, name) -> bool: return name in self.metadata def get_metadata(self, name): From 
226e1a284dde19b09530a27c66dd18e72f2971b1 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 5 Mar 2024 06:07:38 -0500 Subject: [PATCH 0413/1761] Fix mypy issues (#3979) * Fix all mypy issues * Ran black * Exclude tox from mypy check * Fix all mypy issues again * Address PR comments * Fix accidental line ending changes * Update .gitignore * No unused type: ignore * TypeError: 'ABCMeta' object is not subscriptable * Fix RuffError * Fix post-merge mypy issues * RUff format * Ignore more generated files * Disable more mypy errors * Globally ignore attr-defined for now * Update more comments * Address PR comments and fix new exposed typing issues * Comments updates and don't touch vendored * Accidentally removed noqa * Update setuptools/tests/integration/test_pip_install_sdist.py Co-authored-by: Anderson Bravalheri * Post merge comments Update setuptools/tests/integration/test_pip_install_sdist.py Co-authored-by: Anderson Bravalheri * Document that usage of _config_vars is very purposeful Closes #4228 + try to resolve doc issue * sort nitpick_ignore * Make only comment on newline like others * Forgot to re-ignore --------- Co-authored-by: Anderson Bravalheri --- docs/conf.py | 10 ++--- mypy.ini | 43 +++++++++++++++++-- pkg_resources/tests/test_pkg_resources.py | 3 +- setup.py | 3 +- setuptools/__init__.py | 11 +++-- setuptools/command/_requirestxt.py | 2 +- setuptools/command/build_ext.py | 11 +++-- setuptools/command/dist_info.py | 4 +- setuptools/command/easy_install.py | 10 ++--- setuptools/command/editable_wheel.py | 41 +++++++++++++----- setuptools/command/install.py | 5 ++- setuptools/command/rotate.py | 3 +- setuptools/command/upload_docs.py | 2 +- setuptools/config/_apply_pyprojecttoml.py | 3 +- setuptools/config/expand.py | 2 +- setuptools/config/pyprojecttoml.py | 5 ++- setuptools/config/setupcfg.py | 4 +- setuptools/dist.py | 18 +++++--- setuptools/extension.py | 8 +++- setuptools/monkey.py | 7 ++- setuptools/msvc.py | 6 ++- setuptools/sandbox.py | 13 +++--- .../tests/config/test_apply_pyprojecttoml.py | 3 +- .../integration/test_pip_install_sdist.py | 6 ++- setuptools/tests/test_bdist_egg.py | 2 +- setuptools/tests/test_editable_install.py | 2 +- setuptools/tests/test_egg_info.py | 3 +- setuptools/tests/test_manifest.py | 3 +- 28 files changed, 164 insertions(+), 69 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 0a82ff2fe2..be8856849b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -161,22 +161,23 @@ # Ref: https://stackoverflow.com/a/30624034/595220 nitpick_ignore = [ ('c:func', 'SHGetSpecialFolderPath'), # ref to MS docs + ('envvar', 'DIST_EXTRA_CONFIG'), # undocumented ('envvar', 'DISTUTILS_DEBUG'), # undocumented ('envvar', 'HOME'), # undocumented ('envvar', 'PLAT'), # undocumented - ('envvar', 'DIST_EXTRA_CONFIG'), # undocumented ('py:attr', 'CCompiler.language_map'), # undocumented ('py:attr', 'CCompiler.language_order'), # undocumented - ('py:class', 'distutils.dist.Distribution'), # undocumented - ('py:class', 'distutils.extension.Extension'), # undocumented ('py:class', 'BorlandCCompiler'), # undocumented ('py:class', 'CCompiler'), # undocumented ('py:class', 'CygwinCCompiler'), # undocumented + ('py:class', 'distutils.dist.Distribution'), # undocumented ('py:class', 'distutils.dist.DistributionMetadata'), # undocumented + ('py:class', 'distutils.extension.Extension'), # undocumented ('py:class', 'FileList'), # undocumented ('py:class', 'IShellLink'), # ref to MS docs ('py:class', 'MSVCCompiler'), # undocumented ('py:class', 'OptionDummy'), # undocumented + 
('py:class', 'setuptools.dist.Distribution'), # undocumented ('py:class', 'UnixCCompiler'), # undocumented ('py:exc', 'CompileError'), # undocumented ('py:exc', 'DistutilsExecError'), # undocumented @@ -186,8 +187,7 @@ ('py:exc', 'PreprocessError'), # undocumented ('py:exc', 'setuptools.errors.PlatformError'), # sphinx cannot find it ('py:func', 'distutils.CCompiler.new_compiler'), # undocumented - # undocumented: - ('py:func', 'distutils.dist.DistributionMetadata.read_pkg_file'), + ('py:func', 'distutils.dist.DistributionMetadata.read_pkg_file'), # undocumented ('py:func', 'distutils.file_util._copy_file_contents'), # undocumented ('py:func', 'distutils.log.debug'), # undocumented ('py:func', 'distutils.spawn.find_executable'), # undocumented diff --git a/mypy.ini b/mypy.ini index b6f972769e..e0fa8e5c47 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,5 +1,42 @@ [mypy] -ignore_missing_imports = True -# required to support namespace packages -# https://github.com/python/mypy/issues/14057 +# CI should test for all versions, local development gets hints for oldest supported +python_version = 3.8 +strict = False +warn_unused_ignores = True +# required to support namespace packages: https://github.com/python/mypy/issues/14057 explicit_package_bases = True +exclude = (?x)( + ^build/ + | ^.tox/ + | ^pkg_resources/tests/data/my-test-package-source/setup.py$ # Duplicate module name + | ^.+?/(_vendor|extern)/ # Vendored + | ^setuptools/_distutils/ # Vendored + | ^setuptools/config/_validate_pyproject/ # Auto-generated + ) +disable_error_code = + # TODO: Test environment is not yet properly configured to install all imported packages + # import-not-found, # This can be left commented out for local runs until we enforce running mypy in the CI + # TODO: Not all dependencies are typed. Namely: distutils._modified, wheel.wheelfile, and jaraco.* + import-untyped, + # Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes, + # w/o updating all the attributes and return types from the base classes for type-checkers to understand + # Especially with setuptools.dist.command vs distutils.dist.command vs setuptools._distutils.dist.command + # *.extern modules that actually live in *._vendor will also cause attr-defined issues on import + attr-defined, + +# Avoid raising issues when importing from "extern" modules, as those are added to path dynamically. +# https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 +[mypy-pkg_resources.extern.*,setuptools.extern.*] +ignore_missing_imports = True + +[mypy-pkg_resources.tests.*,setuptools.tests.*] +disable_error_code = + # Tests include creating dynamic modules that won't exists statically before the test is run. + # Let's ignore all "import-not-found", as if an import really wasn't found, then the test would fail. + import-not-found, + # mmany untyped "jaraco" modules + import-untyped, + +# Mypy issue, this vendored module is already excluded! 
+[mypy-setuptools._vendor.packaging._manylinux] +disable_error_code = import-not-found diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py index 0883642080..bfbf619c85 100644 --- a/pkg_resources/tests/test_pkg_resources.py +++ b/pkg_resources/tests/test_pkg_resources.py @@ -9,6 +9,7 @@ import stat import distutils.dist import distutils.command.install_egg_info +from typing import List from unittest import mock @@ -32,7 +33,7 @@ def __call__(self): class TestZipProvider: - finalizers = [] + finalizers: List[EggRemover] = [] ref_time = datetime.datetime(2013, 5, 12, 13, 25, 0) "A reference time for a file modification" diff --git a/setup.py b/setup.py index 075d7c405f..1a6074766a 100755 --- a/setup.py +++ b/setup.py @@ -88,5 +88,6 @@ def _restore_install_lib(self): if __name__ == '__main__': # allow setup.py to run from another directory - here and os.chdir(here) + # TODO: Use a proper conditonal statement here + here and os.chdir(here) # type: ignore[func-returns-value] dist = setuptools.setup(**setup_params) diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 563ca1c4ba..7c88c7e19b 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -3,6 +3,7 @@ import functools import os import re +from typing import TYPE_CHECKING import _distutils_hack.override # noqa: F401 import distutils.core @@ -105,8 +106,11 @@ def setup(**attrs): setup.__doc__ = distutils.core.setup.__doc__ - -_Command = monkey.get_unpatched(distutils.core.Command) +if TYPE_CHECKING: + # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962 + _Command = distutils.core.Command +else: + _Command = monkey.get_unpatched(distutils.core.Command) class Command(_Command): @@ -165,8 +169,9 @@ class Command(_Command): """ command_consumes_arguments = False + distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution - def __init__(self, dist, **kw): + def __init__(self, dist: Distribution, **kw): """ Construct the command for dist, updating vars(self) with any keyword parameters. 
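A note on the TYPE_CHECKING branch introduced above: mypy cannot subclass a
value typed as type[T] (python/mypy#10962), so the checker is shown the
concrete distutils class while the runtime keeps resolving through
monkey.get_unpatched(). The pattern in isolation, with stand-in names:

    from typing import TYPE_CHECKING

    def get_unpatched(cls):  # stand-in for setuptools.monkey.get_unpatched
        return cls

    class Base:
        pass

    if TYPE_CHECKING:
        _Base = Base                 # static view: a plain, subclassable class
    else:
        _Base = get_unpatched(Base)  # runtime view: opaque to the checker

    class Derived(_Base):
        pass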
diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py index 7b732b11ab..b0c2d7059a 100644 --- a/setuptools/command/_requirestxt.py +++ b/setuptools/command/_requirestxt.py @@ -35,7 +35,7 @@ def _prepare( def _convert_extras_requirements( - extras_require: _StrOrIter, + extras_require: Mapping[str, _StrOrIter], ) -> Mapping[str, _Ordered[Requirement]]: """ Convert requirements in `extras_require` of the form diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 780afe3aec..1301433a32 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -26,7 +26,9 @@ # make sure _config_vars is initialized get_config_var("LDSHARED") -from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa +# Not publicly exposed in typeshed distutils stubs, but this is done on purpose +# See https://github.com/pypa/setuptools/pull/4228#issuecomment-1959856400 +from distutils.sysconfig import _config_vars as _CONFIG_VARS # type: ignore # noqa def _customize_compiler_for_shlib(compiler): @@ -58,7 +60,7 @@ def _customize_compiler_for_shlib(compiler): use_stubs = True elif os.name != 'nt': try: - import dl + import dl # type: ignore[import-not-found] # https://github.com/python/mypy/issues/13002 use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') except ImportError: @@ -378,7 +380,10 @@ def _compile_and_remove_stub(self, stub_file: str): optimize = self.get_finalized_command('install_lib').optimize if optimize > 0: byte_compile( - [stub_file], optimize=optimize, force=True, dry_run=self.dry_run + [stub_file], + optimize=optimize, + force=True, + dry_run=self.dry_run, ) if os.path.exists(stub_file) and not self.dry_run: os.unlink(stub_file) diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py index f5061afaaf..52c0721903 100644 --- a/setuptools/command/dist_info.py +++ b/setuptools/command/dist_info.py @@ -9,8 +9,10 @@ from distutils import log from distutils.core import Command from pathlib import Path +from typing import cast from .. 
import _normalization +from .egg_info import egg_info as egg_info_cls class dist_info(Command): @@ -50,7 +52,7 @@ def finalize_options(self): project_dir = dist.src_root or os.curdir self.output_dir = Path(self.output_dir or project_dir) - egg_info = self.reinitialize_command("egg_info") + egg_info = cast(egg_info_cls, self.reinitialize_command("egg_info")) egg_info.egg_base = str(self.output_dir) if self.tag_date: diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index cc0c409123..402355bd81 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -25,6 +25,7 @@ from distutils.command import install import sys import os +from typing import Dict, List import zipimport import shutil import tempfile @@ -43,7 +44,6 @@ import configparser import sysconfig - from sysconfig import get_path from setuptools import Command @@ -1765,7 +1765,7 @@ def _wrap_lines(cls, lines): if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite': - PthDistributions = RewritePthDistributions + PthDistributions = RewritePthDistributions # type: ignore[misc] # Overwriting type def _first_line_re(): @@ -2015,7 +2015,7 @@ def is_python_script(script_text, filename): from os import chmod as _chmod except ImportError: # Jython compatibility - def _chmod(*args): + def _chmod(*args: object, **kwargs: object) -> None: # type: ignore[misc] # Mypy re-uses the imported definition anyway pass @@ -2033,8 +2033,8 @@ class CommandSpec(list): those passed to Popen. """ - options = [] - split_args = dict() + options: List[str] = [] + split_args: Dict[str, bool] = dict() @classmethod def best(cls): diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index a81fcd5bf9..24980edcf4 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -33,6 +33,7 @@ Protocol, Tuple, TypeVar, + cast, ) from .. 
import ( @@ -50,7 +51,12 @@ SetuptoolsDeprecationWarning, SetuptoolsWarning, ) +from .build import build as build_cls from .build_py import build_py as build_py_cls +from .dist_info import dist_info as dist_info_cls +from .egg_info import egg_info as egg_info_cls +from .install import install as install_cls +from .install_scripts import install_scripts as install_scripts_cls if TYPE_CHECKING: from wheel.wheelfile import WheelFile # noqa @@ -155,7 +161,7 @@ def run(self): def _ensure_dist_info(self): if self.dist_info_dir is None: - dist_info = self.reinitialize_command("dist_info") + dist_info = cast(dist_info_cls, self.reinitialize_command("dist_info")) dist_info.output_dir = self.dist_dir dist_info.ensure_finalized() dist_info.run() @@ -202,12 +208,18 @@ def _configure_build( scripts = str(Path(unpacked_wheel, f"{name}.data", "scripts")) # egg-info may be generated again to create a manifest (used for package data) - egg_info = dist.reinitialize_command("egg_info", reinit_subcommands=True) + egg_info = cast( + egg_info_cls, dist.reinitialize_command("egg_info", reinit_subcommands=True) + ) egg_info.egg_base = str(tmp_dir) egg_info.ignore_egg_info_in_manifest = True - build = dist.reinitialize_command("build", reinit_subcommands=True) - install = dist.reinitialize_command("install", reinit_subcommands=True) + build = cast( + build_cls, dist.reinitialize_command("build", reinit_subcommands=True) + ) + install = cast( + install_cls, dist.reinitialize_command("install", reinit_subcommands=True) + ) build.build_platlib = build.build_purelib = build.build_lib = build_lib install.install_purelib = install.install_platlib = install.install_lib = wheel @@ -215,12 +227,14 @@ def _configure_build( install.install_headers = headers install.install_data = data - install_scripts = dist.get_command_obj("install_scripts") + install_scripts = cast( + install_scripts_cls, dist.get_command_obj("install_scripts") + ) install_scripts.no_ep = True build.build_temp = str(tmp_dir) - build_py = dist.get_command_obj("build_py") + build_py = cast(build_py_cls, dist.get_command_obj("build_py")) build_py.compile = False build_py.existing_egg_info_dir = self._find_egg_info_dir() @@ -233,6 +247,7 @@ def _set_editable_mode(self): """Set the ``editable_mode`` flag in the build sub-commands""" dist = self.distribution build = dist.get_command_obj("build") + # TODO: Update typeshed distutils stubs to overload non-None return type by default for cmd_name in build.get_sub_commands(): cmd = dist.get_command_obj(cmd_name) if hasattr(cmd, "editable_mode"): @@ -269,7 +284,7 @@ def _run_build_commands( self._run_install("data") return files, mapping - def _run_build_subcommands(self): + def _run_build_subcommands(self) -> None: """ Issue #3501 indicates that some plugins/customizations might rely on: @@ -283,7 +298,7 @@ def _run_build_subcommands(self): # TODO: Once plugins/customisations had the chance to catch up, replace # `self._run_build_subcommands()` with `self.run_command("build")`. # Also remove _safely_run, TestCustomBuildPy. Suggested date: Aug/2023. 
- build: Command = self.get_finalized_command("build") + build = self.get_finalized_command("build") for name in build.get_sub_commands(): cmd = self.get_finalized_command(name) if name == "build_py" and type(cmd) != build_py_cls: @@ -432,7 +447,8 @@ def __init__( ): self.auxiliary_dir = Path(auxiliary_dir) self.build_lib = Path(build_lib).resolve() - self._file = dist.get_command_obj("build_py").copy_file + # TODO: Update typeshed distutils stubs to overload non-None return type by default + self._file = dist.get_command_obj("build_py").copy_file # type: ignore[union-attr] super().__init__(dist, name, [self.auxiliary_dir]) def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]): @@ -450,7 +466,9 @@ def _create_file(self, relative_output: str, src_file: str, link=None): dest = self.auxiliary_dir / relative_output if not dest.parent.is_dir(): dest.parent.mkdir(parents=True) - self._file(src_file, dest, link=link) + # TODO: Update typeshed distutils stubs so distutils.cmd.Command.copy_file, accepts PathLike + # same with methods used by copy_file + self._file(src_file, dest, link=link) # type: ignore[arg-type] def _create_links(self, outputs, output_mapping): self.auxiliary_dir.mkdir(parents=True, exist_ok=True) @@ -603,7 +621,8 @@ def _simple_layout( layout = {pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages} if not layout: return set(package_dir) in ({}, {""}) - parent = os.path.commonpath(starmap(_parent_path, layout.items())) + # TODO: has been fixed upstream, waiting for new mypy release https://github.com/python/typeshed/pull/11310 + parent = os.path.commonpath(starmap(_parent_path, layout.items())) # type: ignore[call-overload] return all( _path.same_path(Path(parent, *key.split('.')), value) for key, value in layout.items() diff --git a/setuptools/command/install.py b/setuptools/command/install.py index b97a9b4713..56c1155b50 100644 --- a/setuptools/command/install.py +++ b/setuptools/command/install.py @@ -3,9 +3,11 @@ import glob import platform import distutils.command.install as orig +from typing import cast import setuptools from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning +from .bdist_egg import bdist_egg as bdist_egg_cls # Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for # now. 
See https://github.com/pypa/setuptools/issues/199/ @@ -135,7 +137,8 @@ def do_egg_install(self): cmd.package_index.scan(glob.glob('*.egg')) self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] + bdist_egg = cast(bdist_egg_cls, self.distribution.get_command_obj('bdist_egg')) + args = [bdist_egg.egg_output] if setuptools.bootstrap_install_from: # Bootstrap self-installation of setuptools diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py index cfb78ce52d..6f73721c70 100644 --- a/setuptools/command/rotate.py +++ b/setuptools/command/rotate.py @@ -3,6 +3,7 @@ from distutils.errors import DistutilsOptionError import os import shutil +from typing import List from setuptools import Command @@ -17,7 +18,7 @@ class rotate(Command): ('keep=', 'k', "number of matching distributions to keep"), ] - boolean_options = [] + boolean_options: List[str] = [] def initialize_options(self): self.match = None diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py index 32c9abd796..3fbbb62553 100644 --- a/setuptools/command/upload_docs.py +++ b/setuptools/command/upload_docs.py @@ -50,7 +50,7 @@ def has_sphinx(self): and metadata.entry_points(group='distutils.commands', name='build_sphinx') ) - sub_commands = [('build_sphinx', has_sphinx)] + sub_commands = [('build_sphinx', has_sphinx)] # type: ignore[list-item] # TODO: Fix in typeshed distutils stubs def initialize_options(self): upload.initialize_options(self) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py index 7301bc65c1..3626282a79 100644 --- a/setuptools/config/_apply_pyprojecttoml.py +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -34,6 +34,7 @@ from ..warnings import SetuptoolsWarning if TYPE_CHECKING: + from distutils.dist import _OptionsList from setuptools._importlib import metadata # noqa from setuptools.dist import Distribution # noqa @@ -293,7 +294,7 @@ def _normalise_cmd_option_key(name: str) -> str: return json_compatible_key(name).strip("_=") -def _normalise_cmd_options(desc: List[Tuple[str, Optional[str], str]]) -> Set[str]: +def _normalise_cmd_options(desc: "_OptionsList") -> Set[str]: return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc} diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index e23a762cf5..0d8d58add8 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -63,7 +63,7 @@ class StaticModule: """Proxy to a module object that avoids executing arbitrary code.""" def __init__(self, name: str, spec: ModuleSpec): - module = ast.parse(pathlib.Path(spec.origin).read_bytes()) + module = ast.parse(pathlib.Path(spec.origin).read_bytes()) # type: ignore[arg-type] # Let it raise an error on None vars(self).update(locals()) del self.self diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py index 5eb9421f1f..ff97679895 100644 --- a/setuptools/config/pyprojecttoml.py +++ b/setuptools/config/pyprojecttoml.py @@ -24,6 +24,7 @@ if TYPE_CHECKING: from setuptools.dist import Distribution # noqa + from typing_extensions import Self _logger = logging.getLogger(__name__) @@ -271,7 +272,7 @@ def _ensure_previously_set(self, dist: "Distribution", field: str): def _expand_directive( self, specifier: str, directive, package_dir: Mapping[str, str] ): - from setuptools.extern.more_itertools import always_iterable # type: ignore + from setuptools.extern.more_itertools import always_iterable with 
_ignore_errors(self.ignore_option_errors): root_dir = self.root_dir @@ -401,7 +402,7 @@ def __init__( self._project_cfg = project_cfg self._setuptools_cfg = setuptools_cfg - def __enter__(self): + def __enter__(self) -> "Self": """When entering the context, the values of ``packages``, ``py_modules`` and ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``. """ diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index cfa43a57b5..2912d3e143 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -108,7 +108,7 @@ def _apply( filenames = [*other_files, filepath] try: - _Distribution.parse_config_files(dist, filenames=filenames) + _Distribution.parse_config_files(dist, filenames=filenames) # type: ignore[arg-type] # TODO: fix in disutils stubs handlers = parse_configuration( dist, dist.command_options, ignore_option_errors=ignore_option_errors ) @@ -475,7 +475,7 @@ def parse_section(self, section_options): # Keep silent for a new option may appear anytime. self[name] = value - def parse(self): + def parse(self) -> None: """Parses configuration file items from one or more related sections. diff --git a/setuptools/dist.py b/setuptools/dist.py index d5787ed474..6350e38100 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -10,7 +10,7 @@ from contextlib import suppress from glob import iglob from pathlib import Path -from typing import List, Optional, Set +from typing import TYPE_CHECKING, Dict, List, MutableMapping, Optional, Set, Tuple import distutils.cmd import distutils.command @@ -202,7 +202,11 @@ def check_packages(dist, attr, value): ) -_Distribution = get_unpatched(distutils.core.Distribution) +if TYPE_CHECKING: + # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962 + _Distribution = distutils.core.Distribution +else: + _Distribution = get_unpatched(distutils.core.Distribution) class Distribution(_Distribution): @@ -283,12 +287,12 @@ def patch_missing_pkg_info(self, attrs): dist._version = _normalization.safe_version(str(attrs['version'])) self._patched_dist = dist - def __init__(self, attrs=None): + def __init__(self, attrs: Optional[MutableMapping] = None) -> None: have_package_data = hasattr(self, "package_data") if not have_package_data: - self.package_data = {} + self.package_data: Dict[str, List[str]] = {} attrs = attrs or {} - self.dist_files = [] + self.dist_files: List[Tuple[str, str, str]] = [] # Filter-out setuptools' specific options. 
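# A minimal sketch of the ``Self`` annotation adopted above for
# ``__enter__`` (imported from ``typing_extensions`` because it only joins
# the stdlib in Python 3.11): it lets type checkers give the ``as`` target
# of a ``with`` statement the concrete (sub)class type instead of a vague
# base type.
from typing_extensions import Self

class _Expander:
    def __enter__(self) -> Self:
        return self

    def __exit__(self, *exc) -> None:
        return None

with _Expander() as expander:
    pass  # ``expander`` is typed as _Expander (or the subclass in use)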
self.src_root = attrs.pop("src_root", None) self.patch_missing_pkg_info(attrs) @@ -381,7 +385,7 @@ def _normalize_requires(self): k: list(map(str, _reqs.parse(v or []))) for k, v in extras_require.items() } - def _finalize_license_files(self): + def _finalize_license_files(self) -> None: """Compute names of all license files which should be included.""" license_files: Optional[List[str]] = self.metadata.license_files patterns: List[str] = license_files if license_files else [] @@ -394,7 +398,7 @@ def _finalize_license_files(self): # Default patterns match the ones wheel uses # See https://wheel.readthedocs.io/en/stable/user_guide.html # -> 'Including license files in the generated wheel file' - patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*') + patterns = ['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*'] self.metadata.license_files = list( unique_everseen(self._expand_patterns(patterns)) diff --git a/setuptools/extension.py b/setuptools/extension.py index 58c023f6b4..8caad78d4b 100644 --- a/setuptools/extension.py +++ b/setuptools/extension.py @@ -3,6 +3,7 @@ import distutils.core import distutils.errors import distutils.extension +from typing import TYPE_CHECKING from .monkey import get_unpatched @@ -23,8 +24,11 @@ def _have_cython(): # for compatibility have_pyrex = _have_cython - -_Extension = get_unpatched(distutils.core.Extension) +if TYPE_CHECKING: + # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962 + _Extension = distutils.core.Extension +else: + _Extension = get_unpatched(distutils.core.Extension) class Extension(_Extension): diff --git a/setuptools/monkey.py b/setuptools/monkey.py index da0993506c..fd07d91dec 100644 --- a/setuptools/monkey.py +++ b/setuptools/monkey.py @@ -8,11 +8,14 @@ import sys import types from importlib import import_module +from typing import List, TypeVar import distutils.filelist -__all__ = [] +_T = TypeVar("_T") + +__all__: List[str] = [] """ Everything is private. Contact the project team if you think you need this functionality. 
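# The base-class workaround above, reduced to a self-contained sketch:
# mypy cannot use a dynamically computed ``type[T]`` as a base class
# (https://github.com/python/mypy/issues/10962), so the type checker is
# shown the vanilla distutils class, while the runtime keeps deriving from
# whatever unpatched original ``get_unpatched()`` finds.
import distutils.core
from typing import TYPE_CHECKING

from setuptools.monkey import get_unpatched

if TYPE_CHECKING:
    _Distribution = distutils.core.Distribution
else:
    _Distribution = get_unpatched(distutils.core.Distribution)

class Distribution(_Distribution):
    """Statically typed as deriving from distutils.core.Distribution."""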
@@ -33,7 +36,7 @@ def _get_mro(cls): return inspect.getmro(cls) -def get_unpatched(item): +def get_unpatched(item: _T) -> _T: lookup = ( get_unpatched_class if isinstance(item, type) diff --git a/setuptools/msvc.py b/setuptools/msvc.py index 53fe7b0de1..b2a0f2bebb 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -20,9 +20,11 @@ import itertools import subprocess import distutils.errors +from typing import Dict, TYPE_CHECKING from setuptools.extern.more_itertools import unique_everseen -if platform.system() == 'Windows': +# https://github.com/python/mypy/issues/8166 +if not TYPE_CHECKING and platform.system() == 'Windows': import winreg from os import environ else: @@ -34,7 +36,7 @@ class winreg: HKEY_LOCAL_MACHINE = None HKEY_CLASSES_ROOT = None - environ = dict() + environ: Dict[str, str] = dict() def _msvc14_find_vc2015(): diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index 7634b1320b..6c095e029e 100644 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -9,6 +9,7 @@ import pickle import textwrap import builtins +from typing import Union, List import pkg_resources from distutils.errors import DistutilsError @@ -19,7 +20,7 @@ else: _os = sys.modules[os.name] try: - _file = file + _file = file # type: ignore[name-defined] # Check for global variable except NameError: _file = None _open = open @@ -298,7 +299,7 @@ def run(self, func): with self: return func() - def _mk_dual_path_wrapper(name): + def _mk_dual_path_wrapper(name: str): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = getattr(_os, name) def wrap(self, src, dst, *args, **kw): @@ -312,7 +313,7 @@ def wrap(self, src, dst, *args, **kw): if hasattr(_os, name): locals()[name] = _mk_dual_path_wrapper(name) - def _mk_single_path_wrapper(name, original=None): + def _mk_single_path_wrapper(name: str, original=None): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = original or getattr(_os, name) def wrap(self, path, *args, **kw): @@ -349,7 +350,7 @@ def wrap(self, path, *args, **kw): if hasattr(_os, name): locals()[name] = _mk_single_path_wrapper(name) - def _mk_single_with_return(name): + def _mk_single_with_return(name: str): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = getattr(_os, name) def wrap(self, path, *args, **kw): @@ -364,7 +365,7 @@ def wrap(self, path, *args, **kw): if hasattr(_os, name): locals()[name] = _mk_single_with_return(name) - def _mk_query(name): + def _mk_query(name: str): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = getattr(_os, name) def wrap(self, *args, **kw): @@ -424,7 +425,7 @@ class DirectorySandbox(AbstractSandbox): "tempnam", ]) - _exception_patterns = [] + _exception_patterns: List[Union[str, re.Pattern]] = [] "exempt writing to paths that match the pattern" def __init__(self, sandbox, exceptions=_EXCEPTIONS): diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index e63a89b0b4..555489b140 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -9,6 +9,7 @@ import tarfile from inspect import cleandoc from pathlib import Path +from typing import Tuple from unittest.mock import Mock from zipfile import ZipFile @@ -455,7 +456,7 @@ def core_metadata(dist) -> str: # Make sure core metadata is valid Metadata.from_email(pkg_file_txt, validate=True) # can raise exceptions - skip_prefixes = () + skip_prefixes: Tuple[str, 
...] = () skip_lines = set() # ---- DIFF NORMALISATION ---- # PEP 621 is very particular about author/maintainer metadata conversion, so skip diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py index 3467a5ec07..17bf2af9d2 100644 --- a/setuptools/tests/integration/test_pip_install_sdist.py +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -1,3 +1,5 @@ +# https://github.com/python/mypy/issues/16936 +# mypy: disable-error-code="has-type" """Integration tests for setuptools that focus on building packages via pip. The idea behind these tests is not to exhaustively check all the possible @@ -25,10 +27,10 @@ from .helpers import Archive, run - pytestmark = pytest.mark.integration -(LATEST,) = Enum("v", "LATEST") + +(LATEST,) = Enum("v", "LATEST") # type: ignore[misc] # https://github.com/python/mypy/issues/16936 """Default version to be checked""" # There are positive and negative aspects of checking the latest version of the # packages. diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index 0e473d168b..12ed4d328c 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -47,7 +47,7 @@ def test_bdist_egg(self, setup_context, user_override): assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content) @pytest.mark.xfail( - os.environ.get('PYTHONDONTWRITEBYTECODE'), + os.environ.get('PYTHONDONTWRITEBYTECODE', False), reason="Byte code disabled", ) def test_exclude_source_files(self, setup_context, user_override): diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index 862f8172cd..df85699586 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -120,7 +120,7 @@ def editable_opts(request): @pytest.mark.parametrize( "files", [ - {**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB}, # type: ignore + {**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB}, EXAMPLE, # No setup.py script ], ) diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py index af7d2f8295..ba019dc79d 100644 --- a/setuptools/tests/test_egg_info.py +++ b/setuptools/tests/test_egg_info.py @@ -79,7 +79,8 @@ def run(): @staticmethod def _extract_mv_version(pkg_info_lines: List[str]) -> Tuple[int, int]: version_str = pkg_info_lines[0].split(' ')[1] - return tuple(map(int, version_str.split('.')[:2])) + major, minor = map(int, version_str.split('.')[:2]) + return major, minor def test_egg_info_save_version_info_setup_empty(self, tmpdir_cwd, env): """ diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py index fbd21b1976..16fa2c2460 100644 --- a/setuptools/tests/test_manifest.py +++ b/setuptools/tests/test_manifest.py @@ -10,6 +10,7 @@ import logging from distutils import log from distutils.errors import DistutilsTemplateError +from typing import List, Tuple from setuptools.command.egg_info import FileList, egg_info, translate_pattern from setuptools.dist import Distribution @@ -75,7 +76,7 @@ def touch(filename): ) -translate_specs = [ +translate_specs: List[Tuple[str, List[str], List[str]]] = [ ('foo', ['foo'], ['bar', 'foobar']), ('foo/bar', ['foo/bar'], ['foo/bar/baz', './foo/bar', 'foo']), # Glob matching From 66dfd2822665a60d88935933816de9e5261b012d Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 5 Mar 2024 12:02:15 +0000 Subject: [PATCH 0414/1761] Add link to `.gitignore` guidelines --- .gitignore | 1 + 1 file changed, 1 insertion(+) 
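# A recurring change in the patch above annotates otherwise-empty
# containers.  A minimal illustration of why: mypy cannot infer an element
# type for a bare ``[]`` or ``()``, so without the annotation later uses of
# the value fail to type-check (or trigger "Need type annotation" errors).
import re
from typing import List, Tuple, Union

boolean_options: List[str] = []  # as in setuptools/command/rotate.py
_exception_patterns: List[Union[str, re.Pattern]] = []  # as in sandbox.py
skip_prefixes: Tuple[str, ...] = ()  # as in test_apply_pyprojecttoml.py

boolean_options.append("keep")  # the element type is now known to be str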
diff --git a/.gitignore b/.gitignore index 311c9b269e..f25d073e24 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ # syntax: glob +# See https://blog.jaraco.com/skeleton/#ignoring-artifacts before modifying. bin build dist From 15f7ef76fdfc6e9993fe10b1a6e9b77f7676bc3a Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 5 Mar 2024 18:43:37 -0500 Subject: [PATCH 0415/1761] Apply suggestions from code review Co-authored-by: Anderson Bravalheri --- pkg_resources/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index bff48db73a..907a16da30 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -567,7 +567,7 @@ def get_resource_stream(self, manager, resource_name): `manager` must be an ``IResourceManager``""" def get_resource_string(self, manager, resource_name) -> bytes: - """Return a bytes string containing the contents of `resource_name` + """Return the contents of `resource_name` as :obj:`bytes` `manager` must be an ``IResourceManager``""" @@ -1204,7 +1204,7 @@ def resource_stream(self, package_or_requirement, resource_name): ) def resource_string(self, package_or_requirement, resource_name) -> bytes: - """Return specified resource as a bytes string""" + """Return specified resource as :obj:`bytes`""" return get_provider(package_or_requirement).get_resource_string( self, resource_name ) From a33c387d2c475ad2481a30e58b759c34297c4b84 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 6 Mar 2024 11:28:43 -0500 Subject: [PATCH 0416/1761] Enable mypy on CI --- mypy.ini | 37 +++++++------------ pyproject.toml | 3 -- setup.cfg | 4 ++ setuptools/command/build_ext.py | 2 +- setuptools/command/editable_wheel.py | 2 +- .../fastjsonschema_validations.py | 1 - 6 files changed, 20 insertions(+), 29 deletions(-) diff --git a/mypy.ini b/mypy.ini index e0fa8e5c47..42ade6537e 100644 --- a/mypy.ini +++ b/mypy.ini @@ -13,30 +13,21 @@ exclude = (?x)( | ^setuptools/_distutils/ # Vendored | ^setuptools/config/_validate_pyproject/ # Auto-generated ) -disable_error_code = - # TODO: Test environment is not yet properly configured to install all imported packages - # import-not-found, # This can be left commented out for local runs until we enforce running mypy in the CI - # TODO: Not all dependencies are typed. Namely: distutils._modified, wheel.wheelfile, and jaraco.* - import-untyped, - # Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes, - # w/o updating all the attributes and return types from the base classes for type-checkers to understand - # Especially with setuptools.dist.command vs distutils.dist.command vs setuptools._distutils.dist.command - # *.extern modules that actually live in *._vendor will also cause attr-defined issues on import - attr-defined, +# Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes, +# w/o updating all the attributes and return types from the base classes for type-checkers to understand +# Especially with setuptools.dist.command vs distutils.dist.command vs setuptools._distutils.dist.command +# *.extern modules that actually live in *._vendor will also cause attr-defined issues on import +disable_error_code = attr-defined -# Avoid raising issues when importing from "extern" modules, as those are added to path dynamically. 
-# https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 -[mypy-pkg_resources.extern.*,setuptools.extern.*] +# - Avoid raising issues when importing from "extern" modules, as those are added to path dynamically. +# https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 +# - distutils._modified has different errors on Python 3.8 [import-untyped], on Python 3.9+ [import-not-found] +# - All jaraco modules are still untyped +[mypy-pkg_resources.extern.*,setuptools.extern.*,distutils._modified,jaraco.*] ignore_missing_imports = True -[mypy-pkg_resources.tests.*,setuptools.tests.*] -disable_error_code = - # Tests include creating dynamic modules that won't exists statically before the test is run. - # Let's ignore all "import-not-found", as if an import really wasn't found, then the test would fail. - import-not-found, - # mmany untyped "jaraco" modules - import-untyped, - -# Mypy issue, this vendored module is already excluded! -[mypy-setuptools._vendor.packaging._manylinux] +# - pkg_resources tests create modules that won't exists statically before the test is run. +# Let's ignore all "import-not-found" since, if an import really wasn't found, then the test would fail. +# - setuptools._vendor.packaging._manylinux: Mypy issue, this vendored module is already excluded! +[mypy-pkg_resources.tests.*,setuptools._vendor.packaging._manylinux] disable_error_code = import-not-found diff --git a/pyproject.toml b/pyproject.toml index cd95aad07f..58aacd9fe3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,6 +4,3 @@ build-backend = "setuptools.build_meta" backend-path = ["."] [tool.setuptools_scm] - -[tool.pytest-enabler.mypy] -# disabled diff --git a/setup.cfg b/setup.cfg index 91fb67724c..4d1155e884 100644 --- a/setup.cfg +++ b/setup.cfg @@ -73,6 +73,10 @@ testing = # for tools/finalize.py jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin" pytest-home >= 0.5 + # No Python 3.11 dependencies require tomli, but needed for type-checking since we import it directly + tomli + # No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly + importlib_metadata testing-integration = pytest diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 1301433a32..b5c98c86dc 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -16,7 +16,7 @@ try: # Attempt to use Cython for building extensions, if available - from Cython.Distutils.build_ext import build_ext as _build_ext + from Cython.Distutils.build_ext import build_ext as _build_ext # type: ignore[import-not-found] # Cython not installed on CI tests # Additionally, assert that the compiler module will load # also. Ref #1229. 
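# A hedged sketch of the optional-dependency import that the ignore comment
# above guards in build_ext.py, assuming the usual distutils fallback: the
# ``type: ignore[import-not-found]`` silences only the error mypy raises
# because Cython is absent from the type-checking environment, while the
# runtime behaviour (fall back to the distutils implementation) is
# unchanged.
from distutils.command.build_ext import build_ext as _du_build_ext

try:
    from Cython.Distutils.build_ext import build_ext as _build_ext  # type: ignore[import-not-found]
except ImportError:
    _build_ext = _du_build_ext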
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 24980edcf4..9d319398c9 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -59,7 +59,7 @@ from .install_scripts import install_scripts as install_scripts_cls if TYPE_CHECKING: - from wheel.wheelfile import WheelFile # noqa + from wheel.wheelfile import WheelFile # type:ignore[import-untyped] # noqa _P = TypeVar("_P", bound=StrPath) _logger = logging.getLogger(__name__) diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py index b81d13c119..8b852bbfd4 100644 --- a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py +++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py @@ -1,5 +1,4 @@ # noqa -# type: ignore # flake8: noqa # pylint: skip-file # mypy: ignore-errors From ba0917602dacc86fd725cecd2182abc4837a30e8 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 6 Mar 2024 12:31:31 -0500 Subject: [PATCH 0417/1761] `pkg_resources`: Remove unused and shadowed variables --- newsfragments/4260.misc.rst | 1 + pkg_resources/__init__.py | 15 --------------- 2 files changed, 1 insertion(+), 15 deletions(-) create mode 100644 newsfragments/4260.misc.rst diff --git a/newsfragments/4260.misc.rst b/newsfragments/4260.misc.rst new file mode 100644 index 0000000000..9dfde3498d --- /dev/null +++ b/newsfragments/4260.misc.rst @@ -0,0 +1 @@ +Remove unused ``resources_stream`` ``resource_dir`` and shadowed functions from `pkg_resources` -- by :user:`Avasam` diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index d0b9b5469a..163a5521d6 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -85,9 +85,7 @@ require = None working_set = None add_activation_listener = None -resources_stream = None cleanup_resources = None -resource_dir = None resource_stream = None set_extraction_path = None resource_isdir = None @@ -491,19 +489,6 @@ def compatible_platforms(provided, required): return False -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - - -# backward compatibility -run_main = run_script - - def get_distribution(dist): """Return a current distribution object for a Requirement or string""" if isinstance(dist, str): From fb90dc349e7434f0603a17176b48588ca74b870e Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 6 Mar 2024 14:32:58 -0500 Subject: [PATCH 0418/1761] Runtime fixes to make typing annotations work --- pkg_resources/__init__.py | 32 +++++++++++++++----------------- 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 163a5521d6..002ea4d2f9 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -27,7 +27,7 @@ import time import re import types -from typing import List, Protocol +from typing import Dict, List, Protocol import zipfile import zipimport import warnings @@ -920,10 +920,10 @@ def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True) # success, no need to try any more versions of this project break - distributions = list(distributions) - distributions.sort() + sorted_distributions = list(distributions) + sorted_distributions.sort() - return distributions, error_info + return 
sorted_distributions, error_info def require(self, *requirements): """Ensure that distributions matching `requirements` are activated @@ -1636,7 +1636,7 @@ def _validate_resource_path(path): ) def _get(self, path) -> bytes: - if hasattr(self.loader, 'get_data'): + if self.loader and hasattr(self.loader, 'get_data'): return self.loader.get_data(path) raise NotImplementedError( "Can't perform this operation for loaders without 'get_data()'" @@ -2492,7 +2492,7 @@ def resolve(self): raise ImportError(str(exc)) from exc def require(self, env=None, installer=None): - if self.extras and not self.dist: + if not self.dist: raise UnknownExtra("Can't require() without a distribution", self) # Get the requirements for this entry point with all its extras and @@ -2559,11 +2559,11 @@ def parse_group(cls, group, lines, dist=None): def parse_map(cls, data, dist=None): """Parse a map of entry point groups""" if isinstance(data, dict): - data = data.items() + _data = data.items() else: - data = split_sections(data) - maps = {} - for group, lines in data: + _data = split_sections(data) + maps: Dict[str, Dict[str, "EntryPoint"]] = {} + for group, lines in _data: if group is None: if not lines: continue @@ -2825,7 +2825,7 @@ def activate(self, path=None, replace=False): if path is None: path = sys.path self.insert_on(path, replace=replace) - if path is sys.path: + if path is sys.path and self.location: fixup_namespace_packages(self.location) for pkg in self._get_metadata('namespace_packages.txt'): if pkg in sys.modules: @@ -2893,15 +2893,13 @@ def load_entry_point(self, group, name): def get_entry_map(self, group=None): """Return the entry point map for `group`, or the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = self._ep_map = EntryPoint.parse_map( + if not hasattr(self, "_ep_map"): + self._ep_map = EntryPoint.parse_map( self._get_metadata('entry_points.txt'), self ) if group is not None: - return ep_map.get(group, {}) - return ep_map + return self._ep_map.get(group, {}) + return self._ep_map def get_entry_info(self, group, name): """Return the EntryPoint object for `group`+`name`, or ``None``""" From 50221262fe31326c8fa6458e15316a69f3ce40ac Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 15:27:28 +0000 Subject: [PATCH 0419/1761] Add missing ``encoding`` to setuptools tests --- setuptools/tests/config/test_pyprojecttoml.py | 2 +- setuptools/tests/test_build_meta.py | 8 ++-- setuptools/tests/test_build_py.py | 10 ++--- setuptools/tests/test_config_discovery.py | 2 +- setuptools/tests/test_easy_install.py | 16 ++++---- setuptools/tests/test_egg_info.py | 38 +++++++++---------- setuptools/tests/test_find_packages.py | 3 +- setuptools/tests/test_install_scripts.py | 8 ++-- setuptools/tests/test_manifest.py | 8 ++-- setuptools/tests/test_sandbox.py | 4 +- setuptools/tests/test_sdist.py | 22 +++++------ 11 files changed, 59 insertions(+), 62 deletions(-) diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py index 6a40f3bfd7..dee2ecc7ab 100644 --- a/setuptools/tests/config/test_pyprojecttoml.py +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -174,7 +174,7 @@ class TestEntryPoints: def write_entry_points(self, tmp_path): entry_points = ConfigParser() entry_points.read_dict(ENTRY_POINTS) - with open(tmp_path / "entry-points.txt", "w") as f: + with open(tmp_path / "entry-points.txt", "w", encoding="utf-8") as f: entry_points.write(f) def pyproject(self, dynamic=None): diff --git 
a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 9332781764..32676aee79 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -296,7 +296,7 @@ def test_build_with_existing_file_present(self, build_type, tmpdir_cwd): first_result = build_method(dist_dir) # Change version. - with open("VERSION", "wt") as version_file: + with open("VERSION", "wt", encoding="utf-8") as version_file: version_file.write("0.0.2") # Build a *second* sdist/wheel. @@ -306,7 +306,7 @@ def test_build_with_existing_file_present(self, build_type, tmpdir_cwd): assert first_result != second_result # And if rebuilding the exact same sdist/wheel? - open(os.path.join(dist_dir, second_result), 'w').close() + open(os.path.join(dist_dir, second_result), 'w', encoding="utf-8").close() third_result = build_method(dist_dir) assert third_result == second_result assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0 @@ -568,9 +568,9 @@ def test_build_sdist_version_change(self, build_backend): if not os.path.exists(setup_loc): setup_loc = os.path.abspath("setup.cfg") - with open(setup_loc, 'rt') as file_handler: + with open(setup_loc, 'rt', encoding="utf-8") as file_handler: content = file_handler.read() - with open(setup_loc, 'wt') as file_handler: + with open(setup_loc, 'wt', encoding="utf-8") as file_handler: file_handler.write(content.replace("version='0.0.0'", "version='0.0.1'")) shutil.rmtree(sdist_into_directory) diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index 500a9ab6f3..d79cbbdf0c 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -49,7 +49,7 @@ def test_recursive_in_package_data_glob(tmpdir_cwd): ) ) os.makedirs('path/subpath/subsubpath') - open('path/subpath/subsubpath/data', 'w').close() + open('path/subpath/subsubpath/data', 'w', encoding="utf-8").close() dist.parse_command_line() dist.run_commands() @@ -77,8 +77,8 @@ def test_read_only(tmpdir_cwd): ) ) os.makedirs('pkg') - open('pkg/__init__.py', 'w').close() - open('pkg/data.dat', 'w').close() + open('pkg/__init__.py', 'w', encoding="utf-8").close() + open('pkg/data.dat', 'w', encoding="utf-8").close() os.chmod('pkg/__init__.py', stat.S_IREAD) os.chmod('pkg/data.dat', stat.S_IREAD) dist.parse_command_line() @@ -108,8 +108,8 @@ def test_executable_data(tmpdir_cwd): ) ) os.makedirs('pkg') - open('pkg/__init__.py', 'w').close() - open('pkg/run-me', 'w').close() + open('pkg/__init__.py', 'w', encoding="utf-8").close() + open('pkg/run-me', 'w', encoding="utf-8").close() os.chmod('pkg/run-me', 0o700) dist.parse_command_line() diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py index ef2979d4f5..409c219ed5 100644 --- a/setuptools/tests/test_config_discovery.py +++ b/setuptools/tests/test_config_discovery.py @@ -591,7 +591,7 @@ def _write_setupcfg(root, options): setupcfg["options"][key] = "\n" + str_value else: setupcfg["options"][key] = str(value) - with open(root / "setup.cfg", "w") as f: + with open(root / "setup.cfg", "w", encoding="utf-8") as f: setupcfg.write(f) print("~~~~~ setup.cfg ~~~~~") print((root / "setup.cfg").read_text()) diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index a001ae27c5..26778e65ec 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -467,7 +467,7 @@ def distutils_package(): 'from distutils.core import setup', ) with contexts.tempdir(cd=os.chdir): - 
with open('setup.py', 'w') as f: + with open('setup.py', 'w', encoding="utf-8") as f: f.write(distutils_setup_py) yield @@ -784,7 +784,7 @@ def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch): setup_attrs=dict(dependency_links=[]), ) test_setup_cfg = os.path.join(test_pkg, 'setup.cfg') - with open(test_setup_cfg, 'w') as fp: + with open(test_setup_cfg, 'w', encoding="utf-8") as fp: fp.write( DALS( """ @@ -918,7 +918,7 @@ def test_setup_requires_with_find_links_in_setup_cfg( test_setup_py = os.path.join(test_pkg, 'setup.py') test_setup_cfg = os.path.join(test_pkg, 'setup.cfg') os.mkdir(test_pkg) - with open(test_setup_py, 'w') as fp: + with open(test_setup_py, 'w', encoding="utf-8") as fp: if with_dependency_links_in_setup_py: dependency_links = [os.path.join(temp_dir, 'links')] else: @@ -932,7 +932,7 @@ def test_setup_requires_with_find_links_in_setup_cfg( """ ).format(dependency_links=dependency_links) ) - with open(test_setup_cfg, 'w') as fp: + with open(test_setup_cfg, 'w', encoding="utf-8") as fp: fp.write( DALS( """ @@ -984,7 +984,7 @@ def test_setup_requires_with_transitive_extra_dependency(self, monkeypatch): test_pkg = os.path.join(temp_dir, 'test_pkg') test_setup_py = os.path.join(test_pkg, 'setup.py') os.mkdir(test_pkg) - with open(test_setup_py, 'w') as fp: + with open(test_setup_py, 'w', encoding="utf-8") as fp: fp.write( DALS( """ @@ -1068,7 +1068,7 @@ class epcmd(build_py): test_pkg = os.path.join(temp_dir, 'test_pkg') test_setup_py = os.path.join(test_pkg, 'setup.py') os.mkdir(test_pkg) - with open(test_setup_py, 'w') as fp: + with open(test_setup_py, 'w', encoding="utf-8") as fp: fp.write( DALS( """ @@ -1244,7 +1244,7 @@ def create_setup_requires_package( ) else: test_setup_cfg_contents = '' - with open(os.path.join(test_pkg, 'setup.cfg'), 'w') as f: + with open(os.path.join(test_pkg, 'setup.cfg'), 'w', encoding="utf-8") as f: f.write(test_setup_cfg_contents) # setup.py @@ -1255,7 +1255,7 @@ def create_setup_requires_package( setuptools.setup(**%r) """ ) - with open(os.path.join(test_pkg, 'setup.py'), 'w') as f: + with open(os.path.join(test_pkg, 'setup.py'), 'w', encoding="utf-8") as f: f.write(setup_py_template % test_setup_attrs) foobar_path = os.path.join(path, '%s-%s.tar.gz' % (distname, version)) diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py index ba019dc79d..215cb096fc 100644 --- a/setuptools/tests/test_egg_info.py +++ b/setuptools/tests/test_egg_info.py @@ -94,7 +94,7 @@ def test_egg_info_save_version_info_setup_empty(self, tmpdir_cwd, env): ei.initialize_options() ei.save_version_info(setup_cfg) - with open(setup_cfg, 'r') as f: + with open(setup_cfg, 'r', encoding="utf-8") as f: content = f.read() assert '[egg_info]' in content @@ -139,7 +139,7 @@ def test_egg_info_save_version_info_setup_defaults(self, tmpdir_cwd, env): ei.initialize_options() ei.save_version_info(setup_cfg) - with open(setup_cfg, 'r') as f: + with open(setup_cfg, 'r', encoding="utf-8") as f: content = f.read() assert '[egg_info]' in content @@ -251,7 +251,7 @@ def test_manifest_template_is_read(self, tmpdir_cwd, env): self._run_egg_info_command(tmpdir_cwd, env) egg_info_dir = os.path.join('.', 'foo.egg-info') sources_txt = os.path.join(egg_info_dir, 'SOURCES.txt') - with open(sources_txt) as f: + with open(sources_txt, encoding="utf-8") as f: assert 'docs/usage.rst' in f.read().split('\n') def _setup_script_with_requires(self, requires, use_setup_cfg=False): @@ -492,7 +492,7 @@ def test_requires( egg_info_dir = os.path.join('.', 
'foo.egg-info') requires_txt = os.path.join(egg_info_dir, 'requires.txt') if os.path.exists(requires_txt): - with open(requires_txt) as fp: + with open(requires_txt, encoding="utf-8") as fp: install_requires = fp.read() else: install_requires = '' @@ -538,7 +538,7 @@ def test_provides_extra(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') assert 'Provides-Extra: foobar' in pkg_info_lines assert 'Metadata-Version: 2.1' in pkg_info_lines @@ -557,7 +557,7 @@ def test_doesnt_provides_extra(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_text = pkginfo_file.read() assert 'Provides-Extra:' not in pkg_info_text @@ -636,7 +636,7 @@ def test_setup_cfg_license_file(self, tmpdir_cwd, env, files, license_in_sources ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file: + with open(os.path.join(egg_info_dir, 'SOURCES.txt'), encoding="utf-8") as sources_file: sources_text = sources_file.read() if license_in_sources: @@ -849,7 +849,7 @@ def test_setup_cfg_license_files( ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file: + with open(os.path.join(egg_info_dir, 'SOURCES.txt'), encoding="utf-8") as sources_file: sources_lines = list(line.strip() for line in sources_file) for lf in incl_licenses: @@ -1033,7 +1033,7 @@ def test_setup_cfg_license_file_license_files( ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file: + with open(os.path.join(egg_info_dir, 'SOURCES.txt'), encoding="utf-8") as sources_file: sources_lines = list(line.strip() for line in sources_file) for lf in incl_licenses: @@ -1065,7 +1065,7 @@ def test_license_file_attr_pkg_info(self, tmpdir_cwd, env): pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') license_file_lines = [ line for line in pkg_info_lines if line.startswith('License-File:') @@ -1086,7 +1086,7 @@ def test_metadata_version(self, tmpdir_cwd, env): data_stream=1, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') # Update metadata version if changed assert self._extract_mv_version(pkg_info_lines) == (2, 1) @@ -1112,7 +1112,7 @@ def test_long_description_content_type(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') expected_line = 'Description-Content-Type: text/markdown' assert expected_line in pkg_info_lines @@ -1133,7 +1133,7 @@ 
def test_long_description(self, tmpdir_cwd, env): data_stream=1, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') assert 'Metadata-Version: 2.1' in pkg_info_lines assert '' == pkg_info_lines[-1] # last line should be empty @@ -1165,7 +1165,7 @@ def test_project_urls(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') expected_line = 'Project-URL: Link One, https://example.com/one/' assert expected_line in pkg_info_lines @@ -1182,7 +1182,7 @@ def test_license(self, tmpdir_cwd, env): data_stream=1, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') assert 'License: MIT' in pkg_info_lines @@ -1197,7 +1197,7 @@ def test_license_escape(self, tmpdir_cwd, env): data_stream=1, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') assert 'License: This is a long license text ' in pkg_info_lines @@ -1216,7 +1216,7 @@ def test_python_requires_egg_info(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') assert 'Requires-Python: >=2.7.12' in pkg_info_lines assert self._extract_mv_version(pkg_info_lines) >= (1, 2) @@ -1240,7 +1240,7 @@ def test_egg_info_includes_setup_py(self, tmpdir_cwd): assert 'setup.py' in egg_info_instance.filelist.files - with open(egg_info_instance.egg_info + "/SOURCES.txt") as f: + with open(egg_info_instance.egg_info + "/SOURCES.txt", encoding="utf-8") as f: sources = f.read().split('\n') assert 'setup.py' in sources @@ -1277,7 +1277,7 @@ def test_egg_info_tag_only_once(self, tmpdir_cwd, env): }) self._run_egg_info_command(tmpdir_cwd, env) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: pkg_info_lines = pkginfo_file.read().split('\n') assert 'Version: 0.0.0.dev0' in pkg_info_lines diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py index cb1900df3d..4fefd3dccf 100644 --- a/setuptools/tests/test_find_packages.py +++ b/setuptools/tests/test_find_packages.py @@ -72,8 +72,7 @@ def _mkdir(self, path, parent_dir=None): def _touch(self, path, dir_=None): if dir_: path = os.path.join(dir_, path) - fp = open(path, 'w') - fp.close() + open(path, 'wb').close() return path def test_regular_package(self): diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py index a783459730..595b7ade67 100644 --- a/setuptools/tests/test_install_scripts.py +++ 
b/setuptools/tests/test_install_scripts.py @@ -41,7 +41,7 @@ def test_sys_executable_escaping_unix(self, tmpdir, monkeypatch): monkeypatch.setattr('sys.executable', self.unix_exe) with tmpdir.as_cwd(): self._run_install_scripts(str(tmpdir)) - with open(str(tmpdir.join('foo')), 'r') as f: + with open(str(tmpdir.join('foo')), 'r', encoding="utf-8") as f: actual = f.readline() assert actual == expected @@ -55,7 +55,7 @@ def test_sys_executable_escaping_win32(self, tmpdir, monkeypatch): monkeypatch.setattr('sys.executable', self.win32_exe) with tmpdir.as_cwd(): self._run_install_scripts(str(tmpdir)) - with open(str(tmpdir.join('foo-script.py')), 'r') as f: + with open(str(tmpdir.join('foo-script.py')), 'r', encoding="utf-8") as f: actual = f.readline() assert actual == expected @@ -69,7 +69,7 @@ def test_executable_with_spaces_escaping_unix(self, tmpdir): expected = '#!%s\n' % self.unix_spaces_exe with tmpdir.as_cwd(): self._run_install_scripts(str(tmpdir), self.unix_spaces_exe) - with open(str(tmpdir.join('foo')), 'r') as f: + with open(str(tmpdir.join('foo')), 'r', encoding="utf-8") as f: actual = f.readline() assert actual == expected @@ -83,6 +83,6 @@ def test_executable_arg_escaping_win32(self, tmpdir): expected = '#!"%s"\n' % self.win32_exe with tmpdir.as_cwd(): self._run_install_scripts(str(tmpdir), '"' + self.win32_exe + '"') - with open(str(tmpdir.join('foo-script.py')), 'r') as f: + with open(str(tmpdir.join('foo-script.py')), 'r', encoding="utf-8") as f: actual = f.readline() assert actual == expected diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py index 16fa2c2460..6fb824f534 100644 --- a/setuptools/tests/test_manifest.py +++ b/setuptools/tests/test_manifest.py @@ -54,7 +54,7 @@ def quiet(): def touch(filename): - open(filename, 'w').close() + open(filename, 'wb').close() # The set of files always in the manifest, including all files in the @@ -174,7 +174,7 @@ class TestManifestTest(TempDirTestCase): def setup_method(self, method): super().setup_method(method) - f = open(os.path.join(self.temp_dir, 'setup.py'), 'w') + f = open(os.path.join(self.temp_dir, 'setup.py'), 'w', encoding="utf-8") f.write(SETUP_PY) f.close() """ @@ -212,7 +212,7 @@ def setup_method(self, method): def make_manifest(self, contents): """Write a MANIFEST.in.""" - with open(os.path.join(self.temp_dir, 'MANIFEST.in'), 'w') as f: + with open(os.path.join(self.temp_dir, 'MANIFEST.in'), 'w', encoding="utf-8") as f: f.write(DALS(contents)) def get_files(self): @@ -369,7 +369,7 @@ def make_files(self, files): file = os.path.join(self.temp_dir, file) dirname, basename = os.path.split(file) os.makedirs(dirname, exist_ok=True) - open(file, 'w').close() + touch(file) def test_process_template_line(self): # testing all MANIFEST.in template patterns diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py index 9b4937e213..898661acf0 100644 --- a/setuptools/tests/test_sandbox.py +++ b/setuptools/tests/test_sandbox.py @@ -17,7 +17,7 @@ def test_devnull(self, tmpdir): @staticmethod def _file_writer(path): def do_write(): - with open(path, 'w') as f: + with open(path, 'w', encoding="utf-8") as f: f.write('xxx') return do_write @@ -114,7 +114,7 @@ def test_sandbox_violation_raised_hiding_setuptools(self, tmpdir): def write_file(): "Trigger a SandboxViolation by writing outside the sandbox" - with open('/etc/foo', 'w'): + with open('/etc/foo', 'w', encoding="utf-8"): pass with pytest.raises(setuptools.sandbox.SandboxViolation) as caught: diff --git 
a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py index 5d597709ed..6eccb35826 100644 --- a/setuptools/tests/test_sdist.py +++ b/setuptools/tests/test_sdist.py @@ -116,10 +116,8 @@ def latin1_fail(): def touch(path): - if isinstance(path, str): - path = Path(path) - path.write_text('', encoding='utf-8') - return path + open(path, 'wb').close() + return Path(path) def symlink_or_skip_test(src, dst): @@ -386,7 +384,7 @@ def test_setup_py_missing(self): assert 'setup.py' not in manifest def test_setup_py_excluded(self): - with open("MANIFEST.in", "w") as manifest_file: + with open("MANIFEST.in", "w", encoding="utf-8") as manifest_file: manifest_file.write("exclude setup.py") dist = Distribution(SETUP_ATTRS) @@ -441,7 +439,7 @@ def test_manifest_is_written_with_utf8_encoding(self): filename = os.path.join('sdist_test', 'smörbröd.py') # Must create the file or it will get stripped. - open(filename, 'w').close() + touch(filename) # Add UTF-8 filename and write manifest with quiet(): @@ -469,7 +467,7 @@ def test_write_manifest_allows_utf8_filenames(self): filename = os.path.join(b'sdist_test', Filenames.utf_8) # Must touch the file or risk removal - open(filename, "w").close() + touch(filename) # Add filename and write manifest with quiet(): @@ -546,7 +544,7 @@ def test_manifest_is_read_with_utf8_encoding(self): manifest.close() # The file must exist to be included in the filelist - open(filename, 'w').close() + touch(filename) # Re-read manifest cmd.filelist.files = [] @@ -577,7 +575,7 @@ def test_read_manifest_skips_non_utf8_filenames(self): manifest.close() # The file must exist to be included in the filelist - open(filename, 'w').close() + touch(filename) # Re-read manifest cmd.filelist.files = [] @@ -598,7 +596,7 @@ def test_sdist_with_utf8_encoded_filename(self): cmd.ensure_finalized() filename = os.path.join(b'sdist_test', Filenames.utf_8) - open(filename, 'w').close() + touch(filename) with quiet(): cmd.run() @@ -639,7 +637,7 @@ def test_sdist_with_latin1_encoded_filename(self): # Latin-1 filename filename = os.path.join(b'sdist_test', Filenames.latin_1) - open(filename, 'w').close() + touch(filename) assert os.path.isfile(filename) with quiet(): @@ -736,7 +734,7 @@ def test_pyproject_toml_excluded(self, source_dir): Check that pyproject.toml can excluded even if present """ touch(source_dir / 'pyproject.toml') - with open('MANIFEST.in', 'w') as mts: + with open('MANIFEST.in', 'w', encoding="utf-8") as mts: print('exclude pyproject.toml', file=mts) dist = Distribution(SETUP_ATTRS) dist.script_name = 'setup.py' From dea08d77d908e5e8ad54ecf531dd78fc7f667fb2 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 15:39:54 +0000 Subject: [PATCH 0420/1761] Fix formatting errors --- setuptools/tests/test_egg_info.py | 55 +++++++++++++++---------------- setuptools/tests/test_manifest.py | 3 +- 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py index 215cb096fc..a4b0ecf398 100644 --- a/setuptools/tests/test_egg_info.py +++ b/setuptools/tests/test_egg_info.py @@ -538,8 +538,8 @@ def test_provides_extra(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') assert 'Provides-Extra: foobar' in 
pkg_info_lines assert 'Metadata-Version: 2.1' in pkg_info_lines @@ -557,8 +557,8 @@ def test_doesnt_provides_extra(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_text = pkginfo_file.read() + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_text = fp.read() assert 'Provides-Extra:' not in pkg_info_text @pytest.mark.parametrize( @@ -636,8 +636,7 @@ def test_setup_cfg_license_file(self, tmpdir_cwd, env, files, license_in_sources ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'SOURCES.txt'), encoding="utf-8") as sources_file: - sources_text = sources_file.read() + sources_text = Path(egg_info_dir, "SOURCES.txt").read_text(encoding="utf-8") if license_in_sources: assert 'LICENSE' in sources_text @@ -849,8 +848,8 @@ def test_setup_cfg_license_files( ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'SOURCES.txt'), encoding="utf-8") as sources_file: - sources_lines = list(line.strip() for line in sources_file) + sources_text = Path(egg_info_dir, "SOURCES.txt").read_text(encoding="utf-8") + sources_lines = [line.strip() for line in sources_text.splitlines()] for lf in incl_licenses: assert sources_lines.count(lf) == 1 @@ -1033,8 +1032,8 @@ def test_setup_cfg_license_file_license_files( ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'SOURCES.txt'), encoding="utf-8") as sources_file: - sources_lines = list(line.strip() for line in sources_file) + sources_text = Path(egg_info_dir, "SOURCES.txt").read_text(encoding="utf-8") + sources_lines = [line.strip() for line in sources_text.splitlines()] for lf in incl_licenses: assert sources_lines.count(lf) == 1 @@ -1065,8 +1064,8 @@ def test_license_file_attr_pkg_info(self, tmpdir_cwd, env): pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') license_file_lines = [ line for line in pkg_info_lines if line.startswith('License-File:') ] @@ -1086,8 +1085,8 @@ def test_metadata_version(self, tmpdir_cwd, env): data_stream=1, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') # Update metadata version if changed assert self._extract_mv_version(pkg_info_lines) == (2, 1) @@ -1112,8 +1111,8 @@ def test_long_description_content_type(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') expected_line = 'Description-Content-Type: text/markdown' assert expected_line in pkg_info_lines assert 'Metadata-Version: 2.1' in pkg_info_lines @@ -1133,8 +1132,8 @@ def test_long_description(self, tmpdir_cwd, env): data_stream=1, ) egg_info_dir 
= os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') assert 'Metadata-Version: 2.1' in pkg_info_lines assert '' == pkg_info_lines[-1] # last line should be empty long_desc_lines = pkg_info_lines[pkg_info_lines.index('') :] @@ -1165,8 +1164,8 @@ def test_project_urls(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') expected_line = 'Project-URL: Link One, https://example.com/one/' assert expected_line in pkg_info_lines expected_line = 'Project-URL: Link Two, https://example.com/two/' @@ -1182,8 +1181,8 @@ def test_license(self, tmpdir_cwd, env): data_stream=1, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') assert 'License: MIT' in pkg_info_lines def test_license_escape(self, tmpdir_cwd, env): @@ -1197,8 +1196,8 @@ def test_license_escape(self, tmpdir_cwd, env): data_stream=1, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') assert 'License: This is a long license text ' in pkg_info_lines assert ' over multiple lines' in pkg_info_lines @@ -1216,8 +1215,8 @@ def test_python_requires_egg_info(self, tmpdir_cwd, env): env=environ, ) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') assert 'Requires-Python: >=2.7.12' in pkg_info_lines assert self._extract_mv_version(pkg_info_lines) >= (1, 2) @@ -1277,8 +1276,8 @@ def test_egg_info_tag_only_once(self, tmpdir_cwd, env): }) self._run_egg_info_command(tmpdir_cwd, env) egg_info_dir = os.path.join('.', 'foo.egg-info') - with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as pkginfo_file: - pkg_info_lines = pkginfo_file.read().split('\n') + with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp: + pkg_info_lines = fp.read().split('\n') assert 'Version: 0.0.0.dev0' in pkg_info_lines diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py index 6fb824f534..6911b0224c 100644 --- a/setuptools/tests/test_manifest.py +++ b/setuptools/tests/test_manifest.py @@ -212,7 +212,8 @@ def setup_method(self, method): def make_manifest(self, contents): """Write a MANIFEST.in.""" - with open(os.path.join(self.temp_dir, 'MANIFEST.in'), 'w', encoding="utf-8") as f: + manifest = os.path.join(self.temp_dir, 'MANIFEST.in') + with open(manifest, 'w', encoding="utf-8") as f: f.write(DALS(contents)) def get_files(self): From 
96e1074df24400a11008ebe12cb575ebdec5c552 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 16:12:19 +0000 Subject: [PATCH 0421/1761] Adjust test_sandbox to encoding change --- setuptools/tests/test_sandbox.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py index 898661acf0..f666615d99 100644 --- a/setuptools/tests/test_sandbox.py +++ b/setuptools/tests/test_sandbox.py @@ -126,8 +126,9 @@ def write_file(): cmd, args, kwargs = caught.value.args assert cmd == 'open' assert args == ('/etc/foo', 'w') - assert kwargs == {} + assert kwargs == {"encoding": "utf-8"} msg = str(caught.value) assert 'open' in msg assert "('/etc/foo', 'w')" in msg + assert "{'encoding': 'utf-8'}" in msg From f51351e10400970454901328f87b8d41aac1bd80 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 17:26:31 +0000 Subject: [PATCH 0422/1761] Fix `pathlib.Path` error --- setuptools/tests/test_sdist.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py index 6eccb35826..387ec3bebf 100644 --- a/setuptools/tests/test_sdist.py +++ b/setuptools/tests/test_sdist.py @@ -10,7 +10,6 @@ import logging import distutils from inspect import cleandoc -from pathlib import Path from unittest import mock import pytest @@ -117,7 +116,7 @@ def latin1_fail(): def touch(path): open(path, 'wb').close() - return Path(path) + return path def symlink_or_skip_test(src, dst): From e1cd88c18900656030ac65bcfa5cb7214e4c03a5 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 18:21:24 +0000 Subject: [PATCH 0423/1761] Rework test_pyprojecttoml.create_example --- setuptools/tests/config/test_pyprojecttoml.py | 44 +++++++++++-------- 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py index dee2ecc7ab..a661b7fc2f 100644 --- a/setuptools/tests/config/test_pyprojecttoml.py +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -5,6 +5,7 @@ import pytest import tomli_w from path import Path +from jaraco.path import build as path_build from setuptools.config.pyprojecttoml import ( read_configuration, @@ -82,25 +83,32 @@ def create_example(path, pkg_root): - pyproject = path / "pyproject.toml" + files = { + "pyproject.toml": EXAMPLE, + "README.md": "hello world", + "_files": { + "file.txt": "", + }, + } + packages = { + "pkg": { + "__init__.py": "", + "mod.py": "class CustomSdist: pass", + "__version__.py": "VERSION = (3, 10)", + "__main__.py": "def exec(): print('hello')", + }, + } + + assert pkg_root # Meta-test: cannot be empty string. 
- files = [ - f"{pkg_root}/pkg/__init__.py", - "_files/file.txt", - ] - if pkg_root != ".": # flat-layout will raise error for multi-package dist - # Ensure namespaces are discovered - files.append(f"{pkg_root}/other/nested/__init__.py") - - for file in files: - (path / file).parent.mkdir(exist_ok=True, parents=True) - (path / file).touch() - - pyproject.write_text(EXAMPLE) - (path / "README.md").write_text("hello world") - (path / f"{pkg_root}/pkg/mod.py").write_text("class CustomSdist: pass") - (path / f"{pkg_root}/pkg/__version__.py").write_text("VERSION = (3, 10)") - (path / f"{pkg_root}/pkg/__main__.py").write_text("def exec(): print('hello')") + if pkg_root == ".": + files = {**files, **packages} + # skip other files: flat-layout will raise error for multi-package dist + else: + # Use this opportunity to ensure namespaces are discovered + files[pkg_root] = {**packages, "other": {"nested": {"__init__.py": ""}}} + + path_build(files, prefix=path) def verify_example(config, path, pkg_root): From 390f06a1629891aa1b4a495ba33f5300cf810f71 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 18:30:32 +0000 Subject: [PATCH 0424/1761] Add more explicit encodings in test_pyprojecttoml --- setuptools/tests/config/test_pyprojecttoml.py | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py index a661b7fc2f..b7a38f0a88 100644 --- a/setuptools/tests/config/test_pyprojecttoml.py +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -216,11 +216,13 @@ def test_dynamic(self, tmp_path): # Let's create a project example that has dynamic classifiers # coming from a txt file. create_example(tmp_path, "src") - classifiers = """\ - Framework :: Flask - Programming Language :: Haskell - """ - (tmp_path / "classifiers.txt").write_text(cleandoc(classifiers)) + classifiers = cleandoc( + """ + Framework :: Flask + Programming Language :: Haskell + """ + ) + (tmp_path / "classifiers.txt").write_text(classifiers, encoding="utf-8") pyproject = tmp_path / "pyproject.toml" config = read_configuration(pyproject, expand=False) @@ -248,7 +250,7 @@ def test_dynamic_without_config(self, tmp_path): """ pyproject = tmp_path / "pyproject.toml" - pyproject.write_text(cleandoc(config)) + pyproject.write_text(cleandoc(config), encoding="utf-8") with pytest.raises(OptionError, match="No configuration .* .classifiers."): read_configuration(pyproject) @@ -260,7 +262,7 @@ def test_dynamic_readme_from_setup_script_args(self, tmp_path): dynamic = ["readme"] """ pyproject = tmp_path / "pyproject.toml" - pyproject.write_text(cleandoc(config)) + pyproject.write_text(cleandoc(config), encoding="utf-8") dist = Distribution(attrs={"long_description": "42"}) # No error should occur because of missing `readme` dist = apply_configuration(dist, pyproject) @@ -278,7 +280,7 @@ def test_dynamic_without_file(self, tmp_path): """ pyproject = tmp_path / "pyproject.toml" - pyproject.write_text(cleandoc(config)) + pyproject.write_text(cleandoc(config), encoding="utf-8") with pytest.warns(UserWarning, match="File .*classifiers.txt. cannot be found"): expanded = read_configuration(pyproject) assert "classifiers" not in expanded["project"] @@ -299,7 +301,7 @@ def test_dynamic_without_file(self, tmp_path): ) def test_ignore_unrelated_config(tmp_path, example): pyproject = tmp_path / "pyproject.toml" - pyproject.write_text(cleandoc(example)) + pyproject.write_text(cleandoc(example), encoding="utf-8") # Make sure no error is raised due to 3rd party configs in pyproject.toml assert read_configuration(pyproject) is not None @@ -321,7 +323,7 @@ def test_ignore_unrelated_config(tmp_path, example): ) def test_invalid_example(tmp_path, example, error_msg): pyproject = tmp_path / "pyproject.toml" - pyproject.write_text(cleandoc(example)) + pyproject.write_text(cleandoc(example), encoding="utf-8") pattern = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.M | re.S) with pytest.raises(ValueError, match=pattern): @@ -331,7 +333,7 @@ def test_invalid_example(tmp_path, example, error_msg): @pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42")) def test_empty(tmp_path, config): pyproject = tmp_path / "pyproject.toml" - pyproject.write_text(config) + pyproject.write_text(config, encoding="utf-8") # Make sure no error is raised assert read_configuration(pyproject) == {} @@ -343,7 +345,7 @@ def test_empty(tmp_path, config): def test_include_package_data_by_default(tmp_path, config): """Make sure include_package_data is enabled by default. """ pyproject = tmp_path / "pyproject.toml" - pyproject.write_text(config) + pyproject.write_text(config, encoding="utf-8") config = read_configuration(pyproject) assert config["tool"]["setuptools"]["include-package-data"] is True From d1019e1affec66ac105d7cecc14a4795e3365377 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 18:33:22 +0000 Subject: [PATCH 0425/1761] Refactor test_pyprojecttoml to use more jaraco.path.build --- setuptools/tests/config/test_pyprojecttoml.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py index b7a38f0a88..7279ae2e2a 100644 --- a/setuptools/tests/config/test_pyprojecttoml.py +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -357,10 +357,11 @@ def test_include_package_data_in_setuppy(tmp_path): See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889 """ - pyproject = tmp_path / "pyproject.toml" - pyproject.write_text("[project]\nname = 'myproj'\nversion='42'\n") - setuppy = tmp_path / "setup.py" - setuppy.write_text("__import__('setuptools').setup(include_package_data=False)") + files = { + "pyproject.toml": "[project]\nname = 'myproj'\nversion='42'\n", + "setup.py": "__import__('setuptools').setup(include_package_data=False)", + } + path_build(files, prefix=tmp_path) with Path(tmp_path): dist = distutils.core.run_setup("setup.py", {}, stop_after="config") From be016801add784dbd7a4a63a6412080f312e3b29 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 18:35:11 +0000 Subject: [PATCH 0426/1761] Add utf-8 encoding in test_setupcfg, test_expand --- setuptools/tests/config/test_expand.py | 2 +- setuptools/tests/config/test_setupcfg.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py index cdcbffc14c..fe80890678 100644 --- a/setuptools/tests/config/test_expand.py +++ b/setuptools/tests/config/test_expand.py @@ -12,7 +12,7 @@ def write_files(files, root_dir): for file, content in files.items(): path = root_dir / file path.parent.mkdir(exist_ok=True, 
parents=True) - path.write_text(content) + path.write_text(content, encoding="utf-8") def test_glob_relative(tmp_path, monkeypatch): diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py index 7f93858bd4..706e2d0ebe 100644 --- a/setuptools/tests/config/test_setupcfg.py +++ b/setuptools/tests/config/test_setupcfg.py @@ -904,7 +904,8 @@ def test_cmdclass(self, tmpdir): module_path = Path(tmpdir, "src/custom_build.py") # auto discovery for src module_path.parent.mkdir(parents=True, exist_ok=True) module_path.write_text( - "from distutils.core import Command\n" "class CustomCmd(Command): pass\n" + "from distutils.core import Command\n" "class CustomCmd(Command): pass\n", + encoding="utf-8", ) setup_cfg = """ From 5b57ac96e7be909d8c02b1b6233d395ff3ec199d Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 18:44:25 +0000 Subject: [PATCH 0427/1761] Use jaraco.path.build in test_pyprojecttoml_dynamic_deps --- .../config/test_pyprojecttoml_dynamic_deps.py | 111 +++++++++--------- 1 file changed, 58 insertions(+), 53 deletions(-) diff --git a/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py index b6516227c0..37e5234a45 100644 --- a/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py +++ b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py @@ -1,57 +1,59 @@ +from inspect import cleandoc + import pytest +from jaraco import path from setuptools.config.pyprojecttoml import apply_configuration from setuptools.dist import Distribution -from setuptools.tests.textwrap import DALS def test_dynamic_dependencies(tmp_path): - (tmp_path / "requirements.txt").write_text("six\n # comment\n") - pyproject = tmp_path / "pyproject.toml" - pyproject.write_text( - DALS( + files = { + "requirements.txt": "six\n # comment\n", + "pyproject.toml": cleandoc( """ - [project] - name = "myproj" - version = "1.0" - dynamic = ["dependencies"] + [project] + name = "myproj" + version = "1.0" + dynamic = ["dependencies"] - [build-system] - requires = ["setuptools", "wheel"] - build-backend = "setuptools.build_meta" + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" - [tool.setuptools.dynamic.dependencies] - file = ["requirements.txt"] - """ - ) - ) + [tool.setuptools.dynamic.dependencies] + file = ["requirements.txt"] + """ + ), + } + path.build(files, prefix=tmp_path) dist = Distribution() - dist = apply_configuration(dist, pyproject) + dist = apply_configuration(dist, tmp_path / "pyproject.toml") assert dist.install_requires == ["six"] def test_dynamic_optional_dependencies(tmp_path): - (tmp_path / "requirements-docs.txt").write_text("sphinx\n # comment\n") - pyproject = tmp_path / "pyproject.toml" - pyproject.write_text( - DALS( + files = { + "requirements-docs.txt": "sphinx\n # comment\n", + "pyproject.toml": cleandoc( """ - [project] - name = "myproj" - version = "1.0" - dynamic = ["optional-dependencies"] + [project] + name = "myproj" + version = "1.0" + dynamic = ["optional-dependencies"] - [tool.setuptools.dynamic.optional-dependencies.docs] - file = ["requirements-docs.txt"] + [tool.setuptools.dynamic.optional-dependencies.docs] + file = ["requirements-docs.txt"] - [build-system] - requires = ["setuptools", "wheel"] - build-backend = "setuptools.build_meta" - """ - ) - ) + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + """ + ), + } + path.build(files, prefix=tmp_path) dist = 
Distribution() - dist = apply_configuration(dist, pyproject) + dist = apply_configuration(dist, tmp_path / "pyproject.toml") assert dist.extras_require == {"docs": ["sphinx"]} @@ -61,29 +63,32 @@ def test_mixed_dynamic_optional_dependencies(tmp_path): configurations in the case of fields containing sub-fields (groups), things would work out. """ - (tmp_path / "requirements-images.txt").write_text("pillow~=42.0\n # comment\n") - pyproject = tmp_path / "pyproject.toml" - pyproject.write_text( - DALS( + files = { + "requirements-images.txt": "pillow~=42.0\n # comment\n", + "pyproject.toml": cleandoc( """ - [project] - name = "myproj" - version = "1.0" - dynamic = ["optional-dependencies"] + [project] + name = "myproj" + version = "1.0" + dynamic = ["optional-dependencies"] - [project.optional-dependencies] - docs = ["sphinx"] + [project.optional-dependencies] + docs = ["sphinx"] - [tool.setuptools.dynamic.optional-dependencies.images] - file = ["requirements-images.txt"] + [tool.setuptools.dynamic.optional-dependencies.images] + file = ["requirements-images.txt"] + + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + """ + ), + } + + path.build(files, prefix=tmp_path) - [build-system] - requires = ["setuptools", "wheel"] - build-backend = "setuptools.build_meta" - """ - ) - ) # Test that the mix-and-match doesn't currently validate. + pyproject = tmp_path / "pyproject.toml" with pytest.raises(ValueError, match="project.optional-dependencies"): apply_configuration(Distribution(), pyproject) From bff5e77db9c4c0ac84f59a1d067039021170aa3c Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 18:47:48 +0000 Subject: [PATCH 0428/1761] Add utf-8 encoding in test_apply_pyproject --- setuptools/tests/config/test_apply_pyprojecttoml.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index 555489b140..2ca35759bc 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -44,8 +44,9 @@ def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path): monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1")) setupcfg_example = retrieve_file(url) pyproject_example = Path(tmp_path, "pyproject.toml") - toml_config = Translator().translate(setupcfg_example.read_text(), "setup.cfg") - pyproject_example.write_text(toml_config) + setupcfg_text = setupcfg_example.read_text(encoding="utf-8") + toml_config = Translator().translate(setupcfg_text, "setup.cfg") + pyproject_example.write_text(toml_config, encoding="utf-8") dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example) dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example) @@ -177,9 +178,9 @@ def _pep621_example_project( text = text.replace(orig, subst) pyproject.write_text(text, encoding="utf-8") - (tmp_path / readme).write_text("hello world") - (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---") - (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT) + (tmp_path / readme).write_text("hello world", encoding="utf-8") + (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---", encoding="utf-8") + (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT, encoding="utf-8") return pyproject From e6585999ce2de34e1dec82116bd17a592120f2c7 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 
18:51:53 +0000 Subject: [PATCH 0429/1761] Add utf-8 encoding in test_packageindex --- setuptools/tests/test_packageindex.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py index 41b96614f8..93474ae5af 100644 --- a/setuptools/tests/test_packageindex.py +++ b/setuptools/tests/test_packageindex.py @@ -2,12 +2,12 @@ import urllib.request import urllib.error import http.client +from inspect import cleandoc from unittest import mock import pytest import setuptools.package_index -from .textwrap import DALS class TestPackageIndex: @@ -257,14 +257,15 @@ class TestPyPIConfig: def test_percent_in_password(self, tmp_home_dir): pypirc = tmp_home_dir / '.pypirc' pypirc.write_text( - DALS( + cleandoc( """ - [pypi] - repository=https://pypi.org - username=jaraco - password=pity% - """ - ) + [pypi] + repository=https://pypi.org + username=jaraco + password=pity% + """ + ), + encoding="utf-8", ) cfg = setuptools.package_index.PyPIConfig() cred = cfg.creds_by_repository['https://pypi.org'] From 9e0a88814d6b95e41f92f75ae3da0a3da4ce5561 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 18:54:46 +0000 Subject: [PATCH 0430/1761] Add utf-8 encoding in test_editable_install test_logging --- setuptools/tests/test_editable_install.py | 4 ++-- setuptools/tests/test_logging.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index df85699586..1df09fd256 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -144,8 +144,8 @@ def test_editable_with_pyproject(tmp_path, venv, files, editable_opts): cmd = [venv.exe(), "-m", "mypkg"] assert subprocess.check_output(cmd).strip() == b"3.14159.post0 Hello World" - (project / "src/mypkg/data.txt").write_text("foobar") - (project / "src/mypkg/mod.py").write_text("x = 42") + (project / "src/mypkg/data.txt").write_text("foobar", encoding="utf-8") + (project / "src/mypkg/mod.py").write_text("x = 42", encoding="utf-8") assert subprocess.check_output(cmd).strip() == b"3.14159.post0 foobar 42" diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py index 7a9a33f1ea..cf89b3bd00 100644 --- a/setuptools/tests/test_logging.py +++ b/setuptools/tests/test_logging.py @@ -33,7 +33,7 @@ def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level): assert logging.getLevelName(unset_log_level) == "NOTSET" setup_script = tmp_path / "setup.py" - setup_script.write_text(setup_py) + setup_script.write_text(setup_py, encoding="utf-8") dist = distutils.core.run_setup(setup_script, stop_after="init") dist.script_args = [flag, "sdist"] dist.parse_command_line() # <- where the log level is set From 27ec7fa50029b32e7bf8459602e850baf554fa22 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 19:06:15 +0000 Subject: [PATCH 0431/1761] Improve utf-8 in test_config_discovery --- setuptools/tests/test_config_discovery.py | 52 ++++++++++++++--------- 1 file changed, 31 insertions(+), 21 deletions(-) diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py index 409c219ed5..72772caebf 100644 --- a/setuptools/tests/test_config_discovery.py +++ b/setuptools/tests/test_config_discovery.py @@ -176,11 +176,11 @@ def test_purposefully_empty(self, tmp_path, config_file, param, circumstance): else: # Make sure build works with or without 
setup.cfg pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"] - (tmp_path / "pyproject.toml").write_text(pyproject) + (tmp_path / "pyproject.toml").write_text(pyproject, encoding="utf-8") template_param = param config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param) - (tmp_path / config_file).write_text(config) + (tmp_path / config_file).write_text(config, encoding="utf-8") dist = _get_dist(tmp_path, {}) # When either parameter package or py_modules is an empty list, @@ -292,11 +292,13 @@ class TestWithAttrDirective: def test_setupcfg_metadata(self, tmp_path, folder, opts): files = [f"{folder}/pkg/__init__.py", "setup.cfg"] _populate_project_dir(tmp_path, files, opts) - (tmp_path / folder / "pkg/__init__.py").write_text("version = 42") - (tmp_path / "setup.cfg").write_text( - "[metadata]\nversion = attr: pkg.version\n" - + (tmp_path / "setup.cfg").read_text() - ) + + config = (tmp_path / "setup.cfg").read_text(encoding="utf-8") + overwrite = { + folder: {"pkg": {"__init__.py": "version = 42"}}, + "setup.cfg": "[metadata]\nversion = attr: pkg.version\n" + config, + } + jaraco.path.build(overwrite, prefix=tmp_path) dist = _get_dist(tmp_path, {}) assert dist.get_name() == "pkg" @@ -312,11 +314,16 @@ def test_setupcfg_metadata(self, tmp_path, folder, opts): def test_pyproject_metadata(self, tmp_path): _populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {}) - (tmp_path / "src/pkg/__init__.py").write_text("version = 42") - (tmp_path / "pyproject.toml").write_text( - "[project]\nname = 'pkg'\ndynamic = ['version']\n" - "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n" - ) + + overwrite = { + "src": {"pkg": {"__init__.py": "version = 42"}}, + "pyproject.toml": ( + "[project]\nname = 'pkg'\ndynamic = ['version']\n" + "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n" + ), + } + jaraco.path.build(overwrite, prefix=tmp_path) + dist = _get_dist(tmp_path, {}) assert dist.get_version() == "42" assert dist.package_dir == {"": "src"} @@ -354,7 +361,7 @@ def _simulate_package_with_extension(self, tmp_path): ] setup(ext_modules=ext_modules) """ - (tmp_path / "setup.py").write_text(DALS(setup_script)) + (tmp_path / "setup.py").write_text(DALS(setup_script), encoding="utf-8") def test_skip_discovery_with_setupcfg_metadata(self, tmp_path): """Ensure that auto-discovery is not triggered when the project is based on @@ -367,14 +374,14 @@ def test_skip_discovery_with_setupcfg_metadata(self, tmp_path): requires = [] build-backend = 'setuptools.build_meta' """ - (tmp_path / "pyproject.toml").write_text(DALS(pyproject)) + (tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8") setupcfg = """ [metadata] name = proj version = 42 """ - (tmp_path / "setup.cfg").write_text(DALS(setupcfg)) + (tmp_path / "setup.cfg").write_text(DALS(setupcfg), encoding="utf-8") dist = _get_dist(tmp_path, {}) assert dist.get_name() == "proj" @@ -399,7 +406,7 @@ def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path): name = 'proj' version = '42' """ - (tmp_path / "pyproject.toml").write_text(DALS(pyproject)) + (tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8") with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): _get_dist(tmp_path, {}) @@ -416,7 +423,7 @@ def _simulate_package_with_data_files(self, tmp_path, src_root): manifest = """ global-include *.py *.txt """ - (tmp_path / "MANIFEST.in").write_text(DALS(manifest)) + (tmp_path / "MANIFEST.in").write_text(DALS(manifest), encoding="utf-8") 
EXAMPLE_SETUPCFG = """ [metadata] @@ -564,9 +571,12 @@ def _populate_project_dir(root, files, options): # NOTE: Currently pypa/build will refuse to build the project if no # `pyproject.toml` or `setup.py` is found. So it is impossible to do # completely "config-less" projects. - (root / "setup.py").write_text("import setuptools\nsetuptools.setup()") - (root / "README.md").write_text("# Example Package") - (root / "LICENSE").write_text("Copyright (c) 2018") + basic = { + "setup.py": "import setuptools\nsetuptools.setup()", + "README.md": "# Example Package", + "LICENSE": "Copyright (c) 2018", + } + jaraco.path.build(basic, prefix=root) _write_setupcfg(root, options) paths = (root / f for f in files) for path in paths: @@ -594,7 +604,7 @@ def _write_setupcfg(root, options): with open(root / "setup.cfg", "w", encoding="utf-8") as f: setupcfg.write(f) print("~~~~~ setup.cfg ~~~~~") - print((root / "setup.cfg").read_text()) + print((root / "setup.cfg").read_text(encoding="utf-8")) def _run_build(path, *flags): From 337e17532a6c08caf10cd721b576d7c6458d9246 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 19:50:58 +0000 Subject: [PATCH 0432/1761] Avoid aliasing jaraco.path.build in tests_pyprojecttoml --- setuptools/tests/config/test_pyprojecttoml.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py index 7279ae2e2a..abec68ab30 100644 --- a/setuptools/tests/config/test_pyprojecttoml.py +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -2,10 +2,10 @@ from configparser import ConfigParser from inspect import cleandoc +import jaraco.path import pytest import tomli_w from path import Path -from jaraco.path import build as path_build from setuptools.config.pyprojecttoml import ( read_configuration, @@ -108,7 +108,7 @@ def create_example(path, pkg_root): # Use this opportunity to ensure namespaces are discovered files[pkg_root] = {**packages, "other": {"nested": {"__init__.py": ""}}} - path_build(files, prefix=path) + jaraco.path.build(files, prefix=path) def verify_example(config, path, pkg_root): @@ -361,7 +361,7 @@ def test_include_package_data_in_setuppy(tmp_path): "pyproject.toml": "[project]\nname = 'myproj'\nversion='42'\n", "setup.py": "__import__('setuptools').setup(include_package_data=False)", } - path_build(files, prefix=tmp_path) + jaraco.path.build(files, prefix=tmp_path) with Path(tmp_path): dist = distutils.core.run_setup("setup.py", {}, stop_after="config") From 52e1b7047e2577c6e0620c381c5beadaddde800e Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 00:02:49 +0000 Subject: [PATCH 0433/1761] Add utf-8 encoding to test_pkg_resources --- pkg_resources/tests/test_pkg_resources.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py index bfbf619c85..4724c82860 100644 --- a/pkg_resources/tests/test_pkg_resources.py +++ b/pkg_resources/tests/test_pkg_resources.py @@ -111,13 +111,13 @@ def test_resource_filename_rewrites_on_change(self): filename = zp.get_resource_filename(manager, 'data.dat') actual = datetime.datetime.fromtimestamp(os.stat(filename).st_mtime) assert actual == self.ref_time - f = open(filename, 'w') + f = open(filename, 'w', encoding="utf-8") f.write('hello, world?') f.close() ts = self.ref_time.timestamp() os.utime(filename, (ts, ts)) filename = zp.get_resource_filename(manager, 'data.dat') - with 
open(filename) as f: + with open(filename, encoding="utf-8") as f: assert f.read() == 'hello, world!' manager.cleanup_resources() From 7a0a29d628f7bc19287a9f253649299ade422832 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 00:14:48 +0000 Subject: [PATCH 0434/1761] Add newsfragment --- newsfragments/4263.misc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4263.misc.rst diff --git a/newsfragments/4263.misc.rst b/newsfragments/4263.misc.rst new file mode 100644 index 0000000000..f84eb8dd42 --- /dev/null +++ b/newsfragments/4263.misc.rst @@ -0,0 +1 @@ +Avoid implicit ``encoding`` parameter in ``pkg_resources/tests``. From 31ff4dd517442bfb06d8a3973766ebab0008166d Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 00:17:19 +0000 Subject: [PATCH 0435/1761] Add newsfragment --- newsfragments/4261.misc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4261.misc.rst diff --git a/newsfragments/4261.misc.rst b/newsfragments/4261.misc.rst new file mode 100644 index 0000000000..83c10f0f66 --- /dev/null +++ b/newsfragments/4261.misc.rst @@ -0,0 +1 @@ +Avoid implicit ``encoding`` parameter in ``setuptools/tests``. From c646680450fcb91e0e238fcef97dd788c15c2978 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 00:23:56 +0000 Subject: [PATCH 0436/1761] Use binary mode when 'touch'-ing files in test_build_py and test_build_meta --- setuptools/tests/test_build_meta.py | 2 +- setuptools/tests/test_build_py.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 32676aee79..99c0c0543a 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -306,7 +306,7 @@ def test_build_with_existing_file_present(self, build_type, tmpdir_cwd): assert first_result != second_result # And if rebuilding the exact same sdist/wheel? 
- open(os.path.join(dist_dir, second_result), 'w', encoding="utf-8").close() + open(os.path.join(dist_dir, second_result), 'wb').close() third_result = build_method(dist_dir) assert third_result == second_result assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0 diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index d79cbbdf0c..4aa1fe68fa 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -49,7 +49,7 @@ def test_recursive_in_package_data_glob(tmpdir_cwd): ) ) os.makedirs('path/subpath/subsubpath') - open('path/subpath/subsubpath/data', 'w', encoding="utf-8").close() + open('path/subpath/subsubpath/data', 'wb').close() dist.parse_command_line() dist.run_commands() @@ -77,8 +77,8 @@ def test_read_only(tmpdir_cwd): ) ) os.makedirs('pkg') - open('pkg/__init__.py', 'w', encoding="utf-8").close() - open('pkg/data.dat', 'w', encoding="utf-8").close() + open('pkg/__init__.py', 'wb').close() + open('pkg/data.dat', 'wb').close() os.chmod('pkg/__init__.py', stat.S_IREAD) os.chmod('pkg/data.dat', stat.S_IREAD) dist.parse_command_line() @@ -108,8 +108,8 @@ def test_executable_data(tmpdir_cwd): ) ) os.makedirs('pkg') - open('pkg/__init__.py', 'w', encoding="utf-8").close() - open('pkg/run-me', 'w', encoding="utf-8").close() + open('pkg/__init__.py', 'wb').close() + open('pkg/run-me', 'wb').close() os.chmod('pkg/run-me', 0o700) dist.parse_command_line() From 3115855126402d2678afc9867e6b3f2449c529ac Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 00:28:34 +0000 Subject: [PATCH 0437/1761] Minor formatting improvement --- setuptools/tests/test_build_meta.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 99c0c0543a..c2a1e6dc75 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -272,14 +272,14 @@ def test_build_with_existing_file_present(self, build_type, tmpdir_cwd): [metadata] name = foo version = file: VERSION - """ + """ ), 'pyproject.toml': DALS( """ [build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" - """ + """ ), } From 0eceb495e0b5ccd1b9204d1e773b734a1a4d1e05 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 08:40:46 +0000 Subject: [PATCH 0438/1761] Add `encoding` to subprocess.run inside setuptools/tests --- setuptools/tests/integration/helpers.py | 1 + setuptools/tests/test_dist_info.py | 3 ++- setuptools/tests/test_distutils_adoption.py | 12 +++++++----- setuptools/tests/test_easy_install.py | 1 + 4 files changed, 11 insertions(+), 6 deletions(-) diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py index 82cb36a2e4..615c43b2e0 100644 --- a/setuptools/tests/integration/helpers.py +++ b/setuptools/tests/integration/helpers.py @@ -17,6 +17,7 @@ def run(cmd, env=None): cmd, capture_output=True, text=True, + encoding="utf-8", env={**os.environ, **(env or {})}, # ^-- allow overwriting instead of discarding the current env ) diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py index ad6cebad0b..c6fe97e2ba 100644 --- a/setuptools/tests/test_dist_info.py +++ b/setuptools/tests/test_dist_info.py @@ -198,7 +198,8 @@ def run_command_inner(*cmd, **kwargs): "stderr": subprocess.STDOUT, "stdout": subprocess.PIPE, "text": True, - 'check': True, + "encoding": "utf-8", + "check": True, **kwargs, } cmd = [sys.executable, "-c", 
"__import__('setuptools').setup()", *map(str, cmd)] diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py index eb7feba637..74883d2199 100644 --- a/setuptools/tests/test_distutils_adoption.py +++ b/setuptools/tests/test_distutils_adoption.py @@ -8,6 +8,8 @@ IS_PYPY = '__pypy__' in sys.builtin_module_names +_TEXT_KWARGS = {"text": True, "encoding": "utf-8"} # For subprocess.run + def win_sr(env): """ @@ -24,7 +26,7 @@ def win_sr(env): def find_distutils(venv, imports='distutils', env=None, **kwargs): py_cmd = 'import {imports}; print(distutils.__file__)'.format(**locals()) cmd = ['python', '-c', py_cmd] - return venv.run(cmd, env=win_sr(env), text=True, **kwargs) + return venv.run(cmd, env=win_sr(env), **_TEXT_KWARGS, **kwargs) def count_meta_path(venv, env=None): @@ -36,7 +38,7 @@ def count_meta_path(venv, env=None): """ ) cmd = ['python', '-c', py_cmd] - return int(venv.run(cmd, env=win_sr(env), text=True)) + return int(venv.run(cmd, env=win_sr(env), **_TEXT_KWARGS)) skip_without_stdlib_distutils = pytest.mark.skipif( @@ -82,7 +84,7 @@ def test_pip_import(venv): Regression test for #3002. """ cmd = ['python', '-c', 'import pip'] - venv.run(cmd, text=True) + venv.run(cmd, **_TEXT_KWARGS) def test_distutils_has_origin(): @@ -130,7 +132,7 @@ def test_modules_are_not_duplicated_on_import( env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version) script = ENSURE_IMPORTS_ARE_NOT_DUPLICATED.format(imported_module=imported_module) cmd = ['python', '-c', script] - output = venv.run(cmd, env=win_sr(env), text=True).strip() + output = venv.run(cmd, env=win_sr(env), **_TEXT_KWARGS).strip() assert output == "success" @@ -154,5 +156,5 @@ def test_modules_are_not_duplicated_on_import( def test_log_module_is_not_duplicated_on_import(distutils_version, tmpdir_cwd, venv): env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version) cmd = ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED] - output = venv.run(cmd, env=win_sr(env), text=True).strip() + output = venv.run(cmd, env=win_sr(env), **_TEXT_KWARGS).strip() assert output == "success" diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index 26778e65ec..950cb23d21 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -530,6 +530,7 @@ def test_setup_install_includes_dependencies(self, tmp_path, mock_index): stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, + encoding="utf-8", ) assert cp.returncode != 0 try: From 03166bcd5d86426ef055d147697dea1c9a9215e9 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 6 Mar 2024 15:05:39 +0000 Subject: [PATCH 0439/1761] Add compat.py39.LOCALE_ENCODING --- setuptools/compat/py39.py | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 setuptools/compat/py39.py diff --git a/setuptools/compat/py39.py b/setuptools/compat/py39.py new file mode 100644 index 0000000000..04a4abe5a9 --- /dev/null +++ b/setuptools/compat/py39.py @@ -0,0 +1,9 @@ +import sys + +# Explicitly use the ``"locale"`` encoding in versions that support it, +# otherwise just rely on the implicit handling of ``encoding=None``. +# Since all platforms that support ``EncodingWarning`` also support +# ``encoding="locale"``, this can be used to suppress the warning. +# However, please try to use UTF-8 when possible +# (.pth files are the notorious exception: python/cpython#77102, pypa/setuptools#3937). 
+LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None From ff99234609151de8abdfbe1d97e41071f93964ce Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 09:11:21 +0000 Subject: [PATCH 0440/1761] Re-use compat.py39.LOCALE_ENCODING in editable_wheel --- setuptools/command/editable_wheel.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 9d319398c9..5f08ab53fc 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -14,7 +14,6 @@ import io import os import shutil -import sys import traceback from contextlib import suppress from enum import Enum @@ -44,6 +43,7 @@ namespaces, ) from .._path import StrPath +from ..compat import py39 from ..discovery import find_package_path from ..dist import Distribution from ..warnings import ( @@ -558,9 +558,8 @@ def _encode_pth(content: str) -> bytes: (There seems to be some variety in the way different version of Python handle ``encoding=None``, not all of them use ``locale.getpreferredencoding(False)``). """ - encoding = "locale" if sys.version_info >= (3, 10) else None with io.BytesIO() as buffer: - wrapper = io.TextIOWrapper(buffer, encoding) + wrapper = io.TextIOWrapper(buffer, encoding=py39.LOCALE_ENCODING) wrapper.write(content) wrapper.flush() buffer.seek(0) From 76ac799acfbb4bec9fec0815d282c444eb92f49f Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 09:44:05 +0000 Subject: [PATCH 0441/1761] Explicitly use 'locale' encoding for .pth files in easy_install --- setuptools/command/easy_install.py | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 402355bd81..c256770239 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -74,7 +74,7 @@ DEVELOP_DIST, ) import pkg_resources -from ..compat import py311 +from ..compat import py39, py311 from .._path import ensure_directory from ..extern.jaraco.text import yield_lines @@ -491,7 +491,7 @@ def check_site_dir(self): # noqa: C901 # is too complex (12) # FIXME try: if test_exists: os.unlink(testfile) - open(testfile, 'w').close() + open(testfile, 'wb').close() os.unlink(testfile) except OSError: self.cant_write_to_target() @@ -576,7 +576,7 @@ def check_pth_processing(self): _one_liner( """ import os - f = open({ok_file!r}, 'w') + f = open({ok_file!r}, 'w', encoding="utf-8") f.write('OK') f.close() """ @@ -588,7 +588,8 @@ def check_pth_processing(self): os.unlink(ok_file) dirname = os.path.dirname(ok_file) os.makedirs(dirname, exist_ok=True) - f = open(pth_file, 'w') + f = open(pth_file, 'w', encoding=py39.LOCALE_ENCODING) + # ^-- Requires encoding="locale" instead of "utf-8" (python/cpython#77102). except OSError: self.cant_write_to_target() else: @@ -872,7 +873,7 @@ def write_script(self, script_name, contents, mode="t", blockers=()): ensure_directory(target) if os.path.exists(target): os.unlink(target) - with open(target, "w" + mode) as f: + with open(target, "w" + mode) as f: # TODO: is it safe to use "utf-8"? 
f.write(contents) chmod(target, 0o777 - mask) @@ -1016,7 +1017,7 @@ def install_exe(self, dist_filename, tmpdir): # Write EGG-INFO/PKG-INFO if not os.path.exists(pkg_inf): - f = open(pkg_inf, 'w') + f = open(pkg_inf, 'w') # TODO: probably it is safe to use "utf-8" f.write('Metadata-Version: 1.0\n') for k, v in cfg.items('metadata'): if k != 'target_version': @@ -1277,7 +1278,9 @@ def update_pth(self, dist): # noqa: C901 # is too complex (11) # FIXME filename = os.path.join(self.install_dir, 'setuptools.pth') if os.path.islink(filename): os.unlink(filename) - with open(filename, 'wt') as f: + + with open(filename, 'wt', encoding=py39.LOCALE_ENCODING) as f: + # Requires encoding="locale" instead of "utf-8" (python/cpython#77102). f.write(self.pth_file.make_relative(dist.location) + '\n') def unpack_progress(self, src, dst): @@ -1503,9 +1506,9 @@ def expand_paths(inputs): # noqa: C901 # is too complex (11) # FIXME continue # Read the .pth file - f = open(os.path.join(dirname, name)) - lines = list(yield_lines(f)) - f.close() + with open(os.path.join(dirname, name), encoding=py39.LOCALE_ENCODING) as f: + # Requires encoding="locale" instead of "utf-8" (python/cpython#77102). + lines = list(yield_lines(f)) # Yield existing non-dupe, non-import directory lines from it for line in lines: @@ -1619,7 +1622,8 @@ def _load_raw(self): paths = [] dirty = saw_import = False seen = dict.fromkeys(self.sitedirs) - f = open(self.filename, 'rt') + f = open(self.filename, 'rt', encoding=py39.LOCALE_ENCODING) + # ^-- Requires encoding="locale" instead of "utf-8" (python/cpython#77102). for line in f: path = line.rstrip() # still keep imports and empty/commented lines for formatting @@ -1690,7 +1694,8 @@ def save(self): data = '\n'.join(lines) + '\n' if os.path.islink(self.filename): os.unlink(self.filename) - with open(self.filename, 'wt') as f: + with open(self.filename, 'wt', encoding=py39.LOCALE_ENCODING) as f: + # Requires encoding="locale" instead of "utf-8" (python/cpython#77102). f.write(data) elif os.path.exists(self.filename): log.debug("Deleting empty %s", self.filename) From fc93ece16304292e6931f8a5730610098dae40dc Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 09:44:36 +0000 Subject: [PATCH 0442/1761] Add comments to remind about utf-8 in easy-install --- setuptools/command/easy_install.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index c256770239..858fb20f83 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -873,7 +873,7 @@ def write_script(self, script_name, contents, mode="t", blockers=()): ensure_directory(target) if os.path.exists(target): os.unlink(target) - with open(target, "w" + mode) as f: # TODO: is it safe to use "utf-8"? + with open(target, "w" + mode) as f: # TODO: is it safe to use utf-8? 
f.write(contents) chmod(target, 0o777 - mask) @@ -1017,7 +1017,7 @@ def install_exe(self, dist_filename, tmpdir): # Write EGG-INFO/PKG-INFO if not os.path.exists(pkg_inf): - f = open(pkg_inf, 'w') # TODO: probably it is safe to use "utf-8" + f = open(pkg_inf, 'w') # TODO: probably it is safe to use utf-8 f.write('Metadata-Version: 1.0\n') for k, v in cfg.items('metadata'): if k != 'target_version': @@ -1088,7 +1088,7 @@ def process(src, dst): if locals()[name]: txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') if not os.path.exists(txt): - f = open(txt, 'w') + f = open(txt, 'w') # TODO: probably it is safe to use utf-8 f.write('\n'.join(locals()[name]) + '\n') f.close() From 98c877396b9ecd0e94b6c46a41ea9cef87dc2965 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 09:47:56 +0000 Subject: [PATCH 0443/1761] Explicitly use 'locale' encoding for .pth files in setuptools.namespaces --- setuptools/namespaces.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py index e8f2941d45..0185d55f94 100644 --- a/setuptools/namespaces.py +++ b/setuptools/namespaces.py @@ -2,6 +2,8 @@ from distutils import log import itertools +from .compat import py39 + flatten = itertools.chain.from_iterable @@ -23,7 +25,8 @@ def install_namespaces(self): list(lines) return - with open(filename, 'wt') as f: + with open(filename, 'wt', encoding=py39.LOCALE_ENCODING) as f: + # Requires encoding="locale" instead of "utf-8" (python/cpython#77102). f.writelines(lines) def uninstall_namespaces(self): From 1dd135cba9c40e25b4cd2b650de4b7299aae5e1c Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Thu, 7 Mar 2024 10:08:57 +0000 Subject: [PATCH 0444/1761] Add news fragment --- docs/conf.py | 4 ++++ newsfragments/4265.feature.rst | 3 +++ 2 files changed, 7 insertions(+) create mode 100644 newsfragments/4265.feature.rst diff --git a/docs/conf.py b/docs/conf.py index be8856849b..534da15a37 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -55,6 +55,10 @@ pattern=r'(Python #|bpo-)(?P<python>\d+)', url='https://bugs.python.org/issue{python}', ), + dict( + pattern=r'\bpython/cpython#(?P<cpython>\d+)', + url='{GH}/python/cpython/issues/{cpython}', + ), dict( pattern=r'Interop #(?P<interop>\d+)', url='{GH}/pypa/interoperability-peps/issues/{interop}', diff --git a/newsfragments/4265.feature.rst b/newsfragments/4265.feature.rst new file mode 100644 index 0000000000..bcb0467205 --- /dev/null +++ b/newsfragments/4265.feature.rst @@ -0,0 +1,3 @@ +Explicitly use ``encoding="locale"`` for ``.pth`` files whenever possible, +to reduce ``EncodingWarnings``. +This avoids errors with UTF-8 (see discussion in python/cpython#77102).
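The news fragment above condenses the convention that the preceding patches (0439-0443) apply across the code base: write ordinary text files explicitly as UTF-8, but keep ``.pth`` files on the locale encoding, which ``site.py`` has historically used to read them. The sketch below restates that convention outside the diff context. It mirrors the ``LOCALE_ENCODING`` shim from ``setuptools/compat/py39.py`` introduced in patch 0439; the ``write_pth`` and ``write_text_file`` helpers are hypothetical names used only for illustration, not part of the setuptools API.

import sys

# Same shim as setuptools/compat/py39.py (patch 0439): Python 3.10+ accepts
# an explicit encoding="locale", which also silences EncodingWarning; older
# versions fall back to the implicit locale handling of encoding=None.
LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None


def write_pth(filename, lines):
    # Hypothetical helper: .pth files are read by site.py with the locale
    # encoding, so writing them as UTF-8 can break on non-UTF-8 locales
    # (python/cpython#77102).
    with open(filename, "w", encoding=LOCALE_ENCODING) as f:
        f.write("\n".join(lines) + "\n")


def write_text_file(filename, text):
    # Hypothetical helper: every other text file in this patch series is
    # written with an explicit encoding="utf-8".
    with open(filename, "w", encoding="utf-8") as f:
        f.write(text)

With the shim, a single code path serves all supported Python versions while remaining warning-free under ``python -X warn_default_encoding`` on 3.10 and later.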
From b0a4c08f32d3087197331db257919be9355196ef Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 7 Mar 2024 12:35:23 -0500 Subject: [PATCH 0445/1761] Avoid mypy note --- pkg_resources/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 002ea4d2f9..95bc9a6006 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -27,7 +27,7 @@ import time import re import types -from typing import Dict, List, Protocol +from typing import List, Protocol import zipfile import zipimport import warnings @@ -2562,7 +2562,7 @@ def parse_map(cls, data, dist=None): _data = data.items() else: _data = split_sections(data) - maps: Dict[str, Dict[str, "EntryPoint"]] = {} + maps = {} for group, lines in _data: if group is None: if not lines: From 2bb4cc0d83d441f8c06a7b6b7989ba1891683e20 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 8 Mar 2024 14:57:06 -0500 Subject: [PATCH 0446/1761] Updates from PR Review --- newsfragments/4262.feature.rst | 3 +++ pkg_resources/__init__.py | 7 ++++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 newsfragments/4262.feature.rst diff --git a/newsfragments/4262.feature.rst b/newsfragments/4262.feature.rst new file mode 100644 index 0000000000..7bbdba87d2 --- /dev/null +++ b/newsfragments/4262.feature.rst @@ -0,0 +1,3 @@ +Improved `AttributeError` error message if ``pkg_resources.EntryPoint.require`` is called without extras or distribution +Gracefully "do nothing" when trying to activate a ``pkg_resources.Distribution`` with a `None` location, rather than raising a `TypeError` +-- by :user:`Avasam` diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 95bc9a6006..67e26bb001 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -1636,7 +1636,7 @@ def _validate_resource_path(path): ) def _get(self, path) -> bytes: - if self.loader and hasattr(self.loader, 'get_data'): + if hasattr(self.loader, 'get_data') and self.loader: return self.loader.get_data(path) raise NotImplementedError( "Can't perform this operation for loaders without 'get_data()'" @@ -2493,7 +2493,8 @@ def resolve(self): def require(self, env=None, installer=None): if not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) + error_cls = UnknownExtra if self.extras else AttributeError + raise error_cls("Can't require() without a distribution", self) # Get the requirements for this entry point with all its extras and # then resolve them. 
We have to pass `extras` along when resolving so @@ -2825,7 +2826,7 @@ def activate(self, path=None, replace=False): if path is None: path = sys.path self.insert_on(path, replace=replace) - if path is sys.path and self.location: + if path is sys.path: fixup_namespace_packages(self.location) for pkg in self._get_metadata('namespace_packages.txt'): if pkg in sys.modules: From 5a2add23c8f48ed150de37a4c75b27f849b84f54 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 8 Mar 2024 17:25:56 -0500 Subject: [PATCH 0447/1761] Update mypy to 1.9 --- mypy.ini | 2 ++ setup.cfg | 1 + setuptools/_core_metadata.py | 2 +- setuptools/command/editable_wheel.py | 3 +-- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/mypy.ini b/mypy.ini index 42ade6537e..90c8ff13e7 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,5 +1,6 @@ [mypy] # CI should test for all versions, local development gets hints for oldest supported +# Some upstream typeshed distutils stubs fixes are necessary before we can start testing on Python 3.12 python_version = 3.8 strict = False warn_unused_ignores = True @@ -8,6 +9,7 @@ explicit_package_bases = True exclude = (?x)( ^build/ | ^.tox/ + | ^.egg/ | ^pkg_resources/tests/data/my-test-package-source/setup.py$ # Duplicate module name | ^.+?/(_vendor|extern)/ # Vendored | ^setuptools/_distutils/ # Vendored diff --git a/setup.cfg b/setup.cfg index 4d1155e884..5231358289 100644 --- a/setup.cfg +++ b/setup.cfg @@ -73,6 +73,7 @@ testing = # for tools/finalize.py jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin" pytest-home >= 0.5 + mypy==1.9 # No Python 3.11 dependencies require tomli, but needed for type-checking since we import it directly tomli # No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py index 4bf3c7c947..5dd97c7719 100644 --- a/setuptools/_core_metadata.py +++ b/setuptools/_core_metadata.py @@ -62,7 +62,7 @@ def _read_list_from_msg(msg: Message, field: str) -> Optional[List[str]]: def _read_payload_from_msg(msg: Message) -> Optional[str]: - value = msg.get_payload().strip() + value = str(msg.get_payload()).strip() if value == 'UNKNOWN' or not value: return None return value diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 5f08ab53fc..4d21e2253f 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -620,8 +620,7 @@ def _simple_layout( layout = {pkg: find_package_path(pkg, package_dir, project_dir) for pkg in packages} if not layout: return set(package_dir) in ({}, {""}) - # TODO: has been fixed upstream, waiting for new mypy release https://github.com/python/typeshed/pull/11310 - parent = os.path.commonpath(starmap(_parent_path, layout.items())) # type: ignore[call-overload] + parent = os.path.commonpath(starmap(_parent_path, layout.items())) return all( _path.same_path(Path(parent, *key.split('.')), value) for key, value in layout.items() From 5bb594c12ef7ddc2cbfd2470266bb85de36d5c86 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 8 Mar 2024 19:20:05 -0500 Subject: [PATCH 0448/1761] update setup-python action to v5 --- .github/workflows/main.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e841bde57c..d2beaa0c48 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -65,7 +65,7 @@ jobs: - uses: actions/checkout@v4 - name: 
Setup Python id: python-install - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} allow-prereleases: true @@ -122,7 +122,7 @@ jobs: with: fetch-depth: 0 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.x - name: Install tox @@ -171,7 +171,7 @@ jobs: git, - name: Record the currently selected Python version id: python-install - # NOTE: This roughly emulates what `actions/setup-python@v4` provides + # NOTE: This roughly emulates what `actions/setup-python` provides # NOTE: except the action gets the version from the installation path # NOTE: on disk and we get it from runtime. run: | @@ -220,7 +220,7 @@ jobs: sudo apt-get update sudo apt-get install build-essential gfortran libopenblas-dev libyaml-dev - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: # Use a release that is not very new but still have a long life: python-version: "3.10" @@ -241,7 +241,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.x - name: Install tox From dec00d13c2c781b2bee498054d1b8ff4cd3122b4 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 8 Mar 2024 19:22:18 -0500 Subject: [PATCH 0449/1761] Update checkout action to v4 --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e841bde57c..715eb1db7c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -158,7 +158,7 @@ jobs: runs-on: ${{ matrix.platform }} timeout-minutes: 75 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install Cygwin with Python uses: cygwin/cygwin-install-action@v2 with: @@ -214,7 +214,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 75 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install OS-level dependencies run: | sudo apt-get update From 1749aea0e4c1f92e7e46f4b6dcd250fc0b992933 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 8 Mar 2024 19:27:59 -0500 Subject: [PATCH 0450/1761] Update cache action to v4 --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e841bde57c..4d65b0dad4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -69,7 +69,7 @@ jobs: with: python-version: ${{ matrix.python }} allow-prereleases: true - - uses: actions/cache@v3 + - uses: actions/cache@v4 id: cache with: path: setuptools/tests/config/downloads/*.cfg From 0575cc5fadf3e49944c901bbdeb6cf3ca94a73ae Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 11 Mar 2024 11:45:04 +0000 Subject: [PATCH 0451/1761] Update setup.cfg --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 5231358289..c41b226e0c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -73,7 +73,7 @@ testing = # for tools/finalize.py jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin" pytest-home >= 0.5 - mypy==1.9 + mypy==1.9 # pin mypy version so a new version doesn't suddenly cause the CI to fail # No Python 3.11 dependencies require tomli, but needed for type-checking since we import it directly tomli # No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly From 
6efc720f0fdd79e0689f81acba3fe45878ec43a3 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 10 Mar 2024 20:57:58 +0100 Subject: [PATCH 0452/1761] Fix a couple typos found by codespell --- pkg_resources/__init__.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 163a5521d6..c2ba0476e5 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -3193,7 +3193,7 @@ def _find_adapter(registry, ob): for t in types: if t in registry: return registry[t] - # _find_adapter would previously return None, and immediatly be called. + # _find_adapter would previously return None, and immediately be called. # So we're raising a TypeError to keep backward compatibility if anyone depended on that behaviour. raise TypeError(f"Could not find adapter for {registry} and {ob}") diff --git a/setup.py b/setup.py index 1a6074766a..542edaea68 100755 --- a/setup.py +++ b/setup.py @@ -88,6 +88,6 @@ def _restore_install_lib(self): if __name__ == '__main__': # allow setup.py to run from another directory - # TODO: Use a proper conditonal statement here + # TODO: Use a proper conditional statement here here and os.chdir(here) # type: ignore[func-returns-value] dist = setuptools.setup(**setup_params) From e0cb8e8fb5e0561da909e22703d5c8a1ce4a0f1d Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 8 Mar 2024 18:13:25 -0500 Subject: [PATCH 0453/1761] Update cygwin-install-action --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index bc0b67003f..87b7317f13 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -160,7 +160,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Install Cygwin with Python - uses: cygwin/cygwin-install-action@v2 + uses: cygwin/cygwin-install-action@v4 with: platform: x86_64 packages: >- From c9e6b2ae2770286aeab5f95063eccb2dc6deb05a Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 8 Mar 2024 19:26:41 -0500 Subject: [PATCH 0454/1761] Update upload-artefact action to v4 --- .github/workflows/ci-sage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-sage.yml b/.github/workflows/ci-sage.yml index 88cc75cabb..a37f30294d 100644 --- a/.github/workflows/ci-sage.yml +++ b/.github/workflows/ci-sage.yml @@ -73,7 +73,7 @@ jobs: && echo "sage-package create ${{ env.SPKG }} --pypi --source normal --type standard; sage-package create ${{ env.SPKG }} --version git --tarball ${{ env.SPKG }}-git.tar.gz --type=standard" > upstream/update-pkgs.sh \ && if [ -n "${{ env.REMOVE_PATCHES }}" ]; then echo "(cd ../build/pkgs/${{ env.SPKG }}/patches && rm -f ${{ env.REMOVE_PATCHES }}; :)" >> upstream/update-pkgs.sh; fi \ && ls -l upstream/ - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 with: path: upstream name: upstream From 50f0459cbd195e548bdfecc08e567c54c76c7f44 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 11 Mar 2024 16:55:55 -0400 Subject: [PATCH 0455/1761] Update .github/workflows/ci-sage.yml --- .github/workflows/ci-sage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-sage.yml b/.github/workflows/ci-sage.yml index a37f30294d..3da7141573 100644 --- a/.github/workflows/ci-sage.yml +++ b/.github/workflows/ci-sage.yml @@ -73,7 +73,7 @@ jobs: && echo "sage-package create ${{ env.SPKG }} --pypi --source normal 
--type standard; sage-package create ${{ env.SPKG }} --version git --tarball ${{ env.SPKG }}-git.tar.gz --type=standard" > upstream/update-pkgs.sh \ && if [ -n "${{ env.REMOVE_PATCHES }}" ]; then echo "(cd ../build/pkgs/${{ env.SPKG }}/patches && rm -f ${{ env.REMOVE_PATCHES }}; :)" >> upstream/update-pkgs.sh; fi \ && ls -l upstream/ - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@v3 with: path: upstream name: upstream From 6ee23bf0579c52e1cbe7c97fc20fd085ff2a25c7 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Wed, 13 Mar 2024 10:54:30 +0000 Subject: [PATCH 0456/1761] =?UTF-8?q?Bump=20version:=2069.1.1=20=E2=86=92?= =?UTF-8?q?=2069.2.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 25 +++++++++++++++++++++++++ newsfragments/4237.misc.rst | 1 - newsfragments/4238.misc.rst | 1 - newsfragments/4241.misc.rst | 1 - newsfragments/4243.bugfix.rst | 1 - newsfragments/4244.bugfix.rst | 1 - newsfragments/4254.bugfix.rst | 1 - newsfragments/4260.misc.rst | 1 - newsfragments/4261.misc.rst | 1 - newsfragments/4263.misc.rst | 1 - newsfragments/4265.feature.rst | 3 --- setup.cfg | 2 +- 13 files changed, 27 insertions(+), 14 deletions(-) delete mode 100644 newsfragments/4237.misc.rst delete mode 100644 newsfragments/4238.misc.rst delete mode 100644 newsfragments/4241.misc.rst delete mode 100644 newsfragments/4243.bugfix.rst delete mode 100644 newsfragments/4244.bugfix.rst delete mode 100644 newsfragments/4254.bugfix.rst delete mode 100644 newsfragments/4260.misc.rst delete mode 100644 newsfragments/4261.misc.rst delete mode 100644 newsfragments/4263.misc.rst delete mode 100644 newsfragments/4265.feature.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 8d101ab5af..1236141a7c 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 69.1.1 +current_version = 69.2.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index abc4bb3f04..2e849bdc5f 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,28 @@ +v69.2.0 +======= + +Features +-------- + +- Explicitly use ``encoding="locale"`` for ``.pth`` files whenever possible, + to reduce ``EncodingWarnings``. + This avoid errors with UTF-8 (see discussion in python/cpython#77102). (#4265) + + +Bugfixes +-------- + +- Clarify some `pkg_resources` methods return `bytes`, not `str`. Also return an empty `bytes` in ``EmptyProvider._get`` -- by :user:`Avasam` (#4243) +- Return an empty `list` by default in ``pkg_resources.ResourceManager.cleanup_resources`` -- by :user:`Avasam` (#4244) +- Made ``pkg_resoursces.NullProvider``'s ``has_metadata`` and ``metadata_isdir`` methods return actual booleans like all other Providers. -- by :user:`Avasam` (#4254) + + +Misc +---- + +- #4237, #4238, #4241, #4260, #4261, #4263 + + v69.1.1 ======= diff --git a/newsfragments/4237.misc.rst b/newsfragments/4237.misc.rst deleted file mode 100644 index 995bee20e1..0000000000 --- a/newsfragments/4237.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Drop dependency on `py`. 
Bump ``pytest-xdist`` to ``>=3`` and use `pathlib` instead in tests -- by :user:`Avasam` diff --git a/newsfragments/4238.misc.rst b/newsfragments/4238.misc.rst deleted file mode 100644 index a7ccfc911e..0000000000 --- a/newsfragments/4238.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Drop dependency on Flake8 by using Ruff's YTT rules instead of flake8-2020 -- by :user:`Avasam` diff --git a/newsfragments/4241.misc.rst b/newsfragments/4241.misc.rst deleted file mode 100644 index ef6da2c323..0000000000 --- a/newsfragments/4241.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Improvements to `Path`-related type annotations when it could be ``str | PathLike`` -- by :user:`Avasam` diff --git a/newsfragments/4243.bugfix.rst b/newsfragments/4243.bugfix.rst deleted file mode 100644 index e8212721f3..0000000000 --- a/newsfragments/4243.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Clarify some `pkg_resources` methods return `bytes`, not `str`. Also return an empty `bytes` in ``EmptyProvider._get`` -- by :user:`Avasam` diff --git a/newsfragments/4244.bugfix.rst b/newsfragments/4244.bugfix.rst deleted file mode 100644 index 5d606de718..0000000000 --- a/newsfragments/4244.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Return an empty `list` by default in ``pkg_resources.ResourceManager.cleanup_resources`` -- by :user:`Avasam` diff --git a/newsfragments/4254.bugfix.rst b/newsfragments/4254.bugfix.rst deleted file mode 100644 index e944fcfb49..0000000000 --- a/newsfragments/4254.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Made ``pkg_resoursces.NullProvider``'s ``has_metadata`` and ``metadata_isdir`` methods return actual booleans like all other Providers. -- by :user:`Avasam` diff --git a/newsfragments/4260.misc.rst b/newsfragments/4260.misc.rst deleted file mode 100644 index 9dfde3498d..0000000000 --- a/newsfragments/4260.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Remove unused ``resources_stream`` ``resource_dir`` and shadowed functions from `pkg_resources` -- by :user:`Avasam` diff --git a/newsfragments/4261.misc.rst b/newsfragments/4261.misc.rst deleted file mode 100644 index 83c10f0f66..0000000000 --- a/newsfragments/4261.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid implicit ``encoding`` parameter in ``setuptools/tests``. diff --git a/newsfragments/4263.misc.rst b/newsfragments/4263.misc.rst deleted file mode 100644 index f84eb8dd42..0000000000 --- a/newsfragments/4263.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid implicit ``encoding`` parameter in ``pkg_resources/tests``. diff --git a/newsfragments/4265.feature.rst b/newsfragments/4265.feature.rst deleted file mode 100644 index bcb0467205..0000000000 --- a/newsfragments/4265.feature.rst +++ /dev/null @@ -1,3 +0,0 @@ -Explicitly use ``encoding="locale"`` for ``.pth`` files whenever possible, -to reduce ``EncodingWarnings``. -This avoid errors with UTF-8 (see discussion in python/cpython#77102). 
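The feature entry above refers to the explicit ``"locale"`` encoding name that Python 3.10 added: passing ``encoding="locale"`` to ``open()`` selects the same encoding as the implicit default, but naming it explicitly avoids ``EncodingWarning``. A minimal illustration (not part of the patch; the ``.pth`` file name and its contents are hypothetical):

    import sys

    # encoding="locale" is only accepted on Python 3.10 and newer;
    # it names the locale encoding explicitly, which silences
    # EncodingWarning without changing which encoding is used.
    if sys.version_info >= (3, 10):
        with open("demo.pth", "w", encoding="locale") as f:
            f.write("import demo_hook\n")
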
diff --git a/setup.cfg b/setup.cfg index c41b226e0c..9b504dd39b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 69.1.1 +version = 69.2.0 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages From 2b339b98bcc26fe9147647054c8fa09344f581ec Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 15 Mar 2024 00:44:48 -0400 Subject: [PATCH 0457/1761] Avoid leaking "name" variable in AbstractSandbox --- newsfragments/4280.misc.rst | 1 + setuptools/sandbox.py | 24 ++++++++++++------------ 2 files changed, 13 insertions(+), 12 deletions(-) create mode 100644 newsfragments/4280.misc.rst diff --git a/newsfragments/4280.misc.rst b/newsfragments/4280.misc.rst new file mode 100644 index 0000000000..aff6a7ca1c --- /dev/null +++ b/newsfragments/4280.misc.rst @@ -0,0 +1 @@ +Avoid leaking loop variable ``name`` in ``AbstractSandbox`` -- by :user:`Avasam` diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index 6c095e029e..e5da9d86f0 100644 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -309,9 +309,9 @@ def wrap(self, src, dst, *args, **kw): return wrap - for name in ["rename", "link", "symlink"]: - if hasattr(_os, name): - locals()[name] = _mk_dual_path_wrapper(name) + for __name in ["rename", "link", "symlink"]: + if hasattr(_os, __name): + locals()[__name] = _mk_dual_path_wrapper(__name) def _mk_single_path_wrapper(name: str, original=None): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = original or getattr(_os, name) @@ -326,7 +326,7 @@ def wrap(self, path, *args, **kw): if _file: _file = _mk_single_path_wrapper('file', _file) _open = _mk_single_path_wrapper('open', _open) - for name in [ + for __name in [ "stat", "listdir", "chdir", @@ -347,8 +347,8 @@ def wrap(self, path, *args, **kw): "pathconf", "access", ]: - if hasattr(_os, name): - locals()[name] = _mk_single_path_wrapper(name) + if hasattr(_os, __name): + locals()[__name] = _mk_single_path_wrapper(__name) def _mk_single_with_return(name: str): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = getattr(_os, name) @@ -361,9 +361,9 @@ def wrap(self, path, *args, **kw): return wrap - for name in ['readlink', 'tempnam']: - if hasattr(_os, name): - locals()[name] = _mk_single_with_return(name) + for __name in ['readlink', 'tempnam']: + if hasattr(_os, __name): + locals()[__name] = _mk_single_with_return(__name) def _mk_query(name: str): # type: ignore[misc] # https://github.com/pypa/setuptools/pull/4099 original = getattr(_os, name) @@ -376,9 +376,9 @@ def wrap(self, *args, **kw): return wrap - for name in ['getcwd', 'tmpnam']: - if hasattr(_os, name): - locals()[name] = _mk_query(name) + for __name in ['getcwd', 'tmpnam']: + if hasattr(_os, __name): + locals()[__name] = _mk_query(__name) def _validate_path(self, path): """Called to remap or validate any path, whether input or output""" From d377ff738350743ce5e134e04031707605ec3dd3 Mon Sep 17 00:00:00 2001 From: Avasam Date: Sat, 16 Mar 2024 02:20:46 -0400 Subject: [PATCH 0458/1761] Update codecov/codecov-action to v4 --- .github/workflows/main.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 87b7317f13..76178067b8 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -100,7 +100,7 @@ jobs: run: pipx run coverage xml --ignore-errors - name: Publish coverage if: 
hashFiles('coverage.xml') != '' # Rudimentary `file.exists()` - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: flags: >- # Mark which lines are covered by which envs CI-GHA, @@ -108,6 +108,7 @@ jobs: OS-${{ runner.os }}, VM-${{ matrix.platform }}, Py-${{ steps.python-install.outputs.python-version }} + token: ${{ secrets.CODECOV_TOKEN }} collateral: strategy: @@ -190,7 +191,7 @@ jobs: shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0} - name: Publish coverage if: hashFiles('coverage.xml') != '' # Rudimentary `file.exists()` - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: files: >- ${{ github.workspace }}\coverage.xml @@ -200,6 +201,7 @@ jobs: OS-${{ runner.os }}, VM-${{ matrix.platform }}, Py-${{ steps.python-install.outputs.python-version }} + token: ${{ secrets.CODECOV_TOKEN }} integration-test: needs: test From b0135f5097f32a27b7a14e2c6296ba14bcb4e10b Mon Sep 17 00:00:00 2001 From: Dustin Ingram Date: Wed, 20 Mar 2024 20:46:50 +0000 Subject: [PATCH 0459/1761] Support PEP 625 --- setuptools/_distutils/dist.py | 5 ++++- setuptools/tests/test_config_discovery.py | 6 ++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/setuptools/_distutils/dist.py b/setuptools/_distutils/dist.py index 7c0f0e5b78..d20ce33e00 100644 --- a/setuptools/_distutils/dist.py +++ b/setuptools/_distutils/dist.py @@ -23,6 +23,7 @@ DistutilsArgError, DistutilsClassError, ) +from setuptools.extern.packaging.utils import canonicalize_name, canonicalize_version from .fancy_getopt import FancyGetopt, translate_longopt from .util import check_environ, strtobool, rfc822_escape from ._log import log @@ -1189,7 +1190,9 @@ def get_version(self): return self.version or "0.0.0" def get_fullname(self): - return "{}-{}".format(self.get_name(), self.get_version()) + return "{}-{}".format( + canonicalize_name(self.get_name()), canonicalize_version(self.get_version()) + ) def get_author(self): return self.author diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py index 72772caebf..7d51a47012 100644 --- a/setuptools/tests/test_config_discovery.py +++ b/setuptools/tests/test_config_discovery.py @@ -255,7 +255,7 @@ def test_py_modules_when_wheel_dir_is_cwd(self, tmp_path): class TestNoConfig: - DEFAULT_VERSION = "0.0.0" # Default version given by setuptools + CANONICAL_DEFAULT_VERSION = "0" # Canonical default version given by setuptools EXAMPLES = { "pkg1": ["src/pkg1.py"], @@ -277,7 +277,9 @@ def test_build_with_discovered_name(self, tmp_path): _populate_project_dir(tmp_path, files, {}) _run_build(tmp_path, "--sdist") # Expected distribution file - dist_file = tmp_path / f"dist/ns.nested.pkg-{self.DEFAULT_VERSION}.tar.gz" + dist_file = ( + tmp_path / f"dist/ns-nested-pkg-{self.CANONICAL_DEFAULT_VERSION}.tar.gz" + ) assert dist_file.is_file() From b93e7afba85c7d55d0419c3f544e9348f283a7d6 Mon Sep 17 00:00:00 2001 From: Dustin Ingram Date: Wed, 20 Mar 2024 20:51:32 +0000 Subject: [PATCH 0460/1761] Add news fragment --- newsfragments/3593.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/3593.feature.rst diff --git a/newsfragments/3593.feature.rst b/newsfragments/3593.feature.rst new file mode 100644 index 0000000000..2ec6f9714e --- /dev/null +++ b/newsfragments/3593.feature.rst @@ -0,0 +1 @@ +Support PEP 625 by canonicalizing package name and version in filenames. 
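To make the effect of the change above concrete: ``canonicalize_name`` lowercases the name and collapses runs of ``-``, ``_`` and ``.`` into a single dash, while ``canonicalize_version`` normalizes the version and strips trailing zeros from the release segment. A quick sketch (illustrative only, matching the behaviour of the ``packaging`` release vendored later in this series):

    from packaging.utils import canonicalize_name, canonicalize_version

    # runs of '-', '_' and '.' collapse to a single '-', lowercased
    assert canonicalize_name("ns.nested.pkg") == "ns-nested-pkg"

    # trailing zeros in the release segment are stripped
    assert canonicalize_version("0.0.0") == "0"
    assert canonicalize_version("1.0") == "1"

    # so the patched get_fullname produces:
    print(f"{canonicalize_name('ns.nested.pkg')}-{canonicalize_version('0.0.0')}")
    # -> ns-nested-pkg-0

Note that this first cut keeps dashes inside the name, while PEP 625 sdist filenames use underscores; a follow-up commit below switches to the underscore form.
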
From 44f67acbbd262ca9376e86c4671ecbea0173147b Mon Sep 17 00:00:00 2001 From: Marcel Telka Date: Wed, 20 Mar 2024 22:54:58 +0100 Subject: [PATCH 0461/1761] Add mypy.ini to MANIFEST.in --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index 116840bfa2..c4f12dc68a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -13,6 +13,7 @@ include MANIFEST.in include LICENSE include launcher.c include msvc-build-launcher.cmd +include mypy.ini include pytest.ini include tox.ini include setuptools/tests/config/setupcfg_examples.txt From a0d0c4b7e87fbfd04cee2546ba452858587516fd Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 21 Mar 2024 15:34:23 -0400 Subject: [PATCH 0462/1761] Allow mypy on PyPy (jaraco/skeleton#111) https://github.com/pypa/setuptools/pull/4257 shows that mypy now works with PyPy --- setup.cfg | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index 400a72a5ed..6fa73b6a09 100644 --- a/setup.cfg +++ b/setup.cfg @@ -23,9 +23,7 @@ testing = pytest >= 6 pytest-checkdocs >= 2.4 pytest-cov - pytest-mypy; \ - # workaround for jaraco/skeleton#22 - python_implementation != "PyPy" + pytest-mypy pytest-enabler >= 2.2 pytest-ruff >= 0.2.1 From f0aaeb5c00e5767ce37a760e0199a9fb74f07cc6 Mon Sep 17 00:00:00 2001 From: Dustin Ingram Date: Thu, 21 Mar 2024 20:56:54 +0000 Subject: [PATCH 0463/1761] Revert changes to distutils --- setuptools/_distutils/dist.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/setuptools/_distutils/dist.py b/setuptools/_distutils/dist.py index d20ce33e00..7c0f0e5b78 100644 --- a/setuptools/_distutils/dist.py +++ b/setuptools/_distutils/dist.py @@ -23,7 +23,6 @@ DistutilsArgError, DistutilsClassError, ) -from setuptools.extern.packaging.utils import canonicalize_name, canonicalize_version from .fancy_getopt import FancyGetopt, translate_longopt from .util import check_environ, strtobool, rfc822_escape from ._log import log @@ -1190,9 +1189,7 @@ def get_version(self): return self.version or "0.0.0" def get_fullname(self): - return "{}-{}".format( - canonicalize_name(self.get_name()), canonicalize_version(self.get_version()) - ) + return "{}-{}".format(self.get_name(), self.get_version()) def get_author(self): return self.author From cfc9a82db67324a05986abf349a27b85e74a4aac Mon Sep 17 00:00:00 2001 From: Dustin Ingram Date: Thu, 21 Mar 2024 20:57:10 +0000 Subject: [PATCH 0464/1761] Try monkeypatching right before we use it instead --- setuptools/dist.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/setuptools/dist.py b/setuptools/dist.py index 6350e38100..c7a3e5175d 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -7,6 +7,7 @@ import os import re import sys +import contextlib from contextlib import suppress from glob import iglob from pathlib import Path @@ -26,6 +27,7 @@ from .extern.ordered_set import OrderedSet from .extern.packaging.markers import InvalidMarker, Marker from .extern.packaging.specifiers import InvalidSpecifier, SpecifierSet +from .extern.packaging.utils import canonicalize_name, canonicalize_version from .extern.packaging.version import Version from . 
import _entry_points @@ -964,8 +966,28 @@ def run_command(self, command): # Postpone defaults until all explicit configuration is considered # (setup() args, config files, command line and plugins) - super().run_command(command) + with self._override_get_fullname(): + super().run_command(command) + @contextlib.contextmanager + def _override_get_fullname(self): + def _get_fullname_canonicalized(self): + return "{}-{}".format( + canonicalize_name(self.get_name()), + canonicalize_version(self.get_version()), + ) + + class NoValue: + pass + + orig_val = getattr(self, 'get_fullname', NoValue) + self.get_fullname = _get_fullname_canonicalized.__get__(self) + + try: + yield + finally: + if orig_val is not NoValue: + self.get_fullname = orig_val class DistDeprecationWarning(SetuptoolsDeprecationWarning): """Class for warning about deprecations in dist in From 22b81c444cb65e256dcbea191e1b2d60f7e4dab6 Mon Sep 17 00:00:00 2001 From: Dustin Ingram Date: Thu, 21 Mar 2024 22:11:28 +0000 Subject: [PATCH 0465/1761] Linting --- setuptools/dist.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setuptools/dist.py b/setuptools/dist.py index c7a3e5175d..c62187ec25 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -989,6 +989,7 @@ class NoValue: if orig_val is not NoValue: self.get_fullname = orig_val + class DistDeprecationWarning(SetuptoolsDeprecationWarning): """Class for warning about deprecations in dist in setuptools. Not ignored by default, unlike DeprecationWarning.""" From c9a7f97ba83be124e173713f5c24564c2b6dd49e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 21 Mar 2024 15:49:52 -0400 Subject: [PATCH 0466/1761] Re-enable ignoring of temporary merge queue branches. Closes jaraco/skeleton#103. --- .github/workflows/main.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cf94f7d816..143b0984b0 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -4,8 +4,11 @@ on: merge_group: push: branches-ignore: - # disabled for jaraco/skeleton#103 - # - gh-readonly-queue/** # Temporary merge queue-related GH-made branches + # temporary GH branches relating to merge queues (jaraco/skeleton#93) + - gh-readonly-queue/** + tags: + # required if branches-ignore is supplied (jaraco/skeleton#103) + - '**' pull_request: permissions: From d72c6a081b67ce18eae654bf3c8d2d627af6939e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 23 Mar 2024 13:46:21 -0400 Subject: [PATCH 0467/1761] Fetch unshallow clones in readthedocs. Closes jaraco/skeleton#114. --- .readthedocs.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 6848906374..85dfea9d42 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -10,3 +10,7 @@ build: os: ubuntu-lts-latest tools: python: latest + # post-checkout job to ensure the clone isn't shallow jaraco/skeleton#114 + jobs: + post_checkout: + - git fetch --unshallow || true From 3fc7a935dfc0e5c8e330a29efc5518c464795cf8 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 29 Mar 2024 21:11:46 -0400 Subject: [PATCH 0468/1761] Move Python 3.11 out of the test matrix. Probably should have done this when moving continue-on-error to Python 3.13. 
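Stepping back to the ``_override_get_fullname`` context manager introduced a few commits above: it works by binding a plain function to the instance via ``__get__``, so the resulting instance attribute shadows the class-level method until the ``finally`` block restores it, and the ``NoValue`` sentinel distinguishes "no previous instance attribute" from a previous value that happened to be falsy. A minimal sketch of the same pattern (``Dist`` and ``_patched`` are hypothetical stand-ins, not the real ``Distribution``):

    class Dist:
        def get_fullname(self):
            return "fake-1.0"

    def _patched(self):
        return "fake-1"

    d = Dist()
    d.get_fullname = _patched.__get__(d)  # bind to this one instance
    assert d.get_fullname() == "fake-1"   # instance attribute wins

    del d.get_fullname                    # class lookup applies again
    assert d.get_fullname() == "fake-1.0"
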
--- .github/workflows/main.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 143b0984b0..a15c74a618 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -34,7 +34,6 @@ jobs: matrix: python: - "3.8" - - "3.11" - "3.12" platform: - ubuntu-latest @@ -45,6 +44,8 @@ jobs: platform: ubuntu-latest - python: "3.10" platform: ubuntu-latest + - python: "3.11" + platform: ubuntu-latest - python: pypy3.10 platform: ubuntu-latest runs-on: ${{ matrix.platform }} From 6ff02e0eefcd90e271cefd326b460ecfa0e3eb9e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 31 Mar 2024 04:27:11 -0400 Subject: [PATCH 0469/1761] Configure pytest to support namespace packages. Ref pytest-dev/pytest#12112. --- pytest.ini | 5 ++++- setup.cfg | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pytest.ini b/pytest.ini index 022a723e7e..9a0f3bce13 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,6 +1,9 @@ [pytest] norecursedirs=dist build .tox .eggs -addopts=--doctest-modules +addopts= + --doctest-modules + --import-mode importlib +consider_namespace_packages=true filterwarnings= ## upstream diff --git a/setup.cfg b/setup.cfg index 6fa73b6a09..f46b6cbff4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,7 +20,7 @@ install_requires = [options.extras_require] testing = # upstream - pytest >= 6 + pytest >= 6, != 8.1.1 pytest-checkdocs >= 2.4 pytest-cov pytest-mypy From 9b58da5c84b58743ef9e0f0346d31150afd2229f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 7 Apr 2024 18:13:25 -0400 Subject: [PATCH 0470/1761] Revert "Suppress EncodingWarnings in pyfakefs. Ref pypa/distutils#232. Workaround for pytest-dev/pyfakefs#957." This reverts commit 9508489953a84a1412ad24e6613650351369462c. --- pytest.ini | 3 --- 1 file changed, 3 deletions(-) diff --git a/pytest.ini b/pytest.ini index fa31fb33dc..3ee2f886ba 100644 --- a/pytest.ini +++ b/pytest.ini @@ -34,6 +34,3 @@ filterwarnings= # suppress well know deprecation warning ignore:distutils.log.Log is deprecated - - # pytest-dev/pyfakefs#957 - ignore:UTF-8 Mode affects locale.getpreferredencoding::pyfakefs.fake_file From 34ba6b2ec0650c8c70d9285a0c7ee1a126406807 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartosz=20S=C5=82awecki?= Date: Mon, 1 Apr 2024 17:47:04 +0200 Subject: [PATCH 0471/1761] Add link to blog entry from jaraco/skeleton#115 above CI build matrix. 
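A note on the ``consider_namespace_packages=true`` setting added above: with ``--import-mode importlib``, pytest resolves test modules through the ordinary import machinery, so a directory without ``__init__.py`` is importable only when treated as a PEP 420 namespace package. A self-contained illustration (the package name ``nspkg`` is hypothetical):

    import importlib.util
    import pathlib
    import sys
    import tempfile

    root = pathlib.Path(tempfile.mkdtemp())
    (root / "nspkg").mkdir()  # deliberately no __init__.py
    (root / "nspkg" / "mod.py").write_text("x = 1\n", encoding="utf-8")

    sys.path.insert(0, str(root))
    spec = importlib.util.find_spec("nspkg")
    assert spec is not None and spec.origin is None  # namespace package
    assert importlib.import_module("nspkg.mod").x == 1
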
--- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a15c74a618..ac0ff69e22 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -31,6 +31,7 @@ env: jobs: test: strategy: + # https://blog.jaraco.com/efficient-use-of-ci-resources/ matrix: python: - "3.8" From bf33f79fee5ba88dba5dde8beb57ba03d856dc31 Mon Sep 17 00:00:00 2001 From: Dustin Ingram Date: Thu, 11 Apr 2024 15:43:12 +0000 Subject: [PATCH 0472/1761] Fix canonicalization --- setuptools/dist.py | 2 +- setuptools/tests/test_config_discovery.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setuptools/dist.py b/setuptools/dist.py index c62187ec25..202430fb69 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -973,7 +973,7 @@ def run_command(self, command): def _override_get_fullname(self): def _get_fullname_canonicalized(self): return "{}-{}".format( - canonicalize_name(self.get_name()), + canonicalize_name(self.get_name()).replace('-', '_'), canonicalize_version(self.get_version()), ) diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py index 7d51a47012..e1e67ffe11 100644 --- a/setuptools/tests/test_config_discovery.py +++ b/setuptools/tests/test_config_discovery.py @@ -278,7 +278,7 @@ def test_build_with_discovered_name(self, tmp_path): _run_build(tmp_path, "--sdist") # Expected distribution file dist_file = ( - tmp_path / f"dist/ns-nested-pkg-{self.CANONICAL_DEFAULT_VERSION}.tar.gz" + tmp_path / f"dist/ns_nested_pkg-{self.CANONICAL_DEFAULT_VERSION}.tar.gz" ) assert dist_file.is_file() From af38e1cd6db5ad272cf2e3c0747c0b478a0c269c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 11 Apr 2024 14:05:15 -0400 Subject: [PATCH 0473/1761] =?UTF-8?q?=F0=9F=A7=8E=E2=80=8D=E2=99=80?= =?UTF-8?q?=EF=B8=8F=20Genuflect=20to=20the=20types.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Under some circumstances not fully understood, mypy has started complaining when `_validate_project` tries to import `trove_classifiers` (and it doesn't exist), even though `_validate_project` is excluded from mypy checks. Mysteriously, adding `trove_classifiers` itself to the list of modules for which to ignore imports suppresses this mysterious failure. Ref #4296. --- mypy.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mypy.ini b/mypy.ini index 90c8ff13e7..ee12ebb193 100644 --- a/mypy.ini +++ b/mypy.ini @@ -25,7 +25,8 @@ disable_error_code = attr-defined # https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993 # - distutils._modified has different errors on Python 3.8 [import-untyped], on Python 3.9+ [import-not-found] # - All jaraco modules are still untyped -[mypy-pkg_resources.extern.*,setuptools.extern.*,distutils._modified,jaraco.*] +# - _validate_project sometimes complains about trove_classifiers (#4296) +[mypy-pkg_resources.extern.*,setuptools.extern.*,distutils._modified,jaraco.*,trove_classifiers] ignore_missing_imports = True # - pkg_resources tests create modules that won't exists statically before the test is run. From 230bde5008fbc7b0764649f39aa8640befd9ec0b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 11 Apr 2024 17:01:51 -0400 Subject: [PATCH 0474/1761] Fix ruff.toml syntax and suppress emergent failure. 
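Returning to the canonicalization fix above: applying ``.replace('-', '_')`` to the canonical name makes the computed fullname follow the PEP 625 convention of underscore-separated sdist names. An illustrative check mirroring the shape of the patched ``_get_fullname_canonicalized``:

    from packaging.utils import canonicalize_name, canonicalize_version

    def fullname(name, version):
        # same shape as the patched helper; illustrative only
        return "{}-{}".format(
            canonicalize_name(name).replace('-', '_'),
            canonicalize_version(version),
        )

    assert fullname("ns.nested.pkg", "0.0.0") == "ns_nested_pkg-0"
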
--- ruff.toml | 8 ++++---- setuptools/command/easy_install.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ruff.toml b/ruff.toml index bd1a86ff17..6f620cb890 100644 --- a/ruff.toml +++ b/ruff.toml @@ -2,6 +2,10 @@ extend-select = [ "C901", "W", + + # local + "UP", # pyupgrade + "YTT", # flake8-2020 ] ignore = [ # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules @@ -20,10 +24,6 @@ ignore = [ "ISC001", "ISC002", ] -extend-select = [ - "UP", # pyupgrade - "YTT", # flake8-2020 -] extend-ignore = [ "UP015", # redundant-open-modes, explicit is preferred "UP030", # temporarily disabled diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 858fb20f83..87a68c292a 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -565,7 +565,7 @@ def cant_write_to_target(self): msg += '\n' + self.__access_msg raise DistutilsError(msg) - def check_pth_processing(self): + def check_pth_processing(self): # noqa: C901 """Empirically verify whether .pth files are supported in inst. dir""" instdir = self.install_dir log.info("Checking .pth file support in %s", instdir) From 21482ac4b2ef50e41218beafba261061daf23d9b Mon Sep 17 00:00:00 2001 From: Dustin Ingram Date: Wed, 20 Mar 2024 20:46:50 +0000 Subject: [PATCH 0475/1761] Support PEP 625 --- distutils/dist.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/distutils/dist.py b/distutils/dist.py index c4d2a45dc2..9f1e23e636 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -23,6 +23,7 @@ DistutilsArgError, DistutilsClassError, ) +from setuptools.extern.packaging.utils import canonicalize_name, canonicalize_version from .fancy_getopt import FancyGetopt, translate_longopt from .util import check_environ, strtobool, rfc822_escape from ._log import log @@ -1189,7 +1190,9 @@ def get_version(self): return self.version or "0.0.0" def get_fullname(self): - return f"{self.get_name()}-{self.get_version()}" + return "{}-{}".format( + canonicalize_name(self.get_name()), canonicalize_version(self.get_version()) + ) def get_author(self): return self.author From 005ba4b6d47d96c3a93e21a8bcae32c10c1f202a Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 11 Apr 2024 19:10:53 -0400 Subject: [PATCH 0476/1761] Expect to find canonicalize_* functions in packaging. --- distutils/dist.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/distutils/dist.py b/distutils/dist.py index 9f1e23e636..b0cb87d559 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -12,6 +12,8 @@ import logging from email import message_from_file +from packaging.utils import canonicalize_name, canonicalize_version + try: import warnings except ImportError: @@ -23,7 +25,6 @@ DistutilsArgError, DistutilsClassError, ) -from setuptools.extern.packaging.utils import canonicalize_name, canonicalize_version from .fancy_getopt import FancyGetopt, translate_longopt from .util import check_environ, strtobool, rfc822_escape from ._log import log From be32fecc787c3de8c292638c5004a8bcf92dc540 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 11 Apr 2024 19:14:18 -0400 Subject: [PATCH 0477/1761] Update tests to match new expectation. 
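The test updates that follow simply track the new naming: a project named ``fake`` at version ``1.0`` now builds ``fake-1.tar.gz`` rather than ``fake-1.0.tar.gz``, because ``canonicalize_version`` strips the trailing zero (the distutils variant at this point does not yet replace dashes with underscores). Illustrative check:

    from packaging.utils import canonicalize_name, canonicalize_version

    assert "{}-{}".format(
        canonicalize_name("fake"), canonicalize_version("1.0")
    ) == "fake-1"

Note that ``distutils/dist.py`` now imports ``packaging.utils`` at module level; ``packaging`` itself is vendored into ``distutils/_vendor`` a few commits later in this series.
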
--- distutils/tests/test_sdist.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py index 450f68c993..e708edc4e1 100644 --- a/distutils/tests/test_sdist.py +++ b/distutils/tests/test_sdist.py @@ -118,9 +118,9 @@ def test_prune_file_list(self): # now let's check what we have dist_folder = join(self.tmp_dir, 'dist') files = os.listdir(dist_folder) - assert files == ['fake-1.0.zip'] + assert files == ['fake-1.zip'] - zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) + zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.zip')) try: content = zip_file.namelist() finally: @@ -135,7 +135,7 @@ def test_prune_file_list(self): 'somecode/', 'somecode/__init__.py', ] - assert sorted(content) == ['fake-1.0/' + x for x in expected] + assert sorted(content) == ['fake-1/' + x for x in expected] @pytest.mark.usefixtures('needs_zlib') @pytest.mark.skipif("not find_executable('tar')") @@ -153,10 +153,10 @@ def test_make_distribution(self): dist_folder = join(self.tmp_dir, 'dist') result = os.listdir(dist_folder) result.sort() - assert result == ['fake-1.0.tar', 'fake-1.0.tar.gz'] + assert result == ['fake-1.tar', 'fake-1.tar.gz'] - os.remove(join(dist_folder, 'fake-1.0.tar')) - os.remove(join(dist_folder, 'fake-1.0.tar.gz')) + os.remove(join(dist_folder, 'fake-1.tar')) + os.remove(join(dist_folder, 'fake-1.tar.gz')) # now trying a tar then a gztar cmd.formats = ['tar', 'gztar'] @@ -166,7 +166,7 @@ def test_make_distribution(self): result = os.listdir(dist_folder) result.sort() - assert result == ['fake-1.0.tar', 'fake-1.0.tar.gz'] + assert result == ['fake-1.tar', 'fake-1.tar.gz'] @pytest.mark.usefixtures('needs_zlib') def test_add_defaults(self): @@ -219,9 +219,9 @@ def test_add_defaults(self): # now let's check what we have dist_folder = join(self.tmp_dir, 'dist') files = os.listdir(dist_folder) - assert files == ['fake-1.0.zip'] + assert files == ['fake-1.zip'] - zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) + zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.zip')) try: content = zip_file.namelist() finally: @@ -247,7 +247,7 @@ def test_add_defaults(self): 'somecode/doc.dat', 'somecode/doc.txt', ] - assert sorted(content) == ['fake-1.0/' + x for x in expected] + assert sorted(content) == ['fake-1/' + x for x in expected] # checking the MANIFEST manifest = pathlib.Path(self.tmp_dir, 'MANIFEST').read_text(encoding='utf-8') @@ -420,16 +420,16 @@ def test_manual_manifest(self): assert list(clean_lines(cmd.manifest)) == ['README.manual'] - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive_name = join(self.tmp_dir, 'dist', 'fake-1.tar.gz') archive = tarfile.open(archive_name) try: filenames = [tarinfo.name for tarinfo in archive] finally: archive.close() assert sorted(filenames) == [ - 'fake-1.0', - 'fake-1.0/PKG-INFO', - 'fake-1.0/README.manual', + 'fake-1', + 'fake-1/PKG-INFO', + 'fake-1/README.manual', ] @pytest.mark.usefixtures('needs_zlib') @@ -449,7 +449,7 @@ def test_make_distribution_owner_group(self): cmd.run() # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive_name = join(self.tmp_dir, 'dist', 'fake-1.tar.gz') archive = tarfile.open(archive_name) try: for member in archive.getmembers(): @@ -467,7 +467,7 @@ def test_make_distribution_owner_group(self): cmd.run() # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') + archive_name = 
join(self.tmp_dir, 'dist', 'fake-1.tar.gz') archive = tarfile.open(archive_name) # note that we are not testing the group ownership here From 2316432d6e62a3db3ae701d7cf839258c02a702d Mon Sep 17 00:00:00 2001 From: Dustin Ingram Date: Thu, 11 Apr 2024 15:43:12 +0000 Subject: [PATCH 0478/1761] Fix canonicalization --- distutils/dist.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/distutils/dist.py b/distutils/dist.py index b0cb87d559..4b3824df7e 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -1192,7 +1192,8 @@ def get_version(self): def get_fullname(self): return "{}-{}".format( - canonicalize_name(self.get_name()), canonicalize_version(self.get_version()) + canonicalize_name(self.get_name()).replace('-', '_'), + canonicalize_version(self.get_version()), ) def get_author(self): From d2de2195f48ddc07999cbb173df0adc3185013ce Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 11 Apr 2024 19:29:06 -0400 Subject: [PATCH 0479/1761] In test_sdist, provide a more complex name to capture canonicalization behavior. --- distutils/tests/test_sdist.py | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py index e708edc4e1..359325d5dc 100644 --- a/distutils/tests/test_sdist.py +++ b/distutils/tests/test_sdist.py @@ -74,7 +74,7 @@ def get_cmd(self, metadata=None): """Returns a cmd""" if metadata is None: metadata = { - 'name': 'fake', + 'name': 'ns.fake--pkg', 'version': '1.0', 'url': 'xxx', 'author': 'xxx', @@ -118,9 +118,9 @@ def test_prune_file_list(self): # now let's check what we have dist_folder = join(self.tmp_dir, 'dist') files = os.listdir(dist_folder) - assert files == ['fake-1.zip'] + assert files == ['ns_fake_pkg-1.zip'] - zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.zip')) + zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.zip')) try: content = zip_file.namelist() finally: @@ -135,7 +135,7 @@ def test_prune_file_list(self): 'somecode/', 'somecode/__init__.py', ] - assert sorted(content) == ['fake-1/' + x for x in expected] + assert sorted(content) == ['ns_fake_pkg-1/' + x for x in expected] @pytest.mark.usefixtures('needs_zlib') @pytest.mark.skipif("not find_executable('tar')") @@ -153,10 +153,10 @@ def test_make_distribution(self): dist_folder = join(self.tmp_dir, 'dist') result = os.listdir(dist_folder) result.sort() - assert result == ['fake-1.tar', 'fake-1.tar.gz'] + assert result == ['ns_fake_pkg-1.tar', 'ns_fake_pkg-1.tar.gz'] - os.remove(join(dist_folder, 'fake-1.tar')) - os.remove(join(dist_folder, 'fake-1.tar.gz')) + os.remove(join(dist_folder, 'ns_fake_pkg-1.tar')) + os.remove(join(dist_folder, 'ns_fake_pkg-1.tar.gz')) # now trying a tar then a gztar cmd.formats = ['tar', 'gztar'] @@ -166,7 +166,7 @@ def test_make_distribution(self): result = os.listdir(dist_folder) result.sort() - assert result == ['fake-1.tar', 'fake-1.tar.gz'] + assert result == ['ns_fake_pkg-1.tar', 'ns_fake_pkg-1.tar.gz'] @pytest.mark.usefixtures('needs_zlib') def test_add_defaults(self): @@ -219,9 +219,9 @@ def test_add_defaults(self): # now let's check what we have dist_folder = join(self.tmp_dir, 'dist') files = os.listdir(dist_folder) - assert files == ['fake-1.zip'] + assert files == ['ns_fake_pkg-1.zip'] - zip_file = zipfile.ZipFile(join(dist_folder, 'fake-1.zip')) + zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.zip')) try: content = zip_file.namelist() finally: @@ -247,7 +247,7 @@ def test_add_defaults(self): 
'somecode/doc.dat', 'somecode/doc.txt', ] - assert sorted(content) == ['fake-1/' + x for x in expected] + assert sorted(content) == ['ns_fake_pkg-1/' + x for x in expected] # checking the MANIFEST manifest = pathlib.Path(self.tmp_dir, 'MANIFEST').read_text(encoding='utf-8') @@ -420,16 +420,16 @@ def test_manual_manifest(self): assert list(clean_lines(cmd.manifest)) == ['README.manual'] - archive_name = join(self.tmp_dir, 'dist', 'fake-1.tar.gz') + archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.tar.gz') archive = tarfile.open(archive_name) try: filenames = [tarinfo.name for tarinfo in archive] finally: archive.close() assert sorted(filenames) == [ - 'fake-1', - 'fake-1/PKG-INFO', - 'fake-1/README.manual', + 'ns_fake_pkg-1', + 'ns_fake_pkg-1/PKG-INFO', + 'ns_fake_pkg-1/README.manual', ] @pytest.mark.usefixtures('needs_zlib') @@ -449,7 +449,7 @@ def test_make_distribution_owner_group(self): cmd.run() # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.tar.gz') + archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.tar.gz') archive = tarfile.open(archive_name) try: for member in archive.getmembers(): @@ -467,7 +467,7 @@ def test_make_distribution_owner_group(self): cmd.run() # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.tar.gz') + archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.tar.gz') archive = tarfile.open(archive_name) # note that we are not testing the group ownership here From 6e74c881b0a71a06620e7e112ae0f17973e348f6 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 11 Apr 2024 20:24:09 -0400 Subject: [PATCH 0480/1761] Move implementation to monkey.patch. --- setuptools/_core_metadata.py | 9 +++++++++ setuptools/dist.py | 25 +------------------------ setuptools/monkey.py | 1 + 3 files changed, 11 insertions(+), 24 deletions(-) diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py index 5dd97c7719..d8732c49bb 100644 --- a/setuptools/_core_metadata.py +++ b/setuptools/_core_metadata.py @@ -17,6 +17,7 @@ from . import _normalization, _reqs from .extern.packaging.markers import Marker from .extern.packaging.requirements import Requirement +from .extern.packaging.utils import canonicalize_name, canonicalize_version from .extern.packaging.version import Version from .warnings import SetuptoolsDeprecationWarning @@ -257,3 +258,11 @@ def _write_provides_extra(file, processed_extras, safe, unsafe): else: processed_extras[safe] = unsafe file.write(f"Provides-Extra: {safe}\n") + + +# from pypa/distutils#244; needed only until that logic is always available +def get_fullname(self): + return "{}-{}".format( + canonicalize_name(self.get_name()).replace('-', '_'), + canonicalize_version(self.get_version()), + ) diff --git a/setuptools/dist.py b/setuptools/dist.py index 202430fb69..6350e38100 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -7,7 +7,6 @@ import os import re import sys -import contextlib from contextlib import suppress from glob import iglob from pathlib import Path @@ -27,7 +26,6 @@ from .extern.ordered_set import OrderedSet from .extern.packaging.markers import InvalidMarker, Marker from .extern.packaging.specifiers import InvalidSpecifier, SpecifierSet -from .extern.packaging.utils import canonicalize_name, canonicalize_version from .extern.packaging.version import Version from . 
import _entry_points @@ -966,28 +964,7 @@ def run_command(self, command): # Postpone defaults until all explicit configuration is considered # (setup() args, config files, command line and plugins) - with self._override_get_fullname(): - super().run_command(command) - - @contextlib.contextmanager - def _override_get_fullname(self): - def _get_fullname_canonicalized(self): - return "{}-{}".format( - canonicalize_name(self.get_name()).replace('-', '_'), - canonicalize_version(self.get_version()), - ) - - class NoValue: - pass - - orig_val = getattr(self, 'get_fullname', NoValue) - self.get_fullname = _get_fullname_canonicalized.__get__(self) - - try: - yield - finally: - if orig_val is not NoValue: - self.get_fullname = orig_val + super().run_command(command) class DistDeprecationWarning(SetuptoolsDeprecationWarning): diff --git a/setuptools/monkey.py b/setuptools/monkey.py index fd07d91dec..1f8d8ffe0f 100644 --- a/setuptools/monkey.py +++ b/setuptools/monkey.py @@ -95,6 +95,7 @@ def _patch_distribution_metadata(): 'write_pkg_file', 'read_pkg_file', 'get_metadata_version', + 'get_fullname', ): new_val = getattr(_core_metadata, attr) setattr(distutils.dist.DistributionMetadata, attr, new_val) From 11e8f76e422a8d21170a6aae65bde409fc8e0901 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Thu, 11 Apr 2024 19:06:14 -0400 Subject: [PATCH 0481/1761] Add packaging as a vendored package. --- .coveragerc | 1 + conftest.py | 5 + distutils/_vendor/__init__.py | 0 .../packaging-24.0.dist-info/INSTALLER | 1 + .../_vendor/packaging-24.0.dist-info/LICENSE | 3 + .../packaging-24.0.dist-info/LICENSE.APACHE | 177 +++ .../packaging-24.0.dist-info/LICENSE.BSD | 23 + .../_vendor/packaging-24.0.dist-info/METADATA | 102 ++ .../_vendor/packaging-24.0.dist-info/RECORD | 37 + .../packaging-24.0.dist-info/REQUESTED | 0 .../_vendor/packaging-24.0.dist-info/WHEEL | 4 + distutils/_vendor/packaging/__init__.py | 15 + distutils/_vendor/packaging/_elffile.py | 108 ++ distutils/_vendor/packaging/_manylinux.py | 260 +++++ distutils/_vendor/packaging/_musllinux.py | 83 ++ distutils/_vendor/packaging/_parser.py | 356 ++++++ distutils/_vendor/packaging/_structures.py | 61 + distutils/_vendor/packaging/_tokenizer.py | 192 ++++ distutils/_vendor/packaging/markers.py | 252 ++++ distutils/_vendor/packaging/metadata.py | 825 +++++++++++++ distutils/_vendor/packaging/py.typed | 0 distutils/_vendor/packaging/requirements.py | 90 ++ distutils/_vendor/packaging/specifiers.py | 1017 +++++++++++++++++ distutils/_vendor/packaging/tags.py | 571 +++++++++ distutils/_vendor/packaging/utils.py | 172 +++ distutils/_vendor/packaging/version.py | 563 +++++++++ distutils/_vendor/ruff.toml | 1 + 27 files changed, 4919 insertions(+) create mode 100644 distutils/_vendor/__init__.py create mode 100644 distutils/_vendor/packaging-24.0.dist-info/INSTALLER create mode 100644 distutils/_vendor/packaging-24.0.dist-info/LICENSE create mode 100644 distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE create mode 100644 distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD create mode 100644 distutils/_vendor/packaging-24.0.dist-info/METADATA create mode 100644 distutils/_vendor/packaging-24.0.dist-info/RECORD create mode 100644 distutils/_vendor/packaging-24.0.dist-info/REQUESTED create mode 100644 distutils/_vendor/packaging-24.0.dist-info/WHEEL create mode 100644 distutils/_vendor/packaging/__init__.py create mode 100644 distutils/_vendor/packaging/_elffile.py create mode 100644 distutils/_vendor/packaging/_manylinux.py create mode 100644 
distutils/_vendor/packaging/_musllinux.py create mode 100644 distutils/_vendor/packaging/_parser.py create mode 100644 distutils/_vendor/packaging/_structures.py create mode 100644 distutils/_vendor/packaging/_tokenizer.py create mode 100644 distutils/_vendor/packaging/markers.py create mode 100644 distutils/_vendor/packaging/metadata.py create mode 100644 distutils/_vendor/packaging/py.typed create mode 100644 distutils/_vendor/packaging/requirements.py create mode 100644 distutils/_vendor/packaging/specifiers.py create mode 100644 distutils/_vendor/packaging/tags.py create mode 100644 distutils/_vendor/packaging/utils.py create mode 100644 distutils/_vendor/packaging/version.py create mode 100644 distutils/_vendor/ruff.toml diff --git a/.coveragerc b/.coveragerc index 35b98b1df9..654e4c4ff4 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,6 +2,7 @@ omit = # leading `*/` for pytest-dev/pytest-cov#456 */.tox/* + */distutils/_vendor/* disable_warnings = couldnt-parse diff --git a/conftest.py b/conftest.py index 06ce3bc6c8..8e05649fce 100644 --- a/conftest.py +++ b/conftest.py @@ -17,6 +17,11 @@ ]) +collect_ignore_glob = [ + 'distutils/_vendor/**/*', +] + + @pytest.fixture def save_env(): orig = os.environ.copy() diff --git a/distutils/_vendor/__init__.py b/distutils/_vendor/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/distutils/_vendor/packaging-24.0.dist-info/INSTALLER b/distutils/_vendor/packaging-24.0.dist-info/INSTALLER new file mode 100644 index 0000000000..a1b589e38a --- /dev/null +++ b/distutils/_vendor/packaging-24.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/distutils/_vendor/packaging-24.0.dist-info/LICENSE b/distutils/_vendor/packaging-24.0.dist-info/LICENSE new file mode 100644 index 0000000000..6f62d44e4e --- /dev/null +++ b/distutils/_vendor/packaging-24.0.dist-info/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE b/distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE new file mode 100644 index 0000000000..f433b1a53f --- /dev/null +++ b/distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD b/distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD new file mode 100644 index 0000000000..42ce7b75c9 --- /dev/null +++ b/distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/distutils/_vendor/packaging-24.0.dist-info/METADATA b/distutils/_vendor/packaging-24.0.dist-info/METADATA new file mode 100644 index 0000000000..10ab4390a9 --- /dev/null +++ b/distutils/_vendor/packaging-24.0.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.1 +Name: packaging +Version: 24.0 +Summary: Core utilities for Python packages +Author-email: Donald Stufft +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications `_. + +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, utilities. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +The ``packaging`` library uses calendar-based versioning (``YY.N``). + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. 
_`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/distutils/_vendor/packaging-24.0.dist-info/RECORD b/distutils/_vendor/packaging-24.0.dist-info/RECORD new file mode 100644 index 0000000000..bcf796c2f4 --- /dev/null +++ b/distutils/_vendor/packaging-24.0.dist-info/RECORD @@ -0,0 +1,37 @@ +packaging-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +packaging-24.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 +packaging-24.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +packaging-24.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 +packaging-24.0.dist-info/METADATA,sha256=0dESdhY_wHValuOrbgdebiEw04EbX4dkujlxPdEsFus,3203 +packaging-24.0.dist-info/RECORD,, +packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496 +packaging/__pycache__/__init__.cpython-312.pyc,, +packaging/__pycache__/_elffile.cpython-312.pyc,, +packaging/__pycache__/_manylinux.cpython-312.pyc,, +packaging/__pycache__/_musllinux.cpython-312.pyc,, +packaging/__pycache__/_parser.cpython-312.pyc,, +packaging/__pycache__/_structures.cpython-312.pyc,, +packaging/__pycache__/_tokenizer.cpython-312.pyc,, +packaging/__pycache__/markers.cpython-312.pyc,, +packaging/__pycache__/metadata.cpython-312.pyc,, +packaging/__pycache__/requirements.cpython-312.pyc,, +packaging/__pycache__/specifiers.cpython-312.pyc,, +packaging/__pycache__/tags.cpython-312.pyc,, +packaging/__pycache__/utils.cpython-312.pyc,, +packaging/__pycache__/version.cpython-312.pyc,, +packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266 +packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590 +packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676 +packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347 +packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 +packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292 +packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208 +packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036 +packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933 +packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784 +packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950 +packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268 +packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236 diff --git a/distutils/_vendor/packaging-24.0.dist-info/REQUESTED b/distutils/_vendor/packaging-24.0.dist-info/REQUESTED new file mode 100644 index 0000000000..e69de29bb2 diff --git a/distutils/_vendor/packaging-24.0.dist-info/WHEEL b/distutils/_vendor/packaging-24.0.dist-info/WHEEL new file mode 100644 index 0000000000..3b5e64b5e6 --- /dev/null +++ b/distutils/_vendor/packaging-24.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/distutils/_vendor/packaging/__init__.py b/distutils/_vendor/packaging/__init__.py new file mode 100644 index 0000000000..e7c0aa12ca --- 
/dev/null
+++ b/distutils/_vendor/packaging/__init__.py
@@ -0,0 +1,15 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "24.0"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = "2014 %s" % __author__
diff --git a/distutils/_vendor/packaging/_elffile.py b/distutils/_vendor/packaging/_elffile.py
new file mode 100644
index 0000000000..6fb19b30bb
--- /dev/null
+++ b/distutils/_vendor/packaging/_elffile.py
@@ -0,0 +1,108 @@
+"""
+ELF file parser.
+
+This provides a class ``ELFFile`` that parses an ELF executable in a similar
+interface to ``ZipFile``. Only the read interface is implemented.
+
+Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+import enum
+import os
+import struct
+from typing import IO, Optional, Tuple
+
+
+class ELFInvalid(ValueError):
+    pass
+
+
+class EIClass(enum.IntEnum):
+    C32 = 1
+    C64 = 2
+
+
+class EIData(enum.IntEnum):
+    Lsb = 1
+    Msb = 2
+
+
+class EMachine(enum.IntEnum):
+    I386 = 3
+    S390 = 22
+    Arm = 40
+    X8664 = 62
+    AArc64 = 183
+
+
+class ELFFile:
+    """
+    Representation of an ELF executable.
+    """
+
+    def __init__(self, f: IO[bytes]) -> None:
+        self._f = f
+
+        try:
+            ident = self._read("16B")
+        except struct.error:
+            raise ELFInvalid("unable to parse identification")
+        magic = bytes(ident[:4])
+        if magic != b"\x7fELF":
+            raise ELFInvalid(f"invalid magic: {magic!r}")
+
+        self.capacity = ident[4]  # Format for program header (bitness).
+        self.encoding = ident[5]  # Data structure encoding (endianness).
+
+        try:
+            # e_fmt: Format for program header.
+            # p_fmt: Format for section header.
+            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+            e_fmt, self._p_fmt, self._p_idx = {
+                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
+                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
+                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
+                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
+            }[(self.capacity, self.encoding)]
+        except KeyError:
+            raise ELFInvalid(
+                f"unrecognized capacity ({self.capacity}) or "
+                f"encoding ({self.encoding})"
+            )
+
+        try:
+            (
+                _,
+                self.machine,  # Architecture type.
+                _,
+                _,
+                self._e_phoff,  # Offset of program header.
+                _,
+                self.flags,  # Processor-specific flags.
+                _,
+                self._e_phentsize,  # Size of section.
+                self._e_phnum,  # Number of sections.
+            ) = self._read(e_fmt)
+        except struct.error as e:
+            raise ELFInvalid("unable to parse machine and section information") from e
+
+    def _read(self, fmt: str) -> Tuple[int, ...]:
+        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
+
+    @property
+    def interpreter(self) -> Optional[str]:
+        """
+        The path recorded in the ``PT_INTERP`` section header.
+        """
+        for index in range(self._e_phnum):
+            self._f.seek(self._e_phoff + self._e_phentsize * index)
+            try:
+                data = self._read(self._p_fmt)
+            except struct.error:
+                continue
+            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
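+                # (p_type == 3 is PT_INTERP in the ELF spec; any other
+                # program header type is skipped.)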
+ continue + self._f.seek(data[self._p_idx[1]]) + return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") + return None diff --git a/distutils/_vendor/packaging/_manylinux.py b/distutils/_vendor/packaging/_manylinux.py new file mode 100644 index 0000000000..ad62505f3f --- /dev/null +++ b/distutils/_vendor/packaging/_manylinux.py @@ -0,0 +1,260 @@ +import collections +import contextlib +import functools +import os +import re +import sys +import warnings +from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple + +from ._elffile import EIClass, EIData, ELFFile, EMachine + +EF_ARM_ABIMASK = 0xFF000000 +EF_ARM_ABI_VER5 = 0x05000000 +EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + +# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` +# as the type for `path` until then. +@contextlib.contextmanager +def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]: + try: + with open(path, "rb") as f: + yield ELFFile(f) + except (OSError, TypeError, ValueError): + yield None + + +def _is_linux_armhf(executable: str) -> bool: + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.Arm + and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 + and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD + ) + + +def _is_linux_i686(executable: str) -> bool: + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.I386 + ) + + +def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: + if "armv7l" in archs: + return _is_linux_armhf(executable) + if "i686" in archs: + return _is_linux_i686(executable) + allowed_archs = { + "x86_64", + "aarch64", + "ppc64", + "ppc64le", + "s390x", + "loongarch64", + "riscv64", + } + return any(arch in allowed_archs for arch in archs) + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): + major: int + minor: int + + +def _glibc_version_string_confstr() -> Optional[str]: + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 + try: + # Should be a string like "glibc 2.17". + version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION") + assert version_string is not None + _, version = version_string.rsplit() + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes() -> Optional[str]: + """ + Fallback implementation of glibc_version_string using ctypes. 
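+
+    For example, a glibc-based system typically yields a string like "2.17";
+    under musl either dlopen() fails or the gnu_get_libc_version symbol is
+    missing, and None is returned.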
+    """
+    try:
+        import ctypes
+    except ImportError:
+        return None
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    #
+    # We must also handle the special case where the executable is not a
+    # dynamically linked executable. This can occur when using musl libc,
+    # for example. In this situation, dlopen() will error, leading to an
+    # OSError. Interestingly, at least in the case of musl, there is no
+    # errno set on the OSError. The single string argument used to construct
+    # OSError comes from libc itself and is therefore not portable to
+    # hard code here. In any case, failure to call dlopen() means we
+    # can proceed, so we bail on our attempt.
+    try:
+        process_namespace = ctypes.CDLL(None)
+    except OSError:
+        return None
+
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str: str = gnu_get_libc_version()
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+def _glibc_version_string() -> Optional[str]:
+    """Returns glibc version string, or None if not using glibc."""
+    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+    """Parse glibc version.
+
+    We use a regexp instead of str.split because we want to discard any
+    random junk that might come after the minor version -- this might happen
+    in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    uses version strings like "2.20-2014.11"). See gh-3588.
+    """
+    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            f"Expected glibc version with 2 components major.minor,"
+            f" got: {version_str}",
+            RuntimeWarning,
+        )
+        return -1, -1
+    return int(m.group("major")), int(m.group("minor"))
+
+
+@functools.lru_cache()
+def _get_glibc_version() -> Tuple[int, int]:
+    version_str = _glibc_version_string()
+    if version_str is None:
+        return (-1, -1)
+    return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
+    sys_glibc = _get_glibc_version()
+    if sys_glibc < version:
+        return False
+    # Check for presence of _manylinux module.
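+    # For illustration, a hypothetical platform-provided `_manylinux` module
+    # (not part of this package) may force or veto compatibility per PEP 600:
+    #
+    #     def manylinux_compatible(tag_major, tag_minor, tag_arch):
+    #         return None  # None defers to the glibc version check below.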
+ try: + import _manylinux + except ImportError: + return True + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible(version[0], version[1], arch) + if result is not None: + return bool(result) + return True + if version == _GLibCVersion(2, 5): + if hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + if version == _GLibCVersion(2, 12): + if hasattr(_manylinux, "manylinux2010_compatible"): + return bool(_manylinux.manylinux2010_compatible) + if version == _GLibCVersion(2, 17): + if hasattr(_manylinux, "manylinux2014_compatible"): + return bool(_manylinux.manylinux2014_compatible) + return True + + +_LEGACY_MANYLINUX_MAP = { + # CentOS 7 w/ glibc 2.17 (PEP 599) + (2, 17): "manylinux2014", + # CentOS 6 w/ glibc 2.12 (PEP 571) + (2, 12): "manylinux2010", + # CentOS 5 w/ glibc 2.5 (PEP 513) + (2, 5): "manylinux1", +} + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate manylinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be manylinux-compatible. + + :returns: An iterator of compatible manylinux tags. + """ + if not _have_compatible_abi(sys.executable, archs): + return + # Oldest glibc to be supported regardless of architecture is (2, 17). + too_old_glibc2 = _GLibCVersion(2, 16) + if set(archs) & {"x86_64", "i686"}: + # On x86/i686 also oldest glibc to be supported is (2, 5). + too_old_glibc2 = _GLibCVersion(2, 4) + current_glibc = _GLibCVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. + # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_minor = _LAST_GLIBC_MINOR[glibc_major] + glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) + for arch in archs: + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) + tag = "manylinux_{}_{}".format(*glibc_version) + if _is_compatible(arch, glibc_version): + yield f"{tag}_{arch}" + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if glibc_version in _LEGACY_MANYLINUX_MAP: + legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] + if _is_compatible(arch, glibc_version): + yield f"{legacy_tag}_{arch}" diff --git a/distutils/_vendor/packaging/_musllinux.py b/distutils/_vendor/packaging/_musllinux.py new file mode 100644 index 0000000000..86419df9d7 --- /dev/null +++ b/distutils/_vendor/packaging/_musllinux.py @@ -0,0 +1,83 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. 
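+
+For example, a musl 1.2 runtime on x86_64 would yield the tags
+musllinux_1_2_x86_64 down through musllinux_1_0_x86_64.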
+""" + +import functools +import re +import subprocess +import sys +from typing import Iterator, NamedTuple, Optional, Sequence + +from ._elffile import ELFFile + + +class _MuslVersion(NamedTuple): + major: int + minor: int + + +def _parse_musl_version(output: str) -> Optional[_MuslVersion]: + lines = [n for n in (n.strip() for n in output.splitlines()) if n] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + m = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if not m: + return None + return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache() +def _get_musl_version(executable: str) -> Optional[_MuslVersion]: + """Detect currently-running musl runtime version. + + This is done by checking the specified executable's dynamic linking + information, and invoking the loader to parse its output for a version + string. If the loader is musl, the output would be something like:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + try: + with open(executable, "rb") as f: + ld = ELFFile(f).interpreter + except (OSError, TypeError, ValueError): + return None + if ld is None or "musl" not in ld: + return None + proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) + return _parse_musl_version(proc.stderr) + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate musllinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be musllinux-compatible. + + :returns: An iterator of compatible musllinux tags. + """ + sys_musl = _get_musl_version(sys.executable) + if sys_musl is None: # Python not dynamically linked against musl. + return + for arch in archs: + for minor in range(sys_musl.minor, -1, -1): + yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__": # pragma: no cover + import sysconfig + + plat = sysconfig.get_platform() + assert plat.startswith("linux-"), "not linux" + + print("plat:", plat) + print("musl:", _get_musl_version(sys.executable)) + print("tags:", end=" ") + for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): + print(t, end="\n ") diff --git a/distutils/_vendor/packaging/_parser.py b/distutils/_vendor/packaging/_parser.py new file mode 100644 index 0000000000..684df75457 --- /dev/null +++ b/distutils/_vendor/packaging/_parser.py @@ -0,0 +1,356 @@ +"""Handwritten parser of dependency specifiers. + +The docstring for each __parse_* function contains ENBF-inspired grammar representing +the implementation. 
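+
+For example, parsing ``name[extra] >=1.0 ; python_version < "3.8"`` exercises
+_parse_extras, _parse_specifier and _parse_requirement_marker in turn.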
+""" + +import ast +from typing import Any, List, NamedTuple, Optional, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + def serialize(self) -> str: + return str(self) + + +class Value(Node): + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + def serialize(self) -> str: + return str(self) + + +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] +# MarkerList = List[Union["MarkerList", MarkerAtom, str]] +# mypy does not support recursive type definition +# https://github.com/python/mypy/issues/731 +MarkerAtom = Any +MarkerList = List[Any] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: List[str] + specifier: str + marker: Optional[MarkerList] + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> Tuple[str, str, Optional[MarkerList]]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? + """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, span_start=url_start, after="URL and whitespace" + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + after=( + "version specifier" + if specifier + else "name and no valid version specifier" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, after: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? 
+ """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected end or semicolon (after {after})", + span_start=span_start, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> List[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? + """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens( + "LEFT_BRACKET", + "RIGHT_BRACKET", + around="extras", + ): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: List[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? + """ + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="version specifier", + ): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? + """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + span_start = tokenizer.position + parsed_specifiers += tokenizer.read().text + if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): + tokenizer.raise_syntax_error( + ".* suffix can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position + 1, + ) + if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): + tokenizer.raise_syntax_error( + "Local version label can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position, + ) + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: + retval = _parse_marker(tokenizer) + tokenizer.expect("END", expected="end of marker expression") + return retval + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? 
LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? + """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="marker expression", + ): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? + """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if env_var in ("platform_python_implementation", "python_implementation"): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of " + "<=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/distutils/_vendor/packaging/_structures.py b/distutils/_vendor/packaging/_structures.py new file mode 100644 index 0000000000..90a6465f96 --- /dev/null +++ b/distutils/_vendor/packaging/_structures.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
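+
+# Illustrative note: these sentinels pad version-comparison keys elsewhere in
+# this package; Infinity orders above any other value and NegativeInfinity
+# below, e.g.
+#
+#     (1, 0, NegativeInfinity) < (1, 0, Infinity)  # -> True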
+ + +class InfinityType: + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "NegativeInfinityType": + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType: + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/distutils/_vendor/packaging/_tokenizer.py b/distutils/_vendor/packaging/_tokenizer.py new file mode 100644 index 0000000000..dd0d648d49 --- /dev/null +++ b/distutils/_vendor/packaging/_tokenizer.py @@ -0,0 +1,192 @@ +import contextlib +import re +from dataclasses import dataclass +from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union + +from .specifiers import Specifier + + +@dataclass +class Token: + name: str + text: str + position: int + + +class ParserSyntaxError(Exception): + """The provided source text could not be parsed correctly.""" + + def __init__( + self, + message: str, + *, + source: str, + span: Tuple[int, int], + ) -> None: + self.span = span + self.message = message + self.source = source + + super().__init__() + + def __str__(self) -> str: + marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" + return "\n ".join([self.message, self.source, marker]) + + +DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { + "LEFT_PARENTHESIS": r"\(", + "RIGHT_PARENTHESIS": r"\)", + "LEFT_BRACKET": r"\[", + "RIGHT_BRACKET": r"\]", + "SEMICOLON": r";", + "COMMA": r",", + "QUOTED_STRING": re.compile( + r""" + ( + ('[^']*') + | + ("[^"]*") + ) + """, + re.VERBOSE, + ), + "OP": r"(===|==|~=|!=|<=|>=|<|>)", + "BOOLOP": r"\b(or|and)\b", + "IN": r"\bin\b", + "NOT": r"\bnot\b", + "VARIABLE": re.compile( + r""" + \b( + python_version + |python_full_version + |os[._]name + |sys[._]platform + |platform_(release|system) + |platform[._](version|machine|python_implementation) + |python_implementation + |implementation_(name|version) + |extra + )\b + """, + re.VERBOSE, + ), + "SPECIFIER": re.compile( + Specifier._operator_regex_str + Specifier._version_regex_str, + re.VERBOSE | re.IGNORECASE, + ), + "AT": r"\@", + "URL": r"[^ \t]+", + "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", + "VERSION_PREFIX_TRAIL": r"\.\*", + "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*", + "WS": r"[ \t]+", + "END": r"$", +} + + +class Tokenizer: + """Context-sensitive token parsing. + + Provides methods to examine the input stream to check whether the next token + matches. 
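+
+    Typical flow: check() tentatively matches and loads the next token, read()
+    consumes it, and expect() combines the two, raising ParserSyntaxError when
+    the match fails.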
+ """ + + def __init__( + self, + source: str, + *, + rules: "Dict[str, Union[str, re.Pattern[str]]]", + ) -> None: + self.source = source + self.rules: Dict[str, re.Pattern[str]] = { + name: re.compile(pattern) for name, pattern in rules.items() + } + self.next_token: Optional[Token] = None + self.position = 0 + + def consume(self, name: str) -> None: + """Move beyond provided token name, if at current position.""" + if self.check(name): + self.read() + + def check(self, name: str, *, peek: bool = False) -> bool: + """Check whether the next token has the provided name. + + By default, if the check succeeds, the token *must* be read before + another check. If `peek` is set to `True`, the token is not loaded and + would need to be checked again. + """ + assert ( + self.next_token is None + ), f"Cannot check for {name!r}, already have {self.next_token!r}" + assert name in self.rules, f"Unknown token name: {name!r}" + + expression = self.rules[name] + + match = expression.match(self.source, self.position) + if match is None: + return False + if not peek: + self.next_token = Token(name, match[0], self.position) + return True + + def expect(self, name: str, *, expected: str) -> Token: + """Expect a certain token name next, failing with a syntax error otherwise. + + The token is *not* read. + """ + if not self.check(name): + raise self.raise_syntax_error(f"Expected {expected}") + return self.read() + + def read(self) -> Token: + """Consume the next token and return it.""" + token = self.next_token + assert token is not None + + self.position += len(token.text) + self.next_token = None + + return token + + def raise_syntax_error( + self, + message: str, + *, + span_start: Optional[int] = None, + span_end: Optional[int] = None, + ) -> NoReturn: + """Raise ParserSyntaxError at the given position.""" + span = ( + self.position if span_start is None else span_start, + self.position if span_end is None else span_end, + ) + raise ParserSyntaxError( + message, + source=self.source, + span=span, + ) + + @contextlib.contextmanager + def enclosing_tokens( + self, open_token: str, close_token: str, *, around: str + ) -> Iterator[None]: + if self.check(open_token): + open_position = self.position + self.read() + else: + open_position = None + + yield + + if open_position is None: + return + + if not self.check(close_token): + self.raise_syntax_error( + f"Expected matching {close_token} for {open_token}, after {around}", + span_start=open_position, + ) + + self.read() diff --git a/distutils/_vendor/packaging/markers.py b/distutils/_vendor/packaging/markers.py new file mode 100644 index 0000000000..8b98fca723 --- /dev/null +++ b/distutils/_vendor/packaging/markers.py @@ -0,0 +1,252 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import operator +import os +import platform +import sys +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +from ._parser import ( + MarkerAtom, + MarkerList, + Op, + Value, + Variable, + parse_marker as _parse_marker, +) +from ._tokenizer import ParserSyntaxError +from .specifiers import InvalidSpecifier, Specifier +from .utils import canonicalize_name + +__all__ = [ + "InvalidMarker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "Marker", + "default_environment", +] + +Operator = Callable[[str, str], bool] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. 
+ """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. + """ + + +def _normalize_extra_values(results: Any) -> Any: + """ + Normalize extra values. + """ + if isinstance(results[0], tuple): + lhs, op, rhs = results[0] + if isinstance(lhs, Variable) and lhs.value == "extra": + normalized_extra = canonicalize_name(rhs.value) + rhs = Value(normalized_extra) + elif isinstance(rhs, Variable) and rhs.value == "extra": + normalized_extra = canonicalize_name(lhs.value) + lhs = Value(normalized_extra) + results[0] = lhs, op, rhs + return results + + +def _format_marker( + marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True +) -> str: + + assert isinstance(marker, (list, tuple, str)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. + if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators: Dict[str, Operator] = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs: str, op: Op, rhs: str) -> bool: + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs, prereleases=True) + + oper: Optional[Operator] = _operators.get(op.serialize()) + if oper is None: + raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") + + return oper(lhs, rhs) + + +def _normalize(*values: str, key: str) -> Tuple[str, ...]: + # PEP 685 – Comparison of extra names for optional distribution dependencies + # https://peps.python.org/pep-0685/ + # > When comparing extra names, tools MUST normalize the names being + # > compared using the semantics outlined in PEP 503 for names + if key == "extra": + return tuple(canonicalize_name(v) for v in values) + + # other environment markers don't have such standards + return values + + +def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: + groups: List[List[bool]] = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, str)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + environment_key = lhs.value + lhs_value = environment[environment_key] + rhs_value = rhs.value + else: + lhs_value = lhs.value + environment_key = rhs.value + rhs_value = environment[environment_key] + + lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def 
format_full_version(info: "sys._version_info") -> str:
+    version = "{0.major}.{0.minor}.{0.micro}".format(info)
+    kind = info.releaselevel
+    if kind != "final":
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment() -> Dict[str, str]:
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker:
+    def __init__(self, marker: str) -> None:
+        # Note: We create a Marker object without calling this constructor in
+        #       packaging.requirements.Requirement. If any additional logic is
+        #       added here, make sure to mirror/adapt Requirement.
+        try:
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f"<Marker('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = default_environment()
+        current_environment["extra"] = ""
+        if environment is not None:
+            current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""
+
+        return _evaluate_markers(self._markers, current_environment)
diff --git a/distutils/_vendor/packaging/metadata.py b/distutils/_vendor/packaging/metadata.py
new file mode 100644
index 0000000000..fb27493079
--- /dev/null
+++ b/distutils/_vendor/packaging/metadata.py
@@ -0,0 +1,825 @@
+import email.feedparser
+import email.header
+import email.message
+import email.parser
+import email.policy
+import sys
+import typing
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from . 
import requirements, specifiers, utils, version as version_module + +T = typing.TypeVar("T") +if sys.version_info[:2] >= (3, 8): # pragma: no cover + from typing import Literal, TypedDict +else: # pragma: no cover + if typing.TYPE_CHECKING: + from typing_extensions import Literal, TypedDict + else: + try: + from typing_extensions import Literal, TypedDict + except ImportError: + + class Literal: + def __init_subclass__(*_args, **_kwargs): + pass + + class TypedDict: + def __init_subclass__(*_args, **_kwargs): + pass + + +try: + ExceptionGroup +except NameError: # pragma: no cover + + class ExceptionGroup(Exception): # noqa: N818 + """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11. + + If :external:exc:`ExceptionGroup` is already defined by Python itself, + that version is used instead. + """ + + message: str + exceptions: List[Exception] + + def __init__(self, message: str, exceptions: List[Exception]) -> None: + self.message = message + self.exceptions = exceptions + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" + +else: # pragma: no cover + ExceptionGroup = ExceptionGroup + + +class InvalidMetadata(ValueError): + """A metadata field contains invalid data.""" + + field: str + """The name of the field that contains invalid data.""" + + def __init__(self, field: str, message: str) -> None: + self.field = field + super().__init__(message) + + +# The RawMetadata class attempts to make as few assumptions about the underlying +# serialization formats as possible. The idea is that as long as a serialization +# formats offer some very basic primitives in *some* way then we can support +# serializing to and from that format. +class RawMetadata(TypedDict, total=False): + """A dictionary of raw core metadata. + + Each field in core metadata maps to a key of this dictionary (when data is + provided). The key is lower-case and underscores are used instead of dashes + compared to the equivalent core metadata field. Any core metadata field that + can be specified multiple times or can hold multiple values in a single + field have a key with a plural name. See :class:`Metadata` whose attributes + match the keys of this dictionary. + + Core metadata fields that can be specified multiple times are stored as a + list or dict depending on which is appropriate for the field. Any fields + which hold multiple values in a single field are stored as a list. + + """ + + # Metadata 1.0 - PEP 241 + metadata_version: str + name: str + version: str + platforms: List[str] + summary: str + description: str + keywords: List[str] + home_page: str + author: str + author_email: str + license: str + + # Metadata 1.1 - PEP 314 + supported_platforms: List[str] + download_url: str + classifiers: List[str] + requires: List[str] + provides: List[str] + obsoletes: List[str] + + # Metadata 1.2 - PEP 345 + maintainer: str + maintainer_email: str + requires_dist: List[str] + provides_dist: List[str] + obsoletes_dist: List[str] + requires_python: str + requires_external: List[str] + project_urls: Dict[str, str] + + # Metadata 2.0 + # PEP 426 attempted to completely revamp the metadata format + # but got stuck without ever being able to build consensus on + # it and ultimately ended up withdrawn. + # + # However, a number of tools had started emitting METADATA with + # `2.0` Metadata-Version, so for historical reasons, this version + # was skipped. 
+ + # Metadata 2.1 - PEP 566 + description_content_type: str + provides_extra: List[str] + + # Metadata 2.2 - PEP 643 + dynamic: List[str] + + # Metadata 2.3 - PEP 685 + # No new fields were added in PEP 685, just some edge case were + # tightened up to provide better interoptability. + + +_STRING_FIELDS = { + "author", + "author_email", + "description", + "description_content_type", + "download_url", + "home_page", + "license", + "maintainer", + "maintainer_email", + "metadata_version", + "name", + "requires_python", + "summary", + "version", +} + +_LIST_FIELDS = { + "classifiers", + "dynamic", + "obsoletes", + "obsoletes_dist", + "platforms", + "provides", + "provides_dist", + "provides_extra", + "requires", + "requires_dist", + "requires_external", + "supported_platforms", +} + +_DICT_FIELDS = { + "project_urls", +} + + +def _parse_keywords(data: str) -> List[str]: + """Split a string of comma-separate keyboards into a list of keywords.""" + return [k.strip() for k in data.split(",")] + + +def _parse_project_urls(data: List[str]) -> Dict[str, str]: + """Parse a list of label/URL string pairings separated by a comma.""" + urls = {} + for pair in data: + # Our logic is slightly tricky here as we want to try and do + # *something* reasonable with malformed data. + # + # The main thing that we have to worry about, is data that does + # not have a ',' at all to split the label from the Value. There + # isn't a singular right answer here, and we will fail validation + # later on (if the caller is validating) so it doesn't *really* + # matter, but since the missing value has to be an empty str + # and our return value is dict[str, str], if we let the key + # be the missing value, then they'd have multiple '' values that + # overwrite each other in a accumulating dict. + # + # The other potentional issue is that it's possible to have the + # same label multiple times in the metadata, with no solid "right" + # answer with what to do in that case. As such, we'll do the only + # thing we can, which is treat the field as unparseable and add it + # to our list of unparsed fields. + parts = [p.strip() for p in pair.split(",", 1)] + parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items + + # TODO: The spec doesn't say anything about if the keys should be + # considered case sensitive or not... logically they should + # be case-preserving and case-insensitive, but doing that + # would open up more cases where we might have duplicate + # entries. + label, url = parts + if label in urls: + # The label already exists in our set of urls, so this field + # is unparseable, and we can just add the whole thing to our + # unparseable data and stop processing it. + raise KeyError("duplicate labels in project urls") + urls[label] = url + + return urls + + +def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str: + """Get the body of the message.""" + # If our source is a str, then our caller has managed encodings for us, + # and we don't need to deal with it. + if isinstance(source, str): + payload: str = msg.get_payload() + return payload + # If our source is a bytes, then we're managing the encoding and we need + # to deal with it. + else: + bpayload: bytes = msg.get_payload(decode=True) + try: + return bpayload.decode("utf8", "strict") + except UnicodeDecodeError: + raise ValueError("payload in an invalid encoding") + + +# The various parse_FORMAT functions here are intended to be as lenient as +# possible in their parsing, while still returning a correctly typed +# RawMetadata. 
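+#
+# As a rough sketch of the contract (input invented for illustration):
+#
+#     raw, unparsed = parse_email(b"Metadata-Version: 2.1\nName: pkg\nVersion: 1.0")
+#     # raw -> {"metadata_version": "2.1", "name": "pkg", "version": "1.0"}
+#     # unparsed -> {}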
+# +# To aid in this, we also generally want to do as little touching of the +# data as possible, except where there are possibly some historic holdovers +# that make valid data awkward to work with. +# +# While this is a lower level, intermediate format than our ``Metadata`` +# class, some light touch ups can make a massive difference in usability. + +# Map METADATA fields to RawMetadata. +_EMAIL_TO_RAW_MAPPING = { + "author": "author", + "author-email": "author_email", + "classifier": "classifiers", + "description": "description", + "description-content-type": "description_content_type", + "download-url": "download_url", + "dynamic": "dynamic", + "home-page": "home_page", + "keywords": "keywords", + "license": "license", + "maintainer": "maintainer", + "maintainer-email": "maintainer_email", + "metadata-version": "metadata_version", + "name": "name", + "obsoletes": "obsoletes", + "obsoletes-dist": "obsoletes_dist", + "platform": "platforms", + "project-url": "project_urls", + "provides": "provides", + "provides-dist": "provides_dist", + "provides-extra": "provides_extra", + "requires": "requires", + "requires-dist": "requires_dist", + "requires-external": "requires_external", + "requires-python": "requires_python", + "summary": "summary", + "supported-platform": "supported_platforms", + "version": "version", +} +_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} + + +def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]: + """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). + + This function returns a two-item tuple of dicts. The first dict is of + recognized fields from the core metadata specification. Fields that can be + parsed and translated into Python's built-in types are converted + appropriately. All other fields are left as-is. Fields that are allowed to + appear multiple times are stored as lists. + + The second dict contains all other fields from the metadata. This includes + any unrecognized fields. It also includes any fields which are expected to + be parsed into a built-in type but were not formatted appropriately. Finally, + any fields that are expected to appear only once but are repeated are + included in this dict. + + """ + raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {} + unparsed: Dict[str, List[str]] = {} + + if isinstance(data, str): + parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) + else: + parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) + + # We have to wrap parsed.keys() in a set, because in the case of multiple + # values for a key (a list), the key will appear multiple times in the + # list of keys, but we're avoiding that by using get_all(). + for name in frozenset(parsed.keys()): + # Header names in RFC are case insensitive, so we'll normalize to all + # lower case to make comparisons easier. + name = name.lower() + + # We use get_all() here, even for fields that aren't multiple use, + # because otherwise someone could have e.g. two Name fields, and we + # would just silently ignore it rather than doing something about it. + headers = parsed.get_all(name) or [] + + # The way the email module works when parsing bytes is that it + # unconditionally decodes the bytes as ascii using the surrogateescape + # handler. 
When you pull that data back out (such as with get_all() ), + # it looks to see if the str has any surrogate escapes, and if it does + # it wraps it in a Header object instead of returning the string. + # + # As such, we'll look for those Header objects, and fix up the encoding. + value = [] + # Flag if we have run into any issues processing the headers, thus + # signalling that the data belongs in 'unparsed'. + valid_encoding = True + for h in headers: + # It's unclear if this can return more types than just a Header or + # a str, so we'll just assert here to make sure. + assert isinstance(h, (email.header.Header, str)) + + # If it's a header object, we need to do our little dance to get + # the real data out of it. In cases where there is invalid data + # we're going to end up with mojibake, but there's no obvious, good + # way around that without reimplementing parts of the Header object + # ourselves. + # + # That should be fine since, if mojibacked happens, this key is + # going into the unparsed dict anyways. + if isinstance(h, email.header.Header): + # The Header object stores it's data as chunks, and each chunk + # can be independently encoded, so we'll need to check each + # of them. + chunks: List[Tuple[bytes, Optional[str]]] = [] + for bin, encoding in email.header.decode_header(h): + try: + bin.decode("utf8", "strict") + except UnicodeDecodeError: + # Enable mojibake. + encoding = "latin1" + valid_encoding = False + else: + encoding = "utf8" + chunks.append((bin, encoding)) + + # Turn our chunks back into a Header object, then let that + # Header object do the right thing to turn them into a + # string for us. + value.append(str(email.header.make_header(chunks))) + # This is already a string, so just add it. + else: + value.append(h) + + # We've processed all of our values to get them into a list of str, + # but we may have mojibake data, in which case this is an unparsed + # field. + if not valid_encoding: + unparsed[name] = value + continue + + raw_name = _EMAIL_TO_RAW_MAPPING.get(name) + if raw_name is None: + # This is a bit of a weird situation, we've encountered a key that + # we don't know what it means, so we don't know whether it's meant + # to be a list or not. + # + # Since we can't really tell one way or another, we'll just leave it + # as a list, even though it may be a single item list, because that's + # what makes the most sense for email headers. + unparsed[name] = value + continue + + # If this is one of our string fields, then we'll check to see if our + # value is a list of a single item. If it is then we'll assume that + # it was emitted as a single string, and unwrap the str from inside + # the list. + # + # If it's any other kind of data, then we haven't the faintest clue + # what we should parse it as, and we have to just add it to our list + # of unparsed stuff. + if raw_name in _STRING_FIELDS and len(value) == 1: + raw[raw_name] = value[0] + # If this is one of our list of string fields, then we can just assign + # the value, since email *only* has strings, and our get_all() call + # above ensures that this is a list. + elif raw_name in _LIST_FIELDS: + raw[raw_name] = value + # Special Case: Keywords + # The keywords field is implemented in the metadata spec as a str, + # but it conceptually is a list of strings, and is serialized using + # ", ".join(keywords), so we'll do some light data massaging to turn + # this into what it logically is. 
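+        # (For example, a "Keywords: pep, packaging" header arrives here as
+        # ["pep, packaging"] and is split into ["pep", "packaging"].)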
+ elif raw_name == "keywords" and len(value) == 1: + raw[raw_name] = _parse_keywords(value[0]) + # Special Case: Project-URL + # The project urls is implemented in the metadata spec as a list of + # specially-formatted strings that represent a key and a value, which + # is fundamentally a mapping, however the email format doesn't support + # mappings in a sane way, so it was crammed into a list of strings + # instead. + # + # We will do a little light data massaging to turn this into a map as + # it logically should be. + elif raw_name == "project_urls": + try: + raw[raw_name] = _parse_project_urls(value) + except KeyError: + unparsed[name] = value + # Nothing that we've done has managed to parse this, so it'll just + # throw it in our unparseable data and move on. + else: + unparsed[name] = value + + # We need to support getting the Description from the message payload in + # addition to getting it from the the headers. This does mean, though, there + # is the possibility of it being set both ways, in which case we put both + # in 'unparsed' since we don't know which is right. + try: + payload = _get_payload(parsed, data) + except ValueError: + unparsed.setdefault("description", []).append( + parsed.get_payload(decode=isinstance(data, bytes)) + ) + else: + if payload: + # Check to see if we've already got a description, if so then both + # it, and this body move to unparseable. + if "description" in raw: + description_header = cast(str, raw.pop("description")) + unparsed.setdefault("description", []).extend( + [description_header, payload] + ) + elif "description" in unparsed: + unparsed["description"].append(payload) + else: + raw["description"] = payload + + # We need to cast our `raw` to a metadata, because a TypedDict only support + # literal key names, but we're computing our key names on purpose, but the + # way this function is implemented, our `TypedDict` can only have valid key + # names. + return cast(RawMetadata, raw), unparsed + + +_NOT_FOUND = object() + + +# Keep the two values in sync. +_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] +_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] + +_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"]) + + +class _Validator(Generic[T]): + """Validate a metadata field. + + All _process_*() methods correspond to a core metadata field. The method is + called with the field's raw value. If the raw value is valid it is returned + in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field). + If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause + as appropriate). + """ + + name: str + raw_name: str + added: _MetadataVersion + + def __init__( + self, + *, + added: _MetadataVersion = "1.0", + ) -> None: + self.added = added + + def __set_name__(self, _owner: "Metadata", name: str) -> None: + self.name = name + self.raw_name = _RAW_TO_EMAIL_MAPPING[name] + + def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T: + # With Python 3.8, the caching can be replaced with functools.cached_property(). + # No need to check the cache as attribute lookup will resolve into the + # instance's __dict__ before __get__ is called. + cache = instance.__dict__ + value = instance._raw.get(self.name) + + # To make the _process_* methods easier, we'll check if the value is None + # and if this field is NOT a required attribute, and if both of those + # things are true, we'll skip the the converter. 
This will mean that the + # converters never have to deal with the None union. + if self.name in _REQUIRED_ATTRS or value is not None: + try: + converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}") + except AttributeError: + pass + else: + value = converter(value) + + cache[self.name] = value + try: + del instance._raw[self.name] # type: ignore[misc] + except KeyError: + pass + + return cast(T, value) + + def _invalid_metadata( + self, msg: str, cause: Optional[Exception] = None + ) -> InvalidMetadata: + exc = InvalidMetadata( + self.raw_name, msg.format_map({"field": repr(self.raw_name)}) + ) + exc.__cause__ = cause + return exc + + def _process_metadata_version(self, value: str) -> _MetadataVersion: + # Implicitly makes Metadata-Version required. + if value not in _VALID_METADATA_VERSIONS: + raise self._invalid_metadata(f"{value!r} is not a valid metadata version") + return cast(_MetadataVersion, value) + + def _process_name(self, value: str) -> str: + if not value: + raise self._invalid_metadata("{field} is a required field") + # Validate the name as a side-effect. + try: + utils.canonicalize_name(value, validate=True) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) + else: + return value + + def _process_version(self, value: str) -> version_module.Version: + if not value: + raise self._invalid_metadata("{field} is a required field") + try: + return version_module.parse(value) + except version_module.InvalidVersion as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) + + def _process_summary(self, value: str) -> str: + """Check the field contains no newlines.""" + if "\n" in value: + raise self._invalid_metadata("{field} must be a single line") + return value + + def _process_description_content_type(self, value: str) -> str: + content_types = {"text/plain", "text/x-rst", "text/markdown"} + message = email.message.EmailMessage() + message["content-type"] = value + + content_type, parameters = ( + # Defaults to `text/plain` if parsing failed. + message.get_content_type().lower(), + message["content-type"].params, + ) + # Check if content-type is valid or defaulted to `text/plain` and thus was + # not parseable. + if content_type not in content_types or content_type not in value.lower(): + raise self._invalid_metadata( + f"{{field}} must be one of {list(content_types)}, not {value!r}" + ) + + charset = parameters.get("charset", "UTF-8") + if charset != "UTF-8": + raise self._invalid_metadata( + f"{{field}} can only specify the UTF-8 charset, not {list(charset)}" + ) + + markdown_variants = {"GFM", "CommonMark"} + variant = parameters.get("variant", "GFM") # Use an acceptable default. 
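+        # (For example, "text/markdown; charset=UTF-8; variant=CommonMark"
+        # parses to content type "text/markdown" with variant "CommonMark".)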
+ if content_type == "text/markdown" and variant not in markdown_variants: + raise self._invalid_metadata( + f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " + f"not {variant!r}", + ) + return value + + def _process_dynamic(self, value: List[str]) -> List[str]: + for dynamic_field in map(str.lower, value): + if dynamic_field in {"name", "version", "metadata-version"}: + raise self._invalid_metadata( + f"{value!r} is not allowed as a dynamic field" + ) + elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: + raise self._invalid_metadata(f"{value!r} is not a valid dynamic field") + return list(map(str.lower, value)) + + def _process_provides_extra( + self, + value: List[str], + ) -> List[utils.NormalizedName]: + normalized_names = [] + try: + for name in value: + normalized_names.append(utils.canonicalize_name(name, validate=True)) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}", cause=exc + ) + else: + return normalized_names + + def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: + try: + return specifiers.SpecifierSet(value) + except specifiers.InvalidSpecifier as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) + + def _process_requires_dist( + self, + value: List[str], + ) -> List[requirements.Requirement]: + reqs = [] + try: + for req in value: + reqs.append(requirements.Requirement(req)) + except requirements.InvalidRequirement as exc: + raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc) + else: + return reqs + + +class Metadata: + """Representation of distribution metadata. + + Compared to :class:`RawMetadata`, this class provides objects representing + metadata fields instead of only using built-in types. Any invalid metadata + will cause :exc:`InvalidMetadata` to be raised (with a + :py:attr:`~BaseException.__cause__` attribute as appropriate). + """ + + _raw: RawMetadata + + @classmethod + def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": + """Create an instance from :class:`RawMetadata`. + + If *validate* is true, all metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + ins = cls() + ins._raw = data.copy() # Mutations occur due to caching enriched values. + + if validate: + exceptions: List[Exception] = [] + try: + metadata_version = ins.metadata_version + metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) + except InvalidMetadata as metadata_version_exc: + exceptions.append(metadata_version_exc) + metadata_version = None + + # Make sure to check for the fields that are present, the required + # fields (so their absence can be reported). + fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS + # Remove fields that have already been checked. + fields_to_check -= {"metadata_version"} + + for key in fields_to_check: + try: + if metadata_version: + # Can't use getattr() as that triggers descriptor protocol which + # will fail due to no value for the instance argument. 
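+                        # Illustrative example (not part of upstream packaging):
+                        # under Metadata-Version 2.1, a raw "dynamic" key
+                        # (added in 2.2) fails the age check below and is
+                        # reported as InvalidMetadata.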
+                        try:
+                            field_metadata_version = cls.__dict__[key].added
+                        except KeyError:
+                            exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
+                            exceptions.append(exc)
+                            continue
+                        field_age = _VALID_METADATA_VERSIONS.index(
+                            field_metadata_version
+                        )
+                        if field_age > metadata_age:
+                            field = _RAW_TO_EMAIL_MAPPING[key]
+                            exc = InvalidMetadata(
+                                field,
+                                f"{field} introduced in metadata version "
+                                f"{field_metadata_version}, not {metadata_version}",
+                            )
+                            exceptions.append(exc)
+                            continue
+                    getattr(ins, key)
+                except InvalidMetadata as exc:
+                    exceptions.append(exc)
+
+            if exceptions:
+                raise ExceptionGroup("invalid metadata", exceptions)
+
+        return ins
+
+    @classmethod
+    def from_email(
+        cls, data: Union[bytes, str], *, validate: bool = True
+    ) -> "Metadata":
+        """Parse metadata from email headers.
+
+        If *validate* is true, the metadata will be validated. All exceptions
+        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+        """
+        raw, unparsed = parse_email(data)
+
+        if validate:
+            exceptions: List[Exception] = []
+            for unparsed_key in unparsed:
+                if unparsed_key in _EMAIL_TO_RAW_MAPPING:
+                    message = f"{unparsed_key!r} has invalid data"
+                else:
+                    message = f"unrecognized field: {unparsed_key!r}"
+                exceptions.append(InvalidMetadata(unparsed_key, message))
+
+            if exceptions:
+                raise ExceptionGroup("unparsed", exceptions)
+
+        try:
+            return cls.from_raw(raw, validate=validate)
+        except ExceptionGroup as exc_group:
+            raise ExceptionGroup(
+                "invalid or unparsed metadata", exc_group.exceptions
+            ) from None
+
+    metadata_version: _Validator[_MetadataVersion] = _Validator()
+    """:external:ref:`core-metadata-metadata-version`
+    (required; validated to be a valid metadata version)"""
+    name: _Validator[str] = _Validator()
+    """:external:ref:`core-metadata-name`
+    (required; validated using :func:`~packaging.utils.canonicalize_name` and its
+    *validate* parameter)"""
+    version: _Validator[version_module.Version] = _Validator()
+    """:external:ref:`core-metadata-version` (required)"""
+    dynamic: _Validator[Optional[List[str]]] = _Validator(
+        added="2.2",
+    )
+    """:external:ref:`core-metadata-dynamic`
+    (validated against core metadata field names and lowercased)"""
+    platforms: _Validator[Optional[List[str]]] = _Validator()
+    """:external:ref:`core-metadata-platform`"""
+    supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-supported-platform`"""
+    summary: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
+    description: _Validator[Optional[str]] = _Validator()  # TODO 2.1: can be in body
+    """:external:ref:`core-metadata-description`"""
+    description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
+    """:external:ref:`core-metadata-description-content-type` (validated)"""
+    keywords: _Validator[Optional[List[str]]] = _Validator()
+    """:external:ref:`core-metadata-keywords`"""
+    home_page: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-home-page`"""
+    download_url: _Validator[Optional[str]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-download-url`"""
+    author: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-author`"""
+    author_email: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-author-email`"""
+    maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-maintainer`"""
+    maintainer_email: 
_Validator[Optional[str]] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer-email`""" + license: _Validator[Optional[str]] = _Validator() + """:external:ref:`core-metadata-license`""" + classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1") + """:external:ref:`core-metadata-classifier`""" + requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-dist`""" + requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-python`""" + # Because `Requires-External` allows for non-PEP 440 version specifiers, we + # don't do any processing on the values. + requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2") + """:external:ref:`core-metadata-requires-external`""" + project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2") + """:external:ref:`core-metadata-project-url`""" + # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation + # regardless of metadata version. + provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator( + added="2.1", + ) + """:external:ref:`core-metadata-provides-extra`""" + provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") + """:external:ref:`core-metadata-provides-dist`""" + obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") + """:external:ref:`core-metadata-obsoletes-dist`""" + requires: _Validator[Optional[List[str]]] = _Validator(added="1.1") + """``Requires`` (deprecated)""" + provides: _Validator[Optional[List[str]]] = _Validator(added="1.1") + """``Provides`` (deprecated)""" + obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1") + """``Obsoletes`` (deprecated)""" diff --git a/distutils/_vendor/packaging/py.typed b/distutils/_vendor/packaging/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/distutils/_vendor/packaging/requirements.py b/distutils/_vendor/packaging/requirements.py new file mode 100644 index 0000000000..bdc43a7e98 --- /dev/null +++ b/distutils/_vendor/packaging/requirements.py @@ -0,0 +1,90 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from typing import Any, Iterator, Optional, Set + +from ._parser import parse_requirement as _parse_requirement +from ._tokenizer import ParserSyntaxError +from .markers import Marker, _normalize_extra_values +from .specifiers import SpecifierSet +from .utils import canonicalize_name + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +class Requirement: + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
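+    # Illustrative decomposition (not part of upstream packaging):
+    #     r = Requirement('name[bar,baz]>=1.0,<2.0; python_version < "3.8"')
+    #     r.name      ->  'name'
+    #     r.extras    ->  {'bar', 'baz'}
+    #     r.specifier ->  <SpecifierSet('<2.0,>=1.0')>
+    #     r.marker    ->  <Marker('python_version < "3.8"')>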
+
+    def __init__(self, requirement_string: str) -> None:
+        try:
+            parsed = _parse_requirement(requirement_string)
+        except ParserSyntaxError as e:
+            raise InvalidRequirement(str(e)) from e
+
+        self.name: str = parsed.name
+        self.url: Optional[str] = parsed.url or None
+        self.extras: Set[str] = set(parsed.extras or [])
+        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
+        self.marker: Optional[Marker] = None
+        if parsed.marker is not None:
+            self.marker = Marker.__new__(Marker)
+            self.marker._markers = _normalize_extra_values(parsed.marker)
+
+    def _iter_parts(self, name: str) -> Iterator[str]:
+        yield name
+
+        if self.extras:
+            formatted_extras = ",".join(sorted(self.extras))
+            yield f"[{formatted_extras}]"
+
+        if self.specifier:
+            yield str(self.specifier)
+
+        if self.url:
+            yield f"@ {self.url}"
+            if self.marker:
+                yield " "
+
+        if self.marker:
+            yield f"; {self.marker}"
+
+    def __str__(self) -> str:
+        return "".join(self._iter_parts(self.name))
+
+    def __repr__(self) -> str:
+        return f"<Requirement('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.__class__.__name__,
+                *self._iter_parts(canonicalize_name(self.name)),
+            )
+        )
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Requirement):
+            return NotImplemented
+
+        return (
+            canonicalize_name(self.name) == canonicalize_name(other.name)
+            and self.extras == other.extras
+            and self.specifier == other.specifier
+            and self.url == other.url
+            and self.marker == other.marker
+        )
diff --git a/distutils/_vendor/packaging/specifiers.py b/distutils/_vendor/packaging/specifiers.py
new file mode 100644
index 0000000000..2d015bab59
--- /dev/null
+++ b/distutils/_vendor/packaging/specifiers.py
@@ -0,0 +1,1017 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
+    from packaging.version import Version
+"""
+
+import abc
+import itertools
+import re
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
+
+from .utils import canonicalize_version
+from .version import Version
+
+UnparsedVersion = Union[Version, str]
+UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
+CallableOperator = Callable[[Version, str], bool]
+
+
+def _coerce_version(version: UnparsedVersion) -> Version:
+    if not isinstance(version, Version):
+        version = Version(version)
+    return version
+
+
+class InvalidSpecifier(ValueError):
+    """
+    Raised when attempting to create a :class:`Specifier` with a specifier
+    string that is invalid.
+
+    >>> Specifier("lolwat")
+    Traceback (most recent call last):
+        ...
+    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
+    """
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def __str__(self) -> str:
+        """
+        Returns the str representation of this Specifier-like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self) -> int:
+        """
+        Returns a hash value for this Specifier-like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other: object) -> bool:
+        """
+        Returns a boolean representing whether or not the two Specifier-like
+        objects are equal.
+
+        :param other: The other object to check against.
+        """
+
+    @property
+    @abc.abstractmethod
+    def prereleases(self) -> Optional[bool]:
+        """Whether or not pre-releases as a whole are allowed.
+
+        This can be set to either ``True`` or ``False`` to explicitly enable or disable
+        prereleases or it can be set to ``None`` (the default) to use default semantics.
+        """
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        """Setter for :attr:`prereleases`.
+
+        :param value: The value to set.
+        """
+
+    @abc.abstractmethod
+    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
+        """
+        Determines if the given item is contained within this specifier.
+        """
+
+    @abc.abstractmethod
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """
+        Takes an iterable of items and filters them so that only items which
+        are contained within this specifier are allowed in it.
+        """
+
+
+class Specifier(BaseSpecifier):
+    """This class abstracts handling of version specifiers.
+
+    .. tip::
+
+        It is generally not required to instantiate this manually. You should
+        instead prefer to work with :class:`SpecifierSet`, which can parse
+        comma-separated version specifiers (which is what package metadata contains).
+    """
+
+    _operator_regex_str = r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        """
+    _version_regex_str = r"""
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s;)]*  # The arbitrary version can be just about anything,
+                          # we match everything except for whitespace, a
+                          # semi-colon for marker support, and a closing paren
+                          # since versions can be enclosed in them.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)  # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+
+                # You cannot use a wild card and a pre-release, post-release, a dev or
+                # local version together so group them with a | and make them optional.
+                (?:
+                    \.\*  # Wild card syntax of .*
+                    |
+                    (?:                                  # pre release
+                        [-_\.]?
+                        (alpha|beta|preview|pre|a|b|c|rc)
+                        [-_\.]?
+                        [0-9]*
+                    )?
+                    (?:                                  # post release
+                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                    )?
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)  # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release (We have a + instead of a *)
+
+                (?:                                  # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                  # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)  # We have special cases for these operators so
+                               # we want to make sure they don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                                  # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                  # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+            )
+        )
+        """
+
+    _regex = re.compile(
+        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+        """Initialize a Specifier instance.
+
+        :param spec:
+            The string representation of a specifier which will be parsed and
+            normalized before use.
+        :param prereleases:
+            This tells the specifier if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+        :raises InvalidSpecifier:
+            If the given specifier is invalid (i.e. bad syntax).
+        """
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
+
+        self._spec: Tuple[str, str] = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
+    @property  # type: ignore[override]
+    def prereleases(self) -> bool:
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are if they are including an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*, if it does we
+            # want to remove it before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if Version(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    @property
+    def operator(self) -> str:
+        """The operator of this specifier.
+
+        >>> Specifier("==1.2.3").operator
+        '=='
+        """
+        return self._spec[0]
+
+    @property
+    def version(self) -> str:
+        """The version of this specifier.
+
+        >>> Specifier("==1.2.3").version
+        '1.2.3'
+        """
+        return self._spec[1]
+
+    def __repr__(self) -> str:
+        """A representation of the Specifier that shows all internal state.
+
+        >>> Specifier('>=1.0.0')
+        <Specifier('>=1.0.0')>
+        >>> Specifier('>=1.0.0', prereleases=False)
+        <Specifier('>=1.0.0', prereleases=False)>
+        >>> Specifier('>=1.0.0', prereleases=True)
+        <Specifier('>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the Specifier that can be round-tripped.
+
+        >>> str(Specifier('>=1.0.0'))
+        '>=1.0.0'
+        >>> str(Specifier('>=1.0.0', prereleases=False))
+        '>=1.0.0'
+        """
+        return "{}{}".format(*self._spec)
+
+    @property
+    def _canonical_spec(self) -> Tuple[str, str]:
+        canonical_version = canonicalize_version(
+            self._spec[1],
+            strip_trailing_zero=(self._spec[0] != "~="),
+        )
+        return self._spec[0], canonical_version
+
+    def __hash__(self) -> int:
+        return hash(self._canonical_spec)
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two Specifier-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
+        True
+        >>> (Specifier("==1.2.3", prereleases=False) ==
+        ... 
Specifier("==1.2.3", prereleases=True)) + True + >>> Specifier("==1.2.3") == "==1.2.3" + True + >>> Specifier("==1.2.3") == Specifier("==1.2.4") + False + >>> Specifier("==1.2.3") == Specifier("~=1.2.3") + False + """ + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _compare_compatible(self, prospective: Version, spec: str) -> bool: + + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore suffix segments. + prefix = _version_join( + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + def _compare_equal(self, prospective: Version, spec: str) -> bool: + + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + normalized_prospective = canonicalize_version( + prospective.public, strip_trailing_zero=False + ) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) + # Split the spec out by bangs and dots, and pretend that there is + # an implicit dot in between a release segment and a pre-release segment. + split_spec = _version_split(normalized_spec) + + # Split the prospective version out by bangs and dots, and pretend + # that there is an implicit dot in between a release segment and + # a pre-release segment. + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = padded_prospective[: len(split_spec)] + + return shortened_prospective == split_spec + else: + # Convert our spec string into a Version + spec_version = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec_version.local: + prospective = Version(prospective.public) + + return prospective == spec_version + + def _compare_not_equal(self, prospective: Version, spec: str) -> bool: + return not self._compare_equal(prospective, spec) + + def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: + + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. 
+        return Version(prospective.public) <= Version(spec)
+
+    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
+
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) >= Version(spec)
+
+    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
+
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not, we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+        # not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
+
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not, we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept post-release
+        # versions for the version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is both
+        # greater than the spec version *and* it's not a post-release of the
+        # same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+        return str(prospective).lower() == str(spec).lower()
+
+    def __contains__(self, item: Union[str, Version]) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in Specifier(">=1.2.3")
+        True
+        >>> Version("1.2.3") in Specifier(">=1.2.3")
+        True
+        >>> "1.0.0" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self, item: UnparsedVersion, prereleases: Optional[bool] = None
+    ) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this Specifier. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> Specifier(">=1.2.3").contains("1.2.3")
+        True
+        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
+        True
+        >>> Specifier(">=1.2.3").contains("1.0.0")
+        False
+        >>> Specifier(">=1.2.3").contains("1.3.0a1")
+        False
+        >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
+        True
+        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
+        True
+        """
+
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version, this allows us to have a shortcut for
+        # "2.0" in Specifier(">=2")
+        normalized_item = _coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not, if we do not support prereleases then we can short circuit
+        # logic if this version is a prerelease.
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable: CallableOperator = self._get_operator(self.operator)
+        return operator_callable(normalized_item, self.version)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifier.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(Specifier().contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
+        ['1.2.3', '1.3', <Version('1.4')>]
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
+        ['1.5a1']
+        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        """
+
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
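+        # Illustrative walk-through (not part of upstream packaging): for
+        # Specifier(">=1.2").filter(["1.3", "1.5a1"]), "1.3" is yielded
+        # immediately, while "1.5a1" is parked in found_prereleases and later
+        # dropped because a final release already matched.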
+ for version in iterable: + parsed_version = _coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later in case nothing + # else matches this specifier. + if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning. + else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version: str) -> List[str]: + """Split version into components. + + The split components are intended for version comparison. The logic does + not attempt to retain the original version string, so joining the + components back with :func:`_version_join` may not produce the original + version string. + """ + result: List[str] = [] + + epoch, _, rest = version.rpartition("!") + result.append(epoch or "0") + + for item in rest.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _version_join(components: List[str]) -> str: + """Join split version components into a version string. + + This function assumes the input came from :func:`_version_split`, where the + first component must be the epoch (either empty or numeric), and all other + components numeric. + """ + epoch, *rest = components + return f"{epoch}!{'.'.join(rest)}" + + +def _is_not_suffix(segment: str) -> bool: + return not any( + segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") + ) + + +def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return ( + list(itertools.chain.from_iterable(left_split)), + list(itertools.chain.from_iterable(right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + """This class abstracts handling of a set of version specifiers. + + It can be passed a single specifier (``>=3.0``), a comma-separated list of + specifiers (``>=3.0,!=3.1``), or no specifier at all. + """ + + def __init__( + self, specifiers: str = "", prereleases: Optional[bool] = None + ) -> None: + """Initialize a SpecifierSet instance. + + :param specifiers: + The string representation of a specifier or a comma-separated list of + specifiers which will be parsed and normalized before use. + :param prereleases: + This tells the SpecifierSet if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. 
+
+        :raises InvalidSpecifier:
+            If the given ``specifiers`` are not parseable, then this exception will be
+            raised.
+        """
+
+        # Split on `,` to break each individual specifier into its own item, and
+        # strip each item to remove leading/trailing whitespace.
+        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+        # Make each individual specifier a Specifier and save in a frozen set for later.
+        self._specs = frozenset(map(Specifier, split_specifiers))
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    @property
+    def prereleases(self) -> Optional[bool]:
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    def __repr__(self) -> str:
+        """A representation of the specifier set that shows all internal state.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> SpecifierSet('>=1.0.0,!=2.0.0')
+        <SpecifierSet('!=2.0.0,>=1.0.0')>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<SpecifierSet({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the specifier set that can be round-tripped.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
+        '!=1.0.1,>=1.0.0'
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
+        '!=1.0.1,>=1.0.0'
+        """
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self) -> int:
+        return hash(self._specs)
+
+    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+        """Return a SpecifierSet which is a combination of the two sets.
+
+        :param other: The other object to combine with.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        """
+        if isinstance(other, str):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+            )
+
+        return specifier
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two SpecifierSet-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
+        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
+        False
+        """
+        if isinstance(other, (str, Specifier)):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __len__(self) -> int:
+        """Returns the number of specifiers in this specifier set."""
+        return len(self._specs)
+
+    def __iter__(self) -> Iterator[Specifier]:
+        """
+        Returns an iterator over all the underlying :class:`Specifier` instances
+        in this specifier set.
+
+        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
+        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
+        """
+        return iter(self._specs)
+
+    def __contains__(self, item: UnparsedVersion) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self,
+        item: UnparsedVersion,
+        prereleases: Optional[bool] = None,
+        installed: Optional[bool] = None,
+    ) -> bool:
+        """Return whether or not the item is contained in this SpecifierSet.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this SpecifierSet. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
+        True
+        """
+        # Ensure that our item is a Version instance.
+        if not isinstance(item, Version):
+            item = Version(item)
+
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        # like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        if installed and item.is_prerelease:
+            item = Version(item.base_version)
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        # will always return True, this is an explicit design decision.
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifiers in this set.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
+        ['1.3', <Version('1.4')>]
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
+        []
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+
+        An "empty" SpecifierSet will filter items based on the presence of prerelease
+        versions in the set.
+
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet("").filter(["1.5a1"]))
+        ['1.5a1']
+        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        """
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one, this will act as a logical AND amongst
+        # each specifier.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iter(iterable)
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases.
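+        # Illustrative example (not part of upstream packaging): an empty
+        # SpecifierSet("").filter(["1.0", "2.0a1"]) takes the branch below and
+        # yields only "1.0"; prereleases serve purely as a fallback here.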
+ else: + filtered: List[UnparsedVersionVar] = [] + found_prereleases: List[UnparsedVersionVar] = [] + + for item in iterable: + parsed_version = _coerce_version(item) + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return iter(found_prereleases) + + return iter(filtered) diff --git a/distutils/_vendor/packaging/tags.py b/distutils/_vendor/packaging/tags.py new file mode 100644 index 0000000000..89f1926137 --- /dev/null +++ b/distutils/_vendor/packaging/tags.py @@ -0,0 +1,571 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import logging +import platform +import re +import struct +import subprocess +import sys +import sysconfig +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +from . import _manylinux, _musllinux + +logger = logging.getLogger(__name__) + +PythonVersion = Sequence[int] +MacVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: Dict[str, str] = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} + + +_32_BIT_INTERPRETER = struct.calcsize("P") == 4 + + +class Tag: + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. + """ + + __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] + + def __init__(self, interpreter: str, abi: str, platform: str) -> None: + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.disjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self) -> str: + return self._interpreter + + @property + def abi(self) -> str: + return self._abi + + @property + def platform(self) -> str: + return self._platform + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self._hash == other._hash) # Short-circuit ASAP for perf reasons. + and (self._platform == other._platform) + and (self._abi == other._abi) + and (self._interpreter == other._interpreter) + ) + + def __hash__(self) -> int: + return self._hash + + def __str__(self) -> str: + return f"{self._interpreter}-{self._abi}-{self._platform}" + + def __repr__(self) -> str: + return f"<{self} @ {id(self)}>" + + +def parse_tag(tag: str) -> FrozenSet[Tag]: + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. 
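+
+    For example (illustrative, not in the upstream docstring):
+
+        >>> sorted(str(t) for t in parse_tag("py2.py3-none-any"))
+        ['py2-none-any', 'py3-none-any']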
+    """
+    tags = set()
+    interpreters, abis, platforms = tag.split("-")
+    for interpreter in interpreters.split("."):
+        for abi in abis.split("."):
+            for platform_ in platforms.split("."):
+                tags.add(Tag(interpreter, abi, platform_))
+    return frozenset(tags)
+
+
+def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
+    value: Union[int, str, None] = sysconfig.get_config_var(name)
+    if value is None and warn:
+        logger.debug(
+            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+        )
+    return value
+
+
+def _normalize_string(string: str) -> str:
+    return string.replace(".", "_").replace("-", "_").replace(" ", "_")
+
+
+def _is_threaded_cpython(abis: List[str]) -> bool:
+    """
+    Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+    The threaded builds are indicated by a "t" in the abiflags.
+    """
+    if len(abis) == 0:
+        return False
+    # expect e.g., cp313
+    m = re.match(r"cp\d+(.*)", abis[0])
+    if not m:
+        return False
+    abiflags = m.group(1)
+    return "t" in abiflags
+
+
+def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
+    """
+    Determine if the Python version supports abi3.
+
+    PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
+    builds do not support abi3.
+    """
+    return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
+
+
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+    py_version = tuple(py_version)  # To allow for version comparison.
+    abis = []
+    version = _version_nodot(py_version[:2])
+    threading = debug = pymalloc = ucs4 = ""
+    with_debug = _get_config_var("Py_DEBUG", warn)
+    has_refcount = hasattr(sys, "gettotalrefcount")
+    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+    # extension modules is the best option.
+    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+    if with_debug or (with_debug is None and (has_refcount or has_ext)):
+        debug = "d"
+    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
+        threading = "t"
+    if py_version < (3, 8):
+        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+        if with_pymalloc or with_pymalloc is None:
+            pymalloc = "m"
+        if py_version < (3, 3):
+            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+            if unicode_size == 4 or (
+                unicode_size is None and sys.maxunicode == 0x10FFFF
+            ):
+                ucs4 = "u"
+    elif debug:
+        # Debug builds can also load "normal" extension modules.
+        # We can also assume no UCS-4 or pymalloc requirement.
+        abis.append(f"cp{version}{threading}")
+    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
+    return abis
+
+
+def cpython_tags(
+    python_version: Optional[PythonVersion] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a CPython interpreter.
+
+    The tags consist of:
+    - cp<python_version>-<abi>-<platform>
+    - cp<python_version>-abi3-<platform>
+    - cp<python_version>-none-<platform>
+    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
+
+    If python_version only specifies a major version then user-provided ABIs and
+    the 'none' ABI tag will be used.
+
+    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+    their normal position and not at the beginning.
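+
+    Illustrative ordering (added here, not in the upstream docstring): for
+    CPython 3.11 this yields cp311-<abi>-<platform> tags first, then
+    cp311-abi3-<platform>, then cp311-none-<platform>, then
+    cp310-abi3-<platform> and so on down to cp32-abi3-<platform>.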
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+
+    interpreter = f"cp{_version_nodot(python_version[:2])}"
+
+    if abis is None:
+        if len(python_version) > 1:
+            abis = _cpython_abis(python_version, warn)
+        else:
+            abis = []
+    abis = list(abis)
+    # 'abi3' and 'none' are explicitly handled later.
+    for explicit_abi in ("abi3", "none"):
+        try:
+            abis.remove(explicit_abi)
+        except ValueError:
+            pass
+
+    platforms = list(platforms or platform_tags())
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+    threading = _is_threaded_cpython(abis)
+    use_abi3 = _abi3_applies(python_version, threading)
+    if use_abi3:
+        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
+    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
+
+    if use_abi3:
+        for minor_version in range(python_version[1] - 1, 1, -1):
+            for platform_ in platforms:
+                interpreter = "cp{version}".format(
+                    version=_version_nodot((python_version[0], minor_version))
+                )
+                yield Tag(interpreter, "abi3", platform_)
+
+
+def _generic_abi() -> List[str]:
+    """
+    Return the ABI tag based on EXT_SUFFIX.
+    """
+    # The following are examples of `EXT_SUFFIX`.
+    # We want to keep the parts which are related to the ABI and remove the
+    # parts which are related to the platform:
+    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
+    # - mac:     '.cpython-310-darwin.so'           => cp310
+    # - win:     '.cp310-win_amd64.pyd'             => cp310
+    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
+    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
+    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
+    #            => graalpy_38_native
+
+    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
+    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
+        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
+    parts = ext_suffix.split(".")
+    if len(parts) < 3:
+        # CPython3.7 and earlier uses ".pyd" on Windows.
+        return _cpython_abis(sys.version_info[:2])
+    soabi = parts[1]
+    if soabi.startswith("cpython"):
+        # non-windows
+        abi = "cp" + soabi.split("-")[1]
+    elif soabi.startswith("cp"):
+        # windows
+        abi = soabi.split("-")[0]
+    elif soabi.startswith("pypy"):
+        abi = "-".join(soabi.split("-")[:2])
+    elif soabi.startswith("graalpy"):
+        abi = "-".join(soabi.split("-")[:3])
+    elif soabi:
+        # pyston, ironpython, others?
+        abi = soabi
+    else:
+        return []
+    return [_normalize_string(abi)]
+
+
+def generic_tags(
+    interpreter: Optional[str] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a generic interpreter.
+
+    The tags consist of:
+    - <interpreter>-<abi>-<platform>
+
+    The "none" ABI will be added if it was not explicitly provided.
+    """
+    if not interpreter:
+        interp_name = interpreter_name()
+        interp_version = interpreter_version(warn=warn)
+        interpreter = "".join([interp_name, interp_version])
+    if abis is None:
+        abis = _generic_abi()
+    else:
+        abis = list(abis)
+    platforms = list(platforms or platform_tags())
+    if "none" not in abis:
+        abis.append("none")
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+    """
+    Yields Python versions in descending order.
+
+    After the latest version, the major-only version will be yielded, and then
+    all previous versions of that major version.
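+
+    Illustrative example (added here): for (3, 11) this yields py311, py3,
+    py310, py39, ..., py30.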
+    """
+    if len(py_version) > 1:
+        yield f"py{_version_nodot(py_version[:2])}"
+    yield f"py{py_version[0]}"
+    if len(py_version) > 1:
+        for minor in range(py_version[1] - 1, -1, -1):
+            yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+    python_version: Optional[PythonVersion] = None,
+    interpreter: Optional[str] = None,
+    platforms: Optional[Iterable[str]] = None,
+) -> Iterator[Tag]:
+    """
+    Yields the sequence of tags that are compatible with a specific version of Python.
+
+    The tags consist of:
+    - py*-none-<platform>
+    - <interpreter>-none-any  # ... if `interpreter` is provided.
+    - py*-none-any
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+    platforms = list(platforms or platform_tags())
+    for version in _py_interpreter_range(python_version):
+        for platform_ in platforms:
+            yield Tag(version, "none", platform_)
+    if interpreter:
+        yield Tag(interpreter, "none", "any")
+    for version in _py_interpreter_range(python_version):
+        yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+        if version > (10, 5) or version < (10, 4):
+            return []
+        formats.append("fat64")
+
+    elif cpu_arch == "ppc":
+        if version > (10, 6):
+            return []
+        formats.extend(["fat32", "fat"])
+
+    if cpu_arch in {"arm64", "x86_64"}:
+        formats.append("universal2")
+
+    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+        formats.append("universal")
+
+    return formats
+
+
+def mac_platforms(
+    version: Optional[MacVersion] = None, arch: Optional[str] = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for a macOS system.
+
+    The `version` parameter is a two-item tuple specifying the macOS version to
+    generate platform tags for. The `arch` parameter is the CPU architecture to
+    generate platform tags for. Both parameters default to the appropriate value
+    for the current system.
+    """
+    version_str, _, cpu_arch = platform.mac_ver()
+    if version is None:
+        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+        if version == (10, 16):
+            # When built against an older macOS SDK, Python will report macOS 10.16
+            # instead of the real version.
+            version_str = subprocess.run(
+                [
+                    sys.executable,
+                    "-sS",
+                    "-c",
+                    "import platform; print(platform.mac_ver()[0])",
+                ],
+                check=True,
+                env={"SYSTEM_VERSION_COMPAT": "0"},
+                stdout=subprocess.PIPE,
+                text=True,
+            ).stdout
+            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+    else:
+        version = version
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+    else:
+        arch = arch
+
+    if (10, 0) <= version and version < (11, 0):
+        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+        # "minor" version number. The major version was always 10.
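+        # Illustrative example (not part of upstream packaging): version
+        # (10, 15) with arch "x86_64" yields macosx_10_15_x86_64,
+        # macosx_10_15_intel, ..., then the same formats for 10.14 down to 10.0.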
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+        if version > (10, 5) or version < (10, 4):
+            return []
+        formats.append("fat64")
+
+    elif cpu_arch == "ppc":
+        if version > (10, 6):
+            return []
+        formats.extend(["fat32", "fat"])
+
+    if cpu_arch in {"arm64", "x86_64"}:
+        formats.append("universal2")
+
+    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+        formats.append("universal")
+
+    return formats
+
+
+def mac_platforms(
+    version: Optional[MacVersion] = None, arch: Optional[str] = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for a macOS system.
+
+    The `version` parameter is a two-item tuple specifying the macOS version to
+    generate platform tags for. The `arch` parameter is the CPU architecture to
+    generate platform tags for. Both parameters default to the appropriate value
+    for the current system.
+    """
+    version_str, _, cpu_arch = platform.mac_ver()
+    if version is None:
+        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+        if version == (10, 16):
+            # When built against an older macOS SDK, Python will report macOS 10.16
+            # instead of the real version.
+            version_str = subprocess.run(
+                [
+                    sys.executable,
+                    "-sS",
+                    "-c",
+                    "import platform; print(platform.mac_ver()[0])",
+                ],
+                check=True,
+                env={"SYSTEM_VERSION_COMPAT": "0"},
+                stdout=subprocess.PIPE,
+                text=True,
+            ).stdout
+            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+    else:
+        version = version
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+    else:
+        arch = arch
+
+    if (10, 0) <= version and version < (11, 0):
+        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+        # "minor" version number. The major version was always 10.
+        for minor_version in range(version[1], -1, -1):
+            compat_version = 10, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=10, minor=minor_version, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Starting with Mac OS 11, each yearly release bumps the major version
+        # number. The minor versions are now the midyear updates.
+        for major_version in range(version[0], 10, -1):
+            compat_version = major_version, 0
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=major_version, minor=0, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+        # releases exist.
+        #
+        # However, the "universal2" binary format can have a
+        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+        # that version of macOS.
+        if arch == "x86_64":
+            for minor_version in range(16, 3, -1):
+                compat_version = 10, minor_version
+                binary_formats = _mac_binary_formats(compat_version, arch)
+                for binary_format in binary_formats:
+                    yield "macosx_{major}_{minor}_{binary_format}".format(
+                        major=compat_version[0],
+                        minor=compat_version[1],
+                        binary_format=binary_format,
+                    )
+        else:
+            for minor_version in range(16, 3, -1):
+                compat_version = 10, minor_version
+                binary_format = "universal2"
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=compat_version[0],
+                    minor=compat_version[1],
+                    binary_format=binary_format,
+                )
+
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+    linux = _normalize_string(sysconfig.get_platform())
+    if not linux.startswith("linux_"):
+        # we should never be here, just yield the sysconfig one and return
+        yield linux
+        return
+    if is_32bit:
+        if linux == "linux_x86_64":
+            linux = "linux_i686"
+        elif linux == "linux_aarch64":
+            linux = "linux_armv8l"
+    _, arch = linux.split("_", 1)
+    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+    yield from _manylinux.platform_tags(archs)
+    yield from _musllinux.platform_tags(archs)
+    for arch in archs:
+        yield f"linux_{arch}"
+
+
+def _generic_platforms() -> Iterator[str]:
+    yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+    """
+    Provides the platform tags for this installation.
+    """
+    if platform.system() == "Darwin":
+        return mac_platforms()
+    elif platform.system() == "Linux":
+        return _linux_platforms()
+    else:
+        return _generic_platforms()
+
+
+def interpreter_name() -> str:
+    """
+    Returns the name of the running interpreter.
+
+    Some implementations have a reserved, two-letter abbreviation which will
+    be returned when appropriate.
+    """
+    name = sys.implementation.name
+    return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+    """
+    Returns the version of the running interpreter.
+    """
+    version = _get_config_var("py_version_nodot", warn=warn)
+    if version:
+        version = str(version)
+    else:
+        version = _version_nodot(sys.version_info[:2])
+    return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+    return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+    """
+    Returns the sequence of tag triples for the running interpreter.
+
+    The order of the sequence corresponds to priority order for the
+    interpreter, from most to least important.
+    """
+
+    interp_name = interpreter_name()
+    if interp_name == "cp":
+        yield from cpython_tags(warn=warn)
+    else:
+        yield from generic_tags()
+
+    if interp_name == "pp":
+        interp = "pp3"
+    elif interp_name == "cp":
+        interp = "cp" + interpreter_version(warn=warn)
+    else:
+        interp = None
+    yield from compatible_tags(interpreter=interp)
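End to end, the priority order can be inspected through the same public API (again via the standalone packaging distribution; exact output depends on the running interpreter):

    from packaging import tags

    all_tags = list(tags.sys_tags())
    # The first tag is the most specific one for this interpreter, e.g.
    # cp310-cp310-manylinux_2_17_x86_64 on a typical Linux CPython 3.10.
    print(all_tags[0])
    # The sequence ends with the broadest fallback, py30-none-any on CPython 3.x.
    print(all_tags[-1])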
diff --git a/distutils/_vendor/packaging/utils.py b/distutils/_vendor/packaging/utils.py
new file mode 100644
index 0000000000..c2c2f75aa8
--- /dev/null
+++ b/distutils/_vendor/packaging/utils.py
@@ -0,0 +1,172 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import re
+from typing import FrozenSet, NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidName(ValueError):
+    """
+    An invalid distribution name; users should refer to the packaging user guide.
+    """
+
+
+class InvalidWheelFilename(ValueError):
+    """
+    An invalid wheel filename was found, users should refer to PEP 427.
+    """
+
+
+class InvalidSdistFilename(ValueError):
+    """
+    An invalid sdist filename was found, users should refer to the packaging user guide.
+    """
+
+
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
+_canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+    if validate and not _validate_regex.match(name):
+        raise InvalidName(f"name is invalid: {name!r}")
+    # This is taken from PEP 503.
+    value = _canonicalize_regex.sub("-", name).lower()
+    return cast(NormalizedName, value)
+
+
+def is_normalized_name(name: str) -> bool:
+    return _normalized_regex.match(name) is not None
+
+
+def canonicalize_version(
+    version: Union[Version, str], *, strip_trailing_zero: bool = True
+) -> str:
+    """
+    This is very similar to Version.__str__, but has one subtle difference
+    with the way it handles the release segment.
+    """
+    if isinstance(version, str):
+        try:
+            parsed = Version(version)
+        except InvalidVersion:
+            # Legacy versions cannot be normalized
+            return version
+    else:
+        parsed = version
+
+    parts = []
+
+    # Epoch
+    if parsed.epoch != 0:
+        parts.append(f"{parsed.epoch}!")
+
+    # Release segment
+    release_segment = ".".join(str(x) for x in parsed.release)
+    if strip_trailing_zero:
+        # NB: This strips trailing '.0's to normalize
+        release_segment = re.sub(r"(\.0)+$", "", release_segment)
+    parts.append(release_segment)
+
+    # Pre-release
+    if parsed.pre is not None:
+        parts.append("".join(str(x) for x in parsed.pre))
+
+    # Post-release
+    if parsed.post is not None:
+        parts.append(f".post{parsed.post}")
+
+    # Development release
+    if parsed.dev is not None:
+        parts.append(f".dev{parsed.dev}")
+
+    # Local version segment
+    if parsed.local is not None:
+        parts.append(f"+{parsed.local}")
+
+    return "".join(parts)
+
+
+def parse_wheel_filename(
+    filename: str,
+) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
+    if not filename.endswith(".whl"):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (extension must be '.whl'): {filename}"
+        )
+
+    filename = filename[:-4]
+    dashes = filename.count("-")
+    if dashes not in (4, 5):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (wrong number of parts): {filename}"
+        )
+
+    parts = filename.split("-", dashes - 2)
+    name_part = parts[0]
+    # See PEP 427 for the rules on escaping the project name.
+    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+        raise InvalidWheelFilename(f"Invalid project name: {filename}")
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(parts[1])
+    except InvalidVersion as e:
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (invalid version): {filename}"
+        ) from e
+
+    if dashes == 5:
+        build_part = parts[2]
+        build_match = _build_tag_regex.match(build_part)
+        if build_match is None:
+            raise InvalidWheelFilename(
+                f"Invalid build number: {build_part} in '{filename}'"
+            )
+        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+    else:
+        build = ()
+    tags = parse_tag(parts[-1])
+    return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
+    if filename.endswith(".tar.gz"):
+        file_stem = filename[: -len(".tar.gz")]
+    elif filename.endswith(".zip"):
+        file_stem = filename[: -len(".zip")]
+    else:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+            f" {filename}"
+        )
+
+    # We are requiring a PEP 440 version, which cannot contain dashes,
+    # so we split on the last dash.
+    name_part, sep, version_part = file_stem.rpartition("-")
+    if not sep:
+        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(version_part)
+    except InvalidVersion as e:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (invalid version): {filename}"
+        ) from e
+
+    return (name, version)
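Round-tripping the helpers above, assuming the standalone packaging distribution (whose public API matches this vendored copy); the wheel filename is hypothetical:

    from packaging.utils import canonicalize_name, parse_wheel_filename

    name, version, build, wheel_tags = parse_wheel_filename(
        "Sample_Pkg-1.0-py3-none-any.whl"
    )
    print(name)            # sample-pkg  (PEP 503 normalization)
    print(version, build)  # 1.0 ()
    print(sorted(str(t) for t in wheel_tags))  # ['py3-none-any']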
diff --git a/distutils/_vendor/packaging/version.py b/distutils/_vendor/packaging/version.py
new file mode 100644
index 0000000000..5faab9bd0d
--- /dev/null
+++ b/distutils/_vendor/packaging/version.py
@@ -0,0 +1,563 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.version import parse, Version
+"""
+
+import itertools
+import re
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
+
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
+    NegativeInfinityType,
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
+]
+CmpKey = Tuple[
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
+]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
+class _Version(NamedTuple):
+    epoch: int
+    release: Tuple[int, ...]
+    dev: Optional[Tuple[str, int]]
+    pre: Optional[Tuple[str, int]]
+    post: Optional[Tuple[str, int]]
+    local: Optional[LocalType]
+
+
+def parse(version: str) -> "Version":
+    """Parse the given version string.
+
+    >>> parse('1.0.dev1')
+    <Version('1.0.dev1')>
+
+    :param version: The version string to parse.
+    :raises InvalidVersion: When the version string is not a valid version.
+    """
+    return Version(version)
+
+
+class InvalidVersion(ValueError):
+    """Raised when a version string is not a valid version.
+
+    >>> Version("invalid")
+    Traceback (most recent call last):
+        ...
+    packaging.version.InvalidVersion: Invalid version: 'invalid'
+    """
+
+
+class _BaseVersion:
+    _key: Tuple[Any, ...]
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    # Please keep the duplicated `isinstance` check
+    # in the six comparisons hereunder
+    # unless you find a way to avoid adding overhead function calls.
+    def __lt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+_VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
+
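A short sketch of embedding the pattern and reading the named groups it captures (the example version string is arbitrary; the standalone packaging distribution exports the same VERSION_PATTERN):

    import re
    from packaging.version import VERSION_PATTERN

    regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    m = regex.search("1!2.0.post1.dev3+ubuntu.1")
    print(m.group("epoch"), m.group("release"))   # 1 2.0
    print(m.group("post_l"), m.group("post_n2"))  # post 1
    print(m.group("dev_n"), m.group("local"))     # 3 ubuntu.1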
+class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _key: CmpKey
+
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: '{version}'")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f""
+
+    def __str__(self) -> str:
+        """A string representation of the version that can be rounded-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        return self._version.epoch
+
+    @property
+    def release(self) -> Tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        return self._version.release
+
+    @property
+    def pre(self) -> Optional[Tuple[str, int]]:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        return self._version.pre
+
+    @property
+    def post(self) -> Optional[int]:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> Optional[int]:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> Optional[str]:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1.2.3+abc.dev1").public
+        '1.2.3'
+        """
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3+abc.dev1").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any pre or post
+        release markers.
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
+) -> Optional[Tuple[str, int]]:
+
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: Tuple[int, ...],
+    pre: Optional[Tuple[str, int]],
+    post: Optional[Tuple[str, int]],
+    dev: Optional[Tuple[str, int]],
+    local: Optional[LocalType],
+) -> CmpKey:
+
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all the now-leading
+    # zeros until we come to something non-zero, then re-reverse the rest back
+    # into the correct order, make it a tuple, and use that for our sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: CmpPrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: CmpPrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: CmpPrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: CmpLocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
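The net effect of `_cmpkey` is the PEP 440 total ordering; a quick check via the standalone packaging distribution:

    from packaging.version import Version

    versions = ["1.0.post1", "1.0", "1.0a1", "1.0.dev0", "1.0+local"]
    print(sorted(versions, key=Version))
    # ['1.0.dev0', '1.0a1', '1.0', '1.0+local', '1.0.post1']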
diff --git a/distutils/_vendor/ruff.toml b/distutils/_vendor/ruff.toml
new file mode 100644
index 0000000000..00fee625a5
--- /dev/null
+++ b/distutils/_vendor/ruff.toml
@@ -0,0 +1 @@
+exclude = ["*"]

From 96ce7aa3f26e60c72dcaa4e3b467bc563b100a17 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 11 Apr 2024 19:31:14 -0400
Subject: [PATCH 0482/1761] Use vendored packaging.

---
 distutils/dist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index 4b3824df7e..f4eb6e8958 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -12,7 +12,7 @@
 import logging
 from email import message_from_file
 
-from packaging.utils import canonicalize_name, canonicalize_version
+from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
 
 try:
     import warnings

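A sanity check of the new import path, assuming the vendored tree is importable as laid out earlier in this series:

    from distutils._vendor.packaging.utils import canonicalize_name

    print(canonicalize_name("My.Project"))  # my-project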
From 842cc23a1b0af16fa09b8e4b86433531716ffc8d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:03:46 -0400
Subject: [PATCH 0483/1761] Update readme to reflect current state.

---
 README.rst | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/README.rst b/README.rst
index 822809de2b..aa3b65f15e 100644
--- a/README.rst
+++ b/README.rst
@@ -19,12 +19,9 @@
 
 Python Module Distribution Utilities extracted from the Python Standard Library
 
-Synchronizing
-=============
+This package is unsupported except as integrated into and exposed by Setuptools.
 
-This project is no longer kept in sync with the code still in stdlib, which is deprecated and scheduled for removal.
-
-To Setuptools
--------------
+Integration
+-----------
 
 Simply merge the changes directly into setuptools' repo.

From 62b9a8edb7871d165f3503bc1cb671f75a7e84ce Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:04:55 -0400
Subject: [PATCH 0484/1761] Apply ruff --select UP safe fixes.

---
 distutils/command/bdist_dumb.py       |  4 +---
 distutils/command/bdist_rpm.py        |  6 +-----
 distutils/command/build_scripts.py    |  6 +++---
 distutils/extension.py                |  7 +------
 distutils/msvccompiler.py             |  5 +----
 distutils/tests/test_unixccompiler.py |  5 +----
 distutils/tests/test_util.py          |  2 +-
 distutils/tests/test_version.py       | 16 ++++------------
 8 files changed, 13 insertions(+), 38 deletions(-)

diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index 01dd79079b..4beb123630 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -104,9 +104,7 @@ def run(self):
 
         # And make an archive relative to the root of the
         # pseudo-installation tree.
-        archive_basename = "{}.{}".format(
-            self.distribution.get_fullname(), self.plat_name
-        )
+        archive_basename = f"{self.distribution.get_fullname()}.{self.plat_name}"
 
         pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
         if not self.relative:
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index 675bcebdad..bb3bee7eb9 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -352,11 +352,7 @@ def run(self):  # noqa: C901
         nvr_string = "%{name}-%{version}-%{release}"
         src_rpm = nvr_string + ".src.rpm"
         non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
-        q_cmd = r"rpm -q --qf '{} {}\n' --specfile '{}'".format(
-            src_rpm,
-            non_src_rpm,
-            spec_path,
-        )
+        q_cmd = rf"rpm -q --qf '{src_rpm} {non_src_rpm}\n' --specfile '{spec_path}'"
 
         out = os.popen(q_cmd)
         try:
diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index 1a4d67f492..68caf5a65b 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -157,7 +157,7 @@ def _validate_shebang(shebang, encoding):
             shebang.encode('utf-8')
         except UnicodeEncodeError:
             raise ValueError(
-                "The shebang ({!r}) is not encodable " "to utf-8".format(shebang)
+                f"The shebang ({shebang!r}) is not encodable " "to utf-8"
             )
 
         # If the script is encoded to a custom encoding (use a
@@ -167,6 +167,6 @@ def _validate_shebang(shebang, encoding):
             shebang.encode(encoding)
         except UnicodeEncodeError:
             raise ValueError(
-                "The shebang ({!r}) is not encodable "
-                "to the script encoding ({})".format(shebang, encoding)
+                f"The shebang ({shebang!r}) is not encodable "
+                f"to the script encoding ({encoding})"
             )
diff --git a/distutils/extension.py b/distutils/extension.py
index 8f186b72ff..00ca61d569 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -134,12 +134,7 @@ def __init__(
             warnings.warn(msg)
 
     def __repr__(self):
-        return '<{}.{}({!r}) at {:#x}>'.format(
-            self.__class__.__module__,
-            self.__class__.__qualname__,
-            self.name,
-            id(self),
-        )
+        return f'<{self.__class__.__module__}.{self.__class__.__qualname__}({self.name!r}) at {id(self):#x}>'
 
 
 def read_setup_file(filename):  # noqa: C901
diff --git a/distutils/msvccompiler.py b/distutils/msvccompiler.py
index 1a07746bc7..8b4f7046c7 100644
--- a/distutils/msvccompiler.py
+++ b/distutils/msvccompiler.py
@@ -635,10 +635,7 @@ def get_msvc_paths(self, path, platform='x86'):
 
         path = path + " dirs"
         if self.__version >= 7:
-            key = r"{}\{:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories".format(
-                self.__root,
-                self.__version,
-            )
+            key = rf"{self.__root}\{self.__version:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
         else:
             key = (
                 r"%s\6.0\Build System\Components\Platforms"
diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index 2763db9c02..ca198873ad 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -73,10 +73,7 @@ def gcv(var):
 
         def do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag):
             env = os.environ
-            msg = "macOS version = (sysconfig={!r}, env={!r})".format(
-                syscfg_macosx_ver,
-                env_macosx_ver,
-            )
+            msg = f"macOS version = (sysconfig={syscfg_macosx_ver!r}, env={env_macosx_ver!r})"
 
             # Save
             old_gcv = sysconfig.get_config_var
diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index c632b3910f..53c131e9e5 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -259,6 +259,6 @@ def test_dont_write_bytecode(self):
 
     def test_grok_environment_error(self):
         # test obsolete function to ensure backward compat (#4931)
-        exc = IOError("Unable to find batch file")
+        exc = OSError("Unable to find batch file")
         msg = grok_environment_error(exc)
         assert msg == "error: Unable to find batch file"
diff --git a/distutils/tests/test_version.py b/distutils/tests/test_version.py
index 0aaf0a534c..7e42227e19 100644
--- a/distutils/tests/test_version.py
+++ b/distutils/tests/test_version.py
@@ -52,13 +52,9 @@ def test_cmp_strict(self):
                     raise AssertionError(
                         ("cmp(%s, %s) " "shouldn't raise ValueError") % (v1, v2)
                     )
-            assert res == wanted, 'cmp({}, {}) should be {}, got {}'.format(
-                v1, v2, wanted, res
-            )
+            assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
             res = StrictVersion(v1)._cmp(v2)
-            assert res == wanted, 'cmp({}, {}) should be {}, got {}'.format(
-                v1, v2, wanted, res
-            )
+            assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
             res = StrictVersion(v1)._cmp(object())
             assert (
                 res is NotImplemented
@@ -78,13 +74,9 @@ def test_cmp(self):
 
         for v1, v2, wanted in versions:
             res = LooseVersion(v1)._cmp(LooseVersion(v2))
-            assert res == wanted, 'cmp({}, {}) should be {}, got {}'.format(
-                v1, v2, wanted, res
-            )
+            assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
             res = LooseVersion(v1)._cmp(v2)
-            assert res == wanted, 'cmp({}, {}) should be {}, got {}'.format(
-                v1, v2, wanted, res
-            )
+            assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
             res = LooseVersion(v1)._cmp(object())
             assert (
                 res is NotImplemented

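For reference, the shape of rewrite the UP rules apply throughout this patch, as a before/after sketch with made-up values:

    name, option = "build", "plat-name"

    # Before: printf-style interpolation.
    old = "command '%s' has no such option '%s'" % (name, option)

    # After: the equivalent f-string produced by `ruff --select UP --fix`.
    new = f"command '{name}' has no such option '{option}'"

    assert old == new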
From f8ab1e8b72f4ab82bdb1402d6b66ddb02d6ef657 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:06:12 -0400
Subject: [PATCH 0485/1761] Apply ruff --select UP unsafe fixes.

---
 distutils/ccompiler.py             |  9 ++++-----
 distutils/command/bdist_dumb.py    |  3 +--
 distutils/command/bdist_rpm.py     |  3 +--
 distutils/command/build_scripts.py |  3 +--
 distutils/command/install_data.py  |  2 +-
 distutils/dist.py                  |  9 +++------
 distutils/fancy_getopt.py          | 16 +++++++---------
 distutils/file_util.py             |  4 ++--
 distutils/msvccompiler.py          |  4 ++--
 distutils/tests/test_bdist_dumb.py |  2 +-
 distutils/tests/test_install.py    |  4 ++--
 distutils/tests/test_version.py    |  2 +-
 distutils/util.py                  | 22 +++++++---------------
 13 files changed, 33 insertions(+), 50 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index bcf9580c7a..cdfe9d74ef 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -169,8 +169,7 @@ class (via the 'executables' class attribute), but most will have:
         for key in kwargs:
             if key not in self.executables:
                 raise ValueError(
-                    "unknown executable '%s' for class %s"
-                    % (key, self.__class__.__name__)
+                    f"unknown executable '{key}' for class {self.__class__.__name__}"
                 )
             self.set_executable(key, kwargs[key])
 
@@ -1162,8 +1161,8 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
         )
     except KeyError:
         raise DistutilsModuleError(
-            "can't compile C/C++ code: unable to find class '%s' "
-            "in module '%s'" % (class_name, module_name)
+            f"can't compile C/C++ code: unable to find class '{class_name}' "
+            f"in module '{module_name}'"
         )
 
     # XXX The None is necessary to preserve backwards compatibility
@@ -1210,7 +1209,7 @@ def gen_preprocess_options(macros, include_dirs):
                 # XXX *don't* need to be clever about quoting the
                 # macro value here, because we're going to avoid the
                 # shell at all costs when we spawn the command!
-                pp_opts.append("-D%s=%s" % macro)
+                pp_opts.append("-D{}={}".format(*macro))
 
     for dir in include_dirs:
         pp_opts.append("-I%s" % dir)
diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index 4beb123630..5880ad2ba4 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -115,8 +115,7 @@ def run(self):
             ):
                 raise DistutilsPlatformError(
                     "can't make a dumb built distribution where "
-                    "base and platbase are different (%s, %s)"
-                    % (repr(install.install_base), repr(install.install_platbase))
+                    f"base and platbase are different ({repr(install.install_base)}, {repr(install.install_platbase)})"
                 )
             else:
                 archive_root = os.path.join(
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index bb3bee7eb9..64af0db0cf 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -232,8 +232,7 @@ def finalize_package_data(self):
         self.ensure_string('group', "Development/Libraries")
         self.ensure_string(
             'vendor',
-            "%s <%s>"
-            % (self.distribution.get_contact(), self.distribution.get_contact_email()),
+            f"{self.distribution.get_contact()} <{self.distribution.get_contact_email()}>",
         )
         self.ensure_string('packager')
         self.ensure_string_list('doc_files')
diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index 68caf5a65b..6a5e6ed081 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -109,8 +109,7 @@ def _copy_script(self, script, outfiles, updated_files):  # noqa: C901
                 else:
                     executable = os.path.join(
                         sysconfig.get_config_var("BINDIR"),
-                        "python%s%s"
-                        % (
+                        "python{}{}".format(
                             sysconfig.get_config_var("VERSION"),
                             sysconfig.get_config_var("EXE"),
                         ),
diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index 7ba35eef82..31ae4350dc 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -51,7 +51,7 @@ def run(self):
                 if self.warn_dir:
                     self.warn(
                         "setup script did not provide a directory for "
-                        "'%s' -- installing right in '%s'" % (f, self.install_dir)
+                        f"'{f}' -- installing right in '{self.install_dir}'"
                     )
                 (out, _) = self.copy_file(f, self.install_dir)
                 self.outfiles.append(out)
diff --git a/distutils/dist.py b/distutils/dist.py
index c4d2a45dc2..bbea155556 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -592,9 +592,8 @@ def _parse_command_opts(self, parser, args):  # noqa: C901
                         func()
                     else:
                         raise DistutilsClassError(
-                            "invalid help function %r for help option '%s': "
+                            f"invalid help function {func!r} for help option '{help_option}': "
                             "must be a callable object (function, etc.)"
-                            % (func, help_option)
                         )
 
             if help_option_found:
@@ -834,8 +833,7 @@ def get_command_class(self, command):
                 klass = getattr(module, klass_name)
             except AttributeError:
                 raise DistutilsModuleError(
-                    "invalid command '%s' (no class '%s' in module '%s')"
-                    % (command, klass_name, module_name)
+                    f"invalid command '{command}' (no class '{klass_name}' in module '{module_name}')"
                 )
 
             self.cmdclass[command] = klass
@@ -909,8 +907,7 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
                     setattr(command_obj, option, value)
                 else:
                     raise DistutilsOptionError(
-                        "error in %s: command '%s' has no such option '%s'"
-                        % (source, command_name, option)
+                        f"error in {source}: command '{command_name}' has no such option '{option}'"
                     )
             except ValueError as msg:
                 raise DistutilsOptionError(msg)
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index c025f12062..e41b6064bd 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -116,13 +116,11 @@ def _check_alias_dict(self, aliases, what):
         for alias, opt in aliases.items():
             if alias not in self.option_index:
                 raise DistutilsGetoptError(
-                    ("invalid %s '%s': " "option '%s' not defined")
-                    % (what, alias, alias)
+                    f"invalid {what} '{alias}': " f"option '{alias}' not defined"
                 )
             if opt not in self.option_index:
                 raise DistutilsGetoptError(
-                    ("invalid %s '%s': " "aliased option '%s' not defined")
-                    % (what, alias, opt)
+                    f"invalid {what} '{alias}': " f"aliased option '{opt}' not defined"
                 )
 
     def set_aliases(self, alias):
@@ -187,8 +185,8 @@ def _grok_option_table(self):  # noqa: C901
                 if alias_to is not None:
                     if self.takes_arg[alias_to]:
                         raise DistutilsGetoptError(
-                            "invalid negative alias '%s': "
-                            "aliased option '%s' takes a value" % (long, alias_to)
+                            f"invalid negative alias '{long}': "
+                            f"aliased option '{alias_to}' takes a value"
                         )
 
                     self.long_opts[-1] = long  # XXX redundant?!
@@ -200,9 +198,9 @@ def _grok_option_table(self):  # noqa: C901
             if alias_to is not None:
                 if self.takes_arg[long] != self.takes_arg[alias_to]:
                     raise DistutilsGetoptError(
-                        "invalid alias '%s': inconsistent with "
-                        "aliased option '%s' (one of them takes a value, "
-                        "the other doesn't" % (long, alias_to)
+                        f"invalid alias '{long}': inconsistent with "
+                        f"aliased option '{alias_to}' (one of them takes a value, "
+                        "the other doesn't"
                     )
 
             # Now enforce some bondage on the long option name, so we can
diff --git a/distutils/file_util.py b/distutils/file_util.py
index 0eb9b86107..6c8193e9b7 100644
--- a/distutils/file_util.py
+++ b/distutils/file_util.py
@@ -220,8 +220,8 @@ def move_file(src, dst, verbose=1, dry_run=0):  # noqa: C901
             except OSError:
                 pass
             raise DistutilsFileError(
-                "couldn't move '%s' to '%s' by copy/delete: "
-                "delete '%s' failed: %s" % (src, dst, src, msg)
+                f"couldn't move '{src}' to '{dst}' by copy/delete: "
+                f"delete '{src}' failed: {msg}"
             )
     return dst
 
diff --git a/distutils/msvccompiler.py b/distutils/msvccompiler.py
index 8b4f7046c7..b8694dd6d8 100644
--- a/distutils/msvccompiler.py
+++ b/distutils/msvccompiler.py
@@ -638,8 +638,8 @@ def get_msvc_paths(self, path, platform='x86'):
             key = rf"{self.__root}\{self.__version:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
         else:
             key = (
-                r"%s\6.0\Build System\Components\Platforms"
-                r"\Win32 (%s)\Directories" % (self.__root, platform)
+                rf"{self.__root}\6.0\Build System\Components\Platforms"
+                rf"\Win32 ({platform})\Directories"
             )
 
         for base in HKEYS:
diff --git a/distutils/tests/test_bdist_dumb.py b/distutils/tests/test_bdist_dumb.py
index cb4db4e192..cfe7fa9e62 100644
--- a/distutils/tests/test_bdist_dumb.py
+++ b/distutils/tests/test_bdist_dumb.py
@@ -73,7 +73,7 @@ def test_simple_built(self):
             fp.close()
 
         contents = sorted(filter(None, map(os.path.basename, contents)))
-        wanted = ['foo-0.1-py%s.%s.egg-info' % sys.version_info[:2], 'foo.py']
+        wanted = ['foo-0.1-py{}.{}.egg-info'.format(*sys.version_info[:2]), 'foo.py']
         if not sys.dont_write_bytecode:
             wanted.append('foo.%s.pyc' % sys.implementation.cache_tag)
         assert contents == sorted(wanted)
diff --git a/distutils/tests/test_install.py b/distutils/tests/test_install.py
index 16ac5ca746..08c72c1be0 100644
--- a/distutils/tests/test_install.py
+++ b/distutils/tests/test_install.py
@@ -203,7 +203,7 @@ def test_record(self):
             'hello.py',
             'hello.%s.pyc' % sys.implementation.cache_tag,
             'sayhi',
-            'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2],
+            'UNKNOWN-0.0.0-py{}.{}.egg-info'.format(*sys.version_info[:2]),
         ]
         assert found == expected
 
@@ -235,7 +235,7 @@ def test_record_extensions(self):
         found = [pathlib.Path(line).name for line in content.splitlines()]
         expected = [
             _make_ext_name('xx'),
-            'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2],
+            'UNKNOWN-0.0.0-py{}.{}.egg-info'.format(*sys.version_info[:2]),
         ]
         assert found == expected
 
diff --git a/distutils/tests/test_version.py b/distutils/tests/test_version.py
index 7e42227e19..f89d1b3580 100644
--- a/distutils/tests/test_version.py
+++ b/distutils/tests/test_version.py
@@ -50,7 +50,7 @@ def test_cmp_strict(self):
                     continue
                 else:
                     raise AssertionError(
-                        ("cmp(%s, %s) " "shouldn't raise ValueError") % (v1, v2)
+                        f"cmp({v1}, {v2}) " "shouldn't raise ValueError"
                     )
             assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
             res = StrictVersion(v1)._cmp(v2)
diff --git a/distutils/util.py b/distutils/util.py
index bfd30700fa..ce5bc55f36 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -30,12 +30,6 @@ def get_host_platform():
     # even with older Python versions when distutils was split out.
     # Now it delegates to stdlib sysconfig, but maintains compatibility.
 
-    if sys.version_info < (3, 8):
-        if os.name == 'nt':
-            if '(arm)' in sys.version.lower():
-                return 'win-arm32'
-            if '(arm64)' in sys.version.lower():
-                return 'win-arm64'
 
     if sys.version_info < (3, 9):
         if os.name == "posix" and hasattr(os, 'uname'):
@@ -109,8 +103,8 @@ def get_macosx_target_ver():
         ):
             my_msg = (
                 '$' + MACOSX_VERSION_VAR + ' mismatch: '
-                'now "%s" but "%s" during configure; '
-                'must use 10.3 or later' % (env_ver, syscfg_ver)
+                f'now "{env_ver}" but "{syscfg_ver}" during configure; '
+                'must use 10.3 or later'
             )
             raise DistutilsPlatformError(my_msg)
         return env_ver
@@ -447,13 +441,12 @@ def byte_compile(  # noqa: C901
 
                 script.write(",\n".join(map(repr, py_files)) + "]\n")
                 script.write(
-                    """
-byte_compile(files, optimize=%r, force=%r,
-             prefix=%r, base_dir=%r,
-             verbose=%r, dry_run=0,
+                    f"""
+byte_compile(files, optimize={optimize!r}, force={force!r},
+             prefix={prefix!r}, base_dir={base_dir!r},
+             verbose={verbose!r}, dry_run=0,
              direct=1)
 """
-                    % (optimize, force, prefix, base_dir, verbose)
                 )
 
         cmd = [sys.executable]
@@ -487,8 +480,7 @@ def byte_compile(  # noqa: C901
             if prefix:
                 if file[: len(prefix)] != prefix:
                     raise ValueError(
-                        "invalid prefix: filename %r doesn't start with %r"
-                        % (file, prefix)
+                        f"invalid prefix: filename {file!r} doesn't start with {prefix!r}"
                     )
                 dfile = dfile[len(prefix) :]
             if base_dir:

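These fixes are "unsafe" because `%` formatting consumes a tuple as a whole, so the rewrite must unpack it and thereby assumes the right-hand side really is a tuple. A minimal sketch mirroring the `pp_opts` change above:

    macro = ("DEBUG", "1")

    assert "-D%s=%s" % macro == "-D{}={}".format(*macro) == "-DDEBUG=1"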
From 13b1f91e5d883bcd2132c9e7ae08940841bbee34 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:06:18 -0400
Subject: [PATCH 0486/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/command/build_scripts.py | 4 +---
 distutils/util.py                  | 1 -
 2 files changed, 1 insertion(+), 4 deletions(-)

diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index 6a5e6ed081..29d9c27829 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -155,9 +155,7 @@ def _validate_shebang(shebang, encoding):
         try:
             shebang.encode('utf-8')
         except UnicodeEncodeError:
-            raise ValueError(
-                f"The shebang ({shebang!r}) is not encodable " "to utf-8"
-            )
+            raise ValueError(f"The shebang ({shebang!r}) is not encodable " "to utf-8")
 
         # If the script is encoded to a custom encoding (use a
         # #coding:xxx cookie), the shebang has to be encodable to
diff --git a/distutils/util.py b/distutils/util.py
index ce5bc55f36..a24c940102 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -30,7 +30,6 @@ def get_host_platform():
     # even with older Python versions when distutils was split out.
     # Now it delegates to stdlib sysconfig, but maintains compatibility.
 
-
     if sys.version_info < (3, 9):
         if os.name == "posix" and hasattr(os, 'uname'):
             osname, host, release, version, machine = os.uname()

From 2415d50bf5f9034b1c7661795368a68c8293c3b1 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:06:56 -0400
Subject: [PATCH 0487/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Apply isort rules using `ruff --select I --fix`.
---
 conftest.py                             | 12 +++----
 distutils/__init__.py                   |  2 +-
 distutils/_log.py                       |  1 -
 distutils/_macos_compat.py              |  2 +-
 distutils/_modified.py                  |  2 +-
 distutils/_msvccompiler.py              | 14 ++++----
 distutils/archive_util.py               |  6 ++--
 distutils/bcppcompiler.py               |  9 +++--
 distutils/ccompiler.py                  | 18 +++++-----
 distutils/cmd.py                        |  8 ++---
 distutils/command/_framework_compat.py  |  4 +--
 distutils/command/bdist.py              |  2 +-
 distutils/command/bdist_dumb.py         |  7 ++--
 distutils/command/bdist_rpm.py          | 10 +++---
 distutils/command/build.py              |  3 +-
 distutils/command/build_clib.py         |  3 +-
 distutils/command/build_ext.py          | 17 +++++-----
 distutils/command/build_py.py           |  8 ++---
 distutils/command/build_scripts.py      |  9 ++---
 distutils/command/check.py              |  4 +--
 distutils/command/clean.py              |  3 +-
 distutils/command/config.py             |  2 +-
 distutils/command/install.py            | 21 +++++-------
 distutils/command/install_data.py       |  1 +
 distutils/command/install_egg_info.py   |  4 +--
 distutils/command/install_lib.py        |  3 +-
 distutils/command/install_scripts.py    |  3 +-
 distutils/command/register.py           |  2 +-
 distutils/command/sdist.py              | 14 ++++----
 distutils/command/upload.py             | 10 +++---
 distutils/config.py                     |  2 +-
 distutils/core.py                       | 15 ++++-----
 distutils/cygwinccompiler.py            | 15 ++++-----
 distutils/dir_util.py                   |  5 +--
 distutils/dist.py                       | 20 +++++------
 distutils/extension.py                  |  3 +-
 distutils/fancy_getopt.py               |  9 ++---
 distutils/file_util.py                  |  7 ++--
 distutils/filelist.py                   |  8 ++---
 distutils/log.py                        |  1 -
 distutils/msvc9compiler.py              | 11 +++---
 distutils/msvccompiler.py               | 16 +++++----
 distutils/spawn.py                      |  6 ++--
 distutils/sysconfig.py                  |  6 ++--
 distutils/tests/__init__.py             |  3 +-
 distutils/tests/py37compat.py           |  2 +-
 distutils/tests/support.py              | 11 +++---
 distutils/tests/test_archive_util.py    | 21 ++++++------
 distutils/tests/test_bdist_dumb.py      |  7 ++--
 distutils/tests/test_bdist_rpm.py       | 12 +++----
 distutils/tests/test_build.py           |  1 -
 distutils/tests/test_build_clib.py      |  7 ++--
 distutils/tests/test_build_ext.py       | 45 ++++++++++++-------------
 distutils/tests/test_build_py.py        |  8 ++---
 distutils/tests/test_build_scripts.py   |  8 ++---
 distutils/tests/test_ccompiler.py       |  7 ++--
 distutils/tests/test_check.py           |  7 ++--
 distutils/tests/test_clean.py           |  1 -
 distutils/tests/test_cmd.py             |  4 +--
 distutils/tests/test_config.py          |  3 +-
 distutils/tests/test_config_cmd.py      |  7 ++--
 distutils/tests/test_core.py            |  5 ++-
 distutils/tests/test_cygwinccompiler.py | 13 ++++---
 distutils/tests/test_dir_util.py        | 16 ++++-----
 distutils/tests/test_dist.py            | 19 +++++------
 distutils/tests/test_extension.py       |  4 +--
 distutils/tests/test_file_util.py       |  7 ++--
 distutils/tests/test_filelist.py        | 11 +++---
 distutils/tests/test_install.py         | 15 ++++-----
 distutils/tests/test_install_data.py    |  5 ++-
 distutils/tests/test_install_headers.py |  5 ++-
 distutils/tests/test_install_lib.py     | 11 +++---
 distutils/tests/test_install_scripts.py |  3 +-
 distutils/tests/test_log.py             |  1 -
 distutils/tests/test_modified.py        |  7 ++--
 distutils/tests/test_msvc9compiler.py   |  4 +--
 distutils/tests/test_msvccompiler.py    |  8 ++---
 distutils/tests/test_register.py        |  3 +-
 distutils/tests/test_sdist.py           | 21 ++++++------
 distutils/tests/test_spawn.py           | 11 +++---
 distutils/tests/test_sysconfig.py       | 15 ++++-----
 distutils/tests/test_text_file.py       |  6 ++--
 distutils/tests/test_unixccompiler.py   |  7 ++--
 distutils/tests/test_upload.py          |  6 ++--
 distutils/tests/test_util.py            | 24 ++++++-------
 distutils/tests/test_version.py         |  7 ++--
 distutils/tests/unix_compat.py          |  1 -
 distutils/unixccompiler.py              | 10 +++---
 distutils/util.py                       |  6 ++--
 distutils/version.py                    |  2 +-
 distutils/versionpredicate.py           |  4 +--
 distutils/zosccompiler.py               |  5 +--
 92 files changed, 344 insertions(+), 400 deletions(-)

diff --git a/conftest.py b/conftest.py
index 06ce3bc6c8..3ce3411535 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,12 +1,11 @@
+import logging
 import os
-import sys
-import platform
 import pathlib
-import logging
+import platform
+import sys
 
-import pytest
 import path
-
+import pytest
 
 collect_ignore = []
 
@@ -93,8 +92,7 @@ def temp_cwd(tmp_path):
 
 @pytest.fixture
 def pypirc(request, save_env, distutils_managed_tempdir):
-    from distutils.core import PyPIRCCommand
-    from distutils.core import Distribution
+    from distutils.core import Distribution, PyPIRCCommand
 
     self = request.instance
     self.tmp_dir = self.mkdtemp()
diff --git a/distutils/__init__.py b/distutils/__init__.py
index 1a188c35cb..e374d5c560 100644
--- a/distutils/__init__.py
+++ b/distutils/__init__.py
@@ -1,5 +1,5 @@
-import sys
 import importlib
+import sys
 
 __version__, _, _ = sys.version.partition(' ')
 
diff --git a/distutils/_log.py b/distutils/_log.py
index 4a2ae0acb8..0148f157ff 100644
--- a/distutils/_log.py
+++ b/distutils/_log.py
@@ -1,4 +1,3 @@
 import logging
 
-
 log = logging.getLogger()
diff --git a/distutils/_macos_compat.py b/distutils/_macos_compat.py
index 17769e9154..76ecb96abe 100644
--- a/distutils/_macos_compat.py
+++ b/distutils/_macos_compat.py
@@ -1,5 +1,5 @@
-import sys
 import importlib
+import sys
 
 
 def bypass_compiler_fixup(cmd, args):
diff --git a/distutils/_modified.py b/distutils/_modified.py
index fbb95a8f27..78485dc25e 100644
--- a/distutils/_modified.py
+++ b/distutils/_modified.py
@@ -3,9 +3,9 @@
 import functools
 import os.path
 
+from ._functools import splat
 from .errors import DistutilsFileError
 from .py39compat import zip_strict
-from ._functools import splat
 
 
 def _newer(source, target):
diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index 4f081c7e92..d08910ecf9 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -13,28 +13,28 @@
 # ported to VS 2005 and VS 2008 by Christian Heimes
 # ported to VS 2015 by Steve Dower
 
+import contextlib
 import os
 import subprocess
-import contextlib
-import warnings
 import unittest.mock as mock
+import warnings
 
 with contextlib.suppress(ImportError):
     import winreg
 
+from itertools import count
+
+from ._log import log
+from .ccompiler import CCompiler, gen_lib_options
 from .errors import (
+    CompileError,
     DistutilsExecError,
     DistutilsPlatformError,
-    CompileError,
     LibError,
     LinkError,
 )
-from .ccompiler import CCompiler, gen_lib_options
-from ._log import log
 from .util import get_platform
 
-from itertools import count
-
 
 def _find_vc2015():
     try:
diff --git a/distutils/archive_util.py b/distutils/archive_util.py
index 7f9e1e00cc..052f6e4646 100644
--- a/distutils/archive_util.py
+++ b/distutils/archive_util.py
@@ -4,8 +4,8 @@
 that sort of thing)."""
 
 import os
-from warnings import warn
 import sys
+from warnings import warn
 
 try:
     import zipfile
@@ -13,10 +13,10 @@
     zipfile = None
 
 
+from ._log import log
+from .dir_util import mkpath
 from .errors import DistutilsExecError
 from .spawn import spawn
-from .dir_util import mkpath
-from ._log import log
 
 try:
     from pwd import getpwnam
diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py
index d496d5d452..c1341e43cb 100644
--- a/distutils/bcppcompiler.py
+++ b/distutils/bcppcompiler.py
@@ -14,18 +14,17 @@
 import os
 import warnings
 
+from ._log import log
+from ._modified import newer
+from .ccompiler import CCompiler, gen_preprocess_options
 from .errors import (
-    DistutilsExecError,
     CompileError,
+    DistutilsExecError,
     LibError,
     LinkError,
     UnknownFileError,
 )
-from .ccompiler import CCompiler, gen_preprocess_options
 from .file_util import write_file
-from ._modified import newer
-from ._log import log
-
 
 warnings.warn(
     "bcppcompiler is deprecated and slated to be removed "
diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index cdfe9d74ef..03181cfb7c 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -3,25 +3,25 @@
 Contains CCompiler, an abstract base class that defines the interface
 for the Distutils compiler abstraction model."""
 
-import sys
 import os
 import re
+import sys
 import warnings
 
+from ._itertools import always_iterable
+from ._log import log
+from ._modified import newer_group
+from .dir_util import mkpath
 from .errors import (
     CompileError,
+    DistutilsModuleError,
+    DistutilsPlatformError,
     LinkError,
     UnknownFileError,
-    DistutilsPlatformError,
-    DistutilsModuleError,
 )
-from .spawn import spawn
 from .file_util import move_file
-from .dir_util import mkpath
-from ._modified import newer_group
-from .util import split_quoted, execute
-from ._log import log
-from ._itertools import always_iterable
+from .spawn import spawn
+from .util import execute, split_quoted
 
 
 class CCompiler:
diff --git a/distutils/cmd.py b/distutils/cmd.py
index 8849474cd7..02dbf165f5 100644
--- a/distutils/cmd.py
+++ b/distutils/cmd.py
@@ -4,14 +4,14 @@
 in the distutils.command package.
 """
 
-import sys
+import logging
 import os
 import re
-import logging
+import sys
 
-from .errors import DistutilsOptionError
-from . import util, dir_util, file_util, archive_util, _modified
+from . import _modified, archive_util, dir_util, file_util, util
 from ._log import log
+from .errors import DistutilsOptionError
 
 
 class Command:
diff --git a/distutils/command/_framework_compat.py b/distutils/command/_framework_compat.py
index 397ebf823e..00d34bc7d8 100644
--- a/distutils/command/_framework_compat.py
+++ b/distutils/command/_framework_compat.py
@@ -2,10 +2,10 @@
 Backward compatibility for homebrew builds on macOS.
 """
 
-import sys
-import os
 import functools
+import os
 import subprocess
+import sys
 import sysconfig
 
 
diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index 237b14656f..f681b5531d 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -7,7 +7,7 @@
 import warnings
 
 from ..core import Command
-from ..errors import DistutilsPlatformError, DistutilsOptionError
+from ..errors import DistutilsOptionError, DistutilsPlatformError
 from ..util import get_platform
 
 
diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index 5880ad2ba4..41adf01418 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -5,12 +5,13 @@
 $exec_prefix)."""
 
 import os
+from distutils._log import log
+
 from ..core import Command
-from ..util import get_platform
-from ..dir_util import remove_tree, ensure_relative
+from ..dir_util import ensure_relative, remove_tree
 from ..errors import DistutilsPlatformError
 from ..sysconfig import get_python_version
-from distutils._log import log
+from ..util import get_platform
 
 
 class bdist_dumb(Command):
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index 64af0db0cf..6a75e32fb1 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -3,21 +3,21 @@
 Implements the Distutils 'bdist_rpm' command (create RPM source and binary
 distributions)."""
 
+import os
 import subprocess
 import sys
-import os
+from distutils._log import log
 
 from ..core import Command
 from ..debug import DEBUG
-from ..file_util import write_file
 from ..errors import (
+    DistutilsExecError,
+    DistutilsFileError,
     DistutilsOptionError,
     DistutilsPlatformError,
-    DistutilsFileError,
-    DistutilsExecError,
 )
+from ..file_util import write_file
 from ..sysconfig import get_python_version
-from distutils._log import log
 
 
 class bdist_rpm(Command):
diff --git a/distutils/command/build.py b/distutils/command/build.py
index d8704e3583..d18ed503e3 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -2,8 +2,9 @@
 
 Implements the Distutils 'build' command."""
 
-import sys
 import os
+import sys
+
 from ..core import Command
 from ..errors import DistutilsOptionError
 from ..util import get_platform
diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
index b3f679b67d..811e607e70 100644
--- a/distutils/command/build_clib.py
+++ b/distutils/command/build_clib.py
@@ -15,10 +15,11 @@
 # cut 'n paste.  Sigh.
 
 import os
+from distutils._log import log
+
 from ..core import Command
 from ..errors import DistutilsSetupError
 from ..sysconfig import customize_compiler
-from distutils._log import log
 
 
 def show_compilers():
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index a15781f28a..aa9ed578f8 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -8,25 +8,24 @@
 import os
 import re
 import sys
+from distutils._log import log
+from site import USER_BASE
+
+from .._modified import newer_group
 from ..core import Command
 from ..errors import (
-    DistutilsOptionError,
-    DistutilsSetupError,
     CCompilerError,
-    DistutilsError,
     CompileError,
+    DistutilsError,
+    DistutilsOptionError,
     DistutilsPlatformError,
+    DistutilsSetupError,
 )
-from ..sysconfig import customize_compiler, get_python_version
-from ..sysconfig import get_config_h_filename
-from .._modified import newer_group
 from ..extension import Extension
+from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
 from ..util import get_platform
-from distutils._log import log
 from . import py37compat
 
-from site import USER_BASE
-
 # An extension name is just a dot-separated list of Python NAMEs (ie.
 # the same as a fully-qualified module name).
 extension_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
diff --git a/distutils/command/build_py.py b/distutils/command/build_py.py
index e16011d46a..a15d0af519 100644
--- a/distutils/command/build_py.py
+++ b/distutils/command/build_py.py
@@ -2,15 +2,15 @@
 
 Implements the Distutils 'build_py' command."""
 
-import os
+import glob
 import importlib.util
+import os
 import sys
-import glob
+from distutils._log import log
 
 from ..core import Command
-from ..errors import DistutilsOptionError, DistutilsFileError
+from ..errors import DistutilsFileError, DistutilsOptionError
 from ..util import convert_path
-from distutils._log import log
 
 
 class build_py(Command):
diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index 29d9c27829..37bc585038 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -4,13 +4,14 @@
 
 import os
 import re
-from stat import ST_MODE
+import tokenize
 from distutils import sysconfig
-from ..core import Command
+from distutils._log import log
+from stat import ST_MODE
+
 from .._modified import newer
+from ..core import Command
 from ..util import convert_path
-from distutils._log import log
-import tokenize
 
 shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
 """
diff --git a/distutils/command/check.py b/distutils/command/check.py
index 28f55fb914..6b42a34f6d 100644
--- a/distutils/command/check.py
+++ b/distutils/command/check.py
@@ -9,10 +9,10 @@
 from ..errors import DistutilsSetupError
 
 with contextlib.suppress(ImportError):
-    import docutils.utils
-    import docutils.parsers.rst
     import docutils.frontend
     import docutils.nodes
+    import docutils.parsers.rst
+    import docutils.utils
 
     class SilentReporter(docutils.utils.Reporter):
         def __init__(
diff --git a/distutils/command/clean.py b/distutils/command/clean.py
index 9413f7cfcb..4167a83fb3 100644
--- a/distutils/command/clean.py
+++ b/distutils/command/clean.py
@@ -5,9 +5,10 @@
 # contributed by Bastian Kleineidam , added 2000-03-18
 
 import os
+from distutils._log import log
+
 from ..core import Command
 from ..dir_util import remove_tree
-from distutils._log import log
 
 
 class clean(Command):
diff --git a/distutils/command/config.py b/distutils/command/config.py
index 573741d772..38a5ff5159 100644
--- a/distutils/command/config.py
+++ b/distutils/command/config.py
@@ -12,11 +12,11 @@
 import os
 import pathlib
 import re
+from distutils._log import log
 
 from ..core import Command
 from ..errors import DistutilsExecError
 from ..sysconfig import customize_compiler
-from distutils._log import log
 
 LANG_EXT = {"c": ".c", "c++": ".cxx"}
 
diff --git a/distutils/command/install.py b/distutils/command/install.py
index 927c3ed3a2..575cebdbc8 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -2,25 +2,22 @@
 
 Implements the Distutils 'install' command."""
 
-import sys
-import os
 import contextlib
-import sysconfig
 import itertools
-
+import os
+import sys
+import sysconfig
 from distutils._log import log
+from site import USER_BASE, USER_SITE
+
+from .. import _collections
 from ..core import Command
 from ..debug import DEBUG
-from ..sysconfig import get_config_vars
-from ..file_util import write_file
-from ..util import convert_path, subst_vars, change_root
-from ..util import get_platform
 from ..errors import DistutilsOptionError, DistutilsPlatformError
+from ..file_util import write_file
+from ..sysconfig import get_config_vars
+from ..util import change_root, convert_path, get_platform, subst_vars
 from . import _framework_compat as fw
-from .. import _collections
-
-from site import USER_BASE
-from site import USER_SITE
 
 HAS_USER_SITE = True
 
diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index 31ae4350dc..b63a1af25e 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -6,6 +6,7 @@
 # contributed by Bastian Kleineidam
 
 import os
+
 from ..core import Command
 from ..util import change_root, convert_path
 
diff --git a/distutils/command/install_egg_info.py b/distutils/command/install_egg_info.py
index f3e8f3447d..4fbb3440ab 100644
--- a/distutils/command/install_egg_info.py
+++ b/distutils/command/install_egg_info.py
@@ -6,12 +6,12 @@
 """
 
 import os
-import sys
 import re
+import sys
 
-from ..cmd import Command
 from .. import dir_util
 from .._log import log
+from ..cmd import Command
 
 
 class install_egg_info(Command):
diff --git a/distutils/command/install_lib.py b/distutils/command/install_lib.py
index be4c243321..b1f346f018 100644
--- a/distutils/command/install_lib.py
+++ b/distutils/command/install_lib.py
@@ -3,14 +3,13 @@
 Implements the Distutils 'install_lib' command
 (install all Python modules)."""
 
-import os
 import importlib.util
+import os
 import sys
 
 from ..core import Command
 from ..errors import DistutilsOptionError
 
-
 # Extension for Python source files.
 PYTHON_SOURCE_EXTENSION = ".py"
 
diff --git a/distutils/command/install_scripts.py b/distutils/command/install_scripts.py
index 20f07aaa27..e66b13a16d 100644
--- a/distutils/command/install_scripts.py
+++ b/distutils/command/install_scripts.py
@@ -6,10 +6,11 @@
 # contributed by Bastian Kleineidam
 
 import os
-from ..core import Command
 from distutils._log import log
 from stat import ST_MODE
 
+from ..core import Command
+
 
 class install_scripts(Command):
     description = "install scripts (Python or otherwise)"
diff --git a/distutils/command/register.py b/distutils/command/register.py
index 5a24246ccb..e5e6b379ad 100644
--- a/distutils/command/register.py
+++ b/distutils/command/register.py
@@ -10,10 +10,10 @@
 import logging
 import urllib.parse
 import urllib.request
+from distutils._log import log
 from warnings import warn
 
 from ..core import PyPIRCCommand
-from distutils._log import log
 
 
 class register(PyPIRCCommand):
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index b76cb9bc73..6414ef5c06 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -4,27 +4,25 @@
 
 import os
 import sys
+from distutils import archive_util, dir_util, file_util
+from distutils._log import log
 from glob import glob
-from warnings import warn
 from itertools import filterfalse
+from warnings import warn
 
 from ..core import Command
-from distutils import dir_util
-from distutils import file_util
-from distutils import archive_util
-from ..text_file import TextFile
+from ..errors import DistutilsOptionError, DistutilsTemplateError
 from ..filelist import FileList
-from distutils._log import log
+from ..text_file import TextFile
 from ..util import convert_path
-from ..errors import DistutilsOptionError, DistutilsTemplateError
 
 
 def show_formats():
     """Print all possible values for the 'formats' option (used by
     the "--help-formats" command-line option).
     """
-    from ..fancy_getopt import FancyGetopt
     from ..archive_util import ARCHIVE_FORMATS
+    from ..fancy_getopt import FancyGetopt
 
     formats = []
     for format in ARCHIVE_FORMATS.keys():
diff --git a/distutils/command/upload.py b/distutils/command/upload.py
index a9124f2b71..e61a9ea8a5 100644
--- a/distutils/command/upload.py
+++ b/distutils/command/upload.py
@@ -5,18 +5,18 @@
 index).
 """
 
-import os
-import io
 import hashlib
+import io
 import logging
+import os
 from base64 import standard_b64encode
-from urllib.request import urlopen, Request, HTTPError
 from urllib.parse import urlparse
-from ..errors import DistutilsError, DistutilsOptionError
+from urllib.request import HTTPError, Request, urlopen
+
 from ..core import PyPIRCCommand
+from ..errors import DistutilsError, DistutilsOptionError
 from ..spawn import spawn
 
-
 # PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
 # https://bugs.python.org/issue40698
 _FILE_CONTENT_DIGESTS = {
diff --git a/distutils/config.py b/distutils/config.py
index e0defd77e6..83f96a9eec 100644
--- a/distutils/config.py
+++ b/distutils/config.py
@@ -4,8 +4,8 @@
 that uses .pypirc in the distutils.command package.
 """
 
-import os
 import email.message
+import os
 from configparser import RawConfigParser
 
 from .cmd import Command
diff --git a/distutils/core.py b/distutils/core.py
index 799de9489c..309ce696fa 100644
--- a/distutils/core.py
+++ b/distutils/core.py
@@ -10,21 +10,20 @@
 import sys
 import tokenize
 
+from .cmd import Command
+from .config import PyPIRCCommand
 from .debug import DEBUG
+
+# Mainly import these so setup scripts can "from distutils.core import" them.
+from .dist import Distribution
 from .errors import (
-    DistutilsSetupError,
-    DistutilsError,
     CCompilerError,
     DistutilsArgError,
+    DistutilsError,
+    DistutilsSetupError,
 )
-
-# Mainly import these so setup scripts can "from distutils.core import" them.
-from .dist import Distribution
-from .cmd import Command
-from .config import PyPIRCCommand
 from .extension import Extension
 
-
 __all__ = ['Distribution', 'Command', 'PyPIRCCommand', 'Extension', 'setup']
 
 # This is a barebones help message generated displayed when the user
diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py
index 2060950415..539f09d8f3 100644
--- a/distutils/cygwinccompiler.py
+++ b/distutils/cygwinccompiler.py
@@ -6,26 +6,25 @@
 cygwin in no-cygwin mode).
 """
 
+import copy
 import os
 import pathlib
 import re
-import sys
-import copy
 import shlex
+import sys
 import warnings
 from subprocess import check_output
 
-from .unixccompiler import UnixCCompiler
-from .file_util import write_file
+from ._collections import RangeMap
 from .errors import (
-    DistutilsExecError,
-    DistutilsPlatformError,
     CCompilerError,
     CompileError,
+    DistutilsExecError,
+    DistutilsPlatformError,
 )
+from .file_util import write_file
+from .unixccompiler import UnixCCompiler
 from .version import LooseVersion, suppress_known_deprecation
-from ._collections import RangeMap
-
 
 _msvcr_lookup = RangeMap.left(
     {
diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 819fe56f6d..2021bed82e 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -2,10 +2,11 @@
 
 Utility functions for manipulating directories and directory trees."""
 
-import os
 import errno
-from .errors import DistutilsInternalError, DistutilsFileError
+import os
+
 from ._log import log
+from .errors import DistutilsFileError, DistutilsInternalError
 
 # cache for by mkpath() -- in addition to cheapening redundant calls,
 # eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
diff --git a/distutils/dist.py b/distutils/dist.py
index bbea155556..1759120c92 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -4,12 +4,12 @@
 being built/installed/distributed.
 """
 
-import sys
-import os
-import re
-import pathlib
 import contextlib
 import logging
+import os
+import pathlib
+import re
+import sys
 from email import message_from_file
 
 try:
@@ -17,16 +17,16 @@
 except ImportError:
     warnings = None
 
+from ._log import log
+from .debug import DEBUG
 from .errors import (
-    DistutilsOptionError,
-    DistutilsModuleError,
     DistutilsArgError,
     DistutilsClassError,
+    DistutilsModuleError,
+    DistutilsOptionError,
 )
 from .fancy_getopt import FancyGetopt, translate_longopt
-from .util import check_environ, strtobool, rfc822_escape
-from ._log import log
-from .debug import DEBUG
+from .util import check_environ, rfc822_escape, strtobool
 
 # Regex to define acceptable Distutils command names.  This is not *quite*
 # the same as a Python NAME -- I don't allow leading underscores.  The fact
@@ -634,8 +634,8 @@ def _show_help(self, parser, global_options=1, display_options=1, commands=[]):
         in 'commands'.
         """
         # late import because of mutual dependence between these modules
-        from distutils.core import gen_usage
         from distutils.cmd import Command
+        from distutils.core import gen_usage
 
         if global_options:
             if display_options:
diff --git a/distutils/extension.py b/distutils/extension.py
index 00ca61d569..94e71635d9 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -139,8 +139,7 @@ def __repr__(self):
 
 def read_setup_file(filename):  # noqa: C901
     """Reads a Setup file and returns Extension instances."""
-    from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx
-
+    from distutils.sysconfig import _variable_rx, expand_makefile_vars, parse_makefile
     from distutils.text_file import TextFile
     from distutils.util import split_quoted
 
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index e41b6064bd..cb646c6d9b 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -8,11 +8,12 @@
   * options set attributes of a passed-in object
 """
 
-import sys
-import string
-import re
 import getopt
-from .errors import DistutilsGetoptError, DistutilsArgError
+import re
+import string
+import sys
+
+from .errors import DistutilsArgError, DistutilsGetoptError
 
 # Much like command_re in distutils.core, this is close to but not quite
 # the same as a Python NAME -- except, in the spirit of most GNU
diff --git a/distutils/file_util.py b/distutils/file_util.py
index 6c8193e9b7..960def9cf9 100644
--- a/distutils/file_util.py
+++ b/distutils/file_util.py
@@ -4,8 +4,9 @@
 """
 
 import os
-from .errors import DistutilsFileError
+
 from ._log import log
+from .errors import DistutilsFileError
 
 # for generating verbose output in 'copy_file()'
 _copy_action = {None: 'copying', 'hard': 'hard linking', 'sym': 'symbolically linking'}
@@ -101,7 +102,7 @@ def copy_file(  # noqa: C901
     # (not update) and (src newer than dst).
 
     from distutils._modified import newer
-    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE
+    from stat import S_IMODE, ST_ATIME, ST_MODE, ST_MTIME
 
     if not os.path.isfile(src):
         raise DistutilsFileError(
@@ -175,8 +176,8 @@ def move_file(src, dst, verbose=1, dry_run=0):  # noqa: C901
     Handles cross-device moves on Unix using 'copy_file()'.  What about
     other systems???
     """
-    from os.path import exists, isfile, isdir, basename, dirname
     import errno
+    from os.path import basename, dirname, exists, isdir, isfile
 
     if verbose >= 1:
         log.info("moving %s -> %s", src, dst)
diff --git a/distutils/filelist.py b/distutils/filelist.py
index 3205762654..5ce47936a9 100644
--- a/distutils/filelist.py
+++ b/distutils/filelist.py
@@ -4,14 +4,14 @@
 and building lists of files.
 """
 
-import os
-import re
 import fnmatch
 import functools
+import os
+import re
 
-from .util import convert_path
-from .errors import DistutilsTemplateError, DistutilsInternalError
 from ._log import log
+from .errors import DistutilsInternalError, DistutilsTemplateError
+from .util import convert_path
 
 
 class FileList:
diff --git a/distutils/log.py b/distutils/log.py
index 239f315850..8abb09cfa2 100644
--- a/distutils/log.py
+++ b/distutils/log.py
@@ -9,7 +9,6 @@
 
 from ._log import log as _global_log
 
-
 DEBUG = logging.DEBUG
 INFO = logging.INFO
 WARN = logging.WARN
diff --git a/distutils/msvc9compiler.py b/distutils/msvc9compiler.py
index 402c0c0620..6a0105e484 100644
--- a/distutils/msvc9compiler.py
+++ b/distutils/msvc9compiler.py
@@ -13,24 +13,23 @@
 # ported to VS2005 and VS 2008 by Christian Heimes
 
 import os
+import re
 import subprocess
 import sys
-import re
 import warnings
+import winreg
 
+from ._log import log
+from .ccompiler import CCompiler, gen_lib_options
 from .errors import (
+    CompileError,
     DistutilsExecError,
     DistutilsPlatformError,
-    CompileError,
     LibError,
     LinkError,
 )
-from .ccompiler import CCompiler, gen_lib_options
-from ._log import log
 from .util import get_platform
 
-import winreg
-
 warnings.warn(
     "msvc9compiler is deprecated and slated to be removed "
     "in the future. Please discontinue use or file an issue "
diff --git a/distutils/msvccompiler.py b/distutils/msvccompiler.py
index b8694dd6d8..ac8b68c08c 100644
--- a/distutils/msvccompiler.py
+++ b/distutils/msvccompiler.py
@@ -8,18 +8,19 @@
 # hacked by Robin Becker and Thomas Heller to do a better job of
 #   finding DevStudio (through the registry)
 
-import sys
 import os
+import sys
 import warnings
+
+from ._log import log
+from .ccompiler import CCompiler, gen_lib_options
 from .errors import (
+    CompileError,
     DistutilsExecError,
     DistutilsPlatformError,
-    CompileError,
     LibError,
     LinkError,
 )
-from .ccompiler import CCompiler, gen_lib_options
-from ._log import log
 
 _can_read_reg = False
 try:
@@ -681,7 +682,8 @@ def set_path_env_var(self, name):
 if get_build_version() >= 8.0:
     log.debug("Importing new compiler from distutils.msvc9compiler")
     OldMSVCCompiler = MSVCCompiler
-    from distutils.msvc9compiler import MSVCCompiler
-
     # get_build_architecture not really relevant now we support cross-compile
-    from distutils.msvc9compiler import MacroExpander  # noqa: F811
+    from distutils.msvc9compiler import (
+        MacroExpander,  # noqa: F811
+        MSVCCompiler,
+    )
diff --git a/distutils/spawn.py b/distutils/spawn.py
index 48adceb114..046b5bbb82 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -6,13 +6,13 @@
 executable name.
 """
 
-import sys
 import os
 import subprocess
+import sys
 
-from .errors import DistutilsExecError
-from .debug import DEBUG
 from ._log import log
+from .debug import DEBUG
+from .errors import DistutilsExecError
 
 
 def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):  # noqa: C901
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 40215b8347..1a38e9fa79 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -9,16 +9,16 @@
 Email:        
 """
 
-import os
 import functools
+import os
+import pathlib
 import re
 import sys
 import sysconfig
-import pathlib
 
-from .errors import DistutilsPlatformError
 from . import py39compat
 from ._functools import pass_none
+from .errors import DistutilsPlatformError
 
 IS_PYPY = '__pypy__' in sys.builtin_module_names
 
diff --git a/distutils/tests/__init__.py b/distutils/tests/__init__.py
index 6d9b853215..c475e5d0f2 100644
--- a/distutils/tests/__init__.py
+++ b/distutils/tests/__init__.py
@@ -17,8 +17,7 @@ def missing_compiler_executable(cmd_names=[]):  # pragma: no cover
     missing.
 
     """
-    from distutils import ccompiler, sysconfig, spawn
-    from distutils import errors
+    from distutils import ccompiler, errors, spawn, sysconfig
 
     compiler = ccompiler.new_compiler()
     sysconfig.customize_compiler(compiler)
diff --git a/distutils/tests/py37compat.py b/distutils/tests/py37compat.py
index e5d406a3b6..76d3551c49 100644
--- a/distutils/tests/py37compat.py
+++ b/distutils/tests/py37compat.py
@@ -1,6 +1,6 @@
 import os
-import sys
 import platform
+import sys
 
 
 def subprocess_args_compat(*args):
diff --git a/distutils/tests/support.py b/distutils/tests/support.py
index ddf7bf1dba..9cd2b8a9ee 100644
--- a/distutils/tests/support.py
+++ b/distutils/tests/support.py
@@ -1,18 +1,17 @@
 """Support code for distutils test cases."""
 
+import itertools
 import os
-import sys
+import pathlib
 import shutil
-import tempfile
+import sys
 import sysconfig
-import itertools
-import pathlib
+import tempfile
+from distutils.core import Distribution
 
 import pytest
 from more_itertools import always_iterable
 
-from distutils.core import Distribution
-
 
 @pytest.mark.usefixtures('distutils_managed_tempdir')
 class TempdirManager:
diff --git a/distutils/tests/test_archive_util.py b/distutils/tests/test_archive_util.py
index 2b5eafd27e..145cce915d 100644
--- a/distutils/tests/test_archive_util.py
+++ b/distutils/tests/test_archive_util.py
@@ -1,31 +1,30 @@
 """Tests for distutils.archive_util."""
 
+import functools
+import operator
 import os
+import pathlib
 import sys
 import tarfile
-from os.path import splitdrive
 import warnings
-import functools
-import operator
-import pathlib
-
-import pytest
-import path
-
 from distutils import archive_util
 from distutils.archive_util import (
+    ARCHIVE_FORMATS,
     check_archive_formats,
+    make_archive,
     make_tarball,
     make_zipfile,
-    make_archive,
-    ARCHIVE_FORMATS,
 )
 from distutils.spawn import spawn
 from distutils.tests import support
+from os.path import splitdrive
 from test.support import patch
-from .unix_compat import require_unix_id, require_uid_0, grp, pwd, UID_0_SUPPORT
+
+import path
+import pytest
 
 from .py38compat import check_warnings
+from .unix_compat import UID_0_SUPPORT, grp, pwd, require_uid_0, require_unix_id
 
 
 def can_fs_encode(filename):
diff --git a/distutils/tests/test_bdist_dumb.py b/distutils/tests/test_bdist_dumb.py
index cfe7fa9e62..78928fea24 100644
--- a/distutils/tests/test_bdist_dumb.py
+++ b/distutils/tests/test_bdist_dumb.py
@@ -3,13 +3,12 @@
 import os
 import sys
 import zipfile
-
-import pytest
-
-from distutils.core import Distribution
 from distutils.command.bdist_dumb import bdist_dumb
+from distutils.core import Distribution
 from distutils.tests import support
 
+import pytest
+
 SETUP_PY = """\
 from distutils.core import setup
 import foo
diff --git a/distutils/tests/test_bdist_rpm.py b/distutils/tests/test_bdist_rpm.py
index e6804088da..769623cbb8 100644
--- a/distutils/tests/test_bdist_rpm.py
+++ b/distutils/tests/test_bdist_rpm.py
@@ -1,17 +1,15 @@
 """Tests for distutils.command.bdist_rpm."""
 
-import sys
 import os
-
-import pytest
-
-from distutils.core import Distribution
+import sys
 from distutils.command.bdist_rpm import bdist_rpm
-from distutils.tests import support
+from distutils.core import Distribution
 from distutils.spawn import find_executable  # noqa: F401
+from distutils.tests import support
 
-from .py38compat import requires_zlib
+import pytest
 
+from .py38compat import requires_zlib
 
 SETUP_PY = """\
 from distutils.core import setup
diff --git a/distutils/tests/test_build.py b/distutils/tests/test_build.py
index 8617fa9919..25483ad76b 100644
--- a/distutils/tests/test_build.py
+++ b/distutils/tests/test_build.py
@@ -2,7 +2,6 @@
 
 import os
 import sys
-
 from distutils.command.build import build
 from distutils.tests import support
 from sysconfig import get_platform
diff --git a/distutils/tests/test_build_clib.py b/distutils/tests/test_build_clib.py
index f855454256..9c69b3e7fc 100644
--- a/distutils/tests/test_build_clib.py
+++ b/distutils/tests/test_build_clib.py
@@ -1,12 +1,11 @@
 """Tests for distutils.command.build_clib."""
 
 import os
-
-import pytest
-
 from distutils.command.build_clib import build_clib
 from distutils.errors import DistutilsSetupError
-from distutils.tests import support, missing_compiler_executable
+from distutils.tests import missing_compiler_executable, support
+
+import pytest
 
 
 class TestBuildCLib(support.TempdirManager):
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index ae66bc4eb8..ca5d9d57cd 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -1,37 +1,36 @@
-import sys
-import os
-from io import StringIO
-import textwrap
-import site
 import contextlib
-import platform
-import tempfile
 import importlib
-import shutil
+import os
+import platform
 import re
-
-import path
-import pytest
-import jaraco.path
-
-from distutils.core import Distribution
-from distutils.command.build_ext import build_ext
+import shutil
+import site
+import sys
+import tempfile
+import textwrap
 from distutils import sysconfig
-from distutils.tests import missing_compiler_executable
-from distutils.tests.support import (
-    TempdirManager,
-    copy_xxmodule_c,
-    fixup_build_ext,
-)
-from distutils.extension import Extension
+from distutils.command.build_ext import build_ext
+from distutils.core import Distribution
 from distutils.errors import (
     CompileError,
     DistutilsPlatformError,
     DistutilsSetupError,
     UnknownFileError,
 )
-
+from distutils.extension import Extension
+from distutils.tests import missing_compiler_executable
+from distutils.tests.support import (
+    TempdirManager,
+    copy_xxmodule_c,
+    fixup_build_ext,
+)
+from io import StringIO
 from test import support
+
+import jaraco.path
+import path
+import pytest
+
 from . import py38compat as import_helper
 
 
diff --git a/distutils/tests/test_build_py.py b/distutils/tests/test_build_py.py
index 6730878e96..8bc0e98a4f 100644
--- a/distutils/tests/test_build_py.py
+++ b/distutils/tests/test_build_py.py
@@ -2,16 +2,14 @@
 
 import os
 import sys
-
-import pytest
-import jaraco.path
-
 from distutils.command.build_py import build_py
 from distutils.core import Distribution
 from distutils.errors import DistutilsFileError
-
 from distutils.tests import support
 
+import jaraco.path
+import pytest
+
 
 @support.combine_markers
 class TestBuildPy(support.TempdirManager):
diff --git a/distutils/tests/test_build_scripts.py b/distutils/tests/test_build_scripts.py
index 7e05ec5f9a..208b1f6e65 100644
--- a/distutils/tests/test_build_scripts.py
+++ b/distutils/tests/test_build_scripts.py
@@ -2,15 +2,13 @@
 
 import os
 import textwrap
-
-import jaraco.path
-
+from distutils import sysconfig
 from distutils.command.build_scripts import build_scripts
 from distutils.core import Distribution
-from distutils import sysconfig
-
 from distutils.tests import support
 
+import jaraco.path
+
 
 class TestBuildScripts(support.TempdirManager):
     def test_default_settings(self):
diff --git a/distutils/tests/test_ccompiler.py b/distutils/tests/test_ccompiler.py
index b6512e6d77..d23b907cad 100644
--- a/distutils/tests/test_ccompiler.py
+++ b/distutils/tests/test_ccompiler.py
@@ -1,13 +1,12 @@
 import os
-import sys
 import platform
-import textwrap
+import sys
 import sysconfig
+import textwrap
+from distutils import ccompiler
 
 import pytest
 
-from distutils import ccompiler
-
 
 def _make_strs(paths):
     """
diff --git a/distutils/tests/test_check.py b/distutils/tests/test_check.py
index 8215300b97..580cb2a267 100644
--- a/distutils/tests/test_check.py
+++ b/distutils/tests/test_check.py
@@ -2,12 +2,11 @@
 
 import os
 import textwrap
-
-import pytest
-
 from distutils.command.check import check
-from distutils.tests import support
 from distutils.errors import DistutilsSetupError
+from distutils.tests import support
+
+import pytest
 
 try:
     import pygments
diff --git a/distutils/tests/test_clean.py b/distutils/tests/test_clean.py
index e2459aa0c1..9b11fa40f7 100644
--- a/distutils/tests/test_clean.py
+++ b/distutils/tests/test_clean.py
@@ -1,7 +1,6 @@
 """Tests for distutils.command.clean."""
 
 import os
-
 from distutils.command.clean import clean
 from distutils.tests import support
 
diff --git a/distutils/tests/test_cmd.py b/distutils/tests/test_cmd.py
index 684662d32e..f366aa6522 100644
--- a/distutils/tests/test_cmd.py
+++ b/distutils/tests/test_cmd.py
@@ -1,11 +1,11 @@
 """Tests for distutils.cmd."""
 
 import os
-
+from distutils import debug
 from distutils.cmd import Command
 from distutils.dist import Distribution
 from distutils.errors import DistutilsOptionError
-from distutils import debug
+
 import pytest
 
 
diff --git a/distutils/tests/test_config.py b/distutils/tests/test_config.py
index 11c23d837e..be5ae0a687 100644
--- a/distutils/tests/test_config.py
+++ b/distutils/tests/test_config.py
@@ -1,11 +1,10 @@
 """Tests for distutils.pypirc.pypirc."""
 
 import os
+from distutils.tests import support
 
 import pytest
 
-from distutils.tests import support
-
 PYPIRC = """\
 [distutils]
 
diff --git a/distutils/tests/test_config_cmd.py b/distutils/tests/test_config_cmd.py
index 90c8f90679..fc0a7885cd 100644
--- a/distutils/tests/test_config_cmd.py
+++ b/distutils/tests/test_config_cmd.py
@@ -2,15 +2,14 @@
 
 import os
 import sys
+from distutils._log import log
+from distutils.command.config import config, dump_file
+from distutils.tests import missing_compiler_executable, support
 
 import more_itertools
 import path
 import pytest
 
-from distutils.command.config import dump_file, config
-from distutils.tests import support, missing_compiler_executable
-from distutils._log import log
-
 
 @pytest.fixture(autouse=True)
 def info_log(request, monkeypatch):
diff --git a/distutils/tests/test_core.py b/distutils/tests/test_core.py
index 95aa299889..5916718027 100644
--- a/distutils/tests/test_core.py
+++ b/distutils/tests/test_core.py
@@ -1,14 +1,13 @@
 """Tests for distutils.core."""
 
-import io
 import distutils.core
+import io
 import os
 import sys
+from distutils.dist import Distribution
 
 import pytest
 
-from distutils.dist import Distribution
-
 # setup script that uses __file__
 setup_using___file__ = """\
 
diff --git a/distutils/tests/test_cygwinccompiler.py b/distutils/tests/test_cygwinccompiler.py
index fc67d75f82..0a66193d35 100644
--- a/distutils/tests/test_cygwinccompiler.py
+++ b/distutils/tests/test_cygwinccompiler.py
@@ -1,19 +1,18 @@
 """Tests for distutils.cygwinccompiler."""
 
-import sys
 import os
-
-import pytest
-
+import sys
+from distutils import sysconfig
 from distutils.cygwinccompiler import (
-    check_config_h,
-    CONFIG_H_OK,
     CONFIG_H_NOTOK,
+    CONFIG_H_OK,
     CONFIG_H_UNCERTAIN,
+    check_config_h,
     get_msvcr,
 )
 from distutils.tests import support
-from distutils import sysconfig
+
+import pytest
 
 
 @pytest.fixture(autouse=True)
diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index 6fc9ed0883..84cda619ba 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -3,22 +3,20 @@
 import os
 import stat
 import unittest.mock as mock
-
-import jaraco.path
-import path
-import pytest
-
 from distutils import dir_util, errors
 from distutils.dir_util import (
-    mkpath,
-    remove_tree,
-    create_tree,
     copy_tree,
+    create_tree,
     ensure_relative,
+    mkpath,
+    remove_tree,
 )
-
 from distutils.tests import support
 
+import jaraco.path
+import path
+import pytest
+
 
 @pytest.fixture(autouse=True)
 def stuff(request, monkeypatch, distutils_managed_tempdir):
diff --git a/distutils/tests/test_dist.py b/distutils/tests/test_dist.py
index 8e52873dce..9ed4d16dd8 100644
--- a/distutils/tests/test_dist.py
+++ b/distutils/tests/test_dist.py
@@ -1,24 +1,21 @@
 """Tests for distutils.dist."""
 
-import os
-import io
 import email
-import email.policy
 import email.generator
+import email.policy
+import functools
+import io
+import os
 import sys
-import warnings
 import textwrap
-import functools
 import unittest.mock as mock
-
-import pytest
-import jaraco.path
-
-from distutils.dist import Distribution, fix_help_options
+import warnings
 from distutils.cmd import Command
-
+from distutils.dist import Distribution, fix_help_options
 from distutils.tests import support
 
+import jaraco.path
+import pytest
 
 pydistutils_cfg = '.' * (os.name == 'posix') + 'pydistutils.cfg'
 
diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py
index 297ae44bfe..77bb147bfd 100644
--- a/distutils/tests/test_extension.py
+++ b/distutils/tests/test_extension.py
@@ -2,11 +2,11 @@
 
 import os
 import warnings
+from distutils.extension import Extension, read_setup_file
 
-from distutils.extension import read_setup_file, Extension
+import pytest
 
 from .py38compat import check_warnings
-import pytest
 
 
 class TestExtension:
diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py
index 6c7019140e..4c2abd2453 100644
--- a/distutils/tests/test_file_util.py
+++ b/distutils/tests/test_file_util.py
@@ -1,15 +1,14 @@
 """Tests for distutils.file_util."""
 
-import os
 import errno
+import os
 import unittest.mock as mock
+from distutils.errors import DistutilsFileError
+from distutils.file_util import copy_file, move_file
 
 import jaraco.path
 import pytest
 
-from distutils.file_util import move_file, copy_file
-from distutils.errors import DistutilsFileError
-
 
 @pytest.fixture(autouse=True)
 def stuff(request, tmp_path):
diff --git a/distutils/tests/test_filelist.py b/distutils/tests/test_filelist.py
index bf1a9d9b45..6a379a6323 100644
--- a/distutils/tests/test_filelist.py
+++ b/distutils/tests/test_filelist.py
@@ -1,20 +1,17 @@
 """Tests for distutils.filelist."""
 
+import logging
 import os
 import re
-import logging
-
-from distutils import debug
+from distutils import debug, filelist
 from distutils.errors import DistutilsTemplateError
-from distutils.filelist import glob_to_re, translate_pattern, FileList
-from distutils import filelist
+from distutils.filelist import FileList, glob_to_re, translate_pattern
 
-import pytest
 import jaraco.path
+import pytest
 
 from . import py38compat as os_helper
 
-
 MANIFEST_IN = """\
 include ok
 include xo
diff --git a/distutils/tests/test_install.py b/distutils/tests/test_install.py
index 08c72c1be0..08f0f83993 100644
--- a/distutils/tests/test_install.py
+++ b/distutils/tests/test_install.py
@@ -1,23 +1,20 @@
 """Tests for distutils.command.install."""
 
+import logging
 import os
-import sys
-import site
 import pathlib
-import logging
-
-import pytest
-
+import site
+import sys
 from distutils import sysconfig
-from distutils.command.install import install
 from distutils.command import install as install_module
 from distutils.command.build_ext import build_ext
-from distutils.command.install import INSTALL_SCHEMES
+from distutils.command.install import INSTALL_SCHEMES, install
 from distutils.core import Distribution
 from distutils.errors import DistutilsOptionError
 from distutils.extension import Extension
+from distutils.tests import missing_compiler_executable, support
 
-from distutils.tests import support, missing_compiler_executable
+import pytest
 
 
 def _make_ext_name(modname):
diff --git a/distutils/tests/test_install_data.py b/distutils/tests/test_install_data.py
index 198c10da8d..e453d01f1a 100644
--- a/distutils/tests/test_install_data.py
+++ b/distutils/tests/test_install_data.py
@@ -1,12 +1,11 @@
 """Tests for distutils.command.install_data."""
 
 import os
-
-import pytest
-
 from distutils.command.install_data import install_data
 from distutils.tests import support
 
+import pytest
+
 
 @pytest.mark.usefixtures('save_env')
 class TestInstallData(
diff --git a/distutils/tests/test_install_headers.py b/distutils/tests/test_install_headers.py
index 8b86b6eaed..2c74f06b97 100644
--- a/distutils/tests/test_install_headers.py
+++ b/distutils/tests/test_install_headers.py
@@ -1,12 +1,11 @@
 """Tests for distutils.command.install_headers."""
 
 import os
-
-import pytest
-
 from distutils.command.install_headers import install_headers
 from distutils.tests import support
 
+import pytest
+
 
 @pytest.mark.usefixtures('save_env')
 class TestInstallHeaders(
diff --git a/distutils/tests/test_install_lib.py b/distutils/tests/test_install_lib.py
index 0efe39fe86..964106fa00 100644
--- a/distutils/tests/test_install_lib.py
+++ b/distutils/tests/test_install_lib.py
@@ -1,15 +1,14 @@
 """Tests for distutils.command.install_data."""
 
-import sys
-import os
 import importlib.util
-
-import pytest
-
+import os
+import sys
 from distutils.command.install_lib import install_lib
+from distutils.errors import DistutilsOptionError
 from distutils.extension import Extension
 from distutils.tests import support
-from distutils.errors import DistutilsOptionError
+
+import pytest
 
 
 @support.combine_markers
diff --git a/distutils/tests/test_install_scripts.py b/distutils/tests/test_install_scripts.py
index 4da2acb6a8..5d9f13a426 100644
--- a/distutils/tests/test_install_scripts.py
+++ b/distutils/tests/test_install_scripts.py
@@ -1,11 +1,10 @@
 """Tests for distutils.command.install_scripts."""
 
 import os
-
 from distutils.command.install_scripts import install_scripts
 from distutils.core import Distribution
-
 from distutils.tests import support
+
 from . import test_build_scripts
 
 
diff --git a/distutils/tests/test_log.py b/distutils/tests/test_log.py
index ec6a0c8051..d67779fc9f 100644
--- a/distutils/tests/test_log.py
+++ b/distutils/tests/test_log.py
@@ -1,7 +1,6 @@
 """Tests for distutils.log"""
 
 import logging
-
 from distutils._log import log
 
 
diff --git a/distutils/tests/test_modified.py b/distutils/tests/test_modified.py
index 5fde7a5971..2bd82346cf 100644
--- a/distutils/tests/test_modified.py
+++ b/distutils/tests/test_modified.py
@@ -2,13 +2,12 @@
 
 import os
 import types
-
-import pytest
-
-from distutils._modified import newer, newer_pairwise, newer_group, newer_pairwise_group
+from distutils._modified import newer, newer_group, newer_pairwise, newer_pairwise_group
 from distutils.errors import DistutilsFileError
 from distutils.tests import support
 
+import pytest
+
 
 class TestDepUtil(support.TempdirManager):
     def test_newer(self):
diff --git a/distutils/tests/test_msvc9compiler.py b/distutils/tests/test_msvc9compiler.py
index dfb34122bc..58e24f017a 100644
--- a/distutils/tests/test_msvc9compiler.py
+++ b/distutils/tests/test_msvc9compiler.py
@@ -1,10 +1,10 @@
 """Tests for distutils.msvc9compiler."""
 
-import sys
 import os
-
+import sys
 from distutils.errors import DistutilsPlatformError
 from distutils.tests import support
+
 import pytest
 
 # A manifest with the only assembly reference being the msvcrt assembly, so
diff --git a/distutils/tests/test_msvccompiler.py b/distutils/tests/test_msvccompiler.py
index f65a5a25a3..23b6c732c3 100644
--- a/distutils/tests/test_msvccompiler.py
+++ b/distutils/tests/test_msvccompiler.py
@@ -1,16 +1,14 @@
 """Tests for distutils._msvccompiler."""
 
-import sys
 import os
+import sys
 import threading
 import unittest.mock as mock
-
-import pytest
-
+from distutils import _msvccompiler
 from distutils.errors import DistutilsPlatformError
 from distutils.tests import support
-from distutils import _msvccompiler
 
+import pytest
 
 needs_winreg = pytest.mark.skipif('not hasattr(_msvccompiler, "winreg")')
 
diff --git a/distutils/tests/test_register.py b/distutils/tests/test_register.py
index 591c5ce0ad..d071bbe951 100644
--- a/distutils/tests/test_register.py
+++ b/distutils/tests/test_register.py
@@ -4,12 +4,11 @@
 import os
 import pathlib
 import urllib
-
 from distutils.command import register as register_module
 from distutils.command.register import register
 from distutils.errors import DistutilsSetupError
-
 from distutils.tests.test_config import BasePyPIRCCommandTestCase
+
 import pytest
 
 try:
diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py
index 450f68c993..66a4194706 100644
--- a/distutils/tests/test_sdist.py
+++ b/distutils/tests/test_sdist.py
@@ -5,24 +5,23 @@
 import tarfile
 import warnings
 import zipfile
+from distutils.archive_util import ARCHIVE_FORMATS
+from distutils.command.sdist import sdist, show_formats
+from distutils.core import Distribution
+from distutils.errors import DistutilsOptionError
+from distutils.filelist import FileList
+from distutils.spawn import find_executable  # noqa: F401
+from distutils.tests.test_config import BasePyPIRCCommandTestCase
 from os.path import join
 from textwrap import dedent
-from .unix_compat import require_unix_id, require_uid_0, pwd, grp
 
-import pytest
-import path
 import jaraco.path
+import path
+import pytest
 from more_itertools import ilen
 
 from .py38compat import check_warnings
-
-from distutils.command.sdist import sdist, show_formats
-from distutils.core import Distribution
-from distutils.tests.test_config import BasePyPIRCCommandTestCase
-from distutils.errors import DistutilsOptionError
-from distutils.spawn import find_executable  # noqa: F401
-from distutils.filelist import FileList
-from distutils.archive_util import ARCHIVE_FORMATS
+from .unix_compat import grp, pwd, require_uid_0, require_unix_id
 
 SETUP_PY = """
 from distutils.core import setup
diff --git a/distutils/tests/test_spawn.py b/distutils/tests/test_spawn.py
index ec4c9982ad..abbac4c23f 100644
--- a/distutils/tests/test_spawn.py
+++ b/distutils/tests/test_spawn.py
@@ -4,19 +4,16 @@
 import stat
 import sys
 import unittest.mock as mock
-
+from distutils.errors import DistutilsExecError
+from distutils.spawn import find_executable, spawn
+from distutils.tests import support
 from test.support import unix_shell
 
 import path
+import pytest
 
 from . import py38compat as os_helper
 
-from distutils.spawn import find_executable
-from distutils.spawn import spawn
-from distutils.errors import DistutilsExecError
-from distutils.tests import support
-import pytest
-
 
 class TestSpawn(support.TempdirManager):
     @pytest.mark.skipif("os.name not in ('nt', 'posix')")
diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index 131c1344bb..ce13d6bdc3 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -1,22 +1,21 @@
 """Tests for distutils.sysconfig."""
 
 import contextlib
+import distutils
 import os
+import pathlib
 import subprocess
 import sys
-import pathlib
-
-import pytest
-import jaraco.envs
-import path
-from jaraco.text import trim
-
-import distutils
 from distutils import sysconfig
 from distutils.ccompiler import get_default_compiler  # noqa: F401
 from distutils.unixccompiler import UnixCCompiler
 from test.support import swap_item
 
+import jaraco.envs
+import path
+import pytest
+from jaraco.text import trim
+
 from . import py37compat
 
 
diff --git a/distutils/tests/test_text_file.py b/distutils/tests/test_text_file.py
index fe787f44c8..c5c910a820 100644
--- a/distutils/tests/test_text_file.py
+++ b/distutils/tests/test_text_file.py
@@ -1,11 +1,11 @@
 """Tests for distutils.text_file."""
 
+from distutils.tests import support
+from distutils.text_file import TextFile
+
 import jaraco.path
 import path
 
-from distutils.text_file import TextFile
-from distutils.tests import support
-
 TEST_DATA = """# test file
 
 line 3 \\
diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index ca198873ad..f17edf2f6b 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -3,17 +3,16 @@
 import os
 import sys
 import unittest.mock as mock
-
-from .py38compat import EnvironmentVarGuard
-
 from distutils import sysconfig
 from distutils.errors import DistutilsPlatformError
 from distutils.unixccompiler import UnixCCompiler
 from distutils.util import _clear_cached_macosx_ver
 
-from . import support
 import pytest
 
+from . import support
+from .py38compat import EnvironmentVarGuard
+
 
 @pytest.fixture(autouse=True)
 def save_values(monkeypatch):
diff --git a/distutils/tests/test_upload.py b/distutils/tests/test_upload.py
index 5c5bc59a40..0692f00160 100644
--- a/distutils/tests/test_upload.py
+++ b/distutils/tests/test_upload.py
@@ -2,15 +2,13 @@
 
 import os
 import unittest.mock as mock
-from urllib.request import HTTPError
-
-
 from distutils.command import upload as upload_mod
 from distutils.command.upload import upload
 from distutils.core import Distribution
 from distutils.errors import DistutilsError
-
 from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase
+from urllib.request import HTTPError
+
 import pytest
 
 PYPIRC_LONG_PASSWORD = """\
diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index 53c131e9e5..78d8b1e3b6 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -1,32 +1,30 @@
 """Tests for distutils.util."""
 
 import email
-import email.policy
 import email.generator
+import email.policy
 import io
 import os
 import sys
 import sysconfig as stdlib_sysconfig
 import unittest.mock as mock
 from copy import copy
-
-import pytest
-
+from distutils import sysconfig, util
+from distutils.errors import DistutilsByteCompileError, DistutilsPlatformError
 from distutils.util import (
-    get_platform,
-    convert_path,
+    byte_compile,
     change_root,
     check_environ,
+    convert_path,
+    get_host_platform,
+    get_platform,
+    grok_environment_error,
+    rfc822_escape,
     split_quoted,
     strtobool,
-    rfc822_escape,
-    byte_compile,
-    grok_environment_error,
-    get_host_platform,
 )
-from distutils import util
-from distutils import sysconfig
-from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError
+
+import pytest
 
 
 @pytest.fixture(autouse=True)
diff --git a/distutils/tests/test_version.py b/distutils/tests/test_version.py
index f89d1b3580..ddf1789b44 100644
--- a/distutils/tests/test_version.py
+++ b/distutils/tests/test_version.py
@@ -1,10 +1,9 @@
 """Tests for distutils.version."""
 
-import pytest
-
 import distutils
-from distutils.version import LooseVersion
-from distutils.version import StrictVersion
+from distutils.version import LooseVersion, StrictVersion
+
+import pytest
 
 
 @pytest.fixture(autouse=True)
diff --git a/distutils/tests/unix_compat.py b/distutils/tests/unix_compat.py
index 95fc8eebe2..a5d9ee45cc 100644
--- a/distutils/tests/unix_compat.py
+++ b/distutils/tests/unix_compat.py
@@ -8,7 +8,6 @@
 
 import pytest
 
-
 UNIX_ID_SUPPORT = grp and pwd
 UID_0_SUPPORT = UNIX_ID_SUPPORT and sys.platform != "cygwin"
 
diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index d749fe2529..a1fe2b57a2 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -13,18 +13,18 @@
   * link shared library handled by 'cc -shared'
 """
 
+import itertools
 import os
-import sys
 import re
 import shlex
-import itertools
+import sys
 
 from . import sysconfig
-from ._modified import newer
-from .ccompiler import CCompiler, gen_preprocess_options, gen_lib_options
-from .errors import DistutilsExecError, CompileError, LibError, LinkError
 from ._log import log
 from ._macos_compat import compiler_fixup
+from ._modified import newer
+from .ccompiler import CCompiler, gen_lib_options, gen_preprocess_options
+from .errors import CompileError, DistutilsExecError, LibError, LinkError
 
 # XXX Things not currently handled:
 #   * optimization/debug/warning flags; we just use whatever's in Python's
diff --git a/distutils/util.py b/distutils/util.py
index a24c940102..9ee77721b3 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -4,6 +4,7 @@
 one of the other *util.py modules.
 """
 
+import functools
 import importlib.util
 import os
 import re
@@ -11,12 +12,11 @@
 import subprocess
 import sys
 import sysconfig
-import functools
 
-from .errors import DistutilsPlatformError, DistutilsByteCompileError
+from ._log import log
 from ._modified import newer
+from .errors import DistutilsByteCompileError, DistutilsPlatformError
 from .spawn import spawn
-from ._log import log
 
 
 def get_host_platform():
diff --git a/distutils/version.py b/distutils/version.py
index 8ab76ddef4..aa7c5385ae 100644
--- a/distutils/version.py
+++ b/distutils/version.py
@@ -26,9 +26,9 @@
     of the same class, thus must follow the same rules)
 """
 
+import contextlib
 import re
 import warnings
-import contextlib
 
 
 @contextlib.contextmanager
diff --git a/distutils/versionpredicate.py b/distutils/versionpredicate.py
index c75e49486f..31c420168c 100644
--- a/distutils/versionpredicate.py
+++ b/distutils/versionpredicate.py
@@ -1,9 +1,9 @@
 """Module for parsing and testing package version predicate strings."""
 
-import re
-from . import version
 import operator
+import re
 
+from . import version
 
 re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", re.ASCII)
 # (package) (rest)
diff --git a/distutils/zosccompiler.py b/distutils/zosccompiler.py
index 6d70b7f04f..c7a7ca61cf 100644
--- a/distutils/zosccompiler.py
+++ b/distutils/zosccompiler.py
@@ -12,9 +12,10 @@
 """
 
 import os
-from .unixccompiler import UnixCCompiler
+
 from . import sysconfig
-from .errors import DistutilsExecError, CompileError
+from .errors import CompileError, DistutilsExecError
+from .unixccompiler import UnixCCompiler
 
 _cc_args = {
     'ibm-openxl': [

From 1d11b1c3e21d82be2d7645f2aa4bd6115d335b75 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:11:14 -0400
Subject: [PATCH 0488/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Remove now-extraneous adjacent string literals.
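
For context, the hazard these joins remove is implicit concatenation of
adjacent string literals, which is easy to reintroduce by dropping a comma.
A minimal illustration (not taken from the patched modules):

    # Adjacent literals merge silently, so a missing comma turns two
    # intended list items into one string.
    options = [
        "directory to put final built distributions in "
        "[default: dist]",  # one element, not two
    ]
    assert len(options) == 1

Joining each pair into a single literal, as below, leaves nothing for a
stray comma to corrupt.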
---
 distutils/_msvccompiler.py         | 2 +-
 distutils/ccompiler.py             | 4 ++--
 distutils/command/bdist.py         | 6 +++---
 distutils/command/bdist_dumb.py    | 8 ++++----
 distutils/command/bdist_rpm.py     | 6 +++---
 distutils/command/build_ext.py     | 4 +---
 distutils/command/build_scripts.py | 2 +-
 distutils/command/install.py       | 2 +-
 distutils/command/sdist.py         | 4 ++--
 distutils/fancy_getopt.py          | 3 +--
 distutils/filelist.py              | 6 ++----
 distutils/tests/test_version.py    | 4 +---
 distutils/text_file.py             | 2 +-
 13 files changed, 23 insertions(+), 30 deletions(-)

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index d08910ecf9..a2159fef83 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -253,7 +253,7 @@ def initialize(self, plat_name=None):
         vc_env = _get_vc_env(plat_spec)
         if not vc_env:
             raise DistutilsPlatformError(
-                "Unable to find a compatible " "Visual Studio installation."
+                "Unable to find a compatible Visual Studio installation."
             )
         self._configure(vc_env)
 
diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 03181cfb7c..8876d73098 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -465,7 +465,7 @@ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
             )
         else:
             raise TypeError(
-                "'runtime_library_dirs' (if supplied) " "must be a list of strings"
+                "'runtime_library_dirs' (if supplied) must be a list of strings"
             )
 
         return (libraries, library_dirs, runtime_library_dirs)
@@ -1245,7 +1245,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
                 lib_opts.append(lib_file)
             else:
                 compiler.warn(
-                    "no library file corresponding to " "'%s' found (skipping)" % lib
+                    "no library file corresponding to '%s' found (skipping)" % lib
                 )
         else:
             lib_opts.append(compiler.library_option(lib))
diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index f681b5531d..ade98445ba 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -47,18 +47,18 @@ class bdist(Command):
         (
             'dist-dir=',
             'd',
-            "directory to put final built distributions in " "[default: dist]",
+            "directory to put final built distributions in [default: dist]",
         ),
         ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
         (
             'owner=',
             'u',
-            "Owner name used when creating a tar file" " [default: current user]",
+            "Owner name used when creating a tar file [default: current user]",
         ),
         (
             'group=',
             'g',
-            "Group name used when creating a tar file" " [default: current group]",
+            "Group name used when creating a tar file [default: current group]",
         ),
     ]
 
diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index 41adf01418..06502d201e 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -28,7 +28,7 @@ class bdist_dumb(Command):
         (
             'format=',
             'f',
-            "archive format to create (tar, gztar, bztar, xztar, " "ztar, zip)",
+            "archive format to create (tar, gztar, bztar, xztar, ztar, zip)",
         ),
         (
             'keep-temp',
@@ -41,17 +41,17 @@ class bdist_dumb(Command):
         (
             'relative',
             None,
-            "build the archive using relative paths " "(default: false)",
+            "build the archive using relative paths (default: false)",
         ),
         (
             'owner=',
             'u',
-            "Owner name used when creating a tar file" " [default: current user]",
+            "Owner name used when creating a tar file [default: current user]",
         ),
         (
             'group=',
             'g',
-            "Group name used when creating a tar file" " [default: current group]",
+            "Group name used when creating a tar file [default: current group]",
         ),
     ]
 
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index 6a75e32fb1..649968a5eb 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -34,7 +34,7 @@ class bdist_rpm(Command):
         (
             'dist-dir=',
             'd',
-            "directory to put final RPM files in " "(and .spec files if --spec-only)",
+            "directory to put final RPM files in (and .spec files if --spec-only)",
         ),
         (
             'python=',
@@ -75,7 +75,7 @@ class bdist_rpm(Command):
         (
             'packager=',
             None,
-            "RPM packager (eg. \"Jane Doe \") " "[default: vendor]",
+            "RPM packager (eg. \"Jane Doe \") [default: vendor]",
         ),
         ('doc-files=', None, "list of documentation files (space or comma-separated)"),
         ('changelog=', None, "RPM changelog"),
@@ -214,7 +214,7 @@ def finalize_options(self):
 
         if os.name != 'posix':
             raise DistutilsPlatformError(
-                "don't know how to create RPM " "distributions on platform %s" % os.name
+                "don't know how to create RPM distributions on platform %s" % os.name
             )
         if self.binary_only and self.source_only:
             raise DistutilsOptionError(
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index aa9ed578f8..82e1e02070 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -427,9 +427,7 @@ def check_extensions_list(self, extensions):  # noqa: C901
             # Medium-easy stuff: same syntax/semantics, different names.
             ext.runtime_library_dirs = build_info.get('rpath')
             if 'def_file' in build_info:
-                log.warning(
-                    "'def_file' element of build info dict " "no longer supported"
-                )
+                log.warning("'def_file' element of build info dict no longer supported")
 
             # Non-trivial stuff: 'macros' split into 'define_macros'
             # and 'undef_macros'.
diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index 37bc585038..5f3902a027 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -156,7 +156,7 @@ def _validate_shebang(shebang, encoding):
         try:
             shebang.encode('utf-8')
         except UnicodeEncodeError:
-            raise ValueError(f"The shebang ({shebang!r}) is not encodable " "to utf-8")
+            raise ValueError(f"The shebang ({shebang!r}) is not encodable to utf-8")
 
         # If the script is encoded to a custom encoding (use a
         # #coding:xxx cookie), the shebang has to be encodable to
diff --git a/distutils/command/install.py b/distutils/command/install.py
index 575cebdbc8..85165717a7 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -701,7 +701,7 @@ def run(self):
             # internally, and not to sys.path, so we don't check the platform
             # matches what we are running.
             if self.warn_dir and build_plat != get_platform():
-                raise DistutilsPlatformError("Can't install when " "cross-compiling")
+                raise DistutilsPlatformError("Can't install when cross-compiling")
 
         # Run all sub-commands (at least those that need to be run)
         for cmd_name in self.get_sub_commands():
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index 6414ef5c06..97bae8279d 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -61,7 +61,7 @@ def checking_metadata(self):
         (
             'manifest-only',
             'o',
-            "just regenerate the manifest and then stop " "(implies --force-manifest)",
+            "just regenerate the manifest and then stop (implies --force-manifest)",
         ),
         (
             'force-manifest',
@@ -78,7 +78,7 @@ def checking_metadata(self):
         (
             'dist-dir=',
             'd',
-            "directory to put the source distribution archive(s) in " "[default: dist]",
+            "directory to put the source distribution archive(s) in [default: dist]",
         ),
         (
             'metadata-check',
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index cb646c6d9b..dccc54923f 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -161,8 +161,7 @@ def _grok_option_table(self):  # noqa: C901
             # Type- and value-check the option names
             if not isinstance(long, str) or len(long) < 2:
                 raise DistutilsGetoptError(
-                    ("invalid long option '%s': " "must be a string of length >= 2")
-                    % long
+                    ("invalid long option '%s': must be a string of length >= 2") % long
                 )
 
             if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
diff --git a/distutils/filelist.py b/distutils/filelist.py
index 5ce47936a9..71ffb2abe7 100644
--- a/distutils/filelist.py
+++ b/distutils/filelist.py
@@ -162,9 +162,7 @@ def process_template_line(self, line):  # noqa: C901
             self.debug_print("recursive-include {} {}".format(dir, ' '.join(patterns)))
             for pattern in patterns:
                 if not self.include_pattern(pattern, prefix=dir):
-                    msg = (
-                        "warning: no files found matching '%s' " "under directory '%s'"
-                    )
+                    msg = "warning: no files found matching '%s' under directory '%s'"
                     log.warning(msg, pattern, dir)
 
         elif action == 'recursive-exclude':
@@ -189,7 +187,7 @@ def process_template_line(self, line):  # noqa: C901
             self.debug_print("prune " + dir_pattern)
             if not self.exclude_pattern(None, prefix=dir_pattern):
                 log.warning(
-                    ("no previously-included directories found " "matching '%s'"),
+                    ("no previously-included directories found matching '%s'"),
                     dir_pattern,
                 )
         else:
diff --git a/distutils/tests/test_version.py b/distutils/tests/test_version.py
index ddf1789b44..1508e1cc0a 100644
--- a/distutils/tests/test_version.py
+++ b/distutils/tests/test_version.py
@@ -48,9 +48,7 @@ def test_cmp_strict(self):
                 if wanted is ValueError:
                     continue
                 else:
-                    raise AssertionError(
-                        f"cmp({v1}, {v2}) " "shouldn't raise ValueError"
-                    )
+                    raise AssertionError(f"cmp({v1}, {v2}) shouldn't raise ValueError")
             assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
             res = StrictVersion(v1)._cmp(v2)
             assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
diff --git a/distutils/text_file.py b/distutils/text_file.py
index 6f90cfe21d..0f846e3c52 100644
--- a/distutils/text_file.py
+++ b/distutils/text_file.py
@@ -220,7 +220,7 @@ def readline(self):  # noqa: C901
             if self.join_lines and buildup_line:
                 # oops: end of file
                 if line is None:
-                    self.warn("continuation line immediately precedes " "end-of-file")
+                    self.warn("continuation line immediately precedes end-of-file")
                     return buildup_line
 
                 if self.collapse_join:

From 7c006d8f0902ad602556e58f7180320abf18da3f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:19:17 -0400
Subject: [PATCH 0489/1761] Remove unreachable branch

---
 distutils/tests/test_clean.py | 2 +-
 distutils/version.py          | 2 --
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/distutils/tests/test_clean.py b/distutils/tests/test_clean.py
index 9b11fa40f7..bdbcd4fa46 100644
--- a/distutils/tests/test_clean.py
+++ b/distutils/tests/test_clean.py
@@ -36,7 +36,7 @@ def test_simple_run(self):
         cmd.run()
 
         # make sure the files were removed
-        for name, path in dirs:
+        for _name, path in dirs:
             assert not os.path.exists(path), '%s was not removed' % path
 
         # let's run the command again (should spit warnings but succeed)
diff --git a/distutils/version.py b/distutils/version.py
index aa7c5385ae..6e26e03007 100644
--- a/distutils/version.py
+++ b/distutils/version.py
@@ -212,8 +212,6 @@ def _cmp(self, other):  # noqa: C901
                 return -1
             else:
                 return 1
-        else:
-            assert False, "never get here"
 
 
 # end class StrictVersion

From 854780a8a9d5fd2038cc8826159d3639c81e6e15 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:21:29 -0400
Subject: [PATCH 0490/1761] Extract method for comparing prerelease. Satisfies
 complexity check.

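The extraction leaves _cmp simple enough to drop its `# noqa: C901`
suppression while preserving the three-way result. A sanity sketch of the
preserved semantics (assuming distutils.version's StrictVersion, which now
warns on use):

    import warnings
    from distutils.version import StrictVersion

    with warnings.catch_warnings():
        warnings.simplefilter('ignore')  # the Version classes are deprecated
        a, b = StrictVersion('1.2a1'), StrictVersion('1.2')
        assert a._cmp(b) == -1  # a prerelease sorts before the final release
        assert b._cmp(a) == 1
        assert a._cmp(StrictVersion('1.2a1')) == 0
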
---
 distutils/version.py | 29 ++++++++++++++++-------------
 1 file changed, 16 insertions(+), 13 deletions(-)

diff --git a/distutils/version.py b/distutils/version.py
index 6e26e03007..90adbc718a 100644
--- a/distutils/version.py
+++ b/distutils/version.py
@@ -178,7 +178,7 @@ def __str__(self):
 
         return vstring
 
-    def _cmp(self, other):  # noqa: C901
+    def _cmp(self, other):
         if isinstance(other, str):
             with suppress_known_deprecation():
                 other = StrictVersion(other)
@@ -193,25 +193,28 @@ def _cmp(self, other):  # noqa: C901
             else:
                 return 1
 
-        # have to compare prerelease
-        # case 1: neither has prerelease; they're equal
-        # case 2: self has prerelease, other doesn't; other is greater
-        # case 3: self doesn't have prerelease, other does: self is greater
-        # case 4: both have prerelease: must compare them!
+        return self._cmp_prerelease(other)
 
+    def _cmp_prerelease(self, other):
+        """
+        case 1: neither has prerelease; they're equal
+        case 2: self has prerelease, other doesn't; other is greater
+        case 3: self doesn't have prerelease, other does: self is greater
+        case 4: both have prerelease: must compare them!
+        """
         if not self.prerelease and not other.prerelease:
             return 0
         elif self.prerelease and not other.prerelease:
             return -1
         elif not self.prerelease and other.prerelease:
             return 1
-        elif self.prerelease and other.prerelease:
-            if self.prerelease == other.prerelease:
-                return 0
-            elif self.prerelease < other.prerelease:
-                return -1
-            else:
-                return 1
+
+        if self.prerelease == other.prerelease:
+            return 0
+        elif self.prerelease < other.prerelease:
+            return -1
+        else:
+            return 1
 
 
 # end class StrictVersion

From cec4ce55bf5eb16d7d654ca845375381d08fcd51 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:25:00 -0400
Subject: [PATCH 0491/1761] Re-organize for brevity.

---
 distutils/version.py | 14 +++++---------
 1 file changed, 5 insertions(+), 9 deletions(-)

diff --git a/distutils/version.py b/distutils/version.py
index 90adbc718a..30546a9dd6 100644
--- a/distutils/version.py
+++ b/distutils/version.py
@@ -185,15 +185,11 @@ def _cmp(self, other):
         elif not isinstance(other, StrictVersion):
             return NotImplemented
 
-        if self.version != other.version:
-            # numeric versions don't match
-            # prerelease stuff doesn't matter
-            if self.version < other.version:
-                return -1
-            else:
-                return 1
-
-        return self._cmp_prerelease(other)
+        if self.version == other.version:
+            # versions match; pre-release drives the comparison
+            return self._cmp_prerelease(other)
+
+        return -1 if self.version < other.version else 1
 
     def _cmp_prerelease(self, other):
         """

From 47db63930c35143f3b0dd8dab305b0b8194ff82a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:30:38 -0400
Subject: [PATCH 0492/1761] Rely on None==None and handle two cases together.

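The fold relies on an absent prerelease being stored as None on both sides,
so the "neither" case already falls out of the equality comparison. In
miniature:

    self_pre = other_pre = None    # neither version has a prerelease
    assert self_pre == other_pre   # None == None, so _cmp_prerelease
                                   # returns 0 without its own branch
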
---
 distutils/version.py | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)

diff --git a/distutils/version.py b/distutils/version.py
index 30546a9dd6..806d233ca5 100644
--- a/distutils/version.py
+++ b/distutils/version.py
@@ -193,14 +193,11 @@ def _cmp(self, other):
 
     def _cmp_prerelease(self, other):
         """
-        case 1: neither has prerelease; they're equal
-        case 2: self has prerelease, other doesn't; other is greater
-        case 3: self doesn't have prerelease, other does: self is greater
-        case 4: both have prerelease: must compare them!
+        case 1: self has prerelease, other doesn't; other is greater
+        case 2: self doesn't have prerelease, other does: self is greater
+        case 3: both or neither have prerelease: compare them!
         """
-        if not self.prerelease and not other.prerelease:
-            return 0
-        elif self.prerelease and not other.prerelease:
+        if self.prerelease and not other.prerelease:
             return -1
         elif not self.prerelease and other.prerelease:
             return 1

From 9390f46d67801364375653065922ab0d1b540c72 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:49:27 -0400
Subject: [PATCH 0493/1761] Refresh RangeMap from jaraco.collections 5.0.1.

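As refreshed here, RangeMap treats its keys as inclusive upper bounds
(operator.le by default). A usage sketch against the vendored copy:

    from distutils._collections import RangeMap

    # keys are the upper bounds of each range
    grades = RangeMap({60: 'F', 70: 'D', 80: 'C', 90: 'B', 100: 'A'})
    assert grades[55] == 'F'   # 55 <= 60
    assert grades[61] == 'D'   # first bound >= 61 is 70
    assert grades[90] == 'B'   # bounds are inclusive
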
---
 distutils/_collections.py | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/distutils/_collections.py b/distutils/_collections.py
index 5ad21cc7c9..6810a5e24d 100644
--- a/distutils/_collections.py
+++ b/distutils/_collections.py
@@ -1,8 +1,13 @@
+from __future__ import annotations
+
 import collections
 import functools
 import itertools
 import operator
 
+from collections.abc import Mapping
+from typing import Any
+
 
 # from jaraco.collections 3.5.1
 class DictStack(list, collections.abc.Mapping):
@@ -58,7 +63,7 @@ def __len__(self):
         return len(list(iter(self)))
 
 
-# from jaraco.collections 3.7
+# from jaraco.collections 5.0.1
 class RangeMap(dict):
     """
     A dictionary-like object that uses the keys as bounds for a range.
@@ -70,7 +75,7 @@ class RangeMap(dict):
     One may supply keyword parameters to be passed to the sort function used
     to sort keys (i.e. key, reverse) as sort_params.
 
-    Let's create a map that maps 1-3 -> 'a', 4-6 -> 'b'
+    Create a map that maps 1-3 -> 'a', 4-6 -> 'b'
 
     >>> r = RangeMap({3: 'a', 6: 'b'})  # boy, that was easy
     >>> r[1], r[2], r[3], r[4], r[5], r[6]
@@ -82,7 +87,7 @@ class RangeMap(dict):
     >>> r[4.5]
     'b'
 
-    But you'll notice that the way rangemap is defined, it must be open-ended
+    Notice that the way rangemap is defined, it must be open-ended
     on one side.
 
     >>> r[0]
@@ -140,7 +145,12 @@ class RangeMap(dict):
 
     """
 
-    def __init__(self, source, sort_params={}, key_match_comparator=operator.le):
+    def __init__(
+        self,
+        source,
+        sort_params: Mapping[str, Any] = {},
+        key_match_comparator=operator.le,
+    ):
         dict.__init__(self, source)
         self.sort_params = sort_params
         self.match = key_match_comparator

From 7414bc5f5459ad67385cc3e2de6d6995fe90ed1e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:51:08 -0400
Subject: [PATCH 0494/1761] Ruff fixes B007.

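B007 flags loop-control variables that the body never reads; the fix is a
leading underscore, which documents that the name is intentionally unused.
In miniature:

    pairs = [('build', '/tmp/build'), ('dist', '/tmp/dist')]
    for _name, path in pairs:  # _name is unpacked but deliberately unused
        print(path)
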
---
 distutils/command/build_clib.py | 2 +-
 distutils/command/build_py.py   | 4 ++--
 distutils/command/install.py    | 2 +-
 distutils/command/sdist.py      | 2 +-
 distutils/dist.py               | 4 ++--
 5 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
index 811e607e70..360575d0cb 100644
--- a/distutils/command/build_clib.py
+++ b/distutils/command/build_clib.py
@@ -155,7 +155,7 @@ def get_library_names(self):
             return None
 
         lib_names = []
-        for lib_name, build_info in self.libraries:
+        for lib_name, _build_info in self.libraries:
             lib_names.append(lib_name)
         return lib_names
 
diff --git a/distutils/command/build_py.py b/distutils/command/build_py.py
index a15d0af519..56e6fa2e66 100644
--- a/distutils/command/build_py.py
+++ b/distutils/command/build_py.py
@@ -136,7 +136,7 @@ def find_data_files(self, package, src_dir):
 
     def build_package_data(self):
         """Copy data files into build directory"""
-        for package, src_dir, build_dir, filenames in self.data_files:
+        for _package, src_dir, build_dir, filenames in self.data_files:
             for filename in filenames:
                 target = os.path.join(build_dir, filename)
                 self.mkpath(os.path.dirname(target))
@@ -309,7 +309,7 @@ def get_module_outfile(self, build_dir, package, module):
     def get_outputs(self, include_bytecode=1):
         modules = self.find_all_modules()
         outputs = []
-        for package, module, module_file in modules:
+        for package, module, _module_file in modules:
             package = package.split('.')
             filename = self.get_module_outfile(self.build_lib, package, module)
             outputs.append(filename)
diff --git a/distutils/command/install.py b/distutils/command/install.py
index 85165717a7..8e920be4de 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -683,7 +683,7 @@ def create_home_path(self):
         if not self.user:
             return
         home = convert_path(os.path.expanduser("~"))
-        for name, path in self.config_vars.items():
+        for _name, path in self.config_vars.items():
             if str(path).startswith(home) and not os.path.isdir(path):
                 self.debug_print("os.makedirs('%s', 0o700)" % path)
                 os.makedirs(path, 0o700)
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index 97bae8279d..387d27c90b 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -308,7 +308,7 @@ def _add_defaults_python(self):
 
         # getting package_data files
         # (computed in build_py.data_files by build_py.finalize_options)
-        for pkg, src_dir, build_dir, filenames in build_py.data_files:
+        for _pkg, src_dir, _build_dir, filenames in build_py.data_files:
             for filename in filenames:
                 self.filelist.append(os.path.join(src_dir, filename))
 
diff --git a/distutils/dist.py b/distutils/dist.py
index 1759120c92..c32ffb6c0e 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -414,7 +414,7 @@ def parse_config_files(self, filenames=None):  # noqa: C901
         # to set Distribution options.
 
         if 'global' in self.command_options:
-            for opt, (src, val) in self.command_options['global'].items():
+            for opt, (_src, val) in self.command_options['global'].items():
                 alias = self.negative_opt.get(opt)
                 try:
                     if alias:
@@ -585,7 +585,7 @@ def _parse_command_opts(self, parser, args):  # noqa: C901
             cmd_class.help_options, list
         ):
             help_option_found = 0
-            for help_option, short, desc, func in cmd_class.help_options:
+            for help_option, _short, _desc, func in cmd_class.help_options:
                 if hasattr(opts, parser.get_attr_name(help_option)):
                     help_option_found = 1
                     if callable(func):

From 448a2a12848ca7e99b83958f59db44bb68f6120b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 09:58:22 -0400
Subject: [PATCH 0495/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add immutable type declarations to satisfy B006 checks.
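
B006 objects to mutable default arguments because the default is evaluated
once, at definition time, and shared across calls. A sketch of the failure
mode, and of the annotate-and-copy approach used below (the defaults stay
empty lists, but are declared as read-only Sequences and copied before use):

    from typing import Sequence

    def remember(item, seen=[]):        # B006: one shared list
        seen.append(item)
        return seen

    assert remember('a') == ['a']
    assert remember('b') == ['a', 'b']  # state leaks between calls

    def check(extra: Sequence[str] = []) -> list:
        return ['base'] + list(extra)   # copies; never mutates the default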
---
 distutils/_collections.py   | 1 -
 distutils/command/config.py | 7 +++++--
 distutils/dist.py           | 5 ++++-
 distutils/fancy_getopt.py   | 3 ++-
 distutils/tests/__init__.py | 4 +++-
 5 files changed, 14 insertions(+), 6 deletions(-)

diff --git a/distutils/_collections.py b/distutils/_collections.py
index 6810a5e24d..d11a83467c 100644
--- a/distutils/_collections.py
+++ b/distutils/_collections.py
@@ -4,7 +4,6 @@
 import functools
 import itertools
 import operator
-
 from collections.abc import Mapping
 from typing import Any
 
diff --git a/distutils/command/config.py b/distutils/command/config.py
index 38a5ff5159..d4b2b0a362 100644
--- a/distutils/command/config.py
+++ b/distutils/command/config.py
@@ -9,9 +9,12 @@
 this header file lives".
 """
 
+from __future__ import annotations
+
 import os
 import pathlib
 import re
+from collections.abc import Sequence
 from distutils._log import log
 
 from ..core import Command
@@ -325,7 +328,7 @@ def check_lib(
         library_dirs=None,
         headers=None,
         include_dirs=None,
-        other_libraries=[],
+        other_libraries: Sequence[str] = [],
     ):
         """Determine if 'library' is available to be linked against,
         without actually checking that any particular symbols are provided
@@ -340,7 +343,7 @@ def check_lib(
             "int main (void) { }",
             headers,
             include_dirs,
-            [library] + other_libraries,
+            [library] + list(other_libraries),
             library_dirs,
         )
 
diff --git a/distutils/dist.py b/distutils/dist.py
index c32ffb6c0e..f29a34faba 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -10,6 +10,7 @@
 import pathlib
 import re
 import sys
+from collections.abc import Iterable
 from email import message_from_file
 
 try:
@@ -620,7 +621,9 @@ def finalize_options(self):
                 value = [elm.strip() for elm in value.split(',')]
                 setattr(self.metadata, attr, value)
 
-    def _show_help(self, parser, global_options=1, display_options=1, commands=[]):
+    def _show_help(
+        self, parser, global_options=1, display_options=1, commands: Iterable = ()
+    ):
         """Show help for the setup script command-line in the form of
         several lists of command-line options.  'parser' should be a
         FancyGetopt instance; do not expect it to be returned in the
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index dccc54923f..e905aede4d 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -12,6 +12,7 @@
 import re
 import string
 import sys
+from typing import Any, Sequence
 
 from .errors import DistutilsArgError, DistutilsGetoptError
 
@@ -448,7 +449,7 @@ class OptionDummy:
     """Dummy class just used as a place to hold command-line option
     values as instance attributes."""
 
-    def __init__(self, options=[]):
+    def __init__(self, options: Sequence[Any] = []):
         """Create a new OptionDummy instance.  The attributes listed in
         'options' will be initialized to None."""
         for opt in options:
diff --git a/distutils/tests/__init__.py b/distutils/tests/__init__.py
index c475e5d0f2..20dfe8f19b 100644
--- a/distutils/tests/__init__.py
+++ b/distutils/tests/__init__.py
@@ -7,8 +7,10 @@
 by import rather than matching pre-defined names.
 """
 
+from typing import Sequence
 
-def missing_compiler_executable(cmd_names=[]):  # pragma: no cover
+
+def missing_compiler_executable(cmd_names: Sequence[str] = []):  # pragma: no cover
     """Check if the compiler components used to build the interpreter exist.
 
     Check for the existence of the compiler executables whose names are listed

From a53e4258e144f03f1b48f1fced74aaf9d770f911 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 10:41:53 -0400
Subject: [PATCH 0496/1761] Fix B026 by moving star arg ahead of keyword arg.

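B026 flags star-argument unpacking placed after keyword arguments in a call:
it is legal, but the textual order no longer matches the binding order,
since the unpacked values still fill positional slots ahead of the keywords.
In miniature:

    def node(msg, *children, level=0):
        return (msg, children, level)

    kids = ('a', 'b')
    # flagged form: node('m', level=1, *kids) -- kids bind before level
    assert node('m', *kids, level=1) == ('m', ('a', 'b'), 1)
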
---
 distutils/command/check.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/command/check.py b/distutils/command/check.py
index 6b42a34f6d..28599e109c 100644
--- a/distutils/command/check.py
+++ b/distutils/command/check.py
@@ -33,7 +33,7 @@ def __init__(
         def system_message(self, level, message, *children, **kwargs):
             self.messages.append((level, message, children, kwargs))
             return docutils.nodes.system_message(
-                message, level=level, type=self.levels[level], *children, **kwargs
+                message, *children, level=level, type=self.levels[level], **kwargs
             )
 
 

From db216f48ffc06eee7631f7060d3288b32e4d61f5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 10:53:23 -0400
Subject: [PATCH 0497/1761] Extract 'make_iterable' for upload and register
 commands, avoiding masking loop input variable (B020).

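B020 fires when a loop variable shadows the iterable it loops over
(`for value in value`), which destroys the original binding on the first
iteration. The extracted helper normalizes a scalar-or-sequence field up
front; roughly (the real code defers to the vendored always_iterable):

    def make_iterable(values, base_type=(bytes, str, tuple)):
        if values is None:
            return [None]              # preserve a single None entry
        if isinstance(values, base_type):
            return [values]            # treat these as atoms, not sequences
        try:
            return iter(values)
        except TypeError:
            return [values]

    assert list(make_iterable('name')) == ['name']
    assert list(make_iterable(['a', 'b'])) == ['a', 'b']
    assert list(make_iterable(None)) == [None]
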
---
 distutils/command/register.py | 15 +++++++++------
 distutils/command/upload.py   | 14 +++++++++-----
 2 files changed, 18 insertions(+), 11 deletions(-)

diff --git a/distutils/command/register.py b/distutils/command/register.py
index e5e6b379ad..ee6c54daba 100644
--- a/distutils/command/register.py
+++ b/distutils/command/register.py
@@ -13,6 +13,7 @@
 from distutils._log import log
 from warnings import warn
 
+from .._itertools import always_iterable
 from ..core import PyPIRCCommand
 
 
@@ -273,12 +274,8 @@ def post_to_server(self, data, auth=None):  # noqa: C901
         sep_boundary = '\n--' + boundary
         end_boundary = sep_boundary + '--'
         body = io.StringIO()
-        for key, value in data.items():
-            # handle multiple entries for the same name
-            if type(value) not in (type([]), type(())):
-                value = [value]
-            for value in value:
-                value = str(value)
+        for key, values in data.items():
+            for value in map(str, make_iterable(values)):
                 body.write(sep_boundary)
                 body.write('\nContent-Disposition: form-data; name="%s"' % key)
                 body.write("\n\n")
@@ -318,3 +315,9 @@ def post_to_server(self, data, auth=None):  # noqa: C901
             msg = '\n'.join(('-' * 75, data, '-' * 75))
             self.announce(msg, logging.INFO)
         return result
+
+
+def make_iterable(values):
+    if values is None:
+        return [None]
+    return always_iterable(values)
diff --git a/distutils/command/upload.py b/distutils/command/upload.py
index e61a9ea8a5..cf541f8a82 100644
--- a/distutils/command/upload.py
+++ b/distutils/command/upload.py
@@ -13,6 +13,7 @@
 from urllib.parse import urlparse
 from urllib.request import HTTPError, Request, urlopen
 
+from .._itertools import always_iterable
 from ..core import PyPIRCCommand
 from ..errors import DistutilsError, DistutilsOptionError
 from ..spawn import spawn
@@ -151,12 +152,9 @@ def upload_file(self, command, pyversion, filename):  # noqa: C901
         sep_boundary = b'\r\n--' + boundary.encode('ascii')
         end_boundary = sep_boundary + b'--\r\n'
         body = io.BytesIO()
-        for key, value in data.items():
+        for key, values in data.items():
             title = '\r\nContent-Disposition: form-data; name="%s"' % key
-            # handle multiple entries for the same name
-            if not isinstance(value, list):
-                value = [value]
-            for value in value:
+            for value in make_iterable(values):
                 if type(value) is tuple:
                     title += '; filename="%s"' % value[0]
                     value = value[1]
@@ -202,3 +200,9 @@ def upload_file(self, command, pyversion, filename):  # noqa: C901
             msg = f'Upload failed ({status}): {reason}'
             self.announce(msg, logging.ERROR)
             raise DistutilsError(msg)
+
+
+def make_iterable(values):
+    if values is None:
+        return [None]
+    return always_iterable(values, base_type=(bytes, str, tuple))

From 9f2922d9d035de477f7c97a2dd6a23004c024e4f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 10:54:38 -0400
Subject: [PATCH 0498/1761] Fix pointless comparison (B015).

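B015 catches an expression statement that is nothing but a comparison: the
result is computed and thrown away, so the test was not checking anything.
In miniature:

    out = 'bar\n'
    out == 'baz\n'         # evaluates to False, silently discarded
    assert out == 'bar\n'  # an assert actually fails on mismatch
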
---
 distutils/tests/test_core.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/tests/test_core.py b/distutils/tests/test_core.py
index 5916718027..bad3fb7e83 100644
--- a/distutils/tests/test_core.py
+++ b/distutils/tests/test_core.py
@@ -123,7 +123,7 @@ def test_debug_mode(self, capsys, monkeypatch):
         # this covers the code called when DEBUG is set
         sys.argv = ['setup.py', '--name']
         distutils.core.setup(name='bar')
-        capsys.readouterr().out == 'bar\n'
+        assert capsys.readouterr().out == 'bar\n'
         monkeypatch.setattr(distutils.core, 'DEBUG', True)
         distutils.core.setup(name='bar')
         wanted = "options (after parsing config files):\n"

From 0543254d8bd57746429b9a6650689cc90429fc10 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 11:01:46 -0400
Subject: [PATCH 0499/1761] Remove Python 3.7 compatibility from build_ext

---
 distutils/command/build_ext.py  |  3 +--
 distutils/command/py37compat.py | 31 -------------------------------
 2 files changed, 1 insertion(+), 33 deletions(-)
 delete mode 100644 distutils/command/py37compat.py

diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 82e1e02070..06d949aff1 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -24,7 +24,6 @@
 from ..extension import Extension
 from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
 from ..util import get_platform
-from . import py37compat
 
 # An extension name is just a dot-separated list of Python NAMEs (ie.
 # the same as a fully-qualified module name).
@@ -798,4 +797,4 @@ def get_libraries(self, ext):  # noqa: C901
                 ldversion = get_config_var('LDVERSION')
                 return ext.libraries + ['python' + ldversion]
 
-        return ext.libraries + py37compat.pythonlib()
+        return ext.libraries
diff --git a/distutils/command/py37compat.py b/distutils/command/py37compat.py
deleted file mode 100644
index aa0c0a7fcd..0000000000
--- a/distutils/command/py37compat.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import sys
-
-
-def _pythonlib_compat():
-    """
-    On Python 3.7 and earlier, distutils would include the Python
-    library. See pypa/distutils#9.
-    """
-    from distutils import sysconfig
-
-    if not sysconfig.get_config_var('Py_ENABLED_SHARED'):
-        return
-
-    yield 'python{}.{}{}'.format(
-        sys.hexversion >> 24,
-        (sys.hexversion >> 16) & 0xFF,
-        sysconfig.get_config_var('ABIFLAGS'),
-    )
-
-
-def compose(f1, f2):
-    return lambda *args, **kwargs: f1(f2(*args, **kwargs))
-
-
-pythonlib = (
-    compose(list, _pythonlib_compat)
-    if sys.version_info < (3, 8)
-    and sys.platform != 'darwin'
-    and sys.platform[:3] != 'aix'
-    else list
-)

From 6b6633af0e0c53243d9991fe9df3f29365c67db6 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 11:03:45 -0400
Subject: [PATCH 0500/1761] Remove Python 3.7 compatibility from
 test_sysconfig.

---
 distutils/tests/py37compat.py     | 18 ------------------
 distutils/tests/test_sysconfig.py |  4 +---
 2 files changed, 1 insertion(+), 21 deletions(-)
 delete mode 100644 distutils/tests/py37compat.py

diff --git a/distutils/tests/py37compat.py b/distutils/tests/py37compat.py
deleted file mode 100644
index 76d3551c49..0000000000
--- a/distutils/tests/py37compat.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import os
-import platform
-import sys
-
-
-def subprocess_args_compat(*args):
-    return list(map(os.fspath, args))
-
-
-def subprocess_args_passthrough(*args):
-    return list(args)
-
-
-subprocess_args = (
-    subprocess_args_compat
-    if platform.system() == "Windows" and sys.version_info < (3, 8)
-    else subprocess_args_passthrough
-)
diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index ce13d6bdc3..bc14d3c05a 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -16,8 +16,6 @@
 import pytest
 from jaraco.text import trim
 
-from . import py37compat
-
 
 def _gen_makefile(root, contents):
     jaraco.path.build({'Makefile': trim(contents)}, root)
@@ -251,7 +249,7 @@ def test_customize_compiler_before_get_config_vars(self, tmp_path):
             tmp_path,
         )
         p = subprocess.Popen(
-            py37compat.subprocess_args(sys.executable, tmp_path / 'file'),
+            [sys.executable, tmp_path / 'file'],
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
             universal_newlines=True,

From 55982565e745262ae031a2001bd35a74867218aa Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 11:07:52 -0400
Subject: [PATCH 0501/1761] Move comment nearer the skip directive. Update
 wording.

---
 distutils/tests/test_sysconfig.py | 27 +++++++++++++--------------
 1 file changed, 13 insertions(+), 14 deletions(-)

diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index bc14d3c05a..c55896661f 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -202,22 +202,21 @@ def test_sysconfig_module(self):
             'LDFLAGS'
         )
 
+    # On macOS, binary installers support extension module building on
+    # various levels of the operating system with differing Xcode
+    # configurations, requiring customization of some of the
+    # compiler configuration directives to suit the environment on
+    # the installed machine. Some of these customizations may require
+    # running external programs and are thus deferred until needed by
+    # the first extension module build. Only
+    # the Distutils version of sysconfig is used for extension module
+    # builds, which happens earlier in the Distutils tests. This may
+    # cause the following tests to fail since no tests have caused
+    # the global version of sysconfig to call the customization yet.
+    # The solution for now is to simply skip this test in this case.
+    # The longer-term solution is to only have one version of sysconfig.
     @pytest.mark.skipif("sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER')")
     def test_sysconfig_compiler_vars(self):
-        # On OS X, binary installers support extension module building on
-        # various levels of the operating system with differing Xcode
-        # configurations.  This requires customization of some of the
-        # compiler configuration directives to suit the environment on
-        # the installed machine.  Some of these customizations may require
-        # running external programs and, so, are deferred until needed by
-        # the first extension module build.  With Python 3.3, only
-        # the Distutils version of sysconfig is used for extension module
-        # builds, which happens earlier in the Distutils tests.  This may
-        # cause the following tests to fail since no tests have caused
-        # the global version of sysconfig to call the customization yet.
-        # The solution for now is to simply skip this test in this case.
-        # The longer-term solution is to only have one version of sysconfig.
-
         import sysconfig as global_sysconfig
 
         if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'):

From 48919ee0881caba6930ea8cdc79aaf834203a165 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 11:55:51 -0400
Subject: [PATCH 0502/1761] Add news fragment.

---
 newsfragments/4298.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4298.feature.rst

diff --git a/newsfragments/4298.feature.rst b/newsfragments/4298.feature.rst
new file mode 100644
index 0000000000..21d680d486
--- /dev/null
+++ b/newsfragments/4298.feature.rst
@@ -0,0 +1 @@
+Merged with pypa/distutils@55982565e, including interoperability improvements for rfc822_escape (pypa/distutils#213), dynamic resolution of config_h_filename for Python 3.13 compatibility (pypa/distutils#219), added support for the z/OS compiler (pypa/distutils#216), modernized compiler options in unixccompiler (pypa/distutils#214), fixed accumulating flags bug after compile/link (pypa/distutils#207), fixed encoding warnings (pypa/distutils#236), and general quality improvements (pypa/distutils#234).

From 6969162030244196b59fb561e0f316230e82db01 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 11:58:12 -0400
Subject: [PATCH 0503/1761] Omit distutils from coverage checks.

---
 .coveragerc | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.coveragerc b/.coveragerc
index 14424a43dd..1f214acf38 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,6 +2,7 @@
 omit =
 	# leading `*/` for pytest-dev/pytest-cov#456
 	*/.tox/*
+	*/setuptools/_distutils/*
 disable_warnings =
 	couldnt-parse
 

From e5087502969cd3ebf6aa2015b805142fbe1afc84 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 12:13:38 -0400
Subject: [PATCH 0504/1761] Fix EncodingWarnings in tools.finalize.

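Under PEP 597 (python -X warn_default_encoding), text-mode I/O that falls
back to the locale encoding emits an EncodingWarning; naming the encoding
makes the decode deterministic and silences the warning. Note that
encoding='utf-8' already implies text mode, so text=True becomes redundant
but harmless. A sketch:

    import subprocess
    import sys

    # warns under -X warn_default_encoding:
    #   subprocess.check_output([sys.executable, '-V'], text=True)

    out = subprocess.check_output([sys.executable, '-V'], encoding='utf-8')
    assert out.startswith('Python')
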
---
 tools/finalize.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/finalize.py b/tools/finalize.py
index f79f5b3b45..3ba5d16ac7 100644
--- a/tools/finalize.py
+++ b/tools/finalize.py
@@ -23,7 +23,7 @@
 
 def get_version():
     cmd = bump_version_command + ['--dry-run', '--verbose']
-    out = subprocess.check_output(cmd, text=True)
+    out = subprocess.check_output(cmd, text=True, encoding='utf-8')
     return re.search('^new_version=(.*)', out, re.MULTILINE).group(1)
 
 

From 92b45e9817ae829a5ca5a5962313a56b943cad91 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 12:13:44 -0400
Subject: [PATCH 0505/1761] =?UTF-8?q?Bump=20version:=2069.2.0=20=E2=86=92?=
 =?UTF-8?q?=2069.3.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 NEWS.rst                       | 9 +++++++++
 newsfragments/3593.feature.rst | 1 -
 setup.cfg                      | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/3593.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 1236141a7c..a76d5b66d7 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 69.2.0
+current_version = 69.3.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 2e849bdc5f..7822ec6325 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v69.3.0
+=======
+
+Features
+--------
+
+- Support PEP 625 by canonicalizing package name and version in filenames. (#3593)
+
+
 v69.2.0
 =======
 
diff --git a/newsfragments/3593.feature.rst b/newsfragments/3593.feature.rst
deleted file mode 100644
index 2ec6f9714e..0000000000
--- a/newsfragments/3593.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Support PEP 625 by canonicalizing package name and version in filenames.
diff --git a/setup.cfg b/setup.cfg
index aae1465375..bab3efa52c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = setuptools
-version = 69.2.0
+version = 69.3.0
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
 description = Easily download, build, install, upgrade, and uninstall Python packages

From fd5f55ea008b32d427d7059799302d65fd0cd0cd Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 12:35:18 -0400
Subject: [PATCH 0506/1761] Refresh unpinned vendored dependencies.

Closes #4253
---
 newsfragments/4253.feature.rst                |    1 +
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    2 -
 .../METADATA                                  |   44 +
 .../backports.tarfile-1.0.0.dist-info/RECORD  |    9 +
 .../REQUESTED                                 |    0
 .../WHEEL                                     |    2 +-
 .../top_level.txt                             |    1 +
 pkg_resources/_vendor/backports/tarfile.py    | 2900 +++++++++++++++++
 .../RECORD                                    |   58 +-
 .../jaraco.context-4.3.0.dist-info/METADATA   |   68 -
 .../jaraco.context-4.3.0.dist-info/RECORD     |    8 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    2 -
 .../jaraco.context-5.3.0.dist-info/METADATA   |   75 +
 .../jaraco.context-5.3.0.dist-info/RECORD     |    8 +
 .../jaraco.context-5.3.0.dist-info}/WHEEL     |    2 +-
 .../top_level.txt                             |    0
 .../jaraco.functools-3.6.0.dist-info/RECORD   |    8 -
 .../INSTALLER                                 |    0
 .../jaraco.functools-4.0.0.dist-info}/LICENSE |    2 -
 .../METADATA                                  |   37 +-
 .../jaraco.functools-4.0.0.dist-info/RECORD   |   10 +
 .../jaraco.functools-4.0.0.dist-info}/WHEEL   |    2 +-
 .../top_level.txt                             |    0
 .../jaraco.text-3.7.0.dist-info/RECORD        |    2 +-
 pkg_resources/_vendor/jaraco/context.py       |  137 +-
 .../{functools.py => functools/__init__.py}   |  205 +-
 .../_vendor/jaraco/functools/__init__.pyi     |  128 +
 .../_vendor/jaraco/functools/py.typed         |    0
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../METADATA                                  |   35 +-
 .../more_itertools-10.2.0.dist-info/RECORD    |   15 +
 .../WHEEL                                     |    0
 .../more_itertools-9.1.0.dist-info/RECORD     |   15 -
 .../_vendor/more_itertools/__init__.py        |    2 +-
 pkg_resources/_vendor/more_itertools/more.py  |  400 ++-
 pkg_resources/_vendor/more_itertools/more.pyi |   41 +-
 .../_vendor/more_itertools/recipes.py         |  230 +-
 .../_vendor/more_itertools/recipes.pyi        |   29 +-
 .../_vendor/packaging-23.1.dist-info/RECORD   |   28 +-
 .../platformdirs-2.6.2.dist-info/RECORD       |   16 +-
 .../typing_extensions-4.4.0.dist-info/RECORD  |    2 +-
 pkg_resources/_vendor/vendored.txt            |    2 +
 .../_vendor/zipp-3.7.0.dist-info/RECORD       |    2 +-
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    2 -
 .../METADATA                                  |   44 +
 .../backports.tarfile-1.0.0.dist-info/RECORD  |    9 +
 .../REQUESTED                                 |    0
 .../backports.tarfile-1.0.0.dist-info}/WHEEL  |    2 +-
 .../top_level.txt                             |    1 +
 setuptools/_vendor/backports/tarfile.py       | 2900 +++++++++++++++++
 .../importlib_metadata-6.0.0.dist-info/RECORD |   18 +-
 .../RECORD                                    |   58 +-
 .../jaraco.context-4.3.0.dist-info/METADATA   |   68 -
 .../jaraco.context-4.3.0.dist-info/RECORD     |    8 -
 .../jaraco.context-5.3.0.dist-info/INSTALLER  |    1 +
 .../jaraco.context-5.3.0.dist-info/LICENSE    |   17 +
 .../jaraco.context-5.3.0.dist-info/METADATA   |   75 +
 .../jaraco.context-5.3.0.dist-info/RECORD     |    8 +
 .../jaraco.context-5.3.0.dist-info/WHEEL      |    5 +
 .../top_level.txt                             |    0
 .../jaraco.functools-3.6.0.dist-info/RECORD   |    8 -
 .../INSTALLER                                 |    1 +
 .../jaraco.functools-4.0.0.dist-info/LICENSE  |   17 +
 .../METADATA                                  |   37 +-
 .../jaraco.functools-4.0.0.dist-info/RECORD   |   10 +
 .../jaraco.functools-4.0.0.dist-info/WHEEL    |    5 +
 .../top_level.txt                             |    0
 .../jaraco.text-3.7.0.dist-info/RECORD        |    2 +-
 setuptools/_vendor/jaraco/context.py          |  137 +-
 .../{functools.py => functools/__init__.py}   |  205 +-
 .../_vendor/jaraco/functools/__init__.pyi     |  128 +
 setuptools/_vendor/jaraco/functools/py.typed  |    0
 .../more_itertools-8.8.0.dist-info/RECORD     |    6 +-
 .../ordered_set-3.1.1.dist-info/RECORD        |    4 +-
 .../_vendor/ordered_set-3.1.1.dist-info/WHEEL |    2 +-
 .../_vendor/packaging-23.1.dist-info/RECORD   |   28 +-
 .../_vendor/tomli-2.0.1.dist-info/RECORD      |    8 +-
 .../typing_extensions-4.0.1.dist-info/RECORD  |    2 +-
 setuptools/_vendor/vendored.txt               |    2 +
 .../_vendor/zipp-3.7.0.dist-info/RECORD       |    2 +-
 tools/vendored.py                             |   15 +-
 85 files changed, 7642 insertions(+), 721 deletions(-)
 create mode 100644 newsfragments/4253.feature.rst
 rename pkg_resources/_vendor/{jaraco.context-4.3.0.dist-info => backports.tarfile-1.0.0.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{jaraco.context-4.3.0.dist-info => backports.tarfile-1.0.0.dist-info}/LICENSE (97%)
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
 rename pkg_resources/_vendor/{jaraco.functools-3.6.0.dist-info => backports.tarfile-1.0.0.dist-info}/WHEEL (65%)
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
 create mode 100644 pkg_resources/_vendor/backports/tarfile.py
 delete mode 100644 pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/RECORD
 rename pkg_resources/_vendor/{jaraco.functools-3.6.0.dist-info => jaraco.context-5.3.0.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{jaraco.functools-3.6.0.dist-info => jaraco.context-5.3.0.dist-info}/LICENSE (97%)
 create mode 100644 pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD
 rename {setuptools/_vendor/jaraco.context-4.3.0.dist-info => pkg_resources/_vendor/jaraco.context-5.3.0.dist-info}/WHEEL (65%)
 rename pkg_resources/_vendor/{jaraco.context-4.3.0.dist-info => jaraco.context-5.3.0.dist-info}/top_level.txt (100%)
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/RECORD
 rename pkg_resources/_vendor/{more_itertools-9.1.0.dist-info => jaraco.functools-4.0.0.dist-info}/INSTALLER (100%)
 rename {setuptools/_vendor/jaraco.functools-3.6.0.dist-info => pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info}/LICENSE (97%)
 rename pkg_resources/_vendor/{jaraco.functools-3.6.0.dist-info => jaraco.functools-4.0.0.dist-info}/METADATA (69%)
 create mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
 rename {setuptools/_vendor/jaraco.functools-3.6.0.dist-info => pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info}/WHEEL (65%)
 rename pkg_resources/_vendor/{jaraco.functools-3.6.0.dist-info => jaraco.functools-4.0.0.dist-info}/top_level.txt (100%)
 rename pkg_resources/_vendor/jaraco/{functools.py => functools/__init__.py} (79%)
 create mode 100644 pkg_resources/_vendor/jaraco/functools/__init__.pyi
 create mode 100644 pkg_resources/_vendor/jaraco/functools/py.typed
 rename {setuptools/_vendor/jaraco.context-4.3.0.dist-info => pkg_resources/_vendor/more_itertools-10.2.0.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{more_itertools-9.1.0.dist-info => more_itertools-10.2.0.dist-info}/LICENSE (100%)
 rename pkg_resources/_vendor/{more_itertools-9.1.0.dist-info => more_itertools-10.2.0.dist-info}/METADATA (90%)
 create mode 100644 pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
 rename pkg_resources/_vendor/{more_itertools-9.1.0.dist-info => more_itertools-10.2.0.dist-info}/WHEEL (100%)
 delete mode 100644 pkg_resources/_vendor/more_itertools-9.1.0.dist-info/RECORD
 rename setuptools/_vendor/{jaraco.functools-3.6.0.dist-info => backports.tarfile-1.0.0.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{jaraco.context-4.3.0.dist-info => backports.tarfile-1.0.0.dist-info}/LICENSE (97%)
 create mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
 create mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
 rename {pkg_resources/_vendor/jaraco.context-4.3.0.dist-info => setuptools/_vendor/backports.tarfile-1.0.0.dist-info}/WHEEL (65%)
 create mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
 create mode 100644 setuptools/_vendor/backports/tarfile.py
 delete mode 100644 setuptools/_vendor/jaraco.context-4.3.0.dist-info/METADATA
 delete mode 100644 setuptools/_vendor/jaraco.context-4.3.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
 create mode 100644 setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
 create mode 100644 setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA
 create mode 100644 setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
 rename setuptools/_vendor/{jaraco.context-4.3.0.dist-info => jaraco.context-5.3.0.dist-info}/top_level.txt (100%)
 delete mode 100644 setuptools/_vendor/jaraco.functools-3.6.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
 rename setuptools/_vendor/{jaraco.functools-3.6.0.dist-info => jaraco.functools-4.0.0.dist-info}/METADATA (69%)
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
 rename setuptools/_vendor/{jaraco.functools-3.6.0.dist-info => jaraco.functools-4.0.0.dist-info}/top_level.txt (100%)
 rename setuptools/_vendor/jaraco/{functools.py => functools/__init__.py} (79%)
 create mode 100644 setuptools/_vendor/jaraco/functools/__init__.pyi
 create mode 100644 setuptools/_vendor/jaraco/functools/py.typed

diff --git a/newsfragments/4253.feature.rst b/newsfragments/4253.feature.rst
new file mode 100644
index 0000000000..acc51ea4bd
--- /dev/null
+++ b/newsfragments/4253.feature.rst
@@ -0,0 +1 @@
+Refresh unpinned vendored dependencies.
\ No newline at end of file
diff --git a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/INSTALLER b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/LICENSE b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
similarity index 97%
rename from pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/LICENSE
rename to pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
index 353924be0e..1bb5a44356 100644
--- a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/LICENSE
+++ b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
@@ -1,5 +1,3 @@
-Copyright Jason R. Coombs
-
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to
 deal in the Software without restriction, including without limitation the
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
new file mode 100644
index 0000000000..e7b64c87f8
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
@@ -0,0 +1,44 @@
+Metadata-Version: 2.1
+Name: backports.tarfile
+Version: 1.0.0
+Summary: Backport of CPython tarfile module
+Home-page: https://github.com/jaraco/backports.tarfile
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+License-File: LICENSE
+Provides-Extra: docs
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: sphinx-lint ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
+   :target: https://pypi.org/project/backports.tarfile
+
+.. image:: https://img.shields.io/pypi/pyversions/backports.tarfile.svg
+
+.. image:: https://github.com/jaraco/backports.tarfile/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/backports.tarfile/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. .. image:: https://readthedocs.org/projects/backportstarfile/badge/?version=latest
+..    :target: https://backportstarfile.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..a6a44d8fcc
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
+backports.tarfile-1.0.0.dist-info/RECORD,,
+backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__pycache__/tarfile.cpython-312.pyc,,
+backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/WHEEL b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
similarity index 65%
rename from pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/WHEEL
rename to pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
index 57e3d840d5..bab98d6758 100644
--- a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/WHEEL
+++ b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.38.4)
+Generator: bdist_wheel (0.43.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..99d2be5b64
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+backports
diff --git a/pkg_resources/_vendor/backports/tarfile.py b/pkg_resources/_vendor/backports/tarfile.py
new file mode 100644
index 0000000000..a7a9a6e7b9
--- /dev/null
+++ b/pkg_resources/_vendor/backports/tarfile.py
@@ -0,0 +1,2900 @@
+#!/usr/bin/env python3
+#-------------------------------------------------------------------
+# tarfile.py
+#-------------------------------------------------------------------
+# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
+# All rights reserved.
+#
+# Permission  is  hereby granted,  free  of charge,  to  any person
+# obtaining a  copy of  this software  and associated documentation
+# files  (the  "Software"),  to   deal  in  the  Software   without
+# restriction,  including  without limitation  the  rights to  use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies  of  the  Software,  and to  permit  persons  to  whom the
+# Software  is  furnished  to  do  so,  subject  to  the  following
+# conditions:
+#
+# The above copyright  notice and this  permission notice shall  be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
+# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
+# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
+# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
+# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
+# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+"""Read from and write to tar format archives.
+"""
+
+version     = "0.9.0"
+__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
+__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
+
+#---------
+# Imports
+#---------
+from builtins import open as bltn_open
+import sys
+import os
+import io
+import shutil
+import stat
+import time
+import struct
+import copy
+import re
+import warnings
+
+try:
+    import pwd
+except ImportError:
+    pwd = None
+try:
+    import grp
+except ImportError:
+    grp = None
+
+# os.symlink on Windows prior to 6.0 raises NotImplementedError
+# OSError (winerror=1314) will be raised if the caller does not hold the
+# SeCreateSymbolicLinkPrivilege privilege
+symlink_exception = (AttributeError, NotImplementedError, OSError)
+
+# from tarfile import *
+__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
+           "CompressionError", "StreamError", "ExtractError", "HeaderError",
+           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
+           "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
+           "tar_filter", "FilterError", "AbsoluteLinkError",
+           "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
+           "LinkOutsideDestinationError"]
+
+
+#---------------------------------------------------------
+# tar constants
+#---------------------------------------------------------
+NUL = b"\0"                     # the null character
+BLOCKSIZE = 512                 # length of processing blocks
+RECORDSIZE = BLOCKSIZE * 20     # length of records
+GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
+POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string
+
+LENGTH_NAME = 100               # maximum length of a filename
+LENGTH_LINK = 100               # maximum length of a linkname
+LENGTH_PREFIX = 155             # maximum length of the prefix field
+
+REGTYPE = b"0"                  # regular file
+AREGTYPE = b"\0"                # regular file
+LNKTYPE = b"1"                  # link (inside tarfile)
+SYMTYPE = b"2"                  # symbolic link
+CHRTYPE = b"3"                  # character special device
+BLKTYPE = b"4"                  # block special device
+DIRTYPE = b"5"                  # directory
+FIFOTYPE = b"6"                 # fifo special device
+CONTTYPE = b"7"                 # contiguous file
+
+GNUTYPE_LONGNAME = b"L"         # GNU tar longname
+GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
+GNUTYPE_SPARSE = b"S"           # GNU tar sparse file
+
+XHDTYPE = b"x"                  # POSIX.1-2001 extended header
+XGLTYPE = b"g"                  # POSIX.1-2001 global header
+SOLARIS_XHDTYPE = b"X"          # Solaris extended header
+
+USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
+GNU_FORMAT = 1                  # GNU tar format
+PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
+DEFAULT_FORMAT = PAX_FORMAT
+
+#---------------------------------------------------------
+# tarfile constants
+#---------------------------------------------------------
+# File types that tarfile supports:
+SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
+                   SYMTYPE, DIRTYPE, FIFOTYPE,
+                   CONTTYPE, CHRTYPE, BLKTYPE,
+                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+                   GNUTYPE_SPARSE)
+
+# File types that will be treated as a regular file.
+REGULAR_TYPES = (REGTYPE, AREGTYPE,
+                 CONTTYPE, GNUTYPE_SPARSE)
+
+# File types that are part of the GNU tar format.
+GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+             GNUTYPE_SPARSE)
+
+# Fields from a pax header that override a TarInfo attribute.
+PAX_FIELDS = ("path", "linkpath", "size", "mtime",
+              "uid", "gid", "uname", "gname")
+
+# Fields from a pax header that are affected by hdrcharset.
+PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
+
+# Fields in a pax header that are numbers, all other fields
+# are treated as strings.
+PAX_NUMBER_FIELDS = {
+    "atime": float,
+    "ctime": float,
+    "mtime": float,
+    "uid": int,
+    "gid": int,
+    "size": int
+}
+
+#---------------------------------------------------------
+# initialization
+#---------------------------------------------------------
+if os.name == "nt":
+    ENCODING = "utf-8"
+else:
+    ENCODING = sys.getfilesystemencoding()
+
+#---------------------------------------------------------
+# Some useful functions
+#---------------------------------------------------------
+
+def stn(s, length, encoding, errors):
+    """Convert a string to a null-terminated bytes object.
+    """
+    if s is None:
+        raise ValueError("metadata cannot contain None")
+    s = s.encode(encoding, errors)
+    return s[:length] + (length - len(s)) * NUL
+
+def nts(s, encoding, errors):
+    """Convert a null-terminated bytes object to a string.
+    """
+    p = s.find(b"\0")
+    if p != -1:
+        s = s[:p]
+    return s.decode(encoding, errors)
+
+def nti(s):
+    """Convert a number field to a python number.
+    """
+    # There are two possible encodings for a number field, see
+    # itn() below.
+    if s[0] in (0o200, 0o377):
+        n = 0
+        for i in range(len(s) - 1):
+            n <<= 8
+            n += s[i + 1]
+        if s[0] == 0o377:
+            n = -(256 ** (len(s) - 1) - n)
+    else:
+        try:
+            s = nts(s, "ascii", "strict")
+            n = int(s.strip() or "0", 8)
+        except ValueError:
+            raise InvalidHeaderError("invalid header")
+    return n
+
+def itn(n, digits=8, format=DEFAULT_FORMAT):
+    """Convert a python number to a number field.
+    """
+    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
+    # octal digits followed by a null-byte, this allows values up to
+    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
+    # that if necessary. A leading 0o200 or 0o377 byte indicate this
+    # particular encoding, the following digits-1 bytes are a big-endian
+    # base-256 representation. This allows values up to (256**(digits-1))-1.
+    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
+    # number.
+    original_n = n
+    n = int(n)
+    if 0 <= n < 8 ** (digits - 1):
+        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
+    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
+        if n >= 0:
+            s = bytearray([0o200])
+        else:
+            s = bytearray([0o377])
+            n = 256 ** digits + n
+
+        for i in range(digits - 1):
+            s.insert(1, n & 0o377)
+            n >>= 8
+    else:
+        raise ValueError("overflow in number field")
+
+    return s
+
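A quick round-trip sketch of these two helpers (illustrative values; assumes the
module-level names above are importable, e.g. from the vendored backports.tarfile):
a value that fits in seven octal digits gets the POSIX text encoding, while a
larger one falls back to GNU base-256.

    from backports.tarfile import GNU_FORMAT, itn, nti

    assert itn(0o755) == b"0000755\x00"   # POSIX: octal digits + NUL
    # 8**11 is the first value that no longer fits in 11 octal digits,
    # so the GNU base-256 encoding kicks in:
    assert nti(itn(8 ** 11, digits=12, format=GNU_FORMAT)) == 8 ** 11
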
+def calc_chksums(buf):
+    """Calculate the checksum for a member's header by summing up all
+       characters except for the chksum field which is treated as if
+       it was filled with spaces. According to the GNU tar sources,
+       some tars (Sun and NeXT) calculate chksum with signed char,
+       which will be different if there are chars in the buffer with
+       the high bit set. So we calculate two checksums, unsigned and
+       signed.
+    """
+    unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
+    signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
+    return unsigned_chksum, signed_chksum
+
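For orientation, a sketch of how a consumer checks these sums against the stored
field, mirroring what frombuf() does later in this file (`buf` is assumed to be
one 512-byte header block already read from an archive):

    stored = nti(buf[148:156])            # checksum as recorded in the header
    if stored not in calc_chksums(buf):   # accept either the unsigned or signed sum
        raise InvalidHeaderError("bad checksum")
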
+def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
+    """Copy length bytes from fileobj src to fileobj dst.
+       If length is None, copy the entire content.
+    """
+    bufsize = bufsize or 16 * 1024
+    if length == 0:
+        return
+    if length is None:
+        shutil.copyfileobj(src, dst, bufsize)
+        return
+
+    blocks, remainder = divmod(length, bufsize)
+    for b in range(blocks):
+        buf = src.read(bufsize)
+        if len(buf) < bufsize:
+            raise exception("unexpected end of data")
+        dst.write(buf)
+
+    if remainder != 0:
+        buf = src.read(remainder)
+        if len(buf) < remainder:
+            raise exception("unexpected end of data")
+        dst.write(buf)
+    return
+
+def _safe_print(s):
+    encoding = getattr(sys.stdout, 'encoding', None)
+    if encoding is not None:
+        s = s.encode(encoding, 'backslashreplace').decode(encoding)
+    print(s, end=' ')
+
+
+class TarError(Exception):
+    """Base exception."""
+    pass
+class ExtractError(TarError):
+    """General exception for extract errors."""
+    pass
+class ReadError(TarError):
+    """Exception for unreadable tar archives."""
+    pass
+class CompressionError(TarError):
+    """Exception for unavailable compression methods."""
+    pass
+class StreamError(TarError):
+    """Exception for unsupported operations on stream-like TarFiles."""
+    pass
+class HeaderError(TarError):
+    """Base exception for header errors."""
+    pass
+class EmptyHeaderError(HeaderError):
+    """Exception for empty headers."""
+    pass
+class TruncatedHeaderError(HeaderError):
+    """Exception for truncated headers."""
+    pass
+class EOFHeaderError(HeaderError):
+    """Exception for end of file headers."""
+    pass
+class InvalidHeaderError(HeaderError):
+    """Exception for invalid headers."""
+    pass
+class SubsequentHeaderError(HeaderError):
+    """Exception for missing and invalid extended headers."""
+    pass
+
+#---------------------------
+# internal stream interface
+#---------------------------
+class _LowLevelFile:
+    """Low-level file object. Supports reading and writing.
+       It is used instead of a regular file object for streaming
+       access.
+    """
+
+    def __init__(self, name, mode):
+        mode = {
+            "r": os.O_RDONLY,
+            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
+        }[mode]
+        if hasattr(os, "O_BINARY"):
+            mode |= os.O_BINARY
+        self.fd = os.open(name, mode, 0o666)
+
+    def close(self):
+        os.close(self.fd)
+
+    def read(self, size):
+        return os.read(self.fd, size)
+
+    def write(self, s):
+        os.write(self.fd, s)
+
+class _Stream:
+    """Class that serves as an adapter between TarFile and
+       a stream-like object.  The stream-like object only
+       needs to have a read() or write() method that works with bytes,
+       and the method is accessed blockwise.
+       Use of gzip or bzip2 compression is possible.
+       A stream-like object could be for example: sys.stdin.buffer,
+       sys.stdout.buffer, a socket, a tape device etc.
+
+       _Stream is intended to be used only internally.
+    """
+
+    def __init__(self, name, mode, comptype, fileobj, bufsize,
+                 compresslevel):
+        """Construct a _Stream object.
+        """
+        self._extfileobj = True
+        if fileobj is None:
+            fileobj = _LowLevelFile(name, mode)
+            self._extfileobj = False
+
+        if comptype == '*':
+            # Enable transparent compression detection for the
+            # stream interface
+            fileobj = _StreamProxy(fileobj)
+            comptype = fileobj.getcomptype()
+
+        self.name     = name or ""
+        self.mode     = mode
+        self.comptype = comptype
+        self.fileobj  = fileobj
+        self.bufsize  = bufsize
+        self.buf      = b""
+        self.pos      = 0
+        self.closed   = False
+
+        try:
+            if comptype == "gz":
+                try:
+                    import zlib
+                except ImportError:
+                    raise CompressionError("zlib module is not available") from None
+                self.zlib = zlib
+                self.crc = zlib.crc32(b"")
+                if mode == "r":
+                    self.exception = zlib.error
+                    self._init_read_gz()
+                else:
+                    self._init_write_gz(compresslevel)
+
+            elif comptype == "bz2":
+                try:
+                    import bz2
+                except ImportError:
+                    raise CompressionError("bz2 module is not available") from None
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = bz2.BZ2Decompressor()
+                    self.exception = OSError
+                else:
+                    self.cmp = bz2.BZ2Compressor(compresslevel)
+
+            elif comptype == "xz":
+                try:
+                    import lzma
+                except ImportError:
+                    raise CompressionError("lzma module is not available") from None
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = lzma.LZMADecompressor()
+                    self.exception = lzma.LZMAError
+                else:
+                    self.cmp = lzma.LZMACompressor()
+
+            elif comptype != "tar":
+                raise CompressionError("unknown compression type %r" % comptype)
+
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    def __del__(self):
+        if hasattr(self, "closed") and not self.closed:
+            self.close()
+
+    def _init_write_gz(self, compresslevel):
+        """Initialize for writing with gzip compression.
+        """
+        self.cmp = self.zlib.compressobj(compresslevel,
+                                         self.zlib.DEFLATED,
+                                         -self.zlib.MAX_WBITS,
+                                         self.zlib.DEF_MEM_LEVEL,
+                                         0)
+        timestamp = struct.pack("<L", int(time.time()))
+        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+        if self.name.endswith(".gz"):
+            self.name = self.name[:-3]
+        # Honor "directory components removed" from RFC 1952
+        self.name = os.path.basename(self.name)
+        # RFC 1952 says we must use ISO-8859-1 for the FNAME field.
+        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+    def write(self, s):
+        """Write string s to the stream.
+        """
+        if self.comptype == "gz":
+            self.crc = self.zlib.crc32(s, self.crc)
+        self.pos += len(s)
+        if self.comptype != "tar":
+            s = self.cmp.compress(s)
+        self.__write(s)
+
+    def __write(self, s):
+        """Write string s to the stream if a whole new block
+           is ready to be written.
+        """
+        self.buf += s
+        while len(self.buf) > self.bufsize:
+            self.fileobj.write(self.buf[:self.bufsize])
+            self.buf = self.buf[self.bufsize:]
+
+    def close(self):
+        """Close the _Stream object. No operation should be
+           done on it afterwards.
+        """
+        if self.closed:
+            return
+
+        self.closed = True
+        try:
+            if self.mode == "w" and self.comptype != "tar":
+                self.buf += self.cmp.flush()
+
+            if self.mode == "w" and self.buf:
+                self.fileobj.write(self.buf)
+                self.buf = b""
+                if self.comptype == "gz":
+                    self.fileobj.write(struct.pack("<L", self.crc))
+                    self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
+        finally:
+            if not self._extfileobj:
+                self.fileobj.close()
+
+    def _init_read_gz(self):
+        """Initialize for reading a gzip compressed fileobj.
+        """
+        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+        self.dbuf = b""
+
+        # taken from gzip.GzipFile with some alterations
+        if self.__read(2) != b"\037\213":
+            raise ReadError("not a gzip file")
+        if self.__read(1) != b"\010":
+            raise CompressionError("unsupported compression method")
+
+        flag = ord(self.__read(1))
+        self.__read(6)
+
+        if flag & 4:
+            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+            self.read(xlen)
+        if flag & 8:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 16:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 2:
+            self.__read(2)
+
+    def tell(self):
+        """Return the stream's file pointer position.
+        """
+        return self.pos
+
+    def seek(self, pos=0):
+        """Set the stream's file pointer to pos. Negative seeking
+           is forbidden.
+        """
+        if pos - self.pos >= 0:
+            blocks, remainder = divmod(pos - self.pos, self.bufsize)
+            for i in range(blocks):
+                self.read(self.bufsize)
+            self.read(remainder)
+        else:
+            raise StreamError("seeking backwards is not allowed")
+        return self.pos
+
+    def read(self, size):
+        """Return the next size number of bytes from the stream."""
+        assert size is not None
+        buf = self._read(size)
+        self.pos += len(buf)
+        return buf
+
+    def _read(self, size):
+        """Return size bytes from the stream.
+        """
+        if self.comptype == "tar":
+            return self.__read(size)
+
+        c = len(self.dbuf)
+        t = [self.dbuf]
+        while c < size:
+            # Skip underlying buffer to avoid unaligned double buffering.
+            if self.buf:
+                buf = self.buf
+                self.buf = b""
+            else:
+                buf = self.fileobj.read(self.bufsize)
+                if not buf:
+                    break
+            try:
+                buf = self.cmp.decompress(buf)
+            except self.exception as e:
+                raise ReadError("invalid compressed data") from e
+            t.append(buf)
+            c += len(buf)
+        t = b"".join(t)
+        self.dbuf = t[size:]
+        return t[:size]
+
+    def __read(self, size):
+        """Return size bytes from stream. If internal buffer is empty,
+           read another block from the stream.
+        """
+        c = len(self.buf)
+        t = [self.buf]
+        while c < size:
+            buf = self.fileobj.read(self.bufsize)
+            if not buf:
+                break
+            t.append(buf)
+            c += len(buf)
+        t = b"".join(t)
+        self.buf = t[size:]
+        return t[:size]
+# class _Stream
+
+class _StreamProxy(object):
+    """Small proxy class that enables transparent compression
+       detection for the Stream interface (mode 'r|*').
+    """
+
+    def __init__(self, fileobj):
+        self.fileobj = fileobj
+        self.buf = self.fileobj.read(BLOCKSIZE)
+
+    def read(self, size):
+        self.read = self.fileobj.read
+        return self.buf
+
+    def getcomptype(self):
+        if self.buf.startswith(b"\x1f\x8b\x08"):
+            return "gz"
+        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
+            return "bz2"
+        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
+            return "xz"
+        else:
+            return "tar"
+
+    def close(self):
+        self.fileobj.close()
+# class StreamProxy
+
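_StreamProxy is what backs the transparent stream mode; a hedged usage sketch
(the archive path is illustrative, and backports.tarfile is assumed importable):

    from backports import tarfile

    # 'r|*' streams a possibly non-seekable file and sniffs gz/bz2/xz/plain
    # tar from the magic bytes tested in getcomptype() above.
    with tarfile.open("backup.tar.xz", mode="r|*") as tf:
        for member in tf:
            print(member.name)
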
+#------------------------
+# Extraction file object
+#------------------------
+class _FileInFile(object):
+    """A thin wrapper around an existing file object that
+       provides a part of its data as an individual file
+       object.
+    """
+
+    def __init__(self, fileobj, offset, size, name, blockinfo=None):
+        self.fileobj = fileobj
+        self.offset = offset
+        self.size = size
+        self.position = 0
+        self.name = name
+        self.closed = False
+
+        if blockinfo is None:
+            blockinfo = [(0, size)]
+
+        # Construct a map with data and zero blocks.
+        self.map_index = 0
+        self.map = []
+        lastpos = 0
+        realpos = self.offset
+        for offset, size in blockinfo:
+            if offset > lastpos:
+                self.map.append((False, lastpos, offset, None))
+            self.map.append((True, offset, offset + size, realpos))
+            realpos += size
+            lastpos = offset + size
+        if lastpos < self.size:
+            self.map.append((False, lastpos, self.size, None))
+
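Worked through on a small sparse layout (numbers are illustrative): blockinfo =
[(0, 10), (100, 10)] with size = 120 and data beginning at file offset off yields
this map of (data?, start, stop, realpos) entries:

    (True,  0,   10,  off)       # data block
    (False, 10,  100, None)      # hole, read back as NULs
    (True,  100, 110, off + 10)  # data block
    (False, 110, 120, None)      # trailing hole
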
+    def flush(self):
+        pass
+
+    def readable(self):
+        return True
+
+    def writable(self):
+        return False
+
+    def seekable(self):
+        return self.fileobj.seekable()
+
+    def tell(self):
+        """Return the current file position.
+        """
+        return self.position
+
+    def seek(self, position, whence=io.SEEK_SET):
+        """Seek to a position in the file.
+        """
+        if whence == io.SEEK_SET:
+            self.position = min(max(position, 0), self.size)
+        elif whence == io.SEEK_CUR:
+            if position < 0:
+                self.position = max(self.position + position, 0)
+            else:
+                self.position = min(self.position + position, self.size)
+        elif whence == io.SEEK_END:
+            self.position = max(min(self.size + position, self.size), 0)
+        else:
+            raise ValueError("Invalid argument")
+        return self.position
+
+    def read(self, size=None):
+        """Read data from the file.
+        """
+        if size is None:
+            size = self.size - self.position
+        else:
+            size = min(size, self.size - self.position)
+
+        buf = b""
+        while size > 0:
+            while True:
+                data, start, stop, offset = self.map[self.map_index]
+                if start <= self.position < stop:
+                    break
+                else:
+                    self.map_index += 1
+                    if self.map_index == len(self.map):
+                        self.map_index = 0
+            length = min(size, stop - self.position)
+            if data:
+                self.fileobj.seek(offset + (self.position - start))
+                b = self.fileobj.read(length)
+                if len(b) != length:
+                    raise ReadError("unexpected end of data")
+                buf += b
+            else:
+                buf += NUL * length
+            size -= length
+            self.position += length
+        return buf
+
+    def readinto(self, b):
+        buf = self.read(len(b))
+        b[:len(buf)] = buf
+        return len(buf)
+
+    def close(self):
+        self.closed = True
+#class _FileInFile
+
+class ExFileObject(io.BufferedReader):
+
+    def __init__(self, tarfile, tarinfo):
+        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
+                tarinfo.size, tarinfo.name, tarinfo.sparse)
+        super().__init__(fileobj)
+#class ExFileObject
+
+
+#-----------------------------
+# extraction filters (PEP 706)
+#-----------------------------
+
+class FilterError(TarError):
+    pass
+
+class AbsolutePathError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'member {tarinfo.name!r} has an absolute path')
+
+class OutsideDestinationError(FilterError):
+    def __init__(self, tarinfo, path):
+        self.tarinfo = tarinfo
+        self._path = path
+        super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
+                         + 'which is outside the destination')
+
+class SpecialFileError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'{tarinfo.name!r} is a special file')
+
+class AbsoluteLinkError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'{tarinfo.name!r} is a link to an absolute path')
+
+class LinkOutsideDestinationError(FilterError):
+    def __init__(self, tarinfo, path):
+        self.tarinfo = tarinfo
+        self._path = path
+        super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+                         + 'which is outside the destination')
+
+def _get_filtered_attrs(member, dest_path, for_data=True):
+    new_attrs = {}
+    name = member.name
+    dest_path = os.path.realpath(dest_path)
+    # Strip leading / (tar's directory separator) from filenames.
+    # Include os.sep (target OS directory separator) as well.
+    if name.startswith(('/', os.sep)):
+        name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
+    if os.path.isabs(name):
+        # Path is absolute even after stripping.
+        # For example, 'C:/foo' on Windows.
+        raise AbsolutePathError(member)
+    # Ensure we stay in the destination
+    target_path = os.path.realpath(os.path.join(dest_path, name))
+    if os.path.commonpath([target_path, dest_path]) != dest_path:
+        raise OutsideDestinationError(member, target_path)
+    # Limit permissions (no high bits, and go-w)
+    mode = member.mode
+    if mode is not None:
+        # Strip high bits & group/other write bits
+        mode = mode & 0o755
+        if for_data:
+            # For data, handle permissions & file types
+            if member.isreg() or member.islnk():
+                if not mode & 0o100:
+                    # Clear executable bits if not executable by user
+                    mode &= ~0o111
+                # Ensure owner can read & write
+                mode |= 0o600
+            elif member.isdir() or member.issym():
+                # Ignore mode for directories & symlinks
+                mode = None
+            else:
+                # Reject special files
+                raise SpecialFileError(member)
+        if mode != member.mode:
+            new_attrs['mode'] = mode
+    if for_data:
+        # Ignore ownership for 'data'
+        if member.uid is not None:
+            new_attrs['uid'] = None
+        if member.gid is not None:
+            new_attrs['gid'] = None
+        if member.uname is not None:
+            new_attrs['uname'] = None
+        if member.gname is not None:
+            new_attrs['gname'] = None
+        # Check link destination for 'data'
+        if member.islnk() or member.issym():
+            if os.path.isabs(member.linkname):
+                raise AbsoluteLinkError(member)
+            if member.issym():
+                target_path = os.path.join(dest_path,
+                                           os.path.dirname(name),
+                                           member.linkname)
+            else:
+                target_path = os.path.join(dest_path,
+                                           member.linkname)
+            target_path = os.path.realpath(target_path)
+            if os.path.commonpath([target_path, dest_path]) != dest_path:
+                raise LinkOutsideDestinationError(member, target_path)
+    return new_attrs
+
+def fully_trusted_filter(member, dest_path):
+    return member
+
+def tar_filter(member, dest_path):
+    new_attrs = _get_filtered_attrs(member, dest_path, False)
+    if new_attrs:
+        return member.replace(**new_attrs, deep=False)
+    return member
+
+def data_filter(member, dest_path):
+    new_attrs = _get_filtered_attrs(member, dest_path, True)
+    if new_attrs:
+        return member.replace(**new_attrs, deep=False)
+    return member
+
+_NAMED_FILTERS = {
+    "fully_trusted": fully_trusted_filter,
+    "tar": tar_filter,
+    "data": data_filter,
+}
+
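These named filters are normally reached through extraction; a hedged sketch
(archive and destination names are illustrative):

    from backports import tarfile

    with tarfile.open("release.tar.gz") as tf:
        # filter="data" resolves to data_filter() above: absolute paths,
        # links escaping the destination, and special files are rejected.
        tf.extractall(path="dest", filter="data")
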
+#------------------
+# Exported Classes
+#------------------
+
+# Sentinel for replace() defaults, meaning "don't change the attribute"
+_KEEP = object()
+
+class TarInfo(object):
+    """Informational class which holds the details about an
+       archive member given by a tar header block.
+       TarInfo objects are returned by TarFile.getmember(),
+       TarFile.getmembers() and TarFile.gettarinfo() and are
+       usually created internally.
+    """
+
+    __slots__ = dict(
+        name = 'Name of the archive member.',
+        mode = 'Permission bits.',
+        uid = 'User ID of the user who originally stored this member.',
+        gid = 'Group ID of the user who originally stored this member.',
+        size = 'Size in bytes.',
+        mtime = 'Time of last modification.',
+        chksum = 'Header checksum.',
+        type = ('File type. type is usually one of these constants: '
+                'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
+                'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
+        linkname = ('Name of the target file name, which is only present '
+                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
+        uname = 'User name.',
+        gname = 'Group name.',
+        devmajor = 'Device major number.',
+        devminor = 'Device minor number.',
+        offset = 'The tar header starts here.',
+        offset_data = "The file's data starts here.",
+        pax_headers = ('A dictionary containing key-value pairs of an '
+                       'associated pax extended header.'),
+        sparse = 'Sparse member information.',
+        tarfile = None,
+        _sparse_structs = None,
+        _link_target = None,
+        )
+
+    def __init__(self, name=""):
+        """Construct a TarInfo object. name is the optional name
+           of the member.
+        """
+        self.name = name        # member name
+        self.mode = 0o644       # file permissions
+        self.uid = 0            # user id
+        self.gid = 0            # group id
+        self.size = 0           # file size
+        self.mtime = 0          # modification time
+        self.chksum = 0         # header checksum
+        self.type = REGTYPE     # member type
+        self.linkname = ""      # link name
+        self.uname = ""         # user name
+        self.gname = ""         # group name
+        self.devmajor = 0       # device major number
+        self.devminor = 0       # device minor number
+
+        self.offset = 0         # the tar header starts here
+        self.offset_data = 0    # the file's data starts here
+
+        self.sparse = None      # sparse member information
+        self.pax_headers = {}   # pax header information
+
+    @property
+    def path(self):
+        'In pax headers, "name" is called "path".'
+        return self.name
+
+    @path.setter
+    def path(self, name):
+        self.name = name
+
+    @property
+    def linkpath(self):
+        'In pax headers, "linkname" is called "linkpath".'
+        return self.linkname
+
+    @linkpath.setter
+    def linkpath(self, linkname):
+        self.linkname = linkname
+
+    def __repr__(self):
+        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+
+    def replace(self, *,
+                name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
+                uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
+                deep=True, _KEEP=_KEEP):
+        """Return a deep copy of self with the given attributes replaced.
+        """
+        if deep:
+            result = copy.deepcopy(self)
+        else:
+            result = copy.copy(self)
+        if name is not _KEEP:
+            result.name = name
+        if mtime is not _KEEP:
+            result.mtime = mtime
+        if mode is not _KEEP:
+            result.mode = mode
+        if linkname is not _KEEP:
+            result.linkname = linkname
+        if uid is not _KEEP:
+            result.uid = uid
+        if gid is not _KEEP:
+            result.gid = gid
+        if uname is not _KEEP:
+            result.uname = uname
+        if gname is not _KEEP:
+            result.gname = gname
+        return result
+
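replace() with deep=False is the building block the filters above use; a sketch
of a custom filter in the same style (the function name is made up). Passed as
extractall(..., filter=strip_ownership), it clears ownership the way
data_filter() does, but without the path checks:

    def strip_ownership(member, dest_path):
        # Shallow copy with ownership cleared; the original member is untouched.
        return member.replace(uid=None, gid=None, uname=None, gname=None,
                              deep=False)
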
+    def get_info(self):
+        """Return the TarInfo's attributes as a dictionary.
+        """
+        if self.mode is None:
+            mode = None
+        else:
+            mode = self.mode & 0o7777
+        info = {
+            "name":     self.name,
+            "mode":     mode,
+            "uid":      self.uid,
+            "gid":      self.gid,
+            "size":     self.size,
+            "mtime":    self.mtime,
+            "chksum":   self.chksum,
+            "type":     self.type,
+            "linkname": self.linkname,
+            "uname":    self.uname,
+            "gname":    self.gname,
+            "devmajor": self.devmajor,
+            "devminor": self.devminor
+        }
+
+        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
+            info["name"] += "/"
+
+        return info
+
+    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
+        """Return a tar header as a string of 512 byte blocks.
+        """
+        info = self.get_info()
+        for name, value in info.items():
+            if value is None:
+                raise ValueError("%s may not be None" % name)
+
+        if format == USTAR_FORMAT:
+            return self.create_ustar_header(info, encoding, errors)
+        elif format == GNU_FORMAT:
+            return self.create_gnu_header(info, encoding, errors)
+        elif format == PAX_FORMAT:
+            return self.create_pax_header(info, encoding)
+        else:
+            raise ValueError("invalid format")
+
+    def create_ustar_header(self, info, encoding, errors):
+        """Return the object as a ustar header block.
+        """
+        info["magic"] = POSIX_MAGIC
+
+        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+            raise ValueError("linkname is too long")
+
+        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+            info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
+
+        return self._create_header(info, USTAR_FORMAT, encoding, errors)
+
+    def create_gnu_header(self, info, encoding, errors):
+        """Return the object as a GNU header block sequence.
+        """
+        info["magic"] = GNU_MAGIC
+
+        buf = b""
+        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
+
+        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
+
+        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
+
+    def create_pax_header(self, info, encoding):
+        """Return the object as a ustar header block. If it cannot be
+           represented this way, prepend a pax extended header sequence
+           with supplement information.
+        """
+        info["magic"] = POSIX_MAGIC
+        pax_headers = self.pax_headers.copy()
+
+        # Test string fields for values that exceed the field length or cannot
+        # be represented in ASCII encoding.
+        for name, hname, length in (
+                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
+                ("uname", "uname", 32), ("gname", "gname", 32)):
+
+            if hname in pax_headers:
+                # The pax header has priority.
+                continue
+
+            # Try to encode the string as ASCII.
+            try:
+                info[name].encode("ascii", "strict")
+            except UnicodeEncodeError:
+                pax_headers[hname] = info[name]
+                continue
+
+            if len(info[name]) > length:
+                pax_headers[hname] = info[name]
+
+        # Test number fields for values that exceed the field limit or values
+        # that need to be stored as float.
+        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
+            needs_pax = False
+
+            val = info[name]
+            val_is_float = isinstance(val, float)
+            val_int = round(val) if val_is_float else val
+            if not 0 <= val_int < 8 ** (digits - 1):
+                # Avoid overflow.
+                info[name] = 0
+                needs_pax = True
+            elif val_is_float:
+                # Put rounded value in ustar header, and full
+                # precision value in pax header.
+                info[name] = val_int
+                needs_pax = True
+
+            # The existing pax header has priority.
+            if needs_pax and name not in pax_headers:
+                pax_headers[name] = str(val)
+
+        # Create a pax extended header if necessary.
+        if pax_headers:
+            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
+        else:
+            buf = b""
+
+        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
+
+    @classmethod
+    def create_pax_global_header(cls, pax_headers):
+        """Return the object as a pax global header block sequence.
+        """
+        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
+
+    def _posix_split_name(self, name, encoding, errors):
+        """Split a name longer than 100 chars into a prefix
+           and a name part.
+        """
+        components = name.split("/")
+        for i in range(1, len(components)):
+            prefix = "/".join(components[:i])
+            name = "/".join(components[i:])
+            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
+                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
+                break
+        else:
+            raise ValueError("name is too long")
+
+        return prefix, name
+
+    @staticmethod
+    def _create_header(info, format, encoding, errors):
+        """Return a header block. info is a dictionary with file
+           information, format must be one of the *_FORMAT constants.
+        """
+        has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
+        if has_device_fields:
+            devmajor = itn(info.get("devmajor", 0), 8, format)
+            devminor = itn(info.get("devminor", 0), 8, format)
+        else:
+            devmajor = stn("", 8, encoding, errors)
+            devminor = stn("", 8, encoding, errors)
+
+        # None values in metadata should cause ValueError.
+        # itn()/stn() do this for all fields except type.
+        filetype = info.get("type", REGTYPE)
+        if filetype is None:
+            raise ValueError("TarInfo.type must not be None")
+
+        parts = [
+            stn(info.get("name", ""), 100, encoding, errors),
+            itn(info.get("mode", 0) & 0o7777, 8, format),
+            itn(info.get("uid", 0), 8, format),
+            itn(info.get("gid", 0), 8, format),
+            itn(info.get("size", 0), 12, format),
+            itn(info.get("mtime", 0), 12, format),
+            b"        ", # checksum field
+            filetype,
+            stn(info.get("linkname", ""), 100, encoding, errors),
+            info.get("magic", POSIX_MAGIC),
+            stn(info.get("uname", ""), 32, encoding, errors),
+            stn(info.get("gname", ""), 32, encoding, errors),
+            devmajor,
+            devminor,
+            stn(info.get("prefix", ""), 155, encoding, errors)
+        ]
+
+        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
+        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
+        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
+        return buf
+
+    @staticmethod
+    def _create_payload(payload):
+        """Return the string payload filled with zero bytes
+           up to the next 512 byte border.
+        """
+        blocks, remainder = divmod(len(payload), BLOCKSIZE)
+        if remainder > 0:
+            payload += (BLOCKSIZE - remainder) * NUL
+        return payload
+
+    @classmethod
+    def _create_gnu_long_header(cls, name, type, encoding, errors):
+        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
+           for name.
+        """
+        name = name.encode(encoding, errors) + NUL
+
+        info = {}
+        info["name"] = "././@LongLink"
+        info["type"] = type
+        info["size"] = len(name)
+        info["magic"] = GNU_MAGIC
+
+        # create extended header + name blocks.
+        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
+                cls._create_payload(name)
+
+    @classmethod
+    def _create_pax_generic_header(cls, pax_headers, type, encoding):
+        """Return a POSIX.1-2008 extended or global header sequence
+           that contains a list of keyword, value pairs. The values
+           must be strings.
+        """
+        # Check if one of the fields contains surrogate characters and thereby
+        # forces hdrcharset=BINARY, see _proc_pax() for more information.
+        binary = False
+        for keyword, value in pax_headers.items():
+            try:
+                value.encode("utf-8", "strict")
+            except UnicodeEncodeError:
+                binary = True
+                break
+
+        records = b""
+        if binary:
+            # Put the hdrcharset field at the beginning of the header.
+            records += b"21 hdrcharset=BINARY\n"
+
+        for keyword, value in pax_headers.items():
+            keyword = keyword.encode("utf-8")
+            if binary:
+                # Try to restore the original byte representation of `value'.
+                # Needless to say, the encoding must match the string.
+                value = value.encode(encoding, "surrogateescape")
+            else:
+                value = value.encode("utf-8")
+
+            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
+            n = p = 0
+            while True:
+                n = l + len(str(p))
+                if n == p:
+                    break
+                p = n
+            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
+
+        # We use a hardcoded "././@PaxHeader" name like star does
+        # instead of the one that POSIX recommends.
+        info = {}
+        info["name"] = "././@PaxHeader"
+        info["type"] = type
+        info["size"] = len(records)
+        info["magic"] = POSIX_MAGIC
+
+        # Create pax header + record blocks.
+        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
+                cls._create_payload(records)
+
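The length loop above solves a small fixed point, since the leading decimal count
includes its own digits. Worked through for keyword "path" and value "abc":

    l = len(b"path") + len(b"abc") + 3   # ' ' + '=' + '\n'  ->  l == 10
    # p = 0  -> n = 10 + len("0")  == 11
    # p = 11 -> n = 10 + len("11") == 12
    # p = 12 -> n = 10 + len("12") == 12 == p, fixed point reached
    # record emitted: b"12 path=abc\n", exactly 12 bytes long
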
+    @classmethod
+    def frombuf(cls, buf, encoding, errors):
+        """Construct a TarInfo object from a 512 byte bytes object.
+        """
+        if len(buf) == 0:
+            raise EmptyHeaderError("empty header")
+        if len(buf) != BLOCKSIZE:
+            raise TruncatedHeaderError("truncated header")
+        if buf.count(NUL) == BLOCKSIZE:
+            raise EOFHeaderError("end of file header")
+
+        chksum = nti(buf[148:156])
+        if chksum not in calc_chksums(buf):
+            raise InvalidHeaderError("bad checksum")
+
+        obj = cls()
+        obj.name = nts(buf[0:100], encoding, errors)
+        obj.mode = nti(buf[100:108])
+        obj.uid = nti(buf[108:116])
+        obj.gid = nti(buf[116:124])
+        obj.size = nti(buf[124:136])
+        obj.mtime = nti(buf[136:148])
+        obj.chksum = chksum
+        obj.type = buf[156:157]
+        obj.linkname = nts(buf[157:257], encoding, errors)
+        obj.uname = nts(buf[265:297], encoding, errors)
+        obj.gname = nts(buf[297:329], encoding, errors)
+        obj.devmajor = nti(buf[329:337])
+        obj.devminor = nti(buf[337:345])
+        prefix = nts(buf[345:500], encoding, errors)
+
+        # Old V7 tar format represents a directory as a regular
+        # file with a trailing slash.
+        if obj.type == AREGTYPE and obj.name.endswith("/"):
+            obj.type = DIRTYPE
+
+        # The old GNU sparse format occupies some of the unused
+        # space in the buffer for up to 4 sparse structures.
+        # Save them for later processing in _proc_sparse().
+        if obj.type == GNUTYPE_SPARSE:
+            pos = 386
+            structs = []
+            for i in range(4):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[482])
+            origsize = nti(buf[483:495])
+            obj._sparse_structs = (structs, isextended, origsize)
+
+        # Remove redundant slashes from directories.
+        if obj.isdir():
+            obj.name = obj.name.rstrip("/")
+
+        # Reconstruct a ustar longname.
+        if prefix and obj.type not in GNU_TYPES:
+            obj.name = prefix + "/" + obj.name
+        return obj
+
+    @classmethod
+    def fromtarfile(cls, tarfile):
+        """Return the next TarInfo object from TarFile object
+           tarfile.
+        """
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+        return obj._proc_member(tarfile)
+
+    #--------------------------------------------------------------------------
+    # The following are methods that are called depending on the type of a
+    # member. The entry point is _proc_member() which can be overridden in a
+    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+    # implement the following operations:
+    # 1. Set self.offset_data to the position where the data blocks begin,
+    #    if there is data that follows.
+    # 2. Set tarfile.offset to the position where the next member's header will
+    #    begin.
+    # 3. Return self or another valid TarInfo object.
+    def _proc_member(self, tarfile):
+        """Choose the right processing method depending on
+           the type and call it.
+        """
+        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+            return self._proc_gnulong(tarfile)
+        elif self.type == GNUTYPE_SPARSE:
+            return self._proc_sparse(tarfile)
+        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+            return self._proc_pax(tarfile)
+        else:
+            return self._proc_builtin(tarfile)
+
+    def _proc_builtin(self, tarfile):
+        """Process a builtin type or an unknown type which
+           will be treated as a regular file.
+        """
+        self.offset_data = tarfile.fileobj.tell()
+        offset = self.offset_data
+        if self.isreg() or self.type not in SUPPORTED_TYPES:
+            # Skip the following data blocks.
+            offset += self._block(self.size)
+        tarfile.offset = offset
+
+        # Patch the TarInfo object with saved global
+        # header information.
+        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
+
+        # Remove redundant slashes from directories. This is to be consistent
+        # with frombuf().
+        if self.isdir():
+            self.name = self.name.rstrip("/")
+
+        return self
+
+    def _proc_gnulong(self, tarfile):
+        """Process the blocks that hold a GNU longname
+           or longlink member.
+        """
+        buf = tarfile.fileobj.read(self._block(self.size))
+
+        # Fetch the next header and process it.
+        try:
+            next = self.fromtarfile(tarfile)
+        except HeaderError as e:
+            raise SubsequentHeaderError(str(e)) from None
+
+        # Patch the TarInfo object from the next header with
+        # the longname information.
+        next.offset = self.offset
+        if self.type == GNUTYPE_LONGNAME:
+            next.name = nts(buf, tarfile.encoding, tarfile.errors)
+        elif self.type == GNUTYPE_LONGLINK:
+            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
+
+        # Remove redundant slashes from directories. This is to be consistent
+        # with frombuf().
+        if next.isdir():
+            next.name = next.name.removesuffix("/")
+
+        return next
+
+    def _proc_sparse(self, tarfile):
+        """Process a GNU sparse header plus extra headers.
+        """
+        # We already collected some sparse structures in frombuf().
+        structs, isextended, origsize = self._sparse_structs
+        del self._sparse_structs
+
+        # Collect sparse structures from extended header blocks.
+        while isextended:
+            buf = tarfile.fileobj.read(BLOCKSIZE)
+            pos = 0
+            for i in range(21):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                if offset and numbytes:
+                    structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[504])
+        self.sparse = structs
+
+        self.offset_data = tarfile.fileobj.tell()
+        tarfile.offset = self.offset_data + self._block(self.size)
+        self.size = origsize
+        return self
+
+    def _proc_pax(self, tarfile):
+        """Process an extended or global header as described in
+           POSIX.1-2008.
+        """
+        # Read the header information.
+        buf = tarfile.fileobj.read(self._block(self.size))
+
+        # A pax header stores supplemental information for either
+        # the following file (extended) or all following files
+        # (global).
+        if self.type == XGLTYPE:
+            pax_headers = tarfile.pax_headers
+        else:
+            pax_headers = tarfile.pax_headers.copy()
+
+        # Check if the pax header contains a hdrcharset field. This tells us
+        # the encoding of the path, linkpath, uname and gname fields. Normally,
+        # these fields are UTF-8 encoded but, since POSIX.1-2008, tar
+        # implementations are allowed to store them as raw binary strings if
+        # the translation to UTF-8 fails.
+        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
+        if match is not None:
+            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
+
+        # For the time being, we don't care about anything other than "BINARY".
+        # The only other value that is currently allowed by the standard is
+        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
+        hdrcharset = pax_headers.get("hdrcharset")
+        if hdrcharset == "BINARY":
+            encoding = tarfile.encoding
+        else:
+            encoding = "utf-8"
+
+        # Parse pax header information. A record looks like that:
+        # "%d %s=%s\n" % (length, keyword, value). length is the size
+        # of the complete record including the length field itself and
+        # the newline. keyword and value are both UTF-8 encoded strings.
+        regex = re.compile(br"(\d+) ([^=]+)=")
+        pos = 0
+        while match := regex.match(buf, pos):
+            length, keyword = match.groups()
+            length = int(length)
+            if length == 0:
+                raise InvalidHeaderError("invalid header")
+            value = buf[match.end(2) + 1:match.start(1) + length - 1]
+
+            # Normally, we could just use "utf-8" as the encoding and "strict"
+            # as the error handler, but we better not take the risk. For
+            # example, GNU tar <= 1.23 is known to store filenames it cannot
+            # translate to UTF-8 as raw strings (unfortunately without a
+            # hdrcharset=BINARY header).
+            # We first try the strict standard encoding, and if that fails we
+            # fall back on the user's encoding and error handler.
+            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
+                    tarfile.errors)
+            if keyword in PAX_NAME_FIELDS:
+                value = self._decode_pax_field(value, encoding, tarfile.encoding,
+                        tarfile.errors)
+            else:
+                value = self._decode_pax_field(value, "utf-8", "utf-8",
+                        tarfile.errors)
+
+            pax_headers[keyword] = value
+            pos += length
+
+        # Fetch the next header.
+        try:
+            next = self.fromtarfile(tarfile)
+        except HeaderError as e:
+            raise SubsequentHeaderError(str(e)) from None
+
+        # Process GNU sparse information.
+        if "GNU.sparse.map" in pax_headers:
+            # GNU extended sparse format version 0.1.
+            self._proc_gnusparse_01(next, pax_headers)
+
+        elif "GNU.sparse.size" in pax_headers:
+            # GNU extended sparse format version 0.0.
+            self._proc_gnusparse_00(next, pax_headers, buf)
+
+        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
+            # GNU extended sparse format version 1.0.
+            self._proc_gnusparse_10(next, pax_headers, tarfile)
+
+        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
+            # Patch the TarInfo object with the extended header info.
+            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
+            next.offset = self.offset
+
+            if "size" in pax_headers:
+                # If the extended header replaces the size field,
+                # we need to recalculate the offset where the next
+                # header starts.
+                offset = next.offset_data
+                if next.isreg() or next.type not in SUPPORTED_TYPES:
+                    offset += next._block(next.size)
+                tarfile.offset = offset
+
+        return next
+
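+    # A worked example of the pax record format parsed by _proc_pax() above
+    # (a sketch, not taken from a real archive): the record
+    # b"20 path=foo/bar.txt\n" is 20 bytes long in total, so its length
+    # field is 20, its keyword is "path" and its value is "foo/bar.txt".
+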
+    def _proc_gnusparse_00(self, next, pax_headers, buf):
+        """Process a GNU tar extended sparse header, version 0.0.
+        """
+        offsets = []
+        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
+            offsets.append(int(match.group(1)))
+        numbytes = []
+        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
+            numbytes.append(int(match.group(1)))
+        next.sparse = list(zip(offsets, numbytes))
+
+    def _proc_gnusparse_01(self, next, pax_headers):
+        """Process a GNU tar extended sparse header, version 0.1.
+        """
+        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
+        next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
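+    # Sketch of the 0.1 sparse map handled by _proc_gnusparse_01() above:
+    # a hypothetical pax value "0,512,1024,512" decodes to the
+    # offset/size pairs [(0, 512), (1024, 512)].
+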
+    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
+        """Process a GNU tar extended sparse header, version 1.0.
+        """
+        sparse = []
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        fields, buf = buf.split(b"\n", 1)
+        fields = int(fields)
+        while len(sparse) < fields * 2:
+            if b"\n" not in buf:
+                buf += tarfile.fileobj.read(BLOCKSIZE)
+            number, buf = buf.split(b"\n", 1)
+            sparse.append(int(number))
+        next.offset_data = tarfile.fileobj.tell()
+        next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+    def _apply_pax_info(self, pax_headers, encoding, errors):
+        """Replace fields with supplemental information from a previous
+           pax extended or global header.
+        """
+        for keyword, value in pax_headers.items():
+            if keyword == "GNU.sparse.name":
+                setattr(self, "path", value)
+            elif keyword == "GNU.sparse.size":
+                setattr(self, "size", int(value))
+            elif keyword == "GNU.sparse.realsize":
+                setattr(self, "size", int(value))
+            elif keyword in PAX_FIELDS:
+                if keyword in PAX_NUMBER_FIELDS:
+                    try:
+                        value = PAX_NUMBER_FIELDS[keyword](value)
+                    except ValueError:
+                        value = 0
+                if keyword == "path":
+                    value = value.rstrip("/")
+                setattr(self, keyword, value)
+
+        self.pax_headers = pax_headers.copy()
+
+    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
+        """Decode a single field from a pax record.
+        """
+        try:
+            return value.decode(encoding, "strict")
+        except UnicodeDecodeError:
+            return value.decode(fallback_encoding, fallback_errors)
+
+    def _block(self, count):
+        """Round up a byte count by BLOCKSIZE and return it,
+           e.g. _block(834) => 1024.
+        """
+        blocks, remainder = divmod(count, BLOCKSIZE)
+        if remainder:
+            blocks += 1
+        return blocks * BLOCKSIZE
+
+    def isreg(self):
+        'Return True if the TarInfo object is a regular file.'
+        return self.type in REGULAR_TYPES
+
+    def isfile(self):
+        'Return True if the TarInfo object is a regular file.'
+        return self.isreg()
+
+    def isdir(self):
+        'Return True if it is a directory.'
+        return self.type == DIRTYPE
+
+    def issym(self):
+        'Return True if it is a symbolic link.'
+        return self.type == SYMTYPE
+
+    def islnk(self):
+        'Return True if it is a hard link.'
+        return self.type == LNKTYPE
+
+    def ischr(self):
+        'Return True if it is a character device.'
+        return self.type == CHRTYPE
+
+    def isblk(self):
+        'Return True if it is a block device.'
+        return self.type == BLKTYPE
+
+    def isfifo(self):
+        'Return True if it is a FIFO.'
+        return self.type == FIFOTYPE
+
+    def issparse(self):
+        'Return True if the member is a sparse file.'
+        return self.sparse is not None
+
+    def isdev(self):
+        'Return True if it is one of character device, block device or FIFO.'
+        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
+# class TarInfo
+
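+# Usage sketch for the type predicates above (assumes a TarInfo ``ti``
+# obtained from an archive):
+#
+#     if ti.isreg():
+#         ...                        # member carries file data
+#     elif ti.issym() or ti.islnk():
+#         ...                        # member is a link; see ti.linkname
+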
+class TarFile(object):
+    """The TarFile Class provides an interface to tar archives.
+    """
+
+    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
+
+    dereference = False         # If true, add content of linked file to the
+                                # tar file, else the link.
+
+    ignore_zeros = False        # If true, skips empty or invalid blocks and
+                                # continues processing.
+
+    errorlevel = 1              # If 0, fatal errors only appear in debug
+                                # messages (if debug >= 0). If > 0, errors
+                                # are passed to the caller as exceptions.
+
+    format = DEFAULT_FORMAT     # The format to use when creating an archive.
+
+    encoding = ENCODING         # Encoding for 8-bit character strings.
+
+    errors = None               # Error handler for unicode conversion.
+
+    tarinfo = TarInfo           # The default TarInfo class to use.
+
+    fileobject = ExFileObject   # The file-object for extractfile().
+
+    extraction_filter = None    # The default filter for extraction.
+
+    def __init__(self, name=None, mode="r", fileobj=None, format=None,
+            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
+            errors="surrogateescape", pax_headers=None, debug=None,
+            errorlevel=None, copybufsize=None):
+        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+           read from an existing archive, 'a' to append data to an existing
+           file or 'w' to create a new file overwriting an existing one. `mode'
+           defaults to 'r'.
+           If `fileobj' is given, it is used for reading or writing data. If it
+           can be determined, `mode' is overridden by `fileobj's mode.
+           `fileobj' is not closed when TarFile is closed.
+        """
+        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
+        if mode not in modes:
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+        self.mode = mode
+        self._mode = modes[mode]
+
+        if not fileobj:
+            if self.mode == "a" and not os.path.exists(name):
+                # Create nonexistent files in append mode.
+                self.mode = "w"
+                self._mode = "wb"
+            fileobj = bltn_open(name, self._mode)
+            self._extfileobj = False
+        else:
+            if (name is None and hasattr(fileobj, "name") and
+                isinstance(fileobj.name, (str, bytes))):
+                name = fileobj.name
+            if hasattr(fileobj, "mode"):
+                self._mode = fileobj.mode
+            self._extfileobj = True
+        self.name = os.path.abspath(name) if name else None
+        self.fileobj = fileobj
+
+        # Init attributes.
+        if format is not None:
+            self.format = format
+        if tarinfo is not None:
+            self.tarinfo = tarinfo
+        if dereference is not None:
+            self.dereference = dereference
+        if ignore_zeros is not None:
+            self.ignore_zeros = ignore_zeros
+        if encoding is not None:
+            self.encoding = encoding
+        self.errors = errors
+
+        if pax_headers is not None and self.format == PAX_FORMAT:
+            self.pax_headers = pax_headers
+        else:
+            self.pax_headers = {}
+
+        if debug is not None:
+            self.debug = debug
+        if errorlevel is not None:
+            self.errorlevel = errorlevel
+
+        # Init datastructures.
+        self.copybufsize = copybufsize
+        self.closed = False
+        self.members = []       # list of members as TarInfo objects
+        self._loaded = False    # flag if all members have been read
+        self.offset = self.fileobj.tell()
+                                # current position in the archive file
+        self.inodes = {}        # dictionary caching the inodes of
+                                # archive members already added
+
+        try:
+            if self.mode == "r":
+                self.firstmember = None
+                self.firstmember = self.next()
+
+            if self.mode == "a":
+                # Move to the end of the archive,
+                # before the first empty block.
+                while True:
+                    self.fileobj.seek(self.offset)
+                    try:
+                        tarinfo = self.tarinfo.fromtarfile(self)
+                        self.members.append(tarinfo)
+                    except EOFHeaderError:
+                        self.fileobj.seek(self.offset)
+                        break
+                    except HeaderError as e:
+                        raise ReadError(str(e)) from None
+
+            if self.mode in ("a", "w", "x"):
+                self._loaded = True
+
+                if self.pax_headers:
+                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
+                    self.fileobj.write(buf)
+                    self.offset += len(buf)
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
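+    # Constructor sketch for the external-fileobj case above (assumes
+    # ``buf`` holds a complete tar archive in memory):
+    #
+    #     import io
+    #     tf = TarFile(fileobj=io.BytesIO(buf))   # mode defaults to 'r'
+    #     ...
+    #     tf.close()       # the external io.BytesIO is not closed here
+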
+    #--------------------------------------------------------------------------
+    # Below are the classmethods which act as alternate constructors to the
+    # TarFile class. The open() method is the only one that is needed for
+    # public use; it is the "super"-constructor and is able to select an
+    # adequate "sub"-constructor for a particular compression using the mapping
+    # from OPEN_METH.
+    #
+    # This concept allows one to subclass TarFile without losing the comfort of
+    # the super-constructor. A sub-constructor is registered and made available
+    # by adding it to the mapping in OPEN_METH.
+
+    @classmethod
+    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
+        r"""Open a tar archive for reading, writing or appending. Return
+           an appropriate TarFile class.
+
+           mode:
+           'r' or 'r:\*' open for reading with transparent compression
+           'r:'         open for reading exclusively uncompressed
+           'r:gz'       open for reading with gzip compression
+           'r:bz2'      open for reading with bzip2 compression
+           'r:xz'       open for reading with lzma compression
+           'a' or 'a:'  open for appending, creating the file if necessary
+           'w' or 'w:'  open for writing without compression
+           'w:gz'       open for writing with gzip compression
+           'w:bz2'      open for writing with bzip2 compression
+           'w:xz'       open for writing with lzma compression
+
+           'x' or 'x:'  create a tarfile exclusively without compression, raise
+                        an exception if the file already exists
+           'x:gz'       create a gzip compressed tarfile, raise an exception
+                        if the file already exists
+           'x:bz2'      create a bzip2 compressed tarfile, raise an exception
+                        if the file already exists
+           'x:xz'       create an lzma compressed tarfile, raise an exception
+                        if the file already exists
+
+           'r|\*'        open a stream of tar blocks with transparent compression
+           'r|'         open an uncompressed stream of tar blocks for reading
+           'r|gz'       open a gzip compressed stream of tar blocks
+           'r|bz2'      open a bzip2 compressed stream of tar blocks
+           'r|xz'       open an lzma compressed stream of tar blocks
+           'w|'         open an uncompressed stream for writing
+           'w|gz'       open a gzip compressed stream for writing
+           'w|bz2'      open a bzip2 compressed stream for writing
+           'w|xz'       open an lzma compressed stream for writing
+        """
+
+        if not name and not fileobj:
+            raise ValueError("nothing to open")
+
+        if mode in ("r", "r:*"):
+            # Find out which *open() is appropriate for opening the file.
+            def not_compressed(comptype):
+                return cls.OPEN_METH[comptype] == 'taropen'
+            error_msgs = []
+            for comptype in sorted(cls.OPEN_METH, key=not_compressed):
+                func = getattr(cls, cls.OPEN_METH[comptype])
+                if fileobj is not None:
+                    saved_pos = fileobj.tell()
+                try:
+                    return func(name, "r", fileobj, **kwargs)
+                except (ReadError, CompressionError) as e:
+                    error_msgs.append(f'- method {comptype}: {e!r}')
+                    if fileobj is not None:
+                        fileobj.seek(saved_pos)
+                    continue
+            error_msgs_summary = '\n'.join(error_msgs)
+            raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
+
+        elif ":" in mode:
+            filemode, comptype = mode.split(":", 1)
+            filemode = filemode or "r"
+            comptype = comptype or "tar"
+
+            # Select the *open() function according to
+            # given compression.
+            if comptype in cls.OPEN_METH:
+                func = getattr(cls, cls.OPEN_METH[comptype])
+            else:
+                raise CompressionError("unknown compression type %r" % comptype)
+            return func(name, filemode, fileobj, **kwargs)
+
+        elif "|" in mode:
+            filemode, comptype = mode.split("|", 1)
+            filemode = filemode or "r"
+            comptype = comptype or "tar"
+
+            if filemode not in ("r", "w"):
+                raise ValueError("mode must be 'r' or 'w'")
+
+            compresslevel = kwargs.pop("compresslevel", 9)
+            stream = _Stream(name, filemode, comptype, fileobj, bufsize,
+                             compresslevel)
+            try:
+                t = cls(name, filemode, stream, **kwargs)
+            except:
+                stream.close()
+                raise
+            t._extfileobj = False
+            return t
+
+        elif mode in ("a", "w", "x"):
+            return cls.taropen(name, mode, fileobj, **kwargs)
+
+        raise ValueError("undiscernible mode")
+
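+    # Usage sketch for open() (assumes a file "example.tar.gz" exists; the
+    # "r:*" mode autodetects the compression as documented above):
+    #
+    #     with TarFile.open("example.tar.gz", "r:*") as tf:
+    #         print(tf.getnames())
+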
+    @classmethod
+    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
+        """Open uncompressed tar archive name for reading or writing.
+        """
+        if mode not in ("r", "a", "w", "x"):
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+        return cls(name, mode, fileobj, **kwargs)
+
+    @classmethod
+    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+        """Open gzip compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from gzip import GzipFile
+        except ImportError:
+            raise CompressionError("gzip module is not available") from None
+
+        try:
+            fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
+        except OSError as e:
+            if fileobj is not None and mode == 'r':
+                raise ReadError("not a gzip file") from e
+            raise
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except OSError as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not a gzip file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    @classmethod
+    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+        """Open bzip2 compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from bz2 import BZ2File
+        except ImportError:
+            raise CompressionError("bz2 module is not available") from None
+
+        fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except (OSError, EOFError) as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not a bzip2 file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    @classmethod
+    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
+        """Open lzma compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from lzma import LZMAFile, LZMAError
+        except ImportError:
+            raise CompressionError("lzma module is not available") from None
+
+        fileobj = LZMAFile(fileobj or name, mode, preset=preset)
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except (LZMAError, EOFError) as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not an lzma file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    # All *open() methods are registered here.
+    OPEN_METH = {
+        "tar": "taropen",   # uncompressed tar
+        "gz":  "gzopen",    # gzip compressed tar
+        "bz2": "bz2open",   # bzip2 compressed tar
+        "xz":  "xzopen"     # lzma compressed tar
+    }
+
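+    # Subclassing sketch for the OPEN_METH mechanism described above (the
+    # ``zstopen`` name and its body are hypothetical, not part of this
+    # module):
+    #
+    #     class ZstdTarFile(TarFile):
+    #         @classmethod
+    #         def zstopen(cls, name, mode="r", fileobj=None, **kwargs):
+    #             ...  # wrap name/fileobj in a zstd file object, then
+    #                  # return cls.taropen(name, mode, fileobj, **kwargs)
+    #         OPEN_METH = {**TarFile.OPEN_METH, "zst": "zstopen"}
+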
+    #--------------------------------------------------------------------------
+    # The public methods which TarFile provides:
+
+    def close(self):
+        """Close the TarFile. In write-mode, two finishing zero blocks are
+           appended to the archive.
+        """
+        if self.closed:
+            return
+
+        self.closed = True
+        try:
+            if self.mode in ("a", "w", "x"):
+                self.fileobj.write(NUL * (BLOCKSIZE * 2))
+                self.offset += (BLOCKSIZE * 2)
+                # fill up the end with zero-blocks
+                # (like option -b20 for tar does)
+                blocks, remainder = divmod(self.offset, RECORDSIZE)
+                if remainder > 0:
+                    self.fileobj.write(NUL * (RECORDSIZE - remainder))
+        finally:
+            if not self._extfileobj:
+                self.fileobj.close()
+
+    def getmember(self, name):
+        """Return a TarInfo object for member ``name``. If ``name`` can not be
+           found in the archive, KeyError is raised. If a member occurs more
+           than once in the archive, its last occurrence is assumed to be the
+           most up-to-date version.
+        """
+        tarinfo = self._getmember(name.rstrip('/'))
+        if tarinfo is None:
+            raise KeyError("filename %r not found" % name)
+        return tarinfo
+
+    def getmembers(self):
+        """Return the members of the archive as a list of TarInfo objects. The
+           list has the same order as the members in the archive.
+        """
+        self._check()
+        if not self._loaded:    # if we want to obtain a list of
+            self._load()        # all members, we first have to
+                                # scan the whole archive.
+        return self.members
+
+    def getnames(self):
+        """Return the members of the archive as a list of their names. It has
+           the same order as the list returned by getmembers().
+        """
+        return [tarinfo.name for tarinfo in self.getmembers()]
+
+    def gettarinfo(self, name=None, arcname=None, fileobj=None):
+        """Create a TarInfo object from the result of os.stat or equivalent
+           on an existing file. The file is either named by ``name``, or
+           specified as a file object ``fileobj`` with a file descriptor. If
+           given, ``arcname`` specifies an alternative name for the file in the
+           archive, otherwise, the name is taken from the 'name' attribute of
+           'fileobj', or the 'name' argument. The name should be a text
+           string.
+        """
+        self._check("awx")
+
+        # When fileobj is given, replace name by
+        # fileobj's real name.
+        if fileobj is not None:
+            name = fileobj.name
+
+        # Building the name of the member in the archive.
+        # Backward slashes are converted to forward slashes;
+        # absolute paths are turned into relative paths.
+        if arcname is None:
+            arcname = name
+        drv, arcname = os.path.splitdrive(arcname)
+        arcname = arcname.replace(os.sep, "/")
+        arcname = arcname.lstrip("/")
+
+        # Now, fill the TarInfo object with
+        # information specific for the file.
+        tarinfo = self.tarinfo()
+        tarinfo.tarfile = self  # Not needed
+
+        # Use os.stat or os.lstat, depending on whether symlinks should be resolved.
+        if fileobj is None:
+            if not self.dereference:
+                statres = os.lstat(name)
+            else:
+                statres = os.stat(name)
+        else:
+            statres = os.fstat(fileobj.fileno())
+        linkname = ""
+
+        stmd = statres.st_mode
+        if stat.S_ISREG(stmd):
+            inode = (statres.st_ino, statres.st_dev)
+            if not self.dereference and statres.st_nlink > 1 and \
+                    inode in self.inodes and arcname != self.inodes[inode]:
+                # Is it a hardlink to an already
+                # archived file?
+                type = LNKTYPE
+                linkname = self.inodes[inode]
+            else:
+                # The inode is added only if it's valid.
+                # For win32 it is always 0.
+                type = REGTYPE
+                if inode[0]:
+                    self.inodes[inode] = arcname
+        elif stat.S_ISDIR(stmd):
+            type = DIRTYPE
+        elif stat.S_ISFIFO(stmd):
+            type = FIFOTYPE
+        elif stat.S_ISLNK(stmd):
+            type = SYMTYPE
+            linkname = os.readlink(name)
+        elif stat.S_ISCHR(stmd):
+            type = CHRTYPE
+        elif stat.S_ISBLK(stmd):
+            type = BLKTYPE
+        else:
+            return None
+
+        # Fill the TarInfo object with all
+        # information we can get.
+        tarinfo.name = arcname
+        tarinfo.mode = stmd
+        tarinfo.uid = statres.st_uid
+        tarinfo.gid = statres.st_gid
+        if type == REGTYPE:
+            tarinfo.size = statres.st_size
+        else:
+            tarinfo.size = 0
+        tarinfo.mtime = statres.st_mtime
+        tarinfo.type = type
+        tarinfo.linkname = linkname
+        if pwd:
+            try:
+                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+            except KeyError:
+                pass
+        if grp:
+            try:
+                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+            except KeyError:
+                pass
+
+        if type in (CHRTYPE, BLKTYPE):
+            if hasattr(os, "major") and hasattr(os, "minor"):
+                tarinfo.devmajor = os.major(statres.st_rdev)
+                tarinfo.devminor = os.minor(statres.st_rdev)
+        return tarinfo
+
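+    # Usage sketch for gettarinfo() (assumes ``tf`` is open for writing and
+    # "data.txt" exists; pairing it with addfile() is the manual
+    # equivalent of add()):
+    #
+    #     ti = tf.gettarinfo("data.txt", arcname="pkg/data.txt")
+    #     with bltn_open("data.txt", "rb") as f:
+    #         tf.addfile(ti, f)
+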
+    def list(self, verbose=True, *, members=None):
+        """Print a table of contents to sys.stdout. If ``verbose`` is False, only
+           the names of the members are printed. If it is True, an `ls -l'-like
+           output is produced. ``members`` is optional and must be a subset of the
+           list returned by getmembers().
+        """
+        self._check()
+
+        if members is None:
+            members = self
+        for tarinfo in members:
+            if verbose:
+                if tarinfo.mode is None:
+                    _safe_print("??????????")
+                else:
+                    _safe_print(stat.filemode(tarinfo.mode))
+                _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
+                                       tarinfo.gname or tarinfo.gid))
+                if tarinfo.ischr() or tarinfo.isblk():
+                    _safe_print("%10s" %
+                            ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
+                else:
+                    _safe_print("%10d" % tarinfo.size)
+                if tarinfo.mtime is None:
+                    _safe_print("????-??-?? ??:??:??")
+                else:
+                    _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
+                                % time.localtime(tarinfo.mtime)[:6])
+
+            _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
+
+            if verbose:
+                if tarinfo.issym():
+                    _safe_print("-> " + tarinfo.linkname)
+                if tarinfo.islnk():
+                    _safe_print("link to " + tarinfo.linkname)
+            print()
+
+    def add(self, name, arcname=None, recursive=True, *, filter=None):
+        """Add the file ``name`` to the archive. ``name`` may be any type of file
+           (directory, fifo, symbolic link, etc.). If given, ``arcname``
+           specifies an alternative name for the file in the archive.
+           Directories are added recursively by default. This can be avoided by
+           setting ``recursive`` to False. ``filter`` is a function
+           that expects a TarInfo object argument and returns the changed
+           TarInfo object; if it returns None, the TarInfo object will be
+           excluded from the archive.
+        """
+        self._check("awx")
+
+        if arcname is None:
+            arcname = name
+
+        # Skip if somebody tries to archive the archive...
+        if self.name is not None and os.path.abspath(name) == self.name:
+            self._dbg(2, "tarfile: Skipped %r" % name)
+            return
+
+        self._dbg(1, name)
+
+        # Create a TarInfo object from the file.
+        tarinfo = self.gettarinfo(name, arcname)
+
+        if tarinfo is None:
+            self._dbg(1, "tarfile: Unsupported type %r" % name)
+            return
+
+        # Change or exclude the TarInfo object.
+        if filter is not None:
+            tarinfo = filter(tarinfo)
+            if tarinfo is None:
+                self._dbg(2, "tarfile: Excluded %r" % name)
+                return
+
+        # Append the tar header and data to the archive.
+        if tarinfo.isreg():
+            with bltn_open(name, "rb") as f:
+                self.addfile(tarinfo, f)
+
+        elif tarinfo.isdir():
+            self.addfile(tarinfo)
+            if recursive:
+                for f in sorted(os.listdir(name)):
+                    self.add(os.path.join(name, f), os.path.join(arcname, f),
+                            recursive, filter=filter)
+
+        else:
+            self.addfile(tarinfo)
+
+    def addfile(self, tarinfo, fileobj=None):
+        """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
+           given, it should be a binary file, and tarinfo.size bytes are read
+           from it and added to the archive. You can create TarInfo objects
+           directly, or by using gettarinfo().
+        """
+        self._check("awx")
+
+        tarinfo = copy.copy(tarinfo)
+
+        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
+        self.fileobj.write(buf)
+        self.offset += len(buf)
+        bufsize = self.copybufsize
+        # If there's data to follow, append it.
+        if fileobj is not None:
+            copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
+            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+            if remainder > 0:
+                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+                blocks += 1
+            self.offset += blocks * BLOCKSIZE
+
+        self.members.append(tarinfo)
+
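+    # Sketch of adding in-memory data via addfile() (the member name and
+    # payload are made up):
+    #
+    #     import io
+    #     data = b"hello"
+    #     ti = TarInfo("greeting.txt")
+    #     ti.size = len(data)
+    #     tf.addfile(ti, io.BytesIO(data))
+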
+    def _get_filter_function(self, filter):
+        if filter is None:
+            filter = self.extraction_filter
+            if filter is None:
+                warnings.warn(
+                    'Python 3.14 will, by default, filter extracted tar '
+                    + 'archives and reject files or modify their metadata. '
+                    + 'Use the filter argument to control this behavior.',
+                    DeprecationWarning)
+                return fully_trusted_filter
+            if isinstance(filter, str):
+                raise TypeError(
+                    'String names are not supported for '
+                    + 'TarFile.extraction_filter. Use a function such as '
+                    + 'tarfile.data_filter directly.')
+            return filter
+        if callable(filter):
+            return filter
+        try:
+            return _NAMED_FILTERS[filter]
+        except KeyError:
+            raise ValueError(f"filter {filter!r} not found") from None
+
+    def extractall(self, path=".", members=None, *, numeric_owner=False,
+                   filter=None):
+        """Extract all members from the archive to the current working
+           directory and set owner, modification time and permissions on
+           directories afterwards. `path' specifies a different directory
+           to extract to. `members' is optional and must be a subset of the
+           list returned by getmembers(). If `numeric_owner` is True, only
+           the numbers for user/group names are used and not the names.
+
+           The `filter` function will be called on each member just
+           before extraction.
+           It can return a changed TarInfo or None to skip the member.
+           String names of common filters are accepted.
+        """
+        directories = []
+
+        filter_function = self._get_filter_function(filter)
+        if members is None:
+            members = self
+
+        for member in members:
+            tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+            if tarinfo is None:
+                continue
+            if tarinfo.isdir():
+                # For directories, delay setting attributes until later,
+                # since permissions can interfere with extraction and
+                # extracting contents can reset mtime.
+                directories.append(tarinfo)
+            self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
+                              numeric_owner=numeric_owner)
+
+        # Reverse sort directories.
+        directories.sort(key=lambda a: a.name, reverse=True)
+
+        # Set correct owner, mtime and filemode on directories.
+        for tarinfo in directories:
+            dirpath = os.path.join(path, tarinfo.name)
+            try:
+                self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
+                self.utime(tarinfo, dirpath)
+                self.chmod(tarinfo, dirpath)
+            except ExtractError as e:
+                self._handle_nonfatal_error(e)
+
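+    # Usage sketch for extractall() with a named filter (assumes ``tf`` is
+    # open for reading and "out" is a writable directory):
+    #
+    #     tf.extractall(path="out", filter="data")
+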
+    def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
+                filter=None):
+        """Extract a member from the archive to the current working directory,
+           using its full name. Its file information is extracted as accurately
+           as possible. `member' may be a filename or a TarInfo object. You can
+           specify a different directory using `path'. File attributes (owner,
+           mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
+           is True, only the numbers for user/group names are used and not
+           the names.
+
+           The `filter` function will be called before extraction.
+           It can return a changed TarInfo or None to skip the member.
+           String names of common filters are accepted.
+        """
+        filter_function = self._get_filter_function(filter)
+        tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+        if tarinfo is not None:
+            self._extract_one(tarinfo, path, set_attrs, numeric_owner)
+
+    def _get_extract_tarinfo(self, member, filter_function, path):
+        """Get filtered TarInfo (or None) from member, which might be a str"""
+        if isinstance(member, str):
+            tarinfo = self.getmember(member)
+        else:
+            tarinfo = member
+
+        unfiltered = tarinfo
+        try:
+            tarinfo = filter_function(tarinfo, path)
+        except (OSError, FilterError) as e:
+            self._handle_fatal_error(e)
+        except ExtractError as e:
+            self._handle_nonfatal_error(e)
+        if tarinfo is None:
+            self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
+            return None
+        # Prepare the link target for makelink().
+        if tarinfo.islnk():
+            tarinfo = copy.copy(tarinfo)
+            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+        return tarinfo
+
+    def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
+        """Extract from filtered tarinfo to disk"""
+        self._check("r")
+
+        try:
+            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
+                                 set_attrs=set_attrs,
+                                 numeric_owner=numeric_owner)
+        except OSError as e:
+            self._handle_fatal_error(e)
+        except ExtractError as e:
+            self._handle_nonfatal_error(e)
+
+    def _handle_nonfatal_error(self, e):
+        """Handle non-fatal error (ExtractError) according to errorlevel"""
+        if self.errorlevel > 1:
+            raise
+        else:
+            self._dbg(1, "tarfile: %s" % e)
+
+    def _handle_fatal_error(self, e):
+        """Handle "fatal" error according to self.errorlevel"""
+        if self.errorlevel > 0:
+            raise
+        elif isinstance(e, OSError):
+            if e.filename is None:
+                self._dbg(1, "tarfile: %s" % e.strerror)
+            else:
+                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+        else:
+            self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
+
+    def extractfile(self, member):
+        """Extract a member from the archive as a file object. ``member`` may be
+           a filename or a TarInfo object. If ``member`` is a regular file or
+           a link, an io.BufferedReader object is returned. For all other
+           existing members, None is returned. If ``member`` does not appear
+           in the archive, KeyError is raised.
+        """
+        self._check("r")
+
+        if isinstance(member, str):
+            tarinfo = self.getmember(member)
+        else:
+            tarinfo = member
+
+        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
+            # Members with unknown types are treated as regular files.
+            return self.fileobject(self, tarinfo)
+
+        elif tarinfo.islnk() or tarinfo.issym():
+            if isinstance(self.fileobj, _Stream):
+                # A small but ugly workaround for the case that someone tries
+                # to extract a (sym)link as a file-object from a non-seekable
+                # stream of tar blocks.
+                raise StreamError("cannot extract (sym)link as file object")
+            else:
+                # A (sym)link's file object is its target's file object.
+                return self.extractfile(self._find_link_target(tarinfo))
+        else:
+            # If there's no data associated with the member (directory, chrdev,
+            # blkdev, etc.), return None instead of a file object.
+            return None
+
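+    # Usage sketch for extractfile() (assumes the archive contains a
+    # regular-file member named "pkg/data.txt"):
+    #
+    #     f = tf.extractfile("pkg/data.txt")
+    #     if f is not None:
+    #         content = f.read()
+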
+    def _extract_member(self, tarinfo, targetpath, set_attrs=True,
+                        numeric_owner=False):
+        """Extract the TarInfo object tarinfo to a physical
+           file called targetpath.
+        """
+        # Fetch the TarInfo object for the given name
+        # and build the destination pathname, replacing
+        # forward slashes with platform-specific separators.
+        targetpath = targetpath.rstrip("/")
+        targetpath = targetpath.replace("/", os.sep)
+
+        # Create all upper directories.
+        upperdirs = os.path.dirname(targetpath)
+        if upperdirs and not os.path.exists(upperdirs):
+            # Create directories that are not part of the archive with
+            # default permissions.
+            os.makedirs(upperdirs)
+
+        if tarinfo.islnk() or tarinfo.issym():
+            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
+        else:
+            self._dbg(1, tarinfo.name)
+
+        if tarinfo.isreg():
+            self.makefile(tarinfo, targetpath)
+        elif tarinfo.isdir():
+            self.makedir(tarinfo, targetpath)
+        elif tarinfo.isfifo():
+            self.makefifo(tarinfo, targetpath)
+        elif tarinfo.ischr() or tarinfo.isblk():
+            self.makedev(tarinfo, targetpath)
+        elif tarinfo.islnk() or tarinfo.issym():
+            self.makelink(tarinfo, targetpath)
+        elif tarinfo.type not in SUPPORTED_TYPES:
+            self.makeunknown(tarinfo, targetpath)
+        else:
+            self.makefile(tarinfo, targetpath)
+
+        if set_attrs:
+            self.chown(tarinfo, targetpath, numeric_owner)
+            if not tarinfo.issym():
+                self.chmod(tarinfo, targetpath)
+                self.utime(tarinfo, targetpath)
+
+    #--------------------------------------------------------------------------
+    # Below are the different file methods. They are called via
+    # _extract_member() when extract() is called. They can be replaced in a
+    # subclass to implement other functionality.
+
+    def makedir(self, tarinfo, targetpath):
+        """Make a directory called targetpath.
+        """
+        try:
+            if tarinfo.mode is None:
+                # Use the system's default mode
+                os.mkdir(targetpath)
+            else:
+                # Use a safe mode for the directory, the real mode is set
+                # later in _extract_member().
+                os.mkdir(targetpath, 0o700)
+        except FileExistsError:
+            if not os.path.isdir(targetpath):
+                raise
+
+    def makefile(self, tarinfo, targetpath):
+        """Make a file called targetpath.
+        """
+        source = self.fileobj
+        source.seek(tarinfo.offset_data)
+        bufsize = self.copybufsize
+        with bltn_open(targetpath, "wb") as target:
+            if tarinfo.sparse is not None:
+                for offset, size in tarinfo.sparse:
+                    target.seek(offset)
+                    copyfileobj(source, target, size, ReadError, bufsize)
+                target.seek(tarinfo.size)
+                target.truncate()
+            else:
+                copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
+
+    def makeunknown(self, tarinfo, targetpath):
+        """Make a file from a TarInfo object with an unknown type
+           at targetpath.
+        """
+        self.makefile(tarinfo, targetpath)
+        self._dbg(1, "tarfile: Unknown file type %r, " \
+                     "extracted as regular file." % tarinfo.type)
+
+    def makefifo(self, tarinfo, targetpath):
+        """Make a fifo called targetpath.
+        """
+        if hasattr(os, "mkfifo"):
+            os.mkfifo(targetpath)
+        else:
+            raise ExtractError("fifo not supported by system")
+
+    def makedev(self, tarinfo, targetpath):
+        """Make a character or block device called targetpath.
+        """
+        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
+            raise ExtractError("special devices not supported by system")
+
+        mode = tarinfo.mode
+        if mode is None:
+            # Use mknod's default
+            mode = 0o600
+        if tarinfo.isblk():
+            mode |= stat.S_IFBLK
+        else:
+            mode |= stat.S_IFCHR
+
+        os.mknod(targetpath, mode,
+                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
+
+    def makelink(self, tarinfo, targetpath):
+        """Make a (symbolic) link called targetpath. If it cannot be created
+           (platform limitation), we try to make a copy of the referenced file
+           instead of a link.
+        """
+        try:
+            # For systems that support symbolic and hard links.
+            if tarinfo.issym():
+                if os.path.lexists(targetpath):
+                    # Avoid FileExistsError on following os.symlink.
+                    os.unlink(targetpath)
+                os.symlink(tarinfo.linkname, targetpath)
+            else:
+                if os.path.exists(tarinfo._link_target):
+                    os.link(tarinfo._link_target, targetpath)
+                else:
+                    self._extract_member(self._find_link_target(tarinfo),
+                                         targetpath)
+        except symlink_exception:
+            try:
+                self._extract_member(self._find_link_target(tarinfo),
+                                     targetpath)
+            except KeyError:
+                raise ExtractError("unable to resolve link inside archive") from None
+
+    def chown(self, tarinfo, targetpath, numeric_owner):
+        """Set owner of targetpath according to tarinfo. If numeric_owner
+           is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
+           is False, fall back to .gid/.uid when the search based on name
+           fails.
+        """
+        if hasattr(os, "geteuid") and os.geteuid() == 0:
+            # We have to be root to do so.
+            g = tarinfo.gid
+            u = tarinfo.uid
+            if not numeric_owner:
+                try:
+                    if grp and tarinfo.gname:
+                        g = grp.getgrnam(tarinfo.gname)[2]
+                except KeyError:
+                    pass
+                try:
+                    if pwd and tarinfo.uname:
+                        u = pwd.getpwnam(tarinfo.uname)[2]
+                except KeyError:
+                    pass
+            if g is None:
+                g = -1
+            if u is None:
+                u = -1
+            try:
+                if tarinfo.issym() and hasattr(os, "lchown"):
+                    os.lchown(targetpath, u, g)
+                else:
+                    os.chown(targetpath, u, g)
+            except OSError as e:
+                raise ExtractError("could not change owner") from e
+
+    def chmod(self, tarinfo, targetpath):
+        """Set file permissions of targetpath according to tarinfo.
+        """
+        if tarinfo.mode is None:
+            return
+        try:
+            os.chmod(targetpath, tarinfo.mode)
+        except OSError as e:
+            raise ExtractError("could not change mode") from e
+
+    def utime(self, tarinfo, targetpath):
+        """Set modification time of targetpath according to tarinfo.
+        """
+        mtime = tarinfo.mtime
+        if mtime is None:
+            return
+        if not hasattr(os, 'utime'):
+            return
+        try:
+            os.utime(targetpath, (mtime, mtime))
+        except OSError as e:
+            raise ExtractError("could not change modification time") from e
+
+    #--------------------------------------------------------------------------
+    def next(self):
+        """Return the next member of the archive as a TarInfo object, when
+           TarFile is opened for reading. Return None if there are no more
+           members available.
+        """
+        self._check("ra")
+        if self.firstmember is not None:
+            m = self.firstmember
+            self.firstmember = None
+            return m
+
+        # Advance the file pointer.
+        if self.offset != self.fileobj.tell():
+            if self.offset == 0:
+                return None
+            self.fileobj.seek(self.offset - 1)
+            if not self.fileobj.read(1):
+                raise ReadError("unexpected end of data")
+
+        # Read the next block.
+        tarinfo = None
+        while True:
+            try:
+                tarinfo = self.tarinfo.fromtarfile(self)
+            except EOFHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, "0x%X: %s" % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+            except InvalidHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, "0x%X: %s" % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+                elif self.offset == 0:
+                    raise ReadError(str(e)) from None
+            except EmptyHeaderError:
+                if self.offset == 0:
+                    raise ReadError("empty file") from None
+            except TruncatedHeaderError as e:
+                if self.offset == 0:
+                    raise ReadError(str(e)) from None
+            except SubsequentHeaderError as e:
+                raise ReadError(str(e)) from None
+            except Exception as e:
+                try:
+                    import zlib
+                    if isinstance(e, zlib.error):
+                        raise ReadError(f'zlib error: {e}') from None
+                    else:
+                        raise e
+                except ImportError:
+                    raise e
+            break
+
+        if tarinfo is not None:
+            self.members.append(tarinfo)
+        else:
+            self._loaded = True
+
+        return tarinfo
+
+    #--------------------------------------------------------------------------
+    # Little helper methods:
+
+    def _getmember(self, name, tarinfo=None, normalize=False):
+        """Find an archive member by name from bottom to top.
+           If tarinfo is given, it is used as the starting point.
+        """
+        # Ensure that all members have been loaded.
+        members = self.getmembers()
+
+        # Limit the member search list up to tarinfo.
+        skipping = False
+        if tarinfo is not None:
+            try:
+                index = members.index(tarinfo)
+            except ValueError:
+                # The given starting point might be a (modified) copy.
+                # We'll later skip members until we find an equivalent.
+                skipping = True
+            else:
+                # Happy fast path
+                members = members[:index]
+
+        if normalize:
+            name = os.path.normpath(name)
+
+        for member in reversed(members):
+            if skipping:
+                if tarinfo.offset == member.offset:
+                    skipping = False
+                continue
+            if normalize:
+                member_name = os.path.normpath(member.name)
+            else:
+                member_name = member.name
+
+            if name == member_name:
+                return member
+
+        if skipping:
+            # Starting point was not found
+            raise ValueError(tarinfo)
+
+    def _load(self):
+        """Read through the entire archive file and look for readable
+           members.
+        """
+        while self.next() is not None:
+            pass
+        self._loaded = True
+
+    def _check(self, mode=None):
+        """Check if TarFile is still open, and if the operation's mode
+           corresponds to TarFile's mode.
+        """
+        if self.closed:
+            raise OSError("%s is closed" % self.__class__.__name__)
+        if mode is not None and self.mode not in mode:
+            raise OSError("bad operation for mode %r" % self.mode)
+
+    def _find_link_target(self, tarinfo):
+        """Find the target member of a symlink or hardlink member in the
+           archive.
+        """
+        if tarinfo.issym():
+            # Always search the entire archive.
+            linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
+            limit = None
+        else:
+            # Search the archive before the link, because a hard link is
+            # just a reference to an already archived file.
+            linkname = tarinfo.linkname
+            limit = tarinfo
+
+        member = self._getmember(linkname, tarinfo=limit, normalize=True)
+        if member is None:
+            raise KeyError("linkname %r not found" % linkname)
+        return member
+
+    def __iter__(self):
+        """Provide an iterator object.
+        """
+        if self._loaded:
+            yield from self.members
+            return
+
+        # Yield items using TarFile's next() method.
+        # When all members have been read, set TarFile as _loaded.
+        index = 0
+        # Fix for SF #1100429: Under rare circumstances it can
+        # happen that getmembers() is called during iteration,
+        # which will have already exhausted the next() method.
+        if self.firstmember is not None:
+            tarinfo = self.next()
+            index += 1
+            yield tarinfo
+
+        while True:
+            if index < len(self.members):
+                tarinfo = self.members[index]
+            elif not self._loaded:
+                tarinfo = self.next()
+                if not tarinfo:
+                    self._loaded = True
+                    return
+            else:
+                return
+            index += 1
+            yield tarinfo
+
+    def _dbg(self, level, msg):
+        """Write debugging output to sys.stderr.
+        """
+        if level <= self.debug:
+            print(msg, file=sys.stderr)
+
+    def __enter__(self):
+        self._check()
+        return self
+
+    def __exit__(self, type, value, traceback):
+        if type is None:
+            self.close()
+        else:
+            # An exception occurred. We must not call close() because
+            # it would try to write end-of-archive blocks and padding.
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+
+#--------------------
+# exported functions
+#--------------------
+
+def is_tarfile(name):
+    """Return True if name points to a tar archive that we
+       are able to handle, else return False.
+
+       'name' should be a string, file, or file-like object.
+    """
+    try:
+        if hasattr(name, "read"):
+            pos = name.tell()
+            t = open(fileobj=name)
+            name.seek(pos)
+        else:
+            t = open(name)
+        t.close()
+        return True
+    except TarError:
+        return False
+
+open = TarFile.open
+
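+# Usage sketch for the helpers above (assumes a file "example.tar" exists):
+#
+#     if is_tarfile("example.tar"):
+#         with open("example.tar") as tf:   # module-level alias of TarFile.open
+#             print(tf.getnames())
+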
+
+def main():
+    import argparse
+
+    description = 'A simple command-line interface for tarfile module.'
+    parser = argparse.ArgumentParser(description=description)
+    parser.add_argument('-v', '--verbose', action='store_true', default=False,
+                        help='Verbose output')
+    parser.add_argument('--filter', metavar='<filtername>',
+                        choices=_NAMED_FILTERS,
+                        help='Filter for extraction')
+
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument('-l', '--list', metavar='<tarfile>',
+                       help='Show listing of a tarfile')
+    group.add_argument('-e', '--extract', nargs='+',
+                       metavar=('<tarfile>', '<output_dir>'),
+                       help='Extract tarfile into target dir')
+    group.add_argument('-c', '--create', nargs='+',
+                       metavar=('<name>', '<file>'),
+                       help='Create tarfile from sources')
+    group.add_argument('-t', '--test', metavar='<tarfile>',
+                       help='Test if a tarfile is valid')
+
+    args = parser.parse_args()
+
+    if args.filter and args.extract is None:
+        parser.exit(1, '--filter is only valid for extraction\n')
+
+    if args.test is not None:
+        src = args.test
+        if is_tarfile(src):
+            with open(src, 'r') as tar:
+                tar.getmembers()
+                print(tar.getmembers(), file=sys.stderr)
+            if args.verbose:
+                print('{!r} is a tar archive.'.format(src))
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.list is not None:
+        src = args.list
+        if is_tarfile(src):
+            with TarFile.open(src, 'r:*') as tf:
+                tf.list(verbose=args.verbose)
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.extract is not None:
+        if len(args.extract) == 1:
+            src = args.extract[0]
+            curdir = os.curdir
+        elif len(args.extract) == 2:
+            src, curdir = args.extract
+        else:
+            parser.exit(1, parser.format_help())
+
+        if is_tarfile(src):
+            with TarFile.open(src, 'r:*') as tf:
+                tf.extractall(path=curdir, filter=args.filter)
+            if args.verbose:
+                if curdir == '.':
+                    msg = '{!r} file is extracted.'.format(src)
+                else:
+                    msg = ('{!r} file is extracted '
+                           'into {!r} directory.').format(src, curdir)
+                print(msg)
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.create is not None:
+        tar_name = args.create.pop(0)
+        _, ext = os.path.splitext(tar_name)
+        compressions = {
+            # gz
+            '.gz': 'gz',
+            '.tgz': 'gz',
+            # xz
+            '.xz': 'xz',
+            '.txz': 'xz',
+            # bz2
+            '.bz2': 'bz2',
+            '.tbz': 'bz2',
+            '.tbz2': 'bz2',
+            '.tb2': 'bz2',
+        }
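+        # e.g. a hypothetical "backup.tgz" selects 'w:gz' below, while an
+        # unmapped extension such as ".tar" falls back to plain 'w'.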
+        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
+        tar_files = args.create
+
+        with TarFile.open(tar_name, tar_mode) as tf:
+            for file_name in tar_files:
+                tf.add(file_name)
+
+        if args.verbose:
+            print('{!r} file created.'.format(tar_name))
+
+if __name__ == '__main__':
+    main()
diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD
index 7d19852d4a..ba764991ee 100644
--- a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD
+++ b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD
@@ -6,15 +6,15 @@ importlib_resources-5.10.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
 importlib_resources-5.10.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
 importlib_resources-5.10.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
 importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506
-importlib_resources/__pycache__/__init__.cpython-311.pyc,,
-importlib_resources/__pycache__/_adapters.cpython-311.pyc,,
-importlib_resources/__pycache__/_common.cpython-311.pyc,,
-importlib_resources/__pycache__/_compat.cpython-311.pyc,,
-importlib_resources/__pycache__/_itertools.cpython-311.pyc,,
-importlib_resources/__pycache__/_legacy.cpython-311.pyc,,
-importlib_resources/__pycache__/abc.cpython-311.pyc,,
-importlib_resources/__pycache__/readers.cpython-311.pyc,,
-importlib_resources/__pycache__/simple.cpython-311.pyc,,
+importlib_resources/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/__pycache__/_adapters.cpython-312.pyc,,
+importlib_resources/__pycache__/_common.cpython-312.pyc,,
+importlib_resources/__pycache__/_compat.cpython-312.pyc,,
+importlib_resources/__pycache__/_itertools.cpython-312.pyc,,
+importlib_resources/__pycache__/_legacy.cpython-312.pyc,,
+importlib_resources/__pycache__/abc.cpython-312.pyc,,
+importlib_resources/__pycache__/readers.cpython-312.pyc,,
+importlib_resources/__pycache__/simple.cpython-312.pyc,,
 importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
 importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457
 importlib_resources/_compat.py,sha256=dSadF6WPt8MwOqSm_NIOQPhw4x0iaMYTWxi-XS93p7M,2923
@@ -25,36 +25,36 @@ importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581
 importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576
 importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/__pycache__/__init__.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/_compat.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/_path.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_contents.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_files.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_open.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_path.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_read.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_reader.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_resource.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/update-zips.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/util.cpython-311.pyc,,
+importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/_compat.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/_path.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/update-zips.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/util.cpython-312.pyc,,
 importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708
 importlib_resources/tests/_path.py,sha256=yZyWsQzJZQ1Z8ARAxWkjAdaVVsjlzyqxO0qjBUofJ8M,1039
 importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
 importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
 importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
 importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
 importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/one/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
 importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/two/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
 importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
 importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
@@ -70,8 +70,8 @@ importlib_resources/tests/test_resource.py,sha256=EMoarxTEHcrq8R41LQDsndIG8Idtm4
 importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417
 importlib_resources/tests/util.py,sha256=ARAlxZ47wC-lgR7PGlmgBoi4HnhzcykD5Is2-TAwY0I,4873
 importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876
 importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698
diff --git a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/METADATA
deleted file mode 100644
index 281137a035..0000000000
--- a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/METADATA
+++ /dev/null
@@ -1,68 +0,0 @@
-Metadata-Version: 2.1
-Name: jaraco.context
-Version: 4.3.0
-Summary: Context managers by jaraco
-Home-page: https://github.com/jaraco/jaraco.context
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.7
-License-File: LICENSE
-Provides-Extra: docs
-Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: flake8 (<5) ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/jaraco.context.svg
-   :target: https://pypi.org/project/jaraco.context
-
-.. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg
-
-.. image:: https://github.com/jaraco/jaraco.context/workflows/tests/badge.svg
-   :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
-.. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest
-   :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2023-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/jaraco.context
-   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.context?utm_source=pypi-jaraco.context&utm_medium=readme
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more `_.
-
-Security Contact
-================
-
-To report a security vulnerability, please use the
-`Tidelift security contact `_.
-Tidelift will coordinate the fix and disclosure.
diff --git a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/RECORD
deleted file mode 100644
index 03122364a2..0000000000
--- a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/RECORD
+++ /dev/null
@@ -1,8 +0,0 @@
-jaraco.context-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.context-4.3.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
-jaraco.context-4.3.0.dist-info/METADATA,sha256=GqMykAm33E7Tt_t_MHc5O7GJN62Qwp6MEHX9WD-LPow,2958
-jaraco.context-4.3.0.dist-info/RECORD,,
-jaraco.context-4.3.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-jaraco.context-4.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/__pycache__/context.cpython-311.pyc,,
-jaraco/context.py,sha256=vlyDzb_PvZ9H7R9bbTr_CMRnveW5Dc56eC7eyd_GfoA,7460
diff --git a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
similarity index 97%
rename from pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/LICENSE
rename to pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
index 353924be0e..1bb5a44356 100644
--- a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/LICENSE
+++ b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
@@ -1,5 +1,3 @@
-Copyright Jason R. Coombs
-
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to
 deal in the Software without restriction, including without limitation the
diff --git a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/METADATA
new file mode 100644
index 0000000000..a36f7c5e82
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/METADATA
@@ -0,0 +1,75 @@
+Metadata-Version: 2.1
+Name: jaraco.context
+Version: 5.3.0
+Summary: Useful decorators and context managers
+Home-page: https://github.com/jaraco/jaraco.context
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+License-File: LICENSE
+Requires-Dist: backports.tarfile ; python_version < "3.12"
+Provides-Extra: docs
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: sphinx-lint ; extra == 'docs'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-mypy ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
+Requires-Dist: portend ; extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.context.svg
+   :target: https://pypi.org/project/jaraco.context
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg
+
+.. image:: https://github.com/jaraco/jaraco.context/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest
+   :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/jaraco.context
+   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.context?utm_source=pypi-jaraco.context&utm_medium=readme
+
+
+Highlights
+==========
+
+See the docs linked from the badge above for the full details, but here are some features that may be of interest.
+
+- ``ExceptionTrap`` provides a general-purpose wrapper for trapping exceptions and then acting on the outcome. Includes ``passes`` and ``raises`` decorators to replace the result of a wrapped function by a boolean indicating the outcome of the exception trap. See `this keyring commit `_ for an example of it in production.
+- ``suppress`` simply enables ``contextlib.suppress`` as a decorator.
+- ``on_interrupt`` is a decorator used by CLI entry points to affect the handling of a ``KeyboardInterrupt``. Inspired by `Lucretiel/autocommand#18 `_.
+- ``pushd`` is similar to pytest's ``monkeypatch.chdir`` or path's `default context `_; it changes the current working directory for the duration of the context.
+- ``tarball`` will download a tarball, extract it, yield, then clean up after (``tarball_cwd`` composes in the directory change). Convenient when working with web assets.
+- ``null`` is there for those times when one code branch needs a context and the other doesn't; this null context provides symmetry across those branches.
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
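
The Highlights above are terse; here is a minimal usage sketch for two of
them, assuming the package is importable standalone as ``jaraco.context``
(inside setuptools it is reached through the vendored extern namespace)::

    from jaraco.context import ExceptionTrap, suppress

    # Trap an exception, then act on the outcome after the block.
    trap = ExceptionTrap(ValueError)
    with trap:
        int('not a number')
    assert bool(trap)  # truthy: a ValueError was caught

    # ``suppress`` is contextlib.suppress made usable as a decorator.
    @suppress(KeyError)
    def lookup(mapping, key):
        return mapping[key]

    assert lookup({}, 'missing') is None  # KeyError swallowed, returns None
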
diff --git a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD
new file mode 100644
index 0000000000..09d191f214
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+jaraco.context-5.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.context-5.3.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.context-5.3.0.dist-info/METADATA,sha256=xDtguJej0tN9iEXCUvxEJh2a7xceIRVBEakBLSr__tY,4020
+jaraco.context-5.3.0.dist-info/RECORD,,
+jaraco.context-5.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+jaraco.context-5.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/__pycache__/context.cpython-312.pyc,,
+jaraco/context.py,sha256=REoLIxDkO5MfEYowt_WoupNCRoxBS5v7YX2PbW8lIcs,9552
diff --git a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
similarity index 65%
rename from setuptools/_vendor/jaraco.context-4.3.0.dist-info/WHEEL
rename to pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
index 57e3d840d5..bab98d6758 100644
--- a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/WHEEL
+++ b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.38.4)
+Generator: bdist_wheel (0.43.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt
similarity index 100%
rename from pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/top_level.txt
rename to pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt
diff --git a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/RECORD
deleted file mode 100644
index 70a3521307..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/RECORD
+++ /dev/null
@@ -1,8 +0,0 @@
-jaraco.functools-3.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-3.6.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
-jaraco.functools-3.6.0.dist-info/METADATA,sha256=ImGoa1WEbhsibIb288yWqkDAvqLwlPzayjravRvW_Bs,3136
-jaraco.functools-3.6.0.dist-info/RECORD,,
-jaraco.functools-3.6.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-jaraco.functools-3.6.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/__pycache__/functools.cpython-311.pyc,,
-jaraco/functools.py,sha256=GhSJGMVMcb0U4-axXaY_au30hT-ceW-HM1EbV1_9NzI,15035
diff --git a/pkg_resources/_vendor/more_itertools-9.1.0.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/more_itertools-9.1.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
similarity index 97%
rename from setuptools/_vendor/jaraco.functools-3.6.0.dist-info/LICENSE
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
index 353924be0e..1bb5a44356 100644
--- a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/LICENSE
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
@@ -1,5 +1,3 @@
-Copyright Jason R. Coombs
-
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to
 deal in the Software without restriction, including without limitation the
diff --git a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
similarity index 69%
rename from pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/METADATA
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
index 23c6f5ef2b..581b308378 100644
--- a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/METADATA
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: jaraco.functools
-Version: 3.6.0
+Version: 4.0.0
 Summary: Functools like those found in stdlib
 Home-page: https://github.com/jaraco/jaraco.functools
 Author: Jason R. Coombs
@@ -10,26 +10,26 @@ Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.7
+Requires-Python: >=3.8
 License-File: LICENSE
 Requires-Dist: more-itertools
 Provides-Extra: docs
-Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
 Requires-Dist: furo ; extra == 'docs'
 Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
 Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: flake8 (<5) ; extra == 'testing'
+Requires-Dist: pytest >=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: pytest-ruff ; extra == 'testing'
 Requires-Dist: jaraco.classes ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'
+Requires-Dist: pytest-black >=0.3.7 ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
    :target: https://pypi.org/project/jaraco.functools
@@ -40,6 +40,10 @@ Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'
    :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
    :alt: tests
 
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
 .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
    :target: https://github.com/psf/black
    :alt: Code style: Black
@@ -63,10 +67,3 @@ Available as part of the Tidelift Subscription.
 This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
 
 `Learn more `_.
-
-Security Contact
-================
-
-To report a security vulnerability, please use the
-`Tidelift security contact `_.
-Tidelift will coordinate the fix and disclosure.
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..783aa7d2b9
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
+jaraco.functools-4.0.0.dist-info/RECORD,,
+jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
similarity index 65%
rename from setuptools/_vendor/jaraco.functools-3.6.0.dist-info/WHEEL
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
index 57e3d840d5..ba48cbcf92 100644
--- a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/WHEEL
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.38.4)
+Generator: bdist_wheel (0.41.3)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
similarity index 100%
rename from pkg_resources/_vendor/jaraco.functools-3.6.0.dist-info/top_level.txt
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD
index dd471b0708..c698101cb4 100644
--- a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD
+++ b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD
@@ -7,4 +7,4 @@ jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FG
 jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
 jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
 jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538
-jaraco/text/__pycache__/__init__.cpython-311.pyc,,
+jaraco/text/__pycache__/__init__.cpython-312.pyc,,
diff --git a/pkg_resources/_vendor/jaraco/context.py b/pkg_resources/_vendor/jaraco/context.py
index b0d1ef37cb..61b27135df 100644
--- a/pkg_resources/_vendor/jaraco/context.py
+++ b/pkg_resources/_vendor/jaraco/context.py
@@ -1,15 +1,26 @@
-import os
-import subprocess
+from __future__ import annotations
+
 import contextlib
 import functools
-import tempfile
-import shutil
 import operator
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import urllib.request
 import warnings
+from typing import Iterator
+
+
+if sys.version_info < (3, 12):
+    from backports import tarfile
+else:
+    import tarfile
 
 
 @contextlib.contextmanager
-def pushd(dir):
+def pushd(dir: str | os.PathLike) -> Iterator[str | os.PathLike]:
     """
     >>> tmp_path = getfixture('tmp_path')
     >>> with pushd(tmp_path):
@@ -26,33 +37,88 @@ def pushd(dir):
 
 
 @contextlib.contextmanager
-def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
+def tarball(
+    url, target_dir: str | os.PathLike | None = None
+) -> Iterator[str | os.PathLike]:
     """
-    Get a tarball, extract it, change to that directory, yield, then
-    clean up.
-    `runner` is the function to invoke commands.
-    `pushd` is a context manager for changing the directory.
+    Get a tarball, extract it, yield, then clean up.
+
+    >>> import urllib.request
+    >>> url = getfixture('tarfile_served')
+    >>> target = getfixture('tmp_path') / 'out'
+    >>> tb = tarball(url, target_dir=target)
+    >>> import pathlib
+    >>> with tb as extracted:
+    ...     contents = pathlib.Path(extracted, 'contents.txt').read_text(encoding='utf-8')
+    >>> assert not os.path.exists(extracted)
     """
     if target_dir is None:
         target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
-    if runner is None:
-        runner = functools.partial(subprocess.check_call, shell=True)
-    else:
-        warnings.warn("runner parameter is deprecated", DeprecationWarning)
     # In the tar command, use --strip-components=1 to strip the first path and
     #  then
     #  use -C to cause the files to be extracted to {target_dir}. This ensures
     #  that we always know where the files were extracted.
-    runner('mkdir {target_dir}'.format(**vars()))
+    os.mkdir(target_dir)
     try:
-        getter = 'wget {url} -O -'
-        extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
-        cmd = ' | '.join((getter, extract))
-        runner(cmd.format(compression=infer_compression(url), **vars()))
-        with pushd(target_dir):
-            yield target_dir
+        req = urllib.request.urlopen(url)
+        with tarfile.open(fileobj=req, mode='r|*') as tf:
+            tf.extractall(path=target_dir, filter=strip_first_component)
+        yield target_dir
     finally:
-        runner('rm -Rf {target_dir}'.format(**vars()))
+        shutil.rmtree(target_dir)
+
+
+def strip_first_component(
+    member: tarfile.TarInfo,
+    path,
+) -> tarfile.TarInfo:
+    _, member.name = member.name.split('/', 1)
+    return member
+
+
+def _compose(*cmgrs):
+    """
+    Compose any number of dependent context managers into a single one.
+
+    The last, innermost context manager may take arbitrary arguments, but
+    each successive context manager should accept the result from the
+    previous as a single parameter.
+
+    Like :func:`jaraco.functools.compose`, behavior works from right to
+    left, so the context managers should be listed from outermost to
+    innermost.
+
+    For example, to create a context manager that changes to a temporary
+    directory:
+
+    >>> temp_dir_as_cwd = _compose(pushd, temp_dir)
+    >>> with temp_dir_as_cwd() as dir:
+    ...     assert os.path.samefile(os.getcwd(), dir)
+    """
+
+    def compose_two(inner, outer):
+        def composed(*args, **kwargs):
+            with inner(*args, **kwargs) as saved, outer(saved) as res:
+                yield res
+
+        return contextlib.contextmanager(composed)
+
+    return functools.reduce(compose_two, reversed(cmgrs))
+
+
+tarball_cwd = _compose(pushd, tarball)
+
+
+@contextlib.contextmanager
+def tarball_context(*args, **kwargs):
+    warnings.warn(
+        "tarball_context is deprecated. Use tarball or tarball_cwd instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    pushd_ctx = kwargs.pop('pushd', pushd)
+    with tarball(*args, **kwargs) as tball, pushd_ctx(tball) as dir:
+        yield dir
 
 
 def infer_compression(url):
@@ -68,6 +134,11 @@ def infer_compression(url):
     >>> infer_compression('file.xz')
     'J'
     """
+    warnings.warn(
+        "infer_compression is deprecated with no replacement",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     # cheat and just assume it's the last two characters
     compression_indicator = url[-2:]
     mapping = dict(gz='z', bz='j', xz='J')
@@ -84,7 +155,7 @@ def temp_dir(remover=shutil.rmtree):
     >>> import pathlib
     >>> with temp_dir() as the_dir:
     ...     assert os.path.isdir(the_dir)
-    ...     _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents')
+    ...     _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents', encoding='utf-8')
     >>> assert not os.path.exists(the_dir)
     """
     temp_dir = tempfile.mkdtemp()
@@ -113,15 +184,23 @@ def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
         yield repo_dir
 
 
-@contextlib.contextmanager
 def null():
     """
     A null context suitable to stand in for a meaningful context.
 
     >>> with null() as value:
     ...     assert value is None
+
+    This context is most useful when dealing with two or more code
+    branches but only some need a context. Wrap the others in a null
+    context to provide symmetry across all options.
     """
-    yield
+    warnings.warn(
+        "null is deprecated. Use contextlib.nullcontext",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return contextlib.nullcontext()
 
 
 class ExceptionTrap:
@@ -267,13 +346,7 @@ class on_interrupt(contextlib.ContextDecorator):
     ...     on_interrupt('ignore')(do_interrupt)()
     """
 
-    def __init__(
-        self,
-        action='error',
-        # py3.7 compat
-        # /,
-        code=1,
-    ):
+    def __init__(self, action='error', /, code=1):
         self.action = action
         self.code = code
 
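
Two changes in this file are worth spelling out. ``tarball`` now streams the
download directly into the stdlib ``tarfile`` module (mode ``'r|*'`` reads a
non-seekable stream and autodetects compression) instead of shelling out to
``wget | tar``, and ``tarball_cwd`` recovers the old change-directory
behavior by composing ``pushd`` over ``tarball`` via ``_compose``. A minimal
sketch of the streaming pattern follows; the ``fetch_tarball`` helper is
hypothetical, and the patch additionally passes
``filter=strip_first_component``, which needs Python 3.12's tarfile or the
``backports.tarfile`` shim::

    import os
    import shutil
    import tarfile
    import urllib.request

    def fetch_tarball(url, target_dir):
        # Extract an HTTP response on the fly; no temp file on disk.
        os.mkdir(target_dir)
        try:
            with urllib.request.urlopen(url) as resp:
                with tarfile.open(fileobj=resp, mode='r|*') as tf:
                    tf.extractall(path=target_dir)
            return target_dir
        except BaseException:
            shutil.rmtree(target_dir)
            raise
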
diff --git a/pkg_resources/_vendor/jaraco/functools.py b/pkg_resources/_vendor/jaraco/functools/__init__.py
similarity index 79%
rename from pkg_resources/_vendor/jaraco/functools.py
rename to pkg_resources/_vendor/jaraco/functools/__init__.py
index 67aeadc353..f523099c72 100644
--- a/pkg_resources/_vendor/jaraco/functools.py
+++ b/pkg_resources/_vendor/jaraco/functools/__init__.py
@@ -1,18 +1,14 @@
+import collections.abc
 import functools
-import time
 import inspect
-import collections
-import types
 import itertools
+import operator
+import time
+import types
 import warnings
 
 import pkg_resources.extern.more_itertools
 
-from typing import Callable, TypeVar
-
-
-CallableT = TypeVar("CallableT", bound=Callable[..., object])
-
 
 def compose(*funcs):
     """
@@ -38,24 +34,6 @@ def compose_two(f1, f2):
     return functools.reduce(compose_two, funcs)
 
 
-def method_caller(method_name, *args, **kwargs):
-    """
-    Return a function that will call a named method on the
-    target object with optional positional and keyword
-    arguments.
-
-    >>> lower = method_caller('lower')
-    >>> lower('MyString')
-    'mystring'
-    """
-
-    def call_method(target):
-        func = getattr(target, method_name)
-        return func(*args, **kwargs)
-
-    return call_method
-
-
 def once(func):
     """
     Decorate func so it's only ever called the first time.
@@ -98,12 +76,7 @@ def wrapper(*args, **kwargs):
     return wrapper
 
 
-def method_cache(
-    method: CallableT,
-    cache_wrapper: Callable[
-        [CallableT], CallableT
-    ] = functools.lru_cache(),  # type: ignore[assignment]
-) -> CallableT:
+def method_cache(method, cache_wrapper=functools.lru_cache()):
     """
     Wrap lru_cache to support storing the cache data in the object instances.
 
@@ -171,21 +144,17 @@ def method_cache(
     for another implementation and additional justification.
     """
 
-    def wrapper(self: object, *args: object, **kwargs: object) -> object:
+    def wrapper(self, *args, **kwargs):
         # it's the first call, replace the method with a cached, bound method
-        bound_method: CallableT = types.MethodType(  # type: ignore[assignment]
-            method, self
-        )
+        bound_method = types.MethodType(method, self)
         cached_method = cache_wrapper(bound_method)
         setattr(self, method.__name__, cached_method)
         return cached_method(*args, **kwargs)
 
     # Support cache clear even before cache has been created.
-    wrapper.cache_clear = lambda: None  # type: ignore[attr-defined]
+    wrapper.cache_clear = lambda: None
 
-    return (  # type: ignore[return-value]
-        _special_method_cache(method, cache_wrapper) or wrapper
-    )
+    return _special_method_cache(method, cache_wrapper) or wrapper
 
 
 def _special_method_cache(method, cache_wrapper):
@@ -201,12 +170,13 @@ def _special_method_cache(method, cache_wrapper):
     """
     name = method.__name__
     special_names = '__getattr__', '__getitem__'
+
     if name not in special_names:
-        return
+        return None
 
     wrapper_name = '__cached' + name
 
-    def proxy(self, *args, **kwargs):
+    def proxy(self, /, *args, **kwargs):
         if wrapper_name not in vars(self):
             bound = types.MethodType(method, self)
             cache = cache_wrapper(bound)
@@ -243,7 +213,7 @@ def result_invoke(action):
     r"""
     Decorate a function with an action function that is
     invoked on the results returned from the decorated
-    function (for its side-effect), then return the original
+    function (for its side effect), then return the original
     result.
 
     >>> @result_invoke(print)
@@ -267,7 +237,7 @@ def wrapper(*args, **kwargs):
     return wrap
 
 
-def invoke(f, *args, **kwargs):
+def invoke(f, /, *args, **kwargs):
     """
     Call a function for its side effect after initialization.
 
@@ -302,25 +272,15 @@ def invoke(f, *args, **kwargs):
     Use functools.partial to pass parameters to the initial call
 
     >>> @functools.partial(invoke, name='bingo')
-    ... def func(name): print("called with", name)
+    ... def func(name): print('called with', name)
     called with bingo
     """
     f(*args, **kwargs)
     return f
 
 
-def call_aside(*args, **kwargs):
-    """
-    Deprecated name for invoke.
-    """
-    warnings.warn("call_aside is deprecated, use invoke", DeprecationWarning)
-    return invoke(*args, **kwargs)
-
-
 class Throttler:
-    """
-    Rate-limit a function (or other callable)
-    """
+    """Rate-limit a function (or other callable)."""
 
     def __init__(self, func, max_rate=float('Inf')):
         if isinstance(func, Throttler):
@@ -337,20 +297,20 @@ def __call__(self, *args, **kwargs):
         return self.func(*args, **kwargs)
 
     def _wait(self):
-        "ensure at least 1/max_rate seconds from last call"
+        """Ensure at least 1/max_rate seconds from last call."""
         elapsed = time.time() - self.last_called
         must_wait = 1 / self.max_rate - elapsed
         time.sleep(max(0, must_wait))
         self.last_called = time.time()
 
-    def __get__(self, obj, type=None):
+    def __get__(self, obj, owner=None):
         return first_invoke(self._wait, functools.partial(self.func, obj))
 
 
 def first_invoke(func1, func2):
     """
     Return a function that when invoked will invoke func1 without
-    any parameters (for its side-effect) and then invoke func2
+    any parameters (for its side effect) and then invoke func2
     with whatever parameters were passed, returning its result.
     """
 
@@ -361,6 +321,17 @@ def wrapper(*args, **kwargs):
     return wrapper
 
 
+method_caller = first_invoke(
+    lambda: warnings.warn(
+        '`jaraco.functools.method_caller` is deprecated, '
+        'use `operator.methodcaller` instead',
+        DeprecationWarning,
+        stacklevel=3,
+    ),
+    operator.methodcaller,
+)
+
+
 def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
     """
     Given a callable func, trap the indicated exceptions
@@ -369,7 +340,7 @@ def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
     to propagate.
     """
     attempts = itertools.count() if retries == float('inf') else range(retries)
-    for attempt in attempts:
+    for _ in attempts:
         try:
             return func()
         except trap:
@@ -406,7 +377,7 @@ def wrapper(*f_args, **f_kwargs):
 
 def print_yielded(func):
     """
-    Convert a generator into a function that prints all yielded elements
+    Convert a generator into a function that prints all yielded elements.
 
     >>> @print_yielded
     ... def x():
@@ -422,7 +393,7 @@ def print_yielded(func):
 
 def pass_none(func):
     """
-    Wrap func so it's not called if its first param is None
+    Wrap func so it's not called if its first param is None.
 
     >>> print_text = pass_none(print)
     >>> print_text('text')
@@ -431,9 +402,10 @@ def pass_none(func):
     """
 
     @functools.wraps(func)
-    def wrapper(param, *args, **kwargs):
+    def wrapper(param, /, *args, **kwargs):
         if param is not None:
             return func(param, *args, **kwargs)
+        return None
 
     return wrapper
 
@@ -507,7 +479,7 @@ def save_method_args(method):
     args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
 
     @functools.wraps(method)
-    def wrapper(self, *args, **kwargs):
+    def wrapper(self, /, *args, **kwargs):
         attr_name = '_saved_' + method.__name__
         attr = args_and_kwargs(args, kwargs)
         setattr(self, attr_name, attr)
@@ -554,3 +526,108 @@ def wrapper(*args, **kwargs):
         return wrapper
 
     return decorate
+
+
+def identity(x):
+    """
+    Return the argument.
+
+    >>> o = object()
+    >>> identity(o) is o
+    True
+    """
+    return x
+
+
+def bypass_when(check, *, _op=identity):
+    """
+    Decorate a function to return its parameter when ``check``.
+
+    >>> bypassed = []  # False
+
+    >>> @bypass_when(bypassed)
+    ... def double(x):
+    ...     return x * 2
+    >>> double(2)
+    4
+    >>> bypassed[:] = [object()]  # True
+    >>> double(2)
+    2
+    """
+
+    def decorate(func):
+        @functools.wraps(func)
+        def wrapper(param, /):
+            return param if _op(check) else func(param)
+
+        return wrapper
+
+    return decorate
+
+
+def bypass_unless(check):
+    """
+    Decorate a function to return its parameter unless ``check``.
+
+    >>> enabled = [object()]  # True
+
+    >>> @bypass_unless(enabled)
+    ... def double(x):
+    ...     return x * 2
+    >>> double(2)
+    4
+    >>> del enabled[:]  # False
+    >>> double(2)
+    2
+    """
+    return bypass_when(check, _op=operator.not_)
+
+
+@functools.singledispatch
+def _splat_inner(args, func):
+    """Splat args to func."""
+    return func(*args)
+
+
+@_splat_inner.register
+def _(args: collections.abc.Mapping, func):
+    """Splat kargs to func as kwargs."""
+    return func(**args)
+
+
+def splat(func):
+    """
+    Wrap func to expect its parameters to be passed positionally in a tuple.
+
+    Has a similar effect to that of ``itertools.starmap`` over
+    simple ``map``.
+
+    >>> pairs = [(-1, 1), (0, 2)]
+    >>> pkg_resources.extern.more_itertools.consume(itertools.starmap(print, pairs))
+    -1 1
+    0 2
+    >>> pkg_resources.extern.more_itertools.consume(map(splat(print), pairs))
+    -1 1
+    0 2
+
+    The approach generalizes to other iterators that don't have a "star"
+    equivalent, such as a "starfilter".
+
+    >>> list(filter(splat(operator.add), pairs))
+    [(0, 2)]
+
+    Splat also accepts a mapping argument.
+
+    >>> def is_nice(msg, code):
+    ...     return "smile" in msg or code == 0
+    >>> msgs = [
+    ...     dict(msg='smile!', code=20),
+    ...     dict(msg='error :(', code=1),
+    ...     dict(msg='unknown', code=0),
+    ... ]
+    >>> for msg in filter(splat(is_nice), msgs):
+    ...     print(msg)
+    {'msg': 'smile!', 'code': 20}
+    {'msg': 'unknown', 'code': 0}
+    """
+    return functools.wraps(func)(functools.partial(_splat_inner, func=func))
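
A note on ``method_cache`` above, since its annotations were stripped: it
caches per instance by self-replacement. On the first call, the wrapper
builds a cached bound method and stores it on the instance under the
method's name, so subsequent lookups hit the cached callable directly and
never re-enter the wrapper. A small usage sketch, again assuming a
standalone ``jaraco.functools`` import::

    from jaraco.functools import method_cache

    class Prices:
        @method_cache
        def quote(self, symbol):
            print('computing', symbol)
            return len(symbol) * 7

    p = Prices()
    assert p.quote('XYZ') == 21   # prints 'computing XYZ'
    assert p.quote('XYZ') == 21   # cached; nothing printed
    assert 'quote' in vars(p)     # the cached bound method now lives on p
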
diff --git a/pkg_resources/_vendor/jaraco/functools/__init__.pyi b/pkg_resources/_vendor/jaraco/functools/__init__.pyi
new file mode 100644
index 0000000000..c2b9ab1757
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/functools/__init__.pyi
@@ -0,0 +1,128 @@
+from collections.abc import Callable, Hashable, Iterator
+from functools import partial
+from operator import methodcaller
+import sys
+from typing import (
+    Any,
+    Generic,
+    Protocol,
+    TypeVar,
+    overload,
+)
+
+if sys.version_info >= (3, 10):
+    from typing import Concatenate, ParamSpec
+else:
+    from typing_extensions import Concatenate, ParamSpec
+
+_P = ParamSpec('_P')
+_R = TypeVar('_R')
+_T = TypeVar('_T')
+_R1 = TypeVar('_R1')
+_R2 = TypeVar('_R2')
+_V = TypeVar('_V')
+_S = TypeVar('_S')
+_R_co = TypeVar('_R_co', covariant=True)
+
+class _OnceCallable(Protocol[_P, _R]):
+    saved_result: _R
+    reset: Callable[[], None]
+    def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
+
+class _ProxyMethodCacheWrapper(Protocol[_R_co]):
+    cache_clear: Callable[[], None]
+    def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
+
+class _MethodCacheWrapper(Protocol[_R_co]):
+    def cache_clear(self) -> None: ...
+    def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
+
+# `compose()` overloads below will cover most use cases.
+
+@overload
+def compose(
+    __func1: Callable[[_R], _T],
+    __func2: Callable[_P, _R],
+    /,
+) -> Callable[_P, _T]: ...
+@overload
+def compose(
+    __func1: Callable[[_R], _T],
+    __func2: Callable[[_R1], _R],
+    __func3: Callable[_P, _R1],
+    /,
+) -> Callable[_P, _T]: ...
+@overload
+def compose(
+    __func1: Callable[[_R], _T],
+    __func2: Callable[[_R2], _R],
+    __func3: Callable[[_R1], _R2],
+    __func4: Callable[_P, _R1],
+    /,
+) -> Callable[_P, _T]: ...
+def once(func: Callable[_P, _R]) -> _OnceCallable[_P, _R]: ...
+def method_cache(
+    method: Callable[..., _R],
+    cache_wrapper: Callable[[Callable[..., _R]], _MethodCacheWrapper[_R]] = ...,
+) -> _MethodCacheWrapper[_R] | _ProxyMethodCacheWrapper[_R]: ...
+def apply(
+    transform: Callable[[_R], _T]
+) -> Callable[[Callable[_P, _R]], Callable[_P, _T]]: ...
+def result_invoke(
+    action: Callable[[_R], Any]
+) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: ...
+def invoke(
+    f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
+def call_aside(
+    f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
+
+class Throttler(Generic[_R]):
+    last_called: float
+    func: Callable[..., _R]
+    max_rate: float
+    def __init__(
+        self, func: Callable[..., _R] | Throttler[_R], max_rate: float = ...
+    ) -> None: ...
+    def reset(self) -> None: ...
+    def __call__(self, *args: Any, **kwargs: Any) -> _R: ...
+    def __get__(self, obj: Any, owner: type[Any] | None = ...) -> Callable[..., _R]: ...
+
+def first_invoke(
+    func1: Callable[..., Any], func2: Callable[_P, _R]
+) -> Callable[_P, _R]: ...
+
+method_caller: Callable[..., methodcaller]
+
+def retry_call(
+    func: Callable[..., _R],
+    cleanup: Callable[..., None] = ...,
+    retries: int | float = ...,
+    trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
+) -> _R: ...
+def retry(
+    cleanup: Callable[..., None] = ...,
+    retries: int | float = ...,
+    trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
+) -> Callable[[Callable[..., _R]], Callable[..., _R]]: ...
+def print_yielded(func: Callable[_P, Iterator[Any]]) -> Callable[_P, None]: ...
+def pass_none(
+    func: Callable[Concatenate[_T, _P], _R]
+) -> Callable[Concatenate[_T, _P], _R]: ...
+def assign_params(
+    func: Callable[..., _R], namespace: dict[str, Any]
+) -> partial[_R]: ...
+def save_method_args(
+    method: Callable[Concatenate[_S, _P], _R]
+) -> Callable[Concatenate[_S, _P], _R]: ...
+def except_(
+    *exceptions: type[BaseException], replace: Any = ..., use: Any = ...
+) -> Callable[[Callable[_P, Any]], Callable[_P, Any]]: ...
+def identity(x: _T) -> _T: ...
+def bypass_when(
+    check: _V, *, _op: Callable[[_V], Any] = ...
+) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
+def bypass_unless(
+    check: Any,
+) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
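
The stub's positional-only ``compose`` overloads let a type checker thread
the return type right to left through up to four functions. A small
illustration of the runtime behavior those overloads describe (the three
functions are hypothetical)::

    from jaraco.functools import compose

    def parse(s: str) -> int:
        return int(s)

    def double(n: int) -> int:
        return n * 2

    def show(n: int) -> str:
        return f'value={n}'

    # Rightmost runs first: show(double(parse('21'))).
    pipeline = compose(show, double, parse)
    assert pipeline('21') == 'value=42'
    # Under the stub, pipeline is inferred as Callable[[str], str].
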
diff --git a/pkg_resources/_vendor/jaraco/functools/py.typed b/pkg_resources/_vendor/jaraco/functools/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/INSTALLER b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/jaraco.context-4.3.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/more_itertools-10.2.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/more_itertools-9.1.0.dist-info/LICENSE b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/more_itertools-9.1.0.dist-info/LICENSE
rename to pkg_resources/_vendor/more_itertools-10.2.0.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/more_itertools-9.1.0.dist-info/METADATA b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/METADATA
similarity index 90%
rename from pkg_resources/_vendor/more_itertools-9.1.0.dist-info/METADATA
rename to pkg_resources/_vendor/more_itertools-10.2.0.dist-info/METADATA
index bee8776239..f54f1ff279 100644
--- a/pkg_resources/_vendor/more_itertools-9.1.0.dist-info/METADATA
+++ b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/METADATA
@@ -1,21 +1,21 @@
 Metadata-Version: 2.1
 Name: more-itertools
-Version: 9.1.0
+Version: 10.2.0
 Summary: More routines for operating on iterables, beyond itertools
 Keywords: itertools,iterator,iteration,filter,peek,peekable,chunk,chunked
 Author-email: Erik Rose 
-Requires-Python: >=3.7
+Requires-Python: >=3.8
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Natural Language :: English
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
@@ -51,7 +51,7 @@ Python iterables.
 |                        | `unzip `_,                                                                                   |
 |                        | `batched `_,                                                                               |
 |                        | `grouper `_,                                                                               |
-|                        | `partition `_                                                                            |
+|                        | `partition `_,                                                                           |
 |                        | `transpose `_                                                                            |
 +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
 | Lookahead and lookback | `spy `_,                                                                                       |
@@ -92,7 +92,8 @@ Python iterables.
 |                        | `flatten `_,                                                                               |
 |                        | `roundrobin `_,                                                                         |
 |                        | `prepend `_,                                                                               |
-|                        | `value_chain `_                                                                        |
+|                        | `value_chain `_,                                                                       |
+|                        | `partial_product `_                                                                |
 +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
 | Summarizing            | `ilen `_,                                                                                     |
 |                        | `unique_to_each `_,                                                                 |
@@ -120,17 +121,21 @@ Python iterables.
 |                        | `rstrip `_,                                                                                 |
 |                        | `filter_except `_,                                                                   |
 |                        | `map_except `_,                                                                         |
+|                        | `filter_map `_,                                                                         |
+|                        | `iter_suppress `_,                                                                   |
 |                        | `nth_or_last `_,                                                                       |
 |                        | `unique_in_window `_,                                                             |
 |                        | `before_and_after `_,                                                             |
 |                        | `nth `_,                                                                                       |
 |                        | `take `_,                                                                                     |
 |                        | `tail `_,                                                                                     |
-|                        | `unique_everseen `_,                                                              |
+|                        | `unique_everseen `_,                                                               |
 |                        | `unique_justseen `_,                                                               |
 |                        | `duplicates_everseen `_,                                                       |
 |                        | `duplicates_justseen `_,                                                       |
-|                        | `longest_common_prefix `_                                                    |
+|                        | `classify_unique `_,                                                               |
+|                        | `longest_common_prefix `_,                                                   |
+|                        | `takewhile_inclusive `_                                                        |
 +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
 | Combinatorics          | `distinct_permutations `_,                                                   |
 |                        | `distinct_combinations `_,                                                   |
@@ -140,7 +145,9 @@ Python iterables.
 |                        | `product_index `_,                                                                   |
 |                        | `combination_index `_,                                                           |
 |                        | `permutation_index `_,                                                           |
+|                        | `combination_with_replacement_index `_,                         |
 |                        | `gray_product  `_,                                                                    |
+|                        | `outer_product  `_,                                                                  |
 |                        | `powerset `_,                                                                             |
 |                        | `random_product `_,                                                                 |
 |                        | `random_permutation `_,                                                         |
@@ -148,7 +155,8 @@ Python iterables.
 |                        | `random_combination_with_replacement `_,                       |
 |                        | `nth_product `_,                                                                       |
 |                        | `nth_permutation `_,                                                               |
-|                        | `nth_combination `_                                                                |
+|                        | `nth_combination `_,                                                               |
+|                        | `nth_combination_with_replacement `_                              |
 +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
 | Wrapping               | `always_iterable `_,                                                               |
 |                        | `always_reversible `_,                                                           |
@@ -173,9 +181,14 @@ Python iterables.
 |                        | `tabulate `_,                                                                             |
 |                        | `repeatfunc `_,                                                                         |
 |                        | `polynomial_from_roots `_,                                                   |
-|                        | `sieve `_                                                                                    |
-|                        | `factor `_                                                                                  |
-|                        | `matmul `_                                                                                  |
+|                        | `polynomial_eval `_,                                                               |
+|                        | `polynomial_derivative `_,                                                   |
+|                        | `sieve `_,                                                                                   |
+|                        | `factor `_,                                                                                 |
+|                        | `matmul `_,                                                                                 |
+|                        | `sum_of_squares `_,                                                                 |
+|                        | `totient `_,                                                                               |
+|                        | `reshape `_                                                                                |
 +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
 
 
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
new file mode 100644
index 0000000000..2ce6e4a6f5
--- /dev/null
+++ b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
@@ -0,0 +1,15 @@
+more_itertools-10.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+more_itertools-10.2.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
+more_itertools-10.2.0.dist-info/METADATA,sha256=lTIPxfD4IiP6aHzPjP4dXmzRRUmiXicAB6qnY82T-Gs,34886
+more_itertools-10.2.0.dist-info/RECORD,,
+more_itertools-10.2.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
+more_itertools/__init__.py,sha256=VodgFyRJvpnHbAMgseYRiP7r928FFOAakmQrl6J88os,149
+more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
+more_itertools/__pycache__/__init__.cpython-312.pyc,,
+more_itertools/__pycache__/more.cpython-312.pyc,,
+more_itertools/__pycache__/recipes.cpython-312.pyc,,
+more_itertools/more.py,sha256=jYdpbgXHf8yZDByPrhluxpe0D_IXRk2tfQnyfOFMi74,143045
+more_itertools/more.pyi,sha256=KTHYeqr0rFbn1GWRnv0jY64JRNnKKT0kA3kmsah8DYQ,21044
+more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+more_itertools/recipes.py,sha256=Rb3OhzJTCn2biutDEUSImbuY-8NDS1lkHt0My-uCOf4,27548
+more_itertools/recipes.pyi,sha256=T1IuEVXCqw2NeJJNW036MtWi8BVfR8Ilpf7cBmvhBaQ,4436
diff --git a/pkg_resources/_vendor/more_itertools-9.1.0.dist-info/WHEEL b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/WHEEL
similarity index 100%
rename from pkg_resources/_vendor/more_itertools-9.1.0.dist-info/WHEEL
rename to pkg_resources/_vendor/more_itertools-10.2.0.dist-info/WHEEL
diff --git a/pkg_resources/_vendor/more_itertools-9.1.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-9.1.0.dist-info/RECORD
deleted file mode 100644
index c2fd4da0ac..0000000000
--- a/pkg_resources/_vendor/more_itertools-9.1.0.dist-info/RECORD
+++ /dev/null
@@ -1,15 +0,0 @@
-more_itertools-9.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-more_itertools-9.1.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
-more_itertools-9.1.0.dist-info/METADATA,sha256=qP4FQl-r_CTDFj9wwQAf_KrRs4u_HZBIeyc2WCLW69c,32271
-more_itertools-9.1.0.dist-info/RECORD,,
-more_itertools-9.1.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
-more_itertools/__init__.py,sha256=mTzXsWGDHiVW5x8zHzcRu1imUMzrEtJnUhfsN-dBrV4,148
-more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
-more_itertools/__pycache__/__init__.cpython-311.pyc,,
-more_itertools/__pycache__/more.cpython-311.pyc,,
-more_itertools/__pycache__/recipes.cpython-311.pyc,,
-more_itertools/more.py,sha256=YlrEMtcLMdcmcwL-T9YIQvMKjrAomEDbvQxQd4i5LnA,134968
-more_itertools/more.pyi,sha256=tZNfrCeIQLfOYhRyp0Wq7no_ryJ5h3FDskNNUBD-zmU,20105
-more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-more_itertools/recipes.py,sha256=lgw5bP3UoNfvUPhRaz1VIAfRFkF9pKWN-8UB6H0W5Eo,25416
-more_itertools/recipes.pyi,sha256=Um3BGANEFi4papnQfKBJnlEEuSpXS8-nbxro8OyuOt8,4056
diff --git a/pkg_resources/_vendor/more_itertools/__init__.py b/pkg_resources/_vendor/more_itertools/__init__.py
index 66443971df..aff94a9abd 100644
--- a/pkg_resources/_vendor/more_itertools/__init__.py
+++ b/pkg_resources/_vendor/more_itertools/__init__.py
@@ -3,4 +3,4 @@
 from .more import *  # noqa
 from .recipes import *  # noqa
 
-__version__ = '9.1.0'
+__version__ = '10.2.0'
diff --git a/pkg_resources/_vendor/more_itertools/more.py b/pkg_resources/_vendor/more_itertools/more.py
index e0e2d3de92..d0957681f5 100755
--- a/pkg_resources/_vendor/more_itertools/more.py
+++ b/pkg_resources/_vendor/more_itertools/more.py
@@ -2,7 +2,7 @@
 
 from collections import Counter, defaultdict, deque, abc
 from collections.abc import Sequence
-from functools import partial, reduce, wraps
+from functools import cached_property, partial, reduce, wraps
 from heapq import heapify, heapreplace, heappop
 from itertools import (
     chain,
@@ -17,8 +17,9 @@
     takewhile,
     tee,
     zip_longest,
+    product,
 )
-from math import exp, factorial, floor, log
+from math import exp, factorial, floor, log, perm, comb
 from queue import Empty, Queue
 from random import random, randrange, uniform
 from operator import itemgetter, mul, sub, gt, lt, ge, le
@@ -36,6 +37,7 @@
     take,
     unique_everseen,
     all_equal,
+    batched,
 )
 
 __all__ = [
@@ -53,6 +55,7 @@
     'circular_shifts',
     'collapse',
     'combination_index',
+    'combination_with_replacement_index',
     'consecutive_groups',
     'constrained_batches',
     'consumer',
@@ -65,8 +68,10 @@
     'divide',
     'duplicates_everseen',
     'duplicates_justseen',
+    'classify_unique',
     'exactly_n',
     'filter_except',
+    'filter_map',
     'first',
     'gray_product',
     'groupby_transform',
@@ -80,6 +85,7 @@
     'is_sorted',
     'islice_extended',
     'iterate',
+    'iter_suppress',
     'last',
     'locate',
     'longest_common_prefix',
@@ -93,10 +99,13 @@
     'nth_or_last',
     'nth_permutation',
     'nth_product',
+    'nth_combination_with_replacement',
     'numeric_range',
     'one',
     'only',
+    'outer_product',
     'padded',
+    'partial_product',
     'partitions',
     'peekable',
     'permutation_index',
@@ -125,6 +134,7 @@
     'strictly_n',
     'substrings',
     'substrings_indexes',
+    'takewhile_inclusive',
     'time_limited',
     'unique_in_window',
     'unique_to_each',
@@ -191,15 +201,14 @@ def first(iterable, default=_marker):
     ``next(iter(iterable), default)``.
 
     """
-    try:
-        return next(iter(iterable))
-    except StopIteration as e:
-        if default is _marker:
-            raise ValueError(
-                'first() was called on an empty iterable, and no '
-                'default value was provided.'
-            ) from e
-        return default
+    for item in iterable:
+        return item
+    if default is _marker:
+        raise ValueError(
+            'first() was called on an empty iterable, and no '
+            'default value was provided.'
+        )
+    return default
 
 
 def last(iterable, default=_marker):
@@ -472,7 +481,10 @@ def iterate(func, start):
     """
     while True:
         yield start
-        start = func(start)
+        try:
+            start = func(start)
+        except StopIteration:
+            break
 
 
 def with_iter(context_manager):
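
With this change, the step function can end the stream by raising
StopIteration; previously that exception would escape the generator and,
under PEP 479, surface as a RuntimeError. A minimal sketch of the new
behavior (halve is a hypothetical step function):

    from more_itertools import iterate

    def halve(n):
        if n == 0:
            raise StopIteration  # halts the stream under the new behavior
        return n // 2

    print(list(iterate(halve, 8)))  # [8, 4, 2, 1, 0]
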
@@ -572,6 +584,9 @@ def strictly_n(iterable, n, too_short=None, too_long=None):
         >>> list(strictly_n(iterable, n))
         ['a', 'b', 'c', 'd']
 
+    Note that the returned iterable must be consumed in order for the check to
+    be made.
+
     By default, *too_short* and *too_long* are functions that raise
     ``ValueError``.
 
@@ -909,7 +924,7 @@ def substrings_indexes(seq, reverse=False):
 
 
 class bucket:
-    """Wrap *iterable* and return an object that buckets it iterable into
+    """Wrap *iterable* and return an object that buckets the iterable into
     child iterables based on a *key* function.
 
         >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
@@ -2069,7 +2084,6 @@ def __init__(self, *args):
         if self._step == self._zero:
             raise ValueError('numeric_range() arg 3 must not be zero')
         self._growing = self._step > self._zero
-        self._init_len()
 
     def __bool__(self):
         if self._growing:
@@ -2145,7 +2159,8 @@ def __iter__(self):
     def __len__(self):
         return self._len
 
-    def _init_len(self):
+    @cached_property
+    def _len(self):
         if self._growing:
             start = self._start
             stop = self._stop
@@ -2156,10 +2171,10 @@ def _init_len(self):
             step = -self._step
         distance = stop - start
         if distance <= self._zero:
-            self._len = 0
+            return 0
         else:  # distance > 0 and step > 0: regular euclidean division
             q, r = divmod(distance, step)
-            self._len = int(q) + int(r != self._zero)
+            return int(q) + int(r != self._zero)
 
     def __reduce__(self):
         return numeric_range, (self._start, self._stop, self._step)
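
Moving the length computation into a cached_property means constructing a
numeric_range no longer pays for the divmod up front; len() triggers it on
first use and the result is memoized. The formula is unchanged, e.g.:

    from more_itertools import numeric_range

    # length == q + (r != 0) where q, r = divmod(stop - start, step);
    # divmod(10 - 1, 2) == (4, 1), so the length is 5.
    r = numeric_range(1, 10, 2)
    print(len(r), list(r))  # 5 [1, 3, 5, 7, 9]
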
@@ -2699,6 +2714,9 @@ class seekable:
         >>> it.seek(10)
         >>> next(it)
         '10'
+        >>> it.relative_seek(-2)  # Seeking relative to the current position
+        >>> next(it)
+        '9'
         >>> it.seek(20)  # Seeking past the end of the source isn't a problem
         >>> list(it)
         []
@@ -2812,6 +2830,10 @@ def seek(self, index):
         if remainder > 0:
             consume(self, remainder)
 
+    def relative_seek(self, count):
+        index = len(self._cache)
+        self.seek(max(index + count, 0))
+
 
 class run_length:
     """
@@ -3205,6 +3227,8 @@ class time_limited:
 stops if the time elapsed is greater than *limit_seconds*. If your time
     limit is 1 second, but it takes 2 seconds to generate the first item from
     the iterable, the function will run for 2 seconds and not yield anything.
+    As a special case, when *limit_seconds* is zero, the iterator never
+    returns anything.
 
     """
 
@@ -3220,6 +3244,9 @@ def __iter__(self):
         return self
 
     def __next__(self):
+        if self.limit_seconds == 0:
+            self.timed_out = True
+            raise StopIteration
         item = next(self._iterable)
         if monotonic() - self._start_time > self.limit_seconds:
             self.timed_out = True
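
A short sketch of the zero-limit special case added above: the clock is
never consulted, nothing is yielded, and timed_out is still set.

    from itertools import count
    from more_itertools import time_limited

    it = time_limited(0, count())
    print(list(it))      # []
    print(it.timed_out)  # True
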
@@ -3339,7 +3366,7 @@ def iequals(*iterables):
     >>> iequals("abc", "acb")
     False
 
-    Not to be confused with :func:`all_equals`, which checks whether all
+    Not to be confused with :func:`all_equal`, which checks whether all
     elements of iterable are equal to each other.
 
     """
@@ -3835,7 +3862,7 @@ def nth_permutation(iterable, r, index):
     elif not 0 <= r < n:
         raise ValueError
     else:
-        c = factorial(n) // factorial(n - r)
+        c = perm(n, r)
 
     if index < 0:
         index += c
@@ -3858,6 +3885,52 @@ def nth_permutation(iterable, r, index):
     return tuple(map(pool.pop, result))
 
 
+def nth_combination_with_replacement(iterable, r, index):
+    """Equivalent to
+    ``list(combinations_with_replacement(iterable, r))[index]``.
+
+    The subsequences with repetition of *iterable* that are of length *r* can
+    be ordered lexicographically. :func:`nth_combination_with_replacement`
+    computes the subsequence at sort position *index* directly, without
+    computing the previous subsequences with replacement.
+
+        >>> nth_combination_with_replacement(range(5), 3, 5)
+        (0, 1, 1)
+
+    ``ValueError`` will be raised if *r* is negative or greater than the length
+    of *iterable*.
+    ``IndexError`` will be raised if the given *index* is invalid.
+    """
+    pool = tuple(iterable)
+    n = len(pool)
+    if (r < 0) or (r > n):
+        raise ValueError
+
+    c = comb(n + r - 1, r)
+
+    if index < 0:
+        index += c
+
+    if (index < 0) or (index >= c):
+        raise IndexError
+
+    result = []
+    i = 0
+    while r:
+        r -= 1
+        while n >= 0:
+            num_combs = comb(n + r - 1, r)
+            if index < num_combs:
+                break
+            n -= 1
+            i += 1
+            index -= num_combs
+        result.append(pool[i])
+
+    return tuple(result)
+
+
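
A quick property-style check of the new function against itertools,
assuming the 10.2.0 semantics added above:

    from itertools import combinations_with_replacement
    from more_itertools import nth_combination_with_replacement

    pool, r = range(5), 3
    for i, expected in enumerate(combinations_with_replacement(pool, r)):
        assert nth_combination_with_replacement(pool, r, i) == expected
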
 def value_chain(*args):
     """Yield all arguments passed to the function in the same order in which
     they were passed. If an argument itself is iterable then iterate over its
@@ -3949,9 +4022,66 @@ def combination_index(element, iterable):
     for i, j in enumerate(reversed(indexes), start=1):
         j = n - j
         if i <= j:
-            index += factorial(j) // (factorial(i) * factorial(j - i))
+            index += comb(j, i)
+
+    return comb(n + 1, k + 1) - index
+
+
+def combination_with_replacement_index(element, iterable):
+    """Equivalent to
+    ``list(combinations_with_replacement(iterable, r)).index(element)``
+
+    The subsequences with repetition of *iterable* that are of length *r* can
+    be ordered lexicographically. :func:`combination_with_replacement_index`
+    computes the index of the first *element*, without computing the previous
+    combinations with replacement.
+
+        >>> combination_with_replacement_index('adf', 'abcdefg')
+        20
+
+    ``ValueError`` will be raised if the given *element* isn't one of the
+    combinations with replacement of *iterable*.
+    """
+    element = tuple(element)
+    l = len(element)
+    element = enumerate(element)
+
+    k, y = next(element, (None, None))
+    if k is None:
+        return 0
+
+    indexes = []
+    pool = tuple(iterable)
+    for n, x in enumerate(pool):
+        while x == y:
+            indexes.append(n)
+            tmp, y = next(element, (None, None))
+            if tmp is None:
+                break
+            else:
+                k = tmp
+        if y is None:
+            break
+    else:
+        raise ValueError(
+            'element is not a combination with replacement of iterable'
+        )
+
+    n = len(pool)
+    occupations = [0] * n
+    for p in indexes:
+        occupations[p] += 1
+
+    index = 0
+    cumulative_sum = 0
+    for k in range(1, n):
+        cumulative_sum += occupations[k - 1]
+        j = l + n - 1 - k - cumulative_sum
+        i = n - k
+        if i <= j:
+            index += comb(j, i)
 
-    return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
+    return index
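
The companion function is its inverse: the index it reports is the position
the tuple occupies in the lexicographic enumeration. A sanity check using
the docstring's example:

    from itertools import combinations_with_replacement
    from more_itertools import combination_with_replacement_index

    cwr = list(combinations_with_replacement('abcdefg', 3))
    assert cwr.index(('a', 'd', 'f')) == 20
    assert combination_with_replacement_index('adf', 'abcdefg') == 20
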
 
 
 def permutation_index(element, iterable):
@@ -4056,26 +4186,20 @@ def _chunked_even_finite(iterable, N, n):
     num_full = N - partial_size * num_lists
     num_partial = num_lists - num_full
 
-    buffer = []
-    iterator = iter(iterable)
-
     # Yield num_full lists of full_size
-    for x in iterator:
-        buffer.append(x)
-        if len(buffer) == full_size:
-            yield buffer
-            buffer = []
-            num_full -= 1
-            if num_full <= 0:
-                break
+    partial_start_idx = num_full * full_size
+    if full_size > 0:
+        for i in range(0, partial_start_idx, full_size):
+            yield list(islice(iterable, i, i + full_size))
 
     # Yield num_partial lists of partial_size
-    for x in iterator:
-        buffer.append(x)
-        if len(buffer) == partial_size:
-            yield buffer
-            buffer = []
-            num_partial -= 1
+    if partial_size > 0:
+        for i in range(
+            partial_start_idx,
+            partial_start_idx + (num_partial * partial_size),
+            partial_size,
+        ):
+            yield list(islice(iterable, i, i + partial_size))
 
 
 def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
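
The rewritten _chunked_even_finite slices by index instead of buffering,
re-traversing *iterable* once per chunk; since it receives an explicit
length N, a sized container that can be iterated repeatedly is the expected
input here. Observable behavior is unchanged:

    from more_itertools import chunked_even

    # 7 items with target size 3: one chunk of 3 plus two of 2, rather
    # than 3 + 3 + a lone leftover.
    print(list(chunked_even(range(7), 3)))  # [[0, 1, 2], [3, 4], [5, 6]]
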
@@ -4114,30 +4238,23 @@ def is_scalar(obj):
     if not size:
         return
 
+    new_item = [None] * size
     iterables, iterable_positions = [], []
-    scalars, scalar_positions = [], []
     for i, obj in enumerate(objects):
         if is_scalar(obj):
-            scalars.append(obj)
-            scalar_positions.append(i)
+            new_item[i] = obj
         else:
             iterables.append(iter(obj))
             iterable_positions.append(i)
 
-    if len(scalars) == size:
+    if not iterables:
         yield tuple(objects)
         return
 
     zipper = _zip_equal if strict else zip
     for item in zipper(*iterables):
-        new_item = [None] * size
-
-        for i, elem in zip(iterable_positions, item):
-            new_item[i] = elem
-
-        for i, elem in zip(scalar_positions, scalars):
-            new_item[i] = elem
-
+        for i, new_item[i] in zip(iterable_positions, item):
+            pass
         yield tuple(new_item)
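
The rewritten loop writes each element straight into new_item by using a
subscript as part of the for-loop target: the tuple target binds i first,
then assigns the paired element to new_item[i]. The idiom, standalone:

    positions = [0, 2]
    new_item = [None, 'scalar', None]
    for i, new_item[i] in zip(positions, ['a', 'b']):
        pass
    print(new_item)  # ['a', 'scalar', 'b']
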
 
 
@@ -4162,22 +4279,23 @@ def unique_in_window(iterable, n, key=None):
         raise ValueError('n must be greater than 0')
 
     window = deque(maxlen=n)
-    uniques = set()
+    counts = defaultdict(int)
     use_key = key is not None
 
     for item in iterable:
-        k = key(item) if use_key else item
-        if k in uniques:
-            continue
-
-        if len(uniques) == n:
-            uniques.discard(window[0])
+        if len(window) == n:
+            to_discard = window[0]
+            if counts[to_discard] == 1:
+                del counts[to_discard]
+            else:
+                counts[to_discard] -= 1
 
-        uniques.add(k)
+        k = key(item) if use_key else item
+        if k not in counts:
+            yield item
+        counts[k] += 1
         window.append(k)
 
-        yield item
-
 
 def duplicates_everseen(iterable, key=None):
     """Yield duplicate elements after their first appearance.
@@ -4187,7 +4305,7 @@ def duplicates_everseen(iterable, key=None):
     >>> list(duplicates_everseen('AaaBbbCccAaa', str.lower))
     ['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a']
 
-    This function is analagous to :func:`unique_everseen` and is subject to
+    This function is analogous to :func:`unique_everseen` and is subject to
     the same performance considerations.
 
     """
@@ -4217,15 +4335,52 @@ def duplicates_justseen(iterable, key=None):
     >>> list(duplicates_justseen('AaaBbbCccAaa', str.lower))
     ['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a']
 
-    This function is analagous to :func:`unique_justseen`.
+    This function is analogous to :func:`unique_justseen`.
 
     """
-    return flatten(
-        map(
-            lambda group_tuple: islice_extended(group_tuple[1])[1:],
-            groupby(iterable, key),
-        )
-    )
+    return flatten(g for _, g in groupby(iterable, key) for _ in g)
+
+
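
The one-line duplicates_justseen above relies on iterator sharing: the
inner ``for _ in g`` advances each group iterator by one element before
yielding it, so flatten() sees every group minus its first element, which
is exactly the run of serial duplicates.

    from itertools import groupby
    from more_itertools import flatten

    dupes = flatten(g for _, g in groupby('mississippi') for _ in g)
    print(list(dupes))  # ['s', 's', 'p']
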
+def classify_unique(iterable, key=None):
+    """Classify each element in terms of its uniqueness.
+
+    For each element in the input iterable, return a 3-tuple consisting of:
+
+    1. The element itself
+    2. ``False`` if the element is equal to the one preceding it in the input,
+       ``True`` otherwise (i.e. the equivalent of :func:`unique_justseen`)
+    3. ``False`` if this element has been seen anywhere in the input before,
+       ``True`` otherwise (i.e. the equivalent of :func:`unique_everseen`)
+
+    >>> list(classify_unique('otto'))    # doctest: +NORMALIZE_WHITESPACE
+    [('o', True,  True),
+     ('t', True,  True),
+     ('t', False, False),
+     ('o', True,  False)]
+
+    This function is analogous to :func:`unique_everseen` and is subject to
+    the same performance considerations.
+
+    """
+    seen_set = set()
+    seen_list = []
+    use_key = key is not None
+    previous = None
+
+    for i, element in enumerate(iterable):
+        k = key(element) if use_key else element
+        is_unique_justseen = not i or previous != k
+        previous = k
+        is_unique_everseen = False
+        try:
+            if k not in seen_set:
+                seen_set.add(k)
+                is_unique_everseen = True
+        except TypeError:
+            if k not in seen_list:
+                seen_list.append(k)
+                is_unique_everseen = True
+        yield element, is_unique_justseen, is_unique_everseen
 
 
 def minmax(iterable_or_value, *others, key=None, default=_marker):
@@ -4389,3 +4544,112 @@ def gray_product(*iterables):
             o[j] = -o[j]
             f[j] = f[j + 1]
             f[j + 1] = j + 1
+
+
+def partial_product(*iterables):
+    """Yields tuples containing one item from each iterator, with subsequent
+    tuples changing a single item at a time by advancing each iterator until it
+    is exhausted. This sequence guarantees every value in each iterable is
+    output at least once without generating all possible combinations.
+
+    This may be useful, for example, when testing an expensive function.
+
+        >>> list(partial_product('AB', 'C', 'DEF'))
+        [('A', 'C', 'D'), ('B', 'C', 'D'), ('B', 'C', 'E'), ('B', 'C', 'F')]
+    """
+
+    iterators = list(map(iter, iterables))
+
+    try:
+        prod = [next(it) for it in iterators]
+    except StopIteration:
+        return
+    yield tuple(prod)
+
+    for i, it in enumerate(iterators):
+        for prod[i] in it:
+            yield tuple(prod)
+
+
+def takewhile_inclusive(predicate, iterable):
+    """A variant of :func:`takewhile` that yields one additional element.
+
+        >>> list(takewhile_inclusive(lambda x: x < 5, [1, 4, 6, 4, 1]))
+        [1, 4, 6]
+
+    :func:`takewhile` would return ``[1, 4]``.
+    """
+    for x in iterable:
+        yield x
+        if not predicate(x):
+            break
+
+
+def outer_product(func, xs, ys, *args, **kwargs):
+    """A generalized outer product that applies a binary function to all
+    pairs of items. Returns a 2D matrix with ``len(xs)`` rows and ``len(ys)``
+    columns.
+    Also accepts ``*args`` and ``**kwargs`` that are passed to ``func``.
+
+    Multiplication table:
+
+    >>> list(outer_product(mul, range(1, 4), range(1, 6)))
+    [(1, 2, 3, 4, 5), (2, 4, 6, 8, 10), (3, 6, 9, 12, 15)]
+
+    Cross tabulation:
+
+    >>> xs = ['A', 'B', 'A', 'A', 'B', 'B', 'A', 'A', 'B', 'B']
+    >>> ys = ['X', 'X', 'X', 'Y', 'Z', 'Z', 'Y', 'Y', 'Z', 'Z']
+    >>> rows = list(zip(xs, ys))
+    >>> count_rows = lambda x, y: rows.count((x, y))
+    >>> list(outer_product(count_rows, sorted(set(xs)), sorted(set(ys))))
+    [(2, 3, 0), (1, 0, 4)]
+
+    Usage with ``*args`` and ``**kwargs``:
+
+    >>> animals = ['cat', 'wolf', 'mouse']
+    >>> list(outer_product(min, animals, animals, key=len))
+    [('cat', 'cat', 'cat'), ('cat', 'wolf', 'wolf'), ('cat', 'wolf', 'mouse')]
+    """
+    ys = tuple(ys)
+    return batched(
+        starmap(lambda x, y: func(x, y, *args, **kwargs), product(xs, ys)),
+        n=len(ys),
+    )
+
+
+def iter_suppress(iterable, *exceptions):
+    """Yield each of the items from *iterable*. If the iteration raises one of
+    the specified *exceptions*, that exception will be suppressed and iteration
+    will stop.
+
+    >>> from itertools import chain
+    >>> def breaks_at_five(x):
+    ...     while True:
+    ...         if x >= 5:
+    ...             raise RuntimeError
+    ...         yield x
+    ...         x += 1
+    >>> it_1 = iter_suppress(breaks_at_five(1), RuntimeError)
+    >>> it_2 = iter_suppress(breaks_at_five(2), RuntimeError)
+    >>> list(chain(it_1, it_2))
+    [1, 2, 3, 4, 2, 3, 4]
+    """
+    try:
+        yield from iterable
+    except exceptions:
+        return
+
+
+def filter_map(func, iterable):
+    """Apply *func* to every element of *iterable*, yielding only those which
+    are not ``None``.
+
+    >>> elems = ['1', 'a', '2', 'b', '3']
+    >>> list(filter_map(lambda s: int(s) if s.isnumeric() else None, elems))
+    [1, 2, 3]
+    """
+    for x in iterable:
+        y = func(x)
+        if y is not None:
+            yield y
diff --git a/pkg_resources/_vendor/more_itertools/more.pyi b/pkg_resources/_vendor/more_itertools/more.pyi
index 75c5232c1a..9a5fc911a3 100644
--- a/pkg_resources/_vendor/more_itertools/more.pyi
+++ b/pkg_resources/_vendor/more_itertools/more.pyi
@@ -29,7 +29,7 @@ _U = TypeVar('_U')
 _V = TypeVar('_V')
 _W = TypeVar('_W')
 _T_co = TypeVar('_T_co', covariant=True)
-_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]])
+_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]])
 _Raisable = BaseException | Type[BaseException]
 
 @type_check_only
@@ -74,7 +74,7 @@ class peekable(Generic[_T], Iterator[_T]):
     def __getitem__(self, index: slice) -> list[_T]: ...
 
 def consumer(func: _GenFn) -> _GenFn: ...
-def ilen(iterable: Iterable[object]) -> int: ...
+def ilen(iterable: Iterable[_T]) -> int: ...
 def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
 def with_iter(
     context_manager: ContextManager[Iterable[_T]],
@@ -116,7 +116,7 @@ class bucket(Generic[_T, _U], Container[_U]):
         self,
         iterable: Iterable[_T],
         key: Callable[[_T], _U],
-        validator: Callable[[object], object] | None = ...,
+        validator: Callable[[_U], object] | None = ...,
     ) -> None: ...
     def __contains__(self, value: object) -> bool: ...
     def __iter__(self) -> Iterator[_U]: ...
@@ -383,7 +383,7 @@ def mark_ends(
     iterable: Iterable[_T],
 ) -> Iterable[tuple[bool, bool, _T]]: ...
 def locate(
-    iterable: Iterable[object],
+    iterable: Iterable[_T],
     pred: Callable[..., Any] = ...,
     window_size: int | None = ...,
 ) -> Iterator[int]: ...
@@ -440,6 +440,7 @@ class seekable(Generic[_T], Iterator[_T]):
     def peek(self, default: _U) -> _T | _U: ...
     def elements(self) -> SequenceView[_T]: ...
     def seek(self, index: int) -> None: ...
+    def relative_seek(self, count: int) -> None: ...
 
 class run_length:
     @staticmethod
@@ -578,6 +579,9 @@ def all_unique(
     iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
 ) -> bool: ...
 def nth_product(index: int, *args: Iterable[_T]) -> tuple[_T, ...]: ...
+def nth_combination_with_replacement(
+    iterable: Iterable[_T], r: int, index: int
+) -> tuple[_T, ...]: ...
 def nth_permutation(
     iterable: Iterable[_T], r: int, index: int
 ) -> tuple[_T, ...]: ...
@@ -586,6 +590,9 @@ def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
 def combination_index(
     element: Iterable[_T], iterable: Iterable[_T]
 ) -> int: ...
+def combination_with_replacement_index(
+    element: Iterable[_T], iterable: Iterable[_T]
+) -> int: ...
 def permutation_index(
     element: Iterable[_T], iterable: Iterable[_T]
 ) -> int: ...
@@ -611,6 +618,9 @@ def duplicates_everseen(
 def duplicates_justseen(
     iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
 ) -> Iterator[_T]: ...
+def classify_unique(
+    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
+) -> Iterator[tuple[_T, bool, bool]]: ...
 
 class _SupportsLessThan(Protocol):
     def __lt__(self, __other: Any) -> bool: ...
@@ -655,12 +665,31 @@ def minmax(
 def longest_common_prefix(
     iterables: Iterable[Iterable[_T]],
 ) -> Iterator[_T]: ...
-def iequals(*iterables: Iterable[object]) -> bool: ...
+def iequals(*iterables: Iterable[Any]) -> bool: ...
 def constrained_batches(
-    iterable: Iterable[object],
+    iterable: Iterable[_T],
     max_size: int,
     max_count: int | None = ...,
     get_len: Callable[[_T], object] = ...,
     strict: bool = ...,
 ) -> Iterator[tuple[_T]]: ...
 def gray_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
+def partial_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
+def takewhile_inclusive(
+    predicate: Callable[[_T], bool], iterable: Iterable[_T]
+) -> Iterator[_T]: ...
+def outer_product(
+    func: Callable[[_T, _U], _V],
+    xs: Iterable[_T],
+    ys: Iterable[_U],
+    *args: Any,
+    **kwargs: Any,
+) -> Iterator[tuple[_V, ...]]: ...
+def iter_suppress(
+    iterable: Iterable[_T],
+    *exceptions: Type[BaseException],
+) -> Iterator[_T]: ...
+def filter_map(
+    func: Callable[[_T], _V | None],
+    iterable: Iterable[_T],
+) -> Iterator[_V]: ...
diff --git a/pkg_resources/_vendor/more_itertools/recipes.py b/pkg_resources/_vendor/more_itertools/recipes.py
index 3facc2e3a6..145e3cb5bd 100644
--- a/pkg_resources/_vendor/more_itertools/recipes.py
+++ b/pkg_resources/_vendor/more_itertools/recipes.py
@@ -9,11 +9,10 @@
 """
 import math
 import operator
-import warnings
 
 from collections import deque
 from collections.abc import Sized
-from functools import reduce
+from functools import partial, reduce
 from itertools import (
     chain,
     combinations,
@@ -52,10 +51,13 @@
     'pad_none',
     'pairwise',
     'partition',
+    'polynomial_eval',
     'polynomial_from_roots',
+    'polynomial_derivative',
     'powerset',
     'prepend',
     'quantify',
+    'reshape',
     'random_combination_with_replacement',
     'random_combination',
     'random_permutation',
@@ -65,9 +67,11 @@
     'sieve',
     'sliding_window',
     'subslices',
+    'sum_of_squares',
     'tabulate',
     'tail',
     'take',
+    'totient',
     'transpose',
     'triplewise',
     'unique_everseen',
@@ -77,6 +81,18 @@
 _marker = object()
 
 
+# zip with strict is available for Python 3.10+
+try:
+    zip(strict=True)
+except TypeError:
+    _zip_strict = zip
+else:
+    _zip_strict = partial(zip, strict=True)
+
+# math.sumprod is available for Python 3.12+
+_sumprod = getattr(math, 'sumprod', lambda x, y: dotproduct(x, y))
+
+
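
Both shims follow the same probe-once pattern: detect the capability at
import time, then bind a callable with a uniform interface. The _sumprod
fallback may reference dotproduct before that recipe is defined because the
lambda body is only evaluated at call time. An equivalent standalone
fallback:

    import operator

    sumprod_fallback = lambda x, y: sum(map(operator.mul, x, y))
    print(sumprod_fallback([1, 2, 3], [4, 5, 6]))  # 32
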
 def take(n, iterable):
     """Return first *n* items of the iterable as a list.
 
@@ -293,7 +309,7 @@ def _pairwise(iterable):
     """
     a, b = tee(iterable)
     next(b, None)
-    yield from zip(a, b)
+    return zip(a, b)
 
 
 try:
@@ -303,7 +319,7 @@ def _pairwise(iterable):
 else:
 
     def pairwise(iterable):
-        yield from itertools_pairwise(iterable)
+        return itertools_pairwise(iterable)
 
     pairwise.__doc__ = _pairwise.__doc__
 
@@ -334,13 +350,9 @@ def _zip_equal(*iterables):
         for i, it in enumerate(iterables[1:], 1):
             size = len(it)
             if size != first_size:
-                break
-        else:
-            # If we didn't break out, we can use the built-in zip.
-            return zip(*iterables)
-
-        # If we did break out, there was a mismatch.
-        raise UnequalIterablesError(details=(first_size, i, size))
+                raise UnequalIterablesError(details=(first_size, i, size))
+        # All sizes are equal, we can use the built-in zip.
+        return zip(*iterables)
     # If any one of the iterables didn't have a length, start reading
     # them until one runs out.
     except TypeError:
@@ -433,12 +445,9 @@ def partition(pred, iterable):
     if pred is None:
         pred = bool
 
-    evaluations = ((pred(x), x) for x in iterable)
-    t1, t2 = tee(evaluations)
-    return (
-        (x for (cond, x) in t1 if not cond),
-        (x for (cond, x) in t2 if cond),
-    )
+    t1, t2, p = tee(iterable, 3)
+    p1, p2 = tee(map(pred, p))
+    return (compress(t1, map(operator.not_, p1)), compress(t2, p2))
 
 
 def powerset(iterable):
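
The compress-based rewrite evaluates *pred* exactly once per element (the
predicate results are computed on one tee branch and shared by both output
streams) instead of materializing (cond, x) pairs. Usage is unchanged:

    from more_itertools import partition

    odds, evens = partition(lambda x: x % 2 == 0, range(10))
    print(list(odds), list(evens))  # [1, 3, 5, 7, 9] [0, 2, 4, 6, 8]
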
@@ -486,7 +495,7 @@ def unique_everseen(iterable, key=None):
         >>> list(unique_everseen(iterable, key=tuple))  # Faster
         [[1, 2], [2, 3]]
 
-    Similary, you may want to convert unhashable ``set`` objects with
+    Similarly, you may want to convert unhashable ``set`` objects with
     ``key=frozenset``. For ``dict`` objects,
     ``key=lambda x: frozenset(x.items())`` can be used.
 
@@ -518,6 +527,9 @@ def unique_justseen(iterable, key=None):
     ['A', 'B', 'C', 'A', 'D']
 
     """
+    if key is None:
+        return map(operator.itemgetter(0), groupby(iterable))
+
     return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
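
The new key-is-None fast path returns the groupby keys themselves, which
for an identity grouping are exactly the deduplicated elements, so no group
needs to be drained:

    from more_itertools import unique_justseen

    print(list(unique_justseen('AAAABBBCCDAABBB')))
    # ['A', 'B', 'C', 'D', 'A', 'B']
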
 
 
@@ -712,12 +724,14 @@ def convolve(signal, kernel):
     is immediately consumed and stored.
 
     """
+    # This implementation intentionally doesn't match the one in the itertools
+    # documentation.
     kernel = tuple(kernel)[::-1]
     n = len(kernel)
     window = deque([0], maxlen=n) * n
     for x in chain(signal, repeat(0, n - 1)):
         window.append(x)
-        yield sum(map(operator.mul, kernel, window))
+        yield _sumprod(kernel, window)
 
 
 def before_and_after(predicate, it):
@@ -778,9 +792,7 @@ def sliding_window(iterable, n):
     For a variant with more features, see :func:`windowed`.
     """
     it = iter(iterable)
-    window = deque(islice(it, n), maxlen=n)
-    if len(window) == n:
-        yield tuple(window)
+    window = deque(islice(it, n - 1), maxlen=n)
     for x in it:
         window.append(x)
         yield tuple(window)
@@ -807,39 +819,38 @@ def polynomial_from_roots(roots):
     >>> polynomial_from_roots(roots)  # x^3 - 4 * x^2 - 17 * x + 60
     [1, -4, -17, 60]
     """
-    # Use math.prod for Python 3.8+,
-    prod = getattr(math, 'prod', lambda x: reduce(operator.mul, x, 1))
-    roots = list(map(operator.neg, roots))
-    return [
-        sum(map(prod, combinations(roots, k))) for k in range(len(roots) + 1)
-    ]
+    factors = zip(repeat(1), map(operator.neg, roots))
+    return list(reduce(convolve, factors, [1]))
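
Multiplying polynomials is convolving their coefficient lists, so the
product of the linear factors (1, -root) can be folded with convolve.
Working one set of roots consistent with the docstring's coefficients:
(x - 5)(x + 4) == x^2 - x - 20, and (x^2 - x - 20)(x - 3) ==
x^3 - 4x^2 - 17x + 60.

    from more_itertools import polynomial_from_roots

    print(polynomial_from_roots([5, -4, 3]))  # [1, -4, -17, 60]
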
 
 
-def iter_index(iterable, value, start=0):
+def iter_index(iterable, value, start=0, stop=None):
     """Yield the index of each place in *iterable* that *value* occurs,
-    beginning with index *start*.
+    beginning with index *start* and ending before index *stop*.
 
     See :func:`locate` for a more general means of finding the indexes
     associated with particular values.
 
     >>> list(iter_index('AABCADEAF', 'A'))
     [0, 1, 4, 7]
+    >>> list(iter_index('AABCADEAF', 'A', 1))  # start index is inclusive
+    [1, 4, 7]
+    >>> list(iter_index('AABCADEAF', 'A', 1, 7))  # stop index is not inclusive
+    [1, 4]
     """
-    try:
-        seq_index = iterable.index
-    except AttributeError:
+    seq_index = getattr(iterable, 'index', None)
+    if seq_index is None:
         # Slow path for general iterables
-        it = islice(iterable, start, None)
+        it = islice(iterable, start, stop)
         for i, element in enumerate(it, start):
             if element is value or element == value:
                 yield i
     else:
         # Fast path for sequences
+        stop = len(iterable) if stop is None else stop
         i = start - 1
         try:
             while True:
-                i = seq_index(value, i + 1)
-                yield i
+                yield (i := seq_index(value, i + 1, stop))
         except ValueError:
             pass
 
@@ -850,81 +861,152 @@ def sieve(n):
     >>> list(sieve(30))
     [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
     """
-    isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
+    if n > 2:
+        yield 2
+    start = 3
     data = bytearray((0, 1)) * (n // 2)
-    data[:3] = 0, 0, 0
-    limit = isqrt(n) + 1
-    for p in compress(range(limit), data):
+    limit = math.isqrt(n) + 1
+    for p in iter_index(data, 1, start, limit):
+        yield from iter_index(data, 1, start, p * p)
         data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p)))
-    data[2] = 1
-    return iter_index(data, 1) if n > 2 else iter([])
+        start = p * p
+    yield from iter_index(data, 1, start)
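
The rewritten sieve interleaves marking with yielding: all primes below
p * p are emitted before p's multiples are crossed off, so output starts
before the whole table has been processed. A cross-check against naive
trial division:

    from more_itertools import sieve

    def is_prime(n):  # for verification only
        return n > 1 and all(n % d for d in range(2, int(n**0.5) + 1))

    assert list(sieve(100)) == [n for n in range(100) if is_prime(n)]
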
 
 
-def batched(iterable, n):
-    """Batch data into lists of length *n*. The last batch may be shorter.
+def _batched(iterable, n, *, strict=False):
+    """Batch data into tuples of length *n*. If the number of items in
+    *iterable* is not divisible by *n*:
+    * The last batch will be shorter if *strict* is ``False``.
+    * :exc:`ValueError` will be raised if *strict* is ``True``.
 
     >>> list(batched('ABCDEFG', 3))
-    [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
+    [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
 
-    This recipe is from the ``itertools`` docs. This library also provides
-    :func:`chunked`, which has a different implementation.
+    On Python 3.13 and above, this is an alias for :func:`itertools.batched`.
     """
-    if hexversion >= 0x30C00A0:  # Python 3.12.0a0
-        warnings.warn(
-            (
-                'batched will be removed in a future version of '
-                'more-itertools. Use the standard library '
-                'itertools.batched function instead'
-            ),
-            DeprecationWarning,
-        )
-
+    if n < 1:
+        raise ValueError('n must be at least one')
     it = iter(iterable)
-    while True:
-        batch = list(islice(it, n))
-        if not batch:
-            break
+    while batch := tuple(islice(it, n)):
+        if strict and len(batch) != n:
+            raise ValueError('batched(): incomplete batch')
         yield batch
 
 
+if hexversion >= 0x30D00A2:
+    from itertools import batched as itertools_batched
+
+    def batched(iterable, n, *, strict=False):
+        return itertools_batched(iterable, n, strict=strict)
+
+else:
+    batched = _batched
+
+    batched.__doc__ = _batched.__doc__
+
+
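
With the new keyword, an uneven tail can be made an error instead of a
short final batch:

    from more_itertools import batched

    print(list(batched('ABCDEFG', 3)))
    # [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
    try:
        list(batched('ABCDEFG', 3, strict=True))
    except ValueError as exc:
        print(exc)  # batched(): incomplete batch
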
 def transpose(it):
-    """Swap the rows and columns of the input.
+    """Swap the rows and columns of the input matrix.
 
     >>> list(transpose([(1, 2, 3), (11, 22, 33)]))
     [(1, 11), (2, 22), (3, 33)]
 
     The caller should ensure that the dimensions of the input are compatible.
+    If the input is empty, no output will be produced.
+    """
+    return _zip_strict(*it)
+
+
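
Because transpose now zips strictly where available, ragged input fails
loudly on Python 3.10+ rather than being silently truncated; on older
interpreters _zip_strict falls back to plain zip and keeps the truncating
behavior.

    from more_itertools import transpose

    print(list(transpose([(1, 2, 3), (11, 22, 33)])))
    # [(1, 11), (2, 22), (3, 33)]
    # transpose([(1, 2), (3,)]) raises ValueError on Python 3.10+.
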
+def reshape(matrix, cols):
+    """Reshape the 2-D input *matrix* to have a column count given by *cols*.
+
+    >>> matrix = [(0, 1), (2, 3), (4, 5)]
+    >>> cols = 3
+    >>> list(reshape(matrix, cols))
+    [(0, 1, 2), (3, 4, 5)]
     """
-    # TODO: when 3.9 goes end-of-life, add stric=True to this.
-    return zip(*it)
+    return batched(chain.from_iterable(matrix), cols)
 
 
 def matmul(m1, m2):
     """Multiply two matrices.
+
     >>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)]))
-    [[49, 80], [41, 60]]
+    [(49, 80), (41, 60)]
 
     The caller should ensure that the dimensions of the input matrices are
     compatible with each other.
     """
     n = len(m2[0])
-    return batched(starmap(dotproduct, product(m1, transpose(m2))), n)
+    return batched(starmap(_sumprod, product(m1, transpose(m2))), n)
 
 
 def factor(n):
     """Yield the prime factors of n.
+
     >>> list(factor(360))
     [2, 2, 2, 3, 3, 5]
     """
-    isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
-    for prime in sieve(isqrt(n) + 1):
-        while True:
-            quotient, remainder = divmod(n, prime)
-            if remainder:
-                break
+    for prime in sieve(math.isqrt(n) + 1):
+        while not n % prime:
             yield prime
-            n = quotient
+            n //= prime
             if n == 1:
                 return
-    if n >= 2:
+    if n > 1:
         yield n
+
+
+def polynomial_eval(coefficients, x):
+    """Evaluate a polynomial at a specific value.
+
+    Example: evaluating x^3 - 4 * x^2 - 17 * x + 60 at x = 2.5:
+
+    >>> coefficients = [1, -4, -17, 60]
+    >>> x = 2.5
+    >>> polynomial_eval(coefficients, x)
+    8.125
+    """
+    n = len(coefficients)
+    if n == 0:
+        return x * 0  # coerce zero to the type of x
+    powers = map(pow, repeat(x), reversed(range(n)))
+    return _sumprod(coefficients, powers)
+
+
+def sum_of_squares(it):
+    """Return the sum of the squares of the input values.
+
+    >>> sum_of_squares([10, 20, 30])
+    1400
+    """
+    return _sumprod(*tee(it))
+
+
+def polynomial_derivative(coefficients):
+    """Compute the first derivative of a polynomial.
+
+    Example: computing the derivative of x^3 - 4 * x^2 - 17 * x + 60
+
+    >>> coefficients = [1, -4, -17, 60]
+    >>> derivative_coefficients = polynomial_derivative(coefficients)
+    >>> derivative_coefficients
+    [3, -8, -17]
+    """
+    n = len(coefficients)
+    powers = reversed(range(1, n))
+    return list(map(operator.mul, coefficients, powers))
+
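
The implementation leans on map() stopping at its shorter input: there is
one more coefficient than there are powers, so the constant term is dropped
automatically, matching the power rule d/dx(c * x^k) == c * k * x^(k - 1).

    # [1, -4, -17, 60] zipped against powers (3, 2, 1); 60 falls off.
    coefficients = [1, -4, -17, 60]
    powers = reversed(range(1, len(coefficients)))
    print([c * k for c, k in zip(coefficients, powers)])  # [3, -8, -17]
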
+
+def totient(n):
+    """Return the count of natural numbers up to *n* that are coprime with *n*.
+
+    >>> totient(9)
+    6
+    >>> totient(12)
+    4
+    """
+    for p in unique_justseen(factor(n)):
+        n = n // p * (p - 1)
+
+    return n
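
Working the second doctest by hand: factor(12) yields 2, 2, 3, and
unique_justseen keeps 2 and 3, so n goes 12 -> 12 // 2 * 1 == 6 ->
6 // 3 * 2 == 4; indeed 1, 5, 7, and 11 are the four values coprime
with 12.

    from more_itertools import totient

    print(totient(12))  # 4
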
diff --git a/pkg_resources/_vendor/more_itertools/recipes.pyi b/pkg_resources/_vendor/more_itertools/recipes.pyi
index 0267ed569e..ed4c19db49 100644
--- a/pkg_resources/_vendor/more_itertools/recipes.pyi
+++ b/pkg_resources/_vendor/more_itertools/recipes.pyi
@@ -14,6 +14,8 @@ from typing import (
 
 # Type and type variable definitions
 _T = TypeVar('_T')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
 _U = TypeVar('_U')
 
 def take(n: int, iterable: Iterable[_T]) -> list[_T]: ...
@@ -21,19 +23,19 @@ def tabulate(
     function: Callable[[int], _T], start: int = ...
 ) -> Iterator[_T]: ...
 def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
-def consume(iterator: Iterable[object], n: int | None = ...) -> None: ...
+def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
 @overload
 def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
 @overload
 def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
-def all_equal(iterable: Iterable[object]) -> bool: ...
+def all_equal(iterable: Iterable[_T]) -> bool: ...
 def quantify(
     iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
 ) -> int: ...
 def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
 def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
 def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
-def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ...
+def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ...
 def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
 def repeatfunc(
     func: Callable[..., _U], times: int | None = ..., *args: Any
@@ -101,19 +103,26 @@ def sliding_window(
     iterable: Iterable[_T], n: int
 ) -> Iterator[tuple[_T, ...]]: ...
 def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ...
-def polynomial_from_roots(roots: Sequence[int]) -> list[int]: ...
+def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ...
 def iter_index(
-    iterable: Iterable[object],
+    iterable: Iterable[_T],
     value: Any,
     start: int | None = ...,
+    stop: int | None = ...,
 ) -> Iterator[int]: ...
 def sieve(n: int) -> Iterator[int]: ...
 def batched(
-    iterable: Iterable[_T],
-    n: int,
-) -> Iterator[list[_T]]: ...
+    iterable: Iterable[_T], n: int, *, strict: bool = False
+) -> Iterator[tuple[_T]]: ...
 def transpose(
     it: Iterable[Iterable[_T]],
-) -> tuple[Iterator[_T], ...]: ...
-def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[list[_T]]: ...
+) -> Iterator[tuple[_T, ...]]: ...
+def reshape(
+    matrix: Iterable[Iterable[_T]], cols: int
+) -> Iterator[tuple[_T, ...]]: ...
+def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ...
 def factor(n: int) -> Iterator[int]: ...
+def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ...
+def sum_of_squares(it: Iterable[_T]) -> _T: ...
+def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ...
+def totient(n: int) -> int: ...
diff --git a/pkg_resources/_vendor/packaging-23.1.dist-info/RECORD b/pkg_resources/_vendor/packaging-23.1.dist-info/RECORD
index e240a8408d..e041f20f6a 100644
--- a/pkg_resources/_vendor/packaging-23.1.dist-info/RECORD
+++ b/pkg_resources/_vendor/packaging-23.1.dist-info/RECORD
@@ -7,20 +7,20 @@ packaging-23.1.dist-info/RECORD,,
 packaging-23.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 packaging-23.1.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
 packaging/__init__.py,sha256=kYVZSmXT6CWInT4UJPDtrSQBAZu8fMuFBxpv5GsDTLk,501
-packaging/__pycache__/__init__.cpython-311.pyc,,
-packaging/__pycache__/_elffile.cpython-311.pyc,,
-packaging/__pycache__/_manylinux.cpython-311.pyc,,
-packaging/__pycache__/_musllinux.cpython-311.pyc,,
-packaging/__pycache__/_parser.cpython-311.pyc,,
-packaging/__pycache__/_structures.cpython-311.pyc,,
-packaging/__pycache__/_tokenizer.cpython-311.pyc,,
-packaging/__pycache__/markers.cpython-311.pyc,,
-packaging/__pycache__/metadata.cpython-311.pyc,,
-packaging/__pycache__/requirements.cpython-311.pyc,,
-packaging/__pycache__/specifiers.cpython-311.pyc,,
-packaging/__pycache__/tags.cpython-311.pyc,,
-packaging/__pycache__/utils.cpython-311.pyc,,
-packaging/__pycache__/version.cpython-311.pyc,,
+packaging/__pycache__/__init__.cpython-312.pyc,,
+packaging/__pycache__/_elffile.cpython-312.pyc,,
+packaging/__pycache__/_manylinux.cpython-312.pyc,,
+packaging/__pycache__/_musllinux.cpython-312.pyc,,
+packaging/__pycache__/_parser.cpython-312.pyc,,
+packaging/__pycache__/_structures.cpython-312.pyc,,
+packaging/__pycache__/_tokenizer.cpython-312.pyc,,
+packaging/__pycache__/markers.cpython-312.pyc,,
+packaging/__pycache__/metadata.cpython-312.pyc,,
+packaging/__pycache__/requirements.cpython-312.pyc,,
+packaging/__pycache__/specifiers.cpython-312.pyc,,
+packaging/__pycache__/tags.cpython-312.pyc,,
+packaging/__pycache__/utils.cpython-312.pyc,,
+packaging/__pycache__/version.cpython-312.pyc,,
 packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
 packaging/_manylinux.py,sha256=ESGrDEVmBc8jYTtdZRAWiLk72lOzAKWeezFgoJ_MuBc,8926
 packaging/_musllinux.py,sha256=mvPk7FNjjILKRLIdMxR7IvJ1uggLgCszo-L9rjfpi0M,2524
diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD
index 843a5baf9d..a721322694 100644
--- a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD
+++ b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD
@@ -6,14 +6,14 @@ platformdirs-2.6.2.dist-info/WHEEL,sha256=NaLmgHHW_f9jTvv_wRh9vcK7c7EK9o5fwsIXMO
 platformdirs-2.6.2.dist-info/licenses/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089
 platformdirs/__init__.py,sha256=td0a-fHENmnG8ess2WRoysKv9ud5j6TQ-p_iUM_uE18,12864
 platformdirs/__main__.py,sha256=VsC0t5m-6f0YVr96PVks93G3EDF8MSNY4KpUMvPahDA,1164
-platformdirs/__pycache__/__init__.cpython-311.pyc,,
-platformdirs/__pycache__/__main__.cpython-311.pyc,,
-platformdirs/__pycache__/android.cpython-311.pyc,,
-platformdirs/__pycache__/api.cpython-311.pyc,,
-platformdirs/__pycache__/macos.cpython-311.pyc,,
-platformdirs/__pycache__/unix.cpython-311.pyc,,
-platformdirs/__pycache__/version.cpython-311.pyc,,
-platformdirs/__pycache__/windows.cpython-311.pyc,,
+platformdirs/__pycache__/__init__.cpython-312.pyc,,
+platformdirs/__pycache__/__main__.cpython-312.pyc,,
+platformdirs/__pycache__/android.cpython-312.pyc,,
+platformdirs/__pycache__/api.cpython-312.pyc,,
+platformdirs/__pycache__/macos.cpython-312.pyc,,
+platformdirs/__pycache__/unix.cpython-312.pyc,,
+platformdirs/__pycache__/version.cpython-312.pyc,,
+platformdirs/__pycache__/windows.cpython-312.pyc,,
 platformdirs/android.py,sha256=GKizhyS7ESRiU67u8UnBJLm46goau9937EchXWbPBlk,4068
 platformdirs/api.py,sha256=MXKHXOL3eh_-trSok-JUTjAR_zjmmKF3rjREVABjP8s,4910
 platformdirs/macos.py,sha256=-3UXQewbT0yMhMdkzRXfXGAntmLIH7Qt4a9Hlf8I5_Y,2655
diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD
index b9e1bb0391..e1132566df 100644
--- a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD
+++ b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD
@@ -1,4 +1,4 @@
-__pycache__/typing_extensions.cpython-311.pyc,,
+__pycache__/typing_extensions.cpython-312.pyc,,
 typing_extensions-4.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
 typing_extensions-4.4.0.dist-info/LICENSE,sha256=x6-2XnVXB7n7kEhziaF20-09ADHVExr95FwjcV_16JE,12787
 typing_extensions-4.4.0.dist-info/METADATA,sha256=1zSh1eMLnLkLMMC6aZSGRKx3eRnivEGDFWGSVD1zqhA,7249
diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt
index 4cd4ab8cb8..1138915921 100644
--- a/pkg_resources/_vendor/vendored.txt
+++ b/pkg_resources/_vendor/vendored.txt
@@ -9,3 +9,5 @@ jaraco.text==3.7.0
 importlib_resources==5.10.2
 # required for importlib_resources on older Pythons
 zipp==3.7.0
+# required for jaraco.context on older Pythons
+backports.tarfile
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD
index 0a88551ce0..adc797bc2e 100644
--- a/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD
+++ b/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD
@@ -1,4 +1,4 @@
-__pycache__/zipp.cpython-311.pyc,,
+__pycache__/zipp.cpython-312.pyc,,
 zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
 zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
 zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261
diff --git a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/INSTALLER b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/jaraco.functools-3.6.0.dist-info/INSTALLER
rename to setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/LICENSE b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
similarity index 97%
rename from setuptools/_vendor/jaraco.context-4.3.0.dist-info/LICENSE
rename to setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
index 353924be0e..1bb5a44356 100644
--- a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/LICENSE
+++ b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
@@ -1,5 +1,3 @@
-Copyright Jason R. Coombs
-
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to
 deal in the Software without restriction, including without limitation the
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
new file mode 100644
index 0000000000..e7b64c87f8
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
@@ -0,0 +1,44 @@
+Metadata-Version: 2.1
+Name: backports.tarfile
+Version: 1.0.0
+Summary: Backport of CPython tarfile module
+Home-page: https://github.com/jaraco/backports.tarfile
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+License-File: LICENSE
+Provides-Extra: docs
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: sphinx-lint ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
+   :target: https://pypi.org/project/backports.tarfile
+
+.. image:: https://img.shields.io/pypi/pyversions/backports.tarfile.svg
+
+.. image:: https://github.com/jaraco/backports.tarfile/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/backports.tarfile/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. .. image:: https://readthedocs.org/projects/backportstarfile/badge/?version=latest
+..    :target: https://backportstarfile.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..a6a44d8fcc
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
+backports.tarfile-1.0.0.dist-info/RECORD,,
+backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__pycache__/tarfile.cpython-312.pyc,,
+backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/WHEEL b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
similarity index 65%
rename from pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/WHEEL
rename to setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
index 57e3d840d5..bab98d6758 100644
--- a/pkg_resources/_vendor/jaraco.context-4.3.0.dist-info/WHEEL
+++ b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.38.4)
+Generator: bdist_wheel (0.43.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..99d2be5b64
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+backports
diff --git a/setuptools/_vendor/backports/tarfile.py b/setuptools/_vendor/backports/tarfile.py
new file mode 100644
index 0000000000..a7a9a6e7b9
--- /dev/null
+++ b/setuptools/_vendor/backports/tarfile.py
@@ -0,0 +1,2900 @@
+#!/usr/bin/env python3
+#-------------------------------------------------------------------
+# tarfile.py
+#-------------------------------------------------------------------
+# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
+# All rights reserved.
+#
+# Permission  is  hereby granted,  free  of charge,  to  any person
+# obtaining a  copy of  this software  and associated documentation
+# files  (the  "Software"),  to   deal  in  the  Software   without
+# restriction,  including  without limitation  the  rights to  use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies  of  the  Software,  and to  permit  persons  to  whom the
+# Software  is  furnished  to  do  so,  subject  to  the  following
+# conditions:
+#
+# The above copyright  notice and this  permission notice shall  be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
+# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
+# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
+# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
+# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
+# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+"""Read from and write to tar format archives.
+"""
+
+version     = "0.9.0"
+__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
+__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
+
+#---------
+# Imports
+#---------
+from builtins import open as bltn_open
+import sys
+import os
+import io
+import shutil
+import stat
+import time
+import struct
+import copy
+import re
+import warnings
+
+try:
+    import pwd
+except ImportError:
+    pwd = None
+try:
+    import grp
+except ImportError:
+    grp = None
+
+# os.symlink on Windows prior to 6.0 raises NotImplementedError
+# OSError (winerror=1314) will be raised if the caller does not hold the
+# SeCreateSymbolicLinkPrivilege privilege
+symlink_exception = (AttributeError, NotImplementedError, OSError)
+
+# from tarfile import *
+__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
+           "CompressionError", "StreamError", "ExtractError", "HeaderError",
+           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
+           "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
+           "tar_filter", "FilterError", "AbsoluteLinkError",
+           "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
+           "LinkOutsideDestinationError"]
+
+
+#---------------------------------------------------------
+# tar constants
+#---------------------------------------------------------
+NUL = b"\0"                     # the null character
+BLOCKSIZE = 512                 # length of processing blocks
+RECORDSIZE = BLOCKSIZE * 20     # length of records
+GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
+POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string
+
+LENGTH_NAME = 100               # maximum length of a filename
+LENGTH_LINK = 100               # maximum length of a linkname
+LENGTH_PREFIX = 155             # maximum length of the prefix field
+
+REGTYPE = b"0"                  # regular file
+AREGTYPE = b"\0"                # regular file
+LNKTYPE = b"1"                  # link (inside tarfile)
+SYMTYPE = b"2"                  # symbolic link
+CHRTYPE = b"3"                  # character special device
+BLKTYPE = b"4"                  # block special device
+DIRTYPE = b"5"                  # directory
+FIFOTYPE = b"6"                 # fifo special device
+CONTTYPE = b"7"                 # contiguous file
+
+GNUTYPE_LONGNAME = b"L"         # GNU tar longname
+GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
+GNUTYPE_SPARSE = b"S"           # GNU tar sparse file
+
+XHDTYPE = b"x"                  # POSIX.1-2001 extended header
+XGLTYPE = b"g"                  # POSIX.1-2001 global header
+SOLARIS_XHDTYPE = b"X"          # Solaris extended header
+
+USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
+GNU_FORMAT = 1                  # GNU tar format
+PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
+DEFAULT_FORMAT = PAX_FORMAT
+
+#---------------------------------------------------------
+# tarfile constants
+#---------------------------------------------------------
+# File types that tarfile supports:
+SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
+                   SYMTYPE, DIRTYPE, FIFOTYPE,
+                   CONTTYPE, CHRTYPE, BLKTYPE,
+                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+                   GNUTYPE_SPARSE)
+
+# File types that will be treated as a regular file.
+REGULAR_TYPES = (REGTYPE, AREGTYPE,
+                 CONTTYPE, GNUTYPE_SPARSE)
+
+# File types that are part of the GNU tar format.
+GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+             GNUTYPE_SPARSE)
+
+# Fields from a pax header that override a TarInfo attribute.
+PAX_FIELDS = ("path", "linkpath", "size", "mtime",
+              "uid", "gid", "uname", "gname")
+
+# Fields from a pax header that are affected by hdrcharset.
+PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
+
+# Fields in a pax header that are numbers, all other fields
+# are treated as strings.
+PAX_NUMBER_FIELDS = {
+    "atime": float,
+    "ctime": float,
+    "mtime": float,
+    "uid": int,
+    "gid": int,
+    "size": int
+}
+
+#---------------------------------------------------------
+# initialization
+#---------------------------------------------------------
+if os.name == "nt":
+    ENCODING = "utf-8"
+else:
+    ENCODING = sys.getfilesystemencoding()
+
+#---------------------------------------------------------
+# Some useful functions
+#---------------------------------------------------------
+
+def stn(s, length, encoding, errors):
+    """Convert a string to a null-terminated bytes object.
+    """
+    if s is None:
+        raise ValueError("metadata cannot contain None")
+    s = s.encode(encoding, errors)
+    return s[:length] + (length - len(s)) * NUL
+
+def nts(s, encoding, errors):
+    """Convert a null-terminated bytes object to a string.
+    """
+    p = s.find(b"\0")
+    if p != -1:
+        s = s[:p]
+    return s.decode(encoding, errors)
+
+def nti(s):
+    """Convert a number field to a python number.
+    """
+    # There are two possible encodings for a number field, see
+    # itn() below.
+    if s[0] in (0o200, 0o377):
+        n = 0
+        for i in range(len(s) - 1):
+            n <<= 8
+            n += s[i + 1]
+        if s[0] == 0o377:
+            n = -(256 ** (len(s) - 1) - n)
+    else:
+        try:
+            s = nts(s, "ascii", "strict")
+            n = int(s.strip() or "0", 8)
+        except ValueError:
+            raise InvalidHeaderError("invalid header")
+    return n
+
+def itn(n, digits=8, format=DEFAULT_FORMAT):
+    """Convert a python number to a number field.
+    """
+    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
+    # octal digits followed by a null-byte, this allows values up to
+    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
+    # that if necessary. A leading 0o200 or 0o377 byte indicate this
+    # particular encoding, the following digits-1 bytes are a big-endian
+    # base-256 representation. This allows values up to (256**(digits-1))-1.
+    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
+    # number.
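+    # For example, with digits=8, itn(511) yields b"0000777\x00" (seven octal
+    # digits plus NUL), while a value too large for seven octal digits, such
+    # as itn(8**7, format=GNU_FORMAT), becomes the marker byte 0o200 followed
+    # by seven big-endian base-256 digits: b"\x80\x00\x00\x00\x00\x20\x00\x00".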
+    original_n = n
+    n = int(n)
+    if 0 <= n < 8 ** (digits - 1):
+        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
+    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
+        if n >= 0:
+            s = bytearray([0o200])
+        else:
+            s = bytearray([0o377])
+            n = 256 ** digits + n
+
+        for i in range(digits - 1):
+            s.insert(1, n & 0o377)
+            n >>= 8
+    else:
+        raise ValueError("overflow in number field")
+
+    return s
+
+def calc_chksums(buf):
+    """Calculate the checksum for a member's header by summing up all
+       characters except for the chksum field which is treated as if
+       it was filled with spaces. According to the GNU tar sources,
+       some tars (Sun and NeXT) calculate chksum with signed char,
+       which will be different if there are chars in the buffer with
+       the high bit set. So we calculate two checksums, unsigned and
+       signed.
+    """
+    unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
+    signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
+    return unsigned_chksum, signed_chksum
+
+def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
+    """Copy length bytes from fileobj src to fileobj dst.
+       If length is None, copy the entire content.
+    """
+    bufsize = bufsize or 16 * 1024
+    if length == 0:
+        return
+    if length is None:
+        shutil.copyfileobj(src, dst, bufsize)
+        return
+
+    blocks, remainder = divmod(length, bufsize)
+    for b in range(blocks):
+        buf = src.read(bufsize)
+        if len(buf) < bufsize:
+            raise exception("unexpected end of data")
+        dst.write(buf)
+
+    if remainder != 0:
+        buf = src.read(remainder)
+        if len(buf) < remainder:
+            raise exception("unexpected end of data")
+        dst.write(buf)
+    return
+
+def _safe_print(s):
+    encoding = getattr(sys.stdout, 'encoding', None)
+    if encoding is not None:
+        s = s.encode(encoding, 'backslashreplace').decode(encoding)
+    print(s, end=' ')
+
+
+class TarError(Exception):
+    """Base exception."""
+    pass
+class ExtractError(TarError):
+    """General exception for extract errors."""
+    pass
+class ReadError(TarError):
+    """Exception for unreadable tar archives."""
+    pass
+class CompressionError(TarError):
+    """Exception for unavailable compression methods."""
+    pass
+class StreamError(TarError):
+    """Exception for unsupported operations on stream-like TarFiles."""
+    pass
+class HeaderError(TarError):
+    """Base exception for header errors."""
+    pass
+class EmptyHeaderError(HeaderError):
+    """Exception for empty headers."""
+    pass
+class TruncatedHeaderError(HeaderError):
+    """Exception for truncated headers."""
+    pass
+class EOFHeaderError(HeaderError):
+    """Exception for end of file headers."""
+    pass
+class InvalidHeaderError(HeaderError):
+    """Exception for invalid headers."""
+    pass
+class SubsequentHeaderError(HeaderError):
+    """Exception for missing and invalid extended headers."""
+    pass
+
+#---------------------------
+# internal stream interface
+#---------------------------
+class _LowLevelFile:
+    """Low-level file object. Supports reading and writing.
+       It is used instead of a regular file object for streaming
+       access.
+    """
+
+    def __init__(self, name, mode):
+        mode = {
+            "r": os.O_RDONLY,
+            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
+        }[mode]
+        if hasattr(os, "O_BINARY"):
+            mode |= os.O_BINARY
+        self.fd = os.open(name, mode, 0o666)
+
+    def close(self):
+        os.close(self.fd)
+
+    def read(self, size):
+        return os.read(self.fd, size)
+
+    def write(self, s):
+        os.write(self.fd, s)
+
+class _Stream:
+    """Class that serves as an adapter between TarFile and
+       a stream-like object.  The stream-like object only
+       needs to have a read() or write() method that works with bytes,
+       and the method is accessed blockwise.
+       Use of gzip or bzip2 compression is possible.
+       A stream-like object could be for example: sys.stdin.buffer,
+       sys.stdout.buffer, a socket, a tape device etc.
+
+       _Stream is intended to be used only internally.
+    """
+
+    def __init__(self, name, mode, comptype, fileobj, bufsize,
+                 compresslevel):
+        """Construct a _Stream object.
+        """
+        self._extfileobj = True
+        if fileobj is None:
+            fileobj = _LowLevelFile(name, mode)
+            self._extfileobj = False
+
+        if comptype == '*':
+            # Enable transparent compression detection for the
+            # stream interface
+            fileobj = _StreamProxy(fileobj)
+            comptype = fileobj.getcomptype()
+
+        self.name     = name or ""
+        self.mode     = mode
+        self.comptype = comptype
+        self.fileobj  = fileobj
+        self.bufsize  = bufsize
+        self.buf      = b""
+        self.pos      = 0
+        self.closed   = False
+
+        try:
+            if comptype == "gz":
+                try:
+                    import zlib
+                except ImportError:
+                    raise CompressionError("zlib module is not available") from None
+                self.zlib = zlib
+                self.crc = zlib.crc32(b"")
+                if mode == "r":
+                    self.exception = zlib.error
+                    self._init_read_gz()
+                else:
+                    self._init_write_gz(compresslevel)
+
+            elif comptype == "bz2":
+                try:
+                    import bz2
+                except ImportError:
+                    raise CompressionError("bz2 module is not available") from None
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = bz2.BZ2Decompressor()
+                    self.exception = OSError
+                else:
+                    self.cmp = bz2.BZ2Compressor(compresslevel)
+
+            elif comptype == "xz":
+                try:
+                    import lzma
+                except ImportError:
+                    raise CompressionError("lzma module is not available") from None
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = lzma.LZMADecompressor()
+                    self.exception = lzma.LZMAError
+                else:
+                    self.cmp = lzma.LZMACompressor()
+
+            elif comptype != "tar":
+                raise CompressionError("unknown compression type %r" % comptype)
+
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    def __del__(self):
+        if hasattr(self, "closed") and not self.closed:
+            self.close()
+
+    def _init_write_gz(self, compresslevel):
+        """Initialize for writing with gzip compression.
+        """
+        self.cmp = self.zlib.compressobj(compresslevel,
+                                         self.zlib.DEFLATED,
+                                         -self.zlib.MAX_WBITS,
+                                         self.zlib.DEF_MEM_LEVEL,
+                                         0)
+        timestamp = struct.pack("<L", int(time.time()))
+        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+        if self.name.endswith(".gz"):
+            self.name = self.name[:-3]
+        # Honor "directory components removed" from RFC1952
+        self.name = os.path.basename(self.name)
+        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
+        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+    def write(self, s):
+        """Write string s to the stream.
+        """
+        if self.comptype == "gz":
+            self.crc = self.zlib.crc32(s, self.crc)
+        self.pos += len(s)
+        if self.comptype != "tar":
+            s = self.cmp.compress(s)
+        self.__write(s)
+
+    def __write(self, s):
+        """Write string s to the stream if a whole new block
+           is ready to be written.
+        """
+        self.buf += s
+        while len(self.buf) > self.bufsize:
+            self.fileobj.write(self.buf[:self.bufsize])
+            self.buf = self.buf[self.bufsize:]
+
+    def close(self):
+        """Close the _Stream object. No operation should be
+           done on it afterwards.
+        """
+        if self.closed:
+            return
+
+        self.closed = True
+        try:
+            if self.mode == "w" and self.comptype != "tar":
+                self.buf += self.cmp.flush()
+
+            if self.mode == "w" and self.buf:
+                self.fileobj.write(self.buf)
+                self.buf = b""
+                if self.comptype == "gz":
+                    self.fileobj.write(struct.pack("= 0:
+            blocks, remainder = divmod(pos - self.pos, self.bufsize)
+            for i in range(blocks):
+                self.read(self.bufsize)
+            self.read(remainder)
+        else:
+            raise StreamError("seeking backwards is not allowed")
+        return self.pos
+
+    def read(self, size):
+        """Return the next size number of bytes from the stream."""
+        assert size is not None
+        buf = self._read(size)
+        self.pos += len(buf)
+        return buf
+
+    def _read(self, size):
+        """Return size bytes from the stream.
+        """
+        if self.comptype == "tar":
+            return self.__read(size)
+
+        c = len(self.dbuf)
+        t = [self.dbuf]
+        while c < size:
+            # Skip underlying buffer to avoid unaligned double buffering.
+            if self.buf:
+                buf = self.buf
+                self.buf = b""
+            else:
+                buf = self.fileobj.read(self.bufsize)
+                if not buf:
+                    break
+            try:
+                buf = self.cmp.decompress(buf)
+            except self.exception as e:
+                raise ReadError("invalid compressed data") from e
+            t.append(buf)
+            c += len(buf)
+        t = b"".join(t)
+        self.dbuf = t[size:]
+        return t[:size]
+
+    def __read(self, size):
+        """Return size bytes from stream. If internal buffer is empty,
+           read another block from the stream.
+        """
+        c = len(self.buf)
+        t = [self.buf]
+        while c < size:
+            buf = self.fileobj.read(self.bufsize)
+            if not buf:
+                break
+            t.append(buf)
+            c += len(buf)
+        t = b"".join(t)
+        self.buf = t[size:]
+        return t[:size]
+# class _Stream
+
+class _StreamProxy(object):
+    """Small proxy class that enables transparent compression
+       detection for the Stream interface (mode 'r|*').
+    """
+
+    def __init__(self, fileobj):
+        self.fileobj = fileobj
+        self.buf = self.fileobj.read(BLOCKSIZE)
+
+    def read(self, size):
+        self.read = self.fileobj.read
+        return self.buf
+
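+    # Compression is sniffed from magic bytes: gzip (1f 8b 08), bzip2 ("BZh"
+    # plus the block magic "1AY&SY"), xz (b"\xfd7zXZ") or raw lzma
+    # (b"\x5d\x00\x00\x80"); anything else is treated as uncompressed tar.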
+    def getcomptype(self):
+        if self.buf.startswith(b"\x1f\x8b\x08"):
+            return "gz"
+        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
+            return "bz2"
+        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
+            return "xz"
+        else:
+            return "tar"
+
+    def close(self):
+        self.fileobj.close()
+# class StreamProxy
+
+#------------------------
+# Extraction file object
+#------------------------
+class _FileInFile(object):
+    """A thin wrapper around an existing file object that
+       provides a part of its data as an individual file
+       object.
+    """
+
+    def __init__(self, fileobj, offset, size, name, blockinfo=None):
+        self.fileobj = fileobj
+        self.offset = offset
+        self.size = size
+        self.position = 0
+        self.name = name
+        self.closed = False
+
+        if blockinfo is None:
+            blockinfo = [(0, size)]
+
+        # Construct a map with data and zero blocks.
+        self.map_index = 0
+        self.map = []
+        lastpos = 0
+        realpos = self.offset
+        for offset, size in blockinfo:
+            if offset > lastpos:
+                self.map.append((False, lastpos, offset, None))
+            self.map.append((True, offset, offset + size, realpos))
+            realpos += size
+            lastpos = offset + size
+        if lastpos < self.size:
+            self.map.append((False, lastpos, self.size, None))
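+        # For example, size=30 with blockinfo=[(10, 5)] produces the map
+        # [(False, 0, 10, None), (True, 10, 15, realpos), (False, 15, 30, None)]:
+        # zero-filled holes interleaved with data blocks stored at realpos.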
+
+    def flush(self):
+        pass
+
+    def readable(self):
+        return True
+
+    def writable(self):
+        return False
+
+    def seekable(self):
+        return self.fileobj.seekable()
+
+    def tell(self):
+        """Return the current file position.
+        """
+        return self.position
+
+    def seek(self, position, whence=io.SEEK_SET):
+        """Seek to a position in the file.
+        """
+        if whence == io.SEEK_SET:
+            self.position = min(max(position, 0), self.size)
+        elif whence == io.SEEK_CUR:
+            if position < 0:
+                self.position = max(self.position + position, 0)
+            else:
+                self.position = min(self.position + position, self.size)
+        elif whence == io.SEEK_END:
+            self.position = max(min(self.size + position, self.size), 0)
+        else:
+            raise ValueError("Invalid argument")
+        return self.position
+
+    def read(self, size=None):
+        """Read data from the file.
+        """
+        if size is None:
+            size = self.size - self.position
+        else:
+            size = min(size, self.size - self.position)
+
+        buf = b""
+        while size > 0:
+            while True:
+                data, start, stop, offset = self.map[self.map_index]
+                if start <= self.position < stop:
+                    break
+                else:
+                    self.map_index += 1
+                    if self.map_index == len(self.map):
+                        self.map_index = 0
+            length = min(size, stop - self.position)
+            if data:
+                self.fileobj.seek(offset + (self.position - start))
+                b = self.fileobj.read(length)
+                if len(b) != length:
+                    raise ReadError("unexpected end of data")
+                buf += b
+            else:
+                buf += NUL * length
+            size -= length
+            self.position += length
+        return buf
+
+    def readinto(self, b):
+        buf = self.read(len(b))
+        b[:len(buf)] = buf
+        return len(buf)
+
+    def close(self):
+        self.closed = True
+#class _FileInFile
+
+class ExFileObject(io.BufferedReader):
+
+    def __init__(self, tarfile, tarinfo):
+        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
+                tarinfo.size, tarinfo.name, tarinfo.sparse)
+        super().__init__(fileobj)
+#class ExFileObject
+
+
+#-----------------------------
+# extraction filters (PEP 706)
+#-----------------------------
+
+class FilterError(TarError):
+    pass
+
+class AbsolutePathError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'member {tarinfo.name!r} has an absolute path')
+
+class OutsideDestinationError(FilterError):
+    def __init__(self, tarinfo, path):
+        self.tarinfo = tarinfo
+        self._path = path
+        super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
+                         + 'which is outside the destination')
+
+class SpecialFileError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'{tarinfo.name!r} is a special file')
+
+class AbsoluteLinkError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'{tarinfo.name!r} is a link to an absolute path')
+
+class LinkOutsideDestinationError(FilterError):
+    def __init__(self, tarinfo, path):
+        self.tarinfo = tarinfo
+        self._path = path
+        super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+                         + 'which is outside the destination')
+
+def _get_filtered_attrs(member, dest_path, for_data=True):
+    new_attrs = {}
+    name = member.name
+    dest_path = os.path.realpath(dest_path)
+    # Strip leading / (tar's directory separator) from filenames.
+    # Include os.sep (target OS directory separator) as well.
+    if name.startswith(('/', os.sep)):
+        name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
+    if os.path.isabs(name):
+        # Path is absolute even after stripping.
+        # For example, 'C:/foo' on Windows.
+        raise AbsolutePathError(member)
+    # Ensure we stay in the destination
+    target_path = os.path.realpath(os.path.join(dest_path, name))
+    if os.path.commonpath([target_path, dest_path]) != dest_path:
+        raise OutsideDestinationError(member, target_path)
+    # Limit permissions (no high bits, and go-w)
+    mode = member.mode
+    if mode is not None:
+        # Strip high bits & group/other write bits
+        mode = mode & 0o755
+        if for_data:
+            # For data, handle permissions & file types
+            if member.isreg() or member.islnk():
+                if not mode & 0o100:
+                    # Clear executable bits if not executable by user
+                    mode &= ~0o111
+                # Ensure owner can read & write
+                mode |= 0o600
+            elif member.isdir() or member.issym():
+                # Ignore mode for directories & symlinks
+                mode = None
+            else:
+                # Reject special files
+                raise SpecialFileError(member)
+        if mode != member.mode:
+            new_attrs['mode'] = mode
+    if for_data:
+        # Ignore ownership for 'data'
+        if member.uid is not None:
+            new_attrs['uid'] = None
+        if member.gid is not None:
+            new_attrs['gid'] = None
+        if member.uname is not None:
+            new_attrs['uname'] = None
+        if member.gname is not None:
+            new_attrs['gname'] = None
+        # Check link destination for 'data'
+        if member.islnk() or member.issym():
+            if os.path.isabs(member.linkname):
+                raise AbsoluteLinkError(member)
+            if member.issym():
+                target_path = os.path.join(dest_path,
+                                           os.path.dirname(name),
+                                           member.linkname)
+            else:
+                target_path = os.path.join(dest_path,
+                                           member.linkname)
+            target_path = os.path.realpath(target_path)
+            if os.path.commonpath([target_path, dest_path]) != dest_path:
+                raise LinkOutsideDestinationError(member, target_path)
+    return new_attrs
+
+def fully_trusted_filter(member, dest_path):
+    return member
+
+def tar_filter(member, dest_path):
+    new_attrs = _get_filtered_attrs(member, dest_path, False)
+    if new_attrs:
+        return member.replace(**new_attrs, deep=False)
+    return member
+
+def data_filter(member, dest_path):
+    new_attrs = _get_filtered_attrs(member, dest_path, True)
+    if new_attrs:
+        return member.replace(**new_attrs, deep=False)
+    return member
+
+_NAMED_FILTERS = {
+    "fully_trusted": fully_trusted_filter,
+    "tar": tar_filter,
+    "data": data_filter,
+}
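+# A filter is applied to each member at extraction time, either by name
+# (looked up in _NAMED_FILTERS, e.g. extractall(filter="data")) or as a
+# callable taking (member, dest_path) and returning a possibly modified
+# TarInfo, or None to exclude the member. The "data" filter is the strictest:
+# it sanitizes metadata and rejects special files and escaping paths/links.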
+
+#------------------
+# Exported Classes
+#------------------
+
+# Sentinel for replace() defaults, meaning "don't change the attribute"
+_KEEP = object()
+
+class TarInfo(object):
+    """Informational class which holds the details about an
+       archive member given by a tar header block.
+       TarInfo objects are returned by TarFile.getmember(),
+       TarFile.getmembers() and TarFile.gettarinfo() and are
+       usually created internally.
+    """
+
+    __slots__ = dict(
+        name = 'Name of the archive member.',
+        mode = 'Permission bits.',
+        uid = 'User ID of the user who originally stored this member.',
+        gid = 'Group ID of the user who originally stored this member.',
+        size = 'Size in bytes.',
+        mtime = 'Time of last modification.',
+        chksum = 'Header checksum.',
+        type = ('File type. type is usually one of these constants: '
+                'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
+                'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
+        linkname = ('Name of the target file name, which is only present '
+                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
+        uname = 'User name.',
+        gname = 'Group name.',
+        devmajor = 'Device major number.',
+        devminor = 'Device minor number.',
+        offset = 'The tar header starts here.',
+        offset_data = "The file's data starts here.",
+        pax_headers = ('A dictionary containing key-value pairs of an '
+                       'associated pax extended header.'),
+        sparse = 'Sparse member information.',
+        tarfile = None,
+        _sparse_structs = None,
+        _link_target = None,
+        )
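+    # Defining __slots__ as a dict makes each value the attribute's docstring.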
+
+    def __init__(self, name=""):
+        """Construct a TarInfo object. name is the optional name
+           of the member.
+        """
+        self.name = name        # member name
+        self.mode = 0o644       # file permissions
+        self.uid = 0            # user id
+        self.gid = 0            # group id
+        self.size = 0           # file size
+        self.mtime = 0          # modification time
+        self.chksum = 0         # header checksum
+        self.type = REGTYPE     # member type
+        self.linkname = ""      # link name
+        self.uname = ""         # user name
+        self.gname = ""         # group name
+        self.devmajor = 0       # device major number
+        self.devminor = 0       # device minor number
+
+        self.offset = 0         # the tar header starts here
+        self.offset_data = 0    # the file's data starts here
+
+        self.sparse = None      # sparse member information
+        self.pax_headers = {}   # pax header information
+
+    @property
+    def path(self):
+        'In pax headers, "name" is called "path".'
+        return self.name
+
+    @path.setter
+    def path(self, name):
+        self.name = name
+
+    @property
+    def linkpath(self):
+        'In pax headers, "linkname" is called "linkpath".'
+        return self.linkname
+
+    @linkpath.setter
+    def linkpath(self, linkname):
+        self.linkname = linkname
+
+    def __repr__(self):
+        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+
+    def replace(self, *,
+                name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
+                uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
+                deep=True, _KEEP=_KEEP):
+        """Return a deep copy of self with the given attributes replaced.
+        """
+        if deep:
+            result = copy.deepcopy(self)
+        else:
+            result = copy.copy(self)
+        if name is not _KEEP:
+            result.name = name
+        if mtime is not _KEEP:
+            result.mtime = mtime
+        if mode is not _KEEP:
+            result.mode = mode
+        if linkname is not _KEEP:
+            result.linkname = linkname
+        if uid is not _KEEP:
+            result.uid = uid
+        if gid is not _KEEP:
+            result.gid = gid
+        if uname is not _KEEP:
+            result.uname = uname
+        if gname is not _KEEP:
+            result.gname = gname
+        return result
+
+    def get_info(self):
+        """Return the TarInfo's attributes as a dictionary.
+        """
+        if self.mode is None:
+            mode = None
+        else:
+            mode = self.mode & 0o7777
+        info = {
+            "name":     self.name,
+            "mode":     mode,
+            "uid":      self.uid,
+            "gid":      self.gid,
+            "size":     self.size,
+            "mtime":    self.mtime,
+            "chksum":   self.chksum,
+            "type":     self.type,
+            "linkname": self.linkname,
+            "uname":    self.uname,
+            "gname":    self.gname,
+            "devmajor": self.devmajor,
+            "devminor": self.devminor
+        }
+
+        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
+            info["name"] += "/"
+
+        return info
+
+    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
+        """Return a tar header as a string of 512 byte blocks.
+        """
+        info = self.get_info()
+        for name, value in info.items():
+            if value is None:
+                raise ValueError("%s may not be None" % name)
+
+        if format == USTAR_FORMAT:
+            return self.create_ustar_header(info, encoding, errors)
+        elif format == GNU_FORMAT:
+            return self.create_gnu_header(info, encoding, errors)
+        elif format == PAX_FORMAT:
+            return self.create_pax_header(info, encoding)
+        else:
+            raise ValueError("invalid format")
+
+    def create_ustar_header(self, info, encoding, errors):
+        """Return the object as a ustar header block.
+        """
+        info["magic"] = POSIX_MAGIC
+
+        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+            raise ValueError("linkname is too long")
+
+        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+            info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
+
+        return self._create_header(info, USTAR_FORMAT, encoding, errors)
+
+    def create_gnu_header(self, info, encoding, errors):
+        """Return the object as a GNU header block sequence.
+        """
+        info["magic"] = GNU_MAGIC
+
+        buf = b""
+        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
+
+        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
+
+        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
+
+    def create_pax_header(self, info, encoding):
+        """Return the object as a ustar header block. If it cannot be
+           represented this way, prepend a pax extended header sequence
+           with supplemental information.
+        """
+        info["magic"] = POSIX_MAGIC
+        pax_headers = self.pax_headers.copy()
+
+        # Test string fields for values that exceed the field length or cannot
+        # be represented in ASCII encoding.
+        for name, hname, length in (
+                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
+                ("uname", "uname", 32), ("gname", "gname", 32)):
+
+            if hname in pax_headers:
+                # The pax header has priority.
+                continue
+
+            # Try to encode the string as ASCII.
+            try:
+                info[name].encode("ascii", "strict")
+            except UnicodeEncodeError:
+                pax_headers[hname] = info[name]
+                continue
+
+            if len(info[name]) > length:
+                pax_headers[hname] = info[name]
+
+        # Test number fields for values that exceed the field limit or that
+        # need to be stored as float.
+        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
+            needs_pax = False
+
+            val = info[name]
+            val_is_float = isinstance(val, float)
+            val_int = round(val) if val_is_float else val
+            if not 0 <= val_int < 8 ** (digits - 1):
+                # Avoid overflow.
+                info[name] = 0
+                needs_pax = True
+            elif val_is_float:
+                # Put rounded value in ustar header, and full
+                # precision value in pax header.
+                info[name] = val_int
+                needs_pax = True
+
+            # The existing pax header has priority.
+            if needs_pax and name not in pax_headers:
+                pax_headers[name] = str(val)
+
+        # Create a pax extended header if necessary.
+        if pax_headers:
+            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
+        else:
+            buf = b""
+
+        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
+
+    @classmethod
+    def create_pax_global_header(cls, pax_headers):
+        """Return the object as a pax global header block sequence.
+        """
+        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
+
+    def _posix_split_name(self, name, encoding, errors):
+        """Split a name longer than 100 chars into a prefix
+           and a name part.
+        """
+        components = name.split("/")
+        for i in range(1, len(components)):
+            prefix = "/".join(components[:i])
+            name = "/".join(components[i:])
+            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
+                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
+                break
+        else:
+            raise ValueError("name is too long")
+
+        return prefix, name
+
+    @staticmethod
+    def _create_header(info, format, encoding, errors):
+        """Return a header block. info is a dictionary with file
+           information, format must be one of the *_FORMAT constants.
+        """
+        has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
+        if has_device_fields:
+            devmajor = itn(info.get("devmajor", 0), 8, format)
+            devminor = itn(info.get("devminor", 0), 8, format)
+        else:
+            devmajor = stn("", 8, encoding, errors)
+            devminor = stn("", 8, encoding, errors)
+
+        # None values in metadata should cause ValueError.
+        # itn()/stn() do this for all fields except type.
+        filetype = info.get("type", REGTYPE)
+        if filetype is None:
+            raise ValueError("TarInfo.type must not be None")
+
+        parts = [
+            stn(info.get("name", ""), 100, encoding, errors),
+            itn(info.get("mode", 0) & 0o7777, 8, format),
+            itn(info.get("uid", 0), 8, format),
+            itn(info.get("gid", 0), 8, format),
+            itn(info.get("size", 0), 12, format),
+            itn(info.get("mtime", 0), 12, format),
+            b"        ", # checksum field
+            filetype,
+            stn(info.get("linkname", ""), 100, encoding, errors),
+            info.get("magic", POSIX_MAGIC),
+            stn(info.get("uname", ""), 32, encoding, errors),
+            stn(info.get("gname", ""), 32, encoding, errors),
+            devmajor,
+            devminor,
+            stn(info.get("prefix", ""), 155, encoding, errors)
+        ]
+
+        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
+        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
+        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
+        return buf
+
+    @staticmethod
+    def _create_payload(payload):
+        """Return the string payload filled with zero bytes
+           up to the next 512 byte border.
+        """
+        blocks, remainder = divmod(len(payload), BLOCKSIZE)
+        if remainder > 0:
+            payload += (BLOCKSIZE - remainder) * NUL
+        return payload
+
+    @classmethod
+    def _create_gnu_long_header(cls, name, type, encoding, errors):
+        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
+           for name.
+        """
+        name = name.encode(encoding, errors) + NUL
+
+        info = {}
+        info["name"] = "././@LongLink"
+        info["type"] = type
+        info["size"] = len(name)
+        info["magic"] = GNU_MAGIC
+
+        # create extended header + name blocks.
+        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
+                cls._create_payload(name)
+
+    @classmethod
+    def _create_pax_generic_header(cls, pax_headers, type, encoding):
+        """Return a POSIX.1-2008 extended or global header sequence
+           that contains a list of keyword, value pairs. The values
+           must be strings.
+        """
+        # Check if one of the fields contains surrogate characters and thereby
+        # forces hdrcharset=BINARY, see _proc_pax() for more information.
+        binary = False
+        for keyword, value in pax_headers.items():
+            try:
+                value.encode("utf-8", "strict")
+            except UnicodeEncodeError:
+                binary = True
+                break
+
+        records = b""
+        if binary:
+            # Put the hdrcharset field at the beginning of the header.
+            records += b"21 hdrcharset=BINARY\n"
+
+        for keyword, value in pax_headers.items():
+            keyword = keyword.encode("utf-8")
+            if binary:
+                # Try to restore the original byte representation of `value'.
+                # Needless to say, the encoding must match the string.
+                value = value.encode(encoding, "surrogateescape")
+            else:
+                value = value.encode("utf-8")
+
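+            # The record length field counts the record's own decimal digits,
+            # so iterate until the total stabilizes. For example, for keyword
+            # b"path" and value b"foo", l is 10, the loop settles on p == 12,
+            # and the record becomes b"12 path=foo\n" (12 bytes).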
+            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
+            n = p = 0
+            while True:
+                n = l + len(str(p))
+                if n == p:
+                    break
+                p = n
+            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
+
+        # We use a hardcoded "././@PaxHeader" name like star does
+        # instead of the one that POSIX recommends.
+        info = {}
+        info["name"] = "././@PaxHeader"
+        info["type"] = type
+        info["size"] = len(records)
+        info["magic"] = POSIX_MAGIC
+
+        # Create pax header + record blocks.
+        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
+                cls._create_payload(records)
+
+    @classmethod
+    def frombuf(cls, buf, encoding, errors):
+        """Construct a TarInfo object from a 512 byte bytes object.
+        """
+        if len(buf) == 0:
+            raise EmptyHeaderError("empty header")
+        if len(buf) != BLOCKSIZE:
+            raise TruncatedHeaderError("truncated header")
+        if buf.count(NUL) == BLOCKSIZE:
+            raise EOFHeaderError("end of file header")
+
+        chksum = nti(buf[148:156])
+        if chksum not in calc_chksums(buf):
+            raise InvalidHeaderError("bad checksum")
+
+        obj = cls()
+        obj.name = nts(buf[0:100], encoding, errors)
+        obj.mode = nti(buf[100:108])
+        obj.uid = nti(buf[108:116])
+        obj.gid = nti(buf[116:124])
+        obj.size = nti(buf[124:136])
+        obj.mtime = nti(buf[136:148])
+        obj.chksum = chksum
+        obj.type = buf[156:157]
+        obj.linkname = nts(buf[157:257], encoding, errors)
+        obj.uname = nts(buf[265:297], encoding, errors)
+        obj.gname = nts(buf[297:329], encoding, errors)
+        obj.devmajor = nti(buf[329:337])
+        obj.devminor = nti(buf[337:345])
+        prefix = nts(buf[345:500], encoding, errors)
+
+        # Old V7 tar format represents a directory as a regular
+        # file with a trailing slash.
+        if obj.type == AREGTYPE and obj.name.endswith("/"):
+            obj.type = DIRTYPE
+
+        # The old GNU sparse format occupies some of the unused
+        # space in the buffer for up to 4 sparse structures.
+        # Save them for later processing in _proc_sparse().
+        if obj.type == GNUTYPE_SPARSE:
+            pos = 386
+            structs = []
+            for i in range(4):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[482])
+            origsize = nti(buf[483:495])
+            obj._sparse_structs = (structs, isextended, origsize)
+
+        # Remove redundant slashes from directories.
+        if obj.isdir():
+            obj.name = obj.name.rstrip("/")
+
+        # Reconstruct a ustar longname.
+        if prefix and obj.type not in GNU_TYPES:
+            obj.name = prefix + "/" + obj.name
+        return obj
+
+    @classmethod
+    def fromtarfile(cls, tarfile):
+        """Return the next TarInfo object from TarFile object
+           tarfile.
+        """
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+        return obj._proc_member(tarfile)
+
+    #--------------------------------------------------------------------------
+    # The following are methods that are called depending on the type of a
+    # member. The entry point is _proc_member() which can be overridden in a
+    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+    # implement the following operations:
+    # 1. Set self.offset_data to the position where the data blocks begin,
+    #    if there is data that follows.
+    # 2. Set tarfile.offset to the position where the next member's header will
+    #    begin.
+    # 3. Return self or another valid TarInfo object.
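+    # A hypothetical subclass hook, for illustration only:
+    #     class MyTarInfo(TarInfo):
+    #         def _proc_member(self, tarfile):
+    #             if self.type == MYTYPE:          # custom type flag
+    #                 return self._proc_mytype(tarfile)
+    #             return super()._proc_member(tarfile)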
+    def _proc_member(self, tarfile):
+        """Choose the right processing method depending on
+           the type and call it.
+        """
+        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+            return self._proc_gnulong(tarfile)
+        elif self.type == GNUTYPE_SPARSE:
+            return self._proc_sparse(tarfile)
+        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+            return self._proc_pax(tarfile)
+        else:
+            return self._proc_builtin(tarfile)
+
+    def _proc_builtin(self, tarfile):
+        """Process a builtin type or an unknown type which
+           will be treated as a regular file.
+        """
+        self.offset_data = tarfile.fileobj.tell()
+        offset = self.offset_data
+        if self.isreg() or self.type not in SUPPORTED_TYPES:
+            # Skip the following data blocks.
+            offset += self._block(self.size)
+        tarfile.offset = offset
+
+        # Patch the TarInfo object with saved global
+        # header information.
+        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
+
+        # Remove redundant slashes from directories. This is to be consistent
+        # with frombuf().
+        if self.isdir():
+            self.name = self.name.rstrip("/")
+
+        return self
+
+    def _proc_gnulong(self, tarfile):
+        """Process the blocks that hold a GNU longname
+           or longlink member.
+        """
+        buf = tarfile.fileobj.read(self._block(self.size))
+
+        # Fetch the next header and process it.
+        try:
+            next = self.fromtarfile(tarfile)
+        except HeaderError as e:
+            raise SubsequentHeaderError(str(e)) from None
+
+        # Patch the TarInfo object from the next header with
+        # the longname information.
+        next.offset = self.offset
+        if self.type == GNUTYPE_LONGNAME:
+            next.name = nts(buf, tarfile.encoding, tarfile.errors)
+        elif self.type == GNUTYPE_LONGLINK:
+            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
+
+        # Remove redundant slashes from directories. This is to be consistent
+        # with frombuf().
+        if next.isdir():
+            next.name = next.name.removesuffix("/")
+
+        return next
+
+    def _proc_sparse(self, tarfile):
+        """Process a GNU sparse header plus extra headers.
+        """
+        # We already collected some sparse structures in frombuf().
+        structs, isextended, origsize = self._sparse_structs
+        del self._sparse_structs
+
+        # Collect sparse structures from extended header blocks.
+        while isextended:
+            buf = tarfile.fileobj.read(BLOCKSIZE)
+            pos = 0
+            for i in range(21):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                if offset and numbytes:
+                    structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[504])
+        self.sparse = structs
+
+        self.offset_data = tarfile.fileobj.tell()
+        tarfile.offset = self.offset_data + self._block(self.size)
+        self.size = origsize
+        return self
+
+    def _proc_pax(self, tarfile):
+        """Process an extended or global header as described in
+           POSIX.1-2008.
+        """
+        # Read the header information.
+        buf = tarfile.fileobj.read(self._block(self.size))
+
+        # A pax header stores supplemental information for either
+        # the following file (extended) or all following files
+        # (global).
+        if self.type == XGLTYPE:
+            pax_headers = tarfile.pax_headers
+        else:
+            pax_headers = tarfile.pax_headers.copy()
+
+        # Check if the pax header contains a hdrcharset field. This tells us
+        # the encoding of the path, linkpath, uname and gname fields. Normally,
+        # these fields are UTF-8 encoded, but POSIX.1-2008 allows tar
+        # implementations to store them as raw binary strings if the
+        # translation to UTF-8 fails.
+        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
+        if match is not None:
+            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
+
+        # For the time being, we don't care about anything other than "BINARY".
+        # The only other value that is currently allowed by the standard is
+        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
+        hdrcharset = pax_headers.get("hdrcharset")
+        if hdrcharset == "BINARY":
+            encoding = tarfile.encoding
+        else:
+            encoding = "utf-8"
+
+        # Parse pax header information. A record looks like that:
+        # "%d %s=%s\n" % (length, keyword, value). length is the size
+        # of the complete record including the length field itself and
+        # the newline. keyword and value are both UTF-8 encoded strings.
+        regex = re.compile(br"(\d+) ([^=]+)=")
+        pos = 0
+        while match := regex.match(buf, pos):
+            length, keyword = match.groups()
+            length = int(length)
+            if length == 0:
+                raise InvalidHeaderError("invalid header")
+            value = buf[match.end(2) + 1:match.start(1) + length - 1]
+
+            # Normally, we could just use "utf-8" as the encoding and "strict"
+            # as the error handler, but we better not take the risk. For
+            # example, GNU tar <= 1.23 is known to store filenames it cannot
+            # translate to UTF-8 as raw strings (unfortunately without a
+            # hdrcharset=BINARY header).
+            # We first try the strict standard encoding, and if that fails we
+            # fall back on the user's encoding and error handler.
+            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
+                    tarfile.errors)
+            if keyword in PAX_NAME_FIELDS:
+                value = self._decode_pax_field(value, encoding, tarfile.encoding,
+                        tarfile.errors)
+            else:
+                value = self._decode_pax_field(value, "utf-8", "utf-8",
+                        tarfile.errors)
+
+            pax_headers[keyword] = value
+            pos += length
+
+        # Fetch the next header.
+        try:
+            next = self.fromtarfile(tarfile)
+        except HeaderError as e:
+            raise SubsequentHeaderError(str(e)) from None
+
+        # Process GNU sparse information.
+        if "GNU.sparse.map" in pax_headers:
+            # GNU extended sparse format version 0.1.
+            self._proc_gnusparse_01(next, pax_headers)
+
+        elif "GNU.sparse.size" in pax_headers:
+            # GNU extended sparse format version 0.0.
+            self._proc_gnusparse_00(next, pax_headers, buf)
+
+        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
+            # GNU extended sparse format version 1.0.
+            self._proc_gnusparse_10(next, pax_headers, tarfile)
+
+        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
+            # Patch the TarInfo object with the extended header info.
+            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
+            next.offset = self.offset
+
+            if "size" in pax_headers:
+                # If the extended header replaces the size field,
+                # we need to recalculate the offset where the next
+                # header starts.
+                offset = next.offset_data
+                if next.isreg() or next.type not in SUPPORTED_TYPES:
+                    offset += next._block(next.size)
+                tarfile.offset = offset
+
+        return next
+
+    def _proc_gnusparse_00(self, next, pax_headers, buf):
+        """Process a GNU tar extended sparse header, version 0.0.
+        """
+        offsets = []
+        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
+            offsets.append(int(match.group(1)))
+        numbytes = []
+        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
+            numbytes.append(int(match.group(1)))
+        next.sparse = list(zip(offsets, numbytes))
+
+    def _proc_gnusparse_01(self, next, pax_headers):
+        """Process a GNU tar extended sparse header, version 0.1.
+        """
+        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
+        next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
+        """Process a GNU tar extended sparse header, version 1.0.
+        """
+        fields = None
+        sparse = []
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        fields, buf = buf.split(b"\n", 1)
+        fields = int(fields)
+        while len(sparse) < fields * 2:
+            if b"\n" not in buf:
+                buf += tarfile.fileobj.read(BLOCKSIZE)
+            number, buf = buf.split(b"\n", 1)
+            sparse.append(int(number))
+        next.offset_data = tarfile.fileobj.tell()
+        next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+    def _apply_pax_info(self, pax_headers, encoding, errors):
+        """Replace fields with supplemental information from a previous
+           pax extended or global header.
+        """
+        for keyword, value in pax_headers.items():
+            if keyword == "GNU.sparse.name":
+                setattr(self, "path", value)
+            elif keyword == "GNU.sparse.size":
+                setattr(self, "size", int(value))
+            elif keyword == "GNU.sparse.realsize":
+                setattr(self, "size", int(value))
+            elif keyword in PAX_FIELDS:
+                if keyword in PAX_NUMBER_FIELDS:
+                    try:
+                        value = PAX_NUMBER_FIELDS[keyword](value)
+                    except ValueError:
+                        value = 0
+                if keyword == "path":
+                    value = value.rstrip("/")
+                setattr(self, keyword, value)
+
+        self.pax_headers = pax_headers.copy()
+
+    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
+        """Decode a single field from a pax record.
+        """
+        try:
+            return value.decode(encoding, "strict")
+        except UnicodeDecodeError:
+            return value.decode(fallback_encoding, fallback_errors)
+
+    def _block(self, count):
+        """Round up a byte count by BLOCKSIZE and return it,
+           e.g. _block(834) => 1024.
+        """
+        blocks, remainder = divmod(count, BLOCKSIZE)
+        if remainder:
+            blocks += 1
+        return blocks * BLOCKSIZE
+
+    def isreg(self):
+        'Return True if the Tarinfo object is a regular file.'
+        return self.type in REGULAR_TYPES
+
+    def isfile(self):
+        'Return True if the Tarinfo object is a regular file.'
+        return self.isreg()
+
+    def isdir(self):
+        'Return True if it is a directory.'
+        return self.type == DIRTYPE
+
+    def issym(self):
+        'Return True if it is a symbolic link.'
+        return self.type == SYMTYPE
+
+    def islnk(self):
+        'Return True if it is a hard link.'
+        return self.type == LNKTYPE
+
+    def ischr(self):
+        'Return True if it is a character device.'
+        return self.type == CHRTYPE
+
+    def isblk(self):
+        'Return True if it is a block device.'
+        return self.type == BLKTYPE
+
+    def isfifo(self):
+        'Return True if it is a FIFO.'
+        return self.type == FIFOTYPE
+
+    def issparse(self):
+        return self.sparse is not None
+
+    def isdev(self):
+        'Return True if it is one of character device, block device or FIFO.'
+        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
+# class TarInfo
+
+class TarFile(object):
+    """The TarFile Class provides an interface to tar archives.
+    """
+
+    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
+
+    dereference = False         # If true, add content of linked file to the
+                                # tar file, else the link.
+
+    ignore_zeros = False        # If true, skips empty or invalid blocks and
+                                # continues processing.
+
+    errorlevel = 1              # If 0, fatal errors only appear in debug
+                                # messages (if debug >= 0). If > 0, errors
+                                # are passed to the caller as exceptions.
+
+    format = DEFAULT_FORMAT     # The format to use when creating an archive.
+
+    encoding = ENCODING         # Encoding for 8-bit character strings.
+
+    errors = None               # Error handler for unicode conversion.
+
+    tarinfo = TarInfo           # The default TarInfo class to use.
+
+    fileobject = ExFileObject   # The file-object for extractfile().
+
+    extraction_filter = None    # The default filter for extraction.
+
+    def __init__(self, name=None, mode="r", fileobj=None, format=None,
+            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
+            errors="surrogateescape", pax_headers=None, debug=None,
+            errorlevel=None, copybufsize=None):
+        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+           read from an existing archive, 'a' to append data to an existing
+           file or 'w' to create a new file overwriting an existing one. `mode'
+           defaults to 'r'.
+           If `fileobj' is given, it is used for reading or writing data. If it
+           can be determined, `mode' is overridden by `fileobj's mode.
+           `fileobj' is not closed, when TarFile is closed.
+        """
+        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
+        if mode not in modes:
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+        self.mode = mode
+        self._mode = modes[mode]
+
+        if not fileobj:
+            if self.mode == "a" and not os.path.exists(name):
+                # Create nonexistent files in append mode.
+                self.mode = "w"
+                self._mode = "wb"
+            fileobj = bltn_open(name, self._mode)
+            self._extfileobj = False
+        else:
+            if (name is None and hasattr(fileobj, "name") and
+                isinstance(fileobj.name, (str, bytes))):
+                name = fileobj.name
+            if hasattr(fileobj, "mode"):
+                self._mode = fileobj.mode
+            self._extfileobj = True
+        self.name = os.path.abspath(name) if name else None
+        self.fileobj = fileobj
+
+        # Init attributes.
+        if format is not None:
+            self.format = format
+        if tarinfo is not None:
+            self.tarinfo = tarinfo
+        if dereference is not None:
+            self.dereference = dereference
+        if ignore_zeros is not None:
+            self.ignore_zeros = ignore_zeros
+        if encoding is not None:
+            self.encoding = encoding
+        self.errors = errors
+
+        if pax_headers is not None and self.format == PAX_FORMAT:
+            self.pax_headers = pax_headers
+        else:
+            self.pax_headers = {}
+
+        if debug is not None:
+            self.debug = debug
+        if errorlevel is not None:
+            self.errorlevel = errorlevel
+
+        # Init datastructures.
+        self.copybufsize = copybufsize
+        self.closed = False
+        self.members = []       # list of members as TarInfo objects
+        self._loaded = False    # flag if all members have been read
+        self.offset = self.fileobj.tell()
+                                # current position in the archive file
+        self.inodes = {}        # dictionary caching the inodes of
+                                # archive members already added
+
+        try:
+            if self.mode == "r":
+                self.firstmember = None
+                self.firstmember = self.next()
+
+            if self.mode == "a":
+                # Move to the end of the archive,
+                # before the first empty block.
+                while True:
+                    self.fileobj.seek(self.offset)
+                    try:
+                        tarinfo = self.tarinfo.fromtarfile(self)
+                        self.members.append(tarinfo)
+                    except EOFHeaderError:
+                        self.fileobj.seek(self.offset)
+                        break
+                    except HeaderError as e:
+                        raise ReadError(str(e)) from None
+
+            if self.mode in ("a", "w", "x"):
+                self._loaded = True
+
+                if self.pax_headers:
+                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
+                    self.fileobj.write(buf)
+                    self.offset += len(buf)
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    #--------------------------------------------------------------------------
+    # Below are the classmethods which act as alternate constructors to the
+    # TarFile class. The open() method is the only one that is needed for
+    # public use; it is the "super"-constructor and is able to select an
+    # adequate "sub"-constructor for a particular compression using the mapping
+    # from OPEN_METH.
+    #
+    # This concept allows one to subclass TarFile without losing the comfort of
+    # the super-constructor. A sub-constructor is registered and made available
+    # by adding it to the mapping in OPEN_METH.
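+    #
+    # As an illustrative sketch (not part of this module), a subclass could
+    # register a hypothetical zstandard sub-constructor like so:
+    #
+    #     class ZstTarFile(TarFile):
+    #         OPEN_METH = {**TarFile.OPEN_METH, "zst": "zstopen"}
+    #
+    #         @classmethod
+    #         def zstopen(cls, name, mode="r", fileobj=None, **kwargs):
+    #             # open a zstandard-compressed stream here, then hand the
+    #             # resulting file object to taropen()
+    #             ...
+    #
+    # open() would then accept modes such as "r:zst" for that subclass.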
+
+    @classmethod
+    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
+        r"""Open a tar archive for reading, writing or appending. Return
+           an appropriate TarFile class.
+
+           mode:
+           'r' or 'r:\*' open for reading with transparent compression
+           'r:'         open for reading exclusively uncompressed
+           'r:gz'       open for reading with gzip compression
+           'r:bz2'      open for reading with bzip2 compression
+           'r:xz'       open for reading with lzma compression
+           'a' or 'a:'  open for appending, creating the file if necessary
+           'w' or 'w:'  open for writing without compression
+           'w:gz'       open for writing with gzip compression
+           'w:bz2'      open for writing with bzip2 compression
+           'w:xz'       open for writing with lzma compression
+
+           'x' or 'x:'  create a tarfile exclusively without compression, raise
+                        an exception if the file is already created
+           'x:gz'       create a gzip compressed tarfile, raise an exception
+                        if the file is already created
+           'x:bz2'      create a bzip2 compressed tarfile, raise an exception
+                        if the file is already created
+           'x:xz'       create an lzma compressed tarfile, raise an exception
+                        if the file is already created
+
+           'r|\*'        open a stream of tar blocks with transparent compression
+           'r|'         open an uncompressed stream of tar blocks for reading
+           'r|gz'       open a gzip compressed stream of tar blocks
+           'r|bz2'      open a bzip2 compressed stream of tar blocks
+           'r|xz'       open an lzma compressed stream of tar blocks
+           'w|'         open an uncompressed stream for writing
+           'w|gz'       open a gzip compressed stream for writing
+           'w|bz2'      open a bzip2 compressed stream for writing
+           'w|xz'       open an lzma compressed stream for writing
+        """
+
+        if not name and not fileobj:
+            raise ValueError("nothing to open")
+
+        if mode in ("r", "r:*"):
+            # Find out which *open() is appropriate for opening the file.
+            def not_compressed(comptype):
+                return cls.OPEN_METH[comptype] == 'taropen'
+            error_msgs = []
+            for comptype in sorted(cls.OPEN_METH, key=not_compressed):
+                func = getattr(cls, cls.OPEN_METH[comptype])
+                if fileobj is not None:
+                    saved_pos = fileobj.tell()
+                try:
+                    return func(name, "r", fileobj, **kwargs)
+                except (ReadError, CompressionError) as e:
+                    error_msgs.append(f'- method {comptype}: {e!r}')
+                    if fileobj is not None:
+                        fileobj.seek(saved_pos)
+                    continue
+            error_msgs_summary = '\n'.join(error_msgs)
+            raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
+
+        elif ":" in mode:
+            filemode, comptype = mode.split(":", 1)
+            filemode = filemode or "r"
+            comptype = comptype or "tar"
+
+            # Select the *open() function according to
+            # given compression.
+            if comptype in cls.OPEN_METH:
+                func = getattr(cls, cls.OPEN_METH[comptype])
+            else:
+                raise CompressionError("unknown compression type %r" % comptype)
+            return func(name, filemode, fileobj, **kwargs)
+
+        elif "|" in mode:
+            filemode, comptype = mode.split("|", 1)
+            filemode = filemode or "r"
+            comptype = comptype or "tar"
+
+            if filemode not in ("r", "w"):
+                raise ValueError("mode must be 'r' or 'w'")
+
+            compresslevel = kwargs.pop("compresslevel", 9)
+            stream = _Stream(name, filemode, comptype, fileobj, bufsize,
+                             compresslevel)
+            try:
+                t = cls(name, filemode, stream, **kwargs)
+            except:
+                stream.close()
+                raise
+            t._extfileobj = False
+            return t
+
+        elif mode in ("a", "w", "x"):
+            return cls.taropen(name, mode, fileobj, **kwargs)
+
+        raise ValueError("undiscernible mode")
+
+    @classmethod
+    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
+        """Open uncompressed tar archive name for reading or writing.
+        """
+        if mode not in ("r", "a", "w", "x"):
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+        return cls(name, mode, fileobj, **kwargs)
+
+    @classmethod
+    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+        """Open gzip compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from gzip import GzipFile
+        except ImportError:
+            raise CompressionError("gzip module is not available") from None
+
+        try:
+            fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
+        except OSError as e:
+            if fileobj is not None and mode == 'r':
+                raise ReadError("not a gzip file") from e
+            raise
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except OSError as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not a gzip file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    @classmethod
+    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+        """Open bzip2 compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from bz2 import BZ2File
+        except ImportError:
+            raise CompressionError("bz2 module is not available") from None
+
+        fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except (OSError, EOFError) as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not a bzip2 file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    @classmethod
+    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
+        """Open lzma compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from lzma import LZMAFile, LZMAError
+        except ImportError:
+            raise CompressionError("lzma module is not available") from None
+
+        fileobj = LZMAFile(fileobj or name, mode, preset=preset)
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except (LZMAError, EOFError) as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not an lzma file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    # All *open() methods are registered here.
+    OPEN_METH = {
+        "tar": "taropen",   # uncompressed tar
+        "gz":  "gzopen",    # gzip compressed tar
+        "bz2": "bz2open",   # bzip2 compressed tar
+        "xz":  "xzopen"     # lzma compressed tar
+    }
+
+    #--------------------------------------------------------------------------
+    # The public methods which TarFile provides:
+
+    def close(self):
+        """Close the TarFile. In write-mode, two finishing zero blocks are
+           appended to the archive.
+        """
+        if self.closed:
+            return
+
+        self.closed = True
+        try:
+            if self.mode in ("a", "w", "x"):
+                self.fileobj.write(NUL * (BLOCKSIZE * 2))
+                self.offset += (BLOCKSIZE * 2)
+                # fill up the end with zero-blocks
+                # (like option -b20 for tar does)
+                blocks, remainder = divmod(self.offset, RECORDSIZE)
+                if remainder > 0:
+                    self.fileobj.write(NUL * (RECORDSIZE - remainder))
+        finally:
+            if not self._extfileobj:
+                self.fileobj.close()
+
+    def getmember(self, name):
+        """Return a TarInfo object for member ``name``. If ``name`` can not be
+           found in the archive, KeyError is raised. If a member occurs more
+           than once in the archive, its last occurrence is assumed to be the
+           most up-to-date version.
+        """
+        tarinfo = self._getmember(name.rstrip('/'))
+        if tarinfo is None:
+            raise KeyError("filename %r not found" % name)
+        return tarinfo
+
+    def getmembers(self):
+        """Return the members of the archive as a list of TarInfo objects. The
+           list has the same order as the members in the archive.
+        """
+        self._check()
+        if not self._loaded:    # if we want to obtain a list of
+            self._load()        # all members, we first have to
+                                # scan the whole archive.
+        return self.members
+
+    def getnames(self):
+        """Return the members of the archive as a list of their names. It has
+           the same order as the list returned by getmembers().
+        """
+        return [tarinfo.name for tarinfo in self.getmembers()]
+
+    def gettarinfo(self, name=None, arcname=None, fileobj=None):
+        """Create a TarInfo object from the result of os.stat or equivalent
+           on an existing file. The file is either named by ``name``, or
+           specified as a file object ``fileobj`` with a file descriptor. If
+           given, ``arcname`` specifies an alternative name for the file in the
+           archive, otherwise, the name is taken from the 'name' attribute of
+           'fileobj', or the 'name' argument. The name should be a text
+           string.
+        """
+        self._check("awx")
+
+        # When fileobj is given, replace name by
+        # fileobj's real name.
+        if fileobj is not None:
+            name = fileobj.name
+
+        # Build the name of the member in the archive.
+        # Backslashes are converted to forward slashes and
+        # absolute paths are made relative.
+        if arcname is None:
+            arcname = name
+        drv, arcname = os.path.splitdrive(arcname)
+        arcname = arcname.replace(os.sep, "/")
+        arcname = arcname.lstrip("/")
+
+        # Now, fill the TarInfo object with
+        # information specific for the file.
+        tarinfo = self.tarinfo()
+        tarinfo.tarfile = self  # Not needed
+
+        # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
+        if fileobj is None:
+            if not self.dereference:
+                statres = os.lstat(name)
+            else:
+                statres = os.stat(name)
+        else:
+            statres = os.fstat(fileobj.fileno())
+        linkname = ""
+
+        stmd = statres.st_mode
+        if stat.S_ISREG(stmd):
+            inode = (statres.st_ino, statres.st_dev)
+            if not self.dereference and statres.st_nlink > 1 and \
+                    inode in self.inodes and arcname != self.inodes[inode]:
+                # Is it a hardlink to an already
+                # archived file?
+                type = LNKTYPE
+                linkname = self.inodes[inode]
+            else:
+                # The inode is added only if it's valid.
+                # For win32 it is always 0.
+                type = REGTYPE
+                if inode[0]:
+                    self.inodes[inode] = arcname
+        elif stat.S_ISDIR(stmd):
+            type = DIRTYPE
+        elif stat.S_ISFIFO(stmd):
+            type = FIFOTYPE
+        elif stat.S_ISLNK(stmd):
+            type = SYMTYPE
+            linkname = os.readlink(name)
+        elif stat.S_ISCHR(stmd):
+            type = CHRTYPE
+        elif stat.S_ISBLK(stmd):
+            type = BLKTYPE
+        else:
+            return None
+
+        # Fill the TarInfo object with all
+        # information we can get.
+        tarinfo.name = arcname
+        tarinfo.mode = stmd
+        tarinfo.uid = statres.st_uid
+        tarinfo.gid = statres.st_gid
+        if type == REGTYPE:
+            tarinfo.size = statres.st_size
+        else:
+            tarinfo.size = 0
+        tarinfo.mtime = statres.st_mtime
+        tarinfo.type = type
+        tarinfo.linkname = linkname
+        if pwd:
+            try:
+                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+            except KeyError:
+                pass
+        if grp:
+            try:
+                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+            except KeyError:
+                pass
+
+        if type in (CHRTYPE, BLKTYPE):
+            if hasattr(os, "major") and hasattr(os, "minor"):
+                tarinfo.devmajor = os.major(statres.st_rdev)
+                tarinfo.devminor = os.minor(statres.st_rdev)
+        return tarinfo
+
+    def list(self, verbose=True, *, members=None):
+        """Print a table of contents to sys.stdout. If ``verbose`` is False, only
+           the names of the members are printed. If it is True, an `ls -l'-like
+           output is produced. ``members`` is optional and must be a subset of the
+           list returned by getmembers().
+        """
+        self._check()
+
+        if members is None:
+            members = self
+        for tarinfo in members:
+            if verbose:
+                if tarinfo.mode is None:
+                    _safe_print("??????????")
+                else:
+                    _safe_print(stat.filemode(tarinfo.mode))
+                _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
+                                       tarinfo.gname or tarinfo.gid))
+                if tarinfo.ischr() or tarinfo.isblk():
+                    _safe_print("%10s" %
+                            ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
+                else:
+                    _safe_print("%10d" % tarinfo.size)
+                if tarinfo.mtime is None:
+                    _safe_print("????-??-?? ??:??:??")
+                else:
+                    _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
+                                % time.localtime(tarinfo.mtime)[:6])
+
+            _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
+
+            if verbose:
+                if tarinfo.issym():
+                    _safe_print("-> " + tarinfo.linkname)
+                if tarinfo.islnk():
+                    _safe_print("link to " + tarinfo.linkname)
+            print()
+
+    def add(self, name, arcname=None, recursive=True, *, filter=None):
+        """Add the file ``name`` to the archive. ``name`` may be any type of file
+           (directory, fifo, symbolic link, etc.). If given, ``arcname``
+           specifies an alternative name for the file in the archive.
+           Directories are added recursively by default. This can be avoided by
+           setting ``recursive`` to False. ``filter`` is a function
+           that expects a TarInfo object argument and returns the changed
+           TarInfo object; if it returns None, the TarInfo object will be
+           excluded from the archive.
+        """
+        self._check("awx")
+
+        if arcname is None:
+            arcname = name
+
+        # Skip if somebody tries to archive the archive...
+        if self.name is not None and os.path.abspath(name) == self.name:
+            self._dbg(2, "tarfile: Skipped %r" % name)
+            return
+
+        self._dbg(1, name)
+
+        # Create a TarInfo object from the file.
+        tarinfo = self.gettarinfo(name, arcname)
+
+        if tarinfo is None:
+            self._dbg(1, "tarfile: Unsupported type %r" % name)
+            return
+
+        # Change or exclude the TarInfo object.
+        if filter is not None:
+            tarinfo = filter(tarinfo)
+            if tarinfo is None:
+                self._dbg(2, "tarfile: Excluded %r" % name)
+                return
+
+        # Append the tar header and data to the archive.
+        if tarinfo.isreg():
+            with bltn_open(name, "rb") as f:
+                self.addfile(tarinfo, f)
+
+        elif tarinfo.isdir():
+            self.addfile(tarinfo)
+            if recursive:
+                for f in sorted(os.listdir(name)):
+                    self.add(os.path.join(name, f), os.path.join(arcname, f),
+                            recursive, filter=filter)
+
+        else:
+            self.addfile(tarinfo)
+
+    def addfile(self, tarinfo, fileobj=None):
+        """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
+           given, it should be a binary file, and tarinfo.size bytes are read
+           from it and added to the archive. You can create TarInfo objects
+           directly, or by using gettarinfo().
+        """
+        self._check("awx")
+
+        tarinfo = copy.copy(tarinfo)
+
+        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
+        self.fileobj.write(buf)
+        self.offset += len(buf)
+        bufsize = self.copybufsize
+        # If there's data to follow, append it.
+        if fileobj is not None:
+            copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
+            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+            if remainder > 0:
+                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+                blocks += 1
+            self.offset += blocks * BLOCKSIZE
+
+        self.members.append(tarinfo)
+
+    def _get_filter_function(self, filter):
+        if filter is None:
+            filter = self.extraction_filter
+            if filter is None:
+                warnings.warn(
+                    'Python 3.14 will, by default, filter extracted tar '
+                    + 'archives and reject files or modify their metadata. '
+                    + 'Use the filter argument to control this behavior.',
+                    DeprecationWarning)
+                return fully_trusted_filter
+            if isinstance(filter, str):
+                raise TypeError(
+                    'String names are not supported for '
+                    + 'TarFile.extraction_filter. Use a function such as '
+                    + 'tarfile.data_filter directly.')
+            return filter
+        if callable(filter):
+            return filter
+        try:
+            return _NAMED_FILTERS[filter]
+        except KeyError:
+            raise ValueError(f"filter {filter!r} not found") from None
+
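+    # Note: ``extraction_filter`` must be a callable, not a string name. For
+    # example, to opt in to safe extraction for all instances (sketch):
+    #
+    #     TarFile.extraction_filter = staticmethod(data_filter)
+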
+    def extractall(self, path=".", members=None, *, numeric_owner=False,
+                   filter=None):
+        """Extract all members from the archive to the current working
+           directory and set owner, modification time and permissions on
+           directories afterwards. `path' specifies a different directory
+           to extract to. `members' is optional and must be a subset of the
+           list returned by getmembers(). If `numeric_owner` is True, only
+           the numbers for user/group names are used and not the names.
+
+           The `filter` function will be called on each member just
+           before extraction.
+           It can return a changed TarInfo or None to skip the member.
+           String names of common filters are accepted.
+        """
+        directories = []
+
+        filter_function = self._get_filter_function(filter)
+        if members is None:
+            members = self
+
+        for member in members:
+            tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+            if tarinfo is None:
+                continue
+            if tarinfo.isdir():
+                # For directories, delay setting attributes until later,
+                # since permissions can interfere with extraction and
+                # extracting contents can reset mtime.
+                directories.append(tarinfo)
+            self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
+                              numeric_owner=numeric_owner)
+
+        # Reverse sort directories.
+        directories.sort(key=lambda a: a.name, reverse=True)
+
+        # Set correct owner, mtime and filemode on directories.
+        for tarinfo in directories:
+            dirpath = os.path.join(path, tarinfo.name)
+            try:
+                self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
+                self.utime(tarinfo, dirpath)
+                self.chmod(tarinfo, dirpath)
+            except ExtractError as e:
+                self._handle_nonfatal_error(e)
+
+    def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
+                filter=None):
+        """Extract a member from the archive to the current working directory,
+           using its full name. Its file information is extracted as accurately
+           as possible. `member' may be a filename or a TarInfo object. You can
+           specify a different directory using `path'. File attributes (owner,
+           mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
+           is True, only the numbers for user/group names are used and not
+           the names.
+
+           The `filter` function will be called before extraction.
+           It can return a changed TarInfo or None to skip the member.
+           String names of common filters are accepted.
+        """
+        filter_function = self._get_filter_function(filter)
+        tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+        if tarinfo is not None:
+            self._extract_one(tarinfo, path, set_attrs, numeric_owner)
+
+    def _get_extract_tarinfo(self, member, filter_function, path):
+        """Get filtered TarInfo (or None) from member, which might be a str"""
+        if isinstance(member, str):
+            tarinfo = self.getmember(member)
+        else:
+            tarinfo = member
+
+        unfiltered = tarinfo
+        try:
+            tarinfo = filter_function(tarinfo, path)
+        except (OSError, FilterError) as e:
+            self._handle_fatal_error(e)
+        except ExtractError as e:
+            self._handle_nonfatal_error(e)
+        if tarinfo is None:
+            self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
+            return None
+        # Prepare the link target for makelink().
+        if tarinfo.islnk():
+            tarinfo = copy.copy(tarinfo)
+            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+        return tarinfo
+
+    def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
+        """Extract from filtered tarinfo to disk"""
+        self._check("r")
+
+        try:
+            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
+                                 set_attrs=set_attrs,
+                                 numeric_owner=numeric_owner)
+        except OSError as e:
+            self._handle_fatal_error(e)
+        except ExtractError as e:
+            self._handle_nonfatal_error(e)
+
+    def _handle_nonfatal_error(self, e):
+        """Handle non-fatal error (ExtractError) according to errorlevel"""
+        if self.errorlevel > 1:
+            raise
+        else:
+            self._dbg(1, "tarfile: %s" % e)
+
+    def _handle_fatal_error(self, e):
+        """Handle "fatal" error according to self.errorlevel"""
+        if self.errorlevel > 0:
+            raise
+        elif isinstance(e, OSError):
+            if e.filename is None:
+                self._dbg(1, "tarfile: %s" % e.strerror)
+            else:
+                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+        else:
+            self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
+
+    def extractfile(self, member):
+        """Extract a member from the archive as a file object. ``member`` may be
+           a filename or a TarInfo object. If ``member`` is a regular file or
+           a link, an io.BufferedReader object is returned. For all other
+           existing members, None is returned. If ``member`` does not appear
+           in the archive, KeyError is raised.
+        """
+        self._check("r")
+
+        if isinstance(member, str):
+            tarinfo = self.getmember(member)
+        else:
+            tarinfo = member
+
+        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
+            # Members with unknown types are treated as regular files.
+            return self.fileobject(self, tarinfo)
+
+        elif tarinfo.islnk() or tarinfo.issym():
+            if isinstance(self.fileobj, _Stream):
+                # A small but ugly workaround for the case that someone tries
+                # to extract a (sym)link as a file-object from a non-seekable
+                # stream of tar blocks.
+                raise StreamError("cannot extract (sym)link as file object")
+            else:
+                # A (sym)link's file object is its target's file object.
+                return self.extractfile(self._find_link_target(tarinfo))
+        else:
+            # If there's no data associated with the member (directory, chrdev,
+            # blkdev, etc.), return None instead of a file object.
+            return None
+
+    def _extract_member(self, tarinfo, targetpath, set_attrs=True,
+                        numeric_owner=False):
+        """Extract the TarInfo object tarinfo to a physical
+           file called targetpath.
+        """
+        # Build the destination pathname, replacing
+        # forward slashes with platform-specific separators.
+        targetpath = targetpath.rstrip("/")
+        targetpath = targetpath.replace("/", os.sep)
+
+        # Create all upper directories.
+        upperdirs = os.path.dirname(targetpath)
+        if upperdirs and not os.path.exists(upperdirs):
+            # Create directories that are not part of the archive with
+            # default permissions.
+            os.makedirs(upperdirs)
+
+        if tarinfo.islnk() or tarinfo.issym():
+            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
+        else:
+            self._dbg(1, tarinfo.name)
+
+        if tarinfo.isreg():
+            self.makefile(tarinfo, targetpath)
+        elif tarinfo.isdir():
+            self.makedir(tarinfo, targetpath)
+        elif tarinfo.isfifo():
+            self.makefifo(tarinfo, targetpath)
+        elif tarinfo.ischr() or tarinfo.isblk():
+            self.makedev(tarinfo, targetpath)
+        elif tarinfo.islnk() or tarinfo.issym():
+            self.makelink(tarinfo, targetpath)
+        elif tarinfo.type not in SUPPORTED_TYPES:
+            self.makeunknown(tarinfo, targetpath)
+        else:
+            self.makefile(tarinfo, targetpath)
+
+        if set_attrs:
+            self.chown(tarinfo, targetpath, numeric_owner)
+            if not tarinfo.issym():
+                self.chmod(tarinfo, targetpath)
+                self.utime(tarinfo, targetpath)
+
+    #--------------------------------------------------------------------------
+    # Below are the different file methods. They are called via
+    # _extract_member() when extract() is called. They can be replaced in a
+    # subclass to implement other functionality.
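+    #
+    # For instance (sketch), a subclass could log every regular file it
+    # writes by overriding makefile():
+    #
+    #     class LoggingTarFile(TarFile):
+    #         def makefile(self, tarinfo, targetpath):
+    #             print("writing", targetpath)
+    #             super().makefile(tarinfo, targetpath)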
+
+    def makedir(self, tarinfo, targetpath):
+        """Make a directory called targetpath.
+        """
+        try:
+            if tarinfo.mode is None:
+                # Use the system's default mode
+                os.mkdir(targetpath)
+            else:
+                # Use a safe mode for the directory, the real mode is set
+                # later in _extract_member().
+                os.mkdir(targetpath, 0o700)
+        except FileExistsError:
+            if not os.path.isdir(targetpath):
+                raise
+
+    def makefile(self, tarinfo, targetpath):
+        """Make a file called targetpath.
+        """
+        source = self.fileobj
+        source.seek(tarinfo.offset_data)
+        bufsize = self.copybufsize
+        with bltn_open(targetpath, "wb") as target:
+            if tarinfo.sparse is not None:
+                for offset, size in tarinfo.sparse:
+                    target.seek(offset)
+                    copyfileobj(source, target, size, ReadError, bufsize)
+                target.seek(tarinfo.size)
+                target.truncate()
+            else:
+                copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
+
+    def makeunknown(self, tarinfo, targetpath):
+        """Make a file from a TarInfo object with an unknown type
+           at targetpath.
+        """
+        self.makefile(tarinfo, targetpath)
+        self._dbg(1, "tarfile: Unknown file type %r, " \
+                     "extracted as regular file." % tarinfo.type)
+
+    def makefifo(self, tarinfo, targetpath):
+        """Make a fifo called targetpath.
+        """
+        if hasattr(os, "mkfifo"):
+            os.mkfifo(targetpath)
+        else:
+            raise ExtractError("fifo not supported by system")
+
+    def makedev(self, tarinfo, targetpath):
+        """Make a character or block device called targetpath.
+        """
+        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
+            raise ExtractError("special devices not supported by system")
+
+        mode = tarinfo.mode
+        if mode is None:
+            # Use mknod's default
+            mode = 0o600
+        if tarinfo.isblk():
+            mode |= stat.S_IFBLK
+        else:
+            mode |= stat.S_IFCHR
+
+        os.mknod(targetpath, mode,
+                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
+
+    def makelink(self, tarinfo, targetpath):
+        """Make a (symbolic) link called targetpath. If it cannot be created
+           (platform limitation), we try to make a copy of the referenced file
+           instead of a link.
+        """
+        try:
+            # For systems that support symbolic and hard links.
+            if tarinfo.issym():
+                if os.path.lexists(targetpath):
+                    # Avoid FileExistsError on following os.symlink.
+                    os.unlink(targetpath)
+                os.symlink(tarinfo.linkname, targetpath)
+            else:
+                if os.path.exists(tarinfo._link_target):
+                    os.link(tarinfo._link_target, targetpath)
+                else:
+                    self._extract_member(self._find_link_target(tarinfo),
+                                         targetpath)
+        except symlink_exception:
+            try:
+                self._extract_member(self._find_link_target(tarinfo),
+                                     targetpath)
+            except KeyError:
+                raise ExtractError("unable to resolve link inside archive") from None
+
+    def chown(self, tarinfo, targetpath, numeric_owner):
+        """Set owner of targetpath according to tarinfo. If numeric_owner
+           is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
+           is False, fall back to .gid/.uid when the search based on name
+           fails.
+        """
+        if hasattr(os, "geteuid") and os.geteuid() == 0:
+            # We have to be root to do so.
+            g = tarinfo.gid
+            u = tarinfo.uid
+            if not numeric_owner:
+                try:
+                    if grp and tarinfo.gname:
+                        g = grp.getgrnam(tarinfo.gname)[2]
+                except KeyError:
+                    pass
+                try:
+                    if pwd and tarinfo.uname:
+                        u = pwd.getpwnam(tarinfo.uname)[2]
+                except KeyError:
+                    pass
+            if g is None:
+                g = -1
+            if u is None:
+                u = -1
+            try:
+                if tarinfo.issym() and hasattr(os, "lchown"):
+                    os.lchown(targetpath, u, g)
+                else:
+                    os.chown(targetpath, u, g)
+            except OSError as e:
+                raise ExtractError("could not change owner") from e
+
+    def chmod(self, tarinfo, targetpath):
+        """Set file permissions of targetpath according to tarinfo.
+        """
+        if tarinfo.mode is None:
+            return
+        try:
+            os.chmod(targetpath, tarinfo.mode)
+        except OSError as e:
+            raise ExtractError("could not change mode") from e
+
+    def utime(self, tarinfo, targetpath):
+        """Set modification time of targetpath according to tarinfo.
+        """
+        mtime = tarinfo.mtime
+        if mtime is None:
+            return
+        if not hasattr(os, 'utime'):
+            return
+        try:
+            os.utime(targetpath, (mtime, mtime))
+        except OSError as e:
+            raise ExtractError("could not change modification time") from e
+
+    #--------------------------------------------------------------------------
+    def next(self):
+        """Return the next member of the archive as a TarInfo object, when
+           TarFile is opened for reading. Return None if there is no more
+           available.
+        """
+        self._check("ra")
+        if self.firstmember is not None:
+            m = self.firstmember
+            self.firstmember = None
+            return m
+
+        # Advance the file pointer.
+        if self.offset != self.fileobj.tell():
+            if self.offset == 0:
+                return None
+            self.fileobj.seek(self.offset - 1)
+            if not self.fileobj.read(1):
+                raise ReadError("unexpected end of data")
+
+        # Read the next block.
+        tarinfo = None
+        while True:
+            try:
+                tarinfo = self.tarinfo.fromtarfile(self)
+            except EOFHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, "0x%X: %s" % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+            except InvalidHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, "0x%X: %s" % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+                elif self.offset == 0:
+                    raise ReadError(str(e)) from None
+            except EmptyHeaderError:
+                if self.offset == 0:
+                    raise ReadError("empty file") from None
+            except TruncatedHeaderError as e:
+                if self.offset == 0:
+                    raise ReadError(str(e)) from None
+            except SubsequentHeaderError as e:
+                raise ReadError(str(e)) from None
+            except Exception as e:
+                try:
+                    import zlib
+                    if isinstance(e, zlib.error):
+                        raise ReadError(f'zlib error: {e}') from None
+                    else:
+                        raise e
+                except ImportError:
+                    raise e
+            break
+
+        if tarinfo is not None:
+            self.members.append(tarinfo)
+        else:
+            self._loaded = True
+
+        return tarinfo
+
+    #--------------------------------------------------------------------------
+    # Little helper methods:
+
+    def _getmember(self, name, tarinfo=None, normalize=False):
+        """Find an archive member by name from bottom to top.
+           If tarinfo is given, it is used as the starting point.
+        """
+        # Ensure that all members have been loaded.
+        members = self.getmembers()
+
+        # Limit the member search list up to tarinfo.
+        skipping = False
+        if tarinfo is not None:
+            try:
+                index = members.index(tarinfo)
+            except ValueError:
+                # The given starting point might be a (modified) copy.
+                # We'll later skip members until we find an equivalent.
+                skipping = True
+            else:
+                # Happy fast path
+                members = members[:index]
+
+        if normalize:
+            name = os.path.normpath(name)
+
+        for member in reversed(members):
+            if skipping:
+                if tarinfo.offset == member.offset:
+                    skipping = False
+                continue
+            if normalize:
+                member_name = os.path.normpath(member.name)
+            else:
+                member_name = member.name
+
+            if name == member_name:
+                return member
+
+        if skipping:
+            # Starting point was not found
+            raise ValueError(tarinfo)
+
+    def _load(self):
+        """Read through the entire archive file and look for readable
+           members.
+        """
+        while self.next() is not None:
+            pass
+        self._loaded = True
+
+    def _check(self, mode=None):
+        """Check if TarFile is still open, and if the operation's mode
+           corresponds to TarFile's mode.
+        """
+        if self.closed:
+            raise OSError("%s is closed" % self.__class__.__name__)
+        if mode is not None and self.mode not in mode:
+            raise OSError("bad operation for mode %r" % self.mode)
+
+    def _find_link_target(self, tarinfo):
+        """Find the target member of a symlink or hardlink member in the
+           archive.
+        """
+        if tarinfo.issym():
+            # Always search the entire archive.
+            linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
+            limit = None
+        else:
+            # Search the archive before the link, because a hard link is
+            # just a reference to an already archived file.
+            linkname = tarinfo.linkname
+            limit = tarinfo
+
+        member = self._getmember(linkname, tarinfo=limit, normalize=True)
+        if member is None:
+            raise KeyError("linkname %r not found" % linkname)
+        return member
+
+    def __iter__(self):
+        """Provide an iterator object.
+        """
+        if self._loaded:
+            yield from self.members
+            return
+
+        # Yield items using TarFile's next() method.
+        # When all members have been read, set TarFile as _loaded.
+        index = 0
+        # Fix for SF #1100429: Under rare circumstances it can
+        # happen that getmembers() is called during iteration,
+        # which will have already exhausted the next() method.
+        if self.firstmember is not None:
+            tarinfo = self.next()
+            index += 1
+            yield tarinfo
+
+        while True:
+            if index < len(self.members):
+                tarinfo = self.members[index]
+            elif not self._loaded:
+                tarinfo = self.next()
+                if not tarinfo:
+                    self._loaded = True
+                    return
+            else:
+                return
+            index += 1
+            yield tarinfo
+
+    def _dbg(self, level, msg):
+        """Write debugging output to sys.stderr.
+        """
+        if level <= self.debug:
+            print(msg, file=sys.stderr)
+
+    def __enter__(self):
+        self._check()
+        return self
+
+    def __exit__(self, type, value, traceback):
+        if type is None:
+            self.close()
+        else:
+            # An exception occurred. We must not call close() because
+            # it would try to write end-of-archive blocks and padding.
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+
+#--------------------
+# exported functions
+#--------------------
+
+def is_tarfile(name):
+    """Return True if name points to a tar archive that we
+       are able to handle, else return False.
+
+       'name' should be a string, file, or file-like object.
+    """
+    try:
+        if hasattr(name, "read"):
+            pos = name.tell()
+            t = open(fileobj=name)
+            name.seek(pos)
+        else:
+            t = open(name)
+        t.close()
+        return True
+    except TarError:
+        return False
+
+open = TarFile.open
+
+
+def main():
+    import argparse
+
+    description = 'A simple command-line interface for tarfile module.'
+    parser = argparse.ArgumentParser(description=description)
+    parser.add_argument('-v', '--verbose', action='store_true', default=False,
+                        help='Verbose output')
+    parser.add_argument('--filter', metavar='<filtername>',
+                        choices=_NAMED_FILTERS,
+                        help='Filter for extraction')
+
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument('-l', '--list', metavar='<tarfile>',
+                       help='Show listing of a tarfile')
+    group.add_argument('-e', '--extract', nargs='+',
+                       metavar=('<tarfile>', '<output_dir>'),
+                       help='Extract tarfile into target dir')
+    group.add_argument('-c', '--create', nargs='+',
+                       metavar=('<name>', '<file>'),
+                       help='Create tarfile from sources')
+    group.add_argument('-t', '--test', metavar='<tarfile>',
+                       help='Test if a tarfile is valid')
+
+    args = parser.parse_args()
+
+    if args.filter and args.extract is None:
+        parser.exit(1, '--filter is only valid for extraction\n')
+
+    if args.test is not None:
+        src = args.test
+        if is_tarfile(src):
+            with open(src, 'r') as tar:
+                tar.getmembers()
+                print(tar.getmembers(), file=sys.stderr)
+            if args.verbose:
+                print('{!r} is a tar archive.'.format(src))
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.list is not None:
+        src = args.list
+        if is_tarfile(src):
+            with TarFile.open(src, 'r:*') as tf:
+                tf.list(verbose=args.verbose)
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.extract is not None:
+        if len(args.extract) == 1:
+            src = args.extract[0]
+            curdir = os.curdir
+        elif len(args.extract) == 2:
+            src, curdir = args.extract
+        else:
+            parser.exit(1, parser.format_help())
+
+        if is_tarfile(src):
+            with TarFile.open(src, 'r:*') as tf:
+                tf.extractall(path=curdir, filter=args.filter)
+            if args.verbose:
+                if curdir == '.':
+                    msg = '{!r} file is extracted.'.format(src)
+                else:
+                    msg = ('{!r} file is extracted '
+                           'into {!r} directory.').format(src, curdir)
+                print(msg)
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.create is not None:
+        tar_name = args.create.pop(0)
+        _, ext = os.path.splitext(tar_name)
+        compressions = {
+            # gz
+            '.gz': 'gz',
+            '.tgz': 'gz',
+            # xz
+            '.xz': 'xz',
+            '.txz': 'xz',
+            # bz2
+            '.bz2': 'bz2',
+            '.tbz': 'bz2',
+            '.tbz2': 'bz2',
+            '.tb2': 'bz2',
+        }
+        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
+        tar_files = args.create
+
+        with TarFile.open(tar_name, tar_mode) as tf:
+            for file_name in tar_files:
+                tf.add(file_name)
+
+        if args.verbose:
+            print('{!r} file created.'.format(tar_name))
+
+if __name__ == '__main__':
+    main()
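+
+# Command-line usage when this file is executed directly (illustrative;
+# archive and directory names are hypothetical):
+#     python tarfile.py -c out.tgz src/    # create a gzip-compressed archive
+#     python tarfile.py -l out.tgz         # list its contents
+#     python tarfile.py -e out.tgz dest/   # extract into dest/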
diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD
index 01f235677f..c5ed31bf55 100644
--- a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD
+++ b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD
@@ -6,15 +6,15 @@ importlib_metadata-6.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRk
 importlib_metadata-6.0.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
 importlib_metadata-6.0.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
 importlib_metadata/__init__.py,sha256=wiMJxNXXhPtRRHSX2N9gGLnTh0YszmE1rn3uKYRrNcs,26490
-importlib_metadata/__pycache__/__init__.cpython-311.pyc,,
-importlib_metadata/__pycache__/_adapters.cpython-311.pyc,,
-importlib_metadata/__pycache__/_collections.cpython-311.pyc,,
-importlib_metadata/__pycache__/_compat.cpython-311.pyc,,
-importlib_metadata/__pycache__/_functools.cpython-311.pyc,,
-importlib_metadata/__pycache__/_itertools.cpython-311.pyc,,
-importlib_metadata/__pycache__/_meta.cpython-311.pyc,,
-importlib_metadata/__pycache__/_py39compat.cpython-311.pyc,,
-importlib_metadata/__pycache__/_text.cpython-311.pyc,,
+importlib_metadata/__pycache__/__init__.cpython-312.pyc,,
+importlib_metadata/__pycache__/_adapters.cpython-312.pyc,,
+importlib_metadata/__pycache__/_collections.cpython-312.pyc,,
+importlib_metadata/__pycache__/_compat.cpython-312.pyc,,
+importlib_metadata/__pycache__/_functools.cpython-312.pyc,,
+importlib_metadata/__pycache__/_itertools.cpython-312.pyc,,
+importlib_metadata/__pycache__/_meta.cpython-312.pyc,,
+importlib_metadata/__pycache__/_py39compat.cpython-312.pyc,,
+importlib_metadata/__pycache__/_text.cpython-312.pyc,,
 importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454
 importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
 importlib_metadata/_compat.py,sha256=9zOKf0eDgkCMnnaEhU5kQVxHd1P8BIYV7Stso7av5h8,1857
diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD
index 7d19852d4a..ba764991ee 100644
--- a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD
+++ b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD
@@ -6,15 +6,15 @@ importlib_resources-5.10.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
 importlib_resources-5.10.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
 importlib_resources-5.10.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
 importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506
-importlib_resources/__pycache__/__init__.cpython-311.pyc,,
-importlib_resources/__pycache__/_adapters.cpython-311.pyc,,
-importlib_resources/__pycache__/_common.cpython-311.pyc,,
-importlib_resources/__pycache__/_compat.cpython-311.pyc,,
-importlib_resources/__pycache__/_itertools.cpython-311.pyc,,
-importlib_resources/__pycache__/_legacy.cpython-311.pyc,,
-importlib_resources/__pycache__/abc.cpython-311.pyc,,
-importlib_resources/__pycache__/readers.cpython-311.pyc,,
-importlib_resources/__pycache__/simple.cpython-311.pyc,,
+importlib_resources/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/__pycache__/_adapters.cpython-312.pyc,,
+importlib_resources/__pycache__/_common.cpython-312.pyc,,
+importlib_resources/__pycache__/_compat.cpython-312.pyc,,
+importlib_resources/__pycache__/_itertools.cpython-312.pyc,,
+importlib_resources/__pycache__/_legacy.cpython-312.pyc,,
+importlib_resources/__pycache__/abc.cpython-312.pyc,,
+importlib_resources/__pycache__/readers.cpython-312.pyc,,
+importlib_resources/__pycache__/simple.cpython-312.pyc,,
 importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
 importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457
 importlib_resources/_compat.py,sha256=dSadF6WPt8MwOqSm_NIOQPhw4x0iaMYTWxi-XS93p7M,2923
@@ -25,36 +25,36 @@ importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581
 importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576
 importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/__pycache__/__init__.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/_compat.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/_path.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_contents.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_files.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_open.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_path.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_read.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_reader.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/test_resource.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/update-zips.cpython-311.pyc,,
-importlib_resources/tests/__pycache__/util.cpython-311.pyc,,
+importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/_compat.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/_path.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/update-zips.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/util.cpython-312.pyc,,
 importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708
 importlib_resources/tests/_path.py,sha256=yZyWsQzJZQ1Z8ARAxWkjAdaVVsjlzyqxO0qjBUofJ8M,1039
 importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
 importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
 importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
 importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
 importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/one/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
 importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/two/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
 importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
 importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
@@ -70,8 +70,8 @@ importlib_resources/tests/test_resource.py,sha256=EMoarxTEHcrq8R41LQDsndIG8Idtm4
 importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417
 importlib_resources/tests/util.py,sha256=ARAlxZ47wC-lgR7PGlmgBoi4HnhzcykD5Is2-TAwY0I,4873
 importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876
 importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-311.pyc,,
+importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-312.pyc,,
 importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698
diff --git a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/METADATA b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/METADATA
deleted file mode 100644
index 281137a035..0000000000
--- a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/METADATA
+++ /dev/null
@@ -1,68 +0,0 @@
-Metadata-Version: 2.1
-Name: jaraco.context
-Version: 4.3.0
-Summary: Context managers by jaraco
-Home-page: https://github.com/jaraco/jaraco.context
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.7
-License-File: LICENSE
-Provides-Extra: docs
-Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: flake8 (<5) ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/jaraco.context.svg
-   :target: https://pypi.org/project/jaraco.context
-
-.. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg
-
-.. image:: https://github.com/jaraco/jaraco.context/workflows/tests/badge.svg
-   :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
-.. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest
-   :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2023-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/jaraco.context
-   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.context?utm_source=pypi-jaraco.context&utm_medium=readme
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more `_.
-
-Security Contact
-================
-
-To report a security vulnerability, please use the
-`Tidelift security contact `_.
-Tidelift will coordinate the fix and disclosure.
diff --git a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/RECORD b/setuptools/_vendor/jaraco.context-4.3.0.dist-info/RECORD
deleted file mode 100644
index 03122364a2..0000000000
--- a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/RECORD
+++ /dev/null
@@ -1,8 +0,0 @@
-jaraco.context-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.context-4.3.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
-jaraco.context-4.3.0.dist-info/METADATA,sha256=GqMykAm33E7Tt_t_MHc5O7GJN62Qwp6MEHX9WD-LPow,2958
-jaraco.context-4.3.0.dist-info/RECORD,,
-jaraco.context-4.3.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-jaraco.context-4.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/__pycache__/context.cpython-311.pyc,,
-jaraco/context.py,sha256=vlyDzb_PvZ9H7R9bbTr_CMRnveW5Dc56eC7eyd_GfoA,7460
diff --git a/setuptools/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA
new file mode 100644
index 0000000000..a36f7c5e82
--- /dev/null
+++ b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA
@@ -0,0 +1,75 @@
+Metadata-Version: 2.1
+Name: jaraco.context
+Version: 5.3.0
+Summary: Useful decorators and context managers
+Home-page: https://github.com/jaraco/jaraco.context
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+License-File: LICENSE
+Requires-Dist: backports.tarfile ; python_version < "3.12"
+Provides-Extra: docs
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: sphinx-lint ; extra == 'docs'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-mypy ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
+Requires-Dist: portend ; extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.context.svg
+   :target: https://pypi.org/project/jaraco.context
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg
+
+.. image:: https://github.com/jaraco/jaraco.context/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest
+   :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/jaraco.context
+   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.context?utm_source=pypi-jaraco.context&utm_medium=readme
+
+
+Highlights
+==========
+
+See the docs linked from the badge above for the full details, but here are some features that may be of interest.
+
+- ``ExceptionTrap`` provides a general-purpose wrapper for trapping exceptions and then acting on the outcome. Includes ``passes`` and ``raises`` decorators to replace the result of a wrapped function by a boolean indicating the outcome of the exception trap. See `this keyring commit `_ for an example of it in production.
+- ``suppress`` simply enables ``contextlib.suppress`` as a decorator.
+- ``on_interrupt`` is a decorator used by CLI entry points to affect the handling of a ``KeyboardInterrupt``. Inspired by `Lucretiel/autocommand#18 `_.
+- ``pushd`` is similar to pytest's ``monkeypatch.chdir`` or path's `default context `_, changes the current working directory for the duration of the context.
+- ``tarball`` will download a tarball, extract it, change directory, yield, then clean up after. Convenient when working with web assets.
+- ``null`` is there for those times when one code branch needs a context and the other doesn't; this null context provides symmetry across those branches.
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
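
The Highlights above are terse, so here is a minimal sketch of two of the listed helpers. It assumes the package is importable as ``jaraco.context``; the names match the vendored module shown later in this patch series.

    import os
    from jaraco.context import ExceptionTrap, pushd

    # ExceptionTrap swallows the named exception and records whether it fired.
    with ExceptionTrap(ValueError) as trap:
        int('not a number')
    assert bool(trap)  # the ValueError was trapped, not raised

    # pushd changes the working directory only for the duration of the block.
    home = os.path.expanduser('~')
    before = os.getcwd()
    with pushd(home):
        assert os.path.samefile(os.getcwd(), home)
    assert os.getcwd() == before
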
diff --git a/setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD
new file mode 100644
index 0000000000..09d191f214
--- /dev/null
+++ b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+jaraco.context-5.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.context-5.3.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.context-5.3.0.dist-info/METADATA,sha256=xDtguJej0tN9iEXCUvxEJh2a7xceIRVBEakBLSr__tY,4020
+jaraco.context-5.3.0.dist-info/RECORD,,
+jaraco.context-5.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+jaraco.context-5.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/__pycache__/context.cpython-312.pyc,,
+jaraco/context.py,sha256=REoLIxDkO5MfEYowt_WoupNCRoxBS5v7YX2PbW8lIcs,9552
diff --git a/setuptools/_vendor/jaraco.context-5.3.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/setuptools/_vendor/jaraco.context-4.3.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/jaraco.context-4.3.0.dist-info/top_level.txt
rename to setuptools/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt
diff --git a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/RECORD
deleted file mode 100644
index 70a3521307..0000000000
--- a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/RECORD
+++ /dev/null
@@ -1,8 +0,0 @@
-jaraco.functools-3.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-3.6.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
-jaraco.functools-3.6.0.dist-info/METADATA,sha256=ImGoa1WEbhsibIb288yWqkDAvqLwlPzayjravRvW_Bs,3136
-jaraco.functools-3.6.0.dist-info/RECORD,,
-jaraco.functools-3.6.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-jaraco.functools-3.6.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/__pycache__/functools.cpython-311.pyc,,
-jaraco/functools.py,sha256=GhSJGMVMcb0U4-axXaY_au30hT-ceW-HM1EbV1_9NzI,15035
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/METADATA b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
similarity index 69%
rename from setuptools/_vendor/jaraco.functools-3.6.0.dist-info/METADATA
rename to setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
index 23c6f5ef2b..581b308378 100644
--- a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/METADATA
+++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: jaraco.functools
-Version: 3.6.0
+Version: 4.0.0
 Summary: Functools like those found in stdlib
 Home-page: https://github.com/jaraco/jaraco.functools
 Author: Jason R. Coombs
@@ -10,26 +10,26 @@ Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.7
+Requires-Python: >=3.8
 License-File: LICENSE
 Requires-Dist: more-itertools
 Provides-Extra: docs
-Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
 Requires-Dist: furo ; extra == 'docs'
 Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
 Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: flake8 (<5) ; extra == 'testing'
+Requires-Dist: pytest >=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: pytest-ruff ; extra == 'testing'
 Requires-Dist: jaraco.classes ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'
+Requires-Dist: pytest-black >=0.3.7 ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
    :target: https://pypi.org/project/jaraco.functools
@@ -40,6 +40,10 @@ Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'
    :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
    :alt: tests
 
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
 .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
    :target: https://github.com/psf/black
    :alt: Code style: Black
@@ -63,10 +67,3 @@ Available as part of the Tidelift Subscription.
 This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
 
 `Learn more `_.
-
-Security Contact
-================
-
-To report a security vulnerability, please use the
-`Tidelift security contact `_.
-Tidelift will coordinate the fix and disclosure.
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..783aa7d2b9
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
+jaraco.functools-4.0.0.dist-info/RECORD,,
+jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
new file mode 100644
index 0000000000..ba48cbcf92
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.3)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/setuptools/_vendor/jaraco.functools-3.6.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/jaraco.functools-3.6.0.dist-info/top_level.txt
rename to setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD
index dd471b0708..c698101cb4 100644
--- a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD
+++ b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD
@@ -7,4 +7,4 @@ jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FG
 jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
 jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
 jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538
-jaraco/text/__pycache__/__init__.cpython-311.pyc,,
+jaraco/text/__pycache__/__init__.cpython-312.pyc,,
diff --git a/setuptools/_vendor/jaraco/context.py b/setuptools/_vendor/jaraco/context.py
index b0d1ef37cb..61b27135df 100644
--- a/setuptools/_vendor/jaraco/context.py
+++ b/setuptools/_vendor/jaraco/context.py
@@ -1,15 +1,26 @@
-import os
-import subprocess
+from __future__ import annotations
+
 import contextlib
 import functools
-import tempfile
-import shutil
 import operator
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import urllib.request
 import warnings
+from typing import Iterator
+
+
+if sys.version_info < (3, 12):
+    from backports import tarfile
+else:
+    import tarfile
 
 
 @contextlib.contextmanager
-def pushd(dir):
+def pushd(dir: str | os.PathLike) -> Iterator[str | os.PathLike]:
     """
     >>> tmp_path = getfixture('tmp_path')
     >>> with pushd(tmp_path):
@@ -26,33 +37,88 @@ def pushd(dir):
 
 
 @contextlib.contextmanager
-def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
+def tarball(
+    url, target_dir: str | os.PathLike | None = None
+) -> Iterator[str | os.PathLike]:
     """
-    Get a tarball, extract it, change to that directory, yield, then
-    clean up.
-    `runner` is the function to invoke commands.
-    `pushd` is a context manager for changing the directory.
+    Get a tarball, extract it, yield, then clean up.
+
+    >>> import urllib.request
+    >>> url = getfixture('tarfile_served')
+    >>> target = getfixture('tmp_path') / 'out'
+    >>> tb = tarball(url, target_dir=target)
+    >>> import pathlib
+    >>> with tb as extracted:
+    ...     contents = pathlib.Path(extracted, 'contents.txt').read_text(encoding='utf-8')
+    >>> assert not os.path.exists(extracted)
     """
     if target_dir is None:
         target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
-    if runner is None:
-        runner = functools.partial(subprocess.check_call, shell=True)
-    else:
-        warnings.warn("runner parameter is deprecated", DeprecationWarning)
     # In the tar command, use --strip-components=1 to strip the first path and
     #  then
     #  use -C to cause the files to be extracted to {target_dir}. This ensures
     #  that we always know where the files were extracted.
-    runner('mkdir {target_dir}'.format(**vars()))
+    os.mkdir(target_dir)
     try:
-        getter = 'wget {url} -O -'
-        extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
-        cmd = ' | '.join((getter, extract))
-        runner(cmd.format(compression=infer_compression(url), **vars()))
-        with pushd(target_dir):
-            yield target_dir
+        req = urllib.request.urlopen(url)
+        with tarfile.open(fileobj=req, mode='r|*') as tf:
+            tf.extractall(path=target_dir, filter=strip_first_component)
+        yield target_dir
     finally:
-        runner('rm -Rf {target_dir}'.format(**vars()))
+        shutil.rmtree(target_dir)
+
+
+def strip_first_component(
+    member: tarfile.TarInfo,
+    path,
+) -> tarfile.TarInfo:
+    _, member.name = member.name.split('/', 1)
+    return member
+
+
+def _compose(*cmgrs):
+    """
+    Compose any number of dependent context managers into a single one.
+
+    The last, innermost context manager may take arbitrary arguments, but
+    each successive context manager should accept the result from the
+    previous as a single parameter.
+
+    Like :func:`jaraco.functools.compose`, behavior works from right to
+    left, so the context manager should be indicated from outermost to
+    innermost.
+
+    Example, to create a context manager to change to a temporary
+    directory:
+
+    >>> temp_dir_as_cwd = _compose(pushd, temp_dir)
+    >>> with temp_dir_as_cwd() as dir:
+    ...     assert os.path.samefile(os.getcwd(), dir)
+    """
+
+    def compose_two(inner, outer):
+        def composed(*args, **kwargs):
+            with inner(*args, **kwargs) as saved, outer(saved) as res:
+                yield res
+
+        return contextlib.contextmanager(composed)
+
+    return functools.reduce(compose_two, reversed(cmgrs))
+
+
+tarball_cwd = _compose(pushd, tarball)
+
+
+@contextlib.contextmanager
+def tarball_context(*args, **kwargs):
+    warnings.warn(
+        "tarball_context is deprecated. Use tarball or tarball_cwd instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    pushd_ctx = kwargs.pop('pushd', pushd)
+    with tarball(*args, **kwargs) as tball, pushd_ctx(tball) as dir:
+        yield dir
 
 
 def infer_compression(url):
@@ -68,6 +134,11 @@ def infer_compression(url):
     >>> infer_compression('file.xz')
     'J'
     """
+    warnings.warn(
+        "infer_compression is deprecated with no replacement",
+        DeprecationWarning,
+        stacklevel=2,
+    )
     # cheat and just assume it's the last two characters
     compression_indicator = url[-2:]
     mapping = dict(gz='z', bz='j', xz='J')
@@ -84,7 +155,7 @@ def temp_dir(remover=shutil.rmtree):
     >>> import pathlib
     >>> with temp_dir() as the_dir:
     ...     assert os.path.isdir(the_dir)
-    ...     _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents')
+    ...     _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents', encoding='utf-8')
     >>> assert not os.path.exists(the_dir)
     """
     temp_dir = tempfile.mkdtemp()
@@ -113,15 +184,23 @@ def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
         yield repo_dir
 
 
-@contextlib.contextmanager
 def null():
     """
     A null context suitable to stand in for a meaningful context.
 
     >>> with null() as value:
     ...     assert value is None
+
+    This context is most useful when dealing with two or more code
+    branches but only some need a context. Wrap the others in a null
+    context to provide symmetry across all options.
     """
-    yield
+    warnings.warn(
+        "null is deprecated. Use contextlib.nullcontext",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return contextlib.nullcontext()
 
 
 class ExceptionTrap:
@@ -267,13 +346,7 @@ class on_interrupt(contextlib.ContextDecorator):
     ...     on_interrupt('ignore')(do_interrupt)()
     """
 
-    def __init__(
-        self,
-        action='error',
-        # py3.7 compat
-        # /,
-        code=1,
-    ):
+    def __init__(self, action='error', /, code=1):
         self.action = action
         self.code = code
 
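
The rewritten ``tarball`` above drops the old wget/tar shell pipeline in favor of ``urllib.request`` plus ``tarfile``, with ``strip_first_component`` supplied as an extraction filter. A usage sketch; the URL is hypothetical:

    import os
    from jaraco.context import tarball

    url = 'https://example.com/project-1.0.tgz'  # hypothetical archive
    with tarball(url, target_dir='project') as extracted:
        # The archive's leading path component is stripped, so members
        # land directly inside target_dir.
        print(os.listdir(extracted))
    assert not os.path.exists('project')  # the tree is removed on exit
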
diff --git a/setuptools/_vendor/jaraco/functools.py b/setuptools/_vendor/jaraco/functools/__init__.py
similarity index 79%
rename from setuptools/_vendor/jaraco/functools.py
rename to setuptools/_vendor/jaraco/functools/__init__.py
index ebf7a36137..130b87a485 100644
--- a/setuptools/_vendor/jaraco/functools.py
+++ b/setuptools/_vendor/jaraco/functools/__init__.py
@@ -1,18 +1,14 @@
+import collections.abc
 import functools
-import time
 import inspect
-import collections
-import types
 import itertools
+import operator
+import time
+import types
 import warnings
 
 import setuptools.extern.more_itertools
 
-from typing import Callable, TypeVar
-
-
-CallableT = TypeVar("CallableT", bound=Callable[..., object])
-
 
 def compose(*funcs):
     """
@@ -38,24 +34,6 @@ def compose_two(f1, f2):
     return functools.reduce(compose_two, funcs)
 
 
-def method_caller(method_name, *args, **kwargs):
-    """
-    Return a function that will call a named method on the
-    target object with optional positional and keyword
-    arguments.
-
-    >>> lower = method_caller('lower')
-    >>> lower('MyString')
-    'mystring'
-    """
-
-    def call_method(target):
-        func = getattr(target, method_name)
-        return func(*args, **kwargs)
-
-    return call_method
-
-
 def once(func):
     """
     Decorate func so it's only ever called the first time.
@@ -98,12 +76,7 @@ def wrapper(*args, **kwargs):
     return wrapper
 
 
-def method_cache(
-    method: CallableT,
-    cache_wrapper: Callable[
-        [CallableT], CallableT
-    ] = functools.lru_cache(),  # type: ignore[assignment]
-) -> CallableT:
+def method_cache(method, cache_wrapper=functools.lru_cache()):
     """
     Wrap lru_cache to support storing the cache data in the object instances.
 
@@ -171,21 +144,17 @@ def method_cache(
     for another implementation and additional justification.
     """
 
-    def wrapper(self: object, *args: object, **kwargs: object) -> object:
+    def wrapper(self, *args, **kwargs):
         # it's the first call, replace the method with a cached, bound method
-        bound_method: CallableT = types.MethodType(  # type: ignore[assignment]
-            method, self
-        )
+        bound_method = types.MethodType(method, self)
         cached_method = cache_wrapper(bound_method)
         setattr(self, method.__name__, cached_method)
         return cached_method(*args, **kwargs)
 
     # Support cache clear even before cache has been created.
-    wrapper.cache_clear = lambda: None  # type: ignore[attr-defined]
+    wrapper.cache_clear = lambda: None
 
-    return (  # type: ignore[return-value]
-        _special_method_cache(method, cache_wrapper) or wrapper
-    )
+    return _special_method_cache(method, cache_wrapper) or wrapper
 
 
 def _special_method_cache(method, cache_wrapper):
@@ -201,12 +170,13 @@ def _special_method_cache(method, cache_wrapper):
     """
     name = method.__name__
     special_names = '__getattr__', '__getitem__'
+
     if name not in special_names:
-        return
+        return None
 
     wrapper_name = '__cached' + name
 
-    def proxy(self, *args, **kwargs):
+    def proxy(self, /, *args, **kwargs):
         if wrapper_name not in vars(self):
             bound = types.MethodType(method, self)
             cache = cache_wrapper(bound)
@@ -243,7 +213,7 @@ def result_invoke(action):
     r"""
     Decorate a function with an action function that is
     invoked on the results returned from the decorated
-    function (for its side-effect), then return the original
+    function (for its side effect), then return the original
     result.
 
     >>> @result_invoke(print)
@@ -267,7 +237,7 @@ def wrapper(*args, **kwargs):
     return wrap
 
 
-def invoke(f, *args, **kwargs):
+def invoke(f, /, *args, **kwargs):
     """
     Call a function for its side effect after initialization.
 
@@ -302,25 +272,15 @@ def invoke(f, *args, **kwargs):
     Use functools.partial to pass parameters to the initial call
 
     >>> @functools.partial(invoke, name='bingo')
-    ... def func(name): print("called with", name)
+    ... def func(name): print('called with', name)
     called with bingo
     """
     f(*args, **kwargs)
     return f
 
 
-def call_aside(*args, **kwargs):
-    """
-    Deprecated name for invoke.
-    """
-    warnings.warn("call_aside is deprecated, use invoke", DeprecationWarning)
-    return invoke(*args, **kwargs)
-
-
 class Throttler:
-    """
-    Rate-limit a function (or other callable)
-    """
+    """Rate-limit a function (or other callable)."""
 
     def __init__(self, func, max_rate=float('Inf')):
         if isinstance(func, Throttler):
@@ -337,20 +297,20 @@ def __call__(self, *args, **kwargs):
         return self.func(*args, **kwargs)
 
     def _wait(self):
-        "ensure at least 1/max_rate seconds from last call"
+        """Ensure at least 1/max_rate seconds from last call."""
         elapsed = time.time() - self.last_called
         must_wait = 1 / self.max_rate - elapsed
         time.sleep(max(0, must_wait))
         self.last_called = time.time()
 
-    def __get__(self, obj, type=None):
+    def __get__(self, obj, owner=None):
         return first_invoke(self._wait, functools.partial(self.func, obj))
 
 
 def first_invoke(func1, func2):
     """
     Return a function that when invoked will invoke func1 without
-    any parameters (for its side-effect) and then invoke func2
+    any parameters (for its side effect) and then invoke func2
     with whatever parameters were passed, returning its result.
     """
 
@@ -361,6 +321,17 @@ def wrapper(*args, **kwargs):
     return wrapper
 
 
+method_caller = first_invoke(
+    lambda: warnings.warn(
+        '`jaraco.functools.method_caller` is deprecated, '
+        'use `operator.methodcaller` instead',
+        DeprecationWarning,
+        stacklevel=3,
+    ),
+    operator.methodcaller,
+)
+
+
 def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
     """
     Given a callable func, trap the indicated exceptions
@@ -369,7 +340,7 @@ def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
     to propagate.
     """
     attempts = itertools.count() if retries == float('inf') else range(retries)
-    for attempt in attempts:
+    for _ in attempts:
         try:
             return func()
         except trap:
@@ -406,7 +377,7 @@ def wrapper(*f_args, **f_kwargs):
 
 def print_yielded(func):
     """
-    Convert a generator into a function that prints all yielded elements
+    Convert a generator into a function that prints all yielded elements.
 
     >>> @print_yielded
     ... def x():
@@ -422,7 +393,7 @@ def print_yielded(func):
 
 def pass_none(func):
     """
-    Wrap func so it's not called if its first param is None
+    Wrap func so it's not called if its first param is None.
 
     >>> print_text = pass_none(print)
     >>> print_text('text')
@@ -431,9 +402,10 @@ def pass_none(func):
     """
 
     @functools.wraps(func)
-    def wrapper(param, *args, **kwargs):
+    def wrapper(param, /, *args, **kwargs):
         if param is not None:
             return func(param, *args, **kwargs)
+        return None
 
     return wrapper
 
@@ -507,7 +479,7 @@ def save_method_args(method):
     args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
 
     @functools.wraps(method)
-    def wrapper(self, *args, **kwargs):
+    def wrapper(self, /, *args, **kwargs):
         attr_name = '_saved_' + method.__name__
         attr = args_and_kwargs(args, kwargs)
         setattr(self, attr_name, attr)
@@ -554,3 +526,108 @@ def wrapper(*args, **kwargs):
         return wrapper
 
     return decorate
+
+
+def identity(x):
+    """
+    Return the argument.
+
+    >>> o = object()
+    >>> identity(o) is o
+    True
+    """
+    return x
+
+
+def bypass_when(check, *, _op=identity):
+    """
+    Decorate a function to return its parameter when ``check``.
+
+    >>> bypassed = []  # False
+
+    >>> @bypass_when(bypassed)
+    ... def double(x):
+    ...     return x * 2
+    >>> double(2)
+    4
+    >>> bypassed[:] = [object()]  # True
+    >>> double(2)
+    2
+    """
+
+    def decorate(func):
+        @functools.wraps(func)
+        def wrapper(param, /):
+            return param if _op(check) else func(param)
+
+        return wrapper
+
+    return decorate
+
+
+def bypass_unless(check):
+    """
+    Decorate a function to return its parameter unless ``check``.
+
+    >>> enabled = [object()]  # True
+
+    >>> @bypass_unless(enabled)
+    ... def double(x):
+    ...     return x * 2
+    >>> double(2)
+    4
+    >>> del enabled[:]  # False
+    >>> double(2)
+    2
+    """
+    return bypass_when(check, _op=operator.not_)
+
+
+@functools.singledispatch
+def _splat_inner(args, func):
+    """Splat args to func."""
+    return func(*args)
+
+
+@_splat_inner.register
+def _(args: collections.abc.Mapping, func):
+    """Splat kargs to func as kwargs."""
+    return func(**args)
+
+
+def splat(func):
+    """
+    Wrap func to expect its parameters to be passed positionally in a tuple.
+
+    Has a similar effect to that of ``itertools.starmap`` over
+    simple ``map``.
+
+    >>> pairs = [(-1, 1), (0, 2)]
+    >>> setuptools.extern.more_itertools.consume(itertools.starmap(print, pairs))
+    -1 1
+    0 2
+    >>> setuptools.extern.more_itertools.consume(map(splat(print), pairs))
+    -1 1
+    0 2
+
+    The approach generalizes to other iterators that don't have a "star"
+    equivalent, such as a "starfilter".
+
+    >>> list(filter(splat(operator.add), pairs))
+    [(0, 2)]
+
+    Splat also accepts a mapping argument.
+
+    >>> def is_nice(msg, code):
+    ...     return "smile" in msg or code == 0
+    >>> msgs = [
+    ...     dict(msg='smile!', code=20),
+    ...     dict(msg='error :(', code=1),
+    ...     dict(msg='unknown', code=0),
+    ... ]
+    >>> for msg in filter(splat(is_nice), msgs):
+    ...     print(msg)
+    {'msg': 'smile!', 'code': 20}
+    {'msg': 'unknown', 'code': 0}
+    """
+    return functools.wraps(func)(functools.partial(_splat_inner, func=func))
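
One notable change above: ``method_caller`` is now a deprecation shim built from ``first_invoke`` and ``operator.methodcaller``. What callers see, reusing the example from the removed docstring:

    import operator
    from jaraco.functools import method_caller

    # Calling method_caller emits a DeprecationWarning, then delegates
    # to operator.methodcaller.
    lower = method_caller('lower')
    assert lower('MyString') == 'mystring'

    # The drop-in replacement going forward:
    assert operator.methodcaller('lower')('MyString') == 'mystring'
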
diff --git a/setuptools/_vendor/jaraco/functools/__init__.pyi b/setuptools/_vendor/jaraco/functools/__init__.pyi
new file mode 100644
index 0000000000..c2b9ab1757
--- /dev/null
+++ b/setuptools/_vendor/jaraco/functools/__init__.pyi
@@ -0,0 +1,128 @@
+from collections.abc import Callable, Hashable, Iterator
+from functools import partial
+from operator import methodcaller
+import sys
+from typing import (
+    Any,
+    Generic,
+    Protocol,
+    TypeVar,
+    overload,
+)
+
+if sys.version_info >= (3, 10):
+    from typing import Concatenate, ParamSpec
+else:
+    from typing_extensions import Concatenate, ParamSpec
+
+_P = ParamSpec('_P')
+_R = TypeVar('_R')
+_T = TypeVar('_T')
+_R1 = TypeVar('_R1')
+_R2 = TypeVar('_R2')
+_V = TypeVar('_V')
+_S = TypeVar('_S')
+_R_co = TypeVar('_R_co', covariant=True)
+
+class _OnceCallable(Protocol[_P, _R]):
+    saved_result: _R
+    reset: Callable[[], None]
+    def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
+
+class _ProxyMethodCacheWrapper(Protocol[_R_co]):
+    cache_clear: Callable[[], None]
+    def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
+
+class _MethodCacheWrapper(Protocol[_R_co]):
+    def cache_clear(self) -> None: ...
+    def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
+
+# `compose()` overloads below will cover most use cases.
+
+@overload
+def compose(
+    __func1: Callable[[_R], _T],
+    __func2: Callable[_P, _R],
+    /,
+) -> Callable[_P, _T]: ...
+@overload
+def compose(
+    __func1: Callable[[_R], _T],
+    __func2: Callable[[_R1], _R],
+    __func3: Callable[_P, _R1],
+    /,
+) -> Callable[_P, _T]: ...
+@overload
+def compose(
+    __func1: Callable[[_R], _T],
+    __func2: Callable[[_R2], _R],
+    __func3: Callable[[_R1], _R2],
+    __func4: Callable[_P, _R1],
+    /,
+) -> Callable[_P, _T]: ...
+def once(func: Callable[_P, _R]) -> _OnceCallable[_P, _R]: ...
+def method_cache(
+    method: Callable[..., _R],
+    cache_wrapper: Callable[[Callable[..., _R]], _MethodCacheWrapper[_R]] = ...,
+) -> _MethodCacheWrapper[_R] | _ProxyMethodCacheWrapper[_R]: ...
+def apply(
+    transform: Callable[[_R], _T]
+) -> Callable[[Callable[_P, _R]], Callable[_P, _T]]: ...
+def result_invoke(
+    action: Callable[[_R], Any]
+) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: ...
+def invoke(
+    f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
+def call_aside(
+    f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
+
+class Throttler(Generic[_R]):
+    last_called: float
+    func: Callable[..., _R]
+    max_rate: float
+    def __init__(
+        self, func: Callable[..., _R] | Throttler[_R], max_rate: float = ...
+    ) -> None: ...
+    def reset(self) -> None: ...
+    def __call__(self, *args: Any, **kwargs: Any) -> _R: ...
+    def __get__(self, obj: Any, owner: type[Any] | None = ...) -> Callable[..., _R]: ...
+
+def first_invoke(
+    func1: Callable[..., Any], func2: Callable[_P, _R]
+) -> Callable[_P, _R]: ...
+
+method_caller: Callable[..., methodcaller]
+
+def retry_call(
+    func: Callable[..., _R],
+    cleanup: Callable[..., None] = ...,
+    retries: int | float = ...,
+    trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
+) -> _R: ...
+def retry(
+    cleanup: Callable[..., None] = ...,
+    retries: int | float = ...,
+    trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
+) -> Callable[[Callable[..., _R]], Callable[..., _R]]: ...
+def print_yielded(func: Callable[_P, Iterator[Any]]) -> Callable[_P, None]: ...
+def pass_none(
+    func: Callable[Concatenate[_T, _P], _R]
+) -> Callable[Concatenate[_T, _P], _R]: ...
+def assign_params(
+    func: Callable[..., _R], namespace: dict[str, Any]
+) -> partial[_R]: ...
+def save_method_args(
+    method: Callable[Concatenate[_S, _P], _R]
+) -> Callable[Concatenate[_S, _P], _R]: ...
+def except_(
+    *exceptions: type[BaseException], replace: Any = ..., use: Any = ...
+) -> Callable[[Callable[_P, Any]], Callable[_P, Any]]: ...
+def identity(x: _T) -> _T: ...
+def bypass_when(
+    check: _V, *, _op: Callable[[_V], Any] = ...
+) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
+def bypass_unless(
+    check: Any,
+) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
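
The new stubs lean on ``ParamSpec`` so that ``compose`` keeps precise signatures through composition. A sketch of what a type checker infers under the two-argument overload (runtime behavior shown with plain asserts):

    from jaraco.functools import compose

    def parse(s: str) -> int:
        return int(s)

    def double(n: int) -> int:
        return n * 2

    # compose applies right to left: pipeline(s) == double(parse(s)),
    # so the inferred type is Callable[[str], int].
    pipeline = compose(double, parse)
    assert pipeline('21') == 42
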
diff --git a/setuptools/_vendor/jaraco/functools/py.typed b/setuptools/_vendor/jaraco/functools/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD b/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD
index c3cbb83382..d1a6ea0d22 100644
--- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD
+++ b/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD
@@ -7,9 +7,9 @@ more_itertools-8.8.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQ
 more_itertools-8.8.0.dist-info/top_level.txt,sha256=fAuqRXu9LPhxdB9ujJowcFOu1rZ8wzSpOW9_jlKis6M,15
 more_itertools/__init__.py,sha256=C7sXffHTXM3P-iaLPPfqfmDoxOflQMJLcM7ed9p3jak,82
 more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
-more_itertools/__pycache__/__init__.cpython-311.pyc,,
-more_itertools/__pycache__/more.cpython-311.pyc,,
-more_itertools/__pycache__/recipes.cpython-311.pyc,,
+more_itertools/__pycache__/__init__.cpython-312.pyc,,
+more_itertools/__pycache__/more.cpython-312.pyc,,
+more_itertools/__pycache__/recipes.cpython-312.pyc,,
 more_itertools/more.py,sha256=DlZa8v6JihVwfQ5zHidOA-xDE0orcQIUyxVnCaUoDKE,117968
 more_itertools/more.pyi,sha256=r32pH2raBC1zih3evK4fyvAXvrUamJqc6dgV7QCRL_M,14977
 more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD b/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD
index 3c699595fb..3267872d45 100644
--- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD
+++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD
@@ -1,9 +1,9 @@
-__pycache__/ordered_set.cpython-311.pyc,,
+__pycache__/ordered_set.cpython-312.pyc,,
 ordered_set-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
 ordered_set-3.1.1.dist-info/METADATA,sha256=qEaJM9CbGNixB_jvfohisKbXTUjcef6nCCcBJju6f4U,5357
 ordered_set-3.1.1.dist-info/MIT-LICENSE,sha256=TvRE7qUSUBcd0ols7wgNf3zDEEJWW7kv7WDRySrMBBE,1071
 ordered_set-3.1.1.dist-info/RECORD,,
 ordered_set-3.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ordered_set-3.1.1.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110
+ordered_set-3.1.1.dist-info/WHEEL,sha256=DZajD4pwLWue70CAfc7YaxT1wLUciNBvN_TTcvXpltE,110
 ordered_set-3.1.1.dist-info/top_level.txt,sha256=NTY2_aDi1Do9fl3Z9EmWPxasFkUeW2dzO2D3RDx5CfM,12
 ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL b/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL
index f771c29b87..832be11132 100644
--- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL
+++ b/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.40.0)
+Generator: bdist_wheel (0.43.0)
 Root-Is-Purelib: true
 Tag: py2-none-any
 Tag: py3-none-any
diff --git a/setuptools/_vendor/packaging-23.1.dist-info/RECORD b/setuptools/_vendor/packaging-23.1.dist-info/RECORD
index e240a8408d..e041f20f6a 100644
--- a/setuptools/_vendor/packaging-23.1.dist-info/RECORD
+++ b/setuptools/_vendor/packaging-23.1.dist-info/RECORD
@@ -7,20 +7,20 @@ packaging-23.1.dist-info/RECORD,,
 packaging-23.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 packaging-23.1.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
 packaging/__init__.py,sha256=kYVZSmXT6CWInT4UJPDtrSQBAZu8fMuFBxpv5GsDTLk,501
-packaging/__pycache__/__init__.cpython-311.pyc,,
-packaging/__pycache__/_elffile.cpython-311.pyc,,
-packaging/__pycache__/_manylinux.cpython-311.pyc,,
-packaging/__pycache__/_musllinux.cpython-311.pyc,,
-packaging/__pycache__/_parser.cpython-311.pyc,,
-packaging/__pycache__/_structures.cpython-311.pyc,,
-packaging/__pycache__/_tokenizer.cpython-311.pyc,,
-packaging/__pycache__/markers.cpython-311.pyc,,
-packaging/__pycache__/metadata.cpython-311.pyc,,
-packaging/__pycache__/requirements.cpython-311.pyc,,
-packaging/__pycache__/specifiers.cpython-311.pyc,,
-packaging/__pycache__/tags.cpython-311.pyc,,
-packaging/__pycache__/utils.cpython-311.pyc,,
-packaging/__pycache__/version.cpython-311.pyc,,
+packaging/__pycache__/__init__.cpython-312.pyc,,
+packaging/__pycache__/_elffile.cpython-312.pyc,,
+packaging/__pycache__/_manylinux.cpython-312.pyc,,
+packaging/__pycache__/_musllinux.cpython-312.pyc,,
+packaging/__pycache__/_parser.cpython-312.pyc,,
+packaging/__pycache__/_structures.cpython-312.pyc,,
+packaging/__pycache__/_tokenizer.cpython-312.pyc,,
+packaging/__pycache__/markers.cpython-312.pyc,,
+packaging/__pycache__/metadata.cpython-312.pyc,,
+packaging/__pycache__/requirements.cpython-312.pyc,,
+packaging/__pycache__/specifiers.cpython-312.pyc,,
+packaging/__pycache__/tags.cpython-312.pyc,,
+packaging/__pycache__/utils.cpython-312.pyc,,
+packaging/__pycache__/version.cpython-312.pyc,,
 packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
 packaging/_manylinux.py,sha256=ESGrDEVmBc8jYTtdZRAWiLk72lOzAKWeezFgoJ_MuBc,8926
 packaging/_musllinux.py,sha256=mvPk7FNjjILKRLIdMxR7IvJ1uggLgCszo-L9rjfpi0M,2524
diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD
index 5f7a6b06b3..1db8063ec5 100644
--- a/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD
+++ b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD
@@ -5,10 +5,10 @@ tomli-2.0.1.dist-info/RECORD,,
 tomli-2.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tomli-2.0.1.dist-info/WHEEL,sha256=jPMR_Dzkc4X4icQtmz81lnNY_kAsfog7ry7qoRvYLXw,81
 tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396
-tomli/__pycache__/__init__.cpython-311.pyc,,
-tomli/__pycache__/_parser.cpython-311.pyc,,
-tomli/__pycache__/_re.cpython-311.pyc,,
-tomli/__pycache__/_types.cpython-311.pyc,,
+tomli/__pycache__/__init__.cpython-312.pyc,,
+tomli/__pycache__/_parser.cpython-312.pyc,,
+tomli/__pycache__/_re.cpython-312.pyc,,
+tomli/__pycache__/_types.cpython-312.pyc,,
 tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633
 tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943
 tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254
diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD
index 786de8542d..efc5f26cf3 100644
--- a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD
+++ b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD
@@ -1,4 +1,4 @@
-__pycache__/typing_extensions.cpython-311.pyc,,
+__pycache__/typing_extensions.cpython-312.pyc,,
 typing_extensions-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
 typing_extensions-4.0.1.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755
 typing_extensions-4.0.1.dist-info/METADATA,sha256=iZ_5HONZZBXtF4kroz-IPZYIl9M8IE1B00R82dWcBqE,1736
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
index 0fed8eeeae..592fe491a1 100644
--- a/setuptools/_vendor/vendored.txt
+++ b/setuptools/_vendor/vendored.txt
@@ -9,3 +9,5 @@ typing_extensions==4.0.1
 # required for importlib_resources and _metadata on older Pythons
 zipp==3.7.0
 tomli==2.0.1
+# required for jaraco.context on older Pythons
+backports.tarfile
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD b/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD
index 0a88551ce0..adc797bc2e 100644
--- a/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD
+++ b/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD
@@ -1,4 +1,4 @@
-__pycache__/zipp.cpython-311.pyc,,
+__pycache__/zipp.cpython-312.pyc,,
 zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
 zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
 zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261
diff --git a/tools/vendored.py b/tools/vendored.py
index f339497fa1..e33a44f291 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -42,7 +42,12 @@ def rewrite_jaraco_text(pkg_files, new_root):
         file.write_text(text)
 
 
-def rewrite_jaraco(pkg_files, new_root):
+def repair_jaraco_namespace(pkg_files):
+    # required for zip-packaged setuptools #3084
+    pkg_files.joinpath('__init__.py').write_text('')
+
+
+def rewrite_jaraco_functools(pkg_files, new_root):
     """
     Rewrite imports in jaraco.functools to redirect to vendored copies.
     """
@@ -50,8 +55,6 @@ def rewrite_jaraco(pkg_files, new_root):
         text = file.read_text()
         text = re.sub(r' (more_itertools)', rf' {new_root}.\1', text)
         file.write_text(text)
-    # required for zip-packaged setuptools #3084
-    pkg_files.joinpath('__init__.py').write_text('')
 
 
 def rewrite_importlib_resources(pkg_files, new_root):
@@ -129,8 +132,9 @@ def update_pkg_resources():
     vendor = Path('pkg_resources/_vendor')
     install(vendor)
     rewrite_packaging(vendor / 'packaging', 'pkg_resources.extern')
+    repair_jaraco_namespace(vendor / 'jaraco')
     rewrite_jaraco_text(vendor / 'jaraco/text', 'pkg_resources.extern')
-    rewrite_jaraco(vendor / 'jaraco', 'pkg_resources.extern')
+    rewrite_jaraco_functools(vendor / 'jaraco/functools', 'pkg_resources.extern')
     rewrite_importlib_resources(vendor / 'importlib_resources', 'pkg_resources.extern')
     rewrite_more_itertools(vendor / "more_itertools")
     rewrite_platformdirs(vendor / "platformdirs")
@@ -140,8 +144,9 @@ def update_setuptools():
     vendor = Path('setuptools/_vendor')
     install(vendor)
     rewrite_packaging(vendor / 'packaging', 'setuptools.extern')
+    repair_jaraco_namespace(vendor / 'jaraco')
     rewrite_jaraco_text(vendor / 'jaraco/text', 'setuptools.extern')
-    rewrite_jaraco(vendor / 'jaraco', 'setuptools.extern')
+    rewrite_jaraco_functools(vendor / 'jaraco/functools', 'setuptools.extern')
     rewrite_importlib_resources(vendor / 'importlib_resources', 'setuptools.extern')
     rewrite_importlib_metadata(vendor / 'importlib_metadata', 'setuptools.extern')
     rewrite_more_itertools(vendor / "more_itertools")
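
For a concrete picture of what ``rewrite_jaraco_functools`` does: the substitution rewrites any bare ``more_itertools`` reference to the vendored root, which is why the vendored ``jaraco/functools/__init__.py`` above imports ``setuptools.extern.more_itertools``. A sketch:

    import re

    new_root = 'setuptools.extern'
    line = 'import more_itertools\n'
    print(re.sub(r' (more_itertools)', rf' {new_root}.\1', line))
    # -> import setuptools.extern.more_itertools
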

From 528fe53b78e11baeb70b9819845f09aa33cbecb6 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 12:50:54 -0400
Subject: [PATCH 0507/1761] Ensure that 'backports' is included on older
 Pythons

---
 pkg_resources/_vendor/backports/__init__.py |  0
 pkg_resources/_vendor/jaraco/context.py     |  2 +-
 pkg_resources/extern/__init__.py            |  1 +
 setuptools/_vendor/backports/__init__.py    |  0
 setuptools/_vendor/jaraco/context.py        |  2 +-
 setuptools/extern/__init__.py               |  1 +
 tools/vendored.py                           | 20 +++++++++++++++++---
 7 files changed, 21 insertions(+), 5 deletions(-)
 create mode 100644 pkg_resources/_vendor/backports/__init__.py
 create mode 100644 setuptools/_vendor/backports/__init__.py

diff --git a/pkg_resources/_vendor/backports/__init__.py b/pkg_resources/_vendor/backports/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pkg_resources/_vendor/jaraco/context.py b/pkg_resources/_vendor/jaraco/context.py
index 61b27135df..c42f6135d5 100644
--- a/pkg_resources/_vendor/jaraco/context.py
+++ b/pkg_resources/_vendor/jaraco/context.py
@@ -14,7 +14,7 @@
 
 
 if sys.version_info < (3, 12):
-    from backports import tarfile
+    from pkg_resources.extern.backports import tarfile
 else:
     import tarfile
 
diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index 948bcc6094..df96f7f26d 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -76,5 +76,6 @@ def install(self):
     'jaraco',
     'importlib_resources',
     'more_itertools',
+    'backports',
 )
 VendorImporter(__name__, names).install()
diff --git a/setuptools/_vendor/backports/__init__.py b/setuptools/_vendor/backports/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/jaraco/context.py b/setuptools/_vendor/jaraco/context.py
index 61b27135df..0322c45d4a 100644
--- a/setuptools/_vendor/jaraco/context.py
+++ b/setuptools/_vendor/jaraco/context.py
@@ -14,7 +14,7 @@
 
 
 if sys.version_info < (3, 12):
-    from backports import tarfile
+    from setuptools.extern.backports import tarfile
 else:
     import tarfile
 
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 67c4a4552f..427b27cb80 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -80,5 +80,6 @@ def install(self):
     'jaraco',
     'typing_extensions',
     'tomli',
+    'backports',
 )
 VendorImporter(__name__, names, 'setuptools._vendor').install()
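
``VendorImporter`` (defined earlier in these ``extern/__init__.py`` files) aliases any package listed in ``names`` to its copy under ``_vendor``. Registering ``'backports'`` is what lets the rewritten import in the vendored ``jaraco/context.py`` resolve; the effective behavior mirrors the conditional patched in above:

    import sys

    # On Pythons before 3.12, the tarfile backport is served from
    # setuptools/_vendor/backports through the extern alias.
    if sys.version_info < (3, 12):
        from setuptools.extern.backports import tarfile
    else:
        import tarfile
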
diff --git a/tools/vendored.py b/tools/vendored.py
index e33a44f291..232e9625d2 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -42,7 +42,7 @@ def rewrite_jaraco_text(pkg_files, new_root):
         file.write_text(text)
 
 
-def repair_jaraco_namespace(pkg_files):
+def repair_namespace(pkg_files):
     # required for zip-packaged setuptools #3084
     pkg_files.joinpath('__init__.py').write_text('')
 
@@ -57,6 +57,16 @@ def rewrite_jaraco_functools(pkg_files, new_root):
         file.write_text(text)
 
 
+def rewrite_jaraco_context(pkg_files, new_root):
+    """
+    Rewrite imports in jaraco.context to redirect to vendored copies.
+    """
+    for file in pkg_files.glob('context.py'):
+        text = file.read_text()
+        text = re.sub(r' (backports)', rf' {new_root}.\1', text)
+        file.write_text(text)
+
+
 def rewrite_importlib_resources(pkg_files, new_root):
     """
     Rewrite imports in importlib_resources to redirect to vendored copies.
@@ -132,9 +142,11 @@ def update_pkg_resources():
     vendor = Path('pkg_resources/_vendor')
     install(vendor)
     rewrite_packaging(vendor / 'packaging', 'pkg_resources.extern')
-    repair_jaraco_namespace(vendor / 'jaraco')
+    repair_namespace(vendor / 'jaraco')
+    repair_namespace(vendor / 'backports')
     rewrite_jaraco_text(vendor / 'jaraco/text', 'pkg_resources.extern')
     rewrite_jaraco_functools(vendor / 'jaraco/functools', 'pkg_resources.extern')
+    rewrite_jaraco_context(vendor / 'jaraco', 'pkg_resources.extern')
     rewrite_importlib_resources(vendor / 'importlib_resources', 'pkg_resources.extern')
     rewrite_more_itertools(vendor / "more_itertools")
     rewrite_platformdirs(vendor / "platformdirs")
@@ -144,9 +156,11 @@ def update_setuptools():
     vendor = Path('setuptools/_vendor')
     install(vendor)
     rewrite_packaging(vendor / 'packaging', 'setuptools.extern')
-    repair_jaraco_namespace(vendor / 'jaraco')
+    repair_namespace(vendor / 'jaraco')
+    repair_namespace(vendor / 'backports')
     rewrite_jaraco_text(vendor / 'jaraco/text', 'setuptools.extern')
     rewrite_jaraco_functools(vendor / 'jaraco/functools', 'setuptools.extern')
+    rewrite_jaraco_context(vendor / 'jaraco', 'setuptools.extern')
     rewrite_importlib_resources(vendor / 'importlib_resources', 'setuptools.extern')
     rewrite_importlib_metadata(vendor / 'importlib_metadata', 'setuptools.extern')
     rewrite_more_itertools(vendor / "more_itertools")
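
``rewrite_jaraco_context`` applies the same mechanical rewrite to ``backports``, while ``repair_namespace`` writes an empty ``__init__.py`` so the directory imports cleanly from zip-packaged setuptools. Applied to the vendored ``jaraco/context.py``, the substitution yields exactly the import patched in above; a sketch:

    import re

    line = '    from backports import tarfile\n'
    print(re.sub(r' (backports)', r' setuptools.extern.\1', line))
    # ->     from setuptools.extern.backports import tarfile
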

From c509c6cb4bbca6cf9ea189308ea7e1d6471055c2 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 14:00:49 -0400
Subject: [PATCH 0508/1761] Exclude vendored packages and tools from coverage
 checks.

---
 .coveragerc | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/.coveragerc b/.coveragerc
index 1f214acf38..5b7fdefd2a 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,7 +2,12 @@
 omit =
 	# leading `*/` for pytest-dev/pytest-cov#456
 	*/.tox/*
+
+	# local
+	*/_vendor/*
+	*/tools/*
 	*/setuptools/_distutils/*
+
 disable_warnings =
 	couldnt-parse
 

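The omit entries are fnmatch-style path globs; the leading `*/` makes them match the absolute paths that pytest-cov reports (pytest-dev/pytest-cov#456, cited in the file). A rough approximation of the matching with stdlib fnmatch (coverage.py uses its own matcher, so this is illustrative only):

    from fnmatch import fnmatch

    patterns = ['*/.tox/*', '*/_vendor/*', '*/tools/*',
                '*/setuptools/_distutils/*']
    path = '/home/user/setuptools/setuptools/_vendor/packaging/tags.py'
    # True -> the file is excluded from the coverage report.
    print(any(fnmatch(path, p) for p in patterns))
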
From 88a8caebc82a706da03c8002fc0f77ffb110fe64 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 14:19:41 -0400
Subject: [PATCH 0509/1761] =?UTF-8?q?Bump=20version:=2069.3.0=20=E2=86=92?=
 =?UTF-8?q?=2069.4.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 NEWS.rst                       | 9 +++++++++
 newsfragments/4298.feature.rst | 1 -
 setup.cfg                      | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4298.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index a76d5b66d7..007a8ec0f5 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 69.3.0
+current_version = 69.4.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 7822ec6325..0fcbdfc9a6 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v69.4.0
+=======
+
+Features
+--------
+
+- Merged with pypa/distutils@55982565e, including interoperability improvements for rfc822_escape (pypa/distutils#213), dynamic resolution of config_h_filename for Python 3.13 compatibility (pypa/distutils#219), added support for the z/OS compiler (pypa/distutils#216), modernized compiler options in unixcompiler (pypa/distutils#214), fixed accumulating flags bug after compile/link (pypa/distutils#207), fixed encoding warnings (pypa/distutils#236), and general quality improvements (pypa/distutils#234). (#4298)
+
+
 v69.3.0
 =======
 
diff --git a/newsfragments/4298.feature.rst b/newsfragments/4298.feature.rst
deleted file mode 100644
index 21d680d486..0000000000
--- a/newsfragments/4298.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Merged with pypa/distutils@55982565e, including interoperability improvements for rfc822_escape (pypa/distutils#213), dynamic resolution of config_h_filename for Python 3.13 compatibility (pypa/distutils#219), added support for the z/OS compiler (pypa/distutils#216), modernized compiler options in unixcompiler (pypa/distutils#214), fixed accumulating flags bug after compile/link (pypa/distutils#207), fixed encoding warnings (pypa/distutils#236), and general quality improvements (pypa/distutils#234).
diff --git a/setup.cfg b/setup.cfg
index bab3efa52c..02078f7466 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = setuptools
-version = 69.3.0
+version = 69.4.0
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
 description = Easily download, build, install, upgrade, and uninstall Python packages

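With commit = True and tag = True, `bumpversion minor` rewrites every configured occurrence of the version, then commits and tags the result. The file edits amount to a literal substitution, roughly as follows (an illustrative sketch; the real tool derives the new version from its parse/serialize config rather than taking both strings as input):

    from pathlib import Path

    def bump(path: str, old: str, new: str) -> None:
        # Replace the literal old version string with the new one.
        p = Path(path)
        p.write_text(p.read_text().replace(old, new))

    for name in ('.bumpversion.cfg', 'setup.cfg'):
        bump(name, '69.3.0', '69.4.0')
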
From ab67b5e17158dcb208b81cec3c248b31228c5bb5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 12 Apr 2024 12:39:41 -0400
Subject: [PATCH 0510/1761] Update to packaging 24

---
 newsfragments/4301.feature.rst                |   1 +
 .../_vendor/packaging-23.1.dist-info/RECORD   |  37 --
 .../INSTALLER                                 |   0
 .../LICENSE                                   |   0
 .../LICENSE.APACHE                            |   0
 .../LICENSE.BSD                               |   0
 .../METADATA                                  |   5 +-
 .../_vendor/packaging-24.0.dist-info/RECORD   |  37 ++
 .../REQUESTED                                 |   0
 .../_vendor/packaging-24.0.dist-info}/WHEEL   |   2 +-
 pkg_resources/_vendor/packaging/__init__.py   |   4 +-
 pkg_resources/_vendor/packaging/_manylinux.py |  74 +--
 pkg_resources/_vendor/packaging/_musllinux.py |  19 +-
 pkg_resources/_vendor/packaging/_parser.py    |  13 +-
 pkg_resources/_vendor/packaging/metadata.py   | 441 +++++++++++++++++-
 .../_vendor/packaging/requirements.py         |  45 +-
 pkg_resources/_vendor/packaging/specifiers.py |  63 +--
 pkg_resources/_vendor/packaging/tags.py       |  63 ++-
 pkg_resources/_vendor/packaging/utils.py      |  39 +-
 pkg_resources/_vendor/packaging/version.py    |  63 ++-
 pkg_resources/_vendor/vendored.txt            |   2 +-
 .../_vendor/packaging-23.1.dist-info/RECORD   |  37 --
 .../INSTALLER                                 |   0
 .../LICENSE                                   |   0
 .../LICENSE.APACHE                            |   0
 .../LICENSE.BSD                               |   0
 .../METADATA                                  |   5 +-
 .../_vendor/packaging-24.0.dist-info/RECORD   |  37 ++
 .../REQUESTED                                 |   0
 .../_vendor/packaging-24.0.dist-info}/WHEEL   |   2 +-
 setuptools/_vendor/packaging/__init__.py      |   4 +-
 setuptools/_vendor/packaging/_manylinux.py    |  74 +--
 setuptools/_vendor/packaging/_musllinux.py    |  19 +-
 setuptools/_vendor/packaging/_parser.py       |  13 +-
 setuptools/_vendor/packaging/metadata.py      | 441 +++++++++++++++++-
 setuptools/_vendor/packaging/requirements.py  |  45 +-
 setuptools/_vendor/packaging/specifiers.py    |  63 +--
 setuptools/_vendor/packaging/tags.py          |  63 ++-
 setuptools/_vendor/packaging/utils.py         |  39 +-
 setuptools/_vendor/packaging/version.py       |  63 ++-
 setuptools/_vendor/vendored.txt               |   2 +-
 41 files changed, 1413 insertions(+), 402 deletions(-)
 create mode 100644 newsfragments/4301.feature.rst
 delete mode 100644 pkg_resources/_vendor/packaging-23.1.dist-info/RECORD
 rename pkg_resources/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/LICENSE (100%)
 rename pkg_resources/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/LICENSE.APACHE (100%)
 rename pkg_resources/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/LICENSE.BSD (100%)
 rename pkg_resources/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/METADATA (95%)
 create mode 100644 pkg_resources/_vendor/packaging-24.0.dist-info/RECORD
 rename pkg_resources/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/REQUESTED (100%)
 rename {setuptools/_vendor/packaging-23.1.dist-info => pkg_resources/_vendor/packaging-24.0.dist-info}/WHEEL (72%)
 delete mode 100644 setuptools/_vendor/packaging-23.1.dist-info/RECORD
 rename setuptools/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/LICENSE (100%)
 rename setuptools/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/LICENSE.APACHE (100%)
 rename setuptools/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/LICENSE.BSD (100%)
 rename setuptools/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/METADATA (95%)
 create mode 100644 setuptools/_vendor/packaging-24.0.dist-info/RECORD
 rename setuptools/_vendor/{packaging-23.1.dist-info => packaging-24.0.dist-info}/REQUESTED (100%)
 rename {pkg_resources/_vendor/packaging-23.1.dist-info => setuptools/_vendor/packaging-24.0.dist-info}/WHEEL (72%)

diff --git a/newsfragments/4301.feature.rst b/newsfragments/4301.feature.rst
new file mode 100644
index 0000000000..28ceb2a689
--- /dev/null
+++ b/newsfragments/4301.feature.rst
@@ -0,0 +1 @@
+Updated vendored packaging to version 24.0.
diff --git a/pkg_resources/_vendor/packaging-23.1.dist-info/RECORD b/pkg_resources/_vendor/packaging-23.1.dist-info/RECORD
deleted file mode 100644
index e041f20f6a..0000000000
--- a/pkg_resources/_vendor/packaging-23.1.dist-info/RECORD
+++ /dev/null
@@ -1,37 +0,0 @@
-packaging-23.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-packaging-23.1.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
-packaging-23.1.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
-packaging-23.1.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
-packaging-23.1.dist-info/METADATA,sha256=JnduJDlxs2IVeB-nIqAC3-HyNcPhP_MADd9_k_MjmaI,3082
-packaging-23.1.dist-info/RECORD,,
-packaging-23.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging-23.1.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
-packaging/__init__.py,sha256=kYVZSmXT6CWInT4UJPDtrSQBAZu8fMuFBxpv5GsDTLk,501
-packaging/__pycache__/__init__.cpython-312.pyc,,
-packaging/__pycache__/_elffile.cpython-312.pyc,,
-packaging/__pycache__/_manylinux.cpython-312.pyc,,
-packaging/__pycache__/_musllinux.cpython-312.pyc,,
-packaging/__pycache__/_parser.cpython-312.pyc,,
-packaging/__pycache__/_structures.cpython-312.pyc,,
-packaging/__pycache__/_tokenizer.cpython-312.pyc,,
-packaging/__pycache__/markers.cpython-312.pyc,,
-packaging/__pycache__/metadata.cpython-312.pyc,,
-packaging/__pycache__/requirements.cpython-312.pyc,,
-packaging/__pycache__/specifiers.cpython-312.pyc,,
-packaging/__pycache__/tags.cpython-312.pyc,,
-packaging/__pycache__/utils.cpython-312.pyc,,
-packaging/__pycache__/version.cpython-312.pyc,,
-packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
-packaging/_manylinux.py,sha256=ESGrDEVmBc8jYTtdZRAWiLk72lOzAKWeezFgoJ_MuBc,8926
-packaging/_musllinux.py,sha256=mvPk7FNjjILKRLIdMxR7IvJ1uggLgCszo-L9rjfpi0M,2524
-packaging/_parser.py,sha256=KJQkBh_Xbfb-qsB560YIEItrTpCZaOh4_YMfBtd5XIY,10194
-packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
-packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
-packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
-packaging/metadata.py,sha256=PjELMLxKG_iu3HWjKAOdKhuNrHfWgpdTF2Q4nObsZeM,16397
-packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging/requirements.py,sha256=hJzvtJyAvENc_VfwfhnOZV1851-VW8JCGh-R96NE4Pc,3287
-packaging/specifiers.py,sha256=ZOpqL_w_Kj6ZF_OWdliQUzhEyHlDbi6989kr-sF5GHs,39206
-packaging/tags.py,sha256=_1gLX8h1SgpjAdYCP9XqU37zRjXtU5ZliGy3IM-WcSM,18106
-packaging/utils.py,sha256=es0cCezKspzriQ-3V88h3yJzxz028euV2sUwM61kE-o,4355
-packaging/version.py,sha256=2NH3E57hzRhn0BV9boUBvgPsxlTqLJeI0EpYQoNvGi0,16326
diff --git a/pkg_resources/_vendor/packaging-23.1.dist-info/INSTALLER b/pkg_resources/_vendor/packaging-24.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/packaging-23.1.dist-info/INSTALLER
rename to pkg_resources/_vendor/packaging-24.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/packaging-23.1.dist-info/LICENSE b/pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/packaging-23.1.dist-info/LICENSE
rename to pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/packaging-23.1.dist-info/LICENSE.APACHE b/pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE.APACHE
similarity index 100%
rename from pkg_resources/_vendor/packaging-23.1.dist-info/LICENSE.APACHE
rename to pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE.APACHE
diff --git a/pkg_resources/_vendor/packaging-23.1.dist-info/LICENSE.BSD b/pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE.BSD
similarity index 100%
rename from pkg_resources/_vendor/packaging-23.1.dist-info/LICENSE.BSD
rename to pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE.BSD
diff --git a/pkg_resources/_vendor/packaging-23.1.dist-info/METADATA b/pkg_resources/_vendor/packaging-24.0.dist-info/METADATA
similarity index 95%
rename from pkg_resources/_vendor/packaging-23.1.dist-info/METADATA
rename to pkg_resources/_vendor/packaging-24.0.dist-info/METADATA
index c43882a826..10ab4390a9 100644
--- a/pkg_resources/_vendor/packaging-23.1.dist-info/METADATA
+++ b/pkg_resources/_vendor/packaging-24.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: packaging
-Version: 23.1
+Version: 24.0
 Summary: Core utilities for Python packages
 Author-email: Donald Stufft <donald@stufft.io>
 Requires-Python: >=3.7
@@ -17,6 +17,7 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Typing :: Typed
@@ -59,6 +60,8 @@ Use ``pip`` to install these utilities::
 
     pip install packaging
 
+The ``packaging`` library uses calendar-based versioning (``YY.N``).
+
 Discussion
 ----------
 
diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD b/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD
new file mode 100644
index 0000000000..bcf796c2f4
--- /dev/null
+++ b/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD
@@ -0,0 +1,37 @@
+packaging-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+packaging-24.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-24.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-24.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-24.0.dist-info/METADATA,sha256=0dESdhY_wHValuOrbgdebiEw04EbX4dkujlxPdEsFus,3203
+packaging-24.0.dist-info/RECORD,,
+packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496
+packaging/__pycache__/__init__.cpython-312.pyc,,
+packaging/__pycache__/_elffile.cpython-312.pyc,,
+packaging/__pycache__/_manylinux.cpython-312.pyc,,
+packaging/__pycache__/_musllinux.cpython-312.pyc,,
+packaging/__pycache__/_parser.cpython-312.pyc,,
+packaging/__pycache__/_structures.cpython-312.pyc,,
+packaging/__pycache__/_tokenizer.cpython-312.pyc,,
+packaging/__pycache__/markers.cpython-312.pyc,,
+packaging/__pycache__/metadata.cpython-312.pyc,,
+packaging/__pycache__/requirements.cpython-312.pyc,,
+packaging/__pycache__/specifiers.cpython-312.pyc,,
+packaging/__pycache__/tags.cpython-312.pyc,,
+packaging/__pycache__/utils.cpython-312.pyc,,
+packaging/__pycache__/version.cpython-312.pyc,,
+packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
+packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590
+packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676
+packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
+packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
+packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
+packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784
+packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
+packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
+packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236
diff --git a/pkg_resources/_vendor/packaging-23.1.dist-info/REQUESTED b/pkg_resources/_vendor/packaging-24.0.dist-info/REQUESTED
similarity index 100%
rename from pkg_resources/_vendor/packaging-23.1.dist-info/REQUESTED
rename to pkg_resources/_vendor/packaging-24.0.dist-info/REQUESTED
diff --git a/setuptools/_vendor/packaging-23.1.dist-info/WHEEL b/pkg_resources/_vendor/packaging-24.0.dist-info/WHEEL
similarity index 72%
rename from setuptools/_vendor/packaging-23.1.dist-info/WHEEL
rename to pkg_resources/_vendor/packaging-24.0.dist-info/WHEEL
index db4a255f3a..3b5e64b5e6 100644
--- a/setuptools/_vendor/packaging-23.1.dist-info/WHEEL
+++ b/pkg_resources/_vendor/packaging-24.0.dist-info/WHEEL
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: flit 3.8.0
+Generator: flit 3.9.0
 Root-Is-Purelib: true
 Tag: py3-none-any
diff --git a/pkg_resources/_vendor/packaging/__init__.py b/pkg_resources/_vendor/packaging/__init__.py
index 13cadc7f04..e7c0aa12ca 100644
--- a/pkg_resources/_vendor/packaging/__init__.py
+++ b/pkg_resources/_vendor/packaging/__init__.py
@@ -6,10 +6,10 @@
 __summary__ = "Core utilities for Python packages"
 __uri__ = "https://github.com/pypa/packaging"
 
-__version__ = "23.1"
+__version__ = "24.0"
 
 __author__ = "Donald Stufft and individual contributors"
 __email__ = "donald@stufft.io"
 
 __license__ = "BSD-2-Clause or Apache-2.0"
-__copyright__ = "2014-2019 %s" % __author__
+__copyright__ = "2014 %s" % __author__
diff --git a/pkg_resources/_vendor/packaging/_manylinux.py b/pkg_resources/_vendor/packaging/_manylinux.py
index 449c655be6..ad62505f3f 100644
--- a/pkg_resources/_vendor/packaging/_manylinux.py
+++ b/pkg_resources/_vendor/packaging/_manylinux.py
@@ -5,7 +5,7 @@
 import re
 import sys
 import warnings
-from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
 
 from ._elffile import EIClass, EIData, ELFFile, EMachine
 
@@ -50,12 +50,21 @@ def _is_linux_i686(executable: str) -> bool:
         )
 
 
-def _have_compatible_abi(executable: str, arch: str) -> bool:
-    if arch == "armv7l":
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+    if "armv7l" in archs:
         return _is_linux_armhf(executable)
-    if arch == "i686":
+    if "i686" in archs:
         return _is_linux_i686(executable)
-    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+    allowed_archs = {
+        "x86_64",
+        "aarch64",
+        "ppc64",
+        "ppc64le",
+        "s390x",
+        "loongarch64",
+        "riscv64",
+    }
+    return any(arch in allowed_archs for arch in archs)
 
 
 # If glibc ever changes its major version, we need to know what the last
@@ -81,7 +90,7 @@ def _glibc_version_string_confstr() -> Optional[str]:
     # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
     try:
         # Should be a string like "glibc 2.17".
-        version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
+        version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
         assert version_string is not None
         _, version = version_string.rsplit()
     except (AssertionError, AttributeError, OSError, ValueError):
@@ -167,13 +176,13 @@ def _get_glibc_version() -> Tuple[int, int]:
 
 
 # From PEP 513, PEP 600
-def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
     sys_glibc = _get_glibc_version()
     if sys_glibc < version:
         return False
     # Check for presence of _manylinux module.
     try:
-        import _manylinux  # noqa
+        import _manylinux
     except ImportError:
         return True
     if hasattr(_manylinux, "manylinux_compatible"):
@@ -203,12 +212,22 @@ def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
 }
 
 
-def platform_tags(linux: str, arch: str) -> Iterator[str]:
-    if not _have_compatible_abi(sys.executable, arch):
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+    """Generate manylinux tags compatible to the current platform.
+
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be manylinux-compatible.
+
+    :returns: An iterator of compatible manylinux tags.
+    """
+    if not _have_compatible_abi(sys.executable, archs):
         return
     # Oldest glibc to be supported regardless of architecture is (2, 17).
     too_old_glibc2 = _GLibCVersion(2, 16)
-    if arch in {"x86_64", "i686"}:
+    if set(archs) & {"x86_64", "i686"}:
         # On x86/i686 also oldest glibc to be supported is (2, 5).
         too_old_glibc2 = _GLibCVersion(2, 4)
     current_glibc = _GLibCVersion(*_get_glibc_version())
@@ -222,19 +241,20 @@ def platform_tags(linux: str, arch: str) -> Iterator[str]:
     for glibc_major in range(current_glibc.major - 1, 1, -1):
         glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
         glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
-    for glibc_max in glibc_max_list:
-        if glibc_max.major == too_old_glibc2.major:
-            min_minor = too_old_glibc2.minor
-        else:
-            # For other glibc major versions oldest supported is (x, 0).
-            min_minor = -1
-        for glibc_minor in range(glibc_max.minor, min_minor, -1):
-            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
-            tag = "manylinux_{}_{}".format(*glibc_version)
-            if _is_compatible(tag, arch, glibc_version):
-                yield linux.replace("linux", tag)
-            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
-            if glibc_version in _LEGACY_MANYLINUX_MAP:
-                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
-                if _is_compatible(legacy_tag, arch, glibc_version):
-                    yield linux.replace("linux", legacy_tag)
+    for arch in archs:
+        for glibc_max in glibc_max_list:
+            if glibc_max.major == too_old_glibc2.major:
+                min_minor = too_old_glibc2.minor
+            else:
+                # For other glibc major versions oldest supported is (x, 0).
+                min_minor = -1
+            for glibc_minor in range(glibc_max.minor, min_minor, -1):
+                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+                tag = "manylinux_{}_{}".format(*glibc_version)
+                if _is_compatible(arch, glibc_version):
+                    yield f"{tag}_{arch}"
+                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+                if glibc_version in _LEGACY_MANYLINUX_MAP:
+                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+                    if _is_compatible(arch, glibc_version):
+                        yield f"{legacy_tag}_{arch}"
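
The architecture loop is now outermost, and tags are built with f-strings instead of str.replace on a precomputed `linux_<arch>` string, so one call can emit tags for several compatible architectures in priority order. A condensed, self-contained model of the stream it yields (the legacy aliases are the real PEP 600 mapping; the glibc version and everything else is illustrative):

    from typing import Iterator, List, NamedTuple

    class GLibC(NamedTuple):
        major: int
        minor: int

    # PEP 600 legacy aliases.
    LEGACY = {GLibC(2, 5): 'manylinux1',
              GLibC(2, 12): 'manylinux2010',
              GLibC(2, 17): 'manylinux2014'}

    def sketch_tags(archs: List[str],
                    current: GLibC = GLibC(2, 28)) -> Iterator[str]:
        for arch in archs:                            # outer loop per the patch
            for minor in range(current.minor, 4, -1):  # down to glibc 2.5
                v = GLibC(current.major, minor)
                yield f'manylinux_{v.major}_{v.minor}_{arch}'
                if v in LEGACY:
                    yield f'{LEGACY[v]}_{arch}'

    print(list(sketch_tags(['x86_64', 'i686']))[:3])
    # ['manylinux_2_28_x86_64', 'manylinux_2_27_x86_64',
    #  'manylinux_2_26_x86_64']
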
diff --git a/pkg_resources/_vendor/packaging/_musllinux.py b/pkg_resources/_vendor/packaging/_musllinux.py
index 706ba600a9..86419df9d7 100644
--- a/pkg_resources/_vendor/packaging/_musllinux.py
+++ b/pkg_resources/_vendor/packaging/_musllinux.py
@@ -8,7 +8,7 @@
 import re
 import subprocess
 import sys
-from typing import Iterator, NamedTuple, Optional
+from typing import Iterator, NamedTuple, Optional, Sequence
 
 from ._elffile import ELFFile
 
@@ -47,24 +47,27 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
         return None
     if ld is None or "musl" not in ld:
         return None
-    proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
     return _parse_musl_version(proc.stderr)
 
 
-def platform_tags(arch: str) -> Iterator[str]:
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
     """Generate musllinux tags compatible to the current platform.
 
-    :param arch: Should be the part of platform tag after the ``linux_``
-        prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
-        prerequisite for the current platform to be musllinux-compatible.
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be musllinux-compatible.
 
     :returns: An iterator of compatible musllinux tags.
     """
     sys_musl = _get_musl_version(sys.executable)
     if sys_musl is None:  # Python not dynamically linked against musl.
         return
-    for minor in range(sys_musl.minor, -1, -1):
-        yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+    for arch in archs:
+        for minor in range(sys_musl.minor, -1, -1):
+            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
 
 
 if __name__ == "__main__":  # pragma: no cover
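
The musllinux reshaping is the same: one pass per architecture, newest musl minor first. A self-contained sketch for a musl 1.2 interpreter (values illustrative):

    def sketch_musllinux_tags(archs, musl=(1, 2)):
        major, top_minor = musl
        for arch in archs:
            for minor in range(top_minor, -1, -1):
                yield f'musllinux_{major}_{minor}_{arch}'

    print(list(sketch_musllinux_tags(['x86_64'])))
    # ['musllinux_1_2_x86_64', 'musllinux_1_1_x86_64',
    #  'musllinux_1_0_x86_64']
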
diff --git a/pkg_resources/_vendor/packaging/_parser.py b/pkg_resources/_vendor/packaging/_parser.py
index 5a18b758fe..684df75457 100644
--- a/pkg_resources/_vendor/packaging/_parser.py
+++ b/pkg_resources/_vendor/packaging/_parser.py
@@ -252,7 +252,13 @@ def _parse_version_many(tokenizer: Tokenizer) -> str:
 # Recursive descent parser for marker expression
 # --------------------------------------------------------------------------------------
 def parse_marker(source: str) -> MarkerList:
-    return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))
+    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
+    retval = _parse_marker(tokenizer)
+    tokenizer.expect("END", expected="end of marker expression")
+    return retval
 
 
 def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
@@ -318,10 +324,7 @@ def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
 
 
 def process_env_var(env_var: str) -> Variable:
-    if (
-        env_var == "platform_python_implementation"
-        or env_var == "python_implementation"
-    ):
+    if env_var in ("platform_python_implementation", "python_implementation"):
         return Variable("platform_python_implementation")
     else:
         return Variable(env_var)
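
With _parse_full_marker, the tokenizer must reach END, so trailing junk after an otherwise valid marker is now rejected instead of silently ignored. Through the public API (the exact error text may differ):

    from packaging.markers import InvalidMarker, Marker

    Marker('python_version >= "3.8"')  # parses cleanly

    try:
        # Previously the parser stopped after the first complete
        # expression and ignored the rest.
        Marker('python_version >= "3.8" trailing junk')
    except InvalidMarker as exc:
        print(exc)
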
diff --git a/pkg_resources/_vendor/packaging/metadata.py b/pkg_resources/_vendor/packaging/metadata.py
index e76a60c395..fb27493079 100644
--- a/pkg_resources/_vendor/packaging/metadata.py
+++ b/pkg_resources/_vendor/packaging/metadata.py
@@ -5,23 +5,77 @@
 import email.policy
 import sys
 import typing
-from typing import Dict, List, Optional, Tuple, Union, cast
-
-if sys.version_info >= (3, 8):  # pragma: no cover
-    from typing import TypedDict
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from . import requirements, specifiers, utils, version as version_module
+
+T = typing.TypeVar("T")
+if sys.version_info[:2] >= (3, 8):  # pragma: no cover
+    from typing import Literal, TypedDict
 else:  # pragma: no cover
     if typing.TYPE_CHECKING:
-        from typing_extensions import TypedDict
+        from typing_extensions import Literal, TypedDict
     else:
         try:
-            from typing_extensions import TypedDict
+            from typing_extensions import Literal, TypedDict
         except ImportError:
 
+            class Literal:
+                def __init_subclass__(*_args, **_kwargs):
+                    pass
+
             class TypedDict:
                 def __init_subclass__(*_args, **_kwargs):
                     pass
 
 
+try:
+    ExceptionGroup
+except NameError:  # pragma: no cover
+
+    class ExceptionGroup(Exception):  # noqa: N818
+        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+        If :external:exc:`ExceptionGroup` is already defined by Python itself,
+        that version is used instead.
+        """
+
+        message: str
+        exceptions: List[Exception]
+
+        def __init__(self, message: str, exceptions: List[Exception]) -> None:
+            self.message = message
+            self.exceptions = exceptions
+
+        def __repr__(self) -> str:
+            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
+
+else:  # pragma: no cover
+    ExceptionGroup = ExceptionGroup
+
+
+class InvalidMetadata(ValueError):
+    """A metadata field contains invalid data."""
+
+    field: str
+    """The name of the field that contains invalid data."""
+
+    def __init__(self, field: str, message: str) -> None:
+        self.field = field
+        super().__init__(message)
+
+
 # The RawMetadata class attempts to make as few assumptions about the underlying
 # serialization formats as possible. The idea is that as long as a serialization
 # formats offer some very basic primitives in *some* way then we can support
@@ -33,7 +87,8 @@ class RawMetadata(TypedDict, total=False):
     provided). The key is lower-case and underscores are used instead of dashes
     compared to the equivalent core metadata field. Any core metadata field that
     can be specified multiple times or can hold multiple values in a single
-    field have a key with a plural name.
+    field has a key with a plural name. See :class:`Metadata` whose attributes
+    match the keys of this dictionary.
 
     Core metadata fields that can be specified multiple times are stored as a
     list or dict depending on which is appropriate for the field. Any fields
@@ -77,7 +132,7 @@ class RawMetadata(TypedDict, total=False):
     # but got stuck without ever being able to build consensus on
     # it and ultimately ended up withdrawn.
     #
-    # However, a number of tools had started emiting METADATA with
+    # However, a number of tools had started emitting METADATA with
     # `2.0` Metadata-Version, so for historical reasons, this version
     # was skipped.
 
@@ -110,7 +165,7 @@ class RawMetadata(TypedDict, total=False):
     "version",
 }
 
-_LIST_STRING_FIELDS = {
+_LIST_FIELDS = {
     "classifiers",
     "dynamic",
     "obsoletes",
@@ -125,6 +180,10 @@ class RawMetadata(TypedDict, total=False):
     "supported_platforms",
 }
 
+_DICT_FIELDS = {
+    "project_urls",
+}
+
 
 def _parse_keywords(data: str) -> List[str]:
     """Split a string of comma-separate keyboards into a list of keywords."""
@@ -230,10 +289,11 @@ def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
     "supported-platform": "supported_platforms",
     "version": "version",
 }
+_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
 
 
 def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
-    """Parse a distribution's metadata.
+    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
 
     This function returns a two-item tuple of dicts. The first dict is of
     recognized fields from the core metadata specification. Fields that can be
@@ -267,7 +327,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
         # We use get_all() here, even for fields that aren't multiple use,
         # because otherwise someone could have e.g. two Name fields, and we
         # would just silently ignore it rather than doing something about it.
-        headers = parsed.get_all(name)
+        headers = parsed.get_all(name) or []
 
         # The way the email module works when parsing bytes is that it
         # unconditionally decodes the bytes as ascii using the surrogateescape
@@ -349,7 +409,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
         # If this is one of our list of string fields, then we can just assign
         # the value, since email *only* has strings, and our get_all() call
         # above ensures that this is a list.
-        elif raw_name in _LIST_STRING_FIELDS:
+        elif raw_name in _LIST_FIELDS:
             raw[raw_name] = value
         # Special Case: Keywords
         # The keywords field is implemented in the metadata spec as a str,
@@ -406,3 +466,360 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
     # way this function is implemented, our `TypedDict` can only have valid key
     # names.
     return cast(RawMetadata, raw), unparsed
+
+
+_NOT_FOUND = object()
+
+
+# Keep the two values in sync.
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+
+_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
+class _Validator(Generic[T]):
+    """Validate a metadata field.
+
+    All _process_*() methods correspond to a core metadata field. The method is
+    called with the field's raw value. If the raw value is valid it is returned
+    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
+    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
+    as appropriate).
+    """
+
+    name: str
+    raw_name: str
+    added: _MetadataVersion
+
+    def __init__(
+        self,
+        *,
+        added: _MetadataVersion = "1.0",
+    ) -> None:
+        self.added = added
+
+    def __set_name__(self, _owner: "Metadata", name: str) -> None:
+        self.name = name
+        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
+
+    def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
+        # With Python 3.8, the caching can be replaced with functools.cached_property().
+        # No need to check the cache as attribute lookup will resolve into the
+        # instance's __dict__ before __get__ is called.
+        cache = instance.__dict__
+        value = instance._raw.get(self.name)
+
+        # To make the _process_* methods easier, we'll check if the value is None
+        # and if this field is NOT a required attribute, and if both of those
+        # things are true, we'll skip the converter. This will mean that the
+        # converters never have to deal with the None union.
+        if self.name in _REQUIRED_ATTRS or value is not None:
+            try:
+                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
+            except AttributeError:
+                pass
+            else:
+                value = converter(value)
+
+        cache[self.name] = value
+        try:
+            del instance._raw[self.name]  # type: ignore[misc]
+        except KeyError:
+            pass
+
+        return cast(T, value)
+
+    def _invalid_metadata(
+        self, msg: str, cause: Optional[Exception] = None
+    ) -> InvalidMetadata:
+        exc = InvalidMetadata(
+            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
+        )
+        exc.__cause__ = cause
+        return exc
+
+    def _process_metadata_version(self, value: str) -> _MetadataVersion:
+        # Implicitly makes Metadata-Version required.
+        if value not in _VALID_METADATA_VERSIONS:
+            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
+        return cast(_MetadataVersion, value)
+
+    def _process_name(self, value: str) -> str:
+        if not value:
+            raise self._invalid_metadata("{field} is a required field")
+        # Validate the name as a side-effect.
+        try:
+            utils.canonicalize_name(value, validate=True)
+        except utils.InvalidName as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            )
+        else:
+            return value
+
+    def _process_version(self, value: str) -> version_module.Version:
+        if not value:
+            raise self._invalid_metadata("{field} is a required field")
+        try:
+            return version_module.parse(value)
+        except version_module.InvalidVersion as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            )
+
+    def _process_summary(self, value: str) -> str:
+        """Check the field contains no newlines."""
+        if "\n" in value:
+            raise self._invalid_metadata("{field} must be a single line")
+        return value
+
+    def _process_description_content_type(self, value: str) -> str:
+        content_types = {"text/plain", "text/x-rst", "text/markdown"}
+        message = email.message.EmailMessage()
+        message["content-type"] = value
+
+        content_type, parameters = (
+            # Defaults to `text/plain` if parsing failed.
+            message.get_content_type().lower(),
+            message["content-type"].params,
+        )
+        # Check if content-type is valid or defaulted to `text/plain` and thus was
+        # not parseable.
+        if content_type not in content_types or content_type not in value.lower():
+            raise self._invalid_metadata(
+                f"{{field}} must be one of {list(content_types)}, not {value!r}"
+            )
+
+        charset = parameters.get("charset", "UTF-8")
+        if charset != "UTF-8":
+            raise self._invalid_metadata(
+                f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
+            )
+
+        markdown_variants = {"GFM", "CommonMark"}
+        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
+        if content_type == "text/markdown" and variant not in markdown_variants:
+            raise self._invalid_metadata(
+                f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
+                f"not {variant!r}",
+            )
+        return value
+
+    def _process_dynamic(self, value: List[str]) -> List[str]:
+        for dynamic_field in map(str.lower, value):
+            if dynamic_field in {"name", "version", "metadata-version"}:
+                raise self._invalid_metadata(
+                    f"{value!r} is not allowed as a dynamic field"
+                )
+            elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
+                raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
+        return list(map(str.lower, value))
+
+    def _process_provides_extra(
+        self,
+        value: List[str],
+    ) -> List[utils.NormalizedName]:
+        normalized_names = []
+        try:
+            for name in value:
+                normalized_names.append(utils.canonicalize_name(name, validate=True))
+        except utils.InvalidName as exc:
+            raise self._invalid_metadata(
+                f"{name!r} is invalid for {{field}}", cause=exc
+            )
+        else:
+            return normalized_names
+
+    def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
+        try:
+            return specifiers.SpecifierSet(value)
+        except specifiers.InvalidSpecifier as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            )
+
+    def _process_requires_dist(
+        self,
+        value: List[str],
+    ) -> List[requirements.Requirement]:
+        reqs = []
+        try:
+            for req in value:
+                reqs.append(requirements.Requirement(req))
+        except requirements.InvalidRequirement as exc:
+            raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
+        else:
+            return reqs
+
+
+class Metadata:
+    """Representation of distribution metadata.
+
+    Compared to :class:`RawMetadata`, this class provides objects representing
+    metadata fields instead of only using built-in types. Any invalid metadata
+    will cause :exc:`InvalidMetadata` to be raised (with a
+    :py:attr:`~BaseException.__cause__` attribute as appropriate).
+    """
+
+    _raw: RawMetadata
+
+    @classmethod
+    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
+        """Create an instance from :class:`RawMetadata`.
+
+        If *validate* is true, all metadata will be validated. All exceptions
+        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+        """
+        ins = cls()
+        ins._raw = data.copy()  # Mutations occur due to caching enriched values.
+
+        if validate:
+            exceptions: List[Exception] = []
+            try:
+                metadata_version = ins.metadata_version
+                metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
+            except InvalidMetadata as metadata_version_exc:
+                exceptions.append(metadata_version_exc)
+                metadata_version = None
+
+            # Make sure to check for the fields that are present, the required
+            # fields (so their absence can be reported).
+            fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
+            # Remove fields that have already been checked.
+            fields_to_check -= {"metadata_version"}
+
+            for key in fields_to_check:
+                try:
+                    if metadata_version:
+                        # Can't use getattr() as that triggers descriptor protocol which
+                        # will fail due to no value for the instance argument.
+                        try:
+                            field_metadata_version = cls.__dict__[key].added
+                        except KeyError:
+                            exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
+                            exceptions.append(exc)
+                            continue
+                        field_age = _VALID_METADATA_VERSIONS.index(
+                            field_metadata_version
+                        )
+                        if field_age > metadata_age:
+                            field = _RAW_TO_EMAIL_MAPPING[key]
+                            exc = InvalidMetadata(
+                                field,
+                                "{field} introduced in metadata version "
+                                "{field_metadata_version}, not {metadata_version}",
+                            )
+                            exceptions.append(exc)
+                            continue
+                    getattr(ins, key)
+                except InvalidMetadata as exc:
+                    exceptions.append(exc)
+
+            if exceptions:
+                raise ExceptionGroup("invalid metadata", exceptions)
+
+        return ins
+
+    @classmethod
+    def from_email(
+        cls, data: Union[bytes, str], *, validate: bool = True
+    ) -> "Metadata":
+        """Parse metadata from email headers.
+
+        If *validate* is true, the metadata will be validated. All exceptions
+        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+        """
+        raw, unparsed = parse_email(data)
+
+        if validate:
+            exceptions: list[Exception] = []
+            for unparsed_key in unparsed:
+                if unparsed_key in _EMAIL_TO_RAW_MAPPING:
+                    message = f"{unparsed_key!r} has invalid data"
+                else:
+                    message = f"unrecognized field: {unparsed_key!r}"
+                exceptions.append(InvalidMetadata(unparsed_key, message))
+
+            if exceptions:
+                raise ExceptionGroup("unparsed", exceptions)
+
+        try:
+            return cls.from_raw(raw, validate=validate)
+        except ExceptionGroup as exc_group:
+            raise ExceptionGroup(
+                "invalid or unparsed metadata", exc_group.exceptions
+            ) from None
+
+    metadata_version: _Validator[_MetadataVersion] = _Validator()
+    """:external:ref:`core-metadata-metadata-version`
+    (required; validated to be a valid metadata version)"""
+    name: _Validator[str] = _Validator()
+    """:external:ref:`core-metadata-name`
+    (required; validated using :func:`~packaging.utils.canonicalize_name` and its
+    *validate* parameter)"""
+    version: _Validator[version_module.Version] = _Validator()
+    """:external:ref:`core-metadata-version` (required)"""
+    dynamic: _Validator[Optional[List[str]]] = _Validator(
+        added="2.2",
+    )
+    """:external:ref:`core-metadata-dynamic`
+    (validated against core metadata field names and lowercased)"""
+    platforms: _Validator[Optional[List[str]]] = _Validator()
+    """:external:ref:`core-metadata-platform`"""
+    supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-supported-platform`"""
+    summary: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
+    description: _Validator[Optional[str]] = _Validator()  # TODO 2.1: can be in body
+    """:external:ref:`core-metadata-description`"""
+    description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
+    """:external:ref:`core-metadata-description-content-type` (validated)"""
+    keywords: _Validator[Optional[List[str]]] = _Validator()
+    """:external:ref:`core-metadata-keywords`"""
+    home_page: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-home-page`"""
+    download_url: _Validator[Optional[str]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-download-url`"""
+    author: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-author`"""
+    author_email: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-author-email`"""
+    maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-maintainer`"""
+    maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-maintainer-email`"""
+    license: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-license`"""
+    classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-classifier`"""
+    requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
+        added="1.2"
+    )
+    """:external:ref:`core-metadata-requires-dist`"""
+    requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
+        added="1.2"
+    )
+    """:external:ref:`core-metadata-requires-python`"""
+    # Because `Requires-External` allows for non-PEP 440 version specifiers, we
+    # don't do any processing on the values.
+    requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-requires-external`"""
+    project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-project-url`"""
+    # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
+    # regardless of metadata version.
+    provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
+        added="2.1",
+    )
+    """:external:ref:`core-metadata-provides-extra`"""
+    provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-provides-dist`"""
+    obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-obsoletes-dist`"""
+    requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """``Requires`` (deprecated)"""
+    provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """``Provides`` (deprecated)"""
+    obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """``Obsoletes`` (deprecated)"""
diff --git a/pkg_resources/_vendor/packaging/requirements.py b/pkg_resources/_vendor/packaging/requirements.py
index f34bfa85c8..bdc43a7e98 100644
--- a/pkg_resources/_vendor/packaging/requirements.py
+++ b/pkg_resources/_vendor/packaging/requirements.py
@@ -2,13 +2,13 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
-import urllib.parse
-from typing import Any, List, Optional, Set
+from typing import Any, Iterator, Optional, Set
 
 from ._parser import parse_requirement as _parse_requirement
 from ._tokenizer import ParserSyntaxError
 from .markers import Marker, _normalize_extra_values
 from .specifiers import SpecifierSet
+from .utils import canonicalize_name
 
 
 class InvalidRequirement(ValueError):
@@ -37,57 +37,52 @@ def __init__(self, requirement_string: str) -> None:
             raise InvalidRequirement(str(e)) from e
 
         self.name: str = parsed.name
-        if parsed.url:
-            parsed_url = urllib.parse.urlparse(parsed.url)
-            if parsed_url.scheme == "file":
-                if urllib.parse.urlunparse(parsed_url) != parsed.url:
-                    raise InvalidRequirement("Invalid URL given")
-            elif not (parsed_url.scheme and parsed_url.netloc) or (
-                not parsed_url.scheme and not parsed_url.netloc
-            ):
-                raise InvalidRequirement(f"Invalid URL: {parsed.url}")
-            self.url: Optional[str] = parsed.url
-        else:
-            self.url = None
-        self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
+        self.url: Optional[str] = parsed.url or None
+        self.extras: Set[str] = set(parsed.extras or [])
         self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
         self.marker: Optional[Marker] = None
         if parsed.marker is not None:
             self.marker = Marker.__new__(Marker)
             self.marker._markers = _normalize_extra_values(parsed.marker)
 
-    def __str__(self) -> str:
-        parts: List[str] = [self.name]
+    def _iter_parts(self, name: str) -> Iterator[str]:
+        yield name
 
         if self.extras:
             formatted_extras = ",".join(sorted(self.extras))
-            parts.append(f"[{formatted_extras}]")
+            yield f"[{formatted_extras}]"
 
         if self.specifier:
-            parts.append(str(self.specifier))
+            yield str(self.specifier)
 
         if self.url:
-            parts.append(f"@ {self.url}")
+            yield f"@ {self.url}"
             if self.marker:
-                parts.append(" ")
+                yield " "
 
         if self.marker:
-            parts.append(f"; {self.marker}")
+            yield f"; {self.marker}"
 
-        return "".join(parts)
+    def __str__(self) -> str:
+        return "".join(self._iter_parts(self.name))
 
     def __repr__(self) -> str:
         return f""
 
     def __hash__(self) -> int:
-        return hash((self.__class__.__name__, str(self)))
+        return hash(
+            (
+                self.__class__.__name__,
+                *self._iter_parts(canonicalize_name(self.name)),
+            )
+        )
 
     def __eq__(self, other: Any) -> bool:
         if not isinstance(other, Requirement):
             return NotImplemented
 
         return (
-            self.name == other.name
+            canonicalize_name(self.name) == canonicalize_name(other.name)
             and self.extras == other.extras
             and self.specifier == other.specifier
             and self.url == other.url
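
Dropping the URL re-validation and canonicalizing the name in __eq__ and __hash__ means differently spelled names for the same project now compare and hash equal, while str() keeps the original spelling:

    from packaging.requirements import Requirement

    a = Requirement('Example-Dist[extra]>=1.0')
    b = Requirement('example_dist[extra]>=1.0')
    print(a == b)              # True: names are canonicalized
    print(hash(a) == hash(b))  # True: hash agrees with __eq__
    print(str(a))              # Example-Dist[extra]>=1.0
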
diff --git a/pkg_resources/_vendor/packaging/specifiers.py b/pkg_resources/_vendor/packaging/specifiers.py
index ba8fe37b7f..2d015bab59 100644
--- a/pkg_resources/_vendor/packaging/specifiers.py
+++ b/pkg_resources/_vendor/packaging/specifiers.py
@@ -11,17 +11,7 @@
 import abc
 import itertools
 import re
-from typing import (
-    Callable,
-    Iterable,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    TypeVar,
-    Union,
-)
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
 
 from .utils import canonicalize_version
 from .version import Version
@@ -383,7 +373,7 @@ def _compare_compatible(self, prospective: Version, spec: str) -> bool:
 
         # We want everything but the last item in the version, but we want to
         # ignore suffix segments.
-        prefix = ".".join(
+        prefix = _version_join(
             list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
         )
 
@@ -404,13 +394,13 @@ def _compare_equal(self, prospective: Version, spec: str) -> bool:
             )
             # Get the normalized version string ignoring the trailing .*
             normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
-            # Split the spec out by dots, and pretend that there is an implicit
-            # dot in between a release segment and a pre-release segment.
+            # Split the spec out by bangs and dots, and pretend that there is
+            # an implicit dot in between a release segment and a pre-release segment.
             split_spec = _version_split(normalized_spec)
 
-            # Split the prospective version out by dots, and pretend that there
-            # is an implicit dot in between a release segment and a pre-release
-            # segment.
+            # Split the prospective version out by bangs and dots, and pretend
+            # that there is an implicit dot in between a release segment and
+            # a pre-release segment.
             split_prospective = _version_split(normalized_prospective)
 
             # 0-pad the prospective version before shortening it to get the correct
@@ -644,8 +634,19 @@ def filter(
 
 
 def _version_split(version: str) -> List[str]:
+    """Split version into components.
+
+    The split components are intended for version comparison. The logic does
+    not attempt to retain the original version string, so joining the
+    components back with :func:`_version_join` may not produce the original
+    version string.
+    """
     result: List[str] = []
-    for item in version.split("."):
+
+    epoch, _, rest = version.rpartition("!")
+    result.append(epoch or "0")
+
+    for item in rest.split("."):
         match = _prefix_regex.search(item)
         if match:
             result.extend(match.groups())
@@ -654,6 +655,17 @@ def _version_split(version: str) -> List[str]:
     return result
 
 
+def _version_join(components: List[str]) -> str:
+    """Join split version components into a version string.
+
+    This function assumes the input came from :func:`_version_split`, where the
+    first component must be the epoch (either empty or numeric), and all other
+    components numeric.
+    """
+    epoch, *rest = components
+    return f"{epoch}!{'.'.join(rest)}"
+
+
 def _is_not_suffix(segment: str) -> bool:
     return not any(
         segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
@@ -675,7 +687,10 @@ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str
     left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
     right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
 
-    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+    return (
+        list(itertools.chain.from_iterable(left_split)),
+        list(itertools.chain.from_iterable(right_split)),
+    )
 
 
 class SpecifierSet(BaseSpecifier):
@@ -707,14 +722,8 @@ def __init__(
         # strip each item to remove leading/trailing whitespace.
         split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
 
-        # Parsed each individual specifier, attempting first to make it a
-        # Specifier.
-        parsed: Set[Specifier] = set()
-        for specifier in split_specifiers:
-            parsed.add(Specifier(specifier))
-
-        # Turn our parsed specifiers into a frozen set and save them for later.
-        self._specs = frozenset(parsed)
+        # Make each individual specifier a Specifier and save in a frozen set for later.
+        self._specs = frozenset(map(Specifier, split_specifiers))
 
         # Store our prereleases value so we can use it later to determine if
         # we accept prereleases or not.
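
Because _version_split now carries the epoch as the first component (defaulting to 0) and _version_join restores it, prefix matching and `~=` comparisons no longer mix up versions from different epochs. Expected behavior, assuming this is the epoch-handling fix it appears to be:

    from packaging.specifiers import SpecifierSet

    print(SpecifierSet('==1!2.0.*').contains('1!2.0.5'))  # True
    print(SpecifierSet('==1!2.0.*').contains('2.0.5'))    # False: epoch 0
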
diff --git a/pkg_resources/_vendor/packaging/tags.py b/pkg_resources/_vendor/packaging/tags.py
index 76d243414d..89f1926137 100644
--- a/pkg_resources/_vendor/packaging/tags.py
+++ b/pkg_resources/_vendor/packaging/tags.py
@@ -4,6 +4,8 @@
 
 import logging
 import platform
+import re
+import struct
 import subprocess
 import sys
 import sysconfig
@@ -37,7 +39,7 @@
 }
 
 
-_32_BIT_INTERPRETER = sys.maxsize <= 2**32
+_32_BIT_INTERPRETER = struct.calcsize("P") == 4
 
 
 class Tag:
@@ -123,20 +125,37 @@ def _normalize_string(string: str) -> str:
     return string.replace(".", "_").replace("-", "_").replace(" ", "_")
 
 
-def _abi3_applies(python_version: PythonVersion) -> bool:
+def _is_threaded_cpython(abis: List[str]) -> bool:
+    """
+    Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+    The threaded builds are indicated by a "t" in the abiflags.
+    """
+    if len(abis) == 0:
+        return False
+    # expect e.g., cp313
+    m = re.match(r"cp\d+(.*)", abis[0])
+    if not m:
+        return False
+    abiflags = m.group(1)
+    return "t" in abiflags
+
+
+def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
     """
     Determine if the Python version supports abi3.
 
-    PEP 384 was first implemented in Python 3.2.
+    PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
+    builds do not support abi3.
     """
-    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+    return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
 
 
 def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
     py_version = tuple(py_version)  # To allow for version comparison.
     abis = []
     version = _version_nodot(py_version[:2])
-    debug = pymalloc = ucs4 = ""
+    threading = debug = pymalloc = ucs4 = ""
     with_debug = _get_config_var("Py_DEBUG", warn)
     has_refcount = hasattr(sys, "gettotalrefcount")
     # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
@@ -145,6 +164,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
     has_ext = "_d.pyd" in EXTENSION_SUFFIXES
     if with_debug or (with_debug is None and (has_refcount or has_ext)):
         debug = "d"
+    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
+        threading = "t"
     if py_version < (3, 8):
         with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
         if with_pymalloc or with_pymalloc is None:
@@ -158,13 +179,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
     elif debug:
         # Debug builds can also load "normal" extension modules.
         # We can also assume no UCS-4 or pymalloc requirement.
-        abis.append(f"cp{version}")
-    abis.insert(
-        0,
-        "cp{version}{debug}{pymalloc}{ucs4}".format(
-            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
-        ),
-    )
+        abis.append(f"cp{version}{threading}")
+    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
     return abis
 
 
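
A hedged sketch of what the new flag means in practice (mirroring
_is_threaded_cpython; the abi strings shown are hypothetical):

    import re

    def is_threaded(abis):
        # A "t" in the abiflags of the first cpXY abi marks a free-threaded
        # (--disable-gil) build.
        m = re.match(r"cp\d+(.*)", abis[0]) if abis else None
        return bool(m) and "t" in m.group(1)

    print(is_threaded(["cp313t"]))  # True
    print(is_threaded(["cp313d"]))  # False ("d" is the debug flag)
    print(is_threaded([]))          # False
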
@@ -212,11 +228,14 @@ def cpython_tags(
     for abi in abis:
         for platform_ in platforms:
             yield Tag(interpreter, abi, platform_)
-    if _abi3_applies(python_version):
+
+    threading = _is_threaded_cpython(abis)
+    use_abi3 = _abi3_applies(python_version, threading)
+    if use_abi3:
         yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
     yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
 
-    if _abi3_applies(python_version):
+    if use_abi3:
         for minor_version in range(python_version[1] - 1, 1, -1):
             for platform_ in platforms:
                 interpreter = "cp{version}".format(
@@ -406,7 +425,7 @@ def mac_platforms(
                 check=True,
                 env={"SYSTEM_VERSION_COMPAT": "0"},
                 stdout=subprocess.PIPE,
-                universal_newlines=True,
+                text=True,
             ).stdout
             version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
     else:
@@ -469,15 +488,21 @@ def mac_platforms(
 
 def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
     linux = _normalize_string(sysconfig.get_platform())
+    if not linux.startswith("linux_"):
+        # we should never be here, just yield the sysconfig one and return
+        yield linux
+        return
     if is_32bit:
         if linux == "linux_x86_64":
             linux = "linux_i686"
         elif linux == "linux_aarch64":
-            linux = "linux_armv7l"
+            linux = "linux_armv8l"
     _, arch = linux.split("_", 1)
-    yield from _manylinux.platform_tags(linux, arch)
-    yield from _musllinux.platform_tags(arch)
-    yield linux
+    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+    yield from _manylinux.platform_tags(archs)
+    yield from _musllinux.platform_tags(archs)
+    for arch in archs:
+        yield f"linux_{arch}"
 
 
 def _generic_platforms() -> Iterator[str]:
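
A sketch of the new 32-bit ARM fallback (pure data manipulation, no platform
calls involved):

    # A 32-bit interpreter on an aarch64 kernel now reports armv8l and also
    # tries armv7l wheels, most specific architecture first:
    arch = "armv8l"
    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
    print([f"linux_{a}" for a in archs])  # ['linux_armv8l', 'linux_armv7l']
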
diff --git a/pkg_resources/_vendor/packaging/utils.py b/pkg_resources/_vendor/packaging/utils.py
index 33c613b749..c2c2f75aa8 100644
--- a/pkg_resources/_vendor/packaging/utils.py
+++ b/pkg_resources/_vendor/packaging/utils.py
@@ -12,6 +12,12 @@
 NormalizedName = NewType("NormalizedName", str)
 
 
+class InvalidName(ValueError):
+    """
+    An invalid distribution name; users should refer to the packaging user guide.
+    """
+
+
 class InvalidWheelFilename(ValueError):
     """
     An invalid wheel filename was found, users should refer to PEP 427.
@@ -24,17 +30,28 @@ class InvalidSdistFilename(ValueError):
     """
 
 
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
 _canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
 # PEP 427: The build number must start with a digit.
 _build_tag_regex = re.compile(r"(\d+)(.*)")
 
 
-def canonicalize_name(name: str) -> NormalizedName:
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+    if validate and not _validate_regex.match(name):
+        raise InvalidName(f"name is invalid: {name!r}")
     # This is taken from PEP 503.
     value = _canonicalize_regex.sub("-", name).lower()
     return cast(NormalizedName, value)
 
 
+def is_normalized_name(name: str) -> bool:
+    return _normalized_regex.match(name) is not None
+
+
 def canonicalize_version(
     version: Union[Version, str], *, strip_trailing_zero: bool = True
 ) -> str:
@@ -100,11 +117,18 @@ def parse_wheel_filename(
 
     parts = filename.split("-", dashes - 2)
     name_part = parts[0]
-    # See PEP 427 for the rules on escaping the project name
+    # See PEP 427 for the rules on escaping the project name.
     if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
         raise InvalidWheelFilename(f"Invalid project name: {filename}")
     name = canonicalize_name(name_part)
-    version = Version(parts[1])
+
+    try:
+        version = Version(parts[1])
+    except InvalidVersion as e:
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (invalid version): {filename}"
+        ) from e
+
     if dashes == 5:
         build_part = parts[2]
         build_match = _build_tag_regex.match(build_part)
@@ -137,5 +161,12 @@ def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
         raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
 
     name = canonicalize_name(name_part)
-    version = Version(version_part)
+
+    try:
+        version = Version(version_part)
+    except InvalidVersion as e:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (invalid version): {filename}"
+        ) from e
+
     return (name, version)
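
Taken together, the new validation hooks behave roughly like this (a sketch;
the exact error messages may differ):

    from packaging.utils import (
        InvalidName, InvalidSdistFilename, canonicalize_name,
        is_normalized_name, parse_sdist_filename,
    )

    print(canonicalize_name("Foo.Bar_baz"))   # foo-bar-baz
    print(is_normalized_name("foo-bar-baz"))  # True
    print(is_normalized_name("foo--bar"))     # False: "--" is not normalized
    try:
        canonicalize_name("-leading-dash", validate=True)
    except InvalidName as exc:
        print(exc)
    try:
        # A bad version now raises the filename-specific error, not a bare
        # InvalidVersion:
        parse_sdist_filename("demo-not.a.version.tar.gz")
    except InvalidSdistFilename as exc:
        print(exc)
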
diff --git a/pkg_resources/_vendor/packaging/version.py b/pkg_resources/_vendor/packaging/version.py
index b30e8cbf84..5faab9bd0d 100644
--- a/pkg_resources/_vendor/packaging/version.py
+++ b/pkg_resources/_vendor/packaging/version.py
@@ -7,37 +7,39 @@
     from packaging.version import parse, Version
 """
 
-import collections
 import itertools
 import re
-from typing import Any, Callable, Optional, SupportsInt, Tuple, Union
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
 
 from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
 
 __all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
 
-InfiniteTypes = Union[InfinityType, NegativeInfinityType]
-PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
-SubLocalType = Union[InfiniteTypes, int, str]
-LocalType = Union[
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
     NegativeInfinityType,
-    Tuple[
-        Union[
-            SubLocalType,
-            Tuple[SubLocalType, str],
-            Tuple[NegativeInfinityType, SubLocalType],
-        ],
-        ...,
-    ],
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
 ]
 CmpKey = Tuple[
-    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
 ]
 VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
 
-_Version = collections.namedtuple(
-    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
-)
+
+class _Version(NamedTuple):
+    epoch: int
+    release: Tuple[int, ...]
+    dev: Optional[Tuple[str, int]]
+    pre: Optional[Tuple[str, int]]
+    post: Optional[Tuple[str, int]]
+    local: Optional[LocalType]
 
 
 def parse(version: str) -> "Version":
@@ -117,7 +119,7 @@ def __ne__(self, other: object) -> bool:
         (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
         (?P<pre>                                          # pre-release
             [-_\.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
             [-_\.]?
             (?P<pre_n>[0-9]+)?
         )?
@@ -269,8 +271,7 @@ def epoch(self) -> int:
         >>> Version("1!2.0.0").epoch
         1
         """
-        _epoch: int = self._version.epoch
-        return _epoch
+        return self._version.epoch
 
     @property
     def release(self) -> Tuple[int, ...]:
@@ -286,8 +287,7 @@ def release(self) -> Tuple[int, ...]:
         Includes trailing zeroes but not the epoch or any pre-release / development /
         post-release suffixes.
         """
-        _release: Tuple[int, ...] = self._version.release
-        return _release
+        return self._version.release
 
     @property
     def pre(self) -> Optional[Tuple[str, int]]:
@@ -302,8 +302,7 @@ def pre(self) -> Optional[Tuple[str, int]]:
         >>> Version("1.2.3rc1").pre
         ('rc', 1)
         """
-        _pre: Optional[Tuple[str, int]] = self._version.pre
-        return _pre
+        return self._version.pre
 
     @property
     def post(self) -> Optional[int]:
@@ -451,7 +450,7 @@ def micro(self) -> int:
 
 
 def _parse_letter_version(
-    letter: str, number: Union[str, bytes, SupportsInt]
+    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
 ) -> Optional[Tuple[str, int]]:
 
     if letter:
@@ -489,7 +488,7 @@ def _parse_letter_version(
 _local_version_separators = re.compile(r"[\._-]")
 
 
-def _parse_local_version(local: str) -> Optional[LocalType]:
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
     """
     Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
     """
@@ -507,7 +506,7 @@ def _cmpkey(
     pre: Optional[Tuple[str, int]],
     post: Optional[Tuple[str, int]],
     dev: Optional[Tuple[str, int]],
-    local: Optional[Tuple[SubLocalType]],
+    local: Optional[LocalType],
 ) -> CmpKey:
 
     # When we compare a release version, we want to compare it with all of the
@@ -524,7 +523,7 @@ def _cmpkey(
     # if there is not a pre or a post segment. If we have one of those then
     # the normal sorting rules will handle this case correctly.
     if pre is None and post is None and dev is not None:
-        _pre: PrePostDevType = NegativeInfinity
+        _pre: CmpPrePostDevType = NegativeInfinity
     # Versions without a pre-release (except as noted above) should sort after
     # those with one.
     elif pre is None:
@@ -534,21 +533,21 @@ def _cmpkey(
 
     # Versions without a post segment should sort before those with one.
     if post is None:
-        _post: PrePostDevType = NegativeInfinity
+        _post: CmpPrePostDevType = NegativeInfinity
 
     else:
         _post = post
 
     # Versions without a development segment should sort after those with one.
     if dev is None:
-        _dev: PrePostDevType = Infinity
+        _dev: CmpPrePostDevType = Infinity
 
     else:
         _dev = dev
 
     if local is None:
         # Versions without a local segment should sort before those with one.
-        _local: LocalType = NegativeInfinity
+        _local: CmpLocalType = NegativeInfinity
     else:
         # Versions with a local segment need that segment parsed to implement
         # the sorting rules in PEP440.
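
The comparison semantics themselves are unchanged by the retyping; a brief
reminder of what the key encodes:

    from packaging.version import Version

    assert Version("1!1.0") > Version("2.0")         # epoch dominates
    assert Version("1.0") > Version("1.0.dev1")      # dev sorts first
    assert Version("1.0+local.1") > Version("1.0")   # local sorts last
    assert Version("1.0alpha1") == Version("1.0a1")  # spelled-out pre-release
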
diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt
index 1138915921..c18a2cc0eb 100644
--- a/pkg_resources/_vendor/vendored.txt
+++ b/pkg_resources/_vendor/vendored.txt
@@ -1,4 +1,4 @@
-packaging==23.1
+packaging==24
 
 platformdirs==2.6.2
 # required for platformdirs on Python < 3.8
diff --git a/setuptools/_vendor/packaging-23.1.dist-info/RECORD b/setuptools/_vendor/packaging-23.1.dist-info/RECORD
deleted file mode 100644
index e041f20f6a..0000000000
--- a/setuptools/_vendor/packaging-23.1.dist-info/RECORD
+++ /dev/null
@@ -1,37 +0,0 @@
-packaging-23.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-packaging-23.1.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
-packaging-23.1.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
-packaging-23.1.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
-packaging-23.1.dist-info/METADATA,sha256=JnduJDlxs2IVeB-nIqAC3-HyNcPhP_MADd9_k_MjmaI,3082
-packaging-23.1.dist-info/RECORD,,
-packaging-23.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging-23.1.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
-packaging/__init__.py,sha256=kYVZSmXT6CWInT4UJPDtrSQBAZu8fMuFBxpv5GsDTLk,501
-packaging/__pycache__/__init__.cpython-312.pyc,,
-packaging/__pycache__/_elffile.cpython-312.pyc,,
-packaging/__pycache__/_manylinux.cpython-312.pyc,,
-packaging/__pycache__/_musllinux.cpython-312.pyc,,
-packaging/__pycache__/_parser.cpython-312.pyc,,
-packaging/__pycache__/_structures.cpython-312.pyc,,
-packaging/__pycache__/_tokenizer.cpython-312.pyc,,
-packaging/__pycache__/markers.cpython-312.pyc,,
-packaging/__pycache__/metadata.cpython-312.pyc,,
-packaging/__pycache__/requirements.cpython-312.pyc,,
-packaging/__pycache__/specifiers.cpython-312.pyc,,
-packaging/__pycache__/tags.cpython-312.pyc,,
-packaging/__pycache__/utils.cpython-312.pyc,,
-packaging/__pycache__/version.cpython-312.pyc,,
-packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
-packaging/_manylinux.py,sha256=ESGrDEVmBc8jYTtdZRAWiLk72lOzAKWeezFgoJ_MuBc,8926
-packaging/_musllinux.py,sha256=mvPk7FNjjILKRLIdMxR7IvJ1uggLgCszo-L9rjfpi0M,2524
-packaging/_parser.py,sha256=KJQkBh_Xbfb-qsB560YIEItrTpCZaOh4_YMfBtd5XIY,10194
-packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
-packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
-packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
-packaging/metadata.py,sha256=PjELMLxKG_iu3HWjKAOdKhuNrHfWgpdTF2Q4nObsZeM,16397
-packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging/requirements.py,sha256=hJzvtJyAvENc_VfwfhnOZV1851-VW8JCGh-R96NE4Pc,3287
-packaging/specifiers.py,sha256=ZOpqL_w_Kj6ZF_OWdliQUzhEyHlDbi6989kr-sF5GHs,39206
-packaging/tags.py,sha256=_1gLX8h1SgpjAdYCP9XqU37zRjXtU5ZliGy3IM-WcSM,18106
-packaging/utils.py,sha256=es0cCezKspzriQ-3V88h3yJzxz028euV2sUwM61kE-o,4355
-packaging/version.py,sha256=2NH3E57hzRhn0BV9boUBvgPsxlTqLJeI0EpYQoNvGi0,16326
diff --git a/setuptools/_vendor/packaging-23.1.dist-info/INSTALLER b/setuptools/_vendor/packaging-24.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/packaging-23.1.dist-info/INSTALLER
rename to setuptools/_vendor/packaging-24.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/packaging-23.1.dist-info/LICENSE b/setuptools/_vendor/packaging-24.0.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/packaging-23.1.dist-info/LICENSE
rename to setuptools/_vendor/packaging-24.0.dist-info/LICENSE
diff --git a/setuptools/_vendor/packaging-23.1.dist-info/LICENSE.APACHE b/setuptools/_vendor/packaging-24.0.dist-info/LICENSE.APACHE
similarity index 100%
rename from setuptools/_vendor/packaging-23.1.dist-info/LICENSE.APACHE
rename to setuptools/_vendor/packaging-24.0.dist-info/LICENSE.APACHE
diff --git a/setuptools/_vendor/packaging-23.1.dist-info/LICENSE.BSD b/setuptools/_vendor/packaging-24.0.dist-info/LICENSE.BSD
similarity index 100%
rename from setuptools/_vendor/packaging-23.1.dist-info/LICENSE.BSD
rename to setuptools/_vendor/packaging-24.0.dist-info/LICENSE.BSD
diff --git a/setuptools/_vendor/packaging-23.1.dist-info/METADATA b/setuptools/_vendor/packaging-24.0.dist-info/METADATA
similarity index 95%
rename from setuptools/_vendor/packaging-23.1.dist-info/METADATA
rename to setuptools/_vendor/packaging-24.0.dist-info/METADATA
index c43882a826..10ab4390a9 100644
--- a/setuptools/_vendor/packaging-23.1.dist-info/METADATA
+++ b/setuptools/_vendor/packaging-24.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: packaging
-Version: 23.1
+Version: 24.0
 Summary: Core utilities for Python packages
 Author-email: Donald Stufft <donald@stufft.io>
 Requires-Python: >=3.7
@@ -17,6 +17,7 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Typing :: Typed
@@ -59,6 +60,8 @@ Use ``pip`` to install these utilities::
 
     pip install packaging
 
+The ``packaging`` library uses calendar-based versioning (``YY.N``).
+
 Discussion
 ----------
 
diff --git a/setuptools/_vendor/packaging-24.0.dist-info/RECORD b/setuptools/_vendor/packaging-24.0.dist-info/RECORD
new file mode 100644
index 0000000000..bcf796c2f4
--- /dev/null
+++ b/setuptools/_vendor/packaging-24.0.dist-info/RECORD
@@ -0,0 +1,37 @@
+packaging-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+packaging-24.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-24.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-24.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-24.0.dist-info/METADATA,sha256=0dESdhY_wHValuOrbgdebiEw04EbX4dkujlxPdEsFus,3203
+packaging-24.0.dist-info/RECORD,,
+packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496
+packaging/__pycache__/__init__.cpython-312.pyc,,
+packaging/__pycache__/_elffile.cpython-312.pyc,,
+packaging/__pycache__/_manylinux.cpython-312.pyc,,
+packaging/__pycache__/_musllinux.cpython-312.pyc,,
+packaging/__pycache__/_parser.cpython-312.pyc,,
+packaging/__pycache__/_structures.cpython-312.pyc,,
+packaging/__pycache__/_tokenizer.cpython-312.pyc,,
+packaging/__pycache__/markers.cpython-312.pyc,,
+packaging/__pycache__/metadata.cpython-312.pyc,,
+packaging/__pycache__/requirements.cpython-312.pyc,,
+packaging/__pycache__/specifiers.cpython-312.pyc,,
+packaging/__pycache__/tags.cpython-312.pyc,,
+packaging/__pycache__/utils.cpython-312.pyc,,
+packaging/__pycache__/version.cpython-312.pyc,,
+packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
+packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590
+packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676
+packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
+packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
+packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
+packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784
+packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
+packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
+packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236
diff --git a/setuptools/_vendor/packaging-23.1.dist-info/REQUESTED b/setuptools/_vendor/packaging-24.0.dist-info/REQUESTED
similarity index 100%
rename from setuptools/_vendor/packaging-23.1.dist-info/REQUESTED
rename to setuptools/_vendor/packaging-24.0.dist-info/REQUESTED
diff --git a/pkg_resources/_vendor/packaging-23.1.dist-info/WHEEL b/setuptools/_vendor/packaging-24.0.dist-info/WHEEL
similarity index 72%
rename from pkg_resources/_vendor/packaging-23.1.dist-info/WHEEL
rename to setuptools/_vendor/packaging-24.0.dist-info/WHEEL
index db4a255f3a..3b5e64b5e6 100644
--- a/pkg_resources/_vendor/packaging-23.1.dist-info/WHEEL
+++ b/setuptools/_vendor/packaging-24.0.dist-info/WHEEL
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: flit 3.8.0
+Generator: flit 3.9.0
 Root-Is-Purelib: true
 Tag: py3-none-any
diff --git a/setuptools/_vendor/packaging/__init__.py b/setuptools/_vendor/packaging/__init__.py
index 13cadc7f04..e7c0aa12ca 100644
--- a/setuptools/_vendor/packaging/__init__.py
+++ b/setuptools/_vendor/packaging/__init__.py
@@ -6,10 +6,10 @@
 __summary__ = "Core utilities for Python packages"
 __uri__ = "https://github.com/pypa/packaging"
 
-__version__ = "23.1"
+__version__ = "24.0"
 
 __author__ = "Donald Stufft and individual contributors"
 __email__ = "donald@stufft.io"
 
 __license__ = "BSD-2-Clause or Apache-2.0"
-__copyright__ = "2014-2019 %s" % __author__
+__copyright__ = "2014 %s" % __author__
diff --git a/setuptools/_vendor/packaging/_manylinux.py b/setuptools/_vendor/packaging/_manylinux.py
index 449c655be6..ad62505f3f 100644
--- a/setuptools/_vendor/packaging/_manylinux.py
+++ b/setuptools/_vendor/packaging/_manylinux.py
@@ -5,7 +5,7 @@
 import re
 import sys
 import warnings
-from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
 
 from ._elffile import EIClass, EIData, ELFFile, EMachine
 
@@ -50,12 +50,21 @@ def _is_linux_i686(executable: str) -> bool:
         )
 
 
-def _have_compatible_abi(executable: str, arch: str) -> bool:
-    if arch == "armv7l":
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+    if "armv7l" in archs:
         return _is_linux_armhf(executable)
-    if arch == "i686":
+    if "i686" in archs:
         return _is_linux_i686(executable)
-    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+    allowed_archs = {
+        "x86_64",
+        "aarch64",
+        "ppc64",
+        "ppc64le",
+        "s390x",
+        "loongarch64",
+        "riscv64",
+    }
+    return any(arch in allowed_archs for arch in archs)
 
 
 # If glibc ever changes its major version, we need to know what the last
@@ -81,7 +90,7 @@ def _glibc_version_string_confstr() -> Optional[str]:
     # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
     try:
         # Should be a string like "glibc 2.17".
-        version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
+        version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
         assert version_string is not None
         _, version = version_string.rsplit()
     except (AssertionError, AttributeError, OSError, ValueError):
@@ -167,13 +176,13 @@ def _get_glibc_version() -> Tuple[int, int]:
 
 
 # From PEP 513, PEP 600
-def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
     sys_glibc = _get_glibc_version()
     if sys_glibc < version:
         return False
     # Check for presence of _manylinux module.
     try:
-        import _manylinux  # noqa
+        import _manylinux
     except ImportError:
         return True
     if hasattr(_manylinux, "manylinux_compatible"):
@@ -203,12 +212,22 @@ def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
 }
 
 
-def platform_tags(linux: str, arch: str) -> Iterator[str]:
-    if not _have_compatible_abi(sys.executable, arch):
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+    """Generate manylinux tags compatible to the current platform.
+
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be manylinux-compatible.
+
+    :returns: An iterator of compatible manylinux tags.
+    """
+    if not _have_compatible_abi(sys.executable, archs):
         return
     # Oldest glibc to be supported regardless of architecture is (2, 17).
     too_old_glibc2 = _GLibCVersion(2, 16)
-    if arch in {"x86_64", "i686"}:
+    if set(archs) & {"x86_64", "i686"}:
         # On x86/i686 also oldest glibc to be supported is (2, 5).
         too_old_glibc2 = _GLibCVersion(2, 4)
     current_glibc = _GLibCVersion(*_get_glibc_version())
@@ -222,19 +241,20 @@ def platform_tags(linux: str, arch: str) -> Iterator[str]:
     for glibc_major in range(current_glibc.major - 1, 1, -1):
         glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
         glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
-    for glibc_max in glibc_max_list:
-        if glibc_max.major == too_old_glibc2.major:
-            min_minor = too_old_glibc2.minor
-        else:
-            # For other glibc major versions oldest supported is (x, 0).
-            min_minor = -1
-        for glibc_minor in range(glibc_max.minor, min_minor, -1):
-            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
-            tag = "manylinux_{}_{}".format(*glibc_version)
-            if _is_compatible(tag, arch, glibc_version):
-                yield linux.replace("linux", tag)
-            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
-            if glibc_version in _LEGACY_MANYLINUX_MAP:
-                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
-                if _is_compatible(legacy_tag, arch, glibc_version):
-                    yield linux.replace("linux", legacy_tag)
+    for arch in archs:
+        for glibc_max in glibc_max_list:
+            if glibc_max.major == too_old_glibc2.major:
+                min_minor = too_old_glibc2.minor
+            else:
+                # For other glibc major versions oldest supported is (x, 0).
+                min_minor = -1
+            for glibc_minor in range(glibc_max.minor, min_minor, -1):
+                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+                tag = "manylinux_{}_{}".format(*glibc_version)
+                if _is_compatible(arch, glibc_version):
+                    yield f"{tag}_{arch}"
+                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+                if glibc_version in _LEGACY_MANYLINUX_MAP:
+                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+                    if _is_compatible(arch, glibc_version):
+                        yield f"{legacy_tag}_{arch}"
diff --git a/setuptools/_vendor/packaging/_musllinux.py b/setuptools/_vendor/packaging/_musllinux.py
index 706ba600a9..86419df9d7 100644
--- a/setuptools/_vendor/packaging/_musllinux.py
+++ b/setuptools/_vendor/packaging/_musllinux.py
@@ -8,7 +8,7 @@
 import re
 import subprocess
 import sys
-from typing import Iterator, NamedTuple, Optional
+from typing import Iterator, NamedTuple, Optional, Sequence
 
 from ._elffile import ELFFile
 
@@ -47,24 +47,27 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
         return None
     if ld is None or "musl" not in ld:
         return None
-    proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
     return _parse_musl_version(proc.stderr)
 
 
-def platform_tags(arch: str) -> Iterator[str]:
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
     """Generate musllinux tags compatible to the current platform.
 
-    :param arch: Should be the part of platform tag after the ``linux_``
-        prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
-        prerequisite for the current platform to be musllinux-compatible.
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be musllinux-compatible.
 
     :returns: An iterator of compatible musllinux tags.
     """
     sys_musl = _get_musl_version(sys.executable)
     if sys_musl is None:  # Python not dynamically linked against musl.
         return
-    for minor in range(sys_musl.minor, -1, -1):
-        yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+    for arch in archs:
+        for minor in range(sys_musl.minor, -1, -1):
+            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
 
 
 if __name__ == "__main__":  # pragma: no cover
diff --git a/setuptools/_vendor/packaging/_parser.py b/setuptools/_vendor/packaging/_parser.py
index 5a18b758fe..684df75457 100644
--- a/setuptools/_vendor/packaging/_parser.py
+++ b/setuptools/_vendor/packaging/_parser.py
@@ -252,7 +252,13 @@ def _parse_version_many(tokenizer: Tokenizer) -> str:
 # Recursive descent parser for marker expression
 # --------------------------------------------------------------------------------------
 def parse_marker(source: str) -> MarkerList:
-    return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))
+    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
+    retval = _parse_marker(tokenizer)
+    tokenizer.expect("END", expected="end of marker expression")
+    return retval
 
 
 def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
@@ -318,10 +324,7 @@ def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
 
 
 def process_env_var(env_var: str) -> Variable:
-    if (
-        env_var == "platform_python_implementation"
-        or env_var == "python_implementation"
-    ):
+    if env_var in ("platform_python_implementation", "python_implementation"):
         return Variable("platform_python_implementation")
     else:
         return Variable(env_var)
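
The practical consequence of the END check: trailing tokens after a complete
marker are now rejected instead of being dropped (sketch; the parser error
surfaces as InvalidMarker through the public Marker class):

    from packaging.markers import InvalidMarker, Marker

    print(Marker('python_version >= "3.8"'))  # parses as before
    try:
        # A second clause without a joining "and"/"or" previously left
        # unconsumed tokens behind; it now fails at "end of marker expression".
        Marker('python_version >= "3.8" python_version < "4"')
    except InvalidMarker as exc:
        print(exc)
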
diff --git a/setuptools/_vendor/packaging/metadata.py b/setuptools/_vendor/packaging/metadata.py
index e76a60c395..fb27493079 100644
--- a/setuptools/_vendor/packaging/metadata.py
+++ b/setuptools/_vendor/packaging/metadata.py
@@ -5,23 +5,77 @@
 import email.policy
 import sys
 import typing
-from typing import Dict, List, Optional, Tuple, Union, cast
-
-if sys.version_info >= (3, 8):  # pragma: no cover
-    from typing import TypedDict
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from . import requirements, specifiers, utils, version as version_module
+
+T = typing.TypeVar("T")
+if sys.version_info[:2] >= (3, 8):  # pragma: no cover
+    from typing import Literal, TypedDict
 else:  # pragma: no cover
     if typing.TYPE_CHECKING:
-        from typing_extensions import TypedDict
+        from typing_extensions import Literal, TypedDict
     else:
         try:
-            from typing_extensions import TypedDict
+            from typing_extensions import Literal, TypedDict
         except ImportError:
 
+            class Literal:
+                def __init_subclass__(*_args, **_kwargs):
+                    pass
+
             class TypedDict:
                 def __init_subclass__(*_args, **_kwargs):
                     pass
 
 
+try:
+    ExceptionGroup
+except NameError:  # pragma: no cover
+
+    class ExceptionGroup(Exception):  # noqa: N818
+        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+        If :external:exc:`ExceptionGroup` is already defined by Python itself,
+        that version is used instead.
+        """
+
+        message: str
+        exceptions: List[Exception]
+
+        def __init__(self, message: str, exceptions: List[Exception]) -> None:
+            self.message = message
+            self.exceptions = exceptions
+
+        def __repr__(self) -> str:
+            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
+
+else:  # pragma: no cover
+    ExceptionGroup = ExceptionGroup
+
+
+class InvalidMetadata(ValueError):
+    """A metadata field contains invalid data."""
+
+    field: str
+    """The name of the field that contains invalid data."""
+
+    def __init__(self, field: str, message: str) -> None:
+        self.field = field
+        super().__init__(message)
+
+
 # The RawMetadata class attempts to make as few assumptions about the underlying
 # serialization formats as possible. The idea is that as long as a serialization
 # formats offer some very basic primitives in *some* way then we can support
@@ -33,7 +87,8 @@ class RawMetadata(TypedDict, total=False):
     provided). The key is lower-case and underscores are used instead of dashes
     compared to the equivalent core metadata field. Any core metadata field that
     can be specified multiple times or can hold multiple values in a single
-    field have a key with a plural name.
+    field has a key with a plural name. See :class:`Metadata` whose attributes
+    match the keys of this dictionary.
 
     Core metadata fields that can be specified multiple times are stored as a
     list or dict depending on which is appropriate for the field. Any fields
@@ -77,7 +132,7 @@ class RawMetadata(TypedDict, total=False):
     # but got stuck without ever being able to build consensus on
     # it and ultimately ended up withdrawn.
     #
-    # However, a number of tools had started emiting METADATA with
+    # However, a number of tools had started emitting METADATA with
     # `2.0` Metadata-Version, so for historical reasons, this version
     # was skipped.
 
@@ -110,7 +165,7 @@ class RawMetadata(TypedDict, total=False):
     "version",
 }
 
-_LIST_STRING_FIELDS = {
+_LIST_FIELDS = {
     "classifiers",
     "dynamic",
     "obsoletes",
@@ -125,6 +180,10 @@ class RawMetadata(TypedDict, total=False):
     "supported_platforms",
 }
 
+_DICT_FIELDS = {
+    "project_urls",
+}
+
 
 def _parse_keywords(data: str) -> List[str]:
     """Split a string of comma-separate keyboards into a list of keywords."""
@@ -230,10 +289,11 @@ def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
     "supported-platform": "supported_platforms",
     "version": "version",
 }
+_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
 
 
 def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
-    """Parse a distribution's metadata.
+    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
 
     This function returns a two-item tuple of dicts. The first dict is of
     recognized fields from the core metadata specification. Fields that can be
@@ -267,7 +327,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
         # We use get_all() here, even for fields that aren't multiple use,
         # because otherwise someone could have e.g. two Name fields, and we
         # would just silently ignore it rather than doing something about it.
-        headers = parsed.get_all(name)
+        headers = parsed.get_all(name) or []
 
         # The way the email module works when parsing bytes is that it
         # unconditionally decodes the bytes as ascii using the surrogateescape
@@ -349,7 +409,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
         # If this is one of our list of string fields, then we can just assign
         # the value, since email *only* has strings, and our get_all() call
         # above ensures that this is a list.
-        elif raw_name in _LIST_STRING_FIELDS:
+        elif raw_name in _LIST_FIELDS:
             raw[raw_name] = value
         # Special Case: Keywords
         # The keywords field is implemented in the metadata spec as a str,
@@ -406,3 +466,360 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
     # way this function is implemented, our `TypedDict` can only have valid key
     # names.
     return cast(RawMetadata, raw), unparsed
+
+
+_NOT_FOUND = object()
+
+
+# Keep the two values in sync.
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+
+_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
+class _Validator(Generic[T]):
+    """Validate a metadata field.
+
+    All _process_*() methods correspond to a core metadata field. The method is
+    called with the field's raw value. If the raw value is valid it is returned
+    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
+    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
+    as appropriate).
+    """
+
+    name: str
+    raw_name: str
+    added: _MetadataVersion
+
+    def __init__(
+        self,
+        *,
+        added: _MetadataVersion = "1.0",
+    ) -> None:
+        self.added = added
+
+    def __set_name__(self, _owner: "Metadata", name: str) -> None:
+        self.name = name
+        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
+
+    def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
+        # With Python 3.8, the caching can be replaced with functools.cached_property().
+        # No need to check the cache as attribute lookup will resolve into the
+        # instance's __dict__ before __get__ is called.
+        cache = instance.__dict__
+        value = instance._raw.get(self.name)
+
+        # To make the _process_* methods easier, we'll check if the value is None
+        # and if this field is NOT a required attribute, and if both of those
+        # things are true, we'll skip the converter. This will mean that the
+        # converters never have to deal with the None union.
+        if self.name in _REQUIRED_ATTRS or value is not None:
+            try:
+                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
+            except AttributeError:
+                pass
+            else:
+                value = converter(value)
+
+        cache[self.name] = value
+        try:
+            del instance._raw[self.name]  # type: ignore[misc]
+        except KeyError:
+            pass
+
+        return cast(T, value)
+
+    def _invalid_metadata(
+        self, msg: str, cause: Optional[Exception] = None
+    ) -> InvalidMetadata:
+        exc = InvalidMetadata(
+            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
+        )
+        exc.__cause__ = cause
+        return exc
+
+    def _process_metadata_version(self, value: str) -> _MetadataVersion:
+        # Implicitly makes Metadata-Version required.
+        if value not in _VALID_METADATA_VERSIONS:
+            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
+        return cast(_MetadataVersion, value)
+
+    def _process_name(self, value: str) -> str:
+        if not value:
+            raise self._invalid_metadata("{field} is a required field")
+        # Validate the name as a side-effect.
+        try:
+            utils.canonicalize_name(value, validate=True)
+        except utils.InvalidName as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            )
+        else:
+            return value
+
+    def _process_version(self, value: str) -> version_module.Version:
+        if not value:
+            raise self._invalid_metadata("{field} is a required field")
+        try:
+            return version_module.parse(value)
+        except version_module.InvalidVersion as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            )
+
+    def _process_summary(self, value: str) -> str:
+        """Check the field contains no newlines."""
+        if "\n" in value:
+            raise self._invalid_metadata("{field} must be a single line")
+        return value
+
+    def _process_description_content_type(self, value: str) -> str:
+        content_types = {"text/plain", "text/x-rst", "text/markdown"}
+        message = email.message.EmailMessage()
+        message["content-type"] = value
+
+        content_type, parameters = (
+            # Defaults to `text/plain` if parsing failed.
+            message.get_content_type().lower(),
+            message["content-type"].params,
+        )
+        # Check if content-type is valid or defaulted to `text/plain` and thus was
+        # not parseable.
+        if content_type not in content_types or content_type not in value.lower():
+            raise self._invalid_metadata(
+                f"{{field}} must be one of {list(content_types)}, not {value!r}"
+            )
+
+        charset = parameters.get("charset", "UTF-8")
+        if charset != "UTF-8":
+            raise self._invalid_metadata(
+                f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
+            )
+
+        markdown_variants = {"GFM", "CommonMark"}
+        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
+        if content_type == "text/markdown" and variant not in markdown_variants:
+            raise self._invalid_metadata(
+                f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
+                f"not {variant!r}",
+            )
+        return value
+
+    def _process_dynamic(self, value: List[str]) -> List[str]:
+        for dynamic_field in map(str.lower, value):
+            if dynamic_field in {"name", "version", "metadata-version"}:
+                raise self._invalid_metadata(
+                    f"{value!r} is not allowed as a dynamic field"
+                )
+            elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
+                raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
+        return list(map(str.lower, value))
+
+    def _process_provides_extra(
+        self,
+        value: List[str],
+    ) -> List[utils.NormalizedName]:
+        normalized_names = []
+        try:
+            for name in value:
+                normalized_names.append(utils.canonicalize_name(name, validate=True))
+        except utils.InvalidName as exc:
+            raise self._invalid_metadata(
+                f"{name!r} is invalid for {{field}}", cause=exc
+            )
+        else:
+            return normalized_names
+
+    def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
+        try:
+            return specifiers.SpecifierSet(value)
+        except specifiers.InvalidSpecifier as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            )
+
+    def _process_requires_dist(
+        self,
+        value: List[str],
+    ) -> List[requirements.Requirement]:
+        reqs = []
+        try:
+            for req in value:
+                reqs.append(requirements.Requirement(req))
+        except requirements.InvalidRequirement as exc:
+            raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
+        else:
+            return reqs
+
+
+class Metadata:
+    """Representation of distribution metadata.
+
+    Compared to :class:`RawMetadata`, this class provides objects representing
+    metadata fields instead of only using built-in types. Any invalid metadata
+    will cause :exc:`InvalidMetadata` to be raised (with a
+    :py:attr:`~BaseException.__cause__` attribute as appropriate).
+    """
+
+    _raw: RawMetadata
+
+    @classmethod
+    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
+        """Create an instance from :class:`RawMetadata`.
+
+        If *validate* is true, all metadata will be validated. All exceptions
+        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+        """
+        ins = cls()
+        ins._raw = data.copy()  # Mutations occur due to caching enriched values.
+
+        if validate:
+            exceptions: List[Exception] = []
+            try:
+                metadata_version = ins.metadata_version
+                metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
+            except InvalidMetadata as metadata_version_exc:
+                exceptions.append(metadata_version_exc)
+                metadata_version = None
+
+            # Make sure to check both the fields that are present and the
+            # required fields (so their absence can be reported).
+            fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
+            # Remove fields that have already been checked.
+            fields_to_check -= {"metadata_version"}
+
+            for key in fields_to_check:
+                try:
+                    if metadata_version:
+                        # Can't use getattr() as that triggers descriptor protocol which
+                        # will fail due to no value for the instance argument.
+                        try:
+                            field_metadata_version = cls.__dict__[key].added
+                        except KeyError:
+                            exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
+                            exceptions.append(exc)
+                            continue
+                        field_age = _VALID_METADATA_VERSIONS.index(
+                            field_metadata_version
+                        )
+                        if field_age > metadata_age:
+                            field = _RAW_TO_EMAIL_MAPPING[key]
+                            exc = InvalidMetadata(
+                                field,
+                                "{field} introduced in metadata version "
+                                "{field_metadata_version}, not {metadata_version}",
+                            )
+                            exceptions.append(exc)
+                            continue
+                    getattr(ins, key)
+                except InvalidMetadata as exc:
+                    exceptions.append(exc)
+
+            if exceptions:
+                raise ExceptionGroup("invalid metadata", exceptions)
+
+        return ins
+
+    @classmethod
+    def from_email(
+        cls, data: Union[bytes, str], *, validate: bool = True
+    ) -> "Metadata":
+        """Parse metadata from email headers.
+
+        If *validate* is true, the metadata will be validated. All exceptions
+        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+        """
+        raw, unparsed = parse_email(data)
+
+        if validate:
+            exceptions: List[Exception] = []
+            for unparsed_key in unparsed:
+                if unparsed_key in _EMAIL_TO_RAW_MAPPING:
+                    message = f"{unparsed_key!r} has invalid data"
+                else:
+                    message = f"unrecognized field: {unparsed_key!r}"
+                exceptions.append(InvalidMetadata(unparsed_key, message))
+
+            if exceptions:
+                raise ExceptionGroup("unparsed", exceptions)
+
+        try:
+            return cls.from_raw(raw, validate=validate)
+        except ExceptionGroup as exc_group:
+            raise ExceptionGroup(
+                "invalid or unparsed metadata", exc_group.exceptions
+            ) from None
+
+    metadata_version: _Validator[_MetadataVersion] = _Validator()
+    """:external:ref:`core-metadata-metadata-version`
+    (required; validated to be a valid metadata version)"""
+    name: _Validator[str] = _Validator()
+    """:external:ref:`core-metadata-name`
+    (required; validated using :func:`~packaging.utils.canonicalize_name` and its
+    *validate* parameter)"""
+    version: _Validator[version_module.Version] = _Validator()
+    """:external:ref:`core-metadata-version` (required)"""
+    dynamic: _Validator[Optional[List[str]]] = _Validator(
+        added="2.2",
+    )
+    """:external:ref:`core-metadata-dynamic`
+    (validated against core metadata field names and lowercased)"""
+    platforms: _Validator[Optional[List[str]]] = _Validator()
+    """:external:ref:`core-metadata-platform`"""
+    supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-supported-platform`"""
+    summary: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
+    description: _Validator[Optional[str]] = _Validator()  # TODO 2.1: can be in body
+    """:external:ref:`core-metadata-description`"""
+    description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
+    """:external:ref:`core-metadata-description-content-type` (validated)"""
+    keywords: _Validator[Optional[List[str]]] = _Validator()
+    """:external:ref:`core-metadata-keywords`"""
+    home_page: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-home-page`"""
+    download_url: _Validator[Optional[str]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-download-url`"""
+    author: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-author`"""
+    author_email: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-author-email`"""
+    maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-maintainer`"""
+    maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-maintainer-email`"""
+    license: _Validator[Optional[str]] = _Validator()
+    """:external:ref:`core-metadata-license`"""
+    classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """:external:ref:`core-metadata-classifier`"""
+    requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
+        added="1.2"
+    )
+    """:external:ref:`core-metadata-requires-dist`"""
+    requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
+        added="1.2"
+    )
+    """:external:ref:`core-metadata-requires-python`"""
+    # Because `Requires-External` allows for non-PEP 440 version specifiers, we
+    # don't do any processing on the values.
+    requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-requires-external`"""
+    project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-project-url`"""
+    # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
+    # regardless of metadata version.
+    provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
+        added="2.1",
+    )
+    """:external:ref:`core-metadata-provides-extra`"""
+    provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-provides-dist`"""
+    obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    """:external:ref:`core-metadata-obsoletes-dist`"""
+    requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """``Requires`` (deprecated)"""
+    provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """``Provides`` (deprecated)"""
+    obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    """``Obsoletes`` (deprecated)"""
diff --git a/setuptools/_vendor/packaging/requirements.py b/setuptools/_vendor/packaging/requirements.py
index f34bfa85c8..bdc43a7e98 100644
--- a/setuptools/_vendor/packaging/requirements.py
+++ b/setuptools/_vendor/packaging/requirements.py
@@ -2,13 +2,13 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
-import urllib.parse
-from typing import Any, List, Optional, Set
+from typing import Any, Iterator, Optional, Set
 
 from ._parser import parse_requirement as _parse_requirement
 from ._tokenizer import ParserSyntaxError
 from .markers import Marker, _normalize_extra_values
 from .specifiers import SpecifierSet
+from .utils import canonicalize_name
 
 
 class InvalidRequirement(ValueError):
@@ -37,57 +37,52 @@ def __init__(self, requirement_string: str) -> None:
             raise InvalidRequirement(str(e)) from e
 
         self.name: str = parsed.name
-        if parsed.url:
-            parsed_url = urllib.parse.urlparse(parsed.url)
-            if parsed_url.scheme == "file":
-                if urllib.parse.urlunparse(parsed_url) != parsed.url:
-                    raise InvalidRequirement("Invalid URL given")
-            elif not (parsed_url.scheme and parsed_url.netloc) or (
-                not parsed_url.scheme and not parsed_url.netloc
-            ):
-                raise InvalidRequirement(f"Invalid URL: {parsed.url}")
-            self.url: Optional[str] = parsed.url
-        else:
-            self.url = None
-        self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
+        self.url: Optional[str] = parsed.url or None
+        self.extras: Set[str] = set(parsed.extras or [])
         self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
         self.marker: Optional[Marker] = None
         if parsed.marker is not None:
             self.marker = Marker.__new__(Marker)
             self.marker._markers = _normalize_extra_values(parsed.marker)
 
-    def __str__(self) -> str:
-        parts: List[str] = [self.name]
+    def _iter_parts(self, name: str) -> Iterator[str]:
+        yield name
 
         if self.extras:
             formatted_extras = ",".join(sorted(self.extras))
-            parts.append(f"[{formatted_extras}]")
+            yield f"[{formatted_extras}]"
 
         if self.specifier:
-            parts.append(str(self.specifier))
+            yield str(self.specifier)
 
         if self.url:
-            parts.append(f"@ {self.url}")
+            yield f"@ {self.url}"
             if self.marker:
-                parts.append(" ")
+                yield " "
 
         if self.marker:
-            parts.append(f"; {self.marker}")
+            yield f"; {self.marker}"
 
-        return "".join(parts)
+    def __str__(self) -> str:
+        return "".join(self._iter_parts(self.name))
 
     def __repr__(self) -> str:
         return f""
 
     def __hash__(self) -> int:
-        return hash((self.__class__.__name__, str(self)))
+        return hash(
+            (
+                self.__class__.__name__,
+                *self._iter_parts(canonicalize_name(self.name)),
+            )
+        )
 
     def __eq__(self, other: Any) -> bool:
         if not isinstance(other, Requirement):
             return NotImplemented
 
         return (
-            self.name == other.name
+            canonicalize_name(self.name) == canonicalize_name(other.name)
             and self.extras == other.extras
             and self.specifier == other.specifier
             and self.url == other.url
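
The visible effect of routing __eq__ and __hash__ through canonicalize_name
(illustrative):

    from packaging.requirements import Requirement

    a = Requirement("Foo.Bar>=1.0")
    b = Requirement("foo-bar>=1.0")
    assert a == b            # names now compare in canonical form
    assert len({a, b}) == 1  # hash() agrees, so sets and dicts dedupe
    print(a)                 # Foo.Bar>=1.0 -- str() keeps the spelling
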
diff --git a/setuptools/_vendor/packaging/specifiers.py b/setuptools/_vendor/packaging/specifiers.py
index ba8fe37b7f..2d015bab59 100644
--- a/setuptools/_vendor/packaging/specifiers.py
+++ b/setuptools/_vendor/packaging/specifiers.py
@@ -11,17 +11,7 @@
 import abc
 import itertools
 import re
-from typing import (
-    Callable,
-    Iterable,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    TypeVar,
-    Union,
-)
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
 
 from .utils import canonicalize_version
 from .version import Version
@@ -383,7 +373,7 @@ def _compare_compatible(self, prospective: Version, spec: str) -> bool:
 
         # We want everything but the last item in the version, but we want to
         # ignore suffix segments.
-        prefix = ".".join(
+        prefix = _version_join(
             list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
         )
 
@@ -404,13 +394,13 @@ def _compare_equal(self, prospective: Version, spec: str) -> bool:
             )
             # Get the normalized version string ignoring the trailing .*
             normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
-            # Split the spec out by dots, and pretend that there is an implicit
-            # dot in between a release segment and a pre-release segment.
+            # Split the spec out by bangs and dots, and pretend that there is
+            # an implicit dot in between a release segment and a pre-release segment.
             split_spec = _version_split(normalized_spec)
 
-            # Split the prospective version out by dots, and pretend that there
-            # is an implicit dot in between a release segment and a pre-release
-            # segment.
+            # Split the prospective version out by bangs and dots, and pretend
+            # that there is an implicit dot in between a release segment and
+            # a pre-release segment.
             split_prospective = _version_split(normalized_prospective)
 
             # 0-pad the prospective version before shortening it to get the correct
@@ -644,8 +634,19 @@ def filter(
 
 
 def _version_split(version: str) -> List[str]:
+    """Split version into components.
+
+    The split components are intended for version comparison. The logic does
+    not attempt to retain the original version string, so joining the
+    components back with :func:`_version_join` may not produce the original
+    version string.
+    """
     result: List[str] = []
-    for item in version.split("."):
+
+    epoch, _, rest = version.rpartition("!")
+    result.append(epoch or "0")
+
+    for item in rest.split("."):
         match = _prefix_regex.search(item)
         if match:
             result.extend(match.groups())
@@ -654,6 +655,17 @@ def _version_split(version: str) -> List[str]:
     return result
 
 
+def _version_join(components: List[str]) -> str:
+    """Join split version components into a version string.
+
+    This function assumes the input came from :func:`_version_split`, where the
+    first component must be the epoch (either empty or numeric), and all other
+    components numeric.
+    """
+    epoch, *rest = components
+    return f"{epoch}!{'.'.join(rest)}"
+
+
 def _is_not_suffix(segment: str) -> bool:
     return not any(
         segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
@@ -675,7 +687,10 @@ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str
     left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
     right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
 
-    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+    return (
+        list(itertools.chain.from_iterable(left_split)),
+        list(itertools.chain.from_iterable(right_split)),
+    )
 
 
 class SpecifierSet(BaseSpecifier):
@@ -707,14 +722,8 @@ def __init__(
         # strip each item to remove leading/trailing whitespace.
         split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
 
-        # Parsed each individual specifier, attempting first to make it a
-        # Specifier.
-        parsed: Set[Specifier] = set()
-        for specifier in split_specifiers:
-            parsed.add(Specifier(specifier))
-
-        # Turn our parsed specifiers into a frozen set and save them for later.
-        self._specs = frozenset(parsed)
+        # Make each individual specifier a Specifier and save in a frozen set for later.
+        self._specs = frozenset(map(Specifier, split_specifiers))
 
         # Store our prereleases value so we can use it later to determine if
         # we accept prereleases or not.
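
To see what the epoch-aware _version_split/_version_join pair buys: the epoch
is always materialized as the first component (defaulting to "0"), which is
also why joining can be lossy for versions without an explicit epoch, exactly
as the docstrings warn. A stripped-down sketch that omits the pre-release
prefix splitting done by _prefix_regex (names here are illustrative, not the
vendored API):

    def version_split(version):
        epoch, _, rest = version.rpartition("!")
        return [epoch or "0"] + rest.split(".")

    def version_join(components):
        epoch, *rest = components
        return f"{epoch}!{'.'.join(rest)}"

    assert version_split("1!2.0.3") == ["1", "2", "0", "3"]
    assert version_split("2.0.3") == ["0", "2", "0", "3"]     # implicit epoch 0
    assert version_join(version_split("2.0.3")) == "0!2.0.3"  # not the original
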
diff --git a/setuptools/_vendor/packaging/tags.py b/setuptools/_vendor/packaging/tags.py
index 76d243414d..89f1926137 100644
--- a/setuptools/_vendor/packaging/tags.py
+++ b/setuptools/_vendor/packaging/tags.py
@@ -4,6 +4,8 @@
 
 import logging
 import platform
+import re
+import struct
 import subprocess
 import sys
 import sysconfig
@@ -37,7 +39,7 @@
 }
 
 
-_32_BIT_INTERPRETER = sys.maxsize <= 2**32
+_32_BIT_INTERPRETER = struct.calcsize("P") == 4
 
 
 class Tag:
@@ -123,20 +125,37 @@ def _normalize_string(string: str) -> str:
     return string.replace(".", "_").replace("-", "_").replace(" ", "_")
 
 
-def _abi3_applies(python_version: PythonVersion) -> bool:
+def _is_threaded_cpython(abis: List[str]) -> bool:
+    """
+    Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+    The threaded builds are indicated by a "t" in the abiflags.
+    """
+    if len(abis) == 0:
+        return False
+    # expect e.g., cp313
+    m = re.match(r"cp\d+(.*)", abis[0])
+    if not m:
+        return False
+    abiflags = m.group(1)
+    return "t" in abiflags
+
+
+def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
     """
     Determine if the Python version supports abi3.
 
-    PEP 384 was first implemented in Python 3.2.
+    PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
+    builds do not support abi3.
     """
-    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+    return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
 
 
 def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
     py_version = tuple(py_version)  # To allow for version comparison.
     abis = []
     version = _version_nodot(py_version[:2])
-    debug = pymalloc = ucs4 = ""
+    threading = debug = pymalloc = ucs4 = ""
     with_debug = _get_config_var("Py_DEBUG", warn)
     has_refcount = hasattr(sys, "gettotalrefcount")
     # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
@@ -145,6 +164,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
     has_ext = "_d.pyd" in EXTENSION_SUFFIXES
     if with_debug or (with_debug is None and (has_refcount or has_ext)):
         debug = "d"
+    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
+        threading = "t"
     if py_version < (3, 8):
         with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
         if with_pymalloc or with_pymalloc is None:
@@ -158,13 +179,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
     elif debug:
         # Debug builds can also load "normal" extension modules.
         # We can also assume no UCS-4 or pymalloc requirement.
-        abis.append(f"cp{version}")
-    abis.insert(
-        0,
-        "cp{version}{debug}{pymalloc}{ucs4}".format(
-            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
-        ),
-    )
+        abis.append(f"cp{version}{threading}")
+    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
     return abis
 
 
@@ -212,11 +228,14 @@ def cpython_tags(
     for abi in abis:
         for platform_ in platforms:
             yield Tag(interpreter, abi, platform_)
-    if _abi3_applies(python_version):
+
+    threading = _is_threaded_cpython(abis)
+    use_abi3 = _abi3_applies(python_version, threading)
+    if use_abi3:
         yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
     yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
 
-    if _abi3_applies(python_version):
+    if use_abi3:
         for minor_version in range(python_version[1] - 1, 1, -1):
             for platform_ in platforms:
                 interpreter = "cp{version}".format(
@@ -406,7 +425,7 @@ def mac_platforms(
                 check=True,
                 env={"SYSTEM_VERSION_COMPAT": "0"},
                 stdout=subprocess.PIPE,
-                universal_newlines=True,
+                text=True,
             ).stdout
             version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
     else:
@@ -469,15 +488,21 @@ def mac_platforms(
 
 def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
     linux = _normalize_string(sysconfig.get_platform())
+    if not linux.startswith("linux_"):
+        # we should never be here, just yield the sysconfig one and return
+        yield linux
+        return
     if is_32bit:
         if linux == "linux_x86_64":
             linux = "linux_i686"
         elif linux == "linux_aarch64":
-            linux = "linux_armv7l"
+            linux = "linux_armv8l"
     _, arch = linux.split("_", 1)
-    yield from _manylinux.platform_tags(linux, arch)
-    yield from _musllinux.platform_tags(arch)
-    yield linux
+    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+    yield from _manylinux.platform_tags(archs)
+    yield from _musllinux.platform_tags(archs)
+    for arch in archs:
+        yield f"linux_{arch}"
 
 
 def _generic_platforms() -> Iterator[str]:
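
Putting the tags.py changes together for a free-threaded interpreter:
Py_GIL_DISABLED adds the "t" abiflag, _is_threaded_cpython() detects it from
the first ABI string, and _abi3_applies() then suppresses the abi3 tags.
Expected values on a hypothetical non-debug --disable-gil CPython 3.13:

    # _cpython_abis((3, 13))            -> ['cp313t']
    # _is_threaded_cpython(['cp313t'])  -> True
    # _abi3_applies((3, 13), True)      -> False, so no cp3xx-abi3-* tags
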
diff --git a/setuptools/_vendor/packaging/utils.py b/setuptools/_vendor/packaging/utils.py
index 33c613b749..c2c2f75aa8 100644
--- a/setuptools/_vendor/packaging/utils.py
+++ b/setuptools/_vendor/packaging/utils.py
@@ -12,6 +12,12 @@
 NormalizedName = NewType("NormalizedName", str)
 
 
+class InvalidName(ValueError):
+    """
+    An invalid distribution name; users should refer to the packaging user guide.
+    """
+
+
 class InvalidWheelFilename(ValueError):
     """
     An invalid wheel filename was found, users should refer to PEP 427.
@@ -24,17 +30,28 @@ class InvalidSdistFilename(ValueError):
     """
 
 
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
 _canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
 # PEP 427: The build number must start with a digit.
 _build_tag_regex = re.compile(r"(\d+)(.*)")
 
 
-def canonicalize_name(name: str) -> NormalizedName:
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+    if validate and not _validate_regex.match(name):
+        raise InvalidName(f"name is invalid: {name!r}")
     # This is taken from PEP 503.
     value = _canonicalize_regex.sub("-", name).lower()
     return cast(NormalizedName, value)
 
 
+def is_normalized_name(name: str) -> bool:
+    return _normalized_regex.match(name) is not None
+
+
 def canonicalize_version(
     version: Union[Version, str], *, strip_trailing_zero: bool = True
 ) -> str:
@@ -100,11 +117,18 @@ def parse_wheel_filename(
 
     parts = filename.split("-", dashes - 2)
     name_part = parts[0]
-    # See PEP 427 for the rules on escaping the project name
+    # See PEP 427 for the rules on escaping the project name.
     if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
         raise InvalidWheelFilename(f"Invalid project name: {filename}")
     name = canonicalize_name(name_part)
-    version = Version(parts[1])
+
+    try:
+        version = Version(parts[1])
+    except InvalidVersion as e:
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (invalid version): {filename}"
+        ) from e
+
     if dashes == 5:
         build_part = parts[2]
         build_match = _build_tag_regex.match(build_part)
@@ -137,5 +161,12 @@ def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
         raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
 
     name = canonicalize_name(name_part)
-    version = Version(version_part)
+
+    try:
+        version = Version(version_part)
+    except InvalidVersion as e:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (invalid version): {filename}"
+        ) from e
+
     return (name, version)
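
Expected behavior of the new name helpers, per the regexes above (checked here
against upstream packaging 24.0 rather than the vendored copy):

    from packaging.utils import InvalidName, canonicalize_name, is_normalized_name

    assert canonicalize_name("Foo._-Bar") == "foo-bar"  # PEP 503 folding
    assert is_normalized_name("foo-bar") is True
    assert is_normalized_name("foo--bar") is False      # '--' never survives normalization
    try:
        canonicalize_name("-leading", validate=True)
    except InvalidName:
        pass                                            # leading '-' fails validation
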
diff --git a/setuptools/_vendor/packaging/version.py b/setuptools/_vendor/packaging/version.py
index b30e8cbf84..5faab9bd0d 100644
--- a/setuptools/_vendor/packaging/version.py
+++ b/setuptools/_vendor/packaging/version.py
@@ -7,37 +7,39 @@
     from packaging.version import parse, Version
 """
 
-import collections
 import itertools
 import re
-from typing import Any, Callable, Optional, SupportsInt, Tuple, Union
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
 
 from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
 
 __all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
 
-InfiniteTypes = Union[InfinityType, NegativeInfinityType]
-PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
-SubLocalType = Union[InfiniteTypes, int, str]
-LocalType = Union[
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
     NegativeInfinityType,
-    Tuple[
-        Union[
-            SubLocalType,
-            Tuple[SubLocalType, str],
-            Tuple[NegativeInfinityType, SubLocalType],
-        ],
-        ...,
-    ],
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
 ]
 CmpKey = Tuple[
-    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
 ]
 VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
 
-_Version = collections.namedtuple(
-    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
-)
+
+class _Version(NamedTuple):
+    epoch: int
+    release: Tuple[int, ...]
+    dev: Optional[Tuple[str, int]]
+    pre: Optional[Tuple[str, int]]
+    post: Optional[Tuple[str, int]]
+    local: Optional[LocalType]
 
 
 def parse(version: str) -> "Version":
@@ -117,7 +119,7 @@ def __ne__(self, other: object) -> bool:
         (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
         (?P<pre>                                          # pre-release
             [-_\.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
             [-_\.]?
             (?P<pre_n>[0-9]+)?
             (?P[0-9]+)?
         )?
@@ -269,8 +271,7 @@ def epoch(self) -> int:
         >>> Version("1!2.0.0").epoch
         1
         """
-        _epoch: int = self._version.epoch
-        return _epoch
+        return self._version.epoch
 
     @property
     def release(self) -> Tuple[int, ...]:
@@ -286,8 +287,7 @@ def release(self) -> Tuple[int, ...]:
         Includes trailing zeroes but not the epoch or any pre-release / development /
         post-release suffixes.
         """
-        _release: Tuple[int, ...] = self._version.release
-        return _release
+        return self._version.release
 
     @property
     def pre(self) -> Optional[Tuple[str, int]]:
@@ -302,8 +302,7 @@ def pre(self) -> Optional[Tuple[str, int]]:
         >>> Version("1.2.3rc1").pre
         ('rc', 1)
         """
-        _pre: Optional[Tuple[str, int]] = self._version.pre
-        return _pre
+        return self._version.pre
 
     @property
     def post(self) -> Optional[int]:
@@ -451,7 +450,7 @@ def micro(self) -> int:
 
 
 def _parse_letter_version(
-    letter: str, number: Union[str, bytes, SupportsInt]
+    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
 ) -> Optional[Tuple[str, int]]:
 
     if letter:
@@ -489,7 +488,7 @@ def _parse_letter_version(
 _local_version_separators = re.compile(r"[\._-]")
 
 
-def _parse_local_version(local: str) -> Optional[LocalType]:
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
     """
     Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
     """
@@ -507,7 +506,7 @@ def _cmpkey(
     pre: Optional[Tuple[str, int]],
     post: Optional[Tuple[str, int]],
     dev: Optional[Tuple[str, int]],
-    local: Optional[Tuple[SubLocalType]],
+    local: Optional[LocalType],
 ) -> CmpKey:
 
     # When we compare a release version, we want to compare it with all of the
@@ -524,7 +523,7 @@ def _cmpkey(
     # if there is not a pre or a post segment. If we have one of those then
     # the normal sorting rules will handle this case correctly.
     if pre is None and post is None and dev is not None:
-        _pre: PrePostDevType = NegativeInfinity
+        _pre: CmpPrePostDevType = NegativeInfinity
     # Versions without a pre-release (except as noted above) should sort after
     # those with one.
     elif pre is None:
@@ -534,21 +533,21 @@ def _cmpkey(
 
     # Versions without a post segment should sort before those with one.
     if post is None:
-        _post: PrePostDevType = NegativeInfinity
+        _post: CmpPrePostDevType = NegativeInfinity
 
     else:
         _post = post
 
     # Versions without a development segment should sort after those with one.
     if dev is None:
-        _dev: PrePostDevType = Infinity
+        _dev: CmpPrePostDevType = Infinity
 
     else:
         _dev = dev
 
     if local is None:
         # Versions without a local segment should sort before those with one.
-        _local: LocalType = NegativeInfinity
+        _local: CmpLocalType = NegativeInfinity
     else:
         # Versions with a local segment need that segment parsed to implement
         # the sorting rules in PEP440.
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
index 592fe491a1..e67c7845c8 100644
--- a/setuptools/_vendor/vendored.txt
+++ b/setuptools/_vendor/vendored.txt
@@ -1,4 +1,4 @@
-packaging==23.1
+packaging==24
 ordered-set==3.1.1
 more_itertools==8.8.0
 jaraco.text==3.7.0

From b81e1a5c23e6f94631d5ee1c5accf8ab0f0d4a23 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 10 Mar 2024 14:51:21 +0100
Subject: [PATCH 0511/1761] Remove extra pairs of quotes from literal strings

Automated formatting runs such as #149 / ab77f7d left these extra quotes behind.
---
 distutils/tests/test_cygwinccompiler.py | 10 +++++-----
 distutils/tests/test_sysconfig.py       |  4 ++--
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/distutils/tests/test_cygwinccompiler.py b/distutils/tests/test_cygwinccompiler.py
index 0a66193d35..d95654f5a1 100644
--- a/distutils/tests/test_cygwinccompiler.py
+++ b/distutils/tests/test_cygwinccompiler.py
@@ -80,25 +80,25 @@ def test_get_msvcr(self):
 
         # MSVC 7.0
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1300 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1300 32 bits (Intel)]'
         )
         assert get_msvcr() == ['msvcr70']
 
         # MSVC 7.1
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1310 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1310 32 bits (Intel)]'
         )
         assert get_msvcr() == ['msvcr71']
 
         # VS2005 / MSVC 8.0
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1400 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1400 32 bits (Intel)]'
         )
         assert get_msvcr() == ['msvcr80']
 
         # VS2008 / MSVC 9.0
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1500 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1500 32 bits (Intel)]'
         )
         assert get_msvcr() == ['msvcr90']
 
@@ -110,7 +110,7 @@ def test_get_msvcr(self):
 
         # unknown
         sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.2000 32 bits (Intel)]'
+            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.2000 32 bits (Intel)]'
         )
         with pytest.raises(ValueError):
             get_msvcr()
diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index c55896661f..faa8e31c98 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -132,12 +132,12 @@ def test_customize_compiler(self):
         assert comp.exes['preprocessor'] == 'env_cpp --env-cppflags'
         assert comp.exes['compiler'] == 'env_cc --sc-cflags --env-cflags --env-cppflags'
         assert comp.exes['compiler_so'] == (
-            'env_cc --sc-cflags ' '--env-cflags ' '--env-cppflags --sc-ccshared'
+            'env_cc --sc-cflags --env-cflags --env-cppflags --sc-ccshared'
         )
         assert comp.exes['compiler_cxx'] == 'env_cxx --env-cxx-flags'
         assert comp.exes['linker_exe'] == 'env_cc'
         assert comp.exes['linker_so'] == (
-            'env_ldshared --env-ldflags --env-cflags' ' --env-cppflags'
+            'env_ldshared --env-ldflags --env-cflags --env-cppflags'
         )
         assert comp.shared_lib_extension == 'sc_shutil_suffix'
 

From ea55396cc4df42720b8557a13f4fd80283fc32e8 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 13 Apr 2024 11:46:42 +0200
Subject: [PATCH 0512/1761] Apply ruff/refurb rule (FURB105)

FURB105 Unnecessary empty string passed to `print`
---
 distutils/dist.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index f29a34faba..668ce7eb0a 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -647,7 +647,7 @@ def _show_help(
                 options = self.global_options
             parser.set_option_table(options)
             parser.print_help(self.common_usage + "\nGlobal options:")
-            print('')
+            print()
 
         if display_options:
             parser.set_option_table(self.display_options)
@@ -655,7 +655,7 @@ def _show_help(
                 "Information display options (just display "
                 + "information, ignore any commands)"
             )
-            print('')
+            print()
 
         for command in self.commands:
             if isinstance(command, type) and issubclass(command, Command):
@@ -669,7 +669,7 @@ def _show_help(
             else:
                 parser.set_option_table(klass.user_options)
             parser.print_help("Options for '%s' command:" % klass.__name__)
-            print('')
+            print()
 
         print(gen_usage(self.script_name))
 
@@ -686,7 +686,7 @@ def handle_display_options(self, option_order):
         # we ignore "foo bar").
         if self.help_commands:
             self.print_commands()
-            print('')
+            print()
             print(gen_usage(self.script_name))
             return 1
 

From 0d6794fdc2987703982f7d0e89123fffc9bbda79 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 13 Apr 2024 11:48:29 +0200
Subject: [PATCH 0513/1761] Apply ruff/refurb rule (FURB129)

FURB129 Instead of calling `readlines()`, iterate over file object directly
---
 distutils/tests/test_msvc9compiler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/tests/test_msvc9compiler.py b/distutils/tests/test_msvc9compiler.py
index 58e24f017a..6f6aabee4d 100644
--- a/distutils/tests/test_msvc9compiler.py
+++ b/distutils/tests/test_msvc9compiler.py
@@ -161,7 +161,7 @@ def test_remove_visual_c_ref(self):
         f = open(manifest)
         try:
             # removing trailing spaces
-            content = '\n'.join([line.rstrip() for line in f.readlines()])
+            content = '\n'.join([line.rstrip() for line in f])
         finally:
             f.close()
 

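For context on FURB129: a text-mode file object is already an iterator over
its lines, so readlines() only adds an intermediate list. A minimal
before/after sketch ("manifest.xml" is a stand-in filename):

    # Before: readlines() materializes every line as a list first
    with open("manifest.xml") as f:
        content = '\n'.join(line.rstrip() for line in f.readlines())

    # After: iterate the file object directly
    with open("manifest.xml") as f:
        content = '\n'.join(line.rstrip() for line in f)
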
From bfadc24bc9c120a6feae918cea5a9d80453cc8c6 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 13 Apr 2024 11:50:52 +0200
Subject: [PATCH 0514/1761] Apply ruff/refurb rule (FURB142)

FURB142 Use of `set.add()` in a for loop
---
 distutils/dir_util.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 2021bed82e..8a3aca6521 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -95,8 +95,7 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
     """
     # First get the list of directories to create
     need_dir = set()
-    for file in files:
-        need_dir.add(os.path.join(base_dir, os.path.dirname(file)))
+    need_dir.update(os.path.join(base_dir, os.path.dirname(file)) for file in files)
 
     # Now create them
     for dir in sorted(need_dir):

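FURB142 in isolation: a loop whose body only calls set.add() can feed a single
set.update() with a generator expression. A sketch with hypothetical inputs:

    import os

    base_dir = 'build'
    files = ['pkg/a.py', 'pkg/sub/b.py']

    need_dir = set()
    need_dir.update(os.path.join(base_dir, os.path.dirname(f)) for f in files)
    # need_dir == {'build/pkg', 'build/pkg/sub'} on POSIX
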
From ec303d5963920fb8e6fce5919615fcffb0c93fe5 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 13 Apr 2024 11:53:21 +0200
Subject: [PATCH 0515/1761] Apply ruff/refurb rule (FURB140)

FURB140 Use `itertools.starmap` instead of the generator
---
 distutils/unixccompiler.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index a1fe2b57a2..caf4cd338e 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -389,10 +389,7 @@ def find_library_file(self, dirs, lib, debug=0):
 
         roots = map(self._library_root, dirs)
 
-        searched = (
-            os.path.join(root, lib_name)
-            for root, lib_name in itertools.product(roots, lib_names)
-        )
+        searched = itertools.starmap(os.path.join, itertools.product(roots, lib_names))
 
         found = filter(os.path.exists, searched)
 

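And FURB140 in isolation: when a generator expression only unpacks each tuple
to pass it to one function, itertools.starmap expresses that directly. A
sketch with hypothetical inputs:

    import itertools
    import os

    roots = ['/usr/lib', '/usr/local/lib']
    lib_names = ['libfoo.so', 'libfoo.a']

    searched = itertools.starmap(os.path.join, itertools.product(roots, lib_names))
    # equivalent to:
    # (os.path.join(root, name) for root, name in itertools.product(roots, lib_names))
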
From df45427cbb67c1149fcf5d2d1e2705e69b3baf0c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 09:10:21 -0400
Subject: [PATCH 0516/1761] Remove attempt to canonicalize the version. It's
 already canonical enough.

Closes #4302
Closes #3593
---
 newsfragments/4302.bugfix.rst             | 1 +
 setuptools/_core_metadata.py              | 4 ++--
 setuptools/tests/test_config_discovery.py | 6 ++----
 3 files changed, 5 insertions(+), 6 deletions(-)
 create mode 100644 newsfragments/4302.bugfix.rst

diff --git a/newsfragments/4302.bugfix.rst b/newsfragments/4302.bugfix.rst
new file mode 100644
index 0000000000..666549bcab
--- /dev/null
+++ b/newsfragments/4302.bugfix.rst
@@ -0,0 +1 @@
+Remove attempt to canonicalize the version. It's already canonical enough.
\ No newline at end of file
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index d8732c49bb..9b4f38ded2 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -17,7 +17,7 @@
 from . import _normalization, _reqs
 from .extern.packaging.markers import Marker
 from .extern.packaging.requirements import Requirement
-from .extern.packaging.utils import canonicalize_name, canonicalize_version
+from .extern.packaging.utils import canonicalize_name
 from .extern.packaging.version import Version
 from .warnings import SetuptoolsDeprecationWarning
 
@@ -264,5 +264,5 @@ def _write_provides_extra(file, processed_extras, safe, unsafe):
 def get_fullname(self):
     return "{}-{}".format(
         canonicalize_name(self.get_name()).replace('-', '_'),
-        canonicalize_version(self.get_version()),
+        self.get_version(),
     )
diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py
index e1e67ffe11..ff9e672b68 100644
--- a/setuptools/tests/test_config_discovery.py
+++ b/setuptools/tests/test_config_discovery.py
@@ -255,7 +255,7 @@ def test_py_modules_when_wheel_dir_is_cwd(self, tmp_path):
 
 
 class TestNoConfig:
-    CANONICAL_DEFAULT_VERSION = "0"  # Canonical default version given by setuptools
+    DEFAULT_VERSION = "0.0.0"  # Default version given by setuptools
 
     EXAMPLES = {
         "pkg1": ["src/pkg1.py"],
@@ -277,9 +277,7 @@ def test_build_with_discovered_name(self, tmp_path):
         _populate_project_dir(tmp_path, files, {})
         _run_build(tmp_path, "--sdist")
         # Expected distribution file
-        dist_file = (
-            tmp_path / f"dist/ns_nested_pkg-{self.CANONICAL_DEFAULT_VERSION}.tar.gz"
-        )
+        dist_file = tmp_path / f"dist/ns_nested_pkg-{self.DEFAULT_VERSION}.tar.gz"
         assert dist_file.is_file()
 
 

From 5fc21f6bda88648c021e45d6e7e5e5229293d561 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 09:13:00 -0400
Subject: [PATCH 0517/1761] =?UTF-8?q?Bump=20version:=2069.3.0=20=E2=86=92?=
 =?UTF-8?q?=2069.3.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4302.bugfix.rst | 1 -
 setup.cfg                     | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4302.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index a76d5b66d7..d9cfd1ad7c 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 69.3.0
+current_version = 69.3.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 7822ec6325..8a45a961eb 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v69.3.1
+=======
+
+Bugfixes
+--------
+
+- Remove attempt to canonicalize the version. It's already canonical enough. (#4302)
+
+
 v69.3.0
 =======
 
diff --git a/newsfragments/4302.bugfix.rst b/newsfragments/4302.bugfix.rst
deleted file mode 100644
index 666549bcab..0000000000
--- a/newsfragments/4302.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Remove attempt to canonicalize the version. It's already canonical enough.
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
index bab3efa52c..78b9166b85 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = setuptools
-version = 69.3.0
+version = 69.3.1
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
 description = Easily download, build, install, upgrade, and uninstall Python packages

From d4affe01ceb1fa4ed4c51f21473dd4c77d764d70 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 09:17:42 -0400
Subject: [PATCH 0518/1761] =?UTF-8?q?Bump=20version:=2069.4.0=20=E2=86=92?=
 =?UTF-8?q?=2069.4.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg | 2 +-
 NEWS.rst         | 6 ++++++
 setup.cfg        | 2 +-
 3 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 007a8ec0f5..09a7b690f0 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 69.4.0
+current_version = 69.4.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index e01087fc2f..fc213d160d 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,9 @@
+v69.4.1
+=======
+
+No significant changes.
+
+
 v69.3.1
 =======
 
diff --git a/setup.cfg b/setup.cfg
index 02078f7466..a579bf5ff7 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = setuptools
-version = 69.4.0
+version = 69.4.1
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
 description = Easily download, build, install, upgrade, and uninstall Python packages

From 5d9e57fd3b529505d765f6806ef0c8dc1e239acd Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 09:18:48 -0400
Subject: [PATCH 0519/1761] =?UTF-8?q?Bump=20version:=2069.4.1=20=E2=86=92?=
 =?UTF-8?q?=2069.5.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 10 ++++++++++
 newsfragments/4253.feature.rst |  1 -
 newsfragments/4301.feature.rst |  1 -
 setup.cfg                      |  2 +-
 5 files changed, 12 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4253.feature.rst
 delete mode 100644 newsfragments/4301.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 09a7b690f0..f12875d186 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 69.4.1
+current_version = 69.5.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index fc213d160d..b2eb9bb62a 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,13 @@
+v69.5.0
+=======
+
+Features
+--------
+
+- Refresh unpinned vendored dependencies. (#4253)
+- Updated vendored packaging to version 24.0. (#4301)
+
+
 v69.4.1
 =======
 
diff --git a/newsfragments/4253.feature.rst b/newsfragments/4253.feature.rst
deleted file mode 100644
index acc51ea4bd..0000000000
--- a/newsfragments/4253.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Refresh unpinned vendored dependencies.
\ No newline at end of file
diff --git a/newsfragments/4301.feature.rst b/newsfragments/4301.feature.rst
deleted file mode 100644
index 28ceb2a689..0000000000
--- a/newsfragments/4301.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Updated vendored packaging to version 24.0.
diff --git a/setup.cfg b/setup.cfg
index a579bf5ff7..62a759e54d 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = setuptools
-version = 69.4.1
+version = 69.5.0
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
 description = Easily download, build, install, upgrade, and uninstall Python packages

From b26678d5cc10ae3e97a1025a6298d75cd4ec9ab6 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 09:23:53 -0400
Subject: [PATCH 0520/1761] Revert the canonicalization of the version. Ref
 pypa/setuptools#3593.

---
 distutils/dist.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index f4eb6e8958..fb996e4d95 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -12,7 +12,7 @@
 import logging
 from email import message_from_file
 
-from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
+from ._vendor.packaging.utils import canonicalize_name
 
 try:
     import warnings
@@ -1193,7 +1193,7 @@ def get_version(self):
     def get_fullname(self):
         return "{}-{}".format(
             canonicalize_name(self.get_name()).replace('-', '_'),
-            canonicalize_version(self.get_version()),
+            self.get_version(),
         )
 
     def get_author(self):

From c28f347e93b82fb2e81cb40f284bead6ac3ee095 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 09:26:43 -0400
Subject: [PATCH 0521/1761] Revert "Update tests to match new expectation."

This reverts commit be32fecc787c3de8c292638c5004a8bcf92dc540.
---
 distutils/tests/test_sdist.py | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py
index 359325d5dc..9856193f71 100644
--- a/distutils/tests/test_sdist.py
+++ b/distutils/tests/test_sdist.py
@@ -118,9 +118,9 @@ def test_prune_file_list(self):
         # now let's check what we have
         dist_folder = join(self.tmp_dir, 'dist')
         files = os.listdir(dist_folder)
-        assert files == ['ns_fake_pkg-1.zip']
+        assert files == ['ns_fake_pkg-1.0.zip']
 
-        zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.zip'))
+        zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.0.zip'))
         try:
             content = zip_file.namelist()
         finally:
@@ -135,7 +135,7 @@ def test_prune_file_list(self):
             'somecode/',
             'somecode/__init__.py',
         ]
-        assert sorted(content) == ['ns_fake_pkg-1/' + x for x in expected]
+        assert sorted(content) == ['ns_fake_pkg-1.0/' + x for x in expected]
 
     @pytest.mark.usefixtures('needs_zlib')
     @pytest.mark.skipif("not find_executable('tar')")
@@ -153,10 +153,10 @@ def test_make_distribution(self):
         dist_folder = join(self.tmp_dir, 'dist')
         result = os.listdir(dist_folder)
         result.sort()
-        assert result == ['ns_fake_pkg-1.tar', 'ns_fake_pkg-1.tar.gz']
+        assert result == ['ns_fake_pkg-1.0.tar', 'ns_fake_pkg-1.0.tar.gz']
 
-        os.remove(join(dist_folder, 'ns_fake_pkg-1.tar'))
-        os.remove(join(dist_folder, 'ns_fake_pkg-1.tar.gz'))
+        os.remove(join(dist_folder, 'ns_fake_pkg-1.0.tar'))
+        os.remove(join(dist_folder, 'ns_fake_pkg-1.0.tar.gz'))
 
         # now trying a tar then a gztar
         cmd.formats = ['tar', 'gztar']
@@ -166,7 +166,7 @@ def test_make_distribution(self):
 
         result = os.listdir(dist_folder)
         result.sort()
-        assert result == ['ns_fake_pkg-1.tar', 'ns_fake_pkg-1.tar.gz']
+        assert result == ['ns_fake_pkg-1.0.tar', 'ns_fake_pkg-1.0.tar.gz']
 
     @pytest.mark.usefixtures('needs_zlib')
     def test_add_defaults(self):
@@ -219,9 +219,9 @@ def test_add_defaults(self):
         # now let's check what we have
         dist_folder = join(self.tmp_dir, 'dist')
         files = os.listdir(dist_folder)
-        assert files == ['ns_fake_pkg-1.zip']
+        assert files == ['ns_fake_pkg-1.0.zip']
 
-        zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.zip'))
+        zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.0.zip'))
         try:
             content = zip_file.namelist()
         finally:
@@ -247,7 +247,7 @@ def test_add_defaults(self):
             'somecode/doc.dat',
             'somecode/doc.txt',
         ]
-        assert sorted(content) == ['ns_fake_pkg-1/' + x for x in expected]
+        assert sorted(content) == ['ns_fake_pkg-1.0/' + x for x in expected]
 
         # checking the MANIFEST
         manifest = pathlib.Path(self.tmp_dir, 'MANIFEST').read_text(encoding='utf-8')
@@ -420,16 +420,16 @@ def test_manual_manifest(self):
 
         assert list(clean_lines(cmd.manifest)) == ['README.manual']
 
-        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.tar.gz')
+        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
         archive = tarfile.open(archive_name)
         try:
             filenames = [tarinfo.name for tarinfo in archive]
         finally:
             archive.close()
         assert sorted(filenames) == [
-            'ns_fake_pkg-1',
-            'ns_fake_pkg-1/PKG-INFO',
-            'ns_fake_pkg-1/README.manual',
+            'ns_fake_pkg-1.0',
+            'ns_fake_pkg-1.0/PKG-INFO',
+            'ns_fake_pkg-1.0/README.manual',
         ]
 
     @pytest.mark.usefixtures('needs_zlib')
@@ -449,7 +449,7 @@ def test_make_distribution_owner_group(self):
         cmd.run()
 
         # making sure we have the good rights
-        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.tar.gz')
+        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
         archive = tarfile.open(archive_name)
         try:
             for member in archive.getmembers():
@@ -467,7 +467,7 @@ def test_make_distribution_owner_group(self):
         cmd.run()
 
         # making sure we have the good rights
-        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.tar.gz')
+        archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
         archive = tarfile.open(archive_name)
 
         # note that we are not testing the group ownership here

From 8b9f35e00549615b43793efd3c90f75739b55abf Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 09:31:27 -0400
Subject: [PATCH 0522/1761] Construct the set in one expression.

---
 distutils/dir_util.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 8a3aca6521..370c6ffd49 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -94,8 +94,7 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
     'dry_run' flags are as for 'mkpath()'.
     """
     # First get the list of directories to create
-    need_dir = set()
-    need_dir.update(os.path.join(base_dir, os.path.dirname(file)) for file in files)
+    need_dir = set(os.path.join(base_dir, os.path.dirname(file)) for file in files)
 
     # Now create them
     for dir in sorted(need_dir):

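An equivalent, arguably even more idiomatic, spelling is a set comprehension;
this is a side note, not what the patch chose:

    import os

    base_dir, files = 'build', ['pkg/a.py', 'pkg/sub/b.py']
    need_dir = {os.path.join(base_dir, os.path.dirname(file)) for file in files}
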
From a04913a51327c64f807e85119fd750485bbceb0a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 13:33:48 -0400
Subject: [PATCH 0523/1761] Add type declaration for
 runtime_library_dir_option, making explicit the different return types one
 might expect.

---
 distutils/unixccompiler.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index caf4cd338e..a54481c01b 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -13,6 +13,8 @@
   * link shared library handled by 'cc -shared'
 """
 
+from __future__ import annotations
+
 import itertools
 import os
 import re
@@ -281,7 +283,7 @@ def _is_gcc(self):
         compiler = os.path.basename(shlex.split(cc_var)[0])
         return "gcc" in compiler or "g++" in compiler
 
-    def runtime_library_dir_option(self, dir):
+    def runtime_library_dir_option(self, dir: str) -> str | list[str]:
         # XXX Hackish, at the very least.  See Python bug #445902:
         # https://bugs.python.org/issue445902
         # Linkers on different platforms need different options to

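The `from __future__ import annotations` line is what makes the PEP 604 union
legal here on older interpreters: with it, annotations are stored as strings
and never evaluated, so `str | list[str]` parses fine even on Python 3.8,
where evaluating that expression at runtime would raise TypeError. A minimal
sketch:

    from __future__ import annotations  # annotations become lazy strings

    def runtime_library_dir_option(dir: str) -> str | list[str]:
        ...  # the annotation above is never evaluated at runtime
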
From d2581bf30b6cfaa64f8b570b368a6f4ed5a710ff Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 13:47:03 -0400
Subject: [PATCH 0524/1761] Add 'consolidate_linker_args' wrapper to protect
 the old behavior for now.

Closes pypa/distutils#246.
---
 distutils/compat/__init__.py          | 15 +++++++++++++++
 distutils/compat/py38.py              | 23 +++++++++++++++++++++++
 distutils/tests/test_unixccompiler.py | 17 +++++++++--------
 distutils/unixccompiler.py            |  5 +++--
 4 files changed, 50 insertions(+), 10 deletions(-)
 create mode 100644 distutils/compat/__init__.py
 create mode 100644 distutils/compat/py38.py

diff --git a/distutils/compat/__init__.py b/distutils/compat/__init__.py
new file mode 100644
index 0000000000..b7be72678f
--- /dev/null
+++ b/distutils/compat/__init__.py
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+from .py38 import removeprefix
+
+
+def consolidate_linker_args(args: list[str]) -> str:
+    """
+    Ensure the return value is a string for backward compatibility.
+
+    Retain until at least 2024-10-31.
+    """
+
+    if not all(arg.startswith('-Wl,') for arg in args):
+        return args
+    return '-Wl,' + ','.join(removeprefix(arg, '-Wl,') for arg in args)
diff --git a/distutils/compat/py38.py b/distutils/compat/py38.py
new file mode 100644
index 0000000000..0af3814017
--- /dev/null
+++ b/distutils/compat/py38.py
@@ -0,0 +1,23 @@
+import sys
+
+if sys.version_info < (3, 9):
+
+    def removesuffix(self, suffix):
+        # suffix='' should not call self[:-0].
+        if suffix and self.endswith(suffix):
+            return self[: -len(suffix)]
+        else:
+            return self[:]
+
+    def removeprefix(self, prefix):
+        if self.startswith(prefix):
+            return self[len(prefix) :]
+        else:
+            return self[:]
+else:
+
+    def removesuffix(self, suffix):
+        return self.removesuffix(suffix)
+
+    def removeprefix(self, prefix):
+        return self.removeprefix(prefix)
diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index f17edf2f6b..6f05fa6989 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -4,6 +4,7 @@
 import sys
 import unittest.mock as mock
 from distutils import sysconfig
+from distutils.compat import consolidate_linker_args
 from distutils.errors import DistutilsPlatformError
 from distutils.unixccompiler import UnixCCompiler
 from distutils.util import _clear_cached_macosx_ver
@@ -149,10 +150,10 @@ def gcv(v):
                 return 'yes'
 
         sysconfig.get_config_var = gcv
-        assert self.cc.rpath_foo() == [
+        assert self.cc.rpath_foo() == consolidate_linker_args([
             '-Wl,--enable-new-dtags',
             '-Wl,-rpath,/foo',
-        ]
+        ])
 
         def gcv(v):
             if v == 'CC':
@@ -161,10 +162,10 @@ def gcv(v):
                 return 'yes'
 
         sysconfig.get_config_var = gcv
-        assert self.cc.rpath_foo() == [
+        assert self.cc.rpath_foo() == consolidate_linker_args([
             '-Wl,--enable-new-dtags',
             '-Wl,-rpath,/foo',
-        ]
+        ])
 
         # GCC non-GNULD
         sys.platform = 'bar'
@@ -189,10 +190,10 @@ def gcv(v):
                 return 'yes'
 
         sysconfig.get_config_var = gcv
-        assert self.cc.rpath_foo() == [
+        assert self.cc.rpath_foo() == consolidate_linker_args([
             '-Wl,--enable-new-dtags',
             '-Wl,-rpath,/foo',
-        ]
+        ])
 
         # non-GCC GNULD
         sys.platform = 'bar'
@@ -204,10 +205,10 @@ def gcv(v):
                 return 'yes'
 
         sysconfig.get_config_var = gcv
-        assert self.cc.rpath_foo() == [
+        assert self.cc.rpath_foo() == consolidate_linker_args([
             '-Wl,--enable-new-dtags',
             '-Wl,-rpath,/foo',
-        ]
+        ])
 
         # non-GCC non-GNULD
         sys.platform = 'bar'
diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index a54481c01b..0248bde87b 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -22,6 +22,7 @@
 import sys
 
 from . import sysconfig
+from .compat import consolidate_linker_args
 from ._log import log
 from ._macos_compat import compiler_fixup
 from ._modified import newer
@@ -315,11 +316,11 @@ def runtime_library_dir_option(self, dir: str) -> str | list[str]:
         # For all compilers, `-Wl` is the presumed way to pass a
         # compiler option to the linker
         if sysconfig.get_config_var("GNULD") == "yes":
-            return [
+            return consolidate_linker_args([
                 # Force RUNPATH instead of RPATH
                 "-Wl,--enable-new-dtags",
                 "-Wl,-rpath," + dir,
-            ]
+            ])
         else:
             return "-Wl,-R" + dir
 

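Concretely, the wrapper restores the pre-change return shape for the common
all-"-Wl," case and passes anything else through untouched. Expected behavior
per the code above:

    from distutils.compat import consolidate_linker_args

    consolidate_linker_args(['-Wl,--enable-new-dtags', '-Wl,-rpath,/foo'])
    # -> '-Wl,--enable-new-dtags,-rpath,/foo'  (one consolidated string)

    consolidate_linker_args(['-L/opt/lib', '-Wl,-rpath,/foo'])
    # -> ['-L/opt/lib', '-Wl,-rpath,/foo']     (mixed flags returned unchanged)
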
From 98eee7f74c93fb84226d18f370f883956e644619 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 14:03:03 -0400
Subject: [PATCH 0525/1761] Exclude compat package from coverage.

---
 .coveragerc | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.coveragerc b/.coveragerc
index 35b98b1df9..bcef31d957 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,6 +2,9 @@
 omit =
 	# leading `*/` for pytest-dev/pytest-cov#456
 	*/.tox/*
+
+	# local
+	*/compat/*
 disable_warnings =
 	couldnt-parse
 

From ef297f26182823d54acfe3719416aa2661706b29 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 16:40:21 -0400
Subject: [PATCH 0526/1761] Extend the retention of the compatibility.

---
 distutils/compat/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/compat/__init__.py b/distutils/compat/__init__.py
index b7be72678f..b1ee3fe8b0 100644
--- a/distutils/compat/__init__.py
+++ b/distutils/compat/__init__.py
@@ -7,7 +7,7 @@ def consolidate_linker_args(args: list[str]) -> str:
     """
     Ensure the return value is a string for backward compatibility.
 
-    Retain until at least 2024-10-31.
+    Retain until at least 2025-04-31. See pypa/distutils#246
     """
 
     if not all(arg.startswith('-Wl,') for arg in args):

From f07b037161c9640e4518c5f71e78af49a478d5b2 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 16:47:30 -0400
Subject: [PATCH 0527/1761] Add news fragment.

---
 newsfragments/+27489545.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/+27489545.bugfix.rst

diff --git a/newsfragments/+27489545.bugfix.rst b/newsfragments/+27489545.bugfix.rst
new file mode 100644
index 0000000000..83ed1520be
--- /dev/null
+++ b/newsfragments/+27489545.bugfix.rst
@@ -0,0 +1 @@
+Merged bugfix for pypa/distutils#246
\ No newline at end of file

From 5de8e14572713629991f3097e3c3bc197a8d4890 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 16:47:55 -0400
Subject: [PATCH 0528/1761] =?UTF-8?q?Bump=20version:=2069.4.1=20=E2=86=92?=
 =?UTF-8?q?=2069.4.2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg                   | 2 +-
 NEWS.rst                           | 9 +++++++++
 newsfragments/+27489545.bugfix.rst | 1 -
 setup.cfg                          | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/+27489545.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 09a7b690f0..0570d58bb9 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 69.4.1
+current_version = 69.4.2
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index fc213d160d..c4aa039229 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v69.4.2
+=======
+
+Bugfixes
+--------
+
+- Merged bugfix for pypa/distutils#246 (#27489545)
+
+
 v69.4.1
 =======
 
diff --git a/newsfragments/+27489545.bugfix.rst b/newsfragments/+27489545.bugfix.rst
deleted file mode 100644
index 83ed1520be..0000000000
--- a/newsfragments/+27489545.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Merged bugfix for pypa/distutils#246
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
index a579bf5ff7..c51168c71b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = setuptools
-version = 69.4.1
+version = 69.4.2
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
 description = Easily download, build, install, upgrade, and uninstall Python packages

From ff58075cdf3459ecdf73486d2a83cecdd70c7e4a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 16:49:55 -0400
Subject: [PATCH 0529/1761] =?UTF-8?q?Bump=20version:=2069.5.0=20=E2=86=92?=
 =?UTF-8?q?=2069.5.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg | 2 +-
 NEWS.rst         | 6 ++++++
 setup.cfg        | 2 +-
 3 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index f12875d186..557ae0ce34 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 69.5.0
+current_version = 69.5.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 9fa3ade1fa..08e28ecc28 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,9 @@
+v69.5.1
+=======
+
+No significant changes.
+
+
 v69.4.2
 =======
 
diff --git a/setup.cfg b/setup.cfg
index 62a759e54d..f7479e047f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = setuptools
-version = 69.5.0
+version = 69.5.1
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
 description = Easily download, build, install, upgrade, and uninstall Python packages

From 486a8afea286d4d67e5038b58bf4452ccfeebd69 Mon Sep 17 00:00:00 2001
From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
Date: Sun, 14 Apr 2024 03:06:34 -0600
Subject: [PATCH 0530/1761] NEWS: Put releases in numerical order

---
 NEWS.rst | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/NEWS.rst b/NEWS.rst
index 08e28ecc28..9bf82560cc 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -4,23 +4,23 @@ v69.5.1
 No significant changes.
 
 
-v69.4.2
+v69.5.0
 =======
 
-Bugfixes
+Features
 --------
 
-- Merged bugfix for pypa/distutils#246 (#27489545)
+- Refresh unpinned vendored dependencies. (#4253)
+- Updated vendored packaging to version 24.0. (#4301)
 
 
-v69.5.0
+v69.4.2
 =======
 
-Features
+Bugfixes
 --------
 
-- Refresh unpinned vendored dependencies. (#4253)
-- Updated vendored packaging to version 24.0. (#4301)
+- Merged bugfix for pypa/distutils#246 (#27489545)
 
 
 v69.4.1
@@ -29,22 +29,22 @@ v69.4.1
 No significant changes.
 
 
-v69.3.1
+v69.4.0
 =======
 
-Bugfixes
+Features
 --------
 
-- Remove attempt to canonicalize the version. It's already canonical enough. (#4302)
+- Merged with pypa/distutils@55982565e, including interoperability improvements for rfc822_escape (pypa/distutils#213), dynamic resolution of config_h_filename for Python 3.13 compatibility (pypa/distutils#219), added support for the z/OS compiler (pypa/distutils#216), modernized compiler options in unixcompiler (pypa/distutils#214), fixed accumulating flags bug after compile/link (pypa/distutils#207), fixed encoding warnings (pypa/distutils#236), and general quality improvements (pypa/distutils#234). (#4298)
 
 
-v69.4.0
+v69.3.1
 =======
 
-Features
+Bugfixes
 --------
 
-- Merged with pypa/distutils@55982565e, including interoperability improvements for rfc822_escape (pypa/distutils#213), dynamic resolution of config_h_filename for Python 3.13 compatibility (pypa/distutils#219), added support for the z/OS compiler (pypa/distutils#216), modernized compiler options in unixcompiler (pypa/distutils#214), fixed accumulating flags bug after compile/link (pypa/distutils#207), fixed encoding warnings (pypa/distutils#236), and general quality improvements (pypa/distutils#234). (#4298)
+- Remove attempt to canonicalize the version. It's already canonical enough. (#4302)
 
 
 v69.3.0

From 9698925c4f6d5cf59d182dcc73682a07cb16b924 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sat, 16 Mar 2024 12:16:34 -0400
Subject: [PATCH 0531/1761] Deduplicate testing dependencies by dropping
 testing-integration

---
 newsfragments/4282.misc.rst |  1 +
 setup.cfg                   | 15 +--------------
 tox.ini                     |  2 +-
 3 files changed, 3 insertions(+), 15 deletions(-)
 create mode 100644 newsfragments/4282.misc.rst

diff --git a/newsfragments/4282.misc.rst b/newsfragments/4282.misc.rst
new file mode 100644
index 0000000000..841d1b292c
--- /dev/null
+++ b/newsfragments/4282.misc.rst
@@ -0,0 +1 @@
+Removed the ``setuptools[testing-integration]`` extra in favor of ``setuptools[testing]`` -- by :user:`Avasam`
diff --git a/setup.cfg b/setup.cfg
index f7479e047f..214964fa98 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -60,7 +60,7 @@ testing =
 	jaraco.envs>=2.2
 	pytest-xdist>=3 # Dropped dependency on pytest-fork and py
 	jaraco.path>=3.2.0
-	build[virtualenv]
+	build[virtualenv]>=1.0.3
 	filelock>=3.4.0
 	ini2toml[lite]>=0.9
 	tomli-w>=1.0.0
@@ -77,19 +77,6 @@ testing =
 	# No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly
 	importlib_metadata
 
-testing-integration =
-	pytest
-	pytest-xdist
-	pytest-enabler
-	virtualenv>=13.0.0
-	tomli
-	wheel
-	jaraco.path>=3.2.0
-	jaraco.envs>=2.2
-	build[virtualenv]>=1.0.3
-	filelock>=3.4.0
-	packaging>=23.2
-
 docs =
 	# upstream
 	sphinx >= 3.5
diff --git a/tox.ini b/tox.ini
index 8815a697ab..fa4864f27b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -23,7 +23,7 @@ pass_env =
 
 [testenv:integration]
 deps = {[testenv]deps}
-extras = testing-integration
+extras = testing
 pass_env =
 	{[testenv]pass_env}
 	DOWNLOAD_PATH

From 3b6781d1d980d7ce16caacf3310c9f418b1feb56 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 20 Mar 2024 15:40:32 +0000
Subject: [PATCH 0532/1761] Update tox.ini

---
 tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index fa4864f27b..7412730008 100644
--- a/tox.ini
+++ b/tox.ini
@@ -23,7 +23,7 @@ pass_env =
 
 [testenv:integration]
 deps = {[testenv]deps}
-extras = testing
+extras = {[testenv]extras}
 pass_env =
 	{[testenv]pass_env}
 	DOWNLOAD_PATH

From 3ca45b7374ca8262b71e8197aba28f5580ab9550 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 20 Mar 2024 09:19:25 -0700
Subject: [PATCH 0533/1761] Ignore 'import-not-found' for _validate_pyproject

---
 mypy.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypy.ini b/mypy.ini
index ee12ebb193..45671826b1 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -32,5 +32,5 @@ ignore_missing_imports = True
 # - pkg_resources tests create modules that won't exists statically before the test is run.
 #   Let's ignore all "import-not-found" since, if an import really wasn't found, then the test would fail.
 # - setuptools._vendor.packaging._manylinux: Mypy issue, this vendored module is already excluded!
-[mypy-pkg_resources.tests.*,setuptools._vendor.packaging._manylinux]
+[mypy-pkg_resources.tests.*,setuptools._vendor.packaging._manylinux,setuptools.config._validate_pyproject.*]
 disable_error_code = import-not-found

From 6c118beac827d233e0d5af76a1555092f631ce70 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 16 Apr 2024 12:02:47 +0100
Subject: [PATCH 0534/1761] Disable plugins that already run on normal tests
 when running integration

---
 conftest.py | 29 +++++++++++++++++++++++------
 1 file changed, 23 insertions(+), 6 deletions(-)

diff --git a/conftest.py b/conftest.py
index 90b653146f..328d45d351 100644
--- a/conftest.py
+++ b/conftest.py
@@ -24,6 +24,7 @@ def pytest_addoption(parser):
 def pytest_configure(config):
     config.addinivalue_line("markers", "integration: integration tests")
     config.addinivalue_line("markers", "uses_network: tests may try to download files")
+    _IntegrationTestSpeedups.disable_plugins_already_run(config)
 
 
 collect_ignore = [
@@ -47,9 +48,25 @@ def pytest_configure(config):
 
 @pytest.fixture(autouse=True)
 def _skip_integration(request):
-    running_integration_tests = request.config.getoption("--integration")
-    is_integration_test = request.node.get_closest_marker("integration")
-    if running_integration_tests and not is_integration_test:
-        pytest.skip("running integration tests only")
-    if not running_integration_tests and is_integration_test:
-        pytest.skip("skipping integration tests")
+    _IntegrationTestSpeedups.conditional_skip(request)
+
+
+class _IntegrationTestSpeedups:
+    """Speed-up integration tests by only running what does not run in other tests."""
+
+    RUNS_ON_NORMAL_TESTS = ("checkdocks", "cov", "mypy", "perf", "ruff")
+
+    @classmethod
+    def disable_plugins_already_run(cls, config):
+        if config.getoption("--integration"):
+            for plugin in cls.RUNS_ON_NORMAL_TESTS:  # no need to run again
+                config.pluginmanager.set_blocked(plugin)
+
+    @staticmethod
+    def conditional_skip(request):
+        running_integration_tests = request.config.getoption("--integration")
+        is_integration_test = request.node.get_closest_marker("integration")
+        if running_integration_tests and not is_integration_test:
+            pytest.skip("running integration tests only")
+        if not running_integration_tests and is_integration_test:
+            pytest.skip("skipping integration tests")

From 7ad4f2fa9fb2b030d3ecc231fc24de181705622d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 16 Apr 2024 10:31:45 -0400
Subject: [PATCH 0535/1761] Pin against pytest 8.1.x due to
 pytest-dev/pytest#12194.

---
 setup.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.cfg b/setup.cfg
index f46b6cbff4..05ac4c7620 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -20,7 +20,7 @@ install_requires =
 [options.extras_require]
 testing =
 	# upstream
-	pytest >= 6, != 8.1.1
+	pytest >= 6, != 8.1.*
 	pytest-checkdocs >= 2.4
 	pytest-cov
 	pytest-mypy

From 0e98638760ca714fef90b2cb0f361024e9ec570c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?=
 =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?=
 =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= 
Date: Tue, 16 Apr 2024 17:44:37 +0200
Subject: [PATCH 0536/1761] Fix the 608de826 commit reference in changelog

Previously, the orphaned filename was used, causing the current Sphinx setup to link to a nonexistent issue.
---
 NEWS.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEWS.rst b/NEWS.rst
index 9bf82560cc..a0714ab7d2 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -20,7 +20,7 @@ v69.4.2
 Bugfixes
 --------
 
-- Merged bugfix for pypa/distutils#246 (#27489545)
+- Merged bugfix for pypa/distutils#246 (pypa/setuptools@608de826)
 
 
 v69.4.1

From 6067fa41a3d1dfa8255ce571c9789153bb03630a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 16 Apr 2024 14:14:14 -0400
Subject: [PATCH 0537/1761] Removed meaningless reference from 69.4.2 release
 notes.

---
 NEWS.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEWS.rst b/NEWS.rst
index a0714ab7d2..20c6903a33 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -20,7 +20,7 @@ v69.4.2
 Bugfixes
 --------
 
-- Merged bugfix for pypa/distutils#246 (pypa/setuptools@608de826)
+- Merged bugfix for pypa/distutils#246.
 
 
 v69.4.1

From f4529af6a66e34d423860566be7882d665e10569 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bartosz=20S=C5=82awecki?= 
Date: Tue, 16 Apr 2024 22:37:50 +0200
Subject: [PATCH 0538/1761] Move project metadata to `pyproject.toml`
 (jaraco/skeleton#122)

Intentionally omitted specifying `tool.setuptools.include-package-data`: it's true by default in `pyproject.toml` according to https://setuptools.pypa.io/en/latest/userguide/datafiles.html#include-package-data.

Closes jaraco/skeleton#121
---
 pyproject.toml | 49 ++++++++++++++++++++++++++++++++++++++++++++++++-
 setup.cfg      | 42 ------------------------------------------
 2 files changed, 48 insertions(+), 43 deletions(-)
 delete mode 100644 setup.cfg

diff --git a/pyproject.toml b/pyproject.toml
index a853c5789e..869fe7e5ba 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,52 @@
 [build-system]
-requires = ["setuptools>=56", "setuptools_scm[toml]>=3.4.1"]
+requires = ["setuptools>=61.2", "setuptools_scm[toml]>=3.4.1"]
 build-backend = "setuptools.build_meta"
 
+[project]
+name = "PROJECT"
+authors = [
+	{ name = "Jason R. Coombs", email = "jaraco@jaraco.com" },
+]
+description = "PROJECT_DESCRIPTION"
+readme = "README.rst"
+classifiers = [
+	"Development Status :: 5 - Production/Stable",
+	"Intended Audience :: Developers",
+	"License :: OSI Approved :: MIT License",
+	"Programming Language :: Python :: 3",
+	"Programming Language :: Python :: 3 :: Only",
+]
+requires-python = ">=3.8"
+dependencies = [
+]
+dynamic = ["version"]
+
+[project.optional-dependencies]
+testing = [
+	# upstream
+	"pytest >= 6, != 8.1.*",
+	"pytest-checkdocs >= 2.4",
+	"pytest-cov",
+	"pytest-mypy",
+	"pytest-enabler >= 2.2",
+	"pytest-ruff >= 0.2.1",
+
+	# local
+]
+docs = [
+	# upstream
+	"sphinx >= 3.5",
+	"jaraco.packaging >= 9.3",
+	"rst.linker >= 1.9",
+	"furo",
+	"sphinx-lint",
+
+	# local
+]
+
+[project.urls]
+Homepage = "https://github.com/PROJECT_PATH"
+
+[project.scripts]
+
 [tool.setuptools_scm]
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 05ac4c7620..0000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,42 +0,0 @@
-[metadata]
-name = PROJECT
-author = Jason R. Coombs
-author_email = jaraco@jaraco.com
-description = PROJECT_DESCRIPTION
-long_description = file:README.rst
-url = https://github.com/PROJECT_PATH
-classifiers =
-	Development Status :: 5 - Production/Stable
-	Intended Audience :: Developers
-	License :: OSI Approved :: MIT License
-	Programming Language :: Python :: 3
-	Programming Language :: Python :: 3 :: Only
-
-[options]
-include_package_data = true
-python_requires = >=3.8
-install_requires =
-
-[options.extras_require]
-testing =
-	# upstream
-	pytest >= 6, != 8.1.*
-	pytest-checkdocs >= 2.4
-	pytest-cov
-	pytest-mypy
-	pytest-enabler >= 2.2
-	pytest-ruff >= 0.2.1
-
-	# local
-
-docs =
-	# upstream
-	sphinx >= 3.5
-	jaraco.packaging >= 9.3
-	rst.linker >= 1.9
-	furo
-	sphinx-lint
-
-	# local
-
-[options.entry_points]

From 9296481c5777b1b4c71bc4ca8c1a4d77866f073f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Apr 2024 16:38:22 -0400
Subject: [PATCH 0539/1761] Migrated config to pyproject.toml using
 jaraco.develop.migrate-config and ini2toml.

---
 pyproject.toml | 166 +++++++++++++++++++++++++++++++++++++++++++++++++
 setup.cfg      | 163 ------------------------------------------------
 2 files changed, 166 insertions(+), 163 deletions(-)
 delete mode 100644 setup.cfg

diff --git a/pyproject.toml b/pyproject.toml
index 58aacd9fe3..f7ba3029e3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,4 +3,170 @@ requires = []
 build-backend = "setuptools.build_meta"
 backend-path = ["."]
 
+[project]
+name = "setuptools"
+version = "69.5.1"
+authors = [
+	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
+]
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+readme = "README.rst"
+classifiers = [
+	"Development Status :: 5 - Production/Stable",
+	"Intended Audience :: Developers",
+	"License :: OSI Approved :: MIT License",
+	"Programming Language :: Python :: 3",
+	"Programming Language :: Python :: 3 :: Only",
+	"Topic :: Software Development :: Libraries :: Python Modules",
+	"Topic :: System :: Archiving :: Packaging",
+	"Topic :: System :: Systems Administration",
+	"Topic :: Utilities",
+]
+keywords = ["CPAN PyPI distutils eggs package management"]
+requires-python = ">=3.8"
+dependencies = []
+
+[project.urls]
+Homepage = "https://github.com/pypa/setuptools"
+Documentation = "https://setuptools.pypa.io/"
+Changelog = "https://setuptools.pypa.io/en/stable/history.html"
+
+[project.optional-dependencies]
+testing = [
+	# upstream
+	"pytest >= 6, != 8.1.1",
+	"pytest-checkdocs >= 2.4",
+	'pytest-cov; python_implementation != "PyPy"', # coverage seems to make PyPy extremely slow
+	"pytest-mypy",
+	"pytest-enabler >= 2.2",
+	# workaround for pypa/setuptools#3921
+	'pytest-ruff >= 0.2.1; sys_platform != "cygwin"',
+
+	# local
+	"virtualenv>=13.0.0",
+	"wheel",
+	"pip>=19.1", # For proper file:// URLs support.
+	"packaging>=23.2",
+	"jaraco.envs>=2.2",
+	"pytest-xdist>=3", # Dropped dependency on pytest-fork and py
+	"jaraco.path>=3.2.0",
+	"build[virtualenv]>=1.0.3",
+	"filelock>=3.4.0",
+	"ini2toml[lite]>=0.9",
+	"tomli-w>=1.0.0",
+	"pytest-timeout",
+	'pytest-perf; sys_platform != "cygwin"', # workaround for jaraco/inflect#195, pydantic/pydantic-core#773 (see #3986)
+	# for tools/finalize.py
+	'jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin"',
+	"pytest-home >= 0.5",
+	"mypy==1.9", # pin mypy version so a new version doesn't suddenly cause the CI to fail
+	# No Python 3.11 dependencies require tomli, but needed for type-checking since we import it directly
+	"tomli",
+	# No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly
+	"importlib_metadata",
+]
+docs = [
+	# upstream
+	"sphinx >= 3.5",
+	"jaraco.packaging >= 9.3",
+	"rst.linker >= 1.9",
+	"furo",
+	"sphinx-lint",
+
+	# tidelift
+	"jaraco.tidelift >= 1.4",
+
+	# local
+	"pygments-github-lexers==0.0.5",
+	"sphinx-favicon",
+	"sphinx-inline-tabs",
+	"sphinx-reredirects",
+	"sphinxcontrib-towncrier",
+	"sphinx-notfound-page >=1,<2",
+]
+ssl = []
+certs = []
+
+[project.entry-points."distutils.commands"]
+alias = "setuptools.command.alias:alias"
+bdist_egg = "setuptools.command.bdist_egg:bdist_egg"
+bdist_rpm = "setuptools.command.bdist_rpm:bdist_rpm"
+build = "setuptools.command.build:build"
+build_clib = "setuptools.command.build_clib:build_clib"
+build_ext = "setuptools.command.build_ext:build_ext"
+build_py = "setuptools.command.build_py:build_py"
+develop = "setuptools.command.develop:develop"
+dist_info = "setuptools.command.dist_info:dist_info"
+easy_install = "setuptools.command.easy_install:easy_install"
+editable_wheel = "setuptools.command.editable_wheel:editable_wheel"
+egg_info = "setuptools.command.egg_info:egg_info"
+install = "setuptools.command.install:install"
+install_egg_info = "setuptools.command.install_egg_info:install_egg_info"
+install_lib = "setuptools.command.install_lib:install_lib"
+install_scripts = "setuptools.command.install_scripts:install_scripts"
+rotate = "setuptools.command.rotate:rotate"
+saveopts = "setuptools.command.saveopts:saveopts"
+sdist = "setuptools.command.sdist:sdist"
+setopt = "setuptools.command.setopt:setopt"
+test = "setuptools.command.test:test"
+upload_docs = "setuptools.command.upload_docs:upload_docs"
+
+[project.entry-points."setuptools.finalize_distribution_options"]
+parent_finalize = "setuptools.dist:_Distribution.finalize_options"
+keywords = "setuptools.dist:Distribution._finalize_setup_keywords"
+
+[project.entry-points."distutils.setup_keywords"]
+eager_resources = "setuptools.dist:assert_string_list"
+namespace_packages = "setuptools.dist:check_nsp"
+extras_require = "setuptools.dist:check_extras"
+install_requires = "setuptools.dist:check_requirements"
+tests_require = "setuptools.dist:check_requirements"
+setup_requires = "setuptools.dist:check_requirements"
+python_requires = "setuptools.dist:check_specifier"
+entry_points = "setuptools.dist:check_entry_points"
+test_suite = "setuptools.dist:check_test_suite"
+zip_safe = "setuptools.dist:assert_bool"
+package_data = "setuptools.dist:check_package_data"
+exclude_package_data = "setuptools.dist:check_package_data"
+include_package_data = "setuptools.dist:assert_bool"
+packages = "setuptools.dist:check_packages"
+dependency_links = "setuptools.dist:assert_string_list"
+test_loader = "setuptools.dist:check_importable"
+test_runner = "setuptools.dist:check_importable"
+use_2to3 = "setuptools.dist:invalid_unless_false"
+
+[project.entry-points."egg_info.writers"]
+PKG-INFO = "setuptools.command.egg_info:write_pkg_info"
+"requires.txt" = "setuptools.command.egg_info:write_requirements"
+"entry_points.txt" = "setuptools.command.egg_info:write_entries"
+"eager_resources.txt" = "setuptools.command.egg_info:overwrite_arg"
+"namespace_packages.txt" = "setuptools.command.egg_info:overwrite_arg"
+"top_level.txt" = "setuptools.command.egg_info:write_toplevel_names"
+"dependency_links.txt" = "setuptools.command.egg_info:overwrite_arg"
+
+[tool.setuptools]
+# disabled as it causes tests to be included #2505
+# include_package_data = true
+include-package-data = false
+
+[tool.setuptools.packages.find]
+exclude = [
+	"*.tests",
+	"*.tests.*",
+	"tools*",
+	"debian*",
+	"launcher*",
+	"newsfragments*",
+	"docs",
+	"docs.*",
+]
+namespaces = true
+
+[tool.distutils.egg_info]
+tag-build = ".post"
+tag-date = 1
+
+[tool.distutils.sdist]
+formats = "zip"
+
 [tool.setuptools_scm]
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 214964fa98..0000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,163 +0,0 @@
-[metadata]
-name = setuptools
-version = 69.5.1
-author = Python Packaging Authority
-author_email = distutils-sig@python.org
-description = Easily download, build, install, upgrade, and uninstall Python packages
-long_description = file:README.rst
-url = https://github.com/pypa/setuptools
-classifiers =
-	Development Status :: 5 - Production/Stable
-	Intended Audience :: Developers
-	License :: OSI Approved :: MIT License
-	Programming Language :: Python :: 3
-	Programming Language :: Python :: 3 :: Only
-	Topic :: Software Development :: Libraries :: Python Modules
-	Topic :: System :: Archiving :: Packaging
-	Topic :: System :: Systems Administration
-	Topic :: Utilities
-keywords = CPAN PyPI distutils eggs package management
-project_urls =
-	Documentation = https://setuptools.pypa.io/
-	Changelog = https://setuptools.pypa.io/en/stable/history.html
-
-[options]
-packages = find_namespace:
-# disabled as it causes tests to be included #2505
-# include_package_data = true
-python_requires = >=3.8
-install_requires =
-
-[options.packages.find]
-exclude =
-	*.tests
-	*.tests.*
-	tools*
-	debian*
-	launcher*
-	newsfragments*
-	docs
-	docs.*
-
-[options.extras_require]
-testing =
-	# upstream
-	pytest >= 6, != 8.1.1
-	pytest-checkdocs >= 2.4
-	pytest-cov; \
-		# coverage seems to make PyPy extremely slow
-		python_implementation != "PyPy"
-	pytest-mypy
-	pytest-enabler >= 2.2
-	# workaround for pypa/setuptools#3921
-	pytest-ruff >= 0.2.1; sys_platform != "cygwin"
-
-	# local
-	virtualenv>=13.0.0
-	wheel
-	pip>=19.1 # For proper file:// URLs support.
-	packaging>=23.2
-	jaraco.envs>=2.2
-	pytest-xdist>=3 # Dropped dependency on pytest-fork and py
-	jaraco.path>=3.2.0
-	build[virtualenv]>=1.0.3
-	filelock>=3.4.0
-	ini2toml[lite]>=0.9
-	tomli-w>=1.0.0
-	pytest-timeout
-	pytest-perf; \
-		# workaround for jaraco/inflect#195, pydantic/pydantic-core#773 (see #3986)
-		sys_platform != "cygwin"
-	# for tools/finalize.py
-	jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin"
-	pytest-home >= 0.5
-	mypy==1.9  # pin mypy version so a new version doesn't suddenly cause the CI to fail
-	# No Python 3.11 dependencies require tomli, but needed for type-checking since we import it directly
-	tomli
-	# No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly
-	importlib_metadata
-
-docs =
-	# upstream
-	sphinx >= 3.5
-	jaraco.packaging >= 9.3
-	rst.linker >= 1.9
-	furo
-	sphinx-lint
-
-	# tidelift
-	jaraco.tidelift >= 1.4
-
-	# local
-	pygments-github-lexers==0.0.5
-	sphinx-favicon
-	sphinx-inline-tabs
-	sphinx-reredirects
-	sphinxcontrib-towncrier
-	sphinx-notfound-page >=1,<2
-
-ssl =
-
-certs =
-
-[options.entry_points]
-distutils.commands =
-	alias = setuptools.command.alias:alias
-	bdist_egg = setuptools.command.bdist_egg:bdist_egg
-	bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
-	build = setuptools.command.build:build
-	build_clib = setuptools.command.build_clib:build_clib
-	build_ext = setuptools.command.build_ext:build_ext
-	build_py = setuptools.command.build_py:build_py
-	develop = setuptools.command.develop:develop
-	dist_info = setuptools.command.dist_info:dist_info
-	easy_install = setuptools.command.easy_install:easy_install
-	editable_wheel = setuptools.command.editable_wheel:editable_wheel
-	egg_info = setuptools.command.egg_info:egg_info
-	install = setuptools.command.install:install
-	install_egg_info = setuptools.command.install_egg_info:install_egg_info
-	install_lib = setuptools.command.install_lib:install_lib
-	install_scripts = setuptools.command.install_scripts:install_scripts
-	rotate = setuptools.command.rotate:rotate
-	saveopts = setuptools.command.saveopts:saveopts
-	sdist = setuptools.command.sdist:sdist
-	setopt = setuptools.command.setopt:setopt
-	test = setuptools.command.test:test
-	upload_docs = setuptools.command.upload_docs:upload_docs
-setuptools.finalize_distribution_options =
-	parent_finalize = setuptools.dist:_Distribution.finalize_options
-	keywords = setuptools.dist:Distribution._finalize_setup_keywords
-distutils.setup_keywords =
-	eager_resources = setuptools.dist:assert_string_list
-	namespace_packages = setuptools.dist:check_nsp
-	extras_require = setuptools.dist:check_extras
-	install_requires = setuptools.dist:check_requirements
-	tests_require = setuptools.dist:check_requirements
-	setup_requires = setuptools.dist:check_requirements
-	python_requires = setuptools.dist:check_specifier
-	entry_points = setuptools.dist:check_entry_points
-	test_suite = setuptools.dist:check_test_suite
-	zip_safe = setuptools.dist:assert_bool
-	package_data = setuptools.dist:check_package_data
-	exclude_package_data = setuptools.dist:check_package_data
-	include_package_data = setuptools.dist:assert_bool
-	packages = setuptools.dist:check_packages
-	dependency_links = setuptools.dist:assert_string_list
-	test_loader = setuptools.dist:check_importable
-	test_runner = setuptools.dist:check_importable
-	use_2to3 = setuptools.dist:invalid_unless_false
-egg_info.writers =
-	PKG-INFO = setuptools.command.egg_info:write_pkg_info
-	requires.txt = setuptools.command.egg_info:write_requirements
-	entry_points.txt = setuptools.command.egg_info:write_entries
-	eager_resources.txt = setuptools.command.egg_info:overwrite_arg
-	namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
-	top_level.txt = setuptools.command.egg_info:write_toplevel_names
-	dependency_links.txt = setuptools.command.egg_info:overwrite_arg
-
-[egg_info]
-tag_build = .post
-tag_date = 1
-
-[sdist]
-formats = zip

From 42d1b3ceaaeeaa01eef8ad1e3a883a5324c6275b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Apr 2024 06:33:06 -0400
Subject: [PATCH 0540/1761] Mark tests as xfail.

All are marked as xfail even though only two are failing. As far as I know, there's no easy way to mark only some of the parametrized tests as xfail without splitting the parameters and selecting individual cases, so just mark the whole bunch.

Ref #4315
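
For reference, a minimal sketch (hypothetical test name and URLs, not
part of this change) of the splitting-and-selecting that marking only
individual cases would require, via pytest.param:

    import pytest

    @pytest.mark.parametrize(
        "url",
        [
            "https://example.com/works/setup.cfg",  # passing case
            pytest.param(
                "https://example.com/fails/setup.cfg",  # failing case
                marks=pytest.mark.xfail(reason="#4315"),
            ),
        ],
    )
    def test_url_example(url):
        assert url.endswith("setup.cfg")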
---
 setuptools/tests/config/test_apply_pyprojecttoml.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 2ca35759bc..27e57b27c7 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -37,6 +37,7 @@ def makedist(path, **attrs):
     return Distribution({"src_root": path, **attrs})
 
 
+@pytest.mark.xfail(reason="#4315")
 @pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE))
 @pytest.mark.filterwarnings("ignore")
 @pytest.mark.uses_network

From 12ab7d85b74ce299d983e29cb4caff68818731dd Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 20 Apr 2024 20:20:03 +0100
Subject: [PATCH 0541/1761] Make test_apply_pyprojecttoml more deterministic
 with new version of ini2toml

---
 setup.cfg                                           | 2 +-
 setuptools/tests/config/test_apply_pyprojecttoml.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index 214964fa98..c8bb0ed41d 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -62,7 +62,7 @@ testing =
 	jaraco.path>=3.2.0
 	build[virtualenv]>=1.0.3
 	filelock>=3.4.0
-	ini2toml[lite]>=0.9
+	ini2toml[lite]>=0.14
 	tomli-w>=1.0.0
 	pytest-timeout
 	pytest-perf; \
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 27e57b27c7..1b9fd6b683 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -14,7 +14,7 @@
 from zipfile import ZipFile
 
 import pytest
-from ini2toml.api import Translator
+from ini2toml.api import LiteTranslator
 
 from packaging.metadata import Metadata
 
@@ -46,7 +46,7 @@ def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
     setupcfg_example = retrieve_file(url)
     pyproject_example = Path(tmp_path, "pyproject.toml")
     setupcfg_text = setupcfg_example.read_text(encoding="utf-8")
-    toml_config = Translator().translate(setupcfg_text, "setup.cfg")
+    toml_config = LiteTranslator().translate(setupcfg_text, "setup.cfg")
     pyproject_example.write_text(toml_config, encoding="utf-8")
 
     dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example)

From fb6dc4de0ca47dc4356dc25ac63ea187f2ad8f5a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 20 Apr 2024 20:26:59 +0100
Subject: [PATCH 0542/1761] Remove solved xfail in
 test_apply_pyproject_equivalent_to_setupcfg

---
 setuptools/tests/config/test_apply_pyprojecttoml.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 1b9fd6b683..bb78f64310 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -37,7 +37,6 @@ def makedist(path, **attrs):
     return Distribution({"src_root": path, **attrs})
 
 
-@pytest.mark.xfail(reason="#4315")
 @pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE))
 @pytest.mark.filterwarnings("ignore")
 @pytest.mark.uses_network

From 8837459281c144cd5bbec7a43aaf5e0a727c0efa Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 16 Apr 2024 19:52:26 +0100
Subject: [PATCH 0543/1761] Add sanity check for 'build/lib/build/lib' when
 creating distribution fixtures

---
 setuptools/tests/fixtures.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index 629daf93d4..1b8f520e84 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -77,6 +77,10 @@ def setuptools_sdist(tmp_path_factory, request):
         if dist:
             return dist
 
+        # Sanity check
+        # Building should not create recursive `setuptools/build/lib/build/lib/...`
+        assert not Path(request.config.rootdir, "build/lib/build").exists()
+
         subprocess.check_output([
             sys.executable,
             "-m",
@@ -86,6 +90,11 @@ def setuptools_sdist(tmp_path_factory, request):
             str(tmp),
             str(request.config.rootdir),
         ])
+
+        # Sanity check
+        # Building should not create recursive `setuptools/build/lib/build/lib/...`
+        assert not Path(request.config.rootdir, "build/lib/build").exists()
+
         return next(tmp.glob("*.tar.gz"))
 
 
@@ -102,6 +111,10 @@ def setuptools_wheel(tmp_path_factory, request):
         if dist:
             return dist
 
+        # Sanity check
+        # Building should not create recursive `setuptools/build/lib/build/lib/...`
+        assert not Path(request.config.rootdir, "build/lib/build").exists()
+
         subprocess.check_output([
             sys.executable,
             "-m",
@@ -111,6 +124,11 @@ def setuptools_wheel(tmp_path_factory, request):
             str(tmp),
             str(request.config.rootdir),
         ])
+
+        # Sanity check
+        # Building should not create recursive `setuptools/build/lib/build/lib/...`
+        assert not Path(request.config.rootdir, "build/lib/build").exists()
+
         return next(tmp.glob("*.whl"))
 
 

From f1ecea0486c8ea6fcf388b48ef0aae08e5fd8feb Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 16 Apr 2024 20:04:04 +0100
Subject: [PATCH 0544/1761] Prevent the setuptools_wheel fixture from creating
 recursively nested build/lib/build/lib/... directories

Based on the test introduced in b4d3e83f0, we can see that when neither
`PRE_BUILT_SETUPTOOLS_SDIST` nor `PRE_BUILT_SETUPTOOLS_WHEEL` is set,
the `setuptools_wheel` fixture keeps recursively creating
`build/lib/build/lib/...` directories, which slows down the tests and
creates a huge number of unnecessary files.

This change addresses that.
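
A minimal sketch of the idea (the helper name is hypothetical; assumes
the 'build' package is installed): a single 'python -m build' invocation
without --sdist/--wheel builds the sdist first and then builds the wheel
from that sdist in an isolated tree, so a stray 'build/' directory in
the project root cannot leak into the wheel.

    from __future__ import annotations

    import subprocess
    import sys
    from pathlib import Path

    def build_both(project_dir: Path, out_dir: Path) -> tuple[Path, Path]:
        # Without --sdist/--wheel, 'build' produces the sdist and then
        # builds the wheel from that sdist, not from the source tree.
        subprocess.check_call(
            [sys.executable, "-m", "build", "--outdir", str(out_dir), str(project_dir)]
        )
        return next(out_dir.glob("*.tar.gz")), next(out_dir.glob("*.whl"))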
---
 setuptools/tests/fixtures.py | 63 +++++++++++++-----------------------
 1 file changed, 23 insertions(+), 40 deletions(-)

diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index 1b8f520e84..a2870f11e1 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -63,73 +63,56 @@ def sample_project(tmp_path):
 # sdist and wheel artifacts should be stable across a round of tests
 # so we can build them once per session and use the files as "readonly"
 
+# In the case of setuptools, building the wheel without sdist may cause
+# it to contain the `build` directory, and therefore create situations with
+# `setuptools/build/lib/build/lib/...`. To avoid that, build both artifacts at once.
 
-@pytest.fixture(scope="session")
-def setuptools_sdist(tmp_path_factory, request):
-    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")
-    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
-        return Path(prebuilt).resolve()
 
+def _build_distributions(tmp_path_factory, request):
     with contexts.session_locked_tmp_dir(
-        request, tmp_path_factory, "sdist_build"
+        request, tmp_path_factory, "dist_build"
     ) as tmp:  # pragma: no cover
-        dist = next(tmp.glob("*.tar.gz"), None)
-        if dist:
-            return dist
+        sdist = next(tmp.glob("*.tar.gz"), None)
+        wheel = next(tmp.glob("*.whl"), None)
+        if sdist and wheel:
+            return (sdist, wheel)
 
-        # Sanity check
-        # Building should not create recursive `setuptools/build/lib/build/lib/...`
+        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
         assert not Path(request.config.rootdir, "build/lib/build").exists()
 
         subprocess.check_output([
             sys.executable,
             "-m",
             "build",
-            "--sdist",
             "--outdir",
             str(tmp),
             str(request.config.rootdir),
         ])
 
-        # Sanity check
-        # Building should not create recursive `setuptools/build/lib/build/lib/...`
+        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
         assert not Path(request.config.rootdir, "build/lib/build").exists()
 
-        return next(tmp.glob("*.tar.gz"))
+        return next(tmp.glob("*.tar.gz")), next(tmp.glob("*.whl"))
 
 
 @pytest.fixture(scope="session")
-def setuptools_wheel(tmp_path_factory, request):
-    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")
+def setuptools_sdist(tmp_path_factory, request):
+    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")
     if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
         return Path(prebuilt).resolve()
 
-    with contexts.session_locked_tmp_dir(
-        request, tmp_path_factory, "wheel_build"
-    ) as tmp:  # pragma: no cover
-        dist = next(tmp.glob("*.whl"), None)
-        if dist:
-            return dist
+    sdist, _ = _build_distributions(tmp_path_factory, request)
+    return sdist
 
-        # Sanity check
-        # Building should not create recursive `setuptools/build/lib/build/lib/...`
-        assert not Path(request.config.rootdir, "build/lib/build").exists()
 
-        subprocess.check_output([
-            sys.executable,
-            "-m",
-            "build",
-            "--wheel",
-            "--outdir",
-            str(tmp),
-            str(request.config.rootdir),
-        ])
-
-        # Sanity check
-        # Building should not create recursive `setuptools/build/lib/build/lib/...`
-        assert not Path(request.config.rootdir, "build/lib/build").exists()
+@pytest.fixture(scope="session")
+def setuptools_wheel(tmp_path_factory, request):
+    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")
+    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
+        return Path(prebuilt).resolve()
 
-        return next(tmp.glob("*.whl"))
+    _, wheel = _build_distributions(tmp_path_factory, request)
+    return wheel
 
 
 @pytest.fixture

From 14ce35000023749b41015c7b2884080be3803768 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 16 Apr 2024 20:17:06 +0100
Subject: [PATCH 0545/1761] Add news fragment

---
 newsfragments/4308.misc.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4308.misc.rst

diff --git a/newsfragments/4308.misc.rst b/newsfragments/4308.misc.rst
new file mode 100644
index 0000000000..6c43f6338e
--- /dev/null
+++ b/newsfragments/4308.misc.rst
@@ -0,0 +1,2 @@
+Fix ``setuptools_wheel`` fixture and avoid the recursive creation of
+``build/lib/build/lib/build/...`` directories in the project root during tests.

From 153d75eaa3f2009d0a2e5d47a729428c24fc8913 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 16 Apr 2024 19:25:12 +0100
Subject: [PATCH 0546/1761] Refactor _TopLevelFinder so it is easier to test

---
 setuptools/command/editable_wheel.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 4d21e2253f..1167346069 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -505,7 +505,7 @@ def __init__(self, dist: Distribution, name: str):
         self.dist = dist
         self.name = name
 
-    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+    def template_vars(self) -> Tuple[str, str, Dict[str, str], Dict[str, List[str]]]:
         src_root = self.dist.src_root or os.curdir
         top_level = chain(_find_packages(self.dist), _find_top_level_modules(self.dist))
         package_dir = self.dist.package_dir or {}
@@ -519,7 +519,7 @@ def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]
         )
 
         legacy_namespaces = {
-            pkg: find_package_path(pkg, roots, self.dist.src_root or "")
+            cast(str, pkg): find_package_path(pkg, roots, self.dist.src_root or "")
             for pkg in self.dist.namespace_packages or []
         }
 
@@ -530,11 +530,20 @@ def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]
 
         name = f"__editable__.{self.name}.finder"
         finder = _normalization.safe_identifier(name)
+        return finder, name, mapping, namespaces_
+
+    def get_implementation(self) -> Iterator[Tuple[str, bytes]]:
+        finder, name, mapping, namespaces_ = self.template_vars()
+
         content = bytes(_finder_template(name, mapping, namespaces_), "utf-8")
-        wheel.writestr(f"{finder}.py", content)
+        yield (f"{finder}.py", content)
 
         content = _encode_pth(f"import {finder}; {finder}.install()")
-        wheel.writestr(f"__editable__.{self.name}.pth", content)
+        yield (f"__editable__.{self.name}.pth", content)
+
+    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+        for file, content in self.get_implementation():
+            wheel.writestr(file, content)
 
     def __enter__(self):
         msg = "Editable install will be performed using a meta path finder.\n"

From a39d3623f2d9088a6914cfa833838ceac182af57 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 16 Apr 2024 19:25:48 +0100
Subject: [PATCH 0547/1761] Add test for issue 4248

---
 setuptools/tests/test_editable_install.py | 44 +++++++++++++++++++++++
 1 file changed, 44 insertions(+)

diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index 1df09fd256..5da4fccefa 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -24,6 +24,7 @@
 from setuptools.command.editable_wheel import (
     _DebuggingTips,
     _LinkTree,
+    _TopLevelFinder,
     _encode_pth,
     _find_virtual_namespaces,
     _find_namespaces,
@@ -530,6 +531,49 @@ def test_combine_namespaces(self, tmp_path):
             assert pkgA.a == 13
             assert mod2.b == 37
 
+    def test_combine_namespaces_nested(self, tmp_path):
+        """
+        Users may attempt to combine namespace packages in a nested way via
+        ``package_dir`` as shown in pypa/setuptools#4248.
+        """
+
+        files = {
+            "src": {"my_package": {"my_module.py": "a = 13"}},
+            "src2": {"my_package2": {"my_module2.py": "b = 37"}},
+        }
+
+        stack = jaraco.path.DirectoryStack()
+        with stack.context(tmp_path):
+            jaraco.path.build(files)
+            attrs = {
+                "script_name": "%PEP 517%",
+                "package_dir": {
+                    "different_name": "src/my_package",
+                    "different_name.subpkg": "src2/my_package2",
+                },
+                "packages": ["different_name", "different_name.subpkg"],
+            }
+            dist = Distribution(attrs)
+            finder = _TopLevelFinder(dist, str(uuid4()))
+            code = next(v for k, v in finder.get_implementation() if k.endswith(".py"))
+
+        with contexts.save_paths(), contexts.save_sys_modules():
+            for mod in attrs["packages"]:
+                sys.modules.pop(mod, None)
+
+            self.install_finder(code)
+            mod1 = import_module("different_name.my_module")
+            mod2 = import_module("different_name.subpkg.my_module2")
+
+            expected = str((tmp_path / "src/my_package/my_module.py").resolve())
+            assert str(Path(mod1.__file__).resolve()) == expected
+
+            expected = str((tmp_path / "src2/my_package2/my_module2.py").resolve())
+            assert str(Path(mod2.__file__).resolve()) == expected
+
+            assert mod1.a == 13
+            assert mod2.b == 37
+
     def test_dynamic_path_computation(self, tmp_path):
         # Follows the example in PEP 420
         files = {

From 19b63d1b81cd0544e2718f82c482efdc99742ef8 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 7 Mar 2024 02:46:09 +0000
Subject: [PATCH 0548/1761] Fix PathEntryFinder / MetaPathFinder in
 editable_wheel

It seems that the import machinery skips the PathEntryFinder when trying
to locate nested namespaces if the `sys.path_hooks` entry corresponding
to the finder cannot be located in the submodule search locations of the
parent namespace.

This means that we should probably always add the PATH_PLACEHOLDER to
the namespace spec.

This PR also adds some type hints to the template, because they helped
to debug some type errors.
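
A minimal sketch of the mechanism (names are hypothetical, not the
actual finder template): a PathEntryFinder is only consulted for nested
lookups when an entry of the parent's __path__ matches a hook registered
in sys.path_hooks, which is why the placeholder has to stay in the
spec's submodule search locations.

    import sys
    from importlib.machinery import ModuleSpec

    NAMESPACES = {"demo_ns": []}        # hypothetical namespace table
    PLACEHOLDER = "demo.__path_hook__"  # never a real directory

    class DemoNamespaceFinder:  # PathEntryFinder
        @classmethod
        def _path_hook(cls, path):
            # sys.path_hooks callables get each __path__ entry in turn;
            # only claim the placeholder entry.
            if path == PLACEHOLDER:
                return cls
            raise ImportError

        @classmethod
        def find_spec(cls, fullname, target=None):
            if fullname not in NAMESPACES:
                return None
            spec = ModuleSpec(fullname, None, is_package=True)
            # Keeping the placeholder in __path__ routes nested lookups
            # back through this finder.
            spec.submodule_search_locations = [*NAMESPACES[fullname], PLACEHOLDER]
            return spec

    sys.path_hooks.insert(0, DemoNamespaceFinder._path_hook)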
---
 setuptools/command/editable_wheel.py | 32 +++++++++++++++++-----------
 1 file changed, 19 insertions(+), 13 deletions(-)

diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 1167346069..1722817f82 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -793,6 +793,7 @@ def _get_root(self):
 
 
 _FINDER_TEMPLATE = """\
+from __future__ import annotations
 import sys
 from importlib.machinery import ModuleSpec, PathFinder
 from importlib.machinery import all_suffixes as module_suffixes
@@ -800,16 +801,14 @@ def _get_root(self):
 from itertools import chain
 from pathlib import Path
 
-MAPPING = {mapping!r}
-NAMESPACES = {namespaces!r}
+MAPPING: dict[str, str] = {mapping!r}
+NAMESPACES: dict[str, list[str]] = {namespaces!r}
 PATH_PLACEHOLDER = {name!r} + ".__path_hook__"
 
 
 class _EditableFinder:  # MetaPathFinder
     @classmethod
-    def find_spec(cls, fullname, path=None, target=None):
-        extra_path = []
-
+    def find_spec(cls, fullname: str, _path=None, _target=None) -> ModuleSpec | None:
         # Top-level packages and modules (we know these exist in the FS)
         if fullname in MAPPING:
             pkg_path = MAPPING[fullname]
@@ -820,35 +819,42 @@ def find_spec(cls, fullname, path=None, target=None):
         # to the importlib.machinery implementation.
         parent, _, child = fullname.rpartition(".")
         if parent and parent in MAPPING:
-            return PathFinder.find_spec(fullname, path=[MAPPING[parent], *extra_path])
+            return PathFinder.find_spec(fullname, path=[MAPPING[parent]])
 
         # Other levels of nesting should be handled automatically by importlib
         # using the parent path.
         return None
 
     @classmethod
-    def _find_spec(cls, fullname, candidate_path):
+    def _find_spec(cls, fullname: str, candidate_path: Path) -> ModuleSpec | None:
         init = candidate_path / "__init__.py"
         candidates = (candidate_path.with_suffix(x) for x in module_suffixes())
         for candidate in chain([init], candidates):
             if candidate.exists():
                 return spec_from_file_location(fullname, candidate)
+        return None
 
 
 class _EditableNamespaceFinder:  # PathEntryFinder
     @classmethod
-    def _path_hook(cls, path):
+    def _path_hook(cls, path) -> type[_EditableNamespaceFinder]:
         if path == PATH_PLACEHOLDER:
             return cls
         raise ImportError
 
     @classmethod
-    def _paths(cls, fullname):
-        # Ensure __path__ is not empty for the spec to be considered a namespace.
-        return NAMESPACES[fullname] or MAPPING.get(fullname) or [PATH_PLACEHOLDER]
+    def _paths(cls, fullname: str) -> list[str]:
+        paths = NAMESPACES[fullname]
+        if not paths and fullname in MAPPING:
+            paths = [MAPPING[fullname]]
+        # Always add placeholder, for 2 reasons:
+        # 1. __path__ cannot be empty for the spec to be considered namespace.
+        # 2. In the case of nested namespaces, we need to force
+        #    import machinery to query _EditableNamespaceFinder again.
+        return [*paths, PATH_PLACEHOLDER]
 
     @classmethod
-    def find_spec(cls, fullname, target=None):
+    def find_spec(cls, fullname: str, _target=None) -> ModuleSpec | None:
         if fullname in NAMESPACES:
             spec = ModuleSpec(fullname, None, is_package=True)
             spec.submodule_search_locations = cls._paths(fullname)
@@ -856,7 +862,7 @@ def find_spec(cls, fullname, target=None):
         return None
 
     @classmethod
-    def find_module(cls, fullname):
+    def find_module(cls, _fullname) -> None:
         return None
 
 

From 2b7ea603325bf8f2edbc7f12cc6b8cb2a7bd41e7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 16 Apr 2024 19:35:57 +0100
Subject: [PATCH 0549/1761] Add news fragment

---
 newsfragments/4278.bugfix.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4278.bugfix.rst

diff --git a/newsfragments/4278.bugfix.rst b/newsfragments/4278.bugfix.rst
new file mode 100644
index 0000000000..5e606cced8
--- /dev/null
+++ b/newsfragments/4278.bugfix.rst
@@ -0,0 +1,2 @@
+Fix finder template for lenient editable installs of implicit nested namespaces
+constructed by using ``package_dir`` to reorganise directory structure.

From 3d539f30913fedb6bf1e3bb9f4ef52de09a9eb09 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 10:16:27 +0100
Subject: [PATCH 0550/1761] Convert safe txt files to UTF-8

---
 setuptools/command/bdist_egg.py | 5 ++---
 setuptools/dist.py              | 2 +-
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 3687efdf9c..b2897bfbb4 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -350,9 +350,8 @@ def write_safety_flag(egg_dir, safe):
             if safe is None or bool(safe) != flag:
                 os.unlink(fn)
         elif safe is not None and bool(safe) == flag:
-            f = open(fn, 'wt')
-            f.write('\n')
-            f.close()
+            with open(fn, 'wt', encoding="utf-8") as f:
+                f.write('\n')
 
 
 safety_flags = {
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 6350e38100..076f9a2327 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -685,7 +685,7 @@ def get_egg_cache_dir(self):
             os.mkdir(egg_cache_dir)
             windows_support.hide_file(egg_cache_dir)
             readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
-            with open(readme_txt_filename, 'w') as f:
+            with open(readme_txt_filename, 'w', encoding="utf-8") as f:
                 f.write(
                     'This directory contains eggs that were downloaded '
                     'by setuptools to build, test, and run plug-ins.\n\n'

From d6651219d5284b2703b9849fe5fe2fcb47a54230 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 10:19:05 +0100
Subject: [PATCH 0551/1761] Try to read some files as UTF-8 before falling
 back to the locale encoding
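
The pattern, as a minimal sketch (hypothetical helper; the real code
resolves the locale encoding through a small compat shim):

    import locale

    def read_text_compat(path: str) -> str:
        try:
            with open(path, encoding="utf-8") as f:
                return f.read()
        except UnicodeDecodeError:
            # Fall back to the locale encoding only for non-UTF-8 files.
            with open(path, encoding=locale.getpreferredencoding(False)) as f:
                return f.read()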

---
 setuptools/command/setopt.py | 11 +++++++++--
 setuptools/package_index.py  | 16 +++++++++++++---
 setuptools/wheel.py          | 11 +++++++++--
 3 files changed, 31 insertions(+), 7 deletions(-)

diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index f9a6075128..aa800492f7 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -5,7 +5,8 @@
 import os
 import configparser
 
-from setuptools import Command
+from .. import Command
+from ..compat import py39
 
 __all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
 
@@ -36,7 +37,13 @@ def edit_config(filename, settings, dry_run=False):
     log.debug("Reading configuration from %s", filename)
     opts = configparser.RawConfigParser()
     opts.optionxform = lambda x: x
-    opts.read([filename])
+
+    try:
+        opts.read([filename], encoding="utf-8")
+    except UnicodeDecodeError:  # pragma: no cover
+        opts.clear()
+        opts.read([filename], encoding=py39.LOCALE_ENCODING)
+
     for section, options in settings.items():
         if options is None:
             log.info("Deleting section [%s] from %s", section, filename)
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 271aa97f71..f835bdcf14 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -40,6 +40,8 @@
 from setuptools.wheel import Wheel
 from setuptools.extern.more_itertools import unique_everseen
 
+from .compat import py39
+
 
 EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
 HREF = re.compile(r"""href\s*=\s*['"]?([^'"> ]+)""", re.I)
@@ -1011,7 +1013,11 @@ def __init__(self):
 
         rc = os.path.join(os.path.expanduser('~'), '.pypirc')
         if os.path.exists(rc):
-            self.read(rc)
+            try:
+                self.read(rc, encoding="utf-8")
+            except UnicodeDecodeError:  # pragma: no cover
+                self.clean()
+                self.read(rc, encoding=py39.LOCALE_ENCODING)
 
     @property
     def creds_by_repository(self):
@@ -1114,8 +1120,12 @@ def local_open(url):
         for f in os.listdir(filename):
             filepath = os.path.join(filename, f)
             if f == 'index.html':
-                with open(filepath, 'r') as fp:
-                    body = fp.read()
+                try:
+                    with open(filepath, 'r', encoding="utf-8") as fp:
+                        body = fp.read()
+                except UnicodeDecodeError:  # pragma: no cover
+                    with open(filepath, 'r', encoding=py39.LOCALE_ENCODING) as fp:
+                        body = fp.read()
                 break
             elif os.path.isdir(filepath):
                 f += '/'
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 9861b5cf1c..4cf3f4ca0d 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -18,6 +18,8 @@
 from setuptools.command.egg_info import write_requirements, _egg_basename
 from setuptools.archive_util import _unpack_zipfile_obj
 
+from .compat import py39
+
 
 WHEEL_NAME = re.compile(
     r"""^(?P.+?)-(?P\d.*?)
@@ -222,8 +224,13 @@ def _move_data_entries(destination_eggdir, dist_data):
     def _fix_namespace_packages(egg_info, destination_eggdir):
         namespace_packages = os.path.join(egg_info, 'namespace_packages.txt')
         if os.path.exists(namespace_packages):
-            with open(namespace_packages) as fp:
-                namespace_packages = fp.read().split()
+            try:
+                with open(namespace_packages, encoding="utf-8") as fp:
+                    namespace_packages = fp.read().split()
+            except UnicodeDecodeError:  # pragma: no cover
+                with open(namespace_packages, encoding=py39.LOCALE_ENCODING) as fp:
+                    namespace_packages = fp.read().split()
+
             for mod in namespace_packages:
                 mod_dir = os.path.join(destination_eggdir, *mod.split('.'))
                 mod_init = os.path.join(mod_dir, '__init__.py')

From e8e59831978216b60712c1411d9f6bd6eabebe23 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 10:19:59 +0100
Subject: [PATCH 0552/1761] Attempt to use utf-8 with Popen

---
 setuptools/tests/environment.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py
index df2bd37ff6..6173936f7b 100644
--- a/setuptools/tests/environment.py
+++ b/setuptools/tests/environment.py
@@ -76,6 +76,7 @@ def run_setup_py(cmd, pypath=None, path=None, data_stream=0, env=None):
             stderr=_PIPE,
             shell=shell,
             env=env,
+            encoding="utf-8",
         )
 
         if isinstance(data_stream, tuple):

From 13fdfaa2c9aa8a9decd8ec6624e536e2ed9cc7c6 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 10:21:18 +0100
Subject: [PATCH 0553/1761] Use utf-8 for NAMESPACE_PACKAGE_INIT

This change should be relatively safe
---
 setuptools/wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 4cf3f4ca0d..19f4157423 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -237,5 +237,5 @@ def _fix_namespace_packages(egg_info, destination_eggdir):
                 if not os.path.exists(mod_dir):
                     os.mkdir(mod_dir)
                 if not os.path.exists(mod_init):
-                    with open(mod_init, 'w') as fp:
+                    with open(mod_init, 'w', encoding="utf-8") as fp:
                         fp.write(NAMESPACE_PACKAGE_INIT)

From 851e972aae6efa2cf6ead909db7653bce4368c5f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 10:22:08 +0100
Subject: [PATCH 0554/1761] Attempt to use UTF-8 when rewriting 'setup.cfg'

---
 setuptools/command/setopt.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index aa800492f7..89b1ac7307 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -69,7 +69,7 @@ def edit_config(filename, settings, dry_run=False):
 
     log.info("Writing %s", filename)
     if not dry_run:
-        with open(filename, 'w') as f:
+        with open(filename, 'w', encoding="utf-8") as f:
             opts.write(f)
 
 

From 20c0f82778a82d57c061f57d56c99192294e0acf Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 10:24:42 +0100
Subject: [PATCH 0555/1761] Attempt to use UTF-8 when writing str scripts with
 easy_install/install_scripts
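
The idea, as a minimal sketch (hypothetical helper name): pass
encoding="utf-8" only when writing text, since binary mode rejects an
encoding argument.

    def write_script_compat(target: str, contents, mode: str = "t") -> None:
        # "w" + "t" opens in text mode (needs an encoding);
        # "w" + "b" opens in binary mode (must not get one).
        kw = {"encoding": "utf-8"} if "b" not in mode and isinstance(contents, str) else {}
        with open(target, "w" + mode, **kw) as f:
            f.write(contents)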

---
 setuptools/command/easy_install.py    |  8 +++++++-
 setuptools/command/install_scripts.py | 10 +++++++---
 2 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 87a68c292a..3ad984f212 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -873,7 +873,13 @@ def write_script(self, script_name, contents, mode="t", blockers=()):
         ensure_directory(target)
         if os.path.exists(target):
             os.unlink(target)
-        with open(target, "w" + mode) as f:  # TODO: is it safe to use utf-8?
+
+        if "b" not in mode and isinstance(contents, str):
+            kw = {"encoding": "utf-8"}
+        else:
+            kw = {}
+
+        with open(target, "w" + mode, **kw) as f:
             f.write(contents)
         chmod(target, 0o777 - mask)
 
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index 72b2e45cbc..758937b614 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -57,10 +57,14 @@ def write_script(self, script_name, contents, mode="t", *ignored):
         target = os.path.join(self.install_dir, script_name)
         self.outfiles.append(target)
 
+        if "b" not in mode and isinstance(contents, str):
+            kw = {"encoding": "utf-8"}
+        else:
+            kw = {}
+
         mask = current_umask()
         if not self.dry_run:
             ensure_directory(target)
-            f = open(target, "w" + mode)
-            f.write(contents)
-            f.close()
+            with open(target, "w" + mode, **kw) as f:
+                f.write(contents)
             chmod(target, 0o777 - mask)

From e44a63cb16a19d31d4d0c080efb71aac63ff0948 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 10:28:43 +0100
Subject: [PATCH 0556/1761] Prevent missing UTF-8 warnings in setuptools._imp

This should be safe because we use `tokenize.open` for source files.
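
For illustration, tokenize.open() honors a PEP 263 coding declaration
(or a BOM) when opening a source file, so the encoding never has to be
guessed:

    import tokenize

    # Opened with the encoding declared in the source itself,
    # or UTF-8 (the Python default) when none is declared.
    with tokenize.open(__file__) as f:
        first_line = f.readline()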
---
 setuptools/_imp.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/setuptools/_imp.py b/setuptools/_imp.py
index 9d4ead0eb0..38b146fc4d 100644
--- a/setuptools/_imp.py
+++ b/setuptools/_imp.py
@@ -6,6 +6,7 @@
 import os
 import importlib.util
 import importlib.machinery
+import tokenize
 
 from importlib.util import module_from_spec
 
@@ -60,13 +61,13 @@ def find_module(module, paths=None):
 
         if suffix in importlib.machinery.SOURCE_SUFFIXES:
             kind = PY_SOURCE
+            file = tokenize.open(path)
         elif suffix in importlib.machinery.BYTECODE_SUFFIXES:
             kind = PY_COMPILED
+            file = open(path, 'rb')
         elif suffix in importlib.machinery.EXTENSION_SUFFIXES:
             kind = C_EXTENSION
 
-        if kind in {PY_SOURCE, PY_COMPILED}:
-            file = open(path, mode)
     else:
         path = None
         suffix = mode = ''

From b540f93cdf7edd0a0ce3e06a23ff4714bf13420e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 16:01:56 +0100
Subject: [PATCH 0557/1761] Use UTF-8 with venv.run and avoid encoding warnings
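
With encoding="utf-8", subprocess pipes yield str instead of bytes, so
callers can drop the manual str(out, "utf-8") decode. A minimal sketch:

    import subprocess
    import sys

    result = subprocess.run(
        [sys.executable, "-c", "print('hello')"],
        capture_output=True,
        encoding="utf-8",  # stdout/stderr are decoded to str
        check=True,
    )
    assert result.stdout.strip() == "hello"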

---
 setuptools/tests/environment.py           |  2 +-
 setuptools/tests/test_build_meta.py       |  4 +-
 setuptools/tests/test_editable_install.py | 66 +++++++++++------------
 3 files changed, 36 insertions(+), 36 deletions(-)

diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py
index 6173936f7b..b9de4fda6b 100644
--- a/setuptools/tests/environment.py
+++ b/setuptools/tests/environment.py
@@ -17,7 +17,7 @@ class VirtualEnv(jaraco.envs.VirtualEnv):
 
     def run(self, cmd, *args, **kwargs):
         cmd = [self.exe(cmd[0])] + cmd[1:]
-        kwargs = {"cwd": self.root, **kwargs}  # Allow overriding
+        kwargs = {"cwd": self.root, "encoding": "utf-8", **kwargs}  # Allow overriding
         # In some environments (eg. downstream distro packaging), where:
         # - tox isn't used to run tests and
         # - PYTHONPATH is set to point to a specific setuptools codebase and
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index c2a1e6dc75..43830feb77 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -941,14 +941,14 @@ def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
 
     # First: sanity check
     cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
-    output = str(venv.run(cmd, cwd=tmpdir), "utf-8").lower()
+    output = venv.run(cmd, cwd=tmpdir).lower()
     assert "running setup.py develop for myproj" not in output
     assert "created wheel for myproj" in output
 
     # Then: real test
     env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
     cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
-    output = str(venv.run(cmd, cwd=tmpdir, env=env), "utf-8").lower()
+    output = venv.run(cmd, cwd=tmpdir, env=env).lower()
     assert "running setup.py develop for myproj" in output
 
 
diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index 5da4fccefa..119b128694 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -131,7 +131,7 @@ def test_editable_with_pyproject(tmp_path, venv, files, editable_opts):
     jaraco.path.build(files, prefix=project)
 
     cmd = [
-        venv.exe(),
+        "python",
         "-m",
         "pip",
         "install",
@@ -140,14 +140,14 @@ def test_editable_with_pyproject(tmp_path, venv, files, editable_opts):
         str(project),
         *editable_opts,
     ]
-    print(str(subprocess.check_output(cmd), "utf-8"))
+    print(venv.run(cmd))
 
-    cmd = [venv.exe(), "-m", "mypkg"]
-    assert subprocess.check_output(cmd).strip() == b"3.14159.post0 Hello World"
+    cmd = ["python", "-m", "mypkg"]
+    assert venv.run(cmd).strip() == "3.14159.post0 Hello World"
 
     (project / "src/mypkg/data.txt").write_text("foobar", encoding="utf-8")
     (project / "src/mypkg/mod.py").write_text("x = 42", encoding="utf-8")
-    assert subprocess.check_output(cmd).strip() == b"3.14159.post0 foobar 42"
+    assert venv.run(cmd).strip() == "3.14159.post0 foobar 42"
 
 
 def test_editable_with_flat_layout(tmp_path, venv, editable_opts):
@@ -176,7 +176,7 @@ def test_editable_with_flat_layout(tmp_path, venv, editable_opts):
     project = tmp_path / "mypkg"
 
     cmd = [
-        venv.exe(),
+        "python",
         "-m",
         "pip",
         "install",
@@ -185,9 +185,9 @@ def test_editable_with_flat_layout(tmp_path, venv, editable_opts):
         str(project),
         *editable_opts,
     ]
-    print(str(subprocess.check_output(cmd), "utf-8"))
-    cmd = [venv.exe(), "-c", "import pkg, mod; print(pkg.a, mod.b)"]
-    assert subprocess.check_output(cmd).strip() == b"4 2"
+    print(venv.run(cmd))
+    cmd = ["python", "-c", "import pkg, mod; print(pkg.a, mod.b)"]
+    assert venv.run(cmd).strip() == "4 2"
 
 
 def test_editable_with_single_module(tmp_path, venv, editable_opts):
@@ -214,7 +214,7 @@ def test_editable_with_single_module(tmp_path, venv, editable_opts):
     project = tmp_path / "mypkg"
 
     cmd = [
-        venv.exe(),
+        "python",
         "-m",
         "pip",
         "install",
@@ -223,9 +223,9 @@ def test_editable_with_single_module(tmp_path, venv, editable_opts):
         str(project),
         *editable_opts,
     ]
-    print(str(subprocess.check_output(cmd), "utf-8"))
-    cmd = [venv.exe(), "-c", "import mod; print(mod.b)"]
-    assert subprocess.check_output(cmd).strip() == b"2"
+    print(venv.run(cmd))
+    cmd = ["python", "-c", "import mod; print(mod.b)"]
+    assert venv.run(cmd).strip() == "2"
 
 
 class TestLegacyNamespaces:
@@ -384,7 +384,7 @@ def test_namespace_accidental_config_in_lenient_mode(self, venv, tmp_path):
         opts = ["--no-build-isolation"]  # force current version of setuptools
         venv.run(["python", "-m", "pip", "-v", "install", "-e", str(pkg_A), *opts])
         out = venv.run(["python", "-c", "from mypkg.n import pkgA; print(pkgA.a)"])
-        assert str(out, "utf-8").strip() == "1"
+        assert out.strip() == "1"
         cmd = """\
         try:
             import mypkg.other
@@ -392,7 +392,7 @@ def test_namespace_accidental_config_in_lenient_mode(self, venv, tmp_path):
             print("mypkg.other not defined")
         """
         out = venv.run(["python", "-c", dedent(cmd)])
-        assert "mypkg.other not defined" in str(out, "utf-8")
+        assert "mypkg.other not defined" in out
 
 
 # Moved here from test_develop:
@@ -911,7 +911,7 @@ def test_editable_install(self, tmp_path, venv, layout, editable_opts):
             print(ex)
         """
         out = venv.run(["python", "-c", dedent(cmd_import_error)])
-        assert b"No module named 'otherfile'" in out
+        assert "No module named 'otherfile'" in out
 
         # Ensure the modules are importable
         cmd_get_vars = """\
@@ -919,7 +919,7 @@ def test_editable_install(self, tmp_path, venv, layout, editable_opts):
         print(mypkg.mod1.var, mypkg.subpackage.mod2.var)
         """
         out = venv.run(["python", "-c", dedent(cmd_get_vars)])
-        assert b"42 13" in out
+        assert "42 13" in out
 
         # Ensure resources are reachable
         cmd_get_resource = """\
@@ -929,7 +929,7 @@ def test_editable_install(self, tmp_path, venv, layout, editable_opts):
         print(text.read_text(encoding="utf-8"))
         """
         out = venv.run(["python", "-c", dedent(cmd_get_resource)])
-        assert b"resource 39" in out
+        assert "resource 39" in out
 
         # Ensure files are editable
         mod1 = next(project.glob("**/mod1.py"))
@@ -941,12 +941,12 @@ def test_editable_install(self, tmp_path, venv, layout, editable_opts):
         resource_file.write_text("resource 374", encoding="utf-8")
 
         out = venv.run(["python", "-c", dedent(cmd_get_vars)])
-        assert b"42 13" not in out
-        assert b"17 781" in out
+        assert "42 13" not in out
+        assert "17 781" in out
 
         out = venv.run(["python", "-c", dedent(cmd_get_resource)])
-        assert b"resource 39" not in out
-        assert b"resource 374" in out
+        assert "resource 39" not in out
+        assert "resource 374" in out
 
 
 class TestLinkTree:
@@ -1005,7 +1005,7 @@ def test_strict_install(self, tmp_path, venv):
         install_project("mypkg", venv, tmp_path, self.FILES, *opts)
 
         out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
-        assert b"42" in out
+        assert "42" in out
 
         # Ensure packages excluded from distribution are not importable
         cmd_import_error = """\
@@ -1015,7 +1015,7 @@ def test_strict_install(self, tmp_path, venv):
             print(ex)
         """
         out = venv.run(["python", "-c", dedent(cmd_import_error)])
-        assert b"cannot import name 'subpackage'" in out
+        assert "cannot import name 'subpackage'" in out
 
         # Ensure resource files excluded from distribution are not reachable
         cmd_get_resource = """\
@@ -1028,8 +1028,8 @@ def test_strict_install(self, tmp_path, venv):
             print(ex)
         """
         out = venv.run(["python", "-c", dedent(cmd_get_resource)])
-        assert b"No such file or directory" in out
-        assert b"resource.not_in_manifest" in out
+        assert "No such file or directory" in out
+        assert "resource.not_in_manifest" in out
 
 
 @pytest.mark.filterwarnings("ignore:.*compat.*:setuptools.SetuptoolsDeprecationWarning")
@@ -1040,7 +1040,7 @@ def test_compat_install(tmp_path, venv):
     install_project("mypkg", venv, tmp_path, files, *opts)
 
     out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
-    assert b"42" in out
+    assert "42" in out
 
     expected_path = comparable_path(str(tmp_path))
 
@@ -1051,7 +1051,7 @@ def test_compat_install(tmp_path, venv):
         "import other; print(other)",
         "import mypkg; print(mypkg)",
     ):
-        out = comparable_path(str(venv.run(["python", "-c", cmd]), "utf-8"))
+        out = comparable_path(venv.run(["python", "-c", cmd]))
         assert expected_path in out
 
     # Compatible behaviour will not consider custom mappings
@@ -1061,7 +1061,7 @@ def test_compat_install(tmp_path, venv):
     except ImportError as ex:
         print(ex)
     """
-    out = str(venv.run(["python", "-c", dedent(cmd)]), "utf-8")
+    out = venv.run(["python", "-c", dedent(cmd)])
     assert "cannot import name 'subpackage'" in out
 
 
@@ -1105,7 +1105,7 @@ def test_pbr_integration(tmp_path, venv, editable_opts):
         install_project("mypkg", venv, tmp_path, files, *editable_opts)
 
     out = venv.run(["python", "-c", "import mypkg.hello"])
-    assert b"Hello world!" in out
+    assert "Hello world!" in out
 
 
 class TestCustomBuildPy:
@@ -1143,11 +1143,11 @@ def test_safeguarded_from_errors(self, tmp_path, venv):
         """Ensure that errors in custom build_py are reported as warnings"""
         # Warnings should show up
         _, out = install_project("mypkg", venv, tmp_path, self.FILES)
-        assert b"SetuptoolsDeprecationWarning" in out
-        assert b"ValueError: TEST_RAISE" in out
+        assert "SetuptoolsDeprecationWarning" in out
+        assert "ValueError: TEST_RAISE" in out
         # but installation should be successful
         out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
-        assert b"42" in out
+        assert "42" in out
 
 
 class TestCustomBuildWheel:

From eae4e26b0a1fb9ff8ca31ca2be4b58220b59a32f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 17:56:36 +0100
Subject: [PATCH 0558/1761] Ignore warning caused by 3rd-party setup.py

---
 setuptools/tests/test_integration.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py
index 1aa16172b5..77ef733698 100644
--- a/setuptools/tests/test_integration.py
+++ b/setuptools/tests/test_integration.py
@@ -99,6 +99,10 @@ def test_pbr(install_context):
 
 
 @pytest.mark.xfail
+@pytest.mark.filterwarnings("ignore::EncodingWarning")
+# ^-- Dependency chain: `python-novaclient` < `oslo-utils` < `netifaces==0.11.0`
+#     netifaces' setup.py uses `open` without `encoding="utf-8"` which is hijacked by
+#     `setuptools.sandbox._open` and triggers the EncodingWarning.
 def test_python_novaclient(install_context):
     _install_one('python-novaclient', install_context, 'novaclient', 'base.py')
 

From 8a75f99ca60a8a78f1dbba94b2969d292ea5557c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 17:57:26 +0100
Subject: [PATCH 0559/1761] Use UTF-8 in setuptools/tests/test_easy_install.py

---
 setuptools/tests/test_easy_install.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 950cb23d21..ada4c32285 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -361,7 +361,7 @@ def test_many_pth_distributions_merge_together(self, tmpdir):
 
 @pytest.fixture
 def setup_context(tmpdir):
-    with (tmpdir / 'setup.py').open('w') as f:
+    with (tmpdir / 'setup.py').open('w', encoding="utf-8") as f:
         f.write(SETUP_PY)
     with tmpdir.as_cwd():
         yield tmpdir

From 2b82912b4b4a4576cd6edfd307f14ff615f21ed4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 17:57:49 +0100
Subject: [PATCH 0560/1761] Attempt to use UTF-8 with develop command.

This change tries to use UTF-8 when writing `.egg-link` files.
When reading other files, we first attempt UTF-8 and then
fall back to the locale encoding.
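
A minimal sketch of the read-side fallback (a hypothetical helper; the
`LOCALE_ENCODING` shim mirrors `setuptools.compat.py39`):

    import sys

    LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None

    def _read(path):
        try:
            with open(path, encoding="utf-8") as f:
                return f.read()
        except UnicodeDecodeError:
            # legacy files may have been written with the locale encoding
            with open(path, encoding=LOCALE_ENCODING) as f:
                return f.read()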
---
 setuptools/command/develop.py | 27 +++++++++++++++++++++------
 1 file changed, 21 insertions(+), 6 deletions(-)

diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index d8c1b49b3d..aeb491fe2c 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -10,6 +10,8 @@
 from setuptools import namespaces
 import setuptools
 
+from ..compat import py39
+
 
 class develop(namespaces.DevelopInstaller, easy_install):
     """Set up package for development"""
@@ -119,7 +121,7 @@ def install_for_development(self):
         # create an .egg-link in the installation dir, pointing to our egg
         log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
         if not self.dry_run:
-            with open(self.egg_link, "w") as f:
+            with open(self.egg_link, "w", encoding="utf-8") as f:
                 f.write(self.egg_path + "\n" + self.setup_path)
         # postprocess the installed distro, fixing up .pth, installing scripts,
         # and handling requirements
@@ -128,9 +130,16 @@ def install_for_development(self):
     def uninstall_link(self):
         if os.path.exists(self.egg_link):
             log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
-            egg_link_file = open(self.egg_link)
-            contents = [line.rstrip() for line in egg_link_file]
-            egg_link_file.close()
+
+            try:
+                with open(self.egg_link, encoding="utf-8") as egg_link_file:
+                    contents = [line.rstrip() for line in egg_link_file]
+            except UnicodeDecodeError:  # pragma: no cover
+                with open(
+                    self.egg_link, encoding=py39.LOCALE_ENCODING
+                ) as egg_link_file:
+                    contents = [line.rstrip() for line in egg_link_file]
+
             if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
                 log.warn("Link points to %s: uninstall aborted", contents)
                 return
@@ -156,8 +165,14 @@ def install_egg_scripts(self, dist):
         for script_name in self.distribution.scripts or []:
             script_path = os.path.abspath(convert_path(script_name))
             script_name = os.path.basename(script_path)
-            with open(script_path) as strm:
-                script_text = strm.read()
+
+            try:
+                with open(script_path, encoding="utf-8") as strm:
+                    script_text = strm.read()
+            except UnicodeDecodeError:  # pragma: no cover
+                with open(script_path, encoding=py39.LOCALE_ENCODING) as strm:
+                    script_text = strm.read()
+
             self.install_script(dist, script_name, script_text, script_path)
 
         return None

From 74a622833716b9fbbf2b1a94c58b59d5defe0dd3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 18:32:08 +0100
Subject: [PATCH 0561/1761] Refactor some try..excepts into
 read_utf8_with_fallback

Extract common pattern for reading a file with UTF-8 into the
unicode_utils module.
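
A hypothetical call site, showing how the helper replaces the repeated
try..except blocks (the file name is illustrative):

    from setuptools.unicode_utils import read_utf8_with_fallback

    contents = [
        line.rstrip()
        for line in read_utf8_with_fallback("example.egg-link").splitlines()
    ]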
---
 setuptools/command/develop.py | 23 ++++++-----------------
 setuptools/package_index.py   |  8 ++------
 setuptools/unicode_utils.py   | 17 +++++++++++++++++
 setuptools/wheel.py           |  9 ++-------
 4 files changed, 27 insertions(+), 30 deletions(-)

diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index aeb491fe2c..9966681bad 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -10,7 +10,7 @@
 from setuptools import namespaces
 import setuptools
 
-from ..compat import py39
+from ..unicode_utils import read_utf8_with_fallback
 
 
 class develop(namespaces.DevelopInstaller, easy_install):
@@ -131,14 +131,10 @@ def uninstall_link(self):
         if os.path.exists(self.egg_link):
             log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
 
-            try:
-                with open(self.egg_link, encoding="utf-8") as egg_link_file:
-                    contents = [line.rstrip() for line in egg_link_file]
-            except UnicodeDecodeError:  # pragma: no cover
-                with open(
-                    self.egg_link, encoding=py39.LOCALE_ENCODING
-                ) as egg_link_file:
-                    contents = [line.rstrip() for line in egg_link_file]
+            contents = [
+                line.rstrip()
+                for line in read_utf8_with_fallback(self.egg_link).splitlines()
+            ]
 
             if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
                 log.warn("Link points to %s: uninstall aborted", contents)
@@ -165,14 +161,7 @@ def install_egg_scripts(self, dist):
         for script_name in self.distribution.scripts or []:
             script_path = os.path.abspath(convert_path(script_name))
             script_name = os.path.basename(script_path)
-
-            try:
-                with open(script_path, encoding="utf-8") as strm:
-                    script_text = strm.read()
-            except UnicodeDecodeError:  # pragma: no cover
-                with open(script_path, encoding=py39.LOCALE_ENCODING) as strm:
-                    script_text = strm.read()
-
+            script_text = read_utf8_with_fallback(script_path)
             self.install_script(dist, script_name, script_text, script_path)
 
         return None
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index f835bdcf14..2aa8464162 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -41,6 +41,7 @@
 from setuptools.extern.more_itertools import unique_everseen
 
 from .compat import py39
+from .unicode_utils import read_utf8_with_fallback
 
 
 EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
@@ -1120,12 +1121,7 @@ def local_open(url):
         for f in os.listdir(filename):
             filepath = os.path.join(filename, f)
             if f == 'index.html':
-                try:
-                    with open(filepath, 'r', encoding="utf-8") as fp:
-                        body = fp.read()
-                except UnicodeDecodeError:  # pragma: no cover
-                    with open(filepath, 'r', encoding=py39.LOCALE_ENCODING) as fp:
-                        body = fp.read()
+                body = read_utf8_with_fallback(filepath)
                 break
             elif os.path.isdir(filepath):
                 f += '/'
diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
index d43dcc11f9..4bc67feba0 100644
--- a/setuptools/unicode_utils.py
+++ b/setuptools/unicode_utils.py
@@ -1,6 +1,8 @@
 import unicodedata
 import sys
 
+from .compat import py39
+
 
 # HFS Plus uses decomposed UTF-8
 def decompose(path):
@@ -42,3 +44,18 @@ def try_encode(string, enc):
         return string.encode(enc)
     except UnicodeEncodeError:
         return None
+
+
+def read_utf8_with_fallback(file: str, fallback_encoding=py39.LOCALE_ENCODING) -> str:
+    """
+    First try to read the file with UTF-8; if that fails, fall back to a
+    different encoding ("locale" by default). Returns the content of the file.
+    Also useful when reading files that might have been produced by an older version of
+    setuptools.
+    """
+    try:
+        with open(file, "r", encoding="utf-8") as f:
+            return f.read()
+    except UnicodeDecodeError:  # pragma: no cover
+        with open(file, "r", encoding=fallback_encoding) as f:
+            return f.read()
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 19f4157423..babd45940f 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -18,7 +18,7 @@
 from setuptools.command.egg_info import write_requirements, _egg_basename
 from setuptools.archive_util import _unpack_zipfile_obj
 
-from .compat import py39
+from .unicode_utils import read_utf8_with_fallback
 
 
 WHEEL_NAME = re.compile(
@@ -224,12 +224,7 @@ def _move_data_entries(destination_eggdir, dist_data):
     def _fix_namespace_packages(egg_info, destination_eggdir):
         namespace_packages = os.path.join(egg_info, 'namespace_packages.txt')
         if os.path.exists(namespace_packages):
-            try:
-                with open(namespace_packages, encoding="utf-8") as fp:
-                    namespace_packages = fp.read().split()
-            except UnicodeDecodeError:  # pragma: no cover
-                with open(namespace_packages, encoding=py39.LOCALE_ENCODING) as fp:
-                    namespace_packages = fp.read().split()
+            namespace_packages = read_utf8_with_fallback(namespace_packages).split()
 
             for mod in namespace_packages:
                 mod_dir = os.path.join(destination_eggdir, *mod.split('.'))

From aeac45b14d910044e2c1f0d2faec231b8c41fbeb Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 17 Apr 2024 19:05:14 +0100
Subject: [PATCH 0562/1761] Avoid using EncodingWarning because it is not
 defined for Python < 3.10

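The message-based filter is roughly equivalent to the following sketch (the
message text must match the prefix of the emitted warning):

    import warnings

    warnings.filterwarnings("ignore", message="'encoding' argument not specified")

Unlike `"ignore::EncodingWarning"`, this also works on interpreters where the
`EncodingWarning` class does not exist.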
---
 setuptools/tests/test_integration.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py
index 77ef733698..71f10d9a6e 100644
--- a/setuptools/tests/test_integration.py
+++ b/setuptools/tests/test_integration.py
@@ -99,10 +99,11 @@ def test_pbr(install_context):
 
 
 @pytest.mark.xfail
-@pytest.mark.filterwarnings("ignore::EncodingWarning")
+@pytest.mark.filterwarnings("ignore:'encoding' argument not specified")
 # ^-- Dependency chain: `python-novaclient` < `oslo-utils` < `netifaces==0.11.0`
 #     netifaces' setup.py uses `open` without `encoding="utf-8"` which is hijacked by
 #     `setuptools.sandbox._open` and triggers the EncodingWarning.
+#     Can't use EncodingWarning in the filter, as it does not exist on Python < 3.10.
 def test_python_novaclient(install_context):
     _install_one('python-novaclient', install_context, 'novaclient', 'base.py')
 

From 9fd598172bc4af6e90f95ff9916faf3e8717e497 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 21 Apr 2024 10:59:52 +0100
Subject: [PATCH 0563/1761] Mark read_utf8_with_fallback as private

---
 setuptools/command/develop.py | 6 +++---
 setuptools/package_index.py   | 4 ++--
 setuptools/unicode_utils.py   | 2 +-
 setuptools/wheel.py           | 4 ++--
 4 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 9966681bad..d07736a005 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -10,7 +10,7 @@
 from setuptools import namespaces
 import setuptools
 
-from ..unicode_utils import read_utf8_with_fallback
+from ..unicode_utils import _read_utf8_with_fallback
 
 
 class develop(namespaces.DevelopInstaller, easy_install):
@@ -133,7 +133,7 @@ def uninstall_link(self):
 
             contents = [
                 line.rstrip()
-                for line in read_utf8_with_fallback(self.egg_link).splitlines()
+                for line in _read_utf8_with_fallback(self.egg_link).splitlines()
             ]
 
             if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
@@ -161,7 +161,7 @@ def install_egg_scripts(self, dist):
         for script_name in self.distribution.scripts or []:
             script_path = os.path.abspath(convert_path(script_name))
             script_name = os.path.basename(script_path)
-            script_text = read_utf8_with_fallback(script_path)
+            script_text = _read_utf8_with_fallback(script_path)
             self.install_script(dist, script_name, script_text, script_path)
 
         return None
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 2aa8464162..0ca87df357 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -41,7 +41,7 @@
 from setuptools.extern.more_itertools import unique_everseen
 
 from .compat import py39
-from .unicode_utils import read_utf8_with_fallback
+from .unicode_utils import _read_utf8_with_fallback
 
 
 EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
@@ -1121,7 +1121,7 @@ def local_open(url):
         for f in os.listdir(filename):
             filepath = os.path.join(filename, f)
             if f == 'index.html':
-                body = read_utf8_with_fallback(filepath)
+                body = _read_utf8_with_fallback(filepath)
                 break
             elif os.path.isdir(filepath):
                 f += '/'
diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
index 4bc67feba0..6b60417a91 100644
--- a/setuptools/unicode_utils.py
+++ b/setuptools/unicode_utils.py
@@ -46,7 +46,7 @@ def try_encode(string, enc):
         return None
 
 
-def read_utf8_with_fallback(file: str, fallback_encoding=py39.LOCALE_ENCODING) -> str:
+def _read_utf8_with_fallback(file: str, fallback_encoding=py39.LOCALE_ENCODING) -> str:
     """
     First try to read the file with UTF-8; if that fails, fall back to a
     different encoding ("locale" by default). Returns the content of the file.
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index babd45940f..e06daec4d0 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -18,7 +18,7 @@
 from setuptools.command.egg_info import write_requirements, _egg_basename
 from setuptools.archive_util import _unpack_zipfile_obj
 
-from .unicode_utils import read_utf8_with_fallback
+from .unicode_utils import _read_utf8_with_fallback
 
 
 WHEEL_NAME = re.compile(
@@ -224,7 +224,7 @@ def _move_data_entries(destination_eggdir, dist_data):
     def _fix_namespace_packages(egg_info, destination_eggdir):
         namespace_packages = os.path.join(egg_info, 'namespace_packages.txt')
         if os.path.exists(namespace_packages):
-            namespace_packages = read_utf8_with_fallback(namespace_packages).split()
+            namespace_packages = _read_utf8_with_fallback(namespace_packages).split()
 
             for mod in namespace_packages:
                 mod_dir = os.path.join(destination_eggdir, *mod.split('.'))

From 9aa9f22e04b473af110461fea591560678bc1284 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 21 Apr 2024 11:18:45 +0100
Subject: [PATCH 0564/1761] Add newsfragment

---
 newsfragments/4309.removal.rst | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 newsfragments/4309.removal.rst

diff --git a/newsfragments/4309.removal.rst b/newsfragments/4309.removal.rst
new file mode 100644
index 0000000000..08818104f9
--- /dev/null
+++ b/newsfragments/4309.removal.rst
@@ -0,0 +1,5 @@
+Further adoption of UTF-8 in ``setuptools``.
+This change regards mostly files produced and consumed during the build process
+(e.g. metadata files, script wrappers, automatically updated config files, etc.)
+Although precautions were taken to minimize disruptions, some edge cases might
+be subject to backwards incompatibility.

From 8b2009176279f977f6b6b50e8b8c4e4ba5b9f99e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 21 Apr 2024 11:25:18 +0100
Subject: [PATCH 0565/1761] Read files using UTF-8 in pkg_resources, with
 fallback to locale

---
 pkg_resources/__init__.py | 25 ++++++++++++++++++-------
 1 file changed, 18 insertions(+), 7 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index c2ba0476e5..5d773da541 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -1524,8 +1524,7 @@ def run_script(self, script_name, namespace):
         script_filename = self._fn(self.egg_info, script)
         namespace['__file__'] = script_filename
         if os.path.exists(script_filename):
-            with open(script_filename) as fid:
-                source = fid.read()
+            source = _read_utf8_with_fallback(script_filename)
             code = compile(source, script_filename, 'exec')
             exec(code, namespace, namespace)
         else:
@@ -2175,11 +2174,10 @@ def non_empty_lines(path):
     """
     Yield non-empty lines from file at path
     """
-    with open(path) as f:
-        for line in f:
-            line = line.strip()
-            if line:
-                yield line
+    for line in _read_utf8_with_fallback(path).splitlines():
+        line = line.strip()
+        if line:
+            yield line
 
 
 def resolve_egg_link(path):
@@ -3323,3 +3321,16 @@ def _initialize_master_working_set():
     # match order
     list(map(working_set.add_entry, sys.path))
     globals().update(locals())
+
+
+#  ---- Ported from ``setuptools`` to avoid introducing dependencies ----
+LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
+
+
+def _read_utf8_with_fallback(file: str, fallback_encoding=LOCALE_ENCODING) -> str:
+    try:
+        with open(file, "r", encoding="utf-8") as f:
+            return f.read()
+    except UnicodeDecodeError:  # pragma: no cover
+        with open(file, "r", encoding=fallback_encoding) as f:
+            return f.read()

From 69f580687de2af6760fe171c81d84e6bfcc665ea Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 21 Apr 2024 11:39:50 +0100
Subject: [PATCH 0566/1761] Use UTF-8 for writing python stubs

Since Python 3 is "UTF-8 first", this change should not cause problems.
---
 setuptools/command/bdist_egg.py | 2 +-
 setuptools/command/build_ext.py | 6 +++---
 setuptools/package_index.py     | 2 +-
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index b2897bfbb4..5581b1d2e0 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -54,7 +54,7 @@ def __bootstrap__():
         __bootstrap__()
         """
     ).lstrip()
-    with open(pyfile, 'w') as f:
+    with open(pyfile, 'w', encoding="utf-8") as f:
         f.write(_stub_template % resource)
 
 
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index b5c98c86dc..49699d30ec 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -342,8 +342,7 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
         if compile and os.path.exists(stub_file):
             raise BaseError(stub_file + " already exists! Please delete.")
         if not self.dry_run:
-            f = open(stub_file, 'w')
-            f.write(
+            content = (
                 '\n'.join([
                     "def __bootstrap__():",
                     "   global __bootstrap__, __file__, __loader__",
@@ -369,7 +368,8 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
                     "",  # terminal \n
                 ])
             )
-            f.close()
+            with open(stub_file, 'w', encoding="utf-8") as f:
+                f.write(content)
         if compile:
             self._compile_and_remove_stub(stub_file)
 
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 0ca87df357..42a98b919a 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -717,7 +717,7 @@ def gen_setup(self, filename, fragment, tmpdir):
                     shutil.copy2(filename, dst)
                     filename = dst
 
-            with open(os.path.join(tmpdir, 'setup.py'), 'w') as file:
+            with open(os.path.join(tmpdir, 'setup.py'), 'w', encoding="utf-8") as file:
                 file.write(
                     "from setuptools import setup\n"
                     "setup(name=%r, version=%r, py_modules=[%r])\n"

From 2675e85a20cff489b9bdce0d958968eca24d542d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 21 Apr 2024 11:41:45 +0100
Subject: [PATCH 0567/1761] Use UTF-8 to write metadata files

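A small sketch of the key/value formatting used when writing PKG-INFO
(values are illustrative):

    for k, v in [("name", "example"), ("home_page", "https://example.com")]:
        print('%s: %s' % (k.replace('_', '-').title(), v))
    # Name: example
    # Home-Page: https://example.com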
---
 setuptools/command/bdist_egg.py    |  7 +++----
 setuptools/command/easy_install.py | 16 +++++++---------
 2 files changed, 10 insertions(+), 13 deletions(-)

diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 5581b1d2e0..adcb0a1ba1 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -200,10 +200,9 @@ def run(self):  # noqa: C901  # is too complex (14)  # FIXME
             log.info("writing %s", native_libs)
             if not self.dry_run:
                 ensure_directory(native_libs)
-                libs_file = open(native_libs, 'wt')
-                libs_file.write('\n'.join(all_outputs))
-                libs_file.write('\n')
-                libs_file.close()
+                with open(native_libs, 'wt', encoding="utf-8") as libs_file:
+                    libs_file.write('\n'.join(all_outputs))
+                    libs_file.write('\n')
         elif os.path.isfile(native_libs):
             log.info("removing %s", native_libs)
             if not self.dry_run:
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 3ad984f212..bfacf1e46f 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1023,12 +1023,11 @@ def install_exe(self, dist_filename, tmpdir):
 
         # Write EGG-INFO/PKG-INFO
         if not os.path.exists(pkg_inf):
-            f = open(pkg_inf, 'w')  # TODO: probably it is safe to use utf-8
-            f.write('Metadata-Version: 1.0\n')
-            for k, v in cfg.items('metadata'):
-                if k != 'target_version':
-                    f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
-            f.close()
+            with open(pkg_inf, 'w', encoding="utf-8") as f:
+                f.write('Metadata-Version: 1.0\n')
+                for k, v in cfg.items('metadata'):
+                    if k != 'target_version':
+                        f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
         script_dir = os.path.join(_egg_info, 'scripts')
         # delete entry-point scripts to avoid duping
         self.delete_blockers([
@@ -1094,9 +1093,8 @@ def process(src, dst):
             if locals()[name]:
                 txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
                 if not os.path.exists(txt):
-                    f = open(txt, 'w')  # TODO: probably it is safe to use utf-8
-                    f.write('\n'.join(locals()[name]) + '\n')
-                    f.close()
+                    with open(txt, 'w', encoding="utf-8") as f:
+                        f.write('\n'.join(locals()[name]) + '\n')
 
     def install_wheel(self, wheel_path, tmpdir):
         wheel = Wheel(wheel_path)

From 5305908063aaea823dfc337cb1e5667e7bf2220a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 21 Apr 2024 11:42:20 +0100
Subject: [PATCH 0568/1761] Attempt to use UTF-8 to read egg-link files in
 package_index

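An `.egg-link` file holds the project location and, optionally, the setup
path. A sketch of the line filtering applied below (contents are
illustrative):

    content = "/home/user/project\n."
    lines = list(filter(None, map(str.strip, content.splitlines())))
    assert lines == ["/home/user/project", "."]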
---
 setuptools/package_index.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 42a98b919a..918a34e102 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -422,9 +422,9 @@ def scan_egg_links(self, search_path):
         list(itertools.starmap(self.scan_egg_link, egg_links))
 
     def scan_egg_link(self, path, entry):
-        with open(os.path.join(path, entry)) as raw_lines:
-            # filter non-empty lines
-            lines = list(filter(None, map(str.strip, raw_lines)))
+        content = _read_utf8_with_fallback(os.path.join(path, entry))
+        # filter non-empty lines
+        lines = list(filter(None, map(str.strip, content.splitlines())))
 
         if len(lines) != 2:
             # format is not recognized; punt

From f35f9122376186da349ad040dfb1fc59328e52d4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 21 Apr 2024 11:52:36 +0100
Subject: [PATCH 0569/1761] Apply ruff formatting

---
 setuptools/command/build_ext.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index 49699d30ec..6056fe9b24 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -342,8 +342,8 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
         if compile and os.path.exists(stub_file):
             raise BaseError(stub_file + " already exists! Please delete.")
         if not self.dry_run:
-            content = (
-                '\n'.join([
+            with open(stub_file, 'w', encoding="utf-8") as f:
+                content = '\n'.join([
                     "def __bootstrap__():",
                     "   global __bootstrap__, __file__, __loader__",
                     "   import sys, os, pkg_resources, importlib.util" + if_dl(", dl"),
@@ -367,8 +367,6 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
                     "__bootstrap__()",
                     "",  # terminal \n
                 ])
-            )
-            with open(stub_file, 'w', encoding="utf-8") as f:
                 f.write(content)
         if compile:
             self._compile_and_remove_stub(stub_file)

From 39a8ef47dbaa5f569cfc327175aa8b74fd572eeb Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 21 Apr 2024 12:11:16 +0100
Subject: [PATCH 0570/1761] Simplify conditional encoding in install_scripts
 and easy_install

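The one-liner relies on `open()` accepting `encoding=None` (the default) in
binary mode, while a real encoding there would raise ValueError. A sketch
(file name is illustrative):

    for mode, contents in (("t", "data"), ("b", b"data")):
        encoding = None if "b" in mode else "utf-8"
        with open("out.tmp", "w" + mode, encoding=encoding) as f:
            f.write(contents)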
---
 setuptools/command/easy_install.py    | 8 ++------
 setuptools/command/install_scripts.py | 8 ++------
 2 files changed, 4 insertions(+), 12 deletions(-)

diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index bfacf1e46f..41ff382fe4 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -874,12 +874,8 @@ def write_script(self, script_name, contents, mode="t", blockers=()):
         if os.path.exists(target):
             os.unlink(target)
 
-        if "b" not in mode and isinstance(contents, str):
-            kw = {"encoding": "utf-8"}
-        else:
-            kw = {}
-
-        with open(target, "w" + mode, **kw) as f:
+        encoding = None if "b" in mode else "utf-8"
+        with open(target, "w" + mode, encoding=encoding) as f:
             f.write(contents)
         chmod(target, 0o777 - mask)
 
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index 758937b614..d79a4ab7b0 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -57,14 +57,10 @@ def write_script(self, script_name, contents, mode="t", *ignored):
         target = os.path.join(self.install_dir, script_name)
         self.outfiles.append(target)
 
-        if "b" not in mode and isinstance(contents, str):
-            kw = {"encoding": "utf-8"}
-        else:
-            kw = {}
-
+        encoding = None if "b" in mode else "utf-8"
         mask = current_umask()
         if not self.dry_run:
             ensure_directory(target)
-            with open(target, "w" + mode, **kw) as f:
+            with open(target, "w" + mode, encoding=encoding) as f:
                 f.write(contents)
             chmod(target, 0o777 - mask)

From 0d8c01f15fce24f9ca96f25b093dca2e360bd77b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 21 Apr 2024 16:26:41 -0400
Subject: [PATCH 0571/1761] Check for self.location is not None in
 Distribution.activate

---
 pkg_resources/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 5e24bc9d8e..f8c93fe37b 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -2826,7 +2826,7 @@ def activate(self, path=None, replace=False):
         if path is None:
             path = sys.path
         self.insert_on(path, replace=replace)
-        if path is sys.path:
+        if path is sys.path and self.location is not None:
             fixup_namespace_packages(self.location)
             for pkg in self._get_metadata('namespace_packages.txt'):
                 if pkg in sys.modules:

From b8da410f4121c527996ed63affa686f13215a216 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 21 Apr 2024 16:32:00 -0400
Subject: [PATCH 0572/1761] Fix missing backtick in changelog

---
 NEWS.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEWS.rst b/NEWS.rst
index 20c6903a33..73a8148d9c 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -119,7 +119,7 @@ Improved Documentation
 ----------------------
 
 - Updated documentation referencing obsolete Python 3.7 code. -- by :user:`Avasam` (#4096)
-- Changed ``versionadded`` for "Type information included by default" feature from ``v68.3.0`` to ``v69.0.0`` -- by :user:Avasam` (#4182)
+- Changed ``versionadded`` for "Type information included by default" feature from ``v68.3.0`` to ``v69.0.0`` -- by :user:`Avasam` (#4182)
 - Described the auto-generated files -- by :user:`VladimirFokow` (#4198)
 - Updated "Quickstart" to describe the current status of ``setup.cfg`` and ``pyproject.toml`` -- by :user:`VladimirFokow` (#4200)
 

From 3fbaa4c6d5af1f7846fc21c7fb54952c5cc23621 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 10:31:20 +0100
Subject: [PATCH 0573/1761] Add deprecation warning for non-UTF-8

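A rough sketch of how the warning surfaces (assuming `legacy.cfg` exists on
disk and is not valid UTF-8):

    import warnings
    from setuptools.unicode_utils import _read_utf8_with_fallback

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        _read_utf8_with_fallback("legacy.cfg")
    assert any("deprecated" in str(w.message).lower() for w in caught)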
---
 pkg_resources/__init__.py    | 21 +++++++++++++++++-
 setuptools/command/setopt.py |  9 ++------
 setuptools/package_index.py  |  9 ++------
 setuptools/unicode_utils.py  | 41 ++++++++++++++++++++++++++++++++++++
 4 files changed, 65 insertions(+), 15 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 5d773da541..675b728f9d 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -3323,14 +3323,33 @@ def _initialize_master_working_set():
     globals().update(locals())
 
 
-#  ---- Ported from ``setuptools`` to avoid introducing dependencies ----
+#  ---- Ported from ``setuptools`` to avoid introducing an import inter-dependency ----
 LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
 
 
 def _read_utf8_with_fallback(file: str, fallback_encoding=LOCALE_ENCODING) -> str:
+    """See setuptools.unicode_utils._read_utf8_with_fallback"""
     try:
         with open(file, "r", encoding="utf-8") as f:
             return f.read()
     except UnicodeDecodeError:  # pragma: no cover
+        msg = f"""\
+        ********************************************************************************
+        `encoding="utf-8"` fails with {file!r}, trying `encoding={fallback_encoding!r}`.
+
+        This fallback behaviour is considered **deprecated** and future versions of
+        `setuptools/pkg_resources` may not implement it.
+
+        Please encode {file!r} with "utf-8" to ensure future builds will succeed.
+
+        If this file was produced by `setuptools` itself, cleaning up the cached files
+        and re-building/re-installing the package with a newer version of `setuptools`
+        (e.g. by updating `build-system.requires` in its `pyproject.toml`)
+        might solve the problem.
+        ********************************************************************************
+        """
+        # TODO: Add a deadline?
+        #       See comment in setuptools.unicode_utils._Utf8EncodingNeeded
+        warnings.warn(msg, PkgResourcesDeprecationWarning, stacklevel=2)
         with open(file, "r", encoding=fallback_encoding) as f:
             return f.read()
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index 89b1ac7307..b78d845e60 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -6,7 +6,7 @@
 import configparser
 
 from .. import Command
-from ..compat import py39
+from ..unicode_utils import _cfg_read_utf8_with_fallback
 
 __all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
 
@@ -37,12 +37,7 @@ def edit_config(filename, settings, dry_run=False):
     log.debug("Reading configuration from %s", filename)
     opts = configparser.RawConfigParser()
     opts.optionxform = lambda x: x
-
-    try:
-        opts.read([filename], encoding="utf-8")
-    except UnicodeDecodeError:  # pragma: no cover
-        opts.clear()
-        opts.read([filename], encoding=py39.LOCALE_ENCODING)
+    _cfg_read_utf8_with_fallback(opts, filename)
 
     for section, options in settings.items():
         if options is None:
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 918a34e102..f5a7d77eed 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -40,8 +40,7 @@
 from setuptools.wheel import Wheel
 from setuptools.extern.more_itertools import unique_everseen
 
-from .compat import py39
-from .unicode_utils import _read_utf8_with_fallback
+from .unicode_utils import _read_utf8_with_fallback, _cfg_read_utf8_with_fallback
 
 
 EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
@@ -1014,11 +1013,7 @@ def __init__(self):
 
         rc = os.path.join(os.path.expanduser('~'), '.pypirc')
         if os.path.exists(rc):
-            try:
-                self.read(rc, encoding="utf-8")
-            except UnicodeDecodeError:  # pragma: no cover
-                self.clean()
-                self.read(rc, encoding=py39.LOCALE_ENCODING)
+            _cfg_read_utf8_with_fallback(self, rc)
 
     @property
     def creds_by_repository(self):
diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
index 6b60417a91..9934330da9 100644
--- a/setuptools/unicode_utils.py
+++ b/setuptools/unicode_utils.py
@@ -1,7 +1,9 @@
 import unicodedata
 import sys
+from configparser import ConfigParser
 
 from .compat import py39
+from .warnings import SetuptoolsDeprecationWarning
 
 
 # HFS Plus uses decomposed UTF-8
@@ -57,5 +59,44 @@ def _read_utf8_with_fallback(file: str, fallback_encoding=py39.LOCALE_ENCODING)
         with open(file, "r", encoding="utf-8") as f:
             return f.read()
     except UnicodeDecodeError:  # pragma: no cover
+        _Utf8EncodingNeeded.emit(file=file, fallback_encoding=fallback_encoding)
         with open(file, "r", encoding=fallback_encoding) as f:
             return f.read()
+
+
+def _cfg_read_utf8_with_fallback(
+    cfg: ConfigParser, file: str, fallback_encoding=py39.LOCALE_ENCODING
+) -> str:
+    """Same idea as :func:`_read_utf8_with_fallback`, but for the
+    :meth:`ConfigParser.read` method.
+
+    This function may call ``cfg.clear()``.
+    """
+    try:
+        cfg.read(file, encoding="utf-8")
+    except UnicodeDecodeError:  # pragma: no cover
+        _Utf8EncodingNeeded.emit(file=file, fallback_encoding=fallback_encoding)
+        cfg.clear()
+        cfg.read(file, encoding=fallback_encoding)
+
+
+class _Utf8EncodingNeeded(SetuptoolsDeprecationWarning):
+    _SUMMARY = """
+    `encoding="utf-8"` fails with {file!r}, trying `encoding={fallback_encoding!r}`.
+    """
+
+    _DETAILS = """
+    Fallback behaviour for UTF-8 is considered **deprecated** and future versions of
+    `setuptools` may not implement it.
+
+    Please encode {file!r} with "utf-8" to ensure future builds will succeed.
+
+    If this file was produced by `setuptools` itself, cleaning up the cached files
+    and re-building/re-installing the package with a newer version of `setuptools`
+    (e.g. by updating `build-system.requires` in its `pyproject.toml`)
+    might solve the problem.
+    """
+    # TODO: Add a deadline?
+    #       Will we be able to remove this?
+    #       The question comes to mind mainly because of sdists that have been produced
+    #       by old versions of setuptools and published to PyPI...

From 463b60c8801e7ae1f902bf2f53fa32889b867866 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 10:34:52 +0100
Subject: [PATCH 0574/1761] Update news fragment to mention deprecation

---
 newsfragments/4309.removal.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/newsfragments/4309.removal.rst b/newsfragments/4309.removal.rst
index 08818104f9..b69b17d45f 100644
--- a/newsfragments/4309.removal.rst
+++ b/newsfragments/4309.removal.rst
@@ -3,3 +3,5 @@ This change regards mostly files produced and consumed during the build process
 (e.g. metadata files, script wrappers, automatically updated config files, etc.)
 Although precautions were taken to minimize disruptions, some edge cases might
 be subject to backwards incompatibility.
+
+Support for ``"locale"`` encoding is now **deprecated**.

From 1c91ac81823a32d2d7ff55cde8abef7e4ebfc3e3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 10:39:41 +0100
Subject: [PATCH 0575/1761] Fix type hint in setuptools/unicode_utils.py

---
 setuptools/unicode_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
index 9934330da9..696b34c46a 100644
--- a/setuptools/unicode_utils.py
+++ b/setuptools/unicode_utils.py
@@ -66,7 +66,7 @@ def _read_utf8_with_fallback(file: str, fallback_encoding=py39.LOCALE_ENCODING)
 
 def _cfg_read_utf8_with_fallback(
     cfg: ConfigParser, file: str, fallback_encoding=py39.LOCALE_ENCODING
-) -> str:
+) -> None:
     """Same idea as :func:`_read_utf8_with_fallback`, but for the
     :meth:`ConfigParser.read` method.
 

From 57a29feea3917cad0fc45e5b6148a70b7aab0f5b Mon Sep 17 00:00:00 2001
From: shenxianpeng 
Date: Fri, 19 Apr 2024 04:58:32 +0000
Subject: [PATCH 0576/1761] Uses RST substitution to put badges on one line

---
 README.rst | 26 ++++++++++++++------------
 1 file changed, 14 insertions(+), 12 deletions(-)

diff --git a/README.rst b/README.rst
index eec6e35531..181c3b2af6 100644
--- a/README.rst
+++ b/README.rst
@@ -1,32 +1,34 @@
-.. image:: https://img.shields.io/pypi/v/setuptools.svg
+.. |pypi-version| image:: https://img.shields.io/pypi/v/setuptools.svg
    :target: https://pypi.org/project/setuptools
 
-.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg
+.. |py-version| image:: https://img.shields.io/pypi/pyversions/setuptools.svg
 
-.. image:: https://github.com/pypa/setuptools/actions/workflows/main.yml/badge.svg
+.. |test-badge| image:: https://github.com/pypa/setuptools/actions/workflows/main.yml/badge.svg
    :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22
    :alt: tests
 
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
+.. |ruff-badge| image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+   :target: https://github.com/astral-sh/ruff
+   :alt: Ruff
 
-.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg
-    :target: https://setuptools.pypa.io
+.. |docs-badge| image:: https://img.shields.io/readthedocs/setuptools/latest.svg
+   :target: https://setuptools.pypa.io
 
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
+.. |skeleton-badge| image:: https://img.shields.io/badge/skeleton-2024-informational
    :target: https://blog.jaraco.com/skeleton
 
-.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white
+.. |codecov-badge| image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white
    :target: https://codecov.io/gh/pypa/setuptools
 
-.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat
+.. |tidelift-badge| image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat
    :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme
 
-.. image:: https://img.shields.io/discord/803025117553754132
+.. |discord-badge| image:: https://img.shields.io/discord/803025117553754132
    :target: https://discord.com/channels/803025117553754132/815945031150993468
    :alt: Discord
 
+|pypi-version| |py-version| |test-badge| |ruff-badge| |docs-badge| |skeleton-badge| |codecov-badge| |discord-badge|
+
 See the `Quickstart `_
 and the `User's Guide `_ for
 instructions on how to use Setuptools.

From d16f1921723de09bcb61e814c63c78f23631f4ef Mon Sep 17 00:00:00 2001
From: shenxianpeng 
Date: Fri, 19 Apr 2024 05:04:39 +0000
Subject: [PATCH 0577/1761] Added a news fragment

---
 newsfragments/4312.doc.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4312.doc.rst

diff --git a/newsfragments/4312.doc.rst b/newsfragments/4312.doc.rst
new file mode 100644
index 0000000000..7ada954876
--- /dev/null
+++ b/newsfragments/4312.doc.rst
@@ -0,0 +1 @@
+Uses RST substitution to put badges on one line.

From a14e36a670ec76fc631eba04f3bbde3b57ef5547 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 13:01:07 +0100
Subject: [PATCH 0578/1761] Add cog annotations to extern/__init__.py for
 future checks

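cog executes the Python inside the `[[[cog ... ]]]` markers and splices the
output before `[[[end]]]`, so the generated block can be refreshed or
verified in place. A minimal standalone sketch:

    # [[[cog
    # import cog
    # cog.outl("names = ('packaging',)")
    # ]]]
    names = ('packaging',)
    # [[[end]]]

Running `cog -r` on the file regenerates the block; `cog --check` fails if
it is stale.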
---
 pkg_resources/extern/__init__.py | 7 +++++++
 setuptools/extern/__init__.py    | 7 +++++++
 2 files changed, 14 insertions(+)

diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index df96f7f26d..12a8fccda1 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -70,6 +70,12 @@ def install(self):
             sys.meta_path.append(self)
 
 
+# [[[cog
+# import cog
+# from tools.vendored import yield_root_package
+# names = "\n".join(f"    {x!r}," for x in yield_root_package('pkg_resources'))
+# cog.outl(f"names = (\n{names}\n)")
+# ]]]
 names = (
     'packaging',
     'platformdirs',
@@ -78,4 +84,5 @@ def install(self):
     'more_itertools',
     'backports',
 )
+# [[[end]]]
 VendorImporter(__name__, names).install()
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 427b27cb80..66e216f9b4 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -70,6 +70,12 @@ def install(self):
             sys.meta_path.append(self)
 
 
+# [[[cog
+# import cog
+# from tools.vendored import yield_root_package
+# names = "\n".join(f"    {x!r}," for x in yield_root_package('setuptools'))
+# cog.outl(f"names = (\n{names}\n)")
+# ]]]
 names = (
     'packaging',
     'ordered_set',
@@ -82,4 +88,5 @@ def install(self):
     'tomli',
     'backports',
 )
+# [[[end]]]
 VendorImporter(__name__, names, 'setuptools._vendor').install()

From a19973b25e46219865310402ee17494ff02a0809 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 13:25:43 +0100
Subject: [PATCH 0579/1761] Add a function to tools/vendored to list root
 packages

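The parsing keeps only the root package of each requirement line; a sketch
with illustrative values:

    for line in ("jaraco.text==3.7.0", "backports.tarfile", "# comment", ""):
        if line and not line.startswith("#"):
            print(line.partition("=")[0].partition(".")[0].replace("-", "_"))
    # jaraco
    # backports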
---
 tools/vendored.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/tools/vendored.py b/tools/vendored.py
index 232e9625d2..685b084134 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -166,4 +166,17 @@ def update_setuptools():
     rewrite_more_itertools(vendor / "more_itertools")
 
 
+def yield_root_package(name):
+    """Useful when defining the MetaPathFinder
+    >>> set(yield_root_package("setuptools")) & {"jaraco", "backports"}
+    {'jaraco', 'backports'}
+    """
+    vendored = Path(f"{name}/_vendor/vendored.txt")
+    yield from (
+        line.partition("=")[0].partition(".")[0].replace("-", "_")
+        for line in vendored.read_text(encoding="utf-8").splitlines()
+        if line and not line.startswith("#")
+    )
+
+
 __name__ == '__main__' and update_vendored()

From bf573220152f47d0b90ef6f6e2d90890fb41f564 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 13:28:47 +0100
Subject: [PATCH 0580/1761] Update tox testenv 'vendor' to use cog to
 automatically update/check *.extern

---
 tox.ini | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/tox.ini b/tox.ini
index 7412730008..22dd7af8da 100644
--- a/tox.ini
+++ b/tox.ini
@@ -69,12 +69,16 @@ pass_env = *
 commands =
 	python tools/finalize.py
 
-[testenv:vendor]
+[testenv:{vendor,check-extern}]
 skip_install = True
+allowlist_externals = sh
 deps =
 	path
+	cogapp
 commands =
-	python -m tools.vendored
+	vendor: python -m tools.vendored
+	vendor: sh -c "git grep -l -F '\[\[\[cog' | xargs cog -I {toxinidir} -r"  # update `*.extern`
+	check-extern: sh -c "git grep -l -F '\[\[\[cog' | xargs cog -I {toxinidir} --check"
 
 [testenv:generate-validation-code]
 skip_install = True

From 536ff950a05f7adfd5626d1cf3cdf7bffb3f887e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 13:39:38 +0100
Subject: [PATCH 0581/1761] Sync */vendored.txt and *.extern.py

---
 pkg_resources/_vendor/vendored.txt | 2 ++
 pkg_resources/extern/__init__.py   | 2 ++
 setuptools/extern/__init__.py      | 6 +++---
 3 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt
index c18a2cc0eb..967a2841f3 100644
--- a/pkg_resources/_vendor/vendored.txt
+++ b/pkg_resources/_vendor/vendored.txt
@@ -9,5 +9,7 @@ jaraco.text==3.7.0
 importlib_resources==5.10.2
 # required for importlib_resources on older Pythons
 zipp==3.7.0
+# required for jaraco.functools
+more_itertools==8.8.0
 # required for jaraco.context on older Pythons
 backports.tarfile
diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index 12a8fccda1..7f80b04164 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -79,8 +79,10 @@ def install(self):
 names = (
     'packaging',
     'platformdirs',
+    'typing_extensions',
     'jaraco',
     'importlib_resources',
+    'zipp',
     'more_itertools',
     'backports',
 )
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 66e216f9b4..16e2c9ea9e 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -80,11 +80,11 @@ def install(self):
     'packaging',
     'ordered_set',
     'more_itertools',
-    'importlib_metadata',
-    'zipp',
-    'importlib_resources',
     'jaraco',
+    'importlib_resources',
+    'importlib_metadata',
     'typing_extensions',
+    'zipp',
     'tomli',
     'backports',
 )

From 35bb574c99b1730d4284b37bd7770c2bf9832e82 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 13:49:20 +0100
Subject: [PATCH 0582/1761] Add 'check-extern' as collateral to github actions
 workflow

---
 .github/workflows/main.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 3f3b53aa07..6ec4f83be5 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -122,6 +122,7 @@ jobs:
         job:
         - diffcov
         - docs
+        - check-extern
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4

From 86770badfb4290cfbdce19880f5590fb33390896 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 13:58:58 +0100
Subject: [PATCH 0583/1761] Match version of more_itertools that is already
 installed in pkg_resources with vendored.txt

---
 pkg_resources/_vendor/vendored.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt
index 967a2841f3..f8cbfd967e 100644
--- a/pkg_resources/_vendor/vendored.txt
+++ b/pkg_resources/_vendor/vendored.txt
@@ -10,6 +10,6 @@ importlib_resources==5.10.2
 # required for importlib_resources on older Pythons
 zipp==3.7.0
 # required for jaraco.functools
-more_itertools==8.8.0
+more_itertools==10.2.0
 # required for jaraco.context on older Pythons
 backports.tarfile

From 175787e1066c865dcaf18e66563a00233eab2c9d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 14:25:11 +0100
Subject: [PATCH 0584/1761] Improve determinism in doctest for tools/vendored

---
 tools/vendored.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/tools/vendored.py b/tools/vendored.py
index 685b084134..63797ea24a 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -168,8 +168,9 @@ def update_setuptools():
 
 def yield_root_package(name):
     """Useful when defining the MetaPathFinder
-    >>> set(yield_root_package("setuptools")) & {"jaraco", "backports"}
-    {'jaraco', 'backports'}
+    >>> examples = set(yield_root_package("setuptools")) & {"jaraco", "backports"}
+    >>> list(sorted(examples))
+    ['backports', 'jaraco']
     """
     vendored = Path(f"{name}/_vendor/vendored.txt")
     yield from (

From 744cf2a2befb6a616657c105e5c9be9f3f921224 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 24 Apr 2024 10:48:06 -0400
Subject: [PATCH 0585/1761] Allow macos on Python 3.8 to fail as GitHub CI has
 dropped support.

Closes jaraco/skeleton#124.
---
 .github/workflows/main.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index ac0ff69e22..5ace4c504f 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -50,7 +50,7 @@ jobs:
         - python: pypy3.10
           platform: ubuntu-latest
     runs-on: ${{ matrix.platform }}
-    continue-on-error: ${{ matrix.python == '3.13' }}
+    continue-on-error: ${{ matrix.python == '3.13' || (matrix.python == '3.8' || matrix.python == '3.9') && matrix.platform == 'macos-latest' }}
     steps:
       - uses: actions/checkout@v4
       - name: Setup Python

From bcf8f079eb729e7bcd50c10cf4da522620b00635 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 24 Apr 2024 11:06:12 -0400
Subject: [PATCH 0586/1761] Move project.urls to appear in the order that
 ini2toml generates it. Remove project.scripts.

---
 pyproject.toml | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 869fe7e5ba..04b14cbc79 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,6 +21,9 @@ dependencies = [
 ]
 dynamic = ["version"]
 
+[project.urls]
+Homepage = "https://github.com/PROJECT_PATH"
+
 [project.optional-dependencies]
 testing = [
 	# upstream
@@ -44,9 +47,4 @@ docs = [
 	# local
 ]
 
-[project.urls]
-Homepage = "https://github.com/PROJECT_PATH"
-
-[project.scripts]
-
 [tool.setuptools_scm]

From a30589bf3ce8b94b1b8abbb14e8a470778680950 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 25 Apr 2024 12:48:42 +0100
Subject: [PATCH 0587/1761] Avoid errors on Python 3.8 macos-latest as GitHub
 CI has dropped support

---
 .github/workflows/main.yml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 3f3b53aa07..d08b857eca 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -61,6 +61,12 @@ jobs:
         - platform: ubuntu-latest
           python: "3.10"
           distutils: stdlib
+        # Python 3.9 is on macos-13 but not macos-latest (macos-14-arm64)
+        # https://github.com/actions/setup-python/issues/850
+        # https://github.com/actions/setup-python/issues/696#issuecomment-1637587760
+        - {python: "3.8", platform: "macos-13"}
+        exclude:
+        - {python: "3.8", platform: "macos-latest"}
     runs-on: ${{ matrix.platform }}
     continue-on-error: ${{ matrix.python == '3.13' }}
     env:

From 37f20201aec28a61f011a6248f042d174292976d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 25 Apr 2024 13:02:57 +0100
Subject: [PATCH 0588/1761] Add review suggestions.

---
 .github/workflows/main.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index d08b857eca..82757f478c 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -61,7 +61,7 @@ jobs:
         - platform: ubuntu-latest
           python: "3.10"
           distutils: stdlib
-        # Python 3.9 is on macos-13 but not macos-latest (macos-14-arm64)
+        # Python 3.8, 3.9 are on macos-13 but not macos-latest (macos-14-arm64)
         # https://github.com/actions/setup-python/issues/850
         # https://github.com/actions/setup-python/issues/696#issuecomment-1637587760
         - {python: "3.8", platform: "macos-13"}

From b4cfab303aa1f3c2b52ff62da479f321f0681a08 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 25 Apr 2024 13:34:38 +0100
Subject: [PATCH 0589/1761] Mark unstable tests on macOS

---
 setuptools/tests/test_editable_install.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index 119b128694..91b65e5a38 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -118,6 +118,7 @@ def editable_opts(request):
 SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
 
 
+@pytest.mark.xfail(sys.platform == "darwin", reason="Test is unstable on macOS?")
 @pytest.mark.parametrize(
     "files",
     [
@@ -897,6 +898,7 @@ class TestOverallBehaviour:
         },
     }
 
+    @pytest.mark.xfail(sys.platform == "darwin", reason="Test is unstable on macOS?")
     @pytest.mark.parametrize("layout", EXAMPLES.keys())
     def test_editable_install(self, tmp_path, venv, layout, editable_opts):
         project, _ = install_project(

From f3f5bf7869e87d80eb9457c2e0537f82af255500 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 25 Apr 2024 13:44:51 +0100
Subject: [PATCH 0590/1761] Add proper xfail mark

---
 setuptools/tests/test_editable_install.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index 91b65e5a38..300a02cfb9 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -118,7 +118,7 @@ def editable_opts(request):
 SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
 
 
-@pytest.mark.xfail(sys.platform == "darwin", reason="Test is unstable on macOS?")
+@pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
 @pytest.mark.parametrize(
     "files",
     [
@@ -898,7 +898,7 @@ class TestOverallBehaviour:
         },
     }
 
-    @pytest.mark.xfail(sys.platform == "darwin", reason="Test is unstable on macOS?")
+    @pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
     @pytest.mark.parametrize("layout", EXAMPLES.keys())
     def test_editable_install(self, tmp_path, venv, layout, editable_opts):
         project, _ = install_project(

From 80d101eea16a6fc72759e193882ef60451f51d98 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 8 Mar 2024 17:21:56 -0500
Subject: [PATCH 0591/1761] Update `pytest.ini` for `EncodingWarning` from
 external libraries + avoid getpreferredencoding when possible

---
 pytest.ini                           | 27 ++++++++++++++-------------
 setuptools/command/editable_wheel.py |  3 ++-
 setuptools/tests/__init__.py         |  9 +++++++--
 3 files changed, 23 insertions(+), 16 deletions(-)

diff --git a/pytest.ini b/pytest.ini
index e7c96274a3..40a64b5cd4 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -10,6 +10,18 @@ filterwarnings=
 	# Fail on warnings
 	error
 
+	# Workarounds for pypa/setuptools#3810
+	# Can't use EncodingWarning as it doesn't exist on Python 3.9.
+	# These warnings only appear on Python 3.10+
+	default:'encoding' argument not specified
+
+	# pypa/distutils#236
+	ignore:'encoding' argument not specified::distutils
+	ignore:'encoding' argument not specified::setuptools._distutils
+
+	# subprocess.check_output still warns with EncodingWarning even with encoding set
+	ignore:'encoding' argument not specified::setuptools.tests.environment
+
 	## upstream
 
 	# Ensure ResourceWarnings are emitted
@@ -18,14 +30,8 @@ filterwarnings=
 	# realpython/pytest-mypy#152
 	ignore:'encoding' argument not specified::pytest_mypy
 
-	# python/cpython#100750
-	ignore:'encoding' argument not specified::platform
-
-	# pypa/build#615
-	ignore:'encoding' argument not specified::build.env
-
-	# dateutil/dateutil#1284
-	ignore:datetime.datetime.utcfromtimestamp:DeprecationWarning:dateutil.tz.tz
+	# pytest-dev/pytest # TODO: Raise issue upstream 
+	ignore:'encoding' argument not specified::_pytest
 
 	## end upstream
 
@@ -69,11 +75,6 @@ filterwarnings=
 	# https://github.com/pypa/setuptools/issues/3655
 	ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning
 
-	# Workarounds for pypa/setuptools#3810
-	# Can't use EncodingWarning as it doesn't exist on Python 3.9
-	default:'encoding' argument not specified
-	default:UTF-8 Mode affects locale.getpreferredencoding().
-
 	# Avoid errors when testing pkg_resources.declare_namespace
 	ignore:.*pkg_resources\.declare_namespace.*:DeprecationWarning
 
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 1722817f82..b8ed84750a 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -565,7 +565,8 @@ def _encode_pth(content: str) -> bytes:
     This function tries to simulate this behaviour without having to create an
     actual file, in a way that supports a range of active Python versions.
     (There seems to be some variety in the way different version of Python handle
-    ``encoding=None``, not all of them use ``locale.getpreferredencoding(False)``).
+    ``encoding=None``, not all of them use ``locale.getpreferredencoding(False)``
+    or ``locale.getencoding()``).
     """
     with io.BytesIO() as buffer:
         wrapper = io.TextIOWrapper(buffer, encoding=py39.LOCALE_ENCODING)
diff --git a/setuptools/tests/__init__.py b/setuptools/tests/__init__.py
index 564adf2b0a..738ebf43be 100644
--- a/setuptools/tests/__init__.py
+++ b/setuptools/tests/__init__.py
@@ -1,10 +1,15 @@
 import locale
+import sys
 
 import pytest
 
 
 __all__ = ['fail_on_ascii']
 
-
-is_ascii = locale.getpreferredencoding() == 'ANSI_X3.4-1968'
+locale_encoding = (
+    locale.getencoding()
+    if sys.version_info >= (3, 11)
+    else locale.getpreferredencoding(False)
+)
+is_ascii = locale_encoding == 'ANSI_X3.4-1968'
 fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")
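
For context: PEP 597 adds EncodingWarning on Python 3.10+ when a file is opened without an explicit encoding, and Python 3.11 adds locale.getencoding(), which reports the locale encoding while ignoring UTF-8 Mode (unlike locale.getpreferredencoding(False)). A minimal sketch of the version-dependent probe above as a standalone script (behaviour per the CPython docs; printed values are examples):

    import locale
    import sys

    if sys.version_info >= (3, 11):
        # ignores UTF-8 Mode, unlike getpreferredencoding(False)
        encoding = locale.getencoding()
    else:
        encoding = locale.getpreferredencoding(False)

    print(encoding)  # e.g. 'UTF-8'; 'ANSI_X3.4-1968' in a C/ASCII locale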

From 9acea31d0e8c3f94db7dca3fedaa256d0f8a2250 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 21 Apr 2024 16:18:38 -0400
Subject: [PATCH 0592/1761] Remove distutils EncodingWarning exclusion in
 pytest.ini Vendored distutils was updated with fixes

---
 pytest.ini | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/pytest.ini b/pytest.ini
index 40a64b5cd4..2ce6e3e1e7 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -15,10 +15,6 @@ filterwarnings=
 	# These warnings only appear on Python 3.10+
 	default:'encoding' argument not specified
 
-	# pypa/distutils#236
-	ignore:'encoding' argument not specified::distutils
-	ignore:'encoding' argument not specified::setuptools._distutils
-
 	# subprocess.check_output still warns with EncodingWarning even with encoding set
 	ignore:'encoding' argument not specified::setuptools.tests.environment
 

From 27f5e0ae4ba6fae8a30bc7b7aa674f8be2afc22f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 16:34:43 +0100
Subject: [PATCH 0593/1761] Ignore encoding warnings, but only in stdlib's
 distutils

---
 pytest.ini | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pytest.ini b/pytest.ini
index 2ce6e3e1e7..648b145b69 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -29,6 +29,9 @@ filterwarnings=
 	# pytest-dev/pytest # TODO: Raise issue upstream 
 	ignore:'encoding' argument not specified::_pytest
 
+	# Already fixed in pypa/distutils, but present in stdlib
+	ignore:'encoding' argument not specified::distutils
+
 	## end upstream
 
 	# https://github.com/pypa/setuptools/issues/1823

From ef7d2590ec6b4e4a410b7e4f983386ae07f13f64 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 14:36:08 +0100
Subject: [PATCH 0594/1761] Remove EncodingWarning workarounds for setuptools
 from pytest.ini

---
 pytest.ini | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/pytest.ini b/pytest.ini
index 648b145b69..1b565222e2 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -10,14 +10,6 @@ filterwarnings=
 	# Fail on warnings
 	error
 
-	# Workarounds for pypa/setuptools#3810
-	# Can't use EncodingWarning as it doesn't exist on Python 3.9.
-	# These warnings only appear on Python 3.10+
-	default:'encoding' argument not specified
-
-	# subprocess.check_output still warns with EncodingWarning even with encoding set
-	ignore:'encoding' argument not specified::setuptools.tests.environment
-
 	## upstream
 
 	# Ensure ResourceWarnings are emitted

From 4fc0b15d424ae97663d48a34ee7bd586b3aade69 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 15:56:28 +0100
Subject: [PATCH 0595/1761] Fix EncodingWarning in test_build_meta

---
 setuptools/tests/test_build_meta.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 43830feb77..cc996b4255 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -160,7 +160,7 @@ def run():
             # to obtain a distribution object first, and then run the distutils
             # commands later, because these files will be removed in the meantime.
 
-            with open('world.py', 'w') as f:
+            with open('world.py', 'w', encoding="utf-8") as f:
                 f.write('x = 42')
 
             try:

From d7ac06f0d5892183c9ac9ce5501d785348b4bbde Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 21:48:32 +0100
Subject: [PATCH 0596/1761] Restore comment to pytest.ini that got lost in
 changes

---
 pytest.ini | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pytest.ini b/pytest.ini
index 1b565222e2..2aceea2d58 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -10,6 +10,9 @@ filterwarnings=
 	# Fail on warnings
 	error
 
+	# Workarounds for pypa/setuptools#3810
+	# Can't use EncodingWarning as it doesn't exist on Python 3.9.
+	# These warnings only appear on Python 3.10+
 	## upstream
 
 	# Ensure ResourceWarnings are emitted

From 969f00b16d190567ce27d9788e90ab4806a32443 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 22:54:45 +0100
Subject: [PATCH 0597/1761] Re-enable warning filter for distutils.text_file
 inside test_excluded_subpackages

---
 setuptools/tests/test_build_py.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index 4aa1fe68fa..db2052a586 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -1,6 +1,7 @@
 import os
 import stat
 import shutil
+import warnings
 from pathlib import Path
 from unittest.mock import Mock
 
@@ -162,11 +163,23 @@ def test_excluded_subpackages(tmpdir_cwd):
     dist.parse_config_files()
 
     build_py = dist.get_command_obj("build_py")
+
     msg = r"Python recognizes 'mypkg\.tests' as an importable package"
     with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
         # TODO: To fix #3260 we need some transition period to deprecate the
         # existing behavior of `include_package_data`. After the transition, we
         # should remove the warning and fix the behaviour.
+
+        if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib":
+            # pytest.warns reset the warning filter temporarily
+            # https://github.com/pytest-dev/pytest/issues/4011#issuecomment-423494810
+            warnings.filterwarnings(
+                "ignore",
+                "'encoding' argument not specified",
+                module="distutils.text_file",
+                # This warning is already fixed in pypa/distutils but not in stdlib
+            )
+
         build_py.finalize_options()
         build_py.run()
 

From 919e3934c9e7a8085bd4ee72d3259be76fe0186b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 23 Apr 2024 00:02:06 +0100
Subject: [PATCH 0598/1761] Attempt to fix errors in mypy for PyPy (testing a
 hypothesis)

---
 pytest.ini | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/pytest.ini b/pytest.ini
index 2aceea2d58..4bebae0fd9 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -18,6 +18,12 @@ filterwarnings=
 	# Ensure ResourceWarnings are emitted
 	default::ResourceWarning
 
+	# python/mypy#17057
+	ignore:'encoding' argument not specified::mypy.config_parser
+	ignore:'encoding' argument not specified::mypy.build
+	ignore:'encoding' argument not specified::mypy.modulefinder
+	ignore:'encoding' argument not specified::mypy.metastore
+
 	# realpython/pytest-mypy#152
 	ignore:'encoding' argument not specified::pytest_mypy
 

From 57ea91b448ca3852c33a2b164b5d6bdc3551a3a6 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 24 Apr 2024 16:34:19 +0100
Subject: [PATCH 0599/1761] Attempt to solve the problem in PyPy

---
 pytest.ini | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/pytest.ini b/pytest.ini
index 4bebae0fd9..8ab6d5ebf1 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -19,10 +19,10 @@ filterwarnings=
 	default::ResourceWarning
 
 	# python/mypy#17057
-	ignore:'encoding' argument not specified::mypy.config_parser
-	ignore:'encoding' argument not specified::mypy.build
-	ignore:'encoding' argument not specified::mypy.modulefinder
-	ignore:'encoding' argument not specified::mypy.metastore
+	ignore:'encoding' argument not specified::mypy
+	ignore:'encoding' argument not specified::configparser
+	# ^-- ConfigParser is called by mypy,
+	#     but ignoring the warning in `mypy` is not enough on PyPy
 
 	# realpython/pytest-mypy#152
 	ignore:'encoding' argument not specified::pytest_mypy

From 1316a611f2faf5828f3d10a1bb4df7a2475ebeef Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 24 Apr 2024 16:35:03 +0100
Subject: [PATCH 0600/1761] Better wording for comment in pytest.ini

---
 pytest.ini | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pytest.ini b/pytest.ini
index 8ab6d5ebf1..000e663471 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -22,7 +22,8 @@ filterwarnings=
 	ignore:'encoding' argument not specified::mypy
 	ignore:'encoding' argument not specified::configparser
 	# ^-- ConfigParser is called by mypy,
-	#     but ignoring the warning in `mypy` is not enough on PyPy
+	#     but ignoring the warning in `mypy` is not enough
+	#     to make it work on PyPy
 
 	# realpython/pytest-mypy#152
 	ignore:'encoding' argument not specified::pytest_mypy

From 8dc50ccc2fe55a3c6b2c99f5c95597242c61572d Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 24 Apr 2024 14:02:04 -0400
Subject: [PATCH 0601/1761] Add newsfragment

---
 newsfragments/4255.misc.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4255.misc.rst

diff --git a/newsfragments/4255.misc.rst b/newsfragments/4255.misc.rst
new file mode 100644
index 0000000000..1f9fde768b
--- /dev/null
+++ b/newsfragments/4255.misc.rst
@@ -0,0 +1 @@
+Treat `EncodingWarning`s as an errors in tests. -- by :user:`Avasam`

From b69c0de234a9b648828e5f3a180639f5ea0e24fa Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 24 Apr 2024 14:05:12 -0400
Subject: [PATCH 0602/1761] Update comment

---
 pytest.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytest.ini b/pytest.ini
index 000e663471..87e3d9aae3 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -28,7 +28,7 @@ filterwarnings=
 	# realpython/pytest-mypy#152
 	ignore:'encoding' argument not specified::pytest_mypy
 
-	# pytest-dev/pytest # TODO: Raise issue upstream 
+	# TODO: Set encoding when opening tmpdir files with pytest's LocalPath.open
 	ignore:'encoding' argument not specified::_pytest
 
 	# Already fixed in pypa/distutils, but present in stdlib

From b341011f3b8be7e10d933831750aed48b381f382 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 24 Apr 2024 14:38:00 -0400
Subject: [PATCH 0603/1761] Fix Windows issue

---
 setuptools/tests/test_windows_wrappers.py | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
index 3f321386f1..b272689351 100644
--- a/setuptools/tests/test_windows_wrappers.py
+++ b/setuptools/tests/test_windows_wrappers.py
@@ -110,7 +110,11 @@ def test_basic(self, tmpdir):
             'arg5 a\\\\b',
         ]
         proc = subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, text=True
+            cmd,
+            stdout=subprocess.PIPE,
+            stdin=subprocess.PIPE,
+            text=True,
+            encoding="utf-8",
         )
         stdout, stderr = proc.communicate('hello\nworld\n')
         actual = stdout.replace('\r\n', '\n')
@@ -143,7 +147,11 @@ def test_symlink(self, tmpdir):
             'arg5 a\\\\b',
         ]
         proc = subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, text=True
+            cmd,
+            stdout=subprocess.PIPE,
+            stdin=subprocess.PIPE,
+            text=True,
+            encoding="utf-8",
         )
         stdout, stderr = proc.communicate('hello\nworld\n')
         actual = stdout.replace('\r\n', '\n')
@@ -191,6 +199,7 @@ def test_with_options(self, tmpdir):
             stdin=subprocess.PIPE,
             stderr=subprocess.STDOUT,
             text=True,
+            encoding="utf-8",
         )
         stdout, stderr = proc.communicate()
         actual = stdout.replace('\r\n', '\n')
@@ -240,6 +249,7 @@ def test_basic(self, tmpdir):
             stdin=subprocess.PIPE,
             stderr=subprocess.STDOUT,
             text=True,
+            encoding="utf-8",
         )
         stdout, stderr = proc.communicate()
         assert not stdout
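
With text=True alone, the pipes are decoded with the locale default encoding, which is exactly what the EncodingWarning checks flag; passing encoding= makes the choice explicit. A minimal standalone sketch of the pattern, echoing a line through a child interpreter:

    import subprocess
    import sys

    proc = subprocess.Popen(
        [sys.executable, '-c', 'print(input())'],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        text=True,
        encoding='utf-8',  # explicit per PEP 597; locale-independent
    )
    stdout, _ = proc.communicate('hello\n')
    assert stdout == 'hello\n'  # text mode also normalizes '\r\n'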

From 22ca7e5ba90cce639e241428226279b0c7be2242 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 25 Apr 2024 12:29:09 +0100
Subject: [PATCH 0604/1761] Update comment in pytest.ini

---
 pytest.ini | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/pytest.ini b/pytest.ini
index 87e3d9aae3..0c9651d96f 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -13,6 +13,7 @@ filterwarnings=
 	# Workarounds for pypa/setuptools#3810
 	# Can't use EncodingWarning as it doesn't exist on Python 3.9.
 	# These warnings only appear on Python 3.10+
+
 	## upstream
 
 	# Ensure ResourceWarnings are emitted
@@ -28,7 +29,8 @@ filterwarnings=
 	# realpython/pytest-mypy#152
 	ignore:'encoding' argument not specified::pytest_mypy
 
-	# TODO: Set encoding when opening tmpdir files with pytest's LocalPath.open
+	# TODO: Set encoding when opening/writing tmpdir files with pytest's LocalPath.open
+	# see pypa/setuptools#4326
 	ignore:'encoding' argument not specified::_pytest
 
 	# Already fixed in pypa/distutils, but present in stdlib

From 3ea4aa933ba140cb1c19ce44dfef4564563a79ac Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 25 Apr 2024 14:40:49 +0100
Subject: [PATCH 0605/1761] Improve RST syntax on news fragment.

---
 newsfragments/4255.misc.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4255.misc.rst b/newsfragments/4255.misc.rst
index 1f9fde768b..e5e5728d70 100644
--- a/newsfragments/4255.misc.rst
+++ b/newsfragments/4255.misc.rst
@@ -1 +1 @@
-Treat `EncodingWarning`s as an errors in tests. -- by :user:`Avasam`
+Treat ``EncodingWarning``s as an errors in tests. -- by :user:`Avasam`

From 0faba5086564eab2feb2fa94f2e4592c50589952 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 25 Apr 2024 10:05:44 -0400
Subject: [PATCH 0606/1761] Fix typo

---
 newsfragments/4255.misc.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4255.misc.rst b/newsfragments/4255.misc.rst
index e5e5728d70..50a0a3d195 100644
--- a/newsfragments/4255.misc.rst
+++ b/newsfragments/4255.misc.rst
@@ -1 +1 @@
-Treat ``EncodingWarning``s as an errors in tests. -- by :user:`Avasam`
+Treat ``EncodingWarning``s as errors in tests. -- by :user:`Avasam`

From 7b17049aabb7b493ad106fcefe00856607b6f181 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 15:24:49 -0400
Subject: [PATCH 0607/1761] Pin against pyproject-hooks==1.1. Closes #4333.

---
 .github/workflows/main.yml | 3 ++-
 setup.cfg                  | 3 +++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index c5aed1d1a0..ec2e567a1e 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -95,7 +95,8 @@ jobs:
         shell: bash
         run: |
           rm -rf dist
-          pipx run build
+          # workaround for pypa/setuptools#4333
+          pipx run --pip-args 'pyproject-hooks!=1.1' build
           echo "PRE_BUILT_SETUPTOOLS_SDIST=$(ls dist/*.tar.gz)" >> $GITHUB_ENV
           echo "PRE_BUILT_SETUPTOOLS_WHEEL=$(ls dist/*.whl)" >> $GITHUB_ENV
           rm -rf setuptools.egg-info  # Avoid interfering with the other tests
diff --git a/setup.cfg b/setup.cfg
index c8bb0ed41d..68be6c8e7c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -77,6 +77,9 @@ testing =
 	# No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly
 	importlib_metadata
 
+	# workaround for pypa/setuptools#4333
+	pyproject-hooks!=1.1
+
 docs =
 	# upstream
 	sphinx >= 3.5

From 4a0a9ce587515edce83ab97aa5c7943c045ac180 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 16:13:58 -0400
Subject: [PATCH 0608/1761] Make the test less fragile and simply search for
 the presence of a ValueError in the traceback. Closes #4334.

---
 setuptools/tests/test_egg_info.py | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index a4b0ecf398..f6b2302d97 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -213,13 +213,9 @@ def test_license_is_a_string(self, tmpdir_cwd, env):
         with pytest.raises(AssertionError) as exc:
             self._run_egg_info_command(tmpdir_cwd, env)
 
-        # Hopefully this is not too fragile: the only argument to the
-        # assertion error should be a traceback, ending with:
-        #     ValueError: ....
-        #
-        #     assert not 1
-        tb = exc.value.args[0].split('\n')
-        assert tb[-3].lstrip().startswith('ValueError')
+        # The only argument to the assertion error should be a traceback
+        # containing a ValueError
+        assert 'ValueError' in exc.value.args[0]
 
     def test_rebuilt(self, tmpdir_cwd, env):
         """Ensure timestamps are updated when the command is re-run."""

From fe8980b4505cea1982979fdca20c4078ed8fb8c6 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 09:38:31 -0400
Subject: [PATCH 0609/1761] Remove pop_prefix parameter, unused.

---
 setuptools/package_index.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index f5a7d77eed..345344c2c2 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -863,7 +863,7 @@ def _download_svn(self, url, _filename):
         raise DistutilsError(f"Invalid config, SVN download is not supported: {url}")
 
     @staticmethod
-    def _vcs_split_rev_from_url(url, pop_prefix=False):
+    def _vcs_split_rev_from_url(url):
         scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
 
         scheme = scheme.split('+', 1)[-1]
@@ -882,7 +882,7 @@ def _vcs_split_rev_from_url(url, pop_prefix=False):
 
     def _download_git(self, url, filename):
         filename = filename.split('#', 1)[0]
-        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
+        url, rev = self._vcs_split_rev_from_url(url)
 
         self.info("Doing git clone from %s to %s", url, filename)
         os.system("git clone --quiet %s %s" % (url, filename))
@@ -901,7 +901,7 @@ def _download_git(self, url, filename):
 
     def _download_hg(self, url, filename):
         filename = filename.split('#', 1)[0]
-        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
+        url, rev = self._vcs_split_rev_from_url(url)
 
         self.info("Doing hg clone from %s to %s", url, filename)
         os.system("hg clone --quiet %s %s" % (url, filename))

From 35ee2b4abd8bc745766c809d31fc6bf19e6979dc Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 09:49:56 -0400
Subject: [PATCH 0610/1761] Add a test capturing the basic expectation.

---
 setuptools/package_index.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 345344c2c2..d2985cc1f9 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -864,6 +864,15 @@ def _download_svn(self, url, _filename):
 
     @staticmethod
     def _vcs_split_rev_from_url(url):
+        """
+        >>> vsrfu = PackageIndex._vcs_split_rev_from_url
+        >>> vsrfu('git+https://github.com/pypa/setuptools@v69.0.0#egg-info=setuptools')
+        ('https://github.com/pypa/setuptools', 'v69.0.0')
+        >>> vsrfu('git+https://github.com/pypa/setuptools#egg-info=setuptools')
+        ('https://github.com/pypa/setuptools', None)
+        >>> vsrfu('http://foo/bar')
+        ('http://foo/bar', None)
+        """
         scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
 
         scheme = scheme.split('+', 1)[-1]

From eb42e5c45b5888863aa1877517f6dbf6f7b080cc Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 10:15:31 -0400
Subject: [PATCH 0611/1761] Update _vcs_split_rev_from_url to use modern
 constructs.

---
 setuptools/package_index.py | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index d2985cc1f9..9da138d87c 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -865,6 +865,8 @@ def _download_svn(self, url, _filename):
     @staticmethod
     def _vcs_split_rev_from_url(url):
         """
+        Given a possible VCS URL, return a clean URL and resolved revision if any.
+
         >>> vsrfu = PackageIndex._vcs_split_rev_from_url
         >>> vsrfu('git+https://github.com/pypa/setuptools@v69.0.0#egg-info=setuptools')
         ('https://github.com/pypa/setuptools', 'v69.0.0')
@@ -873,21 +875,24 @@ def _vcs_split_rev_from_url(url):
         >>> vsrfu('http://foo/bar')
         ('http://foo/bar', None)
         """
-        scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
+        parts = urllib.parse.urlsplit(url)
 
-        scheme = scheme.split('+', 1)[-1]
+        clean_scheme = parts.scheme.split('+', 1)[-1]
 
         # Some fragment identification fails
-        path = path.split('#', 1)[0]
+        no_fragment_path, _, _ = parts.path.partition('#')
 
-        rev = None
-        if '@' in path:
-            path, rev = path.rsplit('@', 1)
+        pre, sep, post = no_fragment_path.rpartition('@')
+        clean_path, rev = (pre, post) if sep else (post, None)
 
-        # Also, discard fragment
-        url = urllib.parse.urlunsplit((scheme, netloc, path, query, ''))
+        resolved = parts._replace(
+            scheme=clean_scheme,
+            path=clean_path,
+            # discard the fragment
+            fragment='',
+        ).geturl()
 
-        return url, rev
+        return resolved, rev
 
     def _download_git(self, url, filename):
         filename = filename.split('#', 1)[0]
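
The rewrite leans on urlsplit returning a named tuple, so _replace plus geturl() rebuilds the URL without the positional urlunsplit call, and rpartition cleanly separates an optional @rev suffix. A standalone sketch of those constructs with an illustrative URL (omitting the extra '#' handling above):

    import urllib.parse

    parts = urllib.parse.urlsplit('git+https://host/repo@v1.0#egg=foo')
    scheme = parts.scheme.split('+', 1)[-1]            # 'https'
    pre, sep, post = parts.path.rpartition('@')
    path, rev = (pre, post) if sep else (post, None)   # ('/repo', 'v1.0')
    clean = parts._replace(scheme=scheme, path=path, fragment='').geturl()
    assert (clean, rev) == ('https://host/repo', 'v1.0')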

From 7c1c29b56dcff03bb637eeabba139c31600a55d1 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 10:25:24 -0400
Subject: [PATCH 0612/1761] package-index: Extract fall-through methods
 _download_vcs and _download_other.

---
 setuptools/package_index.py | 19 ++++++++++++-------
 1 file changed, 12 insertions(+), 7 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 9da138d87c..5a3e9db2a2 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -831,19 +831,24 @@ def _download_url(self, scheme, url, tmpdir):
 
         filename = os.path.join(tmpdir, name)
 
-        # Download the file
-        #
+        return self._download_vcs(url, filename) or self._download_other(url, filename)
+
+    def _download_vcs(self, url, filename):
+        scheme = urllib.parse.urlsplit(url).scheme
         if scheme == 'svn' or scheme.startswith('svn+'):
             return self._download_svn(url, filename)
         elif scheme == 'git' or scheme.startswith('git+'):
             return self._download_git(url, filename)
         elif scheme.startswith('hg+'):
             return self._download_hg(url, filename)
-        elif scheme == 'file':
-            return urllib.request.url2pathname(urllib.parse.urlparse(url)[2])
-        else:
-            self.url_ok(url, True)  # raises error if not allowed
-            return self._attempt_download(url, filename)
+
+    def _download_other(self, url, filename):
+        scheme = urllib.parse.urlsplit(url).scheme
+        if scheme == 'file':
+            return urllib.request.url2pathname(urllib.parse.urlparse(url).path)
+        # raise error if not allowed
+        self.url_ok(url, True)
+        return self._attempt_download(url, filename)
 
     def scan_url(self, url):
         self.process_url(url, True)
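
The `or` chaining in _download_url works because _download_vcs implicitly returns None for non-VCS URLs, so evaluation falls through to _download_other. A small sketch of the dispatch shape (hypothetical handlers, not the shipped code):

    def try_vcs(url):
        # returns None for anything that is not a recognized VCS URL
        return "cloned" if url.startswith(("git+", "hg+", "svn")) else None

    def plain_download(url):
        return "downloaded"

    def fetch(url):
        return try_vcs(url) or plain_download(url)

    assert fetch("git+https://host/repo") == "cloned"
    assert fetch("https://host/pkg.tar.gz") == "downloaded"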

From 4d54fa77943c78f393217b2931665d3ab64cd3f2 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 10:46:34 -0400
Subject: [PATCH 0613/1761] Extract _resolve_vcs for resolving a VCS from a
 URL.

---
 setuptools/package_index.py | 34 +++++++++++++++++++++++++---------
 1 file changed, 25 insertions(+), 9 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 5a3e9db2a2..8d46a70c47 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1,5 +1,6 @@
 """PyPI and direct package downloading."""
 
+import contextlib
 import sys
 import os
 import re
@@ -587,7 +588,7 @@ def download(self, spec, tmpdir):
             scheme = URL_SCHEME(spec)
             if scheme:
                 # It's a url, download it to tmpdir
-                found = self._download_url(scheme.group(1), spec, tmpdir)
+                found = self._download_url(spec, tmpdir)
                 base, fragment = egg_info_for_url(spec)
                 if base.endswith('.py'):
                     found = self.gen_setup(found, fragment, tmpdir)
@@ -816,7 +817,7 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
             else:
                 raise DistutilsError("Download error for %s: %s" % (url, v)) from v
 
-    def _download_url(self, scheme, url, tmpdir):
+    def _download_url(self, url, tmpdir):
         # Determine download filename
         #
         name, fragment = egg_info_for_url(url)
@@ -833,14 +834,29 @@ def _download_url(self, scheme, url, tmpdir):
 
         return self._download_vcs(url, filename) or self._download_other(url, filename)
 
-    def _download_vcs(self, url, filename):
+    @staticmethod
+    def _resolve_vcs(url):
+        """
+        >>> rvcs = PackageIndex._resolve_vcs
+        >>> rvcs('git+http://foo/bar')
+        'git'
+        >>> rvcs('hg+https://foo/bar')
+        'hg'
+        >>> rvcs('git:myhost')
+        'git'
+        >>> rvcs('hg:myhost')
+        >>> rvcs('http://foo/bar')
+        """
         scheme = urllib.parse.urlsplit(url).scheme
-        if scheme == 'svn' or scheme.startswith('svn+'):
-            return self._download_svn(url, filename)
-        elif scheme == 'git' or scheme.startswith('git+'):
-            return self._download_git(url, filename)
-        elif scheme.startswith('hg+'):
-            return self._download_hg(url, filename)
+        pre, sep, post = scheme.partition('+')
+        # svn and git have their own protocol; hg does not
+        allowed = set(['svn', 'git'] + ['hg'] * bool(sep))
+        return next(iter({pre} & allowed), None)
+
+    def _download_vcs(self, url, filename):
+        vcs = self._resolve_vcs(url)
+        with contextlib.suppress(AttributeError):
+            return getattr(self, f'_download_{vcs}')(url, filename)
 
     def _download_other(self, url, filename):
         scheme = urllib.parse.urlsplit(url).scheme
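
The one-liner works because ['hg'] * bool(sep) contributes 'hg' to the allowed set only when a '+' separator is present, and next(iter({pre} & allowed), None) yields the prefix when it is an allowed VCS and None otherwise. The same logic unrolled for readability (an equivalent sketch, not the shipped code):

    def resolve_vcs_unrolled(scheme):
        pre, sep, _ = scheme.partition('+')
        allowed = {'svn', 'git'}
        if sep:                      # hg is only valid in the '+' form
            allowed.add('hg')
        return pre if pre in allowed else None

    assert resolve_vcs_unrolled('git+https') == 'git'
    assert resolve_vcs_unrolled('git') == 'git'
    assert resolve_vcs_unrolled('hg+https') == 'hg'
    assert resolve_vcs_unrolled('hg') is None
    assert resolve_vcs_unrolled('https') is None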

From cf18f716c1fe638d812d487f61aa987101a763a8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 10:55:48 -0400
Subject: [PATCH 0614/1761] Consolidated all _download_vcs methods into one.

---
 setuptools/package_index.py | 68 +++++++++++++------------------------
 1 file changed, 23 insertions(+), 45 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 8d46a70c47..ba7819304f 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1,6 +1,5 @@
 """PyPI and direct package downloading."""
 
-import contextlib
 import sys
 import os
 import re
@@ -853,10 +852,30 @@ def _resolve_vcs(url):
         allowed = set(['svn', 'git'] + ['hg'] * bool(sep))
         return next(iter({pre} & allowed), None)
 
-    def _download_vcs(self, url, filename):
+    def _download_vcs(self, url, spec_filename):
         vcs = self._resolve_vcs(url)
-        with contextlib.suppress(AttributeError):
-            return getattr(self, f'_download_{vcs}')(url, filename)
+        if not vcs:
+            return
+        if vcs == 'svn':
+            raise DistutilsError(
+                f"Invalid config, SVN download is not supported: {url}"
+            )
+
+        filename, _, _ = spec_filename.partition('#')
+        url, rev = self._vcs_split_rev_from_url(url)
+
+        self.info(f"Doing {vcs} clone from {url} to {filename}")
+        os.system(f"{vcs} clone --quiet {url} {filename}")
+
+        co_commands = dict(
+            git=f"git -C {filename} checkout --quiet {rev}",
+            hg=f"hg --cwd {filename} up -C -r {rev} -q",
+        )
+        if rev is not None:
+            self.info(f"Checking out {rev}")
+            os.system(co_commands[vcs])
+
+        return filename
 
     def _download_other(self, url, filename):
         scheme = urllib.parse.urlsplit(url).scheme
@@ -880,9 +899,6 @@ def _invalid_download_html(self, url, headers, filename):
         os.unlink(filename)
         raise DistutilsError(f"Unexpected HTML page found at {url}")
 
-    def _download_svn(self, url, _filename):
-        raise DistutilsError(f"Invalid config, SVN download is not supported: {url}")
-
     @staticmethod
     def _vcs_split_rev_from_url(url):
         """
@@ -915,44 +931,6 @@ def _vcs_split_rev_from_url(url):
 
         return resolved, rev
 
-    def _download_git(self, url, filename):
-        filename = filename.split('#', 1)[0]
-        url, rev = self._vcs_split_rev_from_url(url)
-
-        self.info("Doing git clone from %s to %s", url, filename)
-        os.system("git clone --quiet %s %s" % (url, filename))
-
-        if rev is not None:
-            self.info("Checking out %s", rev)
-            os.system(
-                "git -C %s checkout --quiet %s"
-                % (
-                    filename,
-                    rev,
-                )
-            )
-
-        return filename
-
-    def _download_hg(self, url, filename):
-        filename = filename.split('#', 1)[0]
-        url, rev = self._vcs_split_rev_from_url(url)
-
-        self.info("Doing hg clone from %s to %s", url, filename)
-        os.system("hg clone --quiet %s %s" % (url, filename))
-
-        if rev is not None:
-            self.info("Updating to %s", rev)
-            os.system(
-                "hg --cwd %s up -C -r %s -q"
-                % (
-                    filename,
-                    rev,
-                )
-            )
-
-        return filename
-
     def debug(self, msg, *args):
         log.debug(msg, *args)
 

From f0cda0b9a3cf9d81844738ba96b1df95e2abb799 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 11:40:10 -0400
Subject: [PATCH 0615/1761] Replace os.system calls with subprocess calls.

---
 setup.cfg                             |  1 +
 setuptools/package_index.py           |  9 +++---
 setuptools/tests/test_packageindex.py | 46 ++++++++++++---------------
 3 files changed, 27 insertions(+), 29 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index 68be6c8e7c..1226c940fc 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -76,6 +76,7 @@ testing =
 	tomli
 	# No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly
 	importlib_metadata
+	pytest-subprocess
 
 	# workaround for pypa/setuptools#4333
 	pyproject-hooks!=1.1
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index ba7819304f..bbc5846ed9 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1,6 +1,7 @@
 """PyPI and direct package downloading."""
 
 import sys
+import subprocess
 import os
 import re
 import io
@@ -865,15 +866,15 @@ def _download_vcs(self, url, spec_filename):
         url, rev = self._vcs_split_rev_from_url(url)
 
         self.info(f"Doing {vcs} clone from {url} to {filename}")
-        os.system(f"{vcs} clone --quiet {url} {filename}")
+        subprocess.check_call([vcs, 'clone', '--quiet', url, filename])
 
         co_commands = dict(
-            git=f"git -C {filename} checkout --quiet {rev}",
-            hg=f"hg --cwd {filename} up -C -r {rev} -q",
+            git=[vcs, '-C', filename, 'checkout', '--quiet', rev],
+            hg=[vcs, '--cwd', filename, 'up', '-C', '-r', rev, '-q'],
         )
         if rev is not None:
             self.info(f"Checking out {rev}")
-            os.system(co_commands[vcs])
+            subprocess.check_call(co_commands[vcs])
 
         return filename
 
diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
index 93474ae5af..2776ba9f63 100644
--- a/setuptools/tests/test_packageindex.py
+++ b/setuptools/tests/test_packageindex.py
@@ -3,7 +3,6 @@
 import urllib.error
 import http.client
 from inspect import cleandoc
-from unittest import mock
 
 import pytest
 
@@ -171,41 +170,38 @@ def test_egg_fragment(self):
             assert dists[0].version == ''
             assert dists[1].version == vc
 
-    def test_download_git_with_rev(self, tmpdir):
+    def test_download_git_with_rev(self, tmpdir, fp):
         url = 'git+https://github.example/group/project@master#egg=foo'
         index = setuptools.package_index.PackageIndex()
 
-        with mock.patch("os.system") as os_system_mock:
-            result = index.download(url, str(tmpdir))
+        expected_dir = str(tmpdir / 'project@master')
+        fp.register([
+            'git',
+            'clone',
+            '--quiet',
+            'https://github.example/group/project',
+            expected_dir,
+        ])
+        fp.register(['git', '-C', expected_dir, 'checkout', '--quiet', 'master'])
 
-        os_system_mock.assert_called()
+        result = index.download(url, str(tmpdir))
 
-        expected_dir = str(tmpdir / 'project@master')
-        expected = (
-            'git clone --quiet ' 'https://github.example/group/project {expected_dir}'
-        ).format(**locals())
-        first_call_args = os_system_mock.call_args_list[0][0]
-        assert first_call_args == (expected,)
-
-        tmpl = 'git -C {expected_dir} checkout --quiet master'
-        expected = tmpl.format(**locals())
-        assert os_system_mock.call_args_list[1][0] == (expected,)
         assert result == expected_dir
+        assert len(fp.calls) == 2
 
-    def test_download_git_no_rev(self, tmpdir):
+    def test_download_git_no_rev(self, tmpdir, fp):
         url = 'git+https://github.example/group/project#egg=foo'
         index = setuptools.package_index.PackageIndex()
 
-        with mock.patch("os.system") as os_system_mock:
-            result = index.download(url, str(tmpdir))
-
-        os_system_mock.assert_called()
-
         expected_dir = str(tmpdir / 'project')
-        expected = (
-            'git clone --quiet ' 'https://github.example/group/project {expected_dir}'
-        ).format(**locals())
-        os_system_mock.assert_called_once_with(expected)
+        fp.register([
+            'git',
+            'clone',
+            '--quiet',
+            'https://github.example/group/project',
+            expected_dir,
+        ])
+        index.download(url, str(tmpdir))
 
     def test_download_svn(self, tmpdir):
         url = 'svn+https://svn.example/project#egg=foo'
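
The fp fixture comes from the pytest-subprocess plugin added to setup.cfg above: registered command specs are intercepted instead of executed, unregistered commands raise, and fp.calls records what ran. A minimal sketch of its use (illustrative command):

    import subprocess

    def test_clone_is_intercepted(fp):  # fp: pytest-subprocess fixture
        cmd = ['git', 'clone', '--quiet', 'https://host/repo', 'dest']
        fp.register(cmd)                # no real git process is spawned
        subprocess.check_call(cmd)
        assert len(fp.calls) == 1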

From a36b1121d817ef82aef0971aaa37989941019c8f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 11:55:20 -0400
Subject: [PATCH 0616/1761] Prefer tmp_path fixture.

---
 setuptools/tests/test_packageindex.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
index 2776ba9f63..f5f37e0563 100644
--- a/setuptools/tests/test_packageindex.py
+++ b/setuptools/tests/test_packageindex.py
@@ -170,11 +170,11 @@ def test_egg_fragment(self):
             assert dists[0].version == ''
             assert dists[1].version == vc
 
-    def test_download_git_with_rev(self, tmpdir, fp):
+    def test_download_git_with_rev(self, tmp_path, fp):
         url = 'git+https://github.example/group/project@master#egg=foo'
         index = setuptools.package_index.PackageIndex()
 
-        expected_dir = str(tmpdir / 'project@master')
+        expected_dir = tmp_path / 'project@master'
         fp.register([
             'git',
             'clone',
@@ -184,16 +184,16 @@ def test_download_git_with_rev(self, tmpdir, fp):
         ])
         fp.register(['git', '-C', expected_dir, 'checkout', '--quiet', 'master'])
 
-        result = index.download(url, str(tmpdir))
+        result = index.download(url, tmp_path)
 
-        assert result == expected_dir
+        assert result == str(expected_dir)
         assert len(fp.calls) == 2
 
-    def test_download_git_no_rev(self, tmpdir, fp):
+    def test_download_git_no_rev(self, tmp_path, fp):
         url = 'git+https://github.example/group/project#egg=foo'
         index = setuptools.package_index.PackageIndex()
 
-        expected_dir = str(tmpdir / 'project')
+        expected_dir = tmp_path / 'project'
         fp.register([
             'git',
             'clone',
@@ -201,15 +201,15 @@ def test_download_git_no_rev(self, tmpdir, fp):
             'https://github.example/group/project',
             expected_dir,
         ])
-        index.download(url, str(tmpdir))
+        index.download(url, tmp_path)
 
-    def test_download_svn(self, tmpdir):
+    def test_download_svn(self, tmp_path):
         url = 'svn+https://svn.example/project#egg=foo'
         index = setuptools.package_index.PackageIndex()
 
         msg = r".*SVN download is not supported.*"
         with pytest.raises(distutils.errors.DistutilsError, match=msg):
-            index.download(url, str(tmpdir))
+            index.download(url, tmp_path)
 
 
 class TestContentCheckers:

From 1a0cbf5f59b5fa1debb4b46fd5e1c41ebc2344dd Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 17:07:40 -0400
Subject: [PATCH 0617/1761] Add news fragment.

---
 newsfragments/4332.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4332.feature.rst

diff --git a/newsfragments/4332.feature.rst b/newsfragments/4332.feature.rst
new file mode 100644
index 0000000000..9f46298adc
--- /dev/null
+++ b/newsfragments/4332.feature.rst
@@ -0,0 +1 @@
+Modernized and refactored VCS handling in package_index.
\ No newline at end of file

From 9bc2e87fc74e726927d208438385956591c96aa5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 17:29:45 -0400
Subject: [PATCH 0618/1761] Ignore coverage for file urls.

---
 setuptools/package_index.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index bbc5846ed9..c3ffee41a7 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -880,7 +880,7 @@ def _download_vcs(self, url, spec_filename):
 
     def _download_other(self, url, filename):
         scheme = urllib.parse.urlsplit(url).scheme
-        if scheme == 'file':
+        if scheme == 'file':  # pragma: no cover
             return urllib.request.url2pathname(urllib.parse.urlparse(url).path)
         # raise error if not allowed
         self.url_ok(url, True)

From 225d15808ba140799673b730e90b25e4117cc365 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Apr 2024 20:19:04 -0400
Subject: [PATCH 0619/1761] Pin against pyproject-hooks==1.1 (docs). Closes
 #4333.

---
 setup.cfg | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/setup.cfg b/setup.cfg
index 1226c940fc..0756fa92ea 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -100,6 +100,9 @@ docs =
 	sphinxcontrib-towncrier
 	sphinx-notfound-page >=1,<2
 
+	# workaround for pypa/setuptools#4333
+	pyproject-hooks!=1.1
+
 ssl =
 
 certs =

From a84b262a8d50ae8a3ed74bca58e6cadd1ac46495 Mon Sep 17 00:00:00 2001
From: wim glenn 
Date: Tue, 30 Apr 2024 16:09:19 -0500
Subject: [PATCH 0620/1761] Typo fix in build_meta.py docstring

---
 setuptools/build_meta.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 2decd2d214..be2742d73d 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -2,7 +2,7 @@
 
 Previously, when a user or a command line tool (let's call it a "frontend")
 needed to make a request of setuptools to take a certain action, for
-example, generating a list of installation requirements, the frontend would
+example, generating a list of installation requirements, the frontend
 would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.
 
 PEP 517 defines a different method of interfacing with setuptools. Rather

From 9cf334d45e32d767d394fa6cc9ffa8829b150af0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 1 May 2024 17:27:36 +0100
Subject: [PATCH 0621/1761] Avoid newer importlib-metadata APIs for backwards
 compatibility

---
 setuptools/dist.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 076f9a2327..03f6c0398b 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -535,7 +535,8 @@ def warn_dash_deprecation(self, opt, section):
 
     def _setuptools_commands(self):
         try:
-            return metadata.distribution('setuptools').entry_points.names
+            entry_points = metadata.distribution('setuptools').entry_points
+            return {ep.name for ep in entry_points}  # Avoid newer API for compatibility
         except metadata.PackageNotFoundError:
             # during bootstrapping, distribution doesn't exist
             return []
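
EntryPoints.names is only available in newer importlib.metadata/importlib_metadata releases, so the comprehension keeps compatibility with older providers while producing the same set. A sketch of the equivalence (assumes setuptools is installed in the environment):

    from importlib import metadata

    eps = metadata.distribution('setuptools').entry_points
    portable = {ep.name for ep in eps}
    # On new-enough versions, set(eps.names) == portable as well.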

From 67aab1554c7c9cbb19bb546a5b6476267030c5b5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 2 May 2024 15:36:22 -0400
Subject: [PATCH 0622/1761] Revert "Allow macos on Python 3.8 to fail as GitHub
 CI has dropped support."

This reverts commit 744cf2a2befb6a616657c105e5c9be9f3f921224.
---
 .github/workflows/main.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 5ace4c504f..ac0ff69e22 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -50,7 +50,7 @@ jobs:
         - python: pypy3.10
           platform: ubuntu-latest
     runs-on: ${{ matrix.platform }}
-    continue-on-error: ${{ matrix.python == '3.13' || (matrix.python == '3.8' || matrix.python == '3.9') && matrix.platform == 'macos-latest' }}
+    continue-on-error: ${{ matrix.python == '3.13' }}
     steps:
       - uses: actions/checkout@v4
       - name: Setup Python

From 77bfebf36673858dc97c67155794adb6145a14fd Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 16:41:10 +0100
Subject: [PATCH 0623/1761] Re-enable deprecation checking in CI

---
 tox.ini | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/tox.ini b/tox.ini
index 22dd7af8da..c4cb4b5c64 100644
--- a/tox.ini
+++ b/tox.ini
@@ -4,8 +4,7 @@ deps =
 	# Ideally all the dependencies should be set as "extras"
 setenv =
 	PYTHONWARNDEFAULTENCODING = 1
-	SETUPTOOLS_ENFORCE_DEPRECATION = {env:SETUPTOOLS_ENFORCE_DEPRECATION:0}
-	# ^-- Temporarily disable enforcement so CI don't fail on due dates
+	SETUPTOOLS_ENFORCE_DEPRECATION = {env:SETUPTOOLS_ENFORCE_DEPRECATION:1}
 commands =
 	pytest {posargs}
 usedevelop = True

From 118c5e44c1aca9ca67375a2e3a87988116986129 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 20:23:13 +0100
Subject: [PATCH 0624/1761] Remove deprecated 'setuptools.convert_path'

---
 setuptools/__init__.py              | 17 -----------------
 setuptools/tests/test_setuptools.py |  5 -----
 2 files changed, 22 deletions(-)

diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 7c88c7e19b..a59bbe1177 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -8,7 +8,6 @@
 import _distutils_hack.override  # noqa: F401
 import distutils.core
 from distutils.errors import DistutilsOptionError
-from distutils.util import convert_path as _convert_path
 
 from . import logging, monkey
 from . import version as _version_module
@@ -247,22 +246,6 @@ def findall(dir=os.curdir):
     return list(files)
 
 
-@functools.wraps(_convert_path)
-def convert_path(pathname):
-    SetuptoolsDeprecationWarning.emit(
-        "Access to implementation detail",
-        """
-        The function `convert_path` is not provided by setuptools itself,
-        and therefore not part of the public API.
-
-        Its direct usage by 3rd-party packages is considered improper and the function
-        may be removed in the future.
-        """,
-        due_date=(2023, 12, 13),  # initial deprecation 2022-03-25, see #3201
-    )
-    return _convert_path(pathname)
-
-
 class sic(str):
     """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)"""
 
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 0dc4769b93..b1ca2396bd 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -307,8 +307,3 @@ def test_its_own_wheel_does_not_contain_tests(setuptools_wheel):
 
     for member in contents:
         assert '/tests/' not in member
-
-
-def test_convert_path_deprecated():
-    with pytest.warns(setuptools.SetuptoolsDeprecationWarning):
-        setuptools.convert_path('setuptools/tests')

From 880b30ebba806c356686e8efc5fdc5fb4f910b81 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 20:24:39 +0100
Subject: [PATCH 0625/1761] Remove deprecated fallback for
 'distutils.command.build.build.sub_commands +='

---
 setuptools/command/build.py    | 18 -----------------
 setuptools/tests/test_build.py | 36 +---------------------------------
 2 files changed, 1 insertion(+), 53 deletions(-)

diff --git a/setuptools/command/build.py b/setuptools/command/build.py
index afda7e3be9..16c077b7cc 100644
--- a/setuptools/command/build.py
+++ b/setuptools/command/build.py
@@ -1,8 +1,6 @@
 from typing import Dict, List, Protocol
 from distutils.command.build import build as _build
 
-from ..warnings import SetuptoolsDeprecationWarning
-
 _ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"}
 
 
@@ -10,22 +8,6 @@ class build(_build):
     # copy to avoid sharing the object with parent class
     sub_commands = _build.sub_commands[:]
 
-    def get_sub_commands(self):
-        subcommands = {cmd[0] for cmd in _build.sub_commands}
-        if subcommands - _ORIGINAL_SUBCOMMANDS:
-            SetuptoolsDeprecationWarning.emit(
-                "Direct usage of `distutils` commands",
-                """
-                It seems that you are using `distutils.command.build` to add
-                new subcommands. Using `distutils` directly is considered deprecated,
-                please use `setuptools.command.build`.
-                """,
-                due_date=(2023, 12, 13),  # Warning introduced in 13 Jun 2022.
-                see_url="https://peps.python.org/pep-0632/",
-            )
-            self.sub_commands = _build.sub_commands
-        return super().get_sub_commands()
-
 
 class SubCommand(Protocol):
     """In order to support editable installations (see :pep:`660`) all
diff --git a/setuptools/tests/test_build.py b/setuptools/tests/test_build.py
index 4a3b11de18..141522efd4 100644
--- a/setuptools/tests/test_build.py
+++ b/setuptools/tests/test_build.py
@@ -1,10 +1,6 @@
-from contextlib import contextmanager
-from setuptools import Command, SetuptoolsDeprecationWarning
+from setuptools import Command
 from setuptools.dist import Distribution
 from setuptools.command.build import build
-from distutils.command.build import build as distutils_build
-
-import pytest
 
 
 def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
@@ -24,15 +20,6 @@ def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
     assert isinstance(dist.get_command_obj("build"), build)
 
 
-@contextmanager
-def _restore_sub_commands():
-    orig = distutils_build.sub_commands[:]
-    try:
-        yield
-    finally:
-        distutils_build.sub_commands = orig
-
-
 class Subcommand(Command):
     """Dummy command to be used in tests"""
 
@@ -44,24 +31,3 @@ def finalize_options(self):
 
     def run(self):
         raise NotImplementedError("just to check if the command runs")
-
-
-@_restore_sub_commands()
-def test_subcommand_in_distutils(tmpdir_cwd):
-    """
-    Ensure that sub commands registered in ``distutils`` run,
-    after instructing the users to migrate to ``setuptools``.
-    """
-    dist = Distribution(
-        dict(
-            packages=[],
-            cmdclass={'subcommand': Subcommand},
-        )
-    )
-    distutils_build.sub_commands.append(('subcommand', None))
-
-    warning_msg = "please use .setuptools.command.build."
-    with pytest.warns(SetuptoolsDeprecationWarning, match=warning_msg):
-        # For backward compatibility, the subcommand should run anyway:
-        with pytest.raises(NotImplementedError, match="the command runs"):
-            dist.run_command("build")

From a32e2aa10cb56ef2338b6b6289140ec6525caaff Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 22 Apr 2024 23:10:14 +0100
Subject: [PATCH 0626/1761] Add news fragments

---
 newsfragments/4322.removal.1.rst | 3 +++
 newsfragments/4322.removal.2.rst | 3 +++
 2 files changed, 6 insertions(+)
 create mode 100644 newsfragments/4322.removal.1.rst
 create mode 100644 newsfragments/4322.removal.2.rst

diff --git a/newsfragments/4322.removal.1.rst b/newsfragments/4322.removal.1.rst
new file mode 100644
index 0000000000..33360172d5
--- /dev/null
+++ b/newsfragments/4322.removal.1.rst
@@ -0,0 +1,3 @@
+Remove ``setuptools.convert_path`` after a long deprecation period.
+This function was never defined by ``setuptools`` itself; it was merely
+exposed as a side-effect of an import for internal usage.
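
For readers migrating: a minimal sketch, assuming (as the fragment above states)
that the removed name was only the ``distutils.util`` helper leaking through an
internal import, so callers that still need it can import it from there:

    # Hypothetical migration (not part of the patch): import the helper
    # from its actual home instead of relying on the removed re-export.
    from distutils.util import convert_path

    # Converts a '/'-separated source path to the local OS convention.
    print(convert_path('setuptools/tests'))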
diff --git a/newsfragments/4322.removal.2.rst b/newsfragments/4322.removal.2.rst
new file mode 100644
index 0000000000..88380f4c8d
--- /dev/null
+++ b/newsfragments/4322.removal.2.rst
@@ -0,0 +1,3 @@
+Remove fallback for customisations of ``distutils``' ``build.sub_commands``
+after a long deprecation period.
+Users are advised to import ``build`` directly from ``setuptools.command.build``.
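
A minimal migration sketch for the advice above, using a hypothetical
``build_docs`` subcommand (names invented for illustration); instead of
appending to ``distutils.command.build.build.sub_commands`` (the fallback
removed in the previous patch), extend the list on the setuptools class:

    from setuptools import Command
    from setuptools.command.build import build

    class build_docs(Command):  # hypothetical; would also go in cmdclass
        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            print("building docs")

    # The setuptools build class keeps its own copy of sub_commands
    # (see the diff above), so this no longer mutates the distutils parent.
    build.sub_commands.append(("build_docs", None))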

From 94f0089a9f9e0f4ab38840fe9a03c1335089e4a5 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 5 Mar 2024 20:30:58 -0500
Subject: [PATCH 0627/1761] Type _declare_state and make it work statically

---
 pkg_resources/__init__.py | 19 +++++++++----------
 1 file changed, 9 insertions(+), 10 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index d32b095a88..7693b071e7 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -27,7 +27,7 @@
 import time
 import re
 import types
-from typing import List, Protocol
+from typing import Any, Dict, List, Protocol
 import zipfile
 import zipimport
 import warnings
@@ -94,9 +94,6 @@
 resource_listdir = None
 resource_filename = None
 resource_exists = None
-_distribution_finders = None
-_namespace_handlers = None
-_namespace_packages = None
 
 
 warnings.warn(
@@ -120,11 +117,10 @@ class PEP440Warning(RuntimeWarning):
 parse_version = packaging.version.Version
 
 
-_state_vars = {}
+_state_vars: Dict[str, Any] = {}
 
 
-def _declare_state(vartype, **kw):
-    globals().update(kw)
+def _declare_state(vartype: str, **kw: object) -> None:
     _state_vars.update(dict.fromkeys(kw, vartype))
 
 
@@ -2025,7 +2021,8 @@ def __init__(self, importer):
         self._setup_prefix()
 
 
-_declare_state('dict', _distribution_finders={})
+_distribution_finders = {}
+_declare_state('dict', _distribution_finders=_distribution_finders)
 
 
 def register_finder(importer_type, distribution_finder):
@@ -2198,8 +2195,10 @@ def resolve_egg_link(path):
 
 register_finder(importlib.machinery.FileFinder, find_on_path)
 
-_declare_state('dict', _namespace_handlers={})
-_declare_state('dict', _namespace_packages={})
+_namespace_handlers = {}
+_declare_state('dict', _namespace_handlers=_namespace_handlers)
+_namespace_packages = {}
+_declare_state('dict', _namespace_packages=_namespace_packages)
 
 
 def register_namespace_handler(importer_type, namespace_handler):
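
A condensed, runnable sketch of the pattern this patch adopts (annotation
simplified here): globals are assigned explicitly so static analysis can see
them, and ``_declare_state`` only records their kind instead of creating them
via ``globals().update()``:

    from typing import Any, Dict

    _state_vars: Dict[str, Any] = {}

    def _declare_state(vartype: str, **kw: object) -> None:
        # Records only the kind of each state variable; assignment now
        # happens at the call site, where type checkers can see it.
        _state_vars.update(dict.fromkeys(kw, vartype))

    _distribution_finders: Dict[type, Any] = {}  # simplified annotation
    _declare_state('dict', _distribution_finders=_distribution_finders)
    assert _state_vars == {'_distribution_finders': 'dict'}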

From 957e991abd3d5a5c6eeafb0545a71ab30f53a917 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 6 Mar 2024 12:19:38 -0500
Subject: [PATCH 0628/1761] Add type annotations to moved definitions

---
 pkg_resources/__init__.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 7693b071e7..d3213b3237 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -27,7 +27,7 @@
 import time
 import re
 import types
-from typing import Any, Dict, List, Protocol
+from typing import Any, Callable, Dict, Iterable, List, Protocol, Optional
 import zipfile
 import zipimport
 import warnings
@@ -2021,7 +2021,9 @@ def __init__(self, importer):
         self._setup_prefix()
 
 
-_distribution_finders = {}
+_distribution_finders: Dict[
+    type, Callable[[object, str, bool], Iterable["Distribution"]]
+] = {}
 _declare_state('dict', _distribution_finders=_distribution_finders)
 
 
@@ -2195,9 +2197,11 @@ def resolve_egg_link(path):
 
 register_finder(importlib.machinery.FileFinder, find_on_path)
 
-_namespace_handlers = {}
+_namespace_handlers: Dict[
+    type, Callable[[object, str, str, types.ModuleType], Optional[str]]
+] = {}
 _declare_state('dict', _namespace_handlers=_namespace_handlers)
-_namespace_packages = {}
+_namespace_packages: Dict[Optional[str], List[str]] = {}
 _declare_state('dict', _namespace_packages=_namespace_packages)
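
For illustration, a no-op finder whose signature matches the new
``Callable[[object, str, bool], Iterable["Distribution"]]`` annotation
(a simplified sketch; the return annotation is loosened here because
``Distribution`` is not imported):

    from typing import Iterable

    def find_nothing(importer: object, path_item: str, only: bool) -> Iterable:
        # A real finder yields Distribution objects found under path_item;
        # this fallback reports none, matching the annotated callable shape.
        return ()

    # register_finder(object, find_nothing)  # how pkg_resources wires it up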
 
 

From 7f59cd1bd026133e3705b7837401c0df13a5d947 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 7 May 2024 18:49:38 +0100
Subject: [PATCH 0629/1761] Update used version of validate-pyproject to 0.17

---
 tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index c4cb4b5c64..e13ad53da3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -82,7 +82,7 @@ commands =
 [testenv:generate-validation-code]
 skip_install = True
 deps =
-	validate-pyproject[all]==0.12.2
+	validate-pyproject[all]==0.17
 commands =
 	python -m tools.generate_validation_code
 

From 8283cb66244bc90f3510cbe6236f84e92a6876f9 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 7 May 2024 18:56:45 +0100
Subject: [PATCH 0630/1761] Update code automatically generated by
 validate_pyproject

---
 setuptools/config/_validate_pyproject/NOTICE  |   1 -
 .../config/_validate_pyproject/__init__.py    |   2 +-
 .../_validate_pyproject/error_reporting.py    |  46 ++-
 .../_validate_pyproject/extra_validations.py  |  28 +-
 .../fastjsonschema_validations.py             | 389 ++++++++++--------
 .../config/_validate_pyproject/formats.py     |  91 +++-
 6 files changed, 362 insertions(+), 195 deletions(-)

diff --git a/setuptools/config/_validate_pyproject/NOTICE b/setuptools/config/_validate_pyproject/NOTICE
index 286d29082e..121ba5fd22 100644
--- a/setuptools/config/_validate_pyproject/NOTICE
+++ b/setuptools/config/_validate_pyproject/NOTICE
@@ -436,4 +436,3 @@ Exhibit B - "Incompatible With Secondary Licenses" Notice
       This Source Code Form is "Incompatible
       With Secondary Licenses", as defined by
       the Mozilla Public License, v. 2.0.
-
diff --git a/setuptools/config/_validate_pyproject/__init__.py b/setuptools/config/_validate_pyproject/__init__.py
index dbe6cb4ca4..4f612bd51c 100644
--- a/setuptools/config/_validate_pyproject/__init__.py
+++ b/setuptools/config/_validate_pyproject/__init__.py
@@ -30,5 +30,5 @@ def validate(data: Any) -> bool:
     """
     with detailed_errors():
         _validate(data, custom_formats=FORMAT_FUNCTIONS)
-    reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)
+        reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)
     return True
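
Note that the indentation change above is behavioral: the ``EXTRA_VALIDATIONS``
chain now runs inside ``detailed_errors()``, so its exceptions pass through the
same conversion to ``ValidationError`` as schema failures. The ``reduce`` idiom
itself just threads ``data`` through each check, as in this standalone sketch
with placeholder checks:

    from functools import reduce

    def check_a(data):  # placeholder extra validation: return data or raise
        return data

    def check_b(data):
        return data

    EXTRA_VALIDATIONS = (check_a, check_b)
    data = {"project": {"name": "demo"}}
    # Equivalent to check_b(check_a(data)):
    reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)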
diff --git a/setuptools/config/_validate_pyproject/error_reporting.py b/setuptools/config/_validate_pyproject/error_reporting.py
index d44e290e36..a6753604f5 100644
--- a/setuptools/config/_validate_pyproject/error_reporting.py
+++ b/setuptools/config/_validate_pyproject/error_reporting.py
@@ -3,12 +3,21 @@
 import logging
 import os
 import re
+import typing
 from contextlib import contextmanager
 from textwrap import indent, wrap
-from typing import Any, Dict, Iterator, List, Optional, Sequence, Union, cast
+from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Union, cast
 
 from .fastjsonschema_exceptions import JsonSchemaValueException
 
+if typing.TYPE_CHECKING:
+    import sys
+
+    if sys.version_info < (3, 11):
+        from typing_extensions import Self
+    else:
+        from typing import Self
+
 _logger = logging.getLogger(__name__)
 
 _MESSAGE_REPLACEMENTS = {
@@ -36,6 +45,11 @@
     "property names": "keys",
 }
 
+_FORMATS_HELP = """
+For more details about `format` see
+https://validate-pyproject.readthedocs.io/en/latest/api/validate_pyproject.formats.html
+"""
+
 
 class ValidationError(JsonSchemaValueException):
     """Report violations of a given JSON schema.
@@ -59,7 +73,7 @@ class ValidationError(JsonSchemaValueException):
     _original_message = ""
 
     @classmethod
-    def _from_jsonschema(cls, ex: JsonSchemaValueException):
+    def _from_jsonschema(cls, ex: JsonSchemaValueException) -> "Self":
         formatter = _ErrorFormatting(ex)
         obj = cls(str(formatter), ex.value, formatter.name, ex.definition, ex.rule)
         debug_code = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower()
@@ -72,7 +86,7 @@ def _from_jsonschema(cls, ex: JsonSchemaValueException):
 
 
 @contextmanager
-def detailed_errors():
+def detailed_errors() -> Generator[None, None, None]:
     try:
         yield
     except JsonSchemaValueException as ex:
@@ -83,7 +97,7 @@ class _ErrorFormatting:
     def __init__(self, ex: JsonSchemaValueException):
         self.ex = ex
         self.name = f"`{self._simplify_name(ex.name)}`"
-        self._original_message = self.ex.message.replace(ex.name, self.name)
+        self._original_message: str = self.ex.message.replace(ex.name, self.name)
         self._summary = ""
         self._details = ""
 
@@ -107,11 +121,12 @@ def details(self) -> str:
 
         return self._details
 
-    def _simplify_name(self, name):
+    @staticmethod
+    def _simplify_name(name: str) -> str:
         x = len("data.")
         return name[x:] if name.startswith("data.") else name
 
-    def _expand_summary(self):
+    def _expand_summary(self) -> str:
         msg = self._original_message
 
         for bad, repl in _MESSAGE_REPLACEMENTS.items():
@@ -129,8 +144,9 @@ def _expand_summary(self):
 
     def _expand_details(self) -> str:
         optional = []
-        desc_lines = self.ex.definition.pop("$$description", [])
-        desc = self.ex.definition.pop("description", None) or " ".join(desc_lines)
+        definition = self.ex.definition or {}
+        desc_lines = definition.pop("$$description", [])
+        desc = definition.pop("description", None) or " ".join(desc_lines)
         if desc:
             description = "\n".join(
                 wrap(
@@ -142,18 +158,20 @@ def _expand_details(self) -> str:
                 )
             )
             optional.append(f"DESCRIPTION:\n{description}")
-        schema = json.dumps(self.ex.definition, indent=4)
+        schema = json.dumps(definition, indent=4)
         value = json.dumps(self.ex.value, indent=4)
         defaults = [
             f"GIVEN VALUE:\n{indent(value, '    ')}",
             f"OFFENDING RULE: {self.ex.rule!r}",
             f"DEFINITION:\n{indent(schema, '    ')}",
         ]
-        return "\n\n".join(optional + defaults)
+        msg = "\n\n".join(optional + defaults)
+        epilog = f"\n{_FORMATS_HELP}" if "format" in msg.lower() else ""
+        return msg + epilog
 
 
 class _SummaryWriter:
-    _IGNORE = {"description", "default", "title", "examples"}
+    _IGNORE = frozenset(("description", "default", "title", "examples"))
 
     def __init__(self, jargon: Optional[Dict[str, str]] = None):
         self.jargon: Dict[str, str] = jargon or {}
@@ -242,7 +260,9 @@ def _is_unecessary(self, path: Sequence[str]) -> bool:
         key = path[-1]
         return any(key.startswith(k) for k in "$_") or key in self._IGNORE
 
-    def _filter_unecessary(self, schema: dict, path: Sequence[str]):
+    def _filter_unecessary(
+        self, schema: Dict[str, Any], path: Sequence[str]
+    ) -> Dict[str, Any]:
         return {
             key: value
             for key, value in schema.items()
@@ -271,7 +291,7 @@ def _handle_list(
             self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas)
         )
 
-    def _is_property(self, path: Sequence[str]):
+    def _is_property(self, path: Sequence[str]) -> bool:
         """Check if the given path can correspond to an arbitrarily named property"""
         counter = 0
         for key in path[-2::-1]:
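
A self-contained sketch of the ``Self`` return-type pattern introduced above
(simplified names; assumes ``typing_extensions`` is needed only by type
checkers on Python < 3.11): the guard keeps the import type-checking-only, and
the quoted annotation lets subclasses get their own type back:

    import typing

    if typing.TYPE_CHECKING:
        import sys

        if sys.version_info < (3, 11):
            from typing_extensions import Self
        else:
            from typing import Self

    class Error(Exception):
        @classmethod
        def wrap(cls, ex: Exception) -> "Self":
            return cls(str(ex))  # cls() preserves the subclass

    class SchemaError(Error):
        pass

    err = SchemaError.wrap(ValueError("boom"))  # checkers infer SchemaError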
diff --git a/setuptools/config/_validate_pyproject/extra_validations.py b/setuptools/config/_validate_pyproject/extra_validations.py
index 4130a421cf..c4ffe651dd 100644
--- a/setuptools/config/_validate_pyproject/extra_validations.py
+++ b/setuptools/config/_validate_pyproject/extra_validations.py
@@ -3,6 +3,7 @@
 JSON Schema library).
 """
 
+from inspect import cleandoc
 from typing import Mapping, TypeVar
 
 from .error_reporting import ValidationError
@@ -11,11 +12,16 @@
 
 
 class RedefiningStaticFieldAsDynamic(ValidationError):
-    """According to PEP 621:
+    _DESC = """According to PEP 621:
 
     Build back-ends MUST raise an error if the metadata specifies a field
     statically as well as being listed in dynamic.
     """
+    __doc__ = _DESC
+    _URL = (
+        "https://packaging.python.org/en/latest/specifications/"
+        "pyproject-toml/#dynamic"
+    )
 
 
 def validate_project_dynamic(pyproject: T) -> T:
@@ -24,11 +30,21 @@ def validate_project_dynamic(pyproject: T) -> T:
 
     for field in dynamic:
         if field in project_table:
-            msg = f"You cannot provide a value for `project.{field}` and "
-            msg += "list it under `project.dynamic` at the same time"
-            name = f"data.project.{field}"
-            value = {field: project_table[field], "...": " # ...", "dynamic": dynamic}
-            raise RedefiningStaticFieldAsDynamic(msg, value, name, rule="PEP 621")
+            raise RedefiningStaticFieldAsDynamic(
+                message=f"You cannot provide a value for `project.{field}` and "
+                "list it under `project.dynamic` at the same time",
+                value={
+                    field: project_table[field],
+                    "...": " # ...",
+                    "dynamic": dynamic,
+                },
+                name=f"data.project.{field}",
+                definition={
+                    "description": cleandoc(RedefiningStaticFieldAsDynamic._DESC),
+                    "see": RedefiningStaticFieldAsDynamic._URL,
+                },
+                rule="PEP 621",
+            )
 
     return pyproject
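
For reference, a hypothetical input that the rewritten check rejects: a field
provided statically while also listed under ``project.dynamic``:

    # Passing this mapping to validate_project_dynamic would raise
    # RedefiningStaticFieldAsDynamic (rule "PEP 621"), since `version`
    # is given statically *and* declared dynamic.
    pyproject = {
        "project": {
            "name": "demo",
            "version": "1.0",
            "dynamic": ["version"],
        }
    }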
 
diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
index 8b852bbfd4..1c58a55ea8 100644
--- a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
+++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
@@ -1,4 +1,5 @@
 # noqa
+# ruff: noqa
 # flake8: noqa
 # pylint: skip-file
 # mypy: ignore-errors
@@ -9,7 +10,8 @@
 # *** PLEASE DO NOT MODIFY DIRECTLY: Automatically generated code *** 
 
 
-VERSION = "2.16.3"
+VERSION = "2.19.1"
+from decimal import Decimal
 import re
 from .fastjsonschema_exceptions import JsonSchemaValueException
 
@@ -29,7 +31,7 @@ def validate(data, custom_formats={}, name_prefix=None):
 
 def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated 
function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. 
Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. 
MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
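The context lines above show the idiom this generated module applies to every object in the schema: copy the object's keys into a set, discard each key as its property is validated, and treat whatever remains as a violation of ``additionalProperties: False``. A minimal sketch of the pattern, with illustrative names rather than the generated code itself::

    def validate_object(data, known_properties, name_prefix=None):
        # Same shape as the generated "must be object" guards above.
        if not isinstance(data, dict):
            raise ValueError((name_prefix or "data") + " must be object")
        data_keys = set(data.keys())
        for prop in known_properties:
            if prop in data_keys:
                data_keys.remove(prop)
                # ... per-property validation of data[prop] runs here ...
        # Keys not claimed by any known property violate
        # ``additionalProperties: False``.
        if data_keys:
            raise ValueError(
                (name_prefix or "data")
                + " must not contain " + str(data_keys) + " properties"
            )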
@@ -40,9 +42,9 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_bui
                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must be object", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='type')
             data__buildsystem_is_dict = isinstance(data__buildsystem, dict)
             if data__buildsystem_is_dict:
-                data__buildsystem_len = len(data__buildsystem)
-                if not all(prop in data__buildsystem for prop in ['requires']):
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must contain ['requires'] properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='required')
+                data__buildsystem__missing_keys = set(['requires']) - data__buildsystem.keys()
+                if data__buildsystem__missing_keys:
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must contain " + (str(sorted(data__buildsystem__missing_keys)) + " properties"), value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='required')
                 data__buildsystem_keys = set(data__buildsystem.keys())
                 if "requires" in data__buildsystem_keys:
                     data__buildsystem_keys.remove("requires")
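The hunk above replaces an ``all(...)`` membership test with a set difference. Both forms detect a missing required key, but the new one can name exactly which keys are absent instead of echoing the hard-coded ``['requires']`` list. A simplified comparison, assuming a build table without ``requires`` (the real code raises ``JsonSchemaValueException`` with the whole schema attached as ``definition=``)::

    required = ['requires']
    data__buildsystem = {'build-backend': 'setuptools.build_meta'}

    # Old check: only knows that *some* required key is missing.
    if not all(prop in data__buildsystem for prop in required):
        print("build-system must contain ['requires'] properties")

    # New check: the set difference lists exactly the absent keys.
    missing_keys = set(required) - data__buildsystem.keys()
    if missing_keys:
        print("build-system must contain %s properties" % sorted(missing_keys))

With a single required key the two messages coincide; the improvement shows once a schema requires several keys and only some of them are missing.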
@@ -79,30 +81,30 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_bui
         if "project" in data_keys:
             data_keys.remove("project")
             data__project = data["project"]
-            validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data__project, custom_formats, (name_prefix or "data") + ".project")
+            validate_https___packaging_python_org_en_latest_specifications_pyproject_toml(data__project, custom_formats, (name_prefix or "data") + ".project")
         if "tool" in data_keys:
             data_keys.remove("tool")
             data__tool = data["tool"]
             if not isinstance(data__tool, (dict)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like 
structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either 
as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = 
"pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
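Every raise site in this module inlines the complete schema fragment as the ``definition=`` argument, which is why a small wording change in a schema surfaces here as a multi-kilobyte diff line. A minimal stand-in for the exception type, sketched from the call sites above (the real class is provided by ``fastjsonschema``; attribute handling may differ in detail)::

    class JsonSchemaValueException(ValueError):
        def __init__(self, message, value=None, name=None, definition=None, rule=None):
            super().__init__(message)
            self.message = message
            self.value = value            # the offending data
            self.name = name              # dotted path, e.g. "data.tool.setuptools"
            self.definition = definition  # schema fragment, inlined for error context
            self.rule = rule              # violated keyword, e.g. 'type' or 'required'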
             data__tool_is_dict = isinstance(data__tool, dict)
             if data__tool_is_dict:
                 data__tool_keys = set(data__tool.keys())
                 if "distutils" in data__tool_keys:
                     data__tool_keys.remove("distutils")
                     data__tool__distutils = data__tool["distutils"]
-                    validate_https___docs_python_org_3_install(data__tool__distutils, custom_formats, (name_prefix or "data") + ".tool.distutils")
+                    validate_https___setuptools_pypa_io_en_latest_deprecated_distutils_configfile_html(data__tool__distutils, custom_formats, (name_prefix or "data") + ".tool.distutils")
                 if "setuptools" in data__tool_keys:
                     data__tool_keys.remove("setuptools")
                     data__tool__setuptools = data__tool["setuptools"]
-                    validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools")
+                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools")
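The renames in this hunk follow the new ``$id`` URLs of the underlying schemas (the pyproject-toml specification page, the deprecated ``distutils`` configfile page, and the ``setuptools`` pyproject userguide page): each generated function name is a sanitized form of its schema's ``$id``. A hypothetical reconstruction of that mapping, inferred from the names visible above rather than taken from the generator's source::

    import re

    def validator_name(schema_id: str) -> str:
        # Assumption: drop a trailing '/' or '#', then map every character
        # that is not legal in a Python identifier to '_'.
        return "validate_" + re.sub(r"[^0-9A-Za-z]", "_", schema_id.rstrip("/#"))

    assert validator_name(
        "https://packaging.python.org/en/latest/specifications/pyproject-toml/"
    ) == "validate_https___packaging_python_org_en_latest_specifications_pyproject_toml"
    assert validator_name(
        "https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html"
    ) == "validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html"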
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated 
function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependencies for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also make sure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependencies for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
     return data
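For orientation, the generated check above receives the already-parsed ``pyproject.toml`` document as a plain ``dict``, returns it unchanged when it is valid, and raises ``JsonSchemaValueException`` for any top-level key outside the ``build-system``/``project``/``tool`` whitelist. A minimal sketch of that behaviour; the import names below are assumptions for illustration, not the actual vendored layout:

    # Hedged sketch -- module and symbol names are hypothetical stand-ins
    # for the generated validation module and its exception class.
    from fastjsonschema_validations import validate
    from fastjsonschema_exceptions import JsonSchemaValueException

    good = {"build-system": {"requires": ["setuptools"]}}
    assert validate(good) is good  # valid documents pass straight through

    try:
        validate({"unexpected-table": {}})  # key not in the whitelist above
    except JsonSchemaValueException as exc:
        # message assembled exactly as in the ``raise`` above, along the
        # lines of: "data must not contain {'unexpected-table'} properties"
        print(exc)
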
 
-def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data, custom_formats={}, name_prefix=None):
+def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also make sure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
@@ -149,12 +151,12 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
             data_keys.remove("zip-safe")
             data__zipsafe = data["zip-safe"]
             if not isinstance(data__zipsafe, (bool)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".zip-safe must be boolean", value=data__zipsafe, name="" + (name_prefix or "data") + ".zip-safe", definition={'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".zip-safe must be boolean", value=data__zipsafe, name="" + (name_prefix or "data") + ".zip-safe", definition={'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, rule='type')
         if "script-files" in data_keys:
             data_keys.remove("script-files")
             data__scriptfiles = data["script-files"]
             if not isinstance(data__scriptfiles, (list, tuple)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files must be array", value=data__scriptfiles, name="" + (name_prefix or "data") + ".script-files", definition={'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files must be array", value=data__scriptfiles, name="" + (name_prefix or "data") + ".script-files", definition={'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, rule='type')
             data__scriptfiles_is_list = isinstance(data__scriptfiles, (list, tuple))
             if data__scriptfiles_is_list:
                 data__scriptfiles_len = len(data__scriptfiles)
@@ -165,7 +167,7 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
             data_keys.remove("eager-resources")
             data__eagerresources = data["eager-resources"]
             if not isinstance(data__eagerresources, (list, tuple)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources must be array", value=data__eagerresources, name="" + (name_prefix or "data") + ".eager-resources", definition={'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources must be array", value=data__eagerresources, name="" + (name_prefix or "data") + ".eager-resources", definition={'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, rule='type')
             data__eagerresources_is_list = isinstance(data__eagerresources, (list, tuple))
             if data__eagerresources_is_list:
                 data__eagerresources_len = len(data__eagerresources)
@@ -179,26 +181,26 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
             if data__packages_one_of_count1 < 2:
                 try:
                     if not isinstance(data__packages, (list, tuple)):
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}}, rule='type')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, rule='type')
                     data__packages_is_list = isinstance(data__packages, (list, tuple))
                     if data__packages_is_list:
                         data__packages_len = len(data__packages)
                         for data__packages_x, data__packages_item in enumerate(data__packages):
-                            validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_package_name(data__packages_item, custom_formats, (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()))
+                            validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_package_name(data__packages_item, custom_formats, (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()))
                     data__packages_one_of_count1 += 1
                 except JsonSchemaValueException: pass
             if data__packages_one_of_count1 < 2:
                 try:
-                    validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data__packages, custom_formats, (name_prefix or "data") + ".packages")
+                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_find_directive(data__packages, custom_formats, (name_prefix or "data") + ".packages")
                     data__packages_one_of_count1 += 1
                 except JsonSchemaValueException: pass
             if data__packages_one_of_count1 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf')
         if "package-dir" in data_keys:
             data_keys.remove("package-dir")
             data__packagedir = data["package-dir"]
             if not isinstance(data__packagedir, (dict)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type')
             data__packagedir_is_dict = isinstance(data__packagedir, dict)
             if data__packagedir_is_dict:
                 data__packagedir_keys = set(data__packagedir.keys())
@@ -209,35 +211,35 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
                         if not isinstance(data__packagedir_val, (str)):
                             raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + " must be string", value=data__packagedir_val, name="" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type')
                 if data__packagedir_keys:
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties')
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties')
                 data__packagedir_len = len(data__packagedir)
                 if data__packagedir_len != 0:
                     data__packagedir_property_names = True
                     for data__packagedir_key in data__packagedir:
                         try:
-                            data__packagedir_key_one_of_count2 = 0
-                            if data__packagedir_key_one_of_count2 < 2:
+                            data__packagedir_key_any_of_count2 = 0
+                            if not data__packagedir_key_any_of_count2:
                                 try:
                                     if data__packagedir_key != "":
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be same as const definition: ", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'const': ''}, rule='const')
-                                    data__packagedir_key_one_of_count2 += 1
+                                    data__packagedir_key_any_of_count2 += 1
                                 except JsonSchemaValueException: pass
-                            if data__packagedir_key_one_of_count2 < 2:
+                            if not data__packagedir_key_any_of_count2:
                                 try:
-                                    validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_package_name(data__packagedir_key, custom_formats, (name_prefix or "data") + ".package-dir")
-                                    data__packagedir_key_one_of_count2 += 1
+                                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_package_name(data__packagedir_key, custom_formats, (name_prefix or "data") + ".package-dir")
+                                    data__packagedir_key_any_of_count2 += 1
                                 except JsonSchemaValueException: pass
-                            if data__packagedir_key_one_of_count2 != 1:
-                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be valid exactly by one definition" + (" (" + str(data__packagedir_key_one_of_count2) + " matches found)"), value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, rule='oneOf')
+                            if not data__packagedir_key_any_of_count2:
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir cannot be validated by any definition", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, rule='anyOf')
                         except JsonSchemaValueException:
                             data__packagedir_property_names = False
                     if not data__packagedir_property_names:
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames')
         if "package-data" in data_keys:
             data_keys.remove("package-data")
             data__packagedata = data["package-data"]
             if not isinstance(data__packagedata, (dict)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be object", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be object", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
             data__packagedata_is_dict = isinstance(data__packagedata, dict)
             if data__packagedata_is_dict:
                 data__packagedata_keys = set(data__packagedata.keys())
@@ -254,32 +256,34 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
                                 if not isinstance(data__packagedata_val_item, (str)):
                                     raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + " must be string", value=data__packagedata_val_item, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
                 if data__packagedata_keys:
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must not contain "+str(data__packagedata_keys)+" properties", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must not contain "+str(data__packagedata_keys)+" properties", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
                 data__packagedata_len = len(data__packagedata)
                 if data__packagedata_len != 0:
                     data__packagedata_property_names = True
                     for data__packagedata_key in data__packagedata:
                         try:
-                            data__packagedata_key_one_of_count3 = 0
-                            if data__packagedata_key_one_of_count3 < 2:
+                            data__packagedata_key_any_of_count3 = 0
+                            if not data__packagedata_key_any_of_count3:
                                 try:
+                                    if not isinstance(data__packagedata_key, (str)):
+                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be string", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
                                     if isinstance(data__packagedata_key, str):
                                         if not custom_formats["python-module-name"](data__packagedata_key):
-                                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be python-module-name", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'format': 'python-module-name'}, rule='format')
-                                    data__packagedata_key_one_of_count3 += 1
+                                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be python-module-name", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+                                    data__packagedata_key_any_of_count3 += 1
                                 except JsonSchemaValueException: pass
-                            if data__packagedata_key_one_of_count3 < 2:
+                            if not data__packagedata_key_any_of_count3:
                                 try:
                                     if data__packagedata_key != "*":
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be same as const definition: *", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'const': '*'}, rule='const')
-                                    data__packagedata_key_one_of_count3 += 1
+                                    data__packagedata_key_any_of_count3 += 1
                                 except JsonSchemaValueException: pass
-                            if data__packagedata_key_one_of_count3 != 1:
-                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be valid exactly by one definition" + (" (" + str(data__packagedata_key_one_of_count3) + " matches found)"), value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf')
+                            if not data__packagedata_key_any_of_count3:
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data cannot be validated by any definition", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, rule='anyOf')
                         except JsonSchemaValueException:
                             data__packagedata_property_names = False
                     if not data__packagedata_property_names:
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be named by propertyName definition", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be named by propertyName definition", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
         if "include-package-data" in data_keys:
             data_keys.remove("include-package-data")
             data__includepackagedata = data["include-package-data"]
@@ -289,7 +293,7 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
             data_keys.remove("exclude-package-data")
             data__excludepackagedata = data["exclude-package-data"]
             if not isinstance(data__excludepackagedata, (dict)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be object", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be object", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
             data__excludepackagedata_is_dict = isinstance(data__excludepackagedata, dict)
             if data__excludepackagedata_is_dict:
                 data__excludepackagedata_keys = set(data__excludepackagedata.keys())
@@ -306,37 +310,39 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
                                 if not isinstance(data__excludepackagedata_val_item, (str)):
                                     raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + " must be string", value=data__excludepackagedata_val_item, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
                 if data__excludepackagedata_keys:
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must not contain "+str(data__excludepackagedata_keys)+" properties", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must not contain "+str(data__excludepackagedata_keys)+" properties", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
                 data__excludepackagedata_len = len(data__excludepackagedata)
                 if data__excludepackagedata_len != 0:
                     data__excludepackagedata_property_names = True
                     for data__excludepackagedata_key in data__excludepackagedata:
                         try:
-                            data__excludepackagedata_key_one_of_count4 = 0
-                            if data__excludepackagedata_key_one_of_count4 < 2:
+                            data__excludepackagedata_key_any_of_count4 = 0
+                            if not data__excludepackagedata_key_any_of_count4:
                                 try:
+                                    if not isinstance(data__excludepackagedata_key, (str)):
+                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be string", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
                                     if isinstance(data__excludepackagedata_key, str):
                                         if not custom_formats["python-module-name"](data__excludepackagedata_key):
-                                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'format': 'python-module-name'}, rule='format')
-                                    data__excludepackagedata_key_one_of_count4 += 1
+                                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+                                    data__excludepackagedata_key_any_of_count4 += 1
                                 except JsonSchemaValueException: pass
-                            if data__excludepackagedata_key_one_of_count4 < 2:
+                            if not data__excludepackagedata_key_any_of_count4:
                                 try:
                                     if data__excludepackagedata_key != "*":
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be same as const definition: *", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'const': '*'}, rule='const')
-                                    data__excludepackagedata_key_one_of_count4 += 1
+                                    data__excludepackagedata_key_any_of_count4 += 1
                                 except JsonSchemaValueException: pass
-                            if data__excludepackagedata_key_one_of_count4 != 1:
-                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be valid exactly by one definition" + (" (" + str(data__excludepackagedata_key_one_of_count4) + " matches found)"), value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf')
+                            if not data__excludepackagedata_key_any_of_count4:
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data cannot be validated by any definition", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, rule='anyOf')
                         except JsonSchemaValueException:
                             data__excludepackagedata_property_names = False
                     if not data__excludepackagedata_property_names:
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be named by propertyName definition", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be named by propertyName definition", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
         if "namespace-packages" in data_keys:
             data_keys.remove("namespace-packages")
             data__namespacepackages = data["namespace-packages"]
             if not isinstance(data__namespacepackages, (list, tuple)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, rule='type')
             data__namespacepackages_is_list = isinstance(data__namespacepackages, (list, tuple))
             if data__namespacepackages_is_list:
                 data__namespacepackages_len = len(data__namespacepackages)
@@ -364,7 +370,7 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
             data_keys.remove("data-files")
             data__datafiles = data["data-files"]
             if not isinstance(data__datafiles, (dict)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files must be object", value=data__datafiles, name="" + (name_prefix or "data") + ".data-files", definition={'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files must be object", value=data__datafiles, name="" + (name_prefix or "data") + ".data-files", definition={'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
             data__datafiles_is_dict = isinstance(data__datafiles, dict)
             if data__datafiles_is_dict:
                 data__datafiles_keys = set(data__datafiles.keys())
@@ -401,7 +407,7 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
             data_keys.remove("license-files")
             data__licensefiles = data["license-files"]
             if not isinstance(data__licensefiles, (list, tuple)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files must be array", value=data__licensefiles, name="" + (name_prefix or "data") + ".license-files", definition={'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files must be array", value=data__licensefiles, name="" + (name_prefix or "data") + ".license-files", definition={'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, rule='type')
             data__licensefiles_is_list = isinstance(data__licensefiles, (list, tuple))
             if data__licensefiles_is_list:
                 data__licensefiles_len = len(data__licensefiles)
@@ -412,7 +418,7 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
             data_keys.remove("dynamic")
             data__dynamic = data["dynamic"]
             if not isinstance(data__dynamic, (dict)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be object", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be object", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 
'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}, rule='type')
             data__dynamic_is_dict = isinstance(data__dynamic, dict)
             if data__dynamic_is_dict:
                 data__dynamic_keys = set(data__dynamic.keys())
@@ -422,37 +428,37 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
                     data__dynamic__version_one_of_count5 = 0
                     if data__dynamic__version_one_of_count5 < 2:
                         try:
-                            validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
+                            validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_attr_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
                             data__dynamic__version_one_of_count5 += 1
                         except JsonSchemaValueException: pass
                     if data__dynamic__version_one_of_count5 < 2:
                         try:
-                            validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
+                            validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
                             data__dynamic__version_one_of_count5 += 1
                         except JsonSchemaValueException: pass
                     if data__dynamic__version_one_of_count5 != 1:
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count5) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count5) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf')
                 if "classifiers" in data__dynamic_keys:
                     data__dynamic_keys.remove("classifiers")
                     data__dynamic__classifiers = data__dynamic["classifiers"]
-                    validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__classifiers, custom_formats, (name_prefix or "data") + ".dynamic.classifiers")
+                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive(data__dynamic__classifiers, custom_formats, (name_prefix or "data") + ".dynamic.classifiers")
                 if "description" in data__dynamic_keys:
                     data__dynamic_keys.remove("description")
                     data__dynamic__description = data__dynamic["description"]
-                    validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__description, custom_formats, (name_prefix or "data") + ".dynamic.description")
-                if "dependencies" in data__dynamic_keys:
-                    data__dynamic_keys.remove("dependencies")
-                    data__dynamic__dependencies = data__dynamic["dependencies"]
-                    validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__dependencies, custom_formats, (name_prefix or "data") + ".dynamic.dependencies")
+                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive(data__dynamic__description, custom_formats, (name_prefix or "data") + ".dynamic.description")
                 if "entry-points" in data__dynamic_keys:
                     data__dynamic_keys.remove("entry-points")
                     data__dynamic__entrypoints = data__dynamic["entry-points"]
-                    validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__entrypoints, custom_formats, (name_prefix or "data") + ".dynamic.entry-points")
+                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive(data__dynamic__entrypoints, custom_formats, (name_prefix or "data") + ".dynamic.entry-points")
+                if "dependencies" in data__dynamic_keys:
+                    data__dynamic_keys.remove("dependencies")
+                    data__dynamic__dependencies = data__dynamic["dependencies"]
+                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_for_dependencies(data__dynamic__dependencies, custom_formats, (name_prefix or "data") + ".dynamic.dependencies")
                 if "optional-dependencies" in data__dynamic_keys:
                     data__dynamic_keys.remove("optional-dependencies")
                     data__dynamic__optionaldependencies = data__dynamic["optional-dependencies"]
                     if not isinstance(data__dynamic__optionaldependencies, (dict)):
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be object", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='type')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be object", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, rule='type')
                     data__dynamic__optionaldependencies_is_dict = isinstance(data__dynamic__optionaldependencies, dict)
                     if data__dynamic__optionaldependencies_is_dict:
                         data__dynamic__optionaldependencies_keys = set(data__dynamic__optionaldependencies.keys())
@@ -460,32 +466,38 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
                             if REGEX_PATTERNS['.+'].search(data__dynamic__optionaldependencies_key):
                                 if data__dynamic__optionaldependencies_key in data__dynamic__optionaldependencies_keys:
                                     data__dynamic__optionaldependencies_keys.remove(data__dynamic__optionaldependencies_key)
-                                validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__optionaldependencies_val, custom_formats, (name_prefix or "data") + ".dynamic.optional-dependencies.{data__dynamic__optionaldependencies_key}".format(**locals()))
+                                validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_for_dependencies(data__dynamic__optionaldependencies_val, custom_formats, (name_prefix or "data") + ".dynamic.optional-dependencies.{data__dynamic__optionaldependencies_key}".format(**locals()))
                         if data__dynamic__optionaldependencies_keys:
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must not contain "+str(data__dynamic__optionaldependencies_keys)+" properties", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='additionalProperties')
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must not contain "+str(data__dynamic__optionaldependencies_keys)+" properties", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, rule='additionalProperties')
                         data__dynamic__optionaldependencies_len = len(data__dynamic__optionaldependencies)
                         if data__dynamic__optionaldependencies_len != 0:
                             data__dynamic__optionaldependencies_property_names = True
                             for data__dynamic__optionaldependencies_key in data__dynamic__optionaldependencies:
                                 try:
+                                    if not isinstance(data__dynamic__optionaldependencies_key, (str)):
+                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be string", value=data__dynamic__optionaldependencies_key, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type')
                                     if isinstance(data__dynamic__optionaldependencies_key, str):
-                                        if not custom_formats["python-identifier"](data__dynamic__optionaldependencies_key):
-                                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be python-identifier", value=data__dynamic__optionaldependencies_key, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'format': 'python-identifier'}, rule='format')
+                                        if not custom_formats["pep508-identifier"](data__dynamic__optionaldependencies_key):
+                                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be pep508-identifier", value=data__dynamic__optionaldependencies_key, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format')
                                 except JsonSchemaValueException:
                                     data__dynamic__optionaldependencies_property_names = False
                             if not data__dynamic__optionaldependencies_property_names:
-                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be named by propertyName definition", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='propertyNames')
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be named by propertyName definition", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, rule='propertyNames')
                 if "readme" in data__dynamic_keys:
                     data__dynamic_keys.remove("readme")
                     data__dynamic__readme = data__dynamic["readme"]
+                    if not isinstance(data__dynamic__readme, (dict)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must be object", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}, rule='type')
                     data__dynamic__readme_any_of_count6 = 0
                     if not data__dynamic__readme_any_of_count6:
                         try:
-                            validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__readme, custom_formats, (name_prefix or "data") + ".dynamic.readme")
+                            validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive(data__dynamic__readme, custom_formats, (name_prefix or "data") + ".dynamic.readme")
                             data__dynamic__readme_any_of_count6 += 1
                         except JsonSchemaValueException: pass
                     if not data__dynamic__readme_any_of_count6:
                         try:
+                            if not isinstance(data__dynamic__readme, (dict)):
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must be object", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}, rule='type')
                             data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict)
                             if data__dynamic__readme_is_dict:
                                 data__dynamic__readme_keys = set(data__dynamic__readme.keys())
@@ -494,41 +506,75 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data,
                                     data__dynamic__readme__contenttype = data__dynamic__readme["content-type"]
                                     if not isinstance(data__dynamic__readme__contenttype, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme.content-type must be string", value=data__dynamic__readme__contenttype, name="" + (name_prefix or "data") + ".dynamic.readme.content-type", definition={'type': 'string'}, rule='type')
+                                if "file" in data__dynamic__readme_keys:
+                                    data__dynamic__readme_keys.remove("file")
+                                    data__dynamic__readme__file = data__dynamic__readme["file"]
+                                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_properties_file(data__dynamic__readme__file, custom_formats, (name_prefix or "data") + ".dynamic.readme.file")
+                                if data__dynamic__readme_keys:
+                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must not contain "+str(data__dynamic__readme_keys)+" properties", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}, rule='additionalProperties')
                             data__dynamic__readme_any_of_count6 += 1
                         except JsonSchemaValueException: pass
                     if not data__dynamic__readme_any_of_count6:
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme cannot be validated by any definition", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='anyOf')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme cannot be validated by any definition", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}, rule='anyOf')
                     data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict)
                     if data__dynamic__readme_is_dict:
-                        data__dynamic__readme_len = len(data__dynamic__readme)
-                        if not all(prop in data__dynamic__readme for prop in ['file']):
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must contain ['file'] properties", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='required')
+                        data__dynamic__readme__missing_keys = set(['file']) - data__dynamic__readme.keys()
+                        if data__dynamic__readme__missing_keys:
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must contain " + (str(sorted(data__dynamic__readme__missing_keys)) + " properties"), value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}, rule='required')
                 if data__dynamic_keys:
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='additionalProperties')
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}, rule='additionalProperties')
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml `_.', 'It considers only ``setuptools`` `parameters', '`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
     return data
 
-def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data, custom_formats={}, name_prefix=None):
+def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_properties_file(data, custom_formats={}, name_prefix=None):
+    data_one_of_count7 = 0
+    if data_one_of_count7 < 2:
+        try:
+            if not isinstance(data, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string'}, rule='type')
+            data_one_of_count7 += 1
+        except JsonSchemaValueException: pass
+    if data_one_of_count7 < 2:
+        try:
+            if not isinstance(data, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + " must be array", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data_is_list = isinstance(data, (list, tuple))
+            if data_is_list:
+                data_len = len(data)
+                for data_x, data_item in enumerate(data):
+                    if not isinstance(data_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + "[{data_x}]".format(**locals()) + " must be string", value=data_item, name="" + (name_prefix or "data") + "[{data_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+            data_one_of_count7 += 1
+        except JsonSchemaValueException: pass
+    if data_one_of_count7 != 1:
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count7) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
+    return data
+
+def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_for_dependencies(data, custom_formats={}, name_prefix=None):
+    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive(data, custom_formats, (name_prefix or "data") + "")
+    return data
+
+def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
-        data_len = len(data)
-        if not all(prop in data for prop in ['file']):
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['file'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='required')
+        data__missing_keys = set(['file']) - data.keys()
+        if data__missing_keys:
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='required')
         data_keys = set(data.keys())
         if "file" in data_keys:
             data_keys.remove("file")
             data__file = data["file"]
-            data__file_one_of_count7 = 0
-            if data__file_one_of_count7 < 2:
+            data__file_one_of_count8 = 0
+            if data__file_one_of_count8 < 2:
                 try:
                     if not isinstance(data__file, (str)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be string", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'string'}, rule='type')
-                    data__file_one_of_count7 += 1
+                    data__file_one_of_count8 += 1
                 except JsonSchemaValueException: pass
-            if data__file_one_of_count7 < 2:
+            if data__file_one_of_count8 < 2:
                 try:
                     if not isinstance(data__file, (list, tuple)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be array", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
@@ -538,33 +584,36 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__defi
                         for data__file_x, data__file_item in enumerate(data__file):
                             if not isinstance(data__file_item, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + " must be string", value=data__file_item, name="" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
-                    data__file_one_of_count7 += 1
+                    data__file_one_of_count8 += 1
                 except JsonSchemaValueException: pass
-            if data__file_one_of_count7 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count7) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
+            if data__file_one_of_count8 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count8) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
         if data_keys:
             raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='additionalProperties')
     return data
 
-def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data, custom_formats={}, name_prefix=None):
+def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_attr_directive(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
-        data_len = len(data)
-        if not all(prop in data for prop in ['attr']):
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['attr'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='required')
+        data__missing_keys = set(['attr']) - data.keys()
+        if data__missing_keys:
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, rule='required')
         data_keys = set(data.keys())
         if "attr" in data_keys:
             data_keys.remove("attr")
             data__attr = data["attr"]
             if not isinstance(data__attr, (str)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".attr must be string", value=data__attr, name="" + (name_prefix or "data") + ".attr", definition={'type': 'string'}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".attr must be string", value=data__attr, name="" + (name_prefix or "data") + ".attr", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='type')
+            if isinstance(data__attr, str):
+                if not custom_formats["python-qualified-identifier"](data__attr):
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".attr must be python-qualified-identifier", value=data__attr, name="" + (name_prefix or "data") + ".attr", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='format')
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, rule='additionalProperties')
     return data
 
-def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data, custom_formats={}, name_prefix=None):
+def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_find_directive(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type')
     data_is_dict = isinstance(data, dict)
@@ -622,31 +671,35 @@ def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__defi
             raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='additionalProperties')
     return data
 
-def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_package_name(data, custom_formats={}, name_prefix=None):
+def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_package_name(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (str)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, rule='type')
-    data_any_of_count8 = 0
-    if not data_any_of_count8:
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='type')
+    data_any_of_count9 = 0
+    if not data_any_of_count9:
         try:
+            if not isinstance(data, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
             if isinstance(data, str):
                 if not custom_formats["python-module-name"](data):
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-module-name", value=data, name="" + (name_prefix or "data") + "", definition={'format': 'python-module-name'}, rule='format')
-            data_any_of_count8 += 1
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-module-name", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+            data_any_of_count9 += 1
         except JsonSchemaValueException: pass
-    if not data_any_of_count8:
+    if not data_any_of_count9:
         try:
+            if not isinstance(data, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'pep561-stub-name'}, rule='type')
             if isinstance(data, str):
                 if not custom_formats["pep561-stub-name"](data):
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep561-stub-name", value=data, name="" + (name_prefix or "data") + "", definition={'format': 'pep561-stub-name'}, rule='format')
-            data_any_of_count8 += 1
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep561-stub-name", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'pep561-stub-name'}, rule='format')
+            data_any_of_count9 += 1
         except JsonSchemaValueException: pass
-    if not data_any_of_count8:
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or PEP 561).', 'type': 'string', 'anyOf': [{'format': 'python-module-name'}, {'format': 'pep561-stub-name'}]}, rule='anyOf')
+    if not data_any_of_count9:
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='anyOf')
     return data
 
-def validate_https___docs_python_org_3_install(data, custom_formats={}, name_prefix=None):
+def validate_https___setuptools_pypa_io_en_latest_deprecated_distutils_configfile_html(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
@@ -663,14 +716,14 @@ def validate_https___docs_python_org_3_install(data, custom_formats={}, name_pre
                     raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be object", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'object'}, rule='type')
     return data
 
-def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data, custom_formats={}, name_prefix=None):
+def validate_https___packaging_python_org_en_latest_specifications_pyproject_toml(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
-        data_len = len(data)
-        if not all(prop in data for prop in ['name']):
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required')
+        data__missing_keys = set(['name']) - data.keys()
+        if data__missing_keys:
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required')
         data_keys = set(data.keys())
         if "name" in data_keys:
             data_keys.remove("name")
@@ -696,65 +749,65 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
         if "readme" in data_keys:
             data_keys.remove("readme")
             data__readme = data["readme"]
-            data__readme_one_of_count9 = 0
-            if data__readme_one_of_count9 < 2:
+            data__readme_one_of_count10 = 0
+            if data__readme_one_of_count10 < 2:
                 try:
                     if not isinstance(data__readme, (str)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type')
-                    data__readme_one_of_count9 += 1
+                    data__readme_one_of_count10 += 1
                 except JsonSchemaValueException: pass
-            if data__readme_one_of_count9 < 2:
+            if data__readme_one_of_count10 < 2:
                 try:
                     if not isinstance(data__readme, (dict)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type')
-                    data__readme_any_of_count10 = 0
-                    if not data__readme_any_of_count10:
+                    data__readme_any_of_count11 = 0
+                    if not data__readme_any_of_count11:
                         try:
                             data__readme_is_dict = isinstance(data__readme, dict)
                             if data__readme_is_dict:
-                                data__readme_len = len(data__readme)
-                                if not all(prop in data__readme for prop in ['file']):
-                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['file'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, rule='required')
+                                data__readme__missing_keys = set(['file']) - data__readme.keys()
+                                if data__readme__missing_keys:
+                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain " + (str(sorted(data__readme__missing_keys)) + " properties"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, rule='required')
                                 data__readme_keys = set(data__readme.keys())
                                 if "file" in data__readme_keys:
                                     data__readme_keys.remove("file")
                                     data__readme__file = data__readme["file"]
                                     if not isinstance(data__readme__file, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type')
-                            data__readme_any_of_count10 += 1
+                            data__readme_any_of_count11 += 1
                         except JsonSchemaValueException: pass
-                    if not data__readme_any_of_count10:
+                    if not data__readme_any_of_count11:
                         try:
                             data__readme_is_dict = isinstance(data__readme, dict)
                             if data__readme_is_dict:
-                                data__readme_len = len(data__readme)
-                                if not all(prop in data__readme for prop in ['text']):
-                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['text'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}, rule='required')
+                                data__readme__missing_keys = set(['text']) - data__readme.keys()
+                                if data__readme__missing_keys:
+                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain " + (str(sorted(data__readme__missing_keys)) + " properties"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}, rule='required')
                                 data__readme_keys = set(data__readme.keys())
                                 if "text" in data__readme_keys:
                                     data__readme_keys.remove("text")
                                     data__readme__text = data__readme["text"]
                                     if not isinstance(data__readme__text, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type')
-                            data__readme_any_of_count10 += 1
+                            data__readme_any_of_count11 += 1
                         except JsonSchemaValueException: pass
-                    if not data__readme_any_of_count10:
+                    if not data__readme_any_of_count11:
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf')
                     data__readme_is_dict = isinstance(data__readme, dict)
                     if data__readme_is_dict:
-                        data__readme_len = len(data__readme)
-                        if not all(prop in data__readme for prop in ['content-type']):
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['content-type'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}, rule='required')
+                        data__readme__missing_keys = set(['content-type']) - data__readme.keys()
+                        if data__readme__missing_keys:
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain " + (str(sorted(data__readme__missing_keys)) + " properties"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}, rule='required')
                         data__readme_keys = set(data__readme.keys())
                         if "content-type" in data__readme_keys:
                             data__readme_keys.remove("content-type")
                             data__readme__contenttype = data__readme["content-type"]
                             if not isinstance(data__readme__contenttype, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type')
-                    data__readme_one_of_count9 += 1
+                    data__readme_one_of_count10 += 1
                 except JsonSchemaValueException: pass
-            if data__readme_one_of_count9 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count9) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
+            if data__readme_one_of_count10 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count10) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
         if "requires-python" in data_keys:
             data_keys.remove("requires-python")
             data__requirespython = data["requires-python"]
@@ -766,39 +819,39 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
         if "license" in data_keys:
             data_keys.remove("license")
             data__license = data["license"]
-            data__license_one_of_count11 = 0
-            if data__license_one_of_count11 < 2:
+            data__license_one_of_count12 = 0
+            if data__license_one_of_count12 < 2:
                 try:
                     data__license_is_dict = isinstance(data__license, dict)
                     if data__license_is_dict:
-                        data__license_len = len(data__license)
-                        if not all(prop in data__license for prop in ['file']):
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['file'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required')
+                        data__license__missing_keys = set(['file']) - data__license.keys()
+                        if data__license__missing_keys:
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain " + (str(sorted(data__license__missing_keys)) + " properties"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required')
                         data__license_keys = set(data__license.keys())
                         if "file" in data__license_keys:
                             data__license_keys.remove("file")
                             data__license__file = data__license["file"]
                             if not isinstance(data__license__file, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type')
-                    data__license_one_of_count11 += 1
+                    data__license_one_of_count12 += 1
                 except JsonSchemaValueException: pass
-            if data__license_one_of_count11 < 2:
+            if data__license_one_of_count12 < 2:
                 try:
                     data__license_is_dict = isinstance(data__license, dict)
                     if data__license_is_dict:
-                        data__license_len = len(data__license)
-                        if not all(prop in data__license for prop in ['text']):
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['text'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}, rule='required')
+                        data__license__missing_keys = set(['text']) - data__license.keys()
+                        if data__license__missing_keys:
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain " + (str(sorted(data__license__missing_keys)) + " properties"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}, rule='required')
                         data__license_keys = set(data__license.keys())
                         if "text" in data__license_keys:
                             data__license_keys.remove("text")
                             data__license__text = data__license["text"]
                             if not isinstance(data__license__text, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}, rule='type')
-                    data__license_one_of_count11 += 1
+                    data__license_one_of_count12 += 1
                 except JsonSchemaValueException: pass
-            if data__license_one_of_count11 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count11) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf')
+            if data__license_one_of_count12 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count12) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf')
         if "authors" in data_keys:
             data_keys.remove("authors")
             data__authors = data["authors"]
@@ -808,7 +861,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
             if data__authors_is_list:
                 data__authors_len = len(data__authors)
                 for data__authors_x, data__authors_item in enumerate(data__authors):
-                    validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__authors_item, custom_formats, (name_prefix or "data") + ".authors[{data__authors_x}]".format(**locals()))
+                    validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_author(data__authors_item, custom_formats, (name_prefix or "data") + ".authors[{data__authors_x}]".format(**locals()))
         if "maintainers" in data_keys:
             data_keys.remove("maintainers")
             data__maintainers = data["maintainers"]
@@ -818,7 +871,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
             if data__maintainers_is_list:
                 data__maintainers_len = len(data__maintainers)
                 for data__maintainers_x, data__maintainers_item in enumerate(data__maintainers):
-                    validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__maintainers_item, custom_formats, (name_prefix or "data") + ".maintainers[{data__maintainers_x}]".format(**locals()))
+                    validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_author(data__maintainers_item, custom_formats, (name_prefix or "data") + ".maintainers[{data__maintainers_x}]".format(**locals()))
         if "keywords" in data_keys:
             data_keys.remove("keywords")
             data__keywords = data["keywords"]
@@ -866,11 +919,11 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
         if "scripts" in data_keys:
             data_keys.remove("scripts")
             data__scripts = data["scripts"]
-            validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__scripts, custom_formats, (name_prefix or "data") + ".scripts")
+            validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_entry_point_group(data__scripts, custom_formats, (name_prefix or "data") + ".scripts")
         if "gui-scripts" in data_keys:
             data_keys.remove("gui-scripts")
             data__guiscripts = data["gui-scripts"]
-            validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__guiscripts, custom_formats, (name_prefix or "data") + ".gui-scripts")
+            validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_entry_point_group(data__guiscripts, custom_formats, (name_prefix or "data") + ".gui-scripts")
         if "entry-points" in data_keys:
             data_keys.remove("entry-points")
             data__entrypoints = data["entry-points"]
@@ -881,7 +934,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
                     if REGEX_PATTERNS['^.+$'].search(data__entrypoints_key):
                         if data__entrypoints_key in data__entrypoints_keys:
                             data__entrypoints_keys.remove(data__entrypoints_key)
-                        validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__entrypoints_val, custom_formats, (name_prefix or "data") + ".entry-points.{data__entrypoints_key}".format(**locals()))
+                        validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_entry_point_group(data__entrypoints_val, custom_formats, (name_prefix or "data") + ".entry-points.{data__entrypoints_key}".format(**locals()))
                 if data__entrypoints_keys:
                     raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must not contain "+str(data__entrypoints_keys)+" properties", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='additionalProperties')
                 data__entrypoints_len = len(data__entrypoints)
@@ -905,7 +958,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
             if data__dependencies_is_list:
                 data__dependencies_len = len(data__dependencies)
                 for data__dependencies_x, data__dependencies_item in enumerate(data__dependencies):
-                    validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__dependencies_item, custom_formats, (name_prefix or "data") + ".dependencies[{data__dependencies_x}]".format(**locals()))
+                    validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_dependency(data__dependencies_item, custom_formats, (name_prefix or "data") + ".dependencies[{data__dependencies_x}]".format(**locals()))
         if "optional-dependencies" in data_keys:
             data_keys.remove("optional-dependencies")
             data__optionaldependencies = data["optional-dependencies"]
@@ -924,7 +977,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
                         if data__optionaldependencies_val_is_list:
                             data__optionaldependencies_val_len = len(data__optionaldependencies_val)
                             for data__optionaldependencies_val_x, data__optionaldependencies_val_item in enumerate(data__optionaldependencies_val):
-                                validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__optionaldependencies_val_item, custom_formats, (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}[{data__optionaldependencies_val_x}]".format(**locals()))
+                                validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_dependency(data__optionaldependencies_val_item, custom_formats, (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}[{data__optionaldependencies_val_x}]".format(**locals()))
                 if data__optionaldependencies_keys:
                     raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must not contain "+str(data__optionaldependencies_keys)+" properties", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
                 data__optionaldependencies_len = len(data__optionaldependencies)
@@ -951,14 +1004,14 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
                     if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']:
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + " must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name="" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + "", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum')
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties')
     try:
         try:
             data_is_dict = isinstance(data, dict)
             if data_is_dict:
-                data_len = len(data)
-                if not all(prop in data for prop in ['dynamic']):
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['dynamic'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, rule='required')
+                data__missing_keys = set(['dynamic']) - data.keys()
+                if data__missing_keys:
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, rule='required')
                 data_keys = set(data.keys())
                 if "dynamic" in data_keys:
                     data_keys.remove("dynamic")
@@ -983,12 +1036,12 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
     else:
         data_is_dict = isinstance(data, dict)
         if data_is_dict:
-            data_len = len(data)
-            if not all(prop in data for prop in ['version']):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['version'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, rule='required')
+            data__missing_keys = set(['version']) - data.keys()
+            if data__missing_keys:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, rule='required')
     return data
 
-def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data, custom_formats={}, name_prefix=None):
+def validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_dependency(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (str)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='type')
     if isinstance(data, str):
@@ -996,7 +1049,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
             raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep508", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='format')
     return data
 
-def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data, custom_formats={}, name_prefix=None):
+def validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_entry_point_group(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='type')
     data_is_dict = isinstance(data, dict)
@@ -1027,7 +1080,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_pro
                 raise JsonSchemaValueException("" + (name_prefix or "data") + " must be named by propertyName definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='propertyNames')
     return data
 
-def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data, custom_formats={}, name_prefix=None):
+def validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_author(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, rule='type')
     data_is_dict = isinstance(data, dict)
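The regenerated hunks above all apply one pattern: instead of probing each required property with ``all(prop in data for prop in [...])``, the validator now computes every missing key up front via set difference, so the raised exception can name all of them at once. A minimal standalone sketch of that check (the helper name is illustrative, not part of the generated module):

    # Sketch of the regenerated "required" check: set difference
    # collects every missing key in a single pass.
    def missing_required_keys(data: dict, required: list) -> list:
        return sorted(set(required) - data.keys())

    # e.g. a ``project`` table that omits ``version``:
    assert missing_required_keys({"name": "pkg"}, ["name", "version"]) == ["version"]
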
diff --git a/setuptools/config/_validate_pyproject/formats.py b/setuptools/config/_validate_pyproject/formats.py
index e73961661d..5a0599cbb5 100644
--- a/setuptools/config/_validate_pyproject/formats.py
+++ b/setuptools/config/_validate_pyproject/formats.py
@@ -1,3 +1,13 @@
+"""
+The functions in this module are used to validate schemas with the
+`format JSON Schema keyword
+<https://json-schema.org/understanding-json-schema/reference/string#format>`_.
+
+The correspondence is given by replacing the ``_`` character in the name of the
+function with a ``-`` to obtain the format name and vice versa.
+"""
+
+import builtins
 import logging
 import os
 import re
@@ -20,7 +30,7 @@
         (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
         (?P<pre>                                          # pre-release
             [-_\.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
             [-_\.]?
             (?P<pre_n>[0-9]+)?
         )?
@@ -48,6 +58,9 @@
 
 
 def pep440(version: str) -> bool:
+    """See :ref:`PyPA's version specification `
+    (initially introduced in :pep:`440`).
+    """
     return VERSION_REGEX.match(version) is not None
 
 
@@ -59,6 +72,9 @@ def pep440(version: str) -> bool:
 
 
 def pep508_identifier(name: str) -> bool:
+    """See :ref:`PyPA's name specification `
+    (initially introduced in :pep:`508#names`).
+    """
     return PEP508_IDENTIFIER_REGEX.match(name) is not None
 
 
@@ -70,6 +86,9 @@ def pep508_identifier(name: str) -> bool:
         from setuptools._vendor.packaging import requirements as _req  # type: ignore
 
     def pep508(value: str) -> bool:
+        """See :ref:`PyPA's dependency specifiers `
+        (initially introduced in :pep:`508`).
+        """
         try:
             _req.Requirement(value)
             return True
@@ -88,7 +107,10 @@ def pep508(value: str) -> bool:
 
 
 def pep508_versionspec(value: str) -> bool:
-    """Expression that can be used to specify/lock versions (including ranges)"""
+    """Expression that can be used to specify/lock versions (including ranges)
+    See ``versionspec`` in :ref:`PyPA's dependency specifiers
+    <dependency-specifiers>` (initially introduced in :pep:`508`).
+    """
     if any(c in value for c in (";", "]", "@")):
         # In PEP 508:
         # conditional markers, extras and URL specs are not included in the
@@ -104,6 +126,11 @@ def pep508_versionspec(value: str) -> bool:
 
 
 def pep517_backend_reference(value: str) -> bool:
+    """See PyPA's specification for defining build-backend references
+    introduced in :pep:`517#source-trees`.
+
+    This is similar to an entry-point reference (e.g., ``package.module:object``).
+    """
     module, _, obj = value.partition(":")
     identifiers = (i.strip() for i in _chain(module.split("."), obj.split(".")))
     return all(python_identifier(i) for i in identifiers if i)
@@ -120,10 +147,10 @@ def _download_classifiers() -> str:
 
     url = "https://pypi.org/pypi?:action=list_classifiers"
     context = ssl.create_default_context()
-    with urlopen(url, context=context) as response:
+    with urlopen(url, context=context) as response:  # noqa: S310 (audit URLs)
         headers = Message()
         headers["content_type"] = response.getheader("content-type", "text/plain")
-        return response.read().decode(headers.get_param("charset", "utf-8"))
+        return response.read().decode(headers.get_param("charset", "utf-8"))  # type: ignore[no-any-return]
 
 
 class _TroveClassifier:
@@ -136,14 +163,14 @@ class _TroveClassifier:
 
     downloaded: typing.Union[None, "Literal[False]", typing.Set[str]]
 
-    def __init__(self):
+    def __init__(self) -> None:
         self.downloaded = None
         self._skip_download = False
         # None => not cached yet
         # False => cache not available
         self.__name__ = "trove_classifier"  # Emulate a public function
 
-    def _disable_download(self):
+    def _disable_download(self) -> None:
         # This is a private API. Only setuptools has the consent of using it.
         self._skip_download = True
 
@@ -180,6 +207,7 @@ def __call__(self, value: str) -> bool:
     from trove_classifiers import classifiers as _trove_classifiers
 
     def trove_classifier(value: str) -> bool:
+        """See https://pypi.org/classifiers/"""
         return value in _trove_classifiers or value.lower().startswith("private ::")
 
 except ImportError:  # pragma: no cover
@@ -191,6 +219,10 @@ def trove_classifier(value: str) -> bool:
 
 
 def pep561_stub_name(value: str) -> bool:
+    """Name of a directory containing type stubs.
+    It must follow the name scheme ``-stubs`` as defined in
+    :pep:`561#stub-only-packages`.
+    """
     top, *children = value.split(".")
     if not top.endswith("-stubs"):
         return False
@@ -202,6 +234,10 @@ def pep561_stub_name(value: str) -> bool:
 
 
 def url(value: str) -> bool:
+    """Valid URL (validation uses :obj:`urllib.parse`).
+    For maximum compatibility please make sure to include a ``scheme`` prefix
+    in your URL (e.g. ``http://``).
+    """
     from urllib.parse import urlparse
 
     try:
@@ -230,24 +266,40 @@ def url(value: str) -> bool:
 
 
 def python_identifier(value: str) -> bool:
+    """Can be used as identifier in Python.
+    (Validation uses :obj:`str.isidentifier`).
+    """
     return value.isidentifier()
 
 
 def python_qualified_identifier(value: str) -> bool:
+    """
+    Python "dotted identifier", i.e. a sequence of :obj:`python_identifier`
+    concatenated with ``"."`` (e.g.: ``package.module.submodule``).
+    """
     if value.startswith(".") or value.endswith("."):
         return False
     return all(python_identifier(m) for m in value.split("."))
 
 
 def python_module_name(value: str) -> bool:
+    """Module name that can be used in an ``import``-statement in Python.
+    See :obj:`python_qualified_identifier`.
+    """
     return python_qualified_identifier(value)
 
 
 def python_entrypoint_group(value: str) -> bool:
+    """See ``Data model > group`` in the :ref:`PyPA's entry-points specification
+    <entry-points>`.
+    """
     return ENTRYPOINT_GROUP_REGEX.match(value) is not None
 
 
 def python_entrypoint_name(value: str) -> bool:
+    """See ``Data model > name`` in the :ref:`PyPA's entry-points specification
+    <entry-points>`.
+    """
     if not ENTRYPOINT_REGEX.match(value):
         return False
     if not RECOMMEDED_ENTRYPOINT_REGEX.match(value):
@@ -258,6 +310,13 @@ def python_entrypoint_name(value: str) -> bool:
 
 
 def python_entrypoint_reference(value: str) -> bool:
+    """Reference to a Python object using in the format::
+
+        importable.module:object.attr
+
+    See ``Data model > object reference`` in the :ref:`PyPA's entry-points specification
+    <entry-points>`.
+    """
     module, _, rest = value.partition(":")
     if "[" in rest:
         obj, _, extras_ = rest.partition("[")
@@ -273,3 +332,23 @@ def python_entrypoint_reference(value: str) -> bool:
     module_parts = module.split(".")
     identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
     return all(python_identifier(i.strip()) for i in identifiers)
+
+
+def uint8(value: builtins.int) -> bool:
+    r"""Unsigned 8-bit integer (:math:`0 \leq x < 2^8`)"""
+    return 0 <= value < 2**8
+
+
+def uint16(value: builtins.int) -> bool:
+    r"""Unsigned 16-bit integer (:math:`0 \leq x < 2^{16}`)"""
+    return 0 <= value < 2**16
+
+
+def uint(value: builtins.int) -> bool:
+    r"""Unsigned 64-bit integer (:math:`0 \leq x < 2^{64}`)"""
+    return 0 <= value < 2**64
+
+
+def int(value: builtins.int) -> bool:
+    r"""Signed 64-bit integer (:math:`-2^{63} \leq x < 2^{63}`)"""
+    return -(2**63) <= value < 2**63
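As the new module docstring explains, each validator above maps to a JSON Schema ``format`` name by swapping ``_`` for ``-`` in the function name. A short usage sketch of the helpers added or documented in this patch (assuming the module is importable under the path shown in the diff header):

    from setuptools.config._validate_pyproject import formats

    assert formats.pep440("1.2.3rc1")                    # format: "pep440"
    assert formats.pep508_identifier("my-pkg")           # format: "pep508-identifier"
    assert formats.pep508_versionspec(">=1.0,<2")        # version ranges only
    assert formats.python_entrypoint_reference("pkg.mod:obj.attr")
    assert not formats.uint8(256)                        # uint8 requires 0 <= x < 2**8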

From 0576d60a00519a3c677de98f449b0a536c1630d7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 7 May 2024 19:02:27 +0100
Subject: [PATCH 0631/1761] Add exception to mypy.ini

---
 mypy.ini | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/mypy.ini b/mypy.ini
index 45671826b1..9fab958288 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -34,3 +34,8 @@ ignore_missing_imports = True
 # - setuptools._vendor.packaging._manylinux: Mypy issue, this vendored module is already excluded!
 [mypy-pkg_resources.tests.*,setuptools._vendor.packaging._manylinux,setuptools.config._validate_pyproject.*]
 disable_error_code = import-not-found
+
+# - The unused-ignore comment in setuptools.config._validate_pyproject.* is probably evaluated differently
+#   in different versions of Python. Also, this file should already be ignored...
+[mypy-setuptools.config._validate_pyproject.*]
+disable_error_code = unused-ignore

From 1a1d7a6f02a5e17db9d14e6771542afe1b8de70b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 7 May 2024 15:17:31 -0400
Subject: [PATCH 0632/1761] Simplified _declare_state

---
 pkg_resources/__init__.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index d3213b3237..63b0bf0bd4 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -117,11 +117,11 @@ class PEP440Warning(RuntimeWarning):
 parse_version = packaging.version.Version
 
 
-_state_vars: Dict[str, Any] = {}
+_state_vars: Dict[str, str] = {}
 
 
-def _declare_state(vartype: str, **kw: object) -> None:
-    _state_vars.update(dict.fromkeys(kw, vartype))
+def _declare_state(vartype: str, varname: str) -> None:
+    _state_vars[varname] = vartype
 
 
 def __getstate__():
@@ -2024,7 +2024,7 @@ def __init__(self, importer):
 _distribution_finders: Dict[
     type, Callable[[object, str, bool], Iterable["Distribution"]]
 ] = {}
-_declare_state('dict', _distribution_finders=_distribution_finders)
+_declare_state('dict', '_distribution_finders')
 
 
 def register_finder(importer_type, distribution_finder):
@@ -2200,9 +2200,9 @@ def resolve_egg_link(path):
 _namespace_handlers: Dict[
     type, Callable[[object, str, str, types.ModuleType], Optional[str]]
 ] = {}
-_declare_state('dict', _namespace_handlers=_namespace_handlers)
+_declare_state('dict', '_namespace_handlers')
 _namespace_packages: Dict[Optional[str], List[str]] = {}
-_declare_state('dict', _namespace_packages=_namespace_packages)
+_declare_state('dict', '_namespace_packages')
 
 
 def register_namespace_handler(importer_type, namespace_handler):
@@ -3302,7 +3302,7 @@ def _initialize_master_working_set():
     at their own risk.
     """
     working_set = WorkingSet._build_master()
-    _declare_state('object', working_set=working_set)
+    _declare_state('object', 'working_set')
 
     require = working_set.require
     iter_entry_points = working_set.iter_entry_points
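The simplification drops the ``**kw`` indirection: the old form needed a keyword argument purely to smuggle the variable name in, while the new form takes the name directly. A hypothetical side-by-side sketch (names suffixed ``_old``/``_new`` for illustration only):

    _state_vars: dict = {}

    def _declare_state_old(vartype, **kw):          # name arrives as a kwarg key
        _state_vars.update(dict.fromkeys(kw, vartype))

    def _declare_state_new(vartype, varname):       # name passed explicitly
        _state_vars[varname] = vartype

    _declare_state_old('dict', _distribution_finders={})   # throwaway value required
    _declare_state_new('dict', '_distribution_finders')    # same registration, no dummy
    assert _state_vars == {'_distribution_finders': 'dict'}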

From 47e8a11bcfe6eaea7470691a9bc3a10665c2a82b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 7 May 2024 15:19:33 -0400
Subject: [PATCH 0633/1761] Include initial_value to _declare_state

---
 pkg_resources/__init__.py | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 63b0bf0bd4..a66e701a3d 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -27,7 +27,7 @@
 import time
 import re
 import types
-from typing import Any, Callable, Dict, Iterable, List, Protocol, Optional
+from typing import Any, Callable, Dict, Iterable, List, Protocol, Optional, TypeVar
 import zipfile
 import zipimport
 import warnings
@@ -103,6 +103,8 @@
     stacklevel=2,
 )
 
+T = TypeVar("T")
+
 
 _PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
 
@@ -120,8 +122,9 @@ class PEP440Warning(RuntimeWarning):
 _state_vars: Dict[str, str] = {}
 
 
-def _declare_state(vartype: str, varname: str) -> None:
+def _declare_state(vartype: str, varname: str, initial_value: T) -> T:
     _state_vars[varname] = vartype
+    return initial_value
 
 
 def __getstate__():
@@ -2023,8 +2026,7 @@ def __init__(self, importer):
 
 _distribution_finders: Dict[
     type, Callable[[object, str, bool], Iterable["Distribution"]]
-] = {}
-_declare_state('dict', '_distribution_finders')
+] = _declare_state('dict', '_distribution_finders', {})
 
 
 def register_finder(importer_type, distribution_finder):
@@ -2199,10 +2201,10 @@ def resolve_egg_link(path):
 
 _namespace_handlers: Dict[
     type, Callable[[object, str, str, types.ModuleType], Optional[str]]
-] = {}
-_declare_state('dict', '_namespace_handlers')
-_namespace_packages: Dict[Optional[str], List[str]] = {}
-_declare_state('dict', '_namespace_packages')
+] = _declare_state('dict', '_namespace_handlers', {})
+_namespace_packages: Dict[Optional[str], List[str]] = _declare_state(
+    'dict', '_namespace_packages', {}
+)
 
 
 def register_namespace_handler(importer_type, namespace_handler):
@@ -3301,8 +3303,7 @@ def _initialize_master_working_set():
     Invocation by other packages is unsupported and done
     at their own risk.
     """
-    working_set = WorkingSet._build_master()
-    _declare_state('object', 'working_set')
+    working_set = _declare_state('object', 'working_set', WorkingSet._build_master())
 
     require = working_set.require
     iter_entry_points = working_set.iter_entry_points
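Returning ``initial_value`` is what lets the hunks above fold the annotation, the assignment, and the registration into a single statement. A minimal sketch of the resulting pattern:

    from typing import Dict, List, TypeVar

    T = TypeVar("T")
    _state_vars: Dict[str, str] = {}

    def _declare_state(vartype: str, varname: str, initial_value: T) -> T:
        _state_vars[varname] = vartype  # record the variable's kind
        return initial_value            # hand the value back to the assignment

    _namespace_packages: Dict[str, List[str]] = _declare_state(
        'dict', '_namespace_packages', {}
    )
    assert _state_vars['_namespace_packages'] == 'dict'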

From d93d2e4ccc630ae41fbb1316f939719e8f5461a1 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 7 May 2024 15:46:43 -0400
Subject: [PATCH 0634/1761] Removed `typing_extensions` from vendored
 dependencies

---
 newsfragments/4324.removal.rst                |    1 +
 .../INSTALLER                                 |    1 -
 .../typing_extensions-4.4.0.dist-info/LICENSE |  254 --
 .../METADATA                                  |  189 --
 .../typing_extensions-4.4.0.dist-info/RECORD  |    8 -
 .../REQUESTED                                 |    0
 .../typing_extensions-4.4.0.dist-info/WHEEL   |    4 -
 pkg_resources/_vendor/typing_extensions.py    | 2209 ----------------
 pkg_resources/_vendor/vendored.txt            |    2 -
 pkg_resources/extern/__init__.py              |    1 -
 .../INSTALLER                                 |    1 -
 .../typing_extensions-4.0.1.dist-info/LICENSE |  254 --
 .../METADATA                                  |   35 -
 .../typing_extensions-4.0.1.dist-info/RECORD  |    8 -
 .../REQUESTED                                 |    0
 .../typing_extensions-4.0.1.dist-info/WHEEL   |    4 -
 setuptools/_vendor/typing_extensions.py       | 2296 -----------------
 setuptools/_vendor/vendored.txt               |    2 -
 setuptools/extern/__init__.py                 |    1 -
 tools/vendored.py                             |    3 +-
 20 files changed, 2 insertions(+), 5271 deletions(-)
 create mode 100644 newsfragments/4324.removal.rst
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/REQUESTED
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/typing_extensions.py
 delete mode 100644 setuptools/_vendor/typing_extensions-4.0.1.dist-info/INSTALLER
 delete mode 100644 setuptools/_vendor/typing_extensions-4.0.1.dist-info/LICENSE
 delete mode 100644 setuptools/_vendor/typing_extensions-4.0.1.dist-info/METADATA
 delete mode 100644 setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD
 delete mode 100644 setuptools/_vendor/typing_extensions-4.0.1.dist-info/REQUESTED
 delete mode 100644 setuptools/_vendor/typing_extensions-4.0.1.dist-info/WHEEL
 delete mode 100644 setuptools/_vendor/typing_extensions.py

diff --git a/newsfragments/4324.removal.rst b/newsfragments/4324.removal.rst
new file mode 100644
index 0000000000..bd5e1cb641
--- /dev/null
+++ b/newsfragments/4324.removal.rst
@@ -0,0 +1 @@
+Removed ``typing_extensions`` from vendored dependencies -- by :user:`Avasam`
diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/INSTALLER b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/LICENSE b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/LICENSE
deleted file mode 100644
index 1df6b3b8de..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/LICENSE
+++ /dev/null
@@ -1,254 +0,0 @@
-A. HISTORY OF THE SOFTWARE
-==========================
-
-Python was created in the early 1990s by Guido van Rossum at Stichting
-Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
-as a successor of a language called ABC.  Guido remains Python's
-principal author, although it includes many contributions from others.
-
-In 1995, Guido continued his work on Python at the Corporation for
-National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
-in Reston, Virginia where he released several versions of the
-software.
-
-In May 2000, Guido and the Python core development team moved to
-BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
-year, the PythonLabs team moved to Digital Creations, which became
-Zope Corporation.  In 2001, the Python Software Foundation (PSF, see
-https://www.python.org/psf/) was formed, a non-profit organization
-created specifically to own Python-related Intellectual Property.
-Zope Corporation was a sponsoring member of the PSF.
-
-All Python releases are Open Source (see http://www.opensource.org for
-the Open Source Definition).  Historically, most, but not all, Python
-releases have also been GPL-compatible; the table below summarizes
-the various releases.
-
-    Release         Derived     Year        Owner       GPL-
-                    from                                compatible? (1)
-
-    0.9.0 thru 1.2              1991-1995   CWI         yes
-    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
-    1.6             1.5.2       2000        CNRI        no
-    2.0             1.6         2000        BeOpen.com  no
-    1.6.1           1.6         2001        CNRI        yes (2)
-    2.1             2.0+1.6.1   2001        PSF         no
-    2.0.1           2.0+1.6.1   2001        PSF         yes
-    2.1.1           2.1+2.0.1   2001        PSF         yes
-    2.1.2           2.1.1       2002        PSF         yes
-    2.1.3           2.1.2       2002        PSF         yes
-    2.2 and above   2.1.1       2001-now    PSF         yes
-
-Footnotes:
-
-(1) GPL-compatible doesn't mean that we're distributing Python under
-    the GPL.  All Python licenses, unlike the GPL, let you distribute
-    a modified version without making your changes open source.  The
-    GPL-compatible licenses make it possible to combine Python with
-    other software that is released under the GPL; the others don't.
-
-(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
-    because its license has a choice of law clause.  According to
-    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
-    is "not incompatible" with the GPL.
-
-Thanks to the many outside volunteers who have worked under Guido's
-direction to make these releases possible.
-
-
-B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
-===============================================================
-
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation;
-All Rights Reserved" are retained in Python alone or in any derivative version
-prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee.  This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
--------------------------------------------
-
-BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
-
-1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
-office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
-Individual or Organization ("Licensee") accessing and otherwise using
-this software in source or binary form and its associated
-documentation ("the Software").
-
-2. Subject to the terms and conditions of this BeOpen Python License
-Agreement, BeOpen hereby grants Licensee a non-exclusive,
-royalty-free, world-wide license to reproduce, analyze, test, perform
-and/or display publicly, prepare derivative works, distribute, and
-otherwise use the Software alone or in any derivative version,
-provided, however, that the BeOpen Python License is retained in the
-Software, alone or in any derivative version prepared by Licensee.
-
-3. BeOpen is making the Software available to Licensee on an "AS IS"
-basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
-SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
-AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
-DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-5. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-6. This License Agreement shall be governed by and interpreted in all
-respects by the law of the State of California, excluding conflict of
-law provisions.  Nothing in this License Agreement shall be deemed to
-create any relationship of agency, partnership, or joint venture
-between BeOpen and Licensee.  This License Agreement does not grant
-permission to use BeOpen trademarks or trade names in a trademark
-sense to endorse or promote products or services of Licensee, or any
-third party.  As an exception, the "BeOpen Python" logos available at
-http://www.pythonlabs.com/logos.html may be used according to the
-permissions granted on that web page.
-
-7. By copying, installing or otherwise using the software, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
----------------------------------------
-
-1. This LICENSE AGREEMENT is between the Corporation for National
-Research Initiatives, having an office at 1895 Preston White Drive,
-Reston, VA 20191 ("CNRI"), and the Individual or Organization
-("Licensee") accessing and otherwise using Python 1.6.1 software in
-source or binary form and its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, CNRI
-hereby grants Licensee a nonexclusive, royalty-free, world-wide
-license to reproduce, analyze, test, perform and/or display publicly,
-prepare derivative works, distribute, and otherwise use Python 1.6.1
-alone or in any derivative version, provided, however, that CNRI's
-License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
-1995-2001 Corporation for National Research Initiatives; All Rights
-Reserved" are retained in Python 1.6.1 alone or in any derivative
-version prepared by Licensee.  Alternately, in lieu of CNRI's License
-Agreement, Licensee may substitute the following text (omitting the
-quotes): "Python 1.6.1 is made available subject to the terms and
-conditions in CNRI's License Agreement.  This Agreement together with
-Python 1.6.1 may be located on the internet using the following
-unique, persistent identifier (known as a handle): 1895.22/1013.  This
-Agreement may also be obtained from a proxy server on the internet
-using the following URL: http://hdl.handle.net/1895.22/1013".
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python 1.6.1 or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python 1.6.1.
-
-4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
-basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. This License Agreement shall be governed by the federal
-intellectual property law of the United States, including without
-limitation the federal copyright law, and, to the extent such
-U.S. federal law does not apply, by the law of the Commonwealth of
-Virginia, excluding Virginia's conflict of law provisions.
-Notwithstanding the foregoing, with regard to derivative works based
-on Python 1.6.1 that incorporate non-separable material that was
-previously distributed under the GNU General Public License (GPL), the
-law of the Commonwealth of Virginia shall govern this License
-Agreement only as to issues arising under or with respect to
-Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
-License Agreement shall be deemed to create any relationship of
-agency, partnership, or joint venture between CNRI and Licensee.  This
-License Agreement does not grant permission to use CNRI trademarks or
-trade name in a trademark sense to endorse or promote products or
-services of Licensee, or any third party.
-
-8. By clicking on the "ACCEPT" button where indicated, or by copying,
-installing or otherwise using Python 1.6.1, Licensee agrees to be
-bound by the terms and conditions of this License Agreement.
-
-        ACCEPT
-
-
-CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
---------------------------------------------------
-
-Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
-The Netherlands.  All rights reserved.
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of Stichting Mathematisch
-Centrum or CWI not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior
-permission.
-
-STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
-THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
-FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/METADATA b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/METADATA
deleted file mode 100644
index 1ed963a12c..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/METADATA
+++ /dev/null
@@ -1,189 +0,0 @@
-Metadata-Version: 2.1
-Name: typing_extensions
-Version: 4.4.0
-Summary: Backported and Experimental Type Hints for Python 3.7+
-Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
-Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" 
-Requires-Python: >=3.7
-Description-Content-Type: text/markdown
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Environment :: Console
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Python Software Foundation License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Topic :: Software Development
-Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues
-Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md
-Project-URL: Documentation, https://typing.readthedocs.io/
-Project-URL: Home, https://github.com/python/typing_extensions
-Project-URL: Q & A, https://github.com/python/typing/discussions
-Project-URL: Repository, https://github.com/python/typing_extensions
-
-# Typing Extensions
-
-[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)
-
-## Overview
-
-The `typing_extensions` module serves two related purposes:
-
-- Enable use of new type system features on older Python versions. For example,
-  `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
-  users on previous Python versions to use it too.
-- Enable experimentation with new type system PEPs before they are accepted and
-  added to the `typing` module.
-
-New features may be added to `typing_extensions` as soon as they are specified
-in a PEP that has been added to the [python/peps](https://github.com/python/peps)
-repository. If the PEP is accepted, the feature will then be added to `typing`
-for the next CPython release. No typing PEP has been rejected so far, so we
-haven't yet figured out how to deal with that possibility.
-
-Starting with version 4.0.0, `typing_extensions` uses
-[Semantic Versioning](https://semver.org/). The
-major version is incremented for all backwards-incompatible changes.
-Therefore, it's safe to depend
-on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,
-where `x.y` is the first version that includes all features you need.
-
-`typing_extensions` supports Python versions 3.7 and higher. In the future,
-support for older Python versions will be dropped some time after that version
-reaches end of life.
-
-## Included items
-
-This module currently contains the following:
-
-- Experimental features
-
-  - `override` (see PEP 698)
-  - The `default=` argument to `TypeVar`, `ParamSpec`, and `TypeVarTuple` (see PEP 696)
-  - The `infer_variance=` argument to `TypeVar` (see PEP 695)
-
-- In `typing` since Python 3.11
-
-  - `assert_never`
-  - `assert_type`
-  - `clear_overloads`
-  - `@dataclass_transform()` (see PEP 681)
-  - `get_overloads`
-  - `LiteralString` (see PEP 675)
-  - `Never`
-  - `NotRequired` (see PEP 655)
-  - `reveal_type`
-  - `Required` (see PEP 655)
-  - `Self` (see PEP 673)
-  - `TypeVarTuple` (see PEP 646; the `typing_extensions` version supports the `default=` argument from PEP 696)
-  - `Unpack` (see PEP 646)
-
-- In `typing` since Python 3.10
-
-  - `Concatenate` (see PEP 612)
-  - `ParamSpec` (see PEP 612; the `typing_extensions` version supports the `default=` argument from PEP 696)
-  - `ParamSpecArgs` (see PEP 612)
-  - `ParamSpecKwargs` (see PEP 612)
-  - `TypeAlias` (see PEP 613)
-  - `TypeGuard` (see PEP 647)
-  - `is_typeddict`
-
-- In `typing` since Python 3.9
-
-  - `Annotated` (see PEP 593)
-
-- In `typing` since Python 3.8
-
-  - `final` (see PEP 591)
-  - `Final` (see PEP 591)
-  - `Literal` (see PEP 586)
-  - `Protocol` (see PEP 544)
-  - `runtime_checkable` (see PEP 544)
-  - `TypedDict` (see PEP 589)
-  - `get_origin` (`typing_extensions` provides this function only in Python 3.7+)
-  - `get_args` (`typing_extensions` provides this function only in Python 3.7+)
-
-- In `typing` since Python 3.7
-
-  - `OrderedDict`
-
-- In `typing` since Python 3.5 or 3.6 (see [the typing documentation](https://docs.python.org/3.10/library/typing.html) for details)
-
-  - `AsyncContextManager`
-  - `AsyncGenerator`
-  - `AsyncIterable`
-  - `AsyncIterator`
-  - `Awaitable`
-  - `ChainMap`
-  - `ClassVar` (see PEP 526)
-  - `ContextManager`
-  - `Coroutine`
-  - `Counter`
-  - `DefaultDict`
-  - `Deque`
-  - `NewType`
-  - `NoReturn`
-  - `overload`
-  - `Text`
-  - `Type`
-  - `TYPE_CHECKING`
-  - `get_type_hints`
-
-- The following have always been present in `typing`, but the `typing_extensions` versions provide
-  additional features:
-
-  - `Any` (supports inheritance since Python 3.11)
-  - `NamedTuple` (supports multiple inheritance with `Generic` since Python 3.11)
-  - `TypeVar` (see PEPs 695 and 696)
-
-# Other Notes and Limitations
-
-Certain objects were changed after they were added to `typing`, and
-`typing_extensions` provides a backport even on newer Python versions:
-
-- `TypedDict` does not store runtime information
-  about which (if any) keys are non-required in Python 3.8, and does not
-  honor the `total` keyword with old-style `TypedDict()` in Python
-  3.9.0 and 3.9.1. `TypedDict` also does not support multiple inheritance
-  with `typing.Generic` on Python <3.11.
-- `get_origin` and `get_args` lack support for `Annotated` in
-  Python 3.8 and lack support for `ParamSpecArgs` and `ParamSpecKwargs`
-  in 3.9.
-- `@final` was changed in Python 3.11 to set the `.__final__` attribute.
-- `@overload` was changed in Python 3.11 to make function overloads
-  introspectable at runtime. In order to access overloads with
-  `typing_extensions.get_overloads()`, you must use
-  `@typing_extensions.overload`.
-- `NamedTuple` was changed in Python 3.11 to allow for multiple inheritance
-  with `typing.Generic`.
-- Since Python 3.11, it has been possible to inherit from `Any` at
-  runtime. `typing_extensions.Any` also provides this capability.
-- `TypeVar` gains two additional parameters, `default=` and `infer_variance=`,
-  in the draft PEPs 695 and 696, which are being considered for inclusion
-  in Python 3.12.
-
-There are a few types whose interface was modified between different
-versions of typing. For example, `typing.Sequence` was modified to
-subclass `typing.Reversible` as of Python 3.5.3.
-
-These changes are _not_ backported to prevent subtle compatibility
-issues when mixing the differing implementations of modified classes.
-
-Certain types have incorrect runtime behavior due to limitations of older
-versions of the typing module:
-
-- `ParamSpec` and `Concatenate` will not work with `get_args` and
-  `get_origin`. Certain PEP 612 special cases in user-defined
-  `Generic`s are also not available.
-
-These types are only guaranteed to work for static type checking.
-
-## Running tests
-
-To run tests, navigate into the appropriate source directory and run
-`test_typing_extensions.py`.
-
diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD
deleted file mode 100644
index e1132566df..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/RECORD
+++ /dev/null
@@ -1,8 +0,0 @@
-__pycache__/typing_extensions.cpython-312.pyc,,
-typing_extensions-4.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-typing_extensions-4.4.0.dist-info/LICENSE,sha256=x6-2XnVXB7n7kEhziaF20-09ADHVExr95FwjcV_16JE,12787
-typing_extensions-4.4.0.dist-info/METADATA,sha256=1zSh1eMLnLkLMMC6aZSGRKx3eRnivEGDFWGSVD1zqhA,7249
-typing_extensions-4.4.0.dist-info/RECORD,,
-typing_extensions-4.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-typing_extensions-4.4.0.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81
-typing_extensions.py,sha256=ipqWiq5AHzrwczt6c26AP05Llh6a5_GaXRpOBqbogHA,80078
diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/REQUESTED b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/WHEEL b/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/WHEEL
deleted file mode 100644
index 668ba4d015..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.4.0.dist-info/WHEEL
+++ /dev/null
@@ -1,4 +0,0 @@
-Wheel-Version: 1.0
-Generator: flit 3.7.1
-Root-Is-Purelib: true
-Tag: py3-none-any
diff --git a/pkg_resources/_vendor/typing_extensions.py b/pkg_resources/_vendor/typing_extensions.py
deleted file mode 100644
index ef42417c20..0000000000
--- a/pkg_resources/_vendor/typing_extensions.py
+++ /dev/null
@@ -1,2209 +0,0 @@
-import abc
-import collections
-import collections.abc
-import functools
-import operator
-import sys
-import types as _types
-import typing
-
-
-__all__ = [
-    # Super-special typing primitives.
-    'Any',
-    'ClassVar',
-    'Concatenate',
-    'Final',
-    'LiteralString',
-    'ParamSpec',
-    'ParamSpecArgs',
-    'ParamSpecKwargs',
-    'Self',
-    'Type',
-    'TypeVar',
-    'TypeVarTuple',
-    'Unpack',
-
-    # ABCs (from collections.abc).
-    'Awaitable',
-    'AsyncIterator',
-    'AsyncIterable',
-    'Coroutine',
-    'AsyncGenerator',
-    'AsyncContextManager',
-    'ChainMap',
-
-    # Concrete collection types.
-    'ContextManager',
-    'Counter',
-    'Deque',
-    'DefaultDict',
-    'NamedTuple',
-    'OrderedDict',
-    'TypedDict',
-
-    # Structural checks, a.k.a. protocols.
-    'SupportsIndex',
-
-    # One-off things.
-    'Annotated',
-    'assert_never',
-    'assert_type',
-    'clear_overloads',
-    'dataclass_transform',
-    'get_overloads',
-    'final',
-    'get_args',
-    'get_origin',
-    'get_type_hints',
-    'IntVar',
-    'is_typeddict',
-    'Literal',
-    'NewType',
-    'overload',
-    'override',
-    'Protocol',
-    'reveal_type',
-    'runtime',
-    'runtime_checkable',
-    'Text',
-    'TypeAlias',
-    'TypeGuard',
-    'TYPE_CHECKING',
-    'Never',
-    'NoReturn',
-    'Required',
-    'NotRequired',
-]
-
-# for backward compatibility
-PEP_560 = True
-GenericMeta = type
-
-# The functions below are modified copies of typing internal helpers.
-# They are needed by _ProtocolMeta and they provide support for PEP 646.
-
-_marker = object()
-
-
-def _check_generic(cls, parameters, elen=_marker):
-    """Check correct count for parameters of a generic cls (internal helper).
-    This gives a nice error message in case of count mismatch.
-    """
-    if not elen:
-        raise TypeError(f"{cls} is not a generic class")
-    if elen is _marker:
-        if not hasattr(cls, "__parameters__") or not cls.__parameters__:
-            raise TypeError(f"{cls} is not a generic class")
-        elen = len(cls.__parameters__)
-    alen = len(parameters)
-    if alen != elen:
-        if hasattr(cls, "__parameters__"):
-            parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
-            num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
-            if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
-                return
-        raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};"
-                        f" actual {alen}, expected {elen}")
-
-
-if sys.version_info >= (3, 10):
-    def _should_collect_from_parameters(t):
-        return isinstance(
-            t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
-        )
-elif sys.version_info >= (3, 9):
-    def _should_collect_from_parameters(t):
-        return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
-else:
-    def _should_collect_from_parameters(t):
-        return isinstance(t, typing._GenericAlias) and not t._special
-
-
-def _collect_type_vars(types, typevar_types=None):
-    """Collect all type variable contained in types in order of
-    first appearance (lexicographic order). For example::
-
-        _collect_type_vars((T, List[S, T])) == (T, S)
-    """
-    if typevar_types is None:
-        typevar_types = typing.TypeVar
-    tvars = []
-    for t in types:
-        if (
-            isinstance(t, typevar_types) and
-            t not in tvars and
-            not _is_unpack(t)
-        ):
-            tvars.append(t)
-        if _should_collect_from_parameters(t):
-            tvars.extend([t for t in t.__parameters__ if t not in tvars])
-    return tuple(tvars)
-
-
-NoReturn = typing.NoReturn
-
-# Some unconstrained type variables.  These are used by the container types.
-# (These are not for export.)
-T = typing.TypeVar('T')  # Any type.
-KT = typing.TypeVar('KT')  # Key type.
-VT = typing.TypeVar('VT')  # Value type.
-T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
-T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
-
-
-if sys.version_info >= (3, 11):
-    from typing import Any
-else:
-
-    class _AnyMeta(type):
-        def __instancecheck__(self, obj):
-            if self is Any:
-                raise TypeError("typing_extensions.Any cannot be used with isinstance()")
-            return super().__instancecheck__(obj)
-
-        def __repr__(self):
-            if self is Any:
-                return "typing_extensions.Any"
-            return super().__repr__()
-
-    class Any(metaclass=_AnyMeta):
-        """Special type indicating an unconstrained type.
-        - Any is compatible with every type.
-        - Any assumed to have all methods.
-        - All values assumed to be instances of Any.
-        Note that all the above statements are true from the point of view of
-        static type checkers. At runtime, Any should not be used with instance
-        checks.
-        """
-        def __new__(cls, *args, **kwargs):
-            if cls is Any:
-                raise TypeError("Any cannot be instantiated")
-            return super().__new__(cls, *args, **kwargs)
-
-
-ClassVar = typing.ClassVar
-
-# On older versions of typing there is an internal class named "Final".
-# 3.8+
-if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
-    Final = typing.Final
-# 3.7
-else:
-    class _FinalForm(typing._SpecialForm, _root=True):
-
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type.')
-            return typing._GenericAlias(self, (item,))
-
-    Final = _FinalForm('Final',
-                       doc="""A special typing construct to indicate that a name
-                       cannot be re-assigned or overridden in a subclass.
-                       For example:
-
-                           MAX_SIZE: Final = 9000
-                           MAX_SIZE += 1  # Error reported by type checker
-
-                           class Connection:
-                               TIMEOUT: Final[int] = 10
-                           class FastConnector(Connection):
-                               TIMEOUT = 1  # Error reported by type checker
-
-                       There is no runtime checking of these properties.""")
-
-if sys.version_info >= (3, 11):
-    final = typing.final
-else:
-    # @final exists in 3.8+, but we backport it for all versions
-    # before 3.11 to keep support for the __final__ attribute.
-    # See https://bugs.python.org/issue46342
-    def final(f):
-        """This decorator can be used to indicate to type checkers that
-        the decorated method cannot be overridden, and decorated class
-        cannot be subclassed. For example:
-
-            class Base:
-                @final
-                def done(self) -> None:
-                    ...
-            class Sub(Base):
-                def done(self) -> None:  # Error reported by type checker
-                    ...
-            @final
-            class Leaf:
-                ...
-            class Other(Leaf):  # Error reported by type checker
-                ...
-
-        There is no runtime checking of these properties. The decorator
-        sets the ``__final__`` attribute to ``True`` on the decorated object
-        to allow runtime introspection.
-        """
-        try:
-            f.__final__ = True
-        except (AttributeError, TypeError):
-            # Skip the attribute silently if it is not writable.
-            # AttributeError happens if the object has __slots__ or a
-            # read-only property, TypeError if it's a builtin class.
-            pass
-        return f
-
-
-def IntVar(name):
-    return typing.TypeVar(name)
-
-
-# 3.8+:
-if hasattr(typing, 'Literal'):
-    Literal = typing.Literal
-# 3.7:
-else:
-    class _LiteralForm(typing._SpecialForm, _root=True):
-
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            return typing._GenericAlias(self, parameters)
-
-    Literal = _LiteralForm('Literal',
-                           doc="""A type that can be used to indicate to type checkers
-                           that the corresponding value has a value literally equivalent
-                           to the provided parameter. For example:
-
-                               var: Literal[4] = 4
-
-                           The type checker understands that 'var' is literally equal to
-                           the value 4 and no other value.
-
-                           Literal[...] cannot be subclassed. There is no runtime
-                           checking verifying that the parameter is actually a value
-                           instead of a type.""")
-
-
-_overload_dummy = typing._overload_dummy  # noqa
-
-
-if hasattr(typing, "get_overloads"):  # 3.11+
-    overload = typing.overload
-    get_overloads = typing.get_overloads
-    clear_overloads = typing.clear_overloads
-else:
-    # {module: {qualname: {firstlineno: func}}}
-    _overload_registry = collections.defaultdict(
-        functools.partial(collections.defaultdict, dict)
-    )
-
-    def overload(func):
-        """Decorator for overloaded functions/methods.
-
-        In a stub file, place two or more stub definitions for the same
-        function in a row, each decorated with @overload.  For example:
-
-        @overload
-        def utf8(value: None) -> None: ...
-        @overload
-        def utf8(value: bytes) -> bytes: ...
-        @overload
-        def utf8(value: str) -> bytes: ...
-
-        In a non-stub file (i.e. a regular .py file), do the same but
-        follow it with an implementation.  The implementation should *not*
-        be decorated with @overload.  For example:
-
-        @overload
-        def utf8(value: None) -> None: ...
-        @overload
-        def utf8(value: bytes) -> bytes: ...
-        @overload
-        def utf8(value: str) -> bytes: ...
-        def utf8(value):
-            # implementation goes here
-
-        The overloads for a function can be retrieved at runtime using the
-        get_overloads() function.
-        """
-        # classmethod and staticmethod
-        f = getattr(func, "__func__", func)
-        try:
-            _overload_registry[f.__module__][f.__qualname__][
-                f.__code__.co_firstlineno
-            ] = func
-        except AttributeError:
-            # Not a normal function; ignore.
-            pass
-        return _overload_dummy
-
-    def get_overloads(func):
-        """Return all defined overloads for *func* as a sequence."""
-        # classmethod and staticmethod
-        f = getattr(func, "__func__", func)
-        if f.__module__ not in _overload_registry:
-            return []
-        mod_dict = _overload_registry[f.__module__]
-        if f.__qualname__ not in mod_dict:
-            return []
-        return list(mod_dict[f.__qualname__].values())
-
-    def clear_overloads():
-        """Clear all overloads in the registry."""
-        _overload_registry.clear()
-
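-# Editor's sketch (illustration only; not part of the vendored module):
-# registering and retrieving overloads. The implementation itself is not
-# decorated with @overload. ``_demo_utf8`` is a hypothetical name.
-@overload
-def _demo_utf8(value: None) -> None: ...
-@overload
-def _demo_utf8(value: str) -> bytes: ...
-def _demo_utf8(value):
-    return None if value is None else value.encode('utf-8')
-
-assert len(get_overloads(_demo_utf8)) == 2
-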
-
-# This is not a real generic class.  Don't use outside annotations.
-Type = typing.Type
-
-# Various ABCs mimicking those in collections.abc.
-# A few are simply re-exported for completeness.
-
-
-Awaitable = typing.Awaitable
-Coroutine = typing.Coroutine
-AsyncIterable = typing.AsyncIterable
-AsyncIterator = typing.AsyncIterator
-Deque = typing.Deque
-ContextManager = typing.ContextManager
-AsyncContextManager = typing.AsyncContextManager
-DefaultDict = typing.DefaultDict
-
-# 3.7.2+
-if hasattr(typing, 'OrderedDict'):
-    OrderedDict = typing.OrderedDict
-# 3.7.0-3.7.2
-else:
-    OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))
-
-Counter = typing.Counter
-ChainMap = typing.ChainMap
-AsyncGenerator = typing.AsyncGenerator
-NewType = typing.NewType
-Text = typing.Text
-TYPE_CHECKING = typing.TYPE_CHECKING
-
-
-_PROTO_WHITELIST = ['Callable', 'Awaitable',
-                    'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
-                    'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
-                    'ContextManager', 'AsyncContextManager']
-
-
-def _get_protocol_attrs(cls):
-    attrs = set()
-    for base in cls.__mro__[:-1]:  # without object
-        if base.__name__ in ('Protocol', 'Generic'):
-            continue
-        annotations = getattr(base, '__annotations__', {})
-        for attr in list(base.__dict__.keys()) + list(annotations.keys()):
-            if (not attr.startswith('_abc_') and attr not in (
-                    '__abstractmethods__', '__annotations__', '__weakref__',
-                    '_is_protocol', '_is_runtime_protocol', '__dict__',
-                    '__args__', '__slots__',
-                    '__next_in_mro__', '__parameters__', '__origin__',
-                    '__orig_bases__', '__extra__', '__tree_hash__',
-                    '__doc__', '__subclasshook__', '__init__', '__new__',
-                    '__module__', '_MutableMapping__marker', '_gorg')):
-                attrs.add(attr)
-    return attrs
-
-
-def _is_callable_members_only(cls):
-    return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
-
-
-def _maybe_adjust_parameters(cls):
-    """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__.
-
-    The contents of this function are very similar
-    to logic found in typing.Generic.__init_subclass__
-    on the CPython main branch.
-    """
-    tvars = []
-    if '__orig_bases__' in cls.__dict__:
-        tvars = typing._collect_type_vars(cls.__orig_bases__)
-        # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
-        # If found, tvars must be a subset of it.
-        # If not found, tvars is it.
-        # Also check for and reject plain Generic,
-        # and reject multiple Generic[...] and/or Protocol[...].
-        gvars = None
-        for base in cls.__orig_bases__:
-            if (isinstance(base, typing._GenericAlias) and
-                    base.__origin__ in (typing.Generic, Protocol)):
-                # for error messages
-                the_base = base.__origin__.__name__
-                if gvars is not None:
-                    raise TypeError(
-                        "Cannot inherit from Generic[...]"
-                        " and/or Protocol[...] multiple types.")
-                gvars = base.__parameters__
-        if gvars is None:
-            gvars = tvars
-        else:
-            tvarset = set(tvars)
-            gvarset = set(gvars)
-            if not tvarset <= gvarset:
-                s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
-                s_args = ', '.join(str(g) for g in gvars)
-                raise TypeError(f"Some type variables ({s_vars}) are"
-                                f" not listed in {the_base}[{s_args}]")
-            tvars = gvars
-    cls.__parameters__ = tuple(tvars)
-
-
-# 3.8+
-if hasattr(typing, 'Protocol'):
-    Protocol = typing.Protocol
-# 3.7
-else:
-
-    def _no_init(self, *args, **kwargs):
-        if type(self)._is_protocol:
-            raise TypeError('Protocols cannot be instantiated')
-
-    class _ProtocolMeta(abc.ABCMeta):  # noqa: B024
-        # This metaclass is a bit unfortunate and exists only because of the lack
-        # of __instancehook__.
-        def __instancecheck__(cls, instance):
-            # We need this method for situations where attributes are
-            # assigned in __init__.
-            if ((not getattr(cls, '_is_protocol', False) or
-                 _is_callable_members_only(cls)) and
-                    issubclass(instance.__class__, cls)):
-                return True
-            if cls._is_protocol:
-                if all(hasattr(instance, attr) and
-                       (not callable(getattr(cls, attr, None)) or
-                        getattr(instance, attr) is not None)
-                       for attr in _get_protocol_attrs(cls)):
-                    return True
-            return super().__instancecheck__(instance)
-
-    class Protocol(metaclass=_ProtocolMeta):
-        # There is quite a lot of overlapping code with typing.Generic.
-        # Unfortunately it is hard to avoid this while these live in two different
-        # modules. The duplicated code will be removed when Protocol is moved to typing.
-        """Base class for protocol classes. Protocol classes are defined as::
-
-            class Proto(Protocol):
-                def meth(self) -> int:
-                    ...
-
-        Such classes are primarily used with static type checkers that recognize
-        structural subtyping (static duck-typing), for example::
-
-            class C:
-                def meth(self) -> int:
-                    return 0
-
-            def func(x: Proto) -> int:
-                return x.meth()
-
-            func(C())  # Passes static type check
-
-        See PEP 544 for details. Protocol classes decorated with
-        @typing_extensions.runtime act as simple-minded runtime protocol that checks
-        only the presence of given attributes, ignoring their type signatures.
-
-        Protocol classes can be generic, they are defined as::
-
-            class GenProto(Protocol[T]):
-                def meth(self) -> T:
-                    ...
-        """
-        __slots__ = ()
-        _is_protocol = True
-
-        def __new__(cls, *args, **kwds):
-            if cls is Protocol:
-                raise TypeError("Type Protocol cannot be instantiated; "
-                                "it can only be used as a base class")
-            return super().__new__(cls)
-
-        @typing._tp_cache
-        def __class_getitem__(cls, params):
-            if not isinstance(params, tuple):
-                params = (params,)
-            if not params and cls is not typing.Tuple:
-                raise TypeError(
-                    f"Parameter list to {cls.__qualname__}[...] cannot be empty")
-            msg = "Parameters to generic types must be types."
-            params = tuple(typing._type_check(p, msg) for p in params)  # noqa
-            if cls is Protocol:
-                # Generic can only be subscripted with unique type variables.
-                if not all(isinstance(p, typing.TypeVar) for p in params):
-                    i = 0
-                    while isinstance(params[i], typing.TypeVar):
-                        i += 1
-                    raise TypeError(
-                        "Parameters to Protocol[...] must all be type variables."
-                        f" Parameter {i + 1} is {params[i]}")
-                if len(set(params)) != len(params):
-                    raise TypeError(
-                        "Parameters to Protocol[...] must all be unique")
-            else:
-                # Subscripting a regular Generic subclass.
-                _check_generic(cls, params, len(cls.__parameters__))
-            return typing._GenericAlias(cls, params)
-
-        def __init_subclass__(cls, *args, **kwargs):
-            if '__orig_bases__' in cls.__dict__:
-                error = typing.Generic in cls.__orig_bases__
-            else:
-                error = typing.Generic in cls.__bases__
-            if error:
-                raise TypeError("Cannot inherit from plain Generic")
-            _maybe_adjust_parameters(cls)
-
-            # Determine if this is a protocol or a concrete subclass.
-            if not cls.__dict__.get('_is_protocol', None):
-                cls._is_protocol = any(b is Protocol for b in cls.__bases__)
-
-            # Set (or override) the protocol subclass hook.
-            def _proto_hook(other):
-                if not cls.__dict__.get('_is_protocol', None):
-                    return NotImplemented
-                if not getattr(cls, '_is_runtime_protocol', False):
-                    if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
-                        return NotImplemented
-                    raise TypeError("Instance and class checks can only be used with"
-                                    " @runtime protocols")
-                if not _is_callable_members_only(cls):
-                    if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
-                        return NotImplemented
-                    raise TypeError("Protocols with non-method members"
-                                    " don't support issubclass()")
-                if not isinstance(other, type):
-                    # Same error as for issubclass(1, int)
-                    raise TypeError('issubclass() arg 1 must be a class')
-                for attr in _get_protocol_attrs(cls):
-                    for base in other.__mro__:
-                        if attr in base.__dict__:
-                            if base.__dict__[attr] is None:
-                                return NotImplemented
-                            break
-                        annotations = getattr(base, '__annotations__', {})
-                        if (isinstance(annotations, typing.Mapping) and
-                                attr in annotations and
-                                isinstance(other, _ProtocolMeta) and
-                                other._is_protocol):
-                            break
-                    else:
-                        return NotImplemented
-                return True
-            if '__subclasshook__' not in cls.__dict__:
-                cls.__subclasshook__ = _proto_hook
-
-            # We have nothing more to do for non-protocols.
-            if not cls._is_protocol:
-                return
-
-            # Check consistency of bases.
-            for base in cls.__bases__:
-                if not (base in (object, typing.Generic) or
-                        base.__module__ == 'collections.abc' and
-                        base.__name__ in _PROTO_WHITELIST or
-                        isinstance(base, _ProtocolMeta) and base._is_protocol):
-                    raise TypeError('Protocols can only inherit from other'
-                                    f' protocols, got {repr(base)}')
-            cls.__init__ = _no_init
-
-
-# 3.8+
-if hasattr(typing, 'runtime_checkable'):
-    runtime_checkable = typing.runtime_checkable
-# 3.7
-else:
-    def runtime_checkable(cls):
-        """Mark a protocol class as a runtime protocol, so that it
-        can be used with isinstance() and issubclass(). Raise TypeError
-        if applied to a non-protocol class.
-
-        This allows a simple-minded structural check very similar to the
-        one-offs in collections.abc such as Hashable.
-        """
-        if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
-            raise TypeError('@runtime_checkable can be only applied to protocol classes,'
-                            f' got {cls!r}')
-        cls._is_runtime_protocol = True
-        return cls
-
-
-# Exists for backwards compatibility.
-runtime = runtime_checkable
-
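-# Editor's sketch (illustration only; not part of the vendored module):
-# a runtime-checkable protocol checked structurally with isinstance().
-# ``_DemoClosable`` and ``_DemoFile`` are hypothetical names.
-@runtime_checkable
-class _DemoClosable(Protocol):
-    def close(self) -> None: ...
-
-class _DemoFile:
-    def close(self) -> None:
-        pass
-
-assert isinstance(_DemoFile(), _DemoClosable)    # has a close() method
-assert not isinstance(object(), _DemoClosable)   # lacks close()
-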
-
-# 3.8+
-if hasattr(typing, 'SupportsIndex'):
-    SupportsIndex = typing.SupportsIndex
-# 3.7
-else:
-    @runtime_checkable
-    class SupportsIndex(Protocol):
-        __slots__ = ()
-
-        @abc.abstractmethod
-        def __index__(self) -> int:
-            pass
-
-
-if hasattr(typing, "Required"):
-    # The standard library TypedDict in Python 3.8 does not store runtime information
-    # about which (if any) keys are optional.  See https://bugs.python.org/issue38834
-    # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
-    # keyword with old-style TypedDict().  See https://bugs.python.org/issue42059
-    # The standard library TypedDict below Python 3.11 does not store runtime
-    # information about optional and required keys when using Required or NotRequired.
-    # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
-    TypedDict = typing.TypedDict
-    _TypedDictMeta = typing._TypedDictMeta
-    is_typeddict = typing.is_typeddict
-else:
-    def _check_fails(cls, other):
-        try:
-            if sys._getframe(1).f_globals['__name__'] not in ['abc',
-                                                              'functools',
-                                                              'typing']:
-                # Typed dicts are only for static structural subtyping.
-                raise TypeError('TypedDict does not support instance and class checks')
-        except (AttributeError, ValueError):
-            pass
-        return False
-
-    def _dict_new(*args, **kwargs):
-        if not args:
-            raise TypeError('TypedDict.__new__(): not enough arguments')
-        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
-        return dict(*args, **kwargs)
-
-    _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)'
-
-    def _typeddict_new(*args, total=True, **kwargs):
-        if not args:
-            raise TypeError('TypedDict.__new__(): not enough arguments')
-        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
-        if args:
-            typename, args = args[0], args[1:]  # allow the "_typename" keyword to be passed
-        elif '_typename' in kwargs:
-            typename = kwargs.pop('_typename')
-            import warnings
-            warnings.warn("Passing '_typename' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError("TypedDict.__new__() missing 1 required positional "
-                            "argument: '_typename'")
-        if args:
-            try:
-                fields, = args  # allow the "_fields" keyword to be passed
-            except ValueError:
-                raise TypeError('TypedDict.__new__() takes from 2 to 3 '
-                                f'positional arguments but {len(args) + 2} '
-                                'were given')
-        elif '_fields' in kwargs and len(kwargs) == 1:
-            fields = kwargs.pop('_fields')
-            import warnings
-            warnings.warn("Passing '_fields' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            fields = None
-
-        if fields is None:
-            fields = kwargs
-        elif kwargs:
-            raise TypeError("TypedDict takes either a dict or keyword arguments,"
-                            " but not both")
-
-        ns = {'__annotations__': dict(fields)}
-        try:
-            # Setting correct module is necessary to make typed dict classes pickleable.
-            ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
-        except (AttributeError, ValueError):
-            pass
-
-        return _TypedDictMeta(typename, (), ns, total=total)
-
-    _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
-                                         ' /, *, total=True, **kwargs)')
-
-    class _TypedDictMeta(type):
-        def __init__(cls, name, bases, ns, total=True):
-            super().__init__(name, bases, ns)
-
-        def __new__(cls, name, bases, ns, total=True):
-            # Create new typed dict class object.
-            # This method is called directly when TypedDict is subclassed,
-            # or via _typeddict_new when TypedDict is instantiated. This way
-            # TypedDict supports all three syntaxes described in its docstring.
-            # Subclasses and instances of TypedDict return actual dictionaries
-            # via _dict_new.
-            ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
-            # Don't insert typing.Generic into __bases__ here,
-            # or Generic.__init_subclass__ will raise TypeError
-            # in the super().__new__() call.
-            # Instead, monkey-patch __bases__ onto the class after it's been created.
-            tp_dict = super().__new__(cls, name, (dict,), ns)
-
-            if any(issubclass(base, typing.Generic) for base in bases):
-                tp_dict.__bases__ = (typing.Generic, dict)
-                _maybe_adjust_parameters(tp_dict)
-
-            annotations = {}
-            own_annotations = ns.get('__annotations__', {})
-            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
-            own_annotations = {
-                n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
-            }
-            required_keys = set()
-            optional_keys = set()
-
-            for base in bases:
-                annotations.update(base.__dict__.get('__annotations__', {}))
-                required_keys.update(base.__dict__.get('__required_keys__', ()))
-                optional_keys.update(base.__dict__.get('__optional_keys__', ()))
-
-            annotations.update(own_annotations)
-            for annotation_key, annotation_type in own_annotations.items():
-                annotation_origin = get_origin(annotation_type)
-                if annotation_origin is Annotated:
-                    annotation_args = get_args(annotation_type)
-                    if annotation_args:
-                        annotation_type = annotation_args[0]
-                        annotation_origin = get_origin(annotation_type)
-
-                if annotation_origin is Required:
-                    required_keys.add(annotation_key)
-                elif annotation_origin is NotRequired:
-                    optional_keys.add(annotation_key)
-                elif total:
-                    required_keys.add(annotation_key)
-                else:
-                    optional_keys.add(annotation_key)
-
-            tp_dict.__annotations__ = annotations
-            tp_dict.__required_keys__ = frozenset(required_keys)
-            tp_dict.__optional_keys__ = frozenset(optional_keys)
-            if not hasattr(tp_dict, '__total__'):
-                tp_dict.__total__ = total
-            return tp_dict
-
-        __instancecheck__ = __subclasscheck__ = _check_fails
-
-    TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
-    TypedDict.__module__ = __name__
-    TypedDict.__doc__ = \
-        """A simple typed name space. At runtime it is equivalent to a plain dict.
-
-        TypedDict creates a dictionary type that expects all of its
-        instances to have a certain set of keys, with each key
-        associated with a value of a consistent type. This expectation
-        is not checked at runtime but is only enforced by type checkers.
-        Usage::
-
-            class Point2D(TypedDict):
-                x: int
-                y: int
-                label: str
-
-            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
-            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
-
-            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
-
-        The type info can be accessed via the Point2D.__annotations__ dict, and
-        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
-        TypedDict supports two additional equivalent forms::
-
-            Point2D = TypedDict('Point2D', x=int, y=int, label=str)
-            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
-
-        The class syntax is only supported in Python 3.6+, while the two other
-        syntax forms also work for Python 2.7 and 3.2+.
-        """
-
-    if hasattr(typing, "_TypedDictMeta"):
-        _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
-    else:
-        _TYPEDDICT_TYPES = (_TypedDictMeta,)
-
-    def is_typeddict(tp):
-        """Check if an annotation is a TypedDict class
-
-        For example::
-            class Film(TypedDict):
-                title: str
-                year: int
-
-            is_typeddict(Film)  # => True
-            is_typeddict(Union[list, str])  # => False
-        """
-        return isinstance(tp, tuple(_TYPEDDICT_TYPES))
-
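-# Editor's sketch (illustration only; not part of the vendored module):
-# a TypedDict is a plain dict at runtime but records its keys for
-# introspection. ``_DemoPoint`` is a hypothetical name.
-class _DemoPoint(TypedDict):
-    x: int
-    y: int
-
-assert _DemoPoint(x=1, y=2) == {'x': 1, 'y': 2}
-assert _DemoPoint.__required_keys__ == frozenset({'x', 'y'})
-assert is_typeddict(_DemoPoint)
-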
-
-if hasattr(typing, "assert_type"):
-    assert_type = typing.assert_type
-
-else:
-    def assert_type(__val, __typ):
-        """Assert (to the type checker) that the value is of the given type.
-
-        When the type checker encounters a call to assert_type(), it
-        emits an error if the value is not of the specified type::
-
-            def greet(name: str) -> None:
-                assert_type(name, str)  # ok
-                assert_type(name, int)  # type checker error
-
-        At runtime this returns the first argument unchanged and otherwise
-        does nothing.
-        """
-        return __val
-
-
-if hasattr(typing, "Required"):
-    get_type_hints = typing.get_type_hints
-else:
-    import functools
-    import types
-
-    # replaces _strip_annotations()
-    def _strip_extras(t):
-        """Strips Annotated, Required and NotRequired from a given type."""
-        if isinstance(t, _AnnotatedAlias):
-            return _strip_extras(t.__origin__)
-        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired):
-            return _strip_extras(t.__args__[0])
-        if isinstance(t, typing._GenericAlias):
-            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
-            if stripped_args == t.__args__:
-                return t
-            return t.copy_with(stripped_args)
-        if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias):
-            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
-            if stripped_args == t.__args__:
-                return t
-            return types.GenericAlias(t.__origin__, stripped_args)
-        if hasattr(types, "UnionType") and isinstance(t, types.UnionType):
-            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
-            if stripped_args == t.__args__:
-                return t
-            return functools.reduce(operator.or_, stripped_args)
-
-        return t
-
-    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
-        """Return type hints for an object.
-
-        This is often the same as obj.__annotations__, but it handles
-        forward references encoded as string literals, adds Optional[t] if a
-        default value equal to None is set and recursively replaces all
-        'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
-        (unless 'include_extras=True').
-
-        The argument may be a module, class, method, or function. The annotations
-        are returned as a dictionary. For classes, annotations include also
-        inherited members.
-
-        TypeError is raised if the argument is not of a type that can contain
-        annotations, and an empty dictionary is returned if no annotations are
-        present.
-
-        BEWARE -- the behavior of globalns and localns is counterintuitive
-        (unless you are familiar with how eval() and exec() work).  The
-        search order is locals first, then globals.
-
-        - If no dict arguments are passed, an attempt is made to use the
-          globals from obj (or the respective module's globals for classes),
-          and these are also used as the locals.  If the object does not appear
-          to have globals, an empty dictionary is used.
-
-        - If one dict argument is passed, it is used for both globals and
-          locals.
-
-        - If two dict arguments are passed, they specify globals and
-          locals, respectively.
-        """
-        if hasattr(typing, "Annotated"):
-            hint = typing.get_type_hints(
-                obj, globalns=globalns, localns=localns, include_extras=True
-            )
-        else:
-            hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
-        if include_extras:
-            return hint
-        return {k: _strip_extras(t) for k, t in hint.items()}
-
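-# Editor's sketch (illustration only; not part of the vendored module):
-# a standalone snippet showing the ``include_extras`` switch; assumes
-# ``from typing_extensions import Annotated, get_type_hints``.
-# ``_demo_scale`` is a hypothetical name.
-def _demo_scale(x: Annotated[int, 'pixels']) -> None:
-    pass
-
-assert get_type_hints(_demo_scale)['x'] is int
-hints = get_type_hints(_demo_scale, include_extras=True)
-assert hints['x'].__metadata__ == ('pixels',)
-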
-
-# Python 3.9+ has PEP 593 (Annotated)
-if hasattr(typing, 'Annotated'):
-    Annotated = typing.Annotated
-    # Not exported and not a public API, but needed for get_origin() and get_args()
-    # to work.
-    _AnnotatedAlias = typing._AnnotatedAlias
-# 3.7-3.8
-else:
-    class _AnnotatedAlias(typing._GenericAlias, _root=True):
-        """Runtime representation of an annotated type.
-
-        At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
-        with extra annotations. The alias behaves like a normal typing alias:
-        instantiating it is the same as instantiating the underlying type, and
-        binding it to types is also the same.
-        """
-        def __init__(self, origin, metadata):
-            if isinstance(origin, _AnnotatedAlias):
-                metadata = origin.__metadata__ + metadata
-                origin = origin.__origin__
-            super().__init__(origin, origin)
-            self.__metadata__ = metadata
-
-        def copy_with(self, params):
-            assert len(params) == 1
-            new_type = params[0]
-            return _AnnotatedAlias(new_type, self.__metadata__)
-
-        def __repr__(self):
-            return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
-                    f"{', '.join(repr(a) for a in self.__metadata__)}]")
-
-        def __reduce__(self):
-            return operator.getitem, (
-                Annotated, (self.__origin__,) + self.__metadata__
-            )
-
-        def __eq__(self, other):
-            if not isinstance(other, _AnnotatedAlias):
-                return NotImplemented
-            if self.__origin__ != other.__origin__:
-                return False
-            return self.__metadata__ == other.__metadata__
-
-        def __hash__(self):
-            return hash((self.__origin__, self.__metadata__))
-
-    class Annotated:
-        """Add context specific metadata to a type.
-
-        Example: Annotated[int, runtime_check.Unsigned] indicates to the
-        hypothetical runtime_check module that this type is an unsigned int.
-        Every other consumer of this type can ignore this metadata and treat
-        this type as int.
-
-        The first argument to Annotated must be a valid type (and will be in
-        the __origin__ field), the remaining arguments are kept as a tuple in
-        the __metadata__ field.
-
-        Details:
-
-        - It's an error to call `Annotated` with less than two arguments.
-        - Nested Annotated are flattened::
-
-            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
-
-        - Instantiating an annotated type is equivalent to instantiating the
-        underlying type::
-
-            Annotated[C, Ann1](5) == C(5)
-
-        - Annotated can be used as a generic type alias::
-
-            Optimized = Annotated[T, runtime.Optimize()]
-            Optimized[int] == Annotated[int, runtime.Optimize()]
-
-            OptimizedList = Annotated[List[T], runtime.Optimize()]
-            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
-        """
-
-        __slots__ = ()
-
-        def __new__(cls, *args, **kwargs):
-            raise TypeError("Type Annotated cannot be instantiated.")
-
-        @typing._tp_cache
-        def __class_getitem__(cls, params):
-            if not isinstance(params, tuple) or len(params) < 2:
-                raise TypeError("Annotated[...] should be used "
-                                "with at least two arguments (a type and an "
-                                "annotation).")
-            allowed_special_forms = (ClassVar, Final)
-            if get_origin(params[0]) in allowed_special_forms:
-                origin = params[0]
-            else:
-                msg = "Annotated[t, ...]: t must be a type."
-                origin = typing._type_check(params[0], msg)
-            metadata = tuple(params[1:])
-            return _AnnotatedAlias(origin, metadata)
-
-        def __init_subclass__(cls, *args, **kwargs):
-            raise TypeError(
-                f"Cannot subclass {cls.__module__}.Annotated"
-            )
-
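-# Editor's sketch (illustration only; not part of the vendored module):
-# nested Annotated forms are flattened and the metadata stays
-# introspectable; assumes ``from typing_extensions import Annotated,
-# get_args``.
-assert Annotated[Annotated[int, 'a'], 'b'] == Annotated[int, 'a', 'b']
-assert get_args(Annotated[int, 'a', 'b']) == (int, 'a', 'b')
-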
-# Python 3.8 has get_origin() and get_args() but those implementations aren't
-# Annotated-aware, so we can't use those. Python 3.9's versions don't support
-# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
-if sys.version_info[:2] >= (3, 10):
-    get_origin = typing.get_origin
-    get_args = typing.get_args
-# 3.7-3.9
-else:
-    try:
-        # 3.9+
-        from typing import _BaseGenericAlias
-    except ImportError:
-        _BaseGenericAlias = typing._GenericAlias
-    try:
-        # 3.9+
-        from typing import GenericAlias as _typing_GenericAlias
-    except ImportError:
-        _typing_GenericAlias = typing._GenericAlias
-
-    def get_origin(tp):
-        """Get the unsubscripted version of a type.
-
-        This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
-        and Annotated. Return None for unsupported types. Examples::
-
-            get_origin(Literal[42]) is Literal
-            get_origin(int) is None
-            get_origin(ClassVar[int]) is ClassVar
-            get_origin(Generic) is Generic
-            get_origin(Generic[T]) is Generic
-            get_origin(Union[T, int]) is Union
-            get_origin(List[Tuple[T, T]][int]) == list
-            get_origin(P.args) is P
-        """
-        if isinstance(tp, _AnnotatedAlias):
-            return Annotated
-        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
-                           ParamSpecArgs, ParamSpecKwargs)):
-            return tp.__origin__
-        if tp is typing.Generic:
-            return typing.Generic
-        return None
-
-    def get_args(tp):
-        """Get type arguments with all substitutions performed.
-
-        For unions, basic simplifications used by Union constructor are performed.
-        Examples::
-            get_args(Dict[str, int]) == (str, int)
-            get_args(int) == ()
-            get_args(Union[int, Union[T, int], str][int]) == (int, str)
-            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
-            get_args(Callable[[], T][int]) == ([], int)
-        """
-        if isinstance(tp, _AnnotatedAlias):
-            return (tp.__origin__,) + tp.__metadata__
-        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
-            if getattr(tp, "_special", False):
-                return ()
-            res = tp.__args__
-            if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
-                res = (list(res[:-1]), res[-1])
-            return res
-        return ()
-
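-# Editor's sketch (illustration only; not part of the vendored module):
-# the Annotated-aware accessors exercised on stock typing forms; assumes
-# ``from typing import Callable, Dict, List, Union``.
-assert get_origin(List[int]) is list
-assert get_origin(Union[int, str]) is Union
-assert get_args(Dict[str, int]) == (str, int)
-assert get_args(Callable[[int], str]) == ([int], str)
-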
-
-# 3.10+
-if hasattr(typing, 'TypeAlias'):
-    TypeAlias = typing.TypeAlias
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
-    class _TypeAliasForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-    @_TypeAliasForm
-    def TypeAlias(self, parameters):
-        """Special marker indicating that an assignment should
-        be recognized as a proper type alias definition by type
-        checkers.
-
-        For example::
-
-            Predicate: TypeAlias = Callable[..., bool]
-
-        It's invalid when used anywhere except as in the example above.
-        """
-        raise TypeError(f"{self} is not subscriptable")
-# 3.7-3.8
-else:
-    class _TypeAliasForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-    TypeAlias = _TypeAliasForm('TypeAlias',
-                               doc="""Special marker indicating that an assignment should
-                               be recognized as a proper type alias definition by type
-                               checkers.
-
-                               For example::
-
-                                   Predicate: TypeAlias = Callable[..., bool]
-
-                               It's invalid when used anywhere except as in the example
-                               above.""")
-
-
-class _DefaultMixin:
-    """Mixin for TypeVarLike defaults."""
-
-    __slots__ = ()
-
-    def __init__(self, default):
-        if isinstance(default, (tuple, list)):
-            self.__default__ = tuple(typing._type_check(d, "Default must be a type")
-                                     for d in default)
-        elif default:
-            self.__default__ = typing._type_check(default, "Default must be a type")
-        else:
-            self.__default__ = None
-
-
-# Add default and infer_variance parameters from PEP 696 and 695
-class TypeVar(typing.TypeVar, _DefaultMixin, _root=True):
-    """Type variable."""
-
-    __module__ = 'typing'
-
-    def __init__(self, name, *constraints, bound=None,
-                 covariant=False, contravariant=False,
-                 default=None, infer_variance=False):
-        super().__init__(name, *constraints, bound=bound, covariant=covariant,
-                         contravariant=contravariant)
-        _DefaultMixin.__init__(self, default)
-        self.__infer_variance__ = infer_variance
-
-        # for pickling:
-        try:
-            def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
-        except (AttributeError, ValueError):
-            def_mod = None
-        if def_mod != 'typing_extensions':
-            self.__module__ = def_mod
-
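-# Editor's sketch (illustration only; not part of the vendored module):
-# the PEP 696 ``default`` is stored on the type variable for runtime
-# introspection. ``_T`` and ``_U`` are hypothetical names.
-_T = TypeVar('_T', default=int)
-assert _T.__default__ is int
-assert TypeVar('_U').__default__ is None
-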
-
-# Python 3.10+ has PEP 612
-if hasattr(typing, 'ParamSpecArgs'):
-    ParamSpecArgs = typing.ParamSpecArgs
-    ParamSpecKwargs = typing.ParamSpecKwargs
-# 3.7-3.9
-else:
-    class _Immutable:
-        """Mixin to indicate that object should not be copied."""
-        __slots__ = ()
-
-        def __copy__(self):
-            return self
-
-        def __deepcopy__(self, memo):
-            return self
-
-    class ParamSpecArgs(_Immutable):
-        """The args for a ParamSpec object.
-
-        Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
-
-        ParamSpecArgs objects have a reference back to their ParamSpec:
-
-        P.args.__origin__ is P
-
-        This type is meant for runtime introspection and has no special meaning to
-        static type checkers.
-        """
-        def __init__(self, origin):
-            self.__origin__ = origin
-
-        def __repr__(self):
-            return f"{self.__origin__.__name__}.args"
-
-        def __eq__(self, other):
-            if not isinstance(other, ParamSpecArgs):
-                return NotImplemented
-            return self.__origin__ == other.__origin__
-
-    class ParamSpecKwargs(_Immutable):
-        """The kwargs for a ParamSpec object.
-
-        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
-
-        ParamSpecKwargs objects have a reference back to their ParamSpec:
-
-        P.kwargs.__origin__ is P
-
-        This type is meant for runtime introspection and has no special meaning to
-        static type checkers.
-        """
-        def __init__(self, origin):
-            self.__origin__ = origin
-
-        def __repr__(self):
-            return f"{self.__origin__.__name__}.kwargs"
-
-        def __eq__(self, other):
-            if not isinstance(other, ParamSpecKwargs):
-                return NotImplemented
-            return self.__origin__ == other.__origin__
-
-# 3.10+
-if hasattr(typing, 'ParamSpec'):
-
-    # Add default Parameter - PEP 696
-    class ParamSpec(typing.ParamSpec, _DefaultMixin, _root=True):
-        """Parameter specification variable."""
-
-        __module__ = 'typing'
-
-        def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
-                     default=None):
-            super().__init__(name, bound=bound, covariant=covariant,
-                             contravariant=contravariant)
-            _DefaultMixin.__init__(self, default)
-
-            # for pickling:
-            try:
-                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
-            except (AttributeError, ValueError):
-                def_mod = None
-            if def_mod != 'typing_extensions':
-                self.__module__ = def_mod
-
-# 3.7-3.9
-else:
-
-    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
-    class ParamSpec(list, _DefaultMixin):
-        """Parameter specification variable.
-
-        Usage::
-
-           P = ParamSpec('P')
-
-        Parameter specification variables exist primarily for the benefit of static
-        type checkers.  They are used to forward the parameter types of one
-        callable to another callable, a pattern commonly found in higher order
-        functions and decorators.  They are only valid when used in ``Concatenate``,
-        or as the first argument to ``Callable``. In Python 3.10 and higher,
-        they are also supported in user-defined Generics at runtime.
-        See class Generic for more information on generic types.  An
-        example for annotating a decorator::
-
-           T = TypeVar('T')
-           P = ParamSpec('P')
-
-           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
-               '''A type-safe decorator to add logging to a function.'''
-               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
-                   logging.info(f'{f.__name__} was called')
-                   return f(*args, **kwargs)
-               return inner
-
-           @add_logging
-           def add_two(x: float, y: float) -> float:
-               '''Add two numbers together.'''
-               return x + y
-
-        Parameter specification variables defined with covariant=True or
-        contravariant=True can be used to declare covariant or contravariant
-        generic types.  These keyword arguments are valid, but their actual semantics
-        are yet to be decided.  See PEP 612 for details.
-
-        Parameter specification variables can be introspected. e.g.:
-
-           P.__name__ == 'P'
-           P.__bound__ == None
-           P.__covariant__ == False
-           P.__contravariant__ == False
-
-        Note that only parameter specification variables defined in global scope can
-        be pickled.
-        """
-
-        # Trick Generic __parameters__.
-        __class__ = typing.TypeVar
-
-        @property
-        def args(self):
-            return ParamSpecArgs(self)
-
-        @property
-        def kwargs(self):
-            return ParamSpecKwargs(self)
-
-        def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
-                     default=None):
-            super().__init__([self])
-            self.__name__ = name
-            self.__covariant__ = bool(covariant)
-            self.__contravariant__ = bool(contravariant)
-            if bound:
-                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
-            else:
-                self.__bound__ = None
-            _DefaultMixin.__init__(self, default)
-
-            # for pickling:
-            try:
-                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
-            except (AttributeError, ValueError):
-                def_mod = None
-            if def_mod != 'typing_extensions':
-                self.__module__ = def_mod
-
-        def __repr__(self):
-            if self.__covariant__:
-                prefix = '+'
-            elif self.__contravariant__:
-                prefix = '-'
-            else:
-                prefix = '~'
-            return prefix + self.__name__
-
-        def __hash__(self):
-            return object.__hash__(self)
-
-        def __eq__(self, other):
-            return self is other
-
-        def __reduce__(self):
-            return self.__name__
-
-        # Hack to get typing._type_check to pass.
-        def __call__(self, *args, **kwargs):
-            pass
-
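-# Editor's sketch (illustration only; not part of the vendored module):
-# ``P.args`` and ``P.kwargs`` point back at their ParamSpec. ``_P`` is a
-# hypothetical name.
-_P = ParamSpec('_P')
-assert _P.args.__origin__ is _P
-assert _P.kwargs.__origin__ is _P
-assert repr(_P.args) == '_P.args'
-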
-
-# 3.7-3.9
-if not hasattr(typing, 'Concatenate'):
-    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
-    class _ConcatenateGenericAlias(list):
-
-        # Trick Generic into looking into this for __parameters__.
-        __class__ = typing._GenericAlias
-
-        # Flag in 3.8.
-        _special = False
-
-        def __init__(self, origin, args):
-            super().__init__(args)
-            self.__origin__ = origin
-            self.__args__ = args
-
-        def __repr__(self):
-            _type_repr = typing._type_repr
-            return (f'{_type_repr(self.__origin__)}'
-                    f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
-
-        def __hash__(self):
-            return hash((self.__origin__, self.__args__))
-
-        # Hack to get typing._type_check to pass in Generic.
-        def __call__(self, *args, **kwargs):
-            pass
-
-        @property
-        def __parameters__(self):
-            return tuple(
-                tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
-            )
-
-
-# 3.7-3.9
-@typing._tp_cache
-def _concatenate_getitem(self, parameters):
-    if parameters == ():
-        raise TypeError("Cannot take a Concatenate of no types.")
-    if not isinstance(parameters, tuple):
-        parameters = (parameters,)
-    if not isinstance(parameters[-1], ParamSpec):
-        raise TypeError("The last parameter to Concatenate should be a "
-                        "ParamSpec variable.")
-    msg = "Concatenate[arg, ...]: each arg must be a type."
-    parameters = tuple(typing._type_check(p, msg) for p in parameters)
-    return _ConcatenateGenericAlias(self, parameters)
-
-
-# 3.10+
-if hasattr(typing, 'Concatenate'):
-    Concatenate = typing.Concatenate
-    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
-    @_TypeAliasForm
-    def Concatenate(self, parameters):
-        """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
-        higher order function which adds, removes or transforms parameters of a
-        callable.
-
-        For example::
-
-           Callable[Concatenate[int, P], int]
-
-        See PEP 612 for detailed information.
-        """
-        return _concatenate_getitem(self, parameters)
-# 3.7-8
-else:
-    class _ConcatenateForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            return _concatenate_getitem(self, parameters)
-
-    Concatenate = _ConcatenateForm(
-        'Concatenate',
-        doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
-        higher order function which adds, removes or transforms parameters of a
-        callable.
-
-        For example::
-
-           Callable[Concatenate[int, P], int]
-
-        See PEP 612 for detailed information.
-        """)
-
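-# Editor's sketch (illustration only; not part of the vendored module):
-# Concatenate prepends concrete parameter types to a ParamSpec, and the
-# resulting alias records its arguments. ``_P2`` is a hypothetical name.
-_P2 = ParamSpec('_P2')
-assert Concatenate[int, _P2].__args__ == (int, _P2)
-# Typical static use: Callable[Concatenate[int, _P2], str] describes a
-# callable taking an int followed by the parameters captured by _P2.
-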
-# 3.10+
-if hasattr(typing, 'TypeGuard'):
-    TypeGuard = typing.TypeGuard
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
-    class _TypeGuardForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-    @_TypeGuardForm
-    def TypeGuard(self, parameters):
-        """Special typing form used to annotate the return type of a user-defined
-        type guard function.  ``TypeGuard`` only accepts a single type argument.
-        At runtime, functions marked this way should return a boolean.
-
-        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
-        type checkers to determine a more precise type of an expression within a
-        program's code flow.  Usually type narrowing is done by analyzing
-        conditional code flow and applying the narrowing to a block of code.  The
-        conditional expression here is sometimes referred to as a "type guard".
-
-        Sometimes it would be convenient to use a user-defined boolean function
-        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
-        return type to alert static type checkers to this intention.
-
-        Using ``-> TypeGuard`` tells the static type checker that for a given
-        function:
-
-        1. The return value is a boolean.
-        2. If the return value is ``True``, the type of its argument
-        is the type inside ``TypeGuard``.
-
-        For example::
-
-            def is_str(val: Union[str, float]):
-                # "isinstance" type guard
-                if isinstance(val, str):
-                    # Type of ``val`` is narrowed to ``str``
-                    ...
-                else:
-                    # Else, type of ``val`` is narrowed to ``float``.
-                    ...
-
-        Strict type narrowing is not enforced -- the narrowed type ``TypeB`` need
-        not be a narrower form of the argument's declared type ``TypeA`` (it can
-        even be a wider form) and this may lead to
-        type-unsafe results.  The main reason is to allow for things like
-        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
-        a subtype of the former, since ``List`` is invariant.  The responsibility of
-        writing type-safe type guards is left to the user.
-
-        ``TypeGuard`` also works with type variables.  For more information, see
-        PEP 647 (User-Defined Type Guards).
-        """
-        item = typing._type_check(parameters, f'{self} accepts only a single type.')
-        return typing._GenericAlias(self, (item,))
-# 3.7-3.8
-else:
-    class _TypeGuardForm(typing._SpecialForm, _root=True):
-
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type')
-            return typing._GenericAlias(self, (item,))
-
-    TypeGuard = _TypeGuardForm(
-        'TypeGuard',
-        doc="""Special typing form used to annotate the return type of a user-defined
-        type guard function.  ``TypeGuard`` only accepts a single type argument.
-        At runtime, functions marked this way should return a boolean.
-
-        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
-        type checkers to determine a more precise type of an expression within a
-        program's code flow.  Usually type narrowing is done by analyzing
-        conditional code flow and applying the narrowing to a block of code.  The
-        conditional expression here is sometimes referred to as a "type guard".
-
-        Sometimes it would be convenient to use a user-defined boolean function
-        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
-        return type to alert static type checkers to this intention.
-
-        Using ``-> TypeGuard`` tells the static type checker that for a given
-        function:
-
-        1. The return value is a boolean.
-        2. If the return value is ``True``, the type of its argument
-        is the type inside ``TypeGuard``.
-
-        For example::
-
-            def is_str(val: Union[str, float]):
-                # "isinstance" type guard
-                if isinstance(val, str):
-                    # Type of ``val`` is narrowed to ``str``
-                    ...
-                else:
-                    # Else, type of ``val`` is narrowed to ``float``.
-                    ...
-
-        Strict type narrowing is not enforced -- the narrowed type ``TypeB`` need
-        not be a narrower form of the argument's declared type ``TypeA`` (it can
-        even be a wider form) and this may lead to
-        type-unsafe results.  The main reason is to allow for things like
-        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
-        a subtype of the former, since ``List`` is invariant.  The responsibility of
-        writing type-safe type guards is left to the user.
-
-        ``TypeGuard`` also works with type variables.  For more information, see
-        PEP 647 (User-Defined Type Guards).
-        """)
-
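-# Editor's sketch (illustration only; not part of the vendored module):
-# a user-defined type guard; at runtime it is an ordinary predicate, and
-# type checkers narrow on a True result. Assumes ``from typing import
-# List``; ``_demo_is_str_list`` is a hypothetical name.
-def _demo_is_str_list(val: List[object]) -> TypeGuard[List[str]]:
-    return all(isinstance(x, str) for x in val)
-
-assert _demo_is_str_list(['a', 'b'])
-assert not _demo_is_str_list(['a', 1])
-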
-
-# Vendored from cpython typing._SpecialForm
-class _SpecialForm(typing._Final, _root=True):
-    __slots__ = ('_name', '__doc__', '_getitem')
-
-    def __init__(self, getitem):
-        self._getitem = getitem
-        self._name = getitem.__name__
-        self.__doc__ = getitem.__doc__
-
-    def __getattr__(self, item):
-        if item in {'__name__', '__qualname__'}:
-            return self._name
-
-        raise AttributeError(item)
-
-    def __mro_entries__(self, bases):
-        raise TypeError(f"Cannot subclass {self!r}")
-
-    def __repr__(self):
-        return f'typing_extensions.{self._name}'
-
-    def __reduce__(self):
-        return self._name
-
-    def __call__(self, *args, **kwds):
-        raise TypeError(f"Cannot instantiate {self!r}")
-
-    def __or__(self, other):
-        return typing.Union[self, other]
-
-    def __ror__(self, other):
-        return typing.Union[other, self]
-
-    def __instancecheck__(self, obj):
-        raise TypeError(f"{self} cannot be used with isinstance()")
-
-    def __subclasscheck__(self, cls):
-        raise TypeError(f"{self} cannot be used with issubclass()")
-
-    @typing._tp_cache
-    def __getitem__(self, parameters):
-        return self._getitem(self, parameters)
-
-
-if hasattr(typing, "LiteralString"):
-    LiteralString = typing.LiteralString
-else:
-    @_SpecialForm
-    def LiteralString(self, params):
-        """Represents an arbitrary literal string.
-
-        Example::
-
-          from typing_extensions import LiteralString
-
-          def query(sql: LiteralString) -> ...:
-              ...
-
-          query("SELECT * FROM table")  # ok
-          query(f"SELECT * FROM {input()}")  # not ok
-
-        See PEP 675 for details.
-
-        """
-        raise TypeError(f"{self} is not subscriptable")
-
-
-if hasattr(typing, "Self"):
-    Self = typing.Self
-else:
-    @_SpecialForm
-    def Self(self, params):
-        """Used to spell the type of "self" in classes.
-
-        Example::
-
-          from typing_extensions import Self
-
-          class ReturnsSelf:
-              def parse(self, data: bytes) -> Self:
-                  ...
-                  return self
-
-        """
-
-        raise TypeError(f"{self} is not subscriptable")
-
-
-if hasattr(typing, "Never"):
-    Never = typing.Never
-else:
-    @_SpecialForm
-    def Never(self, params):
-        """The bottom type, a type that has no members.
-
-        This can be used to define a function that should never be
-        called, or a function that never returns::
-
-            from typing_extensions import Never
-
-            def never_call_me(arg: Never) -> None:
-                pass
-
-            def int_or_str(arg: int | str) -> None:
-                never_call_me(arg)  # type checker error
-                match arg:
-                    case int():
-                        print("It's an int")
-                    case str():
-                        print("It's a str")
-                    case _:
-                        never_call_me(arg)  # ok, arg is of type Never
-
-        """
-
-        raise TypeError(f"{self} is not subscriptable")
-
-
-if hasattr(typing, 'Required'):
-    Required = typing.Required
-    NotRequired = typing.NotRequired
-elif sys.version_info[:2] >= (3, 9):
-    class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-    @_ExtensionsSpecialForm
-    def Required(self, parameters):
-        """A special typing construct to mark a key of a total=False TypedDict
-        as required. For example:
-
-            class Movie(TypedDict, total=False):
-                title: Required[str]
-                year: int
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-
-        There is no runtime checking that a required key is actually provided
-        when instantiating a related TypedDict.
-        """
-        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
-        return typing._GenericAlias(self, (item,))
-
-    @_ExtensionsSpecialForm
-    def NotRequired(self, parameters):
-        """A special typing construct to mark a key of a TypedDict as
-        potentially missing. For example:
-
-            class Movie(TypedDict):
-                title: str
-                year: NotRequired[int]
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-        """
-        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
-        return typing._GenericAlias(self, (item,))
-
-else:
-    class _RequiredForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type.')
-            return typing._GenericAlias(self, (item,))
-
-    Required = _RequiredForm(
-        'Required',
-        doc="""A special typing construct to mark a key of a total=False TypedDict
-        as required. For example:
-
-            class Movie(TypedDict, total=False):
-                title: Required[str]
-                year: int
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-
-        There is no runtime checking that a required key is actually provided
-        when instantiating a related TypedDict.
-        """)
-    NotRequired = _RequiredForm(
-        'NotRequired',
-        doc="""A special typing construct to mark a key of a TypedDict as
-        potentially missing. For example:
-
-            class Movie(TypedDict):
-                title: str
-                year: NotRequired[int]
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-        """)
-
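-# Editor's sketch (illustration only; not part of the vendored module):
-# with the typing_extensions TypedDict, Required/NotRequired markers are
-# reflected in the runtime key sets. ``_DemoMovie`` is a hypothetical name.
-class _DemoMovie(TypedDict, total=False):
-    title: Required[str]
-    year: int
-
-assert _DemoMovie.__required_keys__ == frozenset({'title'})
-assert _DemoMovie.__optional_keys__ == frozenset({'year'})
-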
-
-if hasattr(typing, "Unpack"):  # 3.11+
-    Unpack = typing.Unpack
-elif sys.version_info[:2] >= (3, 9):
-    class _UnpackSpecialForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-    class _UnpackAlias(typing._GenericAlias, _root=True):
-        __class__ = typing.TypeVar
-
-    @_UnpackSpecialForm
-    def Unpack(self, parameters):
-        """A special typing construct to unpack a variadic type. For example:
-
-            Shape = TypeVarTuple('Shape')
-            Batch = NewType('Batch', int)
-
-            def add_batch_axis(
-                x: Array[Unpack[Shape]]
-            ) -> Array[Batch, Unpack[Shape]]: ...
-
-        """
-        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
-        return _UnpackAlias(self, (item,))
-
-    def _is_unpack(obj):
-        return isinstance(obj, _UnpackAlias)
-
-else:
-    class _UnpackAlias(typing._GenericAlias, _root=True):
-        __class__ = typing.TypeVar
-
-    class _UnpackForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type.')
-            return _UnpackAlias(self, (item,))
-
-    Unpack = _UnpackForm(
-        'Unpack',
-        doc="""A special typing construct to unpack a variadic type. For example:
-
-            Shape = TypeVarTuple('Shape')
-            Batch = NewType('Batch', int)
-
-            def add_batch_axis(
-                x: Array[Unpack[Shape]]
-            ) -> Array[Batch, Unpack[Shape]]: ...
-
-        """)
-
-    def _is_unpack(obj):
-        return isinstance(obj, _UnpackAlias)
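-
-    # Editor's sketch (hypothetical, never called): Unpack[x] is just an
-    # _UnpackAlias wrapper around x, which _is_unpack() recognises; a bare
-    # TypeVar is left alone.
-    def _demo_unpack():
-        _T = typing.TypeVar('_T')
-        return _is_unpack(Unpack[_T]), _is_unpack(_T)  # -> (True, False)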
-
-
-if hasattr(typing, "TypeVarTuple"):  # 3.11+
-
-    # Add default parameter support (PEP 696).
-    class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True):
-        """Type variable tuple."""
-
-        def __init__(self, name, *, default=None):
-            super().__init__(name)
-            _DefaultMixin.__init__(self, default)
-
-            # for pickling:
-            try:
-                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
-            except (AttributeError, ValueError):
-                def_mod = None
-            if def_mod != 'typing_extensions':
-                self.__module__ = def_mod
-
-else:
-    class TypeVarTuple(_DefaultMixin):
-        """Type variable tuple.
-
-        Usage::
-
-            Ts = TypeVarTuple('Ts')
-
-        In the same way that a normal type variable is a stand-in for a single
-        type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
-        type such as ``Tuple[int, str]``.
-
-        Type variable tuples can be used in ``Generic`` declarations.
-        Consider the following example::
-
-            class Array(Generic[*Ts]): ...
-
-        The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
-        where ``T1`` and ``T2`` are type variables. To use these type variables
-        as type parameters of ``Array``, we must *unpack* the type variable tuple using
-        the star operator: ``*Ts``. The signature of ``Array`` then behaves
-        as if we had simply written ``class Array(Generic[T1, T2]): ...``.
-        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
-        us to parameterise the class with an *arbitrary* number of type parameters.
-
-        Type variable tuples can be used anywhere a normal ``TypeVar`` can.
-        This includes class definitions, as shown above, as well as function
-        signatures and variable annotations::
-
-            class Array(Generic[*Ts]):
-
-                def __init__(self, shape: Tuple[*Ts]):
-                    self._shape: Tuple[*Ts] = shape
-
-                def get_shape(self) -> Tuple[*Ts]:
-                    return self._shape
-
-                def __abs__(self) -> 'Array[*Ts]': ...
-
-                def __add__(self, other: 'Array[*Ts]') -> 'Array[*Ts]': ...
-
-            shape = (Height(480), Width(640))
-            x: Array[Height, Width] = Array(shape)
-            y = abs(x)  # Inferred type is Array[Height, Width]
-            z = x + x   #        ...    is Array[Height, Width]
-            x.get_shape()  #     ...    is tuple[Height, Width]
-
-        """
-
-        # Trick Generic __parameters__.
-        __class__ = typing.TypeVar
-
-        def __iter__(self):
-            yield self.__unpacked__
-
-        def __init__(self, name, *, default=None):
-            self.__name__ = name
-            _DefaultMixin.__init__(self, default)
-
-            # for pickling:
-            try:
-                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
-            except (AttributeError, ValueError):
-                def_mod = None
-            if def_mod != 'typing_extensions':
-                self.__module__ = def_mod
-
-            self.__unpacked__ = Unpack[self]
-
-        def __repr__(self):
-            return self.__name__
-
-        def __hash__(self):
-            return object.__hash__(self)
-
-        def __eq__(self, other):
-            return self is other
-
-        def __reduce__(self):
-            return self.__name__
-
-        def __init_subclass__(self, *args, **kwds):
-            if '_root' not in kwds:
-                raise TypeError("Cannot subclass special typing classes")
-
-
-if hasattr(typing, "reveal_type"):
-    reveal_type = typing.reveal_type
-else:
-    def reveal_type(__obj: T) -> T:
-        """Reveal the inferred type of a variable.
-
-        When a static type checker encounters a call to ``reveal_type()``,
-        it will emit the inferred type of the argument::
-
-            x: int = 1
-            reveal_type(x)
-
-        Running a static type checker (e.g., ``mypy``) on this example
-        will produce output similar to 'Revealed type is "builtins.int"'.
-
-        At runtime, the function prints the runtime type of the
-        argument and returns it unchanged.
-
-        """
-        print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr)
-        return __obj
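-
-    # Editor's sketch (hypothetical, never called): reveal_type() is an
-    # identity function at runtime; it only prints the dynamic type.
-    def _demo_reveal_type():
-        x = reveal_type(1)  # prints: Runtime type is 'int'
-        return x == 1       # the argument is returned unchanged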
-
-
-if hasattr(typing, "assert_never"):
-    assert_never = typing.assert_never
-else:
-    def assert_never(__arg: Never) -> Never:
-        """Assert to the type checker that a line of code is unreachable.
-
-        Example::
-
-            def int_or_str(arg: int | str) -> None:
-                match arg:
-                    case int():
-                        print("It's an int")
-                    case str():
-                        print("It's a str")
-                    case _:
-                        assert_never(arg)
-
-        If a type checker finds that a call to assert_never() is
-        reachable, it will emit an error.
-
-        At runtime, this throws an exception when called.
-
-        """
-        raise AssertionError("Expected code to be unreachable")
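-
-    # Editor's sketch (hypothetical): assert_never() always raises when
-    # executed; a static checker flags any call site it considers reachable.
-    def _demo_assert_never(arg):
-        if isinstance(arg, (int, str)):
-            return type(arg).__name__
-        assert_never(arg)  # unreachable when arg is int | str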
-
-
-if hasattr(typing, 'dataclass_transform'):
-    dataclass_transform = typing.dataclass_transform
-else:
-    def dataclass_transform(
-        *,
-        eq_default: bool = True,
-        order_default: bool = False,
-        kw_only_default: bool = False,
-        field_specifiers: typing.Tuple[
-            typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
-            ...
-        ] = (),
-        **kwargs: typing.Any,
-    ) -> typing.Callable[[T], T]:
-        """Decorator that marks a function, class, or metaclass as providing
-        dataclass-like behavior.
-
-        Example:
-
-            from typing_extensions import dataclass_transform
-
-            _T = TypeVar("_T")
-
-            # Used on a decorator function
-            @dataclass_transform()
-            def create_model(cls: type[_T]) -> type[_T]:
-                ...
-                return cls
-
-            @create_model
-            class CustomerModel:
-                id: int
-                name: str
-
-            # Used on a base class
-            @dataclass_transform()
-            class ModelBase: ...
-
-            class CustomerModel(ModelBase):
-                id: int
-                name: str
-
-            # Used on a metaclass
-            @dataclass_transform()
-            class ModelMeta(type): ...
-
-            class ModelBase(metaclass=ModelMeta): ...
-
-            class CustomerModel(ModelBase):
-                id: int
-                name: str
-
-        Each of the ``CustomerModel`` classes defined in this example will now
-        behave similarly to a dataclass created with the ``@dataclasses.dataclass``
-        decorator. For example, the type checker will synthesize an ``__init__``
-        method.
-
-        The arguments to this decorator can be used to customize this behavior:
-        - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
-          True or False if it is omitted by the caller.
-        - ``order_default`` indicates whether the ``order`` parameter is
-          assumed to be True or False if it is omitted by the caller.
-        - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
-          assumed to be True or False if it is omitted by the caller.
-        - ``field_specifiers`` specifies a static list of supported classes
-          or functions that describe fields, similar to ``dataclasses.field()``.
-
-        At runtime, this decorator records its arguments in the
-        ``__dataclass_transform__`` attribute on the decorated object.
-
-        See PEP 681 for details.
-
-        """
-        def decorator(cls_or_fn):
-            cls_or_fn.__dataclass_transform__ = {
-                "eq_default": eq_default,
-                "order_default": order_default,
-                "kw_only_default": kw_only_default,
-                "field_specifiers": field_specifiers,
-                "kwargs": kwargs,
-            }
-            return cls_or_fn
-        return decorator
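-
-    # Editor's sketch (hypothetical, never called): the fallback decorator
-    # only records its arguments for later introspection by tools.
-    def _demo_dataclass_transform():
-        @dataclass_transform(kw_only_default=True)
-        def create_model(cls):
-            return cls
-        return create_model.__dataclass_transform__['kw_only_default']  # True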
-
-
-if hasattr(typing, "override"):
-    override = typing.override
-else:
-    _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
-
-    def override(__arg: _F) -> _F:
-        """Indicate that a method is intended to override a method in a base class.
-
-        Usage:
-
-            class Base:
-                def method(self) -> None:
-                    ...
-
-            class Child(Base):
-                @override
-                def method(self) -> None:
-                    super().method()
-
-        When this decorator is applied to a method, the type checker will
-        validate that it overrides a method with the same name on a base class.
-        This helps prevent bugs that may occur when a base class is changed
-        without an equivalent change to a child class.
-
-        See PEP 698 for details.
-
-        """
-        return __arg
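-
-    # Editor's sketch (hypothetical, never called): @override is a runtime
-    # no-op pass-through; the override contract is checked statically.
-    def _demo_override():
-        class Base:
-            def ping(self):
-                return 'base'
-
-        class Child(Base):
-            @override
-            def ping(self):
-                return 'child'
-
-        return Child().ping()  # -> 'child'; the method object is unchanged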
-
-
-# We have to do some monkey patching to deal with the dual nature of
-# Unpack/TypeVarTuple:
-# - We want Unpack to be a kind of TypeVar so it gets accepted in
-#   Generic[Unpack[Ts]]
-# - We want it to *not* be treated as a TypeVar for the purposes of
-#   counting generic parameters, so that when we subscript a generic,
-#   the runtime doesn't try to substitute the Unpack with the subscripted type.
-if not hasattr(typing, "TypeVarTuple"):
-    typing._collect_type_vars = _collect_type_vars
-    typing._check_generic = _check_generic
-
-
-# Backport typing.NamedTuple as it exists in Python 3.11.
-# In 3.11, support for defining generic `NamedTuple`s was added.
-# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
-if sys.version_info >= (3, 11):
-    NamedTuple = typing.NamedTuple
-else:
-    def _caller():
-        try:
-            return sys._getframe(2).f_globals.get('__name__', '__main__')
-        except (AttributeError, ValueError):  # For platforms without _getframe()
-            return None
-
-    def _make_nmtuple(name, types, module, defaults=()):
-        fields = [n for n, t in types]
-        annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
-                       for n, t in types}
-        nm_tpl = collections.namedtuple(name, fields,
-                                        defaults=defaults, module=module)
-        nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
-        # The `_field_types` attribute was removed in 3.9;
-        # in earlier versions, it is the same as the `__annotations__` attribute
-        if sys.version_info < (3, 9):
-            nm_tpl._field_types = annotations
-        return nm_tpl
-
-    _prohibited_namedtuple_fields = typing._prohibited
-    _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
-
-    class _NamedTupleMeta(type):
-        def __new__(cls, typename, bases, ns):
-            assert _NamedTuple in bases
-            for base in bases:
-                if base is not _NamedTuple and base is not typing.Generic:
-                    raise TypeError(
-                        'can only inherit from a NamedTuple type and Generic')
-            bases = tuple(tuple if base is _NamedTuple else base for base in bases)
-            types = ns.get('__annotations__', {})
-            default_names = []
-            for field_name in types:
-                if field_name in ns:
-                    default_names.append(field_name)
-                elif default_names:
-                    raise TypeError(f"Non-default namedtuple field {field_name} "
-                                    f"cannot follow default field"
-                                    f"{'s' if len(default_names) > 1 else ''} "
-                                    f"{', '.join(default_names)}")
-            nm_tpl = _make_nmtuple(
-                typename, types.items(),
-                defaults=[ns[n] for n in default_names],
-                module=ns['__module__']
-            )
-            nm_tpl.__bases__ = bases
-            if typing.Generic in bases:
-                class_getitem = typing.Generic.__class_getitem__.__func__
-                nm_tpl.__class_getitem__ = classmethod(class_getitem)
-            # update from user namespace without overriding special namedtuple attributes
-            for key in ns:
-                if key in _prohibited_namedtuple_fields:
-                    raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
-                elif key not in _special_namedtuple_fields and key not in nm_tpl._fields:
-                    setattr(nm_tpl, key, ns[key])
-            if typing.Generic in bases:
-                nm_tpl.__init_subclass__()
-            return nm_tpl
-
-    def NamedTuple(__typename, __fields=None, **kwargs):
-        if __fields is None:
-            __fields = kwargs.items()
-        elif kwargs:
-            raise TypeError("Either list of fields or keywords"
-                            " can be provided to NamedTuple, not both")
-        return _make_nmtuple(__typename, __fields, module=_caller())
-
-    NamedTuple.__doc__ = typing.NamedTuple.__doc__
-    _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
-
-    # On 3.8+, alter the signature so that it matches typing.NamedTuple.
-    # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7,
-    # so just leave the signature as it is on 3.7.
-    if sys.version_info >= (3, 8):
-        NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)'
-
-    def _namedtuple_mro_entries(bases):
-        assert NamedTuple in bases
-        return (_NamedTuple,)
-
-    NamedTuple.__mro_entries__ = _namedtuple_mro_entries
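-
-    # Editor's sketch (hypothetical, never called): the backport supports the
-    # functional form below as well as the class syntax, and, unlike
-    # typing.NamedTuple on 3.9-3.10, also permits Generic bases.
-    def _demo_namedtuple():
-        Point = NamedTuple('Point', [('x', int), ('y', int)])
-        return Point(1, 2).x  # -> 1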
diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt
index f8cbfd967e..0c8fdc3823 100644
--- a/pkg_resources/_vendor/vendored.txt
+++ b/pkg_resources/_vendor/vendored.txt
@@ -1,8 +1,6 @@
 packaging==24
 
 platformdirs==2.6.2
-# required for platformdirs on Python < 3.8
-typing_extensions==4.4.0
 
 jaraco.text==3.7.0
 # required for jaraco.text on older Pythons
diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index 7f80b04164..363bdf3f06 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -79,7 +79,6 @@ def install(self):
 names = (
     'packaging',
     'platformdirs',
-    'typing_extensions',
     'jaraco',
     'importlib_resources',
     'zipp',
diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/INSTALLER b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/LICENSE b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/LICENSE
deleted file mode 100644
index 583f9f6e61..0000000000
--- a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/LICENSE
+++ /dev/null
@@ -1,254 +0,0 @@
-A. HISTORY OF THE SOFTWARE
-==========================
-
-Python was created in the early 1990s by Guido van Rossum at Stichting
-Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
-as a successor of a language called ABC.  Guido remains Python's
-principal author, although it includes many contributions from others.
-
-In 1995, Guido continued his work on Python at the Corporation for
-National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
-in Reston, Virginia where he released several versions of the
-software.
-
-In May 2000, Guido and the Python core development team moved to
-BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
-year, the PythonLabs team moved to Digital Creations (now Zope
-Corporation, see http://www.zope.com).  In 2001, the Python Software
-Foundation (PSF, see http://www.python.org/psf/) was formed, a
-non-profit organization created specifically to own Python-related
-Intellectual Property.  Zope Corporation is a sponsoring member of
-the PSF.
-
-All Python releases are Open Source (see http://www.opensource.org for
-the Open Source Definition).  Historically, most, but not all, Python
-releases have also been GPL-compatible; the table below summarizes
-the various releases.
-
-    Release         Derived     Year        Owner       GPL-
-                    from                                compatible? (1)
-
-    0.9.0 thru 1.2              1991-1995   CWI         yes
-    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
-    1.6             1.5.2       2000        CNRI        no
-    2.0             1.6         2000        BeOpen.com  no
-    1.6.1           1.6         2001        CNRI        yes (2)
-    2.1             2.0+1.6.1   2001        PSF         no
-    2.0.1           2.0+1.6.1   2001        PSF         yes
-    2.1.1           2.1+2.0.1   2001        PSF         yes
-    2.1.2           2.1.1       2002        PSF         yes
-    2.1.3           2.1.2       2002        PSF         yes
-    2.2 and above   2.1.1       2001-now    PSF         yes
-
-Footnotes:
-
-(1) GPL-compatible doesn't mean that we're distributing Python under
-    the GPL.  All Python licenses, unlike the GPL, let you distribute
-    a modified version without making your changes open source.  The
-    GPL-compatible licenses make it possible to combine Python with
-    other software that is released under the GPL; the others don't.
-
-(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
-    because its license has a choice of law clause.  According to
-    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
-    is "not incompatible" with the GPL.
-
-Thanks to the many outside volunteers who have worked under Guido's
-direction to make these releases possible.
-
-
-B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
-===============================================================
-
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
-retained in Python alone or in any derivative version prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee.  This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
--------------------------------------------
-
-BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
-
-1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
-office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
-Individual or Organization ("Licensee") accessing and otherwise using
-this software in source or binary form and its associated
-documentation ("the Software").
-
-2. Subject to the terms and conditions of this BeOpen Python License
-Agreement, BeOpen hereby grants Licensee a non-exclusive,
-royalty-free, world-wide license to reproduce, analyze, test, perform
-and/or display publicly, prepare derivative works, distribute, and
-otherwise use the Software alone or in any derivative version,
-provided, however, that the BeOpen Python License is retained in the
-Software, alone or in any derivative version prepared by Licensee.
-
-3. BeOpen is making the Software available to Licensee on an "AS IS"
-basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
-SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
-AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
-DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-5. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-6. This License Agreement shall be governed by and interpreted in all
-respects by the law of the State of California, excluding conflict of
-law provisions.  Nothing in this License Agreement shall be deemed to
-create any relationship of agency, partnership, or joint venture
-between BeOpen and Licensee.  This License Agreement does not grant
-permission to use BeOpen trademarks or trade names in a trademark
-sense to endorse or promote products or services of Licensee, or any
-third party.  As an exception, the "BeOpen Python" logos available at
-http://www.pythonlabs.com/logos.html may be used according to the
-permissions granted on that web page.
-
-7. By copying, installing or otherwise using the software, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
----------------------------------------
-
-1. This LICENSE AGREEMENT is between the Corporation for National
-Research Initiatives, having an office at 1895 Preston White Drive,
-Reston, VA 20191 ("CNRI"), and the Individual or Organization
-("Licensee") accessing and otherwise using Python 1.6.1 software in
-source or binary form and its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, CNRI
-hereby grants Licensee a nonexclusive, royalty-free, world-wide
-license to reproduce, analyze, test, perform and/or display publicly,
-prepare derivative works, distribute, and otherwise use Python 1.6.1
-alone or in any derivative version, provided, however, that CNRI's
-License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
-1995-2001 Corporation for National Research Initiatives; All Rights
-Reserved" are retained in Python 1.6.1 alone or in any derivative
-version prepared by Licensee.  Alternately, in lieu of CNRI's License
-Agreement, Licensee may substitute the following text (omitting the
-quotes): "Python 1.6.1 is made available subject to the terms and
-conditions in CNRI's License Agreement.  This Agreement together with
-Python 1.6.1 may be located on the Internet using the following
-unique, persistent identifier (known as a handle): 1895.22/1013.  This
-Agreement may also be obtained from a proxy server on the Internet
-using the following URL: http://hdl.handle.net/1895.22/1013".
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python 1.6.1 or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python 1.6.1.
-
-4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
-basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. This License Agreement shall be governed by the federal
-intellectual property law of the United States, including without
-limitation the federal copyright law, and, to the extent such
-U.S. federal law does not apply, by the law of the Commonwealth of
-Virginia, excluding Virginia's conflict of law provisions.
-Notwithstanding the foregoing, with regard to derivative works based
-on Python 1.6.1 that incorporate non-separable material that was
-previously distributed under the GNU General Public License (GPL), the
-law of the Commonwealth of Virginia shall govern this License
-Agreement only as to issues arising under or with respect to
-Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
-License Agreement shall be deemed to create any relationship of
-agency, partnership, or joint venture between CNRI and Licensee.  This
-License Agreement does not grant permission to use CNRI trademarks or
-trade name in a trademark sense to endorse or promote products or
-services of Licensee, or any third party.
-
-8. By clicking on the "ACCEPT" button where indicated, or by copying,
-installing or otherwise using Python 1.6.1, Licensee agrees to be
-bound by the terms and conditions of this License Agreement.
-
-        ACCEPT
-
-
-CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
---------------------------------------------------
-
-Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
-The Netherlands.  All rights reserved.
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of Stichting Mathematisch
-Centrum or CWI not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior
-permission.
-
-STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
-THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
-FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/METADATA b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/METADATA
deleted file mode 100644
index fe10dfd02a..0000000000
--- a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/METADATA
+++ /dev/null
@@ -1,35 +0,0 @@
-Metadata-Version: 2.1
-Name: typing_extensions
-Version: 4.0.1
-Summary: Backported and Experimental Type Hints for Python 3.6+
-Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
-Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" 
-Requires-Python: >=3.6
-Description-Content-Type: text/x-rst
-Classifier: Development Status :: 3 - Alpha
-Classifier: Environment :: Console
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Python Software Foundation License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Topic :: Software Development
-Project-URL: Home, https://github.com/python/typing/blob/master/typing_extensions/README.rst
-
-Typing Extensions -- Backported and Experimental Type Hints for Python
-
-The ``typing`` module was added to the standard library in Python 3.5, but
-many new features have been added to the module since then.
-This means users of older Python versions who are unable to upgrade will not be
-able to take advantage of new types added to the ``typing`` module, such as
-``typing.Protocol`` or ``typing.TypedDict``.
-
-The ``typing_extensions`` module contains backports of these changes.
-Experimental types that may eventually be added to the ``typing``
-module are also included in ``typing_extensions``.
-
diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD
deleted file mode 100644
index efc5f26cf3..0000000000
--- a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/RECORD
+++ /dev/null
@@ -1,8 +0,0 @@
-__pycache__/typing_extensions.cpython-312.pyc,,
-typing_extensions-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-typing_extensions-4.0.1.dist-info/LICENSE,sha256=_xfOlOECAk3raHc-scx0ynbaTmWPNzUx8Kwi1oprsa0,12755
-typing_extensions-4.0.1.dist-info/METADATA,sha256=iZ_5HONZZBXtF4kroz-IPZYIl9M8IE1B00R82dWcBqE,1736
-typing_extensions-4.0.1.dist-info/RECORD,,
-typing_extensions-4.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-typing_extensions-4.0.1.dist-info/WHEEL,sha256=LVOPL_YDMEiGvRLgDK1hLkfhFCnTcxcAYZJtpNFses0,81
-typing_extensions.py,sha256=1uqi_RSlI7gos4eJB_NEV3d5wQwzTUQHd3_jrkbTo8Q,87149
diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/REQUESTED b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/WHEEL b/setuptools/_vendor/typing_extensions-4.0.1.dist-info/WHEEL
deleted file mode 100644
index 884ceb565c..0000000000
--- a/setuptools/_vendor/typing_extensions-4.0.1.dist-info/WHEEL
+++ /dev/null
@@ -1,4 +0,0 @@
-Wheel-Version: 1.0
-Generator: flit 3.5.1
-Root-Is-Purelib: true
-Tag: py3-none-any
diff --git a/setuptools/_vendor/typing_extensions.py b/setuptools/_vendor/typing_extensions.py
deleted file mode 100644
index 9f1c7aa31e..0000000000
--- a/setuptools/_vendor/typing_extensions.py
+++ /dev/null
@@ -1,2296 +0,0 @@
-import abc
-import collections
-import collections.abc
-import operator
-import sys
-import typing
-
-# After PEP 560, internal typing API was substantially reworked.
-# This is especially important for Protocol class which uses internal APIs
-# quite extensively.
-PEP_560 = sys.version_info[:3] >= (3, 7, 0)
-
-if PEP_560:
-    GenericMeta = type
-else:
-    # 3.6
-    from typing import GenericMeta, _type_vars  # noqa
-
-# The two functions below are copies of typing internal helpers.
-# They are needed by _ProtocolMeta.
-
-
-def _no_slots_copy(dct):
-    dict_copy = dict(dct)
-    if '__slots__' in dict_copy:
-        for slot in dict_copy['__slots__']:
-            dict_copy.pop(slot, None)
-    return dict_copy
-
-
-def _check_generic(cls, parameters):
-    if not cls.__parameters__:
-        raise TypeError(f"{cls} is not a generic class")
-    alen = len(parameters)
-    elen = len(cls.__parameters__)
-    if alen != elen:
-        raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments for {cls};"
-                        f" actual {alen}, expected {elen}")
-
-
-# Please keep __all__ alphabetized within each category.
-__all__ = [
-    # Super-special typing primitives.
-    'ClassVar',
-    'Concatenate',
-    'Final',
-    'ParamSpec',
-    'Self',
-    'Type',
-
-    # ABCs (from collections.abc).
-    'Awaitable',
-    'AsyncIterator',
-    'AsyncIterable',
-    'Coroutine',
-    'AsyncGenerator',
-    'AsyncContextManager',
-    'ChainMap',
-
-    # Concrete collection types.
-    'ContextManager',
-    'Counter',
-    'Deque',
-    'DefaultDict',
-    'OrderedDict',
-    'TypedDict',
-
-    # Structural checks, a.k.a. protocols.
-    'SupportsIndex',
-
-    # One-off things.
-    'Annotated',
-    'final',
-    'IntVar',
-    'Literal',
-    'NewType',
-    'overload',
-    'Protocol',
-    'runtime',
-    'runtime_checkable',
-    'Text',
-    'TypeAlias',
-    'TypeGuard',
-    'TYPE_CHECKING',
-]
-
-if PEP_560:
-    __all__.extend(["get_args", "get_origin", "get_type_hints"])
-
-# 3.6.2+
-if hasattr(typing, 'NoReturn'):
-    NoReturn = typing.NoReturn
-# 3.6.0-3.6.1
-else:
-    class _NoReturn(typing._FinalTypingBase, _root=True):
-        """Special type indicating functions that never return.
-        Example::
-
-          from typing import NoReturn
-
-          def stop() -> NoReturn:
-              raise Exception('no way')
-
-        This type is invalid in other positions, e.g., ``List[NoReturn]``
-        will fail in static type checkers.
-        """
-        __slots__ = ()
-
-        def __instancecheck__(self, obj):
-            raise TypeError("NoReturn cannot be used with isinstance().")
-
-        def __subclasscheck__(self, cls):
-            raise TypeError("NoReturn cannot be used with issubclass().")
-
-    NoReturn = _NoReturn(_root=True)
-
-# Some unconstrained type variables.  These are used by the container types.
-# (These are not for export.)
-T = typing.TypeVar('T')  # Any type.
-KT = typing.TypeVar('KT')  # Key type.
-VT = typing.TypeVar('VT')  # Value type.
-T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
-T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
-
-ClassVar = typing.ClassVar
-
-# On older versions of typing there is an internal class named "Final".
-# 3.8+
-if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
-    Final = typing.Final
-# 3.7
-elif sys.version_info[:2] >= (3, 7):
-    class _FinalForm(typing._SpecialForm, _root=True):
-
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only single type')
-            return typing._GenericAlias(self, (item,))
-
-    Final = _FinalForm('Final',
-                       doc="""A special typing construct to indicate that a name
-                       cannot be re-assigned or overridden in a subclass.
-                       For example:
-
-                           MAX_SIZE: Final = 9000
-                           MAX_SIZE += 1  # Error reported by type checker
-
-                           class Connection:
-                               TIMEOUT: Final[int] = 10
-                           class FastConnector(Connection):
-                               TIMEOUT = 1  # Error reported by type checker
-
-                       There is no runtime checking of these properties.""")
-# 3.6
-else:
-    class _Final(typing._FinalTypingBase, _root=True):
-        """A special typing construct to indicate that a name
-        cannot be re-assigned or overridden in a subclass.
-        For example:
-
-            MAX_SIZE: Final = 9000
-            MAX_SIZE += 1  # Error reported by type checker
-
-            class Connection:
-                TIMEOUT: Final[int] = 10
-            class FastConnector(Connection):
-                TIMEOUT = 1  # Error reported by type checker
-
-        There is no runtime checking of these properties.
-        """
-
-        __slots__ = ('__type__',)
-
-        def __init__(self, tp=None, **kwds):
-            self.__type__ = tp
-
-        def __getitem__(self, item):
-            cls = type(self)
-            if self.__type__ is None:
-                return cls(typing._type_check(item,
-                           f'{cls.__name__[1:]} accepts only single type.'),
-                           _root=True)
-            raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
-
-        def _eval_type(self, globalns, localns):
-            new_tp = typing._eval_type(self.__type__, globalns, localns)
-            if new_tp == self.__type__:
-                return self
-            return type(self)(new_tp, _root=True)
-
-        def __repr__(self):
-            r = super().__repr__()
-            if self.__type__ is not None:
-                r += f'[{typing._type_repr(self.__type__)}]'
-            return r
-
-        def __hash__(self):
-            return hash((type(self).__name__, self.__type__))
-
-        def __eq__(self, other):
-            if not isinstance(other, _Final):
-                return NotImplemented
-            if self.__type__ is not None:
-                return self.__type__ == other.__type__
-            return self is other
-
-    Final = _Final(_root=True)
-
-
-# 3.8+
-if hasattr(typing, 'final'):
-    final = typing.final
-# 3.6-3.7
-else:
-    def final(f):
-        """This decorator can be used to indicate to type checkers that
-        the decorated method cannot be overridden, and decorated class
-        cannot be subclassed. For example:
-
-            class Base:
-                @final
-                def done(self) -> None:
-                    ...
-            class Sub(Base):
-                def done(self) -> None:  # Error reported by type checker
-                    ...
-            @final
-            class Leaf:
-                ...
-            class Other(Leaf):  # Error reported by type checker
-                ...
-
-        There is no runtime checking of these properties.
-        """
-        return f
-
-
-def IntVar(name):
-    return typing.TypeVar(name)
-
-
-# 3.8+:
-if hasattr(typing, 'Literal'):
-    Literal = typing.Literal
-# 3.7:
-elif sys.version_info[:2] >= (3, 7):
-    class _LiteralForm(typing._SpecialForm, _root=True):
-
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            return typing._GenericAlias(self, parameters)
-
-    Literal = _LiteralForm('Literal',
-                           doc="""A type that can be used to indicate to type checkers
-                           that the corresponding value has a value literally equivalent
-                           to the provided parameter. For example:
-
-                               var: Literal[4] = 4
-
-                           The type checker understands that 'var' is literally equal to
-                           the value 4 and no other value.
-
-                           Literal[...] cannot be subclassed. There is no runtime
-                           checking verifying that the parameter is actually a value
-                           instead of a type.""")
-# 3.6:
-else:
-    class _Literal(typing._FinalTypingBase, _root=True):
-        """A type that can be used to indicate to type checkers that the
-        corresponding value has a value literally equivalent to the
-        provided parameter. For example:
-
-            var: Literal[4] = 4
-
-        The type checker understands that 'var' is literally equal to the
-        value 4 and no other value.
-
-        Literal[...] cannot be subclassed. There is no runtime checking
-        verifying that the parameter is actually a value instead of a type.
-        """
-
-        __slots__ = ('__values__',)
-
-        def __init__(self, values=None, **kwds):
-            self.__values__ = values
-
-        def __getitem__(self, values):
-            cls = type(self)
-            if self.__values__ is None:
-                if not isinstance(values, tuple):
-                    values = (values,)
-                return cls(values, _root=True)
-            raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
-
-        def _eval_type(self, globalns, localns):
-            return self
-
-        def __repr__(self):
-            r = super().__repr__()
-            if self.__values__ is not None:
-                r += f'[{", ".join(map(typing._type_repr, self.__values__))}]'
-            return r
-
-        def __hash__(self):
-            return hash((type(self).__name__, self.__values__))
-
-        def __eq__(self, other):
-            if not isinstance(other, _Literal):
-                return NotImplemented
-            if self.__values__ is not None:
-                return self.__values__ == other.__values__
-            return self is other
-
-    Literal = _Literal(_root=True)
-
-
-_overload_dummy = typing._overload_dummy  # noqa
-overload = typing.overload
-
-
-# This is not a real generic class.  Don't use outside annotations.
-Type = typing.Type
-
-# Various ABCs mimicking those in collections.abc.
-# A few are simply re-exported for completeness.
-
-
-class _ExtensionsGenericMeta(GenericMeta):
-    def __subclasscheck__(self, subclass):
-        """This mimics a more modern GenericMeta.__subclasscheck__() logic
-        (that does not have problems with recursion) to work around interactions
-        between collections, typing, and typing_extensions on older
-        versions of Python, see https://github.com/python/typing/issues/501.
-        """
-        if self.__origin__ is not None:
-            if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
-                raise TypeError("Parameterized generics cannot be used with class "
-                                "or instance checks")
-            return False
-        if not self.__extra__:
-            return super().__subclasscheck__(subclass)
-        res = self.__extra__.__subclasshook__(subclass)
-        if res is not NotImplemented:
-            return res
-        if self.__extra__ in subclass.__mro__:
-            return True
-        for scls in self.__extra__.__subclasses__():
-            if isinstance(scls, GenericMeta):
-                continue
-            if issubclass(subclass, scls):
-                return True
-        return False
-
-
-Awaitable = typing.Awaitable
-Coroutine = typing.Coroutine
-AsyncIterable = typing.AsyncIterable
-AsyncIterator = typing.AsyncIterator
-
-# 3.6.1+
-if hasattr(typing, 'Deque'):
-    Deque = typing.Deque
-# 3.6.0
-else:
-    class Deque(collections.deque, typing.MutableSequence[T],
-                metaclass=_ExtensionsGenericMeta,
-                extra=collections.deque):
-        __slots__ = ()
-
-        def __new__(cls, *args, **kwds):
-            if cls._gorg is Deque:
-                return collections.deque(*args, **kwds)
-            return typing._generic_new(collections.deque, cls, *args, **kwds)
-
-ContextManager = typing.ContextManager
-# 3.6.2+
-if hasattr(typing, 'AsyncContextManager'):
-    AsyncContextManager = typing.AsyncContextManager
-# 3.6.0-3.6.1
-else:
-    from _collections_abc import _check_methods as _check_methods_in_mro  # noqa
-
-    class AsyncContextManager(typing.Generic[T_co]):
-        __slots__ = ()
-
-        async def __aenter__(self):
-            return self
-
-        @abc.abstractmethod
-        async def __aexit__(self, exc_type, exc_value, traceback):
-            return None
-
-        @classmethod
-        def __subclasshook__(cls, C):
-            if cls is AsyncContextManager:
-                return _check_methods_in_mro(C, "__aenter__", "__aexit__")
-            return NotImplemented
-
-DefaultDict = typing.DefaultDict
-
-# 3.7.2+
-if hasattr(typing, 'OrderedDict'):
-    OrderedDict = typing.OrderedDict
-# 3.7.0-3.7.2
-elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2):
-    OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))
-# 3.6
-else:
-    class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT],
-                      metaclass=_ExtensionsGenericMeta,
-                      extra=collections.OrderedDict):
-
-        __slots__ = ()
-
-        def __new__(cls, *args, **kwds):
-            if cls._gorg is OrderedDict:
-                return collections.OrderedDict(*args, **kwds)
-            return typing._generic_new(collections.OrderedDict, cls, *args, **kwds)
-
-# 3.6.2+
-if hasattr(typing, 'Counter'):
-    Counter = typing.Counter
-# 3.6.0-3.6.1
-else:
-    class Counter(collections.Counter,
-                  typing.Dict[T, int],
-                  metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
-
-        __slots__ = ()
-
-        def __new__(cls, *args, **kwds):
-            if cls._gorg is Counter:
-                return collections.Counter(*args, **kwds)
-            return typing._generic_new(collections.Counter, cls, *args, **kwds)
-
-# 3.6.1+
-if hasattr(typing, 'ChainMap'):
-    ChainMap = typing.ChainMap
-elif hasattr(collections, 'ChainMap'):
-    class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
-                   metaclass=_ExtensionsGenericMeta,
-                   extra=collections.ChainMap):
-
-        __slots__ = ()
-
-        def __new__(cls, *args, **kwds):
-            if cls._gorg is ChainMap:
-                return collections.ChainMap(*args, **kwds)
-            return typing._generic_new(collections.ChainMap, cls, *args, **kwds)
-
-# 3.6.1+
-if hasattr(typing, 'AsyncGenerator'):
-    AsyncGenerator = typing.AsyncGenerator
-# 3.6.0
-else:
-    class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra],
-                         metaclass=_ExtensionsGenericMeta,
-                         extra=collections.abc.AsyncGenerator):
-        __slots__ = ()
-
-NewType = typing.NewType
-Text = typing.Text
-TYPE_CHECKING = typing.TYPE_CHECKING
-
-
-def _gorg(cls):
-    """This function exists for compatibility with old typing versions."""
-    assert isinstance(cls, GenericMeta)
-    if hasattr(cls, '_gorg'):
-        return cls._gorg
-    while cls.__origin__ is not None:
-        cls = cls.__origin__
-    return cls
-
-
-_PROTO_WHITELIST = ['Callable', 'Awaitable',
-                    'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
-                    'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
-                    'ContextManager', 'AsyncContextManager']
-
-
-def _get_protocol_attrs(cls):
-    attrs = set()
-    for base in cls.__mro__[:-1]:  # without object
-        if base.__name__ in ('Protocol', 'Generic'):
-            continue
-        annotations = getattr(base, '__annotations__', {})
-        for attr in list(base.__dict__.keys()) + list(annotations.keys()):
-            if (not attr.startswith('_abc_') and attr not in (
-                    '__abstractmethods__', '__annotations__', '__weakref__',
-                    '_is_protocol', '_is_runtime_protocol', '__dict__',
-                    '__args__', '__slots__',
-                    '__next_in_mro__', '__parameters__', '__origin__',
-                    '__orig_bases__', '__extra__', '__tree_hash__',
-                    '__doc__', '__subclasshook__', '__init__', '__new__',
-                    '__module__', '_MutableMapping__marker', '_gorg')):
-                attrs.add(attr)
-    return attrs
-
-
-def _is_callable_members_only(cls):
-    return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
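-
-
-# Editor's sketch (hypothetical, never called): protocol members are gathered
-# from the MRO, and runtime isinstance() support requires that every member
-# be callable (i.e. methods only, no data attributes).
-def _demo_protocol_attrs():
-    class P(Protocol):
-        def meth(self) -> int: ...
-    return _get_protocol_attrs(P), _is_callable_members_only(P)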
-
-
-# 3.8+
-if hasattr(typing, 'Protocol'):
-    Protocol = typing.Protocol
-# 3.7
-elif PEP_560:
-    from typing import _collect_type_vars  # noqa
-
-    def _no_init(self, *args, **kwargs):
-        if type(self)._is_protocol:
-            raise TypeError('Protocols cannot be instantiated')
-
-    class _ProtocolMeta(abc.ABCMeta):
-        # This metaclass is a bit unfortunate and exists only because of the lack
-        # of __instancehook__.
-        def __instancecheck__(cls, instance):
-            # We need this method for situations where attributes are
-            # assigned in __init__.
-            if ((not getattr(cls, '_is_protocol', False) or
-                 _is_callable_members_only(cls)) and
-                    issubclass(instance.__class__, cls)):
-                return True
-            if cls._is_protocol:
-                if all(hasattr(instance, attr) and
-                       (not callable(getattr(cls, attr, None)) or
-                        getattr(instance, attr) is not None)
-                       for attr in _get_protocol_attrs(cls)):
-                    return True
-            return super().__instancecheck__(instance)
-
-    class Protocol(metaclass=_ProtocolMeta):
-        # There is quite a lot of overlapping code with typing.Generic.
-        # Unfortunately it is hard to avoid this while these live in two different
-        # modules. The duplicated code will be removed when Protocol is moved to typing.
-        """Base class for protocol classes. Protocol classes are defined as::
-
-            class Proto(Protocol):
-                def meth(self) -> int:
-                    ...
-
-        Such classes are primarily used with static type checkers that recognize
-        structural subtyping (static duck-typing), for example::
-
-            class C:
-                def meth(self) -> int:
-                    return 0
-
-            def func(x: Proto) -> int:
-                return x.meth()
-
-            func(C())  # Passes static type check
-
-        See PEP 544 for details. Protocol classes decorated with
-        @typing_extensions.runtime act as simple-minded runtime protocols that
-        check only the presence of given attributes, ignoring their type signatures.
-
-        Protocol classes can be generic, they are defined as::
-
-            class GenProto(Protocol[T]):
-                def meth(self) -> T:
-                    ...
-        """
-        __slots__ = ()
-        _is_protocol = True
-
-        def __new__(cls, *args, **kwds):
-            if cls is Protocol:
-                raise TypeError("Type Protocol cannot be instantiated; "
-                                "it can only be used as a base class")
-            return super().__new__(cls)
-
-        @typing._tp_cache
-        def __class_getitem__(cls, params):
-            if not isinstance(params, tuple):
-                params = (params,)
-            if not params and cls is not typing.Tuple:
-                raise TypeError(
-                    f"Parameter list to {cls.__qualname__}[...] cannot be empty")
-            msg = "Parameters to generic types must be types."
-            params = tuple(typing._type_check(p, msg) for p in params)  # noqa
-            if cls is Protocol:
-                # Generic can only be subscripted with unique type variables.
-                if not all(isinstance(p, typing.TypeVar) for p in params):
-                    i = 0
-                    while isinstance(params[i], typing.TypeVar):
-                        i += 1
-                    raise TypeError(
-                        "Parameters to Protocol[...] must all be type variables."
-                        f" Parameter {i + 1} is {params[i]}")
-                if len(set(params)) != len(params):
-                    raise TypeError(
-                        "Parameters to Protocol[...] must all be unique")
-            else:
-                # Subscripting a regular Generic subclass.
-                _check_generic(cls, params)
-            return typing._GenericAlias(cls, params)
-
-        def __init_subclass__(cls, *args, **kwargs):
-            tvars = []
-            if '__orig_bases__' in cls.__dict__:
-                error = typing.Generic in cls.__orig_bases__
-            else:
-                error = typing.Generic in cls.__bases__
-            if error:
-                raise TypeError("Cannot inherit from plain Generic")
-            if '__orig_bases__' in cls.__dict__:
-                tvars = _collect_type_vars(cls.__orig_bases__)
-                # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
-                # If found, tvars must be a subset of it.
-                # If not found, tvars is it.
-                # Also check for and reject plain Generic,
-                # and reject multiple Generic[...] and/or Protocol[...].
-                gvars = None
-                for base in cls.__orig_bases__:
-                    if (isinstance(base, typing._GenericAlias) and
-                            base.__origin__ in (typing.Generic, Protocol)):
-                        # for error messages
-                        the_base = base.__origin__.__name__
-                        if gvars is not None:
-                            raise TypeError(
-                                "Cannot inherit from Generic[...]"
-                                " and/or Protocol[...] multiple types.")
-                        gvars = base.__parameters__
-                if gvars is None:
-                    gvars = tvars
-                else:
-                    tvarset = set(tvars)
-                    gvarset = set(gvars)
-                    if not tvarset <= gvarset:
-                        s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
-                        s_args = ', '.join(str(g) for g in gvars)
-                        raise TypeError(f"Some type variables ({s_vars}) are"
-                                        f" not listed in {the_base}[{s_args}]")
-                    tvars = gvars
-            cls.__parameters__ = tuple(tvars)
-
-            # Determine if this is a protocol or a concrete subclass.
-            if not cls.__dict__.get('_is_protocol', None):
-                cls._is_protocol = any(b is Protocol for b in cls.__bases__)
-
-            # Set (or override) the protocol subclass hook.
-            def _proto_hook(other):
-                if not cls.__dict__.get('_is_protocol', None):
-                    return NotImplemented
-                if not getattr(cls, '_is_runtime_protocol', False):
-                    if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
-                        return NotImplemented
-                    raise TypeError("Instance and class checks can only be used with"
-                                    " @runtime protocols")
-                if not _is_callable_members_only(cls):
-                    if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
-                        return NotImplemented
-                    raise TypeError("Protocols with non-method members"
-                                    " don't support issubclass()")
-                if not isinstance(other, type):
-                    # Same error as for issubclass(1, int)
-                    raise TypeError('issubclass() arg 1 must be a class')
-                for attr in _get_protocol_attrs(cls):
-                    for base in other.__mro__:
-                        if attr in base.__dict__:
-                            if base.__dict__[attr] is None:
-                                return NotImplemented
-                            break
-                        annotations = getattr(base, '__annotations__', {})
-                        if (isinstance(annotations, typing.Mapping) and
-                                attr in annotations and
-                                isinstance(other, _ProtocolMeta) and
-                                other._is_protocol):
-                            break
-                    else:
-                        return NotImplemented
-                return True
-            if '__subclasshook__' not in cls.__dict__:
-                cls.__subclasshook__ = _proto_hook
-
-            # We have nothing more to do for non-protocols.
-            if not cls._is_protocol:
-                return
-
-            # Check consistency of bases.
-            for base in cls.__bases__:
-                if not (base in (object, typing.Generic) or
-                        base.__module__ == 'collections.abc' and
-                        base.__name__ in _PROTO_WHITELIST or
-                        isinstance(base, _ProtocolMeta) and base._is_protocol):
-                    raise TypeError('Protocols can only inherit from other'
-                                    f' protocols, got {repr(base)}')
-            cls.__init__ = _no_init
-# 3.6
-else:
-    from typing import _next_in_mro, _type_check  # noqa
-
-    def _no_init(self, *args, **kwargs):
-        if type(self)._is_protocol:
-            raise TypeError('Protocols cannot be instantiated')
-
-    class _ProtocolMeta(GenericMeta):
-        """Internal metaclass for Protocol.
-
-        This exists so Protocol classes can be generic without deriving
-        from Generic.
-        """
-        def __new__(cls, name, bases, namespace,
-                    tvars=None, args=None, origin=None, extra=None, orig_bases=None):
-            # This is just a version copied from GenericMeta.__new__ that
-            # includes "Protocol" special treatment. (Comments removed for brevity.)
-            assert extra is None  # Protocols should not have extra
-            if tvars is not None:
-                assert origin is not None
-                assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars
-            else:
-                tvars = _type_vars(bases)
-                gvars = None
-                for base in bases:
-                    if base is typing.Generic:
-                        raise TypeError("Cannot inherit from plain Generic")
-                    if (isinstance(base, GenericMeta) and
-                            base.__origin__ in (typing.Generic, Protocol)):
-                        if gvars is not None:
-                            raise TypeError(
-                                "Cannot inherit from Generic[...] or"
-                                " Protocol[...] multiple times.")
-                        gvars = base.__parameters__
-                if gvars is None:
-                    gvars = tvars
-                else:
-                    tvarset = set(tvars)
-                    gvarset = set(gvars)
-                    if not tvarset <= gvarset:
-                        s_vars = ", ".join(str(t) for t in tvars if t not in gvarset)
-                        s_args = ", ".join(str(g) for g in gvars)
-                        cls_name = "Generic" if any(b.__origin__ is typing.Generic
-                                                    for b in bases) else "Protocol"
-                        raise TypeError(f"Some type variables ({s_vars}) are"
-                                        f" not listed in {cls_name}[{s_args}]")
-                    tvars = gvars
-
-            initial_bases = bases
-            if (extra is not None and type(extra) is abc.ABCMeta and
-                    extra not in bases):
-                bases = (extra,) + bases
-            bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b
-                          for b in bases)
-            if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases):
-                bases = tuple(b for b in bases if b is not typing.Generic)
-            namespace.update({'__origin__': origin, '__extra__': extra})
-            self = super(GenericMeta, cls).__new__(cls, name, bases, namespace,
-                                                   _root=True)
-            super(GenericMeta, self).__setattr__('_gorg',
-                                                 self if not origin else
-                                                 _gorg(origin))
-            self.__parameters__ = tvars
-            self.__args__ = tuple(... if a is typing._TypingEllipsis else
-                                  () if a is typing._TypingEmpty else
-                                  a for a in args) if args else None
-            self.__next_in_mro__ = _next_in_mro(self)
-            if orig_bases is None:
-                self.__orig_bases__ = initial_bases
-            elif origin is not None:
-                self._abc_registry = origin._abc_registry
-                self._abc_cache = origin._abc_cache
-            if hasattr(self, '_subs_tree'):
-                self.__tree_hash__ = (hash(self._subs_tree()) if origin else
-                                      super(GenericMeta, self).__hash__())
-            return self
-
-        def __init__(cls, *args, **kwargs):
-            super().__init__(*args, **kwargs)
-            if not cls.__dict__.get('_is_protocol', None):
-                cls._is_protocol = any(b is Protocol or
-                                       isinstance(b, _ProtocolMeta) and
-                                       b.__origin__ is Protocol
-                                       for b in cls.__bases__)
-            if cls._is_protocol:
-                for base in cls.__mro__[1:]:
-                    if not (base in (object, typing.Generic) or
-                            base.__module__ == 'collections.abc' and
-                            base.__name__ in _PROTO_WHITELIST or
-                            isinstance(base, typing.TypingMeta) and base._is_protocol or
-                            isinstance(base, GenericMeta) and
-                            base.__origin__ is typing.Generic):
-                        raise TypeError(f'Protocols can only inherit from other'
-                                        f' protocols, got {repr(base)}')
-
-                cls.__init__ = _no_init
-
-            def _proto_hook(other):
-                if not cls.__dict__.get('_is_protocol', None):
-                    return NotImplemented
-                if not isinstance(other, type):
-                    # Same error as for issubclass(1, int)
-                    raise TypeError('issubclass() arg 1 must be a class')
-                for attr in _get_protocol_attrs(cls):
-                    for base in other.__mro__:
-                        if attr in base.__dict__:
-                            if base.__dict__[attr] is None:
-                                return NotImplemented
-                            break
-                        annotations = getattr(base, '__annotations__', {})
-                        if (isinstance(annotations, typing.Mapping) and
-                                attr in annotations and
-                                isinstance(other, _ProtocolMeta) and
-                                other._is_protocol):
-                            break
-                    else:
-                        return NotImplemented
-                return True
-            if '__subclasshook__' not in cls.__dict__:
-                cls.__subclasshook__ = _proto_hook
-
-        def __instancecheck__(self, instance):
-            # We need this method for situations where attributes are
-            # assigned in __init__.
-            if ((not getattr(self, '_is_protocol', False) or
-                    _is_callable_members_only(self)) and
-                    issubclass(instance.__class__, self)):
-                return True
-            if self._is_protocol:
-                if all(hasattr(instance, attr) and
-                        (not callable(getattr(self, attr, None)) or
-                         getattr(instance, attr) is not None)
-                        for attr in _get_protocol_attrs(self)):
-                    return True
-            return super(GenericMeta, self).__instancecheck__(instance)
-
-        def __subclasscheck__(self, cls):
-            if self.__origin__ is not None:
-                if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
-                    raise TypeError("Parameterized generics cannot be used with class "
-                                    "or instance checks")
-                return False
-            if (self.__dict__.get('_is_protocol', None) and
-                    not self.__dict__.get('_is_runtime_protocol', None)):
-                if sys._getframe(1).f_globals['__name__'] in ['abc',
-                                                              'functools',
-                                                              'typing']:
-                    return False
-                raise TypeError("Instance and class checks can only be used with"
-                                " @runtime protocols")
-            if (self.__dict__.get('_is_runtime_protocol', None) and
-                    not _is_callable_members_only(self)):
-                if sys._getframe(1).f_globals['__name__'] in ['abc',
-                                                              'functools',
-                                                              'typing']:
-                    return super(GenericMeta, self).__subclasscheck__(cls)
-                raise TypeError("Protocols with non-method members"
-                                " don't support issubclass()")
-            return super(GenericMeta, self).__subclasscheck__(cls)
-
-        @typing._tp_cache
-        def __getitem__(self, params):
-            # We also need to copy this from GenericMeta.__getitem__ to get
-            # special treatment of "Protocol". (Comments removed for brevity.)
-            if not isinstance(params, tuple):
-                params = (params,)
-            if not params and _gorg(self) is not typing.Tuple:
-                raise TypeError(
-                    f"Parameter list to {self.__qualname__}[...] cannot be empty")
-            msg = "Parameters to generic types must be types."
-            params = tuple(_type_check(p, msg) for p in params)
-            if self in (typing.Generic, Protocol):
-                if not all(isinstance(p, typing.TypeVar) for p in params):
-                    raise TypeError(
-                        f"Parameters to {repr(self)}[...] must all be type variables")
-                if len(set(params)) != len(params):
-                    raise TypeError(
-                        f"Parameters to {repr(self)}[...] must all be unique")
-                tvars = params
-                args = params
-            elif self in (typing.Tuple, typing.Callable):
-                tvars = _type_vars(params)
-                args = params
-            elif self.__origin__ in (typing.Generic, Protocol):
-                raise TypeError(f"Cannot subscript already-subscripted {repr(self)}")
-            else:
-                _check_generic(self, params)
-                tvars = _type_vars(params)
-                args = params
-
-            prepend = (self,) if self.__origin__ is None else ()
-            return self.__class__(self.__name__,
-                                  prepend + self.__bases__,
-                                  _no_slots_copy(self.__dict__),
-                                  tvars=tvars,
-                                  args=args,
-                                  origin=self,
-                                  extra=self.__extra__,
-                                  orig_bases=self.__orig_bases__)
-
-    class Protocol(metaclass=_ProtocolMeta):
-        """Base class for protocol classes. Protocol classes are defined as::
-
-          class Proto(Protocol):
-              def meth(self) -> int:
-                  ...
-
-        Such classes are primarily used with static type checkers that recognize
-        structural subtyping (static duck-typing), for example::
-
-          class C:
-              def meth(self) -> int:
-                  return 0
-
-          def func(x: Proto) -> int:
-              return x.meth()
-
-          func(C())  # Passes static type check
-
-        See PEP 544 for details. Protocol classes decorated with
-        @typing_extensions.runtime act as simple-minded runtime protocols that check
-        only the presence of given attributes, ignoring their type signatures.
-
-        Protocol classes can be generic; they are defined as::
-
-          class GenProto(Protocol[T]):
-              def meth(self) -> T:
-                  ...
-        """
-        __slots__ = ()
-        _is_protocol = True
-
-        def __new__(cls, *args, **kwds):
-            if _gorg(cls) is Protocol:
-                raise TypeError("Type Protocol cannot be instantiated; "
-                                "it can be used only as a base class")
-            return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds)
-
-
-# 3.8+
-if hasattr(typing, 'runtime_checkable'):
-    runtime_checkable = typing.runtime_checkable
-# 3.6-3.7
-else:
-    def runtime_checkable(cls):
-        """Mark a protocol class as a runtime protocol, so that it
-        can be used with isinstance() and issubclass(). Raise TypeError
-        if applied to a non-protocol class.
-
-        This allows a simple-minded structural check very similar to the
-        one-offs in collections.abc such as Hashable.
-        """
-        if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
-            raise TypeError('@runtime_checkable can only be applied to protocol classes,'
-                            f' got {cls!r}')
-        cls._is_runtime_protocol = True
-        return cls
-
-
-# Exists for backwards compatibility.
-runtime = runtime_checkable
-
-
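A minimal sketch of the backported ``runtime_checkable`` in use, assuming ``typing_extensions`` is importable; ``Closable`` and ``Resource`` are illustrative names, and the ``isinstance()`` check only confirms that the named method exists (signatures are not verified)::

    from typing_extensions import Protocol, runtime_checkable

    @runtime_checkable
    class Closable(Protocol):
        def close(self) -> None: ...

    class Resource:
        def close(self) -> None:  # structurally satisfies Closable
            print("closed")

    assert isinstance(Resource(), Closable)    # close() is present
    assert not isinstance(object(), Closable)  # no close() method
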
-# 3.8+
-if hasattr(typing, 'SupportsIndex'):
-    SupportsIndex = typing.SupportsIndex
-# 3.6-3.7
-else:
-    @runtime_checkable
-    class SupportsIndex(Protocol):
-        __slots__ = ()
-
-        @abc.abstractmethod
-        def __index__(self) -> int:
-            pass
-
-
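A brief usage sketch for the backported ``SupportsIndex`` protocol; ``OneBased`` is a hypothetical class showing that any object with ``__index__`` passes the runtime check and can be used wherever an integer index is expected::

    from typing_extensions import SupportsIndex

    class OneBased:
        def __init__(self, value: int) -> None:
            self.value = value

        def __index__(self) -> int:  # convert to a zero-based int
            return self.value - 1

    assert isinstance(OneBased(3), SupportsIndex)
    assert [10, 20, 30][OneBased(3)] == 30
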
-if sys.version_info >= (3, 9, 2):
-    # The standard library TypedDict in Python 3.8 does not store runtime information
-    # about which (if any) keys are optional.  See https://bugs.python.org/issue38834
-    # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
-    # keyword with old-style TypedDict().  See https://bugs.python.org/issue42059
-    TypedDict = typing.TypedDict
-else:
-    def _check_fails(cls, other):
-        try:
-            if sys._getframe(1).f_globals['__name__'] not in ['abc',
-                                                              'functools',
-                                                              'typing']:
-                # Typed dicts are only for static structural subtyping.
-                raise TypeError('TypedDict does not support instance and class checks')
-        except (AttributeError, ValueError):
-            pass
-        return False
-
-    def _dict_new(*args, **kwargs):
-        if not args:
-            raise TypeError('TypedDict.__new__(): not enough arguments')
-        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
-        return dict(*args, **kwargs)
-
-    _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)'
-
-    def _typeddict_new(*args, total=True, **kwargs):
-        if not args:
-            raise TypeError('TypedDict.__new__(): not enough arguments')
-        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
-        if args:
-            typename, args = args[0], args[1:]  # allow the "_typename" keyword to be passed
-        elif '_typename' in kwargs:
-            typename = kwargs.pop('_typename')
-            import warnings
-            warnings.warn("Passing '_typename' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            raise TypeError("TypedDict.__new__() missing 1 required positional "
-                            "argument: '_typename'")
-        if args:
-            try:
-                fields, = args  # allow the "_fields" keyword to be passed
-            except ValueError:
-                raise TypeError('TypedDict.__new__() takes from 2 to 3 '
-                                f'positional arguments but {len(args) + 2} '
-                                'were given')
-        elif '_fields' in kwargs and len(kwargs) == 1:
-            fields = kwargs.pop('_fields')
-            import warnings
-            warnings.warn("Passing '_fields' as keyword argument is deprecated",
-                          DeprecationWarning, stacklevel=2)
-        else:
-            fields = None
-
-        if fields is None:
-            fields = kwargs
-        elif kwargs:
-            raise TypeError("TypedDict takes either a dict or keyword arguments,"
-                            " but not both")
-
-        ns = {'__annotations__': dict(fields)}
-        try:
-            # Setting correct module is necessary to make typed dict classes pickleable.
-            ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
-        except (AttributeError, ValueError):
-            pass
-
-        return _TypedDictMeta(typename, (), ns, total=total)
-
-    _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
-                                         ' /, *, total=True, **kwargs)')
-
-    class _TypedDictMeta(type):
-        def __init__(cls, name, bases, ns, total=True):
-            super().__init__(name, bases, ns)
-
-        def __new__(cls, name, bases, ns, total=True):
-            # Create new typed dict class object.
-            # This method is called directly when TypedDict is subclassed,
-            # or via _typeddict_new when TypedDict is instantiated. This way
-            # TypedDict supports all three syntaxes described in its docstring.
-            # Subclasses and instances of TypedDict return actual dictionaries
-            # via _dict_new.
-            ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
-            tp_dict = super().__new__(cls, name, (dict,), ns)
-
-            annotations = {}
-            own_annotations = ns.get('__annotations__', {})
-            own_annotation_keys = set(own_annotations.keys())
-            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
-            own_annotations = {
-                n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
-            }
-            required_keys = set()
-            optional_keys = set()
-
-            for base in bases:
-                annotations.update(base.__dict__.get('__annotations__', {}))
-                required_keys.update(base.__dict__.get('__required_keys__', ()))
-                optional_keys.update(base.__dict__.get('__optional_keys__', ()))
-
-            annotations.update(own_annotations)
-            if total:
-                required_keys.update(own_annotation_keys)
-            else:
-                optional_keys.update(own_annotation_keys)
-
-            tp_dict.__annotations__ = annotations
-            tp_dict.__required_keys__ = frozenset(required_keys)
-            tp_dict.__optional_keys__ = frozenset(optional_keys)
-            if not hasattr(tp_dict, '__total__'):
-                tp_dict.__total__ = total
-            return tp_dict
-
-        __instancecheck__ = __subclasscheck__ = _check_fails
-
-    TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
-    TypedDict.__module__ = __name__
-    TypedDict.__doc__ = \
-        """A simple typed namespace. At runtime it is equivalent to a plain dict.
-
-        TypedDict creates a dictionary type that expects all of its
-        instances to have a certain set of keys, with each key
-        associated with a value of a consistent type. This expectation
-        is not checked at runtime but is only enforced by type checkers.
-        Usage::
-
-            class Point2D(TypedDict):
-                x: int
-                y: int
-                label: str
-
-            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
-            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
-
-            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
-
-        The type info can be accessed via the Point2D.__annotations__ dict, and
-        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
-        TypedDict supports two additional equivalent forms::
-
-            Point2D = TypedDict('Point2D', x=int, y=int, label=str)
-            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
-
-        The class syntax is only supported in Python 3.6+, while the two other
-        syntax forms work for Python 2.7 and 3.2+.
-        """
-
-
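The backport's chief advantage over the 3.8/3.9.0 standard-library versions is that it records which keys are required. A short sketch, with ``Point2D`` and ``Point3D`` as illustrative names::

    from typing_extensions import TypedDict

    class Point2D(TypedDict, total=False):
        x: int
        y: int

    class Point3D(Point2D):  # total=True (the default) for this class
        z: int

    assert Point3D.__required_keys__ == frozenset({'z'})
    assert Point3D.__optional_keys__ == frozenset({'x', 'y'})
    assert Point3D(x=1, y=2, z=3) == {'x': 1, 'y': 2, 'z': 3}
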
-# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints)
-if hasattr(typing, 'Annotated'):
-    Annotated = typing.Annotated
-    get_type_hints = typing.get_type_hints
-    # Not exported and not a public API, but needed for get_origin() and get_args()
-    # to work.
-    _AnnotatedAlias = typing._AnnotatedAlias
-# 3.7-3.8
-elif PEP_560:
-    class _AnnotatedAlias(typing._GenericAlias, _root=True):
-        """Runtime representation of an annotated type.
-
-        At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
-        with extra annotations. The alias behaves like a normal typing alias,
-        instantiating is the same as instantiating the underlying type, binding
-        it to types is also the same.
-        """
-        def __init__(self, origin, metadata):
-            if isinstance(origin, _AnnotatedAlias):
-                metadata = origin.__metadata__ + metadata
-                origin = origin.__origin__
-            super().__init__(origin, origin)
-            self.__metadata__ = metadata
-
-        def copy_with(self, params):
-            assert len(params) == 1
-            new_type = params[0]
-            return _AnnotatedAlias(new_type, self.__metadata__)
-
-        def __repr__(self):
-            return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
-                    f"{', '.join(repr(a) for a in self.__metadata__)}]")
-
-        def __reduce__(self):
-            return operator.getitem, (
-                Annotated, (self.__origin__,) + self.__metadata__
-            )
-
-        def __eq__(self, other):
-            if not isinstance(other, _AnnotatedAlias):
-                return NotImplemented
-            if self.__origin__ != other.__origin__:
-                return False
-            return self.__metadata__ == other.__metadata__
-
-        def __hash__(self):
-            return hash((self.__origin__, self.__metadata__))
-
-    class Annotated:
-        """Add context specific metadata to a type.
-
-        Example: Annotated[int, runtime_check.Unsigned] indicates to the
-        hypothetical runtime_check module that this type is an unsigned int.
-        Every other consumer of this type can ignore this metadata and treat
-        this type as int.
-
-        The first argument to Annotated must be a valid type (and will be in
-        the __origin__ field), the remaining arguments are kept as a tuple in
-        the __extra__ field.
-
-        Details:
-
-        - It's an error to call `Annotated` with less than two arguments.
-        - Nested Annotated are flattened::
-
-            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
-
-        - Instantiating an annotated type is equivalent to instantiating the
-        underlying type::
-
-            Annotated[C, Ann1](5) == C(5)
-
-        - Annotated can be used as a generic type alias::
-
-            Optimized = Annotated[T, runtime.Optimize()]
-            Optimized[int] == Annotated[int, runtime.Optimize()]
-
-            OptimizedList = Annotated[List[T], runtime.Optimize()]
-            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
-        """
-
-        __slots__ = ()
-
-        def __new__(cls, *args, **kwargs):
-            raise TypeError("Type Annotated cannot be instantiated.")
-
-        @typing._tp_cache
-        def __class_getitem__(cls, params):
-            if not isinstance(params, tuple) or len(params) < 2:
-                raise TypeError("Annotated[...] should be used "
-                                "with at least two arguments (a type and an "
-                                "annotation).")
-            msg = "Annotated[t, ...]: t must be a type."
-            origin = typing._type_check(params[0], msg)
-            metadata = tuple(params[1:])
-            return _AnnotatedAlias(origin, metadata)
-
-        def __init_subclass__(cls, *args, **kwargs):
-            raise TypeError(
-                f"Cannot subclass {cls.__module__}.Annotated"
-            )
-
-    def _strip_annotations(t):
-        """Strips the annotations from a given type.
-        """
-        if isinstance(t, _AnnotatedAlias):
-            return _strip_annotations(t.__origin__)
-        if isinstance(t, typing._GenericAlias):
-            stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
-            if stripped_args == t.__args__:
-                return t
-            res = t.copy_with(stripped_args)
-            res._special = t._special
-            return res
-        return t
-
-    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
-        """Return type hints for an object.
-
-        This is often the same as obj.__annotations__, but it handles
-        forward references encoded as string literals, adds Optional[t] if a
-        default value equal to None is set and recursively replaces all
-        'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
-
-        The argument may be a module, class, method, or function. The annotations
-        are returned as a dictionary. For classes, annotations include also
-        inherited members.
-
-        TypeError is raised if the argument is not of a type that can contain
-        annotations, and an empty dictionary is returned if no annotations are
-        present.
-
-        BEWARE -- the behavior of globalns and localns is counterintuitive
-        (unless you are familiar with how eval() and exec() work).  The
-        search order is locals first, then globals.
-
-        - If no dict arguments are passed, an attempt is made to use the
-          globals from obj (or the respective module's globals for classes),
-          and these are also used as the locals.  If the object does not appear
-          to have globals, an empty dictionary is used.
-
-        - If one dict argument is passed, it is used for both globals and
-          locals.
-
-        - If two dict arguments are passed, they specify globals and
-          locals, respectively.
-        """
-        hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
-        if include_extras:
-            return hint
-        return {k: _strip_annotations(t) for k, t in hint.items()}
-# 3.6
-else:
-
-    def _is_dunder(name):
-        """Returns True if name is a __dunder_variable_name__."""
-        return len(name) > 4 and name.startswith('__') and name.endswith('__')
-
-    # Prior to Python 3.7 types did not have `copy_with`. A lot of the equality
-    # checks, argument expansion, etc. are done on the _subs_tree. As a result we
-    # can't provide a get_type_hints function that strips out annotations.
-
-    class AnnotatedMeta(typing.GenericMeta):
-        """Metaclass for Annotated"""
-
-        def __new__(cls, name, bases, namespace, **kwargs):
-            if any(b is not object for b in bases):
-                raise TypeError("Cannot subclass " + str(Annotated))
-            return super().__new__(cls, name, bases, namespace, **kwargs)
-
-        @property
-        def __metadata__(self):
-            return self._subs_tree()[2]
-
-        def _tree_repr(self, tree):
-            cls, origin, metadata = tree
-            if not isinstance(origin, tuple):
-                tp_repr = typing._type_repr(origin)
-            else:
-                tp_repr = origin[0]._tree_repr(origin)
-            metadata_reprs = ", ".join(repr(arg) for arg in metadata)
-            return f'{cls}[{tp_repr}, {metadata_reprs}]'
-
-        def _subs_tree(self, tvars=None, args=None):  # noqa
-            if self is Annotated:
-                return Annotated
-            res = super()._subs_tree(tvars=tvars, args=args)
-            # Flatten nested Annotated
-            if isinstance(res[1], tuple) and res[1][0] is Annotated:
-                sub_tp = res[1][1]
-                sub_annot = res[1][2]
-                return (Annotated, sub_tp, sub_annot + res[2])
-            return res
-
-        def _get_cons(self):
-            """Return the class used to create instance of this type."""
-            if self.__origin__ is None:
-                raise TypeError("Cannot get the underlying type of a "
-                                "non-specialized Annotated type.")
-            tree = self._subs_tree()
-            while isinstance(tree, tuple) and tree[0] is Annotated:
-                tree = tree[1]
-            if isinstance(tree, tuple):
-                return tree[0]
-            else:
-                return tree
-
-        @typing._tp_cache
-        def __getitem__(self, params):
-            if not isinstance(params, tuple):
-                params = (params,)
-            if self.__origin__ is not None:  # specializing an instantiated type
-                return super().__getitem__(params)
-            elif not isinstance(params, tuple) or len(params) < 2:
-                raise TypeError("Annotated[...] should be used "
-                                "with at least two arguments (a type and an "
-                                "annotation).")
-            else:
-                msg = "Annotated[t, ...]: t must be a type."
-                tp = typing._type_check(params[0], msg)
-                metadata = tuple(params[1:])
-            return self.__class__(
-                self.__name__,
-                self.__bases__,
-                _no_slots_copy(self.__dict__),
-                tvars=_type_vars((tp,)),
-                # Metadata is a tuple so it won't be touched by _replace_args et al.
-                args=(tp, metadata),
-                origin=self,
-            )
-
-        def __call__(self, *args, **kwargs):
-            cons = self._get_cons()
-            result = cons(*args, **kwargs)
-            try:
-                result.__orig_class__ = self
-            except AttributeError:
-                pass
-            return result
-
-        def __getattr__(self, attr):
-            # For simplicity we just don't relay all dunder names
-            if self.__origin__ is not None and not _is_dunder(attr):
-                return getattr(self._get_cons(), attr)
-            raise AttributeError(attr)
-
-        def __setattr__(self, attr, value):
-            if _is_dunder(attr) or attr.startswith('_abc_'):
-                super().__setattr__(attr, value)
-            elif self.__origin__ is None:
-                raise AttributeError(attr)
-            else:
-                setattr(self._get_cons(), attr, value)
-
-        def __instancecheck__(self, obj):
-            raise TypeError("Annotated cannot be used with isinstance().")
-
-        def __subclasscheck__(self, cls):
-            raise TypeError("Annotated cannot be used with issubclass().")
-
-    class Annotated(metaclass=AnnotatedMeta):
-        """Add context specific metadata to a type.
-
-        Example: Annotated[int, runtime_check.Unsigned] indicates to the
-        hypothetical runtime_check module that this type is an unsigned int.
-        Every other consumer of this type can ignore this metadata and treat
-        this type as int.
-
-        The first argument to Annotated must be a valid type, the remaining
-        arguments are kept as a tuple in the __metadata__ field.
-
-        Details:
-
-        - It's an error to call `Annotated` with less than two arguments.
-        - Nested Annotated are flattened::
-
-            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
-
-        - Instantiating an annotated type is equivalent to instantiating the
-        underlying type::
-
-            Annotated[C, Ann1](5) == C(5)
-
-        - Annotated can be used as a generic type alias::
-
-            Optimized = Annotated[T, runtime.Optimize()]
-            Optimized[int] == Annotated[int, runtime.Optimize()]
-
-            OptimizedList = Annotated[List[T], runtime.Optimize()]
-            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
-        """
-
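A short sketch of the round trip through ``get_type_hints``, assuming Python 3.7+ (as noted above, the 3.6 fallback cannot strip annotations); the metadata string is arbitrary::

    from typing_extensions import Annotated, get_type_hints

    def scale(x: Annotated[int, "units: px"]) -> None: ...

    # By default the metadata is stripped away.
    assert get_type_hints(scale) == {'x': int, 'return': type(None)}

    # With include_extras=True the Annotated alias survives intact.
    hints = get_type_hints(scale, include_extras=True)
    assert hints['x'].__metadata__ == ("units: px",)
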
-# Python 3.8 has get_origin() and get_args() but those implementations aren't
-# Annotated-aware, so we can't use those. Python 3.9's versions don't support
-# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
-if sys.version_info[:2] >= (3, 10):
-    get_origin = typing.get_origin
-    get_args = typing.get_args
-# 3.7-3.9
-elif PEP_560:
-    try:
-        # 3.9+
-        from typing import _BaseGenericAlias
-    except ImportError:
-        _BaseGenericAlias = typing._GenericAlias
-    try:
-        # 3.9+
-        from typing import GenericAlias
-    except ImportError:
-        GenericAlias = typing._GenericAlias
-
-    def get_origin(tp):
-        """Get the unsubscripted version of a type.
-
-        This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
-        and Annotated. Return None for unsupported types. Examples::
-
-            get_origin(Literal[42]) is Literal
-            get_origin(int) is None
-            get_origin(ClassVar[int]) is ClassVar
-            get_origin(Generic) is Generic
-            get_origin(Generic[T]) is Generic
-            get_origin(Union[T, int]) is Union
-            get_origin(List[Tuple[T, T]][int]) == list
-            get_origin(P.args) is P
-        """
-        if isinstance(tp, _AnnotatedAlias):
-            return Annotated
-        if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias,
-                           ParamSpecArgs, ParamSpecKwargs)):
-            return tp.__origin__
-        if tp is typing.Generic:
-            return typing.Generic
-        return None
-
-    def get_args(tp):
-        """Get type arguments with all substitutions performed.
-
-        For unions, basic simplifications used by Union constructor are performed.
-        Examples::
-            get_args(Dict[str, int]) == (str, int)
-            get_args(int) == ()
-            get_args(Union[int, Union[T, int], str][int]) == (int, str)
-            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
-            get_args(Callable[[], T][int]) == ([], int)
-        """
-        if isinstance(tp, _AnnotatedAlias):
-            return (tp.__origin__,) + tp.__metadata__
-        if isinstance(tp, (typing._GenericAlias, GenericAlias)):
-            if getattr(tp, "_special", False):
-                return ()
-            res = tp.__args__
-            if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
-                res = (list(res[:-1]), res[-1])
-            return res
-        return ()
-
-
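A few representative calls, assuming Python 3.7+ so that this branch (or the 3.10+ standard-library version) is in effect::

    from typing import Dict, Union
    from typing_extensions import Annotated, get_args, get_origin

    assert get_origin(Dict[str, int]) is dict
    assert get_args(Dict[str, int]) == (str, int)
    assert get_origin(Annotated[int, "meta"]) is Annotated
    assert get_args(Annotated[int, "meta"]) == (int, "meta")
    assert get_args(Union[int, str]) == (int, str)
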
-# 3.10+
-if hasattr(typing, 'TypeAlias'):
-    TypeAlias = typing.TypeAlias
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
-    class _TypeAliasForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-    @_TypeAliasForm
-    def TypeAlias(self, parameters):
-        """Special marker indicating that an assignment should
-        be recognized as a proper type alias definition by type
-        checkers.
-
-        For example::
-
-            Predicate: TypeAlias = Callable[..., bool]
-
-        It's invalid when used anywhere except as in the example above.
-        """
-        raise TypeError(f"{self} is not subscriptable")
-# 3.7-3.8
-elif sys.version_info[:2] >= (3, 7):
-    class _TypeAliasForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-    TypeAlias = _TypeAliasForm('TypeAlias',
-                               doc="""Special marker indicating that an assignment should
-                               be recognized as a proper type alias definition by type
-                               checkers.
-
-                               For example::
-
-                                   Predicate: TypeAlias = Callable[..., bool]
-
-                               It's invalid when used anywhere except as in the example
-                               above.""")
-# 3.6
-else:
-    class _TypeAliasMeta(typing.TypingMeta):
-        """Metaclass for TypeAlias"""
-
-        def __repr__(self):
-            return 'typing_extensions.TypeAlias'
-
-    class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True):
-        """Special marker indicating that an assignment should
-        be recognized as a proper type alias definition by type
-        checkers.
-
-        For example::
-
-            Predicate: TypeAlias = Callable[..., bool]
-
-        It's invalid when used anywhere except as in the example above.
-        """
-        __slots__ = ()
-
-        def __instancecheck__(self, obj):
-            raise TypeError("TypeAlias cannot be used with isinstance().")
-
-        def __subclasscheck__(self, cls):
-            raise TypeError("TypeAlias cannot be used with issubclass().")
-
-        def __repr__(self):
-            return 'typing_extensions.TypeAlias'
-
-    TypeAlias = _TypeAliasBase(_root=True)
-
-
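Whichever branch supplies it, ``TypeAlias`` is used only in an annotation position; a minimal sketch (``Predicate`` and ``apply`` are illustrative names)::

    from typing import Callable
    from typing_extensions import TypeAlias

    # The annotation tells checkers this is a type alias, not a variable.
    Predicate: TypeAlias = Callable[..., bool]

    def apply(p: Predicate, value: object) -> bool:
        return p(value)

    assert apply(callable, len) is True
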
-# Python 3.10+ has PEP 612
-if hasattr(typing, 'ParamSpecArgs'):
-    ParamSpecArgs = typing.ParamSpecArgs
-    ParamSpecKwargs = typing.ParamSpecKwargs
-# 3.6-3.9
-else:
-    class _Immutable:
-        """Mixin to indicate that object should not be copied."""
-        __slots__ = ()
-
-        def __copy__(self):
-            return self
-
-        def __deepcopy__(self, memo):
-            return self
-
-    class ParamSpecArgs(_Immutable):
-        """The args for a ParamSpec object.
-
-        Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
-
-        ParamSpecArgs objects have a reference back to their ParamSpec:
-
-        P.args.__origin__ is P
-
-        This type is meant for runtime introspection and has no special meaning to
-        static type checkers.
-        """
-        def __init__(self, origin):
-            self.__origin__ = origin
-
-        def __repr__(self):
-            return f"{self.__origin__.__name__}.args"
-
-    class ParamSpecKwargs(_Immutable):
-        """The kwargs for a ParamSpec object.
-
-        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
-
-        ParamSpecKwargs objects have a reference back to their ParamSpec:
-
-        P.kwargs.__origin__ is P
-
-        This type is meant for runtime introspection and has no special meaning to
-        static type checkers.
-        """
-        def __init__(self, origin):
-            self.__origin__ = origin
-
-        def __repr__(self):
-            return f"{self.__origin__.__name__}.kwargs"
-
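These two classes exist purely for runtime introspection; a quick sketch of the back-reference they carry::

    from typing_extensions import ParamSpec, ParamSpecArgs, ParamSpecKwargs

    P = ParamSpec('P')

    assert isinstance(P.args, ParamSpecArgs)
    assert isinstance(P.kwargs, ParamSpecKwargs)
    assert P.args.__origin__ is P  # back-reference to the owning ParamSpec
    assert repr(P.kwargs) == 'P.kwargs'
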
-# 3.10+
-if hasattr(typing, 'ParamSpec'):
-    ParamSpec = typing.ParamSpec
-# 3.6-3.9
-else:
-
-    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
-    class ParamSpec(list):
-        """Parameter specification variable.
-
-        Usage::
-
-           P = ParamSpec('P')
-
-        Parameter specification variables exist primarily for the benefit of static
-        type checkers.  They are used to forward the parameter types of one
-        callable to another callable, a pattern commonly found in higher order
-        functions and decorators.  They are only valid when used in ``Concatenate``,
-        or as the first argument to ``Callable``. In Python 3.10 and higher,
-        they are also supported in user-defined Generics at runtime.
-        See class Generic for more information on generic types.  An
-        example for annotating a decorator::
-
-           T = TypeVar('T')
-           P = ParamSpec('P')
-
-           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
-               '''A type-safe decorator to add logging to a function.'''
-               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
-                   logging.info(f'{f.__name__} was called')
-                   return f(*args, **kwargs)
-               return inner
-
-           @add_logging
-           def add_two(x: float, y: float) -> float:
-               '''Add two numbers together.'''
-               return x + y
-
-        Parameter specification variables defined with covariant=True or
-        contravariant=True can be used to declare covariant or contravariant
-        generic types.  These keyword arguments are valid, but their actual semantics
-        are yet to be decided.  See PEP 612 for details.
-
-        Parameter specification variables can be introspected, e.g.::
-
-           P.__name__ == 'P'
-           P.__bound__ == None
-           P.__covariant__ == False
-           P.__contravariant__ == False
-
-        Note that only parameter specification variables defined in global scope can
-        be pickled.
-        """
-
-        # Trick Generic __parameters__.
-        __class__ = typing.TypeVar
-
-        @property
-        def args(self):
-            return ParamSpecArgs(self)
-
-        @property
-        def kwargs(self):
-            return ParamSpecKwargs(self)
-
-        def __init__(self, name, *, bound=None, covariant=False, contravariant=False):
-            super().__init__([self])
-            self.__name__ = name
-            self.__covariant__ = bool(covariant)
-            self.__contravariant__ = bool(contravariant)
-            if bound:
-                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
-            else:
-                self.__bound__ = None
-
-            # for pickling:
-            try:
-                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
-            except (AttributeError, ValueError):
-                def_mod = None
-            if def_mod != 'typing_extensions':
-                self.__module__ = def_mod
-
-        def __repr__(self):
-            if self.__covariant__:
-                prefix = '+'
-            elif self.__contravariant__:
-                prefix = '-'
-            else:
-                prefix = '~'
-            return prefix + self.__name__
-
-        def __hash__(self):
-            return object.__hash__(self)
-
-        def __eq__(self, other):
-            return self is other
-
-        def __reduce__(self):
-            return self.__name__
-
-        # Hack to get typing._type_check to pass.
-        def __call__(self, *args, **kwargs):
-            pass
-
-        if not PEP_560:
-            # Only needed in 3.6.
-            def _get_type_vars(self, tvars):
-                if self not in tvars:
-                    tvars.append(self)
-
-
-# 3.6-3.9
-if not hasattr(typing, 'Concatenate'):
-    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
-    class _ConcatenateGenericAlias(list):
-
-        # Trick Generic into looking into this for __parameters__.
-        if PEP_560:
-            __class__ = typing._GenericAlias
-        else:
-            __class__ = typing._TypingBase
-
-        # Flag in 3.8.
-        _special = False
-        # Attribute in 3.6 and earlier.
-        _gorg = typing.Generic
-
-        def __init__(self, origin, args):
-            super().__init__(args)
-            self.__origin__ = origin
-            self.__args__ = args
-
-        def __repr__(self):
-            _type_repr = typing._type_repr
-            return (f'{_type_repr(self.__origin__)}'
-                    f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
-
-        def __hash__(self):
-            return hash((self.__origin__, self.__args__))
-
-        # Hack to get typing._type_check to pass in Generic.
-        def __call__(self, *args, **kwargs):
-            pass
-
-        @property
-        def __parameters__(self):
-            return tuple(
-                tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
-            )
-
-        if not PEP_560:
-            # Only required in 3.6.
-            def _get_type_vars(self, tvars):
-                if self.__origin__ and self.__parameters__:
-                    typing._get_type_vars(self.__parameters__, tvars)
-
-
-# 3.6-3.9
-@typing._tp_cache
-def _concatenate_getitem(self, parameters):
-    if parameters == ():
-        raise TypeError("Cannot take a Concatenate of no types.")
-    if not isinstance(parameters, tuple):
-        parameters = (parameters,)
-    if not isinstance(parameters[-1], ParamSpec):
-        raise TypeError("The last parameter to Concatenate should be a "
-                        "ParamSpec variable.")
-    msg = "Concatenate[arg, ...]: each arg must be a type."
-    parameters = tuple(typing._type_check(p, msg) for p in parameters)
-    return _ConcatenateGenericAlias(self, parameters)
-
-
-# 3.10+
-if hasattr(typing, 'Concatenate'):
-    Concatenate = typing.Concatenate
-    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias  # noqa
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
-    @_TypeAliasForm
-    def Concatenate(self, parameters):
-        """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
-        higher order function which adds, removes or transforms parameters of a
-        callable.
-
-        For example::
-
-           Callable[Concatenate[int, P], int]
-
-        See PEP 612 for detailed information.
-        """
-        return _concatenate_getitem(self, parameters)
-# 3.7-8
-elif sys.version_info[:2] >= (3, 7):
-    class _ConcatenateForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            return _concatenate_getitem(self, parameters)
-
-    Concatenate = _ConcatenateForm(
-        'Concatenate',
-        doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
-        higher order function which adds, removes or transforms parameters of a
-        callable.
-
-        For example::
-
-           Callable[Concatenate[int, P], int]
-
-        See PEP 612 for detailed information.
-        """)
-# 3.6
-else:
-    class _ConcatenateAliasMeta(typing.TypingMeta):
-        """Metaclass for Concatenate."""
-
-        def __repr__(self):
-            return 'typing_extensions.Concatenate'
-
-    class _ConcatenateAliasBase(typing._FinalTypingBase,
-                                metaclass=_ConcatenateAliasMeta,
-                                _root=True):
-        """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
-        higher order function which adds, removes or transforms parameters of a
-        callable.
-
-        For example::
-
-           Callable[Concatenate[int, P], int]
-
-        See PEP 612 for detailed information.
-        """
-        __slots__ = ()
-
-        def __instancecheck__(self, obj):
-            raise TypeError("Concatenate cannot be used with isinstance().")
-
-        def __subclasscheck__(self, cls):
-            raise TypeError("Concatenate cannot be used with issubclass().")
-
-        def __repr__(self):
-            return 'typing_extensions.Concatenate'
-
-        def __getitem__(self, parameters):
-            return _concatenate_getitem(self, parameters)
-
-    Concatenate = _ConcatenateAliasBase(_root=True)
-
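A runnable sketch of the decorator pattern from the docstring; the fixed leading argument (here, a retry count of 3) and all names are illustrative::

    from typing import Callable, TypeVar
    from typing_extensions import Concatenate, ParamSpec

    P = ParamSpec('P')
    T = TypeVar('T')

    def with_retries(f: Callable[Concatenate[int, P], T]) -> Callable[P, T]:
        # Supply the extra leading int argument; 3 is arbitrary.
        def inner(*args: P.args, **kwargs: P.kwargs) -> T:
            return f(3, *args, **kwargs)
        return inner

    @with_retries
    def fetch(retries: int, url: str) -> str:
        return f"{url} ({retries} retries)"

    assert fetch("https://example.com") == "https://example.com (3 retries)"
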
-# 3.10+
-if hasattr(typing, 'TypeGuard'):
-    TypeGuard = typing.TypeGuard
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
-    class _TypeGuardForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-    @_TypeGuardForm
-    def TypeGuard(self, parameters):
-        """Special typing form used to annotate the return type of a user-defined
-        type guard function.  ``TypeGuard`` only accepts a single type argument.
-        At runtime, functions marked this way should return a boolean.
-
-        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
-        type checkers to determine a more precise type of an expression within a
-        program's code flow.  Usually type narrowing is done by analyzing
-        conditional code flow and applying the narrowing to a block of code.  The
-        conditional expression here is sometimes referred to as a "type guard".
-
-        Sometimes it would be convenient to use a user-defined boolean function
-        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
-        return type to alert static type checkers to this intention.
-
-        Using ``-> TypeGuard`` tells the static type checker that for a given
-        function:
-
-        1. The return value is a boolean.
-        2. If the return value is ``True``, the type of its argument
-        is the type inside ``TypeGuard``.
-
-        For example::
-
-            def is_str(val: Union[str, float]):
-                # "isinstance" type guard
-                if isinstance(val, str):
-                    # Type of ``val`` is narrowed to ``str``
-                    ...
-                else:
-                    # Else, type of ``val`` is narrowed to ``float``.
-                    ...
-
-        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
-        form of ``TypeA`` (it can even be a wider form) and this may lead to
-        type-unsafe results.  The main reason is to allow for things like
-        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
-        a subtype of the former, since ``List`` is invariant.  The responsibility of
-        writing type-safe type guards is left to the user.
-
-        ``TypeGuard`` also works with type variables.  For more information, see
-        PEP 647 (User-Defined Type Guards).
-        """
-        item = typing._type_check(parameters, f'{self} accepts only single type.')
-        return typing._GenericAlias(self, (item,))
-# 3.7-3.8
-elif sys.version_info[:2] >= (3, 7):
-    class _TypeGuardForm(typing._SpecialForm, _root=True):
-
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type')
-            return typing._GenericAlias(self, (item,))
-
-    TypeGuard = _TypeGuardForm(
-        'TypeGuard',
-        doc="""Special typing form used to annotate the return type of a user-defined
-        type guard function.  ``TypeGuard`` only accepts a single type argument.
-        At runtime, functions marked this way should return a boolean.
-
-        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
-        type checkers to determine a more precise type of an expression within a
-        program's code flow.  Usually type narrowing is done by analyzing
-        conditional code flow and applying the narrowing to a block of code.  The
-        conditional expression here is sometimes referred to as a "type guard".
-
-        Sometimes it would be convenient to use a user-defined boolean function
-        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
-        return type to alert static type checkers to this intention.
-
-        Using ``-> TypeGuard`` tells the static type checker that for a given
-        function:
-
-        1. The return value is a boolean.
-        2. If the return value is ``True``, the type of its argument
-        is the type inside ``TypeGuard``.
-
-        For example::
-
-            def is_str(val: Union[str, float]):
-                # "isinstance" type guard
-                if isinstance(val, str):
-                    # Type of ``val`` is narrowed to ``str``
-                    ...
-                else:
-                    # Else, type of ``val`` is narrowed to ``float``.
-                    ...
-
-        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
-        form of ``TypeA`` (it can even be a wider form) and this may lead to
-        type-unsafe results.  The main reason is to allow for things like
-        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
-        a subtype of the former, since ``List`` is invariant.  The responsibility of
-        writing type-safe type guards is left to the user.
-
-        ``TypeGuard`` also works with type variables.  For more information, see
-        PEP 647 (User-Defined Type Guards).
-        """)
-# 3.6
-else:
-    class _TypeGuard(typing._FinalTypingBase, _root=True):
-        """Special typing form used to annotate the return type of a user-defined
-        type guard function.  ``TypeGuard`` only accepts a single type argument.
-        At runtime, functions marked this way should return a boolean.
-
-        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
-        type checkers to determine a more precise type of an expression within a
-        program's code flow.  Usually type narrowing is done by analyzing
-        conditional code flow and applying the narrowing to a block of code.  The
-        conditional expression here is sometimes referred to as a "type guard".
-
-        Sometimes it would be convenient to use a user-defined boolean function
-        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
-        return type to alert static type checkers to this intention.
-
-        Using ``-> TypeGuard`` tells the static type checker that for a given
-        function:
-
-        1. The return value is a boolean.
-        2. If the return value is ``True``, the type of its argument
-        is the type inside ``TypeGuard``.
-
-        For example::
-
-            def is_str(val: Union[str, float]):
-                # "isinstance" type guard
-                if isinstance(val, str):
-                    # Type of ``val`` is narrowed to ``str``
-                    ...
-                else:
-                    # Else, type of ``val`` is narrowed to ``float``.
-                    ...
-
-        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
-        form of ``TypeA`` (it can even be a wider form) and this may lead to
-        type-unsafe results.  The main reason is to allow for things like
-        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
-        a subtype of the former, since ``List`` is invariant.  The responsibility of
-        writing type-safe type guards is left to the user.
-
-        ``TypeGuard`` also works with type variables.  For more information, see
-        PEP 647 (User-Defined Type Guards).
-        """
-
-        __slots__ = ('__type__',)
-
-        def __init__(self, tp=None, **kwds):
-            self.__type__ = tp
-
-        def __getitem__(self, item):
-            cls = type(self)
-            if self.__type__ is None:
-                return cls(typing._type_check(item,
-                           f'{cls.__name__[1:]} accepts only a single type.'),
-                           _root=True)
-            raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
-
-        def _eval_type(self, globalns, localns):
-            new_tp = typing._eval_type(self.__type__, globalns, localns)
-            if new_tp == self.__type__:
-                return self
-            return type(self)(new_tp, _root=True)
-
-        def __repr__(self):
-            r = super().__repr__()
-            if self.__type__ is not None:
-                r += f'[{typing._type_repr(self.__type__)}]'
-            return r
-
-        def __hash__(self):
-            return hash((type(self).__name__, self.__type__))
-
-        def __eq__(self, other):
-            if not isinstance(other, _TypeGuard):
-                return NotImplemented
-            if self.__type__ is not None:
-                return self.__type__ == other.__type__
-            return self is other
-
-    TypeGuard = _TypeGuard(_root=True)
-
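A sketch of the user-defined guard described above; ``is_str_list`` is the conventional example from PEP 647::

    from typing import List
    from typing_extensions import TypeGuard

    def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
        """Narrow ``List[object]`` to ``List[str]`` for static checkers."""
        return all(isinstance(x, str) for x in val)

    def first_upper(val: List[object]) -> str:
        if is_str_list(val):
            return val[0].upper()  # checkers see ``val`` as List[str] here
        return ""

    assert first_upper(["abc", "def"]) == "ABC"
    assert first_upper([1, 2]) == ""
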
-if hasattr(typing, "Self"):
-    Self = typing.Self
-elif sys.version_info[:2] >= (3, 7):
-    # Vendored from cpython typing._SpecialFrom
-    class _SpecialForm(typing._Final, _root=True):
-        __slots__ = ('_name', '__doc__', '_getitem')
-
-        def __init__(self, getitem):
-            self._getitem = getitem
-            self._name = getitem.__name__
-            self.__doc__ = getitem.__doc__
-
-        def __getattr__(self, item):
-            if item in {'__name__', '__qualname__'}:
-                return self._name
-
-            raise AttributeError(item)
-
-        def __mro_entries__(self, bases):
-            raise TypeError(f"Cannot subclass {self!r}")
-
-        def __repr__(self):
-            return f'typing_extensions.{self._name}'
-
-        def __reduce__(self):
-            return self._name
-
-        def __call__(self, *args, **kwds):
-            raise TypeError(f"Cannot instantiate {self!r}")
-
-        def __or__(self, other):
-            return typing.Union[self, other]
-
-        def __ror__(self, other):
-            return typing.Union[other, self]
-
-        def __instancecheck__(self, obj):
-            raise TypeError(f"{self} cannot be used with isinstance()")
-
-        def __subclasscheck__(self, cls):
-            raise TypeError(f"{self} cannot be used with issubclass()")
-
-        @typing._tp_cache
-        def __getitem__(self, parameters):
-            return self._getitem(self, parameters)
-
-    @_SpecialForm
-    def Self(self, params):
-        """Used to spell the type of "self" in classes.
-
-        Example::
-
-          from typing import Self
-
-          class ReturnsSelf:
-              def parse(self, data: bytes) -> Self:
-                  ...
-                  return self
-
-        """
-
-        raise TypeError(f"{self} is not subscriptable")
-else:
-    class _Self(typing._FinalTypingBase, _root=True):
-        """Used to spell the type of "self" in classes.
-
-        Example::
-
-          from typing import Self
-
-          class ReturnsSelf:
-              def parse(self, data: bytes) -> Self:
-                  ...
-                  return self
-
-        """
-
-        __slots__ = ()
-
-        def __instancecheck__(self, obj):
-            raise TypeError(f"{self} cannot be used with isinstance().")
-
-        def __subclasscheck__(self, cls):
-            raise TypeError(f"{self} cannot be used with issubclass().")
-
-    Self = _Self(_root=True)
-
-
-if hasattr(typing, 'Required'):
-    Required = typing.Required
-    NotRequired = typing.NotRequired
-elif sys.version_info[:2] >= (3, 9):
-    class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-    @_ExtensionsSpecialForm
-    def Required(self, parameters):
-        """A special typing construct to mark a key of a total=False TypedDict
-        as required. For example:
-
-            class Movie(TypedDict, total=False):
-                title: Required[str]
-                year: int
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-
-        There is no runtime checking that a required key is actually provided
-        when instantiating a related TypedDict.
-        """
-        item = typing._type_check(parameters, f'{self._name} accepts only single type')
-        return typing._GenericAlias(self, (item,))
-
-    @_ExtensionsSpecialForm
-    def NotRequired(self, parameters):
-        """A special typing construct to mark a key of a TypedDict as
-        potentially missing. For example:
-
-            class Movie(TypedDict):
-                title: str
-                year: NotRequired[int]
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-        """
-        item = typing._type_check(parameters, f'{self._name} accepts only single type')
-        return typing._GenericAlias(self, (item,))
-
-elif sys.version_info[:2] >= (3, 7):
-    class _RequiredForm(typing._SpecialForm, _root=True):
-        def __repr__(self):
-            return 'typing_extensions.' + self._name
-
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      '{} accepts only single type'.format(self._name))
-            return typing._GenericAlias(self, (item,))
-
-    Required = _RequiredForm(
-        'Required',
-        doc="""A special typing construct to mark a key of a total=False TypedDict
-        as required. For example:
-
-            class Movie(TypedDict, total=False):
-                title: Required[str]
-                year: int
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-
-        There is no runtime checking that a required key is actually provided
-        when instantiating a related TypedDict.
-        """)
-    NotRequired = _RequiredForm(
-        'NotRequired',
-        doc="""A special typing construct to mark a key of a TypedDict as
-        potentially missing. For example:
-
-            class Movie(TypedDict):
-                title: str
-                year: NotRequired[int]
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-        """)
-else:
-    # NOTE: Modeled after _Final's implementation when _FinalTypingBase available
-    class _MaybeRequired(typing._FinalTypingBase, _root=True):
-        __slots__ = ('__type__',)
-
-        def __init__(self, tp=None, **kwds):
-            self.__type__ = tp
-
-        def __getitem__(self, item):
-            cls = type(self)
-            if self.__type__ is None:
-                return cls(typing._type_check(item,
-                           '{} accepts only single type.'.format(cls.__name__[1:])),
-                           _root=True)
-            raise TypeError('{} cannot be further subscripted'
-                            .format(cls.__name__[1:]))
-
-        def _eval_type(self, globalns, localns):
-            new_tp = typing._eval_type(self.__type__, globalns, localns)
-            if new_tp == self.__type__:
-                return self
-            return type(self)(new_tp, _root=True)
-
-        def __repr__(self):
-            r = super().__repr__()
-            if self.__type__ is not None:
-                r += '[{}]'.format(typing._type_repr(self.__type__))
-            return r
-
-        def __hash__(self):
-            return hash((type(self).__name__, self.__type__))
-
-        def __eq__(self, other):
-            if not isinstance(other, type(self)):
-                return NotImplemented
-            if self.__type__ is not None:
-                return self.__type__ == other.__type__
-            return self is other
-
-    class _Required(_MaybeRequired, _root=True):
-        """A special typing construct to mark a key of a total=False TypedDict
-        as required. For example:
-
-            class Movie(TypedDict, total=False):
-                title: Required[str]
-                year: int
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-
-        There is no runtime checking that a required key is actually provided
-        when instantiating a related TypedDict.
-        """
-
-    class _NotRequired(_MaybeRequired, _root=True):
-        """A special typing construct to mark a key of a TypedDict as
-        potentially missing. For example:
-
-            class Movie(TypedDict):
-                title: str
-                year: NotRequired[int]
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-        """
-
-    Required = _Required(_root=True)
-    NotRequired = _NotRequired(_root=True)
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
index e67c7845c8..7255f98aee 100644
--- a/setuptools/_vendor/vendored.txt
+++ b/setuptools/_vendor/vendored.txt
@@ -4,8 +4,6 @@ more_itertools==8.8.0
 jaraco.text==3.7.0
 importlib_resources==5.10.2
 importlib_metadata==6.0.0
-# required for importlib_metadata on older Pythons
-typing_extensions==4.0.1
 # required for importlib_resources and _metadata on older Pythons
 zipp==3.7.0
 tomli==2.0.1
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 16e2c9ea9e..3e66269bec 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -83,7 +83,6 @@ def install(self):
     'jaraco',
     'importlib_resources',
     'importlib_metadata',
-    'typing_extensions',
     'zipp',
     'tomli',
     'backports',
diff --git a/tools/vendored.py b/tools/vendored.py
index 63797ea24a..7d4399bbdc 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -82,7 +82,7 @@ def rewrite_importlib_metadata(pkg_files, new_root):
     Rewrite imports in importlib_metadata to redirect to vendored copies.
     """
     for file in pkg_files.glob('*.py'):
-        text = file.read_text().replace('typing_extensions', '..typing_extensions')
+        text = file.read_text()
         text = text.replace('import zipp', 'from .. import zipp')
         file.write_text(text)
 
@@ -109,7 +109,6 @@ def rewrite_platformdirs(pkg_files: Path):
     init = pkg_files.joinpath('__init__.py')
     text = init.read_text()
     text = text.replace('from platformdirs.', 'from .')
-    text = text.replace('from typing_extensions', 'from ..typing_extensions')
     init.write_text(text)
 
 

From 44bf554feed0b6cf88d2f9465006ed47626f06d6 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 7 May 2024 15:58:44 -0400
Subject: [PATCH 0635/1761] Fix Ruff

---
 pkg_resources/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index a66e701a3d..1cd657fb6d 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -27,7 +27,7 @@
 import time
 import re
 import types
-from typing import Any, Callable, Dict, Iterable, List, Protocol, Optional, TypeVar
+from typing import Callable, Dict, Iterable, List, Protocol, Optional, TypeVar
 import zipfile
 import zipimport
 import warnings

From a3fa4679fc21f33e7eed6b5817ef81a10354bfd4 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 7 May 2024 16:03:11 -0400
Subject: [PATCH 0636/1761] Update newsfragments/4324.removal.rst

---
 newsfragments/4324.removal.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4324.removal.rst b/newsfragments/4324.removal.rst
index bd5e1cb641..3782a0b81b 100644
--- a/newsfragments/4324.removal.rst
+++ b/newsfragments/4324.removal.rst
@@ -1 +1 @@
-Removed `typing_extensions` from vendored dependencies -- by :user:`Avasam`
+Removed ``typing_extensions`` from vendored dependencies -- by :user:`Avasam`

From ed7a12c602f44db068ef4063a51fccdf3245f947 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 May 2024 15:32:31 +0100
Subject: [PATCH 0637/1761] Simplify `mypy.ini`

According to a review suggestion.

Co-authored-by: Avasam 
---
 mypy.ini | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index 9fab958288..3a26daf1c9 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -31,11 +31,7 @@ ignore_missing_imports = True
 
 # - pkg_resources tests create modules that won't exists statically before the test is run.
 #   Let's ignore all "import-not-found" since, if an import really wasn't found, then the test would fail.
-# - setuptools._vendor.packaging._manylinux: Mypy issue, this vendored module is already excluded!
+# Even when excluding vendored/generated modules, there might be problems: https://github.com/python/mypy/issues/11936#issuecomment-1466764006
 [mypy-pkg_resources.tests.*,setuptools._vendor.packaging._manylinux,setuptools.config._validate_pyproject.*]
-disable_error_code = import-not-found
-
-# - The unused-ignore comment in setuptools.config._validate_pyproject.* is probably evaluated differently
-#   in different versions of Python. Also, this file should already be ignored...
-[mypy-setuptools.config._validate_pyproject.*]
-disable_error_code = unused-ignore
+follow_imports = silent
+# silent => ignore errors when following imports

From 5c9d37abdb3f112a502340a75c4ddd91e5950c5c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 May 2024 16:14:17 +0100
Subject: [PATCH 0638/1761] Fix mypy error

---
 mypy.ini | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index 3a26daf1c9..de545a103c 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -21,6 +21,11 @@ exclude = (?x)(
 # *.extern modules that actually live in *._vendor will also cause attr-defined issues on import
 disable_error_code = attr-defined
 
+# - pkg_resources tests create modules that won't exist statically before the test is run.
+#   Let's ignore all "import-not-found" since, if an import really wasn't found, then the test would fail.
+[mypy-pkg_resources.tests.*]
+disable_error_code = import-not-found
+
 # - Avoid raising issues when importing from "extern" modules, as those are added to path dynamically.
 #   https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993
 # - distutils._modified has different errors on Python 3.8 [import-untyped], on Python 3.9+ [import-not-found]
@@ -29,9 +34,7 @@ disable_error_code = attr-defined
 [mypy-pkg_resources.extern.*,setuptools.extern.*,distutils._modified,jaraco.*,trove_classifiers]
 ignore_missing_imports = True
 
-# - pkg_resources tests create modules that won't exists statically before the test is run.
-#   Let's ignore all "import-not-found" since, if an import really wasn't found, then the test would fail.
 # Even when excluding vendored/generated modules, there might be problems: https://github.com/python/mypy/issues/11936#issuecomment-1466764006
-[mypy-pkg_resources.tests.*,setuptools._vendor.packaging._manylinux,setuptools.config._validate_pyproject.*]
+[mypy-setuptools._vendor.packaging._manylinux,setuptools.config._validate_pyproject.*]
 follow_imports = silent
 # silent => ignore errors when following imports

From e9995828311c5e0c843622ca2be85e7f09f1ff0d Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 9 May 2024 11:35:59 -0400
Subject: [PATCH 0639/1761] Update dynamic module imports in ``pkg_resources``
 to private alias static imports. Enabled ``attr-defined`` checks in mypy for
 ``pkg_resources``

---
 mypy.ini                              |  3 +-
 newsfragments/4348.bugfix.rst         |  1 +
 newsfragments/4348.misc.rst           |  1 +
 pkg_resources/__init__.py             | 44 ++++++++++++---------------
 pkg_resources/tests/test_resources.py |  6 ++--
 5 files changed, 26 insertions(+), 29 deletions(-)
 create mode 100644 newsfragments/4348.bugfix.rst
 create mode 100644 newsfragments/4348.misc.rst

diff --git a/mypy.ini b/mypy.ini
index 45671826b1..4eebe5f77c 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -15,10 +15,11 @@ exclude = (?x)(
 	| ^setuptools/_distutils/ # Vendored
 	| ^setuptools/config/_validate_pyproject/ # Auto-generated
 	)
+
 # Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes,
 # w/o updating all the attributes and return types from the base classes for type-checkers to understand
 # Especially with setuptools.dist.command vs distutils.dist.command vs setuptools._distutils.dist.command
-# *.extern modules that actually live in *._vendor will also cause attr-defined issues on import
+[mypy-setuptools.*]
 disable_error_code = attr-defined
 
 # - Avoid raising issues when importing from "extern" modules, as those are added to path dynamically.
diff --git a/newsfragments/4348.bugfix.rst b/newsfragments/4348.bugfix.rst
new file mode 100644
index 0000000000..a8bb79a123
--- /dev/null
+++ b/newsfragments/4348.bugfix.rst
@@ -0,0 +1 @@
+Fix an error with ``UnicodeDecodeError`` handling in ``pkg_resources`` when trying to read files in UTF-8 with a fallback -- by :user:`Avasam`
diff --git a/newsfragments/4348.misc.rst b/newsfragments/4348.misc.rst
new file mode 100644
index 0000000000..989226c4b3
--- /dev/null
+++ b/newsfragments/4348.misc.rst
@@ -0,0 +1 @@
+Update dynamic module imports in ``pkg_resources`` to private alias static imports. Enabled ``attr-defined`` checks in mypy for ``pkg_resources`` -- by :user:`Avasam`
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 1cd657fb6d..b595ec5965 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -70,15 +70,11 @@
     drop_comment,
     join_continuation,
 )
-
-from pkg_resources.extern import platformdirs
-from pkg_resources.extern import packaging
-
-__import__('pkg_resources.extern.packaging.version')
-__import__('pkg_resources.extern.packaging.specifiers')
-__import__('pkg_resources.extern.packaging.requirements')
-__import__('pkg_resources.extern.packaging.markers')
-__import__('pkg_resources.extern.packaging.utils')
+from pkg_resources.extern.packaging import markers as _packaging_markers
+from pkg_resources.extern.packaging import requirements as _packaging_requirements
+from pkg_resources.extern.packaging import utils as _packaging_utils
+from pkg_resources.extern.packaging import version as _packaging_version
+from pkg_resources.extern.platformdirs import user_cache_dir
 
 # declare some globals that will be defined later to
 # satisfy the linters.
@@ -116,7 +112,7 @@ class PEP440Warning(RuntimeWarning):
     """
 
 
-parse_version = packaging.version.Version
+parse_version = _packaging_version.Version
 
 
 _state_vars: Dict[str, str] = {}
@@ -730,7 +726,7 @@ def add(self, dist, entry=None, insert=True, replace=False):
             return
 
         self.by_key[dist.key] = dist
-        normalized_name = packaging.utils.canonicalize_name(dist.key)
+        normalized_name = _packaging_utils.canonicalize_name(dist.key)
         self.normalized_to_canonical_keys[normalized_name] = dist.key
         if dist.key not in keys:
             keys.append(dist.key)
@@ -1344,9 +1340,7 @@ def get_default_cache():
     or a platform-relevant user cache dir for an app
     named "Python-Eggs".
     """
-    return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir(
-        appname='Python-Eggs'
-    )
+    return os.environ.get('PYTHON_EGG_CACHE') or user_cache_dir(appname='Python-Eggs')
 
 
 def safe_name(name):
@@ -1363,8 +1357,8 @@ def safe_version(version):
     """
     try:
         # normalize the version
-        return str(packaging.version.Version(version))
-    except packaging.version.InvalidVersion:
+        return str(_packaging_version.Version(version))
+    except _packaging_version.InvalidVersion:
         version = version.replace(' ', '.')
         return re.sub('[^A-Za-z0-9.]+', '-', version)
 
@@ -1441,9 +1435,9 @@ def evaluate_marker(text, extra=None):
     This implementation uses the 'pyparsing' module.
     """
     try:
-        marker = packaging.markers.Marker(text)
+        marker = _packaging_markers.Marker(text)
         return marker.evaluate()
-    except packaging.markers.InvalidMarker as e:
+    except _packaging_markers.InvalidMarker as e:
         raise SyntaxError(e) from e
 
 
@@ -2695,12 +2689,12 @@ def parsed_version(self):
         if not hasattr(self, "_parsed_version"):
             try:
                 self._parsed_version = parse_version(self.version)
-            except packaging.version.InvalidVersion as ex:
+            except _packaging_version.InvalidVersion as ex:
                 info = f"(package: {self.project_name})"
                 if hasattr(ex, "add_note"):
                     ex.add_note(info)  # PEP 678
                     raise
-                raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None
+                raise _packaging_version.InvalidVersion(f"{str(ex)} {info}") from None
 
         return self._parsed_version
 
@@ -2708,7 +2702,7 @@ def parsed_version(self):
     def _forgiving_parsed_version(self):
         try:
             return self.parsed_version
-        except packaging.version.InvalidVersion as ex:
+        except _packaging_version.InvalidVersion as ex:
             self._parsed_version = parse_version(_forgiving_version(self.version))
 
             notes = "\n".join(getattr(ex, "__notes__", []))  # PEP 678
@@ -2881,7 +2875,7 @@ def from_filename(cls, filename, metadata=None, **kw):
 
     def as_requirement(self):
         """Return a ``Requirement`` that matches this distribution exactly"""
-        if isinstance(self.parsed_version, packaging.version.Version):
+        if isinstance(self.parsed_version, _packaging_version.Version):
             spec = "%s==%s" % (self.project_name, self.parsed_version)
         else:
             spec = "%s===%s" % (self.project_name, self.parsed_version)
@@ -3127,11 +3121,11 @@ def parse_requirements(strs):
     return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs))))
 
 
-class RequirementParseError(packaging.requirements.InvalidRequirement):
+class RequirementParseError(_packaging_requirements.InvalidRequirement):
     "Compatibility wrapper for InvalidRequirement"
 
 
-class Requirement(packaging.requirements.Requirement):
+class Requirement(_packaging_requirements.Requirement):
     def __init__(self, requirement_string):
         """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
         super().__init__(requirement_string)
@@ -3353,6 +3347,6 @@ def _read_utf8_with_fallback(file: str, fallback_encoding=LOCALE_ENCODING) -> st
         """
         # TODO: Add a deadline?
         #       See comment in setuptools.unicode_utils._Utf8EncodingNeeded
-        warnings.warns(msg, PkgResourcesDeprecationWarning, stacklevel=2)
+        warnings.warn(msg, PkgResourcesDeprecationWarning, stacklevel=2)
         with open(file, "r", encoding=fallback_encoding) as f:
             return f.read()
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index b0a319e60f..83199af7b8 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -5,7 +5,7 @@
 import itertools
 
 import pytest
-from pkg_resources.extern import packaging
+from pkg_resources.extern.packaging.specifiers import SpecifierSet
 
 import pkg_resources
 from pkg_resources import (
@@ -567,7 +567,7 @@ def testOptionsAndHashing(self):
         assert hash(r1) == hash((
             "twisted",
             None,
-            packaging.specifiers.SpecifierSet(">=1.2"),
+            SpecifierSet(">=1.2"),
             frozenset(["foo", "bar"]),
             None,
         ))
@@ -576,7 +576,7 @@ def testOptionsAndHashing(self):
         ) == hash((
             "twisted",
             "https://localhost/twisted.zip",
-            packaging.specifiers.SpecifierSet(),
+            SpecifierSet(),
             frozenset(),
             None,
         ))

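Editorial note: the aliased static imports above replace side-effect ``__import__`` calls. A minimal sketch of the contrast (illustrative only; runnable inside a setuptools checkout of this era):

    # before: the submodule is registered only as an import side effect,
    # which is opaque to mypy's attr-defined checking
    #     __import__('pkg_resources.extern.packaging.version')
    #     packaging.version.Version('1.0')

    # after: the submodule is bound to an explicit, statically
    # resolvable private name
    from pkg_resources.extern.packaging import version as _packaging_version
    _packaging_version.Version('1.0')
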
From 6c5da6cc6a6320331d4325a5d4501a3f36530d5d Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 9 May 2024 11:49:59 -0400
Subject: [PATCH 0640/1761] Apply suggestions from code review

Co-authored-by: Anderson Bravalheri 
---
 pkg_resources/__init__.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index b595ec5965..3a412a718c 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -74,7 +74,7 @@
 from pkg_resources.extern.packaging import requirements as _packaging_requirements
 from pkg_resources.extern.packaging import utils as _packaging_utils
 from pkg_resources.extern.packaging import version as _packaging_version
-from pkg_resources.extern.platformdirs import user_cache_dir
+from pkg_resources.extern.platformdirs import user_cache_dir as _user_cache_dir
 
 # declare some globals that will be defined later to
 # satisfy the linters.
@@ -1340,7 +1340,7 @@ def get_default_cache():
     or a platform-relevant user cache dir for an app
     named "Python-Eggs".
     """
-    return os.environ.get('PYTHON_EGG_CACHE') or user_cache_dir(appname='Python-Eggs')
+    return os.environ.get('PYTHON_EGG_CACHE') or _user_cache_dir(appname='Python-Eggs')
 
 
 def safe_name(name):

From 0802e21030ff750d30a48b2d77ce049af201e0ad Mon Sep 17 00:00:00 2001
From: James Gerity 
Date: Thu, 7 Dec 2023 21:08:18 -0500
Subject: [PATCH 0641/1761] Emit warning for invalid [tools.setuptools] table

---
 newsfragments/4150.feature.rst     |  1 +
 setuptools/config/pyprojecttoml.py | 10 ++++++++++
 2 files changed, 11 insertions(+)
 create mode 100644 newsfragments/4150.feature.rst

diff --git a/newsfragments/4150.feature.rst b/newsfragments/4150.feature.rst
new file mode 100644
index 0000000000..2521fbd805
--- /dev/null
+++ b/newsfragments/4150.feature.rst
@@ -0,0 +1 @@
+Emit a warning when ``[tools.setuptools]`` is present in ``pyproject.toml`` and will be ignored. -- by user:`SnoopJ`
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index ff97679895..d379405595 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -108,6 +108,10 @@ def read_configuration(
     if not asdict or not (project_table or setuptools_table):
         return {}  # User is not using pyproject to configure setuptools
 
+    if "setuptools" in asdict.get("tools", {}):
+        # let the user know they probably have a typo in their metadata
+        _ToolsTypoInMetadata.emit()
+
     if "distutils" in tool_table:
         _ExperimentalConfiguration.emit(subject="[tool.distutils]")
 
@@ -439,3 +443,9 @@ class _ExperimentalConfiguration(SetuptoolsWarning):
         "`{subject}` in `pyproject.toml` is still *experimental* "
         "and likely to change in future releases."
     )
+
+
+class _ToolsTypoInMetadata(SetuptoolsWarning):
+    _SUMMARY = (
+        "Ignoring [tools.setuptools] in pyproject.toml, did you mean [tool.setuptools]?"
+    )

From 8d1809d8c5df487d3296270bfda0737211354e82 Mon Sep 17 00:00:00 2001
From: James Gerity 
Date: Sat, 6 Apr 2024 20:07:18 -0400
Subject: [PATCH 0642/1761] Fix typo in test metadata

---
 setuptools/tests/test_build_py.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index db2052a586..c293fec142 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -349,7 +349,7 @@ class TestTypeInfoFiles:
             name = "foo"
             version = "1"
 
-            [tools.setuptools]
+            [tool.setuptools]
             include-package-data = false
             """
         ),
@@ -359,7 +359,7 @@ class TestTypeInfoFiles:
             name = "foo"
             version = "1"
 
-            [tools.setuptools]
+            [tool.setuptools]
             include-package-data = false
 
             [tool.setuptools.exclude-package-data]

From 26e9a71ff2d2314430effc2a4faa8b6b0c8a9d91 Mon Sep 17 00:00:00 2001
From: James Gerity 
Date: Sat, 6 Apr 2024 22:20:05 -0400
Subject: [PATCH 0643/1761] Add test for tools.setuptools warning

---
 setuptools/tests/config/test_pyprojecttoml.py | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
index abec68ab30..4de11d8b1f 100644
--- a/setuptools/tests/config/test_pyprojecttoml.py
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -8,6 +8,7 @@
 from path import Path
 
 from setuptools.config.pyprojecttoml import (
+    _ToolsTypoInMetadata,
     read_configuration,
     expand_configuration,
     apply_configuration,
@@ -369,3 +370,28 @@ def test_include_package_data_in_setuppy(tmp_path):
     assert dist.get_name() == "myproj"
     assert dist.get_version() == "42"
     assert dist.include_package_data is False
+
+
+def test_warn_tools_typo(tmp_path):
+    """Test that the common ``tools.setuptools`` typo in ``pyproject.toml`` issues a warning
+
+    See https://github.com/pypa/setuptools/issues/4150
+    """
+    config = """
+    [build-system]
+    requires = ["setuptools"]
+    build-backend = "setuptools.build_meta"
+
+    [project]
+    name = "myproj"
+    version = '42'
+
+    [tools.setuptools]
+    packages = ["package"]
+    """
+
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text(cleandoc(config), encoding="utf-8")
+
+    with pytest.warns(_ToolsTypoInMetadata):
+        expanded = read_configuration(pyproject)

From 2bfc431d197e49b5232639c7916887135ba29014 Mon Sep 17 00:00:00 2001
From: James Gerity 
Date: Sun, 7 Apr 2024 14:58:29 -0400
Subject: [PATCH 0644/1761] Remove unused variable

---
 setuptools/tests/config/test_pyprojecttoml.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
index 4de11d8b1f..bf8cae5a24 100644
--- a/setuptools/tests/config/test_pyprojecttoml.py
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -394,4 +394,4 @@ def test_warn_tools_typo(tmp_path):
     pyproject.write_text(cleandoc(config), encoding="utf-8")
 
     with pytest.warns(_ToolsTypoInMetadata):
-        expanded = read_configuration(pyproject)
+        read_configuration(pyproject)

From 7bfee6ff5f26050472805ce3f9594a0e58cf20e8 Mon Sep 17 00:00:00 2001
From: James Gerity 
Date: Tue, 9 Apr 2024 12:49:11 -0400
Subject: [PATCH 0645/1761] Invoke build command in typing file test

Co-authored-by: Daniel Naylor 
---
 setuptools/tests/test_build_py.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index c293fec142..a1f595e27a 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -421,6 +421,7 @@ def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
         jaraco.path.build(structure)
 
         build_py = get_finalized_build_py()
+        build_py.run_command("build")
         outputs = get_outputs(build_py)
         assert expected_type_files <= outputs
 

From 9d86cfe905983a9c6b035bbd895a9cd42e06d64d Mon Sep 17 00:00:00 2001
From: James Gerity 
Date: Tue, 9 Apr 2024 18:50:17 -0400
Subject: [PATCH 0646/1761] Fix typo in NEWS fragment

---
 newsfragments/4150.feature.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4150.feature.rst b/newsfragments/4150.feature.rst
index 2521fbd805..5e536fd755 100644
--- a/newsfragments/4150.feature.rst
+++ b/newsfragments/4150.feature.rst
@@ -1 +1 @@
-Emit a warning when ``[tools.setuptools]`` is present in ``pyproject.toml`` and will be ignored. -- by user:`SnoopJ`
+Emit a warning when ``[tools.setuptools]`` is present in ``pyproject.toml`` and will be ignored. -- by :user:`SnoopJ`

From 1cbafacbdbdae7074285c46967338fcd67bc7d7c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 May 2024 17:47:53 +0100
Subject: [PATCH 0647/1761] Use xfail to temporarily bypass failing test

---
 setuptools/tests/test_build_py.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index a1f595e27a..6900fdefbd 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -409,7 +409,14 @@ class TestTypeInfoFiles:
     }
 
     @pytest.mark.parametrize(
-        "pyproject", ["default_pyproject", "dont_include_package_data"]
+        "pyproject",
+        [
+            "default_pyproject",
+            pytest.param(
+                "dont_include_package_data",
+                marks=pytest.mark.xfail(reason="pypa/setuptools#4350"),
+            ),
+        ],
     )
     @pytest.mark.parametrize("example", EXAMPLES.keys())
     def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
@@ -421,7 +428,6 @@ def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
         jaraco.path.build(structure)
 
         build_py = get_finalized_build_py()
-        build_py.run_command("build")
         outputs = get_outputs(build_py)
         assert expected_type_files <= outputs
 

From 3d7599beb613f76afd54209fea18c8d569b04a39 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 May 2024 18:09:23 +0100
Subject: [PATCH 0648/1761] Address warning

---
 tools/vendored.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/vendored.py b/tools/vendored.py
index 7d4399bbdc..6257a6bf33 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -7,7 +7,7 @@
 
 def remove_all(paths):
     for path in paths:
-        path.rmtree() if path.isdir() else path.remove()
+        path.rmtree() if path.is_dir() else path.remove()
 
 
 def update_vendored():

From bb52e72388f5ec8008f1008b019bcca21d2782c1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 9 May 2024 18:13:25 +0100
Subject: [PATCH 0649/1761] Improve auto-update of extern list

---
 pkg_resources/extern/__init__.py |  4 ++--
 setuptools/extern/__init__.py    |  4 ++--
 tools/vendored.py                | 27 ++++++++++++++++-----------
 3 files changed, 20 insertions(+), 15 deletions(-)

diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index 363bdf3f06..b6294dbfdb 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -72,8 +72,8 @@ def install(self):
 
 # [[[cog
 # import cog
-# from tools.vendored import yield_root_package
-# names = "\n".join(f"    {x!r}," for x in yield_root_package('pkg_resources'))
+# from tools.vendored import yield_top_level
+# names = "\n".join(f"    {x!r}," for x in yield_top_level('pkg_resources'))
 # cog.outl(f"names = (\n{names}\n)")
 # ]]]
 names = (
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 3e66269bec..b0e646bc8c 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -72,8 +72,8 @@ def install(self):
 
 # [[[cog
 # import cog
-# from tools.vendored import yield_root_package
-# names = "\n".join(f"    {x!r}," for x in yield_root_package('setuptools'))
+# from tools.vendored import yield_top_level
+# names = "\n".join(f"    {x!r}," for x in yield_top_level('setuptools'))
 # cog.outl(f"names = (\n{names}\n)")
 # ]]]
 names = (
diff --git a/tools/vendored.py b/tools/vendored.py
index 6257a6bf33..4c7b03ebce 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -165,18 +165,23 @@ def update_setuptools():
     rewrite_more_itertools(vendor / "more_itertools")
 
 
-def yield_root_package(name):
-    """Useful when defining the MetaPathFinder
-    >>> examples = set(yield_root_package("setuptools")) & {"jaraco", "backports"}
+def yield_top_level(name):
+    """Iterate over all modules and (top level) packages vendored
+    >>> roots = set(yield_top_level("setuptools"))
+    >>> examples = roots & {"jaraco", "backports", "zipp"}
     >>> list(sorted(examples))
-    ['backports', 'jaraco']
-    """
-    vendored = Path(f"{name}/_vendor/vendored.txt")
-    yield from (
-        line.partition("=")[0].partition(".")[0].replace("-", "_")
-        for line in vendored.read_text(encoding="utf-8").splitlines()
-        if line and not line.startswith("#")
-    )
+    ['backports', 'jaraco', 'zipp']
+    """
+    vendor = Path(f"{name}/_vendor")
+    ignore = {"__pycache__", "__init__.py", ".ruff_cache"}
+
+    for item in vendor.iterdir():
+        if item.name in ignore:
+            continue
+        if item.is_dir() and item.suffix != ".dist-info":
+            yield str(item.name)
+        if item.is_file() and item.suffix == ".py":
+            yield str(item.stem)
 
 
 __name__ == '__main__' and update_vendored()

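Editorial note on the ``[[[cog`` markers above: the extern ``names`` tuples are generated with the cogapp tool, which executes the Python between ``[[[cog`` and ``]]]`` and splices whatever it prints via ``cog.outl()`` into the region before ``[[[end]]]``. A minimal sketch, assuming ``cogapp`` is installed (file name hypothetical):

    # names.py
    # [[[cog
    # import cog
    # cog.outl("names = ('a', 'b')")
    # ]]]
    names = ('a', 'b')
    # [[[end]]]

    # regenerate in place with:
    #     cog -r names.py
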
From c279742ed6eb3696ee592322e5ae7b1eb4c640ab Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 May 2024 10:53:19 +0100
Subject: [PATCH 0650/1761] Reorder imports in vendor to match validation

---
 pkg_resources/extern/__init__.py |  8 ++++----
 setuptools/extern/__init__.py    | 14 +++++++-------
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index b6294dbfdb..bfb9eb8bdf 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -77,13 +77,13 @@ def install(self):
 # cog.outl(f"names = (\n{names}\n)")
 # ]]]
 names = (
+    'backports',
+    'importlib_resources',
+    'jaraco',
+    'more_itertools',
     'packaging',
     'platformdirs',
-    'jaraco',
-    'importlib_resources',
     'zipp',
-    'more_itertools',
-    'backports',
 )
 # [[[end]]]
 VendorImporter(__name__, names).install()
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index b0e646bc8c..8eb02ac6d3 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -77,15 +77,15 @@ def install(self):
 # cog.outl(f"names = (\n{names}\n)")
 # ]]]
 names = (
-    'packaging',
-    'ordered_set',
-    'more_itertools',
-    'jaraco',
-    'importlib_resources',
+    'backports',
     'importlib_metadata',
-    'zipp',
+    'importlib_resources',
+    'jaraco',
+    'more_itertools',
+    'ordered_set',
+    'packaging',
     'tomli',
-    'backports',
+    'zipp',
 )
 # [[[end]]]
 VendorImporter(__name__, names, 'setuptools._vendor').install()

From 639b93d88c9764a29d127ec7b60251ac7f433d3e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 May 2024 11:22:28 +0100
Subject: [PATCH 0651/1761] Show diff in check-extern for better debugging

---
 tox.ini | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tox.ini b/tox.ini
index e13ad53da3..e6fc063af1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -70,14 +70,14 @@ commands =
 
 [testenv:{vendor,check-extern}]
 skip_install = True
-allowlist_externals = sh
+allowlist_externals = git, sh
 deps =
 	path
 	cogapp
 commands =
 	vendor: python -m tools.vendored
-	vendor: sh -c "git grep -l -F '\[\[\[cog' | xargs cog -I {toxinidir} -r"  # update `*.extern`
-	check-extern: sh -c "git grep -l -F '\[\[\[cog' | xargs cog -I {toxinidir} --check"
+	sh -c "git grep -l -F '\[\[\[cog' | xargs -t cog -I {toxinidir} -r"  # update `*.extern`
+	check-extern: git diff --exit-code
 
 [testenv:generate-validation-code]
 skip_install = True

From 3cfe932d88e21acc4c7c448089cb2c409a9f4491 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 10 May 2024 11:44:03 +0100
Subject: [PATCH 0652/1761] Sort items in yield_top_level for better
 reproducibility

---
 tools/vendored.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/vendored.py b/tools/vendored.py
index 4c7b03ebce..69a538f20c 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -175,7 +175,7 @@ def yield_top_level(name):
     vendor = Path(f"{name}/_vendor")
     ignore = {"__pycache__", "__init__.py", ".ruff_cache"}
 
-    for item in vendor.iterdir():
+    for item in sorted(vendor.iterdir()):
         if item.name in ignore:
             continue
         if item.is_dir() and item.suffix != ".dist-info":

From bfb117a8c86dd95f3c237863ed7ac6cd330116fe Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 10 May 2024 11:50:46 -0400
Subject: [PATCH 0653/1761] Update doctest to match new project layout.

---
 setuptools/dist.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 03f6c0398b..4ccb915902 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -409,8 +409,8 @@ def _expand_patterns(patterns):
         """
         >>> list(Distribution._expand_patterns(['LICENSE']))
         ['LICENSE']
-        >>> list(Distribution._expand_patterns(['setup.cfg', 'LIC*']))
-        ['setup.cfg', 'LICENSE']
+        >>> list(Distribution._expand_patterns(['pyproject.toml', 'LIC*']))
+        ['pyproject.toml', 'LICENSE']
         """
         return (
             path

From 544b332bd78d8d274597923e89b9bd7839f8a0f4 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 13 May 2024 02:27:49 -0400
Subject: [PATCH 0654/1761] `pkg_resources` type the declared global variables
 (#4267)

---
 newsfragments/4267.feature.rst |  1 +
 pkg_resources/__init__.py      | 64 +++++++++++++++++++++-------------
 2 files changed, 40 insertions(+), 25 deletions(-)
 create mode 100644 newsfragments/4267.feature.rst

diff --git a/newsfragments/4267.feature.rst b/newsfragments/4267.feature.rst
new file mode 100644
index 0000000000..5a69c23914
--- /dev/null
+++ b/newsfragments/4267.feature.rst
@@ -0,0 +1 @@
+Typed the dynamically defined variables from ``pkg_resources`` -- by :user:`Avasam`
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 3a412a718c..713d9bdfa3 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -27,7 +27,16 @@
 import time
 import re
 import types
-from typing import Callable, Dict, Iterable, List, Protocol, Optional, TypeVar
+from typing import (
+    TYPE_CHECKING,
+    List,
+    Protocol,
+    Callable,
+    Dict,
+    Iterable,
+    Optional,
+    TypeVar,
+)
 import zipfile
 import zipimport
 import warnings
@@ -76,21 +85,6 @@
 from pkg_resources.extern.packaging import version as _packaging_version
 from pkg_resources.extern.platformdirs import user_cache_dir as _user_cache_dir
 
-# declare some globals that will be defined later to
-# satisfy the linters.
-require = None
-working_set = None
-add_activation_listener = None
-cleanup_resources = None
-resource_stream = None
-set_extraction_path = None
-resource_isdir = None
-resource_string = None
-iter_entry_points = None
-resource_listdir = None
-resource_filename = None
-resource_exists = None
-
 
 warnings.warn(
     "pkg_resources is deprecated as an API. "
@@ -3257,6 +3251,15 @@ def _mkstemp(*args, **kw):
 warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
 
 
+class PkgResourcesDeprecationWarning(Warning):
+    """
+    Base class for warning about deprecations in ``pkg_resources``
+
+    This class is not derived from ``DeprecationWarning``, and as such is
+    visible by default.
+    """
+
+
 # from jaraco.functools 1.3
 def _call_aside(f, *args, **kwargs):
     f(*args, **kwargs)
@@ -3275,15 +3278,6 @@ def _initialize(g=globals()):
     )
 
 
-class PkgResourcesDeprecationWarning(Warning):
-    """
-    Base class for warning about deprecations in ``pkg_resources``
-
-    This class is not derived from ``DeprecationWarning``, and as such is
-    visible by default.
-    """
-
-
 @_call_aside
 def _initialize_master_working_set():
     """
@@ -3320,6 +3314,26 @@ def _initialize_master_working_set():
     globals().update(locals())
 
 
+if TYPE_CHECKING:
+    # All of these are set by the @_call_aside methods above
+    __resource_manager = ResourceManager()  # Won't exist at runtime
+    resource_exists = __resource_manager.resource_exists
+    resource_isdir = __resource_manager.resource_isdir
+    resource_filename = __resource_manager.resource_filename
+    resource_stream = __resource_manager.resource_stream
+    resource_string = __resource_manager.resource_string
+    resource_listdir = __resource_manager.resource_listdir
+    set_extraction_path = __resource_manager.set_extraction_path
+    cleanup_resources = __resource_manager.cleanup_resources
+
+    working_set = WorkingSet()
+    require = working_set.require
+    iter_entry_points = working_set.iter_entry_points
+    add_activation_listener = working_set.subscribe
+    run_script = working_set.run_script
+    run_main = run_script
+
+
 #  ---- Ported from ``setuptools`` to avoid introducing an import inter-dependency ----
 LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
 

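Editorial note: the ``TYPE_CHECKING`` block above gives static types to names that ``_initialize_master_working_set`` injects into ``globals()`` at runtime. A minimal sketch of the pattern (illustrative, with hypothetical names):

    from typing import TYPE_CHECKING

    class Manager:
        def exists(self, name: str) -> bool:
            return True

    def _initialize(g=globals()):
        # names defined dynamically, invisible to type checkers
        g['exists'] = Manager().exists

    _initialize()

    if TYPE_CHECKING:
        # never executed at runtime; type checkers read this as an
        # ordinary definition and infer the bound method's signature
        exists = Manager().exists
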
From d53bf1509f40c8e84feb62ac13e91b76074a063a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= 
Date: Tue, 14 May 2024 16:19:02 +0200
Subject: [PATCH 0655/1761] Explicitly disallow resource paths starting with
 single backslash

Previously, such paths were disallowed implicitly
as they were treated as Windows absolute paths.

Since Python 3.13, paths starting with a single backslash are not considered
Windows-absolute, so we treat them specially.

This change makes the existing doctest pass with Python 3.13.

Partially fixes https://github.com/pypa/setuptools/issues/4196
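
For illustration (not part of the change), the behavior difference driving it:

    import ntpath
    ntpath.isabs('\\foo')   # Python <= 3.12: True; Python 3.13+: False
    # so paths like '\foo' must now be rejected explicitly via
    # path.startswith('\\') rather than falling out of ntpath.isabs()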
---
 pkg_resources/__init__.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 713d9bdfa3..faee7dec79 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -1604,6 +1604,7 @@ def _validate_resource_path(path):
             os.path.pardir in path.split(posixpath.sep)
             or posixpath.isabs(path)
             or ntpath.isabs(path)
+            or path.startswith("\\")
         )
         if not invalid:
             return
@@ -1611,7 +1612,7 @@ def _validate_resource_path(path):
         msg = "Use of .. or absolute path in a resource path is not allowed."
 
         # Aggressively disallow Windows absolute paths
-        if ntpath.isabs(path) and not posixpath.isabs(path):
+        if (path.startswith("\\") or ntpath.isabs(path)) and not posixpath.isabs(path):
             raise ValueError(msg)
 
         # for compatibility, warn; in future

From c6266e423fa26aafa01f1df71de7c6613273155e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= 
Date: Tue, 14 May 2024 16:24:07 +0200
Subject: [PATCH 0656/1761] Make the validation test for entry-points work with
 Python 3.13+

The exception in importlib.metadata has changed.
See https://github.com/python/importlib_metadata/issues/488

This makes an existing test pass with Python 3.13.

Partially fixes https://github.com/pypa/setuptools/issues/4196
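
For illustration (not part of the change), a sketch of how a malformed entry point surfaces; the exact exception depends on the importlib.metadata version:

    from importlib.metadata import EntryPoint

    # hypothetical malformed value: the extras bracket is never closed
    ep = EntryPoint(name='x', value='mod:attr [unclosed', group='console_scripts')
    try:
        ep.extras
    except (AttributeError, AssertionError) as exc:
        # AttributeError on older Pythons, AssertionError on 3.13+
        print(type(exc).__name__)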
---
 setuptools/_entry_points.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setuptools/_entry_points.py b/setuptools/_entry_points.py
index 747a69067e..b244e78387 100644
--- a/setuptools/_entry_points.py
+++ b/setuptools/_entry_points.py
@@ -17,7 +17,8 @@ def ensure_valid(ep):
     """
     try:
         ep.extras
-    except AttributeError as ex:
+    except (AttributeError, AssertionError) as ex:
+        # Why both? See https://github.com/python/importlib_metadata/issues/488
         msg = (
             f"Problems to parse {ep}.\nPlease ensure entry-point follows the spec: "
             "https://packaging.python.org/en/latest/specifications/entry-points/"

From 06ee10669eb4d8c477d1b3eb2b3884ea93eb3ccd Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 15 May 2024 16:48:33 +0100
Subject: [PATCH 0657/1761] Remove deprecated `setuptools.dep_util`

Users should rely on `setuptools.modified` instead.
---
 setuptools/dep_util.py | 16 ----------------
 1 file changed, 16 deletions(-)
 delete mode 100644 setuptools/dep_util.py

diff --git a/setuptools/dep_util.py b/setuptools/dep_util.py
deleted file mode 100644
index 998ffa206e..0000000000
--- a/setuptools/dep_util.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from ._distutils import _modified
-from .warnings import SetuptoolsDeprecationWarning
-
-
-def __getattr__(name):
-    if name not in ['newer_group', 'newer_pairwise_group']:
-        raise AttributeError(name)
-    SetuptoolsDeprecationWarning.emit(
-        "dep_util is Deprecated. Use functions from setuptools.modified instead.",
-        "Please use `setuptools.modified` instead of `setuptools.dep_util`.",
-        see_url="https://github.com/pypa/setuptools/pull/4069",
-        due_date=(2024, 5, 21),
-        # Warning added in v69.0.0 on 2023/11/20,
-        # See https://github.com/pypa/setuptools/discussions/4128
-    )
-    return getattr(_modified, name)

From 126b730c0ff1cb01e2e2aa6bf8c1feee4a216c34 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 15 May 2024 17:18:16 +0100
Subject: [PATCH 0658/1761] Add news fragment

---
 newsfragments/4360.removal.1.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4360.removal.1.rst

diff --git a/newsfragments/4360.removal.1.rst b/newsfragments/4360.removal.1.rst
new file mode 100644
index 0000000000..f00d6be518
--- /dev/null
+++ b/newsfragments/4360.removal.1.rst
@@ -0,0 +1,2 @@
+Remove deprecated ``setuptools.dep_util``.
+The provided alternative is ``setuptools.modified``.

From 59c47e42e304f0c56a9069b86bca2f11ca6ced7b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 16 May 2024 21:30:08 -0400
Subject: [PATCH 0659/1761] In install command, use super to call the
 superclass methods. Avoids race conditions when monkeypatching from
 _distutils_system_mod occurs late.

Fixes #4136
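
For illustration (not part of the change), why ``super()`` is robust here: it resolves through ``type(self).__mro__`` at call time, whereas the old ``orig.install.run(self)`` style follows whatever the module-level name points to at that moment, which can diverge once monkeypatching happens late. A minimal sketch with hypothetical names:

    class OldInstall:
        def run(self):
            return 'resolved via the MRO'

    class Command(OldInstall):
        def run(self):
            return super().run()   # always reaches the actual base class

    class SystemInstall:           # swapped in late by a system mod
        def run(self):
            return 'resolved via the rebound alias'

    alias = OldInstall
    alias = SystemInstall          # late rebinding of the alias
    print(Command().run())         # 'resolved via the MRO'
    print(alias.run(Command()))    # diverges from Command's actual base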
---
 newsfragments/4136.bugfix.rst | 1 +
 setuptools/command/install.py | 8 ++++----
 2 files changed, 5 insertions(+), 4 deletions(-)
 create mode 100644 newsfragments/4136.bugfix.rst

diff --git a/newsfragments/4136.bugfix.rst b/newsfragments/4136.bugfix.rst
new file mode 100644
index 0000000000..f56346f0c7
--- /dev/null
+++ b/newsfragments/4136.bugfix.rst
@@ -0,0 +1 @@
+In install command, use super to call the superclass methods. Avoids race conditions when monkeypatching from _distutils_system_mod occurs late.
\ No newline at end of file
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index 56c1155b50..c49fcda939 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -49,12 +49,12 @@ def initialize_options(self):
             #       and then add a due_date to this warning.
         )
 
-        orig.install.initialize_options(self)
+        super().initialize_options()
         self.old_and_unmanageable = None
         self.single_version_externally_managed = None
 
     def finalize_options(self):
-        orig.install.finalize_options(self)
+        super().finalize_options()
         if self.root:
             self.single_version_externally_managed = True
         elif self.single_version_externally_managed:
@@ -78,11 +78,11 @@ def handle_extra_path(self):
     def run(self):
         # Explicit request for old-style install?  Just do it
         if self.old_and_unmanageable or self.single_version_externally_managed:
-            return orig.install.run(self)
+            return super().run()
 
         if not self._called_from_setup(inspect.currentframe()):
             # Run in backward-compatibility mode to support bdist_* commands.
-            orig.install.run(self)
+            super().run()
         else:
             self.do_egg_install()
 

From 3dad80ae43c631a0df0b7ba5b105929186d26e27 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 16 May 2024 22:33:09 -0400
Subject: [PATCH 0660/1761] Add news fragment.

---
 newsfragments/4310.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4310.feature.rst

diff --git a/newsfragments/4310.feature.rst b/newsfragments/4310.feature.rst
new file mode 100644
index 0000000000..2379f3f342
--- /dev/null
+++ b/newsfragments/4310.feature.rst
@@ -0,0 +1 @@
+Migrated Setuptools' own config to pyproject.toml
\ No newline at end of file

From 96d681aa405460f724c62c00ca125ae722ad810a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 17 May 2024 10:43:17 +0100
Subject: [PATCH 0661/1761] Remove call to deprecated validate_pyproject
 command

---
 tools/generate_validation_code.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/generate_validation_code.py b/tools/generate_validation_code.py
index b575fb1e1c..0171325bd0 100644
--- a/tools/generate_validation_code.py
+++ b/tools/generate_validation_code.py
@@ -14,7 +14,7 @@ def generate_pyproject_validation(dest: Union[str, PathLike]):
     cmd = [
         sys.executable,
         "-m",
-        "validate_pyproject.vendoring",
+        "validate_pyproject.pre_compile",
         f"--output-dir={dest}",
         "--enable-plugins",
         "setuptools",

From 491f2ca271b698df4f08d676e2485ce0eff04727 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 17 May 2024 15:22:07 -0400
Subject: [PATCH 0662/1761] Redirect bumpversion to rely on pyproject.toml.

---
 .bumpversion.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 557ae0ce34..3db4d1de2a 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -3,4 +3,4 @@ current_version = 69.5.1
 commit = True
 tag = True
 
-[bumpversion:file:setup.cfg]
+[bumpversion:file:pyproject.toml]

From b3d3e93a834426f1bef8712e1405495084eb3b7f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Sat, 18 May 2024 20:39:58 -0400
Subject: [PATCH 0663/1761] Adopted the bdist_wheel command from "wheel"

Closes #1386.
---
 docs/build_meta.rst                           |    6 +-
 docs/userguide/quickstart.rst                 |    6 +-
 newsfragments/1386.feature.rst                |    1 +
 .../backports.tarfile-1.0.0.dist-info/LICENSE |   17 -
 .../METADATA                                  |   44 -
 .../backports.tarfile-1.0.0.dist-info/RECORD  |    9 -
 .../backports.tarfile-1.0.0.dist-info/WHEEL   |    5 -
 .../top_level.txt                             |    1 -
 pkg_resources/_vendor/backports/tarfile.py    | 2900 -----------------
 .../INSTALLER                                 |    1 -
 .../jaraco.functools-4.0.0.dist-info/LICENSE  |   17 -
 .../jaraco.functools-4.0.0.dist-info/METADATA |   69 -
 .../jaraco.functools-4.0.0.dist-info/RECORD   |   10 -
 .../top_level.txt                             |    1 -
 .../_vendor/jaraco/functools/__init__.pyi     |    3 -
 .../more_itertools-10.2.0.dist-info/RECORD    |    1 +
 .../_vendor/platformdirs/__init__.py          |    2 +-
 setup.cfg                                     |    1 +
 .../INSTALLER                                 |    1 -
 .../backports.tarfile-1.0.0.dist-info/LICENSE |   17 -
 .../METADATA                                  |   44 -
 .../backports.tarfile-1.0.0.dist-info/RECORD  |    9 -
 .../REQUESTED                                 |    0
 .../backports.tarfile-1.0.0.dist-info/WHEEL   |    5 -
 .../top_level.txt                             |    1 -
 setuptools/_vendor/backports/tarfile.py       | 2900 -----------------
 .../_vendor/importlib_metadata/_compat.py     |    2 +-
 .../INSTALLER                                 |    1 -
 .../jaraco.functools-4.0.0.dist-info/LICENSE  |   17 -
 .../jaraco.functools-4.0.0.dist-info/METADATA |   69 -
 .../jaraco.functools-4.0.0.dist-info/RECORD   |   10 -
 .../jaraco.functools-4.0.0.dist-info/WHEEL    |    5 -
 .../top_level.txt                             |    1 -
 .../_vendor/jaraco/functools/__init__.pyi     |    3 -
 setuptools/_vendor/vendored.txt               |    1 +
 .../wheel-0.43.0.dist-info/LICENSE.txt        |   21 +
 .../_vendor/wheel-0.43.0.dist-info/METADATA   |   61 +
 .../_vendor/wheel-0.43.0.dist-info/RECORD     |   63 +
 .../wheel-0.43.0.dist-info/entry_points.txt   |    6 +
 setuptools/_vendor/wheel/__init__.py          |    3 +
 setuptools/_vendor/wheel/macosx_libfile.py    |  469 +++
 setuptools/_vendor/wheel/metadata.py          |  180 +
 setuptools/_vendor/wheel/util.py              |   26 +
 setuptools/_vendor/wheel/wheelfile.py         |  199 ++
 setuptools/build_meta.py                      |    2 +-
 setuptools/command/bdist_wheel.py             |  602 ++++
 setuptools/command/editable_wheel.py          |    4 +-
 setuptools/dist.py                            |    6 +
 setuptools/extern/__init__.py                 |    2 +
 setuptools/tests/test_bdist_wheel.py          |  460 +++
 tools/vendored.py                             |   57 +
 51 files changed, 2169 insertions(+), 6172 deletions(-)
 create mode 100644 newsfragments/1386.feature.rst
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/backports/tarfile.py
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
 delete mode 100644 setuptools/_vendor/backports/tarfile.py
 delete mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
 delete mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
 delete mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
 delete mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
 delete mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
 delete mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
 create mode 100644 setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt
 create mode 100644 setuptools/_vendor/wheel-0.43.0.dist-info/METADATA
 create mode 100644 setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt
 create mode 100644 setuptools/_vendor/wheel/__init__.py
 create mode 100644 setuptools/_vendor/wheel/macosx_libfile.py
 create mode 100644 setuptools/_vendor/wheel/metadata.py
 create mode 100644 setuptools/_vendor/wheel/util.py
 create mode 100644 setuptools/_vendor/wheel/wheelfile.py
 create mode 100644 setuptools/command/bdist_wheel.py
 create mode 100644 setuptools/tests/test_bdist_wheel.py

diff --git a/docs/build_meta.rst b/docs/build_meta.rst
index aa4f190712..5cb383227e 100644
--- a/docs/build_meta.rst
+++ b/docs/build_meta.rst
@@ -60,10 +60,8 @@ being used to package your scripts and install from source). To use it with
     build-backend = "setuptools.build_meta"
 
 ``build_meta`` implements ``setuptools``' build system support.
-The ``setuptools`` package implements the ``build_sdist``
-command and the ``wheel`` package implements the ``build_wheel``
-command; the latter is a dependency of the former
-exposed via :pep:`517` hooks.
+The ``setuptools`` package implements the ``build_sdist`` and
+``build_wheel`` commands.
 
 Use ``setuptools``' :ref:`declarative config <declarative config>` to
 specify the package information in ``setup.cfg``::
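
In practice, both hooks can be driven without a build frontend; a minimal sketch, assuming it runs from a project root containing a valid ``pyproject.toml``:

    from setuptools import build_meta

    # Each PEP 517 hook returns the basename of the artifact it wrote.
    wheel_name = build_meta.build_wheel("dist")
    sdist_name = build_meta.build_sdist("dist")
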
diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst
index a3f285f010..c4875d71fc 100644
--- a/docs/userguide/quickstart.rst
+++ b/docs/userguide/quickstart.rst
@@ -60,9 +60,9 @@ library will be used to actually do the packaging.
 
    Historically this documentation has unnecessarily listed ``wheel``
    in the ``requires`` list, and many projects still do that. This is
-   not recommended. The backend automatically adds ``wheel`` dependency
-   when it is required, and listing it explicitly causes it to be
-   unnecessarily required for source distribution builds.
+   not recommended, as the backend no longer requires the ``wheel``
+   package, and listing it explicitly causes it to be unnecessarily
+   required for source distribution builds.
    You should only include ``wheel`` in ``requires`` if you need to explicitly
    access it during build time (e.g. if your project needs a ``setup.py``
    script that imports ``wheel``).
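
For illustration, the configuration this advice points to is just the following (a sketch, mirroring the quickstart's own TOML examples):

    [build-system]
    requires = ["setuptools"]  # no "wheel" entry needed any more
    build-backend = "setuptools.build_meta"
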
diff --git a/newsfragments/1386.feature.rst b/newsfragments/1386.feature.rst
new file mode 100644
index 0000000000..c8d50bc22e
--- /dev/null
+++ b/newsfragments/1386.feature.rst
@@ -0,0 +1 @@
+Adopted the ``bdist_wheel`` command from the ``wheel`` project -- by :user:`agronholm`
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
deleted file mode 100644
index e7b64c87f8..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
+++ /dev/null
@@ -1,44 +0,0 @@
-Metadata-Version: 2.1
-Name: backports.tarfile
-Version: 1.0.0
-Summary: Backport of CPython tarfile module
-Home-page: https://github.com/jaraco/backports.tarfile
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-License-File: LICENSE
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
-   :target: https://pypi.org/project/backports.tarfile
-
-.. image:: https://img.shields.io/pypi/pyversions/backports.tarfile.svg
-
-.. image:: https://github.com/jaraco/backports.tarfile/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/jaraco/backports.tarfile/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. .. image:: https://readthedocs.org/projects/backportstarfile/badge/?version=latest
-..    :target: https://backportstarfile.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
deleted file mode 100644
index a6a44d8fcc..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
+++ /dev/null
@@ -1,9 +0,0 @@
-backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
-backports.tarfile-1.0.0.dist-info/RECORD,,
-backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
-backports/__pycache__/tarfile.cpython-312.pyc,,
-backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
deleted file mode 100644
index 99d2be5b64..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-backports
diff --git a/pkg_resources/_vendor/backports/tarfile.py b/pkg_resources/_vendor/backports/tarfile.py
deleted file mode 100644
index a7a9a6e7b9..0000000000
--- a/pkg_resources/_vendor/backports/tarfile.py
+++ /dev/null
@@ -1,2900 +0,0 @@
-#!/usr/bin/env python3
-#-------------------------------------------------------------------
-# tarfile.py
-#-------------------------------------------------------------------
-# Copyright (C) 2002 Lars Gustäbel <lars@gustaebel.de>
-# All rights reserved.
-#
-# Permission  is  hereby granted,  free  of charge,  to  any person
-# obtaining a  copy of  this software  and associated documentation
-# files  (the  "Software"),  to   deal  in  the  Software   without
-# restriction,  including  without limitation  the  rights to  use,
-# copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies  of  the  Software,  and to  permit  persons  to  whom the
-# Software  is  furnished  to  do  so,  subject  to  the  following
-# conditions:
-#
-# The above copyright  notice and this  permission notice shall  be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
-# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
-# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
-# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
-# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
-# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-#
-"""Read from and write to tar format archives.
-"""
-
-version     = "0.9.0"
-__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
-__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
-
-#---------
-# Imports
-#---------
-from builtins import open as bltn_open
-import sys
-import os
-import io
-import shutil
-import stat
-import time
-import struct
-import copy
-import re
-import warnings
-
-try:
-    import pwd
-except ImportError:
-    pwd = None
-try:
-    import grp
-except ImportError:
-    grp = None
-
-# os.symlink on Windows prior to 6.0 raises NotImplementedError
-# OSError (winerror=1314) will be raised if the caller does not hold the
-# SeCreateSymbolicLinkPrivilege privilege
-symlink_exception = (AttributeError, NotImplementedError, OSError)
-
-# from tarfile import *
-__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
-           "CompressionError", "StreamError", "ExtractError", "HeaderError",
-           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
-           "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
-           "tar_filter", "FilterError", "AbsoluteLinkError",
-           "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
-           "LinkOutsideDestinationError"]
-
-
-#---------------------------------------------------------
-# tar constants
-#---------------------------------------------------------
-NUL = b"\0"                     # the null character
-BLOCKSIZE = 512                 # length of processing blocks
-RECORDSIZE = BLOCKSIZE * 20     # length of records
-GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
-POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string
-
-LENGTH_NAME = 100               # maximum length of a filename
-LENGTH_LINK = 100               # maximum length of a linkname
-LENGTH_PREFIX = 155             # maximum length of the prefix field
-
-REGTYPE = b"0"                  # regular file
-AREGTYPE = b"\0"                # regular file
-LNKTYPE = b"1"                  # link (inside tarfile)
-SYMTYPE = b"2"                  # symbolic link
-CHRTYPE = b"3"                  # character special device
-BLKTYPE = b"4"                  # block special device
-DIRTYPE = b"5"                  # directory
-FIFOTYPE = b"6"                 # fifo special device
-CONTTYPE = b"7"                 # contiguous file
-
-GNUTYPE_LONGNAME = b"L"         # GNU tar longname
-GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
-GNUTYPE_SPARSE = b"S"           # GNU tar sparse file
-
-XHDTYPE = b"x"                  # POSIX.1-2001 extended header
-XGLTYPE = b"g"                  # POSIX.1-2001 global header
-SOLARIS_XHDTYPE = b"X"          # Solaris extended header
-
-USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
-GNU_FORMAT = 1                  # GNU tar format
-PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
-DEFAULT_FORMAT = PAX_FORMAT
-
-#---------------------------------------------------------
-# tarfile constants
-#---------------------------------------------------------
-# File types that tarfile supports:
-SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
-                   SYMTYPE, DIRTYPE, FIFOTYPE,
-                   CONTTYPE, CHRTYPE, BLKTYPE,
-                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-                   GNUTYPE_SPARSE)
-
-# File types that will be treated as a regular file.
-REGULAR_TYPES = (REGTYPE, AREGTYPE,
-                 CONTTYPE, GNUTYPE_SPARSE)
-
-# File types that are part of the GNU tar format.
-GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-             GNUTYPE_SPARSE)
-
-# Fields from a pax header that override a TarInfo attribute.
-PAX_FIELDS = ("path", "linkpath", "size", "mtime",
-              "uid", "gid", "uname", "gname")
-
-# Fields from a pax header that are affected by hdrcharset.
-PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
-
-# Fields in a pax header that are numbers, all other fields
-# are treated as strings.
-PAX_NUMBER_FIELDS = {
-    "atime": float,
-    "ctime": float,
-    "mtime": float,
-    "uid": int,
-    "gid": int,
-    "size": int
-}
-
-#---------------------------------------------------------
-# initialization
-#---------------------------------------------------------
-if os.name == "nt":
-    ENCODING = "utf-8"
-else:
-    ENCODING = sys.getfilesystemencoding()
-
-#---------------------------------------------------------
-# Some useful functions
-#---------------------------------------------------------
-
-def stn(s, length, encoding, errors):
-    """Convert a string to a null-terminated bytes object.
-    """
-    if s is None:
-        raise ValueError("metadata cannot contain None")
-    s = s.encode(encoding, errors)
-    return s[:length] + (length - len(s)) * NUL
-
-def nts(s, encoding, errors):
-    """Convert a null-terminated bytes object to a string.
-    """
-    p = s.find(b"\0")
-    if p != -1:
-        s = s[:p]
-    return s.decode(encoding, errors)
-
-def nti(s):
-    """Convert a number field to a python number.
-    """
-    # There are two possible encodings for a number field, see
-    # itn() below.
-    if s[0] in (0o200, 0o377):
-        n = 0
-        for i in range(len(s) - 1):
-            n <<= 8
-            n += s[i + 1]
-        if s[0] == 0o377:
-            n = -(256 ** (len(s) - 1) - n)
-    else:
-        try:
-            s = nts(s, "ascii", "strict")
-            n = int(s.strip() or "0", 8)
-        except ValueError:
-            raise InvalidHeaderError("invalid header")
-    return n
-
-def itn(n, digits=8, format=DEFAULT_FORMAT):
-    """Convert a python number to a number field.
-    """
-    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
-    # octal digits followed by a null-byte, this allows values up to
-    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
-    # that if necessary. A leading 0o200 or 0o377 byte indicates this
-    # particular encoding, the following digits-1 bytes are a big-endian
-    # base-256 representation. This allows values up to (256**(digits-1))-1.
-    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
-    # number.
-    original_n = n
-    n = int(n)
-    if 0 <= n < 8 ** (digits - 1):
-        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
-    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
-        if n >= 0:
-            s = bytearray([0o200])
-        else:
-            s = bytearray([0o377])
-            n = 256 ** digits + n
-
-        for i in range(digits - 1):
-            s.insert(1, n & 0o377)
-            n >>= 8
-    else:
-        raise ValueError("overflow in number field")
-
-    return s
-
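
A worked example of the two encodings, assuming the ``itn()``/``nti()`` helpers above: a 12-digit octal field tops out at 8**11 - 1 (just under 8 GiB), so a 10 GiB size forces the GNU base-256 form.

    size = 10 * 1024**3
    field = itn(size, digits=12, format=GNU_FORMAT)
    assert field[0] == 0o200   # marker byte: positive base-256 number
    assert nti(field) == size  # nti() decodes either representation
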
-def calc_chksums(buf):
-    """Calculate the checksum for a member's header by summing up all
-       characters except for the chksum field which is treated as if
-       it was filled with spaces. According to the GNU tar sources,
-       some tars (Sun and NeXT) calculate chksum with signed char,
-       which will be different if there are chars in the buffer with
-       the high bit set. So we calculate two checksums, unsigned and
-       signed.
-    """
-    unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
-    signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
-    return unsigned_chksum, signed_chksum
-
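
A quick sanity check of the dual checksums, assuming this module's ``TarInfo`` and ``nti()`` helpers: the value stored in a header must match one of the two sums.

    buf = TarInfo("example.txt").tobuf()
    stored = nti(buf[148:156])      # the chksum field starts at offset 148
    assert stored in calc_chksums(buf)
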
-def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
-    """Copy length bytes from fileobj src to fileobj dst.
-       If length is None, copy the entire content.
-    """
-    bufsize = bufsize or 16 * 1024
-    if length == 0:
-        return
-    if length is None:
-        shutil.copyfileobj(src, dst, bufsize)
-        return
-
-    blocks, remainder = divmod(length, bufsize)
-    for b in range(blocks):
-        buf = src.read(bufsize)
-        if len(buf) < bufsize:
-            raise exception("unexpected end of data")
-        dst.write(buf)
-
-    if remainder != 0:
-        buf = src.read(remainder)
-        if len(buf) < remainder:
-            raise exception("unexpected end of data")
-        dst.write(buf)
-    return
-
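
A short check of the truncation guard, assuming the ``copyfileobj()`` above: asking for more bytes than the source holds raises the supplied exception.

    import io
    src, dst = io.BytesIO(b"abc"), io.BytesIO()
    try:
        copyfileobj(src, dst, length=5)
    except OSError:
        print("unexpected end of data")   # only 3 of 5 bytes available
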
-def _safe_print(s):
-    encoding = getattr(sys.stdout, 'encoding', None)
-    if encoding is not None:
-        s = s.encode(encoding, 'backslashreplace').decode(encoding)
-    print(s, end=' ')
-
-
-class TarError(Exception):
-    """Base exception."""
-    pass
-class ExtractError(TarError):
-    """General exception for extract errors."""
-    pass
-class ReadError(TarError):
-    """Exception for unreadable tar archives."""
-    pass
-class CompressionError(TarError):
-    """Exception for unavailable compression methods."""
-    pass
-class StreamError(TarError):
-    """Exception for unsupported operations on stream-like TarFiles."""
-    pass
-class HeaderError(TarError):
-    """Base exception for header errors."""
-    pass
-class EmptyHeaderError(HeaderError):
-    """Exception for empty headers."""
-    pass
-class TruncatedHeaderError(HeaderError):
-    """Exception for truncated headers."""
-    pass
-class EOFHeaderError(HeaderError):
-    """Exception for end of file headers."""
-    pass
-class InvalidHeaderError(HeaderError):
-    """Exception for invalid headers."""
-    pass
-class SubsequentHeaderError(HeaderError):
-    """Exception for missing and invalid extended headers."""
-    pass
-
-#---------------------------
-# internal stream interface
-#---------------------------
-class _LowLevelFile:
-    """Low-level file object. Supports reading and writing.
-       It is used instead of a regular file object for streaming
-       access.
-    """
-
-    def __init__(self, name, mode):
-        mode = {
-            "r": os.O_RDONLY,
-            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
-        }[mode]
-        if hasattr(os, "O_BINARY"):
-            mode |= os.O_BINARY
-        self.fd = os.open(name, mode, 0o666)
-
-    def close(self):
-        os.close(self.fd)
-
-    def read(self, size):
-        return os.read(self.fd, size)
-
-    def write(self, s):
-        os.write(self.fd, s)
-
-class _Stream:
-    """Class that serves as an adapter between TarFile and
-       a stream-like object.  The stream-like object only
-       needs to have a read() or write() method that works with bytes,
-       and the method is accessed blockwise.
-       Use of gzip or bzip2 compression is possible.
-       A stream-like object could be for example: sys.stdin.buffer,
-       sys.stdout.buffer, a socket, a tape device etc.
-
-       _Stream is intended to be used only internally.
-    """
-
-    def __init__(self, name, mode, comptype, fileobj, bufsize,
-                 compresslevel):
-        """Construct a _Stream object.
-        """
-        self._extfileobj = True
-        if fileobj is None:
-            fileobj = _LowLevelFile(name, mode)
-            self._extfileobj = False
-
-        if comptype == '*':
-            # Enable transparent compression detection for the
-            # stream interface
-            fileobj = _StreamProxy(fileobj)
-            comptype = fileobj.getcomptype()
-
-        self.name     = name or ""
-        self.mode     = mode
-        self.comptype = comptype
-        self.fileobj  = fileobj
-        self.bufsize  = bufsize
-        self.buf      = b""
-        self.pos      = 0
-        self.closed   = False
-
-        try:
-            if comptype == "gz":
-                try:
-                    import zlib
-                except ImportError:
-                    raise CompressionError("zlib module is not available") from None
-                self.zlib = zlib
-                self.crc = zlib.crc32(b"")
-                if mode == "r":
-                    self.exception = zlib.error
-                    self._init_read_gz()
-                else:
-                    self._init_write_gz(compresslevel)
-
-            elif comptype == "bz2":
-                try:
-                    import bz2
-                except ImportError:
-                    raise CompressionError("bz2 module is not available") from None
-                if mode == "r":
-                    self.dbuf = b""
-                    self.cmp = bz2.BZ2Decompressor()
-                    self.exception = OSError
-                else:
-                    self.cmp = bz2.BZ2Compressor(compresslevel)
-
-            elif comptype == "xz":
-                try:
-                    import lzma
-                except ImportError:
-                    raise CompressionError("lzma module is not available") from None
-                if mode == "r":
-                    self.dbuf = b""
-                    self.cmp = lzma.LZMADecompressor()
-                    self.exception = lzma.LZMAError
-                else:
-                    self.cmp = lzma.LZMACompressor()
-
-            elif comptype != "tar":
-                raise CompressionError("unknown compression type %r" % comptype)
-
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    def __del__(self):
-        if hasattr(self, "closed") and not self.closed:
-            self.close()
-
-    def _init_write_gz(self, compresslevel):
-        """Initialize for writing with gzip compression.
-        """
-        self.cmp = self.zlib.compressobj(compresslevel,
-                                         self.zlib.DEFLATED,
-                                         -self.zlib.MAX_WBITS,
-                                         self.zlib.DEF_MEM_LEVEL,
-                                         0)
-        timestamp = struct.pack("<L", int(time.time()))
-        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
-        if self.name.endswith(".gz"):
-            self.name = self.name[:-3]
-        # Honor "directory components removed" from RFC1952
-        self.name = os.path.basename(self.name)
-        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
-        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
-
-    def write(self, s):
-        """Write string s to the stream.
-        """
-        if self.comptype == "gz":
-            self.crc = self.zlib.crc32(s, self.crc)
-        self.pos += len(s)
-        if self.comptype != "tar":
-            s = self.cmp.compress(s)
-        self.__write(s)
-
-    def __write(self, s):
-        """Write string s to the stream if a whole new block
-           is ready to be written.
-        """
-        self.buf += s
-        while len(self.buf) > self.bufsize:
-            self.fileobj.write(self.buf[:self.bufsize])
-            self.buf = self.buf[self.bufsize:]
-
-    def close(self):
-        """Close the _Stream object. No operation should be
-           done on it afterwards.
-        """
-        if self.closed:
-            return
-
-        self.closed = True
-        try:
-            if self.mode == "w" and self.comptype != "tar":
-                self.buf += self.cmp.flush()
-
-            if self.mode == "w" and self.buf:
-                self.fileobj.write(self.buf)
-                self.buf = b""
-                if self.comptype == "gz":
-                    self.fileobj.write(struct.pack("<L", self.crc))
-                    self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
-        finally:
-            if not self._extfileobj:
-                self.fileobj.close()
-
-    def _init_read_gz(self):
-        """Initialize for reading a gzip compressed fileobj.
-        """
-        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
-        self.dbuf = b""
-
-        # taken from gzip.GzipFile with some alterations
-        if self.__read(2) != b"\037\213":
-            raise ReadError("not a gzip file")
-        if self.__read(1) != b"\010":
-            raise CompressionError("unsupported compression method")
-
-        flag = ord(self.__read(1))
-        self.__read(6)
-
-        if flag & 4:
-            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
-            self.read(xlen)
-        if flag & 8:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 16:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 2:
-            self.__read(2)
-
-    def tell(self):
-        """Return the stream's file pointer position.
-        """
-        return self.pos
-
-    def seek(self, pos=0):
-        """Set the stream's file pointer to pos. Negative seeking
-           is forbidden.
-        """
-        if pos - self.pos >= 0:
-            blocks, remainder = divmod(pos - self.pos, self.bufsize)
-            for i in range(blocks):
-                self.read(self.bufsize)
-            self.read(remainder)
-        else:
-            raise StreamError("seeking backwards is not allowed")
-        return self.pos
-
-    def read(self, size):
-        """Return the next size number of bytes from the stream."""
-        assert size is not None
-        buf = self._read(size)
-        self.pos += len(buf)
-        return buf
-
-    def _read(self, size):
-        """Return size bytes from the stream.
-        """
-        if self.comptype == "tar":
-            return self.__read(size)
-
-        c = len(self.dbuf)
-        t = [self.dbuf]
-        while c < size:
-            # Skip underlying buffer to avoid unaligned double buffering.
-            if self.buf:
-                buf = self.buf
-                self.buf = b""
-            else:
-                buf = self.fileobj.read(self.bufsize)
-                if not buf:
-                    break
-            try:
-                buf = self.cmp.decompress(buf)
-            except self.exception as e:
-                raise ReadError("invalid compressed data") from e
-            t.append(buf)
-            c += len(buf)
-        t = b"".join(t)
-        self.dbuf = t[size:]
-        return t[:size]
-
-    def __read(self, size):
-        """Return size bytes from stream. If internal buffer is empty,
-           read another block from the stream.
-        """
-        c = len(self.buf)
-        t = [self.buf]
-        while c < size:
-            buf = self.fileobj.read(self.bufsize)
-            if not buf:
-                break
-            t.append(buf)
-            c += len(buf)
-        t = b"".join(t)
-        self.buf = t[size:]
-        return t[:size]
-# class _Stream
-
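
For orientation, this is the machinery behind the stdlib-compatible ``|`` open modes, which this backport mirrors; a usage sketch:

    import sys
    import tarfile

    # 'r|*' autodetects compression via _StreamProxy; seeking backwards
    # raises StreamError, so members are visited strictly in file order.
    with tarfile.open(fileobj=sys.stdin.buffer, mode="r|*") as tar:
        for member in tar:
            print(member.name)
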
-class _StreamProxy(object):
-    """Small proxy class that enables transparent compression
-       detection for the Stream interface (mode 'r|*').
-    """
-
-    def __init__(self, fileobj):
-        self.fileobj = fileobj
-        self.buf = self.fileobj.read(BLOCKSIZE)
-
-    def read(self, size):
-        self.read = self.fileobj.read
-        return self.buf
-
-    def getcomptype(self):
-        if self.buf.startswith(b"\x1f\x8b\x08"):
-            return "gz"
-        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
-            return "bz2"
-        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
-            return "xz"
-        else:
-            return "tar"
-
-    def close(self):
-        self.fileobj.close()
-# class StreamProxy
-
-#------------------------
-# Extraction file object
-#------------------------
-class _FileInFile(object):
-    """A thin wrapper around an existing file object that
-       provides a part of its data as an individual file
-       object.
-    """
-
-    def __init__(self, fileobj, offset, size, name, blockinfo=None):
-        self.fileobj = fileobj
-        self.offset = offset
-        self.size = size
-        self.position = 0
-        self.name = name
-        self.closed = False
-
-        if blockinfo is None:
-            blockinfo = [(0, size)]
-
-        # Construct a map with data and zero blocks.
-        self.map_index = 0
-        self.map = []
-        lastpos = 0
-        realpos = self.offset
-        for offset, size in blockinfo:
-            if offset > lastpos:
-                self.map.append((False, lastpos, offset, None))
-            self.map.append((True, offset, offset + size, realpos))
-            realpos += size
-            lastpos = offset + size
-        if lastpos < self.size:
-            self.map.append((False, lastpos, self.size, None))
-
-    def flush(self):
-        pass
-
-    def readable(self):
-        return True
-
-    def writable(self):
-        return False
-
-    def seekable(self):
-        return self.fileobj.seekable()
-
-    def tell(self):
-        """Return the current file position.
-        """
-        return self.position
-
-    def seek(self, position, whence=io.SEEK_SET):
-        """Seek to a position in the file.
-        """
-        if whence == io.SEEK_SET:
-            self.position = min(max(position, 0), self.size)
-        elif whence == io.SEEK_CUR:
-            if position < 0:
-                self.position = max(self.position + position, 0)
-            else:
-                self.position = min(self.position + position, self.size)
-        elif whence == io.SEEK_END:
-            self.position = max(min(self.size + position, self.size), 0)
-        else:
-            raise ValueError("Invalid argument")
-        return self.position
-
-    def read(self, size=None):
-        """Read data from the file.
-        """
-        if size is None:
-            size = self.size - self.position
-        else:
-            size = min(size, self.size - self.position)
-
-        buf = b""
-        while size > 0:
-            while True:
-                data, start, stop, offset = self.map[self.map_index]
-                if start <= self.position < stop:
-                    break
-                else:
-                    self.map_index += 1
-                    if self.map_index == len(self.map):
-                        self.map_index = 0
-            length = min(size, stop - self.position)
-            if data:
-                self.fileobj.seek(offset + (self.position - start))
-                b = self.fileobj.read(length)
-                if len(b) != length:
-                    raise ReadError("unexpected end of data")
-                buf += b
-            else:
-                buf += NUL * length
-            size -= length
-            self.position += length
-        return buf
-
-    def readinto(self, b):
-        buf = self.read(len(b))
-        b[:len(buf)] = buf
-        return len(buf)
-
-    def close(self):
-        self.closed = True
-#class _FileInFile
-
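
A worked trace of the data/zero map built in ``_FileInFile.__init__`` above, for a 10-byte sparse member whose data blocks cover offsets 0-2 and 6-8 and whose payload starts at real offset 1000 in the archive:

    # blockinfo=[(0, 3), (6, 3)], size=10, offset=1000 produces:
    #   (True,  0,  3, 1000)   # data: read from the archive at 1000
    #   (False, 3,  6, None)   # hole: materialized as NUL bytes
    #   (True,  6,  9, 1003)   # data: read from the archive at 1003
    #   (False, 9, 10, None)   # trailing hole up to the logical size
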
-class ExFileObject(io.BufferedReader):
-
-    def __init__(self, tarfile, tarinfo):
-        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
-                tarinfo.size, tarinfo.name, tarinfo.sparse)
-        super().__init__(fileobj)
-#class ExFileObject
-
-
-#-----------------------------
-# extraction filters (PEP 706)
-#-----------------------------
-
-class FilterError(TarError):
-    pass
-
-class AbsolutePathError(FilterError):
-    def __init__(self, tarinfo):
-        self.tarinfo = tarinfo
-        super().__init__(f'member {tarinfo.name!r} has an absolute path')
-
-class OutsideDestinationError(FilterError):
-    def __init__(self, tarinfo, path):
-        self.tarinfo = tarinfo
-        self._path = path
-        super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
-                         + 'which is outside the destination')
-
-class SpecialFileError(FilterError):
-    def __init__(self, tarinfo):
-        self.tarinfo = tarinfo
-        super().__init__(f'{tarinfo.name!r} is a special file')
-
-class AbsoluteLinkError(FilterError):
-    def __init__(self, tarinfo):
-        self.tarinfo = tarinfo
-        super().__init__(f'{tarinfo.name!r} is a link to an absolute path')
-
-class LinkOutsideDestinationError(FilterError):
-    def __init__(self, tarinfo, path):
-        self.tarinfo = tarinfo
-        self._path = path
-        super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
-                         + 'which is outside the destination')
-
-def _get_filtered_attrs(member, dest_path, for_data=True):
-    new_attrs = {}
-    name = member.name
-    dest_path = os.path.realpath(dest_path)
-    # Strip leading / (tar's directory separator) from filenames.
-    # Include os.sep (target OS directory separator) as well.
-    if name.startswith(('/', os.sep)):
-        name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
-    if os.path.isabs(name):
-        # Path is absolute even after stripping.
-        # For example, 'C:/foo' on Windows.
-        raise AbsolutePathError(member)
-    # Ensure we stay in the destination
-    target_path = os.path.realpath(os.path.join(dest_path, name))
-    if os.path.commonpath([target_path, dest_path]) != dest_path:
-        raise OutsideDestinationError(member, target_path)
-    # Limit permissions (no high bits, and go-w)
-    mode = member.mode
-    if mode is not None:
-        # Strip high bits & group/other write bits
-        mode = mode & 0o755
-        if for_data:
-            # For data, handle permissions & file types
-            if member.isreg() or member.islnk():
-                if not mode & 0o100:
-                    # Clear executable bits if not executable by user
-                    mode &= ~0o111
-                # Ensure owner can read & write
-                mode |= 0o600
-            elif member.isdir() or member.issym():
-                # Ignore mode for directories & symlinks
-                mode = None
-            else:
-                # Reject special files
-                raise SpecialFileError(member)
-        if mode != member.mode:
-            new_attrs['mode'] = mode
-    if for_data:
-        # Ignore ownership for 'data'
-        if member.uid is not None:
-            new_attrs['uid'] = None
-        if member.gid is not None:
-            new_attrs['gid'] = None
-        if member.uname is not None:
-            new_attrs['uname'] = None
-        if member.gname is not None:
-            new_attrs['gname'] = None
-        # Check link destination for 'data'
-        if member.islnk() or member.issym():
-            if os.path.isabs(member.linkname):
-                raise AbsoluteLinkError(member)
-            if member.issym():
-                target_path = os.path.join(dest_path,
-                                           os.path.dirname(name),
-                                           member.linkname)
-            else:
-                target_path = os.path.join(dest_path,
-                                           member.linkname)
-            target_path = os.path.realpath(target_path)
-            if os.path.commonpath([target_path, dest_path]) != dest_path:
-                raise LinkOutsideDestinationError(member, target_path)
-    return new_attrs
-
-def fully_trusted_filter(member, dest_path):
-    return member
-
-def tar_filter(member, dest_path):
-    new_attrs = _get_filtered_attrs(member, dest_path, False)
-    if new_attrs:
-        return member.replace(**new_attrs, deep=False)
-    return member
-
-def data_filter(member, dest_path):
-    new_attrs = _get_filtered_attrs(member, dest_path, True)
-    if new_attrs:
-        return member.replace(**new_attrs, deep=False)
-    return member
-
-_NAMED_FILTERS = {
-    "fully_trusted": fully_trusted_filter,
-    "tar": tar_filter,
-    "data": data_filter,
-}
-
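
Usage sketch for the named filters registered above (stdlib-compatible API): pass ``filter=`` when extracting untrusted archives, per PEP 706.

    import tarfile

    with tarfile.open("untrusted.tar.gz") as tar:
        # 'data' rejects absolute paths, links escaping the destination,
        # and special files, and drops ownership information.
        tar.extractall(path="dest", filter="data")
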
-#------------------
-# Exported Classes
-#------------------
-
-# Sentinel for replace() defaults, meaning "don't change the attribute"
-_KEEP = object()
-
-class TarInfo(object):
-    """Informational class which holds the details about an
-       archive member given by a tar header block.
-       TarInfo objects are returned by TarFile.getmember(),
-       TarFile.getmembers() and TarFile.gettarinfo() and are
-       usually created internally.
-    """
-
-    __slots__ = dict(
-        name = 'Name of the archive member.',
-        mode = 'Permission bits.',
-        uid = 'User ID of the user who originally stored this member.',
-        gid = 'Group ID of the user who originally stored this member.',
-        size = 'Size in bytes.',
-        mtime = 'Time of last modification.',
-        chksum = 'Header checksum.',
-        type = ('File type. type is usually one of these constants: '
-                'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
-                'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
-        linkname = ('Name of the target file name, which is only present '
-                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
-        uname = 'User name.',
-        gname = 'Group name.',
-        devmajor = 'Device major number.',
-        devminor = 'Device minor number.',
-        offset = 'The tar header starts here.',
-        offset_data = "The file's data starts here.",
-        pax_headers = ('A dictionary containing key-value pairs of an '
-                       'associated pax extended header.'),
-        sparse = 'Sparse member information.',
-        tarfile = None,
-        _sparse_structs = None,
-        _link_target = None,
-        )
-
-    def __init__(self, name=""):
-        """Construct a TarInfo object. name is the optional name
-           of the member.
-        """
-        self.name = name        # member name
-        self.mode = 0o644       # file permissions
-        self.uid = 0            # user id
-        self.gid = 0            # group id
-        self.size = 0           # file size
-        self.mtime = 0          # modification time
-        self.chksum = 0         # header checksum
-        self.type = REGTYPE     # member type
-        self.linkname = ""      # link name
-        self.uname = ""         # user name
-        self.gname = ""         # group name
-        self.devmajor = 0       # device major number
-        self.devminor = 0       # device minor number
-
-        self.offset = 0         # the tar header starts here
-        self.offset_data = 0    # the file's data starts here
-
-        self.sparse = None      # sparse member information
-        self.pax_headers = {}   # pax header information
-
-    @property
-    def path(self):
-        'In pax headers, "name" is called "path".'
-        return self.name
-
-    @path.setter
-    def path(self, name):
-        self.name = name
-
-    @property
-    def linkpath(self):
-        'In pax headers, "linkname" is called "linkpath".'
-        return self.linkname
-
-    @linkpath.setter
-    def linkpath(self, linkname):
-        self.linkname = linkname
-
-    def __repr__(self):
-        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
-
-    def replace(self, *,
-                name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
-                uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
-                deep=True, _KEEP=_KEEP):
-        """Return a deep copy of self with the given attributes replaced.
-        """
-        if deep:
-            result = copy.deepcopy(self)
-        else:
-            result = copy.copy(self)
-        if name is not _KEEP:
-            result.name = name
-        if mtime is not _KEEP:
-            result.mtime = mtime
-        if mode is not _KEEP:
-            result.mode = mode
-        if linkname is not _KEEP:
-            result.linkname = linkname
-        if uid is not _KEEP:
-            result.uid = uid
-        if gid is not _KEEP:
-            result.gid = gid
-        if uname is not _KEEP:
-            result.uname = uname
-        if gname is not _KEEP:
-            result.gname = gname
-        return result
-
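
For example, the extraction filters earlier in this file rely on ``replace()`` returning a sanitized copy, while ``_KEEP`` leaves unspecified attributes alone:

    info = TarInfo("pkg/data.bin")
    safe = info.replace(uid=None, gid=None, uname=None, gname=None, deep=False)
    assert safe.name == info.name   # untouched fields carry over
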
-    def get_info(self):
-        """Return the TarInfo's attributes as a dictionary.
-        """
-        if self.mode is None:
-            mode = None
-        else:
-            mode = self.mode & 0o7777
-        info = {
-            "name":     self.name,
-            "mode":     mode,
-            "uid":      self.uid,
-            "gid":      self.gid,
-            "size":     self.size,
-            "mtime":    self.mtime,
-            "chksum":   self.chksum,
-            "type":     self.type,
-            "linkname": self.linkname,
-            "uname":    self.uname,
-            "gname":    self.gname,
-            "devmajor": self.devmajor,
-            "devminor": self.devminor
-        }
-
-        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
-            info["name"] += "/"
-
-        return info
-
-    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
-        """Return a tar header as a string of 512 byte blocks.
-        """
-        info = self.get_info()
-        for name, value in info.items():
-            if value is None:
-                raise ValueError("%s may not be None" % name)
-
-        if format == USTAR_FORMAT:
-            return self.create_ustar_header(info, encoding, errors)
-        elif format == GNU_FORMAT:
-            return self.create_gnu_header(info, encoding, errors)
-        elif format == PAX_FORMAT:
-            return self.create_pax_header(info, encoding)
-        else:
-            raise ValueError("invalid format")
-
-    def create_ustar_header(self, info, encoding, errors):
-        """Return the object as a ustar header block.
-        """
-        info["magic"] = POSIX_MAGIC
-
-        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
-            raise ValueError("linkname is too long")
-
-        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
-            info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
-
-        return self._create_header(info, USTAR_FORMAT, encoding, errors)
-
-    def create_gnu_header(self, info, encoding, errors):
-        """Return the object as a GNU header block sequence.
-        """
-        info["magic"] = GNU_MAGIC
-
-        buf = b""
-        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
-            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
-
-        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
-            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
-
-        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
-
-    def create_pax_header(self, info, encoding):
-        """Return the object as a ustar header block. If it cannot be
-           represented this way, prepend a pax extended header sequence
-           with supplement information.
-        """
-        info["magic"] = POSIX_MAGIC
-        pax_headers = self.pax_headers.copy()
-
-        # Test string fields for values that exceed the field length or cannot
-        # be represented in ASCII encoding.
-        for name, hname, length in (
-                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
-                ("uname", "uname", 32), ("gname", "gname", 32)):
-
-            if hname in pax_headers:
-                # The pax header has priority.
-                continue
-
-            # Try to encode the string as ASCII.
-            try:
-                info[name].encode("ascii", "strict")
-            except UnicodeEncodeError:
-                pax_headers[hname] = info[name]
-                continue
-
-            if len(info[name]) > length:
-                pax_headers[hname] = info[name]
-
-        # Test number fields for values that exceed the field limit or values
-        # that need to be stored as float.
-        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
-            needs_pax = False
-
-            val = info[name]
-            val_is_float = isinstance(val, float)
-            val_int = round(val) if val_is_float else val
-            if not 0 <= val_int < 8 ** (digits - 1):
-                # Avoid overflow.
-                info[name] = 0
-                needs_pax = True
-            elif val_is_float:
-                # Put rounded value in ustar header, and full
-                # precision value in pax header.
-                info[name] = val_int
-                needs_pax = True
-
-            # The existing pax header has priority.
-            if needs_pax and name not in pax_headers:
-                pax_headers[name] = str(val)
-
-        # Create a pax extended header if necessary.
-        if pax_headers:
-            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
-        else:
-            buf = b""
-
-        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
-
-    @classmethod
-    def create_pax_global_header(cls, pax_headers):
-        """Return the object as a pax global header block sequence.
-        """
-        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
-
-    def _posix_split_name(self, name, encoding, errors):
-        """Split a name longer than 100 chars into a prefix
-           and a name part.
-        """
-        components = name.split("/")
-        for i in range(1, len(components)):
-            prefix = "/".join(components[:i])
-            name = "/".join(components[i:])
-            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
-                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
-                break
-        else:
-            raise ValueError("name is too long")
-
-        return prefix, name
-
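
A worked example: a 130-character path cannot fit the 100-byte ustar name field, so it is split at a ``/`` into a prefix (at most 155 bytes) and a name (at most 100 bytes).

    long_name = "a" * 60 + "/" + "b" * 69   # 130 characters
    prefix, name = TarInfo()._posix_split_name(long_name, "utf-8", "strict")
    assert (prefix, name) == ("a" * 60, "b" * 69)
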
-    @staticmethod
-    def _create_header(info, format, encoding, errors):
-        """Return a header block. info is a dictionary with file
-           information, format must be one of the *_FORMAT constants.
-        """
-        has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
-        if has_device_fields:
-            devmajor = itn(info.get("devmajor", 0), 8, format)
-            devminor = itn(info.get("devminor", 0), 8, format)
-        else:
-            devmajor = stn("", 8, encoding, errors)
-            devminor = stn("", 8, encoding, errors)
-
-        # None values in metadata should cause ValueError.
-        # itn()/stn() do this for all fields except type.
-        filetype = info.get("type", REGTYPE)
-        if filetype is None:
-            raise ValueError("TarInfo.type must not be None")
-
-        parts = [
-            stn(info.get("name", ""), 100, encoding, errors),
-            itn(info.get("mode", 0) & 0o7777, 8, format),
-            itn(info.get("uid", 0), 8, format),
-            itn(info.get("gid", 0), 8, format),
-            itn(info.get("size", 0), 12, format),
-            itn(info.get("mtime", 0), 12, format),
-            b"        ", # checksum field
-            filetype,
-            stn(info.get("linkname", ""), 100, encoding, errors),
-            info.get("magic", POSIX_MAGIC),
-            stn(info.get("uname", ""), 32, encoding, errors),
-            stn(info.get("gname", ""), 32, encoding, errors),
-            devmajor,
-            devminor,
-            stn(info.get("prefix", ""), 155, encoding, errors)
-        ]
-
-        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
-        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
-        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
-        return buf
-
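
The slice arithmetic above is easier to audit with the offsets spelled out; the 8-byte chksum field begins at offset 148 of the 512-byte block:

    assert 512 - 364 == 148          # bytes 0..147 are kept as-is
    assert 512 - 357 == 155          # copying resumes at offset 155
    assert len("%06o\0" % 0) == 7    # "%06o\0" fills offsets 148..154
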
-    @staticmethod
-    def _create_payload(payload):
-        """Return the string payload filled with zero bytes
-           up to the next 512 byte border.
-        """
-        blocks, remainder = divmod(len(payload), BLOCKSIZE)
-        if remainder > 0:
-            payload += (BLOCKSIZE - remainder) * NUL
-        return payload
-
-    @classmethod
-    def _create_gnu_long_header(cls, name, type, encoding, errors):
-        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
-           for name.
-        """
-        name = name.encode(encoding, errors) + NUL
-
-        info = {}
-        info["name"] = "././@LongLink"
-        info["type"] = type
-        info["size"] = len(name)
-        info["magic"] = GNU_MAGIC
-
-        # create extended header + name blocks.
-        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
-                cls._create_payload(name)
-
-    @classmethod
-    def _create_pax_generic_header(cls, pax_headers, type, encoding):
-        """Return a POSIX.1-2008 extended or global header sequence
-           that contains a list of keyword, value pairs. The values
-           must be strings.
-        """
-        # Check if one of the fields contains surrogate characters and thereby
-        # forces hdrcharset=BINARY, see _proc_pax() for more information.
-        binary = False
-        for keyword, value in pax_headers.items():
-            try:
-                value.encode("utf-8", "strict")
-            except UnicodeEncodeError:
-                binary = True
-                break
-
-        records = b""
-        if binary:
-            # Put the hdrcharset field at the beginning of the header.
-            records += b"21 hdrcharset=BINARY\n"
-
-        for keyword, value in pax_headers.items():
-            keyword = keyword.encode("utf-8")
-            if binary:
-                # Try to restore the original byte representation of `value'.
-                # Needless to say, the encoding must match the string.
-                value = value.encode(encoding, "surrogateescape")
-            else:
-                value = value.encode("utf-8")
-
-            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
-            n = p = 0
-            while True:
-                n = l + len(str(p))
-                if n == p:
-                    break
-                p = n
-            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
-
-        # We use a hardcoded "././@PaxHeader" name like star does
-        # instead of the one that POSIX recommends.
-        info = {}
-        info["name"] = "././@PaxHeader"
-        info["type"] = type
-        info["size"] = len(records)
-        info["magic"] = POSIX_MAGIC
-
-        # Create pax header + record blocks.
-        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
-                cls._create_payload(records)
-
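
A worked example of the self-referential length loop above: for keyword ``path`` and value ``foo``, ``l`` is 4 + 3 + 3 = 10, and the iteration settles on 12 because the finished record is itself 12 bytes long.

    record = b"12 path=foo\n"
    assert len(record) == 12   # '12' + ' ' + 'path' + '=' + 'foo' + '\n'
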
-    @classmethod
-    def frombuf(cls, buf, encoding, errors):
-        """Construct a TarInfo object from a 512 byte bytes object.
-        """
-        if len(buf) == 0:
-            raise EmptyHeaderError("empty header")
-        if len(buf) != BLOCKSIZE:
-            raise TruncatedHeaderError("truncated header")
-        if buf.count(NUL) == BLOCKSIZE:
-            raise EOFHeaderError("end of file header")
-
-        chksum = nti(buf[148:156])
-        if chksum not in calc_chksums(buf):
-            raise InvalidHeaderError("bad checksum")
-
-        obj = cls()
-        obj.name = nts(buf[0:100], encoding, errors)
-        obj.mode = nti(buf[100:108])
-        obj.uid = nti(buf[108:116])
-        obj.gid = nti(buf[116:124])
-        obj.size = nti(buf[124:136])
-        obj.mtime = nti(buf[136:148])
-        obj.chksum = chksum
-        obj.type = buf[156:157]
-        obj.linkname = nts(buf[157:257], encoding, errors)
-        obj.uname = nts(buf[265:297], encoding, errors)
-        obj.gname = nts(buf[297:329], encoding, errors)
-        obj.devmajor = nti(buf[329:337])
-        obj.devminor = nti(buf[337:345])
-        prefix = nts(buf[345:500], encoding, errors)
-
-        # Old V7 tar format represents a directory as a regular
-        # file with a trailing slash.
-        if obj.type == AREGTYPE and obj.name.endswith("/"):
-            obj.type = DIRTYPE
-
-        # The old GNU sparse format occupies some of the unused
-        # space in the buffer for up to 4 sparse structures.
-        # Save them for later processing in _proc_sparse().
-        if obj.type == GNUTYPE_SPARSE:
-            pos = 386
-            structs = []
-            for i in range(4):
-                try:
-                    offset = nti(buf[pos:pos + 12])
-                    numbytes = nti(buf[pos + 12:pos + 24])
-                except ValueError:
-                    break
-                structs.append((offset, numbytes))
-                pos += 24
-            isextended = bool(buf[482])
-            origsize = nti(buf[483:495])
-            obj._sparse_structs = (structs, isextended, origsize)
-
-        # Remove redundant slashes from directories.
-        if obj.isdir():
-            obj.name = obj.name.rstrip("/")
-
-        # Reconstruct a ustar longname.
-        if prefix and obj.type not in GNU_TYPES:
-            obj.name = prefix + "/" + obj.name
-        return obj
-
-    @classmethod
-    def fromtarfile(cls, tarfile):
-        """Return the next TarInfo object from TarFile object
-           tarfile.
-        """
-        buf = tarfile.fileobj.read(BLOCKSIZE)
-        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
-        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
-        return obj._proc_member(tarfile)
-
-    #--------------------------------------------------------------------------
-    # The following are methods that are called depending on the type of a
-    # member. The entry point is _proc_member() which can be overridden in a
-    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
-    # implement the following
-    # operations:
-    # 1. Set self.offset_data to the position where the data blocks begin,
-    #    if there is data that follows.
-    # 2. Set tarfile.offset to the position where the next member's header will
-    #    begin.
-    # 3. Return self or another valid TarInfo object.
-    def _proc_member(self, tarfile):
-        """Choose the right processing method depending on
-           the type and call it.
-        """
-        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
-            return self._proc_gnulong(tarfile)
-        elif self.type == GNUTYPE_SPARSE:
-            return self._proc_sparse(tarfile)
-        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
-            return self._proc_pax(tarfile)
-        else:
-            return self._proc_builtin(tarfile)
-
-    def _proc_builtin(self, tarfile):
-        """Process a builtin type or an unknown type which
-           will be treated as a regular file.
-        """
-        self.offset_data = tarfile.fileobj.tell()
-        offset = self.offset_data
-        if self.isreg() or self.type not in SUPPORTED_TYPES:
-            # Skip the following data blocks.
-            offset += self._block(self.size)
-        tarfile.offset = offset
-
-        # Patch the TarInfo object with saved global
-        # header information.
-        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
-
-        # Remove redundant slashes from directories. This is to be consistent
-        # with frombuf().
-        if self.isdir():
-            self.name = self.name.rstrip("/")
-
-        return self
-
-    def _proc_gnulong(self, tarfile):
-        """Process the blocks that hold a GNU longname
-           or longlink member.
-        """
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # Fetch the next header and process it.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError as e:
-            raise SubsequentHeaderError(str(e)) from None
-
-        # Patch the TarInfo object from the next header with
-        # the longname information.
-        next.offset = self.offset
-        if self.type == GNUTYPE_LONGNAME:
-            next.name = nts(buf, tarfile.encoding, tarfile.errors)
-        elif self.type == GNUTYPE_LONGLINK:
-            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
-
-        # Remove redundant slashes from directories. This is to be consistent
-        # with frombuf().
-        if next.isdir():
-            next.name = next.name.removesuffix("/")
-
-        return next
-
-    def _proc_sparse(self, tarfile):
-        """Process a GNU sparse header plus extra headers.
-        """
-        # We already collected some sparse structures in frombuf().
-        structs, isextended, origsize = self._sparse_structs
-        del self._sparse_structs
-
-        # Collect sparse structures from extended header blocks.
-        while isextended:
-            buf = tarfile.fileobj.read(BLOCKSIZE)
-            pos = 0
-            for i in range(21):
-                try:
-                    offset = nti(buf[pos:pos + 12])
-                    numbytes = nti(buf[pos + 12:pos + 24])
-                except ValueError:
-                    break
-                if offset and numbytes:
-                    structs.append((offset, numbytes))
-                pos += 24
-            isextended = bool(buf[504])
-        self.sparse = structs
-
-        self.offset_data = tarfile.fileobj.tell()
-        tarfile.offset = self.offset_data + self._block(self.size)
-        self.size = origsize
-        return self
-
-    def _proc_pax(self, tarfile):
-        """Process an extended or global header as described in
-           POSIX.1-2008.
-        """
-        # Read the header information.
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # A pax header stores supplemental information for either
-        # the following file (extended) or all following files
-        # (global).
-        if self.type == XGLTYPE:
-            pax_headers = tarfile.pax_headers
-        else:
-            pax_headers = tarfile.pax_headers.copy()
-
-        # Check if the pax header contains a hdrcharset field. This tells us
-        # the encoding of the path, linkpath, uname and gname fields. Normally,
-        # these fields are UTF-8 encoded, but POSIX.1-2008 allows tar
-        # implementations to store them as raw binary strings if the
-        # translation to UTF-8 fails.
-        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
-        if match is not None:
-            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
-
-        # For the time being, we don't care about anything other than "BINARY".
-        # The only other value that is currently allowed by the standard is
-        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
-        hdrcharset = pax_headers.get("hdrcharset")
-        if hdrcharset == "BINARY":
-            encoding = tarfile.encoding
-        else:
-            encoding = "utf-8"
-
-        # Parse pax header information. A record looks like this:
-        # "%d %s=%s\n" % (length, keyword, value). length is the size
-        # of the complete record including the length field itself and
-        # the newline. keyword and value are both UTF-8 encoded strings.
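-        # For example, the 20-byte record
-        #     b"20 path=example.txt\n"
-        # has length 20 counting the "20 " prefix, the keyword, "=", the
-        # value and the trailing newline.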
-        regex = re.compile(br"(\d+) ([^=]+)=")
-        pos = 0
-        while match := regex.match(buf, pos):
-            length, keyword = match.groups()
-            length = int(length)
-            if length == 0:
-                raise InvalidHeaderError("invalid header")
-            value = buf[match.end(2) + 1:match.start(1) + length - 1]
-
-            # Normally, we could just use "utf-8" as the encoding and "strict"
-            # as the error handler, but we better not take the risk. For
-            # example, GNU tar <= 1.23 is known to store filenames it cannot
-            # translate to UTF-8 as raw strings (unfortunately without a
-            # hdrcharset=BINARY header).
-            # We first try the strict standard encoding, and if that fails we
-            # fall back on the user's encoding and error handler.
-            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
-                    tarfile.errors)
-            if keyword in PAX_NAME_FIELDS:
-                value = self._decode_pax_field(value, encoding, tarfile.encoding,
-                        tarfile.errors)
-            else:
-                value = self._decode_pax_field(value, "utf-8", "utf-8",
-                        tarfile.errors)
-
-            pax_headers[keyword] = value
-            pos += length
-
-        # Fetch the next header.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError as e:
-            raise SubsequentHeaderError(str(e)) from None
-
-        # Process GNU sparse information.
-        if "GNU.sparse.map" in pax_headers:
-            # GNU extended sparse format version 0.1.
-            self._proc_gnusparse_01(next, pax_headers)
-
-        elif "GNU.sparse.size" in pax_headers:
-            # GNU extended sparse format version 0.0.
-            self._proc_gnusparse_00(next, pax_headers, buf)
-
-        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
-            # GNU extended sparse format version 1.0.
-            self._proc_gnusparse_10(next, pax_headers, tarfile)
-
-        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
-            # Patch the TarInfo object with the extended header info.
-            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
-            next.offset = self.offset
-
-            if "size" in pax_headers:
-                # If the extended header replaces the size field,
-                # we need to recalculate the offset where the next
-                # header starts.
-                offset = next.offset_data
-                if next.isreg() or next.type not in SUPPORTED_TYPES:
-                    offset += next._block(next.size)
-                tarfile.offset = offset
-
-        return next
-
-    def _proc_gnusparse_00(self, next, pax_headers, buf):
-        """Process a GNU tar extended sparse header, version 0.0.
-        """
-        offsets = []
-        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
-            offsets.append(int(match.group(1)))
-        numbytes = []
-        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
-            numbytes.append(int(match.group(1)))
-        next.sparse = list(zip(offsets, numbytes))
-
-    def _proc_gnusparse_01(self, next, pax_headers):
-        """Process a GNU tar extended sparse header, version 0.1.
-        """
-        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
-        next.sparse = list(zip(sparse[::2], sparse[1::2]))
-
-    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
-        """Process a GNU tar extended sparse header, version 1.0.
-        """
-        fields = None
-        sparse = []
-        buf = tarfile.fileobj.read(BLOCKSIZE)
-        fields, buf = buf.split(b"\n", 1)
-        fields = int(fields)
-        while len(sparse) < fields * 2:
-            if b"\n" not in buf:
-                buf += tarfile.fileobj.read(BLOCKSIZE)
-            number, buf = buf.split(b"\n", 1)
-            sparse.append(int(number))
-        next.offset_data = tarfile.fileobj.tell()
-        next.sparse = list(zip(sparse[::2], sparse[1::2]))
-
-    def _apply_pax_info(self, pax_headers, encoding, errors):
-        """Replace fields with supplemental information from a previous
-           pax extended or global header.
-        """
-        for keyword, value in pax_headers.items():
-            if keyword == "GNU.sparse.name":
-                setattr(self, "path", value)
-            elif keyword == "GNU.sparse.size":
-                setattr(self, "size", int(value))
-            elif keyword == "GNU.sparse.realsize":
-                setattr(self, "size", int(value))
-            elif keyword in PAX_FIELDS:
-                if keyword in PAX_NUMBER_FIELDS:
-                    try:
-                        value = PAX_NUMBER_FIELDS[keyword](value)
-                    except ValueError:
-                        value = 0
-                if keyword == "path":
-                    value = value.rstrip("/")
-                setattr(self, keyword, value)
-
-        self.pax_headers = pax_headers.copy()
-
-    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
-        """Decode a single field from a pax record.
-        """
-        try:
-            return value.decode(encoding, "strict")
-        except UnicodeDecodeError:
-            return value.decode(fallback_encoding, fallback_errors)
-
-    def _block(self, count):
-        """Round up a byte count by BLOCKSIZE and return it,
-           e.g. _block(834) => 1024.
-        """
-        blocks, remainder = divmod(count, BLOCKSIZE)
-        if remainder:
-            blocks += 1
-        return blocks * BLOCKSIZE
-
-    def isreg(self):
-        'Return True if the TarInfo object is a regular file.'
-        return self.type in REGULAR_TYPES
-
-    def isfile(self):
-        'Return True if the TarInfo object is a regular file.'
-        return self.isreg()
-
-    def isdir(self):
-        'Return True if it is a directory.'
-        return self.type == DIRTYPE
-
-    def issym(self):
-        'Return True if it is a symbolic link.'
-        return self.type == SYMTYPE
-
-    def islnk(self):
-        'Return True if it is a hard link.'
-        return self.type == LNKTYPE
-
-    def ischr(self):
-        'Return True if it is a character device.'
-        return self.type == CHRTYPE
-
-    def isblk(self):
-        'Return True if it is a block device.'
-        return self.type == BLKTYPE
-
-    def isfifo(self):
-        'Return True if it is a FIFO.'
-        return self.type == FIFOTYPE
-
-    def issparse(self):
-        'Return True if it is a sparse file.'
-        return self.sparse is not None
-
-    def isdev(self):
-        'Return True if it is one of character device, block device or FIFO.'
-        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
-# class TarInfo
-
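-# The type predicates above are the usual way to branch on member kinds when
-# reading an archive. A brief sketch (the archive name is a placeholder):
-#
-#     with TarFile.open("sample.tar") as tf:
-#         for member in tf:
-#             if member.isdir():
-#                 print("dir: ", member.name)
-#             elif member.issym():
-#                 print("link:", member.name, "->", member.linkname)
-#             elif member.isfile():
-#                 print("file:", member.name, member.size, "bytes")
-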
-class TarFile(object):
-    """The TarFile Class provides an interface to tar archives.
-    """
-
-    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
-
-    dereference = False         # If true, add content of linked file to the
-                                # tar file, else the link.
-
-    ignore_zeros = False        # If true, skips empty or invalid blocks and
-                                # continues processing.
-
-    errorlevel = 1              # If 0, fatal errors only appear in debug
-                                # messages (if debug >= 0). If > 0, errors
-                                # are passed to the caller as exceptions.
-
-    format = DEFAULT_FORMAT     # The format to use when creating an archive.
-
-    encoding = ENCODING         # Encoding for 8-bit character strings.
-
-    errors = None               # Error handler for unicode conversion.
-
-    tarinfo = TarInfo           # The default TarInfo class to use.
-
-    fileobject = ExFileObject   # The file-object for extractfile().
-
-    extraction_filter = None    # The default filter for extraction.
-
-    def __init__(self, name=None, mode="r", fileobj=None, format=None,
-            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
-            errors="surrogateescape", pax_headers=None, debug=None,
-            errorlevel=None, copybufsize=None):
-        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
-           read from an existing archive, 'a' to append data to an existing
-           file or 'w' to create a new file overwriting an existing one. `mode'
-           defaults to 'r'.
-           If `fileobj' is given, it is used for reading or writing data. If it
-           can be determined, `mode' is overridden by `fileobj's mode.
-           `fileobj' is not closed, when TarFile is closed.
-        """
-        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
-        if mode not in modes:
-            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
-        self.mode = mode
-        self._mode = modes[mode]
-
-        if not fileobj:
-            if self.mode == "a" and not os.path.exists(name):
-                # Create nonexistent files in append mode.
-                self.mode = "w"
-                self._mode = "wb"
-            fileobj = bltn_open(name, self._mode)
-            self._extfileobj = False
-        else:
-            if (name is None and hasattr(fileobj, "name") and
-                isinstance(fileobj.name, (str, bytes))):
-                name = fileobj.name
-            if hasattr(fileobj, "mode"):
-                self._mode = fileobj.mode
-            self._extfileobj = True
-        self.name = os.path.abspath(name) if name else None
-        self.fileobj = fileobj
-
-        # Init attributes.
-        if format is not None:
-            self.format = format
-        if tarinfo is not None:
-            self.tarinfo = tarinfo
-        if dereference is not None:
-            self.dereference = dereference
-        if ignore_zeros is not None:
-            self.ignore_zeros = ignore_zeros
-        if encoding is not None:
-            self.encoding = encoding
-        self.errors = errors
-
-        if pax_headers is not None and self.format == PAX_FORMAT:
-            self.pax_headers = pax_headers
-        else:
-            self.pax_headers = {}
-
-        if debug is not None:
-            self.debug = debug
-        if errorlevel is not None:
-            self.errorlevel = errorlevel
-
-        # Init datastructures.
-        self.copybufsize = copybufsize
-        self.closed = False
-        self.members = []       # list of members as TarInfo objects
-        self._loaded = False    # flag if all members have been read
-        self.offset = self.fileobj.tell()
-                                # current position in the archive file
-        self.inodes = {}        # dictionary caching the inodes of
-                                # archive members already added
-
-        try:
-            if self.mode == "r":
-                self.firstmember = None
-                self.firstmember = self.next()
-
-            if self.mode == "a":
-                # Move to the end of the archive,
-                # before the first empty block.
-                while True:
-                    self.fileobj.seek(self.offset)
-                    try:
-                        tarinfo = self.tarinfo.fromtarfile(self)
-                        self.members.append(tarinfo)
-                    except EOFHeaderError:
-                        self.fileobj.seek(self.offset)
-                        break
-                    except HeaderError as e:
-                        raise ReadError(str(e)) from None
-
-            if self.mode in ("a", "w", "x"):
-                self._loaded = True
-
-                if self.pax_headers:
-                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
-                    self.fileobj.write(buf)
-                    self.offset += len(buf)
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    #--------------------------------------------------------------------------
-    # Below are the classmethods which act as alternate constructors to the
-    # TarFile class. The open() method is the only one that is needed for
-    # public use; it is the "super"-constructor and is able to select an
-    # adequate "sub"-constructor for a particular compression using the mapping
-    # from OPEN_METH.
-    #
-    # This concept allows one to subclass TarFile without losing the comfort of
-    # the super-constructor. A sub-constructor is registered and made available
-    # by adding it to the mapping in OPEN_METH.
-
-    @classmethod
-    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
-        r"""Open a tar archive for reading, writing or appending. Return
-           an appropriate TarFile class.
-
-           mode:
-           'r' or 'r:\*' open for reading with transparent compression
-           'r:'         open for reading exclusively uncompressed
-           'r:gz'       open for reading with gzip compression
-           'r:bz2'      open for reading with bzip2 compression
-           'r:xz'       open for reading with lzma compression
-           'a' or 'a:'  open for appending, creating the file if necessary
-           'w' or 'w:'  open for writing without compression
-           'w:gz'       open for writing with gzip compression
-           'w:bz2'      open for writing with bzip2 compression
-           'w:xz'       open for writing with lzma compression
-
-           'x' or 'x:'  create a tarfile exclusively without compression, raise
-                        an exception if the file already exists
-           'x:gz'       create a gzip compressed tarfile, raise an exception
-                        if the file already exists
-           'x:bz2'      create a bzip2 compressed tarfile, raise an exception
-                        if the file already exists
-           'x:xz'       create an lzma compressed tarfile, raise an exception
-                        if the file already exists
-
-           'r|\*'        open a stream of tar blocks with transparent compression
-           'r|'         open an uncompressed stream of tar blocks for reading
-           'r|gz'       open a gzip compressed stream of tar blocks
-           'r|bz2'      open a bzip2 compressed stream of tar blocks
-           'r|xz'       open an lzma compressed stream of tar blocks
-           'w|'         open an uncompressed stream for writing
-           'w|gz'       open a gzip compressed stream for writing
-           'w|bz2'      open a bzip2 compressed stream for writing
-           'w|xz'       open an lzma compressed stream for writing
-        """
-
-        if not name and not fileobj:
-            raise ValueError("nothing to open")
-
-        if mode in ("r", "r:*"):
-            # Find out which *open() is appropriate for opening the file.
-            def not_compressed(comptype):
-                return cls.OPEN_METH[comptype] == 'taropen'
-            error_msgs = []
-            for comptype in sorted(cls.OPEN_METH, key=not_compressed):
-                func = getattr(cls, cls.OPEN_METH[comptype])
-                if fileobj is not None:
-                    saved_pos = fileobj.tell()
-                try:
-                    return func(name, "r", fileobj, **kwargs)
-                except (ReadError, CompressionError) as e:
-                    error_msgs.append(f'- method {comptype}: {e!r}')
-                    if fileobj is not None:
-                        fileobj.seek(saved_pos)
-                    continue
-            error_msgs_summary = '\n'.join(error_msgs)
-            raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
-
-        elif ":" in mode:
-            filemode, comptype = mode.split(":", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            # Select the *open() function according to
-            # given compression.
-            if comptype in cls.OPEN_METH:
-                func = getattr(cls, cls.OPEN_METH[comptype])
-            else:
-                raise CompressionError("unknown compression type %r" % comptype)
-            return func(name, filemode, fileobj, **kwargs)
-
-        elif "|" in mode:
-            filemode, comptype = mode.split("|", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            if filemode not in ("r", "w"):
-                raise ValueError("mode must be 'r' or 'w'")
-
-            compresslevel = kwargs.pop("compresslevel", 9)
-            stream = _Stream(name, filemode, comptype, fileobj, bufsize,
-                             compresslevel)
-            try:
-                t = cls(name, filemode, stream, **kwargs)
-            except:
-                stream.close()
-                raise
-            t._extfileobj = False
-            return t
-
-        elif mode in ("a", "w", "x"):
-            return cls.taropen(name, mode, fileobj, **kwargs)
-
-        raise ValueError("undiscernible mode")
-
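-    # A few usage sketches for the mode table above (file names and the
-    # stream object are placeholders):
-    #
-    #     TarFile.open("archive.tar")                # transparent, same as "r:*"
-    #     TarFile.open("archive.tar.gz", "r:gz")     # explicit gzip compression
-    #     TarFile.open("new.tar.xz", "w:xz")         # write lzma-compressed
-    #     TarFile.open(fileobj=stream, mode="r|")    # non-seekable tar stream
-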
-    @classmethod
-    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
-        """Open uncompressed tar archive name for reading or writing.
-        """
-        if mode not in ("r", "a", "w", "x"):
-            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
-        return cls(name, mode, fileobj, **kwargs)
-
-    @classmethod
-    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open gzip compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if mode not in ("r", "w", "x"):
-            raise ValueError("mode must be 'r', 'w' or 'x'")
-
-        try:
-            from gzip import GzipFile
-        except ImportError:
-            raise CompressionError("gzip module is not available") from None
-
-        try:
-            fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
-        except OSError as e:
-            if fileobj is not None and mode == 'r':
-                raise ReadError("not a gzip file") from e
-            raise
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except OSError as e:
-            fileobj.close()
-            if mode == 'r':
-                raise ReadError("not a gzip file") from e
-            raise
-        except:
-            fileobj.close()
-            raise
-        t._extfileobj = False
-        return t
-
-    @classmethod
-    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open bzip2 compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if mode not in ("r", "w", "x"):
-            raise ValueError("mode must be 'r', 'w' or 'x'")
-
-        try:
-            from bz2 import BZ2File
-        except ImportError:
-            raise CompressionError("bz2 module is not available") from None
-
-        fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except (OSError, EOFError) as e:
-            fileobj.close()
-            if mode == 'r':
-                raise ReadError("not a bzip2 file") from e
-            raise
-        except:
-            fileobj.close()
-            raise
-        t._extfileobj = False
-        return t
-
-    @classmethod
-    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
-        """Open lzma compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if mode not in ("r", "w", "x"):
-            raise ValueError("mode must be 'r', 'w' or 'x'")
-
-        try:
-            from lzma import LZMAFile, LZMAError
-        except ImportError:
-            raise CompressionError("lzma module is not available") from None
-
-        fileobj = LZMAFile(fileobj or name, mode, preset=preset)
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except (LZMAError, EOFError) as e:
-            fileobj.close()
-            if mode == 'r':
-                raise ReadError("not an lzma file") from e
-            raise
-        except:
-            fileobj.close()
-            raise
-        t._extfileobj = False
-        return t
-
-    # All *open() methods are registered here.
-    OPEN_METH = {
-        "tar": "taropen",   # uncompressed tar
-        "gz":  "gzopen",    # gzip compressed tar
-        "bz2": "bz2open",   # bzip2 compressed tar
-        "xz":  "xzopen"     # lzma compressed tar
-    }
-
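-    # As the comment above open() explains, a subclass can register an extra
-    # "sub"-constructor here. A hedged sketch, assuming a hypothetical
-    # zstopen() classmethod implemented analogously to xzopen():
-    #
-    #     class ZstTarFile(TarFile):
-    #         @classmethod
-    #         def zstopen(cls, name, mode="r", fileobj=None, **kwargs):
-    #             ...  # wrap fileobj/name in a zstandard file, then taropen()
-    #         OPEN_METH = {**TarFile.OPEN_METH, "zst": "zstopen"}
-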
-    #--------------------------------------------------------------------------
-    # The public methods which TarFile provides:
-
-    def close(self):
-        """Close the TarFile. In write-mode, two finishing zero blocks are
-           appended to the archive.
-        """
-        if self.closed:
-            return
-
-        self.closed = True
-        try:
-            if self.mode in ("a", "w", "x"):
-                self.fileobj.write(NUL * (BLOCKSIZE * 2))
-                self.offset += (BLOCKSIZE * 2)
-                # fill up the end with zero-blocks
-                # (like option -b20 for tar does)
-                blocks, remainder = divmod(self.offset, RECORDSIZE)
-                if remainder > 0:
-                    self.fileobj.write(NUL * (RECORDSIZE - remainder))
-        finally:
-            if not self._extfileobj:
-                self.fileobj.close()
-
-    def getmember(self, name):
-        """Return a TarInfo object for member ``name``. If ``name`` can not be
-           found in the archive, KeyError is raised. If a member occurs more
-           than once in the archive, its last occurrence is assumed to be the
-           most up-to-date version.
-        """
-        tarinfo = self._getmember(name.rstrip('/'))
-        if tarinfo is None:
-            raise KeyError("filename %r not found" % name)
-        return tarinfo
-
-    def getmembers(self):
-        """Return the members of the archive as a list of TarInfo objects. The
-           list has the same order as the members in the archive.
-        """
-        self._check()
-        if not self._loaded:    # if we want to obtain a list of
-            self._load()        # all members, we first have to
-                                # scan the whole archive.
-        return self.members
-
-    def getnames(self):
-        """Return the members of the archive as a list of their names. It has
-           the same order as the list returned by getmembers().
-        """
-        return [tarinfo.name for tarinfo in self.getmembers()]
-
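-    # Usage sketch for the three accessors above (names are placeholders):
-    #
-    #     with TarFile.open("archive.tar") as tf:
-    #         print(tf.getnames())
-    #         info = tf.getmember("pkg/__init__.py")
-    #         print(info.size, info.mtime)
-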
-    def gettarinfo(self, name=None, arcname=None, fileobj=None):
-        """Create a TarInfo object from the result of os.stat or equivalent
-           on an existing file. The file is either named by ``name``, or
-           specified as a file object ``fileobj`` with a file descriptor. If
-           given, ``arcname`` specifies an alternative name for the file in the
-           archive, otherwise, the name is taken from the 'name' attribute of
-           'fileobj', or the 'name' argument. The name should be a text
-           string.
-        """
-        self._check("awx")
-
-        # When fileobj is given, replace name by
-        # fileobj's real name.
-        if fileobj is not None:
-            name = fileobj.name
-
-        # Build the name of the member in the archive.
-        # Backslashes are converted to forward slashes and
-        # absolute paths are turned into relative paths.
-        if arcname is None:
-            arcname = name
-        drv, arcname = os.path.splitdrive(arcname)
-        arcname = arcname.replace(os.sep, "/")
-        arcname = arcname.lstrip("/")
-
-        # Now, fill the TarInfo object with
-        # information specific for the file.
-        tarinfo = self.tarinfo()
-        tarinfo.tarfile = self  # Not needed
-
-        # Use os.stat or os.lstat, depending on whether symlinks should be resolved.
-        if fileobj is None:
-            if not self.dereference:
-                statres = os.lstat(name)
-            else:
-                statres = os.stat(name)
-        else:
-            statres = os.fstat(fileobj.fileno())
-        linkname = ""
-
-        stmd = statres.st_mode
-        if stat.S_ISREG(stmd):
-            inode = (statres.st_ino, statres.st_dev)
-            if not self.dereference and statres.st_nlink > 1 and \
-                    inode in self.inodes and arcname != self.inodes[inode]:
-                # Is it a hardlink to an already
-                # archived file?
-                type = LNKTYPE
-                linkname = self.inodes[inode]
-            else:
-                # The inode is added only if it's valid.
-                # For win32 it is always 0.
-                type = REGTYPE
-                if inode[0]:
-                    self.inodes[inode] = arcname
-        elif stat.S_ISDIR(stmd):
-            type = DIRTYPE
-        elif stat.S_ISFIFO(stmd):
-            type = FIFOTYPE
-        elif stat.S_ISLNK(stmd):
-            type = SYMTYPE
-            linkname = os.readlink(name)
-        elif stat.S_ISCHR(stmd):
-            type = CHRTYPE
-        elif stat.S_ISBLK(stmd):
-            type = BLKTYPE
-        else:
-            return None
-
-        # Fill the TarInfo object with all
-        # information we can get.
-        tarinfo.name = arcname
-        tarinfo.mode = stmd
-        tarinfo.uid = statres.st_uid
-        tarinfo.gid = statres.st_gid
-        if type == REGTYPE:
-            tarinfo.size = statres.st_size
-        else:
-            tarinfo.size = 0
-        tarinfo.mtime = statres.st_mtime
-        tarinfo.type = type
-        tarinfo.linkname = linkname
-        if pwd:
-            try:
-                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
-            except KeyError:
-                pass
-        if grp:
-            try:
-                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
-            except KeyError:
-                pass
-
-        if type in (CHRTYPE, BLKTYPE):
-            if hasattr(os, "major") and hasattr(os, "minor"):
-                tarinfo.devmajor = os.major(statres.st_rdev)
-                tarinfo.devminor = os.minor(statres.st_rdev)
-        return tarinfo
-
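-    # gettarinfo() is mostly useful when the header should be tweaked before
-    # the data is written. A brief sketch (paths are placeholders):
-    #
-    #     import tarfile
-    #     with tarfile.open("out.tar", "w") as tf:
-    #         ti = tf.gettarinfo("data.bin", arcname="payload/data.bin")
-    #         ti.uid = ti.gid = 0
-    #         with open("data.bin", "rb") as f:
-    #             tf.addfile(ti, f)
-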
-    def list(self, verbose=True, *, members=None):
-        """Print a table of contents to sys.stdout. If ``verbose`` is False, only
-           the names of the members are printed. If it is True, an `ls -l'-like
-           output is produced. ``members`` is optional and must be a subset of the
-           list returned by getmembers().
-        """
-        self._check()
-
-        if members is None:
-            members = self
-        for tarinfo in members:
-            if verbose:
-                if tarinfo.mode is None:
-                    _safe_print("??????????")
-                else:
-                    _safe_print(stat.filemode(tarinfo.mode))
-                _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
-                                       tarinfo.gname or tarinfo.gid))
-                if tarinfo.ischr() or tarinfo.isblk():
-                    _safe_print("%10s" %
-                            ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
-                else:
-                    _safe_print("%10d" % tarinfo.size)
-                if tarinfo.mtime is None:
-                    _safe_print("????-??-?? ??:??:??")
-                else:
-                    _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
-                                % time.localtime(tarinfo.mtime)[:6])
-
-            _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
-
-            if verbose:
-                if tarinfo.issym():
-                    _safe_print("-> " + tarinfo.linkname)
-                if tarinfo.islnk():
-                    _safe_print("link to " + tarinfo.linkname)
-            print()
-
-    def add(self, name, arcname=None, recursive=True, *, filter=None):
-        """Add the file ``name`` to the archive. ``name`` may be any type of file
-           (directory, fifo, symbolic link, etc.). If given, ``arcname``
-           specifies an alternative name for the file in the archive.
-           Directories are added recursively by default. This can be avoided by
-           setting ``recursive`` to False. ``filter`` is a function
-           that expects a TarInfo object argument and returns the changed
-           TarInfo object; if it returns None, the TarInfo object will be
-           excluded from the archive.
-        """
-        self._check("awx")
-
-        if arcname is None:
-            arcname = name
-
-        # Skip if somebody tries to archive the archive...
-        if self.name is not None and os.path.abspath(name) == self.name:
-            self._dbg(2, "tarfile: Skipped %r" % name)
-            return
-
-        self._dbg(1, name)
-
-        # Create a TarInfo object from the file.
-        tarinfo = self.gettarinfo(name, arcname)
-
-        if tarinfo is None:
-            self._dbg(1, "tarfile: Unsupported type %r" % name)
-            return
-
-        # Change or exclude the TarInfo object.
-        if filter is not None:
-            tarinfo = filter(tarinfo)
-            if tarinfo is None:
-                self._dbg(2, "tarfile: Excluded %r" % name)
-                return
-
-        # Append the tar header and data to the archive.
-        if tarinfo.isreg():
-            with bltn_open(name, "rb") as f:
-                self.addfile(tarinfo, f)
-
-        elif tarinfo.isdir():
-            self.addfile(tarinfo)
-            if recursive:
-                for f in sorted(os.listdir(name)):
-                    self.add(os.path.join(name, f), os.path.join(arcname, f),
-                            recursive, filter=filter)
-
-        else:
-            self.addfile(tarinfo)
-
-    def addfile(self, tarinfo, fileobj=None):
-        """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
-           given, it should be a binary file, and tarinfo.size bytes are read
-           from it and added to the archive. You can create TarInfo objects
-           directly, or by using gettarinfo().
-        """
-        self._check("awx")
-
-        tarinfo = copy.copy(tarinfo)
-
-        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
-        self.fileobj.write(buf)
-        self.offset += len(buf)
-        bufsize = self.copybufsize
-        # If there's data to follow, append it.
-        if fileobj is not None:
-            copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
-            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
-            if remainder > 0:
-                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
-                blocks += 1
-            self.offset += blocks * BLOCKSIZE
-
-        self.members.append(tarinfo)
-
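-    # addfile() also accepts synthetic members, which makes it easy to write
-    # in-memory data. A short sketch:
-    #
-    #     import io, tarfile
-    #     data = b"hello world\n"
-    #     ti = tarfile.TarInfo(name="greeting.txt")
-    #     ti.size = len(data)
-    #     with tarfile.open("out.tar", "w") as tf:
-    #         tf.addfile(ti, io.BytesIO(data))
-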
-    def _get_filter_function(self, filter):
-        if filter is None:
-            filter = self.extraction_filter
-            if filter is None:
-                warnings.warn(
-                    'Python 3.14 will, by default, filter extracted tar '
-                    + 'archives and reject files or modify their metadata. '
-                    + 'Use the filter argument to control this behavior.',
-                    DeprecationWarning)
-                return fully_trusted_filter
-            if isinstance(filter, str):
-                raise TypeError(
-                    'String names are not supported for '
-                    + 'TarFile.extraction_filter. Use a function such as '
-                    + 'tarfile.data_filter directly.')
-            return filter
-        if callable(filter):
-            return filter
-        try:
-            return _NAMED_FILTERS[filter]
-        except KeyError:
-            raise ValueError(f"filter {filter!r} not found") from None
-
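-    # The named filters accepted here are the module-level filter functions
-    # under the names "fully_trusted", "tar" and "data". Typical usage when
-    # extracting archives from untrusted sources:
-    #
-    #     import tarfile
-    #     with tarfile.open("archive.tar") as tf:
-    #         tf.extractall("dest", filter="data")
-    #
-    # or process-wide, as the documentation suggests:
-    #
-    #     tarfile.TarFile.extraction_filter = staticmethod(tarfile.data_filter)
-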
-    def extractall(self, path=".", members=None, *, numeric_owner=False,
-                   filter=None):
-        """Extract all members from the archive to the current working
-           directory and set owner, modification time and permissions on
-           directories afterwards. `path' specifies a different directory
-           to extract to. `members' is optional and must be a subset of the
-           list returned by getmembers(). If `numeric_owner` is True, only
-           the numbers for user/group names are used and not the names.
-
-           The `filter` function will be called on each member just
-           before extraction.
-           It can return a changed TarInfo or None to skip the member.
-           String names of common filters are accepted.
-        """
-        directories = []
-
-        filter_function = self._get_filter_function(filter)
-        if members is None:
-            members = self
-
-        for member in members:
-            tarinfo = self._get_extract_tarinfo(member, filter_function, path)
-            if tarinfo is None:
-                continue
-            if tarinfo.isdir():
-                # For directories, delay setting attributes until later,
-                # since permissions can interfere with extraction and
-                # extracting contents can reset mtime.
-                directories.append(tarinfo)
-            self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
-                              numeric_owner=numeric_owner)
-
-        # Reverse sort directories.
-        directories.sort(key=lambda a: a.name, reverse=True)
-
-        # Set correct owner, mtime and filemode on directories.
-        for tarinfo in directories:
-            dirpath = os.path.join(path, tarinfo.name)
-            try:
-                self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
-                self.utime(tarinfo, dirpath)
-                self.chmod(tarinfo, dirpath)
-            except ExtractError as e:
-                self._handle_nonfatal_error(e)
-
-    def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
-                filter=None):
-        """Extract a member from the archive to the current working directory,
-           using its full name. Its file information is extracted as accurately
-           as possible. `member' may be a filename or a TarInfo object. You can
-           specify a different directory using `path'. File attributes (owner,
-           mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
-           is True, only the numbers for user/group names are used and not
-           the names.
-
-           The `filter` function will be called before extraction.
-           It can return a changed TarInfo or None to skip the member.
-           String names of common filters are accepted.
-        """
-        filter_function = self._get_filter_function(filter)
-        tarinfo = self._get_extract_tarinfo(member, filter_function, path)
-        if tarinfo is not None:
-            self._extract_one(tarinfo, path, set_attrs, numeric_owner)
-
-    def _get_extract_tarinfo(self, member, filter_function, path):
-        """Get filtered TarInfo (or None) from member, which might be a str"""
-        if isinstance(member, str):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        unfiltered = tarinfo
-        try:
-            tarinfo = filter_function(tarinfo, path)
-        except (OSError, FilterError) as e:
-            self._handle_fatal_error(e)
-        except ExtractError as e:
-            self._handle_nonfatal_error(e)
-        if tarinfo is None:
-            self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
-            return None
-        # Prepare the link target for makelink().
-        if tarinfo.islnk():
-            tarinfo = copy.copy(tarinfo)
-            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
-        return tarinfo
-
-    def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
-        """Extract from filtered tarinfo to disk"""
-        self._check("r")
-
-        try:
-            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
-                                 set_attrs=set_attrs,
-                                 numeric_owner=numeric_owner)
-        except OSError as e:
-            self._handle_fatal_error(e)
-        except ExtractError as e:
-            self._handle_nonfatal_error(e)
-
-    def _handle_nonfatal_error(self, e):
-        """Handle non-fatal error (ExtractError) according to errorlevel"""
-        if self.errorlevel > 1:
-            raise
-        else:
-            self._dbg(1, "tarfile: %s" % e)
-
-    def _handle_fatal_error(self, e):
-        """Handle "fatal" error according to self.errorlevel"""
-        if self.errorlevel > 0:
-            raise
-        elif isinstance(e, OSError):
-            if e.filename is None:
-                self._dbg(1, "tarfile: %s" % e.strerror)
-            else:
-                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
-        else:
-            self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
-
-    def extractfile(self, member):
-        """Extract a member from the archive as a file object. ``member`` may be
-           a filename or a TarInfo object. If ``member`` is a regular file or
-           a link, an io.BufferedReader object is returned. For all other
-           existing members, None is returned. If ``member`` does not appear
-           in the archive, KeyError is raised.
-        """
-        self._check("r")
-
-        if isinstance(member, str):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
-            # Members with unknown types are treated as regular files.
-            return self.fileobject(self, tarinfo)
-
-        elif tarinfo.islnk() or tarinfo.issym():
-            if isinstance(self.fileobj, _Stream):
-                # A small but ugly workaround for the case that someone tries
-                # to extract a (sym)link as a file-object from a non-seekable
-                # stream of tar blocks.
-                raise StreamError("cannot extract (sym)link as file object")
-            else:
-                # A (sym)link's file object is its target's file object.
-                return self.extractfile(self._find_link_target(tarinfo))
-        else:
-            # If there's no data associated with the member (directory, chrdev,
-            # blkdev, etc.), return None instead of a file object.
-            return None
-
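-    # Usage sketch: reading a member without writing it to disk (the member
-    # name is a placeholder):
-    #
-    #     import tarfile
-    #     with tarfile.open("archive.tar") as tf:
-    #         f = tf.extractfile("pkg/data.txt")
-    #         if f is not None:
-    #             print(f.read().decode())
-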
-    def _extract_member(self, tarinfo, targetpath, set_attrs=True,
-                        numeric_owner=False):
-        """Extract the TarInfo object tarinfo to a physical
-           file called targetpath.
-        """
-        # Build the destination pathname, replacing
-        # forward slashes with platform-specific separators.
-        targetpath = targetpath.rstrip("/")
-        targetpath = targetpath.replace("/", os.sep)
-
-        # Create all upper directories.
-        upperdirs = os.path.dirname(targetpath)
-        if upperdirs and not os.path.exists(upperdirs):
-            # Create directories that are not part of the archive with
-            # default permissions.
-            os.makedirs(upperdirs)
-
-        if tarinfo.islnk() or tarinfo.issym():
-            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
-        else:
-            self._dbg(1, tarinfo.name)
-
-        if tarinfo.isreg():
-            self.makefile(tarinfo, targetpath)
-        elif tarinfo.isdir():
-            self.makedir(tarinfo, targetpath)
-        elif tarinfo.isfifo():
-            self.makefifo(tarinfo, targetpath)
-        elif tarinfo.ischr() or tarinfo.isblk():
-            self.makedev(tarinfo, targetpath)
-        elif tarinfo.islnk() or tarinfo.issym():
-            self.makelink(tarinfo, targetpath)
-        elif tarinfo.type not in SUPPORTED_TYPES:
-            self.makeunknown(tarinfo, targetpath)
-        else:
-            self.makefile(tarinfo, targetpath)
-
-        if set_attrs:
-            self.chown(tarinfo, targetpath, numeric_owner)
-            if not tarinfo.issym():
-                self.chmod(tarinfo, targetpath)
-                self.utime(tarinfo, targetpath)
-
-    #--------------------------------------------------------------------------
-    # Below are the different file methods. They are called via
-    # _extract_member() when extract() is called. They can be replaced in a
-    # subclass to implement other functionality.
-
-    def makedir(self, tarinfo, targetpath):
-        """Make a directory called targetpath.
-        """
-        try:
-            if tarinfo.mode is None:
-                # Use the system's default mode
-                os.mkdir(targetpath)
-            else:
-                # Use a safe mode for the directory, the real mode is set
-                # later in _extract_member().
-                os.mkdir(targetpath, 0o700)
-        except FileExistsError:
-            if not os.path.isdir(targetpath):
-                raise
-
-    def makefile(self, tarinfo, targetpath):
-        """Make a file called targetpath.
-        """
-        source = self.fileobj
-        source.seek(tarinfo.offset_data)
-        bufsize = self.copybufsize
-        with bltn_open(targetpath, "wb") as target:
-            if tarinfo.sparse is not None:
-                for offset, size in tarinfo.sparse:
-                    target.seek(offset)
-                    copyfileobj(source, target, size, ReadError, bufsize)
-                target.seek(tarinfo.size)
-                target.truncate()
-            else:
-                copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
-
-    def makeunknown(self, tarinfo, targetpath):
-        """Make a file from a TarInfo object with an unknown type
-           at targetpath.
-        """
-        self.makefile(tarinfo, targetpath)
-        self._dbg(1, "tarfile: Unknown file type %r, " \
-                     "extracted as regular file." % tarinfo.type)
-
-    def makefifo(self, tarinfo, targetpath):
-        """Make a fifo called targetpath.
-        """
-        if hasattr(os, "mkfifo"):
-            os.mkfifo(targetpath)
-        else:
-            raise ExtractError("fifo not supported by system")
-
-    def makedev(self, tarinfo, targetpath):
-        """Make a character or block device called targetpath.
-        """
-        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
-            raise ExtractError("special devices not supported by system")
-
-        mode = tarinfo.mode
-        if mode is None:
-            # Use mknod's default
-            mode = 0o600
-        if tarinfo.isblk():
-            mode |= stat.S_IFBLK
-        else:
-            mode |= stat.S_IFCHR
-
-        os.mknod(targetpath, mode,
-                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
-
-    def makelink(self, tarinfo, targetpath):
-        """Make a (symbolic) link called targetpath. If it cannot be created
-          (platform limitation), we try to make a copy of the referenced file
-          instead of a link.
-        """
-        try:
-            # For systems that support symbolic and hard links.
-            if tarinfo.issym():
-                if os.path.lexists(targetpath):
-                    # Avoid FileExistsError on following os.symlink.
-                    os.unlink(targetpath)
-                os.symlink(tarinfo.linkname, targetpath)
-            else:
-                if os.path.exists(tarinfo._link_target):
-                    os.link(tarinfo._link_target, targetpath)
-                else:
-                    self._extract_member(self._find_link_target(tarinfo),
-                                         targetpath)
-        except symlink_exception:
-            try:
-                self._extract_member(self._find_link_target(tarinfo),
-                                     targetpath)
-            except KeyError:
-                raise ExtractError("unable to resolve link inside archive") from None
-
-    def chown(self, tarinfo, targetpath, numeric_owner):
-        """Set owner of targetpath according to tarinfo. If numeric_owner
-           is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
-           is False, fall back to .gid/.uid when the search based on name
-           fails.
-        """
-        if hasattr(os, "geteuid") and os.geteuid() == 0:
-            # We have to be root to do so.
-            g = tarinfo.gid
-            u = tarinfo.uid
-            if not numeric_owner:
-                try:
-                    if grp and tarinfo.gname:
-                        g = grp.getgrnam(tarinfo.gname)[2]
-                except KeyError:
-                    pass
-                try:
-                    if pwd and tarinfo.uname:
-                        u = pwd.getpwnam(tarinfo.uname)[2]
-                except KeyError:
-                    pass
-            if g is None:
-                g = -1
-            if u is None:
-                u = -1
-            try:
-                if tarinfo.issym() and hasattr(os, "lchown"):
-                    os.lchown(targetpath, u, g)
-                else:
-                    os.chown(targetpath, u, g)
-            except OSError as e:
-                raise ExtractError("could not change owner") from e
-
-    def chmod(self, tarinfo, targetpath):
-        """Set file permissions of targetpath according to tarinfo.
-        """
-        if tarinfo.mode is None:
-            return
-        try:
-            os.chmod(targetpath, tarinfo.mode)
-        except OSError as e:
-            raise ExtractError("could not change mode") from e
-
-    def utime(self, tarinfo, targetpath):
-        """Set modification time of targetpath according to tarinfo.
-        """
-        mtime = tarinfo.mtime
-        if mtime is None:
-            return
-        if not hasattr(os, 'utime'):
-            return
-        try:
-            os.utime(targetpath, (mtime, mtime))
-        except OSError as e:
-            raise ExtractError("could not change modification time") from e
-
-    #--------------------------------------------------------------------------
-    def next(self):
-        """Return the next member of the archive as a TarInfo object, when
-           TarFile is opened for reading. Return None if there is no more
-           available.
-        """
-        self._check("ra")
-        if self.firstmember is not None:
-            m = self.firstmember
-            self.firstmember = None
-            return m
-
-        # Advance the file pointer.
-        if self.offset != self.fileobj.tell():
-            if self.offset == 0:
-                return None
-            self.fileobj.seek(self.offset - 1)
-            if not self.fileobj.read(1):
-                raise ReadError("unexpected end of data")
-
-        # Read the next block.
-        tarinfo = None
-        while True:
-            try:
-                tarinfo = self.tarinfo.fromtarfile(self)
-            except EOFHeaderError as e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-            except InvalidHeaderError as e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-                elif self.offset == 0:
-                    raise ReadError(str(e)) from None
-            except EmptyHeaderError:
-                if self.offset == 0:
-                    raise ReadError("empty file") from None
-            except TruncatedHeaderError as e:
-                if self.offset == 0:
-                    raise ReadError(str(e)) from None
-            except SubsequentHeaderError as e:
-                raise ReadError(str(e)) from None
-            except Exception as e:
-                try:
-                    import zlib
-                    if isinstance(e, zlib.error):
-                        raise ReadError(f'zlib error: {e}') from None
-                    else:
-                        raise e
-                except ImportError:
-                    raise e
-            break
-
-        if tarinfo is not None:
-            self.members.append(tarinfo)
-        else:
-            self._loaded = True
-
-        return tarinfo
-
-    #--------------------------------------------------------------------------
-    # Little helper methods:
-
-    def _getmember(self, name, tarinfo=None, normalize=False):
-        """Find an archive member by name from bottom to top.
-           If tarinfo is given, it is used as the starting point.
-        """
-        # Ensure that all members have been loaded.
-        members = self.getmembers()
-
-        # Limit the member search list up to tarinfo.
-        skipping = False
-        if tarinfo is not None:
-            try:
-                index = members.index(tarinfo)
-            except ValueError:
-                # The given starting point might be a (modified) copy.
-                # We'll later skip members until we find an equivalent.
-                skipping = True
-            else:
-                # Happy fast path
-                members = members[:index]
-
-        if normalize:
-            name = os.path.normpath(name)
-
-        for member in reversed(members):
-            if skipping:
-                if tarinfo.offset == member.offset:
-                    skipping = False
-                continue
-            if normalize:
-                member_name = os.path.normpath(member.name)
-            else:
-                member_name = member.name
-
-            if name == member_name:
-                return member
-
-        if skipping:
-            # Starting point was not found
-            raise ValueError(tarinfo)
-
-    def _load(self):
-        """Read through the entire archive file and look for readable
-           members.
-        """
-        while self.next() is not None:
-            pass
-        self._loaded = True
-
-    def _check(self, mode=None):
-        """Check if TarFile is still open, and if the operation's mode
-           corresponds to TarFile's mode.
-        """
-        if self.closed:
-            raise OSError("%s is closed" % self.__class__.__name__)
-        if mode is not None and self.mode not in mode:
-            raise OSError("bad operation for mode %r" % self.mode)
-
-    def _find_link_target(self, tarinfo):
-        """Find the target member of a symlink or hardlink member in the
-           archive.
-        """
-        if tarinfo.issym():
-            # Always search the entire archive.
-            linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
-            limit = None
-        else:
-            # Search the archive before the link, because a hard link is
-            # just a reference to an already archived file.
-            linkname = tarinfo.linkname
-            limit = tarinfo
-
-        member = self._getmember(linkname, tarinfo=limit, normalize=True)
-        if member is None:
-            raise KeyError("linkname %r not found" % linkname)
-        return member
-
-    def __iter__(self):
-        """Provide an iterator object.
-        """
-        if self._loaded:
-            yield from self.members
-            return
-
-        # Yield items using TarFile's next() method.
-        # When all members have been read, set TarFile as _loaded.
-        index = 0
-        # Fix for SF #1100429: Under rare circumstances it can
-        # happen that getmembers() is called during iteration,
-        # which will have already exhausted the next() method.
-        if self.firstmember is not None:
-            tarinfo = self.next()
-            index += 1
-            yield tarinfo
-
-        while True:
-            if index < len(self.members):
-                tarinfo = self.members[index]
-            elif not self._loaded:
-                tarinfo = self.next()
-                if not tarinfo:
-                    self._loaded = True
-                    return
-            else:
-                return
-            index += 1
-            yield tarinfo
-
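-    # Because iteration reads members lazily via next(), a TarFile opened in
-    # stream mode can be walked without seeking. A sketch, where "stream" is
-    # any readable binary file object (e.g. a network response):
-    #
-    #     import tarfile
-    #     with tarfile.open(fileobj=stream, mode="r|gz") as tf:
-    #         for member in tf:
-    #             print(member.name)
-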
-    def _dbg(self, level, msg):
-        """Write debugging output to sys.stderr.
-        """
-        if level <= self.debug:
-            print(msg, file=sys.stderr)
-
-    def __enter__(self):
-        self._check()
-        return self
-
-    def __exit__(self, type, value, traceback):
-        if type is None:
-            self.close()
-        else:
-            # An exception occurred. We must not call close() because
-            # it would try to write end-of-archive blocks and padding.
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-
-#--------------------
-# exported functions
-#--------------------
-
-def is_tarfile(name):
-    """Return True if name points to a tar archive that we
-       are able to handle, else return False.
-
-       'name' should be a string, file, or file-like object.
-    """
-    try:
-        if hasattr(name, "read"):
-            pos = name.tell()
-            t = open(fileobj=name)
-            name.seek(pos)
-        else:
-            t = open(name)
-        t.close()
-        return True
-    except TarError:
-        return False
-
-open = TarFile.open
-
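-# Usage sketch for the helpers above (the path is a placeholder):
-#
-#     import tarfile
-#     if tarfile.is_tarfile("maybe.tar"):
-#         with tarfile.open("maybe.tar") as tf:
-#             print(tf.getnames())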
-
-def main():
-    import argparse
-
-    description = 'A simple command-line interface for tarfile module.'
-    parser = argparse.ArgumentParser(description=description)
-    parser.add_argument('-v', '--verbose', action='store_true', default=False,
-                        help='Verbose output')
-    parser.add_argument('--filter', metavar='',
-                        choices=_NAMED_FILTERS,
-                        help='Filter for extraction')
-
-    group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument('-l', '--list', metavar='',
-                       help='Show listing of a tarfile')
-    group.add_argument('-e', '--extract', nargs='+',
-                       metavar=('', ''),
-                       help='Extract tarfile into target dir')
-    group.add_argument('-c', '--create', nargs='+',
-                       metavar=('<name>', '<file>'),
-                       help='Create tarfile from sources')
-    group.add_argument('-t', '--test', metavar='<tarfile>',
-                       help='Test if a tarfile is valid')
-
-    args = parser.parse_args()
-
-    if args.filter and args.extract is None:
-        parser.exit(1, '--filter is only valid for extraction\n')
-
-    if args.test is not None:
-        src = args.test
-        if is_tarfile(src):
-            with open(src, 'r') as tar:
-                tar.getmembers()
-                print(tar.getmembers(), file=sys.stderr)
-            if args.verbose:
-                print('{!r} is a tar archive.'.format(src))
-        else:
-            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
-
-    elif args.list is not None:
-        src = args.list
-        if is_tarfile(src):
-            with TarFile.open(src, 'r:*') as tf:
-                tf.list(verbose=args.verbose)
-        else:
-            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
-
-    elif args.extract is not None:
-        if len(args.extract) == 1:
-            src = args.extract[0]
-            curdir = os.curdir
-        elif len(args.extract) == 2:
-            src, curdir = args.extract
-        else:
-            parser.exit(1, parser.format_help())
-
-        if is_tarfile(src):
-            with TarFile.open(src, 'r:*') as tf:
-                tf.extractall(path=curdir, filter=args.filter)
-            if args.verbose:
-                if curdir == '.':
-                    msg = '{!r} file is extracted.'.format(src)
-                else:
-                    msg = ('{!r} file is extracted '
-                           'into {!r} directory.').format(src, curdir)
-                print(msg)
-        else:
-            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
-
-    elif args.create is not None:
-        tar_name = args.create.pop(0)
-        _, ext = os.path.splitext(tar_name)
-        compressions = {
-            # gz
-            '.gz': 'gz',
-            '.tgz': 'gz',
-            # xz
-            '.xz': 'xz',
-            '.txz': 'xz',
-            # bz2
-            '.bz2': 'bz2',
-            '.tbz': 'bz2',
-            '.tbz2': 'bz2',
-            '.tb2': 'bz2',
-        }
-        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
-        tar_files = args.create
-
-        with TarFile.open(tar_name, tar_mode) as tf:
-            for file_name in tar_files:
-                tf.add(file_name)
-
-        if args.verbose:
-            print('{!r} file created.'.format(tar_name))
-
-if __name__ == '__main__':
-    main()
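The create and extract branches above boil down to two library calls; a
hedged sketch of the programmatic equivalents ("backup.tgz" and the
directories are hypothetical; '.tgz' maps to mode 'w:gz' via the suffix
table above):

    import tarfile

    with tarfile.open("backup.tgz", "w:gz") as tf:    # like: -c backup.tgz src
        tf.add("src")

    with tarfile.open("backup.tgz", "r:*") as tf:     # like: -e backup.tgz out
        tf.extractall(path="out", filter="data")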
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
deleted file mode 100644
index 581b308378..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
+++ /dev/null
@@ -1,69 +0,0 @@
-Metadata-Version: 2.1
-Name: jaraco.functools
-Version: 4.0.0
-Summary: Functools like those found in stdlib
-Home-page: https://github.com/jaraco/jaraco.functools
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-License-File: LICENSE
-Requires-Dist: more-itertools
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest >=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff ; extra == 'testing'
-Requires-Dist: jaraco.classes ; extra == 'testing'
-Requires-Dist: pytest-black >=0.3.7 ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
-   :target: https://pypi.org/project/jaraco.functools
-
-.. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
-
-.. image:: https://github.com/jaraco/jaraco.functools/workflows/tests/badge.svg
-   :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
-.. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
-   :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2023-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/jaraco.functools
-   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=readme
-
-Additional functools in the spirit of stdlib's functools.
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more <https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=readme>`_.
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
deleted file mode 100644
index 783aa7d2b9..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
-jaraco.functools-4.0.0.dist-info/RECORD,,
-jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
-jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
-jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
-jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
deleted file mode 100644
index f6205a5f19..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-jaraco
diff --git a/pkg_resources/_vendor/jaraco/functools/__init__.pyi b/pkg_resources/_vendor/jaraco/functools/__init__.pyi
index c2b9ab1757..19191bf93e 100644
--- a/pkg_resources/_vendor/jaraco/functools/__init__.pyi
+++ b/pkg_resources/_vendor/jaraco/functools/__init__.pyi
@@ -74,9 +74,6 @@ def result_invoke(
 def invoke(
     f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
 ) -> Callable[_P, _R]: ...
-def call_aside(
-    f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
-) -> Callable[_P, _R]: ...
 
 class Throttler(Generic[_R]):
     last_called: float
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
index 2ce6e4a6f5..5331c3fc91 100644
--- a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
+++ b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
@@ -2,6 +2,7 @@ more_itertools-10.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQr
 more_itertools-10.2.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
 more_itertools-10.2.0.dist-info/METADATA,sha256=lTIPxfD4IiP6aHzPjP4dXmzRRUmiXicAB6qnY82T-Gs,34886
 more_itertools-10.2.0.dist-info/RECORD,,
+more_itertools-10.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 more_itertools-10.2.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
 more_itertools/__init__.py,sha256=VodgFyRJvpnHbAMgseYRiP7r928FFOAakmQrl6J88os,149
 more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
diff --git a/pkg_resources/_vendor/platformdirs/__init__.py b/pkg_resources/_vendor/platformdirs/__init__.py
index aef2821b83..881a596e67 100644
--- a/pkg_resources/_vendor/platformdirs/__init__.py
+++ b/pkg_resources/_vendor/platformdirs/__init__.py
@@ -11,7 +11,7 @@
 if sys.version_info >= (3, 8):  # pragma: no cover (py38+)
     from typing import Literal
 else:  # pragma: no cover (py38+)
-    from ..typing_extensions import Literal
+    from typing_extensions import Literal
 
 from .api import PlatformDirsABC
 from .version import __version__
diff --git a/setup.cfg b/setup.cfg
index 0756fa92ea..9a91c01490 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -112,6 +112,7 @@ distutils.commands =
 	alias = setuptools.command.alias:alias
 	bdist_egg = setuptools.command.bdist_egg:bdist_egg
 	bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
+	bdist_wheel = setuptools.command.bdist_wheel:bdist_wheel
 	build = setuptools.command.build:build
 	build_clib = setuptools.command.build_clib:build_clib
 	build_ext = setuptools.command.build_ext:build_ext
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
deleted file mode 100644
index e7b64c87f8..0000000000
--- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
+++ /dev/null
@@ -1,44 +0,0 @@
-Metadata-Version: 2.1
-Name: backports.tarfile
-Version: 1.0.0
-Summary: Backport of CPython tarfile module
-Home-page: https://github.com/jaraco/backports.tarfile
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-License-File: LICENSE
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
-   :target: https://pypi.org/project/backports.tarfile
-
-.. image:: https://img.shields.io/pypi/pyversions/backports.tarfile.svg
-
-.. image:: https://github.com/jaraco/backports.tarfile/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/jaraco/backports.tarfile/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. .. image:: https://readthedocs.org/projects/backportstarfile/badge/?version=latest
-..    :target: https://backportstarfile.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
deleted file mode 100644
index a6a44d8fcc..0000000000
--- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
+++ /dev/null
@@ -1,9 +0,0 @@
-backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
-backports.tarfile-1.0.0.dist-info/RECORD,,
-backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
-backports/__pycache__/tarfile.cpython-312.pyc,,
-backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
deleted file mode 100644
index 99d2be5b64..0000000000
--- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-backports
diff --git a/setuptools/_vendor/backports/tarfile.py b/setuptools/_vendor/backports/tarfile.py
deleted file mode 100644
index a7a9a6e7b9..0000000000
--- a/setuptools/_vendor/backports/tarfile.py
+++ /dev/null
@@ -1,2900 +0,0 @@
-#!/usr/bin/env python3
-#-------------------------------------------------------------------
-# tarfile.py
-#-------------------------------------------------------------------
-# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
-# All rights reserved.
-#
-# Permission  is  hereby granted,  free  of charge,  to  any person
-# obtaining a  copy of  this software  and associated documentation
-# files  (the  "Software"),  to   deal  in  the  Software   without
-# restriction,  including  without limitation  the  rights to  use,
-# copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies  of  the  Software,  and to  permit  persons  to  whom the
-# Software  is  furnished  to  do  so,  subject  to  the  following
-# conditions:
-#
-# The above copyright  notice and this  permission notice shall  be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
-# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
-# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
-# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
-# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
-# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-#
-"""Read from and write to tar format archives.
-"""
-
-version     = "0.9.0"
-__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
-__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
-
-#---------
-# Imports
-#---------
-from builtins import open as bltn_open
-import sys
-import os
-import io
-import shutil
-import stat
-import time
-import struct
-import copy
-import re
-import warnings
-
-try:
-    import pwd
-except ImportError:
-    pwd = None
-try:
-    import grp
-except ImportError:
-    grp = None
-
-# os.symlink on Windows prior to 6.0 raises NotImplementedError
-# OSError (winerror=1314) will be raised if the caller does not hold the
-# SeCreateSymbolicLinkPrivilege privilege
-symlink_exception = (AttributeError, NotImplementedError, OSError)
-
-# from tarfile import *
-__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
-           "CompressionError", "StreamError", "ExtractError", "HeaderError",
-           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
-           "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
-           "tar_filter", "FilterError", "AbsoluteLinkError",
-           "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
-           "LinkOutsideDestinationError"]
-
-
-#---------------------------------------------------------
-# tar constants
-#---------------------------------------------------------
-NUL = b"\0"                     # the null character
-BLOCKSIZE = 512                 # length of processing blocks
-RECORDSIZE = BLOCKSIZE * 20     # length of records
-GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
-POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string
-
-LENGTH_NAME = 100               # maximum length of a filename
-LENGTH_LINK = 100               # maximum length of a linkname
-LENGTH_PREFIX = 155             # maximum length of the prefix field
-
-REGTYPE = b"0"                  # regular file
-AREGTYPE = b"\0"                # regular file
-LNKTYPE = b"1"                  # link (inside tarfile)
-SYMTYPE = b"2"                  # symbolic link
-CHRTYPE = b"3"                  # character special device
-BLKTYPE = b"4"                  # block special device
-DIRTYPE = b"5"                  # directory
-FIFOTYPE = b"6"                 # fifo special device
-CONTTYPE = b"7"                 # contiguous file
-
-GNUTYPE_LONGNAME = b"L"         # GNU tar longname
-GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
-GNUTYPE_SPARSE = b"S"           # GNU tar sparse file
-
-XHDTYPE = b"x"                  # POSIX.1-2001 extended header
-XGLTYPE = b"g"                  # POSIX.1-2001 global header
-SOLARIS_XHDTYPE = b"X"          # Solaris extended header
-
-USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
-GNU_FORMAT = 1                  # GNU tar format
-PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
-DEFAULT_FORMAT = PAX_FORMAT
-
-#---------------------------------------------------------
-# tarfile constants
-#---------------------------------------------------------
-# File types that tarfile supports:
-SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
-                   SYMTYPE, DIRTYPE, FIFOTYPE,
-                   CONTTYPE, CHRTYPE, BLKTYPE,
-                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-                   GNUTYPE_SPARSE)
-
-# File types that will be treated as a regular file.
-REGULAR_TYPES = (REGTYPE, AREGTYPE,
-                 CONTTYPE, GNUTYPE_SPARSE)
-
-# File types that are part of the GNU tar format.
-GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-             GNUTYPE_SPARSE)
-
-# Fields from a pax header that override a TarInfo attribute.
-PAX_FIELDS = ("path", "linkpath", "size", "mtime",
-              "uid", "gid", "uname", "gname")
-
-# Fields from a pax header that are affected by hdrcharset.
-PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
-
-# Fields in a pax header that are numbers; all other fields
-# are treated as strings.
-PAX_NUMBER_FIELDS = {
-    "atime": float,
-    "ctime": float,
-    "mtime": float,
-    "uid": int,
-    "gid": int,
-    "size": int
-}
-
-#---------------------------------------------------------
-# initialization
-#---------------------------------------------------------
-if os.name == "nt":
-    ENCODING = "utf-8"
-else:
-    ENCODING = sys.getfilesystemencoding()
-
-#---------------------------------------------------------
-# Some useful functions
-#---------------------------------------------------------
-
-def stn(s, length, encoding, errors):
-    """Convert a string to a null-terminated bytes object.
-    """
-    if s is None:
-        raise ValueError("metadata cannot contain None")
-    s = s.encode(encoding, errors)
-    return s[:length] + (length - len(s)) * NUL
-
-def nts(s, encoding, errors):
-    """Convert a null-terminated bytes object to a string.
-    """
-    p = s.find(b"\0")
-    if p != -1:
-        s = s[:p]
-    return s.decode(encoding, errors)
-
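A worked round trip through the two helpers above (field width chosen for
illustration); note that over-long values are silently truncated:

    assert stn("usr/bin", 10, "utf-8", "strict") == b"usr/bin\x00\x00\x00"
    assert nts(b"usr/bin\x00\x00\x00", "utf-8", "strict") == "usr/bin"
    assert stn("x" * 12, 10, "utf-8", "strict") == b"x" * 10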
-def nti(s):
-    """Convert a number field to a python number.
-    """
-    # There are two possible encodings for a number field, see
-    # itn() below.
-    if s[0] in (0o200, 0o377):
-        n = 0
-        for i in range(len(s) - 1):
-            n <<= 8
-            n += s[i + 1]
-        if s[0] == 0o377:
-            n = -(256 ** (len(s) - 1) - n)
-    else:
-        try:
-            s = nts(s, "ascii", "strict")
-            n = int(s.strip() or "0", 8)
-        except ValueError:
-            raise InvalidHeaderError("invalid header")
-    return n
-
-def itn(n, digits=8, format=DEFAULT_FORMAT):
-    """Convert a python number to a number field.
-    """
-    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
-    # octal digits followed by a null byte; this allows values up to
-    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
-    # that if necessary. A leading 0o200 or 0o377 byte indicates this
-    # particular encoding; the following digits-1 bytes are a big-endian
-    # base-256 representation. This allows values up to (256**(digits-1))-1.
-    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
-    # number.
-    original_n = n
-    n = int(n)
-    if 0 <= n < 8 ** (digits - 1):
-        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
-    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
-        if n >= 0:
-            s = bytearray([0o200])
-        else:
-            s = bytearray([0o377])
-            n = 256 ** digits + n
-
-        for i in range(digits - 1):
-            s.insert(1, n & 0o377)
-            n >>= 8
-    else:
-        raise ValueError("overflow in number field")
-
-    return s
-
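Worked examples of the two encodings described above, using the helpers as
defined in this module (values chosen for illustration):

    assert itn(0o644) == b"0000644\x00"       # fits in 7 octal digits + NUL
    # 8**7 overflows 7 octal digits, so GNU base-256 kicks in: a leading
    # 0o200 byte, then a big-endian base-256 payload.
    assert itn(8**7, 8, GNU_FORMAT) == b"\x80\x00\x00\x00\x00\x20\x00\x00"
    assert nti(itn(8**7, 8, GNU_FORMAT)) == 8**7   # nti() reverses either form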
-def calc_chksums(buf):
-    """Calculate the checksum for a member's header by summing up all
-       characters except for the chksum field which is treated as if
-       it was filled with spaces. According to the GNU tar sources,
-       some tars (Sun and NeXT) calculate chksum with signed char,
-       which will be different if there are chars in the buffer with
-       the high bit set. So we calculate two checksums, unsigned and
-       signed.
-    """
-    unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
-    signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
-    return unsigned_chksum, signed_chksum
-
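For an all-zero header block both sums reduce to the 256 contributed by
treating the chksum field as eight spaces (8 * 0x20); they diverge only when
the buffer contains bytes with the high bit set:

    unsigned, signed = calc_chksums(bytes(BLOCKSIZE))
    assert unsigned == signed == 256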
-def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
-    """Copy length bytes from fileobj src to fileobj dst.
-       If length is None, copy the entire content.
-    """
-    bufsize = bufsize or 16 * 1024
-    if length == 0:
-        return
-    if length is None:
-        shutil.copyfileobj(src, dst, bufsize)
-        return
-
-    blocks, remainder = divmod(length, bufsize)
-    for b in range(blocks):
-        buf = src.read(bufsize)
-        if len(buf) < bufsize:
-            raise exception("unexpected end of data")
-        dst.write(buf)
-
-    if remainder != 0:
-        buf = src.read(remainder)
-        if len(buf) < remainder:
-            raise exception("unexpected end of data")
-        dst.write(buf)
-    return
-
-def _safe_print(s):
-    encoding = getattr(sys.stdout, 'encoding', None)
-    if encoding is not None:
-        s = s.encode(encoding, 'backslashreplace').decode(encoding)
-    print(s, end=' ')
-
-
-class TarError(Exception):
-    """Base exception."""
-    pass
-class ExtractError(TarError):
-    """General exception for extract errors."""
-    pass
-class ReadError(TarError):
-    """Exception for unreadable tar archives."""
-    pass
-class CompressionError(TarError):
-    """Exception for unavailable compression methods."""
-    pass
-class StreamError(TarError):
-    """Exception for unsupported operations on stream-like TarFiles."""
-    pass
-class HeaderError(TarError):
-    """Base exception for header errors."""
-    pass
-class EmptyHeaderError(HeaderError):
-    """Exception for empty headers."""
-    pass
-class TruncatedHeaderError(HeaderError):
-    """Exception for truncated headers."""
-    pass
-class EOFHeaderError(HeaderError):
-    """Exception for end of file headers."""
-    pass
-class InvalidHeaderError(HeaderError):
-    """Exception for invalid headers."""
-    pass
-class SubsequentHeaderError(HeaderError):
-    """Exception for missing and invalid extended headers."""
-    pass
-
-#---------------------------
-# internal stream interface
-#---------------------------
-class _LowLevelFile:
-    """Low-level file object. Supports reading and writing.
-       It is used instead of a regular file object for streaming
-       access.
-    """
-
-    def __init__(self, name, mode):
-        mode = {
-            "r": os.O_RDONLY,
-            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
-        }[mode]
-        if hasattr(os, "O_BINARY"):
-            mode |= os.O_BINARY
-        self.fd = os.open(name, mode, 0o666)
-
-    def close(self):
-        os.close(self.fd)
-
-    def read(self, size):
-        return os.read(self.fd, size)
-
-    def write(self, s):
-        os.write(self.fd, s)
-
-class _Stream:
-    """Class that serves as an adapter between TarFile and
-       a stream-like object.  The stream-like object only
-       needs to have a read() or write() method that works with bytes,
-       and the method is accessed blockwise.
-       Use of gzip or bzip2 compression is possible.
-       A stream-like object could be for example: sys.stdin.buffer,
-       sys.stdout.buffer, a socket, a tape device etc.
-
-       _Stream is intended to be used only internally.
-    """
-
-    def __init__(self, name, mode, comptype, fileobj, bufsize,
-                 compresslevel):
-        """Construct a _Stream object.
-        """
-        self._extfileobj = True
-        if fileobj is None:
-            fileobj = _LowLevelFile(name, mode)
-            self._extfileobj = False
-
-        if comptype == '*':
-            # Enable transparent compression detection for the
-            # stream interface
-            fileobj = _StreamProxy(fileobj)
-            comptype = fileobj.getcomptype()
-
-        self.name     = name or ""
-        self.mode     = mode
-        self.comptype = comptype
-        self.fileobj  = fileobj
-        self.bufsize  = bufsize
-        self.buf      = b""
-        self.pos      = 0
-        self.closed   = False
-
-        try:
-            if comptype == "gz":
-                try:
-                    import zlib
-                except ImportError:
-                    raise CompressionError("zlib module is not available") from None
-                self.zlib = zlib
-                self.crc = zlib.crc32(b"")
-                if mode == "r":
-                    self.exception = zlib.error
-                    self._init_read_gz()
-                else:
-                    self._init_write_gz(compresslevel)
-
-            elif comptype == "bz2":
-                try:
-                    import bz2
-                except ImportError:
-                    raise CompressionError("bz2 module is not available") from None
-                if mode == "r":
-                    self.dbuf = b""
-                    self.cmp = bz2.BZ2Decompressor()
-                    self.exception = OSError
-                else:
-                    self.cmp = bz2.BZ2Compressor(compresslevel)
-
-            elif comptype == "xz":
-                try:
-                    import lzma
-                except ImportError:
-                    raise CompressionError("lzma module is not available") from None
-                if mode == "r":
-                    self.dbuf = b""
-                    self.cmp = lzma.LZMADecompressor()
-                    self.exception = lzma.LZMAError
-                else:
-                    self.cmp = lzma.LZMACompressor()
-
-            elif comptype != "tar":
-                raise CompressionError("unknown compression type %r" % comptype)
-
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    def __del__(self):
-        if hasattr(self, "closed") and not self.closed:
-            self.close()
-
-    def _init_write_gz(self, compresslevel):
-        """Initialize for writing with gzip compression.
-        """
-        self.cmp = self.zlib.compressobj(compresslevel,
-                                         self.zlib.DEFLATED,
-                                         -self.zlib.MAX_WBITS,
-                                         self.zlib.DEF_MEM_LEVEL,
-                                         0)
-        timestamp = struct.pack("<L", int(time.time()))
-        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
-        if self.name.endswith(".gz"):
-            self.name = self.name[:-3]
-        # Honor "directory components removed" from RFC 1952
-        self.name = os.path.basename(self.name)
-        # RFC 1952 says we must use ISO-8859-1 for the FNAME field.
-        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
-
-    def write(self, s):
-        """Write string s to the stream.
-        """
-        if self.comptype == "gz":
-            self.crc = self.zlib.crc32(s, self.crc)
-        self.pos += len(s)
-        if self.comptype != "tar":
-            s = self.cmp.compress(s)
-        self.__write(s)
-
-    def __write(self, s):
-        """Write string s to the stream if a whole new block
-           is ready to be written.
-        """
-        self.buf += s
-        while len(self.buf) > self.bufsize:
-            self.fileobj.write(self.buf[:self.bufsize])
-            self.buf = self.buf[self.bufsize:]
-
-    def close(self):
-        """Close the _Stream object. No operation should be
-           done on it afterwards.
-        """
-        if self.closed:
-            return
-
-        self.closed = True
-        try:
-            if self.mode == "w" and self.comptype != "tar":
-                self.buf += self.cmp.flush()
-
-            if self.mode == "w" and self.buf:
-                self.fileobj.write(self.buf)
-                self.buf = b""
-                if self.comptype == "gz":
-                    self.fileobj.write(struct.pack("<L", self.crc))
-                    self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
-        finally:
-            if not self._extfileobj:
-                self.fileobj.close()
-
-    def _init_read_gz(self):
-        """Initialize for reading a gzip compressed fileobj.
-        """
-        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
-        self.dbuf = b""
-
-        # taken from gzip.GzipFile with some alterations
-        if self.__read(2) != b"\037\213":
-            raise ReadError("not a gzip file")
-        if self.__read(1) != b"\010":
-            raise CompressionError("unsupported compression method")
-
-        flag = ord(self.__read(1))
-        self.__read(6)
-
-        if flag & 4:
-            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
-            self.read(xlen)
-        if flag & 8:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 16:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 2:
-            self.__read(2)
-
-    def tell(self):
-        """Return the stream's file pointer position.
-        """
-        return self.pos
-
-    def seek(self, pos=0):
-        """Set the stream's file pointer to pos. Negative seeking
-           is forbidden.
-        """
-        if pos - self.pos >= 0:
-            blocks, remainder = divmod(pos - self.pos, self.bufsize)
-            for i in range(blocks):
-                self.read(self.bufsize)
-            self.read(remainder)
-        else:
-            raise StreamError("seeking backwards is not allowed")
-        return self.pos
-
-    def read(self, size):
-        """Return the next size number of bytes from the stream."""
-        assert size is not None
-        buf = self._read(size)
-        self.pos += len(buf)
-        return buf
-
-    def _read(self, size):
-        """Return size bytes from the stream.
-        """
-        if self.comptype == "tar":
-            return self.__read(size)
-
-        c = len(self.dbuf)
-        t = [self.dbuf]
-        while c < size:
-            # Skip underlying buffer to avoid unaligned double buffering.
-            if self.buf:
-                buf = self.buf
-                self.buf = b""
-            else:
-                buf = self.fileobj.read(self.bufsize)
-                if not buf:
-                    break
-            try:
-                buf = self.cmp.decompress(buf)
-            except self.exception as e:
-                raise ReadError("invalid compressed data") from e
-            t.append(buf)
-            c += len(buf)
-        t = b"".join(t)
-        self.dbuf = t[size:]
-        return t[:size]
-
-    def __read(self, size):
-        """Return size bytes from stream. If internal buffer is empty,
-           read another block from the stream.
-        """
-        c = len(self.buf)
-        t = [self.buf]
-        while c < size:
-            buf = self.fileobj.read(self.bufsize)
-            if not buf:
-                break
-            t.append(buf)
-            c += len(buf)
-        t = b"".join(t)
-        self.buf = t[size:]
-        return t[:size]
-# class _Stream
-
-class _StreamProxy(object):
-    """Small proxy class that enables transparent compression
-       detection for the Stream interface (mode 'r|*').
-    """
-
-    def __init__(self, fileobj):
-        self.fileobj = fileobj
-        self.buf = self.fileobj.read(BLOCKSIZE)
-
-    def read(self, size):
-        self.read = self.fileobj.read
-        return self.buf
-
-    def getcomptype(self):
-        if self.buf.startswith(b"\x1f\x8b\x08"):
-            return "gz"
-        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
-            return "bz2"
-        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
-            return "xz"
-        else:
-            return "tar"
-
-    def close(self):
-        self.fileobj.close()
-# class StreamProxy
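A small sketch of the detection, feeding the proxy an in-memory stream whose
first block begins with the gzip magic (_StreamProxy is module-internal;
this only illustrates getcomptype()):

    import io

    proxy = _StreamProxy(io.BytesIO(b"\x1f\x8b\x08" + bytes(509)))
    assert proxy.getcomptype() == "gz"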
-
-#------------------------
-# Extraction file object
-#------------------------
-class _FileInFile(object):
-    """A thin wrapper around an existing file object that
-       provides a part of its data as an individual file
-       object.
-    """
-
-    def __init__(self, fileobj, offset, size, name, blockinfo=None):
-        self.fileobj = fileobj
-        self.offset = offset
-        self.size = size
-        self.position = 0
-        self.name = name
-        self.closed = False
-
-        if blockinfo is None:
-            blockinfo = [(0, size)]
-
-        # Construct a map with data and zero blocks.
-        self.map_index = 0
-        self.map = []
-        lastpos = 0
-        realpos = self.offset
-        for offset, size in blockinfo:
-            if offset > lastpos:
-                self.map.append((False, lastpos, offset, None))
-            self.map.append((True, offset, offset + size, realpos))
-            realpos += size
-            lastpos = offset + size
-        if lastpos < self.size:
-            self.map.append((False, lastpos, self.size, None))
-
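For example, a 100-byte sparse member whose only stored block covers bytes
10-30 (archive offset chosen arbitrarily) produces a three-entry map of
hole, data, hole, and reads inside a hole return NULs:

    import io

    fobj = _FileInFile(io.BytesIO(bytes(2048)), offset=1024, size=100,
                       name="sparse", blockinfo=[(10, 20)])
    assert fobj.map == [(False, 0, 10, None), (True, 10, 30, 1024),
                        (False, 30, 100, None)]
    assert fobj.read(10) == b"\x00" * 10      # the leading hole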
-    def flush(self):
-        pass
-
-    def readable(self):
-        return True
-
-    def writable(self):
-        return False
-
-    def seekable(self):
-        return self.fileobj.seekable()
-
-    def tell(self):
-        """Return the current file position.
-        """
-        return self.position
-
-    def seek(self, position, whence=io.SEEK_SET):
-        """Seek to a position in the file.
-        """
-        if whence == io.SEEK_SET:
-            self.position = min(max(position, 0), self.size)
-        elif whence == io.SEEK_CUR:
-            if position < 0:
-                self.position = max(self.position + position, 0)
-            else:
-                self.position = min(self.position + position, self.size)
-        elif whence == io.SEEK_END:
-            self.position = max(min(self.size + position, self.size), 0)
-        else:
-            raise ValueError("Invalid argument")
-        return self.position
-
-    def read(self, size=None):
-        """Read data from the file.
-        """
-        if size is None:
-            size = self.size - self.position
-        else:
-            size = min(size, self.size - self.position)
-
-        buf = b""
-        while size > 0:
-            while True:
-                data, start, stop, offset = self.map[self.map_index]
-                if start <= self.position < stop:
-                    break
-                else:
-                    self.map_index += 1
-                    if self.map_index == len(self.map):
-                        self.map_index = 0
-            length = min(size, stop - self.position)
-            if data:
-                self.fileobj.seek(offset + (self.position - start))
-                b = self.fileobj.read(length)
-                if len(b) != length:
-                    raise ReadError("unexpected end of data")
-                buf += b
-            else:
-                buf += NUL * length
-            size -= length
-            self.position += length
-        return buf
-
-    def readinto(self, b):
-        buf = self.read(len(b))
-        b[:len(buf)] = buf
-        return len(buf)
-
-    def close(self):
-        self.closed = True
-#class _FileInFile
-
-class ExFileObject(io.BufferedReader):
-
-    def __init__(self, tarfile, tarinfo):
-        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
-                tarinfo.size, tarinfo.name, tarinfo.sparse)
-        super().__init__(fileobj)
-#class ExFileObject
-
-
-#-----------------------------
-# extraction filters (PEP 706)
-#-----------------------------
-
-class FilterError(TarError):
-    pass
-
-class AbsolutePathError(FilterError):
-    def __init__(self, tarinfo):
-        self.tarinfo = tarinfo
-        super().__init__(f'member {tarinfo.name!r} has an absolute path')
-
-class OutsideDestinationError(FilterError):
-    def __init__(self, tarinfo, path):
-        self.tarinfo = tarinfo
-        self._path = path
-        super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
-                         + 'which is outside the destination')
-
-class SpecialFileError(FilterError):
-    def __init__(self, tarinfo):
-        self.tarinfo = tarinfo
-        super().__init__(f'{tarinfo.name!r} is a special file')
-
-class AbsoluteLinkError(FilterError):
-    def __init__(self, tarinfo):
-        self.tarinfo = tarinfo
-        super().__init__(f'{tarinfo.name!r} is a link to an absolute path')
-
-class LinkOutsideDestinationError(FilterError):
-    def __init__(self, tarinfo, path):
-        self.tarinfo = tarinfo
-        self._path = path
-        super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
-                         + 'which is outside the destination')
-
-def _get_filtered_attrs(member, dest_path, for_data=True):
-    new_attrs = {}
-    name = member.name
-    dest_path = os.path.realpath(dest_path)
-    # Strip leading / (tar's directory separator) from filenames.
-    # Include os.sep (target OS directory separator) as well.
-    if name.startswith(('/', os.sep)):
-        name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
-    if os.path.isabs(name):
-        # Path is absolute even after stripping.
-        # For example, 'C:/foo' on Windows.
-        raise AbsolutePathError(member)
-    # Ensure we stay in the destination
-    target_path = os.path.realpath(os.path.join(dest_path, name))
-    if os.path.commonpath([target_path, dest_path]) != dest_path:
-        raise OutsideDestinationError(member, target_path)
-    # Limit permissions (no high bits, and go-w)
-    mode = member.mode
-    if mode is not None:
-        # Strip high bits & group/other write bits
-        mode = mode & 0o755
-        if for_data:
-            # For data, handle permissions & file types
-            if member.isreg() or member.islnk():
-                if not mode & 0o100:
-                    # Clear executable bits if not executable by user
-                    mode &= ~0o111
-                # Ensure owner can read & write
-                mode |= 0o600
-            elif member.isdir() or member.issym():
-                # Ignore mode for directories & symlinks
-                mode = None
-            else:
-                # Reject special files
-                raise SpecialFileError(member)
-        if mode != member.mode:
-            new_attrs['mode'] = mode
-    if for_data:
-        # Ignore ownership for 'data'
-        if member.uid is not None:
-            new_attrs['uid'] = None
-        if member.gid is not None:
-            new_attrs['gid'] = None
-        if member.uname is not None:
-            new_attrs['uname'] = None
-        if member.gname is not None:
-            new_attrs['gname'] = None
-        # Check link destination for 'data'
-        if member.islnk() or member.issym():
-            if os.path.isabs(member.linkname):
-                raise AbsoluteLinkError(member)
-            if member.issym():
-                target_path = os.path.join(dest_path,
-                                           os.path.dirname(name),
-                                           member.linkname)
-            else:
-                target_path = os.path.join(dest_path,
-                                           member.linkname)
-            target_path = os.path.realpath(target_path)
-            if os.path.commonpath([target_path, dest_path]) != dest_path:
-                raise LinkOutsideDestinationError(member, target_path)
-    return new_attrs
-
-def fully_trusted_filter(member, dest_path):
-    return member
-
-def tar_filter(member, dest_path):
-    new_attrs = _get_filtered_attrs(member, dest_path, False)
-    if new_attrs:
-        return member.replace(**new_attrs, deep=False)
-    return member
-
-def data_filter(member, dest_path):
-    new_attrs = _get_filtered_attrs(member, dest_path, True)
-    if new_attrs:
-        return member.replace(**new_attrs, deep=False)
-    return member
-
-_NAMED_FILTERS = {
-    "fully_trusted": fully_trusted_filter,
-    "tar": tar_filter,
-    "data": data_filter,
-}
-
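extract() and extractall() also accept any callable with this same
(member, dest_path) signature, so the named filters above can be wrapped; a
hedged sketch ("archive.tar" and "dest" are hypothetical, and returning None
skips a member):

    import tarfile

    def skip_scripts(member, dest_path):
        if member.name.endswith(".sh"):
            return None                       # drop shell scripts entirely
        return tarfile.data_filter(member, dest_path)

    with tarfile.open("archive.tar") as tf:
        tf.extractall(path="dest", filter=skip_scripts)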
-#------------------
-# Exported Classes
-#------------------
-
-# Sentinel for replace() defaults, meaning "don't change the attribute"
-_KEEP = object()
-
-class TarInfo(object):
-    """Informational class which holds the details about an
-       archive member given by a tar header block.
-       TarInfo objects are returned by TarFile.getmember(),
-       TarFile.getmembers() and TarFile.gettarinfo() and are
-       usually created internally.
-    """
-
-    __slots__ = dict(
-        name = 'Name of the archive member.',
-        mode = 'Permission bits.',
-        uid = 'User ID of the user who originally stored this member.',
-        gid = 'Group ID of the user who originally stored this member.',
-        size = 'Size in bytes.',
-        mtime = 'Time of last modification.',
-        chksum = 'Header checksum.',
-        type = ('File type. type is usually one of these constants: '
-                'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
-                'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
-        linkname = ('Name of the target file name, which is only present '
-                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
-        uname = 'User name.',
-        gname = 'Group name.',
-        devmajor = 'Device major number.',
-        devminor = 'Device minor number.',
-        offset = 'The tar header starts here.',
-        offset_data = "The file's data starts here.",
-        pax_headers = ('A dictionary containing key-value pairs of an '
-                       'associated pax extended header.'),
-        sparse = 'Sparse member information.',
-        tarfile = None,
-        _sparse_structs = None,
-        _link_target = None,
-        )
-
-    def __init__(self, name=""):
-        """Construct a TarInfo object. name is the optional name
-           of the member.
-        """
-        self.name = name        # member name
-        self.mode = 0o644       # file permissions
-        self.uid = 0            # user id
-        self.gid = 0            # group id
-        self.size = 0           # file size
-        self.mtime = 0          # modification time
-        self.chksum = 0         # header checksum
-        self.type = REGTYPE     # member type
-        self.linkname = ""      # link name
-        self.uname = ""         # user name
-        self.gname = ""         # group name
-        self.devmajor = 0       # device major number
-        self.devminor = 0       # device minor number
-
-        self.offset = 0         # the tar header starts here
-        self.offset_data = 0    # the file's data starts here
-
-        self.sparse = None      # sparse member information
-        self.pax_headers = {}   # pax header information
-
-    @property
-    def path(self):
-        'In pax headers, "name" is called "path".'
-        return self.name
-
-    @path.setter
-    def path(self, name):
-        self.name = name
-
-    @property
-    def linkpath(self):
-        'In pax headers, "linkname" is called "linkpath".'
-        return self.linkname
-
-    @linkpath.setter
-    def linkpath(self, linkname):
-        self.linkname = linkname
-
-    def __repr__(self):
-        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
-
-    def replace(self, *,
-                name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
-                uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
-                deep=True, _KEEP=_KEEP):
-        """Return a deep copy of self with the given attributes replaced.
-        """
-        if deep:
-            result = copy.deepcopy(self)
-        else:
-            result = copy.copy(self)
-        if name is not _KEEP:
-            result.name = name
-        if mtime is not _KEEP:
-            result.mtime = mtime
-        if mode is not _KEEP:
-            result.mode = mode
-        if linkname is not _KEEP:
-            result.linkname = linkname
-        if uid is not _KEEP:
-            result.uid = uid
-        if gid is not _KEEP:
-            result.gid = gid
-        if uname is not _KEEP:
-            result.uname = uname
-        if gname is not _KEEP:
-            result.gname = gname
-        return result
-
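The _KEEP sentinel lets replace() tell "argument not passed" apart from any
real value (including None); a short sketch:

    info = TarInfo("pkg/data.bin")
    anon = info.replace(uname="nobody", deep=False)
    assert anon.uname == "nobody" and anon.name == "pkg/data.bin"
    assert info.uname == ""                   # the original is untouched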
-    def get_info(self):
-        """Return the TarInfo's attributes as a dictionary.
-        """
-        if self.mode is None:
-            mode = None
-        else:
-            mode = self.mode & 0o7777
-        info = {
-            "name":     self.name,
-            "mode":     mode,
-            "uid":      self.uid,
-            "gid":      self.gid,
-            "size":     self.size,
-            "mtime":    self.mtime,
-            "chksum":   self.chksum,
-            "type":     self.type,
-            "linkname": self.linkname,
-            "uname":    self.uname,
-            "gname":    self.gname,
-            "devmajor": self.devmajor,
-            "devminor": self.devminor
-        }
-
-        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
-            info["name"] += "/"
-
-        return info
-
-    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
-        """Return a tar header as a string of 512 byte blocks.
-        """
-        info = self.get_info()
-        for name, value in info.items():
-            if value is None:
-                raise ValueError("%s may not be None" % name)
-
-        if format == USTAR_FORMAT:
-            return self.create_ustar_header(info, encoding, errors)
-        elif format == GNU_FORMAT:
-            return self.create_gnu_header(info, encoding, errors)
-        elif format == PAX_FORMAT:
-            return self.create_pax_header(info, encoding)
-        else:
-            raise ValueError("invalid format")
-
-    def create_ustar_header(self, info, encoding, errors):
-        """Return the object as a ustar header block.
-        """
-        info["magic"] = POSIX_MAGIC
-
-        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
-            raise ValueError("linkname is too long")
-
-        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
-            info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
-
-        return self._create_header(info, USTAR_FORMAT, encoding, errors)
-
-    def create_gnu_header(self, info, encoding, errors):
-        """Return the object as a GNU header block sequence.
-        """
-        info["magic"] = GNU_MAGIC
-
-        buf = b""
-        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
-            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
-
-        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
-            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
-
-        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
-
-    def create_pax_header(self, info, encoding):
-        """Return the object as a ustar header block. If it cannot be
-           represented this way, prepend a pax extended header sequence
-           with supplemental information.
-        """
-        info["magic"] = POSIX_MAGIC
-        pax_headers = self.pax_headers.copy()
-
-        # Test string fields for values that exceed the field length or cannot
-        # be represented in ASCII encoding.
-        for name, hname, length in (
-                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
-                ("uname", "uname", 32), ("gname", "gname", 32)):
-
-            if hname in pax_headers:
-                # The pax header has priority.
-                continue
-
-            # Try to encode the string as ASCII.
-            try:
-                info[name].encode("ascii", "strict")
-            except UnicodeEncodeError:
-                pax_headers[hname] = info[name]
-                continue
-
-            if len(info[name]) > length:
-                pax_headers[hname] = info[name]
-
-        # Test number fields for values that exceed the field limit or values
-        # that need to be stored as float.
-        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
-            needs_pax = False
-
-            val = info[name]
-            val_is_float = isinstance(val, float)
-            val_int = round(val) if val_is_float else val
-            if not 0 <= val_int < 8 ** (digits - 1):
-                # Avoid overflow.
-                info[name] = 0
-                needs_pax = True
-            elif val_is_float:
-                # Put rounded value in ustar header, and full
-                # precision value in pax header.
-                info[name] = val_int
-                needs_pax = True
-
-            # The existing pax header has priority.
-            if needs_pax and name not in pax_headers:
-                pax_headers[name] = str(val)
-
-        # Create a pax extended header if necessary.
-        if pax_headers:
-            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
-        else:
-            buf = b""
-
-        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
-
-    @classmethod
-    def create_pax_global_header(cls, pax_headers):
-        """Return the object as a pax global header block sequence.
-        """
-        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
-
-    def _posix_split_name(self, name, encoding, errors):
-        """Split a name longer than 100 chars into a prefix
-           and a name part.
-        """
-        components = name.split("/")
-        for i in range(1, len(components)):
-            prefix = "/".join(components[:i])
-            name = "/".join(components[i:])
-            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
-                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
-                break
-        else:
-            raise ValueError("name is too long")
-
-        return prefix, name
-
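An illustrative split of an over-long name (the path is made up; ustar
allows a 155-byte prefix plus a 100-byte name):

    long_name = "/".join(["d" * 20] * 8)      # 167 chars, too long for name
    prefix, name = TarInfo()._posix_split_name(long_name, "utf-8", "strict")
    assert len(prefix) <= 155 and len(name) <= 100
    assert prefix + "/" + name == long_name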
-    @staticmethod
-    def _create_header(info, format, encoding, errors):
-        """Return a header block. info is a dictionary with file
-           information, format must be one of the *_FORMAT constants.
-        """
-        has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
-        if has_device_fields:
-            devmajor = itn(info.get("devmajor", 0), 8, format)
-            devminor = itn(info.get("devminor", 0), 8, format)
-        else:
-            devmajor = stn("", 8, encoding, errors)
-            devminor = stn("", 8, encoding, errors)
-
-        # None values in metadata should cause ValueError.
-        # itn()/stn() do this for all fields except type.
-        filetype = info.get("type", REGTYPE)
-        if filetype is None:
-            raise ValueError("TarInfo.type must not be None")
-
-        parts = [
-            stn(info.get("name", ""), 100, encoding, errors),
-            itn(info.get("mode", 0) & 0o7777, 8, format),
-            itn(info.get("uid", 0), 8, format),
-            itn(info.get("gid", 0), 8, format),
-            itn(info.get("size", 0), 12, format),
-            itn(info.get("mtime", 0), 12, format),
-            b"        ", # checksum field
-            filetype,
-            stn(info.get("linkname", ""), 100, encoding, errors),
-            info.get("magic", POSIX_MAGIC),
-            stn(info.get("uname", ""), 32, encoding, errors),
-            stn(info.get("gname", ""), 32, encoding, errors),
-            devmajor,
-            devminor,
-            stn(info.get("prefix", ""), 155, encoding, errors)
-        ]
-
-        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
-        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
-        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
-        return buf
-
-    @staticmethod
-    def _create_payload(payload):
-        """Return the string payload filled with zero bytes
-           up to the next 512 byte border.
-        """
-        blocks, remainder = divmod(len(payload), BLOCKSIZE)
-        if remainder > 0:
-            payload += (BLOCKSIZE - remainder) * NUL
-        return payload
-
-    @classmethod
-    def _create_gnu_long_header(cls, name, type, encoding, errors):
-        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
-           for name.
-        """
-        name = name.encode(encoding, errors) + NUL
-
-        info = {}
-        info["name"] = "././@LongLink"
-        info["type"] = type
-        info["size"] = len(name)
-        info["magic"] = GNU_MAGIC
-
-        # create extended header + name blocks.
-        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
-                cls._create_payload(name)
-
-    @classmethod
-    def _create_pax_generic_header(cls, pax_headers, type, encoding):
-        """Return a POSIX.1-2008 extended or global header sequence
-           that contains a list of keyword, value pairs. The values
-           must be strings.
-        """
-        # Check if one of the fields contains surrogate characters and thereby
-        # forces hdrcharset=BINARY, see _proc_pax() for more information.
-        binary = False
-        for keyword, value in pax_headers.items():
-            try:
-                value.encode("utf-8", "strict")
-            except UnicodeEncodeError:
-                binary = True
-                break
-
-        records = b""
-        if binary:
-            # Put the hdrcharset field at the beginning of the header.
-            records += b"21 hdrcharset=BINARY\n"
-
-        for keyword, value in pax_headers.items():
-            keyword = keyword.encode("utf-8")
-            if binary:
-                # Try to restore the original byte representation of `value'.
-                # Needless to say, the encoding must match the string.
-                value = value.encode(encoding, "surrogateescape")
-            else:
-                value = value.encode("utf-8")
-
-            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
-            n = p = 0
-            while True:
-                n = l + len(str(p))
-                if n == p:
-                    break
-                p = n
-            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
-
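-        # Editorial note (not part of the vendored module): the loop above
-        # finds the fixed point of the self-referential record length. For
-        # keyword "path" and value "foo", " path=foo\n" is 10 bytes, so
-        # l == 10; the passes try p == 0 -> n == 11, p == 11 -> n == 12,
-        # and stop at p == n == 12, producing the record b"12 path=foo\n".
-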
-        # We use a hardcoded "././@PaxHeader" name like star does
-        # instead of the one that POSIX recommends.
-        info = {}
-        info["name"] = "././@PaxHeader"
-        info["type"] = type
-        info["size"] = len(records)
-        info["magic"] = POSIX_MAGIC
-
-        # Create pax header + record blocks.
-        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
-                cls._create_payload(records)
-
-    @classmethod
-    def frombuf(cls, buf, encoding, errors):
-        """Construct a TarInfo object from a 512 byte bytes object.
-        """
-        if len(buf) == 0:
-            raise EmptyHeaderError("empty header")
-        if len(buf) != BLOCKSIZE:
-            raise TruncatedHeaderError("truncated header")
-        if buf.count(NUL) == BLOCKSIZE:
-            raise EOFHeaderError("end of file header")
-
-        chksum = nti(buf[148:156])
-        if chksum not in calc_chksums(buf):
-            raise InvalidHeaderError("bad checksum")
-
-        obj = cls()
-        obj.name = nts(buf[0:100], encoding, errors)
-        obj.mode = nti(buf[100:108])
-        obj.uid = nti(buf[108:116])
-        obj.gid = nti(buf[116:124])
-        obj.size = nti(buf[124:136])
-        obj.mtime = nti(buf[136:148])
-        obj.chksum = chksum
-        obj.type = buf[156:157]
-        obj.linkname = nts(buf[157:257], encoding, errors)
-        obj.uname = nts(buf[265:297], encoding, errors)
-        obj.gname = nts(buf[297:329], encoding, errors)
-        obj.devmajor = nti(buf[329:337])
-        obj.devminor = nti(buf[337:345])
-        prefix = nts(buf[345:500], encoding, errors)
-
-        # Old V7 tar format represents a directory as a regular
-        # file with a trailing slash.
-        if obj.type == AREGTYPE and obj.name.endswith("/"):
-            obj.type = DIRTYPE
-
-        # The old GNU sparse format occupies some of the unused
-        # space in the buffer for up to 4 sparse structures.
-        # Save them for later processing in _proc_sparse().
-        if obj.type == GNUTYPE_SPARSE:
-            pos = 386
-            structs = []
-            for i in range(4):
-                try:
-                    offset = nti(buf[pos:pos + 12])
-                    numbytes = nti(buf[pos + 12:pos + 24])
-                except ValueError:
-                    break
-                structs.append((offset, numbytes))
-                pos += 24
-            isextended = bool(buf[482])
-            origsize = nti(buf[483:495])
-            obj._sparse_structs = (structs, isextended, origsize)
-
-        # Remove redundant slashes from directories.
-        if obj.isdir():
-            obj.name = obj.name.rstrip("/")
-
-        # Reconstruct a ustar longname.
-        if prefix and obj.type not in GNU_TYPES:
-            obj.name = prefix + "/" + obj.name
-        return obj
-
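-    # Editorial sketch (not part of the vendored module): frombuf() expects
-    # exactly one 512-byte block; next() relies on the distinct header
-    # errors raised above to tell a clean end-of-archive from corruption:
-    #
-    #     >>> TarInfo.frombuf(b"", "utf-8", "surrogateescape")
-    #     Traceback (most recent call last):
-    #       ...
-    #     EmptyHeaderError: empty header
-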
-    @classmethod
-    def fromtarfile(cls, tarfile):
-        """Return the next TarInfo object from TarFile object
-           tarfile.
-        """
-        buf = tarfile.fileobj.read(BLOCKSIZE)
-        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
-        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
-        return obj._proc_member(tarfile)
-
-    #--------------------------------------------------------------------------
-    # The following are methods that are called depending on the type of a
-    # member. The entry point is _proc_member() which can be overridden in a
-    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
-    # implement the following operations:
-    # 1. Set self.offset_data to the position where the data blocks begin,
-    #    if there is data that follows.
-    # 2. Set tarfile.offset to the position where the next member's header will
-    #    begin.
-    # 3. Return self or another valid TarInfo object.
-    def _proc_member(self, tarfile):
-        """Choose the right processing method depending on
-           the type and call it.
-        """
-        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
-            return self._proc_gnulong(tarfile)
-        elif self.type == GNUTYPE_SPARSE:
-            return self._proc_sparse(tarfile)
-        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
-            return self._proc_pax(tarfile)
-        else:
-            return self._proc_builtin(tarfile)
-
-    def _proc_builtin(self, tarfile):
-        """Process a builtin type or an unknown type which
-           will be treated as a regular file.
-        """
-        self.offset_data = tarfile.fileobj.tell()
-        offset = self.offset_data
-        if self.isreg() or self.type not in SUPPORTED_TYPES:
-            # Skip the following data blocks.
-            offset += self._block(self.size)
-        tarfile.offset = offset
-
-        # Patch the TarInfo object with saved global
-        # header information.
-        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
-
-        # Remove redundant slashes from directories. This is to be consistent
-        # with frombuf().
-        if self.isdir():
-            self.name = self.name.rstrip("/")
-
-        return self
-
-    def _proc_gnulong(self, tarfile):
-        """Process the blocks that hold a GNU longname
-           or longlink member.
-        """
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # Fetch the next header and process it.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError as e:
-            raise SubsequentHeaderError(str(e)) from None
-
-        # Patch the TarInfo object from the next header with
-        # the longname information.
-        next.offset = self.offset
-        if self.type == GNUTYPE_LONGNAME:
-            next.name = nts(buf, tarfile.encoding, tarfile.errors)
-        elif self.type == GNUTYPE_LONGLINK:
-            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
-
-        # Remove redundant slashes from directories. This is to be consistent
-        # with frombuf().
-        if next.isdir():
-            next.name = next.name.removesuffix("/")
-
-        return next
-
-    def _proc_sparse(self, tarfile):
-        """Process a GNU sparse header plus extra headers.
-        """
-        # We already collected some sparse structures in frombuf().
-        structs, isextended, origsize = self._sparse_structs
-        del self._sparse_structs
-
-        # Collect sparse structures from extended header blocks.
-        while isextended:
-            buf = tarfile.fileobj.read(BLOCKSIZE)
-            pos = 0
-            for i in range(21):
-                try:
-                    offset = nti(buf[pos:pos + 12])
-                    numbytes = nti(buf[pos + 12:pos + 24])
-                except ValueError:
-                    break
-                if offset and numbytes:
-                    structs.append((offset, numbytes))
-                pos += 24
-            isextended = bool(buf[504])
-        self.sparse = structs
-
-        self.offset_data = tarfile.fileobj.tell()
-        tarfile.offset = self.offset_data + self._block(self.size)
-        self.size = origsize
-        return self
-
-    def _proc_pax(self, tarfile):
-        """Process an extended or global header as described in
-           POSIX.1-2008.
-        """
-        # Read the header information.
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # A pax header stores supplemental information for either
-        # the following file (extended) or all following files
-        # (global).
-        if self.type == XGLTYPE:
-            pax_headers = tarfile.pax_headers
-        else:
-            pax_headers = tarfile.pax_headers.copy()
-
-        # Check if the pax header contains a hdrcharset field. This tells us
-        # the encoding of the path, linkpath, uname and gname fields. Normally,
-        # these fields are UTF-8 encoded, but POSIX.1-2008 allows tar
-        # implementations to store them as raw binary strings if the
-        # translation to UTF-8 fails.
-        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
-        if match is not None:
-            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
-
-        # For the time being, we don't care about anything other than "BINARY".
-        # The only other value currently allowed by the standard is
-        # "ISO-IR 10646 2000 UTF-8", in other words UTF-8.
-        hdrcharset = pax_headers.get("hdrcharset")
-        if hdrcharset == "BINARY":
-            encoding = tarfile.encoding
-        else:
-            encoding = "utf-8"
-
-        # Parse pax header information. A record looks like this:
-        # "%d %s=%s\n" % (length, keyword, value). length is the size
-        # of the complete record including the length field itself and
-        # the newline. keyword and value are both UTF-8 encoded strings.
-        regex = re.compile(br"(\d+) ([^=]+)=")
-        pos = 0
-        while match := regex.match(buf, pos):
-            length, keyword = match.groups()
-            length = int(length)
-            if length == 0:
-                raise InvalidHeaderError("invalid header")
-            value = buf[match.end(2) + 1:match.start(1) + length - 1]
-
-            # Normally, we could just use "utf-8" as the encoding and "strict"
-            # as the error handler, but we'd better not take the risk. For
-            # example, GNU tar <= 1.23 is known to store filenames it cannot
-            # translate to UTF-8 as raw strings (unfortunately without a
-            # hdrcharset=BINARY header).
-            # We first try the strict standard encoding, and if that fails we
-            # fall back on the user's encoding and error handler.
-            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
-                    tarfile.errors)
-            if keyword in PAX_NAME_FIELDS:
-                value = self._decode_pax_field(value, encoding, tarfile.encoding,
-                        tarfile.errors)
-            else:
-                value = self._decode_pax_field(value, "utf-8", "utf-8",
-                        tarfile.errors)
-
-            pax_headers[keyword] = value
-            pos += length
-
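-        # Editorial note (not part of the vendored module): a record's
-        # leading number counts the whole record, digits and newline
-        # included, which the slice above relies on, e.g.:
-        #
-        #     >>> buf = b"12 path=foo\n"
-        #     >>> length = int(buf.split(b" ", 1)[0])
-        #     >>> buf[buf.index(b"=") + 1:length - 1]
-        #     b'foo'
-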
-        # Fetch the next header.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError as e:
-            raise SubsequentHeaderError(str(e)) from None
-
-        # Process GNU sparse information.
-        if "GNU.sparse.map" in pax_headers:
-            # GNU extended sparse format version 0.1.
-            self._proc_gnusparse_01(next, pax_headers)
-
-        elif "GNU.sparse.size" in pax_headers:
-            # GNU extended sparse format version 0.0.
-            self._proc_gnusparse_00(next, pax_headers, buf)
-
-        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
-            # GNU extended sparse format version 1.0.
-            self._proc_gnusparse_10(next, pax_headers, tarfile)
-
-        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
-            # Patch the TarInfo object with the extended header info.
-            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
-            next.offset = self.offset
-
-            if "size" in pax_headers:
-                # If the extended header replaces the size field,
-                # we need to recalculate the offset where the next
-                # header starts.
-                offset = next.offset_data
-                if next.isreg() or next.type not in SUPPORTED_TYPES:
-                    offset += next._block(next.size)
-                tarfile.offset = offset
-
-        return next
-
-    def _proc_gnusparse_00(self, next, pax_headers, buf):
-        """Process a GNU tar extended sparse header, version 0.0.
-        """
-        offsets = []
-        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
-            offsets.append(int(match.group(1)))
-        numbytes = []
-        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
-            numbytes.append(int(match.group(1)))
-        next.sparse = list(zip(offsets, numbytes))
-
-    def _proc_gnusparse_01(self, next, pax_headers):
-        """Process a GNU tar extended sparse header, version 0.1.
-        """
-        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
-        next.sparse = list(zip(sparse[::2], sparse[1::2]))
-
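-    # Editorial note (not part of the vendored module): the 0.1 sparse map is
-    # a flat comma-separated list of alternating offsets and sizes, e.g.:
-    #
-    #     >>> sparse = [int(x) for x in "0,512,4096,512".split(",")]
-    #     >>> list(zip(sparse[::2], sparse[1::2]))
-    #     [(0, 512), (4096, 512)]
-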
-    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
-        """Process a GNU tar extended sparse header, version 1.0.
-        """
-        fields = None
-        sparse = []
-        buf = tarfile.fileobj.read(BLOCKSIZE)
-        fields, buf = buf.split(b"\n", 1)
-        fields = int(fields)
-        while len(sparse) < fields * 2:
-            if b"\n" not in buf:
-                buf += tarfile.fileobj.read(BLOCKSIZE)
-            number, buf = buf.split(b"\n", 1)
-            sparse.append(int(number))
-        next.offset_data = tarfile.fileobj.tell()
-        next.sparse = list(zip(sparse[::2], sparse[1::2]))
-
-    def _apply_pax_info(self, pax_headers, encoding, errors):
-        """Replace fields with supplemental information from a previous
-           pax extended or global header.
-        """
-        for keyword, value in pax_headers.items():
-            if keyword == "GNU.sparse.name":
-                setattr(self, "path", value)
-            elif keyword == "GNU.sparse.size":
-                setattr(self, "size", int(value))
-            elif keyword == "GNU.sparse.realsize":
-                setattr(self, "size", int(value))
-            elif keyword in PAX_FIELDS:
-                if keyword in PAX_NUMBER_FIELDS:
-                    try:
-                        value = PAX_NUMBER_FIELDS[keyword](value)
-                    except ValueError:
-                        value = 0
-                if keyword == "path":
-                    value = value.rstrip("/")
-                setattr(self, keyword, value)
-
-        self.pax_headers = pax_headers.copy()
-
-    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
-        """Decode a single field from a pax record.
-        """
-        try:
-            return value.decode(encoding, "strict")
-        except UnicodeDecodeError:
-            return value.decode(fallback_encoding, fallback_errors)
-
-    def _block(self, count):
-        """Round up a byte count by BLOCKSIZE and return it,
-           e.g. _block(834) => 1024.
-        """
-        blocks, remainder = divmod(count, BLOCKSIZE)
-        if remainder:
-            blocks += 1
-        return blocks * BLOCKSIZE
-
-    def isreg(self):
-        'Return True if the TarInfo object is a regular file.'
-        return self.type in REGULAR_TYPES
-
-    def isfile(self):
-        'Return True if the TarInfo object is a regular file.'
-        return self.isreg()
-
-    def isdir(self):
-        'Return True if it is a directory.'
-        return self.type == DIRTYPE
-
-    def issym(self):
-        'Return True if it is a symbolic link.'
-        return self.type == SYMTYPE
-
-    def islnk(self):
-        'Return True if it is a hard link.'
-        return self.type == LNKTYPE
-
-    def ischr(self):
-        'Return True if it is a character device.'
-        return self.type == CHRTYPE
-
-    def isblk(self):
-        'Return True if it is a block device.'
-        return self.type == BLKTYPE
-
-    def isfifo(self):
-        'Return True if it is a FIFO.'
-        return self.type == FIFOTYPE
-
-    def issparse(self):
-        return self.sparse is not None
-
-    def isdev(self):
-        'Return True if it is one of character device, block device or FIFO.'
-        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
-# class TarInfo
-
-class TarFile(object):
-    """The TarFile Class provides an interface to tar archives.
-    """
-
-    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
-
-    dereference = False         # If true, add content of linked file to the
-                                # tar file, else the link.
-
-    ignore_zeros = False        # If true, skips empty or invalid blocks and
-                                # continues processing.
-
-    errorlevel = 1              # If 0, fatal errors only appear in debug
-                                # messages (if debug >= 0). If > 0, errors
-                                # are passed to the caller as exceptions.
-
-    format = DEFAULT_FORMAT     # The format to use when creating an archive.
-
-    encoding = ENCODING         # Encoding for 8-bit character strings.
-
-    errors = None               # Error handler for unicode conversion.
-
-    tarinfo = TarInfo           # The default TarInfo class to use.
-
-    fileobject = ExFileObject   # The file-object for extractfile().
-
-    extraction_filter = None    # The default filter for extraction.
-
-    def __init__(self, name=None, mode="r", fileobj=None, format=None,
-            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
-            errors="surrogateescape", pax_headers=None, debug=None,
-            errorlevel=None, copybufsize=None):
-        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
-           read from an existing archive, 'a' to append data to an existing
-           file or 'w' to create a new file overwriting an existing one. `mode'
-           defaults to 'r'.
-           If `fileobj' is given, it is used for reading or writing data. If it
-           can be determined, `mode' is overridden by `fileobj's mode.
-           `fileobj' is not closed, when TarFile is closed.
-        """
-        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
-        if mode not in modes:
-            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
-        self.mode = mode
-        self._mode = modes[mode]
-
-        if not fileobj:
-            if self.mode == "a" and not os.path.exists(name):
-                # Create nonexistent files in append mode.
-                self.mode = "w"
-                self._mode = "wb"
-            fileobj = bltn_open(name, self._mode)
-            self._extfileobj = False
-        else:
-            if (name is None and hasattr(fileobj, "name") and
-                isinstance(fileobj.name, (str, bytes))):
-                name = fileobj.name
-            if hasattr(fileobj, "mode"):
-                self._mode = fileobj.mode
-            self._extfileobj = True
-        self.name = os.path.abspath(name) if name else None
-        self.fileobj = fileobj
-
-        # Init attributes.
-        if format is not None:
-            self.format = format
-        if tarinfo is not None:
-            self.tarinfo = tarinfo
-        if dereference is not None:
-            self.dereference = dereference
-        if ignore_zeros is not None:
-            self.ignore_zeros = ignore_zeros
-        if encoding is not None:
-            self.encoding = encoding
-        self.errors = errors
-
-        if pax_headers is not None and self.format == PAX_FORMAT:
-            self.pax_headers = pax_headers
-        else:
-            self.pax_headers = {}
-
-        if debug is not None:
-            self.debug = debug
-        if errorlevel is not None:
-            self.errorlevel = errorlevel
-
-        # Init datastructures.
-        self.copybufsize = copybufsize
-        self.closed = False
-        self.members = []       # list of members as TarInfo objects
-        self._loaded = False    # flag if all members have been read
-        self.offset = self.fileobj.tell()
-                                # current position in the archive file
-        self.inodes = {}        # dictionary caching the inodes of
-                                # archive members already added
-
-        try:
-            if self.mode == "r":
-                self.firstmember = None
-                self.firstmember = self.next()
-
-            if self.mode == "a":
-                # Move to the end of the archive,
-                # before the first empty block.
-                while True:
-                    self.fileobj.seek(self.offset)
-                    try:
-                        tarinfo = self.tarinfo.fromtarfile(self)
-                        self.members.append(tarinfo)
-                    except EOFHeaderError:
-                        self.fileobj.seek(self.offset)
-                        break
-                    except HeaderError as e:
-                        raise ReadError(str(e)) from None
-
-            if self.mode in ("a", "w", "x"):
-                self._loaded = True
-
-                if self.pax_headers:
-                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
-                    self.fileobj.write(buf)
-                    self.offset += len(buf)
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    #--------------------------------------------------------------------------
-    # Below are the classmethods which act as alternate constructors to the
-    # TarFile class. The open() method is the only one that is needed for
-    # public use; it is the "super"-constructor and is able to select an
-    # adequate "sub"-constructor for a particular compression using the mapping
-    # from OPEN_METH.
-    #
-    # This concept allows one to subclass TarFile without losing the comfort of
-    # the super-constructor. A sub-constructor is registered and made available
-    # by adding it to the mapping in OPEN_METH.
-
-    @classmethod
-    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
-        r"""Open a tar archive for reading, writing or appending. Return
-           an appropriate TarFile class.
-
-           mode:
-           'r' or 'r:*' open for reading with transparent compression
-           'r:'         open for reading exclusively uncompressed
-           'r:gz'       open for reading with gzip compression
-           'r:bz2'      open for reading with bzip2 compression
-           'r:xz'       open for reading with lzma compression
-           'a' or 'a:'  open for appending, creating the file if necessary
-           'w' or 'w:'  open for writing without compression
-           'w:gz'       open for writing with gzip compression
-           'w:bz2'      open for writing with bzip2 compression
-           'w:xz'       open for writing with lzma compression
-
-           'x' or 'x:'  create a tarfile exclusively without compression, raise
-                        an exception if the file is already created
-           'x:gz'       create a gzip compressed tarfile, raise an exception
-                        if the file is already created
-           'x:bz2'      create a bzip2 compressed tarfile, raise an exception
-                        if the file is already created
-           'x:xz'       create an lzma compressed tarfile, raise an exception
-                        if the file is already created
-
-           'r|*'        open a stream of tar blocks with transparent compression
-           'r|'         open an uncompressed stream of tar blocks for reading
-           'r|gz'       open a gzip compressed stream of tar blocks
-           'r|bz2'      open a bzip2 compressed stream of tar blocks
-           'r|xz'       open an lzma compressed stream of tar blocks
-           'w|'         open an uncompressed stream for writing
-           'w|gz'       open a gzip compressed stream for writing
-           'w|bz2'      open a bzip2 compressed stream for writing
-           'w|xz'       open an lzma compressed stream for writing
-        """
-
-        if not name and not fileobj:
-            raise ValueError("nothing to open")
-
-        if mode in ("r", "r:*"):
-            # Find out which *open() is appropriate for opening the file.
-            def not_compressed(comptype):
-                return cls.OPEN_METH[comptype] == 'taropen'
-            error_msgs = []
-            for comptype in sorted(cls.OPEN_METH, key=not_compressed):
-                func = getattr(cls, cls.OPEN_METH[comptype])
-                if fileobj is not None:
-                    saved_pos = fileobj.tell()
-                try:
-                    return func(name, "r", fileobj, **kwargs)
-                except (ReadError, CompressionError) as e:
-                    error_msgs.append(f'- method {comptype}: {e!r}')
-                    if fileobj is not None:
-                        fileobj.seek(saved_pos)
-                    continue
-            error_msgs_summary = '\n'.join(error_msgs)
-            raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
-
-        elif ":" in mode:
-            filemode, comptype = mode.split(":", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            # Select the *open() function according to
-            # given compression.
-            if comptype in cls.OPEN_METH:
-                func = getattr(cls, cls.OPEN_METH[comptype])
-            else:
-                raise CompressionError("unknown compression type %r" % comptype)
-            return func(name, filemode, fileobj, **kwargs)
-
-        elif "|" in mode:
-            filemode, comptype = mode.split("|", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            if filemode not in ("r", "w"):
-                raise ValueError("mode must be 'r' or 'w'")
-
-            compresslevel = kwargs.pop("compresslevel", 9)
-            stream = _Stream(name, filemode, comptype, fileobj, bufsize,
-                             compresslevel)
-            try:
-                t = cls(name, filemode, stream, **kwargs)
-            except:
-                stream.close()
-                raise
-            t._extfileobj = False
-            return t
-
-        elif mode in ("a", "w", "x"):
-            return cls.taropen(name, mode, fileobj, **kwargs)
-
-        raise ValueError("undiscernible mode")
-
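-    # Editorial sketch (not part of the vendored module): typical use of the
-    # mode grammar documented above, assuming "example.tar.gz" exists:
-    #
-    #     >>> tf = TarFile.open("example.tar.gz")  # "r" probes each OPEN_METH
-    #     >>> tf.close()
-    #     >>> tf = TarFile.open("example.tar.gz", "r:gz")  # explicit gzip
-    #     >>> tf.close()
-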
-    @classmethod
-    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
-        """Open uncompressed tar archive name for reading or writing.
-        """
-        if mode not in ("r", "a", "w", "x"):
-            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
-        return cls(name, mode, fileobj, **kwargs)
-
-    @classmethod
-    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open gzip compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if mode not in ("r", "w", "x"):
-            raise ValueError("mode must be 'r', 'w' or 'x'")
-
-        try:
-            from gzip import GzipFile
-        except ImportError:
-            raise CompressionError("gzip module is not available") from None
-
-        try:
-            fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
-        except OSError as e:
-            if fileobj is not None and mode == 'r':
-                raise ReadError("not a gzip file") from e
-            raise
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except OSError as e:
-            fileobj.close()
-            if mode == 'r':
-                raise ReadError("not a gzip file") from e
-            raise
-        except:
-            fileobj.close()
-            raise
-        t._extfileobj = False
-        return t
-
-    @classmethod
-    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open bzip2 compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if mode not in ("r", "w", "x"):
-            raise ValueError("mode must be 'r', 'w' or 'x'")
-
-        try:
-            from bz2 import BZ2File
-        except ImportError:
-            raise CompressionError("bz2 module is not available") from None
-
-        fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except (OSError, EOFError) as e:
-            fileobj.close()
-            if mode == 'r':
-                raise ReadError("not a bzip2 file") from e
-            raise
-        except:
-            fileobj.close()
-            raise
-        t._extfileobj = False
-        return t
-
-    @classmethod
-    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
-        """Open lzma compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if mode not in ("r", "w", "x"):
-            raise ValueError("mode must be 'r', 'w' or 'x'")
-
-        try:
-            from lzma import LZMAFile, LZMAError
-        except ImportError:
-            raise CompressionError("lzma module is not available") from None
-
-        fileobj = LZMAFile(fileobj or name, mode, preset=preset)
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except (LZMAError, EOFError) as e:
-            fileobj.close()
-            if mode == 'r':
-                raise ReadError("not an lzma file") from e
-            raise
-        except:
-            fileobj.close()
-            raise
-        t._extfileobj = False
-        return t
-
-    # All *open() methods are registered here.
-    OPEN_METH = {
-        "tar": "taropen",   # uncompressed tar
-        "gz":  "gzopen",    # gzip compressed tar
-        "bz2": "bz2open",   # bzip2 compressed tar
-        "xz":  "xzopen"     # lzma compressed tar
-    }
-
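-    # Editorial sketch (not part of the vendored module): per the comment
-    # above open(), a subclass can register another compression by extending
-    # this mapping; the "zst"/zstopen names below are hypothetical:
-    #
-    #     class ZstTarFile(TarFile):
-    #         OPEN_METH = {**TarFile.OPEN_METH, "zst": "zstopen"}
-    #
-    #         @classmethod
-    #         def zstopen(cls, name, mode="r", fileobj=None, **kwargs):
-    #             ...  # wrap fileobj in a zstandard stream, then cls.taropen()
-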
-    #--------------------------------------------------------------------------
-    # The public methods which TarFile provides:
-
-    def close(self):
-        """Close the TarFile. In write-mode, two finishing zero blocks are
-           appended to the archive.
-        """
-        if self.closed:
-            return
-
-        self.closed = True
-        try:
-            if self.mode in ("a", "w", "x"):
-                self.fileobj.write(NUL * (BLOCKSIZE * 2))
-                self.offset += (BLOCKSIZE * 2)
-                # fill up the end with zero-blocks
-                # (like option -b20 for tar does)
-                blocks, remainder = divmod(self.offset, RECORDSIZE)
-                if remainder > 0:
-                    self.fileobj.write(NUL * (RECORDSIZE - remainder))
-        finally:
-            if not self._extfileobj:
-                self.fileobj.close()
-
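-    # Editorial note (not part of the vendored module): RECORDSIZE is
-    # 20 * BLOCKSIZE == 10240 bytes, so the padding above rounds the archive
-    # up to tar's default blocking factor, like `tar -b20` does.
-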
-    def getmember(self, name):
-        """Return a TarInfo object for member ``name``. If ``name`` can not be
-           found in the archive, KeyError is raised. If a member occurs more
-           than once in the archive, its last occurrence is assumed to be the
-           most up-to-date version.
-        """
-        tarinfo = self._getmember(name.rstrip('/'))
-        if tarinfo is None:
-            raise KeyError("filename %r not found" % name)
-        return tarinfo
-
-    def getmembers(self):
-        """Return the members of the archive as a list of TarInfo objects. The
-           list has the same order as the members in the archive.
-        """
-        self._check()
-        if not self._loaded:    # if we want to obtain a list of
-            self._load()        # all members, we first have to
-                                # scan the whole archive.
-        return self.members
-
-    def getnames(self):
-        """Return the members of the archive as a list of their names. It has
-           the same order as the list returned by getmembers().
-        """
-        return [tarinfo.name for tarinfo in self.getmembers()]
-
-    def gettarinfo(self, name=None, arcname=None, fileobj=None):
-        """Create a TarInfo object from the result of os.stat or equivalent
-           on an existing file. The file is either named by ``name``, or
-           specified as a file object ``fileobj`` with a file descriptor. If
-           given, ``arcname`` specifies an alternative name for the file in the
-           archive, otherwise, the name is taken from the 'name' attribute of
-           'fileobj', or the 'name' argument. The name should be a text
-           string.
-        """
-        self._check("awx")
-
-        # When fileobj is given, replace name by
-        # fileobj's real name.
-        if fileobj is not None:
-            name = fileobj.name
-
-        # Build the name of the member in the archive.
-        # Backslashes are converted to forward slashes and
-        # absolute paths are turned into relative paths.
-        if arcname is None:
-            arcname = name
-        drv, arcname = os.path.splitdrive(arcname)
-        arcname = arcname.replace(os.sep, "/")
-        arcname = arcname.lstrip("/")
-
-        # Now, fill the TarInfo object with
-        # information specific for the file.
-        tarinfo = self.tarinfo()
-        tarinfo.tarfile = self  # Not needed
-
-        # Use os.stat or os.lstat, depending on whether symlinks should be resolved.
-        if fileobj is None:
-            if not self.dereference:
-                statres = os.lstat(name)
-            else:
-                statres = os.stat(name)
-        else:
-            statres = os.fstat(fileobj.fileno())
-        linkname = ""
-
-        stmd = statres.st_mode
-        if stat.S_ISREG(stmd):
-            inode = (statres.st_ino, statres.st_dev)
-            if not self.dereference and statres.st_nlink > 1 and \
-                    inode in self.inodes and arcname != self.inodes[inode]:
-                # Is it a hardlink to an already
-                # archived file?
-                type = LNKTYPE
-                linkname = self.inodes[inode]
-            else:
-                # The inode is added only if it is valid.
-                # For win32 it is always 0.
-                type = REGTYPE
-                if inode[0]:
-                    self.inodes[inode] = arcname
-        elif stat.S_ISDIR(stmd):
-            type = DIRTYPE
-        elif stat.S_ISFIFO(stmd):
-            type = FIFOTYPE
-        elif stat.S_ISLNK(stmd):
-            type = SYMTYPE
-            linkname = os.readlink(name)
-        elif stat.S_ISCHR(stmd):
-            type = CHRTYPE
-        elif stat.S_ISBLK(stmd):
-            type = BLKTYPE
-        else:
-            return None
-
-        # Fill the TarInfo object with all
-        # information we can get.
-        tarinfo.name = arcname
-        tarinfo.mode = stmd
-        tarinfo.uid = statres.st_uid
-        tarinfo.gid = statres.st_gid
-        if type == REGTYPE:
-            tarinfo.size = statres.st_size
-        else:
-            tarinfo.size = 0
-        tarinfo.mtime = statres.st_mtime
-        tarinfo.type = type
-        tarinfo.linkname = linkname
-        if pwd:
-            try:
-                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
-            except KeyError:
-                pass
-        if grp:
-            try:
-                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
-            except KeyError:
-                pass
-
-        if type in (CHRTYPE, BLKTYPE):
-            if hasattr(os, "major") and hasattr(os, "minor"):
-                tarinfo.devmajor = os.major(statres.st_rdev)
-                tarinfo.devminor = os.minor(statres.st_rdev)
-        return tarinfo
-
-    def list(self, verbose=True, *, members=None):
-        """Print a table of contents to sys.stdout. If ``verbose`` is False, only
-           the names of the members are printed. If it is True, an `ls -l'-like
-           output is produced. ``members`` is optional and must be a subset of the
-           list returned by getmembers().
-        """
-        self._check()
-
-        if members is None:
-            members = self
-        for tarinfo in members:
-            if verbose:
-                if tarinfo.mode is None:
-                    _safe_print("??????????")
-                else:
-                    _safe_print(stat.filemode(tarinfo.mode))
-                _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
-                                       tarinfo.gname or tarinfo.gid))
-                if tarinfo.ischr() or tarinfo.isblk():
-                    _safe_print("%10s" %
-                            ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
-                else:
-                    _safe_print("%10d" % tarinfo.size)
-                if tarinfo.mtime is None:
-                    _safe_print("????-??-?? ??:??:??")
-                else:
-                    _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
-                                % time.localtime(tarinfo.mtime)[:6])
-
-            _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
-
-            if verbose:
-                if tarinfo.issym():
-                    _safe_print("-> " + tarinfo.linkname)
-                if tarinfo.islnk():
-                    _safe_print("link to " + tarinfo.linkname)
-            print()
-
-    def add(self, name, arcname=None, recursive=True, *, filter=None):
-        """Add the file ``name`` to the archive. ``name`` may be any type of file
-           (directory, fifo, symbolic link, etc.). If given, ``arcname``
-           specifies an alternative name for the file in the archive.
-           Directories are added recursively by default. This can be avoided by
-           setting ``recursive`` to False. ``filter`` is a function
-           that expects a TarInfo object argument and returns the changed
-           TarInfo object; if it returns None, the TarInfo object will be
-           excluded from the archive.
-        """
-        self._check("awx")
-
-        if arcname is None:
-            arcname = name
-
-        # Skip if somebody tries to archive the archive...
-        if self.name is not None and os.path.abspath(name) == self.name:
-            self._dbg(2, "tarfile: Skipped %r" % name)
-            return
-
-        self._dbg(1, name)
-
-        # Create a TarInfo object from the file.
-        tarinfo = self.gettarinfo(name, arcname)
-
-        if tarinfo is None:
-            self._dbg(1, "tarfile: Unsupported type %r" % name)
-            return
-
-        # Change or exclude the TarInfo object.
-        if filter is not None:
-            tarinfo = filter(tarinfo)
-            if tarinfo is None:
-                self._dbg(2, "tarfile: Excluded %r" % name)
-                return
-
-        # Append the tar header and data to the archive.
-        if tarinfo.isreg():
-            with bltn_open(name, "rb") as f:
-                self.addfile(tarinfo, f)
-
-        elif tarinfo.isdir():
-            self.addfile(tarinfo)
-            if recursive:
-                for f in sorted(os.listdir(name)):
-                    self.add(os.path.join(name, f), os.path.join(arcname, f),
-                            recursive, filter=filter)
-
-        else:
-            self.addfile(tarinfo)
-
-    def addfile(self, tarinfo, fileobj=None):
-        """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
-           given, it should be a binary file, and tarinfo.size bytes are read
-           from it and added to the archive. You can create TarInfo objects
-           directly, or by using gettarinfo().
-        """
-        self._check("awx")
-
-        tarinfo = copy.copy(tarinfo)
-
-        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
-        self.fileobj.write(buf)
-        self.offset += len(buf)
-        bufsize = self.copybufsize
-        # If there's data to follow, append it.
-        if fileobj is not None:
-            copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
-            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
-            if remainder > 0:
-                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
-                blocks += 1
-            self.offset += blocks * BLOCKSIZE
-
-        self.members.append(tarinfo)
-
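-    # Editorial sketch (not part of the vendored module): addfile() with a
-    # hand-built TarInfo can archive in-memory data; tf is assumed to be a
-    # TarFile opened with mode "w":
-    #
-    #     >>> import io
-    #     >>> data = b"hello"
-    #     >>> ti = TarInfo(name="hello.txt")
-    #     >>> ti.size = len(data)
-    #     >>> tf.addfile(ti, io.BytesIO(data))
-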
-    def _get_filter_function(self, filter):
-        if filter is None:
-            filter = self.extraction_filter
-            if filter is None:
-                warnings.warn(
-                    'Python 3.14 will, by default, filter extracted tar '
-                    + 'archives and reject files or modify their metadata. '
-                    + 'Use the filter argument to control this behavior.',
-                    DeprecationWarning)
-                return fully_trusted_filter
-            if isinstance(filter, str):
-                raise TypeError(
-                    'String names are not supported for '
-                    + 'TarFile.extraction_filter. Use a function such as '
-                    + 'tarfile.data_filter directly.')
-            return filter
-        if callable(filter):
-            return filter
-        try:
-            return _NAMED_FILTERS[filter]
-        except KeyError:
-            raise ValueError(f"filter {filter!r} not found") from None
-
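-    # Editorial sketch (not part of the vendored module): a filter may be
-    # passed per call by name or as a callable, or installed as the default;
-    # "data" is assumed to be a key of _NAMED_FILTERS and data_filter the
-    # corresponding function defined elsewhere in this module:
-    #
-    #     >>> tf.extractall(path="dest", filter="data")
-    #     >>> TarFile.extraction_filter = staticmethod(data_filter)
-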
-    def extractall(self, path=".", members=None, *, numeric_owner=False,
-                   filter=None):
-        """Extract all members from the archive to the current working
-           directory and set owner, modification time and permissions on
-           directories afterwards. `path' specifies a different directory
-           to extract to. `members' is optional and must be a subset of the
-           list returned by getmembers(). If `numeric_owner` is True, only
-           the numbers for user/group names are used and not the names.
-
-           The `filter` function will be called on each member just
-           before extraction.
-           It can return a changed TarInfo or None to skip the member.
-           String names of common filters are accepted.
-        """
-        directories = []
-
-        filter_function = self._get_filter_function(filter)
-        if members is None:
-            members = self
-
-        for member in members:
-            tarinfo = self._get_extract_tarinfo(member, filter_function, path)
-            if tarinfo is None:
-                continue
-            if tarinfo.isdir():
-                # For directories, delay setting attributes until later,
-                # since permissions can interfere with extraction and
-                # extracting contents can reset mtime.
-                directories.append(tarinfo)
-            self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
-                              numeric_owner=numeric_owner)
-
-        # Reverse sort directories.
-        directories.sort(key=lambda a: a.name, reverse=True)
-
-        # Set correct owner, mtime and filemode on directories.
-        for tarinfo in directories:
-            dirpath = os.path.join(path, tarinfo.name)
-            try:
-                self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
-                self.utime(tarinfo, dirpath)
-                self.chmod(tarinfo, dirpath)
-            except ExtractError as e:
-                self._handle_nonfatal_error(e)
-
-    def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
-                filter=None):
-        """Extract a member from the archive to the current working directory,
-           using its full name. Its file information is extracted as accurately
-           as possible. `member' may be a filename or a TarInfo object. You can
-           specify a different directory using `path'. File attributes (owner,
-           mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
-           is True, only the numbers for user/group names are used and not
-           the names.
-
-           The `filter` function will be called before extraction.
-           It can return a changed TarInfo or None to skip the member.
-           String names of common filters are accepted.
-        """
-        filter_function = self._get_filter_function(filter)
-        tarinfo = self._get_extract_tarinfo(member, filter_function, path)
-        if tarinfo is not None:
-            self._extract_one(tarinfo, path, set_attrs, numeric_owner)
-
-    def _get_extract_tarinfo(self, member, filter_function, path):
-        """Get filtered TarInfo (or None) from member, which might be a str"""
-        if isinstance(member, str):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        unfiltered = tarinfo
-        try:
-            tarinfo = filter_function(tarinfo, path)
-        except (OSError, FilterError) as e:
-            self._handle_fatal_error(e)
-        except ExtractError as e:
-            self._handle_nonfatal_error(e)
-        if tarinfo is None:
-            self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
-            return None
-        # Prepare the link target for makelink().
-        if tarinfo.islnk():
-            tarinfo = copy.copy(tarinfo)
-            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
-        return tarinfo
-
-    def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
-        """Extract from filtered tarinfo to disk"""
-        self._check("r")
-
-        try:
-            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
-                                 set_attrs=set_attrs,
-                                 numeric_owner=numeric_owner)
-        except OSError as e:
-            self._handle_fatal_error(e)
-        except ExtractError as e:
-            self._handle_nonfatal_error(e)
-
-    def _handle_nonfatal_error(self, e):
-        """Handle non-fatal error (ExtractError) according to errorlevel"""
-        if self.errorlevel > 1:
-            raise
-        else:
-            self._dbg(1, "tarfile: %s" % e)
-
-    def _handle_fatal_error(self, e):
-        """Handle "fatal" error according to self.errorlevel"""
-        if self.errorlevel > 0:
-            raise
-        elif isinstance(e, OSError):
-            if e.filename is None:
-                self._dbg(1, "tarfile: %s" % e.strerror)
-            else:
-                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
-        else:
-            self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
-
-    def extractfile(self, member):
-        """Extract a member from the archive as a file object. ``member`` may be
-           a filename or a TarInfo object. If ``member`` is a regular file or
-           a link, an io.BufferedReader object is returned. For all other
-           existing members, None is returned. If ``member`` does not appear
-           in the archive, KeyError is raised.
-        """
-        self._check("r")
-
-        if isinstance(member, str):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
-            # Members with unknown types are treated as regular files.
-            return self.fileobject(self, tarinfo)
-
-        elif tarinfo.islnk() or tarinfo.issym():
-            if isinstance(self.fileobj, _Stream):
-                # A small but ugly workaround for the case where someone tries
-                # to extract a (sym)link as a file-object from a non-seekable
-                # stream of tar blocks.
-                raise StreamError("cannot extract (sym)link as file object")
-            else:
-                # A (sym)link's file object is its target's file object.
-                return self.extractfile(self._find_link_target(tarinfo))
-        else:
-            # If there's no data associated with the member (directory, chrdev,
-            # blkdev, etc.), return None instead of a file object.
-            return None
-
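-    # Editorial sketch (not part of the vendored module): reading a member
-    # without writing it to disk; extractfile() returns None for members
-    # that carry no data (directories, devices):
-    #
-    #     >>> f = tf.extractfile("pkg/data.txt")
-    #     >>> content = f.read() if f is not None else None
-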
-    def _extract_member(self, tarinfo, targetpath, set_attrs=True,
-                        numeric_owner=False):
-        """Extract the TarInfo object tarinfo to a physical
-           file called targetpath.
-        """
-        # Fetch the TarInfo object for the given name
-        # and build the destination pathname, replacing
-        # forward slashes with platform-specific separators.
-        targetpath = targetpath.rstrip("/")
-        targetpath = targetpath.replace("/", os.sep)
-
-        # Create all upper directories.
-        upperdirs = os.path.dirname(targetpath)
-        if upperdirs and not os.path.exists(upperdirs):
-            # Create directories that are not part of the archive with
-            # default permissions.
-            os.makedirs(upperdirs)
-
-        if tarinfo.islnk() or tarinfo.issym():
-            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
-        else:
-            self._dbg(1, tarinfo.name)
-
-        if tarinfo.isreg():
-            self.makefile(tarinfo, targetpath)
-        elif tarinfo.isdir():
-            self.makedir(tarinfo, targetpath)
-        elif tarinfo.isfifo():
-            self.makefifo(tarinfo, targetpath)
-        elif tarinfo.ischr() or tarinfo.isblk():
-            self.makedev(tarinfo, targetpath)
-        elif tarinfo.islnk() or tarinfo.issym():
-            self.makelink(tarinfo, targetpath)
-        elif tarinfo.type not in SUPPORTED_TYPES:
-            self.makeunknown(tarinfo, targetpath)
-        else:
-            self.makefile(tarinfo, targetpath)
-
-        if set_attrs:
-            self.chown(tarinfo, targetpath, numeric_owner)
-            if not tarinfo.issym():
-                self.chmod(tarinfo, targetpath)
-                self.utime(tarinfo, targetpath)
-
-    #--------------------------------------------------------------------------
-    # Below are the different file methods. They are called via
-    # _extract_member() when extract() is called. They can be replaced in a
-    # subclass to implement other functionality.
-
-    def makedir(self, tarinfo, targetpath):
-        """Make a directory called targetpath.
-        """
-        try:
-            if tarinfo.mode is None:
-                # Use the system's default mode
-                os.mkdir(targetpath)
-            else:
-                # Use a safe mode for the directory, the real mode is set
-                # later in _extract_member().
-                os.mkdir(targetpath, 0o700)
-        except FileExistsError:
-            if not os.path.isdir(targetpath):
-                raise
-
-    def makefile(self, tarinfo, targetpath):
-        """Make a file called targetpath.
-        """
-        source = self.fileobj
-        source.seek(tarinfo.offset_data)
-        bufsize = self.copybufsize
-        with bltn_open(targetpath, "wb") as target:
-            if tarinfo.sparse is not None:
-                for offset, size in tarinfo.sparse:
-                    target.seek(offset)
-                    copyfileobj(source, target, size, ReadError, bufsize)
-                target.seek(tarinfo.size)
-                target.truncate()
-            else:
-                copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
-
-    def makeunknown(self, tarinfo, targetpath):
-        """Make a file from a TarInfo object with an unknown type
-           at targetpath.
-        """
-        self.makefile(tarinfo, targetpath)
-        self._dbg(1, "tarfile: Unknown file type %r, " \
-                     "extracted as regular file." % tarinfo.type)
-
-    def makefifo(self, tarinfo, targetpath):
-        """Make a fifo called targetpath.
-        """
-        if hasattr(os, "mkfifo"):
-            os.mkfifo(targetpath)
-        else:
-            raise ExtractError("fifo not supported by system")
-
-    def makedev(self, tarinfo, targetpath):
-        """Make a character or block device called targetpath.
-        """
-        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
-            raise ExtractError("special devices not supported by system")
-
-        mode = tarinfo.mode
-        if mode is None:
-            # Use mknod's default
-            mode = 0o600
-        if tarinfo.isblk():
-            mode |= stat.S_IFBLK
-        else:
-            mode |= stat.S_IFCHR
-
-        os.mknod(targetpath, mode,
-                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
-
-    def makelink(self, tarinfo, targetpath):
-        """Make a (symbolic) link called targetpath. If it cannot be created
-          (platform limitation), we try to make a copy of the referenced file
-          instead of a link.
-        """
-        try:
-            # For systems that support symbolic and hard links.
-            if tarinfo.issym():
-                if os.path.lexists(targetpath):
-                    # Avoid FileExistsError on following os.symlink.
-                    os.unlink(targetpath)
-                os.symlink(tarinfo.linkname, targetpath)
-            else:
-                if os.path.exists(tarinfo._link_target):
-                    os.link(tarinfo._link_target, targetpath)
-                else:
-                    self._extract_member(self._find_link_target(tarinfo),
-                                         targetpath)
-        except symlink_exception:
-            try:
-                self._extract_member(self._find_link_target(tarinfo),
-                                     targetpath)
-            except KeyError:
-                raise ExtractError("unable to resolve link inside archive") from None
-
-    def chown(self, tarinfo, targetpath, numeric_owner):
-        """Set owner of targetpath according to tarinfo. If numeric_owner
-           is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
-           is False, fall back to .gid/.uid when the search based on name
-           fails.
-        """
-        if hasattr(os, "geteuid") and os.geteuid() == 0:
-            # We have to be root to do so.
-            g = tarinfo.gid
-            u = tarinfo.uid
-            if not numeric_owner:
-                try:
-                    if grp and tarinfo.gname:
-                        g = grp.getgrnam(tarinfo.gname)[2]
-                except KeyError:
-                    pass
-                try:
-                    if pwd and tarinfo.uname:
-                        u = pwd.getpwnam(tarinfo.uname)[2]
-                except KeyError:
-                    pass
-            if g is None:
-                g = -1
-            if u is None:
-                u = -1
-            try:
-                if tarinfo.issym() and hasattr(os, "lchown"):
-                    os.lchown(targetpath, u, g)
-                else:
-                    os.chown(targetpath, u, g)
-            except OSError as e:
-                raise ExtractError("could not change owner") from e
-
-    def chmod(self, tarinfo, targetpath):
-        """Set file permissions of targetpath according to tarinfo.
-        """
-        if tarinfo.mode is None:
-            return
-        try:
-            os.chmod(targetpath, tarinfo.mode)
-        except OSError as e:
-            raise ExtractError("could not change mode") from e
-
-    def utime(self, tarinfo, targetpath):
-        """Set modification time of targetpath according to tarinfo.
-        """
-        mtime = tarinfo.mtime
-        if mtime is None:
-            return
-        if not hasattr(os, 'utime'):
-            return
-        try:
-            os.utime(targetpath, (mtime, mtime))
-        except OSError as e:
-            raise ExtractError("could not change modification time") from e
-
-    #--------------------------------------------------------------------------
-    def next(self):
-        """Return the next member of the archive as a TarInfo object, when
-           TarFile is opened for reading. Return None if there is no more
-           available.
-        """
-        self._check("ra")
-        if self.firstmember is not None:
-            m = self.firstmember
-            self.firstmember = None
-            return m
-
-        # Advance the file pointer.
-        if self.offset != self.fileobj.tell():
-            if self.offset == 0:
-                return None
-            self.fileobj.seek(self.offset - 1)
-            if not self.fileobj.read(1):
-                raise ReadError("unexpected end of data")
-
-        # Read the next block.
-        tarinfo = None
-        while True:
-            try:
-                tarinfo = self.tarinfo.fromtarfile(self)
-            except EOFHeaderError as e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-            except InvalidHeaderError as e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-                elif self.offset == 0:
-                    raise ReadError(str(e)) from None
-            except EmptyHeaderError:
-                if self.offset == 0:
-                    raise ReadError("empty file") from None
-            except TruncatedHeaderError as e:
-                if self.offset == 0:
-                    raise ReadError(str(e)) from None
-            except SubsequentHeaderError as e:
-                raise ReadError(str(e)) from None
-            except Exception as e:
-                try:
-                    import zlib
-                    if isinstance(e, zlib.error):
-                        raise ReadError(f'zlib error: {e}') from None
-                    else:
-                        raise e
-                except ImportError:
-                    raise e
-            break
-
-        if tarinfo is not None:
-            self.members.append(tarinfo)
-        else:
-            self._loaded = True
-
-        return tarinfo
-
-    #--------------------------------------------------------------------------
-    # Little helper methods:
-
-    def _getmember(self, name, tarinfo=None, normalize=False):
-        """Find an archive member by name from bottom to top.
-           If tarinfo is given, it is used as the starting point.
-        """
-        # Ensure that all members have been loaded.
-        members = self.getmembers()
-
-        # Limit the member search list up to tarinfo.
-        skipping = False
-        if tarinfo is not None:
-            try:
-                index = members.index(tarinfo)
-            except ValueError:
-                # The given starting point might be a (modified) copy.
-                # We'll later skip members until we find an equivalent.
-                skipping = True
-            else:
-                # Happy fast path
-                members = members[:index]
-
-        if normalize:
-            name = os.path.normpath(name)
-
-        for member in reversed(members):
-            if skipping:
-                if tarinfo.offset == member.offset:
-                    skipping = False
-                continue
-            if normalize:
-                member_name = os.path.normpath(member.name)
-            else:
-                member_name = member.name
-
-            if name == member_name:
-                return member
-
-        if skipping:
-            # Starting point was not found
-            raise ValueError(tarinfo)
-
-    def _load(self):
-        """Read through the entire archive file and look for readable
-           members.
-        """
-        while self.next() is not None:
-            pass
-        self._loaded = True
-
-    def _check(self, mode=None):
-        """Check if TarFile is still open, and if the operation's mode
-           corresponds to TarFile's mode.
-        """
-        if self.closed:
-            raise OSError("%s is closed" % self.__class__.__name__)
-        if mode is not None and self.mode not in mode:
-            raise OSError("bad operation for mode %r" % self.mode)
-
-    def _find_link_target(self, tarinfo):
-        """Find the target member of a symlink or hardlink member in the
-           archive.
-        """
-        if tarinfo.issym():
-            # Always search the entire archive.
-            linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
-            limit = None
-        else:
-            # Search the archive before the link, because a hard link is
-            # just a reference to an already archived file.
-            linkname = tarinfo.linkname
-            limit = tarinfo
-
-        member = self._getmember(linkname, tarinfo=limit, normalize=True)
-        if member is None:
-            raise KeyError("linkname %r not found" % linkname)
-        return member
-
-    def __iter__(self):
-        """Provide an iterator object.
-        """
-        if self._loaded:
-            yield from self.members
-            return
-
-        # Yield items using TarFile's next() method.
-        # When all members have been read, set TarFile as _loaded.
-        index = 0
-        # Fix for SF #1100429: Under rare circumstances it can
-        # happen that getmembers() is called during iteration,
-        # which will have already exhausted the next() method.
-        if self.firstmember is not None:
-            tarinfo = self.next()
-            index += 1
-            yield tarinfo
-
-        while True:
-            if index < len(self.members):
-                tarinfo = self.members[index]
-            elif not self._loaded:
-                tarinfo = self.next()
-                if not tarinfo:
-                    self._loaded = True
-                    return
-            else:
-                return
-            index += 1
-            yield tarinfo
-
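For context, the iteration protocol above loads members lazily. A minimal
usage sketch, assuming a local archive named "example.tar" (the name is
hypothetical):

    import tarfile

    with tarfile.open("example.tar") as tf:
        # Each step drives TarFile.next(); headers are read one record at a
        # time, so a huge archive is never loaded into memory wholesale.
        for member in tf:
            print(member.name, member.size)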
-    def _dbg(self, level, msg):
-        """Write debugging output to sys.stderr.
-        """
-        if level <= self.debug:
-            print(msg, file=sys.stderr)
-
-    def __enter__(self):
-        self._check()
-        return self
-
-    def __exit__(self, type, value, traceback):
-        if type is None:
-            self.close()
-        else:
-            # An exception occurred. We must not call close() because
-            # it would try to write end-of-archive blocks and padding.
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-
-#--------------------
-# exported functions
-#--------------------
-
-def is_tarfile(name):
-    """Return True if name points to a tar archive that we
-       are able to handle, else return False.
-
-       'name' should be a string, file, or file-like object.
-    """
-    try:
-        if hasattr(name, "read"):
-            pos = name.tell()
-            t = open(fileobj=name)
-            name.seek(pos)
-        else:
-            t = open(name)
-        t.close()
-        return True
-    except TarError:
-        return False
-
-open = TarFile.open
-
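A quick sketch of is_tarfile() with both accepted argument kinds (the path
"archive.tar" is hypothetical):

    import tarfile

    print(tarfile.is_tarfile("archive.tar"))   # path on disk

    with open("archive.tar", "rb") as f:       # file-like object; the code
        print(tarfile.is_tarfile(f))           # above restores its position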
-
-def main():
-    import argparse
-
-    description = 'A simple command-line interface for tarfile module.'
-    parser = argparse.ArgumentParser(description=description)
-    parser.add_argument('-v', '--verbose', action='store_true', default=False,
-                        help='Verbose output')
-    parser.add_argument('--filter', metavar='<filtername>',
-                        choices=_NAMED_FILTERS,
-                        help='Filter for extraction')
-
-    group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument('-l', '--list', metavar='<tarfile>',
-                       help='Show listing of a tarfile')
-    group.add_argument('-e', '--extract', nargs='+',
-                       metavar=('<tarfile>', '<output_dir>'),
-                       help='Extract tarfile into target dir')
-    group.add_argument('-c', '--create', nargs='+',
-                       metavar=('<name>', '<file>'),
-                       help='Create tarfile from sources')
-    group.add_argument('-t', '--test', metavar='<tarfile>',
-                       help='Test if a tarfile is valid')
-
-    args = parser.parse_args()
-
-    if args.filter and args.extract is None:
-        parser.exit(1, '--filter is only valid for extraction\n')
-
-    if args.test is not None:
-        src = args.test
-        if is_tarfile(src):
-            with open(src, 'r') as tar:
-                tar.getmembers()
-                print(tar.getmembers(), file=sys.stderr)
-            if args.verbose:
-                print('{!r} is a tar archive.'.format(src))
-        else:
-            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
-
-    elif args.list is not None:
-        src = args.list
-        if is_tarfile(src):
-            with TarFile.open(src, 'r:*') as tf:
-                tf.list(verbose=args.verbose)
-        else:
-            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
-
-    elif args.extract is not None:
-        if len(args.extract) == 1:
-            src = args.extract[0]
-            curdir = os.curdir
-        elif len(args.extract) == 2:
-            src, curdir = args.extract
-        else:
-            parser.exit(1, parser.format_help())
-
-        if is_tarfile(src):
-            with TarFile.open(src, 'r:*') as tf:
-                tf.extractall(path=curdir, filter=args.filter)
-            if args.verbose:
-                if curdir == '.':
-                    msg = '{!r} file is extracted.'.format(src)
-                else:
-                    msg = ('{!r} file is extracted '
-                           'into {!r} directory.').format(src, curdir)
-                print(msg)
-        else:
-            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
-
-    elif args.create is not None:
-        tar_name = args.create.pop(0)
-        _, ext = os.path.splitext(tar_name)
-        compressions = {
-            # gz
-            '.gz': 'gz',
-            '.tgz': 'gz',
-            # xz
-            '.xz': 'xz',
-            '.txz': 'xz',
-            # bz2
-            '.bz2': 'bz2',
-            '.tbz': 'bz2',
-            '.tbz2': 'bz2',
-            '.tb2': 'bz2',
-        }
-        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
-        tar_files = args.create
-
-        with TarFile.open(tar_name, tar_mode) as tf:
-            for file_name in tar_files:
-                tf.add(file_name)
-
-        if args.verbose:
-            print('{!r} file created.'.format(tar_name))
-
-if __name__ == '__main__':
-    main()
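The --create branch above picks the compression mode purely from the output
file's suffix; the same dispatch can be checked in isolation (a sketch):

    import os

    compressions = {'.gz': 'gz', '.tgz': 'gz', '.xz': 'xz', '.txz': 'xz',
                    '.bz2': 'bz2', '.tbz': 'bz2', '.tbz2': 'bz2', '.tb2': 'bz2'}
    _, ext = os.path.splitext('backup.tgz')
    tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
    assert tar_mode == 'w:gz'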
diff --git a/setuptools/_vendor/importlib_metadata/_compat.py b/setuptools/_vendor/importlib_metadata/_compat.py
index 84f9eea4f3..3d78566ea3 100644
--- a/setuptools/_vendor/importlib_metadata/_compat.py
+++ b/setuptools/_vendor/importlib_metadata/_compat.py
@@ -9,7 +9,7 @@
     from typing import Protocol
 except ImportError:  # pragma: no cover
     # Python 3.7 compatibility
-    from ..typing_extensions import Protocol  # type: ignore
+    from typing_extensions import Protocol  # type: ignore
 
 
 def install(cls):
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
deleted file mode 100644
index 581b308378..0000000000
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
+++ /dev/null
@@ -1,69 +0,0 @@
-Metadata-Version: 2.1
-Name: jaraco.functools
-Version: 4.0.0
-Summary: Functools like those found in stdlib
-Home-page: https://github.com/jaraco/jaraco.functools
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-License-File: LICENSE
-Requires-Dist: more-itertools
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest >=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff ; extra == 'testing'
-Requires-Dist: jaraco.classes ; extra == 'testing'
-Requires-Dist: pytest-black >=0.3.7 ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
-   :target: https://pypi.org/project/jaraco.functools
-
-.. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
-
-.. image:: https://github.com/jaraco/jaraco.functools/workflows/tests/badge.svg
-   :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
-.. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
-   :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2023-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/jaraco.functools
-   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=readme
-
-Additional functools in the spirit of stdlib's functools.
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more `_.
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
deleted file mode 100644
index 783aa7d2b9..0000000000
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
-jaraco.functools-4.0.0.dist-info/RECORD,,
-jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
-jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
-jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
-jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
deleted file mode 100644
index ba48cbcf92..0000000000
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.41.3)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
deleted file mode 100644
index f6205a5f19..0000000000
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-jaraco
diff --git a/setuptools/_vendor/jaraco/functools/__init__.pyi b/setuptools/_vendor/jaraco/functools/__init__.pyi
index c2b9ab1757..19191bf93e 100644
--- a/setuptools/_vendor/jaraco/functools/__init__.pyi
+++ b/setuptools/_vendor/jaraco/functools/__init__.pyi
@@ -74,9 +74,6 @@ def result_invoke(
 def invoke(
     f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
 ) -> Callable[_P, _R]: ...
-def call_aside(
-    f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
-) -> Callable[_P, _R]: ...
 
 class Throttler(Generic[_R]):
     last_called: float
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
index 7255f98aee..c981dde807 100644
--- a/setuptools/_vendor/vendored.txt
+++ b/setuptools/_vendor/vendored.txt
@@ -9,3 +9,4 @@ zipp==3.7.0
 tomli==2.0.1
 # required for jaraco.context on older Pythons
 backports.tarfile
+wheel==0.43.0
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt b/setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt
new file mode 100644
index 0000000000..a31470f14c
--- /dev/null
+++ b/setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2012 Daniel Holth <dholth@fastmail.fm> and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/METADATA b/setuptools/_vendor/wheel-0.43.0.dist-info/METADATA
new file mode 100644
index 0000000000..e3722c00b9
--- /dev/null
+++ b/setuptools/_vendor/wheel-0.43.0.dist-info/METADATA
@@ -0,0 +1,61 @@
+Metadata-Version: 2.1
+Name: wheel
+Version: 0.43.0
+Summary: A built-package format for Python
+Keywords: wheel,packaging
+Author-email: Daniel Holth <dholth@fastmail.fm>
+Maintainer-email: Alex Grönholm <alex.gronholm@nextday.fi>
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Dist: pytest >= 6.0.0 ; extra == "test"
+Requires-Dist: setuptools >= 65 ; extra == "test"
+Project-URL: Changelog, https://wheel.readthedocs.io/en/stable/news.html
+Project-URL: Documentation, https://wheel.readthedocs.io/
+Project-URL: Issue Tracker, https://github.com/pypa/wheel/issues
+Project-URL: Source, https://github.com/pypa/wheel
+Provides-Extra: test
+
+wheel
+=====
+
+This library is the reference implementation of the Python wheel packaging
+standard, as defined in `PEP 427`_.
+
+It has two different roles:
+
+#. A setuptools_ extension for building wheels that provides the
+   ``bdist_wheel`` setuptools command
+#. A command line tool for working with wheel files
+
+It should be noted that wheel is **not** intended to be used as a library, and
+as such there is no stable, public API.
+
+.. _PEP 427: https://www.python.org/dev/peps/pep-0427/
+.. _setuptools: https://pypi.org/project/setuptools/
+
+Documentation
+-------------
+
+The documentation_ can be found on Read The Docs.
+
+.. _documentation: https://wheel.readthedocs.io/
+
+Code of Conduct
+---------------
+
+Everyone interacting in the wheel project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD b/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
new file mode 100644
index 0000000000..786fe55190
--- /dev/null
+++ b/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
@@ -0,0 +1,63 @@
+../../bin/wheel,sha256=Y73OywJ5gxOkyLS7G4Z9CS6Pb63oCt-LMViLs-ygeGE,245
+wheel-0.43.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+wheel-0.43.0.dist-info/LICENSE.txt,sha256=MMI2GGeRCPPo6h0qZYx8pBe9_IkcmO8aifpP8MmChlQ,1107
+wheel-0.43.0.dist-info/METADATA,sha256=WbrCKwClnT5WCKVrjPjvxDgxo2tyeS7kOJyc1GaceEE,2153
+wheel-0.43.0.dist-info/RECORD,,
+wheel-0.43.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel-0.43.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+wheel-0.43.0.dist-info/entry_points.txt,sha256=rTY1BbkPHhkGMm4Q3F0pIzJBzW2kMxoG1oriffvGdA0,104
+wheel/__init__.py,sha256=D6jhH00eMzbgrXGAeOwVfD5i-lCAMMycuG1L0useDlo,59
+wheel/__main__.py,sha256=NkMUnuTCGcOkgY0IBLgBCVC_BGGcWORx2K8jYGS12UE,455
+wheel/__pycache__/__init__.cpython-312.pyc,,
+wheel/__pycache__/__main__.cpython-312.pyc,,
+wheel/__pycache__/_setuptools_logging.cpython-312.pyc,,
+wheel/__pycache__/bdist_wheel.cpython-312.pyc,,
+wheel/__pycache__/macosx_libfile.cpython-312.pyc,,
+wheel/__pycache__/metadata.cpython-312.pyc,,
+wheel/__pycache__/util.cpython-312.pyc,,
+wheel/__pycache__/wheelfile.cpython-312.pyc,,
+wheel/_setuptools_logging.py,sha256=NoCnjJ4DFEZ45Eo-2BdXLsWJCwGkait1tp_17paleVw,746
+wheel/bdist_wheel.py,sha256=OKJyp9E831zJrxoRfmM9AgOjByG1CB-pzF5kXQFmaKk,20938
+wheel/cli/__init__.py,sha256=eBNhnPwWTtdKAJHy77lvz7gOQ5Eu3GavGugXxhSsn-U,4264
+wheel/cli/__pycache__/__init__.cpython-312.pyc,,
+wheel/cli/__pycache__/convert.cpython-312.pyc,,
+wheel/cli/__pycache__/pack.cpython-312.pyc,,
+wheel/cli/__pycache__/tags.cpython-312.pyc,,
+wheel/cli/__pycache__/unpack.cpython-312.pyc,,
+wheel/cli/convert.py,sha256=qJcpYGKqdfw1P6BelgN1Hn_suNgM6bvyEWFlZeuSWx0,9439
+wheel/cli/pack.py,sha256=CAFcHdBVulvsHYJlndKVO7KMI9JqBTZz5ii0PKxxCOs,3103
+wheel/cli/tags.py,sha256=lHw-LaWrkS5Jy_qWcw-6pSjeNM6yAjDnqKI3E5JTTCU,4760
+wheel/cli/unpack.py,sha256=Y_J7ynxPSoFFTT7H0fMgbBlVErwyDGcObgme5MBuz58,1021
+wheel/macosx_libfile.py,sha256=HnW6OPdN993psStvwl49xtx2kw7hoVbe6nvwmf8WsKI,16103
+wheel/metadata.py,sha256=q-xCCqSAK7HzyZxK9A6_HAWmhqS1oB4BFw1-rHQxBiQ,5884
+wheel/util.py,sha256=e0jpnsbbM9QhaaMSyap-_ZgUxcxwpyLDk6RHcrduPLg,621
+wheel/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel/vendored/__pycache__/__init__.cpython-312.pyc,,
+wheel/vendored/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel/vendored/packaging/__pycache__/__init__.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_elffile.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_manylinux.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_musllinux.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_parser.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_structures.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_tokenizer.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/markers.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/requirements.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/specifiers.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/tags.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/utils.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/version.cpython-312.pyc,,
+wheel/vendored/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
+wheel/vendored/packaging/_manylinux.py,sha256=P7sdR5_7XBY09LVYYPhHmydMJIIwPXWsh4olk74Uuj4,9588
+wheel/vendored/packaging/_musllinux.py,sha256=z1s8To2hQ0vpn_d-O2i5qxGwEK8WmGlLt3d_26V7NeY,2674
+wheel/vendored/packaging/_parser.py,sha256=4tT4emSl2qTaU7VTQE1Xa9o1jMPCsBezsYBxyNMUN-s,10347
+wheel/vendored/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+wheel/vendored/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
+wheel/vendored/packaging/markers.py,sha256=_TSPI1BhJYO7Bp9AzTmHQxIqHEVXaTjmDh9G-w8qzPA,8232
+wheel/vendored/packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
+wheel/vendored/packaging/specifiers.py,sha256=IWSt0SrLSP72heWhAC8UL0eGvas7XIQHjqiViVfmPKE,39778
+wheel/vendored/packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
+wheel/vendored/packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
+wheel/vendored/packaging/version.py,sha256=PFJaYZDxBgyxkfYhH3SQw4qfE9ICCWrTmitvq14y3bs,16234
+wheel/vendored/vendor.txt,sha256=Z2ENjB1i5prfez8CdM1Sdr3c6Zxv2rRRolMpLmBncAE,16
+wheel/wheelfile.py,sha256=DtJDWoZMvnBh4leNMDPGOprQU9d_dp6q-MmV0U--4xc,7694
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt b/setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt
new file mode 100644
index 0000000000..06c9f69deb
--- /dev/null
+++ b/setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt
@@ -0,0 +1,6 @@
+[console_scripts]
+wheel=wheel.cli:main
+
+[distutils.commands]
+bdist_wheel=wheel.bdist_wheel:bdist_wheel
+
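One way such entry points are resolved at runtime, sketched with the stdlib
(Python 3.10+ selectable API; the names are those declared above):

    from importlib.metadata import entry_points

    for ep in entry_points(group="distutils.commands"):
        if ep.name == "bdist_wheel":
            cmd_class = ep.load()   # resolves wheel.bdist_wheel:bdist_wheel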
diff --git a/setuptools/_vendor/wheel/__init__.py b/setuptools/_vendor/wheel/__init__.py
new file mode 100644
index 0000000000..a773bbbcd7
--- /dev/null
+++ b/setuptools/_vendor/wheel/__init__.py
@@ -0,0 +1,3 @@
+from __future__ import annotations
+
+__version__ = "0.43.0"
diff --git a/setuptools/_vendor/wheel/macosx_libfile.py b/setuptools/_vendor/wheel/macosx_libfile.py
new file mode 100644
index 0000000000..8953c3f805
--- /dev/null
+++ b/setuptools/_vendor/wheel/macosx_libfile.py
@@ -0,0 +1,469 @@
+"""
+This module contains function to analyse dynamic library
+headers to extract system information
+
+Currently only for MacOSX
+
+Library file on macosx system starts with Mach-O or Fat field.
+This can be distinguish by first 32 bites and it is called magic number.
+Proper value of magic number is with suffix _MAGIC. Suffix _CIGAM means
+reversed bytes order.
+Both fields can occur in two types: 32 and 64 bytes.
+
+FAT field inform that this library contains few version of library
+(typically for different types version). It contains
+information where Mach-O headers starts.
+
+Each section started with Mach-O header contains one library
+(So if file starts with this field it contains only one version).
+
+After filed Mach-O there are section fields.
+Each of them starts with two fields:
+cmd - magic number for this command
+cmdsize - total size occupied by this section information.
+
+In this case only sections LC_VERSION_MIN_MACOSX (for macosx 10.13 and earlier)
+and LC_BUILD_VERSION (for macosx 10.14 and newer) are interesting,
+because them contains information about minimal system version.
+
+Important remarks:
+- For fat files this implementation looks for maximum number version.
+  It not check if it is 32 or 64 and do not compare it with currently built package.
+  So it is possible to false report higher version that needed.
+- All structures signatures are taken form macosx header files.
+- I think that binary format will be more stable than `otool` output.
+  and if apple introduce some changes both implementation will need to be updated.
+- The system compile will set the deployment target no lower than
+  11.0 for arm64 builds. For "Universal 2" builds use the x86_64 deployment
+  target when the arm64 target is 11.0.
+"""
+
+from __future__ import annotations
+
+import ctypes
+import os
+import sys
+
+"""here the needed const and struct from mach-o header files"""
+
+FAT_MAGIC = 0xCAFEBABE
+FAT_CIGAM = 0xBEBAFECA
+FAT_MAGIC_64 = 0xCAFEBABF
+FAT_CIGAM_64 = 0xBFBAFECA
+MH_MAGIC = 0xFEEDFACE
+MH_CIGAM = 0xCEFAEDFE
+MH_MAGIC_64 = 0xFEEDFACF
+MH_CIGAM_64 = 0xCFFAEDFE
+
+LC_VERSION_MIN_MACOSX = 0x24
+LC_BUILD_VERSION = 0x32
+
+CPU_TYPE_ARM64 = 0x0100000C
+
+mach_header_fields = [
+    ("magic", ctypes.c_uint32),
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("filetype", ctypes.c_uint32),
+    ("ncmds", ctypes.c_uint32),
+    ("sizeofcmds", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+]
+"""
+struct mach_header {
+    uint32_t	magic;		/* mach magic number identifier */
+    cpu_type_t	cputype;	/* cpu specifier */
+    cpu_subtype_t	cpusubtype;	/* machine specifier */
+    uint32_t	filetype;	/* type of file */
+    uint32_t	ncmds;		/* number of load commands */
+    uint32_t	sizeofcmds;	/* the size of all the load commands */
+    uint32_t	flags;		/* flags */
+};
+typedef integer_t cpu_type_t;
+typedef integer_t cpu_subtype_t;
+"""
+
+mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
+"""
+struct mach_header_64 {
+    uint32_t	magic;		/* mach magic number identifier */
+    cpu_type_t	cputype;	/* cpu specifier */
+    cpu_subtype_t	cpusubtype;	/* machine specifier */
+    uint32_t	filetype;	/* type of file */
+    uint32_t	ncmds;		/* number of load commands */
+    uint32_t	sizeofcmds;	/* the size of all the load commands */
+    uint32_t	flags;		/* flags */
+    uint32_t	reserved;	/* reserved */
+};
+"""
+
+fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
+"""
+struct fat_header {
+    uint32_t	magic;		/* FAT_MAGIC or FAT_MAGIC_64 */
+    uint32_t	nfat_arch;	/* number of structs that follow */
+};
+"""
+
+fat_arch_fields = [
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("offset", ctypes.c_uint32),
+    ("size", ctypes.c_uint32),
+    ("align", ctypes.c_uint32),
+]
+"""
+struct fat_arch {
+    cpu_type_t	cputype;	/* cpu specifier (int) */
+    cpu_subtype_t	cpusubtype;	/* machine specifier (int) */
+    uint32_t	offset;		/* file offset to this object file */
+    uint32_t	size;		/* size of this object file */
+    uint32_t	align;		/* alignment as a power of 2 */
+};
+"""
+
+fat_arch_64_fields = [
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("offset", ctypes.c_uint64),
+    ("size", ctypes.c_uint64),
+    ("align", ctypes.c_uint32),
+    ("reserved", ctypes.c_uint32),
+]
+"""
+struct fat_arch_64 {
+    cpu_type_t	cputype;	/* cpu specifier (int) */
+    cpu_subtype_t	cpusubtype;	/* machine specifier (int) */
+    uint64_t	offset;		/* file offset to this object file */
+    uint64_t	size;		/* size of this object file */
+    uint32_t	align;		/* alignment as a power of 2 */
+    uint32_t	reserved;	/* reserved */
+};
+"""
+
+segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
+"""base for reading segment info"""
+
+segment_command_fields = [
+    ("cmd", ctypes.c_uint32),
+    ("cmdsize", ctypes.c_uint32),
+    ("segname", ctypes.c_char * 16),
+    ("vmaddr", ctypes.c_uint32),
+    ("vmsize", ctypes.c_uint32),
+    ("fileoff", ctypes.c_uint32),
+    ("filesize", ctypes.c_uint32),
+    ("maxprot", ctypes.c_int),
+    ("initprot", ctypes.c_int),
+    ("nsects", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+]
+"""
+struct segment_command { /* for 32-bit architectures */
+    uint32_t	cmd;		/* LC_SEGMENT */
+    uint32_t	cmdsize;	/* includes sizeof section structs */
+    char		segname[16];	/* segment name */
+    uint32_t	vmaddr;		/* memory address of this segment */
+    uint32_t	vmsize;		/* memory size of this segment */
+    uint32_t	fileoff;	/* file offset of this segment */
+    uint32_t	filesize;	/* amount to map from the file */
+    vm_prot_t	maxprot;	/* maximum VM protection */
+    vm_prot_t	initprot;	/* initial VM protection */
+    uint32_t	nsects;		/* number of sections in segment */
+    uint32_t	flags;		/* flags */
+};
+typedef int vm_prot_t;
+"""
+
+segment_command_fields_64 = [
+    ("cmd", ctypes.c_uint32),
+    ("cmdsize", ctypes.c_uint32),
+    ("segname", ctypes.c_char * 16),
+    ("vmaddr", ctypes.c_uint64),
+    ("vmsize", ctypes.c_uint64),
+    ("fileoff", ctypes.c_uint64),
+    ("filesize", ctypes.c_uint64),
+    ("maxprot", ctypes.c_int),
+    ("initprot", ctypes.c_int),
+    ("nsects", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+]
+"""
+struct segment_command_64 { /* for 64-bit architectures */
+    uint32_t	cmd;		/* LC_SEGMENT_64 */
+    uint32_t	cmdsize;	/* includes sizeof section_64 structs */
+    char		segname[16];	/* segment name */
+    uint64_t	vmaddr;		/* memory address of this segment */
+    uint64_t	vmsize;		/* memory size of this segment */
+    uint64_t	fileoff;	/* file offset of this segment */
+    uint64_t	filesize;	/* amount to map from the file */
+    vm_prot_t	maxprot;	/* maximum VM protection */
+    vm_prot_t	initprot;	/* initial VM protection */
+    uint32_t	nsects;		/* number of sections in segment */
+    uint32_t	flags;		/* flags */
+};
+"""
+
+version_min_command_fields = segment_base_fields + [
+    ("version", ctypes.c_uint32),
+    ("sdk", ctypes.c_uint32),
+]
+"""
+struct version_min_command {
+    uint32_t	cmd;		/* LC_VERSION_MIN_MACOSX or
+                               LC_VERSION_MIN_IPHONEOS or
+                               LC_VERSION_MIN_WATCHOS or
+                               LC_VERSION_MIN_TVOS */
+    uint32_t	cmdsize;	/* sizeof(struct min_version_command) */
+    uint32_t	version;	/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t	sdk;		/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+};
+"""
+
+build_version_command_fields = segment_base_fields + [
+    ("platform", ctypes.c_uint32),
+    ("minos", ctypes.c_uint32),
+    ("sdk", ctypes.c_uint32),
+    ("ntools", ctypes.c_uint32),
+]
+"""
+struct build_version_command {
+    uint32_t	cmd;		/* LC_BUILD_VERSION */
+    uint32_t	cmdsize;	/* sizeof(struct build_version_command) plus */
+                                /* ntools * sizeof(struct build_tool_version) */
+    uint32_t	platform;	/* platform */
+    uint32_t	minos;		/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t	sdk;		/* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t	ntools;		/* number of tool entries following this */
+};
+"""
+
+
+def swap32(x):
+    return (
+        ((x << 24) & 0xFF000000)
+        | ((x << 8) & 0x00FF0000)
+        | ((x >> 8) & 0x0000FF00)
+        | ((x >> 24) & 0x000000FF)
+    )
+
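swap32() simply reverses byte order, turning a _CIGAM value back into its
_MAGIC counterpart; a one-line check:

    assert swap32(0xBEBAFECA) == 0xCAFEBABE   # FAT_CIGAM -> FAT_MAGIC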
+
+def get_base_class_and_magic_number(lib_file, seek=None):
+    if seek is None:
+        seek = lib_file.tell()
+    else:
+        lib_file.seek(seek)
+    magic_number = ctypes.c_uint32.from_buffer_copy(
+        lib_file.read(ctypes.sizeof(ctypes.c_uint32))
+    ).value
+
+    # Handle wrong byte order
+    if magic_number in [FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64]:
+        if sys.byteorder == "little":
+            BaseClass = ctypes.BigEndianStructure
+        else:
+            BaseClass = ctypes.LittleEndianStructure
+
+        magic_number = swap32(magic_number)
+    else:
+        BaseClass = ctypes.Structure
+
+    lib_file.seek(seek)
+    return BaseClass, magic_number
+
+
+def read_data(struct_class, lib_file):
+    return struct_class.from_buffer_copy(lib_file.read(ctypes.sizeof(struct_class)))
+
+
+def extract_macosx_min_system_version(path_to_lib):
+    with open(path_to_lib, "rb") as lib_file:
+        BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
+        if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
+            return
+
+        # After the byte-swapping above, only the *_MAGIC values can
+        # appear here, so test against FAT_MAGIC_64 rather than the
+        # unreachable FAT_CIGAM_64.
+        if magic_number in [FAT_MAGIC, FAT_MAGIC_64]:
+
+            class FatHeader(BaseClass):
+                _fields_ = fat_header_fields
+
+            fat_header = read_data(FatHeader, lib_file)
+            if magic_number == FAT_MAGIC:
+
+                class FatArch(BaseClass):
+                    _fields_ = fat_arch_fields
+
+            else:
+
+                class FatArch(BaseClass):
+                    _fields_ = fat_arch_64_fields
+
+            fat_arch_list = [
+                read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
+            ]
+
+            versions_list = []
+            for el in fat_arch_list:
+                try:
+                    version = read_mach_header(lib_file, el.offset)
+                    if version is not None:
+                        if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1:
+                            # Xcode will not set the deployment target below 11.0.0
+                            # for the arm64 architecture. Ignore the arm64 deployment
+                            # in fat binaries when the target is 11.0.0, that way
+                            # the other architectures can select a lower deployment
+                            # target.
+                            # This is safe because there is no arm64 variant for
+                            # macOS 10.15 or earlier.
+                            if version == (11, 0, 0):
+                                continue
+                        versions_list.append(version)
+                except ValueError:
+                    pass
+
+            if len(versions_list) > 0:
+                return max(versions_list)
+            else:
+                return None
+
+        else:
+            try:
+                return read_mach_header(lib_file, 0)
+            except ValueError:
+                """when some error during read library files"""
+                return None
+
+
+def read_mach_header(lib_file, seek=None):
+    """
+    Parse a Mach-O header and extract the minimal macOS version it declares.
+
+    :param lib_file: the opened library file, positioned at the header to read
+    """
+    base_class, magic_number = get_base_class_and_magic_number(lib_file, seek)
+    arch = "32" if magic_number == MH_MAGIC else "64"
+
+    class SegmentBase(base_class):
+        _fields_ = segment_base_fields
+
+    if arch == "32":
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields
+
+    else:
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields_64
+
+    mach_header = read_data(MachHeader, lib_file)
+    for _i in range(mach_header.ncmds):
+        pos = lib_file.tell()
+        segment_base = read_data(SegmentBase, lib_file)
+        lib_file.seek(pos)
+        if segment_base.cmd == LC_VERSION_MIN_MACOSX:
+
+            class VersionMinCommand(base_class):
+                _fields_ = version_min_command_fields
+
+            version_info = read_data(VersionMinCommand, lib_file)
+            return parse_version(version_info.version)
+        elif segment_base.cmd == LC_BUILD_VERSION:
+
+            class VersionBuild(base_class):
+                _fields_ = build_version_command_fields
+
+            version_info = read_data(VersionBuild, lib_file)
+            return parse_version(version_info.minos)
+        else:
+            lib_file.seek(pos + segment_base.cmdsize)
+            continue
+
+
+def parse_version(version):
+    x = (version & 0xFFFF0000) >> 16
+    y = (version & 0x0000FF00) >> 8
+    z = version & 0x000000FF
+    return x, y, z
+
+
+def calculate_macosx_platform_tag(archive_root, platform_tag):
+    """
+    Calculate the proper macosx platform tag based on the files included in the wheel
+
+    Example platform tag `macosx-10.14-x86_64`
+    """
+    prefix, base_version, suffix = platform_tag.split("-")
+    base_version = tuple(int(x) for x in base_version.split("."))
+    base_version = base_version[:2]
+    if base_version[0] > 10:
+        base_version = (base_version[0], 0)
+    assert len(base_version) == 2
+    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
+        deploy_target = tuple(
+            int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
+        )
+        deploy_target = deploy_target[:2]
+        if deploy_target[0] > 10:
+            deploy_target = (deploy_target[0], 0)
+        if deploy_target < base_version:
+            sys.stderr.write(
+                "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
+                "the version on which the Python interpreter was compiled ({}), and "
+                "will be ignored.\n".format(
+                    ".".join(str(x) for x in deploy_target),
+                    ".".join(str(x) for x in base_version),
+                )
+            )
+        else:
+            base_version = deploy_target
+
+    assert len(base_version) == 2
+    start_version = base_version
+    versions_dict = {}
+    for dirpath, _dirnames, filenames in os.walk(archive_root):
+        for filename in filenames:
+            if filename.endswith(".dylib") or filename.endswith(".so"):
+                lib_path = os.path.join(dirpath, filename)
+                min_ver = extract_macosx_min_system_version(lib_path)
+                if min_ver is not None:
+                    min_ver = min_ver[0:2]
+                    if min_ver[0] > 10:
+                        min_ver = (min_ver[0], 0)
+                    versions_dict[lib_path] = min_ver
+
+    if len(versions_dict) > 0:
+        base_version = max(base_version, max(versions_dict.values()))
+
+    # the macosx platform tag does not encode minor/bugfix releases
+    fin_base_version = "_".join([str(x) for x in base_version])
+    if start_version < base_version:
+        problematic_files = [k for k, v in versions_dict.items() if v > start_version]
+        # Count the files before joining; len() of the joined string would
+        # measure characters, not files.
+        if len(problematic_files) == 1:
+            files_form = "this file"
+        else:
+            files_form = "these files"
+        problematic_files = "\n".join(problematic_files)
+        error_message = (
+            "[WARNING] This wheel needs a higher macOS version than {}  "
+            "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least "
+            + fin_base_version
+            + " or recreate "
+            + files_form
+            + " with lower "
+            "MACOSX_DEPLOYMENT_TARGET:  \n" + problematic_files
+        )
+
+        if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
+            error_message = error_message.format(
+                "is set in MACOSX_DEPLOYMENT_TARGET variable."
+            )
+        else:
+            error_message = error_message.format(
+                "the version your Python interpreter is compiled against."
+            )
+
+        sys.stderr.write(error_message)
+
+    platform_tag = prefix + "_" + fin_base_version + "_" + suffix
+    return platform_tag
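The version fields handled above are nibble-encoded as xxxx.yy.zz; a small
deterministic check of parse_version() on literal values:

    assert parse_version(0x000A0E00) == (10, 14, 0)   # macOS 10.14
    assert parse_version(0x000B0000) == (11, 0, 0)    # macOS 11.0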
diff --git a/setuptools/_vendor/wheel/metadata.py b/setuptools/_vendor/wheel/metadata.py
new file mode 100644
index 0000000000..341f614ceb
--- /dev/null
+++ b/setuptools/_vendor/wheel/metadata.py
@@ -0,0 +1,180 @@
+"""
+Tools for converting old- to new-style metadata.
+"""
+
+from __future__ import annotations
+
+import functools
+import itertools
+import os.path
+import re
+import textwrap
+from email.message import Message
+from email.parser import Parser
+from typing import Iterator
+
+from ..packaging.requirements import Requirement
+
+
+def _nonblank(str):
+    return str and not str.startswith("#")
+
+
+@functools.singledispatch
+def yield_lines(iterable):
+    r"""
+    Yield valid lines of a string or iterable.
+    >>> list(yield_lines(''))
+    []
+    >>> list(yield_lines(['foo', 'bar']))
+    ['foo', 'bar']
+    >>> list(yield_lines('foo\nbar'))
+    ['foo', 'bar']
+    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
+    ['foo', 'baz #comment']
+    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
+    ['foo', 'bar', 'baz', 'bing']
+    """
+    return itertools.chain.from_iterable(map(yield_lines, iterable))
+
+
+@yield_lines.register(str)
+def _(text):
+    return filter(_nonblank, map(str.strip, text.splitlines()))
+
+
+def split_sections(s):
+    """Split a string or iterable thereof into (section, content) pairs
+    Each ``section`` is a stripped version of the section header ("[section]")
+    and each ``content`` is a list of stripped lines excluding blank lines and
+    comment-only lines.  If there are any such lines before the first section
+    header, they're returned in a first ``section`` of ``None``.
+    """
+    section = None
+    content = []
+    for line in yield_lines(s):
+        if line.startswith("["):
+            if line.endswith("]"):
+                if section or content:
+                    yield section, content
+                section = line[1:-1].strip()
+                content = []
+            else:
+                raise ValueError("Invalid section heading", line)
+        else:
+            content.append(line)
+
+    # wrap up last segment
+    yield section, content
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+    """
+    return re.sub("[^A-Za-z0-9.-]+", "_", extra).lower()
+
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub("[^A-Za-z0-9.]+", "-", name)
+
+
+def requires_to_requires_dist(requirement: Requirement) -> str:
+    """Return the version specifier for a requirement in PEP 345/566 fashion."""
+    if getattr(requirement, "url", None):
+        return " @ " + requirement.url
+
+    requires_dist = []
+    for spec in requirement.specifier:
+        requires_dist.append(spec.operator + spec.version)
+
+    if requires_dist:
+        return " " + ",".join(sorted(requires_dist))
+    else:
+        return ""
+
+
+def convert_requirements(requirements: list[str]) -> Iterator[str]:
+    """Yield Requires-Dist: strings for parsed requirements strings."""
+    for req in requirements:
+        parsed_requirement = Requirement(req)
+        spec = requires_to_requires_dist(parsed_requirement)
+        extras = ",".join(sorted(safe_extra(e) for e in parsed_requirement.extras))
+        if extras:
+            extras = f"[{extras}]"
+
+        yield safe_name(parsed_requirement.name) + extras + spec
+
+
+def generate_requirements(
+    extras_require: dict[str, list[str]],
+) -> Iterator[tuple[str, str]]:
+    """
+    Convert requirements from a setup()-style dictionary to
+    ('Requires-Dist', 'requirement') and ('Provides-Extra', 'extra') tuples.
+
+    extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
+    using the empty extra {'': [requirements]} to hold install_requires.
+    """
+    for extra, depends in extras_require.items():
+        condition = ""
+        extra = extra or ""
+        if ":" in extra:  # setuptools extra:condition syntax
+            extra, condition = extra.split(":", 1)
+
+        extra = safe_extra(extra)
+        if extra:
+            yield "Provides-Extra", extra
+            if condition:
+                condition = "(" + condition + ") and "
+            condition += "extra == '%s'" % extra
+
+        if condition:
+            condition = " ; " + condition
+
+        for new_req in convert_requirements(depends):
+            yield "Requires-Dist", new_req + condition
+
+
+def pkginfo_to_metadata(egg_info_path: str, pkginfo_path: str) -> Message:
+    """
+    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
+    """
+    with open(pkginfo_path, encoding="utf-8") as headers:
+        pkg_info = Parser().parse(headers)
+
+    pkg_info.replace_header("Metadata-Version", "2.1")
+    # Those will be regenerated from `requires.txt`.
+    del pkg_info["Provides-Extra"]
+    del pkg_info["Requires-Dist"]
+    requires_path = os.path.join(egg_info_path, "requires.txt")
+    if os.path.exists(requires_path):
+        with open(requires_path, encoding="utf-8") as requires_file:
+            requires = requires_file.read()
+
+        parsed_requirements = sorted(split_sections(requires), key=lambda x: x[0] or "")
+        for extra, reqs in parsed_requirements:
+            for key, value in generate_requirements({extra: reqs}):
+                if (key, value) not in pkg_info.items():
+                    pkg_info[key] = value
+
+    description = pkg_info["Description"]
+    if description:
+        description_lines = pkg_info["Description"].splitlines()
+        dedented_description = "\n".join(
+            # if the first line of long_description is blank,
+            # the first line here will be indented.
+            (
+                description_lines[0].lstrip(),
+                textwrap.dedent("\n".join(description_lines[1:])),
+                "\n",
+            )
+        )
+        pkg_info.set_payload(dedented_description)
+        del pkg_info["Description"]
+
+    return pkg_info
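A usage sketch for generate_requirements(), assuming the standalone wheel
distribution is importable (the vendored copy lives under setuptools._vendor):

    from wheel.metadata import generate_requirements

    reqs = {"": ["requests>=2.0"], "test": ["pytest>=6"]}
    for key, value in generate_requirements(reqs):
        print(f"{key}: {value}")
    # Requires-Dist: requests >=2.0
    # Provides-Extra: test
    # Requires-Dist: pytest >=6 ; extra == 'test'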
diff --git a/setuptools/_vendor/wheel/util.py b/setuptools/_vendor/wheel/util.py
new file mode 100644
index 0000000000..d98d98cb52
--- /dev/null
+++ b/setuptools/_vendor/wheel/util.py
@@ -0,0 +1,26 @@
+from __future__ import annotations
+
+import base64
+import logging
+
+log = logging.getLogger("wheel")
+
+# ensure Python logging is configured
+try:
+    __import__("setuptools.logging")
+except ImportError:
+    # setuptools < ??
+    from . import _setuptools_logging
+
+    _setuptools_logging.configure()
+
+
+def urlsafe_b64encode(data: bytes) -> bytes:
+    """urlsafe_b64encode without padding"""
+    return base64.urlsafe_b64encode(data).rstrip(b"=")
+
+
+def urlsafe_b64decode(data: bytes) -> bytes:
+    """urlsafe_b64decode without padding"""
+    pad = b"=" * (4 - (len(data) & 3))
+    return base64.urlsafe_b64decode(data + pad)
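The RECORD format (PEP 376/427) stores hashes as unpadded urlsafe base64; a
round-trip sketch of the two helpers above:

    import hashlib

    digest = hashlib.sha256(b"payload").digest()
    encoded = urlsafe_b64encode(digest)        # no trailing '='
    assert b"=" not in encoded
    assert urlsafe_b64decode(encoded) == digest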
diff --git a/setuptools/_vendor/wheel/wheelfile.py b/setuptools/_vendor/wheel/wheelfile.py
new file mode 100644
index 0000000000..83a31772bd
--- /dev/null
+++ b/setuptools/_vendor/wheel/wheelfile.py
@@ -0,0 +1,199 @@
+from __future__ import annotations
+
+import csv
+import hashlib
+import os.path
+import re
+import stat
+import time
+from io import StringIO, TextIOWrapper
+from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
+
+from .util import log, urlsafe_b64decode, urlsafe_b64encode
+
+# Non-greedy matching of an optional build number may be too clever (more
+# invalid wheel filenames will match). Separate regex for .dist-info?
+WHEEL_INFO_RE = re.compile(
+    r"""^(?P(?P[^\s-]+?)-(?P[^\s-]+?))(-(?P\d[^\s-]*))?
+     -(?P[^\s-]+?)-(?P[^\s-]+?)-(?P\S+)\.whl$""",
+    re.VERBOSE,
+)
+MINIMUM_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC
+
+
+def get_zipinfo_datetime(timestamp=None):
+    # Some applications need reproducible .whl files, but they can't do this without
+    # forcing the timestamp of the individual ZipInfo objects. See issue #143.
+    timestamp = int(os.environ.get("SOURCE_DATE_EPOCH", timestamp or time.time()))
+    timestamp = max(timestamp, MINIMUM_TIMESTAMP)
+    return time.gmtime(timestamp)[0:6]
+
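A quick sketch of the filename regex above (the wheel name is hypothetical):

    m = WHEEL_INFO_RE.match("mypkg-1.0-py3-none-any.whl")
    assert m.group("namever") == "mypkg-1.0"
    assert (m.group("pyver"), m.group("abi"), m.group("plat")) == \
        ("py3", "none", "any")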
+
+class WheelFile(ZipFile):
+    """A ZipFile derivative class that also reads SHA-256 hashes from
+    .dist-info/RECORD and checks any read files against those.
+    """
+
+    _default_algorithm = hashlib.sha256
+
+    def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
+        basename = os.path.basename(file)
+        self.parsed_filename = WHEEL_INFO_RE.match(basename)
+        if not basename.endswith(".whl") or self.parsed_filename is None:
+            raise WheelError(f"Bad wheel filename {basename!r}")
+
+        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)
+
+        self.dist_info_path = "{}.dist-info".format(
+            self.parsed_filename.group("namever")
+        )
+        self.record_path = self.dist_info_path + "/RECORD"
+        self._file_hashes = {}
+        self._file_sizes = {}
+        if mode == "r":
+            # Ignore RECORD and any embedded wheel signatures
+            self._file_hashes[self.record_path] = None, None
+            self._file_hashes[self.record_path + ".jws"] = None, None
+            self._file_hashes[self.record_path + ".p7s"] = None, None
+
+            # Fill in the expected hashes by reading them from RECORD
+            try:
+                record = self.open(self.record_path)
+            except KeyError:
+                raise WheelError(f"Missing {self.record_path} file") from None
+
+            with record:
+                for line in csv.reader(
+                    TextIOWrapper(record, newline="", encoding="utf-8")
+                ):
+                    path, hash_sum, size = line
+                    if not hash_sum:
+                        continue
+
+                    algorithm, hash_sum = hash_sum.split("=")
+                    try:
+                        hashlib.new(algorithm)
+                    except ValueError:
+                        raise WheelError(
+                            f"Unsupported hash algorithm: {algorithm}"
+                        ) from None
+
+                    if algorithm.lower() in {"md5", "sha1"}:
+                        raise WheelError(
+                            f"Weak hash algorithm ({algorithm}) is not permitted by "
+                            f"PEP 427"
+                        )
+
+                    self._file_hashes[path] = (
+                        algorithm,
+                        urlsafe_b64decode(hash_sum.encode("ascii")),
+                    )
+
+    def open(self, name_or_info, mode="r", pwd=None):
+        def _update_crc(newdata):
+            eof = ef._eof
+            update_crc_orig(newdata)
+            running_hash.update(newdata)
+            if eof and running_hash.digest() != expected_hash:
+                raise WheelError(f"Hash mismatch for file '{ef_name}'")
+
+        ef_name = (
+            name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
+        )
+        if (
+            mode == "r"
+            and not ef_name.endswith("/")
+            and ef_name not in self._file_hashes
+        ):
+            raise WheelError(f"No hash found for file '{ef_name}'")
+
+        ef = ZipFile.open(self, name_or_info, mode, pwd)
+        if mode == "r" and not ef_name.endswith("/"):
+            algorithm, expected_hash = self._file_hashes[ef_name]
+            if expected_hash is not None:
+                # Monkey patch the _update_crc method to also check for the hash from
+                # RECORD
+                running_hash = hashlib.new(algorithm)
+                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc
+
+        return ef
+
+    def write_files(self, base_dir):
+        log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
+        deferred = []
+        for root, dirnames, filenames in os.walk(base_dir):
+            # Sort the directory names so that `os.walk` will walk them in a
+            # defined order on the next iteration.
+            dirnames.sort()
+            for name in sorted(filenames):
+                path = os.path.normpath(os.path.join(root, name))
+                if os.path.isfile(path):
+                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
+                    if arcname == self.record_path:
+                        pass
+                    elif root.endswith(".dist-info"):
+                        deferred.append((path, arcname))
+                    else:
+                        self.write(path, arcname)
+
+        deferred.sort()
+        for path, arcname in deferred:
+            self.write(path, arcname)
+
+    def write(self, filename, arcname=None, compress_type=None):
+        with open(filename, "rb") as f:
+            st = os.fstat(f.fileno())
+            data = f.read()
+
+        zinfo = ZipInfo(
+            arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
+        )
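+        # Preserve the source file's type and permission bits in the archive entry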
+        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
+        zinfo.compress_type = compress_type or self.compression
+        self.writestr(zinfo, data, compress_type)
+
+    def writestr(self, zinfo_or_arcname, data, compress_type=None):
+        if isinstance(zinfo_or_arcname, str):
+            zinfo_or_arcname = ZipInfo(
+                zinfo_or_arcname, date_time=get_zipinfo_datetime()
+            )
+            zinfo_or_arcname.compress_type = self.compression
+            zinfo_or_arcname.external_attr = (0o664 | stat.S_IFREG) << 16
+
+        if isinstance(data, str):
+            data = data.encode("utf-8")
+
+        ZipFile.writestr(self, zinfo_or_arcname, data, compress_type)
+        fname = (
+            zinfo_or_arcname.filename
+            if isinstance(zinfo_or_arcname, ZipInfo)
+            else zinfo_or_arcname
+        )
+        log.info(f"adding '{fname}'")
+        if fname != self.record_path:
+            hash_ = self._default_algorithm(data)
+            self._file_hashes[fname] = (
+                hash_.name,
+                urlsafe_b64encode(hash_.digest()).decode("ascii"),
+            )
+            self._file_sizes[fname] = len(data)
+
+    def close(self):
+        # Write RECORD
+        if self.fp is not None and self.mode == "w" and self._file_hashes:
+            data = StringIO()
+            writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
+            writer.writerows(
+                (
+                    (fname, algorithm + "=" + hash_, self._file_sizes[fname])
+                    for fname, (algorithm, hash_) in self._file_hashes.items()
+                )
+            )
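+            # RECORD lists itself with empty hash and size columns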
+            writer.writerow((format(self.record_path), "", ""))
+            self.writestr(self.record_path, data.getvalue())
+
+        ZipFile.close(self)
+
+
+class WheelError(Exception):
+    pass
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index be2742d73d..5799c06ed6 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -322,7 +322,7 @@ def run_setup(self, setup_script='setup.py'):
             )
 
     def get_requires_for_build_wheel(self, config_settings=None):
-        return self._get_build_requires(config_settings, requirements=['wheel'])
+        return self._get_build_requires(config_settings, requirements=[])
 
     def get_requires_for_build_sdist(self, config_settings=None):
         return self._get_build_requires(config_settings, requirements=[])
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
new file mode 100644
index 0000000000..1d0a77207e
--- /dev/null
+++ b/setuptools/command/bdist_wheel.py
@@ -0,0 +1,602 @@
+"""
+Create a wheel (.whl) distribution.
+
+A wheel is a built archive format.
+"""
+
+from __future__ import annotations
+
+import os
+import re
+import shutil
+import stat
+import struct
+import sys
+import sysconfig
+import warnings
+from email.generator import BytesGenerator, Generator
+from email.policy import EmailPolicy
+from distutils import log
+from glob import iglob
+from shutil import rmtree
+from typing import TYPE_CHECKING, Callable, Iterable, Literal, Sequence, cast
+from zipfile import ZIP_DEFLATED, ZIP_STORED
+
+import setuptools
+from setuptools import Command
+
+from .. import __version__
+from .._vendor.wheel.metadata import pkginfo_to_metadata
+from .._vendor.packaging import tags
+from .._vendor.packaging import version as _packaging_version
+from .._vendor.wheel.wheelfile import WheelFile
+
+if TYPE_CHECKING:
+    import types
+
+
+def safe_name(name: str) -> str:
+    """Convert an arbitrary string to a standard distribution name
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub("[^A-Za-z0-9.]+", "-", name)
+
+
+def safe_version(version: str) -> str:
+    """
+    Convert an arbitrary string to a standard version string
+    """
+    try:
+        # normalize the version
+        return str(_packaging_version.Version(version))
+    except _packaging_version.InvalidVersion:
+        version = version.replace(" ", ".")
+        return re.sub("[^A-Za-z0-9.]+", "-", version)
+
+
+setuptools_major_version = int(setuptools.__version__.split(".")[0])
+
+PY_LIMITED_API_PATTERN = r"cp3\d"
+
+
+def _is_32bit_interpreter() -> bool:
+    return struct.calcsize("P") == 4
+
+
+def python_tag() -> str:
+    return f"py{sys.version_info[0]}"
+
+
+def get_platform(archive_root: str | None) -> str:
+    """Return our platform name 'win32', 'linux_x86_64'"""
+    result = sysconfig.get_platform()
+    if result.startswith("macosx") and archive_root is not None:
+        from .._vendor.wheel.macosx_libfile import calculate_macosx_platform_tag
+
+        result = calculate_macosx_platform_tag(archive_root, result)
+    elif _is_32bit_interpreter():
+        if result == "linux-x86_64":
+            # pip pull request #3497
+            result = "linux-i686"
+        elif result == "linux-aarch64":
+            # packaging pull request #234
+            # TODO armv8l, packaging pull request #690 => this did not land
+            # in pip/packaging yet
+            result = "linux-armv7l"
+
+    return result.replace("-", "_")
+
+
+def get_flag(
+    var: str, fallback: bool, expected: bool = True, warn: bool = True
+) -> bool:
+    """Use a fallback value for determining SOABI flags if the needed config
+    var is unset or unavailable."""
+    val = sysconfig.get_config_var(var)
+    if val is None:
+        if warn:
+            warnings.warn(
+                f"Config variable '{var}' is unset, Python ABI tag may be incorrect",
+                RuntimeWarning,
+                stacklevel=2,
+            )
+        return fallback
+    return val == expected
+
+
+def get_abi_tag() -> str | None:
+    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
+    soabi: str = sysconfig.get_config_var("SOABI")
+    impl = tags.interpreter_name()
+    if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
+        d = ""
+        m = ""
+        u = ""
+        if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
+            d = "d"
+
+        if get_flag(
+            "WITH_PYMALLOC",
+            impl == "cp",
+            warn=(impl == "cp" and sys.version_info < (3, 8)),
+        ) and sys.version_info < (3, 8):
+            m = "m"
+
+        abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
+    elif soabi and impl == "cp" and soabi.startswith("cpython"):
+        # non-Windows
+        abi = "cp" + soabi.split("-")[1]
+    elif soabi and impl == "cp" and soabi.startswith("cp"):
+        # Windows
+        abi = soabi.split("-")[0]
+    elif soabi and impl == "pp":
+        # we want something like pypy36-pp73
+        abi = "-".join(soabi.split("-")[:2])
+        abi = abi.replace(".", "_").replace("-", "_")
+    elif soabi and impl == "graalpy":
+        abi = "-".join(soabi.split("-")[:3])
+        abi = abi.replace(".", "_").replace("-", "_")
+    elif soabi:
+        abi = soabi.replace(".", "_").replace("-", "_")
+    else:
+        abi = None
+
+    return abi
+
+
+def safer_name(name: str) -> str:
+    return safe_name(name).replace("-", "_")
+
+
+def safer_version(version: str) -> str:
+    return safe_version(version).replace("-", "_")
+
+
+def remove_readonly(
+    func: Callable[..., object],
+    path: str,
+    excinfo: tuple[type[Exception], Exception, types.TracebackType],
+) -> None:
+    remove_readonly_exc(func, path, excinfo[1])
+
+
+def remove_readonly_exc(func: Callable[..., object], path: str, exc: Exception) -> None:
+    os.chmod(path, stat.S_IWRITE)
+    func(path)
+
+
+class bdist_wheel(Command):
+    description = "create a wheel distribution"
+
+    supported_compressions = {
+        "stored": ZIP_STORED,
+        "deflated": ZIP_DEFLATED,
+    }
+
+    user_options = [
+        ("bdist-dir=", "b", "temporary directory for creating the distribution"),
+        (
+            "plat-name=",
+            "p",
+            "platform name to embed in generated filenames "
+            f"(default: {get_platform(None)})",
+        ),
+        (
+            "keep-temp",
+            "k",
+            "keep the pseudo-installation tree around after "
+            "creating the distribution archive",
+        ),
+        ("dist-dir=", "d", "directory to put final built distributions in"),
+        ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
+        (
+            "relative",
+            None,
+            "build the archive using relative paths (default: false)",
+        ),
+        (
+            "owner=",
+            "u",
+            "Owner name used when creating a tar file [default: current user]",
+        ),
+        (
+            "group=",
+            "g",
+            "Group name used when creating a tar file [default: current group]",
+        ),
+        ("universal", None, "make a universal wheel (default: false)"),
+        (
+            "compression=",
+            None,
+            "zipfile compression (one of: {}) (default: 'deflated')".format(
+                ", ".join(supported_compressions)
+            ),
+        ),
+        (
+            "python-tag=",
+            None,
+            f"Python implementation compatibility tag (default: '{python_tag()}')",
+        ),
+        (
+            "build-number=",
+            None,
+            "Build number for this particular version. "
+            "As specified in PEP-0427, this must start with a digit. "
+            "[default: None]",
+        ),
+        (
+            "py-limited-api=",
+            None,
+            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag (default: false)",
+        ),
+    ]
+
+    boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
+
+    def initialize_options(self):
+        self.bdist_dir: str | None = None
+        self.data_dir = None
+        self.plat_name: str | None = None
+        self.plat_tag = None
+        self.format = "zip"
+        self.keep_temp = False
+        self.dist_dir: str | None = None
+        self.egginfo_dir = None
+        self.root_is_pure: bool | None = None
+        self.skip_build = None
+        self.relative = False
+        self.owner = None
+        self.group = None
+        self.universal: bool = False
+        self.compression: str | int = "deflated"
+        self.python_tag: str = python_tag()
+        self.build_number: str | None = None
+        self.py_limited_api: str | Literal[False] = False
+        self.plat_name_supplied = False
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command("bdist").bdist_base
+            self.bdist_dir = os.path.join(bdist_base, "wheel")
+
+        egg_info = self.distribution.get_command_obj("egg_info")
+        egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`
+
+        self.data_dir = self.wheel_dist_name + ".data"
+        self.plat_name_supplied = self.plat_name is not None
+
+        try:
+            self.compression = self.supported_compressions[self.compression]
+        except KeyError:
+            raise ValueError(f"Unsupported compression: {self.compression}") from None
+
+        need_options = ("dist_dir", "plat_name", "skip_build")
+
+        self.set_undefined_options("bdist", *zip(need_options, need_options))
+
+        self.root_is_pure = not (
+            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
+        )
+
+        if self.py_limited_api and not re.match(
+            PY_LIMITED_API_PATTERN, self.py_limited_api
+        ):
+            raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
+
+        # Support legacy [wheel] section for setting universal
+        wheel = self.distribution.get_option_dict("wheel")
+        if "universal" in wheel:
+            # please don't define this in your global configs
+            log.warning(
+                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
+            )
+            val = wheel["universal"][1].strip()
+            if val.lower() in ("1", "true", "yes"):
+                self.universal = True
+
+        if self.build_number is not None and not self.build_number[:1].isdigit():
+            raise ValueError("Build tag (build-number) must start with a digit.")
+
+    @property
+    def wheel_dist_name(self):
+        """Return distribution full name with - replaced with _"""
+        components = (
+            safer_name(self.distribution.get_name()),
+            safer_version(self.distribution.get_version()),
+        )
+        if self.build_number:
+            components += (self.build_number,)
+        return "-".join(components)
+
+    def get_tag(self) -> tuple[str, str, str]:
+        # bdist sets self.plat_name if unset, we should only use it for purepy
+        # wheels if the user supplied it.
+        if self.plat_name_supplied:
+            plat_name = cast(str, self.plat_name)
+        elif self.root_is_pure:
+            plat_name = "any"
+        else:
+            # "macosx" platform names embed the system version, so they need
+            # special handling
+            if self.plat_name and not self.plat_name.startswith("macosx"):
+                plat_name = self.plat_name
+            else:
+                # on macosx always limit the platform name to comply with any
+                # c-extension modules in bdist_dir, since the user can specify
+                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake
+
+                # on other platforms, and on macosx if there are no c-extension
+                # modules, use the default platform name.
+                plat_name = get_platform(self.bdist_dir)
+
+            if _is_32bit_interpreter():
+                if plat_name in ("linux-x86_64", "linux_x86_64"):
+                    plat_name = "linux_i686"
+                if plat_name in ("linux-aarch64", "linux_aarch64"):
+                    # TODO armv8l, packaging pull request #690 => this did not land
+                    # in pip/packaging yet
+                    plat_name = "linux_armv7l"
+
+        plat_name = (
+            plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
+        )
+
+        if self.root_is_pure:
+            if self.universal:
+                impl = "py2.py3"
+            else:
+                impl = self.python_tag
+            tag = (impl, "none", plat_name)
+        else:
+            impl_name = tags.interpreter_name()
+            impl_ver = tags.interpreter_version()
+            impl = impl_name + impl_ver
+            # We don't work on CPython 3.1, 3.0.
+            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
+                impl = self.py_limited_api
+                abi_tag = "abi3"
+            else:
+                abi_tag = str(get_abi_tag()).lower()
+            tag = (impl, abi_tag, plat_name)
+            # issue gh-374: allow overriding plat_name
+            supported_tags = [
+                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
+            ]
+            assert (
+                tag in supported_tags
+            ), f"would build wheel with unsupported tag {tag}"
+        return tag
+
+    def run(self):
+        build_scripts = self.reinitialize_command("build_scripts")
+        build_scripts.executable = "python"
+        build_scripts.force = True
+
+        build_ext = self.reinitialize_command("build_ext")
+        build_ext.inplace = False
+
+        if not self.skip_build:
+            self.run_command("build")
+
+        install = self.reinitialize_command("install", reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.compile = False
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        # A wheel without setuptools scripts is more cross-platform.
+        # Use the (undocumented) `no_ep` option to setuptools'
+        # install_scripts command to avoid creating entry point scripts.
+        install_scripts = self.reinitialize_command("install_scripts")
+        install_scripts.no_ep = True
+
+        # Use a custom scheme for the archive, because we have to decide
+        # at installation time which scheme to use.
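+        # e.g. scripts end up under "<distname>-<version>.data/scripts/" in the wheel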
+        for key in ("headers", "scripts", "data", "purelib", "platlib"):
+            setattr(install, "install_" + key, os.path.join(self.data_dir, key))
+
+        basedir_observed = ""
+
+        if os.name == "nt":
+            # win32 barfs if any of these are ''; could be '.'?
+            # (distutils.command.install:change_roots bug)
+            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
+            self.install_libbase = self.install_lib = basedir_observed
+
+        setattr(
+            install,
+            "install_purelib" if self.root_is_pure else "install_platlib",
+            basedir_observed,
+        )
+
+        log.info(f"installing to {self.bdist_dir}")
+
+        self.run_command("install")
+
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            archive_root = os.path.join(
+                self.bdist_dir, self._ensure_relative(install.install_base)
+            )
+
+        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
+        distinfo_dirname = (
+            f"{safer_name(self.distribution.get_name())}-"
+            f"{safer_version(self.distribution.get_version())}.dist-info"
+        )
+        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
+        self.egg2dist(self.egginfo_dir, distinfo_dir)
+
+        self.write_wheelfile(distinfo_dir)
+
+        # Make the archive
+        if not os.path.exists(self.dist_dir):
+            os.makedirs(self.dist_dir)
+
+        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
+        with WheelFile(wheel_path, "w", self.compression) as wf:
+            wf.write_files(archive_root)
+
+        # Add to 'Distribution.dist_files' so that the "upload" command works
+        getattr(self.distribution, "dist_files", []).append((
+            "bdist_wheel",
+            "{}.{}".format(*sys.version_info[:2]),  # like 3.7
+            wheel_path,
+        ))
+
+        if not self.keep_temp:
+            log.info(f"removing {self.bdist_dir}")
+            if not self.dry_run:
+                if sys.version_info < (3, 12):
+                    rmtree(self.bdist_dir, onerror=remove_readonly)
+                else:
+                    rmtree(self.bdist_dir, onexc=remove_readonly_exc)
+
+    def write_wheelfile(
+        self, wheelfile_base: str, generator: str = f"setuptools ({__version__})"
+    ):
+        from email.message import Message
+
+        msg = Message()
+        msg["Wheel-Version"] = "1.0"  # of the spec
+        msg["Generator"] = generator
+        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
+        if self.build_number is not None:
+            msg["Build"] = self.build_number
+
+        # Doesn't work for bdist_wininst
+        impl_tag, abi_tag, plat_tag = self.get_tag()
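+        # A compressed tag set like "py2.py3-none-any" expands to one Tag
+        # header per combination (Message allows repeated headers)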
+        for impl in impl_tag.split("."):
+            for abi in abi_tag.split("."):
+                for plat in plat_tag.split("."):
+                    msg["Tag"] = "-".join((impl, abi, plat))
+
+        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
+        log.info(f"creating {wheelfile_path}")
+        with open(wheelfile_path, "wb") as f:
+            BytesGenerator(f, maxheaderlen=0).flatten(msg)
+
+    def _ensure_relative(self, path: str) -> str:
+        # copied from distutils.dir_util, where this helper was later deleted
+        drive, path = os.path.splitdrive(path)
+        if path[0:1] == os.sep:
+            path = drive + path[1:]
+        return path
+
+    @property
+    def license_paths(self) -> Iterable[str]:
+        if setuptools_major_version >= 57:
+            # Setuptools has resolved any patterns to actual file names
+            return self.distribution.metadata.license_files or ()
+
+        files: set[str] = set()
+        metadata = self.distribution.get_option_dict("metadata")
+        if setuptools_major_version >= 42:
+            # Setuptools recognizes the license_files option but does not do globbing
+            patterns = cast(Sequence[str], self.distribution.metadata.license_files)
+        else:
+            # Prior to those, wheel is entirely responsible for handling license files
+            if "license_files" in metadata:
+                patterns = metadata["license_files"][1].split()
+            else:
+                patterns = ()
+
+        if "license_file" in metadata:
+            warnings.warn(
+                'The "license_file" option is deprecated. Use "license_files" instead.',
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            files.add(metadata["license_file"][1])
+
+        if not files and not patterns and not isinstance(patterns, list):
+            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")
+
+        for pattern in patterns:
+            for path in iglob(pattern):
+                if path.endswith("~"):
+                    log.debug(
+                        f'ignoring license file "{path}" as it looks like a backup'
+                    )
+                    continue
+
+                if path not in files and os.path.isfile(path):
+                    log.info(
+                        f'adding license file "{path}" (matched pattern "{pattern}")'
+                    )
+                    files.add(path)
+
+        return files
+
+    def egg2dist(self, egginfo_path: str, distinfo_path: str):
+        """Convert an .egg-info directory into a .dist-info directory"""
+
+        def adios(p: str) -> None:
+            """Appropriately delete directory, file or link."""
+            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
+                shutil.rmtree(p)
+            elif os.path.exists(p):
+                os.unlink(p)
+
+        adios(distinfo_path)
+
+        if not os.path.exists(egginfo_path):
+            # There is no egg-info. This is probably because the egg-info
+            # file/directory is not named matching the distribution name used
+            # to name the archive file. Check for this case and report
+            # accordingly.
+            import glob
+
+            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
+            possible = glob.glob(pat)
+            err = f"Egg metadata expected at {egginfo_path} but not found"
+            if possible:
+                alt = os.path.basename(possible[0])
+                err += f" ({alt} found - possible misnamed archive file?)"
+
+            raise ValueError(err)
+
+        if os.path.isfile(egginfo_path):
+            # .egg-info is a single file
+            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
+            os.mkdir(distinfo_path)
+        else:
+            # .egg-info is a directory
+            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
+            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
+
+            # ignore common egg metadata that is useless to wheel
+            shutil.copytree(
+                egginfo_path,
+                distinfo_path,
+                ignore=lambda x, y: {
+                    "PKG-INFO",
+                    "requires.txt",
+                    "SOURCES.txt",
+                    "not-zip-safe",
+                },
+            )
+
+            # delete dependency_links if it is only whitespace
+            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
+            with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
+                dependency_links = dependency_links_file.read().strip()
+            if not dependency_links:
+                adios(dependency_links_path)
+
+        pkg_info_path = os.path.join(distinfo_path, "METADATA")
+        serialization_policy = EmailPolicy(
+            utf8=True,
+            mangle_from_=False,
+            max_line_length=0,
+        )
+        with open(pkg_info_path, "w", encoding="utf-8") as out:
+            Generator(out, policy=serialization_policy).flatten(pkg_info)
+
+        for license_path in self.license_paths:
+            filename = os.path.basename(license_path)
+            shutil.copy(license_path, os.path.join(distinfo_path, filename))
+
+        adios(egginfo_path)
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index b8ed84750a..dc812ba752 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -59,7 +59,7 @@
 from .install_scripts import install_scripts as install_scripts_cls
 
 if TYPE_CHECKING:
-    from wheel.wheelfile import WheelFile  # type:ignore[import-untyped] # noqa
+    from .._vendor.wheelfile import WheelFile  # type:ignore[import-untyped] # noqa
 
 _P = TypeVar("_P", bound=StrPath)
 _logger = logging.getLogger(__name__)
@@ -335,7 +335,7 @@ def _safely_run(self, cmd_name: str):
             )
 
     def _create_wheel_file(self, bdist_wheel):
-        from wheel.wheelfile import WheelFile
+        from .._vendor.wheel.wheelfile import WheelFile
 
         dist_info = self.get_finalized_command("dist_info")
         dist_name = dist_info.name
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 03f6c0398b..9db70f2496 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -710,6 +710,12 @@ def get_command_class(self, command):
         if command in self.cmdclass:
             return self.cmdclass[command]
 
+        # Special case bdist_wheel so it's never loaded from "wheel"
+        if command == 'bdist_wheel':
+            from .command.bdist_wheel import bdist_wheel
+
+            return bdist_wheel
+
         eps = metadata.entry_points(group='distutils.commands', name=command)
         for ep in eps:
             self.cmdclass[command] = cmdclass = ep.load()
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 8eb02ac6d3..6a3444cd35 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -78,6 +78,7 @@ def install(self):
 # ]]]
 names = (
     'backports',
+    'bin',
     'importlib_metadata',
     'importlib_resources',
     'jaraco',
@@ -85,6 +86,7 @@ def install(self):
     'ordered_set',
     'packaging',
     'tomli',
+    'wheel',
     'zipp',
 )
 # [[[end]]]
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
new file mode 100644
index 0000000000..161fb88e23
--- /dev/null
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -0,0 +1,460 @@
+from __future__ import annotations
+
+import builtins
+import importlib
+import os.path
+import platform
+import shutil
+import stat
+import struct
+import subprocess
+import sys
+import sysconfig
+from inspect import cleandoc
+from unittest.mock import Mock
+from zipfile import ZipFile
+
+import pytest
+import setuptools
+from setuptools.command.bdist_wheel import (
+    bdist_wheel,
+    get_abi_tag,
+    remove_readonly,
+    remove_readonly_exc,
+)
+from setuptools._vendor.packaging import tags
+from setuptools._vendor.wheel.wheelfile import WheelFile
+
+DEFAULT_FILES = {
+    "dummy_dist-1.0.dist-info/top_level.txt",
+    "dummy_dist-1.0.dist-info/METADATA",
+    "dummy_dist-1.0.dist-info/WHEEL",
+    "dummy_dist-1.0.dist-info/RECORD",
+}
+DEFAULT_LICENSE_FILES = {
+    "LICENSE",
+    "LICENSE.txt",
+    "LICENCE",
+    "LICENCE.txt",
+    "COPYING",
+    "COPYING.md",
+    "NOTICE",
+    "NOTICE.rst",
+    "AUTHORS",
+    "AUTHORS.txt",
+}
+OTHER_IGNORED_FILES = {
+    "LICENSE~",
+    "AUTHORS~",
+}
+SETUPPY_EXAMPLE = """\
+from setuptools import setup
+
+setup(
+    name='dummy_dist',
+    version='1.0',
+)
+"""
+
+
+@pytest.fixture
+def dummy_dist(tmp_path_factory):
+    basedir = tmp_path_factory.mktemp("dummy_dist")
+    basedir.joinpath("setup.py").write_text(SETUPPY_EXAMPLE, encoding="utf-8")
+    for fname in DEFAULT_LICENSE_FILES | OTHER_IGNORED_FILES:
+        basedir.joinpath(fname).write_text("", encoding="utf-8")
+
+    licensedir = basedir.joinpath("licenses")
+    licensedir.mkdir()
+    licensedir.joinpath("DUMMYFILE").write_text("", encoding="utf-8")
+    return basedir
+
+
+def test_no_scripts(wheel_paths):
+    """Make sure entry point scripts are not generated."""
+    path = next(path for path in wheel_paths if "complex_dist" in path)
+    for entry in ZipFile(path).infolist():
+        assert ".data/scripts/" not in entry.filename
+
+
+def test_unicode_record(wheel_paths):
+    path = next(path for path in wheel_paths if "unicode.dist" in path)
+    with ZipFile(path) as zf:
+        record = zf.read("unicode.dist-0.1.dist-info/RECORD")
+
+    assert "åäö_日本語.py".encode() in record
+
+
+UTF8_PKG_INFO = """\
+Metadata-Version: 2.1
+Name: helloworld
+Version: 42
+Author-email: "John X. Ãørçeč" , Γαμα קּ 東 
+
+
+UTF-8 描述 説明
+"""
+
+
+def test_preserve_unicode_metadata(monkeypatch, tmp_path):
+    monkeypatch.chdir(tmp_path)
+    egginfo = tmp_path / "dummy_dist.egg-info"
+    distinfo = tmp_path / "dummy_dist.dist-info"
+
+    egginfo.mkdir()
+    (egginfo / "PKG-INFO").write_text(UTF8_PKG_INFO, encoding="utf-8")
+    (egginfo / "dependency_links.txt").touch()
+
+    class simpler_bdist_wheel(bdist_wheel):
+        """Avoid messing with setuptools/distutils internals"""
+
+        def __init__(self):
+            pass
+
+        @property
+        def license_paths(self):
+            return []
+
+    cmd_obj = simpler_bdist_wheel()
+    cmd_obj.egg2dist(egginfo, distinfo)
+
+    metadata = (distinfo / "METADATA").read_text(encoding="utf-8")
+    assert 'Author-email: "John X. Ãørçeč"' in metadata
+    assert "Γαμα קּ 東 " in metadata
+    assert "UTF-8 描述 説明" in metadata
+
+
+def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call([
+        sys.executable,
+        "setup.py",
+        "bdist_wheel",
+        "-b",
+        str(tmp_path),
+        "--universal",
+    ])
+    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+        license_files = {
+            "dummy_dist-1.0.dist-info/" + fname for fname in DEFAULT_LICENSE_FILES
+        }
+        assert set(wf.namelist()) == DEFAULT_FILES | license_files
+
+
+def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
+    dummy_dist.joinpath("setup.cfg").write_text(
+        "[metadata]\nlicense_file=licenses/DUMMYFILE", encoding="utf-8"
+    )
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call([
+        sys.executable,
+        "setup.py",
+        "bdist_wheel",
+        "-b",
+        str(tmp_path),
+        "--universal",
+    ])
+    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+        license_files = {"dummy_dist-1.0.dist-info/DUMMYFILE"}
+        assert set(wf.namelist()) == DEFAULT_FILES | license_files
+
+
+@pytest.mark.parametrize(
+    "config_file, config",
+    [
+        ("setup.cfg", "[metadata]\nlicense_files=licenses/*\n  LICENSE"),
+        ("setup.cfg", "[metadata]\nlicense_files=licenses/*, LICENSE"),
+        (
+            "setup.py",
+            SETUPPY_EXAMPLE.replace(
+                ")", "  license_files=['licenses/DUMMYFILE', 'LICENSE'])"
+            ),
+        ),
+    ],
+)
+def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, config):
+    dummy_dist.joinpath(config_file).write_text(config, encoding="utf-8")
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call([
+        sys.executable,
+        "setup.py",
+        "bdist_wheel",
+        "-b",
+        str(tmp_path),
+        "--universal",
+    ])
+    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+        license_files = {
+            "dummy_dist-1.0.dist-info/" + fname for fname in {"DUMMYFILE", "LICENSE"}
+        }
+        assert set(wf.namelist()) == DEFAULT_FILES | license_files
+
+
+def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path):
+    dummy_dist.joinpath("setup.cfg").write_text(
+        "[metadata]\nlicense_files=\n", encoding="utf-8"
+    )
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call([
+        sys.executable,
+        "setup.py",
+        "bdist_wheel",
+        "-b",
+        str(tmp_path),
+        "--universal",
+    ])
+    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+        assert set(wf.namelist()) == DEFAULT_FILES
+
+
+def test_build_number(dummy_dist, monkeypatch, tmp_path):
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call([
+        sys.executable,
+        "setup.py",
+        "bdist_wheel",
+        "-b",
+        str(tmp_path),
+        "--universal",
+        "--build-number=2",
+    ])
+    with WheelFile("dist/dummy_dist-1.0-2-py2.py3-none-any.whl") as wf:
+        filenames = set(wf.namelist())
+        assert "dummy_dist-1.0.dist-info/RECORD" in filenames
+        assert "dummy_dist-1.0.dist-info/METADATA" in filenames
+
+
+def test_limited_abi(monkeypatch, tmp_path):
+    """Test that building a binary wheel with the limited ABI works."""
+    this_dir = os.path.dirname(__file__)
+    source_dir = os.path.join(this_dir, "testdata", "extension.dist")
+    build_dir = tmp_path.joinpath("build")
+    dist_dir = tmp_path.joinpath("dist")
+    monkeypatch.chdir(source_dir)
+    subprocess.check_call([
+        sys.executable,
+        "setup.py",
+        "bdist_wheel",
+        "-b",
+        str(build_dir),
+        "-d",
+        str(dist_dir),
+    ])
+
+
+def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
+    basedir = str(tmp_path.joinpath("dummy"))
+    shutil.copytree(str(dummy_dist), basedir)
+    monkeypatch.chdir(basedir)
+
+    # Make the tree read-only
+    for root, _dirs, files in os.walk(basedir):
+        for fname in files:
+            os.chmod(os.path.join(root, fname), stat.S_IREAD)
+
+    subprocess.check_call([sys.executable, "setup.py", "bdist_wheel"])
+
+
+@pytest.mark.parametrize(
+    "option, compress_type",
+    list(bdist_wheel.supported_compressions.items()),
+    ids=list(bdist_wheel.supported_compressions),
+)
+def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type):
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call([
+        sys.executable,
+        "setup.py",
+        "bdist_wheel",
+        "-b",
+        str(tmp_path),
+        "--universal",
+        f"--compression={option}",
+    ])
+    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+        filenames = set(wf.namelist())
+        assert "dummy_dist-1.0.dist-info/RECORD" in filenames
+        assert "dummy_dist-1.0.dist-info/METADATA" in filenames
+        for zinfo in wf.filelist:
+            assert zinfo.compress_type == compress_type
+
+
+def test_wheelfile_line_endings(wheel_paths):
+    for path in wheel_paths:
+        with WheelFile(path) as wf:
+            wheelfile = next(fn for fn in wf.filelist if fn.filename.endswith("WHEEL"))
+            wheelfile_contents = wf.read(wheelfile)
+            assert b"\r" not in wheelfile_contents
+
+
+def test_unix_epoch_timestamps(dummy_dist, monkeypatch, tmp_path):
+    monkeypatch.setenv("SOURCE_DATE_EPOCH", "0")
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call([
+        sys.executable,
+        "setup.py",
+        "bdist_wheel",
+        "-b",
+        str(tmp_path),
+        "--universal",
+        "--build-number=2",
+    ])
+
+
+def test_get_abi_tag_windows(monkeypatch):
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "cp")
+    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "cp313-win_amd64")
+    assert get_abi_tag() == "cp313"
+
+
+def test_get_abi_tag_pypy_old(monkeypatch):
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
+    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy36-pp73")
+    assert get_abi_tag() == "pypy36_pp73"
+
+
+def test_get_abi_tag_pypy_new(monkeypatch):
+    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy37-pp73-darwin")
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
+    assert get_abi_tag() == "pypy37_pp73"
+
+
+def test_get_abi_tag_graalpy(monkeypatch):
+    monkeypatch.setattr(
+        sysconfig, "get_config_var", lambda x: "graalpy231-310-native-x86_64-linux"
+    )
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "graalpy")
+    assert get_abi_tag() == "graalpy231_310_native"
+
+
+def test_get_abi_tag_fallback(monkeypatch):
+    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "unknown-python-310")
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "unknown-python")
+    assert get_abi_tag() == "unknown_python_310"
+
+
+def test_platform_with_space(dummy_dist, monkeypatch):
+    """Ensure building on platforms with a space in the name succeed."""
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call([
+        sys.executable,
+        "setup.py",
+        "bdist_wheel",
+        "--plat-name",
+        "isilon onefs",
+    ])
+
+
+def test_rmtree_readonly(monkeypatch, tmp_path):
+    """Verify onerr works as expected"""
+
+    bdist_dir = tmp_path / "with_readonly"
+    bdist_dir.mkdir()
+    some_file = bdist_dir.joinpath("file.txt")
+    some_file.touch()
+    some_file.chmod(stat.S_IREAD)
+
+    expected_count = 1 if sys.platform.startswith("win") else 0
+
+    if sys.version_info < (3, 12):
+        count_remove_readonly = Mock(side_effect=remove_readonly)
+        shutil.rmtree(bdist_dir, onerror=count_remove_readonly)
+        assert count_remove_readonly.call_count == expected_count
+    else:
+        count_remove_readonly_exc = Mock(side_effect=remove_readonly_exc)
+        shutil.rmtree(bdist_dir, onexc=count_remove_readonly_exc)
+        assert count_remove_readonly_exc.call_count == expected_count
+
+    assert not bdist_dir.is_dir()
+
+
+def test_data_dir_with_tag_build(monkeypatch, tmp_path):
+    """
+    Setuptools allow authors to set PEP 440's local version segments
+    using ``egg_info.tag_build``. This should be reflected not only in the
+    ``.whl`` file name, but also in the ``.dist-info`` and ``.data`` dirs.
+    See pypa/setuptools#3997.
+    """
+    monkeypatch.chdir(tmp_path)
+    files = {
+        "setup.py": """
+            from setuptools import setup
+            setup(headers=["hello.h"])
+            """,
+        "setup.cfg": """
+            [metadata]
+            name = test
+            version = 1.0
+
+            [options.data_files]
+            hello/world = file.txt
+
+            [egg_info]
+            tag_build = +what
+            tag_date = 0
+            """,
+        "file.txt": "",
+        "hello.h": "",
+    }
+    for file, content in files.items():
+        with open(file, "w", encoding="utf-8") as fh:
+            fh.write(cleandoc(content))
+
+    subprocess.check_call([sys.executable, "setup.py", "bdist_wheel"])
+
+    # Ensure .whl, .dist-info and .data contain the local segment
+    wheel_path = "dist/test-1.0+what-py3-none-any.whl"
+    assert os.path.exists(wheel_path)
+    entries = set(ZipFile(wheel_path).namelist())
+    for expected in (
+        "test-1.0+what.data/headers/hello.h",
+        "test-1.0+what.data/data/hello/world/file.txt",
+        "test-1.0+what.dist-info/METADATA",
+        "test-1.0+what.dist-info/WHEEL",
+    ):
+        assert expected in entries
+
+    for not_expected in (
+        "test.data/headers/hello.h",
+        "test-1.0.data/data/hello/world/file.txt",
+        "test.dist-info/METADATA",
+        "test-1.0.dist-info/WHEEL",
+    ):
+        assert not_expected not in entries
+
+
+@pytest.mark.parametrize(
+    "reported,expected",
+    [("linux-x86_64", "linux_i686"), ("linux-aarch64", "linux_armv7l")],
+)
+@pytest.mark.skipif(
+    platform.system() != "Linux", reason="Only makes sense to test on Linux"
+)
+def test_platform_linux32(reported, expected, monkeypatch):
+    monkeypatch.setattr(struct, "calcsize", lambda x: 4)
+    dist = setuptools.Distribution()
+    cmd = bdist_wheel(dist)
+    cmd.plat_name = reported
+    cmd.root_is_pure = False
+    _, _, actual = cmd.get_tag()
+    assert actual == expected
+
+
+def test_no_ctypes(monkeypatch) -> None:
+    def _fake_import(name: str, *args, **kwargs):
+        if name == "ctypes":
+            raise ModuleNotFoundError(f"No module named {name}")
+
+        return importlib.__import__(name, *args, **kwargs)
+
+    # Install an importer shim that refuses to load ctypes
+    monkeypatch.setattr(builtins, "__import__", _fake_import)
+
+    # Unload all wheel modules
+    for module in list(sys.modules):
+        if module.startswith("wheel"):
+            monkeypatch.delitem(sys.modules, module)
+
+    from wheel import bdist_wheel
+
+    assert bdist_wheel
diff --git a/tools/vendored.py b/tools/vendored.py
index 69a538f20c..381818f2cf 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -1,6 +1,8 @@
 import re
+import shutil
 import sys
 import subprocess
+from textwrap import dedent
 
 from path import Path
 
@@ -102,6 +104,60 @@ def rewrite_more_itertools(pkg_files: Path):
     more_file.write_text(text)
 
 
+def rewrite_wheel(pkg_files: Path):
+    """
+    Remove parts of wheel not needed by bdist_wheel, and rewrite imports to use
+    setuptools's own code or vendored dependencies.
+    """
+    shutil.rmtree(pkg_files / 'cli')
+    shutil.rmtree(pkg_files / 'vendored')
+    pkg_files.joinpath('_setuptools_logging.py').unlink()
+    pkg_files.joinpath('__main__.py').unlink()
+    pkg_files.joinpath('bdist_wheel.py').unlink()
+
+    # Rewrite vendored imports to use setuptools's own vendored libraries
+    for path in pkg_files.iterdir():
+        if path.suffix == '.py':
+            code = path.read_text()
+            if path.name == 'wheelfile.py':
+                code = re.sub(
+                    r"^from wheel.util import ",
+                    r"from .util import ",
+                    code,
+                    flags=re.MULTILINE,
+                )
+
+                # No need to keep the wheel.cli package just for this trivial exception
+                code = re.sub(
+                    r"^from wheel.cli import WheelError\n",
+                    r"",
+                    code,
+                    flags=re.MULTILINE,
+                )
+                code += dedent(
+                    """
+
+                    class WheelError(Exception):
+                        pass
+                    """
+                )
+            else:
+                code = re.sub(
+                    r"^from \.vendored\.([\w.]+) import ",
+                    r"from ..\1 import ",
+                    code,
+                    flags=re.MULTILINE,
+                )
+                code = re.sub(
+                    r"^from \.util import log$",
+                    r"from distutils import log$",
+                    code,
+                    flags=re.MULTILINE,
+                )
+
+            path.write_text(code)
+
+
 def rewrite_platformdirs(pkg_files: Path):
     """
     Replace some absolute imports with relative ones.
@@ -163,6 +219,7 @@ def update_setuptools():
     rewrite_importlib_resources(vendor / 'importlib_resources', 'setuptools.extern')
     rewrite_importlib_metadata(vendor / 'importlib_metadata', 'setuptools.extern')
     rewrite_more_itertools(vendor / "more_itertools")
+    rewrite_wheel(vendor / "wheel")
 
 
 def yield_top_level(name):

From b5930743a2b3a21b38c027c80a50c019a165110e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Sat, 18 May 2024 20:48:57 -0400
Subject: [PATCH 0664/1761] Added missing files and removed wheel as a testing
 dependency

---
 setup.cfg                                                      | 1 -
 .../_vendor/wheel-0.43.0.dist-info}/INSTALLER                  | 0
 .../_vendor/wheel-0.43.0.dist-info}/REQUESTED                  | 0
 .../_vendor/wheel-0.43.0.dist-info}/WHEEL                      | 3 +--
 4 files changed, 1 insertion(+), 3 deletions(-)
 rename {pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info => setuptools/_vendor/wheel-0.43.0.dist-info}/INSTALLER (100%)
 rename {pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info => setuptools/_vendor/wheel-0.43.0.dist-info}/REQUESTED (100%)
 rename {pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info => setuptools/_vendor/wheel-0.43.0.dist-info}/WHEEL (64%)

diff --git a/setup.cfg b/setup.cfg
index 9a91c01490..b083a262f4 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -54,7 +54,6 @@ testing =
 
 	# local
 	virtualenv>=13.0.0
-	wheel
 	pip>=19.1 # For proper file:// URLs support.
 	packaging>=23.2
 	jaraco.envs>=2.2
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER b/setuptools/_vendor/wheel-0.43.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
rename to setuptools/_vendor/wheel-0.43.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED b/setuptools/_vendor/wheel-0.43.0.dist-info/REQUESTED
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
rename to setuptools/_vendor/wheel-0.43.0.dist-info/REQUESTED
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL b/setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL
similarity index 64%
rename from pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
rename to setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL
index ba48cbcf92..3b5e64b5e6 100644
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
+++ b/setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL
@@ -1,5 +1,4 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.41.3)
+Generator: flit 3.9.0
 Root-Is-Purelib: true
 Tag: py3-none-any
-

From ea5d0e167d8309de6a7e0bf3831c51b1f04aff92 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Sun, 19 May 2024 06:07:00 -0400
Subject: [PATCH 0665/1761] Cleaned up the imports

---
 setuptools/command/bdist_wheel.py | 15 ++++++---------
 1 file changed, 6 insertions(+), 9 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 1d0a77207e..39ef73418d 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -22,14 +22,11 @@
 from typing import TYPE_CHECKING, Callable, Iterable, Literal, Sequence, cast
 from zipfile import ZIP_DEFLATED, ZIP_STORED
 
-import setuptools
-from setuptools import Command
-
-from .. import __version__
-from .._vendor.wheel.metadata import pkginfo_to_metadata
-from .._vendor.packaging import tags
-from .._vendor.packaging import version as _packaging_version
-from .._vendor.wheel.wheelfile import WheelFile
+from .. import Command, __version__
+from ..extern.wheel.metadata import pkginfo_to_metadata
+from ..extern.packaging import tags
+from ..extern.packaging import version as _packaging_version
+from ..extern.wheel.wheelfile import WheelFile
 
 if TYPE_CHECKING:
     import types
@@ -54,7 +51,7 @@ def safe_version(version: str) -> str:
         return re.sub("[^A-Za-z0-9.]+", "-", version)
 
 
-setuptools_major_version = int(setuptools.__version__.split(".")[0])
+setuptools_major_version = int(__version__.split(".")[0])
 
 PY_LIMITED_API_PATTERN = r"cp3\d"
 

From 2a53cc1200ec4b14e08e84be3c042f8983dfb7d7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 May 2024 09:19:50 +0100
Subject: [PATCH 0666/1761] Prevent 'bin' folders to be taken as extern
 packages

---
 tools/vendored.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/vendored.py b/tools/vendored.py
index 69a538f20c..57fd093aab 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -173,7 +173,7 @@ def yield_top_level(name):
     ['backports', 'jaraco', 'zipp']
     """
     vendor = Path(f"{name}/_vendor")
-    ignore = {"__pycache__", "__init__.py", ".ruff_cache"}
+    ignore = {"__pycache__", "__init__.py", ".ruff_cache", "bin"}
 
     for item in sorted(vendor.iterdir()):
         if item.name in ignore:

From 69141f69f8bf38da34cbea552d6fdaa9c8619c53 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 May 2024 09:36:27 +0100
Subject: [PATCH 0667/1761] Add doctest for vendorised bin folder

---
 tools/vendored.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tools/vendored.py b/tools/vendored.py
index 57fd093aab..41079e1330 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -171,6 +171,8 @@ def yield_top_level(name):
     >>> examples = roots & {"jaraco", "backports", "zipp"}
     >>> list(sorted(examples))
     ['backports', 'jaraco', 'zipp']
+    >>> 'bin' in examples
+    False
     """
     vendor = Path(f"{name}/_vendor")
     ignore = {"__pycache__", "__init__.py", ".ruff_cache", "bin"}

From a4f3ed048e89c0b144d9cf0aca4bed4d90ad8b05 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 05:38:14 -0400
Subject: [PATCH 0668/1761] Updated a leftover vendored wheel import

---
 setuptools/command/bdist_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 39ef73418d..9851466fe9 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -68,7 +68,7 @@ def get_platform(archive_root: str | None) -> str:
     """Return our platform name 'win32', 'linux_x86_64'"""
     result = sysconfig.get_platform()
     if result.startswith("macosx") and archive_root is not None:
-        from .._vendor.wheel.macosx_libfile import calculate_macosx_platform_tag
+        from ..extern.wheel.macosx_libfile import calculate_macosx_platform_tag
 
         result = calculate_macosx_platform_tag(archive_root, result)
     elif _is_32bit_interpreter():

From c3b9614c552ac460d427f95d739cb7c416c47de0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 05:39:04 -0400
Subject: [PATCH 0669/1761] Updated another leftover vendored wheel import

---
 setuptools/command/editable_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index dc812ba752..f086dc22df 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -335,7 +335,7 @@ def _safely_run(self, cmd_name: str):
             )
 
     def _create_wheel_file(self, bdist_wheel):
-        from .._vendor.wheel.wheelfile import WheelFile
+        from ..extern.wheel.wheelfile import WheelFile
 
         dist_info = self.get_finalized_command("dist_info")
         dist_name = dist_info.name

From 6fd9265218e79de2fa767f91b09d3c4dfeaba987 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 06:17:44 -0400
Subject: [PATCH 0670/1761] Updated another leftover vendored wheel import

---
 setuptools/command/editable_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index f086dc22df..faadd4f892 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -59,7 +59,7 @@
 from .install_scripts import install_scripts as install_scripts_cls
 
 if TYPE_CHECKING:
-    from .._vendor.wheelfile import WheelFile  # type:ignore[import-untyped] # noqa
+    from ..extern.wheel.wheelfile import WheelFile  # type:ignore[import-untyped]
 
 _P = TypeVar("_P", bound=StrPath)
 _logger = logging.getLogger(__name__)

From 82f0006df806175f197e0125c7b4bc297e92faeb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 13:25:05 +0300
Subject: [PATCH 0671/1761] Update setuptools/extern/__init__.py

Co-authored-by: Anderson Bravalheri 
---
 setuptools/extern/__init__.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 6a3444cd35..5ad7169e3b 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -78,7 +78,6 @@ def install(self):
 # ]]]
 names = (
     'backports',
-    'bin',
     'importlib_metadata',
     'importlib_resources',
     'jaraco',

From c8e005d7e266893e34c7ef0a611fdc5e502222c6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 07:13:44 -0400
Subject: [PATCH 0672/1761] Reverted typechecker-only import

---
 setuptools/command/editable_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index faadd4f892..7f689cac70 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -59,7 +59,7 @@
 from .install_scripts import install_scripts as install_scripts_cls
 
 if TYPE_CHECKING:
-    from ..extern.wheel.wheelfile import WheelFile  # type:ignore[import-untyped]
+    from .._vendor.wheel.wheelfile import WheelFile  # type:ignore[import-untyped]
 
 _P = TypeVar("_P", bound=StrPath)
 _logger = logging.getLogger(__name__)

From 6fbe5fc0b06b38bb06a94653e17edadd421a3369 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 07:18:02 -0400
Subject: [PATCH 0673/1761] Updated imports in bdist_wheel tests

---
 setuptools/tests/test_bdist_wheel.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 161fb88e23..5beee075f9 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -22,8 +22,8 @@
     remove_readonly,
     remove_readonly_exc,
 )
-from setuptools._vendor.packaging import tags
-from setuptools._vendor.wheel.wheelfile import WheelFile
+from setuptools.extern.packaging import tags
+from setuptools.extern.wheel.wheelfile import WheelFile
 
 DEFAULT_FILES = {
     "dummy_dist-1.0.dist-info/top_level.txt",
@@ -455,6 +455,6 @@ def _fake_import(name: str, *args, **kwargs):
         if module.startswith("wheel"):
             monkeypatch.delitem(sys.modules, module)
 
-    from wheel import bdist_wheel
+    from setuptools.extern.wheel import bdist_wheel
 
     assert bdist_wheel

From 854e59728ca6e3378e830bea04c8201d237a0cfe Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 07:19:58 -0400
Subject: [PATCH 0674/1761] Fixed wrong import

---
 setuptools/tests/test_bdist_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 5beee075f9..dc5304a88b 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -455,6 +455,6 @@ def _fake_import(name: str, *args, **kwargs):
         if module.startswith("wheel"):
             monkeypatch.delitem(sys.modules, module)
 
-    from setuptools.extern.wheel import bdist_wheel
+    from setuptools.command import bdist_wheel
 
     assert bdist_wheel

From 339453204576480d7ffc6302420dc9e0696ec925 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 07:52:41 -0400
Subject: [PATCH 0675/1761] Added back accidentally removed files

---
 .../INSTALLER                                 |    1 +
 .../backports.tarfile-1.1.1.dist-info/LICENSE |   17 +
 .../METADATA                                  |   45 +
 .../backports.tarfile-1.1.1.dist-info/RECORD  |   17 +
 .../REQUESTED                                 |    0
 .../backports.tarfile-1.1.1.dist-info/WHEEL   |    5 +
 .../top_level.txt                             |    1 +
 .../_vendor/backports/tarfile/__init__.py     | 2902 +++++++++++++++++
 .../_vendor/backports/tarfile/__main__.py     |    5 +
 .../backports/tarfile/compat/__init__.py      |    0
 .../_vendor/backports/tarfile/compat/py38.py  |   24 +
 .../INSTALLER                                 |    1 +
 .../jaraco.functools-4.0.1.dist-info/LICENSE  |   17 +
 .../jaraco.functools-4.0.1.dist-info/METADATA |   64 +
 .../jaraco.functools-4.0.1.dist-info/RECORD   |   10 +
 .../jaraco.functools-4.0.1.dist-info/WHEEL    |    5 +
 .../top_level.txt                             |    1 +
 .../more_itertools-10.2.0.dist-info/REQUESTED |    0
 .../INSTALLER                                 |    1 +
 .../backports.tarfile-1.1.1.dist-info/LICENSE |   17 +
 .../METADATA                                  |   45 +
 .../backports.tarfile-1.1.1.dist-info/RECORD  |   17 +
 .../REQUESTED                                 |    0
 .../backports.tarfile-1.1.1.dist-info/WHEEL   |    5 +
 .../top_level.txt                             |    1 +
 .../_vendor/backports/tarfile/__init__.py     | 2902 +++++++++++++++++
 .../_vendor/backports/tarfile/__main__.py     |    5 +
 .../backports/tarfile/compat/__init__.py      |    0
 .../_vendor/backports/tarfile/compat/py38.py  |   24 +
 .../INSTALLER                                 |    1 +
 .../jaraco.functools-4.0.1.dist-info/LICENSE  |   17 +
 .../jaraco.functools-4.0.1.dist-info/METADATA |   64 +
 .../jaraco.functools-4.0.1.dist-info/RECORD   |   10 +
 .../jaraco.functools-4.0.1.dist-info/WHEEL    |    5 +
 .../top_level.txt                             |    1 +
 35 files changed, 6230 insertions(+)
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt
 create mode 100644 pkg_resources/_vendor/backports/tarfile/__init__.py
 create mode 100644 pkg_resources/_vendor/backports/tarfile/__main__.py
 create mode 100644 pkg_resources/_vendor/backports/tarfile/compat/__init__.py
 create mode 100644 pkg_resources/_vendor/backports/tarfile/compat/py38.py
 create mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
 create mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
 create mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
 create mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
 create mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
 create mode 100644 pkg_resources/_vendor/more_itertools-10.2.0.dist-info/REQUESTED
 create mode 100644 setuptools/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER
 create mode 100644 setuptools/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE
 create mode 100644 setuptools/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
 create mode 100644 setuptools/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
 create mode 100644 setuptools/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED
 create mode 100644 setuptools/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL
 create mode 100644 setuptools/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt
 create mode 100644 setuptools/_vendor/backports/tarfile/__init__.py
 create mode 100644 setuptools/_vendor/backports/tarfile/__main__.py
 create mode 100644 setuptools/_vendor/backports/tarfile/compat/__init__.py
 create mode 100644 setuptools/_vendor/backports/tarfile/compat/py38.py
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt

diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/METADATA b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
new file mode 100644
index 0000000000..d29c50158a
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
@@ -0,0 +1,45 @@
+Metadata-Version: 2.1
+Name: backports.tarfile
+Version: 1.1.1
+Summary: Backport of CPython tarfile module
+Author-email: "Jason R. Coombs" 
+Project-URL: Homepage, https://github.com/jaraco/backports.tarfile
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Provides-Extra: docs
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: sphinx-lint ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: jaraco.test ; extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
+   :target: https://pypi.org/project/backports.tarfile
+
+.. image:: https://img.shields.io/pypi/pyversions/backports.tarfile.svg
+
+.. image:: https://github.com/jaraco/backports.tarfile/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/backports.tarfile/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. .. image:: https://readthedocs.org/projects/backportstarfile/badge/?version=latest
+..    :target: https://backportstarfile.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
new file mode 100644
index 0000000000..af585a832d
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
@@ -0,0 +1,17 @@
+backports.tarfile-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.1.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.1.1.dist-info/METADATA,sha256=XtPZDjwuCtDbN49cpJgthPJm40mfbhk5BllI-jBVVxc,1969
+backports.tarfile-1.1.1.dist-info/RECORD,,
+backports.tarfile-1.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.1.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.1.1.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81
+backports/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/__init__.py,sha256=QOayikyptGOBh_dN1WFI5w0nnUYpX5Gma7p2JgksJIY,106960
+backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59
+backports/tarfile/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/__pycache__/__main__.cpython-312.pyc,,
+backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports/tarfile/compat/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/compat/__pycache__/py38.cpython-312.pyc,,
+backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..99d2be5b64
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+backports
diff --git a/pkg_resources/_vendor/backports/tarfile/__init__.py b/pkg_resources/_vendor/backports/tarfile/__init__.py
new file mode 100644
index 0000000000..6dd498dc04
--- /dev/null
+++ b/pkg_resources/_vendor/backports/tarfile/__init__.py
@@ -0,0 +1,2902 @@
+#!/usr/bin/env python3
+#-------------------------------------------------------------------
+# tarfile.py
+#-------------------------------------------------------------------
+# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
+# All rights reserved.
+#
+# Permission  is  hereby granted,  free  of charge,  to  any person
+# obtaining a  copy of  this software  and associated documentation
+# files  (the  "Software"),  to   deal  in  the  Software   without
+# restriction,  including  without limitation  the  rights to  use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies  of  the  Software,  and to  permit  persons  to  whom the
+# Software  is  furnished  to  do  so,  subject  to  the  following
+# conditions:
+#
+# The above copyright  notice and this  permission notice shall  be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
+# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
+# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
+# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
+# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
+# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+"""Read from and write to tar format archives.
+"""
+
+version     = "0.9.0"
+__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
+__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
+
+#---------
+# Imports
+#---------
+from builtins import open as bltn_open
+import sys
+import os
+import io
+import shutil
+import stat
+import time
+import struct
+import copy
+import re
+import warnings
+
+from .compat.py38 import removesuffix
+
+try:
+    import pwd
+except ImportError:
+    pwd = None
+try:
+    import grp
+except ImportError:
+    grp = None
+
+# os.symlink on Windows prior to 6.0 raises NotImplementedError
+# OSError (winerror=1314) will be raised if the caller does not hold the
+# SeCreateSymbolicLinkPrivilege privilege
+symlink_exception = (AttributeError, NotImplementedError, OSError)
+
+# from tarfile import *
+__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
+           "CompressionError", "StreamError", "ExtractError", "HeaderError",
+           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
+           "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
+           "tar_filter", "FilterError", "AbsoluteLinkError",
+           "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
+           "LinkOutsideDestinationError"]
+
+
+#---------------------------------------------------------
+# tar constants
+#---------------------------------------------------------
+NUL = b"\0"                     # the null character
+BLOCKSIZE = 512                 # length of processing blocks
+RECORDSIZE = BLOCKSIZE * 20     # length of records
+GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
+POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string
+
+LENGTH_NAME = 100               # maximum length of a filename
+LENGTH_LINK = 100               # maximum length of a linkname
+LENGTH_PREFIX = 155             # maximum length of the prefix field
+
+REGTYPE = b"0"                  # regular file
+AREGTYPE = b"\0"                # regular file
+LNKTYPE = b"1"                  # link (inside tarfile)
+SYMTYPE = b"2"                  # symbolic link
+CHRTYPE = b"3"                  # character special device
+BLKTYPE = b"4"                  # block special device
+DIRTYPE = b"5"                  # directory
+FIFOTYPE = b"6"                 # fifo special device
+CONTTYPE = b"7"                 # contiguous file
+
+GNUTYPE_LONGNAME = b"L"         # GNU tar longname
+GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
+GNUTYPE_SPARSE = b"S"           # GNU tar sparse file
+
+XHDTYPE = b"x"                  # POSIX.1-2001 extended header
+XGLTYPE = b"g"                  # POSIX.1-2001 global header
+SOLARIS_XHDTYPE = b"X"          # Solaris extended header
+
+USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
+GNU_FORMAT = 1                  # GNU tar format
+PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
+DEFAULT_FORMAT = PAX_FORMAT
+
+#---------------------------------------------------------
+# tarfile constants
+#---------------------------------------------------------
+# File types that tarfile supports:
+SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
+                   SYMTYPE, DIRTYPE, FIFOTYPE,
+                   CONTTYPE, CHRTYPE, BLKTYPE,
+                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+                   GNUTYPE_SPARSE)
+
+# File types that will be treated as a regular file.
+REGULAR_TYPES = (REGTYPE, AREGTYPE,
+                 CONTTYPE, GNUTYPE_SPARSE)
+
+# File types that are part of the GNU tar format.
+GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+             GNUTYPE_SPARSE)
+
+# Fields from a pax header that override a TarInfo attribute.
+PAX_FIELDS = ("path", "linkpath", "size", "mtime",
+              "uid", "gid", "uname", "gname")
+
+# Fields from a pax header that are affected by hdrcharset.
+PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
+
+# Fields in a pax header that are numbers, all other fields
+# are treated as strings.
+PAX_NUMBER_FIELDS = {
+    "atime": float,
+    "ctime": float,
+    "mtime": float,
+    "uid": int,
+    "gid": int,
+    "size": int
+}
+
+#---------------------------------------------------------
+# initialization
+#---------------------------------------------------------
+if os.name == "nt":
+    ENCODING = "utf-8"
+else:
+    ENCODING = sys.getfilesystemencoding()
+
+#---------------------------------------------------------
+# Some useful functions
+#---------------------------------------------------------
+
+def stn(s, length, encoding, errors):
+    """Convert a string to a null-terminated bytes object.
+    """
+    if s is None:
+        raise ValueError("metadata cannot contain None")
+    s = s.encode(encoding, errors)
+    return s[:length] + (length - len(s)) * NUL
+
+def nts(s, encoding, errors):
+    """Convert a null-terminated bytes object to a string.
+    """
+    p = s.find(b"\0")
+    if p != -1:
+        s = s[:p]
+    return s.decode(encoding, errors)
+
+def nti(s):
+    """Convert a number field to a python number.
+    """
+    # There are two possible encodings for a number field, see
+    # itn() below.
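+    # A leading 0o200 byte marks an unsigned base-256 value and 0o377 a
+    # negative one, e.g. bytes([0o200, 0, 0, 1]) decodes to 1.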
+    if s[0] in (0o200, 0o377):
+        n = 0
+        for i in range(len(s) - 1):
+            n <<= 8
+            n += s[i + 1]
+        if s[0] == 0o377:
+            n = -(256 ** (len(s) - 1) - n)
+    else:
+        try:
+            s = nts(s, "ascii", "strict")
+            n = int(s.strip() or "0", 8)
+        except ValueError:
+            raise InvalidHeaderError("invalid header")
+    return n
+
+def itn(n, digits=8, format=DEFAULT_FORMAT):
+    """Convert a python number to a number field.
+    """
+    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
+    # octal digits followed by a null-byte, this allows values up to
+    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
+    # that if necessary. A leading 0o200 or 0o377 byte indicate this
+    # particular encoding, the following digits-1 bytes are a big-endian
+    # base-256 representation. This allows values up to (256**(digits-1))-1.
+    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
+    # number.
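+    # For example, itn(4096, 8) -> b"0010000\x00" (seven octal digits
+    # plus NUL); values that do not fit fall back to base-256 as above.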
+    original_n = n
+    n = int(n)
+    if 0 <= n < 8 ** (digits - 1):
+        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
+    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
+        if n >= 0:
+            s = bytearray([0o200])
+        else:
+            s = bytearray([0o377])
+            n = 256 ** digits + n
+
+        for i in range(digits - 1):
+            s.insert(1, n & 0o377)
+            n >>= 8
+    else:
+        raise ValueError("overflow in number field")
+
+    return s
+
+def calc_chksums(buf):
+    """Calculate the checksum for a member's header by summing up all
+       characters except for the chksum field which is treated as if
+       it was filled with spaces. According to the GNU tar sources,
+       some tars (Sun and NeXT) calculate chksum with signed char,
+       which will be different if there are chars in the buffer with
+       the high bit set. So we calculate two checksums, unsigned and
+       signed.
+    """
+    unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
+    signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
+    return unsigned_chksum, signed_chksum
+
+def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
+    """Copy length bytes from fileobj src to fileobj dst.
+       If length is None, copy the entire content.
+    """
+    bufsize = bufsize or 16 * 1024
+    if length == 0:
+        return
+    if length is None:
+        shutil.copyfileobj(src, dst, bufsize)
+        return
+
+    blocks, remainder = divmod(length, bufsize)
+    for b in range(blocks):
+        buf = src.read(bufsize)
+        if len(buf) < bufsize:
+            raise exception("unexpected end of data")
+        dst.write(buf)
+
+    if remainder != 0:
+        buf = src.read(remainder)
+        if len(buf) < remainder:
+            raise exception("unexpected end of data")
+        dst.write(buf)
+    return
+
+def _safe_print(s):
+    encoding = getattr(sys.stdout, 'encoding', None)
+    if encoding is not None:
+        s = s.encode(encoding, 'backslashreplace').decode(encoding)
+    print(s, end=' ')
+
+
+class TarError(Exception):
+    """Base exception."""
+    pass
+class ExtractError(TarError):
+    """General exception for extract errors."""
+    pass
+class ReadError(TarError):
+    """Exception for unreadable tar archives."""
+    pass
+class CompressionError(TarError):
+    """Exception for unavailable compression methods."""
+    pass
+class StreamError(TarError):
+    """Exception for unsupported operations on stream-like TarFiles."""
+    pass
+class HeaderError(TarError):
+    """Base exception for header errors."""
+    pass
+class EmptyHeaderError(HeaderError):
+    """Exception for empty headers."""
+    pass
+class TruncatedHeaderError(HeaderError):
+    """Exception for truncated headers."""
+    pass
+class EOFHeaderError(HeaderError):
+    """Exception for end of file headers."""
+    pass
+class InvalidHeaderError(HeaderError):
+    """Exception for invalid headers."""
+    pass
+class SubsequentHeaderError(HeaderError):
+    """Exception for missing and invalid extended headers."""
+    pass
+
+#---------------------------
+# internal stream interface
+#---------------------------
+class _LowLevelFile:
+    """Low-level file object. Supports reading and writing.
+       It is used instead of a regular file object for streaming
+       access.
+    """
+
+    def __init__(self, name, mode):
+        mode = {
+            "r": os.O_RDONLY,
+            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
+        }[mode]
+        if hasattr(os, "O_BINARY"):
+            mode |= os.O_BINARY
+        self.fd = os.open(name, mode, 0o666)
+
+    def close(self):
+        os.close(self.fd)
+
+    def read(self, size):
+        return os.read(self.fd, size)
+
+    def write(self, s):
+        os.write(self.fd, s)
+
+class _Stream:
+    """Class that serves as an adapter between TarFile and
+       a stream-like object.  The stream-like object only
+       needs to have a read() or write() method that works with bytes,
+       and the method is accessed blockwise.
+       Use of gzip or bzip2 compression is possible.
+       A stream-like object could be for example: sys.stdin.buffer,
+       sys.stdout.buffer, a socket, a tape device etc.
+
+       _Stream is intended to be used only internally.
+    """
+
+    def __init__(self, name, mode, comptype, fileobj, bufsize,
+                 compresslevel):
+        """Construct a _Stream object.
+        """
+        self._extfileobj = True
+        if fileobj is None:
+            fileobj = _LowLevelFile(name, mode)
+            self._extfileobj = False
+
+        if comptype == '*':
+            # Enable transparent compression detection for the
+            # stream interface
+            fileobj = _StreamProxy(fileobj)
+            comptype = fileobj.getcomptype()
+
+        self.name     = name or ""
+        self.mode     = mode
+        self.comptype = comptype
+        self.fileobj  = fileobj
+        self.bufsize  = bufsize
+        self.buf      = b""
+        self.pos      = 0
+        self.closed   = False
+
+        try:
+            if comptype == "gz":
+                try:
+                    import zlib
+                except ImportError:
+                    raise CompressionError("zlib module is not available") from None
+                self.zlib = zlib
+                self.crc = zlib.crc32(b"")
+                if mode == "r":
+                    self.exception = zlib.error
+                    self._init_read_gz()
+                else:
+                    self._init_write_gz(compresslevel)
+
+            elif comptype == "bz2":
+                try:
+                    import bz2
+                except ImportError:
+                    raise CompressionError("bz2 module is not available") from None
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = bz2.BZ2Decompressor()
+                    self.exception = OSError
+                else:
+                    self.cmp = bz2.BZ2Compressor(compresslevel)
+
+            elif comptype == "xz":
+                try:
+                    import lzma
+                except ImportError:
+                    raise CompressionError("lzma module is not available") from None
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = lzma.LZMADecompressor()
+                    self.exception = lzma.LZMAError
+                else:
+                    self.cmp = lzma.LZMACompressor()
+
+            elif comptype != "tar":
+                raise CompressionError("unknown compression type %r" % comptype)
+
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    def __del__(self):
+        if hasattr(self, "closed") and not self.closed:
+            self.close()
+
+    def _init_write_gz(self, compresslevel):
+        """Initialize for writing with gzip compression.
+        """
+        self.cmp = self.zlib.compressobj(compresslevel,
+                                         self.zlib.DEFLATED,
+                                         -self.zlib.MAX_WBITS,
+                                         self.zlib.DEF_MEM_LEVEL,
+                                         0)
+        timestamp = struct.pack("<L", int(time.time()))
+        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+        if self.name.endswith(".gz"):
+            self.name = self.name[:-3]
+        # Honor "directory components removed" from RFC1952
+        self.name = os.path.basename(self.name)
+        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
+        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+    def write(self, s):
+        """Write string s to the stream.
+        """
+        if self.comptype == "gz":
+            self.crc = self.zlib.crc32(s, self.crc)
+        self.pos += len(s)
+        if self.comptype != "tar":
+            s = self.cmp.compress(s)
+        self.__write(s)
+
+    def __write(self, s):
+        """Write string s to the stream if a whole new block
+           is ready to be written.
+        """
+        self.buf += s
+        while len(self.buf) > self.bufsize:
+            self.fileobj.write(self.buf[:self.bufsize])
+            self.buf = self.buf[self.bufsize:]
+
+    def close(self):
+        """Close the _Stream object. No operation should be
+           done on it afterwards.
+        """
+        if self.closed:
+            return
+
+        self.closed = True
+        try:
+            if self.mode == "w" and self.comptype != "tar":
+                self.buf += self.cmp.flush()
+
+            if self.mode == "w" and self.buf:
+                self.fileobj.write(self.buf)
+                self.buf = b""
+                if self.comptype == "gz":
+                    self.fileobj.write(struct.pack("<L", self.crc))
+                    self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
+        finally:
+            if not self._extfileobj:
+                self.fileobj.close()
+
+    def _init_read_gz(self):
+        """Initialize for reading a gzip compressed fileobj.
+        """
+        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+        self.dbuf = b""
+
+        # taken from gzip.GzipFile with some alterations
+        if self.__read(2) != b"\037\213":
+            raise ReadError("not a gzip file")
+        if self.__read(1) != b"\010":
+            raise CompressionError("unsupported compression method")
+
+        flag = ord(self.__read(1))
+        self.__read(6)
+
+        if flag & 4:
+            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+            self.read(xlen)
+        if flag & 8:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 16:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 2:
+            self.__read(2)
+
+    def tell(self):
+        """Return the stream's file pointer position.
+        """
+        return self.pos
+
+    def seek(self, pos=0):
+        """Set the stream's file pointer to pos. Negative seeking
+           is forbidden.
+        """
+        if pos - self.pos >= 0:
+            blocks, remainder = divmod(pos - self.pos, self.bufsize)
+            for i in range(blocks):
+                self.read(self.bufsize)
+            self.read(remainder)
+        else:
+            raise StreamError("seeking backwards is not allowed")
+        return self.pos
+
+    def read(self, size):
+        """Return the next size number of bytes from the stream."""
+        assert size is not None
+        buf = self._read(size)
+        self.pos += len(buf)
+        return buf
+
+    def _read(self, size):
+        """Return size bytes from the stream.
+        """
+        if self.comptype == "tar":
+            return self.__read(size)
+
+        c = len(self.dbuf)
+        t = [self.dbuf]
+        while c < size:
+            # Skip underlying buffer to avoid unaligned double buffering.
+            if self.buf:
+                buf = self.buf
+                self.buf = b""
+            else:
+                buf = self.fileobj.read(self.bufsize)
+                if not buf:
+                    break
+            try:
+                buf = self.cmp.decompress(buf)
+            except self.exception as e:
+                raise ReadError("invalid compressed data") from e
+            t.append(buf)
+            c += len(buf)
+        t = b"".join(t)
+        self.dbuf = t[size:]
+        return t[:size]
+
+    def __read(self, size):
+        """Return size bytes from stream. If internal buffer is empty,
+           read another block from the stream.
+        """
+        c = len(self.buf)
+        t = [self.buf]
+        while c < size:
+            buf = self.fileobj.read(self.bufsize)
+            if not buf:
+                break
+            t.append(buf)
+            c += len(buf)
+        t = b"".join(t)
+        self.buf = t[size:]
+        return t[:size]
+# class _Stream
+
+class _StreamProxy(object):
+    """Small proxy class that enables transparent compression
+       detection for the Stream interface (mode 'r|*').
+    """
+
+    def __init__(self, fileobj):
+        self.fileobj = fileobj
+        self.buf = self.fileobj.read(BLOCKSIZE)
+
+    def read(self, size):
+        self.read = self.fileobj.read
+        return self.buf
+
+    def getcomptype(self):
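+        # gzip streams start with \x1f\x8b\x08, bzip2 with "BZh" plus the
+        # "1AY&SY" block magic, xz/lzma with \xfd7zXZ or the legacy
+        # lzma-alone header \x5d\x00\x00\x80.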
+        if self.buf.startswith(b"\x1f\x8b\x08"):
+            return "gz"
+        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
+            return "bz2"
+        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
+            return "xz"
+        else:
+            return "tar"
+
+    def close(self):
+        self.fileobj.close()
+# class StreamProxy
+
+#------------------------
+# Extraction file object
+#------------------------
+class _FileInFile(object):
+    """A thin wrapper around an existing file object that
+       provides a part of its data as an individual file
+       object.
+    """
+
+    def __init__(self, fileobj, offset, size, name, blockinfo=None):
+        self.fileobj = fileobj
+        self.offset = offset
+        self.size = size
+        self.position = 0
+        self.name = name
+        self.closed = False
+
+        if blockinfo is None:
+            blockinfo = [(0, size)]
+
+        # Construct a map with data and zero blocks.
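+        # Each map entry is (is_data, start, stop, realpos): data ranges
+        # are read from the underlying file at realpos, holes as NULs.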
+        self.map_index = 0
+        self.map = []
+        lastpos = 0
+        realpos = self.offset
+        for offset, size in blockinfo:
+            if offset > lastpos:
+                self.map.append((False, lastpos, offset, None))
+            self.map.append((True, offset, offset + size, realpos))
+            realpos += size
+            lastpos = offset + size
+        if lastpos < self.size:
+            self.map.append((False, lastpos, self.size, None))
+
+    def flush(self):
+        pass
+
+    def readable(self):
+        return True
+
+    def writable(self):
+        return False
+
+    def seekable(self):
+        return self.fileobj.seekable()
+
+    def tell(self):
+        """Return the current file position.
+        """
+        return self.position
+
+    def seek(self, position, whence=io.SEEK_SET):
+        """Seek to a position in the file.
+        """
+        if whence == io.SEEK_SET:
+            self.position = min(max(position, 0), self.size)
+        elif whence == io.SEEK_CUR:
+            if position < 0:
+                self.position = max(self.position + position, 0)
+            else:
+                self.position = min(self.position + position, self.size)
+        elif whence == io.SEEK_END:
+            self.position = max(min(self.size + position, self.size), 0)
+        else:
+            raise ValueError("Invalid argument")
+        return self.position
+
+    def read(self, size=None):
+        """Read data from the file.
+        """
+        if size is None:
+            size = self.size - self.position
+        else:
+            size = min(size, self.size - self.position)
+
+        buf = b""
+        while size > 0:
+            while True:
+                data, start, stop, offset = self.map[self.map_index]
+                if start <= self.position < stop:
+                    break
+                else:
+                    self.map_index += 1
+                    if self.map_index == len(self.map):
+                        self.map_index = 0
+            length = min(size, stop - self.position)
+            if data:
+                self.fileobj.seek(offset + (self.position - start))
+                b = self.fileobj.read(length)
+                if len(b) != length:
+                    raise ReadError("unexpected end of data")
+                buf += b
+            else:
+                buf += NUL * length
+            size -= length
+            self.position += length
+        return buf
+
+    def readinto(self, b):
+        buf = self.read(len(b))
+        b[:len(buf)] = buf
+        return len(buf)
+
+    def close(self):
+        self.closed = True
+#class _FileInFile
+
+class ExFileObject(io.BufferedReader):
+
+    def __init__(self, tarfile, tarinfo):
+        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
+                tarinfo.size, tarinfo.name, tarinfo.sparse)
+        super().__init__(fileobj)
+#class ExFileObject
+
+
+#-----------------------------
+# extraction filters (PEP 706)
+#-----------------------------
+
+class FilterError(TarError):
+    pass
+
+class AbsolutePathError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'member {tarinfo.name!r} has an absolute path')
+
+class OutsideDestinationError(FilterError):
+    def __init__(self, tarinfo, path):
+        self.tarinfo = tarinfo
+        self._path = path
+        super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
+                         + 'which is outside the destination')
+
+class SpecialFileError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'{tarinfo.name!r} is a special file')
+
+class AbsoluteLinkError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'{tarinfo.name!r} is a link to an absolute path')
+
+class LinkOutsideDestinationError(FilterError):
+    def __init__(self, tarinfo, path):
+        self.tarinfo = tarinfo
+        self._path = path
+        super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+                         + 'which is outside the destination')
+
+def _get_filtered_attrs(member, dest_path, for_data=True):
+    new_attrs = {}
+    name = member.name
+    dest_path = os.path.realpath(dest_path)
+    # Strip leading / (tar's directory separator) from filenames.
+    # Include os.sep (target OS directory separator) as well.
+    if name.startswith(('/', os.sep)):
+        name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
+    if os.path.isabs(name):
+        # Path is absolute even after stripping.
+        # For example, 'C:/foo' on Windows.
+        raise AbsolutePathError(member)
+    # Ensure we stay in the destination
+    target_path = os.path.realpath(os.path.join(dest_path, name))
+    if os.path.commonpath([target_path, dest_path]) != dest_path:
+        raise OutsideDestinationError(member, target_path)
+    # Limit permissions (no high bits, and go-w)
+    mode = member.mode
+    if mode is not None:
+        # Strip high bits & group/other write bits
+        mode = mode & 0o755
+        if for_data:
+            # For data, handle permissions & file types
+            if member.isreg() or member.islnk():
+                if not mode & 0o100:
+                    # Clear executable bits if not executable by user
+                    mode &= ~0o111
+                # Ensure owner can read & write
+                mode |= 0o600
+            elif member.isdir() or member.issym():
+                # Ignore mode for directories & symlinks
+                mode = None
+            else:
+                # Reject special files
+                raise SpecialFileError(member)
+        if mode != member.mode:
+            new_attrs['mode'] = mode
+    if for_data:
+        # Ignore ownership for 'data'
+        if member.uid is not None:
+            new_attrs['uid'] = None
+        if member.gid is not None:
+            new_attrs['gid'] = None
+        if member.uname is not None:
+            new_attrs['uname'] = None
+        if member.gname is not None:
+            new_attrs['gname'] = None
+        # Check link destination for 'data'
+        if member.islnk() or member.issym():
+            if os.path.isabs(member.linkname):
+                raise AbsoluteLinkError(member)
+            if member.issym():
+                target_path = os.path.join(dest_path,
+                                           os.path.dirname(name),
+                                           member.linkname)
+            else:
+                target_path = os.path.join(dest_path,
+                                           member.linkname)
+            target_path = os.path.realpath(target_path)
+            if os.path.commonpath([target_path, dest_path]) != dest_path:
+                raise LinkOutsideDestinationError(member, target_path)
+    return new_attrs
+
+def fully_trusted_filter(member, dest_path):
+    return member
+
+def tar_filter(member, dest_path):
+    new_attrs = _get_filtered_attrs(member, dest_path, False)
+    if new_attrs:
+        return member.replace(**new_attrs, deep=False)
+    return member
+
+def data_filter(member, dest_path):
+    new_attrs = _get_filtered_attrs(member, dest_path, True)
+    if new_attrs:
+        return member.replace(**new_attrs, deep=False)
+    return member
+
+_NAMED_FILTERS = {
+    "fully_trusted": fully_trusted_filter,
+    "tar": tar_filter,
+    "data": data_filter,
+}
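+# TarFile.extract()/extractall() accept either one of these names or a
+# bare callable as their `filter` argument (PEP 706).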
+
+#------------------
+# Exported Classes
+#------------------
+
+# Sentinel for replace() defaults, meaning "don't change the attribute"
+_KEEP = object()
+
+class TarInfo(object):
+    """Informational class which holds the details about an
+       archive member given by a tar header block.
+       TarInfo objects are returned by TarFile.getmember(),
+       TarFile.getmembers() and TarFile.gettarinfo() and are
+       usually created internally.
+    """
+
+    __slots__ = dict(
+        name = 'Name of the archive member.',
+        mode = 'Permission bits.',
+        uid = 'User ID of the user who originally stored this member.',
+        gid = 'Group ID of the user who originally stored this member.',
+        size = 'Size in bytes.',
+        mtime = 'Time of last modification.',
+        chksum = 'Header checksum.',
+        type = ('File type. type is usually one of these constants: '
+                'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
+                'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
+        linkname = ('Name of the target file name, which is only present '
+                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
+        uname = 'User name.',
+        gname = 'Group name.',
+        devmajor = 'Device major number.',
+        devminor = 'Device minor number.',
+        offset = 'The tar header starts here.',
+        offset_data = "The file's data starts here.",
+        pax_headers = ('A dictionary containing key-value pairs of an '
+                       'associated pax extended header.'),
+        sparse = 'Sparse member information.',
+        tarfile = None,
+        _sparse_structs = None,
+        _link_target = None,
+        )
+
+    def __init__(self, name=""):
+        """Construct a TarInfo object. name is the optional name
+           of the member.
+        """
+        self.name = name        # member name
+        self.mode = 0o644       # file permissions
+        self.uid = 0            # user id
+        self.gid = 0            # group id
+        self.size = 0           # file size
+        self.mtime = 0          # modification time
+        self.chksum = 0         # header checksum
+        self.type = REGTYPE     # member type
+        self.linkname = ""      # link name
+        self.uname = ""         # user name
+        self.gname = ""         # group name
+        self.devmajor = 0       # device major number
+        self.devminor = 0       # device minor number
+
+        self.offset = 0         # the tar header starts here
+        self.offset_data = 0    # the file's data starts here
+
+        self.sparse = None      # sparse member information
+        self.pax_headers = {}   # pax header information
+
+    @property
+    def path(self):
+        'In pax headers, "name" is called "path".'
+        return self.name
+
+    @path.setter
+    def path(self, name):
+        self.name = name
+
+    @property
+    def linkpath(self):
+        'In pax headers, "linkname" is called "linkpath".'
+        return self.linkname
+
+    @linkpath.setter
+    def linkpath(self, linkname):
+        self.linkname = linkname
+
+    def __repr__(self):
+        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+
+    def replace(self, *,
+                name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
+                uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
+                deep=True, _KEEP=_KEEP):
+        """Return a deep copy of self with the given attributes replaced.
+        """
+        if deep:
+            result = copy.deepcopy(self)
+        else:
+            result = copy.copy(self)
+        if name is not _KEEP:
+            result.name = name
+        if mtime is not _KEEP:
+            result.mtime = mtime
+        if mode is not _KEEP:
+            result.mode = mode
+        if linkname is not _KEEP:
+            result.linkname = linkname
+        if uid is not _KEEP:
+            result.uid = uid
+        if gid is not _KEEP:
+            result.gid = gid
+        if uname is not _KEEP:
+            result.uname = uname
+        if gname is not _KEEP:
+            result.gname = gname
+        return result
+
+    def get_info(self):
+        """Return the TarInfo's attributes as a dictionary.
+        """
+        if self.mode is None:
+            mode = None
+        else:
+            mode = self.mode & 0o7777
+        info = {
+            "name":     self.name,
+            "mode":     mode,
+            "uid":      self.uid,
+            "gid":      self.gid,
+            "size":     self.size,
+            "mtime":    self.mtime,
+            "chksum":   self.chksum,
+            "type":     self.type,
+            "linkname": self.linkname,
+            "uname":    self.uname,
+            "gname":    self.gname,
+            "devmajor": self.devmajor,
+            "devminor": self.devminor
+        }
+
+        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
+            info["name"] += "/"
+
+        return info
+
+    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
+        """Return a tar header as a string of 512 byte blocks.
+        """
+        info = self.get_info()
+        for name, value in info.items():
+            if value is None:
+                raise ValueError("%s may not be None" % name)
+
+        if format == USTAR_FORMAT:
+            return self.create_ustar_header(info, encoding, errors)
+        elif format == GNU_FORMAT:
+            return self.create_gnu_header(info, encoding, errors)
+        elif format == PAX_FORMAT:
+            return self.create_pax_header(info, encoding)
+        else:
+            raise ValueError("invalid format")
+
+    def create_ustar_header(self, info, encoding, errors):
+        """Return the object as a ustar header block.
+        """
+        info["magic"] = POSIX_MAGIC
+
+        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+            raise ValueError("linkname is too long")
+
+        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+            info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
+
+        return self._create_header(info, USTAR_FORMAT, encoding, errors)
+
+    def create_gnu_header(self, info, encoding, errors):
+        """Return the object as a GNU header block sequence.
+        """
+        info["magic"] = GNU_MAGIC
+
+        buf = b""
+        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
+
+        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
+
+        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
+
+    def create_pax_header(self, info, encoding):
+        """Return the object as a ustar header block. If it cannot be
+           represented this way, prepend a pax extended header sequence
+           with supplement information.
+        """
+        info["magic"] = POSIX_MAGIC
+        pax_headers = self.pax_headers.copy()
+
+        # Test string fields for values that exceed the field length or cannot
+        # be represented in ASCII encoding.
+        for name, hname, length in (
+                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
+                ("uname", "uname", 32), ("gname", "gname", 32)):
+
+            if hname in pax_headers:
+                # The pax header has priority.
+                continue
+
+            # Try to encode the string as ASCII.
+            try:
+                info[name].encode("ascii", "strict")
+            except UnicodeEncodeError:
+                pax_headers[hname] = info[name]
+                continue
+
+            if len(info[name]) > length:
+                pax_headers[hname] = info[name]
+
+        # Test number fields for values that exceed the field limit or values
+        # that like to be stored as float.
+        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
+            needs_pax = False
+
+            val = info[name]
+            val_is_float = isinstance(val, float)
+            val_int = round(val) if val_is_float else val
+            if not 0 <= val_int < 8 ** (digits - 1):
+                # Avoid overflow.
+                info[name] = 0
+                needs_pax = True
+            elif val_is_float:
+                # Put rounded value in ustar header, and full
+                # precision value in pax header.
+                info[name] = val_int
+                needs_pax = True
+
+            # The existing pax header has priority.
+            if needs_pax and name not in pax_headers:
+                pax_headers[name] = str(val)
+
+        # Create a pax extended header if necessary.
+        if pax_headers:
+            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
+        else:
+            buf = b""
+
+        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
+
+    @classmethod
+    def create_pax_global_header(cls, pax_headers):
+        """Return the object as a pax global header block sequence.
+        """
+        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
+
+    def _posix_split_name(self, name, encoding, errors):
+        """Split a name longer than 100 chars into a prefix
+           and a name part.
+        """
+        components = name.split("/")
+        for i in range(1, len(components)):
+            prefix = "/".join(components[:i])
+            name = "/".join(components[i:])
+            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
+                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
+                break
+        else:
+            raise ValueError("name is too long")
+
+        return prefix, name
+
+    @staticmethod
+    def _create_header(info, format, encoding, errors):
+        """Return a header block. info is a dictionary with file
+           information, format must be one of the *_FORMAT constants.
+        """
+        has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
+        if has_device_fields:
+            devmajor = itn(info.get("devmajor", 0), 8, format)
+            devminor = itn(info.get("devminor", 0), 8, format)
+        else:
+            devmajor = stn("", 8, encoding, errors)
+            devminor = stn("", 8, encoding, errors)
+
+        # None values in metadata should cause ValueError.
+        # itn()/stn() do this for all fields except type.
+        filetype = info.get("type", REGTYPE)
+        if filetype is None:
+            raise ValueError("TarInfo.type must not be None")
+
+        parts = [
+            stn(info.get("name", ""), 100, encoding, errors),
+            itn(info.get("mode", 0) & 0o7777, 8, format),
+            itn(info.get("uid", 0), 8, format),
+            itn(info.get("gid", 0), 8, format),
+            itn(info.get("size", 0), 12, format),
+            itn(info.get("mtime", 0), 12, format),
+            b"        ", # checksum field
+            filetype,
+            stn(info.get("linkname", ""), 100, encoding, errors),
+            info.get("magic", POSIX_MAGIC),
+            stn(info.get("uname", ""), 32, encoding, errors),
+            stn(info.get("gname", ""), 32, encoding, errors),
+            devmajor,
+            devminor,
+            stn(info.get("prefix", ""), 155, encoding, errors)
+        ]
+
+        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
+        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
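+        # Splice the checksum into bytes 148-154 of the last block
+        # (512 - 364 == 148, 512 - 357 == 155): six octal digits plus a
+        # NUL, leaving the trailing space of the placeholder intact.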
+        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
+        return buf
+
+    @staticmethod
+    def _create_payload(payload):
+        """Return the string payload filled with zero bytes
+           up to the next 512 byte border.
+        """
+        blocks, remainder = divmod(len(payload), BLOCKSIZE)
+        if remainder > 0:
+            payload += (BLOCKSIZE - remainder) * NUL
+        return payload
+
+    @classmethod
+    def _create_gnu_long_header(cls, name, type, encoding, errors):
+        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
+           for name.
+        """
+        name = name.encode(encoding, errors) + NUL
+
+        info = {}
+        info["name"] = "././@LongLink"
+        info["type"] = type
+        info["size"] = len(name)
+        info["magic"] = GNU_MAGIC
+
+        # create extended header + name blocks.
+        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
+                cls._create_payload(name)
+
+    @classmethod
+    def _create_pax_generic_header(cls, pax_headers, type, encoding):
+        """Return a POSIX.1-2008 extended or global header sequence
+           that contains a list of keyword, value pairs. The values
+           must be strings.
+        """
+        # Check if one of the fields contains surrogate characters and thereby
+        # forces hdrcharset=BINARY, see _proc_pax() for more information.
+        binary = False
+        for keyword, value in pax_headers.items():
+            try:
+                value.encode("utf-8", "strict")
+            except UnicodeEncodeError:
+                binary = True
+                break
+
+        records = b""
+        if binary:
+            # Put the hdrcharset field at the beginning of the header.
+            records += b"21 hdrcharset=BINARY\n"
+
+        for keyword, value in pax_headers.items():
+            keyword = keyword.encode("utf-8")
+            if binary:
+                # Try to restore the original byte representation of `value'.
+                # Needless to say, that the encoding must match the string.
+                value = value.encode(encoding, "surrogateescape")
+            else:
+                value = value.encode("utf-8")
+
+            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
+            n = p = 0
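+            # The length prefix counts its own digits, so iterate to a
+            # fixed point: "path=foo" (l == 10) settles at p == 12,
+            # giving the record b"12 path=foo\n" of total length 12.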
+            while True:
+                n = l + len(str(p))
+                if n == p:
+                    break
+                p = n
+            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
+
+        # We use a hardcoded "././@PaxHeader" name like star does
+        # instead of the one that POSIX recommends.
+        info = {}
+        info["name"] = "././@PaxHeader"
+        info["type"] = type
+        info["size"] = len(records)
+        info["magic"] = POSIX_MAGIC
+
+        # Create pax header + record blocks.
+        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
+                cls._create_payload(records)
+
+    @classmethod
+    def frombuf(cls, buf, encoding, errors):
+        """Construct a TarInfo object from a 512 byte bytes object.
+        """
+        if len(buf) == 0:
+            raise EmptyHeaderError("empty header")
+        if len(buf) != BLOCKSIZE:
+            raise TruncatedHeaderError("truncated header")
+        if buf.count(NUL) == BLOCKSIZE:
+            raise EOFHeaderError("end of file header")
+
+        chksum = nti(buf[148:156])
+        if chksum not in calc_chksums(buf):
+            raise InvalidHeaderError("bad checksum")
+
+        obj = cls()
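+        # Slices follow the ustar layout: name[0:100], mode[100:108],
+        # uid[108:116], gid[116:124], size[124:136], mtime[136:148],
+        # chksum[148:156], type[156], linkname[157:257], magic (skipped),
+        # uname[265:297], gname[297:329], dev numbers, prefix[345:500].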
+        obj.name = nts(buf[0:100], encoding, errors)
+        obj.mode = nti(buf[100:108])
+        obj.uid = nti(buf[108:116])
+        obj.gid = nti(buf[116:124])
+        obj.size = nti(buf[124:136])
+        obj.mtime = nti(buf[136:148])
+        obj.chksum = chksum
+        obj.type = buf[156:157]
+        obj.linkname = nts(buf[157:257], encoding, errors)
+        obj.uname = nts(buf[265:297], encoding, errors)
+        obj.gname = nts(buf[297:329], encoding, errors)
+        obj.devmajor = nti(buf[329:337])
+        obj.devminor = nti(buf[337:345])
+        prefix = nts(buf[345:500], encoding, errors)
+
+        # Old V7 tar format represents a directory as a regular
+        # file with a trailing slash.
+        if obj.type == AREGTYPE and obj.name.endswith("/"):
+            obj.type = DIRTYPE
+
+        # The old GNU sparse format occupies some of the unused
+        # space in the buffer for up to 4 sparse structures.
+        # Save them for later processing in _proc_sparse().
+        if obj.type == GNUTYPE_SPARSE:
+            pos = 386
+            structs = []
+            for i in range(4):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[482])
+            origsize = nti(buf[483:495])
+            obj._sparse_structs = (structs, isextended, origsize)
+
+        # Remove redundant slashes from directories.
+        if obj.isdir():
+            obj.name = obj.name.rstrip("/")
+
+        # Reconstruct a ustar longname.
+        if prefix and obj.type not in GNU_TYPES:
+            obj.name = prefix + "/" + obj.name
+        return obj
+
+    @classmethod
+    def fromtarfile(cls, tarfile):
+        """Return the next TarInfo object from TarFile object
+           tarfile.
+        """
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+        return obj._proc_member(tarfile)
+
+    #--------------------------------------------------------------------------
+    # The following are methods that are called depending on the type of a
+    # member. The entry point is _proc_member() which can be overridden in a
+    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+    # implement the following operations:
+    # 1. Set self.offset_data to the position where the data blocks begin,
+    #    if there is data that follows.
+    # 2. Set tarfile.offset to the position where the next member's header will
+    #    begin.
+    # 3. Return self or another valid TarInfo object.
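+    #
+    # A minimal sketch of that contract (a hypothetical subclass; the member
+    # type b"X" and the name SketchTarInfo are illustrative, not part of this
+    # module):
+    #
+    #   class SketchTarInfo(TarInfo):
+    #       def _proc_member(self, tarfile):
+    #           if self.type == b"X":
+    #               self.offset_data = tarfile.fileobj.tell()  # 1. data start
+    #               tarfile.offset = self.offset_data + self._block(self.size)  # 2. next header
+    #               return self  # 3. a valid TarInfo
+    #           return super()._proc_member(tarfile)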
+    def _proc_member(self, tarfile):
+        """Choose the right processing method depending on
+           the type and call it.
+        """
+        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+            return self._proc_gnulong(tarfile)
+        elif self.type == GNUTYPE_SPARSE:
+            return self._proc_sparse(tarfile)
+        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+            return self._proc_pax(tarfile)
+        else:
+            return self._proc_builtin(tarfile)
+
+    def _proc_builtin(self, tarfile):
+        """Process a builtin type or an unknown type which
+           will be treated as a regular file.
+        """
+        self.offset_data = tarfile.fileobj.tell()
+        offset = self.offset_data
+        if self.isreg() or self.type not in SUPPORTED_TYPES:
+            # Skip the following data blocks.
+            offset += self._block(self.size)
+        tarfile.offset = offset
+
+        # Patch the TarInfo object with saved global
+        # header information.
+        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
+
+        # Remove redundant slashes from directories. This is to be consistent
+        # with frombuf().
+        if self.isdir():
+            self.name = self.name.rstrip("/")
+
+        return self
+
+    def _proc_gnulong(self, tarfile):
+        """Process the blocks that hold a GNU longname
+           or longlink member.
+        """
+        buf = tarfile.fileobj.read(self._block(self.size))
+
+        # Fetch the next header and process it.
+        try:
+            next = self.fromtarfile(tarfile)
+        except HeaderError as e:
+            raise SubsequentHeaderError(str(e)) from None
+
+        # Patch the TarInfo object from the next header with
+        # the longname information.
+        next.offset = self.offset
+        if self.type == GNUTYPE_LONGNAME:
+            next.name = nts(buf, tarfile.encoding, tarfile.errors)
+        elif self.type == GNUTYPE_LONGLINK:
+            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
+
+        # Remove redundant slashes from directories. This is to be consistent
+        # with frombuf().
+        if next.isdir():
+            next.name = removesuffix(next.name, "/")
+
+        return next
+
+    def _proc_sparse(self, tarfile):
+        """Process a GNU sparse header plus extra headers.
+        """
+        # We already collected some sparse structures in frombuf().
+        structs, isextended, origsize = self._sparse_structs
+        del self._sparse_structs
+
+        # Collect sparse structures from extended header blocks.
+        while isextended:
+            buf = tarfile.fileobj.read(BLOCKSIZE)
+            pos = 0
+            for i in range(21):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                if offset and numbytes:
+                    structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[504])
+        self.sparse = structs
+
+        self.offset_data = tarfile.fileobj.tell()
+        tarfile.offset = self.offset_data + self._block(self.size)
+        self.size = origsize
+        return self
+
+    def _proc_pax(self, tarfile):
+        """Process an extended or global header as described in
+           POSIX.1-2008.
+        """
+        # Read the header information.
+        buf = tarfile.fileobj.read(self._block(self.size))
+
+        # A pax header stores supplemental information for either
+        # the following file (extended) or all following files
+        # (global).
+        if self.type == XGLTYPE:
+            pax_headers = tarfile.pax_headers
+        else:
+            pax_headers = tarfile.pax_headers.copy()
+
+        # Check if the pax header contains a hdrcharset field. This tells us
+        # the encoding of the path, linkpath, uname and gname fields. Normally,
+        # these fields are UTF-8 encoded, but POSIX.1-2008 allows tar
+        # implementations to store them as raw binary strings if the
+        # translation to UTF-8 fails.
+        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
+        if match is not None:
+            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
+
+        # For the time being, we don't care about anything other than "BINARY".
+        # The only other value that is currently allowed by the standard is
+        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
+        hdrcharset = pax_headers.get("hdrcharset")
+        if hdrcharset == "BINARY":
+            encoding = tarfile.encoding
+        else:
+            encoding = "utf-8"
+
+        # Parse pax header information. A record looks like this:
+        # "%d %s=%s\n" % (length, keyword, value). length is the size
+        # of the complete record including the length field itself and
+        # the newline. keyword and value are both UTF-8 encoded strings.
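+        # A worked example (the path value is illustrative): the record
+        # b"21 path=etc/hostname\n" has length 21 = 2 ("21") + 1 (space)
+        # + 17 ("path=etc/hostname") + 1 (newline).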
+        regex = re.compile(br"(\d+) ([^=]+)=")
+        pos = 0
+        while match := regex.match(buf, pos):
+            length, keyword = match.groups()
+            length = int(length)
+            if length == 0:
+                raise InvalidHeaderError("invalid header")
+            value = buf[match.end(2) + 1:match.start(1) + length - 1]
+
+            # Normally, we could just use "utf-8" as the encoding and "strict"
+            # as the error handler, but we better not take the risk. For
+            # example, GNU tar <= 1.23 is known to store filenames it cannot
+            # translate to UTF-8 as raw strings (unfortunately without a
+            # hdrcharset=BINARY header).
+            # We first try the strict standard encoding, and if that fails we
+            # fall back on the user's encoding and error handler.
+            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
+                    tarfile.errors)
+            if keyword in PAX_NAME_FIELDS:
+                value = self._decode_pax_field(value, encoding, tarfile.encoding,
+                        tarfile.errors)
+            else:
+                value = self._decode_pax_field(value, "utf-8", "utf-8",
+                        tarfile.errors)
+
+            pax_headers[keyword] = value
+            pos += length
+
+        # Fetch the next header.
+        try:
+            next = self.fromtarfile(tarfile)
+        except HeaderError as e:
+            raise SubsequentHeaderError(str(e)) from None
+
+        # Process GNU sparse information.
+        if "GNU.sparse.map" in pax_headers:
+            # GNU extended sparse format version 0.1.
+            self._proc_gnusparse_01(next, pax_headers)
+
+        elif "GNU.sparse.size" in pax_headers:
+            # GNU extended sparse format version 0.0.
+            self._proc_gnusparse_00(next, pax_headers, buf)
+
+        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
+            # GNU extended sparse format version 1.0.
+            self._proc_gnusparse_10(next, pax_headers, tarfile)
+
+        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
+            # Patch the TarInfo object with the extended header info.
+            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
+            next.offset = self.offset
+
+            if "size" in pax_headers:
+                # If the extended header replaces the size field,
+                # we need to recalculate the offset where the next
+                # header starts.
+                offset = next.offset_data
+                if next.isreg() or next.type not in SUPPORTED_TYPES:
+                    offset += next._block(next.size)
+                tarfile.offset = offset
+
+        return next
+
+    def _proc_gnusparse_00(self, next, pax_headers, buf):
+        """Process a GNU tar extended sparse header, version 0.0.
+        """
+        offsets = []
+        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
+            offsets.append(int(match.group(1)))
+        numbytes = []
+        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
+            numbytes.append(int(match.group(1)))
+        next.sparse = list(zip(offsets, numbytes))
+
+    def _proc_gnusparse_01(self, next, pax_headers):
+        """Process a GNU tar extended sparse header, version 0.1.
+        """
+        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
+        next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
+        """Process a GNU tar extended sparse header, version 1.0.
+        """
+        fields = None
+        sparse = []
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        fields, buf = buf.split(b"\n", 1)
+        fields = int(fields)
+        while len(sparse) < fields * 2:
+            if b"\n" not in buf:
+                buf += tarfile.fileobj.read(BLOCKSIZE)
+            number, buf = buf.split(b"\n", 1)
+            sparse.append(int(number))
+        next.offset_data = tarfile.fileobj.tell()
+        next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+    def _apply_pax_info(self, pax_headers, encoding, errors):
+        """Replace fields with supplemental information from a previous
+           pax extended or global header.
+        """
+        for keyword, value in pax_headers.items():
+            if keyword == "GNU.sparse.name":
+                setattr(self, "path", value)
+            elif keyword == "GNU.sparse.size":
+                setattr(self, "size", int(value))
+            elif keyword == "GNU.sparse.realsize":
+                setattr(self, "size", int(value))
+            elif keyword in PAX_FIELDS:
+                if keyword in PAX_NUMBER_FIELDS:
+                    try:
+                        value = PAX_NUMBER_FIELDS[keyword](value)
+                    except ValueError:
+                        value = 0
+                if keyword == "path":
+                    value = value.rstrip("/")
+                setattr(self, keyword, value)
+
+        self.pax_headers = pax_headers.copy()
+
+    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
+        """Decode a single field from a pax record.
+        """
+        try:
+            return value.decode(encoding, "strict")
+        except UnicodeDecodeError:
+            return value.decode(fallback_encoding, fallback_errors)
+
+    def _block(self, count):
+        """Round up a byte count by BLOCKSIZE and return it,
+           e.g. _block(834) => 1024.
+        """
+        blocks, remainder = divmod(count, BLOCKSIZE)
+        if remainder:
+            blocks += 1
+        return blocks * BLOCKSIZE
+
+    def isreg(self):
+        'Return True if the TarInfo object is a regular file.'
+        return self.type in REGULAR_TYPES
+
+    def isfile(self):
+        'Return True if the TarInfo object is a regular file.'
+        return self.isreg()
+
+    def isdir(self):
+        'Return True if it is a directory.'
+        return self.type == DIRTYPE
+
+    def issym(self):
+        'Return True if it is a symbolic link.'
+        return self.type == SYMTYPE
+
+    def islnk(self):
+        'Return True if it is a hard link.'
+        return self.type == LNKTYPE
+
+    def ischr(self):
+        'Return True if it is a character device.'
+        return self.type == CHRTYPE
+
+    def isblk(self):
+        'Return True if it is a block device.'
+        return self.type == BLKTYPE
+
+    def isfifo(self):
+        'Return True if it is a FIFO.'
+        return self.type == FIFOTYPE
+
+    def issparse(self):
+        return self.sparse is not None
+
+    def isdev(self):
+        'Return True if it is one of character device, block device or FIFO.'
+        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
+# class TarInfo
+
+class TarFile(object):
+    """The TarFile Class provides an interface to tar archives.
+    """
+
+    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
+
+    dereference = False         # If true, add content of linked file to the
+                                # tar file, else the link.
+
+    ignore_zeros = False        # If true, skips empty or invalid blocks and
+                                # continues processing.
+
+    errorlevel = 1              # If 0, fatal errors only appear in debug
+                                # messages (if debug >= 0). If > 0, errors
+                                # are passed to the caller as exceptions.
+
+    format = DEFAULT_FORMAT     # The format to use when creating an archive.
+
+    encoding = ENCODING         # Encoding for 8-bit character strings.
+
+    errors = None               # Error handler for unicode conversion.
+
+    tarinfo = TarInfo           # The default TarInfo class to use.
+
+    fileobject = ExFileObject   # The file-object for extractfile().
+
+    extraction_filter = None    # The default filter for extraction.
+
+    def __init__(self, name=None, mode="r", fileobj=None, format=None,
+            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
+            errors="surrogateescape", pax_headers=None, debug=None,
+            errorlevel=None, copybufsize=None):
+        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+           read from an existing archive, 'a' to append data to an existing
+           file or 'w' to create a new file overwriting an existing one. `mode'
+           defaults to 'r'.
+           If `fileobj' is given, it is used for reading or writing data. If it
+           can be determined, `mode' is overridden by `fileobj's mode.
+           `fileobj' is not closed, when TarFile is closed.
+        """
+        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
+        if mode not in modes:
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+        self.mode = mode
+        self._mode = modes[mode]
+
+        if not fileobj:
+            if self.mode == "a" and not os.path.exists(name):
+                # Create nonexistent files in append mode.
+                self.mode = "w"
+                self._mode = "wb"
+            fileobj = bltn_open(name, self._mode)
+            self._extfileobj = False
+        else:
+            if (name is None and hasattr(fileobj, "name") and
+                isinstance(fileobj.name, (str, bytes))):
+                name = fileobj.name
+            if hasattr(fileobj, "mode"):
+                self._mode = fileobj.mode
+            self._extfileobj = True
+        self.name = os.path.abspath(name) if name else None
+        self.fileobj = fileobj
+
+        # Init attributes.
+        if format is not None:
+            self.format = format
+        if tarinfo is not None:
+            self.tarinfo = tarinfo
+        if dereference is not None:
+            self.dereference = dereference
+        if ignore_zeros is not None:
+            self.ignore_zeros = ignore_zeros
+        if encoding is not None:
+            self.encoding = encoding
+        self.errors = errors
+
+        if pax_headers is not None and self.format == PAX_FORMAT:
+            self.pax_headers = pax_headers
+        else:
+            self.pax_headers = {}
+
+        if debug is not None:
+            self.debug = debug
+        if errorlevel is not None:
+            self.errorlevel = errorlevel
+
+        # Init data structures.
+        self.copybufsize = copybufsize
+        self.closed = False
+        self.members = []       # list of members as TarInfo objects
+        self._loaded = False    # flag if all members have been read
+        self.offset = self.fileobj.tell()
+                                # current position in the archive file
+        self.inodes = {}        # dictionary caching the inodes of
+                                # archive members already added
+
+        try:
+            if self.mode == "r":
+                self.firstmember = None
+                self.firstmember = self.next()
+
+            if self.mode == "a":
+                # Move to the end of the archive,
+                # before the first empty block.
+                while True:
+                    self.fileobj.seek(self.offset)
+                    try:
+                        tarinfo = self.tarinfo.fromtarfile(self)
+                        self.members.append(tarinfo)
+                    except EOFHeaderError:
+                        self.fileobj.seek(self.offset)
+                        break
+                    except HeaderError as e:
+                        raise ReadError(str(e)) from None
+
+            if self.mode in ("a", "w", "x"):
+                self._loaded = True
+
+                if self.pax_headers:
+                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
+                    self.fileobj.write(buf)
+                    self.offset += len(buf)
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    #--------------------------------------------------------------------------
+    # Below are the classmethods which act as alternate constructors to the
+    # TarFile class. The open() method is the only one that is needed for
+    # public use; it is the "super"-constructor and is able to select an
+    # adequate "sub"-constructor for a particular compression using the mapping
+    # from OPEN_METH.
+    #
+    # This concept allows one to subclass TarFile without losing the comfort of
+    # the super-constructor. A sub-constructor is registered and made available
+    # by adding it to the mapping in OPEN_METH.
+
+    @classmethod
+    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
+        r"""Open a tar archive for reading, writing or appending. Return
+           an appropriate TarFile class.
+
+           mode:
+           'r' or 'r:\*' open for reading with transparent compression
+           'r:'         open for reading exclusively uncompressed
+           'r:gz'       open for reading with gzip compression
+           'r:bz2'      open for reading with bzip2 compression
+           'r:xz'       open for reading with lzma compression
+           'a' or 'a:'  open for appending, creating the file if necessary
+           'w' or 'w:'  open for writing without compression
+           'w:gz'       open for writing with gzip compression
+           'w:bz2'      open for writing with bzip2 compression
+           'w:xz'       open for writing with lzma compression
+
+           'x' or 'x:'  create a tarfile exclusively without compression, raise
+                        an exception if the file is already created
+           'x:gz'       create a gzip compressed tarfile, raise an exception
+                        if the file is already created
+           'x:bz2'      create a bzip2 compressed tarfile, raise an exception
+                        if the file is already created
+           'x:xz'       create an lzma compressed tarfile, raise an exception
+                        if the file is already created
+
+           'r|\*'        open a stream of tar blocks with transparent compression
+           'r|'         open an uncompressed stream of tar blocks for reading
+           'r|gz'       open a gzip compressed stream of tar blocks
+           'r|bz2'      open a bzip2 compressed stream of tar blocks
+           'r|xz'       open an lzma compressed stream of tar blocks
+           'w|'         open an uncompressed stream for writing
+           'w|gz'       open a gzip compressed stream for writing
+           'w|bz2'      open a bzip2 compressed stream for writing
+           'w|xz'       open an lzma compressed stream for writing
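+
+           A minimal usage sketch (the archive path is hypothetical):
+
+               with TarFile.open("example.tar.gz", "r:gz") as tar:
+                   print(tar.getnames())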
+        """
+
+        if not name and not fileobj:
+            raise ValueError("nothing to open")
+
+        if mode in ("r", "r:*"):
+            # Find out which *open() is appropriate for opening the file.
+            def not_compressed(comptype):
+                return cls.OPEN_METH[comptype] == 'taropen'
+            error_msgs = []
+            for comptype in sorted(cls.OPEN_METH, key=not_compressed):
+                func = getattr(cls, cls.OPEN_METH[comptype])
+                if fileobj is not None:
+                    saved_pos = fileobj.tell()
+                try:
+                    return func(name, "r", fileobj, **kwargs)
+                except (ReadError, CompressionError) as e:
+                    error_msgs.append(f'- method {comptype}: {e!r}')
+                    if fileobj is not None:
+                        fileobj.seek(saved_pos)
+                    continue
+            error_msgs_summary = '\n'.join(error_msgs)
+            raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
+
+        elif ":" in mode:
+            filemode, comptype = mode.split(":", 1)
+            filemode = filemode or "r"
+            comptype = comptype or "tar"
+
+            # Select the *open() function according to
+            # given compression.
+            if comptype in cls.OPEN_METH:
+                func = getattr(cls, cls.OPEN_METH[comptype])
+            else:
+                raise CompressionError("unknown compression type %r" % comptype)
+            return func(name, filemode, fileobj, **kwargs)
+
+        elif "|" in mode:
+            filemode, comptype = mode.split("|", 1)
+            filemode = filemode or "r"
+            comptype = comptype or "tar"
+
+            if filemode not in ("r", "w"):
+                raise ValueError("mode must be 'r' or 'w'")
+
+            compresslevel = kwargs.pop("compresslevel", 9)
+            stream = _Stream(name, filemode, comptype, fileobj, bufsize,
+                             compresslevel)
+            try:
+                t = cls(name, filemode, stream, **kwargs)
+            except:
+                stream.close()
+                raise
+            t._extfileobj = False
+            return t
+
+        elif mode in ("a", "w", "x"):
+            return cls.taropen(name, mode, fileobj, **kwargs)
+
+        raise ValueError("undiscernible mode")
+
+    @classmethod
+    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
+        """Open uncompressed tar archive name for reading or writing.
+        """
+        if mode not in ("r", "a", "w", "x"):
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+        return cls(name, mode, fileobj, **kwargs)
+
+    @classmethod
+    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+        """Open gzip compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from gzip import GzipFile
+        except ImportError:
+            raise CompressionError("gzip module is not available") from None
+
+        try:
+            fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
+        except OSError as e:
+            if fileobj is not None and mode == 'r':
+                raise ReadError("not a gzip file") from e
+            raise
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except OSError as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not a gzip file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    @classmethod
+    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+        """Open bzip2 compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from bz2 import BZ2File
+        except ImportError:
+            raise CompressionError("bz2 module is not available") from None
+
+        fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except (OSError, EOFError) as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not a bzip2 file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    @classmethod
+    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
+        """Open lzma compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from lzma import LZMAFile, LZMAError
+        except ImportError:
+            raise CompressionError("lzma module is not available") from None
+
+        fileobj = LZMAFile(fileobj or name, mode, preset=preset)
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except (LZMAError, EOFError) as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not an lzma file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    # All *open() methods are registered here.
+    OPEN_METH = {
+        "tar": "taropen",   # uncompressed tar
+        "gz":  "gzopen",    # gzip compressed tar
+        "bz2": "bz2open",   # bzip2 compressed tar
+        "xz":  "xzopen"     # lzma compressed tar
+    }
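+
+    # A subclass can register its own sub-constructor by extending this
+    # mapping, e.g. (a hypothetical zstd entry; zstopen would follow the
+    # gzopen/bz2open pattern above):
+    #
+    #   class ZstTarFile(TarFile):
+    #       OPEN_METH = {**TarFile.OPEN_METH, "zst": "zstopen"}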
+
+    #--------------------------------------------------------------------------
+    # The public methods which TarFile provides:
+
+    def close(self):
+        """Close the TarFile. In write-mode, two finishing zero blocks are
+           appended to the archive.
+        """
+        if self.closed:
+            return
+
+        self.closed = True
+        try:
+            if self.mode in ("a", "w", "x"):
+                self.fileobj.write(NUL * (BLOCKSIZE * 2))
+                self.offset += (BLOCKSIZE * 2)
+                # fill up the end with zero-blocks
+                # (like option -b20 for tar does)
+                blocks, remainder = divmod(self.offset, RECORDSIZE)
+                if remainder > 0:
+                    self.fileobj.write(NUL * (RECORDSIZE - remainder))
+        finally:
+            if not self._extfileobj:
+                self.fileobj.close()
+
+    def getmember(self, name):
+        """Return a TarInfo object for member ``name``. If ``name`` can not be
+           found in the archive, KeyError is raised. If a member occurs more
+           than once in the archive, its last occurrence is assumed to be the
+           most up-to-date version.
+        """
+        tarinfo = self._getmember(name.rstrip('/'))
+        if tarinfo is None:
+            raise KeyError("filename %r not found" % name)
+        return tarinfo
+
+    def getmembers(self):
+        """Return the members of the archive as a list of TarInfo objects. The
+           list has the same order as the members in the archive.
+        """
+        self._check()
+        if not self._loaded:    # if we want to obtain a list of
+            self._load()        # all members, we first have to
+                                # scan the whole archive.
+        return self.members
+
+    def getnames(self):
+        """Return the members of the archive as a list of their names. It has
+           the same order as the list returned by getmembers().
+        """
+        return [tarinfo.name for tarinfo in self.getmembers()]
+
+    def gettarinfo(self, name=None, arcname=None, fileobj=None):
+        """Create a TarInfo object from the result of os.stat or equivalent
+           on an existing file. The file is either named by ``name``, or
+           specified as a file object ``fileobj`` with a file descriptor. If
+           given, ``arcname`` specifies an alternative name for the file in the
+           archive, otherwise, the name is taken from the 'name' attribute of
+           'fileobj', or the 'name' argument. The name should be a text
+           string.
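+
+           A minimal sketch pairing it with addfile() (paths hypothetical):
+
+               info = tar.gettarinfo("data/file.bin", arcname="file.bin")
+               with open("data/file.bin", "rb") as f:
+                   tar.addfile(info, f)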
+        """
+        self._check("awx")
+
+        # When fileobj is given, replace name by
+        # fileobj's real name.
+        if fileobj is not None:
+            name = fileobj.name
+
+        # Building the name of the member in the archive.
+        # Backslashes are converted to forward slashes, and
+        # absolute paths are turned into relative paths.
+        if arcname is None:
+            arcname = name
+        drv, arcname = os.path.splitdrive(arcname)
+        arcname = arcname.replace(os.sep, "/")
+        arcname = arcname.lstrip("/")
+
+        # Now, fill the TarInfo object with
+        # information specific for the file.
+        tarinfo = self.tarinfo()
+        tarinfo.tarfile = self  # Not needed
+
+        # Use os.stat or os.lstat, depending on whether symlinks should be resolved.
+        if fileobj is None:
+            if not self.dereference:
+                statres = os.lstat(name)
+            else:
+                statres = os.stat(name)
+        else:
+            statres = os.fstat(fileobj.fileno())
+        linkname = ""
+
+        stmd = statres.st_mode
+        if stat.S_ISREG(stmd):
+            inode = (statres.st_ino, statres.st_dev)
+            if not self.dereference and statres.st_nlink > 1 and \
+                    inode in self.inodes and arcname != self.inodes[inode]:
+                # Is it a hardlink to an already
+                # archived file?
+                type = LNKTYPE
+                linkname = self.inodes[inode]
+            else:
+                # The inode is added only if it's valid.
+                # For win32 it is always 0.
+                type = REGTYPE
+                if inode[0]:
+                    self.inodes[inode] = arcname
+        elif stat.S_ISDIR(stmd):
+            type = DIRTYPE
+        elif stat.S_ISFIFO(stmd):
+            type = FIFOTYPE
+        elif stat.S_ISLNK(stmd):
+            type = SYMTYPE
+            linkname = os.readlink(name)
+        elif stat.S_ISCHR(stmd):
+            type = CHRTYPE
+        elif stat.S_ISBLK(stmd):
+            type = BLKTYPE
+        else:
+            return None
+
+        # Fill the TarInfo object with all
+        # information we can get.
+        tarinfo.name = arcname
+        tarinfo.mode = stmd
+        tarinfo.uid = statres.st_uid
+        tarinfo.gid = statres.st_gid
+        if type == REGTYPE:
+            tarinfo.size = statres.st_size
+        else:
+            tarinfo.size = 0
+        tarinfo.mtime = statres.st_mtime
+        tarinfo.type = type
+        tarinfo.linkname = linkname
+        if pwd:
+            try:
+                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+            except KeyError:
+                pass
+        if grp:
+            try:
+                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+            except KeyError:
+                pass
+
+        if type in (CHRTYPE, BLKTYPE):
+            if hasattr(os, "major") and hasattr(os, "minor"):
+                tarinfo.devmajor = os.major(statres.st_rdev)
+                tarinfo.devminor = os.minor(statres.st_rdev)
+        return tarinfo
+
+    def list(self, verbose=True, *, members=None):
+        """Print a table of contents to sys.stdout. If ``verbose`` is False, only
+           the names of the members are printed. If it is True, an `ls -l'-like
+           output is produced. ``members`` is optional and must be a subset of the
+           list returned by getmembers().
+        """
+        self._check()
+
+        if members is None:
+            members = self
+        for tarinfo in members:
+            if verbose:
+                if tarinfo.mode is None:
+                    _safe_print("??????????")
+                else:
+                    _safe_print(stat.filemode(tarinfo.mode))
+                _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
+                                       tarinfo.gname or tarinfo.gid))
+                if tarinfo.ischr() or tarinfo.isblk():
+                    _safe_print("%10s" %
+                            ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
+                else:
+                    _safe_print("%10d" % tarinfo.size)
+                if tarinfo.mtime is None:
+                    _safe_print("????-??-?? ??:??:??")
+                else:
+                    _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
+                                % time.localtime(tarinfo.mtime)[:6])
+
+            _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
+
+            if verbose:
+                if tarinfo.issym():
+                    _safe_print("-> " + tarinfo.linkname)
+                if tarinfo.islnk():
+                    _safe_print("link to " + tarinfo.linkname)
+            print()
+
+    def add(self, name, arcname=None, recursive=True, *, filter=None):
+        """Add the file ``name`` to the archive. ``name`` may be any type of file
+           (directory, fifo, symbolic link, etc.). If given, ``arcname``
+           specifies an alternative name for the file in the archive.
+           Directories are added recursively by default. This can be avoided by
+           setting ``recursive`` to False. ``filter`` is a function
+           that expects a TarInfo object argument and returns the changed
+           TarInfo object, if it returns None the TarInfo object will be
+           excluded from the archive.
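+
+           A minimal filter sketch (the ".tmp" rule and paths are
+           illustrative):
+
+               def exclude_tmp(tarinfo):
+                   return None if tarinfo.name.endswith(".tmp") else tarinfo
+
+               tar.add("src", filter=exclude_tmp)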
+        """
+        self._check("awx")
+
+        if arcname is None:
+            arcname = name
+
+        # Skip if somebody tries to archive the archive...
+        if self.name is not None and os.path.abspath(name) == self.name:
+            self._dbg(2, "tarfile: Skipped %r" % name)
+            return
+
+        self._dbg(1, name)
+
+        # Create a TarInfo object from the file.
+        tarinfo = self.gettarinfo(name, arcname)
+
+        if tarinfo is None:
+            self._dbg(1, "tarfile: Unsupported type %r" % name)
+            return
+
+        # Change or exclude the TarInfo object.
+        if filter is not None:
+            tarinfo = filter(tarinfo)
+            if tarinfo is None:
+                self._dbg(2, "tarfile: Excluded %r" % name)
+                return
+
+        # Append the tar header and data to the archive.
+        if tarinfo.isreg():
+            with bltn_open(name, "rb") as f:
+                self.addfile(tarinfo, f)
+
+        elif tarinfo.isdir():
+            self.addfile(tarinfo)
+            if recursive:
+                for f in sorted(os.listdir(name)):
+                    self.add(os.path.join(name, f), os.path.join(arcname, f),
+                            recursive, filter=filter)
+
+        else:
+            self.addfile(tarinfo)
+
+    def addfile(self, tarinfo, fileobj=None):
+        """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
+           given, it should be a binary file, and tarinfo.size bytes are read
+           from it and added to the archive. You can create TarInfo objects
+           directly, or by using gettarinfo().
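+
+           A minimal sketch adding in-memory data (names illustrative):
+
+               import io
+               data = b"hello"
+               info = TarInfo("greeting.txt")
+               info.size = len(data)
+               tar.addfile(info, io.BytesIO(data))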
+        """
+        self._check("awx")
+
+        tarinfo = copy.copy(tarinfo)
+
+        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
+        self.fileobj.write(buf)
+        self.offset += len(buf)
+        bufsize = self.copybufsize
+        # If there's data to follow, append it.
+        if fileobj is not None:
+            copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
+            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+            if remainder > 0:
+                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+                blocks += 1
+            self.offset += blocks * BLOCKSIZE
+
+        self.members.append(tarinfo)
+
+    def _get_filter_function(self, filter):
+        if filter is None:
+            filter = self.extraction_filter
+            if filter is None:
+                warnings.warn(
+                    'Python 3.14 will, by default, filter extracted tar '
+                    + 'archives and reject files or modify their metadata. '
+                    + 'Use the filter argument to control this behavior.',
+                    DeprecationWarning)
+                return fully_trusted_filter
+            if isinstance(filter, str):
+                raise TypeError(
+                    'String names are not supported for '
+                    + 'TarFile.extraction_filter. Use a function such as '
+                    + 'tarfile.data_filter directly.')
+            return filter
+        if callable(filter):
+            return filter
+        try:
+            return _NAMED_FILTERS[filter]
+        except KeyError:
+            raise ValueError(f"filter {filter!r} not found") from None
+
+    def extractall(self, path=".", members=None, *, numeric_owner=False,
+                   filter=None):
+        """Extract all members from the archive to the current working
+           directory and set owner, modification time and permissions on
+           directories afterwards. `path' specifies a different directory
+           to extract to. `members' is optional and must be a subset of the
+           list returned by getmembers(). If `numeric_owner` is True, only
+           the numbers for user/group names are used and not the names.
+
+           The `filter` function will be called on each member just
+           before extraction.
+           It can return a changed TarInfo or None to skip the member.
+           String names of common filters are accepted.
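+
+           A minimal sketch using the named 'data' filter (the archive
+           path is hypothetical):
+
+               with TarFile.open("example.tar") as tar:
+                   tar.extractall(path="out", filter="data")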
+        """
+        directories = []
+
+        filter_function = self._get_filter_function(filter)
+        if members is None:
+            members = self
+
+        for member in members:
+            tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+            if tarinfo is None:
+                continue
+            if tarinfo.isdir():
+                # For directories, delay setting attributes until later,
+                # since permissions can interfere with extraction and
+                # extracting contents can reset mtime.
+                directories.append(tarinfo)
+            self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
+                              numeric_owner=numeric_owner)
+
+        # Reverse sort directories.
+        directories.sort(key=lambda a: a.name, reverse=True)
+
+        # Set correct owner, mtime and filemode on directories.
+        for tarinfo in directories:
+            dirpath = os.path.join(path, tarinfo.name)
+            try:
+                self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
+                self.utime(tarinfo, dirpath)
+                self.chmod(tarinfo, dirpath)
+            except ExtractError as e:
+                self._handle_nonfatal_error(e)
+
+    def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
+                filter=None):
+        """Extract a member from the archive to the current working directory,
+           using its full name. Its file information is extracted as accurately
+           as possible. `member' may be a filename or a TarInfo object. You can
+           specify a different directory using `path'. File attributes (owner,
+           mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
+           is True, only the numbers for user/group names are used and not
+           the names.
+
+           The `filter` function will be called before extraction.
+           It can return a changed TarInfo or None to skip the member.
+           String names of common filters are accepted.
+        """
+        filter_function = self._get_filter_function(filter)
+        tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+        if tarinfo is not None:
+            self._extract_one(tarinfo, path, set_attrs, numeric_owner)
+
+    def _get_extract_tarinfo(self, member, filter_function, path):
+        """Get filtered TarInfo (or None) from member, which might be a str"""
+        if isinstance(member, str):
+            tarinfo = self.getmember(member)
+        else:
+            tarinfo = member
+
+        unfiltered = tarinfo
+        try:
+            tarinfo = filter_function(tarinfo, path)
+        except (OSError, FilterError) as e:
+            self._handle_fatal_error(e)
+        except ExtractError as e:
+            self._handle_nonfatal_error(e)
+        if tarinfo is None:
+            self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
+            return None
+        # Prepare the link target for makelink().
+        if tarinfo.islnk():
+            tarinfo = copy.copy(tarinfo)
+            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+        return tarinfo
+
+    def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
+        """Extract from filtered tarinfo to disk"""
+        self._check("r")
+
+        try:
+            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
+                                 set_attrs=set_attrs,
+                                 numeric_owner=numeric_owner)
+        except OSError as e:
+            self._handle_fatal_error(e)
+        except ExtractError as e:
+            self._handle_nonfatal_error(e)
+
+    def _handle_nonfatal_error(self, e):
+        """Handle non-fatal error (ExtractError) according to errorlevel"""
+        if self.errorlevel > 1:
+            raise
+        else:
+            self._dbg(1, "tarfile: %s" % e)
+
+    def _handle_fatal_error(self, e):
+        """Handle "fatal" error according to self.errorlevel"""
+        if self.errorlevel > 0:
+            raise
+        elif isinstance(e, OSError):
+            if e.filename is None:
+                self._dbg(1, "tarfile: %s" % e.strerror)
+            else:
+                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+        else:
+            self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
+
+    def extractfile(self, member):
+        """Extract a member from the archive as a file object. ``member`` may be
+           a filename or a TarInfo object. If ``member`` is a regular file or
+           a link, an io.BufferedReader object is returned. For all other
+           existing members, None is returned. If ``member`` does not appear
+           in the archive, KeyError is raised.
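+
+           A minimal sketch (the member name is hypothetical):
+
+               f = tar.extractfile("docs/README")
+               if f is not None:
+                   data = f.read()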
+        """
+        self._check("r")
+
+        if isinstance(member, str):
+            tarinfo = self.getmember(member)
+        else:
+            tarinfo = member
+
+        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
+            # Members with unknown types are treated as regular files.
+            return self.fileobject(self, tarinfo)
+
+        elif tarinfo.islnk() or tarinfo.issym():
+            if isinstance(self.fileobj, _Stream):
+                # A small but ugly workaround for the case that someone tries
+                # to extract a (sym)link as a file-object from a non-seekable
+                # stream of tar blocks.
+                raise StreamError("cannot extract (sym)link as file object")
+            else:
+                # A (sym)link's file object is its target's file object.
+                return self.extractfile(self._find_link_target(tarinfo))
+        else:
+            # If there's no data associated with the member (directory, chrdev,
+            # blkdev, etc.), return None instead of a file object.
+            return None
+
+    def _extract_member(self, tarinfo, targetpath, set_attrs=True,
+                        numeric_owner=False):
+        """Extract the TarInfo object tarinfo to a physical
+           file called targetpath.
+        """
+        # Fetch the TarInfo object for the given name
+        # and build the destination pathname, replacing
+        # forward slashes with platform-specific separators.
+        targetpath = targetpath.rstrip("/")
+        targetpath = targetpath.replace("/", os.sep)
+
+        # Create all upper directories.
+        upperdirs = os.path.dirname(targetpath)
+        if upperdirs and not os.path.exists(upperdirs):
+            # Create directories that are not part of the archive with
+            # default permissions.
+            os.makedirs(upperdirs)
+
+        if tarinfo.islnk() or tarinfo.issym():
+            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
+        else:
+            self._dbg(1, tarinfo.name)
+
+        if tarinfo.isreg():
+            self.makefile(tarinfo, targetpath)
+        elif tarinfo.isdir():
+            self.makedir(tarinfo, targetpath)
+        elif tarinfo.isfifo():
+            self.makefifo(tarinfo, targetpath)
+        elif tarinfo.ischr() or tarinfo.isblk():
+            self.makedev(tarinfo, targetpath)
+        elif tarinfo.islnk() or tarinfo.issym():
+            self.makelink(tarinfo, targetpath)
+        elif tarinfo.type not in SUPPORTED_TYPES:
+            self.makeunknown(tarinfo, targetpath)
+        else:
+            self.makefile(tarinfo, targetpath)
+
+        if set_attrs:
+            self.chown(tarinfo, targetpath, numeric_owner)
+            if not tarinfo.issym():
+                self.chmod(tarinfo, targetpath)
+                self.utime(tarinfo, targetpath)
+
+    #--------------------------------------------------------------------------
+    # Below are the different file methods. They are called via
+    # _extract_member() when extract() is called. They can be replaced in a
+    # subclass to implement other functionality.
+
+    def makedir(self, tarinfo, targetpath):
+        """Make a directory called targetpath.
+        """
+        try:
+            if tarinfo.mode is None:
+                # Use the system's default mode
+                os.mkdir(targetpath)
+            else:
+                # Use a safe mode for the directory, the real mode is set
+                # later in _extract_member().
+                os.mkdir(targetpath, 0o700)
+        except FileExistsError:
+            if not os.path.isdir(targetpath):
+                raise
+
+    def makefile(self, tarinfo, targetpath):
+        """Make a file called targetpath.
+        """
+        source = self.fileobj
+        source.seek(tarinfo.offset_data)
+        bufsize = self.copybufsize
+        with bltn_open(targetpath, "wb") as target:
+            if tarinfo.sparse is not None:
+                for offset, size in tarinfo.sparse:
+                    target.seek(offset)
+                    copyfileobj(source, target, size, ReadError, bufsize)
+                target.seek(tarinfo.size)
+                target.truncate()
+            else:
+                copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
+
+    def makeunknown(self, tarinfo, targetpath):
+        """Make a file from a TarInfo object with an unknown type
+           at targetpath.
+        """
+        self.makefile(tarinfo, targetpath)
+        self._dbg(1, "tarfile: Unknown file type %r, " \
+                     "extracted as regular file." % tarinfo.type)
+
+    def makefifo(self, tarinfo, targetpath):
+        """Make a fifo called targetpath.
+        """
+        if hasattr(os, "mkfifo"):
+            os.mkfifo(targetpath)
+        else:
+            raise ExtractError("fifo not supported by system")
+
+    def makedev(self, tarinfo, targetpath):
+        """Make a character or block device called targetpath.
+        """
+        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
+            raise ExtractError("special devices not supported by system")
+
+        mode = tarinfo.mode
+        if mode is None:
+            # Use mknod's default
+            mode = 0o600
+        if tarinfo.isblk():
+            mode |= stat.S_IFBLK
+        else:
+            mode |= stat.S_IFCHR
+
+        os.mknod(targetpath, mode,
+                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
+
+    def makelink(self, tarinfo, targetpath):
+        """Make a (symbolic) link called targetpath. If it cannot be created
+          (platform limitation), we try to make a copy of the referenced file
+          instead of a link.
+        """
+        try:
+            # For systems that support symbolic and hard links.
+            if tarinfo.issym():
+                if os.path.lexists(targetpath):
+                    # Avoid FileExistsError on following os.symlink.
+                    os.unlink(targetpath)
+                os.symlink(tarinfo.linkname, targetpath)
+            else:
+                if os.path.exists(tarinfo._link_target):
+                    os.link(tarinfo._link_target, targetpath)
+                else:
+                    self._extract_member(self._find_link_target(tarinfo),
+                                         targetpath)
+        except symlink_exception:
+            try:
+                self._extract_member(self._find_link_target(tarinfo),
+                                     targetpath)
+            except KeyError:
+                raise ExtractError("unable to resolve link inside archive") from None
+
+    def chown(self, tarinfo, targetpath, numeric_owner):
+        """Set owner of targetpath according to tarinfo. If numeric_owner
+           is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
+           is False, fall back to .gid/.uid when the search based on name
+           fails.
+        """
+        if hasattr(os, "geteuid") and os.geteuid() == 0:
+            # We have to be root to do so.
+            g = tarinfo.gid
+            u = tarinfo.uid
+            if not numeric_owner:
+                try:
+                    if grp and tarinfo.gname:
+                        g = grp.getgrnam(tarinfo.gname)[2]
+                except KeyError:
+                    pass
+                try:
+                    if pwd and tarinfo.uname:
+                        u = pwd.getpwnam(tarinfo.uname)[2]
+                except KeyError:
+                    pass
+            if g is None:
+                g = -1
+            if u is None:
+                u = -1
+            try:
+                if tarinfo.issym() and hasattr(os, "lchown"):
+                    os.lchown(targetpath, u, g)
+                else:
+                    os.chown(targetpath, u, g)
+            except OSError as e:
+                raise ExtractError("could not change owner") from e
+
+    def chmod(self, tarinfo, targetpath):
+        """Set file permissions of targetpath according to tarinfo.
+        """
+        if tarinfo.mode is None:
+            return
+        try:
+            os.chmod(targetpath, tarinfo.mode)
+        except OSError as e:
+            raise ExtractError("could not change mode") from e
+
+    def utime(self, tarinfo, targetpath):
+        """Set modification time of targetpath according to tarinfo.
+        """
+        mtime = tarinfo.mtime
+        if mtime is None:
+            return
+        if not hasattr(os, 'utime'):
+            return
+        try:
+            os.utime(targetpath, (mtime, mtime))
+        except OSError as e:
+            raise ExtractError("could not change modification time") from e
+
+    #--------------------------------------------------------------------------
+    def next(self):
+        """Return the next member of the archive as a TarInfo object, when
+           TarFile is opened for reading. Return None if there are no
+           more members available.
+        """
+        self._check("ra")
+        if self.firstmember is not None:
+            m = self.firstmember
+            self.firstmember = None
+            return m
+
+        # Advance the file pointer.
+        if self.offset != self.fileobj.tell():
+            if self.offset == 0:
+                return None
+            self.fileobj.seek(self.offset - 1)
+            if not self.fileobj.read(1):
+                raise ReadError("unexpected end of data")
+
+        # Read the next block.
+        tarinfo = None
+        while True:
+            try:
+                tarinfo = self.tarinfo.fromtarfile(self)
+            except EOFHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, "0x%X: %s" % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+            except InvalidHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, "0x%X: %s" % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+                elif self.offset == 0:
+                    raise ReadError(str(e)) from None
+            except EmptyHeaderError:
+                if self.offset == 0:
+                    raise ReadError("empty file") from None
+            except TruncatedHeaderError as e:
+                if self.offset == 0:
+                    raise ReadError(str(e)) from None
+            except SubsequentHeaderError as e:
+                raise ReadError(str(e)) from None
+            except Exception as e:
+                try:
+                    import zlib
+                    if isinstance(e, zlib.error):
+                        raise ReadError(f'zlib error: {e}') from None
+                    else:
+                        raise e
+                except ImportError:
+                    raise e
+            break
+
+        if tarinfo is not None:
+            self.members.append(tarinfo)
+        else:
+            self._loaded = True
+
+        return tarinfo
+
+    #--------------------------------------------------------------------------
+    # Little helper methods:
+
+    def _getmember(self, name, tarinfo=None, normalize=False):
+        """Find an archive member by name from bottom to top.
+           If tarinfo is given, it is used as the starting point.
+        """
+        # Ensure that all members have been loaded.
+        members = self.getmembers()
+
+        # Limit the member search list up to tarinfo.
+        skipping = False
+        if tarinfo is not None:
+            try:
+                index = members.index(tarinfo)
+            except ValueError:
+                # The given starting point might be a (modified) copy.
+                # We'll later skip members until we find an equivalent.
+                skipping = True
+            else:
+                # Happy fast path
+                members = members[:index]
+
+        if normalize:
+            name = os.path.normpath(name)
+
+        for member in reversed(members):
+            if skipping:
+                if tarinfo.offset == member.offset:
+                    skipping = False
+                continue
+            if normalize:
+                member_name = os.path.normpath(member.name)
+            else:
+                member_name = member.name
+
+            if name == member_name:
+                return member
+
+        if skipping:
+            # Starting point was not found
+            raise ValueError(tarinfo)
+
+    def _load(self):
+        """Read through the entire archive file and look for readable
+           members.
+        """
+        while self.next() is not None:
+            pass
+        self._loaded = True
+
+    def _check(self, mode=None):
+        """Check if TarFile is still open, and if the operation's mode
+           corresponds to TarFile's mode.
+        """
+        if self.closed:
+            raise OSError("%s is closed" % self.__class__.__name__)
+        if mode is not None and self.mode not in mode:
+            raise OSError("bad operation for mode %r" % self.mode)
+
+    def _find_link_target(self, tarinfo):
+        """Find the target member of a symlink or hardlink member in the
+           archive.
+        """
+        if tarinfo.issym():
+            # Always search the entire archive.
+            linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
+            limit = None
+        else:
+            # Search the archive before the link, because a hard link is
+            # just a reference to an already archived file.
+            linkname = tarinfo.linkname
+            limit = tarinfo
+
+        member = self._getmember(linkname, tarinfo=limit, normalize=True)
+        if member is None:
+            raise KeyError("linkname %r not found" % linkname)
+        return member
+
+    def __iter__(self):
+        """Provide an iterator object.
+        """
+        if self._loaded:
+            yield from self.members
+            return
+
+        # Yield items using TarFile's next() method.
+        # When all members have been read, set TarFile as _loaded.
+        index = 0
+        # Fix for SF #1100429: Under rare circumstances it can
+        # happen that getmembers() is called during iteration,
+        # which will have already exhausted the next() method.
+        if self.firstmember is not None:
+            tarinfo = self.next()
+            index += 1
+            yield tarinfo
+
+        while True:
+            if index < len(self.members):
+                tarinfo = self.members[index]
+            elif not self._loaded:
+                tarinfo = self.next()
+                if not tarinfo:
+                    self._loaded = True
+                    return
+            else:
+                return
+            index += 1
+            yield tarinfo
+
+    def _dbg(self, level, msg):
+        """Write debugging output to sys.stderr.
+        """
+        if level <= self.debug:
+            print(msg, file=sys.stderr)
+
+    def __enter__(self):
+        self._check()
+        return self
+
+    def __exit__(self, type, value, traceback):
+        if type is None:
+            self.close()
+        else:
+            # An exception occurred. We must not call close() because
+            # it would try to write end-of-archive blocks and padding.
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+
+#--------------------
+# exported functions
+#--------------------
+
+def is_tarfile(name):
+    """Return True if name points to a tar archive that we
+       are able to handle, else return False.
+
+       'name' should be a string, file, or file-like object.
+    """
+    try:
+        if hasattr(name, "read"):
+            pos = name.tell()
+            t = open(fileobj=name)
+            name.seek(pos)
+        else:
+            t = open(name)
+        t.close()
+        return True
+    except TarError:
+        return False
+
+open = TarFile.open
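+
+# Illustrative usage sketch (the archive name is hypothetical): probe a
+# path with is_tarfile() before opening it via the module-level open():
+#
+#     if is_tarfile('example.tar.gz'):
+#         with open('example.tar.gz', 'r:*') as tf:
+#             print(tf.getnames())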
+
+
+def main():
+    import argparse
+
+    description = 'A simple command-line interface for tarfile module.'
+    parser = argparse.ArgumentParser(description=description)
+    parser.add_argument('-v', '--verbose', action='store_true', default=False,
+                        help='Verbose output')
+    parser.add_argument('--filter', metavar='<filtername>',
+                        choices=_NAMED_FILTERS,
+                        help='Filter for extraction')
+
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument('-l', '--list', metavar='<tarfile>',
+                       help='Show listing of a tarfile')
+    group.add_argument('-e', '--extract', nargs='+',
+                       metavar=('<tarfile>', '<output_dir>'),
+                       help='Extract tarfile into target dir')
+    group.add_argument('-c', '--create', nargs='+',
+                       metavar=('<name>', '<file>'),
+                       help='Create tarfile from sources')
+    group.add_argument('-t', '--test', metavar='<tarfile>',
+                       help='Test if a tarfile is valid')
+
+    args = parser.parse_args()
+
+    if args.filter and args.extract is None:
+        parser.exit(1, '--filter is only valid for extraction\n')
+
+    if args.test is not None:
+        src = args.test
+        if is_tarfile(src):
+            with open(src, 'r') as tar:
+                tar.getmembers()
+                print(tar.getmembers(), file=sys.stderr)
+            if args.verbose:
+                print('{!r} is a tar archive.'.format(src))
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.list is not None:
+        src = args.list
+        if is_tarfile(src):
+            with TarFile.open(src, 'r:*') as tf:
+                tf.list(verbose=args.verbose)
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.extract is not None:
+        if len(args.extract) == 1:
+            src = args.extract[0]
+            curdir = os.curdir
+        elif len(args.extract) == 2:
+            src, curdir = args.extract
+        else:
+            parser.exit(1, parser.format_help())
+
+        if is_tarfile(src):
+            with TarFile.open(src, 'r:*') as tf:
+                tf.extractall(path=curdir, filter=args.filter)
+            if args.verbose:
+                if curdir == '.':
+                    msg = '{!r} file is extracted.'.format(src)
+                else:
+                    msg = ('{!r} file is extracted '
+                           'into {!r} directory.').format(src, curdir)
+                print(msg)
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.create is not None:
+        tar_name = args.create.pop(0)
+        _, ext = os.path.splitext(tar_name)
+        compressions = {
+            # gz
+            '.gz': 'gz',
+            '.tgz': 'gz',
+            # xz
+            '.xz': 'xz',
+            '.txz': 'xz',
+            # bz2
+            '.bz2': 'bz2',
+            '.tbz': 'bz2',
+            '.tbz2': 'bz2',
+            '.tb2': 'bz2',
+        }
+        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
+        tar_files = args.create
+
+        with TarFile.open(tar_name, tar_mode) as tf:
+            for file_name in tar_files:
+                tf.add(file_name)
+
+        if args.verbose:
+            print('{!r} file created.'.format(tar_name))
+
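+# Example invocations of the CLI defined above (a sketch: the archive and
+# file names are hypothetical, and it assumes the package is importable
+# as backports.tarfile via its __main__ module):
+#
+#     python -m backports.tarfile --list archive.tar.gz
+#     python -m backports.tarfile --extract archive.tar.gz outdir --filter data
+#     python -m backports.tarfile --create archive.tgz src/ README.rst
+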
+if __name__ == '__main__':
+    main()
diff --git a/pkg_resources/_vendor/backports/tarfile/__main__.py b/pkg_resources/_vendor/backports/tarfile/__main__.py
new file mode 100644
index 0000000000..daf5509086
--- /dev/null
+++ b/pkg_resources/_vendor/backports/tarfile/__main__.py
@@ -0,0 +1,5 @@
+from . import main
+
+
+if __name__ == '__main__':
+    main()
diff --git a/pkg_resources/_vendor/backports/tarfile/compat/__init__.py b/pkg_resources/_vendor/backports/tarfile/compat/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pkg_resources/_vendor/backports/tarfile/compat/py38.py b/pkg_resources/_vendor/backports/tarfile/compat/py38.py
new file mode 100644
index 0000000000..20fbbfc1c0
--- /dev/null
+++ b/pkg_resources/_vendor/backports/tarfile/compat/py38.py
@@ -0,0 +1,24 @@
+import sys
+
+
+if sys.version_info < (3, 9):
+
+    def removesuffix(self, suffix):
+        # suffix='' should not call self[:-0].
+        if suffix and self.endswith(suffix):
+            return self[: -len(suffix)]
+        else:
+            return self[:]
+
+    def removeprefix(self, prefix):
+        if self.startswith(prefix):
+            return self[len(prefix) :]
+        else:
+            return self[:]
+else:
+
+    def removesuffix(self, suffix):
+        return self.removesuffix(suffix)
+
+    def removeprefix(self, prefix):
+        return self.removeprefix(prefix)
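+
+# Worked examples (illustrative only); both helpers mirror the str methods
+# added in Python 3.9:
+#
+#     removesuffix("archive.tar.gz", ".gz")  # -> "archive.tar"
+#     removeprefix("./member", "./")         # -> "member"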
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
new file mode 100644
index 0000000000..c865140ab2
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
@@ -0,0 +1,64 @@
+Metadata-Version: 2.1
+Name: jaraco.functools
+Version: 4.0.1
+Summary: Functools like those found in stdlib
+Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
+Project-URL: Homepage, https://github.com/jaraco/jaraco.functools
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: more-itertools
+Provides-Extra: docs
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: sphinx-lint ; extra == 'docs'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest >=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
+Requires-Dist: jaraco.classes ; extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
+   :target: https://pypi.org/project/jaraco.functools
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
+
+.. image:: https://github.com/jaraco/jaraco.functools/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
+   :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/jaraco.functools
+   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=readme
+
+Additional functools in the spirit of stdlib's functools.
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=referral&utm_campaign=github>`_.
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
new file mode 100644
index 0000000000..cf552f0f48
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.functools-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.1.dist-info/METADATA,sha256=i4aUaQDX-jjdEQK5wevhegyx8JyLfin2HyvaSk3FHso,2891
+jaraco.functools-4.0.1.dist-info/RECORD,,
+jaraco.functools-4.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+jaraco.functools-4.0.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=gk3dsgHzo5F_U74HzAvpNivFAPCkPJ1b2-yCd62dfnw,3878
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..f6205a5f19
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/REQUESTED b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/METADATA b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
new file mode 100644
index 0000000000..d29c50158a
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
@@ -0,0 +1,45 @@
+Metadata-Version: 2.1
+Name: backports.tarfile
+Version: 1.1.1
+Summary: Backport of CPython tarfile module
+Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
+Project-URL: Homepage, https://github.com/jaraco/backports.tarfile
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Provides-Extra: docs
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: sphinx-lint ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: jaraco.test ; extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
+   :target: https://pypi.org/project/backports.tarfile
+
+.. image:: https://img.shields.io/pypi/pyversions/backports.tarfile.svg
+
+.. image:: https://github.com/jaraco/backports.tarfile/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/backports.tarfile/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. .. image:: https://readthedocs.org/projects/backportstarfile/badge/?version=latest
+..    :target: https://backportstarfile.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/RECORD b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
new file mode 100644
index 0000000000..af585a832d
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
@@ -0,0 +1,17 @@
+backports.tarfile-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.1.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.1.1.dist-info/METADATA,sha256=XtPZDjwuCtDbN49cpJgthPJm40mfbhk5BllI-jBVVxc,1969
+backports.tarfile-1.1.1.dist-info/RECORD,,
+backports.tarfile-1.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.1.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.1.1.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81
+backports/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/__init__.py,sha256=QOayikyptGOBh_dN1WFI5w0nnUYpX5Gma7p2JgksJIY,106960
+backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59
+backports/tarfile/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/__pycache__/__main__.cpython-312.pyc,,
+backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports/tarfile/compat/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/compat/__pycache__/py38.cpython-312.pyc,,
+backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..99d2be5b64
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+backports
diff --git a/setuptools/_vendor/backports/tarfile/__init__.py b/setuptools/_vendor/backports/tarfile/__init__.py
new file mode 100644
index 0000000000..6dd498dc04
--- /dev/null
+++ b/setuptools/_vendor/backports/tarfile/__init__.py
@@ -0,0 +1,2902 @@
+#!/usr/bin/env python3
+#-------------------------------------------------------------------
+# tarfile.py
+#-------------------------------------------------------------------
+# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
+# All rights reserved.
+#
+# Permission  is  hereby granted,  free  of charge,  to  any person
+# obtaining a  copy of  this software  and associated documentation
+# files  (the  "Software"),  to   deal  in  the  Software   without
+# restriction,  including  without limitation  the  rights to  use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies  of  the  Software,  and to  permit  persons  to  whom the
+# Software  is  furnished  to  do  so,  subject  to  the  following
+# conditions:
+#
+# The above copyright  notice and this  permission notice shall  be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
+# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
+# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
+# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
+# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
+# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+"""Read from and write to tar format archives.
+"""
+
+version     = "0.9.0"
+__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
+__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
+
+#---------
+# Imports
+#---------
+from builtins import open as bltn_open
+import sys
+import os
+import io
+import shutil
+import stat
+import time
+import struct
+import copy
+import re
+import warnings
+
+from .compat.py38 import removesuffix
+
+try:
+    import pwd
+except ImportError:
+    pwd = None
+try:
+    import grp
+except ImportError:
+    grp = None
+
+# os.symlink on Windows prior to 6.0 raises NotImplementedError
+# OSError (winerror=1314) will be raised if the caller does not hold the
+# SeCreateSymbolicLinkPrivilege privilege
+symlink_exception = (AttributeError, NotImplementedError, OSError)
+
+# from tarfile import *
+__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
+           "CompressionError", "StreamError", "ExtractError", "HeaderError",
+           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
+           "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
+           "tar_filter", "FilterError", "AbsoluteLinkError",
+           "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
+           "LinkOutsideDestinationError"]
+
+
+#---------------------------------------------------------
+# tar constants
+#---------------------------------------------------------
+NUL = b"\0"                     # the null character
+BLOCKSIZE = 512                 # length of processing blocks
+RECORDSIZE = BLOCKSIZE * 20     # length of records
+GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
+POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string
+
+LENGTH_NAME = 100               # maximum length of a filename
+LENGTH_LINK = 100               # maximum length of a linkname
+LENGTH_PREFIX = 155             # maximum length of the prefix field
+
+REGTYPE = b"0"                  # regular file
+AREGTYPE = b"\0"                # regular file
+LNKTYPE = b"1"                  # link (inside tarfile)
+SYMTYPE = b"2"                  # symbolic link
+CHRTYPE = b"3"                  # character special device
+BLKTYPE = b"4"                  # block special device
+DIRTYPE = b"5"                  # directory
+FIFOTYPE = b"6"                 # fifo special device
+CONTTYPE = b"7"                 # contiguous file
+
+GNUTYPE_LONGNAME = b"L"         # GNU tar longname
+GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
+GNUTYPE_SPARSE = b"S"           # GNU tar sparse file
+
+XHDTYPE = b"x"                  # POSIX.1-2001 extended header
+XGLTYPE = b"g"                  # POSIX.1-2001 global header
+SOLARIS_XHDTYPE = b"X"          # Solaris extended header
+
+USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
+GNU_FORMAT = 1                  # GNU tar format
+PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
+DEFAULT_FORMAT = PAX_FORMAT
+
+#---------------------------------------------------------
+# tarfile constants
+#---------------------------------------------------------
+# File types that tarfile supports:
+SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
+                   SYMTYPE, DIRTYPE, FIFOTYPE,
+                   CONTTYPE, CHRTYPE, BLKTYPE,
+                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+                   GNUTYPE_SPARSE)
+
+# File types that will be treated as a regular file.
+REGULAR_TYPES = (REGTYPE, AREGTYPE,
+                 CONTTYPE, GNUTYPE_SPARSE)
+
+# File types that are part of the GNU tar format.
+GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+             GNUTYPE_SPARSE)
+
+# Fields from a pax header that override a TarInfo attribute.
+PAX_FIELDS = ("path", "linkpath", "size", "mtime",
+              "uid", "gid", "uname", "gname")
+
+# Fields from a pax header that are affected by hdrcharset.
+PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
+
+# Fields in a pax header that are numbers, all other fields
+# are treated as strings.
+PAX_NUMBER_FIELDS = {
+    "atime": float,
+    "ctime": float,
+    "mtime": float,
+    "uid": int,
+    "gid": int,
+    "size": int
+}
+
+#---------------------------------------------------------
+# initialization
+#---------------------------------------------------------
+if os.name == "nt":
+    ENCODING = "utf-8"
+else:
+    ENCODING = sys.getfilesystemencoding()
+
+#---------------------------------------------------------
+# Some useful functions
+#---------------------------------------------------------
+
+def stn(s, length, encoding, errors):
+    """Convert a string to a null-terminated bytes object.
+    """
+    if s is None:
+        raise ValueError("metadata cannot contain None")
+    s = s.encode(encoding, errors)
+    return s[:length] + (length - len(s)) * NUL
+
+def nts(s, encoding, errors):
+    """Convert a null-terminated bytes object to a string.
+    """
+    p = s.find(b"\0")
+    if p != -1:
+        s = s[:p]
+    return s.decode(encoding, errors)
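+
+# Worked examples (illustrative only): stn() pads or truncates to the field
+# length, nts() stops at the first NUL byte.
+#
+#     stn("abc", 8, "utf-8", "strict")        # -> b'abc\x00\x00\x00\x00\x00'
+#     nts(b"abc\x00\x00", "utf-8", "strict")  # -> 'abc'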
+
+def nti(s):
+    """Convert a number field to a python number.
+    """
+    # There are two possible encodings for a number field, see
+    # itn() below.
+    if s[0] in (0o200, 0o377):
+        n = 0
+        for i in range(len(s) - 1):
+            n <<= 8
+            n += s[i + 1]
+        if s[0] == 0o377:
+            n = -(256 ** (len(s) - 1) - n)
+    else:
+        try:
+            s = nts(s, "ascii", "strict")
+            n = int(s.strip() or "0", 8)
+        except ValueError:
+            raise InvalidHeaderError("invalid header")
+    return n
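+
+# Worked example (illustrative only): the common case is an octal field,
+# e.g. nti(b"0000644\x00") == 0o644. A leading 0o200 or 0o377 byte instead
+# marks a big-endian base-256 value (see itn() below).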
+
+def itn(n, digits=8, format=DEFAULT_FORMAT):
+    """Convert a python number to a number field.
+    """
+    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
+    # octal digits followed by a null-byte, this allows values up to
+    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
+    # that if necessary. A leading 0o200 or 0o377 byte indicate this
+    # particular encoding, the following digits-1 bytes are a big-endian
+    # base-256 representation. This allows values up to (256**(digits-1))-1.
+    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
+    # number.
+    original_n = n
+    n = int(n)
+    if 0 <= n < 8 ** (digits - 1):
+        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
+    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
+        if n >= 0:
+            s = bytearray([0o200])
+        else:
+            s = bytearray([0o377])
+            n = 256 ** digits + n
+
+        for i in range(digits - 1):
+            s.insert(1, n & 0o377)
+            n >>= 8
+    else:
+        raise ValueError("overflow in number field")
+
+    return s
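+
+# Worked examples (illustrative only), the inverse of nti():
+#
+#     itn(0o644)                  # -> b'0000644\x00' (octal digits + NUL)
+#     itn(8 ** 7, 8, GNU_FORMAT)  # too large for 7 octal digits: a 0o200
+#                                 # prefix + 7-byte big-endian base-256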
+
+def calc_chksums(buf):
+    """Calculate the checksum for a member's header by summing up all
+       characters except for the chksum field which is treated as if
+       it was filled with spaces. According to the GNU tar sources,
+       some tars (Sun and NeXT) calculate chksum with signed char,
+       which will be different if there are chars in the buffer with
+       the high bit set. So we calculate two checksums, unsigned and
+       signed.
+    """
+    unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
+    signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
+    return unsigned_chksum, signed_chksum
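+
+# Illustrative note: a header is accepted if its stored chksum field
+# matches either of the two sums, e.g.
+#
+#     unsigned, signed = calc_chksums(buf)  # buf: 512-byte header block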
+
+def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
+    """Copy length bytes from fileobj src to fileobj dst.
+       If length is None, copy the entire content.
+    """
+    bufsize = bufsize or 16 * 1024
+    if length == 0:
+        return
+    if length is None:
+        shutil.copyfileobj(src, dst, bufsize)
+        return
+
+    blocks, remainder = divmod(length, bufsize)
+    for b in range(blocks):
+        buf = src.read(bufsize)
+        if len(buf) < bufsize:
+            raise exception("unexpected end of data")
+        dst.write(buf)
+
+    if remainder != 0:
+        buf = src.read(remainder)
+        if len(buf) < remainder:
+            raise exception("unexpected end of data")
+        dst.write(buf)
+    return
+
+def _safe_print(s):
+    encoding = getattr(sys.stdout, 'encoding', None)
+    if encoding is not None:
+        s = s.encode(encoding, 'backslashreplace').decode(encoding)
+    print(s, end=' ')
+
+
+class TarError(Exception):
+    """Base exception."""
+    pass
+class ExtractError(TarError):
+    """General exception for extract errors."""
+    pass
+class ReadError(TarError):
+    """Exception for unreadable tar archives."""
+    pass
+class CompressionError(TarError):
+    """Exception for unavailable compression methods."""
+    pass
+class StreamError(TarError):
+    """Exception for unsupported operations on stream-like TarFiles."""
+    pass
+class HeaderError(TarError):
+    """Base exception for header errors."""
+    pass
+class EmptyHeaderError(HeaderError):
+    """Exception for empty headers."""
+    pass
+class TruncatedHeaderError(HeaderError):
+    """Exception for truncated headers."""
+    pass
+class EOFHeaderError(HeaderError):
+    """Exception for end of file headers."""
+    pass
+class InvalidHeaderError(HeaderError):
+    """Exception for invalid headers."""
+    pass
+class SubsequentHeaderError(HeaderError):
+    """Exception for missing and invalid extended headers."""
+    pass
+
+#---------------------------
+# internal stream interface
+#---------------------------
+class _LowLevelFile:
+    """Low-level file object. Supports reading and writing.
+       It is used instead of a regular file object for streaming
+       access.
+    """
+
+    def __init__(self, name, mode):
+        mode = {
+            "r": os.O_RDONLY,
+            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
+        }[mode]
+        if hasattr(os, "O_BINARY"):
+            mode |= os.O_BINARY
+        self.fd = os.open(name, mode, 0o666)
+
+    def close(self):
+        os.close(self.fd)
+
+    def read(self, size):
+        return os.read(self.fd, size)
+
+    def write(self, s):
+        os.write(self.fd, s)
+
+class _Stream:
+    """Class that serves as an adapter between TarFile and
+       a stream-like object.  The stream-like object only
+       needs to have a read() or write() method that works with bytes,
+       and the method is accessed blockwise.
+       Use of gzip or bzip2 compression is possible.
+       A stream-like object could be for example: sys.stdin.buffer,
+       sys.stdout.buffer, a socket, a tape device etc.
+
+       _Stream is intended to be used only internally.
+    """
+
+    def __init__(self, name, mode, comptype, fileobj, bufsize,
+                 compresslevel):
+        """Construct a _Stream object.
+        """
+        self._extfileobj = True
+        if fileobj is None:
+            fileobj = _LowLevelFile(name, mode)
+            self._extfileobj = False
+
+        if comptype == '*':
+            # Enable transparent compression detection for the
+            # stream interface
+            fileobj = _StreamProxy(fileobj)
+            comptype = fileobj.getcomptype()
+
+        self.name     = name or ""
+        self.mode     = mode
+        self.comptype = comptype
+        self.fileobj  = fileobj
+        self.bufsize  = bufsize
+        self.buf      = b""
+        self.pos      = 0
+        self.closed   = False
+
+        try:
+            if comptype == "gz":
+                try:
+                    import zlib
+                except ImportError:
+                    raise CompressionError("zlib module is not available") from None
+                self.zlib = zlib
+                self.crc = zlib.crc32(b"")
+                if mode == "r":
+                    self.exception = zlib.error
+                    self._init_read_gz()
+                else:
+                    self._init_write_gz(compresslevel)
+
+            elif comptype == "bz2":
+                try:
+                    import bz2
+                except ImportError:
+                    raise CompressionError("bz2 module is not available") from None
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = bz2.BZ2Decompressor()
+                    self.exception = OSError
+                else:
+                    self.cmp = bz2.BZ2Compressor(compresslevel)
+
+            elif comptype == "xz":
+                try:
+                    import lzma
+                except ImportError:
+                    raise CompressionError("lzma module is not available") from None
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = lzma.LZMADecompressor()
+                    self.exception = lzma.LZMAError
+                else:
+                    self.cmp = lzma.LZMACompressor()
+
+            elif comptype != "tar":
+                raise CompressionError("unknown compression type %r" % comptype)
+
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    def __del__(self):
+        if hasattr(self, "closed") and not self.closed:
+            self.close()
+
+    def _init_write_gz(self, compresslevel):
+        """Initialize for writing with gzip compression.
+        """
+        self.cmp = self.zlib.compressobj(compresslevel,
+                                         self.zlib.DEFLATED,
+                                         -self.zlib.MAX_WBITS,
+                                         self.zlib.DEF_MEM_LEVEL,
+                                         0)
+        timestamp = struct.pack("<L", int(time.time()))
+        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+        if self.name.endswith(".gz"):
+            self.name = self.name[:-3]
+        # Honor "directory components removed" from RFC 1952
+        self.name = os.path.basename(self.name)
+        # RFC 1952 says we must use ISO-8859-1 for the FNAME field.
+        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+    def write(self, s):
+        """Write string s to the stream.
+        """
+        if self.comptype == "gz":
+            self.crc = self.zlib.crc32(s, self.crc)
+        self.pos += len(s)
+        if self.comptype != "tar":
+            s = self.cmp.compress(s)
+        self.__write(s)
+
+    def __write(self, s):
+        """Write string s to the stream if a whole new block
+           is ready to be written.
+        """
+        self.buf += s
+        while len(self.buf) > self.bufsize:
+            self.fileobj.write(self.buf[:self.bufsize])
+            self.buf = self.buf[self.bufsize:]
+
+    def close(self):
+        """Close the _Stream object. No operation should be
+           done on it afterwards.
+        """
+        if self.closed:
+            return
+
+        self.closed = True
+        try:
+            if self.mode == "w" and self.comptype != "tar":
+                self.buf += self.cmp.flush()
+
+            if self.mode == "w" and self.buf:
+                self.fileobj.write(self.buf)
+                self.buf = b""
+                if self.comptype == "gz":
+                    self.fileobj.write(struct.pack("<L", self.crc))
+                    self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
+        finally:
+            if not self._extfileobj:
+                self.fileobj.close()
+
+    def _init_read_gz(self):
+        """Initialize for reading a gzip compressed fileobj.
+        """
+        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+        self.dbuf = b""
+
+        # taken from gzip.GzipFile with some alterations
+        if self.__read(2) != b"\037\213":
+            raise ReadError("not a gzip file")
+        if self.__read(1) != b"\010":
+            raise CompressionError("unsupported compression method")
+
+        flag = ord(self.__read(1))
+        self.__read(6)
+
+        if flag & 4:
+            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+            self.read(xlen)
+        if flag & 8:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 16:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 2:
+            self.__read(2)
+
+    def tell(self):
+        """Return the stream's file pointer position.
+        """
+        return self.pos
+
+    def seek(self, pos=0):
+        """Set the stream's file pointer to pos. Negative seeking
+           is forbidden.
+        """
+        if pos - self.pos >= 0:
+            blocks, remainder = divmod(pos - self.pos, self.bufsize)
+            for i in range(blocks):
+                self.read(self.bufsize)
+            self.read(remainder)
+        else:
+            raise StreamError("seeking backwards is not allowed")
+        return self.pos
+
+    def read(self, size):
+        """Return the next size number of bytes from the stream."""
+        assert size is not None
+        buf = self._read(size)
+        self.pos += len(buf)
+        return buf
+
+    def _read(self, size):
+        """Return size bytes from the stream.
+        """
+        if self.comptype == "tar":
+            return self.__read(size)
+
+        c = len(self.dbuf)
+        t = [self.dbuf]
+        while c < size:
+            # Skip underlying buffer to avoid unaligned double buffering.
+            if self.buf:
+                buf = self.buf
+                self.buf = b""
+            else:
+                buf = self.fileobj.read(self.bufsize)
+                if not buf:
+                    break
+            try:
+                buf = self.cmp.decompress(buf)
+            except self.exception as e:
+                raise ReadError("invalid compressed data") from e
+            t.append(buf)
+            c += len(buf)
+        t = b"".join(t)
+        self.dbuf = t[size:]
+        return t[:size]
+
+    def __read(self, size):
+        """Return size bytes from stream. If internal buffer is empty,
+           read another block from the stream.
+        """
+        c = len(self.buf)
+        t = [self.buf]
+        while c < size:
+            buf = self.fileobj.read(self.bufsize)
+            if not buf:
+                break
+            t.append(buf)
+            c += len(buf)
+        t = b"".join(t)
+        self.buf = t[size:]
+        return t[:size]
+# class _Stream
+
+class _StreamProxy(object):
+    """Small proxy class that enables transparent compression
+       detection for the Stream interface (mode 'r|*').
+    """
+
+    def __init__(self, fileobj):
+        self.fileobj = fileobj
+        self.buf = self.fileobj.read(BLOCKSIZE)
+
+    def read(self, size):
+        self.read = self.fileobj.read
+        return self.buf
+
+    def getcomptype(self):
+        if self.buf.startswith(b"\x1f\x8b\x08"):
+            return "gz"
+        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
+            return "bz2"
+        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
+            return "xz"
+        else:
+            return "tar"
+
+    def close(self):
+        self.fileobj.close()
+# class StreamProxy
+
+#------------------------
+# Extraction file object
+#------------------------
+class _FileInFile(object):
+    """A thin wrapper around an existing file object that
+       provides a part of its data as an individual file
+       object.
+    """
+
+    def __init__(self, fileobj, offset, size, name, blockinfo=None):
+        self.fileobj = fileobj
+        self.offset = offset
+        self.size = size
+        self.position = 0
+        self.name = name
+        self.closed = False
+
+        if blockinfo is None:
+            blockinfo = [(0, size)]
+
+        # Construct a map with data and zero blocks.
+        self.map_index = 0
+        self.map = []
+        lastpos = 0
+        realpos = self.offset
+        for offset, size in blockinfo:
+            if offset > lastpos:
+                self.map.append((False, lastpos, offset, None))
+            self.map.append((True, offset, offset + size, realpos))
+            realpos += size
+            lastpos = offset + size
+        if lastpos < self.size:
+            self.map.append((False, lastpos, self.size, None))
+
+    def flush(self):
+        pass
+
+    def readable(self):
+        return True
+
+    def writable(self):
+        return False
+
+    def seekable(self):
+        return self.fileobj.seekable()
+
+    def tell(self):
+        """Return the current file position.
+        """
+        return self.position
+
+    def seek(self, position, whence=io.SEEK_SET):
+        """Seek to a position in the file.
+        """
+        if whence == io.SEEK_SET:
+            self.position = min(max(position, 0), self.size)
+        elif whence == io.SEEK_CUR:
+            if position < 0:
+                self.position = max(self.position + position, 0)
+            else:
+                self.position = min(self.position + position, self.size)
+        elif whence == io.SEEK_END:
+            self.position = max(min(self.size + position, self.size), 0)
+        else:
+            raise ValueError("Invalid argument")
+        return self.position
+
+    def read(self, size=None):
+        """Read data from the file.
+        """
+        if size is None:
+            size = self.size - self.position
+        else:
+            size = min(size, self.size - self.position)
+
+        buf = b""
+        while size > 0:
+            while True:
+                data, start, stop, offset = self.map[self.map_index]
+                if start <= self.position < stop:
+                    break
+                else:
+                    self.map_index += 1
+                    if self.map_index == len(self.map):
+                        self.map_index = 0
+            length = min(size, stop - self.position)
+            if data:
+                self.fileobj.seek(offset + (self.position - start))
+                b = self.fileobj.read(length)
+                if len(b) != length:
+                    raise ReadError("unexpected end of data")
+                buf += b
+            else:
+                buf += NUL * length
+            size -= length
+            self.position += length
+        return buf
+
+    def readinto(self, b):
+        buf = self.read(len(b))
+        b[:len(buf)] = buf
+        return len(buf)
+
+    def close(self):
+        self.closed = True
+#class _FileInFile
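+
+# Illustrative example (hypothetical values): for a sparse member with
+# blockinfo=[(0, 10), (20, 5)] and size=30, the map built above is
+#     [(True, 0, 10, realpos), (False, 10, 20, None),
+#      (True, 20, 25, realpos + 10), (False, 25, 30, None)]
+# and read() returns NUL bytes for the False (hole) ranges.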
+
+class ExFileObject(io.BufferedReader):
+
+    def __init__(self, tarfile, tarinfo):
+        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
+                tarinfo.size, tarinfo.name, tarinfo.sparse)
+        super().__init__(fileobj)
+#class ExFileObject
+
+
+#-----------------------------
+# extraction filters (PEP 706)
+#-----------------------------
+
+class FilterError(TarError):
+    pass
+
+class AbsolutePathError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'member {tarinfo.name!r} has an absolute path')
+
+class OutsideDestinationError(FilterError):
+    def __init__(self, tarinfo, path):
+        self.tarinfo = tarinfo
+        self._path = path
+        super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
+                         + 'which is outside the destination')
+
+class SpecialFileError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'{tarinfo.name!r} is a special file')
+
+class AbsoluteLinkError(FilterError):
+    def __init__(self, tarinfo):
+        self.tarinfo = tarinfo
+        super().__init__(f'{tarinfo.name!r} is a link to an absolute path')
+
+class LinkOutsideDestinationError(FilterError):
+    def __init__(self, tarinfo, path):
+        self.tarinfo = tarinfo
+        self._path = path
+        super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+                         + 'which is outside the destination')
+
+def _get_filtered_attrs(member, dest_path, for_data=True):
+    new_attrs = {}
+    name = member.name
+    dest_path = os.path.realpath(dest_path)
+    # Strip leading / (tar's directory separator) from filenames.
+    # Include os.sep (target OS directory separator) as well.
+    if name.startswith(('/', os.sep)):
+        name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
+    if os.path.isabs(name):
+        # Path is absolute even after stripping.
+        # For example, 'C:/foo' on Windows.
+        raise AbsolutePathError(member)
+    # Ensure we stay in the destination
+    target_path = os.path.realpath(os.path.join(dest_path, name))
+    if os.path.commonpath([target_path, dest_path]) != dest_path:
+        raise OutsideDestinationError(member, target_path)
+    # Limit permissions (no high bits, and go-w)
+    mode = member.mode
+    if mode is not None:
+        # Strip high bits & group/other write bits
+        mode = mode & 0o755
+        if for_data:
+            # For data, handle permissions & file types
+            if member.isreg() or member.islnk():
+                if not mode & 0o100:
+                    # Clear executable bits if not executable by user
+                    mode &= ~0o111
+                # Ensure owner can read & write
+                mode |= 0o600
+            elif member.isdir() or member.issym():
+                # Ignore mode for directories & symlinks
+                mode = None
+            else:
+                # Reject special files
+                raise SpecialFileError(member)
+        if mode != member.mode:
+            new_attrs['mode'] = mode
+    if for_data:
+        # Ignore ownership for 'data'
+        if member.uid is not None:
+            new_attrs['uid'] = None
+        if member.gid is not None:
+            new_attrs['gid'] = None
+        if member.uname is not None:
+            new_attrs['uname'] = None
+        if member.gname is not None:
+            new_attrs['gname'] = None
+        # Check link destination for 'data'
+        if member.islnk() or member.issym():
+            if os.path.isabs(member.linkname):
+                raise AbsoluteLinkError(member)
+            if member.issym():
+                target_path = os.path.join(dest_path,
+                                           os.path.dirname(name),
+                                           member.linkname)
+            else:
+                target_path = os.path.join(dest_path,
+                                           member.linkname)
+            target_path = os.path.realpath(target_path)
+            if os.path.commonpath([target_path, dest_path]) != dest_path:
+                raise LinkOutsideDestinationError(member, target_path)
+    return new_attrs
+
+def fully_trusted_filter(member, dest_path):
+    return member
+
+def tar_filter(member, dest_path):
+    new_attrs = _get_filtered_attrs(member, dest_path, False)
+    if new_attrs:
+        return member.replace(**new_attrs, deep=False)
+    return member
+
+def data_filter(member, dest_path):
+    new_attrs = _get_filtered_attrs(member, dest_path, True)
+    if new_attrs:
+        return member.replace(**new_attrs, deep=False)
+    return member
+
+_NAMED_FILTERS = {
+    "fully_trusted": fully_trusted_filter,
+    "tar": tar_filter,
+    "data": data_filter,
+}
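+
+# Illustrative usage (archive and destination names are hypothetical):
+# the filters above are normally selected by name at extraction time, e.g.
+#
+#     with TarFile.open('archive.tar') as tf:
+#         tf.extractall('dest', filter='data')  # applies data_filter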
+
+#------------------
+# Exported Classes
+#------------------
+
+# Sentinel for replace() defaults, meaning "don't change the attribute"
+_KEEP = object()
+
+class TarInfo(object):
+    """Informational class which holds the details about an
+       archive member given by a tar header block.
+       TarInfo objects are returned by TarFile.getmember(),
+       TarFile.getmembers() and TarFile.gettarinfo() and are
+       usually created internally.
+    """
+
+    __slots__ = dict(
+        name = 'Name of the archive member.',
+        mode = 'Permission bits.',
+        uid = 'User ID of the user who originally stored this member.',
+        gid = 'Group ID of the user who originally stored this member.',
+        size = 'Size in bytes.',
+        mtime = 'Time of last modification.',
+        chksum = 'Header checksum.',
+        type = ('File type. type is usually one of these constants: '
+                'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
+                'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
+        linkname = ('Name of the target file name, which is only present '
+                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
+        uname = 'User name.',
+        gname = 'Group name.',
+        devmajor = 'Device major number.',
+        devminor = 'Device minor number.',
+        offset = 'The tar header starts here.',
+        offset_data = "The file's data starts here.",
+        pax_headers = ('A dictionary containing key-value pairs of an '
+                       'associated pax extended header.'),
+        sparse = 'Sparse member information.',
+        tarfile = None,
+        _sparse_structs = None,
+        _link_target = None,
+        )
+
+    def __init__(self, name=""):
+        """Construct a TarInfo object. name is the optional name
+           of the member.
+        """
+        self.name = name        # member name
+        self.mode = 0o644       # file permissions
+        self.uid = 0            # user id
+        self.gid = 0            # group id
+        self.size = 0           # file size
+        self.mtime = 0          # modification time
+        self.chksum = 0         # header checksum
+        self.type = REGTYPE     # member type
+        self.linkname = ""      # link name
+        self.uname = ""         # user name
+        self.gname = ""         # group name
+        self.devmajor = 0       # device major number
+        self.devminor = 0       # device minor number
+
+        self.offset = 0         # the tar header starts here
+        self.offset_data = 0    # the file's data starts here
+
+        self.sparse = None      # sparse member information
+        self.pax_headers = {}   # pax header information
+
+    @property
+    def path(self):
+        'In pax headers, "name" is called "path".'
+        return self.name
+
+    @path.setter
+    def path(self, name):
+        self.name = name
+
+    @property
+    def linkpath(self):
+        'In pax headers, "linkname" is called "linkpath".'
+        return self.linkname
+
+    @linkpath.setter
+    def linkpath(self, linkname):
+        self.linkname = linkname
+
+    def __repr__(self):
+        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+
+    def replace(self, *,
+                name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
+                uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
+                deep=True, _KEEP=_KEEP):
+        """Return a deep copy of self with the given attributes replaced.
+        """
+        if deep:
+            result = copy.deepcopy(self)
+        else:
+            result = copy.copy(self)
+        if name is not _KEEP:
+            result.name = name
+        if mtime is not _KEEP:
+            result.mtime = mtime
+        if mode is not _KEEP:
+            result.mode = mode
+        if linkname is not _KEEP:
+            result.linkname = linkname
+        if uid is not _KEEP:
+            result.uid = uid
+        if gid is not _KEEP:
+            result.gid = gid
+        if uname is not _KEEP:
+            result.uname = uname
+        if gname is not _KEEP:
+            result.gname = gname
+        return result
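+
+# Illustrative usage (hypothetical member): extraction filters rely on
+# replace() to build a sanitized copy without mutating the original, e.g.
+#
+#     safe = member.replace(uid=None, gid=None, deep=False)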
+
+    def get_info(self):
+        """Return the TarInfo's attributes as a dictionary.
+        """
+        if self.mode is None:
+            mode = None
+        else:
+            mode = self.mode & 0o7777
+        info = {
+            "name":     self.name,
+            "mode":     mode,
+            "uid":      self.uid,
+            "gid":      self.gid,
+            "size":     self.size,
+            "mtime":    self.mtime,
+            "chksum":   self.chksum,
+            "type":     self.type,
+            "linkname": self.linkname,
+            "uname":    self.uname,
+            "gname":    self.gname,
+            "devmajor": self.devmajor,
+            "devminor": self.devminor
+        }
+
+        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
+            info["name"] += "/"
+
+        return info
+
+    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
+        """Return a tar header as a string of 512 byte blocks.
+        """
+        info = self.get_info()
+        for name, value in info.items():
+            if value is None:
+                raise ValueError("%s may not be None" % name)
+
+        if format == USTAR_FORMAT:
+            return self.create_ustar_header(info, encoding, errors)
+        elif format == GNU_FORMAT:
+            return self.create_gnu_header(info, encoding, errors)
+        elif format == PAX_FORMAT:
+            return self.create_pax_header(info, encoding)
+        else:
+            raise ValueError("invalid format")
+
+    def create_ustar_header(self, info, encoding, errors):
+        """Return the object as a ustar header block.
+        """
+        info["magic"] = POSIX_MAGIC
+
+        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+            raise ValueError("linkname is too long")
+
+        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+            info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
+
+        return self._create_header(info, USTAR_FORMAT, encoding, errors)
+
+    def create_gnu_header(self, info, encoding, errors):
+        """Return the object as a GNU header block sequence.
+        """
+        info["magic"] = GNU_MAGIC
+
+        buf = b""
+        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
+
+        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
+
+        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
+
+    def create_pax_header(self, info, encoding):
+        """Return the object as a ustar header block. If it cannot be
+           represented this way, prepend a pax extended header sequence
+           with supplement information.
+        """
+        info["magic"] = POSIX_MAGIC
+        pax_headers = self.pax_headers.copy()
+
+        # Test string fields for values that exceed the field length or cannot
+        # be represented in ASCII encoding.
+        for name, hname, length in (
+                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
+                ("uname", "uname", 32), ("gname", "gname", 32)):
+
+            if hname in pax_headers:
+                # The pax header has priority.
+                continue
+
+            # Try to encode the string as ASCII.
+            try:
+                info[name].encode("ascii", "strict")
+            except UnicodeEncodeError:
+                pax_headers[hname] = info[name]
+                continue
+
+            if len(info[name]) > length:
+                pax_headers[hname] = info[name]
+
+        # Test number fields for values that exceed the field limit or values
+        # that like to be stored as float.
+        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
+            needs_pax = False
+
+            val = info[name]
+            val_is_float = isinstance(val, float)
+            val_int = round(val) if val_is_float else val
+            if not 0 <= val_int < 8 ** (digits - 1):
+                # Avoid overflow.
+                info[name] = 0
+                needs_pax = True
+            elif val_is_float:
+                # Put rounded value in ustar header, and full
+                # precision value in pax header.
+                info[name] = val_int
+                needs_pax = True
+
+            # The existing pax header has priority.
+            if needs_pax and name not in pax_headers:
+                pax_headers[name] = str(val)
+
+        # Create a pax extended header if necessary.
+        if pax_headers:
+            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
+        else:
+            buf = b""
+
+        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
+
+    @classmethod
+    def create_pax_global_header(cls, pax_headers):
+        """Return the object as a pax global header block sequence.
+        """
+        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
+
+    def _posix_split_name(self, name, encoding, errors):
+        """Split a name longer than 100 chars into a prefix
+           and a name part.
+        """
+        components = name.split("/")
+        for i in range(1, len(components)):
+            prefix = "/".join(components[:i])
+            name = "/".join(components[i:])
+            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
+                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
+                break
+        else:
+            raise ValueError("name is too long")
+
+        return prefix, name
+
+    @staticmethod
+    def _create_header(info, format, encoding, errors):
+        """Return a header block. info is a dictionary with file
+           information, format must be one of the *_FORMAT constants.
+        """
+        has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
+        if has_device_fields:
+            devmajor = itn(info.get("devmajor", 0), 8, format)
+            devminor = itn(info.get("devminor", 0), 8, format)
+        else:
+            devmajor = stn("", 8, encoding, errors)
+            devminor = stn("", 8, encoding, errors)
+
+        # None values in metadata should cause ValueError.
+        # itn()/stn() do this for all fields except type.
+        filetype = info.get("type", REGTYPE)
+        if filetype is None:
+            raise ValueError("TarInfo.type must not be None")
+
+        parts = [
+            stn(info.get("name", ""), 100, encoding, errors),
+            itn(info.get("mode", 0) & 0o7777, 8, format),
+            itn(info.get("uid", 0), 8, format),
+            itn(info.get("gid", 0), 8, format),
+            itn(info.get("size", 0), 12, format),
+            itn(info.get("mtime", 0), 12, format),
+            b"        ", # checksum field
+            filetype,
+            stn(info.get("linkname", ""), 100, encoding, errors),
+            info.get("magic", POSIX_MAGIC),
+            stn(info.get("uname", ""), 32, encoding, errors),
+            stn(info.get("gname", ""), 32, encoding, errors),
+            devmajor,
+            devminor,
+            stn(info.get("prefix", ""), 155, encoding, errors)
+        ]
+
+        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
+        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
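+        # Splice the checksum into bytes 148-154 of the 512-byte block
+        # (512-364 .. 512-357): six octal digits plus a NUL, keeping the
+        # trailing space of the original eight-space placeholder.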
+        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
+        return buf
+
+    @staticmethod
+    def _create_payload(payload):
+        """Return the string payload filled with zero bytes
+           up to the next 512 byte border.
+        """
+        blocks, remainder = divmod(len(payload), BLOCKSIZE)
+        if remainder > 0:
+            payload += (BLOCKSIZE - remainder) * NUL
+        return payload
+
+    @classmethod
+    def _create_gnu_long_header(cls, name, type, encoding, errors):
+        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
+           for name.
+        """
+        name = name.encode(encoding, errors) + NUL
+
+        info = {}
+        info["name"] = "././@LongLink"
+        info["type"] = type
+        info["size"] = len(name)
+        info["magic"] = GNU_MAGIC
+
+        # create extended header + name blocks.
+        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
+                cls._create_payload(name)
+
+    @classmethod
+    def _create_pax_generic_header(cls, pax_headers, type, encoding):
+        """Return a POSIX.1-2008 extended or global header sequence
+           that contains a list of keyword, value pairs. The values
+           must be strings.
+        """
+        # Check if one of the fields contains surrogate characters and thereby
+        # forces hdrcharset=BINARY, see _proc_pax() for more information.
+        binary = False
+        for keyword, value in pax_headers.items():
+            try:
+                value.encode("utf-8", "strict")
+            except UnicodeEncodeError:
+                binary = True
+                break
+
+        records = b""
+        if binary:
+            # Put the hdrcharset field at the beginning of the header.
+            records += b"21 hdrcharset=BINARY\n"
+
+        for keyword, value in pax_headers.items():
+            keyword = keyword.encode("utf-8")
+            if binary:
+                # Try to restore the original byte representation of `value'.
+                # Needless to say, the encoding must match the string.
+                value = value.encode(encoding, "surrogateescape")
+            else:
+                value = value.encode("utf-8")
+
+            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
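+            # The length field counts the whole record including itself, so
+            # iterate to a fixed point; e.g. keyword=b"path", value=b"foo"
+            # gives l = 10 and converges to p = 12 -> b"12 path=foo\n".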
+            n = p = 0
+            while True:
+                n = l + len(str(p))
+                if n == p:
+                    break
+                p = n
+            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
+
+        # We use a hardcoded "././@PaxHeader" name like star does
+        # instead of the one that POSIX recommends.
+        info = {}
+        info["name"] = "././@PaxHeader"
+        info["type"] = type
+        info["size"] = len(records)
+        info["magic"] = POSIX_MAGIC
+
+        # Create pax header + record blocks.
+        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
+                cls._create_payload(records)
+
+    @classmethod
+    def frombuf(cls, buf, encoding, errors):
+        """Construct a TarInfo object from a 512 byte bytes object.
+        """
+        if len(buf) == 0:
+            raise EmptyHeaderError("empty header")
+        if len(buf) != BLOCKSIZE:
+            raise TruncatedHeaderError("truncated header")
+        if buf.count(NUL) == BLOCKSIZE:
+            raise EOFHeaderError("end of file header")
+
+        chksum = nti(buf[148:156])
+        if chksum not in calc_chksums(buf):
+            raise InvalidHeaderError("bad checksum")
+
+        obj = cls()
+        obj.name = nts(buf[0:100], encoding, errors)
+        obj.mode = nti(buf[100:108])
+        obj.uid = nti(buf[108:116])
+        obj.gid = nti(buf[116:124])
+        obj.size = nti(buf[124:136])
+        obj.mtime = nti(buf[136:148])
+        obj.chksum = chksum
+        obj.type = buf[156:157]
+        obj.linkname = nts(buf[157:257], encoding, errors)
+        obj.uname = nts(buf[265:297], encoding, errors)
+        obj.gname = nts(buf[297:329], encoding, errors)
+        obj.devmajor = nti(buf[329:337])
+        obj.devminor = nti(buf[337:345])
+        prefix = nts(buf[345:500], encoding, errors)
+
+        # Old V7 tar format represents a directory as a regular
+        # file with a trailing slash.
+        if obj.type == AREGTYPE and obj.name.endswith("/"):
+            obj.type = DIRTYPE
+
+        # The old GNU sparse format occupies some of the unused
+        # space in the buffer for up to 4 sparse structures.
+        # Save them for later processing in _proc_sparse().
+        if obj.type == GNUTYPE_SPARSE:
+            pos = 386
+            structs = []
+            for i in range(4):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[482])
+            origsize = nti(buf[483:495])
+            obj._sparse_structs = (structs, isextended, origsize)
+
+        # Remove redundant slashes from directories.
+        if obj.isdir():
+            obj.name = obj.name.rstrip("/")
+
+        # Reconstruct a ustar longname.
+        if prefix and obj.type not in GNU_TYPES:
+            obj.name = prefix + "/" + obj.name
+        return obj
+
+    @classmethod
+    def fromtarfile(cls, tarfile):
+        """Return the next TarInfo object from TarFile object
+           tarfile.
+        """
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+        return obj._proc_member(tarfile)
+
+    #--------------------------------------------------------------------------
+    # The following are methods that are called depending on the type of a
+    # member. The entry point is _proc_member() which can be overridden in a
+    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+    # implement the following operations:
+    # 1. Set self.offset_data to the position where the data blocks begin,
+    #    if there is data that follows.
+    # 2. Set tarfile.offset to the position where the next member's header will
+    #    begin.
+    # 3. Return self or another valid TarInfo object.
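+    #
+    # For example, a hypothetical TarInfo subclass could dispatch a custom
+    # type flag (sketch only):
+    #
+    #     class MyTarInfo(TarInfo):
+    #         def _proc_member(self, tarfile):
+    #             if self.type == b"Z":          # hypothetical vendor type
+    #                 return self._proc_custom(tarfile)
+    #             return super()._proc_member(tarfile)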
+    def _proc_member(self, tarfile):
+        """Choose the right processing method depending on
+           the type and call it.
+        """
+        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+            return self._proc_gnulong(tarfile)
+        elif self.type == GNUTYPE_SPARSE:
+            return self._proc_sparse(tarfile)
+        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+            return self._proc_pax(tarfile)
+        else:
+            return self._proc_builtin(tarfile)
+
+    def _proc_builtin(self, tarfile):
+        """Process a builtin type or an unknown type which
+           will be treated as a regular file.
+        """
+        self.offset_data = tarfile.fileobj.tell()
+        offset = self.offset_data
+        if self.isreg() or self.type not in SUPPORTED_TYPES:
+            # Skip the following data blocks.
+            offset += self._block(self.size)
+        tarfile.offset = offset
+
+        # Patch the TarInfo object with saved global
+        # header information.
+        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
+
+        # Remove redundant slashes from directories. This is to be consistent
+        # with frombuf().
+        if self.isdir():
+            self.name = self.name.rstrip("/")
+
+        return self
+
+    def _proc_gnulong(self, tarfile):
+        """Process the blocks that hold a GNU longname
+           or longlink member.
+        """
+        buf = tarfile.fileobj.read(self._block(self.size))
+
+        # Fetch the next header and process it.
+        try:
+            next = self.fromtarfile(tarfile)
+        except HeaderError as e:
+            raise SubsequentHeaderError(str(e)) from None
+
+        # Patch the TarInfo object from the next header with
+        # the longname information.
+        next.offset = self.offset
+        if self.type == GNUTYPE_LONGNAME:
+            next.name = nts(buf, tarfile.encoding, tarfile.errors)
+        elif self.type == GNUTYPE_LONGLINK:
+            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
+
+        # Remove redundant slashes from directories. This is to be consistent
+        # with frombuf().
+        if next.isdir():
+            next.name = removesuffix(next.name, "/")
+
+        return next
+
+    def _proc_sparse(self, tarfile):
+        """Process a GNU sparse header plus extra headers.
+        """
+        # We already collected some sparse structures in frombuf().
+        structs, isextended, origsize = self._sparse_structs
+        del self._sparse_structs
+
+        # Collect sparse structures from extended header blocks.
+        while isextended:
+            buf = tarfile.fileobj.read(BLOCKSIZE)
+            pos = 0
+            for i in range(21):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                if offset and numbytes:
+                    structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[504])
+        self.sparse = structs
+
+        self.offset_data = tarfile.fileobj.tell()
+        tarfile.offset = self.offset_data + self._block(self.size)
+        self.size = origsize
+        return self
+
+    def _proc_pax(self, tarfile):
+        """Process an extended or global header as described in
+           POSIX.1-2008.
+        """
+        # Read the header information.
+        buf = tarfile.fileobj.read(self._block(self.size))
+
+        # A pax header stores supplemental information for either
+        # the following file (extended) or all following files
+        # (global).
+        if self.type == XGLTYPE:
+            pax_headers = tarfile.pax_headers
+        else:
+            pax_headers = tarfile.pax_headers.copy()
+
+        # Check if the pax header contains a hdrcharset field. This tells us
+        # the encoding of the path, linkpath, uname and gname fields. Normally,
+        # these fields are UTF-8 encoded, but POSIX.1-2008 allows tar
+        # implementations to store them as raw binary strings if the
+        # translation to UTF-8 fails.
+        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
+        if match is not None:
+            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
+
+        # For the time being, we don't care about anything other than "BINARY".
+        # The only other value that is currently allowed by the standard is
+        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
+        hdrcharset = pax_headers.get("hdrcharset")
+        if hdrcharset == "BINARY":
+            encoding = tarfile.encoding
+        else:
+            encoding = "utf-8"
+
+        # Parse pax header information. A record looks like this:
+        # "%d %s=%s\n" % (length, keyword, value). length is the size
+        # of the complete record including the length field itself and
+        # the newline. keyword and value are both UTF-8 encoded strings.
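+        # For example, b"21 hdrcharset=BINARY\n" is one complete 21-byte
+        # record.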
+        regex = re.compile(br"(\d+) ([^=]+)=")
+        pos = 0
+        while match := regex.match(buf, pos):
+            length, keyword = match.groups()
+            length = int(length)
+            if length == 0:
+                raise InvalidHeaderError("invalid header")
+            value = buf[match.end(2) + 1:match.start(1) + length - 1]
+
+            # Normally, we could just use "utf-8" as the encoding and "strict"
+            # as the error handler, but we better not take the risk. For
+            # example, GNU tar <= 1.23 is known to store filenames it cannot
+            # translate to UTF-8 as raw strings (unfortunately without a
+            # hdrcharset=BINARY header).
+            # We first try the strict standard encoding, and if that fails we
+            # fall back on the user's encoding and error handler.
+            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
+                    tarfile.errors)
+            if keyword in PAX_NAME_FIELDS:
+                value = self._decode_pax_field(value, encoding, tarfile.encoding,
+                        tarfile.errors)
+            else:
+                value = self._decode_pax_field(value, "utf-8", "utf-8",
+                        tarfile.errors)
+
+            pax_headers[keyword] = value
+            pos += length
+
+        # Fetch the next header.
+        try:
+            next = self.fromtarfile(tarfile)
+        except HeaderError as e:
+            raise SubsequentHeaderError(str(e)) from None
+
+        # Process GNU sparse information.
+        if "GNU.sparse.map" in pax_headers:
+            # GNU extended sparse format version 0.1.
+            self._proc_gnusparse_01(next, pax_headers)
+
+        elif "GNU.sparse.size" in pax_headers:
+            # GNU extended sparse format version 0.0.
+            self._proc_gnusparse_00(next, pax_headers, buf)
+
+        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
+            # GNU extended sparse format version 1.0.
+            self._proc_gnusparse_10(next, pax_headers, tarfile)
+
+        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
+            # Patch the TarInfo object with the extended header info.
+            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
+            next.offset = self.offset
+
+            if "size" in pax_headers:
+                # If the extended header replaces the size field,
+                # we need to recalculate the offset where the next
+                # header starts.
+                offset = next.offset_data
+                if next.isreg() or next.type not in SUPPORTED_TYPES:
+                    offset += next._block(next.size)
+                tarfile.offset = offset
+
+        return next
+
+    def _proc_gnusparse_00(self, next, pax_headers, buf):
+        """Process a GNU tar extended sparse header, version 0.0.
+        """
+        offsets = []
+        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
+            offsets.append(int(match.group(1)))
+        numbytes = []
+        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
+            numbytes.append(int(match.group(1)))
+        next.sparse = list(zip(offsets, numbytes))
+
+    def _proc_gnusparse_01(self, next, pax_headers):
+        """Process a GNU tar extended sparse header, version 0.1.
+        """
+        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
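+        # e.g. a map of "0,512,1024,512" pairs up as offset/size tuples:
+        # [(0, 512), (1024, 512)].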
+        next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
+        """Process a GNU tar extended sparse header, version 1.0.
+        """
+        sparse = []
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        fields, buf = buf.split(b"\n", 1)
+        fields = int(fields)
+        while len(sparse) < fields * 2:
+            if b"\n" not in buf:
+                buf += tarfile.fileobj.read(BLOCKSIZE)
+            number, buf = buf.split(b"\n", 1)
+            sparse.append(int(number))
+        next.offset_data = tarfile.fileobj.tell()
+        next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+    def _apply_pax_info(self, pax_headers, encoding, errors):
+        """Replace fields with supplemental information from a previous
+           pax extended or global header.
+        """
+        for keyword, value in pax_headers.items():
+            if keyword == "GNU.sparse.name":
+                setattr(self, "path", value)
+            elif keyword == "GNU.sparse.size":
+                setattr(self, "size", int(value))
+            elif keyword == "GNU.sparse.realsize":
+                setattr(self, "size", int(value))
+            elif keyword in PAX_FIELDS:
+                if keyword in PAX_NUMBER_FIELDS:
+                    try:
+                        value = PAX_NUMBER_FIELDS[keyword](value)
+                    except ValueError:
+                        value = 0
+                if keyword == "path":
+                    value = value.rstrip("/")
+                setattr(self, keyword, value)
+
+        self.pax_headers = pax_headers.copy()
+
+    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
+        """Decode a single field from a pax record.
+        """
+        try:
+            return value.decode(encoding, "strict")
+        except UnicodeDecodeError:
+            return value.decode(fallback_encoding, fallback_errors)
+
+    def _block(self, count):
+        """Round up a byte count by BLOCKSIZE and return it,
+           e.g. _block(834) => 1024.
+        """
+        blocks, remainder = divmod(count, BLOCKSIZE)
+        if remainder:
+            blocks += 1
+        return blocks * BLOCKSIZE
+
+    def isreg(self):
+        'Return True if the TarInfo object is a regular file.'
+        return self.type in REGULAR_TYPES
+
+    def isfile(self):
+        'Return True if the TarInfo object is a regular file.'
+        return self.isreg()
+
+    def isdir(self):
+        'Return True if it is a directory.'
+        return self.type == DIRTYPE
+
+    def issym(self):
+        'Return True if it is a symbolic link.'
+        return self.type == SYMTYPE
+
+    def islnk(self):
+        'Return True if it is a hard link.'
+        return self.type == LNKTYPE
+
+    def ischr(self):
+        'Return True if it is a character device.'
+        return self.type == CHRTYPE
+
+    def isblk(self):
+        'Return True if it is a block device.'
+        return self.type == BLKTYPE
+
+    def isfifo(self):
+        'Return True if it is a FIFO.'
+        return self.type == FIFOTYPE
+
+    def issparse(self):
+        'Return True if the member is a sparse file.'
+        return self.sparse is not None
+
+    def isdev(self):
+        'Return True if it is one of character device, block device or FIFO.'
+        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
+# class TarInfo
+
+class TarFile(object):
+    """The TarFile Class provides an interface to tar archives.
+    """
+
+    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
+
+    dereference = False         # If true, add content of linked file to the
+                                # tar file, else the link.
+
+    ignore_zeros = False        # If true, skips empty or invalid blocks and
+                                # continues processing.
+
+    errorlevel = 1              # If 0, fatal errors only appear in debug
+                                # messages (if debug >= 0). If > 0, errors
+                                # are passed to the caller as exceptions.
+
+    format = DEFAULT_FORMAT     # The format to use when creating an archive.
+
+    encoding = ENCODING         # Encoding for 8-bit character strings.
+
+    errors = None               # Error handler for unicode conversion.
+
+    tarinfo = TarInfo           # The default TarInfo class to use.
+
+    fileobject = ExFileObject   # The file-object for extractfile().
+
+    extraction_filter = None    # The default filter for extraction.
+
+    def __init__(self, name=None, mode="r", fileobj=None, format=None,
+            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
+            errors="surrogateescape", pax_headers=None, debug=None,
+            errorlevel=None, copybufsize=None):
+        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+           read from an existing archive, 'a' to append data to an existing
+           file or 'w' to create a new file overwriting an existing one. `mode'
+           defaults to 'r'.
+           If `fileobj' is given, it is used for reading or writing data. If it
+           can be determined, `mode' is overridden by `fileobj's mode.
+           `fileobj' is not closed, when TarFile is closed.
+        """
+        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
+        if mode not in modes:
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+        self.mode = mode
+        self._mode = modes[mode]
+
+        if not fileobj:
+            if self.mode == "a" and not os.path.exists(name):
+                # Create nonexistent files in append mode.
+                self.mode = "w"
+                self._mode = "wb"
+            fileobj = bltn_open(name, self._mode)
+            self._extfileobj = False
+        else:
+            if (name is None and hasattr(fileobj, "name") and
+                isinstance(fileobj.name, (str, bytes))):
+                name = fileobj.name
+            if hasattr(fileobj, "mode"):
+                self._mode = fileobj.mode
+            self._extfileobj = True
+        self.name = os.path.abspath(name) if name else None
+        self.fileobj = fileobj
+
+        # Init attributes.
+        if format is not None:
+            self.format = format
+        if tarinfo is not None:
+            self.tarinfo = tarinfo
+        if dereference is not None:
+            self.dereference = dereference
+        if ignore_zeros is not None:
+            self.ignore_zeros = ignore_zeros
+        if encoding is not None:
+            self.encoding = encoding
+        self.errors = errors
+
+        if pax_headers is not None and self.format == PAX_FORMAT:
+            self.pax_headers = pax_headers
+        else:
+            self.pax_headers = {}
+
+        if debug is not None:
+            self.debug = debug
+        if errorlevel is not None:
+            self.errorlevel = errorlevel
+
+        # Init data structures.
+        self.copybufsize = copybufsize
+        self.closed = False
+        self.members = []       # list of members as TarInfo objects
+        self._loaded = False    # flag if all members have been read
+        self.offset = self.fileobj.tell()
+                                # current position in the archive file
+        self.inodes = {}        # dictionary caching the inodes of
+                                # archive members already added
+
+        try:
+            if self.mode == "r":
+                self.firstmember = None
+                self.firstmember = self.next()
+
+            if self.mode == "a":
+                # Move to the end of the archive,
+                # before the first empty block.
+                while True:
+                    self.fileobj.seek(self.offset)
+                    try:
+                        tarinfo = self.tarinfo.fromtarfile(self)
+                        self.members.append(tarinfo)
+                    except EOFHeaderError:
+                        self.fileobj.seek(self.offset)
+                        break
+                    except HeaderError as e:
+                        raise ReadError(str(e)) from None
+
+            if self.mode in ("a", "w", "x"):
+                self._loaded = True
+
+                if self.pax_headers:
+                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
+                    self.fileobj.write(buf)
+                    self.offset += len(buf)
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    #--------------------------------------------------------------------------
+    # Below are the classmethods which act as alternate constructors to the
+    # TarFile class. The open() method is the only one that is needed for
+    # public use; it is the "super"-constructor and is able to select an
+    # adequate "sub"-constructor for a particular compression using the mapping
+    # from OPEN_METH.
+    #
+    # This concept allows one to subclass TarFile without losing the comfort of
+    # the super-constructor. A sub-constructor is registered and made available
+    # by adding it to the mapping in OPEN_METH.
+
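+    # For example, a hypothetical subclass adding zstandard support could
+    # look like this (sketch only; "zstopen" and its helper are assumed,
+    # not part of this module):
+    #
+    #     class ZstTarFile(TarFile):
+    #         OPEN_METH = {**TarFile.OPEN_METH, "zst": "zstopen"}
+    #         @classmethod
+    #         def zstopen(cls, name, mode="r", fileobj=None, **kwargs):
+    #             ...  # wrap fileobj in a zstd stream, then cls.taropen()
+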
+    @classmethod
+    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
+        r"""Open a tar archive for reading, writing or appending. Return
+           an appropriate TarFile class.
+
+           mode:
+           'r' or 'r:\*' open for reading with transparent compression
+           'r:'         open for reading exclusively uncompressed
+           'r:gz'       open for reading with gzip compression
+           'r:bz2'      open for reading with bzip2 compression
+           'r:xz'       open for reading with lzma compression
+           'a' or 'a:'  open for appending, creating the file if necessary
+           'w' or 'w:'  open for writing without compression
+           'w:gz'       open for writing with gzip compression
+           'w:bz2'      open for writing with bzip2 compression
+           'w:xz'       open for writing with lzma compression
+
+           'x' or 'x:'  create a tarfile exclusively without compression, raise
+                        an exception if the file is already created
+           'x:gz'       create a gzip compressed tarfile, raise an exception
+                        if the file is already created
+           'x:bz2'      create a bzip2 compressed tarfile, raise an exception
+                        if the file is already created
+           'x:xz'       create an lzma compressed tarfile, raise an exception
+                        if the file is already created
+
+           'r|\*'        open a stream of tar blocks with transparent compression
+           'r|'         open an uncompressed stream of tar blocks for reading
+           'r|gz'       open a gzip compressed stream of tar blocks
+           'r|bz2'      open a bzip2 compressed stream of tar blocks
+           'r|xz'       open an lzma compressed stream of tar blocks
+           'w|'         open an uncompressed stream for writing
+           'w|gz'       open a gzip compressed stream for writing
+           'w|bz2'      open a bzip2 compressed stream for writing
+           'w|xz'       open an lzma compressed stream for writing
+        """
+
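+        # Typical calls (sketch): open("a.tar.gz") hits the "r:*" branch and
+        # probes each OPEN_METH entry; open("a.tar.xz", "w:xz") dispatches
+        # straight to xzopen(); open("a.tar", "w|") returns a streaming,
+        # non-seekable variant built on _Stream.
+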
+        if not name and not fileobj:
+            raise ValueError("nothing to open")
+
+        if mode in ("r", "r:*"):
+            # Find out which *open() is appropriate for opening the file.
+            def not_compressed(comptype):
+                return cls.OPEN_METH[comptype] == 'taropen'
+            error_msgs = []
+            for comptype in sorted(cls.OPEN_METH, key=not_compressed):
+                func = getattr(cls, cls.OPEN_METH[comptype])
+                if fileobj is not None:
+                    saved_pos = fileobj.tell()
+                try:
+                    return func(name, "r", fileobj, **kwargs)
+                except (ReadError, CompressionError) as e:
+                    error_msgs.append(f'- method {comptype}: {e!r}')
+                    if fileobj is not None:
+                        fileobj.seek(saved_pos)
+                    continue
+            error_msgs_summary = '\n'.join(error_msgs)
+            raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
+
+        elif ":" in mode:
+            filemode, comptype = mode.split(":", 1)
+            filemode = filemode or "r"
+            comptype = comptype or "tar"
+
+            # Select the *open() function according to
+            # given compression.
+            if comptype in cls.OPEN_METH:
+                func = getattr(cls, cls.OPEN_METH[comptype])
+            else:
+                raise CompressionError("unknown compression type %r" % comptype)
+            return func(name, filemode, fileobj, **kwargs)
+
+        elif "|" in mode:
+            filemode, comptype = mode.split("|", 1)
+            filemode = filemode or "r"
+            comptype = comptype or "tar"
+
+            if filemode not in ("r", "w"):
+                raise ValueError("mode must be 'r' or 'w'")
+
+            compresslevel = kwargs.pop("compresslevel", 9)
+            stream = _Stream(name, filemode, comptype, fileobj, bufsize,
+                             compresslevel)
+            try:
+                t = cls(name, filemode, stream, **kwargs)
+            except:
+                stream.close()
+                raise
+            t._extfileobj = False
+            return t
+
+        elif mode in ("a", "w", "x"):
+            return cls.taropen(name, mode, fileobj, **kwargs)
+
+        raise ValueError("undiscernible mode")
+
+    @classmethod
+    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
+        """Open uncompressed tar archive name for reading or writing.
+        """
+        if mode not in ("r", "a", "w", "x"):
+            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+        return cls(name, mode, fileobj, **kwargs)
+
+    @classmethod
+    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+        """Open gzip compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from gzip import GzipFile
+        except ImportError:
+            raise CompressionError("gzip module is not available") from None
+
+        try:
+            fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
+        except OSError as e:
+            if fileobj is not None and mode == 'r':
+                raise ReadError("not a gzip file") from e
+            raise
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except OSError as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not a gzip file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    @classmethod
+    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+        """Open bzip2 compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from bz2 import BZ2File
+        except ImportError:
+            raise CompressionError("bz2 module is not available") from None
+
+        fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except (OSError, EOFError) as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not a bzip2 file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    @classmethod
+    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
+        """Open lzma compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if mode not in ("r", "w", "x"):
+            raise ValueError("mode must be 'r', 'w' or 'x'")
+
+        try:
+            from lzma import LZMAFile, LZMAError
+        except ImportError:
+            raise CompressionError("lzma module is not available") from None
+
+        fileobj = LZMAFile(fileobj or name, mode, preset=preset)
+
+        try:
+            t = cls.taropen(name, mode, fileobj, **kwargs)
+        except (LZMAError, EOFError) as e:
+            fileobj.close()
+            if mode == 'r':
+                raise ReadError("not an lzma file") from e
+            raise
+        except:
+            fileobj.close()
+            raise
+        t._extfileobj = False
+        return t
+
+    # All *open() methods are registered here.
+    OPEN_METH = {
+        "tar": "taropen",   # uncompressed tar
+        "gz":  "gzopen",    # gzip compressed tar
+        "bz2": "bz2open",   # bzip2 compressed tar
+        "xz":  "xzopen"     # lzma compressed tar
+    }
+
+    #--------------------------------------------------------------------------
+    # The public methods which TarFile provides:
+
+    def close(self):
+        """Close the TarFile. In write-mode, two finishing zero blocks are
+           appended to the archive.
+        """
+        if self.closed:
+            return
+
+        self.closed = True
+        try:
+            if self.mode in ("a", "w", "x"):
+                self.fileobj.write(NUL * (BLOCKSIZE * 2))
+                self.offset += (BLOCKSIZE * 2)
+                # fill up the end with zero-blocks
+                # (like option -b20 for tar does)
+                blocks, remainder = divmod(self.offset, RECORDSIZE)
+                if remainder > 0:
+                    self.fileobj.write(NUL * (RECORDSIZE - remainder))
+        finally:
+            if not self._extfileobj:
+                self.fileobj.close()
+
+    def getmember(self, name):
+        """Return a TarInfo object for member ``name``. If ``name`` can not be
+           found in the archive, KeyError is raised. If a member occurs more
+           than once in the archive, its last occurrence is assumed to be the
+           most up-to-date version.
+        """
+        tarinfo = self._getmember(name.rstrip('/'))
+        if tarinfo is None:
+            raise KeyError("filename %r not found" % name)
+        return tarinfo
+
+    def getmembers(self):
+        """Return the members of the archive as a list of TarInfo objects. The
+           list has the same order as the members in the archive.
+        """
+        self._check()
+        if not self._loaded:    # if we want to obtain a list of
+            self._load()        # all members, we first have to
+                                # scan the whole archive.
+        return self.members
+
+    def getnames(self):
+        """Return the members of the archive as a list of their names. It has
+           the same order as the list returned by getmembers().
+        """
+        return [tarinfo.name for tarinfo in self.getmembers()]
+
+    def gettarinfo(self, name=None, arcname=None, fileobj=None):
+        """Create a TarInfo object from the result of os.stat or equivalent
+           on an existing file. The file is either named by ``name``, or
+           specified as a file object ``fileobj`` with a file descriptor. If
+           given, ``arcname`` specifies an alternative name for the file in the
+           archive, otherwise, the name is taken from the 'name' attribute of
+           'fileobj', or the 'name' argument. The name should be a text
+           string.
+        """
+        self._check("awx")
+
+        # When fileobj is given, replace name by
+        # fileobj's real name.
+        if fileobj is not None:
+            name = fileobj.name
+
+        # Build the name of the member in the archive.
+        # Backward slashes are converted to forward slashes;
+        # absolute paths are turned into relative paths.
+        if arcname is None:
+            arcname = name
+        drv, arcname = os.path.splitdrive(arcname)
+        arcname = arcname.replace(os.sep, "/")
+        arcname = arcname.lstrip("/")
+
+        # Now, fill the TarInfo object with
+        # information specific for the file.
+        tarinfo = self.tarinfo()
+        tarinfo.tarfile = self  # Not needed
+
+        # Use os.stat or os.lstat, depending on whether symlinks should
+        # be resolved.
+        if fileobj is None:
+            if not self.dereference:
+                statres = os.lstat(name)
+            else:
+                statres = os.stat(name)
+        else:
+            statres = os.fstat(fileobj.fileno())
+        linkname = ""
+
+        stmd = statres.st_mode
+        if stat.S_ISREG(stmd):
+            inode = (statres.st_ino, statres.st_dev)
+            if not self.dereference and statres.st_nlink > 1 and \
+                    inode in self.inodes and arcname != self.inodes[inode]:
+                # Is it a hardlink to an already
+                # archived file?
+                type = LNKTYPE
+                linkname = self.inodes[inode]
+            else:
+                # The inode is added only if it is valid.
+                # For win32 it is always 0.
+                type = REGTYPE
+                if inode[0]:
+                    self.inodes[inode] = arcname
+        elif stat.S_ISDIR(stmd):
+            type = DIRTYPE
+        elif stat.S_ISFIFO(stmd):
+            type = FIFOTYPE
+        elif stat.S_ISLNK(stmd):
+            type = SYMTYPE
+            linkname = os.readlink(name)
+        elif stat.S_ISCHR(stmd):
+            type = CHRTYPE
+        elif stat.S_ISBLK(stmd):
+            type = BLKTYPE
+        else:
+            return None
+
+        # Fill the TarInfo object with all
+        # information we can get.
+        tarinfo.name = arcname
+        tarinfo.mode = stmd
+        tarinfo.uid = statres.st_uid
+        tarinfo.gid = statres.st_gid
+        if type == REGTYPE:
+            tarinfo.size = statres.st_size
+        else:
+            tarinfo.size = 0
+        tarinfo.mtime = statres.st_mtime
+        tarinfo.type = type
+        tarinfo.linkname = linkname
+        if pwd:
+            try:
+                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+            except KeyError:
+                pass
+        if grp:
+            try:
+                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+            except KeyError:
+                pass
+
+        if type in (CHRTYPE, BLKTYPE):
+            if hasattr(os, "major") and hasattr(os, "minor"):
+                tarinfo.devmajor = os.major(statres.st_rdev)
+                tarinfo.devminor = os.minor(statres.st_rdev)
+        return tarinfo
+
+    def list(self, verbose=True, *, members=None):
+        """Print a table of contents to sys.stdout. If ``verbose`` is False, only
+           the names of the members are printed. If it is True, an `ls -l'-like
+           output is produced. ``members`` is optional and must be a subset of the
+           list returned by getmembers().
+        """
+        self._check()
+
+        if members is None:
+            members = self
+        for tarinfo in members:
+            if verbose:
+                if tarinfo.mode is None:
+                    _safe_print("??????????")
+                else:
+                    _safe_print(stat.filemode(tarinfo.mode))
+                _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
+                                       tarinfo.gname or tarinfo.gid))
+                if tarinfo.ischr() or tarinfo.isblk():
+                    _safe_print("%10s" %
+                            ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
+                else:
+                    _safe_print("%10d" % tarinfo.size)
+                if tarinfo.mtime is None:
+                    _safe_print("????-??-?? ??:??:??")
+                else:
+                    _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
+                                % time.localtime(tarinfo.mtime)[:6])
+
+            _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
+
+            if verbose:
+                if tarinfo.issym():
+                    _safe_print("-> " + tarinfo.linkname)
+                if tarinfo.islnk():
+                    _safe_print("link to " + tarinfo.linkname)
+            print()
+
+    def add(self, name, arcname=None, recursive=True, *, filter=None):
+        """Add the file ``name`` to the archive. ``name`` may be any type of file
+           (directory, fifo, symbolic link, etc.). If given, ``arcname``
+           specifies an alternative name for the file in the archive.
+           Directories are added recursively by default. This can be avoided by
+           setting ``recursive`` to False. ``filter`` is a function
+           that expects a TarInfo object argument and returns the changed
+           TarInfo object; if it returns None, the TarInfo object will be
+           excluded from the archive.
+        """
+        self._check("awx")
+
+        if arcname is None:
+            arcname = name
+
+        # Skip if somebody tries to archive the archive...
+        if self.name is not None and os.path.abspath(name) == self.name:
+            self._dbg(2, "tarfile: Skipped %r" % name)
+            return
+
+        self._dbg(1, name)
+
+        # Create a TarInfo object from the file.
+        tarinfo = self.gettarinfo(name, arcname)
+
+        if tarinfo is None:
+            self._dbg(1, "tarfile: Unsupported type %r" % name)
+            return
+
+        # Change or exclude the TarInfo object.
+        if filter is not None:
+            tarinfo = filter(tarinfo)
+            if tarinfo is None:
+                self._dbg(2, "tarfile: Excluded %r" % name)
+                return
+
+        # Append the tar header and data to the archive.
+        if tarinfo.isreg():
+            with bltn_open(name, "rb") as f:
+                self.addfile(tarinfo, f)
+
+        elif tarinfo.isdir():
+            self.addfile(tarinfo)
+            if recursive:
+                for f in sorted(os.listdir(name)):
+                    self.add(os.path.join(name, f), os.path.join(arcname, f),
+                            recursive, filter=filter)
+
+        else:
+            self.addfile(tarinfo)
+
+    def addfile(self, tarinfo, fileobj=None):
+        """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
+           given, it should be a binary file, and tarinfo.size bytes are read
+           from it and added to the archive. You can create TarInfo objects
+           directly, or by using gettarinfo().
+        """
+        self._check("awx")
+
+        tarinfo = copy.copy(tarinfo)
+
+        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
+        self.fileobj.write(buf)
+        self.offset += len(buf)
+        bufsize = self.copybufsize
+        # If there's data to follow, append it.
+        if fileobj is not None:
+            copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
+            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+            if remainder > 0:
+                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+                blocks += 1
+            self.offset += blocks * BLOCKSIZE
+
+        self.members.append(tarinfo)
+
+    def _get_filter_function(self, filter):
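+        # Resolve ``filter`` to a callable: None falls back to
+        # self.extraction_filter (warning about the changed default planned
+        # for Python 3.14 when that is unset), callables pass through, and
+        # string names such as "data" are looked up in _NAMED_FILTERS.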
+        if filter is None:
+            filter = self.extraction_filter
+            if filter is None:
+                warnings.warn(
+                    'Python 3.14 will, by default, filter extracted tar '
+                    + 'archives and reject files or modify their metadata. '
+                    + 'Use the filter argument to control this behavior.',
+                    DeprecationWarning)
+                return fully_trusted_filter
+            if isinstance(filter, str):
+                raise TypeError(
+                    'String names are not supported for '
+                    + 'TarFile.extraction_filter. Use a function such as '
+                    + 'tarfile.data_filter directly.')
+            return filter
+        if callable(filter):
+            return filter
+        try:
+            return _NAMED_FILTERS[filter]
+        except KeyError:
+            raise ValueError(f"filter {filter!r} not found") from None
+
+    def extractall(self, path=".", members=None, *, numeric_owner=False,
+                   filter=None):
+        """Extract all members from the archive to the current working
+           directory and set owner, modification time and permissions on
+           directories afterwards. `path' specifies a different directory
+           to extract to. `members' is optional and must be a subset of the
+           list returned by getmembers(). If `numeric_owner` is True, only
+           the numbers for user/group names are used and not the names.
+
+           The `filter` function will be called on each member just
+           before extraction.
+           It can return a changed TarInfo or None to skip the member.
+           String names of common filters are accepted.
+        """
+        directories = []
+
+        filter_function = self._get_filter_function(filter)
+        if members is None:
+            members = self
+
+        for member in members:
+            tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+            if tarinfo is None:
+                continue
+            if tarinfo.isdir():
+                # For directories, delay setting attributes until later,
+                # since permissions can interfere with extraction and
+                # extracting contents can reset mtime.
+                directories.append(tarinfo)
+            self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
+                              numeric_owner=numeric_owner)
+
+        # Reverse sort directories.
+        directories.sort(key=lambda a: a.name, reverse=True)
+
+        # Set correct owner, mtime and filemode on directories.
+        for tarinfo in directories:
+            dirpath = os.path.join(path, tarinfo.name)
+            try:
+                self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
+                self.utime(tarinfo, dirpath)
+                self.chmod(tarinfo, dirpath)
+            except ExtractError as e:
+                self._handle_nonfatal_error(e)
+
+    def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
+                filter=None):
+        """Extract a member from the archive to the current working directory,
+           using its full name. Its file information is extracted as accurately
+           as possible. `member' may be a filename or a TarInfo object. You can
+           specify a different directory using `path'. File attributes (owner,
+           mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
+           is True, only the numbers for user/group names are used and not
+           the names.
+
+           The `filter` function will be called before extraction.
+           It can return a changed TarInfo or None to skip the member.
+           String names of common filters are accepted.
+        """
+        filter_function = self._get_filter_function(filter)
+        tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+        if tarinfo is not None:
+            self._extract_one(tarinfo, path, set_attrs, numeric_owner)
+
+    def _get_extract_tarinfo(self, member, filter_function, path):
+        """Get filtered TarInfo (or None) from member, which might be a str"""
+        if isinstance(member, str):
+            tarinfo = self.getmember(member)
+        else:
+            tarinfo = member
+
+        unfiltered = tarinfo
+        try:
+            tarinfo = filter_function(tarinfo, path)
+        except (OSError, FilterError) as e:
+            self._handle_fatal_error(e)
+        except ExtractError as e:
+            self._handle_nonfatal_error(e)
+        if tarinfo is None:
+            self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
+            return None
+        # Prepare the link target for makelink().
+        if tarinfo.islnk():
+            tarinfo = copy.copy(tarinfo)
+            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+        return tarinfo
+
+    def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
+        """Extract from filtered tarinfo to disk"""
+        self._check("r")
+
+        try:
+            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
+                                 set_attrs=set_attrs,
+                                 numeric_owner=numeric_owner)
+        except OSError as e:
+            self._handle_fatal_error(e)
+        except ExtractError as e:
+            self._handle_nonfatal_error(e)
+
+    def _handle_nonfatal_error(self, e):
+        """Handle non-fatal error (ExtractError) according to errorlevel"""
+        if self.errorlevel > 1:
+            raise
+        else:
+            self._dbg(1, "tarfile: %s" % e)
+
+    def _handle_fatal_error(self, e):
+        """Handle "fatal" error according to self.errorlevel"""
+        if self.errorlevel > 0:
+            raise
+        elif isinstance(e, OSError):
+            if e.filename is None:
+                self._dbg(1, "tarfile: %s" % e.strerror)
+            else:
+                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+        else:
+            self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
+
+    def extractfile(self, member):
+        """Extract a member from the archive as a file object. ``member`` may be
+           a filename or a TarInfo object. If ``member`` is a regular file or
+           a link, an io.BufferedReader object is returned. For all other
+           existing members, None is returned. If ``member`` does not appear
+           in the archive, KeyError is raised.
+        """
+        self._check("r")
+
+        if isinstance(member, str):
+            tarinfo = self.getmember(member)
+        else:
+            tarinfo = member
+
+        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
+            # Members with unknown types are treated as regular files.
+            return self.fileobject(self, tarinfo)
+
+        elif tarinfo.islnk() or tarinfo.issym():
+            if isinstance(self.fileobj, _Stream):
+                # A small but ugly workaround for the case that someone tries
+                # to extract a (sym)link as a file-object from a non-seekable
+                # stream of tar blocks.
+                raise StreamError("cannot extract (sym)link as file object")
+            else:
+                # A (sym)link's file object is its target's file object.
+                return self.extractfile(self._find_link_target(tarinfo))
+        else:
+            # If there's no data associated with the member (directory, chrdev,
+            # blkdev, etc.), return None instead of a file object.
+            return None
+
+    def _extract_member(self, tarinfo, targetpath, set_attrs=True,
+                        numeric_owner=False):
+        """Extract the TarInfo object tarinfo to a physical
+           file called targetpath.
+        """
+        # Fetch the TarInfo object for the given name
+        # and build the destination pathname, replacing
+        # forward slashes to platform specific separators.
+        targetpath = targetpath.rstrip("/")
+        targetpath = targetpath.replace("/", os.sep)
+
+        # Create all upper directories.
+        upperdirs = os.path.dirname(targetpath)
+        if upperdirs and not os.path.exists(upperdirs):
+            # Create directories that are not part of the archive with
+            # default permissions.
+            os.makedirs(upperdirs)
+
+        if tarinfo.islnk() or tarinfo.issym():
+            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
+        else:
+            self._dbg(1, tarinfo.name)
+
+        if tarinfo.isreg():
+            self.makefile(tarinfo, targetpath)
+        elif tarinfo.isdir():
+            self.makedir(tarinfo, targetpath)
+        elif tarinfo.isfifo():
+            self.makefifo(tarinfo, targetpath)
+        elif tarinfo.ischr() or tarinfo.isblk():
+            self.makedev(tarinfo, targetpath)
+        elif tarinfo.islnk() or tarinfo.issym():
+            self.makelink(tarinfo, targetpath)
+        elif tarinfo.type not in SUPPORTED_TYPES:
+            self.makeunknown(tarinfo, targetpath)
+        else:
+            self.makefile(tarinfo, targetpath)
+
+        if set_attrs:
+            self.chown(tarinfo, targetpath, numeric_owner)
+            if not tarinfo.issym():
+                self.chmod(tarinfo, targetpath)
+                self.utime(tarinfo, targetpath)
+
+    #--------------------------------------------------------------------------
+    # Below are the different file methods. They are called via
+    # _extract_member() when extract() is called. They can be replaced in a
+    # subclass to implement other functionality.
+
+    def makedir(self, tarinfo, targetpath):
+        """Make a directory called targetpath.
+        """
+        try:
+            if tarinfo.mode is None:
+                # Use the system's default mode
+                os.mkdir(targetpath)
+            else:
+                # Use a safe mode for the directory, the real mode is set
+                # later in _extract_member().
+                os.mkdir(targetpath, 0o700)
+        except FileExistsError:
+            if not os.path.isdir(targetpath):
+                raise
+
+    def makefile(self, tarinfo, targetpath):
+        """Make a file called targetpath.
+        """
+        source = self.fileobj
+        source.seek(tarinfo.offset_data)
+        bufsize = self.copybufsize
+        with bltn_open(targetpath, "wb") as target:
+            if tarinfo.sparse is not None:
+                for offset, size in tarinfo.sparse:
+                    target.seek(offset)
+                    copyfileobj(source, target, size, ReadError, bufsize)
+                target.seek(tarinfo.size)
+                target.truncate()
+            else:
+                copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
+
+    def makeunknown(self, tarinfo, targetpath):
+        """Make a file from a TarInfo object with an unknown type
+           at targetpath.
+        """
+        self.makefile(tarinfo, targetpath)
+        self._dbg(1, "tarfile: Unknown file type %r, " \
+                     "extracted as regular file." % tarinfo.type)
+
+    def makefifo(self, tarinfo, targetpath):
+        """Make a fifo called targetpath.
+        """
+        if hasattr(os, "mkfifo"):
+            os.mkfifo(targetpath)
+        else:
+            raise ExtractError("fifo not supported by system")
+
+    def makedev(self, tarinfo, targetpath):
+        """Make a character or block device called targetpath.
+        """
+        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
+            raise ExtractError("special devices not supported by system")
+
+        mode = tarinfo.mode
+        if mode is None:
+            # Use mknod's default
+            mode = 0o600
+        if tarinfo.isblk():
+            mode |= stat.S_IFBLK
+        else:
+            mode |= stat.S_IFCHR
+
+        os.mknod(targetpath, mode,
+                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
+
+    def makelink(self, tarinfo, targetpath):
+        """Make a (symbolic) link called targetpath. If it cannot be created
+          (platform limitation), we try to make a copy of the referenced file
+          instead of a link.
+        """
+        try:
+            # For systems that support symbolic and hard links.
+            if tarinfo.issym():
+                if os.path.lexists(targetpath):
+                    # Avoid FileExistsError on following os.symlink.
+                    os.unlink(targetpath)
+                os.symlink(tarinfo.linkname, targetpath)
+            else:
+                if os.path.exists(tarinfo._link_target):
+                    os.link(tarinfo._link_target, targetpath)
+                else:
+                    self._extract_member(self._find_link_target(tarinfo),
+                                         targetpath)
+        except symlink_exception:
+            try:
+                self._extract_member(self._find_link_target(tarinfo),
+                                     targetpath)
+            except KeyError:
+                raise ExtractError("unable to resolve link inside archive") from None
+
+    def chown(self, tarinfo, targetpath, numeric_owner):
+        """Set owner of targetpath according to tarinfo. If numeric_owner
+           is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
+           is False, fall back to .gid/.uid when the search based on name
+           fails.
+        """
+        if hasattr(os, "geteuid") and os.geteuid() == 0:
+            # We have to be root to do so.
+            g = tarinfo.gid
+            u = tarinfo.uid
+            if not numeric_owner:
+                try:
+                    if grp and tarinfo.gname:
+                        g = grp.getgrnam(tarinfo.gname)[2]
+                except KeyError:
+                    pass
+                try:
+                    if pwd and tarinfo.uname:
+                        u = pwd.getpwnam(tarinfo.uname)[2]
+                except KeyError:
+                    pass
+            if g is None:
+                g = -1
+            if u is None:
+                u = -1
+            try:
+                if tarinfo.issym() and hasattr(os, "lchown"):
+                    os.lchown(targetpath, u, g)
+                else:
+                    os.chown(targetpath, u, g)
+            except OSError as e:
+                raise ExtractError("could not change owner") from e
+
+    def chmod(self, tarinfo, targetpath):
+        """Set file permissions of targetpath according to tarinfo.
+        """
+        if tarinfo.mode is None:
+            return
+        try:
+            os.chmod(targetpath, tarinfo.mode)
+        except OSError as e:
+            raise ExtractError("could not change mode") from e
+
+    def utime(self, tarinfo, targetpath):
+        """Set modification time of targetpath according to tarinfo.
+        """
+        mtime = tarinfo.mtime
+        if mtime is None:
+            return
+        if not hasattr(os, 'utime'):
+            return
+        try:
+            os.utime(targetpath, (mtime, mtime))
+        except OSError as e:
+            raise ExtractError("could not change modification time") from e
+
+    #--------------------------------------------------------------------------
+    def next(self):
+        """Return the next member of the archive as a TarInfo object, when
+           TarFile is opened for reading. Return None if there is no more
+           available.
+        """
+        self._check("ra")
+        if self.firstmember is not None:
+            m = self.firstmember
+            self.firstmember = None
+            return m
+
+        # Advance the file pointer.
+        if self.offset != self.fileobj.tell():
+            if self.offset == 0:
+                return None
+            self.fileobj.seek(self.offset - 1)
+            if not self.fileobj.read(1):
+                raise ReadError("unexpected end of data")
+
+        # Read the next block.
+        tarinfo = None
+        while True:
+            try:
+                tarinfo = self.tarinfo.fromtarfile(self)
+            except EOFHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, "0x%X: %s" % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+            except InvalidHeaderError as e:
+                if self.ignore_zeros:
+                    self._dbg(2, "0x%X: %s" % (self.offset, e))
+                    self.offset += BLOCKSIZE
+                    continue
+                elif self.offset == 0:
+                    raise ReadError(str(e)) from None
+            except EmptyHeaderError:
+                if self.offset == 0:
+                    raise ReadError("empty file") from None
+            except TruncatedHeaderError as e:
+                if self.offset == 0:
+                    raise ReadError(str(e)) from None
+            except SubsequentHeaderError as e:
+                raise ReadError(str(e)) from None
+            except Exception as e:
+                try:
+                    import zlib
+                    if isinstance(e, zlib.error):
+                        raise ReadError(f'zlib error: {e}') from None
+                    else:
+                        raise e
+                except ImportError:
+                    raise e
+            break
+
+        if tarinfo is not None:
+            self.members.append(tarinfo)
+        else:
+            self._loaded = True
+
+        return tarinfo
+
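next() is the primitive that getmembers(), _load(), and __iter__() below build on: each call returns one TarInfo, and None signals the end of the archive. A small usage sketch via the stdlib module of the same shape ('example.tar' is an assumed path):

    import tarfile

    with tarfile.open('example.tar') as tf:
        member = tf.next()
        while member is not None:
            print(member.name, member.size)
            member = tf.next()          # None once the archive is exhausted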
+    #--------------------------------------------------------------------------
+    # Little helper methods:
+
+    def _getmember(self, name, tarinfo=None, normalize=False):
+        """Find an archive member by name from bottom to top.
+           If tarinfo is given, it is used as the starting point.
+        """
+        # Ensure that all members have been loaded.
+        members = self.getmembers()
+
+        # Limit the member search list up to tarinfo.
+        skipping = False
+        if tarinfo is not None:
+            try:
+                index = members.index(tarinfo)
+            except ValueError:
+                # The given starting point might be a (modified) copy.
+                # We'll later skip members until we find an equivalent.
+                skipping = True
+            else:
+                # Happy fast path
+                members = members[:index]
+
+        if normalize:
+            name = os.path.normpath(name)
+
+        for member in reversed(members):
+            if skipping:
+                if tarinfo.offset == member.offset:
+                    skipping = False
+                continue
+            if normalize:
+                member_name = os.path.normpath(member.name)
+            else:
+                member_name = member.name
+
+            if name == member_name:
+                return member
+
+        if skipping:
+            # Starting point was not found
+            raise ValueError(tarinfo)
+
+    def _load(self):
+        """Read through the entire archive file and look for readable
+           members.
+        """
+        while self.next() is not None:
+            pass
+        self._loaded = True
+
+    def _check(self, mode=None):
+        """Check if TarFile is still open, and if the operation's mode
+           corresponds to TarFile's mode.
+        """
+        if self.closed:
+            raise OSError("%s is closed" % self.__class__.__name__)
+        if mode is not None and self.mode not in mode:
+            raise OSError("bad operation for mode %r" % self.mode)
+
+    def _find_link_target(self, tarinfo):
+        """Find the target member of a symlink or hardlink member in the
+           archive.
+        """
+        if tarinfo.issym():
+            # Always search the entire archive.
+            linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
+            limit = None
+        else:
+            # Search the archive before the link, because a hard link is
+            # just a reference to an already archived file.
+            linkname = tarinfo.linkname
+            limit = tarinfo
+
+        member = self._getmember(linkname, tarinfo=limit, normalize=True)
+        if member is None:
+            raise KeyError("linkname %r not found" % linkname)
+        return member
+
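A worked example of the symlink branch above, under POSIX path semantics (the paths are illustrative): for a member 'a/b/link' whose linkname is '../c', the join yields 'a/b/../c', which the normalize=True lookup in _getmember() matches against the normalized member name 'a/c'.

    import os

    name, linkname = 'a/b/link', '../c'
    joined = "/".join(filter(None, (os.path.dirname(name), linkname)))
    print(joined)                     # a/b/../c
    print(os.path.normpath(joined))   # a/c ('a\\c' on Windows)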
+    def __iter__(self):
+        """Provide an iterator object.
+        """
+        if self._loaded:
+            yield from self.members
+            return
+
+        # Yield items using TarFile's next() method.
+        # When all members have been read, set TarFile as _loaded.
+        index = 0
+        # Fix for SF #1100429: Under rare circumstances it can
+        # happen that getmembers() is called during iteration,
+        # which will have already exhausted the next() method.
+        if self.firstmember is not None:
+            tarinfo = self.next()
+            index += 1
+            yield tarinfo
+
+        while True:
+            if index < len(self.members):
+                tarinfo = self.members[index]
+            elif not self._loaded:
+                tarinfo = self.next()
+                if not tarinfo:
+                    self._loaded = True
+                    return
+            else:
+                return
+            index += 1
+            yield tarinfo
+
+    def _dbg(self, level, msg):
+        """Write debugging output to sys.stderr.
+        """
+        if level <= self.debug:
+            print(msg, file=sys.stderr)
+
+    def __enter__(self):
+        self._check()
+        return self
+
+    def __exit__(self, type, value, traceback):
+        if type is None:
+            self.close()
+        else:
+            # An exception occurred. We must not call close() because
+            # it would try to write end-of-archive blocks and padding.
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+
+#--------------------
+# exported functions
+#--------------------
+
+def is_tarfile(name):
+    """Return True if name points to a tar archive that we
+       are able to handle, else return False.
+
+       'name' should be a string, file, or file-like object.
+    """
+    try:
+        if hasattr(name, "read"):
+            pos = name.tell()
+            t = open(fileobj=name)
+            name.seek(pos)
+        else:
+            t = open(name)
+        t.close()
+        return True
+    except TarError:
+        return False
+
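is_tarfile() accepts either a path or a readable file object, and in the latter case it saves and restores the stream position so the caller can still open the archive afterwards. A brief usage sketch against the stdlib equivalent, which accepts file objects on Python 3.9+ ('sample.tar' is assumed to exist):

    import io
    import tarfile

    print(tarfile.is_tarfile('sample.tar'))   # True for any readable tar
    with io.open('sample.tar', 'rb') as f:
        print(tarfile.is_tarfile(f))          # file objects work too (3.9+)
        print(f.tell())                       # 0: the position was restored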
+open = TarFile.open
+
+
+def main():
+    import argparse
+
+    description = 'A simple command-line interface for tarfile module.'
+    parser = argparse.ArgumentParser(description=description)
+    parser.add_argument('-v', '--verbose', action='store_true', default=False,
+                        help='Verbose output')
+    parser.add_argument('--filter', metavar='<filtername>',
+                        choices=_NAMED_FILTERS,
+                        help='Filter for extraction')
+
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument('-l', '--list', metavar='<tarfile>',
+                       help='Show listing of a tarfile')
+    group.add_argument('-e', '--extract', nargs='+',
+                       metavar=('<tarfile>', '<output_dir>'),
+                       help='Extract tarfile into target dir')
+    group.add_argument('-c', '--create', nargs='+',
+                       metavar=('<name>', '<file>'),
+                       help='Create tarfile from sources')
+    group.add_argument('-t', '--test', metavar='<tarfile>',
+                       help='Test if a tarfile is valid')
+
+    args = parser.parse_args()
+
+    if args.filter and args.extract is None:
+        parser.exit(1, '--filter is only valid for extraction\n')
+
+    if args.test is not None:
+        src = args.test
+        if is_tarfile(src):
+            with open(src, 'r') as tar:
+                tar.getmembers()
+                print(tar.getmembers(), file=sys.stderr)
+            if args.verbose:
+                print('{!r} is a tar archive.'.format(src))
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.list is not None:
+        src = args.list
+        if is_tarfile(src):
+            with TarFile.open(src, 'r:*') as tf:
+                tf.list(verbose=args.verbose)
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.extract is not None:
+        if len(args.extract) == 1:
+            src = args.extract[0]
+            curdir = os.curdir
+        elif len(args.extract) == 2:
+            src, curdir = args.extract
+        else:
+            parser.exit(1, parser.format_help())
+
+        if is_tarfile(src):
+            with TarFile.open(src, 'r:*') as tf:
+                tf.extractall(path=curdir, filter=args.filter)
+            if args.verbose:
+                if curdir == '.':
+                    msg = '{!r} file is extracted.'.format(src)
+                else:
+                    msg = ('{!r} file is extracted '
+                           'into {!r} directory.').format(src, curdir)
+                print(msg)
+        else:
+            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+    elif args.create is not None:
+        tar_name = args.create.pop(0)
+        _, ext = os.path.splitext(tar_name)
+        compressions = {
+            # gz
+            '.gz': 'gz',
+            '.tgz': 'gz',
+            # xz
+            '.xz': 'xz',
+            '.txz': 'xz',
+            # bz2
+            '.bz2': 'bz2',
+            '.tbz': 'bz2',
+            '.tbz2': 'bz2',
+            '.tb2': 'bz2',
+        }
+        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
+        tar_files = args.create
+
+        with TarFile.open(tar_name, tar_mode) as tf:
+            for file_name in tar_files:
+                tf.add(file_name)
+
+        if args.verbose:
+            print('{!r} file created.'.format(tar_name))
+
+if __name__ == '__main__':
+    main()
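The CLI above is a thin wrapper over the module API: --create maps the output extension to a compression mode via the table, and --extract forwards the optional named filter to extractall(). A rough programmatic equivalent of 'python -m tarfile -c out.tgz src' followed by a filtered extraction (the file names and the 'data' filter choice are assumptions):

    import tarfile

    with tarfile.open('out.tgz', 'w:gz') as tf:   # '.tgz' selects gzip per the table
        tf.add('src')                             # 'src' is an assumed directory

    with tarfile.open('out.tgz', 'r:*') as tf:
        # The 'filter' argument exists in Python 3.12+ and in this backport.
        tf.extractall(path='dest', filter='data')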
diff --git a/setuptools/_vendor/backports/tarfile/__main__.py b/setuptools/_vendor/backports/tarfile/__main__.py
new file mode 100644
index 0000000000..daf5509086
--- /dev/null
+++ b/setuptools/_vendor/backports/tarfile/__main__.py
@@ -0,0 +1,5 @@
+from . import main
+
+
+if __name__ == '__main__':
+    main()
diff --git a/setuptools/_vendor/backports/tarfile/compat/__init__.py b/setuptools/_vendor/backports/tarfile/compat/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/backports/tarfile/compat/py38.py b/setuptools/_vendor/backports/tarfile/compat/py38.py
new file mode 100644
index 0000000000..20fbbfc1c0
--- /dev/null
+++ b/setuptools/_vendor/backports/tarfile/compat/py38.py
@@ -0,0 +1,24 @@
+import sys
+
+
+if sys.version_info < (3, 9):
+
+    def removesuffix(self, suffix):
+        # suffix='' should not call self[:-0].
+        if suffix and self.endswith(suffix):
+            return self[: -len(suffix)]
+        else:
+            return self[:]
+
+    def removeprefix(self, prefix):
+        if self.startswith(prefix):
+            return self[len(prefix) :]
+        else:
+            return self[:]
+else:
+
+    def removesuffix(self, suffix):
+        return self.removesuffix(suffix)
+
+    def removeprefix(self, prefix):
+        return self.removeprefix(prefix)
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
new file mode 100644
index 0000000000..c865140ab2
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
@@ -0,0 +1,64 @@
+Metadata-Version: 2.1
+Name: jaraco.functools
+Version: 4.0.1
+Summary: Functools like those found in stdlib
+Author-email: "Jason R. Coombs" 
+Project-URL: Homepage, https://github.com/jaraco/jaraco.functools
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: more-itertools
+Provides-Extra: docs
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: sphinx-lint ; extra == 'docs'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest >=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
+Requires-Dist: jaraco.classes ; extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
+   :target: https://pypi.org/project/jaraco.functools
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
+
+.. image:: https://github.com/jaraco/jaraco.functools/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
+   :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/jaraco.functools
+   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=readme
+
+Additional functools in the spirit of stdlib's functools.
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=referral&utm_campaign=github>`_.
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
new file mode 100644
index 0000000000..cf552f0f48
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.functools-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.1.dist-info/METADATA,sha256=i4aUaQDX-jjdEQK5wevhegyx8JyLfin2HyvaSk3FHso,2891
+jaraco.functools-4.0.1.dist-info/RECORD,,
+jaraco.functools-4.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+jaraco.functools-4.0.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=gk3dsgHzo5F_U74HzAvpNivFAPCkPJ1b2-yCd62dfnw,3878
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..f6205a5f19
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco

From e510f4342778978f6ff367fcc6d369595b9c94a4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 08:09:44 -0400
Subject: [PATCH 0676/1761] Rolled back some inadvertently changed files

---
 .../INSTALLER                                 |  0
 .../LICENSE                                   |  0
 .../METADATA                                  | 11 +++++-----
 .../backports.tarfile-1.0.0.dist-info/RECORD  |  9 ++++++++
 .../REQUESTED                                 |  0
 .../WHEEL                                     |  0
 .../top_level.txt                             |  0
 .../backports.tarfile-1.1.1.dist-info/RECORD  | 17 ---------------
 .../INSTALLER                                 |  0
 .../LICENSE                                   |  0
 .../METADATA                                  | 21 ++++++++++++-------
 .../jaraco.functools-4.0.0.dist-info/RECORD   | 10 +++++++++
 .../jaraco.functools-4.0.0.dist-info}/WHEEL   |  2 +-
 .../top_level.txt                             |  0
 .../jaraco.functools-4.0.1.dist-info/RECORD   | 10 ---------
 .../_vendor/jaraco/functools/__init__.pyi     |  3 +++
 .../more_itertools-10.2.0.dist-info/RECORD    |  1 -
 .../more_itertools-10.2.0.dist-info/REQUESTED |  0
 .../_vendor/platformdirs/__init__.py          |  2 +-
 .../INSTALLER                                 |  0
 .../LICENSE                                   |  0
 .../METADATA                                  | 11 +++++-----
 .../backports.tarfile-1.0.0.dist-info/RECORD  |  9 ++++++++
 .../backports.tarfile-1.0.0.dist-info}/WHEEL  |  0
 .../top_level.txt                             |  0
 .../backports.tarfile-1.1.1.dist-info/RECORD  | 17 ---------------
 .../REQUESTED                                 |  0
 .../_vendor/importlib_metadata/_compat.py     |  2 +-
 .../INSTALLER                                 |  0
 .../LICENSE                                   |  0
 .../METADATA                                  | 21 ++++++++++++-------
 .../jaraco.functools-4.0.0.dist-info/RECORD   | 10 +++++++++
 .../WHEEL                                     |  2 +-
 .../top_level.txt                             |  0
 .../jaraco.functools-4.0.1.dist-info/RECORD   | 10 ---------
 .../_vendor/jaraco/functools/__init__.pyi     |  3 +++
 36 files changed, 84 insertions(+), 87 deletions(-)
 rename pkg_resources/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/LICENSE (100%)
 rename pkg_resources/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/METADATA (86%)
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
 rename pkg_resources/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/REQUESTED (100%)
 rename pkg_resources/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/WHEEL (100%)
 rename pkg_resources/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/top_level.txt (100%)
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
 rename pkg_resources/_vendor/{jaraco.functools-4.0.1.dist-info => jaraco.functools-4.0.0.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{jaraco.functools-4.0.1.dist-info => jaraco.functools-4.0.0.dist-info}/LICENSE (100%)
 rename pkg_resources/_vendor/{jaraco.functools-4.0.1.dist-info => jaraco.functools-4.0.0.dist-info}/METADATA (78%)
 create mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
 rename {setuptools/_vendor/backports.tarfile-1.1.1.dist-info => pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info}/WHEEL (65%)
 rename pkg_resources/_vendor/{jaraco.functools-4.0.1.dist-info => jaraco.functools-4.0.0.dist-info}/top_level.txt (100%)
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/more_itertools-10.2.0.dist-info/REQUESTED
 rename setuptools/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/LICENSE (100%)
 rename setuptools/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/METADATA (86%)
 create mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
 rename {pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info => setuptools/_vendor/backports.tarfile-1.0.0.dist-info}/WHEEL (100%)
 rename setuptools/_vendor/{backports.tarfile-1.1.1.dist-info => backports.tarfile-1.0.0.dist-info}/top_level.txt (100%)
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED
 rename setuptools/_vendor/{jaraco.functools-4.0.1.dist-info => jaraco.functools-4.0.0.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{jaraco.functools-4.0.1.dist-info => jaraco.functools-4.0.0.dist-info}/LICENSE (100%)
 rename setuptools/_vendor/{jaraco.functools-4.0.1.dist-info => jaraco.functools-4.0.0.dist-info}/METADATA (78%)
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
 rename setuptools/_vendor/{jaraco.functools-4.0.1.dist-info => jaraco.functools-4.0.0.dist-info}/WHEEL (65%)
 rename setuptools/_vendor/{jaraco.functools-4.0.1.dist-info => jaraco.functools-4.0.0.dist-info}/top_level.txt (100%)
 delete mode 100644 setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD

diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER
rename to pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE
rename to pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/METADATA b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
similarity index 86%
rename from pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
rename to pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
index d29c50158a..e7b64c87f8 100644
--- a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
+++ b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
@@ -1,16 +1,16 @@
 Metadata-Version: 2.1
 Name: backports.tarfile
-Version: 1.1.1
+Version: 1.0.0
 Summary: Backport of CPython tarfile module
-Author-email: "Jason R. Coombs" 
-Project-URL: Homepage, https://github.com/jaraco/backports.tarfile
+Home-page: https://github.com/jaraco/backports.tarfile
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
 License-File: LICENSE
 Provides-Extra: docs
 Requires-Dist: sphinx >=3.5 ; extra == 'docs'
@@ -19,11 +19,10 @@ Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
 Requires-Dist: furo ; extra == 'docs'
 Requires-Dist: sphinx-lint ; extra == 'docs'
 Provides-Extra: testing
-Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'testing'
+Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
 Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
 Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: jaraco.test ; extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
    :target: https://pypi.org/project/backports.tarfile
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..ba15839660
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
+backports.tarfile-1.0.0.dist-info/RECORD,,
+backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__pycache__/tarfile.cpython-312.pyc,,
+backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED
rename to pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL
rename to pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt
rename to pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
diff --git a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
deleted file mode 100644
index af585a832d..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
+++ /dev/null
@@ -1,17 +0,0 @@
-backports.tarfile-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-backports.tarfile-1.1.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-backports.tarfile-1.1.1.dist-info/METADATA,sha256=XtPZDjwuCtDbN49cpJgthPJm40mfbhk5BllI-jBVVxc,1969
-backports.tarfile-1.1.1.dist-info/RECORD,,
-backports.tarfile-1.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports.tarfile-1.1.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-backports.tarfile-1.1.1.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
-backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81
-backports/__pycache__/__init__.cpython-312.pyc,,
-backports/tarfile/__init__.py,sha256=QOayikyptGOBh_dN1WFI5w0nnUYpX5Gma7p2JgksJIY,106960
-backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59
-backports/tarfile/__pycache__/__init__.cpython-312.pyc,,
-backports/tarfile/__pycache__/__main__.cpython-312.pyc,,
-backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports/tarfile/compat/__pycache__/__init__.cpython-312.pyc,,
-backports/tarfile/compat/__pycache__/py38.cpython-312.pyc,,
-backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
similarity index 78%
rename from pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
index c865140ab2..581b308378 100644
--- a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
@@ -1,16 +1,16 @@
 Metadata-Version: 2.1
 Name: jaraco.functools
-Version: 4.0.1
+Version: 4.0.0
 Summary: Functools like those found in stdlib
-Author-email: "Jason R. Coombs" 
-Project-URL: Homepage, https://github.com/jaraco/jaraco.functools
+Home-page: https://github.com/jaraco/jaraco.functools
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
 License-File: LICENSE
 Requires-Dist: more-itertools
 Provides-Extra: docs
@@ -26,16 +26,17 @@ Requires-Dist: pytest >=6 ; extra == 'testing'
 Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
 Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
+Requires-Dist: pytest-ruff ; extra == 'testing'
 Requires-Dist: jaraco.classes ; extra == 'testing'
-Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-black >=0.3.7 ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
    :target: https://pypi.org/project/jaraco.functools
 
 .. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
 
-.. image:: https://github.com/jaraco/jaraco.functools/actions/workflows/main.yml/badge.svg
+.. image:: https://github.com/jaraco/jaraco.functools/workflows/tests/badge.svg
    :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
    :alt: tests
 
@@ -43,10 +44,14 @@ Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extr
     :target: https://github.com/astral-sh/ruff
     :alt: Ruff
 
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+   :target: https://github.com/psf/black
+   :alt: Code style: Black
+
 .. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
    :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
 
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
+.. image:: https://img.shields.io/badge/skeleton-2023-informational
    :target: https://blog.jaraco.com/skeleton
 
 .. image:: https://tidelift.com/badges/package/pypi/jaraco.functools
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..a3186021ac
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
+jaraco.functools-4.0.0.dist-info/RECORD,,
+jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
similarity index 65%
rename from setuptools/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
index bab98d6758..ba48cbcf92 100644
--- a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/WHEEL
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
+Generator: bdist_wheel (0.41.3)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
similarity index 100%
rename from pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
rename to pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
deleted file mode 100644
index cf552f0f48..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-jaraco.functools-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-4.0.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.functools-4.0.1.dist-info/METADATA,sha256=i4aUaQDX-jjdEQK5wevhegyx8JyLfin2HyvaSk3FHso,2891
-jaraco.functools-4.0.1.dist-info/RECORD,,
-jaraco.functools-4.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-jaraco.functools-4.0.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
-jaraco/functools/__init__.pyi,sha256=gk3dsgHzo5F_U74HzAvpNivFAPCkPJ1b2-yCd62dfnw,3878
-jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
-jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/jaraco/functools/__init__.pyi b/pkg_resources/_vendor/jaraco/functools/__init__.pyi
index 19191bf93e..c2b9ab1757 100644
--- a/pkg_resources/_vendor/jaraco/functools/__init__.pyi
+++ b/pkg_resources/_vendor/jaraco/functools/__init__.pyi
@@ -74,6 +74,9 @@ def result_invoke(
 def invoke(
     f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
 ) -> Callable[_P, _R]: ...
+def call_aside(
+    f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
 
 class Throttler(Generic[_R]):
     last_called: float
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
index 5331c3fc91..2ce6e4a6f5 100644
--- a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
+++ b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
@@ -2,7 +2,6 @@ more_itertools-10.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQr
 more_itertools-10.2.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
 more_itertools-10.2.0.dist-info/METADATA,sha256=lTIPxfD4IiP6aHzPjP4dXmzRRUmiXicAB6qnY82T-Gs,34886
 more_itertools-10.2.0.dist-info/RECORD,,
-more_itertools-10.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 more_itertools-10.2.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
 more_itertools/__init__.py,sha256=VodgFyRJvpnHbAMgseYRiP7r928FFOAakmQrl6J88os,149
 more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/REQUESTED b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/platformdirs/__init__.py b/pkg_resources/_vendor/platformdirs/__init__.py
index 881a596e67..aef2821b83 100644
--- a/pkg_resources/_vendor/platformdirs/__init__.py
+++ b/pkg_resources/_vendor/platformdirs/__init__.py
@@ -11,7 +11,7 @@
 if sys.version_info >= (3, 8):  # pragma: no cover (py38+)
     from typing import Literal
 else:  # pragma: no cover (py38+)
-    from typing_extensions import Literal
+    from ..typing_extensions import Literal
 
 from .api import PlatformDirsABC
 from .version import __version__
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/backports.tarfile-1.1.1.dist-info/INSTALLER
rename to setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/backports.tarfile-1.1.1.dist-info/LICENSE
rename to setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/METADATA b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
similarity index 86%
rename from setuptools/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
rename to setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
index d29c50158a..e7b64c87f8 100644
--- a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/METADATA
+++ b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
@@ -1,16 +1,16 @@
 Metadata-Version: 2.1
 Name: backports.tarfile
-Version: 1.1.1
+Version: 1.0.0
 Summary: Backport of CPython tarfile module
-Author-email: "Jason R. Coombs" 
-Project-URL: Homepage, https://github.com/jaraco/backports.tarfile
+Home-page: https://github.com/jaraco/backports.tarfile
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
 License-File: LICENSE
 Provides-Extra: docs
 Requires-Dist: sphinx >=3.5 ; extra == 'docs'
@@ -19,11 +19,10 @@ Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
 Requires-Dist: furo ; extra == 'docs'
 Requires-Dist: sphinx-lint ; extra == 'docs'
 Provides-Extra: testing
-Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'testing'
+Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
 Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
 Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: jaraco.test ; extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
    :target: https://pypi.org/project/backports.tarfile
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..ba15839660
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
+backports.tarfile-1.0.0.dist-info/RECORD,,
+backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__pycache__/tarfile.cpython-312.pyc,,
+backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
similarity index 100%
rename from pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
rename to setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/backports.tarfile-1.1.1.dist-info/top_level.txt
rename to setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/RECORD b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
deleted file mode 100644
index af585a832d..0000000000
--- a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/RECORD
+++ /dev/null
@@ -1,17 +0,0 @@
-backports.tarfile-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-backports.tarfile-1.1.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-backports.tarfile-1.1.1.dist-info/METADATA,sha256=XtPZDjwuCtDbN49cpJgthPJm40mfbhk5BllI-jBVVxc,1969
-backports.tarfile-1.1.1.dist-info/RECORD,,
-backports.tarfile-1.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports.tarfile-1.1.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-backports.tarfile-1.1.1.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
-backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81
-backports/__pycache__/__init__.cpython-312.pyc,,
-backports/tarfile/__init__.py,sha256=QOayikyptGOBh_dN1WFI5w0nnUYpX5Gma7p2JgksJIY,106960
-backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59
-backports/tarfile/__pycache__/__init__.cpython-312.pyc,,
-backports/tarfile/__pycache__/__main__.cpython-312.pyc,,
-backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports/tarfile/compat/__pycache__/__init__.cpython-312.pyc,,
-backports/tarfile/compat/__pycache__/py38.cpython-312.pyc,,
-backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568
diff --git a/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED b/setuptools/_vendor/backports.tarfile-1.1.1.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_metadata/_compat.py b/setuptools/_vendor/importlib_metadata/_compat.py
index 3d78566ea3..84f9eea4f3 100644
--- a/setuptools/_vendor/importlib_metadata/_compat.py
+++ b/setuptools/_vendor/importlib_metadata/_compat.py
@@ -9,7 +9,7 @@
     from typing import Protocol
 except ImportError:  # pragma: no cover
     # Python 3.7 compatibility
-    from typing_extensions import Protocol  # type: ignore
+    from ..typing_extensions import Protocol  # type: ignore
 
 
 def install(cls):
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
rename to setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
rename to setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
similarity index 78%
rename from setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
rename to setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
index c865140ab2..581b308378 100644
--- a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
+++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
@@ -1,16 +1,16 @@
 Metadata-Version: 2.1
 Name: jaraco.functools
-Version: 4.0.1
+Version: 4.0.0
 Summary: Functools like those found in stdlib
-Author-email: "Jason R. Coombs" 
-Project-URL: Homepage, https://github.com/jaraco/jaraco.functools
+Home-page: https://github.com/jaraco/jaraco.functools
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
 License-File: LICENSE
 Requires-Dist: more-itertools
 Provides-Extra: docs
@@ -26,16 +26,17 @@ Requires-Dist: pytest >=6 ; extra == 'testing'
 Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
 Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
+Requires-Dist: pytest-ruff ; extra == 'testing'
 Requires-Dist: jaraco.classes ; extra == 'testing'
-Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-black >=0.3.7 ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
    :target: https://pypi.org/project/jaraco.functools
 
 .. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
 
-.. image:: https://github.com/jaraco/jaraco.functools/actions/workflows/main.yml/badge.svg
+.. image:: https://github.com/jaraco/jaraco.functools/workflows/tests/badge.svg
    :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
    :alt: tests
 
@@ -43,10 +44,14 @@ Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extr
     :target: https://github.com/astral-sh/ruff
     :alt: Ruff
 
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+   :target: https://github.com/psf/black
+   :alt: Code style: Black
+
 .. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
    :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
 
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
+.. image:: https://img.shields.io/badge/skeleton-2023-informational
    :target: https://blog.jaraco.com/skeleton
 
 .. image:: https://tidelift.com/badges/package/pypi/jaraco.functools
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..a3186021ac
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
+jaraco.functools-4.0.0.dist-info/RECORD,,
+jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
similarity index 65%
rename from setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
rename to setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
index bab98d6758..ba48cbcf92 100644
--- a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
+++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
+Generator: bdist_wheel (0.41.3)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
rename to setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
deleted file mode 100644
index cf552f0f48..0000000000
--- a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-jaraco.functools-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-4.0.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.functools-4.0.1.dist-info/METADATA,sha256=i4aUaQDX-jjdEQK5wevhegyx8JyLfin2HyvaSk3FHso,2891
-jaraco.functools-4.0.1.dist-info/RECORD,,
-jaraco.functools-4.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-jaraco.functools-4.0.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
-jaraco/functools/__init__.pyi,sha256=gk3dsgHzo5F_U74HzAvpNivFAPCkPJ1b2-yCd62dfnw,3878
-jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
-jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/jaraco/functools/__init__.pyi b/setuptools/_vendor/jaraco/functools/__init__.pyi
index 19191bf93e..c2b9ab1757 100644
--- a/setuptools/_vendor/jaraco/functools/__init__.pyi
+++ b/setuptools/_vendor/jaraco/functools/__init__.pyi
@@ -74,6 +74,9 @@ def result_invoke(
 def invoke(
     f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
 ) -> Callable[_P, _R]: ...
+def call_aside(
+    f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
 
 class Throttler(Generic[_R]):
     last_called: float

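A minimal sketch, assuming the vendored jaraco.functools 4.0 API, of how the ``invoke`` helper typed above is used (``call_aside``, whose overload this stub re-adds, is the older alias): it calls the decorated function once at definition time and returns it unchanged.

    # Sketch, assuming the jaraco.functools 4.0 API is installed.
    from jaraco.functools import invoke

    @invoke
    def _prime_cache():
        # runs immediately at definition time; the name stays bound
        print("cache primed")
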
From d1f11d0e8b21d312832cc88bbc894aae6dda22cd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 08:12:58 -0400
Subject: [PATCH 0677/1761] Rolled back more inadvertently changed files

---
 .../_vendor/backports/tarfile/__main__.py     |  5 ----
 .../backports/tarfile/compat/__init__.py      |  0
 .../_vendor/backports/tarfile/compat/py38.py  | 24 -------------------
 3 files changed, 29 deletions(-)
 delete mode 100644 setuptools/_vendor/backports/tarfile/__main__.py
 delete mode 100644 setuptools/_vendor/backports/tarfile/compat/__init__.py
 delete mode 100644 setuptools/_vendor/backports/tarfile/compat/py38.py

diff --git a/setuptools/_vendor/backports/tarfile/__main__.py b/setuptools/_vendor/backports/tarfile/__main__.py
deleted file mode 100644
index daf5509086..0000000000
--- a/setuptools/_vendor/backports/tarfile/__main__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from . import main
-
-
-if __name__ == '__main__':
-    main()
diff --git a/setuptools/_vendor/backports/tarfile/compat/__init__.py b/setuptools/_vendor/backports/tarfile/compat/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/backports/tarfile/compat/py38.py b/setuptools/_vendor/backports/tarfile/compat/py38.py
deleted file mode 100644
index 20fbbfc1c0..0000000000
--- a/setuptools/_vendor/backports/tarfile/compat/py38.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import sys
-
-
-if sys.version_info < (3, 9):
-
-    def removesuffix(self, suffix):
-        # suffix='' should not call self[:-0].
-        if suffix and self.endswith(suffix):
-            return self[: -len(suffix)]
-        else:
-            return self[:]
-
-    def removeprefix(self, prefix):
-        if self.startswith(prefix):
-            return self[len(prefix) :]
-        else:
-            return self[:]
-else:
-
-    def removesuffix(self, suffix):
-        return self.removesuffix(suffix)
-
-    def removeprefix(self, prefix):
-        return self.removeprefix(prefix)

From 2d83e52bcb7ece4637434fe0bd568958ebd0dd59 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 10:04:18 -0400
Subject: [PATCH 0678/1761] Rolled back more inadvertently changed files

---
 setuptools/_vendor/backports/tarfile/__init__.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/setuptools/_vendor/backports/tarfile/__init__.py b/setuptools/_vendor/backports/tarfile/__init__.py
index 6dd498dc04..a7a9a6e7b9 100644
--- a/setuptools/_vendor/backports/tarfile/__init__.py
+++ b/setuptools/_vendor/backports/tarfile/__init__.py
@@ -48,8 +48,6 @@
 import re
 import warnings
 
-from .compat.py38 import removesuffix
-
 try:
     import pwd
 except ImportError:
@@ -1367,7 +1365,7 @@ def _proc_gnulong(self, tarfile):
         # Remove redundant slashes from directories. This is to be consistent
         # with frombuf().
         if next.isdir():
-            next.name = removesuffix(next.name, "/")
+            next.name = next.name.removesuffix("/")
 
         return next
 

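For reference, the built-in behavior the vendored copy now relies on (Python 3.9+): ``str.removesuffix`` strips the suffix only when it is actually present, which is why neither an absent nor an empty suffix needs special-casing.

    # str.removesuffix is a no-op when the suffix is absent or empty,
    # so the backported helper is unnecessary on Python >= 3.9.
    assert "pax/dir/".removesuffix("/") == "pax/dir"
    assert "pax/dir".removesuffix("/") == "pax/dir"
    assert "pax/dir".removesuffix("") == "pax/dir"
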
From 200f9414f1d5cd6569169fbbc94fcab27f1ae177 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 10:07:24 -0400
Subject: [PATCH 0679/1761] Rolled back more inadvertently changed files

---
 .../_vendor/backports/tarfile/__init__.py     |  4 +---
 .../_vendor/backports/tarfile/__main__.py     |  5 ----
 .../backports/tarfile/compat/__init__.py      |  0
 .../_vendor/backports/tarfile/compat/py38.py  | 24 -------------------
 4 files changed, 1 insertion(+), 32 deletions(-)
 delete mode 100644 pkg_resources/_vendor/backports/tarfile/__main__.py
 delete mode 100644 pkg_resources/_vendor/backports/tarfile/compat/__init__.py
 delete mode 100644 pkg_resources/_vendor/backports/tarfile/compat/py38.py

diff --git a/pkg_resources/_vendor/backports/tarfile/__init__.py b/pkg_resources/_vendor/backports/tarfile/__init__.py
index 6dd498dc04..a7a9a6e7b9 100644
--- a/pkg_resources/_vendor/backports/tarfile/__init__.py
+++ b/pkg_resources/_vendor/backports/tarfile/__init__.py
@@ -48,8 +48,6 @@
 import re
 import warnings
 
-from .compat.py38 import removesuffix
-
 try:
     import pwd
 except ImportError:
@@ -1367,7 +1365,7 @@ def _proc_gnulong(self, tarfile):
         # Remove redundant slashes from directories. This is to be consistent
         # with frombuf().
         if next.isdir():
-            next.name = removesuffix(next.name, "/")
+            next.name = next.name.removesuffix("/")
 
         return next
 
diff --git a/pkg_resources/_vendor/backports/tarfile/__main__.py b/pkg_resources/_vendor/backports/tarfile/__main__.py
deleted file mode 100644
index daf5509086..0000000000
--- a/pkg_resources/_vendor/backports/tarfile/__main__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from . import main
-
-
-if __name__ == '__main__':
-    main()
diff --git a/pkg_resources/_vendor/backports/tarfile/compat/__init__.py b/pkg_resources/_vendor/backports/tarfile/compat/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/backports/tarfile/compat/py38.py b/pkg_resources/_vendor/backports/tarfile/compat/py38.py
deleted file mode 100644
index 20fbbfc1c0..0000000000
--- a/pkg_resources/_vendor/backports/tarfile/compat/py38.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import sys
-
-
-if sys.version_info < (3, 9):
-
-    def removesuffix(self, suffix):
-        # suffix='' should not call self[:-0].
-        if suffix and self.endswith(suffix):
-            return self[: -len(suffix)]
-        else:
-            return self[:]
-
-    def removeprefix(self, prefix):
-        if self.startswith(prefix):
-            return self[len(prefix) :]
-        else:
-            return self[:]
-else:
-
-    def removesuffix(self, suffix):
-        return self.removesuffix(suffix)
-
-    def removeprefix(self, prefix):
-        return self.removeprefix(prefix)

From 5c2d351637d3533a9910623f4fe17c10d639ae2e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 10:12:07 -0400
Subject: [PATCH 0680/1761] Rolled back more inadvertently changed files

---
 .../_vendor/backports/{tarfile/__init__.py => tarfile.py}         | 0
 setuptools/_vendor/backports/{tarfile/__init__.py => tarfile.py}  | 0
 2 files changed, 0 insertions(+), 0 deletions(-)
 rename pkg_resources/_vendor/backports/{tarfile/__init__.py => tarfile.py} (100%)
 rename setuptools/_vendor/backports/{tarfile/__init__.py => tarfile.py} (100%)

diff --git a/pkg_resources/_vendor/backports/tarfile/__init__.py b/pkg_resources/_vendor/backports/tarfile.py
similarity index 100%
rename from pkg_resources/_vendor/backports/tarfile/__init__.py
rename to pkg_resources/_vendor/backports/tarfile.py
diff --git a/setuptools/_vendor/backports/tarfile/__init__.py b/setuptools/_vendor/backports/tarfile.py
similarity index 100%
rename from setuptools/_vendor/backports/tarfile/__init__.py
rename to setuptools/_vendor/backports/tarfile.py

From 92a33109701a16281db434daaed2841f72d100b2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 10:16:36 -0400
Subject: [PATCH 0681/1761] Rolled back more inadvertently changed files

---
 .../backports.tarfile-1.0.0.dist-info/RECORD  | 18 ++++++++---------
 .../jaraco.functools-4.0.0.dist-info/RECORD   | 20 +++++++++----------
 .../backports.tarfile-1.0.0.dist-info/RECORD  | 18 ++++++++---------
 .../jaraco.functools-4.0.0.dist-info/RECORD   | 20 +++++++++----------
 4 files changed, 38 insertions(+), 38 deletions(-)

diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
index ba15839660..a6a44d8fcc 100644
--- a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
+++ b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
@@ -1,9 +1,9 @@
-backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
-backports.tarfile-1.0.0.dist-info/RECORD,,
-backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
-backports/__pycache__/tarfile.cpython-312.pyc,,
-backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
+backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
+backports.tarfile-1.0.0.dist-info/RECORD,,
+backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__pycache__/tarfile.cpython-312.pyc,,
+backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
index a3186021ac..783aa7d2b9 100644
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
@@ -1,10 +1,10 @@
-jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
-jaraco.functools-4.0.0.dist-info/RECORD,,
-jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
-jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
-jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
-jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
+jaraco.functools-4.0.0.dist-info/RECORD,,
+jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
index ba15839660..a6a44d8fcc 100644
--- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
+++ b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
@@ -1,9 +1,9 @@
-backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
-backports.tarfile-1.0.0.dist-info/RECORD,,
-backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
-backports/__pycache__/tarfile.cpython-312.pyc,,
-backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
+backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
+backports.tarfile-1.0.0.dist-info/RECORD,,
+backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__pycache__/tarfile.cpython-312.pyc,,
+backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
index a3186021ac..783aa7d2b9 100644
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
+++ b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
@@ -1,10 +1,10 @@
-jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
-jaraco.functools-4.0.0.dist-info/RECORD,,
-jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
-jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
-jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
-jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
+jaraco.functools-4.0.0.dist-info/RECORD,,
+jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

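The RECORD files touched above follow the installed-files format from the wheel spec: CSV rows of path, hash, and size, where the hash and size fields may be empty (e.g. for RECORD itself and ``*.pyc`` files). The ``-``/``+`` lines here are byte-identical as rendered, so presumably this commit only normalizes line endings. A minimal sketch of reading one such file:

    # Sketch: parse a RECORD file (path, "sha256=<urlsafe-b64>", size).
    import csv

    def iter_record(path):
        with open(path, newline="", encoding="utf-8") as f:
            for name, digest, size in csv.reader(f):
                yield name, digest, int(size) if size else None
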
From d14fa0162c95450898c11534caf26a0f03553176 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 20 May 2024 12:25:42 -0400
Subject: [PATCH 0682/1761] Add all site-packages dirs when creating simulated
 environment for test_editable_prefix.

Closes #4371.
---
 setuptools/tests/test_editable_install.py | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index 300a02cfb9..2fe096f0dd 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -408,17 +408,19 @@ def test_editable_with_prefix(tmp_path, sample_project, editable_opts):
     prefix = tmp_path / 'prefix'
 
     # figure out where pip will likely install the package
-    site_packages = prefix / next(
-        Path(path).relative_to(sys.prefix)
+    site_packages_all = [
+        prefix / Path(path).relative_to(sys.prefix)
         for path in sys.path
         if 'site-packages' in path and path.startswith(sys.prefix)
-    )
-    site_packages.mkdir(parents=True)
+    ]
+
+    for sp in site_packages_all:
+        sp.mkdir(parents=True)
 
     # install workaround
-    _addsitedir(site_packages)
+    _addsitedirs(site_packages_all)
 
-    env = dict(os.environ, PYTHONPATH=str(site_packages))
+    env = dict(os.environ, PYTHONPATH=os.pathsep.join(map(str, site_packages_all)))
     cmd = [
         sys.executable,
         '-m',
@@ -1250,14 +1252,17 @@ def install_project(name, venv, tmp_path, files, *opts):
     return project, out
 
 
-def _addsitedir(new_dir: Path):
+def _addsitedirs(new_dirs):
     """To use this function, it is necessary to insert new_dir in front of sys.path.
     The Python process will try to import a ``sitecustomize`` module on startup.
     If we manipulate sys.path/PYTHONPATH, we can force it to run our code,
    which invokes ``addsitedir`` and ensures ``.pth`` files are loaded.
     """
-    file = f"import site; site.addsitedir({os.fspath(new_dir)!r})\n"
-    (new_dir / "sitecustomize.py").write_text(file, encoding="utf-8")
+    content = '\n'.join(
+        ("import site",)
+        + tuple(f"site.addsitedir({os.fspath(new_dir)!r})" for new_dir in new_dirs)
+    )
+    (new_dirs[0] / "sitecustomize.py").write_text(content, encoding="utf-8")
 
 
 # ---- Assertion Helpers ----

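The workaround above leans on a standard interpreter hook: at startup Python imports ``sitecustomize`` if it is importable, so placing one on ``PYTHONPATH`` lets the test run ``site.addsitedir()`` and have ``.pth`` files in the simulated prefix processed. A rough sketch of the mechanism, with an illustrative directory:

    # Sketch of the sitecustomize hook; the directory below is hypothetical.
    import os
    import subprocess
    import sys

    fake_site = "/tmp/prefix/lib/python3.12/site-packages"  # illustrative
    os.makedirs(fake_site, exist_ok=True)
    with open(os.path.join(fake_site, "sitecustomize.py"), "w") as f:
        f.write(f"import site; site.addsitedir({fake_site!r})\n")
    env = dict(os.environ, PYTHONPATH=fake_site)
    # any .pth files under fake_site are now processed at startup
    subprocess.run([sys.executable, "-c", "import sys; print(sys.path)"], env=env)
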
From 6c1ef5748dbd70c8c5423e12680345766ee101d9 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 20 May 2024 12:27:35 -0400
Subject: [PATCH 0683/1761] Remove xfail now that test passes. Ref #4371.

---
 setuptools/tests/test_editable_install.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index 2fe096f0dd..24c10a5054 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -396,11 +396,6 @@ def test_namespace_accidental_config_in_lenient_mode(self, venv, tmp_path):
         assert "mypkg.other not defined" in out
 
 
-# Moved here from test_develop:
-@pytest.mark.xfail(
-    platform.python_implementation() == 'PyPy',
-    reason="Workaround fails on PyPy (why?)",
-)
 def test_editable_with_prefix(tmp_path, sample_project, editable_opts):
     """
     Editable install to a prefix should be discoverable.

From 1a82c45164c1215408015a48e90884c7e2881ab5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 14:19:42 -0400
Subject: [PATCH 0684/1761] Re-added wheel to test dependencies

"pip wheel" won't work without it for now.
---
 setup.cfg | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setup.cfg b/setup.cfg
index b083a262f4..9a91c01490 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -54,6 +54,7 @@ testing =
 
 	# local
 	virtualenv>=13.0.0
+	wheel
 	pip>=19.1 # For proper file:// URLs support.
 	packaging>=23.2
 	jaraco.envs>=2.2

From 44e1731b990671821444e260a23ee60e80828684 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 14:19:58 -0400
Subject: [PATCH 0685/1761] Fixed test failures

---
 setuptools/command/bdist_wheel.py    |  4 +--
 setuptools/tests/test_bdist_wheel.py | 37 +++++++++++++++++++++++++++-
 setuptools/tests/test_build_meta.py  | 10 +++-----
 3 files changed, 42 insertions(+), 9 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 9851466fe9..ad34539eb8 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -230,8 +230,8 @@ class bdist_wheel(Command):
 
     boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
 
-    def initialize_options(self):
-        self.bdist_dir: str = None
+    def initialize_options(self) -> None:
+        self.bdist_dir: str | None = None
         self.data_dir = None
         self.plat_name: str | None = None
         self.plat_tag = None
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index dc5304a88b..8ba6060bb2 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -10,6 +10,7 @@
 import subprocess
 import sys
 import sysconfig
+from functools import partial
 from inspect import cleandoc
 from unittest.mock import Mock
 from zipfile import ZipFile
@@ -57,6 +58,40 @@
 """
 
 
+@pytest.fixture(scope="module")
+def wheel_paths(request, tmp_path_factory):
+    test_distributions = (
+        "complex-dist",
+        "simple.dist",
+        "headers.dist",
+        "commasinfilenames.dist",
+        "unicode.dist",
+    )
+
+    if sys.platform != "win32":
+        # ABI3 extensions don't really work on Windows
+        test_distributions += ("abi3extension.dist",)
+
+    pwd = os.path.abspath(os.curdir)
+    request.addfinalizer(partial(os.chdir, pwd))
+    this_dir = os.path.dirname(__file__)
+    build_dir = tmp_path_factory.mktemp("build")
+    dist_dir = tmp_path_factory.mktemp("dist")
+    for dist in test_distributions:
+        os.chdir(os.path.join(this_dir, "bdist_wheel_testdata", dist))
+        subprocess.check_call([
+            sys.executable,
+            "setup.py",
+            "bdist_wheel",
+            "-b",
+            str(build_dir),
+            "-d",
+            str(dist_dir),
+        ])
+
+    return sorted(str(fname) for fname in dist_dir.iterdir() if fname.suffix == ".whl")
+
+
 @pytest.fixture
 def dummy_dist(tmp_path_factory):
     basedir = tmp_path_factory.mktemp("dummy_dist")
@@ -227,7 +262,7 @@ def test_build_number(dummy_dist, monkeypatch, tmp_path):
 def test_limited_abi(monkeypatch, tmp_path):
     """Test that building a binary wheel with the limited ABI works."""
     this_dir = os.path.dirname(__file__)
-    source_dir = os.path.join(this_dir, "testdata", "extension.dist")
+    source_dir = os.path.join(this_dir, "bdist_wheel_testdata", "extension.dist")
     build_dir = tmp_path.joinpath("build")
     dist_dir = tmp_path.joinpath("dist")
     monkeypatch.chdir(source_dir)
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index cc996b4255..ecb1dcfd87 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -232,7 +232,7 @@ def build_backend(self, tmpdir, request):
 
     def test_get_requires_for_build_wheel(self, build_backend):
         actual = build_backend.get_requires_for_build_wheel()
-        expected = ['six', 'wheel']
+        expected = ['six']
         assert sorted(actual) == sorted(expected)
 
     def test_get_requires_for_build_sdist(self, build_backend):
@@ -783,14 +783,12 @@ def run():
         build_backend = self.get_build_backend()
 
         if use_wheel:
-            base_requirements = ['wheel']
             get_requires = build_backend.get_requires_for_build_wheel
         else:
-            base_requirements = []
             get_requires = build_backend.get_requires_for_build_sdist
 
         # Ensure that the build requirements are properly parsed
-        expected = sorted(base_requirements + requirements)
+        expected = sorted(requirements)
         actual = get_requires()
 
         assert expected == sorted(actual)
@@ -821,7 +819,7 @@ def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
         path.build(files)
         build_backend = self.get_build_backend()
         setup_requires = build_backend.get_requires_for_build_wheel()
-        assert setup_requires == ["wheel", "foo"]
+        assert setup_requires == ["foo"]
 
     def test_dont_install_setup_requires(self, tmpdir_cwd):
         files = {
@@ -963,7 +961,7 @@ def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path):
         """
     (tmp_path / "setup.py").write_text(DALS(setuppy), encoding="utf-8")
     backend = BuildBackend(backend_name="setuptools.build_meta")
-    assert backend.get_requires_for_build_wheel() == ["wheel"]
+    assert backend.get_requires_for_build_wheel() == []
 
 
 def test_system_exit_in_setuppy(monkeypatch, tmp_path):

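The effect of this change is visible directly through the PEP 517 entry points: ``setuptools.build_meta`` no longer injects ``wheel`` into the dynamic build requirements, so a project with no ``setup_requires`` reports none at all. A quick sketch (run from a project directory):

    # Sketch: query the PEP 517 backend's dynamic build requirements.
    from setuptools import build_meta

    print(build_meta.get_requires_for_build_sdist())  # []
    print(build_meta.get_requires_for_build_wheel())  # [] rather than ['wheel']
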
From becb8a2966986cf7af13755d03a1b765f2204d1e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 14:44:17 -0400
Subject: [PATCH 0686/1761] Fixed more test failures

---
 mypy.ini | 1 +
 tox.ini  | 1 +
 2 files changed, 2 insertions(+)

diff --git a/mypy.ini b/mypy.ini
index 146a0e1929..f1dead70cd 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -14,6 +14,7 @@ exclude = (?x)(
 	| ^.+?/(_vendor|extern)/ # Vendored
 	| ^setuptools/_distutils/ # Vendored
 	| ^setuptools/config/_validate_pyproject/ # Auto-generated
+    | ^setuptools/tests/bdist_wheel_testdata/  # Duplicate module name
 	)
 
 # Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes,
diff --git a/tox.ini b/tox.ini
index e6fc063af1..90757dada9 100644
--- a/tox.ini
+++ b/tox.ini
@@ -16,6 +16,7 @@ pass_env =
 	PRE_BUILT_SETUPTOOLS_WHEEL
 	PRE_BUILT_SETUPTOOLS_SDIST
 	TIMEOUT_BACKEND_TEST  # timeout (in seconds) for test_build_meta
+	SSH_AUTH_SOCK  # for exercise.py if repo was checked out with ssh
 	windir  # required for test_pkg_resources
 	# honor git config in pytest-perf
 	HOME

From 7f632e2cfa94949b2b09bdb2258eb021d69224c5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Mon, 20 May 2024 14:53:29 -0400
Subject: [PATCH 0687/1761] Added missing test data

---
 .../abi3extension.dist/extension.c            |   2 ++
 .../abi3extension.dist/setup.cfg              |   2 ++
 .../abi3extension.dist/setup.py               |  12 +++++++++
 .../mypackage/__init__.py                     |   0
 .../mypackage/data/1,2,3.txt                  |   0
 .../mypackage/data/__init__.py                |   0
 .../commasinfilenames.dist/setup.py           |  12 +++++++++
 .../testrepo-0.1.0/mypackage/__init__.py      |   0
 .../complex-dist/complexdist/__init__.py      |   5 ++++
 .../complex-dist/setup.py                     |  24 ++++++++++++++++++
 .../extension.dist/extension.abi3.so          | Bin 0 -> 23312 bytes
 .../extension.dist/extension.c                |  17 +++++++++++++
 .../extension.dist/setup.py                   |  10 ++++++++
 .../headers.dist/header.h                     |   0
 .../headers.dist/headersdist.py               |   0
 .../headers.dist/setup.cfg                    |   2 ++
 .../headers.dist/setup.py                     |  10 ++++++++
 .../macosx_minimal_system_version/libb.dylib  | Bin 0 -> 9544 bytes
 .../macosx_minimal_system_version/test_lib.c  |  13 ++++++++++
 .../test_lib_10_10.dylib                      | Bin 0 -> 756 bytes
 .../test_lib_10_10_10.dylib                   | Bin 0 -> 756 bytes
 .../test_lib_10_10_386.dylib                  | Bin 0 -> 668 bytes
 .../test_lib_10_10_fat.dylib                  | Bin 0 -> 8948 bytes
 .../test_lib_10_14.dylib                      | Bin 0 -> 764 bytes
 .../test_lib_10_14_386.dylib                  | Bin 0 -> 676 bytes
 .../test_lib_10_14_fat.dylib                  | Bin 0 -> 8956 bytes
 .../test_lib_10_6.dylib                       | Bin 0 -> 756 bytes
 .../test_lib_10_6_386.dylib                   | Bin 0 -> 668 bytes
 .../test_lib_10_6_fat.dylib                   | Bin 0 -> 8948 bytes
 .../test_lib_10_9_universal2.dylib            | Bin 0 -> 65936 bytes
 .../test_lib_11.dylib                         | Bin 0 -> 16464 bytes
 .../test_lib_multiple_fat.dylib               | Bin 0 -> 8956 bytes
 .../bdist_wheel_testdata/simple.dist/setup.py |  11 ++++++++
 .../simple.dist/simpledist/__init__.py        |   0
 .../unicode.dist/setup.py                     |  11 ++++++++
 .../unicode.dist/unicodedist/__init__.py      |   0
 ...6_\346\227\245\346\234\254\350\252\236.py" |   0
 37 files changed, 131 insertions(+)
 create mode 100644 setuptools/tests/bdist_wheel_testdata/abi3extension.dist/extension.c
 create mode 100644 setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.cfg
 create mode 100644 setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/__init__.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/1,2,3.txt
 create mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/__init__.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/setup.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/testrepo-0.1.0/mypackage/__init__.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/complex-dist/complexdist/__init__.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/complex-dist/setup.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/extension.dist/extension.abi3.so
 create mode 100644 setuptools/tests/bdist_wheel_testdata/extension.dist/extension.c
 create mode 100644 setuptools/tests/bdist_wheel_testdata/extension.dist/setup.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/headers.dist/header.h
 create mode 100644 setuptools/tests/bdist_wheel_testdata/headers.dist/headersdist.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/headers.dist/setup.cfg
 create mode 100644 setuptools/tests/bdist_wheel_testdata/headers.dist/setup.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/libb.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib.c
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_10.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_fat.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14_fat.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_6.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_6_386.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_6_fat.dylib
 create mode 100755 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_11.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib
 create mode 100644 setuptools/tests/bdist_wheel_testdata/simple.dist/setup.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/simple.dist/simpledist/__init__.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/unicode.dist/setup.py
 create mode 100644 setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/__init__.py
 create mode 100644 "setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/\303\245\303\244\303\266_\346\227\245\346\234\254\350\252\236.py"

diff --git a/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/extension.c b/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/extension.c
new file mode 100644
index 0000000000..a37c3fa2dc
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/extension.c
@@ -0,0 +1,2 @@
+#define Py_LIMITED_API 0x03020000
+#include <Python.h>
diff --git a/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.cfg b/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.cfg
new file mode 100644
index 0000000000..9f6ff39a0f
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+py_limited_api=cp32
diff --git a/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.py
new file mode 100644
index 0000000000..5962bd1552
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+from setuptools import Extension, setup
+
+setup(
+    name="extension.dist",
+    version="0.1",
+    description="A testing distribution \N{SNOWMAN}",
+    ext_modules=[
+        Extension(name="extension", sources=["extension.c"], py_limited_api=True)
+    ],
+)
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/__init__.py b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/1,2,3.txt b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/1,2,3.txt
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/__init__.py b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/setup.py
new file mode 100644
index 0000000000..a2783a3b62
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/setup.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="testrepo",
+    version="0.1",
+    packages=["mypackage"],
+    description="A test package with commas in file names",
+    include_package_data=True,
+    package_data={"mypackage.data": ["*"]},
+)
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/testrepo-0.1.0/mypackage/__init__.py b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/testrepo-0.1.0/mypackage/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/bdist_wheel_testdata/complex-dist/complexdist/__init__.py b/setuptools/tests/bdist_wheel_testdata/complex-dist/complexdist/__init__.py
new file mode 100644
index 0000000000..88aa7b76a4
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/complex-dist/complexdist/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+
+def main():
+    return
diff --git a/setuptools/tests/bdist_wheel_testdata/complex-dist/setup.py b/setuptools/tests/bdist_wheel_testdata/complex-dist/setup.py
new file mode 100644
index 0000000000..e0439d9ef4
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/complex-dist/setup.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="complex-dist",
+    version="0.1",
+    description="Another testing distribution \N{SNOWMAN}",
+    long_description="Another testing distribution \N{SNOWMAN}",
+    author="Illustrious Author",
+    author_email="illustrious@example.org",
+    url="http://example.org/exemplary",
+    packages=["complexdist"],
+    setup_requires=["wheel", "setuptools"],
+    install_requires=["quux", "splort"],
+    extras_require={"simple": ["simple.dist"]},
+    tests_require=["foo", "bar>=10.0.0"],
+    entry_points={
+        "console_scripts": [
+            "complex-dist=complexdist:main",
+            "complex-dist2=complexdist:main",
+        ],
+    },
+)
diff --git a/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.abi3.so b/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.abi3.so
new file mode 100644
index 0000000000000000000000000000000000000000..cf9e0b0a491284e40d703523d7fa602a091232ec
GIT binary patch
literal 23312
zcmeHPeQ;dWb$_e1q+Lr|$(C#^+n_bb$ga`K-yawU%d%t%BuiK}4v^&e?e1G?@$P%e
zdv9f}NhfY-`AFiHb~24=LgFNpnwF4E0|^1rnvaI2NjoKNI>0a)6G{pp!w}L?LuPP)
z=bn3a->%j`XZjbjccgdEIrn_sbI<+QeQ)2V21keL0s&2xp!N}su!%ON?34|6PD%jP
zPHn3e#`PlYLcu$))HDZblZ~p@q+YU7Puvo(tUIMks#fhp7Am78j1m>sV%;rKRr}={
z&=!g@L+B&r^;v08tCI?(UxboeRLVuAoT^VsIaP1*cg@RQlKlLjMTLDI7yo0X~n!EgvtW0+El(Vs-2bt
zu%v>n@H1C3BaJEbL6uj0bd{>q_J)s6W1`AgA6?{?YVn<0<{R#B2bG!Edn3D2>hn?qIZUG`BiB6v-Pj`Y1;4~o%ZU^{p;{8PlcWoojted
zYaqXQZspfewRUO5KTR!fzWU<1bLZ|P9IeMi>uxAGzT-a9bG)A9hPR#d7D4r`1VeTB
z+?m#{V@`F6=3Y&EY7wYKpca7-K?LZYAl-_jd+~H17xxrT
z6U~1Q5ooQ{B2bG!EdsR&)FM!eKrI5b2-G4_i$E;`wFrDzA|Rhrt$Z%ES@lmoui7H<
zX35_varNBmGKI@}sjSuWugfdifBWt^n@ES`!>r<&aNuVpuAXT=Ch_@F{(!8TWW7Yz
z>T~O9`FyJs(^NY>vrL7ad!=##SK3)AsOS?CSI<|Ul6+xVD}Ab#WbJq7xIgNd+P5XH
z{8D`XU$uG;y;9#=IbN0YN_@}2z~#|Z_~gGjx-GUYwl2DU?YeDiH?7?eUA50lMu!cL
z>AmY#i)YP+T6Atte|KPmv99<#0rh(UidQ8M6EtnH^r;$;%BK#QSJilwjqrQ$s5Vts
z)1%r{U5!Wiy6ioCR=##he<-&(-$>)!vKz@u->u*&@xTGFH2(>*{ns+1>n0!?c;OCIYrGor!9YWk!0O3N{k;0L|rRrtr~V{p4b)`o|P?FxZx4Id;s
zI|Q;b{AudgPJs-E`!TD7{Q{W?x00P*0{Lk8emEN3y?7f$4~4tPx1o+JflP;AC*OvJ
z*SYZbN!y4(yzs+h=c=XzxM#x&LdF*E1ad5V7lmmDGV4j;xfI35}V!!6zYxg7;eR02hlFpZ!?#oC;-Qxi_1
zNs&*$yKpya`8M(^yq+yR220^B48H)`@OA;$ok9=h@9+E^3|)l9zq9DoRLK#O!C!CSr(
z`3Xpot1vxVPO>*wL3PVl7tmCUtRS9ynP-%E9$+3a-||ht({?GrO9Jdy6C7@f+}Ng5
zx9^5@B-1tv3tD&s+>c~C=@&d9t!W!*4I$#HtA~8V3T^@15V{f9h4+!^j|P@gb6Yz!
zMqUjp{UjmJAP|vXN+e4fUz5m7)Qvw5E+z$S-3WB#1S63_w6tC<Vi;6qz4+t>%;1&5{D`!GxkGK@4GN2Ww>g}Q~!EcQHD+vhXPB
zF9Cn&8ir}f?v!1+9{io$rO0;lqBF)k2LLZ!&+rcs+oc<%ENm^^$nY
z-!XiS^#47>zfby4GkhcY@HoQ}((?qv%SrZGhR+cFIfl0r{bvlnMDU9YucjFOoZ){W
z`b!L7O8)#O!`G6nml^L8L11O1y(t)37HPxhm%7DTFmid*_NHx3BkkDQ
zy0xaJkDwK2iB^Evyd?#lX&?%5HD0Pw6>1LzBTY>#9RMNR;#0O2!t+bou~oLC^$L9E
zYYqfP+KInq1@!cRFS5}mcx5LswXQ(l+a$f7ykq);EyDIfsadpOZC|AIbdpaUivk3Y
z_C@uAQ(J>Esnb^C8VLL@(t@k_$ub5Y(3f|ezDzpaH&>LC
z{A}O0zO4^G4ihG&wvLe6$b1}T*_ylH0h(phT9q>~U#*zp_9gyf7DJkxL-qfkk7{R9|J*h$k*!tY|I
z{CM29Gk!dku?^2pNZ8q2#+>z&9K)I_Q<^oiwo~ScmxqvFh63Zqlj%emZd&=QzuiTg
zeErRu7`S8^#ZKnSO{CK0c&?BzaxR!SxSnGq%(#&_%&415C(1lb3DZiZt*PB+&M^~)m$t2ghGT&u+jkkR
z2|hDNO`z?kaT!5R_5GMCnl5a_^C`(bS-{VyvxBo~4}>u@8#kTZ6b9y=DD1|lU2P)O
zg*4Pb$+#FM@b5xS_ES!l^s|!(I1L9WHkv+B(1(nSYfg%JrBOmg#U*dK=_w0eTTquL
z3Prut&+9QZyVEmi7u{ezm`F*(GilOE=MPwMJ8vaN?Szq$wse|3NAXi|unZWPOr>Z2
zu4_6J8s;zgB3+o&N32}l8@BDkvV*Q)#hA#-vIvp0lLgB&W~B=gg@c?o`aUxy!$wRa
zGq%y?ST&eIR-
zs|YZwok>#aV(KDTL$r{nJfSpX=M`mq67evSSme#2ypo5(r8keQJk+eGI>);
zARV#DT$?tJC<=vf6ho8G$xwPoM|(~>`q_DC%IAo%qM|ry7M&hNX8L6*xER<{x=?Jg
zhI1G`(@tP%dde~J5(1avjBb#BGWV55to`^CgSr=KDdzwwI^v{}%{0L$3Zqyo{dq=;
z_L!FGU}<*b>>S{A+3z4`X52QM7q}Rum}OA-Fi*Ol4ep^>^x-y^#HG9P}aELljco%
z(@K~K5+zV_C~anv<#F3>Qt~M=ii`9RvtqC9jnLkd#i*D`70S|ln{ez|u|h$m9EM`F
zu-hnjk~RW`(3tO|?X47kbJjDlGTBxvA;xWB+DshYXJQ5saWtL9Vu_%pF>N_>MLx+T
zvNWibBef6WBM^gp*sIVxSD#
zXU>#Hms4|}>DqZGVJaz|j&qoA*sf?#kfWf_c=Se*(i#U%rsJAPr5y=f(o75YekZTy
z_(VbEm15P0uL_Z&G!mi@d^`ueA
zq^G7mdO9#Vux`uy-X@6lW;bud#migfjwxDm(g~o3lbGIt^LZ~dPKm$q_0isxsm>NI
z5WI~gakv-TgNJjf?ew}HvJK2B!!1~eX&`pq?af<9Fjsm-C`(caEHTqc7V`>HiIflD
zk;-Q>ARa-M&7htb8Bn>3EnIbYqA*Eg&zZ=f*7cl`+yq{lZYuiMr*jIV;Nrh@LZV~h
zE_6gOAmM$3>O1E{r<^CHb}R&xH4_CaK9~$%S=w`EmW@_Eg*nO;aqQc?QSQ{@xF?q1
z;$qAzi13#uGA(>8YsYok`(#9li<4*^v9LIoZAK!~igS804@XsJk(KBGV|w1oBe~bp
zDsC1}S$nZkvr*)?I$N-M%yMp_bKI1Xl~&JtI8~$8H*IIlJ_DaJ`VzFbV$0||GHsff
zKIF3JiYv}~Mqg8Kc7<@7zM50^PF@X+f~u~VZ(i|
zS-2M&$3vzff-|u=sHq+g}Ii4LQ*mX!7=u~VdzCZ?=s
z$jH&6ue=>IWXkHgceHpk<5Pp&rsal`_3b@$;FGQ7tUTX~gE)`bP~OWszGbCi^TRpL
zv5%To@mPdR!)KrbPU!Dfg7_S*-kas;4dO%fA^KjWq6G2T`dC%^yyE?2zeo_DnALsW
za(YAYesDP*UjfPcf#vj&7OhIhThgl1=NIprmg{NK)P2x$I=%>s`Y8+twH7T}l^!X+
zhsrM%#H|{2AHJO4rm6Q%mD3k!)$T6`wS}6xzh2IduWWYuDGUe2o5HHm7ipnG)#!`o
z5ZycMBfhc0moUGkpMjSZL>}2Tcf9cJnxG%67~d6=
ze!7BAHv$7%DL!P6$`JkoRmLR?I@wk44ditUwKFM&34Lf^|Ja~D
zxOX?b!EX0;d;7<5sL^%2y-pFazuEe)p*H528H0!t%4Iw)CVCVD7<0{p7V{7uEhah`E9%B^QXHhTm^sbC
zWjT&3vn!akPBIj8Y+=~QrlEGqhSs?2YB7v2otfB8;dIPUrvfx5&3Jway6E#?5lveu
zQpQd3xMR*JjJ~^2FhWu#ZU4_Cdj1Nt3v-N%+CNp@E$OPlx58zGUUT^to>@}zha{kC
zdd7tczQq;=_Y}&?R$#arP_%zc%By--a+6(5J@HHtk(ST}Wheegx3Yg$0;(4GF}3fAf#K#tNmTorzCTw|H}S8DZfkdsprB}t@ubDtKJ;{=?*{ngzw$CsC7=(>iIa;
zTp9m^Xz#jk%x#VG(MDjN?x7c)cb2sl5!Lk2j#aK
z$0yL{x3A85r}5?-Dm%rsbW!rErhT7aWxpt}mSq&5(jeVYsPv+uWtxVG(g%at$N&AeFyVpyIe((M7=?r-dD5O@%-Os{Joq
C1PR0d

literal 0
HcmV?d00001

diff --git a/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.c b/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.c
new file mode 100644
index 0000000000..26403efa82
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.c
@@ -0,0 +1,17 @@
+#include <Python.h>
+
+static PyMethodDef methods[] = {
+	{ NULL, NULL, 0, NULL }
+};
+
+static struct PyModuleDef module_def = {
+	PyModuleDef_HEAD_INIT,
+	"extension",
+	"Dummy extension module",
+	-1,
+	methods
+};
+
+PyMODINIT_FUNC PyInit_extension(void) {
+	return PyModule_Create(&module_def);
+}
diff --git a/setuptools/tests/bdist_wheel_testdata/extension.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/extension.dist/setup.py
new file mode 100644
index 0000000000..9a6eed8cfd
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/extension.dist/setup.py
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from setuptools import Extension, setup
+
+setup(
+    name="extension.dist",
+    version="0.1",
+    description="A testing distribution \N{SNOWMAN}",
+    ext_modules=[Extension(name="extension", sources=["extension.c"])],
+)
diff --git a/setuptools/tests/bdist_wheel_testdata/headers.dist/header.h b/setuptools/tests/bdist_wheel_testdata/headers.dist/header.h
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/bdist_wheel_testdata/headers.dist/headersdist.py b/setuptools/tests/bdist_wheel_testdata/headers.dist/headersdist.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.cfg b/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.cfg
new file mode 100644
index 0000000000..3c6e79cf31
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
diff --git a/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.py
new file mode 100644
index 0000000000..6cf9b46faf
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.py
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="headers.dist",
+    version="0.1",
+    description="A distribution with headers",
+    headers=["header.h"],
+)
diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/libb.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/libb.dylib
new file mode 100644
index 0000000000000000000000000000000000000000..25c954656b07e8abb28f87fbc296f90214207dd2
GIT binary patch
literal 9544
zcmeHNU2IfE6rOD>EVQsKKLJrm3q@-X+J#a|kfi)vyqi|6-4d(B47=Mcd&B;ey?1Nd
zu%yyry4EI*(fB}Oh%u4yL=35Yf(Av3G0_+wc+jK|Xtn_Y8Xu}6>v!hPE!*9K)%bW%
zdglDhnKNh3o!Q~UaK0b`uEDs8PoHM=OC
zEix&?CxaW|lPowlg53sjCgl2?ppF5z9VR6i-9CUK*7rale|k^E9}9EcNci3P@r^Dq
zt?OhXlBH{*aY}zQW&+Lwx)Il+M@%tqytkykB3W+Y-?YkoY0(mWx*R;-*yVl+t2;2nu*;GzDURu}M_
z9oHgDFDd6y%n@^{bQ$5r8QiK(@12&(7_7E$c}PEe#RiL1P!f6y~F%HYKBZeF%pt3J(CjP!@!C5tLCVrHrBYvj|CWz
zFMC{MR}Z9^gxw;u_v(ZmuZA(9ZnOtBZroh=sCLB#NqBIu>~`KB-3UfD@5yclBOB}t
z*cq@hU}wP2z`xGGK{fT0+JCDwmAscqO!p`6ITD-ROipJqj3p}7;a%_G1uOj{{-LVF
zE1kb(G8d@_=s$o;=&yZFE-q4oOVF|OxlATwD!p*rrlyi(YASJFO&vU|4kgc~Pmr_B
zgql(&)gk4)$$qO2CxFSxbdsuv7^<(MdJ=TvOXZG3y{e2;*Ect+Lx~CDXeTUPqouGK
zR|}Of*C?yM?0Ro}P`RUKUYt-fP*c@T<+@(om%PIgS8hHt)A&AonT@Z{)wn-7!Hfl>
z)z|Q1*$j-I*y0*}{SNgnGLasg%g=}7uF>&|>!-$D?^8IgQH0e0McW}WE$Z;Lb7-Xw
z?Ye|Fg!H?_r>?3)+r~&d3NcMDU(z_0oJRWYA_yQ2cAB!Y(GrvdjhpQf5P!D7!`3E6U@dJR!}B$?i~NS08xTE2reKtcGs3k9G7Nbm9`_-d>aEg8diprafwW59y#OPIs_j&qbz
z9|Vrx<`(AYE?sc?0JVM!qf&y$OB|#BYFD-qmpoqF!VtxOSJw2_`
zzfAj8!V46+_C@r<++Rrc1Hi$1ru|;<0&?x8n(QwCPfL4x#})7h`x`L$Q^vDKn&-tM
z_2hpAVm>16H~oXXRb5qv4p{1KIR;e
zaxmH+_5{3!+Z_n`;@pV)G(-0U6r}~at$v@b1$Yc6=Rse@)5=?GxZbI2#`k!!Wzs8quPjh|<#7%Dr3Y51||u2FGg3
z$i(gILW5)VBHROwUcFY=dV`8kHq0osZY4rys;TkD!n)GDzr`E#a)tEGQg762F`Z)&
z!CDVaocFp-M{RRg?~~ObQSCLtx@UfQ3A7?B+jw(hfI9D7PRR*$`?wERA~XX!0tZHi
z)6D|6zl*K+8u^*1DrBFnVTDCE#8wdPr>(X2sKBoPE`@B}qfn%5?eDxIW!g(4=mC=b
zX}Ldh67t$2#;kh?6x7nij9KT;XCS*^k3Bd6p9r~+S^GP*i|SyGnTW2KGKDGNdz7f3
zktcKHUvuPQc*?e~6!L>@#OfURXpa5Irc8T$0zS>LpD^W_dOAN)Zi{wyNmt
znr^}@1JX8SDR4Ou&_Fn!c4S=6aI+!A#mPBth4^UZ4g-F
zav$z`vU;f;qTC6;f>@_DvvD%2Az0QPYt`#;{
ig;-mtuzrC|_P_o3h_?S_an4?rbN_

literal 0
HcmV?d00001

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib.c b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib.c
new file mode 100644
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib.c
@@ -0,0 +1,13 @@
+int num_of_letters(char* text){
+    int num = 0;
+    char* lett = text;
+    while (*lett != 0){
+        if (*lett >= 'a' && *lett <= 'z'){
+            num += 1;
+        } else if (*lett >= 'A' && *lett <= 'Z'){
+            num += 1;
+        }
+        lett += 1;
+    }
+    return num;
+}
diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10.dylib
new file mode 100644
index 0000000000000000000000000000000000000000..eaf1a94e5f8a259da16f00db62453e0c7796c717
GIT binary patch
literal 756
zcmX^A>+L^w1_nlE1|R{%EI_;g#83cYAdm!N3lJX%_yHBa096Cy1I=UrVUW2X5FcNX
zT2TUFL---C5g`aZj5P(y0GS7J3mmX8G{D(FMtpp7er`cxa!Gt?UU_C-N_@PJ3$pwS
zC`SQGXD~tBaso(0oeE?jKzw{^MtoXPVs0u#EIuC9eLJ8^9iVgv)I2Dcft!IL1I7l?
zDnKj%VsLRWFz|6PfGm>$;tNpw9{_0)ARnlYApnSx0myw&HzErY#SZo8eCp9z`{TIl
z7m#|7?%E$f?hB9R+7}G`ExBMJ*BAWzIGTSZ^7qIg^Q-v#`GEXx*DuWn7&~3RywpRM
zb>!~>r3H^}sO~8KeohqCKn_^e1E|}h)Ahy6j{pDv0}YEk43dEXX#8S$4-^(4z*rT;
zz*r%`D8R$cF#)Iw6pk`LtN}C(1fcpE7##To+L)Yq*+8100H~Y|WH3Ln>G64`x$*gF
O@j0m_C8

literal 0
HcmV?d00001

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_10.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_10.dylib
new file mode 100644
GIT binary patch
literal 756
zcmX^A>+L^w1_nlE1|R{%EI_;g#83cYAdm!N3lJX%_yHBa096Cy1I=UrVUW2X5FcNX
zT2TUFL---C5g`aZj5P(y0GS7J3mmX8G{D(FMtpp7er`cxa!Gt?UU_C-N_@PJ3$pwS
zC`SQGXD~tBaso(0oeE?jKzw{^MtoXPVs0u#EIuC9eLJ8^9iVgv)I2Dcft!IL1I7l?
zDnKj%#9Ull3=Di+3?R!SfcOH`{s%x>1jq;KV+a6ZWB_s>)Q!l(M6p9XI-h!U*8Vu|
z`URxkqr3J8ko&@;x%LGEe@iY{$n^#PK91&}iTpis$owk)em)?-+x1KH0me?(FE90w
zWgYo@Kxx6F8>%~szn>FDHIM_A^#JPj=yZMYvg7~%|3JfH4})Z202;p--UEdN2ryO!
zF)&sLFbeRnb4&oL0)?Xt5NiMp0|BUh1_nnyfi@;*UN(>>C;%#F0~ySZYNRe
PT6|7wNl9u^F-R!@L6mUR

literal 0
HcmV?d00001

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib
new file mode 100644
index 0000000000000000000000000000000000000000..8f543875d6ad19063ba0e006ada538d6bd9d606e
GIT binary patch
literal 668
zcma)3ze~eV5WduiG1k5g4h{+qjvXBwt@t(zA{9iCqtuiNww0QqlnO#RG^9`k2mb{B
z3K10C{0m$i3c5K23D&zLucYAMN50&B_v_u`y?%fG%mBa?Q6uWa4m&HcX8#YoQS6R#
zY`I^};8H}}y0F=cfLN6KMWrHpLx%hLyfJjRQa)n2Q9CV_ZEV%gkLyQ>+gmEf_LjIn
z%%uS61Dk&Fsw_hwT2-txOEpXB$vy5VyhhxJXGB#1a{yK4uac8xVVVFLGdydNWG6na
zN0#@X9@rr!!TZEJCBy&n_uO~qBkf7!MF!m96%4oFyYkDq^g(nh9Fo3x0LL$h{&GUL
z#UK;PzpCR!6
z+n0Sc_zTSYA`5NNq9kIU)(6N(#I}(mOZ#;XDwWc$uDG;{Map^m=DhY|`vive(|L(x
z#Us6za$&J?zp~)U<>te?%{x+Fy`pnux1cAW(_k)$ym0ZOu1ansQdaA-wp3|UwLgB3
zX9!<}&Z-`9l@P~;sO$APEQk>ixttJ%Ty)k+xGi;F2bS)EJ>nWPEp#7sr*!zY);Ih;
z?{mIun>+T9HgxAqe=hXxz-x!XTB}jW4y~T>!iu$iCgrYL8->Undan%M9uB5c$+ERh
zx7S;5SsTa3YT-~a+8GWuN25{K9(r%`w_j5RLhlWdJQh7WiLrWJ7%kH%=8tAO$kHjN
zp)+U^?I{<`wald{{k~C>r6!i{B%8|(xmc6;s;*mIdVmxQBftnS0*nA7zz8q`i~u9R
z2rvSSz+Xh*Lp=W*_~SpG_hFyYOjd$IPC;k%j21=4e?sN%sv7P82`50Uwo?7{tQdt-
z+yME9@dMkcPf6K2=5%_-w`y+36009_7+3H@mCh@azST3muiy>Nru~FC2%_;n7+gqa!5q-H;B?jQ
zggVgGNJd!)yB*)|4>}Qh5G2$A73lZ-(-$?yBdip`BTSl`s3IJRM>vmWB>MXhG!^AF&B$2Q}+-Cm=
zhLZ0|jqhnDFnKNm(K4!*r#+w6j?)wGt9ubVk9Qd(4A)~#0x?@OOPQ>NmBQ6|;S6zq
zFL?eSCMmHL@^*QnD65j10MW=}uADIqFNZk2Usp^l30kwI#jhzxmDr@4P!_@Kq{WN4ec&QxJf|e5gx1tr(>aF
PVljsohGiM$OD_EZ=Ie1K

literal 0
HcmV?d00001

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib
new file mode 100644
index 0000000000000000000000000000000000000000..c85b71691ee684af0f05edb64c424873d1081ef2
GIT binary patch
literal 676
zcma)3y-LGS6h5gDqt;#r2L}Zoz)3o|R=nU~L8O8RatJl0f^DUyD5Zjs42Be1!3S{k
zDMV0k^9fuX3c5K23C45M+>(NWANg{AzjM#!zJ7oHOaZ_IQ6uU^mz|X)X8#YoQSOdv
zY+0{na4Vv1UD)gmfmo9JB^3?r4H?$+c_Zj>rF_J4qjqYRZEV%gkLyQ>+ncJ!_LjIn
z%x3`TgAV;JFmxU@PlkilRIwnsyuFehh~D&AF9lqTmBFANMUgK<=Q(@$#1#U+A7OMYzB@x}
z?}FAbwE66$eoa+AWr?^>BVtVB0w2DA
zJ3&d!pdeeharSvPSKJA7g<(zwG&RE0s85XLi`z4YU
zPt0D*`Ni5&dES>R^~VqD_ocjX-Nb0!hMt5@gSjB`VjUNKR5F!FSy_IR#a{^T&lCh0smN-_-YQW2N(;R@{W9g=+QL((d2d-wyYK
zFS(X$@4Cdlh}Ntf&c>nJ3mUO-npJYSvC|PjTy{3krpz^GE3eFc@Wu+=e)nQ3SaLS$
z^rrN#vvq7F7X}4YW!!BK27{K{58mbOy`>0*-XA1+EP8elYweb>)@`ehJDS}mqBu__-{e7~~%2q6|mfDvE>7y(9r5nu!u0Y-ok
zU<4R}zktBU;rwsm$G;lpXfE5KS*!?!oQBSr*-UvxFR?cFbdItAgcG1v+o^tLMpUj8
zH$e6{e&D(8Q`GAc=5u<+PkLU*7Q3G^jw{&FsaNz$^FH-6lI9G^H;y;h)8~k={uAE7
z3-`US5%0P^I*!+B#4sZ-Xhfh5YRctCPR9cC_psG=UKz0i%i8;U*XCDZ{WnD_k4mo>s8tQNo{OxkT^oktDv2p7+L^w1_nlE1|R{%EI_;g#83cYAdm!N3lJX%_yHBa096Cy1I=UrVUW2X5FcNX
zT2TUFL---C5g`aZj5P(y0GS7J3mmX8G{D(FMtpp7er`cxa!Gt?UU_C-N_@PJ3$pwS
zC`SQGXD~tBaso(0oeE?jKzw{^MtoXPVs0u#EIuC9eLJ8^9iVgv)I2Dcft!IL1I7l?
zDnKj%Vz6;BFz|6PfGm>$;tNpw9{_0)ARnlYApnSx0myw&HzErY#SZo8eCp9z`{TIl
z7m#|7?%E$f?hB9R+7}G`ExBMJ*BAWzIGTSZ^7qIg^Q-v#`GEXx*DuWn7&~3RywpRM
zb>!~>r3H^}sO~8KeohqCKn_^e1E|}h)Ahy6j{pDv0}YEk43dEXX#8S$4-^(4z*rT;
zz*r%`D8R$cF#)Iw6pk`LtN}C(1fcpE7##To+L)Yq*+8100H~Y|WH3Ln>G64`x$*gF
O@j0m_C8+L^w1_lOZAZ7$&79g$xF%+PD5E~1)0~DJ9lmpQq{V+2?O5@{8QY%V8EC`5?
z4{?nMf$<>pL?8vy53&yqSQr{$?D+WP{M>@XW
zzyL%HB|rk|R1g;o;^R{@;?s%}b5mh_RClZZ3Y!421Jn$NDh38_28Ik6TLs7v0CCv3
z7#R4tz;;OhxiwJxI)F6D9Y8${0YHolK<w4fxjgUB;@*oe;-5hk3{|+StM>1e?K3X`=|K;W2ft%mpVw2j{H3!*Tb|%
z@%MA0s0DIBl3<-bUQYP`|9`jZpXMVxv4=r2Kmdw2Ab@)dpCR!6
z+n0Sc_zTSYA`5NNq9kIU)(6N(#I}(mOZ#;XDwWc$uDG;{Map^m=DhY|`vive(|L(x
z#Us6za$&J?zp~)U<>te?%{x+Fy`pnux1cAW(_k)$ym0ZOu1ansQdaA-wp3|UwLgB3
zX9!<}&Z-`9l@P~;sO$APEQk>ixk(`klhIix;kMLy9ay>t_K0iHw9tLjozmgoTHo;Z
zywCZrZSL4Z+R&Xd{khP$1FszlYpq5hJG6Sj3oF+8nUuR~Z4@GR=)E$0dpMX*CCk=2
z-Cl3KWo;ZAtA#_!XlFRs9F0a@d+5E%-+oOQ2)#E*@>uljB*yA>VYE!6m_M5BAWNs5
zhR&cxw5MD!*D{x;^!r9hmYP_)lWZbwKi~u9R2rvSS03*N%FanGK
zBftnS0)G*K5ApnO;E(@!-iLioGg%1=IR%~3Gg=fG{|S}5t7^3WC!7Ga+Di4)vtkrV
zaRcNZ#t&?(J|$)AnA7PQ->SJCORRp#VO+rrRXVFI&HdC*OPVvF*spklU3LB^*nh$s
zIR2jFx5FJfpldkYb_locc{(g40#I
z6Y4-$BN=5K>~?&+Kj=j4L6A@fRG{DQPhZp+kFZh%k1%0wqKa@N9^pKik?8M3lncej
Wnafi-YN`a~Icl763KTzpSNIMHtKS0v

literal 0
HcmV?d00001

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib
new file mode 100755
index 0000000000000000000000000000000000000000..26ab109c9957ee2453ddd94a14a7db02bd3e8689
GIT binary patch
literal 65936
zcmeI*Z)jCz9KiAC?vHM6?k+W)XmL>!wcMsHb8qx0UbsV1%Voomhh26@qixsip6fr{
zTFr$+!ATPJCW->zD4C3Ok}?PsBM=KrjKmlH*#e_L!mV)qerM;*y9;SVZ=&zR{hjB}
zdCqgr`EcLA97<%YSZXPQ5^MY9P$EG=009ILKmY**
z5I_I{1Q0*~0R#|0009ILKmY**5I_I{1Q0*~0R#|0009ILKmY**5I_I{1Q0*~0R#|0
z009ILKmY**5I_I{1Q0*~fqz-x%INKTlK3Bz#D6nILKFV`a*XLQrQV(9$Z<%*+S+Jc
ztv59J;7sm%+Xu){z8|ubSFCjtdtARGRrY$3HRI>r@~3D&@BVb{bY*s}O}FlDYHhGh
zt&K@Jk-gqof4!K0k@kaU$y_IB>wD^4Yd6+x+7Mk|6TGYA&&aBEg9I(IM}ik#M-I~^
zR!Nje=((V{iIi)bo8oqPrLC4{Z`ze;Pt`ZCC~w#+qsGiPrmHfT{Gi$yUt8Zr?-7x{
zwjHS#OI;+fMIy5V&zSiCVW6M
z&t+&Re4(Iwq&|H3@y!0d@R1VfABx`T9t`zH?+lk`o?RP0?B$#XS8op=DV#K0I^No`
ze9ha|n-0G*HZ~^9?f9&(Psirz6e*dt=2~m+e3jH1XZ$>PZEKEyvE*QvO86yYLbf&C
zVv~)wIpMmJlWu0_n)f>O=M`NqEYpMh{29`h?!*ESlYPA;v^Ut4n~vMKG$hTNz#AM_}ATy
z?olu#VQp=+u2ve^#mO4*`|`1OqoMl;XtCB!>~Y82Qe8=~uLf_HWNiCML`
z0fU@UxQWdWq^*9zckeUD-9*Z@^38j@ywX<7vp4OMy%y@5SCq?k2~BZh<{Q&hDc3%<
zIL4I8Bz+gHBhuHlBlTjbizK#4WR~C=GoL5({Daza<)q$c_~ZwpooAk2KG61(x#aZ!
zu|mco660&?U5mW7?lEDe*5@phs#M#4k^k6)i$1UHL?M5Vkf}(g+ABmrW9#0On`kLt
z7nta*MH0GBnPW_W)Jy!nzPFagB=r1FN1o@Oyr5i5K>z^+5I_I{1Q0*~0R#|0009IL
zKmY**5I_I{1Q0*~0R#|0009ILKmY**5I_I{1Q0*~0R#|0009ILKmY**5I_I{1Q0*~
zf&Y%cShu-)D{Mx_W}AVburo3=)krua2Xjro)`g+L>NP!~wkJcTcA`HVYCB=1Kie57
z$TyD+$+eOLJ!|58eyFV|bAD5(?Qqt7oh#QK)%$%nHa7c~tR>HKbTHRm(^ThqK4E69
zxz(CGUnRB1fS(7iZOzRtmYnNS39m#Xbv$HS(=9gHXqywRD>>os+Hx*v}=;dvN^|pINhIj{@l*I;sa-E
zlcz4P`R2y;s@GTl6o2&hvXif#I9->#INUgx_x07QujFj5%OBg{zkgrPjq`_My-yu~
z*!gYGefjCNj%)8Mn{xA8=h$octDb%4#dqIcy1Ap|moMtK&At5VkAt5S-i;TQJu$uQ
a=fp<`e(Q`ad4AfJZ8Zat?HBV_Fa8rZUx$7G

literal 0
HcmV?d00001

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_11.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_11.dylib
new file mode 100644
index 0000000000000000000000000000000000000000..80202c11ba026146f7596191a1043216590fa9f2
GIT binary patch
literal 16464
zcmeI4y-EW?6ov2l4=PEcC|Zb!s6`@Tk@+b@&bZ|
z&mfk958wk>ieTdlh=}Lz&gv2bOPe!ra(`y#?(DbI-aNj)eRhjD?IO|z^+C-xk#mW|
z6B&kHA!9bP+nbtc8R_5SPNY>RwD^2V^){fRzjnPDKk5aH&OV5i8u7e}VdaarS
z0@cP*%`5IF7Qdlj)g+{K;%Si{*l8Q7@56xy%`>;pt!J<}st+jW49#(a0
z`{32rUhO{gV8?A2H$q1ov*)_f(=OL1i0yCCvmL`j!oGa`M}Y1M+P~qCiN6o+>;+Tv
M>Uon#Gk&ouU-!LLLjV8(

literal 0
HcmV?d00001

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib
new file mode 100644
index 0000000000000000000000000000000000000000..5f7fd5091ea98cf4db80642362748f384c61200a
GIT binary patch
literal 8956
zcmeHM&ubGw6rPQ-U2B@n!9xX=96Wd^9>hbBjk>T03nCRnkRjBB3MMUei=`wWY>_M}
zG(rzO_y_nW6c0gAD0onhJ$Nl7ptl@W360;I%&Z$MR74Mj_u%Et`!R23zun9G^5O7J
zTL_VYjzdogkt6pNelk`Z%TugRF(?Zozz8q`i~u9R2rvSS03*N%FanGKBftopFa$pS
z_;#oVe}Q>lq@guhLV#{-!eI}+t72+X)qT=UikRYS0z`8l%+*kUMW^e#vi@M
zYlN>uXLXNZl@MozSTyU)SP&y3#wLWwPN=h{;MVkc4OqGd_J}*sq|klzos!`{THgx~
zg0GpTYwx+lyNKqT9nM9e+Y9QEa5l;mawDfLf~e?hUQD>_&Q?~r{ot(?y8Z5SB3W=Y
z>Go#p183{ZNG%*ns-01{H5d$q+D`N{v!V(m
zxB>Ey;|F$hpS-rqnA7PQKkK<2ORRp$aa_T!E}hl3rTduS#Cdvsv`3xD8+2VPi@_S_yF>ow~U+>RI2JD?9LD&%&YwjF#2e{d=3#{@!8(Ez>f
z)npb70(}!+Q}2!`1XGQq%6i!Cg>I+YP%J_(rVyyWyyxG(tPwt8EeAef+-{+Yco^an
huAmu-`A$S3n|qPEIXOm6wV-^A8pqrM#gE_@egaI<-)aB=

literal 0
HcmV?d00001

diff --git a/setuptools/tests/bdist_wheel_testdata/simple.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/simple.dist/setup.py
new file mode 100644
index 0000000000..1e7a78a224
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/simple.dist/setup.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="simple.dist",
+    version="0.1",
+    description="A testing distribution \N{SNOWMAN}",
+    packages=["simpledist"],
+    extras_require={"voting": ["beaglevote"]},
+)
diff --git a/setuptools/tests/bdist_wheel_testdata/simple.dist/simpledist/__init__.py b/setuptools/tests/bdist_wheel_testdata/simple.dist/simpledist/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/bdist_wheel_testdata/unicode.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/unicode.dist/setup.py
new file mode 100644
index 0000000000..ec66d1e6af
--- /dev/null
+++ b/setuptools/tests/bdist_wheel_testdata/unicode.dist/setup.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="unicode.dist",
+    version="0.1",
+    description="A testing distribution \N{SNOWMAN}",
+    packages=["unicodedist"],
+    zip_safe=True,
+)
diff --git a/setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/__init__.py b/setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git "a/setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/\303\245\303\244\303\266_\346\227\245\346\234\254\350\252\236.py" "b/setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/\303\245\303\244\303\266_\346\227\245\346\234\254\350\252\236.py"
new file mode 100644
index 0000000000..e69de29bb2

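The ``abi3extension.dist`` fixture added above builds against the limited C API (``py_limited_api``), which yields a single ``abi3``-tagged wheel usable across many CPython versions. A sketch of inspecting such a tag with the third-party ``packaging`` library; the filename below is illustrative, not one produced by this test data:

    # Sketch: parse an abi3 wheel filename (hypothetical example file).
    from packaging.utils import parse_wheel_filename

    name, version, build, tags = parse_wheel_filename(
        "extension_dist-0.1-cp32-abi3-linux_x86_64.whl"
    )
    print(sorted(str(t) for t in tags))  # ['cp32-abi3-linux_x86_64']
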
From 9c1bcc3417bd12668123f7e731e241d9e57bfc57 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 21 May 2024 10:05:22 +0100
Subject: [PATCH 0688/1761] =?UTF-8?q?Bump=20version:=2069.5.1=20=E2=86=92?=
 =?UTF-8?q?=2070.0.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg                 |  2 +-
 NEWS.rst                         | 56 ++++++++++++++++++++++++++++++++
 newsfragments/4136.bugfix.rst    |  1 -
 newsfragments/4150.feature.rst   |  1 -
 newsfragments/4255.misc.rst      |  1 -
 newsfragments/4262.feature.rst   |  3 --
 newsfragments/4267.feature.rst   |  1 -
 newsfragments/4278.bugfix.rst    |  2 --
 newsfragments/4280.misc.rst      |  1 -
 newsfragments/4282.misc.rst      |  1 -
 newsfragments/4308.misc.rst      |  2 --
 newsfragments/4309.removal.rst   |  7 ----
 newsfragments/4312.doc.rst       |  1 -
 newsfragments/4322.removal.1.rst |  3 --
 newsfragments/4322.removal.2.rst |  3 --
 newsfragments/4324.removal.rst   |  1 -
 newsfragments/4332.feature.rst   |  1 -
 newsfragments/4348.bugfix.rst    |  1 -
 newsfragments/4348.misc.rst      |  1 -
 newsfragments/4360.removal.1.rst |  2 --
 setup.cfg                        |  2 +-
 21 files changed, 58 insertions(+), 35 deletions(-)
 delete mode 100644 newsfragments/4136.bugfix.rst
 delete mode 100644 newsfragments/4150.feature.rst
 delete mode 100644 newsfragments/4255.misc.rst
 delete mode 100644 newsfragments/4262.feature.rst
 delete mode 100644 newsfragments/4267.feature.rst
 delete mode 100644 newsfragments/4278.bugfix.rst
 delete mode 100644 newsfragments/4280.misc.rst
 delete mode 100644 newsfragments/4282.misc.rst
 delete mode 100644 newsfragments/4308.misc.rst
 delete mode 100644 newsfragments/4309.removal.rst
 delete mode 100644 newsfragments/4312.doc.rst
 delete mode 100644 newsfragments/4322.removal.1.rst
 delete mode 100644 newsfragments/4322.removal.2.rst
 delete mode 100644 newsfragments/4324.removal.rst
 delete mode 100644 newsfragments/4332.feature.rst
 delete mode 100644 newsfragments/4348.bugfix.rst
 delete mode 100644 newsfragments/4348.misc.rst
 delete mode 100644 newsfragments/4360.removal.1.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 557ae0ce34..70a34a3ed9 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 69.5.1
+current_version = 70.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 73a8148d9c..06da16714b 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,59 @@
+v70.0.0
+=======
+
+Features
+--------
+
+- Emit a warning when ``[tools.setuptools]`` is present in ``pyproject.toml`` and will be ignored. -- by :user:`SnoopJ` (#4150)
+- Improved `AttributeError` error message if ``pkg_resources.EntryPoint.require`` is called without extras or distribution
+  Gracefully "do nothing" when trying to activate a ``pkg_resources.Distribution`` with a `None` location, rather than raising a `TypeError`
+  -- by :user:`Avasam` (#4262)
+- Typed the dynamically defined variables from `pkg_resources` -- by :user:`Avasam` (#4267)
+- Modernized and refactored VCS handling in package_index. (#4332)
+
+
+Bugfixes
+--------
+
+- In install command, use super to call the superclass methods. Avoids race conditions when monkeypatching from _distutils_system_mod occurs late. (#4136)
+- Fix finder template for lenient editable installs of implicit nested namespaces
+  constructed by using ``package_dir`` to reorganise directory structure. (#4278)
+- Fix an error with `UnicodeDecodeError` handling in ``pkg_resources`` when trying to read files in UTF-8 with a fallback -- by :user:`Avasam` (#4348)
+
+
+Improved Documentation
+----------------------
+
+- Uses RST substitution to put badges in 1 line. (#4312)
+
+
+Deprecations and Removals
+-------------------------
+
+- Further adoption of UTF-8 in ``setuptools``.
+  This change regards mostly files produced and consumed during the build process
+  (e.g. metadata files, script wrappers, automatically updated config files, etc..)
+  Although precautions were taken to minimize disruptions, some edge cases might
+  be subject to backwards incompatibility.
+
+  Support for ``"locale"`` encoding is now **deprecated**. (#4309)
+- Remove ``setuptools.convert_path`` after long deprecation period.
+  This function was never defined by ``setuptools`` itself, but rather a
+  side-effect of an import for internal usage. (#4322)
+- Remove fallback for customisations of ``distutils``' ``build.sub_command`` after long
+  deprecated period.
+  Users are advised to import ``build`` directly from ``setuptools.command.build``. (#4322)
+- Removed ``typing_extensions`` from vendored dependencies -- by :user:`Avasam` (#4324)
+- Remove deprecated ``setuptools.dep_util``.
+  The provided alternative is ``setuptools.modified``. (#4360)
+
+
+Misc
+----
+
+- #4255, #4280, #4282, #4308, #4348
+
+
 v69.5.1
 =======
 
diff --git a/newsfragments/4136.bugfix.rst b/newsfragments/4136.bugfix.rst
deleted file mode 100644
index f56346f0c7..0000000000
--- a/newsfragments/4136.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-In install command, use super to call the superclass methods. Avoids race conditions when monkeypatching from _distutils_system_mod occurs late.
\ No newline at end of file
diff --git a/newsfragments/4150.feature.rst b/newsfragments/4150.feature.rst
deleted file mode 100644
index 5e536fd755..0000000000
--- a/newsfragments/4150.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Emit a warning when ``[tools.setuptools]`` is present in ``pyproject.toml`` and will be ignored. -- by :user:`SnoopJ`
diff --git a/newsfragments/4255.misc.rst b/newsfragments/4255.misc.rst
deleted file mode 100644
index 50a0a3d195..0000000000
--- a/newsfragments/4255.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Treat ``EncodingWarning``s as errors in tests. -- by :user:`Avasam`
diff --git a/newsfragments/4262.feature.rst b/newsfragments/4262.feature.rst
deleted file mode 100644
index 7bbdba87d2..0000000000
--- a/newsfragments/4262.feature.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Improved `AttributeError` error message if ``pkg_resources.EntryPoint.require`` is called without extras or distribution
-Gracefully "do nothing" when trying to activate a ``pkg_resources.Distribution`` with a `None` location, rather than raising a `TypeError`
--- by :user:`Avasam`
diff --git a/newsfragments/4267.feature.rst b/newsfragments/4267.feature.rst
deleted file mode 100644
index 5a69c23914..0000000000
--- a/newsfragments/4267.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Typed the dynamically defined variables from `pkg_resources` -- by :user:`Avasam`
diff --git a/newsfragments/4278.bugfix.rst b/newsfragments/4278.bugfix.rst
deleted file mode 100644
index 5e606cced8..0000000000
--- a/newsfragments/4278.bugfix.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix finder template for lenient editable installs of implicit nested namespaces
-constructed by using ``package_dir`` to reorganise directory structure.
diff --git a/newsfragments/4280.misc.rst b/newsfragments/4280.misc.rst
deleted file mode 100644
index aff6a7ca1c..0000000000
--- a/newsfragments/4280.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Avoid leaking loop variable ``name`` in ``AbstractSandbox`` -- by :user:`Avasam`
diff --git a/newsfragments/4282.misc.rst b/newsfragments/4282.misc.rst
deleted file mode 100644
index 841d1b292c..0000000000
--- a/newsfragments/4282.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Removed the ``setuptools[testing-integration]`` in favor of ``setuptools[testing]`` -- by :user:`Avasam`
diff --git a/newsfragments/4308.misc.rst b/newsfragments/4308.misc.rst
deleted file mode 100644
index 6c43f6338e..0000000000
--- a/newsfragments/4308.misc.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix ``setuptools_wheel`` fixture and avoid the recursive creation of
-``build/lib/build/lib/build/...`` directories in the project root during tests.
diff --git a/newsfragments/4309.removal.rst b/newsfragments/4309.removal.rst
deleted file mode 100644
index b69b17d45f..0000000000
--- a/newsfragments/4309.removal.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Further adoption of UTF-8 in ``setuptools``.
-This change regards mostly files produced and consumed during the build process
-(e.g. metadata files, script wrappers, automatically updated config files, etc..)
-Although precautions were taken to minimize disruptions, some edge cases might
-be subject to backwards incompatibility.
-
-Support for ``"locale"`` encoding is now **deprecated**.
diff --git a/newsfragments/4312.doc.rst b/newsfragments/4312.doc.rst
deleted file mode 100644
index 7ada954876..0000000000
--- a/newsfragments/4312.doc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Uses RST substitution to put badges in 1 line.
diff --git a/newsfragments/4322.removal.1.rst b/newsfragments/4322.removal.1.rst
deleted file mode 100644
index 33360172d5..0000000000
--- a/newsfragments/4322.removal.1.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Remove ``setuptools.convert_path`` after long deprecation period.
-This function was never defined by ``setuptools`` itself, but rather a
-side-effect of an import for internal usage.
diff --git a/newsfragments/4322.removal.2.rst b/newsfragments/4322.removal.2.rst
deleted file mode 100644
index 88380f4c8d..0000000000
--- a/newsfragments/4322.removal.2.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Remove fallback for customisations of ``distutils``' ``build.sub_command`` after long
-deprecated period.
-Users are advised to import ``build`` directly from ``setuptools.command.build``.
diff --git a/newsfragments/4324.removal.rst b/newsfragments/4324.removal.rst
deleted file mode 100644
index 3782a0b81b..0000000000
--- a/newsfragments/4324.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Removed ``typing_extensions`` from vendored dependencies -- by :user:`Avasam`
diff --git a/newsfragments/4332.feature.rst b/newsfragments/4332.feature.rst
deleted file mode 100644
index 9f46298adc..0000000000
--- a/newsfragments/4332.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Modernized and refactored VCS handling in package_index.
\ No newline at end of file
diff --git a/newsfragments/4348.bugfix.rst b/newsfragments/4348.bugfix.rst
deleted file mode 100644
index a8bb79a123..0000000000
--- a/newsfragments/4348.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix an error with `UnicodeDecodeError` handling in ``pkg_resources`` when trying to read files in UTF-8 with a fallback -- by :user:`Avasam`
diff --git a/newsfragments/4348.misc.rst b/newsfragments/4348.misc.rst
deleted file mode 100644
index 989226c4b3..0000000000
--- a/newsfragments/4348.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Update dynamic module imports in ``pkg_resources`` to private alias static imports. Enabled ``attr-defined`` checks in mypy for ``pkg_resources`` -- by :user:`Avasam`
diff --git a/newsfragments/4360.removal.1.rst b/newsfragments/4360.removal.1.rst
deleted file mode 100644
index f00d6be518..0000000000
--- a/newsfragments/4360.removal.1.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Remove deprecated ``setuptools.dep_util``.
-The provided alternative is ``setuptools.modified``.
diff --git a/setup.cfg b/setup.cfg
index 0756fa92ea..baed6f84ae 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = setuptools
-version = 69.5.1
+version = 70.0.0
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
 description = Easily download, build, install, upgrade, and uninstall Python packages

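[editor's note] Two of the removals announced in the NEWS entry above have direct replacements. A hedged migration sketch; the replacement names come from the NEWS text itself, and newer is one representative helper from the named module:

    # Before: from setuptools import convert_path
    # The NEWS entry notes convert_path was only ever a side-effect of an
    # internal import, so the distutils original still works:
    from distutils.util import convert_path

    # Before: from setuptools.dep_util import newer
    # The NEWS entry names setuptools.modified as the alternative:
    from setuptools.modified import newer

    print(convert_path("pkg/sub"))         # OS-native path separator
    print(newer("setup.cfg", "NEWS.rst"))  # True if the source is newer
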
From 5cbf12a9b63fd37985a4525617b46576b8ac3a7b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 21 May 2024 11:08:23 +0100
Subject: [PATCH 0689/1761] Workaround for release error in v70

---
 tox.ini | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tox.ini b/tox.ini
index e6fc063af1..c6d7068907 100644
--- a/tox.ini
+++ b/tox.ini
@@ -91,6 +91,7 @@ description = publish the package to PyPI and GitHub
 skip_install = True
 deps =
 	build
+	pyproject-hooks!=1.1 # workaround for pypa/setuptools#4333
 	twine>=3
 	jaraco.develop>=7.1
 pass_env =

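[editor's note] The pin added above excludes exactly one broken release rather than capping the dependency. A minimal sketch of how a PEP 440 exclusion specifier such as pyproject-hooks!=1.1 evaluates, assuming the third-party "packaging" library; the specifier string is the one from the patch, the probe versions are illustrative:

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet("!=1.1")
    print("1.1" in spec)    # False -- the problematic release is excluded
    print("1.0.1" in spec)  # True  -- earlier releases still satisfy it
    print("1.2" in spec)    # True  -- later releases do too
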
From da9dc8cb0b4f25e50cce8eac5eaed2f3d7d28965 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Tue, 21 May 2024 10:02:03 -0400
Subject: [PATCH 0690/1761] Fixed remaining test failures

---
 conftest.py                          | 1 +
 setuptools/command/editable_wheel.py | 2 +-
 tools/vendored.py                    | 2 +-
 3 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/conftest.py b/conftest.py
index 328d45d351..99d020b733 100644
--- a/conftest.py
+++ b/conftest.py
@@ -39,6 +39,7 @@ def pytest_configure(config):
     'pkg_resources/_vendor',
     'setuptools/config/_validate_pyproject',
     'setuptools/modified.py',
+    'setuptools/tests/bdist_wheel_testdata',
 ]
 
 
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 7f689cac70..a835a8194b 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -59,7 +59,7 @@
 from .install_scripts import install_scripts as install_scripts_cls
 
 if TYPE_CHECKING:
-    from .._vendor.wheel.wheelfile import WheelFile  # type:ignore[import-untyped]
+    from .._vendor.wheel.wheelfile import WheelFile
 
 _P = TypeVar("_P", bound=StrPath)
 _logger = logging.getLogger(__name__)
diff --git a/tools/vendored.py b/tools/vendored.py
index 9a2e84e2dd..dcf944d493 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -4,7 +4,7 @@
 import subprocess
 from textwrap import dedent
 
-from path import Path
+from pathlib import Path
 
 
 def remove_all(paths):

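[editor's note] The conftest.py change above extends the list that keeps pytest from collecting the new wheel test data as test code. A minimal sketch of the underlying pytest mechanism, assuming the list in question is the module-level collect_ignore hook; the directory name matches the patch, the file is otherwise illustrative:

    # conftest.py -- illustrative sketch, not the project's actual file.
    # pytest skips collection inside any path listed here, resolved
    # relative to the directory containing this conftest.py.
    collect_ignore = [
        "setuptools/tests/bdist_wheel_testdata",  # fixtures, not tests
    ]
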
From 4123506b0242921571cd5f43a3c9a834d92da5ba Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Tue, 21 May 2024 11:57:58 -0400
Subject: [PATCH 0691/1761] Fixed test_no_ctypes not testing the right thing(s)

---
 setuptools/tests/test_bdist_wheel.py | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 8ba6060bb2..5d28368c88 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -10,6 +10,7 @@
 import subprocess
 import sys
 import sysconfig
+from contextlib import suppress
 from functools import partial
 from inspect import cleandoc
 from unittest.mock import Mock
@@ -482,14 +483,16 @@ def _fake_import(name: str, *args, **kwargs):
 
         return importlib.__import__(name, *args, **kwargs)
 
+    with suppress(KeyError):
+        monkeypatch.delitem(sys.modules, "setuptools.extern.wheel.macosx_libfile")
+
     # Install an importer shim that refuses to load ctypes
     monkeypatch.setattr(builtins, "__import__", _fake_import)
+    with pytest.raises(ModuleNotFoundError, match="No module named ctypes"):
+        import setuptools.extern.wheel.macosx_libfile
 
-    # Unload all wheel modules
-    for module in list(sys.modules):
-        if module.startswith("wheel"):
-            monkeypatch.delitem(sys.modules, module)
-
-    from setuptools.command import bdist_wheel
+    # Unload and reimport the bdist_wheel command module to make sure it won't try to
+    # import ctypes
+    monkeypatch.delitem(sys.modules, "setuptools.command.bdist_wheel")
 
-    assert bdist_wheel
+    import setuptools.command.bdist_wheel  # noqa: F401

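[editor's note] The reworked test above relies on a generally useful trick: replace builtins.__import__ with a shim that refuses a single module, evict any cached copy from sys.modules, and then assert that a fresh import fails. A self-contained sketch of that technique; the module name matches the test, the rest is illustrative and restores the real importer afterwards:

    import builtins
    import sys

    real_import = builtins.__import__

    def fake_import(name, *args, **kwargs):
        # Refuse to load ctypes; delegate everything else.
        if name == "ctypes":
            raise ModuleNotFoundError("No module named ctypes")
        return real_import(name, *args, **kwargs)

    builtins.__import__ = fake_import
    sys.modules.pop("ctypes", None)  # force a fresh import attempt
    try:
        import ctypes  # noqa: F401
    except ModuleNotFoundError as exc:
        print(exc)  # No module named ctypes
    finally:
        builtins.__import__ = real_import
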
From e775f9906a323a5a3261ab6188a45ec15f573284 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 21 May 2024 13:38:36 -0400
Subject: [PATCH 0692/1761] Move prerelease tag settings back to setup.cfg

---
 pyproject.toml | 4 ----
 setup.cfg      | 3 +++
 2 files changed, 3 insertions(+), 4 deletions(-)
 create mode 100644 setup.cfg

diff --git a/pyproject.toml b/pyproject.toml
index ccd92bb793..aa7fa372b3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -169,10 +169,6 @@ exclude = [
 ]
 namespaces = true
 
-[tool.distutils.egg_info]
-tag-build = ".post"
-tag-date = 1
-
 [tool.distutils.sdist]
 formats = "zip"
 
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000000..38922089ad
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,3 @@
+[egg_info]
+tag_build = .post
+tag_date = 1

From 7faa0821588e7c4dd95f7c459baf1f48c2d0f102 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Tue, 21 May 2024 13:57:27 -0400
Subject: [PATCH 0693/1761] Reverted the change from path to pathlib and added
 type ignore comments

---
 tools/vendored.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tools/vendored.py b/tools/vendored.py
index dcf944d493..edc9195f3c 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -4,7 +4,7 @@
 import subprocess
 from textwrap import dedent
 
-from pathlib import Path
+from path import Path
 
 
 def remove_all(paths):
@@ -117,7 +117,7 @@ def rewrite_wheel(pkg_files: Path):
 
     # Rewrite vendored imports to use setuptools's own vendored libraries
     for path in pkg_files.iterdir():
-        if path.suffix == '.py':
+        if path.suffix == '.py':  # type: ignore[attr-defined]
             code = path.read_text()
             if path.name == 'wheelfile.py':
                 code = re.sub(
@@ -155,7 +155,7 @@ class WheelError(Exception):
                     flags=re.MULTILINE,
                 )
 
-            path.write_text(code)
+            path.write_text(code)  # type: ignore[attr-defined]
 
 
 def rewrite_platformdirs(pkg_files: Path):

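[editor's note] The revert above restores the third-party "path" package (PyPI name: path) in place of the standard-library pathlib. One practical difference, and a plausible reason tools/vendored.py is written against it, is that path.Path subclasses str, so its instances can be handed directly to string-based APIs; the added type-ignore comments suggest its stubs do not cover every attribute the script uses. A minimal comparison sketch, assuming the path package is installed:

    from path import Path as StrPath  # third-party "path" package
    from pathlib import Path as StdPath

    p1 = StrPath("tools")
    p2 = StdPath("tools")

    print(isinstance(p1, str))  # True  -- usable wherever a str is expected
    print(isinstance(p2, str))  # False -- pathlib paths are not strings
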
From 5c7e6c96b13e2232a060c2ec1de7163ecd4cf890 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?= 
Date: Tue, 21 May 2024 14:17:41 -0400
Subject: [PATCH 0694/1761] Added test data egg-info directories to .gitignore

---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore b/.gitignore
index f25d073e24..9ad7e2dd67 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,6 +8,7 @@ include
 lib
 distribute.egg-info
 setuptools.egg-info
+setuptools/tests/bdist_wheel_testdata/*/*.egg-info/
 .coverage
 .eggs
 .tox

From e53c82b3c5fc3f90925231e2f982fb9c314adacd Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 22 May 2024 03:44:03 +0000
Subject: [PATCH 0695/1761] --- updated-dependencies: - dependency-name: mypy  
 dependency-type: direct:production   update-type: version-update:semver-minor
 ...

Signed-off-by: dependabot[bot] 
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index aa7fa372b3..7f74060916 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -59,7 +59,7 @@ testing = [
 	# for tools/finalize.py
 	'jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin"',
 	"pytest-home >= 0.5",
-	"mypy==1.9", # pin mypy version so a new version doesn't suddenly cause the CI to fail
+	"mypy==1.10.0", # pin mypy version so a new version doesn't suddenly cause the CI to fail
 	# No Python 3.11 dependencies require tomli, but needed for type-checking since we import it directly
 	"tomli",
 	# No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly

From 131b6e95c7b5306370d392ed479d117bd3654191 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 22 May 2024 05:30:48 -0400
Subject: [PATCH 0696/1761] Merge and update variable and params annotations
 from typeshed (#4246)

---
 mypy.ini                         |   2 +-
 newsfragments/4246.feature.rst   |   4 +
 pkg_resources/__init__.py        | 469 ++++++++++++++++++++-----------
 pkg_resources/extern/__init__.py |  26 +-
 4 files changed, 331 insertions(+), 170 deletions(-)
 create mode 100644 newsfragments/4246.feature.rst

diff --git a/mypy.ini b/mypy.ini
index 146a0e1929..231330d270 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -9,7 +9,7 @@ explicit_package_bases = True
 exclude = (?x)(
 	^build/
 	| ^.tox/
-	| ^.egg/
+	| ^.eggs/
 	| ^pkg_resources/tests/data/my-test-package-source/setup.py$ # Duplicate module name
 	| ^.+?/(_vendor|extern)/ # Vendored
 	| ^setuptools/_distutils/ # Vendored
diff --git a/newsfragments/4246.feature.rst b/newsfragments/4246.feature.rst
new file mode 100644
index 0000000000..d5dd2ead98
--- /dev/null
+++ b/newsfragments/4246.feature.rst
@@ -0,0 +1,4 @@
+Improve error message when ``pkg_resources.ZipProvider`` tries to extract resources with a missing Egg -- by :user:`Avasam`
+
+Added variables and parameter type annotations to ``pkg_resources`` to be nearly on par with typeshed.\* -- by :user:`Avasam`
+\* Excluding ``TypeVar`` and ``overload``. Return types are currently inferred. 
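
[editor's note] The pkg_resources diff that follows introduces several type aliases; the most unusual is the recursive _NestedStr, which describes a string or arbitrarily nested iterables of strings (the shape WorkingSet.require() accepts). A minimal sketch of how such an alias is used, with an illustrative flatten() helper that is not part of the patch:

    from typing import Iterable, Union

    _NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]

    def flatten(items: _NestedStr) -> Iterable[str]:
        # Strings are yielded as-is; any other iterable is recursed into.
        if isinstance(items, str):
            yield items
        else:
            for item in items:
                yield from flatten(item)

    print(list(flatten(["a", ["b", ("c",)]])))  # ['a', 'b', 'c']
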
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index faee7dec79..c86d9f095c 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -1,3 +1,6 @@
+# TODO: Add Generic type annotations to initialized collections.
+# For now we'd simply use implicit Any/Unknown which would add redundant annotations
+# mypy: disable-error-code="var-annotated"
 """
 Package resource API
 --------------------
@@ -28,6 +31,16 @@
 import re
 import types
 from typing import (
+    Any,
+    Mapping,
+    MutableSequence,
+    NamedTuple,
+    NoReturn,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    Union,
     TYPE_CHECKING,
     List,
     Protocol,
@@ -55,6 +68,7 @@
 import ntpath
 import posixpath
 import importlib
+import importlib.abc
 import importlib.machinery
 from pkgutil import get_importer
 
@@ -62,6 +76,8 @@
 
 # capture these to bypass sandboxing
 from os import utime
+from os import open as os_open
+from os.path import isdir, split
 
 try:
     from os import mkdir, rename, unlink
@@ -71,9 +87,6 @@
     # no write support, probably under GAE
     WRITE_SUPPORT = False
 
-from os import open as os_open
-from os.path import isdir, split
-
 from pkg_resources.extern.jaraco.text import (
     yield_lines,
     drop_comment,
@@ -85,6 +98,8 @@
 from pkg_resources.extern.packaging import version as _packaging_version
 from pkg_resources.extern.platformdirs import user_cache_dir as _user_cache_dir
 
+if TYPE_CHECKING:
+    from _typeshed import StrPath
 
 warnings.warn(
     "pkg_resources is deprecated as an API. "
@@ -93,7 +108,22 @@
     stacklevel=2,
 )
 
+
 T = TypeVar("T")
+# Type aliases
+_NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
+_InstallerType = Callable[["Requirement"], Optional["Distribution"]]
+_PkgReqType = Union[str, "Requirement"]
+_EPDistType = Union["Distribution", _PkgReqType]
+_MetadataType = Optional["IResourceProvider"]
+# Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
+_ModuleLike = Union[object, types.ModuleType]
+_AdapterType = Callable[..., Any]  # Incomplete
+
+
+# Use _typeshed.importlib.LoaderProtocol once available https://github.com/python/typeshed/pull/11890
+class _LoaderProtocol(Protocol):
+    def load_module(self, fullname: str, /) -> types.ModuleType: ...
 
 
 _PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
@@ -290,7 +320,7 @@ def req(self):
     def report(self):
         return self._template.format(**locals())
 
-    def with_context(self, required_by):
+    def with_context(self, required_by: Set[Union["Distribution", str]]):
         """
         If required_by is non-empty, return a version of self that is a
         ContextualVersionConflict.
@@ -347,7 +377,7 @@ class UnknownExtra(ResolutionError):
     """Distribution doesn't have an "extra feature" of the given name"""
 
 
-_provider_factories = {}
+_provider_factories: Dict[Type[_ModuleLike], _AdapterType] = {}
 
 PY_MAJOR = '{}.{}'.format(*sys.version_info)
 EGG_DIST = 3
@@ -357,7 +387,9 @@ class UnknownExtra(ResolutionError):
 DEVELOP_DIST = -1
 
 
-def register_loader_type(loader_type, provider_factory):
+def register_loader_type(
+    loader_type: Type[_ModuleLike], provider_factory: _AdapterType
+):
     """Register `provider_factory` to make providers for `loader_type`
 
     `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
@@ -367,7 +399,7 @@ def register_loader_type(loader_type, provider_factory):
     _provider_factories[loader_type] = provider_factory
 
 
-def get_provider(moduleOrReq):
+def get_provider(moduleOrReq: Union[str, "Requirement"]):
     """Return an IResourceProvider for the named module or requirement"""
     if isinstance(moduleOrReq, Requirement):
         return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
@@ -429,7 +461,7 @@ def get_build_platform():
 get_platform = get_build_platform
 
 
-def compatible_platforms(provided, required):
+def compatible_platforms(provided: Optional[str], required: Optional[str]):
     """Can code for the `provided` platform run on the `required` platform?
 
     Returns true if either platform is ``None``, or the platforms are equal.
@@ -478,7 +510,7 @@ def compatible_platforms(provided, required):
     return False
 
 
-def get_distribution(dist):
+def get_distribution(dist: _EPDistType):
     """Return a current distribution object for a Requirement or string"""
     if isinstance(dist, str):
         dist = Requirement.parse(dist)
@@ -489,78 +521,80 @@ def get_distribution(dist):
     return dist
 
 
-def load_entry_point(dist, group, name):
+def load_entry_point(dist: _EPDistType, group: str, name: str):
     """Return `name` entry point of `group` for `dist` or raise ImportError"""
     return get_distribution(dist).load_entry_point(group, name)
 
 
-def get_entry_map(dist, group=None):
+def get_entry_map(dist: _EPDistType, group: Optional[str] = None):
     """Return the entry point map for `group`, or the full entry map"""
     return get_distribution(dist).get_entry_map(group)
 
 
-def get_entry_info(dist, group, name):
+def get_entry_info(dist: _EPDistType, group: str, name: str):
     """Return the EntryPoint object for `group`+`name`, or ``None``"""
     return get_distribution(dist).get_entry_info(group, name)
 
 
 class IMetadataProvider(Protocol):
-    def has_metadata(self, name) -> bool:
+    def has_metadata(self, name: str) -> bool:
         """Does the package's distribution contain the named metadata?"""
 
-    def get_metadata(self, name):
+    def get_metadata(self, name: str):
         """The named metadata resource as a string"""
 
-    def get_metadata_lines(self, name):
+    def get_metadata_lines(self, name: str):
         """Yield named metadata resource as list of non-blank non-comment lines
 
         Leading and trailing whitespace is stripped from each line, and lines
         with ``#`` as the first non-blank character are omitted."""
 
-    def metadata_isdir(self, name) -> bool:
+    def metadata_isdir(self, name: str) -> bool:
         """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
 
-    def metadata_listdir(self, name):
+    def metadata_listdir(self, name: str):
         """List of metadata names in the directory (like ``os.listdir()``)"""
 
-    def run_script(self, script_name, namespace):
+    def run_script(self, script_name: str, namespace: Dict[str, Any]):
         """Execute the named script in the supplied namespace dictionary"""
 
 
 class IResourceProvider(IMetadataProvider, Protocol):
     """An object that provides access to package resources"""
 
-    def get_resource_filename(self, manager, resource_name):
+    def get_resource_filename(self, manager: "ResourceManager", resource_name: str):
         """Return a true filesystem path for `resource_name`
 
-        `manager` must be an ``IResourceManager``"""
+        `manager` must be a ``ResourceManager``"""
 
-    def get_resource_stream(self, manager, resource_name):
+    def get_resource_stream(self, manager: "ResourceManager", resource_name: str):
         """Return a readable file-like object for `resource_name`
 
-        `manager` must be an ``IResourceManager``"""
+        `manager` must be a ``ResourceManager``"""
 
-    def get_resource_string(self, manager, resource_name) -> bytes:
+    def get_resource_string(
+        self, manager: "ResourceManager", resource_name: str
+    ) -> bytes:
         """Return the contents of `resource_name` as :obj:`bytes`
 
-        `manager` must be an ``IResourceManager``"""
+        `manager` must be a ``ResourceManager``"""
 
-    def has_resource(self, resource_name):
+    def has_resource(self, resource_name: str):
         """Does the package contain the named resource?"""
 
-    def resource_isdir(self, resource_name):
+    def resource_isdir(self, resource_name: str):
         """Is the named resource a directory?  (like ``os.path.isdir()``)"""
 
-    def resource_listdir(self, resource_name):
+    def resource_listdir(self, resource_name: str):
         """List of resource names in the directory (like ``os.listdir()``)"""
 
 
 class WorkingSet:
     """A collection of active distributions on sys.path (or a similar list)"""
 
-    def __init__(self, entries=None):
+    def __init__(self, entries: Optional[Iterable[str]] = None):
         """Create working set from list of path entries (default=sys.path)"""
-        self.entries = []
+        self.entries: List[str] = []
         self.entry_keys = {}
         self.by_key = {}
         self.normalized_to_canonical_keys = {}
@@ -614,7 +648,7 @@ def _build_from_requirements(cls, req_spec):
         sys.path[:] = ws.entries
         return ws
 
-    def add_entry(self, entry):
+    def add_entry(self, entry: str):
         """Add a path item to ``.entries``, finding any distributions on it
 
         ``find_distributions(entry, True)`` is used to find distributions
@@ -629,11 +663,11 @@ def add_entry(self, entry):
         for dist in find_distributions(entry, True):
             self.add(dist, entry, False)
 
-    def __contains__(self, dist):
+    def __contains__(self, dist: "Distribution"):
         """True if `dist` is the active distribution for its project"""
         return self.by_key.get(dist.key) == dist
 
-    def find(self, req):
+    def find(self, req: "Requirement"):
         """Find a distribution matching requirement `req`
 
         If there is an active distribution for the requested project, this
@@ -657,7 +691,7 @@ def find(self, req):
             raise VersionConflict(dist, req)
         return dist
 
-    def iter_entry_points(self, group, name=None):
+    def iter_entry_points(self, group: str, name: Optional[str] = None):
         """Yield entry point objects from `group` matching `name`
 
         If `name` is None, yields all entry points in `group` from all
@@ -671,7 +705,7 @@ def iter_entry_points(self, group, name=None):
             if name is None or name == entry.name
         )
 
-    def run_script(self, requires, script_name):
+    def run_script(self, requires: str, script_name: str):
         """Locate distribution for `requires` and run `script_name` script"""
         ns = sys._getframe(1).f_globals
         name = ns['__name__']
@@ -696,7 +730,13 @@ def __iter__(self):
                     seen[key] = 1
                     yield self.by_key[key]
 
-    def add(self, dist, entry=None, insert=True, replace=False):
+    def add(
+        self,
+        dist: "Distribution",
+        entry: Optional[str] = None,
+        insert: bool = True,
+        replace: bool = False,
+    ):
         """Add `dist` to working set, associated with `entry`
 
         If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
@@ -730,11 +770,11 @@ def add(self, dist, entry=None, insert=True, replace=False):
 
     def resolve(
         self,
-        requirements,
-        env=None,
-        installer=None,
-        replace_conflicting=False,
-        extras=None,
+        requirements: Iterable["Requirement"],
+        env: Optional["Environment"] = None,
+        installer: Optional[_InstallerType] = None,
+        replace_conflicting: bool = False,
+        extras: Optional[Tuple[str, ...]] = None,
     ):
         """List all distributions needed to (recursively) meet `requirements`
 
@@ -804,7 +844,7 @@ def resolve(
 
     def _resolve_dist(
         self, req, best, replace_conflicting, env, installer, required_by, to_activate
-    ):
+    ) -> "Distribution":
         dist = best.get(req.key)
         if dist is None:
             # Find the best distribution and add it to the map
@@ -833,7 +873,13 @@ def _resolve_dist(
             raise VersionConflict(dist, req).with_context(dependent_req)
         return dist
 
-    def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):
+    def find_plugins(
+        self,
+        plugin_env: "Environment",
+        full_env: Optional["Environment"] = None,
+        installer: Optional[_InstallerType] = None,
+        fallback: bool = True,
+    ):
         """Find all activatable distributions in `plugin_env`
 
         Example usage::
@@ -914,7 +960,7 @@ def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True)
 
         return sorted_distributions, error_info
 
-    def require(self, *requirements):
+    def require(self, *requirements: _NestedStr):
         """Ensure that distributions matching `requirements` are activated
 
         `requirements` must be a string or a (possibly-nested) sequence
@@ -930,7 +976,9 @@ def require(self, *requirements):
 
         return needed
 
-    def subscribe(self, callback, existing=True):
+    def subscribe(
+        self, callback: Callable[["Distribution"], object], existing: bool = True
+    ):
         """Invoke `callback` for all distributions
 
         If `existing=True` (default),
@@ -966,12 +1014,14 @@ def __setstate__(self, e_k_b_n_c):
         self.callbacks = callbacks[:]
 
 
-class _ReqExtras(dict):
+class _ReqExtras(Dict["Requirement", Tuple[str, ...]]):
     """
     Map each requirement to the extras that demanded it.
     """
 
-    def markers_pass(self, req, extras=None):
+    def markers_pass(
+        self, req: "Requirement", extras: Optional[Tuple[str, ...]] = None
+    ):
         """
         Evaluate markers for req against each extra that
         demanded it.
@@ -990,7 +1040,10 @@ class Environment:
     """Searchable snapshot of distributions on a search path"""
 
     def __init__(
-        self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR
+        self,
+        search_path: Optional[Sequence[str]] = None,
+        platform: Optional[str] = get_supported_platform(),
+        python: Optional[str] = PY_MAJOR,
     ):
         """Snapshot distributions available on a search path
 
@@ -1013,7 +1066,7 @@ def __init__(
         self.python = python
         self.scan(search_path)
 
-    def can_add(self, dist):
+    def can_add(self, dist: "Distribution"):
         """Is distribution `dist` acceptable for this environment?
 
         The distribution must match the platform and python version
@@ -1027,11 +1080,11 @@ def can_add(self, dist):
         )
         return py_compat and compatible_platforms(dist.platform, self.platform)
 
-    def remove(self, dist):
+    def remove(self, dist: "Distribution"):
         """Remove `dist` from the environment"""
         self._distmap[dist.key].remove(dist)
 
-    def scan(self, search_path=None):
+    def scan(self, search_path: Optional[Sequence[str]] = None):
         """Scan `search_path` for distributions usable in this environment
 
         Any distributions found are added to the environment.
@@ -1046,7 +1099,7 @@ def scan(self, search_path=None):
             for dist in find_distributions(item):
                 self.add(dist)
 
-    def __getitem__(self, project_name):
+    def __getitem__(self, project_name: str):
         """Return a newest-to-oldest list of distributions for `project_name`
 
         Uses case-insensitive `project_name` comparison, assuming all the
@@ -1057,7 +1110,7 @@ def __getitem__(self, project_name):
         distribution_key = project_name.lower()
         return self._distmap.get(distribution_key, [])
 
-    def add(self, dist):
+    def add(self, dist: "Distribution"):
         """Add `dist` if we ``can_add()`` it and it has not already been added"""
         if self.can_add(dist) and dist.has_version():
             dists = self._distmap.setdefault(dist.key, [])
@@ -1065,7 +1118,13 @@ def add(self, dist):
                 dists.append(dist)
                 dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
 
-    def best_match(self, req, working_set, installer=None, replace_conflicting=False):
+    def best_match(
+        self,
+        req: "Requirement",
+        working_set: WorkingSet,
+        installer: Optional[Callable[["Requirement"], Any]] = None,
+        replace_conflicting: bool = False,
+    ):
         """Find distribution best matching `req` and usable on `working_set`
 
         This calls the ``find(req)`` method of the `working_set` to see if a
@@ -1092,7 +1151,11 @@ def best_match(self, req, working_set, installer=None, replace_conflicting=False
         # try to download/install
         return self.obtain(req, installer)
 
-    def obtain(self, requirement, installer=None):
+    def obtain(
+        self,
+        requirement: "Requirement",
+        installer: Optional[Callable[["Requirement"], Any]] = None,
+    ):
         """Obtain a distribution matching `requirement` (e.g. via download)
 
         Obtain a distro that matches requirement (e.g. via download).  In the
@@ -1109,7 +1172,7 @@ def __iter__(self):
             if self[key]:
                 yield key
 
-    def __iadd__(self, other):
+    def __iadd__(self, other: Union["Distribution", "Environment"]):
         """In-place addition of a distribution or environment"""
         if isinstance(other, Distribution):
             self.add(other)
@@ -1121,7 +1184,7 @@ def __iadd__(self, other):
             raise TypeError("Can't add %r to environment" % (other,))
         return self
 
-    def __add__(self, other):
+    def __add__(self, other: Union["Distribution", "Environment"]):
         """Add an environment or distribution to an environment"""
         new = self.__class__([], platform=None, python=None)
         for env in self, other:
@@ -1148,46 +1211,54 @@ class ExtractionError(RuntimeError):
         The exception instance that caused extraction to fail
     """
 
+    manager: "ResourceManager"
+    cache_path: str
+    original_error: Optional[BaseException]
+
 
 class ResourceManager:
     """Manage resource extraction and packages"""
 
-    extraction_path = None
+    extraction_path: Optional[str] = None
 
     def __init__(self):
         self.cached_files = {}
 
-    def resource_exists(self, package_or_requirement, resource_name):
+    def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str):
         """Does the named resource exist?"""
         return get_provider(package_or_requirement).has_resource(resource_name)
 
-    def resource_isdir(self, package_or_requirement, resource_name):
+    def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str):
         """Is the named resource an existing directory?"""
         return get_provider(package_or_requirement).resource_isdir(resource_name)
 
-    def resource_filename(self, package_or_requirement, resource_name):
+    def resource_filename(
+        self, package_or_requirement: _PkgReqType, resource_name: str
+    ):
         """Return a true filesystem path for specified resource"""
         return get_provider(package_or_requirement).get_resource_filename(
             self, resource_name
         )
 
-    def resource_stream(self, package_or_requirement, resource_name):
+    def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str):
         """Return a readable file-like object for specified resource"""
         return get_provider(package_or_requirement).get_resource_stream(
             self, resource_name
         )
 
-    def resource_string(self, package_or_requirement, resource_name) -> bytes:
+    def resource_string(
+        self, package_or_requirement: _PkgReqType, resource_name: str
+    ) -> bytes:
         """Return specified resource as :obj:`bytes`"""
         return get_provider(package_or_requirement).get_resource_string(
             self, resource_name
         )
 
-    def resource_listdir(self, package_or_requirement, resource_name):
+    def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str):
         """List the contents of the named resource directory"""
         return get_provider(package_or_requirement).resource_listdir(resource_name)
 
-    def extraction_error(self):
+    def extraction_error(self) -> NoReturn:
         """Give an error message for problems extracting file(s)"""
 
         old_exc = sys.exc_info()[1]
@@ -1217,7 +1288,7 @@ def extraction_error(self):
         err.original_error = old_exc
         raise err
 
-    def get_cache_path(self, archive_name, names=()):
+    def get_cache_path(self, archive_name: str, names: Iterable[str] = ()):
         """Return absolute location in cache for `archive_name` and `names`
 
         The parent directory of the resulting path will be created if it does
@@ -1269,7 +1340,7 @@ def _warn_unsafe_extraction_path(path):
             ).format(**locals())
             warnings.warn(msg, UserWarning)
 
-    def postprocess(self, tempname, filename):
+    def postprocess(self, tempname: str, filename: str):
         """Perform any platform-specific postprocessing of `tempname`
 
         This is where Mac header rewrites should be done; other platforms don't
@@ -1289,7 +1360,7 @@ def postprocess(self, tempname, filename):
             mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
             os.chmod(tempname, mode)
 
-    def set_extraction_path(self, path):
+    def set_extraction_path(self, path: str):
         """Set the base path where resources will be extracted to, if needed.
 
         If you do not call this routine before any extractions take place, the
@@ -1313,7 +1384,7 @@ def set_extraction_path(self, path):
 
         self.extraction_path = path
 
-    def cleanup_resources(self, force=False) -> List[str]:
+    def cleanup_resources(self, force: bool = False) -> List[str]:
         """
         Delete all extracted resource files and directories, returning a list
         of the file and directory names that could not be successfully removed.
@@ -1337,7 +1408,7 @@ def get_default_cache():
     return os.environ.get('PYTHON_EGG_CACHE') or _user_cache_dir(appname='Python-Eggs')
 
 
-def safe_name(name):
+def safe_name(name: str):
     """Convert an arbitrary string to a standard distribution name
 
     Any runs of non-alphanumeric/. characters are replaced with a single '-'.
@@ -1345,7 +1416,7 @@ def safe_name(name):
     return re.sub('[^A-Za-z0-9.]+', '-', name)
 
 
-def safe_version(version):
+def safe_version(version: str):
     """
     Convert an arbitrary string to a standard version string
     """
@@ -1389,7 +1460,7 @@ def _safe_segment(segment):
     return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
 
 
-def safe_extra(extra):
+def safe_extra(extra: str):
     """Convert an arbitrary string to a standard 'extra' name
 
     Any runs of non-alphanumeric characters are replaced with a single '_',
@@ -1398,7 +1469,7 @@ def safe_extra(extra):
     return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
 
 
-def to_filename(name):
+def to_filename(name: str):
     """Convert a project or version name to its filename-escaped form
 
     Any '-' characters are currently replaced with '_'.
@@ -1406,7 +1477,7 @@ def to_filename(name):
     return name.replace('-', '_')
 
 
-def invalid_marker(text):
+def invalid_marker(text: str):
     """
     Validate text as a PEP 508 environment marker; return an exception
     if invalid or False otherwise.
@@ -1420,7 +1491,7 @@ def invalid_marker(text):
     return False
 
 
-def evaluate_marker(text, extra=None):
+def evaluate_marker(text: str, extra: Optional[str] = None):
     """
     Evaluate a PEP 508 environment marker.
     Return a boolean indicating the marker result in this environment.
@@ -1438,37 +1509,40 @@ def evaluate_marker(text, extra=None):
 class NullProvider:
     """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
 
-    egg_name = None
-    egg_info = None
-    loader = None
+    egg_name: Optional[str] = None
+    egg_info: Optional[str] = None
+    loader: Optional[_LoaderProtocol] = None
+    module_path: Optional[str]  # Some subclasses can have a None module_path
 
-    def __init__(self, module):
+    def __init__(self, module: _ModuleLike):
         self.loader = getattr(module, '__loader__', None)
         self.module_path = os.path.dirname(getattr(module, '__file__', ''))
 
-    def get_resource_filename(self, manager, resource_name):
+    def get_resource_filename(self, manager: ResourceManager, resource_name: str):
         return self._fn(self.module_path, resource_name)
 
-    def get_resource_stream(self, manager, resource_name):
+    def get_resource_stream(self, manager: ResourceManager, resource_name: str):
         return io.BytesIO(self.get_resource_string(manager, resource_name))
 
-    def get_resource_string(self, manager, resource_name) -> bytes:
+    def get_resource_string(
+        self, manager: ResourceManager, resource_name: str
+    ) -> bytes:
         return self._get(self._fn(self.module_path, resource_name))
 
-    def has_resource(self, resource_name):
+    def has_resource(self, resource_name: str):
         return self._has(self._fn(self.module_path, resource_name))
 
     def _get_metadata_path(self, name):
         return self._fn(self.egg_info, name)
 
-    def has_metadata(self, name) -> bool:
+    def has_metadata(self, name: str) -> bool:
         if not self.egg_info:
             return False
 
         path = self._get_metadata_path(name)
         return self._has(path)
 
-    def get_metadata(self, name):
+    def get_metadata(self, name: str):
         if not self.egg_info:
             return ""
         path = self._get_metadata_path(name)
@@ -1481,24 +1555,24 @@ def get_metadata(self, name):
             exc.reason += ' in {} file at path: {}'.format(name, path)
             raise
 
-    def get_metadata_lines(self, name):
+    def get_metadata_lines(self, name: str):
         return yield_lines(self.get_metadata(name))
 
-    def resource_isdir(self, resource_name):
+    def resource_isdir(self, resource_name: str):
         return self._isdir(self._fn(self.module_path, resource_name))
 
-    def metadata_isdir(self, name) -> bool:
+    def metadata_isdir(self, name: str) -> bool:
         return bool(self.egg_info and self._isdir(self._fn(self.egg_info, name)))
 
-    def resource_listdir(self, resource_name):
+    def resource_listdir(self, resource_name: str):
         return self._listdir(self._fn(self.module_path, resource_name))
 
-    def metadata_listdir(self, name):
+    def metadata_listdir(self, name: str):
         if self.egg_info:
             return self._listdir(self._fn(self.egg_info, name))
         return []
 
-    def run_script(self, script_name, namespace):
+    def run_script(self, script_name: str, namespace: Dict[str, Any]):
         script = 'scripts/' + script_name
         if not self.has_metadata(script):
             raise ResolutionError(
@@ -1541,7 +1615,7 @@ def _listdir(self, path):
             "Can't perform this operation for unregistered loader type"
         )
 
-    def _fn(self, base, resource_name):
+    def _fn(self, base, resource_name: str):
         self._validate_resource_path(resource_name)
         if resource_name:
             return os.path.join(base, *resource_name.split('/'))
@@ -1624,7 +1698,8 @@ def _validate_resource_path(path):
 
     def _get(self, path) -> bytes:
         if hasattr(self.loader, 'get_data') and self.loader:
-            return self.loader.get_data(path)
+            # Already checked get_data exists
+            return self.loader.get_data(path)  # type: ignore[attr-defined]
         raise NotImplementedError(
             "Can't perform this operation for loaders without 'get_data()'"
         )
@@ -1647,7 +1722,7 @@ def _parents(path):
 class EggProvider(NullProvider):
     """Provider based on a virtual filesystem"""
 
-    def __init__(self, module):
+    def __init__(self, module: _ModuleLike):
         super().__init__(module)
         self._setup_prefix()
 
@@ -1658,7 +1733,7 @@ def _setup_prefix(self):
         egg = next(eggs, None)
         egg and self._set_egg(egg)
 
-    def _set_egg(self, path):
+    def _set_egg(self, path: str):
         self.egg_name = os.path.basename(path)
         self.egg_info = os.path.join(path, 'EGG-INFO')
         self.egg_root = path
@@ -1676,7 +1751,7 @@ def _isdir(self, path) -> bool:
     def _listdir(self, path):
         return os.listdir(path)
 
-    def get_resource_stream(self, manager, resource_name):
+    def get_resource_stream(self, manager: object, resource_name: str):
         return open(self._fn(self.module_path, resource_name), 'rb')
 
     def _get(self, path) -> bytes:
@@ -1717,13 +1792,14 @@ def __init__(self):
 empty_provider = EmptyProvider()
 
 
-class ZipManifests(dict):
+class ZipManifests(Dict[str, "MemoizedZipManifests.manifest_mod"]):
     """
     zip manifest builder
     """
 
+    # `path` could be `Union["StrPath", IO[bytes]]` but that violates the LSP for `MemoizedZipManifests.load`
     @classmethod
-    def build(cls, path):
+    def build(cls, path: str):
         """
         Build a dictionary similar to the zipimport directory
         caches, except instead of tuples, store ZipInfo objects.
@@ -1749,9 +1825,11 @@ class MemoizedZipManifests(ZipManifests):
     Memoized zipfile manifests.
     """
 
-    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
+    class manifest_mod(NamedTuple):
+        manifest: Dict[str, zipfile.ZipInfo]
+        mtime: float
 
-    def load(self, path):
+    def load(self, path: str):  # type: ignore[override] # ZipManifests.load is a classmethod
         """
         Load a manifest at path or return a suitable manifest already loaded.
         """
@@ -1768,10 +1846,12 @@ def load(self, path):
 class ZipProvider(EggProvider):
     """Resource support for zips and eggs"""
 
-    eagers = None
+    eagers: Optional[List[str]] = None
     _zip_manifests = MemoizedZipManifests()
+    # ZipProvider's loader should always be a zipimporter or equivalent
+    loader: zipimport.zipimporter
 
-    def __init__(self, module):
+    def __init__(self, module: _ModuleLike):
         super().__init__(module)
         self.zip_pre = self.loader.archive + os.sep
 
@@ -1797,7 +1877,7 @@ def _parts(self, zip_path):
     def zipinfo(self):
         return self._zip_manifests.load(self.loader.archive)
 
-    def get_resource_filename(self, manager, resource_name):
+    def get_resource_filename(self, manager: ResourceManager, resource_name: str):
         if not self.egg_name:
             raise NotImplementedError(
                 "resource_filename() only supported for .egg, not .zip"
@@ -1820,7 +1900,7 @@ def _get_date_and_size(zip_stat):
         return timestamp, size
 
     # FIXME: 'ZipProvider._extract_resource' is too complex (12)
-    def _extract_resource(self, manager, zip_path):  # noqa: C901
+    def _extract_resource(self, manager: ResourceManager, zip_path):  # noqa: C901
         if zip_path in self._index():
             for name in self._index()[zip_path]:
                 last = self._extract_resource(manager, os.path.join(zip_path, name))
@@ -1834,6 +1914,10 @@ def _extract_resource(self, manager, zip_path):  # noqa: C901
                 '"os.rename" and "os.unlink" are not supported ' 'on this platform'
             )
         try:
+            if not self.egg_name:
+                raise OSError(
+                    '"egg_name" is empty. This likely means no egg could be found from the "module_path".'
+                )
             real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))
 
             if self._is_current(real_path, zip_path):
@@ -1922,10 +2006,10 @@ def _isdir(self, fspath) -> bool:
     def _listdir(self, fspath):
         return list(self._index().get(self._zipinfo_name(fspath), ()))
 
-    def _eager_to_zip(self, resource_name):
+    def _eager_to_zip(self, resource_name: str):
         return self._zipinfo_name(self._fn(self.egg_root, resource_name))
 
-    def _resource_to_zip(self, resource_name):
+    def _resource_to_zip(self, resource_name: str):
         return self._zipinfo_name(self._fn(self.module_path, resource_name))
 
 
@@ -1944,13 +2028,13 @@ class FileMetadata(EmptyProvider):
     the provided location.
     """
 
-    def __init__(self, path):
+    def __init__(self, path: "StrPath"):
         self.path = path
 
     def _get_metadata_path(self, name):
         return self.path
 
-    def has_metadata(self, name) -> bool:
+    def has_metadata(self, name: str) -> bool:
         return name == 'PKG-INFO' and os.path.isfile(self.path)
 
     def get_metadata(self, name):
@@ -1993,7 +2077,7 @@ class PathMetadata(DefaultProvider):
         dist = Distribution.from_filename(egg_path, metadata=metadata)
     """
 
-    def __init__(self, path, egg_info):
+    def __init__(self, path: str, egg_info: str):
         self.module_path = path
         self.egg_info = egg_info
 
@@ -2001,7 +2085,7 @@ def __init__(self, path, egg_info):
 class EggMetadata(ZipProvider):
     """Metadata provider for .egg files"""
 
-    def __init__(self, importer):
+    def __init__(self, importer: zipimport.zipimporter):
         """Create a metadata provider from a zipimporter"""
 
         self.zip_pre = importer.archive + os.sep
@@ -2018,7 +2102,7 @@ def __init__(self, importer):
 ] = _declare_state('dict', '_distribution_finders', {})
 
 
-def register_finder(importer_type, distribution_finder):
+def register_finder(importer_type: type, distribution_finder: _AdapterType):
     """Register `distribution_finder` to find distributions in sys.path items
 
     `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2028,14 +2112,16 @@ def register_finder(importer_type, distribution_finder):
     _distribution_finders[importer_type] = distribution_finder
 
 
-def find_distributions(path_item, only=False):
+def find_distributions(path_item: str, only: bool = False):
     """Yield distributions accessible via `path_item`"""
     importer = get_importer(path_item)
     finder = _find_adapter(_distribution_finders, importer)
     return finder(importer, path_item, only)
 
 
-def find_eggs_in_zip(importer, path_item, only=False):
+def find_eggs_in_zip(
+    importer: zipimport.zipimporter, path_item: str, only: bool = False
+):
     """
     Find eggs in zip files; possibly multiple nested eggs.
     """
@@ -2064,14 +2150,16 @@ def find_eggs_in_zip(importer, path_item, only=False):
 register_finder(zipimport.zipimporter, find_eggs_in_zip)
 
 
-def find_nothing(importer, path_item, only=False):
+def find_nothing(
+    importer: Optional[object], path_item: Optional[str], only: Optional[bool] = False
+):
     return ()
 
 
 register_finder(object, find_nothing)
 
 
-def find_on_path(importer, path_item, only=False):
+def find_on_path(importer: Optional[object], path_item, only=False):
     """Yield distributions accessible on a sys.path directory"""
     path_item = _normalize_cached(path_item)
 
@@ -2196,7 +2284,7 @@ def resolve_egg_link(path):
 )
 
 
-def register_namespace_handler(importer_type, namespace_handler):
+def register_namespace_handler(importer_type: type, namespace_handler: _AdapterType):
     """Register `namespace_handler` to declare namespace packages
 
     `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2251,7 +2339,7 @@ def _handle_ns(packageName, path_item):
     return subpath
 
 
-def _rebuild_mod_path(orig_path, package_name, module):
+def _rebuild_mod_path(orig_path, package_name, module: types.ModuleType):
     """
     Rebuild module.__path__ ensuring that all entries are ordered
     corresponding to their sys.path order
@@ -2285,7 +2373,7 @@ def position_in_sys_path(path):
         module.__path__ = new_path
 
 
-def declare_namespace(packageName):
+def declare_namespace(packageName: str):
     """Declare that package 'packageName' is a namespace package"""
 
     msg = (
@@ -2302,7 +2390,7 @@ def declare_namespace(packageName):
         if packageName in _namespace_packages:
             return
 
-        path = sys.path
+        path: MutableSequence[str] = sys.path
         parent, _, _ = packageName.rpartition('.')
 
         if parent:
@@ -2328,7 +2416,7 @@ def declare_namespace(packageName):
         _imp.release_lock()
 
 
-def fixup_namespace_packages(path_item, parent=None):
+def fixup_namespace_packages(path_item: str, parent: Optional[str] = None):
     """Ensure that previously-declared namespace packages include path_item"""
     _imp.acquire_lock()
     try:
@@ -2340,7 +2428,12 @@ def fixup_namespace_packages(path_item, parent=None):
         _imp.release_lock()
 
 
-def file_ns_handler(importer, path_item, packageName, module):
+def file_ns_handler(
+    importer: Optional[importlib.abc.PathEntryFinder],
+    path_item,
+    packageName,
+    module: types.ModuleType,
+):
     """Compute an ns-package subpath for a filesystem or zipfile importer"""
 
     subpath = os.path.join(path_item, packageName.split('.')[-1])
@@ -2360,19 +2453,24 @@ def file_ns_handler(importer, path_item, packageName, module):
 register_namespace_handler(importlib.machinery.FileFinder, file_ns_handler)
 
 
-def null_ns_handler(importer, path_item, packageName, module):
+def null_ns_handler(
+    importer: Optional[importlib.abc.PathEntryFinder],
+    path_item: Optional[str],
+    packageName: Optional[str],
+    module: Optional[_ModuleLike],
+):
     return None
 
 
 register_namespace_handler(object, null_ns_handler)
 
 
-def normalize_path(filename):
+def normalize_path(filename: "StrPath"):
     """Normalize a file/dir name for comparison purposes"""
     return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
 
 
-def _cygwin_patch(filename):  # pragma: nocover
+def _cygwin_patch(filename: "StrPath"):  # pragma: nocover
     """
     Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
     symlink components. Using
@@ -2438,7 +2536,14 @@ def _set_parent_ns(packageName):
 class EntryPoint:
     """Object representing an advertised importable object"""
 
-    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+    def __init__(
+        self,
+        name: str,
+        module_name: str,
+        attrs: Iterable[str] = (),
+        extras: Iterable[str] = (),
+        dist: Optional["Distribution"] = None,
+    ):
         if not MODULE(module_name):
             raise ValueError("Invalid module name", module_name)
         self.name = name
@@ -2458,7 +2563,12 @@ def __str__(self):
     def __repr__(self):
         return "EntryPoint.parse(%r)" % str(self)
 
-    def load(self, require=True, *args, **kwargs):
+    def load(
+        self,
+        require: bool = True,
+        *args: Optional[Union[Environment, _InstallerType]],
+        **kwargs: Optional[Union[Environment, _InstallerType]],
+    ):
         """
         Require packages for this EntryPoint, then resolve it.
         """
@@ -2470,7 +2580,9 @@ def load(self, require=True, *args, **kwargs):
                 stacklevel=2,
             )
         if require:
-            self.require(*args, **kwargs)
+            # We could pass `env` and `installer` directly,
+            # but we keep `*args` and `**kwargs` for backwards compatibility
+            self.require(*args, **kwargs)  # type: ignore
         return self.resolve()
 
     def resolve(self):
@@ -2483,7 +2595,11 @@ def resolve(self):
         except AttributeError as exc:
             raise ImportError(str(exc)) from exc
 
-    def require(self, env=None, installer=None):
+    def require(
+        self,
+        env: Optional[Environment] = None,
+        installer: Optional[_InstallerType] = None,
+    ):
         if not self.dist:
             error_cls = UnknownExtra if self.extras else AttributeError
             raise error_cls("Can't require() without a distribution", self)
@@ -2507,7 +2623,7 @@ def require(self, env=None, installer=None):
     )
 
     @classmethod
-    def parse(cls, src, dist=None):
+    def parse(cls, src: str, dist: Optional["Distribution"] = None):
         """Parse a single entry point from string `src`
 
         Entry point syntax follows the form::
@@ -2536,7 +2652,12 @@ def _parse_extras(cls, extras_spec):
         return req.extras
 
     @classmethod
-    def parse_group(cls, group, lines, dist=None):
+    def parse_group(
+        cls,
+        group: str,
+        lines: _NestedStr,
+        dist: Optional["Distribution"] = None,
+    ):
         """Parse an entry point group"""
         if not MODULE(group):
             raise ValueError("Invalid group name", group)
@@ -2549,13 +2670,17 @@ def parse_group(cls, group, lines, dist=None):
         return this
 
     @classmethod
-    def parse_map(cls, data, dist=None):
+    def parse_map(
+        cls,
+        data: Union[str, Iterable[str], Dict[str, Union[str, Iterable[str]]]],
+        dist: Optional["Distribution"] = None,
+    ):
         """Parse a map of entry point groups"""
         if isinstance(data, dict):
             _data = data.items()
         else:
             _data = split_sections(data)
-        maps = {}
+        maps: Dict[str, Dict[str, "EntryPoint"]] = {}
         for group, lines in _data:
             if group is None:
                 if not lines:
@@ -2590,13 +2715,13 @@ class Distribution:
 
     def __init__(
         self,
-        location=None,
-        metadata=None,
-        project_name=None,
-        version=None,
-        py_version=PY_MAJOR,
-        platform=None,
-        precedence=EGG_DIST,
+        location: Optional[str] = None,
+        metadata: _MetadataType = None,
+        project_name: Optional[str] = None,
+        version: Optional[str] = None,
+        py_version: Optional[str] = PY_MAJOR,
+        platform: Optional[str] = None,
+        precedence: int = EGG_DIST,
     ):
         self.project_name = safe_name(project_name or 'Unknown')
         if version is not None:
@@ -2608,7 +2733,13 @@ def __init__(
         self._provider = metadata or empty_provider
 
     @classmethod
-    def from_location(cls, location, basename, metadata=None, **kw):
+    def from_location(
+        cls,
+        location: str,
+        basename: str,
+        metadata: _MetadataType = None,
+        **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
+    ):
         project_name, version, py_version, platform = [None] * 4
         basename, ext = os.path.splitext(basename)
         if ext.lower() in _distributionImpl:
@@ -2646,25 +2777,25 @@ def hashcmp(self):
     def __hash__(self):
         return hash(self.hashcmp)
 
-    def __lt__(self, other):
+    def __lt__(self, other: "Distribution"):
         return self.hashcmp < other.hashcmp
 
-    def __le__(self, other):
+    def __le__(self, other: "Distribution"):
         return self.hashcmp <= other.hashcmp
 
-    def __gt__(self, other):
+    def __gt__(self, other: "Distribution"):
         return self.hashcmp > other.hashcmp
 
-    def __ge__(self, other):
+    def __ge__(self, other: "Distribution"):
         return self.hashcmp >= other.hashcmp
 
-    def __eq__(self, other):
+    def __eq__(self, other: object):
         if not isinstance(other, self.__class__):
             # It's not a Distribution, so they are not equal
             return False
         return self.hashcmp == other.hashcmp
 
-    def __ne__(self, other):
+    def __ne__(self, other: object):
         return not self == other
 
     # These properties have to be lazy so that we don't have to load any
@@ -2774,7 +2905,7 @@ def _build_dep_map(self):
                 dm.setdefault(extra, []).extend(parse_requirements(reqs))
         return dm
 
-    def requires(self, extras=()):
+    def requires(self, extras: Iterable[str] = ()):
         """List of Requirements needed for this distro if `extras` are used"""
         dm = self._dep_map
         deps = []
@@ -2813,7 +2944,7 @@ def _get_version(self):
         lines = self._get_metadata(self.PKG_INFO)
         return _version_from_file(lines)
 
-    def activate(self, path=None, replace=False):
+    def activate(self, path: Optional[List[str]] = None, replace: bool = False):
         """Ensure distribution is importable on `path` (default=sys.path)"""
         if path is None:
             path = sys.path
@@ -2863,7 +2994,12 @@ def __dir__(self):
         )
 
     @classmethod
-    def from_filename(cls, filename, metadata=None, **kw):
+    def from_filename(
+        cls,
+        filename: str,
+        metadata: _MetadataType = None,
+        **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
+    ):
         return cls.from_location(
             _normalize_cached(filename), os.path.basename(filename), metadata, **kw
         )
@@ -2877,14 +3013,14 @@ def as_requirement(self):
 
         return Requirement.parse(spec)
 
-    def load_entry_point(self, group, name):
+    def load_entry_point(self, group: str, name: str):
         """Return the `name` entry point of `group` or raise ImportError"""
         ep = self.get_entry_info(group, name)
         if ep is None:
             raise ImportError("Entry point %r not found" % ((group, name),))
         return ep.load()
 
-    def get_entry_map(self, group=None):
+    def get_entry_map(self, group: Optional[str] = None):
         """Return the entry point map for `group`, or the full entry map"""
         if not hasattr(self, "_ep_map"):
             self._ep_map = EntryPoint.parse_map(
@@ -2894,12 +3030,17 @@ def get_entry_map(self, group=None):
             return self._ep_map.get(group, {})
         return self._ep_map
 
-    def get_entry_info(self, group, name):
+    def get_entry_info(self, group: str, name: str):
         """Return the EntryPoint object for `group`+`name`, or ``None``"""
         return self.get_entry_map(group).get(name)
 
     # FIXME: 'Distribution.insert_on' is too complex (13)
-    def insert_on(self, path, loc=None, replace=False):  # noqa: C901
+    def insert_on(  # noqa: C901
+        self,
+        path: List[str],
+        loc=None,
+        replace: bool = False,
+    ):
         """Ensure self.location is on path
 
         If replace=False (default):
@@ -3004,13 +3145,14 @@ def has_version(self):
             return False
         return True
 
-    def clone(self, **kw):
+    def clone(self, **kw: Optional[Union[str, int, IResourceProvider]]):
         """Copy this distribution, substituting in any changed keyword args"""
         names = 'project_name version py_version platform location precedence'
         for attr in names.split():
             kw.setdefault(attr, getattr(self, attr, None))
         kw.setdefault('metadata', self._provider)
-        return self.__class__(**kw)
+        # Unsafe unpacking, but we keep **kw for backwards and subclassing compatibility
+        return self.__class__(**kw)  # type:ignore[arg-type]
 
     @property
     def extras(self):
@@ -3107,7 +3249,7 @@ def issue_warning(*args, **kw):
     warnings.warn(stacklevel=level + 1, *args, **kw)
 
 
-def parse_requirements(strs):
+def parse_requirements(strs: _NestedStr):
     """
     Yield ``Requirement`` objects for each specification in `strs`.
 
@@ -3121,14 +3263,15 @@ class RequirementParseError(_packaging_requirements.InvalidRequirement):
 
 
 class Requirement(_packaging_requirements.Requirement):
-    def __init__(self, requirement_string):
+    def __init__(self, requirement_string: str):
         """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
         super().__init__(requirement_string)
         self.unsafe_name = self.name
         project_name = safe_name(self.name)
         self.project_name, self.key = project_name, project_name.lower()
         self.specs = [(spec.operator, spec.version) for spec in self.specifier]
-        self.extras = tuple(map(safe_extra, self.extras))
+        # packaging.requirements.Requirement stores extras as a set; we keep a variable-length tuple
+        self.extras: Tuple[str, ...] = tuple(map(safe_extra, self.extras))
         self.hashCmp = (
             self.key,
             self.url,
@@ -3138,13 +3281,13 @@ def __init__(self, requirement_string):
         )
         self.__hash = hash(self.hashCmp)
 
-    def __eq__(self, other):
+    def __eq__(self, other: object):
         return isinstance(other, Requirement) and self.hashCmp == other.hashCmp
 
     def __ne__(self, other):
         return not self == other
 
-    def __contains__(self, item):
+    def __contains__(self, item: Union[Distribution, str, Tuple[str, ...]]):
         if isinstance(item, Distribution):
             if item.key != self.key:
                 return False
@@ -3163,7 +3306,7 @@ def __repr__(self):
         return "Requirement.parse(%r)" % str(self)
 
     @staticmethod
-    def parse(s):
+    def parse(s: Union[str, Iterable[str]]):
         (req,) = parse_requirements(s)
         return req
 
@@ -3178,7 +3321,7 @@ def _always_object(classes):
     return classes
 
 
-def _find_adapter(registry, ob):
+def _find_adapter(registry: Mapping[type, _AdapterType], ob: object):
     """Return an adapter factory for `ob` from `registry`"""
     types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
     for t in types:
@@ -3189,7 +3332,7 @@ def _find_adapter(registry, ob):
     raise TypeError(f"Could not find adapter for {registry} and {ob}")
 
 
-def ensure_directory(path):
+def ensure_directory(path: str):
     """Ensure that the parent directory of `path` exists"""
     dirname = os.path.dirname(path)
     os.makedirs(dirname, exist_ok=True)
@@ -3208,7 +3351,7 @@ def _bypass_ensure_directory(path):
             pass
 
 
-def split_sections(s):
+def split_sections(s: _NestedStr):
     """Split a string or iterable thereof into (section, content) pairs
 
     Each ``section`` is a stripped version of the section header ("[section]")
diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index bfb9eb8bdf..a1b7490dfb 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -1,5 +1,8 @@
+from importlib.machinery import ModuleSpec
 import importlib.util
 import sys
+from types import ModuleType
+from typing import Iterable, Optional, Sequence
 
 
 class VendorImporter:
@@ -8,7 +11,12 @@ class VendorImporter:
     or otherwise naturally-installed packages from root_name.
     """
 
-    def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
+    def __init__(
+        self,
+        root_name: str,
+        vendored_names: Iterable[str] = (),
+        vendor_pkg: Optional[str] = None,
+    ):
         self.root_name = root_name
         self.vendored_names = set(vendored_names)
         self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
@@ -26,7 +34,7 @@ def _module_matches_namespace(self, fullname):
         root, base, target = fullname.partition(self.root_name + '.')
         return not root and any(map(target.startswith, self.vendored_names))
 
-    def load_module(self, fullname):
+    def load_module(self, fullname: str):
         """
         Iterate over the search path to locate and load fullname.
         """
@@ -48,16 +56,22 @@ def load_module(self, fullname):
                 "distribution.".format(**locals())
             )
 
-    def create_module(self, spec):
+    def create_module(self, spec: ModuleSpec):
         return self.load_module(spec.name)
 
-    def exec_module(self, module):
+    def exec_module(self, module: ModuleType):
         pass
 
-    def find_spec(self, fullname, path=None, target=None):
+    def find_spec(
+        self,
+        fullname: str,
+        path: Optional[Sequence[str]] = None,
+        target: Optional[ModuleType] = None,
+    ):
         """Return a module spec for vendored names."""
         return (
-            importlib.util.spec_from_loader(fullname, self)
+            # This should fix itself in the next mypy release: https://github.com/python/typeshed/pull/11890
+            importlib.util.spec_from_loader(fullname, self)  # type: ignore[arg-type]
             if self._module_matches_namespace(fullname)
             else None
         )

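As context for the EntryPoint annotations above, here is a minimal sketch of
how the now-typed parse/resolve API is exercised; the `mycli`/`mypkg` names
are hypothetical and used only for illustration:

    # Hypothetical names, for illustration only.
    from pkg_resources import EntryPoint

    ep = EntryPoint.parse("mycli = mypkg.cli:main [extra1]")
    assert ep.name == "mycli"
    assert ep.module_name == "mypkg.cli"
    assert ep.attrs == ("main",)
    # ep.resolve() would import mypkg.cli and return its `main` attribute;
    # ep.require() needs an associated Distribution, hence Optional["Distribution"].
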
From d6cc7ad33b10c25baecc2ef362280b30e0900920 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 22 May 2024 12:41:10 +0200
Subject: [PATCH 0697/1761] Fix a couple typos found by codespell

---
 pytest.ini                    | 2 +-
 setuptools/config/setupcfg.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytest.ini b/pytest.ini
index 0c9651d96f..57ab865366 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -29,7 +29,7 @@ filterwarnings=
 	# realpython/pytest-mypy#152
 	ignore:'encoding' argument not specified::pytest_mypy
 
-	# TODO: Set encoding when openning/writing tmpdir files with pytest's LocalPath.open
+	# TODO: Set encoding when opening/writing tmpdir files with pytest's LocalPath.open
 	# see pypa/setuptools#4326
 	ignore:'encoding' argument not specified::_pytest
 
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 2912d3e143..59d9cf8adb 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -108,7 +108,7 @@ def _apply(
     filenames = [*other_files, filepath]
 
     try:
-        _Distribution.parse_config_files(dist, filenames=filenames)  # type: ignore[arg-type] # TODO: fix in disutils stubs
+        _Distribution.parse_config_files(dist, filenames=filenames)  # type: ignore[arg-type] # TODO: fix in distutils stubs
         handlers = parse_configuration(
             dist, dist.command_options, ignore_option_errors=ignore_option_errors
         )

From 0ab0a4d2e3f66c94eacf57d9c42ce55c7b60a550 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 22 May 2024 12:12:36 -0400
Subject: [PATCH 0698/1761] Use set instead of True-only dict for non-public
 names

---
 pkg_resources/__init__.py          | 10 +++++-----
 setuptools/command/easy_install.py | 16 ++++++++--------
 setuptools/package_index.py        |  4 ++--
 setuptools/tests/test_dist_info.py |  2 +-
 4 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index c86d9f095c..d29b127e15 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -719,7 +719,7 @@ def __iter__(self):
         The yield order is the order in which the items' path entries were
         added to the working set.
         """
-        seen = {}
+        seen = set()
         for item in self.entries:
             if item not in self.entry_keys:
                 # workaround a cache issue
@@ -727,7 +727,7 @@ def __iter__(self):
 
             for key in self.entry_keys[item]:
                 if key not in seen:
-                    seen[key] = 1
+                    seen.add(key)
                     yield self.by_key[key]
 
     def add(
@@ -803,7 +803,7 @@ def resolve(
         # set up the stack
         requirements = list(requirements)[::-1]
         # set of processed requirements
-        processed = {}
+        processed = set()
         # key -> dist
         best = {}
         to_activate = []
@@ -837,7 +837,7 @@ def resolve(
                 required_by[new_requirement].add(req.project_name)
                 req_extras[new_requirement] = req.extras
 
-            processed[req] = True
+            processed.add(req)
 
         # return list of distros to activate
         return to_activate
@@ -1310,7 +1310,7 @@ def get_cache_path(self, archive_name: str, names: Iterable[str] = ()):
 
         self._warn_unsafe_extraction_path(extract_path)
 
-        self.cached_files[target_path] = 1
+        self.cached_files[target_path] = True
         return target_path
 
     @staticmethod
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 41ff382fe4..849295e166 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1046,7 +1046,7 @@ def exe_to_egg(self, dist_filename, egg_tmp):  # noqa: C901
         prefixes = get_exe_prefixes(dist_filename)
         to_compile = []
         native_libs = []
-        top_level = {}
+        top_level = set()
 
         def process(src, dst):
             s = src.lower()
@@ -1058,10 +1058,10 @@ def process(src, dst):
                     dl = dst.lower()
                     if dl.endswith('.pyd') or dl.endswith('.dll'):
                         parts[-1] = bdist_egg.strip_module(parts[-1])
-                        top_level[os.path.splitext(parts[0])[0]] = 1
+                        top_level.add(os.path.splitext(parts[0])[0])
                         native_libs.append(src)
                     elif dl.endswith('.py') and old != 'SCRIPTS/':
-                        top_level[os.path.splitext(parts[0])[0]] = 1
+                        top_level.add(os.path.splitext(parts[0])[0])
                         to_compile.append(dst)
                     return dst
             if not src.endswith('.pth'):
@@ -1483,14 +1483,14 @@ def get_site_dirs():
 def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
     """Yield sys.path directories that might contain "old-style" packages"""
 
-    seen = {}
+    seen = set()
 
     for dirname in inputs:
         dirname = normalize_path(dirname)
         if dirname in seen:
             continue
 
-        seen[dirname] = 1
+        seen.add(dirname)
         if not os.path.isdir(dirname):
             continue
 
@@ -1519,7 +1519,7 @@ def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
                 if line in seen:
                     continue
 
-                seen[line] = 1
+                seen.add(line)
                 if not os.path.isdir(line):
                     continue
 
@@ -1621,7 +1621,7 @@ def __init__(self, filename, sitedirs=()):
     def _load_raw(self):
         paths = []
         dirty = saw_import = False
-        seen = dict.fromkeys(self.sitedirs)
+        seen = set(self.sitedirs)
         f = open(self.filename, 'rt', encoding=py39.LOCALE_ENCODING)
         # ^-- Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
         for line in f:
@@ -1642,7 +1642,7 @@ def _load_raw(self):
                 dirty = True
                 paths.pop()
                 continue
-            seen[normalized_path] = 1
+            seen.add(normalized_path)
         f.close()
         # remove any trailing empty/blank line
         while paths and not paths[-1].strip():
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index c3ffee41a7..c91e419923 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -627,7 +627,7 @@ def fetch_distribution(  # noqa: C901  # is too complex (14)  # FIXME
         """
         # process a Requirement
         self.info("Searching for %s", requirement)
-        skipped = {}
+        skipped = set()
         dist = None
 
         def find(req, env=None):
@@ -642,7 +642,7 @@ def find(req, env=None):
                             "Skipping development or system egg: %s",
                             dist,
                         )
-                        skipped[dist] = 1
+                        skipped.add(dist)
                     continue
 
                 test = dist in req and (dist.precedence <= SOURCE_DIST or not source)
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
index c6fe97e2ba..44be6c3284 100644
--- a/setuptools/tests/test_dist_info.py
+++ b/setuptools/tests/test_dist_info.py
@@ -122,7 +122,7 @@ def test_output_dir(self, tmp_path, keep_egg_info):
         run_command("dist_info", "--output-dir", out, *opts, cwd=tmp_path)
         assert len(list(out.glob("*.dist-info"))) == 1
         assert len(list(tmp_path.glob("*.dist-info"))) == 0
-        expected_egg_info = 1 if keep_egg_info else 0
+        expected_egg_info = int(keep_egg_info)
         assert len(list(out.glob("*.egg-info"))) == expected_egg_info
         assert len(list(tmp_path.glob("*.egg-info"))) == 0
         assert len(list(out.glob("*.__bkp__"))) == 0

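The pattern this patch replaces predates the built-in set type: a dict whose
values are all 1/True was once the idiomatic membership structure. A set
expresses the same intent directly; a minimal before/after sketch:

    # Before: True-only dict used purely for membership tests.
    seen = {}
    for key in ["a", "b", "a"]:
        if key not in seen:
            seen[key] = 1
            ...

    # After: a set states the intent and drops the dummy values.
    seen = set()
    for key in ["a", "b", "a"]:
        if key not in seen:
            seen.add(key)
            ...
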
From efb8655de9713d59335cf2e8e9847f457c8cd711 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 24 May 2024 12:33:10 +0100
Subject: [PATCH 0699/1761] Fix mypy error

---
 pkg_resources/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index c86d9f095c..c70b792e39 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -2680,7 +2680,7 @@ def parse_map(
             _data = data.items()
         else:
             _data = split_sections(data)
-        maps: Dict[str, Dict[str, "EntryPoint"]] = {}
+        maps: Dict[str, Dict[str, EntryPoint]] = {}
         for group, lines in _data:
             if group is None:
                 if not lines:

From c22fd02b835c9301a5b05b25af38766f1c616d3c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 24 May 2024 13:25:37 +0100
Subject: [PATCH 0700/1761] Simplify tool.setuptools.packages.find by using
 an include rule

---
 pyproject.toml | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 7e9e66df9f..dd2140473a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -158,15 +158,14 @@ PKG-INFO = "setuptools.command.egg_info:write_pkg_info"
 include-package-data = false
 
 [tool.setuptools.packages.find]
+include = [
+	"setuptools*",
+	"pkg_resources*",
+	"_distutils_hack*",
+]
 exclude = [
 	"*.tests",
 	"*.tests.*",
-	"tools*",
-	"debian*",
-	"launcher*",
-	"newsfragments*",
-	"docs",
-	"docs.*",
 ]
 namespaces = true
 

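For reference, the include rule corresponds to the programmatic
package-discovery API, where `include` is applied as a whitelist before
`exclude` filters; a hedged sketch of the roughly equivalent call
(`namespaces = true` maps to the namespace-aware finder, and the repository
layout is assumed):

    from setuptools import find_namespace_packages

    # Roughly equivalent to the [tool.setuptools.packages.find] table above.
    packages = find_namespace_packages(
        include=["setuptools*", "pkg_resources*", "_distutils_hack*"],
        exclude=["*.tests", "*.tests.*"],
    )
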
From 3ce6d3ff332808ac1ff0dce22e06d6b30eebd274 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 22 May 2024 13:30:30 -0400
Subject: [PATCH 0701/1761] Generic typing for register methods in
 `pkg_resources`

---
 pkg_resources/__init__.py | 43 +++++++++++++++++++++++----------------
 1 file changed, 25 insertions(+), 18 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index c70b792e39..c10a88e5fd 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -109,7 +109,7 @@
 )
 
 
-T = TypeVar("T")
+_T = TypeVar("_T")
 # Type aliases
 _NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
 _InstallerType = Callable[["Requirement"], Optional["Distribution"]]
@@ -118,7 +118,12 @@
 _MetadataType = Optional["IResourceProvider"]
 # Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
 _ModuleLike = Union[object, types.ModuleType]
-_AdapterType = Callable[..., Any]  # Incomplete
+_ProviderFactoryType = Callable[[_ModuleLike], "IResourceProvider"]
+_DistFinderType = Callable[[_T, str, bool], Iterable["Distribution"]]
+_NSHandlerType = Callable[[_T, str, str, types.ModuleType], Optional[str]]
+_AdapterT = TypeVar(
+    "_AdapterT", _DistFinderType[Any], _ProviderFactoryType, _NSHandlerType[Any]
+)
 
 
 # Use _typeshed.importlib.LoaderProtocol once available https://github.com/python/typeshed/pull/11890
@@ -142,7 +147,7 @@ class PEP440Warning(RuntimeWarning):
 _state_vars: Dict[str, str] = {}
 
 
-def _declare_state(vartype: str, varname: str, initial_value: T) -> T:
+def _declare_state(vartype: str, varname: str, initial_value: _T) -> _T:
     _state_vars[varname] = vartype
     return initial_value
 
@@ -377,7 +382,7 @@ class UnknownExtra(ResolutionError):
     """Distribution doesn't have an "extra feature" of the given name"""
 
 
-_provider_factories: Dict[Type[_ModuleLike], _AdapterType] = {}
+_provider_factories: Dict[Type[_ModuleLike], _ProviderFactoryType] = {}
 
 PY_MAJOR = '{}.{}'.format(*sys.version_info)
 EGG_DIST = 3
@@ -388,7 +393,7 @@ class UnknownExtra(ResolutionError):
 
 
 def register_loader_type(
-    loader_type: Type[_ModuleLike], provider_factory: _AdapterType
+    loader_type: Type[_ModuleLike], provider_factory: _ProviderFactoryType
 ):
     """Register `provider_factory` to make providers for `loader_type`
 
@@ -2097,12 +2102,12 @@ def __init__(self, importer: zipimport.zipimporter):
         self._setup_prefix()
 
 
-_distribution_finders: Dict[
-    type, Callable[[object, str, bool], Iterable["Distribution"]]
-] = _declare_state('dict', '_distribution_finders', {})
+_distribution_finders: Dict[type, _DistFinderType[Any]] = _declare_state(
+    'dict', '_distribution_finders', {}
+)
 
 
-def register_finder(importer_type: type, distribution_finder: _AdapterType):
+def register_finder(importer_type: Type[_T], distribution_finder: _DistFinderType[_T]):
     """Register `distribution_finder` to find distributions in sys.path items
 
     `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2276,15 +2281,17 @@ def resolve_egg_link(path):
 
 register_finder(importlib.machinery.FileFinder, find_on_path)
 
-_namespace_handlers: Dict[
-    type, Callable[[object, str, str, types.ModuleType], Optional[str]]
-] = _declare_state('dict', '_namespace_handlers', {})
+_namespace_handlers: Dict[type, _NSHandlerType[Any]] = _declare_state(
+    'dict', '_namespace_handlers', {}
+)
 _namespace_packages: Dict[Optional[str], List[str]] = _declare_state(
     'dict', '_namespace_packages', {}
 )
 
 
-def register_namespace_handler(importer_type: type, namespace_handler: _AdapterType):
+def register_namespace_handler(
+    importer_type: Type[_T], namespace_handler: _NSHandlerType[_T]
+):
     """Register `namespace_handler` to declare namespace packages
 
     `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2429,9 +2436,9 @@ def fixup_namespace_packages(path_item: str, parent: Optional[str] = None):
 
 
 def file_ns_handler(
-    importer: Optional[importlib.abc.PathEntryFinder],
-    path_item,
-    packageName,
+    importer: object,
+    path_item: "StrPath",
+    packageName: str,
     module: types.ModuleType,
 ):
     """Compute an ns-package subpath for a filesystem or zipfile importer"""
@@ -2454,7 +2461,7 @@ def file_ns_handler(
 
 
 def null_ns_handler(
-    importer: Optional[importlib.abc.PathEntryFinder],
+    importer: object,
     path_item: Optional[str],
     packageName: Optional[str],
     module: Optional[_ModuleLike],
@@ -3321,7 +3328,7 @@ def _always_object(classes):
     return classes
 
 
-def _find_adapter(registry: Mapping[type, _AdapterType], ob: object):
+def _find_adapter(registry: Mapping[type, _AdapterT], ob: object) -> _AdapterT:
     """Return an adapter factory for `ob` from `registry`"""
     types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
     for t in types:

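Parameterising the finder/handler aliases with a TypeVar lets the type
checker tie each `register_*` call to the importer type it registers. A
minimal sketch of the pattern with hypothetical `FakeImporter`/`find_fake`
names (`Iterable[str]` stands in for `Iterable[Distribution]`):

    from typing import Any, Callable, Dict, Iterable, Type, TypeVar

    _T = TypeVar("_T")
    _DistFinderType = Callable[[_T, str, bool], Iterable[str]]

    _finders: Dict[type, _DistFinderType[Any]] = {}

    def register_finder(importer_type: Type[_T], finder: _DistFinderType[_T]) -> None:
        # mypy verifies that `finder` accepts instances of `importer_type`.
        _finders[importer_type] = finder

    class FakeImporter:
        pass

    def find_fake(importer: FakeImporter, path_item: str, only: bool) -> Iterable[str]:
        return ()

    register_finder(FakeImporter, find_fake)  # accepted: signatures line up
    # register_finder(str, find_fake)         # rejected by the type checker
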
From 6e4cf9ffc5813d9d6c21fb8c54b34f079949fd15 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 24 May 2024 15:08:04 -0400
Subject: [PATCH 0702/1761] Prevent a TypeError: 'NoneType' object is not
 callable when ``shutil_rmtree`` is called without an ``onexc`` parameter on
 Python<=3.11

---
 newsfragments/4382.bugfix.rst |  1 +
 setuptools/compat/py311.py    | 22 ++++++++++++++++++----
 2 files changed, 19 insertions(+), 4 deletions(-)
 create mode 100644 newsfragments/4382.bugfix.rst

diff --git a/newsfragments/4382.bugfix.rst b/newsfragments/4382.bugfix.rst
new file mode 100644
index 0000000000..3aa9e18573
--- /dev/null
+++ b/newsfragments/4382.bugfix.rst
@@ -0,0 +1 @@
+Prevent a ``TypeError: 'NoneType' object is not callable`` when ``shutil_rmtree`` is called without an ``onexc`` parameter on Python<=3.11 -- by :user:`Avasam`
diff --git a/setuptools/compat/py311.py b/setuptools/compat/py311.py
index 28175b1f75..5069c441c4 100644
--- a/setuptools/compat/py311.py
+++ b/setuptools/compat/py311.py
@@ -1,12 +1,26 @@
-import sys
+from __future__ import annotations
+
 import shutil
+import sys
+from typing import Any, Callable, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from _typeshed import StrOrBytesPath, ExcInfo
+
+# Same as shutil._OnExcCallback from typeshed
+_OnExcCallback = Callable[[Callable[..., Any], str, BaseException], object]
 
 
-def shutil_rmtree(path, ignore_errors=False, onexc=None):
+def shutil_rmtree(
+    path: StrOrBytesPath,
+    ignore_errors: bool = False,
+    onexc: _OnExcCallback | None = None,
+) -> None:
     if sys.version_info >= (3, 12):
         return shutil.rmtree(path, ignore_errors, onexc=onexc)
 
-    def _handler(fn, path, excinfo):
-        return onexc(fn, path, excinfo[1])
+    def _handler(fn: Callable[..., Any], path: str, excinfo: ExcInfo) -> None:
+        if onexc:
+            onexc(fn, path, excinfo[1])
 
     return shutil.rmtree(path, ignore_errors, onerror=_handler)

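Background: Python 3.12 added `onexc` (called with the exception instance)
alongside the older `onerror` (called with `sys.exc_info()`), and the shim
above bridges the two. A hedged usage sketch of the patched helper:

    import tempfile
    from setuptools.compat.py311 import shutil_rmtree

    def log_failure(fn, path, exc):
        # `exc` is the exception instance, per the 3.12-style onexc signature.
        print(f"{fn.__name__} failed on {path}: {exc}")

    tmp = tempfile.mkdtemp()
    shutil_rmtree(tmp, onexc=log_failure)  # onexc fires only if removal fails
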
From 6d39656ee6e9ccdb274a0654704c2e653f9efd9a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 29 May 2024 12:15:34 -0400
Subject: [PATCH 0703/1761] Replace use of mktemp with can_symlink from the
 stdlib test suite.

Closes #4403
---
 newsfragments/4403.bugfix.rst            |  1 +
 pyproject.toml                           |  2 ++
 setuptools/tests/compat/__init__.py      |  0
 setuptools/tests/compat/py39.py          |  4 ++++
 setuptools/tests/test_find_packages.py   | 17 ++---------------
 setuptools/tests/test_find_py_modules.py |  5 +++--
 6 files changed, 12 insertions(+), 17 deletions(-)
 create mode 100644 newsfragments/4403.bugfix.rst
 create mode 100644 setuptools/tests/compat/__init__.py
 create mode 100644 setuptools/tests/compat/py39.py

diff --git a/newsfragments/4403.bugfix.rst b/newsfragments/4403.bugfix.rst
new file mode 100644
index 0000000000..c07cd48c7e
--- /dev/null
+++ b/newsfragments/4403.bugfix.rst
@@ -0,0 +1 @@
+Replace use of mktemp with can_symlink from the stdlib test suite.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index aa7fa372b3..9b46f93bbe 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -68,6 +68,8 @@ testing = [
 
 	# workaround for pypa/setuptools#4333
 	"pyproject-hooks!=1.1",
+
+	"jaraco.test",
 ]
 docs = [
 	# upstream
diff --git a/setuptools/tests/compat/__init__.py b/setuptools/tests/compat/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/compat/py39.py b/setuptools/tests/compat/py39.py
new file mode 100644
index 0000000000..9c86065cd2
--- /dev/null
+++ b/setuptools/tests/compat/py39.py
@@ -0,0 +1,4 @@
+from jaraco.test.cpython import from_test_support, try_import
+
+
+os_helper = try_import('os_helper') or from_test_support('can_symlink')
diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py
index 4fefd3dccf..f32cc2f58e 100644
--- a/setuptools/tests/test_find_packages.py
+++ b/setuptools/tests/test_find_packages.py
@@ -10,20 +10,7 @@
 from setuptools import find_namespace_packages
 from setuptools.discovery import FlatLayoutPackageFinder
 
-
-# modeled after CPython's test.support.can_symlink
-def can_symlink():
-    TESTFN = tempfile.mktemp()
-    symlink_path = TESTFN + "can_symlink"
-    try:
-        os.symlink(TESTFN, symlink_path)
-        can = True
-    except (OSError, NotImplementedError, AttributeError):
-        can = False
-    else:
-        os.remove(symlink_path)
-    globals().update(can_symlink=lambda: can)
-    return can
+from .compat.py39 import os_helper
 
 
 class TestFindPackages:
@@ -123,7 +110,7 @@ def test_dir_with_packages_in_subdir_is_excluded(self):
         packages = find_packages(self.dist_dir)
         assert 'build.pkg' not in packages
 
-    @pytest.mark.skipif(not can_symlink(), reason='Symlink support required')
+    @pytest.mark.skipif(not os_helper.can_symlink(), reason='Symlink support required')
     def test_symlinked_packages_are_included(self):
         """
         A symbolically-linked directory should be treated like any other
diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py
index c110f8f561..2459c98fa3 100644
--- a/setuptools/tests/test_find_py_modules.py
+++ b/setuptools/tests/test_find_py_modules.py
@@ -6,7 +6,8 @@
 
 from setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder
 
-from .test_find_packages import can_symlink, ensure_files
+from .test_find_packages import ensure_files
+from .compat.py39 import os_helper
 
 
 class TestModuleFinder:
@@ -39,7 +40,7 @@ def test_finder(self, tmp_path, example):
         ensure_files(tmp_path, files)
         assert self.find(tmp_path, **kwargs) == set(expected_modules)
 
-    @pytest.mark.skipif(not can_symlink(), reason='Symlink support required')
+    @pytest.mark.skipif(not os_helper.can_symlink(), reason='Symlink support required')
     def test_symlinked_packages_are_included(self, tmp_path):
         src = "_myfiles/file.py"
         ensure_files(tmp_path, [src])

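The removed helper relied on `tempfile.mktemp`, which only generates a name:
another process can claim the path between name generation and use. Probing
inside a private directory avoids the race; a minimal sketch in the spirit of
`test.support.os_helper.can_symlink`:

    import os
    import tempfile

    def can_symlink() -> bool:
        with tempfile.TemporaryDirectory() as tmp:  # directory is ours alone
            link = os.path.join(tmp, "link")
            try:
                os.symlink(os.path.join(tmp, "target"), link)
            except (OSError, NotImplementedError, AttributeError):
                return False
            return True
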
From bb9dba5cae2a8577cd42b6f72a2c6046945223aa Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 29 May 2024 20:28:05 +0100
Subject: [PATCH 0704/1761] Add doctests for expand._find_module

---
 setuptools/config/expand.py | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 0d8d58add8..75b7b7acc9 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -223,6 +223,31 @@ def _find_module(
     after the build is complete), find the path to the parent directory where
     it is contained and the canonical name that could be used to import it
     considering the ``package_dir`` in the build configuration and ``root_dir``
+
+    >>> import pytest
+    >>> if os.sep != "/": pytest.skip("require UNIX path separator")
+    >>> tmp = getfixture('tmpdir')
+    >>> _ = tmp.ensure("a/b/c.py")
+    >>> _ = tmp.ensure("a/b/d/__init__.py")
+    >>> cwd = tmp.as_cwd()
+    >>> _ = cwd.__enter__()
+    >>> _find_module("a.b.c", None, ".")
+    ('.', './a/b/c.py', 'a.b.c')
+    >>> _find_module("a.b.d", None, ".")
+    ('.', './a/b/d/__init__.py', 'a.b.d')
+    >>> _find_module("ab.c", {"ab": "a/b"}, "")
+    ('a', 'a/b/c.py', 'b.c')
+    >>> _find_module("b.c", {"": "a"}, "")
+    ('a', 'a/b/c.py', 'b.c')
+    >>> [str(x).replace(str(tmp), ".") for x in _find_module("a.b.c", None, tmp)]
+    ['.', './a/b/c.py', 'a.b.c']
+    >>> _find_module("f.c", {"f": "a/b"}, "")
+    ('a', 'a/b/c.py', 'b.c')
+    >>> _find_module("f.g.c", {"f.g": "a/b"}, "")
+    ('a', 'a/b/c.py', 'b.c')
+    >>> _find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, "")
+    ('a/b', 'a/b/d/__init__.py', 'd')
+    >>> _ = cwd.__exit__(None, None, None)
     """
     parent_path = root_dir
     module_parts = module_name.split('.')

From 1ec814fe4891f3003cc831762adaa664095c8192 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 29 May 2024 20:37:57 +0100
Subject: [PATCH 0705/1761] Fix _find_module

---
 setuptools/config/expand.py | 30 +++++++++++++++++-------------
 1 file changed, 17 insertions(+), 13 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 75b7b7acc9..e76093f319 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -252,19 +252,23 @@ def _find_module(
     parent_path = root_dir
     module_parts = module_name.split('.')
     if package_dir:
-        if module_parts[0] in package_dir:
-            # A custom path was specified for the module we want to import
-            custom_path = package_dir[module_parts[0]]
-            parts = custom_path.rsplit('/', 1)
-            if len(parts) > 1:
-                parent_path = os.path.join(root_dir, parts[0])
-                parent_module = parts[1]
-            else:
-                parent_module = custom_path
-            module_name = ".".join([parent_module, *module_parts[1:]])
-        elif '' in package_dir:
-            # A custom parent directory was specified for all root modules
-            parent_path = os.path.join(root_dir, package_dir[''])
+        for i in range(len(module_parts), 0, -1):
+            parent = ".".join(module_parts[:i])
+            if parent in package_dir:
+                # A custom path was specified for the module we want to import
+                custom_path = package_dir[parent]
+                parts = custom_path.rsplit('/', 1)
+                if len(parts) > 1:
+                    parent_path = os.path.join(root_dir, parts[0])
+                    parent_module = parts[1]
+                else:
+                    parent_module = custom_path
+                module_name = ".".join([parent_module, *module_parts[i:]])
+                break
+        else:
+            if '' in package_dir:
+                # A custom parent directory was specified for all root modules
+                parent_path = os.path.join(root_dir, package_dir[''])
 
     path_start = os.path.join(parent_path, *module_name.split("."))
     candidates = chain(

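The rewritten loop makes the `package_dir` lookup longest-prefix-first, so a
mapping for `f.g.h` wins over `f.g`, `f`, or the `''` root entry. A minimal
sketch of the matching order:

    package_dir = {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}
    module_parts = "f.g.h".split(".")

    for i in range(len(module_parts), 0, -1):
        parent = ".".join(module_parts[:i])  # "f.g.h", then "f.g", then "f"
        if parent in package_dir:
            print(parent, "->", package_dir[parent])  # f.g.h -> a/b/d
            break
    else:
        print("fall back to", package_dir.get("", "root_dir"))
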
From 0390754afe9f762e2a3b9dc060f738bc8b4e6414 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 29 May 2024 21:40:37 +0100
Subject: [PATCH 0706/1761] Improve reproducibility in tests

---
 setuptools/tests/config/test_expand.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
index fe80890678..f15d8fa7c2 100644
--- a/setuptools/tests/config/test_expand.py
+++ b/setuptools/tests/config/test_expand.py
@@ -1,4 +1,5 @@
 import os
+import sys
 from pathlib import Path
 
 import pytest
@@ -147,7 +148,8 @@ def test_import_order(self, tmp_path):
         ({}, "flat_layout/pkg.py", "flat_layout.pkg", 836),
     ],
 )
-def test_resolve_class(tmp_path, package_dir, file, module, return_value):
+def test_resolve_class(monkeypatch, tmp_path, package_dir, file, module, return_value):
+    monkeypatch.setattr(sys, "modules", {})  # reproducibility
     files = {file: f"class Custom:\n    def testing(self): return {return_value}"}
     write_files(files, tmp_path)
     cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path)

From e9ecb057a983d96fe0187249be2443315ee3b19a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 29 May 2024 21:41:32 +0100
Subject: [PATCH 0707/1761] Reuse find_package_path in setuptools.config.expand

---
 setuptools/config/expand.py | 63 +++++++++++++------------------------
 1 file changed, 21 insertions(+), 42 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index e76093f319..f32f17adbb 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -185,7 +185,7 @@ def read_attr(
     attr_name = attrs_path.pop()
     module_name = '.'.join(attrs_path)
     module_name = module_name or '__init__'
-    _parent_path, path, module_name = _find_module(module_name, package_dir, root_dir)
+    path = _find_module(module_name, package_dir, root_dir)
     spec = _find_spec(module_name, path)
 
     try:
@@ -218,11 +218,9 @@ def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
 
 def _find_module(
     module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: StrPath
-) -> Tuple[StrPath, Optional[str], str]:
-    """Given a module (that could normally be imported by ``module_name``
-    after the build is complete), find the path to the parent directory where
-    it is contained and the canonical name that could be used to import it
-    considering the ``package_dir`` in the build configuration and ``root_dir``
+) -> Optional[str]:
+    """Find the path to the module named ``module_name``,
+    considering the ``package_dir`` in the build configuration and ``root_dir``.
 
     >>> import pytest
     >>> if os.sep != "/": pytest.skip("require UNIX path separator")
@@ -232,51 +230,32 @@ def _find_module(
     >>> cwd = tmp.as_cwd()
     >>> _ = cwd.__enter__()
     >>> _find_module("a.b.c", None, ".")
-    ('.', './a/b/c.py', 'a.b.c')
+    './a/b/c.py'
     >>> _find_module("a.b.d", None, ".")
-    ('.', './a/b/d/__init__.py', 'a.b.d')
+    './a/b/d/__init__.py'
     >>> _find_module("ab.c", {"ab": "a/b"}, "")
-    ('a', 'a/b/c.py', 'b.c')
+    'a/b/c.py'
     >>> _find_module("b.c", {"": "a"}, "")
-    ('a', 'a/b/c.py', 'b.c')
-    >>> [str(x).replace(str(tmp), ".") for x in _find_module("a.b.c", None, tmp)]
-    ['.', './a/b/c.py', 'a.b.c']
+    'a/b/c.py'
+    >>> _find_module("a.b.c", None, tmp).replace(str(tmp), ".")
+    './a/b/c.py'
     >>> _find_module("f.c", {"f": "a/b"}, "")
-    ('a', 'a/b/c.py', 'b.c')
+    'a/b/c.py'
     >>> _find_module("f.g.c", {"f.g": "a/b"}, "")
-    ('a', 'a/b/c.py', 'b.c')
+    'a/b/c.py'
     >>> _find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, "")
-    ('a/b', 'a/b/d/__init__.py', 'd')
+    'a/b/d/__init__.py'
     >>> _ = cwd.__exit__(None, None, None)
     """
-    parent_path = root_dir
-    module_parts = module_name.split('.')
-    if package_dir:
-        for i in range(len(module_parts), 0, -1):
-            parent = ".".join(module_parts[:i])
-            if parent in package_dir:
-                # A custom path was specified for the module we want to import
-                custom_path = package_dir[parent]
-                parts = custom_path.rsplit('/', 1)
-                if len(parts) > 1:
-                    parent_path = os.path.join(root_dir, parts[0])
-                    parent_module = parts[1]
-                else:
-                    parent_module = custom_path
-                module_name = ".".join([parent_module, *module_parts[i:]])
-                break
-        else:
-            if '' in package_dir:
-                # A custom parent directory was specified for all root modules
-                parent_path = os.path.join(root_dir, package_dir[''])
+    from importlib.machinery import all_suffixes
+    from ..discovery import find_package_path
 
-    path_start = os.path.join(parent_path, *module_name.split("."))
+    path_start = find_package_path(module_name, package_dir or {}, root_dir)
     candidates = chain(
-        (f"{path_start}.py", os.path.join(path_start, "__init__.py")),
-        iglob(f"{path_start}.*"),
+        [os.path.join(path_start, "__init__.py")],
+        (f"{path_start}{ext}" for ext in all_suffixes()),
     )
-    module_path = next((x for x in candidates if os.path.isfile(x)), None)
-    return parent_path, module_path, module_name
+    return next((x for x in candidates if os.path.isfile(x)), None)
 
 
 def resolve_class(
@@ -290,8 +269,8 @@ def resolve_class(
     class_name = qualified_class_name[idx + 1 :]
     pkg_name = qualified_class_name[:idx]
 
-    _parent_path, path, module_name = _find_module(pkg_name, package_dir, root_dir)
-    module = _load_spec(_find_spec(module_name, path), module_name)
+    path = _find_module(pkg_name, package_dir, root_dir)
+    module = _load_spec(_find_spec(pkg_name, path), pkg_name)
     return getattr(module, class_name)
 
 

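`importlib.machinery.all_suffixes()` enumerates every importable suffix on
the running interpreter (source, bytecode, and extension modules), so the
rewritten candidate generation can also locate compiled modules. A small
sketch of the candidates produced for a hypothetical `path_start`:

    import os
    from importlib.machinery import all_suffixes
    from itertools import chain

    path_start = "a/b/c"  # hypothetical find_package_path() result
    candidates = chain(
        [os.path.join(path_start, "__init__.py")],
        (f"{path_start}{ext}" for ext in all_suffixes()),
    )
    print(list(candidates))
    # e.g. ['a/b/c/__init__.py', 'a/b/c.py', 'a/b/c.pyc', 'a/b/c<ext-suffix>.so']
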
From cde754c3de534a94340df3cc2c04c6eebc68d445 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 30 May 2024 10:10:40 +0100
Subject: [PATCH 0708/1761] Use all_suffixes for __init__ too

---
 setuptools/config/expand.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index f32f17adbb..a8fd38fcd8 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -251,9 +251,9 @@ def _find_module(
     from ..discovery import find_package_path
 
     path_start = find_package_path(module_name, package_dir or {}, root_dir)
-    candidates = chain(
-        [os.path.join(path_start, "__init__.py")],
-        (f"{path_start}{ext}" for ext in all_suffixes()),
+    candidates = chain.from_iterable(
+        (f"{path_start}{ext}", os.path.join(path_start, f"__init__{ext}"))
+        for ext in all_suffixes()
     )
     return next((x for x in candidates if os.path.isfile(x)), None)
 

From 419c8d714a591f6611dc0dbc8a04e9258d08fa47 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 30 May 2024 10:12:39 +0100
Subject: [PATCH 0709/1761] Do not delay imports when not needed

---
 setuptools/config/expand.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index a8fd38fcd8..ca3d3d9b59 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -25,7 +25,7 @@
 import sys
 from glob import iglob
 from configparser import ConfigParser
-from importlib.machinery import ModuleSpec
+from importlib.machinery import ModuleSpec, all_suffixes
 from itertools import chain
 from typing import (
     TYPE_CHECKING,
@@ -47,6 +47,7 @@
 from distutils.errors import DistutilsOptionError
 
 from .._path import same_path as _same_path, StrPath
+from ..discovery import find_package_path
 from ..warnings import SetuptoolsWarning
 
 if TYPE_CHECKING:
@@ -247,9 +248,6 @@ def _find_module(
     'a/b/d/__init__.py'
     >>> _ = cwd.__exit__(None, None, None)
     """
-    from importlib.machinery import all_suffixes
-    from ..discovery import find_package_path
-
     path_start = find_package_path(module_name, package_dir or {}, root_dir)
     candidates = chain.from_iterable(
         (f"{path_start}{ext}", os.path.join(path_start, f"__init__{ext}"))

From a5f62213b1789d6d6b6033ae563522de1bf09c9f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 30 May 2024 10:15:49 +0100
Subject: [PATCH 0710/1761] Cleanup unused assignments

---
 setuptools/config/expand.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index ca3d3d9b59..f077f3a613 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -52,10 +52,7 @@
 
 if TYPE_CHECKING:
     from setuptools.dist import Distribution  # noqa
-    from setuptools.discovery import ConfigDiscovery  # noqa
-    from distutils.dist import DistributionMetadata  # noqa
 
-chain_iter = chain.from_iterable
 _K = TypeVar("_K")
 _V = TypeVar("_V", covariant=True)
 

From 0cb209eeb1f8cea88ac0ebd9644d4892954b9ea4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 30 May 2024 10:22:12 +0100
Subject: [PATCH 0711/1761] Simplify doctest

---
 setuptools/config/expand.py | 26 +++++---------------------
 1 file changed, 5 insertions(+), 21 deletions(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index f077f3a613..22a6c6d70a 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -220,30 +220,14 @@ def _find_module(
     """Find the path to the module named ``module_name``,
     considering the ``package_dir`` in the build configuration and ``root_dir``.
 
-    >>> import pytest
-    >>> if os.sep != "/": pytest.skip("require UNIX path separator")
     >>> tmp = getfixture('tmpdir')
     >>> _ = tmp.ensure("a/b/c.py")
     >>> _ = tmp.ensure("a/b/d/__init__.py")
-    >>> cwd = tmp.as_cwd()
-    >>> _ = cwd.__enter__()
-    >>> _find_module("a.b.c", None, ".")
-    './a/b/c.py'
-    >>> _find_module("a.b.d", None, ".")
-    './a/b/d/__init__.py'
-    >>> _find_module("ab.c", {"ab": "a/b"}, "")
-    'a/b/c.py'
-    >>> _find_module("b.c", {"": "a"}, "")
-    'a/b/c.py'
-    >>> _find_module("a.b.c", None, tmp).replace(str(tmp), ".")
-    './a/b/c.py'
-    >>> _find_module("f.c", {"f": "a/b"}, "")
-    'a/b/c.py'
-    >>> _find_module("f.g.c", {"f.g": "a/b"}, "")
-    'a/b/c.py'
-    >>> _find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, "")
-    'a/b/d/__init__.py'
-    >>> _ = cwd.__exit__(None, None, None)
+    >>> r = lambda x: x.replace(str(tmp), "tmp")
+    >>> r(_find_module("a.b.c", None, tmp))
+    'tmp/a/b/c.py'
+    >>> r(_find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, tmp))
+    'tmp/a/b/d/__init__.py'
     """
     path_start = find_package_path(module_name, package_dir or {}, root_dir)
     candidates = chain.from_iterable(

From 4b109e34cd8d31a34aa1b401be5b1eeb41bde03e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 30 May 2024 11:04:11 +0100
Subject: [PATCH 0712/1761] Add newsfragment

---
 newsfragments/4405.bugfix.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4405.bugfix.rst

diff --git a/newsfragments/4405.bugfix.rst b/newsfragments/4405.bugfix.rst
new file mode 100644
index 0000000000..164ace4934
--- /dev/null
+++ b/newsfragments/4405.bugfix.rst
@@ -0,0 +1,2 @@
+Improvement for ``attr:`` directives in configuration to handle
+more edge cases related to complex ``package_dir``.

From 6c8d66c0df1348fae2a7a2856afc7a7bcf286047 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 30 May 2024 11:04:57 +0100
Subject: [PATCH 0713/1761] Fix doctest on Windows

---
 setuptools/config/expand.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 22a6c6d70a..d352799922 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -223,7 +223,7 @@ def _find_module(
     >>> tmp = getfixture('tmpdir')
     >>> _ = tmp.ensure("a/b/c.py")
     >>> _ = tmp.ensure("a/b/d/__init__.py")
-    >>> r = lambda x: x.replace(str(tmp), "tmp")
+    >>> r = lambda x: x.replace(str(tmp), "tmp").replace(os.sep, "/")
     >>> r(_find_module("a.b.c", None, tmp))
     'tmp/a/b/c.py'
     >>> r(_find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, tmp))

From a595a0fad054cd20b69d3e954c99174e3a548938 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 31 May 2024 03:53:48 -0400
Subject: [PATCH 0714/1761] Rename extras to align with core metadata spec.

Closes jaraco/skeleton#125.
---
 .readthedocs.yaml | 2 +-
 pyproject.toml    | 4 ++--
 tox.ini           | 6 +++---
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index 85dfea9d42..dc8516ac20 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -3,7 +3,7 @@ python:
   install:
   - path: .
     extra_requirements:
-      - docs
+      - doc
 
 # required boilerplate readthedocs/readthedocs.org#10401
 build:
diff --git a/pyproject.toml b/pyproject.toml
index 04b14cbc79..50845ee304 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,7 +25,7 @@ dynamic = ["version"]
 Homepage = "https://github.com/PROJECT_PATH"
 
 [project.optional-dependencies]
-testing = [
+test = [
 	# upstream
 	"pytest >= 6, != 8.1.*",
 	"pytest-checkdocs >= 2.4",
@@ -36,7 +36,7 @@ testing = [
 
 	# local
 ]
-docs = [
+doc = [
 	# upstream
 	"sphinx >= 3.5",
 	"jaraco.packaging >= 9.3",
diff --git a/tox.ini b/tox.ini
index 4c39a5b139..cc4db36e76 100644
--- a/tox.ini
+++ b/tox.ini
@@ -7,7 +7,7 @@ commands =
 	pytest {posargs}
 usedevelop = True
 extras =
-	testing
+	test
 
 [testenv:diffcov]
 description = run tests and check that diff from main is covered
@@ -22,8 +22,8 @@ commands =
 [testenv:docs]
 description = build the documentation
 extras =
-	docs
-	testing
+	doc
+	test
 changedir = docs
 commands =
 	python -m sphinx -W --keep-going . {toxinidir}/build/html

From ac4f52ad8c34613bd82579bd3a3b38d57914bda9 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 1 Jun 2024 05:02:27 -0400
Subject: [PATCH 0715/1761] Suppress deprecation warning about stdlib distutils
 being deprecated.

---
 pytest.ini | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pytest.ini b/pytest.ini
index 57ab865366..e49b81561a 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -73,6 +73,9 @@ filterwarnings=
 	# suppress warnings in deprecated msvc compilers
 	ignore:(bcpp|msvc9?)compiler is deprecated
 
+	# Ignore warnings about deprecated stdlib distutils pypa/setuptools#4137
+	ignore:Reliance on distutils from stdlib is deprecated
+
 	ignore::setuptools.command.editable_wheel.InformationOnly
 
 	# https://github.com/pypa/setuptools/issues/3655

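The ini entry uses pytest's `action:message` filter syntax, where the message
part is a regular expression matched against the start of the warning text;
the runtime equivalent would be:

    import warnings

    # Matches the start of the warning message, mirroring the ini filter above.
    warnings.filterwarnings(
        "ignore",
        message="Reliance on distutils from stdlib is deprecated",
    )
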
From dc90abca19072da5807ba57c2077e00f6e47fb8c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 1 Jun 2024 05:06:52 -0400
Subject: [PATCH 0716/1761] Add news fragment.

---
 newsfragments/4137.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4137.feature.rst

diff --git a/newsfragments/4137.feature.rst b/newsfragments/4137.feature.rst
new file mode 100644
index 0000000000..89ca0cb758
--- /dev/null
+++ b/newsfragments/4137.feature.rst
@@ -0,0 +1 @@
+Support for loading distutils from the standard library is now deprecated, including use of SETUPTOOLS_USE_DISTUTILS=stdlib and importing distutils before importing setuptools.
\ No newline at end of file

From 9d0f07508d5559c1ad00d18aac76bc8c6793e0e1 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 23 May 2024 11:30:15 -0400
Subject: [PATCH 0717/1761] Merge "Relax path related params" from typeshed

---
 pkg_resources/__init__.py             | 44 ++++++++++++++++++---------
 setuptools/build_meta.py              | 34 +++++++++++++--------
 setuptools/command/install_lib.py     |  7 +++--
 setuptools/command/install_scripts.py |  5 +--
 setuptools/config/expand.py           |  4 ++-
 5 files changed, 61 insertions(+), 33 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index c86d9f095c..cf6798a270 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -36,7 +36,6 @@
     MutableSequence,
     NamedTuple,
     NoReturn,
-    Sequence,
     Set,
     Tuple,
     Type,
@@ -49,6 +48,7 @@
     Iterable,
     Optional,
     TypeVar,
+    overload,
 )
 import zipfile
 import zipimport
@@ -99,7 +99,7 @@
 from pkg_resources.extern.platformdirs import user_cache_dir as _user_cache_dir
 
 if TYPE_CHECKING:
-    from _typeshed import StrPath
+    from _typeshed import StrPath, StrOrBytesPath, BytesPath
 
 warnings.warn(
     "pkg_resources is deprecated as an API. "
@@ -1041,7 +1041,7 @@ class Environment:
 
     def __init__(
         self,
-        search_path: Optional[Sequence[str]] = None,
+        search_path: Optional[Iterable[str]] = None,
         platform: Optional[str] = get_supported_platform(),
         python: Optional[str] = PY_MAJOR,
     ):
@@ -1084,7 +1084,7 @@ def remove(self, dist: "Distribution"):
         """Remove `dist` from the environment"""
         self._distmap[dist.key].remove(dist)
 
-    def scan(self, search_path: Optional[Sequence[str]] = None):
+    def scan(self, search_path: Optional[Iterable[str]] = None):
         """Scan `search_path` for distributions usable in this environment
 
         Any distributions found are added to the environment.
@@ -1288,7 +1288,7 @@ def extraction_error(self) -> NoReturn:
         err.original_error = old_exc
         raise err
 
-    def get_cache_path(self, archive_name: str, names: Iterable[str] = ()):
+    def get_cache_path(self, archive_name: str, names: Iterable["StrPath"] = ()):
         """Return absolute location in cache for `archive_name` and `names`
 
         The parent directory of the resulting path will be created if it does
@@ -1340,7 +1340,7 @@ def _warn_unsafe_extraction_path(path):
             ).format(**locals())
             warnings.warn(msg, UserWarning)
 
-    def postprocess(self, tempname: str, filename: str):
+    def postprocess(self, tempname: "StrOrBytesPath", filename: "StrOrBytesPath"):
         """Perform any platform-specific postprocessing of `tempname`
 
         This is where Mac header rewrites should be done; other platforms don't
@@ -2465,12 +2465,16 @@ def null_ns_handler(
 register_namespace_handler(object, null_ns_handler)
 
 
-def normalize_path(filename: "StrPath"):
+@overload
+def normalize_path(filename: "StrPath") -> str: ...
+@overload
+def normalize_path(filename: "BytesPath") -> bytes: ...
+def normalize_path(filename: "StrOrBytesPath"):
     """Normalize a file/dir name for comparison purposes"""
     return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
 
 
-def _cygwin_patch(filename: "StrPath"):  # pragma: nocover
+def _cygwin_patch(filename: "StrOrBytesPath"):  # pragma: nocover
     """
     Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
     symlink components. Using
@@ -2481,9 +2485,19 @@ def _cygwin_patch(filename: "StrPath"):  # pragma: nocover
     return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
 
 
-@functools.lru_cache(maxsize=None)
-def _normalize_cached(filename):
-    return normalize_path(filename)
+if TYPE_CHECKING:
+    # https://github.com/python/mypy/issues/16261
+    # https://github.com/python/typeshed/issues/6347
+    @overload
+    def _normalize_cached(filename: "StrPath") -> str: ...
+    @overload
+    def _normalize_cached(filename: "BytesPath") -> bytes: ...
+    def _normalize_cached(filename: "StrOrBytesPath") -> Union[str, bytes]: ...
+else:
+
+    @functools.lru_cache(maxsize=None)
+    def _normalize_cached(filename):
+        return normalize_path(filename)
 
 
 def _is_egg_path(path):
@@ -2680,7 +2694,7 @@ def parse_map(
             _data = data.items()
         else:
             _data = split_sections(data)
-        maps: Dict[str, Dict[str, "EntryPoint"]] = {}
+        maps: Dict[str, Dict[str, EntryPoint]] = {}
         for group, lines in _data:
             if group is None:
                 if not lines:
@@ -2736,7 +2750,7 @@ def __init__(
     def from_location(
         cls,
         location: str,
-        basename: str,
+        basename: "StrPath",
         metadata: _MetadataType = None,
         **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
     ):
@@ -2996,7 +3010,7 @@ def __dir__(self):
     @classmethod
     def from_filename(
         cls,
-        filename: str,
+        filename: "StrPath",
         metadata: _MetadataType = None,
         **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
     ):
@@ -3332,7 +3346,7 @@ def _find_adapter(registry: Mapping[type, _AdapterType], ob: object):
     raise TypeError(f"Could not find adapter for {registry} and {ob}")
 
 
-def ensure_directory(path: str):
+def ensure_directory(path: "StrOrBytesPath"):
     """Ensure that the parent directory of `path` exists"""
     dirname = os.path.dirname(path)
     os.makedirs(dirname, exist_ok=True)
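
The normalize_path change above is an instance of a general typeshed pattern: one runtime implementation that accepts str or bytes paths, plus @overload declarations so type checkers can track that str in means str out and bytes in means bytes out. A self-contained sketch with an illustrative function name:

    import os
    from typing import TYPE_CHECKING, Union, overload

    if TYPE_CHECKING:
        # _typeshed exists only for type checkers, hence the guard.
        from _typeshed import BytesPath, StrOrBytesPath, StrPath

    @overload
    def real_path(filename: "StrPath") -> str: ...
    @overload
    def real_path(filename: "BytesPath") -> bytes: ...
    def real_path(filename: "StrOrBytesPath") -> Union[str, bytes]:
        # os.path functions preserve str vs bytes, matching the overloads.
        return os.path.normcase(os.path.realpath(filename))
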
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 5799c06ed6..419403b8e6 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -36,12 +36,12 @@
 import tempfile
 import warnings
 from pathlib import Path
-from typing import Dict, Iterator, List, Optional, Union
+from typing import Dict, Iterator, List, Optional, Tuple, Union, Iterable
 
 import setuptools
 import distutils
 from . import errors
-from ._path import same_path
+from ._path import same_path, StrPath
 from ._reqs import parse_strings
 from .warnings import SetuptoolsDeprecationWarning
 from distutils.util import strtobool
@@ -113,7 +113,7 @@ def _get_immediate_subdirectories(a_dir):
     ]
 
 
-def _file_with_extension(directory, extension):
+def _file_with_extension(directory: StrPath, extension: Union[str, Tuple[str, ...]]):
     matching = (f for f in os.listdir(directory) if f.endswith(extension))
     try:
         (file,) = matching
@@ -370,11 +370,11 @@ def prepare_metadata_for_build_wheel(
 
     def _build_with_temp_dir(
         self,
-        setup_command,
-        result_extension,
-        result_directory,
-        config_settings,
-        arbitrary_args=(),
+        setup_command: Iterable[str],
+        result_extension: Union[str, Tuple[str, ...]],
+        result_directory: StrPath,
+        config_settings: _ConfigSettings,
+        arbitrary_args: Iterable[str] = (),
     ):
         result_directory = os.path.abspath(result_directory)
 
@@ -404,7 +404,10 @@ def _build_with_temp_dir(
         return result_basename
 
     def build_wheel(
-        self, wheel_directory, config_settings=None, metadata_directory=None
+        self,
+        wheel_directory: StrPath,
+        config_settings: _ConfigSettings = None,
+        metadata_directory: Optional[StrPath] = None,
     ):
         with suppress_known_deprecation():
             return self._build_with_temp_dir(
@@ -415,12 +418,16 @@ def build_wheel(
                 self._arbitrary_args(config_settings),
             )
 
-    def build_sdist(self, sdist_directory, config_settings=None):
+    def build_sdist(
+        self, sdist_directory: StrPath, config_settings: _ConfigSettings = None
+    ):
         return self._build_with_temp_dir(
             ['sdist', '--formats', 'gztar'], '.tar.gz', sdist_directory, config_settings
         )
 
-    def _get_dist_info_dir(self, metadata_directory: Optional[str]) -> Optional[str]:
+    def _get_dist_info_dir(
+        self, metadata_directory: Optional[StrPath]
+    ) -> Optional[str]:
         if not metadata_directory:
             return None
         dist_info_candidates = list(Path(metadata_directory).glob("*.dist-info"))
@@ -433,7 +440,10 @@ def _get_dist_info_dir(self, metadata_directory: Optional[str]) -> Optional[str]
         # get_requires_for_build_editable
         # prepare_metadata_for_build_editable
         def build_editable(
-            self, wheel_directory, config_settings=None, metadata_directory=None
+            self,
+            wheel_directory: StrPath,
+            config_settings: _ConfigSettings = None,
+            metadata_directory: Optional[str] = None,
         ):
             # XXX can or should we hide our editable_wheel command normally?
             info_dir = self._get_dist_info_dir(metadata_directory)
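
A side note on the Union[str, Tuple[str, ...]] annotation given to _file_with_extension: it needs no runtime change because str.endswith natively accepts either a single suffix or a tuple of candidate suffixes. For example (illustrative data):

    files = ["pkg-1.0.tar.gz", "pkg-1.0-py3-none-any.whl", "README.rst"]

    # A single suffix or a tuple of suffixes are both valid arguments.
    assert [f for f in files if f.endswith(".whl")] == ["pkg-1.0-py3-none-any.whl"]
    assert [f for f in files if f.endswith((".tar.gz", ".whl"))] == files[:2]
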
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index 32ff65e783..9d76e429e8 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -2,6 +2,7 @@
 import sys
 from itertools import product, starmap
 import distutils.command.install_lib as orig
+from .._path import StrPath
 
 
 class install_lib(orig.install_lib):
@@ -85,8 +86,8 @@ def _gen_exclusion_paths():
 
     def copy_tree(
         self,
-        infile,
-        outfile,
+        infile: StrPath,
+        outfile: str,
         preserve_mode=1,
         preserve_times=1,
         preserve_symlinks=0,
@@ -96,7 +97,7 @@ def copy_tree(
         exclude = self.get_exclusions()
 
         if not exclude:
-            return orig.install_lib.copy_tree(self, infile, outfile)
+            return orig.install_lib.copy_tree(self, infile, outfile)  # type: ignore[arg-type] # Fixed upstream
 
         # Exclude namespace package __init__.py* files from the output
 
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index d79a4ab7b0..e9b0a0fe02 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -2,6 +2,7 @@
 import distutils.command.install_scripts as orig
 import os
 import sys
+from typing import List
 
 from .._path import ensure_directory
 
@@ -13,12 +14,12 @@ def initialize_options(self):
         orig.install_scripts.initialize_options(self)
         self.no_ep = False
 
-    def run(self):
+    def run(self) -> None:
         self.run_command("egg_info")
         if self.distribution.scripts:
             orig.install_scripts.run(self)  # run first to set up self.outfiles
         else:
-            self.outfiles = []
+            self.outfiles: List[str] = []
         if self.no_ep:
             # don't install entry point scripts into .egg file!
             return
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 0d8d58add8..3dc156a8d7 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -119,7 +119,9 @@ def glob_relative(
     return expanded_values
 
 
-def read_files(filepaths: Union[str, bytes, Iterable[StrPath]], root_dir=None) -> str:
+def read_files(
+    filepaths: Union[StrPath, Iterable[StrPath]], root_dir: Optional[StrPath] = None
+) -> str:
     """Return the content of the files concatenated using ``\n`` as str
 
     This function is sandboxed and won't reach anything outside ``root_dir``
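
One more idiom from this patch worth spelling out: functools.lru_cache hides the decorated function's overloaded signature from current type checkers (python/mypy#16261, python/typeshed#6347), so _normalize_cached declares its typed overloads under `if TYPE_CHECKING:` and defines the real cached function only at runtime. A minimal sketch of the same arrangement with a hypothetical function:

    import functools
    from typing import TYPE_CHECKING, Union, overload

    if TYPE_CHECKING:
        # Seen only by type checkers: the full overloaded signature, no body.
        @overload
        def cached_upper(s: str) -> str: ...
        @overload
        def cached_upper(s: bytes) -> bytes: ...
        def cached_upper(s: Union[str, bytes]) -> Union[str, bytes]: ...
    else:
        # Seen only at runtime: the cached implementation.
        @functools.lru_cache(maxsize=None)
        def cached_upper(s):
            return s.upper()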

From 551364d3623b28305bbd7ce83f958b9f2526e42f Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Mon, 17 Jun 2024 15:00:10 +0200
Subject: [PATCH 0718/1761] Enforce ruff/flake8-implicit-str-concat rule ISC001

ISC001 Implicitly concatenated string literals on one line

Only fix actual bugs in this PR, at least until unrelated CI failures
are fixed.
---
 setuptools/command/bdist_egg.py        | 3 ++-
 setuptools/tests/test_find_packages.py | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index adcb0a1ba1..f72294fe58 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -382,8 +382,9 @@ def scan_module(egg_dir, base, name, stubs):
         for bad in [
             'getsource',
             'getabsfile',
+            'getfile',
             'getsourcefile',
-            'getfile' 'getsourcelines',
+            'getsourcelines',
             'findsource',
             'getcomments',
             'getframeinfo',
diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py
index f32cc2f58e..0f4e2bef89 100644
--- a/setuptools/tests/test_find_packages.py
+++ b/setuptools/tests/test_find_packages.py
@@ -180,7 +180,8 @@ class TestFlatLayoutPackageFinder:
             [
                 "pkg/__init__.py",
                 "examples/__init__.py",
-                "examples/file.py" "example/other_file.py",
+                "examples/file.py",
+                "example/other_file.py",
                 # Sub-packages should always be fine
                 "pkg/example/__init__.py",
                 "pkg/examples/__init__.py",

From 5acf3d6be14f913be320fb4452188fe83f6a5495 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Jun 2024 14:58:35 +0100
Subject: [PATCH 0719/1761] Add news fragment

---
 newsfragments/4411.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4411.bugfix.rst

diff --git a/newsfragments/4411.bugfix.rst b/newsfragments/4411.bugfix.rst
new file mode 100644
index 0000000000..e306f3ef0a
--- /dev/null
+++ b/newsfragments/4411.bugfix.rst
@@ -0,0 +1 @@
+Fix accidental implicit string concatenation.

From 40f7d62827bf94871c95c46fc0518fee0a452e6a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Jun 2024 15:14:22 +0100
Subject: [PATCH 0720/1761] Avoid unstable ruff configuration in pre-commit

---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5a4a7e9166..ffcfca062f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.1.8
+  rev: v0.4.9
   hooks:
   - id: ruff
   - id: ruff-format

From a001ec89d0287cbfb911e25d23d37ec92f64e7bb Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 17 Jun 2024 13:01:46 -0400
Subject: [PATCH 0721/1761] Enforce modern annotations syntax (#4368)

---
 pkg_resources/__init__.py                     | 221 +++++++++---------
 pkg_resources/extern/__init__.py              |   9 +-
 pkg_resources/tests/test_pkg_resources.py     |   5 +-
 pyproject.toml                                |   2 +-
 ruff.toml                                     |  23 +-
 setuptools/_core_metadata.py                  |  11 +-
 setuptools/build_meta.py                      |  18 +-
 setuptools/command/_requirestxt.py            |   8 +-
 setuptools/command/build.py                   |  10 +-
 setuptools/command/build_ext.py               |  14 +-
 setuptools/command/build_py.py                |  14 +-
 setuptools/command/easy_install.py            |   7 +-
 setuptools/command/editable_wheel.py          |  56 +++--
 setuptools/command/install_scripts.py         |   5 +-
 setuptools/command/rotate.py                  |   5 +-
 setuptools/config/_apply_pyprojecttoml.py     |  52 ++---
 setuptools/config/expand.py                   |  58 +++--
 setuptools/config/pyprojecttoml.py            |  48 ++--
 setuptools/config/setupcfg.py                 |  33 ++-
 setuptools/discovery.py                       |  34 ++-
 setuptools/dist.py                            |  20 +-
 setuptools/monkey.py                          |   6 +-
 setuptools/msvc.py                            |   6 +-
 setuptools/sandbox.py                         |   5 +-
 .../tests/config/test_apply_pyprojecttoml.py  |   5 +-
 setuptools/tests/test_egg_info.py             |   5 +-
 setuptools/tests/test_manifest.py             |   5 +-
 setuptools/warnings.py                        |  20 +-
 tools/generate_validation_code.py             |   5 +-
 29 files changed, 363 insertions(+), 347 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 5783a0fa05..45a18bf73c 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -20,9 +20,11 @@
 :mod:`importlib.metadata` and :pypi:`packaging` instead.
 """
 
+from __future__ import annotations
+
 import sys
 
-if sys.version_info < (3, 8):
+if sys.version_info < (3, 8):  # noqa: UP036 # Check for unsupported versions
     raise RuntimeError("Python 3.8 or later is required")
 
 import os
@@ -36,18 +38,15 @@
     MutableSequence,
     NamedTuple,
     NoReturn,
-    Set,
     Tuple,
-    Type,
     Union,
     TYPE_CHECKING,
-    List,
     Protocol,
     Callable,
-    Dict,
     Iterable,
-    Optional,
     TypeVar,
+    Optional,
+    Dict,
     overload,
 )
 import zipfile
@@ -144,7 +143,7 @@ class PEP440Warning(RuntimeWarning):
 parse_version = _packaging_version.Version
 
 
-_state_vars: Dict[str, str] = {}
+_state_vars: dict[str, str] = {}
 
 
 def _declare_state(vartype: str, varname: str, initial_value: _T) -> _T:
@@ -325,7 +324,7 @@ def req(self):
     def report(self):
         return self._template.format(**locals())
 
-    def with_context(self, required_by: Set[Union["Distribution", str]]):
+    def with_context(self, required_by: set[Distribution | str]):
         """
         If required_by is non-empty, return a version of self that is a
         ContextualVersionConflict.
@@ -382,7 +381,7 @@ class UnknownExtra(ResolutionError):
     """Distribution doesn't have an "extra feature" of the given name"""
 
 
-_provider_factories: Dict[Type[_ModuleLike], _ProviderFactoryType] = {}
+_provider_factories: dict[type[_ModuleLike], _ProviderFactoryType] = {}
 
 PY_MAJOR = '{}.{}'.format(*sys.version_info)
 EGG_DIST = 3
@@ -393,7 +392,7 @@ class UnknownExtra(ResolutionError):
 
 
 def register_loader_type(
-    loader_type: Type[_ModuleLike], provider_factory: _ProviderFactoryType
+    loader_type: type[_ModuleLike], provider_factory: _ProviderFactoryType
 ):
     """Register `provider_factory` to make providers for `loader_type`
 
@@ -404,7 +403,7 @@ def register_loader_type(
     _provider_factories[loader_type] = provider_factory
 
 
-def get_provider(moduleOrReq: Union[str, "Requirement"]):
+def get_provider(moduleOrReq: str | Requirement):
     """Return an IResourceProvider for the named module or requirement"""
     if isinstance(moduleOrReq, Requirement):
         return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
@@ -466,7 +465,7 @@ def get_build_platform():
 get_platform = get_build_platform
 
 
-def compatible_platforms(provided: Optional[str], required: Optional[str]):
+def compatible_platforms(provided: str | None, required: str | None):
     """Can code for the `provided` platform run on the `required` platform?
 
     Returns true if either platform is ``None``, or the platforms are equal.
@@ -531,7 +530,7 @@ def load_entry_point(dist: _EPDistType, group: str, name: str):
     return get_distribution(dist).load_entry_point(group, name)
 
 
-def get_entry_map(dist: _EPDistType, group: Optional[str] = None):
+def get_entry_map(dist: _EPDistType, group: str | None = None):
     """Return the entry point map for `group`, or the full entry map"""
     return get_distribution(dist).get_entry_map(group)
 
@@ -560,25 +559,25 @@ def metadata_isdir(self, name: str) -> bool:
     def metadata_listdir(self, name: str):
         """List of metadata names in the directory (like ``os.listdir()``)"""
 
-    def run_script(self, script_name: str, namespace: Dict[str, Any]):
+    def run_script(self, script_name: str, namespace: dict[str, Any]):
         """Execute the named script in the supplied namespace dictionary"""
 
 
 class IResourceProvider(IMetadataProvider, Protocol):
     """An object that provides access to package resources"""
 
-    def get_resource_filename(self, manager: "ResourceManager", resource_name: str):
+    def get_resource_filename(self, manager: ResourceManager, resource_name: str):
         """Return a true filesystem path for `resource_name`
 
         `manager` must be a ``ResourceManager``"""
 
-    def get_resource_stream(self, manager: "ResourceManager", resource_name: str):
+    def get_resource_stream(self, manager: ResourceManager, resource_name: str):
         """Return a readable file-like object for `resource_name`
 
         `manager` must be a ``ResourceManager``"""
 
     def get_resource_string(
-        self, manager: "ResourceManager", resource_name: str
+        self, manager: ResourceManager, resource_name: str
     ) -> bytes:
         """Return the contents of `resource_name` as :obj:`bytes`
 
@@ -597,9 +596,9 @@ def resource_listdir(self, resource_name: str):
 class WorkingSet:
     """A collection of active distributions on sys.path (or a similar list)"""
 
-    def __init__(self, entries: Optional[Iterable[str]] = None):
+    def __init__(self, entries: Iterable[str] | None = None):
         """Create working set from list of path entries (default=sys.path)"""
-        self.entries: List[str] = []
+        self.entries: list[str] = []
         self.entry_keys = {}
         self.by_key = {}
         self.normalized_to_canonical_keys = {}
@@ -668,11 +667,11 @@ def add_entry(self, entry: str):
         for dist in find_distributions(entry, True):
             self.add(dist, entry, False)
 
-    def __contains__(self, dist: "Distribution"):
+    def __contains__(self, dist: Distribution):
         """True if `dist` is the active distribution for its project"""
         return self.by_key.get(dist.key) == dist
 
-    def find(self, req: "Requirement"):
+    def find(self, req: Requirement):
         """Find a distribution matching requirement `req`
 
         If there is an active distribution for the requested project, this
@@ -696,7 +695,7 @@ def find(self, req: "Requirement"):
             raise VersionConflict(dist, req)
         return dist
 
-    def iter_entry_points(self, group: str, name: Optional[str] = None):
+    def iter_entry_points(self, group: str, name: str | None = None):
         """Yield entry point objects from `group` matching `name`
 
         If `name` is None, yields all entry points in `group` from all
@@ -737,8 +736,8 @@ def __iter__(self):
 
     def add(
         self,
-        dist: "Distribution",
-        entry: Optional[str] = None,
+        dist: Distribution,
+        entry: str | None = None,
         insert: bool = True,
         replace: bool = False,
     ):
@@ -775,11 +774,11 @@ def add(
 
     def resolve(
         self,
-        requirements: Iterable["Requirement"],
-        env: Optional["Environment"] = None,
-        installer: Optional[_InstallerType] = None,
+        requirements: Iterable[Requirement],
+        env: Environment | None = None,
+        installer: _InstallerType | None = None,
         replace_conflicting: bool = False,
-        extras: Optional[Tuple[str, ...]] = None,
+        extras: tuple[str, ...] | None = None,
     ):
         """List all distributions needed to (recursively) meet `requirements`
 
@@ -849,7 +848,7 @@ def resolve(
 
     def _resolve_dist(
         self, req, best, replace_conflicting, env, installer, required_by, to_activate
-    ) -> "Distribution":
+    ) -> Distribution:
         dist = best.get(req.key)
         if dist is None:
             # Find the best distribution and add it to the map
@@ -880,9 +879,9 @@ def _resolve_dist(
 
     def find_plugins(
         self,
-        plugin_env: "Environment",
-        full_env: Optional["Environment"] = None,
-        installer: Optional[_InstallerType] = None,
+        plugin_env: Environment,
+        full_env: Environment | None = None,
+        installer: _InstallerType | None = None,
         fallback: bool = True,
     ):
         """Find all activatable distributions in `plugin_env`
@@ -982,7 +981,7 @@ def require(self, *requirements: _NestedStr):
         return needed
 
     def subscribe(
-        self, callback: Callable[["Distribution"], object], existing: bool = True
+        self, callback: Callable[[Distribution], object], existing: bool = True
     ):
         """Invoke `callback` for all distributions
 
@@ -1024,9 +1023,7 @@ class _ReqExtras(Dict["Requirement", Tuple[str, ...]]):
     Map each requirement to the extras that demanded it.
     """
 
-    def markers_pass(
-        self, req: "Requirement", extras: Optional[Tuple[str, ...]] = None
-    ):
+    def markers_pass(self, req: Requirement, extras: tuple[str, ...] | None = None):
         """
         Evaluate markers for req against each extra that
         demanded it.
@@ -1046,9 +1043,9 @@ class Environment:
 
     def __init__(
         self,
-        search_path: Optional[Iterable[str]] = None,
-        platform: Optional[str] = get_supported_platform(),
-        python: Optional[str] = PY_MAJOR,
+        search_path: Iterable[str] | None = None,
+        platform: str | None = get_supported_platform(),
+        python: str | None = PY_MAJOR,
     ):
         """Snapshot distributions available on a search path
 
@@ -1071,7 +1068,7 @@ def __init__(
         self.python = python
         self.scan(search_path)
 
-    def can_add(self, dist: "Distribution"):
+    def can_add(self, dist: Distribution):
         """Is distribution `dist` acceptable for this environment?
 
         The distribution must match the platform and python version
@@ -1085,11 +1082,11 @@ def can_add(self, dist: "Distribution"):
         )
         return py_compat and compatible_platforms(dist.platform, self.platform)
 
-    def remove(self, dist: "Distribution"):
+    def remove(self, dist: Distribution):
         """Remove `dist` from the environment"""
         self._distmap[dist.key].remove(dist)
 
-    def scan(self, search_path: Optional[Iterable[str]] = None):
+    def scan(self, search_path: Iterable[str] | None = None):
         """Scan `search_path` for distributions usable in this environment
 
         Any distributions found are added to the environment.
@@ -1115,7 +1112,7 @@ def __getitem__(self, project_name: str):
         distribution_key = project_name.lower()
         return self._distmap.get(distribution_key, [])
 
-    def add(self, dist: "Distribution"):
+    def add(self, dist: Distribution):
         """Add `dist` if we ``can_add()`` it and it has not already been added"""
         if self.can_add(dist) and dist.has_version():
             dists = self._distmap.setdefault(dist.key, [])
@@ -1125,9 +1122,9 @@ def add(self, dist: "Distribution"):
 
     def best_match(
         self,
-        req: "Requirement",
+        req: Requirement,
         working_set: WorkingSet,
-        installer: Optional[Callable[["Requirement"], Any]] = None,
+        installer: Callable[[Requirement], Any] | None = None,
         replace_conflicting: bool = False,
     ):
         """Find distribution best matching `req` and usable on `working_set`
@@ -1158,8 +1155,8 @@ def best_match(
 
     def obtain(
         self,
-        requirement: "Requirement",
-        installer: Optional[Callable[["Requirement"], Any]] = None,
+        requirement: Requirement,
+        installer: Callable[[Requirement], Any] | None = None,
     ):
         """Obtain a distribution matching `requirement` (e.g. via download)
 
@@ -1177,7 +1174,7 @@ def __iter__(self):
             if self[key]:
                 yield key
 
-    def __iadd__(self, other: Union["Distribution", "Environment"]):
+    def __iadd__(self, other: Distribution | Environment):
         """In-place addition of a distribution or environment"""
         if isinstance(other, Distribution):
             self.add(other)
@@ -1189,7 +1186,7 @@ def __iadd__(self, other: Union["Distribution", "Environment"]):
             raise TypeError("Can't add %r to environment" % (other,))
         return self
 
-    def __add__(self, other: Union["Distribution", "Environment"]):
+    def __add__(self, other: Distribution | Environment):
         """Add an environment or distribution to an environment"""
         new = self.__class__([], platform=None, python=None)
         for env in self, other:
@@ -1216,15 +1213,15 @@ class ExtractionError(RuntimeError):
         The exception instance that caused extraction to fail
     """
 
-    manager: "ResourceManager"
+    manager: ResourceManager
     cache_path: str
-    original_error: Optional[BaseException]
+    original_error: BaseException | None
 
 
 class ResourceManager:
     """Manage resource extraction and packages"""
 
-    extraction_path: Optional[str] = None
+    extraction_path: str | None = None
 
     def __init__(self):
         self.cached_files = {}
@@ -1293,7 +1290,7 @@ def extraction_error(self) -> NoReturn:
         err.original_error = old_exc
         raise err
 
-    def get_cache_path(self, archive_name: str, names: Iterable["StrPath"] = ()):
+    def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()):
         """Return absolute location in cache for `archive_name` and `names`
 
         The parent directory of the resulting path will be created if it does
@@ -1345,7 +1342,7 @@ def _warn_unsafe_extraction_path(path):
             ).format(**locals())
             warnings.warn(msg, UserWarning)
 
-    def postprocess(self, tempname: "StrOrBytesPath", filename: "StrOrBytesPath"):
+    def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath):
         """Perform any platform-specific postprocessing of `tempname`
 
         This is where Mac header rewrites should be done; other platforms don't
@@ -1389,7 +1386,7 @@ def set_extraction_path(self, path: str):
 
         self.extraction_path = path
 
-    def cleanup_resources(self, force: bool = False) -> List[str]:
+    def cleanup_resources(self, force: bool = False) -> list[str]:
         """
         Delete all extracted resource files and directories, returning a list
         of the file and directory names that could not be successfully removed.
@@ -1496,7 +1493,7 @@ def invalid_marker(text: str):
     return False
 
 
-def evaluate_marker(text: str, extra: Optional[str] = None):
+def evaluate_marker(text: str, extra: str | None = None):
     """
     Evaluate a PEP 508 environment marker.
     Return a boolean indicating the marker result in this environment.
@@ -1514,10 +1511,10 @@ def evaluate_marker(text: str, extra: Optional[str] = None):
 class NullProvider:
     """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
 
-    egg_name: Optional[str] = None
-    egg_info: Optional[str] = None
-    loader: Optional[_LoaderProtocol] = None
-    module_path: Optional[str]  # Some subclasses can have a None module_path
+    egg_name: str | None = None
+    egg_info: str | None = None
+    loader: _LoaderProtocol | None = None
+    module_path: str | None  # Some subclasses can have a None module_path
 
     def __init__(self, module: _ModuleLike):
         self.loader = getattr(module, '__loader__', None)
@@ -1577,7 +1574,7 @@ def metadata_listdir(self, name: str):
             return self._listdir(self._fn(self.egg_info, name))
         return []
 
-    def run_script(self, script_name: str, namespace: Dict[str, Any]):
+    def run_script(self, script_name: str, namespace: dict[str, Any]):
         script = 'scripts/' + script_name
         if not self.has_metadata(script):
             raise ResolutionError(
@@ -1831,7 +1828,7 @@ class MemoizedZipManifests(ZipManifests):
     """
 
     class manifest_mod(NamedTuple):
-        manifest: Dict[str, zipfile.ZipInfo]
+        manifest: dict[str, zipfile.ZipInfo]
         mtime: float
 
     def load(self, path: str):  # type: ignore[override] # ZipManifests.load is a classmethod
@@ -1851,7 +1848,7 @@ def load(self, path: str):  # type: ignore[override] # ZipManifests.load is a cl
 class ZipProvider(EggProvider):
     """Resource support for zips and eggs"""
 
-    eagers: Optional[List[str]] = None
+    eagers: list[str] | None = None
     _zip_manifests = MemoizedZipManifests()
     # ZipProvider's loader should always be a zipimporter or equivalent
     loader: zipimport.zipimporter
@@ -2033,7 +2030,7 @@ class FileMetadata(EmptyProvider):
     the provided location.
     """
 
-    def __init__(self, path: "StrPath"):
+    def __init__(self, path: StrPath):
         self.path = path
 
     def _get_metadata_path(self, name):
@@ -2102,12 +2099,12 @@ def __init__(self, importer: zipimport.zipimporter):
         self._setup_prefix()
 
 
-_distribution_finders: Dict[type, _DistFinderType[Any]] = _declare_state(
+_distribution_finders: dict[type, _DistFinderType[Any]] = _declare_state(
     'dict', '_distribution_finders', {}
 )
 
 
-def register_finder(importer_type: Type[_T], distribution_finder: _DistFinderType[_T]):
+def register_finder(importer_type: type[_T], distribution_finder: _DistFinderType[_T]):
     """Register `distribution_finder` to find distributions in sys.path items
 
     `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2156,7 +2153,7 @@ def find_eggs_in_zip(
 
 
 def find_nothing(
-    importer: Optional[object], path_item: Optional[str], only: Optional[bool] = False
+    importer: object | None, path_item: str | None, only: bool | None = False
 ):
     return ()
 
@@ -2164,7 +2161,7 @@ def find_nothing(
 register_finder(object, find_nothing)
 
 
-def find_on_path(importer: Optional[object], path_item, only=False):
+def find_on_path(importer: object | None, path_item, only=False):
     """Yield distributions accessible on a sys.path directory"""
     path_item = _normalize_cached(path_item)
 
@@ -2281,16 +2278,16 @@ def resolve_egg_link(path):
 
 register_finder(importlib.machinery.FileFinder, find_on_path)
 
-_namespace_handlers: Dict[type, _NSHandlerType[Any]] = _declare_state(
+_namespace_handlers: dict[type, _NSHandlerType[Any]] = _declare_state(
     'dict', '_namespace_handlers', {}
 )
-_namespace_packages: Dict[Optional[str], List[str]] = _declare_state(
+_namespace_packages: dict[str | None, list[str]] = _declare_state(
     'dict', '_namespace_packages', {}
 )
 
 
 def register_namespace_handler(
-    importer_type: Type[_T], namespace_handler: _NSHandlerType[_T]
+    importer_type: type[_T], namespace_handler: _NSHandlerType[_T]
 ):
     """Register `namespace_handler` to declare namespace packages
 
@@ -2423,7 +2420,7 @@ def declare_namespace(packageName: str):
         _imp.release_lock()
 
 
-def fixup_namespace_packages(path_item: str, parent: Optional[str] = None):
+def fixup_namespace_packages(path_item: str, parent: str | None = None):
     """Ensure that previously-declared namespace packages include path_item"""
     _imp.acquire_lock()
     try:
@@ -2437,7 +2434,7 @@ def fixup_namespace_packages(path_item: str, parent: Optional[str] = None):
 
 def file_ns_handler(
     importer: object,
-    path_item: "StrPath",
+    path_item: StrPath,
     packageName: str,
     module: types.ModuleType,
 ):
@@ -2462,9 +2459,9 @@ def file_ns_handler(
 
 def null_ns_handler(
     importer: object,
-    path_item: Optional[str],
-    packageName: Optional[str],
-    module: Optional[_ModuleLike],
+    path_item: str | None,
+    packageName: str | None,
+    module: _ModuleLike | None,
 ):
     return None
 
@@ -2473,15 +2470,15 @@ def null_ns_handler(
 
 
 @overload
-def normalize_path(filename: "StrPath") -> str: ...
+def normalize_path(filename: StrPath) -> str: ...
 @overload
-def normalize_path(filename: "BytesPath") -> bytes: ...
-def normalize_path(filename: "StrOrBytesPath"):
+def normalize_path(filename: BytesPath) -> bytes: ...
+def normalize_path(filename: StrOrBytesPath):
     """Normalize a file/dir name for comparison purposes"""
     return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
 
 
-def _cygwin_patch(filename: "StrOrBytesPath"):  # pragma: nocover
+def _cygwin_patch(filename: StrOrBytesPath):  # pragma: nocover
     """
     Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
     symlink components. Using
@@ -2496,10 +2493,10 @@ def _cygwin_patch(filename: "StrOrBytesPath"):  # pragma: nocover
     # https://github.com/python/mypy/issues/16261
     # https://github.com/python/typeshed/issues/6347
     @overload
-    def _normalize_cached(filename: "StrPath") -> str: ...
+    def _normalize_cached(filename: StrPath) -> str: ...
     @overload
-    def _normalize_cached(filename: "BytesPath") -> bytes: ...
-    def _normalize_cached(filename: "StrOrBytesPath") -> Union[str, bytes]: ...
+    def _normalize_cached(filename: BytesPath) -> bytes: ...
+    def _normalize_cached(filename: StrOrBytesPath) -> str | bytes: ...
 else:
 
     @functools.lru_cache(maxsize=None)
@@ -2563,7 +2560,7 @@ def __init__(
         module_name: str,
         attrs: Iterable[str] = (),
         extras: Iterable[str] = (),
-        dist: Optional["Distribution"] = None,
+        dist: Distribution | None = None,
     ):
         if not MODULE(module_name):
             raise ValueError("Invalid module name", module_name)
@@ -2587,8 +2584,8 @@ def __repr__(self):
     def load(
         self,
         require: bool = True,
-        *args: Optional[Union[Environment, _InstallerType]],
-        **kwargs: Optional[Union[Environment, _InstallerType]],
+        *args: Environment | _InstallerType | None,
+        **kwargs: Environment | _InstallerType | None,
     ):
         """
         Require packages for this EntryPoint, then resolve it.
@@ -2618,8 +2615,8 @@ def resolve(self):
 
     def require(
         self,
-        env: Optional[Environment] = None,
-        installer: Optional[_InstallerType] = None,
+        env: Environment | None = None,
+        installer: _InstallerType | None = None,
     ):
         if not self.dist:
             error_cls = UnknownExtra if self.extras else AttributeError
@@ -2644,7 +2641,7 @@ def require(
     )
 
     @classmethod
-    def parse(cls, src: str, dist: Optional["Distribution"] = None):
+    def parse(cls, src: str, dist: Distribution | None = None):
         """Parse a single entry point from string `src`
 
         Entry point syntax follows the form::
@@ -2677,7 +2674,7 @@ def parse_group(
         cls,
         group: str,
         lines: _NestedStr,
-        dist: Optional["Distribution"] = None,
+        dist: Distribution | None = None,
     ):
         """Parse an entry point group"""
         if not MODULE(group):
@@ -2693,15 +2690,15 @@ def parse_group(
     @classmethod
     def parse_map(
         cls,
-        data: Union[str, Iterable[str], Dict[str, Union[str, Iterable[str]]]],
-        dist: Optional["Distribution"] = None,
+        data: str | Iterable[str] | dict[str, str | Iterable[str]],
+        dist: Distribution | None = None,
     ):
         """Parse a map of entry point groups"""
         if isinstance(data, dict):
             _data = data.items()
         else:
             _data = split_sections(data)
-        maps: Dict[str, Dict[str, EntryPoint]] = {}
+        maps: dict[str, dict[str, EntryPoint]] = {}
         for group, lines in _data:
             if group is None:
                 if not lines:
@@ -2736,12 +2733,12 @@ class Distribution:
 
     def __init__(
         self,
-        location: Optional[str] = None,
+        location: str | None = None,
         metadata: _MetadataType = None,
-        project_name: Optional[str] = None,
-        version: Optional[str] = None,
-        py_version: Optional[str] = PY_MAJOR,
-        platform: Optional[str] = None,
+        project_name: str | None = None,
+        version: str | None = None,
+        py_version: str | None = PY_MAJOR,
+        platform: str | None = None,
         precedence: int = EGG_DIST,
     ):
         self.project_name = safe_name(project_name or 'Unknown')
@@ -2757,7 +2754,7 @@ def __init__(
     def from_location(
         cls,
         location: str,
-        basename: "StrPath",
+        basename: StrPath,
         metadata: _MetadataType = None,
         **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
     ):
@@ -2798,16 +2795,16 @@ def hashcmp(self):
     def __hash__(self):
         return hash(self.hashcmp)
 
-    def __lt__(self, other: "Distribution"):
+    def __lt__(self, other: Distribution):
         return self.hashcmp < other.hashcmp
 
-    def __le__(self, other: "Distribution"):
+    def __le__(self, other: Distribution):
         return self.hashcmp <= other.hashcmp
 
-    def __gt__(self, other: "Distribution"):
+    def __gt__(self, other: Distribution):
         return self.hashcmp > other.hashcmp
 
-    def __ge__(self, other: "Distribution"):
+    def __ge__(self, other: Distribution):
         return self.hashcmp >= other.hashcmp
 
     def __eq__(self, other: object):
@@ -2965,7 +2962,7 @@ def _get_version(self):
         lines = self._get_metadata(self.PKG_INFO)
         return _version_from_file(lines)
 
-    def activate(self, path: Optional[List[str]] = None, replace: bool = False):
+    def activate(self, path: list[str] | None = None, replace: bool = False):
         """Ensure distribution is importable on `path` (default=sys.path)"""
         if path is None:
             path = sys.path
@@ -3017,7 +3014,7 @@ def __dir__(self):
     @classmethod
     def from_filename(
         cls,
-        filename: "StrPath",
+        filename: StrPath,
         metadata: _MetadataType = None,
         **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
     ):
@@ -3041,7 +3038,7 @@ def load_entry_point(self, group: str, name: str):
             raise ImportError("Entry point %r not found" % ((group, name),))
         return ep.load()
 
-    def get_entry_map(self, group: Optional[str] = None):
+    def get_entry_map(self, group: str | None = None):
         """Return the entry point map for `group`, or the full entry map"""
         if not hasattr(self, "_ep_map"):
             self._ep_map = EntryPoint.parse_map(
@@ -3058,7 +3055,7 @@ def get_entry_info(self, group: str, name: str):
     # FIXME: 'Distribution.insert_on' is too complex (13)
     def insert_on(  # noqa: C901
         self,
-        path: List[str],
+        path: list[str],
         loc=None,
         replace: bool = False,
     ):
@@ -3166,7 +3163,7 @@ def has_version(self):
             return False
         return True
 
-    def clone(self, **kw: Optional[Union[str, int, IResourceProvider]]):
+    def clone(self, **kw: str | int | IResourceProvider | None):
         """Copy this distribution, substituting in any changed keyword args"""
         names = 'project_name version py_version platform location precedence'
         for attr in names.split():
@@ -3292,7 +3289,7 @@ def __init__(self, requirement_string: str):
         self.project_name, self.key = project_name, project_name.lower()
         self.specs = [(spec.operator, spec.version) for spec in self.specifier]
         # packaging.requirements.Requirement uses a set for its extras. We use a variable-length tuple
-        self.extras: Tuple[str] = tuple(map(safe_extra, self.extras))
+        self.extras: tuple[str] = tuple(map(safe_extra, self.extras))
         self.hashCmp = (
             self.key,
             self.url,
@@ -3308,7 +3305,7 @@ def __eq__(self, other: object):
     def __ne__(self, other):
         return not self == other
 
-    def __contains__(self, item: Union[Distribution, str, Tuple[str, ...]]):
+    def __contains__(self, item: Distribution | str | tuple[str, ...]):
         if isinstance(item, Distribution):
             if item.key != self.key:
                 return False
@@ -3327,7 +3324,7 @@ def __repr__(self):
         return "Requirement.parse(%r)" % str(self)
 
     @staticmethod
-    def parse(s: Union[str, Iterable[str]]):
+    def parse(s: str | Iterable[str]):
         (req,) = parse_requirements(s)
         return req
 
@@ -3353,7 +3350,7 @@ def _find_adapter(registry: Mapping[type, _AdapterT], ob: object) -> _AdapterT:
     raise TypeError(f"Could not find adapter for {registry} and {ob}")
 
 
-def ensure_directory(path: "StrOrBytesPath"):
+def ensure_directory(path: StrOrBytesPath):
     """Ensure that the parent directory of `path` exists"""
     dirname = os.path.dirname(path)
     os.makedirs(dirname, exist_ok=True)
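
The mechanism behind this patch: with `from __future__ import annotations` (PEP 563), annotations are stored as strings rather than evaluated at import time, so PEP 604 unions and built-in generics become usable in annotations even on Python 3.8, and self-references no longer need quoting. A small sketch:

    from __future__ import annotations

    # Runs fine on Python 3.8+: the annotations below are never evaluated.
    def find_entries(name: str | None = None) -> list[str]:
        return [name] if name else []

    class Node:
        def clone(self) -> Node:  # forward reference, no quotes needed
            return Node()
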
diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index a1b7490dfb..9b9ac10aa9 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -1,8 +1,9 @@
+from __future__ import annotations
 from importlib.machinery import ModuleSpec
 import importlib.util
 import sys
 from types import ModuleType
-from typing import Iterable, Optional, Sequence
+from typing import Iterable, Sequence
 
 
 class VendorImporter:
@@ -15,7 +16,7 @@ def __init__(
         self,
         root_name: str,
         vendored_names: Iterable[str] = (),
-        vendor_pkg: Optional[str] = None,
+        vendor_pkg: str | None = None,
     ):
         self.root_name = root_name
         self.vendored_names = set(vendored_names)
@@ -65,8 +66,8 @@ def exec_module(self, module: ModuleType):
     def find_spec(
         self,
         fullname: str,
-        path: Optional[Sequence[str]] = None,
-        target: Optional[ModuleType] = None,
+        path: Sequence[str] | None = None,
+        target: ModuleType | None = None,
     ):
         """Return a module spec for vendored names."""
         return (
diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 4724c82860..17e1ff0c2f 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import builtins
 import sys
 import tempfile
@@ -9,7 +11,6 @@
 import stat
 import distutils.dist
 import distutils.command.install_egg_info
-from typing import List
 
 from unittest import mock
 
@@ -33,7 +34,7 @@ def __call__(self):
 
 
 class TestZipProvider:
-    finalizers: List[EggRemover] = []
+    finalizers: list[EggRemover] = []
 
     ref_time = datetime.datetime(2013, 5, 12, 13, 25, 0)
     "A reference time for a file modification"
diff --git a/pyproject.toml b/pyproject.toml
index d444c13189..a7d1f3e99c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -40,7 +40,7 @@ testing = [
 	"pytest-mypy",
 	"pytest-enabler >= 2.2",
 	# workaround for pypa/setuptools#3921
-	'pytest-ruff >= 0.2.1; sys_platform != "cygwin"',
+	'pytest-ruff >= 0.3.2; sys_platform != "cygwin"',
 
 	# local
 	"virtualenv>=13.0.0",
diff --git a/ruff.toml b/ruff.toml
index 6f620cb890..731e52e0e2 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -4,10 +4,18 @@ extend-select = [
 	"W",
 
 	# local
-	"UP",  # pyupgrade
-	"YTT",  # flake8-2020
+	"FA", # flake8-future-annotations
+	"F404", # late-future-import
+	"UP", # pyupgrade
+	"YTT", # flake8-2020
 ]
 ignore = [
+	"UP015", # redundant-open-modes, explicit is preferred
+	"UP030", # temporarily disabled
+	"UP031", # temporarily disabled
+	"UP032", # temporarily disabled
+	"UP038", # Using `X | Y` in `isinstance` call is slower and more verbose https://github.com/astral-sh/ruff/issues/7871
+
 	# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
 	"W191",
 	"E111",
@@ -24,19 +32,16 @@ ignore = [
 	"ISC001",
 	"ISC002",
 ]
-extend-ignore = [
-	"UP015",  # redundant-open-modes, explicit is preferred
-	"UP030",  # temporarily disabled
-	"UP031",  # temporarily disabled
-	"UP032",  # temporarily disabled
-	"UP036",  # temporarily disabled
-]
 exclude = [
 	"**/_vendor",
 	"setuptools/_distutils",
 	"setuptools/config/_validate_pyproject",
 ]
 
+[lint.per-file-ignores]
+# Auto-generated code
+"setuptools/config/_validate_pyproject/*" = ["FA100"]
+
 [format]
 exclude = [
 	"**/_vendor",
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 9b4f38ded2..f1de9c9ba6 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -4,13 +4,14 @@
 See: https://packaging.python.org/en/latest/specifications/core-metadata/
 """
 
+from __future__ import annotations
+
 import os
 import stat
 import textwrap
 from email import message_from_file
 from email.message import Message
 from tempfile import NamedTemporaryFile
-from typing import Optional, List
 
 from distutils.util import rfc822_escape
 
@@ -38,7 +39,7 @@ def rfc822_unescape(content: str) -> str:
     return '\n'.join((lines[0].lstrip(), textwrap.dedent('\n'.join(lines[1:]))))
 
 
-def _read_field_from_msg(msg: Message, field: str) -> Optional[str]:
+def _read_field_from_msg(msg: Message, field: str) -> str | None:
     """Read Message header field."""
     value = msg[field]
     if value == 'UNKNOWN':
@@ -46,7 +47,7 @@ def _read_field_from_msg(msg: Message, field: str) -> Optional[str]:
     return value
 
 
-def _read_field_unescaped_from_msg(msg: Message, field: str) -> Optional[str]:
+def _read_field_unescaped_from_msg(msg: Message, field: str) -> str | None:
     """Read Message header field and apply rfc822_unescape."""
     value = _read_field_from_msg(msg, field)
     if value is None:
@@ -54,7 +55,7 @@ def _read_field_unescaped_from_msg(msg: Message, field: str) -> Optional[str]:
     return rfc822_unescape(value)
 
 
-def _read_list_from_msg(msg: Message, field: str) -> Optional[List[str]]:
+def _read_list_from_msg(msg: Message, field: str) -> list[str] | None:
     """Read Message header field and return all results as list."""
     values = msg.get_all(field, None)
     if values == []:
@@ -62,7 +63,7 @@ def _read_list_from_msg(msg: Message, field: str) -> Optional[List[str]]:
     return values
 
 
-def _read_payload_from_msg(msg: Message) -> Optional[str]:
+def _read_payload_from_msg(msg: Message) -> str | None:
     value = str(msg.get_payload()).strip()
     if value == 'UNKNOWN' or not value:
         return None
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 419403b8e6..c52c872fd0 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -26,6 +26,8 @@
 Again, this is not a formal definition! Just a "taste" of the module.
 """
 
+from __future__ import annotations
+
 import io
 import os
 import shlex
@@ -36,7 +38,7 @@
 import tempfile
 import warnings
 from pathlib import Path
-from typing import Dict, Iterator, List, Optional, Tuple, Union, Iterable
+from typing import Dict, Iterator, List, Optional, Union, Iterable
 
 import setuptools
 import distutils
@@ -113,7 +115,7 @@ def _get_immediate_subdirectories(a_dir):
     ]
 
 
-def _file_with_extension(directory: StrPath, extension: Union[str, Tuple[str, ...]]):
+def _file_with_extension(directory: StrPath, extension: str | tuple[str, ...]):
     matching = (f for f in os.listdir(directory) if f.endswith(extension))
     try:
         (file,) = matching
@@ -163,7 +165,7 @@ class _ConfigSettingsTranslator:
 
     # See pypa/setuptools#1928 pypa/setuptools#2491
 
-    def _get_config(self, key: str, config_settings: _ConfigSettings) -> List[str]:
+    def _get_config(self, key: str, config_settings: _ConfigSettings) -> list[str]:
         """
         Get the value of a specific key in ``config_settings`` as a list of strings.
 
@@ -371,7 +373,7 @@ def prepare_metadata_for_build_wheel(
     def _build_with_temp_dir(
         self,
         setup_command: Iterable[str],
-        result_extension: Union[str, Tuple[str, ...]],
+        result_extension: str | tuple[str, ...],
         result_directory: StrPath,
         config_settings: _ConfigSettings,
         arbitrary_args: Iterable[str] = (),
@@ -407,7 +409,7 @@ def build_wheel(
         self,
         wheel_directory: StrPath,
         config_settings: _ConfigSettings = None,
-        metadata_directory: Optional[StrPath] = None,
+        metadata_directory: StrPath | None = None,
     ):
         with suppress_known_deprecation():
             return self._build_with_temp_dir(
@@ -425,9 +427,7 @@ def build_sdist(
             ['sdist', '--formats', 'gztar'], '.tar.gz', sdist_directory, config_settings
         )
 
-    def _get_dist_info_dir(
-        self, metadata_directory: Optional[StrPath]
-    ) -> Optional[str]:
+    def _get_dist_info_dir(self, metadata_directory: StrPath | None) -> str | None:
         if not metadata_directory:
             return None
         dist_info_candidates = list(Path(metadata_directory).glob("*.dist-info"))
@@ -443,7 +443,7 @@ def build_editable(
             self,
             wheel_directory: StrPath,
             config_settings: _ConfigSettings = None,
-            metadata_directory: Optional[str] = None,
+            metadata_directory: str | None = None,
         ):
             # XXX can or should we hide our editable_wheel command normally?
             info_dir = self._get_dist_info_dir(metadata_directory)
diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py
index b0c2d7059a..1f1967e7aa 100644
--- a/setuptools/command/_requirestxt.py
+++ b/setuptools/command/_requirestxt.py
@@ -7,10 +7,12 @@
 See https://setuptools.pypa.io/en/latest/deprecated/python_eggs.html#requires-txt
 """
 
+from __future__ import annotations
+
 import io
 from collections import defaultdict
 from itertools import filterfalse
-from typing import Dict, List, Tuple, Mapping, TypeVar
+from typing import Dict, Mapping, TypeVar
 
 from .. import _reqs
 from ..extern.jaraco.text import yield_lines
@@ -26,7 +28,7 @@
 
 def _prepare(
     install_requires: _StrOrIter, extras_require: Mapping[str, _StrOrIter]
-) -> Tuple[List[str], Dict[str, List[str]]]:
+) -> tuple[list[str], dict[str, list[str]]]:
     """Given values for ``install_requires`` and ``extras_require``
     create modified versions in a way that can be written in ``requires.txt``
     """
@@ -54,7 +56,7 @@ def _convert_extras_requirements(
 
 def _move_install_requirements_markers(
     install_requires: _StrOrIter, extras_require: Mapping[str, _Ordered[Requirement]]
-) -> Tuple[List[str], Dict[str, List[str]]]:
+) -> tuple[list[str], dict[str, list[str]]]:
     """
     The ``requires.txt`` file has a specific format:
         - Environment markers need to be part of the section headers and
diff --git a/setuptools/command/build.py b/setuptools/command/build.py
index 16c077b7cc..bc765a17ae 100644
--- a/setuptools/command/build.py
+++ b/setuptools/command/build.py
@@ -1,4 +1,6 @@
-from typing import Dict, List, Protocol
+from __future__ import annotations
+
+from typing import Protocol
 from distutils.command.build import build as _build
 
 _ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"}
@@ -87,7 +89,7 @@ def finalize_options(self):
     def run(self):
         """(Required by the original :class:`setuptools.Command` interface)"""
 
-    def get_source_files(self) -> List[str]:
+    def get_source_files(self) -> list[str]:
         """
         Return a list of all files that are used by the command to create the expected
         outputs.
@@ -98,7 +100,7 @@ def get_source_files(self) -> List[str]:
         All files should be strings relative to the project root directory.
         """
 
-    def get_outputs(self) -> List[str]:
+    def get_outputs(self) -> list[str]:
         """
         Return a list of files intended for distribution as they would have been
         produced by the build.
@@ -111,7 +113,7 @@ def get_outputs(self) -> List[str]:
            and don't correspond to any source file already present in the project.
         """
 
-    def get_output_mapping(self) -> Dict[str, str]:
+    def get_output_mapping(self) -> dict[str, str]:
         """
         Return a mapping between destination files as they would be produced by the
         build (dict keys) into the respective existing (source) files (dict values).
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index 6056fe9b24..9d8aa7fcdc 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -1,9 +1,11 @@
+from __future__ import annotations
+
 import os
 import sys
 import itertools
 from importlib.machinery import EXTENSION_SUFFIXES
 from importlib.util import cache_from_source as _compiled_file_name
-from typing import Dict, Iterator, List, Tuple
+from typing import Iterator
 from pathlib import Path
 
 from distutils.command.build_ext import build_ext as _du_build_ext
@@ -93,7 +95,7 @@ def run(self):
         if old_inplace:
             self.copy_extensions_to_source()
 
-    def _get_inplace_equivalent(self, build_py, ext: Extension) -> Tuple[str, str]:
+    def _get_inplace_equivalent(self, build_py, ext: Extension) -> tuple[str, str]:
         fullname = self.get_ext_fullname(ext.name)
         filename = self.get_ext_filename(fullname)
         modpath = fullname.split('.')
@@ -125,7 +127,7 @@ def _get_equivalent_stub(self, ext: Extension, output_file: str) -> str:
         _, _, name = ext.name.rpartition(".")
         return f"{os.path.join(dir_, name)}.py"
 
-    def _get_output_mapping(self) -> Iterator[Tuple[str, str]]:
+    def _get_output_mapping(self) -> Iterator[tuple[str, str]]:
         if not self.inplace:
             return
 
@@ -265,7 +267,7 @@ def links_to_dynamic(self, ext):
         pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
         return any(pkg + libname in libnames for libname in ext.libraries)
 
-    def get_source_files(self) -> List[str]:
+    def get_source_files(self) -> list[str]:
         return [*_build_ext.get_source_files(self), *self._get_internal_depends()]
 
     def _get_internal_depends(self) -> Iterator[str]:
@@ -306,12 +308,12 @@ def skip(orig_path: str, reason: str) -> None:
 
             yield path.as_posix()
 
-    def get_outputs(self) -> List[str]:
+    def get_outputs(self) -> list[str]:
         if self.inplace:
             return list(self.get_output_mapping().keys())
         return sorted(_build_ext.get_outputs(self) + self.__get_stubs_outputs())
 
-    def get_output_mapping(self) -> Dict[str, str]:
+    def get_output_mapping(self) -> dict[str, str]:
         """See :class:`setuptools.commands.build.SubCommand`"""
         mapping = self._get_output_mapping()
         return dict(sorted(mapping, key=lambda x: x[0]))
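
These ``build_ext`` methods implement the ``SubCommand`` contract documented in
``setuptools/command/build.py`` above. A toy implementation, with hypothetical file
names, shows how the three methods are meant to agree:

    from __future__ import annotations

    class toy_subcommand:
        def get_source_files(self) -> list[str]:
            # inputs, as paths relative to the project root
            return ["src/pkg/module.c"]

        def get_output_mapping(self) -> dict[str, str]:
            # destination (as built) -> existing source file
            return {"build/lib/pkg/module.so": "src/pkg/module.c"}

        def get_outputs(self) -> list[str]:
            # may also list generated files with no corresponding source,
            # e.g. the inplace stubs handled by build_ext above
            return [*self.get_output_mapping(), "build/lib/pkg/stub.py"]
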
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index 3f40b060b3..e74946f601 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from functools import partial
 from glob import glob
 from distutils.util import convert_path
@@ -9,7 +11,7 @@
 import itertools
 import stat
 from pathlib import Path
-from typing import Dict, Iterable, Iterator, List, Optional, Tuple
+from typing import Iterable, Iterator
 
 from ..extern.more_itertools import unique_everseen
 from ..warnings import SetuptoolsDeprecationWarning
@@ -33,7 +35,7 @@ class build_py(orig.build_py):
     """
 
     editable_mode: bool = False
-    existing_egg_info_dir: Optional[str] = None  #: Private API, internal use only.
+    existing_egg_info_dir: str | None = None  #: Private API, internal use only.
 
     def finalize_options(self):
         orig.build_py.finalize_options(self)
@@ -130,13 +132,13 @@ def find_data_files(self, package, src_dir):
         )
         return self.exclude_data_files(package, src_dir, files)
 
-    def get_outputs(self, include_bytecode=1) -> List[str]:
+    def get_outputs(self, include_bytecode=1) -> list[str]:
         """See :class:`setuptools.commands.build.SubCommand`"""
         if self.editable_mode:
             return list(self.get_output_mapping().keys())
         return super().get_outputs(include_bytecode)
 
-    def get_output_mapping(self) -> Dict[str, str]:
+    def get_output_mapping(self) -> dict[str, str]:
         """See :class:`setuptools.commands.build.SubCommand`"""
         mapping = itertools.chain(
             self._get_package_data_output_mapping(),
@@ -144,14 +146,14 @@ def get_output_mapping(self) -> Dict[str, str]:
         )
         return dict(sorted(mapping, key=lambda x: x[0]))
 
-    def _get_module_mapping(self) -> Iterator[Tuple[str, str]]:
+    def _get_module_mapping(self) -> Iterator[tuple[str, str]]:
         """Iterate over all modules producing (dest, src) pairs."""
         for package, module, module_file in self.find_all_modules():
             package = package.split('.')
             filename = self.get_module_outfile(self.build_lib, package, module)
             yield (filename, module_file)
 
-    def _get_package_data_output_mapping(self) -> Iterator[Tuple[str, str]]:
+    def _get_package_data_output_mapping(self) -> Iterator[tuple[str, str]]:
         """Iterate over package data producing (dest, src) pairs."""
         for package, src_dir, build_dir, filenames in self.data_files:
             for filename in filenames:
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 41ff382fe4..df4d34570d 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -10,6 +10,8 @@
 
 """
 
+from __future__ import annotations
+
 from glob import glob
 from distutils.util import get_platform
 from distutils.util import convert_path, subst_vars
@@ -25,7 +27,6 @@
 from distutils.command import install
 import sys
 import os
-from typing import Dict, List
 import zipimport
 import shutil
 import tempfile
@@ -2038,8 +2039,8 @@ class CommandSpec(list):
     those passed to Popen.
     """
 
-    options: List[str] = []
-    split_args: Dict[str, bool] = dict()
+    options: list[str] = []
+    split_args: dict[str, bool] = dict()
 
     @classmethod
     def best(cls):
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index a835a8194b..55d477eebf 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -10,6 +10,8 @@
    *auxiliary build directory* or ``auxiliary_dir``.
 """
 
+from __future__ import annotations
+
 import logging
 import io
 import os
@@ -23,14 +25,10 @@
 from tempfile import TemporaryDirectory
 from typing import (
     TYPE_CHECKING,
-    Dict,
     Iterable,
     Iterator,
-    List,
     Mapping,
-    Optional,
     Protocol,
-    Tuple,
     TypeVar,
     cast,
 )
@@ -78,7 +76,7 @@ class _EditableMode(Enum):
     COMPAT = "compat"  # TODO: Remove `compat` after Dec/2022.
 
     @classmethod
-    def convert(cls, mode: Optional[str]) -> "_EditableMode":
+    def convert(cls, mode: str | None) -> _EditableMode:
         if not mode:
             return _EditableMode.LENIENT  # default
 
@@ -180,7 +178,7 @@ def _install_namespaces(self, installation_dir, pth_prefix):
         installer = _NamespaceInstaller(dist, installation_dir, pth_prefix, src_root)
         installer.install_namespaces()
 
-    def _find_egg_info_dir(self) -> Optional[str]:
+    def _find_egg_info_dir(self) -> str | None:
         parent_dir = Path(self.dist_info_dir).parent if self.dist_info_dir else Path()
         candidates = map(str, parent_dir.glob("*.egg-info"))
         return next(candidates, None)
@@ -255,9 +253,9 @@ def _set_editable_mode(self):
             elif hasattr(cmd, "inplace"):
                 cmd.inplace = True  # backward compatibility with distutils
 
-    def _collect_build_outputs(self) -> Tuple[List[str], Dict[str, str]]:
-        files: List[str] = []
-        mapping: Dict[str, str] = {}
+    def _collect_build_outputs(self) -> tuple[list[str], dict[str, str]]:
+        files: list[str] = []
+        mapping: dict[str, str] = {}
         build = self.get_finalized_command("build")
 
         for cmd_name in build.get_sub_commands():
@@ -275,7 +273,7 @@ def _run_build_commands(
         unpacked_wheel: StrPath,
         build_lib: StrPath,
         tmp_dir: StrPath,
-    ) -> Tuple[List[str], Dict[str, str]]:
+    ) -> tuple[list[str], dict[str, str]]:
         self._configure_build(dist_name, unpacked_wheel, build_lib, tmp_dir)
         self._run_build_subcommands()
         files, mapping = self._collect_build_outputs()
@@ -373,7 +371,7 @@ def _select_strategy(
         name: str,
         tag: str,
         build_lib: StrPath,
-    ) -> "EditableStrategy":
+    ) -> EditableStrategy:
         """Decides which strategy to use to implement an editable installation."""
         build_name = f"__editable__.{name}-{tag}"
         project_dir = Path(self.project_dir)
@@ -396,9 +394,7 @@ def _select_strategy(
 
 
 class EditableStrategy(Protocol):
-    def __call__(
-        self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]
-    ): ...
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]): ...
 
     def __enter__(self): ...
 
@@ -406,12 +402,12 @@ def __exit__(self, _exc_type, _exc_value, _traceback): ...
 
 
 class _StaticPth:
-    def __init__(self, dist: Distribution, name: str, path_entries: List[Path]):
+    def __init__(self, dist: Distribution, name: str, path_entries: list[Path]):
         self.dist = dist
         self.name = name
         self.path_entries = path_entries
 
-    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
         entries = "\n".join(str(p.resolve()) for p in self.path_entries)
         contents = _encode_pth(f"{entries}\n")
         wheel.writestr(f"__editable__.{self.name}.pth", contents)
@@ -451,11 +447,11 @@ def __init__(
         self._file = dist.get_command_obj("build_py").copy_file  # type: ignore[union-attr]
         super().__init__(dist, name, [self.auxiliary_dir])
 
-    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
         self._create_links(files, mapping)
         super().__call__(wheel, files, mapping)
 
-    def _normalize_output(self, file: str) -> Optional[str]:
+    def _normalize_output(self, file: str) -> str | None:
         # Files relative to build_lib will be normalized to None
         with suppress(ValueError):
             path = Path(file).resolve().relative_to(self.build_lib)
@@ -505,13 +501,13 @@ def __init__(self, dist: Distribution, name: str):
         self.dist = dist
         self.name = name
 
-    def template_vars(self) -> Tuple[str, str, Dict[str, str], Dict[str, List[str]]]:
+    def template_vars(self) -> tuple[str, str, dict[str, str], dict[str, list[str]]]:
         src_root = self.dist.src_root or os.curdir
         top_level = chain(_find_packages(self.dist), _find_top_level_modules(self.dist))
         package_dir = self.dist.package_dir or {}
         roots = _find_package_roots(top_level, package_dir, src_root)
 
-        namespaces_: Dict[str, List[str]] = dict(
+        namespaces_: dict[str, list[str]] = dict(
             chain(
                 _find_namespaces(self.dist.packages or [], roots),
                 ((ns, []) for ns in _find_virtual_namespaces(roots)),
@@ -532,7 +528,7 @@ def template_vars(self) -> Tuple[str, str, Dict[str, str], Dict[str, List[str]]]
         finder = _normalization.safe_identifier(name)
         return finder, name, mapping, namespaces_
 
-    def get_implementation(self) -> Iterator[Tuple[str, bytes]]:
+    def get_implementation(self) -> Iterator[tuple[str, bytes]]:
         finder, name, mapping, namespaces_ = self.template_vars()
 
         content = bytes(_finder_template(name, mapping, namespaces_), "utf-8")
@@ -541,7 +537,7 @@ def get_implementation(self) -> Iterator[Tuple[str, bytes]]:
         content = _encode_pth(f"import {finder}; {finder}.install()")
         yield (f"__editable__.{self.name}.pth", content)
 
-    def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
         for file, content in self.get_implementation():
             wheel.writestr(file, content)
 
@@ -597,7 +593,7 @@ def _can_symlink_files(base_dir: Path) -> bool:
 
 
 def _simple_layout(
-    packages: Iterable[str], package_dir: Dict[str, str], project_dir: StrPath
+    packages: Iterable[str], package_dir: dict[str, str], project_dir: StrPath
 ) -> bool:
     """Return ``True`` if:
     - all packages are contained by the same parent directory, **and**
@@ -680,8 +676,8 @@ def _find_package_roots(
     packages: Iterable[str],
     package_dir: Mapping[str, str],
     src_root: StrPath,
-) -> Dict[str, str]:
-    pkg_roots: Dict[str, str] = {
+) -> dict[str, str]:
+    pkg_roots: dict[str, str] = {
         pkg: _absolute_root(find_package_path(pkg, package_dir, src_root))
         for pkg in sorted(packages)
     }
@@ -700,7 +696,7 @@ def _absolute_root(path: StrPath) -> str:
         return str(parent.resolve() / path_.name)
 
 
-def _find_virtual_namespaces(pkg_roots: Dict[str, str]) -> Iterator[str]:
+def _find_virtual_namespaces(pkg_roots: dict[str, str]) -> Iterator[str]:
     """By carefully designing ``package_dir``, it is possible to implement the logical
     structure of PEP 420 in a package without the corresponding directories.
 
@@ -725,15 +721,15 @@ def _find_virtual_namespaces(pkg_roots: Dict[str, str]) -> Iterator[str]:
 
 
 def _find_namespaces(
-    packages: List[str], pkg_roots: Dict[str, str]
-) -> Iterator[Tuple[str, List[str]]]:
+    packages: list[str], pkg_roots: dict[str, str]
+) -> Iterator[tuple[str, list[str]]]:
     for pkg in packages:
         path = find_package_path(pkg, pkg_roots, "")
         if Path(path).exists() and not Path(path, "__init__.py").exists():
             yield (pkg, [path])
 
 
-def _remove_nested(pkg_roots: Dict[str, str]) -> Dict[str, str]:
+def _remove_nested(pkg_roots: dict[str, str]) -> dict[str, str]:
     output = dict(pkg_roots.copy())
 
     for pkg, path in reversed(list(pkg_roots.items())):
@@ -883,7 +879,7 @@ def install():
 
 
 def _finder_template(
-    name: str, mapping: Mapping[str, str], namespaces: Dict[str, List[str]]
+    name: str, mapping: Mapping[str, str], namespaces: dict[str, list[str]]
 ) -> str:
     """Create a string containing the code for the``MetaPathFinder`` and
     ``PathEntryFinder``.
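
``EditableStrategy`` above is a ``typing.Protocol``, so the strategy classes satisfy
it structurally, without inheriting from it. A self-contained sketch of the same
idea, with an invented no-op strategy:

    from __future__ import annotations
    from typing import Protocol

    class Strategy(Protocol):
        def __call__(self, wheel, files: list[str], mapping: dict[str, str]): ...
        def __enter__(self): ...
        def __exit__(self, _exc_type, _exc_value, _traceback): ...

    class NullStrategy:  # no Protocol subclassing needed
        def __call__(self, wheel, files, mapping):
            pass
        def __enter__(self):
            return self
        def __exit__(self, *exc_info):
            pass

    strategy: Strategy = NullStrategy()  # accepted by static type checkers
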
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index e9b0a0fe02..f44281b49b 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -1,8 +1,9 @@
+from __future__ import annotations
+
 from distutils import log
 import distutils.command.install_scripts as orig
 import os
 import sys
-from typing import List
 
 from .._path import ensure_directory
 
@@ -19,7 +20,7 @@ def run(self) -> None:
         if self.distribution.scripts:
             orig.install_scripts.run(self)  # run first to set up self.outfiles
         else:
-            self.outfiles: List[str] = []
+            self.outfiles: list[str] = []
         if self.no_ep:
             # don't install entry point scripts into .egg file!
             return
diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py
index 6f73721c70..064d7959ff 100644
--- a/setuptools/command/rotate.py
+++ b/setuptools/command/rotate.py
@@ -1,9 +1,10 @@
+from __future__ import annotations
+
 from distutils.util import convert_path
 from distutils import log
 from distutils.errors import DistutilsOptionError
 import os
 import shutil
-from typing import List
 
 from setuptools import Command
 
@@ -18,7 +19,7 @@ class rotate(Command):
         ('keep=', 'k', "number of matching distributions to keep"),
     ]
 
-    boolean_options: List[str] = []
+    boolean_options: list[str] = []
 
     def initialize_options(self):
         self.match = None
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 3626282a79..c7e25b755f 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -8,6 +8,8 @@
 **PRIVATE MODULE**: API reserved for setuptools internal usage only.
 """
 
+from __future__ import annotations
+
 import logging
 import os
 from collections.abc import Mapping
@@ -20,12 +22,6 @@
     TYPE_CHECKING,
     Any,
     Callable,
-    Dict,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    Type,
     Union,
     cast,
 )
@@ -46,7 +42,7 @@
 _logger = logging.getLogger(__name__)
 
 
-def apply(dist: "Distribution", config: dict, filename: StrPath) -> "Distribution":
+def apply(dist: Distribution, config: dict, filename: StrPath) -> Distribution:
     """Apply configuration dict read with :func:`read_configuration`"""
 
     if not config:
@@ -68,7 +64,7 @@ def apply(dist: "Distribution", config: dict, filename: StrPath) -> "Distributio
     return dist
 
 
-def _apply_project_table(dist: "Distribution", config: dict, root_dir: StrPath):
+def _apply_project_table(dist: Distribution, config: dict, root_dir: StrPath):
     project_table = config.get("project", {}).copy()
     if not project_table:
         return  # short-circuit
@@ -85,7 +81,7 @@ def _apply_project_table(dist: "Distribution", config: dict, root_dir: StrPath):
             _set_config(dist, corresp, value)
 
 
-def _apply_tool_table(dist: "Distribution", config: dict, filename: StrPath):
+def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
     tool_table = config.get("tool", {}).get("setuptools", {})
     if not tool_table:
         return  # short-circuit
@@ -107,7 +103,7 @@ def _apply_tool_table(dist: "Distribution", config: dict, filename: StrPath):
     _copy_command_options(config, dist, filename)
 
 
-def _handle_missing_dynamic(dist: "Distribution", project_table: dict):
+def _handle_missing_dynamic(dist: Distribution, project_table: dict):
     """Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``"""
     dynamic = set(project_table.get("dynamic", []))
     for field, getter in _PREVIOUSLY_DEFINED.items():
@@ -123,7 +119,7 @@ def json_compatible_key(key: str) -> str:
     return key.lower().replace("-", "_")
 
 
-def _set_config(dist: "Distribution", field: str, value: Any):
+def _set_config(dist: Distribution, field: str, value: Any):
     setter = getattr(dist.metadata, f"set_{field}", None)
     if setter:
         setter(value)
@@ -140,7 +136,7 @@ def _set_config(dist: "Distribution", field: str, value: Any):
 }
 
 
-def _guess_content_type(file: str) -> Optional[str]:
+def _guess_content_type(file: str) -> str | None:
     _, ext = os.path.splitext(file.lower())
     if not ext:
         return None
@@ -153,11 +149,11 @@ def _guess_content_type(file: str) -> Optional[str]:
     raise ValueError(f"Undefined content type for {file}, {msg}")
 
 
-def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: StrPath):
+def _long_description(dist: Distribution, val: _DictOrStr, root_dir: StrPath):
     from setuptools.config import expand
 
     if isinstance(val, str):
-        file: Union[str, list] = val
+        file: str | list = val
         text = expand.read_files(file, root_dir)
         ctype = _guess_content_type(val)
     else:
@@ -174,7 +170,7 @@ def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: StrPath):
         dist._referenced_files.add(cast(str, file))
 
 
-def _license(dist: "Distribution", val: dict, root_dir: StrPath):
+def _license(dist: Distribution, val: dict, root_dir: StrPath):
     from setuptools.config import expand
 
     if "file" in val:
@@ -184,7 +180,7 @@ def _license(dist: "Distribution", val: dict, root_dir: StrPath):
         _set_config(dist, "license", val["text"])
 
 
-def _people(dist: "Distribution", val: List[dict], _root_dir: StrPath, kind: str):
+def _people(dist: Distribution, val: list[dict], _root_dir: StrPath, kind: str):
     field = []
     email_field = []
     for person in val:
@@ -202,24 +198,24 @@ def _people(dist: "Distribution", val: List[dict], _root_dir: StrPath, kind: str
         _set_config(dist, f"{kind}_email", ", ".join(email_field))
 
 
-def _project_urls(dist: "Distribution", val: dict, _root_dir):
+def _project_urls(dist: Distribution, val: dict, _root_dir):
     _set_config(dist, "project_urls", val)
 
 
-def _python_requires(dist: "Distribution", val: dict, _root_dir):
+def _python_requires(dist: Distribution, val: dict, _root_dir):
     from setuptools.extern.packaging.specifiers import SpecifierSet
 
     _set_config(dist, "python_requires", SpecifierSet(val))
 
 
-def _dependencies(dist: "Distribution", val: list, _root_dir):
+def _dependencies(dist: Distribution, val: list, _root_dir):
     if getattr(dist, "install_requires", []):
         msg = "`install_requires` overwritten in `pyproject.toml` (dependencies)"
         SetuptoolsWarning.emit(msg)
     dist.install_requires = val
 
 
-def _optional_dependencies(dist: "Distribution", val: dict, _root_dir):
+def _optional_dependencies(dist: Distribution, val: dict, _root_dir):
     existing = getattr(dist, "extras_require", None) or {}
     dist.extras_require = {**existing, **val}
 
@@ -244,7 +240,7 @@ def _unify_entry_points(project_table: dict):
         # intentional (for resetting configurations that are missing `dynamic`).
 
 
-def _copy_command_options(pyproject: dict, dist: "Distribution", filename: StrPath):
+def _copy_command_options(pyproject: dict, dist: Distribution, filename: StrPath):
     tool_table = pyproject.get("tool", {})
     cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
     valid_options = _valid_command_options(cmdclass)
@@ -263,7 +259,7 @@ def _copy_command_options(pyproject: dict, dist: "Distribution", filename: StrPa
                 _logger.warning(f"Command option {cmd}.{key} is not defined")
 
 
-def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
+def _valid_command_options(cmdclass: Mapping = EMPTY) -> dict[str, set[str]]:
     from .._importlib import metadata
     from setuptools.dist import Distribution
 
@@ -280,7 +276,7 @@ def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
     return valid_options
 
 
-def _load_ep(ep: "metadata.EntryPoint") -> Optional[Tuple[str, Type]]:
+def _load_ep(ep: metadata.EntryPoint) -> tuple[str, type] | None:
     # Ignore all the errors
     try:
         return (ep.name, ep.load())
@@ -294,22 +290,22 @@ def _normalise_cmd_option_key(name: str) -> str:
     return json_compatible_key(name).strip("_=")
 
 
-def _normalise_cmd_options(desc: "_OptionsList") -> Set[str]:
+def _normalise_cmd_options(desc: _OptionsList) -> set[str]:
     return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc}
 
 
-def _get_previous_entrypoints(dist: "Distribution") -> Dict[str, list]:
+def _get_previous_entrypoints(dist: Distribution) -> dict[str, list]:
     ignore = ("console_scripts", "gui_scripts")
     value = getattr(dist, "entry_points", None) or {}
     return {k: v for k, v in value.items() if k not in ignore}
 
 
-def _get_previous_scripts(dist: "Distribution") -> Optional[list]:
+def _get_previous_scripts(dist: Distribution) -> list | None:
     value = getattr(dist, "entry_points", None) or {}
     return value.get("console_scripts")
 
 
-def _get_previous_gui_scripts(dist: "Distribution") -> Optional[list]:
+def _get_previous_gui_scripts(dist: Distribution) -> list | None:
     value = getattr(dist, "entry_points", None) or {}
     return value.get("gui_scripts")
 
@@ -349,7 +345,7 @@ def _acessor(obj):
     return _acessor
 
 
-PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = {
+PYPROJECT_CORRESPONDENCE: dict[str, _Correspondence] = {
     "readme": _long_description,
     "license": _license,
     "authors": partial(_people, kind="author"),
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 2f8868f04b..7140dc8ed8 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -18,6 +18,8 @@
 **PRIVATE MODULE**: API reserved for setuptools internal usage only.
 """
 
+from __future__ import annotations
+
 import ast
 import importlib
 import os
@@ -30,13 +32,9 @@
 from typing import (
     TYPE_CHECKING,
     Callable,
-    Dict,
     Iterable,
     Iterator,
-    List,
     Mapping,
-    Optional,
-    Tuple,
     TypeVar,
     Union,
     cast,
@@ -65,7 +63,7 @@ def __init__(self, name: str, spec: ModuleSpec):
         vars(self).update(locals())
         del self.self
 
-    def _find_assignments(self) -> Iterator[Tuple[ast.AST, ast.AST]]:
+    def _find_assignments(self) -> Iterator[tuple[ast.AST, ast.AST]]:
         for statement in self.module.body:
             if isinstance(statement, ast.Assign):
                 yield from ((target, statement.value) for target in statement.targets)
@@ -85,8 +83,8 @@ def __getattr__(self, attr):
 
 
 def glob_relative(
-    patterns: Iterable[str], root_dir: Optional[StrPath] = None
-) -> List[str]:
+    patterns: Iterable[str], root_dir: StrPath | None = None
+) -> list[str]:
     """Expand the list of glob patterns, but preserving relative paths.
 
     :param list[str] patterns: List of glob patterns
@@ -118,7 +116,7 @@ def glob_relative(
 
 
 def read_files(
-    filepaths: Union[StrPath, Iterable[StrPath]], root_dir: Optional[StrPath] = None
+    filepaths: StrPath | Iterable[StrPath], root_dir: StrPath | None = None
 ) -> str:
     """Return the content of the files concatenated using ``\n`` as str
 
@@ -145,7 +143,7 @@ def _filter_existing_files(filepaths: Iterable[StrPath]) -> Iterator[StrPath]:
             SetuptoolsWarning.emit(f"File {path!r} cannot be found")
 
 
-def _read_file(filepath: Union[bytes, StrPath]) -> str:
+def _read_file(filepath: bytes | StrPath) -> str:
     with open(filepath, encoding='utf-8') as f:
         return f.read()
 
@@ -160,8 +158,8 @@ def _assert_local(filepath: StrPath, root_dir: str):
 
 def read_attr(
     attr_desc: str,
-    package_dir: Optional[Mapping[str, str]] = None,
-    root_dir: Optional[StrPath] = None,
+    package_dir: Mapping[str, str] | None = None,
+    root_dir: StrPath | None = None,
 ):
     """Reads the value of an attribute from a module.
 
@@ -196,7 +194,7 @@ def read_attr(
         return getattr(module, attr_name)
 
 
-def _find_spec(module_name: str, module_path: Optional[StrPath]) -> ModuleSpec:
+def _find_spec(module_name: str, module_path: StrPath | None) -> ModuleSpec:
     spec = importlib.util.spec_from_file_location(module_name, module_path)
     spec = spec or importlib.util.find_spec(module_name)
 
@@ -217,8 +215,8 @@ def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
 
 
 def _find_module(
-    module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: StrPath
-) -> Optional[str]:
+    module_name: str, package_dir: Mapping[str, str] | None, root_dir: StrPath
+) -> str | None:
     """Find the path to the module named ``module_name``,
     considering the ``package_dir`` in the build configuration and ``root_dir``.
 
@@ -241,8 +239,8 @@ def _find_module(
 
 def resolve_class(
     qualified_class_name: str,
-    package_dir: Optional[Mapping[str, str]] = None,
-    root_dir: Optional[StrPath] = None,
+    package_dir: Mapping[str, str] | None = None,
+    root_dir: StrPath | None = None,
 ) -> Callable:
     """Given a qualified class name, return the associated class object"""
     root_dir = root_dir or os.getcwd()
@@ -256,10 +254,10 @@ def resolve_class(
 
 
 def cmdclass(
-    values: Dict[str, str],
-    package_dir: Optional[Mapping[str, str]] = None,
-    root_dir: Optional[StrPath] = None,
-) -> Dict[str, Callable]:
+    values: dict[str, str],
+    package_dir: Mapping[str, str] | None = None,
+    root_dir: StrPath | None = None,
+) -> dict[str, Callable]:
     """Given a dictionary mapping command names to strings for qualified class
     names, apply :func:`resolve_class` to the dict values.
     """
@@ -269,10 +267,10 @@ def cmdclass(
 def find_packages(
     *,
     namespaces=True,
-    fill_package_dir: Optional[Dict[str, str]] = None,
-    root_dir: Optional[StrPath] = None,
+    fill_package_dir: dict[str, str] | None = None,
+    root_dir: StrPath | None = None,
     **kwargs,
-) -> List[str]:
+) -> list[str]:
     """Works similarly to :func:`setuptools.find_packages`, but with all
     arguments given as keyword arguments. Moreover, ``where`` can be given
     as a list (the results will be simply concatenated).
@@ -300,7 +298,7 @@ def find_packages(
 
     root_dir = root_dir or os.curdir
     where = kwargs.pop('where', ['.'])
-    packages: List[str] = []
+    packages: list[str] = []
     fill_package_dir = {} if fill_package_dir is None else fill_package_dir
     search = list(unique_everseen(always_iterable(where)))
 
@@ -324,7 +322,7 @@ def _nest_path(parent: StrPath, path: StrPath) -> str:
     return os.path.normpath(path)
 
 
-def version(value: Union[Callable, Iterable[Union[str, int]], str]) -> str:
+def version(value: Callable | Iterable[str | int] | str) -> str:
     """When getting the version directly from an attribute,
     it should be normalised to string.
     """
@@ -349,8 +347,8 @@ def canonic_package_data(package_data: dict) -> dict:
 
 
 def canonic_data_files(
-    data_files: Union[list, dict], root_dir: Optional[StrPath] = None
-) -> List[Tuple[str, List[str]]]:
+    data_files: list | dict, root_dir: StrPath | None = None
+) -> list[tuple[str, list[str]]]:
     """For compatibility with ``setup.py``, ``data_files`` should be a list
     of pairs instead of a dict.
 
@@ -365,7 +363,7 @@ def canonic_data_files(
     ]
 
 
-def entry_points(text: str, text_source="entry-points") -> Dict[str, dict]:
+def entry_points(text: str, text_source="entry-points") -> dict[str, dict]:
     """Given the contents of entry-points file,
     process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
     The first level keys are entry-point groups, the second level keys are
@@ -390,7 +388,7 @@ class EnsurePackagesDiscovered:
     and those might not have been processed yet.
     """
 
-    def __init__(self, distribution: "Distribution"):
+    def __init__(self, distribution: Distribution):
         self._dist = distribution
         self._called = False
 
@@ -434,7 +432,7 @@ class LazyMappingProxy(Mapping[_K, _V]):
 
     def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]):
         self._obtain = obtain_mapping_value
-        self._value: Optional[Mapping[_K, _V]] = None
+        self._value: Mapping[_K, _V] | None = None
 
     def _target(self) -> Mapping[_K, _V]:
         if self._value is None:
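
The hunk above ends inside ``LazyMappingProxy``; its point is that ``_obtain`` runs
at most once, on first access. A runnable reduction of that caching pattern (class
and names illustrative, not the real API):

    from __future__ import annotations
    from typing import Callable, Mapping, TypeVar

    _K = TypeVar("_K")
    _V = TypeVar("_V")

    class LazyProxy(Mapping[_K, _V]):
        def __init__(self, obtain: Callable[[], Mapping[_K, _V]]):
            self._obtain = obtain
            self._value: Mapping[_K, _V] | None = None

        def _target(self) -> Mapping[_K, _V]:
            if self._value is None:       # first access pays the cost
                self._value = self._obtain()
            return self._value            # subsequent accesses hit the cache

        def __getitem__(self, key: _K) -> _V:
            return self._target()[key]

        def __iter__(self):
            return iter(self._target())

        def __len__(self) -> int:
            return len(self._target())
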
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index d379405595..c8dae5f751 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -9,11 +9,13 @@
 with the help of ``tomllib`` or ``tomli``.
 """
 
+from __future__ import annotations
+
 import logging
 import os
 from contextlib import contextmanager
 from functools import partial
-from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Set
+from typing import TYPE_CHECKING, Callable, Mapping
 
 from .._path import StrPath
 from ..errors import FileError, InvalidConfigError
@@ -58,10 +60,10 @@ def validate(config: dict, filepath: StrPath) -> bool:
 
 
 def apply_configuration(
-    dist: "Distribution",
+    dist: Distribution,
     filepath: StrPath,
     ignore_option_errors=False,
-) -> "Distribution":
+) -> Distribution:
     """Apply the configuration from a ``pyproject.toml`` file into an existing
     distribution object.
     """
@@ -73,7 +75,7 @@ def read_configuration(
     filepath: StrPath,
     expand=True,
     ignore_option_errors=False,
-    dist: Optional["Distribution"] = None,
+    dist: Distribution | None = None,
 ):
     """Read given configuration file and returns options from it as a dict.
 
@@ -141,9 +143,9 @@ def read_configuration(
 
 def expand_configuration(
     config: dict,
-    root_dir: Optional[StrPath] = None,
+    root_dir: StrPath | None = None,
     ignore_option_errors: bool = False,
-    dist: Optional["Distribution"] = None,
+    dist: Distribution | None = None,
 ) -> dict:
     """Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
     find their final values.
@@ -166,9 +168,9 @@ class _ConfigExpander:
     def __init__(
         self,
         config: dict,
-        root_dir: Optional[StrPath] = None,
+        root_dir: StrPath | None = None,
         ignore_option_errors: bool = False,
-        dist: Optional["Distribution"] = None,
+        dist: Distribution | None = None,
     ):
         self.config = config
         self.root_dir = root_dir or os.getcwd()
@@ -178,9 +180,9 @@ def __init__(
         self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {})
         self.ignore_option_errors = ignore_option_errors
         self._dist = dist
-        self._referenced_files: Set[str] = set()
+        self._referenced_files: set[str] = set()
 
-    def _ensure_dist(self) -> "Distribution":
+    def _ensure_dist(self) -> Distribution:
         from setuptools.dist import Distribution
 
         attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)}
@@ -233,7 +235,7 @@ def _expand_cmdclass(self, package_dir: Mapping[str, str]):
         cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
         self._process_field(self.setuptools_cfg, "cmdclass", cmdclass)
 
-    def _expand_all_dynamic(self, dist: "Distribution", package_dir: Mapping[str, str]):
+    def _expand_all_dynamic(self, dist: Distribution, package_dir: Mapping[str, str]):
         special = (  # need special handling
             "version",
             "readme",
@@ -263,7 +265,7 @@ def _expand_all_dynamic(self, dist: "Distribution", package_dir: Mapping[str, st
         updates = {k: v for k, v in obtained_dynamic.items() if v is not None}
         self.project_cfg.update(updates)
 
-    def _ensure_previously_set(self, dist: "Distribution", field: str):
+    def _ensure_previously_set(self, dist: Distribution, field: str):
         previous = _PREVIOUSLY_DEFINED[field](dist)
         if previous is None and not self.ignore_option_errors:
             msg = (
@@ -288,7 +290,7 @@ def _expand_directive(
             raise ValueError(f"invalid `{specifier}`: {directive!r}")
         return None
 
-    def _obtain(self, dist: "Distribution", field: str, package_dir: Mapping[str, str]):
+    def _obtain(self, dist: Distribution, field: str, package_dir: Mapping[str, str]):
         if field in self.dynamic_cfg:
             return self._expand_directive(
                 f"tool.setuptools.dynamic.{field}",
@@ -298,13 +300,13 @@ def _obtain(self, dist: "Distribution", field: str, package_dir: Mapping[str, st
         self._ensure_previously_set(dist, field)
         return None
 
-    def _obtain_version(self, dist: "Distribution", package_dir: Mapping[str, str]):
+    def _obtain_version(self, dist: Distribution, package_dir: Mapping[str, str]):
         # Since plugins can set version, let's silently skip if it cannot be obtained
         if "version" in self.dynamic and "version" in self.dynamic_cfg:
             return _expand.version(self._obtain(dist, "version", package_dir))
         return None
 
-    def _obtain_readme(self, dist: "Distribution") -> Optional[Dict[str, str]]:
+    def _obtain_readme(self, dist: Distribution) -> dict[str, str] | None:
         if "readme" not in self.dynamic:
             return None
 
@@ -319,8 +321,8 @@ def _obtain_readme(self, dist: "Distribution") -> Optional[Dict[str, str]]:
         return None
 
     def _obtain_entry_points(
-        self, dist: "Distribution", package_dir: Mapping[str, str]
-    ) -> Optional[Dict[str, dict]]:
+        self, dist: Distribution, package_dir: Mapping[str, str]
+    ) -> dict[str, dict] | None:
         fields = ("entry-points", "scripts", "gui-scripts")
         if not any(field in self.dynamic for field in fields):
             return None
@@ -344,21 +346,21 @@ def _set_scripts(field: str, group: str):
 
         return expanded
 
-    def _obtain_classifiers(self, dist: "Distribution"):
+    def _obtain_classifiers(self, dist: Distribution):
         if "classifiers" in self.dynamic:
             value = self._obtain(dist, "classifiers", {})
             if value:
                 return value.splitlines()
         return None
 
-    def _obtain_dependencies(self, dist: "Distribution"):
+    def _obtain_dependencies(self, dist: Distribution):
         if "dependencies" in self.dynamic:
             value = self._obtain(dist, "dependencies", {})
             if value:
                 return _parse_requirements_list(value)
         return None
 
-    def _obtain_optional_dependencies(self, dist: "Distribution"):
+    def _obtain_optional_dependencies(self, dist: Distribution):
         if "optional-dependencies" not in self.dynamic:
             return None
         if "optional-dependencies" in self.dynamic_cfg:
@@ -400,18 +402,18 @@ def _ignore_errors(ignore_option_errors: bool):
 
 class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
     def __init__(
-        self, distribution: "Distribution", project_cfg: dict, setuptools_cfg: dict
+        self, distribution: Distribution, project_cfg: dict, setuptools_cfg: dict
     ):
         super().__init__(distribution)
         self._project_cfg = project_cfg
         self._setuptools_cfg = setuptools_cfg
 
-    def __enter__(self) -> "Self":
+    def __enter__(self) -> Self:
         """When entering the context, the values of ``packages``, ``py_modules`` and
         ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``.
         """
         dist, cfg = self._dist, self._setuptools_cfg
-        package_dir: Dict[str, str] = cfg.setdefault("package-dir", {})
+        package_dir: dict[str, str] = cfg.setdefault("package-dir", {})
         package_dir.update(dist.package_dir or {})
         dist.package_dir = package_dir  # needs to be the same object
 
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 59d9cf8adb..0a7a42eb09 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -9,6 +9,8 @@
 with the help of ``configparser``.
 """
 
+from __future__ import annotations
+
 import contextlib
 import functools
 import os
@@ -22,9 +24,6 @@
     Dict,
     Generic,
     Iterable,
-    List,
-    Optional,
-    Set,
     Tuple,
     TypeVar,
     Union,
@@ -80,7 +79,7 @@ def read_configuration(
     return configuration_to_dict(handlers)
 
 
-def apply_configuration(dist: "Distribution", filepath: StrPath) -> "Distribution":
+def apply_configuration(dist: Distribution, filepath: StrPath) -> Distribution:
     """Apply the configuration from a ``setup.cfg`` file into an existing
     distribution object.
     """
@@ -90,11 +89,11 @@ def apply_configuration(dist: "Distribution", filepath: StrPath) -> "Distributio
 
 
 def _apply(
-    dist: "Distribution",
+    dist: Distribution,
     filepath: StrPath,
     other_files: Iterable[StrPath] = (),
     ignore_option_errors: bool = False,
-) -> Tuple["ConfigHandler", ...]:
+) -> tuple[ConfigHandler, ...]:
     """Read configuration from ``filepath`` and applies to the ``dist`` object."""
     from setuptools.dist import _Distribution
 
@@ -131,7 +130,7 @@ def _get_option(target_obj: Target, key: str):
     return getter()
 
 
-def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
+def configuration_to_dict(handlers: tuple[ConfigHandler, ...]) -> dict:
     """Returns configuration data gathered by given handlers as a dict.
 
     :param list[ConfigHandler] handlers: Handlers list,
@@ -150,10 +149,10 @@ def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
 
 
 def parse_configuration(
-    distribution: "Distribution",
+    distribution: Distribution,
     command_options: AllCommandOptions,
     ignore_option_errors=False,
-) -> Tuple["ConfigMetadataHandler", "ConfigOptionsHandler"]:
+) -> tuple[ConfigMetadataHandler, ConfigOptionsHandler]:
     """Performs additional parsing of configuration options
     for a distribution.
 
@@ -236,7 +235,7 @@ class ConfigHandler(Generic[Target]):
 
     """
 
-    aliases: Dict[str, str] = {}
+    aliases: dict[str, str] = {}
     """Options aliases.
     For compatibility with various packages. E.g.: d2to1 and pbr.
     Note: `-` in keys is replaced with `_` by config parser.
@@ -253,9 +252,9 @@ def __init__(
         self.ignore_option_errors = ignore_option_errors
         self.target_obj = target_obj
         self.sections = dict(self._section_options(options))
-        self.set_options: List[str] = []
+        self.set_options: list[str] = []
         self.ensure_discovered = ensure_discovered
-        self._referenced_files: Set[str] = set()
+        self._referenced_files: set[str] = set()
         """After parsing configurations, this property will enumerate
         all files referenced by the "file:" directive. Private API for setuptools only.
         """
@@ -485,7 +484,7 @@ def parse(self) -> None:
             if section_name:  # [section.option] variant
                 method_postfix = '_%s' % section_name
 
-            section_parser_method: Optional[Callable] = getattr(
+            section_parser_method: Callable | None = getattr(
                 self,
                 # Dots in section names are translated into dunderscores.
                 ('parse_section%s' % method_postfix).replace('.', '__'),
@@ -534,11 +533,11 @@ class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
 
     def __init__(
         self,
-        target_obj: "DistributionMetadata",
+        target_obj: DistributionMetadata,
         options: AllCommandOptions,
         ignore_option_errors: bool,
         ensure_discovered: expand.EnsurePackagesDiscovered,
-        package_dir: Optional[dict] = None,
+        package_dir: dict | None = None,
         root_dir: StrPath = os.curdir,
     ):
         super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
@@ -598,14 +597,14 @@ class ConfigOptionsHandler(ConfigHandler["Distribution"]):
 
     def __init__(
         self,
-        target_obj: "Distribution",
+        target_obj: Distribution,
         options: AllCommandOptions,
         ignore_option_errors: bool,
         ensure_discovered: expand.EnsurePackagesDiscovered,
     ):
         super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
         self.root_dir = target_obj.src_root
-        self.package_dir: Dict[str, str] = {}  # To be filled by `find_packages`
+        self.package_dir: dict[str, str] = {}  # To be filled by `find_packages`
 
     @classmethod
     def _parse_list_semicolon(cls, value):
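
The ``setup.cfg`` front-end mirrors that API. A usage sketch, assuming a ``setup.cfg``
with a ``[metadata]`` section exists at the illustrative path (section keys as
produced by ``configuration_to_dict`` above):

    from setuptools.config.setupcfg import read_configuration

    cfg = read_configuration("setup.cfg")  # returns a plain dict
    print(cfg["metadata"].get("name"))
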
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
index 571be12bf4..880d414033 100644
--- a/setuptools/discovery.py
+++ b/setuptools/discovery.py
@@ -37,6 +37,8 @@
 
 """
 
+from __future__ import annotations
+
 import itertools
 import os
 from fnmatch import fnmatchcase
@@ -44,13 +46,9 @@
 from pathlib import Path
 from typing import (
     TYPE_CHECKING,
-    Dict,
     Iterable,
     Iterator,
-    List,
     Mapping,
-    Optional,
-    Tuple,
 )
 
 import _distutils_hack.override  # noqa: F401
@@ -91,8 +89,8 @@ def __contains__(self, item: str) -> bool:
 class _Finder:
     """Base class that exposes functionality for module/package finders"""
 
-    ALWAYS_EXCLUDE: Tuple[str, ...] = ()
-    DEFAULT_EXCLUDE: Tuple[str, ...] = ()
+    ALWAYS_EXCLUDE: tuple[str, ...] = ()
+    DEFAULT_EXCLUDE: tuple[str, ...] = ()
 
     @classmethod
     def find(
@@ -100,7 +98,7 @@ def find(
         where: StrPath = '.',
         exclude: Iterable[str] = (),
         include: Iterable[str] = ('*',),
-    ) -> List[str]:
+    ) -> list[str]:
         """Return a list of all Python items (packages or modules, depending on
         the finder implementation) found within directory 'where'.
 
@@ -291,7 +289,7 @@ class FlatLayoutModuleFinder(ModuleFinder):
     """Reserved top-level module names"""
 
 
-def _find_packages_within(root_pkg: str, pkg_dir: StrPath) -> List[str]:
+def _find_packages_within(root_pkg: str, pkg_dir: StrPath) -> list[str]:
     nested = PEP420PackageFinder.find(pkg_dir)
     return [root_pkg] + [".".join((root_pkg, n)) for n in nested]
 
@@ -301,7 +299,7 @@ class ConfigDiscovery:
     (from other metadata/options, the file system or conventions)
     """
 
-    def __init__(self, distribution: "Distribution"):
+    def __init__(self, distribution: Distribution):
         self.dist = distribution
         self._called = False
         self._disabled = False
@@ -329,7 +327,7 @@ def _root_dir(self) -> StrPath:
         return self.dist.src_root or os.curdir
 
     @property
-    def _package_dir(self) -> Dict[str, str]:
+    def _package_dir(self) -> dict[str, str]:
         if self.dist.package_dir is None:
             return {}
         return self.dist.package_dir
@@ -455,7 +453,7 @@ def _analyse_flat_modules(self) -> bool:
         self._ensure_no_accidental_inclusion(self.dist.py_modules, "modules")
         return bool(self.dist.py_modules)
 
-    def _ensure_no_accidental_inclusion(self, detected: List[str], kind: str):
+    def _ensure_no_accidental_inclusion(self, detected: list[str], kind: str):
         if len(detected) > 1:
             from inspect import cleandoc
 
@@ -495,7 +493,7 @@ def analyse_name(self):
         if name:
             self.dist.metadata.name = name
 
-    def _find_name_single_package_or_module(self) -> Optional[str]:
+    def _find_name_single_package_or_module(self) -> str | None:
         """Exactly one module or package"""
         for field in ('packages', 'py_modules'):
             items = getattr(self.dist, field, None) or []
@@ -505,7 +503,7 @@ def _find_name_single_package_or_module(self) -> Optional[str]:
 
         return None
 
-    def _find_name_from_packages(self) -> Optional[str]:
+    def _find_name_from_packages(self) -> str | None:
         """Try to find the root package that is not a PEP 420 namespace"""
         if not self.dist.packages:
             return None
@@ -522,7 +520,7 @@ def _find_name_from_packages(self) -> Optional[str]:
         return None
 
 
-def remove_nested_packages(packages: List[str]) -> List[str]:
+def remove_nested_packages(packages: list[str]) -> list[str]:
     """Remove nested packages from a list of packages.
 
     >>> remove_nested_packages(["a", "a.b1", "a.b2", "a.b1.c1"])
@@ -540,7 +538,7 @@ def remove_nested_packages(packages: List[str]) -> List[str]:
     return top_level
 
 
-def remove_stubs(packages: List[str]) -> List[str]:
+def remove_stubs(packages: list[str]) -> list[str]:
     """Remove type stubs (:pep:`561`) from a list of packages.
 
     >>> remove_stubs(["a", "a.b", "a-stubs", "a-stubs.b.c", "b", "c-stubs"])
@@ -550,8 +548,8 @@ def remove_stubs(packages: List[str]) -> List[str]:
 
 
 def find_parent_package(
-    packages: List[str], package_dir: Mapping[str, str], root_dir: StrPath
-) -> Optional[str]:
+    packages: list[str], package_dir: Mapping[str, str], root_dir: StrPath
+) -> str | None:
     """Find the parent package that is not a namespace."""
     packages = sorted(packages, key=len)
     common_ancestors = []
@@ -607,7 +605,7 @@ def find_package_path(
     return os.path.join(root_dir, *parent.split("/"), *parts)
 
 
-def construct_package_dir(packages: List[str], package_path: StrPath) -> Dict[str, str]:
+def construct_package_dir(packages: list[str], package_path: StrPath) -> dict[str, str]:
     parent_pkgs = remove_nested_packages(packages)
     prefix = Path(package_path).parts
     return {pkg: "/".join([*prefix, *pkg.split(".")]) for pkg in parent_pkgs}
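
A worked example of ``construct_package_dir``, derived directly from its body:
nested packages are removed first, then the path prefix is applied to each remaining
parent package (values illustrative):

    from setuptools.discovery import construct_package_dir

    # remove_nested_packages(["pkg", "pkg.sub"]) -> ["pkg"]
    # Path("src").parts                          -> ("src",)
    construct_package_dir(["pkg", "pkg.sub"], "src")
    # -> {"pkg": "src/pkg"}
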
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 80ae589d4f..30cdfdb10b 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -1,5 +1,4 @@
-__all__ = ['Distribution']
-
+from __future__ import annotations
 
 import io
 import itertools
@@ -10,7 +9,7 @@
 from contextlib import suppress
 from glob import iglob
 from pathlib import Path
-from typing import TYPE_CHECKING, Dict, List, MutableMapping, Optional, Set, Tuple
+from typing import TYPE_CHECKING, MutableMapping
 
 import distutils.cmd
 import distutils.command
@@ -38,6 +37,7 @@
 from .monkey import get_unpatched
 from .warnings import InformationOnly, SetuptoolsDeprecationWarning
 
+__all__ = ['Distribution']
 
 sequence = tuple, list
 
@@ -287,12 +287,12 @@ def patch_missing_pkg_info(self, attrs):
                 dist._version = _normalization.safe_version(str(attrs['version']))
                 self._patched_dist = dist
 
-    def __init__(self, attrs: Optional[MutableMapping] = None) -> None:
+    def __init__(self, attrs: MutableMapping | None = None) -> None:
         have_package_data = hasattr(self, "package_data")
         if not have_package_data:
-            self.package_data: Dict[str, List[str]] = {}
+            self.package_data: dict[str, list[str]] = {}
         attrs = attrs or {}
-        self.dist_files: List[Tuple[str, str, str]] = []
+        self.dist_files: list[tuple[str, str, str]] = []
         # Filter-out setuptools' specific options.
         self.src_root = attrs.pop("src_root", None)
         self.patch_missing_pkg_info(attrs)
@@ -309,7 +309,7 @@ def __init__(self, attrs: Optional[MutableMapping] = None) -> None:
         # Private API (setuptools-use only, not restricted to Distribution)
         # Stores files that are referenced by the configuration and need to be in the
         # sdist (e.g. `version = file: VERSION.txt`)
-        self._referenced_files: Set[str] = set()
+        self._referenced_files: set[str] = set()
 
         self.set_defaults = ConfigDiscovery(self)
 
@@ -387,10 +387,10 @@ def _normalize_requires(self):
 
     def _finalize_license_files(self) -> None:
         """Compute names of all license files which should be included."""
-        license_files: Optional[List[str]] = self.metadata.license_files
-        patterns: List[str] = license_files if license_files else []
+        license_files: list[str] | None = self.metadata.license_files
+        patterns: list[str] = license_files if license_files else []
 
-        license_file: Optional[str] = self.metadata.license_file
+        license_file: str | None = self.metadata.license_file
         if license_file and license_file not in patterns:
             patterns.append(license_file)
 
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index 1f8d8ffe0f..e513f95245 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -2,20 +2,22 @@
 Monkey patching of distutils.
 """
 
+from __future__ import annotations
+
 import functools
 import inspect
 import platform
 import sys
 import types
 from importlib import import_module
-from typing import List, TypeVar
+from typing import TypeVar
 
 import distutils.filelist
 
 
 _T = TypeVar("_T")
 
-__all__: List[str] = []
+__all__: list[str] = []
 """
 Everything is private. Contact the project team
 if you think you need this functionality.
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index b2a0f2bebb..f86c480d18 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -11,6 +11,8 @@
 This may also support compilers shipped with compatible Visual Studio versions.
 """
 
+from __future__ import annotations
+
 import json
 from os import listdir, pathsep
 from os.path import join, isfile, isdir, dirname
@@ -20,7 +22,7 @@
 import itertools
 import subprocess
 import distutils.errors
-from typing import Dict, TYPE_CHECKING
+from typing import TYPE_CHECKING
 from setuptools.extern.more_itertools import unique_everseen
 
 # https://github.com/python/mypy/issues/8166
@@ -36,7 +38,7 @@ class winreg:
         HKEY_LOCAL_MACHINE = None
         HKEY_CLASSES_ROOT = None
 
-    environ: Dict[str, str] = dict()
+    environ: dict[str, str] = dict()
 
 
 def _msvc14_find_vc2015():
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index e5da9d86f0..147b26749e 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 import sys
 import tempfile
@@ -9,7 +11,6 @@
 import pickle
 import textwrap
 import builtins
-from typing import Union, List
 
 import pkg_resources
 from distutils.errors import DistutilsError
@@ -425,7 +426,7 @@ class DirectorySandbox(AbstractSandbox):
         "tempnam",
     ])
 
-    _exception_patterns: List[Union[str, re.Pattern]] = []
+    _exception_patterns: list[str | re.Pattern] = []
     "exempt writing to paths that match the pattern"
 
     def __init__(self, sandbox, exceptions=_EXCEPTIONS):
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index bb78f64310..6b3ee9cf1e 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -4,12 +4,13 @@
 To run these tests offline, please have a look on ``./downloads/preload.py``
 """
 
+from __future__ import annotations
+
 import io
 import re
 import tarfile
 from inspect import cleandoc
 from pathlib import Path
-from typing import Tuple
 from unittest.mock import Mock
 from zipfile import ZipFile
 
@@ -457,7 +458,7 @@ def core_metadata(dist) -> str:
     # Make sure core metadata is valid
     Metadata.from_email(pkg_file_txt, validate=True)  # can raise exceptions
 
-    skip_prefixes: Tuple[str, ...] = ()
+    skip_prefixes: tuple[str, ...] = ()
     skip_lines = set()
     # ---- DIFF NORMALISATION ----
     # PEP 621 is very particular about author/maintainer metadata conversion, so skip
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index f6b2302d97..f2489896b3 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import sys
 import ast
 import os
@@ -5,7 +7,6 @@
 import re
 import stat
 import time
-from typing import List, Tuple
 from pathlib import Path
 from unittest import mock
 
@@ -77,7 +78,7 @@ def run():
         })
 
     @staticmethod
-    def _extract_mv_version(pkg_info_lines: List[str]) -> Tuple[int, int]:
+    def _extract_mv_version(pkg_info_lines: list[str]) -> tuple[int, int]:
         version_str = pkg_info_lines[0].split(' ')[1]
         major, minor = map(int, version_str.split('.')[:2])
         return major, minor
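
A worked example of ``_extract_mv_version``: the first ``PKG-INFO`` line has the form
``Metadata-Version: 2.1``, so:

    # pkg_info_lines[0].split(' ')[1]    -> "2.1"
    # map(int, "2.1".split('.')[:2])     -> 2, 1
    _extract_mv_version(["Metadata-Version: 2.1"])  # == (2, 1)
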
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
index 6911b0224c..f3eba733d9 100644
--- a/setuptools/tests/test_manifest.py
+++ b/setuptools/tests/test_manifest.py
@@ -1,5 +1,7 @@
 """sdist tests"""
 
+from __future__ import annotations
+
 import contextlib
 import os
 import shutil
@@ -10,7 +12,6 @@
 import logging
 from distutils import log
 from distutils.errors import DistutilsTemplateError
-from typing import List, Tuple
 
 from setuptools.command.egg_info import FileList, egg_info, translate_pattern
 from setuptools.dist import Distribution
@@ -76,7 +77,7 @@ def touch(filename):
 )
 
 
-translate_specs: List[Tuple[str, List[str], List[str]]] = [
+translate_specs: list[tuple[str, list[str], list[str]]] = [
     ('foo', ['foo'], ['bar', 'foobar']),
     ('foo/bar', ['foo/bar'], ['foo/bar/baz', './foo/bar', 'foo']),
     # Glob matching
diff --git a/setuptools/warnings.py b/setuptools/warnings.py
index b3e252ca57..5d9cca6c37 100644
--- a/setuptools/warnings.py
+++ b/setuptools/warnings.py
@@ -5,12 +5,14 @@
 setuptools.
 """
 
+from __future__ import annotations
+
 import os
 import warnings
 from datetime import date
 from inspect import cleandoc
 from textwrap import indent
-from typing import Optional, Tuple
+from typing import Tuple
 
 _DueDate = Tuple[int, int, int]  # time tuple
 _INDENT = 8 * " "
@@ -23,11 +25,11 @@ class SetuptoolsWarning(UserWarning):
     @classmethod
     def emit(
         cls,
-        summary: Optional[str] = None,
-        details: Optional[str] = None,
-        due_date: Optional[_DueDate] = None,
-        see_docs: Optional[str] = None,
-        see_url: Optional[str] = None,
+        summary: str | None = None,
+        details: str | None = None,
+        due_date: _DueDate | None = None,
+        see_docs: str | None = None,
+        see_url: str | None = None,
         stacklevel: int = 2,
         **kwargs,
     ):
@@ -51,9 +53,9 @@ def _format(
         cls,
         summary: str,
         details: str,
-        due_date: Optional[date] = None,
-        see_url: Optional[str] = None,
-        format_args: Optional[dict] = None,
+        due_date: date | None = None,
+        see_url: str | None = None,
+        format_args: dict | None = None,
     ):
         """Private: reserved for ``setuptools`` internal use only"""
         today = date.today()
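
Note why ``Tuple`` remains imported here while the rest of the patch drops it:
``_DueDate`` is a runtime assignment, not an annotation, so the lazy evaluation
granted by the future import does not apply to it. A sketch of the distinction,
assuming Python 3.8 as the oldest supported runtime:

    from __future__ import annotations
    from typing import Tuple

    due: tuple[int, int, int] | None = None  # annotation: lazy, fine on 3.8
    _DueDate = Tuple[int, int, int]          # assignment: evaluated eagerly;
    # spelling it tuple[int, int, int] would raise TypeError on Python 3.8
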
diff --git a/tools/generate_validation_code.py b/tools/generate_validation_code.py
index 0171325bd0..ca933e8f3b 100644
--- a/tools/generate_validation_code.py
+++ b/tools/generate_validation_code.py
@@ -1,12 +1,13 @@
+from __future__ import annotations
+
 from os import PathLike
 import subprocess
 import sys
 
 from pathlib import Path
-from typing import Union
 
 
-def generate_pyproject_validation(dest: Union[str, PathLike]):
+def generate_pyproject_validation(dest: str | PathLike[str]):
     """
     Generates validation code for ``pyproject.toml`` based on JSON schemas and the
     ``validate-pyproject`` library.

From b4403a1d6297ed14a8017efa2bff1bacec56990c Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 17 Jun 2024 13:20:12 -0400
Subject: [PATCH 0722/1761] Start checking using Flake8-PYI (#4380)

---
 ruff.toml                             |  19 ++--
 setuptools/depends.py                 | 141 ++++++++++++--------------
 setuptools/tests/test_easy_install.py |   9 +-
 3 files changed, 79 insertions(+), 90 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 731e52e0e2..9f0b42cea9 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,3 +1,9 @@
+exclude = [
+	"**/_vendor",
+	"setuptools/_distutils",
+	"setuptools/config/_validate_pyproject",
+]
+
 [lint]
 extend-select = [
 	"C901",
@@ -6,6 +12,7 @@ extend-select = [
 	# local
 	"FA", # flake8-future-annotations
 	"F404", # late-future-import
+  "PYI", # flake8-pyi
 	"UP", # pyupgrade
 	"YTT", # flake8-2020
 ]
@@ -32,23 +39,13 @@ ignore = [
 	"ISC001",
 	"ISC002",
 ]
-exclude = [
-	"**/_vendor",
-	"setuptools/_distutils",
-	"setuptools/config/_validate_pyproject",
-]
 
 [lint.per-file-ignores]
 # Auto-generated code
 "setuptools/config/_validate_pyproject/*" = ["FA100"]
 
 [format]
-exclude = [
-	"**/_vendor",
-	"setuptools/_distutils",
-	"setuptools/config/_validate_pyproject",
-]
-# Enable preview, required for quote-style = "preserve"
+# Enable preview to get hugged parenthesis unwrapping
 preview = true
 # https://docs.astral.sh/ruff/settings/#format-quote-style
 quote-style = "preserve"
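
Note that the two per-section ``exclude`` lists collapse into a single top-level
``exclude``, which both ``[lint]`` and ``[format]`` then inherit; the selected rules
also gain ``PYI`` (flake8-pyi), alongside the fixes in ``setuptools/depends.py`` and
the tests listed in this commit's diffstat.
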
diff --git a/setuptools/depends.py b/setuptools/depends.py
index c0ca84d404..b6af51c410 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -9,7 +9,7 @@
 from .extern.packaging.version import Version
 
 
-__all__ = ['Require', 'find_module', 'get_module_constant', 'extract_constant']
+__all__ = ['Require', 'find_module']
 
 
 class Require:
@@ -95,86 +95,73 @@ def empty():
     return contextlib.closing(f)
 
 
-def get_module_constant(module, symbol, default=-1, paths=None):
-    """Find 'module' by searching 'paths', and extract 'symbol'
-
-    Return 'None' if 'module' does not exist on 'paths', or it does not define
-    'symbol'.  If the module defines 'symbol' as a constant, return the
-    constant.  Otherwise, return 'default'."""
-
-    try:
-        f, path, (suffix, mode, kind) = info = find_module(module, paths)
-    except ImportError:
-        # Module doesn't exist
-        return None
-
-    with maybe_close(f):
-        if kind == PY_COMPILED:
-            f.read(8)  # skip magic & date
-            code = marshal.load(f)
-        elif kind == PY_FROZEN:
-            code = _imp.get_frozen_object(module, paths)
-        elif kind == PY_SOURCE:
-            code = compile(f.read(), path, 'exec')
-        else:
-            # Not something we can parse; we'll have to import it.  :(
-            imported = _imp.get_module(module, paths, info)
-            return getattr(imported, symbol, None)
-
-    return extract_constant(code, symbol, default)
-
-
-def extract_constant(code, symbol, default=-1):
-    """Extract the constant value of 'symbol' from 'code'
-
-    If the name 'symbol' is bound to a constant value by the Python code
-    object 'code', return that value.  If 'symbol' is bound to an expression,
-    return 'default'.  Otherwise, return 'None'.
-
-    Return value is based on the first assignment to 'symbol'.  'symbol' must
-    be a global, or at least a non-"fast" local in the code block.  That is,
-    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
-    must be present in 'code.co_names'.
-    """
-    if symbol not in code.co_names:
-        # name's not there, can't possibly be an assignment
-        return None
-
-    name_idx = list(code.co_names).index(symbol)
-
-    STORE_NAME = dis.opmap['STORE_NAME']
-    STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
-    LOAD_CONST = dis.opmap['LOAD_CONST']
-
-    const = default
-
-    for byte_code in dis.Bytecode(code):
-        op = byte_code.opcode
-        arg = byte_code.arg
-
-        if op == LOAD_CONST:
-            const = code.co_consts[arg]
-        elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
-            return const
-        else:
-            const = default
+# Some objects are not available on some platforms.
+# XXX it'd be better to test assertions about bytecode instead.
+if not sys.platform.startswith('java') and sys.platform != 'cli':
+
+    def get_module_constant(module, symbol, default=-1, paths=None):
+        """Find 'module' by searching 'paths', and extract 'symbol'
+
+        Return 'None' if 'module' does not exist on 'paths', or it does not define
+        'symbol'.  If the module defines 'symbol' as a constant, return the
+        constant.  Otherwise, return 'default'."""
+
+        try:
+            f, path, (suffix, mode, kind) = info = find_module(module, paths)
+        except ImportError:
+            # Module doesn't exist
+            return None
+
+        with maybe_close(f):
+            if kind == PY_COMPILED:
+                f.read(8)  # skip magic & date
+                code = marshal.load(f)
+            elif kind == PY_FROZEN:
+                code = _imp.get_frozen_object(module, paths)
+            elif kind == PY_SOURCE:
+                code = compile(f.read(), path, 'exec')
+            else:
+                # Not something we can parse; we'll have to import it.  :(
+                imported = _imp.get_module(module, paths, info)
+                return getattr(imported, symbol, None)
+
+        return extract_constant(code, symbol, default)
+
+    def extract_constant(code, symbol, default=-1):
+        """Extract the constant value of 'symbol' from 'code'
+
+        If the name 'symbol' is bound to a constant value by the Python code
+        object 'code', return that value.  If 'symbol' is bound to an expression,
+        return 'default'.  Otherwise, return 'None'.
+
+        Return value is based on the first assignment to 'symbol'.  'symbol' must
+        be a global, or at least a non-"fast" local in the code block.  That is,
+        only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
+        must be present in 'code.co_names'.
+        """
+        if symbol not in code.co_names:
+            # name's not there, can't possibly be an assignment
+            return None
 
-    return None
+        name_idx = list(code.co_names).index(symbol)
 
+        STORE_NAME = dis.opmap['STORE_NAME']
+        STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
+        LOAD_CONST = dis.opmap['LOAD_CONST']
 
-def _update_globals():
-    """
-    Patch the globals to remove the objects not available on some platforms.
+        const = default
 
-    XXX it'd be better to test assertions about bytecode instead.
-    """
+        for byte_code in dis.Bytecode(code):
+            op = byte_code.opcode
+            arg = byte_code.arg
 
-    if not sys.platform.startswith('java') and sys.platform != 'cli':
-        return
-    incompatible = 'extract_constant', 'get_module_constant'
-    for name in incompatible:
-        del globals()[name]
-        __all__.remove(name)
+            if op == LOAD_CONST:
+                const = code.co_consts[arg]
+            elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
+                return const
+            else:
+                const = default
 
+        return None
 
-_update_globals()
+    __all__ += ['get_module_constant', 'extract_constant']
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index ada4c32285..7346a07929 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -10,13 +10,13 @@
 import itertools
 import distutils.errors
 import io
+from typing import NamedTuple
 import zipfile
 import time
 import re
 import subprocess
 import pathlib
 import warnings
-from collections import namedtuple
 from pathlib import Path
 from unittest import mock
 
@@ -1344,7 +1344,12 @@ def test_header(self):
         assert not hdr.startswith('\\"')
 
 
-VersionStub = namedtuple("VersionStub", "major, minor, micro, releaselevel, serial")
+class VersionStub(NamedTuple):
+    major: int
+    minor: int
+    micro: int
+    releaselevel: str
+    serial: int
 
 
 def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch):

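To make the relocated bytecode scan concrete: `extract_constant` walks a code object's instructions and returns the value most recently pushed by `LOAD_CONST` when it reaches the first `STORE_NAME`/`STORE_GLOBAL` of the symbol; any other intervening opcode resets the candidate back to `default`. A minimal sketch of the observable behaviour (on interpreters where the guard above defines the function; the sample source is illustrative):

    from setuptools.depends import extract_constant

    # Only bindings to literal constants are recoverable from bytecode.
    code = compile("VERSION = '1.2.3'\nDYNAMIC = len('xy')\n", "<sample>", "exec")

    print(extract_constant(code, "VERSION", default=-1))  # '1.2.3' (constant binding)
    print(extract_constant(code, "DYNAMIC", default=-1))  # -1 (bound to an expression)
    print(extract_constant(code, "MISSING", default=-1))  # None (never assigned)
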
From d1bea1b94583a4e581739a9cb8365a8177781134 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 17 Jun 2024 13:23:12 -0400
Subject: [PATCH 0723/1761] Use actual boolean parameters and variables (#4365)

Co-authored-by: Anderson Bravalheri 
---
 newsfragments/4365.misc.rst        |  1 +
 pkg_resources/__init__.py          |  2 +-
 setuptools/__init__.py             |  2 +-
 setuptools/command/bdist_egg.py    | 14 ++++++++------
 setuptools/command/build_ext.py    |  4 ++--
 setuptools/command/build_py.py     | 12 +++++++++---
 setuptools/command/develop.py      |  2 +-
 setuptools/command/easy_install.py | 16 ++++++++--------
 setuptools/command/egg_info.py     | 10 +++++-----
 setuptools/command/install_lib.py  |  6 +++---
 setuptools/command/test.py         |  2 +-
 setuptools/package_index.py        |  2 +-
 12 files changed, 41 insertions(+), 32 deletions(-)
 create mode 100644 newsfragments/4365.misc.rst

diff --git a/newsfragments/4365.misc.rst b/newsfragments/4365.misc.rst
new file mode 100644
index 0000000000..7badfff8f0
--- /dev/null
+++ b/newsfragments/4365.misc.rst
@@ -0,0 +1 @@
+Use actual boolean parameters and variables instead of 0-1 literals. -- by :user:`Avasam`
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 45a18bf73c..7e3cf54ab8 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -731,7 +731,7 @@ def __iter__(self):
 
             for key in self.entry_keys[item]:
                 if key not in seen:
-                    seen[key] = 1
+                    seen[key] = True
                     yield self.by_key[key]
 
     def add(
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index a59bbe1177..5ef0f7dbd8 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -216,7 +216,7 @@ def ensure_string_list(self, option):
                     "'%s' must be a list of strings (got %r)" % (option, val)
                 )
 
-    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
+    def reinitialize_command(self, command, reinit_subcommands=False, **kw):
         cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
         vars(cmd).update(kw)
         return cmd
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index f72294fe58..73476e0cec 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -85,9 +85,9 @@ class bdist_egg(Command):
     def initialize_options(self):
         self.bdist_dir = None
         self.plat_name = None
-        self.keep_temp = 0
+        self.keep_temp = False
         self.dist_dir = None
-        self.skip_build = 0
+        self.skip_build = False
         self.egg_output = None
         self.exclude_source_files = None
 
@@ -136,7 +136,7 @@ def do_install_data(self):
 
         try:
             log.info("installing package data to %s", self.bdist_dir)
-            self.call_command('install_data', force=0, root=None)
+            self.call_command('install_data', force=False, root=None)
         finally:
             self.distribution.data_files = old
 
@@ -164,7 +164,7 @@ def run(self):  # noqa: C901  # is too complex (14)  # FIXME
         instcmd.root = None
         if self.distribution.has_c_libraries() and not self.skip_build:
             self.run_command('build_clib')
-        cmd = self.call_command('install_lib', warn_dir=0)
+        cmd = self.call_command('install_lib', warn_dir=False)
         instcmd.root = old_root
 
         all_outputs, ext_outputs = self.get_ext_outputs()
@@ -192,7 +192,7 @@ def run(self):  # noqa: C901  # is too complex (14)  # FIXME
         if self.distribution.scripts:
             script_dir = os.path.join(egg_info, 'scripts')
             log.info("installing scripts to %s", script_dir)
-            self.call_command('install_scripts', install_dir=script_dir, no_ep=1)
+            self.call_command('install_scripts', install_dir=script_dir, no_ep=True)
 
         self.copy_metadata_to(egg_info)
         native_libs = os.path.join(egg_info, "native_libs.txt")
@@ -427,7 +427,9 @@ def can_scan():
 INSTALL_DIRECTORY_ATTRS = ['install_lib', 'install_dir', 'install_data', 'install_base']
 
 
-def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, mode='w'):
+def make_zipfile(
+    zip_filename, base_dir, verbose=False, dry_run=False, compress=True, mode='w'
+):
     """Create a zip file from all the files under 'base_dir'.  The output
     zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
     Python module (if available) or the InfoZIP "zip" utility (if installed
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index 9d8aa7fcdc..508704f3c0 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -401,7 +401,7 @@ def link_shared_object(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -436,7 +436,7 @@ def link_shared_object(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index e74946f601..ab49874635 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -46,7 +46,13 @@ def finalize_options(self):
         self.__updated_files = []
 
     def copy_file(
-        self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
+        self,
+        infile,
+        outfile,
+        preserve_mode=True,
+        preserve_times=True,
+        link=None,
+        level=1,
     ):
         # Overwrite base class to allow using links
         if link:
@@ -70,7 +76,7 @@ def run(self):
 
         # Only compile actual .py files, using our base class' idea of what our
         # output files are.
-        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
+        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=False))
 
     def __getattr__(self, attr):
         "lazily compute data files"
@@ -132,7 +138,7 @@ def find_data_files(self, package, src_dir):
         )
         return self.exclude_data_files(package, src_dir, files)
 
-    def get_outputs(self, include_bytecode=1) -> list[str]:
+    def get_outputs(self, include_bytecode=True) -> list[str]:
         """See :class:`setuptools.commands.build.SubCommand`"""
         if self.editable_mode:
             return list(self.get_output_mapping().keys())
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index d07736a005..55f24f396c 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -109,7 +109,7 @@ def install_for_development(self):
         self.run_command('egg_info')
 
         # Build extensions in-place
-        self.reinitialize_command('build_ext', inplace=1)
+        self.reinitialize_command('build_ext', inplace=True)
         self.run_command('build_ext')
 
         if setuptools.bootstrap_install_from:
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index df4d34570d..eb6ba1025f 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -171,7 +171,7 @@ def initialize_options(self):
 
         # the --user option seems to be an opt-in one,
         # so the default should be False.
-        self.user = 0
+        self.user = False
         self.zip_ok = self.local_snapshots_ok = None
         self.install_dir = self.script_dir = self.exclude_scripts = None
         self.index_url = None
@@ -1059,10 +1059,10 @@ def process(src, dst):
                     dl = dst.lower()
                     if dl.endswith('.pyd') or dl.endswith('.dll'):
                         parts[-1] = bdist_egg.strip_module(parts[-1])
-                        top_level[os.path.splitext(parts[0])[0]] = 1
+                        top_level[os.path.splitext(parts[0])[0]] = True
                         native_libs.append(src)
                     elif dl.endswith('.py') and old != 'SCRIPTS/':
-                        top_level[os.path.splitext(parts[0])[0]] = 1
+                        top_level[os.path.splitext(parts[0])[0]] = True
                         to_compile.append(dst)
                     return dst
             if not src.endswith('.pth'):
@@ -1318,12 +1318,12 @@ def byte_compile(self, to_compile):
             # try to make the byte compile messages quieter
             log.set_verbosity(self.verbose - 1)
 
-            byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
+            byte_compile(to_compile, optimize=0, force=True, dry_run=self.dry_run)
             if self.optimize:
                 byte_compile(
                     to_compile,
                     optimize=self.optimize,
-                    force=1,
+                    force=True,
                     dry_run=self.dry_run,
                 )
         finally:
@@ -1491,7 +1491,7 @@ def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
         if dirname in seen:
             continue
 
-        seen[dirname] = 1
+        seen[dirname] = True
         if not os.path.isdir(dirname):
             continue
 
@@ -1520,7 +1520,7 @@ def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
                 if line in seen:
                     continue
 
-                seen[line] = 1
+                seen[line] = True
                 if not os.path.isdir(line):
                     continue
 
@@ -1643,7 +1643,7 @@ def _load_raw(self):
                 dirty = True
                 paths.pop()
                 continue
-            seen[normalized_path] = 1
+            seen[normalized_path] = True
         f.close()
         # remove any trailing empty/blank line
         while paths and not paths[-1].strip():
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 62d2feea9b..ccc2db8972 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -534,10 +534,10 @@ class manifest_maker(sdist):
     template = "MANIFEST.in"
 
     def initialize_options(self):
-        self.use_defaults = 1
-        self.prune = 1
-        self.manifest_only = 1
-        self.force_manifest = 1
+        self.use_defaults = True
+        self.prune = True
+        self.manifest_only = True
+        self.force_manifest = True
         self.ignore_egg_info_dir = False
 
     def finalize_options(self):
@@ -623,7 +623,7 @@ def prune_file_list(self):
         self.filelist.prune(base_dir)
         sep = re.escape(os.sep)
         self.filelist.exclude_pattern(
-            r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1
+            r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=True
         )
 
     def _safe_data_files(self, build_py):
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index 9d76e429e8..5e74be247e 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -88,9 +88,9 @@ def copy_tree(
         self,
         infile: StrPath,
         outfile: str,
-        preserve_mode=1,
-        preserve_times=1,
-        preserve_symlinks=0,
+        preserve_mode=True,
+        preserve_times=True,
+        preserve_symlinks=False,
         level=1,
     ):
         assert preserve_mode and preserve_times and not preserve_symlinks
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
index 0a128f2a7a..af1349e1c6 100644
--- a/setuptools/command/test.py
+++ b/setuptools/command/test.py
@@ -136,7 +136,7 @@ def project_on_sys_path(self, include_dists=()):
         self.run_command('egg_info')
 
         # Build extensions in-place
-        self.reinitialize_command('build_ext', inplace=1)
+        self.reinitialize_command('build_ext', inplace=True)
         self.run_command('build_ext')
 
         ei_cmd = self.get_finalized_command("egg_info")
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index c3ffee41a7..c8789e279f 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -642,7 +642,7 @@ def find(req, env=None):
                             "Skipping development or system egg: %s",
                             dist,
                         )
-                        skipped[dist] = 1
+                        skipped[dist] = True
                     continue
 
                 test = dist in req and (dist.precedence <= SOURCE_DIST or not source)

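The pattern in this patch is mechanical but worth one illustration: distutils-era code used `0`/`1` where booleans were meant, which reads as `int` to both humans and type checkers. A before/after sketch (illustrative signature, not a real command):

    # Before: defaults infer as int, and callers pass bare 0/1
    def copy_file(src, dst, preserve_mode=1, preserve_times=1): ...

    # After: intent is explicit and checkers infer bool
    def copy_file(src, dst, preserve_mode=True, preserve_times=True): ...

    # The dict-as-set bookkeeping seen in the diff gets the same treatment:
    seen: dict[str, bool] = {}
    if "key" not in seen:
        seen["key"] = True  # was: seen["key"] = 1
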
From 327940683918b5a706c25cd750eafa801e63e2a8 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 17 Jun 2024 13:36:27 -0400
Subject: [PATCH 0724/1761] pkg_resources: Merge `@overload` and `TypeVar`
 annotations from typeshed

---
 mypy.ini                  |   2 +
 pkg_resources/__init__.py | 216 +++++++++++++++++++++++++++++++-------
 ruff.toml                 |   6 +-
 3 files changed, 182 insertions(+), 42 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index c5b13942c4..b8825fa8da 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -16,6 +16,8 @@ exclude = (?x)(
 	| ^setuptools/config/_validate_pyproject/ # Auto-generated
     | ^setuptools/tests/bdist_wheel_testdata/  # Duplicate module name
 	)
+# Too many false positives
+disable_error_code = overload-overlap
 
 # Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes,
 # w/o updating all the attributes and return types from the base classes for type-checkers to understand
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 7e3cf54ab8..157f4754e1 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -34,6 +34,8 @@
 import types
 from typing import (
     Any,
+    Iterator,
+    Literal,
     Mapping,
     MutableSequence,
     NamedTuple,
@@ -98,7 +100,7 @@
 from pkg_resources.extern.platformdirs import user_cache_dir as _user_cache_dir
 
 if TYPE_CHECKING:
-    from _typeshed import StrPath, StrOrBytesPath, BytesPath
+    from _typeshed import BytesPath, StrPath, StrOrBytesPath
 
 warnings.warn(
     "pkg_resources is deprecated as an API. "
@@ -109,14 +111,18 @@
 
 
 _T = TypeVar("_T")
+_DistributionT = TypeVar("_DistributionT", bound="Distribution")
 # Type aliases
 _NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
+_InstallerTypeT = Callable[["Requirement"], "_DistributionT"]
 _InstallerType = Callable[["Requirement"], Optional["Distribution"]]
 _PkgReqType = Union[str, "Requirement"]
 _EPDistType = Union["Distribution", _PkgReqType]
 _MetadataType = Optional["IResourceProvider"]
+_ResolvedEntryPoint = Any  # Can be any attribute in the module
+_ResourceStream = Any  # TODO / Incomplete: A readable file-like object
 # Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
-_ModuleLike = Union[object, types.ModuleType]
+_ModuleLike = Any
 _ProviderFactoryType = Callable[[_ModuleLike], "IResourceProvider"]
 _DistFinderType = Callable[[_T, str, bool], Iterable["Distribution"]]
 _NSHandlerType = Callable[[_T, str, str, types.ModuleType], Optional[str]]
@@ -130,6 +136,10 @@ class _LoaderProtocol(Protocol):
     def load_module(self, fullname: str, /) -> types.ModuleType: ...
 
 
+class _ZipLoaderModule(Protocol):
+    __loader__: zipimport.zipimporter
+
+
 _PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
 
 
@@ -403,7 +413,11 @@ def register_loader_type(
     _provider_factories[loader_type] = provider_factory
 
 
-def get_provider(moduleOrReq: str | Requirement):
+@overload
+def get_provider(moduleOrReq: str) -> IResourceProvider: ...
+@overload
+def get_provider(moduleOrReq: Requirement) -> Distribution: ...
+def get_provider(moduleOrReq: str | Requirement) -> IResourceProvider | Distribution:
     """Return an IResourceProvider for the named module or requirement"""
     if isinstance(moduleOrReq, Requirement):
         return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
@@ -514,22 +528,33 @@ def compatible_platforms(provided: str | None, required: str | None):
     return False
 
 
-def get_distribution(dist: _EPDistType):
+@overload
+def get_distribution(dist: _DistributionT) -> _DistributionT: ...
+@overload
+def get_distribution(dist: _PkgReqType) -> Distribution: ...
+def get_distribution(dist: Distribution | _PkgReqType) -> Distribution:
     """Return a current distribution object for a Requirement or string"""
     if isinstance(dist, str):
         dist = Requirement.parse(dist)
     if isinstance(dist, Requirement):
-        dist = get_provider(dist)
+        # Bad type narrowing: dist has to be a Requirement here, so get_provider has to return a Distribution
+        dist = get_provider(dist)  # type: ignore[assignment]
     if not isinstance(dist, Distribution):
-        raise TypeError("Expected string, Requirement, or Distribution", dist)
+        raise TypeError("Expected str, Requirement, or Distribution", dist)
     return dist
 
 
-def load_entry_point(dist: _EPDistType, group: str, name: str):
+def load_entry_point(dist: _EPDistType, group: str, name: str) -> _ResolvedEntryPoint:
     """Return `name` entry point of `group` for `dist` or raise ImportError"""
     return get_distribution(dist).load_entry_point(group, name)
 
 
+@overload
+def get_entry_map(
+    dist: _EPDistType, group: None = None
+) -> dict[str, dict[str, EntryPoint]]: ...
+@overload
+def get_entry_map(dist: _EPDistType, group: str) -> dict[str, EntryPoint]: ...
 def get_entry_map(dist: _EPDistType, group: str | None = None):
     """Return the entry point map for `group`, or the full entry map"""
     return get_distribution(dist).get_entry_map(group)
@@ -544,10 +569,10 @@ class IMetadataProvider(Protocol):
     def has_metadata(self, name: str) -> bool:
         """Does the package's distribution contain the named metadata?"""
 
-    def get_metadata(self, name: str):
+    def get_metadata(self, name: str) -> str:
         """The named metadata resource as a string"""
 
-    def get_metadata_lines(self, name: str):
+    def get_metadata_lines(self, name: str) -> Iterator[str]:
         """Yield named metadata resource as list of non-blank non-comment lines
 
         Leading and trailing whitespace is stripped from each line, and lines
@@ -556,22 +581,26 @@ def get_metadata_lines(self, name: str):
     def metadata_isdir(self, name: str) -> bool:
         """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
 
-    def metadata_listdir(self, name: str):
+    def metadata_listdir(self, name: str) -> list[str]:
         """List of metadata names in the directory (like ``os.listdir()``)"""
 
-    def run_script(self, script_name: str, namespace: dict[str, Any]):
+    def run_script(self, script_name: str, namespace: dict[str, Any]) -> None:
         """Execute the named script in the supplied namespace dictionary"""
 
 
 class IResourceProvider(IMetadataProvider, Protocol):
     """An object that provides access to package resources"""
 
-    def get_resource_filename(self, manager: ResourceManager, resource_name: str):
+    def get_resource_filename(
+        self, manager: ResourceManager, resource_name: str
+    ) -> str:
         """Return a true filesystem path for `resource_name`
 
         `manager` must be a ``ResourceManager``"""
 
-    def get_resource_stream(self, manager: ResourceManager, resource_name: str):
+    def get_resource_stream(
+        self, manager: ResourceManager, resource_name: str
+    ) -> _ResourceStream:
         """Return a readable file-like object for `resource_name`
 
         `manager` must be a ``ResourceManager``"""
@@ -583,13 +612,13 @@ def get_resource_string(
 
         `manager` must be a ``ResourceManager``"""
 
-    def has_resource(self, resource_name: str):
+    def has_resource(self, resource_name: str) -> bool:
         """Does the package contain the named resource?"""
 
-    def resource_isdir(self, resource_name: str):
+    def resource_isdir(self, resource_name: str) -> bool:
         """Is the named resource a directory?  (like ``os.path.isdir()``)"""
 
-    def resource_listdir(self, resource_name: str):
+    def resource_listdir(self, resource_name: str) -> list[str]:
         """List of resource names in the directory (like ``os.listdir()``)"""
 
 
@@ -772,6 +801,26 @@ def add(
             keys2.append(dist.key)
         self._added_new(dist)
 
+    @overload
+    def resolve(
+        self,
+        requirements: Iterable[Requirement],
+        env: Environment | None,
+        installer: _InstallerTypeT[_DistributionT],
+        replace_conflicting: bool = False,
+        extras: tuple[str, ...] | None = None,
+    ) -> list[_DistributionT]: ...
+    @overload
+    def resolve(
+        self,
+        requirements: Iterable[Requirement],
+        env: Environment | None = None,
+        *,
+        installer: _InstallerTypeT[_DistributionT],
+        replace_conflicting: bool = False,
+        extras: tuple[str, ...] | None = None,
+    ) -> list[_DistributionT]: ...
+    @overload
     def resolve(
         self,
         requirements: Iterable[Requirement],
@@ -779,7 +828,15 @@ def resolve(
         installer: _InstallerType | None = None,
         replace_conflicting: bool = False,
         extras: tuple[str, ...] | None = None,
-    ):
+    ) -> list[Distribution]: ...
+    def resolve(
+        self,
+        requirements: Iterable[Requirement],
+        env: Environment | None = None,
+        installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+        replace_conflicting: bool = False,
+        extras: tuple[str, ...] | None = None,
+    ) -> list[Distribution] | list[_DistributionT]:
         """List all distributions needed to (recursively) meet `requirements`
 
         `requirements` must be a sequence of ``Requirement`` objects.  `env`,
@@ -877,13 +934,41 @@ def _resolve_dist(
             raise VersionConflict(dist, req).with_context(dependent_req)
         return dist
 
+    @overload
+    def find_plugins(
+        self,
+        plugin_env: Environment,
+        full_env: Environment | None,
+        installer: _InstallerTypeT[_DistributionT],
+        fallback: bool = True,
+    ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
+    @overload
+    def find_plugins(
+        self,
+        plugin_env: Environment,
+        full_env: Environment | None = None,
+        *,
+        installer: _InstallerTypeT[_DistributionT],
+        fallback: bool = True,
+    ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
+    @overload
     def find_plugins(
         self,
         plugin_env: Environment,
         full_env: Environment | None = None,
         installer: _InstallerType | None = None,
         fallback: bool = True,
-    ):
+    ) -> tuple[list[Distribution], dict[Distribution, Exception]]: ...
+    def find_plugins(
+        self,
+        plugin_env: Environment,
+        full_env: Environment | None = None,
+        installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+        fallback: bool = True,
+    ) -> tuple[
+        list[Distribution] | list[_DistributionT],
+        dict[Distribution, Exception],
+    ]:
         """Find all activatable distributions in `plugin_env`
 
         Example usage::
@@ -922,8 +1007,8 @@ def find_plugins(
         # scan project names in alphabetic order
         plugin_projects.sort()
 
-        error_info = {}
-        distributions = {}
+        error_info: dict[Distribution, Exception] = {}
+        distributions: dict[Distribution, Exception | None] = {}
 
         if full_env is None:
             env = Environment(self.entries)
@@ -1120,13 +1205,29 @@ def add(self, dist: Distribution):
                 dists.append(dist)
                 dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
 
+    @overload
     def best_match(
         self,
         req: Requirement,
         working_set: WorkingSet,
-        installer: Callable[[Requirement], Any] | None = None,
+        installer: _InstallerTypeT[_DistributionT],
         replace_conflicting: bool = False,
-    ):
+    ) -> _DistributionT: ...
+    @overload
+    def best_match(
+        self,
+        req: Requirement,
+        working_set: WorkingSet,
+        installer: _InstallerType | None = None,
+        replace_conflicting: bool = False,
+    ) -> Distribution | None: ...
+    def best_match(
+        self,
+        req: Requirement,
+        working_set: WorkingSet,
+        installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+        replace_conflicting: bool = False,
+    ) -> Distribution | None:
         """Find distribution best matching `req` and usable on `working_set`
 
         This calls the ``find(req)`` method of the `working_set` to see if a
@@ -1153,11 +1254,32 @@ def best_match(
         # try to download/install
         return self.obtain(req, installer)
 
+    @overload
     def obtain(
         self,
         requirement: Requirement,
-        installer: Callable[[Requirement], Any] | None = None,
-    ):
+        installer: _InstallerTypeT[_DistributionT],
+    ) -> _DistributionT: ...
+    @overload
+    def obtain(
+        self,
+        requirement: Requirement,
+        installer: Callable[[Requirement], None] | None = None,
+    ) -> None: ...
+    @overload
+    def obtain(
+        self,
+        requirement: Requirement,
+        installer: _InstallerType | None = None,
+    ) -> Distribution | None: ...
+    def obtain(
+        self,
+        requirement: Requirement,
+        installer: Callable[[Requirement], None]
+        | _InstallerType
+        | None
+        | _InstallerTypeT[_DistributionT] = None,
+    ) -> Distribution | None:
         """Obtain a distribution matching `requirement` (e.g. via download)
 
         Obtain a distro that matches requirement (e.g. via download).  In the
@@ -1514,7 +1636,6 @@ class NullProvider:
     egg_name: str | None = None
     egg_info: str | None = None
     loader: _LoaderProtocol | None = None
-    module_path: str | None  # Some subclasses can have a None module_path
 
     def __init__(self, module: _ModuleLike):
         self.loader = getattr(module, '__loader__', None)
@@ -1557,7 +1678,7 @@ def get_metadata(self, name: str):
             exc.reason += ' in {} file at path: {}'.format(name, path)
             raise
 
-    def get_metadata_lines(self, name: str):
+    def get_metadata_lines(self, name: str) -> Iterator[str]:
         return yield_lines(self.get_metadata(name))
 
     def resource_isdir(self, resource_name: str):
@@ -1569,7 +1690,7 @@ def metadata_isdir(self, name: str) -> bool:
     def resource_listdir(self, resource_name: str):
         return self._listdir(self._fn(self.module_path, resource_name))
 
-    def metadata_listdir(self, name: str):
+    def metadata_listdir(self, name: str) -> list[str]:
         if self.egg_info:
             return self._listdir(self._fn(self.egg_info, name))
         return []
@@ -1582,6 +1703,8 @@ def run_script(self, script_name: str, namespace: dict[str, Any]):
                     **locals()
                 ),
             )
+        if not self.egg_info:
+            raise TypeError("Provider is missing egg_info", self.egg_info)
         script_text = self.get_metadata(script).replace('\r\n', '\n')
         script_text = script_text.replace('\r', '\n')
         script_filename = self._fn(self.egg_info, script)
@@ -1612,12 +1735,12 @@ def _isdir(self, path) -> bool:
             "Can't perform this operation for unregistered loader type"
         )
 
-    def _listdir(self, path):
+    def _listdir(self, path) -> list[str]:
         raise NotImplementedError(
             "Can't perform this operation for unregistered loader type"
         )
 
-    def _fn(self, base, resource_name: str):
+    def _fn(self, base: str, resource_name: str):
         self._validate_resource_path(resource_name)
         if resource_name:
             return os.path.join(base, *resource_name.split('/'))
@@ -1777,7 +1900,8 @@ def _register(cls):
 class EmptyProvider(NullProvider):
     """Provider that returns nothing for all requests"""
 
-    module_path = None
+    # A special case: we don't want all Providers inheriting from NullProvider to have a potentially None module_path
+    module_path: str | None = None  # type: ignore[assignment]
 
     _isdir = _has = lambda self, path: False
 
@@ -1853,7 +1977,7 @@ class ZipProvider(EggProvider):
     # ZipProvider's loader should always be a zipimporter or equivalent
     loader: zipimport.zipimporter
 
-    def __init__(self, module: _ModuleLike):
+    def __init__(self, module: _ZipLoaderModule):
         super().__init__(module)
         self.zip_pre = self.loader.archive + os.sep
 
@@ -1902,7 +2026,7 @@ def _get_date_and_size(zip_stat):
         return timestamp, size
 
     # FIXME: 'ZipProvider._extract_resource' is too complex (12)
-    def _extract_resource(self, manager: ResourceManager, zip_path):  # noqa: C901
+    def _extract_resource(self, manager: ResourceManager, zip_path) -> str:  # noqa: C901
         if zip_path in self._index():
             for name in self._index()[zip_path]:
                 last = self._extract_resource(manager, os.path.join(zip_path, name))
@@ -2039,7 +2163,7 @@ def _get_metadata_path(self, name):
     def has_metadata(self, name: str) -> bool:
         return name == 'PKG-INFO' and os.path.isfile(self.path)
 
-    def get_metadata(self, name):
+    def get_metadata(self, name: str):
         if name != 'PKG-INFO':
             raise KeyError("No metadata except PKG-INFO is available")
 
@@ -2055,7 +2179,7 @@ def _warn_on_replacement(self, metadata):
             msg = tmpl.format(**locals())
             warnings.warn(msg)
 
-    def get_metadata_lines(self, name):
+    def get_metadata_lines(self, name: str) -> Iterator[str]:
         return yield_lines(self.get_metadata(name))
 
 
@@ -2581,12 +2705,26 @@ def __str__(self):
     def __repr__(self):
         return "EntryPoint.parse(%r)" % str(self)
 
+    @overload
+    def load(
+        self,
+        require: Literal[True] = True,
+        env: Environment | None = None,
+        installer: _InstallerType | None = None,
+    ) -> _ResolvedEntryPoint: ...
+    @overload
+    def load(
+        self,
+        require: Literal[False],
+        *args: Any,
+        **kwargs: Any,
+    ) -> _ResolvedEntryPoint: ...
     def load(
         self,
         require: bool = True,
         *args: Environment | _InstallerType | None,
         **kwargs: Environment | _InstallerType | None,
-    ):
+    ) -> _ResolvedEntryPoint:
         """
         Require packages for this EntryPoint, then resolve it.
         """
@@ -2603,7 +2741,7 @@ def load(
             self.require(*args, **kwargs)  # type: ignore
         return self.resolve()
 
-    def resolve(self):
+    def resolve(self) -> _ResolvedEntryPoint:
         """
         Resolve the entry point from its module and attrs.
         """
@@ -3031,13 +3169,17 @@ def as_requirement(self):
 
         return Requirement.parse(spec)
 
-    def load_entry_point(self, group: str, name: str):
+    def load_entry_point(self, group: str, name: str) -> _ResolvedEntryPoint:
         """Return the `name` entry point of `group` or raise ImportError"""
         ep = self.get_entry_info(group, name)
         if ep is None:
             raise ImportError("Entry point %r not found" % ((group, name),))
         return ep.load()
 
+    @overload
+    def get_entry_map(self, group: None = None) -> dict[str, dict[str, EntryPoint]]: ...
+    @overload
+    def get_entry_map(self, group: str) -> dict[str, EntryPoint]: ...
     def get_entry_map(self, group: str | None = None):
         """Return the entry point map for `group`, or the full entry map"""
         if not hasattr(self, "_ep_map"):
diff --git a/ruff.toml b/ruff.toml
index 9f0b42cea9..8828fe61a5 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -12,7 +12,7 @@ extend-select = [
 	# local
 	"FA", # flake8-future-annotations
 	"F404", # late-future-import
-  "PYI", # flake8-pyi
+	"PYI", # flake8-pyi
 	"UP", # pyupgrade
 	"YTT", # flake8-2020
 ]
@@ -40,10 +40,6 @@ ignore = [
 	"ISC002",
 ]
 
-[lint.per-file-ignores]
-# Auto-generated code
-"setuptools/config/_validate_pyproject/*" = ["FA100"]
-
 [format]
 # Enable preview to get hugged parenthesis unwrapping
 preview = true

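The shape of the merged overloads deserves a standalone illustration: a `TypeVar` bound to `Distribution` lets methods like `resolve`, `best_match` and `obtain` propagate the exact subclass produced by a typed installer, while a plain overload keeps `Distribution | None` for untyped callers; the runtime body is shared and only the static types differ. A minimal, self-contained sketch of the technique (hypothetical names, not the pkg_resources API):

    from __future__ import annotations

    from typing import Callable, TypeVar, overload


    class Dist: ...


    class EggDist(Dist): ...


    _D = TypeVar("_D", bound=Dist)


    @overload
    def obtain(installer: Callable[[str], _D]) -> _D: ...
    @overload
    def obtain(installer: None = None) -> Dist | None: ...
    def obtain(installer: Callable[[str], _D] | None = None) -> Dist | _D | None:
        # One runtime body; the overloads above only refine what checkers see.
        return installer("some-req") if installer else None


    def egg_installer(req: str) -> EggDist:
        return EggDist()


    dist = obtain(egg_installer)  # checkers infer EggDist, not just Dist
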
From 8ac08a0996c28a79950f012fccc0ccada0d661e6 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 17 Jun 2024 14:37:58 -0400
Subject: [PATCH 0725/1761] pkg_resources: fix incorrect implicit return types
 (#4391)

---
 pkg_resources/__init__.py | 82 ++++++++++++++++++++-------------------
 1 file changed, 43 insertions(+), 39 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 7e3cf54ab8..894b45ac25 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -34,6 +34,8 @@
 import types
 from typing import (
     Any,
+    Dict,
+    Iterator,
     Mapping,
     MutableSequence,
     NamedTuple,
@@ -45,8 +47,6 @@
     Callable,
     Iterable,
     TypeVar,
-    Optional,
-    Dict,
     overload,
 )
 import zipfile
@@ -98,6 +98,7 @@
 from pkg_resources.extern.platformdirs import user_cache_dir as _user_cache_dir
 
 if TYPE_CHECKING:
+    from typing_extensions import Self
     from _typeshed import StrPath, StrOrBytesPath, BytesPath
 
 warnings.warn(
@@ -111,15 +112,15 @@
 _T = TypeVar("_T")
 # Type aliases
 _NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
-_InstallerType = Callable[["Requirement"], Optional["Distribution"]]
+_InstallerType = Callable[["Requirement"], Union["Distribution", None]]
 _PkgReqType = Union[str, "Requirement"]
 _EPDistType = Union["Distribution", _PkgReqType]
-_MetadataType = Optional["IResourceProvider"]
+_MetadataType = Union["IResourceProvider", None]
 # Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
 _ModuleLike = Union[object, types.ModuleType]
 _ProviderFactoryType = Callable[[_ModuleLike], "IResourceProvider"]
 _DistFinderType = Callable[[_T, str, bool], Iterable["Distribution"]]
-_NSHandlerType = Callable[[_T, str, str, types.ModuleType], Optional[str]]
+_NSHandlerType = Callable[[_T, str, str, types.ModuleType], Union[str, None]]
 _AdapterT = TypeVar(
     "_AdapterT", _DistFinderType[Any], _ProviderFactoryType, _NSHandlerType[Any]
 )
@@ -151,7 +152,7 @@ def _declare_state(vartype: str, varname: str, initial_value: _T) -> _T:
     return initial_value
 
 
-def __getstate__():
+def __getstate__() -> dict[str, Any]:
     state = {}
     g = globals()
     for k, v in _state_vars.items():
@@ -159,7 +160,7 @@ def __getstate__():
     return state
 
 
-def __setstate__(state):
+def __setstate__(state: dict[str, Any]) -> dict[str, Any]:
     g = globals()
     for k, v in state.items():
         g['_sset_' + _state_vars[k]](k, g[k], v)
@@ -314,11 +315,11 @@ class VersionConflict(ResolutionError):
     _template = "{self.dist} is installed but {self.req} is required"
 
     @property
-    def dist(self):
+    def dist(self) -> Distribution:
         return self.args[0]
 
     @property
-    def req(self):
+    def req(self) -> Requirement:
         return self.args[1]
 
     def report(self):
@@ -344,7 +345,7 @@ class ContextualVersionConflict(VersionConflict):
     _template = VersionConflict._template + ' by {self.required_by}'
 
     @property
-    def required_by(self):
+    def required_by(self) -> set[str]:
         return self.args[2]
 
 
@@ -357,11 +358,11 @@ class DistributionNotFound(ResolutionError):
     )
 
     @property
-    def req(self):
+    def req(self) -> Requirement:
         return self.args[0]
 
     @property
-    def requirers(self):
+    def requirers(self) -> set[str] | None:
         return self.args[1]
 
     @property
@@ -667,11 +668,11 @@ def add_entry(self, entry: str):
         for dist in find_distributions(entry, True):
             self.add(dist, entry, False)
 
-    def __contains__(self, dist: Distribution):
+    def __contains__(self, dist: Distribution) -> bool:
         """True if `dist` is the active distribution for its project"""
         return self.by_key.get(dist.key) == dist
 
-    def find(self, req: Requirement):
+    def find(self, req: Requirement) -> Distribution | None:
         """Find a distribution matching requirement `req`
 
         If there is an active distribution for the requested project, this
@@ -717,7 +718,7 @@ def run_script(self, requires: str, script_name: str):
         ns['__name__'] = name
         self.require(requires)[0].run_script(script_name, ns)
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[Distribution]:
         """Yield distributions for non-duplicate projects in the working set
 
         The yield order is the order in which the items' path entries were
@@ -1101,7 +1102,7 @@ def scan(self, search_path: Iterable[str] | None = None):
             for dist in find_distributions(item):
                 self.add(dist)
 
-    def __getitem__(self, project_name: str):
+    def __getitem__(self, project_name: str) -> list[Distribution]:
         """Return a newest-to-oldest list of distributions for `project_name`
 
         Uses case-insensitive `project_name` comparison, assuming all the
@@ -1168,7 +1169,7 @@ def obtain(
         to the `installer` argument."""
         return installer(requirement) if installer else None
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[str]:
         """Yield the unique project names of the available distributions"""
         for key in self._distmap.keys():
             if self[key]:
@@ -1401,7 +1402,7 @@ def cleanup_resources(self, force: bool = False) -> list[str]:
         return []
 
 
-def get_default_cache():
+def get_default_cache() -> str:
     """
     Return the ``PYTHON_EGG_CACHE`` environment variable
     or a platform-relevant user cache dir for an app
@@ -1493,7 +1494,7 @@ def invalid_marker(text: str):
     return False
 
 
-def evaluate_marker(text: str, extra: str | None = None):
+def evaluate_marker(text: str, extra: str | None = None) -> bool:
     """
     Evaluate a PEP 508 environment marker.
     Return a boolean indicating the marker result in this environment.
@@ -1799,7 +1800,7 @@ class ZipManifests(Dict[str, "MemoizedZipManifests.manifest_mod"]):
     zip manifest builder
     """
 
-    # `path` could be `Union["StrPath", IO[bytes]]` but that violates the LSP for `MemoizedZipManifests.load`
+    # `path` could be `StrPath | IO[bytes]` but that violates the LSP for `MemoizedZipManifests.load`
     @classmethod
     def build(cls, path: str):
         """
@@ -1831,7 +1832,7 @@ class manifest_mod(NamedTuple):
         manifest: dict[str, zipfile.ZipInfo]
         mtime: float
 
-    def load(self, path: str):  # type: ignore[override] # ZipManifests.load is a classmethod
+    def load(self, path: str) -> dict[str, zipfile.ZipInfo]:  # type: ignore[override] # ZipManifests.load is a classmethod
         """
         Load a manifest at path or return a suitable manifest already loaded.
         """
@@ -2123,7 +2124,7 @@ def find_distributions(path_item: str, only: bool = False):
 
 def find_eggs_in_zip(
     importer: zipimport.zipimporter, path_item: str, only: bool = False
-):
+) -> Iterator[Distribution]:
     """
     Find eggs in zip files; possibly multiple nested eggs.
     """
@@ -2216,7 +2217,7 @@ def __call__(self, fullpath):
         return iter(())
 
 
-def safe_listdir(path):
+def safe_listdir(path: StrOrBytesPath):
     """
     Attempt to list contents of path, but suppress some exceptions.
     """
@@ -2232,13 +2233,13 @@ def safe_listdir(path):
     return ()
 
 
-def distributions_from_metadata(path):
+def distributions_from_metadata(path: str):
     root = os.path.dirname(path)
     if os.path.isdir(path):
         if len(os.listdir(path)) == 0:
             # empty metadata dir; skip
             return
-        metadata = PathMetadata(root, path)
+        metadata: _MetadataType = PathMetadata(root, path)
     else:
         metadata = FileMetadata(path)
     entry = os.path.basename(path)
@@ -2679,7 +2680,7 @@ def parse_group(
         """Parse an entry point group"""
         if not MODULE(group):
             raise ValueError("Invalid group name", group)
-        this = {}
+        this: dict[str, Self] = {}
         for line in yield_lines(lines):
             ep = cls.parse(line, dist)
             if ep.name in this:
@@ -2694,11 +2695,12 @@ def parse_map(
         dist: Distribution | None = None,
     ):
         """Parse a map of entry point groups"""
+        _data: Iterable[tuple[str | None, str | Iterable[str]]]
         if isinstance(data, dict):
             _data = data.items()
         else:
             _data = split_sections(data)
-        maps: dict[str, dict[str, EntryPoint]] = {}
+        maps: dict[str, dict[str, Self]] = {}
         for group, lines in _data:
             if group is None:
                 if not lines:
@@ -2757,7 +2759,7 @@ def from_location(
         basename: StrPath,
         metadata: _MetadataType = None,
         **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
-    ):
+    ) -> Distribution:
         project_name, version, py_version, platform = [None] * 4
         basename, ext = os.path.splitext(basename)
         if ext.lower() in _distributionImpl:
@@ -2896,14 +2898,14 @@ def _dep_map(self):
         return self.__dep_map
 
     @staticmethod
-    def _filter_extras(dm):
+    def _filter_extras(dm: dict[str | None, list[Requirement]]):
         """
         Given a mapping of extras to dependencies, strip off
         environment markers and filter out any dependencies
         not matching the markers.
         """
         for extra in list(filter(None, dm)):
-            new_extra = extra
+            new_extra: str | None = extra
             reqs = dm.pop(extra)
             new_extra, _, marker = extra.partition(':')
             fails_marker = marker and (
@@ -2926,7 +2928,7 @@ def _build_dep_map(self):
     def requires(self, extras: Iterable[str] = ()):
         """List of Requirements needed for this distro if `extras` are used"""
         dm = self._dep_map
-        deps = []
+        deps: list[Requirement] = []
         deps.extend(dm.get(None, ()))
         for ext in extras:
             try:
@@ -3223,11 +3225,11 @@ def _dep_map(self):
             self.__dep_map = self._compute_dependencies()
             return self.__dep_map
 
-    def _compute_dependencies(self):
+    def _compute_dependencies(self) -> dict[str | None, list[Requirement]]:
         """Recompute this distribution's dependencies."""
-        dm = self.__dep_map = {None: []}
+        self.__dep_map: dict[str | None, list[Requirement]] = {None: []}
 
-        reqs = []
+        reqs: list[Requirement] = []
         # Including any condition expressions
         for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
             reqs.extend(parse_requirements(req))
@@ -3238,13 +3240,15 @@ def reqs_for_extra(extra):
                     yield req
 
         common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
-        dm[None].extend(common)
+        self.__dep_map[None].extend(common)
 
         for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
             s_extra = safe_extra(extra.strip())
-            dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]
+            self.__dep_map[s_extra] = [
+                r for r in reqs_for_extra(extra) if r not in common
+            ]
 
-        return dm
+        return self.__dep_map
 
 
 _distributionImpl = {
@@ -3305,7 +3309,7 @@ def __eq__(self, other: object):
     def __ne__(self, other):
         return not self == other
 
-    def __contains__(self, item: Distribution | str | tuple[str, ...]):
+    def __contains__(self, item: Distribution | str | tuple[str, ...]) -> bool:
         if isinstance(item, Distribution):
             if item.key != self.key:
                 return False
@@ -3369,7 +3373,7 @@ def _bypass_ensure_directory(path):
             pass
 
 
-def split_sections(s: _NestedStr):
+def split_sections(s: _NestedStr) -> Iterator[tuple[str | None, list[str]]]:
     """Split a string or iterable thereof into (section, content) pairs
 
     Each ``section`` is a stripped version of the section header ("[section]")

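The motivation for these return annotations: the exception classes here stash their payload in `self.args`, which is typed `tuple[Any, ...]`, so an unannotated `return self.args[0]` made every such property silently return `Any` and disabled checking at call sites. A minimal sketch, with an illustrative class rather than the patched ones:

    class ResolutionProblem(Exception):
        @property
        def dist(self):  # implicit return type: Any (args is tuple[Any, ...])
            return self.args[0]

        @property
        def req(self) -> str:  # annotated: call sites are checked again
            return self.args[1]
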
From 22993191cfea75c7f53ca2d27962c014511f0d92 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 17 Jun 2024 14:38:44 -0400
Subject: [PATCH 0726/1761] Remove calls to `typing.cast` with better type
 narrowing and definitions (#4375)

---
 mypy.ini                                  |  1 +
 setuptools/command/editable_wheel.py      |  2 +-
 setuptools/config/_apply_pyprojecttoml.py | 17 +++++++++--------
 setuptools/config/expand.py               | 19 ++++++-------------
 setuptools/dist.py                        |  2 ++
 5 files changed, 19 insertions(+), 22 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index c5b13942c4..a63ccf787c 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -4,6 +4,7 @@
 python_version = 3.8
 strict = False
 warn_unused_ignores = True
+warn_redundant_casts = True
 # required to support namespace packages: https://github.com/python/mypy/issues/14057
 explicit_package_bases = True
 exclude = (?x)(
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 55d477eebf..ae31bb4c79 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -515,7 +515,7 @@ def template_vars(self) -> tuple[str, str, dict[str, str], dict[str, list[str]]]
         )
 
         legacy_namespaces = {
-            cast(str, pkg): find_package_path(pkg, roots, self.dist.src_root or "")
+            pkg: find_package_path(pkg, roots, self.dist.src_root or "")
             for pkg in self.dist.namespace_packages or []
         }
 
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index c7e25b755f..5a8700051e 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -12,7 +12,6 @@
 
 import logging
 import os
-from collections.abc import Mapping
 from email.headerregistry import Address
 from functools import partial, reduce
 from inspect import cleandoc
@@ -22,8 +21,9 @@
     TYPE_CHECKING,
     Any,
     Callable,
+    Dict,
+    Mapping,
     Union,
-    cast,
 )
 from .._path import StrPath
 from ..errors import RemovedConfigError
@@ -35,7 +35,7 @@
     from setuptools.dist import Distribution  # noqa
 
 EMPTY: Mapping = MappingProxyType({})  # Immutable dict-like
-_DictOrStr = Union[dict, str]
+_ProjectReadmeValue = Union[str, Dict[str, str]]
 _CorrespFn = Callable[["Distribution", Any, StrPath], None]
 _Correspondence = Union[str, _CorrespFn]
 
@@ -149,15 +149,16 @@ def _guess_content_type(file: str) -> str | None:
     raise ValueError(f"Undefined content type for {file}, {msg}")
 
 
-def _long_description(dist: Distribution, val: _DictOrStr, root_dir: StrPath):
+def _long_description(dist: Distribution, val: _ProjectReadmeValue, root_dir: StrPath):
     from setuptools.config import expand
 
+    file: str | tuple[()]
     if isinstance(val, str):
-        file: str | list = val
+        file = val
         text = expand.read_files(file, root_dir)
-        ctype = _guess_content_type(val)
+        ctype = _guess_content_type(file)
     else:
-        file = val.get("file") or []
+        file = val.get("file") or ()
         text = val.get("text") or expand.read_files(file, root_dir)
         ctype = val["content-type"]
 
@@ -167,7 +168,7 @@ def _long_description(dist: Distribution, val: _DictOrStr, root_dir: StrPath):
         _set_config(dist, "long_description_content_type", ctype)
 
     if file:
-        dist._referenced_files.add(cast(str, file))
+        dist._referenced_files.add(file)
 
 
 def _license(dist: Distribution, val: dict, root_dir: StrPath):
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 7140dc8ed8..6ea6cf6d0e 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -36,8 +36,6 @@
     Iterator,
     Mapping,
     TypeVar,
-    Union,
-    cast,
 )
 from pathlib import Path
 from types import ModuleType
@@ -326,18 +324,13 @@ def version(value: Callable | Iterable[str | int] | str) -> str:
     """When getting the version directly from an attribute,
     it should be normalised to string.
     """
-    if callable(value):
-        value = value()
+    _value = value() if callable(value) else value
 
-    value = cast(Iterable[Union[str, int]], value)
-
-    if not isinstance(value, str):
-        if hasattr(value, '__iter__'):
-            value = '.'.join(map(str, value))
-        else:
-            value = '%s' % value
-
-    return value
+    if isinstance(_value, str):
+        return _value
+    if hasattr(_value, '__iter__'):
+        return '.'.join(map(str, _value))
+    return '%s' % _value
 
 
 def canonic_package_data(package_data: dict) -> dict:
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 30cdfdb10b..43762960ba 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -272,6 +272,8 @@ class Distribution(_Distribution):
     }
 
     _patched_dist = None
+    # Used by build_py, editable_wheel and install_lib commands for legacy namespaces
+    namespace_packages: list[str]  #: :meta private: DEPRECATED
 
     def patch_missing_pkg_info(self, attrs):
         # Fake up a replacement for the data that would normally come from

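The `version()` rewrite above is the general recipe for retiring `typing.cast`: bind the possibly-callable input to a fresh name, then let each `isinstance`/`hasattr` branch narrow that name and return early, so no branch needs a cast. A sketch of the same shape, assuming a standalone helper (illustrative name):

    from typing import Callable, Iterable, Union

    _VersionSpec = Union[str, Iterable[Union[str, int]], Callable[[], object]]


    def normalize_version(value: _VersionSpec) -> str:
        # A fresh binding sidesteps re-annotating (or cast()-ing) the parameter.
        _value = value() if callable(value) else value
        if isinstance(_value, str):
            return _value  # narrowed to str
        if hasattr(_value, '__iter__'):
            return '.'.join(map(str, _value))  # an iterable of parts
        return '%s' % _value  # fallback: stringify scalars
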
From ad033e8f8cd055fcecdbabc647d36cad42d0baff Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 17 Jun 2024 15:22:28 -0400
Subject: [PATCH 0727/1761] Keep _ModuleLike, not Any

---
 pkg_resources/__init__.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 8d9289d06f..16a5bd8eef 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -122,8 +122,9 @@
 _ResolvedEntryPoint = Any  # Can be any attribute in the module
 _ResourceStream = Any  # TODO / Incomplete: A readable file-like object
 # Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
-_ModuleLike = Any
-_ProviderFactoryType = Callable[[_ModuleLike], "IResourceProvider"]
+_ModuleLike = Union[object, types.ModuleType]
+# Any: Should be _ModuleLike but we end up with issues where _ModuleLike doesn't have _ZipLoaderModule's __loader__
+_ProviderFactoryType = Callable[[Any], "IResourceProvider"]
 _DistFinderType = Callable[[_T, str, bool], Iterable["Distribution"]]
 _NSHandlerType = Callable[[_T, str, str, types.ModuleType], Union[str, None]]
 _AdapterT = TypeVar(
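
The aliases above describe the contract of pkg_resources.register_loader_type:
the factory receives an already-imported module-like object and returns a
resource provider. A hypothetical sketch (NullLoader is made up for
illustration; register_loader_type and NullProvider are real pkg_resources
names):

    import types
    import pkg_resources

    class NullLoader:  # hypothetical loader type, for illustration only
        pass

    def provider_factory(module: types.ModuleType) -> pkg_resources.NullProvider:
        # pkg_resources hands the imported module to the factory
        return pkg_resources.NullProvider(module)

    pkg_resources.register_loader_type(NullLoader, provider_factory)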

From 63a2eb3b8246be5a6b50a837b26175e0c8cb6cf8 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Jun 2024 10:12:59 +0100
Subject: [PATCH 0728/1761] Fix undefined log function in bdist_wheel

---
 setuptools/command/bdist_wheel.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index ad34539eb8..a81187598a 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -284,9 +284,7 @@ def finalize_options(self):
         wheel = self.distribution.get_option_dict("wheel")
         if "universal" in wheel:
             # please don't define this in your global configs
-            log.warning(
-                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
-            )
+            log.warn("The [wheel] section is deprecated. Use [bdist_wheel] instead.")
             val = wheel["universal"][1].strip()
             if val.lower() in ("1", "true", "yes"):
                 self.universal = True
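
The option lookup involved here is ordinary distutils machinery; a minimal
sketch of how the two section spellings are read (the legacy ``[wheel]``
section is what triggers the warning above):

    from setuptools.dist import Distribution

    dist = Distribution()
    dist.parse_config_files()  # reads setup.cfg and friends
    legacy = dist.get_option_dict("wheel")         # deprecated spelling
    current = dist.get_option_dict("bdist_wheel")  # supported spelling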

From be847e002eacf1431470f5d6592de5caddb521e5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Jun 2024 12:59:30 +0100
Subject: [PATCH 0729/1761] Add interop tests for pkg_resources and
 zope-interface

---
 .../tests/integration/test_zope_interface.py  | 61 +++++++++++++++++++
 1 file changed, 61 insertions(+)
 create mode 100644 setuptools/tests/integration/test_zope_interface.py

diff --git a/setuptools/tests/integration/test_zope_interface.py b/setuptools/tests/integration/test_zope_interface.py
new file mode 100644
index 0000000000..c340c4137d
--- /dev/null
+++ b/setuptools/tests/integration/test_zope_interface.py
@@ -0,0 +1,61 @@
+import platform
+import subprocess
+import sys
+from inspect import cleandoc
+
+import jaraco.path
+import pytest
+
+pytestmark = pytest.mark.integration
+
+VIRTUALENV = (sys.executable, "-m", "virtualenv")
+
+
+def run(cmd, **kwargs):
+    proc = subprocess.run(cmd, encoding="utf-8", capture_output=True, **kwargs)
+    if proc.returncode != 0:
+        pytest.fail(f"Command {cmd} failed with:\n{proc.stdout=!s}\n{proc.stderr=!s}")
+    return proc.stdout
+
+
+@pytest.mark.skipif(
+    platform.system() != "Linux",
+    reason="only demonstrated to fail on Linux in #4399",
+)
+def test_interop_pkg_resources_iter_entry_points(tmp_path, venv):
+    """
+    Importing pkg_resources.iter_entry_points on console_scripts
+    seems to cause trouble with zope-interface when a deprecated installation
+    method is used. See #4399.
+    """
+    project = {
+        "pkg": {
+            "foo.py": cleandoc(
+                """
+                from pkg_resources import iter_entry_points
+
+                def bar():
+                    print("Print me if you can")
+                """
+            ),
+            "setup.py": cleandoc(
+                """
+                from setuptools import setup, find_packages
+
+                setup(
+                    install_requires=["zope-interface==6.4.post2"],
+                    entry_points={
+                        "console_scripts": [
+                            "foo=foo:bar",
+                        ],
+                    },
+                )
+                """
+            ),
+        }
+    }
+    jaraco.path.build(project, prefix=tmp_path)
+    cmd = [venv.exe("pip"), "install", "-e", ".", "--no-use-pep517"]
+    run(cmd, cwd=tmp_path / "pkg")
+    out = run([venv.exe("foo")])
+    assert "Print me if you can" in out

From b95d168faa3fe25d7e9e947472094d23124428cf Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Jun 2024 15:12:06 +0100
Subject: [PATCH 0730/1761] Move piece of code inside pkg_resources/__init__.py

---
 pkg_resources/__init__.py | 64 +++++++++++++++++++--------------------
 1 file changed, 32 insertions(+), 32 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 894b45ac25..15f96693e7 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -3426,6 +3426,38 @@ class PkgResourcesDeprecationWarning(Warning):
     """
 
 
+# Ported from ``setuptools`` to avoid introducing an import inter-dependency:
+_LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
+
+
+def _read_utf8_with_fallback(file: str, fallback_encoding=_LOCALE_ENCODING) -> str:
+    """See setuptools.unicode_utils._read_utf8_with_fallback"""
+    try:
+        with open(file, "r", encoding="utf-8") as f:
+            return f.read()
+    except UnicodeDecodeError:  # pragma: no cover
+        msg = f"""\
+        ********************************************************************************
+        `encoding="utf-8"` fails with {file!r}, trying `encoding={fallback_encoding!r}`.
+
+        This fallback behaviour is considered **deprecated** and future versions of
+        `setuptools/pkg_resources` may not implement it.
+
+        Please encode {file!r} with "utf-8" to ensure future builds will succeed.
+
+        If this file was produced by `setuptools` itself, cleaning up the cached files
+        and re-building/re-installing the package with a newer version of `setuptools`
+        (e.g. by updating `build-system.requires` in its `pyproject.toml`)
+        might solve the problem.
+        ********************************************************************************
+        """
+        # TODO: Add a deadline?
+        #       See comment in setuptools.unicode_utils._Utf8EncodingNeeded
+        warnings.warn(msg, PkgResourcesDeprecationWarning, stacklevel=2)
+        with open(file, "r", encoding=fallback_encoding) as f:
+            return f.read()
+
+
 # from jaraco.functools 1.3
 def _call_aside(f, *args, **kwargs):
     f(*args, **kwargs)
@@ -3498,35 +3530,3 @@ def _initialize_master_working_set():
     add_activation_listener = working_set.subscribe
     run_script = working_set.run_script
     run_main = run_script
-
-
-#  ---- Ported from ``setuptools`` to avoid introducing an import inter-dependency ----
-LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
-
-
-def _read_utf8_with_fallback(file: str, fallback_encoding=LOCALE_ENCODING) -> str:
-    """See setuptools.unicode_utils._read_utf8_with_fallback"""
-    try:
-        with open(file, "r", encoding="utf-8") as f:
-            return f.read()
-    except UnicodeDecodeError:  # pragma: no cover
-        msg = f"""\
-        ********************************************************************************
-        `encoding="utf-8"` fails with {file!r}, trying `encoding={fallback_encoding!r}`.
-
-        This fallback behaviour is considered **deprecated** and future versions of
-        `setuptools/pkg_resources` may not implement it.
-
-        Please encode {file!r} with "utf-8" to ensure future builds will succeed.
-
-        If this file was produced by `setuptools` itself, cleaning up the cached files
-        and re-building/re-installing the package with a newer version of `setuptools`
-        (e.g. by updating `build-system.requires` in its `pyproject.toml`)
-        might solve the problem.
-        ********************************************************************************
-        """
-        # TODO: Add a deadline?
-        #       See comment in setuptools.unicode_utils._Utf8EncodingNeeded
-        warnings.warn(msg, PkgResourcesDeprecationWarning, stacklevel=2)
-        with open(file, "r", encoding=fallback_encoding) as f:
-            return f.read()
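
Usage of the ported helper is straightforward; a sketch (the function is
private, so relying on this import path is an assumption):

    from pkg_resources import _read_utf8_with_fallback

    # Tries UTF-8 first; on UnicodeDecodeError it warns and retries with
    # the locale encoding ("locale" on Python >= 3.10, else the default).
    content = _read_utf8_with_fallback("PKG-INFO")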

From 03edaaa41059cf580709ec714586a6f7762cd99d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Jun 2024 17:14:59 +0100
Subject: [PATCH 0731/1761] Add newsfragment

---
 newsfragments/4422.misc.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4422.misc.rst

diff --git a/newsfragments/4422.misc.rst b/newsfragments/4422.misc.rst
new file mode 100644
index 0000000000..e45b6d44b2
--- /dev/null
+++ b/newsfragments/4422.misc.rst
@@ -0,0 +1 @@
+Reorder code in ``pkg_resources/__init__.py`` to avoid definitions after ``@_call_aside``.

From 06fd687e048224fc2293be50ed30d7f1a04378f4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Jun 2024 13:04:11 +0100
Subject: [PATCH 0732/1761] Move integration test to pkg_resources

---
 .../tests/test_integration_zope_interface.py                      | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename setuptools/tests/integration/test_zope_interface.py => pkg_resources/tests/test_integration_zope_interface.py (100%)

diff --git a/setuptools/tests/integration/test_zope_interface.py b/pkg_resources/tests/test_integration_zope_interface.py
similarity index 100%
rename from setuptools/tests/integration/test_zope_interface.py
rename to pkg_resources/tests/test_integration_zope_interface.py

From 051e70d9b232c5c24b3a64a4fdac07e9a347c4fb Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Jun 2024 13:06:28 +0100
Subject: [PATCH 0733/1761] Simplify integration test for zope interface

---
 .../tests/test_integration_zope_interface.py    | 17 +++--------------
 1 file changed, 3 insertions(+), 14 deletions(-)

diff --git a/pkg_resources/tests/test_integration_zope_interface.py b/pkg_resources/tests/test_integration_zope_interface.py
index c340c4137d..634025c238 100644
--- a/pkg_resources/tests/test_integration_zope_interface.py
+++ b/pkg_resources/tests/test_integration_zope_interface.py
@@ -1,6 +1,4 @@
 import platform
-import subprocess
-import sys
 from inspect import cleandoc
 
 import jaraco.path
@@ -8,15 +6,6 @@
 
 pytestmark = pytest.mark.integration
 
-VIRTUALENV = (sys.executable, "-m", "virtualenv")
-
-
-def run(cmd, **kwargs):
-    proc = subprocess.run(cmd, encoding="utf-8", capture_output=True, **kwargs)
-    if proc.returncode != 0:
-        pytest.fail(f"Command {cmd} failed with:\n{proc.stdout=!s}\n{proc.stderr=!s}")
-    return proc.stdout
-
 
 @pytest.mark.skipif(
     platform.system() != "Linux",
@@ -55,7 +44,7 @@ def bar():
         }
     }
     jaraco.path.build(project, prefix=tmp_path)
-    cmd = [venv.exe("pip"), "install", "-e", ".", "--no-use-pep517"]
-    run(cmd, cwd=tmp_path / "pkg")
-    out = run([venv.exe("foo")])
+    cmd = ["pip", "install", "-e", ".", "--no-use-pep517"]
+    venv.run(cmd, cwd=tmp_path / "pkg")  # Needs this version of pkg_resources installed
+    out = venv.run(["foo"])
     assert "Print me if you can" in out

From a4b15f3a07c914c1da9bda2cf458237c97d0e042 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Jun 2024 13:09:32 +0100
Subject: [PATCH 0734/1761] Add comments on test

---
 pkg_resources/tests/test_integration_zope_interface.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pkg_resources/tests/test_integration_zope_interface.py b/pkg_resources/tests/test_integration_zope_interface.py
index 634025c238..4e37c3401b 100644
--- a/pkg_resources/tests/test_integration_zope_interface.py
+++ b/pkg_resources/tests/test_integration_zope_interface.py
@@ -7,6 +7,10 @@
 pytestmark = pytest.mark.integration
 
 
+# For the sake of simplicity this test uses fixtures defined in
+# `setuptools.test.fixtures`,
+# and it also exercises conditions considered deprecated...
+# So, if needed, this test can be deleted.
 @pytest.mark.skipif(
     platform.system() != "Linux",
     reason="only demonstrated to fail on Linux in #4399",

From 3466f9f1e14c4b0ef87299fef414f73600d4a46f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Jun 2024 13:29:12 +0100
Subject: [PATCH 0735/1761] Bump version: 70.0.0 → 70.1.0
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 30 ++++++++++++++++++++++++++++++
 newsfragments/1386.feature.rst |  1 -
 newsfragments/4246.feature.rst |  4 ----
 newsfragments/4310.feature.rst |  1 -
 newsfragments/4365.misc.rst    |  1 -
 newsfragments/4382.bugfix.rst  |  1 -
 newsfragments/4403.bugfix.rst  |  1 -
 newsfragments/4405.bugfix.rst  |  2 --
 newsfragments/4411.bugfix.rst  |  1 -
 newsfragments/4422.misc.rst    |  1 -
 pyproject.toml                 |  2 +-
 12 files changed, 32 insertions(+), 15 deletions(-)
 delete mode 100644 newsfragments/1386.feature.rst
 delete mode 100644 newsfragments/4246.feature.rst
 delete mode 100644 newsfragments/4310.feature.rst
 delete mode 100644 newsfragments/4365.misc.rst
 delete mode 100644 newsfragments/4382.bugfix.rst
 delete mode 100644 newsfragments/4403.bugfix.rst
 delete mode 100644 newsfragments/4405.bugfix.rst
 delete mode 100644 newsfragments/4411.bugfix.rst
 delete mode 100644 newsfragments/4422.misc.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 25968653ac..4856098dff 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 70.0.0
+current_version = 70.1.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 06da16714b..265e545d54 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,33 @@
+v70.1.0
+=======
+
+Features
+--------
+
+- Adopted the ``bdist_wheel`` command from the ``wheel`` project -- by :user:`agronholm` (#1386)
+- Improve error message when ``pkg_resources.ZipProvider`` tries to extract resources with a missing Egg -- by :user:`Avasam`
+
+  Added variables and parameter type annotations to ``pkg_resources`` to be nearly on par with typeshed.\* -- by :user:`Avasam`
+  \* Excluding ``TypeVar`` and ``overload``. Return types are currently inferred. (#4246)
+- Migrated Setuptools' own config to pyproject.toml (#4310)
+
+
+Bugfixes
+--------
+
+- Prevent a ``TypeError: 'NoneType' object is not callable`` when ``shutil_rmtree`` is called without an ``onexc`` parameter on Python<=3.11 -- by :user:`Avasam` (#4382)
+- Replace use of mktemp with can_symlink from the stdlib test suite. (#4403)
+- Improvement for ``attr:`` directives in configuration to handle
+  more edge cases related to complex ``package_dir``. (#4405)
+- Fix accidental implicit string concatenation. (#4411)
+
+
+Misc
+----
+
+- #4365, #4422
+
+
 v70.0.0
 =======
 
diff --git a/newsfragments/1386.feature.rst b/newsfragments/1386.feature.rst
deleted file mode 100644
index c8d50bc22e..0000000000
--- a/newsfragments/1386.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Adopted the ``bdist_wheel`` command from the ``wheel`` project -- by :user:`agronholm`
diff --git a/newsfragments/4246.feature.rst b/newsfragments/4246.feature.rst
deleted file mode 100644
index d5dd2ead98..0000000000
--- a/newsfragments/4246.feature.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Improve error message when ``pkg_resources.ZipProvider`` tries to extract resources with a missing Egg -- by :user:`Avasam`
-
-Added variables and parameter type annotations to ``pkg_resources`` to be nearly on par with typeshed.\* -- by :user:`Avasam`
-\* Excluding ``TypeVar`` and ``overload``. Return types are currently inferred. 
diff --git a/newsfragments/4310.feature.rst b/newsfragments/4310.feature.rst
deleted file mode 100644
index 2379f3f342..0000000000
--- a/newsfragments/4310.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Migrated Setuptools' own config to pyproject.toml
\ No newline at end of file
diff --git a/newsfragments/4365.misc.rst b/newsfragments/4365.misc.rst
deleted file mode 100644
index 7badfff8f0..0000000000
--- a/newsfragments/4365.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Use actual boolean parameters and variables instead of 0-1 literals. -- by :user:`Avasam`
diff --git a/newsfragments/4382.bugfix.rst b/newsfragments/4382.bugfix.rst
deleted file mode 100644
index 3aa9e18573..0000000000
--- a/newsfragments/4382.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Prevent a ``TypeError: 'NoneType' object is not callable`` when ``shutil_rmtree`` is called without an ``onexc`` parameter on Python<=3.11 -- by :user:`Avasam`
diff --git a/newsfragments/4403.bugfix.rst b/newsfragments/4403.bugfix.rst
deleted file mode 100644
index c07cd48c7e..0000000000
--- a/newsfragments/4403.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Replace use of mktemp with can_symlink from the stdlib test suite.
\ No newline at end of file
diff --git a/newsfragments/4405.bugfix.rst b/newsfragments/4405.bugfix.rst
deleted file mode 100644
index 164ace4934..0000000000
--- a/newsfragments/4405.bugfix.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Improvement for ``attr:`` directives in configuration to handle
-more edge cases related to complex ``package_dir``.
diff --git a/newsfragments/4411.bugfix.rst b/newsfragments/4411.bugfix.rst
deleted file mode 100644
index e306f3ef0a..0000000000
--- a/newsfragments/4411.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix accidental implicit string concatenation.
diff --git a/newsfragments/4422.misc.rst b/newsfragments/4422.misc.rst
deleted file mode 100644
index e45b6d44b2..0000000000
--- a/newsfragments/4422.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Reorder code in ``pkg_resources/__init__.py`` to avoid definitions after ``@_call_aside``.
diff --git a/pyproject.toml b/pyproject.toml
index a7d1f3e99c..7c490f7ce9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "70.0.0"
+version = "70.1.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From dc527c152bd74a9e6680db619933316f3c4b73d8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Jun 2024 12:12:36 -0400
Subject: [PATCH 0736/1761] Expand changelog to provide more context. Ref #3593

---
 NEWS.rst | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/NEWS.rst b/NEWS.rst
index 265e545d54..5c3f5b4319 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -139,7 +139,19 @@ v69.3.0
 Features
 --------
 
-- Support PEP 625 by canonicalizing package name and version in filenames. (#3593)
+- Support PEP 625 by canonicalizing package name and version in filenames
+  per
+  `the spec `_.
+  Projects whose names contain uppercase characters, dashes, or periods will
+  now see their sdist names normalized to match the standard and the format
+  previously seen in wheels. For example:
+
+  - ``zope.interface`` -> ``zope_interface``
+  - ``CherryPy`` -> ``cherrypy``
+  - ``foo-bar_baz`` -> ``foo_bar_baz``
+
+  Projects are encouraged to adopt this change to align with standards and
+  other backend build systems. (#3593)
 
 
 v69.2.0
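
The normalization described above matches what the vendored ``packaging``
library exposes; a sketch of the filename stem computation (the helper name
is illustrative, not setuptools API):

    from packaging.utils import canonicalize_name

    def sdist_stem(name: str) -> str:
        # PEP 625: canonicalize, then use '_' in place of '-' in filenames
        return canonicalize_name(name).replace("-", "_")

    assert sdist_stem("zope.interface") == "zope_interface"
    assert sdist_stem("CherryPy") == "cherrypy"
    assert sdist_stem("foo-bar_baz") == "foo_bar_baz"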

From d4ffbb1d2ad0da57422a8b35f80b276c570f383b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Jun 2024 11:10:08 +0100
Subject: [PATCH 0737/1761] Remove unnecessary WheelFile usage in tests

---
 setuptools/tests/test_bdist_wheel.py | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 5d28368c88..d257651ad3 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -25,7 +25,6 @@
     remove_readonly_exc,
 )
 from setuptools.extern.packaging import tags
-from setuptools.extern.wheel.wheelfile import WheelFile
 
 DEFAULT_FILES = {
     "dummy_dist-1.0.dist-info/top_level.txt",
@@ -170,7 +169,7 @@ def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
         str(tmp_path),
         "--universal",
     ])
-    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         license_files = {
             "dummy_dist-1.0.dist-info/" + fname for fname in DEFAULT_LICENSE_FILES
         }
@@ -190,7 +189,7 @@ def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
         str(tmp_path),
         "--universal",
     ])
-    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         license_files = {"dummy_dist-1.0.dist-info/DUMMYFILE"}
         assert set(wf.namelist()) == DEFAULT_FILES | license_files
 
@@ -219,7 +218,7 @@ def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, confi
         str(tmp_path),
         "--universal",
     ])
-    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         license_files = {
             "dummy_dist-1.0.dist-info/" + fname for fname in {"DUMMYFILE", "LICENSE"}
         }
@@ -239,7 +238,7 @@ def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path):
         str(tmp_path),
         "--universal",
     ])
-    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         assert set(wf.namelist()) == DEFAULT_FILES
 
 
@@ -254,7 +253,7 @@ def test_build_number(dummy_dist, monkeypatch, tmp_path):
         "--universal",
         "--build-number=2",
     ])
-    with WheelFile("dist/dummy_dist-1.0-2-py2.py3-none-any.whl") as wf:
+    with ZipFile("dist/dummy_dist-1.0-2-py2.py3-none-any.whl") as wf:
         filenames = set(wf.namelist())
         assert "dummy_dist-1.0.dist-info/RECORD" in filenames
         assert "dummy_dist-1.0.dist-info/METADATA" in filenames
@@ -307,7 +306,7 @@ def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type):
         "--universal",
         f"--compression={option}",
     ])
-    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         filenames = set(wf.namelist())
         assert "dummy_dist-1.0.dist-info/RECORD" in filenames
         assert "dummy_dist-1.0.dist-info/METADATA" in filenames
@@ -317,7 +316,7 @@ def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type):
 
 def test_wheelfile_line_endings(wheel_paths):
     for path in wheel_paths:
-        with WheelFile(path) as wf:
+        with ZipFile(path) as wf:
             wheelfile = next(fn for fn in wf.filelist if fn.filename.endswith("WHEEL"))
             wheelfile_contents = wf.read(wheelfile)
             assert b"\r" not in wheelfile_contents

From beb112ab78a12c659c06b3c7c01836ee6acbe903 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Jun 2024 12:01:21 +0100
Subject: [PATCH 0738/1761] Inline test_bdist_wheel examples and run
 bdist_wheel in-process

---
 setuptools/tests/test_bdist_wheel.py | 350 ++++++++++++++++++---------
 1 file changed, 232 insertions(+), 118 deletions(-)

diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index d257651ad3..a20ebd0546 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -7,23 +7,25 @@
 import shutil
 import stat
 import struct
-import subprocess
 import sys
 import sysconfig
 from contextlib import suppress
-from functools import partial
 from inspect import cleandoc
 from unittest.mock import Mock
 from zipfile import ZipFile
 
+import jaraco.path
 import pytest
+
 import setuptools
+from distutils.core import run_setup
 from setuptools.command.bdist_wheel import (
     bdist_wheel,
     get_abi_tag,
     remove_readonly,
     remove_readonly_exc,
 )
+from setuptools.dist import Distribution
 from setuptools.extern.packaging import tags
 
 DEFAULT_FILES = {
@@ -58,51 +60,187 @@
 """
 
 
-@pytest.fixture(scope="module")
-def wheel_paths(request, tmp_path_factory):
-    test_distributions = (
-        "complex-dist",
-        "simple.dist",
-        "headers.dist",
-        "commasinfilenames.dist",
-        "unicode.dist",
+EXAMPLES = {
+    "dummy-dist": {
+        "setup.py": SETUPPY_EXAMPLE,
+        "licenses": {"DUMMYFILE": ""},
+        **dict.fromkeys(DEFAULT_LICENSE_FILES | OTHER_IGNORED_FILES, ""),
+    },
+    "simple-dist": {
+        "setup.py": cleandoc(
+            """
+            from setuptools import setup
+
+            setup(
+                name="simple.dist",
+                version="0.1",
+                description="A testing distribution \N{SNOWMAN}",
+                extras_require={"voting": ["beaglevote"]},
+            )
+            """
+        ),
+        "simpledist": "",
+    },
+    "complex-dist": {
+        "setup.py": cleandoc(
+            """
+            from setuptools import setup
+
+            setup(
+                name="complex-dist",
+                version="0.1",
+                description="Another testing distribution \N{SNOWMAN}",
+                long_description="Another testing distribution \N{SNOWMAN}",
+                author="Illustrious Author",
+                author_email="illustrious@example.org",
+                url="http://example.org/exemplary",
+                packages=["complexdist"],
+                setup_requires=["setuptools"],
+                install_requires=["quux", "splort"],
+                extras_require={"simple": ["simple.dist"]},
+                tests_require=["foo", "bar>=10.0.0"],
+                entry_points={
+                    "console_scripts": [
+                        "complex-dist=complexdist:main",
+                        "complex-dist2=complexdist:main",
+                    ],
+                },
+            )
+            """
+        ),
+        "complexdist": {"__init__.py": "def main(): return"},
+    },
+    "headers-dist": {
+        "setup.py": cleandoc(
+            """
+            from setuptools import setup
+
+            setup(
+                name="headers.dist",
+                version="0.1",
+                description="A distribution with headers",
+                headers=["header.h"],
+            )
+            """
+        ),
+        "setup.cfg": "[bdist_wheel]\nuniversal=1",
+        "headersdist.py": "",
+        "header.h": "",
+    },
+    "commasinfilenames-dist": {
+        "setup.py": cleandoc(
+            """
+            from setuptools import setup
+
+            setup(
+                name="testrepo",
+                version="0.1",
+                packages=["mypackage"],
+                description="A test package with commas in file names",
+                include_package_data=True,
+                package_data={"mypackage.data": ["*"]},
+            )
+            """
+        ),
+        "mypackage": {
+            "__init__.py": "",
+            "data": {"__init__.py": "", "1,2,3.txt": ""},
+        },
+        "testrepo-0.1.0": {
+            "mypackage": {"__init__.py": ""},
+        },
+    },
+    "unicode-dist": {
+        "setup.py": cleandoc(
+            """
+            from setuptools import setup
+
+            setup(
+                name="unicode.dist",
+                version="0.1",
+                description="A testing distribution \N{SNOWMAN}",
+                packages=["unicodedist"],
+            )
+            """
+        ),
+        "unicodedist": {"__init__.py": "", "åäö_日本語.py": ""},
+    },
+    "utf8-metadata-dist": {
+        "setup.cfg": cleandoc(
+            """
+            [metadata]
+            name = utf8-metadata-dist
+            version = 42
+            author_email = "John X. Ãørçeč" , Γαμα קּ 東 
+            long_description = file: README.rst
+            """
+        ),
+        "README.rst": "UTF-8 描述 説明",
+    },
+}
+
+
+if sys.platform != "win32":
+    # ABI3 extensions don't really work on Windows
+    EXAMPLES["abi3extension-dist"] = {
+        "setup.py": cleandoc(
+            """
+            from setuptools import Extension, setup
+
+            setup(
+                name="extension.dist",
+                version="0.1",
+                description="A testing distribution \N{SNOWMAN}",
+                ext_modules=[
+                    Extension(
+                        name="extension", sources=["extension.c"], py_limited_api=True
+                    )
+                ],
+            )
+            """
+        ),
+        "setup.cfg": "[bdist_wheel]\npy_limited_api=cp32",
+        "extension.c": "#define Py_LIMITED_API 0x03020000\n#include ",
+    }
+
+
+def bdist_wheel_cmd(**kwargs):
+    """Run command in the same process so that it is easier to collect coverage"""
+    dist_obj = (
+        run_setup("setup.py", stop_after="init")
+        if os.path.exists("setup.py")
+        else Distribution({"script_name": "%%build_meta%%"})
     )
+    dist_obj.parse_config_files()
+    cmd = bdist_wheel(dist_obj)
+    for attr, value in kwargs.items():
+        setattr(cmd, attr, value)
+    cmd.finalize_options()
+    return cmd
+
+
+def mkexample(tmp_path_factory, name):
+    basedir = tmp_path_factory.mktemp(name)
+    jaraco.path.build(EXAMPLES[name], prefix=str(basedir))
+    return basedir
 
-    if sys.platform != "win32":
-        # ABI3 extensions don't really work on Windows
-        test_distributions += ("abi3extension.dist",)
 
-    pwd = os.path.abspath(os.curdir)
-    request.addfinalizer(partial(os.chdir, pwd))
-    this_dir = os.path.dirname(__file__)
-    build_dir = tmp_path_factory.mktemp("build")
+@pytest.fixture(scope="session")
+def wheel_paths(tmp_path_factory):
+    build_base = tmp_path_factory.mktemp("build")
     dist_dir = tmp_path_factory.mktemp("dist")
-    for dist in test_distributions:
-        os.chdir(os.path.join(this_dir, "bdist_wheel_testdata", dist))
-        subprocess.check_call([
-            sys.executable,
-            "setup.py",
-            "bdist_wheel",
-            "-b",
-            str(build_dir),
-            "-d",
-            str(dist_dir),
-        ])
+    for name in EXAMPLES:
+        example_dir = mkexample(tmp_path_factory, name)
+        build_dir = build_base / name
+        with jaraco.path.DirectoryStack().context(example_dir):
+            bdist_wheel_cmd(bdist_dir=str(build_dir), dist_dir=str(dist_dir)).run()
 
-    return sorted(str(fname) for fname in dist_dir.iterdir() if fname.suffix == ".whl")
+    return sorted(str(fname) for fname in dist_dir.glob("*.whl"))
 
 
 @pytest.fixture
 def dummy_dist(tmp_path_factory):
-    basedir = tmp_path_factory.mktemp("dummy_dist")
-    basedir.joinpath("setup.py").write_text(SETUPPY_EXAMPLE, encoding="utf-8")
-    for fname in DEFAULT_LICENSE_FILES | OTHER_IGNORED_FILES:
-        basedir.joinpath(fname).write_text("", encoding="utf-8")
-
-    licensedir = basedir.joinpath("licenses")
-    licensedir.mkdir()
-    licensedir.joinpath("DUMMYFILE").write_text("", encoding="utf-8")
-    return basedir
+    return mkexample(tmp_path_factory, "dummy-dist")
 
 
 def test_no_scripts(wheel_paths):
@@ -161,14 +299,7 @@ def license_paths(self):
 
 def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
     monkeypatch.chdir(dummy_dist)
-    subprocess.check_call([
-        sys.executable,
-        "setup.py",
-        "bdist_wheel",
-        "-b",
-        str(tmp_path),
-        "--universal",
-    ])
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
     with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         license_files = {
             "dummy_dist-1.0.dist-info/" + fname for fname in DEFAULT_LICENSE_FILES
@@ -181,14 +312,9 @@ def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
         "[metadata]\nlicense_file=licenses/DUMMYFILE", encoding="utf-8"
     )
     monkeypatch.chdir(dummy_dist)
-    subprocess.check_call([
-        sys.executable,
-        "setup.py",
-        "bdist_wheel",
-        "-b",
-        str(tmp_path),
-        "--universal",
-    ])
+
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
+
     with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         license_files = {"dummy_dist-1.0.dist-info/DUMMYFILE"}
         assert set(wf.namelist()) == DEFAULT_FILES | license_files
@@ -210,14 +336,7 @@ def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
 def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, config):
     dummy_dist.joinpath(config_file).write_text(config, encoding="utf-8")
     monkeypatch.chdir(dummy_dist)
-    subprocess.check_call([
-        sys.executable,
-        "setup.py",
-        "bdist_wheel",
-        "-b",
-        str(tmp_path),
-        "--universal",
-    ])
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
     with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         license_files = {
             "dummy_dist-1.0.dist-info/" + fname for fname in {"DUMMYFILE", "LICENSE"}
@@ -230,51 +349,65 @@ def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path):
         "[metadata]\nlicense_files=\n", encoding="utf-8"
     )
     monkeypatch.chdir(dummy_dist)
-    subprocess.check_call([
-        sys.executable,
-        "setup.py",
-        "bdist_wheel",
-        "-b",
-        str(tmp_path),
-        "--universal",
-    ])
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
     with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         assert set(wf.namelist()) == DEFAULT_FILES
 
 
 def test_build_number(dummy_dist, monkeypatch, tmp_path):
     monkeypatch.chdir(dummy_dist)
-    subprocess.check_call([
-        sys.executable,
-        "setup.py",
-        "bdist_wheel",
-        "-b",
-        str(tmp_path),
-        "--universal",
-        "--build-number=2",
-    ])
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2", universal=True).run()
     with ZipFile("dist/dummy_dist-1.0-2-py2.py3-none-any.whl") as wf:
         filenames = set(wf.namelist())
         assert "dummy_dist-1.0.dist-info/RECORD" in filenames
         assert "dummy_dist-1.0.dist-info/METADATA" in filenames
 
 
-def test_limited_abi(monkeypatch, tmp_path):
+EXTENSION_EXAMPLE = """\
+#include <Python.h>
+
+static PyMethodDef methods[] = {
+  { NULL, NULL, 0, NULL }
+};
+
+static struct PyModuleDef module_def = {
+  PyModuleDef_HEAD_INIT,
+  "extension",
+  "Dummy extension module",
+  -1,
+  methods
+};
+
+PyMODINIT_FUNC PyInit_extension(void) {
+  return PyModule_Create(&module_def);
+}
+"""
+EXTENSION_SETUPPY = """\
+from __future__ import annotations
+
+from setuptools import Extension, setup
+
+setup(
+    name="extension.dist",
+    version="0.1",
+    description="A testing distribution \N{SNOWMAN}",
+    ext_modules=[Extension(name="extension", sources=["extension.c"])],
+)
+"""
+
+
+@pytest.mark.filterwarnings(
+    "once:Config variable '.*' is unset.*, Python ABI tag may be incorrect"
+)
+def test_limited_abi(monkeypatch, tmp_path, tmp_path_factory):
     """Test that building a binary wheel with the limited ABI works."""
-    this_dir = os.path.dirname(__file__)
-    source_dir = os.path.join(this_dir, "bdist_wheel_testdata", "extension.dist")
+    source_dir = tmp_path_factory.mktemp("extension_dist")
+    (source_dir / "setup.py").write_text(EXTENSION_SETUPPY, encoding="utf-8")
+    (source_dir / "extension.c").write_text(EXTENSION_EXAMPLE, encoding="utf-8")
     build_dir = tmp_path.joinpath("build")
     dist_dir = tmp_path.joinpath("dist")
     monkeypatch.chdir(source_dir)
-    subprocess.check_call([
-        sys.executable,
-        "setup.py",
-        "bdist_wheel",
-        "-b",
-        str(build_dir),
-        "-d",
-        str(dist_dir),
-    ])
+    bdist_wheel_cmd(bdist_dir=str(build_dir), dist_dir=str(dist_dir)).run()
 
 
 def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
@@ -287,7 +420,7 @@ def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
         for fname in files:
             os.chmod(os.path.join(root, fname), stat.S_IREAD)
 
-    subprocess.check_call([sys.executable, "setup.py", "bdist_wheel"])
+    bdist_wheel_cmd().run()
 
 
 @pytest.mark.parametrize(
@@ -297,15 +430,7 @@ def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
 )
 def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type):
     monkeypatch.chdir(dummy_dist)
-    subprocess.check_call([
-        sys.executable,
-        "setup.py",
-        "bdist_wheel",
-        "-b",
-        str(tmp_path),
-        "--universal",
-        f"--compression={option}",
-    ])
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True, compression=option).run()
     with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
         filenames = set(wf.namelist())
         assert "dummy_dist-1.0.dist-info/RECORD" in filenames
@@ -325,15 +450,10 @@ def test_wheelfile_line_endings(wheel_paths):
 def test_unix_epoch_timestamps(dummy_dist, monkeypatch, tmp_path):
     monkeypatch.setenv("SOURCE_DATE_EPOCH", "0")
     monkeypatch.chdir(dummy_dist)
-    subprocess.check_call([
-        sys.executable,
-        "setup.py",
-        "bdist_wheel",
-        "-b",
-        str(tmp_path),
-        "--universal",
-        "--build-number=2",
-    ])
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2a", universal=True).run()
+    with ZipFile("dist/dummy_dist-1.0-2a-py2.py3-none-any.whl") as wf:
+        for zinfo in wf.filelist:
+            assert zinfo.date_time >= (1980, 1, 1, 0, 0, 0)  # min epoch is used
 
 
 def test_get_abi_tag_windows(monkeypatch):
@@ -371,13 +491,7 @@ def test_get_abi_tag_fallback(monkeypatch):
 def test_platform_with_space(dummy_dist, monkeypatch):
     """Ensure building on platforms with a space in the name succeed."""
     monkeypatch.chdir(dummy_dist)
-    subprocess.check_call([
-        sys.executable,
-        "setup.py",
-        "bdist_wheel",
-        "--plat-name",
-        "isilon onefs",
-    ])
+    bdist_wheel_cmd(plat_name="isilon onefs").run()
 
 
 def test_rmtree_readonly(monkeypatch, tmp_path):
@@ -435,7 +549,7 @@ def test_data_dir_with_tag_build(monkeypatch, tmp_path):
         with open(file, "w", encoding="utf-8") as fh:
             fh.write(cleandoc(content))
 
-    subprocess.check_call([sys.executable, "setup.py", "bdist_wheel"])
+    bdist_wheel_cmd().run()
 
     # Ensure .whl, .dist-info and .data contain the local segment
     wheel_path = "dist/test-1.0+what-py3-none-any.whl"

From 61e18ff75de5e77b2587efaa56f473e3ed78936d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Jun 2024 12:07:00 +0100
Subject: [PATCH 0739/1761] Adopt latest change from wheel test

From https://github.com/pypa/wheel/commit/dedcc19f517c08899cbe730783cb900e0c6111c0
---
 setuptools/tests/test_bdist_wheel.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index a20ebd0546..232b66d368 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -160,6 +160,7 @@
                 version="0.1",
                 description="A testing distribution \N{SNOWMAN}",
                 packages=["unicodedist"],
+                zip_safe=True,
             )
             """
         ),

From ffea3b0abc3425e21c578bcd1460e366f7d0f34f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Jun 2024 12:08:19 +0100
Subject: [PATCH 0740/1761] Remove no longer used bdist_wheel_testdata
 directory

---
 .../abi3extension.dist/extension.c            |   2 --
 .../abi3extension.dist/setup.cfg              |   2 --
 .../abi3extension.dist/setup.py               |  12 ---------
 .../mypackage/__init__.py                     |   0
 .../mypackage/data/1,2,3.txt                  |   0
 .../mypackage/data/__init__.py                |   0
 .../commasinfilenames.dist/setup.py           |  12 ---------
 .../testrepo-0.1.0/mypackage/__init__.py      |   0
 .../complex-dist/complexdist/__init__.py      |   5 ----
 .../complex-dist/setup.py                     |  24 ------------------
 .../extension.dist/extension.abi3.so          | Bin 23312 -> 0 bytes
 .../extension.dist/extension.c                |  17 -------------
 .../extension.dist/setup.py                   |  10 --------
 .../headers.dist/header.h                     |   0
 .../headers.dist/headersdist.py               |   0
 .../headers.dist/setup.cfg                    |   2 --
 .../headers.dist/setup.py                     |  10 --------
 .../macosx_minimal_system_version/libb.dylib  | Bin 9544 -> 0 bytes
 .../macosx_minimal_system_version/test_lib.c  |  13 ----------
 .../test_lib_10_10.dylib                      | Bin 756 -> 0 bytes
 .../test_lib_10_10_10.dylib                   | Bin 756 -> 0 bytes
 .../test_lib_10_10_386.dylib                  | Bin 668 -> 0 bytes
 .../test_lib_10_10_fat.dylib                  | Bin 8948 -> 0 bytes
 .../test_lib_10_14.dylib                      | Bin 764 -> 0 bytes
 .../test_lib_10_14_386.dylib                  | Bin 676 -> 0 bytes
 .../test_lib_10_14_fat.dylib                  | Bin 8956 -> 0 bytes
 .../test_lib_10_6.dylib                       | Bin 756 -> 0 bytes
 .../test_lib_10_6_386.dylib                   | Bin 668 -> 0 bytes
 .../test_lib_10_6_fat.dylib                   | Bin 8948 -> 0 bytes
 .../test_lib_10_9_universal2.dylib            | Bin 65936 -> 0 bytes
 .../test_lib_11.dylib                         | Bin 16464 -> 0 bytes
 .../test_lib_multiple_fat.dylib               | Bin 8956 -> 0 bytes
 .../bdist_wheel_testdata/simple.dist/setup.py |  11 --------
 .../simple.dist/simpledist/__init__.py        |   0
 .../unicode.dist/setup.py                     |  11 --------
 .../unicode.dist/unicodedist/__init__.py      |   0
 ...6_\346\227\245\346\234\254\350\252\236.py" |   0
 37 files changed, 131 deletions(-)
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/abi3extension.dist/extension.c
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.cfg
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/__init__.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/1,2,3.txt
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/__init__.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/setup.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/testrepo-0.1.0/mypackage/__init__.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/complex-dist/complexdist/__init__.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/complex-dist/setup.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/extension.dist/extension.abi3.so
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/extension.dist/extension.c
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/extension.dist/setup.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/headers.dist/header.h
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/headers.dist/headersdist.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/headers.dist/setup.cfg
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/headers.dist/setup.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/libb.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib.c
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_10.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_fat.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14_fat.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_6.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_6_386.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_6_fat.dylib
 delete mode 100755 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_11.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/simple.dist/setup.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/simple.dist/simpledist/__init__.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/unicode.dist/setup.py
 delete mode 100644 setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/__init__.py
 delete mode 100644 "setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/\303\245\303\244\303\266_\346\227\245\346\234\254\350\252\236.py"

diff --git a/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/extension.c b/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/extension.c
deleted file mode 100644
index a37c3fa2dc..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/extension.c
+++ /dev/null
@@ -1,2 +0,0 @@
-#define Py_LIMITED_API 0x03020000
-#include <Python.h>
diff --git a/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.cfg b/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.cfg
deleted file mode 100644
index 9f6ff39a0f..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[bdist_wheel]
-py_limited_api=cp32
diff --git a/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.py
deleted file mode 100644
index 5962bd1552..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/abi3extension.dist/setup.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from __future__ import annotations
-
-from setuptools import Extension, setup
-
-setup(
-    name="extension.dist",
-    version="0.1",
-    description="A testing distribution \N{SNOWMAN}",
-    ext_modules=[
-        Extension(name="extension", sources=["extension.c"], py_limited_api=True)
-    ],
-)
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/__init__.py b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/1,2,3.txt b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/1,2,3.txt
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/__init__.py b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/mypackage/data/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/setup.py
deleted file mode 100644
index a2783a3b62..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/setup.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from __future__ import annotations
-
-from setuptools import setup
-
-setup(
-    name="testrepo",
-    version="0.1",
-    packages=["mypackage"],
-    description="A test package with commas in file names",
-    include_package_data=True,
-    package_data={"mypackage.data": ["*"]},
-)
diff --git a/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/testrepo-0.1.0/mypackage/__init__.py b/setuptools/tests/bdist_wheel_testdata/commasinfilenames.dist/testrepo-0.1.0/mypackage/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/tests/bdist_wheel_testdata/complex-dist/complexdist/__init__.py b/setuptools/tests/bdist_wheel_testdata/complex-dist/complexdist/__init__.py
deleted file mode 100644
index 88aa7b76a4..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/complex-dist/complexdist/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from __future__ import annotations
-
-
-def main():
-    return
diff --git a/setuptools/tests/bdist_wheel_testdata/complex-dist/setup.py b/setuptools/tests/bdist_wheel_testdata/complex-dist/setup.py
deleted file mode 100644
index e0439d9ef4..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/complex-dist/setup.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from __future__ import annotations
-
-from setuptools import setup
-
-setup(
-    name="complex-dist",
-    version="0.1",
-    description="Another testing distribution \N{SNOWMAN}",
-    long_description="Another testing distribution \N{SNOWMAN}",
-    author="Illustrious Author",
-    author_email="illustrious@example.org",
-    url="http://example.org/exemplary",
-    packages=["complexdist"],
-    setup_requires=["wheel", "setuptools"],
-    install_requires=["quux", "splort"],
-    extras_require={"simple": ["simple.dist"]},
-    tests_require=["foo", "bar>=10.0.0"],
-    entry_points={
-        "console_scripts": [
-            "complex-dist=complexdist:main",
-            "complex-dist2=complexdist:main",
-        ],
-    },
-)
diff --git a/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.abi3.so b/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.abi3.so
deleted file mode 100644
index cf9e0b0a491284e40d703523d7fa602a091232ec..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 23312
zcmeHPeQ;dWb$_e1q+Lr|$(C#^+n_bb$ga`K-yawU%d%t%BuiK}4v^&e?e1G?@$P%e
zdv9f}NhfY-`AFiHb~24=LgFNpnwF4E0|^1rnvaI2NjoKNI>0a)6G{pp!w}L?LuPP)
z=bn3a->%j`XZjbjccgdEIrn_sbI<+QeQ)2V21keL0s&2xp!N}su!%ON?34|6PD%jP
zPHn3e#`PlYLcu$))HDZblZ~p@q+YU7Puvo(tUIMks#fhp7Am78j1m>sV%;rKRr}={
z&=!g@L+B&r^;v08tCI?(UxboeRLVuAoT^VsIaP1*cg@RQlKlLjMTLDI7yo0X~n!EgvtW0+El(Vs-2bt
zu%v>n@H1C3BaJEbL6uj0bd{>q_J)s6W1`AgA6?{?YVn<0<{R#B2bG!Edn3D2>hn?qIZUG`BiB6v-Pj`Y1;4~o%ZU^{p;{8PlcWoojted
zYaqXQZspfewRUO5KTR!fzWU<1bLZ|P9IeMi>uxAGzT-a9bG)A9hPR#d7D4r`1VeTB
z+?m#{V@`F6=3Y&EY7wYKpca7-K?LZYAl-_jd+~H17xxrT
z6U~1Q5ooQ{B2bG!EdsR&)FM!eKrI5b2-G4_i$E;`wFrDzA|Rhrt$Z%ES@lmoui7H<
zX35_varNBmGKI@}sjSuWugfdifBWt^n@ES`!>r<&aNuVpuAXT=Ch_@F{(!8TWW7Yz
z>T~O9`FyJs(^NY>vrL7ad!=##SK3)AsOS?CSI<|Ul6+xVD}Ab#WbJq7xIgNd+P5XH
z{8D`XU$uG;y;9#=IbN0YN_@}2z~#|Z_~gGjx-GUYwl2DU?YeDiH?7?eUA50lMu!cL
z>AmY#i)YP+T6Atte|KPmv99<#0rh(UidQ8M6EtnH^r;$;%BK#QSJilwjqrQ$s5Vts
z)1%r{U5!Wiy6ioCR=##he<-&(-$>)!vKz@u->u*&@xTGFH2(>*{ns+1>n0!?c;OCIYrGor!9YWk!0O3N{k;0L|rRrtr~V{p4b)`o|P?FxZx4Id;s
zI|Q;b{AudgPJs-E`!TD7{Q{W?x00P*0{Lk8emEN3y?7f$4~4tPx1o+JflP;AC*OvJ
z*SYZbN!y4(yzs+h=c=XzxM#x&LdF*E1ad5V7lmmDGV4j;xfI35}V!!6zYxg7;eR02hlFpZ!?#oC;-Qxi_1
zNs&*$yKpya`8M(^yq+yR220^B48H)`@OA;$ok9=h@9+E^3|)l9zq9DoRLK#O!C!CSr(
z`3Xpot1vxVPO>*wL3PVl7tmCUtRS9ynP-%E9$+3a-||ht({?GrO9Jdy6C7@f+}Ng5
zx9^5@B-1tv3tD&s+>c~C=@&d9t!W!*4I$#HtA~8V3T^@15V{f9h4+!^j|P@gb6Yz!
zMqUjp{UjmJAP|vXN+e4fUz5m7)Qvw5E+z$S-3WB#1S63_w6tC<Vi;6qz4+t>%;1&5{D`!GxkGK@4GN2Ww>g}Q~!EcQHD+vhXPB
zF9Cn&8ir}f?v!1+9{io$rO0;lqBF)k2LLZ!&+rcs+oc<%ENm^^$nY
z-!XiS^#47>zfby4GkhcY@HoQ}((?qv%SrZGhR+cFIfl0r{bvlnMDU9YucjFOoZ){W
z`b!L7O8)#O!`G6nml^L8L11O1y(t)37HPxhm%7DTFmid*_NHx3BkkDQ
zy0xaJkDwK2iB^Evyd?#lX&?%5HD0Pw6>1LzBTY>#9RMNR;#0O2!t+bou~oLC^$L9E
zYYqfP+KInq1@!cRFS5}mcx5LswXQ(l+a$f7ykq);EyDIfsadpOZC|AIbdpaUivk3Y
z_C@uAQ(J>Esnb^C8VLL@(t@k_$ub5Y(3f|ezDzpaH&>LC
z{A}O0zO4^G4ihG&wvLe6$b1}T*_ylH0h(phT9q>~U#*zp_9gyf7DJkxL-qfkk7{R9|J*h$k*!tY|I
z{CM29Gk!dku?^2pNZ8q2#+>z&9K)I_Q<^oiwo~ScmxqvFh63Zqlj%emZd&=QzuiTg
zeErRu7`S8^#ZKnSO{CK0c&?BzaxR!SxSnGq%(#&_%&415C(1lb3DZiZt*PB+&M^~)m$t2ghGT&u+jkkR
z2|hDNO`z?kaT!5R_5GMCnl5a_^C`(bS-{VyvxBo~4}>u@8#kTZ6b9y=DD1|lU2P)O
zg*4Pb$+#FM@b5xS_ES!l^s|!(I1L9WHkv+B(1(nSYfg%JrBOmg#U*dK=_w0eTTquL
z3Prut&+9QZyVEmi7u{ezm`F*(GilOE=MPwMJ8vaN?Szq$wse|3NAXi|unZWPOr>Z2
zu4_6J8s;zgB3+o&N32}l8@BDkvV*Q)#hA#-vIvp0lLgB&W~B=gg@c?o`aUxy!$wRa
zGq%y?ST&eIR-
zs|YZwok>#aV(KDTL$r{nJfSpX=M`mq67evSSme#2ypo5(r8keQJk+eGI>);
zARV#DT$?tJC<=vf6ho8G$xwPoM|(~>`q_DC%IAo%qM|ry7M&hNX8L6*xER<{x=?Jg
zhI1G`(@tP%dde~J5(1avjBb#BGWV55to`^CgSr=KDdzwwI^v{}%{0L$3Zqyo{dq=;
z_L!FGU}<*b>>S{A+3z4`X52QM7q}Rum}OA-Fi*Ol4ep^>^x-y^#HG9P}aELljco%
z(@K~K5+zV_C~anv<#F3>Qt~M=ii`9RvtqC9jnLkd#i*D`70S|ln{ez|u|h$m9EM`F
zu-hnjk~RW`(3tO|?X47kbJjDlGTBxvA;xWB+DshYXJQ5saWtL9Vu_%pF>N_>MLx+T
zvNWibBef6WBM^gp*sIVxSD#
zXU>#Hms4|}>DqZGVJaz|j&qoA*sf?#kfWf_c=Se*(i#U%rsJAPr5y=f(o75YekZTy
z_(VbEm15P0uL_Z&G!mi@d^`ueA
zq^G7mdO9#Vux`uy-X@6lW;bud#migfjwxDm(g~o3lbGIt^LZ~dPKm$q_0isxsm>NI
z5WI~gakv-TgNJjf?ew}HvJK2B!!1~eX&`pq?af<9Fjsm-C`(caEHTqc7V`>HiIflD
zk;-Q>ARa-M&7htb8Bn>3EnIbYqA*Eg&zZ=f*7cl`+yq{lZYuiMr*jIV;Nrh@LZV~h
zE_6gOAmM$3>O1E{r<^CHb}R&xH4_CaK9~$%S=w`EmW@_Eg*nO;aqQc?QSQ{@xF?q1
z;$qAzi13#uGA(>8YsYok`(#9li<4*^v9LIoZAK!~igS804@XsJk(KBGV|w1oBe~bp
zDsC1}S$nZkvr*)?I$N-M%yMp_bKI1Xl~&JtI8~$8H*IIlJ_DaJ`VzFbV$0||GHsff
zKIF3JiYv}~Mqg8Kc7<@7zM50^PF@X+f~u~VZ(i|
zS-2M&$3vzff-|u=sHq+g}Ii4LQ*mX!7=u~VdzCZ?=s
z$jH&6ue=>IWXkHgceHpk<5Pp&rsal`_3b@$;FGQ7tUTX~gE)`bP~OWszGbCi^TRpL
zv5%To@mPdR!)KrbPU!Dfg7_S*-kas;4dO%fA^KjWq6G2T`dC%^yyE?2zeo_DnALsW
za(YAYesDP*UjfPcf#vj&7OhIhThgl1=NIprmg{NK)P2x$I=%>s`Y8+twH7T}l^!X+
zhsrM%#H|{2AHJO4rm6Q%mD3k!)$T6`wS}6xzh2IduWWYuDGUe2o5HHm7ipnG)#!`o
z5ZycMBfhc0moUGkpMjSZL>}2Tcf9cJnxG%67~d6=
ze!7BAHv$7%DL!P6$`JkoRmLR?I@wk44ditUwKFM&34Lf^|Ja~D
zxOX?b!EX0;d;7<5sL^%2y-pFazuEe)p*H528H0!t%4Iw)CVCVD7<0{p7V{7uEhah`E9%B^QXHhTm^sbC
zWjT&3vn!akPBIj8Y+=~QrlEGqhSs?2YB7v2otfB8;dIPUrvfx5&3Jway6E#?5lveu
zQpQd3xMR*JjJ~^2FhWu#ZU4_Cdj1Nt3v-N%+CNp@E$OPlx58zGUUT^to>@}zha{kC
zdd7tczQq;=_Y}&?R$#arP_%zc%By--a+6(5J@HHtk(ST}Wheegx3Yg$0;(4GF}3fAf#K#tNmTorzCTw|H}S8DZfkdsprB}t@ubDtKJ;{=?*{ngzw$CsC7=(>iIa;
zTp9m^Xz#jk%x#VG(MDjN?x7c)cb2sl5!Lk2j#aK
z$0yL{x3A85r}5?-Dm%rsbW!rErhT7aWxpt}mSq&5(jeVYsPv+uWtxVG(g%at$N&AeFyVpyIe((M7=?r-dD5O@%-Os{Joq
C1PR0d

diff --git a/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.c b/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.c
deleted file mode 100644
index 26403efa82..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/extension.dist/extension.c
+++ /dev/null
@@ -1,17 +0,0 @@
-#include <Python.h>
-
-static PyMethodDef methods[] = {
-	{ NULL, NULL, 0, NULL }
-};
-
-static struct PyModuleDef module_def = {
-	PyModuleDef_HEAD_INIT,
-	"extension",
-	"Dummy extension module",
-	-1,
-	methods
-};
-
-PyMODINIT_FUNC PyInit_extension(void) {
-	return PyModule_Create(&module_def);
-}
diff --git a/setuptools/tests/bdist_wheel_testdata/extension.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/extension.dist/setup.py
deleted file mode 100644
index 9a6eed8cfd..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/extension.dist/setup.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from __future__ import annotations
-
-from setuptools import Extension, setup
-
-setup(
-    name="extension.dist",
-    version="0.1",
-    description="A testing distribution \N{SNOWMAN}",
-    ext_modules=[Extension(name="extension", sources=["extension.c"])],
-)
diff --git a/setuptools/tests/bdist_wheel_testdata/headers.dist/header.h b/setuptools/tests/bdist_wheel_testdata/headers.dist/header.h
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/tests/bdist_wheel_testdata/headers.dist/headersdist.py b/setuptools/tests/bdist_wheel_testdata/headers.dist/headersdist.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.cfg b/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.cfg
deleted file mode 100644
index 3c6e79cf31..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[bdist_wheel]
-universal=1
diff --git a/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.py
deleted file mode 100644
index 6cf9b46faf..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/headers.dist/setup.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from __future__ import annotations
-
-from setuptools import setup
-
-setup(
-    name="headers.dist",
-    version="0.1",
-    description="A distribution with headers",
-    headers=["header.h"],
-)
diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/libb.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/libb.dylib
deleted file mode 100644
index 25c954656b07e8abb28f87fbc296f90214207dd2..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 9544
zcmeHNU2IfE6rOD>EVQsKKLJrm3q@-X+J#a|kfi)vyqi|6-4d(B47=Mcd&B;ey?1Nd
zu%yyry4EI*(fB}Oh%u4yL=35Yf(Av3G0_+wc+jK|Xtn_Y8Xu}6>v!hPE!*9K)%bW%
zdglDhnKNh3o!Q~UaK0b`uEDs8PoHM=OC
zEix&?CxaW|lPowlg53sjCgl2?ppF5z9VR6i-9CUK*7rale|k^E9}9EcNci3P@r^Dq
zt?OhXlBH{*aY}zQW&+Lwx)Il+M@%tqytkykB3W+Y-?YkoY0(mWx*R;-*yVl+t2;2nu*;GzDURu}M_
z9oHgDFDd6y%n@^{bQ$5r8QiK(@12&(7_7E$c}PEe#RiL1P!f6y~F%HYKBZeF%pt3J(CjP!@!C5tLCVrHrBYvj|CWz
zFMC{MR}Z9^gxw;u_v(ZmuZA(9ZnOtBZroh=sCLB#NqBIu>~`KB-3UfD@5yclBOB}t
z*cq@hU}wP2z`xGGK{fT0+JCDwmAscqO!p`6ITD-ROipJqj3p}7;a%_G1uOj{{-LVF
zE1kb(G8d@_=s$o;=&yZFE-q4oOVF|OxlATwD!p*rrlyi(YASJFO&vU|4kgc~Pmr_B
zgql(&)gk4)$$qO2CxFSxbdsuv7^<(MdJ=TvOXZG3y{e2;*Ect+Lx~CDXeTUPqouGK
zR|}Of*C?yM?0Ro}P`RUKUYt-fP*c@T<+@(om%PIgS8hHt)A&AonT@Z{)wn-7!Hfl>
z)z|Q1*$j-I*y0*}{SNgnGLasg%g=}7uF>&|>!-$D?^8IgQH0e0McW}WE$Z;Lb7-Xw
z?Ye|Fg!H?_r>?3)+r~&d3NcMDU(z_0oJRWYA_yQ2cAB!Y(GrvdjhpQf5P!D7!`3E6U@dJR!}B$?i~NS08xTE2reKtcGs3k9G7Nbm9`_-d>aEg8diprafwW59y#OPIs_j&qbz
z9|Vrx<`(AYE?sc?0JVM!qf&y$OB|#BYFD-qmpoqF!VtxOSJw2_`
zzfAj8!V46+_C@r<++Rrc1Hi$1ru|;<0&?x8n(QwCPfL4x#})7h`x`L$Q^vDKn&-tM
z_2hpAVm>16H~oXXRb5qv4p{1KIR;e
zaxmH+_5{3!+Z_n`;@pV)G(-0U6r}~at$v@b1$Yc6=Rse@)5=?GxZbI2#`k!!Wzs8quPjh|<#7%Dr3Y51||u2FGg3
z$i(gILW5)VBHROwUcFY=dV`8kHq0osZY4rys;TkD!n)GDzr`E#a)tEGQg762F`Z)&
z!CDVaocFp-M{RRg?~~ObQSCLtx@UfQ3A7?B+jw(hfI9D7PRR*$`?wERA~XX!0tZHi
z)6D|6zl*K+8u^*1DrBFnVTDCE#8wdPr>(X2sKBoPE`@B}qfn%5?eDxIW!g(4=mC=b
zX}Ldh67t$2#;kh?6x7nij9KT;XCS*^k3Bd6p9r~+S^GP*i|SyGnTW2KGKDGNdz7f3
zktcKHUvuPQc*?e~6!L>@#OfURXpa5Irc8T$0zS>LpD^W_dOAN)Zi{wyNmt
znr^}@1JX8SDR4Ou&_Fn!c4S=6aI+!A#mPBth4^UZ4g-F
zav$z`vU;f;qTC6;f>@_DvvD%2Az0QPYt`#;{
ig;-mtuzrC|_P_o3h_?S_an4?rbN_

-        if (*lett >= 'a' && *lett <= 'z'){
-            num += 1;
-        } else if (*lett >= 'A' && *lett <= 'Z'){
-            num += 1;
-        }
-        lett += 1;
-    }
-    return num;
-}
diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10.dylib
deleted file mode 100644
index eaf1a94e5f8a259da16f00db62453e0c7796c717..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 756
zcmX^A>+L^w1_nlE1|R{%EI_;g#83cYAdm!N3lJX%_yHBa096Cy1I=UrVUW2X5FcNX
zT2TUFL---C5g`aZj5P(y0GS7J3mmX8G{D(FMtpp7er`cxa!Gt?UU_C-N_@PJ3$pwS
zC`SQGXD~tBaso(0oeE?jKzw{^MtoXPVs0u#EIuC9eLJ8^9iVgv)I2Dcft!IL1I7l?
zDnKj%VsLRWFz|6PfGm>$;tNpw9{_0)ARnlYApnSx0myw&HzErY#SZo8eCp9z`{TIl
z7m#|7?%E$f?hB9R+7}G`ExBMJ*BAWzIGTSZ^7qIg^Q-v#`GEXx*DuWn7&~3RywpRM
zb>!~>r3H^}sO~8KeohqCKn_^e1E|}h)Ahy6j{pDv0}YEk43dEXX#8S$4-^(4z*rT;
zz*r%`D8R$cF#)Iw6pk`LtN}C(1fcpE7##To+L)Yq*+8100H~Y|WH3Ln>G64`x$*gF
O@j0m_C8+L^w1_nlE1|R{%EI_;g#83cYAdm!N3lJX%_yHBa096Cy1I=UrVUW2X5FcNX
zT2TUFL---C5g`aZj5P(y0GS7J3mmX8G{D(FMtpp7er`cxa!Gt?UU_C-N_@PJ3$pwS
zC`SQGXD~tBaso(0oeE?jKzw{^MtoXPVs0u#EIuC9eLJ8^9iVgv)I2Dcft!IL1I7l?
zDnKj%#9Ull3=Di+3?R!SfcOH`{s%x>1jq;KV+a6ZWB_s>)Q!l(M6p9XI-h!U*8Vu|
z`URxkqr3J8ko&@;x%LGEe@iY{$n^#PK91&}iTpis$owk)em)?-+x1KH0me?(FE90w
zWgYo@Kxx6F8>%~szn>FDHIM_A^#JPj=yZMYvg7~%|3JfH4})Z202;p--UEdN2ryO!
zF)&sLFbeRnb4&oL0)?Xt5NiMp0|BUh1_nnyfi@;*UN(>>C;%#F0~ySZYNRe
PT6|7wNl9u^F-R!@L6mUR

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib
deleted file mode 100644
index 8f543875d6ad19063ba0e006ada538d6bd9d606e..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 668
zcma)3ze~eV5WduiG1k5g4h{+qjvXBwt@t(zA{9iCqtuiNww0QqlnO#RG^9`k2mb{B
z3K10C{0m$i3c5K23D&zLucYAMN50&B_v_u`y?%fG%mBa?Q6uWa4m&HcX8#YoQS6R#
zY`I^};8H}}y0F=cfLN6KMWrHpLx%hLyfJjRQa)n2Q9CV_ZEV%gkLyQ>+gmEf_LjIn
z%%uS61Dk&Fsw_hwT2-txOEpXB$vy5VyhhxJXGB#1a{yK4uac8xVVVFLGdydNWG6na
zN0#@X9@rr!!TZEJCBy&n_uO~qBkf7!MF!m96%4oFyYkDq^g(nh9Fo3x0LL$h{&GUL
z#UK;PzpCR!6
z+n0Sc_zTSYA`5NNq9kIU)(6N(#I}(mOZ#;XDwWc$uDG;{Map^m=DhY|`vive(|L(x
z#Us6za$&J?zp~)U<>te?%{x+Fy`pnux1cAW(_k)$ym0ZOu1ansQdaA-wp3|UwLgB3
zX9!<}&Z-`9l@P~;sO$APEQk>ixttJ%Ty)k+xGi;F2bS)EJ>nWPEp#7sr*!zY);Ih;
z?{mIun>+T9HgxAqe=hXxz-x!XTB}jW4y~T>!iu$iCgrYL8->Undan%M9uB5c$+ERh
zx7S;5SsTa3YT-~a+8GWuN25{K9(r%`w_j5RLhlWdJQh7WiLrWJ7%kH%=8tAO$kHjN
zp)+U^?I{<`wald{{k~C>r6!i{B%8|(xmc6;s;*mIdVmxQBftnS0*nA7zz8q`i~u9R
z2rvSSz+Xh*Lp=W*_~SpG_hFyYOjd$IPC;k%j21=4e?sN%sv7P82`50Uwo?7{tQdt-
z+yME9@dMkcPf6K2=5%_-w`y+36009_7+3H@mCh@azST3muiy>Nru~FC2%_;n7+gqa!5q-H;B?jQ
zggVgGNJd!)yB*)|4>}Qh5G2$A73lZ-(-$?yBdip`BTSl`s3IJRM>vmWB>MXhG!^AF&B$2Q}+-Cm=
zhLZ0|jqhnDFnKNm(K4!*r#+w6j?)wGt9ubVk9Qd(4A)~#0x?@OOPQ>NmBQ6|;S6zq
zFL?eSCMmHL@^*QnD65j10MW=}uADIqFNZk2Usp^l30kwI#jhzxmDr@4P!_@Kq{WN4ec&QxJf|e5gx1tr(>aF
PVljsohGiM$OD_EZ=Ie1K

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib
deleted file mode 100644
index c85b71691ee684af0f05edb64c424873d1081ef2..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 676
zcma)3y-LGS6h5gDqt;#r2L}Zoz)3o|R=nU~L8O8RatJl0f^DUyD5Zjs42Be1!3S{k
zDMV0k^9fuX3c5K23C45M+>(NWANg{AzjM#!zJ7oHOaZ_IQ6uU^mz|X)X8#YoQSOdv
zY+0{na4Vv1UD)gmfmo9JB^3?r4H?$+c_Zj>rF_J4qjqYRZEV%gkLyQ>+ncJ!_LjIn
z%x3`TgAV;JFmxU@PlkilRIwnsyuFehh~D&AF9lqTmBFANMUgK<=Q(@$#1#U+A7OMYzB@x}
z?}FAbwE66$eoa+AWr?^>BVtVB0w2DA
zJ3&d!pdeeharSvPSKJA7g<(zwG&RE0s85XLi`z4YU
zPt0D*`Ni5&dES>R^~VqD_ocjX-Nb0!hMt5@gSjB`VjUNKR5F!FSy_IR#a{^T&lCh0smN-_-YQW2N(;R@{W9g=+QL((d2d-wyYK
zFS(X$@4Cdlh}Ntf&c>nJ3mUO-npJYSvC|PjTy{3krpz^GE3eFc@Wu+=e)nQ3SaLS$
z^rrN#vvq7F7X}4YW!!BK27{K{58mbOy`>0*-XA1+EP8elYweb>)@`ehJDS}mqBu__-{e7~~%2q6|mfDvE>7y(9r5nu!u0Y-ok
zU<4R}zktBU;rwsm$G;lpXfE5KS*!?!oQBSr*-UvxFR?cFbdItAgcG1v+o^tLMpUj8
zH$e6{e&D(8Q`GAc=5u<+PkLU*7Q3G^jw{&FsaNz$^FH-6lI9G^H;y;h)8~k={uAE7
z3-`US5%0P^I*!+B#4sZ-Xhfh5YRctCPR9cC_psG=UKz0i%i8;U*XCDZ{WnD_k4mo>s8tQNo{OxkT^oktDv2p7+L^w1_nlE1|R{%EI_;g#83cYAdm!N3lJX%_yHBa096Cy1I=UrVUW2X5FcNX
zT2TUFL---C5g`aZj5P(y0GS7J3mmX8G{D(FMtpp7er`cxa!Gt?UU_C-N_@PJ3$pwS
zC`SQGXD~tBaso(0oeE?jKzw{^MtoXPVs0u#EIuC9eLJ8^9iVgv)I2Dcft!IL1I7l?
zDnKj%Vz6;BFz|6PfGm>$;tNpw9{_0)ARnlYApnSx0myw&HzErY#SZo8eCp9z`{TIl
z7m#|7?%E$f?hB9R+7}G`ExBMJ*BAWzIGTSZ^7qIg^Q-v#`GEXx*DuWn7&~3RywpRM
zb>!~>r3H^}sO~8KeohqCKn_^e1E|}h)Ahy6j{pDv0}YEk43dEXX#8S$4-^(4z*rT;
zz*r%`D8R$cF#)Iw6pk`LtN}C(1fcpE7##To+L)Yq*+8100H~Y|WH3Ln>G64`x$*gF
O@j0m_C8+L^w1_lOZAZ7$&79g$xF%+PD5E~1)0~DJ9lmpQq{V+2?O5@{8QY%V8EC`5?
z4{?nMf$<>pL?8vy53&yqSQr{$?D+WP{M>@XW
zzyL%HB|rk|R1g;o;^R{@;?s%}b5mh_RClZZ3Y!421Jn$NDh38_28Ik6TLs7v0CCv3
z7#R4tz;;OhxiwJxI)F6D9Y8${0YHolK<w4fxjgUB;@*oe;-5hk3{|+StM>1e?K3X`=|K;W2ft%mpVw2j{H3!*Tb|%
z@%MA0s0DIBl3<-bUQYP`|9`jZpXMVxv4=r2Kmdw2Ab@)dpCR!6
z+n0Sc_zTSYA`5NNq9kIU)(6N(#I}(mOZ#;XDwWc$uDG;{Map^m=DhY|`vive(|L(x
z#Us6za$&J?zp~)U<>te?%{x+Fy`pnux1cAW(_k)$ym0ZOu1ansQdaA-wp3|UwLgB3
zX9!<}&Z-`9l@P~;sO$APEQk>ixk(`klhIix;kMLy9ay>t_K0iHw9tLjozmgoTHo;Z
zywCZrZSL4Z+R&Xd{khP$1FszlYpq5hJG6Sj3oF+8nUuR~Z4@GR=)E$0dpMX*CCk=2
z-Cl3KWo;ZAtA#_!XlFRs9F0a@d+5E%-+oOQ2)#E*@>uljB*yA>VYE!6m_M5BAWNs5
zhR&cxw5MD!*D{x;^!r9hmYP_)lWZbwKi~u9R2rvSS03*N%FanGK
zBftnS0)G*K5ApnO;E(@!-iLioGg%1=IR%~3Gg=fG{|S}5t7^3WC!7Ga+Di4)vtkrV
zaRcNZ#t&?(J|$)AnA7PQ->SJCORRp#VO+rrRXVFI&HdC*OPVvF*spklU3LB^*nh$s
zIR2jFx5FJfpldkYb_locc{(g40#I
z6Y4-$BN=5K>~?&+Kj=j4L6A@fRG{DQPhZp+kFZh%k1%0wqKa@N9^pKik?8M3lncej
Wnafi-YN`a~Icl763KTzpSNIMHtKS0v

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib
deleted file mode 100755
index 26ab109c9957ee2453ddd94a14a7db02bd3e8689..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 65936
zcmeI*Z)jCz9KiAC?vHM6?k+W)XmL>!wcMsHb8qx0UbsV1%Voomhh26@qixsip6fr{
zTFr$+!ATPJCW->zD4C3Ok}?PsBM=KrjKmlH*#e_L!mV)qerM;*y9;SVZ=&zR{hjB}
zdCqgr`EcLA97<%YSZXPQ5^MY9P$EG=009ILKmY**
z5I_I{1Q0*~0R#|0009ILKmY**5I_I{1Q0*~0R#|0009ILKmY**5I_I{1Q0*~0R#|0
z009ILKmY**5I_I{1Q0*~fqz-x%INKTlK3Bz#D6nILKFV`a*XLQrQV(9$Z<%*+S+Jc
ztv59J;7sm%+Xu){z8|ubSFCjtdtARGRrY$3HRI>r@~3D&@BVb{bY*s}O}FlDYHhGh
zt&K@Jk-gqof4!K0k@kaU$y_IB>wD^4Yd6+x+7Mk|6TGYA&&aBEg9I(IM}ik#M-I~^
zR!Nje=((V{iIi)bo8oqPrLC4{Z`ze;Pt`ZCC~w#+qsGiPrmHfT{Gi$yUt8Zr?-7x{
zwjHS#OI;+fMIy5V&zSiCVW6M
z&t+&Re4(Iwq&|H3@y!0d@R1VfABx`T9t`zH?+lk`o?RP0?B$#XS8op=DV#K0I^No`
ze9ha|n-0G*HZ~^9?f9&(Psirz6e*dt=2~m+e3jH1XZ$>PZEKEyvE*QvO86yYLbf&C
zVv~)wIpMmJlWu0_n)f>O=M`NqEYpMh{29`h?!*ESlYPA;v^Ut4n~vMKG$hTNz#AM_}ATy
z?olu#VQp=+u2ve^#mO4*`|`1OqoMl;XtCB!>~Y82Qe8=~uLf_HWNiCML`
z0fU@UxQWdWq^*9zckeUD-9*Z@^38j@ywX<7vp4OMy%y@5SCq?k2~BZh<{Q&hDc3%<
zIL4I8Bz+gHBhuHlBlTjbizK#4WR~C=GoL5({Daza<)q$c_~ZwpooAk2KG61(x#aZ!
zu|mco660&?U5mW7?lEDe*5@phs#M#4k^k6)i$1UHL?M5Vkf}(g+ABmrW9#0On`kLt
z7nta*MH0GBnPW_W)Jy!nzPFagB=r1FN1o@Oyr5i5K>z^+5I_I{1Q0*~0R#|0009IL
zKmY**5I_I{1Q0*~0R#|0009ILKmY**5I_I{1Q0*~0R#|0009ILKmY**5I_I{1Q0*~
zf&Y%cShu-)D{Mx_W}AVburo3=)krua2Xjro)`g+L>NP!~wkJcTcA`HVYCB=1Kie57
z$TyD+$+eOLJ!|58eyFV|bAD5(?Qqt7oh#QK)%$%nHa7c~tR>HKbTHRm(^ThqK4E69
zxz(CGUnRB1fS(7iZOzRtmYnNS39m#Xbv$HS(=9gHXqywRD>>os+Hx*v}=;dvN^|pINhIj{@l*I;sa-E
zlcz4P`R2y;s@GTl6o2&hvXif#I9->#INUgx_x07QujFj5%OBg{zkgrPjq`_My-yu~
z*!gYGefjCNj%)8Mn{xA8=h$octDb%4#dqIcy1Ap|moMtK&At5VkAt5S-i;TQJu$uQ
a=fp<`e(Q`ad4AfJZ8Zat?HBV_Fa8rZUx$7G

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_11.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_11.dylib
deleted file mode 100644
index 80202c11ba026146f7596191a1043216590fa9f2..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 16464
zcmeI4y-EW?6ov2l4=PEcC|Zb!s6`@Tk@+b@&bZ|
z&mfk958wk>ieTdlh=}Lz&gv2bOPe!ra(`y#?(DbI-aNj)eRhjD?IO|z^+C-xk#mW|
z6B&kHA!9bP+nbtc8R_5SPNY>RwD^2V^){fRzjnPDKk5aH&OV5i8u7e}VdaarS
z0@cP*%`5IF7Qdlj)g+{K;%Si{*l8Q7@56xy%`>;pt!J<}st+jW49#(a0
z`{32rUhO{gV8?A2H$q1ov*)_f(=OL1i0yCCvmL`j!oGa`M}Y1M+P~qCiN6o+>;+Tv
M>Uon#Gk&ouU-!LLLjV8(

diff --git a/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib b/setuptools/tests/bdist_wheel_testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib
deleted file mode 100644
index 5f7fd5091ea98cf4db80642362748f384c61200a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 8956
zcmeHM&ubGw6rPQ-U2B@n!9xX=96Wd^9>hbBjk>T03nCRnkRjBB3MMUei=`wWY>_M}
zG(rzO_y_nW6c0gAD0onhJ$Nl7ptl@W360;I%&Z$MR74Mj_u%Et`!R23zun9G^5O7J
zTL_VYjzdogkt6pNelk`Z%TugRF(?Zozz8q`i~u9R2rvSS03*N%FanGKBftopFa$pS
z_;#oVe}Q>lq@guhLV#{-!eI}+t72+X)qT=UikRYS0z`8l%+*kUMW^e#vi@M
zYlN>uXLXNZl@MozSTyU)SP&y3#wLWwPN=h{;MVkc4OqGd_J}*sq|klzos!`{THgx~
zg0GpTYwx+lyNKqT9nM9e+Y9QEa5l;mawDfLf~e?hUQD>_&Q?~r{ot(?y8Z5SB3W=Y
z>Go#p183{ZNG%*ns-01{H5d$q+D`N{v!V(m
zxB>Ey;|F$hpS-rqnA7PQKkK<2ORRp$aa_T!E}hl3rTduS#Cdvsv`3xD8+2VPi@_S_yF>ow~U+>RI2JD?9LD&%&YwjF#2e{d=3#{@!8(Ez>f
z)npb70(}!+Q}2!`1XGQq%6i!Cg>I+YP%J_(rVyyWyyxG(tPwt8EeAef+-{+Yco^an
huAmu-`A$S3n|qPEIXOm6wV-^A8pqrM#gE_@egaI<-)aB=

diff --git a/setuptools/tests/bdist_wheel_testdata/simple.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/simple.dist/setup.py
deleted file mode 100644
index 1e7a78a224..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/simple.dist/setup.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from __future__ import annotations
-
-from setuptools import setup
-
-setup(
-    name="simple.dist",
-    version="0.1",
-    description="A testing distribution \N{SNOWMAN}",
-    packages=["simpledist"],
-    extras_require={"voting": ["beaglevote"]},
-)
diff --git a/setuptools/tests/bdist_wheel_testdata/simple.dist/simpledist/__init__.py b/setuptools/tests/bdist_wheel_testdata/simple.dist/simpledist/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/tests/bdist_wheel_testdata/unicode.dist/setup.py b/setuptools/tests/bdist_wheel_testdata/unicode.dist/setup.py
deleted file mode 100644
index ec66d1e6af..0000000000
--- a/setuptools/tests/bdist_wheel_testdata/unicode.dist/setup.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from __future__ import annotations
-
-from setuptools import setup
-
-setup(
-    name="unicode.dist",
-    version="0.1",
-    description="A testing distribution \N{SNOWMAN}",
-    packages=["unicodedist"],
-    zip_safe=True,
-)
diff --git a/setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/__init__.py b/setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git "a/setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/\303\245\303\244\303\266_\346\227\245\346\234\254\350\252\236.py" "b/setuptools/tests/bdist_wheel_testdata/unicode.dist/unicodedist/\303\245\303\244\303\266_\346\227\245\346\234\254\350\252\236.py"
deleted file mode 100644
index e69de29bb2..0000000000

From b4f5a07734dc8c2bfc6456b3ceb0538095bbc8aa Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Jun 2024 12:35:26 +0100
Subject: [PATCH 0741/1761] Add news fragment

---
 newsfragments/4429.misc.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4429.misc.rst

diff --git a/newsfragments/4429.misc.rst b/newsfragments/4429.misc.rst
new file mode 100644
index 0000000000..9c4c11ef89
--- /dev/null
+++ b/newsfragments/4429.misc.rst
@@ -0,0 +1,2 @@
+Refactored ``test_bdist_wheel`` such that it creates examples in temporary
+folders and runs the ``bdist_wheel`` command programmatically without subprocesses.

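The refactor this fragment describes drives ``bdist_wheel`` in-process rather
than via a subprocess. A minimal sketch of that technique, with illustrative
project metadata (the names below are assumptions, not taken from the test
suite):

    # Hypothetical sketch: build a wheel in-process instead of shelling out.
    import os

    from setuptools import Distribution

    def build_wheel_in_process(project_dir: str, dist_dir: str) -> None:
        os.chdir(project_dir)  # bdist_wheel resolves paths relative to CWD
        dist = Distribution({
            "name": "example",  # illustrative metadata, not from the tests
            "version": "0.1",
            "script_args": ["bdist_wheel", "--dist-dir", dist_dir],
        })
        dist.parse_command_line()  # apply script_args to select the command
        dist.run_commands()        # run bdist_wheel without a subprocess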
From 807d7c14bd11709f51b2d32a9ead08604ce05d44 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 21 Jun 2024 12:25:56 -0400
Subject: [PATCH 0742/1761] Move check to _fn

---
 pkg_resources/__init__.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 16a5bd8eef..89a416dc2c 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -1704,8 +1704,7 @@ def run_script(self, script_name: str, namespace: dict[str, Any]):
                     **locals()
                 ),
             )
-        if not self.egg_info:
-            raise TypeError("Provider is missing egg_info", self.egg_info)
+
         script_text = self.get_metadata(script).replace('\r\n', '\n')
         script_text = script_text.replace('\r', '\n')
         script_filename = self._fn(self.egg_info, script)
@@ -1741,7 +1740,11 @@ def _listdir(self, path) -> list[str]:
             "Can't perform this operation for unregistered loader type"
         )
 
-    def _fn(self, base: str, resource_name: str):
+    def _fn(self, base: str | None, resource_name: str):
+        if base is None:
+            raise TypeError(
+                "`base` parameter in `_fn` is `None`. Either override this method or check the parameter first."
+            )
         self._validate_resource_path(resource_name)
         if resource_name:
             return os.path.join(base, *resource_name.split('/'))

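With the guard centralised, every ``_fn`` caller gets the same ``TypeError``
for a missing base instead of each call site pre-checking ``egg_info``. A
standalone sketch of the pattern (simplified: the real method also calls
``_validate_resource_path``):

    import os

    def _fn(base, resource_name: str):
        # Centralised guard: one consistent error for every caller.
        if base is None:
            raise TypeError(
                "`base` parameter in `_fn` is `None`. Either override "
                "this method or check the parameter first."
            )
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    try:
        _fn(None, "scripts/demo")  # e.g. a provider with no egg_info
    except TypeError as exc:
        print(exc)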
From c9729e1a0f66b7adad70c629518b7dab82ccd8c6 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 21 Jun 2024 13:09:07 -0400
Subject: [PATCH 0743/1761] Prefer "Source" to "Homepage" for the repository
 label.

Closes jaraco/skeleton#129
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 50845ee304..ad67d3b12d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -22,7 +22,7 @@ dependencies = [
 dynamic = ["version"]
 
 [project.urls]
-Homepage = "https://github.com/PROJECT_PATH"
+Source = "https://github.com/PROJECT_PATH"
 
 [project.optional-dependencies]
 test = [

From 9fcd7fa5755610ed6807e4bf3a9be63a54cf793b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Jun 2024 13:34:30 -0400
Subject: [PATCH 0744/1761] Enforce return types for typed public functions

---
 newsfragments/4409.feature.rst     |   3 +
 pkg_resources/__init__.py          | 146 +++++++++++++++++------------
 pkg_resources/extern/__init__.py   |   8 +-
 pkg_resources/py.typed             |   0
 ruff.toml                          |  12 +++
 setuptools/command/bdist_wheel.py  |   4 +-
 setuptools/command/install_lib.py  |   7 +-
 setuptools/config/expand.py        |   3 +-
 setuptools/config/pyprojecttoml.py |   4 +-
 setuptools/config/setupcfg.py      |   5 +-
 setuptools/warnings.py             |   4 +-
 11 files changed, 120 insertions(+), 76 deletions(-)
 create mode 100644 newsfragments/4409.feature.rst
 create mode 100644 pkg_resources/py.typed

diff --git a/newsfragments/4409.feature.rst b/newsfragments/4409.feature.rst
new file mode 100644
index 0000000000..9dd157092c
--- /dev/null
+++ b/newsfragments/4409.feature.rst
@@ -0,0 +1,3 @@
+Added return types to typed public functions -- by :user:`Avasam`
+
+Marked `pkg_resources` as ``py.typed`` -- by :user:`Avasam`
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index c4ace5aa77..dbcce2671f 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -34,6 +34,7 @@
 import types
 from typing import (
     Any,
+    BinaryIO,
     Literal,
     Dict,
     Iterator,
@@ -99,7 +100,7 @@
 from pkg_resources.extern.platformdirs import user_cache_dir as _user_cache_dir
 
 if TYPE_CHECKING:
-    from _typeshed import BytesPath, StrPath, StrOrBytesPath
+    from _typeshed import StrPath, BytesPath, StrOrBytesPath
     from typing_extensions import Self
 
 warnings.warn(
@@ -109,7 +110,6 @@
     stacklevel=2,
 )
 
-
 _T = TypeVar("_T")
 _DistributionT = TypeVar("_DistributionT", bound="Distribution")
 # Type aliases
@@ -153,7 +153,6 @@ class PEP440Warning(RuntimeWarning):
 
 parse_version = _packaging_version.Version
 
-
 _state_vars: dict[str, str] = {}
 
 
@@ -335,7 +334,9 @@ def req(self) -> Requirement:
     def report(self):
         return self._template.format(**locals())
 
-    def with_context(self, required_by: set[Distribution | str]):
+    def with_context(
+        self, required_by: set[Distribution | str]
+    ) -> Self | ContextualVersionConflict:
         """
         If required_by is non-empty, return a version of self that is a
         ContextualVersionConflict.
@@ -404,7 +405,7 @@ class UnknownExtra(ResolutionError):
 
 def register_loader_type(
     loader_type: type[_ModuleLike], provider_factory: _ProviderFactoryType
-):
+) -> None:
     """Register `provider_factory` to make providers for `loader_type`
 
     `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
@@ -480,7 +481,7 @@ def get_build_platform():
 get_platform = get_build_platform
 
 
-def compatible_platforms(provided: str | None, required: str | None):
+def compatible_platforms(provided: str | None, required: str | None) -> bool:
     """Can code for the `provided` platform run on the `required` platform?
 
     Returns true if either platform is ``None``, or the platforms are equal.
@@ -561,7 +562,7 @@ def get_entry_map(dist: _EPDistType, group: str | None = None):
     return get_distribution(dist).get_entry_map(group)
 
 
-def get_entry_info(dist: _EPDistType, group: str, name: str):
+def get_entry_info(dist: _EPDistType, group: str, name: str) -> EntryPoint | None:
     """Return the EntryPoint object for `group`+`name`, or ``None``"""
     return get_distribution(dist).get_entry_info(group, name)
 
@@ -682,7 +683,7 @@ def _build_from_requirements(cls, req_spec):
         sys.path[:] = ws.entries
         return ws
 
-    def add_entry(self, entry: str):
+    def add_entry(self, entry: str) -> None:
         """Add a path item to ``.entries``, finding any distributions on it
 
         ``find_distributions(entry, True)`` is used to find distributions
@@ -725,7 +726,9 @@ def find(self, req: Requirement) -> Distribution | None:
             raise VersionConflict(dist, req)
         return dist
 
-    def iter_entry_points(self, group: str, name: str | None = None):
+    def iter_entry_points(
+        self, group: str, name: str | None = None
+    ) -> Iterator[EntryPoint]:
         """Yield entry point objects from `group` matching `name`
 
         If `name` is None, yields all entry points in `group` from all
@@ -739,7 +742,7 @@ def iter_entry_points(self, group: str, name: str | None = None):
             if name is None or name == entry.name
         )
 
-    def run_script(self, requires: str, script_name: str):
+    def run_script(self, requires: str, script_name: str) -> None:
         """Locate distribution for `requires` and run `script_name` script"""
         ns = sys._getframe(1).f_globals
         name = ns['__name__']
@@ -770,7 +773,7 @@ def add(
         entry: str | None = None,
         insert: bool = True,
         replace: bool = False,
-    ):
+    ) -> None:
         """Add `dist` to working set, associated with `entry`
 
         If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
@@ -1050,7 +1053,7 @@ def find_plugins(
 
         return sorted_distributions, error_info
 
-    def require(self, *requirements: _NestedStr):
+    def require(self, *requirements: _NestedStr) -> list[Distribution]:
         """Ensure that distributions matching `requirements` are activated
 
         `requirements` must be a string or a (possibly-nested) sequence
@@ -1068,7 +1071,7 @@ def require(self, *requirements: _NestedStr):
 
     def subscribe(
         self, callback: Callable[[Distribution], object], existing: bool = True
-    ):
+    ) -> None:
         """Invoke `callback` for all distributions
 
         If `existing=True` (default),
@@ -1154,7 +1157,7 @@ def __init__(
         self.python = python
         self.scan(search_path)
 
-    def can_add(self, dist: Distribution):
+    def can_add(self, dist: Distribution) -> bool:
         """Is distribution `dist` acceptable for this environment?
 
         The distribution must match the platform and python version
@@ -1168,11 +1171,11 @@ def can_add(self, dist: Distribution):
         )
         return py_compat and compatible_platforms(dist.platform, self.platform)
 
-    def remove(self, dist: Distribution):
+    def remove(self, dist: Distribution) -> None:
         """Remove `dist` from the environment"""
         self._distmap[dist.key].remove(dist)
 
-    def scan(self, search_path: Iterable[str] | None = None):
+    def scan(self, search_path: Iterable[str] | None = None) -> None:
         """Scan `search_path` for distributions usable in this environment
 
         Any distributions found are added to the environment.
@@ -1198,7 +1201,7 @@ def __getitem__(self, project_name: str) -> list[Distribution]:
         distribution_key = project_name.lower()
         return self._distmap.get(distribution_key, [])
 
-    def add(self, dist: Distribution):
+    def add(self, dist: Distribution) -> None:
         """Add `dist` if we ``can_add()`` it and it has not already been added"""
         if self.can_add(dist) and dist.has_version():
             dists = self._distmap.setdefault(dist.key, [])
@@ -1349,23 +1352,29 @@ class ResourceManager:
     def __init__(self):
         self.cached_files = {}
 
-    def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str):
+    def resource_exists(
+        self, package_or_requirement: _PkgReqType, resource_name: str
+    ) -> bool:
         """Does the named resource exist?"""
         return get_provider(package_or_requirement).has_resource(resource_name)
 
-    def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str):
+    def resource_isdir(
+        self, package_or_requirement: _PkgReqType, resource_name: str
+    ) -> bool:
         """Is the named resource an existing directory?"""
         return get_provider(package_or_requirement).resource_isdir(resource_name)
 
     def resource_filename(
         self, package_or_requirement: _PkgReqType, resource_name: str
-    ):
+    ) -> str:
         """Return a true filesystem path for specified resource"""
         return get_provider(package_or_requirement).get_resource_filename(
             self, resource_name
         )
 
-    def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str):
+    def resource_stream(
+        self, package_or_requirement: _PkgReqType, resource_name: str
+    ) -> _ResourceStream:
         """Return a readable file-like object for specified resource"""
         return get_provider(package_or_requirement).get_resource_stream(
             self, resource_name
@@ -1379,7 +1388,9 @@ def resource_string(
             self, resource_name
         )
 
-    def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str):
+    def resource_listdir(
+        self, package_or_requirement: _PkgReqType, resource_name: str
+    ) -> list[str]:
         """List the contents of the named resource directory"""
         return get_provider(package_or_requirement).resource_listdir(resource_name)
 
@@ -1413,7 +1424,7 @@ def extraction_error(self) -> NoReturn:
         err.original_error = old_exc
         raise err
 
-    def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()):
+    def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()) -> str:
         """Return absolute location in cache for `archive_name` and `names`
 
         The parent directory of the resulting path will be created if it does
@@ -1465,7 +1476,7 @@ def _warn_unsafe_extraction_path(path):
             ).format(**locals())
             warnings.warn(msg, UserWarning)
 
-    def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath):
+    def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath) -> None:
         """Perform any platform-specific postprocessing of `tempname`
 
         This is where Mac header rewrites should be done; other platforms don't
@@ -1485,7 +1496,7 @@ def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath):
             mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
             os.chmod(tempname, mode)
 
-    def set_extraction_path(self, path: str):
+    def set_extraction_path(self, path: str) -> None:
         """Set the base path where resources will be extracted to, if needed.
 
         If you do not call this routine before any extractions take place, the
@@ -1533,7 +1544,7 @@ def get_default_cache() -> str:
     return os.environ.get('PYTHON_EGG_CACHE') or _user_cache_dir(appname='Python-Eggs')
 
 
-def safe_name(name: str):
+def safe_name(name: str) -> str:
     """Convert an arbitrary string to a standard distribution name
 
     Any runs of non-alphanumeric/. characters are replaced with a single '-'.
@@ -1541,7 +1552,7 @@ def safe_name(name: str):
     return re.sub('[^A-Za-z0-9.]+', '-', name)
 
 
-def safe_version(version: str):
+def safe_version(version: str) -> str:
     """
     Convert an arbitrary string to a standard version string
     """
@@ -1585,7 +1596,7 @@ def _safe_segment(segment):
     return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
 
 
-def safe_extra(extra: str):
+def safe_extra(extra: str) -> str:
     """Convert an arbitrary string to a standard 'extra' name
 
     Any runs of non-alphanumeric characters are replaced with a single '_',
@@ -1594,7 +1605,7 @@ def safe_extra(extra: str):
     return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
 
 
-def to_filename(name: str):
+def to_filename(name: str) -> str:
     """Convert a project or version name to its filename-escaped form
 
     Any '-' characters are currently replaced with '_'.
@@ -1602,7 +1613,7 @@ def to_filename(name: str):
     return name.replace('-', '_')
 
 
-def invalid_marker(text: str):
+def invalid_marker(text: str) -> SyntaxError | Literal[False]:
     """
     Validate text as a PEP 508 environment marker; return an exception
     if invalid or False otherwise.
@@ -1642,10 +1653,14 @@ def __init__(self, module: _ModuleLike):
         self.loader = getattr(module, '__loader__', None)
         self.module_path = os.path.dirname(getattr(module, '__file__', ''))
 
-    def get_resource_filename(self, manager: ResourceManager, resource_name: str):
+    def get_resource_filename(
+        self, manager: ResourceManager, resource_name: str
+    ) -> str:
         return self._fn(self.module_path, resource_name)
 
-    def get_resource_stream(self, manager: ResourceManager, resource_name: str):
+    def get_resource_stream(
+        self, manager: ResourceManager, resource_name: str
+    ) -> BinaryIO:
         return io.BytesIO(self.get_resource_string(manager, resource_name))
 
     def get_resource_string(
@@ -1653,7 +1668,7 @@ def get_resource_string(
     ) -> bytes:
         return self._get(self._fn(self.module_path, resource_name))
 
-    def has_resource(self, resource_name: str):
+    def has_resource(self, resource_name: str) -> bool:
         return self._has(self._fn(self.module_path, resource_name))
 
     def _get_metadata_path(self, name):
@@ -1666,7 +1681,7 @@ def has_metadata(self, name: str) -> bool:
         path = self._get_metadata_path(name)
         return self._has(path)
 
-    def get_metadata(self, name: str):
+    def get_metadata(self, name: str) -> str:
         if not self.egg_info:
             return ""
         path = self._get_metadata_path(name)
@@ -1682,13 +1697,13 @@ def get_metadata(self, name: str):
     def get_metadata_lines(self, name: str) -> Iterator[str]:
         return yield_lines(self.get_metadata(name))
 
-    def resource_isdir(self, resource_name: str):
+    def resource_isdir(self, resource_name: str) -> bool:
         return self._isdir(self._fn(self.module_path, resource_name))
 
     def metadata_isdir(self, name: str) -> bool:
         return bool(self.egg_info and self._isdir(self._fn(self.egg_info, name)))
 
-    def resource_listdir(self, resource_name: str):
+    def resource_listdir(self, resource_name: str) -> list[str]:
         return self._listdir(self._fn(self.module_path, resource_name))
 
     def metadata_listdir(self, name: str) -> list[str]:
@@ -1696,7 +1711,7 @@ def metadata_listdir(self, name: str) -> list[str]:
             return self._listdir(self._fn(self.egg_info, name))
         return []
 
-    def run_script(self, script_name: str, namespace: dict[str, Any]):
+    def run_script(self, script_name: str, namespace: dict[str, Any]) -> None:
         script = 'scripts/' + script_name
         if not self.has_metadata(script):
             raise ResolutionError(
@@ -1880,7 +1895,9 @@ def _isdir(self, path) -> bool:
     def _listdir(self, path):
         return os.listdir(path)
 
-    def get_resource_stream(self, manager: object, resource_name: str):
+    def get_resource_stream(
+        self, manager: object, resource_name: str
+    ) -> io.BufferedReader:
         return open(self._fn(self.module_path, resource_name), 'rb')
 
     def _get(self, path) -> bytes:
@@ -1929,7 +1946,7 @@ class ZipManifests(Dict[str, "MemoizedZipManifests.manifest_mod"]):
 
     # `path` could be `StrPath | IO[bytes]` but that violates the LSP for `MemoizedZipManifests.load`
     @classmethod
-    def build(cls, path: str):
+    def build(cls, path: str) -> dict[str, zipfile.ZipInfo]:
         """
         Build a dictionary similar to the zipimport directory
         caches, except instead of tuples, store ZipInfo objects.
@@ -2007,7 +2024,9 @@ def _parts(self, zip_path):
     def zipinfo(self):
         return self._zip_manifests.load(self.loader.archive)
 
-    def get_resource_filename(self, manager: ResourceManager, resource_name: str):
+    def get_resource_filename(
+        self, manager: ResourceManager, resource_name: str
+    ) -> str:
         if not self.egg_name:
             raise NotImplementedError(
                 "resource_filename() only supported for .egg, not .zip"
@@ -2167,7 +2186,7 @@ def _get_metadata_path(self, name):
     def has_metadata(self, name: str) -> bool:
         return name == 'PKG-INFO' and os.path.isfile(self.path)
 
-    def get_metadata(self, name: str):
+    def get_metadata(self, name: str) -> str:
         if name != 'PKG-INFO':
             raise KeyError("No metadata except PKG-INFO is available")
 
@@ -2232,7 +2251,9 @@ def __init__(self, importer: zipimport.zipimporter):
 )
 
 
-def register_finder(importer_type: type[_T], distribution_finder: _DistFinderType[_T]):
+def register_finder(
+    importer_type: type[_T], distribution_finder: _DistFinderType[_T]
+) -> None:
     """Register `distribution_finder` to find distributions in sys.path items
 
     `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2242,7 +2263,7 @@ def register_finder(importer_type: type[_T], distribution_finder: _DistFinderTyp
     _distribution_finders[importer_type] = distribution_finder
 
 
-def find_distributions(path_item: str, only: bool = False):
+def find_distributions(path_item: str, only: bool = False) -> Iterable[Distribution]:
     """Yield distributions accessible via `path_item`"""
     importer = get_importer(path_item)
     finder = _find_adapter(_distribution_finders, importer)
@@ -2416,7 +2437,7 @@ def resolve_egg_link(path):
 
 def register_namespace_handler(
     importer_type: type[_T], namespace_handler: _NSHandlerType[_T]
-):
+) -> None:
     """Register `namespace_handler` to declare namespace packages
 
     `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2505,7 +2526,7 @@ def position_in_sys_path(path):
         module.__path__ = new_path
 
 
-def declare_namespace(packageName: str):
+def declare_namespace(packageName: str) -> None:
     """Declare that package 'packageName' is a namespace package"""
 
     msg = (
@@ -2548,7 +2569,7 @@ def declare_namespace(packageName: str):
         _imp.release_lock()
 
 
-def fixup_namespace_packages(path_item: str, parent: str | None = None):
+def fixup_namespace_packages(path_item: str, parent: str | None = None) -> None:
     """Ensure that previously-declared namespace packages include path_item"""
     _imp.acquire_lock()
     try:
@@ -2759,7 +2780,7 @@ def require(
         self,
         env: Environment | None = None,
         installer: _InstallerType | None = None,
-    ):
+    ) -> None:
         if not self.dist:
             error_cls = UnknownExtra if self.extras else AttributeError
             raise error_cls("Can't require() without a distribution", self)
@@ -2783,7 +2804,7 @@ def require(
     )
 
     @classmethod
-    def parse(cls, src: str, dist: Distribution | None = None):
+    def parse(cls, src: str, dist: Distribution | None = None) -> Self:
         """Parse a single entry point from string `src`
 
         Entry point syntax follows the form::
@@ -2817,7 +2838,7 @@ def parse_group(
         group: str,
         lines: _NestedStr,
         dist: Distribution | None = None,
-    ):
+    ) -> dict[str, Self]:
         """Parse an entry point group"""
         if not MODULE(group):
             raise ValueError("Invalid group name", group)
@@ -2834,7 +2855,7 @@ def parse_map(
         cls,
         data: str | Iterable[str] | dict[str, str | Iterable[str]],
         dist: Distribution | None = None,
-    ):
+    ) -> dict[str, dict[str, Self]]:
         """Parse a map of entry point groups"""
         _data: Iterable[tuple[str | None, str | Iterable[str]]]
         if isinstance(data, dict):
@@ -3039,7 +3060,9 @@ def _dep_map(self):
         return self.__dep_map
 
     @staticmethod
-    def _filter_extras(dm: dict[str | None, list[Requirement]]):
+    def _filter_extras(
+        dm: dict[str | None, list[Requirement]],
+    ) -> dict[str | None, list[Requirement]]:
         """
         Given a mapping of extras to dependencies, strip off
         environment markers and filter out any dependencies
@@ -3066,7 +3089,7 @@ def _build_dep_map(self):
                 dm.setdefault(extra, []).extend(parse_requirements(reqs))
         return dm
 
-    def requires(self, extras: Iterable[str] = ()):
+    def requires(self, extras: Iterable[str] = ()) -> list[Requirement]:
         """List of Requirements needed for this distro if `extras` are used"""
         dm = self._dep_map
         deps: list[Requirement] = []
@@ -3105,7 +3128,7 @@ def _get_version(self):
         lines = self._get_metadata(self.PKG_INFO)
         return _version_from_file(lines)
 
-    def activate(self, path: list[str] | None = None, replace: bool = False):
+    def activate(self, path: list[str] | None = None, replace: bool = False) -> None:
         """Ensure distribution is importable on `path` (default=sys.path)"""
         if path is None:
             path = sys.path
@@ -3160,7 +3183,7 @@ def from_filename(
         filename: StrPath,
         metadata: _MetadataType = None,
         **kw: int,  # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
-    ):
+    ) -> Distribution:
         return cls.from_location(
             _normalize_cached(filename), os.path.basename(filename), metadata, **kw
         )
@@ -3195,7 +3218,7 @@ def get_entry_map(self, group: str | None = None):
             return self._ep_map.get(group, {})
         return self._ep_map
 
-    def get_entry_info(self, group: str, name: str):
+    def get_entry_info(self, group: str, name: str) -> EntryPoint | None:
         """Return the EntryPoint object for `group`+`name`, or ``None``"""
         return self.get_entry_map(group).get(name)
 
@@ -3205,7 +3228,7 @@ def insert_on(  # noqa: C901
         path: list[str],
         loc=None,
         replace: bool = False,
-    ):
+    ) -> None:
         """Ensure self.location is on path
 
         If replace=False (default):
@@ -3310,7 +3333,7 @@ def has_version(self):
             return False
         return True
 
-    def clone(self, **kw: str | int | IResourceProvider | None):
+    def clone(self, **kw: str | int | IResourceProvider | None) -> Self:
         """Copy this distribution, substituting in any changed keyword args"""
         names = 'project_name version py_version platform location precedence'
         for attr in names.split():
@@ -3416,7 +3439,7 @@ def issue_warning(*args, **kw):
     warnings.warn(stacklevel=level + 1, *args, **kw)
 
 
-def parse_requirements(strs: _NestedStr):
+def parse_requirements(strs: _NestedStr) -> map[Requirement]:
     """
     Yield ``Requirement`` objects for each specification in `strs`.
 
@@ -3438,7 +3461,7 @@ def __init__(self, requirement_string: str):
         self.project_name, self.key = project_name, project_name.lower()
         self.specs = [(spec.operator, spec.version) for spec in self.specifier]
         # packaging.requirements.Requirement uses a set for its extras. We use a variable-length tuple
-        self.extras: tuple[str] = tuple(map(safe_extra, self.extras))
+        self.extras: tuple[str, ...] = tuple(map(safe_extra, self.extras))
         self.hashCmp = (
             self.key,
             self.url,
@@ -3473,7 +3496,7 @@ def __repr__(self):
         return "Requirement.parse(%r)" % str(self)
 
     @staticmethod
-    def parse(s: str | Iterable[str]):
+    def parse(s: str | Iterable[str]) -> Requirement:
         (req,) = parse_requirements(s)
         return req
 
@@ -3499,7 +3522,7 @@ def _find_adapter(registry: Mapping[type, _AdapterT], ob: object) -> _AdapterT:
     raise TypeError(f"Could not find adapter for {registry} and {ob}")
 
 
-def ensure_directory(path: StrOrBytesPath):
+def ensure_directory(path: StrOrBytesPath) -> None:
     """Ensure that the parent directory of `path` exists"""
     dirname = os.path.dirname(path)
     os.makedirs(dirname, exist_ok=True)
@@ -3575,6 +3598,7 @@ class PkgResourcesDeprecationWarning(Warning):
 _LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
 
 
+# This must go before calls to `_call_aside`. See https://github.com/pypa/setuptools/pull/4422
 def _read_utf8_with_fallback(file: str, fallback_encoding=_LOCALE_ENCODING) -> str:
     """See setuptools.unicode_utils._read_utf8_with_fallback"""
     try:
diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index 9b9ac10aa9..97b8a1f9a7 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -35,7 +35,7 @@ def _module_matches_namespace(self, fullname):
         root, base, target = fullname.partition(self.root_name + '.')
         return not root and any(map(target.startswith, self.vendored_names))
 
-    def load_module(self, fullname: str):
+    def load_module(self, fullname: str) -> ModuleType:
         """
         Iterate over the search path to locate and load fullname.
         """
@@ -57,10 +57,10 @@ def load_module(self, fullname: str):
                 "distribution.".format(**locals())
             )
 
-    def create_module(self, spec: ModuleSpec):
+    def create_module(self, spec: ModuleSpec) -> ModuleType:
         return self.load_module(spec.name)
 
-    def exec_module(self, module: ModuleType):
+    def exec_module(self, module: ModuleType) -> None:
         pass
 
     def find_spec(
@@ -68,7 +68,7 @@ def find_spec(
         fullname: str,
         path: Sequence[str] | None = None,
         target: ModuleType | None = None,
-    ):
+    ) -> ModuleSpec | None:
         """Return a module spec for vendored names."""
         return (
             # This should fix itself next mypy release https://github.com/python/typeshed/pull/11890
diff --git a/pkg_resources/py.typed b/pkg_resources/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/ruff.toml b/ruff.toml
index 8828fe61a5..51aa1f8467 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -10,6 +10,7 @@ extend-select = [
 	"W",
 
 	# local
+	"ANN2", # missing-return-type-*
 	"FA", # flake8-future-annotations
 	"F404", # late-future-import
 	"PYI", # flake8-pyi
@@ -22,6 +23,9 @@ ignore = [
 	"UP031", # temporarily disabled
 	"UP032", # temporarily disabled
 	"UP038", # Using `X | Y` in `isinstance` call is slower and more verbose https://github.com/astral-sh/ruff/issues/7871
+	# Only enforcing return type annotations for public functions
+	"ANN202", # missing-return-type-private-function
+	"ANN204", # missing-return-type-special-method
 
 	# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
 	"W191",
@@ -40,6 +44,14 @@ ignore = [
 	"ISC002",
 ]
 
+[lint.per-file-ignores]
+# Only enforcing return type annotations for public modules
+"**/tests/**" = ["ANN2"]
+"tools/**" = ["ANN2"]
+
+[lint.flake8-annotations]
+ignore-fully-untyped = true
+
 [format]
 # Enable preview to get hugged parenthesis unwrapping
 preview = true
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index a81187598a..97ce3f6a43 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -451,7 +451,7 @@ def run(self):
 
     def write_wheelfile(
         self, wheelfile_base: str, generator: str = f"setuptools ({__version__})"
-    ):
+    ) -> None:
         from email.message import Message
 
         msg = Message()
@@ -525,7 +525,7 @@ def license_paths(self) -> Iterable[str]:
 
         return files
 
-    def egg2dist(self, egginfo_path: str, distinfo_path: str):
+    def egg2dist(self, egginfo_path: str, distinfo_path: str) -> None:
         """Convert an .egg-info directory into a .dist-info directory"""
 
         def adios(p: str) -> None:
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index 5e74be247e..598752143d 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -1,3 +1,4 @@
+from __future__ import annotations
 import os
 import sys
 from itertools import product, starmap
@@ -92,7 +93,7 @@ def copy_tree(
         preserve_times=True,
         preserve_symlinks=False,
         level=1,
-    ):
+    ) -> list[str]:
         assert preserve_mode and preserve_times and not preserve_symlinks
         exclude = self.get_exclusions()
 
@@ -104,9 +105,9 @@ def copy_tree(
         from setuptools.archive_util import unpack_directory
         from distutils import log
 
-        outfiles = []
+        outfiles: list[str] = []
 
-        def pf(src, dst):
+        def pf(src: str, dst: str):
             if dst in exclude:
                 log.warn("Skipping installation of %s (namespace package)", dst)
                 return False
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 6ea6cf6d0e..de8a1e6e2b 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -31,6 +31,7 @@
 from itertools import chain
 from typing import (
     TYPE_CHECKING,
+    Any,
     Callable,
     Iterable,
     Iterator,
@@ -158,7 +159,7 @@ def read_attr(
     attr_desc: str,
     package_dir: Mapping[str, str] | None = None,
     root_dir: StrPath | None = None,
-):
+) -> Any:
     """Reads the value of an attribute from a module.
 
     This function will try to read the attributed statically first
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index c8dae5f751..3e8d47db8c 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -15,7 +15,7 @@
 import os
 from contextlib import contextmanager
 from functools import partial
-from typing import TYPE_CHECKING, Callable, Mapping
+from typing import TYPE_CHECKING, Any, Callable, Mapping
 
 from .._path import StrPath
 from ..errors import FileError, InvalidConfigError
@@ -76,7 +76,7 @@ def read_configuration(
     expand=True,
     ignore_option_errors=False,
     dist: Distribution | None = None,
-):
+) -> dict[str, Any]:
     """Read given configuration file and returns options from it as a dict.
 
     :param str|unicode filepath: Path to configuration file in the ``pyproject.toml``
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 0a7a42eb09..7e51450634 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -24,6 +24,7 @@
     Dict,
     Generic,
     Iterable,
+    Iterator,
     Tuple,
     TypeVar,
     Union,
@@ -260,7 +261,9 @@ def __init__(
         """
 
     @classmethod
-    def _section_options(cls, options: AllCommandOptions):
+    def _section_options(
+        cls, options: AllCommandOptions
+    ) -> Iterator[tuple[str, SingleCommandOptions]]:
         for full_name, value in options.items():
             pre, sep, name = full_name.partition(cls.section_prefix)
             if pre:
diff --git a/setuptools/warnings.py b/setuptools/warnings.py
index 5d9cca6c37..8c94bc96e6 100644
--- a/setuptools/warnings.py
+++ b/setuptools/warnings.py
@@ -32,7 +32,7 @@ def emit(
         see_url: str | None = None,
         stacklevel: int = 2,
         **kwargs,
-    ):
+    ) -> None:
         """Private: reserved for ``setuptools`` internal use only"""
         # Default values:
         summary_ = summary or getattr(cls, "_SUMMARY", None) or ""
@@ -56,7 +56,7 @@ def _format(
         due_date: date | None = None,
         see_url: str | None = None,
         format_args: dict | None = None,
-    ):
+    ) -> str:
         """Private: reserved for ``setuptools`` internal use only"""
         today = date.today()
         summary = cleandoc(summary).format_map(format_args or {})

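The effect of the new ``ANN2`` selection is easiest to see on a small public
helper; a sketch reusing ``safe_name`` from the diff above:

    import re

    # Previously accepted, now flagged by ruff (ANN201,
    # missing-return-type-undocumented-public-function):
    #     def safe_name(name: str):
    #         ...

    # Accepted: the return type is explicit.
    def safe_name(name: str) -> str:
        """Collapse runs of non-alphanumeric/. characters to a single '-'."""
        return re.sub('[^A-Za-z0-9.]+', '-', name)

    assert safe_name("my project!!") == "my-project-"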
From 222ebf921a4a1cca69d6a5193121f3090580e502 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 25 Jun 2024 10:40:11 +0100
Subject: [PATCH 0745/1761] =?UTF-8?q?Bump=20version:=2070.1.0=20=E2=86=92?=
 =?UTF-8?q?=2070.1.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg            | 2 +-
 NEWS.rst                    | 9 +++++++++
 newsfragments/4429.misc.rst | 2 --
 pyproject.toml              | 2 +-
 4 files changed, 11 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4429.misc.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 4856098dff..a11458c8dd 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 70.1.0
+current_version = 70.1.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 5c3f5b4319..a33251ec82 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v70.1.1
+=======
+
+Misc
+----
+
+- #4429
+
+
 v70.1.0
 =======
 
diff --git a/newsfragments/4429.misc.rst b/newsfragments/4429.misc.rst
deleted file mode 100644
index 9c4c11ef89..0000000000
--- a/newsfragments/4429.misc.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Refactored ``test_bdist_wheel`` such that it creates examples in temporary
-folders and runs the ``bdist_wheel`` command programmatically without subprocesses.
diff --git a/pyproject.toml b/pyproject.toml
index 712b4ee31f..2e1f5110e1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "70.1.0"
+version = "70.1.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From c1ff48b553076dca8bcf955f7d7a0860aa2c04bf Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 25 Jun 2024 10:18:42 +0100
Subject: [PATCH 0746/1761] Add doctest to capture edge cases of PEP 625

---
 setuptools/_core_metadata.py | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index f1de9c9ba6..514280cf06 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -263,7 +263,21 @@ def _write_provides_extra(file, processed_extras, safe, unsafe):
 
 # from pypa/distutils#244; needed only until that logic is always available
 def get_fullname(self):
+    return _distribution_fullname(self.get_name(), self.get_version())
+
+
+def _distribution_fullname(name: str, version: str) -> str:
+    """
+    >>> _distribution_fullname('setup.tools', '1.0-2')
+    'setup_tools-1.0.post2'
+    >>> _distribution_fullname('setup-tools', '1.2post2')
+    'setup_tools-1.2.post2'
+    >>> _distribution_fullname('setup-tools', '1.0-r2')
+    'setup_tools-1.0.post2'
+    >>> _distribution_fullname('setup.tools', '1.0.post')
+    'setup_tools-1.0.post0'
+    """
     return "{}-{}".format(
-        canonicalize_name(self.get_name()).replace('-', '_'),
-        self.get_version(),
+        canonicalize_name(name).replace('-', '_'),
+        version,
     )

From 555f7db16aad8eaf121c464eae76df871c825fd1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 25 Jun 2024 10:26:24 +0100
Subject: [PATCH 0747/1761] Use canonicalize_version to produce fullname

---
 setuptools/_core_metadata.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 514280cf06..0fe59c36e5 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -18,7 +18,7 @@
 from . import _normalization, _reqs
 from .extern.packaging.markers import Marker
 from .extern.packaging.requirements import Requirement
-from .extern.packaging.utils import canonicalize_name
+from .extern.packaging.utils import canonicalize_name, canonicalize_version
 from .extern.packaging.version import Version
 from .warnings import SetuptoolsDeprecationWarning
 
@@ -279,5 +279,5 @@ def _distribution_fullname(name: str, version: str) -> str:
     """
     return "{}-{}".format(
         canonicalize_name(name).replace('-', '_'),
-        version,
+        canonicalize_version(version, strip_trailing_zero=False),
     )

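``canonicalize_version`` normalises legal but non-canonical versions (such as
``1.0-2`` -> ``1.0.post2``), while ``strip_trailing_zero=False`` keeps ``1.0``
from collapsing to ``1``. A sketch against the standalone ``packaging``
distribution (setuptools itself uses the vendored copy):

    from packaging.utils import canonicalize_name, canonicalize_version

    def fullname(name: str, version: str) -> str:
        # PEP 503 name normalisation, '-' -> '_', plus version normalisation
        return "{}-{}".format(
            canonicalize_name(name).replace('-', '_'),
            canonicalize_version(version, strip_trailing_zero=False),
        )

    print(fullname("setup.tools", "1.0-2"))     # setup_tools-1.0.post2
    print(fullname("setup-tools", "1.2post2"))  # setup_tools-1.2.post2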
From 0b2119a6c415ef9c0b433b7a9cfe7650a84d1236 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 25 Jun 2024 10:30:12 +0100
Subject: [PATCH 0748/1761] Add news fragment

---
 newsfragments/4434.bugfix.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4434.bugfix.rst

diff --git a/newsfragments/4434.bugfix.rst b/newsfragments/4434.bugfix.rst
new file mode 100644
index 0000000000..5eeb674297
--- /dev/null
+++ b/newsfragments/4434.bugfix.rst
@@ -0,0 +1,2 @@
+Fix distribution name normalisation (:pep:`625`) for valid versions that are
+not canonical (e.g. ``1.0-2``).

From 63985e62cc07e52c2566a7c3a89fdccccd74f24f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 25 Jun 2024 10:32:55 +0100
Subject: [PATCH 0749/1761] Add another test case for version

---
 setuptools/_core_metadata.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 0fe59c36e5..45aae7d70b 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -276,6 +276,8 @@ def _distribution_fullname(name: str, version: str) -> str:
     'setup_tools-1.0.post2'
     >>> _distribution_fullname('setup.tools', '1.0.post')
     'setup_tools-1.0.post0'
+    >>> _distribution_fullname('setup.tools', '1.0+ubuntu-1')
+    'setup_tools-1.0+ubuntu.1'
     """
     return "{}-{}".format(
         canonicalize_name(name).replace('-', '_'),

From 74c20b6f3af53280d240f2f1c676178d16dd4cab Mon Sep 17 00:00:00 2001
From: Sviatoslav Sydorenko 
Date: Wed, 25 Oct 2023 23:46:01 +0200
Subject: [PATCH 0750/1761] Let codecov-action autodetect the coverage report

Ref: https://github.com/codecov/feedback/issues/84.
---
 .github/workflows/main.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index ec2e567a1e..b8bbc750cc 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -206,8 +206,6 @@ jobs:
         if: hashFiles('coverage.xml') != ''  # Rudimentary `file.exists()`
         uses: codecov/codecov-action@v4
         with:
-          files: >-
-            ${{ github.workspace }}\coverage.xml
           flags: >-  # Mark which lines are covered by which envs
             CI-GHA,
             ${{ github.job }},

From c45346633178a4e2b342bc2ef4efd316c801bdf4 Mon Sep 17 00:00:00 2001
From: Sviatoslav Sydorenko 
Date: Wed, 26 Jun 2024 20:07:10 +0200
Subject: [PATCH 0751/1761] =?UTF-8?q?=F0=9F=A7=AA=20Unignore=20errors=20in?=
 =?UTF-8?q?=20`coverage=20xml`=20@=20Cygwin?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .github/workflows/main.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index b8bbc750cc..a1b1d23727 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -200,7 +200,7 @@ jobs:
         if: hashFiles('.coverage') != ''  # Rudimentary `file.exists()`
         run: |
           python -m pip install coverage
-          python -m coverage xml --ignore-errors
+          python -m coverage xml
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
       - name: Publish coverage
         if: hashFiles('coverage.xml') != ''  # Rudimentary `file.exists()`

From f0e8bb1309158bd40d1af8e27280c2686e59a8e9 Mon Sep 17 00:00:00 2001
From: Sviatoslav Sydorenko 
Date: Wed, 26 Jun 2024 20:23:36 +0200
Subject: [PATCH 0752/1761] =?UTF-8?q?Revert=20"=F0=9F=A7=AA=20Unignore=20e?=
 =?UTF-8?q?rrors=20in=20`coverage=20xml`=20@=20Cygwin"?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This reverts commit c45346633178a4e2b342bc2ef4efd316c801bdf4.
---
 .github/workflows/main.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index a1b1d23727..b8bbc750cc 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -200,7 +200,7 @@ jobs:
         if: hashFiles('.coverage') != ''  # Rudimentary `file.exists()`
         run: |
           python -m pip install coverage
-          python -m coverage xml
+          python -m coverage xml --ignore-errors
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
       - name: Publish coverage
         if: hashFiles('coverage.xml') != ''  # Rudimentary `file.exists()`

From d02b6b19375065e1875af5f874583df0343b8484 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 16:55:52 +0200
Subject: [PATCH 0753/1761] Use brackets for the default value of option
 arguments

The goal is to standardize the format of the help text printed
by commands. It is not easy to choose between brackets `[]` and
parentheses `()`. I went for the docopt style, which is the
closest to a standard I could find:
	http://docopt.org/

	[...] and whether that argument has a default value ([default: 10]).

This change has already been applied to distutils:
	https://github.com/pypa/distutils/pull/262
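
As a sketch of the effect, an option declared with the bracketed style
(taken from the bdist_wheel hunk below) renders in `--help` output
roughly as follows; the rendering shown is approximate:

    # option declaration using the bracketed [default: ...] style
    ("universal", None, "make a universal wheel [default: false]"),

    # `setup.py bdist_wheel --help` then lists it approximately as:
    #   --universal  make a universal wheel [default: false]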
---
 setuptools/command/bdist_wheel.py | 12 ++++++------
 setuptools/command/dist_info.py   |  2 +-
 setuptools/command/egg_info.py    |  2 +-
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index a81187598a..d8cdd4e406 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -176,7 +176,7 @@ class bdist_wheel(Command):
             "plat-name=",
             "p",
             "platform name to embed in generated filenames "
-            f"(default: {get_platform(None)})",
+            f"[default: {get_platform(None)}]",
         ),
         (
             "keep-temp",
@@ -189,7 +189,7 @@ class bdist_wheel(Command):
         (
             "relative",
             None,
-            "build the archive using relative paths (default: false)",
+            "build the archive using relative paths [default: false]",
         ),
         (
             "owner=",
@@ -201,18 +201,18 @@ class bdist_wheel(Command):
             "g",
             "Group name used when creating a tar file [default: current group]",
         ),
-        ("universal", None, "make a universal wheel (default: false)"),
+        ("universal", None, "make a universal wheel [default: false]"),
         (
             "compression=",
             None,
-            "zipfile compression (one of: {}) (default: 'deflated')".format(
+            "zipfile compression (one of: {}) [default: 'deflated']".format(
                 ", ".join(supported_compressions)
             ),
         ),
         (
             "python-tag=",
             None,
-            f"Python implementation compatibility tag (default: '{python_tag()}')",
+            f"Python implementation compatibility tag [default: '{python_tag()}']",
         ),
         (
             "build-number=",
@@ -224,7 +224,7 @@ class bdist_wheel(Command):
         (
             "py-limited-api=",
             None,
-            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag (default: false)",
+            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag [default: false]",
         ),
     ]
 
diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py
index 52c0721903..2adc1c46f3 100644
--- a/setuptools/command/dist_info.py
+++ b/setuptools/command/dist_info.py
@@ -28,7 +28,7 @@ class dist_info(Command):
             'output-dir=',
             'o',
             "directory inside of which the .dist-info will be"
-            "created (default: top of the source tree)",
+            "created [default: top of the source tree]",
         ),
         ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
         ('tag-build=', 'b', "Specify explicit tag to add to version number"),
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index ccc2db8972..8f2d642794 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -172,7 +172,7 @@ class egg_info(InfoCommon, Command):
             'egg-base=',
             'e',
             "directory containing .egg-info directories"
-            " (default: top of the source tree)",
+            " [default: top of the source tree]",
         ),
         ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
         ('tag-build=', 'b', "Specify explicit tag to add to version number"),

From ea3807beacf1d81a397d783be1d24ec3d1c3c1ed Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 18:46:24 +0200
Subject: [PATCH 0754/1761] Enforce ruff/flake8-implicit-str-concat rule ISC001

ISC001 Implicitly concatenated string literals on one line
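
For illustration, one of the pairs fixed below, before and after:

    # flagged by ISC001: adjacent literals implicitly concatenated
    # on a single line
    expected = (
        "Missing 'Version:' header and/or PKG-INFO file at path: " '[could not detect]'
    )

    # after the fix: a single literal
    expected = (
        "Missing 'Version:' header and/or PKG-INFO file at path: [could not detect]"
    )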
---
 pkg_resources/__init__.py                 |  2 +-
 pkg_resources/tests/test_pkg_resources.py |  4 +-
 pkg_resources/tests/test_resources.py     |  2 +-
 setuptools/command/egg_info.py            |  6 +--
 setuptools/command/sdist.py               |  2 +-
 setuptools/dist.py                        |  2 +-
 setuptools/namespaces.py                  |  2 +-
 setuptools/package_index.py               |  2 +-
 setuptools/tests/config/test_expand.py    |  2 +-
 setuptools/tests/config/test_setupcfg.py  | 54 +++++++++++------------
 setuptools/tests/test_egg_info.py         |  2 +-
 setuptools/tests/test_integration.py      |  2 +-
 12 files changed, 41 insertions(+), 41 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index c4ace5aa77..b722af2c2f 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -2041,7 +2041,7 @@ def _extract_resource(self, manager: ResourceManager, zip_path) -> str:  # noqa:
 
         if not WRITE_SUPPORT:
             raise OSError(
-                '"os.rename" and "os.unlink" are not supported ' 'on this platform'
+                '"os.rename" and "os.unlink" are not supported on this platform'
             )
         try:
             if not self.egg_name:
diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 17e1ff0c2f..4c4c68dfff 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -171,7 +171,7 @@ def test_setuptools_not_imported(self):
         lines = (
             'import pkg_resources',
             'import sys',
-            ('assert "setuptools" not in sys.modules, ' '"setuptools was imported"'),
+            ('assert "setuptools" not in sys.modules, "setuptools was imported"'),
         )
         cmd = [sys.executable, '-c', '; '.join(lines)]
         subprocess.check_call(cmd)
@@ -299,7 +299,7 @@ def test_distribution_version_missing_undetected_path():
 
     msg, dist = excinfo.value.args
     expected = (
-        "Missing 'Version:' header and/or PKG-INFO file at path: " '[could not detect]'
+        "Missing 'Version:' header and/or PKG-INFO file at path: [could not detect]"
     )
     assert msg == expected
 
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index 83199af7b8..826d691b83 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -257,7 +257,7 @@ def test_marker_evaluation_with_extras(self):
             "/foo_dir/Foo-1.2.dist-info",
             metadata=Metadata((
                 "METADATA",
-                "Provides-Extra: baz\n" "Requires-Dist: quux; extra=='baz'",
+                "Provides-Extra: baz\nRequires-Dist: quux; extra=='baz'",
             )),
         )
         ad.add(Foo)
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index ccc2db8972..26e8e5220d 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -363,16 +363,16 @@ def process_template_line(self, line):
         }
         log_map = {
             'include': "warning: no files found matching '%s'",
-            'exclude': ("warning: no previously-included files found " "matching '%s'"),
+            'exclude': ("warning: no previously-included files found matching '%s'"),
             'global-include': (
-                "warning: no files found matching '%s' " "anywhere in distribution"
+                "warning: no files found matching '%s' anywhere in distribution"
             ),
             'global-exclude': (
                 "warning: no previously-included files matching "
                 "'%s' found anywhere in distribution"
             ),
             'recursive-include': (
-                "warning: no files found matching '%s' " "under directory '%s'"
+                "warning: no files found matching '%s' under directory '%s'"
             ),
             'recursive-exclude': (
                 "warning: no previously-included files matching "
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index d455f44c5e..a834ba4a78 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -29,7 +29,7 @@ class sdist(orig.sdist):
         (
             'dist-dir=',
             'd',
-            "directory to put the source distribution archive(s) in " "[default: dist]",
+            "directory to put the source distribution archive(s) in [default: dist]",
         ),
         (
             'owner=',
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 43762960ba..a60bd44445 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -159,7 +159,7 @@ def check_specifier(dist, attr, value):
         SpecifierSet(value)
     except (InvalidSpecifier, AttributeError) as error:
         tmpl = (
-            "{attr!r} must be a string " "containing valid version specifiers; {error}"
+            "{attr!r} must be a string containing valid version specifiers; {error}"
         )
         raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error
 
diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py
index 0185d55f94..2f2c1cfbe1 100644
--- a/setuptools/namespaces.py
+++ b/setuptools/namespaces.py
@@ -55,7 +55,7 @@ def _get_target(self):
             "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
             "[os.path.dirname(p)])))"
         ),
-        ("m = m or " "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"),
+        ("m = m or sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"),
         "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
         "(p not in mp) and mp.append(p)",
     )
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index c91e419923..7a1190df99 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1137,7 +1137,7 @@ def local_open(url):
             files.append('{name}'.format(name=f))
         else:
             tmpl = (
-                "{url}" "{files}"
+                "{url}{files}"
             )
             body = tmpl.format(url=url, files='\n'.join(files))
         status, message = 200, "OK"
diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
index f15d8fa7c2..b309a1ce7c 100644
--- a/setuptools/tests/config/test_expand.py
+++ b/setuptools/tests/config/test_expand.py
@@ -84,7 +84,7 @@ def test_read_attr(self, tmp_path, monkeypatch):
             "pkg/__init__.py": "",
             "pkg/sub/__init__.py": "VERSION = '0.1.1'",
             "pkg/sub/mod.py": (
-                "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\n" "raise SystemExit(1)"
+                "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\nraise SystemExit(1)"
             ),
         }
         write_files(files, tmp_path)
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 706e2d0ebe..6bb40c32e1 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -35,7 +35,7 @@ def fake_env(
     tmpdir, setup_cfg, setup_py=None, encoding='ascii', package_path='fake_package'
 ):
     if setup_py is None:
-        setup_py = 'from setuptools import setup\n' 'setup()\n'
+        setup_py = 'from setuptools import setup\nsetup()\n'
 
     tmpdir.join('setup.py').write(setup_py)
     config = tmpdir.join('setup.cfg')
@@ -97,7 +97,7 @@ def test_no_config(self, tmpdir):
     def test_ignore_errors(self, tmpdir):
         _, config = fake_env(
             tmpdir,
-            '[metadata]\n' 'version = attr: none.VERSION\n' 'keywords = one, two\n',
+            '[metadata]\nversion = attr: none.VERSION\nkeywords = one, two\n',
         )
         with pytest.raises(ImportError):
             read_configuration('%s' % config)
@@ -171,7 +171,7 @@ def test_license_cfg(self, tmpdir):
     def test_file_mixed(self, tmpdir):
         fake_env(
             tmpdir,
-            '[metadata]\n' 'long_description = file: README.rst, CHANGES.rst\n' '\n',
+            '[metadata]\nlong_description = file: README.rst, CHANGES.rst\n\n',
         )
 
         tmpdir.join('README.rst').write('readme contents\nline2')
@@ -179,14 +179,14 @@ def test_file_mixed(self, tmpdir):
 
         with get_dist(tmpdir) as dist:
             assert dist.metadata.long_description == (
-                'readme contents\nline2\n' 'changelog contents\nand stuff'
+                'readme contents\nline2\nchangelog contents\nand stuff'
             )
 
     def test_file_sandboxed(self, tmpdir):
         tmpdir.ensure("README")
         project = tmpdir.join('depth1', 'depth2')
         project.ensure(dir=True)
-        fake_env(project, '[metadata]\n' 'long_description = file: ../../README\n')
+        fake_env(project, '[metadata]\nlong_description = file: ../../README\n')
 
         with get_dist(project, parse=False) as dist:
             with pytest.raises(DistutilsOptionError):
@@ -253,7 +253,7 @@ def test_dict(self, tmpdir):
 
     def test_version(self, tmpdir):
         package_dir, config = fake_env(
-            tmpdir, '[metadata]\n' 'version = attr: fake_package.VERSION\n'
+            tmpdir, '[metadata]\nversion = attr: fake_package.VERSION\n'
         )
 
         sub_a = package_dir.mkdir('subpkg_a')
@@ -263,35 +263,35 @@ def test_version(self, tmpdir):
         sub_b = package_dir.mkdir('subpkg_b')
         sub_b.join('__init__.py').write('')
         sub_b.join('mod.py').write(
-            'import third_party_module\n' 'VERSION = (2016, 11, 26)'
+            'import third_party_module\nVERSION = (2016, 11, 26)'
         )
 
         with get_dist(tmpdir) as dist:
             assert dist.metadata.version == '1.2.3'
 
-        config.write('[metadata]\n' 'version = attr: fake_package.get_version\n')
+        config.write('[metadata]\nversion = attr: fake_package.get_version\n')
         with get_dist(tmpdir) as dist:
             assert dist.metadata.version == '3.4.5.dev'
 
-        config.write('[metadata]\n' 'version = attr: fake_package.VERSION_MAJOR\n')
+        config.write('[metadata]\nversion = attr: fake_package.VERSION_MAJOR\n')
         with get_dist(tmpdir) as dist:
             assert dist.metadata.version == '1'
 
         config.write(
-            '[metadata]\n' 'version = attr: fake_package.subpkg_a.mod.VERSION\n'
+            '[metadata]\nversion = attr: fake_package.subpkg_a.mod.VERSION\n'
         )
         with get_dist(tmpdir) as dist:
             assert dist.metadata.version == '2016.11.26'
 
         config.write(
-            '[metadata]\n' 'version = attr: fake_package.subpkg_b.mod.VERSION\n'
+            '[metadata]\nversion = attr: fake_package.subpkg_b.mod.VERSION\n'
         )
         with get_dist(tmpdir) as dist:
             assert dist.metadata.version == '2016.11.26'
 
     def test_version_file(self, tmpdir):
         _, config = fake_env(
-            tmpdir, '[metadata]\n' 'version = file: fake_package/version.txt\n'
+            tmpdir, '[metadata]\nversion = file: fake_package/version.txt\n'
         )
         tmpdir.join('fake_package', 'version.txt').write('1.2.3\n')
 
@@ -346,12 +346,12 @@ def test_version_with_package_dir_complex(self, tmpdir):
             assert dist.metadata.version == '1.2.3'
 
     def test_unknown_meta_item(self, tmpdir):
-        fake_env(tmpdir, '[metadata]\n' 'name = fake_name\n' 'unknown = some\n')
+        fake_env(tmpdir, '[metadata]\nname = fake_name\nunknown = some\n')
         with get_dist(tmpdir, parse=False) as dist:
             dist.parse_config_files()  # Skip unknown.
 
     def test_usupported_section(self, tmpdir):
-        fake_env(tmpdir, '[metadata.some]\n' 'key = val\n')
+        fake_env(tmpdir, '[metadata.some]\nkey = val\n')
         with get_dist(tmpdir, parse=False) as dist:
             with pytest.raises(DistutilsOptionError):
                 dist.parse_config_files()
@@ -364,7 +364,7 @@ def test_classifiers(self, tmpdir):
         ])
 
         # From file.
-        _, config = fake_env(tmpdir, '[metadata]\n' 'classifiers = file: classifiers\n')
+        _, config = fake_env(tmpdir, '[metadata]\nclassifiers = file: classifiers\n')
 
         tmpdir.join('classifiers').write(
             'Framework :: Django\n'
@@ -387,25 +387,25 @@ def test_classifiers(self, tmpdir):
             assert set(dist.metadata.classifiers) == expected
 
     def test_interpolation(self, tmpdir):
-        fake_env(tmpdir, '[metadata]\n' 'description = %(message)s\n')
+        fake_env(tmpdir, '[metadata]\ndescription = %(message)s\n')
         with pytest.raises(configparser.InterpolationMissingOptionError):
             with get_dist(tmpdir):
                 pass
 
     def test_non_ascii_1(self, tmpdir):
-        fake_env(tmpdir, '[metadata]\n' 'description = éàïôñ\n', encoding='utf-8')
+        fake_env(tmpdir, '[metadata]\ndescription = éàïôñ\n', encoding='utf-8')
         with get_dist(tmpdir):
             pass
 
     def test_non_ascii_3(self, tmpdir):
-        fake_env(tmpdir, '\n' '# -*- coding: invalid\n')
+        fake_env(tmpdir, '\n# -*- coding: invalid\n')
         with get_dist(tmpdir):
             pass
 
     def test_non_ascii_4(self, tmpdir):
         fake_env(
             tmpdir,
-            '# -*- coding: utf-8\n' '[metadata]\n' 'description = éàïôñ\n',
+            '# -*- coding: utf-8\n[metadata]\ndescription = éàïôñ\n',
             encoding='utf-8',
         )
         with get_dist(tmpdir) as dist:
@@ -447,7 +447,7 @@ def test_make_option_lowercase(self, tmpdir):
         # remove this test and the method make_option_lowercase() in setuptools.dist
         # when no longer needed
         fake_env(
-            tmpdir, '[metadata]\n' 'Name = foo\n' 'description = Some description\n'
+            tmpdir, '[metadata]\nName = foo\ndescription = Some description\n'
         )
         msg = "Usage of uppercase key 'Name' in 'metadata' will not be supported"
         with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
@@ -561,7 +561,7 @@ def test_multiline(self, tmpdir):
             assert dist.tests_require == ['mock==0.7.2', 'pytest']
 
     def test_package_dir_fail(self, tmpdir):
-        fake_env(tmpdir, '[options]\n' 'package_dir = a b\n')
+        fake_env(tmpdir, '[options]\npackage_dir = a b\n')
         with get_dist(tmpdir, parse=False) as dist:
             with pytest.raises(DistutilsOptionError):
                 dist.parse_config_files()
@@ -589,13 +589,13 @@ def test_package_data(self, tmpdir):
             }
 
     def test_packages(self, tmpdir):
-        fake_env(tmpdir, '[options]\n' 'packages = find:\n')
+        fake_env(tmpdir, '[options]\npackages = find:\n')
 
         with get_dist(tmpdir) as dist:
             assert dist.packages == ['fake_package']
 
     def test_find_directive(self, tmpdir):
-        dir_package, config = fake_env(tmpdir, '[options]\n' 'packages = find:\n')
+        dir_package, config = fake_env(tmpdir, '[options]\npackages = find:\n')
 
         dir_sub_one, _ = make_package_dir('sub_one', dir_package)
         dir_sub_two, _ = make_package_dir('sub_two', dir_package)
@@ -633,7 +633,7 @@ def test_find_directive(self, tmpdir):
 
     def test_find_namespace_directive(self, tmpdir):
         dir_package, config = fake_env(
-            tmpdir, '[options]\n' 'packages = find_namespace:\n'
+            tmpdir, '[options]\npackages = find_namespace:\n'
         )
 
         dir_sub_one, _ = make_package_dir('sub_one', dir_package)
@@ -754,7 +754,7 @@ def test_nowarn_accidental_env_marker_misconfig(self, config, tmpdir, recwarn):
         assert len(recwarn) == num_warnings
 
     def test_dash_preserved_extras_require(self, tmpdir):
-        fake_env(tmpdir, '[options.extras_require]\n' 'foo-a = foo\n' 'foo_b = test\n')
+        fake_env(tmpdir, '[options.extras_require]\nfoo-a = foo\nfoo_b = test\n')
 
         with get_dist(tmpdir) as dist:
             assert dist.extras_require == {'foo-a': ['foo'], 'foo_b': ['test']}
@@ -785,7 +785,7 @@ def test_entry_points(self, tmpdir):
         tmpdir.join('entry_points').write(expected)
 
         # From file.
-        config.write('[options]\n' 'entry_points = file: entry_points\n')
+        config.write('[options]\nentry_points = file: entry_points\n')
 
         with get_dist(tmpdir) as dist:
             assert dist.entry_points == expected
@@ -904,7 +904,7 @@ def test_cmdclass(self, tmpdir):
         module_path = Path(tmpdir, "src/custom_build.py")  # auto discovery for src
         module_path.parent.mkdir(parents=True, exist_ok=True)
         module_path.write_text(
-            "from distutils.core import Command\n" "class CustomCmd(Command): pass\n",
+            "from distutils.core import Command\nclass CustomCmd(Command): pass\n",
             encoding="utf-8",
         )
 
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index f2489896b3..8616f813c1 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -310,7 +310,7 @@ def parametrize(*test_list, **format_dict):
                         )
                     )
             return pytest.mark.parametrize(
-                'requires,use_setup_cfg,' 'expected_requires,install_cmd_kwargs',
+                'requires,use_setup_cfg,expected_requires,install_cmd_kwargs',
                 argvalues,
                 ids=idlist,
             )
diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py
index 71f10d9a6e..78f459d018 100644
--- a/setuptools/tests/test_integration.py
+++ b/setuptools/tests/test_integration.py
@@ -16,7 +16,7 @@
 
 
 pytestmark = pytest.mark.skipif(
-    'platform.python_implementation() == "PyPy" and ' 'platform.system() == "Windows"',
+    'platform.python_implementation() == "PyPy" and platform.system() == "Windows"',
     reason="pypa/setuptools#2496",
 )
 

From c908af325c23559ef1e1201c75d0a774b192170a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 18:50:24 +0200
Subject: [PATCH 0755/1761] A round of `ruff format` after `ruff check --fix`

---
 setuptools/dist.py                       |  4 +---
 setuptools/package_index.py              |  4 +---
 setuptools/tests/config/test_setupcfg.py | 12 +++---------
 3 files changed, 5 insertions(+), 15 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index a60bd44445..32e8d43c64 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -158,9 +158,7 @@ def check_specifier(dist, attr, value):
     try:
         SpecifierSet(value)
     except (InvalidSpecifier, AttributeError) as error:
-        tmpl = (
-            "{attr!r} must be a string containing valid version specifiers; {error}"
-        )
+        tmpl = "{attr!r} must be a string containing valid version specifiers; {error}"
         raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error
 
 
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 7a1190df99..950059cab9 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1136,9 +1136,7 @@ def local_open(url):
                 f += '/'
             files.append('{name}'.format(name=f))
         else:
-            tmpl = (
-                "{url}{files}"
-            )
+            tmpl = "{url}{files}"
             body = tmpl.format(url=url, files='\n'.join(files))
         status, message = 200, "OK"
     else:
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 6bb40c32e1..bf9777c668 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -277,15 +277,11 @@ def test_version(self, tmpdir):
         with get_dist(tmpdir) as dist:
             assert dist.metadata.version == '1'
 
-        config.write(
-            '[metadata]\nversion = attr: fake_package.subpkg_a.mod.VERSION\n'
-        )
+        config.write('[metadata]\nversion = attr: fake_package.subpkg_a.mod.VERSION\n')
         with get_dist(tmpdir) as dist:
             assert dist.metadata.version == '2016.11.26'
 
-        config.write(
-            '[metadata]\nversion = attr: fake_package.subpkg_b.mod.VERSION\n'
-        )
+        config.write('[metadata]\nversion = attr: fake_package.subpkg_b.mod.VERSION\n')
         with get_dist(tmpdir) as dist:
             assert dist.metadata.version == '2016.11.26'
 
@@ -446,9 +442,7 @@ def test_warn_dash_deprecation(self, tmpdir):
     def test_make_option_lowercase(self, tmpdir):
         # remove this test and the method make_option_lowercase() in setuptools.dist
         # when no longer needed
-        fake_env(
-            tmpdir, '[metadata]\nName = foo\ndescription = Some description\n'
-        )
+        fake_env(tmpdir, '[metadata]\nName = foo\ndescription = Some description\n')
         msg = "Usage of uppercase key 'Name' in 'metadata' will not be supported"
         with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
             with get_dist(tmpdir) as dist:

From e14a73a8e8c1dc225ca2a87ed72fa7157c4afb0d Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 17:49:08 +0200
Subject: [PATCH 0756/1761] Enforce ruff/flake8-implicit-str-concat rule ISC003

ISC003 Explicitly concatenated string should be implicitly concatenated
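
For illustration, the pattern fixed below, before and after:

    # flagged by ISC003: explicit `+` between adjacent string literals
    msg = (
        "The register command has been removed, use twine to upload "
        + "instead (https://pypi.org/p/twine)"
    )

    # after the fix: adjacent literals concatenate implicitly
    msg = (
        "The register command has been removed, use twine to upload "
        "instead (https://pypi.org/p/twine)"
    )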
---
 setuptools/command/bdist_egg.py | 2 +-
 setuptools/command/register.py  | 2 +-
 setuptools/command/upload.py    | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 73476e0cec..6a2dac1115 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -74,7 +74,7 @@ class bdist_egg(Command):
             'keep-temp',
             'k',
             "keep the pseudo-installation tree around after "
-            + "creating the distribution archive",
+            "creating the distribution archive",
         ),
         ('dist-dir=', 'd', "directory to put final built distributions in"),
         ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
diff --git a/setuptools/command/register.py b/setuptools/command/register.py
index b8266b9a60..beee9782e7 100644
--- a/setuptools/command/register.py
+++ b/setuptools/command/register.py
@@ -10,7 +10,7 @@ class register(orig.register):
     def run(self):
         msg = (
             "The register command has been removed, use twine to upload "
-            + "instead (https://pypi.org/p/twine)"
+            "instead (https://pypi.org/p/twine)"
         )
 
         self.announce("ERROR: " + msg, log.ERROR)
diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
index ec7f81e227..1cca47cea9 100644
--- a/setuptools/command/upload.py
+++ b/setuptools/command/upload.py
@@ -10,7 +10,7 @@ class upload(orig.upload):
     def run(self):
         msg = (
             "The upload command has been removed, use twine to upload "
-            + "instead (https://pypi.org/p/twine)"
+            "instead (https://pypi.org/p/twine)"
         )
 
         self.announce("ERROR: " + msg, log.ERROR)

From 713ad96cec0ca9c83eb6095f689ad90b35364c7b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 13:47:03 -0400
Subject: [PATCH 0757/1761] Add 'consolidate_linker_args' wrapper to protect
 the old behavior for now.

Closes pypa/distutils#246.
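
A quick sketch of the wrapper's behavior; the first input mirrors the
updated tests, the mixed input is a hypothetical example:

    from distutils.compat import consolidate_linker_args

    # every element carries the '-Wl,' prefix, so the list collapses
    # into the single comma-joined option string (the old behavior)
    consolidate_linker_args(['-Wl,--enable-new-dtags', '-Wl,-rpath,/foo'])
    # -> '-Wl,--enable-new-dtags,-rpath,/foo'

    # anything else passes through unchanged
    consolidate_linker_args(['-shared', '-Wl,-rpath,/foo'])
    # -> ['-shared', '-Wl,-rpath,/foo']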
---
 distutils/compat/__init__.py          | 15 +++++++++++++++
 distutils/compat/py38.py              | 23 +++++++++++++++++++++++
 distutils/tests/test_unixccompiler.py | 17 +++++++++--------
 distutils/unixccompiler.py            |  5 +++--
 4 files changed, 50 insertions(+), 10 deletions(-)
 create mode 100644 distutils/compat/__init__.py
 create mode 100644 distutils/compat/py38.py

diff --git a/distutils/compat/__init__.py b/distutils/compat/__init__.py
new file mode 100644
index 0000000000..b7be72678f
--- /dev/null
+++ b/distutils/compat/__init__.py
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+from .py38 import removeprefix
+
+
+def consolidate_linker_args(args: list[str]) -> str:
+    """
+    Ensure the return value is a string for backward compatibility.
+
+    Retain until at least 2024-10-31.
+    """
+
+    if not all(arg.startswith('-Wl,') for arg in args):
+        return args
+    return '-Wl,' + ','.join(removeprefix(arg, '-Wl,') for arg in args)
diff --git a/distutils/compat/py38.py b/distutils/compat/py38.py
new file mode 100644
index 0000000000..0af3814017
--- /dev/null
+++ b/distutils/compat/py38.py
@@ -0,0 +1,23 @@
+import sys
+
+if sys.version_info < (3, 9):
+
+    def removesuffix(self, suffix):
+        # suffix='' should not call self[:-0].
+        if suffix and self.endswith(suffix):
+            return self[: -len(suffix)]
+        else:
+            return self[:]
+
+    def removeprefix(self, prefix):
+        if self.startswith(prefix):
+            return self[len(prefix) :]
+        else:
+            return self[:]
+else:
+
+    def removesuffix(self, suffix):
+        return self.removesuffix(suffix)
+
+    def removeprefix(self, prefix):
+        return self.removeprefix(prefix)
diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index f17edf2f6b..6f05fa6989 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -4,6 +4,7 @@
 import sys
 import unittest.mock as mock
 from distutils import sysconfig
+from distutils.compat import consolidate_linker_args
 from distutils.errors import DistutilsPlatformError
 from distutils.unixccompiler import UnixCCompiler
 from distutils.util import _clear_cached_macosx_ver
@@ -149,10 +150,10 @@ def gcv(v):
                 return 'yes'
 
         sysconfig.get_config_var = gcv
-        assert self.cc.rpath_foo() == [
+        assert self.cc.rpath_foo() == consolidate_linker_args([
             '-Wl,--enable-new-dtags',
             '-Wl,-rpath,/foo',
-        ]
+        ])
 
         def gcv(v):
             if v == 'CC':
@@ -161,10 +162,10 @@ def gcv(v):
                 return 'yes'
 
         sysconfig.get_config_var = gcv
-        assert self.cc.rpath_foo() == [
+        assert self.cc.rpath_foo() == consolidate_linker_args([
             '-Wl,--enable-new-dtags',
             '-Wl,-rpath,/foo',
-        ]
+        ])
 
         # GCC non-GNULD
         sys.platform = 'bar'
@@ -189,10 +190,10 @@ def gcv(v):
                 return 'yes'
 
         sysconfig.get_config_var = gcv
-        assert self.cc.rpath_foo() == [
+        assert self.cc.rpath_foo() == consolidate_linker_args([
             '-Wl,--enable-new-dtags',
             '-Wl,-rpath,/foo',
-        ]
+        ])
 
         # non-GCC GNULD
         sys.platform = 'bar'
@@ -204,10 +205,10 @@ def gcv(v):
                 return 'yes'
 
         sysconfig.get_config_var = gcv
-        assert self.cc.rpath_foo() == [
+        assert self.cc.rpath_foo() == consolidate_linker_args([
             '-Wl,--enable-new-dtags',
             '-Wl,-rpath,/foo',
-        ]
+        ])
 
         # non-GCC non-GNULD
         sys.platform = 'bar'
diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index a54481c01b..0248bde87b 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -22,6 +22,7 @@
 import sys
 
 from . import sysconfig
+from .compat import consolidate_linker_args
 from ._log import log
 from ._macos_compat import compiler_fixup
 from ._modified import newer
@@ -315,11 +316,11 @@ def runtime_library_dir_option(self, dir: str) -> str | list[str]:
         # For all compilers, `-Wl` is the presumed way to pass a
         # compiler option to the linker
         if sysconfig.get_config_var("GNULD") == "yes":
-            return [
+            return consolidate_linker_args([
                 # Force RUNPATH instead of RPATH
                 "-Wl,--enable-new-dtags",
                 "-Wl,-rpath," + dir,
-            ]
+            ])
         else:
             return "-Wl,-R" + dir
 

From 71fb698b720073e697dc8d8ad3e7095fd7588042 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 14:03:03 -0400
Subject: [PATCH 0758/1761] Exclude compat package from coverage.

---
 .coveragerc | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.coveragerc b/.coveragerc
index 35b98b1df9..bcef31d957 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,6 +2,9 @@
 omit =
 	# leading `*/` for pytest-dev/pytest-cov#456
 	*/.tox/*
+
+	# local
+	*/compat/*
 disable_warnings =
 	couldnt-parse
 

From 9eaea6a12c2b6aeee0fd0a11a4a4da0e1eb89f3e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 13:33:48 -0400
Subject: [PATCH 0759/1761] Add type declaration for
 runtime_library_dir_option, making explicit the different return types one
 might expect.

---
 distutils/unixccompiler.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index caf4cd338e..a54481c01b 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -13,6 +13,8 @@
   * link shared library handled by 'cc -shared'
 """
 
+from __future__ import annotations
+
 import itertools
 import os
 import re
@@ -281,7 +283,7 @@ def _is_gcc(self):
         compiler = os.path.basename(shlex.split(cc_var)[0])
         return "gcc" in compiler or "g++" in compiler
 
-    def runtime_library_dir_option(self, dir):
+    def runtime_library_dir_option(self, dir: str) -> str | list[str]:
         # XXX Hackish, at the very least.  See Python bug #445902:
         # https://bugs.python.org/issue445902
         # Linkers on different platforms need different options to

From e74608b03526415568e4ba035166b66167a90df3 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 16:40:21 -0400
Subject: [PATCH 0760/1761] Extend the retention of the compatibility.

---
 distutils/compat/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/compat/__init__.py b/distutils/compat/__init__.py
index b7be72678f..b1ee3fe8b0 100644
--- a/distutils/compat/__init__.py
+++ b/distutils/compat/__init__.py
@@ -7,7 +7,7 @@ def consolidate_linker_args(args: list[str]) -> str:
     """
     Ensure the return value is a string for backward compatibility.
 
-    Retain until at least 2024-10-31.
+    Retain until at least 2024-04-31. See pypa/distutils#246
     """
 
     if not all(arg.startswith('-Wl,') for arg in args):

From d3165d04ec215e3e9dc2cf414ed02d3468a25f79 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 20:37:49 -0400
Subject: [PATCH 0761/1761] 👹 Feed the hobgoblins (delint).
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/unixccompiler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index 0248bde87b..da97688cbd 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -22,11 +22,11 @@
 import sys
 
 from . import sysconfig
-from .compat import consolidate_linker_args
 from ._log import log
 from ._macos_compat import compiler_fixup
 from ._modified import newer
 from .ccompiler import CCompiler, gen_lib_options, gen_preprocess_options
+from .compat import consolidate_linker_args
 from .errors import CompileError, DistutilsExecError, LibError, LinkError
 
 # XXX Things not currently handled:

From 1bf363bb3a8f17d99e6ae67d20e8dec801e3b984 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 20:41:52 -0400
Subject: [PATCH 0762/1761] Move compatibility modules into compat package.

---
 distutils/_modified.py                      |  2 +-
 distutils/compat/py38.py                    | 10 ++++++++++
 distutils/{py39compat.py => compat/py39.py} |  0
 distutils/py38compat.py                     |  8 --------
 distutils/sysconfig.py                      |  4 ++--
 distutils/util.py                           |  2 +-
 6 files changed, 14 insertions(+), 12 deletions(-)
 rename distutils/{py39compat.py => compat/py39.py} (100%)
 delete mode 100644 distutils/py38compat.py

diff --git a/distutils/_modified.py b/distutils/_modified.py
index 78485dc25e..9b375181e8 100644
--- a/distutils/_modified.py
+++ b/distutils/_modified.py
@@ -5,7 +5,7 @@
 
 from ._functools import splat
 from .errors import DistutilsFileError
-from .py39compat import zip_strict
+from .compat.py39 import zip_strict
 
 
 def _newer(source, target):
diff --git a/distutils/compat/py38.py b/distutils/compat/py38.py
index 0af3814017..79afc3b295 100644
--- a/distutils/compat/py38.py
+++ b/distutils/compat/py38.py
@@ -21,3 +21,13 @@ def removesuffix(self, suffix):
 
     def removeprefix(self, prefix):
         return self.removeprefix(prefix)
+
+
+def aix_platform(osname, version, release):
+    try:
+        import _aix_support
+
+        return _aix_support.aix_platform()
+    except ImportError:
+        pass
+    return f"{osname}-{version}.{release}"
diff --git a/distutils/py39compat.py b/distutils/compat/py39.py
similarity index 100%
rename from distutils/py39compat.py
rename to distutils/compat/py39.py
diff --git a/distutils/py38compat.py b/distutils/py38compat.py
deleted file mode 100644
index ab12119fa5..0000000000
--- a/distutils/py38compat.py
+++ /dev/null
@@ -1,8 +0,0 @@
-def aix_platform(osname, version, release):
-    try:
-        import _aix_support
-
-        return _aix_support.aix_platform()
-    except ImportError:
-        pass
-    return f"{osname}-{version}.{release}"
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 1a38e9fa79..514e06e34b 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -16,7 +16,7 @@
 import sys
 import sysconfig
 
-from . import py39compat
+from .compat import py39
 from ._functools import pass_none
 from .errors import DistutilsPlatformError
 
@@ -538,7 +538,7 @@ def get_config_vars(*args):
     global _config_vars
     if _config_vars is None:
         _config_vars = sysconfig.get_config_vars().copy()
-        py39compat.add_ext_suffix(_config_vars)
+        py39.add_ext_suffix(_config_vars)
 
     return [_config_vars.get(name) for name in args] if args else _config_vars
 
diff --git a/distutils/util.py b/distutils/util.py
index 9ee77721b3..2cdea14381 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -34,7 +34,7 @@ def get_host_platform():
         if os.name == "posix" and hasattr(os, 'uname'):
             osname, host, release, version, machine = os.uname()
             if osname[:3] == "aix":
-                from .py38compat import aix_platform
+                from .compat.py38 import aix_platform
 
                 return aix_platform(osname, version, release)
 

From 6baafbc13bab7f7ee2fa671f40d31d0d58a3e78d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 20:45:42 -0400
Subject: [PATCH 0763/1761] Move compatibility module into compat package.

---
 conftest.py                                       | 2 +-
 distutils/tests/compat/__init__.py                | 0
 distutils/tests/{py38compat.py => compat/py38.py} | 0
 distutils/tests/test_archive_util.py              | 2 +-
 distutils/tests/test_bdist_rpm.py                 | 2 +-
 distutils/tests/test_build_ext.py                 | 2 +-
 distutils/tests/test_extension.py                 | 2 +-
 distutils/tests/test_filelist.py                  | 2 +-
 distutils/tests/test_sdist.py                     | 2 +-
 distutils/tests/test_spawn.py                     | 2 +-
 distutils/tests/test_unixccompiler.py             | 2 +-
 11 files changed, 9 insertions(+), 9 deletions(-)
 create mode 100644 distutils/tests/compat/__init__.py
 rename distutils/tests/{py38compat.py => compat/py38.py} (100%)

diff --git a/conftest.py b/conftest.py
index 3ce3411535..4a3bbd3436 100644
--- a/conftest.py
+++ b/conftest.py
@@ -56,7 +56,7 @@ def _save_cwd():
 
 @pytest.fixture
 def distutils_managed_tempdir(request):
-    from distutils.tests import py38compat as os_helper
+    from distutils.tests.compat import py38 as os_helper
 
     self = request.instance
     self.tempdirs = []
diff --git a/distutils/tests/compat/__init__.py b/distutils/tests/compat/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/distutils/tests/py38compat.py b/distutils/tests/compat/py38.py
similarity index 100%
rename from distutils/tests/py38compat.py
rename to distutils/tests/compat/py38.py
diff --git a/distutils/tests/test_archive_util.py b/distutils/tests/test_archive_util.py
index 145cce915d..02af2aa0fd 100644
--- a/distutils/tests/test_archive_util.py
+++ b/distutils/tests/test_archive_util.py
@@ -23,7 +23,7 @@
 import path
 import pytest
 
-from .py38compat import check_warnings
+from .compat.py38 import check_warnings
 from .unix_compat import UID_0_SUPPORT, grp, pwd, require_uid_0, require_unix_id
 
 
diff --git a/distutils/tests/test_bdist_rpm.py b/distutils/tests/test_bdist_rpm.py
index 769623cbb8..a5cb42c334 100644
--- a/distutils/tests/test_bdist_rpm.py
+++ b/distutils/tests/test_bdist_rpm.py
@@ -9,7 +9,7 @@
 
 import pytest
 
-from .py38compat import requires_zlib
+from .compat.py38 import requires_zlib
 
 SETUP_PY = """\
 from distutils.core import setup
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index ca5d9d57cd..cc83e7fbc8 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -31,7 +31,7 @@
 import path
 import pytest
 
-from . import py38compat as import_helper
+from .compat import py38 as import_helper
 
 
 @pytest.fixture()
diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py
index 77bb147bfd..527a135506 100644
--- a/distutils/tests/test_extension.py
+++ b/distutils/tests/test_extension.py
@@ -6,7 +6,7 @@
 
 import pytest
 
-from .py38compat import check_warnings
+from .compat.py38 import check_warnings
 
 
 class TestExtension:
diff --git a/distutils/tests/test_filelist.py b/distutils/tests/test_filelist.py
index 6a379a6323..ec7e5cf363 100644
--- a/distutils/tests/test_filelist.py
+++ b/distutils/tests/test_filelist.py
@@ -10,7 +10,7 @@
 import jaraco.path
 import pytest
 
-from . import py38compat as os_helper
+from .compat import py38 as os_helper
 
 MANIFEST_IN = """\
 include ok
diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py
index 66a4194706..a85997f1d5 100644
--- a/distutils/tests/test_sdist.py
+++ b/distutils/tests/test_sdist.py
@@ -20,7 +20,7 @@
 import pytest
 from more_itertools import ilen
 
-from .py38compat import check_warnings
+from .compat.py38 import check_warnings
 from .unix_compat import grp, pwd, require_uid_0, require_unix_id
 
 SETUP_PY = """
diff --git a/distutils/tests/test_spawn.py b/distutils/tests/test_spawn.py
index abbac4c23f..7ec5862666 100644
--- a/distutils/tests/test_spawn.py
+++ b/distutils/tests/test_spawn.py
@@ -12,7 +12,7 @@
 import path
 import pytest
 
-from . import py38compat as os_helper
+from .compat import py38 as os_helper
 
 
 class TestSpawn(support.TempdirManager):
diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index 6f05fa6989..543aa20da9 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -12,7 +12,7 @@
 import pytest
 
 from . import support
-from .py38compat import EnvironmentVarGuard
+from .compat.py38 import EnvironmentVarGuard
 
 
 @pytest.fixture(autouse=True)

From 28d4a09ace8761e7ab6b9cc880f341844c350360 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 21:24:11 -0400
Subject: [PATCH 0764/1761] Fix return type to match implementation.

---
 distutils/compat/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/compat/__init__.py b/distutils/compat/__init__.py
index b1ee3fe8b0..4a7321fe70 100644
--- a/distutils/compat/__init__.py
+++ b/distutils/compat/__init__.py
@@ -3,7 +3,7 @@
 from .py38 import removeprefix
 
 
-def consolidate_linker_args(args: list[str]) -> str:
+def consolidate_linker_args(args: list[str]) -> list[str] | str:
     """
     Ensure the return value is a string for backward compatibility.
 

From 8a49e92fc332ee104eb38d8ac5466aac9a2f14aa Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 21:31:40 -0400
Subject: [PATCH 0765/1761] 🧎‍♀️ Genuflect to the types.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/compat/py38.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/compat/py38.py b/distutils/compat/py38.py
index 79afc3b295..2d44211147 100644
--- a/distutils/compat/py38.py
+++ b/distutils/compat/py38.py
@@ -25,7 +25,7 @@ def removeprefix(self, prefix):
 
 def aix_platform(osname, version, release):
     try:
-        import _aix_support
+        import _aix_support  # type: ignore
 
         return _aix_support.aix_platform()
     except ImportError:

From d746fff01063e8aa67efae8edbd388f62ff49554 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 13 Apr 2024 21:59:38 -0400
Subject: [PATCH 0766/1761] Oops. Meant 2025.

---
 distutils/compat/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/compat/__init__.py b/distutils/compat/__init__.py
index 4a7321fe70..e12534a32c 100644
--- a/distutils/compat/__init__.py
+++ b/distutils/compat/__init__.py
@@ -7,7 +7,7 @@ def consolidate_linker_args(args: list[str]) -> list[str] | str:
     """
     Ensure the return value is a string for backward compatibility.
 
-    Retain until at least 2024-04-31. See pypa/distutils#246
+    Retain until at least 2025-04-31. See pypa/distutils#246
     """
 
     if not all(arg.startswith('-Wl,') for arg in args):

From d8ccaf18f4eee7094afa1fd10aa0a702a327bc9e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Apr 2024 16:38:01 -0400
Subject: [PATCH 0767/1761] Migrated config to pyproject.toml using
 jaraco.develop.migrate-config and ini2toml.

---
 pyproject.toml | 54 +++++++++++++++++++++++++++++++++++++++++++++++++-
 setup.cfg      | 50 ----------------------------------------------
 2 files changed, 53 insertions(+), 51 deletions(-)
 delete mode 100644 setup.cfg

diff --git a/pyproject.toml b/pyproject.toml
index 1faf0ec259..738546e400 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,59 @@
 [build-system]
-requires = ["setuptools>=56", "setuptools_scm[toml]>=3.4.1"]
+requires = ["setuptools>=61.2", "setuptools_scm[toml]>=3.4.1"]
 build-backend = "setuptools.build_meta"
 
+[project]
+name = "distutils"
+authors = [
+	{ name = "Jason R. Coombs", email = "jaraco@jaraco.com" },
+]
+description = "Distribution utilities formerly from standard library"
+readme = "README.rst"
+classifiers = [
+	"Development Status :: 5 - Production/Stable",
+	"Intended Audience :: Developers",
+	"License :: OSI Approved :: MIT License",
+	"Programming Language :: Python :: 3",
+	"Programming Language :: Python :: 3 :: Only",
+]
+requires-python = ">=3.8"
+dependencies = []
+dynamic = ["version"]
+
+[project.urls]
+Homepage = "https://github.com/pypa/distutils"
+
+[project.optional-dependencies]
+testing = [
+	# upstream
+	"pytest >= 6, != 8.1.1",
+	"pytest-checkdocs >= 2.4",
+	"pytest-cov",
+	"pytest-mypy",
+	"pytest-enabler >= 2.2",
+	"pytest-ruff >= 0.2.1",
+
+	# local
+	"pytest >= 7.4.3", # 186
+	"jaraco.envs>=2.4",
+	"jaraco.path",
+	"jaraco.text",
+	"path >= 10.6",
+	"docutils",
+	"pyfakefs",
+	"more_itertools",
+]
+docs = [
+	# upstream
+	"sphinx >= 3.5",
+	"jaraco.packaging >= 9.3",
+	"rst.linker >= 1.9",
+	"furo",
+	"sphinx-lint",
+
+	# local
+]
+
 [tool.setuptools_scm]
 
 [tool.pytest-enabler.mypy]
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index fc6d67ea46..0000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,50 +0,0 @@
-[metadata]
-name = distutils
-author = Jason R. Coombs
-author_email = jaraco@jaraco.com
-description = Distribution utilities formerly from standard library
-long_description = file:README.rst
-url = https://github.com/pypa/distutils
-classifiers =
-	Development Status :: 5 - Production/Stable
-	Intended Audience :: Developers
-	License :: OSI Approved :: MIT License
-	Programming Language :: Python :: 3
-	Programming Language :: Python :: 3 :: Only
-
-[options]
-include_package_data = true
-python_requires = >=3.8
-install_requires =
-
-[options.extras_require]
-testing =
-	# upstream
-	pytest >= 6, != 8.1.1
-	pytest-checkdocs >= 2.4
-	pytest-cov
-	pytest-mypy
-	pytest-enabler >= 2.2
-	pytest-ruff >= 0.2.1
-
-	# local
-	pytest >= 7.4.3  #186
-	jaraco.envs>=2.4
-	jaraco.path
-	jaraco.text
-	path >= 10.6
-	docutils
-	pyfakefs
-	more_itertools
-
-docs =
-	# upstream
-	sphinx >= 3.5
-	jaraco.packaging >= 9.3
-	rst.linker >= 1.9
-	furo
-	sphinx-lint
-
-	# local
-
-[options.entry_points]

From bce1a225175b57528cc788eba78e5c07b03d970a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Apr 2024 15:44:11 -0400
Subject: [PATCH 0768/1761] Extract _make_executable for TestSpawn.

---
 distutils/tests/test_spawn.py | 20 ++++++++++++--------
 1 file changed, 12 insertions(+), 8 deletions(-)

diff --git a/distutils/tests/test_spawn.py b/distutils/tests/test_spawn.py
index 7ec5862666..1f623837db 100644
--- a/distutils/tests/test_spawn.py
+++ b/distutils/tests/test_spawn.py
@@ -45,14 +45,9 @@ def test_spawn(self):
         spawn([exe])  # should work without any error
 
     def test_find_executable(self, tmp_path):
-        program_noeext = 'program'
-        # Give the temporary program an ".exe" suffix for all.
-        # It's needed on Windows and not harmful on other platforms.
-        program = program_noeext + ".exe"
-
-        program_path = tmp_path / program
-        program_path.write_text("", encoding='utf-8')
-        program_path.chmod(stat.S_IXUSR)
+        program_path = self._make_executable(tmp_path, '.exe')
+        program = program_path.name
+        program_noeext = program_path.with_suffix('').name
         filename = str(program_path)
         tmp_dir = path.Path(tmp_path)
 
@@ -121,6 +116,15 @@ def test_find_executable(self, tmp_path):
                 rv = find_executable(program)
                 assert rv == filename
 
+    @staticmethod
+    def _make_executable(tmp_path, ext):
+        # Give the temporary program a suffix regardless of platform.
+        # It's needed on Windows and not harmful on others.
+        program = tmp_path.joinpath('program').with_suffix(ext)
+        program.write_text("", encoding='utf-8')
+        program.chmod(stat.S_IXUSR)
+        return program
+
     def test_spawn_missing_exe(self):
         with pytest.raises(DistutilsExecError) as ctx:
             spawn(['does-not-exist'])

From 5d2a97eae704496195d5a87f9fb5e913a3fa32f5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Apr 2024 18:10:02 -0400
Subject: [PATCH 0769/1761] Move and reword comment for brevity and clarity.

---
 distutils/spawn.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index 046b5bbb82..76a2dc3f42 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -83,14 +83,13 @@ def find_executable(executable, path=None):
 
     if path is None:
         path = os.environ.get('PATH', None)
+        # bpo-35755: Don't fall through if PATH is the empty string
         if path is None:
             try:
                 path = os.confstr("CS_PATH")
             except (AttributeError, ValueError):
                 # os.confstr() or CS_PATH is not available
                 path = os.defpath
-        # bpo-35755: Don't use os.defpath if the PATH environment variable is
-        # set to an empty string
 
     # PATH='' doesn't match, whereas PATH=':' looks in the current directory
     if not path:

From 7babb5d5bfd477500f41ac5eecbee44591aed3e3 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Apr 2024 21:52:29 -0400
Subject: [PATCH 0770/1761] Remove C901 exclusion; code is now compliant.

---
 distutils/spawn.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index 76a2dc3f42..3927c1fe9a 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -15,7 +15,7 @@
 from .errors import DistutilsExecError
 
 
-def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):  # noqa: C901
+def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):
     """Run another program, specified as a command list 'cmd', in a new process.
 
     'cmd' is just the argument list for the new process, ie.

From 2db55275fff5512961a2c40e6f211c418096d4ef Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Apr 2024 21:54:38 -0400
Subject: [PATCH 0771/1761] Remove apparently unnecessary cast to list.

---
 distutils/spawn.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index 3927c1fe9a..a321c5f04c 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -31,10 +31,6 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):
     Raise DistutilsExecError if running the program fails in any way; just
     return on success.
     """
-    # cmd is documented as a list, but just in case some code passes a tuple
-    # in, protect our %-formatting code against horrible death
-    cmd = list(cmd)
-
     log.info(subprocess.list2cmdline(cmd))
     if dry_run:
         return

From 671f913695aa82f6729e3ba1482b395123db393d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Apr 2024 21:56:20 -0400
Subject: [PATCH 0772/1761] Use proper boolean literals.

---
 distutils/spawn.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index a321c5f04c..a7e21d2e88 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -15,7 +15,7 @@
 from .errors import DistutilsExecError
 
 
-def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):
+def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
     """Run another program, specified as a command list 'cmd', in a new process.
 
     'cmd' is just the argument list for the new process, ie.

From ffdb32da4165cf8a0628212e547b1c3eb2e613f3 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Apr 2024 22:00:36 -0400
Subject: [PATCH 0773/1761] Replace Popen with check_call.
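
A minimal sketch of the idiom adopted here (the helper name is
hypothetical): check_call raises on a nonzero exit status, so the
manual wait()/returncode bookkeeping goes away:

    import subprocess

    def run(cmd):
        # hypothetical helper: check_call raises CalledProcessError
        # instead of returning an exit code to inspect
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as err:
            raise RuntimeError(
                f"command {cmd!r} failed with exit code {err.returncode}"
            ) from err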

---
 distutils/spawn.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index a7e21d2e88..0d86552eed 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -50,18 +50,17 @@ def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
             env[MACOSX_VERSION_VAR] = macosx_target_ver
 
     try:
-        proc = subprocess.Popen(cmd, env=env)
-        proc.wait()
-        exitcode = proc.returncode
+        subprocess.check_call(cmd, env=env)
     except OSError as exc:
         if not DEBUG:
             cmd = cmd[0]
         raise DistutilsExecError(f"command {cmd!r} failed: {exc.args[-1]}") from exc
-
-    if exitcode:
+    except subprocess.CalledProcessError as err:
         if not DEBUG:
             cmd = cmd[0]
-        raise DistutilsExecError(f"command {cmd!r} failed with exit code {exitcode}")
+        raise DistutilsExecError(
+            f"command {cmd!r} failed with exit code {err.returncode}"
+        ) from err
 
 
 def find_executable(executable, path=None):

From a4a1f23e91e7e0bdab0772fb191bd91924032fe3 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Apr 2024 22:03:14 -0400
Subject: [PATCH 0774/1761] Extract function for _debug wrapper.

---
 distutils/spawn.py | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index 0d86552eed..760506550d 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -15,6 +15,13 @@
 from .errors import DistutilsExecError
 
 
+def _debug(cmd):
+    """
+    Render a subprocess command differently depending on DEBUG.
+    """
+    return cmd if DEBUG else cmd[0]
+
+
 def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
     """Run another program, specified as a command list 'cmd', in a new process.
 
@@ -52,14 +59,12 @@ def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
     try:
         subprocess.check_call(cmd, env=env)
     except OSError as exc:
-        if not DEBUG:
-            cmd = cmd[0]
-        raise DistutilsExecError(f"command {cmd!r} failed: {exc.args[-1]}") from exc
+        raise DistutilsExecError(
+            f"command {_debug(cmd)!r} failed: {exc.args[-1]}"
+        ) from exc
     except subprocess.CalledProcessError as err:
-        if not DEBUG:
-            cmd = cmd[0]
         raise DistutilsExecError(
-            f"command {cmd!r} failed with exit code {err.returncode}"
+            f"command {_debug(cmd)!r} failed with exit code {err.returncode}"
         ) from err
 
 

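The effect of the extracted helper at the call sites: with DEBUG unset, error messages name only the executable; with DEBUG set (via the DISTUTILS_DEBUG environment variable), they carry the full argv. A standalone sketch, with a parameter standing in for the module-global distutils.debug.DEBUG:

    def _debug(cmd, debug):
        # Mirror of the patched helper; 'debug' replaces the module global.
        return cmd if debug else cmd[0]

    cmd = ['gcc', '-c', 'foo.c']
    print(f"command {_debug(cmd, False)!r} failed")  # command 'gcc' failed
    print(f"command {_debug(cmd, True)!r} failed")   # full argv shown
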
From 0bb80360811ce814d14bc8edd9073f0da27b5353 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Apr 2024 22:23:19 -0400
Subject: [PATCH 0775/1761] Extract function to inject macos version.

---
 distutils/spawn.py | 31 +++++++++++++++++++++----------
 1 file changed, 21 insertions(+), 10 deletions(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index 760506550d..081e254910 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -6,10 +6,15 @@
 executable name.
 """
 
+from __future__ import annotations
+
 import os
+import platform
 import subprocess
 import sys
 
+from typing import Mapping
+
 from ._log import log
 from .debug import DEBUG
 from .errors import DistutilsExecError
@@ -22,6 +27,21 @@ def _debug(cmd):
     return cmd if DEBUG else cmd[0]
 
 
+def _inject_macos_ver(env: Mapping[str, str] | None) -> Mapping[str, str] | None:
+    if platform.system() != 'Darwin':
+        return env
+
+    from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver
+
+    target_ver = get_macosx_target_ver()
+    update = {MACOSX_VERSION_VAR: target_ver} if target_ver else {}
+    return {**_resolve(env), **update}
+
+
+def _resolve(env: Mapping[str, str] | None) -> Mapping[str, str]:
+    return os.environ if env is None else env
+
+
 def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
     """Run another program, specified as a command list 'cmd', in a new process.
 
@@ -47,17 +67,8 @@ def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
         if executable is not None:
             cmd[0] = executable
 
-    env = env if env is not None else dict(os.environ)
-
-    if sys.platform == 'darwin':
-        from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver
-
-        macosx_target_ver = get_macosx_target_ver()
-        if macosx_target_ver:
-            env[MACOSX_VERSION_VAR] = macosx_target_ver
-
     try:
-        subprocess.check_call(cmd, env=env)
+        subprocess.check_call(cmd, env=_inject_macos_ver(env))
     except OSError as exc:
         raise DistutilsExecError(
             f"command {_debug(cmd)!r} failed: {exc.args[-1]}"

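The merge semantics of the new helpers, sketched standalone (the Darwin platform check is omitted, and the target version is a made-up example rather than a real get_macosx_target_ver() result): a None env resolves to the live os.environ, and the deployment target is layered on top without mutating the caller's mapping.

    import os

    def _resolve(env):
        return os.environ if env is None else env

    def _inject(env, target_ver='11.0'):  # '11.0' is illustrative only
        update = {'MACOSX_DEPLOYMENT_TARGET': target_ver} if target_ver else {}
        return {**_resolve(env), **update}

    merged = _inject({'PATH': '/usr/bin'})
    assert merged == {'PATH': '/usr/bin', 'MACOSX_DEPLOYMENT_TARGET': '11.0'}
    assert _inject(None)['MACOSX_DEPLOYMENT_TARGET'] == '11.0'
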
From de48bf21fd3029e75b3ab59163ace3b231565644 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 21 Apr 2024 02:42:25 -0400
Subject: [PATCH 0776/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/_modified.py | 2 +-
 distutils/spawn.py     | 1 -
 distutils/sysconfig.py | 2 +-
 3 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/distutils/_modified.py b/distutils/_modified.py
index 9b375181e8..07b2ead0ed 100644
--- a/distutils/_modified.py
+++ b/distutils/_modified.py
@@ -4,8 +4,8 @@
 import os.path
 
 from ._functools import splat
-from .errors import DistutilsFileError
 from .compat.py39 import zip_strict
+from .errors import DistutilsFileError
 
 
 def _newer(source, target):
diff --git a/distutils/spawn.py b/distutils/spawn.py
index 081e254910..234d5cd11f 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -12,7 +12,6 @@
 import platform
 import subprocess
 import sys
-
 from typing import Mapping
 
 from ._log import log
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 514e06e34b..4ed51c1f03 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -16,8 +16,8 @@
 import sys
 import sysconfig
 
-from .compat import py39
 from ._functools import pass_none
+from .compat import py39
 from .errors import DistutilsPlatformError
 
 IS_PYPY = '__pypy__' in sys.builtin_module_names

From 213fae9b0d7b63b00338d5b5ac8f25a220ca04ed Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 29 May 2024 12:24:29 -0400
Subject: [PATCH 0777/1761] Use mkstemp unconditionally. mktemp has been
 deprecated since Python 2.3.

---
 distutils/util.py | 15 +++------------
 1 file changed, 3 insertions(+), 12 deletions(-)

diff --git a/distutils/util.py b/distutils/util.py
index 2cdea14381..0a8b3d690c 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -12,6 +12,7 @@
 import subprocess
 import sys
 import sysconfig
+import tempfile
 
 from ._log import log
 from ._modified import newer
@@ -405,20 +406,10 @@ def byte_compile(  # noqa: C901
     # "Indirect" byte-compilation: write a temporary script and then
     # run it with the appropriate flags.
     if not direct:
-        try:
-            from tempfile import mkstemp
-
-            (script_fd, script_name) = mkstemp(".py")
-        except ImportError:
-            from tempfile import mktemp
-
-            (script_fd, script_name) = None, mktemp(".py")
+        (script_fd, script_name) = tempfile.mkstemp(".py")
         log.info("writing byte-compilation script '%s'", script_name)
         if not dry_run:
-            if script_fd is not None:
-                script = os.fdopen(script_fd, "w", encoding='utf-8')
-            else:  # pragma: no cover
-                script = open(script_name, "w", encoding='utf-8')
+            script = os.fdopen(script_fd, "w", encoding='utf-8')
 
             with script:
                 script.write(

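The surviving branch relies on the idiom below: mkstemp creates the file securely and hands back an already-open OS-level descriptor, and os.fdopen wraps that descriptor instead of reopening the path by name. A minimal sketch of the same pattern:

    import os
    import tempfile

    script_fd, script_name = tempfile.mkstemp('.py')
    with os.fdopen(script_fd, 'w', encoding='utf-8') as script:
        script.write("print('byte-compilation script')\n")
    print('wrote', script_name)
    os.remove(script_name)  # the temporary file is the caller's to clean up
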
From b93a0ba42053bf8fe5f19941cbe4987160e584c8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Jun 2024 13:46:47 -0400
Subject: [PATCH 0778/1761] Pin to pytest<8.1.

Closes pypa/distutils#259
Ref pytest-dev/pytest#12490
---
 pyproject.toml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index cda381abb7..30bfd7d730 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -42,6 +42,9 @@ test = [
 	"docutils",
 	"pyfakefs",
 	"more_itertools",
+
+	# workaround for pytest-dev/pytest#12490
+	"pytest < 8.1; python_version < '3.12'",
 ]
 doc = [
 	# upstream

From 01e5f2d3647658e428462063010fc366bfd57dcc Mon Sep 17 00:00:00 2001
From: Stephen Brennan 
Date: Tue, 25 Jun 2024 10:27:30 -0700
Subject: [PATCH 0779/1761] Use a separate build directory for free-threading

Signed-off-by: Stephen Brennan 
---
 distutils/command/build.py    | 5 +++++
 distutils/tests/test_build.py | 3 +++
 2 files changed, 8 insertions(+)

diff --git a/distutils/command/build.py b/distutils/command/build.py
index d18ed503e3..766a2ab191 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -4,6 +4,7 @@
 
 import os
 import sys
+import sysconfig
 
 from ..core import Command
 from ..errors import DistutilsOptionError
@@ -81,6 +82,10 @@ def finalize_options(self):  # noqa: C901
 
         plat_specifier = f".{self.plat_name}-{sys.implementation.cache_tag}"
 
+        # Python 3.13+ with --disable-gil shouldn't share build directories
+        if sysconfig.get_config_var('Py_GIL_DISABLED'):
+            plat_specifier += 't'
+
         # Make it so Python 2.x and Python 2.x with --with-pydebug don't
         # share the same build directories. Doing so confuses the build
         # process for C modules
diff --git a/distutils/tests/test_build.py b/distutils/tests/test_build.py
index 25483ad76b..8fb1bc1b77 100644
--- a/distutils/tests/test_build.py
+++ b/distutils/tests/test_build.py
@@ -4,6 +4,7 @@
 import sys
 from distutils.command.build import build
 from distutils.tests import support
+from sysconfig import get_config_var
 from sysconfig import get_platform
 
 
@@ -24,6 +25,8 @@ def test_finalize_options(self):
         # examples:
         #   build/lib.macosx-10.3-i386-cpython39
         plat_spec = f'.{cmd.plat_name}-{sys.implementation.cache_tag}'
+        if get_config_var('Py_GIL_DISABLED'):
+            plat_spec += 't'
         if hasattr(sys, 'gettotalrefcount'):
             assert cmd.build_platlib.endswith('-pydebug')
             plat_spec += '-pydebug'

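How the pieces compose into a build directory name, sketched standalone (sysconfig.get_platform() stands in for the command's plat_name option; the sample output assumes a free-threaded CPython 3.13 on Linux):

    import sys
    import sysconfig

    plat_specifier = f'.{sysconfig.get_platform()}-{sys.implementation.cache_tag}'
    if sysconfig.get_config_var('Py_GIL_DISABLED'):
        plat_specifier += 't'
    if hasattr(sys, 'gettotalrefcount'):  # pydebug builds add their own suffix
        plat_specifier += '-pydebug'
    print('build/lib' + plat_specifier)
    # e.g. build/lib.linux-x86_64-cpython-313t

The trailing 't' mirrors the ABI-flag suffix CPython itself uses for free-threaded builds (python3.13t).
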
From 314e3f187014000efc321cc1cec239620538426a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Apr 2024 22:58:10 -0400
Subject: [PATCH 0780/1761] Deprecate find_executable.

---
 distutils/spawn.py                   | 10 +++++++---
 distutils/tests/__init__.py          |  5 +++--
 distutils/tests/test_archive_util.py |  4 ++--
 distutils/tests/test_bdist_rpm.py    | 10 +++++-----
 distutils/tests/test_sdist.py        | 10 +++++-----
 pytest.ini                           |  5 ++++-
 6 files changed, 26 insertions(+), 18 deletions(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index 234d5cd11f..429d1ccb2e 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -2,16 +2,17 @@
 
 Provides the 'spawn()' function, a front-end to various platform-
 specific functions for launching another program in a sub-process.
-Also provides the 'find_executable()' to search the path for a given
-executable name.
 """
 
 from __future__ import annotations
 
 import os
 import platform
+import shutil
 import subprocess
 import sys
+import warnings
+
 from typing import Mapping
 
 from ._log import log
@@ -62,7 +63,7 @@ def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
         return
 
     if search_path:
-        executable = find_executable(cmd[0])
+        executable = shutil.which(cmd[0])
         if executable is not None:
             cmd[0] = executable
 
@@ -84,6 +85,9 @@ def find_executable(executable, path=None):
     A string listing directories separated by 'os.pathsep'; defaults to
     os.environ['PATH'].  Returns the complete filename or None if not found.
     """
+    warnings.warn(
+        'Use shutil.which instead of find_executable', DeprecationWarning, stacklevel=2
+    )
     _, ext = os.path.splitext(executable)
     if (sys.platform == 'win32') and (ext != '.exe'):
         executable = executable + '.exe'
diff --git a/distutils/tests/__init__.py b/distutils/tests/__init__.py
index 20dfe8f19b..16b68b4e24 100644
--- a/distutils/tests/__init__.py
+++ b/distutils/tests/__init__.py
@@ -7,6 +7,7 @@
 by import rather than matching pre-defined names.
 """
 
+import shutil
 from typing import Sequence
 
 
@@ -19,7 +20,7 @@ def missing_compiler_executable(cmd_names: Sequence[str] = []):  # pragma: no co
     missing.
 
     """
-    from distutils import ccompiler, errors, spawn, sysconfig
+    from distutils import ccompiler, errors, sysconfig
 
     compiler = ccompiler.new_compiler()
     sysconfig.customize_compiler(compiler)
@@ -37,5 +38,5 @@ def missing_compiler_executable(cmd_names: Sequence[str] = []):  # pragma: no co
             assert cmd is not None, "the '%s' executable is not configured" % name
         elif not cmd:
             continue
-        if spawn.find_executable(cmd[0]) is None:
+        if shutil.which(cmd[0]) is None:
             return cmd[0]
diff --git a/distutils/tests/test_archive_util.py b/distutils/tests/test_archive_util.py
index 02af2aa0fd..abbcd36cb0 100644
--- a/distutils/tests/test_archive_util.py
+++ b/distutils/tests/test_archive_util.py
@@ -135,7 +135,7 @@ def _create_files(self):
         return tmpdir
 
     @pytest.mark.usefixtures('needs_zlib')
-    @pytest.mark.skipif("not (find_executable('tar') and find_executable('gzip'))")
+    @pytest.mark.skipif("not (shutil.which('tar') and shutil.which('gzip'))")
     def test_tarfile_vs_tar(self):
         tmpdir = self._create_files()
         tmpdir2 = self.mkdtemp()
@@ -190,7 +190,7 @@ def test_tarfile_vs_tar(self):
         tarball = base_name + '.tar'
         assert os.path.exists(tarball)
 
-    @pytest.mark.skipif("not find_executable('compress')")
+    @pytest.mark.skipif("not shutil.which('compress')")
     def test_compress_deprecated(self):
         tmpdir = self._create_files()
         base_name = os.path.join(self.mkdtemp(), 'archive')
diff --git a/distutils/tests/test_bdist_rpm.py b/distutils/tests/test_bdist_rpm.py
index a5cb42c334..28edda4dd0 100644
--- a/distutils/tests/test_bdist_rpm.py
+++ b/distutils/tests/test_bdist_rpm.py
@@ -1,10 +1,10 @@
 """Tests for distutils.command.bdist_rpm."""
 
 import os
+import shutil  # noqa: F401
 import sys
 from distutils.command.bdist_rpm import bdist_rpm
 from distutils.core import Distribution
-from distutils.spawn import find_executable  # noqa: F401
 from distutils.tests import support
 
 import pytest
@@ -43,8 +43,8 @@ class TestBuildRpm(
 ):
     @mac_woes
     @requires_zlib()
-    @pytest.mark.skipif("not find_executable('rpm')")
-    @pytest.mark.skipif("not find_executable('rpmbuild')")
+    @pytest.mark.skipif("not shutil.which('rpm')")
+    @pytest.mark.skipif("not shutil.which('rpmbuild')")
     def test_quiet(self):
         # let's create a package
         tmp_dir = self.mkdtemp()
@@ -86,8 +86,8 @@ def test_quiet(self):
     @mac_woes
     @requires_zlib()
     # https://bugs.python.org/issue1533164
-    @pytest.mark.skipif("not find_executable('rpm')")
-    @pytest.mark.skipif("not find_executable('rpmbuild')")
+    @pytest.mark.skipif("not shutil.which('rpm')")
+    @pytest.mark.skipif("not shutil.which('rpmbuild')")
     def test_no_optimize_flag(self):
         # let's create a package that breaks bdist_rpm
         tmp_dir = self.mkdtemp()
diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py
index a85997f1d5..6a1aa51862 100644
--- a/distutils/tests/test_sdist.py
+++ b/distutils/tests/test_sdist.py
@@ -2,6 +2,7 @@
 
 import os
 import pathlib
+import shutil  # noqa: F401
 import tarfile
 import warnings
 import zipfile
@@ -10,7 +11,6 @@
 from distutils.core import Distribution
 from distutils.errors import DistutilsOptionError
 from distutils.filelist import FileList
-from distutils.spawn import find_executable  # noqa: F401
 from distutils.tests.test_config import BasePyPIRCCommandTestCase
 from os.path import join
 from textwrap import dedent
@@ -137,8 +137,8 @@ def test_prune_file_list(self):
         assert sorted(content) == ['fake-1.0/' + x for x in expected]
 
     @pytest.mark.usefixtures('needs_zlib')
-    @pytest.mark.skipif("not find_executable('tar')")
-    @pytest.mark.skipif("not find_executable('gzip')")
+    @pytest.mark.skipif("not shutil.which('tar')")
+    @pytest.mark.skipif("not shutil.which('gzip')")
     def test_make_distribution(self):
         # now building a sdist
         dist, cmd = self.get_cmd()
@@ -434,8 +434,8 @@ def test_manual_manifest(self):
     @pytest.mark.usefixtures('needs_zlib')
     @require_unix_id
     @require_uid_0
-    @pytest.mark.skipif("not find_executable('tar')")
-    @pytest.mark.skipif("not find_executable('gzip')")
+    @pytest.mark.skipif("not shutil.which('tar')")
+    @pytest.mark.skipif("not shutil.which('gzip')")
     def test_make_distribution_owner_group(self):
         # now building a sdist
         dist, cmd = self.get_cmd()
diff --git a/pytest.ini b/pytest.ini
index f9b1d1fcd2..b53e0d93a1 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -35,5 +35,8 @@ filterwarnings=
 	# suppress warnings in deprecated compilers
 	ignore:(bcpp|msvc9?)compiler is deprecated
 
-	# suppress well know deprecation warning
+	# suppress well-known deprecation warning
 	ignore:distutils.log.Log is deprecated
+
+	# suppress known deprecation
+	ignore:Use shutil.which instead of find_executable:DeprecationWarning

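For callers migrating off the shim: shutil.which performs the same PATH search (including the Windows executable-extension handling the old code did by hand) and returns None when nothing matches, which is exactly the truthiness the skipif guards rely on. A short sketch; the distutils import assumes this patched tree is importable:

    import shutil
    import warnings

    print(shutil.which('tar'))  # full path, or None if absent

    # The shim still works, but now announces its deprecation.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        from distutils.spawn import find_executable
        find_executable('tar')
    assert any(w.category is DeprecationWarning for w in caught)
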
From 8ecebe23ff278b702e6e9298849e68c48f6ddfaa Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 27 Jun 2024 04:33:57 -0400
Subject: [PATCH 0781/1761] Apply canonicalize_version with
 strip_trailing_zero=False.

---
 distutils/dist.py | 22 +++++++++++++++++++---
 1 file changed, 19 insertions(+), 3 deletions(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index 9f570593e0..d3a3f23a45 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -13,7 +13,7 @@
 from collections.abc import Iterable
 from email import message_from_file
 
-from ._vendor.packaging.utils import canonicalize_name
+from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
 
 try:
     import warnings
@@ -1191,9 +1191,25 @@ def get_version(self):
         return self.version or "0.0.0"
 
     def get_fullname(self):
+        return self._fullname(self.get_name(), self.get_version())
+
+    @staticmethod
+    def _fullname(name: str, version: str) -> str:
+        """
+        >>> DistributionMetadata._fullname('setup.tools', '1.0-2')
+        'setup_tools-1.0.post2'
+        >>> DistributionMetadata._fullname('setup-tools', '1.2post2')
+        'setup_tools-1.2.post2'
+        >>> DistributionMetadata._fullname('setup-tools', '1.0-r2')
+        'setup_tools-1.0.post2'
+        >>> DistributionMetadata._fullname('setup.tools', '1.0.post')
+        'setup_tools-1.0.post0'
+        >>> DistributionMetadata._fullname('setup.tools', '1.0+ubuntu-1')
+        'setup_tools-1.0+ubuntu.1'
+        """
         return "{}-{}".format(
-            canonicalize_name(self.get_name()).replace('-', '_'),
-            self.get_version(),
+            canonicalize_name(name).replace('-', '_'),
+            canonicalize_version(version, strip_trailing_zero=False),
         )
 
     def get_author(self):

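The reason for strip_trailing_zero=False: by default canonicalize_version collapses trailing zeros in the release segment, which would silently rename existing artifacts. A sketch of the difference (assumes a packaging release recent enough to accept the keyword):

    from packaging.utils import canonicalize_name, canonicalize_version

    print(canonicalize_version('1.0'))                               # '1'
    print(canonicalize_version('1.0', strip_trailing_zero=False))    # '1.0'
    print(canonicalize_version('1.0-2', strip_trailing_zero=False))  # '1.0.post2'
    print(canonicalize_name('setup.tools').replace('-', '_'))        # 'setup_tools'
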
From a68cac3c7e847138ed0b80e669faf0025117511a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 18 May 2024 18:57:12 +0200
Subject: [PATCH 0782/1761] Apply ruff rule RUF100

RUF100 Unused `noqa` directive
---
 distutils/bcppcompiler.py             | 2 +-
 distutils/command/__init__.py         | 2 +-
 distutils/msvccompiler.py             | 2 +-
 distutils/tests/test_unixccompiler.py | 2 +-
 ruff.toml                             | 1 +
 5 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py
index c1341e43cb..d1af267d29 100644
--- a/distutils/bcppcompiler.py
+++ b/distutils/bcppcompiler.py
@@ -84,7 +84,7 @@ def __init__(self, verbose=0, dry_run=0, force=0):
 
     # -- Worker methods ------------------------------------------------
 
-    def compile(  # noqa: C901
+    def compile(
         self,
         sources,
         output_dir=None,
diff --git a/distutils/command/__init__.py b/distutils/command/__init__.py
index 028dcfa0fc..1e8fbe60c2 100644
--- a/distutils/command/__init__.py
+++ b/distutils/command/__init__.py
@@ -3,7 +3,7 @@
 Package containing implementation of all the standard Distutils
 commands."""
 
-__all__ = [  # noqa: F822
+__all__ = [
     'build',
     'build_py',
     'build_ext',
diff --git a/distutils/msvccompiler.py b/distutils/msvccompiler.py
index ac8b68c08c..05ef01facf 100644
--- a/distutils/msvccompiler.py
+++ b/distutils/msvccompiler.py
@@ -684,6 +684,6 @@ def set_path_env_var(self, name):
     OldMSVCCompiler = MSVCCompiler
     # get_build_architecture not really relevant now we support cross-compile
     from distutils.msvc9compiler import (
-        MacroExpander,  # noqa: F811
+        MacroExpander,
         MSVCCompiler,
     )
diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index 543aa20da9..d2c88e9116 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -32,7 +32,7 @@ def rpath_foo(self):
 
 
 class TestUnixCCompiler(support.TempdirManager):
-    @pytest.mark.skipif('platform.system == "Windows"')  # noqa: C901
+    @pytest.mark.skipif('platform.system == "Windows"')
     def test_runtime_libdir_option(self):  # noqa: C901
         # Issue #5900; GitHub Issue #37
         #
diff --git a/ruff.toml b/ruff.toml
index 70612985a7..f2c67aeb86 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,6 +1,7 @@
 [lint]
 extend-select = [
 	"C901",
+	"RUF100",
 	"W",
 ]
 ignore = [

From df0502f7837f682e6157b597e0e5526dda128850 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 27 Jun 2024 04:46:29 -0400
Subject: [PATCH 0783/1761] Move local ruff rules into a local section.

---
 ruff.toml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/ruff.toml b/ruff.toml
index f2c67aeb86..3550acad03 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,8 +1,10 @@
 [lint]
 extend-select = [
 	"C901",
-	"RUF100",
 	"W",
+
+	# local
+	"RUF100",
 ]
 ignore = [
 	# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules

From 54858be557f1abbe2dda156197f169db7649bb8b Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 18 May 2024 19:29:44 +0200
Subject: [PATCH 0784/1761] Apply ruff rule RUF010

RUF010 Use explicit conversion flag
---
 distutils/command/bdist_dumb.py | 2 +-
 distutils/command/bdist_rpm.py  | 2 +-
 distutils/dist.py               | 2 +-
 distutils/version.py            | 6 +++---
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index 06502d201e..b103516de6 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -116,7 +116,7 @@ def run(self):
             ):
                 raise DistutilsPlatformError(
                     "can't make a dumb built distribution where "
-                    f"base and platbase are different ({repr(install.install_base)}, {repr(install.install_platbase)})"
+                    f"base and platbase are different ({install.install_base!r}, {install.install_platbase!r})"
                 )
             else:
                 archive_root = os.path.join(
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index 649968a5eb..64a29a2401 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -370,7 +370,7 @@ def run(self):  # noqa: C901
 
             status = out.close()
             if status:
-                raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
+                raise DistutilsExecError("Failed to execute: %r" % q_cmd)
 
         finally:
             out.close()
diff --git a/distutils/dist.py b/distutils/dist.py
index 668ce7eb0a..f1f8db9a73 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -262,7 +262,7 @@ def __init__(self, attrs=None):  # noqa: C901
                 elif hasattr(self, key):
                     setattr(self, key, val)
                 else:
-                    msg = "Unknown distribution option: %s" % repr(key)
+                    msg = "Unknown distribution option: %r" % key
                     warnings.warn(msg)
 
         # no-user-cfg is handled before other command line args
diff --git a/distutils/version.py b/distutils/version.py
index 806d233ca5..942b56bf94 100644
--- a/distutils/version.py
+++ b/distutils/version.py
@@ -60,7 +60,7 @@ def __init__(self, vstring=None):
         )
 
     def __repr__(self):
-        return f"{self.__class__.__name__} ('{str(self)}')"
+        return f"{self.__class__.__name__} ('{self}')"
 
     def __eq__(self, other):
         c = self._cmp(other)
@@ -153,7 +153,7 @@ class StrictVersion(Version):
     def parse(self, vstring):
         match = self.version_re.match(vstring)
         if not match:
-            raise ValueError("invalid version number '%s'" % vstring)
+            raise ValueError(f"invalid version number '{vstring}'")
 
         (major, minor, patch, prerelease, prerelease_num) = match.group(1, 2, 4, 5, 6)
 
@@ -330,7 +330,7 @@ def __str__(self):
         return self.vstring
 
     def __repr__(self):
-        return "LooseVersion ('%s')" % str(self)
+        return f"LooseVersion ('{self}')"
 
     def _cmp(self, other):
         if isinstance(other, str):

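What the explicit conversion flag buys: f'{x!r}' is the built-in spelling of f'{repr(x)}' (and !s of str()), handled by the formatting machinery without a runtime lookup of the repr builtin. The output is identical:

    cmd = ['rpm', '-q']
    assert f"Failed to execute: {repr(cmd)}" == f"Failed to execute: {cmd!r}"
    assert f"version '{str(1.0)}'" == f"version '{1.0!s}'" == f"version '{1.0}'"
    print(f"Failed to execute: {cmd!r}")
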
From 3701d1341a5c8641dee595734c2c9a57b519e205 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 18 May 2024 19:30:28 +0200
Subject: [PATCH 0785/1761] Enable ruff rule RUF010

---
 ruff.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ruff.toml b/ruff.toml
index 70612985a7..49934c7da8 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,6 +1,7 @@
 [lint]
 extend-select = [
 	"C901",
+	"RUF010",
 	"W",
 ]
 ignore = [

From 4cbdaffb3651f888b6c8648e8796becfe5b15a8d Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 18 May 2024 19:18:45 +0200
Subject: [PATCH 0786/1761] Apply ruff/pyupgrade rule UP031

UP031 Use format specifiers instead of percent format
---
 distutils/_modified.py             |  2 +-
 distutils/archive_util.py          | 13 +++++--------
 distutils/bcppcompiler.py          |  6 ++---
 distutils/ccompiler.py             | 38 ++++++++++++++----------------
 distutils/cmd.py                   | 10 ++++----
 distutils/command/bdist.py         |  6 ++---
 distutils/command/bdist_dumb.py    |  4 ++--
 distutils/command/bdist_rpm.py     | 24 +++++++++----------
 distutils/command/build.py         |  2 +-
 distutils/command/build_clib.py    | 12 +++++-----
 distutils/command/build_ext.py     | 12 +++++-----
 distutils/command/build_py.py      |  8 +++----
 distutils/command/build_scripts.py |  2 +-
 distutils/command/check.py         |  4 ++--
 distutils/command/config.py        | 10 ++++----
 distutils/command/install.py       | 12 +++++-----
 distutils/command/install_lib.py   |  2 +-
 distutils/command/register.py      |  9 ++++---
 distutils/command/sdist.py         | 12 +++++-----
 distutils/command/upload.py        |  8 +++----
 distutils/config.py                |  4 ++--
 distutils/core.py                  | 11 ++++-----
 distutils/cygwinccompiler.py       | 26 ++++++++++----------
 distutils/dir_util.py              |  2 +-
 distutils/dist.py                  | 28 +++++++++++-----------
 distutils/extension.py             |  6 ++---
 distutils/fancy_getopt.py          | 14 +++++------
 distutils/file_util.py             |  6 ++---
 distutils/filelist.py              | 16 ++++++-------
 distutils/msvc9compiler.py         | 18 +++++++-------
 distutils/msvccompiler.py          | 14 +++++------
 distutils/sysconfig.py             |  4 ++--
 distutils/tests/__init__.py        |  2 +-
 distutils/tests/test_bdist.py      |  2 +-
 distutils/tests/test_bdist_dumb.py |  2 +-
 distutils/tests/test_build_clib.py |  2 +-
 distutils/tests/test_build_ext.py  |  4 ++--
 distutils/tests/test_build_py.py   |  4 ++--
 distutils/tests/test_clean.py      |  2 +-
 distutils/tests/test_config_cmd.py |  6 ++---
 distutils/tests/test_dir_util.py   |  6 ++---
 distutils/tests/test_file_util.py  |  2 +-
 distutils/tests/test_install.py    |  4 ++--
 distutils/tests/test_spawn.py      |  4 ++--
 distutils/text_file.py             |  2 +-
 distutils/util.py                  | 10 ++++----
 distutils/version.py               |  4 ++--
 distutils/versionpredicate.py      | 10 ++++----
 48 files changed, 202 insertions(+), 209 deletions(-)

diff --git a/distutils/_modified.py b/distutils/_modified.py
index 07b2ead0ed..6532aa1073 100644
--- a/distutils/_modified.py
+++ b/distutils/_modified.py
@@ -24,7 +24,7 @@ def newer(source, target):
     Raises DistutilsFileError if 'source' does not exist.
     """
     if not os.path.exists(source):
-        raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source))
+        raise DistutilsFileError(f"file '{os.path.abspath(source)}' does not exist")
 
     return _newer(source, target)
 
diff --git a/distutils/archive_util.py b/distutils/archive_util.py
index 052f6e4646..27b497f36c 100644
--- a/distutils/archive_util.py
+++ b/distutils/archive_util.py
@@ -113,7 +113,7 @@ def _set_uid_gid(tarinfo):
         return tarinfo
 
     if not dry_run:
-        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
+        tar = tarfile.open(archive_name, f'w|{tar_compression[compress]}')
         try:
             tar.add(base_dir, filter=_set_uid_gid)
         finally:
@@ -160,12 +160,9 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):  # noqa: C901
             # XXX really should distinguish between "couldn't find
             # external 'zip' command" and "zip failed".
             raise DistutilsExecError(
-                (
-                    "unable to create zip file '%s': "
-                    "could neither import the 'zipfile' module nor "
-                    "find a standalone zip utility"
-                )
-                % zip_filename
+                f"unable to create zip file '{zip_filename}': "
+                "could neither import the 'zipfile' module nor "
+                "find a standalone zip utility"
             )
 
     else:
@@ -260,7 +257,7 @@ def make_archive(
     try:
         format_info = ARCHIVE_FORMATS[format]
     except KeyError:
-        raise ValueError("unknown archive format '%s'" % format)
+        raise ValueError(f"unknown archive format '{format}'")
 
     func = format_info[0]
     for arg, val in format_info[1]:
diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py
index c1341e43cb..d6f518bb48 100644
--- a/distutils/bcppcompiler.py
+++ b/distutils/bcppcompiler.py
@@ -234,11 +234,11 @@ def link(  # noqa: C901
                 head, tail = os.path.split(output_filename)
                 modname, ext = os.path.splitext(tail)
                 temp_dir = os.path.dirname(objects[0])  # preserve tree structure
-                def_file = os.path.join(temp_dir, '%s.def' % modname)
+                def_file = os.path.join(temp_dir, f'{modname}.def')
                 contents = ['EXPORTS']
                 for sym in export_symbols or []:
                     contents.append(f'  {sym}=_{sym}')
-                self.execute(write_file, (def_file, contents), "writing %s" % def_file)
+                self.execute(write_file, (def_file, contents), f"writing {def_file}")
 
             # Borland C++ has problems with '/' in paths
             objects2 = map(os.path.normpath, objects)
@@ -254,7 +254,7 @@ def link(  # noqa: C901
                     objects.append(file)
 
             for ell in library_dirs:
-                ld_args.append("/L%s" % os.path.normpath(ell))
+                ld_args.append(f"/L{os.path.normpath(ell)}")
             ld_args.append("/L.")  # we sometimes use relative paths
 
             # list of object files
diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 8876d73098..0671bd3fda 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -202,7 +202,7 @@ def _check_macro_definitions(self, definitions):
                 and isinstance(defn[0], str)
             ):
                 raise TypeError(
-                    ("invalid macro definition '%s': " % defn)
+                    (f"invalid macro definition '{defn}': ")
                     + "must be tuple (string,), (string, string), or "
                     + "(string, None)"
                 )
@@ -859,7 +859,7 @@ def has_function(  # noqa: C901
         fd, fname = tempfile.mkstemp(".c", funcname, text=True)
         with os.fdopen(fd, "w", encoding='utf-8') as f:
             for incl in includes:
-                f.write("""#include "%s"\n""" % incl)
+                f.write(f"""#include "{incl}"\n""")
             if not includes:
                 # Use "char func(void);" as the prototype to follow
                 # what autoconf does.  This prototype does not match
@@ -869,22 +869,20 @@ def has_function(  # noqa: C901
                 # know the exact argument types, and the has_function
                 # interface does not provide that level of information.
                 f.write(
-                    """\
+                    f"""\
 #ifdef __cplusplus
 extern "C"
 #endif
-char %s(void);
+char {funcname}(void);
 """
-                    % funcname
                 )
             f.write(
-                """\
-int main (int argc, char **argv) {
-    %s();
+                f"""\
+int main (int argc, char **argv) {{
+    {funcname}();
     return 0;
-}
+}}
 """
-                % funcname
             )
 
         try:
@@ -1032,7 +1030,7 @@ def debug_print(self, msg):
             print(msg)
 
     def warn(self, msg):
-        sys.stderr.write("warning: %s\n" % msg)
+        sys.stderr.write(f"warning: {msg}\n")
 
     def execute(self, func, args, msg=None, level=1):
         execute(func, args, msg, self.dry_run)
@@ -1145,9 +1143,9 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
 
         (module_name, class_name, long_description) = compiler_class[compiler]
     except KeyError:
-        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
+        msg = f"don't know how to compile C/C++ code on platform '{plat}'"
         if compiler is not None:
-            msg = msg + " with '%s' compiler" % compiler
+            msg = msg + f" with '{compiler}' compiler"
         raise DistutilsPlatformError(msg)
 
     try:
@@ -1157,7 +1155,7 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
         klass = vars(module)[class_name]
     except ImportError:
         raise DistutilsModuleError(
-            "can't compile C/C++ code: unable to load module '%s'" % module_name
+            f"can't compile C/C++ code: unable to load module '{module_name}'"
         )
     except KeyError:
         raise DistutilsModuleError(
@@ -1196,15 +1194,15 @@ def gen_preprocess_options(macros, include_dirs):
     for macro in macros:
         if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
             raise TypeError(
-                "bad macro definition '%s': "
-                "each element of 'macros' list must be a 1- or 2-tuple" % macro
+                f"bad macro definition '{macro}': "
+                "each element of 'macros' list must be a 1- or 2-tuple"
             )
 
         if len(macro) == 1:  # undefine this macro
-            pp_opts.append("-U%s" % macro[0])
+            pp_opts.append(f"-U{macro[0]}")
         elif len(macro) == 2:
             if macro[1] is None:  # define with no explicit value
-                pp_opts.append("-D%s" % macro[0])
+                pp_opts.append(f"-D{macro[0]}")
             else:
                 # XXX *don't* need to be clever about quoting the
                 # macro value here, because we're going to avoid the
@@ -1212,7 +1210,7 @@ def gen_preprocess_options(macros, include_dirs):
                 pp_opts.append("-D{}={}".format(*macro))
 
     for dir in include_dirs:
-        pp_opts.append("-I%s" % dir)
+        pp_opts.append(f"-I{dir}")
     return pp_opts
 
 
@@ -1245,7 +1243,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
                 lib_opts.append(lib_file)
             else:
                 compiler.warn(
-                    "no library file corresponding to '%s' found (skipping)" % lib
+                    f"no library file corresponding to '{lib}' found (skipping)"
                 )
         else:
             lib_opts.append(compiler.library_option(lib))
diff --git a/distutils/cmd.py b/distutils/cmd.py
index 02dbf165f5..0daf6406f3 100644
--- a/distutils/cmd.py
+++ b/distutils/cmd.py
@@ -135,7 +135,7 @@ def initialize_options(self):
         This method must be implemented by all command classes.
         """
         raise RuntimeError(
-            "abstract method -- subclass %s must override" % self.__class__
+            f"abstract method -- subclass {self.__class__} must override"
         )
 
     def finalize_options(self):
@@ -150,14 +150,14 @@ def finalize_options(self):
         This method must be implemented by all command classes.
         """
         raise RuntimeError(
-            "abstract method -- subclass %s must override" % self.__class__
+            f"abstract method -- subclass {self.__class__} must override"
         )
 
     def dump_options(self, header=None, indent=""):
         from distutils.fancy_getopt import longopt_xlate
 
         if header is None:
-            header = "command options for '%s':" % self.get_command_name()
+            header = f"command options for '{self.get_command_name()}':"
         self.announce(indent + header, level=logging.INFO)
         indent = indent + "  "
         for option, _, _ in self.user_options:
@@ -178,7 +178,7 @@ def run(self):
         This method must be implemented by all command classes.
         """
         raise RuntimeError(
-            "abstract method -- subclass %s must override" % self.__class__
+            f"abstract method -- subclass {self.__class__} must override"
         )
 
     def announce(self, msg, level=logging.DEBUG):
@@ -412,7 +412,7 @@ def make_file(
         timestamp checks.
         """
         if skip_msg is None:
-            skip_msg = "skipping %s (inputs unchanged)" % outfile
+            skip_msg = f"skipping {outfile} (inputs unchanged)"
 
         # Allow 'infiles' to be a single string
         if isinstance(infiles, str):
diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index ade98445ba..d7f8b2483c 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -41,7 +41,7 @@ class bdist(Command):
             'plat-name=',
             'p',
             "platform name to embed in generated filenames "
-            "(default: %s)" % get_platform(),
+            f"(default: {get_platform()})",
         ),
         ('formats=', None, "formats for distribution (comma-separated list)"),
         (
@@ -120,7 +120,7 @@ def finalize_options(self):
             except KeyError:
                 raise DistutilsPlatformError(
                     "don't know how to create built distributions "
-                    "on platform %s" % os.name
+                    f"on platform {os.name}"
                 )
 
         if self.dist_dir is None:
@@ -133,7 +133,7 @@ def run(self):
             try:
                 commands.append(self.format_commands[format][0])
             except KeyError:
-                raise DistutilsOptionError("invalid format '%s'" % format)
+                raise DistutilsOptionError(f"invalid format '{format}'")
 
         # Reinitialize and run each command.
         for i in range(len(self.formats)):
diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index 06502d201e..0cec1978f6 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -23,7 +23,7 @@ class bdist_dumb(Command):
             'plat-name=',
             'p',
             "platform name to embed in generated filenames "
-            "(default: %s)" % get_platform(),
+            f"(default: {get_platform()})",
         ),
         (
             'format=',
@@ -81,7 +81,7 @@ def finalize_options(self):
             except KeyError:
                 raise DistutilsPlatformError(
                     "don't know how to create dumb built distributions "
-                    "on platform %s" % os.name
+                    f"on platform {os.name}"
                 )
 
         self.set_undefined_options(
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index 649968a5eb..f08981f798 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -214,7 +214,7 @@ def finalize_options(self):
 
         if os.name != 'posix':
             raise DistutilsPlatformError(
-                "don't know how to create RPM distributions on platform %s" % os.name
+                f"don't know how to create RPM distributions on platform {os.name}"
             )
         if self.binary_only and self.source_only:
             raise DistutilsOptionError(
@@ -295,9 +295,9 @@ def run(self):  # noqa: C901
 
         # Spec file goes into 'dist_dir' if '--spec-only specified',
         # build/rpm. otherwise.
-        spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name())
+        spec_path = os.path.join(spec_dir, f"{self.distribution.get_name()}.spec")
         self.execute(
-            write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path
+            write_file, (spec_path, self._make_spec_file()), f"writing '{spec_path}'"
         )
 
         if self.spec_only:  # stop if requested
@@ -322,7 +322,7 @@ def run(self):  # noqa: C901
             if os.path.exists(self.icon):
                 self.copy_file(self.icon, source_dir)
             else:
-                raise DistutilsFileError("icon file '%s' does not exist" % self.icon)
+                raise DistutilsFileError(f"icon file '{self.icon}' does not exist")
 
         # build package
         log.info("building RPMs")
@@ -334,9 +334,9 @@ def run(self):  # noqa: C901
             rpm_cmd.append('-bb')
         else:
             rpm_cmd.append('-ba')
-        rpm_cmd.extend(['--define', '__python %s' % self.python])
+        rpm_cmd.extend(['--define', f'__python {self.python}'])
         if self.rpm3_mode:
-            rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)])
+            rpm_cmd.extend(['--define', f'_topdir {os.path.abspath(self.rpm_base)}'])
         if not self.keep_temp:
             rpm_cmd.append('--clean')
 
@@ -370,7 +370,7 @@ def run(self):  # noqa: C901
 
             status = out.close()
             if status:
-                raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
+                raise DistutilsExecError(f"Failed to execute: {repr(q_cmd)}")
 
         finally:
             out.close()
@@ -426,7 +426,7 @@ def _make_spec_file(self):  # noqa: C901
         # normalizing the whitespace to simplify the test for whether the
         # invocation of brp-python-bytecompile passes in __python):
         vendor_hook = '\n'.join([
-            '  %s \\' % line.strip() for line in vendor_hook.splitlines()
+            f'  {line.strip()} \\' for line in vendor_hook.splitlines()
         ])
         problem = "brp-python-bytecompile \\\n"
         fixed = "brp-python-bytecompile %{__python} \\\n"
@@ -468,7 +468,7 @@ def _make_spec_file(self):  # noqa: C901
             if not self.distribution.has_ext_modules():
                 spec_file.append('BuildArch: noarch')
         else:
-            spec_file.append('BuildArch: %s' % self.force_arch)
+            spec_file.append(f'BuildArch: {self.force_arch}')
 
         for field in (
             'Vendor',
@@ -518,7 +518,7 @@ def _make_spec_file(self):  # noqa: C901
         # rpm scripts
         # figure out default build script
         def_setup_call = f"{self.python} {os.path.basename(sys.argv[0])}"
-        def_build = "%s build" % def_setup_call
+        def_build = f"{def_setup_call} build"
         if self.use_rpm_opt_flags:
             def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
 
@@ -529,8 +529,8 @@ def _make_spec_file(self):  # noqa: C901
         # are just text that we drop in as-is.  Hmmm.
 
         install_cmd = (
-            '%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES'
-        ) % def_setup_call
+            f'{def_setup_call} install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES'
+        )
 
         script_options = [
             ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
diff --git a/distutils/command/build.py b/distutils/command/build.py
index d18ed503e3..7abc431897 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -35,7 +35,7 @@ class build(Command):
             'plat-name=',
             'p',
             "platform name to build for, if supported "
-            "(default: %s)" % get_platform(),
+            f"(default: {get_platform()})",
         ),
         ('compiler=', 'c', "specify the compiler type"),
         ('parallel=', 'j', "number of parallel build jobs"),
diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
index 360575d0cb..f5e91042a7 100644
--- a/distutils/command/build_clib.py
+++ b/distutils/command/build_clib.py
@@ -138,8 +138,8 @@ def check_library_list(self, libraries):
 
             if '/' in name or (os.sep != '/' and os.sep in name):
                 raise DistutilsSetupError(
-                    "bad library name '%s': "
-                    "may not contain directory separators" % lib[0]
+                    f"bad library name '{lib[0]}': "
+                    "may not contain directory separators"
                 )
 
             if not isinstance(build_info, dict):
@@ -166,9 +166,9 @@ def get_source_files(self):
             sources = build_info.get('sources')
             if sources is None or not isinstance(sources, (list, tuple)):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '%s'), "
+                    f"in 'libraries' option (library '{lib_name}'), "
                     "'sources' must be present and must be "
-                    "a list of source filenames" % lib_name
+                    "a list of source filenames"
                 )
 
             filenames.extend(sources)
@@ -179,9 +179,9 @@ def build_libraries(self, libraries):
             sources = build_info.get('sources')
             if sources is None or not isinstance(sources, (list, tuple)):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '%s'), "
+                    f"in 'libraries' option (library '{lib_name}'), "
                     "'sources' must be present and must be "
-                    "a list of source filenames" % lib_name
+                    "a list of source filenames"
                 )
             sources = list(sources)
 
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 06d949aff1..fe06eed04e 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -57,7 +57,7 @@ class build_ext(Command):
     #     takes care of both command-line and client options
     #     in between initialize_options() and finalize_options())
 
-    sep_by = " (separated by '%s')" % os.pathsep
+    sep_by = f" (separated by '{os.pathsep}')"
     user_options = [
         ('build-lib=', 'b', "directory for compiled extension modules"),
         ('build-temp=', 't', "directory for temporary files (build by-products)"),
@@ -65,7 +65,7 @@ class build_ext(Command):
             'plat-name=',
             'p',
             "platform name to cross-compile for, if supported "
-            "(default: %s)" % get_platform(),
+            f"(default: {get_platform()})",
         ),
         (
             'inplace',
@@ -517,9 +517,9 @@ def build_extension(self, ext):
         sources = ext.sources
         if sources is None or not isinstance(sources, (list, tuple)):
             raise DistutilsSetupError(
-                "in 'ext_modules' option (extension '%s'), "
+                f"in 'ext_modules' option (extension '{ext.name}'), "
                 "'sources' must be present and must be "
-                "a list of source filenames" % ext.name
+                "a list of source filenames"
             )
         # sort to make the resulting .so file build reproducible
         sources = sorted(sources)
@@ -663,7 +663,7 @@ def find_swig(self):
             # Windows (or so I presume!).  If we find it there, great;
             # if not, act like Unix and assume it's in the PATH.
             for vers in ("1.3", "1.2", "1.1"):
-                fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+                fn = os.path.join(f"c:\\swig{vers}", "swig.exe")
                 if os.path.isfile(fn):
                     return fn
             else:
@@ -671,7 +671,7 @@ def find_swig(self):
         else:
             raise DistutilsPlatformError(
                 "I don't know how to find (much less run) SWIG "
-                "on platform '%s'" % os.name
+                f"on platform '{os.name}'"
             )
 
     # -- Name generators -----------------------------------------------
diff --git a/distutils/command/build_py.py b/distutils/command/build_py.py
index 56e6fa2e66..ede4ff0da4 100644
--- a/distutils/command/build_py.py
+++ b/distutils/command/build_py.py
@@ -191,12 +191,12 @@ def check_package(self, package, package_dir):
         if package_dir != "":
             if not os.path.exists(package_dir):
                 raise DistutilsFileError(
-                    "package directory '%s' does not exist" % package_dir
+                    f"package directory '{package_dir}' does not exist"
                 )
             if not os.path.isdir(package_dir):
                 raise DistutilsFileError(
-                    "supposed package directory '%s' exists, "
-                    "but is not a directory" % package_dir
+                    f"supposed package directory '{package_dir}' exists, "
+                    "but is not a directory"
                 )
 
         # Directories without __init__.py are namespace packages (PEP 420).
@@ -228,7 +228,7 @@ def find_package_modules(self, package, package_dir):
                 module = os.path.splitext(os.path.basename(f))[0]
                 modules.append((package, module, f))
             else:
-                self.debug_print("excluding %s" % setup_script)
+                self.debug_print(f"excluding {setup_script}")
         return modules
 
     def find_modules(self):
diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index 5f3902a027..9e5963c243 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -96,7 +96,7 @@ def _copy_script(self, script, outfiles, updated_files):  # noqa: C901
         else:
             first_line = f.readline()
             if not first_line:
-                self.warn("%s is an empty file (skipping)" % script)
+                self.warn(f"{script} is an empty file (skipping)")
                 return
 
             shebang_match = shebang_pattern.match(first_line)
diff --git a/distutils/command/check.py b/distutils/command/check.py
index 28599e109c..295927a90b 100644
--- a/distutils/command/check.py
+++ b/distutils/command/check.py
@@ -106,7 +106,7 @@ def check_metadata(self):
                 missing.append(attr)
 
         if missing:
-            self.warn("missing required meta-data: %s" % ', '.join(missing))
+            self.warn("missing required meta-data: {}".format(', '.join(missing)))
 
     def check_restructuredtext(self):
         """Checks if the long string fields are reST-compliant."""
@@ -147,7 +147,7 @@ def _check_rst_data(self, data):
         except AttributeError as e:
             reporter.messages.append((
                 -1,
-                'Could not finish the parsing: %s.' % e,
+                f'Could not finish the parsing: {e}.',
                 '',
                 {},
             ))
diff --git a/distutils/command/config.py b/distutils/command/config.py
index d4b2b0a362..a45ea70177 100644
--- a/distutils/command/config.py
+++ b/distutils/command/config.py
@@ -109,7 +109,7 @@ def _gen_temp_sourcefile(self, body, headers, lang):
         with open(filename, "w", encoding='utf-8') as file:
             if headers:
                 for header in headers:
-                    file.write("#include <%s>\n" % header)
+                    file.write(f"#include <{header}>\n")
                 file.write("\n")
             file.write(body)
             if body[-1] != "\n":
@@ -126,7 +126,7 @@ def _preprocess(self, body, headers, include_dirs, lang):
     def _compile(self, body, headers, include_dirs, lang):
         src = self._gen_temp_sourcefile(body, headers, lang)
         if self.dump_source:
-            dump_file(src, "compiling '%s':" % src)
+            dump_file(src, f"compiling '{src}':")
         (obj,) = self.compiler.object_filenames([src])
         self.temp_files.extend([src, obj])
         self.compiler.compile([src], include_dirs=include_dirs)
@@ -311,12 +311,12 @@ def check_func(
         self._check_compiler()
         body = []
         if decl:
-            body.append("int %s ();" % func)
+            body.append(f"int {func} ();")
         body.append("int main () {")
         if call:
-            body.append("  %s();" % func)
+            body.append(f"  {func}();")
         else:
-            body.append("  %s;" % func)
+            body.append(f"  {func};")
         body.append("}")
         body = "\n".join(body) + "\n"
 
diff --git a/distutils/command/install.py b/distutils/command/install.py
index 8e920be4de..960ff64b16 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -245,7 +245,7 @@ class install(Command):
         user_options.append((
             'user',
             None,
-            "install in user site-package '%s'" % USER_SITE,
+            f"install in user site-package '{USER_SITE}'",
         ))
         boolean_options.append('user')
 
@@ -600,7 +600,7 @@ def finalize_other(self):
                 self.select_scheme(os.name)
             except KeyError:
                 raise DistutilsPlatformError(
-                    "I don't know how to install stuff on '%s'" % os.name
+                    f"I don't know how to install stuff on '{os.name}'"
                 )
 
     def select_scheme(self, name):
@@ -685,7 +685,7 @@ def create_home_path(self):
         home = convert_path(os.path.expanduser("~"))
         for _name, path in self.config_vars.items():
             if str(path).startswith(home) and not os.path.isdir(path):
-                self.debug_print("os.makedirs('%s', 0o700)" % path)
+                self.debug_print(f"os.makedirs('{path}', 0o700)")
                 os.makedirs(path, 0o700)
 
     # -- Command execution methods -------------------------------------
@@ -720,7 +720,7 @@ def run(self):
             self.execute(
                 write_file,
                 (self.record, outputs),
-                "writing list of installed files to '%s'" % self.record,
+                f"writing list of installed files to '{self.record}'",
             )
 
         sys_path = map(os.path.normpath, sys.path)
@@ -745,10 +745,10 @@ def create_path_file(self):
         filename = os.path.join(self.install_libbase, self.path_file + ".pth")
         if self.install_path_file:
             self.execute(
-                write_file, (filename, [self.extra_dirs]), "creating %s" % filename
+                write_file, (filename, [self.extra_dirs]), f"creating {filename}"
             )
         else:
-            self.warn("path file '%s' not created" % filename)
+            self.warn(f"path file '{filename}' not created")
 
     # -- Reporting methods ---------------------------------------------
 
diff --git a/distutils/command/install_lib.py b/distutils/command/install_lib.py
index b1f346f018..76993b1e6b 100644
--- a/distutils/command/install_lib.py
+++ b/distutils/command/install_lib.py
@@ -114,7 +114,7 @@ def install(self):
             outfiles = self.copy_tree(self.build_dir, self.install_dir)
         else:
             self.warn(
-                "'%s' does not exist -- no Python modules to install" % self.build_dir
+                f"'{self.build_dir}' does not exist -- no Python modules to install"
             )
             return
         return outfiles
diff --git a/distutils/command/register.py b/distutils/command/register.py
index ee6c54daba..6b837275fa 100644
--- a/distutils/command/register.py
+++ b/distutils/command/register.py
@@ -88,7 +88,7 @@ def _set_config(self):
             self.has_config = True
         else:
             if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
-                raise ValueError('%s not found in .pypirc' % self.repository)
+                raise ValueError(f'{self.repository} not found in .pypirc')
             if self.repository == 'pypi':
                 self.repository = self.DEFAULT_REPOSITORY
             self.has_config = False
@@ -192,7 +192,7 @@ def send_metadata(self):  # noqa: C901
                         logging.INFO,
                     )
                     self.announce(
-                        '(the login will be stored in %s)' % self._get_rc_file(),
+                        f'(the login will be stored in {self._get_rc_file()})',
                         logging.INFO,
                     )
                     choice = 'X'
@@ -277,7 +277,7 @@ def post_to_server(self, data, auth=None):  # noqa: C901
         for key, values in data.items():
             for value in map(str, make_iterable(values)):
                 body.write(sep_boundary)
-                body.write('\nContent-Disposition: form-data; name="%s"' % key)
+                body.write(f'\nContent-Disposition: form-data; name="{key}"')
                 body.write("\n\n")
                 body.write(value)
                 if value and value[-1] == '\r':
@@ -288,8 +288,7 @@ def post_to_server(self, data, auth=None):  # noqa: C901
 
         # build the Request
         headers = {
-            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'
-            % boundary,
+            'Content-type': f'multipart/form-data; boundary={boundary}; charset=utf-8',
             'Content-length': str(len(body)),
         }
         req = urllib.request.Request(self.repository, body, headers)
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index 387d27c90b..b483b8bd52 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -150,7 +150,7 @@ def finalize_options(self):
 
         bad_format = archive_util.check_archive_formats(self.formats)
         if bad_format:
-            raise DistutilsOptionError("unknown archive format '%s'" % bad_format)
+            raise DistutilsOptionError(f"unknown archive format '{bad_format}'")
 
         if self.dist_dir is None:
             self.dist_dir = "dist"
@@ -288,7 +288,7 @@ def _add_defaults_standards(self):
                 if self._cs_path_exists(fn):
                     self.filelist.append(fn)
                 else:
-                    self.warn("standard file '%s' not found" % fn)
+                    self.warn(f"standard file '{fn}' not found")
 
     def _add_defaults_optional(self):
         optional = ['tests/test*.py', 'test/test*.py', 'setup.cfg']
@@ -411,7 +411,7 @@ def write_manifest(self):
         if self._manifest_is_not_generated():
             log.info(
                 "not writing to manually maintained "
-                "manifest file '%s'" % self.manifest
+                f"manifest file '{self.manifest}'"
             )
             return
 
@@ -420,7 +420,7 @@ def write_manifest(self):
         self.execute(
             file_util.write_file,
             (self.manifest, content),
-            "writing manifest file '%s'" % self.manifest,
+            f"writing manifest file '{self.manifest}'",
         )
 
     def _manifest_is_not_generated(self):
@@ -468,10 +468,10 @@ def make_release_tree(self, base_dir, files):
 
         if hasattr(os, 'link'):  # can make hard links on this system
             link = 'hard'
-            msg = "making hard links in %s..." % base_dir
+            msg = f"making hard links in {base_dir}..."
         else:  # nope, have to copy
             link = None
-            msg = "copying files to %s..." % base_dir
+            msg = f"copying files to {base_dir}..."
 
         if not files:
             log.warning("no files to distribute -- empty manifest?")
diff --git a/distutils/command/upload.py b/distutils/command/upload.py
index cf541f8a82..773e222c9c 100644
--- a/distutils/command/upload.py
+++ b/distutils/command/upload.py
@@ -75,7 +75,7 @@ def upload_file(self, command, pyversion, filename):  # noqa: C901
         # Makes sure the repository URL is compliant
         schema, netloc, url, params, query, fragments = urlparse(self.repository)
         if params or query or fragments:
-            raise AssertionError("Incompatible url %s" % self.repository)
+            raise AssertionError(f"Incompatible url {self.repository}")
 
         if schema not in ('http', 'https'):
             raise AssertionError("unsupported schema " + schema)
@@ -153,10 +153,10 @@ def upload_file(self, command, pyversion, filename):  # noqa: C901
         end_boundary = sep_boundary + b'--\r\n'
         body = io.BytesIO()
         for key, values in data.items():
-            title = '\r\nContent-Disposition: form-data; name="%s"' % key
+            title = f'\r\nContent-Disposition: form-data; name="{key}"'
             for value in make_iterable(values):
                 if type(value) is tuple:
-                    title += '; filename="%s"' % value[0]
+                    title += f'; filename="{value[0]}"'
                     value = value[1]
                 else:
                     value = str(value).encode('utf-8')
@@ -172,7 +172,7 @@ def upload_file(self, command, pyversion, filename):  # noqa: C901
 
         # build the Request
         headers = {
-            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
+            'Content-type': f'multipart/form-data; boundary={boundary}',
             'Content-length': str(len(body)),
             'Authorization': auth,
         }
diff --git a/distutils/config.py b/distutils/config.py
index 83f96a9eec..8ab27f34c2 100644
--- a/distutils/config.py
+++ b/distutils/config.py
@@ -30,7 +30,7 @@ class PyPIRCCommand(Command):
     realm = None
 
     user_options = [
-        ('repository=', 'r', "url of repository [default: %s]" % DEFAULT_REPOSITORY),
+        ('repository=', 'r', f"url of repository [default: {DEFAULT_REPOSITORY}]"),
         ('show-response', None, 'display full response text from server'),
     ]
 
@@ -51,7 +51,7 @@ def _read_pypirc(self):  # noqa: C901
         """Reads the .pypirc file."""
         rc = self._get_rc_file()
         if os.path.exists(rc):
-            self.announce('Using PyPI login from %s' % rc)
+            self.announce(f'Using PyPI login from {rc}')
             repository = self.repository or self.DEFAULT_REPOSITORY
 
             config = RawConfigParser()
diff --git a/distutils/core.py b/distutils/core.py
index 309ce696fa..7238b618f6 100644
--- a/distutils/core.py
+++ b/distutils/core.py
@@ -146,7 +146,7 @@ class found in 'cmdclass' is used in place of the default, which is
         _setup_distribution = dist = klass(attrs)
     except DistutilsSetupError as msg:
         if 'name' not in attrs:
-            raise SystemExit("error in setup command: %s" % msg)
+            raise SystemExit(f"error in setup command: {msg}")
         else:
             raise SystemExit("error in {} setup command: {}".format(attrs['name'], msg))
 
@@ -170,7 +170,7 @@ class found in 'cmdclass' is used in place of the default, which is
     try:
         ok = dist.parse_command_line()
     except DistutilsArgError as msg:
-        raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)
+        raise SystemExit(gen_usage(dist.script_name) + f"\nerror: {msg}")
 
     if DEBUG:
         print("options (after parsing command line):")
@@ -274,11 +274,10 @@ def run_setup(script_name, script_args=None, stop_after="run"):
 
     if _setup_distribution is None:
         raise RuntimeError(
-            (
+
                 "'distutils.core.setup()' was never called -- "
-                "perhaps '%s' is not a Distutils setup script?"
-            )
-            % script_name
+                f"perhaps '{script_name}' is not a Distutils setup script?"
+
         )
 
     # I wonder if the setup script's namespace -- g and l -- would be of
diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py
index 539f09d8f3..506b88c9c6 100644
--- a/distutils/cygwinccompiler.py
+++ b/distutils/cygwinccompiler.py
@@ -61,7 +61,7 @@ def get_msvcr():
     try:
         return _msvcr_lookup[msc_ver]
     except KeyError:
-        raise ValueError("Unknown MS Compiler version %s " % msc_ver)
+        raise ValueError(f"Unknown MS Compiler version {msc_ver} ")
 
 
 _runtime_library_dirs_msg = (
@@ -91,8 +91,8 @@ def __init__(self, verbose=0, dry_run=0, force=0):
         if status is not CONFIG_H_OK:
             self.warn(
                 "Python's pyconfig.h doesn't seem to support your compiler. "
-                "Reason: %s. "
-                "Compiling may fail because of undefined preprocessor macros." % details
+                f"Reason: {details}. "
+                "Compiling may fail because of undefined preprocessor macros."
             )
 
         self.cc = os.environ.get('CC', 'gcc')
@@ -102,10 +102,10 @@ def __init__(self, verbose=0, dry_run=0, force=0):
         shared_option = "-shared"
 
         self.set_executables(
-            compiler='%s -mcygwin -O -Wall' % self.cc,
-            compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc,
-            compiler_cxx='%s -mcygwin -O -Wall' % self.cxx,
-            linker_exe='%s -mcygwin' % self.cc,
+            compiler=f'{self.cc} -mcygwin -O -Wall',
+            compiler_so=f'{self.cc} -mcygwin -mdll -O -Wall',
+            compiler_cxx=f'{self.cxx} -mcygwin -O -Wall',
+            linker_exe=f'{self.cc} -mcygwin',
             linker_so=(f'{self.linker_dll} -mcygwin {shared_option}'),
         )
 
@@ -195,10 +195,10 @@ def link(
             def_file = os.path.join(temp_dir, dll_name + ".def")
 
             # Generate .def file
-            contents = ["LIBRARY %s" % os.path.basename(output_filename), "EXPORTS"]
+            contents = [f"LIBRARY {os.path.basename(output_filename)}", "EXPORTS"]
             for sym in export_symbols:
                 contents.append(sym)
-            self.execute(write_file, (def_file, contents), "writing %s" % def_file)
+            self.execute(write_file, (def_file, contents), f"writing {def_file}")
 
             # next add options for def-file
 
@@ -274,10 +274,10 @@ def __init__(self, verbose=0, dry_run=0, force=0):
             raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32')
 
         self.set_executables(
-            compiler='%s -O -Wall' % self.cc,
-            compiler_so='%s -mdll -O -Wall' % self.cc,
-            compiler_cxx='%s -O -Wall' % self.cxx,
-            linker_exe='%s' % self.cc,
+            compiler=f'{self.cc} -O -Wall',
+            compiler_so=f'{self.cc} -mdll -O -Wall',
+            compiler_cxx=f'{self.cxx} -O -Wall',
+            linker_exe=f'{self.cc}',
             linker_so=f'{self.linker_dll} {shared_option}',
         )
 
diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 370c6ffd49..1d5573efc4 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -133,7 +133,7 @@ def copy_tree(  # noqa: C901
     from distutils.file_util import copy_file
 
     if not dry_run and not os.path.isdir(src):
-        raise DistutilsFileError("cannot copy tree '%s': not a directory" % src)
+        raise DistutilsFileError(f"cannot copy tree '{src}': not a directory")
     try:
         names = os.listdir(src)
     except OSError as e:
diff --git a/distutils/dist.py b/distutils/dist.py
index 668ce7eb0a..2eb5e1a1e2 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -262,7 +262,7 @@ def __init__(self, attrs=None):  # noqa: C901
                 elif hasattr(self, key):
                     setattr(self, key, val)
                 else:
-                    msg = "Unknown distribution option: %s" % repr(key)
+                    msg = f"Unknown distribution option: {repr(key)}"
                     warnings.warn(msg)
 
         # no-user-cfg is handled before other command line args
@@ -311,9 +311,9 @@ def dump_option_dicts(self, header=None, commands=None, indent=""):
         for cmd_name in commands:
             opt_dict = self.command_options.get(cmd_name)
             if opt_dict is None:
-                self.announce(indent + "no option dict for '%s' command" % cmd_name)
+                self.announce(indent + f"no option dict for '{cmd_name}' command")
             else:
-                self.announce(indent + "option dict for '%s' command:" % cmd_name)
+                self.announce(indent + f"option dict for '{cmd_name}' command:")
                 out = pformat(opt_dict)
                 for line in out.split('\n'):
                     self.announce(indent + "  " + line)
@@ -339,7 +339,7 @@ def find_config_files(self):
         files = [str(path) for path in self._gen_paths() if os.path.isfile(path)]
 
         if DEBUG:
-            self.announce("using config files: %s" % ', '.join(files))
+            self.announce("using config files: {}".format(', '.join(files)))
 
         return files
 
@@ -395,7 +395,7 @@ def parse_config_files(self, filenames=None):  # noqa: C901
         parser = ConfigParser()
         for filename in filenames:
             if DEBUG:
-                self.announce("  reading %s" % filename)
+                self.announce(f"  reading {filename}")
             parser.read(filename, encoding='utf-8')
             for section in parser.sections():
                 options = parser.options(section)
@@ -525,7 +525,7 @@ def _parse_command_opts(self, parser, args):  # noqa: C901
         # Pull the current command from the head of the command line
         command = args[0]
         if not command_re.match(command):
-            raise SystemExit("invalid command name '%s'" % command)
+            raise SystemExit(f"invalid command name '{command}'")
         self.commands.append(command)
 
         # Dig up the command class that implements this command, so we
@@ -540,7 +540,7 @@ def _parse_command_opts(self, parser, args):  # noqa: C901
         # to be sure that the basic "command" interface is implemented.
         if not issubclass(cmd_class, Command):
             raise DistutilsClassError(
-                "command class %s must subclass Command" % cmd_class
+                f"command class {cmd_class} must subclass Command"
             )
 
         # Also make sure that the command object provides a list of its
@@ -668,7 +668,7 @@ def _show_help(
                 )
             else:
                 parser.set_option_table(klass.user_options)
-            parser.print_help("Options for '%s' command:" % klass.__name__)
+            parser.print_help(f"Options for '{klass.__name__}' command:")
             print()
 
         print(gen_usage(self.script_name))
@@ -842,7 +842,7 @@ def get_command_class(self, command):
             self.cmdclass[command] = klass
             return klass
 
-        raise DistutilsModuleError("invalid command '%s'" % command)
+        raise DistutilsModuleError(f"invalid command '{command}'")
 
     def get_command_obj(self, command, create=1):
         """Return the command object for 'command'.  Normally this object
@@ -855,7 +855,7 @@ def get_command_obj(self, command, create=1):
             if DEBUG:
                 self.announce(
                     "Distribution.get_command_obj(): "
-                    "creating '%s' command object" % command
+                    f"creating '{command}' command object"
                 )
 
             klass = self.get_command_class(command)
@@ -887,7 +887,7 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
             option_dict = self.get_option_dict(command_name)
 
         if DEBUG:
-            self.announce("  setting options for '%s' command:" % command_name)
+            self.announce(f"  setting options for '{command_name}' command:")
         for option, (source, value) in option_dict.items():
             if DEBUG:
                 self.announce(f"    {option} = {value} (from {source})")
@@ -1149,9 +1149,9 @@ def write_pkg_file(self, file):
             version = '1.1'
 
         # required fields
-        file.write('Metadata-Version: %s\n' % version)
-        file.write('Name: %s\n' % self.get_name())
-        file.write('Version: %s\n' % self.get_version())
+        file.write(f'Metadata-Version: {version}\n')
+        file.write(f'Name: {self.get_name()}\n')
+        file.write(f'Version: {self.get_version()}\n')
 
         def maybe_write(header, val):
             if val:
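
A note on the `{repr(key)}` rewrite in the dist.py hunk above: f-strings carry a built-in `!r` conversion, so the explicit `repr()` call is unnecessary. A minimal sketch, assuming an illustrative `key` value (follow-up ruff/pyupgrade-style passes tend to prefer the `!r` form):

    key = 'unknown-option'

    # The form the patch produces:
    msg = f"Unknown distribution option: {repr(key)}"

    # Equivalent spelling using the f-string conversion flag:
    msg_r = f"Unknown distribution option: {key!r}"

    assert msg == msg_r
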
diff --git a/distutils/extension.py b/distutils/extension.py
index 94e71635d9..914d176d78 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -130,7 +130,7 @@ def __init__(
         if len(kw) > 0:
             options = [repr(option) for option in kw]
             options = ', '.join(sorted(options))
-            msg = "Unknown Extension options: %s" % options
+            msg = f"Unknown Extension options: {options}"
             warnings.warn(msg)
 
     def __repr__(self):
@@ -167,7 +167,7 @@ def read_setup_file(filename):  # noqa: C901
                 continue
 
             if line[0] == line[-1] == "*":
-                file.warn("'%s' lines not handled yet" % line)
+                file.warn(f"'{line}' lines not handled yet")
                 continue
 
             line = expand_makefile_vars(line, vars)
@@ -233,7 +233,7 @@ def read_setup_file(filename):  # noqa: C901
                     # and append it to sources.  Hmmmm.
                     ext.extra_objects.append(word)
                 else:
-                    file.warn("unrecognized argument '%s'" % word)
+                    file.warn(f"unrecognized argument '{word}'")
 
             extensions.append(ext)
     finally:
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index e905aede4d..691430295f 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -21,7 +21,7 @@
 # utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
 # The similarities to NAME are again not a coincidence...
 longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
-longopt_re = re.compile(r'^%s$' % longopt_pat)
+longopt_re = re.compile(rf'^{longopt_pat}$')
 
 # For recognizing "negative alias" options, eg. "quiet=!verbose"
 neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$")
@@ -95,7 +95,7 @@ def set_option_table(self, option_table):
     def add_option(self, long_option, short_option=None, help_string=None):
         if long_option in self.option_index:
             raise DistutilsGetoptError(
-                "option conflict: already an option '%s'" % long_option
+                f"option conflict: already an option '{long_option}'"
             )
         else:
             option = (long_option, short_option, help_string)
@@ -162,13 +162,13 @@ def _grok_option_table(self):  # noqa: C901
             # Type- and value-check the option names
             if not isinstance(long, str) or len(long) < 2:
                 raise DistutilsGetoptError(
-                    ("invalid long option '%s': must be a string of length >= 2") % long
+                    f"invalid long option '{long}': must be a string of length >= 2"
                 )
 
             if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
                 raise DistutilsGetoptError(
-                    "invalid short option '%s': "
-                    "must a single character or None" % short
+                    f"invalid short option '{short}': "
+                    "must a single character or None"
                 )
 
             self.repeat[long] = repeat
@@ -210,8 +210,8 @@ def _grok_option_table(self):  # noqa: C901
             # '='.
             if not longopt_re.match(long):
                 raise DistutilsGetoptError(
-                    "invalid long option name '%s' "
-                    "(must be letters, numbers, hyphens only" % long
+                    f"invalid long option name '{long}' "
+                    "(must be letters, numbers, hyphens only"
                 )
 
             self.attr_name[long] = self.get_attr_name(long)
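
The `longopt_re` change at the top of this file deserves a remark: `rf'...'` simply stacks the raw-string and f-string prefixes, so interpolating a sub-pattern into a larger regex is byte-for-byte identical to the old `%` splice. A minimal sketch, reusing the pattern from the hunk:

    import re

    longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'

    # Both spellings build the same pattern string.
    assert r'^%s$' % longopt_pat == rf'^{longopt_pat}$'

    longopt_re = re.compile(rf'^{longopt_pat}$')
    assert longopt_re.match('build-lib')
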
diff --git a/distutils/file_util.py b/distutils/file_util.py
index 960def9cf9..afa7b0f697 100644
--- a/distutils/file_util.py
+++ b/distutils/file_util.py
@@ -106,7 +106,7 @@ def copy_file(  # noqa: C901
 
     if not os.path.isfile(src):
         raise DistutilsFileError(
-            "can't copy '%s': doesn't exist or not a regular file" % src
+            f"can't copy '{src}': doesn't exist or not a regular file"
         )
 
     if os.path.isdir(dst):
@@ -123,7 +123,7 @@ def copy_file(  # noqa: C901
     try:
         action = _copy_action[link]
     except KeyError:
-        raise ValueError("invalid value '%s' for 'link' argument" % link)
+        raise ValueError(f"invalid value '{link}' for 'link' argument")
 
     if verbose >= 1:
         if os.path.basename(dst) == os.path.basename(src):
@@ -186,7 +186,7 @@ def move_file(src, dst, verbose=1, dry_run=0):  # noqa: C901
         return dst
 
     if not isfile(src):
-        raise DistutilsFileError("can't move '%s': not a regular file" % src)
+        raise DistutilsFileError(f"can't move '{src}': not a regular file")
 
     if isdir(dst):
         dst = os.path.join(dst, basename(src))
diff --git a/distutils/filelist.py b/distutils/filelist.py
index 71ffb2abe7..af1958334d 100644
--- a/distutils/filelist.py
+++ b/distutils/filelist.py
@@ -84,24 +84,24 @@ def _parse_template_line(self, line):
         if action in ('include', 'exclude', 'global-include', 'global-exclude'):
             if len(words) < 2:
                 raise DistutilsTemplateError(
-                    "'%s' expects <pattern1> <pattern2> ..." % action
+                    f"'{action}' expects <pattern1> <pattern2> ..."
                 )
             patterns = [convert_path(w) for w in words[1:]]
         elif action in ('recursive-include', 'recursive-exclude'):
             if len(words) < 3:
                 raise DistutilsTemplateError(
-                    "'%s' expects <dir> <pattern1> <pattern2> ..." % action
+                    f"'{action}' expects <dir> <pattern1> <pattern2> ..."
                 )
             dir = convert_path(words[1])
             patterns = [convert_path(w) for w in words[2:]]
         elif action in ('graft', 'prune'):
             if len(words) != 2:
                 raise DistutilsTemplateError(
-                    "'%s' expects a single <dir_pattern>" % action
+                    f"'{action}' expects a single <dir_pattern>"
                 )
             dir_pattern = convert_path(words[1])
         else:
-            raise DistutilsTemplateError("unknown action '%s'" % action)
+            raise DistutilsTemplateError(f"unknown action '{action}'")
 
         return (action, patterns, dir, dir_pattern)
 
@@ -192,7 +192,7 @@ def process_template_line(self, line):  # noqa: C901
                 )
         else:
             raise DistutilsInternalError(
-                "this cannot happen: invalid action '%s'" % action
+                f"this cannot happen: invalid action '{action}'"
             )
 
     # Filtering/selection methods
@@ -225,7 +225,7 @@ def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
         # XXX docstring lying about what the special chars are?
         files_found = False
         pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
-        self.debug_print("include_pattern: applying regex r'%s'" % pattern_re.pattern)
+        self.debug_print(f"include_pattern: applying regex r'{pattern_re.pattern}'")
 
         # delayed loading of allfiles list
         if self.allfiles is None:
@@ -247,7 +247,7 @@ def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
         """
         files_found = False
         pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
-        self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern)
+        self.debug_print(f"exclude_pattern: applying regex r'{pattern_re.pattern}'")
         for i in range(len(self.files) - 1, -1, -1):
             if pattern_re.search(self.files[i]):
                 self.debug_print(" removing " + self.files[i])
@@ -327,7 +327,7 @@ def glob_to_re(pattern):
         # we're using a regex to manipulate a regex, so we need
         # to escape the backslash twice
         sep = r'\\\\'
-    escaped = r'\1[^%s]' % sep
+    escaped = rf'\1[^{sep}]'
     pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
     return pattern_re

From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 18 May 2024 19:37:57 +0200
Subject: [PATCH 0787/1761] Round of `ruff format` after `ruff check`

---
 distutils/archive_util.py      | 8 +++-----
 distutils/command/bdist_rpm.py | 3 ++-
 distutils/command/build.py     | 3 +--
 distutils/command/sdist.py     | 3 +--
 distutils/core.py              | 6 ++----
 distutils/sysconfig.py         | 3 +--
 6 files changed, 10 insertions(+), 16 deletions(-)

diff --git a/distutils/archive_util.py b/distutils/archive_util.py
index 27b497f36c..9361bf9543 100644
--- a/distutils/archive_util.py
+++ b/distutils/archive_util.py
@@ -160,11 +160,9 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):  # noqa: C901
             # XXX really should distinguish between "couldn't find
             # external 'zip' command" and "zip failed".
             raise DistutilsExecError(
-
-                    f"unable to create zip file '{zip_filename}': "
-                    "could neither import the 'zipfile' module nor "
-                    "find a standalone zip utility"
-
+                f"unable to create zip file '{zip_filename}': "
+                "could neither import the 'zipfile' module nor "
+                "find a standalone zip utility"
             )
 
     else:
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index f08981f798..d6c461161b 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -529,7 +529,8 @@ def _make_spec_file(self):  # noqa: C901
         # are just text that we drop in as-is.  Hmmm.
 
         install_cmd = (
-            f'{def_setup_call} install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES'
+            f'{def_setup_call} install -O1 --root=$RPM_BUILD_ROOT '
+            '--record=INSTALLED_FILES'
         )
 
         script_options = [
diff --git a/distutils/command/build.py b/distutils/command/build.py
index 7abc431897..74695941e8 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -34,8 +34,7 @@ class build(Command):
         (
             'plat-name=',
             'p',
-            "platform name to build for, if supported "
-            f"(default: {get_platform()})",
+            f"platform name to build for, if supported (default: {get_platform()})",
         ),
         ('compiler=', 'c', "specify the compiler type"),
         ('parallel=', 'j', "number of parallel build jobs"),
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index b483b8bd52..878e575df1 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -410,8 +410,7 @@ def write_manifest(self):
         """
         if self._manifest_is_not_generated():
             log.info(
-                "not writing to manually maintained "
-                f"manifest file '{self.manifest}'"
+                f"not writing to manually maintained manifest file '{self.manifest}'"
             )
             return
 
diff --git a/distutils/core.py b/distutils/core.py
index 7238b618f6..82113c47c1 100644
--- a/distutils/core.py
+++ b/distutils/core.py
@@ -274,10 +274,8 @@ def run_setup(script_name, script_args=None, stop_after="run"):
 
     if _setup_distribution is None:
         raise RuntimeError(
-
-                "'distutils.core.setup()' was never called -- "
-                f"perhaps '{script_name}' is not a Distutils setup script?"
-
+            "'distutils.core.setup()' was never called -- "
+            f"perhaps '{script_name}' is not a Distutils setup script?"
         )
 
     # I wonder if the setup script's namespace -- g and l -- would be of
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 1325178699..9d85cdfd99 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -262,8 +262,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
             return os.path.join(prefix, "Lib", "site-packages")
     else:
         raise DistutilsPlatformError(
-            "I don't know where Python installs its library "
-            f"on platform '{os.name}'"
+            f"I don't know where Python installs its library on platform '{os.name}'"
         )
 
 

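Before moving on: the hunks in this commit are pure re-flow, with no behavioral change. Stray blank lines inside call parentheses are dropped, over-long implicit string concatenations are re-wrapped, and concatenations that fit within the line limit are collapsed to a single literal. A hedged sketch of that last pattern (the stand-in `get_platform` is illustrative):

    def get_platform():  # stand-in for distutils.util.get_platform
        return 'linux-x86_64'

    # Before: help text built from two adjacent literals.
    help_text = (
        "platform name to build for, if supported "
        f"(default: {get_platform()})"
    )

    # After: one f-string, since the joined form fits the line limit.
    help_text = f"platform name to build for, if supported (default: {get_platform()})"
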
From 8765761488ab297c4df187cafe4b6a111c38f9f9 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 18 May 2024 19:21:07 +0200
Subject: [PATCH 0788/1761] Enable ruff/pyupgrade rules (UP)

---
 ruff.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ruff.toml b/ruff.toml
index 70612985a7..b65c055950 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,6 +1,7 @@
 [lint]
 extend-select = [
 	"C901",
+	"UP",
 	"W",
 ]
 ignore = [

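For context, enabling the UP family is what drives the mass `%`-to-f-string conversion earlier in this series. Two representative pyupgrade-style rewrites, sketched from the patterns those patches apply (rule codes UP031 and UP032 in current ruff, named here from memory):

    name, version = 'distutils', '3.12'

    # UP031: printf-style % formatting -> f-string
    old = 'Name: %s\n' % name
    new = f'Name: {name}\n'

    # UP032: str.format() -> f-string
    old2 = '{}-{}'.format(name, version)
    new2 = f'{name}-{version}'

    assert old == new and old2 == new2
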
From b26d5e480ce0b88f190cbce2a3fa5cb74a29556c Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 17 May 2024 17:39:19 -0400
Subject: [PATCH 0789/1761] Use `set` instead of `True`-only `dict`

---
 distutils/dir_util.py | 10 +++++-----
 distutils/dist.py     | 18 +++++++++---------
 2 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 370c6ffd49..175f5c26d1 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -10,7 +10,7 @@
 
 # cache for by mkpath() -- in addition to cheapening redundant calls,
 # eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
-_path_created = {}
+_path_created = set()
 
 
 def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
@@ -45,7 +45,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
     created_dirs = []
     if os.path.isdir(name) or name == '':
         return created_dirs
-    if _path_created.get(os.path.abspath(name)):
+    if os.path.abspath(name) in _path_created:
         return created_dirs
 
     (head, tail) = os.path.split(name)
@@ -63,7 +63,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
         head = os.path.join(head, d)
         abs_head = os.path.abspath(head)
 
-        if _path_created.get(abs_head):
+        if abs_head in _path_created:
             continue
 
         if verbose >= 1:
@@ -79,7 +79,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
                     )
             created_dirs.append(head)
 
-        _path_created[abs_head] = 1
+        _path_created.add(abs_head)
     return created_dirs
 
 
@@ -222,7 +222,7 @@ def remove_tree(directory, verbose=1, dry_run=0):
             # remove dir from cache if it's already there
             abspath = os.path.abspath(cmd[1])
             if abspath in _path_created:
-                _path_created.pop(abspath)
+                _path_created.remove(abspath)
         except OSError as exc:
             log.warning("error removing %s: %s", directory, exc)
 
diff --git a/distutils/dist.py b/distutils/dist.py
index 668ce7eb0a..b62db8cbb1 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -694,12 +694,12 @@ def handle_display_options(self, option_order):
         # display that metadata in the order in which the user supplied the
         # metadata options.
         any_display_options = 0
-        is_display_option = {}
+        is_display_option = set()
         for option in self.display_options:
-            is_display_option[option[0]] = 1
+            is_display_option.add(option[0])
 
         for opt, val in option_order:
-            if val and is_display_option.get(opt):
+            if val and opt in is_display_option:
                 opt = translate_longopt(opt)
                 value = getattr(self.metadata, "get_" + opt)()
                 if opt in ('keywords', 'platforms'):
@@ -740,13 +740,13 @@ def print_commands(self):
         import distutils.command
 
         std_commands = distutils.command.__all__
-        is_std = {}
+        is_std = set()
         for cmd in std_commands:
-            is_std[cmd] = 1
+            is_std.add(cmd)
 
         extra_commands = []
         for cmd in self.cmdclass.keys():
-            if not is_std.get(cmd):
+            if cmd not in is_std:
                 extra_commands.append(cmd)
 
         max_length = 0
@@ -771,13 +771,13 @@ def get_command_list(self):
         import distutils.command
 
         std_commands = distutils.command.__all__
-        is_std = {}
+        is_std = set()
         for cmd in std_commands:
-            is_std[cmd] = 1
+            is_std.add(cmd)
 
         extra_commands = []
         for cmd in self.cmdclass.keys():
-            if not is_std.get(cmd):
+            if cmd not in is_std:
                 extra_commands.append(cmd)
 
         rv = []

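The shape of this change is worth spelling out: each of these dicts only ever stored the value `True` (or `1`), so the mapping machinery was noise and membership was the whole point. A minimal sketch of the before/after, which could even shrink the loop away entirely:

    std_commands = ['build', 'sdist', 'upload']

    # Before: a dict used purely for membership tests.
    is_std_dict = {}
    for cmd in std_commands:
        is_std_dict[cmd] = 1
    assert is_std_dict.get('build')

    # After: a set says the same thing directly...
    is_std = set()
    for cmd in std_commands:
        is_std.add(cmd)
    assert 'build' in is_std

    # ...and the loop itself is redundant:
    assert is_std == set(std_commands)
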
From 33b5afa4d8cab3f8d6b365af5bc684c8d5407350 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 17 May 2024 18:06:18 -0400
Subject: [PATCH 0790/1761] Use actual boolean parameters and variables

---
 distutils/_msvccompiler.py              | 10 +++---
 distutils/archive_util.py               | 14 +++++---
 distutils/bcppcompiler.py               | 12 +++----
 distutils/ccompiler.py                  | 28 +++++++--------
 distutils/cmd.py                        | 26 ++++++++------
 distutils/command/bdist.py              |  4 +--
 distutils/command/bdist_dumb.py         |  8 ++---
 distutils/command/bdist_rpm.py          | 12 +++----
 distutils/command/build.py              |  2 +-
 distutils/command/build_clib.py         |  2 +-
 distutils/command/build_ext.py          |  4 +--
 distutils/command/build_py.py           | 10 +++---
 distutils/command/check.py              |  6 ++--
 distutils/command/config.py             |  6 ++--
 distutils/command/install.py            | 10 +++---
 distutils/command/install_data.py       |  4 +--
 distutils/command/install_headers.py    |  2 +-
 distutils/command/install_lib.py        |  2 +-
 distutils/command/install_scripts.py    |  2 +-
 distutils/command/register.py           |  6 ++--
 distutils/command/sdist.py              | 24 ++++++-------
 distutils/command/upload.py             |  2 +-
 distutils/config.py                     |  2 +-
 distutils/cygwinccompiler.py            |  6 ++--
 distutils/dir_util.py                   | 20 +++++------
 distutils/dist.py                       | 28 +++++++--------
 distutils/extension.py                  | 10 +++---
 distutils/fancy_getopt.py               |  6 ++--
 distutils/file_util.py                  | 12 +++----
 distutils/filelist.py                   | 14 ++++----
 distutils/msvc9compiler.py              | 12 +++----
 distutils/msvccompiler.py               | 12 +++----
 distutils/sysconfig.py                  | 10 ++++--
 distutils/tests/test_bdist.py           |  2 +-
 distutils/tests/test_bdist_rpm.py       |  4 +--
 distutils/tests/test_build_ext.py       | 22 ++++++------
 distutils/tests/test_build_py.py        | 12 ++++---
 distutils/tests/test_build_scripts.py   |  2 +-
 distutils/tests/test_check.py           |  8 ++---
 distutils/tests/test_cmd.py             |  2 +-
 distutils/tests/test_dir_util.py        | 36 +++++++++----------
 distutils/tests/test_dist.py            |  2 +-
 distutils/tests/test_file_util.py       | 14 ++++----
 distutils/tests/test_install.py         |  2 +-
 distutils/tests/test_install_data.py    |  2 +-
 distutils/tests/test_install_lib.py     |  2 +-
 distutils/tests/test_install_scripts.py |  8 ++---
 distutils/tests/test_register.py        | 14 ++++----
 distutils/tests/test_text_file.py       | 34 +++++++++++++-----
 distutils/tests/test_upload.py          |  4 +--
 distutils/unixccompiler.py              |  6 ++--
 distutils/util.py                       | 12 +++----
 distutils/zosccompiler.py               |  4 +--
 docs/distutils/apiref.rst               | 46 ++++++++++++-------------
 docs/distutils/configfile.rst           |  2 +-
 docs/distutils/setupscript.rst          |  2 +-
 56 files changed, 302 insertions(+), 268 deletions(-)

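One observation before the diff: these integer flags date from before Python had a `bool` type, and the conversion is safe precisely because `bool` is a subclass of `int` -- `True == 1`, so callers that still pass `0`/`1` keep working. A minimal sketch of the signature change (names are illustrative):

    def copy_file_old(src, dst, preserve_mode=1, preserve_times=1):
        return preserve_mode, preserve_times

    def copy_file_new(src, dst, preserve_mode=True, preserve_times=True):
        return preserve_mode, preserve_times

    # bool interoperates with the old integer convention.
    assert True == 1 and isinstance(True, int)
    assert copy_file_old('a', 'b') == copy_file_new('a', 'b')
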
diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index a2159fef83..b0322410c5 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -218,7 +218,7 @@ class MSVCCompiler(CCompiler):
     static_lib_format = shared_lib_format = '%s%s'
     exe_extension = '.exe'
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
         # target platform (.plat_name is consistent with 'bdist')
         self.plat_name = None
@@ -334,7 +334,7 @@ def compile(  # noqa: C901
         output_dir=None,
         macros=None,
         include_dirs=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         depends=None,
@@ -423,7 +423,7 @@ def compile(  # noqa: C901
         return objects
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
     ):
         if not self.initialized:
             self.initialize()
@@ -452,7 +452,7 @@ def link(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -551,7 +551,7 @@ def runtime_library_dir_option(self, dir):
     def library_option(self, lib):
         return self.library_filename(lib)
 
-    def find_library_file(self, dirs, lib, debug=0):
+    def find_library_file(self, dirs, lib, debug=False):
         # Prefer a debugging library if found (and requested), but deal
         # with it if we don't have one.
         if debug:
diff --git a/distutils/archive_util.py b/distutils/archive_util.py
index 052f6e4646..7b2d177107 100644
--- a/distutils/archive_util.py
+++ b/distutils/archive_util.py
@@ -56,7 +56,13 @@ def _get_uid(name):
 
 
 def make_tarball(
-    base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None
+    base_name,
+    base_dir,
+    compress="gzip",
+    verbose=False,
+    dry_run=False,
+    owner=None,
+    group=None,
 ):
     """Create a (possibly compressed) tar file from all the files under
     'base_dir'.
@@ -134,7 +140,7 @@ def _set_uid_gid(tarinfo):
     return archive_name
 
 
-def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):  # noqa: C901
+def make_zipfile(base_name, base_dir, verbose=False, dry_run=False):  # noqa: C901
     """Create a zip file from all the files under 'base_dir'.
 
     The output zip file will be named 'base_name' + ".zip".  Uses either the
@@ -224,8 +230,8 @@ def make_archive(
     format,
     root_dir=None,
     base_dir=None,
-    verbose=0,
-    dry_run=0,
+    verbose=False,
+    dry_run=False,
     owner=None,
     group=None,
 ):
diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py
index c1341e43cb..4f47058cb3 100644
--- a/distutils/bcppcompiler.py
+++ b/distutils/bcppcompiler.py
@@ -61,7 +61,7 @@ class BCPPCompiler(CCompiler):
     static_lib_format = shared_lib_format = '%s%s'
     exe_extension = '.exe'
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
 
         # These executables are assumed to all be in the path.
@@ -90,7 +90,7 @@ def compile(  # noqa: C901
         output_dir=None,
         macros=None,
         include_dirs=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         depends=None,
@@ -161,7 +161,7 @@ def compile(  # noqa: C901
     # compile ()
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
     ):
         (objects, output_dir) = self._fix_object_args(objects, output_dir)
         output_filename = self.library_filename(output_libname, output_dir=output_dir)
@@ -189,7 +189,7 @@ def link(  # noqa: C901
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -313,7 +313,7 @@ def link(  # noqa: C901
 
     # -- Miscellaneous methods -----------------------------------------
 
-    def find_library_file(self, dirs, lib, debug=0):
+    def find_library_file(self, dirs, lib, debug=False):
         # List of effective library names to try, in order of preference:
         # xxx_bcpp.lib is better than xxx.lib
         # and xxx_d.lib is better than xxx.lib if debug is set
@@ -339,7 +339,7 @@ def find_library_file(self, dirs, lib, debug=0):
             return None
 
     # overwrite the one from CCompiler to support rc and res-files
-    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
         if output_dir is None:
             output_dir = ''
         obj_names = []
diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 8876d73098..c4dced3868 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -104,7 +104,7 @@ class CCompiler:
     library dirs specific to this compiler class
     """
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         self.dry_run = dry_run
         self.force = force
         self.verbose = verbose
@@ -342,7 +342,7 @@ def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra):
             extra = []
 
         # Get the list of expected output (object) files
-        objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir)
+        objects = self.object_filenames(sources, strip_dir=False, output_dir=outdir)
         assert len(objects) == len(sources)
 
         pp_opts = gen_preprocess_options(macros, incdirs)
@@ -532,7 +532,7 @@ def compile(
         output_dir=None,
         macros=None,
         include_dirs=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         depends=None,
@@ -609,7 +609,7 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
         pass
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
     ):
         """Link a bunch of stuff together to create a static library file.
         The "bunch of stuff" consists of the list of object files supplied
@@ -650,7 +650,7 @@ def link(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -712,7 +712,7 @@ def link_shared_lib(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -743,7 +743,7 @@ def link_shared_object(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -773,7 +773,7 @@ def link_executable(
         libraries=None,
         library_dirs=None,
         runtime_library_dirs=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         target_lang=None,
@@ -909,7 +909,7 @@ def has_function(  # noqa: C901
                 os.remove(fn)
         return True
 
-    def find_library_file(self, dirs, lib, debug=0):
+    def find_library_file(self, dirs, lib, debug=False):
         """Search the specified list of directories for a static or shared
         library file 'lib' and return the full path to that file.  If
         'debug' true, look for a debugging version (if that makes sense on
@@ -952,7 +952,7 @@ def find_library_file(self, dirs, lib, debug=0):
     #   * exe_extension -
     #     extension for executable files, eg. '' or '.exe'
 
-    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
         if output_dir is None:
             output_dir = ''
         return list(
@@ -987,13 +987,13 @@ def _make_relative(base):
         # If abs, chop off leading /
         return no_drive[os.path.isabs(no_drive) :]
 
-    def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
+    def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
         assert output_dir is not None
         if strip_dir:
             basename = os.path.basename(basename)
         return os.path.join(output_dir, basename + self.shared_lib_extension)
 
-    def executable_filename(self, basename, strip_dir=0, output_dir=''):
+    def executable_filename(self, basename, strip_dir=False, output_dir=''):
         assert output_dir is not None
         if strip_dir:
             basename = os.path.basename(basename)
@@ -1003,7 +1003,7 @@ def library_filename(
         self,
         libname,
         lib_type='static',
-        strip_dir=0,
+        strip_dir=False,
         output_dir='',  # or 'shared'
     ):
         assert output_dir is not None
@@ -1125,7 +1125,7 @@ def show_compilers():
     pretty_printer.print_help("List of available compilers:")
 
 
-def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
+def new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=False):
     """Generate an instance of some CCompiler subclass for the supplied
     platform/compiler combination.  'plat' defaults to 'os.name'
     (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
diff --git a/distutils/cmd.py b/distutils/cmd.py
index 02dbf165f5..fc35bd977d 100644
--- a/distutils/cmd.py
+++ b/distutils/cmd.py
@@ -87,13 +87,13 @@ def __init__(self, dist):
 
         # The 'help' flag is just used for command-line parsing, so
         # none of that complicated bureaucracy is needed.
-        self.help = 0
+        self.help = False
 
         # 'finalized' records whether or not 'finalize_options()' has been
         # called.  'finalize_options()' itself should not pay attention to
         # this flag: it is the business of 'ensure_finalized()', which
         # always calls 'finalize_options()', to respect/update it.
-        self.finalized = 0
+        self.finalized = False
 
     # XXX A more explicit way to customize dry_run would be better.
     def __getattr__(self, attr):
@@ -109,7 +109,7 @@ def __getattr__(self, attr):
     def ensure_finalized(self):
         if not self.finalized:
             self.finalize_options()
-        self.finalized = 1
+        self.finalized = True
 
     # Subclasses must define:
     #   initialize_options()
@@ -293,7 +293,7 @@ def set_undefined_options(self, src_cmd, *option_pairs):
             if getattr(self, dst_option) is None:
                 setattr(self, dst_option, getattr(src_cmd_obj, src_option))
 
-    def get_finalized_command(self, command, create=1):
+    def get_finalized_command(self, command, create=True):
         """Wrapper around Distribution's 'get_command_obj()' method: find
         (create if necessary and 'create' is true) the command object for
         'command', call its 'ensure_finalized()' method, and return the
@@ -305,7 +305,7 @@ def get_finalized_command(self, command, create=1):
 
     # XXX rename to 'get_reinitialized_command()'? (should do the
     # same in dist.py, if so)
-    def reinitialize_command(self, command, reinit_subcommands=0):
+    def reinitialize_command(self, command, reinit_subcommands=False):
         return self.distribution.reinitialize_command(command, reinit_subcommands)
 
     def run_command(self, command):
@@ -340,7 +340,13 @@ def mkpath(self, name, mode=0o777):
         dir_util.mkpath(name, mode, dry_run=self.dry_run)
 
     def copy_file(
-        self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
+        self,
+        infile,
+        outfile,
+        preserve_mode=True,
+        preserve_times=True,
+        link=None,
+        level=1,
     ):
         """Copy a file respecting verbose, dry-run and force flags.  (The
         former two default to whatever is in the Distribution object, and
@@ -359,9 +365,9 @@ def copy_tree(
         self,
         infile,
         outfile,
-        preserve_mode=1,
-        preserve_times=1,
-        preserve_symlinks=0,
+        preserve_mode=True,
+        preserve_times=True,
+        preserve_symlinks=False,
         level=1,
     ):
         """Copy an entire directory tree respecting verbose, dry-run,
@@ -381,7 +387,7 @@ def move_file(self, src, dst, level=1):
         """Move a file respecting dry-run flag."""
         return file_util.move_file(src, dst, dry_run=self.dry_run)
 
-    def spawn(self, cmd, search_path=1, level=1):
+    def spawn(self, cmd, search_path=True, level=1):
         """Spawn an external command respecting dry-run flag."""
         from distutils.spawn import spawn
 
diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index ade98445ba..21bc7c5d80 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -94,7 +94,7 @@ def initialize_options(self):
         self.plat_name = None
         self.formats = None
         self.dist_dir = None
-        self.skip_build = 0
+        self.skip_build = False
         self.group = None
         self.owner = None
 
@@ -150,5 +150,5 @@ def run(self):
             # If we're going to need to run this command again, tell it to
             # keep its temporary files around so subsequent runs go faster.
             if cmd_name in commands[i + 1 :]:
-                sub_cmd.keep_temp = 1
+                sub_cmd.keep_temp = True
             self.run_command(cmd_name)
diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index 06502d201e..0f15b9fa67 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -63,10 +63,10 @@ def initialize_options(self):
         self.bdist_dir = None
         self.plat_name = None
         self.format = None
-        self.keep_temp = 0
+        self.keep_temp = False
         self.dist_dir = None
         self.skip_build = None
-        self.relative = 0
+        self.relative = False
         self.owner = None
         self.group = None
 
@@ -95,10 +95,10 @@ def run(self):
         if not self.skip_build:
             self.run_command('build')
 
-        install = self.reinitialize_command('install', reinit_subcommands=1)
+        install = self.reinitialize_command('install', reinit_subcommands=True)
         install.root = self.bdist_dir
         install.skip_build = self.skip_build
-        install.warn_dir = 0
+        install.warn_dir = False
 
         log.info("installing to %s", self.bdist_dir)
         self.run_command('install')
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index 649968a5eb..769dfa6d74 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -187,13 +187,13 @@ def initialize_options(self):
         self.build_requires = None
         self.obsoletes = None
 
-        self.keep_temp = 0
-        self.use_rpm_opt_flags = 1
-        self.rpm3_mode = 1
-        self.no_autoreq = 0
+        self.keep_temp = False
+        self.use_rpm_opt_flags = True
+        self.rpm3_mode = True
+        self.no_autoreq = False
 
         self.force_arch = None
-        self.quiet = 0
+        self.quiet = False
 
     def finalize_options(self):
         self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
@@ -223,7 +223,7 @@ def finalize_options(self):
 
         # don't pass CFLAGS to pure python distributions
         if not self.distribution.has_ext_modules():
-            self.use_rpm_opt_flags = 0
+            self.use_rpm_opt_flags = False
 
         self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
         self.finalize_package_data()
diff --git a/distutils/command/build.py b/distutils/command/build.py
index d18ed503e3..0128500b0f 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -62,7 +62,7 @@ def initialize_options(self):
         self.compiler = None
         self.plat_name = None
         self.debug = None
-        self.force = 0
+        self.force = False
         self.executable = None
         self.parallel = None
 
diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
index 360575d0cb..85148d4b2e 100644
--- a/distutils/command/build_clib.py
+++ b/distutils/command/build_clib.py
@@ -57,7 +57,7 @@ def initialize_options(self):
         self.define = None
         self.undef = None
         self.debug = None
-        self.force = 0
+        self.force = False
         self.compiler = None
 
     def finalize_options(self):
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 06d949aff1..bfe6860035 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -109,7 +109,7 @@ def initialize_options(self):
         self.build_lib = None
         self.plat_name = None
         self.build_temp = None
-        self.inplace = 0
+        self.inplace = False
         self.package = None
 
         self.include_dirs = None
@@ -175,7 +175,7 @@ def finalize_options(self):  # noqa: C901
         # Make sure Python's include directories (for Python.h, pyconfig.h,
         # etc.) are in the include search path.
         py_include = sysconfig.get_python_inc()
-        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+        plat_py_include = sysconfig.get_python_inc(plat_specific=True)
         if self.include_dirs is None:
             self.include_dirs = self.distribution.include_dirs or []
         if isinstance(self.include_dirs, str):
diff --git a/distutils/command/build_py.py b/distutils/command/build_py.py
index 56e6fa2e66..40a99b90d4 100644
--- a/distutils/command/build_py.py
+++ b/distutils/command/build_py.py
@@ -38,7 +38,7 @@ def initialize_options(self):
         self.package = None
         self.package_data = None
         self.package_dir = None
-        self.compile = 0
+        self.compile = False
         self.optimize = 0
         self.force = None
 
@@ -95,7 +95,7 @@ def run(self):
             self.build_packages()
             self.build_package_data()
 
-        self.byte_compile(self.get_outputs(include_bytecode=0))
+        self.byte_compile(self.get_outputs(include_bytecode=False))
 
     def get_data_files(self):
         """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
@@ -264,7 +264,7 @@ def find_modules(self):
                 (package_dir, checked) = packages[package]
             except KeyError:
                 package_dir = self.get_package_dir(package)
-                checked = 0
+                checked = False
 
             if not checked:
                 init_py = self.check_package(package, package_dir)
@@ -306,7 +306,7 @@ def get_module_outfile(self, build_dir, package, module):
         outfile_path = [build_dir] + list(package) + [module + ".py"]
         return os.path.join(*outfile_path)
 
-    def get_outputs(self, include_bytecode=1):
+    def get_outputs(self, include_bytecode=True):
         modules = self.find_all_modules()
         outputs = []
         for package, module, _module_file in modules:
@@ -347,7 +347,7 @@ def build_module(self, module, module_file, package):
         outfile = self.get_module_outfile(self.build_lib, package, module)
         dir = os.path.dirname(outfile)
         self.mkpath(dir)
-        return self.copy_file(module_file, outfile, preserve_mode=0)
+        return self.copy_file(module_file, outfile, preserve_mode=False)
 
     def build_modules(self):
         modules = self.find_modules()
diff --git a/distutils/command/check.py b/distutils/command/check.py
index 28599e109c..11d40a966b 100644
--- a/distutils/command/check.py
+++ b/distutils/command/check.py
@@ -21,7 +21,7 @@ def __init__(
             report_level,
             halt_level,
             stream=None,
-            debug=0,
+            debug=False,
             encoding='ascii',
             error_handler='replace',
         ):
@@ -58,9 +58,9 @@ class check(Command):
 
     def initialize_options(self):
         """Sets default values for options."""
-        self.restructuredtext = 0
+        self.restructuredtext = False
         self.metadata = 1
-        self.strict = 0
+        self.strict = False
         self._warnings = 0
 
     def finalize_options(self):
diff --git a/distutils/command/config.py b/distutils/command/config.py
index d4b2b0a362..e82f0fd043 100644
--- a/distutils/command/config.py
+++ b/distutils/command/config.py
@@ -94,7 +94,7 @@ def _check_compiler(self):
 
         if not isinstance(self.compiler, CCompiler):
             self.compiler = new_compiler(
-                compiler=self.compiler, dry_run=self.dry_run, force=1
+                compiler=self.compiler, dry_run=self.dry_run, force=True
             )
             customize_compiler(self.compiler)
             if self.include_dirs:
@@ -292,8 +292,8 @@ def check_func(
         include_dirs=None,
         libraries=None,
         library_dirs=None,
-        decl=0,
-        call=0,
+        decl=False,
+        call=False,
     ):
         """Determine if function 'func' is available by constructing a
         source file that refers to 'func', and compiles and links it.
diff --git a/distutils/command/install.py b/distutils/command/install.py
index 8e920be4de..9b82cf1e42 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -258,7 +258,7 @@ def initialize_options(self):
         self.prefix = None
         self.exec_prefix = None
         self.home = None
-        self.user = 0
+        self.user = False
 
         # These select only the installation base; it's up to the user to
         # specify the installation scheme (currently, that means supplying
@@ -293,7 +293,7 @@ def initialize_options(self):
         # 'install_path_file' is always true unless some outsider meddles
         # with it.
         self.extra_path = None
-        self.install_path_file = 1
+        self.install_path_file = True
 
         # 'force' forces installation, even if target files are not
         # out-of-date.  'skip_build' skips running the "build" command,
@@ -301,9 +301,9 @@ def initialize_options(self):
         # a user option, it's just there so the bdist_* commands can turn
         # it off) determines whether we warn about installing to a
         # directory not in sys.path.
-        self.force = 0
-        self.skip_build = 0
-        self.warn_dir = 1
+        self.force = False
+        self.skip_build = False
+        self.warn_dir = True
 
         # These are only here as a conduit from the 'build' command to the
         # 'install_*' commands that do the real work.  ('build_base' isn't
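
Illustrative only (not part of the patch): finalizing 'install' with
the boolean defaults above, overriding 'user' for a per-user scheme.

    from distutils.dist import Distribution

    dist = Distribution({'name': 'demo', 'version': '0.1'})
    cmd = dist.get_command_obj('install')
    cmd.user = True          # was: cmd.user = 1
    cmd.skip_build = True    # don't run 'build' first
    cmd.ensure_finalized()   # resolves the install_* directories
    print(cmd.install_lib)   # lands under site.USER_SITE
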
diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index b63a1af25e..a4da892480 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -31,9 +31,9 @@ def initialize_options(self):
         self.install_dir = None
         self.outfiles = []
         self.root = None
-        self.force = 0
+        self.force = False
         self.data_files = self.distribution.data_files
-        self.warn_dir = 1
+        self.warn_dir = True
 
     def finalize_options(self):
         self.set_undefined_options(
diff --git a/distutils/command/install_headers.py b/distutils/command/install_headers.py
index 085272c1a2..fbb3b242ea 100644
--- a/distutils/command/install_headers.py
+++ b/distutils/command/install_headers.py
@@ -19,7 +19,7 @@ class install_headers(Command):
 
     def initialize_options(self):
         self.install_dir = None
-        self.force = 0
+        self.force = False
         self.outfiles = []
 
     def finalize_options(self):
diff --git a/distutils/command/install_lib.py b/distutils/command/install_lib.py
index b1f346f018..efa6058598 100644
--- a/distutils/command/install_lib.py
+++ b/distutils/command/install_lib.py
@@ -54,7 +54,7 @@ def initialize_options(self):
         # let the 'install' command dictate our installation directory
         self.install_dir = None
         self.build_dir = None
-        self.force = 0
+        self.force = False
         self.compile = None
         self.optimize = None
         self.skip_build = None
diff --git a/distutils/command/install_scripts.py b/distutils/command/install_scripts.py
index e66b13a16d..bb43387fb8 100644
--- a/distutils/command/install_scripts.py
+++ b/distutils/command/install_scripts.py
@@ -26,7 +26,7 @@ class install_scripts(Command):
 
     def initialize_options(self):
         self.install_dir = None
-        self.force = 0
+        self.force = False
         self.build_dir = None
         self.skip_build = None
 
diff --git a/distutils/command/register.py b/distutils/command/register.py
index ee6c54daba..0311d45a36 100644
--- a/distutils/command/register.py
+++ b/distutils/command/register.py
@@ -37,8 +37,8 @@ class register(PyPIRCCommand):
 
     def initialize_options(self):
         PyPIRCCommand.initialize_options(self)
-        self.list_classifiers = 0
-        self.strict = 0
+        self.list_classifiers = False
+        self.strict = False
 
     def finalize_options(self):
         PyPIRCCommand.finalize_options(self)
@@ -74,7 +74,7 @@ def check_metadata(self):
         check = self.distribution.get_command_obj('check')
         check.ensure_finalized()
         check.strict = self.strict
-        check.restructuredtext = 1
+        check.restructuredtext = True
         check.run()
 
     def _set_config(self):
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index 387d27c90b..e737cf2acc 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -125,14 +125,14 @@ def initialize_options(self):
 
         # 'use_defaults': if true, we will include the default file set
         # in the manifest
-        self.use_defaults = 1
-        self.prune = 1
+        self.use_defaults = True
+        self.prune = True
 
-        self.manifest_only = 0
-        self.force_manifest = 0
+        self.manifest_only = False
+        self.force_manifest = False
 
         self.formats = ['gztar']
-        self.keep_temp = 0
+        self.keep_temp = False
         self.dist_dir = None
 
         self.archive_files = None
@@ -353,12 +353,12 @@ def read_template(self):
         log.info("reading manifest template '%s'", self.template)
         template = TextFile(
             self.template,
-            strip_comments=1,
-            skip_blanks=1,
-            join_lines=1,
-            lstrip_ws=1,
-            rstrip_ws=1,
-            collapse_join=1,
+            strip_comments=True,
+            skip_blanks=True,
+            join_lines=True,
+            lstrip_ws=True,
+            rstrip_ws=True,
+            collapse_join=True,
         )
 
         try:
@@ -401,7 +401,7 @@ def prune_file_list(self):
 
         vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs']
         vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps)
-        self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
+        self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)
 
     def write_manifest(self):
         """Write the file list in 'self.filelist' (presumably as filled in
diff --git a/distutils/command/upload.py b/distutils/command/upload.py
index cf541f8a82..63751e7268 100644
--- a/distutils/command/upload.py
+++ b/distutils/command/upload.py
@@ -41,7 +41,7 @@ def initialize_options(self):
         PyPIRCCommand.initialize_options(self)
         self.username = ''
         self.password = ''
-        self.show_response = 0
+        self.show_response = False
         self.sign = False
         self.identity = None
 
diff --git a/distutils/config.py b/distutils/config.py
index 83f96a9eec..7b273e168d 100644
--- a/distutils/config.py
+++ b/distutils/config.py
@@ -129,7 +129,7 @@ def initialize_options(self):
         """Initialize options."""
         self.repository = None
         self.realm = None
-        self.show_response = 0
+        self.show_response = False
 
     def finalize_options(self):
         """Finalizes options."""
diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py
index 539f09d8f3..f2e62bd7ac 100644
--- a/distutils/cygwinccompiler.py
+++ b/distutils/cygwinccompiler.py
@@ -83,7 +83,7 @@ class CygwinCCompiler(UnixCCompiler):
     dylib_lib_format = "cyg%s%s"
     exe_extension = ".exe"
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
 
         status, details = check_config_h()
@@ -154,7 +154,7 @@ def link(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -265,7 +265,7 @@ class Mingw32CCompiler(CygwinCCompiler):
 
     compiler_type = 'mingw32'
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
 
         shared_option = "-shared"
diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 370c6ffd49..4108c6c514 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -13,7 +13,7 @@
 _path_created = {}
 
 
-def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
+def mkpath(name, mode=0o777, verbose=True, dry_run=False):  # noqa: C901
     """Create a directory and any missing ancestor directories.
 
     If the directory already exists (or if 'name' is the empty string, which
@@ -79,11 +79,11 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901
                     )
             created_dirs.append(head)
 
-        _path_created[abs_head] = 1
+        _path_created[abs_head] = True
     return created_dirs
 
 
-def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
+def create_tree(base_dir, files, mode=0o777, verbose=True, dry_run=False):
     """Create all the empty directories under 'base_dir' needed to put 'files'
     there.
 
@@ -104,12 +104,12 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
 def copy_tree(  # noqa: C901
     src,
     dst,
-    preserve_mode=1,
-    preserve_times=1,
-    preserve_symlinks=0,
-    update=0,
-    verbose=1,
-    dry_run=0,
+    preserve_mode=True,
+    preserve_times=True,
+    preserve_symlinks=False,
+    update=False,
+    verbose=True,
+    dry_run=False,
 ):
     """Copy an entire directory tree 'src' to a new location 'dst'.
 
@@ -202,7 +202,7 @@ def _build_cmdtuple(path, cmdtuples):
     cmdtuples.append((os.rmdir, path))
 
 
-def remove_tree(directory, verbose=1, dry_run=0):
+def remove_tree(directory, verbose=True, dry_run=False):
     """Recursively remove an entire directory tree.
 
     Any errors are ignored (apart from being reported to stdout if 'verbose'
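
A usage sketch (not part of the patch) of the dir_util helpers with
boolean verbose/dry_run arguments; the paths are throwaway examples.

    from distutils.dir_util import mkpath, copy_tree, remove_tree

    mkpath('build/demo/pkg', verbose=True)      # logs "creating ..."
    copy_tree('build/demo', 'build/demo2',
              preserve_symlinks=False, update=False, dry_run=False)
    remove_tree('build/demo', verbose=False)    # silent removal
    remove_tree('build/demo2', verbose=False)
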
diff --git a/distutils/dist.py b/distutils/dist.py
index 668ce7eb0a..13b939ae42 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -137,9 +137,9 @@ def __init__(self, attrs=None):  # noqa: C901
         """
 
         # Default values for our command-line options
-        self.verbose = 1
-        self.dry_run = 0
-        self.help = 0
+        self.verbose = True
+        self.dry_run = False
+        self.help = False
         for attr in self.display_option_names:
             setattr(self, attr, 0)
 
@@ -579,7 +579,7 @@ def _parse_command_opts(self, parser, args):  # noqa: C901
         parser.set_negative_aliases(negative_opt)
         (args, opts) = parser.getopt(args[1:])
         if hasattr(opts, 'help') and opts.help:
-            self._show_help(parser, display_options=0, commands=[cmd_class])
+            self._show_help(parser, display_options=False, commands=[cmd_class])
             return
 
         if hasattr(cmd_class, 'help_options') and isinstance(
@@ -622,7 +622,7 @@ def finalize_options(self):
                 setattr(self.metadata, attr, value)
 
     def _show_help(
-        self, parser, global_options=1, display_options=1, commands: Iterable = ()
+        self, parser, global_options=True, display_options=True, commands: Iterable = ()
     ):
         """Show help for the setup script command-line in the form of
         several lists of command-line options.  'parser' should be a
@@ -696,7 +696,7 @@ def handle_display_options(self, option_order):
         any_display_options = 0
         is_display_option = {}
         for option in self.display_options:
-            is_display_option[option[0]] = 1
+            is_display_option[option[0]] = True
 
         for opt, val in option_order:
             if val and is_display_option.get(opt):
@@ -742,7 +742,7 @@ def print_commands(self):
         std_commands = distutils.command.__all__
         is_std = {}
         for cmd in std_commands:
-            is_std[cmd] = 1
+            is_std[cmd] = True
 
         extra_commands = []
         for cmd in self.cmdclass.keys():
@@ -773,7 +773,7 @@ def get_command_list(self):
         std_commands = distutils.command.__all__
         is_std = {}
         for cmd in std_commands:
-            is_std[cmd] = 1
+            is_std[cmd] = True
 
         extra_commands = []
         for cmd in self.cmdclass.keys():
@@ -844,7 +844,7 @@ def get_command_class(self, command):
 
         raise DistutilsModuleError("invalid command '%s'" % command)
 
-    def get_command_obj(self, command, create=1):
+    def get_command_obj(self, command, create=True):
         """Return the command object for 'command'.  Normally this object
         is cached on a previous call to 'get_command_obj()'; if no command
         object for 'command' is in the cache, then we either create and
@@ -860,7 +860,7 @@ def get_command_obj(self, command, create=1):
 
             klass = self.get_command_class(command)
             cmd_obj = self.command_obj[command] = klass(self)
-            self.have_run[command] = 0
+            self.have_run[command] = False
 
             # Set any options that were supplied in config files
             # or on the command line.  (NB. support for error
@@ -915,7 +915,7 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
             except ValueError as msg:
                 raise DistutilsOptionError(msg)
 
-    def reinitialize_command(self, command, reinit_subcommands=0):
+    def reinitialize_command(self, command, reinit_subcommands=False):
         """Reinitializes a command to the state it was in when first
         returned by 'get_command_obj()': ie., initialized but not yet
         finalized.  This provides the opportunity to sneak option
@@ -945,8 +945,8 @@ def reinitialize_command(self, command, reinit_subcommands=0):
         if not command.finalized:
             return command
         command.initialize_options()
-        command.finalized = 0
-        self.have_run[command_name] = 0
+        command.finalized = False
+        self.have_run[command_name] = False
         self._set_command_options(command)
 
         if reinit_subcommands:
@@ -986,7 +986,7 @@ def run_command(self, command):
         cmd_obj = self.get_command_obj(command)
         cmd_obj.ensure_finalized()
         cmd_obj.run()
-        self.have_run[command] = 1
+        self.have_run[command] = True
 
     # -- Distribution query methods ------------------------------------
 
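For illustration (not part of the patch), the boolean 'have_run'
bookkeeping above can be observed directly on a Distribution.

    from distutils.dist import Distribution

    dist = Distribution({'name': 'demo', 'version': '0.1'})
    dist.get_command_obj('build')        # created and cached
    assert not dist.have_run['build']    # registered as not yet run
    dist.run_command('build')            # finalizes, runs, records
    assert dist.have_run['build']
    # reinitialize_command() resets 'finalized' and 'have_run' so the
    # command can be reconfigured and run again.
    dist.reinitialize_command('build')
    assert not dist.have_run['build']
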
diff --git a/distutils/extension.py b/distutils/extension.py
index 94e71635d9..793f8972d1 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -150,11 +150,11 @@ def read_setup_file(filename):  # noqa: C901
     #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
     file = TextFile(
         filename,
-        strip_comments=1,
-        skip_blanks=1,
-        join_lines=1,
-        lstrip_ws=1,
-        rstrip_ws=1,
+        strip_comments=True,
+        skip_blanks=True,
+        join_lines=True,
+        lstrip_ws=True,
+        rstrip_ws=True,
     )
     try:
         extensions = []
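
The TextFile flags converted here (and in sdist.read_template above)
compose as in this sketch (illustrative, not part of the patch;
'Setup.sample' is an assumed scratch file).

    from distutils.text_file import TextFile

    with open('Setup.sample', 'w') as f:
        f.write('# a comment\n')
        f.write('demo demo.c \\\n')
        f.write('    -DDEBUG\n')

    tf = TextFile(
        'Setup.sample',
        strip_comments=True,    # drop '#' comments
        skip_blanks=True,       # skip lines left empty
        join_lines=True,        # honour a trailing backslash
        lstrip_ws=True,
        rstrip_ws=True,
        collapse_join=True,     # strip leading ws on joined lines
    )
    print(tf.readlines())       # ['demo demo.c -DDEBUG']
    tf.close()
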
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index e905aede4d..abbbe26777 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -178,7 +178,7 @@ def _grok_option_table(self):  # noqa: C901
                 if short:
                     short = short + ':'
                 long = long[0:-1]
-                self.takes_arg[long] = 1
+                self.takes_arg[long] = True
             else:
                 # Is this option a "negative alias" for some other option (eg.
                 # "quiet" == "!verbose")?
@@ -191,7 +191,7 @@ def _grok_option_table(self):  # noqa: C901
                         )
 
                     self.long_opts[-1] = long  # XXX redundant?!
-                self.takes_arg[long] = 0
+                self.takes_arg[long] = False
 
             # If this is an alias option, make sure its "takes arg" flag is
             # the same as the option it's aliased to.
@@ -268,7 +268,7 @@ def getopt(self, args=None, object=None):  # noqa: C901
 
             attr = self.attr_name[opt]
             # The only repeating option at the moment is 'verbose'.
-            # It has a negative option -q quiet, which should set verbose = 0.
+            # It has a negative option -q quiet, which should set verbose = False.
             if val and self.repeat.get(attr) is not None:
                 val = getattr(object, attr, 0) + 1
             setattr(object, attr, val)
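
An illustrative sketch (not part of the patch) of the negative-alias
machinery that these boolean 'takes_arg' entries feed.

    from distutils.fancy_getopt import FancyGetopt

    parser = FancyGetopt([
        ('verbose', 'v', 'run verbosely'),
        ('quiet', 'q', 'run quietly (negates --verbose)'),
    ])
    parser.set_negative_aliases({'quiet': 'verbose'})

    class Opts:
        pass

    args = parser.getopt(['--quiet', 'sdist'], Opts)
    print(args)           # ['sdist']
    print(Opts.verbose)   # 0 -- '--quiet' cleared the flag
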
diff --git a/distutils/file_util.py b/distutils/file_util.py
index 960def9cf9..0f20091edc 100644
--- a/distutils/file_util.py
+++ b/distutils/file_util.py
@@ -63,12 +63,12 @@ def _copy_file_contents(src, dst, buffer_size=16 * 1024):  # noqa: C901
 def copy_file(  # noqa: C901
     src,
     dst,
-    preserve_mode=1,
-    preserve_times=1,
-    update=0,
+    preserve_mode=True,
+    preserve_times=True,
+    update=False,
     link=None,
-    verbose=1,
-    dry_run=0,
+    verbose=True,
+    dry_run=False,
 ):
     """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is
     copied there with the same name; otherwise, it must be a filename.  (If
@@ -168,7 +168,7 @@ def copy_file(  # noqa: C901
 
 
 # XXX I suspect this is Unix-specific -- need porting help!
-def move_file(src, dst, verbose=1, dry_run=0):  # noqa: C901
+def move_file(src, dst, verbose=True, dry_run=False):  # noqa: C901
     """Move a file 'src' to 'dst'.  If 'dst' is a directory, the file will
     be moved into it with the same name; otherwise, 'src' is just renamed
     to 'dst'.  Return the new full name of the file.
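
A sketch (not part of the patch) of copy_file/move_file with the
boolean keywords spelled out; 'scratch' is an assumed directory.

    import os
    from distutils.file_util import copy_file, move_file

    os.makedirs('scratch', exist_ok=True)
    with open('scratch/a.txt', 'w') as f:
        f.write('hello\n')

    copy_file('scratch/a.txt', 'scratch/b.txt',
              preserve_mode=True,    # keep permission bits
              preserve_times=True,   # keep atime/mtime
              update=False,          # copy even if dst is newer
              verbose=True)
    move_file('scratch/b.txt', 'scratch/c.txt', dry_run=False)
    print(sorted(os.listdir('scratch')))   # ['a.txt', 'c.txt']
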
diff --git a/distutils/filelist.py b/distutils/filelist.py
index 71ffb2abe7..78276a4d41 100644
--- a/distutils/filelist.py
+++ b/distutils/filelist.py
@@ -119,13 +119,13 @@ def process_template_line(self, line):  # noqa: C901
         if action == 'include':
             self.debug_print("include " + ' '.join(patterns))
             for pattern in patterns:
-                if not self.include_pattern(pattern, anchor=1):
+                if not self.include_pattern(pattern, anchor=True):
                     log.warning("warning: no files found matching '%s'", pattern)
 
         elif action == 'exclude':
             self.debug_print("exclude " + ' '.join(patterns))
             for pattern in patterns:
-                if not self.exclude_pattern(pattern, anchor=1):
+                if not self.exclude_pattern(pattern, anchor=True):
                     log.warning(
                         (
                             "warning: no previously-included files "
@@ -137,7 +137,7 @@ def process_template_line(self, line):  # noqa: C901
         elif action == 'global-include':
             self.debug_print("global-include " + ' '.join(patterns))
             for pattern in patterns:
-                if not self.include_pattern(pattern, anchor=0):
+                if not self.include_pattern(pattern, anchor=False):
                     log.warning(
                         (
                             "warning: no files found matching '%s' "
@@ -149,7 +149,7 @@ def process_template_line(self, line):  # noqa: C901
         elif action == 'global-exclude':
             self.debug_print("global-exclude " + ' '.join(patterns))
             for pattern in patterns:
-                if not self.exclude_pattern(pattern, anchor=0):
+                if not self.exclude_pattern(pattern, anchor=False):
                     log.warning(
                         (
                             "warning: no previously-included files matching "
@@ -197,7 +197,7 @@ def process_template_line(self, line):  # noqa: C901
 
     # Filtering/selection methods
 
-    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+    def include_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
         """Select strings (presumably filenames) from 'self.files' that
         match 'pattern', a Unix-style wildcard (glob) pattern.  Patterns
         are not quite the same as implemented by the 'fnmatch' module: '*'
@@ -238,7 +238,7 @@ def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
                 files_found = True
         return files_found
 
-    def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+    def exclude_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
         """Remove strings (presumably filenames) from 'files' that match
         'pattern'.  Other parameters are the same as for
         'include_pattern()', above.
@@ -332,7 +332,7 @@ def glob_to_re(pattern):
     return pattern_re
 
 
-def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
+def translate_pattern(pattern, anchor=True, prefix=None, is_regex=False):
     """Translate a shell-like wildcard pattern to a compiled regular
     expression.  Return the compiled regex.  If 'is_regex' true,
     then 'pattern' is directly compiled to a regex (if it's a string)
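
The 'anchor' flag, now a boolean in these signatures, decides whether
a glob matches only at the top of the tree; a sketch (not part of the
patch) with assumed file names:

    from distutils.filelist import FileList

    fl = FileList()
    fl.set_allfiles(['README.txt', 'docs/README.txt', 'pkg/mod.py'])

    fl.include_pattern('*.txt', anchor=True)    # top level only
    print(fl.files)                             # ['README.txt']

    fl.include_pattern('*.txt', anchor=False)   # anywhere in the tree
    fl.sort()
    fl.remove_duplicates()
    print(fl.files)             # ['README.txt', 'docs/README.txt']
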
diff --git a/distutils/msvc9compiler.py b/distutils/msvc9compiler.py
index 6a0105e484..2bde3f4e2f 100644
--- a/distutils/msvc9compiler.py
+++ b/distutils/msvc9compiler.py
@@ -346,7 +346,7 @@ class MSVCCompiler(CCompiler):
     static_lib_format = shared_lib_format = '%s%s'
     exe_extension = '.exe'
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
         self.__version = VERSION
         self.__root = r"Software\Microsoft\VisualStudio"
@@ -460,7 +460,7 @@ def initialize(self, plat_name=None):  # noqa: C901
 
     # -- Worker methods ------------------------------------------------
 
-    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
         # Copied from ccompiler.py, extended to return .res as 'object'-file
         # for .rc input file
         if output_dir is None:
@@ -491,7 +491,7 @@ def compile(  # noqa: C901
         output_dir=None,
         macros=None,
         include_dirs=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         depends=None,
@@ -578,7 +578,7 @@ def compile(  # noqa: C901
         return objects
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
     ):
         if not self.initialized:
             self.initialize()
@@ -606,7 +606,7 @@ def link(  # noqa: C901
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -783,7 +783,7 @@ def runtime_library_dir_option(self, dir):
     def library_option(self, lib):
         return self.library_filename(lib)
 
-    def find_library_file(self, dirs, lib, debug=0):
+    def find_library_file(self, dirs, lib, debug=False):
         # Prefer a debugging library if found (and requested), but deal
         # with it if we don't have one.
         if debug:
diff --git a/distutils/msvccompiler.py b/distutils/msvccompiler.py
index ac8b68c08c..223e627afb 100644
--- a/distutils/msvccompiler.py
+++ b/distutils/msvccompiler.py
@@ -253,7 +253,7 @@ class MSVCCompiler(CCompiler):
     static_lib_format = shared_lib_format = '%s%s'
     exe_extension = '.exe'
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
         self.__version = get_build_version()
         self.__arch = get_build_architecture()
@@ -354,7 +354,7 @@ def initialize(self):
 
     # -- Worker methods ------------------------------------------------
 
-    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
         # Copied from ccompiler.py, extended to return .res as 'object'-file
         # for .rc input file
         if output_dir is None:
@@ -385,7 +385,7 @@ def compile(  # noqa: C901
         output_dir=None,
         macros=None,
         include_dirs=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         depends=None,
@@ -472,7 +472,7 @@ def compile(  # noqa: C901
         return objects
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
     ):
         if not self.initialized:
             self.initialize()
@@ -500,7 +500,7 @@ def link(  # noqa: C901
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -585,7 +585,7 @@ def runtime_library_dir_option(self, dir):
     def library_option(self, lib):
         return self.library_filename(lib)
 
-    def find_library_file(self, dirs, lib, debug=0):
+    def find_library_file(self, dirs, lib, debug=False):
         # Prefer a debugging library if found (and requested), but deal
         # with it if we don't have one.
         if debug:
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 4ed51c1f03..4ef57a89ab 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -108,7 +108,7 @@ def get_python_version():
     return '%d.%d' % sys.version_info[:2]
 
 
-def get_python_inc(plat_specific=0, prefix=None):
+def get_python_inc(plat_specific=False, prefix=None):
     """Return the directory containing installed Python header files.
 
     If 'plat_specific' is false (the default), this is the path to the
@@ -213,7 +213,7 @@ def _posix_lib(standard_lib, libpython, early_prefix, prefix):
         return os.path.join(libpython, "site-packages")
 
 
-def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+def get_python_lib(plat_specific=False, standard_lib=False, prefix=None):
     """Return the directory containing the Python library (standard or
     site additions).
 
@@ -399,7 +399,11 @@ def parse_makefile(fn, g=None):  # noqa: C901
     from distutils.text_file import TextFile
 
     fp = TextFile(
-        fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape"
+        fn,
+        strip_comments=True,
+        skip_blanks=True,
+        join_lines=True,
+        errors="surrogateescape",
     )
 
     if g is None:
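
An illustrative sketch (not part of the patch) of the two query
functions with their boolean parameters:

    from distutils import sysconfig

    print(sysconfig.get_python_inc(plat_specific=False))  # generic headers
    print(sysconfig.get_python_inc(plat_specific=True))   # platform headers
    print(sysconfig.get_python_lib(plat_specific=False,
                                   standard_lib=False))   # site-packages
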
diff --git a/distutils/tests/test_bdist.py b/distutils/tests/test_bdist.py
index 1804807752..a06ac0e794 100644
--- a/distutils/tests/test_bdist.py
+++ b/distutils/tests/test_bdist.py
@@ -31,7 +31,7 @@ def test_skip_build(self):
         # bug #10946: bdist --skip-build should trickle down to subcommands
         dist = self.create_dist()[1]
         cmd = bdist(dist)
-        cmd.skip_build = 1
+        cmd.skip_build = True
         cmd.ensure_finalized()
         dist.command_obj['bdist'] = cmd
 
diff --git a/distutils/tests/test_bdist_rpm.py b/distutils/tests/test_bdist_rpm.py
index a5cb42c334..0d3138ba40 100644
--- a/distutils/tests/test_bdist_rpm.py
+++ b/distutils/tests/test_bdist_rpm.py
@@ -72,7 +72,7 @@ def test_quiet(self):
         cmd.fix_python = True
 
         # running in quiet mode
-        cmd.quiet = 1
+        cmd.quiet = True
         cmd.ensure_finalized()
         cmd.run()
 
@@ -114,7 +114,7 @@ def test_no_optimize_flag(self):
         cmd = bdist_rpm(dist)
         cmd.fix_python = True
 
-        cmd.quiet = 1
+        cmd.quiet = True
         cmd.ensure_finalized()
         cmd.run()
 
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index cc83e7fbc8..0b8ff2d5f9 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -140,7 +140,7 @@ def test_solaris_enable_shared(self):
         from distutils.sysconfig import _config_vars
 
         old_var = _config_vars.get('Py_ENABLE_SHARED')
-        _config_vars['Py_ENABLE_SHARED'] = 1
+        _config_vars['Py_ENABLE_SHARED'] = True
         try:
             cmd.ensure_finalized()
         finally:
@@ -164,7 +164,7 @@ def test_user_site(self):
         assert 'user' in options
 
         # setting a value
-        cmd.user = 1
+        cmd.user = True
 
         # setting user based lib and include
         lib = os.path.join(site.USER_BASE, 'lib')
@@ -209,7 +209,7 @@ def test_finalize_options(self):
         for p in py_include.split(os.path.pathsep):
             assert p in cmd.include_dirs
 
-        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+        plat_py_include = sysconfig.get_python_inc(plat_specific=True)
         for p in plat_py_include.split(os.path.pathsep):
             assert p in cmd.include_dirs
 
@@ -381,7 +381,7 @@ def test_get_outputs(self):
         old_wd = os.getcwd()
         os.chdir(other_tmp_dir)
         try:
-            cmd.inplace = 1
+            cmd.inplace = True
             cmd.run()
             so_file = cmd.get_outputs()[0]
         finally:
@@ -392,7 +392,7 @@ def test_get_outputs(self):
         so_dir = os.path.dirname(so_file)
         assert so_dir == other_tmp_dir
 
-        cmd.inplace = 0
+        cmd.inplace = False
         cmd.compiler = None
         cmd.run()
         so_file = cmd.get_outputs()[0]
@@ -401,7 +401,7 @@ def test_get_outputs(self):
         so_dir = os.path.dirname(so_file)
         assert so_dir == cmd.build_lib
 
-        # inplace = 0, cmd.package = 'bar'
+        # inplace = False, cmd.package = 'bar'
         build_py = cmd.get_finalized_command('build_py')
         build_py.package_dir = {'': 'bar'}
         path = cmd.get_ext_fullpath('foo')
@@ -409,8 +409,8 @@ def test_get_outputs(self):
         path = os.path.split(path)[0]
         assert path == cmd.build_lib
 
-        # inplace = 1, cmd.package = 'bar'
-        cmd.inplace = 1
+        # inplace = True, cmd.package = 'bar'
+        cmd.inplace = True
         other_tmp_dir = os.path.realpath(self.mkdtemp())
         old_wd = os.getcwd()
         os.chdir(other_tmp_dir)
@@ -431,7 +431,7 @@ def test_ext_fullpath(self):
         # dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
         dist = Distribution()
         cmd = self.build_ext(dist)
-        cmd.inplace = 1
+        cmd.inplace = True
         cmd.distribution.package_dir = {'': 'src'}
         cmd.distribution.packages = ['lxml', 'lxml.html']
         curdir = os.getcwd()
@@ -440,7 +440,7 @@ def test_ext_fullpath(self):
         assert wanted == path
 
         # building lxml.etree not inplace
-        cmd.inplace = 0
+        cmd.inplace = False
         cmd.build_lib = os.path.join(curdir, 'tmpdir')
         wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
         path = cmd.get_ext_fullpath('lxml.etree')
@@ -455,7 +455,7 @@ def test_ext_fullpath(self):
         assert wanted == path
 
         # building twisted.runner.portmap inplace
-        cmd.inplace = 1
+        cmd.inplace = True
         path = cmd.get_ext_fullpath('twisted.runner.portmap')
         wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
         assert wanted == path
diff --git a/distutils/tests/test_build_py.py b/distutils/tests/test_build_py.py
index 8bc0e98a4f..739c903622 100644
--- a/distutils/tests/test_build_py.py
+++ b/distutils/tests/test_build_py.py
@@ -28,13 +28,15 @@ def test_package_data(self):
         dist = Distribution({"packages": ["pkg"], "package_dir": {"pkg": sources}})
         # script_name need not exist, it just need to be initialized
         dist.script_name = os.path.join(sources, "setup.py")
-        dist.command_obj["build"] = support.DummyCommand(force=0, build_lib=destination)
+        dist.command_obj["build"] = support.DummyCommand(
+            force=False, build_lib=destination
+        )
         dist.packages = ["pkg"]
         dist.package_data = {"pkg": ["README.txt"]}
         dist.package_dir = {"pkg": sources}
 
         cmd = build_py(dist)
-        cmd.compile = 1
+        cmd.compile = True
         cmd.ensure_finalized()
         assert cmd.package_data == dist.package_data
 
@@ -82,7 +84,7 @@ def test_byte_compile(self):
         os.chdir(project_dir)
         self.write_file('boiledeggs.py', 'import antigravity')
         cmd = build_py(dist)
-        cmd.compile = 1
+        cmd.compile = True
         cmd.build_lib = 'here'
         cmd.finalize_options()
         cmd.run()
@@ -98,7 +100,7 @@ def test_byte_compile_optimized(self):
         os.chdir(project_dir)
         self.write_file('boiledeggs.py', 'import antigravity')
         cmd = build_py(dist)
-        cmd.compile = 0
+        cmd.compile = False
         cmd.optimize = 1
         cmd.build_lib = 'here'
         cmd.finalize_options()
@@ -146,7 +148,7 @@ def test_dont_write_bytecode(self, caplog):
         # makes sure byte_compile is not used
         dist = self.create_dist()[1]
         cmd = build_py(dist)
-        cmd.compile = 1
+        cmd.compile = True
         cmd.optimize = 1
 
         old_dont_write_bytecode = sys.dont_write_bytecode
diff --git a/distutils/tests/test_build_scripts.py b/distutils/tests/test_build_scripts.py
index 208b1f6e65..3582f691ef 100644
--- a/distutils/tests/test_build_scripts.py
+++ b/distutils/tests/test_build_scripts.py
@@ -42,7 +42,7 @@ def get_build_scripts_cmd(self, target, scripts):
         dist = Distribution()
         dist.scripts = scripts
         dist.command_obj["build"] = support.DummyCommand(
-            build_scripts=target, force=1, executable=sys.executable
+            build_scripts=target, force=True, executable=sys.executable
         )
         return build_scripts(dist)
 
diff --git a/distutils/tests/test_check.py b/distutils/tests/test_check.py
index 580cb2a267..b672b1f972 100644
--- a/distutils/tests/test_check.py
+++ b/distutils/tests/test_check.py
@@ -62,7 +62,7 @@ def test_check_metadata(self):
             self._run({}, **{'strict': 1})
 
         # and of course, no error when all metadata are present
-        cmd = self._run(metadata, strict=1)
+        cmd = self._run(metadata, strict=True)
         assert cmd._warnings == 0
 
         # now a test with non-ASCII characters
@@ -126,7 +126,7 @@ def test_check_restructuredtext(self):
         cmd.check_restructuredtext()
         assert cmd._warnings == 1
 
-        # let's see if we have an error with strict=1
+        # let's see if we have an error with strict=True
         metadata = {
             'url': 'xxx',
             'author': 'xxx',
@@ -140,12 +140,12 @@ def test_check_restructuredtext(self):
 
         # and non-broken rest, including a non-ASCII character to test #12114
         metadata['long_description'] = 'title\n=====\n\ntest \u00df'
-        cmd = self._run(metadata, strict=1, restructuredtext=1)
+        cmd = self._run(metadata, strict=True, restructuredtext=True)
         assert cmd._warnings == 0
 
         # check that includes work to test #31292
         metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst'
-        cmd = self._run(metadata, cwd=HERE, strict=1, restructuredtext=1)
+        cmd = self._run(metadata, cwd=HERE, strict=True, restructuredtext=True)
         assert cmd._warnings == 0
 
     def test_check_restructuredtext_with_syntax_highlight(self):
diff --git a/distutils/tests/test_cmd.py b/distutils/tests/test_cmd.py
index f366aa6522..76e8f5989b 100644
--- a/distutils/tests/test_cmd.py
+++ b/distutils/tests/test_cmd.py
@@ -48,7 +48,7 @@ def test_ensure_string_list(self, cmd):
     def test_make_file(self, cmd):
         # making sure it raises when infiles is not a string or a list/tuple
         with pytest.raises(TypeError):
-            cmd.make_file(infiles=1, outfile='', func='func', args=())
+            cmd.make_file(infiles=True, outfile='', func='func', args=())
 
         # making sure execute gets called properly
         def _execute(func, args, exec_msg, level):
diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index 84cda619ba..c8064cd014 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -29,16 +29,16 @@ def stuff(request, monkeypatch, distutils_managed_tempdir):
 
 class TestDirUtil(support.TempdirManager):
     def test_mkpath_remove_tree_verbosity(self, caplog):
-        mkpath(self.target, verbose=0)
+        mkpath(self.target, verbose=False)
         assert not caplog.records
-        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.root_target, verbose=False)
 
-        mkpath(self.target, verbose=1)
+        mkpath(self.target, verbose=True)
         wanted = ['creating %s' % self.root_target, 'creating %s' % self.target]
         assert caplog.messages == wanted
         caplog.clear()
 
-        remove_tree(self.root_target, verbose=1)
+        remove_tree(self.root_target, verbose=True)
         wanted = ["removing '%s' (and everything under it)" % self.root_target]
         assert caplog.messages == wanted
 
@@ -53,45 +53,45 @@ def test_mkpath_with_custom_mode(self):
         assert stat.S_IMODE(os.stat(self.target2).st_mode) == 0o555 & ~umask
 
     def test_create_tree_verbosity(self, caplog):
-        create_tree(self.root_target, ['one', 'two', 'three'], verbose=0)
+        create_tree(self.root_target, ['one', 'two', 'three'], verbose=False)
         assert caplog.messages == []
-        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.root_target, verbose=False)
 
         wanted = ['creating %s' % self.root_target]
-        create_tree(self.root_target, ['one', 'two', 'three'], verbose=1)
+        create_tree(self.root_target, ['one', 'two', 'three'], verbose=True)
         assert caplog.messages == wanted
 
-        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.root_target, verbose=False)
 
     def test_copy_tree_verbosity(self, caplog):
-        mkpath(self.target, verbose=0)
+        mkpath(self.target, verbose=False)
 
-        copy_tree(self.target, self.target2, verbose=0)
+        copy_tree(self.target, self.target2, verbose=False)
         assert caplog.messages == []
 
-        remove_tree(self.root_target, verbose=0)
+        remove_tree(self.root_target, verbose=False)
 
-        mkpath(self.target, verbose=0)
+        mkpath(self.target, verbose=False)
         a_file = path.Path(self.target) / 'ok.txt'
         jaraco.path.build({'ok.txt': 'some content'}, self.target)
 
         wanted = [f'copying {a_file} -> {self.target2}']
-        copy_tree(self.target, self.target2, verbose=1)
+        copy_tree(self.target, self.target2, verbose=True)
         assert caplog.messages == wanted
 
-        remove_tree(self.root_target, verbose=0)
-        remove_tree(self.target2, verbose=0)
+        remove_tree(self.root_target, verbose=False)
+        remove_tree(self.target2, verbose=False)
 
     def test_copy_tree_skips_nfs_temp_files(self):
-        mkpath(self.target, verbose=0)
+        mkpath(self.target, verbose=False)
 
         jaraco.path.build({'ok.txt': 'some content', '.nfs123abc': ''}, self.target)
 
         copy_tree(self.target, self.target2)
         assert os.listdir(self.target2) == ['ok.txt']
 
-        remove_tree(self.root_target, verbose=0)
-        remove_tree(self.target2, verbose=0)
+        remove_tree(self.root_target, verbose=False)
+        remove_tree(self.target2, verbose=False)
 
     def test_ensure_relative(self):
         if os.sep == '/':
diff --git a/distutils/tests/test_dist.py b/distutils/tests/test_dist.py
index 9ed4d16dd8..5bd206fec1 100644
--- a/distutils/tests/test_dist.py
+++ b/distutils/tests/test_dist.py
@@ -468,7 +468,7 @@ def test_show_help(self, request, capsys):
         # smoke test, just makes sure some help is displayed
         dist = Distribution()
         sys.argv = []
-        dist.help = 1
+        dist.help = True
         dist.script_name = 'setup.py'
         dist.parse_command_line()
 
diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py
index 4c2abd2453..420dc348eb 100644
--- a/distutils/tests/test_file_util.py
+++ b/distutils/tests/test_file_util.py
@@ -22,23 +22,23 @@ class TestFileUtil:
     def test_move_file_verbosity(self, caplog):
         jaraco.path.build({self.source: 'some content'})
 
-        move_file(self.source, self.target, verbose=0)
+        move_file(self.source, self.target, verbose=False)
         assert not caplog.messages
 
         # back to original state
-        move_file(self.target, self.source, verbose=0)
+        move_file(self.target, self.source, verbose=False)
 
-        move_file(self.source, self.target, verbose=1)
+        move_file(self.source, self.target, verbose=True)
         wanted = [f'moving {self.source} -> {self.target}']
         assert caplog.messages == wanted
 
         # back to original state
-        move_file(self.target, self.source, verbose=0)
+        move_file(self.target, self.source, verbose=False)
 
         caplog.clear()
         # now the target is a dir
         os.mkdir(self.target_dir)
-        move_file(self.source, self.target_dir, verbose=1)
+        move_file(self.source, self.target_dir, verbose=True)
         wanted = [f'moving {self.source} -> {self.target_dir}']
         assert caplog.messages == wanted
 
@@ -48,7 +48,7 @@ def test_move_file_exception_unpacking_rename(self):
             DistutilsFileError
         ):
             jaraco.path.build({self.source: 'spam eggs'})
-            move_file(self.source, self.target, verbose=0)
+            move_file(self.source, self.target, verbose=False)
 
     def test_move_file_exception_unpacking_unlink(self):
         # see issue 22182
@@ -58,7 +58,7 @@ def test_move_file_exception_unpacking_unlink(self):
             DistutilsFileError
         ):
             jaraco.path.build({self.source: 'spam eggs'})
-            move_file(self.source, self.target, verbose=0)
+            move_file(self.source, self.target, verbose=False)
 
     def test_copy_file_hard_link(self):
         jaraco.path.build({self.source: 'some content'})
diff --git a/distutils/tests/test_install.py b/distutils/tests/test_install.py
index 08f0f83993..8a0c56473e 100644
--- a/distutils/tests/test_install.py
+++ b/distutils/tests/test_install.py
@@ -100,7 +100,7 @@ def _expanduser(path):
         assert 'user' in options
 
         # setting a value
-        cmd.user = 1
+        cmd.user = True
 
         # user base and site shouldn't be created yet
         assert not os.path.exists(site.USER_BASE)
diff --git a/distutils/tests/test_install_data.py b/distutils/tests/test_install_data.py
index e453d01f1a..f34070b10b 100644
--- a/distutils/tests/test_install_data.py
+++ b/distutils/tests/test_install_data.py
@@ -41,7 +41,7 @@ def test_simple_run(self):
         cmd.outfiles = []
 
         # let's try with warn_dir set
-        cmd.warn_dir = 1
+        cmd.warn_dir = True
         cmd.ensure_finalized()
         cmd.run()
 
diff --git a/distutils/tests/test_install_lib.py b/distutils/tests/test_install_lib.py
index 964106fa00..f685a57956 100644
--- a/distutils/tests/test_install_lib.py
+++ b/distutils/tests/test_install_lib.py
@@ -97,7 +97,7 @@ def test_dont_write_bytecode(self, caplog):
         # makes sure byte_compile is not used
         dist = self.create_dist()[1]
         cmd = install_lib(dist)
-        cmd.compile = 1
+        cmd.compile = True
         cmd.optimize = 1
 
         old_dont_write_bytecode = sys.dont_write_bytecode
diff --git a/distutils/tests/test_install_scripts.py b/distutils/tests/test_install_scripts.py
index 5d9f13a426..868b1c2252 100644
--- a/distutils/tests/test_install_scripts.py
+++ b/distutils/tests/test_install_scripts.py
@@ -14,8 +14,8 @@ def test_default_settings(self):
         dist.command_obj["build"] = support.DummyCommand(build_scripts="/foo/bar")
         dist.command_obj["install"] = support.DummyCommand(
             install_scripts="/splat/funk",
-            force=1,
-            skip_build=1,
+            force=True,
+            skip_build=True,
         )
         cmd = install_scripts(dist)
         assert not cmd.force
@@ -40,8 +40,8 @@ def test_installation(self):
         dist.command_obj["build"] = support.DummyCommand(build_scripts=source)
         dist.command_obj["install"] = support.DummyCommand(
             install_scripts=target,
-            force=1,
-            skip_build=1,
+            force=True,
+            skip_build=True,
         )
         cmd = install_scripts(dist)
         cmd.finalize_options()
diff --git a/distutils/tests/test_register.py b/distutils/tests/test_register.py
index d071bbe951..14dfb832c7 100644
--- a/distutils/tests/test_register.py
+++ b/distutils/tests/test_register.py
@@ -137,7 +137,7 @@ def _no_way(prompt=''):
 
         register_module.input = _no_way
 
-        cmd.show_response = 1
+        cmd.show_response = True
         cmd.run()
 
         # let's see what the server received: we should
@@ -208,7 +208,7 @@ def test_strict(self):
         # empty metadata
         cmd = self._get_cmd({})
         cmd.ensure_finalized()
-        cmd.strict = 1
+        cmd.strict = True
         with pytest.raises(DistutilsSetupError):
             cmd.run()
 
@@ -224,7 +224,7 @@ def test_strict(self):
 
         cmd = self._get_cmd(metadata)
         cmd.ensure_finalized()
-        cmd.strict = 1
+        cmd.strict = True
         with pytest.raises(DistutilsSetupError):
             cmd.run()
 
@@ -232,7 +232,7 @@ def test_strict(self):
         metadata['long_description'] = 'title\n=====\n\ntext'
         cmd = self._get_cmd(metadata)
         cmd.ensure_finalized()
-        cmd.strict = 1
+        cmd.strict = True
         inputs = Inputs('1', 'tarek', 'y')
         register_module.input = inputs.__call__
         # let's run the command
@@ -265,7 +265,7 @@ def test_strict(self):
 
         cmd = self._get_cmd(metadata)
         cmd.ensure_finalized()
-        cmd.strict = 1
+        cmd.strict = True
         inputs = Inputs('1', 'tarek', 'y')
         register_module.input = inputs.__call__
         # let's run the command
@@ -296,7 +296,7 @@ def test_register_invalid_long_description(self, monkeypatch):
 
     def test_list_classifiers(self, caplog):
         cmd = self._get_cmd()
-        cmd.list_classifiers = 1
+        cmd.list_classifiers = True
         cmd.run()
         assert caplog.messages == ['running check', 'xxx']
 
@@ -305,7 +305,7 @@ def test_show_response(self, caplog):
         cmd = self._get_cmd()
         inputs = Inputs('1', 'tarek', 'y')
         register_module.input = inputs.__call__
-        cmd.show_response = 1
+        cmd.show_response = True
         try:
             cmd.run()
         finally:
diff --git a/distutils/tests/test_text_file.py b/distutils/tests/test_text_file.py
index c5c910a820..f511156561 100644
--- a/distutils/tests/test_text_file.py
+++ b/distutils/tests/test_text_file.py
@@ -60,7 +60,11 @@ def test_input(count, description, file, expected_result):
         jaraco.path.build({filename.name: TEST_DATA}, tmp_path)
 
         in_file = TextFile(
-            filename, strip_comments=0, skip_blanks=0, lstrip_ws=0, rstrip_ws=0
+            filename,
+            strip_comments=False,
+            skip_blanks=False,
+            lstrip_ws=False,
+            rstrip_ws=False,
         )
         try:
             test_input(1, "no processing", in_file, result1)
@@ -68,7 +72,11 @@ def test_input(count, description, file, expected_result):
             in_file.close()
 
         in_file = TextFile(
-            filename, strip_comments=1, skip_blanks=0, lstrip_ws=0, rstrip_ws=0
+            filename,
+            strip_comments=True,
+            skip_blanks=False,
+            lstrip_ws=False,
+            rstrip_ws=False,
         )
         try:
             test_input(2, "strip comments", in_file, result2)
@@ -76,7 +84,11 @@ def test_input(count, description, file, expected_result):
             in_file.close()
 
         in_file = TextFile(
-            filename, strip_comments=0, skip_blanks=1, lstrip_ws=0, rstrip_ws=0
+            filename,
+            strip_comments=False,
+            skip_blanks=True,
+            lstrip_ws=False,
+            rstrip_ws=False,
         )
         try:
             test_input(3, "strip blanks", in_file, result3)
@@ -90,7 +102,11 @@ def test_input(count, description, file, expected_result):
             in_file.close()
 
         in_file = TextFile(
-            filename, strip_comments=1, skip_blanks=1, join_lines=1, rstrip_ws=1
+            filename,
+            strip_comments=True,
+            skip_blanks=True,
+            join_lines=True,
+            rstrip_ws=True,
         )
         try:
             test_input(5, "join lines without collapsing", in_file, result5)
@@ -99,11 +115,11 @@ def test_input(count, description, file, expected_result):
 
         in_file = TextFile(
             filename,
-            strip_comments=1,
-            skip_blanks=1,
-            join_lines=1,
-            rstrip_ws=1,
-            collapse_join=1,
+            strip_comments=True,
+            skip_blanks=True,
+            join_lines=True,
+            rstrip_ws=True,
+            collapse_join=True,
         )
         try:
             test_input(6, "join lines with collapsing", in_file, result6)
diff --git a/distutils/tests/test_upload.py b/distutils/tests/test_upload.py
index 0692f00160..56df209c73 100644
--- a/distutils/tests/test_upload.py
+++ b/distutils/tests/test_upload.py
@@ -117,7 +117,7 @@ def test_upload(self, caplog):
         # let's run it
         pkg_dir, dist = self.create_dist(dist_files=dist_files)
         cmd = upload(dist)
-        cmd.show_response = 1
+        cmd.show_response = True
         cmd.ensure_finalized()
         cmd.run()
 
@@ -167,7 +167,7 @@ def test_upload_correct_cr(self):
             dist_files=dist_files, description='long description\r'
         )
         cmd = upload(dist)
-        cmd.show_response = 1
+        cmd.show_response = True
         cmd.ensure_finalized()
         cmd.run()
 
diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index da97688cbd..df622cd469 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -190,7 +190,7 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
             raise CompileError(msg)
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
     ):
         objects, output_dir = self._fix_object_args(objects, output_dir)
 
@@ -223,7 +223,7 @@ def link(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -362,7 +362,7 @@ def _library_root(dir):
 
         return os.path.join(match.group(1), dir[1:]) if apply_root else dir
 
-    def find_library_file(self, dirs, lib, debug=0):
+    def find_library_file(self, dirs, lib, debug=False):
         r"""
         Second-guess the linker with not much hard
         data to go on: GCC seems to prefer the shared library, so
diff --git a/distutils/util.py b/distutils/util.py
index 2cdea14381..ef332ba94d 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -311,7 +311,7 @@ def split_quoted(s):
 # split_quoted ()
 
 
-def execute(func, args, msg=None, verbose=0, dry_run=0):
+def execute(func, args, msg=None, verbose=False, dry_run=False):
     """Perform some action that affects the outside world (eg.  by
     writing to the filesystem).  Such actions are special because they
     are disabled by the 'dry_run' flag.  This method takes care of all
@@ -349,11 +349,11 @@ def strtobool(val):
 def byte_compile(  # noqa: C901
     py_files,
     optimize=0,
-    force=0,
+    force=False,
     prefix=None,
     base_dir=None,
-    verbose=1,
-    dry_run=0,
+    verbose=True,
+    dry_run=False,
     direct=None,
 ):
     """Byte-compile a collection of Python source files to .pyc
@@ -443,8 +443,8 @@ def byte_compile(  # noqa: C901
                     f"""
 byte_compile(files, optimize={optimize!r}, force={force!r},
              prefix={prefix!r}, base_dir={base_dir!r},
-             verbose={verbose!r}, dry_run=0,
-             direct=1)
+             verbose={verbose!r}, dry_run=False,
+             direct=True)
 """
                 )
 
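A sketch (not part of the patch) of how execute() treats the boolean
'dry_run' flag; 'demo-dir' is an assumed scratch name.

    import os
    from distutils.util import execute

    execute(os.mkdir, ('demo-dir',), msg='creating demo-dir',
            verbose=True, dry_run=True)
    print(os.path.exists('demo-dir'))   # False -- action was skipped

    execute(os.mkdir, ('demo-dir',), dry_run=False)
    print(os.path.exists('demo-dir'))   # True
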
diff --git a/distutils/zosccompiler.py b/distutils/zosccompiler.py
index c7a7ca61cf..af1e7fa5cc 100644
--- a/distutils/zosccompiler.py
+++ b/distutils/zosccompiler.py
@@ -135,7 +135,7 @@ def _get_zos_compiler_name(self):
 
         return zos_compilers.get(zos_compiler_names[0], 'ibm-openxl')
 
-    def __init__(self, verbose=0, dry_run=0, force=0):
+    def __init__(self, verbose=False, dry_run=False, force=False):
         super().__init__(verbose, dry_run, force)
         self.zos_compiler = self._get_zos_compiler_name()
         sysconfig.customize_compiler(self)
@@ -172,7 +172,7 @@ def link(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=0,
+        debug=False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
diff --git a/docs/distutils/apiref.rst b/docs/distutils/apiref.rst
index beb17bc3fc..709186ad0d 100644
--- a/docs/distutils/apiref.rst
+++ b/docs/distutils/apiref.rst
@@ -361,7 +361,7 @@ This module provides the following functions.
    are not given.
 
 
-.. function:: new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0)
+.. function:: new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=False)
 
    Factory function to generate an instance of some CCompiler subclass for the
    supplied platform/compiler combination. *plat* defaults to ``os.name`` (eg.
@@ -383,7 +383,7 @@ This module provides the following functions.
    to :command:`build`, :command:`build_ext`, :command:`build_clib`).
 
 
-.. class:: CCompiler([verbose=0, dry_run=0, force=0])
+.. class:: CCompiler([verbose=False, dry_run=False, force=False])
 
    The abstract base class :class:`CCompiler` defines the interface that  must be
    implemented by real compiler classes.  The class also has  some utility methods
@@ -517,7 +517,7 @@ This module provides the following functions.
       list) to do the job.
 
 
-   .. method:: CCompiler.find_library_file(dirs, lib[, debug=0])
+   .. method:: CCompiler.find_library_file(dirs, lib[, debug=False])
 
       Search the specified list of directories for a static or shared library file
       *lib* and return the full path to that file.  If *debug* is true, look for a
@@ -580,7 +580,7 @@ This module provides the following functions.
    The following methods invoke stages in the build process.
 
 
-   .. method:: CCompiler.compile(sources[, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None])
+   .. method:: CCompiler.compile(sources[, output_dir=None, macros=None, include_dirs=None, debug=False, extra_preargs=None, extra_postargs=None, depends=None])
 
       Compile one or more source files. Generates object files (e.g.  transforms a
       :file:`.c` file to a :file:`.o` file.)
@@ -624,7 +624,7 @@ This module provides the following functions.
       Raises :exc:`CompileError` on failure.
 
 
-   .. method:: CCompiler.create_static_lib(objects, output_libname[, output_dir=None, debug=0, target_lang=None])
+   .. method:: CCompiler.create_static_lib(objects, output_libname[, output_dir=None, debug=False, target_lang=None])
 
       Link a bunch of stuff together to create a static library file. The "bunch of
       stuff" consists of the list of object files supplied as *objects*, the extra
@@ -648,7 +648,7 @@ This module provides the following functions.
       Raises :exc:`LibError` on failure.
 
 
-   .. method:: CCompiler.link(target_desc, objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+   .. method:: CCompiler.link(target_desc, objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
 
       Link a bunch of stuff together to create an executable or shared library file.
 
@@ -690,21 +690,21 @@ This module provides the following functions.
       Raises :exc:`LinkError` on failure.
 
 
-   .. method:: CCompiler.link_executable(objects, output_progname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, target_lang=None])
+   .. method:: CCompiler.link_executable(objects, output_progname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, debug=False, extra_preargs=None, extra_postargs=None, target_lang=None])
 
       Link an executable.  *output_progname* is the name of the file executable, while
       *objects* are a list of object filenames to link in. Other arguments  are as for
       the :meth:`link` method.
 
 
-   .. method:: CCompiler.link_shared_lib(objects, output_libname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+   .. method:: CCompiler.link_shared_lib(objects, output_libname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
 
       Link a shared library. *output_libname* is the name of the output  library,
       while *objects* is a list of object filenames to link in.  Other arguments are
       as for the :meth:`link` method.
 
 
-   .. method:: CCompiler.link_shared_object(objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+   .. method:: CCompiler.link_shared_object(objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=False, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
 
       Link a shared object. *output_filename* is the name of the shared object that
       will be created, while *objects* is a list of object filenames  to link in.
@@ -726,14 +726,14 @@ This module provides the following functions.
    use by the various concrete subclasses.
 
 
-   .. method:: CCompiler.executable_filename(basename[, strip_dir=0, output_dir=''])
+   .. method:: CCompiler.executable_filename(basename[, strip_dir=False, output_dir=''])
 
       Returns the filename of the executable for the given *basename*.  Typically for
       non-Windows platforms this is the same as the basename,  while Windows will get
       a :file:`.exe` added.
 
 
-   .. method:: CCompiler.library_filename(libname[, lib_type='static', strip_dir=0, output_dir=''])
+   .. method:: CCompiler.library_filename(libname[, lib_type='static', strip_dir=False, output_dir=''])
 
       Returns the filename for the given library name on the current platform. On Unix
       a library with *lib_type* of ``'static'`` will typically  be of the form
@@ -741,13 +741,13 @@ This module provides the following functions.
       :file:`liblibname.so`.
 
 
-   .. method:: CCompiler.object_filenames(source_filenames[, strip_dir=0, output_dir=''])
+   .. method:: CCompiler.object_filenames(source_filenames[, strip_dir=False, output_dir=''])
 
       Returns the name of the object files for the given source files.
       *source_filenames* should be a list of filenames.
 
 
-   .. method:: CCompiler.shared_object_filename(basename[, strip_dir=0, output_dir=''])
+   .. method:: CCompiler.shared_object_filename(basename[, strip_dir=False, output_dir=''])
 
       Returns the name of a shared object file for the given file name *basename*.
 
@@ -884,7 +884,7 @@ This module provides a few functions for creating archive files, such as
 tarballs or zipfiles.
 
 
-.. function:: make_archive(base_name, format[, root_dir=None, base_dir=None, verbose=0, dry_run=0])
+.. function:: make_archive(base_name, format[, root_dir=None, base_dir=None, verbose=False, dry_run=False])
 
    Create an archive file (eg. ``zip`` or ``tar``).  *base_name*  is the name of
    the file to create, minus any format-specific extension;  *format* is the
@@ -900,7 +900,7 @@ tarballs or zipfiles.
       Added support for the ``xztar`` format.
 
 
-.. function:: make_tarball(base_name, base_dir[, compress='gzip', verbose=0, dry_run=0])
+.. function:: make_tarball(base_name, base_dir[, compress='gzip', verbose=False, dry_run=False])
 
   Create an (optionally compressed) archive as a tar file from all files in and
    under *base_dir*. *compress* must be ``'gzip'`` (the default),
@@ -915,7 +915,7 @@ tarballs or zipfiles.
       Added support for the ``xz`` compression.
 
 
-.. function:: make_zipfile(base_name, base_dir[, verbose=0, dry_run=0])
+.. function:: make_zipfile(base_name, base_dir[, verbose=False, dry_run=False])
 
    Create a zip file from all files in and under *base_dir*.  The output zip file
    will be named *base_name* + :file:`.zip`.  Uses either the  :mod:`zipfile` Python
@@ -978,7 +978,7 @@ This module provides functions for operating on directories and trees of
 directories.
 
 
-.. function:: mkpath(name[, mode=0o777, verbose=0, dry_run=0])
+.. function:: mkpath(name[, mode=0o777, verbose=False, dry_run=False])
 
    Create a directory and any missing ancestor directories.  If the directory
    already exists (or if *name* is the empty string, which means the current
@@ -989,7 +989,7 @@ directories.
    directories actually created.
 
 
-.. function:: create_tree(base_dir, files[, mode=0o777, verbose=0, dry_run=0])
+.. function:: create_tree(base_dir, files[, mode=0o777, verbose=False, dry_run=False])
 
    Create all the empty directories under *base_dir* needed to put *files* there.
    *base_dir* is just the name of a directory which doesn't necessarily exist
@@ -999,7 +999,7 @@ directories.
    :func:`mkpath`.
 
 
-.. function:: copy_tree(src, dst[, preserve_mode=1, preserve_times=1, preserve_symlinks=0, update=0, verbose=0, dry_run=0])
+.. function:: copy_tree(src, dst[, preserve_mode=True, preserve_times=True, preserve_symlinks=False, update=False, verbose=False, dry_run=False])
 
    Copy an entire directory tree *src* to a new location *dst*.  Both *src* and
    *dst* must be directory names.  If *src* is not a directory, raise
@@ -1026,7 +1026,7 @@ directories.
    .. versionchanged:: 3.3.1
       NFS files are ignored.
 
-.. function:: remove_tree(directory[, verbose=0, dry_run=0])
+.. function:: remove_tree(directory[, verbose=False, dry_run=False])
 
    Recursively remove *directory* and all files and directories underneath it. Any
    errors are ignored (apart from being reported to ``sys.stdout`` if *verbose* is
@@ -1043,7 +1043,7 @@ directories.
 This module contains some utility functions for operating on individual files.
 
 
-.. function:: copy_file(src, dst[, preserve_mode=1, preserve_times=1, update=0, link=None, verbose=0, dry_run=0])
+.. function:: copy_file(src, dst[, preserve_mode=True, preserve_times=True, update=False, link=None, verbose=False, dry_run=False])
 
    Copy file *src* to *dst*. If *dst* is a directory, then *src* is copied there
    with the same name; otherwise, it must be a filename. (If the file exists, it
@@ -1216,7 +1216,7 @@ other utility module.
    .. % Should probably be moved into the standard library.
 
 
-.. function:: execute(func, args[, msg=None, verbose=0, dry_run=0])
+.. function:: execute(func, args[, msg=None, verbose=False, dry_run=False])
 
    Perform some action that affects the outside world (for instance, writing to the
    filesystem).  Such actions are special because they are disabled by the
@@ -1234,7 +1234,7 @@ other utility module.
    :exc:`ValueError` if *val*  is anything else.
 
 
-.. function:: byte_compile(py_files[, optimize=0, force=0, prefix=None, base_dir=None, verbose=1, dry_run=0, direct=None])
+.. function:: byte_compile(py_files[, optimize=0, force=False, prefix=None, base_dir=None, verbose=True, dry_run=False, direct=None])
 
    Byte-compile a collection of Python source files to :file:`.pyc` files in a
    :file:`__pycache__` subdirectory (see :pep:`3147` and :pep:`488`).
diff --git a/docs/distutils/configfile.rst b/docs/distutils/configfile.rst
index 30cccd71c0..f6cb184089 100644
--- a/docs/distutils/configfile.rst
+++ b/docs/distutils/configfile.rst
@@ -96,7 +96,7 @@ configuration file for this distribution:
 .. code-block:: ini
 
    [build_ext]
-   inplace=1
+   inplace=true
 
 This will affect all builds of this module distribution, whether or not you
 explicitly specify :command:`build_ext`.  If you include :file:`setup.cfg` in
diff --git a/docs/distutils/setupscript.rst b/docs/distutils/setupscript.rst
index 71d2439f7e..825a6aa9a3 100644
--- a/docs/distutils/setupscript.rst
+++ b/docs/distutils/setupscript.rst
@@ -273,7 +273,7 @@ search path, though, you can find that directory using the Distutils
 :mod:`distutils.sysconfig` module::
 
     from distutils.sysconfig import get_python_inc
-    incdir = os.path.join(get_python_inc(plat_specific=1), 'Numerical')
+    incdir = os.path.join(get_python_inc(plat_specific=True), 'Numerical')
     setup(...,
           Extension(..., include_dirs=[incdir]),
           )

From affcf39ac09a482ae1aaa36142394002c4bb7bf0 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 18 May 2024 19:03:41 +0200
Subject: [PATCH 0791/1761] Apply ruff/flake8-implicit-str-concat rule ISC001

ISC001 Implicitly concatenated string literals on one line
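
A minimal sketch of the pattern this rule targets (illustrative only, not
taken from the change below):

	greeting = 'Hello, ' 'world'   # ISC001: implicit concatenation on one line
	greeting = 'Hello, world'      # preferred single literal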
---
 distutils/command/bdist_rpm.py | 2 +-
 distutils/command/register.py  | 2 +-
 distutils/fancy_getopt.py      | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index 649968a5eb..df96507e5c 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -529,7 +529,7 @@ def _make_spec_file(self):  # noqa: C901
         # are just text that we drop in as-is.  Hmmm.
 
         install_cmd = (
-            '%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES'
+            '%s install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES'
         ) % def_setup_call
 
         script_options = [
diff --git a/distutils/command/register.py b/distutils/command/register.py
index ee6c54daba..1ae0e25eb3 100644
--- a/distutils/command/register.py
+++ b/distutils/command/register.py
@@ -225,7 +225,7 @@ def send_metadata(self):  # noqa: C901
                 log.info('Server response (%s): %s', code, result)
             else:
                 log.info('You will receive an email shortly.')
-                log.info('Follow the instructions in it to ' 'complete registration.')
+                log.info('Follow the instructions in it to complete registration.')
         elif choice == '3':
             data = {':action': 'password_reset'}
             data['email'] = ''
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index e905aede4d..94a63217c5 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -118,11 +118,11 @@ def _check_alias_dict(self, aliases, what):
         for alias, opt in aliases.items():
             if alias not in self.option_index:
                 raise DistutilsGetoptError(
-                    f"invalid {what} '{alias}': " f"option '{alias}' not defined"
+                    f"invalid {what} '{alias}': option '{alias}' not defined"
                 )
             if opt not in self.option_index:
                 raise DistutilsGetoptError(
-                    f"invalid {what} '{alias}': " f"aliased option '{opt}' not defined"
+                    f"invalid {what} '{alias}': aliased option '{opt}' not defined"
                 )
 
     def set_aliases(self, alias):

From 7d12ab75b18e6830b3f483696dd5c91ee047e9b8 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 18 May 2024 19:06:09 +0200
Subject: [PATCH 0792/1761] Apply ruff/flake8-implicit-str-concat rule ISC003

ISC003 Explicitly concatenated string should be implicitly concatenated
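
An illustrative sketch of what this rule rewrites (not taken from the change
below):

	message = ("first part "
	           + "second part")    # ISC003: explicit '+' between literals
	message = ("first part "
	           "second part")      # preferred implicit concatenation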
---
 distutils/command/bdist_dumb.py | 3 +--
 distutils/command/build.py      | 3 +--
 distutils/command/build_ext.py  | 2 +-
 distutils/command/install.py    | 9 +++------
 distutils/dist.py               | 3 +--
 5 files changed, 7 insertions(+), 13 deletions(-)

diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index 06502d201e..5966e17ef1 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -33,8 +33,7 @@ class bdist_dumb(Command):
         (
             'keep-temp',
             'k',
-            "keep the pseudo-installation tree around after "
-            + "creating the distribution archive",
+            "keep the pseudo-installation tree around after creating the distribution archive",
         ),
         ('dist-dir=', 'd', "directory to put final built distributions in"),
         ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
diff --git a/distutils/command/build.py b/distutils/command/build.py
index d18ed503e3..3d896d4d34 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -26,8 +26,7 @@ class build(Command):
         (
             'build-lib=',
             None,
-            "build directory for all distribution (defaults to either "
-            + "build-purelib or build-platlib",
+            "build directory for all distribution (defaults to either build-purelib or build-platlib",
         ),
         ('build-scripts=', None, "build directory for scripts"),
         ('build-temp=', 't', "temporary build directory"),
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 06d949aff1..b80cfdf735 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -71,7 +71,7 @@ class build_ext(Command):
             'inplace',
             'i',
             "ignore build-lib and put compiled extensions into the source "
-            + "directory alongside your pure Python modules",
+            "directory alongside your pure Python modules",
         ),
         (
             'include-dirs=',
diff --git a/distutils/command/install.py b/distutils/command/install.py
index 8e920be4de..e73d388d78 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -193,8 +193,7 @@ class install(Command):
         (
             'install-platbase=',
             None,
-            "base installation directory for platform-specific files "
-            + "(instead of --exec-prefix or --home)",
+            "base installation directory for platform-specific files (instead of --exec-prefix or --home)",
         ),
         ('root=', None, "install everything relative to this alternate root directory"),
         # Or, explicitly set the installation scheme
@@ -211,8 +210,7 @@ class install(Command):
         (
             'install-lib=',
             None,
-            "installation directory for all module distributions "
-            + "(overrides --install-purelib and --install-platlib)",
+            "installation directory for all module distributions (overrides --install-purelib and --install-platlib)",
         ),
         ('install-headers=', None, "installation directory for C/C++ headers"),
         ('install-scripts=', None, "installation directory for Python scripts"),
@@ -348,8 +346,7 @@ def finalize_options(self):  # noqa: C901
             self.install_base or self.install_platbase
         ):
             raise DistutilsOptionError(
-                "must supply either prefix/exec-prefix/home or "
-                + "install-base/install-platbase -- not both"
+                "must supply either prefix/exec-prefix/home or install-base/install-platbase -- not both"
             )
 
         if self.home and (self.prefix or self.exec_prefix):
diff --git a/distutils/dist.py b/distutils/dist.py
index 668ce7eb0a..1fd20d8c64 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -652,8 +652,7 @@ def _show_help(
         if display_options:
             parser.set_option_table(self.display_options)
             parser.print_help(
-                "Information display options (just display "
-                + "information, ignore any commands)"
+                "Information display options (just display information, ignore any commands)"
             )
             print()
 

From 5a590b8566cdfd3998c3d66aea5beefed340d774 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 18 May 2024 19:06:54 +0200
Subject: [PATCH 0793/1761] Enable ruff/flake8-implicit-str-concat rules (ISC)

---
 ruff.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ruff.toml b/ruff.toml
index 70612985a7..e70a65b941 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,6 +1,7 @@
 [lint]
 extend-select = [
 	"C901",
+	"ISC",
 	"W",
 ]
 ignore = [

From 4208e32486161f55e7391c2e0c4d9a6a7a22b809 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 27 Jun 2024 05:55:07 -0400
Subject: [PATCH 0794/1761] Combine strings for clarity.

---
 distutils/ccompiler.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index ca71e18a44..4585512fe3 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -203,8 +203,7 @@ def _check_macro_definitions(self, definitions):
             ):
                 raise TypeError(
                     f"invalid macro definition '{defn}': "
-                    "must be tuple (string,), (string, string), or "
-                    "(string, None)"
+                    "must be tuple (string,), (string, string), or (string, None)"
                 )
 
     # -- Bookkeeping methods -------------------------------------------

From 1c196feba725b15f7a14d2de6cdc3ab7070a1f57 Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Fri, 28 Jun 2024 06:46:33 +0200
Subject: [PATCH 0795/1761] CI: run pytest without arguments to avoid stdlib
 distutils being imported

distutils currently doesn't support pytest collection that starts below the
distutils dir (for example at distutils/tests), since the local distutils must
be imported before the tests are run; otherwise the stdlib distutils takes
precedence.

Adjust the pytest call to not pass a path, to work around this.

Since pytest currently fails to skip collecting venvs with mingw python
(see https://github.com/pytest-dev/pytest/issues/12544), move the venv
to /tmp instead.
---
 .github/workflows/main.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index d4c7a392e6..9e4bf5bb40 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -146,8 +146,8 @@ jobs:
         run: |
           export VIRTUALENV_NO_SETUPTOOLS=1
 
-          python -m virtualenv venv
-          source venv/bin/activate
+          python -m virtualenv /tmp/venv
+          source /tmp/venv/bin/activate
 
           # python-ruff doesn't work without rust
           sed -i '/pytest-ruff/d' pyproject.toml
@@ -156,8 +156,8 @@ jobs:
       - name: Run tests
         shell: msys2 {0}
         run: |
-          source venv/bin/activate
-          pytest distutils/tests
+          source /tmp/venv/bin/activate
+          pytest
 
   ci_setuptools:
     # Integration testing with setuptools

From 23174730a61af359f1c45da42d5f2f66df0c6086 Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Fri, 28 Jun 2024 07:22:15 +0200
Subject: [PATCH 0796/1761] CI: explicitly set CC/CXX for clang-only mingw
 environments

MSYS2 stopped installing gcc compatibility binaries in clang environments by
default some time ago, and distutils is currently hardcoded to look for "gcc",
while only cc/c++ and clang/clang++ are in PATH.

Work around this for now by explicitly setting CC/CXX to override the defaults.

Ideally distutils would try harder to find a valid compiler before giving up,
but that can be improved in the future.
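
For context, a rough sketch of why the override works: distutils'
customize_compiler() consults CC/CXX from the environment when configuring
the default unix compiler (a hedged illustration, not part of this change):

	import os
	from distutils.ccompiler import new_compiler
	from distutils.sysconfig import customize_compiler

	os.environ['CC'] = 'clang'      # what the workflow now sets per matrix entry
	os.environ['CXX'] = 'clang++'
	cc = new_compiler()             # UnixCCompiler on posix-like platforms
	customize_compiler(cc)          # picks up CC/CXX instead of the gcc default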
---
 .github/workflows/main.yml | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 9e4bf5bb40..b6b757dbf5 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -124,10 +124,10 @@ jobs:
     strategy:
       matrix:
         include:
-          - { sys: mingw64, env: x86_64 }
-          - { sys: mingw32, env: i686 }
-          - { sys: ucrt64,  env: ucrt-x86_64 }
-          - { sys: clang64, env: clang-x86_64 }
+          - { sys: mingw64, env: x86_64, cc: gcc, cxx: g++ }
+          - { sys: mingw32, env: i686, cc: gcc, cxx: g++ }
+          - { sys: ucrt64,  env: ucrt-x86_64, cc: gcc, cxx: g++ }
+          - { sys: clang64, env: clang-x86_64, cc: clang, cxx: clang++}
     runs-on: windows-latest
     steps:
       - uses: actions/checkout@v4
@@ -155,6 +155,9 @@ jobs:
           pip install -e .[test]
       - name: Run tests
         shell: msys2 {0}
+        env:
+          CC: ${{ matrix.cc }}
+          CXX: ${{ matrix.cxx }}
         run: |
           source /tmp/venv/bin/activate
           pytest

From 2c86616a044f17621723e4875d76fdbe44070191 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 27 Jun 2024 06:01:43 -0400
Subject: [PATCH 0797/1761] Extract method for checking macro definition.

---
 distutils/ccompiler.py | 35 +++++++++++++++++++----------------
 1 file changed, 19 insertions(+), 16 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index b860acb779..9974e520cc 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -188,23 +188,26 @@ def _find_macro(self, name):
         return None
 
     def _check_macro_definitions(self, definitions):
-        """Ensures that every element of 'definitions' is a valid macro
-        definition, ie. either (name,value) 2-tuple or a (name,) tuple.  Do
-        nothing if all definitions are OK, raise TypeError otherwise.
-        """
+        """Ensure that every element of 'definitions' is valid."""
         for defn in definitions:
-            if not (
-                isinstance(defn, tuple)
-                and (
-                    len(defn) in (1, 2)
-                    and (isinstance(defn[1], str) or defn[1] is None)
-                )
-                and isinstance(defn[0], str)
-            ):
-                raise TypeError(
-                    f"invalid macro definition '{defn}': "
-                    "must be tuple (string,), (string, string), or (string, None)"
-                )
+            self._check_macro_definition(defn)
+
+    def _check_macro_definition(self, defn):
+        """
+        Raise a TypeError if defn is not valid.
+
+        A valid definition is either a (name, value) 2-tuple or a (name,) tuple.
+        """
+        valid = (
+            isinstance(defn, tuple)
+            and (len(defn) in (1, 2) and (isinstance(defn[1], str) or defn[1] is None))
+            and isinstance(defn[0], str)
+        )
+        if not valid:
+            raise TypeError(
+                f"invalid macro definition '{defn}': "
+                "must be tuple (string,), (string, string), or (string, None)"
+            )
 
     # -- Bookkeeping methods -------------------------------------------
 

From 62135e668e07facf8ebff3f6cf1456f3b8c59e4d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 27 Jun 2024 06:14:54 -0400
Subject: [PATCH 0798/1761] Extract method for _is_valid_macro.
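
For illustration, the extracted predicate behaves roughly like this
(hypothetical REPL-style sketch, not part of the change):

	from distutils.ccompiler import CCompiler

	CCompiler._is_valid_macro('NDEBUG')          # True:  (name,) form
	CCompiler._is_valid_macro('VERSION', '1.0')  # True:  (name, value) form
	CCompiler._is_valid_macro('DEBUG', 1)        # False: value must be str or None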

---
 distutils/ccompiler.py | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 9974e520cc..87ca89e197 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -6,6 +6,7 @@
 import os
 import re
 import sys
+import types
 import warnings
 
 from ._itertools import always_iterable
@@ -190,7 +191,7 @@ def _find_macro(self, name):
     def _check_macro_definitions(self, definitions):
         """Ensure that every element of 'definitions' is valid."""
         for defn in definitions:
-            self._check_macro_definition(defn)
+            self._check_macro_definition(*defn)
 
     def _check_macro_definition(self, defn):
         """
@@ -198,17 +199,19 @@ def _check_macro_definition(self, defn):
 
         A valid definition is either a (name, value) 2-tuple or a (name,) tuple.
         """
-        valid = (
-            isinstance(defn, tuple)
-            and (len(defn) in (1, 2) and (isinstance(defn[1], str) or defn[1] is None))
-            and isinstance(defn[0], str)
-        )
-        if not valid:
+        if not isinstance(defn, tuple) or not self._is_valid_macro(*defn):
             raise TypeError(
                 f"invalid macro definition '{defn}': "
                 "must be tuple (string,), (string, string), or (string, None)"
             )
 
+    @staticmethod
+    def _is_valid_macro(name, value=None):
+        """
+        A valid macro is a ``name : str`` and a ``value : str | None``.
+        """
+        return isinstance(name, str) and isinstance(value, (str, types.NoneType))
+
     # -- Bookkeeping methods -------------------------------------------
 
     def define_macro(self, name, value=None):

From dc7bfe4c47f5debdfff68c87f0e8b897713eda0a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Thu, 27 Jun 2024 21:12:19 +0200
Subject: [PATCH 0799/1761] Use brackets for the default value of option
 arguments

The goal is to standardize the format of the help text printed
by commands. It is not easy to choose between brackets `[]` and
parentheses `()`. I went for the docopt style, which is the
closest to a standard I could find:

	http://docopt.org/

	[...] and whether that argument has a default value ([default: 10]).
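
With this convention, a help line now renders along these lines (hypothetical
output for a linux-x86_64 build):

	--plat-name (-p)  platform name to embed in generated filenames
	                  [default: linux-x86_64]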
---
 distutils/command/bdist.py        | 2 +-
 distutils/command/bdist_dumb.py   | 4 ++--
 distutils/command/bdist_rpm.py    | 2 +-
 distutils/command/build.py        | 2 +-
 distutils/command/build_ext.py    | 2 +-
 distutils/command/clean.py        | 8 ++++----
 distutils/command/install_data.py | 2 +-
 7 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index 833f7616ce..1738f4e56b 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -41,7 +41,7 @@ class bdist(Command):
             'plat-name=',
             'p',
             "platform name to embed in generated filenames "
-            f"(default: {get_platform()})",
+            f"[default: {get_platform()}]",
         ),
         ('formats=', None, "formats for distribution (comma-separated list)"),
         (
diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index bf9b6ad6ef..67b0c8cce9 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -23,7 +23,7 @@ class bdist_dumb(Command):
             'plat-name=',
             'p',
             "platform name to embed in generated filenames "
-            f"(default: {get_platform()})",
+            f"[default: {get_platform()}]",
         ),
         (
             'format=',
@@ -40,7 +40,7 @@ class bdist_dumb(Command):
         (
             'relative',
             None,
-            "build the archive using relative paths (default: false)",
+            "build the archive using relative paths [default: false]",
         ),
         (
             'owner=',
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index cb98cd50ce..d443eb09b5 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -40,7 +40,7 @@ class bdist_rpm(Command):
             'python=',
             None,
             "path to Python interpreter to hard-code in the .spec file "
-            "(default: \"python\")",
+            "[default: \"python\"]",
         ),
         (
             'fix-python',
diff --git a/distutils/command/build.py b/distutils/command/build.py
index 84dc43c9fa..caf55073af 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -34,7 +34,7 @@ class build(Command):
         (
             'plat-name=',
             'p',
-            f"platform name to build for, if supported (default: {get_platform()})",
+            f"platform name to build for, if supported [default: {get_platform()}]",
         ),
         ('compiler=', 'c', "specify the compiler type"),
         ('parallel=', 'j', "number of parallel build jobs"),
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index f1ebc040ea..a1d1753dc5 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -65,7 +65,7 @@ class build_ext(Command):
             'plat-name=',
             'p',
             "platform name to cross-compile for, if supported "
-            f"(default: {get_platform()})",
+            f"[default: {get_platform()}]",
         ),
         (
             'inplace',
diff --git a/distutils/command/clean.py b/distutils/command/clean.py
index 4167a83fb3..fb54a60ed4 100644
--- a/distutils/command/clean.py
+++ b/distutils/command/clean.py
@@ -14,17 +14,17 @@
 class clean(Command):
     description = "clean up temporary files from 'build' command"
     user_options = [
-        ('build-base=', 'b', "base build directory (default: 'build.build-base')"),
+        ('build-base=', 'b', "base build directory [default: 'build.build-base']"),
         (
             'build-lib=',
             None,
-            "build directory for all modules (default: 'build.build-lib')",
+            "build directory for all modules [default: 'build.build-lib']",
         ),
-        ('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"),
+        ('build-temp=', 't', "temporary build directory [default: 'build.build-temp']"),
         (
             'build-scripts=',
             None,
-            "build directory for scripts (default: 'build.build-scripts')",
+            "build directory for scripts [default: 'build.build-scripts']",
         ),
         ('bdist-base=', None, "temporary directory for built distributions"),
         ('all', 'a', "remove all build output, not just temporary by-products"),
diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index a4da892480..624c0b901b 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -19,7 +19,7 @@ class install_data(Command):
             'install-dir=',
             'd',
             "base directory for installing data files "
-            "(default: installation base dir)",
+            "[default: installation base dir]",
         ),
         ('root=', None, "install everything relative to this alternate root directory"),
         ('force', 'f', "force installation (overwrite existing files)"),

From afc54f822b52df1499d3379c2d05f96bd86433a8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 28 Jun 2024 11:55:50 -0400
Subject: [PATCH 0800/1761] Remove unnecessary override to the same value.

Co-authored-by: DWesl <22566757+DWesl@users.noreply.github.com>
---
 distutils/unixccompiler.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index e547d4893c..7e68596b26 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -146,7 +146,6 @@ class UnixCCompiler(CCompiler):
         exe_extension = ".exe"
         shared_lib_extension = ".dll.a"
         dylib_lib_extension = ".dll"
-        static_lib_format = shared_lib_format = "lib%s%s"
         dylib_lib_format = "cyg%s%s"
 
     def preprocess(

From 131fae7f7fa7cd3107ccc20f6c1fb6fbe1ab569a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 28 Jun 2024 11:38:01 -0400
Subject: [PATCH 0801/1761] Suppress EncodingWarnings in docutils.

---
 pytest.ini | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pytest.ini b/pytest.ini
index b53e0d93a1..dd57c6ef4e 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -40,3 +40,7 @@ filterwarnings=
 
 	# suppress known deprecation
 	ignore:Use shutil.which instead of find_executable:DeprecationWarning
+
+	# https://sourceforge.net/p/docutils/bugs/490/
+	ignore:'encoding' argument not specified::docutils.io
+	ignore:UTF-8 Mode affects locale.getpreferredencoding()::docutils.io

From d56a32ddcd1accbb68a74210c029cc76a6e3fcd3 Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Sat, 30 Jul 2022 20:35:22 -0400
Subject: [PATCH 0802/1761] ENH: Don't add system library directories to rpath

Last of the patches from #73
Might close pypa/setuptools#3257

Dual purposes here:
- On platforms like Cygwin that don't have `rpath`, try to avoid adding things to `rpath`
- Some distribution binary package makers require that no shared library list a system library directory (`/lib`, `/lib64`, `/usr/lib`, `/usr/lib64`) in its `rpath`; this patch simplifies the code to ensure the shared library can find its dependencies at runtime.
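
A hedged sketch of the intended effect, assuming LIBDIR is /usr/lib (the
extension and library names are hypothetical):

	from distutils.core import Extension

	# With this change, /usr/lib is dropped from the rpath, so no
	# '-Wl,-rpath,/usr/lib' ends up on the link line; the dynamic loader
	# finds libz in the system directory on its own.
	ext = Extension('spam', ['spam.c'],
	                libraries=['z'],
	                runtime_library_dirs=['/usr/lib'])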
---
 distutils/unixccompiler.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index 7e68596b26..cf49e35990 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -148,6 +148,15 @@ class UnixCCompiler(CCompiler):
         dylib_lib_extension = ".dll"
         dylib_lib_format = "cyg%s%s"
 
+    def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+        """Remove standard library path from rpath"""
+        libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args(
+            libraries, library_dirs, runtime_library_dirs)
+        libdir = sysconfig.get_config_var('LIBDIR')
+        if runtime_library_dirs and (libdir in runtime_library_dirs):
+            runtime_library_dirs.remove(libdir)
+        return libraries, library_dirs, runtime_library_dirs
+  
     def preprocess(
         self,
         source,

From 18203a23695ebb91b915b9e6465c473b5acd8f0b Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Sat, 20 Aug 2022 12:05:48 -0400
Subject: [PATCH 0803/1761] ENH: Drop LIBDIR from RPATH only if starting with
 /usr/lib.

Avoids problems with odd LIBDIR settings. Package managers will be putting
things in LIBDIR anyway, so this should catch all the use cases I know of.
---
 distutils/tests/test_build_ext.py | 10 ++++++++++
 distutils/unixccompiler.py        |  6 +++++-
 2 files changed, 15 insertions(+), 1 deletion(-)

diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index 6c4c4ba869..ec8a818da6 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -8,6 +8,7 @@
 import sys
 import tempfile
 import textwrap
+import time
 from distutils import sysconfig
 from distutils.command.build_ext import build_ext
 from distutils.core import Distribution
@@ -55,6 +56,9 @@ def user_site_dir(request):
     site.USER_BASE = orig_user_base
     build_ext.USER_BASE = orig_user_base
 
+    if sys.platform == 'cygwin':
+        time.sleep(1)
+
 
 @contextlib.contextmanager
 def safe_extension_import(name, path):
@@ -95,6 +99,12 @@ def test_build_ext(self):
         copy_xxmodule_c(self.tmp_dir)
         xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
         xx_ext = Extension('xx', [xx_c])
+        if sys.platform != "win32":
+            xx_ext = Extension(
+                'xx', [xx_c],
+                library_dirs=['/usr/lib'], libraries=['z'],
+                runtime_library_dirs=['/usr/lib']
+            )
         dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
         dist.package_dir = self.tmp_dir
         cmd = self.build_ext(dist)
diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index cf49e35990..8a01bf3ad8 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -153,7 +153,11 @@ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
         libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args(
             libraries, library_dirs, runtime_library_dirs)
         libdir = sysconfig.get_config_var('LIBDIR')
-        if runtime_library_dirs and (libdir in runtime_library_dirs):
+        if (
+            runtime_library_dirs
+            and libdir.startswith("/usr/lib")
+            and (libdir in runtime_library_dirs)
+        ):
             runtime_library_dirs.remove(libdir)
         return libraries, library_dirs, runtime_library_dirs
   

From 22a78ae832f35a75ac764a4880e99e4a1d7d11e9 Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Sat, 20 Aug 2022 12:07:40 -0400
Subject: [PATCH 0804/1761] TST: Get all tests passing on Cygwin.

The reference check caused docutils problems when no long description (ldesc)
was given. The cygwinccompiler change produced a DeprecationWarning.
---
 distutils/command/check.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/command/check.py b/distutils/command/check.py
index 58b3f949f9..2eb72b454e 100644
--- a/distutils/command/check.py
+++ b/distutils/command/check.py
@@ -144,7 +144,7 @@ def _check_rst_data(self, data):
         document.note_source(source_path, -1)
         try:
             parser.parse(data, document)
-        except AttributeError as e:
+        except (AttributeError, TypeError) as e:
             reporter.messages.append((
                 -1,
                 f'Could not finish the parsing: {e}.',

From 0a479d338b8caeae0f28b8560a13d58c04278b14 Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Sat, 20 Aug 2022 19:57:10 -0400
Subject: [PATCH 0805/1761] STY: Apply suggestions from CI running black

---
 distutils/tests/test_build_ext.py | 8 +++++---
 distutils/unixccompiler.py        | 5 +++--
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index ec8a818da6..d47a4e8ae6 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -101,9 +101,11 @@ def test_build_ext(self):
         xx_ext = Extension('xx', [xx_c])
         if sys.platform != "win32":
             xx_ext = Extension(
-                'xx', [xx_c],
-                library_dirs=['/usr/lib'], libraries=['z'],
-                runtime_library_dirs=['/usr/lib']
+                'xx',
+                [xx_c],
+                library_dirs=['/usr/lib'],
+                libraries=['z'],
+                runtime_library_dirs=['/usr/lib'],
             )
         dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
         dist.package_dir = self.tmp_dir
diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index 8a01bf3ad8..b04359a5e5 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -151,7 +151,8 @@ class UnixCCompiler(CCompiler):
     def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
         """Remove standard library path from rpath"""
         libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args(
-            libraries, library_dirs, runtime_library_dirs)
+            libraries, library_dirs, runtime_library_dirs
+        )
         libdir = sysconfig.get_config_var('LIBDIR')
         if (
             runtime_library_dirs
@@ -160,7 +161,7 @@ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
         ):
             runtime_library_dirs.remove(libdir)
         return libraries, library_dirs, runtime_library_dirs
-  
+
     def preprocess(
         self,
         source,

From 7b693ab556bfc6dd2da1978189e82d1c87652136 Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Sun, 21 Aug 2022 06:57:52 -0400
Subject: [PATCH 0806/1761] CI: Install a library to link to on Cygwin

Cygwin separates import libraries from dynamic libraries (the former are
needed at link time, the latter at run time), so linking against zlib in CI
requires the -devel package.
---
 .github/workflows/main.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index b6b757dbf5..5f104963a9 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -114,6 +114,7 @@ jobs:
             python${{ matrix.python }}-tox,
             gcc-core,
             gcc-g++,
+            zlib-devel,
             ncompress
             git
       - name: Run tests

From 19ea50b30f0113981bb5272069a056645f72acbb Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Thu, 1 Sep 2022 18:05:55 -0400
Subject: [PATCH 0807/1761] TST: Try testing rpath with non-FHS library dir

Let's see if I got the syntax right.

Next step will be hooking the temporary /tmp/libxx_z.so file handling into pytest's tempfile handling.
---
 distutils/tests/test_build_ext.py | 47 ++++++++++++++++++++++++-------
 1 file changed, 37 insertions(+), 10 deletions(-)

diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index d47a4e8ae6..ba494fef94 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -1,4 +1,5 @@
 import contextlib
+import glob
 import importlib
 import os
 import platform
@@ -94,19 +95,32 @@ class TestBuildExt(TempdirManager):
     def build_ext(self, *args, **kwargs):
         return build_ext(*args, **kwargs)
 
-    def test_build_ext(self):
+    @pytest.mark.parametrize("copy_so", [False, True])
+    def test_build_ext(self, copy_so):
         missing_compiler_executable()
         copy_xxmodule_c(self.tmp_dir)
         xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
         xx_ext = Extension('xx', [xx_c])
         if sys.platform != "win32":
-            xx_ext = Extension(
-                'xx',
-                [xx_c],
-                library_dirs=['/usr/lib'],
-                libraries=['z'],
-                runtime_library_dirs=['/usr/lib'],
-            )
+            if not copy_so:
+                xx_ext = Extension(
+                    'xx',
+                    [xx_c],
+                    library_dirs=['/usr/lib'],
+                    libraries=['z'],
+                    runtime_library_dirs=['/usr/lib'],
+                )
+            elif sys.platform == 'linux':
+                libz_so = glob.glob('/usr/lib*/libz.so*')
+                shutil.copyfile(libz_so[0], '/tmp/libxx_z.so')
+                
+                xx_ext = Extension(
+                    'xx',
+                    [xx_c],
+                    library_dirs=['/tmp'],
+                    libraries=['xx_z'],
+                    runtime_library_dirs=['/tmp'],
+                )
         dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
         dist.package_dir = self.tmp_dir
         cmd = self.build_ext(dist)
@@ -125,10 +139,13 @@ def test_build_ext(self):
             sys.stdout = old_stdout
 
         with safe_extension_import('xx', self.tmp_dir):
-            self._test_xx()
+            self._test_xx(copy_so)
+            
+        if sys.platform == 'linux' and copy_so:
+            os.unlink('/tmp/libxx_z.so')
 
     @staticmethod
-    def _test_xx():
+    def _test_xx(copy_so):
         import xx
 
         for attr in ('error', 'foo', 'new', 'roj'):
@@ -142,6 +159,16 @@ def _test_xx():
             assert xx.__doc__ == doc
         assert isinstance(xx.Null(), xx.Null)
         assert isinstance(xx.Str(), xx.Str)
+        
+        if sys.platform == 'linux':
+            so_headers = subprocess.check_output(["readelf", "-d", xx.__file__], universal_newlines=True)
+            if not copy_so:
+                # Linked against a library in /usr/lib{,64}
+                assert 'RPATH' not in so_headers and 'RUNPATH' not in so_headers
+            else:
+                # Linked against a library in /tmp
+                assert 'RPATH' in so_headers or 'RUNPATH' in so_headers
+                # The import is the real test here
 
     def test_solaris_enable_shared(self):
         dist = Distribution({'name': 'xx'})

From 1dde9bcbf35c97f3463cb1fe54c288df4d71b468 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 21:09:44 +0200
Subject: [PATCH 0808/1761] Apply ruff rule RUF100

RUF100 Unused blanket `noqa` directive
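
An illustrative sketch (not taken from the change below):

	x = 1  # noqa  <- RUF100: blanket directive that suppresses nothing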
---
 docs/conf.py                              |  6 +++---
 setuptools/_importlib.py                  |  2 +-
 setuptools/command/easy_install.py        | 16 ++++++++--------
 setuptools/config/_apply_pyprojecttoml.py |  4 ++--
 setuptools/config/expand.py               |  2 +-
 setuptools/config/pyprojecttoml.py        |  2 +-
 setuptools/config/setupcfg.py             |  4 ++--
 setuptools/discovery.py                   |  2 +-
 setuptools/msvc.py                        |  2 +-
 setuptools/tests/test_warnings.py         |  8 ++++----
 10 files changed, 24 insertions(+), 24 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index 534da15a37..a0e3398d54 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -124,9 +124,9 @@
 github_repo_url = f'{github_url}/{github_repo_slug}'
 github_sponsors_url = f'{github_url}/sponsors'
 extlinks = {
-    'user': (f'{github_sponsors_url}/%s', '@%s'),  # noqa: WPS323
-    'pypi': ('https://pypi.org/project/%s', '%s'),  # noqa: WPS323
-    'wiki': ('https://wikipedia.org/wiki/%s', '%s'),  # noqa: WPS323
+    'user': (f'{github_sponsors_url}/%s', '@%s'),
+    'pypi': ('https://pypi.org/project/%s', '%s'),
+    'wiki': ('https://wikipedia.org/wiki/%s', '%s'),
 }
 extensions += ['sphinx.ext.extlinks']
 
diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py
index bd2b01e2b5..8e52888d6f 100644
--- a/setuptools/_importlib.py
+++ b/setuptools/_importlib.py
@@ -42,7 +42,7 @@ def disable_importlib_metadata_finder(metadata):
 
     disable_importlib_metadata_finder(metadata)
 else:
-    import importlib.metadata as metadata  # noqa: F401
+    import importlib.metadata as metadata
 
 
 if sys.version_info < (3, 9):
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 5ec5080131..c2bb8bd199 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -467,7 +467,7 @@ def pseudo_tempname(self):
     def warn_deprecated_options(self):
         pass
 
-    def check_site_dir(self):  # noqa: C901  # is too complex (12)  # FIXME
+    def check_site_dir(self):  # is too complex (12)  # FIXME
         """Verify that self.install_dir is .pth-capable dir, if needed"""
 
         instdir = normalize_path(self.install_dir)
@@ -526,7 +526,7 @@ def check_site_dir(self):  # noqa: C901  # is too complex (12)  # FIXME
 
             %s
         """
-    ).lstrip()  # noqa
+    ).lstrip()
 
     __not_exists_id = textwrap.dedent(
         """
@@ -534,7 +534,7 @@ def check_site_dir(self):  # noqa: C901  # is too complex (12)  # FIXME
         choose a different installation directory (using the -d or --install-dir
         option).
         """
-    ).lstrip()  # noqa
+    ).lstrip()
 
     __access_msg = textwrap.dedent(
         """
@@ -552,7 +552,7 @@ def check_site_dir(self):  # noqa: C901  # is too complex (12)  # FIXME
 
         Please make the appropriate changes for your system and try again.
         """
-    ).lstrip()  # noqa
+    ).lstrip()
 
     def cant_write_to_target(self):
         msg = self.__cant_write_msg % (
@@ -939,7 +939,7 @@ def egg_distribution(self, egg_path):
         return Distribution.from_filename(egg_path, metadata=metadata)
 
     # FIXME: 'easy_install.install_egg' is too complex (11)
-    def install_egg(self, egg_path, tmpdir):  # noqa: C901
+    def install_egg(self, egg_path, tmpdir):
         destination = os.path.join(
             self.install_dir,
             os.path.basename(egg_path),
@@ -1131,7 +1131,7 @@ def install_wheel(self, wheel_path, tmpdir):
             pkg_resources.require("%(name)s==%(version)s")  # this exact version
             pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
         """
-    ).lstrip()  # noqa
+    ).lstrip()
 
     __id_warning = textwrap.dedent(
         """
@@ -1139,7 +1139,7 @@ def install_wheel(self, wheel_path, tmpdir):
         this to work.  (e.g. by being the application's script directory, by being on
         PYTHONPATH, or by being added to sys.path by your code.)
         """
-    )  # noqa
+    )
 
     def installation_report(self, req, dist, what="Installed"):
         """Helpful installation message for display to package users"""
@@ -1166,7 +1166,7 @@ def installation_report(self, req, dist, what="Installed"):
 
         See the setuptools documentation for the "develop" command for more info.
         """
-    ).lstrip()  # noqa
+    ).lstrip()
 
     def report_editable(self, spec, setup_script):
         dirname = os.path.dirname(setup_script)
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 5a8700051e..f44271c5dd 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -31,8 +31,8 @@
 
 if TYPE_CHECKING:
     from distutils.dist import _OptionsList
-    from setuptools._importlib import metadata  # noqa
-    from setuptools.dist import Distribution  # noqa
+    from setuptools._importlib import metadata
+    from setuptools.dist import Distribution
 
 EMPTY: Mapping = MappingProxyType({})  # Immutable dict-like
 _ProjectReadmeValue = Union[str, Dict[str, str]]
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 6ea6cf6d0e..e5f5dc586e 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -47,7 +47,7 @@
 from ..warnings import SetuptoolsWarning
 
 if TYPE_CHECKING:
-    from setuptools.dist import Distribution  # noqa
+    from setuptools.dist import Distribution
 
 _K = TypeVar("_K")
 _V = TypeVar("_V", covariant=True)
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index c8dae5f751..d41c956cbd 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -25,7 +25,7 @@
 from ._apply_pyprojecttoml import apply as _apply
 
 if TYPE_CHECKING:
-    from setuptools.dist import Distribution  # noqa
+    from setuptools.dist import Distribution
     from typing_extensions import Self
 
 _logger = logging.getLogger(__name__)
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 0a7a42eb09..80ebe3d9bd 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -39,9 +39,9 @@
 from . import expand
 
 if TYPE_CHECKING:
-    from distutils.dist import DistributionMetadata  # noqa
+    from distutils.dist import DistributionMetadata
 
-    from setuptools.dist import Distribution  # noqa
+    from setuptools.dist import Distribution
 
 SingleCommandOptions = Dict["str", Tuple["str", Any]]
 """Dict that associate the name of the options of a particular command to a
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
index 880d414033..3179852c69 100644
--- a/setuptools/discovery.py
+++ b/setuptools/discovery.py
@@ -62,7 +62,7 @@
 chain_iter = itertools.chain.from_iterable
 
 if TYPE_CHECKING:
-    from setuptools import Distribution  # noqa
+    from setuptools import Distribution
 
 
 def _valid_name(path: StrPath) -> bool:
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index f86c480d18..1e3df05ee6 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -842,7 +842,7 @@ def WindowsSdkLastVersion(self):
         """
         return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))
 
-    @property  # noqa: C901
+    @property
     def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
         """
         Microsoft Windows SDK directory.
diff --git a/setuptools/tests/test_warnings.py b/setuptools/tests/test_warnings.py
index b5da66b9d6..ac6d07795e 100644
--- a/setuptools/tests/test_warnings.py
+++ b/setuptools/tests/test_warnings.py
@@ -18,7 +18,7 @@
             ********************************************************************************
 
     !!
-    """,  # noqa,
+    """,
     ),
     "futue_due_date": dict(
         args=("Summary", "Lorem ipsum"),
@@ -35,7 +35,7 @@
             ********************************************************************************
 
     !!
-    """,  # noqa
+    """,
     ),
     "past_due_date_with_docs": dict(
         args=("Summary", "Lorem ipsum"),
@@ -54,7 +54,7 @@
             ********************************************************************************
 
     !!
-    """,  # noqa
+    """,
     ),
 }
 
@@ -99,7 +99,7 @@ class _MyDeprecation(SetuptoolsDeprecationWarning):
             ********************************************************************************
 
     !!
-    """  # noqa
+    """
     assert str(exc_info.value) == cleandoc(expected)
 
 

From 42fd45872aa4d52f4b15e5f922bb2302be0e0b63 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 21:29:16 +0200
Subject: [PATCH 0809/1761] Apply ruff/flake8-raise rule RSE102

RSE102 Unnecessary parentheses on raised exception
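
An illustrative sketch (not taken from the change below):

	raise SystemExit()   # RSE102: the parentheses add nothing
	raise SystemExit     # preferred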
---
 pkg_resources/__init__.py          | 2 +-
 setuptools/command/easy_install.py | 2 +-
 setuptools/tests/test_sandbox.py   | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index c4ace5aa77..a0f1c692e8 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -2808,7 +2808,7 @@ def _parse_extras(cls, extras_spec):
             return ()
         req = Requirement.parse('x' + extras_spec)
         if req.specs:
-            raise ValueError()
+            raise ValueError
         return req.extras
 
     @classmethod
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 5ec5080131..8425ee80de 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -237,7 +237,7 @@ def _render_version():
         dist = get_distribution('setuptools')
         tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
         print(tmpl.format(**locals()))
-        raise SystemExit()
+        raise SystemExit
 
     def finalize_options(self):  # noqa: C901  # is too complex (25)  # FIXME
         self.version and self._render_version()
diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py
index f666615d99..75dfcba2f1 100644
--- a/setuptools/tests/test_sandbox.py
+++ b/setuptools/tests/test_sandbox.py
@@ -100,7 +100,7 @@ class ExceptionUnderTest(Exception):
         with pytest.raises(setuptools.sandbox.UnpickleableException) as caught:
             with setuptools.sandbox.save_modules():
                 setuptools.sandbox.hide_setuptools()
-                raise ExceptionUnderTest()
+                raise ExceptionUnderTest
 
         (msg,) = caught.value.args
         assert msg == 'ExceptionUnderTest()'

From 55e4e1aa217f5d83d8ec80bc3b956bf2c5c79041 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 21:36:32 +0200
Subject: [PATCH 0810/1761] Apply ruff/flake8-return rule RET502

RET502 Do not implicitly `return None` in function able to return non-`None` value
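
An illustrative sketch (not taken from the change below):

	def find(seq, target):
	    for i, item in enumerate(seq):
	        if item == target:
	            return i
	    return           # RET502: spell it 'return None' explicitly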
---
 setuptools/package_index.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index c91e419923..a91bb4584b 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -856,7 +856,7 @@ def _resolve_vcs(url):
     def _download_vcs(self, url, spec_filename):
         vcs = self._resolve_vcs(url)
         if not vcs:
-            return
+            return None
         if vcs == 'svn':
             raise DistutilsError(
                 f"Invalid config, SVN download is not supported: {url}"

From fc7577b1dbae9ffbfbdd5b7a597914c8b4099b4c Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 21:37:55 +0200
Subject: [PATCH 0811/1761] Apply ruff/flake8-return rule RET503

RET503 Missing explicit `return` at the end of function able to return non-`None` value
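
An illustrative sketch (not taken from the change below):

	def pick(flag):
	    if flag:
	        return 'value'
	    # RET503: a function that can return a value must end with an
	    # explicit 'return None' rather than falling off the end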
---
 setuptools/msvc.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index f86c480d18..649279a806 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1087,6 +1087,7 @@ def _find_dot_net_versions(self, bits):
             return 'v3.5', 'v2.0.50727'
         elif self.vs_ver == 8.0:
             return 'v3.0', 'v2.0.50727'
+        return None
 
     @staticmethod
     def _use_last_dir_name(path, prefix=''):
@@ -1648,6 +1649,7 @@ def VCRuntimeRedist(self):
             path = join(prefix, arch_subdir, crt_dir, vcruntime)
             if isfile(path):
                 return path
+        return None
 
     def return_env(self, exists=True):
         """

From ae472b47dacfe0884eeabe123b7cb014f9b6af5e Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 21:55:41 +0200
Subject: [PATCH 0812/1761] Apply ruff/Perflint rule PERF401

PERF401 Use a list comprehension to create a transformed list
---
 setuptools/command/bdist_egg.py    | 8 +++++---
 setuptools/command/easy_install.py | 9 +++++----
 2 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 73476e0cec..6cfd2ce86f 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -290,9 +290,11 @@ def get_ext_outputs(self):
 
         paths = {self.bdist_dir: ''}
         for base, dirs, files in sorted_walk(self.bdist_dir):
-            for filename in files:
-                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
-                    all_outputs.append(paths[base] + filename)
+            all_outputs.extend(
+                paths[base] + filename
+                for filename in files
+                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS
+            )
             for filename in dirs:
                 paths[os.path.join(base, filename)] = paths[base] + filename + '/'
 
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 5ec5080131..801a7df768 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1203,10 +1203,11 @@ def build_and_install(self, setup_script, setup_base):
 
             self.run_setup(setup_script, setup_base, args)
             all_eggs = Environment([dist_dir])
-            eggs = []
-            for key in all_eggs:
-                for dist in all_eggs[key]:
-                    eggs.append(self.install_egg(dist.location, setup_base))
+            eggs = [
+                self.install_egg(dist.location, setup_base)
+                for key in all_eggs
+                for dist in all_eggs[key]
+            ]
             if not eggs and not self.dry_run:
                 log.warn("No eggs found in %s (setup script problem?)", dist_dir)
             return eggs
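
The shape of the PERF401 rewrite, reduced to a self-contained sketch (the file names are made up):

    import os

    NATIVE_EXTENSIONS = {'.so', '.pyd'}
    files = ['a.so', 'notes.txt', 'b.pyd', 'mod.py']

    # Before: build the list one .append() at a time inside a loop.
    outputs = []
    for filename in files:
        if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
            outputs.append(filename)

    # After: a single comprehension (or list.extend with a generator, as
    # in the bdist_egg hunk above) expresses the transformation in one step.
    outputs_perf = [
        filename
        for filename in files
        if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS
    ]
    assert outputs == outputs_perf == ['a.so', 'b.pyd']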

From 9d45f137d8a462d86f996c9ac8ad59390b4c8001 Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Wed, 21 Sep 2022 12:21:31 -0400
Subject: [PATCH 0813/1761] TST: Use different library file for link test

The old test seemed to pick up the 32-bit library, not the 64-bit one.
The new test should pick up the 64-bit one consistently when relevant.
---
 distutils/tests/test_build_ext.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index ba494fef94..a88554dbbf 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -6,6 +6,7 @@
 import re
 import shutil
 import site
+import subprocess
 import sys
 import tempfile
 import textwrap
@@ -112,8 +113,8 @@ def test_build_ext(self, copy_so):
                 )
             elif sys.platform == 'linux':
                 libz_so = glob.glob('/usr/lib*/libz.so*')
-                shutil.copyfile(libz_so[0], '/tmp/libxx_z.so')
-                
+                shutil.copyfile(libz_so[-1], '/tmp/libxx_z.so')
+
                 xx_ext = Extension(
                     'xx',
                     [xx_c],
@@ -140,7 +141,7 @@ def test_build_ext(self, copy_so):
 
         with safe_extension_import('xx', self.tmp_dir):
             self._test_xx(copy_so)
-            
+
         if sys.platform == 'linux' and copy_so:
             os.unlink('/tmp/libxx_z.so')
 
@@ -159,9 +160,11 @@ def _test_xx(copy_so):
             assert xx.__doc__ == doc
         assert isinstance(xx.Null(), xx.Null)
         assert isinstance(xx.Str(), xx.Str)
-        
+
         if sys.platform == 'linux':
-            so_headers = subprocess.check_output(["readelf", "-d", xx.__file__], universal_newlines=True)
+            so_headers = subprocess.check_output(
+                ["readelf", "-d", xx.__file__], universal_newlines=True
+            )
             if not copy_so:
                 # Linked against a library in /usr/lib{,64}
                 assert 'RPATH' not in so_headers and 'RUNPATH' not in so_headers
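
The assertion relies on `readelf -d`, which prints the dynamic section of an ELF file; RPATH/RUNPATH entries appear there only when the extension was linked with an embedded library search path. A standalone sketch of that check (Linux-only, requires binutils):

    import subprocess

    def has_embedded_search_path(shared_object):
        """True if the ELF dynamic section carries an RPATH or RUNPATH entry."""
        headers = subprocess.check_output(
            ['readelf', '-d', shared_object], universal_newlines=True
        )
        return 'RPATH' in headers or 'RUNPATH' in headers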

From d5f60da4cbc5f42903f117edbcd142749163b685 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 28 Jun 2024 16:13:01 -0400
Subject: [PATCH 0814/1761] Replace use of deprecated find_executable with
 shutil.which.

---
 setuptools/command/easy_install.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index eb6ba1025f..cda9fc6cfc 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -23,7 +23,6 @@
 )
 from distutils import log, dir_util
 from distutils.command.build_scripts import first_line_re
-from distutils.spawn import find_executable
 from distutils.command import install
 import sys
 import os
@@ -2275,7 +2274,7 @@ def _use_header(new_header):
         to an executable on the system.
         """
         clean_header = new_header[2:-1].strip('"')
-        return sys.platform != 'win32' or find_executable(clean_header)
+        return sys.platform != 'win32' or shutil.which(clean_header)
 
 
 class WindowsExecutableLauncherWriter(WindowsScriptWriter):
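
`shutil.which` is a drop-in replacement here: like the deprecated `find_executable`, it returns the resolved path of the program or `None` when nothing is found, and it additionally honors `PATHEXT` on Windows. A quick sketch:

    import shutil

    # Prints e.g. '/usr/bin/git' when found on PATH, else None.
    print(shutil.which('git'))
    assert shutil.which('surely-not-a-real-program') is None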

From 259bfdb515cd2674468a5fff33b2ac6aef9b2261 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 28 Jun 2024 16:19:30 -0400
Subject: [PATCH 0815/1761] Add news fragment

---
 newsfragments/4444.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4444.feature.rst

diff --git a/newsfragments/4444.feature.rst b/newsfragments/4444.feature.rst
new file mode 100644
index 0000000000..20b30f8ca4
--- /dev/null
+++ b/newsfragments/4444.feature.rst
@@ -0,0 +1 @@
+Updated distutils including significant changes to support Cygwin and mingw compilers.

From 92989c2b836ded82c5d0e1b24971f1b73efdaf48 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 29 Jun 2024 14:21:38 +0200
Subject: [PATCH 0816/1761] Enforce ruff/tryceratops rule TRY300

TRY300 Consider moving this statement to an `else` block
---
 pkg_resources/extern/__init__.py                 | 10 +++++-----
 ruff.toml                                        |  2 ++
 setuptools/__init__.py                           |  2 +-
 setuptools/command/easy_install.py               |  2 +-
 setuptools/config/_validate_pyproject/formats.py |  2 +-
 setuptools/depends.py                            |  6 +++---
 setuptools/extension.py                          |  5 ++---
 setuptools/extern/__init__.py                    | 10 +++++-----
 setuptools/tests/test_sdist.py                   |  2 +-
 9 files changed, 21 insertions(+), 20 deletions(-)

diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
index 9b9ac10aa9..daa978ff72 100644
--- a/pkg_resources/extern/__init__.py
+++ b/pkg_resources/extern/__init__.py
@@ -41,14 +41,14 @@ def load_module(self, fullname: str):
         """
         root, base, target = fullname.partition(self.root_name + '.')
         for prefix in self.search_path:
+            extant = prefix + target
             try:
-                extant = prefix + target
                 __import__(extant)
-                mod = sys.modules[extant]
-                sys.modules[fullname] = mod
-                return mod
             except ImportError:
-                pass
+                continue
+            mod = sys.modules[extant]
+            sys.modules[fullname] = mod
+            return mod
         else:
             raise ImportError(
                 "The '{target}' package is required; "
diff --git a/ruff.toml b/ruff.toml
index 8828fe61a5..2effe696ea 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -14,9 +14,11 @@ extend-select = [
 	"F404", # late-future-import
 	"PYI", # flake8-pyi
 	"UP", # pyupgrade
+	"TRY",
 	"YTT", # flake8-2020
 ]
 ignore = [
+	"TRY301", # raise-within-try, it's handy
 	"UP015", # redundant-open-modes, explicit is preferred
 	"UP030", # temporarily disabled
 	"UP031", # temporarily disabled
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 5ef0f7dbd8..bf03f37b77 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -57,9 +57,9 @@ def _get_project_config_files(self, filenames=None):
             """Ignore ``pyproject.toml``, they are not related to setup_requires"""
             try:
                 cfg, toml = super()._split_standard_project_metadata(filenames)
-                return cfg, ()
             except Exception:
                 return filenames, ()
+            return cfg, ()
 
         def finalize_options(self):
             """
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 5ec5080131..7b00801ca6 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -102,9 +102,9 @@ def _to_bytes(s):
 def isascii(s):
     try:
         s.encode('ascii')
-        return True
     except UnicodeError:
         return False
+    return True
 
 
 def _one_liner(text):
diff --git a/setuptools/config/_validate_pyproject/formats.py b/setuptools/config/_validate_pyproject/formats.py
index 5a0599cbb5..aacf4092b0 100644
--- a/setuptools/config/_validate_pyproject/formats.py
+++ b/setuptools/config/_validate_pyproject/formats.py
@@ -91,9 +91,9 @@ def pep508(value: str) -> bool:
         """
         try:
             _req.Requirement(value)
-            return True
         except _req.InvalidRequirement:
             return False
+        return True
 
 except ImportError:  # pragma: no cover
     _logger.warning(
diff --git a/setuptools/depends.py b/setuptools/depends.py
index b6af51c410..2226b6784a 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -58,11 +58,11 @@ def get_version(self, paths=None, default="unknown"):
         if self.attribute is None:
             try:
                 f, p, i = find_module(self.module, paths)
-                if f:
-                    f.close()
-                return default
             except ImportError:
                 return None
+            if f:
+                f.close()
+            return default
 
         v = get_module_constant(self.module, self.attribute, default, paths)
 
diff --git a/setuptools/extension.py b/setuptools/extension.py
index 8caad78d4b..25420f42de 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -16,10 +16,9 @@ def _have_cython():
     try:
         # from (cython_impl) import build_ext
         __import__(cython_impl, fromlist=['build_ext']).build_ext
-        return True
     except Exception:
-        pass
-    return False
+        return False
+    return True
 
 
 # for compatibility
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 5ad7169e3b..f9b6eea70d 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -32,14 +32,14 @@ def load_module(self, fullname):
         """
         root, base, target = fullname.partition(self.root_name + '.')
         for prefix in self.search_path:
+            extant = prefix + target
             try:
-                extant = prefix + target
                 __import__(extant)
-                mod = sys.modules[extant]
-                sys.modules[fullname] = mod
-                return mod
             except ImportError:
-                pass
+                continue
+            mod = sys.modules[extant]
+            sys.modules[fullname] = mod
+            return mod
         else:
             raise ImportError(
                 "The '{target}' package is required; "
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 387ec3bebf..1be568d3fa 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -122,9 +122,9 @@ def touch(path):
 def symlink_or_skip_test(src, dst):
     try:
         os.symlink(src, dst)
-        return dst
     except (OSError, NotImplementedError):
         pytest.skip("symlink not supported in OS")
+    return dst
 
 
 class TestSdistTest:
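
The `isascii` hunk above shows the canonical TRY300 shape: keep only the statement that can raise inside `try`, and move the success-path `return` after the handler (or into an `else:` block). In miniature:

    def isascii(s):
        try:
            s.encode('ascii')  # the only call expected to raise
        except UnicodeError:
            return False
        # The success path sits outside the try block, making it clear
        # that only s.encode() is guarded by the handler.
        return True

    assert isascii('plain')
    assert not isascii('naïve')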

From 31c859945f696b243027638558e68b269ef68889 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 1 Jul 2024 09:48:07 -0400
Subject: [PATCH 0817/1761] Remove 'normally supplied to setup()'. Declarative
 styles are normalized.

---
 docs/userguide/declarative_config.rst | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst
index 1d5bf6ae42..342f18460e 100644
--- a/docs/userguide/declarative_config.rst
+++ b/docs/userguide/declarative_config.rst
@@ -12,11 +12,9 @@ Configuring setuptools using ``setup.cfg`` files
     call is still required even if your configuration resides in ``setup.cfg``.
 
 ``Setuptools`` allows using configuration files (for example, :file:`setup.cfg`)
-to define a package’s metadata and other options that are normally supplied
-to the ``setup()`` function (declarative config).
+to define a package’s metadata and other options (declarative config).
 
-This approach not only allows automation scenarios but also reduces
-boilerplate code in some cases.
+This approach allows automation scenarios and can reduce boilerplate code.
 
 .. _example-setup-config:
 

From a9a5400313089a48c9e8eebdbdde62d79682e982 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 1 Jul 2024 10:05:17 -0400
Subject: [PATCH 0818/1761] Add a section on interpolation.

Closes #1648
---
 docs/userguide/declarative_config.rst | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/docs/userguide/declarative_config.rst b/docs/userguide/declarative_config.rst
index 342f18460e..2f650e1746 100644
--- a/docs/userguide/declarative_config.rst
+++ b/docs/userguide/declarative_config.rst
@@ -133,6 +133,24 @@ value associated with ``""`` in the ``package_dir`` dictionary.
    Please see :doc:`package discovery </userguide/package_discovery>` for more
    details.
 
+Interpolation
+=============
+
+Config files are parsed using :mod:`configparser` with
+`interpolation <https://docs.python.org/3/library/configparser.html#interpolation-of-values>`_
+enabled. As a result, one config value may reference another. This
+feature may be used, for example, in defining extras:
+
+.. code-block:: ini
+
+    [options.extras_require]
+    tester =
+        pytest==3.3.2
+        pytest-sugar
+    dev =
+        pytest-xdist
+        %(tester)s
+
 Specifying values
 =================
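
A runnable sketch of the behaviour the new section documents, exercising :mod:`configparser` directly with the section and option names from the example above:

    import configparser
    import textwrap

    cfg = configparser.ConfigParser()
    cfg.read_string(textwrap.dedent("""
        [options.extras_require]
        tester =
            pytest==3.3.2
            pytest-sugar
        dev =
            pytest-xdist
            %(tester)s
    """))

    # Interpolation happens on access: %(tester)s expands to the value of
    # `tester` in the same section, so `dev` includes everything in it.
    dev = cfg['options.extras_require']['dev']
    assert 'pytest-xdist' in dev and 'pytest-sugar' in dev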
 

From 4e6d97db1b4364cb184960f477c3e78435332646 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 1 Jul 2024 11:36:55 -0400
Subject: [PATCH 0819/1761] Prefer relative imports for better portability.

---
 distutils/spawn.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index 429d1ccb2e..50d30a2761 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -31,7 +31,7 @@ def _inject_macos_ver(env: Mapping[str:str] | None) -> Mapping[str:str] | None:
     if platform.system() != 'Darwin':
         return env
 
-    from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver
+    from .util import MACOSX_VERSION_VAR, get_macosx_target_ver
 
     target_ver = get_macosx_target_ver()
     update = {MACOSX_VERSION_VAR: target_ver} if target_ver else {}
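
Schematically, the point of the change (this fragment only makes sense inside the package itself; setuptools ships its copy of distutils as `setuptools._distutils`, where a hard-coded top-level name can resolve to the wrong tree):

    # Absolute form: pins the top-level package name, so inside a vendored
    # or re-rooted copy it may import a different distutils than intended.
    # from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver

    # Relative form: resolves against whatever package this module lives
    # in, so the import keeps working wherever the package is mounted.
    from .util import MACOSX_VERSION_VAR, get_macosx_target_ver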

From 3accd5ca04ec23c34b345eef8d0e5d3af072e7a4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 1 Jul 2024 12:13:55 -0400
Subject: [PATCH 0820/1761] Bump version: 70.1.1 → 70.2.0
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 16 ++++++++++++++++
 newsfragments/4434.bugfix.rst  |  2 --
 newsfragments/4444.feature.rst |  1 -
 pyproject.toml                 |  2 +-
 5 files changed, 18 insertions(+), 5 deletions(-)
 delete mode 100644 newsfragments/4434.bugfix.rst
 delete mode 100644 newsfragments/4444.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index a11458c8dd..96806f9494 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 70.1.1
+current_version = 70.2.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index a33251ec82..df3c50f6a2 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,19 @@
+v70.2.0
+=======
+
+Features
+--------
+
+- Updated distutils including significant changes to support Cygwin and mingw compilers. (#4444)
+
+
+Bugfixes
+--------
+
+- Fix distribution name normalisation (:pep:`625`) for valid versions that are
+  not canonical (e.g. ``1.0-2``). (#4434)
+
+
 v70.1.1
 =======
 
diff --git a/newsfragments/4434.bugfix.rst b/newsfragments/4434.bugfix.rst
deleted file mode 100644
index 5eeb674297..0000000000
--- a/newsfragments/4434.bugfix.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix distribution name normalisation (:pep:`625`) for valid versions that are
-not canonical (e.g. ``1.0-2``).
diff --git a/newsfragments/4444.feature.rst b/newsfragments/4444.feature.rst
deleted file mode 100644
index 20b30f8ca4..0000000000
--- a/newsfragments/4444.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Updated distutils including significant changes to support Cygwin and mingw compilers.
diff --git a/pyproject.toml b/pyproject.toml
index 75a3fbdd89..00e7ee169f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "70.1.1"
+version = "70.2.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 2484a26b4d9aaa07ac2d929d9044e17ba9d55716 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 1 Jul 2024 13:26:28 -0400
Subject: [PATCH 0821/1761] Remove .gitignore per
 https://blog.jaraco.com/skeleton/#ignoring-artifacts.

---
 .gitignore | 23 -----------------------
 1 file changed, 23 deletions(-)
 delete mode 100644 .gitignore

diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 9ad7e2dd67..0000000000
--- a/.gitignore
+++ /dev/null
@@ -1,23 +0,0 @@
-# syntax: glob
-# See https://blog.jaraco.com/skeleton/#ignoring-artifacts before modifying.
-bin
-build
-dist
-docs/build
-include
-lib
-distribute.egg-info
-setuptools.egg-info
-setuptools/tests/bdist_wheel_testdata/*/*.egg-info/
-.coverage
-.eggs
-.tox
-.venv
-*.egg
-*.py[cod]
-*.swp
-*~
-.hg*
-.cache
-.pytest_cache/
-.mypy_cache/

From 8d958f539addcc2a400777a74dc566778140ae3a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 2 Jul 2024 09:12:59 +0200
Subject: [PATCH 0822/1761] Ignore ruff/tryceratops rule TRY003

TRY003 Avoid specifying long messages outside the exception class

Applying this rule would mean creating lots of specialised exception
classes. Not sure this would improve readability and maintainability
in this context.
---
 ruff.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ruff.toml b/ruff.toml
index 2effe696ea..be78969cdb 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -18,6 +18,7 @@ extend-select = [
 	"YTT", # flake8-2020
 ]
 ignore = [
+	"TRY003", # raise-vanilla-args, avoid multitude of exception classes
 	"TRY301", # raise-within-try, it's handy
 	"UP015", # redundant-open-modes, explicit is preferred
 	"UP030", # temporarily disabled

From bd1d338614fa1ab7e424fb4ceb6d41927dfaeb33 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 12:26:26 -0400
Subject: [PATCH 0823/1761] Add issue template for distutils deprecation
 reports.

---
 .../ISSUE_TEMPLATE/distutils-deprecation.yaml | 103 ++++++++++++++++++
 1 file changed, 103 insertions(+)
 create mode 100644 .github/ISSUE_TEMPLATE/distutils-deprecation.yaml

diff --git a/.github/ISSUE_TEMPLATE/distutils-deprecation.yaml b/.github/ISSUE_TEMPLATE/distutils-deprecation.yaml
new file mode 100644
index 0000000000..7ac1f8bbb9
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/distutils-deprecation.yaml
@@ -0,0 +1,103 @@
+---
+name: 📇 Distutils Deprecation Report
+description: >-
+  Report a use-case affected by the deprecation of distutils
+labels:
+- distutils deprecation
+- Needs Triage
+projects:
+- pypa/6
+body:
+- type: markdown
+  attributes:
+    value: >
+      So you've encountered an issue with the deprecation of distutils.
+
+      First, sorry for the inconvenience while we work to untangle the
+      setuptools/distutils legacy. Our goal is to ensure that the vast
+      majority of use cases are satisfied prior to removing the legacy
+      support.
+
+      Please check the
+      [existing reports](https://github.com/pypa/setuptools/issues?q=label%3A%22distutils+deprecation%22+)
+      to see if the affecting condition has been reported previously.
+
+- type: markdown
+  attributes:
+    value: >-
+      **Environment**
+- type: input
+  attributes:
+    label: setuptools version
+    placeholder: For example, setuptools==69.1.0
+    description: >-
+      Please also test with the **latest version** of `setuptools`.
+
+      Typically, this involves modifying `requires` in `[build-system]` of
+      [`pyproject.toml`](https://setuptools.pypa.io/en/latest/userguide/quickstart.html#basic-use),
+      not just updating `setuptools` using `pip`.
+  validations:
+    required: true
+- type: input
+  attributes:
+    label: Python version
+    placeholder: For example, Python 3.10
+    description: >-
+      Please ensure you are using a [supported version of Python](https://devguide.python.org/versions/#supported-versions).
+
+      Setuptools does not support versions that have reached [`end-of-life`](https://devguide.python.org/versions/#unsupported-versions).
+
+      Support for versions of Python under development (i.e. without a stable release) is experimental.
+  validations:
+    required: true
+- type: input
+  attributes:
+    label: OS
+    placeholder: For example, Gentoo Linux, RHEL 8, Arch Linux, macOS etc.
+  validations:
+    required: true
+- type: textarea
+  attributes:
+    label: Additional environment information
+    description: >-
+      Feel free to add more information about your environment here.
+    placeholder: >-
+      This is only happening when I run setuptools on my fridge's patched firmware 🤯
+
+- type: textarea
+  attributes:
+    label: Description
+    description: >-
+      A clear and concise description of the circumstances leading to the warning.
+  validations:
+    required: true
+
+- type: textarea
+  attributes:
+    label: How to Reproduce
+    description: >-
+      Describe the steps to reproduce the warning.
+
+      Please try to create a [minimal reproducer](https://stackoverflow.com/help/minimal-reproducible-example),
+      and avoid things like "see the steps in the CI logs".
+    placeholder: |
+      1. Clone a simplified example: `git clone ...`
+      2. Create a virtual environment for isolation with `...`
+      3. Build the project with setuptools via `...`
+      4. Then run `...`
+      5. An error occurs.
+  validations:
+    required: true
+
+- type: textarea
+  attributes:
+    label: Other detail
+    description: >-
+      Paste the output of the steps above, including the commands
+      themselves and setuptools' output/traceback etc.
+    value: |
+      ```console
+
+      ```
+
+...

From ef37d17fa7de624cc6e89491827e14ed5db9d023 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 12:32:34 -0400
Subject: [PATCH 0824/1761] In the warnings, provide link to issue template.

---
 _distutils_hack/__init__.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
index 881090d590..1e688f1738 100644
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -3,6 +3,12 @@
 import os
 
 
+report_url = (
+    "https://github.com/pypa/setuptools/issues/new?"
+    "template=distutils-deprecation.yaml"
+)
+
+
 def warn_distutils_present():
     if 'distutils' not in sys.modules:
         return
@@ -26,7 +32,8 @@ def clear_distutils():
     warnings.warn(
         "Setuptools is replacing distutils. Support for replacing "
         "an already imported distutils is deprecated. In the future, "
-        "this condition will fail.",
+        "this condition will fail. "
+        f"Register concerns at {report_url}"
     )
     mods = [
         name
@@ -49,7 +56,8 @@ def enabled():
             "Reliance on distutils from stdlib is deprecated. Users "
             "must rely on setuptools to provide the distutils module. "
             "Avoid importing distutils or import setuptools first, "
-            "and avoid setting SETUPTOOLS_USE_DISTUTILS=stdlib."
+            "and avoid setting SETUPTOOLS_USE_DISTUTILS=stdlib. "
+            f"Register concerns at {report_url}"
         )
     return which == 'local'
 

From 70cda3d1e8bb8a9602256f235c9a023934dd6065 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 12:33:18 -0400
Subject: [PATCH 0825/1761] Use '.yml' for consistency.

---
 .../{distutils-deprecation.yaml => distutils-deprecation.yml}   | 0
 _distutils_hack/__init__.py                                     | 2 +-
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename .github/ISSUE_TEMPLATE/{distutils-deprecation.yaml => distutils-deprecation.yml} (100%)

diff --git a/.github/ISSUE_TEMPLATE/distutils-deprecation.yaml b/.github/ISSUE_TEMPLATE/distutils-deprecation.yml
similarity index 100%
rename from .github/ISSUE_TEMPLATE/distutils-deprecation.yaml
rename to .github/ISSUE_TEMPLATE/distutils-deprecation.yml
diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
index 1e688f1738..35ab5cad49 100644
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -5,7 +5,7 @@
 
 report_url = (
     "https://github.com/pypa/setuptools/issues/new?"
-    "template=distutils-deprecation.yaml"
+    "template=distutils-deprecation.yml"
 )
 
 

From 62bd80fb82c984e5601ce9ebb56aa8237fae7233 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 13:31:06 -0400
Subject: [PATCH 0826/1761] Declare the dependencies and update vendoring
 routine for setuptools (only) to simply install the dependencies to the
 _vendor folder.

---
 pyproject.toml                  | 11 ++++++++-
 setuptools/_vendor/vendored.txt | 12 ---------
 setuptools/extern/__init__.py   |  5 +++-
 tools/vendored.py               | 43 ++++++++++++++++++++++++---------
 tox.ini                         |  3 +++
 5 files changed, 48 insertions(+), 26 deletions(-)
 delete mode 100644 setuptools/_vendor/vendored.txt

diff --git a/pyproject.toml b/pyproject.toml
index 00e7ee169f..0709c0b143 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,7 +24,16 @@ classifiers = [
 ]
 keywords = ["CPAN PyPI distutils eggs package management"]
 requires-python = ">=3.8"
-dependencies = []
+dependencies = [
+	"packaging>=24",
+	"ordered-set>=3.1.1",
+	"more_itertools>=8.8",
+	"jaraco.text>=3.7",
+	"importlib_resources>=5.10.2",
+	"importlib_metadata>=6",
+	"tomli>=2.0.1",
+	"wheel>=0.43.0",
+]
 
 [project.urls]
 Source = "https://github.com/pypa/setuptools"
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
deleted file mode 100644
index c981dde807..0000000000
--- a/setuptools/_vendor/vendored.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-packaging==24
-ordered-set==3.1.1
-more_itertools==8.8.0
-jaraco.text==3.7.0
-importlib_resources==5.10.2
-importlib_metadata==6.0.0
-# required for importlib_resources and _metadata on older Pythons
-zipp==3.7.0
-tomli==2.0.1
-# required for jaraco.context on older Pythons
-backports.tarfile
-wheel==0.43.0
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index f9b6eea70d..18ca1e2428 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -77,14 +77,17 @@ def install(self):
 # cog.outl(f"names = (\n{names}\n)")
 # ]]]
 names = (
-    'backports',
+    'autocommand',
     'importlib_metadata',
     'importlib_resources',
+    'inflect',
     'jaraco',
     'more_itertools',
     'ordered_set',
     'packaging',
     'tomli',
+    'typeguard',
+    'typing_extensions',
     'wheel',
     'zipp',
 )
diff --git a/tools/vendored.py b/tools/vendored.py
index edc9195f3c..29457720b6 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -4,6 +4,7 @@
 import subprocess
 from textwrap import dedent
 
+from jaraco.packaging import metadata
 from path import Path
 
 
@@ -13,7 +14,7 @@ def remove_all(paths):
 
 
 def update_vendored():
-    update_pkg_resources()
+    # update_pkg_resources()
     update_setuptools()
 
 
@@ -207,19 +208,37 @@ def update_pkg_resources():
     rewrite_platformdirs(vendor / "platformdirs")
 
 
+def load_deps():
+    """
+    Read the dependencies from the package metadata in `.` (the project root).
+    """
+    return metadata.load('.').get_all('Requires-Dist')
+
+
+def install_deps(deps, vendor):
+    """
+    Install the deps into the vendor directory.
+    """
+    install_args = [
+        sys.executable,
+        '-m',
+        'pip',
+        'install',
+        '--target',
+        str(vendor),
+        '--python-version',
+        '3.8',
+        '--only-binary',
+        ':all:',
+    ] + list(deps)
+    subprocess.check_call(install_args)
+
+
 def update_setuptools():
     vendor = Path('setuptools/_vendor')
-    install(vendor)
-    rewrite_packaging(vendor / 'packaging', 'setuptools.extern')
-    repair_namespace(vendor / 'jaraco')
-    repair_namespace(vendor / 'backports')
-    rewrite_jaraco_text(vendor / 'jaraco/text', 'setuptools.extern')
-    rewrite_jaraco_functools(vendor / 'jaraco/functools', 'setuptools.extern')
-    rewrite_jaraco_context(vendor / 'jaraco', 'setuptools.extern')
-    rewrite_importlib_resources(vendor / 'importlib_resources', 'setuptools.extern')
-    rewrite_importlib_metadata(vendor / 'importlib_metadata', 'setuptools.extern')
-    rewrite_more_itertools(vendor / "more_itertools")
-    rewrite_wheel(vendor / "wheel")
+    deps = load_deps()
+    clean(vendor)
+    install_deps(deps, vendor)
 
 
 def yield_top_level(name):
diff --git a/tox.ini b/tox.ini
index 6b04ddb1cd..9ff4488cd3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -75,6 +75,9 @@ allowlist_externals = git, sh
 deps =
 	path
 	cogapp
+	jaraco.packaging
+	# workaround for pypa/pyproject-hooks#192
+	pyproject-hooks<1.1
 commands =
 	vendor: python -m tools.vendored
 	sh -c "git grep -l -F '\[\[\[cog' | xargs -t cog -I {toxinidir} -r"  # update `*.extern`

From e9bb6879e29d67b3999de4119c65b63c5e22b67b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 14:41:43 -0400
Subject: [PATCH 0827/1761] Specify environment-conditional transitive deps.

Workaround for pypa/pip#12770.
---
 tools/vendored.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/tools/vendored.py b/tools/vendored.py
index 29457720b6..63d8c577cf 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -219,6 +219,11 @@ def install_deps(deps, vendor):
     """
     Install the deps to vendor.
     """
+    # workaround for https://github.com/pypa/pip/issues/12770
+    deps += [
+        'zipp >= 3.7',
+        'backports.tarfile',
+    ]
     install_args = [
         sys.executable,
         '-m',

From d4352b5d6653d44a6604532436c37fe1f62e7b02 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 14:17:05 -0400
Subject: [PATCH 0828/1761] Import dependencies naturally and ensure they're
 available by appending the vendor dir to sys.path.

---
 setuptools/__init__.py                    |  3 +++
 setuptools/_core_metadata.py              |  8 ++++----
 setuptools/_entry_points.py               |  6 +++---
 setuptools/_importlib.py                  |  4 ++--
 setuptools/_itertools.py                  |  2 +-
 setuptools/_normalization.py              |  4 ++--
 setuptools/_reqs.py                       |  4 ++--
 setuptools/command/_requirestxt.py        |  4 ++--
 setuptools/command/bdist_wheel.py         | 10 +++++-----
 setuptools/command/build_py.py            |  2 +-
 setuptools/command/easy_install.py        |  2 +-
 setuptools/command/editable_wheel.py      |  2 +-
 setuptools/command/egg_info.py            |  2 +-
 setuptools/command/test.py                |  4 ++--
 setuptools/compat/py310.py                |  2 +-
 setuptools/config/_apply_pyprojecttoml.py |  2 +-
 setuptools/config/expand.py               |  4 ++--
 setuptools/config/pyprojecttoml.py        |  2 +-
 setuptools/config/setupcfg.py             |  8 ++++----
 setuptools/depends.py                     |  2 +-
 setuptools/dist.py                        | 10 +++++-----
 setuptools/msvc.py                        |  3 ++-
 setuptools/package_index.py               |  3 ++-
 setuptools/tests/config/test_setupcfg.py  |  2 +-
 setuptools/tests/test_bdist_wheel.py      |  6 +++---
 setuptools/tests/test_extern.py           |  2 +-
 setuptools/tests/test_setuptools.py       |  2 +-
 setuptools/tests/test_wheel.py            |  4 ++--
 setuptools/wheel.py                       |  7 ++++---
 29 files changed, 61 insertions(+), 55 deletions(-)

diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index bf03f37b77..2917c6a811 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -3,8 +3,11 @@
 import functools
 import os
 import re
+import sys
 from typing import TYPE_CHECKING
 
+sys.path.append(os.path.dirname(__file__) + '/_vendor')
+
 import _distutils_hack.override  # noqa: F401
 import distutils.core
 from distutils.errors import DistutilsOptionError
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 45aae7d70b..82ec19fc75 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -16,10 +16,10 @@
 from distutils.util import rfc822_escape
 
 from . import _normalization, _reqs
-from .extern.packaging.markers import Marker
-from .extern.packaging.requirements import Requirement
-from .extern.packaging.utils import canonicalize_name, canonicalize_version
-from .extern.packaging.version import Version
+from packaging.markers import Marker
+from packaging.requirements import Requirement
+from packaging.utils import canonicalize_name, canonicalize_version
+from packaging.version import Version
 from .warnings import SetuptoolsDeprecationWarning
 
 
diff --git a/setuptools/_entry_points.py b/setuptools/_entry_points.py
index b244e78387..5de12582be 100644
--- a/setuptools/_entry_points.py
+++ b/setuptools/_entry_points.py
@@ -3,11 +3,11 @@
 import itertools
 
 from .errors import OptionError
-from .extern.jaraco.text import yield_lines
-from .extern.jaraco.functools import pass_none
+from jaraco.text import yield_lines
+from jaraco.functools import pass_none
 from ._importlib import metadata
 from ._itertools import ensure_unique
-from .extern.more_itertools import consume
+from more_itertools import consume
 
 
 def ensure_valid(ep):
diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py
index 8e52888d6f..ff3288102a 100644
--- a/setuptools/_importlib.py
+++ b/setuptools/_importlib.py
@@ -38,7 +38,7 @@ def disable_importlib_metadata_finder(metadata):
 
 
 if sys.version_info < (3, 10):
-    from setuptools.extern import importlib_metadata as metadata
+    import importlib_metadata as metadata
 
     disable_importlib_metadata_finder(metadata)
 else:
@@ -46,6 +46,6 @@ def disable_importlib_metadata_finder(metadata):
 
 
 if sys.version_info < (3, 9):
-    from setuptools.extern import importlib_resources as resources
+    import importlib_resources as resources
 else:
     import importlib.resources as resources  # noqa: F401
diff --git a/setuptools/_itertools.py b/setuptools/_itertools.py
index b8bf6d210a..d6ca841353 100644
--- a/setuptools/_itertools.py
+++ b/setuptools/_itertools.py
@@ -1,4 +1,4 @@
-from setuptools.extern.more_itertools import consume  # noqa: F401
+from more_itertools import consume  # noqa: F401
 
 
 # copied from jaraco.itertools 6.1
diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py
index e858052ccd..467b643d46 100644
--- a/setuptools/_normalization.py
+++ b/setuptools/_normalization.py
@@ -5,7 +5,7 @@
 
 import re
 
-from .extern import packaging
+import packaging
 
 # https://packaging.python.org/en/latest/specifications/core-metadata/#name
 _VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I)
@@ -54,7 +54,7 @@ def safe_version(version: str) -> str:
     >>> safe_version("ubuntu lts")
     Traceback (most recent call last):
     ...
-    setuptools.extern.packaging.version.InvalidVersion: Invalid version: 'ubuntu.lts'
+    packaging.version.InvalidVersion: Invalid version: 'ubuntu.lts'
     """
     v = version.replace(' ', '.')
     try:
diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py
index 9f83437033..1b64d9df79 100644
--- a/setuptools/_reqs.py
+++ b/setuptools/_reqs.py
@@ -1,8 +1,8 @@
 from functools import lru_cache
 from typing import Callable, Iterable, Iterator, TypeVar, Union, overload
 
-import setuptools.extern.jaraco.text as text
-from setuptools.extern.packaging.requirements import Requirement
+import jaraco.text as text
+from packaging.requirements import Requirement
 
 _T = TypeVar("_T")
 _StrOrIter = Union[str, Iterable[str]]
diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py
index 1f1967e7aa..ef35d183e8 100644
--- a/setuptools/command/_requirestxt.py
+++ b/setuptools/command/_requirestxt.py
@@ -15,8 +15,8 @@
 from typing import Dict, Mapping, TypeVar
 
 from .. import _reqs
-from ..extern.jaraco.text import yield_lines
-from ..extern.packaging.requirements import Requirement
+from jaraco.text import yield_lines
+from packaging.requirements import Requirement
 
 
 # dict can work as an ordered set
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index d8cdd4e406..50248cdc25 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -23,10 +23,10 @@
 from zipfile import ZIP_DEFLATED, ZIP_STORED
 
 from .. import Command, __version__
-from ..extern.wheel.metadata import pkginfo_to_metadata
-from ..extern.packaging import tags
-from ..extern.packaging import version as _packaging_version
-from ..extern.wheel.wheelfile import WheelFile
+from wheel.metadata import pkginfo_to_metadata
+from packaging import tags
+from packaging import version as _packaging_version
+from wheel.wheelfile import WheelFile
 
 if TYPE_CHECKING:
     import types
@@ -68,7 +68,7 @@ def get_platform(archive_root: str | None) -> str:
     """Return our platform name 'win32', 'linux_x86_64'"""
     result = sysconfig.get_platform()
     if result.startswith("macosx") and archive_root is not None:
-        from ..extern.wheel.macosx_libfile import calculate_macosx_platform_tag
+        from wheel.macosx_libfile import calculate_macosx_platform_tag
 
         result = calculate_macosx_platform_tag(archive_root, result)
     elif _is_32bit_interpreter():
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index ab49874635..15a4f63fdd 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -13,7 +13,7 @@
 from pathlib import Path
 from typing import Iterable, Iterator
 
-from ..extern.more_itertools import unique_everseen
+from more_itertools import unique_everseen
 from ..warnings import SetuptoolsDeprecationWarning
 
 
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index e6ce3fcc05..36114d40ed 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -76,7 +76,7 @@
 import pkg_resources
 from ..compat import py39, py311
 from .._path import ensure_directory
-from ..extern.jaraco.text import yield_lines
+from jaraco.text import yield_lines
 
 
 # Turn on PEP440Warnings
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index ae31bb4c79..65058c2cd6 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -333,7 +333,7 @@ def _safely_run(self, cmd_name: str):
             )
 
     def _create_wheel_file(self, bdist_wheel):
-        from ..extern.wheel.wheelfile import WheelFile
+        from wheel.wheelfile import WheelFile
 
         dist_info = self.get_finalized_command("dist_info")
         dist_name = dist_info.name
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 2f20303341..9e63a934e6 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -27,7 +27,7 @@
 import setuptools.unicode_utils as unicode_utils
 from setuptools.glob import glob
 
-from setuptools.extern import packaging
+import packaging
 from ..warnings import SetuptoolsDeprecationWarning
 
 
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
index af1349e1c6..fbdf9fb942 100644
--- a/setuptools/command/test.py
+++ b/setuptools/command/test.py
@@ -19,8 +19,8 @@
 )
 from .._importlib import metadata
 from setuptools import Command
-from setuptools.extern.more_itertools import unique_everseen
-from setuptools.extern.jaraco.functools import pass_none
+from more_itertools import unique_everseen
+from jaraco.functools import pass_none
 
 
 class ScanningLoader(TestLoader):
diff --git a/setuptools/compat/py310.py b/setuptools/compat/py310.py
index f7d53d6de9..cc875c004b 100644
--- a/setuptools/compat/py310.py
+++ b/setuptools/compat/py310.py
@@ -7,4 +7,4 @@
 if sys.version_info >= (3, 11):
     import tomllib
 else:  # pragma: no cover
-    from setuptools.extern import tomli as tomllib
+    import tomli as tomllib
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index f44271c5dd..8c1a81dda5 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -204,7 +204,7 @@ def _project_urls(dist: Distribution, val: dict, _root_dir):
 
 
 def _python_requires(dist: Distribution, val: dict, _root_dir):
-    from setuptools.extern.packaging.specifiers import SpecifierSet
+    from packaging.specifiers import SpecifierSet
 
     _set_config(dist, "python_requires", SpecifierSet(val))
 
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index e5f5dc586e..f5d94a380c 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -122,7 +122,7 @@ def read_files(
 
     (By default ``root_dir`` is the current directory).
     """
-    from setuptools.extern.more_itertools import always_iterable
+    from more_itertools import always_iterable
 
     root_dir = os.path.abspath(root_dir or os.getcwd())
     _filepaths = (os.path.join(root_dir, path) for path in always_iterable(filepaths))
@@ -287,7 +287,7 @@ def find_packages(
     :rtype: list
     """
     from setuptools.discovery import construct_package_dir
-    from setuptools.extern.more_itertools import unique_everseen, always_iterable
+    from more_itertools import unique_everseen, always_iterable
 
     if namespaces:
         from setuptools.discovery import PEP420PackageFinder as PackageFinder
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index d41c956cbd..c315d71535 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -278,7 +278,7 @@ def _ensure_previously_set(self, dist: Distribution, field: str):
     def _expand_directive(
         self, specifier: str, directive, package_dir: Mapping[str, str]
     ):
-        from setuptools.extern.more_itertools import always_iterable
+        from more_itertools import always_iterable
 
         with _ignore_errors(self.ignore_option_errors):
             root_dir = self.root_dir
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 80ebe3d9bd..2ca0856ab4 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -31,10 +31,10 @@
 
 from .._path import StrPath
 from ..errors import FileError, OptionError
-from ..extern.packaging.markers import default_environment as marker_env
-from ..extern.packaging.requirements import InvalidRequirement, Requirement
-from ..extern.packaging.specifiers import SpecifierSet
-from ..extern.packaging.version import InvalidVersion, Version
+from packaging.markers import default_environment as marker_env
+from packaging.requirements import InvalidRequirement, Requirement
+from packaging.specifiers import SpecifierSet
+from packaging.version import InvalidVersion, Version
 from ..warnings import SetuptoolsDeprecationWarning
 from . import expand
 
diff --git a/setuptools/depends.py b/setuptools/depends.py
index 2226b6784a..871a0925ef 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -6,7 +6,7 @@
 
 from . import _imp
 from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
-from .extern.packaging.version import Version
+from packaging.version import Version
 
 
 __all__ = ['Require', 'find_module']
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 32e8d43c64..bcab50ba65 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -21,11 +21,11 @@
 from distutils.fancy_getopt import translate_longopt
 from distutils.util import strtobool
 
-from .extern.more_itertools import partition, unique_everseen
-from .extern.ordered_set import OrderedSet
-from .extern.packaging.markers import InvalidMarker, Marker
-from .extern.packaging.specifiers import InvalidSpecifier, SpecifierSet
-from .extern.packaging.version import Version
+from more_itertools import partition, unique_everseen
+from ordered_set import OrderedSet
+from packaging.markers import InvalidMarker, Marker
+from packaging.specifiers import InvalidSpecifier, SpecifierSet
+from packaging.version import Version
 
 from . import _entry_points
 from . import _normalization
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index a3d350fe50..2768059213 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -23,7 +23,8 @@
 import subprocess
 import distutils.errors
 from typing import TYPE_CHECKING
-from setuptools.extern.more_itertools import unique_everseen
+
+from more_itertools import unique_everseen
 
 # https://github.com/python/mypy/issues/8166
 if not TYPE_CHECKING and platform.system() == 'Windows':
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 2c807f6b4e..c24c783762 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -39,7 +39,8 @@
 from distutils.errors import DistutilsError
 from fnmatch import translate
 from setuptools.wheel import Wheel
-from setuptools.extern.more_itertools import unique_everseen
+
+from more_itertools import unique_everseen
 
 from .unicode_utils import _read_utf8_with_fallback, _cfg_read_utf8_with_fallback
 
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index bf9777c668..dc8a4f7f88 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -9,7 +9,7 @@
 from distutils.errors import DistutilsOptionError, DistutilsFileError
 from setuptools.dist import Distribution, _Distribution
 from setuptools.config.setupcfg import ConfigHandler, read_configuration
-from setuptools.extern.packaging.requirements import InvalidRequirement
+from packaging.requirements import InvalidRequirement
 from setuptools.warnings import SetuptoolsDeprecationWarning
 from ..textwrap import DALS
 
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 232b66d368..7043d857d7 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -26,7 +26,7 @@
     remove_readonly_exc,
 )
 from setuptools.dist import Distribution
-from setuptools.extern.packaging import tags
+from packaging import tags
 
 DEFAULT_FILES = {
     "dummy_dist-1.0.dist-info/top_level.txt",
@@ -598,12 +598,12 @@ def _fake_import(name: str, *args, **kwargs):
         return importlib.__import__(name, *args, **kwargs)
 
     with suppress(KeyError):
-        monkeypatch.delitem(sys.modules, "setuptools.extern.wheel.macosx_libfile")
+        monkeypatch.delitem(sys.modules, "wheel.macosx_libfile")
 
     # Install an importer shim that refuses to load ctypes
     monkeypatch.setattr(builtins, "__import__", _fake_import)
     with pytest.raises(ModuleNotFoundError, match="No module named ctypes"):
-        import setuptools.extern.wheel.macosx_libfile
+        import wheel.macosx_libfile  # noqa: F401
 
     # Unload and reimport the bdist_wheel command module to make sure it won't try to
     # import ctypes
diff --git a/setuptools/tests/test_extern.py b/setuptools/tests/test_extern.py
index 0d6b164f53..da01b25b98 100644
--- a/setuptools/tests/test_extern.py
+++ b/setuptools/tests/test_extern.py
@@ -2,7 +2,7 @@
 import pickle
 
 from setuptools import Distribution
-from setuptools.extern import ordered_set
+import ordered_set
 
 
 def test_reimport_extern():
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index b1ca2396bd..0c5b1f18fa 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -16,7 +16,7 @@
 import setuptools.depends as dep
 from setuptools.depends import Require
 
-from setuptools.extern.packaging.version import Version
+from packaging.version import Version
 
 
 @pytest.fixture(autouse=True)
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index cdfd9d1a5f..e58ccd8d18 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -17,8 +17,8 @@
 from jaraco import path
 
 from pkg_resources import Distribution, PathMetadata, PY_MAJOR
-from setuptools.extern.packaging.utils import canonicalize_name
-from setuptools.extern.packaging.tags import parse_tag
+from packaging.utils import canonicalize_name
+from packaging.tags import parse_tag
 from setuptools.wheel import Wheel
 
 from .contexts import tempdir
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index e06daec4d0..a05cd98d1f 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -9,12 +9,13 @@
 import zipfile
 import contextlib
 
+from packaging.version import Version as parse_version
+from packaging.tags import sys_tags
+from packaging.utils import canonicalize_name
+
 from distutils.util import get_platform
 
 import setuptools
-from setuptools.extern.packaging.version import Version as parse_version
-from setuptools.extern.packaging.tags import sys_tags
-from setuptools.extern.packaging.utils import canonicalize_name
 from setuptools.command.egg_info import write_requirements, _egg_basename
 from setuptools.archive_util import _unpack_zipfile_obj
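
The mechanism in miniature: one `sys.path.append` in `setuptools/__init__.py` makes every vendored package importable under its natural name, and because the directory is appended rather than prepended, an independently installed copy of a dependency still takes precedence. A sketch of the idea:

    import os
    import sys

    # Appending means the vendored tree is a fallback: copies already on
    # sys.path (e.g. in site-packages) are found first.
    sys.path.append(os.path.join(os.path.dirname(__file__), '_vendor'))

    # Downstream modules can then import dependencies naturally:
    from packaging.version import Version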
 

From 00384a5f4fd22c653172b99feefe13b0009eb870 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 14:22:06 -0400
Subject: [PATCH 0829/1761] Re-vendor setuptools packages.

---
 .../INSTALLER                                 |    0
 .../autocommand-2.2.2.dist-info/LICENSE       |  166 +
 .../autocommand-2.2.2.dist-info/METADATA      |  420 ++
 .../autocommand-2.2.2.dist-info/RECORD        |   18 +
 .../WHEEL                                     |    0
 .../autocommand-2.2.2.dist-info/top_level.txt |    1 +
 setuptools/_vendor/autocommand/__init__.py    |   27 +
 setuptools/_vendor/autocommand/autoasync.py   |  142 +
 setuptools/_vendor/autocommand/autocommand.py |   70 +
 setuptools/_vendor/autocommand/automain.py    |   59 +
 setuptools/_vendor/autocommand/autoparse.py   |  333 ++
 setuptools/_vendor/autocommand/errors.py      |   23 +
 .../backports.tarfile-1.0.0.dist-info/RECORD  |    9 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../METADATA                                  |   12 +-
 .../backports.tarfile-1.2.0.dist-info/RECORD  |   17 +
 .../REQUESTED                                 |    0
 .../WHEEL                                     |    0
 .../top_level.txt                             |    0
 setuptools/_vendor/backports/__init__.py      |    1 +
 .../{tarfile.py => tarfile/__init__.py}       |  129 +-
 .../_vendor/backports/tarfile/__main__.py     |    5 +
 .../tarfile/compat}/__init__.py               |    0
 .../_vendor/backports/tarfile/compat/py38.py  |   24 +
 .../importlib_metadata-6.0.0.dist-info/RECORD |   26 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../METADATA                                  |   82 +-
 .../importlib_metadata-8.0.0.dist-info/RECORD |   32 +
 .../REQUESTED                                 |    0
 .../WHEEL                                     |    2 +-
 .../top_level.txt                             |    0
 .../_vendor/importlib_metadata/__init__.py    |  405 +-
 .../_vendor/importlib_metadata/_adapters.py   |   23 +-
 .../_vendor/importlib_metadata/_compat.py     |   19 +-
 .../_vendor/importlib_metadata/_meta.py       |   60 +-
 .../compat}/__init__.py                       |    0
 .../importlib_metadata/compat/py311.py        |   22 +
 .../{_py39compat.py => compat/py39.py}        |    7 +-
 .../_vendor/importlib_metadata/diagnose.py    |   21 +
 .../RECORD                                    |   77 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../METADATA                                  |   52 +-
 .../RECORD                                    |   89 +
 .../REQUESTED                                 |    0
 .../WHEEL                                     |    1 -
 .../top_level.txt                             |    0
 .../_vendor/importlib_resources/__init__.py   |   14 +-
 .../_vendor/importlib_resources/_adapters.py  |    4 +-
 .../_vendor/importlib_resources/_common.py    |    7 +-
 .../_vendor/importlib_resources/_compat.py    |  108 -
 .../_vendor/importlib_resources/_itertools.py |   69 +-
 .../_vendor/importlib_resources/_legacy.py    |  120 -
 setuptools/_vendor/importlib_resources/abc.py |    3 +-
 .../{tests/zipdata02 => compat}/__init__.py   |    0
 .../importlib_resources/compat/py38.py        |   11 +
 .../importlib_resources/compat/py39.py        |   10 +
 .../_vendor/importlib_resources/functional.py |   81 +
 .../future}/__init__.py                       |    0
 .../importlib_resources/future/adapters.py    |   95 +
 .../_vendor/importlib_resources/readers.py    |   90 +-
 .../_vendor/importlib_resources/simple.py     |    2 +-
 .../importlib_resources/tests/_compat.py      |   32 -
 .../importlib_resources/tests/_path.py        |   18 +-
 .../tests/compat/__init__.py}                 |    0
 .../importlib_resources/tests/compat/py312.py |   18 +
 .../importlib_resources/tests/compat/py39.py  |   10 +
 .../tests/data01/subdirectory/binary.file     |  Bin 4 -> 4 bytes
 .../subdirectory/subsubdir/resource.txt       |    1 +
 .../namespacedata01/subdirectory/binary.file  |    1 +
 .../tests/test_compatibilty_files.py          |    6 +-
 .../tests/test_contents.py                    |    2 +-
 .../importlib_resources/tests/test_custom.py  |   47 +
 .../importlib_resources/tests/test_files.py   |   23 +-
 .../tests/test_functional.py                  |  242 +
 .../importlib_resources/tests/test_open.py    |   20 +-
 .../importlib_resources/tests/test_path.py    |   19 +-
 .../importlib_resources/tests/test_read.py    |   41 +-
 .../importlib_resources/tests/test_reader.py  |   34 +-
 .../tests/test_resource.py                    |  155 +-
 .../importlib_resources/tests/update-zips.py  |   53 -
 .../_vendor/importlib_resources/tests/util.py |   79 +-
 .../_vendor/importlib_resources/tests/zip.py  |   32 +
 .../tests/zipdata01/ziptestdata.zip           |  Bin 876 -> 0 bytes
 .../tests/zipdata02/ziptestdata.zip           |  Bin 698 -> 0 bytes
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../_vendor/inflect-7.3.1.dist-info/METADATA  |  591 +++
 .../_vendor/inflect-7.3.1.dist-info/RECORD    |   13 +
 .../WHEEL                                     |    2 +-
 .../inflect-7.3.1.dist-info/top_level.txt     |    1 +
 setuptools/_vendor/inflect/__init__.py        | 3986 +++++++++++++++++
 .../REQUESTED => inflect/compat/__init__.py}  |    0
 setuptools/_vendor/inflect/compat/py38.py     |    7 +
 .../REQUESTED => inflect/py.typed}            |    0
 .../jaraco.functools-4.0.0.dist-info/RECORD   |   10 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    2 -
 .../METADATA                                  |   21 +-
 .../jaraco.functools-4.0.1.dist-info/RECORD   |   10 +
 .../WHEEL                                     |    2 +-
 .../top_level.txt                             |    0
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    2 -
 .../jaraco.text-3.12.1.dist-info/METADATA     |   95 +
 .../jaraco.text-3.12.1.dist-info/RECORD       |   20 +
 .../REQUESTED                                 |    0
 .../WHEEL                                     |    2 +-
 .../top_level.txt                             |    0
 .../jaraco.text-3.7.0.dist-info/METADATA      |   55 -
 .../jaraco.text-3.7.0.dist-info/RECORD        |   10 -
 setuptools/_vendor/jaraco/context.py          |    2 +-
 .../_vendor/jaraco/functools/__init__.py      |    6 +-
 .../_vendor/jaraco/functools/__init__.pyi     |    3 -
 setuptools/_vendor/jaraco/text/__init__.py    |   61 +-
 setuptools/_vendor/jaraco/text/layouts.py     |   25 +
 .../_vendor/jaraco/text/show-newlines.py      |   33 +
 .../_vendor/jaraco/text/strip-prefix.py       |   21 +
 setuptools/_vendor/jaraco/text/to-dvorak.py   |    6 +
 setuptools/_vendor/jaraco/text/to-qwerty.py   |    6 +
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../METADATA                                  |  544 +--
 .../more_itertools-10.3.0.dist-info/RECORD    |   16 +
 .../more_itertools-10.3.0.dist-info/REQUESTED |    0
 .../more_itertools-10.3.0.dist-info/WHEEL     |    4 +
 .../more_itertools-8.8.0.dist-info/RECORD     |   17 -
 .../top_level.txt                             |    1 -
 setuptools/_vendor/more_itertools/__init__.py |    4 +-
 setuptools/_vendor/more_itertools/more.py     | 1352 +++++-
 setuptools/_vendor/more_itertools/more.pyi    |  479 +-
 setuptools/_vendor/more_itertools/recipes.py  |  506 ++-
 setuptools/_vendor/more_itertools/recipes.pyi |  131 +-
 .../ordered_set-3.1.1.dist-info/RECORD        |    9 -
 .../ordered_set-3.1.1.dist-info/top_level.txt |    1 -
 .../INSTALLER                                 |    0
 .../METADATA                                  |   67 +-
 .../ordered_set-4.1.0.dist-info/RECORD        |    8 +
 .../ordered_set-4.1.0.dist-info/REQUESTED     |    0
 .../_vendor/ordered_set-4.1.0.dist-info/WHEEL |    4 +
 .../__init__.py}                              |  194 +-
 setuptools/_vendor/ordered_set/py.typed       |    0
 .../_vendor/packaging-24.0.dist-info/RECORD   |   37 -
 .../packaging-24.1.dist-info/INSTALLER        |    1 +
 .../LICENSE                                   |    0
 .../LICENSE.APACHE                            |    0
 .../LICENSE.BSD                               |    0
 .../METADATA                                  |    6 +-
 .../_vendor/packaging-24.1.dist-info/RECORD   |   37 +
 .../packaging-24.1.dist-info/REQUESTED        |    0
 .../WHEEL                                     |    0
 setuptools/_vendor/packaging/__init__.py      |    2 +-
 setuptools/_vendor/packaging/_elffile.py      |    8 +-
 setuptools/_vendor/packaging/_manylinux.py    |   22 +-
 setuptools/_vendor/packaging/_musllinux.py    |   10 +-
 setuptools/_vendor/packaging/_parser.py       |   26 +-
 setuptools/_vendor/packaging/_tokenizer.py    |   18 +-
 setuptools/_vendor/packaging/markers.py       |  115 +-
 setuptools/_vendor/packaging/metadata.py      |  153 +-
 setuptools/_vendor/packaging/requirements.py  |    9 +-
 setuptools/_vendor/packaging/specifiers.py    |   56 +-
 setuptools/_vendor/packaging/tags.py          |   43 +-
 setuptools/_vendor/packaging/utils.py         |   10 +-
 setuptools/_vendor/packaging/version.py       |   54 +-
 .../typeguard-4.3.0.dist-info/INSTALLER       |    1 +
 .../_vendor/typeguard-4.3.0.dist-info/LICENSE |   19 +
 .../typeguard-4.3.0.dist-info/METADATA        |   81 +
 .../_vendor/typeguard-4.3.0.dist-info/RECORD  |   34 +
 .../_vendor/typeguard-4.3.0.dist-info/WHEEL   |    5 +
 .../entry_points.txt                          |    2 +
 .../typeguard-4.3.0.dist-info/top_level.txt   |    1 +
 setuptools/_vendor/typeguard/__init__.py      |   48 +
 setuptools/_vendor/typeguard/_checkers.py     |  993 ++++
 setuptools/_vendor/typeguard/_config.py       |  108 +
 setuptools/_vendor/typeguard/_decorators.py   |  235 +
 setuptools/_vendor/typeguard/_exceptions.py   |   42 +
 setuptools/_vendor/typeguard/_functions.py    |  308 ++
 setuptools/_vendor/typeguard/_importhook.py   |  213 +
 setuptools/_vendor/typeguard/_memo.py         |   48 +
 .../_vendor/typeguard/_pytest_plugin.py       |  127 +
 setuptools/_vendor/typeguard/_suppression.py  |   86 +
 setuptools/_vendor/typeguard/_transformer.py  | 1229 +++++
 .../_vendor/typeguard/_union_transformer.py   |   55 +
 setuptools/_vendor/typeguard/_utils.py        |  173 +
 setuptools/_vendor/typeguard/py.typed         |    0
 .../INSTALLER                                 |    1 +
 .../LICENSE                                   |  279 ++
 .../METADATA                                  |   67 +
 .../typing_extensions-4.12.2.dist-info/RECORD |    7 +
 .../typing_extensions-4.12.2.dist-info/WHEEL  |    4 +
 setuptools/_vendor/typing_extensions.py       | 3641 +++++++++++++++
 .../_vendor/wheel-0.43.0.dist-info/RECORD     |  126 +-
 setuptools/_vendor/wheel/__main__.py          |   23 +
 .../_vendor/wheel/_setuptools_logging.py      |   26 +
 setuptools/_vendor/wheel/bdist_wheel.py       |  595 +++
 setuptools/_vendor/wheel/cli/__init__.py      |  155 +
 setuptools/_vendor/wheel/cli/convert.py       |  273 ++
 setuptools/_vendor/wheel/cli/pack.py          |   85 +
 setuptools/_vendor/wheel/cli/tags.py          |  139 +
 setuptools/_vendor/wheel/cli/unpack.py        |   30 +
 setuptools/_vendor/wheel/metadata.py          |    2 +-
 setuptools/_vendor/wheel/vendored/__init__.py |    0
 .../wheel/vendored/packaging/__init__.py      |    0
 .../wheel/vendored/packaging/_elffile.py      |  108 +
 .../wheel/vendored/packaging/_manylinux.py    |  260 ++
 .../wheel/vendored/packaging/_musllinux.py    |   83 +
 .../wheel/vendored/packaging/_parser.py       |  356 ++
 .../wheel/vendored/packaging/_structures.py   |   61 +
 .../wheel/vendored/packaging/_tokenizer.py    |  192 +
 .../wheel/vendored/packaging/markers.py       |  253 ++
 .../wheel/vendored/packaging/requirements.py  |   90 +
 .../wheel/vendored/packaging/specifiers.py    | 1011 +++++
 .../_vendor/wheel/vendored/packaging/tags.py  |  571 +++
 .../_vendor/wheel/vendored/packaging/utils.py |  172 +
 .../wheel/vendored/packaging/version.py       |  561 +++
 setuptools/_vendor/wheel/vendored/vendor.txt  |    1 +
 setuptools/_vendor/wheel/wheelfile.py         |    7 +-
 .../_vendor/zipp-3.19.2.dist-info/INSTALLER   |    1 +
 .../LICENSE}                                  |   18 +-
 .../_vendor/zipp-3.19.2.dist-info/METADATA    |  102 +
 .../_vendor/zipp-3.19.2.dist-info/RECORD      |   15 +
 .../_vendor/zipp-3.19.2.dist-info/REQUESTED   |    0
 .../_vendor/zipp-3.19.2.dist-info/WHEEL       |    5 +
 .../top_level.txt                             |    0
 .../_vendor/zipp-3.7.0.dist-info/METADATA     |   58 -
 .../_vendor/zipp-3.7.0.dist-info/RECORD       |    9 -
 setuptools/_vendor/zipp-3.7.0.dist-info/WHEEL |    5 -
 .../_vendor/{zipp.py => zipp/__init__.py}     |  248 +-
 setuptools/_vendor/zipp/compat/__init__.py    |    0
 setuptools/_vendor/zipp/compat/py310.py       |   11 +
 setuptools/_vendor/zipp/glob.py               |  106 +
 setuptools/extern/__init__.py                 |    1 +
 234 files changed, 23877 insertions(+), 2446 deletions(-)
 rename setuptools/_vendor/{backports.tarfile-1.0.0.dist-info => autocommand-2.2.2.dist-info}/INSTALLER (100%)
 create mode 100644 setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE
 create mode 100644 setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
 create mode 100644 setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD
 rename setuptools/_vendor/{importlib_metadata-6.0.0.dist-info => autocommand-2.2.2.dist-info}/WHEEL (100%)
 create mode 100644 setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt
 create mode 100644 setuptools/_vendor/autocommand/__init__.py
 create mode 100644 setuptools/_vendor/autocommand/autoasync.py
 create mode 100644 setuptools/_vendor/autocommand/autocommand.py
 create mode 100644 setuptools/_vendor/autocommand/automain.py
 create mode 100644 setuptools/_vendor/autocommand/autoparse.py
 create mode 100644 setuptools/_vendor/autocommand/errors.py
 delete mode 100644 setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
 rename setuptools/_vendor/{importlib_metadata-6.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{backports.tarfile-1.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/LICENSE (100%)
 rename setuptools/_vendor/{backports.tarfile-1.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/METADATA (83%)
 create mode 100644 setuptools/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
 rename setuptools/_vendor/{importlib_metadata-6.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/REQUESTED (100%)
 rename setuptools/_vendor/{backports.tarfile-1.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/WHEEL (100%)
 rename setuptools/_vendor/{backports.tarfile-1.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/top_level.txt (100%)
 rename setuptools/_vendor/backports/{tarfile.py => tarfile/__init__.py} (96%)
 create mode 100644 setuptools/_vendor/backports/tarfile/__main__.py
 rename setuptools/_vendor/{ => backports/tarfile/compat}/__init__.py (100%)
 create mode 100644 setuptools/_vendor/backports/tarfile/compat/py38.py
 delete mode 100644 setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD
 rename setuptools/_vendor/{importlib_resources-5.10.2.dist-info => importlib_metadata-8.0.0.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{importlib_metadata-6.0.0.dist-info => importlib_metadata-8.0.0.dist-info}/LICENSE (100%)
 rename setuptools/_vendor/{importlib_metadata-6.0.0.dist-info => importlib_metadata-8.0.0.dist-info}/METADATA (58%)
 create mode 100644 setuptools/_vendor/importlib_metadata-8.0.0.dist-info/RECORD
 rename setuptools/_vendor/{importlib_resources-5.10.2.dist-info => importlib_metadata-8.0.0.dist-info}/REQUESTED (100%)
 rename setuptools/_vendor/{importlib_resources-5.10.2.dist-info => importlib_metadata-8.0.0.dist-info}/WHEEL (65%)
 rename setuptools/_vendor/{importlib_metadata-6.0.0.dist-info => importlib_metadata-8.0.0.dist-info}/top_level.txt (100%)
 rename setuptools/_vendor/{importlib_resources/tests/zipdata01 => importlib_metadata/compat}/__init__.py (100%)
 create mode 100644 setuptools/_vendor/importlib_metadata/compat/py311.py
 rename setuptools/_vendor/importlib_metadata/{_py39compat.py => compat/py39.py} (82%)
 create mode 100644 setuptools/_vendor/importlib_metadata/diagnose.py
 delete mode 100644 setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD
 rename setuptools/_vendor/{jaraco.functools-4.0.0.dist-info => importlib_resources-6.4.0.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{importlib_resources-5.10.2.dist-info => importlib_resources-6.4.0.dist-info}/LICENSE (100%)
 rename setuptools/_vendor/{importlib_resources-5.10.2.dist-info => importlib_resources-6.4.0.dist-info}/METADATA (67%)
 create mode 100644 setuptools/_vendor/importlib_resources-6.4.0.dist-info/RECORD
 rename setuptools/_vendor/{jaraco.text-3.7.0.dist-info => importlib_resources-6.4.0.dist-info}/REQUESTED (100%)
 rename setuptools/_vendor/{ordered_set-3.1.1.dist-info => importlib_resources-6.4.0.dist-info}/WHEEL (83%)
 rename setuptools/_vendor/{importlib_resources-5.10.2.dist-info => importlib_resources-6.4.0.dist-info}/top_level.txt (100%)
 delete mode 100644 setuptools/_vendor/importlib_resources/_compat.py
 delete mode 100644 setuptools/_vendor/importlib_resources/_legacy.py
 rename setuptools/_vendor/importlib_resources/{tests/zipdata02 => compat}/__init__.py (100%)
 create mode 100644 setuptools/_vendor/importlib_resources/compat/py38.py
 create mode 100644 setuptools/_vendor/importlib_resources/compat/py39.py
 create mode 100644 setuptools/_vendor/importlib_resources/functional.py
 rename setuptools/_vendor/{jaraco => importlib_resources/future}/__init__.py (100%)
 create mode 100644 setuptools/_vendor/importlib_resources/future/adapters.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/_compat.py
 rename setuptools/_vendor/{more_itertools-8.8.0.dist-info/REQUESTED => importlib_resources/tests/compat/__init__.py} (100%)
 create mode 100644 setuptools/_vendor/importlib_resources/tests/compat/py312.py
 create mode 100644 setuptools/_vendor/importlib_resources/tests/compat/py39.py
 create mode 100644 setuptools/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
 create mode 100644 setuptools/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
 create mode 100644 setuptools/_vendor/importlib_resources/tests/test_custom.py
 create mode 100644 setuptools/_vendor/importlib_resources/tests/test_functional.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/update-zips.py
 create mode 100644 setuptools/_vendor/importlib_resources/tests/zip.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip
 rename setuptools/_vendor/{jaraco.text-3.7.0.dist-info => inflect-7.3.1.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{jaraco.functools-4.0.0.dist-info => inflect-7.3.1.dist-info}/LICENSE (100%)
 create mode 100644 setuptools/_vendor/inflect-7.3.1.dist-info/METADATA
 create mode 100644 setuptools/_vendor/inflect-7.3.1.dist-info/RECORD
 rename setuptools/_vendor/{jaraco.functools-4.0.0.dist-info => inflect-7.3.1.dist-info}/WHEEL (65%)
 create mode 100644 setuptools/_vendor/inflect-7.3.1.dist-info/top_level.txt
 create mode 100644 setuptools/_vendor/inflect/__init__.py
 rename setuptools/_vendor/{ordered_set-3.1.1.dist-info/REQUESTED => inflect/compat/__init__.py} (100%)
 create mode 100644 setuptools/_vendor/inflect/compat/py38.py
 rename setuptools/_vendor/{packaging-24.0.dist-info/REQUESTED => inflect/py.typed} (100%)
 delete mode 100644 setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
 rename setuptools/_vendor/{more_itertools-8.8.0.dist-info => jaraco.functools-4.0.1.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{jaraco.text-3.7.0.dist-info => jaraco.functools-4.0.1.dist-info}/LICENSE (97%)
 rename setuptools/_vendor/{jaraco.functools-4.0.0.dist-info => jaraco.functools-4.0.1.dist-info}/METADATA (78%)
 create mode 100644 setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
 rename setuptools/_vendor/{jaraco.text-3.7.0.dist-info => jaraco.functools-4.0.1.dist-info}/WHEEL (65%)
 rename setuptools/_vendor/{jaraco.functools-4.0.0.dist-info => jaraco.functools-4.0.1.dist-info}/top_level.txt (100%)
 rename setuptools/_vendor/{ordered_set-3.1.1.dist-info => jaraco.text-3.12.1.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{zipp-3.7.0.dist-info => jaraco.text-3.12.1.dist-info}/LICENSE (97%)
 create mode 100644 setuptools/_vendor/jaraco.text-3.12.1.dist-info/METADATA
 create mode 100644 setuptools/_vendor/jaraco.text-3.12.1.dist-info/RECORD
 rename setuptools/_vendor/{zipp-3.7.0.dist-info => jaraco.text-3.12.1.dist-info}/REQUESTED (100%)
 rename setuptools/_vendor/{more_itertools-8.8.0.dist-info => jaraco.text-3.12.1.dist-info}/WHEEL (65%)
 rename setuptools/_vendor/{jaraco.text-3.7.0.dist-info => jaraco.text-3.12.1.dist-info}/top_level.txt (100%)
 delete mode 100644 setuptools/_vendor/jaraco.text-3.7.0.dist-info/METADATA
 delete mode 100644 setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/jaraco/text/layouts.py
 create mode 100644 setuptools/_vendor/jaraco/text/show-newlines.py
 create mode 100644 setuptools/_vendor/jaraco/text/strip-prefix.py
 create mode 100644 setuptools/_vendor/jaraco/text/to-dvorak.py
 create mode 100644 setuptools/_vendor/jaraco/text/to-qwerty.py
 rename setuptools/_vendor/{packaging-24.0.dist-info => more_itertools-10.3.0.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{more_itertools-8.8.0.dist-info => more_itertools-10.3.0.dist-info}/LICENSE (100%)
 rename setuptools/_vendor/{more_itertools-8.8.0.dist-info => more_itertools-10.3.0.dist-info}/METADATA (60%)
 create mode 100644 setuptools/_vendor/more_itertools-10.3.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/more_itertools-10.3.0.dist-info/REQUESTED
 create mode 100644 setuptools/_vendor/more_itertools-10.3.0.dist-info/WHEEL
 delete mode 100644 setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD
 delete mode 100644 setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt
 mode change 100644 => 100755 setuptools/_vendor/more_itertools/more.py
 delete mode 100644 setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD
 delete mode 100644 setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt
 rename setuptools/_vendor/{zipp-3.7.0.dist-info => ordered_set-4.1.0.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{ordered_set-3.1.1.dist-info => ordered_set-4.1.0.dist-info}/METADATA (74%)
 create mode 100644 setuptools/_vendor/ordered_set-4.1.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/ordered_set-4.1.0.dist-info/REQUESTED
 create mode 100644 setuptools/_vendor/ordered_set-4.1.0.dist-info/WHEEL
 rename setuptools/_vendor/{ordered_set.py => ordered_set/__init__.py} (72%)
 create mode 100644 setuptools/_vendor/ordered_set/py.typed
 delete mode 100644 setuptools/_vendor/packaging-24.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/packaging-24.1.dist-info/INSTALLER
 rename setuptools/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/LICENSE (100%)
 rename setuptools/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/LICENSE.APACHE (100%)
 rename setuptools/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/LICENSE.BSD (100%)
 rename setuptools/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/METADATA (97%)
 create mode 100644 setuptools/_vendor/packaging-24.1.dist-info/RECORD
 create mode 100644 setuptools/_vendor/packaging-24.1.dist-info/REQUESTED
 rename setuptools/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/WHEEL (100%)
 create mode 100644 setuptools/_vendor/typeguard-4.3.0.dist-info/INSTALLER
 create mode 100644 setuptools/_vendor/typeguard-4.3.0.dist-info/LICENSE
 create mode 100644 setuptools/_vendor/typeguard-4.3.0.dist-info/METADATA
 create mode 100644 setuptools/_vendor/typeguard-4.3.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/typeguard-4.3.0.dist-info/WHEEL
 create mode 100644 setuptools/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
 create mode 100644 setuptools/_vendor/typeguard-4.3.0.dist-info/top_level.txt
 create mode 100644 setuptools/_vendor/typeguard/__init__.py
 create mode 100644 setuptools/_vendor/typeguard/_checkers.py
 create mode 100644 setuptools/_vendor/typeguard/_config.py
 create mode 100644 setuptools/_vendor/typeguard/_decorators.py
 create mode 100644 setuptools/_vendor/typeguard/_exceptions.py
 create mode 100644 setuptools/_vendor/typeguard/_functions.py
 create mode 100644 setuptools/_vendor/typeguard/_importhook.py
 create mode 100644 setuptools/_vendor/typeguard/_memo.py
 create mode 100644 setuptools/_vendor/typeguard/_pytest_plugin.py
 create mode 100644 setuptools/_vendor/typeguard/_suppression.py
 create mode 100644 setuptools/_vendor/typeguard/_transformer.py
 create mode 100644 setuptools/_vendor/typeguard/_union_transformer.py
 create mode 100644 setuptools/_vendor/typeguard/_utils.py
 create mode 100644 setuptools/_vendor/typeguard/py.typed
 create mode 100644 setuptools/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
 create mode 100644 setuptools/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
 create mode 100644 setuptools/_vendor/typing_extensions-4.12.2.dist-info/METADATA
 create mode 100644 setuptools/_vendor/typing_extensions-4.12.2.dist-info/RECORD
 create mode 100644 setuptools/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
 create mode 100644 setuptools/_vendor/typing_extensions.py
 create mode 100644 setuptools/_vendor/wheel/__main__.py
 create mode 100644 setuptools/_vendor/wheel/_setuptools_logging.py
 create mode 100644 setuptools/_vendor/wheel/bdist_wheel.py
 create mode 100644 setuptools/_vendor/wheel/cli/__init__.py
 create mode 100644 setuptools/_vendor/wheel/cli/convert.py
 create mode 100644 setuptools/_vendor/wheel/cli/pack.py
 create mode 100644 setuptools/_vendor/wheel/cli/tags.py
 create mode 100644 setuptools/_vendor/wheel/cli/unpack.py
 create mode 100644 setuptools/_vendor/wheel/vendored/__init__.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/__init__.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/_elffile.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/_manylinux.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/_musllinux.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/_parser.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/_structures.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/markers.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/requirements.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/specifiers.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/tags.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/utils.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/version.py
 create mode 100644 setuptools/_vendor/wheel/vendored/vendor.txt
 create mode 100644 setuptools/_vendor/zipp-3.19.2.dist-info/INSTALLER
 rename setuptools/_vendor/{ordered_set-3.1.1.dist-info/MIT-LICENSE => zipp-3.19.2.dist-info/LICENSE} (58%)
 create mode 100644 setuptools/_vendor/zipp-3.19.2.dist-info/METADATA
 create mode 100644 setuptools/_vendor/zipp-3.19.2.dist-info/RECORD
 create mode 100644 setuptools/_vendor/zipp-3.19.2.dist-info/REQUESTED
 create mode 100644 setuptools/_vendor/zipp-3.19.2.dist-info/WHEEL
 rename setuptools/_vendor/{zipp-3.7.0.dist-info => zipp-3.19.2.dist-info}/top_level.txt (100%)
 delete mode 100644 setuptools/_vendor/zipp-3.7.0.dist-info/METADATA
 delete mode 100644 setuptools/_vendor/zipp-3.7.0.dist-info/RECORD
 delete mode 100644 setuptools/_vendor/zipp-3.7.0.dist-info/WHEEL
 rename setuptools/_vendor/{zipp.py => zipp/__init__.py} (52%)
 create mode 100644 setuptools/_vendor/zipp/compat/__init__.py
 create mode 100644 setuptools/_vendor/zipp/compat/py310.py
 create mode 100644 setuptools/_vendor/zipp/glob.py

diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER b/setuptools/_vendor/autocommand-2.2.2.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
rename to setuptools/_vendor/autocommand-2.2.2.dist-info/INSTALLER
diff --git a/setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE b/setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE
new file mode 100644
index 0000000000..b49c3af060
--- /dev/null
+++ b/setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE
@@ -0,0 +1,166 @@
+GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions.
+
+  As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+  "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+  An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+  A "Combined Work" is a work produced by combining or linking an
+Application with the Library.  The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+  The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+  The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+  1. Exception to Section 3 of the GNU GPL.
+
+  You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+  2. Conveying Modified Versions.
+
+  If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+   a) under this License, provided that you make a good faith effort to
+   ensure that, in the event an Application does not supply the
+   function or data, the facility still operates, and performs
+   whatever part of its purpose remains meaningful, or
+
+   b) under the GNU GPL, with none of the additional permissions of
+   this License applicable to that copy.
+
+  3. Object Code Incorporating Material from Library Header Files.
+
+  The object code form of an Application may incorporate material from
+a header file that is part of the Library.  You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+   a) Give prominent notice with each copy of the object code that the
+   Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the object code with a copy of the GNU GPL and this license
+   document.
+
+  4. Combined Works.
+
+  You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+   a) Give prominent notice with each copy of the Combined Work that
+   the Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the Combined Work with a copy of the GNU GPL and this license
+   document.
+
+   c) For a Combined Work that displays copyright notices during
+   execution, include the copyright notice for the Library among
+   these notices, as well as a reference directing the user to the
+   copies of the GNU GPL and this license document.
+
+   d) Do one of the following:
+
+       0) Convey the Minimal Corresponding Source under the terms of this
+       License, and the Corresponding Application Code in a form
+       suitable for, and under terms that permit, the user to
+       recombine or relink the Application with a modified version of
+       the Linked Version to produce a modified Combined Work, in the
+       manner specified by section 6 of the GNU GPL for conveying
+       Corresponding Source.
+
+       1) Use a suitable shared library mechanism for linking with the
+       Library.  A suitable mechanism is one that (a) uses at run time
+       a copy of the Library already present on the user's computer
+       system, and (b) will operate properly with a modified version
+       of the Library that is interface-compatible with the Linked
+       Version.
+
+   e) Provide Installation Information, but only if you would otherwise
+   be required to provide such information under section 6 of the
+   GNU GPL, and only to the extent that such information is
+   necessary to install and execute a modified version of the
+   Combined Work produced by recombining or relinking the
+   Application with a modified version of the Linked Version. (If
+   you use option 4d0, the Installation Information must accompany
+   the Minimal Corresponding Source and Corresponding Application
+   Code. If you use option 4d1, you must provide the Installation
+   Information in the manner specified by section 6 of the GNU GPL
+   for conveying Corresponding Source.)
+
+  5. Combined Libraries.
+
+  You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+   a) Accompany the combined library with a copy of the same work based
+   on the Library, uncombined with any other library facilities,
+   conveyed under the terms of this License.
+
+   b) Give prominent notice with the combined library that part of it
+   is a work based on the Library, and explaining where to find the
+   accompanying uncombined form of the same work.
+
+  6. Revised Versions of the GNU Lesser General Public License.
+
+  The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+  Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+  If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
+
diff --git a/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA b/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
new file mode 100644
index 0000000000..32214fb440
--- /dev/null
+++ b/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
@@ -0,0 +1,420 @@
+Metadata-Version: 2.1
+Name: autocommand
+Version: 2.2.2
+Summary: A library to create a command-line program from a function
+Home-page: https://github.com/Lucretiel/autocommand
+Author: Nathan West
+License: LGPLv3
+Project-URL: Homepage, https://github.com/Lucretiel/autocommand
+Project-URL: Bug Tracker, https://github.com/Lucretiel/autocommand/issues
+Platform: any
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+
+[![PyPI version](https://badge.fury.io/py/autocommand.svg)](https://badge.fury.io/py/autocommand)
+
+# autocommand
+
+A library to automatically generate and run simple argparse parsers from function signatures.
+
+## Installation
+
+Autocommand is installed via pip:
+
+```
+$ pip install autocommand
+```
+
+## Usage
+
+Autocommand turns a function into a command-line program. It converts the function's parameter signature into command-line arguments, and automatically runs the function if the module was called as `__main__`. In effect, it lets you create a smart main function.
+
+```python
+from autocommand import autocommand
+
+# This program takes exactly one argument and echoes it.
+@autocommand(__name__)
+def echo(thing):
+    print(thing)
+```
+
+```
+$ python echo.py hello
+hello
+$ python echo.py -h
+usage: echo [-h] thing
+
+positional arguments:
+  thing
+
+optional arguments:
+  -h, --help  show this help message and exit
+$ python echo.py hello world  # too many arguments
+usage: echo.py [-h] thing
+echo.py: error: unrecognized arguments: world
+```
+
+As you can see, autocommand converts the signature of the function into an argument spec. When you run the file as a program, autocommand collects the command-line arguments and turns them into function arguments. The function is executed with these arguments, and then the program exits with the return value of the function, via `sys.exit`. Autocommand also automatically creates a usage message, which can be invoked with `-h` or `--help`, and automatically prints an error message when provided with invalid arguments.
+
+### Types
+
+You can use a type annotation to give an argument a type. Any type (or in fact any callable) that returns an object when given a string argument can be used, though there are a few special cases that are described later.
+
+```python
+@autocommand(__name__)
+def net_client(host, port: int):
+    ...
+```
+
+Autocommand will catch `TypeErrors` raised by the type during argument parsing, so you can supply a callable and do some basic argument validation as well.
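+
+For instance, a plain callable can convert and validate at once. This is a minimal sketch, not from the upstream docs; `port_number` is an illustrative helper:
+
+```python
+def port_number(value):
+    port = int(value)
+    if not 0 < port < 65536:
+        # Raising TypeError here is caught during argument parsing,
+        # just like a failed int() conversion.
+        raise TypeError('port must be between 1 and 65535')
+    return port
+
+@autocommand(__name__)
+def net_client(host, port: port_number):
+    ...
+```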
+
+### Trailing Arguments
+
+You can add a `*args` parameter to your function to give it trailing arguments. The command will collect 0 or more trailing arguments and supply them to `args` as a tuple. If a type annotation is supplied, the type is applied to each argument.
+
+```python
+# Write the contents of each file, one by one
+@autocommand(__name__)
+def cat(*files):
+    for filename in files:
+        with open(filename) as file:
+            for line in file:
+                print(line.rstrip())
+```
+
+```
+$ python cat.py -h
+usage: ipython [-h] [file [file ...]]
+
+positional arguments:
+  file
+
+optional arguments:
+  -h, --help  show this help message and exit
+```
+
+### Options
+
+To create `--option` switches, just assign a default. Autocommand will automatically create `--long` and `-s`hort switches.
+
+```python
+@autocommand(__name__)
+def do_with_config(argument, config='~/foo.conf'):
+    pass
+```
+
+```
+$ python example.py -h
+usage: example.py [-h] [-c CONFIG] argument
+
+positional arguments:
+  argument
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -c CONFIG, --config CONFIG
+```
+
+The option's type is automatically deduced from the default, unless one is explicitly given in an annotation:
+
+```python
+@autocommand(__name__)
+def http_connect(host, port=80):
+    print('{}:{}'.format(host, port))
+```
+
+```
+$ python http.py -h
+usage: http.py [-h] [-p PORT] host
+
+positional arguments:
+  host
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -p PORT, --port PORT
+$ python http.py localhost
+localhost:80
+$ python http.py localhost -p 8080
+localhost:8080
+$ python http.py localhost -p blah
+usage: http.py [-h] [-p PORT] host
+http.py: error: argument -p/--port: invalid int value: 'blah'
+```
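+
+As a brief illustrative sketch (not from the upstream docs), the annotation wins even when the default is of a different type:
+
+```python
+@autocommand(__name__)
+def wait(timeout: float = 1):
+    # --timeout parses as a float, because the explicit annotation
+    # takes precedence over the type deduced from the int default.
+    print(timeout)
+```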
+
+#### None
+
+If an option is given a default value of `None`, it reads in a value as normal, but supplies `None` if the option isn't provided.
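+
+A minimal sketch of that behavior (the names are illustrative):
+
+```python
+@autocommand(__name__)
+def greet(name=None):
+    # --name NAME reads a string as usual; when omitted, name is None.
+    print('hello' if name is None else 'hello, ' + name)
+```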
+
+#### Switches
+
+If an argument is given a default value of `True` or `False`, or
+given an explicit `bool` type, it becomes an option switch.
+
+```python
+@autocommand(__name__)
+def example(verbose=False, quiet=False):
+    pass
+```
+
+```
+$ python example.py -h
+usage: example.py [-h] [-v] [-q]
+
+optional arguments:
+  -h, --help     show this help message and exit
+  -v, --verbose
+  -q, --quiet
+```
+
+Autocommand attempts to do the "correct thing" in these cases: if the default is `True`, then supplying the switch makes the argument `False`; if the type is `bool` and the default is some other truthy value, then supplying the switch makes the argument `False`, while not supplying the switch makes the argument the default value.
+
+Autocommand also supports the creation of switch inverters. Pass `add_nos=True` to `autocommand` to enable this.
+
+```python
+@autocommand(__name__, add_nos=True)
+def example(verbose=False):
+    pass
+```
+
+```
+$ python example.py -h
+usage: ipython [-h] [-v] [--no-verbose]
+
+optional arguments:
+  -h, --help     show this help message and exit
+  -v, --verbose
+  --no-verbose
+```
+
+Using the `--no-` version of a switch will pass the opposite value in as a function argument. If multiple switches are present, the last one takes precedence.
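+
+A hedged usage sketch of that precedence rule, reusing the `example.py` program above:
+
+```
+$ python example.py --verbose --no-verbose  # verbose is False; --no-verbose came last
+$ python example.py --no-verbose --verbose  # verbose is True; --verbose came last
+```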
+
+#### Files
+
+If the default value is a file object, such as `sys.stdout`, then autocommand just looks for a string giving a file path. It doesn't do any special checking on the string, though (such as checking whether the file exists); it's better to let the client decide how to handle errors in this case. Instead, it provides a special context manager called `smart_open`, which behaves exactly like `open` if a filename or other openable type is provided, but also lets you use already open files:
+
+```python
+from autocommand import autocommand, smart_open
+import sys
+
+# Write the contents of stdin, or a file, to stdout
+@autocommand(__name__)
+def write_out(infile=sys.stdin):
+    with smart_open(infile) as f:
+        for line in f:
+            print(line.rstrip())
+    # If a file was opened, it is closed here. If it was just stdin, it is untouched.
+```
+
+```
+$ echo "Hello World!" | python write_out.py | tee hello.txt
+Hello World!
+$ python write_out.py --infile hello.txt
+Hello World!
+```
+
+### Descriptions and docstrings
+
+The `autocommand` decorator accepts `description` and `epilog` kwargs, corresponding to the `description` and `epilog` parameters of the `ArgumentParser`. If no description is given, but the decorated function has a docstring, then it is taken as the `description` for the `ArgumentParser`. You can also provide both the description and epilog in the docstring by splitting it into two sections with 4 or more `-` characters.
+
+```python
+@autocommand(__name__)
+def copy(infile=sys.stdin, outfile=sys.stdout):
+    '''
+    Copy the contents of a file (or stdin) to another file (or stdout)
+    ----------
+    Some extra documentation in the epilog
+    '''
+    with smart_open(infile) as istr:
+        with smart_open(outfile, 'w') as ostr:
+            for line in istr:
+                ostr.write(line)
+```
+
+```
+$ python copy.py -h
+usage: copy.py [-h] [-i INFILE] [-o OUTFILE]
+
+Copy the contents of a file (or stdin) to another file (or stdout)
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -i INFILE, --infile INFILE
+  -o OUTFILE, --outfile OUTFILE
+
+Some extra documentation in the epilog
+$ echo "Hello World" | python copy.py --outfile hello.txt
+$ python copy.py --infile hello.txt --outfile hello2.txt
+$ python copy.py --infile hello2.txt
+Hello World
+```
+
+### Parameter descriptions
+
+You can also attach description text to individual parameters in the annotation. To attach both a type and a description, supply them both, in any order, in a tuple:
+
+```python
+@autocommand(__name__)
+def copy_net(
+    infile: 'The name of the file to send',
+    host: 'The host to send the file to',
+    port: (int, 'The port to connect to')):
+
+    '''
+    Copy a file over raw TCP to a remote destination.
+    '''
+    # Left as an exercise to the reader
+```
+
+### Decorators and wrappers
+
+Autocommand automatically follows wrapper chains created by `@functools.wraps`. This means that you can apply other wrapping decorators to your main function, and autocommand will still correctly detect the signature.
+
+```python
+from functools import wraps
+from autocommand import autocommand
+
+def print_yielded(func):
+    '''
+    Convert a generator into a function that prints all yielded elements
+    '''
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        for thing in func(*args, **kwargs):
+            print(thing)
+    return wrapper
+
+@autocommand(__name__,
+    description= 'Print all the values from START to STOP, inclusive, in steps of STEP',
+    epilog=      'STOP and STEP default to 1')
+@print_yielded
+def seq(stop, start=1, step=1):
+    for i in range(start, stop + 1, step):
+        yield i
+```
+
+```
+$ seq.py -h
+usage: seq.py [-h] [-s START] [-S STEP] stop
+
+Print all the values from START to STOP, inclusive, in steps of STEP
+
+positional arguments:
+  stop
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -s START, --start START
+  -S STEP, --step STEP
+
+STOP and STEP default to 1
+```
+
+Even though autocommand is being applied to the `wrapper` returned by `print_yielded`, it still retrieves the signature of the underlying `seq` function to create the argument parsing.
+
+### Custom Parser
+
+While autocommand's automatic parser generator is a powerful convenience, it doesn't cover all of the different features that argparse provides. If you need these features, you can provide your own parser as a kwarg to `autocommand`:
+
+```python
+from argparse import ArgumentParser
+from autocommand import autocommand
+
+parser = ArgumentParser()
+# autocommand can't do optional positional parameters
+parser.add_argument('arg', nargs='?')
+# or mutually exclusive options
+group = parser.add_mutually_exclusive_group()
+group.add_argument('-v', '--verbose', action='store_true')
+group.add_argument('-q', '--quiet', action='store_true')
+
+@autocommand(__name__, parser=parser)
+def main(arg, verbose, quiet):
+    print(arg, verbose, quiet)
+```
+
+```
+$ python parser.py -h
+usage: write_file.py [-h] [-v | -q] [arg]
+
+positional arguments:
+  arg
+
+optional arguments:
+  -h, --help     show this help message and exit
+  -v, --verbose
+  -q, --quiet
+$ python parser.py
+None False False
+$ python parser.py hello
+hello False False
+$ python parser.py -v
+None True False
+$ python parser.py -q
+None False True
+$ python parser.py -vq
+usage: parser.py [-h] [-v | -q] [arg]
+parser.py: error: argument -q/--quiet: not allowed with argument -v/--verbose
+```
+
+Any parser should work fine, so long as each of the parser's arguments has a corresponding parameter in the decorated main function. The order of parameters doesn't matter, as long as they are all present. Note that when using a custom parser, autocommand doesn't modify the parser or the retrieved arguments. This means that no description/epilog will be added, and the function's type annotations and defaults (if present) will be ignored.
+
+## Testing and Library use
+
+The decorated function is only called and exited from if the first argument to `autocommand` is `'__main__'` or `True`. If it is neither of these values, or no argument is given, then a new main function is created by the decorator. This function has the signature `main(argv=None)`, and is intended to be called with arguments as if via `main(sys.argv[1:])`. The function has the attributes `parser` and `main`, which are the generated `ArgumentParser` and the original main function that was decorated. This is to facilitate testing and library use of your main. Calling the function triggers a `parse_args()` with the supplied arguments, and returns the result of the main function. Note that, while it returns instead of calling `sys.exit`, the `parse_args()` function will raise a `SystemExit` in the event of a parsing error or `-h/--help` argument.
+
+```python
+@autocommand()
+def test_prog(arg1, arg2: int, quiet=False, verbose=False):
+    if not quiet:
+        print(arg1, arg2)
+        if verbose:
+            print("LOUD NOISES")
+
+    return 0
+
+print(test_prog(['-v', 'hello', '80']))
+```
+
+```
+$ python test_prog.py
+hello 80
+LOUD NOISES
+0
+```
+
+If the function is called with no arguments, `sys.argv[1:]` is used. This is to allow the autocommand function to be used as a setuptools entry point.
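+
+As a sketch (the project and module names here are hypothetical), such a generated main can be referenced directly as a console script in a `setup.cfg`:
+
+```
+[options.entry_points]
+console_scripts =
+    mytool = mypackage.cli:main
+```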
+
+## Exceptions and limitations
+
+- There are a few possible exceptions that `autocommand` can raise. All of them derive from `autocommand.AutocommandError`.
+
+  - If an invalid annotation is given (that is, it isn't a `type`, `str`, `(type, str)`, or `(str, type)`), an `AnnotationError` is raised. The `type` may be any callable, as described in the Types section.
+  - If the function has a `**kwargs` parameter, a `KWargError` is raised.
+  - If, somehow, the function has a positional-only parameter, a `PositionalArgError` is raised. This means that the argument doesn't have a name, which is currently not possible with a plain `def` or `lambda`, though many built-in functions have this kind of parameter.
+
+- There are a few argparse features that are not supported by autocommand.
+
+  - It isn't possible to have an optional positional argument (as opposed to a `--option`). POSIX thinks this is bad form anyway.
+  - It isn't possible to have mutually exclusive arguments or options.
+  - It isn't possible to have subcommands or subparsers, though I'm working on a few solutions involving classes or nested function definitions to allow this.
+
+## Development
+
+Autocommand cannot be imported from the project root; this is to enforce separation of concerns and prevent accidental importing of `setup.py` or tests. To develop, install the project in editable mode:
+
+```
+$ python setup.py develop
+```
+
+This will create a link to the source files in the deployment directory, so that any source changes are reflected when it is imported.
diff --git a/setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD b/setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD
new file mode 100644
index 0000000000..e6e12ea51e
--- /dev/null
+++ b/setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD
@@ -0,0 +1,18 @@
+autocommand-2.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+autocommand-2.2.2.dist-info/LICENSE,sha256=reeNBJgtaZctREqOFKlPh6IzTdOFXMgDSOqOJAqg3y0,7634
+autocommand-2.2.2.dist-info/METADATA,sha256=OADZuR3O6iBlpu1ieTgzYul6w4uOVrk0P0BO5TGGAJk,15006
+autocommand-2.2.2.dist-info/RECORD,,
+autocommand-2.2.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+autocommand-2.2.2.dist-info/top_level.txt,sha256=AzfhgKKS8EdAwWUTSF8mgeVQbXOY9kokHB6kSqwwqu0,12
+autocommand/__init__.py,sha256=zko5Rnvolvb-UXjCx_2ArPTGBWwUK5QY4LIQIKYR7As,1037
+autocommand/__pycache__/__init__.cpython-312.pyc,,
+autocommand/__pycache__/autoasync.cpython-312.pyc,,
+autocommand/__pycache__/autocommand.cpython-312.pyc,,
+autocommand/__pycache__/automain.cpython-312.pyc,,
+autocommand/__pycache__/autoparse.cpython-312.pyc,,
+autocommand/__pycache__/errors.cpython-312.pyc,,
+autocommand/autoasync.py,sha256=AMdyrxNS4pqWJfP_xuoOcImOHWD-qT7x06wmKN1Vp-U,5680
+autocommand/autocommand.py,sha256=hmkEmQ72HtL55gnURVjDOnsfYlGd5lLXbvT4KG496Qw,2505
+autocommand/automain.py,sha256=A2b8i754Mxc_DjU9WFr6vqYDWlhz0cn8miu8d8EsxV8,2076
+autocommand/autoparse.py,sha256=WVWmZJPcbzUKXP40raQw_0HD8qPJ2V9VG1eFFmmnFxw,11642
+autocommand/errors.py,sha256=7aa3roh9Herd6nIKpQHNWEslWE8oq7GiHYVUuRqORnA,886
diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/WHEEL b/setuptools/_vendor/autocommand-2.2.2.dist-info/WHEEL
similarity index 100%
rename from setuptools/_vendor/importlib_metadata-6.0.0.dist-info/WHEEL
rename to setuptools/_vendor/autocommand-2.2.2.dist-info/WHEEL
diff --git a/setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt b/setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt
new file mode 100644
index 0000000000..dda5158ff6
--- /dev/null
+++ b/setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+autocommand
diff --git a/setuptools/_vendor/autocommand/__init__.py b/setuptools/_vendor/autocommand/__init__.py
new file mode 100644
index 0000000000..73fbfca6b3
--- /dev/null
+++ b/setuptools/_vendor/autocommand/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2014-2016 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
+
+# flake8 flags all these imports as unused, hence the NOQAs everywhere.
+
+from .automain import automain  # NOQA
+from .autoparse import autoparse, smart_open  # NOQA
+from .autocommand import autocommand  # NOQA
+
+try:
+    from .autoasync import autoasync  # NOQA
+except ImportError:  # pragma: no cover
+    pass
diff --git a/setuptools/_vendor/autocommand/autoasync.py b/setuptools/_vendor/autocommand/autoasync.py
new file mode 100644
index 0000000000..688f7e0554
--- /dev/null
+++ b/setuptools/_vendor/autocommand/autoasync.py
@@ -0,0 +1,142 @@
+# Copyright 2014-2015 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
+
+from asyncio import get_event_loop, iscoroutine
+from functools import wraps
+from inspect import signature
+
+
+async def _run_forever_coro(coro, args, kwargs, loop):
+    '''
+    This helper function launches an async main function that was tagged with
+    forever=True. There are two possibilities:
+
+    - The function is a normal function, which handles initializing the event
+      loop, which is then run forever
+    - The function is a coroutine, which needs to be scheduled in the event
+      loop, which is then run forever
+      - There is also the possibility that the function is a normal function
+        wrapping a coroutine function
+
+    The function is therefore called unconditionally and scheduled in the event
+    loop if the return value is a coroutine object.
+
+    The reason this is a separate function is to make absolutely sure that all
+    the objects created are garbage collected after all is said and done; we
+    do this to ensure that any exceptions raised in the tasks are collected
+    ASAP.
+    '''
+
+    # Personal note: I consider this an antipattern, as it relies on the use of
+    # unowned resources. The setup function dumps some stuff into the event
+    # loop where it just whirls in the ether without a well defined owner or
+    # lifetime. For this reason, there's a good chance I'll remove the
+    # forever=True feature from autoasync at some point in the future.
+    thing = coro(*args, **kwargs)
+    if iscoroutine(thing):
+        await thing
+
+
+def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
+    '''
+    Convert an asyncio coroutine into a function which, when called, is
+    evaluated in an event loop, and the return value returned. This is intended
+    to make it easy to write entry points into asyncio coroutines, which
+    otherwise need to be explicitly evaluated with an event loop's
+    run_until_complete.
+
+    If `loop` is given, it is used as the event loop to run the coro in. If it
+    is None (the default), the loop is retrieved using asyncio.get_event_loop.
+    This call is deferred until the decorated function is called, so that
+    callers can install custom event loops or event loop policies after
+    @autoasync is applied.
+
+    If `forever` is True, the loop is run forever after the decorated coroutine
+    is finished. Use this for servers created with asyncio.start_server and the
+    like.
+
+    If `pass_loop` is True, the event loop object is passed into the coroutine
+    as the `loop` kwarg when the wrapper function is called. In this case, the
+    wrapper function's __signature__ is updated to remove this parameter, so
+    that autoparse can still be used on it without generating a parameter for
+    `loop`.
+
+    This decorator can be applied with ( @autoasync(...) ) or without
+    ( @autoasync ) arguments.
+
+    Examples:
+
+    @autoasync
+    def get_file(host, port):
+        reader, writer = yield from asyncio.open_connection(host, port)
+        data = reader.read()
+        sys.stdout.write(data.decode())
+
+    get_file(host, port)
+
+    @autoasync(forever=True, pass_loop=True)
+    def server(host, port, loop):
+        yield from loop.create_server(Proto, host, port)
+
+    server('localhost', 8899)
+
+    '''
+    if coro is None:
+        return lambda c: autoasync(
+            c, loop=loop,
+            forever=forever,
+            pass_loop=pass_loop)
+
+    # The old and new signatures are required to correctly bind the loop
+    # parameter in 100% of cases, even if it's a positional parameter.
+    # NOTE: A future release will probably require the loop parameter to be
+    # a kwonly parameter.
+    if pass_loop:
+        old_sig = signature(coro)
+        new_sig = old_sig.replace(parameters=(
+            param for name, param in old_sig.parameters.items()
+            if name != "loop"))
+
+    @wraps(coro)
+    def autoasync_wrapper(*args, **kwargs):
+        # Defer the call to get_event_loop so that, if a custom policy is
+        # installed after the autoasync decorator, it is respected at call time
+        local_loop = get_event_loop() if loop is None else loop
+
+        # Inject the 'loop' argument. We have to use this signature binding to
+        # ensure it's injected in the correct place (positional, keyword, etc)
+        if pass_loop:
+            bound_args = old_sig.bind_partial()
+            bound_args.arguments.update(
+                loop=local_loop,
+                **new_sig.bind(*args, **kwargs).arguments)
+            args, kwargs = bound_args.args, bound_args.kwargs
+
+        if forever:
+            local_loop.create_task(_run_forever_coro(
+                coro, args, kwargs, local_loop
+            ))
+            local_loop.run_forever()
+        else:
+            return local_loop.run_until_complete(coro(*args, **kwargs))
+
+    # Attach the updated signature. This allows 'pass_loop' to be used with
+    # autoparse
+    if pass_loop:
+        autoasync_wrapper.__signature__ = new_sig
+
+    return autoasync_wrapper
diff --git a/setuptools/_vendor/autocommand/autocommand.py b/setuptools/_vendor/autocommand/autocommand.py
new file mode 100644
index 0000000000..097e86de07
--- /dev/null
+++ b/setuptools/_vendor/autocommand/autocommand.py
@@ -0,0 +1,70 @@
+# Copyright 2014-2015 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
+
+from .autoparse import autoparse
+from .automain import automain
+try:
+    from .autoasync import autoasync
+except ImportError:  # pragma: no cover
+    pass
+
+
+def autocommand(
+        module, *,
+        description=None,
+        epilog=None,
+        add_nos=False,
+        parser=None,
+        loop=None,
+        forever=False,
+        pass_loop=False):
+
+    if callable(module):
+        raise TypeError('autocommand requires a module name argument')
+
+    def autocommand_decorator(func):
+        # Step 1: if requested, run it all in an asyncio event loop. autoasync
+        # patches the __signature__ of the decorated function, so that in the
+        # event that pass_loop is True, the `loop` parameter of the original
+        # function will *not* be interpreted as a command-line argument by
+        # autoparse
+        if loop is not None or forever or pass_loop:
+            func = autoasync(
+                func,
+                loop=None if loop is True else loop,
+                pass_loop=pass_loop,
+                forever=forever)
+
+        # Step 2: create parser. We do this second so that the arguments are
+        # parsed and passed *before* entering the asyncio event loop, if it
+        # exists. This simplifies the stack trace and ensures errors are
+        # reported earlier. It also ensures that errors raised during parsing &
+        # passing are still raised if `forever` is True.
+        func = autoparse(
+            func,
+            description=description,
+            epilog=epilog,
+            add_nos=add_nos,
+            parser=parser)
+
+        # Step 3: call the function automatically if __name__ == '__main__' (or
+        # if True was provided)
+        func = automain(module)(func)
+
+        return func
+
+    return autocommand_decorator
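
A minimal sketch of the three steps composed together (the `cat` function
and its parameters are hypothetical):

    from autocommand import autocommand

    @autocommand(__name__)
    def cat(filename, repeat: int = 1, verbose=False):
        '''Print the contents of a file'''
        for _ in range(repeat):
            print(open(filename).read(), end='')

    # `filename` (no default) becomes a positional argument; `repeat`
    # becomes -r/--repeat with its type inferred from the annotation;
    # `verbose` becomes a -v/--verbose switch. When the module runs as
    # __main__, automain parses sys.argv[1:] and sys.exit()s with the
    # return value.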
diff --git a/setuptools/_vendor/autocommand/automain.py b/setuptools/_vendor/autocommand/automain.py
new file mode 100644
index 0000000000..6cc45db66a
--- /dev/null
+++ b/setuptools/_vendor/autocommand/automain.py
@@ -0,0 +1,59 @@
+# Copyright 2014-2015 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
+
+import sys
+from .errors import AutocommandError
+
+
+class AutomainRequiresModuleError(AutocommandError, TypeError):
+    pass
+
+
+def automain(module, *, args=(), kwargs=None):
+    '''
+    This decorator automatically invokes a function if the module is being run
+    as the "__main__" module. Optionally, provide args or kwargs with which to
+    call the function. If `module` is "__main__", the function is called, and
+    the program is `sys.exit`ed with the return value. You can also pass `True`
+    to cause the function to be called unconditionally. If the function is not
+    called, it is returned unchanged by the decorator.
+
+    Usage:
+
+    @automain(__name__)  # Pass __name__ to check __name__=="__main__"
+    def main():
+        ...
+
+    If __name__ is "__main__" here, the main function is called, and then
+    sys.exit is called with the return value.
+    '''
+
+    # Check that @automain(...) was called, rather than @automain
+    if callable(module):
+        raise AutomainRequiresModuleError(module)
+
+    if module == '__main__' or module is True:
+        if kwargs is None:
+            kwargs = {}
+
+        # Use a function definition instead of a lambda for a neater traceback
+        def automain_decorator(main):
+            sys.exit(main(*args, **kwargs))
+
+        return automain_decorator
+    else:
+        return lambda main: main
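
When the module is merely imported, the decorator is a no-op and the function
stays importable and testable. A short sketch of the args/kwargs plumbing
(the `main` function here is hypothetical):

    from autocommand.automain import automain

    @automain(__name__, args=(1, 2), kwargs={'verbose': True})
    def main(x, y, verbose=False):
        print(x + y, verbose)
        return 0    # becomes the process exit status via sys.exit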
diff --git a/setuptools/_vendor/autocommand/autoparse.py b/setuptools/_vendor/autocommand/autoparse.py
new file mode 100644
index 0000000000..0276a3fae1
--- /dev/null
+++ b/setuptools/_vendor/autocommand/autoparse.py
@@ -0,0 +1,333 @@
+# Copyright 2014-2015 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
+
+import sys
+from re import compile as compile_regex
+from inspect import signature, getdoc, Parameter
+from argparse import ArgumentParser
+from contextlib import contextmanager
+from functools import wraps
+from io import IOBase
+from autocommand.errors import AutocommandError
+
+
+_empty = Parameter.empty
+
+
+class AnnotationError(AutocommandError):
+    '''Annotation error: annotation must be a string, type, or tuple of both'''
+
+
+class PositionalArgError(AutocommandError):
+    '''
+    Positional Arg Error: autocommand can't handle positional-only parameters
+    '''
+
+
+class KWArgError(AutocommandError):
+    '''kwarg Error: autocommand can't handle a **kwargs parameter'''
+
+
+class DocstringError(AutocommandError):
+    '''Docstring error'''
+
+
+class TooManySplitsError(DocstringError):
+    '''
+    The docstring had too many ---- section splits. Currently we only support
+    using up to a single split, to split the docstring into description and
+    epilog parts.
+    '''
+
+
+def _get_type_description(annotation):
+    '''
+    Given an annotation, return the (type, description) for the parameter.
+    If you provide an annotation that is somehow both a string and a callable,
+    the behavior is undefined.
+    '''
+    if annotation is _empty:
+        return None, None
+    elif callable(annotation):
+        return annotation, None
+    elif isinstance(annotation, str):
+        return None, annotation
+    elif isinstance(annotation, tuple):
+        try:
+            arg1, arg2 = annotation
+        except ValueError as e:
+            raise AnnotationError(annotation) from e
+        else:
+            if callable(arg1) and isinstance(arg2, str):
+                return arg1, arg2
+            elif isinstance(arg1, str) and callable(arg2):
+                return arg2, arg1
+
+    raise AnnotationError(annotation)
+
+
+def _add_arguments(param, parser, used_char_args, add_nos):
+    '''
+    Add the argument(s) to an ArgumentParser (using add_argument) for a given
+    parameter. used_char_args is the set of -short options already in use,
+    and is updated (if necessary) by this function. If add_nos is True,
+    this will also add an inverse switch for all boolean options. For
+    instance, for the boolean parameter "verbose", this will create --verbose
+    and --no-verbose.
+    '''
+
+    # Impl note: This function is kept separate from make_parser because it's
+    # already very long and I wanted to separate out as much as possible into
+    # its own call scope, to prevent even the possibility of subtle mutation
+    # bugs.
+    if param.kind is param.POSITIONAL_ONLY:
+        raise PositionalArgError(param)
+    elif param.kind is param.VAR_KEYWORD:
+        raise KWArgError(param)
+
+    # These are the kwargs for the add_argument function.
+    arg_spec = {}
+    is_option = False
+
+    # Get the type and default from the annotation.
+    arg_type, description = _get_type_description(param.annotation)
+
+    # Get the default value
+    default = param.default
+
+    # If there is no explicit type, and the default is present and not None,
+    # infer the type from the default.
+    if arg_type is None and default not in {_empty, None}:
+        arg_type = type(default)
+
+    # Add default. The presence of a default means this is an option, not an
+    # argument.
+    if default is not _empty:
+        arg_spec['default'] = default
+        is_option = True
+
+    # Add the type
+    if arg_type is not None:
+        # Special case for bool: make it just a --switch
+        if arg_type is bool:
+            if not default or default is _empty:
+                arg_spec['action'] = 'store_true'
+            else:
+                arg_spec['action'] = 'store_false'
+
+            # Switches are always options
+            is_option = True
+
+        # Special case for file types: make it a string type, for filename
+        elif isinstance(default, IOBase):
+            arg_spec['type'] = str
+
+        # TODO: special case for list type.
+    #   - How to specify the type of list members?
+        #       - param: [int]
+        #       - param: int =[]
+        #   - action='append' vs nargs='*'
+
+        else:
+            arg_spec['type'] = arg_type
+
+    # nargs: if the signature includes *args, collect them as trailing CLI
+    # arguments in a list. *args can't have a default value, so it can never be
+    # an option.
+    if param.kind is param.VAR_POSITIONAL:
+        # TODO: consider depluralizing metavar/name here.
+        arg_spec['nargs'] = '*'
+
+    # Add description.
+    if description is not None:
+        arg_spec['help'] = description
+
+    # Get the --flags
+    flags = []
+    name = param.name
+
+    if is_option:
+        # Add the first letter as a -short option.
+        for letter in name[0], name[0].swapcase():
+            if letter not in used_char_args:
+                used_char_args.add(letter)
+                flags.append('-{}'.format(letter))
+                break
+
+        # If the parameter is a --long option, or is a -short option that
+        # somehow failed to get a flag, add it.
+        if len(name) > 1 or not flags:
+            flags.append('--{}'.format(name))
+
+        arg_spec['dest'] = name
+    else:
+        flags.append(name)
+
+    parser.add_argument(*flags, **arg_spec)
+
+    # Create the --no- version for boolean switches
+    if add_nos and arg_type is bool:
+        parser.add_argument(
+            '--no-{}'.format(name),
+            action='store_const',
+            dest=name,
+            const=default if default is not _empty else False)
+
+
+def make_parser(func_sig, description, epilog, add_nos):
+    '''
+    Given the signature of a function, create an ArgumentParser
+    '''
+    parser = ArgumentParser(description=description, epilog=epilog)
+
+    used_char_args = {'h'}
+
+    # Arrange the params so that single-character arguments are first. This
+    # ensures they don't have to get --long versions. sorted is stable, so the
+    # parameters will otherwise still be in relative order.
+    params = sorted(
+        func_sig.parameters.values(),
+        key=lambda param: len(param.name) > 1)
+
+    for param in params:
+        _add_arguments(param, parser, used_char_args, add_nos)
+
+    return parser
+
+
+_DOCSTRING_SPLIT = compile_regex(r'\n\s*-{4,}\s*\n')
+
+
+def parse_docstring(docstring):
+    '''
+    Given a docstring, parse it into a description and epilog part
+    '''
+    if docstring is None:
+        return '', ''
+
+    parts = _DOCSTRING_SPLIT.split(docstring)
+
+    if len(parts) == 1:
+        return docstring, ''
+    elif len(parts) == 2:
+        return parts[0], parts[1]
+    else:
+        raise TooManySplitsError()
+
+
+def autoparse(
+        func=None, *,
+        description=None,
+        epilog=None,
+        add_nos=False,
+        parser=None):
+    '''
+    This decorator converts a function that takes normal arguments into a
+    function which takes a single optional argument, argv, parses it using an
+    argparse.ArgumentParser, and calls the underlying function with the parsed
+    arguments. If argv is not given, sys.argv[1:] is used. This is so that the
+    function can be used as a setuptools entry point, as well as a normal main
+    function. sys.argv[1:] is not evaluated until the function is called, to
+    allow injecting different arguments for testing.
+
+    It uses the argument signature of the function to create an
+    ArgumentParser. Parameters without defaults become positional parameters,
+    while parameters *with* defaults become --options. Use annotations to set
+    the type of the parameter.
+
+    The `description` and `epilog` parameters correspond to the same respective
+    argparse parameters. If no description is given, it defaults to the
+    decorated function's docstring, if present.
+
+    If add_nos is True, every boolean option (that is, every parameter with a
+    default of True/False or a type of bool) will have a --no- version created
+    as well, which inverts the option. For instance, the --verbose option will
+    have a --no-verbose counterpart. These are not mutually exclusive;
+    whichever one appears last in the argument list takes precedence.
+
+    If a parser is given, it is used instead of one generated from the function
+    signature. In this case, no parser is created; instead, the given parser is
+    used to parse the argv argument. The parser's results' argument names must
+    match up with the parameter names of the decorated function.
+
+    The decorated function is attached to the result as the `func` attribute,
+    and the parser is attached as the `parser` attribute.
+    '''
+
+    # If @autoparse(...) is used instead of @autoparse
+    if func is None:
+        return lambda f: autoparse(
+            f, description=description,
+            epilog=epilog,
+            add_nos=add_nos,
+            parser=parser)
+
+    func_sig = signature(func)
+
+    docstr_description, docstr_epilog = parse_docstring(getdoc(func))
+
+    if parser is None:
+        parser = make_parser(
+            func_sig,
+            description or docstr_description,
+            epilog or docstr_epilog,
+            add_nos)
+
+    @wraps(func)
+    def autoparse_wrapper(argv=None):
+        if argv is None:
+            argv = sys.argv[1:]
+
+        # Get empty argument binding, to fill with parsed arguments. This
+        # object does all the heavy lifting of turning named arguments
+        # into correctly bound *args and **kwargs.
+        parsed_args = func_sig.bind_partial()
+        parsed_args.arguments.update(vars(parser.parse_args(argv)))
+
+        return func(*parsed_args.args, **parsed_args.kwargs)
+
+    # TODO: attach an updated __signature__ to autoparse_wrapper, just in case.
+
+    # Attach the wrapped function and parser, and return the wrapper.
+    autoparse_wrapper.func = func
+    autoparse_wrapper.parser = parser
+    return autoparse_wrapper
+
+
+@contextmanager
+def smart_open(filename_or_file, *args, **kwargs):
+    '''
+    This context manager lets a parameter be either a filename or an
+    already-open file object, like sys.stdout, that shouldn't be closed at
+    the end of the context. If the filename argument is a str, bytes, or int,
+    the file object is created via a call to open with the given *args and
+    **kwargs, sent to the context, and closed at the end of the context, just
+    like "with open(filename) as f:". If it isn't one of the openable types,
+    the object is simply sent to the context unchanged and left unclosed at
+    the end of the context. Example:
+        def work_with_file(name=sys.stdout):
+            with smart_open(name) as f:
+                # Works correctly if name is a str filename or sys.stdout
+                print("Some stuff", file=f)
+                # If it was a filename, f is closed at the end here.
+    '''
+    if isinstance(filename_or_file, (str, bytes, int)):
+        with open(filename_or_file, *args, **kwargs) as file:
+            yield file
+    else:
+        yield filename_or_file
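
To illustrate the signature-to-parser mapping implemented above, a minimal
sketch (the `copy` function and its argv values are hypothetical):

    from autocommand.autoparse import autoparse

    @autoparse
    def copy(source, dest, count: int = 1, verbose=False):
        '''Copy a file'''
        print(source, dest, count, verbose)

    # source and dest have no defaults, so they become positional arguments.
    # count has an int default, so it becomes the -c/--count option.
    # verbose has a bool default, so it becomes a -v/--verbose switch.
    copy(['src.txt', 'dst.txt', '--count', '3', '-v'])
    # prints: src.txt dst.txt 3 True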
diff --git a/setuptools/_vendor/autocommand/errors.py b/setuptools/_vendor/autocommand/errors.py
new file mode 100644
index 0000000000..2570607399
--- /dev/null
+++ b/setuptools/_vendor/autocommand/errors.py
@@ -0,0 +1,23 @@
+# Copyright 2014-2016 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
+
+
+class AutocommandError(Exception):
+    '''Base class for autocommand exceptions'''
+    pass
+
+# Individual modules will define errors specific to that module.
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
deleted file mode 100644
index a6a44d8fcc..0000000000
--- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
+++ /dev/null
@@ -1,9 +0,0 @@
-backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
-backports.tarfile-1.0.0.dist-info/RECORD,,
-backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
-backports/__pycache__/tarfile.cpython-312.pyc,,
-backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/INSTALLER b/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/importlib_metadata-6.0.0.dist-info/INSTALLER
rename to setuptools/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE b/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
rename to setuptools/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA b/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
similarity index 83%
rename from setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
rename to setuptools/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
index e7b64c87f8..db0a2dcdbe 100644
--- a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
+++ b/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
@@ -1,16 +1,16 @@
 Metadata-Version: 2.1
 Name: backports.tarfile
-Version: 1.0.0
+Version: 1.2.0
 Summary: Backport of CPython tarfile module
-Home-page: https://github.com/jaraco/backports.tarfile
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
+Author-email: "Jason R. Coombs" 
+Project-URL: Homepage, https://github.com/jaraco/backports.tarfile
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
 License-File: LICENSE
 Provides-Extra: docs
 Requires-Dist: sphinx >=3.5 ; extra == 'docs'
@@ -19,10 +19,12 @@ Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
 Requires-Dist: furo ; extra == 'docs'
 Requires-Dist: sphinx-lint ; extra == 'docs'
 Provides-Extra: testing
-Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'testing'
 Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
 Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: jaraco.test ; extra == 'testing'
+Requires-Dist: pytest !=8.0.* ; extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
    :target: https://pypi.org/project/backports.tarfile
diff --git a/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/RECORD b/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
new file mode 100644
index 0000000000..536dc2f09e
--- /dev/null
+++ b/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
@@ -0,0 +1,17 @@
+backports.tarfile-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.2.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.2.0.dist-info/METADATA,sha256=ghXFTq132dxaEIolxr3HK1mZqm9iyUmaRANZQSr6WlE,2020
+backports.tarfile-1.2.0.dist-info/RECORD,,
+backports.tarfile-1.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.2.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.2.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81
+backports/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/__init__.py,sha256=Pwf2qUIfB0SolJPCKcx3vz3UEu_aids4g4sAfxy94qg,108491
+backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59
+backports/tarfile/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/__pycache__/__main__.cpython-312.pyc,,
+backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports/tarfile/compat/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/compat/__pycache__/py38.cpython-312.pyc,,
+backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568
diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/REQUESTED b/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED
similarity index 100%
rename from setuptools/_vendor/importlib_metadata-6.0.0.dist-info/REQUESTED
rename to setuptools/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL b/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL
similarity index 100%
rename from setuptools/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
rename to setuptools/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL
diff --git a/setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt b/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
rename to setuptools/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt
diff --git a/setuptools/_vendor/backports/__init__.py b/setuptools/_vendor/backports/__init__.py
index e69de29bb2..0d1f7edf5d 100644
--- a/setuptools/_vendor/backports/__init__.py
+++ b/setuptools/_vendor/backports/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore
diff --git a/setuptools/_vendor/backports/tarfile.py b/setuptools/_vendor/backports/tarfile/__init__.py
similarity index 96%
rename from setuptools/_vendor/backports/tarfile.py
rename to setuptools/_vendor/backports/tarfile/__init__.py
index a7a9a6e7b9..8c16881cb3 100644
--- a/setuptools/_vendor/backports/tarfile.py
+++ b/setuptools/_vendor/backports/tarfile/__init__.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 #-------------------------------------------------------------------
 # tarfile.py
 #-------------------------------------------------------------------
@@ -46,7 +45,8 @@
 import struct
 import copy
 import re
-import warnings
+
+from .compat.py38 import removesuffix
 
 try:
     import pwd
@@ -637,6 +637,10 @@ def __init__(self, fileobj, offset, size, name, blockinfo=None):
     def flush(self):
         pass
 
+    @property
+    def mode(self):
+        return 'rb'
+
     def readable(self):
         return True
 
@@ -873,7 +877,7 @@ class TarInfo(object):
         pax_headers = ('A dictionary containing key-value pairs of an '
                        'associated pax extended header.'),
         sparse = 'Sparse member information.',
-        tarfile = None,
+        _tarfile = None,
         _sparse_structs = None,
         _link_target = None,
         )
@@ -902,6 +906,24 @@ def __init__(self, name=""):
         self.sparse = None      # sparse member information
         self.pax_headers = {}   # pax header information
 
+    @property
+    def tarfile(self):
+        import warnings
+        warnings.warn(
+            'The undocumented "tarfile" attribute of TarInfo objects '
+            + 'is deprecated and will be removed in Python 3.16',
+            DeprecationWarning, stacklevel=2)
+        return self._tarfile
+
+    @tarfile.setter
+    def tarfile(self, tarfile):
+        import warnings
+        warnings.warn(
+            'The undocumented "tarfile" attribute of TarInfo objects '
+            + 'is deprecated and will be removed in Python 3.16',
+            DeprecationWarning, stacklevel=2)
+        self._tarfile = tarfile
+
     @property
     def path(self):
         'In pax headers, "name" is called "path".'
@@ -1196,7 +1218,7 @@ def _create_pax_generic_header(cls, pax_headers, type, encoding):
         for keyword, value in pax_headers.items():
             keyword = keyword.encode("utf-8")
             if binary:
-                # Try to restore the original byte representation of `value'.
+                # Try to restore the original byte representation of 'value'.
                 # Needless to say, that the encoding must match the string.
                 value = value.encode(encoding, "surrogateescape")
             else:
@@ -1365,7 +1387,7 @@ def _proc_gnulong(self, tarfile):
         # Remove redundant slashes from directories. This is to be consistent
         # with frombuf().
         if next.isdir():
-            next.name = next.name.removesuffix("/")
+            next.name = removesuffix(next.name, "/")
 
         return next
 
@@ -1641,14 +1663,14 @@ class TarFile(object):
     def __init__(self, name=None, mode="r", fileobj=None, format=None,
             tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
             errors="surrogateescape", pax_headers=None, debug=None,
-            errorlevel=None, copybufsize=None):
-        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+            errorlevel=None, copybufsize=None, stream=False):
+        """Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to
            read from an existing archive, 'a' to append data to an existing
-           file or 'w' to create a new file overwriting an existing one. `mode'
+           file or 'w' to create a new file overwriting an existing one. 'mode'
            defaults to 'r'.
-           If `fileobj' is given, it is used for reading or writing data. If it
-           can be determined, `mode' is overridden by `fileobj's mode.
-           `fileobj' is not closed, when TarFile is closed.
+           If 'fileobj' is given, it is used for reading or writing data. If it
+           can be determined, 'mode' is overridden by 'fileobj's mode.
+           'fileobj' is not closed, when TarFile is closed.
         """
         modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
         if mode not in modes:
@@ -1673,6 +1695,8 @@ def __init__(self, name=None, mode="r", fileobj=None, format=None,
         self.name = os.path.abspath(name) if name else None
         self.fileobj = fileobj
 
+        self.stream = stream
+
         # Init attributes.
         if format is not None:
             self.format = format
@@ -1975,7 +1999,7 @@ def close(self):
                 self.fileobj.close()
 
     def getmember(self, name):
-        """Return a TarInfo object for member ``name``. If ``name`` can not be
+        """Return a TarInfo object for member 'name'. If 'name' can not be
            found in the archive, KeyError is raised. If a member occurs more
            than once in the archive, its last occurrence is assumed to be the
            most up-to-date version.
@@ -2003,9 +2027,9 @@ def getnames(self):
 
     def gettarinfo(self, name=None, arcname=None, fileobj=None):
         """Create a TarInfo object from the result of os.stat or equivalent
-           on an existing file. The file is either named by ``name``, or
-           specified as a file object ``fileobj`` with a file descriptor. If
-           given, ``arcname`` specifies an alternative name for the file in the
+           on an existing file. The file is either named by 'name', or
+           specified as a file object 'fileobj' with a file descriptor. If
+           given, 'arcname' specifies an alternative name for the file in the
            archive, otherwise, the name is taken from the 'name' attribute of
            'fileobj', or the 'name' argument. The name should be a text
            string.
@@ -2029,7 +2053,7 @@ def gettarinfo(self, name=None, arcname=None, fileobj=None):
         # Now, fill the TarInfo object with
         # information specific for the file.
         tarinfo = self.tarinfo()
-        tarinfo.tarfile = self  # Not needed
+        tarinfo._tarfile = self  # To be removed in 3.16.
 
         # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
         if fileobj is None:
@@ -2101,11 +2125,15 @@ def gettarinfo(self, name=None, arcname=None, fileobj=None):
         return tarinfo
 
     def list(self, verbose=True, *, members=None):
-        """Print a table of contents to sys.stdout. If ``verbose`` is False, only
-           the names of the members are printed. If it is True, an `ls -l'-like
-           output is produced. ``members`` is optional and must be a subset of the
+        """Print a table of contents to sys.stdout. If 'verbose' is False, only
+           the names of the members are printed. If it is True, an 'ls -l'-like
+           output is produced. 'members' is optional and must be a subset of the
            list returned by getmembers().
         """
+        # Convert tarinfo type to stat type.
+        type2mode = {REGTYPE: stat.S_IFREG, SYMTYPE: stat.S_IFLNK,
+                     FIFOTYPE: stat.S_IFIFO, CHRTYPE: stat.S_IFCHR,
+                     DIRTYPE: stat.S_IFDIR, BLKTYPE: stat.S_IFBLK}
         self._check()
 
         if members is None:
@@ -2115,7 +2143,8 @@ def list(self, verbose=True, *, members=None):
                 if tarinfo.mode is None:
                     _safe_print("??????????")
                 else:
-                    _safe_print(stat.filemode(tarinfo.mode))
+                    modetype = type2mode.get(tarinfo.type, 0)
+                    _safe_print(stat.filemode(modetype | tarinfo.mode))
                 _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
                                        tarinfo.gname or tarinfo.gid))
                 if tarinfo.ischr() or tarinfo.isblk():
@@ -2139,11 +2168,11 @@ def list(self, verbose=True, *, members=None):
             print()
 
     def add(self, name, arcname=None, recursive=True, *, filter=None):
-        """Add the file ``name`` to the archive. ``name`` may be any type of file
-           (directory, fifo, symbolic link, etc.). If given, ``arcname``
+        """Add the file 'name' to the archive. 'name' may be any type of file
+           (directory, fifo, symbolic link, etc.). If given, 'arcname'
            specifies an alternative name for the file in the archive.
            Directories are added recursively by default. This can be avoided by
-           setting ``recursive`` to False. ``filter`` is a function
+           setting 'recursive' to False. 'filter' is a function
            that expects a TarInfo object argument and returns the changed
            TarInfo object, if it returns None the TarInfo object will be
            excluded from the archive.
@@ -2190,13 +2219,16 @@ def add(self, name, arcname=None, recursive=True, *, filter=None):
             self.addfile(tarinfo)
 
     def addfile(self, tarinfo, fileobj=None):
-        """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
-           given, it should be a binary file, and tarinfo.size bytes are read
-           from it and added to the archive. You can create TarInfo objects
-           directly, or by using gettarinfo().
+        """Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents
+           a non zero-size regular file, the 'fileobj' argument should be a binary file,
+           and tarinfo.size bytes are read from it and added to the archive.
+           You can create TarInfo objects directly, or by using gettarinfo().
         """
         self._check("awx")
 
+        if fileobj is None and tarinfo.isreg() and tarinfo.size != 0:
+            raise ValueError("fileobj not provided for non zero-size regular file")
+
         tarinfo = copy.copy(tarinfo)
 
         buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
@@ -2218,11 +2250,12 @@ def _get_filter_function(self, filter):
         if filter is None:
             filter = self.extraction_filter
             if filter is None:
+                import warnings
                 warnings.warn(
                     'Python 3.14 will, by default, filter extracted tar '
                     + 'archives and reject files or modify their metadata. '
                     + 'Use the filter argument to control this behavior.',
-                    DeprecationWarning)
+                    DeprecationWarning, stacklevel=3)
                 return fully_trusted_filter
             if isinstance(filter, str):
                 raise TypeError(
@@ -2241,12 +2274,12 @@ def extractall(self, path=".", members=None, *, numeric_owner=False,
                    filter=None):
         """Extract all members from the archive to the current working
            directory and set owner, modification time and permissions on
-           directories afterwards. `path' specifies a different directory
-           to extract to. `members' is optional and must be a subset of the
-           list returned by getmembers(). If `numeric_owner` is True, only
+           directories afterwards. 'path' specifies a different directory
+           to extract to. 'members' is optional and must be a subset of the
+           list returned by getmembers(). If 'numeric_owner' is True, only
            the numbers for user/group names are used and not the names.
 
-           The `filter` function will be called on each member just
+           The 'filter' function will be called on each member just
            before extraction.
            It can return a changed TarInfo or None to skip the member.
            String names of common filters are accepted.
@@ -2286,13 +2319,13 @@ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
                 filter=None):
         """Extract a member from the archive to the current working directory,
            using its full name. Its file information is extracted as accurately
-           as possible. `member' may be a filename or a TarInfo object. You can
-           specify a different directory using `path'. File attributes (owner,
-           mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
+           as possible. 'member' may be a filename or a TarInfo object. You can
+           specify a different directory using 'path'. File attributes (owner,
+           mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner'
            is True, only the numbers for user/group names are used and not
            the names.
 
-           The `filter` function will be called before extraction.
+           The 'filter' function will be called before extraction.
            It can return a changed TarInfo or None to skip the member.
            String names of common filters are accepted.
         """
@@ -2357,10 +2390,10 @@ def _handle_fatal_error(self, e):
             self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
 
     def extractfile(self, member):
-        """Extract a member from the archive as a file object. ``member`` may be
-           a filename or a TarInfo object. If ``member`` is a regular file or
+        """Extract a member from the archive as a file object. 'member' may be
+           a filename or a TarInfo object. If 'member' is a regular file or
            a link, an io.BufferedReader object is returned. For all other
-           existing members, None is returned. If ``member`` does not appear
+           existing members, None is returned. If 'member' does not appear
            in the archive, KeyError is raised.
         """
         self._check("r")
@@ -2404,7 +2437,7 @@ def _extract_member(self, tarinfo, targetpath, set_attrs=True,
         if upperdirs and not os.path.exists(upperdirs):
             # Create directories that are not part of the archive with
             # default permissions.
-            os.makedirs(upperdirs)
+            os.makedirs(upperdirs, exist_ok=True)
 
         if tarinfo.islnk() or tarinfo.issym():
             self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
@@ -2557,7 +2590,8 @@ def chown(self, tarinfo, targetpath, numeric_owner):
                     os.lchown(targetpath, u, g)
                 else:
                     os.chown(targetpath, u, g)
-            except OSError as e:
+            except (OSError, OverflowError) as e:
+                # OverflowError can be raised if an ID doesn't fit in 'id_t'
                 raise ExtractError("could not change owner") from e
 
     def chmod(self, tarinfo, targetpath):
@@ -2640,7 +2674,9 @@ def next(self):
             break
 
         if tarinfo is not None:
-            self.members.append(tarinfo)
+            # if streaming the file we do not want to cache the tarinfo
+            if not self.stream:
+                self.members.append(tarinfo)
         else:
             self._loaded = True
 
@@ -2691,11 +2727,12 @@ def _getmember(self, name, tarinfo=None, normalize=False):
 
     def _load(self):
         """Read through the entire archive file and look for readable
-           members.
+           members. This should not run if the file is set to stream.
         """
-        while self.next() is not None:
-            pass
-        self._loaded = True
+        if not self.stream:
+            while self.next() is not None:
+                pass
+            self._loaded = True
 
     def _check(self, mode=None):
         """Check if TarFile is still open, and if the operation's mode
diff --git a/setuptools/_vendor/backports/tarfile/__main__.py b/setuptools/_vendor/backports/tarfile/__main__.py
new file mode 100644
index 0000000000..daf5509086
--- /dev/null
+++ b/setuptools/_vendor/backports/tarfile/__main__.py
@@ -0,0 +1,5 @@
+from . import main
+
+
+if __name__ == '__main__':
+    main()
diff --git a/setuptools/_vendor/__init__.py b/setuptools/_vendor/backports/tarfile/compat/__init__.py
similarity index 100%
rename from setuptools/_vendor/__init__.py
rename to setuptools/_vendor/backports/tarfile/compat/__init__.py
diff --git a/setuptools/_vendor/backports/tarfile/compat/py38.py b/setuptools/_vendor/backports/tarfile/compat/py38.py
new file mode 100644
index 0000000000..20fbbfc1c0
--- /dev/null
+++ b/setuptools/_vendor/backports/tarfile/compat/py38.py
@@ -0,0 +1,24 @@
+import sys
+
+
+if sys.version_info < (3, 9):
+
+    def removesuffix(self, suffix):
+        # suffix='' should not call self[:-0].
+        if suffix and self.endswith(suffix):
+            return self[: -len(suffix)]
+        else:
+            return self[:]
+
+    def removeprefix(self, prefix):
+        if self.startswith(prefix):
+            return self[len(prefix) :]
+        else:
+            return self[:]
+else:
+
+    def removesuffix(self, suffix):
+        return self.removesuffix(suffix)
+
+    def removeprefix(self, prefix):
+        return self.removeprefix(prefix)
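
The shims are called function-style, with the string as the first argument,
so call sites read the same on 3.8 (manual slicing) and 3.9+ (delegation to
the str methods). For example:

    from backports.tarfile.compat.py38 import removesuffix

    assert removesuffix('usr/lib/', '/') == 'usr/lib'
    assert removesuffix('usr/lib', '/') == 'usr/lib'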
diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD b/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD
deleted file mode 100644
index c5ed31bf55..0000000000
--- a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/RECORD
+++ /dev/null
@@ -1,26 +0,0 @@
-importlib_metadata-6.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-importlib_metadata-6.0.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-importlib_metadata-6.0.0.dist-info/METADATA,sha256=tZIEx9HdEXD34SWuitkNXaYBqSnyNukx2l4FKQAz9hY,4958
-importlib_metadata-6.0.0.dist-info/RECORD,,
-importlib_metadata-6.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_metadata-6.0.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-importlib_metadata-6.0.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
-importlib_metadata/__init__.py,sha256=wiMJxNXXhPtRRHSX2N9gGLnTh0YszmE1rn3uKYRrNcs,26490
-importlib_metadata/__pycache__/__init__.cpython-312.pyc,,
-importlib_metadata/__pycache__/_adapters.cpython-312.pyc,,
-importlib_metadata/__pycache__/_collections.cpython-312.pyc,,
-importlib_metadata/__pycache__/_compat.cpython-312.pyc,,
-importlib_metadata/__pycache__/_functools.cpython-312.pyc,,
-importlib_metadata/__pycache__/_itertools.cpython-312.pyc,,
-importlib_metadata/__pycache__/_meta.cpython-312.pyc,,
-importlib_metadata/__pycache__/_py39compat.cpython-312.pyc,,
-importlib_metadata/__pycache__/_text.cpython-312.pyc,,
-importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454
-importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
-importlib_metadata/_compat.py,sha256=9zOKf0eDgkCMnnaEhU5kQVxHd1P8BIYV7Stso7av5h8,1857
-importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895
-importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068
-importlib_metadata/_meta.py,sha256=v5e1ZDG7yZTH3h7TjbS5bM5p8AGzMPVOu8skDMv4h6k,1165
-importlib_metadata/_py39compat.py,sha256=2Tk5twb_VgLCY-1NEAQjdZp_S9OFMC-pUzP2isuaPsQ,1098
-importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
-importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/INSTALLER b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/importlib_resources-5.10.2.dist-info/INSTALLER
rename to setuptools/_vendor/importlib_metadata-8.0.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/LICENSE b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/importlib_metadata-6.0.0.dist-info/LICENSE
rename to setuptools/_vendor/importlib_metadata-8.0.0.dist-info/LICENSE
diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/METADATA b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/METADATA
similarity index 58%
rename from setuptools/_vendor/importlib_metadata-6.0.0.dist-info/METADATA
rename to setuptools/_vendor/importlib_metadata-8.0.0.dist-info/METADATA
index 663c0c8720..85513e8a9f 100644
--- a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/METADATA
+++ b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/METADATA
@@ -1,60 +1,59 @@
 Metadata-Version: 2.1
-Name: importlib-metadata
-Version: 6.0.0
+Name: importlib_metadata
+Version: 8.0.0
 Summary: Read metadata from Python packages
-Home-page: https://github.com/python/importlib_metadata
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
+Author-email: "Jason R. Coombs" 
+Project-URL: Source, https://github.com/python/importlib_metadata
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.7
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
 License-File: LICENSE
-Requires-Dist: zipp (>=0.5)
-Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8"
-Provides-Extra: docs
-Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
+Requires-Dist: zipp >=0.5
+Requires-Dist: typing-extensions >=3.6.4 ; python_version < "3.8"
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
 Provides-Extra: perf
 Requires-Dist: ipython ; extra == 'perf'
-Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: flake8 (<5) ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing'
-Requires-Dist: packaging ; extra == 'testing'
-Requires-Dist: pyfakefs ; extra == 'testing'
-Requires-Dist: flufl.flake8 ; extra == 'testing'
-Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'
-Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-mypy ; extra == 'test'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
+Requires-Dist: packaging ; extra == 'test'
+Requires-Dist: pyfakefs ; extra == 'test'
+Requires-Dist: flufl.flake8 ; extra == 'test'
+Requires-Dist: pytest-perf >=0.9.2 ; extra == 'test'
+Requires-Dist: jaraco.test >=5.4 ; extra == 'test'
+Requires-Dist: importlib-resources >=1.3 ; (python_version < "3.9") and extra == 'test'
 
 .. image:: https://img.shields.io/pypi/v/importlib_metadata.svg
    :target: https://pypi.org/project/importlib_metadata
 
 .. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg
 
-.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg
+.. image:: https://github.com/python/importlib_metadata/actions/workflows/main.yml/badge.svg
    :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22
    :alt: tests
 
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
 
 .. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest
    :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest
 
-.. image:: https://img.shields.io/badge/skeleton-2022-informational
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
    :target: https://blog.jaraco.com/skeleton
 
 .. image:: https://tidelift.com/badges/package/pypi/importlib-metadata
@@ -79,7 +78,9 @@ were contributed to different versions in the standard library:
 
    * - importlib_metadata
      - stdlib
-   * - 5.0
+   * - 7.0
+     - 3.13
+   * - 6.5
      - 3.12
    * - 4.13
      - 3.11
@@ -92,7 +93,7 @@ were contributed to different versions in the standard library:
 Usage
 =====
 
-See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+See the `online documentation <https://importlib-metadata.readthedocs.io/>`_
 for usage details.
 
 `Finder authors
@@ -116,7 +117,7 @@ Project details
  * Project home: https://github.com/python/importlib_metadata
  * Report bugs at: https://github.com/python/importlib_metadata/issues
  * Code hosting: https://github.com/python/importlib_metadata
- * Documentation: https://importlib_metadata.readthedocs.io/
+ * Documentation: https://importlib-metadata.readthedocs.io/
 
 For Enterprise
 ==============
@@ -126,10 +127,3 @@ Available as part of the Tidelift Subscription.
 This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
 
 `Learn more `_.
-
-Security Contact
-================
-
-To report a security vulnerability, please use the
-`Tidelift security contact `_.
-Tidelift will coordinate the fix and disclosure.
diff --git a/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/RECORD b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/RECORD
new file mode 100644
index 0000000000..07b7dc51db
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/RECORD
@@ -0,0 +1,32 @@
+importlib_metadata-8.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+importlib_metadata-8.0.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+importlib_metadata-8.0.0.dist-info/METADATA,sha256=anuQ7_7h4J1bSEzfcjIBakPi2cyVQ7y7jklLHsBeH1k,4648
+importlib_metadata-8.0.0.dist-info/RECORD,,
+importlib_metadata-8.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_metadata-8.0.0.dist-info/WHEEL,sha256=mguMlWGMX-VHnMpKOjjQidIo1ssRlCFu4a4mBpz1s2M,91
+importlib_metadata-8.0.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
+importlib_metadata/__init__.py,sha256=tZNB-23h8Bixi9uCrQqj9Yf0aeC--Josdy3IZRIQeB0,33798
+importlib_metadata/__pycache__/__init__.cpython-312.pyc,,
+importlib_metadata/__pycache__/_adapters.cpython-312.pyc,,
+importlib_metadata/__pycache__/_collections.cpython-312.pyc,,
+importlib_metadata/__pycache__/_compat.cpython-312.pyc,,
+importlib_metadata/__pycache__/_functools.cpython-312.pyc,,
+importlib_metadata/__pycache__/_itertools.cpython-312.pyc,,
+importlib_metadata/__pycache__/_meta.cpython-312.pyc,,
+importlib_metadata/__pycache__/_text.cpython-312.pyc,,
+importlib_metadata/__pycache__/diagnose.cpython-312.pyc,,
+importlib_metadata/_adapters.py,sha256=rIhWTwBvYA1bV7i-5FfVX38qEXDTXFeS5cb5xJtP3ks,2317
+importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
+importlib_metadata/_compat.py,sha256=73QKrN9KNoaZzhbX5yPCCZa-FaALwXe8TPlDR72JgBU,1314
+importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895
+importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068
+importlib_metadata/_meta.py,sha256=nxZ7C8GVlcBFAKWyVOn_dn7ot_twBcbm1NmvjIetBHI,1801
+importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
+importlib_metadata/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_metadata/compat/__pycache__/__init__.cpython-312.pyc,,
+importlib_metadata/compat/__pycache__/py311.cpython-312.pyc,,
+importlib_metadata/compat/__pycache__/py39.cpython-312.pyc,,
+importlib_metadata/compat/py311.py,sha256=uqm-K-uohyj1042TH4a9Er_I5o7667DvulcD-gC_fSA,608
+importlib_metadata/compat/py39.py,sha256=cPkMv6-0ilK-0Jw_Tkn0xYbOKJZc4WJKQHow0c2T44w,1102
+importlib_metadata/diagnose.py,sha256=nkSRMiowlmkhLYhKhvCg9glmt_11Cox-EmLzEbqYTa8,379
+importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/REQUESTED
similarity index 100%
rename from setuptools/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED
rename to setuptools/_vendor/importlib_metadata-8.0.0.dist-info/REQUESTED
diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/WHEEL b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/WHEEL
similarity index 65%
rename from setuptools/_vendor/importlib_resources-5.10.2.dist-info/WHEEL
rename to setuptools/_vendor/importlib_metadata-8.0.0.dist-info/WHEEL
index 57e3d840d5..edf4ec7c70 100644
--- a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/WHEEL
+++ b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.38.4)
+Generator: setuptools (70.1.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/setuptools/_vendor/importlib_metadata-6.0.0.dist-info/top_level.txt b/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/importlib_metadata-6.0.0.dist-info/top_level.txt
rename to setuptools/_vendor/importlib_metadata-8.0.0.dist-info/top_level.txt
diff --git a/setuptools/_vendor/importlib_metadata/__init__.py b/setuptools/_vendor/importlib_metadata/__init__.py
index 8864214375..ed4813551a 100644
--- a/setuptools/_vendor/importlib_metadata/__init__.py
+++ b/setuptools/_vendor/importlib_metadata/__init__.py
@@ -1,25 +1,28 @@
+from __future__ import annotations
+
 import os
 import re
 import abc
-import csv
 import sys
-from .. import zipp
+import json
+import zipp
 import email
+import types
+import inspect
 import pathlib
 import operator
 import textwrap
-import warnings
 import functools
 import itertools
 import posixpath
 import collections
 
-from . import _adapters, _meta, _py39compat
+from . import _meta
+from .compat import py39, py311
 from ._collections import FreezableDefaultDict, Pair
 from ._compat import (
     NullFinder,
     install,
-    pypy_partial,
 )
 from ._functools import method_cache, pass_none
 from ._itertools import always_iterable, unique_everseen
@@ -29,8 +32,7 @@
 from importlib import import_module
 from importlib.abc import MetaPathFinder
 from itertools import starmap
-from typing import List, Mapping, Optional
-
+from typing import Any, Iterable, List, Mapping, Match, Optional, Set, cast
 
 __all__ = [
     'Distribution',
@@ -51,11 +53,11 @@
 class PackageNotFoundError(ModuleNotFoundError):
     """The package was not found."""
 
-    def __str__(self):
+    def __str__(self) -> str:
         return f"No package metadata was found for {self.name}"
 
     @property
-    def name(self):
+    def name(self) -> str:  # type: ignore[override]
         (name,) = self.args
         return name
 
@@ -121,38 +123,11 @@ def read(text, filter_=None):
             yield Pair(name, value)
 
     @staticmethod
-    def valid(line):
+    def valid(line: str):
         return line and not line.startswith('#')
 
 
-class DeprecatedTuple:
-    """
-    Provide subscript item access for backward compatibility.
-
-    >>> recwarn = getfixture('recwarn')
-    >>> ep = EntryPoint(name='name', value='value', group='group')
-    >>> ep[:]
-    ('name', 'value', 'group')
-    >>> ep[0]
-    'name'
-    >>> len(recwarn)
-    1
-    """
-
-    # Do not remove prior to 2023-05-01 or Python 3.13
-    _warn = functools.partial(
-        warnings.warn,
-        "EntryPoint tuple interface is deprecated. Access members by name.",
-        DeprecationWarning,
-        stacklevel=pypy_partial(2),
-    )
-
-    def __getitem__(self, item):
-        self._warn()
-        return self._key()[item]
-
-
-class EntryPoint(DeprecatedTuple):
+class EntryPoint:
     """An entry point as defined by Python packaging conventions.
 
     See `the packaging docs on entry points
@@ -194,34 +169,37 @@ class EntryPoint(DeprecatedTuple):
     value: str
     group: str
 
-    dist: Optional['Distribution'] = None
+    dist: Optional[Distribution] = None
 
-    def __init__(self, name, value, group):
+    def __init__(self, name: str, value: str, group: str) -> None:
         vars(self).update(name=name, value=value, group=group)
 
-    def load(self):
+    def load(self) -> Any:
         """Load the entry point from its definition. If only a module
         is indicated by the value, return that module. Otherwise,
         return the named object.
         """
-        match = self.pattern.match(self.value)
+        match = cast(Match, self.pattern.match(self.value))
         module = import_module(match.group('module'))
         attrs = filter(None, (match.group('attr') or '').split('.'))
         return functools.reduce(getattr, attrs, module)
 
     @property
-    def module(self):
+    def module(self) -> str:
         match = self.pattern.match(self.value)
+        assert match is not None
         return match.group('module')
 
     @property
-    def attr(self):
+    def attr(self) -> str:
         match = self.pattern.match(self.value)
+        assert match is not None
         return match.group('attr')
 
     @property
-    def extras(self):
+    def extras(self) -> List[str]:
         match = self.pattern.match(self.value)
+        assert match is not None
         return re.findall(r'\w+', match.group('extras') or '')
 
     def _for(self, dist):
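The three properties above each re-match `self.pattern` against the value, with the new asserts narrowing the `Optional[Match]` for the type checker. A minimal sketch of how an entry-point value decomposes under a pattern of this shape (the regex below is an approximation of the class's `pattern` attribute, which this hunk does not show):

    import re

    # Approximation of EntryPoint.pattern (assumed, not quoted from the diff).
    pattern = re.compile(
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+)\s*)?'
        r'((?P<extras>\[.*\])\s*)?$'
    )

    match = pattern.match('pkg.mod:Cls.method [extra1,extra2]')
    assert match is not None
    print(match.group('module'))                            # pkg.mod
    print(match.group('attr'))                              # Cls.method
    print(re.findall(r'\w+', match.group('extras') or ''))  # ['extra1', 'extra2']
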
@@ -269,7 +247,7 @@ def __repr__(self):
             f'group={self.group!r})'
         )
 
-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash(self._key())
 
 
@@ -280,7 +258,7 @@ class EntryPoints(tuple):
 
     __slots__ = ()
 
-    def __getitem__(self, name):  # -> EntryPoint:
+    def __getitem__(self, name: str) -> EntryPoint:  # type: ignore[override]
         """
         Get the EntryPoint in self matching name.
         """
@@ -289,22 +267,29 @@ def __getitem__(self, name):  # -> EntryPoint:
         except StopIteration:
             raise KeyError(name)
 
-    def select(self, **params):
+    def __repr__(self):
+        """
+        Repr with classname and tuple constructor to
+        signal that we deviate from regular tuple behavior.
+        """
+        return '%s(%r)' % (self.__class__.__name__, tuple(self))
+
+    def select(self, **params) -> EntryPoints:
         """
         Select entry points from self that match the
         given parameters (typically group and/or name).
         """
-        return EntryPoints(ep for ep in self if _py39compat.ep_matches(ep, **params))
+        return EntryPoints(ep for ep in self if py39.ep_matches(ep, **params))
 
     @property
-    def names(self):
+    def names(self) -> Set[str]:
         """
         Return the set of all names of all entry points.
         """
         return {ep.name for ep in self}
 
     @property
-    def groups(self):
+    def groups(self) -> Set[str]:
         """
         Return the set of all groups of all entry points.
         """
@@ -325,47 +310,72 @@ def _from_text(text):
 class PackagePath(pathlib.PurePosixPath):
     """A reference to a path in a package"""
 
-    def read_text(self, encoding='utf-8'):
-        with self.locate().open(encoding=encoding) as stream:
-            return stream.read()
+    hash: Optional[FileHash]
+    size: int
+    dist: Distribution
+
+    def read_text(self, encoding: str = 'utf-8') -> str:  # type: ignore[override]
+        return self.locate().read_text(encoding=encoding)
 
-    def read_binary(self):
-        with self.locate().open('rb') as stream:
-            return stream.read()
+    def read_binary(self) -> bytes:
+        return self.locate().read_bytes()
 
-    def locate(self):
+    def locate(self) -> SimplePath:
         """Return a path-like object for this path"""
         return self.dist.locate_file(self)
 
 
 class FileHash:
-    def __init__(self, spec):
+    def __init__(self, spec: str) -> None:
         self.mode, _, self.value = spec.partition('=')
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return f'<FileHash mode: {self.mode} value: {self.value}>'
 
 
 class Distribution(metaclass=abc.ABCMeta):
-    """A Python distribution package."""
+    """
+    An abstract Python distribution package.
+
+    Custom providers may derive from this class and define
+    the abstract methods to provide a concrete implementation
+    for their environment. Some providers may opt to override
+    the default implementation of some properties to bypass
+    the file-reading mechanism.
+    """
 
     @abc.abstractmethod
-    def read_text(self, filename):
+    def read_text(self, filename) -> Optional[str]:
         """Attempt to load metadata file given by the name.
 
+        Python distribution metadata is organized by blobs of text
+        typically represented as "files" in the metadata directory
+        (e.g. package-1.0.dist-info). These files include things
+        like:
+
+        - METADATA: The distribution metadata including fields
+          like Name and Version and Description.
+        - entry_points.txt: A series of entry points as defined in
+          `the entry points spec <https://packaging.python.org/en/latest/specifications/entry-points/>`_.
+        - RECORD: A record of files according to
+          `this recording spec <https://packaging.python.org/en/latest/specifications/recording-installed-packages/>`_.
+
+        A package may provide any set of files, including those
+        not listed here or none at all.
+
         :param filename: The name of the file in the distribution info.
         :return: The text if found, otherwise None.
         """
 
     @abc.abstractmethod
-    def locate_file(self, path):
+    def locate_file(self, path: str | os.PathLike[str]) -> SimplePath:
         """
-        Given a path to a file in this distribution, return a path
+        Given a path to a file in this distribution, return a SimplePath
         to it.
         """
 
     @classmethod
-    def from_name(cls, name: str):
+    def from_name(cls, name: str) -> Distribution:
         """Return the Distribution for the given package name.
 
         :param name: The name of the distribution package to search for.
@@ -378,21 +388,23 @@ def from_name(cls, name: str):
         if not name:
             raise ValueError("A distribution name is required.")
         try:
-            return next(cls.discover(name=name))
+            return next(iter(cls.discover(name=name)))
         except StopIteration:
             raise PackageNotFoundError(name)
 
     @classmethod
-    def discover(cls, **kwargs):
+    def discover(
+        cls, *, context: Optional[DistributionFinder.Context] = None, **kwargs
+    ) -> Iterable[Distribution]:
         """Return an iterable of Distribution objects for all packages.
 
         Pass a ``context`` or pass keyword arguments for constructing
         a context.
 
         :context: A ``DistributionFinder.Context`` object.
-        :return: Iterable of Distribution objects for all packages.
+        :return: Iterable of Distribution objects for packages matching
+          the context.
         """
-        context = kwargs.pop('context', None)
         if context and kwargs:
             raise ValueError("cannot accept context and kwargs")
         context = context or DistributionFinder.Context(**kwargs)
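`from_name` now wraps the result in `iter(...)` so `discover` may return any iterable, not only a generator. The public contract is unchanged; a sketch:

    from importlib.metadata import Distribution, PackageNotFoundError

    try:
        dist = Distribution.from_name('setuptools')
        print(dist.metadata['Name'], dist.version)
    except PackageNotFoundError:
        print('setuptools is not installed')
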
@@ -401,8 +413,8 @@ def discover(cls, **kwargs):
         )
 
     @staticmethod
-    def at(path):
-        """Return a Distribution for the indicated metadata path
+    def at(path: str | os.PathLike[str]) -> Distribution:
+        """Return a Distribution for the indicated metadata path.
 
         :param path: a string or path-like object
         :return: a concrete Distribution instance for the path
@@ -411,7 +423,7 @@ def at(path):
 
     @staticmethod
     def _discover_resolvers():
-        """Search the meta_path for resolvers."""
+        """Search the meta_path for resolvers (MetadataPathFinders)."""
         declared = (
             getattr(finder, 'find_distributions', None) for finder in sys.meta_path
         )
@@ -422,9 +434,16 @@ def metadata(self) -> _meta.PackageMetadata:
         """Return the parsed metadata for this Distribution.
 
         The returned object will have keys that name the various bits of
-        metadata.  See PEP 566 for details.
+        metadata per the
+        `Core metadata specifications <https://packaging.python.org/en/latest/specifications/core-metadata/>`_.
+
+        Custom providers may provide the METADATA file or override this
+        property.
         """
-        text = (
+        # deferred for performance (python/cpython#109829)
+        from . import _adapters
+
+        opt_text = (
             self.read_text('METADATA')
             or self.read_text('PKG-INFO')
             # This last clause is here to support old egg-info files.  Its
@@ -432,10 +451,11 @@ def metadata(self) -> _meta.PackageMetadata:
             # (which points to the egg-info file) attribute unchanged.
             or self.read_text('')
         )
+        text = cast(str, opt_text)
         return _adapters.Message(email.message_from_string(text))
 
     @property
-    def name(self):
+    def name(self) -> str:
         """Return the 'Name' metadata for the distribution package."""
         return self.metadata['Name']
 
@@ -445,24 +465,34 @@ def _normalized_name(self):
         return Prepared.normalize(self.name)
 
     @property
-    def version(self):
+    def version(self) -> str:
         """Return the 'Version' metadata for the distribution package."""
         return self.metadata['Version']
 
     @property
-    def entry_points(self):
+    def entry_points(self) -> EntryPoints:
+        """
+        Return EntryPoints for this distribution.
+
+        Custom providers may provide the ``entry_points.txt`` file
+        or override this property.
+        """
         return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self)
 
     @property
-    def files(self):
+    def files(self) -> Optional[List[PackagePath]]:
         """Files in this distribution.
 
         :return: List of PackagePath for this distribution or None
 
         Result is `None` if the metadata file that enumerates files
-        (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
-        missing.
+        (i.e. RECORD for dist-info, or installed-files.txt or
+        SOURCES.txt for egg-info) is missing.
         Result may be empty if the metadata exists but is empty.
+
+        Custom providers are recommended to provide a "RECORD" file (in
+        ``read_text``) or override this property to allow for callers to be
+        able to resolve filenames provided by the package.
         """
 
         def make_file(name, hash=None, size_str=None):
@@ -474,27 +504,75 @@ def make_file(name, hash=None, size_str=None):
 
         @pass_none
         def make_files(lines):
-            return list(starmap(make_file, csv.reader(lines)))
+            # Delay csv import, since Distribution.files is not as widely used
+            # as other parts of importlib.metadata
+            import csv
+
+            return starmap(make_file, csv.reader(lines))
 
-        return make_files(self._read_files_distinfo() or self._read_files_egginfo())
+        @pass_none
+        def skip_missing_files(package_paths):
+            return list(filter(lambda path: path.locate().exists(), package_paths))
+
+        return skip_missing_files(
+            make_files(
+                self._read_files_distinfo()
+                or self._read_files_egginfo_installed()
+                or self._read_files_egginfo_sources()
+            )
+        )
 
     def _read_files_distinfo(self):
         """
-        Read the lines of RECORD
+        Read the lines of RECORD.
         """
         text = self.read_text('RECORD')
         return text and text.splitlines()
 
-    def _read_files_egginfo(self):
+    def _read_files_egginfo_installed(self):
+        """
+        Read installed-files.txt and return lines in a similar
+        CSV-parsable format as RECORD: each file must be placed
+        relative to the site-packages directory and must also be
+        quoted (since file names can contain literal commas).
+
+        This file is written when the package is installed by pip,
+        but it might not be written for other installation methods.
+        Assume the file is accurate if it exists.
+        """
+        text = self.read_text('installed-files.txt')
+        # Prepend the .egg-info/ subdir to the lines in this file.
+        # But this subdir is only available from PathDistribution's
+        # self._path.
+        subdir = getattr(self, '_path', None)
+        if not text or not subdir:
+            return
+
+        paths = (
+            py311.relative_fix((subdir / name).resolve())
+            .relative_to(self.locate_file('').resolve(), walk_up=True)
+            .as_posix()
+            for name in text.splitlines()
+        )
+        return map('"{}"'.format, paths)
+
+    def _read_files_egginfo_sources(self):
         """
-        SOURCES.txt might contain literal commas, so wrap each line
-        in quotes.
+        Read SOURCES.txt and return lines in a similar CSV-parsable
+        format as RECORD: each file name must be quoted (since it
+        might contain literal commas).
+
+        Note that SOURCES.txt is not a reliable source for what
+        files are installed by a package. This file is generated
+        for a source archive, and the files that are present
+        there (e.g. setup.py) may not correctly reflect the files
+        that are present after the package has been installed.
         """
         text = self.read_text('SOURCES.txt')
         return text and map('"{}"'.format, text.splitlines())
 
     @property
-    def requires(self):
+    def requires(self) -> Optional[List[str]]:
         """Generated requirements specified for this Distribution"""
         reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
         return reqs and list(reqs)
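All three sources now funnel through `csv.reader`, which is why the egg-info readers quote every path: quoting keeps a path containing a literal comma in one CSV column, while RECORD rows carry `path,hash,size`. `skip_missing_files` then drops rows whose files no longer exist. The quoting contract in isolation (paths invented):

    import csv

    lines = ['"pkg/weird,name.py"', '"pkg/__init__.py"']
    print(list(csv.reader(lines)))  # [['pkg/weird,name.py'], ['pkg/__init__.py']]
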
@@ -545,10 +623,23 @@ def url_req_space(req):
             space = url_req_space(section.value)
             yield section.value + space + quoted_marker(section.name)
 
+    @property
+    def origin(self):
+        return self._load_json('direct_url.json')
+
+    def _load_json(self, filename):
+        return pass_none(json.loads)(
+            self.read_text(filename),
+            object_hook=lambda data: types.SimpleNamespace(**data),
+        )
+
 
 class DistributionFinder(MetaPathFinder):
     """
     A MetaPathFinder capable of discovering installed distributions.
+
+    Custom providers should implement this interface in order to
+    supply metadata.
     """
 
     class Context:
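`origin` surfaces pip's `direct_url.json` (PEP 610) with attribute access, and the `pass_none` wrapper makes a missing file yield None instead of a JSON error. The `object_hook` trick in isolation (sample document invented):

    import json
    import types

    data = '{"url": "https://example.com/repo.git", "vcs_info": {"vcs": "git"}}'
    origin = json.loads(data, object_hook=lambda d: types.SimpleNamespace(**d))
    print(origin.url, origin.vcs_info.vcs)  # the hook also converts nested dicts
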
@@ -561,6 +652,17 @@ class Context:
         Each DistributionFinder may expect any parameters
         and should attempt to honor the canonical
         parameters defined below when appropriate.
+
+        This mechanism gives a custom provider a means to
+        solicit additional details from the caller beyond
+        "name" and "path" when searching distributions.
+        For example, imagine a provider that exposes suites
+        of packages in either a "public" or "private" ``realm``.
+        A caller may wish to query only for distributions in
+        a particular realm and could call
+        ``distributions(realm="private")`` to signal to the
+        custom provider to only include distributions from that
+        realm.
         """
 
         name = None
@@ -573,7 +675,7 @@ def __init__(self, **kwargs):
             vars(self).update(kwargs)
 
         @property
-        def path(self):
+        def path(self) -> List[str]:
             """
             The sequence of directory paths that a distribution finder
             should search.
@@ -584,7 +686,7 @@ def path(self):
             return vars(self).get('path', sys.path)
 
     @abc.abstractmethod
-    def find_distributions(self, context=Context()):
+    def find_distributions(self, context=Context()) -> Iterable[Distribution]:
         """
         Find distributions.
 
@@ -596,11 +698,18 @@ def find_distributions(self, context=Context()):
 
 class FastPath:
     """
-    Micro-optimized class for searching a path for
-    children.
+    Micro-optimized class for searching a root for children.
+
+    Root is a path on the file system that may contain metadata
+    directories either as natural directories or within a zip file.
 
     >>> FastPath('').children()
     ['...']
+
+    FastPath objects are cached and recycled for any given root.
+
+    >>> FastPath('foobar') is FastPath('foobar')
+    True
     """
 
     @functools.lru_cache()  # type: ignore
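The new doctest holds because `__new__` is wrapped in `lru_cache`, keyed on the root, so equal roots return the recycled instance. The same trick in miniature (class name hypothetical):

    import functools

    class Interned:
        @functools.lru_cache()  # keyed on (cls, root): same root, same object
        def __new__(cls, root):
            return object.__new__(cls)

        def __init__(self, root):
            self.root = root  # re-runs on every call, so it must stay idempotent

    assert Interned('foobar') is Interned('foobar')
    Interned.__new__.cache_clear()  # what invalidate_caches() later does for FastPath
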
@@ -642,7 +751,19 @@ def lookup(self, mtime):
 
 
 class Lookup:
+    """
+    A micro-optimized class for searching a (fast) path for metadata.
+    """
+
     def __init__(self, path: FastPath):
+        """
+        Calculate all of the children representing metadata.
+
+        From the children in the path, calculate early all of the
+        children that appear to represent metadata (infos) or legacy
+        metadata (eggs).
+        """
+
         base = os.path.basename(path.root).lower()
         base_is_egg = base.endswith(".egg")
         self.infos = FreezableDefaultDict(list)
@@ -663,7 +784,10 @@ def __init__(self, path: FastPath):
         self.infos.freeze()
         self.eggs.freeze()
 
-    def search(self, prepared):
+    def search(self, prepared: Prepared):
+        """
+        Yield all infos and eggs matching the Prepared query.
+        """
         infos = (
             self.infos[prepared.normalized]
             if prepared
@@ -679,13 +803,28 @@ def search(self, prepared):
 
 class Prepared:
     """
-    A prepared search for metadata on a possibly-named package.
+    A prepared search query for metadata on a possibly-named package.
+
+    Pre-calculates the normalization to prevent repeated operations.
+
+    >>> none = Prepared(None)
+    >>> none.normalized
+    >>> none.legacy_normalized
+    >>> bool(none)
+    False
+    >>> sample = Prepared('Sample__Pkg-name.foo')
+    >>> sample.normalized
+    'sample_pkg_name_foo'
+    >>> sample.legacy_normalized
+    'sample__pkg_name.foo'
+    >>> bool(sample)
+    True
     """
 
     normalized = None
     legacy_normalized = None
 
-    def __init__(self, name):
+    def __init__(self, name: Optional[str]):
         self.name = name
         if name is None:
             return
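The doctest pins down both normal forms: `normalized` folds runs of `-_.` and lowercases (PEP 503 style) before switching to underscores as found in on-disk metadata directory names, while `legacy_normalized` only lowercases and swaps hyphens. A sketch reproducing the doctest values (bodies paraphrased from the class's normalize helpers, an assumption):

    import re

    def normalize(name):
        return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_')

    def legacy_normalize(name):
        return name.lower().replace('-', '_')

    print(normalize('Sample__Pkg-name.foo'))         # sample_pkg_name_foo
    print(legacy_normalize('Sample__Pkg-name.foo'))  # sample__pkg_name.foo
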
@@ -719,7 +858,10 @@ class MetadataPathFinder(NullFinder, DistributionFinder):
     of Python that do not have a PathFinder find_distributions().
     """
 
-    def find_distributions(self, context=DistributionFinder.Context()):
+    @classmethod
+    def find_distributions(
+        cls, context=DistributionFinder.Context()
+    ) -> Iterable[PathDistribution]:
         """
         Find distributions.
 
@@ -728,7 +870,7 @@ def find_distributions(self, context=DistributionFinder.Context()):
         (or all names if ``None`` indicated) along the paths in the list
         of directories ``context.path``.
         """
-        found = self._search_paths(context.name, context.path)
+        found = cls._search_paths(context.name, context.path)
         return map(PathDistribution, found)
 
     @classmethod
@@ -739,19 +881,20 @@ def _search_paths(cls, name, paths):
             path.search(prepared) for path in map(FastPath, paths)
         )
 
-    def invalidate_caches(cls):
+    @classmethod
+    def invalidate_caches(cls) -> None:
         FastPath.__new__.cache_clear()
 
 
 class PathDistribution(Distribution):
-    def __init__(self, path: SimplePath):
+    def __init__(self, path: SimplePath) -> None:
         """Construct a distribution.
 
         :param path: SimplePath indicating the metadata directory.
         """
         self._path = path
 
-    def read_text(self, filename):
+    def read_text(self, filename: str | os.PathLike[str]) -> Optional[str]:
         with suppress(
             FileNotFoundError,
             IsADirectoryError,
@@ -761,9 +904,11 @@ def read_text(self, filename):
         ):
             return self._path.joinpath(filename).read_text(encoding='utf-8')
 
+        return None
+
     read_text.__doc__ = Distribution.read_text.__doc__
 
-    def locate_file(self, path):
+    def locate_file(self, path: str | os.PathLike[str]) -> SimplePath:
         return self._path.parent / path
 
     @property
@@ -796,7 +941,7 @@ def _name_from_stem(stem):
         return name
 
 
-def distribution(distribution_name):
+def distribution(distribution_name: str) -> Distribution:
     """Get the ``Distribution`` instance for the named package.
 
     :param distribution_name: The name of the distribution package as a string.
@@ -805,7 +950,7 @@ def distribution(distribution_name):
     return Distribution.from_name(distribution_name)
 
 
-def distributions(**kwargs):
+def distributions(**kwargs) -> Iterable[Distribution]:
     """Get all ``Distribution`` instances in the current environment.
 
     :return: An iterable of ``Distribution`` instances.
@@ -813,7 +958,7 @@ def distributions(**kwargs):
     return Distribution.discover(**kwargs)
 
 
-def metadata(distribution_name) -> _meta.PackageMetadata:
+def metadata(distribution_name: str) -> _meta.PackageMetadata:
     """Get the metadata for the named package.
 
     :param distribution_name: The name of the distribution package to query.
@@ -822,7 +967,7 @@ def metadata(distribution_name) -> _meta.PackageMetadata:
     return Distribution.from_name(distribution_name).metadata
 
 
-def version(distribution_name):
+def version(distribution_name: str) -> str:
     """Get the version string for the named package.
 
     :param distribution_name: The name of the distribution package to query.
@@ -834,7 +979,7 @@ def version(distribution_name):
 
 _unique = functools.partial(
     unique_everseen,
-    key=_py39compat.normalized_name,
+    key=py39.normalized_name,
 )
 """
 Wrapper for ``distributions`` to return unique distributions by name.
@@ -856,7 +1001,7 @@ def entry_points(**params) -> EntryPoints:
     return EntryPoints(eps).select(**params)
 
 
-def files(distribution_name):
+def files(distribution_name: str) -> Optional[List[PackagePath]]:
     """Return a list of files for the named package.
 
     :param distribution_name: The name of the distribution package to query.
@@ -865,11 +1010,11 @@ def files(distribution_name):
     return distribution(distribution_name).files
 
 
-def requires(distribution_name):
+def requires(distribution_name: str) -> Optional[List[str]]:
     """
     Return a list of requirements for the named package.
 
-    :return: An iterator of requirements, suitable for
+    :return: An iterable of requirements, suitable for
         packaging.requirement.Requirement.
     """
     return distribution(distribution_name).requires
@@ -896,9 +1041,43 @@ def _top_level_declared(dist):
     return (dist.read_text('top_level.txt') or '').split()
 
 
+def _topmost(name: PackagePath) -> Optional[str]:
+    """
+    Return the top-most parent as long as there is a parent.
+    """
+    top, *rest = name.parts
+    return top if rest else None
+
+
+def _get_toplevel_name(name: PackagePath) -> str:
+    """
+    Infer a possibly importable module name from a name presumed on
+    sys.path.
+
+    >>> _get_toplevel_name(PackagePath('foo.py'))
+    'foo'
+    >>> _get_toplevel_name(PackagePath('foo'))
+    'foo'
+    >>> _get_toplevel_name(PackagePath('foo.pyc'))
+    'foo'
+    >>> _get_toplevel_name(PackagePath('foo/__init__.py'))
+    'foo'
+    >>> _get_toplevel_name(PackagePath('foo.pth'))
+    'foo.pth'
+    >>> _get_toplevel_name(PackagePath('foo.dist-info'))
+    'foo.dist-info'
+    """
+    return _topmost(name) or (
+        # python/typeshed#10328
+        inspect.getmodulename(name)  # type: ignore
+        or str(name)
+    )
+
+
 def _top_level_inferred(dist):
-    return {
-        f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name
-        for f in always_iterable(dist.files)
-        if f.suffix == ".py"
-    }
+    opt_names = set(map(_get_toplevel_name, always_iterable(dist.files)))
+
+    def importable_name(name):
+        return '.' not in name
+
+    return filter(importable_name, opt_names)
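`_get_toplevel_name` leans on `inspect.getmodulename`, which strips only recognized module suffixes; anything else (such as `foo.pth`) falls through to `str(name)` and is later rejected by `importable_name` for containing a dot. The stdlib behavior it relies on:

    import inspect

    print(inspect.getmodulename('foo.py'))   # foo
    print(inspect.getmodulename('foo.pyc'))  # foo
    print(inspect.getmodulename('foo.pth'))  # None: .pth is not a module suffix
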
diff --git a/setuptools/_vendor/importlib_metadata/_adapters.py b/setuptools/_vendor/importlib_metadata/_adapters.py
index e33cba5e44..6223263ed5 100644
--- a/setuptools/_vendor/importlib_metadata/_adapters.py
+++ b/setuptools/_vendor/importlib_metadata/_adapters.py
@@ -1,20 +1,8 @@
-import functools
-import warnings
 import re
 import textwrap
 import email.message
 
 from ._text import FoldedCase
-from ._compat import pypy_partial
-
-
-# Do not remove prior to 2024-01-01 or Python 3.14
-_warn = functools.partial(
-    warnings.warn,
-    "Implicit None on return values is deprecated and will raise KeyErrors.",
-    DeprecationWarning,
-    stacklevel=pypy_partial(2),
-)
 
 
 class Message(email.message.Message):
@@ -53,12 +41,17 @@ def __iter__(self):
 
     def __getitem__(self, item):
         """
-        Warn users that a ``KeyError`` can be expected when a
-        missing key is supplied. Ref python/importlib_metadata#371.
+        Override parent behavior to typical dict behavior.
+
+        ``email.message.Message`` will emit None values for missing
+        keys. Typical mappings, including this ``Message``, will raise
+        a key error for missing keys.
+
+        Ref python/importlib_metadata#371.
         """
         res = super().__getitem__(item)
         if res is None:
-            _warn()
+            raise KeyError(item)
         return res
 
     def _repair_headers(self):
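The adapter thus flips from a deprecation warning to a hard KeyError, making the vendored `Message` behave like a mapping; the stdlib base class is the outlier, answering missing headers with None. The contrast in isolation:

    import email.message

    msg = email.message.Message()
    print(msg['Missing'])      # None: stdlib Message semantics
    print(msg.get('Missing'))  # None: .get remains the spelling for optional lookup
    # The patched subclass instead raises: msg['Missing'] -> KeyError('Missing')
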
diff --git a/setuptools/_vendor/importlib_metadata/_compat.py b/setuptools/_vendor/importlib_metadata/_compat.py
index 84f9eea4f3..df312b1cbb 100644
--- a/setuptools/_vendor/importlib_metadata/_compat.py
+++ b/setuptools/_vendor/importlib_metadata/_compat.py
@@ -2,14 +2,7 @@
 import platform
 
 
-__all__ = ['install', 'NullFinder', 'Protocol']
-
-
-try:
-    from typing import Protocol
-except ImportError:  # pragma: no cover
-    # Python 3.7 compatibility
-    from ..typing_extensions import Protocol  # type: ignore
+__all__ = ['install', 'NullFinder']
 
 
 def install(cls):
@@ -45,7 +38,7 @@ def matches(finder):
 
 class NullFinder:
     """
-    A "Finder" (aka "MetaClassFinder") that never finds any modules,
+    A "Finder" (aka "MetaPathFinder") that never finds any modules,
     but may find distributions.
     """
 
@@ -53,14 +46,6 @@ class NullFinder:
     def find_spec(*args, **kwargs):
         return None
 
-    # In Python 2, the import system requires finders
-    # to have a find_module() method, but this usage
-    # is deprecated in Python 3 in favor of find_spec().
-    # For the purposes of this finder (i.e. being present
-    # on sys.meta_path but having no other import
-    # system functionality), the two methods are identical.
-    find_module = find_spec
-
 
 def pypy_partial(val):
     """
diff --git a/setuptools/_vendor/importlib_metadata/_meta.py b/setuptools/_vendor/importlib_metadata/_meta.py
index 259b15ba19..1927d0f624 100644
--- a/setuptools/_vendor/importlib_metadata/_meta.py
+++ b/setuptools/_vendor/importlib_metadata/_meta.py
@@ -1,24 +1,38 @@
-from ._compat import Protocol
-from typing import Any, Dict, Iterator, List, TypeVar, Union
+from __future__ import annotations
+
+import os
+from typing import Protocol
+from typing import Any, Dict, Iterator, List, Optional, TypeVar, Union, overload
 
 
 _T = TypeVar("_T")
 
 
 class PackageMetadata(Protocol):
-    def __len__(self) -> int:
-        ...  # pragma: no cover
+    def __len__(self) -> int: ...  # pragma: no cover
+
+    def __contains__(self, item: str) -> bool: ...  # pragma: no cover
+
+    def __getitem__(self, key: str) -> str: ...  # pragma: no cover
 
-    def __contains__(self, item: str) -> bool:
-        ...  # pragma: no cover
+    def __iter__(self) -> Iterator[str]: ...  # pragma: no cover
 
-    def __getitem__(self, key: str) -> str:
-        ...  # pragma: no cover
+    @overload
+    def get(
+        self, name: str, failobj: None = None
+    ) -> Optional[str]: ...  # pragma: no cover
 
-    def __iter__(self) -> Iterator[str]:
-        ...  # pragma: no cover
+    @overload
+    def get(self, name: str, failobj: _T) -> Union[str, _T]: ...  # pragma: no cover
 
-    def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
+    # overload per python/importlib_metadata#435
+    @overload
+    def get_all(
+        self, name: str, failobj: None = None
+    ) -> Optional[List[Any]]: ...  # pragma: no cover
+
+    @overload
+    def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]:
         """
         Return all values associated with a possibly multi-valued key.
         """
@@ -30,20 +44,24 @@ def json(self) -> Dict[str, Union[str, List[str]]]:
         """
 
 
-class SimplePath(Protocol[_T]):
+class SimplePath(Protocol):
     """
-    A minimal subset of pathlib.Path required by PathDistribution.
+    A minimal subset of pathlib.Path required by Distribution.
     """
 
-    def joinpath(self) -> _T:
-        ...  # pragma: no cover
+    def joinpath(
+        self, other: Union[str, os.PathLike[str]]
+    ) -> SimplePath: ...  # pragma: no cover
 
-    def __truediv__(self, other: Union[str, _T]) -> _T:
-        ...  # pragma: no cover
+    def __truediv__(
+        self, other: Union[str, os.PathLike[str]]
+    ) -> SimplePath: ...  # pragma: no cover
 
     @property
-    def parent(self) -> _T:
-        ...  # pragma: no cover
+    def parent(self) -> SimplePath: ...  # pragma: no cover
+
+    def read_text(self, encoding=None) -> str: ...  # pragma: no cover
+
+    def read_bytes(self) -> bytes: ...  # pragma: no cover
 
-    def read_text(self) -> str:
-        ...  # pragma: no cover
+    def exists(self) -> bool: ...  # pragma: no cover
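`SimplePath` (like `PackageMetadata`) is a structural `Protocol`: anything with these members qualifies, so `pathlib.Path` satisfies it without inheriting from it. A minimal sketch of structural checking (the mini-protocol here is hypothetical and narrower than `SimplePath`):

    import pathlib
    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class Readable(Protocol):
        def read_text(self, encoding=None) -> str: ...
        def read_bytes(self) -> bytes: ...

    # Structural match: pathlib.Path never subclasses Readable.
    assert isinstance(pathlib.Path('x'), Readable)
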
diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py b/setuptools/_vendor/importlib_metadata/compat/__init__.py
similarity index 100%
rename from setuptools/_vendor/importlib_resources/tests/zipdata01/__init__.py
rename to setuptools/_vendor/importlib_metadata/compat/__init__.py
diff --git a/setuptools/_vendor/importlib_metadata/compat/py311.py b/setuptools/_vendor/importlib_metadata/compat/py311.py
new file mode 100644
index 0000000000..3a5327436f
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/compat/py311.py
@@ -0,0 +1,22 @@
+import os
+import pathlib
+import sys
+import types
+
+
+def wrap(path):  # pragma: no cover
+    """
+    Workaround for https://github.com/python/cpython/issues/84538
+    to add backward compatibility for walk_up=True.
+    An example affected package is dask-labextension, which uses
+    jupyter-packaging to install JupyterLab javascript files outside
+    of site-packages.
+    """
+
+    def relative_to(root, *, walk_up=False):
+        return pathlib.Path(os.path.relpath(path, root))
+
+    return types.SimpleNamespace(relative_to=relative_to)
+
+
+relative_fix = wrap if sys.version_info < (3, 12) else lambda x: x
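The shim emulates the 3.12 `walk_up=True` behavior (ascending via `..`) with `os.path.relpath`; older `relative_to` would raise for a path outside the root. Expected behavior, sketched on POSIX paths:

    import os
    import pathlib

    path = pathlib.Path('/srv/data/file.txt')
    root = pathlib.Path('/srv/other')

    # 3.12+: path.relative_to(root, walk_up=True) -> PosixPath('../data/file.txt')
    # The wrap() fallback computes the same result:
    print(pathlib.Path(os.path.relpath(path, root)))  # ../data/file.txt
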
diff --git a/setuptools/_vendor/importlib_metadata/_py39compat.py b/setuptools/_vendor/importlib_metadata/compat/py39.py
similarity index 82%
rename from setuptools/_vendor/importlib_metadata/_py39compat.py
rename to setuptools/_vendor/importlib_metadata/compat/py39.py
index cde4558fbb..1f15bd97e6 100644
--- a/setuptools/_vendor/importlib_metadata/_py39compat.py
+++ b/setuptools/_vendor/importlib_metadata/compat/py39.py
@@ -1,11 +1,12 @@
 """
 Compatibility layer with Python 3.8/3.9
 """
+
 from typing import TYPE_CHECKING, Any, Optional
 
 if TYPE_CHECKING:  # pragma: no cover
     # Prevent circular imports on runtime.
-    from . import Distribution, EntryPoint
+    from .. import Distribution, EntryPoint
 else:
     Distribution = EntryPoint = Any
 
@@ -17,7 +18,7 @@ def normalized_name(dist: Distribution) -> Optional[str]:
     try:
         return dist._normalized_name
     except AttributeError:
-        from . import Prepared  # -> delay to prevent circular imports.
+        from .. import Prepared  # -> delay to prevent circular imports.
 
         return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name'])
 
@@ -29,7 +30,7 @@ def ep_matches(ep: EntryPoint, **params) -> bool:
     try:
         return ep.matches(**params)
     except AttributeError:
-        from . import EntryPoint  # -> delay to prevent circular imports.
+        from .. import EntryPoint  # -> delay to prevent circular imports.
 
         # Reconstruct the EntryPoint object to make sure it is compatible.
         return EntryPoint(ep.name, ep.value, ep.group).matches(**params)
diff --git a/setuptools/_vendor/importlib_metadata/diagnose.py b/setuptools/_vendor/importlib_metadata/diagnose.py
new file mode 100644
index 0000000000..e405471ac4
--- /dev/null
+++ b/setuptools/_vendor/importlib_metadata/diagnose.py
@@ -0,0 +1,21 @@
+import sys
+
+from . import Distribution
+
+
+def inspect(path):
+    print("Inspecting", path)
+    dists = list(Distribution.discover(path=[path]))
+    if not dists:
+        return
+    print("Found", len(dists), "packages:", end=' ')
+    print(', '.join(dist.name for dist in dists))
+
+
+def run():
+    for path in sys.path:
+        inspect(path)
+
+
+if __name__ == '__main__':
+    run()
diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD b/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD
deleted file mode 100644
index ba764991ee..0000000000
--- a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/RECORD
+++ /dev/null
@@ -1,77 +0,0 @@
-importlib_resources-5.10.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-importlib_resources-5.10.2.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-importlib_resources-5.10.2.dist-info/METADATA,sha256=Xo5ntATvDYUxdmW8tr8kxtfdiOC9889mOk-LE1LtZfI,4111
-importlib_resources-5.10.2.dist-info/RECORD,,
-importlib_resources-5.10.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources-5.10.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-importlib_resources-5.10.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
-importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506
-importlib_resources/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/__pycache__/_adapters.cpython-312.pyc,,
-importlib_resources/__pycache__/_common.cpython-312.pyc,,
-importlib_resources/__pycache__/_compat.cpython-312.pyc,,
-importlib_resources/__pycache__/_itertools.cpython-312.pyc,,
-importlib_resources/__pycache__/_legacy.cpython-312.pyc,,
-importlib_resources/__pycache__/abc.cpython-312.pyc,,
-importlib_resources/__pycache__/readers.cpython-312.pyc,,
-importlib_resources/__pycache__/simple.cpython-312.pyc,,
-importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
-importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457
-importlib_resources/_compat.py,sha256=dSadF6WPt8MwOqSm_NIOQPhw4x0iaMYTWxi-XS93p7M,2923
-importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884
-importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481
-importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140
-importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581
-importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576
-importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/_compat.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/_path.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/update-zips.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/util.cpython-312.pyc,,
-importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708
-importlib_resources/tests/_path.py,sha256=yZyWsQzJZQ1Z8ARAxWkjAdaVVsjlzyqxO0qjBUofJ8M,1039
-importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
-importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
-importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
-importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
-importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
-importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
-importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260
-importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968
-importlib_resources/tests/test_files.py,sha256=1Y8da-g0xOQLzuREDYUiRc_qhWlvFNeydW_mUH7l15w,3251
-importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565
-importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103
-importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408
-importlib_resources/tests/test_reader.py,sha256=nPhldbYPq3fXoQs0ZAub4atjhp2lgNyLNv2G1pg6Agw,4480
-importlib_resources/tests/test_resource.py,sha256=EMoarxTEHcrq8R41LQDsndIG8Idtm4I_LpN8DYpHtT0,8478
-importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417
-importlib_resources/tests/util.py,sha256=ARAlxZ47wC-lgR7PGlmgBoi4HnhzcykD5Is2-TAwY0I,4873
-importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876
-importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
rename to setuptools/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/LICENSE b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/importlib_resources-5.10.2.dist-info/LICENSE
rename to setuptools/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/METADATA b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/METADATA
similarity index 67%
rename from setuptools/_vendor/importlib_resources-5.10.2.dist-info/METADATA
rename to setuptools/_vendor/importlib_resources-6.4.0.dist-info/METADATA
index a9995f09a3..b088e721d2 100644
--- a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/METADATA
+++ b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
-Name: importlib-resources
-Version: 5.10.2
+Name: importlib_resources
+Version: 6.4.0
 Summary: Read resources from Python packages
 Home-page: https://github.com/python/importlib_resources
 Author: Barry Warsaw
@@ -11,43 +11,44 @@ Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.7
+Requires-Python: >=3.8
 License-File: LICENSE
-Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10"
+Requires-Dist: zipp >=3.1.0 ; python_version < "3.10"
 Provides-Extra: docs
-Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
 Requires-Dist: furo ; extra == 'docs'
 Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
 Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: flake8 (<5) ; extra == 'testing'
+Requires-Dist: pytest >=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
+Requires-Dist: zipp >=3.17 ; extra == 'testing'
+Requires-Dist: jaraco.test >=5.4 ; extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/importlib_resources.svg
    :target: https://pypi.org/project/importlib_resources
 
 .. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg
 
-.. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg
+.. image:: https://github.com/python/importlib_resources/actions/workflows/main.yml/badge.svg
    :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22
    :alt: tests
 
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
 
 .. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest
    :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest
 
-.. image:: https://img.shields.io/badge/skeleton-2022-informational
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
    :target: https://blog.jaraco.com/skeleton
 
 .. image:: https://tidelift.com/badges/package/pypi/importlib-resources
@@ -76,7 +77,9 @@ were contributed to different versions in the standard library:
 
    * - importlib_resources
      - stdlib
-   * - 5.9
+   * - 6.0
+     - 3.13
+   * - 5.12
      - 3.12
    * - 5.7
      - 3.11
@@ -95,10 +98,3 @@ Available as part of the Tidelift Subscription.
 This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
 
 `Learn more `_.
-
-Security Contact
-================
-
-To report a security vulnerability, please use the
-`Tidelift security contact `_.
-Tidelift will coordinate the fix and disclosure.
diff --git a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/RECORD b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/RECORD
new file mode 100644
index 0000000000..18888dea71
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/RECORD
@@ -0,0 +1,89 @@
+importlib_resources-6.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+importlib_resources-6.4.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+importlib_resources-6.4.0.dist-info/METADATA,sha256=g4eM2LuL0OiZcUVND0qwDJUpE29gOvtO3BSPXTbO9Fk,3944
+importlib_resources-6.4.0.dist-info/RECORD,,
+importlib_resources-6.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources-6.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+importlib_resources-6.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
+importlib_resources/__init__.py,sha256=uyp1kzYR6SawQBsqlyaXXfIxJx4Z2mM8MjmZn8qq2Gk,505
+importlib_resources/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/__pycache__/_adapters.cpython-312.pyc,,
+importlib_resources/__pycache__/_common.cpython-312.pyc,,
+importlib_resources/__pycache__/_itertools.cpython-312.pyc,,
+importlib_resources/__pycache__/abc.cpython-312.pyc,,
+importlib_resources/__pycache__/functional.cpython-312.pyc,,
+importlib_resources/__pycache__/readers.cpython-312.pyc,,
+importlib_resources/__pycache__/simple.cpython-312.pyc,,
+importlib_resources/_adapters.py,sha256=vprJGbUeHbajX6XCuMP6J3lMrqCi-P_MTlziJUR7jfk,4482
+importlib_resources/_common.py,sha256=blt4-ZtHnbUPzQQyPP7jLGgl_86btIW5ZhIsEhclhoA,5571
+importlib_resources/_itertools.py,sha256=eDisV6RqiNZOogLSXf6LOGHOYc79FGgPrKNLzFLmCrU,1277
+importlib_resources/abc.py,sha256=UKNU9ncEDkZRB3txcGb3WLxsL2iju9JbaLTI-dfLE_4,5162
+importlib_resources/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/compat/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/compat/__pycache__/py38.cpython-312.pyc,,
+importlib_resources/compat/__pycache__/py39.cpython-312.pyc,,
+importlib_resources/compat/py38.py,sha256=MWhut3XsAJwBYUaa5Qb2AoCrZNqcQjVThP-P1uBoE_4,230
+importlib_resources/compat/py39.py,sha256=Wfln4uQUShNz1XdCG-toG6_Y0WrlUmO9JzpvtcfQ-Cw,184
+importlib_resources/functional.py,sha256=mLU4DwSlh8_2IXWqwKOfPVxyRqAEpB3B4XTfRxr3X3M,2651
+importlib_resources/future/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/future/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/future/__pycache__/adapters.cpython-312.pyc,,
+importlib_resources/future/adapters.py,sha256=1-MF2VRcCButhcC1OMfZILU9o3kwZ4nXB2lurXpaIAw,2940
+importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/readers.py,sha256=WNKurBHHVu9EVtUhWkOj2fxH50HP7uanNFuupAqH2S8,5863
+importlib_resources/simple.py,sha256=CQ3TiIMFiJs_80o-7xJL1EpbUUVna4-NGDrSTQ3HW2Y,2584
+importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/_path.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_custom.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_functional.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/util.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/zip.cpython-312.pyc,,
+importlib_resources/tests/_path.py,sha256=nkv3ek7D1U898v921rYbldDCtKri2oyYOi3EJqGjEGU,1289
+importlib_resources/tests/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/compat/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/compat/__pycache__/py312.cpython-312.pyc,,
+importlib_resources/tests/compat/__pycache__/py39.cpython-312.pyc,,
+importlib_resources/tests/compat/py312.py,sha256=qcWjpZhQo2oEsdwIlRRQHrsMGDltkFTnETeG7fLdUS8,364
+importlib_resources/tests/compat/py39.py,sha256=lRTk0RWAOEb9RzAgvdRnqJUGCBLc3qoFQwzuJSa_zP4,329
+importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data01/subdirectory/binary.file,sha256=xtRM9Bj2EOP-nh2SlP9D3vgcbNytbLsYIM_0jTqkNV0,4
+importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
+importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt,sha256=jnrBBztxYrtQck7cmVnc4xQVO4-agzAZDGSFkAWtlFw,10
+importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
+importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/namespacedata01/subdirectory/binary.file,sha256=cbkhEL8TXIVYHIoSj2oZwPasp1KwxskeNXGJnPCbFF0,4
+importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/test_compatibilty_files.py,sha256=95N_R7aik8cvnE6sBJpsxmP0K5plOWRIJDgbalD-Hpw,3314
+importlib_resources/tests/test_contents.py,sha256=70HW3mL_hv05Emv-OgdmwoLhXxjtuVxiWVaUpgRaRWA,930
+importlib_resources/tests/test_custom.py,sha256=QrHZqIWl0e-fsQRfm0ych8stOlKJOsAIU3rK6QOcyN0,1221
+importlib_resources/tests/test_files.py,sha256=OcShYu33kCcyXlDyZSVPkJNE08h-N_4bQOLV2QaSqX0,3472
+importlib_resources/tests/test_functional.py,sha256=ByCVViAwb2PIlKvDNJEqTZ0aLZGpFl5qa7CMCX-7HKM,8591
+importlib_resources/tests/test_open.py,sha256=ccmzbOeEa6zTd4ymZZ8yISrecfuYV0jhon-Vddqysu4,2778
+importlib_resources/tests/test_path.py,sha256=x8r2gJxG3hFM9xCOFNkgmHYXxsMldMLTSW_AZYf1l-A,2009
+importlib_resources/tests/test_read.py,sha256=7tsILQ2NoqVGFQxhHqxBwc5hWcN8b_3idojCsszTNfQ,3112
+importlib_resources/tests/test_reader.py,sha256=IcIUXaiPAtuahGV4_ZT4YXFLMMsJmcM1iOxqdIH2Aa4,5001
+importlib_resources/tests/test_resource.py,sha256=fcF8WgZ6rDCTRFnxtAUbdiaNe4G23yGovT1nb2dc7ls,7823
+importlib_resources/tests/util.py,sha256=vjVzEyX0X2RkTN-wGiQiplayp9sZom4JDjJinTNewos,4745
+importlib_resources/tests/zip.py,sha256=2MKmF8-osXBJSnqcUTuAUek_-tSB3iKmIT9qPhcsOsM,783
diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED
similarity index 100%
rename from setuptools/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED
rename to setuptools/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
similarity index 83%
rename from setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL
rename to setuptools/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
index 832be11132..bab98d6758 100644
--- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/WHEEL
+++ b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
@@ -1,6 +1,5 @@
 Wheel-Version: 1.0
 Generator: bdist_wheel (0.43.0)
 Root-Is-Purelib: true
-Tag: py2-none-any
 Tag: py3-none-any
 
diff --git a/setuptools/_vendor/importlib_resources-5.10.2.dist-info/top_level.txt b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/importlib_resources-5.10.2.dist-info/top_level.txt
rename to setuptools/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
diff --git a/setuptools/_vendor/importlib_resources/__init__.py b/setuptools/_vendor/importlib_resources/__init__.py
index 34e3a9950c..0d029abd63 100644
--- a/setuptools/_vendor/importlib_resources/__init__.py
+++ b/setuptools/_vendor/importlib_resources/__init__.py
@@ -4,17 +4,17 @@
     as_file,
     files,
     Package,
+    Anchor,
 )
 
-from ._legacy import (
+from .functional import (
     contents,
+    is_resource,
     open_binary,
-    read_binary,
     open_text,
-    read_text,
-    is_resource,
     path,
-    Resource,
+    read_binary,
+    read_text,
 )
 
 from .abc import ResourceReader
@@ -22,11 +22,11 @@
 
 __all__ = [
     'Package',
-    'Resource',
+    'Anchor',
     'ResourceReader',
     'as_file',
-    'contents',
     'files',
+    'contents',
     'is_resource',
     'open_binary',
     'open_text',
diff --git a/setuptools/_vendor/importlib_resources/_adapters.py b/setuptools/_vendor/importlib_resources/_adapters.py
index ea363d86a5..50688fbb66 100644
--- a/setuptools/_vendor/importlib_resources/_adapters.py
+++ b/setuptools/_vendor/importlib_resources/_adapters.py
@@ -34,9 +34,7 @@ def _io_wrapper(file, mode='r', *args, **kwargs):
         return TextIOWrapper(file, *args, **kwargs)
     elif mode == 'rb':
         return file
-    raise ValueError(
-        "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
-    )
+    raise ValueError(f"Invalid mode value '{mode}', only 'r' and 'rb' are supported")
 
 
 class CompatibilityFiles:
diff --git a/setuptools/_vendor/importlib_resources/_common.py b/setuptools/_vendor/importlib_resources/_common.py
index 3c6de1cfb2..8df6b39e41 100644
--- a/setuptools/_vendor/importlib_resources/_common.py
+++ b/setuptools/_vendor/importlib_resources/_common.py
@@ -12,8 +12,6 @@
 from typing import Union, Optional, cast
 from .abc import ResourceReader, Traversable
 
-from ._compat import wrap_spec
-
 Package = Union[types.ModuleType, str]
 Anchor = Package
 
@@ -27,6 +25,8 @@ def package_to_anchor(func):
     >>> files('a', 'b')
     Traceback (most recent call last):
     TypeError: files() takes from 0 to 1 positional arguments but 2 were given
+
+    Remove this compatibility in Python 3.14.
     """
     undefined = object()
 
@@ -109,6 +109,9 @@ def from_package(package: types.ModuleType):
     Return a Traversable object for the given package.
 
     """
+    # deferred for performance (python/cpython#109829)
+    from .future.adapters import wrap_spec
+
     spec = wrap_spec(package)
     reader = spec.loader.get_resource_reader(spec.name)
     return reader.files()
diff --git a/setuptools/_vendor/importlib_resources/_compat.py b/setuptools/_vendor/importlib_resources/_compat.py
deleted file mode 100644
index 8b5b1d280f..0000000000
--- a/setuptools/_vendor/importlib_resources/_compat.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# flake8: noqa
-
-import abc
-import os
-import sys
-import pathlib
-from contextlib import suppress
-from typing import Union
-
-
-if sys.version_info >= (3, 10):
-    from zipfile import Path as ZipPath  # type: ignore
-else:
-    from ..zipp import Path as ZipPath  # type: ignore
-
-
-try:
-    from typing import runtime_checkable  # type: ignore
-except ImportError:
-
-    def runtime_checkable(cls):  # type: ignore
-        return cls
-
-
-try:
-    from typing import Protocol  # type: ignore
-except ImportError:
-    Protocol = abc.ABC  # type: ignore
-
-
-class TraversableResourcesLoader:
-    """
-    Adapt loaders to provide TraversableResources and other
-    compatibility.
-
-    Used primarily for Python 3.9 and earlier where the native
-    loaders do not yet implement TraversableResources.
-    """
-
-    def __init__(self, spec):
-        self.spec = spec
-
-    @property
-    def path(self):
-        return self.spec.origin
-
-    def get_resource_reader(self, name):
-        from . import readers, _adapters
-
-        def _zip_reader(spec):
-            with suppress(AttributeError):
-                return readers.ZipReader(spec.loader, spec.name)
-
-        def _namespace_reader(spec):
-            with suppress(AttributeError, ValueError):
-                return readers.NamespaceReader(spec.submodule_search_locations)
-
-        def _available_reader(spec):
-            with suppress(AttributeError):
-                return spec.loader.get_resource_reader(spec.name)
-
-        def _native_reader(spec):
-            reader = _available_reader(spec)
-            return reader if hasattr(reader, 'files') else None
-
-        def _file_reader(spec):
-            try:
-                path = pathlib.Path(self.path)
-            except TypeError:
-                return None
-            if path.exists():
-                return readers.FileReader(self)
-
-        return (
-            # native reader if it supplies 'files'
-            _native_reader(self.spec)
-            or
-            # local ZipReader if a zip module
-            _zip_reader(self.spec)
-            or
-            # local NamespaceReader if a namespace module
-            _namespace_reader(self.spec)
-            or
-            # local FileReader
-            _file_reader(self.spec)
-            # fallback - adapt the spec ResourceReader to TraversableReader
-            or _adapters.CompatibilityFiles(self.spec)
-        )
-
-
-def wrap_spec(package):
-    """
-    Construct a package spec with traversable compatibility
-    on the spec/loader/reader.
-
-    Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
-    from above for older Python compatibility (<3.10).
-    """
-    from . import _adapters
-
-    return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
-
-
-if sys.version_info >= (3, 9):
-    StrPath = Union[str, os.PathLike[str]]
-else:
-    # PathLike is only subscriptable at runtime in 3.9+
-    StrPath = Union[str, "os.PathLike[str]"]
diff --git a/setuptools/_vendor/importlib_resources/_itertools.py b/setuptools/_vendor/importlib_resources/_itertools.py
index cce05582ff..7b775ef5ae 100644
--- a/setuptools/_vendor/importlib_resources/_itertools.py
+++ b/setuptools/_vendor/importlib_resources/_itertools.py
@@ -1,35 +1,38 @@
-from itertools import filterfalse
+# from more_itertools 9.0
+def only(iterable, default=None, too_long=None):
+    """If *iterable* has only one item, return it.
+    If it has zero items, return *default*.
+    If it has more than one item, raise the exception given by *too_long*,
+    which is ``ValueError`` by default.
+    >>> only([], default='missing')
+    'missing'
+    >>> only([1])
+    1
+    >>> only([1, 2])  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    ...
+    ValueError: Expected exactly one item in iterable, but got 1, 2,
+     and perhaps more.
+    >>> only([1, 2], too_long=TypeError)  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    ...
+    TypeError
+    Note that :func:`only` attempts to advance *iterable* twice to ensure there
+    is only one item.  See :func:`spy` or :func:`peekable` to check
+    iterable contents less destructively.
+    """
+    it = iter(iterable)
+    first_value = next(it, default)
 
-from typing import (
-    Callable,
-    Iterable,
-    Iterator,
-    Optional,
-    Set,
-    TypeVar,
-    Union,
-)
-
-# Type and type variable definitions
-_T = TypeVar('_T')
-_U = TypeVar('_U')
-
-
-def unique_everseen(
-    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
-) -> Iterator[_T]:
-    "List unique elements, preserving order. Remember all elements ever seen."
-    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
-    # unique_everseen('ABBCcAD', str.lower) --> A B C D
-    seen: Set[Union[_T, _U]] = set()
-    seen_add = seen.add
-    if key is None:
-        for element in filterfalse(seen.__contains__, iterable):
-            seen_add(element)
-            yield element
+    try:
+        second_value = next(it)
+    except StopIteration:
+        pass
     else:
-        for element in iterable:
-            k = key(element)
-            if k not in seen:
-                seen_add(k)
-                yield element
+        msg = (
+            'Expected exactly one item in iterable, but got {!r}, {!r}, '
+            'and perhaps more.'.format(first_value, second_value)
+        )
+        raise too_long or ValueError(msg)
+
+    return first_value
diff --git a/setuptools/_vendor/importlib_resources/_legacy.py b/setuptools/_vendor/importlib_resources/_legacy.py
deleted file mode 100644
index b1ea8105da..0000000000
--- a/setuptools/_vendor/importlib_resources/_legacy.py
+++ /dev/null
@@ -1,120 +0,0 @@
-import functools
-import os
-import pathlib
-import types
-import warnings
-
-from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any
-
-from . import _common
-
-Package = Union[types.ModuleType, str]
-Resource = str
-
-
-def deprecated(func):
-    @functools.wraps(func)
-    def wrapper(*args, **kwargs):
-        warnings.warn(
-            f"{func.__name__} is deprecated. Use files() instead. "
-            "Refer to https://importlib-resources.readthedocs.io"
-            "/en/latest/using.html#migrating-from-legacy for migration advice.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return func(*args, **kwargs)
-
-    return wrapper
-
-
-def normalize_path(path: Any) -> str:
-    """Normalize a path by ensuring it is a string.
-
-    If the resulting string contains path separators, an exception is raised.
-    """
-    str_path = str(path)
-    parent, file_name = os.path.split(str_path)
-    if parent:
-        raise ValueError(f'{path!r} must be only a file name')
-    return file_name
-
-
-@deprecated
-def open_binary(package: Package, resource: Resource) -> BinaryIO:
-    """Return a file-like object opened for binary reading of the resource."""
-    return (_common.files(package) / normalize_path(resource)).open('rb')
-
-
-@deprecated
-def read_binary(package: Package, resource: Resource) -> bytes:
-    """Return the binary contents of the resource."""
-    return (_common.files(package) / normalize_path(resource)).read_bytes()
-
-
-@deprecated
-def open_text(
-    package: Package,
-    resource: Resource,
-    encoding: str = 'utf-8',
-    errors: str = 'strict',
-) -> TextIO:
-    """Return a file-like object opened for text reading of the resource."""
-    return (_common.files(package) / normalize_path(resource)).open(
-        'r', encoding=encoding, errors=errors
-    )
-
-
-@deprecated
-def read_text(
-    package: Package,
-    resource: Resource,
-    encoding: str = 'utf-8',
-    errors: str = 'strict',
-) -> str:
-    """Return the decoded string of the resource.
-
-    The decoding-related arguments have the same semantics as those of
-    bytes.decode().
-    """
-    with open_text(package, resource, encoding, errors) as fp:
-        return fp.read()
-
-
-@deprecated
-def contents(package: Package) -> Iterable[str]:
-    """Return an iterable of entries in `package`.
-
-    Note that not all entries are resources.  Specifically, directories are
-    not considered resources.  Use `is_resource()` on each entry returned here
-    to check if it is a resource or not.
-    """
-    return [path.name for path in _common.files(package).iterdir()]
-
-
-@deprecated
-def is_resource(package: Package, name: str) -> bool:
-    """True if `name` is a resource inside `package`.
-
-    Directories are *not* resources.
-    """
-    resource = normalize_path(name)
-    return any(
-        traversable.name == resource and traversable.is_file()
-        for traversable in _common.files(package).iterdir()
-    )
-
-
-@deprecated
-def path(
-    package: Package,
-    resource: Resource,
-) -> ContextManager[pathlib.Path]:
-    """A context manager providing a file path object to the resource.
-
-    If the resource does not already exist on its own on the file system,
-    a temporary file will be created. If the file was created, the file
-    will be deleted upon exiting the context manager (no exception is
-    raised if the file was deleted prior to the context manager
-    exiting).
-    """
-    return _common.as_file(_common.files(package) / normalize_path(resource))
diff --git a/setuptools/_vendor/importlib_resources/abc.py b/setuptools/_vendor/importlib_resources/abc.py
index 23b6aeafe4..7a58dd2f96 100644
--- a/setuptools/_vendor/importlib_resources/abc.py
+++ b/setuptools/_vendor/importlib_resources/abc.py
@@ -3,8 +3,9 @@
 import itertools
 import pathlib
 from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional
+from typing import runtime_checkable, Protocol
 
-from ._compat import runtime_checkable, Protocol, StrPath
+from .compat.py38 import StrPath
 
 
 __all__ = ["ResourceReader", "Traversable", "TraversableResources"]
diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py b/setuptools/_vendor/importlib_resources/compat/__init__.py
similarity index 100%
rename from setuptools/_vendor/importlib_resources/tests/zipdata02/__init__.py
rename to setuptools/_vendor/importlib_resources/compat/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/compat/py38.py b/setuptools/_vendor/importlib_resources/compat/py38.py
new file mode 100644
index 0000000000..4d548257f8
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/compat/py38.py
@@ -0,0 +1,11 @@
+import os
+import sys
+
+from typing import Union
+
+
+if sys.version_info >= (3, 9):
+    StrPath = Union[str, os.PathLike[str]]
+else:
+    # PathLike is only subscriptable at runtime in 3.9+
+    StrPath = Union[str, "os.PathLike[str]"]
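
A brief, hedged illustration of how the `StrPath` alias is consumed; `abc.py` (diffed above) imports it for `Traversable` annotations. The helper below is hypothetical:

    import pathlib
    from importlib_resources.compat.py38 import StrPath

    def to_path(candidate: StrPath) -> pathlib.Path:
        # Accepts str or os.PathLike[str] on every supported Python.
        return pathlib.Path(candidate)
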
diff --git a/setuptools/_vendor/importlib_resources/compat/py39.py b/setuptools/_vendor/importlib_resources/compat/py39.py
new file mode 100644
index 0000000000..ab87b9dc14
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/compat/py39.py
@@ -0,0 +1,10 @@
+import sys
+
+
+__all__ = ['ZipPath']
+
+
+if sys.version_info >= (3, 10):
+    from zipfile import Path as ZipPath  # type: ignore
+else:
+    from zipp import Path as ZipPath  # type: ignore
diff --git a/setuptools/_vendor/importlib_resources/functional.py b/setuptools/_vendor/importlib_resources/functional.py
new file mode 100644
index 0000000000..f59416f2dd
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/functional.py
@@ -0,0 +1,81 @@
+"""Simplified function-based API for importlib.resources"""
+
+import warnings
+
+from ._common import files, as_file
+
+
+_MISSING = object()
+
+
+def open_binary(anchor, *path_names):
+    """Open for binary reading the *resource* within *package*."""
+    return _get_resource(anchor, path_names).open('rb')
+
+
+def open_text(anchor, *path_names, encoding=_MISSING, errors='strict'):
+    """Open for text reading the *resource* within *package*."""
+    encoding = _get_encoding_arg(path_names, encoding)
+    resource = _get_resource(anchor, path_names)
+    return resource.open('r', encoding=encoding, errors=errors)
+
+
+def read_binary(anchor, *path_names):
+    """Read and return contents of *resource* within *package* as bytes."""
+    return _get_resource(anchor, path_names).read_bytes()
+
+
+def read_text(anchor, *path_names, encoding=_MISSING, errors='strict'):
+    """Read and return contents of *resource* within *package* as str."""
+    encoding = _get_encoding_arg(path_names, encoding)
+    resource = _get_resource(anchor, path_names)
+    return resource.read_text(encoding=encoding, errors=errors)
+
+
+def path(anchor, *path_names):
+    """Return the path to the *resource* as an actual file system path."""
+    return as_file(_get_resource(anchor, path_names))
+
+
+def is_resource(anchor, *path_names):
+    """Return ``True`` if there is a resource named *name* in the package,
+
+    Otherwise returns ``False``.
+    """
+    return _get_resource(anchor, path_names).is_file()
+
+
+def contents(anchor, *path_names):
+    """Return an iterable over the named resources within the package.
+
+    The iterable returns :class:`str` resources (e.g. files).
+    The iterable does not recurse into subdirectories.
+    """
+    warnings.warn(
+        "importlib.resources.contents is deprecated. "
+        "Use files(anchor).iterdir() instead.",
+        DeprecationWarning,
+        stacklevel=1,
+    )
+    return (resource.name for resource in _get_resource(anchor, path_names).iterdir())
+
+
+def _get_encoding_arg(path_names, encoding):
+    # For compatibility with versions where *encoding* was a positional
+    # argument, it needs to be given explicitly when there are multiple
+    # *path_names*.
+    # This limitation can be removed in Python 3.15.
+    if encoding is _MISSING:
+        if len(path_names) > 1:
+            raise TypeError(
+                "'encoding' argument required with multiple path names",
+            )
+        else:
+            return 'utf-8'
+    return encoding
+
+
+def _get_resource(anchor, path_names):
+    if anchor is None:
+        raise TypeError("anchor must be module or string, got None")
+    return files(anchor).joinpath(*path_names)
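
A hedged usage sketch of this functional layer; the anchors refer to the test data packages that ship with the library, so substitute your own package names:

    import importlib_resources as resources

    resources.read_text('importlib_resources.tests.data01', 'utf-8.file')
    # With multiple path names, the encoding must be passed explicitly
    # (enforced by _get_encoding_arg until the compatibility shim is removed):
    resources.read_text(
        'importlib_resources.tests.data02',
        'subdirectory', 'subsubdir', 'resource.txt',
        encoding='utf-8',
    )
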
diff --git a/setuptools/_vendor/jaraco/__init__.py b/setuptools/_vendor/importlib_resources/future/__init__.py
similarity index 100%
rename from setuptools/_vendor/jaraco/__init__.py
rename to setuptools/_vendor/importlib_resources/future/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/future/adapters.py b/setuptools/_vendor/importlib_resources/future/adapters.py
new file mode 100644
index 0000000000..0e9764bae8
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/future/adapters.py
@@ -0,0 +1,95 @@
+import functools
+import pathlib
+from contextlib import suppress
+from types import SimpleNamespace
+
+from .. import readers, _adapters
+
+
+def _block_standard(reader_getter):
+    """
+    Wrap _adapters.TraversableResourcesLoader.get_resource_reader
+    and intercept any standard library readers.
+    """
+
+    @functools.wraps(reader_getter)
+    def wrapper(*args, **kwargs):
+        """
+        If the reader is from the standard library, return None to allow
+        likely newer implementations in this library to take precedence.
+        """
+        try:
+            reader = reader_getter(*args, **kwargs)
+        except NotADirectoryError:
+            # MultiplexedPath may fail on zip subdirectory
+            return
+        # Python 3.10+
+        mod_name = reader.__class__.__module__
+        if mod_name.startswith('importlib.') and mod_name.endswith('readers'):
+            return
+        # Python 3.8, 3.9
+        if isinstance(reader, _adapters.CompatibilityFiles) and (
+            reader.spec.loader.__class__.__module__.startswith('zipimport')
+            or reader.spec.loader.__class__.__module__.startswith(
+                '_frozen_importlib_external'
+            )
+        ):
+            return
+        return reader
+
+    return wrapper
+
+
+def _skip_degenerate(reader):
+    """
+    Mask any degenerate reader. Ref #298.
+    """
+    is_degenerate = (
+        isinstance(reader, _adapters.CompatibilityFiles) and not reader._reader
+    )
+    return reader if not is_degenerate else None
+
+
+class TraversableResourcesLoader(_adapters.TraversableResourcesLoader):
+    """
+    Adapt loaders to provide TraversableResources and other
+    compatibility.
+
+    Ensures the readers from importlib_resources are preferred
+    over stdlib readers.
+    """
+
+    def get_resource_reader(self, name):
+        return (
+            _skip_degenerate(_block_standard(super().get_resource_reader)(name))
+            or self._standard_reader()
+            or super().get_resource_reader(name)
+        )
+
+    def _standard_reader(self):
+        return self._zip_reader() or self._namespace_reader() or self._file_reader()
+
+    def _zip_reader(self):
+        with suppress(AttributeError):
+            return readers.ZipReader(self.spec.loader, self.spec.name)
+
+    def _namespace_reader(self):
+        with suppress(AttributeError, ValueError):
+            return readers.NamespaceReader(self.spec.submodule_search_locations)
+
+    def _file_reader(self):
+        try:
+            path = pathlib.Path(self.spec.origin)
+        except TypeError:
+            return None
+        if path.exists():
+            return readers.FileReader(SimpleNamespace(path=path))
+
+
+def wrap_spec(package):
+    """
+    Override _adapters.wrap_spec to use TraversableResourcesLoader
+    from above. Ensures that future behavior is always available on older
+    Pythons.
+    """
+    return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
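
A minimal sketch of how this module is reached at runtime: `_common.from_package` (changed earlier in this patch) defers to this `wrap_spec`, which substitutes the loader subclass above so the vendored readers take precedence over stdlib ones. The package used is one of the library's test fixtures, and the exact reader class depends on the Python version:

    import importlib_resources.tests.data01 as pkg
    from importlib_resources.future.adapters import wrap_spec

    spec = wrap_spec(pkg)
    reader = spec.loader.get_resource_reader(spec.name)
    print(type(reader).__name__)  # e.g. FileReader for an on-disk package
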
diff --git a/setuptools/_vendor/importlib_resources/readers.py b/setuptools/_vendor/importlib_resources/readers.py
index ab34db7409..4a80a774aa 100644
--- a/setuptools/_vendor/importlib_resources/readers.py
+++ b/setuptools/_vendor/importlib_resources/readers.py
@@ -1,11 +1,15 @@
 import collections
+import contextlib
+import itertools
 import pathlib
 import operator
+import re
+import warnings
 
 from . import abc
 
-from ._itertools import unique_everseen
-from ._compat import ZipPath
+from ._itertools import only
+from .compat.py39 import ZipPath
 
 
 def remove_duplicates(items):
@@ -41,8 +45,10 @@ def open_resource(self, resource):
             raise FileNotFoundError(exc.args[0])
 
     def is_resource(self, path):
-        # workaround for `zipfile.Path.is_file` returning true
-        # for non-existent paths.
+        """
+        Workaround for `zipfile.Path.is_file` returning true
+        for non-existent paths.
+        """
         target = self.files().joinpath(path)
         return target.is_file() and target.exists()
 
@@ -59,7 +65,7 @@ class MultiplexedPath(abc.Traversable):
     """
 
     def __init__(self, *paths):
-        self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
+        self._paths = list(map(_ensure_traversable, remove_duplicates(paths)))
         if not self._paths:
             message = 'MultiplexedPath must contain at least one path'
             raise FileNotFoundError(message)
@@ -67,8 +73,10 @@ def __init__(self, *paths):
             raise NotADirectoryError('MultiplexedPath only supports directories')
 
     def iterdir(self):
-        files = (file for path in self._paths for file in path.iterdir())
-        return unique_everseen(files, key=operator.attrgetter('name'))
+        children = (child for path in self._paths for child in path.iterdir())
+        by_name = operator.attrgetter('name')
+        groups = itertools.groupby(sorted(children, key=by_name), key=by_name)
+        return map(self._follow, (locs for name, locs in groups))
 
     def read_bytes(self):
         raise FileNotFoundError(f'{self} is not a file')
@@ -90,6 +98,25 @@ def joinpath(self, *descendants):
             # Just return something that will not exist.
             return self._paths[0].joinpath(*descendants)
 
+    @classmethod
+    def _follow(cls, children):
+        """
+        Construct a MultiplexedPath if needed.
+
+        If children contains a sole element, return it.
+        Otherwise, return a MultiplexedPath of the items.
+        Unless one of the items is not a Directory, then return the first.
+        """
+        subdirs, one_dir, one_file = itertools.tee(children, 3)
+
+        try:
+            return only(one_dir)
+        except ValueError:
+            try:
+                return cls(*subdirs)
+            except NotADirectoryError:
+                return next(one_file)
+
     def open(self, *args, **kwargs):
         raise FileNotFoundError(f'{self} is not a file')
 
@@ -106,7 +133,36 @@ class NamespaceReader(abc.TraversableResources):
     def __init__(self, namespace_path):
         if 'NamespacePath' not in str(namespace_path):
             raise ValueError('Invalid path')
-        self.path = MultiplexedPath(*list(namespace_path))
+        self.path = MultiplexedPath(*map(self._resolve, namespace_path))
+
+    @classmethod
+    def _resolve(cls, path_str) -> abc.Traversable:
+        r"""
+        Given an item from a namespace path, resolve it to a Traversable.
+
+        path_str might be a directory on the filesystem or a path to a
+        zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or
+        ``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``.
+        """
+        (dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir())
+        return dir
+
+    @classmethod
+    def _candidate_paths(cls, path_str):
+        yield pathlib.Path(path_str)
+        yield from cls._resolve_zip_path(path_str)
+
+    @staticmethod
+    def _resolve_zip_path(path_str):
+        for match in reversed(list(re.finditer(r'[\\/]', path_str))):
+            with contextlib.suppress(
+                FileNotFoundError,
+                IsADirectoryError,
+                NotADirectoryError,
+                PermissionError,
+            ):
+                inner = path_str[match.end() :].replace('\\', '/') + '/'
+                yield ZipPath(path_str[: match.start()], inner.lstrip('/'))
 
     def resource_path(self, resource):
         """
@@ -118,3 +174,21 @@ def resource_path(self, resource):
 
     def files(self):
         return self.path
+
+
+def _ensure_traversable(path):
+    """
+    Convert deprecated string arguments to traversables (pathlib.Path).
+
+    Remove with Python 3.15.
+    """
+    if not isinstance(path, str):
+        return path
+
+    warnings.warn(
+        "String arguments are deprecated. Pass a Traversable instead.",
+        DeprecationWarning,
+        stacklevel=3,
+    )
+
+    return pathlib.Path(path)
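
A hedged sketch of the new merge behavior (directory names are hypothetical): where the old `unique_everseen` let the first path shadow same-named entries, `iterdir` now groups children by name and `_follow` folds same-named directories into a nested MultiplexedPath:

    import pathlib, tempfile
    from importlib_resources.readers import MultiplexedPath

    root = pathlib.Path(tempfile.mkdtemp())
    (root / 'a' / 'sub').mkdir(parents=True)
    (root / 'b' / 'sub').mkdir(parents=True)
    (root / 'a' / 'sub' / 'x.txt').write_text('from a', encoding='utf-8')

    merged = MultiplexedPath(root / 'a', root / 'b')
    sub = merged.joinpath('sub')             # both parents have 'sub'
    print(type(sub).__name__)                # MultiplexedPath
    print([p.name for p in sub.iterdir()])   # ['x.txt']
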
diff --git a/setuptools/_vendor/importlib_resources/simple.py b/setuptools/_vendor/importlib_resources/simple.py
index 7770c922c8..96f117fec6 100644
--- a/setuptools/_vendor/importlib_resources/simple.py
+++ b/setuptools/_vendor/importlib_resources/simple.py
@@ -88,7 +88,7 @@ def is_dir(self):
     def open(self, mode='r', *args, **kwargs):
         stream = self.parent.reader.open_binary(self.name)
         if 'b' not in mode:
-            stream = io.TextIOWrapper(*args, **kwargs)
+            stream = io.TextIOWrapper(stream, *args, **kwargs)
         return stream
 
     def joinpath(self, name):
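
The one-line fix above wires the underlying binary stream into the wrapper; previously `io.TextIOWrapper(*args, **kwargs)` was constructed without a buffer, which fails as soon as the method is called with default arguments. A standalone sketch of the corrected pattern:

    import io

    raw = io.BytesIO(b'hello')
    # io.TextIOWrapper requires the buffer as its first argument;
    # omitting it (the old bug) raises TypeError immediately.
    text = io.TextIOWrapper(raw, encoding='utf-8')
    print(text.read())  # 'hello'
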
diff --git a/setuptools/_vendor/importlib_resources/tests/_compat.py b/setuptools/_vendor/importlib_resources/tests/_compat.py
deleted file mode 100644
index e7bf06dd4e..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/_compat.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import os
-
-
-try:
-    from test.support import import_helper  # type: ignore
-except ImportError:
-    # Python 3.9 and earlier
-    class import_helper:  # type: ignore
-        from test.support import (
-            modules_setup,
-            modules_cleanup,
-            DirsOnSysPath,
-            CleanImport,
-        )
-
-
-try:
-    from test.support import os_helper  # type: ignore
-except ImportError:
-    # Python 3.9 compat
-    class os_helper:  # type:ignore
-        from test.support import temp_dir
-
-
-try:
-    # Python 3.10
-    from test.support.os_helper import unlink
-except ImportError:
-    from test.support import unlink as _unlink
-
-    def unlink(target):
-        return _unlink(os.fspath(target))
diff --git a/setuptools/_vendor/importlib_resources/tests/_path.py b/setuptools/_vendor/importlib_resources/tests/_path.py
index c630e4d3d3..1f97c96146 100644
--- a/setuptools/_vendor/importlib_resources/tests/_path.py
+++ b/setuptools/_vendor/importlib_resources/tests/_path.py
@@ -1,12 +1,16 @@
 import pathlib
 import functools
 
+from typing import Dict, Union
+
 
 ####
-# from jaraco.path 3.4
+# from jaraco.path 3.4.1
+
+FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']]  # type: ignore
 
 
-def build(spec, prefix=pathlib.Path()):
+def build(spec: FilesSpec, prefix=pathlib.Path()):
     """
     Build a set of files/directories, as described by the spec.
 
@@ -23,15 +27,17 @@ def build(spec, prefix=pathlib.Path()):
     ...         "baz.py": "# Some code",
     ...     }
     ... }
-    >>> tmpdir = getfixture('tmpdir')
-    >>> build(spec, tmpdir)
+    >>> target = getfixture('tmp_path')
+    >>> build(spec, target)
+    >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
+    '# Some code'
     """
     for name, contents in spec.items():
         create(contents, pathlib.Path(prefix) / name)
 
 
 @functools.singledispatch
-def create(content, path):
+def create(content: Union[str, bytes, FilesSpec], path):
     path.mkdir(exist_ok=True)
     build(content, prefix=path)  # type: ignore
 
@@ -43,7 +49,7 @@ def _(content: bytes, path):
 
 @create.register
 def _(content: str, path):
-    path.write_text(content)
+    path.write_text(content, encoding='utf-8')
 
 
 # end from jaraco.path
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/REQUESTED b/setuptools/_vendor/importlib_resources/tests/compat/__init__.py
similarity index 100%
rename from setuptools/_vendor/more_itertools-8.8.0.dist-info/REQUESTED
rename to setuptools/_vendor/importlib_resources/tests/compat/__init__.py
diff --git a/setuptools/_vendor/importlib_resources/tests/compat/py312.py b/setuptools/_vendor/importlib_resources/tests/compat/py312.py
new file mode 100644
index 0000000000..ea9a58ba2e
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/compat/py312.py
@@ -0,0 +1,18 @@
+import contextlib
+
+from .py39 import import_helper
+
+
+@contextlib.contextmanager
+def isolated_modules():
+    """
+    Save modules on entry and cleanup on exit.
+    """
+    (saved,) = import_helper.modules_setup()
+    try:
+        yield
+    finally:
+        import_helper.modules_cleanup(saved)
+
+
+vars(import_helper).setdefault('isolated_modules', isolated_modules)
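
A hedged sketch of how this shim is consumed (it mirrors the SiteDir fixture updated in test_files.py below): on Python 3.12+ test.support provides isolated_modules natively and setdefault leaves it alone; on older Pythons the fallback above is installed:

    import contextlib
    import unittest

    from importlib_resources.tests.compat.py312 import import_helper

    class Isolated(unittest.TestCase):
        def setUp(self):
            self.fixtures = contextlib.ExitStack()
            self.addCleanup(self.fixtures.close)
            # Modules imported during the test are discarded afterwards.
            self.fixtures.enter_context(import_helper.isolated_modules())
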
diff --git a/setuptools/_vendor/importlib_resources/tests/compat/py39.py b/setuptools/_vendor/importlib_resources/tests/compat/py39.py
new file mode 100644
index 0000000000..e158eb85d3
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/compat/py39.py
@@ -0,0 +1,10 @@
+"""
+Backward-compatibility shims to support Python 3.9 and earlier.
+"""
+
+from jaraco.test.cpython import from_test_support, try_import
+
+import_helper = try_import('import_helper') or from_test_support(
+    'modules_setup', 'modules_cleanup', 'DirsOnSysPath'
+)
+os_helper = try_import('os_helper') or from_test_support('temp_dir')
diff --git a/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file b/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file
index eaf36c1daccfdf325514461cd1a2ffbc139b5464..5bd8bb897b13225c93a1d26baa88c96b7bd5d817 100644
GIT binary patch
literal 4
LcmZQ!Wn%{b05$*@

literal 4
LcmZQzWMT#Y01f~L

diff --git a/setuptools/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt b/setuptools/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
new file mode 100644
index 0000000000..48f587a2d0
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
@@ -0,0 +1 @@
+a resource
\ No newline at end of file
diff --git a/setuptools/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file b/setuptools/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
new file mode 100644
index 0000000000..100f50643d
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
@@ -0,0 +1 @@
+

\ No newline at end of file
diff --git a/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py b/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py
index d92c7c56c9..13ad0dfb21 100644
--- a/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py
+++ b/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py
@@ -64,11 +64,13 @@ def test_orphan_path_name(self):
 
     def test_spec_path_open(self):
         self.assertEqual(self.files.read_bytes(), b'Hello, world!')
-        self.assertEqual(self.files.read_text(), 'Hello, world!')
+        self.assertEqual(self.files.read_text(encoding='utf-8'), 'Hello, world!')
 
     def test_child_path_open(self):
         self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!')
-        self.assertEqual((self.files / 'a').read_text(), 'Hello, world!')
+        self.assertEqual(
+            (self.files / 'a').read_text(encoding='utf-8'), 'Hello, world!'
+        )
 
     def test_orphan_path_open(self):
         with self.assertRaises(FileNotFoundError):
diff --git a/setuptools/_vendor/importlib_resources/tests/test_contents.py b/setuptools/_vendor/importlib_resources/tests/test_contents.py
index 525568e8c9..7dc3b0a619 100644
--- a/setuptools/_vendor/importlib_resources/tests/test_contents.py
+++ b/setuptools/_vendor/importlib_resources/tests/test_contents.py
@@ -31,8 +31,8 @@ class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
 class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
     expected = {
         # no __init__ because of namespace design
-        # no subdirectory as incidental difference in fixture
         'binary.file',
+        'subdirectory',
         'utf-16.file',
         'utf-8.file',
     }
diff --git a/setuptools/_vendor/importlib_resources/tests/test_custom.py b/setuptools/_vendor/importlib_resources/tests/test_custom.py
new file mode 100644
index 0000000000..86c65676f1
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_custom.py
@@ -0,0 +1,47 @@
+import unittest
+import contextlib
+import pathlib
+
+import importlib_resources as resources
+from .. import abc
+from ..abc import TraversableResources, ResourceReader
+from . import util
+from .compat.py39 import os_helper
+
+
+class SimpleLoader:
+    """
+    A simple loader that only implements a resource reader.
+    """
+
+    def __init__(self, reader: ResourceReader):
+        self.reader = reader
+
+    def get_resource_reader(self, package):
+        return self.reader
+
+
+class MagicResources(TraversableResources):
+    """
+    Magically returns the resources at path.
+    """
+
+    def __init__(self, path: pathlib.Path):
+        self.path = path
+
+    def files(self):
+        return self.path
+
+
+class CustomTraversableResourcesTests(unittest.TestCase):
+    def setUp(self):
+        self.fixtures = contextlib.ExitStack()
+        self.addCleanup(self.fixtures.close)
+
+    def test_custom_loader(self):
+        temp_dir = pathlib.Path(self.fixtures.enter_context(os_helper.temp_dir()))
+        loader = SimpleLoader(MagicResources(temp_dir))
+        pkg = util.create_package_from_loader(loader)
+        files = resources.files(pkg)
+        assert isinstance(files, abc.Traversable)
+        assert list(files.iterdir()) == []
diff --git a/setuptools/_vendor/importlib_resources/tests/test_files.py b/setuptools/_vendor/importlib_resources/tests/test_files.py
index d258fb5f0f..3e86ec64bc 100644
--- a/setuptools/_vendor/importlib_resources/tests/test_files.py
+++ b/setuptools/_vendor/importlib_resources/tests/test_files.py
@@ -1,4 +1,3 @@
-import typing
 import textwrap
 import unittest
 import warnings
@@ -10,7 +9,8 @@
 from . import data01
 from . import util
 from . import _path
-from ._compat import os_helper, import_helper
+from .compat.py39 import os_helper
+from .compat.py312 import import_helper
 
 
 @contextlib.contextmanager
@@ -31,13 +31,14 @@ def test_read_text(self):
         actual = files.joinpath('utf-8.file').read_text(encoding='utf-8')
         assert actual == 'Hello, UTF-8 world!\n'
 
-    @unittest.skipUnless(
-        hasattr(typing, 'runtime_checkable'),
-        "Only suitable when typing supports runtime_checkable",
-    )
     def test_traversable(self):
         assert isinstance(resources.files(self.data), Traversable)
 
+    def test_joinpath_with_multiple_args(self):
+        files = resources.files(self.data)
+        binfile = files.joinpath('subdirectory', 'binary.file')
+        self.assertTrue(binfile.is_file())
+
     def test_old_parameter(self):
         """
         Files used to take a 'package' parameter. Make sure anyone
@@ -63,13 +64,17 @@ def setUp(self):
         self.data = namespacedata01
 
 
+class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
+    ZIP_MODULE = 'namespacedata01'
+
+
 class SiteDir:
     def setUp(self):
         self.fixtures = contextlib.ExitStack()
         self.addCleanup(self.fixtures.close)
         self.site_dir = self.fixtures.enter_context(os_helper.temp_dir())
         self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir))
-        self.fixtures.enter_context(import_helper.CleanImport())
+        self.fixtures.enter_context(import_helper.isolated_modules())
 
 
 class ModulesFilesTests(SiteDir, unittest.TestCase):
@@ -84,7 +89,7 @@ def test_module_resources(self):
         _path.build(spec, self.site_dir)
         import mod
 
-        actual = resources.files(mod).joinpath('res.txt').read_text()
+        actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8')
         assert actual == spec['res.txt']
 
 
@@ -98,7 +103,7 @@ def test_implicit_files(self):
                 '__init__.py': textwrap.dedent(
                     """
                     import importlib_resources as res
-                    val = res.files().joinpath('res.txt').read_text()
+                    val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
                     """
                 ),
                 'res.txt': 'resources are the best',
diff --git a/setuptools/_vendor/importlib_resources/tests/test_functional.py b/setuptools/_vendor/importlib_resources/tests/test_functional.py
new file mode 100644
index 0000000000..69706cf7be
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/test_functional.py
@@ -0,0 +1,242 @@
+import unittest
+import os
+import contextlib
+
+try:
+    from test.support.warnings_helper import ignore_warnings, check_warnings
+except ImportError:
+    # older Python versions
+    from test.support import ignore_warnings, check_warnings
+
+import importlib_resources as resources
+
+# Since the functional API forwards to Traversable, we only test
+# filesystem resources here -- not zip files, namespace packages etc.
+# We do test for two kinds of Anchor, though.
+
+
+class StringAnchorMixin:
+    anchor01 = 'importlib_resources.tests.data01'
+    anchor02 = 'importlib_resources.tests.data02'
+
+
+class ModuleAnchorMixin:
+    from . import data01 as anchor01
+    from . import data02 as anchor02
+
+
+class FunctionalAPIBase:
+    def _gen_resourcetxt_path_parts(self):
+        """Yield various names of a text file in anchor02, each in a subTest"""
+        for path_parts in (
+            ('subdirectory', 'subsubdir', 'resource.txt'),
+            ('subdirectory/subsubdir/resource.txt',),
+            ('subdirectory/subsubdir', 'resource.txt'),
+        ):
+            with self.subTest(path_parts=path_parts):
+                yield path_parts
+
+    def test_read_text(self):
+        self.assertEqual(
+            resources.read_text(self.anchor01, 'utf-8.file'),
+            'Hello, UTF-8 world!\n',
+        )
+        self.assertEqual(
+            resources.read_text(
+                self.anchor02,
+                'subdirectory',
+                'subsubdir',
+                'resource.txt',
+                encoding='utf-8',
+            ),
+            'a resource',
+        )
+        for path_parts in self._gen_resourcetxt_path_parts():
+            self.assertEqual(
+                resources.read_text(
+                    self.anchor02,
+                    *path_parts,
+                    encoding='utf-8',
+                ),
+                'a resource',
+            )
+        # Use generic OSError, since e.g. attempting to read a directory can
+        # fail with PermissionError rather than IsADirectoryError
+        with self.assertRaises(OSError):
+            resources.read_text(self.anchor01)
+        with self.assertRaises(OSError):
+            resources.read_text(self.anchor01, 'no-such-file')
+        with self.assertRaises(UnicodeDecodeError):
+            resources.read_text(self.anchor01, 'utf-16.file')
+        self.assertEqual(
+            resources.read_text(
+                self.anchor01,
+                'binary.file',
+                encoding='latin1',
+            ),
+            '\x00\x01\x02\x03',
+        )
+        self.assertEqual(
+            resources.read_text(
+                self.anchor01,
+                'utf-16.file',
+                errors='backslashreplace',
+            ),
+            'Hello, UTF-16 world!\n'.encode('utf-16').decode(
+                errors='backslashreplace',
+            ),
+        )
+
+    def test_read_binary(self):
+        self.assertEqual(
+            resources.read_binary(self.anchor01, 'utf-8.file'),
+            b'Hello, UTF-8 world!\n',
+        )
+        for path_parts in self._gen_resourcetxt_path_parts():
+            self.assertEqual(
+                resources.read_binary(self.anchor02, *path_parts),
+                b'a resource',
+            )
+
+    def test_open_text(self):
+        with resources.open_text(self.anchor01, 'utf-8.file') as f:
+            self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
+        for path_parts in self._gen_resourcetxt_path_parts():
+            with resources.open_text(
+                self.anchor02,
+                *path_parts,
+                encoding='utf-8',
+            ) as f:
+                self.assertEqual(f.read(), 'a resource')
+        # Use generic OSError, since e.g. attempting to read a directory can
+        # fail with PermissionError rather than IsADirectoryError
+        with self.assertRaises(OSError):
+            resources.open_text(self.anchor01)
+        with self.assertRaises(OSError):
+            resources.open_text(self.anchor01, 'no-such-file')
+        with resources.open_text(self.anchor01, 'utf-16.file') as f:
+            with self.assertRaises(UnicodeDecodeError):
+                f.read()
+        with resources.open_text(
+            self.anchor01,
+            'binary.file',
+            encoding='latin1',
+        ) as f:
+            self.assertEqual(f.read(), '\x00\x01\x02\x03')
+        with resources.open_text(
+            self.anchor01,
+            'utf-16.file',
+            errors='backslashreplace',
+        ) as f:
+            self.assertEqual(
+                f.read(),
+                'Hello, UTF-16 world!\n'.encode('utf-16').decode(
+                    errors='backslashreplace',
+                ),
+            )
+
+    def test_open_binary(self):
+        with resources.open_binary(self.anchor01, 'utf-8.file') as f:
+            self.assertEqual(f.read(), b'Hello, UTF-8 world!\n')
+        for path_parts in self._gen_resourcetxt_path_parts():
+            with resources.open_binary(
+                self.anchor02,
+                *path_parts,
+            ) as f:
+                self.assertEqual(f.read(), b'a resource')
+
+    def test_path(self):
+        with resources.path(self.anchor01, 'utf-8.file') as path:
+            with open(str(path), encoding='utf-8') as f:
+                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
+        with resources.path(self.anchor01) as path:
+            with open(os.path.join(path, 'utf-8.file'), encoding='utf-8') as f:
+                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
+
+    def test_is_resource(self):
+        is_resource = resources.is_resource
+        self.assertTrue(is_resource(self.anchor01, 'utf-8.file'))
+        self.assertFalse(is_resource(self.anchor01, 'no_such_file'))
+        self.assertFalse(is_resource(self.anchor01))
+        self.assertFalse(is_resource(self.anchor01, 'subdirectory'))
+        for path_parts in self._gen_resourcetxt_path_parts():
+            self.assertTrue(is_resource(self.anchor02, *path_parts))
+
+    def test_contents(self):
+        with check_warnings((".*contents.*", DeprecationWarning)):
+            c = resources.contents(self.anchor01)
+        self.assertGreaterEqual(
+            set(c),
+            {'utf-8.file', 'utf-16.file', 'binary.file', 'subdirectory'},
+        )
+        with contextlib.ExitStack() as cm:
+            cm.enter_context(self.assertRaises(OSError))
+            cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
+
+            list(resources.contents(self.anchor01, 'utf-8.file'))
+
+        for path_parts in self._gen_resourcetxt_path_parts():
+            with contextlib.ExitStack() as cm:
+                cm.enter_context(self.assertRaises(OSError))
+                cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
+
+                list(resources.contents(self.anchor01, *path_parts))
+        with check_warnings((".*contents.*", DeprecationWarning)):
+            c = resources.contents(self.anchor01, 'subdirectory')
+        self.assertGreaterEqual(
+            set(c),
+            {'binary.file'},
+        )
+
+    @ignore_warnings(category=DeprecationWarning)
+    def test_common_errors(self):
+        for func in (
+            resources.read_text,
+            resources.read_binary,
+            resources.open_text,
+            resources.open_binary,
+            resources.path,
+            resources.is_resource,
+            resources.contents,
+        ):
+            with self.subTest(func=func):
+                # Rejecting None anchor
+                with self.assertRaises(TypeError):
+                    func(None)
+                # Rejecting invalid anchor type
+                with self.assertRaises((TypeError, AttributeError)):
+                    func(1234)
+                # Unknown module
+                with self.assertRaises(ModuleNotFoundError):
+                    func('$missing module$')
+
+    def test_text_errors(self):
+        for func in (
+            resources.read_text,
+            resources.open_text,
+        ):
+            with self.subTest(func=func):
+                # Multiple path arguments need explicit encoding argument.
+                with self.assertRaises(TypeError):
+                    func(
+                        self.anchor02,
+                        'subdirectory',
+                        'subsubdir',
+                        'resource.txt',
+                    )
+
+
+class FunctionalAPITest_StringAnchor(
+    unittest.TestCase,
+    FunctionalAPIBase,
+    StringAnchorMixin,
+):
+    pass
+
+
+class FunctionalAPITest_ModuleAnchor(
+    unittest.TestCase,
+    FunctionalAPIBase,
+    ModuleAnchorMixin,
+):
+    pass
diff --git a/setuptools/_vendor/importlib_resources/tests/test_open.py b/setuptools/_vendor/importlib_resources/tests/test_open.py
index 87b42c3d39..44f1018af3 100644
--- a/setuptools/_vendor/importlib_resources/tests/test_open.py
+++ b/setuptools/_vendor/importlib_resources/tests/test_open.py
@@ -15,7 +15,7 @@ def execute(self, package, path):
 class CommonTextTests(util.CommonTests, unittest.TestCase):
     def execute(self, package, path):
         target = resources.files(package).joinpath(path)
-        with target.open():
+        with target.open(encoding='utf-8'):
             pass
 
 
@@ -24,11 +24,11 @@ def test_open_binary(self):
         target = resources.files(self.data) / 'binary.file'
         with target.open('rb') as fp:
             result = fp.read()
-            self.assertEqual(result, b'\x00\x01\x02\x03')
+            self.assertEqual(result, bytes(range(4)))
 
     def test_open_text_default_encoding(self):
         target = resources.files(self.data) / 'utf-8.file'
-        with target.open() as fp:
+        with target.open(encoding='utf-8') as fp:
             result = fp.read()
             self.assertEqual(result, 'Hello, UTF-8 world!\n')
 
@@ -39,7 +39,9 @@ def test_open_text_given_encoding(self):
         self.assertEqual(result, 'Hello, UTF-16 world!\n')
 
     def test_open_text_with_errors(self):
-        # Raises UnicodeError without the 'errors' argument.
+        """
+        Raises UnicodeError without the 'errors' argument.
+        """
         target = resources.files(self.data) / 'utf-16.file'
         with target.open(encoding='utf-8', errors='strict') as fp:
             self.assertRaises(UnicodeError, fp.read)
@@ -54,11 +56,13 @@ def test_open_text_with_errors(self):
 
     def test_open_binary_FileNotFoundError(self):
         target = resources.files(self.data) / 'does-not-exist'
-        self.assertRaises(FileNotFoundError, target.open, 'rb')
+        with self.assertRaises(FileNotFoundError):
+            target.open('rb')
 
     def test_open_text_FileNotFoundError(self):
         target = resources.files(self.data) / 'does-not-exist'
-        self.assertRaises(FileNotFoundError, target.open)
+        with self.assertRaises(FileNotFoundError):
+            target.open(encoding='utf-8')
 
 
 class OpenDiskTests(OpenTests, unittest.TestCase):
@@ -77,5 +81,9 @@ class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
     pass
 
 
+class OpenNamespaceZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
+    ZIP_MODULE = 'namespacedata01'
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_path.py b/setuptools/_vendor/importlib_resources/tests/test_path.py
index 4f4d3943bb..c3e1cbb4ed 100644
--- a/setuptools/_vendor/importlib_resources/tests/test_path.py
+++ b/setuptools/_vendor/importlib_resources/tests/test_path.py
@@ -1,4 +1,5 @@
 import io
+import pathlib
 import unittest
 
 import importlib_resources as resources
@@ -14,16 +15,14 @@ def execute(self, package, path):
 
 class PathTests:
     def test_reading(self):
-        # Path should be readable.
-        # Test also implicitly verifies the returned object is a pathlib.Path
-        # instance.
+        """
+        Path should be readable and a pathlib.Path instance.
+        """
         target = resources.files(self.data) / 'utf-8.file'
         with resources.as_file(target) as path:
+            self.assertIsInstance(path, pathlib.Path)
             self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
-            # pathlib.Path.read_text() was introduced in Python 3.5.
-            with path.open('r', encoding='utf-8') as file:
-                text = file.read()
-            self.assertEqual('Hello, UTF-8 world!\n', text)
+            self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8'))
 
 
 class PathDiskTests(PathTests, unittest.TestCase):
@@ -53,8 +52,10 @@ def setUp(self):
 
 class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
     def test_remove_in_context_manager(self):
-        # It is not an error if the file that was temporarily stashed on the
-        # file system is removed inside the `with` stanza.
+        """
+        It is not an error if the file that was temporarily stashed on the
+        file system is removed inside the `with` stanza.
+        """
         target = resources.files(self.data) / 'utf-8.file'
         with resources.as_file(target) as path:
             path.unlink()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_read.py b/setuptools/_vendor/importlib_resources/tests/test_read.py
index 41dd6db5f3..97d90128cf 100644
--- a/setuptools/_vendor/importlib_resources/tests/test_read.py
+++ b/setuptools/_vendor/importlib_resources/tests/test_read.py
@@ -13,16 +13,20 @@ def execute(self, package, path):
 
 class CommonTextTests(util.CommonTests, unittest.TestCase):
     def execute(self, package, path):
-        resources.files(package).joinpath(path).read_text()
+        resources.files(package).joinpath(path).read_text(encoding='utf-8')
 
 
 class ReadTests:
     def test_read_bytes(self):
         result = resources.files(self.data).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, b'\0\1\2\3')
+        self.assertEqual(result, bytes(range(4)))
 
     def test_read_text_default_encoding(self):
-        result = resources.files(self.data).joinpath('utf-8.file').read_text()
+        result = (
+            resources.files(self.data)
+            .joinpath('utf-8.file')
+            .read_text(encoding='utf-8')
+        )
         self.assertEqual(result, 'Hello, UTF-8 world!\n')
 
     def test_read_text_given_encoding(self):
@@ -34,7 +38,9 @@ def test_read_text_given_encoding(self):
         self.assertEqual(result, 'Hello, UTF-16 world!\n')
 
     def test_read_text_with_errors(self):
-        # Raises UnicodeError without the 'errors' argument.
+        """
+        Raises UnicodeError without the 'errors' argument.
+        """
         target = resources.files(self.data) / 'utf-16.file'
         self.assertRaises(UnicodeError, target.read_text, encoding='utf-8')
         result = target.read_text(encoding='utf-8', errors='ignore')
@@ -52,17 +58,15 @@ class ReadDiskTests(ReadTests, unittest.TestCase):
 
 class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
     def test_read_submodule_resource(self):
-        submodule = import_module('ziptestdata.subdirectory')
+        submodule = import_module('data01.subdirectory')
         result = resources.files(submodule).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, b'\0\1\2\3')
+        self.assertEqual(result, bytes(range(4, 8)))
 
     def test_read_submodule_resource_by_name(self):
         result = (
-            resources.files('ziptestdata.subdirectory')
-            .joinpath('binary.file')
-            .read_bytes()
+            resources.files('data01.subdirectory').joinpath('binary.file').read_bytes()
         )
-        self.assertEqual(result, b'\0\1\2\3')
+        self.assertEqual(result, bytes(range(4, 8)))
 
 
 class ReadNamespaceTests(ReadTests, unittest.TestCase):
@@ -72,5 +76,22 @@ def setUp(self):
         self.data = namespacedata01
 
 
+class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
+    ZIP_MODULE = 'namespacedata01'
+
+    def test_read_submodule_resource(self):
+        submodule = import_module('namespacedata01.subdirectory')
+        result = resources.files(submodule).joinpath('binary.file').read_bytes()
+        self.assertEqual(result, bytes(range(12, 16)))
+
+    def test_read_submodule_resource_by_name(self):
+        result = (
+            resources.files('namespacedata01.subdirectory')
+            .joinpath('binary.file')
+            .read_bytes()
+        )
+        self.assertEqual(result, bytes(range(12, 16)))
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_reader.py b/setuptools/_vendor/importlib_resources/tests/test_reader.py
index 1c8ebeeb13..95c2fc85a4 100644
--- a/setuptools/_vendor/importlib_resources/tests/test_reader.py
+++ b/setuptools/_vendor/importlib_resources/tests/test_reader.py
@@ -10,8 +10,7 @@
 class MultiplexedPathTest(unittest.TestCase):
     @classmethod
     def setUpClass(cls):
-        path = pathlib.Path(__file__).parent / 'namespacedata01'
-        cls.folder = str(path)
+        cls.folder = pathlib.Path(__file__).parent / 'namespacedata01'
 
     def test_init_no_paths(self):
         with self.assertRaises(FileNotFoundError):
@@ -19,7 +18,7 @@ def test_init_no_paths(self):
 
     def test_init_file(self):
         with self.assertRaises(NotADirectoryError):
-            MultiplexedPath(os.path.join(self.folder, 'binary.file'))
+            MultiplexedPath(self.folder / 'binary.file')
 
     def test_iterdir(self):
         contents = {path.name for path in MultiplexedPath(self.folder).iterdir()}
@@ -27,10 +26,12 @@ def test_iterdir(self):
             contents.remove('__pycache__')
         except (KeyError, ValueError):
             pass
-        self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'})
+        self.assertEqual(
+            contents, {'subdirectory', 'binary.file', 'utf-16.file', 'utf-8.file'}
+        )
 
     def test_iterdir_duplicate(self):
-        data01 = os.path.abspath(os.path.join(__file__, '..', 'data01'))
+        data01 = pathlib.Path(__file__).parent.joinpath('data01')
         contents = {
             path.name for path in MultiplexedPath(self.folder, data01).iterdir()
         }
@@ -60,17 +61,17 @@ def test_open_file(self):
             path.open()
 
     def test_join_path(self):
-        prefix = os.path.abspath(os.path.join(__file__, '..'))
-        data01 = os.path.join(prefix, 'data01')
+        data01 = pathlib.Path(__file__).parent.joinpath('data01')
+        prefix = str(data01.parent)
         path = MultiplexedPath(self.folder, data01)
         self.assertEqual(
             str(path.joinpath('binary.file'))[len(prefix) + 1 :],
             os.path.join('namespacedata01', 'binary.file'),
         )
-        self.assertEqual(
-            str(path.joinpath('subdirectory'))[len(prefix) + 1 :],
-            os.path.join('data01', 'subdirectory'),
-        )
+        sub = path.joinpath('subdirectory')
+        assert isinstance(sub, MultiplexedPath)
+        assert 'namespacedata01' in str(sub)
+        assert 'data01' in str(sub)
         self.assertEqual(
             str(path.joinpath('imaginary'))[len(prefix) + 1 :],
             os.path.join('namespacedata01', 'imaginary'),
@@ -81,6 +82,17 @@ def test_join_path_compound(self):
         path = MultiplexedPath(self.folder)
         assert not path.joinpath('imaginary/foo.py').exists()
 
+    def test_join_path_common_subdir(self):
+        data01 = pathlib.Path(__file__).parent.joinpath('data01')
+        data02 = pathlib.Path(__file__).parent.joinpath('data02')
+        prefix = str(data01.parent)
+        path = MultiplexedPath(data01, data02)
+        self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath)
+        self.assertEqual(
+            str(path.joinpath('subdirectory', 'subsubdir'))[len(prefix) + 1 :],
+            os.path.join('data02', 'subdirectory', 'subsubdir'),
+        )
+
     def test_repr(self):
         self.assertEqual(
             repr(MultiplexedPath(self.folder)),
diff --git a/setuptools/_vendor/importlib_resources/tests/test_resource.py b/setuptools/_vendor/importlib_resources/tests/test_resource.py
index 8239027167..dc2a108cde 100644
--- a/setuptools/_vendor/importlib_resources/tests/test_resource.py
+++ b/setuptools/_vendor/importlib_resources/tests/test_resource.py
@@ -1,14 +1,11 @@
 import sys
 import unittest
 import importlib_resources as resources
-import uuid
 import pathlib
 
 from . import data01
-from . import zipdata01, zipdata02
 from . import util
 from importlib import import_module
-from ._compat import import_helper, unlink
 
 
 class ResourceTests:
@@ -69,10 +66,12 @@ def test_resource_missing(self):
 
 class ResourceCornerCaseTests(unittest.TestCase):
     def test_package_has_no_reader_fallback(self):
-        # Test odd ball packages which:
+        """
+        Test odd ball packages which:
         # 1. Do not have a ResourceReader as a loader
         # 2. Are not on the file system
         # 3. Are not in a zip file
+        """
         module = util.create_package(
             file=data01, path=data01.__file__, contents=['A', 'B', 'C']
         )
@@ -86,34 +85,32 @@ def test_package_has_no_reader_fallback(self):
 
 
 class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
-    ZIP_MODULE = zipdata01  # type: ignore
+    ZIP_MODULE = 'data01'
 
     def test_is_submodule_resource(self):
-        submodule = import_module('ziptestdata.subdirectory')
+        submodule = import_module('data01.subdirectory')
         self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())
 
     def test_read_submodule_resource_by_name(self):
         self.assertTrue(
-            resources.files('ziptestdata.subdirectory')
-            .joinpath('binary.file')
-            .is_file()
+            resources.files('data01.subdirectory').joinpath('binary.file').is_file()
         )
 
     def test_submodule_contents(self):
-        submodule = import_module('ziptestdata.subdirectory')
+        submodule = import_module('data01.subdirectory')
         self.assertEqual(
             names(resources.files(submodule)), {'__init__.py', 'binary.file'}
         )
 
     def test_submodule_contents_by_name(self):
         self.assertEqual(
-            names(resources.files('ziptestdata.subdirectory')),
+            names(resources.files('data01.subdirectory')),
             {'__init__.py', 'binary.file'},
         )
 
     def test_as_file_directory(self):
-        with resources.as_file(resources.files('ziptestdata')) as data:
-            assert data.name == 'ziptestdata'
+        with resources.as_file(resources.files('data01')) as data:
+            assert data.name == 'data01'
             assert data.is_dir()
             assert data.joinpath('subdirectory').is_dir()
             assert len(list(data.iterdir()))
@@ -121,7 +118,7 @@ def test_as_file_directory(self):
 
 
 class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
-    ZIP_MODULE = zipdata02  # type: ignore
+    ZIP_MODULE = 'data02'
 
     def test_unrelated_contents(self):
         """
@@ -129,104 +126,48 @@ def test_unrelated_contents(self):
         distinct resources. Ref python/importlib_resources#44.
         """
         self.assertEqual(
-            names(resources.files('ziptestdata.one')),
+            names(resources.files('data02.one')),
             {'__init__.py', 'resource1.txt'},
         )
         self.assertEqual(
-            names(resources.files('ziptestdata.two')),
+            names(resources.files('data02.two')),
             {'__init__.py', 'resource2.txt'},
         )
 
 
-class DeletingZipsTest(unittest.TestCase):
+class DeletingZipsTest(util.ZipSetupBase, unittest.TestCase):
     """Having accessed resources in a zip file should not keep an open
     reference to the zip.
     """
 
-    ZIP_MODULE = zipdata01
-
-    def setUp(self):
-        modules = import_helper.modules_setup()
-        self.addCleanup(import_helper.modules_cleanup, *modules)
-
-        data_path = pathlib.Path(self.ZIP_MODULE.__file__)
-        data_dir = data_path.parent
-        self.source_zip_path = data_dir / 'ziptestdata.zip'
-        self.zip_path = pathlib.Path(f'{uuid.uuid4()}.zip').absolute()
-        self.zip_path.write_bytes(self.source_zip_path.read_bytes())
-        sys.path.append(str(self.zip_path))
-        self.data = import_module('ziptestdata')
-
-    def tearDown(self):
-        try:
-            sys.path.remove(str(self.zip_path))
-        except ValueError:
-            pass
-
-        try:
-            del sys.path_importer_cache[str(self.zip_path)]
-            del sys.modules[self.data.__name__]
-        except KeyError:
-            pass
-
-        try:
-            unlink(self.zip_path)
-        except OSError:
-            # If the test fails, this will probably fail too
-            pass
-
     def test_iterdir_does_not_keep_open(self):
-        c = [item.name for item in resources.files('ziptestdata').iterdir()]
-        self.zip_path.unlink()
-        del c
+        [item.name for item in resources.files('data01').iterdir()]
 
     def test_is_file_does_not_keep_open(self):
-        c = resources.files('ziptestdata').joinpath('binary.file').is_file()
-        self.zip_path.unlink()
-        del c
+        resources.files('data01').joinpath('binary.file').is_file()
 
     def test_is_file_failure_does_not_keep_open(self):
-        c = resources.files('ziptestdata').joinpath('not-present').is_file()
-        self.zip_path.unlink()
-        del c
+        resources.files('data01').joinpath('not-present').is_file()
 
     @unittest.skip("Desired but not supported.")
     def test_as_file_does_not_keep_open(self):  # pragma: no cover
-        c = resources.as_file(resources.files('ziptestdata') / 'binary.file')
-        self.zip_path.unlink()
-        del c
+        resources.as_file(resources.files('data01') / 'binary.file')
 
     def test_entered_path_does_not_keep_open(self):
-        # This is what certifi does on import to make its bundle
-        # available for the process duration.
-        c = resources.as_file(
-            resources.files('ziptestdata') / 'binary.file'
-        ).__enter__()
-        self.zip_path.unlink()
-        del c
+        """
+        Mimic what certifi does on import to make its bundle
+        available for the process duration.
+        """
+        resources.as_file(resources.files('data01') / 'binary.file').__enter__()
 
     def test_read_binary_does_not_keep_open(self):
-        c = resources.files('ziptestdata').joinpath('binary.file').read_bytes()
-        self.zip_path.unlink()
-        del c
+        resources.files('data01').joinpath('binary.file').read_bytes()
 
     def test_read_text_does_not_keep_open(self):
-        c = resources.files('ziptestdata').joinpath('utf-8.file').read_text()
-        self.zip_path.unlink()
-        del c
-
-
-class ResourceFromNamespaceTest01(unittest.TestCase):
-    site_dir = str(pathlib.Path(__file__).parent)
+        resources.files('data01').joinpath('utf-8.file').read_text(encoding='utf-8')
 
-    @classmethod
-    def setUpClass(cls):
-        sys.path.append(cls.site_dir)
-
-    @classmethod
-    def tearDownClass(cls):
-        sys.path.remove(cls.site_dir)
 
+class ResourceFromNamespaceTests:
     def test_is_submodule_resource(self):
         self.assertTrue(
             resources.files(import_module('namespacedata01'))
@@ -245,7 +186,9 @@ def test_submodule_contents(self):
             contents.remove('__pycache__')
         except KeyError:
             pass
-        self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+        self.assertEqual(
+            contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
+        )
 
     def test_submodule_contents_by_name(self):
         contents = names(resources.files('namespacedata01'))
@@ -253,7 +196,45 @@ def test_submodule_contents_by_name(self):
             contents.remove('__pycache__')
         except KeyError:
             pass
-        self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+        self.assertEqual(
+            contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
+        )
+
+    def test_submodule_sub_contents(self):
+        contents = names(resources.files(import_module('namespacedata01.subdirectory')))
+        try:
+            contents.remove('__pycache__')
+        except KeyError:
+            pass
+        self.assertEqual(contents, {'binary.file'})
+
+    def test_submodule_sub_contents_by_name(self):
+        contents = names(resources.files('namespacedata01.subdirectory'))
+        try:
+            contents.remove('__pycache__')
+        except KeyError:
+            pass
+        self.assertEqual(contents, {'binary.file'})
+
+
+class ResourceFromNamespaceDiskTests(ResourceFromNamespaceTests, unittest.TestCase):
+    site_dir = str(pathlib.Path(__file__).parent)
+
+    @classmethod
+    def setUpClass(cls):
+        sys.path.append(cls.site_dir)
+
+    @classmethod
+    def tearDownClass(cls):
+        sys.path.remove(cls.site_dir)
+
+
+class ResourceFromNamespaceZipTests(
+    util.ZipSetupBase,
+    ResourceFromNamespaceTests,
+    unittest.TestCase,
+):
+    ZIP_MODULE = 'namespacedata01'
 
 
 if __name__ == '__main__':
diff --git a/setuptools/_vendor/importlib_resources/tests/update-zips.py b/setuptools/_vendor/importlib_resources/tests/update-zips.py
deleted file mode 100644
index 231334aa7e..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/update-zips.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
-Generate the zip test data files.
-
-Run to build the tests/zipdataNN/ziptestdata.zip files from
-files in tests/dataNN.
-
-Replaces the file with the working copy, but does not commit anything
-to the source repo.
-"""
-
-import contextlib
-import os
-import pathlib
-import zipfile
-
-
-def main():
-    """
-    >>> from unittest import mock
-    >>> monkeypatch = getfixture('monkeypatch')
-    >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock())
-    >>> print(); main()  # print workaround for bpo-32509
-    
-    ...data01... -> ziptestdata/...
-    ...
-    ...data02... -> ziptestdata/...
-    ...
-    """
-    suffixes = '01', '02'
-    tuple(map(generate, suffixes))
-
-
-def generate(suffix):
-    root = pathlib.Path(__file__).parent.relative_to(os.getcwd())
-    zfpath = root / f'zipdata{suffix}/ziptestdata.zip'
-    with zipfile.ZipFile(zfpath, 'w') as zf:
-        for src, rel in walk(root / f'data{suffix}'):
-            dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix())
-            print(src, '->', dst)
-            zf.write(src, dst)
-
-
-def walk(datapath):
-    for dirpath, dirnames, filenames in os.walk(datapath):
-        with contextlib.suppress(ValueError):
-            dirnames.remove('__pycache__')
-        for filename in filenames:
-            res = pathlib.Path(dirpath) / filename
-            rel = res.relative_to(datapath)
-            yield res, rel
-
-
-__name__ == '__main__' and main()
diff --git a/setuptools/_vendor/importlib_resources/tests/util.py b/setuptools/_vendor/importlib_resources/tests/util.py
index b596c0ce4f..fb827d2fa0 100644
--- a/setuptools/_vendor/importlib_resources/tests/util.py
+++ b/setuptools/_vendor/importlib_resources/tests/util.py
@@ -4,11 +4,12 @@
 import sys
 import types
 import pathlib
+import contextlib
 
 from . import data01
-from . import zipdata01
 from ..abc import ResourceReader
-from ._compat import import_helper
+from .compat.py39 import import_helper, os_helper
+from . import zip as zip_
 
 
 from importlib.machinery import ModuleSpec
@@ -80,32 +81,44 @@ def execute(self, package, path):
         """
 
     def test_package_name(self):
-        # Passing in the package name should succeed.
+        """
+        Passing in the package name should succeed.
+        """
         self.execute(data01.__name__, 'utf-8.file')
 
     def test_package_object(self):
-        # Passing in the package itself should succeed.
+        """
+        Passing in the package itself should succeed.
+        """
         self.execute(data01, 'utf-8.file')
 
     def test_string_path(self):
-        # Passing in a string for the path should succeed.
+        """
+        Passing in a string for the path should succeed.
+        """
         path = 'utf-8.file'
         self.execute(data01, path)
 
     def test_pathlib_path(self):
-        # Passing in a pathlib.PurePath object for the path should succeed.
+        """
+        Passing in a pathlib.PurePath object for the path should succeed.
+        """
         path = pathlib.PurePath('utf-8.file')
         self.execute(data01, path)
 
     def test_importing_module_as_side_effect(self):
-        # The anchor package can already be imported.
+        """
+        The anchor package can already be imported.
+        """
         del sys.modules[data01.__name__]
         self.execute(data01.__name__, 'utf-8.file')
 
     def test_missing_path(self):
-        # Attempting to open or read or request the path for a
-        # non-existent path should succeed if open_resource
-        # can return a viable data stream.
+        """
+        Attempting to open or read or request the path for a
+        non-existent path should succeed if open_resource
+        can return a viable data stream.
+        """
         bytes_data = io.BytesIO(b'Hello, world!')
         package = create_package(file=bytes_data, path=FileNotFoundError())
         self.execute(package, 'utf-8.file')
@@ -129,39 +142,23 @@ def test_useless_loader(self):
 
 
 class ZipSetupBase:
-    ZIP_MODULE = None
-
-    @classmethod
-    def setUpClass(cls):
-        data_path = pathlib.Path(cls.ZIP_MODULE.__file__)
-        data_dir = data_path.parent
-        cls._zip_path = str(data_dir / 'ziptestdata.zip')
-        sys.path.append(cls._zip_path)
-        cls.data = importlib.import_module('ziptestdata')
-
-    @classmethod
-    def tearDownClass(cls):
-        try:
-            sys.path.remove(cls._zip_path)
-        except ValueError:
-            pass
-
-        try:
-            del sys.path_importer_cache[cls._zip_path]
-            del sys.modules[cls.data.__name__]
-        except KeyError:
-            pass
-
-        try:
-            del cls.data
-            del cls._zip_path
-        except AttributeError:
-            pass
+    ZIP_MODULE = 'data01'
 
     def setUp(self):
-        modules = import_helper.modules_setup()
-        self.addCleanup(import_helper.modules_cleanup, *modules)
+        self.fixtures = contextlib.ExitStack()
+        self.addCleanup(self.fixtures.close)
+
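+        # Give each test isolated module state and a freshly built zip of
+        # ZIP_MODULE on sys.path; the ExitStack unwinds it all at cleanup.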
+        self.fixtures.enter_context(import_helper.isolated_modules())
+
+        temp_dir = self.fixtures.enter_context(os_helper.temp_dir())
+        modules = pathlib.Path(temp_dir) / 'zipped modules.zip'
+        src_path = pathlib.Path(__file__).parent.joinpath(self.ZIP_MODULE)
+        self.fixtures.enter_context(
+            import_helper.DirsOnSysPath(str(zip_.make_zip_file(src_path, modules)))
+        )
+
+        self.data = importlib.import_module(self.ZIP_MODULE)
 
 
 class ZipSetup(ZipSetupBase):
-    ZIP_MODULE = zipdata01  # type: ignore
+    pass
diff --git a/setuptools/_vendor/importlib_resources/tests/zip.py b/setuptools/_vendor/importlib_resources/tests/zip.py
new file mode 100644
index 0000000000..962195a901
--- /dev/null
+++ b/setuptools/_vendor/importlib_resources/tests/zip.py
@@ -0,0 +1,32 @@
+"""
+Generate zip test data files.
+"""
+
+import contextlib
+import os
+import pathlib
+import zipfile
+
+import zipp
+
+
+def make_zip_file(src, dst):
+    """
+    Zip the files in src into a new zipfile at dst.
+    """
+    with zipfile.ZipFile(dst, 'w') as zf:
+        for src_path, rel in walk(src):
+            dst_name = src.name / pathlib.PurePosixPath(rel.as_posix())
+            zf.write(src_path, dst_name)
+        zipp.CompleteDirs.inject(zf)
+    return dst
+
+
+def walk(datapath):
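+    """
+    Yield (source, relative) path pairs for files under datapath,
+    skipping any __pycache__ directories.
+    """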
+    for dirpath, dirnames, filenames in os.walk(datapath):
+        with contextlib.suppress(ValueError):
+            dirnames.remove('__pycache__')
+        for filename in filenames:
+            res = pathlib.Path(dirpath) / filename
+            rel = res.relative_to(datapath)
+            yield res, rel
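+
+
+# A minimal usage sketch (hypothetical paths): build a zip of tests/data01
+# and list its entries, which are rooted at the source directory's name.
+#
+#   import tempfile
+#   src = pathlib.Path('tests/data01')
+#   dst = pathlib.Path(tempfile.mkdtemp()) / 'data01.zip'
+#   with zipfile.ZipFile(make_zip_file(src, dst)) as zf:
+#       print(zf.namelist())  # e.g. ['data01/__init__.py', ...]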
diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip b/setuptools/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip
deleted file mode 100644
index 9a3bb0739f87e97c1084b94d7d153680f6727738..0000000000000000000000000000000000000000

diff --git a/setuptools/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip b/setuptools/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip
deleted file mode 100644
index d63ff512d2807ef2fd259455283b81b02e0e45fb..0000000000000000000000000000000000000000

diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER b/setuptools/_vendor/inflect-7.3.1.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER
rename to setuptools/_vendor/inflect-7.3.1.dist-info/INSTALLER
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE b/setuptools/_vendor/inflect-7.3.1.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
rename to setuptools/_vendor/inflect-7.3.1.dist-info/LICENSE
diff --git a/setuptools/_vendor/inflect-7.3.1.dist-info/METADATA b/setuptools/_vendor/inflect-7.3.1.dist-info/METADATA
new file mode 100644
index 0000000000..9a2097a54a
--- /dev/null
+++ b/setuptools/_vendor/inflect-7.3.1.dist-info/METADATA
@@ -0,0 +1,591 @@
+Metadata-Version: 2.1
+Name: inflect
+Version: 7.3.1
+Summary: Correctly generate plurals, singular nouns, ordinals, indefinite articles
+Author-email: Paul Dyson 
+Maintainer-email: "Jason R. Coombs" 
+Project-URL: Source, https://github.com/jaraco/inflect
+Keywords: plural,inflect,participle
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Linguistic
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: more-itertools >=8.5.0
+Requires-Dist: typeguard >=4.0.1
+Requires-Dist: typing-extensions ; python_version < "3.9"
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-mypy ; extra == 'test'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
+Requires-Dist: pygments ; extra == 'test'
+
+.. image:: https://img.shields.io/pypi/v/inflect.svg
+   :target: https://pypi.org/project/inflect
+
+.. image:: https://img.shields.io/pypi/pyversions/inflect.svg
+
+.. image:: https://github.com/jaraco/inflect/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/inflect/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/inflect/badge/?version=latest
+   :target: https://inflect.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/inflect
+   :target: https://tidelift.com/subscription/pkg/pypi-inflect?utm_source=pypi-inflect&utm_medium=readme
+
+NAME
+====
+
+inflect.py - Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words.
+
+SYNOPSIS
+========
+
+.. code-block:: python
+
+    import inflect
+
+    p = inflect.engine()
+
+    # METHODS:
+
+    # plural plural_noun plural_verb plural_adj singular_noun no num
+    # compare compare_nouns compare_verbs compare_adjs
+    # a an
+    # present_participle
+    # ordinal number_to_words
+    # join
+    # inflect classical gender
+    # defnoun defverb defadj defa defan
+
+
+    # UNCONDITIONALLY FORM THE PLURAL
+
+    print("The plural of ", word, " is ", p.plural(word))
+
+
+    # CONDITIONALLY FORM THE PLURAL
+
+    print("I saw", cat_count, p.plural("cat", cat_count))
+
+
+    # FORM PLURALS FOR SPECIFIC PARTS OF SPEECH
+
+    print(
+        p.plural_noun("I", N1),
+        p.plural_verb("saw", N1),
+        p.plural_adj("my", N2),
+        p.plural_noun("saw", N2),
+    )
+
+
+    # FORM THE SINGULAR OF PLURAL NOUNS
+
+    print("The singular of ", word, " is ", p.singular_noun(word))
+
+    # SELECT THE GENDER OF SINGULAR PRONOUNS
+
+    print(p.singular_noun("they"))  # 'it'
+    p.gender("feminine")
+    print(p.singular_noun("they"))  # 'she'
+
+
+    # DEAL WITH "0/1/N" -> "no/1/N" TRANSLATION:
+
+    print("There ", p.plural_verb("was", errors), p.no(" error", errors))
+
+
+    # USE DEFAULT COUNTS:
+
+    print(
+        p.num(N1, ""),
+        p.plural("I"),
+        p.plural_verb(" saw"),
+        p.num(N2),
+        p.plural_noun(" saw"),
+    )
+    print("There ", p.num(errors, ""), p.plural_verb("was"), p.no(" error"))
+
+
+    # COMPARE TWO WORDS "NUMBER-INSENSITIVELY":
+
+    if p.compare(word1, word2):
+        print("same")
+    if p.compare_nouns(word1, word2):
+        print("same noun")
+    if p.compare_verbs(word1, word2):
+        print("same verb")
+    if p.compare_adjs(word1, word2):
+        print("same adj.")
+
+
+    # ADD CORRECT "a" OR "an" FOR A GIVEN WORD:
+
+    print("Did you want ", p.a(thing), " or ", p.an(idea))
+
+
+    # CONVERT NUMERALS INTO ORDINALS (i.e. 1->1st, 2->2nd, 3->3rd, etc.)
+
+    print("It was", p.ordinal(position), " from the left\n")
+
+    # CONVERT NUMERALS TO WORDS (i.e. 1->"one", 101->"one hundred and one", etc.)
+    # RETURNS A SINGLE STRING...
+
+    words = p.number_to_words(1234)
+    # "one thousand, two hundred and thirty-four"
+    words = p.number_to_words(p.ordinal(1234))
+    # "one thousand, two hundred and thirty-fourth"
+
+
+    # GET BACK A LIST OF STRINGS, ONE FOR EACH "CHUNK"...
+
+    words = p.number_to_words(1234, wantlist=True)
+    # ("one thousand","two hundred and thirty-four")
+
+
+    # OPTIONAL PARAMETERS CHANGE TRANSLATION:
+
+    words = p.number_to_words(12345, group=1)
+    # "one, two, three, four, five"
+
+    words = p.number_to_words(12345, group=2)
+    # "twelve, thirty-four, five"
+
+    words = p.number_to_words(12345, group=3)
+    # "one twenty-three, forty-five"
+
+    words = p.number_to_words(1234, andword="")
+    # "one thousand, two hundred thirty-four"
+
+    words = p.number_to_words(1234, andword=", plus")
+    # "one thousand, two hundred, plus thirty-four"
+    # TODO: I get no comma before plus: check perl
+
+    words = p.number_to_words(555_1202, group=1, zero="oh")
+    # "five, five, five, one, two, oh, two"
+
+    words = p.number_to_words(555_1202, group=1, one="unity")
+    # "five, five, five, unity, two, oh, two"
+
+    words = p.number_to_words(123.456, group=1, decimal="mark")
+    # "one two three mark four five six"
+    # TODO: DOCBUG: perl gives commas here as do I
+
+    # LITERAL STYLE ONLY NAMES NUMBERS LESS THAN A CERTAIN THRESHOLD...
+
+    words = p.number_to_words(9, threshold=10)  # "nine"
+    words = p.number_to_words(10, threshold=10)  # "ten"
+    words = p.number_to_words(11, threshold=10)  # "11"
+    words = p.number_to_words(1000, threshold=10)  # "1,000"
+
+    # JOIN WORDS INTO A LIST:
+
+    mylist = p.join(("apple", "banana", "carrot"))
+    # "apple, banana, and carrot"
+
+    mylist = p.join(("apple", "banana"))
+    # "apple and banana"
+
+    mylist = p.join(("apple", "banana", "carrot"), final_sep="")
+    # "apple, banana and carrot"
+
+
+    # REQUIRE "CLASSICAL" PLURALS (EG: "focus"->"foci", "cherub"->"cherubim")
+
+    p.classical()  # USE ALL CLASSICAL PLURALS
+
+    p.classical(all=True)  # USE ALL CLASSICAL PLURALS
+    p.classical(all=False)  # SWITCH OFF CLASSICAL MODE
+
+    p.classical(zero=True)  #  "no error" INSTEAD OF "no errors"
+    p.classical(zero=False)  #  "no errors" INSTEAD OF "no error"
+
+    p.classical(herd=True)  #  "2 buffalo" INSTEAD OF "2 buffalos"
+    p.classical(herd=False)  #  "2 buffalos" INSTEAD OF "2 buffalo"
+
+    p.classical(persons=True)  # "2 chairpersons" INSTEAD OF "2 chairpeople"
+    p.classical(persons=False)  # "2 chairpeople" INSTEAD OF "2 chairpersons"
+
+    p.classical(ancient=True)  # "2 formulae" INSTEAD OF "2 formulas"
+    p.classical(ancient=False)  # "2 formulas" INSTEAD OF "2 formulae"
+
+
+    # INTERPOLATE "plural()", "plural_noun()", "plural_verb()", "plural_adj()", "singular_noun()",
+    # a()", "an()", "num()" AND "ordinal()" WITHIN STRINGS:
+
+    print(p.inflect("The plural of {0} is plural('{0}')".format(word)))
+    print(p.inflect("The singular of {0} is singular_noun('{0}')".format(word)))
+    print(p.inflect("I saw {0} plural('cat',{0})".format(cat_count)))
+    print(
+        p.inflect(
+            "plural('I',{0}) "
+            "plural_verb('saw',{0}) "
+            "plural('a',{1}) "
+            "plural_noun('saw',{1})".format(N1, N2)
+        )
+    )
+    print(
+        p.inflect(
+            "num({0}, False)plural('I') "
+            "plural_verb('saw') "
+            "num({1}, False)plural('a') "
+            "plural_noun('saw')".format(N1, N2)
+        )
+    )
+    print(p.inflect("I saw num({0}) plural('cat')\nnum()".format(cat_count)))
+    print(p.inflect("There plural_verb('was',{0}) no('error',{0})".format(errors)))
+    print(p.inflect("There num({0}, False)plural_verb('was') no('error')".format(errors)))
+    print(p.inflect("Did you want a('{0}') or an('{1}')".format(thing, idea)))
+    print(p.inflect("It was ordinal('{0}') from the left".format(position)))
+
+
+    # ADD USER-DEFINED INFLECTIONS (OVERRIDING INBUILT RULES):
+
+    p.defnoun("VAX", "VAXen")  # SINGULAR => PLURAL
+
+    p.defverb(
+        "will",  # 1ST PERSON SINGULAR
+        "shall",  # 1ST PERSON PLURAL
+        "will",  # 2ND PERSON SINGULAR
+        "will",  # 2ND PERSON PLURAL
+        "will",  # 3RD PERSON SINGULAR
+        "will",  # 3RD PERSON PLURAL
+    )
+
+    p.defadj("hir", "their")  # SINGULAR => PLURAL
+
+    p.defa("h")  # "AY HALWAYS SEZ 'HAITCH'!"
+
+    p.defan("horrendous.*")  # "AN HORRENDOUS AFFECTATION"
+
+
+DESCRIPTION
+===========
+
+The methods of the class ``engine`` in module ``inflect.py`` provide plural
+inflections, singular noun inflections, "a"/"an" selection for English words,
+and manipulation of numbers as words.
+
+Plural forms of all nouns, most verbs, and some adjectives are
+provided. Where appropriate, "classical" variants (for example: "brother" ->
+"brethren", "dogma" -> "dogmata", etc.) are also provided.
+
+Singular forms of nouns are also provided. The gender of singular pronouns
+can be chosen (for example "they" -> "it" or "she" or "he" or "they").
+
+Pronunciation-based "a"/"an" selection is provided for all English
+words, and most initialisms.
+
+It is also possible to inflect numerals (1,2,3) to ordinals (1st, 2nd, 3rd)
+and to English words ("one", "two", "three").
+
+In generating these inflections, ``inflect.py`` follows the Oxford
+English Dictionary and the guidelines in Fowler's Modern English
+Usage, preferring the former where the two disagree.
+
+The module is built around standard British spelling, but is designed
+to cope with common American variants as well. Slang, jargon, and
+other English dialects are *not* explicitly catered for.
+
+Where two or more inflected forms exist for a single word (typically a
+"classical" form and a "modern" form), ``inflect.py`` prefers the
+more common form (typically the "modern" one), unless "classical"
+processing has been specified
+(see `MODERN VS CLASSICAL INFLECTIONS`).
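+
+For instance, a quick sketch of the default ("modern") preference, assuming
+an engine ``p = inflect.engine()`` as in the synopsis:
+
+.. code-block:: python
+
+    p.plural("formula")  # "formulas" -- modern form preferred
+    p.classical(ancient=True)
+    p.plural("formula")  # "formulae" -- classical form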
+
+FORMING PLURALS AND SINGULARS
+=============================
+
+Inflecting Plurals and Singulars
+--------------------------------
+
+All of the ``plural...`` plural inflection methods take the word to be
+inflected as their first argument and return the corresponding inflection.
+Note that all such methods expect the *singular* form of the word. The
+results of passing a plural form are undefined (and unlikely to be correct).
+Similarly, the ``si...`` singular inflection method expects the *plural*
+form of the word.
+
+The ``plural...`` methods also take an optional second argument,
+which indicates the grammatical "number" of the word (or of another word
+with which the word being inflected must agree). If the "number" argument is
+supplied and is not ``1`` (or ``"one"`` or ``"a"``, or some other adjective that
+implies the singular), the plural form of the word is returned. If the
+"number" argument *does* indicate singularity, the (uninflected) word
+itself is returned. If the number argument is omitted, the plural form
+is returned unconditionally.
+
+The ``si...`` method takes a second argument in a similar fashion. If it is
+some form of the number ``1``, or is omitted, the singular form is returned.
+Otherwise the plural is returned unaltered.
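+
+A short sketch of these conventions (assuming ``p = inflect.engine()``):
+
+.. code-block:: python
+
+    p.plural("cat")  # "cats" -- no count, so plural unconditionally
+    p.plural("cat", 1)  # "cat"
+    p.plural("cat", 3)  # "cats"
+    p.singular_noun("cats", 1)  # "cat"
+    p.singular_noun("cats", 3)  # "cats" -- plural returned unaltered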
+
+
+The various methods of ``inflect.engine`` are:
+
+
+
+``plural_noun(word, count=None)``
+
+ The method ``plural_noun()`` takes a *singular* English noun or
+ pronoun and returns its plural. Pronouns in the nominative ("I" ->
+ "we") and accusative ("me" -> "us") cases are handled, as are
+ possessive pronouns ("mine" -> "ours").
+
+
+``plural_verb(word, count=None)``
+
+ The method ``plural_verb()`` takes the *singular* form of a
+ conjugated verb (that is, one which is already in the correct "person"
+ and "mood") and returns the corresponding plural conjugation.
+
+
+``plural_adj(word, count=None)``
+
+ The method ``plural_adj()`` takes the *singular* form of
+ certain types of adjectives and returns the corresponding plural form.
+ Adjectives that are correctly handled include: "numerical" adjectives
+ ("a" -> "some"), demonstrative adjectives ("this" -> "these", "that" ->
+ "those"), and possessives ("my" -> "our", "cat's" -> "cats'", "child's"
+ -> "childrens'", etc.)
+
+
+``plural(word, count=None)``
+
+ The method ``plural()`` takes a *singular* English noun,
+ pronoun, verb, or adjective and returns its plural form. Where a word
+ has more than one inflection depending on its part of speech (for
+ example, the noun "thought" inflects to "thoughts", the verb "thought"
+ to "thought"), the (singular) noun sense is preferred to the (singular)
+ verb sense.
+
+ Hence ``plural("knife")`` will return "knives" ("knife" having been treated
+ as a singular noun), whereas ``plural("knifes")`` will return "knife"
+ ("knifes" having been treated as a 3rd person singular verb).
+
+ The inherent ambiguity of such cases suggests that,
+ where the part of speech is known, ``plural_noun``, ``plural_verb``, and
+ ``plural_adj`` should be used in preference to ``plural``.
+
+
+``singular_noun(word, count=None)``
+
+ The method ``singular_noun()`` takes a *plural* English noun or
+ pronoun and returns its singular. Pronouns in the nominative ("we" ->
+ "I") and accusative ("us" -> "me") cases are handled, as are
+ possessive pronouns ("ours" -> "mine"). When third person
+ singular pronouns are returned they take the neuter gender by default
+ ("they" -> "it"), not ("they"-> "she") nor ("they" -> "he"). This can be
+ changed with ``gender()``.
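+
+A combined sketch of these methods (assuming ``p = inflect.engine()``):
+
+.. code-block:: python
+
+    p.plural_noun("I")  # "we"
+    p.plural_verb("was")  # "were"
+    p.plural_adj("this")  # "these"
+    p.plural("knife")  # "knives" -- noun sense preferred
+    p.singular_noun("ours")  # "mine"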
+
+Note that all these methods ignore any whitespace surrounding the
+word being inflected, but preserve that whitespace when the result is
+returned. For example, ``plural(" cat  ")`` returns " cats  ".
+
+
+``gender(gender)``
+
+ The third person plural pronoun takes the same form for the feminine,
+ masculine and neuter (e.g. "they"). The singular, however, depends upon
+ gender (e.g. "she", "he", "it" and "they" -- "they" being the gender-neutral
+ form). By default ``singular_noun`` returns the neuter form; another gender
+ can be selected with the ``gender`` method. Pass one of "feminine",
+ "masculine", "neuter" or "gender-neutral" (as in the synopsis) to select the
+ form of the singular returned thereafter. For example, ``gender("feminine")``
+ followed by ``singular_noun("themselves")`` returns "herself".
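+
+A sketch, using the full gender name as in the synopsis above:
+
+.. code-block:: python
+
+    p.singular_noun("they")  # "it" -- neuter default
+    p.gender("feminine")
+    p.singular_noun("they")  # "she"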
+
+Numbered plurals
+----------------
+
+The ``plural...`` methods return only the inflected word, not the count that
+was used to inflect it. Thus, in order to produce "I saw 3 ducks", it
+is necessary to use:
+
+.. code-block:: python
+
+    print("I saw", N, p.plural_noun(animal, N))
+
+Since the usual purpose of producing a plural is to make it agree with
+a preceding count, inflect.py provides a method
+(``no(word, count)``) which, given a word and a(n optional) count, returns the
+count followed by the correctly inflected word. Hence the previous
+example can be rewritten:
+
+.. code-block:: python
+
+    print("I saw ", p.no(animal, N))
+
+In addition, if the count is zero (or some other term which implies
+zero, such as ``"zero"``, ``"nil"``, etc.) the count is replaced by the
+word "no". Hence, if ``N`` had the value zero, the previous example
+would print (the somewhat more elegant)::
+
+    I saw no animals
+
+rather than::
+
+    I saw 0 animals
+
+Note that the name of the method is a pun: the method
+returns either a number (a *No.*) or a ``"no"``, in front of the
+inflected word.
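+
+A sketch of ``no()`` across counts (assuming ``p = inflect.engine()``):
+
+.. code-block:: python
+
+    p.no("error", 3)  # "3 errors"
+    p.no("error", 1)  # "1 error"
+    p.no("error", 0)  # "no errors"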
+
+
+Reducing the number of counts required
+--------------------------------------
+
+In some contexts, the need to supply an explicit count to the various
+``plural...`` methods makes for tiresome repetition. For example:
+
+.. code-block:: python
+
+    print(
+        plural_adj("This", errors),
+        plural_noun(" error", errors),
+        plural_verb(" was", errors),
+        " fatal.",
+    )
+
+inflect.py therefore provides a method
+(``num(count=None, show=None)``) which may be used to set a persistent "default number"
+value. If such a value is set, it is subsequently used whenever an
+optional second "number" argument is omitted. The default value thus set
+can subsequently be removed by calling ``num()`` with no arguments.
+Hence we could rewrite the previous example:
+
+.. code-block:: python
+
+    p.num(errors)
+    print(p.plural_adj("This"), p.plural_noun(" error"), p.plural_verb(" was"), "fatal.")
+    p.num()
+
+Normally, ``num()`` returns its first argument, so that it may also
+be "inlined" in contexts like:
+
+.. code-block:: python
+
+    print(p.num(errors), p.plural_noun(" error"), p.plural_verb(" was"), " detected.")
+    if severity > 1:
+        print(
+            p.plural_adj("This"), p.plural_noun(" error"), p.plural_verb(" was"), "fatal."
+        )
+
+However, in certain contexts (see `INTERPOLATING INFLECTIONS IN STRINGS`)
+it is preferable that ``num()`` return an empty string. Hence ``num()``
+provides an optional second argument. If that argument is supplied (that is, if
+it is defined) and evaluates to false, ``num`` returns an empty string
+instead of its first argument. For example:
+
+.. code-block:: python
+
+    print(p.num(errors, 0), p.no("error"), p.plural_verb(" was"), " detected.")
+    if severity > 1:
+        print(
+            p.plural_adj("This"), p.plural_noun(" error"), p.plural_verb(" was"), "fatal."
+        )
+
+
+
+Number-insensitive equality
+---------------------------
+
+inflect.py also provides a solution to the problem
+of comparing words of differing plurality through the methods
+``compare(word1, word2)``, ``compare_nouns(word1, word2)``,
+``compare_verbs(word1, word2)``, and ``compare_adjs(word1, word2)``.
+Each  of these methods takes two strings, and  compares them
+using the corresponding plural-inflection method (``plural()``, ``plural_noun()``,
+``plural_verb()``, and ``plural_adj()`` respectively).
+
+The comparison returns true if:
+
+- the strings are equal, or
+- one string is equal to a plural form of the other, or
+- the strings are two different plural forms of the one word.
+
+
+Hence all of the following return true:
+
+.. code-block:: python
+
+    p.compare("index", "index")  # RETURNS "eq"
+    p.compare("index", "indexes")  # RETURNS "s:p"
+    p.compare("index", "indices")  # RETURNS "s:p"
+    p.compare("indexes", "index")  # RETURNS "p:s"
+    p.compare("indices", "index")  # RETURNS "p:s"
+    p.compare("indices", "indexes")  # RETURNS "p:p"
+    p.compare("indexes", "indices")  # RETURNS "p:p"
+    p.compare("indices", "indices")  # RETURNS "eq"
+
+As indicated by the comments in the previous example, the actual value
+returned by the various ``compare`` methods encodes which of the
+three equality rules succeeded: "eq" is returned if the strings were
+identical, "s:p" if the strings were singular and plural respectively,
+"p:s" for plural and singular, and "p:p" for two distinct plurals.
+Inequality is indicated by returning an empty string.
+
+It should be noted that two distinct singular words which happen to take
+the same plural form are *not* considered equal, nor are cases where
+one (singular) word's plural is the other (plural) word's singular.
+Hence all of the following return false:
+
+.. code-block:: python
+
+    p.compare("base", "basis")  # ALTHOUGH BOTH -> "bases"
+    p.compare("syrinx", "syringe")  # ALTHOUGH BOTH -> "syringes"
+    p.compare("she", "he")  # ALTHOUGH BOTH -> "they"
+
+    p.compare("opus", "operas")  # ALTHOUGH "opus" -> "opera" -> "operas"
+    p.compare("taxi", "taxes")  # ALTHOUGH "taxi" -> "taxis" -> "taxes"
+
+Note too that, although the comparison is "number-insensitive", it is *not*
+case-insensitive (that is, ``compare("time", "Times")`` returns false). To obtain
+both number and case insensitivity, use the ``lower()`` method on both strings
+(that is, ``compare("time".lower(), "Times".lower())`` returns true).
+
+Related Functionality
+=====================
+
+Shout out to these libraries that provide related functionality:
+
+* `WordSet `_
+  parses identifiers like variable names into sets of words suitable for re-assembling
+  in another form.
+
+* `word2number `_ converts words to
+  a number.
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
diff --git a/setuptools/_vendor/inflect-7.3.1.dist-info/RECORD b/setuptools/_vendor/inflect-7.3.1.dist-info/RECORD
new file mode 100644
index 0000000000..73ff576be5
--- /dev/null
+++ b/setuptools/_vendor/inflect-7.3.1.dist-info/RECORD
@@ -0,0 +1,13 @@
+inflect-7.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+inflect-7.3.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+inflect-7.3.1.dist-info/METADATA,sha256=ZgMNY0WAZRs-U8wZiV2SMfjSKqBrMngXyDMs_CAwMwg,21079
+inflect-7.3.1.dist-info/RECORD,,
+inflect-7.3.1.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
+inflect-7.3.1.dist-info/top_level.txt,sha256=m52ujdp10CqT6jh1XQxZT6kEntcnv-7Tl7UiGNTzWZA,8
+inflect/__init__.py,sha256=Jxy1HJXZiZ85kHeLAhkmvz6EMTdFqBe-duvt34R6IOc,103796
+inflect/__pycache__/__init__.cpython-312.pyc,,
+inflect/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+inflect/compat/__pycache__/__init__.cpython-312.pyc,,
+inflect/compat/__pycache__/py38.cpython-312.pyc,,
+inflect/compat/py38.py,sha256=oObVfVnWX9_OpnOuEJn1mFbJxVhwyR5epbiTNXDDaso,160
+inflect/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL b/setuptools/_vendor/inflect-7.3.1.dist-info/WHEEL
similarity index 65%
rename from setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
rename to setuptools/_vendor/inflect-7.3.1.dist-info/WHEEL
index ba48cbcf92..564c6724e4 100644
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
+++ b/setuptools/_vendor/inflect-7.3.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.41.3)
+Generator: setuptools (70.2.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/setuptools/_vendor/inflect-7.3.1.dist-info/top_level.txt b/setuptools/_vendor/inflect-7.3.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..0fd75fab3e
--- /dev/null
+++ b/setuptools/_vendor/inflect-7.3.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+inflect
diff --git a/setuptools/_vendor/inflect/__init__.py b/setuptools/_vendor/inflect/__init__.py
new file mode 100644
index 0000000000..3eec27f4c6
--- /dev/null
+++ b/setuptools/_vendor/inflect/__init__.py
@@ -0,0 +1,3986 @@
+"""
+inflect: English language inflection
+ - correctly generate plurals, ordinals, indefinite articles
+ - convert numbers to words
+
+Copyright (C) 2010 Paul Dyson
+
+Based upon the Perl module
+`Lingua::EN::Inflect `_.
+
+methods:
+    classical inflect
+    plural plural_noun plural_verb plural_adj singular_noun no num a an
+    compare compare_nouns compare_verbs compare_adjs
+    present_participle
+    ordinal
+    number_to_words
+    join
+    defnoun defverb defadj defa defan
+
+INFLECTIONS:
+    classical inflect
+    plural plural_noun plural_verb plural_adj singular_noun compare
+    no num a an present_participle
+
+PLURALS:
+    classical inflect
+    plural plural_noun plural_verb plural_adj singular_noun no num
+    compare compare_nouns compare_verbs compare_adjs
+
+COMPARISONS:
+    classical
+    compare compare_nouns compare_verbs compare_adjs
+
+ARTICLES:
+    classical inflect num a an
+
+NUMERICAL:
+    ordinal number_to_words
+
+USER_DEFINED:
+    defnoun defverb defadj defa defan
+
+Exceptions:
+ UnknownClassicalModeError
+ BadNumValueError
+ BadChunkingOptionError
+ NumOutOfRangeError
+ BadUserDefinedPatternError
+ BadRcFileError
+ BadGenderError
+
+"""
+
+from __future__ import annotations
+
+import ast
+import collections
+import contextlib
+import functools
+import itertools
+import re
+from numbers import Number
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    Literal,
+    Match,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+    cast,
+)
+
+from more_itertools import windowed_complete
+from typeguard import typechecked
+
+from .compat.py38 import Annotated
+
+
+class UnknownClassicalModeError(Exception):
+    pass
+
+
+class BadNumValueError(Exception):
+    pass
+
+
+class BadChunkingOptionError(Exception):
+    pass
+
+
+class NumOutOfRangeError(Exception):
+    pass
+
+
+class BadUserDefinedPatternError(Exception):
+    pass
+
+
+class BadRcFileError(Exception):
+    pass
+
+
+class BadGenderError(Exception):
+    pass
+
+
+def enclose(s: str) -> str:
+    return f"(?:{s})"
+
+
+def joinstem(cutpoint: Optional[int] = 0, words: Optional[Iterable[str]] = None) -> str:
+    """
+    Join stem of each word in words into a string for regex.
+
+    Each word is truncated at cutpoint.
+
+    Cutpoint is usually negative indicating the number of letters to remove
+    from the end of each word.
+
+    >>> joinstem(-2, ["ephemeris", "iris", ".*itis"])
+    '(?:ephemer|ir|.*it)'
+
+    >>> joinstem(None, ["ephemeris"])
+    '(?:ephemeris)'
+
+    >>> joinstem(5, None)
+    '(?:)'
+    """
+    return enclose("|".join(w[:cutpoint] for w in words or []))
+
+
+def bysize(words: Iterable[str]) -> Dict[int, set]:
+    """
+    From a list of words, return a dict of sets sorted by word length.
+
+    >>> words = ['ant', 'cat', 'dog', 'pig', 'frog', 'goat', 'horse', 'elephant']
+    >>> ret = bysize(words)
+    >>> sorted(ret[3])
+    ['ant', 'cat', 'dog', 'pig']
+    >>> ret[5]
+    {'horse'}
+    """
+    res: Dict[int, set] = collections.defaultdict(set)
+    for w in words:
+        res[len(w)].add(w)
+    return res
+
+
+def make_pl_si_lists(
+    lst: Iterable[str],
+    plending: str,
+    siendingsize: Optional[int],
+    dojoinstem: bool = True,
+):
+    """
+    given a list of singular words: lst
+
+    an ending to append to make the plural: plending
+
+    the number of characters to remove from the singular
+    before appending plending: siendingsize
+
+    a flag whether to create a joinstem: dojoinstem
+
+    return:
+    a list of pluralised words: si_list (called si because this is what you need to
+    look for to make the singular)
+
+    the pluralised words as a dict of sets sorted by word length: si_bysize
+    the singular words as a dict of sets sorted by word length: pl_bysize
+    if dojoinstem is True: a regular expression that matches any of the stems: stem
+    """
+    if siendingsize is not None:
+        siendingsize = -siendingsize
+    si_list = [w[:siendingsize] + plending for w in lst]
+    pl_bysize = bysize(lst)
+    si_bysize = bysize(si_list)
+    if dojoinstem:
+        stem = joinstem(siendingsize, lst)
+        return si_list, si_bysize, pl_bysize, stem
+    else:
+        return si_list, si_bysize, pl_bysize
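+
+
+# A worked sketch of the helper above: make_pl_si_lists(["stamen", "lumen"],
+# "ina", 2) strips two characters and appends "ina", yielding the si_list
+# ["stamina", "lumina"], the two bysize dicts, and the stem regex
+# '(?:stam|lum)'.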
+
+
+# 1. PLURALS
+
+pl_sb_irregular_s = {
+    "corpus": "corpuses|corpora",
+    "opus": "opuses|opera",
+    "genus": "genera",
+    "mythos": "mythoi",
+    "penis": "penises|penes",
+    "testis": "testes",
+    "atlas": "atlases|atlantes",
+    "yes": "yeses",
+}
+
+pl_sb_irregular = {
+    "child": "children",
+    "chili": "chilis|chilies",
+    "brother": "brothers|brethren",
+    "infinity": "infinities|infinity",
+    "loaf": "loaves",
+    "lore": "lores|lore",
+    "hoof": "hoofs|hooves",
+    "beef": "beefs|beeves",
+    "thief": "thiefs|thieves",
+    "money": "monies",
+    "mongoose": "mongooses",
+    "ox": "oxen",
+    "cow": "cows|kine",
+    "graffito": "graffiti",
+    "octopus": "octopuses|octopodes",
+    "genie": "genies|genii",
+    "ganglion": "ganglions|ganglia",
+    "trilby": "trilbys",
+    "turf": "turfs|turves",
+    "numen": "numina",
+    "atman": "atmas",
+    "occiput": "occiputs|occipita",
+    "sabretooth": "sabretooths",
+    "sabertooth": "sabertooths",
+    "lowlife": "lowlifes",
+    "flatfoot": "flatfoots",
+    "tenderfoot": "tenderfoots",
+    "romany": "romanies",
+    "jerry": "jerries",
+    "mary": "maries",
+    "talouse": "talouses",
+    "rom": "roma",
+    "carmen": "carmina",
+}
+
+pl_sb_irregular.update(pl_sb_irregular_s)
+# pl_sb_irregular_keys = enclose('|'.join(pl_sb_irregular.keys()))
+
+pl_sb_irregular_caps = {
+    "Romany": "Romanies",
+    "Jerry": "Jerrys",
+    "Mary": "Marys",
+    "Rom": "Roma",
+}
+
+pl_sb_irregular_compound = {"prima donna": "prima donnas|prime donne"}
+
+si_sb_irregular = {v: k for (k, v) in pl_sb_irregular.items()}
+for k in list(si_sb_irregular):
+    if "|" in k:
+        k1, k2 = k.split("|")
+        si_sb_irregular[k1] = si_sb_irregular[k2] = si_sb_irregular[k]
+        del si_sb_irregular[k]
+si_sb_irregular_caps = {v: k for (k, v) in pl_sb_irregular_caps.items()}
+si_sb_irregular_compound = {v: k for (k, v) in pl_sb_irregular_compound.items()}
+for k in list(si_sb_irregular_compound):
+    if "|" in k:
+        k1, k2 = k.split("|")
+        si_sb_irregular_compound[k1] = si_sb_irregular_compound[k2] = (
+            si_sb_irregular_compound[k]
+        )
+        del si_sb_irregular_compound[k]
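+
+# e.g. "prima donna": "prima donnas|prime donne" inverts to two entries,
+# mapping both "prima donnas" and "prime donne" back to "prima donna".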
+
+# si_sb_irregular_keys = enclose('|'.join(si_sb_irregular.keys()))
+
+# Z's that don't double
+
+pl_sb_z_zes_list = ("quartz", "topaz")
+pl_sb_z_zes_bysize = bysize(pl_sb_z_zes_list)
+
+pl_sb_ze_zes_list = ("snooze",)
+pl_sb_ze_zes_bysize = bysize(pl_sb_ze_zes_list)
+
+
+# CLASSICAL "..is" -> "..ides"
+
+pl_sb_C_is_ides_complete = [
+    # GENERAL WORDS...
+    "ephemeris",
+    "iris",
+    "clitoris",
+    "chrysalis",
+    "epididymis",
+]
+
+pl_sb_C_is_ides_endings = [
+    # INFLAMMATIONS...
+    "itis"
+]
+
+pl_sb_C_is_ides = joinstem(
+    -2, pl_sb_C_is_ides_complete + [f".*{w}" for w in pl_sb_C_is_ides_endings]
+)
+
+pl_sb_C_is_ides_list = pl_sb_C_is_ides_complete + pl_sb_C_is_ides_endings
+
+(
+    si_sb_C_is_ides_list,
+    si_sb_C_is_ides_bysize,
+    pl_sb_C_is_ides_bysize,
+) = make_pl_si_lists(pl_sb_C_is_ides_list, "ides", 2, dojoinstem=False)
+
+
+# CLASSICAL "..a" -> "..ata"
+
+pl_sb_C_a_ata_list = (
+    "anathema",
+    "bema",
+    "carcinoma",
+    "charisma",
+    "diploma",
+    "dogma",
+    "drama",
+    "edema",
+    "enema",
+    "enigma",
+    "lemma",
+    "lymphoma",
+    "magma",
+    "melisma",
+    "miasma",
+    "oedema",
+    "sarcoma",
+    "schema",
+    "soma",
+    "stigma",
+    "stoma",
+    "trauma",
+    "gumma",
+    "pragma",
+)
+
+(
+    si_sb_C_a_ata_list,
+    si_sb_C_a_ata_bysize,
+    pl_sb_C_a_ata_bysize,
+    pl_sb_C_a_ata,
+) = make_pl_si_lists(pl_sb_C_a_ata_list, "ata", 1)
+
+# UNCONDITIONAL "..a" -> "..ae"
+
+pl_sb_U_a_ae_list = (
+    "alumna",
+    "alga",
+    "vertebra",
+    "persona",
+    "vita",
+)
+(
+    si_sb_U_a_ae_list,
+    si_sb_U_a_ae_bysize,
+    pl_sb_U_a_ae_bysize,
+    pl_sb_U_a_ae,
+) = make_pl_si_lists(pl_sb_U_a_ae_list, "e", None)
+
+# CLASSICAL "..a" -> "..ae"
+
+pl_sb_C_a_ae_list = (
+    "amoeba",
+    "antenna",
+    "formula",
+    "hyperbola",
+    "medusa",
+    "nebula",
+    "parabola",
+    "abscissa",
+    "hydra",
+    "nova",
+    "lacuna",
+    "aurora",
+    "umbra",
+    "flora",
+    "fauna",
+)
+(
+    si_sb_C_a_ae_list,
+    si_sb_C_a_ae_bysize,
+    pl_sb_C_a_ae_bysize,
+    pl_sb_C_a_ae,
+) = make_pl_si_lists(pl_sb_C_a_ae_list, "e", None)
+
+
+# CLASSICAL "..en" -> "..ina"
+
+pl_sb_C_en_ina_list = ("stamen", "foramen", "lumen")
+
+(
+    si_sb_C_en_ina_list,
+    si_sb_C_en_ina_bysize,
+    pl_sb_C_en_ina_bysize,
+    pl_sb_C_en_ina,
+) = make_pl_si_lists(pl_sb_C_en_ina_list, "ina", 2)
+
+
+# UNCONDITIONAL "..um" -> "..a"
+
+pl_sb_U_um_a_list = (
+    "bacterium",
+    "agendum",
+    "desideratum",
+    "erratum",
+    "stratum",
+    "datum",
+    "ovum",
+    "extremum",
+    "candelabrum",
+)
+(
+    si_sb_U_um_a_list,
+    si_sb_U_um_a_bysize,
+    pl_sb_U_um_a_bysize,
+    pl_sb_U_um_a,
+) = make_pl_si_lists(pl_sb_U_um_a_list, "a", 2)
+
+# CLASSICAL "..um" -> "..a"
+
+pl_sb_C_um_a_list = (
+    "maximum",
+    "minimum",
+    "momentum",
+    "optimum",
+    "quantum",
+    "cranium",
+    "curriculum",
+    "dictum",
+    "phylum",
+    "aquarium",
+    "compendium",
+    "emporium",
+    "encomium",
+    "gymnasium",
+    "honorarium",
+    "interregnum",
+    "lustrum",
+    "memorandum",
+    "millennium",
+    "rostrum",
+    "spectrum",
+    "speculum",
+    "stadium",
+    "trapezium",
+    "ultimatum",
+    "medium",
+    "vacuum",
+    "velum",
+    "consortium",
+    "arboretum",
+)
+
+(
+    si_sb_C_um_a_list,
+    si_sb_C_um_a_bysize,
+    pl_sb_C_um_a_bysize,
+    pl_sb_C_um_a,
+) = make_pl_si_lists(pl_sb_C_um_a_list, "a", 2)
+
+
+# UNCONDITIONAL "..us" -> "i"
+
+pl_sb_U_us_i_list = (
+    "alumnus",
+    "alveolus",
+    "bacillus",
+    "bronchus",
+    "locus",
+    "nucleus",
+    "stimulus",
+    "meniscus",
+    "sarcophagus",
+)
+(
+    si_sb_U_us_i_list,
+    si_sb_U_us_i_bysize,
+    pl_sb_U_us_i_bysize,
+    pl_sb_U_us_i,
+) = make_pl_si_lists(pl_sb_U_us_i_list, "i", 2)
+
+# CLASSICAL "..us" -> "..i"
+
+pl_sb_C_us_i_list = (
+    "focus",
+    "radius",
+    "genius",
+    "incubus",
+    "succubus",
+    "nimbus",
+    "fungus",
+    "nucleolus",
+    "stylus",
+    "torus",
+    "umbilicus",
+    "uterus",
+    "hippopotamus",
+    "cactus",
+)
+
+(
+    si_sb_C_us_i_list,
+    si_sb_C_us_i_bysize,
+    pl_sb_C_us_i_bysize,
+    pl_sb_C_us_i,
+) = make_pl_si_lists(pl_sb_C_us_i_list, "i", 2)
+
+
+# CLASSICAL "..us" -> "..us"  (ASSIMILATED 4TH DECLENSION LATIN NOUNS)
+
+pl_sb_C_us_us = (
+    "status",
+    "apparatus",
+    "prospectus",
+    "sinus",
+    "hiatus",
+    "impetus",
+    "plexus",
+)
+pl_sb_C_us_us_bysize = bysize(pl_sb_C_us_us)
+
+# UNCONDITIONAL "..on" -> "a"
+
+pl_sb_U_on_a_list = (
+    "criterion",
+    "perihelion",
+    "aphelion",
+    "phenomenon",
+    "prolegomenon",
+    "noumenon",
+    "organon",
+    "asyndeton",
+    "hyperbaton",
+)
+(
+    si_sb_U_on_a_list,
+    si_sb_U_on_a_bysize,
+    pl_sb_U_on_a_bysize,
+    pl_sb_U_on_a,
+) = make_pl_si_lists(pl_sb_U_on_a_list, "a", 2)
+
+# CLASSICAL "..on" -> "..a"
+
+pl_sb_C_on_a_list = ("oxymoron",)
+
+(
+    si_sb_C_on_a_list,
+    si_sb_C_on_a_bysize,
+    pl_sb_C_on_a_bysize,
+    pl_sb_C_on_a,
+) = make_pl_si_lists(pl_sb_C_on_a_list, "a", 2)
+
+
+# CLASSICAL "..o" -> "..i"  (BUT NORMALLY -> "..os")
+
+pl_sb_C_o_i = [
+    "solo",
+    "soprano",
+    "basso",
+    "alto",
+    "contralto",
+    "tempo",
+    "piano",
+    "virtuoso",
+]  # list not tuple so can concat for pl_sb_U_o_os
+
+pl_sb_C_o_i_bysize = bysize(pl_sb_C_o_i)
+si_sb_C_o_i_bysize = bysize([f"{w[:-1]}i" for w in pl_sb_C_o_i])
+
+pl_sb_C_o_i_stems = joinstem(-1, pl_sb_C_o_i)
+
+# ALWAYS "..o" -> "..os"
+
+pl_sb_U_o_os_complete = {"ado", "ISO", "NATO", "NCO", "NGO", "oto"}
+si_sb_U_o_os_complete = {f"{w}s" for w in pl_sb_U_o_os_complete}
+
+
+pl_sb_U_o_os_endings = [
+    "aficionado",
+    "aggro",
+    "albino",
+    "allegro",
+    "ammo",
+    "Antananarivo",
+    "archipelago",
+    "armadillo",
+    "auto",
+    "avocado",
+    "Bamako",
+    "Barquisimeto",
+    "bimbo",
+    "bingo",
+    "Biro",
+    "bolero",
+    "Bolzano",
+    "bongo",
+    "Boto",
+    "burro",
+    "Cairo",
+    "canto",
+    "cappuccino",
+    "casino",
+    "cello",
+    "Chicago",
+    "Chimango",
+    "cilantro",
+    "cochito",
+    "coco",
+    "Colombo",
+    "Colorado",
+    "commando",
+    "concertino",
+    "contango",
+    "credo",
+    "crescendo",
+    "cyano",
+    "demo",
+    "ditto",
+    "Draco",
+    "dynamo",
+    "embryo",
+    "Esperanto",
+    "espresso",
+    "euro",
+    "falsetto",
+    "Faro",
+    "fiasco",
+    "Filipino",
+    "flamenco",
+    "furioso",
+    "generalissimo",
+    "Gestapo",
+    "ghetto",
+    "gigolo",
+    "gizmo",
+    "Greensboro",
+    "gringo",
+    "Guaiabero",
+    "guano",
+    "gumbo",
+    "gyro",
+    "hairdo",
+    "hippo",
+    "Idaho",
+    "impetigo",
+    "inferno",
+    "info",
+    "intermezzo",
+    "intertrigo",
+    "Iquico",
+    "jumbo",
+    "junto",
+    "Kakapo",
+    "kilo",
+    "Kinkimavo",
+    "Kokako",
+    "Kosovo",
+    "Lesotho",
+    "libero",
+    "libido",
+    "libretto",
+    "lido",
+    "Lilo",
+    "limbo",
+    "limo",
+    "lineno",
+    "lingo",
+    "lino",
+    "livedo",
+    "loco",
+    "logo",
+    "lumbago",
+    "macho",
+    "macro",
+    "mafioso",
+    "magneto",
+    "magnifico",
+    "Majuro",
+    "Malabo",
+    "manifesto",
+    "Maputo",
+    "Maracaibo",
+    "medico",
+    "memo",
+    "metro",
+    "Mexico",
+    "micro",
+    "Milano",
+    "Monaco",
+    "mono",
+    "Montenegro",
+    "Morocco",
+    "Muqdisho",
+    "myo",
+    "neutrino",
+    "Ningbo",
+    "octavo",
+    "oregano",
+    "Orinoco",
+    "Orlando",
+    "Oslo",
+    "panto",
+    "Paramaribo",
+    "Pardusco",
+    "pedalo",
+    "photo",
+    "pimento",
+    "pinto",
+    "pleco",
+    "Pluto",
+    "pogo",
+    "polo",
+    "poncho",
+    "Porto-Novo",
+    "Porto",
+    "pro",
+    "psycho",
+    "pueblo",
+    "quarto",
+    "Quito",
+    "repo",
+    "rhino",
+    "risotto",
+    "rococo",
+    "rondo",
+    "Sacramento",
+    "saddo",
+    "sago",
+    "salvo",
+    "Santiago",
+    "Sapporo",
+    "Sarajevo",
+    "scherzando",
+    "scherzo",
+    "silo",
+    "sirocco",
+    "sombrero",
+    "staccato",
+    "sterno",
+    "stucco",
+    "stylo",
+    "sumo",
+    "Taiko",
+    "techno",
+    "terrazzo",
+    "testudo",
+    "timpano",
+    "tiro",
+    "tobacco",
+    "Togo",
+    "Tokyo",
+    "torero",
+    "Torino",
+    "Toronto",
+    "torso",
+    "tremolo",
+    "typo",
+    "tyro",
+    "ufo",
+    "UNESCO",
+    "vaquero",
+    "vermicello",
+    "verso",
+    "vibrato",
+    "violoncello",
+    "Virgo",
+    "weirdo",
+    "WHO",
+    "WTO",
+    "Yamoussoukro",
+    "yo-yo",
+    "zero",
+    "Zibo",
+] + pl_sb_C_o_i
+
+pl_sb_U_o_os_bysize = bysize(pl_sb_U_o_os_endings)
+si_sb_U_o_os_bysize = bysize([f"{w}s" for w in pl_sb_U_o_os_endings])
+
+
+# UNCONDITIONAL "..ch" -> "..chs"
+
+pl_sb_U_ch_chs_list = ("czech", "eunuch", "stomach")
+
+(
+    si_sb_U_ch_chs_list,
+    si_sb_U_ch_chs_bysize,
+    pl_sb_U_ch_chs_bysize,
+    pl_sb_U_ch_chs,
+) = make_pl_si_lists(pl_sb_U_ch_chs_list, "s", None)
+
+
+# UNCONDITIONAL "..[ei]x" -> "..ices"
+
+pl_sb_U_ex_ices_list = ("codex", "murex", "silex")
+(
+    si_sb_U_ex_ices_list,
+    si_sb_U_ex_ices_bysize,
+    pl_sb_U_ex_ices_bysize,
+    pl_sb_U_ex_ices,
+) = make_pl_si_lists(pl_sb_U_ex_ices_list, "ices", 2)
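+
+# Editor's note, a hedged sketch of the make_pl_si_lists contract (the
+# helper is defined earlier in this module): given singulars, a plural
+# ending, and how many trailing characters the ending replaces, it returns
+# the plural forms, both bysize lookup tables and, unless dojoinstem=False,
+# a joined-stem regex.  For the tuple above:
+#
+#     ("codex", "murex", "silex") -> ["codices", "murices", "silices"]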
+
+pl_sb_U_ix_ices_list = ("radix", "helix")
+(
+    si_sb_U_ix_ices_list,
+    si_sb_U_ix_ices_bysize,
+    pl_sb_U_ix_ices_bysize,
+    pl_sb_U_ix_ices,
+) = make_pl_si_lists(pl_sb_U_ix_ices_list, "ices", 2)
+
+# CLASSICAL "..[ei]x" -> "..ices"
+
+pl_sb_C_ex_ices_list = (
+    "vortex",
+    "vertex",
+    "cortex",
+    "latex",
+    "pontifex",
+    "apex",
+    "index",
+    "simplex",
+)
+
+(
+    si_sb_C_ex_ices_list,
+    si_sb_C_ex_ices_bysize,
+    pl_sb_C_ex_ices_bysize,
+    pl_sb_C_ex_ices,
+) = make_pl_si_lists(pl_sb_C_ex_ices_list, "ices", 2)
+
+
+pl_sb_C_ix_ices_list = ("appendix",)
+
+(
+    si_sb_C_ix_ices_list,
+    si_sb_C_ix_ices_bysize,
+    pl_sb_C_ix_ices_bysize,
+    pl_sb_C_ix_ices,
+) = make_pl_si_lists(pl_sb_C_ix_ices_list, "ices", 2)
+
+
+# ARABIC: ".." -> "..i"
+
+pl_sb_C_i_list = ("afrit", "afreet", "efreet")
+
+(si_sb_C_i_list, si_sb_C_i_bysize, pl_sb_C_i_bysize, pl_sb_C_i) = make_pl_si_lists(
+    pl_sb_C_i_list, "i", None
+)
+
+
+# HEBREW: ".." -> "..im"
+
+pl_sb_C_im_list = ("goy", "seraph", "cherub")
+
+(si_sb_C_im_list, si_sb_C_im_bysize, pl_sb_C_im_bysize, pl_sb_C_im) = make_pl_si_lists(
+    pl_sb_C_im_list, "im", None
+)
+
+
+# UNCONDITIONAL "..man" -> "..mans"
+
+pl_sb_U_man_mans_list = """
+    ataman caiman cayman ceriman
+    desman dolman farman harman hetman
+    human leman ottoman shaman talisman
+""".split()
+pl_sb_U_man_mans_caps_list = """
+    Alabaman Bahaman Burman German
+    Hiroshiman Liman Nakayaman Norman Oklahoman
+    Panaman Roman Selman Sonaman Tacoman Yakiman
+    Yokohaman Yuman
+""".split()
+
+(
+    si_sb_U_man_mans_list,
+    si_sb_U_man_mans_bysize,
+    pl_sb_U_man_mans_bysize,
+) = make_pl_si_lists(pl_sb_U_man_mans_list, "s", None, dojoinstem=False)
+(
+    si_sb_U_man_mans_caps_list,
+    si_sb_U_man_mans_caps_bysize,
+    pl_sb_U_man_mans_caps_bysize,
+) = make_pl_si_lists(pl_sb_U_man_mans_caps_list, "s", None, dojoinstem=False)
+
+# UNCONDITIONAL "..louse" -> "..lice"
+pl_sb_U_louse_lice_list = ("booklouse", "grapelouse", "louse", "woodlouse")
+
+(
+    si_sb_U_louse_lice_list,
+    si_sb_U_louse_lice_bysize,
+    pl_sb_U_louse_lice_bysize,
+) = make_pl_si_lists(pl_sb_U_louse_lice_list, "lice", 5, dojoinstem=False)
+
+pl_sb_uninflected_s_complete = [
+    # PAIRS OR GROUPS SUBSUMED TO A SINGULAR...
+    "breeches",
+    "britches",
+    "pajamas",
+    "pyjamas",
+    "clippers",
+    "gallows",
+    "hijinks",
+    "headquarters",
+    "pliers",
+    "scissors",
+    "testes",
+    "herpes",
+    "pincers",
+    "shears",
+    "proceedings",
+    "trousers",
+    # UNASSIMILATED LATIN 4th DECLENSION
+    "cantus",
+    "coitus",
+    "nexus",
+    # RECENT IMPORTS...
+    "contretemps",
+    "corps",
+    "debris",
+    "siemens",
+    # DISEASES
+    "mumps",
+    # MISCELLANEOUS OTHERS...
+    "diabetes",
+    "jackanapes",
+    "series",
+    "species",
+    "subspecies",
+    "rabies",
+    "chassis",
+    "innings",
+    "news",
+    "mews",
+    "haggis",
+]
+
+pl_sb_uninflected_s_endings = [
+    # RECENT IMPORTS...
+    "ois",
+    # DISEASES
+    "measles",
+]
+
+pl_sb_uninflected_s = pl_sb_uninflected_s_complete + [
+    f".*{w}" for w in pl_sb_uninflected_s_endings
+]
+
+pl_sb_uninflected_herd = (
+    # DON'T INFLECT IN CLASSICAL MODE, OTHERWISE NORMAL INFLECTION
+    "wildebeest",
+    "swine",
+    "eland",
+    "bison",
+    "buffalo",
+    "cattle",
+    "elk",
+    "rhinoceros",
+    "zucchini",
+    "caribou",
+    "dace",
+    "grouse",
+    "guinea fowl",
+    "guinea-fowl",
+    "haddock",
+    "hake",
+    "halibut",
+    "herring",
+    "mackerel",
+    "pickerel",
+    "pike",
+    "roe",
+    "seed",
+    "shad",
+    "snipe",
+    "teal",
+    "turbot",
+    "water fowl",
+    "water-fowl",
+)
+
+pl_sb_uninflected_complete = [
+    # SOME FISH AND HERD ANIMALS
+    "tuna",
+    "salmon",
+    "mackerel",
+    "trout",
+    "bream",
+    "sea-bass",
+    "sea bass",
+    "carp",
+    "cod",
+    "flounder",
+    "whiting",
+    "moose",
+    # OTHER ODDITIES
+    "graffiti",
+    "djinn",
+    "samuri",
+    "offspring",
+    "pence",
+    "quid",
+    "hertz",
+] + pl_sb_uninflected_s_complete
+# SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE)
+
+pl_sb_uninflected_caps = [
+    # ALL NATIONALS ENDING IN -ese
+    "Portuguese",
+    "Amoyese",
+    "Borghese",
+    "Congoese",
+    "Faroese",
+    "Foochowese",
+    "Genevese",
+    "Genoese",
+    "Gilbertese",
+    "Hottentotese",
+    "Kiplingese",
+    "Kongoese",
+    "Lucchese",
+    "Maltese",
+    "Nankingese",
+    "Niasese",
+    "Pekingese",
+    "Piedmontese",
+    "Pistoiese",
+    "Sarawakese",
+    "Shavese",
+    "Vermontese",
+    "Wenchowese",
+    "Yengeese",
+]
+
+
+pl_sb_uninflected_endings = [
+    # UNCOUNTABLE NOUNS
+    "butter",
+    "cash",
+    "furniture",
+    "information",
+    # SOME FISH AND HERD ANIMALS
+    "fish",
+    "deer",
+    "sheep",
+    # ALL NATIONALS ENDING IN -ese
+    "nese",
+    "rese",
+    "lese",
+    "mese",
+    # DISEASES
+    "pox",
+    # OTHER ODDITIES
+    "craft",
+] + pl_sb_uninflected_s_endings
+# SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE)
+
+
+pl_sb_uninflected_bysize = bysize(pl_sb_uninflected_endings)
+
+
+# SINGULAR WORDS ENDING IN ...s (ALL INFLECT WITH ...es)
+
+pl_sb_singular_s_complete = [
+    "acropolis",
+    "aegis",
+    "alias",
+    "asbestos",
+    "bathos",
+    "bias",
+    "bronchitis",
+    "bursitis",
+    "caddis",
+    "cannabis",
+    "canvas",
+    "chaos",
+    "cosmos",
+    "dais",
+    "digitalis",
+    "epidermis",
+    "ethos",
+    "eyas",
+    "gas",
+    "glottis",
+    "hubris",
+    "ibis",
+    "lens",
+    "mantis",
+    "marquis",
+    "metropolis",
+    "pathos",
+    "pelvis",
+    "polis",
+    "rhinoceros",
+    "sassafras",
+    "trellis",
+] + pl_sb_C_is_ides_complete
+
+
+pl_sb_singular_s_endings = ["ss", "us"] + pl_sb_C_is_ides_endings
+
+pl_sb_singular_s_bysize = bysize(pl_sb_singular_s_endings)
+
+si_sb_singular_s_complete = [f"{w}es" for w in pl_sb_singular_s_complete]
+si_sb_singular_s_endings = [f"{w}es" for w in pl_sb_singular_s_endings]
+si_sb_singular_s_bysize = bysize(si_sb_singular_s_endings)
+
+pl_sb_singular_s_es = ["[A-Z].*es"]
+
+pl_sb_singular_s = enclose(
+    "|".join(
+        pl_sb_singular_s_complete
+        + [f".*{w}" for w in pl_sb_singular_s_endings]
+        + pl_sb_singular_s_es
+    )
+)
+
+
+# PLURALS ENDING IN uses -> use
+
+
+si_sb_ois_oi_case = ("Bolshois", "Hanois")
+
+si_sb_uses_use_case = ("Betelgeuses", "Duses", "Meuses", "Syracuses", "Toulouses")
+
+si_sb_uses_use = (
+    "abuses",
+    "applauses",
+    "blouses",
+    "carouses",
+    "causes",
+    "chartreuses",
+    "clauses",
+    "contuses",
+    "douses",
+    "excuses",
+    "fuses",
+    "grouses",
+    "hypotenuses",
+    "masseuses",
+    "menopauses",
+    "misuses",
+    "muses",
+    "overuses",
+    "pauses",
+    "peruses",
+    "profuses",
+    "recluses",
+    "reuses",
+    "ruses",
+    "souses",
+    "spouses",
+    "suffuses",
+    "transfuses",
+    "uses",
+)
+
+si_sb_ies_ie_case = (
+    "Addies",
+    "Aggies",
+    "Allies",
+    "Amies",
+    "Angies",
+    "Annies",
+    "Annmaries",
+    "Archies",
+    "Arties",
+    "Aussies",
+    "Barbies",
+    "Barries",
+    "Basies",
+    "Bennies",
+    "Bernies",
+    "Berties",
+    "Bessies",
+    "Betties",
+    "Billies",
+    "Blondies",
+    "Bobbies",
+    "Bonnies",
+    "Bowies",
+    "Brandies",
+    "Bries",
+    "Brownies",
+    "Callies",
+    "Carnegies",
+    "Carries",
+    "Cassies",
+    "Charlies",
+    "Cheries",
+    "Christies",
+    "Connies",
+    "Curies",
+    "Dannies",
+    "Debbies",
+    "Dixies",
+    "Dollies",
+    "Donnies",
+    "Drambuies",
+    "Eddies",
+    "Effies",
+    "Ellies",
+    "Elsies",
+    "Eries",
+    "Ernies",
+    "Essies",
+    "Eugenies",
+    "Fannies",
+    "Flossies",
+    "Frankies",
+    "Freddies",
+    "Gillespies",
+    "Goldies",
+    "Gracies",
+    "Guthries",
+    "Hallies",
+    "Hatties",
+    "Hetties",
+    "Hollies",
+    "Jackies",
+    "Jamies",
+    "Janies",
+    "Jannies",
+    "Jeanies",
+    "Jeannies",
+    "Jennies",
+    "Jessies",
+    "Jimmies",
+    "Jodies",
+    "Johnies",
+    "Johnnies",
+    "Josies",
+    "Julies",
+    "Kalgoorlies",
+    "Kathies",
+    "Katies",
+    "Kellies",
+    "Kewpies",
+    "Kristies",
+    "Laramies",
+    "Lassies",
+    "Lauries",
+    "Leslies",
+    "Lessies",
+    "Lillies",
+    "Lizzies",
+    "Lonnies",
+    "Lories",
+    "Lorries",
+    "Lotties",
+    "Louies",
+    "Mackenzies",
+    "Maggies",
+    "Maisies",
+    "Mamies",
+    "Marcies",
+    "Margies",
+    "Maries",
+    "Marjories",
+    "Matties",
+    "McKenzies",
+    "Melanies",
+    "Mickies",
+    "Millies",
+    "Minnies",
+    "Mollies",
+    "Mounties",
+    "Nannies",
+    "Natalies",
+    "Nellies",
+    "Netties",
+    "Ollies",
+    "Ozzies",
+    "Pearlies",
+    "Pottawatomies",
+    "Reggies",
+    "Richies",
+    "Rickies",
+    "Robbies",
+    "Ronnies",
+    "Rosalies",
+    "Rosemaries",
+    "Rosies",
+    "Roxies",
+    "Rushdies",
+    "Ruthies",
+    "Sadies",
+    "Sallies",
+    "Sammies",
+    "Scotties",
+    "Selassies",
+    "Sherries",
+    "Sophies",
+    "Stacies",
+    "Stefanies",
+    "Stephanies",
+    "Stevies",
+    "Susies",
+    "Sylvies",
+    "Tammies",
+    "Terries",
+    "Tessies",
+    "Tommies",
+    "Tracies",
+    "Trekkies",
+    "Valaries",
+    "Valeries",
+    "Valkyries",
+    "Vickies",
+    "Virgies",
+    "Willies",
+    "Winnies",
+    "Wylies",
+    "Yorkies",
+)
+
+si_sb_ies_ie = (
+    "aeries",
+    "baggies",
+    "belies",
+    "biggies",
+    "birdies",
+    "bogies",
+    "bonnies",
+    "boogies",
+    "bookies",
+    "bourgeoisies",
+    "brownies",
+    "budgies",
+    "caddies",
+    "calories",
+    "camaraderies",
+    "cockamamies",
+    "collies",
+    "cookies",
+    "coolies",
+    "cooties",
+    "coteries",
+    "crappies",
+    "curies",
+    "cutesies",
+    "dogies",
+    "eyries",
+    "floozies",
+    "footsies",
+    "freebies",
+    "genies",
+    "goalies",
+    "groupies",
+    "hies",
+    "jalousies",
+    "junkies",
+    "kiddies",
+    "laddies",
+    "lassies",
+    "lies",
+    "lingeries",
+    "magpies",
+    "menageries",
+    "mommies",
+    "movies",
+    "neckties",
+    "newbies",
+    "nighties",
+    "oldies",
+    "organdies",
+    "overlies",
+    "pies",
+    "pinkies",
+    "pixies",
+    "potpies",
+    "prairies",
+    "quickies",
+    "reveries",
+    "rookies",
+    "rotisseries",
+    "softies",
+    "sorties",
+    "species",
+    "stymies",
+    "sweeties",
+    "ties",
+    "underlies",
+    "unties",
+    "veggies",
+    "vies",
+    "yuppies",
+    "zombies",
+)
+
+
+si_sb_oes_oe_case = (
+    "Chloes",
+    "Crusoes",
+    "Defoes",
+    "Faeroes",
+    "Ivanhoes",
+    "Joes",
+    "McEnroes",
+    "Moes",
+    "Monroes",
+    "Noes",
+    "Poes",
+    "Roscoes",
+    "Tahoes",
+    "Tippecanoes",
+    "Zoes",
+)
+
+si_sb_oes_oe = (
+    "aloes",
+    "backhoes",
+    "canoes",
+    "does",
+    "floes",
+    "foes",
+    "hoes",
+    "mistletoes",
+    "oboes",
+    "pekoes",
+    "roes",
+    "sloes",
+    "throes",
+    "tiptoes",
+    "toes",
+    "woes",
+)
+
+si_sb_z_zes = ("quartzes", "topazes")
+
+si_sb_zzes_zz = ("buzzes", "fizzes", "frizzes", "razzes")
+
+si_sb_ches_che_case = (
+    "Andromaches",
+    "Apaches",
+    "Blanches",
+    "Comanches",
+    "Nietzsches",
+    "Porsches",
+    "Roches",
+)
+
+si_sb_ches_che = (
+    "aches",
+    "avalanches",
+    "backaches",
+    "bellyaches",
+    "caches",
+    "cloches",
+    "creches",
+    "douches",
+    "earaches",
+    "fiches",
+    "headaches",
+    "heartaches",
+    "microfiches",
+    "niches",
+    "pastiches",
+    "psyches",
+    "quiches",
+    "stomachaches",
+    "toothaches",
+    "tranches",
+)
+
+si_sb_xes_xe = ("annexes", "axes", "deluxes", "pickaxes")
+
+si_sb_sses_sse_case = ("Hesses", "Jesses", "Larousses", "Matisses")
+si_sb_sses_sse = (
+    "bouillabaisses",
+    "crevasses",
+    "demitasses",
+    "impasses",
+    "mousses",
+    "posses",
+)
+
+si_sb_ves_ve_case = (
+    # *[nwl]ives -> [nwl]live
+    "Clives",
+    "Palmolives",
+)
+si_sb_ves_ve = (
+    # *[^d]eaves -> eave
+    "interweaves",
+    "weaves",
+    # *[nwl]ives -> [nwl]live
+    "olives",
+    # *[eoa]lves -> [eoa]lve
+    "bivalves",
+    "dissolves",
+    "resolves",
+    "salves",
+    "twelves",
+    "valves",
+)
+
+
+plverb_special_s = enclose(
+    "|".join(
+        [pl_sb_singular_s]
+        + pl_sb_uninflected_s
+        + list(pl_sb_irregular_s)
+        + ["(.*[csx])is", "(.*)ceps", "[A-Z].*s"]
+    )
+)
+
+_pl_sb_postfix_adj_defn = (
+    ("general", enclose(r"(?!major|lieutenant|brigadier|adjutant|.*star)\S+")),
+    ("martial", enclose("court")),
+    ("force", enclose("pound")),
+)
+
+pl_sb_postfix_adj: Iterable[str] = (
+    enclose(val + f"(?=(?:-|\\s+){key})") for key, val in _pl_sb_postfix_adj_defn
+)
+
+pl_sb_postfix_adj_stems = f"({'|'.join(pl_sb_postfix_adj)})(.*)"
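+
+# Editor's illustrative examples: _plnoun later uses this pattern to
+# pluralize the noun stem in noun + postfix-adjective compounds, e.g.
+#
+#     "attorney general" -> "attorneys general"
+#     "court martial"    -> "courts martial"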
+
+
+# PLURAL WORDS ENDING IS es GO TO SINGULAR is
+
+si_sb_es_is = (
+    "amanuenses",
+    "amniocenteses",
+    "analyses",
+    "antitheses",
+    "apotheoses",
+    "arterioscleroses",
+    "atheroscleroses",
+    "axes",
+    # 'bases', # bases -> basis
+    "catalyses",
+    "catharses",
+    "chasses",
+    "cirrhoses",
+    "cocces",
+    "crises",
+    "diagnoses",
+    "dialyses",
+    "diereses",
+    "electrolyses",
+    "emphases",
+    "exegeses",
+    "geneses",
+    "halitoses",
+    "hydrolyses",
+    "hypnoses",
+    "hypotheses",
+    "hystereses",
+    "metamorphoses",
+    "metastases",
+    "misdiagnoses",
+    "mitoses",
+    "mononucleoses",
+    "narcoses",
+    "necroses",
+    "nemeses",
+    "neuroses",
+    "oases",
+    "osmoses",
+    "osteoporoses",
+    "paralyses",
+    "parentheses",
+    "parthenogeneses",
+    "periphrases",
+    "photosyntheses",
+    "probosces",
+    "prognoses",
+    "prophylaxes",
+    "prostheses",
+    "preces",
+    "psoriases",
+    "psychoanalyses",
+    "psychokineses",
+    "psychoses",
+    "scleroses",
+    "scolioses",
+    "sepses",
+    "silicoses",
+    "symbioses",
+    "synopses",
+    "syntheses",
+    "taxes",
+    "telekineses",
+    "theses",
+    "thromboses",
+    "tuberculoses",
+    "urinalyses",
+)
+
+pl_prep_list = """
+    about above across after among around at athwart before behind
+    below beneath beside besides between betwixt beyond but by
+    during except for from in into near of off on onto out over
+    since till to under until unto upon with""".split()
+
+pl_prep_list_da = pl_prep_list + ["de", "du", "da"]
+
+pl_prep_bysize = bysize(pl_prep_list_da)
+
+pl_prep = enclose("|".join(pl_prep_list_da))
+
+pl_sb_prep_dual_compound = rf"(.*?)((?:-|\s+)(?:{pl_prep})(?:-|\s+))a(?:-|\s+)(.*)"
+
+
+singular_pronoun_genders = {
+    "neuter",
+    "feminine",
+    "masculine",
+    "gender-neutral",
+    "feminine or masculine",
+    "masculine or feminine",
+}
+
+pl_pron_nom = {
+    # NOMINATIVE    REFLEXIVE
+    "i": "we",
+    "myself": "ourselves",
+    "you": "you",
+    "yourself": "yourselves",
+    "she": "they",
+    "herself": "themselves",
+    "he": "they",
+    "himself": "themselves",
+    "it": "they",
+    "itself": "themselves",
+    "they": "they",
+    "themself": "themselves",
+    #   POSSESSIVE
+    "mine": "ours",
+    "yours": "yours",
+    "hers": "theirs",
+    "his": "theirs",
+    "its": "theirs",
+    "theirs": "theirs",
+}
+
+si_pron: Dict[str, Dict[str, Union[str, Dict[str, str]]]] = {
+    "nom": {v: k for (k, v) in pl_pron_nom.items()}
+}
+si_pron["nom"]["we"] = "I"
+
+
+pl_pron_acc = {
+    # ACCUSATIVE    REFLEXIVE
+    "me": "us",
+    "myself": "ourselves",
+    "you": "you",
+    "yourself": "yourselves",
+    "her": "them",
+    "herself": "themselves",
+    "him": "them",
+    "himself": "themselves",
+    "it": "them",
+    "itself": "themselves",
+    "them": "them",
+    "themself": "themselves",
+}
+
+pl_pron_acc_keys = enclose("|".join(pl_pron_acc))
+pl_pron_acc_keys_bysize = bysize(pl_pron_acc)
+
+si_pron["acc"] = {v: k for (k, v) in pl_pron_acc.items()}
+
+for _thecase, _plur, _gend, _sing in (
+    ("nom", "they", "neuter", "it"),
+    ("nom", "they", "feminine", "she"),
+    ("nom", "they", "masculine", "he"),
+    ("nom", "they", "gender-neutral", "they"),
+    ("nom", "they", "feminine or masculine", "she or he"),
+    ("nom", "they", "masculine or feminine", "he or she"),
+    ("nom", "themselves", "neuter", "itself"),
+    ("nom", "themselves", "feminine", "herself"),
+    ("nom", "themselves", "masculine", "himself"),
+    ("nom", "themselves", "gender-neutral", "themself"),
+    ("nom", "themselves", "feminine or masculine", "herself or himself"),
+    ("nom", "themselves", "masculine or feminine", "himself or herself"),
+    ("nom", "theirs", "neuter", "its"),
+    ("nom", "theirs", "feminine", "hers"),
+    ("nom", "theirs", "masculine", "his"),
+    ("nom", "theirs", "gender-neutral", "theirs"),
+    ("nom", "theirs", "feminine or masculine", "hers or his"),
+    ("nom", "theirs", "masculine or feminine", "his or hers"),
+    ("acc", "them", "neuter", "it"),
+    ("acc", "them", "feminine", "her"),
+    ("acc", "them", "masculine", "him"),
+    ("acc", "them", "gender-neutral", "them"),
+    ("acc", "them", "feminine or masculine", "her or him"),
+    ("acc", "them", "masculine or feminine", "him or her"),
+    ("acc", "themselves", "neuter", "itself"),
+    ("acc", "themselves", "feminine", "herself"),
+    ("acc", "themselves", "masculine", "himself"),
+    ("acc", "themselves", "gender-neutral", "themself"),
+    ("acc", "themselves", "feminine or masculine", "herself or himself"),
+    ("acc", "themselves", "masculine or feminine", "himself or herself"),
+):
+    try:
+        si_pron[_thecase][_plur][_gend] = _sing  # type: ignore
+    except TypeError:
+        si_pron[_thecase][_plur] = {}
+        si_pron[_thecase][_plur][_gend] = _sing  # type: ignore
+
+
+si_pron_acc_keys = enclose("|".join(si_pron["acc"]))
+si_pron_acc_keys_bysize = bysize(si_pron["acc"])
+
+
+def get_si_pron(thecase, word, gender) -> str:
+    try:
+        sing = si_pron[thecase][word]
+    except KeyError:
+        raise  # not a pronoun
+    try:
+        return sing[gender]  # has several types due to gender
+    except TypeError:
+        return cast(str, sing)  # answer independent of gender
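+
+# Editor's illustrative examples, derived from the tables built above:
+#
+#     get_si_pron("nom", "they", "feminine")  # -> "she"
+#     get_si_pron("acc", "us", "neuter")      # -> "me" (gender-independent)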
+
+
+# These dictionaries group verbs by first, second and third person
+# conjugations.
+
+plverb_irregular_pres = {
+    "am": "are",
+    "are": "are",
+    "is": "are",
+    "was": "were",
+    "were": "were",
+    "have": "have",
+    "has": "have",
+    "do": "do",
+    "does": "do",
+}
+
+plverb_ambiguous_pres = {
+    "act": "act",
+    "acts": "act",
+    "blame": "blame",
+    "blames": "blame",
+    "can": "can",
+    "must": "must",
+    "fly": "fly",
+    "flies": "fly",
+    "copy": "copy",
+    "copies": "copy",
+    "drink": "drink",
+    "drinks": "drink",
+    "fight": "fight",
+    "fights": "fight",
+    "fire": "fire",
+    "fires": "fire",
+    "like": "like",
+    "likes": "like",
+    "look": "look",
+    "looks": "look",
+    "make": "make",
+    "makes": "make",
+    "reach": "reach",
+    "reaches": "reach",
+    "run": "run",
+    "runs": "run",
+    "sink": "sink",
+    "sinks": "sink",
+    "sleep": "sleep",
+    "sleeps": "sleep",
+    "view": "view",
+    "views": "view",
+}
+
+plverb_ambiguous_pres_keys = re.compile(
+    rf"^({enclose('|'.join(plverb_ambiguous_pres))})((\s.*)?)$", re.IGNORECASE
+)
+
+
+plverb_irregular_non_pres = (
+    "did",
+    "had",
+    "ate",
+    "made",
+    "put",
+    "spent",
+    "fought",
+    "sank",
+    "gave",
+    "sought",
+    "shall",
+    "could",
+    "ought",
+    "should",
+)
+
+plverb_ambiguous_non_pres = re.compile(
+    r"^((?:thought|saw|bent|will|might|cut))((\s.*)?)$", re.IGNORECASE
+)
+
+# "..oes" -> "..oe" (the rest are "..oes" -> "o")
+
+pl_v_oes_oe = ("canoes", "floes", "oboes", "roes", "throes", "woes")
+pl_v_oes_oe_endings_size4 = ("hoes", "toes")
+pl_v_oes_oe_endings_size5 = ("shoes",)
+
+
+pl_count_zero = ("0", "no", "zero", "nil")
+
+
+pl_count_one = ("1", "a", "an", "one", "each", "every", "this", "that")
+
+pl_adj_special = {"a": "some", "an": "some", "this": "these", "that": "those"}
+
+pl_adj_special_keys = re.compile(
+    rf"^({enclose('|'.join(pl_adj_special))})$", re.IGNORECASE
+)
+
+pl_adj_poss = {
+    "my": "our",
+    "your": "your",
+    "its": "their",
+    "her": "their",
+    "his": "their",
+    "their": "their",
+}
+
+pl_adj_poss_keys = re.compile(rf"^({enclose('|'.join(pl_adj_poss))})$", re.IGNORECASE)
+
+
+# 2. INDEFINITE ARTICLES
+
+# THIS PATTERN MATCHES STRINGS OF CAPITALS STARTING WITH A "VOWEL-SOUND"
+# CONSONANT FOLLOWED BY ANOTHER CONSONANT, AND WHICH ARE NOT LIKELY
+# TO BE REAL WORDS (OH, ALL RIGHT THEN, IT'S JUST MAGIC!)
+
+A_abbrev = re.compile(
+    r"""
+^(?! FJO | [HLMNS]Y.  | RY[EO] | SQU
+  | ( F[LR]? | [HL] | MN? | N | RH? | S[CHKLMNPTVW]? | X(YL)?) [AEIOU])
+[FHLMNRSX][A-Z]
+""",
+    re.VERBOSE,
+)
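+
+# For example (editor's note): "FBI" matches (F before another consonant is
+# pronounced letter-by-letter, so "an FBI agent"), while "NATO" fails the
+# lookahead (N before a vowel reads as a word, so "a NATO summit").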
+
+# THIS PATTERN CODES THE BEGINNINGS OF ALL ENGLISH WORDS BEGINNING WITH A
+# 'y' FOLLOWED BY A CONSONANT. ANY OTHER Y-CONSONANT PREFIX THEREFORE
+# IMPLIES AN ABBREVIATION.
+
+A_y_cons = re.compile(r"^(y(b[lor]|cl[ea]|fere|gg|p[ios]|rou|tt))", re.IGNORECASE)
+
+# EXCEPTIONS TO EXCEPTIONS
+
+A_explicit_a = re.compile(r"^((?:unabomber|unanimous|US))", re.IGNORECASE)
+
+A_explicit_an = re.compile(
+    r"^((?:euler|hour(?!i)|heir|honest|hono[ur]|mpeg))", re.IGNORECASE
+)
+
+A_ordinal_an = re.compile(r"^([aefhilmnorsx]-?th)", re.IGNORECASE)
+
+A_ordinal_a = re.compile(r"^([bcdgjkpqtuvwyz]-?th)", re.IGNORECASE)
+
+
+# NUMERICAL INFLECTIONS
+
+nth = {
+    0: "th",
+    1: "st",
+    2: "nd",
+    3: "rd",
+    4: "th",
+    5: "th",
+    6: "th",
+    7: "th",
+    8: "th",
+    9: "th",
+    11: "th",
+    12: "th",
+    13: "th",
+}
+nth_suff = set(nth.values())
+
+ordinal = dict(
+    ty="tieth",
+    one="first",
+    two="second",
+    three="third",
+    five="fifth",
+    eight="eighth",
+    nine="ninth",
+    twelve="twelfth",
+)
+
+ordinal_suff = re.compile(rf"({'|'.join(ordinal)})\Z")
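+
+# Editor's hedged sketch of how these tables combine (the ordinal methods
+# appear later in this module): numeric ordinals pick a suffix from ``nth``
+# keyed on the trailing digits, with 11-13 overriding the final digit
+# (hence "11th", not "11st"); worded ordinals rewrite a matching ending
+# via ``ordinal``:
+#
+#     21 -> "21st"    11 -> "11th"    "twenty" -> "twentieth"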
+
+
+# NUMBERS
+
+unit = ["", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
+teen = [
+    "ten",
+    "eleven",
+    "twelve",
+    "thirteen",
+    "fourteen",
+    "fifteen",
+    "sixteen",
+    "seventeen",
+    "eighteen",
+    "nineteen",
+]
+ten = [
+    "",
+    "",
+    "twenty",
+    "thirty",
+    "forty",
+    "fifty",
+    "sixty",
+    "seventy",
+    "eighty",
+    "ninety",
+]
+mill = [
+    " ",
+    " thousand",
+    " million",
+    " billion",
+    " trillion",
+    " quadrillion",
+    " quintillion",
+    " sextillion",
+    " septillion",
+    " octillion",
+    " nonillion",
+    " decillion",
+]
+
+
+# SUPPORT CLASSICAL PLURALIZATIONS
+
+def_classical = dict(
+    all=False, zero=False, herd=False, names=True, persons=False, ancient=False
+)
+
+all_classical = {k: True for k in def_classical}
+no_classical = {k: False for k in def_classical}
+
+
+# Maps strings to built-in constant types
+string_to_constant = {"True": True, "False": False, "None": None}
+
+
+# Pre-compiled regular expression objects
+DOLLAR_DIGITS = re.compile(r"\$(\d+)")
+FUNCTION_CALL = re.compile(r"((\w+)\([^)]*\)*)", re.IGNORECASE)
+PARTITION_WORD = re.compile(r"\A(\s*)(.+?)(\s*)\Z")
+PL_SB_POSTFIX_ADJ_STEMS_RE = re.compile(
+    rf"^(?:{pl_sb_postfix_adj_stems})$", re.IGNORECASE
+)
+PL_SB_PREP_DUAL_COMPOUND_RE = re.compile(
+    rf"^(?:{pl_sb_prep_dual_compound})$", re.IGNORECASE
+)
+DENOMINATOR = re.compile(r"(?P.+)( (per|a) .+)")
+PLVERB_SPECIAL_S_RE = re.compile(rf"^({plverb_special_s})$")
+WHITESPACE = re.compile(r"\s")
+ENDS_WITH_S = re.compile(r"^(.*[^s])s$", re.IGNORECASE)
+ENDS_WITH_APOSTROPHE_S = re.compile(r"^(.*)'s?$")
+INDEFINITE_ARTICLE_TEST = re.compile(r"\A(\s*)(?:an?\s+)?(.+?)(\s*)\Z", re.IGNORECASE)
+SPECIAL_AN = re.compile(r"^[aefhilmnorsx]$", re.IGNORECASE)
+SPECIAL_A = re.compile(r"^[bcdgjkpqtuvwyz]$", re.IGNORECASE)
+SPECIAL_ABBREV_AN = re.compile(r"^[aefhilmnorsx][.-]", re.IGNORECASE)
+SPECIAL_ABBREV_A = re.compile(r"^[a-z][.-]", re.IGNORECASE)
+CONSONANTS = re.compile(r"^[^aeiouy]", re.IGNORECASE)
+ARTICLE_SPECIAL_EU = re.compile(r"^e[uw]", re.IGNORECASE)
+ARTICLE_SPECIAL_ONCE = re.compile(r"^onc?e\b", re.IGNORECASE)
+ARTICLE_SPECIAL_ONETIME = re.compile(r"^onetime\b", re.IGNORECASE)
+ARTICLE_SPECIAL_UNIT = re.compile(r"^uni([^nmd]|mo)", re.IGNORECASE)
+ARTICLE_SPECIAL_UBA = re.compile(r"^u[bcfghjkqrst][aeiou]", re.IGNORECASE)
+ARTICLE_SPECIAL_UKR = re.compile(r"^ukr", re.IGNORECASE)
+SPECIAL_CAPITALS = re.compile(r"^U[NK][AIEO]?")
+VOWELS = re.compile(r"^[aeiou]", re.IGNORECASE)
+
+DIGIT_GROUP = re.compile(r"(\d)")
+TWO_DIGITS = re.compile(r"(\d)(\d)")
+THREE_DIGITS = re.compile(r"(\d)(\d)(\d)")
+THREE_DIGITS_WORD = re.compile(r"(\d)(\d)(\d)(?=\D*\Z)")
+TWO_DIGITS_WORD = re.compile(r"(\d)(\d)(?=\D*\Z)")
+ONE_DIGIT_WORD = re.compile(r"(\d)(?=\D*\Z)")
+
+FOUR_DIGIT_COMMA = re.compile(r"(\d)(\d{3}(?:,|\Z))")
+NON_DIGIT = re.compile(r"\D")
+WHITESPACES_COMMA = re.compile(r"\s+,")
+COMMA_WORD = re.compile(r", (\S+)\s+\Z")
+WHITESPACES = re.compile(r"\s+")
+
+
+PRESENT_PARTICIPLE_REPLACEMENTS = (
+    (re.compile(r"ie$"), r"y"),
+    (
+        re.compile(r"ue$"),
+        r"u",
+    ),  # TODO: isn't ue$ -> u encompassed in the following rule?
+    (re.compile(r"([auy])e$"), r"\g<1>"),
+    (re.compile(r"ski$"), r"ski"),
+    (re.compile(r"[^b]i$"), r""),
+    (re.compile(r"^(are|were)$"), r"be"),
+    (re.compile(r"^(had)$"), r"hav"),
+    (re.compile(r"^(hoe)$"), r"\g<1>"),
+    (re.compile(r"([^e])e$"), r"\g<1>"),
+    (re.compile(r"er$"), r"er"),
+    (re.compile(r"([^aeiou][aeiouy]([bdgmnprst]))$"), r"\g<1>\g<2>"),
+)
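+
+# Editor's hedged sketch: the present-participle code applies the first
+# matching rule to rewrite the stem, then appends "ing", e.g.
+#
+#     "lie"  -> "ly"   -> "lying"    (ie$ -> y)
+#     "make" -> "mak"  -> "making"   (([^e])e$ -> \g<1>)
+#     "run"  -> "runn" -> "running"  (final consonant doubled by the
+#                                     consonant-vowel-consonant rule)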
+
+DIGIT = re.compile(r"\d")
+
+
+class Words(str):
+    lowered: str
+    split_: List[str]
+    first: str
+    last: str
+
+    def __init__(self, orig) -> None:
+        self.lowered = self.lower()
+        self.split_ = self.split()
+        self.first = self.split_[0]
+        self.last = self.split_[-1]
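+
+# Editor's illustrative example: Words subclasses str but precomputes the
+# pieces the inflection code keys on:
+#
+#     w = Words("Governor General")
+#     w.lowered  # -> "governor general"
+#     w.first    # -> "Governor"
+#     w.last     # -> "General"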
+
+
+Falsish = Any  # ideally, falsish would only validate on bool(value) is False
+
+
+_STATIC_TYPE_CHECKING = TYPE_CHECKING
+# ^-- Workaround for typeguard AST manipulation:
+#     https://github.com/agronholm/typeguard/issues/353#issuecomment-1556306554
+
+if _STATIC_TYPE_CHECKING:  # pragma: no cover
+    Word = Annotated[str, "String with at least 1 character"]
+else:
+
+    class _WordMeta(type):  # Too dynamic to be supported by mypy...
+        def __instancecheck__(self, instance: Any) -> bool:
+            return isinstance(instance, str) and len(instance) >= 1
+
+    class Word(metaclass=_WordMeta):  # type: ignore[no-redef]
+        """String with at least 1 character"""
+
+
+class engine:
+    def __init__(self) -> None:
+        self.classical_dict = def_classical.copy()
+        self.persistent_count: Optional[int] = None
+        self.mill_count = 0
+        self.pl_sb_user_defined: List[Optional[Word]] = []
+        self.pl_v_user_defined: List[Optional[Word]] = []
+        self.pl_adj_user_defined: List[Optional[Word]] = []
+        self.si_sb_user_defined: List[Optional[Word]] = []
+        self.A_a_user_defined: List[Optional[Word]] = []
+        self.thegender = "neuter"
+        self.__number_args: Optional[Dict[str, str]] = None
+
+    @property
+    def _number_args(self):
+        return cast(Dict[str, str], self.__number_args)
+
+    @_number_args.setter
+    def _number_args(self, val):
+        self.__number_args = val
+
+    @typechecked
+    def defnoun(self, singular: Optional[Word], plural: Optional[Word]) -> int:
+        """
+        Set the noun plural of singular to plural.
+
+        """
+        self.checkpat(singular)
+        self.checkpatplural(plural)
+        self.pl_sb_user_defined.extend((singular, plural))
+        self.si_sb_user_defined.extend((plural, singular))
+        return 1
+
+    @typechecked
+    def defverb(
+        self,
+        s1: Optional[Word],
+        p1: Optional[Word],
+        s2: Optional[Word],
+        p2: Optional[Word],
+        s3: Optional[Word],
+        p3: Optional[Word],
+    ) -> int:
+        """
+        Set the verb plurals for s1, s2 and s3 to p1, p2 and p3 respectively.
+
+        Where 1, 2 and 3 represent the 1st, 2nd and 3rd person forms of the verb.
+
+        """
+        self.checkpat(s1)
+        self.checkpat(s2)
+        self.checkpat(s3)
+        self.checkpatplural(p1)
+        self.checkpatplural(p2)
+        self.checkpatplural(p3)
+        self.pl_v_user_defined.extend((s1, p1, s2, p2, s3, p3))
+        return 1
+
+    @typechecked
+    def defadj(self, singular: Optional[Word], plural: Optional[Word]) -> int:
+        """
+        Set the adjective plural of singular to plural.
+
+        """
+        self.checkpat(singular)
+        self.checkpatplural(plural)
+        self.pl_adj_user_defined.extend((singular, plural))
+        return 1
+
+    @typechecked
+    def defa(self, pattern: Optional[Word]) -> int:
+        """
+        Define the indefinite article as 'a' for words matching pattern.
+
+        """
+        self.checkpat(pattern)
+        self.A_a_user_defined.extend((pattern, "a"))
+        return 1
+
+    @typechecked
+    def defan(self, pattern: Optional[Word]) -> int:
+        """
+        Define the indefinite article as 'an' for words matching pattern.
+
+        """
+        self.checkpat(pattern)
+        self.A_a_user_defined.extend((pattern, "an"))
+        return 1
+
+    def checkpat(self, pattern: Optional[Word]) -> None:
+        """
+        check for errors in a regex pattern
+        """
+        if pattern is None:
+            return
+        try:
+            re.match(pattern, "")
+        except re.error as err:
+            raise BadUserDefinedPatternError(pattern) from err
+
+    def checkpatplural(self, pattern: Optional[Word]) -> None:
+        """
+        check for errors in a regex replace pattern
+        """
+        return
+
+    @typechecked
+    def ud_match(self, word: Word, wordlist: Sequence[Optional[Word]]) -> Optional[str]:
+        for i in range(len(wordlist) - 2, -2, -2):  # backwards through even elements
+            mo = re.search(rf"^{wordlist[i]}$", word, re.IGNORECASE)
+            if mo:
+                if wordlist[i + 1] is None:
+                    return None
+                pl = DOLLAR_DIGITS.sub(
+                    r"\\1", cast(Word, wordlist[i + 1])
+                )  # change $n to \n for expand
+                return mo.expand(pl)
+        return None
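+
+    # Editor's illustrative example (hypothetical user definition): after
+    # defnoun("(.*)-ferry", "$1-ferries"), ud_match("car-ferry",
+    # self.pl_sb_user_defined) rewrites $1 to the \1 back-reference and
+    # expands it, returning "car-ferries".  A None plural for a matching
+    # pattern suppresses inflection for that word.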
+
+    def classical(self, **kwargs) -> None:
+        """
+        turn classical mode on and off for various categories
+
+        turn on all classical modes:
+        classical()
+        classical(all=True)
+
+        turn on or off specific classical modes:
+        e.g.
+        classical(herd=True)
+        classical(names=False)
+
+        By default all classical modes are off except names.
+
+        unknown value in args or key in kwargs raises
+        exception: UnknownClassicalModeError
+
+        """
+        if not kwargs:
+            self.classical_dict = all_classical.copy()
+            return
+        if "all" in kwargs:
+            if kwargs["all"]:
+                self.classical_dict = all_classical.copy()
+            else:
+                self.classical_dict = no_classical.copy()
+
+        for k, v in kwargs.items():
+            if k in def_classical:
+                self.classical_dict[k] = v
+            else:
+                raise UnknownClassicalModeError
+
+    def num(
+        self, count: Optional[int] = None, show: Optional[int] = None
+    ) -> str:  # (;$count,$show)
+        """
+        Set the number to be used in other method calls.
+
+        Returns count.
+
+        Set show to False to return '' instead.
+
+        """
+        if count is not None:
+            try:
+                self.persistent_count = int(count)
+            except ValueError as err:
+                raise BadNumValueError from err
+            if (show is None) or show:
+                return str(count)
+        else:
+            self.persistent_count = None
+        return ""
+
+    def gender(self, gender: str) -> None:
+        """
+        set the gender for the singular of plural pronouns
+
+        can be one of:
+        'neuter'                ('they' -> 'it')
+        'feminine'              ('they' -> 'she')
+        'masculine'             ('they' -> 'he')
+        'gender-neutral'        ('they' -> 'they')
+        'feminine or masculine' ('they' -> 'she or he')
+        'masculine or feminine' ('they' -> 'he or she')
+        """
+        if gender in singular_pronoun_genders:
+            self.thegender = gender
+        else:
+            raise BadGenderError
+
+    def _get_value_from_ast(self, obj):
+        """
+        Return the value of the ast object.
+        """
+        if isinstance(obj, ast.Num):
+            return obj.n
+        elif isinstance(obj, ast.Str):
+            return obj.s
+        elif isinstance(obj, ast.List):
+            return [self._get_value_from_ast(e) for e in obj.elts]
+        elif isinstance(obj, ast.Tuple):
+            return tuple([self._get_value_from_ast(e) for e in obj.elts])
+
+        # None, True and False are NameConstants in Py3.4 and above.
+        elif isinstance(obj, ast.NameConstant):
+            return obj.value
+
+        # Probably passed a variable name.
+        # Or passed a single word without wrapping it in quotes as an argument
+        # ex: p.inflect("I plural(see)") instead of p.inflect("I plural('see')")
+        raise NameError(f"name '{obj.id}' is not defined")
+
+    def _string_to_substitute(
+        self, mo: Match, methods_dict: Dict[str, Callable]
+    ) -> str:
+        """
+        Return the string to be substituted for the match.
+        """
+        matched_text, f_name = mo.groups()
+        # matched_text is the complete match string. e.g. plural_noun(cat)
+        # f_name is the function name. e.g. plural_noun
+
+        # Return matched_text if function name is not in methods_dict
+        if f_name not in methods_dict:
+            return matched_text
+
+        # Parse the matched text
+        a_tree = ast.parse(matched_text)
+
+        # get the args and kwargs from ast objects
+        args_list = [
+            self._get_value_from_ast(a)
+            for a in a_tree.body[0].value.args  # type: ignore[attr-defined]
+        ]
+        kwargs_list = {
+            kw.arg: self._get_value_from_ast(kw.value)
+            for kw in a_tree.body[0].value.keywords  # type: ignore[attr-defined]
+        }
+
+        # Call the corresponding function
+        return methods_dict[f_name](*args_list, **kwargs_list)
+
+    # 0. PERFORM GENERAL INFLECTIONS IN A STRING
+
+    @typechecked
+    def inflect(self, text: Word) -> str:
+        """
+        Perform inflections in a string.
+
+        e.g. inflect('The plural of cat is plural(cat)') returns
+        'The plural of cat is cats'
+
+        can use plural, plural_noun, plural_verb, plural_adj,
+        singular_noun, a, an, no, ordinal, number_to_words,
+        and prespart
+
+        """
+        save_persistent_count = self.persistent_count
+
+        # Dictionary of allowed methods
+        methods_dict: Dict[str, Callable] = {
+            "plural": self.plural,
+            "plural_adj": self.plural_adj,
+            "plural_noun": self.plural_noun,
+            "plural_verb": self.plural_verb,
+            "singular_noun": self.singular_noun,
+            "a": self.a,
+            "an": self.a,
+            "no": self.no,
+            "ordinal": self.ordinal,
+            "number_to_words": self.number_to_words,
+            "present_participle": self.present_participle,
+            "num": self.num,
+        }
+
+        # Regular expression to find Python's function call syntax
+        output = FUNCTION_CALL.sub(
+            lambda mo: self._string_to_substitute(mo, methods_dict), text
+        )
+        self.persistent_count = save_persistent_count
+        return output
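+
+    # Editor's usage sketch:
+    #
+    #     p = engine()
+    #     p.inflect("num(2) The plural of cat is plural('cat')")
+    #     # -> "2 The plural of cat is cats"; num() sets the persistent
+    #     # count for the rest of the string, then it is restored.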
+
+    # ## PLURAL SUBROUTINES
+
+    def postprocess(self, orig: str, inflected) -> str:
+        inflected = str(inflected)
+        if "|" in inflected:
+            word_options = inflected.split("|")
+            # When two parts of a noun need to be pluralized
+            if len(word_options[0].split(" ")) == len(word_options[1].split(" ")):
+                result = inflected.split("|")[self.classical_dict["all"]].split(" ")
+            # When only the last part of the noun needs to be pluralized
+            else:
+                result = inflected.split(" ")
+                for index, word in enumerate(result):
+                    if "|" in word:
+                        result[index] = word.split("|")[self.classical_dict["all"]]
+        else:
+            result = inflected.split(" ")
+
+        # Try to fix word wise capitalization
+        for index, word in enumerate(orig.split(" ")):
+            if word == "I":
+                # Is this the only word for exceptions like this
+                # Where the original is fully capitalized
+                # without 'meaning' capitalization?
+                # Also this fails to handle a capitalization in context
+                continue
+            if word.capitalize() == word:
+                result[index] = result[index].capitalize()
+            if word == word.upper():
+                result[index] = result[index].upper()
+        return " ".join(result)
+
+    def partition_word(self, text: str) -> Tuple[str, str, str]:
+        mo = PARTITION_WORD.search(text)
+        if mo:
+            return mo.group(1), mo.group(2), mo.group(3)
+        else:
+            return "", "", ""
+
+    @typechecked
+    def plural(self, text: Word, count: Optional[Union[str, int, Any]] = None) -> str:
+        """
+        Return the plural of text.
+
+        If count supplied, then return text if count is one of:
+            1, a, an, one, each, every, this, that
+
+        otherwise return the plural.
+
+        Whitespace at the start and end is preserved.
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        plural = self.postprocess(
+            word,
+            self._pl_special_adjective(word, count)
+            or self._pl_special_verb(word, count)
+            or self._plnoun(word, count),
+        )
+        return f"{pre}{plural}{post}"
+
+    @typechecked
+    def plural_noun(
+        self, text: Word, count: Optional[Union[str, int, Any]] = None
+    ) -> str:
+        """
+        Return the plural of text, where text is a noun.
+
+        If count supplied, then return text if count is one of:
+            1, a, an, one, each, every, this, that
+
+        otherwise return the plural.
+
+        Whitespace at the start and end is preserved.
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        plural = self.postprocess(word, self._plnoun(word, count))
+        return f"{pre}{plural}{post}"
+
+    @typechecked
+    def plural_verb(
+        self, text: Word, count: Optional[Union[str, int, Any]] = None
+    ) -> str:
+        """
+        Return the plural of text, where text is a verb.
+
+        If count supplied, then return text if count is one of:
+            1, a, an, one, each, every, this, that
+
+        otherwise return the plural.
+
+        Whitespace at the start and end is preserved.
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        plural = self.postprocess(
+            word,
+            self._pl_special_verb(word, count) or self._pl_general_verb(word, count),
+        )
+        return f"{pre}{plural}{post}"
+
+    @typechecked
+    def plural_adj(
+        self, text: Word, count: Optional[Union[str, int, Any]] = None
+    ) -> str:
+        """
+        Return the plural of text, where text is an adjective.
+
+        If count supplied, then return text if count is one of:
+            1, a, an, one, each, every, this, that
+
+        otherwise return the plural.
+
+        Whitespace at the start and end is preserved.
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        plural = self.postprocess(word, self._pl_special_adjective(word, count) or word)
+        return f"{pre}{plural}{post}"
+
+    @typechecked
+    def compare(self, word1: Word, word2: Word) -> Union[str, bool]:
+        """
+        compare word1 and word2 for equality regardless of plurality
+
+        return values:
+        eq - the strings are equal
+        p:s - word1 is the plural of word2
+        s:p - word2 is the plural of word1
+        p:p - word1 and word2 are two different plural forms of the one word
+        False - otherwise
+
+        >>> compare = engine().compare
+        >>> compare("egg", "eggs")
+        's:p'
+        >>> compare('egg', 'egg')
+        'eq'
+
+        Words should not be empty.
+
+        >>> compare('egg', '')
+        Traceback (most recent call last):
+        ...
+        typeguard.TypeCheckError:...is not an instance of inflect.Word
+        """
+        norms = self.plural_noun, self.plural_verb, self.plural_adj
+        results = (self._plequal(word1, word2, norm) for norm in norms)
+        return next(filter(None, results), False)
+
+    @typechecked
+    def compare_nouns(self, word1: Word, word2: Word) -> Union[str, bool]:
+        """
+        compare word1 and word2 for equality regardless of plurality
+        word1 and word2 are to be treated as nouns
+
+        return values:
+        eq - the strings are equal
+        p:s - word1 is the plural of word2
+        s:p - word2 is the plural of word1
+        p:p - word1 and word2 are two different plural forms of the one word
+        False - otherwise
+
+        """
+        return self._plequal(word1, word2, self.plural_noun)
+
+    @typechecked
+    def compare_verbs(self, word1: Word, word2: Word) -> Union[str, bool]:
+        """
+        compare word1 and word2 for equality regardless of plurality
+        word1 and word2 are to be treated as verbs
+
+        return values:
+        eq - the strings are equal
+        p:s - word1 is the plural of word2
+        s:p - word2 is the plural of word1
+        p:p - word1 and word2 are two different plural forms of the one word
+        False - otherwise
+
+        """
+        return self._plequal(word1, word2, self.plural_verb)
+
+    @typechecked
+    def compare_adjs(self, word1: Word, word2: Word) -> Union[str, bool]:
+        """
+        compare word1 and word2 for equality regardless of plurality
+        word1 and word2 are to be treated as adjectives
+
+        return values:
+        eq - the strings are equal
+        p:s - word1 is the plural of word2
+        s:p - word2 is the plural of word1
+        p:p - word1 and word2 are two different plural forms of the one word
+        False - otherwise
+
+        """
+        return self._plequal(word1, word2, self.plural_adj)
+
+    @typechecked
+    def singular_noun(
+        self,
+        text: Word,
+        count: Optional[Union[int, str, Any]] = None,
+        gender: Optional[str] = None,
+    ) -> Union[str, Literal[False]]:
+        """
+        Return the singular of text, where text is a plural noun.
+
+        If count supplied, then return the singular if count is one of:
+            1, a, an, one, each, every, this, that or if count is None
+
+        otherwise return text unchanged.
+
+        Whitespace at the start and end is preserved.
+
+        >>> p = engine()
+        >>> p.singular_noun('horses')
+        'horse'
+        >>> p.singular_noun('knights')
+        'knight'
+
+        Returns False when a singular noun is passed.
+
+        >>> p.singular_noun('horse')
+        False
+        >>> p.singular_noun('knight')
+        False
+        >>> p.singular_noun('soldier')
+        False
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        sing = self._sinoun(word, count=count, gender=gender)
+        if sing is not False:
+            plural = self.postprocess(word, sing)
+            return f"{pre}{plural}{post}"
+        return False
+
+    def _plequal(self, word1: str, word2: str, pl) -> Union[str, bool]:  # noqa: C901
+        classval = self.classical_dict.copy()
+        self.classical_dict = all_classical.copy()
+        if word1 == word2:
+            return "eq"
+        if word1 == pl(word2):
+            return "p:s"
+        if pl(word1) == word2:
+            return "s:p"
+        self.classical_dict = no_classical.copy()
+        if word1 == pl(word2):
+            return "p:s"
+        if pl(word1) == word2:
+            return "s:p"
+        self.classical_dict = classval.copy()
+
+        if pl == self.plural or pl == self.plural_noun:
+            if self._pl_check_plurals_N(word1, word2):
+                return "p:p"
+            if self._pl_check_plurals_N(word2, word1):
+                return "p:p"
+        if pl == self.plural or pl == self.plural_adj:
+            if self._pl_check_plurals_adj(word1, word2):
+                return "p:p"
+        return False
+
+    def _pl_reg_plurals(self, pair: str, stems: str, end1: str, end2: str) -> bool:
+        pattern = rf"({stems})({end1}\|\1{end2}|{end2}\|\1{end1})"
+        return bool(re.search(pattern, pair))
+
+    def _pl_check_plurals_N(self, word1: str, word2: str) -> bool:
+        stem_endings = (
+            (pl_sb_C_a_ata, "as", "ata"),
+            (pl_sb_C_is_ides, "is", "ides"),
+            (pl_sb_C_a_ae, "s", "e"),
+            (pl_sb_C_en_ina, "ens", "ina"),
+            (pl_sb_C_um_a, "ums", "a"),
+            (pl_sb_C_us_i, "uses", "i"),
+            (pl_sb_C_on_a, "ons", "a"),
+            (pl_sb_C_o_i_stems, "os", "i"),
+            (pl_sb_C_ex_ices, "exes", "ices"),
+            (pl_sb_C_ix_ices, "ixes", "ices"),
+            (pl_sb_C_i, "s", "i"),
+            (pl_sb_C_im, "s", "im"),
+            (".*eau", "s", "x"),
+            (".*ieu", "s", "x"),
+            (".*tri", "xes", "ces"),
+            (".{2,}[yia]n", "xes", "ges"),
+        )
+
+        words = map(Words, (word1, word2))
+        pair = "|".join(word.last for word in words)
+
+        return (
+            pair in pl_sb_irregular_s.values()
+            or pair in pl_sb_irregular.values()
+            or pair in pl_sb_irregular_caps.values()
+            or any(
+                self._pl_reg_plurals(pair, stems, end1, end2)
+                for stems, end1, end2 in stem_endings
+            )
+        )
+
+    def _pl_check_plurals_adj(self, word1: str, word2: str) -> bool:
+        word1a = word1[: word1.rfind("'")] if word1.endswith(("'s", "'")) else ""
+        word2a = word2[: word2.rfind("'")] if word2.endswith(("'s", "'")) else ""
+
+        return (
+            bool(word1a)
+            and bool(word2a)
+            and (
+                self._pl_check_plurals_N(word1a, word2a)
+                or self._pl_check_plurals_N(word2a, word1a)
+            )
+        )
+
+    def get_count(self, count: Optional[Union[str, int]] = None) -> Union[str, int]:
+        if count is None and self.persistent_count is not None:
+            count = self.persistent_count
+
+        if count is not None:
+            count = (
+                1
+                if (
+                    (str(count) in pl_count_one)
+                    or (
+                        self.classical_dict["zero"]
+                        and str(count).lower() in pl_count_zero
+                    )
+                )
+                else 2
+            )
+        else:
+            count = ""
+        return count
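+
+    # Editor's illustrative examples: get_count("one") -> 1, get_count(5) -> 2,
+    # get_count(None) -> "" unless a persistent count was set via num();
+    # get_count(0) -> 1 only when classical(zero=True) is enabled, else 2.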
+
+    # @profile
+    def _plnoun(  # noqa: C901
+        self, word: str, count: Optional[Union[str, int]] = None
+    ) -> str:
+        count = self.get_count(count)
+
+        # DEFAULT TO PLURAL
+
+        if count == 1:
+            return word
+
+        # HANDLE USER-DEFINED NOUNS
+
+        value = self.ud_match(word, self.pl_sb_user_defined)
+        if value is not None:
+            return value
+
+        # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS
+
+        if word == "":
+            return word
+
+        word = Words(word)
+
+        if word.last.lower() in pl_sb_uninflected_complete:
+            if len(word.split_) >= 3:
+                return self._handle_long_compounds(word, count=2) or word
+            return word
+
+        if word in pl_sb_uninflected_caps:
+            return word
+
+        for k, v in pl_sb_uninflected_bysize.items():
+            if word.lowered[-k:] in v:
+                return word
+
+        if self.classical_dict["herd"] and word.last.lower() in pl_sb_uninflected_herd:
+            return word
+
+        # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.)
+
+        mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word)
+        if mo and mo.group(2) != "":
+            return f"{self._plnoun(mo.group(1), 2)}{mo.group(2)}"
+
+        if " a " in word.lowered or "-a-" in word.lowered:
+            mo = PL_SB_PREP_DUAL_COMPOUND_RE.search(word)
+            if mo and mo.group(2) != "" and mo.group(3) != "":
+                return (
+                    f"{self._plnoun(mo.group(1), 2)}"
+                    f"{mo.group(2)}"
+                    f"{self._plnoun(mo.group(3))}"
+                )
+
+        if len(word.split_) >= 3:
+            handled_words = self._handle_long_compounds(word, count=2)
+            if handled_words is not None:
+                return handled_words
+
+        # only pluralize denominators in units
+        mo = DENOMINATOR.search(word.lowered)
+        if mo:
+            index = len(mo.group("denominator"))
+            return f"{self._plnoun(word[:index])}{word[index:]}"
+
+        # handle units given in degrees (only accept if
+        # there is no more than one word following)
+        # degree Celsius => degrees Celsius but degree
+        # fahrenheit hour => degree fahrenheit hours
+        if len(word.split_) >= 2 and word.split_[-2] == "degree":
+            return " ".join([self._plnoun(word.first)] + word.split_[1:])
+
+        with contextlib.suppress(ValueError):
+            return self._handle_prepositional_phrase(
+                word.lowered,
+                functools.partial(self._plnoun, count=2),
+                '-',
+            )
+
+        # HANDLE PRONOUNS
+
+        for k, v in pl_pron_acc_keys_bysize.items():
+            if word.lowered[-k:] in v:  # ends with accusative pronoun
+                for pk, pv in pl_prep_bysize.items():
+                    if word.lowered[:pk] in pv:  # starts with a prep
+                        if word.lowered.split() == [
+                            word.lowered[:pk],
+                            word.lowered[-k:],
+                        ]:
+                            # only whitespace in between
+                            return word.lowered[:-k] + pl_pron_acc[word.lowered[-k:]]
+
+        try:
+            return pl_pron_nom[word.lowered]
+        except KeyError:
+            pass
+
+        try:
+            return pl_pron_acc[word.lowered]
+        except KeyError:
+            pass
+
+        # HANDLE ISOLATED IRREGULAR PLURALS
+
+        if word.last in pl_sb_irregular_caps:
+            llen = len(word.last)
+            return f"{word[:-llen]}{pl_sb_irregular_caps[word.last]}"
+
+        lowered_last = word.last.lower()
+        if lowered_last in pl_sb_irregular:
+            llen = len(lowered_last)
+            return f"{word[:-llen]}{pl_sb_irregular[lowered_last]}"
+
+        dash_split = word.lowered.split('-')
+        if (" ".join(dash_split[-2:])).lower() in pl_sb_irregular_compound:
+            llen = len(
+                " ".join(dash_split[-2:])
+            )  # TODO: what if 2 spaces between these words?
+            return (
+                f"{word[:-llen]}"
+                f"{pl_sb_irregular_compound[(' '.join(dash_split[-2:])).lower()]}"
+            )
+
+        if word.lowered[-3:] == "quy":
+            return f"{word[:-1]}ies"
+
+        if word.lowered[-6:] == "person":
+            if self.classical_dict["persons"]:
+                return f"{word}s"
+            else:
+                return f"{word[:-4]}ople"
+
+        # HANDLE FAMILIES OF IRREGULAR PLURALS
+
+        if word.lowered[-3:] == "man":
+            for k, v in pl_sb_U_man_mans_bysize.items():
+                if word.lowered[-k:] in v:
+                    return f"{word}s"
+            for k, v in pl_sb_U_man_mans_caps_bysize.items():
+                if word[-k:] in v:
+                    return f"{word}s"
+            return f"{word[:-3]}men"
+        if word.lowered[-5:] == "mouse":
+            return f"{word[:-5]}mice"
+        if word.lowered[-5:] == "louse":
+            v = pl_sb_U_louse_lice_bysize.get(len(word))
+            if v and word.lowered in v:
+                return f"{word[:-5]}lice"
+            return f"{word}s"
+        if word.lowered[-5:] == "goose":
+            return f"{word[:-5]}geese"
+        if word.lowered[-5:] == "tooth":
+            return f"{word[:-5]}teeth"
+        if word.lowered[-4:] == "foot":
+            return f"{word[:-4]}feet"
+        if word.lowered[-4:] == "taco":
+            return f"{word[:-5]}tacos"
+
+        if word.lowered == "die":
+            return "dice"
+
+        # HANDLE UNASSIMILATED IMPORTS
+
+        if word.lowered[-4:] == "ceps":
+            return word
+        if word.lowered[-4:] == "zoon":
+            return f"{word[:-2]}a"
+        if word.lowered[-3:] in ("cis", "sis", "xis"):
+            return f"{word[:-2]}es"
+
+        for lastlet, d, numend, post in (
+            ("h", pl_sb_U_ch_chs_bysize, None, "s"),
+            ("x", pl_sb_U_ex_ices_bysize, -2, "ices"),
+            ("x", pl_sb_U_ix_ices_bysize, -2, "ices"),
+            ("m", pl_sb_U_um_a_bysize, -2, "a"),
+            ("s", pl_sb_U_us_i_bysize, -2, "i"),
+            ("n", pl_sb_U_on_a_bysize, -2, "a"),
+            ("a", pl_sb_U_a_ae_bysize, None, "e"),
+        ):
+            if word.lowered[-1] == lastlet:  # this test to add speed
+                for k, v in d.items():
+                    if word.lowered[-k:] in v:
+                        return word[:numend] + post
+
+        # HANDLE INCOMPLETELY ASSIMILATED IMPORTS
+
+        if self.classical_dict["ancient"]:
+            if word.lowered[-4:] == "trix":
+                return f"{word[:-1]}ces"
+            if word.lowered[-3:] in ("eau", "ieu"):
+                return f"{word}x"
+            if word.lowered[-3:] in ("ynx", "inx", "anx") and len(word) > 4:
+                return f"{word[:-1]}ges"
+
+            for lastlet, d, numend, post in (
+                ("n", pl_sb_C_en_ina_bysize, -2, "ina"),
+                ("x", pl_sb_C_ex_ices_bysize, -2, "ices"),
+                ("x", pl_sb_C_ix_ices_bysize, -2, "ices"),
+                ("m", pl_sb_C_um_a_bysize, -2, "a"),
+                ("s", pl_sb_C_us_i_bysize, -2, "i"),
+                ("s", pl_sb_C_us_us_bysize, None, ""),
+                ("a", pl_sb_C_a_ae_bysize, None, "e"),
+                ("a", pl_sb_C_a_ata_bysize, None, "ta"),
+                ("s", pl_sb_C_is_ides_bysize, -1, "des"),
+                ("o", pl_sb_C_o_i_bysize, -1, "i"),
+                ("n", pl_sb_C_on_a_bysize, -2, "a"),
+            ):
+                if word.lowered[-1] == lastlet:  # this test to add speed
+                    for k, v in d.items():
+                        if word.lowered[-k:] in v:
+                            return word[:numend] + post
+
+            for d, numend, post in (
+                (pl_sb_C_i_bysize, None, "i"),
+                (pl_sb_C_im_bysize, None, "im"),
+            ):
+                for k, v in d.items():
+                    if word.lowered[-k:] in v:
+                        return word[:numend] + post
+
+        # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SIBILANTS
+
+        if lowered_last in pl_sb_singular_s_complete:
+            return f"{word}es"
+
+        for k, v in pl_sb_singular_s_bysize.items():
+            if word.lowered[-k:] in v:
+                return f"{word}es"
+
+        if word.lowered[-2:] == "es" and word[0] == word[0].upper():
+            return f"{word}es"
+
+        if word.lowered[-1] == "z":
+            for k, v in pl_sb_z_zes_bysize.items():
+                if word.lowered[-k:] in v:
+                    return f"{word}es"
+
+            if word.lowered[-2:-1] != "z":
+                return f"{word}zes"
+
+        if word.lowered[-2:] == "ze":
+            for k, v in pl_sb_ze_zes_bysize.items():
+                if word.lowered[-k:] in v:
+                    return f"{word}s"
+
+        if word.lowered[-2:] in ("ch", "sh", "zz", "ss") or word.lowered[-1] == "x":
+            return f"{word}es"
+
+        # HANDLE ...f -> ...ves
+
+        if word.lowered[-3:] in ("elf", "alf", "olf"):
+            return f"{word[:-1]}ves"
+        if word.lowered[-3:] == "eaf" and word.lowered[-4:-3] != "d":
+            return f"{word[:-1]}ves"
+        if word.lowered[-4:] in ("nife", "life", "wife"):
+            return f"{word[:-2]}ves"
+        if word.lowered[-3:] == "arf":
+            return f"{word[:-1]}ves"
+
+        # HANDLE ...y
+
+        if word.lowered[-1] == "y":
+            if word.lowered[-2:-1] in "aeiou" or len(word) == 1:
+                return f"{word}s"
+
+            if self.classical_dict["names"]:
+                if word.lowered[-1] == "y" and word[0] == word[0].upper():
+                    return f"{word}s"
+
+            return f"{word[:-1]}ies"
+
+        # HANDLE ...o
+
+        if lowered_last in pl_sb_U_o_os_complete:
+            return f"{word}s"
+
+        for k, v in pl_sb_U_o_os_bysize.items():
+            if word.lowered[-k:] in v:
+                return f"{word}s"
+
+        if word.lowered[-2:] in ("ao", "eo", "io", "oo", "uo"):
+            return f"{word}s"
+
+        if word.lowered[-1] == "o":
+            return f"{word}es"
+
+        # OTHERWISE JUST ADD ...s
+
+        return f"{word}s"
+
+    @classmethod
+    def _handle_prepositional_phrase(cls, phrase, transform, sep):
+        """
+        Given a word or phrase possibly separated by sep, parse out
+        the prepositional phrase and apply the transform to the word
+        preceding the prepositional phrase.
+
+        Raise ValueError if the pivot is not found or if at least two
+        separators are not found.
+
+        >>> engine._handle_prepositional_phrase("man-of-war", str.upper, '-')
+        'MAN-of-war'
+        >>> engine._handle_prepositional_phrase("man of war", str.upper, ' ')
+        'MAN of war'
+        """
+        parts = phrase.split(sep)
+        if len(parts) < 3:
+            raise ValueError("Cannot handle words with fewer than two separators")
+
+        pivot = cls._find_pivot(parts, pl_prep_list_da)
+
+        transformed = transform(parts[pivot - 1]) or parts[pivot - 1]
+        return " ".join(
+            parts[: pivot - 1] + [sep.join([transformed, parts[pivot], ''])]
+        ) + " ".join(parts[(pivot + 1) :])
+
+    def _handle_long_compounds(self, word: Words, count: int) -> Union[str, None]:
+        """
+        Handles the plural and singular for compound `Words` that
+        have three or more words, based on the given count.
+
+        >>> engine()._handle_long_compounds(Words("pair of scissors"), 2)
+        'pairs of scissors'
+        >>> engine()._handle_long_compounds(Words("men beyond hills"), 1)
+        'man beyond hills'
+        """
+        inflection = self._sinoun if count == 1 else self._plnoun
+        solutions = (  # type: ignore
+            " ".join(
+                itertools.chain(
+                    leader,
+                    [inflection(cand, count), prep],  # type: ignore
+                    trailer,
+                )
+            )
+            for leader, (cand, prep), trailer in windowed_complete(word.split_, 2)
+            if prep in pl_prep_list_da  # type: ignore
+        )
+        return next(solutions, None)
+
+    @staticmethod
+    def _find_pivot(words, candidates):
+        pivots = (
+            index for index in range(1, len(words) - 1) if words[index] in candidates
+        )
+        try:
+            return next(pivots)
+        except StopIteration:
+            raise ValueError("No pivot found") from None
+
+    def _pl_special_verb(  # noqa: C901
+        self, word: str, count: Optional[Union[str, int]] = None
+    ) -> Union[str, bool]:
+        if self.classical_dict["zero"] and str(count).lower() in pl_count_zero:
+            return False
+        count = self.get_count(count)
+
+        if count == 1:
+            return word
+
+        # HANDLE USER-DEFINED VERBS
+
+        value = self.ud_match(word, self.pl_v_user_defined)
+        if value is not None:
+            return value
+
+        # HANDLE IRREGULAR PRESENT TENSE (SIMPLE AND COMPOUND)
+
+        try:
+            words = Words(word)
+        except IndexError:
+            return False  # word is ''
+
+        if words.first in plverb_irregular_pres:
+            return f"{plverb_irregular_pres[words.first]}{words[len(words.first) :]}"
+
+        # HANDLE IRREGULAR FUTURE, PRETERITE AND PERFECT TENSES
+
+        if words.first in plverb_irregular_non_pres:
+            return word
+
+        # HANDLE PRESENT NEGATIONS (SIMPLE AND COMPOUND)
+
+        if words.first.endswith("n't") and words.first[:-3] in plverb_irregular_pres:
+            return (
+                f"{plverb_irregular_pres[words.first[:-3]]}n't"
+                f"{words[len(words.first) :]}"
+            )
+
+        if words.first.endswith("n't"):
+            return word
+
+        # HANDLE SPECIAL CASES
+
+        mo = PLVERB_SPECIAL_S_RE.search(word)
+        if mo:
+            return False
+        if WHITESPACE.search(word):
+            return False
+
+        if words.lowered == "quizzes":
+            return "quiz"
+
+        # HANDLE STANDARD 3RD PERSON (CHOP THE ...(e)s OFF SINGLE WORDS)
+
+        if (
+            words.lowered[-4:] in ("ches", "shes", "zzes", "sses")
+            or words.lowered[-3:] == "xes"
+        ):
+            return words[:-2]
+
+        if words.lowered[-3:] == "ies" and len(words) > 3:
+            return words.lowered[:-3] + "y"
+
+        if (
+            words.last.lower() in pl_v_oes_oe
+            or words.lowered[-4:] in pl_v_oes_oe_endings_size4
+            or words.lowered[-5:] in pl_v_oes_oe_endings_size5
+        ):
+            return words[:-1]
+
+        if words.lowered.endswith("oes") and len(words) > 3:
+            return words.lowered[:-2]
+
+        mo = ENDS_WITH_S.search(words)
+        if mo:
+            return mo.group(1)
+
+        # OTHERWISE, A REGULAR VERB (HANDLE ELSEWHERE)
+
+        return False
+
+    def _pl_general_verb(
+        self, word: str, count: Optional[Union[str, int]] = None
+    ) -> str:
+        count = self.get_count(count)
+
+        if count == 1:
+            return word
+
+        # HANDLE AMBIGUOUS PRESENT TENSES  (SIMPLE AND COMPOUND)
+
+        mo = plverb_ambiguous_pres_keys.search(word)
+        if mo:
+            return f"{plverb_ambiguous_pres[mo.group(1).lower()]}{mo.group(2)}"
+
+        # HANDLE AMBIGUOUS PRETERITE AND PERFECT TENSES
+
+        mo = plverb_ambiguous_non_pres.search(word)
+        if mo:
+            return word
+
+        # OTHERWISE, 1st OR 2ND PERSON IS UNINFLECTED
+
+        return word
+
+    def _pl_special_adjective(
+        self, word: str, count: Optional[Union[str, int]] = None
+    ) -> Union[str, bool]:
+        count = self.get_count(count)
+
+        if count == 1:
+            return word
+
+        # HANDLE USER-DEFINED ADJECTIVES
+
+        value = self.ud_match(word, self.pl_adj_user_defined)
+        if value is not None:
+            return value
+
+        # HANDLE KNOWN CASES
+
+        mo = pl_adj_special_keys.search(word)
+        if mo:
+            return pl_adj_special[mo.group(1).lower()]
+
+        # HANDLE POSSESSIVES
+
+        mo = pl_adj_poss_keys.search(word)
+        if mo:
+            return pl_adj_poss[mo.group(1).lower()]
+
+        mo = ENDS_WITH_APOSTROPHE_S.search(word)
+        if mo:
+            pl = self.plural_noun(mo.group(1))
+            trailing_s = "" if pl[-1] == "s" else "s"
+            return f"{pl}'{trailing_s}"
+
+        # OTHERWISE, NO IDEA
+
+        return False
+
+    # @profile
+    def _sinoun(  # noqa: C901
+        self,
+        word: str,
+        count: Optional[Union[str, int]] = None,
+        gender: Optional[str] = None,
+    ) -> Union[str, bool]:
+        count = self.get_count(count)
+
+        # DEFAULT TO PLURAL
+
+        if count == 2:
+            return word
+
+        # SET THE GENDER
+
+        try:
+            if gender is None:
+                gender = self.thegender
+            elif gender not in singular_pronoun_genders:
+                raise BadGenderError
+        except (TypeError, IndexError) as err:
+            raise BadGenderError from err
+
+        # HANDLE USER-DEFINED NOUNS
+
+        value = self.ud_match(word, self.si_sb_user_defined)
+        if value is not None:
+            return value
+
+        # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS
+
+        if word == "":
+            return word
+
+        if word in si_sb_ois_oi_case:
+            return word[:-1]
+
+        words = Words(word)
+
+        if words.last.lower() in pl_sb_uninflected_complete:
+            if len(words.split_) >= 3:
+                return self._handle_long_compounds(words, count=1) or word
+            return word
+
+        if word in pl_sb_uninflected_caps:
+            return word
+
+        for k, v in pl_sb_uninflected_bysize.items():
+            if words.lowered[-k:] in v:
+                return word
+
+        if self.classical_dict["herd"] and words.last.lower() in pl_sb_uninflected_herd:
+            return word
+
+        if words.last.lower() in pl_sb_C_us_us:
+            return word if self.classical_dict["ancient"] else False
+
+        # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.)
+
+        mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word)
+        if mo and mo.group(2) != "":
+            return f"{self._sinoun(mo.group(1), 1, gender=gender)}{mo.group(2)}"
+
+        with contextlib.suppress(ValueError):
+            return self._handle_prepositional_phrase(
+                words.lowered,
+                functools.partial(self._sinoun, count=1, gender=gender),
+                ' ',
+            )
+
+        with contextlib.suppress(ValueError):
+            return self._handle_prepositional_phrase(
+                words.lowered,
+                functools.partial(self._sinoun, count=1, gender=gender),
+                '-',
+            )
+
+        # HANDLE PRONOUNS
+
+        for k, v in si_pron_acc_keys_bysize.items():
+            if words.lowered[-k:] in v:  # ends with accusative pronoun
+                for pk, pv in pl_prep_bysize.items():
+                    if words.lowered[:pk] in pv:  # starts with a prep
+                        if words.lowered.split() == [
+                            words.lowered[:pk],
+                            words.lowered[-k:],
+                        ]:
+                            # only whitespace in between
+                            return words.lowered[:-k] + get_si_pron(
+                                "acc", words.lowered[-k:], gender
+                            )
+
+        try:
+            return get_si_pron("nom", words.lowered, gender)
+        except KeyError:
+            pass
+
+        try:
+            return get_si_pron("acc", words.lowered, gender)
+        except KeyError:
+            pass
+
+        # HANDLE ISOLATED IRREGULAR PLURALS
+
+        if words.last in si_sb_irregular_caps:
+            llen = len(words.last)
+            return f"{word[:-llen]}{si_sb_irregular_caps[words.last]}"
+
+        if words.last.lower() in si_sb_irregular:
+            llen = len(words.last.lower())
+            return f"{word[:-llen]}{si_sb_irregular[words.last.lower()]}"
+
+        dash_split = words.lowered.split("-")
+        if (" ".join(dash_split[-2:])).lower() in si_sb_irregular_compound:
+            llen = len(
+                " ".join(dash_split[-2:])
+            )  # TODO: what if 2 spaces between these words?
+            return "{}{}".format(
+                word[:-llen],
+                si_sb_irregular_compound[(" ".join(dash_split[-2:])).lower()],
+            )
+
+        if words.lowered[-5:] == "quies":
+            return word[:-3] + "y"
+
+        if words.lowered[-7:] == "persons":
+            return word[:-1]
+        if words.lowered[-6:] == "people":
+            return word[:-4] + "rson"
+
+        # HANDLE FAMILIES OF IRREGULAR PLURALS
+
+        if words.lowered[-4:] == "mans":
+            for k, v in si_sb_U_man_mans_bysize.items():
+                if words.lowered[-k:] in v:
+                    return word[:-1]
+            for k, v in si_sb_U_man_mans_caps_bysize.items():
+                if word[-k:] in v:
+                    return word[:-1]
+        if words.lowered[-3:] == "men":
+            return word[:-3] + "man"
+        if words.lowered[-4:] == "mice":
+            return word[:-4] + "mouse"
+        if words.lowered[-4:] == "lice":
+            v = si_sb_U_louse_lice_bysize.get(len(word))
+            if v and words.lowered in v:
+                return word[:-4] + "louse"
+        if words.lowered[-5:] == "geese":
+            return word[:-5] + "goose"
+        if words.lowered[-5:] == "teeth":
+            return word[:-5] + "tooth"
+        if words.lowered[-4:] == "feet":
+            return word[:-4] + "foot"
+
+        if words.lowered == "dice":
+            return "die"
+
+        # HANDLE UNASSIMILATED IMPORTS
+
+        if words.lowered[-4:] == "ceps":
+            return word
+        if words.lowered[-3:] == "zoa":
+            return word[:-1] + "on"
+
+        for lastlet, d, unass_numend, post in (
+            ("s", si_sb_U_ch_chs_bysize, -1, ""),
+            ("s", si_sb_U_ex_ices_bysize, -4, "ex"),
+            ("s", si_sb_U_ix_ices_bysize, -4, "ix"),
+            ("a", si_sb_U_um_a_bysize, -1, "um"),
+            ("i", si_sb_U_us_i_bysize, -1, "us"),
+            ("a", si_sb_U_on_a_bysize, -1, "on"),
+            ("e", si_sb_U_a_ae_bysize, -1, ""),
+        ):
+            if words.lowered[-1] == lastlet:  # check the last letter first, for speed
+                for k, v in d.items():
+                    if words.lowered[-k:] in v:
+                        return word[:unass_numend] + post
+
+        # HANDLE INCOMPLETELY ASSIMILATED IMPORTS
+
+        if self.classical_dict["ancient"]:
+            if words.lowered[-6:] == "trices":
+                return word[:-3] + "x"
+            if words.lowered[-4:] in ("eaux", "ieux"):
+                return word[:-1]
+            if words.lowered[-5:] in ("ynges", "inges", "anges") and len(word) > 6:
+                return word[:-3] + "x"
+
+            for lastlet, d, class_numend, post in (
+                ("a", si_sb_C_en_ina_bysize, -3, "en"),
+                ("s", si_sb_C_ex_ices_bysize, -4, "ex"),
+                ("s", si_sb_C_ix_ices_bysize, -4, "ix"),
+                ("a", si_sb_C_um_a_bysize, -1, "um"),
+                ("i", si_sb_C_us_i_bysize, -1, "us"),
+                ("s", pl_sb_C_us_us_bysize, None, ""),
+                ("e", si_sb_C_a_ae_bysize, -1, ""),
+                ("a", si_sb_C_a_ata_bysize, -2, ""),
+                ("s", si_sb_C_is_ides_bysize, -3, "s"),
+                ("i", si_sb_C_o_i_bysize, -1, "o"),
+                ("a", si_sb_C_on_a_bysize, -1, "on"),
+                ("m", si_sb_C_im_bysize, -2, ""),
+                ("i", si_sb_C_i_bysize, -1, ""),
+            ):
+                if words.lowered[-1] == lastlet:  # check the last letter first, for speed
+                    for k, v in d.items():
+                        if words.lowered[-k:] in v:
+                            return word[:class_numend] + post
+
+        # HANDLE PLURALS ENDING IN uses -> use
+
+        if (
+            words.lowered[-6:] == "houses"
+            or word in si_sb_uses_use_case
+            or words.last.lower() in si_sb_uses_use
+        ):
+            return word[:-1]
+
+        # HANDLE PLURALS ENDING IN ies -> ie
+
+        if word in si_sb_ies_ie_case or words.last.lower() in si_sb_ies_ie:
+            return word[:-1]
+
+        # HANDLE PLURALS ENDING IN oes -> oe
+
+        if (
+            words.lowered[-5:] == "shoes"
+            or word in si_sb_oes_oe_case
+            or words.last.lower() in si_sb_oes_oe
+        ):
+            return word[:-1]
+
+        # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SIBILANTS
+
+        if word in si_sb_sses_sse_case or words.last.lower() in si_sb_sses_sse:
+            return word[:-1]
+
+        if words.last.lower() in si_sb_singular_s_complete:
+            return word[:-2]
+
+        for k, v in si_sb_singular_s_bysize.items():
+            if words.lowered[-k:] in v:
+                return word[:-2]
+
+        if words.lowered[-4:] == "eses" and word[0] == word[0].upper():
+            return word[:-2]
+
+        if words.last.lower() in si_sb_z_zes:
+            return word[:-2]
+
+        if words.last.lower() in si_sb_zzes_zz:
+            return word[:-2]
+
+        if words.lowered[-4:] == "zzes":
+            return word[:-3]
+
+        if word in si_sb_ches_che_case or words.last.lower() in si_sb_ches_che:
+            return word[:-1]
+
+        if words.lowered[-4:] in ("ches", "shes"):
+            return word[:-2]
+
+        if words.last.lower() in si_sb_xes_xe:
+            return word[:-1]
+
+        if words.lowered[-3:] == "xes":
+            return word[:-2]
+
+        # HANDLE ...f -> ...ves
+
+        if word in si_sb_ves_ve_case or words.last.lower() in si_sb_ves_ve:
+            return word[:-1]
+
+        if words.lowered[-3:] == "ves":
+            if words.lowered[-5:-3] in ("el", "al", "ol"):
+                return word[:-3] + "f"
+            if words.lowered[-5:-3] == "ea" and word[-6:-5] != "d":
+                return word[:-3] + "f"
+            if words.lowered[-5:-3] in ("ni", "li", "wi"):
+                return word[:-3] + "fe"
+            if words.lowered[-5:-3] == "ar":
+                return word[:-3] + "f"
+
+        # HANDLE ...y
+
+        if words.lowered[-2:] == "ys":
+            if len(words.lowered) > 2 and words.lowered[-3] in "aeiou":
+                return word[:-1]
+
+            if self.classical_dict["names"]:
+                if words.lowered[-2:] == "ys" and word[0] == word[0].upper():
+                    return word[:-1]
+
+        if words.lowered[-3:] == "ies":
+            return word[:-3] + "y"
+
+        # HANDLE ...o
+
+        if words.lowered[-2:] == "os":
+            if words.last.lower() in si_sb_U_o_os_complete:
+                return word[:-1]
+
+            for k, v in si_sb_U_o_os_bysize.items():
+                if words.lowered[-k:] in v:
+                    return word[:-1]
+
+            if words.lowered[-3:] in ("aos", "eos", "ios", "oos", "uos"):
+                return word[:-1]
+
+        if words.lowered[-3:] == "oes":
+            return word[:-2]
+
+        # UNASSIMILATED IMPORTS FINAL RULE
+
+        if word in si_sb_es_is:
+            return word[:-2] + "is"
+
+        # OTHERWISE JUST REMOVE ...s
+
+        if words.lowered[-1] == "s":
+            return word[:-1]
+
+        # COULD NOT FIND SINGULAR
+
+        return False
+
+    # ADJECTIVES
+
+    @typechecked
+    def a(self, text: Word, count: Optional[Union[int, str, Any]] = 1) -> str:
+        """
+        Return the appropriate indefinite article followed by text.
+
+        The indefinite article is either 'a' or 'an'.
+
+        If count is not one, then return count followed by text
+        instead of 'a' or 'an'.
+
+        Whitespace at the start and end is preserved.
+
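+        A few illustrative cases:
+
+        >>> engine().a('apple')
+        'an apple'
+        >>> engine().a('banana')
+        'a banana'
+        >>> engine().a('cat', 3)
+        '3 cat'
+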
+        """
+        mo = INDEFINITE_ARTICLE_TEST.search(text)
+        if mo:
+            word = mo.group(2)
+            if not word:
+                return text
+            pre = mo.group(1)
+            post = mo.group(3)
+            result = self._indef_article(word, count)
+            return f"{pre}{result}{post}"
+        return ""
+
+    an = a
+
+    _indef_article_cases = (
+        # HANDLE ORDINAL FORMS
+        (A_ordinal_a, "a"),
+        (A_ordinal_an, "an"),
+        # HANDLE SPECIAL CASES
+        (A_explicit_an, "an"),
+        (SPECIAL_AN, "an"),
+        (SPECIAL_A, "a"),
+        # HANDLE ABBREVIATIONS
+        (A_abbrev, "an"),
+        (SPECIAL_ABBREV_AN, "an"),
+        (SPECIAL_ABBREV_A, "a"),
+        # HANDLE CONSONANTS
+        (CONSONANTS, "a"),
+        # HANDLE SPECIAL VOWEL-FORMS
+        (ARTICLE_SPECIAL_EU, "a"),
+        (ARTICLE_SPECIAL_ONCE, "a"),
+        (ARTICLE_SPECIAL_ONETIME, "a"),
+        (ARTICLE_SPECIAL_UNIT, "a"),
+        (ARTICLE_SPECIAL_UBA, "a"),
+        (ARTICLE_SPECIAL_UKR, "a"),
+        (A_explicit_a, "a"),
+        # HANDLE SPECIAL CAPITALS
+        (SPECIAL_CAPITALS, "a"),
+        # HANDLE VOWELS
+        (VOWELS, "an"),
+        # HANDLE y...
+        # (BEFORE CERTAIN CONSONANTS IMPLIES (UNNATURALIZED) "i.." SOUND)
+        (A_y_cons, "an"),
+    )
+
+    def _indef_article(self, word: str, count: Union[int, str, Any]) -> str:
+        mycount = self.get_count(count)
+
+        if mycount != 1:
+            return f"{count} {word}"
+
+        # HANDLE USER-DEFINED VARIANTS
+
+        value = self.ud_match(word, self.A_a_user_defined)
+        if value is not None:
+            return f"{value} {word}"
+
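+        # The first case whose regex matches the word determines the article.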
+        matches = (
+            f'{article} {word}'
+            for regexen, article in self._indef_article_cases
+            if regexen.search(word)
+        )
+
+        # OTHERWISE, GUESS "a"
+        fallback = f'a {word}'
+        return next(matches, fallback)
+
+    # 2. TRANSLATE ZERO-QUANTIFIED $word TO "no plural($word)"
+
+    @typechecked
+    def no(self, text: Word, count: Optional[Union[int, str]] = None) -> str:
+        """
+        If count is 0, no, zero or nil, return 'no' followed by the plural
+        of text.
+
+        If count is one of:
+            1, a, an, one, each, every, this, that
+            return count followed by text.
+
+        Otherwise return count followed by the plural of text.
+
+        In the return value count is always followed by a space.
+
+        Whitespace at the start and end is preserved.
+
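+        Some illustrative cases:
+
+        >>> engine().no('cat', 0)
+        'no cats'
+        >>> engine().no('cat', 2)
+        '2 cats'
+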
+        """
+        if count is None and self.persistent_count is not None:
+            count = self.persistent_count
+
+        if count is None:
+            count = 0
+        mo = PARTITION_WORD.search(text)
+        if mo:
+            pre = mo.group(1)
+            word = mo.group(2)
+            post = mo.group(3)
+        else:
+            pre = ""
+            word = ""
+            post = ""
+
+        if str(count).lower() in pl_count_zero:
+            count = 'no'
+        return f"{pre}{count} {self.plural(word, count)}{post}"
+
+    # PARTICIPLES
+
+    @typechecked
+    def present_participle(self, word: Word) -> str:
+        """
+        Return the present participle for word.
+
+        word is the 3rd person singular verb.
+
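+        For example:
+
+        >>> engine().present_participle('runs')
+        'running'
+        >>> engine().present_participle('dies')
+        'dying'
+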
+        """
+        plv = self.plural_verb(word, 2)
+        ans = plv
+
+        for regexen, repl in PRESENT_PARTICIPLE_REPLACEMENTS:
+            ans, num = regexen.subn(repl, plv)
+            if num:
+                return f"{ans}ing"
+        return f"{ans}ing"
+
+    # NUMERICAL INFLECTIONS
+
+    @typechecked
+    def ordinal(self, num: Union[Number, Word]) -> str:
+        """
+        Return the ordinal of num.
+
+        >>> ordinal = engine().ordinal
+        >>> ordinal(1)
+        '1st'
+        >>> ordinal('one')
+        'first'
+        """
+        if DIGIT.match(str(num)):
+            if isinstance(num, (float, int)) and int(num) == num:
+                n = int(num)
+            else:
+                if "." in str(num):
+                    try:
+                        # numbers after decimal,
+                        # so only need last one for ordinal
+                        n = int(str(num)[-1])
+
+                    except ValueError:  # ends with '.', so need to use whole string
+                        n = int(str(num)[:-1])
+                else:
+                    n = int(num)  # type: ignore
+            try:
+                post = nth[n % 100]
+            except KeyError:
+                post = nth[n % 10]
+            return f"{num}{post}"
+        else:
+            return self._sub_ord(num)
+
+    def millfn(self, ind: int = 0) -> str:
+        if ind > len(mill) - 1:
+            raise NumOutOfRangeError
+        return mill[ind]
+
+    def unitfn(self, units: int, mindex: int = 0) -> str:
+        return f"{unit[units]}{self.millfn(mindex)}"
+
+    def tenfn(self, tens, units, mindex=0) -> str:
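+        # tens == 1 selects the teens (ten, eleven, ... nineteen).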
+        if tens != 1:
+            tens_part = ten[tens]
+            if tens and units:
+                hyphen = "-"
+            else:
+                hyphen = ""
+            unit_part = unit[units]
+            mill_part = self.millfn(mindex)
+            return f"{tens_part}{hyphen}{unit_part}{mill_part}"
+        return f"{teen[units]}{mill[mindex]}"
+
+    def hundfn(self, hundreds: int, tens: int, units: int, mindex: int) -> str:
+        if hundreds:
+            andword = f" {self._number_args['andword']} " if tens or units else ""
+            # use unit rather than unitfn, as it's simpler here
+            return (
+                f"{unit[hundreds]} hundred{andword}"
+                f"{self.tenfn(tens, units)}{self.millfn(mindex)}, "
+            )
+        if tens or units:
+            return f"{self.tenfn(tens, units)}{self.millfn(mindex)}, "
+        return ""
+
+    def group1sub(self, mo: Match) -> str:
+        units = int(mo.group(1))
+        if units == 1:
+            return f" {self._number_args['one']}, "
+        elif units:
+            return f"{unit[units]}, "
+        else:
+            return f" {self._number_args['zero']}, "
+
+    def group1bsub(self, mo: Match) -> str:
+        units = int(mo.group(1))
+        if units:
+            return f"{unit[units]}, "
+        else:
+            return f" {self._number_args['zero']}, "
+
+    def group2sub(self, mo: Match) -> str:
+        tens = int(mo.group(1))
+        units = int(mo.group(2))
+        if tens:
+            return f"{self.tenfn(tens, units)}, "
+        if units:
+            return f" {self._number_args['zero']} {unit[units]}, "
+        return f" {self._number_args['zero']} {self._number_args['zero']}, "
+
+    def group3sub(self, mo: Match) -> str:
+        hundreds = int(mo.group(1))
+        tens = int(mo.group(2))
+        units = int(mo.group(3))
+        if hundreds == 1:
+            hunword = f" {self._number_args['one']}"
+        elif hundreds:
+            hunword = str(unit[hundreds])
+        else:
+            hunword = f" {self._number_args['zero']}"
+        if tens:
+            tenword = self.tenfn(tens, units)
+        elif units:
+            tenword = f" {self._number_args['zero']} {unit[units]}"
+        else:
+            tenword = f" {self._number_args['zero']} {self._number_args['zero']}"
+        return f"{hunword} {tenword}, "
+
+    def hundsub(self, mo: Match) -> str:
+        ret = self.hundfn(
+            int(mo.group(1)), int(mo.group(2)), int(mo.group(3)), self.mill_count
+        )
+        self.mill_count += 1
+        return ret
+
+    def tensub(self, mo: Match) -> str:
+        return f"{self.tenfn(int(mo.group(1)), int(mo.group(2)), self.mill_count)}, "
+
+    def unitsub(self, mo: Match) -> str:
+        return f"{self.unitfn(int(mo.group(1)), self.mill_count)}, "
+
+    def enword(self, num: str, group: int) -> str:
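+        # Render a digit string as words; `group` selects the reading style
+        # (0 = normal prose, 1/2/3 = digits read in groups of that size).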
+
+        if group == 1:
+            num = DIGIT_GROUP.sub(self.group1sub, num)
+        elif group == 2:
+            num = TWO_DIGITS.sub(self.group2sub, num)
+            num = DIGIT_GROUP.sub(self.group1bsub, num, 1)
+        elif group == 3:
+            num = THREE_DIGITS.sub(self.group3sub, num)
+            num = TWO_DIGITS.sub(self.group2sub, num, 1)
+            num = DIGIT_GROUP.sub(self.group1sub, num, 1)
+        elif int(num) == 0:
+            num = self._number_args["zero"]
+        elif int(num) == 1:
+            num = self._number_args["one"]
+        else:
+            num = num.lstrip().lstrip("0")
+            self.mill_count = 0
+            # consume three digits at a time from the least-significant end;
+            # self.mill_count selects the thousand/million/... name per group
+            mo = THREE_DIGITS_WORD.search(num)
+            while mo:
+                num = THREE_DIGITS_WORD.sub(self.hundsub, num, 1)
+                mo = THREE_DIGITS_WORD.search(num)
+            num = TWO_DIGITS_WORD.sub(self.tensub, num, 1)
+            num = ONE_DIGIT_WORD.sub(self.unitsub, num, 1)
+        return num
+
+    @staticmethod
+    def _sub_ord(val):
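+        """
+        Convert a number word to its ordinal form. Illustrative:
+
+        >>> engine._sub_ord('eleven')
+        'eleventh'
+        >>> engine._sub_ord('twenty-one')
+        'twenty-first'
+        """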
+        new = ordinal_suff.sub(lambda match: ordinal[match.group(1)], val)
+        return new + "th" * (new == val)
+
+    @classmethod
+    def _chunk_num(cls, num, decimal, group):
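+        # Split on the decimal point: at most once in normal mode, but on
+        # every point when digit-grouping, so "1.2.3" yields three chunks.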
+        if decimal:
+            max_split = -1 if group != 0 else 1
+            chunks = num.split(".", max_split)
+        else:
+            chunks = [num]
+        return cls._remove_last_blank(chunks)
+
+    @staticmethod
+    def _remove_last_blank(chunks):
+        """
+        Remove the last item from chunks if it's a blank string.
+
+        Return the resultant chunks and whether the last item was removed.
+        """
+        removed = chunks[-1] == ""
+        result = chunks[:-1] if removed else chunks
+        return result, removed
+
+    @staticmethod
+    def _get_sign(num):
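+        # Map a leading '+' or '-' to its word; empty string when unsigned.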
+        return {'+': 'plus', '-': 'minus'}.get(num.lstrip()[0], '')
+
+    @typechecked
+    def number_to_words(  # noqa: C901
+        self,
+        num: Union[Number, Word],
+        wantlist: bool = False,
+        group: int = 0,
+        comma: Union[Falsish, str] = ",",
+        andword: str = "and",
+        zero: str = "zero",
+        one: str = "one",
+        decimal: Union[Falsish, str] = "point",
+        threshold: Optional[int] = None,
+    ) -> Union[str, List[str]]:
+        """
+        Return a number in words.
+
+        group: 1, 2 or 3 to read digits in groups of that size
+        comma: string inserted between groups of words (default ',')
+
+        andword:
+            word for 'and'. Can be set to ''.
+            e.g. "one hundred and one" vs "one hundred one"
+
+        zero: word for '0'
+        one: word for '1'
+        decimal: word for decimal point
+        threshold: numbers above threshold not turned into words
+
+        Parameters are not remembered from the last call (a departure from
+        the Perl version).
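+
+        Illustrative examples:
+
+        >>> engine().number_to_words(12)
+        'twelve'
+        >>> engine().number_to_words(1234)
+        'one thousand, two hundred and thirty-four'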
+        """
+        self._number_args = {"andword": andword, "zero": zero, "one": one}
+        num = str(num)
+
+        # Handle "stylistic" conversions (up to a given threshold)...
+        if threshold is not None and float(num) > threshold:
+            spnum = num.split(".", 1)
+            while comma:
+                (spnum[0], n) = FOUR_DIGIT_COMMA.subn(r"\1,\2", spnum[0])
+                if n == 0:
+                    break
+            try:
+                return f"{spnum[0]}.{spnum[1]}"
+            except IndexError:
+                return str(spnum[0])
+
+        if group < 0 or group > 3:
+            raise BadChunkingOptionError
+
+        sign = self._get_sign(num)
+
+        if num in nth_suff:
+            num = zero
+
+        myord = num[-2:] in nth_suff
+        if myord:
+            num = num[:-2]
+
+        chunks, finalpoint = self._chunk_num(num, decimal, group)
+
+        loopstart = chunks[0] == ""
+        first: bool | None = not loopstart
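+        # `first` is tri-state: True until the leading chunk is rendered, None
+        # afterwards, and False when the number starts at the decimal point.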
+
+        def _handle_chunk(chunk):
+            nonlocal first
+
+            # strip all non-digit characters (\D)
+            chunk = NON_DIGIT.sub("", chunk)
+            if chunk == "":
+                chunk = "0"
+
+            if group == 0 and not first:
+                chunk = self.enword(chunk, 1)
+            else:
+                chunk = self.enword(chunk, group)
+
+            if chunk[-2:] == ", ":
+                chunk = chunk[:-2]
+            chunk = WHITESPACES_COMMA.sub(",", chunk)
+
+            if group == 0 and first:
+                chunk = COMMA_WORD.sub(f" {andword} \\1", chunk)
+            chunk = WHITESPACES.sub(" ", chunk)
+            chunk = chunk.strip()
+            if first:
+                first = None
+            return chunk
+
+        chunks[loopstart:] = map(_handle_chunk, chunks[loopstart:])
+
+        numchunks = []
+        if first != 0:
+            numchunks = chunks[0].split(f"{comma} ")
+
+        if myord and numchunks:
+            numchunks[-1] = self._sub_ord(numchunks[-1])
+
+        for chunk in chunks[1:]:
+            numchunks.append(decimal)
+            numchunks.extend(chunk.split(f"{comma} "))
+
+        if finalpoint:
+            numchunks.append(decimal)
+
+        if wantlist:
+            return [sign] * bool(sign) + numchunks
+
+        signout = f"{sign} " if sign else ""
+        valout = (
+            ', '.join(numchunks)
+            if group
+            else ''.join(self._render(numchunks, decimal, comma))
+        )
+        return signout + valout
+
+    @staticmethod
+    def _render(chunks, decimal, comma):
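+        # Re-join rendered chunks: emit a comma between integer-part chunks,
+        # but only spaces once the decimal word has been seen.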
+        first_item = chunks.pop(0)
+        yield first_item
+        first = decimal is None or not first_item.endswith(decimal)
+        for nc in chunks:
+            if nc == decimal:
+                first = False
+            elif first:
+                yield comma
+            yield f" {nc}"
+
+    @typechecked
+    def join(
+        self,
+        words: Optional[Sequence[Word]],
+        sep: Optional[str] = None,
+        sep_spaced: bool = True,
+        final_sep: Optional[str] = None,
+        conj: str = "and",
+        conj_spaced: bool = True,
+    ) -> str:
+        """
+        Join words into a natural-language list.
+
+        e.g. join(['ant', 'bee', 'fly']) returns 'ant, bee, and fly'
+
+        options:
+        conj: replacement for 'and'
+        sep: separator. default ',', unless ',' appears in the words, then ';'
+        final_sep: final separator. defaults to sep
+        conj_spaced: boolean. Should conj have spaces around it?
+
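+        For example:
+
+        >>> engine().join(['ant', 'bee', 'fly'])
+        'ant, bee, and fly'
+        >>> engine().join(['ant', 'bee'], conj='or')
+        'ant or bee'
+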
+        """
+        if not words:
+            return ""
+        if len(words) == 1:
+            return words[0]
+
+        if conj_spaced:
+            if conj == "":
+                conj = " "
+            else:
+                conj = f" {conj} "
+
+        if len(words) == 2:
+            return f"{words[0]}{conj}{words[1]}"
+
+        if sep is None:
+            if "," in "".join(words):
+                sep = ";"
+            else:
+                sep = ","
+        if final_sep is None:
+            final_sep = sep
+
+        final_sep = f"{final_sep}{conj}"
+
+        if sep_spaced:
+            sep += " "
+
+        return f"{sep.join(words[0:-1])}{final_sep}{words[-1]}"
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/REQUESTED b/setuptools/_vendor/inflect/compat/__init__.py
similarity index 100%
rename from setuptools/_vendor/ordered_set-3.1.1.dist-info/REQUESTED
rename to setuptools/_vendor/inflect/compat/__init__.py
diff --git a/setuptools/_vendor/inflect/compat/py38.py b/setuptools/_vendor/inflect/compat/py38.py
new file mode 100644
index 0000000000..a2d01bd98f
--- /dev/null
+++ b/setuptools/_vendor/inflect/compat/py38.py
@@ -0,0 +1,7 @@
+import sys
+
+
+if sys.version_info > (3, 9):
+    from typing import Annotated
+else:  # pragma: no cover
+    from typing_extensions import Annotated  # noqa: F401
diff --git a/setuptools/_vendor/packaging-24.0.dist-info/REQUESTED b/setuptools/_vendor/inflect/py.typed
similarity index 100%
rename from setuptools/_vendor/packaging-24.0.dist-info/REQUESTED
rename to setuptools/_vendor/inflect/py.typed
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
deleted file mode 100644
index 783aa7d2b9..0000000000
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
-jaraco.functools-4.0.0.dist-info/RECORD,,
-jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
-jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
-jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
-jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/INSTALLER b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/more_itertools-8.8.0.dist-info/INSTALLER
rename to setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
similarity index 97%
rename from setuptools/_vendor/jaraco.text-3.7.0.dist-info/LICENSE
rename to setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
index 353924be0e..1bb5a44356 100644
--- a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/LICENSE
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
@@ -1,5 +1,3 @@
-Copyright Jason R. Coombs
-
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to
 deal in the Software without restriction, including without limitation the
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
similarity index 78%
rename from setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
rename to setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
index 581b308378..c865140ab2 100644
--- a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
@@ -1,16 +1,16 @@
 Metadata-Version: 2.1
 Name: jaraco.functools
-Version: 4.0.0
+Version: 4.0.1
 Summary: Functools like those found in stdlib
-Home-page: https://github.com/jaraco/jaraco.functools
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
+Author-email: "Jason R. Coombs" 
+Project-URL: Homepage, https://github.com/jaraco/jaraco.functools
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
 License-File: LICENSE
 Requires-Dist: more-itertools
 Provides-Extra: docs
@@ -26,17 +26,16 @@ Requires-Dist: pytest >=6 ; extra == 'testing'
 Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
 Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff ; extra == 'testing'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
 Requires-Dist: jaraco.classes ; extra == 'testing'
-Requires-Dist: pytest-black >=0.3.7 ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
    :target: https://pypi.org/project/jaraco.functools
 
 .. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
 
-.. image:: https://github.com/jaraco/jaraco.functools/workflows/tests/badge.svg
+.. image:: https://github.com/jaraco/jaraco.functools/actions/workflows/main.yml/badge.svg
    :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
    :alt: tests
 
@@ -44,14 +43,10 @@ Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy")
     :target: https://github.com/astral-sh/ruff
     :alt: Ruff
 
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
 .. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
    :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
 
-.. image:: https://img.shields.io/badge/skeleton-2023-informational
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
    :target: https://blog.jaraco.com/skeleton
 
 .. image:: https://tidelift.com/badges/package/pypi/jaraco.functools
diff --git a/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
new file mode 100644
index 0000000000..ef3bc21e92
--- /dev/null
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.functools-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.1.dist-info/METADATA,sha256=i4aUaQDX-jjdEQK5wevhegyx8JyLfin2HyvaSk3FHso,2891
+jaraco.functools-4.0.1.dist-info/RECORD,,
+jaraco.functools-4.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+jaraco.functools-4.0.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=gk3dsgHzo5F_U74HzAvpNivFAPCkPJ1b2-yCd62dfnw,3878
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
similarity index 65%
rename from setuptools/_vendor/jaraco.text-3.7.0.dist-info/WHEEL
rename to setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
index becc9a66ea..bab98d6758 100644
--- a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/WHEEL
+++ b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.37.1)
+Generator: bdist_wheel (0.43.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
rename to setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/INSTALLER b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/ordered_set-3.1.1.dist-info/INSTALLER
rename to setuptools/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
similarity index 97%
rename from setuptools/_vendor/zipp-3.7.0.dist-info/LICENSE
rename to setuptools/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
index 353924be0e..1bb5a44356 100644
--- a/setuptools/_vendor/zipp-3.7.0.dist-info/LICENSE
+++ b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
@@ -1,5 +1,3 @@
-Copyright Jason R. Coombs
-
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to
 deal in the Software without restriction, including without limitation the
diff --git a/setuptools/_vendor/jaraco.text-3.12.1.dist-info/METADATA b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/METADATA
new file mode 100644
index 0000000000..0258a380f4
--- /dev/null
+++ b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/METADATA
@@ -0,0 +1,95 @@
+Metadata-Version: 2.1
+Name: jaraco.text
+Version: 3.12.1
+Summary: Module for text manipulation
+Author-email: "Jason R. Coombs" 
+Project-URL: Homepage, https://github.com/jaraco/jaraco.text
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: jaraco.functools
+Requires-Dist: jaraco.context >=4.1
+Requires-Dist: autocommand
+Requires-Dist: inflect
+Requires-Dist: more-itertools
+Requires-Dist: importlib-resources ; python_version < "3.9"
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-mypy ; extra == 'test'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
+Requires-Dist: pathlib2 ; (python_version < "3.10") and extra == 'test'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.text.svg
+   :target: https://pypi.org/project/jaraco.text
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.text.svg
+
+.. image:: https://github.com/jaraco/jaraco.text/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/jaraco.text/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/jaracotext/badge/?version=latest
+   :target: https://jaracotext.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/jaraco.text
+   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.text?utm_source=pypi-jaraco.text&utm_medium=readme
+
+
+This package provides handy routines for dealing with text, such as
+wrapping, substitution, trimming, stripping, prefix and suffix removal,
+line continuation, indentation, comment processing, identifier processing,
+values parsing, case insensitive comparison, and more. See the docs
+(linked in the badge above) for the detailed documentation and examples.
+
+Layouts
+=======
+
+One of the features of this package is the layouts module, which
+provides a simple example of translating keystrokes from one keyboard
+layout to another::
+
+    echo qwerty | python -m jaraco.text.to-dvorak
+    ',.pyf
+    echo  "',.pyf" | python -m jaraco.text.to-qwerty
+    qwerty
+
+Newline Reporting
+=================
+
+Need to know what newlines appear in a file?
+
+::
+
+    $ python -m jaraco.text.show-newlines README.rst
+    newline is '\n'
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
diff --git a/setuptools/_vendor/jaraco.text-3.12.1.dist-info/RECORD b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/RECORD
new file mode 100644
index 0000000000..19e2d8402a
--- /dev/null
+++ b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/RECORD
@@ -0,0 +1,20 @@
+jaraco.text-3.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.text-3.12.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.text-3.12.1.dist-info/METADATA,sha256=AzWdm6ViMfDOPoQMfLWn2zgBQSGJScyqeN29TcuWXVI,3658
+jaraco.text-3.12.1.dist-info/RECORD,,
+jaraco.text-3.12.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jaraco.text-3.12.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+jaraco.text-3.12.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
+jaraco/text/__init__.py,sha256=Y2YUqXR_orUoDaY4SkPRe6ZZhb5HUHB_Ah9RCNsVyho,16250
+jaraco/text/__pycache__/__init__.cpython-312.pyc,,
+jaraco/text/__pycache__/layouts.cpython-312.pyc,,
+jaraco/text/__pycache__/show-newlines.cpython-312.pyc,,
+jaraco/text/__pycache__/strip-prefix.cpython-312.pyc,,
+jaraco/text/__pycache__/to-dvorak.cpython-312.pyc,,
+jaraco/text/__pycache__/to-qwerty.cpython-312.pyc,,
+jaraco/text/layouts.py,sha256=HTC8aSTLZ7uXipyOXapRMC158juecjK6RVwitfmZ9_w,643
+jaraco/text/show-newlines.py,sha256=WGQa65e8lyhb92LUOLqVn6KaCtoeVgVws6WtSRmLk6w,904
+jaraco/text/strip-prefix.py,sha256=NfVXV8JVNo6nqcuYASfMV7_y4Eo8zMQqlCOGvAnRIVw,412
+jaraco/text/to-dvorak.py,sha256=1SNcbSsvISpXXg-LnybIHHY-RUFOQr36zcHkY1pWFqw,119
+jaraco/text/to-qwerty.py,sha256=s4UMQUnPwFn_dB5uZC27BurHOQcYondBfzIpVL5pEzw,119
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/REQUESTED b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED
similarity index 100%
rename from setuptools/_vendor/zipp-3.7.0.dist-info/REQUESTED
rename to setuptools/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
similarity index 65%
rename from setuptools/_vendor/more_itertools-8.8.0.dist-info/WHEEL
rename to setuptools/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
index 385faab052..bab98d6758 100644
--- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/WHEEL
+++ b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.36.2)
+Generator: bdist_wheel (0.43.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt
rename to setuptools/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt
diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/METADATA b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/METADATA
deleted file mode 100644
index 615a50a4ae..0000000000
--- a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/METADATA
+++ /dev/null
@@ -1,55 +0,0 @@
-Metadata-Version: 2.1
-Name: jaraco.text
-Version: 3.7.0
-Summary: Module for text manipulation
-Home-page: https://github.com/jaraco/jaraco.text
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-License: UNKNOWN
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.6
-License-File: LICENSE
-Requires-Dist: jaraco.functools
-Requires-Dist: jaraco.context (>=4.1)
-Requires-Dist: importlib-resources ; python_version < "3.9"
-Provides-Extra: docs
-Requires-Dist: sphinx ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: pytest-flake8 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/jaraco.text.svg
-   :target: `PyPI link`_
-
-.. image:: https://img.shields.io/pypi/pyversions/jaraco.text.svg
-   :target: `PyPI link`_
-
-.. _PyPI link: https://pypi.org/project/jaraco.text
-
-.. image:: https://github.com/jaraco/jaraco.text/workflows/tests/badge.svg
-   :target: https://github.com/jaraco/jaraco.text/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
-.. image:: https://readthedocs.org/projects/jaracotext/badge/?version=latest
-   :target: https://jaracotext.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2021-informational
-   :target: https://blog.jaraco.com/skeleton
-
-
diff --git a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD
deleted file mode 100644
index c698101cb4..0000000000
--- a/setuptools/_vendor/jaraco.text-3.7.0.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-jaraco.text-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.text-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
-jaraco.text-3.7.0.dist-info/METADATA,sha256=5mcR1dY0cJNrM-VIkAFkpjOgvgzmq6nM1GfD0gwTIhs,2136
-jaraco.text-3.7.0.dist-info/RECORD,,
-jaraco.text-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
-jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
-jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538
-jaraco/text/__pycache__/__init__.cpython-312.pyc,,
diff --git a/setuptools/_vendor/jaraco/context.py b/setuptools/_vendor/jaraco/context.py
index 0322c45d4a..61b27135df 100644
--- a/setuptools/_vendor/jaraco/context.py
+++ b/setuptools/_vendor/jaraco/context.py
@@ -14,7 +14,7 @@
 
 
 if sys.version_info < (3, 12):
-    from setuptools.extern.backports import tarfile
+    from backports import tarfile
 else:
     import tarfile
 
diff --git a/setuptools/_vendor/jaraco/functools/__init__.py b/setuptools/_vendor/jaraco/functools/__init__.py
index 130b87a485..ca6c22fa9b 100644
--- a/setuptools/_vendor/jaraco/functools/__init__.py
+++ b/setuptools/_vendor/jaraco/functools/__init__.py
@@ -7,7 +7,7 @@
 import types
 import warnings
 
-import setuptools.extern.more_itertools
+import more_itertools
 
 
 def compose(*funcs):
@@ -603,10 +603,10 @@ def splat(func):
     simple ``map``.
 
     >>> pairs = [(-1, 1), (0, 2)]
-    >>> setuptools.extern.more_itertools.consume(itertools.starmap(print, pairs))
+    >>> more_itertools.consume(itertools.starmap(print, pairs))
     -1 1
     0 2
-    >>> setuptools.extern.more_itertools.consume(map(splat(print), pairs))
+    >>> more_itertools.consume(map(splat(print), pairs))
     -1 1
     0 2
 
diff --git a/setuptools/_vendor/jaraco/functools/__init__.pyi b/setuptools/_vendor/jaraco/functools/__init__.pyi
index c2b9ab1757..19191bf93e 100644
--- a/setuptools/_vendor/jaraco/functools/__init__.pyi
+++ b/setuptools/_vendor/jaraco/functools/__init__.pyi
@@ -74,9 +74,6 @@ def result_invoke(
 def invoke(
     f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
 ) -> Callable[_P, _R]: ...
-def call_aside(
-    f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
-) -> Callable[_P, _R]: ...
 
 class Throttler(Generic[_R]):
     last_called: float
diff --git a/setuptools/_vendor/jaraco/text/__init__.py b/setuptools/_vendor/jaraco/text/__init__.py
index a0306d5ff5..0fabd0c3f0 100644
--- a/setuptools/_vendor/jaraco/text/__init__.py
+++ b/setuptools/_vendor/jaraco/text/__init__.py
@@ -6,10 +6,10 @@
 try:
     from importlib.resources import files  # type: ignore
 except ImportError:  # pragma: nocover
-    from setuptools.extern.importlib_resources import files  # type: ignore
+    from importlib_resources import files  # type: ignore
 
-from setuptools.extern.jaraco.functools import compose, method_cache
-from setuptools.extern.jaraco.context import ExceptionTrap
+from jaraco.functools import compose, method_cache
+from jaraco.context import ExceptionTrap
 
 
 def substitution(old, new):
@@ -66,7 +66,7 @@ class FoldedCase(str):
     >>> s in ["Hello World"]
     True
 
-    You may test for set inclusion, but candidate and elements
+    Allows testing for set inclusion, but candidate and elements
     must both be folded.
 
     >>> FoldedCase("Hello World") in {s}
@@ -92,37 +92,40 @@ class FoldedCase(str):
 
     >>> FoldedCase('hello') > FoldedCase('Hello')
     False
+
+    >>> FoldedCase('ß') == FoldedCase('ss')
+    True
     """
 
     def __lt__(self, other):
-        return self.lower() < other.lower()
+        return self.casefold() < other.casefold()
 
     def __gt__(self, other):
-        return self.lower() > other.lower()
+        return self.casefold() > other.casefold()
 
     def __eq__(self, other):
-        return self.lower() == other.lower()
+        return self.casefold() == other.casefold()
 
     def __ne__(self, other):
-        return self.lower() != other.lower()
+        return self.casefold() != other.casefold()
 
     def __hash__(self):
-        return hash(self.lower())
+        return hash(self.casefold())
 
     def __contains__(self, other):
-        return super().lower().__contains__(other.lower())
+        return super().casefold().__contains__(other.casefold())
 
     def in_(self, other):
         "Does self appear in other?"
         return self in FoldedCase(other)
 
-    # cache lower since it's likely to be called frequently.
+    # cache casefold since it's likely to be called frequently.
     @method_cache
-    def lower(self):
-        return super().lower()
+    def casefold(self):
+        return super().casefold()
 
     def index(self, sub):
-        return self.lower().index(sub.lower())
+        return self.casefold().index(sub.casefold())
 
     def split(self, splitter=' ', maxsplit=0):
         pattern = re.compile(re.escape(splitter), re.I)
@@ -224,9 +227,12 @@ def unwrap(s):
     return '\n'.join(cleaned)
 
 
+lorem_ipsum: str = (
+    files(__name__).joinpath('Lorem ipsum.txt').read_text(encoding='utf-8')
+)
 
 
-class Splitter(object):
+class Splitter:
     """object that will split a string with the given arguments for each call
 
     >>> s = Splitter(',')
@@ -276,7 +282,7 @@ class WordSet(tuple):
     >>> WordSet.parse("myABCClass")
     ('my', 'ABC', 'Class')
 
-    The result is a WordSet, so you can get the form you need.
+    The result is a WordSet, providing access to other forms.
 
     >>> WordSet.parse("myABCClass").underscore_separated()
     'my_ABC_Class'
@@ -363,7 +369,7 @@ def trim(self, item):
         return self.trim_left(item).trim_right(item)
 
     def __getitem__(self, item):
-        result = super(WordSet, self).__getitem__(item)
+        result = super().__getitem__(item)
         if isinstance(item, slice):
             result = WordSet(result)
         return result
@@ -578,7 +584,7 @@ def join_continuation(lines):
     ['foobarbaz']
 
     Not sure why, but...
-    The character preceeding the backslash is also elided.
+    The character preceding the backslash is also elided.
 
     >>> list(join_continuation(['goo\\', 'dly']))
     ['godly']
@@ -597,3 +603,22 @@ def join_continuation(lines):
             except StopIteration:
                 return
         yield item
+
+
+def read_newlines(filename, limit=1024):
+    r"""
+    >>> tmp_path = getfixture('tmp_path')
+    >>> filename = tmp_path / 'out.txt'
+    >>> _ = filename.write_text('foo\n', newline='', encoding='utf-8')
+    >>> read_newlines(filename)
+    '\n'
+    >>> _ = filename.write_text('foo\r\n', newline='', encoding='utf-8')
+    >>> read_newlines(filename)
+    '\r\n'
+    >>> _ = filename.write_text('foo\r\nbar\nbing\r', newline='', encoding='utf-8')
+    >>> read_newlines(filename)
+    ('\r', '\n', '\r\n')
+    """
+    with open(filename, encoding='utf-8') as fp:
+        fp.read(limit)
+    return fp.newlines
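Two notes on the hunks above. First, the move from ``lower()`` to ``casefold()`` is what makes the new sharp-s doctest pass: ``casefold()`` performs full Unicode case folding, whereas ``lower()`` is only a case mapping. Second, ``read_newlines`` leans on ``io.TextIOWrapper`` recording each newline style it translates in its ``newlines`` attribute, which remains readable after the ``with`` block closes the file. A quick stdlib-only check of the folding behavior:

.. code-block:: python

    # lower() leaves the German sharp s alone; casefold() folds it,
    # which is why FoldedCase('ß') == FoldedCase('ss') holds.
    assert 'ß'.lower() == 'ß'
    assert 'ß'.casefold() == 'ss'
    assert 'Straße'.casefold() == 'strasse'
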
diff --git a/setuptools/_vendor/jaraco/text/layouts.py b/setuptools/_vendor/jaraco/text/layouts.py
new file mode 100644
index 0000000000..9636f0f7b5
--- /dev/null
+++ b/setuptools/_vendor/jaraco/text/layouts.py
@@ -0,0 +1,25 @@
+qwerty = "-=qwertyuiop[]asdfghjkl;'zxcvbnm,./_+QWERTYUIOP{}ASDFGHJKL:\"ZXCVBNM<>?"
+dvorak = "[]',.pyfgcrl/=aoeuidhtns-;qjkxbmwvz{}\"<>PYFGCRL?+AOEUIDHTNS_:QJKXBMWVZ"
+
+
+to_dvorak = str.maketrans(qwerty, dvorak)
+to_qwerty = str.maketrans(dvorak, qwerty)
+
+
+def translate(input, translation):
+    """
+    >>> translate('dvorak', to_dvorak)
+    'ekrpat'
+    >>> translate('qwerty', to_qwerty)
+    'x,dokt'
+    """
+    return input.translate(translation)
+
+
+def _translate_stream(stream, translation):
+    """
+    >>> import io
+    >>> _translate_stream(io.StringIO('foo'), to_dvorak)
+    urr
+    """
+    print(translate(stream.read(), translation))
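``str.maketrans`` pairs the two layout strings position by position into a codepoint-to-codepoint table, so ``str.translate`` is a single lookup per character. Reproducing the module's first doctest with just those tables:

.. code-block:: python

    qwerty = "-=qwertyuiop[]asdfghjkl;'zxcvbnm,./_+QWERTYUIOP{}ASDFGHJKL:\"ZXCVBNM<>?"
    dvorak = "[]',.pyfgcrl/=aoeuidhtns-;qjkxbmwvz{}\"<>PYFGCRL?+AOEUIDHTNS_:QJKXBMWVZ"

    to_dvorak = str.maketrans(qwerty, dvorak)
    # Typing 'dvorak' on a QWERTY keyboard comes out as 'ekrpat' on Dvorak.
    print('dvorak'.translate(to_dvorak))  # ekrpat
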
diff --git a/setuptools/_vendor/jaraco/text/show-newlines.py b/setuptools/_vendor/jaraco/text/show-newlines.py
new file mode 100644
index 0000000000..e11d1ba428
--- /dev/null
+++ b/setuptools/_vendor/jaraco/text/show-newlines.py
@@ -0,0 +1,33 @@
+import autocommand
+import inflect
+
+from more_itertools import always_iterable
+
+import jaraco.text
+
+
+def report_newlines(filename):
+    r"""
+    Report the newlines in the indicated file.
+
+    >>> tmp_path = getfixture('tmp_path')
+    >>> filename = tmp_path / 'out.txt'
+    >>> _ = filename.write_text('foo\nbar\n', newline='', encoding='utf-8')
+    >>> report_newlines(filename)
+    newline is '\n'
+    >>> filename = tmp_path / 'out.txt'
+    >>> _ = filename.write_text('foo\nbar\r\n', newline='', encoding='utf-8')
+    >>> report_newlines(filename)
+    newlines are ('\n', '\r\n')
+    """
+    newlines = jaraco.text.read_newlines(filename)
+    count = len(tuple(always_iterable(newlines)))
+    engine = inflect.engine()
+    print(
+        engine.plural_noun("newline", count),
+        engine.plural_verb("is", count),
+        repr(newlines),
+    )
+
+
+autocommand.autocommand(__name__)(report_newlines)
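``read_newlines`` returns either a single string or a tuple of styles, and ``always_iterable`` normalizes both shapes so the element count can drive ``inflect``'s pluralization (``newline is`` vs. ``newlines are``). The normalization in isolation:

.. code-block:: python

    from more_itertools import always_iterable

    # Strings count as one item; tuples are iterated element by element.
    assert len(tuple(always_iterable('\n'))) == 1
    assert len(tuple(always_iterable(('\n', '\r\n')))) == 2
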
diff --git a/setuptools/_vendor/jaraco/text/strip-prefix.py b/setuptools/_vendor/jaraco/text/strip-prefix.py
new file mode 100644
index 0000000000..761717a9b9
--- /dev/null
+++ b/setuptools/_vendor/jaraco/text/strip-prefix.py
@@ -0,0 +1,21 @@
+import sys
+
+import autocommand
+
+from jaraco.text import Stripper
+
+
+def strip_prefix():
+    r"""
+    Strip any common prefix from stdin.
+
+    >>> import io, pytest
+    >>> getfixture('monkeypatch').setattr('sys.stdin', io.StringIO('abcdef\nabc123'))
+    >>> strip_prefix()
+    def
+    123
+    """
+    sys.stdout.writelines(Stripper.strip_prefix(sys.stdin).lines)
+
+
+autocommand.autocommand(__name__)(strip_prefix)
diff --git a/setuptools/_vendor/jaraco/text/to-dvorak.py b/setuptools/_vendor/jaraco/text/to-dvorak.py
new file mode 100644
index 0000000000..a6d5da80b3
--- /dev/null
+++ b/setuptools/_vendor/jaraco/text/to-dvorak.py
@@ -0,0 +1,6 @@
+import sys
+
+from . import layouts
+
+
+__name__ == '__main__' and layouts._translate_stream(sys.stdin, layouts.to_dvorak)
diff --git a/setuptools/_vendor/jaraco/text/to-qwerty.py b/setuptools/_vendor/jaraco/text/to-qwerty.py
new file mode 100644
index 0000000000..abe2728662
--- /dev/null
+++ b/setuptools/_vendor/jaraco/text/to-qwerty.py
@@ -0,0 +1,6 @@
+import sys
+
+from . import layouts
+
+
+__name__ == '__main__' and layouts._translate_stream(sys.stdin, layouts.to_qwerty)
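Both ``to-dvorak.py`` and ``to-qwerty.py`` end with a one-line main guard: ``and`` short-circuits, so ``_translate_stream`` runs only when the file is executed directly, never on import. The conventional long-hand equivalent, using the same names as the file above:

.. code-block:: python

    if __name__ == '__main__':
        layouts._translate_stream(sys.stdin, layouts.to_qwerty)
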
diff --git a/setuptools/_vendor/packaging-24.0.dist-info/INSTALLER b/setuptools/_vendor/more_itertools-10.3.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/packaging-24.0.dist-info/INSTALLER
rename to setuptools/_vendor/more_itertools-10.3.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/LICENSE b/setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/more_itertools-8.8.0.dist-info/LICENSE
rename to setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/METADATA b/setuptools/_vendor/more_itertools-10.3.0.dist-info/METADATA
similarity index 60%
rename from setuptools/_vendor/more_itertools-8.8.0.dist-info/METADATA
rename to setuptools/_vendor/more_itertools-10.3.0.dist-info/METADATA
index bdaee6553f..fb41b0cfe6 100644
--- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/METADATA
+++ b/setuptools/_vendor/more_itertools-10.3.0.dist-info/METADATA
@@ -1,28 +1,26 @@
 Metadata-Version: 2.1
 Name: more-itertools
-Version: 8.8.0
+Version: 10.3.0
 Summary: More routines for operating on iterables, beyond itertools
-Home-page: https://github.com/more-itertools/more-itertools
-Author: Erik Rose
-Author-email: erikrose@grinchcentral.com
-License: MIT
-Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked
-Platform: UNKNOWN
+Keywords: itertools,iterator,iteration,filter,peek,peekable,chunk,chunked
+Author-email: Erik Rose <erikrose@grinchcentral.com>
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Natural Language :: English
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Topic :: Software Development :: Libraries
-Requires-Python: >=3.5
-Description-Content-Type: text/x-rst
+Project-URL: Homepage, https://github.com/more-itertools/more-itertools
 
 ==============
 More Itertools
@@ -36,124 +34,169 @@ for a variety of problems with the functions it provides. In ``more-itertools``
 we collect additional building blocks, recipes, and routines for working with
 Python iterables.
 
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Grouping               | `chunked `_,                                                                                                                        |
-|                        | `ichunked `_,                                                                                                                      |
-|                        | `sliced `_,                                                                                                                          |
-|                        | `distribute `_,                                                                                                                  |
-|                        | `divide `_,                                                                                                                          |
-|                        | `split_at `_,                                                                                                                      |
-|                        | `split_before `_,                                                                                                              |
-|                        | `split_after `_,                                                                                                                |
-|                        | `split_into `_,                                                                                                                  |
-|                        | `split_when `_,                                                                                                                  |
-|                        | `bucket `_,                                                                                                                          |
-|                        | `unzip `_,                                                                                                                            |
-|                        | `grouper `_,                                                                                                                        |
-|                        | `partition `_                                                                                                                     |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Lookahead and lookback | `spy `_,                                                                                                                                |
-|                        | `peekable `_,                                                                                                                      |
-|                        | `seekable `_                                                                                                                       |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Windowing              | `windowed `_,                                                                                                                      |
-|                        | `substrings `_,                                                                                                                  |
-|                        | `substrings_indexes `_,                                                                                                  |
-|                        | `stagger `_,                                                                                                                        |
-|                        | `windowed_complete `_,                                                                                                    |
-|                        | `pairwise `_                                                                                                                       |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Augmenting             | `count_cycle `_,                                                                                                                |
-|                        | `intersperse `_,                                                                                                                |
-|                        | `padded `_,                                                                                                                          |
-|                        | `mark_ends `_,                                                                                                                    |
-|                        | `repeat_last `_,                                                                                                                |
-|                        | `adjacent `_,                                                                                                                      |
-|                        | `groupby_transform `_,                                                                                                    |
-|                        | `padnone `_,                                                                                                                        |
-|                        | `ncycles `_                                                                                                                         |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Combining              | `collapse `_,                                                                                                                      |
-|                        | `sort_together `_,                                                                                                            |
-|                        | `interleave `_,                                                                                                                  |
-|                        | `interleave_longest `_,                                                                                                  |
-|                        | `zip_offset `_,                                                                                                                  |
-|                        | `zip_equal `_,                                                                                                                    |
-|                        | `dotproduct `_,                                                                                                                  |
-|                        | `convolve `_,                                                                                                                      |
-|                        | `flatten `_,                                                                                                                        |
-|                        | `roundrobin `_,                                                                                                                  |
-|                        | `prepend `_,                                                                                                                        |
-|                        | `value_chain `_                                                                                                                 |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Summarizing            | `ilen `_,                                                                                                                              |
-|                        | `unique_to_each `_,                                                                                                          |
-|                        | `sample `_,                                                                                                                          |
-|                        | `consecutive_groups `_,                                                                                                  |
-|                        | `run_length `_,                                                                                                                  |
-|                        | `map_reduce `_,                                                                                                                  |
-|                        | `exactly_n `_,                                                                                                                    |
-|                        | `is_sorted `_,                                                                                                                    |
-|                        | `all_equal `_,                                                                                                                    |
-|                        | `all_unique `_,                                                                                                                  |
-|                        | `first_true `_,                                                                                                                  |
-|                        | `quantify `_                                                                                                                       |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Selecting              | `islice_extended `_,                                                                                                        |
-|                        | `first `_,                                                                                                                            |
-|                        | `last `_,                                                                                                                              |
-|                        | `one `_,                                                                                                                                |
-|                        | `only `_,                                                                                                                              |
-|                        | `strip `_,                                                                                                                            |
-|                        | `lstrip `_,                                                                                                                          |
-|                        | `rstrip `_,                                                                                                                          |
-|                        | `filter_except `_                                                                                                             |
-|                        | `map_except `_                                                                                                                   |
-|                        | `nth_or_last `_,                                                                                                                |
-|                        | `nth `_,                                                                                                                                |
-|                        | `take `_,                                                                                                                              |
-|                        | `tail `_,                                                                                                                              |
-|                        | `unique_everseen `_,                                                                                                       |
-|                        | `unique_justseen `_                                                                                                         |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Combinatorics          | `distinct_permutations `_,                                                                                            |
-|                        | `distinct_combinations `_,                                                                                            |
-|                        | `circular_shifts `_,                                                                                                        |
-|                        | `partitions `_,                                                                                                                  |
-|                        | `set_partitions `_,                                                                                                          |
-|                        | `product_index `_,                                                                                                            |
-|                        | `combination_index `_,                                                                                                    |
-|                        | `permutation_index `_,                                                                                                    |
-|                        | `powerset `_,                                                                                                                      |
-|                        | `random_product `_,                                                                                                          |
-|                        | `random_permutation `_,                                                                                                  |
-|                        | `random_combination `_,                                                                                                  |
-|                        | `random_combination_with_replacement `_,                                                                |
-|                        | `nth_product `_                                                                                                                 |
-|                        | `nth_permutation `_                                                                                                         |
-|                        | `nth_combination `_                                                                                                         |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Wrapping               | `always_iterable `_,                                                                                                        |
-|                        | `always_reversible `_,                                                                                                    |
-|                        | `countable `_,                                                                                                                    |
-|                        | `consumer `_,                                                                                                                      |
-|                        | `with_iter `_,                                                                                                                    |
-|                        | `iter_except `_                                                                                                                 |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Others                 | `locate `_,                                                                                                                          |
-|                        | `rlocate `_,                                                                                                                        |
-|                        | `replace `_,                                                                                                                        |
-|                        | `numeric_range `_,                                                                                                            |
-|                        | `side_effect `_,                                                                                                                |
-|                        | `iterate `_,                                                                                                                        |
-|                        | `difference `_,                                                                                                                  |
-|                        | `make_decorator `_,                                                                                                          |
-|                        | `SequenceView `_,                                                                                                              |
-|                        | `time_limited `_,                                                                                                              |
-|                        | `consume `_,                                                                                                                        |
-|                        | `tabulate `_,                                                                                                                      |
-|                        | `repeatfunc `_                                                                                                                   |
-+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Grouping               | `chunked `_,                                                                               |
+|                        | `ichunked `_,                                                                             |
+|                        | `chunked_even `_,                                                                     |
+|                        | `sliced `_,                                                                                 |
+|                        | `constrained_batches `_,                                                       |
+|                        | `distribute `_,                                                                         |
+|                        | `divide `_,                                                                                 |
+|                        | `split_at `_,                                                                             |
+|                        | `split_before `_,                                                                     |
+|                        | `split_after `_,                                                                       |
+|                        | `split_into `_,                                                                         |
+|                        | `split_when `_,                                                                         |
+|                        | `bucket `_,                                                                                 |
+|                        | `unzip `_,                                                                                   |
+|                        | `batched `_,                                                                               |
+|                        | `grouper `_,                                                                               |
+|                        | `partition `_,                                                                           |
+|                        | `transpose `_                                                                            |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Lookahead and lookback | `spy `_,                                                                                       |
+|                        | `peekable `_,                                                                             |
+|                        | `seekable `_                                                                              |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Windowing              | `windowed `_,                                                                             |
+|                        | `substrings `_,                                                                         |
+|                        | `substrings_indexes `_,                                                         |
+|                        | `stagger `_,                                                                               |
+|                        | `windowed_complete `_,                                                           |
+|                        | `pairwise `_,                                                                             |
+|                        | `triplewise `_,                                                                         |
+|                        | `sliding_window `_,                                                                 |
+|                        | `subslices `_                                                                            |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Augmenting             | `count_cycle `_,                                                                       |
+|                        | `intersperse `_,                                                                       |
+|                        | `padded `_,                                                                                 |
+|                        | `repeat_each `_,                                                                       |
+|                        | `mark_ends `_,                                                                           |
+|                        | `repeat_last `_,                                                                       |
+|                        | `adjacent `_,                                                                             |
+|                        | `groupby_transform `_,                                                           |
+|                        | `pad_none `_,                                                                             |
+|                        | `ncycles `_                                                                                |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combining              | `collapse `_,                                                                             |
+|                        | `sort_together `_,                                                                   |
+|                        | `interleave `_,                                                                         |
+|                        | `interleave_longest `_,                                                         |
+|                        | `interleave_evenly `_,                                                           |
+|                        | `zip_offset `_,                                                                         |
+|                        | `zip_equal `_,                                                                           |
+|                        | `zip_broadcast `_,                                                                   |
+|                        | `flatten `_,                                                                               |
+|                        | `roundrobin `_,                                                                         |
+|                        | `prepend `_,                                                                               |
+|                        | `value_chain `_,                                                                       |
+|                        | `partial_product `_                                                                |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Summarizing            | `ilen `_,                                                                                     |
+|                        | `unique_to_each `_,                                                                 |
+|                        | `sample `_,                                                                                 |
+|                        | `consecutive_groups `_,                                                         |
+|                        | `run_length `_,                                                                         |
+|                        | `map_reduce `_,                                                                         |
+|                        | `join_mappings `_,                                                                   |
+|                        | `exactly_n `_,                                                                           |
+|                        | `is_sorted `_,                                                                           |
+|                        | `all_equal `_,                                                                           |
+|                        | `all_unique `_,                                                                         |
+|                        | `minmax `_,                                                                                 |
+|                        | `first_true `_,                                                                         |
+|                        | `quantify `_,                                                                             |
+|                        | `iequals `_                                                                                |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Selecting              | `islice_extended `_,                                                               |
+|                        | `first `_,                                                                                   |
+|                        | `last `_,                                                                                     |
+|                        | `one `_,                                                                                       |
+|                        | `only `_,                                                                                     |
+|                        | `strictly_n `_,                                                                         |
+|                        | `strip `_,                                                                                   |
+|                        | `lstrip `_,                                                                                 |
+|                        | `rstrip `_,                                                                                 |
+|                        | `filter_except `_,                                                                   |
+|                        | `map_except `_,                                                                         |
+|                        | `filter_map `_,                                                                         |
+|                        | `iter_suppress `_,                                                                   |
+|                        | `nth_or_last `_,                                                                       |
+|                        | `unique_in_window `_,                                                             |
+|                        | `before_and_after `_,                                                             |
+|                        | `nth `_,                                                                                       |
+|                        | `take `_,                                                                                     |
+|                        | `tail `_,                                                                                     |
+|                        | `unique_everseen `_,                                                               |
+|                        | `unique_justseen `_,                                                               |
+|                        | `unique `_,                                                                                 |
+|                        | `duplicates_everseen `_,                                                       |
+|                        | `duplicates_justseen `_,                                                       |
+|                        | `classify_unique `_,                                                               |
+|                        | `longest_common_prefix `_,                                                   |
+|                        | `takewhile_inclusive `_                                                        |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Math                   | `dft `_,                                                                                       |
+|                        | `idft `_,                                                                                     |
+|                        | `convolve `_,                                                                             |
+|                        | `dotproduct `_,                                                                         |
+|                        | `factor `_,                                                                                 |
+|                        | `matmul `_,                                                                                 |
+|                        | `polynomial_from_roots `_,                                                   |
+|                        | `polynomial_derivative `_,                                                   |
+|                        | `polynomial_eval `_,                                                               |
+|                        | `sieve `_,                                                                                   |
+|                        | `sum_of_squares `_,                                                                 |
+|                        | `totient `_                                                                                |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combinatorics          | `distinct_permutations `_,                                                   |
+|                        | `distinct_combinations `_,                                                   |
+|                        | `circular_shifts `_,                                                               |
+|                        | `partitions `_,                                                                         |
+|                        | `set_partitions `_,                                                                 |
+|                        | `product_index `_,                                                                   |
+|                        | `combination_index `_,                                                           |
+|                        | `permutation_index `_,                                                           |
+|                        | `combination_with_replacement_index `_,                         |
+|                        | `gray_product  `_,                                                                    |
+|                        | `outer_product  `_,                                                                  |
+|                        | `powerset `_,                                                                             |
+|                        | `powerset_of_sets `_,                                                             |
+|                        | `random_product `_,                                                                 |
+|                        | `random_permutation `_,                                                         |
+|                        | `random_combination `_,                                                         |
+|                        | `random_combination_with_replacement `_,                       |
+|                        | `nth_product `_,                                                                       |
+|                        | `nth_permutation `_,                                                               |
+|                        | `nth_combination `_,                                                               |
+|                        | `nth_combination_with_replacement `_                              |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Wrapping               | `always_iterable `_,                                                               |
+|                        | `always_reversible `_,                                                           |
+|                        | `countable `_,                                                                           |
+|                        | `consumer `_,                                                                             |
+|                        | `with_iter `_,                                                                           |
+|                        | `iter_except `_                                                                        |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Others                 | `locate `_,                                                                                 |
+|                        | `rlocate `_,                                                                               |
+|                        | `replace `_,                                                                               |
+|                        | `numeric_range `_,                                                                   |
+|                        | `side_effect `_,                                                                       |
+|                        | `iterate `_,                                                                               |
+|                        | `difference `_,                                                                         |
+|                        | `make_decorator `_,                                                                 |
+|                        | `SequenceView `_,                                                                     |
+|                        | `time_limited `_,                                                                     |
+|                        | `map_if `_,                                                                                 |
+|                        | `iter_index `_,                                                                         |
+|                        | `consume `_,                                                                               |
+|                        | `tabulate `_,                                                                             |
+|                        | `repeatfunc `_,                                                                         |
+|                        | `reshape `_                                                                                |
+|                        | `doublestarmap `_                                                                    |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
 
 
 Getting started
@@ -204,6 +247,7 @@ Blog posts about ``more-itertools``:
 
 * `Yo, I heard you like decorators `__
 * `Tour of Python Itertools `__ (`Alternate `__)
+* `Real-World Python More Itertools `_
 
 
 Development
@@ -218,245 +262,5 @@ repository. Thanks for contributing!
 Version History
 ===============
 
-
-   :noindex:
-
-8.8.0
------
-
-* New functions
-    * countable (thanks to krzysieq)
-
-* Changes to existing functions
-    * split_before was updated to handle empy collections (thanks to TiunovNN)
-    * unique_everseen got a performance boost (thanks to Numerlor)
-    * The type hint for value_chain was corrected (thanks to vr2262)
-
-8.7.0
------
-
-* New functions
-    * convolve (from the Python itertools docs)
-    * product_index, combination_index, and permutation_index (thanks to N8Brooks)
-    * value_chain (thanks to jenstroeger)
-
-* Changes to existing functions
-    * distinct_combinations now uses a non-recursive algorithm (thanks to  knutdrand)
-    * pad_none is now the preferred name for padnone, though the latter remains available.
-    * pairwise will now use the Python standard library implementation on Python 3.10+
-    * sort_together now accepts a ``key`` argument (thanks to brianmaissy)
-    * seekable now has a ``peek`` method, and can indicate whether the iterator it's wrapping is exhausted (thanks to gsakkis)
-    * time_limited can now indicate whether its iterator has expired (thanks to roysmith)
-    * The implementation of unique_everseen was improved (thanks to plammens)
-
-* Other changes:
-    * Various documentation updates (thanks to cthoyt, Evantm, and cyphase)
-
-8.6.0
------
-
-* New itertools
-    * all_unique (thanks to brianmaissy)
-    * nth_product and nth_permutation (thanks to N8Brooks)
-
-* Changes to existing itertools
-    * chunked and sliced now accept a ``strict`` parameter (thanks to shlomif and jtwool)
-
-* Other changes
-    * Python 3.5 has reached its end of life and is no longer supported.
-    * Python 3.9 is officially supported.
-    * Various documentation fixes (thanks to timgates42)
-
-8.5.0
------
-
-* New itertools
-    * windowed_complete (thanks to MarcinKonowalczyk)
-
-* Changes to existing itertools:
-    * The is_sorted implementation was improved (thanks to cool-RR)
-    * The groupby_transform now accepts a ``reducefunc`` parameter.
-    * The last implementation was improved (thanks to brianmaissy)
-
-* Other changes
-    * Various documentation fixes (thanks to craigrosie, samuelstjean, PiCT0)
-    * The tests for distinct_combinations were improved (thanks to Minabsapi)
-    * Automated tests now run on GitHub Actions. All commits now check:
-        * That unit tests pass
-        * That the examples in docstrings work
-        * That test coverage remains high (using `coverage`)
-        * For linting errors (using `flake8`)
-        * For consistent style (using `black`)
-        * That the type stubs work (using `mypy`)
-        * That the docs build correctly (using `sphinx`)
-        * That packages build correctly (using `twine`)
-
-8.4.0
------
-
-* New itertools
-    * mark_ends (thanks to kalekundert)
-    * is_sorted
-
-* Changes to existing itertools:
-    * islice_extended can now be used with real slices (thanks to cool-RR)
-    * The implementations for filter_except and map_except were improved (thanks to SergBobrovsky)
-
-* Other changes
-    * Automated tests now enforce code style (using `black `__)
-    * The various signatures of islice_extended and numeric_range now appear in the docs (thanks to dsfulf)
-    * The test configuration for mypy was updated (thanks to blueyed)
-
-
-8.3.0
------
-
-* New itertools
-    * zip_equal (thanks to frankier and alexmojaki)
-
-* Changes to existing itertools:
-    * split_at, split_before, split_after, and split_when all got a ``maxsplit`` paramter (thanks to jferard and ilai-deutel)
-    * split_at now accepts a ``keep_separator`` parameter (thanks to jferard)
-    * distinct_permutations can now generate ``r``-length permutations (thanks to SergBobrovsky and ilai-deutel)
-    * The windowed implementation was improved  (thanks to SergBobrovsky)
-    * The spy implementation was improved (thanks to has2k1)
-
-* Other changes
-    * Type stubs are now tested with ``stubtest`` (thanks to ilai-deutel)
-    * Tests now run with ``python -m unittest`` instead of ``python setup.py test`` (thanks to jdufresne)
-
-8.2.0
------
-
-* Bug fixes
-    * The .pyi files for typing were updated. (thanks to blueyed and ilai-deutel)
-
-* Changes to existing itertools:
-    * numeric_range now behaves more like the built-in range. (thanks to jferard)
-    * bucket now allows for enumerating keys. (thanks to alexchandel)
-    * sliced now should now work for numpy arrays. (thanks to sswingle)
-    * seekable now has a ``maxlen`` parameter.
-
-8.1.0
------
-
-* Bug fixes
-    * partition works with ``pred=None`` again. (thanks to MSeifert04)
-
-* New itertools
-    * sample (thanks to tommyod)
-    * nth_or_last (thanks to d-ryzhikov)
-
-* Changes to existing itertools:
-    * The implementation for divide was improved. (thanks to jferard)
-
-8.0.2
------
-
-* Bug fixes
-    * The type stub files are now part of the wheel distribution (thanks to keisheiled)
-
-8.0.1
------
-
-* Bug fixes
-    * The type stub files now work for functions imported from the
-      root package (thanks to keisheiled)
-
-8.0.0
------
-
-* New itertools and other additions
-    * This library now ships type hints for use with mypy.
-      (thanks to ilai-deutel for the implementation, and to gabbard and fmagin for assistance)
-    * split_when (thanks to jferard)
-    * repeat_last (thanks to d-ryzhikov)
-
-* Changes to existing itertools:
-    * The implementation for set_partitions was improved. (thanks to jferard)
-    * partition was optimized for expensive predicates. (thanks to stevecj)
-    * unique_everseen and groupby_transform were re-factored. (thanks to SergBobrovsky)
-    * The implementation for difference was improved. (thanks to Jabbey92)
-
-* Other changes
-    * Python 3.4 has reached its end of life and is no longer supported.
-    * Python 3.8 is officially supported. (thanks to jdufresne)
-    * The ``collate`` function has been deprecated.
-      It raises a ``DeprecationWarning`` if used, and will be removed in a future release.
-    * one and only now provide more informative error messages. (thanks to gabbard)
-    * Unit tests were moved outside of the main package (thanks to jdufresne)
-    * Various documentation fixes (thanks to kriomant, gabbard, jdufresne)
-
-
-7.2.0
------
-
-* New itertools
-    * distinct_combinations
-    * set_partitions (thanks to kbarrett)
-    * filter_except
-    * map_except
-
-7.1.0
------
-
-* New itertools
-    * ichunked (thanks davebelais and youtux)
-    * only (thanks jaraco)
-
-* Changes to existing itertools:
-    * numeric_range now supports ranges specified by
-      ``datetime.datetime`` and ``datetime.timedelta`` objects (thanks to MSeifert04 for tests).
-    * difference now supports an *initial* keyword argument.
-
-
-* Other changes
-    * Various documentation fixes (thanks raimon49, pylang)
-
-7.0.0
------
-
-* New itertools:
-    * time_limited
-    * partitions (thanks to rominf and Saluev)
-    * substrings_indexes (thanks to rominf)
-
-* Changes to existing itertools:
-    * collapse now treats ``bytes`` objects the same as ``str`` objects. (thanks to Sweenpet)
-
-The major version update is due to the change in the default behavior of
-collapse. It now treats ``bytes`` objects the same as ``str`` objects.
-This aligns its behavior with always_iterable.
-
-.. code-block:: python
-
-    >>> from more_itertools import collapse
-    >>> iterable = [[1, 2], b'345', [6]]
-    >>> print(list(collapse(iterable)))
-    [1, 2, b'345', 6]
-
-6.0.0
------
-
-* Major changes:
-    * Python 2.7 is no longer supported. The 5.0.0 release will be the last
-      version targeting Python 2.7.
-    * All future releases will target the active versions of Python 3.
-      As of 2019, those are Python 3.4 and above.
-    * The ``six`` library is no longer a dependency.
-    * The accumulate function is no longer part of this library. You
-      may import a better version from the standard ``itertools`` module.
-
-* Changes to existing itertools:
-    * The order of the parameters in grouper has changed to match
-      the latest recipe in the itertools documentation. Use of the old order
-      will be supported in this release, but will emit a ``DeprecationWarning``.
-      The legacy behavior will be dropped in a future release. (thanks to jaraco)
-    * distinct_permutations was improved (thanks to jferard - see also `permutations with unique values `_ at StackOverflow.)
-    * An unused parameter was removed from substrings. (thanks to pylang)
-
-* Other changes:
-    * The docs for unique_everseen were improved. (thanks to jferard and MSeifert04)
-    * Several Python 2-isms were removed. (thanks to jaraco, MSeifert04, and hugovk)
-
+The version history can be found in `documentation `_.
 
diff --git a/setuptools/_vendor/more_itertools-10.3.0.dist-info/RECORD b/setuptools/_vendor/more_itertools-10.3.0.dist-info/RECORD
new file mode 100644
index 0000000000..f15f3fcdc5
--- /dev/null
+++ b/setuptools/_vendor/more_itertools-10.3.0.dist-info/RECORD
@@ -0,0 +1,16 @@
+more_itertools-10.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+more_itertools-10.3.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
+more_itertools-10.3.0.dist-info/METADATA,sha256=BFO90O-fLNiVQMpj7oIS5ztzgJUUQZ3TA32P5HH3N-A,36293
+more_itertools-10.3.0.dist-info/RECORD,,
+more_itertools-10.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+more_itertools-10.3.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
+more_itertools/__init__.py,sha256=dtAbGjTDmn_ghiU5YXfhyDy0phAlXVdt5klZA5fUa-Q,149
+more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
+more_itertools/__pycache__/__init__.cpython-312.pyc,,
+more_itertools/__pycache__/more.cpython-312.pyc,,
+more_itertools/__pycache__/recipes.cpython-312.pyc,,
+more_itertools/more.py,sha256=1E5kzFncRKTDw0cYv1yRXMgDdunstLQd1QStcnL6U90,148370
+more_itertools/more.pyi,sha256=iXXeqt48Nxe8VGmIWpkVXuKpR2FYNuu2DU8nQLWCCu0,21484
+more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+more_itertools/recipes.py,sha256=WedhhfhGVgr6zii8fIbGJVmRTw0ZKRiLKnYBDGJv4nY,28591
+more_itertools/recipes.pyi,sha256=T_mdGpcFdfrP3JSWbwzYP9JyNV-Go-7RPfpxfftAWlA,4617
diff --git a/setuptools/_vendor/more_itertools-10.3.0.dist-info/REQUESTED b/setuptools/_vendor/more_itertools-10.3.0.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/more_itertools-10.3.0.dist-info/WHEEL b/setuptools/_vendor/more_itertools-10.3.0.dist-info/WHEEL
new file mode 100644
index 0000000000..db4a255f3a
--- /dev/null
+++ b/setuptools/_vendor/more_itertools-10.3.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.8.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD b/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD
deleted file mode 100644
index d1a6ea0d22..0000000000
--- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/RECORD
+++ /dev/null
@@ -1,17 +0,0 @@
-more_itertools-8.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-more_itertools-8.8.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
-more_itertools-8.8.0.dist-info/METADATA,sha256=Gke9w7RnfiAvveik_iBBrzd0RjrDhsQ8uRYNBJdo4qQ,40482
-more_itertools-8.8.0.dist-info/RECORD,,
-more_itertools-8.8.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-more_itertools-8.8.0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
-more_itertools-8.8.0.dist-info/top_level.txt,sha256=fAuqRXu9LPhxdB9ujJowcFOu1rZ8wzSpOW9_jlKis6M,15
-more_itertools/__init__.py,sha256=C7sXffHTXM3P-iaLPPfqfmDoxOflQMJLcM7ed9p3jak,82
-more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
-more_itertools/__pycache__/__init__.cpython-312.pyc,,
-more_itertools/__pycache__/more.cpython-312.pyc,,
-more_itertools/__pycache__/recipes.cpython-312.pyc,,
-more_itertools/more.py,sha256=DlZa8v6JihVwfQ5zHidOA-xDE0orcQIUyxVnCaUoDKE,117968
-more_itertools/more.pyi,sha256=r32pH2raBC1zih3evK4fyvAXvrUamJqc6dgV7QCRL_M,14977
-more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-more_itertools/recipes.py,sha256=UkNkrsZyqiwgLHANBTmvMhCvaNSvSNYhyOpz_Jc55DY,16256
-more_itertools/recipes.pyi,sha256=9BpeKd5_qalYVSnuHfqPSCfoGgqnQY2Xu9pNwrDlHU8,3551
diff --git a/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt b/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt
deleted file mode 100644
index a5035befb3..0000000000
--- a/setuptools/_vendor/more_itertools-8.8.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-more_itertools
diff --git a/setuptools/_vendor/more_itertools/__init__.py b/setuptools/_vendor/more_itertools/__init__.py
index 19a169fc30..9c4662fc31 100644
--- a/setuptools/_vendor/more_itertools/__init__.py
+++ b/setuptools/_vendor/more_itertools/__init__.py
@@ -1,4 +1,6 @@
+"""More routines for operating on iterables, beyond itertools"""
+
 from .more import *  # noqa
 from .recipes import *  # noqa
 
-__version__ = '8.8.0'
+__version__ = '10.3.0'
diff --git a/setuptools/_vendor/more_itertools/more.py b/setuptools/_vendor/more_itertools/more.py
old mode 100644
new mode 100755
index e6fca4d47f..7b481907da
--- a/setuptools/_vendor/more_itertools/more.py
+++ b/setuptools/_vendor/more_itertools/more.py
@@ -1,11 +1,13 @@
+import math
 import warnings
 
 from collections import Counter, defaultdict, deque, abc
 from collections.abc import Sequence
-from functools import partial, reduce, wraps
-from heapq import merge, heapify, heapreplace, heappop
+from functools import cached_property, partial, reduce, wraps
+from heapq import heapify, heapreplace, heappop
 from itertools import (
     chain,
+    combinations,
     compress,
     count,
     cycle,
@@ -17,72 +19,106 @@
     takewhile,
     tee,
     zip_longest,
+    product,
 )
-from math import exp, factorial, floor, log
+from math import comb, e, exp, factorial, floor, fsum, log, perm, tau
 from queue import Empty, Queue
 from random import random, randrange, uniform
-from operator import itemgetter, mul, sub, gt, lt
+from operator import itemgetter, mul, sub, gt, lt, ge, le
 from sys import hexversion, maxsize
 from time import monotonic
 
 from .recipes import (
+    _marker,
+    _zip_equal,
+    UnequalIterablesError,
     consume,
     flatten,
     pairwise,
     powerset,
     take,
     unique_everseen,
+    all_equal,
+    batched,
 )
 
 __all__ = [
     'AbortThread',
+    'SequenceView',
+    'UnequalIterablesError',
     'adjacent',
+    'all_unique',
     'always_iterable',
     'always_reversible',
     'bucket',
     'callback_iter',
     'chunked',
+    'chunked_even',
     'circular_shifts',
     'collapse',
-    'collate',
+    'combination_index',
+    'combination_with_replacement_index',
     'consecutive_groups',
+    'constrained_batches',
     'consumer',
-    'countable',
     'count_cycle',
-    'mark_ends',
+    'countable',
+    'dft',
     'difference',
     'distinct_combinations',
     'distinct_permutations',
     'distribute',
     'divide',
+    'doublestarmap',
+    'duplicates_everseen',
+    'duplicates_justseen',
+    'classify_unique',
     'exactly_n',
     'filter_except',
+    'filter_map',
     'first',
+    'gray_product',
     'groupby_transform',
+    'ichunked',
+    'iequals',
+    'idft',
     'ilen',
-    'interleave_longest',
     'interleave',
+    'interleave_evenly',
+    'interleave_longest',
     'intersperse',
+    'is_sorted',
     'islice_extended',
     'iterate',
-    'ichunked',
-    'is_sorted',
+    'iter_suppress',
+    'join_mappings',
     'last',
     'locate',
+    'longest_common_prefix',
     'lstrip',
     'make_decorator',
     'map_except',
+    'map_if',
     'map_reduce',
+    'mark_ends',
+    'minmax',
     'nth_or_last',
     'nth_permutation',
     'nth_product',
+    'nth_combination_with_replacement',
     'numeric_range',
     'one',
     'only',
+    'outer_product',
     'padded',
+    'partial_product',
     'partitions',
-    'set_partitions',
     'peekable',
+    'permutation_index',
+    'powerset_of_sets',
+    'product_index',
+    'raise_',
+    'repeat_each',
     'repeat_last',
     'replace',
     'rlocate',
@@ -90,37 +126,37 @@
     'run_length',
     'sample',
     'seekable',
-    'SequenceView',
+    'set_partitions',
     'side_effect',
     'sliced',
     'sort_together',
-    'split_at',
     'split_after',
+    'split_at',
     'split_before',
-    'split_when',
     'split_into',
+    'split_when',
     'spy',
     'stagger',
     'strip',
+    'strictly_n',
     'substrings',
     'substrings_indexes',
+    'takewhile_inclusive',
     'time_limited',
+    'unique_in_window',
     'unique_to_each',
     'unzip',
+    'value_chain',
     'windowed',
+    'windowed_complete',
     'with_iter',
-    'UnequalIterablesError',
+    'zip_broadcast',
     'zip_equal',
     'zip_offset',
-    'windowed_complete',
-    'all_unique',
-    'value_chain',
-    'product_index',
-    'combination_index',
-    'permutation_index',
 ]
 
-_marker = object()
+# math.sumprod is available for Python 3.12+
+_fsumprod = getattr(math, 'sumprod', lambda x, y: fsum(map(mul, x, y)))
 
 
 def chunked(iterable, n, strict=False):
@@ -144,6 +180,8 @@ def chunked(iterable, n, strict=False):
     """
     iterator = iter(partial(take, n, iter(iterable)), [])
     if strict:
+        if n is None:
+            raise ValueError('n must not be None when using strict mode.')
 
         def ret():
             for chunk in iterator:
@@ -173,15 +211,14 @@ def first(iterable, default=_marker):
     ``next(iter(iterable), default)``.
 
     """
-    try:
-        return next(iter(iterable))
-    except StopIteration as e:
-        if default is _marker:
-            raise ValueError(
-                'first() was called on an empty iterable, and no '
-                'default value was provided.'
-            ) from e
-        return default
+    for item in iterable:
+        return item
+    if default is _marker:
+        raise ValueError(
+            'first() was called on an empty iterable, and no '
+            'default value was provided.'
+        )
+    return default
 
 
 def last(iterable, default=_marker):
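
The rewritten ``first`` above swaps the ``next(iter(...))`` plus
``except StopIteration`` dance for a plain ``for`` loop that returns on the
first item, keeping the same semantics with less exception plumbing. A quick
usage sketch (editorial illustration, not part of the patch):

.. code-block:: python

    from more_itertools import first

    first([1, 2, 3])      # -> 1
    first([], 'missing')  # -> 'missing'; the default suppresses the error
    first([])             # raises ValueError: empty iterable, no default
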
@@ -395,44 +432,6 @@ def __getitem__(self, index):
         return self._cache[index]
 
 
-def collate(*iterables, **kwargs):
-    """Return a sorted merge of the items from each of several already-sorted
-    *iterables*.
-
-        >>> list(collate('ACDZ', 'AZ', 'JKL'))
-        ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
-
-    Works lazily, keeping only the next value from each iterable in memory. Use
-    :func:`collate` to, for example, perform a n-way mergesort of items that
-    don't fit in memory.
-
-    If a *key* function is specified, the iterables will be sorted according
-    to its result:
-
-        >>> key = lambda s: int(s)  # Sort by numeric value, not by string
-        >>> list(collate(['1', '10'], ['2', '11'], key=key))
-        ['1', '2', '10', '11']
-
-
-    If the *iterables* are sorted in descending order, set *reverse* to
-    ``True``:
-
-        >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True))
-        [5, 4, 3, 2, 1, 0]
-
-    If the elements of the passed-in iterables are out of order, you might get
-    unexpected results.
-
-    On Python 3.5+, this function is an alias for :func:`heapq.merge`.
-
-    """
-    warnings.warn(
-        "collate is no longer part of more_itertools, use heapq.merge",
-        DeprecationWarning,
-    )
-    return merge(*iterables, **kwargs)
-
-
 def consumer(func):
     """Decorator that automatically advances a PEP-342-style "reverse iterator"
     to its first yield point so you don't have to call ``next()`` on it
@@ -492,7 +491,10 @@ def iterate(func, start):
     """
     while True:
         yield start
-        start = func(start)
+        try:
+            start = func(start)
+        except StopIteration:
+            break
 
 
 def with_iter(context_manager):
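
With the ``try``/``except`` added above, ``iterate`` now treats
``StopIteration`` from the step function as end-of-stream rather than letting
it escape. A sketch of the new termination behavior (``halve`` is a made-up
step function for illustration):

.. code-block:: python

    from more_itertools import iterate

    def halve(n):
        if n == 0:
            raise StopIteration  # signals the end of the stream
        return n // 2

    list(iterate(halve, 8))  # -> [8, 4, 2, 1, 0]
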
@@ -558,10 +560,10 @@ def one(iterable, too_short=None, too_long=None):
 
     try:
         first_value = next(it)
-    except StopIteration as e:
+    except StopIteration as exc:
         raise (
             too_short or ValueError('too few items in iterable (expected 1)')
-        ) from e
+        ) from exc
 
     try:
         second_value = next(it)
@@ -577,6 +579,87 @@ def one(iterable, too_short=None, too_long=None):
     return first_value
 
 
+def raise_(exception, *args):
+    raise exception(*args)
+
+
+def strictly_n(iterable, n, too_short=None, too_long=None):
+    """Validate that *iterable* has exactly *n* items and return them if
+    it does. If it has fewer than *n* items, call function *too_short*
+    with those items. If it has more than *n* items, call function
+    *too_long* with the first ``n + 1`` items.
+
+        >>> iterable = ['a', 'b', 'c', 'd']
+        >>> n = 4
+        >>> list(strictly_n(iterable, n))
+        ['a', 'b', 'c', 'd']
+
+    Note that the returned iterable must be consumed in order for the check to
+    be made.
+
+    By default, *too_short* and *too_long* are functions that raise
+    ``ValueError``.
+
+        >>> list(strictly_n('ab', 3))  # doctest: +IGNORE_EXCEPTION_DETAIL
+        Traceback (most recent call last):
+        ...
+        ValueError: too few items in iterable (got 2)
+
+        >>> list(strictly_n('abc', 2))  # doctest: +IGNORE_EXCEPTION_DETAIL
+        Traceback (most recent call last):
+        ...
+        ValueError: too many items in iterable (got at least 3)
+
+    You can instead supply functions that do something else.
+    *too_short* will be called with the number of items in *iterable*.
+    *too_long* will be called with `n + 1`.
+
+        >>> def too_short(item_count):
+        ...     raise RuntimeError
+        >>> it = strictly_n('abcd', 6, too_short=too_short)
+        >>> list(it)  # doctest: +IGNORE_EXCEPTION_DETAIL
+        Traceback (most recent call last):
+        ...
+        RuntimeError
+
+        >>> def too_long(item_count):
+        ...     print('The boss is going to hear about this')
+        >>> it = strictly_n('abcdef', 4, too_long=too_long)
+        >>> list(it)
+        The boss is going to hear about this
+        ['a', 'b', 'c', 'd']
+
+    """
+    if too_short is None:
+        too_short = lambda item_count: raise_(
+            ValueError,
+            'Too few items in iterable (got {})'.format(item_count),
+        )
+
+    if too_long is None:
+        too_long = lambda item_count: raise_(
+            ValueError,
+            'Too many items in iterable (got at least {})'.format(item_count),
+        )
+
+    it = iter(iterable)
+    for i in range(n):
+        try:
+            item = next(it)
+        except StopIteration:
+            too_short(i)
+            return
+        else:
+            yield item
+
+    try:
+        next(it)
+    except StopIteration:
+        pass
+    else:
+        too_long(n + 1)
+
+
 def distinct_permutations(iterable, r=None):
     """Yield successive distinct permutations of the elements in *iterable*.
 
@@ -601,6 +684,7 @@ def distinct_permutations(iterable, r=None):
         [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
 
     """
+
     # Algorithm: https://w.wiki/Qai
     def _full(A):
         while True:
@@ -691,8 +775,8 @@ def intersperse(e, iterable, n=1):
     if n == 0:
         raise ValueError('n must be > 0')
     elif n == 1:
-        # interleave(repeat(e), iterable) -> e, x_0, e, e, x_1, e, x_2...
-        # islice(..., 1, None) -> x_0, e, e, x_1, e, x_2...
+        # interleave(repeat(e), iterable) -> e, x_0, e, x_1, e, x_2...
+        # islice(..., 1, None) -> x_0, e, x_1, e, x_2...
         return islice(interleave(repeat(e), iterable), 1, None)
     else:
         # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]...
@@ -766,24 +850,31 @@ def windowed(seq, n, fillvalue=None, step=1):
     if n < 0:
         raise ValueError('n must be >= 0')
     if n == 0:
-        yield tuple()
+        yield ()
         return
     if step < 1:
         raise ValueError('step must be >= 1')
 
-    window = deque(maxlen=n)
-    i = n
-    for _ in map(window.append, seq):
-        i -= 1
-        if not i:
-            i = step
-            yield tuple(window)
-
-    size = len(window)
-    if size < n:
-        yield tuple(chain(window, repeat(fillvalue, n - size)))
-    elif 0 < i < min(step, n):
-        window += (fillvalue,) * i
+    iterable = iter(seq)
+
+    # Generate first window
+    window = deque(islice(iterable, n), maxlen=n)
+
+    # Deal with the first window not being full
+    if not window:
+        return
+    if len(window) < n:
+        yield tuple(window) + ((fillvalue,) * (n - len(window)))
+        return
+    yield tuple(window)
+
+    # Create the filler for the next windows. The padding ensures
+    # we have just enough elements to fill the last window.
+    padding = (fillvalue,) * (n - 1 if step >= n else step - 1)
+    filler = map(window.append, chain(iterable, padding))
+
+    # Generate the rest of the windows
+    for _ in islice(filler, step - 1, None, step):
         yield tuple(window)
 
 
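The new ``windowed`` materializes the first window with ``islice``, then
appends ``step``-sized batches (padded at the end with *fillvalue*) and yields
snapshots of the deque. A worked example of the semantics this preserves, as
an illustrative sketch:

.. code-block:: python

    from more_itertools import windowed

    # n=3, step=2: windows start at indexes 0 and 2; the second window
    # runs past the data, so its tail comes from the one-element padding
    list(windowed([1, 2, 3, 4], 3, step=2, fillvalue='!'))
    # -> [(1, 2, 3), (3, 4, '!')]
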
@@ -848,7 +939,7 @@ def substrings_indexes(seq, reverse=False):
 
 
 class bucket:
-    """Wrap *iterable* and return an object that buckets it iterable into
+    """Wrap *iterable* and return an object that buckets the iterable into
     child iterables based on a *key* function.
 
         >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
@@ -1016,6 +1107,72 @@ def interleave_longest(*iterables):
     return (x for x in i if x is not _marker)
 
 
+def interleave_evenly(iterables, lengths=None):
+    """
+    Interleave multiple iterables so that their elements are evenly distributed
+    throughout the output sequence.
+
+    >>> iterables = [1, 2, 3, 4, 5], ['a', 'b']
+    >>> list(interleave_evenly(iterables))
+    [1, 2, 'a', 3, 4, 'b', 5]
+
+    >>> iterables = [[1, 2, 3], [4, 5], [6, 7, 8]]
+    >>> list(interleave_evenly(iterables))
+    [1, 6, 4, 2, 7, 3, 8, 5]
+
+    This function requires iterables of known length. Iterables without
+    ``__len__()`` can be used by manually specifying lengths with *lengths*:
+
+    >>> from itertools import combinations, repeat
+    >>> iterables = [combinations(range(4), 2), ['a', 'b', 'c']]
+    >>> lengths = [4 * (4 - 1) // 2, 3]
+    >>> list(interleave_evenly(iterables, lengths=lengths))
+    [(0, 1), (0, 2), 'a', (0, 3), (1, 2), 'b', (1, 3), (2, 3), 'c']
+
+    Based on Bresenham's algorithm.
+    """
+    if lengths is None:
+        try:
+            lengths = [len(it) for it in iterables]
+        except TypeError:
+            raise ValueError(
+                'Iterable lengths could not be determined automatically. '
+                'Specify them with the lengths keyword.'
+            )
+    elif len(iterables) != len(lengths):
+        raise ValueError('Mismatching number of iterables and lengths.')
+
+    dims = len(lengths)
+
+    # sort iterables by length, descending
+    lengths_permute = sorted(
+        range(dims), key=lambda i: lengths[i], reverse=True
+    )
+    lengths_desc = [lengths[i] for i in lengths_permute]
+    iters_desc = [iter(iterables[i]) for i in lengths_permute]
+
+    # the longest iterable is the primary one (Bresenham: the longest
+    # distance along an axis)
+    delta_primary, deltas_secondary = lengths_desc[0], lengths_desc[1:]
+    iter_primary, iters_secondary = iters_desc[0], iters_desc[1:]
+    errors = [delta_primary // dims] * len(deltas_secondary)
+
+    to_yield = sum(lengths)
+    while to_yield:
+        yield next(iter_primary)
+        to_yield -= 1
+        # update errors for each secondary iterable
+        errors = [e - delta for e, delta in zip(errors, deltas_secondary)]
+
+        # those iterables for which the error is negative are yielded
+        # ("diagonal step" in Bresenham)
+        for i, e_ in enumerate(errors):
+            if e_ < 0:
+                yield next(iters_secondary[i])
+                to_yield -= 1
+                errors[i] += delta_primary
+
+
 def collapse(iterable, base_type=None, levels=None):
     """Flatten an iterable with multiple levels of nesting (e.g., a list of
     lists of tuples) into non-iterable types.
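
To see the Bresenham analogy in ``interleave_evenly``, it helps to trace the
error term for the docstring's first example; this walkthrough is editorial
and simply mirrors the code above:

.. code-block:: python

    from more_itertools import interleave_evenly

    # lengths (5, 2): the error starts at 5 // 2 == 2, drops by 2 (the
    # secondary length) after each primary item, and a negative value
    # triggers the "diagonal step", adding the primary length 5 back:
    #
    #   yield 1   error  2 - 2 =  0
    #   yield 2   error  0 - 2 = -2  -> yield 'a', error -2 + 5 = 3
    #   yield 3   error  3 - 2 =  1
    #   yield 4   error  1 - 2 = -1  -> yield 'b', error -1 + 5 = 4
    #   yield 5
    assert list(interleave_evenly([[1, 2, 3, 4, 5], ['a', 'b']])) == [
        1, 2, 'a', 3, 4, 'b', 5]
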
@@ -1042,26 +1199,38 @@ def collapse(iterable, base_type=None, levels=None):
     ['a', ['b'], 'c', ['d']]
 
     """
+    stack = deque()
+    # Add our first node group, treat the iterable as a single node
+    stack.appendleft((0, repeat(iterable, 1)))
 
-    def walk(node, level):
-        if (
-            ((levels is not None) and (level > levels))
-            or isinstance(node, (str, bytes))
-            or ((base_type is not None) and isinstance(node, base_type))
-        ):
-            yield node
-            return
+    while stack:
+        node_group = stack.popleft()
+        level, nodes = node_group
 
-        try:
-            tree = iter(node)
-        except TypeError:
-            yield node
-            return
-        else:
-            for child in tree:
-                yield from walk(child, level + 1)
+        # Check if beyond max level
+        if levels is not None and level > levels:
+            yield from nodes
+            continue
 
-    yield from walk(iterable, 0)
+        for node in nodes:
+            # Check if done iterating
+            if isinstance(node, (str, bytes)) or (
+                (base_type is not None) and isinstance(node, base_type)
+            ):
+                yield node
+            # Otherwise try to create child nodes
+            else:
+                try:
+                    tree = iter(node)
+                except TypeError:
+                    yield node
+                else:
+                    # Save our current location
+                    stack.appendleft(node_group)
+                    # Append the new child node
+                    stack.appendleft((level + 1, tree))
+                    # Break to process child node
+                    break
 
 
 def side_effect(func, iterable, chunk_size=None, before=None, after=None):
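
The stack-based ``collapse`` above replaces recursive generators with an
explicit ``deque`` of ``(level, iterator)`` pairs, so nesting depth is no
longer bounded by the interpreter's recursion limit. A sketch of the case this
unlocks (depth chosen arbitrarily for illustration):

.. code-block:: python

    from more_itertools import collapse

    nested = [0]
    for _ in range(5000):  # wrap the value 5000 levels deep
        nested = [nested]

    # the recursive implementation would hit RecursionError here;
    # the iterative walk handles it without growing the call stack
    assert list(collapse(nested)) == [0]
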
@@ -1176,7 +1345,7 @@ def split_at(iterable, pred, maxsplit=-1, keep_separator=False):
         [[0], [2], [4, 5, 6, 7, 8, 9]]
 
     By default, the delimiting items are not included in the output.
-    The include them, set *keep_separator* to ``True``.
+    To include them, set *keep_separator* to ``True``.
 
         >>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True))
         [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']]
@@ -1266,7 +1435,9 @@ def split_after(iterable, pred, maxsplit=-1):
         if pred(item) and buf:
             yield buf
             if maxsplit == 1:
-                yield list(it)
+                buf = list(it)
+                if buf:
+                    yield buf
                 return
             buf = []
             maxsplit -= 1
@@ -1372,28 +1543,50 @@ def padded(iterable, fillvalue=None, n=None, next_multiple=False):
         [1, 2, 3, '?', '?']
 
     If *next_multiple* is ``True``, *fillvalue* will be emitted until the
-    number of items emitted is a multiple of *n*::
+    number of items emitted is a multiple of *n*:
 
         >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
         [1, 2, 3, 4, None, None]
 
     If *n* is ``None``, *fillvalue* will be emitted indefinitely.
 
+    To create an *iterable* of exactly size *n*, you can truncate with
+    :func:`islice`.
+
+        >>> list(islice(padded([1, 2, 3], '?'), 5))
+        [1, 2, 3, '?', '?']
+        >>> list(islice(padded([1, 2, 3, 4, 5, 6, 7, 8], '?'), 5))
+        [1, 2, 3, 4, 5]
+
     """
-    it = iter(iterable)
+    iterable = iter(iterable)
+    iterable_with_repeat = chain(iterable, repeat(fillvalue))
+
     if n is None:
-        yield from chain(it, repeat(fillvalue))
+        return iterable_with_repeat
     elif n < 1:
         raise ValueError('n must be at least 1')
+    elif next_multiple:
+
+        def slice_generator():
+            for first in iterable:
+                yield (first,)
+                yield islice(iterable_with_repeat, n - 1)
+
+        # While elements exist produce slices of size n
+        return chain.from_iterable(slice_generator())
     else:
-        item_count = 0
-        for item in it:
-            yield item
-            item_count += 1
+        # Ensure the first batch is at least size n then iterate
+        return chain(islice(iterable_with_repeat, n), iterable)
 
-        remaining = (n - item_count) % n if next_multiple else n - item_count
-        for _ in range(remaining):
-            yield fillvalue
+
+def repeat_each(iterable, n=2):
+    """Repeat each element in *iterable* *n* times.
+
+    >>> list(repeat_each('ABC', 3))
+    ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C']
+    """
+    return chain.from_iterable(map(repeat, iterable, repeat(n)))
 
 
 def repeat_last(iterable, default=None):
@@ -1439,7 +1632,9 @@ def distribute(n, iterable):
         [[1], [2], [3], [], []]
 
     This function uses :func:`itertools.tee` and may require significant
-    storage. If you need the order items in the smaller iterables to match the
+    storage.
+
+    If you need the order items in the smaller iterables to match the
     original iterable, see :func:`divide`.
 
     """
@@ -1478,25 +1673,6 @@ def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None):
     )
 
 
-class UnequalIterablesError(ValueError):
-    def __init__(self, details=None):
-        msg = 'Iterables have different lengths'
-        if details is not None:
-            msg += (': index 0 has length {}; index {} has length {}').format(
-                *details
-            )
-
-        super().__init__(msg)
-
-
-def _zip_equal_generator(iterables):
-    for combo in zip_longest(*iterables, fillvalue=_marker):
-        for val in combo:
-            if val is _marker:
-                raise UnequalIterablesError()
-        yield combo
-
-
 def zip_equal(*iterables):
     """``zip`` the input *iterables* together, but raise
     ``UnequalIterablesError`` if they aren't all the same length.
@@ -1524,23 +1700,8 @@ def zip_equal(*iterables):
             ),
             DeprecationWarning,
         )
-    # Check whether the iterables are all the same size.
-    try:
-        first_size = len(iterables[0])
-        for i, it in enumerate(iterables[1:], 1):
-            size = len(it)
-            if size != first_size:
-                break
-        else:
-            # If we didn't break out, we can use the built-in zip.
-            return zip(*iterables)
 
-        # If we did break out, there was a mismatch.
-        raise UnequalIterablesError(details=(first_size, i, size))
-    # If any one of the iterables didn't have a length, start reading
-    # them until one runs out.
-    except TypeError:
-        return _zip_equal_generator(iterables)
+    return _zip_equal(*iterables)
 
 
 def zip_offset(*iterables, offsets, longest=False, fillvalue=None):
@@ -1653,7 +1814,7 @@ def unzip(iterable):
     of the zipped *iterable*.
 
     The ``i``-th iterable contains the ``i``-th element from each element
-    of the zipped iterable. The first element is used to to determine the
+    of the zipped iterable. The first element is used to determine the
     length of the remaining elements.
 
         >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
@@ -1721,9 +1882,9 @@ def divide(n, iterable):
         >>> [list(c) for c in children]
         [[1], [2], [3], [], []]
 
-    This function will exhaust the iterable before returning and may require
-    significant storage. If order is not important, see :func:`distribute`,
-    which does not first pull the iterable into memory.
+    This function will exhaust the iterable before returning.
+    If order is not important, see :func:`distribute`, which does not first
+    pull the iterable into memory.
 
     """
     if n < 1:
@@ -1965,7 +2126,6 @@ def __init__(self, *args):
         if self._step == self._zero:
             raise ValueError('numeric_range() arg 3 must not be zero')
         self._growing = self._step > self._zero
-        self._init_len()
 
     def __bool__(self):
         if self._growing:
@@ -2041,7 +2201,8 @@ def __iter__(self):
     def __len__(self):
         return self._len
 
-    def _init_len(self):
+    @cached_property
+    def _len(self):
         if self._growing:
             start = self._start
             stop = self._stop
@@ -2052,10 +2213,10 @@ def _init_len(self):
             step = -self._step
         distance = stop - start
         if distance <= self._zero:
-            self._len = 0
+            return 0
         else:  # distance > 0 and step > 0: regular euclidean division
             q, r = divmod(distance, step)
-            self._len = int(q) + int(r != self._zero)
+            return int(q) + int(r != self._zero)
 
     def __reduce__(self):
         return numeric_range, (self._start, self._stop, self._step)
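
The hunk above moves ``numeric_range``'s length computation from ``__init__``
into a ``functools.cached_property``, so construction stays cheap and the
division runs at most once, on the first ``len()`` call. The pattern in
miniature (an illustrative sketch, not part of the patch):

.. code-block:: python

    from functools import cached_property

    class Lazy:
        @cached_property
        def length(self):
            print('computing once')
            return 42

    obj = Lazy()
    obj.length  # prints 'computing once', returns 42
    obj.length  # served from the instance __dict__; no recompute
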
@@ -2203,6 +2364,16 @@ def locate(iterable, pred=bool, window_size=None):
     return compress(count(), starmap(pred, it))
 
 
+def longest_common_prefix(iterables):
+    """Yield elements of the longest common prefix amongst given *iterables*.
+
+    >>> ''.join(longest_common_prefix(['abcd', 'abc', 'abf']))
+    'ab'
+
+    """
+    return (c[0] for c in takewhile(all_equal, zip(*iterables)))
+
+
 def lstrip(iterable, pred):
     """Yield the items from *iterable*, but strip any from the beginning
     for which *pred* returns ``True``.
@@ -2511,7 +2682,7 @@ def difference(iterable, func=sub, *, initial=None):
     if initial is not None:
         first = []
 
-    return chain(first, starmap(func, zip(b, a)))
+    return chain(first, map(func, b, a))
 
 
 class SequenceView(Sequence):
@@ -2585,6 +2756,9 @@ class seekable:
         >>> it.seek(10)
         >>> next(it)
         '10'
+        >>> it.relative_seek(-2)  # Seeking relative to the current position
+        >>> next(it)
+        '9'
         >>> it.seek(20)  # Seeking past the end of the source isn't a problem
         >>> list(it)
         []
@@ -2698,6 +2872,10 @@ def seek(self, index):
         if remainder > 0:
             consume(self, remainder)
 
+    def relative_seek(self, count):
+        index = len(self._cache)
+        self.seek(max(index + count, 0))
+
 
 class run_length:
     """
@@ -2804,6 +2982,7 @@ def make_decorator(wrapping_func, result_index=0):
         '7'
 
     """
+
     # See https://sites.google.com/site/bbayles/index/decorator_factory for
     # notes on how this works.
     def decorator(*wrapping_args, **wrapping_kwargs):
@@ -3090,6 +3269,8 @@ class time_limited:
     stops if  the time elapsed is greater than *limit_seconds*. If your time
     limit is 1 second, but it takes 2 seconds to generate the first item from
     the iterable, the function will run for 2 seconds and not yield anything.
+    As a special case, when *limit_seconds* is zero, the iterator never
+    returns anything.
 
     """
 
@@ -3105,6 +3286,9 @@ def __iter__(self):
         return self
 
     def __next__(self):
+        if self.limit_seconds == 0:
+            self.timed_out = True
+            raise StopIteration
         item = next(self._iterable)
         if monotonic() - self._start_time > self.limit_seconds:
             self.timed_out = True
@@ -3154,6 +3338,40 @@ def only(iterable, default=None, too_long=None):
     return first_value
 
 
+def _ichunk(iterable, n):
+    cache = deque()
+    chunk = islice(iterable, n)
+
+    def generator():
+        while True:
+            if cache:
+                yield cache.popleft()
+            else:
+                try:
+                    item = next(chunk)
+                except StopIteration:
+                    return
+                else:
+                    yield item
+
+    def materialize_next(n=1):
+        # if n not specified materialize everything
+        if n is None:
+            cache.extend(chunk)
+            return len(cache)
+
+        to_cache = n - len(cache)
+
+        # materialize up to n
+        if to_cache > 0:
+            cache.extend(islice(chunk, to_cache))
+
+        # return number materialized up to n
+        return min(n, len(cache))
+
+    return (generator(), materialize_next)
+
+
 def ichunked(iterable, n):
     """Break *iterable* into sub-iterables with *n* elements each.
     :func:`ichunked` is like :func:`chunked`, but it yields iterables
@@ -3175,20 +3393,39 @@ def ichunked(iterable, n):
     [8, 9, 10, 11]
 
     """
-    source = iter(iterable)
-
+    iterable = iter(iterable)
     while True:
+        # Create new chunk
+        chunk, materialize_next = _ichunk(iterable, n)
+
         # Check to see whether we're at the end of the source iterable
-        item = next(source, _marker)
-        if item is _marker:
+        if not materialize_next():
             return
 
-        # Clone the source and yield an n-length slice
-        source, it = tee(chain([item], source))
-        yield islice(it, n)
+        yield chunk
+
+        # Fill previous chunk's cache
+        materialize_next(None)
+
+
+def iequals(*iterables):
+    """Return ``True`` if all given *iterables* are equal to each other,
+    which means that they contain the same elements in the same order.
 
-        # Advance the source iterable
-        consume(source, n)
+    The function is useful for comparing iterables of different data types
+    or iterables that do not support equality checks.
+
+    >>> iequals("abc", ['a', 'b', 'c'], ('a', 'b', 'c'), iter("abc"))
+    True
+
+    >>> iequals("abc", "acb")
+    False
+
+    Not to be confused with :func:`all_equal`, which checks whether all
+    elements of iterable are equal to each other.
+
+    """
+    return all(map(all_equal, zip_longest(*iterables, fillvalue=object())))
 
 
 def distinct_combinations(iterable, r):
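
Replacing ``tee``/``consume`` with ``_ichunk`` means each chunk caches its
remaining items when the outer iterator moves past it, so chunks survive
out-of-order consumption. A sketch of that consequence:

.. code-block:: python

    from more_itertools import ichunked

    first, second, third = ichunked(range(9), 3)
    # advancing the outer iterator cached the earlier chunks,
    # so they are still readable after the fact
    assert list(third) == [6, 7, 8]
    assert list(first) == [0, 1, 2]
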
@@ -3260,7 +3497,7 @@ def map_except(function, iterable, *exceptions):
     result, unless *function* raises one of the specified *exceptions*.
 
     *function* is called to transform each item in *iterable*.
-    It should be a accept one argument.
+    It should accept one argument.
 
     >>> iterable = ['1', '2', 'three', '4', None]
     >>> list(map_except(int, iterable, ValueError, TypeError))
@@ -3276,6 +3513,28 @@ def map_except(function, iterable, *exceptions):
             pass
 
 
+def map_if(iterable, pred, func, func_else=lambda x: x):
+    """Evaluate each item from *iterable* using *pred*. If the result is
+    equivalent to ``True``, transform the item with *func* and yield it.
+    Otherwise, transform the item with *func_else* and yield it.
+
+    *pred*, *func*, and *func_else* should each be functions that accept
+    one argument. By default, *func_else* is the identity function.
+
+    >>> from math import sqrt
+    >>> iterable = list(range(-5, 5))
+    >>> iterable
+    [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4]
+    >>> list(map_if(iterable, lambda x: x > 3, lambda x: 'toobig'))
+    [-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig']
+    >>> list(map_if(iterable, lambda x: x >= 0,
+    ... lambda x: f'{sqrt(x):.2f}', lambda x: None))
+    [None, None, None, None, None, '0.00', '1.00', '1.41', '1.73', '2.00']
+    """
+    for item in iterable:
+        yield func(item) if pred(item) else func_else(item)
+
+
 def _sample_unweighted(iterable, k):
     # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li:
     # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))".
@@ -3292,7 +3551,6 @@ def _sample_unweighted(iterable, k):
     next_index = k + floor(log(random()) / log(1 - W))
 
     for index, element in enumerate(iterable, k):
-
         if index == next_index:
             reservoir[randrange(k)] = element
             # The new W is the largest in a sample of k U(0, `old_W`) numbers
@@ -3373,7 +3631,7 @@ def sample(iterable, k, weights=None):
         return _sample_weighted(iterable, k, weights)
 
 
-def is_sorted(iterable, key=None, reverse=False):
+def is_sorted(iterable, key=None, reverse=False, strict=False):
     """Returns ``True`` if the items of iterable are in sorted order, and
     ``False`` otherwise. *key* and *reverse* have the same meaning that they do
     in the built-in :func:`sorted` function.
@@ -3383,12 +3641,20 @@ def is_sorted(iterable, key=None, reverse=False):
     >>> is_sorted([5, 4, 3, 1, 2], reverse=True)
     False
 
+    If *strict*, tests for strict sorting, that is, returns ``False`` if equal
+    elements are found:
+
+    >>> is_sorted([1, 2, 2])
+    True
+    >>> is_sorted([1, 2, 2], strict=True)
+    False
+
     The function returns ``False`` after encountering the first out-of-order
     item. If there are no out-of-order items, the iterable is exhausted.
     """
 
-    compare = lt if reverse else gt
-    it = iterable if (key is None) else map(key, iterable)
+    compare = (le if reverse else ge) if strict else (lt if reverse else gt)
+    it = iterable if key is None else map(key, iterable)
     return not any(starmap(compare, pairwise(it)))
 
 
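The comparator selection in the new ``is_sorted`` is dense: the chosen
operator answers "does this adjacent pair prove the input unsorted?", with
``ge``/``le`` replacing ``gt``/``lt`` when ties are forbidden. A few checks
spelling out the four combinations (editorial sketch):

.. code-block:: python

    from more_itertools import is_sorted

    assert is_sorted([1, 2, 2])                          # gt: ties allowed
    assert not is_sorted([1, 2, 2], strict=True)         # ge: ties forbidden
    assert is_sorted([3, 2, 1], reverse=True)            # lt: descending
    assert not is_sorted([3, 3, 1], reverse=True, strict=True)  # le
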
@@ -3453,7 +3719,10 @@ def __init__(self, func, callback_kwd='callback', wait_seconds=0.1):
         self._aborted = False
         self._future = None
         self._wait_seconds = wait_seconds
-        self._executor = __import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1)
+        # Lazily import concurrent.future
+        self._executor = __import__(
+            'concurrent.futures'
+        ).futures.ThreadPoolExecutor(max_workers=1)
         self._iterator = self._reader()
 
     def __enter__(self):
@@ -3649,7 +3918,8 @@ def nth_permutation(iterable, r, index):
     elif not 0 <= r < n:
         raise ValueError
     else:
-        c = factorial(n) // factorial(n - r)
+        c = perm(n, r)
+    assert c > 0  # factorial(n)>0, and r<=n so perm(n,r) is never zero
 
     if index < 0:
         index += c
@@ ... @@
     return tuple(map(pool.pop, result))
 
 
+def nth_combination_with_replacement(iterable, r, index):
+    """Equivalent to
+    ``list(combinations_with_replacement(iterable, r))[index]``.
+
+    The subsequences with repetition of *iterable* that are of length *r* can
+    be ordered lexicographically. :func:`nth_combination_with_replacement`
+    computes the subsequence at sort position *index* directly, without
+    computing the previous subsequences with replacement.
+
+        >>> nth_combination_with_replacement(range(5), 3, 5)
+        (0, 1, 1)
+
+    ``ValueError`` will be raised if *r* is negative or greater than the length
+    of *iterable*.
+    ``IndexError`` will be raised if the given *index* is invalid.
+    """
+    pool = tuple(iterable)
+    n = len(pool)
+    if (r < 0) or (r > n):
+        raise ValueError
+
+    c = comb(n + r - 1, r)
+
+    if index < 0:
+        index += c
+
+    if (index < 0) or (index >= c):
+        raise IndexError
+
+    result = []
+    i = 0
+    while r:
+        r -= 1
+        while n >= 0:
+            num_combs = comb(n + r - 1, r)
+            if index < num_combs:
+                break
+            n -= 1
+            i += 1
+            index -= num_combs
+        result.append(pool[i])
+
+    return tuple(result)
+
+
 def value_chain(*args):
     """Yield all arguments passed to the function in the same order in which
     they were passed. If an argument itself is iterable then iterate over its
@@ -3686,6 +3999,12 @@ def value_chain(*args):
         >>> list(value_chain('12', '34', ['56', '78']))
         ['12', '34', '56', '78']
 
+    Pre- or postpend a single element to an iterable:
+
+        >>> list(value_chain(1, [2, 3, 4, 5, 6]))
+        [1, 2, 3, 4, 5, 6]
+        >>> list(value_chain([1, 2, 3, 4, 5], 6))
+        [1, 2, 3, 4, 5, 6]
 
     Multiple levels of nesting are not flattened.
 
@@ -3758,14 +4077,71 @@ def combination_index(element, iterable):
 
     n, _ = last(pool, default=(n, None))
 
-    # Python versiosn below 3.8 don't have math.comb
+    # Python versions below 3.8 don't have math.comb
     index = 1
     for i, j in enumerate(reversed(indexes), start=1):
         j = n - j
         if i <= j:
-            index += factorial(j) // (factorial(i) * factorial(j - i))
+            index += comb(j, i)
+
+    return comb(n + 1, k + 1) - index
+
+
+def combination_with_replacement_index(element, iterable):
+    """Equivalent to
+    ``list(combinations_with_replacement(iterable, r)).index(element)``
+
+    The subsequences with repetition of *iterable* that are of length *r* can
+    be ordered lexicographically. :func:`combination_with_replacement_index`
+    computes the index of the first *element*, without computing the previous
+    combinations with replacement.
+
+        >>> combination_with_replacement_index('adf', 'abcdefg')
+        20
+
+    ``ValueError`` will be raised if the given *element* isn't one of the
+    combinations with replacement of *iterable*.
+    """
+    element = tuple(element)
+    l = len(element)
+    element = enumerate(element)
+
+    k, y = next(element, (None, None))
+    if k is None:
+        return 0
+
+    indexes = []
+    pool = tuple(iterable)
+    for n, x in enumerate(pool):
+        while x == y:
+            indexes.append(n)
+            tmp, y = next(element, (None, None))
+            if tmp is None:
+                break
+            else:
+                k = tmp
+        if y is None:
+            break
+    else:
+        raise ValueError(
+            'element is not a combination with replacement of iterable'
+        )
+
+    n = len(pool)
+    occupations = [0] * n
+    for p in indexes:
+        occupations[p] += 1
+
+    index = 0
+    cumulative_sum = 0
+    for k in range(1, n):
+        cumulative_sum += occupations[k - 1]
+        j = l + n - 1 - k - cumulative_sum
+        i = n - k
+        if i <= j:
+            index += comb(j, i)
 
-    return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
+    return index
 
 
 def permutation_index(element, iterable):
@@ -3822,3 +4198,609 @@ def __next__(self):
         self.items_seen += 1
 
         return item
+
+
+def chunked_even(iterable, n):
+    """Break *iterable* into lists of approximately length *n*.
+    Items are distributed such that the lengths of the lists differ by at most
+    1 item.
+
+    >>> iterable = [1, 2, 3, 4, 5, 6, 7]
+    >>> n = 3
+    >>> list(chunked_even(iterable, n))  # List lengths: 3, 2, 2
+    [[1, 2, 3], [4, 5], [6, 7]]
+    >>> list(chunked(iterable, n))  # List lengths: 3, 3, 1
+    [[1, 2, 3], [4, 5, 6], [7]]
+
+    """
+    iterable = iter(iterable)
+
+    # Initialize a buffer to process the chunks while keeping
+    # some back to fill any underfilled chunks
+    min_buffer = (n - 1) * (n - 2)
+    buffer = list(islice(iterable, min_buffer))
+
+    # Append items until we have a completed chunk
+    for _ in islice(map(buffer.append, iterable), n, None, n):
+        yield buffer[:n]
+        del buffer[:n]
+
+    # Check if any chunks need additional processing
+    if not buffer:
+        return
+    length = len(buffer)
+
+    # Chunks are either size `full_size <= n` or `partial_size = full_size - 1`
+    q, r = divmod(length, n)
+    num_lists = q + (1 if r > 0 else 0)
+    q, r = divmod(length, num_lists)
+    full_size = q + (1 if r > 0 else 0)
+    partial_size = full_size - 1
+    num_full = length - partial_size * num_lists
+
+    # Yield chunks of full size
+    partial_start_idx = num_full * full_size
+    if full_size > 0:
+        for i in range(0, partial_start_idx, full_size):
+            yield buffer[i : i + full_size]
+
+    # Yield chunks of partial size
+    if partial_size > 0:
+        for i in range(partial_start_idx, length, partial_size):
+            yield buffer[i : i + partial_size]
+
+
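The tail-end arithmetic in ``chunked_even`` is easiest to follow with concrete
numbers. Assuming, for illustration, that all 7 items of the docstring example
end up in the leftover buffer (in general the main loop will have emitted some
already, but the result is the same):

.. code-block:: python

    from more_itertools import chunked_even

    # length 7, n = 3
    # divmod(7, 3) == (2, 1)   -> num_lists  = 2 + 1 = 3
    # divmod(7, 3) == (2, 1)   -> full_size  = 2 + 1 = 3, partial_size = 2
    # num_full = 7 - 2 * 3 = 1 -> one chunk of 3, then two chunks of 2
    assert list(chunked_even([1, 2, 3, 4, 5, 6, 7], 3)) == [
        [1, 2, 3], [4, 5], [6, 7]]
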
+def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
+    """A version of :func:`zip` that "broadcasts" any scalar
+    (i.e., non-iterable) items into output tuples.
+
+    >>> iterable_1 = [1, 2, 3]
+    >>> iterable_2 = ['a', 'b', 'c']
+    >>> scalar = '_'
+    >>> list(zip_broadcast(iterable_1, iterable_2, scalar))
+    [(1, 'a', '_'), (2, 'b', '_'), (3, 'c', '_')]
+
+    The *scalar_types* keyword argument determines what types are considered
+    scalar. It is set to ``(str, bytes)`` by default. Set it to ``None`` to
+    treat strings and byte strings as iterable:
+
+    >>> list(zip_broadcast('abc', 0, 'xyz', scalar_types=None))
+    [('a', 0, 'x'), ('b', 0, 'y'), ('c', 0, 'z')]
+
+    If the *strict* keyword argument is ``True``, then
+    ``UnequalIterablesError`` will be raised if any of the iterables have
+    different lengths.
+    """
+
+    def is_scalar(obj):
+        if scalar_types and isinstance(obj, scalar_types):
+            return True
+        try:
+            iter(obj)
+        except TypeError:
+            return True
+        else:
+            return False
+
+    size = len(objects)
+    if not size:
+        return
+
+    new_item = [None] * size
+    iterables, iterable_positions = [], []
+    for i, obj in enumerate(objects):
+        if is_scalar(obj):
+            new_item[i] = obj
+        else:
+            iterables.append(iter(obj))
+            iterable_positions.append(i)
+
+    if not iterables:
+        yield tuple(objects)
+        return
+
+    zipper = _zip_equal if strict else zip
+    for item in zipper(*iterables):
+        for i, new_item[i] in zip(iterable_positions, item):
+            pass
+        yield tuple(new_item)
+
+
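One line of ``zip_broadcast`` merits a note:
``for i, new_item[i] in zip(iterable_positions, item): pass`` relies on ``for``
accepting any assignment target, including a subscript, so each iteration
writes the paired value directly into ``new_item``. The idiom in isolation
(editorial sketch):

.. code-block:: python

    new_item = [None, 'fixed', None]
    for i, new_item[i] in zip((0, 2), ('a', 'b')):
        pass  # the loop header's unpacking does all the work
    assert new_item == ['a', 'fixed', 'b']
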
+def unique_in_window(iterable, n, key=None):
+    """Yield the items from *iterable* that haven't been seen recently.
+    *n* is the size of the lookback window.
+
+        >>> iterable = [0, 1, 0, 2, 3, 0]
+        >>> n = 3
+        >>> list(unique_in_window(iterable, n))
+        [0, 1, 2, 3, 0]
+
+    The *key* function, if provided, will be used to determine uniqueness:
+
+        >>> list(unique_in_window('abAcda', 3, key=lambda x: x.lower()))
+        ['a', 'b', 'c', 'd', 'a']
+
+    The items in *iterable* must be hashable.
+
+    """
+    if n <= 0:
+        raise ValueError('n must be greater than 0')
+
+    window = deque(maxlen=n)
+    counts = defaultdict(int)
+    use_key = key is not None
+
+    for item in iterable:
+        if len(window) == n:
+            to_discard = window[0]
+            if counts[to_discard] == 1:
+                del counts[to_discard]
+            else:
+                counts[to_discard] -= 1
+
+        k = key(item) if use_key else item
+        if k not in counts:
+            yield item
+        counts[k] += 1
+        window.append(k)
+
+
+def duplicates_everseen(iterable, key=None):
+    """Yield duplicate elements after their first appearance.
+
+    >>> list(duplicates_everseen('mississippi'))
+    ['s', 'i', 's', 's', 'i', 'p', 'i']
+    >>> list(duplicates_everseen('AaaBbbCccAaa', str.lower))
+    ['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a']
+
+    This function is analogous to :func:`unique_everseen` and is subject to
+    the same performance considerations.
+
+    """
+    seen_set = set()
+    seen_list = []
+    use_key = key is not None
+
+    for element in iterable:
+        k = key(element) if use_key else element
+        try:
+            if k not in seen_set:
+                seen_set.add(k)
+            else:
+                yield element
+        except TypeError:
+            if k not in seen_list:
+                seen_list.append(k)
+            else:
+                yield element
+
+
+def duplicates_justseen(iterable, key=None):
+    """Yields serially-duplicate elements after their first appearance.
+
+    >>> list(duplicates_justseen('mississippi'))
+    ['s', 's', 'p']
+    >>> list(duplicates_justseen('AaaBbbCccAaa', str.lower))
+    ['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a']
+
+    This function is analogous to :func:`unique_justseen`.
+
+    """
+    return flatten(g for _, g in groupby(iterable, key) for _ in g)
+
+
+def classify_unique(iterable, key=None):
+    """Classify each element in terms of its uniqueness.
+
+    For each element in the input iterable, return a 3-tuple consisting of:
+
+    1. The element itself
+    2. ``False`` if the element is equal to the one preceding it in the input,
+       ``True`` otherwise (i.e. the equivalent of :func:`unique_justseen`)
+    3. ``False`` if this element has been seen anywhere in the input before,
+       ``True`` otherwise (i.e. the equivalent of :func:`unique_everseen`)
+
+    >>> list(classify_unique('otto'))    # doctest: +NORMALIZE_WHITESPACE
+    [('o', True,  True),
+     ('t', True,  True),
+     ('t', False, False),
+     ('o', True,  False)]
+
+    This function is analogous to :func:`unique_everseen` and is subject to
+    the same performance considerations.
+
+    """
+    seen_set = set()
+    seen_list = []
+    use_key = key is not None
+    previous = None
+
+    for i, element in enumerate(iterable):
+        k = key(element) if use_key else element
+        is_unique_justseen = not i or previous != k
+        previous = k
+        is_unique_everseen = False
+        try:
+            if k not in seen_set:
+                seen_set.add(k)
+                is_unique_everseen = True
+        except TypeError:
+            if k not in seen_list:
+                seen_list.append(k)
+                is_unique_everseen = True
+        yield element, is_unique_justseen, is_unique_everseen
+
+
+def minmax(iterable_or_value, *others, key=None, default=_marker):
+    """Returns both the smallest and largest items in an iterable
+    or the smallest and largest of two or more arguments.
+
+        >>> minmax([3, 1, 5])
+        (1, 5)
+
+        >>> minmax(4, 2, 6)
+        (2, 6)
+
+    If a *key* function is provided, it will be used to transform the input
+    items for comparison.
+
+        >>> minmax([5, 30], key=str)  # '30' sorts before '5'
+        (30, 5)
+
+    If a *default* value is provided, it will be returned if there are no
+    input items.
+
+        >>> minmax([], default=(0, 0))
+        (0, 0)
+
+    Otherwise ``ValueError`` is raised.
+
+    This function is based on the
+    `recipe `__ by
+    Raymond Hettinger and takes care to minimize the number of comparisons
+    performed.
+    """
+    iterable = (iterable_or_value, *others) if others else iterable_or_value
+
+    it = iter(iterable)
+
+    try:
+        lo = hi = next(it)
+    except StopIteration as exc:
+        if default is _marker:
+            raise ValueError(
+                '`minmax()` argument is an empty iterable. '
+                'Provide a `default` value to suppress this error.'
+            ) from exc
+        return default
+
+    # Different branches depending on the presence of key. This saves a lot
+    # of unimportant copies which would slow the "key=None" branch
+    # significantly down.
+    if key is None:
+        for x, y in zip_longest(it, it, fillvalue=lo):
+            if y < x:
+                x, y = y, x
+            if x < lo:
+                lo = x
+            if hi < y:
+                hi = y
+
+    else:
+        lo_key = hi_key = key(lo)
+
+        for x, y in zip_longest(it, it, fillvalue=lo):
+            x_key, y_key = key(x), key(y)
+
+            if y_key < x_key:
+                x, y, x_key, y_key = y, x, y_key, x_key
+            if x_key < lo_key:
+                lo, lo_key = x, x_key
+            if hi_key < y_key:
+                hi, hi_key = y, y_key
+
+    return lo, hi
+
+
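``minmax`` pulls items two at a time via ``zip_longest(it, it, fillvalue=lo)``:
one comparison orders the pair, then the smaller is checked against ``lo`` and
the larger against ``hi``, roughly 3 comparisons per 2 items instead of 4.
When an odd number of items remains, the final pair is padded with ``lo``,
which can never change the result. A small check (editorial sketch):

.. code-block:: python

    from more_itertools import minmax

    assert minmax([3, 1, 5, 2]) == (1, 5)  # last pair padded with lo == 3
    assert minmax([3, 1, 5]) == (1, 5)     # remaining items pair up evenly
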
+def constrained_batches(
+    iterable, max_size, max_count=None, get_len=len, strict=True
+):
+    """Yield batches of items from *iterable* with a combined size limited by
+    *max_size*.
+
+    >>> iterable = [b'12345', b'123', b'12345678', b'1', b'1', b'12', b'1']
+    >>> list(constrained_batches(iterable, 10))
+    [(b'12345', b'123'), (b'12345678', b'1', b'1'), (b'12', b'1')]
+
+    If a *max_count* is supplied, the number of items per batch is also
+    limited:
+
+    >>> iterable = [b'12345', b'123', b'12345678', b'1', b'1', b'12', b'1']
+    >>> list(constrained_batches(iterable, 10, max_count = 2))
+    [(b'12345', b'123'), (b'12345678', b'1'), (b'1', b'12'), (b'1',)]
+
+    If a *get_len* function is supplied, use that instead of :func:`len` to
+    determine item size.
+
+    If *strict* is ``True``, raise ``ValueError`` if any single item is bigger
+    than *max_size*. Otherwise, allow single items to exceed *max_size*.
+    """
+    if max_size <= 0:
+        raise ValueError('maximum size must be greater than zero')
+
+    batch = []
+    batch_size = 0
+    batch_count = 0
+    for item in iterable:
+        item_len = get_len(item)
+        if strict and item_len > max_size:
+            raise ValueError('item size exceeds maximum size')
+
+        reached_count = batch_count == max_count
+        reached_size = item_len + batch_size > max_size
+        if batch_count and (reached_size or reached_count):
+            yield tuple(batch)
+            batch.clear()
+            batch_size = 0
+            batch_count = 0
+
+        batch.append(item)
+        batch_size += item_len
+        batch_count += 1
+
+    if batch:
+        yield tuple(batch)
+
+
+def gray_product(*iterables):
+    """Like :func:`itertools.product`, but return tuples in an order such
+    that only one element in the generated tuple changes from one iteration
+    to the next.
+
+        >>> list(gray_product('AB','CD'))
+        [('A', 'C'), ('B', 'C'), ('B', 'D'), ('A', 'D')]
+
+    This function consumes all of the input iterables before producing output.
+    If any of the input iterables have fewer than two items, ``ValueError``
+    is raised.
+
+    For information on the algorithm, see
+    `this section `__
+    of Donald Knuth's *The Art of Computer Programming*.
+    """
+    all_iterables = tuple(tuple(x) for x in iterables)
+    iterable_count = len(all_iterables)
+    for iterable in all_iterables:
+        if len(iterable) < 2:
+            raise ValueError("each iterable must have two or more items")
+
+    # This is based on "Algorithm H" from section 7.2.1.1, page 20.
+    # a holds the indexes of the source iterables for the n-tuple to be yielded
+    # f is the array of "focus pointers"
+    # o is the array of "directions"
+    a = [0] * iterable_count
+    f = list(range(iterable_count + 1))
+    o = [1] * iterable_count
+    while True:
+        yield tuple(all_iterables[i][a[i]] for i in range(iterable_count))
+        j = f[0]
+        f[0] = 0
+        if j == iterable_count:
+            break
+        a[j] = a[j] + o[j]
+        if a[j] == 0 or a[j] == len(all_iterables[j]) - 1:
+            o[j] = -o[j]
+            f[j] = f[j + 1]
+            f[j + 1] = j + 1
+
+
+def partial_product(*iterables):
+    """Yields tuples containing one item from each iterator, with subsequent
+    tuples changing a single item at a time by advancing each iterator until it
+    is exhausted. This sequence guarantees every value in each iterable is
+    output at least once without generating all possible combinations.
+
+    This may be useful, for example, when testing an expensive function.
+
+        >>> list(partial_product('AB', 'C', 'DEF'))
+        [('A', 'C', 'D'), ('B', 'C', 'D'), ('B', 'C', 'E'), ('B', 'C', 'F')]
+    """
+
+    iterators = list(map(iter, iterables))
+
+    try:
+        prod = [next(it) for it in iterators]
+    except StopIteration:
+        return
+    yield tuple(prod)
+
+    for i, it in enumerate(iterators):
+        for prod[i] in it:
+            yield tuple(prod)
+
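To make the coverage guarantee concrete: every input value appears at least
once, at a fraction of the full product's cost. A sketch, again assuming
more_itertools is importable:

    from more_itertools import partial_product

    iterables = ('AB', 'CDE', 'FG')
    seen = list(partial_product(*iterables))
    # 5 tuples instead of the full product's 2 * 3 * 2 = 12.
    assert len(seen) == 1 + sum(len(it) - 1 for it in iterables)
    # Each value of each input shows up in its own position.
    for i, it in enumerate(iterables):
        assert set(it) <= {t[i] for t in seen}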
+
+def takewhile_inclusive(predicate, iterable):
+    """A variant of :func:`takewhile` that yields one additional element.
+
+        >>> list(takewhile_inclusive(lambda x: x < 5, [1, 4, 6, 4, 1]))
+        [1, 4, 6]
+
+    :func:`takewhile` would return ``[1, 4]``.
+    """
+    for x in iterable:
+        yield x
+        if not predicate(x):
+            break
+
+
+def outer_product(func, xs, ys, *args, **kwargs):
+    """A generalized outer product that applies a binary function to all
+    pairs of items. Returns a 2D matrix with ``len(xs)`` rows and ``len(ys)``
+    columns.
+    Also accepts ``*args`` and ``**kwargs`` that are passed to ``func``.
+
+    Multiplication table:
+
+    >>> list(outer_product(mul, range(1, 4), range(1, 6)))
+    [(1, 2, 3, 4, 5), (2, 4, 6, 8, 10), (3, 6, 9, 12, 15)]
+
+    Cross tabulation:
+
+    >>> xs = ['A', 'B', 'A', 'A', 'B', 'B', 'A', 'A', 'B', 'B']
+    >>> ys = ['X', 'X', 'X', 'Y', 'Z', 'Z', 'Y', 'Y', 'Z', 'Z']
+    >>> rows = list(zip(xs, ys))
+    >>> count_rows = lambda x, y: rows.count((x, y))
+    >>> list(outer_product(count_rows, sorted(set(xs)), sorted(set(ys))))
+    [(2, 3, 0), (1, 0, 4)]
+
+    Usage with ``*args`` and ``**kwargs``:
+
+    >>> animals = ['cat', 'wolf', 'mouse']
+    >>> list(outer_product(min, animals, animals, key=len))
+    [('cat', 'cat', 'cat'), ('cat', 'wolf', 'wolf'), ('cat', 'wolf', 'mouse')]
+    """
+    ys = tuple(ys)
+    return batched(
+        starmap(lambda x, y: func(x, y, *args, **kwargs), product(xs, ys)),
+        n=len(ys),
+    )
+
+
+def iter_suppress(iterable, *exceptions):
+    """Yield each of the items from *iterable*. If the iteration raises one of
+    the specified *exceptions*, that exception will be suppressed and iteration
+    will stop.
+
+    >>> from itertools import chain
+    >>> def breaks_at_five(x):
+    ...     while True:
+    ...         if x >= 5:
+    ...             raise RuntimeError
+    ...         yield x
+    ...         x += 1
+    >>> it_1 = iter_suppress(breaks_at_five(1), RuntimeError)
+    >>> it_2 = iter_suppress(breaks_at_five(2), RuntimeError)
+    >>> list(chain(it_1, it_2))
+    [1, 2, 3, 4, 2, 3, 4]
+    """
+    try:
+        yield from iterable
+    except exceptions:
+        return
+
+
+def filter_map(func, iterable):
+    """Apply *func* to every element of *iterable*, yielding only those which
+    are not ``None``.
+
+    >>> elems = ['1', 'a', '2', 'b', '3']
+    >>> list(filter_map(lambda s: int(s) if s.isnumeric() else None, elems))
+    [1, 2, 3]
+    """
+    for x in iterable:
+        y = func(x)
+        if y is not None:
+            yield y
+
+
+def powerset_of_sets(iterable):
+    """Yields all possible subsets of the iterable.
+
+        >>> list(powerset_of_sets([1, 2, 3]))  # doctest: +SKIP
+        [set(), {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}]
+        >>> list(powerset_of_sets([1, 1, 0]))  # doctest: +SKIP
+        [set(), {1}, {0}, {0, 1}]
+
+    :func:`powerset_of_sets` takes care to minimize the number
+    of hash operations performed.
+    """
+    sets = tuple(map(set, dict.fromkeys(map(frozenset, zip(iterable)))))
+    for r in range(len(sets) + 1):
+        yield from starmap(set().union, combinations(sets, r))
+
+
+def join_mappings(**field_to_map):
+    """
+    Joins multiple mappings together using their common keys.
+
+    >>> user_scores = {'elliot': 50, 'claris': 60}
+    >>> user_times = {'elliot': 30, 'claris': 40}
+    >>> join_mappings(score=user_scores, time=user_times)
+    {'elliot': {'score': 50, 'time': 30}, 'claris': {'score': 60, 'time': 40}}
+    """
+    ret = defaultdict(dict)
+
+    for field_name, mapping in field_to_map.items():
+        for key, value in mapping.items():
+            ret[key][field_name] = value
+
+    return dict(ret)
+
+
+def _complex_sumprod(v1, v2):
+    """High precision sumprod() for complex numbers.
+    Used by :func:`dft` and :func:`idft`.
+    """
+
+    r1 = chain((p.real for p in v1), (-p.imag for p in v1))
+    r2 = chain((q.real for q in v2), (q.imag for q in v2))
+    i1 = chain((p.real for p in v1), (p.imag for p in v1))
+    i2 = chain((q.imag for q in v2), (q.real for q in v2))
+    return complex(_fsumprod(r1, r2), _fsumprod(i1, i2))
+
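The chained generators encode the usual expansion of a complex dot product into
two real dot products, which lets each part be accumulated with compensated
(fsum-based) arithmetic. A standalone check of the identity being used:

    # For each pair:  p * q = (p.real*q.real - p.imag*q.imag)
    #                       + (p.real*q.imag + p.imag*q.real) * 1j
    v1, v2 = [1 + 2j, 3 - 1j], [2 - 1j, 0.5 + 4j]
    re = sum(p.real * q.real - p.imag * q.imag for p, q in zip(v1, v2))
    im = sum(p.real * q.imag + p.imag * q.real for p, q in zip(v1, v2))
    assert complex(re, im) == sum(p * q for p, q in zip(v1, v2))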
+
+def dft(xarr):
+    """Discrete Fourier Tranform. *xarr* is a sequence of complex numbers.
+    Yields the components of the corresponding transformed output vector.
+
+    >>> import cmath
+    >>> xarr = [1, 2-1j, -1j, -1+2j]
+    >>> Xarr = [2, -2-2j, -2j, 4+4j]
+    >>> all(map(cmath.isclose, dft(xarr), Xarr))
+    True
+
+    See :func:`idft` for the inverse Discrete Fourier Transform.
+    """
+    N = len(xarr)
+    roots_of_unity = [e ** (n / N * tau * -1j) for n in range(N)]
+    for k in range(N):
+        coeffs = [roots_of_unity[k * n % N] for n in range(N)]
+        yield _complex_sumprod(xarr, coeffs)
+
+
+def idft(Xarr):
+    """Inverse Discrete Fourier Tranform. *Xarr* is a sequence of
+    complex numbers. Yields the components of the corresponding
+    inverse-transformed output vector.
+
+    >>> import cmath
+    >>> xarr = [1, 2-1j, -1j, -1+2j]
+    >>> Xarr = [2, -2-2j, -2j, 4+4j]
+    >>> all(map(cmath.isclose, idft(Xarr), xarr))
+    True
+
+    See :func:`dft` for the Discrete Fourier Transform.
+    """
+    N = len(Xarr)
+    roots_of_unity = [e ** (n / N * tau * 1j) for n in range(N)]
+    for k in range(N):
+        coeffs = [roots_of_unity[k * n % N] for n in range(N)]
+        yield _complex_sumprod(Xarr, coeffs) / N
+
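The pair is inverse up to floating-point error, which is easy to spot-check
(assuming more_itertools is importable):

    import cmath
    from more_itertools import dft, idft

    xarr = [complex(n, -n) for n in range(8)]
    roundtrip = idft(list(dft(xarr)))
    assert all(cmath.isclose(a, b, abs_tol=1e-9)
               for a, b in zip(roundtrip, xarr))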
+
+def doublestarmap(func, iterable):
+    """Apply *func* to every item of *iterable* by dictionary unpacking
+    the item into *func*.
+
+    The difference between :func:`itertools.starmap` and :func:`doublestarmap`
+    parallels the distinction between ``func(*a)`` and ``func(**a)``.
+
+    >>> iterable = [{'a': 1, 'b': 2}, {'a': 40, 'b': 60}]
+    >>> list(doublestarmap(lambda a, b: a + b, iterable))
+    [3, 100]
+
+    ``TypeError`` will be raised if *func*'s signature doesn't match the
+    mapping contained in *iterable* or if *iterable* does not contain mappings.
+    """
+    for item in iterable:
+        yield func(**item)
diff --git a/setuptools/_vendor/more_itertools/more.pyi b/setuptools/_vendor/more_itertools/more.pyi
index 2fba9cb300..e946023259 100644
--- a/setuptools/_vendor/more_itertools/more.pyi
+++ b/setuptools/_vendor/more_itertools/more.pyi
@@ -1,35 +1,39 @@
 """Stubs for more_itertools.more"""
 
+from __future__ import annotations
+
+from types import TracebackType
 from typing import (
     Any,
     Callable,
     Container,
-    Dict,
+    ContextManager,
     Generic,
     Hashable,
     Iterable,
     Iterator,
-    List,
-    Optional,
+    Mapping,
+    overload,
     Reversible,
     Sequence,
     Sized,
-    Tuple,
-    Union,
+    Type,
     TypeVar,
     type_check_only,
 )
-from types import TracebackType
-from typing_extensions import ContextManager, Protocol, Type, overload
+from typing_extensions import Protocol
 
 # Type and type variable definitions
 _T = TypeVar('_T')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
 _U = TypeVar('_U')
 _V = TypeVar('_V')
 _W = TypeVar('_W')
 _T_co = TypeVar('_T_co', covariant=True)
-_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]])
-_Raisable = Union[BaseException, 'Type[BaseException]']
+_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]])
+_Raisable = BaseException | Type[BaseException]
 
 @type_check_only
 class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...
@@ -37,23 +41,25 @@ class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...
 @type_check_only
 class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ...
 
+@type_check_only
+class _SupportsSlicing(Protocol[_T_co]):
+    def __getitem__(self, __k: slice) -> _T_co: ...
+
 def chunked(
-    iterable: Iterable[_T], n: int, strict: bool = ...
-) -> Iterator[List[_T]]: ...
+    iterable: Iterable[_T], n: int | None, strict: bool = ...
+) -> Iterator[list[_T]]: ...
 @overload
 def first(iterable: Iterable[_T]) -> _T: ...
 @overload
-def first(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
+def first(iterable: Iterable[_T], default: _U) -> _T | _U: ...
 @overload
 def last(iterable: Iterable[_T]) -> _T: ...
 @overload
-def last(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
+def last(iterable: Iterable[_T], default: _U) -> _T | _U: ...
 @overload
 def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ...
 @overload
-def nth_or_last(
-    iterable: Iterable[_T], n: int, default: _U
-) -> Union[_T, _U]: ...
+def nth_or_last(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
 
 class peekable(Generic[_T], Iterator[_T]):
     def __init__(self, iterable: Iterable[_T]) -> None: ...
@@ -62,52 +68,58 @@ class peekable(Generic[_T], Iterator[_T]):
     @overload
     def peek(self) -> _T: ...
     @overload
-    def peek(self, default: _U) -> Union[_T, _U]: ...
+    def peek(self, default: _U) -> _T | _U: ...
     def prepend(self, *items: _T) -> None: ...
     def __next__(self) -> _T: ...
     @overload
     def __getitem__(self, index: int) -> _T: ...
     @overload
-    def __getitem__(self, index: slice) -> List[_T]: ...
+    def __getitem__(self, index: slice) -> list[_T]: ...
 
-def collate(*iterables: Iterable[_T], **kwargs: Any) -> Iterable[_T]: ...
 def consumer(func: _GenFn) -> _GenFn: ...
-def ilen(iterable: Iterable[object]) -> int: ...
+def ilen(iterable: Iterable[_T]) -> int: ...
 def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
 def with_iter(
     context_manager: ContextManager[Iterable[_T]],
 ) -> Iterator[_T]: ...
 def one(
     iterable: Iterable[_T],
-    too_short: Optional[_Raisable] = ...,
-    too_long: Optional[_Raisable] = ...,
+    too_short: _Raisable | None = ...,
+    too_long: _Raisable | None = ...,
 ) -> _T: ...
+def raise_(exception: _Raisable, *args: Any) -> None: ...
+def strictly_n(
+    iterable: Iterable[_T],
+    n: int,
+    too_short: _GenFn | None = ...,
+    too_long: _GenFn | None = ...,
+) -> list[_T]: ...
 def distinct_permutations(
-    iterable: Iterable[_T], r: Optional[int] = ...
-) -> Iterator[Tuple[_T, ...]]: ...
+    iterable: Iterable[_T], r: int | None = ...
+) -> Iterator[tuple[_T, ...]]: ...
 def intersperse(
     e: _U, iterable: Iterable[_T], n: int = ...
-) -> Iterator[Union[_T, _U]]: ...
-def unique_to_each(*iterables: Iterable[_T]) -> List[List[_T]]: ...
+) -> Iterator[_T | _U]: ...
+def unique_to_each(*iterables: Iterable[_T]) -> list[list[_T]]: ...
 @overload
 def windowed(
     seq: Iterable[_T], n: int, *, step: int = ...
-) -> Iterator[Tuple[Optional[_T], ...]]: ...
+) -> Iterator[tuple[_T | None, ...]]: ...
 @overload
 def windowed(
     seq: Iterable[_T], n: int, fillvalue: _U, step: int = ...
-) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
-def substrings(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
+) -> Iterator[tuple[_T | _U, ...]]: ...
+def substrings(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
 def substrings_indexes(
     seq: Sequence[_T], reverse: bool = ...
-) -> Iterator[Tuple[Sequence[_T], int, int]]: ...
+) -> Iterator[tuple[Sequence[_T], int, int]]: ...
 
 class bucket(Generic[_T, _U], Container[_U]):
     def __init__(
         self,
         iterable: Iterable[_T],
         key: Callable[[_T], _U],
-        validator: Optional[Callable[[object], object]] = ...,
+        validator: Callable[[_U], object] | None = ...,
     ) -> None: ...
     def __contains__(self, value: object) -> bool: ...
     def __iter__(self) -> Iterator[_U]: ...
@@ -115,130 +127,232 @@ class bucket(Generic[_T, _U], Container[_U]):
 
 def spy(
     iterable: Iterable[_T], n: int = ...
-) -> Tuple[List[_T], Iterator[_T]]: ...
+) -> tuple[list[_T], Iterator[_T]]: ...
 def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ...
 def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ...
+def interleave_evenly(
+    iterables: list[Iterable[_T]], lengths: list[int] | None = ...
+) -> Iterator[_T]: ...
 def collapse(
     iterable: Iterable[Any],
-    base_type: Optional[type] = ...,
-    levels: Optional[int] = ...,
+    base_type: type | None = ...,
+    levels: int | None = ...,
 ) -> Iterator[Any]: ...
 @overload
 def side_effect(
     func: Callable[[_T], object],
     iterable: Iterable[_T],
     chunk_size: None = ...,
-    before: Optional[Callable[[], object]] = ...,
-    after: Optional[Callable[[], object]] = ...,
+    before: Callable[[], object] | None = ...,
+    after: Callable[[], object] | None = ...,
 ) -> Iterator[_T]: ...
 @overload
 def side_effect(
-    func: Callable[[List[_T]], object],
+    func: Callable[[list[_T]], object],
     iterable: Iterable[_T],
     chunk_size: int,
-    before: Optional[Callable[[], object]] = ...,
-    after: Optional[Callable[[], object]] = ...,
+    before: Callable[[], object] | None = ...,
+    after: Callable[[], object] | None = ...,
 ) -> Iterator[_T]: ...
 def sliced(
-    seq: Sequence[_T], n: int, strict: bool = ...
-) -> Iterator[Sequence[_T]]: ...
+    seq: _SupportsSlicing[_T], n: int, strict: bool = ...
+) -> Iterator[_T]: ...
 def split_at(
     iterable: Iterable[_T],
     pred: Callable[[_T], object],
     maxsplit: int = ...,
     keep_separator: bool = ...,
-) -> Iterator[List[_T]]: ...
+) -> Iterator[list[_T]]: ...
 def split_before(
     iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
-) -> Iterator[List[_T]]: ...
+) -> Iterator[list[_T]]: ...
 def split_after(
     iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
-) -> Iterator[List[_T]]: ...
+) -> Iterator[list[_T]]: ...
 def split_when(
     iterable: Iterable[_T],
     pred: Callable[[_T, _T], object],
     maxsplit: int = ...,
-) -> Iterator[List[_T]]: ...
+) -> Iterator[list[_T]]: ...
 def split_into(
-    iterable: Iterable[_T], sizes: Iterable[Optional[int]]
-) -> Iterator[List[_T]]: ...
+    iterable: Iterable[_T], sizes: Iterable[int | None]
+) -> Iterator[list[_T]]: ...
 @overload
 def padded(
     iterable: Iterable[_T],
     *,
-    n: Optional[int] = ...,
-    next_multiple: bool = ...
-) -> Iterator[Optional[_T]]: ...
+    n: int | None = ...,
+    next_multiple: bool = ...,
+) -> Iterator[_T | None]: ...
 @overload
 def padded(
     iterable: Iterable[_T],
     fillvalue: _U,
-    n: Optional[int] = ...,
+    n: int | None = ...,
     next_multiple: bool = ...,
-) -> Iterator[Union[_T, _U]]: ...
+) -> Iterator[_T | _U]: ...
 @overload
 def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ...
 @overload
-def repeat_last(
-    iterable: Iterable[_T], default: _U
-) -> Iterator[Union[_T, _U]]: ...
-def distribute(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
+def repeat_last(iterable: Iterable[_T], default: _U) -> Iterator[_T | _U]: ...
+def distribute(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ...
 @overload
 def stagger(
     iterable: Iterable[_T],
     offsets: _SizedIterable[int] = ...,
     longest: bool = ...,
-) -> Iterator[Tuple[Optional[_T], ...]]: ...
+) -> Iterator[tuple[_T | None, ...]]: ...
 @overload
 def stagger(
     iterable: Iterable[_T],
     offsets: _SizedIterable[int] = ...,
     longest: bool = ...,
     fillvalue: _U = ...,
-) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+) -> Iterator[tuple[_T | _U, ...]]: ...
 
 class UnequalIterablesError(ValueError):
-    def __init__(
-        self, details: Optional[Tuple[int, int, int]] = ...
-    ) -> None: ...
+    def __init__(self, details: tuple[int, int, int] | None = ...) -> None: ...
 
-def zip_equal(*iterables: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
+@overload
+def zip_equal(__iter1: Iterable[_T1]) -> Iterator[tuple[_T1]]: ...
+@overload
+def zip_equal(
+    __iter1: Iterable[_T1], __iter2: Iterable[_T2]
+) -> Iterator[tuple[_T1, _T2]]: ...
+@overload
+def zip_equal(
+    __iter1: Iterable[_T],
+    __iter2: Iterable[_T],
+    __iter3: Iterable[_T],
+    *iterables: Iterable[_T],
+) -> Iterator[tuple[_T, ...]]: ...
 @overload
 def zip_offset(
-    *iterables: Iterable[_T], offsets: _SizedIterable[int], longest: bool = ...
-) -> Iterator[Tuple[Optional[_T], ...]]: ...
+    __iter1: Iterable[_T1],
+    *,
+    offsets: _SizedIterable[int],
+    longest: bool = ...,
+    fillvalue: None = None,
+) -> Iterator[tuple[_T1 | None]]: ...
 @overload
 def zip_offset(
+    __iter1: Iterable[_T1],
+    __iter2: Iterable[_T2],
+    *,
+    offsets: _SizedIterable[int],
+    longest: bool = ...,
+    fillvalue: None = None,
+) -> Iterator[tuple[_T1 | None, _T2 | None]]: ...
+@overload
+def zip_offset(
+    __iter1: Iterable[_T],
+    __iter2: Iterable[_T],
+    __iter3: Iterable[_T],
     *iterables: Iterable[_T],
     offsets: _SizedIterable[int],
     longest: bool = ...,
-    fillvalue: _U
-) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+    fillvalue: None = None,
+) -> Iterator[tuple[_T | None, ...]]: ...
+@overload
+def zip_offset(
+    __iter1: Iterable[_T1],
+    *,
+    offsets: _SizedIterable[int],
+    longest: bool = ...,
+    fillvalue: _U,
+) -> Iterator[tuple[_T1 | _U]]: ...
+@overload
+def zip_offset(
+    __iter1: Iterable[_T1],
+    __iter2: Iterable[_T2],
+    *,
+    offsets: _SizedIterable[int],
+    longest: bool = ...,
+    fillvalue: _U,
+) -> Iterator[tuple[_T1 | _U, _T2 | _U]]: ...
+@overload
+def zip_offset(
+    __iter1: Iterable[_T],
+    __iter2: Iterable[_T],
+    __iter3: Iterable[_T],
+    *iterables: Iterable[_T],
+    offsets: _SizedIterable[int],
+    longest: bool = ...,
+    fillvalue: _U,
+) -> Iterator[tuple[_T | _U, ...]]: ...
 def sort_together(
     iterables: Iterable[Iterable[_T]],
     key_list: Iterable[int] = ...,
-    key: Optional[Callable[..., Any]] = ...,
+    key: Callable[..., Any] | None = ...,
     reverse: bool = ...,
-) -> List[Tuple[_T, ...]]: ...
-def unzip(iterable: Iterable[Sequence[_T]]) -> Tuple[Iterator[_T], ...]: ...
-def divide(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
+) -> list[tuple[_T, ...]]: ...
+def unzip(iterable: Iterable[Sequence[_T]]) -> tuple[Iterator[_T], ...]: ...
+def divide(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ...
 def always_iterable(
     obj: object,
-    base_type: Union[
-        type, Tuple[Union[type, Tuple[Any, ...]], ...], None
-    ] = ...,
+    base_type: type | tuple[type | tuple[Any, ...], ...] | None = ...,
 ) -> Iterator[Any]: ...
 def adjacent(
     predicate: Callable[[_T], bool],
     iterable: Iterable[_T],
     distance: int = ...,
-) -> Iterator[Tuple[bool, _T]]: ...
+) -> Iterator[tuple[bool, _T]]: ...
+@overload
+def groupby_transform(
+    iterable: Iterable[_T],
+    keyfunc: None = None,
+    valuefunc: None = None,
+    reducefunc: None = None,
+) -> Iterator[tuple[_T, Iterator[_T]]]: ...
+@overload
+def groupby_transform(
+    iterable: Iterable[_T],
+    keyfunc: Callable[[_T], _U],
+    valuefunc: None,
+    reducefunc: None,
+) -> Iterator[tuple[_U, Iterator[_T]]]: ...
+@overload
+def groupby_transform(
+    iterable: Iterable[_T],
+    keyfunc: None,
+    valuefunc: Callable[[_T], _V],
+    reducefunc: None,
+) -> Iterable[tuple[_T, Iterable[_V]]]: ...
+@overload
+def groupby_transform(
+    iterable: Iterable[_T],
+    keyfunc: Callable[[_T], _U],
+    valuefunc: Callable[[_T], _V],
+    reducefunc: None,
+) -> Iterable[tuple[_U, Iterator[_V]]]: ...
+@overload
+def groupby_transform(
+    iterable: Iterable[_T],
+    keyfunc: None,
+    valuefunc: None,
+    reducefunc: Callable[[Iterator[_T]], _W],
+) -> Iterable[tuple[_T, _W]]: ...
+@overload
+def groupby_transform(
+    iterable: Iterable[_T],
+    keyfunc: Callable[[_T], _U],
+    valuefunc: None,
+    reducefunc: Callable[[Iterator[_T]], _W],
+) -> Iterable[tuple[_U, _W]]: ...
+@overload
 def groupby_transform(
     iterable: Iterable[_T],
-    keyfunc: Optional[Callable[[_T], _U]] = ...,
-    valuefunc: Optional[Callable[[_T], _V]] = ...,
-    reducefunc: Optional[Callable[..., _W]] = ...,
-) -> Iterator[Tuple[_T, _W]]: ...
+    keyfunc: None,
+    valuefunc: Callable[[_T], _V],
+    reducefunc: Callable[[Iterable[_V]], _W],
+) -> Iterable[tuple[_T, _W]]: ...
+@overload
+def groupby_transform(
+    iterable: Iterable[_T],
+    keyfunc: Callable[[_T], _U],
+    valuefunc: Callable[[_T], _V],
+    reducefunc: Callable[[Iterable[_V]], _W],
+) -> Iterable[tuple[_U, _W]]: ...
 
 class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]):
     @overload
@@ -259,22 +373,22 @@ class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]):
     def __len__(self) -> int: ...
     def __reduce__(
         self,
-    ) -> Tuple[Type[numeric_range[_T, _U]], Tuple[_T, _T, _U]]: ...
+    ) -> tuple[Type[numeric_range[_T, _U]], tuple[_T, _T, _U]]: ...
     def __repr__(self) -> str: ...
     def __reversed__(self) -> Iterator[_T]: ...
     def count(self, value: _T) -> int: ...
     def index(self, value: _T) -> int: ...  # type: ignore
 
 def count_cycle(
-    iterable: Iterable[_T], n: Optional[int] = ...
-) -> Iterable[Tuple[int, _T]]: ...
+    iterable: Iterable[_T], n: int | None = ...
+) -> Iterable[tuple[int, _T]]: ...
 def mark_ends(
     iterable: Iterable[_T],
-) -> Iterable[Tuple[bool, bool, _T]]: ...
+) -> Iterable[tuple[bool, bool, _T]]: ...
 def locate(
-    iterable: Iterable[object],
+    iterable: Iterable[_T],
     pred: Callable[..., Any] = ...,
-    window_size: Optional[int] = ...,
+    window_size: int | None = ...,
 ) -> Iterator[int]: ...
 def lstrip(
     iterable: Iterable[_T], pred: Callable[[_T], object]
@@ -287,9 +401,7 @@ def strip(
 ) -> Iterator[_T]: ...
 
 class islice_extended(Generic[_T], Iterator[_T]):
-    def __init__(
-        self, iterable: Iterable[_T], *args: Optional[int]
-    ) -> None: ...
+    def __init__(self, iterable: Iterable[_T], *args: int | None) -> None: ...
     def __iter__(self) -> islice_extended[_T]: ...
     def __next__(self) -> _T: ...
     def __getitem__(self, index: slice) -> islice_extended[_T]: ...
@@ -303,8 +415,8 @@ def difference(
     iterable: Iterable[_T],
     func: Callable[[_T, _T], _U] = ...,
     *,
-    initial: None = ...
-) -> Iterator[Union[_T, _U]]: ...
+    initial: None = ...,
+) -> Iterator[_T | _U]: ...
 @overload
 def difference(
     iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U
@@ -320,7 +432,7 @@ class SequenceView(Generic[_T], Sequence[_T]):
 
 class seekable(Generic[_T], Iterator[_T]):
     def __init__(
-        self, iterable: Iterable[_T], maxlen: Optional[int] = ...
+        self, iterable: Iterable[_T], maxlen: int | None = ...
     ) -> None: ...
     def __iter__(self) -> seekable[_T]: ...
     def __next__(self) -> _T: ...
@@ -328,20 +440,21 @@ class seekable(Generic[_T], Iterator[_T]):
     @overload
     def peek(self) -> _T: ...
     @overload
-    def peek(self, default: _U) -> Union[_T, _U]: ...
+    def peek(self, default: _U) -> _T | _U: ...
     def elements(self) -> SequenceView[_T]: ...
     def seek(self, index: int) -> None: ...
+    def relative_seek(self, count: int) -> None: ...
 
 class run_length:
     @staticmethod
-    def encode(iterable: Iterable[_T]) -> Iterator[Tuple[_T, int]]: ...
+    def encode(iterable: Iterable[_T]) -> Iterator[tuple[_T, int]]: ...
     @staticmethod
-    def decode(iterable: Iterable[Tuple[_T, int]]) -> Iterator[_T]: ...
+    def decode(iterable: Iterable[tuple[_T, int]]) -> Iterator[_T]: ...
 
 def exactly_n(
     iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ...
 ) -> bool: ...
-def circular_shifts(iterable: Iterable[_T]) -> List[Tuple[_T, ...]]: ...
+def circular_shifts(iterable: Iterable[_T]) -> list[tuple[_T, ...]]: ...
 def make_decorator(
     wrapping_func: Callable[..., _U], result_index: int = ...
 ) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ...
@@ -351,44 +464,44 @@ def map_reduce(
     keyfunc: Callable[[_T], _U],
     valuefunc: None = ...,
     reducefunc: None = ...,
-) -> Dict[_U, List[_T]]: ...
+) -> dict[_U, list[_T]]: ...
 @overload
 def map_reduce(
     iterable: Iterable[_T],
     keyfunc: Callable[[_T], _U],
     valuefunc: Callable[[_T], _V],
     reducefunc: None = ...,
-) -> Dict[_U, List[_V]]: ...
+) -> dict[_U, list[_V]]: ...
 @overload
 def map_reduce(
     iterable: Iterable[_T],
     keyfunc: Callable[[_T], _U],
     valuefunc: None = ...,
-    reducefunc: Callable[[List[_T]], _W] = ...,
-) -> Dict[_U, _W]: ...
+    reducefunc: Callable[[list[_T]], _W] = ...,
+) -> dict[_U, _W]: ...
 @overload
 def map_reduce(
     iterable: Iterable[_T],
     keyfunc: Callable[[_T], _U],
     valuefunc: Callable[[_T], _V],
-    reducefunc: Callable[[List[_V]], _W],
-) -> Dict[_U, _W]: ...
+    reducefunc: Callable[[list[_V]], _W],
+) -> dict[_U, _W]: ...
 def rlocate(
     iterable: Iterable[_T],
     pred: Callable[..., object] = ...,
-    window_size: Optional[int] = ...,
+    window_size: int | None = ...,
 ) -> Iterator[int]: ...
 def replace(
     iterable: Iterable[_T],
     pred: Callable[..., object],
     substitutes: Iterable[_U],
-    count: Optional[int] = ...,
+    count: int | None = ...,
     window_size: int = ...,
-) -> Iterator[Union[_T, _U]]: ...
-def partitions(iterable: Iterable[_T]) -> Iterator[List[List[_T]]]: ...
+) -> Iterator[_T | _U]: ...
+def partitions(iterable: Iterable[_T]) -> Iterator[list[list[_T]]]: ...
 def set_partitions(
-    iterable: Iterable[_T], k: Optional[int] = ...
-) -> Iterator[List[List[_T]]]: ...
+    iterable: Iterable[_T], k: int | None = ...
+) -> Iterator[list[list[_T]]]: ...
 
 class time_limited(Generic[_T], Iterator[_T]):
     def __init__(
@@ -399,35 +512,42 @@ class time_limited(Generic[_T], Iterator[_T]):
 
 @overload
 def only(
-    iterable: Iterable[_T], *, too_long: Optional[_Raisable] = ...
-) -> Optional[_T]: ...
+    iterable: Iterable[_T], *, too_long: _Raisable | None = ...
+) -> _T | None: ...
 @overload
 def only(
-    iterable: Iterable[_T], default: _U, too_long: Optional[_Raisable] = ...
-) -> Union[_T, _U]: ...
+    iterable: Iterable[_T], default: _U, too_long: _Raisable | None = ...
+) -> _T | _U: ...
 def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ...
 def distinct_combinations(
     iterable: Iterable[_T], r: int
-) -> Iterator[Tuple[_T, ...]]: ...
+) -> Iterator[tuple[_T, ...]]: ...
 def filter_except(
     validator: Callable[[Any], object],
     iterable: Iterable[_T],
-    *exceptions: Type[BaseException]
+    *exceptions: Type[BaseException],
 ) -> Iterator[_T]: ...
 def map_except(
     function: Callable[[Any], _U],
     iterable: Iterable[_T],
-    *exceptions: Type[BaseException]
+    *exceptions: Type[BaseException],
 ) -> Iterator[_U]: ...
+def map_if(
+    iterable: Iterable[Any],
+    pred: Callable[[Any], bool],
+    func: Callable[[Any], Any],
+    func_else: Callable[[Any], Any] | None = ...,
+) -> Iterator[Any]: ...
 def sample(
     iterable: Iterable[_T],
     k: int,
-    weights: Optional[Iterable[float]] = ...,
-) -> List[_T]: ...
+    weights: Iterable[float] | None = ...,
+) -> list[_T]: ...
 def is_sorted(
     iterable: Iterable[_T],
-    key: Optional[Callable[[_T], _U]] = ...,
+    key: Callable[[_T], _U] | None = ...,
     reverse: bool = False,
+    strict: bool = False,
 ) -> bool: ...
 
 class AbortThread(BaseException):
@@ -443,10 +563,10 @@ class callback_iter(Generic[_T], Iterator[_T]):
     def __enter__(self) -> callback_iter[_T]: ...
     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
-        exc_value: Optional[BaseException],
-        traceback: Optional[TracebackType],
-    ) -> Optional[bool]: ...
+        exc_type: Type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+    ) -> bool | None: ...
     def __iter__(self) -> callback_iter[_T]: ...
     def __next__(self) -> _T: ...
     def _reader(self) -> Iterator[_T]: ...
@@ -457,24 +577,133 @@ class callback_iter(Generic[_T], Iterator[_T]):
 
 def windowed_complete(
     iterable: Iterable[_T], n: int
-) -> Iterator[Tuple[_T, ...]]: ...
+) -> Iterator[tuple[_T, ...]]: ...
 def all_unique(
-    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
+    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
 ) -> bool: ...
-def nth_product(index: int, *args: Iterable[_T]) -> Tuple[_T, ...]: ...
+def nth_product(index: int, *args: Iterable[_T]) -> tuple[_T, ...]: ...
+def nth_combination_with_replacement(
+    iterable: Iterable[_T], r: int, index: int
+) -> tuple[_T, ...]: ...
 def nth_permutation(
     iterable: Iterable[_T], r: int, index: int
-) -> Tuple[_T, ...]: ...
-def value_chain(*args: Union[_T, Iterable[_T]]) -> Iterable[_T]: ...
+) -> tuple[_T, ...]: ...
+def value_chain(*args: _T | Iterable[_T]) -> Iterable[_T]: ...
 def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
 def combination_index(
     element: Iterable[_T], iterable: Iterable[_T]
 ) -> int: ...
+def combination_with_replacement_index(
+    element: Iterable[_T], iterable: Iterable[_T]
+) -> int: ...
 def permutation_index(
     element: Iterable[_T], iterable: Iterable[_T]
 ) -> int: ...
+def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ...
 
 class countable(Generic[_T], Iterator[_T]):
     def __init__(self, iterable: Iterable[_T]) -> None: ...
     def __iter__(self) -> countable[_T]: ...
     def __next__(self) -> _T: ...
+    items_seen: int
+
+def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ...
+def zip_broadcast(
+    *objects: _T | Iterable[_T],
+    scalar_types: type | tuple[type | tuple[Any, ...], ...] | None = ...,
+    strict: bool = ...,
+) -> Iterable[tuple[_T, ...]]: ...
+def unique_in_window(
+    iterable: Iterable[_T], n: int, key: Callable[[_T], _U] | None = ...
+) -> Iterator[_T]: ...
+def duplicates_everseen(
+    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
+) -> Iterator[_T]: ...
+def duplicates_justseen(
+    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
+) -> Iterator[_T]: ...
+def classify_unique(
+    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
+) -> Iterator[tuple[_T, bool, bool]]: ...
+
+class _SupportsLessThan(Protocol):
+    def __lt__(self, __other: Any) -> bool: ...
+
+_SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan)
+
+@overload
+def minmax(
+    iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None
+) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
+@overload
+def minmax(
+    iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan]
+) -> tuple[_T, _T]: ...
+@overload
+def minmax(
+    iterable_or_value: Iterable[_SupportsLessThanT],
+    *,
+    key: None = None,
+    default: _U,
+) -> _U | tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
+@overload
+def minmax(
+    iterable_or_value: Iterable[_T],
+    *,
+    key: Callable[[_T], _SupportsLessThan],
+    default: _U,
+) -> _U | tuple[_T, _T]: ...
+@overload
+def minmax(
+    iterable_or_value: _SupportsLessThanT,
+    __other: _SupportsLessThanT,
+    *others: _SupportsLessThanT,
+) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
+@overload
+def minmax(
+    iterable_or_value: _T,
+    __other: _T,
+    *others: _T,
+    key: Callable[[_T], _SupportsLessThan],
+) -> tuple[_T, _T]: ...
+def longest_common_prefix(
+    iterables: Iterable[Iterable[_T]],
+) -> Iterator[_T]: ...
+def iequals(*iterables: Iterable[Any]) -> bool: ...
+def constrained_batches(
+    iterable: Iterable[_T],
+    max_size: int,
+    max_count: int | None = ...,
+    get_len: Callable[[_T], object] = ...,
+    strict: bool = ...,
+) -> Iterator[tuple[_T, ...]]: ...
+def gray_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
+def partial_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
+def takewhile_inclusive(
+    predicate: Callable[[_T], bool], iterable: Iterable[_T]
+) -> Iterator[_T]: ...
+def outer_product(
+    func: Callable[[_T, _U], _V],
+    xs: Iterable[_T],
+    ys: Iterable[_U],
+    *args: Any,
+    **kwargs: Any,
+) -> Iterator[tuple[_V, ...]]: ...
+def iter_suppress(
+    iterable: Iterable[_T],
+    *exceptions: Type[BaseException],
+) -> Iterator[_T]: ...
+def filter_map(
+    func: Callable[[_T], _V | None],
+    iterable: Iterable[_T],
+) -> Iterator[_V]: ...
+def powerset_of_sets(iterable: Iterable[_T]) -> Iterator[set[_T]]: ...
+def join_mappings(
+    **field_to_map: Mapping[_T, _V]
+) -> dict[_T, dict[str, _V]]: ...
+def doublestarmap(
+    func: Callable[..., _T],
+    iterable: Iterable[Mapping[str, Any]],
+) -> Iterator[_T]: ...
+def dft(xarr: Sequence[complex]) -> Iterator[complex]: ...
+def idft(Xarr: Sequence[complex]) -> Iterator[complex]: ...
diff --git a/setuptools/_vendor/more_itertools/recipes.py b/setuptools/_vendor/more_itertools/recipes.py
index 521abd7c2c..b32fa95533 100644
--- a/setuptools/_vendor/more_itertools/recipes.py
+++ b/setuptools/_vendor/more_itertools/recipes.py
@@ -7,32 +7,44 @@
 .. [1] http://docs.python.org/library/itertools.html#recipes
 
 """
-import warnings
+
+import math
+import operator
+
 from collections import deque
+from collections.abc import Sized
+from functools import partial, reduce
 from itertools import (
     chain,
     combinations,
+    compress,
     count,
     cycle,
     groupby,
     islice,
+    product,
     repeat,
     starmap,
     tee,
     zip_longest,
 )
-import operator
 from random import randrange, sample, choice
+from sys import hexversion
 
 __all__ = [
     'all_equal',
+    'batched',
+    'before_and_after',
     'consume',
     'convolve',
     'dotproduct',
     'first_true',
+    'factor',
     'flatten',
     'grouper',
     'iter_except',
+    'iter_index',
+    'matmul',
     'ncycles',
     'nth',
     'nth_combination',
@@ -40,22 +52,48 @@
     'pad_none',
     'pairwise',
     'partition',
+    'polynomial_eval',
+    'polynomial_from_roots',
+    'polynomial_derivative',
     'powerset',
     'prepend',
     'quantify',
+    'reshape',
     'random_combination_with_replacement',
     'random_combination',
     'random_permutation',
     'random_product',
     'repeatfunc',
     'roundrobin',
+    'sieve',
+    'sliding_window',
+    'subslices',
+    'sum_of_squares',
     'tabulate',
     'tail',
     'take',
+    'totient',
+    'transpose',
+    'triplewise',
+    'unique',
     'unique_everseen',
     'unique_justseen',
 ]
 
+_marker = object()
+
+
+# zip with strict is available for Python 3.10+
+try:
+    zip(strict=True)
+except TypeError:
+    _zip_strict = zip
+else:
+    _zip_strict = partial(zip, strict=True)
+
+# math.sumprod is available for Python 3.12+
+_sumprod = getattr(math, 'sumprod', lambda x, y: dotproduct(x, y))
+
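The pattern here probes for the newer stdlib behavior directly rather than
comparing version numbers. What the zip probe captures, as a sketch:

    # zip(strict=True) with no iterables succeeds on 3.10+; earlier
    # interpreters raise TypeError, which selects the plain-zip fallback.
    try:
        list(zip('ab', 'cde', strict=True))
    except ValueError as exc:
        print(exc)  # on 3.10+, mismatched lengths raise ValueError
    except TypeError:
        pass        # pre-3.10: 'strict' is an unexpected keyword argument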
 
 def take(n, iterable):
     """Return first *n* items of the iterable as a list.
@@ -99,7 +137,14 @@ def tail(n, iterable):
     ['E', 'F', 'G']
 
     """
-    return iter(deque(iterable, maxlen=n))
+    # If the given iterable has a length, then we can use islice to get its
+    # final elements. Note that if the iterable is not actually Iterable,
+    # either islice or deque will throw a TypeError. This is why we don't
+    # check if it is Iterable.
+    if isinstance(iterable, Sized):
+        yield from islice(iterable, max(0, len(iterable) - n), None)
+    else:
+        yield from iter(deque(iterable, maxlen=n))
 
 
 def consume(iterator, n=None):
@@ -155,7 +200,7 @@ def nth(iterable, n, default=None):
     return next(islice(iterable, n, None), default)
 
 
-def all_equal(iterable):
+def all_equal(iterable, key=None):
     """
     Returns ``True`` if all the elements are equal to each other.
 
@@ -164,9 +209,16 @@ def all_equal(iterable):
         >>> all_equal('aaab')
         False
 
+    A function that accepts a single argument and returns a transformed version
+    of each input item can be specified with *key*:
+
+        >>> all_equal('AaaA', key=str.casefold)
+        True
+        >>> all_equal([1, 2, 3], key=lambda x: x < 10)
+        True
+
     """
-    g = groupby(iterable)
-    return next(g, True) and not next(g, False)
+    return len(list(islice(groupby(iterable, key), 2))) <= 1
 
 
 def quantify(iterable, pred=bool):
@@ -266,7 +318,7 @@ def _pairwise(iterable):
     """
     a, b = tee(iterable)
     next(b, None)
-    yield from zip(a, b)
+    return zip(a, b)
 
 
 try:
@@ -276,25 +328,84 @@ def _pairwise(iterable):
 else:
 
     def pairwise(iterable):
-        yield from itertools_pairwise(iterable)
+        return itertools_pairwise(iterable)
 
     pairwise.__doc__ = _pairwise.__doc__
 
 
-def grouper(iterable, n, fillvalue=None):
-    """Collect data into fixed-length chunks or blocks.
+class UnequalIterablesError(ValueError):
+    def __init__(self, details=None):
+        msg = 'Iterables have different lengths'
+        if details is not None:
+            msg += (': index 0 has length {}; index {} has length {}').format(
+                *details
+            )
+
+        super().__init__(msg)
+
 
-    >>> list(grouper('ABCDEFG', 3, 'x'))
+def _zip_equal_generator(iterables):
+    for combo in zip_longest(*iterables, fillvalue=_marker):
+        for val in combo:
+            if val is _marker:
+                raise UnequalIterablesError()
+        yield combo
+
+
+def _zip_equal(*iterables):
+    # Check whether the iterables are all the same size.
+    try:
+        first_size = len(iterables[0])
+        for i, it in enumerate(iterables[1:], 1):
+            size = len(it)
+            if size != first_size:
+                raise UnequalIterablesError(details=(first_size, i, size))
+        # All sizes are equal, we can use the built-in zip.
+        return zip(*iterables)
+    # If any one of the iterables didn't have a length, start reading
+    # them until one runs out.
+    except TypeError:
+        return _zip_equal_generator(iterables)
+
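Observable behavior, as a sketch (UnequalIterablesError subclasses ValueError,
so catching the base class works even without importing the subclass):

    from more_itertools import zip_equal

    try:
        zip_equal([1, 2, 3], [1, 2])  # sized inputs fail fast, up front
    except ValueError as exc:
        print(exc)  # 'Iterables have different lengths: ...'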
+
+def grouper(iterable, n, incomplete='fill', fillvalue=None):
+    """Group elements from *iterable* into fixed-length groups of length *n*.
+
+    >>> list(grouper('ABCDEF', 3))
+    [('A', 'B', 'C'), ('D', 'E', 'F')]
+
+    The keyword arguments *incomplete* and *fillvalue* control what happens for
+    iterables whose length is not a multiple of *n*.
+
+    When *incomplete* is `'fill'`, the last group will contain instances of
+    *fillvalue*.
+
+    >>> list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x'))
     [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
 
+    When *incomplete* is `'ignore'`, the last group will not be emitted.
+
+    >>> list(grouper('ABCDEFG', 3, incomplete='ignore', fillvalue='x'))
+    [('A', 'B', 'C'), ('D', 'E', 'F')]
+
+    When *incomplete* is `'strict'`, a subclass of `ValueError` will be raised.
+
+    >>> it = grouper('ABCDEFG', 3, incomplete='strict')
+    >>> list(it)  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    ...
+    UnequalIterablesError
+
     """
-    if isinstance(iterable, int):
-        warnings.warn(
-            "grouper expects iterable as first parameter", DeprecationWarning
-        )
-        n, iterable = iterable, n
     args = [iter(iterable)] * n
-    return zip_longest(fillvalue=fillvalue, *args)
+    if incomplete == 'fill':
+        return zip_longest(*args, fillvalue=fillvalue)
+    if incomplete == 'strict':
+        return _zip_equal(*args)
+    if incomplete == 'ignore':
+        return zip(*args)
+    else:
+        raise ValueError('Expected fill, strict, or ignore')
 
 
 def roundrobin(*iterables):
@@ -308,16 +419,11 @@ def roundrobin(*iterables):
     iterables is small).
 
     """
-    # Recipe credited to George Sakkis
-    pending = len(iterables)
-    nexts = cycle(iter(it).__next__ for it in iterables)
-    while pending:
-        try:
-            for next in nexts:
-                yield next()
-        except StopIteration:
-            pending -= 1
-            nexts = cycle(islice(nexts, pending))
+    # Algorithm credited to George Sakkis
+    iterators = map(iter, iterables)
+    for num_active in range(len(iterables), 0, -1):
+        iterators = cycle(islice(iterators, num_active))
+        yield from map(next, iterators)
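How the rewrite retires exhausted iterators: map(next, iterators) pulls
elements in cycle order until some iterator raises StopIteration, which
terminates the yield from; the for loop then rebuilds the cycle with
islice(..., num_active), resuming just past the exhausted entry with one
fewer slot. A quick check, assuming more_itertools is importable:

    from more_itertools import roundrobin

    assert ''.join(roundrobin('ABC', 'D', 'EF')) == 'ADEBFC'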
 
 
 def partition(pred, iterable):
@@ -343,12 +449,9 @@ def partition(pred, iterable):
     if pred is None:
         pred = bool
 
-    evaluations = ((pred(x), x) for x in iterable)
-    t1, t2 = tee(evaluations)
-    return (
-        (x for (cond, x) in t1 if not cond),
-        (x for (cond, x) in t2 if cond),
-    )
+    t1, t2, p = tee(iterable, 3)
+    p1, p2 = tee(map(pred, p))
+    return (compress(t1, map(operator.not_, p1)), compress(t2, p2))
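The tee/compress arrangement evaluates the predicate exactly once per element
and shares the results between both output streams. A usage sketch:

    from more_itertools import partition

    falsey, truthy = map(list, partition(lambda x: x % 2, range(10)))
    assert falsey == [0, 2, 4, 6, 8]
    assert truthy == [1, 3, 5, 7, 9]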
 
 
 def powerset(iterable):
@@ -359,16 +462,14 @@ def powerset(iterable):
 
     :func:`powerset` will operate on iterables that aren't :class:`set`
     instances, so repeated elements in the input will produce repeated elements
-    in the output. Use :func:`unique_everseen` on the input to avoid generating
-    duplicates:
+    in the output.
 
         >>> seq = [1, 1, 0]
         >>> list(powerset(seq))
         [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
-        >>> from more_itertools import unique_everseen
-        >>> list(powerset(unique_everseen(seq)))
-        [(), (1,), (0,), (1, 0)]
 
+    For a variant that efficiently yields actual :class:`set` instances, see
+    :func:`powerset_of_sets`.
     """
     s = list(iterable)
     return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
@@ -396,7 +497,7 @@ def unique_everseen(iterable, key=None):
         >>> list(unique_everseen(iterable, key=tuple))  # Faster
         [[1, 2], [2, 3]]
 
-    Similary, you may want to convert unhashable ``set`` objects with
+    Similarly, you may want to convert unhashable ``set`` objects with
     ``key=frozenset``. For ``dict`` objects,
     ``key=lambda x: frozenset(x.items())`` can be used.
 
@@ -428,9 +529,31 @@ def unique_justseen(iterable, key=None):
     ['A', 'B', 'C', 'A', 'D']
 
     """
+    if key is None:
+        return map(operator.itemgetter(0), groupby(iterable))
+
     return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
 
 
+def unique(iterable, key=None, reverse=False):
+    """Yields unique elements in sorted order.
+
+    >>> list(unique([[1, 2], [3, 4], [1, 2]]))
+    [[1, 2], [3, 4]]
+
+    *key* and *reverse* are passed to :func:`sorted`.
+
+    >>> list(unique('ABBcCAD', str.casefold))
+    ['A', 'B', 'c', 'D']
+    >>> list(unique('ABBcCAD', str.casefold, reverse=True))
+    ['D', 'c', 'B', 'A']
+
+    The elements in *iterable* need not be hashable, but they must be
+    comparable for sorting to work.
+    """
+    return unique_justseen(sorted(iterable, key=key, reverse=reverse), key=key)
+
+
 def iter_except(func, exception, first=None):
     """Yields results from a function repeatedly until an exception is raised.
 
@@ -442,6 +565,16 @@ def iter_except(func, exception, first=None):
         >>> list(iter_except(l.pop, IndexError))
         [2, 1, 0]
 
+    Multiple exceptions can be specified as a stopping condition:
+
+        >>> l = [1, 2, 3, '...', 4, 5, 6]
+        >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+        [7, 6, 5]
+        >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+        [4, 3, 2]
+        >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+        []
+
     """
     try:
         if first is not None:
@@ -612,9 +745,302 @@ def convolve(signal, kernel):
     is immediately consumed and stored.
 
     """
+    # This implementation intentionally doesn't match the one in the itertools
+    # documentation.
     kernel = tuple(kernel)[::-1]
     n = len(kernel)
     window = deque([0], maxlen=n) * n
     for x in chain(signal, repeat(0, n - 1)):
         window.append(x)
-        yield sum(map(operator.mul, kernel, window))
+        yield _sumprod(kernel, window)
+
+
+def before_and_after(predicate, it):
+    """A variant of :func:`takewhile` that allows complete access to the
+    remainder of the iterator.
+
+         >>> it = iter('ABCdEfGhI')
+         >>> all_upper, remainder = before_and_after(str.isupper, it)
+         >>> ''.join(all_upper)
+         'ABC'
+         >>> ''.join(remainder) # takewhile() would lose the 'd'
+         'dEfGhI'
+
+    Note that the first iterator must be fully consumed before the second
+    iterator can generate valid results.
+    """
+    it = iter(it)
+    transition = []
+
+    def true_iterator():
+        for elem in it:
+            if predicate(elem):
+                yield elem
+            else:
+                transition.append(elem)
+                return
+
+    # Note: this is different from itertools recipes to allow nesting
+    # before_and_after remainders into before_and_after again. See tests
+    # for an example.
+    remainder_iterator = chain(transition, it)
+
+    return true_iterator(), remainder_iterator
+
+
+def triplewise(iterable):
+    """Return overlapping triplets from *iterable*.
+
+    >>> list(triplewise('ABCDE'))
+    [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]
+
+    """
+    for (a, _), (b, c) in pairwise(pairwise(iterable)):
+        yield a, b, c
+
+
+def sliding_window(iterable, n):
+    """Return a sliding window of width *n* over *iterable*.
+
+        >>> list(sliding_window(range(6), 4))
+        [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)]
+
+    If *iterable* has fewer than *n* items, then nothing is yielded:
+
+        >>> list(sliding_window(range(3), 4))
+        []
+
+    For a variant with more features, see :func:`windowed`.
+    """
+    it = iter(iterable)
+    window = deque(islice(it, n - 1), maxlen=n)
+    for x in it:
+        window.append(x)
+        yield tuple(window)
+
+
+def subslices(iterable):
+    """Return all contiguous non-empty subslices of *iterable*.
+
+        >>> list(subslices('ABC'))
+        [['A'], ['A', 'B'], ['A', 'B', 'C'], ['B'], ['B', 'C'], ['C']]
+
+    This is similar to :func:`substrings`, but emits items in a different
+    order.
+    """
+    seq = list(iterable)
+    slices = starmap(slice, combinations(range(len(seq) + 1), 2))
+    return map(operator.getitem, repeat(seq), slices)
+
+
+def polynomial_from_roots(roots):
+    """Compute a polynomial's coefficients from its roots.
+
+    >>> roots = [5, -4, 3]  # (x - 5) * (x + 4) * (x - 3)
+    >>> polynomial_from_roots(roots)  # x^3 - 4 * x^2 - 17 * x + 60
+    [1, -4, -17, 60]
+    """
+    factors = zip(repeat(1), map(operator.neg, roots))
+    return list(reduce(convolve, factors, [1]))
+
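Multiplying polynomials is convolving their coefficient sequences, so reducing
over convolve expands the product of the linear factors one at a time:

    # (x - 5)(x + 4)        -> convolve([1, -5], [1, 4])      == [1, -1, -20]
    # (x^2 - x - 20)(x - 3) -> convolve([1, -1, -20], [1, -3]) == [1, -4, -17, 60]
    from functools import reduce
    from more_itertools import convolve

    factors = [[1, -5], [1, 4], [1, -3]]
    assert list(reduce(convolve, factors, [1])) == [1, -4, -17, 60]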
+
+def iter_index(iterable, value, start=0, stop=None):
+    """Yield the index of each place in *iterable* that *value* occurs,
+    beginning with index *start* and ending before index *stop*.
+
+    >>> list(iter_index('AABCADEAF', 'A'))
+    [0, 1, 4, 7]
+    >>> list(iter_index('AABCADEAF', 'A', 1))  # start index is inclusive
+    [1, 4, 7]
+    >>> list(iter_index('AABCADEAF', 'A', 1, 7))  # stop index is not inclusive
+    [1, 4]
+
+    The behavior for non-scalar *values* matches the built-in Python types.
+
+    >>> list(iter_index('ABCDABCD', 'AB'))
+    [0, 4]
+    >>> list(iter_index([0, 1, 2, 3, 0, 1, 2, 3], [0, 1]))
+    []
+    >>> list(iter_index([[0, 1], [2, 3], [0, 1], [2, 3]], [0, 1]))
+    [0, 2]
+
+    See :func:`locate` for a more general means of finding the indexes
+    associated with particular values.
+
+    """
+    seq_index = getattr(iterable, 'index', None)
+    if seq_index is None:
+        # Slow path for general iterables
+        it = islice(iterable, start, stop)
+        for i, element in enumerate(it, start):
+            if element is value or element == value:
+                yield i
+    else:
+        # Fast path for sequences
+        stop = len(iterable) if stop is None else stop
+        i = start - 1
+        try:
+            while True:
+                yield (i := seq_index(value, i + 1, stop))
+        except ValueError:
+            pass
+
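The fast path is roughly equivalent to the loop below, with the walrus
expression both yielding each hit and advancing the search start; because it
defers to the sequence's own index method, strings match substrings (hence the
'AB' doctest above):

    def iter_index_fast(seq, value, start=0, stop=None):
        # Unrolled sketch of the walrus-based loop above.
        stop = len(seq) if stop is None else stop
        i = start - 1
        while True:
            try:
                i = seq.index(value, i + 1, stop)
            except ValueError:  # no further occurrences
                return
            yield i

    assert list(iter_index_fast('AABCADEAF', 'A')) == [0, 1, 4, 7]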
+
+def sieve(n):
+    """Yield the primes less than n.
+
+    >>> list(sieve(30))
+    [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
+    """
+    if n > 2:
+        yield 2
+    start = 3
+    data = bytearray((0, 1)) * (n // 2)
+    limit = math.isqrt(n) + 1
+    for p in iter_index(data, 1, start, limit):
+        yield from iter_index(data, 1, start, p * p)
+        data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p)))
+        start = p * p
+    yield from iter_index(data, 1, start)
+
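The bytearray doubles as both the sieve and the candidate list: index equals
candidate number, bytearray((0, 1)) * (n // 2) pre-marks every odd index with
1 (evens start cleared), and the crossing-off slice steps by 2 * p so it only
ever touches odd multiples of p. The initial layout:

    n = 30
    data = bytearray((0, 1)) * (n // 2)
    # Indexes holding 1 are exactly the odd numbers below n.
    assert [i for i, flag in enumerate(data) if flag] == list(range(1, n, 2))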
+
+def _batched(iterable, n, *, strict=False):
+    """Batch data into tuples of length *n*. If the number of items in
+    *iterable* is not divisible by *n*:
+    * The last batch will be shorter if *strict* is ``False``.
+    * :exc:`ValueError` will be raised if *strict* is ``True``.
+
+    >>> list(batched('ABCDEFG', 3))
+    [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
+
+    On Python 3.13 and above, this is an alias for :func:`itertools.batched`.
+    """
+    if n < 1:
+        raise ValueError('n must be at least one')
+    it = iter(iterable)
+    while batch := tuple(islice(it, n)):
+        if strict and len(batch) != n:
+            raise ValueError('batched(): incomplete batch')
+        yield batch
+
+
+if hexversion >= 0x30D00A2:
+    from itertools import batched as itertools_batched
+
+    def batched(iterable, n, *, strict=False):
+        return itertools_batched(iterable, n, strict=strict)
+
+else:
+    batched = _batched
+
+    batched.__doc__ = _batched.__doc__
+
+
+def transpose(it):
+    """Swap the rows and columns of the input matrix.
+
+    >>> list(transpose([(1, 2, 3), (11, 22, 33)]))
+    [(1, 11), (2, 22), (3, 33)]
+
+    The caller should ensure that the dimensions of the input are compatible.
+    If the input is empty, no output will be produced.
+    """
+    return _zip_strict(*it)
+
+
+def reshape(matrix, cols):
+    """Reshape the 2-D input *matrix* to have a column count given by *cols*.
+
+    >>> matrix = [(0, 1), (2, 3), (4, 5)]
+    >>> cols = 3
+    >>> list(reshape(matrix, cols))
+    [(0, 1, 2), (3, 4, 5)]
+    """
+    return batched(chain.from_iterable(matrix), cols)
+
+
+def matmul(m1, m2):
+    """Multiply two matrices.
+
+    >>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)]))
+    [(49, 80), (41, 60)]
+
+    The caller should ensure that the dimensions of the input matrices are
+    compatible with each other.
+    """
+    n = len(m2[0])
+    return batched(starmap(_sumprod, product(m1, transpose(m2))), n)
+
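product(m1, transpose(m2)) pairs each row of m1 with each column of m2 in
row-major order, _sumprod turns each pair into a dot product, and
batched(..., n) reinserts the row breaks. The same computation, unrolled:

    m1, m2 = [(7, 5), (3, 5)], [(2, 5), (7, 9)]
    cols = list(zip(*m2))  # [(2, 7), (5, 9)]
    flat = [sum(a * b for a, b in zip(row, col))
            for row in m1 for col in cols]
    assert flat == [49, 80, 41, 60]  # matches the doctest above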
+
+def factor(n):
+    """Yield the prime factors of n.
+
+    >>> list(factor(360))
+    [2, 2, 2, 3, 3, 5]
+    """
+    for prime in sieve(math.isqrt(n) + 1):
+        while not n % prime:
+            yield prime
+            n //= prime
+            if n == 1:
+                return
+    if n > 1:
+        yield n
+
+
+def polynomial_eval(coefficients, x):
+    """Evaluate a polynomial at a specific value.
+
+    Example: evaluating x^3 - 4 * x^2 - 17 * x + 60 at x = 2.5:
+
+    >>> coefficients = [1, -4, -17, 60]
+    >>> x = 2.5
+    >>> polynomial_eval(coefficients, x)
+    8.125
+    """
+    n = len(coefficients)
+    if n == 0:
+        return x * 0  # coerce zero to the type of x
+    powers = map(pow, repeat(x), reversed(range(n)))
+    return _sumprod(coefficients, powers)
+
+
+def sum_of_squares(it):
+    """Return the sum of the squares of the input values.
+
+    >>> sum_of_squares([10, 20, 30])
+    1400
+    """
+    return _sumprod(*tee(it))
+
+
+def polynomial_derivative(coefficients):
+    """Compute the first derivative of a polynomial.
+
+    Example: evaluating the derivative of x^3 - 4 * x^2 - 17 * x + 60
+
+    >>> coefficients = [1, -4, -17, 60]
+    >>> derivative_coefficients = polynomial_derivative(coefficients)
+    >>> derivative_coefficients
+    [3, -8, -17]
+    """
+    n = len(coefficients)
+    powers = reversed(range(1, n))
+    return list(map(operator.mul, coefficients, powers))
+
+
+def totient(n):
+    """Return the count of natural numbers up to *n* that are coprime with *n*.
+
+    >>> totient(9)
+    6
+    >>> totient(12)
+    4
+    """
+    # The itertools docs use unique_justseen instead of set; see
+    # https://github.com/more-itertools/more-itertools/issues/823
+    for p in set(factor(n)):
+        n = n // p * (p - 1)
+
+    return n
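The loop is Euler's product formula, phi(n) = n * prod(1 - 1/p) over the
distinct prime factors p of n, kept in integer arithmetic. Tracing totient(12):

    n = 12
    for p in (2, 3):          # distinct prime factors of 12
        n = n // p * (p - 1)  # 12 -> 6 -> 4
    assert n == 4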
diff --git a/setuptools/_vendor/more_itertools/recipes.pyi b/setuptools/_vendor/more_itertools/recipes.pyi
index 5e39d96390..739acec05f 100644
--- a/setuptools/_vendor/more_itertools/recipes.pyi
+++ b/setuptools/_vendor/more_itertools/recipes.pyi
@@ -1,103 +1,136 @@
 """Stubs for more_itertools.recipes"""
+
+from __future__ import annotations
+
 from typing import (
     Any,
     Callable,
     Iterable,
     Iterator,
-    List,
-    Optional,
-    Tuple,
+    overload,
+    Sequence,
+    Type,
     TypeVar,
-    Union,
 )
-from typing_extensions import overload, Type
 
 # Type and type variable definitions
 _T = TypeVar('_T')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
 _U = TypeVar('_U')
 
-def take(n: int, iterable: Iterable[_T]) -> List[_T]: ...
+def take(n: int, iterable: Iterable[_T]) -> list[_T]: ...
 def tabulate(
     function: Callable[[int], _T], start: int = ...
 ) -> Iterator[_T]: ...
 def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
-def consume(iterator: Iterable[object], n: Optional[int] = ...) -> None: ...
+def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
 @overload
-def nth(iterable: Iterable[_T], n: int) -> Optional[_T]: ...
+def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
 @overload
-def nth(iterable: Iterable[_T], n: int, default: _U) -> Union[_T, _U]: ...
-def all_equal(iterable: Iterable[object]) -> bool: ...
+def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
+def all_equal(
+    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
+) -> bool: ...
 def quantify(
     iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
 ) -> int: ...
-def pad_none(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
-def padnone(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
+def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
+def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
 def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
-def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ...
+def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ...
 def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
 def repeatfunc(
-    func: Callable[..., _U], times: Optional[int] = ..., *args: Any
+    func: Callable[..., _U], times: int | None = ..., *args: Any
 ) -> Iterator[_U]: ...
-def pairwise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T]]: ...
-@overload
+def pairwise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T]]: ...
 def grouper(
-    iterable: Iterable[_T], n: int
-) -> Iterator[Tuple[Optional[_T], ...]]: ...
-@overload
-def grouper(
-    iterable: Iterable[_T], n: int, fillvalue: _U
-) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
-@overload
-def grouper(  # Deprecated interface
-    iterable: int, n: Iterable[_T]
-) -> Iterator[Tuple[Optional[_T], ...]]: ...
-@overload
-def grouper(  # Deprecated interface
-    iterable: int, n: Iterable[_T], fillvalue: _U
-) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+    iterable: Iterable[_T],
+    n: int,
+    incomplete: str = ...,
+    fillvalue: _U = ...,
+) -> Iterator[tuple[_T | _U, ...]]: ...
 def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ...
 def partition(
-    pred: Optional[Callable[[_T], object]], iterable: Iterable[_T]
-) -> Tuple[Iterator[_T], Iterator[_T]]: ...
-def powerset(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
+    pred: Callable[[_T], object] | None, iterable: Iterable[_T]
+) -> tuple[Iterator[_T], Iterator[_T]]: ...
+def powerset(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
 def unique_everseen(
-    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
+    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
 ) -> Iterator[_T]: ...
 def unique_justseen(
-    iterable: Iterable[_T], key: Optional[Callable[[_T], object]] = ...
+    iterable: Iterable[_T], key: Callable[[_T], object] | None = ...
+) -> Iterator[_T]: ...
+def unique(
+    iterable: Iterable[_T],
+    key: Callable[[_T], object] | None = ...,
+    reverse: bool = False,
 ) -> Iterator[_T]: ...
 @overload
 def iter_except(
-    func: Callable[[], _T], exception: Type[BaseException], first: None = ...
+    func: Callable[[], _T],
+    exception: Type[BaseException] | tuple[Type[BaseException], ...],
+    first: None = ...,
 ) -> Iterator[_T]: ...
 @overload
 def iter_except(
     func: Callable[[], _T],
-    exception: Type[BaseException],
+    exception: Type[BaseException] | tuple[Type[BaseException], ...],
     first: Callable[[], _U],
-) -> Iterator[Union[_T, _U]]: ...
+) -> Iterator[_T | _U]: ...
 @overload
 def first_true(
-    iterable: Iterable[_T], *, pred: Optional[Callable[[_T], object]] = ...
-) -> Optional[_T]: ...
+    iterable: Iterable[_T], *, pred: Callable[[_T], object] | None = ...
+) -> _T | None: ...
 @overload
 def first_true(
     iterable: Iterable[_T],
     default: _U,
-    pred: Optional[Callable[[_T], object]] = ...,
-) -> Union[_T, _U]: ...
+    pred: Callable[[_T], object] | None = ...,
+) -> _T | _U: ...
 def random_product(
     *args: Iterable[_T], repeat: int = ...
-) -> Tuple[_T, ...]: ...
+) -> tuple[_T, ...]: ...
 def random_permutation(
-    iterable: Iterable[_T], r: Optional[int] = ...
-) -> Tuple[_T, ...]: ...
-def random_combination(iterable: Iterable[_T], r: int) -> Tuple[_T, ...]: ...
+    iterable: Iterable[_T], r: int | None = ...
+) -> tuple[_T, ...]: ...
+def random_combination(iterable: Iterable[_T], r: int) -> tuple[_T, ...]: ...
 def random_combination_with_replacement(
     iterable: Iterable[_T], r: int
-) -> Tuple[_T, ...]: ...
+) -> tuple[_T, ...]: ...
 def nth_combination(
     iterable: Iterable[_T], r: int, index: int
-) -> Tuple[_T, ...]: ...
-def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[Union[_T, _U]]: ...
+) -> tuple[_T, ...]: ...
+def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[_T | _U]: ...
 def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ...
+def before_and_after(
+    predicate: Callable[[_T], bool], it: Iterable[_T]
+) -> tuple[Iterator[_T], Iterator[_T]]: ...
+def triplewise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T, _T]]: ...
+def sliding_window(
+    iterable: Iterable[_T], n: int
+) -> Iterator[tuple[_T, ...]]: ...
+def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ...
+def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ...
+def iter_index(
+    iterable: Iterable[_T],
+    value: Any,
+    start: int | None = ...,
+    stop: int | None = ...,
+) -> Iterator[int]: ...
+def sieve(n: int) -> Iterator[int]: ...
+def batched(
+    iterable: Iterable[_T], n: int, *, strict: bool = False
+) -> Iterator[tuple[_T]]: ...
+def transpose(
+    it: Iterable[Iterable[_T]],
+) -> Iterator[tuple[_T, ...]]: ...
+def reshape(
+    matrix: Iterable[Iterable[_T]], cols: int
+) -> Iterator[tuple[_T, ...]]: ...
+def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ...
+def factor(n: int) -> Iterator[int]: ...
+def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ...
+def sum_of_squares(it: Iterable[_T]) -> _T: ...
+def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ...
+def totient(n: int) -> int: ...
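
As a reading aid for the recipes these stubs now cover, here is a minimal itertools-based sketch of two of them with the same call signatures. This is illustrative only, not the vendored implementation; note that at runtime both yield variable-length tuples even where the stub writes tuple[_T].

    from collections import deque
    from itertools import islice
    from typing import Iterable, Iterator, Tuple, TypeVar

    _T = TypeVar("_T")

    def sliding_window(iterable: Iterable[_T], n: int) -> Iterator[Tuple[_T, ...]]:
        # Overlapping windows of length n.
        it = iter(iterable)
        window = deque(islice(it, n), maxlen=n)
        if len(window) == n:
            yield tuple(window)
        for x in it:
            window.append(x)
            yield tuple(window)

    def batched(iterable: Iterable[_T], n: int, *, strict: bool = False) -> Iterator[Tuple[_T, ...]]:
        # Non-overlapping chunks; strict=True rejects a short final chunk.
        it = iter(iterable)
        while batch := tuple(islice(it, n)):
            if strict and len(batch) != n:
                raise ValueError("batched(): incomplete batch")
            yield batch

    assert list(sliding_window("abcd", 2)) == [("a", "b"), ("b", "c"), ("c", "d")]
    assert list(batched(range(5), 2)) == [(0, 1), (2, 3), (4,)]
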
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD b/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD
deleted file mode 100644
index 3267872d45..0000000000
--- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/RECORD
+++ /dev/null
@@ -1,9 +0,0 @@
-__pycache__/ordered_set.cpython-312.pyc,,
-ordered_set-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-ordered_set-3.1.1.dist-info/METADATA,sha256=qEaJM9CbGNixB_jvfohisKbXTUjcef6nCCcBJju6f4U,5357
-ordered_set-3.1.1.dist-info/MIT-LICENSE,sha256=TvRE7qUSUBcd0ols7wgNf3zDEEJWW7kv7WDRySrMBBE,1071
-ordered_set-3.1.1.dist-info/RECORD,,
-ordered_set-3.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ordered_set-3.1.1.dist-info/WHEEL,sha256=DZajD4pwLWue70CAfc7YaxT1wLUciNBvN_TTcvXpltE,110
-ordered_set-3.1.1.dist-info/top_level.txt,sha256=NTY2_aDi1Do9fl3Z9EmWPxasFkUeW2dzO2D3RDx5CfM,12
-ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt b/setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt
deleted file mode 100644
index 1c191eef52..0000000000
--- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-ordered_set
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/INSTALLER b/setuptools/_vendor/ordered_set-4.1.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/zipp-3.7.0.dist-info/INSTALLER
rename to setuptools/_vendor/ordered_set-4.1.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA b/setuptools/_vendor/ordered_set-4.1.0.dist-info/METADATA
similarity index 74%
rename from setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA
rename to setuptools/_vendor/ordered_set-4.1.0.dist-info/METADATA
index 4c64d142b9..7aea136818 100644
--- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/METADATA
+++ b/setuptools/_vendor/ordered_set-4.1.0.dist-info/METADATA
@@ -1,37 +1,46 @@
 Metadata-Version: 2.1
 Name: ordered-set
-Version: 3.1.1
-Summary: A MutableSet that remembers its order, so that every entry has an index.
-Home-page: https://github.com/LuminosoInsight/ordered-set
-Maintainer: Robyn Speer
-Maintainer-email: rspeer@luminoso.com
-License: MIT-LICENSE
-Platform: any
+Version: 4.1.0
+Summary: An OrderedSet is a custom MutableSet that remembers its order, so that every
+Author-email: Elia Robyn Lake 
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.4
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Python: >=2.7
-Description-Content-Type: text/markdown
-License-File: MIT-LICENSE
+Requires-Dist: pytest ; extra == "dev"
+Requires-Dist: black ; extra == "dev"
+Requires-Dist: mypy ; extra == "dev"
+Project-URL: Home, https://github.com/rspeer/ordered-set
+Provides-Extra: dev
 
-[![Travis](https://img.shields.io/travis/LuminosoInsight/ordered-set/master.svg?label=Travis%20CI)](https://travis-ci.org/LuminosoInsight/ordered-set)
-[![Codecov](https://codecov.io/github/LuminosoInsight/ordered-set/badge.svg?branch=master&service=github)](https://codecov.io/github/LuminosoInsight/ordered-set?branch=master)
 [![Pypi](https://img.shields.io/pypi/v/ordered-set.svg)](https://pypi.python.org/pypi/ordered-set)
 
 An OrderedSet is a mutable data structure that is a hybrid of a list and a set.
 It remembers the order of its entries, and every entry has an index number that
 can be looked up.
 
+## Installation
+
+`ordered_set` is available on PyPI and packaged as a wheel. You can list it
+as a dependency of your project, in whatever form that takes.
+
+To install it into your current Python environment:
+
+    pip install ordered-set
+
+To install the code for development, after checking out the repository:
+
+    pip install flit
+    flit install
 
 ## Usage examples
 
@@ -95,8 +104,13 @@ OrderedSet implements `__getstate__` and `__setstate__` so it can be pickled,
 and implements the abstract base classes `collections.MutableSet` and
 `collections.Sequence`.
 
+OrderedSet can be used as a generic collection type, similar to the collections
+in the `typing` module like List, Dict, and Set. For example, you can annotate
+a variable as having the type `OrderedSet[str]` or `OrderedSet[Tuple[int,
+str]]`.
 
-## Interoperability with NumPy and Pandas
+
+## OrderedSet in data science applications
 
 An OrderedSet can be used as a bi-directional mapping between a sparse
 vocabulary and dense index numbers. As of version 3.1, it accepts NumPy arrays
@@ -112,17 +126,11 @@ indexing in reverse) are both aliases for `index` (which handles both cases
 in OrderedSet).
 
 
-## Type hinting
-To use type hinting features install `ordered-set-stubs` package from
-[PyPI](https://pypi.org/project/ordered-set-stubs/):
-
-    $ pip install ordered-set-stubs
-
-
 ## Authors
 
-OrderedSet was implemented by Robyn Speer. Jon Crall contributed changes and
-tests to make it fit the Python set API.
+OrderedSet was implemented by Elia Robyn Lake (maiden name: Robyn Speer).
+Jon Crall contributed changes and tests to make it fit the Python set API.
+Roman Inflianskas added the original type annotations.
 
 
 ## Comparisons
@@ -148,8 +156,3 @@ does not provide the list-like random access features of OrderedSet. You
 would have to convert it to a list in O(N) to look up the index of an entry or
 look up an entry by its index.
 
-
-## Compatibility
-
-OrderedSet is automatically tested on Python 2.7, 3.4, 3.5, 3.6, and 3.7.
-We've checked more informally that it works on PyPy and PyPy3.
diff --git a/setuptools/_vendor/ordered_set-4.1.0.dist-info/RECORD b/setuptools/_vendor/ordered_set-4.1.0.dist-info/RECORD
new file mode 100644
index 0000000000..a9875cde4e
--- /dev/null
+++ b/setuptools/_vendor/ordered_set-4.1.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+ordered_set-4.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ordered_set-4.1.0.dist-info/METADATA,sha256=FqVN_VUTJTCDQ-vtnmXrbgapDjciET-54gSNJ47sro8,5340
+ordered_set-4.1.0.dist-info/RECORD,,
+ordered_set-4.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ordered_set-4.1.0.dist-info/WHEEL,sha256=jPMR_Dzkc4X4icQtmz81lnNY_kAsfog7ry7qoRvYLXw,81
+ordered_set/__init__.py,sha256=ytazgKsyBKi9uFtBt938yXxQtdat1VCC681s9s0CMqg,17146
+ordered_set/__pycache__/__init__.cpython-312.pyc,,
+ordered_set/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/ordered_set-4.1.0.dist-info/REQUESTED b/setuptools/_vendor/ordered_set-4.1.0.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/ordered_set-4.1.0.dist-info/WHEEL b/setuptools/_vendor/ordered_set-4.1.0.dist-info/WHEEL
new file mode 100644
index 0000000000..c727d14823
--- /dev/null
+++ b/setuptools/_vendor/ordered_set-4.1.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.6.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/setuptools/_vendor/ordered_set.py b/setuptools/_vendor/ordered_set/__init__.py
similarity index 72%
rename from setuptools/_vendor/ordered_set.py
rename to setuptools/_vendor/ordered_set/__init__.py
index 14876000de..e86c70ed80 100644
--- a/setuptools/_vendor/ordered_set.py
+++ b/setuptools/_vendor/ordered_set/__init__.py
@@ -1,45 +1,58 @@
 """
 An OrderedSet is a custom MutableSet that remembers its order, so that every
-entry has an index that can be looked up.
+entry has an index that can be looked up. It can also act like a Sequence.
 
 Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger,
 and released under the MIT license.
 """
 import itertools as it
-from collections import deque
-
-try:
-    # Python 3
-    from collections.abc import MutableSet, Sequence
-except ImportError:
-    # Python 2.7
-    from collections import MutableSet, Sequence
+from typing import (
+    Any,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    MutableSet,
+    AbstractSet,
+    Sequence,
+    Set,
+    TypeVar,
+    Union,
+    overload,
+)
 
 SLICE_ALL = slice(None)
-__version__ = "3.1"
+__version__ = "4.1.0"
+
+
+T = TypeVar("T")
 
+# SetLike[T] is either a set of elements of type T, or a sequence, which
+# we will convert to an OrderedSet by adding its elements in order.
+SetLike = Union[AbstractSet[T], Sequence[T]]
+OrderedSetInitializer = Union[AbstractSet[T], Sequence[T], Iterable[T]]
 
-def is_iterable(obj):
+
+def _is_atomic(obj: Any) -> bool:
     """
-    Are we being asked to look up a list of things, instead of a single thing?
-    We check for the `__iter__` attribute so that this can cover types that
-    don't have to be known by this module, such as NumPy arrays.
+    Returns True for objects which are iterable but should not be iterated in
+    the context of indexing an OrderedSet.
+
+    When we index by an iterable, usually that means we're being asked to look
+    up a list of things.
 
-    Strings, however, should be considered as atomic values to look up, not
-    iterables. The same goes for tuples, since they are immutable and therefore
-    valid entries.
+    However, in the case of the .index() method, we shouldn't handle strings
+    and tuples like other iterables. They're not sequences of things to look
+    up, they're the single, atomic thing we're trying to find.
 
-    We don't need to check for the Python 2 `unicode` type, because it doesn't
-    have an `__iter__` attribute anyway.
+    As an example, oset.index('hello') should give the index of 'hello' in an
+    OrderedSet of strings. It shouldn't give the indexes of each individual
+    character.
     """
-    return (
-        hasattr(obj, "__iter__")
-        and not isinstance(obj, str)
-        and not isinstance(obj, tuple)
-    )
+    return isinstance(obj, str) or isinstance(obj, tuple)
 
 
-class OrderedSet(MutableSet, Sequence):
+class OrderedSet(MutableSet[T], Sequence[T]):
     """
     An OrderedSet is a custom MutableSet that remembers its order, so that
     every entry has an index that can be looked up.
@@ -49,11 +62,14 @@ class OrderedSet(MutableSet, Sequence):
         OrderedSet([1, 2, 3])
     """
 
-    def __init__(self, iterable=None):
-        self.items = []
-        self.map = {}
-        if iterable is not None:
-            self |= iterable
+    def __init__(self, initial: OrderedSetInitializer[T] = None):
+        self.items: List[T] = []
+        self.map: Dict[T, int] = {}
+        if initial is not None:
+            # In terms of duck-typing, the default __ior__ is compatible with
+            # the types we use, but it doesn't expect all the types we
+            # support as values for `initial`.
+            self |= initial  # type: ignore
 
     def __len__(self):
         """
@@ -67,6 +83,19 @@ def __len__(self):
         """
         return len(self.items)
 
+    @overload
+    def __getitem__(self, index: slice) -> "OrderedSet[T]":
+        ...
+
+    @overload
+    def __getitem__(self, index: Sequence[int]) -> List[T]:
+        ...
+
+    @overload
+    def __getitem__(self, index: int) -> T:
+        ...
+
+    # concrete implementation
     def __getitem__(self, index):
         """
         Get the item at a given index.
@@ -87,9 +116,9 @@ def __getitem__(self, index):
         """
         if isinstance(index, slice) and index == SLICE_ALL:
             return self.copy()
-        elif is_iterable(index):
+        elif isinstance(index, Iterable):
             return [self.items[i] for i in index]
-        elif hasattr(index, "__index__") or isinstance(index, slice):
+        elif isinstance(index, slice) or hasattr(index, "__index__"):
             result = self.items[index]
             if isinstance(result, list):
                 return self.__class__(result)
@@ -98,7 +127,7 @@ def __getitem__(self, index):
         else:
             raise TypeError("Don't know how to index an OrderedSet by %r" % index)
 
-    def copy(self):
+    def copy(self) -> "OrderedSet[T]":
         """
         Return a shallow copy of this object.
 
@@ -112,9 +141,12 @@ def copy(self):
         """
         return self.__class__(self)
 
+    # Define the gritty details of how an OrderedSet is serialized as a pickle.
+    # We leave off type annotations, because the only code that should interact
+    # with these is a generalized tool such as pickle.
     def __getstate__(self):
         if len(self) == 0:
-            # The state can't be an empty list.
+            # In pickle, the state can't be an empty list.
             # We need to return a truthy value, or else __setstate__ won't be run.
             #
             # This could have been done more gracefully by always putting the state
@@ -130,9 +162,9 @@ def __setstate__(self, state):
         else:
             self.__init__(state)
 
-    def __contains__(self, key):
+    def __contains__(self, key: Any) -> bool:
         """
-        Test if the item is in this ordered set
+        Test if the item is in this ordered set.
 
         Example:
             >>> 1 in OrderedSet([1, 3, 2])
@@ -142,7 +174,10 @@ def __contains__(self, key):
         """
         return key in self.map
 
-    def add(self, key):
+    # Technically type-incompatible with MutableSet, because we return an
+    # int instead of nothing. This is also one of the things that makes
+    # OrderedSet convenient to use.
+    def add(self, key: T) -> int:
         """
         Add `key` as an item to this OrderedSet, then return its index.
 
@@ -163,7 +198,7 @@ def add(self, key):
 
     append = add
 
-    def update(self, sequence):
+    def update(self, sequence: SetLike[T]) -> int:
         """
         Update the set with the given iterable sequence, then return the index
         of the last element inserted.
@@ -175,7 +210,7 @@ def update(self, sequence):
             >>> print(oset)
             OrderedSet([1, 2, 3, 5, 4])
         """
-        item_index = None
+        item_index = 0
         try:
             for item in sequence:
                 item_index = self.add(item)
@@ -185,6 +220,15 @@ def update(self, sequence):
             )
         return item_index
 
+    @overload
+    def index(self, key: Sequence[T]) -> List[int]:
+        ...
+
+    @overload
+    def index(self, key: T) -> int:
+        ...
+
+    # concrete implementation
     def index(self, key):
         """
         Get the index of a given entry, raising an IndexError if it's not
@@ -198,7 +242,7 @@ def index(self, key):
             >>> oset.index(2)
             1
         """
-        if is_iterable(key):
+        if isinstance(key, Iterable) and not _is_atomic(key):
             return [self.index(subkey) for subkey in key]
         return self.map[key]
 
@@ -206,11 +250,12 @@ def index(self, key):
     get_loc = index
     get_indexer = index
 
-    def pop(self):
+    def pop(self, index=-1) -> T:
         """
-        Remove and return the last element from the set.
+        Remove and return item at index (default last).
 
         Raises KeyError if the set is empty.
+        Raises IndexError if index is out of range.
 
         Example:
             >>> oset = OrderedSet([1, 2, 3])
@@ -220,12 +265,12 @@ def pop(self):
         if not self.items:
             raise KeyError("Set is empty")
 
-        elem = self.items[-1]
-        del self.items[-1]
+        elem = self.items[index]
+        del self.items[index]
         del self.map[elem]
         return elem
 
-    def discard(self, key):
+    def discard(self, key: T) -> None:
         """
         Remove an element.  Do not raise an exception if absent.
 
@@ -249,14 +294,14 @@ def discard(self, key):
                 if v >= i:
                     self.map[k] = v - 1
 
-    def clear(self):
+    def clear(self) -> None:
         """
         Remove all items from this OrderedSet.
         """
         del self.items[:]
         self.map.clear()
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[T]:
         """
         Example:
             >>> list(iter(OrderedSet([1, 2, 3])))
@@ -264,7 +309,7 @@ def __iter__(self):
         """
         return iter(self.items)
 
-    def __reversed__(self):
+    def __reversed__(self) -> Iterator[T]:
         """
         Example:
             >>> list(reversed(OrderedSet([1, 2, 3])))
@@ -272,12 +317,12 @@ def __reversed__(self):
         """
         return reversed(self.items)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         if not self:
             return "%s()" % (self.__class__.__name__,)
         return "%s(%r)" % (self.__class__.__name__, list(self))
 
-    def __eq__(self, other):
+    def __eq__(self, other: Any) -> bool:
         """
         Returns true if the containers have the same items. If `other` is a
         Sequence, then order is checked, otherwise it is ignored.
@@ -293,9 +338,7 @@ def __eq__(self, other):
             >>> oset == OrderedSet([3, 2, 1])
             False
         """
-        # In Python 2 deque is not a Sequence, so treat it as one for
-        # consistent behavior with Python 3.
-        if isinstance(other, (Sequence, deque)):
+        if isinstance(other, Sequence):
             # Check that this OrderedSet contains the same elements, in the
             # same order, as the other object.
             return list(self) == list(other)
@@ -307,7 +350,7 @@ def __eq__(self, other):
         else:
             return set(self) == other_as_set
 
-    def union(self, *sets):
+    def union(self, *sets: SetLike[T]) -> "OrderedSet[T]":
         """
         Combines all unique items.
         Each item's order is defined by its first appearance.
@@ -321,16 +364,18 @@ def union(self, *sets):
             >>> oset | {10}
             OrderedSet([3, 1, 4, 5, 2, 0, 10])
         """
-        cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
+        cls: type = OrderedSet
+        if isinstance(self, OrderedSet):
+            cls = self.__class__
         containers = map(list, it.chain([self], sets))
         items = it.chain.from_iterable(containers)
         return cls(items)
 
-    def __and__(self, other):
+    def __and__(self, other: SetLike[T]) -> "OrderedSet[T]":
         # the parent implementation of this is backwards
         return self.intersection(other)
 
-    def intersection(self, *sets):
+    def intersection(self, *sets: SetLike[T]) -> "OrderedSet[T]":
         """
         Returns elements in common between all sets. Order is defined only
         by the first set.
@@ -344,15 +389,16 @@ def intersection(self, *sets):
             >>> oset.intersection()
             OrderedSet([1, 2, 3])
         """
-        cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
+        cls: type = OrderedSet
+        items: OrderedSetInitializer[T] = self
+        if isinstance(self, OrderedSet):
+            cls = self.__class__
         if sets:
             common = set.intersection(*map(set, sets))
             items = (item for item in self if item in common)
-        else:
-            items = self
         return cls(items)
 
-    def difference(self, *sets):
+    def difference(self, *sets: SetLike[T]) -> "OrderedSet[T]":
         """
         Returns all elements that are in this set but not the others.
 
@@ -367,14 +413,13 @@ def difference(self, *sets):
             OrderedSet([1, 2, 3])
         """
         cls = self.__class__
+        items: OrderedSetInitializer[T] = self
         if sets:
             other = set.union(*map(set, sets))
             items = (item for item in self if item not in other)
-        else:
-            items = self
         return cls(items)
 
-    def issubset(self, other):
+    def issubset(self, other: SetLike[T]) -> bool:
         """
         Report whether another set contains this set.
 
@@ -390,7 +435,7 @@ def issubset(self, other):
             return False
         return all(item in other for item in self)
 
-    def issuperset(self, other):
+    def issuperset(self, other: SetLike[T]) -> bool:
         """
         Report whether this set contains another set.
 
@@ -406,7 +451,7 @@ def issuperset(self, other):
             return False
         return all(item in self for item in other)
 
-    def symmetric_difference(self, other):
+    def symmetric_difference(self, other: SetLike[T]) -> "OrderedSet[T]":
         """
         Return the symmetric difference of two OrderedSets as a new set.
         That is, the new set will contain all elements that are in exactly
@@ -421,12 +466,14 @@ def symmetric_difference(self, other):
             >>> this.symmetric_difference(other)
             OrderedSet([4, 5, 9, 2])
         """
-        cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
+        cls: type = OrderedSet
+        if isinstance(self, OrderedSet):
+            cls = self.__class__
         diff1 = cls(self).difference(other)
         diff2 = cls(other).difference(self)
         return diff1.union(diff2)
 
-    def _update_items(self, items):
+    def _update_items(self, items: list) -> None:
         """
         Replace the 'items' list of this OrderedSet with a new one, updating
         self.map accordingly.
@@ -434,7 +481,7 @@ def _update_items(self, items):
         self.items = items
         self.map = {item: idx for (idx, item) in enumerate(items)}
 
-    def difference_update(self, *sets):
+    def difference_update(self, *sets: SetLike[T]) -> None:
         """
         Update this OrderedSet to remove items from one or more other sets.
 
@@ -449,12 +496,13 @@ def difference_update(self, *sets):
             >>> print(this)
             OrderedSet([3, 5])
         """
-        items_to_remove = set()
+        items_to_remove = set()  # type: Set[T]
         for other in sets:
-            items_to_remove |= set(other)
+            items_as_set = set(other)  # type: Set[T]
+            items_to_remove |= items_as_set
         self._update_items([item for item in self.items if item not in items_to_remove])
 
-    def intersection_update(self, other):
+    def intersection_update(self, other: SetLike[T]) -> None:
         """
         Update this OrderedSet to keep only items in another set, preserving
         their order in this set.
@@ -469,7 +517,7 @@ def intersection_update(self, other):
         other = set(other)
         self._update_items([item for item in self.items if item in other])
 
-    def symmetric_difference_update(self, other):
+    def symmetric_difference_update(self, other: SetLike[T]) -> None:
         """
         Update this OrderedSet to remove items from another set, then
         add items from the other set that were not present in this set.
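
A short usage sketch of the behaviors touched above, assuming the upstream ordered_set package (the vendored copy lives under setuptools._vendor): strings and tuples are atomic keys for index(), other iterables are batch lookups, and pop() now accepts an index.

    from ordered_set import OrderedSet

    oset = OrderedSet(["alpha", "beta", "gamma"])

    # A string is atomic: this looks up one entry, not its characters.
    assert oset.index("beta") == 1
    # A non-atomic iterable is a batch of keys (cf. the get_indexer alias).
    assert oset.index(["gamma", "alpha"]) == [2, 0]

    # pop() accepts an index, defaulting to the last element.
    assert oset.pop(0) == "alpha"
    assert oset == OrderedSet(["beta", "gamma"])
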
diff --git a/setuptools/_vendor/ordered_set/py.typed b/setuptools/_vendor/ordered_set/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/packaging-24.0.dist-info/RECORD b/setuptools/_vendor/packaging-24.0.dist-info/RECORD
deleted file mode 100644
index bcf796c2f4..0000000000
--- a/setuptools/_vendor/packaging-24.0.dist-info/RECORD
+++ /dev/null
@@ -1,37 +0,0 @@
-packaging-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-packaging-24.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
-packaging-24.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
-packaging-24.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
-packaging-24.0.dist-info/METADATA,sha256=0dESdhY_wHValuOrbgdebiEw04EbX4dkujlxPdEsFus,3203
-packaging-24.0.dist-info/RECORD,,
-packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496
-packaging/__pycache__/__init__.cpython-312.pyc,,
-packaging/__pycache__/_elffile.cpython-312.pyc,,
-packaging/__pycache__/_manylinux.cpython-312.pyc,,
-packaging/__pycache__/_musllinux.cpython-312.pyc,,
-packaging/__pycache__/_parser.cpython-312.pyc,,
-packaging/__pycache__/_structures.cpython-312.pyc,,
-packaging/__pycache__/_tokenizer.cpython-312.pyc,,
-packaging/__pycache__/markers.cpython-312.pyc,,
-packaging/__pycache__/metadata.cpython-312.pyc,,
-packaging/__pycache__/requirements.cpython-312.pyc,,
-packaging/__pycache__/specifiers.cpython-312.pyc,,
-packaging/__pycache__/tags.cpython-312.pyc,,
-packaging/__pycache__/utils.cpython-312.pyc,,
-packaging/__pycache__/version.cpython-312.pyc,,
-packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
-packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590
-packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676
-packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347
-packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
-packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
-packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
-packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036
-packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
-packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784
-packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
-packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
-packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/INSTALLER b/setuptools/_vendor/packaging-24.1.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/packaging-24.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/packaging-24.0.dist-info/LICENSE b/setuptools/_vendor/packaging-24.1.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/packaging-24.0.dist-info/LICENSE
rename to setuptools/_vendor/packaging-24.1.dist-info/LICENSE
diff --git a/setuptools/_vendor/packaging-24.0.dist-info/LICENSE.APACHE b/setuptools/_vendor/packaging-24.1.dist-info/LICENSE.APACHE
similarity index 100%
rename from setuptools/_vendor/packaging-24.0.dist-info/LICENSE.APACHE
rename to setuptools/_vendor/packaging-24.1.dist-info/LICENSE.APACHE
diff --git a/setuptools/_vendor/packaging-24.0.dist-info/LICENSE.BSD b/setuptools/_vendor/packaging-24.1.dist-info/LICENSE.BSD
similarity index 100%
rename from setuptools/_vendor/packaging-24.0.dist-info/LICENSE.BSD
rename to setuptools/_vendor/packaging-24.1.dist-info/LICENSE.BSD
diff --git a/setuptools/_vendor/packaging-24.0.dist-info/METADATA b/setuptools/_vendor/packaging-24.1.dist-info/METADATA
similarity index 97%
rename from setuptools/_vendor/packaging-24.0.dist-info/METADATA
rename to setuptools/_vendor/packaging-24.1.dist-info/METADATA
index 10ab4390a9..255dc46e0e 100644
--- a/setuptools/_vendor/packaging-24.0.dist-info/METADATA
+++ b/setuptools/_vendor/packaging-24.1.dist-info/METADATA
@@ -1,9 +1,9 @@
 Metadata-Version: 2.1
 Name: packaging
-Version: 24.0
+Version: 24.1
 Summary: Core utilities for Python packages
 Author-email: Donald Stufft <donald@stufft.io>
-Requires-Python: >=3.7
+Requires-Python: >=3.8
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
@@ -12,12 +12,12 @@ Classifier: License :: OSI Approved :: BSD License
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Typing :: Typed
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/RECORD b/setuptools/_vendor/packaging-24.1.dist-info/RECORD
new file mode 100644
index 0000000000..2b1e6bd4db
--- /dev/null
+++ b/setuptools/_vendor/packaging-24.1.dist-info/RECORD
@@ -0,0 +1,37 @@
+packaging-24.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+packaging-24.1.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-24.1.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-24.1.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-24.1.dist-info/METADATA,sha256=X3ooO3WnCfzNSBrqQjefCD1POAF1M2WSLmsHMgQlFdk,3204
+packaging-24.1.dist-info/RECORD,,
+packaging-24.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging-24.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+packaging/__init__.py,sha256=dtw2bNmWCQ9WnMoK3bk_elL1svSlikXtLpZhCFIB9SE,496
+packaging/__pycache__/__init__.cpython-312.pyc,,
+packaging/__pycache__/_elffile.cpython-312.pyc,,
+packaging/__pycache__/_manylinux.cpython-312.pyc,,
+packaging/__pycache__/_musllinux.cpython-312.pyc,,
+packaging/__pycache__/_parser.cpython-312.pyc,,
+packaging/__pycache__/_structures.cpython-312.pyc,,
+packaging/__pycache__/_tokenizer.cpython-312.pyc,,
+packaging/__pycache__/markers.cpython-312.pyc,,
+packaging/__pycache__/metadata.cpython-312.pyc,,
+packaging/__pycache__/requirements.cpython-312.pyc,,
+packaging/__pycache__/specifiers.cpython-312.pyc,,
+packaging/__pycache__/tags.cpython-312.pyc,,
+packaging/__pycache__/utils.cpython-312.pyc,,
+packaging/__pycache__/version.cpython-312.pyc,,
+packaging/_elffile.py,sha256=_LcJW4YNKywYsl4169B2ukKRqwxjxst_8H0FRVQKlz8,3282
+packaging/_manylinux.py,sha256=Xo4V0PZz8sbuVCbTni0t1CR0AHeir_7ib4lTmV8scD4,9586
+packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
+packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
+packaging/markers.py,sha256=dWKSqn5Sp-jDmOG-W3GfLHKjwhf1IsznbT71VlBoB5M,10671
+packaging/metadata.py,sha256=KINuSkJ12u-SyoKNTy_pHNGAfMUtxNvZ53qA1zAKcKI,32349
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
+packaging/specifiers.py,sha256=rjpc3hoJuunRIT6DdH7gLTnQ5j5QKSuWjoTC5sdHtHI,39714
+packaging/tags.py,sha256=y8EbheOu9WS7s-MebaXMcHMF-jzsA_C1Lz5XRTiSy4w,18883
+packaging/utils.py,sha256=NAdYUwnlAOpkat_RthavX8a07YuVxgGL_vwrx73GSDM,5287
+packaging/version.py,sha256=V0H3SOj_8werrvorrb6QDLRhlcqSErNTTkCvvfszhDI,16198
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/REQUESTED b/setuptools/_vendor/packaging-24.1.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/packaging-24.0.dist-info/WHEEL b/setuptools/_vendor/packaging-24.1.dist-info/WHEEL
similarity index 100%
rename from setuptools/_vendor/packaging-24.0.dist-info/WHEEL
rename to setuptools/_vendor/packaging-24.1.dist-info/WHEEL
diff --git a/setuptools/_vendor/packaging/__init__.py b/setuptools/_vendor/packaging/__init__.py
index e7c0aa12ca..9ba41d8357 100644
--- a/setuptools/_vendor/packaging/__init__.py
+++ b/setuptools/_vendor/packaging/__init__.py
@@ -6,7 +6,7 @@
 __summary__ = "Core utilities for Python packages"
 __uri__ = "https://github.com/pypa/packaging"
 
-__version__ = "24.0"
+__version__ = "24.1"
 
 __author__ = "Donald Stufft and individual contributors"
 __email__ = "donald@stufft.io"
diff --git a/setuptools/_vendor/packaging/_elffile.py b/setuptools/_vendor/packaging/_elffile.py
index 6fb19b30bb..f7a02180bf 100644
--- a/setuptools/_vendor/packaging/_elffile.py
+++ b/setuptools/_vendor/packaging/_elffile.py
@@ -8,10 +8,12 @@
 ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
 """
 
+from __future__ import annotations
+
 import enum
 import os
 import struct
-from typing import IO, Optional, Tuple
+from typing import IO
 
 
 class ELFInvalid(ValueError):
@@ -87,11 +89,11 @@ def __init__(self, f: IO[bytes]) -> None:
         except struct.error as e:
             raise ELFInvalid("unable to parse machine and section information") from e
 
-    def _read(self, fmt: str) -> Tuple[int, ...]:
+    def _read(self, fmt: str) -> tuple[int, ...]:
         return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
 
     @property
-    def interpreter(self) -> Optional[str]:
+    def interpreter(self) -> str | None:
         """
         The path recorded in the ``PT_INTERP`` section header.
         """
diff --git a/setuptools/_vendor/packaging/_manylinux.py b/setuptools/_vendor/packaging/_manylinux.py
index ad62505f3f..08f651fbd8 100644
--- a/setuptools/_vendor/packaging/_manylinux.py
+++ b/setuptools/_vendor/packaging/_manylinux.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import collections
 import contextlib
 import functools
@@ -5,7 +7,7 @@
 import re
 import sys
 import warnings
-from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
+from typing import Generator, Iterator, NamedTuple, Sequence
 
 from ._elffile import EIClass, EIData, ELFFile, EMachine
 
@@ -17,7 +19,7 @@
 # `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
 # as the type for `path` until then.
 @contextlib.contextmanager
-def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
+def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]:
     try:
         with open(path, "rb") as f:
             yield ELFFile(f)
@@ -72,7 +74,7 @@ def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
 # For now, guess what the highest minor version might be, assume it will
 # be 50 for testing. Once this actually happens, update the dictionary
 # with the actual value.
-_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)
 
 
 class _GLibCVersion(NamedTuple):
@@ -80,7 +82,7 @@ class _GLibCVersion(NamedTuple):
     minor: int
 
 
-def _glibc_version_string_confstr() -> Optional[str]:
+def _glibc_version_string_confstr() -> str | None:
     """
     Primary implementation of glibc_version_string using os.confstr.
     """
@@ -90,7 +92,7 @@ def _glibc_version_string_confstr() -> Optional[str]:
     # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
     try:
         # Should be a string like "glibc 2.17".
-        version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
+        version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
         assert version_string is not None
         _, version = version_string.rsplit()
     except (AssertionError, AttributeError, OSError, ValueError):
@@ -99,7 +101,7 @@ def _glibc_version_string_confstr() -> Optional[str]:
     return version
 
 
-def _glibc_version_string_ctypes() -> Optional[str]:
+def _glibc_version_string_ctypes() -> str | None:
     """
     Fallback implementation of glibc_version_string using ctypes.
     """
@@ -143,12 +145,12 @@ def _glibc_version_string_ctypes() -> Optional[str]:
     return version_str
 
 
-def _glibc_version_string() -> Optional[str]:
+def _glibc_version_string() -> str | None:
     """Returns glibc version string, or None if not using glibc."""
     return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
 
 
-def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+def _parse_glibc_version(version_str: str) -> tuple[int, int]:
     """Parse glibc version.
 
     We use a regexp instead of str.split because we want to discard any
@@ -167,8 +169,8 @@ def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
     return int(m.group("major")), int(m.group("minor"))
 
 
-@functools.lru_cache()
-def _get_glibc_version() -> Tuple[int, int]:
+@functools.lru_cache
+def _get_glibc_version() -> tuple[int, int]:
     version_str = _glibc_version_string()
     if version_str is None:
         return (-1, -1)
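
The glibc detection above reduces to matching a leading major.minor pair in a "glibc 2.17"-style string and discarding any trailing vendor junk. A hypothetical standalone helper mirroring _parse_glibc_version (a sketch, not the vendored code):

    import re
    from typing import Tuple

    def parse_glibc_version(version_str: str) -> Tuple[int, int]:
        # Keep only the leading major.minor; (-1, -1) signals "unparseable".
        m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
        if m is None:
            return (-1, -1)
        return int(m.group("major")), int(m.group("minor"))

    assert parse_glibc_version("2.17") == (2, 17)
    assert parse_glibc_version("2.20-2014.11") == (2, 20)  # trailing junk discarded
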
diff --git a/setuptools/_vendor/packaging/_musllinux.py b/setuptools/_vendor/packaging/_musllinux.py
index 86419df9d7..d2bf30b563 100644
--- a/setuptools/_vendor/packaging/_musllinux.py
+++ b/setuptools/_vendor/packaging/_musllinux.py
@@ -4,11 +4,13 @@
 linked against musl, and what musl version is used.
 """
 
+from __future__ import annotations
+
 import functools
 import re
 import subprocess
 import sys
-from typing import Iterator, NamedTuple, Optional, Sequence
+from typing import Iterator, NamedTuple, Sequence
 
 from ._elffile import ELFFile
 
@@ -18,7 +20,7 @@ class _MuslVersion(NamedTuple):
     minor: int
 
 
-def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
+def _parse_musl_version(output: str) -> _MuslVersion | None:
     lines = [n for n in (n.strip() for n in output.splitlines()) if n]
     if len(lines) < 2 or lines[0][:4] != "musl":
         return None
@@ -28,8 +30,8 @@ def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
     return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
 
 
-@functools.lru_cache()
-def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
+@functools.lru_cache
+def _get_musl_version(executable: str) -> _MuslVersion | None:
     """Detect currently-running musl runtime version.
 
     This is done by checking the specified executable's dynamic linking
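
Musl detection follows the same shape, scraping the dynamic linker's version banner; a self-contained sketch of the parsing step shown in this hunk (the names here are hypothetical, mirroring _parse_musl_version):

    import re
    from typing import NamedTuple, Optional

    class MuslVersion(NamedTuple):
        major: int
        minor: int

    def parse_musl_version(output: str) -> Optional[MuslVersion]:
        # The banner looks like: "musl libc (x86_64)\nVersion 1.2.4\n..."
        lines = [n for n in (n.strip() for n in output.splitlines()) if n]
        if len(lines) < 2 or lines[0][:4] != "musl":
            return None
        m = re.match(r"Version (\d+)\.(\d+)", lines[1])
        if m is None:
            return None
        return MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))

    assert parse_musl_version("musl libc (x86_64)\nVersion 1.2.4\n") == MuslVersion(1, 2)
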
diff --git a/setuptools/_vendor/packaging/_parser.py b/setuptools/_vendor/packaging/_parser.py
index 684df75457..c1238c06ea 100644
--- a/setuptools/_vendor/packaging/_parser.py
+++ b/setuptools/_vendor/packaging/_parser.py
@@ -1,11 +1,13 @@
 """Handwritten parser of dependency specifiers.
 
-The docstring for each __parse_* function contains ENBF-inspired grammar representing
+The docstring for each __parse_* function contains EBNF-inspired grammar representing
 the implementation.
 """
 
+from __future__ import annotations
+
 import ast
-from typing import Any, List, NamedTuple, Optional, Tuple, Union
+from typing import NamedTuple, Sequence, Tuple, Union
 
 from ._tokenizer import DEFAULT_RULES, Tokenizer
 
@@ -41,20 +43,16 @@ def serialize(self) -> str:
 
 MarkerVar = Union[Variable, Value]
 MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
-# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
-# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
-# mypy does not support recursive type definition
-# https://github.com/python/mypy/issues/731
-MarkerAtom = Any
-MarkerList = List[Any]
+MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]]
+MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]]
 
 
 class ParsedRequirement(NamedTuple):
     name: str
     url: str
-    extras: List[str]
+    extras: list[str]
     specifier: str
-    marker: Optional[MarkerList]
+    marker: MarkerList | None
 
 
 # --------------------------------------------------------------------------------------
@@ -87,7 +85,7 @@ def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
 
 def _parse_requirement_details(
     tokenizer: Tokenizer,
-) -> Tuple[str, str, Optional[MarkerList]]:
+) -> tuple[str, str, MarkerList | None]:
     """
     requirement_details = AT URL (WS requirement_marker?)?
                         | specifier WS? (requirement_marker)?
@@ -156,7 +154,7 @@ def _parse_requirement_marker(
     return marker
 
 
-def _parse_extras(tokenizer: Tokenizer) -> List[str]:
+def _parse_extras(tokenizer: Tokenizer) -> list[str]:
     """
     extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
     """
@@ -175,11 +173,11 @@ def _parse_extras(tokenizer: Tokenizer) -> List[str]:
     return extras
 
 
-def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
+def _parse_extras_list(tokenizer: Tokenizer) -> list[str]:
     """
     extras_list = identifier (wsp* ',' wsp* identifier)*
     """
-    extras: List[str] = []
+    extras: list[str] = []
 
     if not tokenizer.check("IDENTIFIER"):
         return extras
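
These private parser pieces back the public packaging.requirements.Requirement API; a quick usage sketch of the structures they produce:

    from packaging.requirements import Requirement

    req = Requirement('name[extra1,extra2]>=1.0; python_version >= "3.8"')
    assert req.name == "name"
    assert req.extras == {"extra1", "extra2"}
    assert str(req.specifier) == ">=1.0"
    assert req.marker is not None  # a Marker over the MarkerList type above
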
diff --git a/setuptools/_vendor/packaging/_tokenizer.py b/setuptools/_vendor/packaging/_tokenizer.py
index dd0d648d49..89d041605c 100644
--- a/setuptools/_vendor/packaging/_tokenizer.py
+++ b/setuptools/_vendor/packaging/_tokenizer.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
 import contextlib
 import re
 from dataclasses import dataclass
-from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
+from typing import Iterator, NoReturn
 
 from .specifiers import Specifier
 
@@ -21,7 +23,7 @@ def __init__(
         message: str,
         *,
         source: str,
-        span: Tuple[int, int],
+        span: tuple[int, int],
     ) -> None:
         self.span = span
         self.message = message
@@ -34,7 +36,7 @@ def __str__(self) -> str:
         return "\n    ".join([self.message, self.source, marker])
 
 
-DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
+DEFAULT_RULES: dict[str, str | re.Pattern[str]] = {
     "LEFT_PARENTHESIS": r"\(",
     "RIGHT_PARENTHESIS": r"\)",
     "LEFT_BRACKET": r"\[",
@@ -96,13 +98,13 @@ def __init__(
         self,
         source: str,
         *,
-        rules: "Dict[str, Union[str, re.Pattern[str]]]",
+        rules: dict[str, str | re.Pattern[str]],
     ) -> None:
         self.source = source
-        self.rules: Dict[str, re.Pattern[str]] = {
+        self.rules: dict[str, re.Pattern[str]] = {
             name: re.compile(pattern) for name, pattern in rules.items()
         }
-        self.next_token: Optional[Token] = None
+        self.next_token: Token | None = None
         self.position = 0
 
     def consume(self, name: str) -> None:
@@ -154,8 +156,8 @@ def raise_syntax_error(
         self,
         message: str,
         *,
-        span_start: Optional[int] = None,
-        span_end: Optional[int] = None,
+        span_start: int | None = None,
+        span_end: int | None = None,
     ) -> NoReturn:
         """Raise ParserSyntaxError at the given position."""
         span = (
diff --git a/setuptools/_vendor/packaging/markers.py b/setuptools/_vendor/packaging/markers.py
index 8b98fca723..7ac7bb69a5 100644
--- a/setuptools/_vendor/packaging/markers.py
+++ b/setuptools/_vendor/packaging/markers.py
@@ -2,20 +2,16 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
+from __future__ import annotations
+
 import operator
 import os
 import platform
 import sys
-from typing import Any, Callable, Dict, List, Optional, Tuple, Union
-
-from ._parser import (
-    MarkerAtom,
-    MarkerList,
-    Op,
-    Value,
-    Variable,
-    parse_marker as _parse_marker,
-)
+from typing import Any, Callable, TypedDict, cast
+
+from ._parser import MarkerAtom, MarkerList, Op, Value, Variable
+from ._parser import parse_marker as _parse_marker
 from ._tokenizer import ParserSyntaxError
 from .specifiers import InvalidSpecifier, Specifier
 from .utils import canonicalize_name
@@ -50,6 +46,78 @@ class UndefinedEnvironmentName(ValueError):
     """
 
 
+class Environment(TypedDict):
+    implementation_name: str
+    """The implementation's identifier, e.g. ``'cpython'``."""
+
+    implementation_version: str
+    """
+    The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or
+    ``'7.3.13'`` for PyPy3.10 v7.3.13.
+    """
+
+    os_name: str
+    """
+    The value of :py:data:`os.name`. The name of the operating system dependent module
+    imported, e.g. ``'posix'``.
+    """
+
+    platform_machine: str
+    """
+    Returns the machine type, e.g. ``'i386'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    platform_release: str
+    """
+    The system's release, e.g. ``'2.2.0'`` or ``'NT'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    platform_system: str
+    """
+    The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    platform_version: str
+    """
+    The system's release version, e.g. ``'#3 on degas'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    python_full_version: str
+    """
+    The Python version as string ``'major.minor.patchlevel'``.
+
+    Note that unlike the Python :py:data:`sys.version`, this value will always include
+    the patchlevel (it defaults to 0).
+    """
+
+    platform_python_implementation: str
+    """
+    A string identifying the Python implementation, e.g. ``'CPython'``.
+    """
+
+    python_version: str
+    """The Python version as string ``'major.minor'``."""
+
+    sys_platform: str
+    """
+    This string contains a platform identifier that can be used to append
+    platform-specific components to :py:data:`sys.path`, for instance.
+
+    For Unix systems, except on Linux and AIX, this is the lowercased OS name as
+    returned by ``uname -s`` with the first part of the version as returned by
+    ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python
+    was built.
+    """
+
+
 def _normalize_extra_values(results: Any) -> Any:
     """
     Normalize extra values.
@@ -67,9 +135,8 @@ def _normalize_extra_values(results: Any) -> Any:
 
 
 def _format_marker(
-    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
+    marker: list[str] | MarkerAtom | str, first: bool | None = True
 ) -> str:
-
     assert isinstance(marker, (list, tuple, str))
 
     # Sometimes we have a structure like [[...]] which is a single item list
@@ -95,7 +162,7 @@ def _format_marker(
         return marker
 
 
-_operators: Dict[str, Operator] = {
+_operators: dict[str, Operator] = {
     "in": lambda lhs, rhs: lhs in rhs,
     "not in": lambda lhs, rhs: lhs not in rhs,
     "<": operator.lt,
@@ -115,14 +182,14 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
     else:
         return spec.contains(lhs, prereleases=True)
 
-    oper: Optional[Operator] = _operators.get(op.serialize())
+    oper: Operator | None = _operators.get(op.serialize())
     if oper is None:
         raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
 
     return oper(lhs, rhs)
 
 
-def _normalize(*values: str, key: str) -> Tuple[str, ...]:
+def _normalize(*values: str, key: str) -> tuple[str, ...]:
     # PEP 685 – Comparison of extra names for optional distribution dependencies
     # https://peps.python.org/pep-0685/
     # > When comparing extra names, tools MUST normalize the names being
@@ -134,8 +201,8 @@ def _normalize(*values: str, key: str) -> Tuple[str, ...]:
     return values
 
 
-def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
-    groups: List[List[bool]] = [[]]
+def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool:
+    groups: list[list[bool]] = [[]]
 
     for marker in markers:
         assert isinstance(marker, (list, tuple, str))
@@ -164,7 +231,7 @@ def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
     return any(all(item) for item in groups)
 
 
-def format_full_version(info: "sys._version_info") -> str:
+def format_full_version(info: sys._version_info) -> str:
     version = "{0.major}.{0.minor}.{0.micro}".format(info)
     kind = info.releaselevel
     if kind != "final":
@@ -172,7 +239,7 @@ def format_full_version(info: "sys._version_info") -> str:
     return version
 
 
-def default_environment() -> Dict[str, str]:
+def default_environment() -> Environment:
     iver = format_full_version(sys.implementation.version)
     implementation_name = sys.implementation.name
     return {
@@ -231,7 +298,7 @@ def __eq__(self, other: Any) -> bool:
 
         return str(self) == str(other)
 
-    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+    def evaluate(self, environment: dict[str, str] | None = None) -> bool:
         """Evaluate a marker.
 
         Return the boolean from evaluating the given marker against the
@@ -240,8 +307,14 @@ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
 
         The environment is determined from the current Python process.
         """
-        current_environment = default_environment()
+        current_environment = cast("dict[str, str]", default_environment())
         current_environment["extra"] = ""
+        # Work around platform.python_version() returning something that is not PEP 440
+        # compliant for non-tagged Python builds. We preserve default_environment()'s
+        # behavior of returning platform.python_version() verbatim, and leave it to the
+        # caller to provide a syntactically valid version if they want to override it.
+        if current_environment["python_full_version"].endswith("+"):
+            current_environment["python_full_version"] += "local"
         if environment is not None:
             current_environment.update(environment)
             # The API used to allow setting extra to None. We need to handle this
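
The new Environment TypedDict documents exactly the keys that default_environment() returns; evaluation itself is unchanged, with caller overrides merged on top of the detected environment. A small usage sketch:

    from packaging.markers import Marker, default_environment

    env = default_environment()
    assert "python_full_version" in env

    marker = Marker('python_version >= "3.8" and sys_platform != "win32"')
    marker.evaluate()  # evaluated against the running interpreter
    # Overrides win over the detected values:
    assert marker.evaluate({"python_version": "3.7"}) is False
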
diff --git a/setuptools/_vendor/packaging/metadata.py b/setuptools/_vendor/packaging/metadata.py
index fb27493079..eb8dc844d2 100644
--- a/setuptools/_vendor/packaging/metadata.py
+++ b/setuptools/_vendor/packaging/metadata.py
@@ -1,50 +1,31 @@
+from __future__ import annotations
+
 import email.feedparser
 import email.header
 import email.message
 import email.parser
 import email.policy
-import sys
 import typing
 from typing import (
     Any,
     Callable,
-    Dict,
     Generic,
-    List,
-    Optional,
-    Tuple,
-    Type,
-    Union,
+    Literal,
+    TypedDict,
     cast,
 )
 
-from . import requirements, specifiers, utils, version as version_module
+from . import requirements, specifiers, utils
+from . import version as version_module
 
 T = typing.TypeVar("T")
-if sys.version_info[:2] >= (3, 8):  # pragma: no cover
-    from typing import Literal, TypedDict
-else:  # pragma: no cover
-    if typing.TYPE_CHECKING:
-        from typing_extensions import Literal, TypedDict
-    else:
-        try:
-            from typing_extensions import Literal, TypedDict
-        except ImportError:
-
-            class Literal:
-                def __init_subclass__(*_args, **_kwargs):
-                    pass
-
-            class TypedDict:
-                def __init_subclass__(*_args, **_kwargs):
-                    pass
 
 
 try:
     ExceptionGroup
 except NameError:  # pragma: no cover
 
-    class ExceptionGroup(Exception):  # noqa: N818
+    class ExceptionGroup(Exception):
         """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
 
         If :external:exc:`ExceptionGroup` is already defined by Python itself,
@@ -52,9 +33,9 @@ class ExceptionGroup(Exception):  # noqa: N818
         """
 
         message: str
-        exceptions: List[Exception]
+        exceptions: list[Exception]
 
-        def __init__(self, message: str, exceptions: List[Exception]) -> None:
+        def __init__(self, message: str, exceptions: list[Exception]) -> None:
             self.message = message
             self.exceptions = exceptions
 
@@ -100,32 +81,32 @@ class RawMetadata(TypedDict, total=False):
     metadata_version: str
     name: str
     version: str
-    platforms: List[str]
+    platforms: list[str]
     summary: str
     description: str
-    keywords: List[str]
+    keywords: list[str]
     home_page: str
     author: str
     author_email: str
     license: str
 
     # Metadata 1.1 - PEP 314
-    supported_platforms: List[str]
+    supported_platforms: list[str]
     download_url: str
-    classifiers: List[str]
-    requires: List[str]
-    provides: List[str]
-    obsoletes: List[str]
+    classifiers: list[str]
+    requires: list[str]
+    provides: list[str]
+    obsoletes: list[str]
 
     # Metadata 1.2 - PEP 345
     maintainer: str
     maintainer_email: str
-    requires_dist: List[str]
-    provides_dist: List[str]
-    obsoletes_dist: List[str]
+    requires_dist: list[str]
+    provides_dist: list[str]
+    obsoletes_dist: list[str]
     requires_python: str
-    requires_external: List[str]
-    project_urls: Dict[str, str]
+    requires_external: list[str]
+    project_urls: dict[str, str]
 
     # Metadata 2.0
     # PEP 426 attempted to completely revamp the metadata format
@@ -138,10 +119,10 @@ class RawMetadata(TypedDict, total=False):
 
     # Metadata 2.1 - PEP 566
     description_content_type: str
-    provides_extra: List[str]
+    provides_extra: list[str]
 
     # Metadata 2.2 - PEP 643
-    dynamic: List[str]
+    dynamic: list[str]
 
     # Metadata 2.3 - PEP 685
     # No new fields were added in PEP 685, just some edge cases were
@@ -185,12 +166,12 @@ class RawMetadata(TypedDict, total=False):
 }
 
 
-def _parse_keywords(data: str) -> List[str]:
+def _parse_keywords(data: str) -> list[str]:
     """Split a string of comma-separated keywords into a list of keywords."""
     return [k.strip() for k in data.split(",")]
 
 
-def _parse_project_urls(data: List[str]) -> Dict[str, str]:
+def _parse_project_urls(data: list[str]) -> dict[str, str]:
     """Parse a list of label/URL string pairings separated by a comma."""
     urls = {}
     for pair in data:
@@ -230,7 +211,7 @@ def _parse_project_urls(data: List[str]) -> Dict[str, str]:
     return urls
 
 
-def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
+def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
     """Get the body of the message."""
     # If our source is a str, then our caller has managed encodings for us,
     # and we don't need to deal with it.
@@ -292,7 +273,7 @@ def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
 _RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
 
 
-def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
+def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
     """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
 
     This function returns a two-item tuple of dicts. The first dict is of
@@ -308,8 +289,8 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
     included in this dict.
 
     """
-    raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {}
-    unparsed: Dict[str, List[str]] = {}
+    raw: dict[str, str | list[str] | dict[str, str]] = {}
+    unparsed: dict[str, list[str]] = {}
 
     if isinstance(data, str):
         parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
@@ -357,7 +338,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
                 # The Header object stores its data as chunks, and each chunk
                 # can be independently encoded, so we'll need to check each
                 # of them.
-                chunks: List[Tuple[bytes, Optional[str]]] = []
+                chunks: list[tuple[bytes, str | None]] = []
                 for bin, encoding in email.header.decode_header(h):
                     try:
                         bin.decode("utf8", "strict")
@@ -499,11 +480,11 @@ def __init__(
     ) -> None:
         self.added = added
 
-    def __set_name__(self, _owner: "Metadata", name: str) -> None:
+    def __set_name__(self, _owner: Metadata, name: str) -> None:
         self.name = name
         self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
 
-    def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
+    def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
         # With Python 3.8, the caching can be replaced with functools.cached_property().
         # No need to check the cache as attribute lookup will resolve into the
         # instance's __dict__ before __get__ is called.
@@ -531,7 +512,7 @@ def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
         return cast(T, value)
 
     def _invalid_metadata(
-        self, msg: str, cause: Optional[Exception] = None
+        self, msg: str, cause: Exception | None = None
     ) -> InvalidMetadata:
         exc = InvalidMetadata(
             self.raw_name, msg.format_map({"field": repr(self.raw_name)})
@@ -606,7 +587,7 @@ def _process_description_content_type(self, value: str) -> str:
             )
         return value
 
-    def _process_dynamic(self, value: List[str]) -> List[str]:
+    def _process_dynamic(self, value: list[str]) -> list[str]:
         for dynamic_field in map(str.lower, value):
             if dynamic_field in {"name", "version", "metadata-version"}:
                 raise self._invalid_metadata(
@@ -618,8 +599,8 @@ def _process_dynamic(self, value: List[str]) -> List[str]:
 
     def _process_provides_extra(
         self,
-        value: List[str],
-    ) -> List[utils.NormalizedName]:
+        value: list[str],
+    ) -> list[utils.NormalizedName]:
         normalized_names = []
         try:
             for name in value:
@@ -641,8 +622,8 @@ def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
 
     def _process_requires_dist(
         self,
-        value: List[str],
-    ) -> List[requirements.Requirement]:
+        value: list[str],
+    ) -> list[requirements.Requirement]:
         reqs = []
         try:
             for req in value:
@@ -665,7 +646,7 @@ class Metadata:
     _raw: RawMetadata
 
     @classmethod
-    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
+    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
         """Create an instance from :class:`RawMetadata`.
 
         If *validate* is true, all metadata will be validated. All exceptions
@@ -675,7 +656,7 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
         ins._raw = data.copy()  # Mutations occur due to caching enriched values.
 
         if validate:
-            exceptions: List[Exception] = []
+            exceptions: list[Exception] = []
             try:
                 metadata_version = ins.metadata_version
                 metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
@@ -722,9 +703,7 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
         return ins
 
     @classmethod
-    def from_email(
-        cls, data: Union[bytes, str], *, validate: bool = True
-    ) -> "Metadata":
+    def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
         """Parse metadata from email headers.
 
         If *validate* is true, the metadata will be validated. All exceptions
@@ -760,66 +739,66 @@ def from_email(
     *validate* parameter)"""
     version: _Validator[version_module.Version] = _Validator()
     """:external:ref:`core-metadata-version` (required)"""
-    dynamic: _Validator[Optional[List[str]]] = _Validator(
+    dynamic: _Validator[list[str] | None] = _Validator(
         added="2.2",
     )
     """:external:ref:`core-metadata-dynamic`
     (validated against core metadata field names and lowercased)"""
-    platforms: _Validator[Optional[List[str]]] = _Validator()
+    platforms: _Validator[list[str] | None] = _Validator()
     """:external:ref:`core-metadata-platform`"""
-    supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1")
     """:external:ref:`core-metadata-supported-platform`"""
-    summary: _Validator[Optional[str]] = _Validator()
+    summary: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
-    description: _Validator[Optional[str]] = _Validator()  # TODO 2.1: can be in body
+    description: _Validator[str | None] = _Validator()  # TODO 2.1: can be in body
     """:external:ref:`core-metadata-description`"""
-    description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
+    description_content_type: _Validator[str | None] = _Validator(added="2.1")
     """:external:ref:`core-metadata-description-content-type` (validated)"""
-    keywords: _Validator[Optional[List[str]]] = _Validator()
+    keywords: _Validator[list[str] | None] = _Validator()
     """:external:ref:`core-metadata-keywords`"""
-    home_page: _Validator[Optional[str]] = _Validator()
+    home_page: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-home-page`"""
-    download_url: _Validator[Optional[str]] = _Validator(added="1.1")
+    download_url: _Validator[str | None] = _Validator(added="1.1")
     """:external:ref:`core-metadata-download-url`"""
-    author: _Validator[Optional[str]] = _Validator()
+    author: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-author`"""
-    author_email: _Validator[Optional[str]] = _Validator()
+    author_email: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-author-email`"""
-    maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
+    maintainer: _Validator[str | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-maintainer`"""
-    maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
+    maintainer_email: _Validator[str | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-maintainer-email`"""
-    license: _Validator[Optional[str]] = _Validator()
+    license: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-license`"""
-    classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    classifiers: _Validator[list[str] | None] = _Validator(added="1.1")
     """:external:ref:`core-metadata-classifier`"""
-    requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
+    requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(
         added="1.2"
     )
     """:external:ref:`core-metadata-requires-dist`"""
-    requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
+    requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator(
         added="1.2"
     )
     """:external:ref:`core-metadata-requires-python`"""
     # Because `Requires-External` allows for non-PEP 440 version specifiers, we
     # don't do any processing on the values.
-    requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    requires_external: _Validator[list[str] | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-requires-external`"""
-    project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
+    project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-project-url`"""
     # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
     # regardless of metadata version.
-    provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
+    provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator(
         added="2.1",
     )
     """:external:ref:`core-metadata-provides-extra`"""
-    provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    provides_dist: _Validator[list[str] | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-provides-dist`"""
-    obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-obsoletes-dist`"""
-    requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    requires: _Validator[list[str] | None] = _Validator(added="1.1")
     """``Requires`` (deprecated)"""
-    provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    provides: _Validator[list[str] | None] = _Validator(added="1.1")
     """``Provides`` (deprecated)"""
-    obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    obsoletes: _Validator[list[str] | None] = _Validator(added="1.1")
     """``Obsoletes`` (deprecated)"""
diff --git a/setuptools/_vendor/packaging/requirements.py b/setuptools/_vendor/packaging/requirements.py
index bdc43a7e98..4e068c9567 100644
--- a/setuptools/_vendor/packaging/requirements.py
+++ b/setuptools/_vendor/packaging/requirements.py
@@ -1,8 +1,9 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
+from __future__ import annotations
 
-from typing import Any, Iterator, Optional, Set
+from typing import Any, Iterator
 
 from ._parser import parse_requirement as _parse_requirement
 from ._tokenizer import ParserSyntaxError
@@ -37,10 +38,10 @@ def __init__(self, requirement_string: str) -> None:
             raise InvalidRequirement(str(e)) from e
 
         self.name: str = parsed.name
-        self.url: Optional[str] = parsed.url or None
-        self.extras: Set[str] = set(parsed.extras or [])
+        self.url: str | None = parsed.url or None
+        self.extras: set[str] = set(parsed.extras or [])
         self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
-        self.marker: Optional[Marker] = None
+        self.marker: Marker | None = None
         if parsed.marker is not None:
             self.marker = Marker.__new__(Marker)
             self.marker._markers = _normalize_extra_values(parsed.marker)
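
Only annotations change here as well; ``Requirement`` still parses PEP 508
strings into the same attributes. A short sketch with an illustrative
requirement string::

    from packaging.requirements import Requirement

    req = Requirement('requests[security]>=2.8.1; python_version < "3.11"')
    print(req.name)       # requests
    print(req.extras)     # {'security'}
    print(req.specifier)  # >=2.8.1
    print(req.marker)     # python_version < "3.11"
    print(req.url)        # None
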
diff --git a/setuptools/_vendor/packaging/specifiers.py b/setuptools/_vendor/packaging/specifiers.py
index 2d015bab59..2fa75f7abb 100644
--- a/setuptools/_vendor/packaging/specifiers.py
+++ b/setuptools/_vendor/packaging/specifiers.py
@@ -8,10 +8,12 @@
     from packaging.version import Version
 """
 
+from __future__ import annotations
+
 import abc
 import itertools
 import re
-from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
+from typing import Callable, Iterable, Iterator, TypeVar, Union
 
 from .utils import canonicalize_version
 from .version import Version
@@ -64,7 +66,7 @@ def __eq__(self, other: object) -> bool:
 
     @property
     @abc.abstractmethod
-    def prereleases(self) -> Optional[bool]:
+    def prereleases(self) -> bool | None:
         """Whether or not pre-releases as a whole are allowed.
 
         This can be set to either ``True`` or ``False`` to explicitly enable or disable
@@ -79,14 +81,14 @@ def prereleases(self, value: bool) -> None:
         """
 
     @abc.abstractmethod
-    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
+    def contains(self, item: str, prereleases: bool | None = None) -> bool:
         """
         Determines if the given item is contained within this specifier.
         """
 
     @abc.abstractmethod
     def filter(
-        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
     ) -> Iterator[UnparsedVersionVar]:
         """
         Takes an iterable of items and filters them so that only items which
@@ -217,7 +219,7 @@ class Specifier(BaseSpecifier):
         "===": "arbitrary",
     }
 
-    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+    def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:
         """Initialize a Specifier instance.
 
         :param spec:
@@ -234,7 +236,7 @@ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
         if not match:
             raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
 
-        self._spec: Tuple[str, str] = (
+        self._spec: tuple[str, str] = (
             match.group("operator").strip(),
             match.group("version").strip(),
         )
@@ -318,7 +320,7 @@ def __str__(self) -> str:
         return "{}{}".format(*self._spec)
 
     @property
-    def _canonical_spec(self) -> Tuple[str, str]:
+    def _canonical_spec(self) -> tuple[str, str]:
         canonical_version = canonicalize_version(
             self._spec[1],
             strip_trailing_zero=(self._spec[0] != "~="),
@@ -364,7 +366,6 @@ def _get_operator(self, op: str) -> CallableOperator:
         return operator_callable
 
     def _compare_compatible(self, prospective: Version, spec: str) -> bool:
-
         # Compatible releases have an equivalent combination of >= and ==. That
         # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
         # implement this in terms of the other specifiers instead of
@@ -385,7 +386,6 @@ def _compare_compatible(self, prospective: Version, spec: str) -> bool:
         )
 
     def _compare_equal(self, prospective: Version, spec: str) -> bool:
-
         # We need special logic to handle prefix matching
         if spec.endswith(".*"):
             # In the case of prefix matching we want to ignore local segment.
@@ -429,21 +429,18 @@ def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
         return not self._compare_equal(prospective, spec)
 
     def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
-
         # NB: Local version identifiers are NOT permitted in the version
         # specifier, so local version labels can be universally removed from
         # the prospective version.
         return Version(prospective.public) <= Version(spec)
 
     def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
-
         # NB: Local version identifiers are NOT permitted in the version
         # specifier, so local version labels can be universally removed from
         # the prospective version.
         return Version(prospective.public) >= Version(spec)
 
     def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
-
         # Convert our spec to a Version instance, since we'll want to work with
         # it as a version.
         spec = Version(spec_str)
@@ -468,7 +465,6 @@ def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
         return True
 
     def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
-
         # Convert our spec to a Version instance, since we'll want to work with
         # it as a version.
         spec = Version(spec_str)
@@ -501,7 +497,7 @@ def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
     def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
         return str(prospective).lower() == str(spec).lower()
 
-    def __contains__(self, item: Union[str, Version]) -> bool:
+    def __contains__(self, item: str | Version) -> bool:
         """Return whether or not the item is contained in this specifier.
 
         :param item: The item to check for.
@@ -522,9 +518,7 @@ def __contains__(self, item: Union[str, Version]) -> bool:
         """
         return self.contains(item)
 
-    def contains(
-        self, item: UnparsedVersion, prereleases: Optional[bool] = None
-    ) -> bool:
+    def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool:
         """Return whether or not the item is contained in this specifier.
 
         :param item:
@@ -569,7 +563,7 @@ def contains(
         return operator_callable(normalized_item, self.version)
 
     def filter(
-        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
     ) -> Iterator[UnparsedVersionVar]:
         """Filter items in the given iterable, that match the specifier.
 
@@ -633,7 +627,7 @@ def filter(
 _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
 
 
-def _version_split(version: str) -> List[str]:
+def _version_split(version: str) -> list[str]:
     """Split version into components.
 
     The split components are intended for version comparison. The logic does
@@ -641,7 +635,7 @@ def _version_split(version: str) -> List[str]:
     components back with :func:`_version_join` may not produce the original
     version string.
     """
-    result: List[str] = []
+    result: list[str] = []
 
     epoch, _, rest = version.rpartition("!")
     result.append(epoch or "0")
@@ -655,7 +649,7 @@ def _version_split(version: str) -> List[str]:
     return result
 
 
-def _version_join(components: List[str]) -> str:
+def _version_join(components: list[str]) -> str:
     """Join split version components into a version string.
 
     This function assumes the input came from :func:`_version_split`, where the
@@ -672,7 +666,7 @@ def _is_not_suffix(segment: str) -> bool:
     )
 
 
-def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]:
     left_split, right_split = [], []
 
     # Get the release segment of our versions
@@ -700,9 +694,7 @@ class SpecifierSet(BaseSpecifier):
     specifiers (``>=3.0,!=3.1``), or no specifier at all.
     """
 
-    def __init__(
-        self, specifiers: str = "", prereleases: Optional[bool] = None
-    ) -> None:
+    def __init__(self, specifiers: str = "", prereleases: bool | None = None) -> None:
         """Initialize a SpecifierSet instance.
 
         :param specifiers:
@@ -730,7 +722,7 @@ def __init__(
         self._prereleases = prereleases
 
     @property
-    def prereleases(self) -> Optional[bool]:
+    def prereleases(self) -> bool | None:
         # If we have been given an explicit prerelease modifier, then we'll
         # pass that through here.
         if self._prereleases is not None:
@@ -787,7 +779,7 @@ def __str__(self) -> str:
     def __hash__(self) -> int:
         return hash(self._specs)
 
-    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+    def __and__(self, other: SpecifierSet | str) -> SpecifierSet:
         """Return a SpecifierSet which is a combination of the two sets.
 
         :param other: The other object to combine with.
@@ -883,8 +875,8 @@ def __contains__(self, item: UnparsedVersion) -> bool:
     def contains(
         self,
         item: UnparsedVersion,
-        prereleases: Optional[bool] = None,
-        installed: Optional[bool] = None,
+        prereleases: bool | None = None,
+        installed: bool | None = None,
     ) -> bool:
         """Return whether or not the item is contained in this SpecifierSet.
 
@@ -938,7 +930,7 @@ def contains(
         return all(s.contains(item, prereleases=prereleases) for s in self._specs)
 
     def filter(
-        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
     ) -> Iterator[UnparsedVersionVar]:
         """Filter items in the given iterable, that match the specifiers in this set.
 
@@ -995,8 +987,8 @@ def filter(
         # which will filter out any pre-releases, unless there are no final
         # releases.
         else:
-            filtered: List[UnparsedVersionVar] = []
-            found_prereleases: List[UnparsedVersionVar] = []
+            filtered: list[UnparsedVersionVar] = []
+            found_prereleases: list[UnparsedVersionVar] = []
 
             for item in iterable:
                 parsed_version = _coerce_version(item)
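
The ``prereleases: bool | None`` tri-state that these signatures describe
keeps its semantics: ``None`` means "decide from context". A sketch with
illustrative versions::

    from packaging.specifiers import SpecifierSet

    specs = SpecifierSet(">=1.0,!=1.3")
    print("1.4" in specs)         # True
    print(specs.contains("1.3"))  # False

    # filter() excludes pre-releases by default when final releases match...
    print(list(specs.filter(["1.0", "1.3", "2.0a1"])))             # ['1.0']
    # ...but includes them on request.
    print(list(specs.filter(["1.0", "2.0a1"], prereleases=True)))  # ['1.0', '2.0a1']
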
diff --git a/setuptools/_vendor/packaging/tags.py b/setuptools/_vendor/packaging/tags.py
index 89f1926137..6667d29908 100644
--- a/setuptools/_vendor/packaging/tags.py
+++ b/setuptools/_vendor/packaging/tags.py
@@ -2,6 +2,8 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
+from __future__ import annotations
+
 import logging
 import platform
 import re
@@ -11,15 +13,10 @@
 import sysconfig
 from importlib.machinery import EXTENSION_SUFFIXES
 from typing import (
-    Dict,
-    FrozenSet,
     Iterable,
     Iterator,
-    List,
-    Optional,
     Sequence,
     Tuple,
-    Union,
     cast,
 )
 
@@ -30,7 +27,7 @@
 PythonVersion = Sequence[int]
 MacVersion = Tuple[int, int]
 
-INTERPRETER_SHORT_NAMES: Dict[str, str] = {
+INTERPRETER_SHORT_NAMES: dict[str, str] = {
     "python": "py",  # Generic.
     "cpython": "cp",
     "pypy": "pp",
@@ -96,7 +93,7 @@ def __repr__(self) -> str:
         return f"<{self} @ {id(self)}>"
 
 
-def parse_tag(tag: str) -> FrozenSet[Tag]:
+def parse_tag(tag: str) -> frozenset[Tag]:
     """
     Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
 
@@ -112,8 +109,8 @@ def parse_tag(tag: str) -> FrozenSet[Tag]:
     return frozenset(tags)
 
 
-def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
-    value: Union[int, str, None] = sysconfig.get_config_var(name)
+def _get_config_var(name: str, warn: bool = False) -> int | str | None:
+    value: int | str | None = sysconfig.get_config_var(name)
     if value is None and warn:
         logger.debug(
             "Config variable '%s' is unset, Python ABI tag may be incorrect", name
@@ -125,7 +122,7 @@ def _normalize_string(string: str) -> str:
     return string.replace(".", "_").replace("-", "_").replace(" ", "_")
 
 
-def _is_threaded_cpython(abis: List[str]) -> bool:
+def _is_threaded_cpython(abis: list[str]) -> bool:
     """
     Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
 
@@ -151,7 +148,7 @@ def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
     return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
 
 
-def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]:
     py_version = tuple(py_version)  # To allow for version comparison.
     abis = []
     version = _version_nodot(py_version[:2])
@@ -185,9 +182,9 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
 
 
 def cpython_tags(
-    python_version: Optional[PythonVersion] = None,
-    abis: Optional[Iterable[str]] = None,
-    platforms: Optional[Iterable[str]] = None,
+    python_version: PythonVersion | None = None,
+    abis: Iterable[str] | None = None,
+    platforms: Iterable[str] | None = None,
     *,
     warn: bool = False,
 ) -> Iterator[Tag]:
@@ -244,7 +241,7 @@ def cpython_tags(
                 yield Tag(interpreter, "abi3", platform_)
 
 
-def _generic_abi() -> List[str]:
+def _generic_abi() -> list[str]:
     """
     Return the ABI tag based on EXT_SUFFIX.
     """
@@ -286,9 +283,9 @@ def _generic_abi() -> List[str]:
 
 
 def generic_tags(
-    interpreter: Optional[str] = None,
-    abis: Optional[Iterable[str]] = None,
-    platforms: Optional[Iterable[str]] = None,
+    interpreter: str | None = None,
+    abis: Iterable[str] | None = None,
+    platforms: Iterable[str] | None = None,
     *,
     warn: bool = False,
 ) -> Iterator[Tag]:
@@ -332,9 +329,9 @@ def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
 
 
 def compatible_tags(
-    python_version: Optional[PythonVersion] = None,
-    interpreter: Optional[str] = None,
-    platforms: Optional[Iterable[str]] = None,
+    python_version: PythonVersion | None = None,
+    interpreter: str | None = None,
+    platforms: Iterable[str] | None = None,
 ) -> Iterator[Tag]:
     """
     Yields the sequence of tags that are compatible with a specific version of Python.
@@ -366,7 +363,7 @@ def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
     return "i386"
 
 
-def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> list[str]:
     formats = [cpu_arch]
     if cpu_arch == "x86_64":
         if version < (10, 4):
@@ -399,7 +396,7 @@ def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
 
 
 def mac_platforms(
-    version: Optional[MacVersion] = None, arch: Optional[str] = None
+    version: MacVersion | None = None, arch: str | None = None
 ) -> Iterator[str]:
     """
     Yields the platform tags for a macOS system.
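
``parse_tag`` and the ``*_tags`` generators are annotation-only changes too.
An illustrative sketch (the interpreter version, ABI, and platform values
are hypothetical)::

    from packaging.tags import cpython_tags, parse_tag

    # A compressed tag expands into one Tag per dotted platform component.
    for tag in parse_tag("cp312-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64"):
        print(tag)

    # Candidate tags for CPython 3.12 on one platform, most specific first.
    for tag in cpython_tags((3, 12), abis=["cp312"], platforms=["linux_x86_64"]):
        print(tag)
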
diff --git a/setuptools/_vendor/packaging/utils.py b/setuptools/_vendor/packaging/utils.py
index c2c2f75aa8..d33da5bb8b 100644
--- a/setuptools/_vendor/packaging/utils.py
+++ b/setuptools/_vendor/packaging/utils.py
@@ -2,8 +2,10 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
+from __future__ import annotations
+
 import re
-from typing import FrozenSet, NewType, Tuple, Union, cast
+from typing import NewType, Tuple, Union, cast
 
 from .tags import Tag, parse_tag
 from .version import InvalidVersion, Version
@@ -53,7 +55,7 @@ def is_normalized_name(name: str) -> bool:
 
 
 def canonicalize_version(
-    version: Union[Version, str], *, strip_trailing_zero: bool = True
+    version: Version | str, *, strip_trailing_zero: bool = True
 ) -> str:
     """
     This is very similar to Version.__str__, but has one subtle difference
@@ -102,7 +104,7 @@ def canonicalize_version(
 
 def parse_wheel_filename(
     filename: str,
-) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
+) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
     if not filename.endswith(".whl"):
         raise InvalidWheelFilename(
             f"Invalid wheel filename (extension must be '.whl'): {filename}"
@@ -143,7 +145,7 @@ def parse_wheel_filename(
     return (name, version, build, tags)
 
 
-def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
+def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
     if filename.endswith(".tar.gz"):
         file_stem = filename[: -len(".tar.gz")]
     elif filename.endswith(".zip"):
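
A brief sketch of the helpers touched in this file, with illustrative
filenames::

    from packaging.utils import (
        canonicalize_name,
        canonicalize_version,
        parse_sdist_filename,
        parse_wheel_filename,
    )

    print(canonicalize_name("Foo.Bar_baz"))  # foo-bar-baz
    print(canonicalize_version("1.4.0"))     # 1.4 (trailing zero stripped)

    name, version, build, tags = parse_wheel_filename(
        "example_pkg-1.0-py3-none-any.whl"
    )
    print(name, version, build)              # example-pkg 1.0 ()

    print(parse_sdist_filename("example_pkg-1.0.tar.gz"))
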
diff --git a/setuptools/_vendor/packaging/version.py b/setuptools/_vendor/packaging/version.py
index 5faab9bd0d..46bc261308 100644
--- a/setuptools/_vendor/packaging/version.py
+++ b/setuptools/_vendor/packaging/version.py
@@ -7,9 +7,11 @@
     from packaging.version import parse, Version
 """
 
+from __future__ import annotations
+
 import itertools
 import re
-from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
+from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union
 
 from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
 
@@ -35,14 +37,14 @@
 
 class _Version(NamedTuple):
     epoch: int
-    release: Tuple[int, ...]
-    dev: Optional[Tuple[str, int]]
-    pre: Optional[Tuple[str, int]]
-    post: Optional[Tuple[str, int]]
-    local: Optional[LocalType]
+    release: tuple[int, ...]
+    dev: tuple[str, int] | None
+    pre: tuple[str, int] | None
+    post: tuple[str, int] | None
+    local: LocalType | None
 
 
-def parse(version: str) -> "Version":
+def parse(version: str) -> Version:
     """Parse the given version string.
 
     >>> parse('1.0.dev1')
@@ -65,7 +67,7 @@ class InvalidVersion(ValueError):
 
 
 class _BaseVersion:
-    _key: Tuple[Any, ...]
+    _key: tuple[Any, ...]
 
     def __hash__(self) -> int:
         return hash(self._key)
@@ -73,13 +75,13 @@ def __hash__(self) -> int:
     # Please keep the duplicated `isinstance` check
     # in the six comparisons hereunder
     # unless you find a way to avoid adding overhead function calls.
-    def __lt__(self, other: "_BaseVersion") -> bool:
+    def __lt__(self, other: _BaseVersion) -> bool:
         if not isinstance(other, _BaseVersion):
             return NotImplemented
 
         return self._key < other._key
 
-    def __le__(self, other: "_BaseVersion") -> bool:
+    def __le__(self, other: _BaseVersion) -> bool:
         if not isinstance(other, _BaseVersion):
             return NotImplemented
 
@@ -91,13 +93,13 @@ def __eq__(self, other: object) -> bool:
 
         return self._key == other._key
 
-    def __ge__(self, other: "_BaseVersion") -> bool:
+    def __ge__(self, other: _BaseVersion) -> bool:
         if not isinstance(other, _BaseVersion):
             return NotImplemented
 
         return self._key >= other._key
 
-    def __gt__(self, other: "_BaseVersion") -> bool:
+    def __gt__(self, other: _BaseVersion) -> bool:
         if not isinstance(other, _BaseVersion):
             return NotImplemented
 
@@ -274,7 +276,7 @@ def epoch(self) -> int:
         return self._version.epoch
 
     @property
-    def release(self) -> Tuple[int, ...]:
+    def release(self) -> tuple[int, ...]:
         """The components of the "release" segment of the version.
 
         >>> Version("1.2.3").release
@@ -290,7 +292,7 @@ def release(self) -> Tuple[int, ...]:
         return self._version.release
 
     @property
-    def pre(self) -> Optional[Tuple[str, int]]:
+    def pre(self) -> tuple[str, int] | None:
         """The pre-release segment of the version.
 
         >>> print(Version("1.2.3").pre)
@@ -305,7 +307,7 @@ def pre(self) -> Optional[Tuple[str, int]]:
         return self._version.pre
 
     @property
-    def post(self) -> Optional[int]:
+    def post(self) -> int | None:
         """The post-release number of the version.
 
         >>> print(Version("1.2.3").post)
@@ -316,7 +318,7 @@ def post(self) -> Optional[int]:
         return self._version.post[1] if self._version.post else None
 
     @property
-    def dev(self) -> Optional[int]:
+    def dev(self) -> int | None:
         """The development number of the version.
 
         >>> print(Version("1.2.3").dev)
@@ -327,7 +329,7 @@ def dev(self) -> Optional[int]:
         return self._version.dev[1] if self._version.dev else None
 
     @property
-    def local(self) -> Optional[str]:
+    def local(self) -> str | None:
         """The local version segment of the version.
 
         >>> print(Version("1.2.3").local)
@@ -450,9 +452,8 @@ def micro(self) -> int:
 
 
 def _parse_letter_version(
-    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
-) -> Optional[Tuple[str, int]]:
-
+    letter: str | None, number: str | bytes | SupportsInt | None
+) -> tuple[str, int] | None:
     if letter:
         # We consider there to be an implicit 0 in a pre-release if there is
         # not a numeral associated with it.
@@ -488,7 +489,7 @@ def _parse_letter_version(
 _local_version_separators = re.compile(r"[\._-]")
 
 
-def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
+def _parse_local_version(local: str | None) -> LocalType | None:
     """
     Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
     """
@@ -502,13 +503,12 @@ def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
 
 def _cmpkey(
     epoch: int,
-    release: Tuple[int, ...],
-    pre: Optional[Tuple[str, int]],
-    post: Optional[Tuple[str, int]],
-    dev: Optional[Tuple[str, int]],
-    local: Optional[LocalType],
+    release: tuple[int, ...],
+    pre: tuple[str, int] | None,
+    post: tuple[str, int] | None,
+    dev: tuple[str, int] | None,
+    local: LocalType | None,
 ) -> CmpKey:
-
     # When we compare a release version, we want to compare it with all of the
     # trailing zeros removed. So we'll reverse the list, drop all the now-leading
     # zeros until we come to something non-zero, then take the rest
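
The ``Version`` accessors whose annotations change above behave as before.
A sketch with an illustrative version string::

    from packaging.version import Version, parse

    v = Version("1.2.3rc1.post2.dev4+ubuntu.1")
    print(v.release, v.pre, v.post, v.dev, v.local)
    # (1, 2, 3) ('rc', 1) 2 4 ubuntu.1

    # PEP 440 ordering: dev < pre < final < post
    print(Version("1.0.dev1") < Version("1.0rc1") < Version("1.0"))  # True
    print(parse("2.0") == Version("2.0"))                            # True
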
diff --git a/setuptools/_vendor/typeguard-4.3.0.dist-info/INSTALLER b/setuptools/_vendor/typeguard-4.3.0.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/typeguard-4.3.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/typeguard-4.3.0.dist-info/LICENSE b/setuptools/_vendor/typeguard-4.3.0.dist-info/LICENSE
new file mode 100644
index 0000000000..07806f8af9
--- /dev/null
+++ b/setuptools/_vendor/typeguard-4.3.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+This is the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+Copyright (c) Alex Grönholm
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this
+software and associated documentation files (the "Software"), to deal in the Software
+without restriction, including without limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
+to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/setuptools/_vendor/typeguard-4.3.0.dist-info/METADATA b/setuptools/_vendor/typeguard-4.3.0.dist-info/METADATA
new file mode 100644
index 0000000000..6e5750b485
--- /dev/null
+++ b/setuptools/_vendor/typeguard-4.3.0.dist-info/METADATA
@@ -0,0 +1,81 @@
+Metadata-Version: 2.1
+Name: typeguard
+Version: 4.3.0
+Summary: Run-time type checker for Python
+Author-email: Alex Grönholm 
+License: MIT
+Project-URL: Documentation, https://typeguard.readthedocs.io/en/latest/
+Project-URL: Change log, https://typeguard.readthedocs.io/en/latest/versionhistory.html
+Project-URL: Source code, https://github.com/agronholm/typeguard
+Project-URL: Issue tracker, https://github.com/agronholm/typeguard/issues
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: typing-extensions >=4.10.0
+Requires-Dist: importlib-metadata >=3.6 ; python_version < "3.10"
+Provides-Extra: doc
+Requires-Dist: packaging ; extra == 'doc'
+Requires-Dist: Sphinx >=7 ; extra == 'doc'
+Requires-Dist: sphinx-autodoc-typehints >=1.2.0 ; extra == 'doc'
+Requires-Dist: sphinx-rtd-theme >=1.3.0 ; extra == 'doc'
+Provides-Extra: test
+Requires-Dist: coverage[toml] >=7 ; extra == 'test'
+Requires-Dist: pytest >=7 ; extra == 'test'
+Requires-Dist: mypy >=1.2.0 ; (platform_python_implementation != "PyPy") and extra == 'test'
+
+.. image:: https://github.com/agronholm/typeguard/actions/workflows/test.yml/badge.svg
+  :target: https://github.com/agronholm/typeguard/actions/workflows/test.yml
+  :alt: Build Status
+.. image:: https://coveralls.io/repos/agronholm/typeguard/badge.svg?branch=master&service=github
+  :target: https://coveralls.io/github/agronholm/typeguard?branch=master
+  :alt: Code Coverage
+.. image:: https://readthedocs.org/projects/typeguard/badge/?version=latest
+  :target: https://typeguard.readthedocs.io/en/latest/?badge=latest
+  :alt: Documentation
+
+This library provides run-time type checking for functions defined with
+`PEP 484 <https://peps.python.org/pep-0484/>`_ argument (and return) type
+annotations, and any arbitrary objects. It can be used together with static type
+checkers as an additional layer of type safety, to catch type violations that could only
+be detected at run time.
+
+Two principal ways to do type checking are provided:
+
+#. The ``check_type`` function:
+
+   * like ``isinstance()``, but supports arbitrary type annotations (within limits)
+   * can be used as a ``cast()`` replacement, but with actual checking of the value
+#. Code instrumentation:
+
+   * entire modules, or individual functions (via ``@typechecked``) are recompiled, with
+     type checking code injected into them
+   * automatically checks function arguments, return values and assignments to annotated
+     local variables
+   * for generator functions (regular and async), checks yield and send values
+   * requires the original source code of the instrumented module(s) to be accessible
+
+Two options are provided for code instrumentation:
+
+#. the ``@typechecked`` function:
+
+   * can be applied to functions individually
+#. the import hook (``typeguard.install_import_hook()``):
+
+   * automatically instruments targeted modules on import
+   * no manual code changes required in the target modules
+   * requires the import hook to be installed before the targeted modules are imported
+   * may clash with other import hooks
+
+See the documentation_ for further information.
+
+.. _documentation: https://typeguard.readthedocs.io/en/latest/
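
Since this change vendors typeguard wholesale, here is a minimal sketch of
the two checking styles the README above describes; the function and values
are illustrative, and the ``list[int]`` syntax assumes Python 3.9+::

    from typeguard import TypeCheckError, check_type, typechecked

    # check_type(): isinstance()-style checking for parametrized annotations.
    check_type([1, 2, 3], list[int])  # passes silently
    try:
        check_type([1, "2"], list[int])
    except TypeCheckError as exc:
        print(exc)  # e.g. "item 1 of list is not an instance of int"

    # @typechecked instruments the function to check its arguments and
    # return value at call time.
    @typechecked
    def greet(name: str) -> str:
        return f"hello {name}"

    greet("world")  # ok
    try:
        greet(42)   # rejected: argument "name" is not a str
    except TypeCheckError as exc:
        print(exc)
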
diff --git a/setuptools/_vendor/typeguard-4.3.0.dist-info/RECORD b/setuptools/_vendor/typeguard-4.3.0.dist-info/RECORD
new file mode 100644
index 0000000000..801e73347c
--- /dev/null
+++ b/setuptools/_vendor/typeguard-4.3.0.dist-info/RECORD
@@ -0,0 +1,34 @@
+typeguard-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+typeguard-4.3.0.dist-info/LICENSE,sha256=YWP3mH37ONa8MgzitwsvArhivEESZRbVUu8c1DJH51g,1130
+typeguard-4.3.0.dist-info/METADATA,sha256=z2dcHAp0TwhYCFU5Deh8x31nazElgujUz9tbuP0pjSE,3717
+typeguard-4.3.0.dist-info/RECORD,,
+typeguard-4.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+typeguard-4.3.0.dist-info/entry_points.txt,sha256=qp7NQ1aLtiSgMQqo6gWlfGpy0IIXzoMJmeQTLpzqFZQ,48
+typeguard-4.3.0.dist-info/top_level.txt,sha256=4z28AhuDodwRS_c1J_l8H51t5QuwfTseskYzlxp6grs,10
+typeguard/__init__.py,sha256=Onh4w38elPCjtlcU3JY9k3h70NjsxXIkAflmQn-Z0FY,2071
+typeguard/__pycache__/__init__.cpython-312.pyc,,
+typeguard/__pycache__/_checkers.cpython-312.pyc,,
+typeguard/__pycache__/_config.cpython-312.pyc,,
+typeguard/__pycache__/_decorators.cpython-312.pyc,,
+typeguard/__pycache__/_exceptions.cpython-312.pyc,,
+typeguard/__pycache__/_functions.cpython-312.pyc,,
+typeguard/__pycache__/_importhook.cpython-312.pyc,,
+typeguard/__pycache__/_memo.cpython-312.pyc,,
+typeguard/__pycache__/_pytest_plugin.cpython-312.pyc,,
+typeguard/__pycache__/_suppression.cpython-312.pyc,,
+typeguard/__pycache__/_transformer.cpython-312.pyc,,
+typeguard/__pycache__/_union_transformer.cpython-312.pyc,,
+typeguard/__pycache__/_utils.cpython-312.pyc,,
+typeguard/_checkers.py,sha256=JRrgKicdOEfIBoNEtegYCEIlhpad-a1u1Em7GCj0WCI,31360
+typeguard/_config.py,sha256=nIz8QwDa-oFO3L9O8_6srzlmd99pSby2wOM4Wb7F_B0,2846
+typeguard/_decorators.py,sha256=v6dsIeWvPhExGLP_wXF-RmDUyjZf_Ak28g7gBJ_v0-0,9033
+typeguard/_exceptions.py,sha256=ZIPeiV-FBd5Emw2EaWd2Fvlsrwi4ocwT2fVGBIAtHcQ,1121
+typeguard/_functions.py,sha256=ibgSAKa5ptIm1eR9ARG0BSozAFJPFNASZqhPVyQeqig,10393
+typeguard/_importhook.py,sha256=ugjCDvFcdWMU7UugqlJG91IpVNpEIxtRr-99s0h1k7M,6389
+typeguard/_memo.py,sha256=1juQV_vxnD2JYKbSrebiQuj4oKHz6n67v9pYA-CCISg,1303
+typeguard/_pytest_plugin.py,sha256=-fcSqkv54rIfIF8pDavY5YQPkj4OX8GMt_lL7CQSD4I,4416
+typeguard/_suppression.py,sha256=VQfzxcwIbu3if0f7VBkKM7hkYOA7tNFw9a7jMBsmMg4,2266
+typeguard/_transformer.py,sha256=9Ha7_QhdwoUni_6hvdY-hZbuEergowHrNL2vzHIakFY,44937
+typeguard/_union_transformer.py,sha256=v_42r7-6HuRX2SoFwnyJ-E5PlxXpVeUJPJR1-HU9qSo,1354
+typeguard/_utils.py,sha256=5HhO1rPn5f1M6ymkVAEv7Xmlz1cX-j0OnTMlyHqqrR8,5270
+typeguard/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/typeguard-4.3.0.dist-info/WHEEL b/setuptools/_vendor/typeguard-4.3.0.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/setuptools/_vendor/typeguard-4.3.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/setuptools/_vendor/typeguard-4.3.0.dist-info/entry_points.txt b/setuptools/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
new file mode 100644
index 0000000000..47c9d0bd91
--- /dev/null
+++ b/setuptools/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[pytest11]
+typeguard = typeguard._pytest_plugin
diff --git a/setuptools/_vendor/typeguard-4.3.0.dist-info/top_level.txt b/setuptools/_vendor/typeguard-4.3.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..be5ec23ea2
--- /dev/null
+++ b/setuptools/_vendor/typeguard-4.3.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+typeguard
diff --git a/setuptools/_vendor/typeguard/__init__.py b/setuptools/_vendor/typeguard/__init__.py
new file mode 100644
index 0000000000..6781cad094
--- /dev/null
+++ b/setuptools/_vendor/typeguard/__init__.py
@@ -0,0 +1,48 @@
+import os
+from typing import Any
+
+from ._checkers import TypeCheckerCallable as TypeCheckerCallable
+from ._checkers import TypeCheckLookupCallback as TypeCheckLookupCallback
+from ._checkers import check_type_internal as check_type_internal
+from ._checkers import checker_lookup_functions as checker_lookup_functions
+from ._checkers import load_plugins as load_plugins
+from ._config import CollectionCheckStrategy as CollectionCheckStrategy
+from ._config import ForwardRefPolicy as ForwardRefPolicy
+from ._config import TypeCheckConfiguration as TypeCheckConfiguration
+from ._decorators import typechecked as typechecked
+from ._decorators import typeguard_ignore as typeguard_ignore
+from ._exceptions import InstrumentationWarning as InstrumentationWarning
+from ._exceptions import TypeCheckError as TypeCheckError
+from ._exceptions import TypeCheckWarning as TypeCheckWarning
+from ._exceptions import TypeHintWarning as TypeHintWarning
+from ._functions import TypeCheckFailCallback as TypeCheckFailCallback
+from ._functions import check_type as check_type
+from ._functions import warn_on_error as warn_on_error
+from ._importhook import ImportHookManager as ImportHookManager
+from ._importhook import TypeguardFinder as TypeguardFinder
+from ._importhook import install_import_hook as install_import_hook
+from ._memo import TypeCheckMemo as TypeCheckMemo
+from ._suppression import suppress_type_checks as suppress_type_checks
+from ._utils import Unset as Unset
+
+# Re-export imports so they look like they live directly in this package
+for value in list(locals().values()):
+    if getattr(value, "__module__", "").startswith(f"{__name__}."):
+        value.__module__ = __name__
+
+
+config: TypeCheckConfiguration
+
+
+def __getattr__(name: str) -> Any:
+    if name == "config":
+        from ._config import global_config
+
+        return global_config
+
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+
+# Automatically load checker lookup functions unless explicitly disabled
+if "TYPEGUARD_DISABLE_PLUGIN_AUTOLOAD" not in os.environ:
+    load_plugins()
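
Note that ``config`` above is resolved lazily through the module-level
``__getattr__`` (PEP 562), and the plugin autoload can be opted out of via
the environment. An illustrative sketch::

    import os

    # Must be set before typeguard is first imported.
    os.environ["TYPEGUARD_DISABLE_PLUGIN_AUTOLOAD"] = "1"

    import typeguard

    # Attribute access triggers __getattr__, returning the global config.
    print(typeguard.config.collection_check_strategy)
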
diff --git a/setuptools/_vendor/typeguard/_checkers.py b/setuptools/_vendor/typeguard/_checkers.py
new file mode 100644
index 0000000000..67dd5ad4dc
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_checkers.py
@@ -0,0 +1,993 @@
+from __future__ import annotations
+
+import collections.abc
+import inspect
+import sys
+import types
+import typing
+import warnings
+from enum import Enum
+from inspect import Parameter, isclass, isfunction
+from io import BufferedIOBase, IOBase, RawIOBase, TextIOBase
+from textwrap import indent
+from typing import (
+    IO,
+    AbstractSet,
+    Any,
+    BinaryIO,
+    Callable,
+    Dict,
+    ForwardRef,
+    List,
+    Mapping,
+    MutableMapping,
+    NewType,
+    Optional,
+    Sequence,
+    Set,
+    TextIO,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
+from unittest.mock import Mock
+from weakref import WeakKeyDictionary
+
+try:
+    import typing_extensions
+except ImportError:
+    typing_extensions = None  # type: ignore[assignment]
+
+# Must use this because typing.is_typeddict does not recognize
+# TypedDict from typing_extensions, and as of version 4.12.0
+# typing_extensions.TypedDict is different from typing.TypedDict
+# on all versions.
+from typing_extensions import is_typeddict
+
+from ._config import ForwardRefPolicy
+from ._exceptions import TypeCheckError, TypeHintWarning
+from ._memo import TypeCheckMemo
+from ._utils import evaluate_forwardref, get_stacklevel, get_type_name, qualified_name
+
+if sys.version_info >= (3, 11):
+    from typing import (
+        Annotated,
+        NotRequired,
+        TypeAlias,
+        get_args,
+        get_origin,
+    )
+
+    SubclassableAny = Any
+else:
+    from typing_extensions import (
+        Annotated,
+        NotRequired,
+        TypeAlias,
+        get_args,
+        get_origin,
+    )
+    from typing_extensions import Any as SubclassableAny
+
+if sys.version_info >= (3, 10):
+    from importlib.metadata import entry_points
+    from typing import ParamSpec
+else:
+    from importlib_metadata import entry_points
+    from typing_extensions import ParamSpec
+
+TypeCheckerCallable: TypeAlias = Callable[
+    [Any, Any, Tuple[Any, ...], TypeCheckMemo], Any
+]
+TypeCheckLookupCallback: TypeAlias = Callable[
+    [Any, Tuple[Any, ...], Tuple[Any, ...]], Optional[TypeCheckerCallable]
+]
+
+checker_lookup_functions: list[TypeCheckLookupCallback] = []
+generic_alias_types: tuple[type, ...] = (type(List), type(List[Any]))
+if sys.version_info >= (3, 9):
+    generic_alias_types += (types.GenericAlias,)
+
+protocol_check_cache: WeakKeyDictionary[
+    type[Any], dict[type[Any], TypeCheckError | None]
+] = WeakKeyDictionary()
+
+# Sentinel
+_missing = object()
+
+# Lifted from mypy.sharedparse
+BINARY_MAGIC_METHODS = {
+    "__add__",
+    "__and__",
+    "__cmp__",
+    "__divmod__",
+    "__div__",
+    "__eq__",
+    "__floordiv__",
+    "__ge__",
+    "__gt__",
+    "__iadd__",
+    "__iand__",
+    "__idiv__",
+    "__ifloordiv__",
+    "__ilshift__",
+    "__imatmul__",
+    "__imod__",
+    "__imul__",
+    "__ior__",
+    "__ipow__",
+    "__irshift__",
+    "__isub__",
+    "__itruediv__",
+    "__ixor__",
+    "__le__",
+    "__lshift__",
+    "__lt__",
+    "__matmul__",
+    "__mod__",
+    "__mul__",
+    "__ne__",
+    "__or__",
+    "__pow__",
+    "__radd__",
+    "__rand__",
+    "__rdiv__",
+    "__rfloordiv__",
+    "__rlshift__",
+    "__rmatmul__",
+    "__rmod__",
+    "__rmul__",
+    "__ror__",
+    "__rpow__",
+    "__rrshift__",
+    "__rshift__",
+    "__rsub__",
+    "__rtruediv__",
+    "__rxor__",
+    "__sub__",
+    "__truediv__",
+    "__xor__",
+}
+
+
+def check_callable(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not callable(value):
+        raise TypeCheckError("is not callable")
+
+    if args:
+        try:
+            signature = inspect.signature(value)
+        except (TypeError, ValueError):
+            return
+
+        argument_types = args[0]
+        if isinstance(argument_types, list) and not any(
+            type(item) is ParamSpec for item in argument_types
+        ):
+            # The callable must not have keyword-only arguments without defaults
+            unfulfilled_kwonlyargs = [
+                param.name
+                for param in signature.parameters.values()
+                if param.kind == Parameter.KEYWORD_ONLY
+                and param.default == Parameter.empty
+            ]
+            if unfulfilled_kwonlyargs:
+                raise TypeCheckError(
+                    f"has mandatory keyword-only arguments in its declaration: "
+                    f'{", ".join(unfulfilled_kwonlyargs)}'
+                )
+
+            num_positional_args = num_mandatory_pos_args = 0
+            has_varargs = False
+            for param in signature.parameters.values():
+                if param.kind in (
+                    Parameter.POSITIONAL_ONLY,
+                    Parameter.POSITIONAL_OR_KEYWORD,
+                ):
+                    num_positional_args += 1
+                    if param.default is Parameter.empty:
+                        num_mandatory_pos_args += 1
+                elif param.kind == Parameter.VAR_POSITIONAL:
+                    has_varargs = True
+
+            if num_mandatory_pos_args > len(argument_types):
+                raise TypeCheckError(
+                    f"has too many mandatory positional arguments in its declaration; "
+                    f"expected {len(argument_types)} but {num_mandatory_pos_args} "
+                    f"mandatory positional argument(s) declared"
+                )
+            elif not has_varargs and num_positional_args < len(argument_types):
+                raise TypeCheckError(
+                    f"has too few arguments in its declaration; expected "
+                    f"{len(argument_types)} but {num_positional_args} argument(s) "
+                    f"declared"
+                )
+
+
+def check_mapping(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if origin_type is Dict or origin_type is dict:
+        if not isinstance(value, dict):
+            raise TypeCheckError("is not a dict")
+    if origin_type is MutableMapping or origin_type is collections.abc.MutableMapping:
+        if not isinstance(value, collections.abc.MutableMapping):
+            raise TypeCheckError("is not a mutable mapping")
+    elif not isinstance(value, collections.abc.Mapping):
+        raise TypeCheckError("is not a mapping")
+
+    if args:
+        key_type, value_type = args
+        if key_type is not Any or value_type is not Any:
+            samples = memo.config.collection_check_strategy.iterate_samples(
+                value.items()
+            )
+            for k, v in samples:
+                try:
+                    check_type_internal(k, key_type, memo)
+                except TypeCheckError as exc:
+                    exc.append_path_element(f"key {k!r}")
+                    raise
+
+                try:
+                    check_type_internal(v, value_type, memo)
+                except TypeCheckError as exc:
+                    exc.append_path_element(f"value of key {k!r}")
+                    raise
+
+
+def check_typed_dict(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, dict):
+        raise TypeCheckError("is not a dict")
+
+    declared_keys = frozenset(origin_type.__annotations__)
+    if hasattr(origin_type, "__required_keys__"):
+        required_keys = set(origin_type.__required_keys__)
+    else:  # py3.8 and lower
+        required_keys = set(declared_keys) if origin_type.__total__ else set()
+
+    existing_keys = set(value)
+    extra_keys = existing_keys - declared_keys
+    if extra_keys:
+        keys_formatted = ", ".join(f'"{key}"' for key in sorted(extra_keys, key=repr))
+        raise TypeCheckError(f"has unexpected extra key(s): {keys_formatted}")
+
+    # Detect NotRequired fields which are hidden by get_type_hints()
+    type_hints: dict[str, type] = {}
+    for key, annotation in origin_type.__annotations__.items():
+        if isinstance(annotation, ForwardRef):
+            annotation = evaluate_forwardref(annotation, memo)
+            if get_origin(annotation) is NotRequired:
+                required_keys.discard(key)
+                annotation = get_args(annotation)[0]
+
+        type_hints[key] = annotation
+
+    missing_keys = required_keys - existing_keys
+    if missing_keys:
+        keys_formatted = ", ".join(f'"{key}"' for key in sorted(missing_keys, key=repr))
+        raise TypeCheckError(f"is missing required key(s): {keys_formatted}")
+
+    for key, argtype in type_hints.items():
+        argvalue = value.get(key, _missing)
+        if argvalue is not _missing:
+            try:
+                check_type_internal(argvalue, argtype, memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"value of key {key!r}")
+                raise
+
+
+def check_list(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, list):
+        raise TypeCheckError("is not a list")
+
+    if args and args != (Any,):
+        samples = memo.config.collection_check_strategy.iterate_samples(value)
+        for i, v in enumerate(samples):
+            try:
+                check_type_internal(v, args[0], memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"item {i}")
+                raise
+
+
+def check_sequence(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, collections.abc.Sequence):
+        raise TypeCheckError("is not a sequence")
+
+    if args and args != (Any,):
+        samples = memo.config.collection_check_strategy.iterate_samples(value)
+        for i, v in enumerate(samples):
+            try:
+                check_type_internal(v, args[0], memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"item {i}")
+                raise
+
+
+def check_set(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if origin_type is frozenset:
+        if not isinstance(value, frozenset):
+            raise TypeCheckError("is not a frozenset")
+    elif not isinstance(value, AbstractSet):
+        raise TypeCheckError("is not a set")
+
+    if args and args != (Any,):
+        samples = memo.config.collection_check_strategy.iterate_samples(value)
+        for v in samples:
+            try:
+                check_type_internal(v, args[0], memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"[{v}]")
+                raise
+
+
+def check_tuple(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    # Specialized check for NamedTuples
+    if field_types := getattr(origin_type, "__annotations__", None):
+        if not isinstance(value, origin_type):
+            raise TypeCheckError(
+                f"is not a named tuple of type {qualified_name(origin_type)}"
+            )
+
+        for name, field_type in field_types.items():
+            try:
+                check_type_internal(getattr(value, name), field_type, memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"attribute {name!r}")
+                raise
+
+        return
+    elif not isinstance(value, tuple):
+        raise TypeCheckError("is not a tuple")
+
+    if args:
+        use_ellipsis = args[-1] is Ellipsis
+        tuple_params = args[: -1 if use_ellipsis else None]
+    else:
+        # Unparametrized Tuple or plain tuple
+        return
+
+    if use_ellipsis:
+        element_type = tuple_params[0]
+        samples = memo.config.collection_check_strategy.iterate_samples(value)
+        for i, element in enumerate(samples):
+            try:
+                check_type_internal(element, element_type, memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"item {i}")
+                raise
+    elif tuple_params == ((),):
+        if value != ():
+            raise TypeCheckError("is not an empty tuple")
+    else:
+        if len(value) != len(tuple_params):
+            raise TypeCheckError(
+                f"has wrong number of elements (expected {len(tuple_params)}, got "
+                f"{len(value)} instead)"
+            )
+
+        for i, (element, element_type) in enumerate(zip(value, tuple_params)):
+            try:
+                check_type_internal(element, element_type, memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"item {i}")
+                raise
+
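+# Illustrative sketch (not part of the vendored typeguard source) of the three
+# tuple shapes handled above, via the public check_type() API:
+#
+#     from typing import Tuple
+#     from typeguard import check_type
+#
+#     check_type((1, "a"), Tuple[int, str])   # fixed-length form: passes
+#     check_type((1, 2, 3), Tuple[int, ...])  # variadic (ellipsis) form: passes
+#     check_type((), Tuple[()])               # empty-tuple form: passes
+#     check_type((1,), Tuple[int, str])
+#     # TypeCheckError: ... has wrong number of elements (expected 2, got 1 instead)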
+
+def check_union(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    errors: dict[str, TypeCheckError] = {}
+    try:
+        for type_ in args:
+            try:
+                check_type_internal(value, type_, memo)
+                return
+            except TypeCheckError as exc:
+                errors[get_type_name(type_)] = exc
+
+        formatted_errors = indent(
+            "\n".join(f"{key}: {error}" for key, error in errors.items()), "  "
+        )
+    finally:
+        del errors  # avoid creating ref cycle
+    raise TypeCheckError(f"did not match any element in the union:\n{formatted_errors}")
+
+
+def check_uniontype(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    errors: dict[str, TypeCheckError] = {}
+    for type_ in args:
+        try:
+            check_type_internal(value, type_, memo)
+            return
+        except TypeCheckError as exc:
+            errors[get_type_name(type_)] = exc
+
+    formatted_errors = indent(
+        "\n".join(f"{key}: {error}" for key, error in errors.items()), "  "
+    )
+    raise TypeCheckError(f"did not match any element in the union:\n{formatted_errors}")
+
+
+def check_class(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isclass(value) and not isinstance(value, generic_alias_types):
+        raise TypeCheckError("is not a class")
+
+    if not args:
+        return
+
+    if isinstance(args[0], ForwardRef):
+        expected_class = evaluate_forwardref(args[0], memo)
+    else:
+        expected_class = args[0]
+
+    if expected_class is Any:
+        return
+    elif getattr(expected_class, "_is_protocol", False):
+        check_protocol(value, expected_class, (), memo)
+    elif isinstance(expected_class, TypeVar):
+        check_typevar(value, expected_class, (), memo, subclass_check=True)
+    elif get_origin(expected_class) is Union:
+        errors: dict[str, TypeCheckError] = {}
+        for arg in get_args(expected_class):
+            if arg is Any:
+                return
+
+            try:
+                check_class(value, type, (arg,), memo)
+                return
+            except TypeCheckError as exc:
+                errors[get_type_name(arg)] = exc
+        else:
+            formatted_errors = indent(
+                "\n".join(f"{key}: {error}" for key, error in errors.items()), "  "
+            )
+            raise TypeCheckError(
+                f"did not match any element in the union:\n{formatted_errors}"
+            )
+    elif not issubclass(value, expected_class):  # type: ignore[arg-type]
+        raise TypeCheckError(f"is not a subclass of {qualified_name(expected_class)}")
+
+
+def check_newtype(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    check_type_internal(value, origin_type.__supertype__, memo)
+
+
+def check_instance(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, origin_type):
+        raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}")
+
+
+def check_typevar(
+    value: Any,
+    origin_type: TypeVar,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+    *,
+    subclass_check: bool = False,
+) -> None:
+    if origin_type.__bound__ is not None:
+        annotation = (
+            Type[origin_type.__bound__] if subclass_check else origin_type.__bound__
+        )
+        check_type_internal(value, annotation, memo)
+    elif origin_type.__constraints__:
+        for constraint in origin_type.__constraints__:
+            annotation = Type[constraint] if subclass_check else constraint
+            try:
+                check_type_internal(value, annotation, memo)
+            except TypeCheckError:
+                pass
+            else:
+                break
+        else:
+            formatted_constraints = ", ".join(
+                get_type_name(constraint) for constraint in origin_type.__constraints__
+            )
+            raise TypeCheckError(
+                f"does not match any of the constraints " f"({formatted_constraints})"
+            )
+
+
+if typing_extensions is None:
+
+    def _is_literal_type(typ: object) -> bool:
+        return typ is typing.Literal
+
+else:
+
+    def _is_literal_type(typ: object) -> bool:
+        return typ is typing.Literal or typ is typing_extensions.Literal
+
+
+def check_literal(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    def get_literal_args(literal_args: tuple[Any, ...]) -> tuple[Any, ...]:
+        retval: list[Any] = []
+        for arg in literal_args:
+            if _is_literal_type(get_origin(arg)):
+                retval.extend(get_literal_args(arg.__args__))
+            elif arg is None or isinstance(arg, (int, str, bytes, bool, Enum)):
+                retval.append(arg)
+            else:
+                raise TypeError(
+                    f"Illegal literal value: {arg}"
+                )  # TypeError here is deliberate
+
+        return tuple(retval)
+
+    final_args = tuple(get_literal_args(args))
+    try:
+        index = final_args.index(value)
+    except ValueError:
+        pass
+    else:
+        if type(final_args[index]) is type(value):
+            return
+
+    formatted_args = ", ".join(repr(arg) for arg in final_args)
+    raise TypeCheckError(f"is not any of ({formatted_args})") from None
+
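+# Illustrative note (not part of the vendored typeguard source): the index()
+# lookup above is followed by an exact type comparison, so bool does not
+# satisfy an int literal even though True == 1 in Python.
+#
+#     from typing import Literal
+#     from typeguard import check_type
+#
+#     check_type(1, Literal[1, 2])     # passes
+#     check_type(True, Literal[1, 2])  # TypeCheckError: ... is not any of (1, 2)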
+
+def check_literal_string(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    check_type_internal(value, str, memo)
+
+
+def check_typeguard(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    check_type_internal(value, bool, memo)
+
+
+def check_none(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if value is not None:
+        raise TypeCheckError("is not None")
+
+
+def check_number(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if origin_type is complex and not isinstance(value, (complex, float, int)):
+        raise TypeCheckError("is neither complex, float or int")
+    elif origin_type is float and not isinstance(value, (float, int)):
+        raise TypeCheckError("is neither float or int")
+
+
+def check_io(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if origin_type is TextIO or (origin_type is IO and args == (str,)):
+        if not isinstance(value, TextIOBase):
+            raise TypeCheckError("is not a text based I/O object")
+    elif origin_type is BinaryIO or (origin_type is IO and args == (bytes,)):
+        if not isinstance(value, (RawIOBase, BufferedIOBase)):
+            raise TypeCheckError("is not a binary I/O object")
+    elif not isinstance(value, IOBase):
+        raise TypeCheckError("is not an I/O object")
+
+
+def check_protocol(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    subject: type[Any] = value if isclass(value) else type(value)
+
+    if subject in protocol_check_cache:
+        result_map = protocol_check_cache[subject]
+        if origin_type in result_map:
+            if exc := result_map[origin_type]:
+                raise exc
+            else:
+                return
+
+    # Collect a set of methods and non-method attributes present in the protocol
+    ignored_attrs = set(dir(typing.Protocol)) | {
+        "__annotations__",
+        "__non_callable_proto_members__",
+    }
+    expected_methods: dict[str, tuple[Any, Any]] = {}
+    expected_noncallable_members: dict[str, Any] = {}
+    for attrname in dir(origin_type):
+        # Skip attributes present in typing.Protocol
+        if attrname in ignored_attrs:
+            continue
+
+        member = getattr(origin_type, attrname)
+        if callable(member):
+            signature = inspect.signature(member)
+            argtypes = [
+                (p.annotation if p.annotation is not Parameter.empty else Any)
+                for p in signature.parameters.values()
+                if p.kind is not Parameter.KEYWORD_ONLY
+            ] or Ellipsis
+            return_annotation = (
+                signature.return_annotation
+                if signature.return_annotation is not Parameter.empty
+                else Any
+            )
+            expected_methods[attrname] = argtypes, return_annotation
+        else:
+            expected_noncallable_members[attrname] = member
+
+    for attrname, annotation in typing.get_type_hints(origin_type).items():
+        expected_noncallable_members[attrname] = annotation
+
+    subject_annotations = typing.get_type_hints(subject)
+
+    # Check that all required methods are present and their signatures are compatible
+    result_map = protocol_check_cache.setdefault(subject, {})
+    try:
+        for attrname, callable_args in expected_methods.items():
+            try:
+                method = getattr(subject, attrname)
+            except AttributeError:
+                if attrname in subject_annotations:
+                    raise TypeCheckError(
+                        f"is not compatible with the {origin_type.__qualname__} protocol "
+                        f"because its {attrname!r} attribute is not a method"
+                    ) from None
+                else:
+                    raise TypeCheckError(
+                        f"is not compatible with the {origin_type.__qualname__} protocol "
+                        f"because it has no method named {attrname!r}"
+                    ) from None
+
+            if not callable(method):
+                raise TypeCheckError(
+                    f"is not compatible with the {origin_type.__qualname__} protocol "
+                    f"because its {attrname!r} attribute is not a callable"
+                )
+
+            # TODO: raise exception on added keyword-only arguments without defaults
+            try:
+                check_callable(method, Callable, callable_args, memo)
+            except TypeCheckError as exc:
+                raise TypeCheckError(
+                    f"is not compatible with the {origin_type.__qualname__} protocol "
+                    f"because its {attrname!r} method {exc}"
+                ) from None
+
+        # Check that all required non-callable members are present
+        for attrname in expected_noncallable_members:
+            # TODO: implement assignability checks for non-callable members
+            if attrname not in subject_annotations and not hasattr(subject, attrname):
+                raise TypeCheckError(
+                    f"is not compatible with the {origin_type.__qualname__} protocol "
+                    f"because it has no attribute named {attrname!r}"
+                )
+    except TypeCheckError as exc:
+        result_map[origin_type] = exc
+        raise
+    else:
+        result_map[origin_type] = None
+
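+# Illustrative sketch (not part of the vendored typeguard source) of the
+# structural check performed above; ``HasClose`` is a hypothetical protocol.
+# Results are memoized per (subject class, protocol) in protocol_check_cache.
+#
+#     from typing import Protocol
+#     from typeguard import check_type
+#
+#     class HasClose(Protocol):
+#         def close(self) -> None: ...
+#
+#     class Resource:
+#         def close(self) -> None:
+#             pass
+#
+#     check_type(Resource(), HasClose)  # passes
+#     check_type(object(), HasClose)
+#     # TypeCheckError: ... has no method named 'close'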
+
+def check_byteslike(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, (bytearray, bytes, memoryview)):
+        raise TypeCheckError("is not bytes-like")
+
+
+def check_self(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if memo.self_type is None:
+        raise TypeCheckError("cannot be checked against Self outside of a method call")
+
+    if isclass(value):
+        if not issubclass(value, memo.self_type):
+            raise TypeCheckError(
+                f"is not an instance of the self type "
+                f"({qualified_name(memo.self_type)})"
+            )
+    elif not isinstance(value, memo.self_type):
+        raise TypeCheckError(
+            f"is not an instance of the self type ({qualified_name(memo.self_type)})"
+        )
+
+
+def check_paramspec(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    pass  # No-op for now
+
+
+def check_instanceof(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, origin_type):
+        raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}")
+
+
+def check_type_internal(
+    value: Any,
+    annotation: Any,
+    memo: TypeCheckMemo,
+) -> None:
+    """
+    Check that the given object is compatible with the given type annotation.
+
+    This function should only be used by type checker callables. Applications should use
+    :func:`~.check_type` instead.
+
+    :param value: the value to check
+    :param annotation: the type annotation to check against
+    :param memo: a memo object containing configuration and information necessary for
+        looking up forward references
+    """
+
+    if isinstance(annotation, ForwardRef):
+        try:
+            annotation = evaluate_forwardref(annotation, memo)
+        except NameError:
+            if memo.config.forward_ref_policy is ForwardRefPolicy.ERROR:
+                raise
+            elif memo.config.forward_ref_policy is ForwardRefPolicy.WARN:
+                warnings.warn(
+                    f"Cannot resolve forward reference {annotation.__forward_arg__!r}",
+                    TypeHintWarning,
+                    stacklevel=get_stacklevel(),
+                )
+
+            return
+
+    if annotation is Any or annotation is SubclassableAny or isinstance(value, Mock):
+        return
+
+    # Skip type checks if value is an instance of a class that inherits from Any
+    if not isclass(value) and SubclassableAny in type(value).__bases__:
+        return
+
+    extras: tuple[Any, ...]
+    origin_type = get_origin(annotation)
+    if origin_type is Annotated:
+        annotation, *extras_ = get_args(annotation)
+        extras = tuple(extras_)
+        origin_type = get_origin(annotation)
+    else:
+        extras = ()
+
+    if origin_type is not None:
+        args = get_args(annotation)
+
+        # Compatibility hack to distinguish between unparametrized and empty tuple
+        # (tuple[()]), necessary due to https://github.com/python/cpython/issues/91137
+        if origin_type in (tuple, Tuple) and annotation is not Tuple and not args:
+            args = ((),)
+    else:
+        origin_type = annotation
+        args = ()
+
+    for lookup_func in checker_lookup_functions:
+        checker = lookup_func(origin_type, args, extras)
+        if checker:
+            checker(value, origin_type, args, memo)
+            return
+
+    if isclass(origin_type):
+        if not isinstance(value, origin_type):
+            raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}")
+    elif type(origin_type) is str:  # noqa: E721
+        warnings.warn(
+            f"Skipping type check against {origin_type!r}; this looks like a "
+            f"string-form forward reference imported from another module",
+            TypeHintWarning,
+            stacklevel=get_stacklevel(),
+        )
+
+
+# Equality checks are applied to these
+origin_type_checkers = {
+    bytes: check_byteslike,
+    AbstractSet: check_set,
+    BinaryIO: check_io,
+    Callable: check_callable,
+    collections.abc.Callable: check_callable,
+    complex: check_number,
+    dict: check_mapping,
+    Dict: check_mapping,
+    float: check_number,
+    frozenset: check_set,
+    IO: check_io,
+    list: check_list,
+    List: check_list,
+    typing.Literal: check_literal,
+    Mapping: check_mapping,
+    MutableMapping: check_mapping,
+    None: check_none,
+    collections.abc.Mapping: check_mapping,
+    collections.abc.MutableMapping: check_mapping,
+    Sequence: check_sequence,
+    collections.abc.Sequence: check_sequence,
+    collections.abc.Set: check_set,
+    set: check_set,
+    Set: check_set,
+    TextIO: check_io,
+    tuple: check_tuple,
+    Tuple: check_tuple,
+    type: check_class,
+    Type: check_class,
+    Union: check_union,
+}
+if sys.version_info >= (3, 10):
+    origin_type_checkers[types.UnionType] = check_uniontype
+    origin_type_checkers[typing.TypeGuard] = check_typeguard
+if sys.version_info >= (3, 11):
+    origin_type_checkers.update(
+        {typing.LiteralString: check_literal_string, typing.Self: check_self}
+    )
+if typing_extensions is not None:
+    # On some Python versions, these may simply be re-exports from typing,
+    # but exactly which Python versions is subject to change,
+    # so it's best to err on the safe side
+    # and update the dictionary on all Python versions
+    # if typing_extensions is installed
+    origin_type_checkers[typing_extensions.Literal] = check_literal
+    origin_type_checkers[typing_extensions.LiteralString] = check_literal_string
+    origin_type_checkers[typing_extensions.Self] = check_self
+    origin_type_checkers[typing_extensions.TypeGuard] = check_typeguard
+
+
+def builtin_checker_lookup(
+    origin_type: Any, args: tuple[Any, ...], extras: tuple[Any, ...]
+) -> TypeCheckerCallable | None:
+    checker = origin_type_checkers.get(origin_type)
+    if checker is not None:
+        return checker
+    elif is_typeddict(origin_type):
+        return check_typed_dict
+    elif isclass(origin_type) and issubclass(
+        origin_type,
+        Tuple,  # type: ignore[arg-type]
+    ):
+        # NamedTuple
+        return check_tuple
+    elif getattr(origin_type, "_is_protocol", False):
+        return check_protocol
+    elif isinstance(origin_type, ParamSpec):
+        return check_paramspec
+    elif isinstance(origin_type, TypeVar):
+        return check_typevar
+    elif origin_type.__class__ is NewType:
+        # typing.NewType on Python 3.10+
+        return check_newtype
+    elif (
+        isfunction(origin_type)
+        and getattr(origin_type, "__module__", None) == "typing"
+        and getattr(origin_type, "__qualname__", "").startswith("NewType.")
+        and hasattr(origin_type, "__supertype__")
+    ):
+        # typing.NewType on Python 3.9 and below
+        return check_newtype
+
+    return None
+
+
+checker_lookup_functions.append(builtin_checker_lookup)
+
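+# Illustrative sketch (not part of the vendored typeguard source): a custom
+# lookup function inserted ahead of builtin_checker_lookup() takes precedence.
+# ``EvenInt`` and the two functions below are hypothetical examples.
+#
+#     class EvenInt:
+#         pass
+#
+#     def check_even_int(value, origin_type, args, memo):
+#         if not isinstance(value, int) or value % 2:
+#             raise TypeCheckError("is not an even integer")
+#
+#     def even_int_lookup(origin_type, args, extras):
+#         return check_even_int if origin_type is EvenInt else None
+#
+#     checker_lookup_functions.insert(0, even_int_lookup)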
+
+def load_plugins() -> None:
+    """
+    Load all type checker lookup functions from entry points.
+
+    All entry points from the ``typeguard.checker_lookup`` group are loaded, and the
+    returned lookup functions are added to :data:`typeguard.checker_lookup_functions`.
+
+    .. note:: This function is called implicitly on import, unless the
+        ``TYPEGUARD_DISABLE_PLUGIN_AUTOLOAD`` environment variable is present.
+    """
+
+    for ep in entry_points(group="typeguard.checker_lookup"):
+        try:
+            plugin = ep.load()
+        except Exception as exc:
+            warnings.warn(
+                f"Failed to load plugin {ep.name!r}: " f"{qualified_name(exc)}: {exc}",
+                stacklevel=2,
+            )
+            continue
+
+        if not callable(plugin):
+            warnings.warn(
+                f"Plugin {ep} returned a non-callable object: {plugin!r}", stacklevel=2
+            )
+            continue
+
+        checker_lookup_functions.insert(0, plugin)
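+
+
+# Illustrative sketch (not part of the vendored typeguard source): a plugin
+# package could expose a lookup function through the entry point group used
+# above. ``myplugin`` is a hypothetical distribution name.
+#
+#     # pyproject.toml of the plugin package
+#     # [project.entry-points."typeguard.checker_lookup"]
+#     # myplugin = "myplugin.checkers:lookup"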
diff --git a/setuptools/_vendor/typeguard/_config.py b/setuptools/_vendor/typeguard/_config.py
new file mode 100644
index 0000000000..36efad5396
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_config.py
@@ -0,0 +1,108 @@
+from __future__ import annotations
+
+from collections.abc import Iterable
+from dataclasses import dataclass
+from enum import Enum, auto
+from typing import TYPE_CHECKING, TypeVar
+
+if TYPE_CHECKING:
+    from ._functions import TypeCheckFailCallback
+
+T = TypeVar("T")
+
+
+class ForwardRefPolicy(Enum):
+    """
+    Defines how unresolved forward references are handled.
+
+    Members:
+
+    * ``ERROR``: propagate the :exc:`NameError` when the forward reference lookup fails
+    * ``WARN``: emit a :class:`~.TypeHintWarning` if the forward reference lookup fails
+    * ``IGNORE``: silently skip checks for unresolvable forward references
+    """
+
+    ERROR = auto()
+    WARN = auto()
+    IGNORE = auto()
+
+
+class CollectionCheckStrategy(Enum):
+    """
+    Specifies how thoroughly the contents of collections are type checked.
+
+    This has an effect on the following built-in checkers:
+
+    * ``AbstractSet``
+    * ``Dict``
+    * ``List``
+    * ``Mapping``
+    * ``Set``
+    * ``Tuple[<type>, ...]`` (arbitrarily sized tuples)
+
+    Members:
+
+    * ``FIRST_ITEM``: check only the first item
+    * ``ALL_ITEMS``: check all items
+    """
+
+    FIRST_ITEM = auto()
+    ALL_ITEMS = auto()
+
+    def iterate_samples(self, collection: Iterable[T]) -> Iterable[T]:
+        if self is CollectionCheckStrategy.FIRST_ITEM:
+            try:
+                return [next(iter(collection))]
+            except StopIteration:
+                return ()
+        else:
+            return collection
+
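+# Illustrative sketch (not part of the vendored typeguard source): with
+# FIRST_ITEM only one element is sampled, so a bad value later in the
+# collection goes unnoticed.
+#
+#     from typeguard import CollectionCheckStrategy, check_type
+#
+#     check_type([1, "oops"], list[int],
+#                collection_check_strategy=CollectionCheckStrategy.FIRST_ITEM)
+#     # passes: only the first item is checked
+#     check_type([1, "oops"], list[int],
+#                collection_check_strategy=CollectionCheckStrategy.ALL_ITEMS)
+#     # TypeCheckError: item 1 of list is not an instance of int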
+
+@dataclass
+class TypeCheckConfiguration:
+    """
+    You can change Typeguard's behavior with these settings.
+
+    .. attribute:: typecheck_fail_callback
+       :type: Callable[[TypeCheckError, TypeCheckMemo], Any]
+
+         Callable that is called when type checking fails.
+
+         Default: ``None`` (the :exc:`~.TypeCheckError` is raised directly)
+
+    .. attribute:: forward_ref_policy
+       :type: ForwardRefPolicy
+
+         Specifies what to do when a forward reference fails to resolve.
+
+         Default: ``WARN``
+
+    .. attribute:: collection_check_strategy
+       :type: CollectionCheckStrategy
+
+         Specifies how thoroughly the contents of collections (list, dict, etc.) are
+         type checked.
+
+         Default: ``FIRST_ITEM``
+
+    .. attribute:: debug_instrumentation
+       :type: bool
+
+         If set to ``True``, the code of modules or functions instrumented by typeguard
+         is printed to ``sys.stderr`` after the instrumentation is done
+
+         Requires Python 3.9 or newer.
+
+         Default: ``False``
+    """
+
+    forward_ref_policy: ForwardRefPolicy = ForwardRefPolicy.WARN
+    typecheck_fail_callback: TypeCheckFailCallback | None = None
+    collection_check_strategy: CollectionCheckStrategy = (
+        CollectionCheckStrategy.FIRST_ITEM
+    )
+    debug_instrumentation: bool = False
+
+
+global_config = TypeCheckConfiguration()
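+
+
+# Illustrative sketch (not part of the vendored typeguard source), assuming
+# the package re-exports this object as ``typeguard.config``: mutating it
+# affects all code that relies on the shared configuration.
+#
+#     from typeguard import CollectionCheckStrategy, config, warn_on_error
+#
+#     config.collection_check_strategy = CollectionCheckStrategy.ALL_ITEMS
+#     config.typecheck_fail_callback = warn_on_error  # warn instead of raising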
diff --git a/setuptools/_vendor/typeguard/_decorators.py b/setuptools/_vendor/typeguard/_decorators.py
new file mode 100644
index 0000000000..cf3253351f
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_decorators.py
@@ -0,0 +1,235 @@
+from __future__ import annotations
+
+import ast
+import inspect
+import sys
+from collections.abc import Sequence
+from functools import partial
+from inspect import isclass, isfunction
+from types import CodeType, FrameType, FunctionType
+from typing import TYPE_CHECKING, Any, Callable, ForwardRef, TypeVar, cast, overload
+from warnings import warn
+
+from ._config import CollectionCheckStrategy, ForwardRefPolicy, global_config
+from ._exceptions import InstrumentationWarning
+from ._functions import TypeCheckFailCallback
+from ._transformer import TypeguardTransformer
+from ._utils import Unset, function_name, get_stacklevel, is_method_of, unset
+
+if TYPE_CHECKING:
+    from typeshed.stdlib.types import _Cell
+
+    _F = TypeVar("_F")
+
+    def typeguard_ignore(f: _F) -> _F:
+        """This decorator is a noop during static type-checking."""
+        return f
+
+else:
+    from typing import no_type_check as typeguard_ignore  # noqa: F401
+
+T_CallableOrType = TypeVar("T_CallableOrType", bound=Callable[..., Any])
+
+
+def make_cell(value: object) -> _Cell:
+    return (lambda: value).__closure__[0]  # type: ignore[index]
+
+
+def find_target_function(
+    new_code: CodeType, target_path: Sequence[str], firstlineno: int
+) -> CodeType | None:
+    target_name = target_path[0]
+    for const in new_code.co_consts:
+        if isinstance(const, CodeType):
+            if const.co_name == target_name:
+                if const.co_firstlineno == firstlineno:
+                    return const
+                elif len(target_path) > 1:
+                    target_code = find_target_function(
+                        const, target_path[1:], firstlineno
+                    )
+                    if target_code:
+                        return target_code
+
+    return None
+
+
+def instrument(f: T_CallableOrType) -> FunctionType | str:
+    if not getattr(f, "__code__", None):
+        return "no code associated"
+    elif not getattr(f, "__module__", None):
+        return "__module__ attribute is not set"
+    elif f.__code__.co_filename == "<stdin>":
+        return "cannot instrument functions defined in a REPL"
+    elif hasattr(f, "__wrapped__"):
+        return (
+            "@typechecked only supports instrumenting functions wrapped with "
+            "@classmethod, @staticmethod or @property"
+        )
+
+    target_path = [item for item in f.__qualname__.split(".") if item != ""]
+    module_source = inspect.getsource(sys.modules[f.__module__])
+    module_ast = ast.parse(module_source)
+    instrumentor = TypeguardTransformer(target_path, f.__code__.co_firstlineno)
+    instrumentor.visit(module_ast)
+
+    if not instrumentor.target_node or instrumentor.target_lineno is None:
+        return "instrumentor did not find the target function"
+
+    module_code = compile(module_ast, f.__code__.co_filename, "exec", dont_inherit=True)
+    new_code = find_target_function(
+        module_code, target_path, instrumentor.target_lineno
+    )
+    if not new_code:
+        return "cannot find the target function in the AST"
+
+    if global_config.debug_instrumentation and sys.version_info >= (3, 9):
+        # Find the matching AST node, then unparse it to source and print to stderr
+        print(
+            f"Source code of {f.__qualname__}() after instrumentation:"
+            "\n----------------------------------------------",
+            file=sys.stderr,
+        )
+        print(ast.unparse(instrumentor.target_node), file=sys.stderr)
+        print(
+            "----------------------------------------------",
+            file=sys.stderr,
+        )
+
+    closure = f.__closure__
+    if new_code.co_freevars != f.__code__.co_freevars:
+        # Create a new closure and find values for the new free variables
+        frame = cast(FrameType, inspect.currentframe())
+        frame = cast(FrameType, frame.f_back)
+        frame_locals = cast(FrameType, frame.f_back).f_locals
+        cells: list[_Cell] = []
+        for key in new_code.co_freevars:
+            if key in instrumentor.names_used_in_annotations:
+                # Find the value and make a new cell from it
+                value = frame_locals.get(key) or ForwardRef(key)
+                cells.append(make_cell(value))
+            else:
+                # Reuse the cell from the existing closure
+                assert f.__closure__
+                cells.append(f.__closure__[f.__code__.co_freevars.index(key)])
+
+        closure = tuple(cells)
+
+    new_function = FunctionType(new_code, f.__globals__, f.__name__, closure=closure)
+    new_function.__module__ = f.__module__
+    new_function.__name__ = f.__name__
+    new_function.__qualname__ = f.__qualname__
+    new_function.__annotations__ = f.__annotations__
+    new_function.__doc__ = f.__doc__
+    new_function.__defaults__ = f.__defaults__
+    new_function.__kwdefaults__ = f.__kwdefaults__
+    return new_function
+
+
+@overload
+def typechecked(
+    *,
+    forward_ref_policy: ForwardRefPolicy | Unset = unset,
+    typecheck_fail_callback: TypeCheckFailCallback | Unset = unset,
+    collection_check_strategy: CollectionCheckStrategy | Unset = unset,
+    debug_instrumentation: bool | Unset = unset,
+) -> Callable[[T_CallableOrType], T_CallableOrType]: ...
+
+
+@overload
+def typechecked(target: T_CallableOrType) -> T_CallableOrType: ...
+
+
+def typechecked(
+    target: T_CallableOrType | None = None,
+    *,
+    forward_ref_policy: ForwardRefPolicy | Unset = unset,
+    typecheck_fail_callback: TypeCheckFailCallback | Unset = unset,
+    collection_check_strategy: CollectionCheckStrategy | Unset = unset,
+    debug_instrumentation: bool | Unset = unset,
+) -> Any:
+    """
+    Instrument the target function to perform run-time type checking.
+
+    This decorator recompiles the target function, injecting code to type check
+    arguments, return values, yield values (excluding ``yield from``) and assignments to
+    annotated local variables.
+
+    This can also be used as a class decorator. This will instrument all type annotated
+    methods, including :func:`@classmethod <classmethod>`,
+    :func:`@staticmethod <staticmethod>`, and :class:`@property <property>` decorated
+    methods in the class.
+
+    .. note:: When Python is run in optimized mode (``-O`` or ``-OO``), this decorator
+        is a no-op. This is a feature meant for selectively introducing type checking
+        into a code base where the checks aren't meant to be run in production.
+
+    :param target: the function or class to enable type checking for
+    :param forward_ref_policy: override for
+        :attr:`.TypeCheckConfiguration.forward_ref_policy`
+    :param typecheck_fail_callback: override for
+        :attr:`.TypeCheckConfiguration.typecheck_fail_callback`
+    :param collection_check_strategy: override for
+        :attr:`.TypeCheckConfiguration.collection_check_strategy`
+    :param debug_instrumentation: override for
+        :attr:`.TypeCheckConfiguration.debug_instrumentation`
+
+    """
+    if target is None:
+        return partial(
+            typechecked,
+            forward_ref_policy=forward_ref_policy,
+            typecheck_fail_callback=typecheck_fail_callback,
+            collection_check_strategy=collection_check_strategy,
+            debug_instrumentation=debug_instrumentation,
+        )
+
+    if not __debug__:
+        return target
+
+    if isclass(target):
+        for key, attr in target.__dict__.items():
+            if is_method_of(attr, target):
+                retval = instrument(attr)
+                if isfunction(retval):
+                    setattr(target, key, retval)
+            elif isinstance(attr, (classmethod, staticmethod)):
+                if is_method_of(attr.__func__, target):
+                    retval = instrument(attr.__func__)
+                    if isfunction(retval):
+                        wrapper = attr.__class__(retval)
+                        setattr(target, key, wrapper)
+            elif isinstance(attr, property):
+                kwargs: dict[str, Any] = dict(doc=attr.__doc__)
+                for name in ("fset", "fget", "fdel"):
+                    property_func = kwargs[name] = getattr(attr, name)
+                    if is_method_of(property_func, target):
+                        retval = instrument(property_func)
+                        if isfunction(retval):
+                            kwargs[name] = retval
+
+                setattr(target, key, attr.__class__(**kwargs))
+
+        return target
+
+    # Find either the first Python wrapper or the actual function
+    wrapper_class: (
+        type[classmethod[Any, Any, Any]] | type[staticmethod[Any, Any]] | None
+    ) = None
+    if isinstance(target, (classmethod, staticmethod)):
+        wrapper_class = target.__class__
+        target = target.__func__
+
+    retval = instrument(target)
+    if isinstance(retval, str):
+        warn(
+            f"{retval} -- not typechecking {function_name(target)}",
+            InstrumentationWarning,
+            stacklevel=get_stacklevel(),
+        )
+        return target
+
+    if wrapper_class is None:
+        return retval
+    else:
+        return wrapper_class(retval)
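+
+
+# Illustrative usage sketch (not part of the vendored typeguard source) for
+# the @typechecked decorator defined above:
+#
+#     from typeguard import typechecked
+#
+#     @typechecked
+#     def greet(name: str) -> str:
+#         return f"hello {name}"
+#
+#     greet("world")  # passes
+#     greet(42)       # TypeCheckError for argument "name"
+#
+# Under ``python -O`` (where __debug__ is False) the decorator returns the
+# target unchanged.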
diff --git a/setuptools/_vendor/typeguard/_exceptions.py b/setuptools/_vendor/typeguard/_exceptions.py
new file mode 100644
index 0000000000..625437a649
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_exceptions.py
@@ -0,0 +1,42 @@
+from collections import deque
+from typing import Deque
+
+
+class TypeHintWarning(UserWarning):
+    """
+    A warning that is emitted when a type hint in string form could not be resolved to
+    an actual type.
+    """
+
+
+class TypeCheckWarning(UserWarning):
+    """Emitted by typeguard's type checkers when a type mismatch is detected."""
+
+    def __init__(self, message: str):
+        super().__init__(message)
+
+
+class InstrumentationWarning(UserWarning):
+    """Emitted when there's a problem with instrumenting a function for type checks."""
+
+    def __init__(self, message: str):
+        super().__init__(message)
+
+
+class TypeCheckError(Exception):
+    """
+    Raised by typeguard's type checkers when a type mismatch is detected.
+    """
+
+    def __init__(self, message: str):
+        super().__init__(message)
+        self._path: Deque[str] = deque()
+
+    def append_path_element(self, element: str) -> None:
+        self._path.append(element)
+
+    def __str__(self) -> str:
+        if self._path:
+            return " of ".join(self._path) + " " + str(self.args[0])
+        else:
+            return str(self.args[0])
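+
+
+# Illustrative note (not part of the vendored typeguard source):
+# append_path_element() collects elements innermost-first and __str__() joins
+# them with " of ", so a failure two levels deep renders like:
+#
+#     item 0 of value of key 'points' of argument "data" (dict) is not an
+#     instance of int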
diff --git a/setuptools/_vendor/typeguard/_functions.py b/setuptools/_vendor/typeguard/_functions.py
new file mode 100644
index 0000000000..28497856a3
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_functions.py
@@ -0,0 +1,308 @@
+from __future__ import annotations
+
+import sys
+import warnings
+from typing import Any, Callable, NoReturn, TypeVar, Union, overload
+
+from . import _suppression
+from ._checkers import BINARY_MAGIC_METHODS, check_type_internal
+from ._config import (
+    CollectionCheckStrategy,
+    ForwardRefPolicy,
+    TypeCheckConfiguration,
+)
+from ._exceptions import TypeCheckError, TypeCheckWarning
+from ._memo import TypeCheckMemo
+from ._utils import get_stacklevel, qualified_name
+
+if sys.version_info >= (3, 11):
+    from typing import Literal, Never, TypeAlias
+else:
+    from typing_extensions import Literal, Never, TypeAlias
+
+T = TypeVar("T")
+TypeCheckFailCallback: TypeAlias = Callable[[TypeCheckError, TypeCheckMemo], Any]
+
+
+@overload
+def check_type(
+    value: object,
+    expected_type: type[T],
+    *,
+    forward_ref_policy: ForwardRefPolicy = ...,
+    typecheck_fail_callback: TypeCheckFailCallback | None = ...,
+    collection_check_strategy: CollectionCheckStrategy = ...,
+) -> T: ...
+
+
+@overload
+def check_type(
+    value: object,
+    expected_type: Any,
+    *,
+    forward_ref_policy: ForwardRefPolicy = ...,
+    typecheck_fail_callback: TypeCheckFailCallback | None = ...,
+    collection_check_strategy: CollectionCheckStrategy = ...,
+) -> Any: ...
+
+
+def check_type(
+    value: object,
+    expected_type: Any,
+    *,
+    forward_ref_policy: ForwardRefPolicy = TypeCheckConfiguration().forward_ref_policy,
+    typecheck_fail_callback: TypeCheckFailCallback | None = (
+        TypeCheckConfiguration().typecheck_fail_callback
+    ),
+    collection_check_strategy: CollectionCheckStrategy = (
+        TypeCheckConfiguration().collection_check_strategy
+    ),
+) -> Any:
+    """
+    Ensure that ``value`` matches ``expected_type``.
+
+    The types from the :mod:`typing` module do not support :func:`isinstance` or
+    :func:`issubclass` so a number of type specific checks are required. This function
+    knows which checker to call for which type.
+
+    This function wraps :func:`~.check_type_internal` in the following ways:
+
+    * Respects type checking suppression (:func:`~.suppress_type_checks`)
+    * Forms a :class:`~.TypeCheckMemo` from the current stack frame
+    * Calls the configured type check fail callback if the check fails
+
+    Note that this function is independent of the globally shared configuration in
+    :data:`typeguard.config`. This means that usage within libraries is safe from being
+    affected by configuration changes made by other libraries or by the integrating
+    application. Instead, configuration options have the same default values as their
+    corresponding fields in :class:`TypeCheckConfiguration`.
+
+    :param value: value to be checked against ``expected_type``
+    :param expected_type: a class or generic type instance, or a tuple of such things
+    :param forward_ref_policy: see :attr:`TypeCheckConfiguration.forward_ref_policy`
+    :param typecheck_fail_callback:
+        see :attr:`TypeCheckConfiguration.typecheck_fail_callback`
+    :param collection_check_strategy:
+        see :attr:`TypeCheckConfiguration.collection_check_strategy`
+    :return: ``value``, unmodified
+    :raises TypeCheckError: if there is a type mismatch
+
+    """
+    if type(expected_type) is tuple:
+        expected_type = Union[expected_type]
+
+    config = TypeCheckConfiguration(
+        forward_ref_policy=forward_ref_policy,
+        typecheck_fail_callback=typecheck_fail_callback,
+        collection_check_strategy=collection_check_strategy,
+    )
+
+    if _suppression.type_checks_suppressed or expected_type is Any:
+        return value
+
+    frame = sys._getframe(1)
+    memo = TypeCheckMemo(frame.f_globals, frame.f_locals, config=config)
+    try:
+        check_type_internal(value, expected_type, memo)
+    except TypeCheckError as exc:
+        exc.append_path_element(qualified_name(value, add_class_prefix=True))
+        if config.typecheck_fail_callback:
+            config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return value
+
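+# Illustrative usage sketch (not part of the vendored typeguard source) for
+# check_type():
+#
+#     from typeguard import check_type
+#
+#     port = check_type(8080, int)  # returns the value unchanged
+#     check_type("8080", int)
+#     # TypeCheckError: str is not an instance of int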
+
+def check_argument_types(
+    func_name: str,
+    arguments: dict[str, tuple[Any, Any]],
+    memo: TypeCheckMemo,
+) -> Literal[True]:
+    if _suppression.type_checks_suppressed:
+        return True
+
+    for argname, (value, annotation) in arguments.items():
+        if annotation is NoReturn or annotation is Never:
+            exc = TypeCheckError(
+                f"{func_name}() was declared never to be called but it was"
+            )
+            if memo.config.typecheck_fail_callback:
+                memo.config.typecheck_fail_callback(exc, memo)
+            else:
+                raise exc
+
+        try:
+            check_type_internal(value, annotation, memo)
+        except TypeCheckError as exc:
+            qualname = qualified_name(value, add_class_prefix=True)
+            exc.append_path_element(f'argument "{argname}" ({qualname})')
+            if memo.config.typecheck_fail_callback:
+                memo.config.typecheck_fail_callback(exc, memo)
+            else:
+                raise
+
+    return True
+
+
+def check_return_type(
+    func_name: str,
+    retval: T,
+    annotation: Any,
+    memo: TypeCheckMemo,
+) -> T:
+    if _suppression.type_checks_suppressed:
+        return retval
+
+    if annotation is NoReturn or annotation is Never:
+        exc = TypeCheckError(f"{func_name}() was declared never to return but it did")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise exc
+
+    try:
+        check_type_internal(retval, annotation, memo)
+    except TypeCheckError as exc:
+        # Allow NotImplemented if this is a binary magic method (__eq__() et al)
+        if retval is NotImplemented and annotation is bool:
+            # This does not (and cannot) check if it's actually a method
+            func_name = func_name.rsplit(".", 1)[-1]
+            if func_name in BINARY_MAGIC_METHODS:
+                return retval
+
+        qualname = qualified_name(retval, add_class_prefix=True)
+        exc.append_path_element(f"the return value ({qualname})")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return retval
+
+
+def check_send_type(
+    func_name: str,
+    sendval: T,
+    annotation: Any,
+    memo: TypeCheckMemo,
+) -> T:
+    if _suppression.type_checks_suppressed:
+        return sendval
+
+    if annotation is NoReturn or annotation is Never:
+        exc = TypeCheckError(
+            f"{func_name}() was declared never to be sent a value to but it was"
+        )
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise exc
+
+    try:
+        check_type_internal(sendval, annotation, memo)
+    except TypeCheckError as exc:
+        qualname = qualified_name(sendval, add_class_prefix=True)
+        exc.append_path_element(f"the value sent to generator ({qualname})")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return sendval
+
+
+def check_yield_type(
+    func_name: str,
+    yieldval: T,
+    annotation: Any,
+    memo: TypeCheckMemo,
+) -> T:
+    if _suppression.type_checks_suppressed:
+        return yieldval
+
+    if annotation is NoReturn or annotation is Never:
+        exc = TypeCheckError(f"{func_name}() was declared never to yield but it did")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise exc
+
+    try:
+        check_type_internal(yieldval, annotation, memo)
+    except TypeCheckError as exc:
+        qualname = qualified_name(yieldval, add_class_prefix=True)
+        exc.append_path_element(f"the yielded value ({qualname})")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return yieldval
+
+
+def check_variable_assignment(
+    value: object, varname: str, annotation: Any, memo: TypeCheckMemo
+) -> Any:
+    if _suppression.type_checks_suppressed:
+        return value
+
+    try:
+        check_type_internal(value, annotation, memo)
+    except TypeCheckError as exc:
+        qualname = qualified_name(value, add_class_prefix=True)
+        exc.append_path_element(f"value assigned to {varname} ({qualname})")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return value
+
+
+def check_multi_variable_assignment(
+    value: Any, targets: list[dict[str, Any]], memo: TypeCheckMemo
+) -> Any:
+    if max(len(target) for target in targets) == 1:
+        iterated_values = [value]
+    else:
+        iterated_values = list(value)
+
+    if not _suppression.type_checks_suppressed:
+        for expected_types in targets:
+            value_index = 0
+            for ann_index, (varname, expected_type) in enumerate(
+                expected_types.items()
+            ):
+                if varname.startswith("*"):
+                    varname = varname[1:]
+                    keys_left = len(expected_types) - 1 - ann_index
+                    next_value_index = len(iterated_values) - keys_left
+                    obj: object = iterated_values[value_index:next_value_index]
+                    value_index = next_value_index
+                else:
+                    obj = iterated_values[value_index]
+                    value_index += 1
+
+                try:
+                    check_type_internal(obj, expected_type, memo)
+                except TypeCheckError as exc:
+                    qualname = qualified_name(obj, add_class_prefix=True)
+                    exc.append_path_element(f"value assigned to {varname} ({qualname})")
+                    if memo.config.typecheck_fail_callback:
+                        memo.config.typecheck_fail_callback(exc, memo)
+                    else:
+                        raise
+
+    return iterated_values[0] if len(iterated_values) == 1 else iterated_values
+
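+# Illustrative sketch (not part of the vendored typeguard source): for
+# instrumented code like ``x, *y = source()`` with ``x: int`` and
+# ``y: list[str]``, the transformer emits a call along the lines of
+#
+#     x, *y = check_multi_variable_assignment(
+#         source(), [{"x": int, "*y": list[str]}], memo
+#     )
+#
+# where a leading "*" in the key marks a starred target that absorbs a slice
+# of the iterated values instead of a single element.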
+
+def warn_on_error(exc: TypeCheckError, memo: TypeCheckMemo) -> None:
+    """
+    Emit a warning on a type mismatch.
+
+    This is intended to be used as an error handler in
+    :attr:`TypeCheckConfiguration.typecheck_fail_callback`.
+
+    """
+    warnings.warn(TypeCheckWarning(str(exc)), stacklevel=get_stacklevel())
diff --git a/setuptools/_vendor/typeguard/_importhook.py b/setuptools/_vendor/typeguard/_importhook.py
new file mode 100644
index 0000000000..8590540a5a
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_importhook.py
@@ -0,0 +1,213 @@
+from __future__ import annotations
+
+import ast
+import sys
+import types
+from collections.abc import Callable, Iterable
+from importlib.abc import MetaPathFinder
+from importlib.machinery import ModuleSpec, SourceFileLoader
+from importlib.util import cache_from_source, decode_source
+from inspect import isclass
+from os import PathLike
+from types import CodeType, ModuleType, TracebackType
+from typing import Sequence, TypeVar
+from unittest.mock import patch
+
+from ._config import global_config
+from ._transformer import TypeguardTransformer
+
+if sys.version_info >= (3, 12):
+    from collections.abc import Buffer
+else:
+    from typing_extensions import Buffer
+
+if sys.version_info >= (3, 11):
+    from typing import ParamSpec
+else:
+    from typing_extensions import ParamSpec
+
+if sys.version_info >= (3, 10):
+    from importlib.metadata import PackageNotFoundError, version
+else:
+    from importlib_metadata import PackageNotFoundError, version
+
+try:
+    OPTIMIZATION = "typeguard" + "".join(version("typeguard").split(".")[:3])
+except PackageNotFoundError:
+    OPTIMIZATION = "typeguard"
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+
+# The name of this function is magical
+def _call_with_frames_removed(
+    f: Callable[P, T], *args: P.args, **kwargs: P.kwargs
+) -> T:
+    return f(*args, **kwargs)
+
+
+def optimized_cache_from_source(path: str, debug_override: bool | None = None) -> str:
+    return cache_from_source(path, debug_override, optimization=OPTIMIZATION)
+
+
+class TypeguardLoader(SourceFileLoader):
+    @staticmethod
+    def source_to_code(
+        data: Buffer | str | ast.Module | ast.Expression | ast.Interactive,
+        path: Buffer | str | PathLike[str] = "",
+    ) -> CodeType:
+        if isinstance(data, (ast.Module, ast.Expression, ast.Interactive)):
+            tree = data
+        else:
+            if isinstance(data, str):
+                source = data
+            else:
+                source = decode_source(data)
+
+            tree = _call_with_frames_removed(
+                ast.parse,
+                source,
+                path,
+                "exec",
+            )
+
+        tree = TypeguardTransformer().visit(tree)
+        ast.fix_missing_locations(tree)
+
+        if global_config.debug_instrumentation and sys.version_info >= (3, 9):
+            print(
+                f"Source code of {path!r} after instrumentation:\n"
+                "----------------------------------------------",
+                file=sys.stderr,
+            )
+            print(ast.unparse(tree), file=sys.stderr)
+            print("----------------------------------------------", file=sys.stderr)
+
+        return _call_with_frames_removed(
+            compile, tree, path, "exec", 0, dont_inherit=True
+        )
+
+    def exec_module(self, module: ModuleType) -> None:
+        # Use a custom optimization marker – the import lock should make this monkey
+        # patch safe
+        with patch(
+            "importlib._bootstrap_external.cache_from_source",
+            optimized_cache_from_source,
+        ):
+            super().exec_module(module)
+
+
+class TypeguardFinder(MetaPathFinder):
+    """
+    Wraps another path finder and instruments the module with
+    :func:`@typechecked <typeguard.typechecked>` if :meth:`should_instrument` returns
+    ``True``.
+
+    Should not be used directly, but rather via :func:`~.install_import_hook`.
+
+    .. versionadded:: 2.6
+    """
+
+    def __init__(self, packages: list[str] | None, original_pathfinder: MetaPathFinder):
+        self.packages = packages
+        self._original_pathfinder = original_pathfinder
+
+    def find_spec(
+        self,
+        fullname: str,
+        path: Sequence[str] | None,
+        target: types.ModuleType | None = None,
+    ) -> ModuleSpec | None:
+        if self.should_instrument(fullname):
+            spec = self._original_pathfinder.find_spec(fullname, path, target)
+            if spec is not None and isinstance(spec.loader, SourceFileLoader):
+                spec.loader = TypeguardLoader(spec.loader.name, spec.loader.path)
+                return spec
+
+        return None
+
+    def should_instrument(self, module_name: str) -> bool:
+        """
+        Determine whether the module with the given name should be instrumented.
+
+        :param module_name: full name of the module that is about to be imported (e.g.
+            ``xyz.abc``)
+
+        """
+        if self.packages is None:
+            return True
+
+        for package in self.packages:
+            if module_name == package or module_name.startswith(package + "."):
+                return True
+
+        return False
+
+
+class ImportHookManager:
+    """
+    A handle that can be used to uninstall the Typeguard import hook.
+    """
+
+    def __init__(self, hook: MetaPathFinder):
+        self.hook = hook
+
+    def __enter__(self) -> None:
+        pass
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException],
+        exc_val: BaseException,
+        exc_tb: TracebackType,
+    ) -> None:
+        self.uninstall()
+
+    def uninstall(self) -> None:
+        """Uninstall the import hook."""
+        try:
+            sys.meta_path.remove(self.hook)
+        except ValueError:
+            pass  # already removed
+
+
+def install_import_hook(
+    packages: Iterable[str] | None = None,
+    *,
+    cls: type[TypeguardFinder] = TypeguardFinder,
+) -> ImportHookManager:
+    """
+    Install an import hook that instruments functions for automatic type checking.
+
+    This only affects modules loaded **after** this hook has been installed.
+
+    :param packages: an iterable of package names to instrument, or ``None`` to
+        instrument all packages
+    :param cls: a custom meta path finder class
+    :return: a context manager that uninstalls the hook on exit (or when you call
+        ``.uninstall()``)
+
+    .. versionadded:: 2.6
+
+    """
+    if packages is None:
+        target_packages: list[str] | None = None
+    elif isinstance(packages, str):
+        target_packages = [packages]
+    else:
+        target_packages = list(packages)
+
+    for finder in sys.meta_path:
+        if (
+            isclass(finder)
+            and finder.__name__ == "PathFinder"
+            and hasattr(finder, "find_spec")
+        ):
+            break
+    else:
+        raise RuntimeError("Cannot find a PathFinder in sys.meta_path")
+
+    hook = cls(target_packages, finder)
+    sys.meta_path.insert(0, hook)
+    return ImportHookManager(hook)
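+
+
+# Illustrative usage sketch (not part of the vendored typeguard source);
+# ``mypackage`` is a hypothetical package name. The hook only affects modules
+# imported after installation:
+#
+#     from typeguard import install_import_hook
+#
+#     with install_import_hook(["mypackage"]):
+#         import mypackage.core  # instrumented on import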
diff --git a/setuptools/_vendor/typeguard/_memo.py b/setuptools/_vendor/typeguard/_memo.py
new file mode 100644
index 0000000000..1d0d80c66d
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_memo.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from typing import Any
+
+from typeguard._config import TypeCheckConfiguration, global_config
+
+
+class TypeCheckMemo:
+    """
+    Contains information necessary for type checkers to do their work.
+
+    .. attribute:: globals
+       :type: dict[str, Any]
+
+        Dictionary of global variables to use for resolving forward references.
+
+    .. attribute:: locals
+       :type: dict[str, Any]
+
+        Dictionary of local variables to use for resolving forward references.
+
+    .. attribute:: self_type
+       :type: type | None
+
+        When running type checks within an instance method or class method, this is the
+        class object that the first argument (usually named ``self`` or ``cls``) refers
+        to.
+
+    .. attribute:: config
+       :type: TypeCheckConfiguration
+
+         Contains the configuration for a particular set of type checking operations.
+    """
+
+    __slots__ = "globals", "locals", "self_type", "config"
+
+    def __init__(
+        self,
+        globals: dict[str, Any],
+        locals: dict[str, Any],
+        *,
+        self_type: type | None = None,
+        config: TypeCheckConfiguration = global_config,
+    ):
+        self.globals = globals
+        self.locals = locals
+        self.self_type = self_type
+        self.config = config
diff --git a/setuptools/_vendor/typeguard/_pytest_plugin.py b/setuptools/_vendor/typeguard/_pytest_plugin.py
new file mode 100644
index 0000000000..7b2f494ec7
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_pytest_plugin.py
@@ -0,0 +1,127 @@
+from __future__ import annotations
+
+import sys
+import warnings
+from typing import TYPE_CHECKING, Any, Literal
+
+from typeguard._config import CollectionCheckStrategy, ForwardRefPolicy, global_config
+from typeguard._exceptions import InstrumentationWarning
+from typeguard._importhook import install_import_hook
+from typeguard._utils import qualified_name, resolve_reference
+
+if TYPE_CHECKING:
+    from pytest import Config, Parser
+
+
+def pytest_addoption(parser: Parser) -> None:
+    def add_ini_option(
+        opt_type: (
+            Literal["string", "paths", "pathlist", "args", "linelist", "bool"] | None
+        ),
+    ) -> None:
+        parser.addini(
+            group.options[-1].names()[0][2:],
+            group.options[-1].attrs()["help"],
+            opt_type,
+        )
+
+    group = parser.getgroup("typeguard")
+    group.addoption(
+        "--typeguard-packages",
+        action="store",
+        help="comma separated name list of packages and modules to instrument for "
+        "type checking, or :all: to instrument all modules loaded after typeguard",
+    )
+    add_ini_option("linelist")
+
+    group.addoption(
+        "--typeguard-debug-instrumentation",
+        action="store_true",
+        help="print all instrumented code to stderr",
+    )
+    add_ini_option("bool")
+
+    group.addoption(
+        "--typeguard-typecheck-fail-callback",
+        action="store",
+        help=(
+            "a module:varname (e.g. typeguard:warn_on_error) reference to a function "
+            "that is called (with the exception, and memo object as arguments) to "
+            "handle a TypeCheckError"
+        ),
+    )
+    add_ini_option("string")
+
+    group.addoption(
+        "--typeguard-forward-ref-policy",
+        action="store",
+        choices=list(ForwardRefPolicy.__members__),
+        help=(
+            "determines how to deal with unresolveable forward references in type "
+            "annotations"
+        ),
+    )
+    add_ini_option("string")
+
+    group.addoption(
+        "--typeguard-collection-check-strategy",
+        action="store",
+        choices=list(CollectionCheckStrategy.__members__),
+        help="determines how thoroughly to check collections (list, dict, etc)",
+    )
+    add_ini_option("string")
+
+
+def pytest_configure(config: Config) -> None:
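+    # Command-line values take precedence; fall back to the ini-file setting.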
+    def getoption(name: str) -> Any:
+        return config.getoption(name.replace("-", "_")) or config.getini(name)
+
+    packages: list[str] | None = []
+    if packages_option := config.getoption("typeguard_packages"):
+        packages = [pkg.strip() for pkg in packages_option.split(",")]
+    elif packages_ini := config.getini("typeguard-packages"):
+        packages = packages_ini
+
+    if packages:
+        if packages == [":all:"]:
+            packages = None
+        else:
+            already_imported_packages = sorted(
+                package for package in packages if package in sys.modules
+            )
+            if already_imported_packages:
+                warnings.warn(
+                    f"typeguard cannot check these packages because they are already "
+                    f"imported: {', '.join(already_imported_packages)}",
+                    InstrumentationWarning,
+                    stacklevel=1,
+                )
+
+        install_import_hook(packages=packages)
+
+    debug_option = getoption("typeguard-debug-instrumentation")
+    if debug_option:
+        global_config.debug_instrumentation = True
+
+    fail_callback_option = getoption("typeguard-typecheck-fail-callback")
+    if fail_callback_option:
+        callback = resolve_reference(fail_callback_option)
+        if not callable(callback):
+            raise TypeError(
+                f"{fail_callback_option} ({qualified_name(callback.__class__)}) is not "
+                f"a callable"
+            )
+
+        global_config.typecheck_fail_callback = callback
+
+    forward_ref_policy_option = getoption("typeguard-forward-ref-policy")
+    if forward_ref_policy_option:
+        forward_ref_policy = ForwardRefPolicy.__members__[forward_ref_policy_option]
+        global_config.forward_ref_policy = forward_ref_policy
+
+    collection_check_strategy_option = getoption("typeguard-collection-check-strategy")
+    if collection_check_strategy_option:
+        collection_check_strategy = CollectionCheckStrategy.__members__[
+            collection_check_strategy_option
+        ]
+        global_config.collection_check_strategy = collection_check_strategy
diff --git a/setuptools/_vendor/typeguard/_suppression.py b/setuptools/_vendor/typeguard/_suppression.py
new file mode 100644
index 0000000000..bbbfbfbe8e
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_suppression.py
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+import sys
+from collections.abc import Callable, Generator
+from contextlib import contextmanager
+from functools import update_wrapper
+from threading import Lock
+from typing import ContextManager, TypeVar, overload
+
+if sys.version_info >= (3, 10):
+    from typing import ParamSpec
+else:
+    from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+type_checks_suppressed = 0
+type_checks_suppress_lock = Lock()
+
+
+@overload
+def suppress_type_checks(func: Callable[P, T]) -> Callable[P, T]: ...
+
+
+@overload
+def suppress_type_checks() -> ContextManager[None]: ...
+
+
+def suppress_type_checks(
+    func: Callable[P, T] | None = None,
+) -> Callable[P, T] | ContextManager[None]:
+    """
+    Temporarily suppress all type checking.
+
+    This function has two operating modes, based on how it's used:
+
+    #. as a context manager (``with suppress_type_checks(): ...``)
+    #. as a decorator (``@suppress_type_checks``)
+
+    When used as a context manager, :func:`check_type` and any automatically
+    instrumented functions skip the actual type checking. These context managers can be
+    nested.
+
+    When used as a decorator, all type checking is suppressed while the function is
+    running.
+
+    Type checking will resume once no more context managers are active and no decorated
+    functions are running.
+
+    Both operating modes are thread-safe.
+
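+    Usage sketch (illustrative)::
+
+        with suppress_type_checks():
+            do_something_unchecked()
+
+        @suppress_type_checks
+        def unchecked_function() -> None:
+            ...
+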
+    """
+
+    def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+        global type_checks_suppressed
+
+        with type_checks_suppress_lock:
+            type_checks_suppressed += 1
+
+        assert func is not None
+        try:
+            return func(*args, **kwargs)
+        finally:
+            with type_checks_suppress_lock:
+                type_checks_suppressed -= 1
+
+    def cm() -> Generator[None, None, None]:
+        global type_checks_suppressed
+
+        with type_checks_suppress_lock:
+            type_checks_suppressed += 1
+
+        try:
+            yield
+        finally:
+            with type_checks_suppress_lock:
+                type_checks_suppressed -= 1
+
+    if func is None:
+        # Context manager mode
+        return contextmanager(cm)()
+    else:
+        # Decorator mode
+        update_wrapper(wrapper, func)
+        return wrapper
diff --git a/setuptools/_vendor/typeguard/_transformer.py b/setuptools/_vendor/typeguard/_transformer.py
new file mode 100644
index 0000000000..13ac3630e6
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_transformer.py
@@ -0,0 +1,1229 @@
+from __future__ import annotations
+
+import ast
+import builtins
+import sys
+import typing
+from ast import (
+    AST,
+    Add,
+    AnnAssign,
+    Assign,
+    AsyncFunctionDef,
+    Attribute,
+    AugAssign,
+    BinOp,
+    BitAnd,
+    BitOr,
+    BitXor,
+    Call,
+    ClassDef,
+    Constant,
+    Dict,
+    Div,
+    Expr,
+    Expression,
+    FloorDiv,
+    FunctionDef,
+    If,
+    Import,
+    ImportFrom,
+    Index,
+    List,
+    Load,
+    LShift,
+    MatMult,
+    Mod,
+    Module,
+    Mult,
+    Name,
+    NamedExpr,
+    NodeTransformer,
+    NodeVisitor,
+    Pass,
+    Pow,
+    Return,
+    RShift,
+    Starred,
+    Store,
+    Sub,
+    Subscript,
+    Tuple,
+    Yield,
+    YieldFrom,
+    alias,
+    copy_location,
+    expr,
+    fix_missing_locations,
+    keyword,
+    walk,
+)
+from collections import defaultdict
+from collections.abc import Generator, Sequence
+from contextlib import contextmanager
+from copy import deepcopy
+from dataclasses import dataclass, field
+from typing import Any, ClassVar, cast, overload
+
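+# Fully qualified names of generator-like annotations; when a function's return
+# annotation matches one of these, its subscript is split into separate yield,
+# send and return annotations for checking.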
+generator_names = (
+    "typing.Generator",
+    "collections.abc.Generator",
+    "typing.Iterator",
+    "collections.abc.Iterator",
+    "typing.Iterable",
+    "collections.abc.Iterable",
+    "typing.AsyncIterator",
+    "collections.abc.AsyncIterator",
+    "typing.AsyncIterable",
+    "collections.abc.AsyncIterable",
+    "typing.AsyncGenerator",
+    "collections.abc.AsyncGenerator",
+)
+anytype_names = (
+    "typing.Any",
+    "typing_extensions.Any",
+)
+literal_names = (
+    "typing.Literal",
+    "typing_extensions.Literal",
+)
+annotated_names = (
+    "typing.Annotated",
+    "typing_extensions.Annotated",
+)
+ignore_decorators = (
+    "typing.no_type_check",
+    "typeguard.typeguard_ignore",
+)
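+# Maps each augmented-assignment operator node to the corresponding in-place
+# function in the "operator" module, used to compute the new value before the
+# assignment is type checked.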
+aug_assign_functions = {
+    Add: "iadd",
+    Sub: "isub",
+    Mult: "imul",
+    MatMult: "imatmul",
+    Div: "itruediv",
+    FloorDiv: "ifloordiv",
+    Mod: "imod",
+    Pow: "ipow",
+    LShift: "ilshift",
+    RShift: "irshift",
+    BitAnd: "iand",
+    BitXor: "ixor",
+    BitOr: "ior",
+}
+
+
+@dataclass
+class TransformMemo:
+    node: Module | ClassDef | FunctionDef | AsyncFunctionDef | None
+    parent: TransformMemo | None
+    path: tuple[str, ...]
+    joined_path: Constant = field(init=False)
+    return_annotation: expr | None = None
+    yield_annotation: expr | None = None
+    send_annotation: expr | None = None
+    is_async: bool = False
+    local_names: set[str] = field(init=False, default_factory=set)
+    imported_names: dict[str, str] = field(init=False, default_factory=dict)
+    ignored_names: set[str] = field(init=False, default_factory=set)
+    load_names: defaultdict[str, dict[str, Name]] = field(
+        init=False, default_factory=lambda: defaultdict(dict)
+    )
+    has_yield_expressions: bool = field(init=False, default=False)
+    has_return_expressions: bool = field(init=False, default=False)
+    memo_var_name: Name | None = field(init=False, default=None)
+    should_instrument: bool = field(init=False, default=True)
+    variable_annotations: dict[str, expr] = field(init=False, default_factory=dict)
+    configuration_overrides: dict[str, Any] = field(init=False, default_factory=dict)
+    code_inject_index: int = field(init=False, default=0)
+
+    def __post_init__(self) -> None:
+        elements: list[str] = []
+        memo = self
+        while isinstance(memo.node, (ClassDef, FunctionDef, AsyncFunctionDef)):
+            elements.insert(0, memo.node.name)
+            if not memo.parent:
+                break
+
+            memo = memo.parent
+            if isinstance(memo.node, (FunctionDef, AsyncFunctionDef)):
+                elements.insert(0, "<locals>")
+
+        self.joined_path = Constant(".".join(elements))
+
+        # Figure out where to insert instrumentation code
+        if self.node:
+            for index, child in enumerate(self.node.body):
+                if isinstance(child, ImportFrom) and child.module == "__future__":
+                    # (module only) __future__ imports must come first
+                    continue
+                elif (
+                    isinstance(child, Expr)
+                    and isinstance(child.value, Constant)
+                    and isinstance(child.value.value, str)
+                ):
+                    continue  # docstring
+
+                self.code_inject_index = index
+                break
+
+    def get_unused_name(self, name: str) -> str:
+        memo: TransformMemo | None = self
+        while memo is not None:
+            if name in memo.local_names:
+                memo = self
+                name += "_"
+            else:
+                memo = memo.parent
+
+        self.local_names.add(name)
+        return name
+
+    def is_ignored_name(self, expression: expr | Expr | None) -> bool:
+        top_expression = (
+            expression.value if isinstance(expression, Expr) else expression
+        )
+
+        if isinstance(top_expression, Attribute) and isinstance(
+            top_expression.value, Name
+        ):
+            name = top_expression.value.id
+        elif isinstance(top_expression, Name):
+            name = top_expression.id
+        else:
+            return False
+
+        memo: TransformMemo | None = self
+        while memo is not None:
+            if name in memo.ignored_names:
+                return True
+
+            memo = memo.parent
+
+        return False
+
+    def get_memo_name(self) -> Name:
+        if not self.memo_var_name:
+            self.memo_var_name = Name(id="memo", ctx=Load())
+
+        return self.memo_var_name
+
+    def get_import(self, module: str, name: str) -> Name:
+        if module in self.load_names and name in self.load_names[module]:
+            return self.load_names[module][name]
+
+        qualified_name = f"{module}.{name}"
+        if name in self.imported_names and self.imported_names[name] == qualified_name:
+            return Name(id=name, ctx=Load())
+
+        alias = self.get_unused_name(name)
+        node = self.load_names[module][name] = Name(id=alias, ctx=Load())
+        self.imported_names[name] = qualified_name
+        return node
+
+    def insert_imports(self, node: Module | FunctionDef | AsyncFunctionDef) -> None:
+        """Insert imports needed by injected code."""
+        if not self.load_names:
+            return
+
+        # Insert imports after any "from __future__ ..." imports and any docstring
+        for modulename, names in self.load_names.items():
+            aliases = [
+                alias(orig_name, new_name.id if orig_name != new_name.id else None)
+                for orig_name, new_name in sorted(names.items())
+            ]
+            node.body.insert(self.code_inject_index, ImportFrom(modulename, aliases, 0))
+
+    def name_matches(self, expression: expr | Expr | None, *names: str) -> bool:
+        if expression is None:
+            return False
+
+        path: list[str] = []
+        top_expression = (
+            expression.value if isinstance(expression, Expr) else expression
+        )
+
+        if isinstance(top_expression, Subscript):
+            top_expression = top_expression.value
+        elif isinstance(top_expression, Call):
+            top_expression = top_expression.func
+
+        while isinstance(top_expression, Attribute):
+            path.insert(0, top_expression.attr)
+            top_expression = top_expression.value
+
+        if not isinstance(top_expression, Name):
+            return False
+
+        if top_expression.id in self.imported_names:
+            translated = self.imported_names[top_expression.id]
+        elif hasattr(builtins, top_expression.id):
+            translated = "builtins." + top_expression.id
+        else:
+            translated = top_expression.id
+
+        path.insert(0, translated)
+        joined_path = ".".join(path)
+        if joined_path in names:
+            return True
+        elif self.parent:
+            return self.parent.name_matches(expression, *names)
+        else:
+            return False
+
+    def get_config_keywords(self) -> list[keyword]:
+        if self.parent and isinstance(self.parent.node, ClassDef):
+            overrides = self.parent.configuration_overrides.copy()
+        else:
+            overrides = {}
+
+        overrides.update(self.configuration_overrides)
+        return [keyword(key, value) for key, value in overrides.items()]
+
+
+class NameCollector(NodeVisitor):
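+    """Collects names bound by imports and assignments at the visited level,
+    skipping nested function and class bodies."""
+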
+    def __init__(self) -> None:
+        self.names: set[str] = set()
+
+    def visit_Import(self, node: Import) -> None:
+        for name in node.names:
+            self.names.add(name.asname or name.name)
+
+    def visit_ImportFrom(self, node: ImportFrom) -> None:
+        for name in node.names:
+            self.names.add(name.asname or name.name)
+
+    def visit_Assign(self, node: Assign) -> None:
+        for target in node.targets:
+            if isinstance(target, Name):
+                self.names.add(target.id)
+
+    def visit_NamedExpr(self, node: NamedExpr) -> Any:
+        if isinstance(node.target, Name):
+            self.names.add(node.target.id)
+
+    def visit_FunctionDef(self, node: FunctionDef) -> None:
+        pass
+
+    def visit_ClassDef(self, node: ClassDef) -> None:
+        pass
+
+
+class GeneratorDetector(NodeVisitor):
+    """Detects if a function node is a generator function."""
+
+    contains_yields: bool = False
+    in_root_function: bool = False
+
+    def visit_Yield(self, node: Yield) -> Any:
+        self.contains_yields = True
+
+    def visit_YieldFrom(self, node: YieldFrom) -> Any:
+        self.contains_yields = True
+
+    def visit_ClassDef(self, node: ClassDef) -> Any:
+        pass
+
+    def visit_FunctionDef(self, node: FunctionDef | AsyncFunctionDef) -> Any:
+        if not self.in_root_function:
+            self.in_root_function = True
+            self.generic_visit(node)
+            self.in_root_function = False
+
+    def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> Any:
+        self.visit_FunctionDef(node)
+
+
+class AnnotationTransformer(NodeTransformer):
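+    # On Python < 3.9 the plain builtins (dict, list, ...) cannot be subscripted
+    # in annotations, so visit_Name() substitutes their typing module aliases.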
+    type_substitutions: ClassVar[dict[str, tuple[str, str]]] = {
+        "builtins.dict": ("typing", "Dict"),
+        "builtins.list": ("typing", "List"),
+        "builtins.tuple": ("typing", "Tuple"),
+        "builtins.set": ("typing", "Set"),
+        "builtins.frozenset": ("typing", "FrozenSet"),
+    }
+
+    def __init__(self, transformer: TypeguardTransformer):
+        self.transformer = transformer
+        self._memo = transformer._memo
+        self._level = 0
+
+    def visit(self, node: AST) -> Any:
+        # Don't process Literals
+        if isinstance(node, expr) and self._memo.name_matches(node, *literal_names):
+            return node
+
+        self._level += 1
+        new_node = super().visit(node)
+        self._level -= 1
+
+        if isinstance(new_node, Expression) and not hasattr(new_node, "body"):
+            return None
+
+        # Return None if this new node matches a variation of typing.Any
+        if (
+            self._level == 0
+            and isinstance(new_node, expr)
+            and self._memo.name_matches(new_node, *anytype_names)
+        ):
+            return None
+
+        return new_node
+
+    def visit_BinOp(self, node: BinOp) -> Any:
+        self.generic_visit(node)
+
+        if isinstance(node.op, BitOr):
+            # If either branch of the BinOp has been transformed to `None`, it means
+            # that a type in the union was ignored, so the entire annotation should
+            # be ignored
+            if not hasattr(node, "left") or not hasattr(node, "right"):
+                return None
+
+            # Return Any if either side is Any
+            if self._memo.name_matches(node.left, *anytype_names):
+                return node.left
+            elif self._memo.name_matches(node.right, *anytype_names):
+                return node.right
+
+            if sys.version_info < (3, 10):
+                union_name = self.transformer._get_import("typing", "Union")
+                return Subscript(
+                    value=union_name,
+                    slice=Index(
+                        Tuple(elts=[node.left, node.right], ctx=Load()), ctx=Load()
+                    ),
+                    ctx=Load(),
+                )
+
+        return node
+
+    def visit_Attribute(self, node: Attribute) -> Any:
+        if self._memo.is_ignored_name(node):
+            return None
+
+        return node
+
+    def visit_Subscript(self, node: Subscript) -> Any:
+        if self._memo.is_ignored_name(node.value):
+            return None
+
+        # The subscript of typing(_extensions).Literal can be any arbitrary string, so
+        # don't try to evaluate it as code
+        if node.slice:
+            if isinstance(node.slice, Index):
+                # Python < 3.9
+                slice_value = node.slice.value  # type: ignore[attr-defined]
+            else:
+                slice_value = node.slice
+
+            if isinstance(slice_value, Tuple):
+                if self._memo.name_matches(node.value, *annotated_names):
+                    # Only treat the first argument to typing.Annotated as a potential
+                    # forward reference
+                    items = cast(
+                        typing.List[expr],
+                        [self.visit(slice_value.elts[0])] + slice_value.elts[1:],
+                    )
+                else:
+                    items = cast(
+                        typing.List[expr],
+                        [self.visit(item) for item in slice_value.elts],
+                    )
+
+                # If this is a Union and any of the items is Any, erase the entire
+                # annotation
+                if self._memo.name_matches(node.value, "typing.Union") and any(
+                    item is None
+                    or (
+                        isinstance(item, expr)
+                        and self._memo.name_matches(item, *anytype_names)
+                    )
+                    for item in items
+                ):
+                    return None
+
+                # If all items in the subscript were Any, erase the subscript entirely
+                if all(item is None for item in items):
+                    return node.value
+
+                for index, item in enumerate(items):
+                    if item is None:
+                        items[index] = self.transformer._get_import("typing", "Any")
+
+                slice_value.elts = items
+            else:
+                self.generic_visit(node)
+
+                # If the transformer erased the slice entirely, just return the node
+                # value without the subscript (unless it's Optional, in which case
+                # erase the node entirely)
+                if self._memo.name_matches(
+                    node.value, "typing.Optional"
+                ) and not hasattr(node, "slice"):
+                    return None
+                if sys.version_info >= (3, 9) and not hasattr(node, "slice"):
+                    return node.value
+                elif sys.version_info < (3, 9) and not hasattr(node.slice, "value"):
+                    return node.value
+
+        return node
+
+    def visit_Name(self, node: Name) -> Any:
+        if self._memo.is_ignored_name(node):
+            return None
+
+        if sys.version_info < (3, 9):
+            for typename, substitute in self.type_substitutions.items():
+                if self._memo.name_matches(node, typename):
+                    new_node = self.transformer._get_import(*substitute)
+                    return copy_location(new_node, node)
+
+        return node
+
+    def visit_Call(self, node: Call) -> Any:
+        # Don't recurse into calls
+        return node
+
+    def visit_Constant(self, node: Constant) -> Any:
+        if isinstance(node.value, str):
+            expression = ast.parse(node.value, mode="eval")
+            new_node = self.visit(expression)
+            if new_node:
+                return copy_location(new_node.body, node)
+            else:
+                return None
+
+        return node
+
+
+class TypeguardTransformer(NodeTransformer):
+    def __init__(
+        self, target_path: Sequence[str] | None = None, target_lineno: int | None = None
+    ) -> None:
+        self._target_path = tuple(target_path) if target_path else None
+        self._memo = self._module_memo = TransformMemo(None, None, ())
+        self.names_used_in_annotations: set[str] = set()
+        self.target_node: FunctionDef | AsyncFunctionDef | None = None
+        self.target_lineno = target_lineno
+
+    def generic_visit(self, node: AST) -> AST:
+        has_non_empty_body_initially = bool(getattr(node, "body", None))
+        initial_type = type(node)
+
+        node = super().generic_visit(node)
+
+        if (
+            type(node) is initial_type
+            and has_non_empty_body_initially
+            and hasattr(node, "body")
+            and not node.body
+        ):
+            # If we still have the same node type after transformation
+            # but we've optimised its body away, we add a `pass` statement.
+            node.body = [Pass()]
+
+        return node
+
+    @contextmanager
+    def _use_memo(
+        self, node: ClassDef | FunctionDef | AsyncFunctionDef
+    ) -> Generator[None, Any, None]:
+        new_memo = TransformMemo(node, self._memo, self._memo.path + (node.name,))
+        old_memo = self._memo
+        self._memo = new_memo
+
+        if isinstance(node, (FunctionDef, AsyncFunctionDef)):
+            new_memo.should_instrument = (
+                self._target_path is None or new_memo.path == self._target_path
+            )
+            if new_memo.should_instrument:
+                # Check if the function is a generator function
+                detector = GeneratorDetector()
+                detector.visit(node)
+
+                # Extract yield, send and return types where possible from a subscripted
+                # annotation like Generator[int, str, bool]
+                return_annotation = deepcopy(node.returns)
+                if detector.contains_yields and new_memo.name_matches(
+                    return_annotation, *generator_names
+                ):
+                    if isinstance(return_annotation, Subscript):
+                        annotation_slice = return_annotation.slice
+
+                        # Python < 3.9
+                        if isinstance(annotation_slice, Index):
+                            annotation_slice = (
+                                annotation_slice.value  # type: ignore[attr-defined]
+                            )
+
+                        if isinstance(annotation_slice, Tuple):
+                            items = annotation_slice.elts
+                        else:
+                            items = [annotation_slice]
+
+                        if len(items) > 0:
+                            new_memo.yield_annotation = self._convert_annotation(
+                                items[0]
+                            )
+
+                        if len(items) > 1:
+                            new_memo.send_annotation = self._convert_annotation(
+                                items[1]
+                            )
+
+                        if len(items) > 2:
+                            new_memo.return_annotation = self._convert_annotation(
+                                items[2]
+                            )
+                else:
+                    new_memo.return_annotation = self._convert_annotation(
+                        return_annotation
+                    )
+
+        if isinstance(node, AsyncFunctionDef):
+            new_memo.is_async = True
+
+        yield
+        self._memo = old_memo
+
+    def _get_import(self, module: str, name: str) -> Name:
+        memo = self._memo if self._target_path else self._module_memo
+        return memo.get_import(module, name)
+
+    @overload
+    def _convert_annotation(self, annotation: None) -> None: ...
+
+    @overload
+    def _convert_annotation(self, annotation: expr) -> expr: ...
+
+    def _convert_annotation(self, annotation: expr | None) -> expr | None:
+        if annotation is None:
+            return None
+
+        # Convert PEP 604 unions (x | y) and generic built-in collections where
+        # necessary, and undo forward references
+        new_annotation = cast(expr, AnnotationTransformer(self).visit(annotation))
+        if isinstance(new_annotation, expr):
+            new_annotation = ast.copy_location(new_annotation, annotation)
+
+            # Store names used in the annotation
+            names = {node.id for node in walk(new_annotation) if isinstance(node, Name)}
+            self.names_used_in_annotations.update(names)
+
+        return new_annotation
+
+    def visit_Name(self, node: Name) -> Name:
+        self._memo.local_names.add(node.id)
+        return node
+
+    def visit_Module(self, node: Module) -> Module:
+        self._module_memo = self._memo = TransformMemo(node, None, ())
+        self.generic_visit(node)
+        self._module_memo.insert_imports(node)
+
+        fix_missing_locations(node)
+        return node
+
+    def visit_Import(self, node: Import) -> Import:
+        for name in node.names:
+            self._memo.local_names.add(name.asname or name.name)
+            self._memo.imported_names[name.asname or name.name] = name.name
+
+        return node
+
+    def visit_ImportFrom(self, node: ImportFrom) -> ImportFrom:
+        for name in node.names:
+            if name.name != "*":
+                alias = name.asname or name.name
+                self._memo.local_names.add(alias)
+                self._memo.imported_names[alias] = f"{node.module}.{name.name}"
+
+        return node
+
+    def visit_ClassDef(self, node: ClassDef) -> ClassDef | None:
+        self._memo.local_names.add(node.name)
+
+        # Eliminate top level classes not belonging to the target path
+        if (
+            self._target_path is not None
+            and not self._memo.path
+            and node.name != self._target_path[0]
+        ):
+            return None
+
+        with self._use_memo(node):
+            for decorator in node.decorator_list.copy():
+                if self._memo.name_matches(decorator, "typeguard.typechecked"):
+                    # Remove the decorator to prevent duplicate instrumentation
+                    node.decorator_list.remove(decorator)
+
+                    # Store any configuration overrides
+                    if isinstance(decorator, Call) and decorator.keywords:
+                        self._memo.configuration_overrides.update(
+                            {kw.arg: kw.value for kw in decorator.keywords if kw.arg}
+                        )
+
+            self.generic_visit(node)
+            return node
+
+    def visit_FunctionDef(
+        self, node: FunctionDef | AsyncFunctionDef
+    ) -> FunctionDef | AsyncFunctionDef | None:
+        """
+        Injects type checks for function arguments, and for a return of None if the
+        function is annotated to return something other than Any or None, and the body
+        ends without an explicit "return".
+
+        """
+        self._memo.local_names.add(node.name)
+
+        # Eliminate top level functions not belonging to the target path
+        if (
+            self._target_path is not None
+            and not self._memo.path
+            and node.name != self._target_path[0]
+        ):
+            return None
+
+        # Skip instrumentation if we're instrumenting the whole module and the function
+        # contains either @no_type_check or @typeguard_ignore
+        if self._target_path is None:
+            for decorator in node.decorator_list:
+                if self._memo.name_matches(decorator, *ignore_decorators):
+                    return node
+
+        with self._use_memo(node):
+            arg_annotations: dict[str, Any] = {}
+            if self._target_path is None or self._memo.path == self._target_path:
+                # Find line number we're supposed to match against
+                if node.decorator_list:
+                    first_lineno = node.decorator_list[0].lineno
+                else:
+                    first_lineno = node.lineno
+
+                for decorator in node.decorator_list.copy():
+                    if self._memo.name_matches(decorator, "typing.overload"):
+                        # Remove overloads entirely
+                        return None
+                    elif self._memo.name_matches(decorator, "typeguard.typechecked"):
+                        # Remove the decorator to prevent duplicate instrumentation
+                        node.decorator_list.remove(decorator)
+
+                        # Store any configuration overrides
+                        if isinstance(decorator, Call) and decorator.keywords:
+                            self._memo.configuration_overrides = {
+                                kw.arg: kw.value for kw in decorator.keywords if kw.arg
+                            }
+
+                if self.target_lineno == first_lineno:
+                    assert self.target_node is None
+                    self.target_node = node
+                    if node.decorator_list:
+                        self.target_lineno = node.decorator_list[0].lineno
+                    else:
+                        self.target_lineno = node.lineno
+
+                all_args = node.args.args + node.args.kwonlyargs + node.args.posonlyargs
+
+                # Ensure that any types shadowed by the positional or keyword-only
+                # argument names are ignored in this function
+                for arg in all_args:
+                    self._memo.ignored_names.add(arg.arg)
+
+                # Ensure that any type shadowed by the variable positional argument
+                # name (e.g. "args" in *args) is ignored in this function
+                if node.args.vararg:
+                    self._memo.ignored_names.add(node.args.vararg.arg)
+
+                # Ensure that any type shadowed by the variable keyword argument name
+                # (e.g. "kwargs" in **kwargs) is ignored in this function
+                if node.args.kwarg:
+                    self._memo.ignored_names.add(node.args.kwarg.arg)
+
+                for arg in all_args:
+                    annotation = self._convert_annotation(deepcopy(arg.annotation))
+                    if annotation:
+                        arg_annotations[arg.arg] = annotation
+
+                if node.args.vararg:
+                    annotation_ = self._convert_annotation(node.args.vararg.annotation)
+                    if annotation_:
+                        if sys.version_info >= (3, 9):
+                            container = Name("tuple", ctx=Load())
+                        else:
+                            container = self._get_import("typing", "Tuple")
+
+                        subscript_slice: Tuple | Index = Tuple(
+                            [
+                                annotation_,
+                                Constant(Ellipsis),
+                            ],
+                            ctx=Load(),
+                        )
+                        if sys.version_info < (3, 9):
+                            subscript_slice = Index(subscript_slice, ctx=Load())
+
+                        arg_annotations[node.args.vararg.arg] = Subscript(
+                            container, subscript_slice, ctx=Load()
+                        )
+
+                if node.args.kwarg:
+                    annotation_ = self._convert_annotation(node.args.kwarg.annotation)
+                    if annotation_:
+                        if sys.version_info >= (3, 9):
+                            container = Name("dict", ctx=Load())
+                        else:
+                            container = self._get_import("typing", "Dict")
+
+                        subscript_slice = Tuple(
+                            [
+                                Name("str", ctx=Load()),
+                                annotation_,
+                            ],
+                            ctx=Load(),
+                        )
+                        if sys.version_info < (3, 9):
+                            subscript_slice = Index(subscript_slice, ctx=Load())
+
+                        arg_annotations[node.args.kwarg.arg] = Subscript(
+                            container, subscript_slice, ctx=Load()
+                        )
+
+                if arg_annotations:
+                    self._memo.variable_annotations.update(arg_annotations)
+
+            self.generic_visit(node)
+
+            if arg_annotations:
+                annotations_dict = Dict(
+                    keys=[Constant(key) for key in arg_annotations.keys()],
+                    values=[
+                        Tuple([Name(key, ctx=Load()), annotation], ctx=Load())
+                        for key, annotation in arg_annotations.items()
+                    ],
+                )
+                func_name = self._get_import(
+                    "typeguard._functions", "check_argument_types"
+                )
+                args = [
+                    self._memo.joined_path,
+                    annotations_dict,
+                    self._memo.get_memo_name(),
+                ]
+                node.body.insert(
+                    self._memo.code_inject_index, Expr(Call(func_name, args, []))
+                )
+
+            # Add a checked "return None" to the end if there's no explicit return
+            # Skip if the return annotation is None or Any
+            if (
+                self._memo.return_annotation
+                and (not self._memo.is_async or not self._memo.has_yield_expressions)
+                and not isinstance(node.body[-1], Return)
+                and (
+                    not isinstance(self._memo.return_annotation, Constant)
+                    or self._memo.return_annotation.value is not None
+                )
+            ):
+                func_name = self._get_import(
+                    "typeguard._functions", "check_return_type"
+                )
+                return_node = Return(
+                    Call(
+                        func_name,
+                        [
+                            self._memo.joined_path,
+                            Constant(None),
+                            self._memo.return_annotation,
+                            self._memo.get_memo_name(),
+                        ],
+                        [],
+                    )
+                )
+
+                # Replace a placeholder "pass" at the end
+                if isinstance(node.body[-1], Pass):
+                    copy_location(return_node, node.body[-1])
+                    del node.body[-1]
+
+                node.body.append(return_node)
+
+            # Insert code to create the call memo, if it was ever needed for this
+            # function
+            if self._memo.memo_var_name:
+                memo_kwargs: dict[str, Any] = {}
+                if self._memo.parent and isinstance(self._memo.parent.node, ClassDef):
+                    for decorator in node.decorator_list:
+                        if (
+                            isinstance(decorator, Name)
+                            and decorator.id == "staticmethod"
+                        ):
+                            break
+                        elif (
+                            isinstance(decorator, Name)
+                            and decorator.id == "classmethod"
+                        ):
+                            memo_kwargs["self_type"] = Name(
+                                id=node.args.args[0].arg, ctx=Load()
+                            )
+                            break
+                    else:
+                        if node.args.args:
+                            if node.name == "__new__":
+                                memo_kwargs["self_type"] = Name(
+                                    id=node.args.args[0].arg, ctx=Load()
+                                )
+                            else:
+                                memo_kwargs["self_type"] = Attribute(
+                                    Name(id=node.args.args[0].arg, ctx=Load()),
+                                    "__class__",
+                                    ctx=Load(),
+                                )
+
+                # Construct the function reference
+                # Nested functions get special treatment: the function name is added
+                # to free variables (and the closure of the resulting function)
+                names: list[str] = [node.name]
+                memo = self._memo.parent
+                while memo:
+                    if isinstance(memo.node, (FunctionDef, AsyncFunctionDef)):
+                        # This is a nested function. Use the function name as-is.
+                        del names[:-1]
+                        break
+                    elif not isinstance(memo.node, ClassDef):
+                        break
+
+                    names.insert(0, memo.node.name)
+                    memo = memo.parent
+
+                config_keywords = self._memo.get_config_keywords()
+                if config_keywords:
+                    memo_kwargs["config"] = Call(
+                        self._get_import("dataclasses", "replace"),
+                        [self._get_import("typeguard._config", "global_config")],
+                        config_keywords,
+                    )
+
+                self._memo.memo_var_name.id = self._memo.get_unused_name("memo")
+                memo_store_name = Name(id=self._memo.memo_var_name.id, ctx=Store())
+                globals_call = Call(Name(id="globals", ctx=Load()), [], [])
+                locals_call = Call(Name(id="locals", ctx=Load()), [], [])
+                memo_expr = Call(
+                    self._get_import("typeguard", "TypeCheckMemo"),
+                    [globals_call, locals_call],
+                    [keyword(key, value) for key, value in memo_kwargs.items()],
+                )
+                node.body.insert(
+                    self._memo.code_inject_index,
+                    Assign([memo_store_name], memo_expr),
+                )
+
+                self._memo.insert_imports(node)
+
+                # Special case the __new__() method to create a local alias from the
+                # class name to the first argument (usually "cls")
+                if (
+                    isinstance(node, FunctionDef)
+                    and node.args
+                    and self._memo.parent is not None
+                    and isinstance(self._memo.parent.node, ClassDef)
+                    and node.name == "__new__"
+                ):
+                    first_args_expr = Name(node.args.args[0].arg, ctx=Load())
+                    cls_name = Name(self._memo.parent.node.name, ctx=Store())
+                    node.body.insert(
+                        self._memo.code_inject_index,
+                        Assign([cls_name], first_args_expr),
+                    )
+
+                # Remove any placeholder "pass" at the end
+                if isinstance(node.body[-1], Pass):
+                    del node.body[-1]
+
+        return node
+
+    def visit_AsyncFunctionDef(
+        self, node: AsyncFunctionDef
+    ) -> FunctionDef | AsyncFunctionDef | None:
+        return self.visit_FunctionDef(node)
+
+    def visit_Return(self, node: Return) -> Return:
+        """This injects type checks into "return" statements."""
+        self.generic_visit(node)
+        if (
+            self._memo.return_annotation
+            and self._memo.should_instrument
+            and not self._memo.is_ignored_name(self._memo.return_annotation)
+        ):
+            func_name = self._get_import("typeguard._functions", "check_return_type")
+            old_node = node
+            retval = old_node.value or Constant(None)
+            node = Return(
+                Call(
+                    func_name,
+                    [
+                        self._memo.joined_path,
+                        retval,
+                        self._memo.return_annotation,
+                        self._memo.get_memo_name(),
+                    ],
+                    [],
+                )
+            )
+            copy_location(node, old_node)
+
+        return node
+
+    def visit_Yield(self, node: Yield) -> Yield | Call:
+        """
+        This injects type checks into "yield" expressions, checking both the yielded
+        value and the value sent back to the generator, when appropriate.
+
+        """
+        self._memo.has_yield_expressions = True
+        self.generic_visit(node)
+
+        if (
+            self._memo.yield_annotation
+            and self._memo.should_instrument
+            and not self._memo.is_ignored_name(self._memo.yield_annotation)
+        ):
+            func_name = self._get_import("typeguard._functions", "check_yield_type")
+            yieldval = node.value or Constant(None)
+            node.value = Call(
+                func_name,
+                [
+                    self._memo.joined_path,
+                    yieldval,
+                    self._memo.yield_annotation,
+                    self._memo.get_memo_name(),
+                ],
+                [],
+            )
+
+        if (
+            self._memo.send_annotation
+            and self._memo.should_instrument
+            and not self._memo.is_ignored_name(self._memo.send_annotation)
+        ):
+            func_name = self._get_import("typeguard._functions", "check_send_type")
+            old_node = node
+            call_node = Call(
+                func_name,
+                [
+                    self._memo.joined_path,
+                    old_node,
+                    self._memo.send_annotation,
+                    self._memo.get_memo_name(),
+                ],
+                [],
+            )
+            copy_location(call_node, old_node)
+            return call_node
+
+        return node
+
+    def visit_AnnAssign(self, node: AnnAssign) -> Any:
+        """
+        This injects a type check into a local variable annotation-assignment within a
+        function body.
+
+        """
+        self.generic_visit(node)
+
+        if (
+            isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef))
+            and node.annotation
+            and isinstance(node.target, Name)
+        ):
+            self._memo.ignored_names.add(node.target.id)
+            annotation = self._convert_annotation(deepcopy(node.annotation))
+            if annotation:
+                self._memo.variable_annotations[node.target.id] = annotation
+                if node.value:
+                    func_name = self._get_import(
+                        "typeguard._functions", "check_variable_assignment"
+                    )
+                    node.value = Call(
+                        func_name,
+                        [
+                            node.value,
+                            Constant(node.target.id),
+                            annotation,
+                            self._memo.get_memo_name(),
+                        ],
+                        [],
+                    )
+
+        return node
+
+    def visit_Assign(self, node: Assign) -> Any:
+        """
+        This injects a type check into a local variable assignment within a function
+        body. The variable must have been annotated earlier in the function body.
+
+        """
+        self.generic_visit(node)
+
+        # Only instrument function-local assignments
+        if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)):
+            targets: list[dict[Constant, expr | None]] = []
+            check_required = False
+            for target in node.targets:
+                elts: Sequence[expr]
+                if isinstance(target, Name):
+                    elts = [target]
+                elif isinstance(target, Tuple):
+                    elts = target.elts
+                else:
+                    continue
+
+                annotations_: dict[Constant, expr | None] = {}
+                for exp in elts:
+                    prefix = ""
+                    if isinstance(exp, Starred):
+                        exp = exp.value
+                        prefix = "*"
+
+                    if isinstance(exp, Name):
+                        self._memo.ignored_names.add(exp.id)
+                        name = prefix + exp.id
+                        annotation = self._memo.variable_annotations.get(exp.id)
+                        if annotation:
+                            annotations_[Constant(name)] = annotation
+                            check_required = True
+                        else:
+                            annotations_[Constant(name)] = None
+
+                targets.append(annotations_)
+
+            if check_required:
+                # Replace missing annotations with typing.Any
+                for item in targets:
+                    for key, expression in item.items():
+                        if expression is None:
+                            item[key] = self._get_import("typing", "Any")
+
+                if len(targets) == 1 and len(targets[0]) == 1:
+                    func_name = self._get_import(
+                        "typeguard._functions", "check_variable_assignment"
+                    )
+                    target_varname = next(iter(targets[0]))
+                    node.value = Call(
+                        func_name,
+                        [
+                            node.value,
+                            target_varname,
+                            targets[0][target_varname],
+                            self._memo.get_memo_name(),
+                        ],
+                        [],
+                    )
+                elif targets:
+                    func_name = self._get_import(
+                        "typeguard._functions", "check_multi_variable_assignment"
+                    )
+                    targets_arg = List(
+                        [
+                            Dict(keys=list(target), values=list(target.values()))
+                            for target in targets
+                        ],
+                        ctx=Load(),
+                    )
+                    node.value = Call(
+                        func_name,
+                        [node.value, targets_arg, self._memo.get_memo_name()],
+                        [],
+                    )
+
+        return node
+
+    def visit_NamedExpr(self, node: NamedExpr) -> Any:
+        """This injects a type check into an assignment expression (a := foo())."""
+        self.generic_visit(node)
+
+        # Only instrument function-local assignments
+        if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)) and isinstance(
+            node.target, Name
+        ):
+            self._memo.ignored_names.add(node.target.id)
+
+            # Bail out if no matching annotation is found
+            annotation = self._memo.variable_annotations.get(node.target.id)
+            if annotation is None:
+                return node
+
+            func_name = self._get_import(
+                "typeguard._functions", "check_variable_assignment"
+            )
+            node.value = Call(
+                func_name,
+                [
+                    node.value,
+                    Constant(node.target.id),
+                    annotation,
+                    self._memo.get_memo_name(),
+                ],
+                [],
+            )
+
+        return node
+
+    def visit_AugAssign(self, node: AugAssign) -> Any:
+        """
+        This injects a type check into an augmented assignment expression (a += 1).
+
+        """
+        self.generic_visit(node)
+
+        # Only instrument function-local assignments
+        if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)) and isinstance(
+            node.target, Name
+        ):
+            # Bail out if no matching annotation is found
+            annotation = self._memo.variable_annotations.get(node.target.id)
+            if annotation is None:
+                return node
+
+            # Bail out if the operator is not found (newer Python version?)
+            try:
+                operator_func_name = aug_assign_functions[node.op.__class__]
+            except KeyError:
+                return node
+
+            operator_func = self._get_import("operator", operator_func_name)
+            operator_call = Call(
+                operator_func, [Name(node.target.id, ctx=Load()), node.value], []
+            )
+            check_call = Call(
+                self._get_import("typeguard._functions", "check_variable_assignment"),
+                [
+                    operator_call,
+                    Constant(node.target.id),
+                    annotation,
+                    self._memo.get_memo_name(),
+                ],
+                [],
+            )
+            return Assign(targets=[node.target], value=check_call)
+
+        return node
+
+    def visit_If(self, node: If) -> Any:
+        """
+        This blocks names from being collected from a module-level
+        "if typing.TYPE_CHECKING:" block, so that they won't be type checked.
+
+        """
+        self.generic_visit(node)
+
+        if (
+            self._memo is self._module_memo
+            and isinstance(node.test, Name)
+            and self._memo.name_matches(node.test, "typing.TYPE_CHECKING")
+        ):
+            collector = NameCollector()
+            collector.visit(node)
+            self._memo.ignored_names.update(collector.names)
+
+        return node
diff --git a/setuptools/_vendor/typeguard/_union_transformer.py b/setuptools/_vendor/typeguard/_union_transformer.py
new file mode 100644
index 0000000000..19617e6af5
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_union_transformer.py
@@ -0,0 +1,55 @@
+"""
+Transforms lazily evaluated PEP 604 unions into typing.Union subscripts, for
+compatibility with Python versions older than 3.10.
+"""
+
+from __future__ import annotations
+
+from ast import (
+    BinOp,
+    BitOr,
+    Index,
+    Load,
+    Name,
+    NodeTransformer,
+    Subscript,
+    fix_missing_locations,
+    parse,
+)
+from ast import Tuple as ASTTuple
+from types import CodeType
+from typing import Any, Dict, FrozenSet, List, Set, Tuple, Union
+
+type_substitutions = {
+    "dict": Dict,
+    "list": List,
+    "tuple": Tuple,
+    "set": Set,
+    "frozenset": FrozenSet,
+    "Union": Union,
+}
+
+
+class UnionTransformer(NodeTransformer):
+    def __init__(self, union_name: Name | None = None):
+        self.union_name = union_name or Name(id="Union", ctx=Load())
+
+    def visit_BinOp(self, node: BinOp) -> Any:
+        self.generic_visit(node)
+        if isinstance(node.op, BitOr):
+            return Subscript(
+                value=self.union_name,
+                slice=Index(
+                    ASTTuple(elts=[node.left, node.right], ctx=Load()), ctx=Load()
+                ),
+                ctx=Load(),
+            )
+
+        return node
+
+
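+# Illustrative example: compile_type_hint("int | str") produces a code object
+# that, when evaluated with typing.Union bound to the name "Union", yields
+# Union[int, str] even on Python versions without PEP 604 support.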
+def compile_type_hint(hint: str) -> CodeType:
+    parsed = parse(hint, "<string>", "eval")
+    UnionTransformer().visit(parsed)
+    fix_missing_locations(parsed)
+    return compile(parsed, "<string>", "eval", flags=0)
diff --git a/setuptools/_vendor/typeguard/_utils.py b/setuptools/_vendor/typeguard/_utils.py
new file mode 100644
index 0000000000..9bcc8417f8
--- /dev/null
+++ b/setuptools/_vendor/typeguard/_utils.py
@@ -0,0 +1,173 @@
+from __future__ import annotations
+
+import inspect
+import sys
+from importlib import import_module
+from inspect import currentframe
+from types import CodeType, FrameType, FunctionType
+from typing import TYPE_CHECKING, Any, Callable, ForwardRef, Union, cast, final
+from weakref import WeakValueDictionary
+
+if TYPE_CHECKING:
+    from ._memo import TypeCheckMemo
+
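+# ForwardRef._evaluate() is a private API whose signature has changed across
+# Python versions, hence the version-specific wrappers below.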
+if sys.version_info >= (3, 13):
+    from typing import get_args, get_origin
+
+    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
+        return forwardref._evaluate(
+            memo.globals, memo.locals, type_params=(), recursive_guard=frozenset()
+        )
+
+elif sys.version_info >= (3, 10):
+    from typing import get_args, get_origin
+
+    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
+        return forwardref._evaluate(
+            memo.globals, memo.locals, recursive_guard=frozenset()
+        )
+
+else:
+    from typing_extensions import get_args, get_origin
+
+    evaluate_extra_args: tuple[frozenset[Any], ...] = (
+        (frozenset(),) if sys.version_info >= (3, 9) else ()
+    )
+
+    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
+        from ._union_transformer import compile_type_hint, type_substitutions
+
+        if not forwardref.__forward_evaluated__:
+            forwardref.__forward_code__ = compile_type_hint(forwardref.__forward_arg__)
+
+        try:
+            return forwardref._evaluate(memo.globals, memo.locals, *evaluate_extra_args)
+        except NameError:
+            if sys.version_info < (3, 10):
+                # Try again, with the type substitutions (list -> List etc.) in place
+                new_globals = memo.globals.copy()
+                new_globals.setdefault("Union", Union)
+                if sys.version_info < (3, 9):
+                    new_globals.update(type_substitutions)
+
+                return forwardref._evaluate(
+                    new_globals, memo.locals or new_globals, *evaluate_extra_args
+                )
+
+            raise
+
+
+_functions_map: WeakValueDictionary[CodeType, FunctionType] = WeakValueDictionary()
+
+
+def get_type_name(type_: Any) -> str:
+    name: str
+    for attrname in "__name__", "_name", "__forward_arg__":
+        candidate = getattr(type_, attrname, None)
+        if isinstance(candidate, str):
+            name = candidate
+            break
+    else:
+        origin = get_origin(type_)
+        candidate = getattr(origin, "_name", None)
+        if candidate is None:
+            candidate = type_.__class__.__name__.strip("_")
+
+        if isinstance(candidate, str):
+            name = candidate
+        else:
+            return "(unknown)"
+
+    args = get_args(type_)
+    if args:
+        if name == "Literal":
+            formatted_args = ", ".join(repr(arg) for arg in args)
+        else:
+            formatted_args = ", ".join(get_type_name(arg) for arg in args)
+
+        name += f"[{formatted_args}]"
+
+    module = getattr(type_, "__module__", None)
+    if module and module not in (None, "typing", "typing_extensions", "builtins"):
+        name = module + "." + name
+
+    return name
+
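+# Example (illustrative): get_type_name(Literal[1, "a"]) -> "Literal[1, 'a']"
+# (Literal args are rendered via repr()); types outside typing/builtins keep
+# their module prefix, e.g. get_type_name(SomeClass) -> "mypkg.SomeClass"
+# (hypothetical names).
+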
+
+def qualified_name(obj: Any, *, add_class_prefix: bool = False) -> str:
+    """
+    Return the qualified name (e.g. package.module.Type) for the given object.
+
+    Builtins and types from the :mod:`typing` package get special treatment by having
+    the module name stripped from the generated name.
+
+    """
+    if obj is None:
+        return "None"
+    elif inspect.isclass(obj):
+        prefix = "class " if add_class_prefix else ""
+        type_ = obj
+    else:
+        prefix = ""
+        type_ = type(obj)
+
+    module = type_.__module__
+    qualname = type_.__qualname__
+    name = qualname if module in ("typing", "builtins") else f"{module}.{qualname}"
+    return prefix + name
+
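+# Example (illustrative): qualified_name("x") -> "str" (builtins stripped);
+# qualified_name(int, add_class_prefix=True) -> "class int".
+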
+
+def function_name(func: Callable[..., Any]) -> str:
+    """
+    Return the qualified name of the given function.
+
+    Builtins and types from the :mod:`typing` package get special treatment by having
+    the module name stripped from the generated name.
+
+    """
+    # For partial functions and objects with __call__ defined, __qualname__ does not
+    # exist
+    module = getattr(func, "__module__", "")
+    qualname = (module + ".") if module not in ("builtins", "") else ""
+    return qualname + getattr(func, "__qualname__", repr(func))
+
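+# Example (illustrative): function_name(len) -> "len" (builtins module elided);
+# a function f defined in mypkg.mod (hypothetical) yields "mypkg.mod.f".
+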
+
+def resolve_reference(reference: str) -> Any:
+    modulename, varname = reference.partition(":")[::2]
+    if not modulename or not varname:
+        raise ValueError(f"{reference!r} is not a module:varname reference")
+
+    obj = import_module(modulename)
+    for attr in varname.split("."):
+        obj = getattr(obj, attr)
+
+    return obj
+
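+# Example (illustrative): resolve_reference("collections.abc:Mapping") imports
+# collections.abc and returns the Mapping class; dots after the colon walk
+# attributes, so "os:path.join" resolves to os.path.join.
+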
+
+def is_method_of(obj: object, cls: type) -> bool:
+    return (
+        inspect.isfunction(obj)
+        and obj.__module__ == cls.__module__
+        and obj.__qualname__.startswith(cls.__qualname__ + ".")
+    )
+
+
+def get_stacklevel() -> int:
+    level = 1
+    frame = cast(FrameType, currentframe()).f_back
+    while frame and frame.f_globals.get("__name__", "").startswith("typeguard."):
+        level += 1
+        frame = frame.f_back
+
+    return level
+
+
+@final
+class Unset:
+    __slots__ = ()
+
+    def __repr__(self) -> str:
+        return ""
+
+
+unset = Unset()
diff --git a/setuptools/_vendor/typeguard/py.typed b/setuptools/_vendor/typeguard/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/typing_extensions-4.12.2.dist-info/LICENSE b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
new file mode 100644
index 0000000000..f26bcf4d2d
--- /dev/null
+++ b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
@@ -0,0 +1,279 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC.  Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
+year, the PythonLabs team moved to Digital Creations, which became
+Zope Corporation.  In 2001, the Python Software Foundation (PSF, see
+https://www.python.org/psf/) was formed, a non-profit organization
+created specifically to own Python-related Intellectual Property.
+Zope Corporation was a sponsoring member of the PSF.
+
+All Python releases are Open Source (see https://opensource.org for
+the Open Source Definition).  Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+    Release         Derived     Year        Owner       GPL-
+                    from                                compatible? (1)
+
+    0.9.0 thru 1.2              1991-1995   CWI         yes
+    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
+    1.6             1.5.2       2000        CNRI        no
+    2.0             1.6         2000        BeOpen.com  no
+    1.6.1           1.6         2001        CNRI        yes (2)
+    2.1             2.0+1.6.1   2001        PSF         no
+    2.0.1           2.0+1.6.1   2001        PSF         yes
+    2.1.1           2.1+2.0.1   2001        PSF         yes
+    2.1.2           2.1.1       2002        PSF         yes
+    2.1.3           2.1.2       2002        PSF         yes
+    2.2 and above   2.1.1       2001-now    PSF         yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+    the GPL.  All Python licenses, unlike the GPL, let you distribute
+    a modified version without making your changes open source.  The
+    GPL-compatible licenses make it possible to combine Python with
+    other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+    because its license has a choice of law clause.  According to
+    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+    is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/setuptools/_vendor/typing_extensions-4.12.2.dist-info/METADATA b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/METADATA
new file mode 100644
index 0000000000..f15e2b3877
--- /dev/null
+++ b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/METADATA
@@ -0,0 +1,67 @@
+Metadata-Version: 2.1
+Name: typing_extensions
+Version: 4.12.2
+Summary: Backported and Experimental Type Hints for Python 3.8+
+Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
+Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" <levkivskyi@gmail.com>
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Topic :: Software Development
+Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues
+Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md
+Project-URL: Documentation, https://typing-extensions.readthedocs.io/
+Project-URL: Home, https://github.com/python/typing_extensions
+Project-URL: Q & A, https://github.com/python/typing/discussions
+Project-URL: Repository, https://github.com/python/typing_extensions
+
+# Typing Extensions
+
+[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)
+
+[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) –
+[PyPI](https://pypi.org/project/typing-extensions/)
+
+## Overview
+
+The `typing_extensions` module serves two related purposes:
+
+- Enable use of new type system features on older Python versions. For example,
+  `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
+  users on previous Python versions to use it too (see the sketch below).
+- Enable experimentation with new type system PEPs before they are accepted and
+  added to the `typing` module.
+
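+For instance, a minimal sketch of the first purpose (illustrative; assumes
+Python 3.8+ with this package installed):
+
+```python
+from typing import List
+from typing_extensions import TypeGuard  # usable even before Python 3.10
+
+def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
+    return all(isinstance(x, str) for x in val)
+```
+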
+`typing_extensions` is treated specially by static type checkers such as
+mypy and pyright. Objects defined in `typing_extensions` are treated the same
+way as equivalent forms in `typing`.
+
+`typing_extensions` uses
+[Semantic Versioning](https://semver.org/). The
+major version will be incremented only for backwards-incompatible changes.
+Therefore, it's safe to depend
+on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,
+where `x.y` is the first version that includes all features you need.
+
+## Included items
+
+See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a
+complete listing of module contents.
+
+## Contributing
+
+See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)
+for how to contribute to `typing_extensions`.
+
diff --git a/setuptools/_vendor/typing_extensions-4.12.2.dist-info/RECORD b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/RECORD
new file mode 100644
index 0000000000..bc7b45334d
--- /dev/null
+++ b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/RECORD
@@ -0,0 +1,7 @@
+__pycache__/typing_extensions.cpython-312.pyc,,
+typing_extensions-4.12.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+typing_extensions-4.12.2.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
+typing_extensions-4.12.2.dist-info/METADATA,sha256=BeUQIa8cnYbrjWx-N8TOznM9UGW5Gm2DicVpDtRA8W0,3018
+typing_extensions-4.12.2.dist-info/RECORD,,
+typing_extensions-4.12.2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+typing_extensions.py,sha256=gwekpyG9DVG3lxWKX4ni8u7nk3We5slG98mA9F3DJQw,134451
diff --git a/setuptools/_vendor/typing_extensions-4.12.2.dist-info/WHEEL b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
new file mode 100644
index 0000000000..3b5e64b5e6
--- /dev/null
+++ b/setuptools/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.9.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/setuptools/_vendor/typing_extensions.py b/setuptools/_vendor/typing_extensions.py
new file mode 100644
index 0000000000..dec429ca87
--- /dev/null
+++ b/setuptools/_vendor/typing_extensions.py
@@ -0,0 +1,3641 @@
+import abc
+import collections
+import collections.abc
+import contextlib
+import functools
+import inspect
+import operator
+import sys
+import types as _types
+import typing
+import warnings
+
+__all__ = [
+    # Super-special typing primitives.
+    'Any',
+    'ClassVar',
+    'Concatenate',
+    'Final',
+    'LiteralString',
+    'ParamSpec',
+    'ParamSpecArgs',
+    'ParamSpecKwargs',
+    'Self',
+    'Type',
+    'TypeVar',
+    'TypeVarTuple',
+    'Unpack',
+
+    # ABCs (from collections.abc).
+    'Awaitable',
+    'AsyncIterator',
+    'AsyncIterable',
+    'Coroutine',
+    'AsyncGenerator',
+    'AsyncContextManager',
+    'Buffer',
+    'ChainMap',
+
+    # Concrete collection types.
+    'ContextManager',
+    'Counter',
+    'Deque',
+    'DefaultDict',
+    'NamedTuple',
+    'OrderedDict',
+    'TypedDict',
+
+    # Structural checks, a.k.a. protocols.
+    'SupportsAbs',
+    'SupportsBytes',
+    'SupportsComplex',
+    'SupportsFloat',
+    'SupportsIndex',
+    'SupportsInt',
+    'SupportsRound',
+
+    # One-off things.
+    'Annotated',
+    'assert_never',
+    'assert_type',
+    'clear_overloads',
+    'dataclass_transform',
+    'deprecated',
+    'Doc',
+    'get_overloads',
+    'final',
+    'get_args',
+    'get_origin',
+    'get_original_bases',
+    'get_protocol_members',
+    'get_type_hints',
+    'IntVar',
+    'is_protocol',
+    'is_typeddict',
+    'Literal',
+    'NewType',
+    'overload',
+    'override',
+    'Protocol',
+    'reveal_type',
+    'runtime',
+    'runtime_checkable',
+    'Text',
+    'TypeAlias',
+    'TypeAliasType',
+    'TypeGuard',
+    'TypeIs',
+    'TYPE_CHECKING',
+    'Never',
+    'NoReturn',
+    'ReadOnly',
+    'Required',
+    'NotRequired',
+
+    # Pure aliases, have always been in typing
+    'AbstractSet',
+    'AnyStr',
+    'BinaryIO',
+    'Callable',
+    'Collection',
+    'Container',
+    'Dict',
+    'ForwardRef',
+    'FrozenSet',
+    'Generator',
+    'Generic',
+    'Hashable',
+    'IO',
+    'ItemsView',
+    'Iterable',
+    'Iterator',
+    'KeysView',
+    'List',
+    'Mapping',
+    'MappingView',
+    'Match',
+    'MutableMapping',
+    'MutableSequence',
+    'MutableSet',
+    'NoDefault',
+    'Optional',
+    'Pattern',
+    'Reversible',
+    'Sequence',
+    'Set',
+    'Sized',
+    'TextIO',
+    'Tuple',
+    'Union',
+    'ValuesView',
+    'cast',
+    'no_type_check',
+    'no_type_check_decorator',
+]
+
+# for backward compatibility
+PEP_560 = True
+GenericMeta = type
+_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
+
+# The functions below are modified copies of typing internal helpers.
+# They are needed by _ProtocolMeta and they provide support for PEP 646.
+
+
+class _Sentinel:
+    def __repr__(self):
+        return ""
+
+
+_marker = _Sentinel()
+
+
+if sys.version_info >= (3, 10):
+    def _should_collect_from_parameters(t):
+        return isinstance(
+            t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
+        )
+elif sys.version_info >= (3, 9):
+    def _should_collect_from_parameters(t):
+        return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
+else:
+    def _should_collect_from_parameters(t):
+        return isinstance(t, typing._GenericAlias) and not t._special
+
+
+NoReturn = typing.NoReturn
+
+# Some unconstrained type variables.  These are used by the container types.
+# (These are not for export.)
+T = typing.TypeVar('T')  # Any type.
+KT = typing.TypeVar('KT')  # Key type.
+VT = typing.TypeVar('VT')  # Value type.
+T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
+T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
+
+
+if sys.version_info >= (3, 11):
+    from typing import Any
+else:
+
+    class _AnyMeta(type):
+        def __instancecheck__(self, obj):
+            if self is Any:
+                raise TypeError("typing_extensions.Any cannot be used with isinstance()")
+            return super().__instancecheck__(obj)
+
+        def __repr__(self):
+            if self is Any:
+                return "typing_extensions.Any"
+            return super().__repr__()
+
+    class Any(metaclass=_AnyMeta):
+        """Special type indicating an unconstrained type.
+        - Any is compatible with every type.
+        - Any assumed to have all methods.
+        - All values assumed to be instances of Any.
+        Note that all the above statements are true from the point of view of
+        static type checkers. At runtime, Any should not be used with instance
+        checks.
+        """
+        def __new__(cls, *args, **kwargs):
+            if cls is Any:
+                raise TypeError("Any cannot be instantiated")
+            return super().__new__(cls, *args, **kwargs)
+
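+    # Example (illustrative): isinstance(object(), Any) raises TypeError here,
+    # mirroring typing.Any on 3.11+; use Any in annotations only, e.g.
+    # ``def f(x: Any) -> None: ...``.
+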
+
+ClassVar = typing.ClassVar
+
+
+class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
+    def __repr__(self):
+        return 'typing_extensions.' + self._name
+
+
+Final = typing.Final
+
+if sys.version_info >= (3, 11):
+    final = typing.final
+else:
+    # @final exists in 3.8+, but we backport it for all versions
+    # before 3.11 to keep support for the __final__ attribute.
+    # See https://bugs.python.org/issue46342
+    def final(f):
+        """This decorator can be used to indicate to type checkers that
+        the decorated method cannot be overridden, and decorated class
+        cannot be subclassed. For example:
+
+            class Base:
+                @final
+                def done(self) -> None:
+                    ...
+            class Sub(Base):
+                def done(self) -> None:  # Error reported by type checker
+                    ...
+            @final
+            class Leaf:
+                ...
+            class Other(Leaf):  # Error reported by type checker
+                ...
+
+        There is no runtime checking of these properties. The decorator
+        sets the ``__final__`` attribute to ``True`` on the decorated object
+        to allow runtime introspection.
+        """
+        try:
+            f.__final__ = True
+        except (AttributeError, TypeError):
+            # Skip the attribute silently if it is not writable.
+            # AttributeError happens if the object has __slots__ or a
+            # read-only property, TypeError if it's a builtin class.
+            pass
+        return f
+
+
+def IntVar(name):
+    return typing.TypeVar(name)
+
+
+# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
+if sys.version_info >= (3, 10, 1):
+    Literal = typing.Literal
+else:
+    def _flatten_literal_params(parameters):
+        """An internal helper for Literal creation: flatten Literals among parameters"""
+        params = []
+        for p in parameters:
+            if isinstance(p, _LiteralGenericAlias):
+                params.extend(p.__args__)
+            else:
+                params.append(p)
+        return tuple(params)
+
+    def _value_and_type_iter(params):
+        for p in params:
+            yield p, type(p)
+
+    class _LiteralGenericAlias(typing._GenericAlias, _root=True):
+        def __eq__(self, other):
+            if not isinstance(other, _LiteralGenericAlias):
+                return NotImplemented
+            these_args_deduped = set(_value_and_type_iter(self.__args__))
+            other_args_deduped = set(_value_and_type_iter(other.__args__))
+            return these_args_deduped == other_args_deduped
+
+        def __hash__(self):
+            return hash(frozenset(_value_and_type_iter(self.__args__)))
+
+    class _LiteralForm(_ExtensionsSpecialForm, _root=True):
+        def __init__(self, doc: str):
+            self._name = 'Literal'
+            self._doc = self.__doc__ = doc
+
+        def __getitem__(self, parameters):
+            if not isinstance(parameters, tuple):
+                parameters = (parameters,)
+
+            parameters = _flatten_literal_params(parameters)
+
+            val_type_pairs = list(_value_and_type_iter(parameters))
+            try:
+                deduped_pairs = set(val_type_pairs)
+            except TypeError:
+                # unhashable parameters
+                pass
+            else:
+                # similar logic to typing._deduplicate on Python 3.9+
+                if len(deduped_pairs) < len(val_type_pairs):
+                    new_parameters = []
+                    for pair in val_type_pairs:
+                        if pair in deduped_pairs:
+                            new_parameters.append(pair[0])
+                            deduped_pairs.remove(pair)
+                    assert not deduped_pairs, deduped_pairs
+                    parameters = tuple(new_parameters)
+
+            return _LiteralGenericAlias(self, parameters)
+
+    Literal = _LiteralForm(doc="""\
+                           A type that can be used to indicate to type checkers
+                           that the corresponding value has a value literally equivalent
+                           to the provided parameter. For example:
+
+                               var: Literal[4] = 4
+
+                           The type checker understands that 'var' is literally equal to
+                           the value 4 and no other value.
+
+                           Literal[...] cannot be subclassed. There is no runtime
+                           checking verifying that the parameter is actually a value
+                           instead of a type.""")
+
+
+_overload_dummy = typing._overload_dummy
+
+
+if hasattr(typing, "get_overloads"):  # 3.11+
+    overload = typing.overload
+    get_overloads = typing.get_overloads
+    clear_overloads = typing.clear_overloads
+else:
+    # {module: {qualname: {firstlineno: func}}}
+    _overload_registry = collections.defaultdict(
+        functools.partial(collections.defaultdict, dict)
+    )
+
+    def overload(func):
+        """Decorator for overloaded functions/methods.
+
+        In a stub file, place two or more stub definitions for the same
+        function in a row, each decorated with @overload.  For example:
+
+        @overload
+        def utf8(value: None) -> None: ...
+        @overload
+        def utf8(value: bytes) -> bytes: ...
+        @overload
+        def utf8(value: str) -> bytes: ...
+
+        In a non-stub file (i.e. a regular .py file), do the same but
+        follow it with an implementation.  The implementation should *not*
+        be decorated with @overload.  For example:
+
+        @overload
+        def utf8(value: None) -> None: ...
+        @overload
+        def utf8(value: bytes) -> bytes: ...
+        @overload
+        def utf8(value: str) -> bytes: ...
+        def utf8(value):
+            # implementation goes here
+
+        The overloads for a function can be retrieved at runtime using the
+        get_overloads() function.
+        """
+        # classmethod and staticmethod
+        f = getattr(func, "__func__", func)
+        try:
+            _overload_registry[f.__module__][f.__qualname__][
+                f.__code__.co_firstlineno
+            ] = func
+        except AttributeError:
+            # Not a normal function; ignore.
+            pass
+        return _overload_dummy
+
+    def get_overloads(func):
+        """Return all defined overloads for *func* as a sequence."""
+        # classmethod and staticmethod
+        f = getattr(func, "__func__", func)
+        if f.__module__ not in _overload_registry:
+            return []
+        mod_dict = _overload_registry[f.__module__]
+        if f.__qualname__ not in mod_dict:
+            return []
+        return list(mod_dict[f.__qualname__].values())
+
+    def clear_overloads():
+        """Clear all overloads in the registry."""
+        _overload_registry.clear()
+
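+    # Illustrative round-trip through the registry defined above:
+    #
+    #     @overload
+    #     def utf8(value: None) -> None: ...
+    #     @overload
+    #     def utf8(value: bytes) -> bytes: ...
+    #     def utf8(value): ...
+    #
+    #     assert len(get_overloads(utf8)) == 2
+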
+
+# This is not a real generic class.  Don't use outside annotations.
+Type = typing.Type
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+Awaitable = typing.Awaitable
+Coroutine = typing.Coroutine
+AsyncIterable = typing.AsyncIterable
+AsyncIterator = typing.AsyncIterator
+Deque = typing.Deque
+DefaultDict = typing.DefaultDict
+OrderedDict = typing.OrderedDict
+Counter = typing.Counter
+ChainMap = typing.ChainMap
+Text = typing.Text
+TYPE_CHECKING = typing.TYPE_CHECKING
+
+
+if sys.version_info >= (3, 13, 0, "beta"):
+    from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
+else:
+    def _is_dunder(attr):
+        return attr.startswith('__') and attr.endswith('__')
+
+    # Python <3.9 doesn't have typing._SpecialGenericAlias
+    _special_generic_alias_base = getattr(
+        typing, "_SpecialGenericAlias", typing._GenericAlias
+    )
+
+    class _SpecialGenericAlias(_special_generic_alias_base, _root=True):
+        def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
+            if _special_generic_alias_base is typing._GenericAlias:
+                # Python <3.9
+                self.__origin__ = origin
+                self._nparams = nparams
+                super().__init__(origin, nparams, special=True, inst=inst, name=name)
+            else:
+                # Python >= 3.9
+                super().__init__(origin, nparams, inst=inst, name=name)
+            self._defaults = defaults
+
+        def __setattr__(self, attr, val):
+            allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
+            if _special_generic_alias_base is typing._GenericAlias:
+                # Python <3.9
+                allowed_attrs.add("__origin__")
+            if _is_dunder(attr) or attr in allowed_attrs:
+                object.__setattr__(self, attr, val)
+            else:
+                setattr(self.__origin__, attr, val)
+
+        @typing._tp_cache
+        def __getitem__(self, params):
+            if not isinstance(params, tuple):
+                params = (params,)
+            msg = "Parameters to generic types must be types."
+            params = tuple(typing._type_check(p, msg) for p in params)
+            if (
+                self._defaults
+                and len(params) < self._nparams
+                and len(params) + len(self._defaults) >= self._nparams
+            ):
+                params = (*params, *self._defaults[len(params) - self._nparams:])
+            actual_len = len(params)
+
+            if actual_len != self._nparams:
+                if self._defaults:
+                    expected = f"at least {self._nparams - len(self._defaults)}"
+                else:
+                    expected = str(self._nparams)
+                if not self._nparams:
+                    raise TypeError(f"{self} is not a generic class")
+                raise TypeError(
+                    f"Too {'many' if actual_len > self._nparams else 'few'}"
+                    f" arguments for {self};"
+                    f" actual {actual_len}, expected {expected}"
+                )
+            return self.copy_with(params)
+
+    _NoneType = type(None)
+    Generator = _SpecialGenericAlias(
+        collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
+    )
+    AsyncGenerator = _SpecialGenericAlias(
+        collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
+    )
+    ContextManager = _SpecialGenericAlias(
+        contextlib.AbstractContextManager,
+        2,
+        name="ContextManager",
+        defaults=(typing.Optional[bool],)
+    )
+    AsyncContextManager = _SpecialGenericAlias(
+        contextlib.AbstractAsyncContextManager,
+        2,
+        name="AsyncContextManager",
+        defaults=(typing.Optional[bool],)
+    )
+
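+    # Example (illustrative): with the defaults above, Generator[int] is
+    # equivalent to Generator[int, None, None], and ContextManager[str] to
+    # ContextManager[str, Optional[bool]].
+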
+
+_PROTO_ALLOWLIST = {
+    'collections.abc': [
+        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+        'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
+    ],
+    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+    'typing_extensions': ['Buffer'],
+}
+
+
+_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
+    "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
+    "__final__",
+}
+
+
+def _get_protocol_attrs(cls):
+    attrs = set()
+    for base in cls.__mro__[:-1]:  # without object
+        if base.__name__ in {'Protocol', 'Generic'}:
+            continue
+        annotations = getattr(base, '__annotations__', {})
+        for attr in (*base.__dict__, *annotations):
+            if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
+                attrs.add(attr)
+    return attrs
+
+
+def _caller(depth=2):
+    try:
+        return sys._getframe(depth).f_globals.get('__name__', '__main__')
+    except (AttributeError, ValueError):  # For platforms without _getframe()
+        return None
+
+
+# `__match_args__` attribute was removed from protocol members in 3.13,
+# we want to backport this change to older Python versions.
+if sys.version_info >= (3, 13):
+    Protocol = typing.Protocol
+else:
+    def _allow_reckless_class_checks(depth=3):
+        """Allow instance and class checks for special stdlib modules.
+        The abc and functools modules indiscriminately call isinstance() and
+        issubclass() on the whole MRO of a user class, which may contain protocols.
+        """
+        return _caller(depth) in {'abc', 'functools', None}
+
+    def _no_init(self, *args, **kwargs):
+        if type(self)._is_protocol:
+            raise TypeError('Protocols cannot be instantiated')
+
+    def _type_check_issubclass_arg_1(arg):
+        """Raise TypeError if `arg` is not an instance of `type`
+        in `issubclass(arg, <protocol>)`.
+
+        In most cases, this is verified by type.__subclasscheck__.
+        Checking it again unnecessarily would slow down issubclass() checks,
+        so, we don't perform this check unless we absolutely have to.
+
+        For various error paths, however,
+        we want to ensure that *this* error message is shown to the user
+        where relevant, rather than a typing.py-specific error message.
+        """
+        if not isinstance(arg, type):
+            # Same error message as for issubclass(1, int).
+            raise TypeError('issubclass() arg 1 must be a class')
+
+    # Inheriting from typing._ProtocolMeta isn't actually desirable,
+    # but is necessary to allow typing.Protocol and typing_extensions.Protocol
+    # to mix without getting TypeErrors about "metaclass conflict"
+    class _ProtocolMeta(type(typing.Protocol)):
+        # This metaclass is somewhat unfortunate,
+        # but is necessary for several reasons...
+        #
+        # NOTE: DO NOT call super() in any methods in this class
+        # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11
+        # and those are slow
+        def __new__(mcls, name, bases, namespace, **kwargs):
+            if name == "Protocol" and len(bases) < 2:
+                pass
+            elif {Protocol, typing.Protocol} & set(bases):
+                for base in bases:
+                    if not (
+                        base in {object, typing.Generic, Protocol, typing.Protocol}
+                        or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
+                        or is_protocol(base)
+                    ):
+                        raise TypeError(
+                            f"Protocols can only inherit from other protocols, "
+                            f"got {base!r}"
+                        )
+            return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
+
+        def __init__(cls, *args, **kwargs):
+            abc.ABCMeta.__init__(cls, *args, **kwargs)
+            if getattr(cls, "_is_protocol", False):
+                cls.__protocol_attrs__ = _get_protocol_attrs(cls)
+
+        def __subclasscheck__(cls, other):
+            if cls is Protocol:
+                return type.__subclasscheck__(cls, other)
+            if (
+                getattr(cls, '_is_protocol', False)
+                and not _allow_reckless_class_checks()
+            ):
+                if not getattr(cls, '_is_runtime_protocol', False):
+                    _type_check_issubclass_arg_1(other)
+                    raise TypeError(
+                        "Instance and class checks can only be used with "
+                        "@runtime_checkable protocols"
+                    )
+                if (
+                    # this attribute is set by @runtime_checkable:
+                    cls.__non_callable_proto_members__
+                    and cls.__dict__.get("__subclasshook__") is _proto_hook
+                ):
+                    _type_check_issubclass_arg_1(other)
+                    non_method_attrs = sorted(cls.__non_callable_proto_members__)
+                    raise TypeError(
+                        "Protocols with non-method members don't support issubclass()."
+                        f" Non-method members: {str(non_method_attrs)[1:-1]}."
+                    )
+            return abc.ABCMeta.__subclasscheck__(cls, other)
+
+        def __instancecheck__(cls, instance):
+            # We need this method for situations where attributes are
+            # assigned in __init__.
+            if cls is Protocol:
+                return type.__instancecheck__(cls, instance)
+            if not getattr(cls, "_is_protocol", False):
+                # i.e., it's a concrete subclass of a protocol
+                return abc.ABCMeta.__instancecheck__(cls, instance)
+
+            if (
+                not getattr(cls, '_is_runtime_protocol', False) and
+                not _allow_reckless_class_checks()
+            ):
+                raise TypeError("Instance and class checks can only be used with"
+                                " @runtime_checkable protocols")
+
+            if abc.ABCMeta.__instancecheck__(cls, instance):
+                return True
+
+            for attr in cls.__protocol_attrs__:
+                try:
+                    val = inspect.getattr_static(instance, attr)
+                except AttributeError:
+                    break
+                # this attribute is set by @runtime_checkable:
+                if val is None and attr not in cls.__non_callable_proto_members__:
+                    break
+            else:
+                return True
+
+            return False
+
+        def __eq__(cls, other):
+            # Hack so that typing.Generic.__class_getitem__
+            # treats typing_extensions.Protocol
+            # as equivalent to typing.Protocol
+            if abc.ABCMeta.__eq__(cls, other) is True:
+                return True
+            return cls is Protocol and other is typing.Protocol
+
+        # This has to be defined, or the abc-module cache
+        # complains about classes with this metaclass being unhashable,
+        # if we define only __eq__!
+        def __hash__(cls) -> int:
+            return type.__hash__(cls)
+
+    @classmethod
+    def _proto_hook(cls, other):
+        if not cls.__dict__.get('_is_protocol', False):
+            return NotImplemented
+
+        for attr in cls.__protocol_attrs__:
+            for base in other.__mro__:
+                # Check if the members appears in the class dictionary...
+                if attr in base.__dict__:
+                    if base.__dict__[attr] is None:
+                        return NotImplemented
+                    break
+
+                # ...or in annotations, if it is a sub-protocol.
+                annotations = getattr(base, '__annotations__', {})
+                if (
+                    isinstance(annotations, collections.abc.Mapping)
+                    and attr in annotations
+                    and is_protocol(other)
+                ):
+                    break
+            else:
+                return NotImplemented
+        return True
+
+    class Protocol(typing.Generic, metaclass=_ProtocolMeta):
+        __doc__ = typing.Protocol.__doc__
+        __slots__ = ()
+        _is_protocol = True
+        _is_runtime_protocol = False
+
+        def __init_subclass__(cls, *args, **kwargs):
+            super().__init_subclass__(*args, **kwargs)
+
+            # Determine if this is a protocol or a concrete subclass.
+            if not cls.__dict__.get('_is_protocol', False):
+                cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+            # Set (or override) the protocol subclass hook.
+            if '__subclasshook__' not in cls.__dict__:
+                cls.__subclasshook__ = _proto_hook
+
+            # Prohibit instantiation for protocol classes
+            if cls._is_protocol and cls.__init__ is Protocol.__init__:
+                cls.__init__ = _no_init
+
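+    # Example (illustrative) of the backported Protocol above:
+    #
+    #     class HasClose(Protocol):
+    #         def close(self) -> None: ...
+    #
+    #     HasClose()  # TypeError: Protocols cannot be instantiated
+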
+
+if sys.version_info >= (3, 13):
+    runtime_checkable = typing.runtime_checkable
+else:
+    def runtime_checkable(cls):
+        """Mark a protocol class as a runtime protocol.
+
+        Such protocol can be used with isinstance() and issubclass().
+        Raise TypeError if applied to a non-protocol class.
+        This allows a simple-minded structural check very similar to
+        one trick ponies in collections.abc such as Iterable.
+
+        For example::
+
+            @runtime_checkable
+            class Closable(Protocol):
+                def close(self): ...
+
+            assert isinstance(open('/some/file'), Closable)
+
+        Warning: this will check only the presence of the required methods,
+        not their type signatures!
+        """
+        if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
+            raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
+                            f' got {cls!r}')
+        cls._is_runtime_protocol = True
+
+        # typing.Protocol classes on <=3.11 break if we execute this block,
+        # because typing.Protocol classes on <=3.11 don't have a
+        # `__protocol_attrs__` attribute, and this block relies on the
+        # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
+        # break if we *don't* execute this block, because *they* assume that all
+        # protocol classes have a `__non_callable_proto_members__` attribute
+        # (which this block sets)
+        if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
+            # PEP 544 prohibits using issubclass()
+            # with protocols that have non-method members.
+            # See gh-113320 for why we compute this attribute here,
+            # rather than in `_ProtocolMeta.__init__`
+            cls.__non_callable_proto_members__ = set()
+            for attr in cls.__protocol_attrs__:
+                try:
+                    is_callable = callable(getattr(cls, attr, None))
+                except Exception as e:
+                    raise TypeError(
+                        f"Failed to determine whether protocol member {attr!r} "
+                        "is a method member"
+                    ) from e
+                else:
+                    if not is_callable:
+                        cls.__non_callable_proto_members__.add(attr)
+
+        return cls
+
+
+# The "runtime" alias exists for backwards compatibility.
+runtime = runtime_checkable
+
+
+# Our version of runtime-checkable protocols is faster on Python 3.8-3.11
+if sys.version_info >= (3, 12):
+    SupportsInt = typing.SupportsInt
+    SupportsFloat = typing.SupportsFloat
+    SupportsComplex = typing.SupportsComplex
+    SupportsBytes = typing.SupportsBytes
+    SupportsIndex = typing.SupportsIndex
+    SupportsAbs = typing.SupportsAbs
+    SupportsRound = typing.SupportsRound
+else:
+    @runtime_checkable
+    class SupportsInt(Protocol):
+        """An ABC with one abstract method __int__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __int__(self) -> int:
+            pass
+
+    @runtime_checkable
+    class SupportsFloat(Protocol):
+        """An ABC with one abstract method __float__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __float__(self) -> float:
+            pass
+
+    @runtime_checkable
+    class SupportsComplex(Protocol):
+        """An ABC with one abstract method __complex__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __complex__(self) -> complex:
+            pass
+
+    @runtime_checkable
+    class SupportsBytes(Protocol):
+        """An ABC with one abstract method __bytes__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __bytes__(self) -> bytes:
+            pass
+
+    @runtime_checkable
+    class SupportsIndex(Protocol):
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __index__(self) -> int:
+            pass
+
+    @runtime_checkable
+    class SupportsAbs(Protocol[T_co]):
+        """
+        An ABC with one abstract method __abs__ that is covariant in its return type.
+        """
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __abs__(self) -> T_co:
+            pass
+
+    @runtime_checkable
+    class SupportsRound(Protocol[T_co]):
+        """
+        An ABC with one abstract method __round__ that is covariant in its return type.
+        """
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __round__(self, ndigits: int = 0) -> T_co:
+            pass
+
+
+def _ensure_subclassable(mro_entries):
+    def inner(func):
+        if sys.implementation.name == "pypy" and sys.version_info < (3, 9):
+            cls_dict = {
+                "__call__": staticmethod(func),
+                "__mro_entries__": staticmethod(mro_entries)
+            }
+            t = type(func.__name__, (), cls_dict)
+            return functools.update_wrapper(t(), func)
+        else:
+            func.__mro_entries__ = mro_entries
+            return func
+    return inner
+
+
+# Update this to something like >=3.13.0b1 if and when
+# PEP 728 is implemented in CPython
+_PEP_728_IMPLEMENTED = False
+
+if _PEP_728_IMPLEMENTED:
+    # The standard library TypedDict in Python 3.8 does not store runtime information
+    # about which (if any) keys are optional.  See https://bugs.python.org/issue38834
+    # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
+    # keyword with old-style TypedDict().  See https://bugs.python.org/issue42059
+    # The standard library TypedDict below Python 3.11 does not store runtime
+    # information about optional and required keys when using Required or NotRequired.
+    # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
+    # Aaaand on 3.12 we add __orig_bases__ to TypedDict
+    # to enable better runtime introspection.
+    # On 3.13 we deprecate some odd ways of creating TypedDicts.
+    # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
+    # PEP 728 (still pending) makes more changes.
+    TypedDict = typing.TypedDict
+    _TypedDictMeta = typing._TypedDictMeta
+    is_typeddict = typing.is_typeddict
+else:
+    # 3.10.0 and later
+    _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
+
+    def _get_typeddict_qualifiers(annotation_type):
+        while True:
+            annotation_origin = get_origin(annotation_type)
+            if annotation_origin is Annotated:
+                annotation_args = get_args(annotation_type)
+                if annotation_args:
+                    annotation_type = annotation_args[0]
+                else:
+                    break
+            elif annotation_origin is Required:
+                yield Required
+                annotation_type, = get_args(annotation_type)
+            elif annotation_origin is NotRequired:
+                yield NotRequired
+                annotation_type, = get_args(annotation_type)
+            elif annotation_origin is ReadOnly:
+                yield ReadOnly
+                annotation_type, = get_args(annotation_type)
+            else:
+                break
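+
+    # Example (illustrative): for Annotated[Required[ReadOnly[int]], "meta"]
+    # this generator yields Required, then ReadOnly, unwrapping one qualifier
+    # per iteration until a plain type remains.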
+
+    class _TypedDictMeta(type):
+        def __new__(cls, name, bases, ns, *, total=True, closed=False):
+            """Create new typed dict class object.
+
+            This method is called when TypedDict is subclassed,
+            or when TypedDict is instantiated. This way
+            TypedDict supports all three syntax forms described in its docstring.
+            Subclasses and instances of TypedDict return actual dictionaries.
+            """
+            for base in bases:
+                if type(base) is not _TypedDictMeta and base is not typing.Generic:
+                    raise TypeError('cannot inherit from both a TypedDict type '
+                                    'and a non-TypedDict base class')
+
+            if any(issubclass(b, typing.Generic) for b in bases):
+                generic_base = (typing.Generic,)
+            else:
+                generic_base = ()
+
+            # typing.py generally doesn't let you inherit from plain Generic, unless
+            # the name of the class happens to be "Protocol"
+            tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
+            tp_dict.__name__ = name
+            if tp_dict.__qualname__ == "Protocol":
+                tp_dict.__qualname__ = name
+
+            if not hasattr(tp_dict, '__orig_bases__'):
+                tp_dict.__orig_bases__ = bases
+
+            annotations = {}
+            if "__annotations__" in ns:
+                own_annotations = ns["__annotations__"]
+            elif "__annotate__" in ns:
+                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+                own_annotations = ns["__annotate__"](1)
+            else:
+                own_annotations = {}
+            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+            if _TAKES_MODULE:
+                own_annotations = {
+                    n: typing._type_check(tp, msg, module=tp_dict.__module__)
+                    for n, tp in own_annotations.items()
+                }
+            else:
+                own_annotations = {
+                    n: typing._type_check(tp, msg)
+                    for n, tp in own_annotations.items()
+                }
+            required_keys = set()
+            optional_keys = set()
+            readonly_keys = set()
+            mutable_keys = set()
+            extra_items_type = None
+
+            for base in bases:
+                base_dict = base.__dict__
+
+                annotations.update(base_dict.get('__annotations__', {}))
+                required_keys.update(base_dict.get('__required_keys__', ()))
+                optional_keys.update(base_dict.get('__optional_keys__', ()))
+                readonly_keys.update(base_dict.get('__readonly_keys__', ()))
+                mutable_keys.update(base_dict.get('__mutable_keys__', ()))
+                base_extra_items_type = base_dict.get('__extra_items__', None)
+                if base_extra_items_type is not None:
+                    extra_items_type = base_extra_items_type
+
+            if closed and extra_items_type is None:
+                extra_items_type = Never
+            if closed and "__extra_items__" in own_annotations:
+                annotation_type = own_annotations.pop("__extra_items__")
+                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+                if Required in qualifiers:
+                    raise TypeError(
+                        "Special key __extra_items__ does not support "
+                        "Required"
+                    )
+                if NotRequired in qualifiers:
+                    raise TypeError(
+                        "Special key __extra_items__ does not support "
+                        "NotRequired"
+                    )
+                extra_items_type = annotation_type
+
+            annotations.update(own_annotations)
+            for annotation_key, annotation_type in own_annotations.items():
+                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+
+                if Required in qualifiers:
+                    required_keys.add(annotation_key)
+                elif NotRequired in qualifiers:
+                    optional_keys.add(annotation_key)
+                elif total:
+                    required_keys.add(annotation_key)
+                else:
+                    optional_keys.add(annotation_key)
+                if ReadOnly in qualifiers:
+                    mutable_keys.discard(annotation_key)
+                    readonly_keys.add(annotation_key)
+                else:
+                    mutable_keys.add(annotation_key)
+                    readonly_keys.discard(annotation_key)
+
+            tp_dict.__annotations__ = annotations
+            tp_dict.__required_keys__ = frozenset(required_keys)
+            tp_dict.__optional_keys__ = frozenset(optional_keys)
+            tp_dict.__readonly_keys__ = frozenset(readonly_keys)
+            tp_dict.__mutable_keys__ = frozenset(mutable_keys)
+            if not hasattr(tp_dict, '__total__'):
+                tp_dict.__total__ = total
+            tp_dict.__closed__ = closed
+            tp_dict.__extra_items__ = extra_items_type
+            return tp_dict
+
+        __call__ = dict  # static method
+
+        def __subclasscheck__(cls, other):
+            # Typed dicts are only for static structural subtyping.
+            raise TypeError('TypedDict does not support instance and class checks')
+
+        __instancecheck__ = __subclasscheck__
+
+    _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+
+    @_ensure_subclassable(lambda bases: (_TypedDict,))
+    def TypedDict(typename, fields=_marker, /, *, total=True, closed=False, **kwargs):
+        """A simple typed namespace. At runtime it is equivalent to a plain dict.
+
+        TypedDict creates a dictionary type such that a type checker will expect all
+        instances to have a certain set of keys, where each key is
+        associated with a value of a consistent type. This expectation
+        is not checked at runtime.
+
+        Usage::
+
+            class Point2D(TypedDict):
+                x: int
+                y: int
+                label: str
+
+            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
+            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
+
+            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+        The type info can be accessed via the Point2D.__annotations__ dict, and
+        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+        TypedDict supports an additional equivalent form::
+
+            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+        By default, all keys must be present in a TypedDict. It is possible
+        to override this by specifying totality::
+
+            class Point2D(TypedDict, total=False):
+                x: int
+                y: int
+
+        This means that a Point2D TypedDict can have any of the keys omitted. A type
+        checker is only expected to support a literal False or True as the value of
+        the total argument. True is the default, and makes all items defined in the
+        class body be required.
+
+        The Required and NotRequired special forms can also be used to mark
+        individual keys as being required or not required::
+
+            class Point2D(TypedDict):
+                x: int  # the "x" key must always be present (Required is the default)
+                y: NotRequired[int]  # the "y" key can be omitted
+
+        See PEP 655 for more details on Required and NotRequired.
+        """
+        if fields is _marker or fields is None:
+            if fields is _marker:
+                deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+            else:
+                deprecated_thing = "Passing `None` as the 'fields' parameter"
+
+            example = f"`{typename} = TypedDict({typename!r}, {{}})`"
+            deprecation_msg = (
+                f"{deprecated_thing} is deprecated and will be disallowed in "
+                "Python 3.15. To create a TypedDict class with 0 fields "
+                "using the functional syntax, pass an empty dictionary, e.g. "
+            ) + example + "."
+            warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
+            if closed is not False and closed is not True:
+                kwargs["closed"] = closed
+                closed = False
+            fields = kwargs
+        elif kwargs:
+            raise TypeError("TypedDict takes either a dict or keyword arguments,"
+                            " but not both")
+        if kwargs:
+            if sys.version_info >= (3, 13):
+                raise TypeError("TypedDict takes no keyword arguments")
+            warnings.warn(
+                "The kwargs-based syntax for TypedDict definitions is deprecated "
+                "in Python 3.11, will be removed in Python 3.13, and may not be "
+                "understood by third-party type checkers.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        ns = {'__annotations__': dict(fields)}
+        module = _caller()
+        if module is not None:
+            # Setting correct module is necessary to make typed dict classes pickleable.
+            ns['__module__'] = module
+
+        td = _TypedDictMeta(typename, (), ns, total=total, closed=closed)
+        td.__orig_bases__ = (TypedDict,)
+        return td
+
+    if hasattr(typing, "_TypedDictMeta"):
+        _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
+    else:
+        _TYPEDDICT_TYPES = (_TypedDictMeta,)
+
+    def is_typeddict(tp):
+        """Check if an annotation is a TypedDict class
+
+        For example::
+            class Film(TypedDict):
+                title: str
+                year: int
+
+            is_typeddict(Film)  # => True
+            is_typeddict(Union[list, str])  # => False
+        """
+        # On 3.8, this would otherwise return True
+        if hasattr(typing, "TypedDict") and tp is typing.TypedDict:
+            return False
+        return isinstance(tp, _TYPEDDICT_TYPES)
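+
+    # Hedged usage sketch (illustrative, not from upstream): the metaclass
+    # records per-key qualifiers on the class object, so the resulting
+    # TypedDict type can be introspected at runtime:
+    #
+    #     class Movie(TypedDict, total=False):
+    #         title: Required[str]
+    #         year: int
+    #
+    #     Movie.__required_keys__   # -> frozenset({'title'})
+    #     Movie.__optional_keys__   # -> frozenset({'year'})
+    #     is_typeddict(Movie)       # -> True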
+
+
+if hasattr(typing, "assert_type"):
+    assert_type = typing.assert_type
+
+else:
+    def assert_type(val, typ, /):
+        """Assert (to the type checker) that the value is of the given type.
+
+        When the type checker encounters a call to assert_type(), it
+        emits an error if the value is not of the specified type::
+
+            def greet(name: str) -> None:
+                assert_type(name, str)  # ok
+                assert_type(name, int)  # type checker error
+
+        At runtime this returns the first argument unchanged and otherwise
+        does nothing.
+        """
+        return val
+
+
+if hasattr(typing, "ReadOnly"):  # 3.13+
+    get_type_hints = typing.get_type_hints
+else:  # <=3.12
+    # replaces _strip_annotations()
+    def _strip_extras(t):
+        """Strips Annotated, Required and NotRequired from a given type."""
+        if isinstance(t, _AnnotatedAlias):
+            return _strip_extras(t.__origin__)
+        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
+            return _strip_extras(t.__args__[0])
+        if isinstance(t, typing._GenericAlias):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return t.copy_with(stripped_args)
+        if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return _types.GenericAlias(t.__origin__, stripped_args)
+        if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return functools.reduce(operator.or_, stripped_args)
+
+        return t
+
+    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+        """Return type hints for an object.
+
+        This is often the same as obj.__annotations__, but it handles
+        forward references encoded as string literals, adds Optional[t] if a
+        default value equal to None is set and recursively replaces all
+        'Annotated[T, ...]', 'Required[T]', 'NotRequired[T]' or 'ReadOnly[T]'
+        with 'T' (unless 'include_extras=True').
+
+        The argument may be a module, class, method, or function. The annotations
+        are returned as a dictionary. For classes, annotations include also
+        inherited members.
+
+        TypeError is raised if the argument is not of a type that can contain
+        annotations, and an empty dictionary is returned if no annotations are
+        present.
+
+        BEWARE -- the behavior of globalns and localns is counterintuitive
+        (unless you are familiar with how eval() and exec() work).  The
+        search order is locals first, then globals.
+
+        - If no dict arguments are passed, an attempt is made to use the
+          globals from obj (or the respective module's globals for classes),
+          and these are also used as the locals.  If the object does not appear
+          to have globals, an empty dictionary is used.
+
+        - If one dict argument is passed, it is used for both globals and
+          locals.
+
+        - If two dict arguments are passed, they specify globals and
+          locals, respectively.
+        """
+        if hasattr(typing, "Annotated"):  # 3.9+
+            hint = typing.get_type_hints(
+                obj, globalns=globalns, localns=localns, include_extras=True
+            )
+        else:  # 3.8
+            hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
+        if include_extras:
+            return hint
+        return {k: _strip_extras(t) for k, t in hint.items()}
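+
+    # Illustrative only (not from upstream): unless include_extras=True, the
+    # Annotated/Required/NotRequired/ReadOnly wrappers are stripped from the
+    # returned hints:
+    #
+    #     class Movie(TypedDict):
+    #         year: NotRequired[Annotated[int, "meta"]]
+    #
+    #     get_type_hints(Movie)["year"]                       # -> int
+    #     get_type_hints(Movie, include_extras=True)["year"]  # wrappers kept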
+
+
+# Python 3.9+ has PEP 593 (Annotated)
+if hasattr(typing, 'Annotated'):
+    Annotated = typing.Annotated
+    # Not exported and not a public API, but needed for get_origin() and get_args()
+    # to work.
+    _AnnotatedAlias = typing._AnnotatedAlias
+# 3.8
+else:
+    class _AnnotatedAlias(typing._GenericAlias, _root=True):
+        """Runtime representation of an annotated type.
+
+        At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+        with extra annotations. The alias behaves like a normal typing alias,
+        instantiating is the same as instantiating the underlying type, binding
+        it to types is also the same.
+        """
+        def __init__(self, origin, metadata):
+            if isinstance(origin, _AnnotatedAlias):
+                metadata = origin.__metadata__ + metadata
+                origin = origin.__origin__
+            super().__init__(origin, origin)
+            self.__metadata__ = metadata
+
+        def copy_with(self, params):
+            assert len(params) == 1
+            new_type = params[0]
+            return _AnnotatedAlias(new_type, self.__metadata__)
+
+        def __repr__(self):
+            return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
+                    f"{', '.join(repr(a) for a in self.__metadata__)}]")
+
+        def __reduce__(self):
+            return operator.getitem, (
+                Annotated, (self.__origin__, *self.__metadata__)
+            )
+
+        def __eq__(self, other):
+            if not isinstance(other, _AnnotatedAlias):
+                return NotImplemented
+            if self.__origin__ != other.__origin__:
+                return False
+            return self.__metadata__ == other.__metadata__
+
+        def __hash__(self):
+            return hash((self.__origin__, self.__metadata__))
+
+    class Annotated:
+        """Add context specific metadata to a type.
+
+        Example: Annotated[int, runtime_check.Unsigned] indicates to the
+        hypothetical runtime_check module that this type is an unsigned int.
+        Every other consumer of this type can ignore this metadata and treat
+        this type as int.
+
+        The first argument to Annotated must be a valid type (and will be in
+        the __origin__ field), the remaining arguments are kept as a tuple in
+        the __metadata__ field.
+
+        Details:
+
+        - It's an error to call `Annotated` with less than two arguments.
+        - Nested Annotated are flattened::
+
+            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+        - Instantiating an annotated type is equivalent to instantiating the
+        underlying type::
+
+            Annotated[C, Ann1](5) == C(5)
+
+        - Annotated can be used as a generic type alias::
+
+            Optimized = Annotated[T, runtime.Optimize()]
+            Optimized[int] == Annotated[int, runtime.Optimize()]
+
+            OptimizedList = Annotated[List[T], runtime.Optimize()]
+            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+        """
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwargs):
+            raise TypeError("Type Annotated cannot be instantiated.")
+
+        @typing._tp_cache
+        def __class_getitem__(cls, params):
+            if not isinstance(params, tuple) or len(params) < 2:
+                raise TypeError("Annotated[...] should be used "
+                                "with at least two arguments (a type and an "
+                                "annotation).")
+            allowed_special_forms = (ClassVar, Final)
+            if get_origin(params[0]) in allowed_special_forms:
+                origin = params[0]
+            else:
+                msg = "Annotated[t, ...]: t must be a type."
+                origin = typing._type_check(params[0], msg)
+            metadata = tuple(params[1:])
+            return _AnnotatedAlias(origin, metadata)
+
+        def __init_subclass__(cls, *args, **kwargs):
+            raise TypeError(
+                f"Cannot subclass {cls.__module__}.Annotated"
+            )
+
+# Python 3.8 has get_origin() and get_args() but those implementations aren't
+# Annotated-aware, so we can't use those. Python 3.9's versions don't support
+# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
+if sys.version_info[:2] >= (3, 10):
+    get_origin = typing.get_origin
+    get_args = typing.get_args
+# 3.8-3.9
+else:
+    try:
+        # 3.9+
+        from typing import _BaseGenericAlias
+    except ImportError:
+        _BaseGenericAlias = typing._GenericAlias
+    try:
+        # 3.9+
+        from typing import GenericAlias as _typing_GenericAlias
+    except ImportError:
+        _typing_GenericAlias = typing._GenericAlias
+
+    def get_origin(tp):
+        """Get the unsubscripted version of a type.
+
+        This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+        and Annotated. Return None for unsupported types. Examples::
+
+            get_origin(Literal[42]) is Literal
+            get_origin(int) is None
+            get_origin(ClassVar[int]) is ClassVar
+            get_origin(Generic) is Generic
+            get_origin(Generic[T]) is Generic
+            get_origin(Union[T, int]) is Union
+            get_origin(List[Tuple[T, T]][int]) == list
+            get_origin(P.args) is P
+        """
+        if isinstance(tp, _AnnotatedAlias):
+            return Annotated
+        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
+                           ParamSpecArgs, ParamSpecKwargs)):
+            return tp.__origin__
+        if tp is typing.Generic:
+            return typing.Generic
+        return None
+
+    def get_args(tp):
+        """Get type arguments with all substitutions performed.
+
+        For unions, basic simplifications used by Union constructor are performed.
+        Examples::
+            get_args(Dict[str, int]) == (str, int)
+            get_args(int) == ()
+            get_args(Union[int, Union[T, int], str][int]) == (int, str)
+            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+            get_args(Callable[[], T][int]) == ([], int)
+        """
+        if isinstance(tp, _AnnotatedAlias):
+            return (tp.__origin__, *tp.__metadata__)
+        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
+            if getattr(tp, "_special", False):
+                return ()
+            res = tp.__args__
+            if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
+                res = (list(res[:-1]), res[-1])
+            return res
+        return ()
+
+
+# 3.10+
+if hasattr(typing, 'TypeAlias'):
+    TypeAlias = typing.TypeAlias
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def TypeAlias(self, parameters):
+        """Special marker indicating that an assignment should
+        be recognized as a proper type alias definition by type
+        checkers.
+
+        For example::
+
+            Predicate: TypeAlias = Callable[..., bool]
+
+        It's invalid when used anywhere except as in the example above.
+        """
+        raise TypeError(f"{self} is not subscriptable")
+# 3.8
+else:
+    TypeAlias = _ExtensionsSpecialForm(
+        'TypeAlias',
+        doc="""Special marker indicating that an assignment should
+        be recognized as a proper type alias definition by type
+        checkers.
+
+        For example::
+
+            Predicate: TypeAlias = Callable[..., bool]
+
+        It's invalid when used anywhere except as in the example
+        above."""
+    )
+
+
+if hasattr(typing, "NoDefault"):
+    NoDefault = typing.NoDefault
+else:
+    class NoDefaultTypeMeta(type):
+        def __setattr__(cls, attr, value):
+            # TypeError is consistent with the behavior of NoneType
+            raise TypeError(
+                f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
+            )
+
+    class NoDefaultType(metaclass=NoDefaultTypeMeta):
+        """The type of the NoDefault singleton."""
+
+        __slots__ = ()
+
+        def __new__(cls):
+            return globals().get("NoDefault") or object.__new__(cls)
+
+        def __repr__(self):
+            return "typing_extensions.NoDefault"
+
+        def __reduce__(self):
+            return "NoDefault"
+
+    NoDefault = NoDefaultType()
+    del NoDefaultType, NoDefaultTypeMeta
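+
+    # Minimal sketch (not from upstream): NoDefault is an immutable singleton
+    # used as the "no default was given" sentinel for TypeVar-likes:
+    #
+    #     repr(NoDefault)                  # -> 'typing_extensions.NoDefault'
+    #     type(NoDefault)() is NoDefault   # -> True (always the singleton)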
+
+
+def _set_default(type_param, default):
+    type_param.has_default = lambda: default is not NoDefault
+    type_param.__default__ = default
+
+
+def _set_module(typevarlike):
+    # for pickling:
+    def_mod = _caller(depth=3)
+    if def_mod != 'typing_extensions':
+        typevarlike.__module__ = def_mod
+
+
+class _DefaultMixin:
+    """Mixin for TypeVarLike defaults."""
+
+    __slots__ = ()
+    __init__ = _set_default
+
+
+# Classes using this metaclass must provide a _backported_typevarlike ClassVar
+class _TypeVarLikeMeta(type):
+    def __instancecheck__(cls, __instance: Any) -> bool:
+        return isinstance(__instance, cls._backported_typevarlike)
+
+
+if _PEP_696_IMPLEMENTED:
+    from typing import TypeVar
+else:
+    # Add default and infer_variance parameters from PEP 696 and 695
+    class TypeVar(metaclass=_TypeVarLikeMeta):
+        """Type variable."""
+
+        _backported_typevarlike = typing.TypeVar
+
+        def __new__(cls, name, *constraints, bound=None,
+                    covariant=False, contravariant=False,
+                    default=NoDefault, infer_variance=False):
+            if hasattr(typing, "TypeAliasType"):
+                # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
+                typevar = typing.TypeVar(name, *constraints, bound=bound,
+                                         covariant=covariant, contravariant=contravariant,
+                                         infer_variance=infer_variance)
+            else:
+                typevar = typing.TypeVar(name, *constraints, bound=bound,
+                                         covariant=covariant, contravariant=contravariant)
+                if infer_variance and (covariant or contravariant):
+                    raise ValueError("Variance cannot be specified with infer_variance.")
+                typevar.__infer_variance__ = infer_variance
+
+            _set_default(typevar, default)
+            _set_module(typevar)
+
+            def _tvar_prepare_subst(alias, args):
+                if (
+                    typevar.has_default()
+                    and alias.__parameters__.index(typevar) == len(args)
+                ):
+                    args += (typevar.__default__,)
+                return args
+
+            typevar.__typing_prepare_subst__ = _tvar_prepare_subst
+            return typevar
+
+        def __init_subclass__(cls) -> None:
+            raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
+
+
+# Python 3.10+ has PEP 612
+if hasattr(typing, 'ParamSpecArgs'):
+    ParamSpecArgs = typing.ParamSpecArgs
+    ParamSpecKwargs = typing.ParamSpecKwargs
+# 3.8-3.9
+else:
+    class _Immutable:
+        """Mixin to indicate that object should not be copied."""
+        __slots__ = ()
+
+        def __copy__(self):
+            return self
+
+        def __deepcopy__(self, memo):
+            return self
+
+    class ParamSpecArgs(_Immutable):
+        """The args for a ParamSpec object.
+
+        Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
+
+        ParamSpecArgs objects have a reference back to their ParamSpec:
+
+        P.args.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.args"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecArgs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
+
+    class ParamSpecKwargs(_Immutable):
+        """The kwargs for a ParamSpec object.
+
+        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
+
+        ParamSpecKwargs objects have a reference back to their ParamSpec:
+
+        P.kwargs.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.kwargs"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecKwargs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
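+
+    # Illustrative sketch (not from upstream): both accessors keep a reference
+    # back to their ParamSpec, which is all that runtime introspection needs:
+    #
+    #     P = ParamSpec("P")
+    #     P.args.__origin__ is P   # -> True
+    #     repr(P.kwargs)           # -> 'P.kwargs'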
+
+
+if _PEP_696_IMPLEMENTED:
+    from typing import ParamSpec
+
+# 3.10+
+elif hasattr(typing, 'ParamSpec'):
+
+    # Add default parameter - PEP 696
+    class ParamSpec(metaclass=_TypeVarLikeMeta):
+        """Parameter specification."""
+
+        _backported_typevarlike = typing.ParamSpec
+
+        def __new__(cls, name, *, bound=None,
+                    covariant=False, contravariant=False,
+                    infer_variance=False, default=NoDefault):
+            if hasattr(typing, "TypeAliasType"):
+                # PEP 695 implemented, can pass infer_variance to typing.TypeVar
+                paramspec = typing.ParamSpec(name, bound=bound,
+                                             covariant=covariant,
+                                             contravariant=contravariant,
+                                             infer_variance=infer_variance)
+            else:
+                paramspec = typing.ParamSpec(name, bound=bound,
+                                             covariant=covariant,
+                                             contravariant=contravariant)
+                paramspec.__infer_variance__ = infer_variance
+
+            _set_default(paramspec, default)
+            _set_module(paramspec)
+
+            def _paramspec_prepare_subst(alias, args):
+                params = alias.__parameters__
+                i = params.index(paramspec)
+                if i == len(args) and paramspec.has_default():
+                    args = [*args, paramspec.__default__]
+                if i >= len(args):
+                    raise TypeError(f"Too few arguments for {alias}")
+                # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
+                if len(params) == 1 and not typing._is_param_expr(args[0]):
+                    assert i == 0
+                    args = (args,)
+                # Convert lists to tuples to help other libraries cache the results.
+                elif isinstance(args[i], list):
+                    args = (*args[:i], tuple(args[i]), *args[i + 1:])
+                return args
+
+            paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
+            return paramspec
+
+        def __init_subclass__(cls) -> None:
+            raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
+
+# 3.8-3.9
+else:
+
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class ParamSpec(list, _DefaultMixin):
+        """Parameter specification variable.
+
+        Usage::
+
+           P = ParamSpec('P')
+
+        Parameter specification variables exist primarily for the benefit of static
+        type checkers.  They are used to forward the parameter types of one
+        callable to another callable, a pattern commonly found in higher order
+        functions and decorators.  They are only valid when used in ``Concatenate``,
+        or as the first argument to ``Callable``. In Python 3.10 and higher,
+        they are also supported in user-defined Generics at runtime.
+        See class Generic for more information on generic types.  An
+        example for annotating a decorator::
+
+           T = TypeVar('T')
+           P = ParamSpec('P')
+
+           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+               '''A type-safe decorator to add logging to a function.'''
+               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+                   logging.info(f'{f.__name__} was called')
+                   return f(*args, **kwargs)
+               return inner
+
+           @add_logging
+           def add_two(x: float, y: float) -> float:
+               '''Add two numbers together.'''
+               return x + y
+
+        Parameter specification variables defined with covariant=True or
+        contravariant=True can be used to declare covariant or contravariant
+        generic types.  These keyword arguments are valid, but their actual semantics
+        are yet to be decided.  See PEP 612 for details.
+
+        Parameter specification variables can be introspected. e.g.:
+
+           P.__name__ == 'P'
+           P.__bound__ == None
+           P.__covariant__ == False
+           P.__contravariant__ == False
+
+        Note that only parameter specification variables defined in global scope can
+        be pickled.
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        @property
+        def args(self):
+            return ParamSpecArgs(self)
+
+        @property
+        def kwargs(self):
+            return ParamSpecKwargs(self)
+
+        def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
+                     infer_variance=False, default=NoDefault):
+            list.__init__(self, [self])
+            self.__name__ = name
+            self.__covariant__ = bool(covariant)
+            self.__contravariant__ = bool(contravariant)
+            self.__infer_variance__ = bool(infer_variance)
+            if bound:
+                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
+            else:
+                self.__bound__ = None
+            _DefaultMixin.__init__(self, default)
+
+            # for pickling:
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+        def __repr__(self):
+            if self.__infer_variance__:
+                prefix = ''
+            elif self.__covariant__:
+                prefix = '+'
+            elif self.__contravariant__:
+                prefix = '-'
+            else:
+                prefix = '~'
+            return prefix + self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        # Hack to get typing._type_check to pass.
+        def __call__(self, *args, **kwargs):
+            pass
+
+
+# 3.8-3.9
+if not hasattr(typing, 'Concatenate'):
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class _ConcatenateGenericAlias(list):
+
+        # Trick Generic into looking into this for __parameters__.
+        __class__ = typing._GenericAlias
+
+        # Flag in 3.8.
+        _special = False
+
+        def __init__(self, origin, args):
+            super().__init__(args)
+            self.__origin__ = origin
+            self.__args__ = args
+
+        def __repr__(self):
+            _type_repr = typing._type_repr
+            return (f'{_type_repr(self.__origin__)}'
+                    f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
+
+        def __hash__(self):
+            return hash((self.__origin__, self.__args__))
+
+        # Hack to get typing._type_check to pass in Generic.
+        def __call__(self, *args, **kwargs):
+            pass
+
+        @property
+        def __parameters__(self):
+            return tuple(
+                tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
+            )
+
+
+# 3.8-3.9
+@typing._tp_cache
+def _concatenate_getitem(self, parameters):
+    if parameters == ():
+        raise TypeError("Cannot take a Concatenate of no types.")
+    if not isinstance(parameters, tuple):
+        parameters = (parameters,)
+    if not isinstance(parameters[-1], ParamSpec):
+        raise TypeError("The last parameter to Concatenate should be a "
+                        "ParamSpec variable.")
+    msg = "Concatenate[arg, ...]: each arg must be a type."
+    parameters = tuple(typing._type_check(p, msg) for p in parameters)
+    return _ConcatenateGenericAlias(self, parameters)
+
+
+# 3.10+
+if hasattr(typing, 'Concatenate'):
+    Concatenate = typing.Concatenate
+    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def Concatenate(self, parameters):
+        """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+        higher order function which adds, removes or transforms parameters of a
+        callable.
+
+        For example::
+
+           Callable[Concatenate[int, P], int]
+
+        See PEP 612 for detailed information.
+        """
+        return _concatenate_getitem(self, parameters)
+# 3.8
+else:
+    class _ConcatenateForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            return _concatenate_getitem(self, parameters)
+
+    Concatenate = _ConcatenateForm(
+        'Concatenate',
+        doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+        higher order function which adds, removes or transforms parameters of a
+        callable.
+
+        For example::
+
+           Callable[Concatenate[int, P], int]
+
+        See PEP 612 for detailed information.
+        """)
+
+# 3.10+
+if hasattr(typing, 'TypeGuard'):
+    TypeGuard = typing.TypeGuard
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def TypeGuard(self, parameters):
+        """Special typing form used to annotate the return type of a user-defined
+        type guard function.  ``TypeGuard`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeGuard`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the type inside ``TypeGuard``.
+
+        For example::
+
+            def is_str(val: Union[str, float]):
+                # "isinstance" type guard
+                if isinstance(val, str):
+                    # Type of ``val`` is narrowed to ``str``
+                    ...
+                else:
+                    # Else, type of ``val`` is narrowed to ``float``.
+                    ...
+
+        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+        form of ``TypeA`` (it can even be a wider form) and this may lead to
+        type-unsafe results.  The main reason is to allow for things like
+        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+        a subtype of the former, since ``List`` is invariant.  The responsibility of
+        writing type-safe type guards is left to the user.
+
+        ``TypeGuard`` also works with type variables.  For more information, see
+        PEP 647 (User-Defined Type Guards).
+        """
+        item = typing._type_check(parameters, f'{self} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+    class _TypeGuardForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type')
+            return typing._GenericAlias(self, (item,))
+
+    TypeGuard = _TypeGuardForm(
+        'TypeGuard',
+        doc="""Special typing form used to annotate the return type of a user-defined
+        type guard function.  ``TypeGuard`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeGuard`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the type inside ``TypeGuard``.
+
+        For example::
+
+            def is_str(val: Union[str, float]):
+                # "isinstance" type guard
+                if isinstance(val, str):
+                    # Type of ``val`` is narrowed to ``str``
+                    ...
+                else:
+                    # Else, type of ``val`` is narrowed to ``float``.
+                    ...
+
+        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+        form of ``TypeA`` (it can even be a wider form) and this may lead to
+        type-unsafe results.  The main reason is to allow for things like
+        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+        a subtype of the former, since ``List`` is invariant.  The responsibility of
+        writing type-safe type guards is left to the user.
+
+        ``TypeGuard`` also works with type variables.  For more information, see
+        PEP 647 (User-Defined Type Guards).
+        """)
+
+# 3.13+
+if hasattr(typing, 'TypeIs'):
+    TypeIs = typing.TypeIs
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def TypeIs(self, parameters):
+        """Special typing form used to annotate the return type of a user-defined
+        type narrower function.  ``TypeIs`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeIs[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeIs`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the intersection of the type inside ``TypeIs`` and the argument's
+        previously known type.
+
+        For example::
+
+            def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+                return hasattr(val, '__await__')
+
+            def f(val: Union[int, Awaitable[int]]) -> int:
+                if is_awaitable(val):
+                    assert_type(val, Awaitable[int])
+                else:
+                    assert_type(val, int)
+
+        ``TypeIs`` also works with type variables.  For more information, see
+        PEP 742 (Narrowing types with TypeIs).
+        """
+        item = typing._type_check(parameters, f'{self} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+    class _TypeIsForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type')
+            return typing._GenericAlias(self, (item,))
+
+    TypeIs = _TypeIsForm(
+        'TypeIs',
+        doc="""Special typing form used to annotate the return type of a user-defined
+        type narrower function.  ``TypeIs`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeIs[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeIs`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the intersection of the type inside ``TypeIs`` and the argument's
+        previously known type.
+
+        For example::
+
+            def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+                return hasattr(val, '__await__')
+
+            def f(val: Union[int, Awaitable[int]]) -> int:
+                if is_awaitable(val):
+                    assert_type(val, Awaitable[int])
+                else:
+                    assert_type(val, int)
+
+        ``TypeIs`` also works with type variables.  For more information, see
+        PEP 742 (Narrowing types with TypeIs).
+        """)
+
+
+# Vendored from cpython typing._SpecialForm
+class _SpecialForm(typing._Final, _root=True):
+    __slots__ = ('_name', '__doc__', '_getitem')
+
+    def __init__(self, getitem):
+        self._getitem = getitem
+        self._name = getitem.__name__
+        self.__doc__ = getitem.__doc__
+
+    def __getattr__(self, item):
+        if item in {'__name__', '__qualname__'}:
+            return self._name
+
+        raise AttributeError(item)
+
+    def __mro_entries__(self, bases):
+        raise TypeError(f"Cannot subclass {self!r}")
+
+    def __repr__(self):
+        return f'typing_extensions.{self._name}'
+
+    def __reduce__(self):
+        return self._name
+
+    def __call__(self, *args, **kwds):
+        raise TypeError(f"Cannot instantiate {self!r}")
+
+    def __or__(self, other):
+        return typing.Union[self, other]
+
+    def __ror__(self, other):
+        return typing.Union[other, self]
+
+    def __instancecheck__(self, obj):
+        raise TypeError(f"{self} cannot be used with isinstance()")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError(f"{self} cannot be used with issubclass()")
+
+    @typing._tp_cache
+    def __getitem__(self, parameters):
+        return self._getitem(self, parameters)
+
+
+if hasattr(typing, "LiteralString"):  # 3.11+
+    LiteralString = typing.LiteralString
+else:
+    @_SpecialForm
+    def LiteralString(self, params):
+        """Represents an arbitrary literal string.
+
+        Example::
+
+          from typing_extensions import LiteralString
+
+          def query(sql: LiteralString) -> ...:
+              ...
+
+          query("SELECT * FROM table")  # ok
+          query(f"SELECT * FROM {input()}")  # not ok
+
+        See PEP 675 for details.
+
+        """
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Self"):  # 3.11+
+    Self = typing.Self
+else:
+    @_SpecialForm
+    def Self(self, params):
+        """Used to spell the type of "self" in classes.
+
+        Example::
+
+          from typing import Self
+
+          class ReturnsSelf:
+              def parse(self, data: bytes) -> Self:
+                  ...
+                  return self
+
+        """
+
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Never"):  # 3.11+
+    Never = typing.Never
+else:
+    @_SpecialForm
+    def Never(self, params):
+        """The bottom type, a type that has no members.
+
+        This can be used to define a function that should never be
+        called, or a function that never returns::
+
+            from typing_extensions import Never
+
+            def never_call_me(arg: Never) -> None:
+                pass
+
+            def int_or_str(arg: int | str) -> None:
+                never_call_me(arg)  # type checker error
+                match arg:
+                    case int():
+                        print("It's an int")
+                    case str():
+                        print("It's a str")
+                    case _:
+                        never_call_me(arg)  # ok, arg is of type Never
+
+        """
+
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, 'Required'):  # 3.11+
+    Required = typing.Required
+    NotRequired = typing.NotRequired
+elif sys.version_info[:2] >= (3, 9):  # 3.9-3.10
+    @_ExtensionsSpecialForm
+    def Required(self, parameters):
+        """A special typing construct to mark a key of a total=False TypedDict
+        as required. For example:
+
+            class Movie(TypedDict, total=False):
+                title: Required[str]
+                year: int
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+
+        There is no runtime checking that a required key is actually provided
+        when instantiating a related TypedDict.
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+
+    @_ExtensionsSpecialForm
+    def NotRequired(self, parameters):
+        """A special typing construct to mark a key of a TypedDict as
+        potentially missing. For example:
+
+            class Movie(TypedDict):
+                title: str
+                year: NotRequired[int]
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+
+else:  # 3.8
+    class _RequiredForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return typing._GenericAlias(self, (item,))
+
+    Required = _RequiredForm(
+        'Required',
+        doc="""A special typing construct to mark a key of a total=False TypedDict
+        as required. For example:
+
+            class Movie(TypedDict, total=False):
+                title: Required[str]
+                year: int
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+
+        There is no runtime checking that a required key is actually provided
+        when instantiating a related TypedDict.
+        """)
+    NotRequired = _RequiredForm(
+        'NotRequired',
+        doc="""A special typing construct to mark a key of a TypedDict as
+        potentially missing. For example:
+
+            class Movie(TypedDict):
+                title: str
+                year: NotRequired[int]
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+        """)
+
+
+if hasattr(typing, 'ReadOnly'):  # 3.13+
+    ReadOnly = typing.ReadOnly
+elif sys.version_info[:2] >= (3, 9):  # 3.9-3.12
+    @_ExtensionsSpecialForm
+    def ReadOnly(self, parameters):
+        """A special typing construct to mark an item of a TypedDict as read-only.
+
+        For example:
+
+            class Movie(TypedDict):
+                title: ReadOnly[str]
+                year: int
+
+            def mutate_movie(m: Movie) -> None:
+                m["year"] = 1992  # allowed
+                m["title"] = "The Matrix"  # typechecker error
+
+        There is no runtime checking for this property.
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+
+else:  # 3.8
+    class _ReadOnlyForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return typing._GenericAlias(self, (item,))
+
+    ReadOnly = _ReadOnlyForm(
+        'ReadOnly',
+        doc="""A special typing construct to mark a key of a TypedDict as read-only.
+
+        For example:
+
+            class Movie(TypedDict):
+                title: ReadOnly[str]
+                year: int
+
+            def mutate_movie(m: Movie) -> None:
+                m["year"] = 1992  # allowed
+                m["title"] = "The Matrix"  # typechecker error
+
+        There is no runtime checking for this property.
+        """)
+
+
+_UNPACK_DOC = """\
+Type unpack operator.
+
+The type unpack operator takes the child types from some container type,
+such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
+example:
+
+  # For some generic class `Foo`:
+  Foo[Unpack[tuple[int, str]]]  # Equivalent to Foo[int, str]
+
+  Ts = TypeVarTuple('Ts')
+  # Specifies that `Bar` is generic in an arbitrary number of types.
+  # (Think of `Ts` as a tuple of an arbitrary number of individual
+  #  `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
+  #  `Generic[]`.)
+  class Bar(Generic[Unpack[Ts]]): ...
+  Bar[int]  # Valid
+  Bar[int, str]  # Also valid
+
+From Python 3.11, this can also be done using the `*` operator:
+
+    Foo[*tuple[int, str]]
+    class Bar(Generic[*Ts]): ...
+
+The operator can also be used along with a `TypedDict` to annotate
+`**kwargs` in a function signature. For instance:
+
+  class Movie(TypedDict):
+    name: str
+    year: int
+
+  # This function expects two keyword arguments - *name* of type `str` and
+  # *year* of type `int`.
+  def foo(**kwargs: Unpack[Movie]): ...
+
+Note that there is only some runtime checking of this operator. Not
+everything the runtime allows may be accepted by static type checkers.
+
+For more information, see PEP 646 and PEP 692.
+"""
+
+
+if sys.version_info >= (3, 12):  # PEP 692 changed the repr of Unpack[]
+    Unpack = typing.Unpack
+
+    def _is_unpack(obj):
+        return get_origin(obj) is Unpack
+
+elif sys.version_info[:2] >= (3, 9):  # 3.9+
+    class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
+        def __init__(self, getitem):
+            super().__init__(getitem)
+            self.__doc__ = _UNPACK_DOC
+
+    class _UnpackAlias(typing._GenericAlias, _root=True):
+        __class__ = typing.TypeVar
+
+        @property
+        def __typing_unpacked_tuple_args__(self):
+            assert self.__origin__ is Unpack
+            assert len(self.__args__) == 1
+            arg, = self.__args__
+            if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
+                if arg.__origin__ is not tuple:
+                    raise TypeError("Unpack[...] must be used with a tuple type")
+                return arg.__args__
+            return None
+
+    @_UnpackSpecialForm
+    def Unpack(self, parameters):
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return _UnpackAlias(self, (item,))
+
+    def _is_unpack(obj):
+        return isinstance(obj, _UnpackAlias)
+
+else:  # 3.8
+    class _UnpackAlias(typing._GenericAlias, _root=True):
+        __class__ = typing.TypeVar
+
+    class _UnpackForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return _UnpackAlias(self, (item,))
+
+    Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC)
+
+    def _is_unpack(obj):
+        return isinstance(obj, _UnpackAlias)
+
+
+if _PEP_696_IMPLEMENTED:
+    from typing import TypeVarTuple
+
+elif hasattr(typing, "TypeVarTuple"):  # 3.11+
+
+    def _unpack_args(*args):
+        newargs = []
+        for arg in args:
+            subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+            if subargs is not None and not (subargs and subargs[-1] is ...):
+                newargs.extend(subargs)
+            else:
+                newargs.append(arg)
+        return newargs
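+
+    # Illustrative only (not from upstream): fixed-length unpacked tuples are
+    # flattened, while variadic ones (ending in ...) are left intact:
+    #
+    #     _unpack_args(int, Unpack[tuple[str, float]])   # -> [int, str, float]
+    #     _unpack_args(Unpack[tuple[str, ...]])          # -> [Unpack[tuple[str, ...]]]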
+
+    # Add default parameter - PEP 696
+    class TypeVarTuple(metaclass=_TypeVarLikeMeta):
+        """Type variable tuple."""
+
+        _backported_typevarlike = typing.TypeVarTuple
+
+        def __new__(cls, name, *, default=NoDefault):
+            tvt = typing.TypeVarTuple(name)
+            _set_default(tvt, default)
+            _set_module(tvt)
+
+            def _typevartuple_prepare_subst(alias, args):
+                params = alias.__parameters__
+                typevartuple_index = params.index(tvt)
+                for param in params[typevartuple_index + 1:]:
+                    if isinstance(param, TypeVarTuple):
+                        raise TypeError(
+                            f"More than one TypeVarTuple parameter in {alias}"
+                        )
+
+                alen = len(args)
+                plen = len(params)
+                left = typevartuple_index
+                right = plen - typevartuple_index - 1
+                var_tuple_index = None
+                fillarg = None
+                for k, arg in enumerate(args):
+                    if not isinstance(arg, type):
+                        subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+                        if subargs and len(subargs) == 2 and subargs[-1] is ...:
+                            if var_tuple_index is not None:
+                                raise TypeError(
+                                    "More than one unpacked "
+                                    "arbitrary-length tuple argument"
+                                )
+                            var_tuple_index = k
+                            fillarg = subargs[0]
+                if var_tuple_index is not None:
+                    left = min(left, var_tuple_index)
+                    right = min(right, alen - var_tuple_index - 1)
+                elif left + right > alen:
+                    raise TypeError(f"Too few arguments for {alias};"
+                                    f" actual {alen}, expected at least {plen - 1}")
+                if left == alen - right and tvt.has_default():
+                    replacement = _unpack_args(tvt.__default__)
+                else:
+                    replacement = args[left: alen - right]
+
+                return (
+                    *args[:left],
+                    *([fillarg] * (typevartuple_index - left)),
+                    replacement,
+                    *([fillarg] * (plen - right - left - typevartuple_index - 1)),
+                    *args[alen - right:],
+                )
+
+            tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
+            return tvt
+
+        def __init_subclass__(self, *args, **kwds):
+            raise TypeError("Cannot subclass special typing classes")
+
+else:  # <=3.10
+    class TypeVarTuple(_DefaultMixin):
+        """Type variable tuple.
+
+        Usage::
+
+            Ts = TypeVarTuple('Ts')
+
+        In the same way that a normal type variable is a stand-in for a single
+        type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
+        type such as ``Tuple[int, str]``.
+
+        Type variable tuples can be used in ``Generic`` declarations.
+        Consider the following example::
+
+            class Array(Generic[*Ts]): ...
+
+        The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
+        where ``T1`` and ``T2`` are type variables. To use these type variables
+        as type parameters of ``Array``, we must *unpack* the type variable tuple using
+        the star operator: ``*Ts``. The signature of ``Array`` then behaves
+        as if we had simply written ``class Array(Generic[T1, T2]): ...``.
+        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
+        us to parameterise the class with an *arbitrary* number of type parameters.
+
+        Type variable tuples can be used anywhere a normal ``TypeVar`` can.
+        This includes class definitions, as shown above, as well as function
+        signatures and variable annotations::
+
+            class Array(Generic[*Ts]):
+
+                def __init__(self, shape: Tuple[*Ts]):
+                    self._shape: Tuple[*Ts] = shape
+
+                def get_shape(self) -> Tuple[*Ts]:
+                    return self._shape
+
+            shape = (Height(480), Width(640))
+            x: Array[Height, Width] = Array(shape)
+            y = abs(x)  # Inferred type is Array[Height, Width]
+            z = x + x   #        ...    is Array[Height, Width]
+            x.get_shape()  #     ...    is tuple[Height, Width]
+
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        def __iter__(self):
+            yield self.__unpacked__
+
+        def __init__(self, name, *, default=NoDefault):
+            self.__name__ = name
+            _DefaultMixin.__init__(self, default)
+
+            # for pickling:
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+            self.__unpacked__ = Unpack[self]
+
+        def __repr__(self):
+            return self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        def __init_subclass__(self, *args, **kwds):
+            if '_root' not in kwds:
+                raise TypeError("Cannot subclass special typing classes")
+
+
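+# For illustration, the PEP 696 ``default`` support backported above can be
+# exercised like this (a sketch; the names are examples only):
+#
+#     Ts = TypeVarTuple('Ts', default=Unpack[Tuple[int, str]])
+#     assert Ts.has_default()
+#     Ts.__default__   # Unpack[Tuple[int, str]]
+
+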
+if hasattr(typing, "reveal_type"):  # 3.11+
+    reveal_type = typing.reveal_type
+else:  # <=3.10
+    def reveal_type(obj: T, /) -> T:
+        """Reveal the inferred type of a variable.
+
+        When a static type checker encounters a call to ``reveal_type()``,
+        it will emit the inferred type of the argument::
+
+            x: int = 1
+            reveal_type(x)
+
+        Running a static type checker (e.g., ``mypy``) on this example
+        will produce output similar to 'Revealed type is "builtins.int"'.
+
+        At runtime, the function prints the runtime type of the
+        argument and returns it unchanged.
+
+        """
+        print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
+        return obj
+
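+# Runtime behavior of the fallback above, for illustration:
+#
+#     x = 1
+#     y = reveal_type(x)   # prints "Runtime type is 'int'" to stderr
+#     assert y is x        # the argument is returned unchanged
+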
+
+if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"):  # 3.11+
+    _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
+else:  # <=3.10
+    _ASSERT_NEVER_REPR_MAX_LENGTH = 100
+
+
+if hasattr(typing, "assert_never"):  # 3.11+
+    assert_never = typing.assert_never
+else:  # <=3.10
+    def assert_never(arg: Never, /) -> Never:
+        """Assert to the type checker that a line of code is unreachable.
+
+        Example::
+
+            def int_or_str(arg: int | str) -> None:
+                match arg:
+                    case int():
+                        print("It's an int")
+                    case str():
+                        print("It's a str")
+                    case _:
+                        assert_never(arg)
+
+        If a type checker finds that a call to assert_never() is
+        reachable, it will emit an error.
+
+        At runtime, this raises an ``AssertionError`` when called.
+
+        """
+        value = repr(arg)
+        if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
+            value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
+        raise AssertionError(f"Expected code to be unreachable, but got: {value}")
+
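+# A sketch of the fallback's runtime behavior, for illustration:
+#
+#     try:
+#         assert_never("unexpected")  # type: ignore[arg-type]
+#     except AssertionError as exc:
+#         assert "Expected code to be unreachable" in str(exc)
+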
+
+if sys.version_info >= (3, 12):  # 3.12+
+    # dataclass_transform exists in 3.11 but lacks the frozen_default parameter
+    dataclass_transform = typing.dataclass_transform
+else:  # <=3.11
+    def dataclass_transform(
+        *,
+        eq_default: bool = True,
+        order_default: bool = False,
+        kw_only_default: bool = False,
+        frozen_default: bool = False,
+        field_specifiers: typing.Tuple[
+            typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
+            ...
+        ] = (),
+        **kwargs: typing.Any,
+    ) -> typing.Callable[[T], T]:
+        """Decorator that marks a function, class, or metaclass as providing
+        dataclass-like behavior.
+
+        Example:
+
+            from typing_extensions import dataclass_transform
+
+            _T = TypeVar("_T")
+
+            # Used on a decorator function
+            @dataclass_transform()
+            def create_model(cls: type[_T]) -> type[_T]:
+                ...
+                return cls
+
+            @create_model
+            class CustomerModel:
+                id: int
+                name: str
+
+            # Used on a base class
+            @dataclass_transform()
+            class ModelBase: ...
+
+            class CustomerModel(ModelBase):
+                id: int
+                name: str
+
+            # Used on a metaclass
+            @dataclass_transform()
+            class ModelMeta(type): ...
+
+            class ModelBase(metaclass=ModelMeta): ...
+
+            class CustomerModel(ModelBase):
+                id: int
+                name: str
+
+        Each of the ``CustomerModel`` classes defined in this example will now
+        behave similarly to a dataclass created with the ``@dataclasses.dataclass``
+        decorator. For example, the type checker will synthesize an ``__init__``
+        method.
+
+        The arguments to this decorator can be used to customize this behavior:
+        - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
+          True or False if it is omitted by the caller.
+        - ``order_default`` indicates whether the ``order`` parameter is
+          assumed to be True or False if it is omitted by the caller.
+        - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
+          assumed to be True or False if it is omitted by the caller.
+        - ``frozen_default`` indicates whether the ``frozen`` parameter is
+          assumed to be True or False if it is omitted by the caller.
+        - ``field_specifiers`` specifies a static list of supported classes
+          or functions that describe fields, similar to ``dataclasses.field()``.
+
+        At runtime, this decorator records its arguments in the
+        ``__dataclass_transform__`` attribute on the decorated object.
+
+        See PEP 681 for details.
+
+        """
+        def decorator(cls_or_fn):
+            cls_or_fn.__dataclass_transform__ = {
+                "eq_default": eq_default,
+                "order_default": order_default,
+                "kw_only_default": kw_only_default,
+                "frozen_default": frozen_default,
+                "field_specifiers": field_specifiers,
+                "kwargs": kwargs,
+            }
+            return cls_or_fn
+        return decorator
+
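+# The fallback only records its arguments; a quick way to see that
+# (hypothetical names, shown as a comment sketch):
+#
+#     @dataclass_transform(kw_only_default=True)
+#     def create_model(cls):
+#         return cls
+#
+#     create_model.__dataclass_transform__["kw_only_default"]  # True
+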
+
+if hasattr(typing, "override"):  # 3.12+
+    override = typing.override
+else:  # <=3.11
+    _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
+
+    def override(arg: _F, /) -> _F:
+        """Indicate that a method is intended to override a method in a base class.
+
+        Usage:
+
+            class Base:
+                def method(self) -> None:
+                    pass
+
+            class Child(Base):
+                @override
+                def method(self) -> None:
+                    super().method()
+
+        When this decorator is applied to a method, the type checker will
+        validate that it overrides a method with the same name on a base class.
+        This helps prevent bugs that may occur when a base class is changed
+        without an equivalent change to a child class.
+
+        There is no runtime checking of these properties. The decorator
+        sets the ``__override__`` attribute to ``True`` on the decorated object
+        to allow runtime introspection.
+
+        See PEP 698 for details.
+
+        """
+        try:
+            arg.__override__ = True
+        except (AttributeError, TypeError):
+            # Skip the attribute silently if it is not writable.
+            # AttributeError happens if the object has __slots__ or a
+            # read-only property, TypeError if it's a builtin class.
+            pass
+        return arg
+
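+# Runtime effect of the fallback, for illustration (a sketch):
+#
+#     class Base:
+#         def method(self) -> None: ...
+#
+#     class Child(Base):
+#         @override
+#         def method(self) -> None: ...
+#
+#     assert Child.method.__override__ is True
+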
+
+if hasattr(warnings, "deprecated"):
+    deprecated = warnings.deprecated
+else:
+    _T = typing.TypeVar("_T")
+
+    class deprecated:
+        """Indicate that a class, function or overload is deprecated.
+
+        When this decorator is applied to an object, the type checker
+        will generate a diagnostic on usage of the deprecated object.
+
+        Usage:
+
+            @deprecated("Use B instead")
+            class A:
+                pass
+
+            @deprecated("Use g instead")
+            def f():
+                pass
+
+            @overload
+            @deprecated("int support is deprecated")
+            def g(x: int) -> int: ...
+            @overload
+            def g(x: str) -> int: ...
+
+        The warning specified by *category* will be emitted at runtime
+        on use of deprecated objects. For functions, that happens on calls;
+        for classes, on instantiation and on creation of subclasses.
+        If the *category* is ``None``, no warning is emitted at runtime.
+        The *stacklevel* determines where the
+        warning is emitted. If it is ``1`` (the default), the warning
+        is emitted at the direct caller of the deprecated object; if it
+        is higher, it is emitted further up the stack.
+        Static type checker behavior is not affected by the *category*
+        and *stacklevel* arguments.
+
+        The deprecation message passed to the decorator is saved in the
+        ``__deprecated__`` attribute on the decorated object.
+        If applied to an overload, the decorator
+        must be after the ``@overload`` decorator for the attribute to
+        exist on the overload as returned by ``get_overloads()``.
+
+        See PEP 702 for details.
+
+        """
+        def __init__(
+            self,
+            message: str,
+            /,
+            *,
+            category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
+            stacklevel: int = 1,
+        ) -> None:
+            if not isinstance(message, str):
+                raise TypeError(
+                    "Expected an object of type str for 'message', not "
+                    f"{type(message).__name__!r}"
+                )
+            self.message = message
+            self.category = category
+            self.stacklevel = stacklevel
+
+        def __call__(self, arg: _T, /) -> _T:
+            # Make sure the inner functions created below don't
+            # retain a reference to self.
+            msg = self.message
+            category = self.category
+            stacklevel = self.stacklevel
+            if category is None:
+                arg.__deprecated__ = msg
+                return arg
+            elif isinstance(arg, type):
+                import functools
+                from types import MethodType
+
+                original_new = arg.__new__
+
+                @functools.wraps(original_new)
+                def __new__(cls, *args, **kwargs):
+                    if cls is arg:
+                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                    if original_new is not object.__new__:
+                        return original_new(cls, *args, **kwargs)
+                    # Mirrors a similar check in object.__new__.
+                    elif cls.__init__ is object.__init__ and (args or kwargs):
+                        raise TypeError(f"{cls.__name__}() takes no arguments")
+                    else:
+                        return original_new(cls)
+
+                arg.__new__ = staticmethod(__new__)
+
+                original_init_subclass = arg.__init_subclass__
+                # We need slightly different behavior if __init_subclass__
+                # is a bound method (likely if it was implemented in Python)
+                if isinstance(original_init_subclass, MethodType):
+                    original_init_subclass = original_init_subclass.__func__
+
+                    @functools.wraps(original_init_subclass)
+                    def __init_subclass__(*args, **kwargs):
+                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                        return original_init_subclass(*args, **kwargs)
+
+                    arg.__init_subclass__ = classmethod(__init_subclass__)
+                # Or otherwise, which likely means it's a builtin such as
+                # object's implementation of __init_subclass__.
+                else:
+                    @functools.wraps(original_init_subclass)
+                    def __init_subclass__(*args, **kwargs):
+                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                        return original_init_subclass(*args, **kwargs)
+
+                    arg.__init_subclass__ = __init_subclass__
+
+                arg.__deprecated__ = __new__.__deprecated__ = msg
+                __init_subclass__.__deprecated__ = msg
+                return arg
+            elif callable(arg):
+                import functools
+
+                @functools.wraps(arg)
+                def wrapper(*args, **kwargs):
+                    warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                    return arg(*args, **kwargs)
+
+                arg.__deprecated__ = wrapper.__deprecated__ = msg
+                return wrapper
+            else:
+                raise TypeError(
+                    "@deprecated decorator with non-None category must be applied to "
+                    f"a class or callable, not {arg!r}"
+                )
+
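+# Runtime behavior of the fallback, for illustration (a sketch):
+#
+#     @deprecated("Use g instead")
+#     def f():
+#         return 42
+#
+#     with warnings.catch_warnings(record=True):
+#         warnings.simplefilter("always")
+#         f()                       # emits DeprecationWarning("Use g instead")
+#     assert f.__deprecated__ == "Use g instead"
+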
+
+# We have to do some monkey patching to deal with the dual nature of
+# Unpack/TypeVarTuple:
+# - We want Unpack to be a kind of TypeVar so it gets accepted in
+#   Generic[Unpack[Ts]]
+# - We want it to *not* be treated as a TypeVar for the purposes of
+#   counting generic parameters, so that when we subscript a generic,
+#   the runtime doesn't try to substitute the Unpack with the subscripted type.
+if not hasattr(typing, "TypeVarTuple"):
+    def _check_generic(cls, parameters, elen=_marker):
+        """Check correct count for parameters of a generic cls (internal helper).
+
+        This gives a nice error message in case of count mismatch.
+        """
+        if not elen:
+            raise TypeError(f"{cls} is not a generic class")
+        if elen is _marker:
+            if not hasattr(cls, "__parameters__") or not cls.__parameters__:
+                raise TypeError(f"{cls} is not a generic class")
+            elen = len(cls.__parameters__)
+        alen = len(parameters)
+        if alen != elen:
+            expect_val = elen
+            if hasattr(cls, "__parameters__"):
+                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+                num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
+                if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
+                    return
+
+                # deal with TypeVarLike defaults
+                # required TypeVarLikes cannot appear after a defaulted one.
+                if alen < elen:
+                    # since we validate TypeVarLike default in _collect_type_vars
+                    # or _collect_parameters we can safely check parameters[alen]
+                    if (
+                        getattr(parameters[alen], '__default__', NoDefault)
+                        is not NoDefault
+                    ):
+                        return
+
+                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
+                                         is not NoDefault for p in parameters)
+
+                    elen -= num_default_tv
+
+                    expect_val = f"at least {elen}"
+
+            things = "arguments" if sys.version_info >= (3, 10) else "parameters"
+            raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
+                            f" for {cls}; actual {alen}, expected {expect_val}")
+else:
+    # Python 3.11+
+
+    def _check_generic(cls, parameters, elen):
+        """Check correct count for parameters of a generic cls (internal helper).
+
+        This gives a nice error message in case of count mismatch.
+        """
+        if not elen:
+            raise TypeError(f"{cls} is not a generic class")
+        alen = len(parameters)
+        if alen != elen:
+            expect_val = elen
+            if hasattr(cls, "__parameters__"):
+                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+
+                # deal with TypeVarLike defaults
+                # required TypeVarLikes cannot appear after a defaulted one.
+                if alen < elen:
+                    # since we validate TypeVarLike default in _collect_type_vars
+                    # or _collect_parameters we can safely check parameters[alen]
+                    if (
+                        getattr(parameters[alen], '__default__', NoDefault)
+                        is not NoDefault
+                    ):
+                        return
+
+                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
+                                         is not NoDefault for p in parameters)
+
+                    elen -= num_default_tv
+
+                    expect_val = f"at least {elen}"
+
+            raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
+                            f" for {cls}; actual {alen}, expected {expect_val}")
+
+if not _PEP_696_IMPLEMENTED:
+    typing._check_generic = _check_generic
+
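+# Net effect, for illustration: with hypothetical parameters
+# ``T = TypeVar("T")`` and ``U = TypeVar("U", default=int)``, a class
+# ``class Box(Generic[T, U]): ...`` accepts ``Box[str]`` (``U`` falls back
+# to its default), while ``Box[str, int, float]`` still raises a
+# "Too many arguments" TypeError.
+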
+
+def _has_generic_or_protocol_as_origin() -> bool:
+    try:
+        frame = sys._getframe(2)
+    # - Catch AttributeError: not all Python implementations have sys._getframe()
+    # - Catch ValueError: maybe we're called from an unexpected module
+    #   and the call stack isn't deep enough
+    except (AttributeError, ValueError):
+        return False  # err on the side of leniency
+    else:
+        # If we somehow get invoked from outside typing.py,
+        # also err on the side of leniency
+        if frame.f_globals.get("__name__") != "typing":
+            return False
+        origin = frame.f_locals.get("origin")
+        # Cannot use "in" because origin may be an object with a buggy __eq__ that
+        # throws an error.
+        return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
+
+
+_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
+
+
+def _is_unpacked_typevartuple(x) -> bool:
+    if get_origin(x) is not Unpack:
+        return False
+    args = get_args(x)
+    return (
+        bool(args)
+        and len(args) == 1
+        and type(args[0]) in _TYPEVARTUPLE_TYPES
+    )
+
+
+# In Python 3.11, _collect_type_vars was renamed to _collect_parameters.
+if hasattr(typing, '_collect_type_vars'):
+    def _collect_type_vars(types, typevar_types=None):
+        """Collect all type variable contained in types in order of
+        first appearance (lexicographic order). For example::
+
+            _collect_type_vars((T, List[S, T])) == (T, S)
+        """
+        if typevar_types is None:
+            typevar_types = typing.TypeVar
+        tvars = []
+
+        # A required TypeVarLike cannot appear after a TypeVarLike with a default
+        # if it was a direct call to `Generic[]` or `Protocol[]`
+        enforce_default_ordering = _has_generic_or_protocol_as_origin()
+        default_encountered = False
+
+        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+        type_var_tuple_encountered = False
+
+        for t in types:
+            if _is_unpacked_typevartuple(t):
+                type_var_tuple_encountered = True
+            elif isinstance(t, typevar_types) and t not in tvars:
+                if enforce_default_ordering:
+                    has_default = getattr(t, '__default__', NoDefault) is not NoDefault
+                    if has_default:
+                        if type_var_tuple_encountered:
+                            raise TypeError('Type parameter with a default'
+                                            ' follows TypeVarTuple')
+                        default_encountered = True
+                    elif default_encountered:
+                        raise TypeError(f'Type parameter {t!r} without a default'
+                                        ' follows type parameter with a default')
+
+                tvars.append(t)
+            if _should_collect_from_parameters(t):
+                tvars.extend([t for t in t.__parameters__ if t not in tvars])
+        return tuple(tvars)
+
+    typing._collect_type_vars = _collect_type_vars
+else:
+    def _collect_parameters(args):
+        """Collect all type variables and parameter specifications in args
+        in order of first appearance (lexicographic order).
+
+        For example::
+
+            assert _collect_parameters((T, Callable[P, T])) == (T, P)
+        """
+        parameters = []
+
+        # A required TypeVarLike cannot appear after a TypeVarLike with default
+        # if it was a direct call to `Generic[]` or `Protocol[]`
+        enforce_default_ordering = _has_generic_or_protocol_as_origin()
+        default_encountered = False
+
+        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+        type_var_tuple_encountered = False
+
+        for t in args:
+            if isinstance(t, type):
+                # We don't want __parameters__ descriptor of a bare Python class.
+                pass
+            elif isinstance(t, tuple):
+                # `t` might be a tuple, when `ParamSpec` is substituted with
+                # `[T, int]`, or `[int, *Ts]`, etc.
+                for x in t:
+                    for collected in _collect_parameters([x]):
+                        if collected not in parameters:
+                            parameters.append(collected)
+            elif hasattr(t, '__typing_subst__'):
+                if t not in parameters:
+                    if enforce_default_ordering:
+                        has_default = (
+                            getattr(t, '__default__', NoDefault) is not NoDefault
+                        )
+
+                        if type_var_tuple_encountered and has_default:
+                            raise TypeError('Type parameter with a default'
+                                            ' follows TypeVarTuple')
+
+                        if has_default:
+                            default_encountered = True
+                        elif default_encountered:
+                            raise TypeError(f'Type parameter {t!r} without a default'
+                                            ' follows type parameter with a default')
+
+                    parameters.append(t)
+            else:
+                if _is_unpacked_typevartuple(t):
+                    type_var_tuple_encountered = True
+                for x in getattr(t, '__parameters__', ()):
+                    if x not in parameters:
+                        parameters.append(x)
+
+        return tuple(parameters)
+
+    if not _PEP_696_IMPLEMENTED:
+        typing._collect_parameters = _collect_parameters
+
+# Backport typing.NamedTuple as it exists in Python 3.13.
+# In 3.11, support for defining generic `NamedTuple`s was added.
+# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
+# In 3.12, __orig_bases__ was added to call-based NamedTuples.
+# In 3.13, kwargs-based NamedTuples were deprecated.
+if sys.version_info >= (3, 13):
+    NamedTuple = typing.NamedTuple
+else:
+    def _make_nmtuple(name, types, module, defaults=()):
+        fields = [n for n, t in types]
+        annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
+                       for n, t in types}
+        nm_tpl = collections.namedtuple(name, fields,
+                                        defaults=defaults, module=module)
+        nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
+        # The `_field_types` attribute was removed in 3.9;
+        # in earlier versions, it is the same as the `__annotations__` attribute
+        if sys.version_info < (3, 9):
+            nm_tpl._field_types = annotations
+        return nm_tpl
+
+    _prohibited_namedtuple_fields = typing._prohibited
+    _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
+
+    class _NamedTupleMeta(type):
+        def __new__(cls, typename, bases, ns):
+            assert _NamedTuple in bases
+            for base in bases:
+                if base is not _NamedTuple and base is not typing.Generic:
+                    raise TypeError(
+                        'can only inherit from a NamedTuple type and Generic')
+            bases = tuple(tuple if base is _NamedTuple else base for base in bases)
+            if "__annotations__" in ns:
+                types = ns["__annotations__"]
+            elif "__annotate__" in ns:
+                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+                types = ns["__annotate__"](1)
+            else:
+                types = {}
+            default_names = []
+            for field_name in types:
+                if field_name in ns:
+                    default_names.append(field_name)
+                elif default_names:
+                    raise TypeError(f"Non-default namedtuple field {field_name} "
+                                    f"cannot follow default field"
+                                    f"{'s' if len(default_names) > 1 else ''} "
+                                    f"{', '.join(default_names)}")
+            nm_tpl = _make_nmtuple(
+                typename, types.items(),
+                defaults=[ns[n] for n in default_names],
+                module=ns['__module__']
+            )
+            nm_tpl.__bases__ = bases
+            if typing.Generic in bases:
+                if hasattr(typing, '_generic_class_getitem'):  # 3.12+
+                    nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
+                else:
+                    class_getitem = typing.Generic.__class_getitem__.__func__
+                    nm_tpl.__class_getitem__ = classmethod(class_getitem)
+            # update from user namespace without overriding special namedtuple attributes
+            for key, val in ns.items():
+                if key in _prohibited_namedtuple_fields:
+                    raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
+                elif key not in _special_namedtuple_fields:
+                    if key not in nm_tpl._fields:
+                        setattr(nm_tpl, key, ns[key])
+                    try:
+                        set_name = type(val).__set_name__
+                    except AttributeError:
+                        pass
+                    else:
+                        try:
+                            set_name(val, nm_tpl, key)
+                        except BaseException as e:
+                            msg = (
+                                f"Error calling __set_name__ on {type(val).__name__!r} "
+                                f"instance {key!r} in {typename!r}"
+                            )
+                            # BaseException.add_note() existed on py311,
+                            # but the __set_name__ machinery didn't start
+                            # using add_note() until py312.
+                            # Making sure exceptions are raised in the same way
+                            # as in "normal" classes seems most important here.
+                            if sys.version_info >= (3, 12):
+                                e.add_note(msg)
+                                raise
+                            else:
+                                raise RuntimeError(msg) from e
+
+            if typing.Generic in bases:
+                nm_tpl.__init_subclass__()
+            return nm_tpl
+
+    _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
+
+    def _namedtuple_mro_entries(bases):
+        assert NamedTuple in bases
+        return (_NamedTuple,)
+
+    @_ensure_subclassable(_namedtuple_mro_entries)
+    def NamedTuple(typename, fields=_marker, /, **kwargs):
+        """Typed version of namedtuple.
+
+        Usage::
+
+            class Employee(NamedTuple):
+                name: str
+                id: int
+
+        This is equivalent to::
+
+            Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+        The resulting class has an extra __annotations__ attribute, giving a
+        dict that maps field names to types.  (The field names are also in
+        the _fields attribute, which is part of the namedtuple API.)
+        An alternative equivalent functional syntax is also accepted::
+
+            Employee = NamedTuple('Employee', [('name', str), ('id', int)])
+        """
+        if fields is _marker:
+            if kwargs:
+                deprecated_thing = "Creating NamedTuple classes using keyword arguments"
+                deprecation_msg = (
+                    "{name} is deprecated and will be disallowed in Python {remove}. "
+                    "Use the class-based or functional syntax instead."
+                )
+            else:
+                deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+                example = f"`{typename} = NamedTuple({typename!r}, [])`"
+                deprecation_msg = (
+                    "{name} is deprecated and will be disallowed in Python {remove}. "
+                    "To create a NamedTuple class with 0 fields "
+                    "using the functional syntax, "
+                    "pass an empty list, e.g. "
+                ) + example + "."
+        elif fields is None:
+            if kwargs:
+                raise TypeError(
+                    "Cannot pass `None` as the 'fields' parameter "
+                    "and also specify fields using keyword arguments"
+                )
+            else:
+                deprecated_thing = "Passing `None` as the 'fields' parameter"
+                example = f"`{typename} = NamedTuple({typename!r}, [])`"
+                deprecation_msg = (
+                    "{name} is deprecated and will be disallowed in Python {remove}. "
+                    "To create a NamedTuple class with 0 fields "
+                    "using the functional syntax, "
+                    "pass an empty list, e.g. "
+                ) + example + "."
+        elif kwargs:
+            raise TypeError("Either list of fields or keywords"
+                            " can be provided to NamedTuple, not both")
+        if fields is _marker or fields is None:
+            warnings.warn(
+                deprecation_msg.format(name=deprecated_thing, remove="3.15"),
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            fields = kwargs.items()
+        nt = _make_nmtuple(typename, fields, module=_caller())
+        nt.__orig_bases__ = (NamedTuple,)
+        return nt
+
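+# The backport allows generic NamedTuples on older Pythons, e.g. (a sketch):
+#
+#     T = typing.TypeVar("T")
+#
+#     class Pair(NamedTuple, typing.Generic[T]):
+#         first: T
+#         second: T
+#
+#     Pair[int]  # parameterizable thanks to the injected __class_getitem__
+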
+
+if hasattr(collections.abc, "Buffer"):
+    Buffer = collections.abc.Buffer
+else:
+    class Buffer(abc.ABC):  # noqa: B024
+        """Base class for classes that implement the buffer protocol.
+
+        The buffer protocol allows Python objects to expose a low-level
+        memory buffer interface. Before Python 3.12, it is not possible
+        to implement the buffer protocol in pure Python code, or even
+        to check whether a class implements the buffer protocol. In
+        Python 3.12 and higher, the ``__buffer__`` method allows access
+        to the buffer protocol from Python code, and the
+        ``collections.abc.Buffer`` ABC allows checking whether a class
+        implements the buffer protocol.
+
+        To indicate support for the buffer protocol in earlier versions,
+        inherit from this ABC, either in a stub file or at runtime,
+        or use ABC registration. This ABC provides no methods, because
+        there are no Python-accessible methods shared by pre-3.12 buffer
+        classes. It is useful primarily for static checks.
+
+        """
+
+    # As a courtesy, register the most common stdlib buffer classes.
+    Buffer.register(memoryview)
+    Buffer.register(bytearray)
+    Buffer.register(bytes)
+
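+# For illustration: with the fallback ABC, buffer support is checked via
+# registration rather than the C-level protocol:
+#
+#     assert isinstance(b"", Buffer)
+#     assert isinstance(memoryview(b""), Buffer)
+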
+
+# Backport of types.get_original_bases, available on 3.12+ in CPython
+if hasattr(_types, "get_original_bases"):
+    get_original_bases = _types.get_original_bases
+else:
+    def get_original_bases(cls, /):
+        """Return the class's "original" bases prior to modification by `__mro_entries__`.
+
+        Examples::
+
+            from typing import TypeVar, Generic
+            from typing_extensions import NamedTuple, TypedDict
+
+            T = TypeVar("T")
+            class Foo(Generic[T]): ...
+            class Bar(Foo[int], float): ...
+            class Baz(list[str]): ...
+            Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
+            Spam = TypedDict("Spam", {"a": int, "b": str})
+
+            assert get_original_bases(Bar) == (Foo[int], float)
+            assert get_original_bases(Baz) == (list[str],)
+            assert get_original_bases(Eggs) == (NamedTuple,)
+            assert get_original_bases(Spam) == (TypedDict,)
+            assert get_original_bases(int) == (object,)
+        """
+        try:
+            return cls.__dict__.get("__orig_bases__", cls.__bases__)
+        except AttributeError:
+            raise TypeError(
+                f'Expected an instance of type, not {type(cls).__name__!r}'
+            ) from None
+
+
+# NewType is a class on Python 3.10+, making it pickleable
+# The error message for subclassing instances of NewType was improved on 3.11+
+if sys.version_info >= (3, 11):
+    NewType = typing.NewType
+else:
+    class NewType:
+        """NewType creates simple unique types with almost zero
+        runtime overhead. NewType(name, tp) is considered a subtype of tp
+        by static type checkers. At runtime, NewType(name, tp) returns
+        a dummy callable that simply returns its argument. Usage::
+
+            UserId = NewType('UserId', int)
+            def name_by_id(user_id: UserId) -> str:
+                ...
+            UserId('user')          # Fails type check
+            name_by_id(42)          # Fails type check
+            name_by_id(UserId(42))  # OK
+            num = UserId(5) + 1     # type: int
+        """
+
+        def __call__(self, obj, /):
+            return obj
+
+        def __init__(self, name, tp):
+            self.__qualname__ = name
+            if '.' in name:
+                name = name.rpartition('.')[-1]
+            self.__name__ = name
+            self.__supertype__ = tp
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+        def __mro_entries__(self, bases):
+            # We defined __mro_entries__ to get a better error message
+            # if a user attempts to subclass a NewType instance. bpo-46170
+            supercls_name = self.__name__
+
+            class Dummy:
+                def __init_subclass__(cls):
+                    subcls_name = cls.__name__
+                    raise TypeError(
+                        f"Cannot subclass an instance of NewType. "
+                        f"Perhaps you were looking for: "
+                        f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
+                    )
+
+            return (Dummy,)
+
+        def __repr__(self):
+            return f'{self.__module__}.{self.__qualname__}'
+
+        def __reduce__(self):
+            return self.__qualname__
+
+        if sys.version_info >= (3, 10):
+            # PEP 604 methods
+            # It doesn't make sense to have these methods on Python <3.10
+
+            def __or__(self, other):
+                return typing.Union[self, other]
+
+            def __ror__(self, other):
+                return typing.Union[other, self]
+
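+# For illustration: unlike the pre-3.10 typing.NewType function, this
+# class-based fallback is pickleable and (on 3.10+) supports PEP 604 unions:
+#
+#     UserId = NewType('UserId', int)   # defined at module scope
+#     pickle.dumps(UserId)              # works: __reduce__ returns the name
+#     UserId | None                     # typing.Optional[UserId] on 3.10+
+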
+
+if hasattr(typing, "TypeAliasType"):
+    TypeAliasType = typing.TypeAliasType
+else:
+    def _is_unionable(obj):
+        """Corresponds to is_unionable() in unionobject.c in CPython."""
+        return obj is None or isinstance(obj, (
+            type,
+            _types.GenericAlias,
+            _types.UnionType,
+            TypeAliasType,
+        ))
+
+    class TypeAliasType:
+        """Create named, parameterized type aliases.
+
+        This provides a backport of the new `type` statement in Python 3.12:
+
+            type ListOrSet[T] = list[T] | set[T]
+
+        is equivalent to:
+
+            T = TypeVar("T")
+            ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
+
+        The name ListOrSet can then be used as an alias for the type it refers to.
+
+        The type_params argument should contain all the type parameters used
+        in the value of the type alias. If the alias is not generic, this
+        argument is omitted.
+
+        Static type checkers should only support type aliases declared using
+        TypeAliasType that follow these rules:
+
+        - The first argument (the name) must be a string literal.
+        - The TypeAliasType instance must be immediately assigned to a variable
+          of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
+          as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').
+
+        """
+
+        def __init__(self, name: str, value, *, type_params=()):
+            if not isinstance(name, str):
+                raise TypeError("TypeAliasType name must be a string")
+            self.__value__ = value
+            self.__type_params__ = type_params
+
+            parameters = []
+            for type_param in type_params:
+                if isinstance(type_param, TypeVarTuple):
+                    parameters.extend(type_param)
+                else:
+                    parameters.append(type_param)
+            self.__parameters__ = tuple(parameters)
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+            # Setting this attribute closes the TypeAliasType from further modification
+            self.__name__ = name
+
+        def __setattr__(self, name: str, value: object, /) -> None:
+            if hasattr(self, "__name__"):
+                self._raise_attribute_error(name)
+            super().__setattr__(name, value)
+
+        def __delattr__(self, name: str, /) -> Never:
+            self._raise_attribute_error(name)
+
+        def _raise_attribute_error(self, name: str) -> Never:
+            # Match the Python 3.12 error messages exactly
+            if name == "__name__":
+                raise AttributeError("readonly attribute")
+            elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
+                raise AttributeError(
+                    f"attribute '{name}' of 'typing.TypeAliasType' objects "
+                    "is not writable"
+                )
+            else:
+                raise AttributeError(
+                    f"'typing.TypeAliasType' object has no attribute '{name}'"
+                )
+
+        def __repr__(self) -> str:
+            return self.__name__
+
+        def __getitem__(self, parameters):
+            if not isinstance(parameters, tuple):
+                parameters = (parameters,)
+            parameters = [
+                typing._type_check(
+                    item, f'Subscripting {self.__name__} requires a type.'
+                )
+                for item in parameters
+            ]
+            return typing._GenericAlias(self, tuple(parameters))
+
+        def __reduce__(self):
+            return self.__name__
+
+        def __init_subclass__(cls, *args, **kwargs):
+            raise TypeError(
+                "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
+            )
+
+        # The presence of this method convinces typing._type_check
+        # that TypeAliasTypes are types.
+        def __call__(self):
+            raise TypeError("Type alias is not callable")
+
+        if sys.version_info >= (3, 10):
+            def __or__(self, right):
+                # For forward compatibility with 3.12, reject Unions
+                # that are not accepted by the built-in Union.
+                if not _is_unionable(right):
+                    return NotImplemented
+                return typing.Union[self, right]
+
+            def __ror__(self, left):
+                if not _is_unionable(left):
+                    return NotImplemented
+                return typing.Union[left, self]
+
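+# For illustration, a sketch of the backport in use:
+#
+#     T = TypeVar("T")
+#     ListOrSet = TypeAliasType(
+#         "ListOrSet", typing.Union[typing.List[T], typing.Set[T]],
+#         type_params=(T,),
+#     )
+#     ListOrSet[int]            # subscriptable like the 3.12 `type` statement
+#     ListOrSet.__value__       # the aliased type (eagerly evaluated here)
+#     ListOrSet.__name__ = "X"  # AttributeError: readonly attribute
+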
+
+if hasattr(typing, "is_protocol"):
+    is_protocol = typing.is_protocol
+    get_protocol_members = typing.get_protocol_members
+else:
+    def is_protocol(tp: type, /) -> bool:
+        """Return True if the given type is a Protocol.
+
+        Example::
+
+            >>> from typing_extensions import Protocol, is_protocol
+            >>> class P(Protocol):
+            ...     def a(self) -> str: ...
+            ...     b: int
+            >>> is_protocol(P)
+            True
+            >>> is_protocol(int)
+            False
+        """
+        return (
+            isinstance(tp, type)
+            and getattr(tp, '_is_protocol', False)
+            and tp is not Protocol
+            and tp is not typing.Protocol
+        )
+
+    def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
+        """Return the set of members defined in a Protocol.
+
+        Example::
+
+            >>> from typing_extensions import Protocol, get_protocol_members
+            >>> class P(Protocol):
+            ...     def a(self) -> str: ...
+            ...     b: int
+            >>> get_protocol_members(P)
+            frozenset({'a', 'b'})
+
+        Raise a TypeError for arguments that are not Protocols.
+        """
+        if not is_protocol(tp):
+            raise TypeError(f'{tp!r} is not a Protocol')
+        if hasattr(tp, '__protocol_attrs__'):
+            return frozenset(tp.__protocol_attrs__)
+        return frozenset(_get_protocol_attrs(tp))
+
+
+if hasattr(typing, "Doc"):
+    Doc = typing.Doc
+else:
+    class Doc:
+        """Define the documentation of a type annotation using ``Annotated``, to be
+        used in class attributes, function and method parameters, return values,
+        and variables.
+
+        The value should be a positional-only string literal to allow static tools
+        like editors and documentation generators to use it.
+
+        This complements docstrings.
+
+        The string value passed is available in the attribute ``documentation``.
+
+        Example::
+
+            >>> from typing_extensions import Annotated, Doc
+            >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
+        """
+        def __init__(self, documentation: str, /) -> None:
+            self.documentation = documentation
+
+        def __repr__(self) -> str:
+            return f"Doc({self.documentation!r})"
+
+        def __hash__(self) -> int:
+            return hash(self.documentation)
+
+        def __eq__(self, other: object) -> bool:
+            if not isinstance(other, Doc):
+                return NotImplemented
+            return self.documentation == other.documentation
+
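+# For illustration: the metadata survives in Annotated and can be read back
+# (a sketch using get_type_hints with include_extras):
+#
+#     def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
+#     hints = get_type_hints(hi, include_extras=True)
+#     hints["to"].__metadata__[0].documentation   # 'Who to say hi to'
+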
+
+_CapsuleType = getattr(_types, "CapsuleType", None)
+
+if _CapsuleType is None:
+    try:
+        import _socket
+    except ImportError:
+        pass
+    else:
+        _CAPI = getattr(_socket, "CAPI", None)
+        if _CAPI is not None:
+            _CapsuleType = type(_CAPI)
+
+if _CapsuleType is not None:
+    CapsuleType = _CapsuleType
+    __all__.append("CapsuleType")
+
+
+# Aliases for items that have always been in typing.
+# Explicitly assign these (rather than using `from typing import *` at the top),
+# so that we get a CI error if one of these is deleted from typing.py
+# in a future version of Python
+AbstractSet = typing.AbstractSet
+AnyStr = typing.AnyStr
+BinaryIO = typing.BinaryIO
+Callable = typing.Callable
+Collection = typing.Collection
+Container = typing.Container
+Dict = typing.Dict
+ForwardRef = typing.ForwardRef
+FrozenSet = typing.FrozenSet
+Generic = typing.Generic
+Hashable = typing.Hashable
+IO = typing.IO
+ItemsView = typing.ItemsView
+Iterable = typing.Iterable
+Iterator = typing.Iterator
+KeysView = typing.KeysView
+List = typing.List
+Mapping = typing.Mapping
+MappingView = typing.MappingView
+Match = typing.Match
+MutableMapping = typing.MutableMapping
+MutableSequence = typing.MutableSequence
+MutableSet = typing.MutableSet
+Optional = typing.Optional
+Pattern = typing.Pattern
+Reversible = typing.Reversible
+Sequence = typing.Sequence
+Set = typing.Set
+Sized = typing.Sized
+TextIO = typing.TextIO
+Tuple = typing.Tuple
+Union = typing.Union
+ValuesView = typing.ValuesView
+cast = typing.cast
+no_type_check = typing.no_type_check
+no_type_check_decorator = typing.no_type_check_decorator
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD b/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
index 786fe55190..a3c6c3ea2f 100644
--- a/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
+++ b/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
@@ -1,63 +1,63 @@
-../../bin/wheel,sha256=Y73OywJ5gxOkyLS7G4Z9CS6Pb63oCt-LMViLs-ygeGE,245
-wheel-0.43.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-wheel-0.43.0.dist-info/LICENSE.txt,sha256=MMI2GGeRCPPo6h0qZYx8pBe9_IkcmO8aifpP8MmChlQ,1107
-wheel-0.43.0.dist-info/METADATA,sha256=WbrCKwClnT5WCKVrjPjvxDgxo2tyeS7kOJyc1GaceEE,2153
-wheel-0.43.0.dist-info/RECORD,,
-wheel-0.43.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-wheel-0.43.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-wheel-0.43.0.dist-info/entry_points.txt,sha256=rTY1BbkPHhkGMm4Q3F0pIzJBzW2kMxoG1oriffvGdA0,104
-wheel/__init__.py,sha256=D6jhH00eMzbgrXGAeOwVfD5i-lCAMMycuG1L0useDlo,59
-wheel/__main__.py,sha256=NkMUnuTCGcOkgY0IBLgBCVC_BGGcWORx2K8jYGS12UE,455
-wheel/__pycache__/__init__.cpython-312.pyc,,
-wheel/__pycache__/__main__.cpython-312.pyc,,
-wheel/__pycache__/_setuptools_logging.cpython-312.pyc,,
-wheel/__pycache__/bdist_wheel.cpython-312.pyc,,
-wheel/__pycache__/macosx_libfile.cpython-312.pyc,,
-wheel/__pycache__/metadata.cpython-312.pyc,,
-wheel/__pycache__/util.cpython-312.pyc,,
-wheel/__pycache__/wheelfile.cpython-312.pyc,,
-wheel/_setuptools_logging.py,sha256=NoCnjJ4DFEZ45Eo-2BdXLsWJCwGkait1tp_17paleVw,746
-wheel/bdist_wheel.py,sha256=OKJyp9E831zJrxoRfmM9AgOjByG1CB-pzF5kXQFmaKk,20938
-wheel/cli/__init__.py,sha256=eBNhnPwWTtdKAJHy77lvz7gOQ5Eu3GavGugXxhSsn-U,4264
-wheel/cli/__pycache__/__init__.cpython-312.pyc,,
-wheel/cli/__pycache__/convert.cpython-312.pyc,,
-wheel/cli/__pycache__/pack.cpython-312.pyc,,
-wheel/cli/__pycache__/tags.cpython-312.pyc,,
-wheel/cli/__pycache__/unpack.cpython-312.pyc,,
-wheel/cli/convert.py,sha256=qJcpYGKqdfw1P6BelgN1Hn_suNgM6bvyEWFlZeuSWx0,9439
-wheel/cli/pack.py,sha256=CAFcHdBVulvsHYJlndKVO7KMI9JqBTZz5ii0PKxxCOs,3103
-wheel/cli/tags.py,sha256=lHw-LaWrkS5Jy_qWcw-6pSjeNM6yAjDnqKI3E5JTTCU,4760
-wheel/cli/unpack.py,sha256=Y_J7ynxPSoFFTT7H0fMgbBlVErwyDGcObgme5MBuz58,1021
-wheel/macosx_libfile.py,sha256=HnW6OPdN993psStvwl49xtx2kw7hoVbe6nvwmf8WsKI,16103
-wheel/metadata.py,sha256=q-xCCqSAK7HzyZxK9A6_HAWmhqS1oB4BFw1-rHQxBiQ,5884
-wheel/util.py,sha256=e0jpnsbbM9QhaaMSyap-_ZgUxcxwpyLDk6RHcrduPLg,621
-wheel/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-wheel/vendored/__pycache__/__init__.cpython-312.pyc,,
-wheel/vendored/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-wheel/vendored/packaging/__pycache__/__init__.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_elffile.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_manylinux.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_musllinux.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_parser.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_structures.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_tokenizer.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/markers.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/requirements.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/specifiers.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/tags.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/utils.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/version.cpython-312.pyc,,
-wheel/vendored/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
-wheel/vendored/packaging/_manylinux.py,sha256=P7sdR5_7XBY09LVYYPhHmydMJIIwPXWsh4olk74Uuj4,9588
-wheel/vendored/packaging/_musllinux.py,sha256=z1s8To2hQ0vpn_d-O2i5qxGwEK8WmGlLt3d_26V7NeY,2674
-wheel/vendored/packaging/_parser.py,sha256=4tT4emSl2qTaU7VTQE1Xa9o1jMPCsBezsYBxyNMUN-s,10347
-wheel/vendored/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
-wheel/vendored/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
-wheel/vendored/packaging/markers.py,sha256=_TSPI1BhJYO7Bp9AzTmHQxIqHEVXaTjmDh9G-w8qzPA,8232
-wheel/vendored/packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
-wheel/vendored/packaging/specifiers.py,sha256=IWSt0SrLSP72heWhAC8UL0eGvas7XIQHjqiViVfmPKE,39778
-wheel/vendored/packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
-wheel/vendored/packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
-wheel/vendored/packaging/version.py,sha256=PFJaYZDxBgyxkfYhH3SQw4qfE9ICCWrTmitvq14y3bs,16234
-wheel/vendored/vendor.txt,sha256=Z2ENjB1i5prfez8CdM1Sdr3c6Zxv2rRRolMpLmBncAE,16
-wheel/wheelfile.py,sha256=DtJDWoZMvnBh4leNMDPGOprQU9d_dp6q-MmV0U--4xc,7694
+../../bin/wheel,sha256=cT2EHbrv-J-UyUXu26cDY-0I7RgcruysJeHFanT1Xfo,249
+wheel-0.43.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+wheel-0.43.0.dist-info/LICENSE.txt,sha256=MMI2GGeRCPPo6h0qZYx8pBe9_IkcmO8aifpP8MmChlQ,1107
+wheel-0.43.0.dist-info/METADATA,sha256=WbrCKwClnT5WCKVrjPjvxDgxo2tyeS7kOJyc1GaceEE,2153
+wheel-0.43.0.dist-info/RECORD,,
+wheel-0.43.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel-0.43.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+wheel-0.43.0.dist-info/entry_points.txt,sha256=rTY1BbkPHhkGMm4Q3F0pIzJBzW2kMxoG1oriffvGdA0,104
+wheel/__init__.py,sha256=D6jhH00eMzbgrXGAeOwVfD5i-lCAMMycuG1L0useDlo,59
+wheel/__main__.py,sha256=NkMUnuTCGcOkgY0IBLgBCVC_BGGcWORx2K8jYGS12UE,455
+wheel/__pycache__/__init__.cpython-312.pyc,,
+wheel/__pycache__/__main__.cpython-312.pyc,,
+wheel/__pycache__/_setuptools_logging.cpython-312.pyc,,
+wheel/__pycache__/bdist_wheel.cpython-312.pyc,,
+wheel/__pycache__/macosx_libfile.cpython-312.pyc,,
+wheel/__pycache__/metadata.cpython-312.pyc,,
+wheel/__pycache__/util.cpython-312.pyc,,
+wheel/__pycache__/wheelfile.cpython-312.pyc,,
+wheel/_setuptools_logging.py,sha256=NoCnjJ4DFEZ45Eo-2BdXLsWJCwGkait1tp_17paleVw,746
+wheel/bdist_wheel.py,sha256=OKJyp9E831zJrxoRfmM9AgOjByG1CB-pzF5kXQFmaKk,20938
+wheel/cli/__init__.py,sha256=eBNhnPwWTtdKAJHy77lvz7gOQ5Eu3GavGugXxhSsn-U,4264
+wheel/cli/__pycache__/__init__.cpython-312.pyc,,
+wheel/cli/__pycache__/convert.cpython-312.pyc,,
+wheel/cli/__pycache__/pack.cpython-312.pyc,,
+wheel/cli/__pycache__/tags.cpython-312.pyc,,
+wheel/cli/__pycache__/unpack.cpython-312.pyc,,
+wheel/cli/convert.py,sha256=qJcpYGKqdfw1P6BelgN1Hn_suNgM6bvyEWFlZeuSWx0,9439
+wheel/cli/pack.py,sha256=CAFcHdBVulvsHYJlndKVO7KMI9JqBTZz5ii0PKxxCOs,3103
+wheel/cli/tags.py,sha256=lHw-LaWrkS5Jy_qWcw-6pSjeNM6yAjDnqKI3E5JTTCU,4760
+wheel/cli/unpack.py,sha256=Y_J7ynxPSoFFTT7H0fMgbBlVErwyDGcObgme5MBuz58,1021
+wheel/macosx_libfile.py,sha256=HnW6OPdN993psStvwl49xtx2kw7hoVbe6nvwmf8WsKI,16103
+wheel/metadata.py,sha256=q-xCCqSAK7HzyZxK9A6_HAWmhqS1oB4BFw1-rHQxBiQ,5884
+wheel/util.py,sha256=e0jpnsbbM9QhaaMSyap-_ZgUxcxwpyLDk6RHcrduPLg,621
+wheel/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel/vendored/__pycache__/__init__.cpython-312.pyc,,
+wheel/vendored/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel/vendored/packaging/__pycache__/__init__.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_elffile.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_manylinux.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_musllinux.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_parser.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_structures.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/_tokenizer.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/markers.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/requirements.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/specifiers.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/tags.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/utils.cpython-312.pyc,,
+wheel/vendored/packaging/__pycache__/version.cpython-312.pyc,,
+wheel/vendored/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
+wheel/vendored/packaging/_manylinux.py,sha256=P7sdR5_7XBY09LVYYPhHmydMJIIwPXWsh4olk74Uuj4,9588
+wheel/vendored/packaging/_musllinux.py,sha256=z1s8To2hQ0vpn_d-O2i5qxGwEK8WmGlLt3d_26V7NeY,2674
+wheel/vendored/packaging/_parser.py,sha256=4tT4emSl2qTaU7VTQE1Xa9o1jMPCsBezsYBxyNMUN-s,10347
+wheel/vendored/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+wheel/vendored/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
+wheel/vendored/packaging/markers.py,sha256=_TSPI1BhJYO7Bp9AzTmHQxIqHEVXaTjmDh9G-w8qzPA,8232
+wheel/vendored/packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
+wheel/vendored/packaging/specifiers.py,sha256=IWSt0SrLSP72heWhAC8UL0eGvas7XIQHjqiViVfmPKE,39778
+wheel/vendored/packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
+wheel/vendored/packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
+wheel/vendored/packaging/version.py,sha256=PFJaYZDxBgyxkfYhH3SQw4qfE9ICCWrTmitvq14y3bs,16234
+wheel/vendored/vendor.txt,sha256=Z2ENjB1i5prfez8CdM1Sdr3c6Zxv2rRRolMpLmBncAE,16
+wheel/wheelfile.py,sha256=DtJDWoZMvnBh4leNMDPGOprQU9d_dp6q-MmV0U--4xc,7694
diff --git a/setuptools/_vendor/wheel/__main__.py b/setuptools/_vendor/wheel/__main__.py
new file mode 100644
index 0000000000..0be7453749
--- /dev/null
+++ b/setuptools/_vendor/wheel/__main__.py
@@ -0,0 +1,23 @@
+"""
+Wheel command line tool (enables the 'python -m wheel' syntax)
+"""
+
+from __future__ import annotations
+
+import sys
+
+
+def main():  # needed for console script
+    if __package__ == "":
+        # To be able to run 'python wheel-0.9.whl/wheel':
+        import os.path
+
+        path = os.path.dirname(os.path.dirname(__file__))
+        sys.path[0:0] = [path]
+    import wheel.cli
+
+    sys.exit(wheel.cli.main())
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/setuptools/_vendor/wheel/_setuptools_logging.py b/setuptools/_vendor/wheel/_setuptools_logging.py
new file mode 100644
index 0000000000..006c098523
--- /dev/null
+++ b/setuptools/_vendor/wheel/_setuptools_logging.py
@@ -0,0 +1,26 @@
+# copied from setuptools.logging, omitting monkeypatching
+from __future__ import annotations
+
+import logging
+import sys
+
+
+def _not_warning(record):
+    return record.levelno < logging.WARNING
+
+
+def configure():
+    """
+    Configure logging to emit warning and above to stderr
+    and everything else to stdout. This behavior is provided
+    for compatibility with distutils.log but may change in
+    the future.
+    """
+    err_handler = logging.StreamHandler()
+    err_handler.setLevel(logging.WARNING)
+    out_handler = logging.StreamHandler(sys.stdout)
+    out_handler.addFilter(_not_warning)
+    handlers = err_handler, out_handler
+    logging.basicConfig(
+        format="{message}", style="{", handlers=handlers, level=logging.DEBUG
+    )
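A short usage sketch of the split-stream logging that configure() establishes; the import path is an assumption about how the vendored module is exposed:

    import logging
    from wheel._setuptools_logging import configure  # assumed import path

    configure()
    logging.info("below WARNING, passes _not_warning -> stdout")
    logging.warning("WARNING and above -> stderr")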
diff --git a/setuptools/_vendor/wheel/bdist_wheel.py b/setuptools/_vendor/wheel/bdist_wheel.py
new file mode 100644
index 0000000000..6b811ee3df
--- /dev/null
+++ b/setuptools/_vendor/wheel/bdist_wheel.py
@@ -0,0 +1,595 @@
+"""
+Create a wheel (.whl) distribution.
+
+A wheel is a built archive format.
+"""
+
+from __future__ import annotations
+
+import os
+import re
+import shutil
+import stat
+import struct
+import sys
+import sysconfig
+import warnings
+from email.generator import BytesGenerator, Generator
+from email.policy import EmailPolicy
+from glob import iglob
+from shutil import rmtree
+from zipfile import ZIP_DEFLATED, ZIP_STORED
+
+import setuptools
+from setuptools import Command
+
+from . import __version__ as wheel_version
+from .macosx_libfile import calculate_macosx_platform_tag
+from .metadata import pkginfo_to_metadata
+from .util import log
+from .vendored.packaging import tags
+from .vendored.packaging import version as _packaging_version
+from .wheelfile import WheelFile
+
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub("[^A-Za-z0-9.]+", "-", name)
+
+
+def safe_version(version):
+    """
+    Convert an arbitrary string to a standard version string
+    """
+    try:
+        # normalize the version
+        return str(_packaging_version.Version(version))
+    except _packaging_version.InvalidVersion:
+        version = version.replace(" ", ".")
+        return re.sub("[^A-Za-z0-9.]+", "-", version)
+
+
+setuptools_major_version = int(setuptools.__version__.split(".")[0])
+
+PY_LIMITED_API_PATTERN = r"cp3\d"
+
+
+def _is_32bit_interpreter():
+    return struct.calcsize("P") == 4
+
+
+def python_tag():
+    return f"py{sys.version_info[0]}"
+
+
+def get_platform(archive_root):
+    """Return our platform name 'win32', 'linux_x86_64'"""
+    result = sysconfig.get_platform()
+    if result.startswith("macosx") and archive_root is not None:
+        result = calculate_macosx_platform_tag(archive_root, result)
+    elif _is_32bit_interpreter():
+        if result == "linux-x86_64":
+            # pip pull request #3497
+            result = "linux-i686"
+        elif result == "linux-aarch64":
+            # packaging pull request #234
+            # TODO armv8l, packaging pull request #690 => this did not land
+            # in pip/packaging yet
+            result = "linux-armv7l"
+
+    return result.replace("-", "_")
+
+
+def get_flag(var, fallback, expected=True, warn=True):
+    """Use a fallback value for determining SOABI flags if the needed config
+    var is unset or unavailable."""
+    val = sysconfig.get_config_var(var)
+    if val is None:
+        if warn:
+            warnings.warn(
+                f"Config variable '{var}' is unset, Python ABI tag may " "be incorrect",
+                RuntimeWarning,
+                stacklevel=2,
+            )
+        return fallback
+    return val == expected
+
+
+def get_abi_tag():
+    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
+    soabi = sysconfig.get_config_var("SOABI")
+    impl = tags.interpreter_name()
+    if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
+        d = ""
+        m = ""
+        u = ""
+        if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
+            d = "d"
+
+        if get_flag(
+            "WITH_PYMALLOC",
+            impl == "cp",
+            warn=(impl == "cp" and sys.version_info < (3, 8)),
+        ) and sys.version_info < (3, 8):
+            m = "m"
+
+        abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
+    elif soabi and impl == "cp" and soabi.startswith("cpython"):
+        # non-Windows
+        abi = "cp" + soabi.split("-")[1]
+    elif soabi and impl == "cp" and soabi.startswith("cp"):
+        # Windows
+        abi = soabi.split("-")[0]
+    elif soabi and impl == "pp":
+        # we want something like pypy36-pp73
+        abi = "-".join(soabi.split("-")[:2])
+        abi = abi.replace(".", "_").replace("-", "_")
+    elif soabi and impl == "graalpy":
+        abi = "-".join(soabi.split("-")[:3])
+        abi = abi.replace(".", "_").replace("-", "_")
+    elif soabi:
+        abi = soabi.replace(".", "_").replace("-", "_")
+    else:
+        abi = None
+
+    return abi
+
+
+def safer_name(name):
+    return safe_name(name).replace("-", "_")
+
+
+def safer_version(version):
+    return safe_version(version).replace("-", "_")
+
+
+def remove_readonly(func, path, excinfo):
+    remove_readonly_exc(func, path, excinfo[1])
+
+
+def remove_readonly_exc(func, path, exc):
+    os.chmod(path, stat.S_IWRITE)
+    func(path)
+
+
+class bdist_wheel(Command):
+    description = "create a wheel distribution"
+
+    supported_compressions = {
+        "stored": ZIP_STORED,
+        "deflated": ZIP_DEFLATED,
+    }
+
+    user_options = [
+        ("bdist-dir=", "b", "temporary directory for creating the distribution"),
+        (
+            "plat-name=",
+            "p",
+            "platform name to embed in generated filenames "
+            "(default: %s)" % get_platform(None),
+        ),
+        (
+            "keep-temp",
+            "k",
+            "keep the pseudo-installation tree around after "
+            "creating the distribution archive",
+        ),
+        ("dist-dir=", "d", "directory to put final built distributions in"),
+        ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
+        (
+            "relative",
+            None,
+            "build the archive using relative paths " "(default: false)",
+        ),
+        (
+            "owner=",
+            "u",
+            "Owner name used when creating a tar file" " [default: current user]",
+        ),
+        (
+            "group=",
+            "g",
+            "Group name used when creating a tar file" " [default: current group]",
+        ),
+        ("universal", None, "make a universal wheel" " (default: false)"),
+        (
+            "compression=",
+            None,
+            "zipfile compression (one of: {})" " (default: 'deflated')".format(
+                ", ".join(supported_compressions)
+            ),
+        ),
+        (
+            "python-tag=",
+            None,
+            "Python implementation compatibility tag"
+            " (default: '%s')" % (python_tag()),
+        ),
+        (
+            "build-number=",
+            None,
+            "Build number for this particular version. "
+            "As specified in PEP-0427, this must start with a digit. "
+            "[default: None]",
+        ),
+        (
+            "py-limited-api=",
+            None,
+            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" " (default: false)",
+        ),
+    ]
+
+    boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
+
+    def initialize_options(self):
+        self.bdist_dir = None
+        self.data_dir = None
+        self.plat_name = None
+        self.plat_tag = None
+        self.format = "zip"
+        self.keep_temp = False
+        self.dist_dir = None
+        self.egginfo_dir = None
+        self.root_is_pure = None
+        self.skip_build = None
+        self.relative = False
+        self.owner = None
+        self.group = None
+        self.universal = False
+        self.compression = "deflated"
+        self.python_tag = python_tag()
+        self.build_number = None
+        self.py_limited_api = False
+        self.plat_name_supplied = False
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command("bdist").bdist_base
+            self.bdist_dir = os.path.join(bdist_base, "wheel")
+
+        egg_info = self.distribution.get_command_obj("egg_info")
+        egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`
+
+        self.data_dir = self.wheel_dist_name + ".data"
+        self.plat_name_supplied = self.plat_name is not None
+
+        try:
+            self.compression = self.supported_compressions[self.compression]
+        except KeyError:
+            raise ValueError(f"Unsupported compression: {self.compression}") from None
+
+        need_options = ("dist_dir", "plat_name", "skip_build")
+
+        self.set_undefined_options("bdist", *zip(need_options, need_options))
+
+        self.root_is_pure = not (
+            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
+        )
+
+        if self.py_limited_api and not re.match(
+            PY_LIMITED_API_PATTERN, self.py_limited_api
+        ):
+            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)
+
+        # Support legacy [wheel] section for setting universal
+        wheel = self.distribution.get_option_dict("wheel")
+        if "universal" in wheel:
+            # please don't define this in your global configs
+            log.warning(
+                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
+            )
+            val = wheel["universal"][1].strip()
+            if val.lower() in ("1", "true", "yes"):
+                self.universal = True
+
+        if self.build_number is not None and not self.build_number[:1].isdigit():
+            raise ValueError("Build tag (build-number) must start with a digit.")
+
+    @property
+    def wheel_dist_name(self):
+        """Return distribution full name with - replaced with _"""
+        components = (
+            safer_name(self.distribution.get_name()),
+            safer_version(self.distribution.get_version()),
+        )
+        if self.build_number:
+            components += (self.build_number,)
+        return "-".join(components)
+
+    def get_tag(self):
+        # bdist sets self.plat_name if unset, we should only use it for purepy
+        # wheels if the user supplied it.
+        if self.plat_name_supplied:
+            plat_name = self.plat_name
+        elif self.root_is_pure:
+            plat_name = "any"
+        else:
+            # macosx includes the system version in the platform name, so it needs special handling
+            if self.plat_name and not self.plat_name.startswith("macosx"):
+                plat_name = self.plat_name
+            else:
+                # on macosx always limit the platform name to comply with any
+                # c-extension modules in bdist_dir, since the user can specify
+                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake
+
+                # on other platforms, and on macosx if there are no c-extension
+                # modules, use the default platform name.
+                plat_name = get_platform(self.bdist_dir)
+
+            if _is_32bit_interpreter():
+                if plat_name in ("linux-x86_64", "linux_x86_64"):
+                    plat_name = "linux_i686"
+                if plat_name in ("linux-aarch64", "linux_aarch64"):
+                    # TODO armv8l, packaging pull request #690 => this did not land
+                    # in pip/packaging yet
+                    plat_name = "linux_armv7l"
+
+        plat_name = (
+            plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
+        )
+
+        if self.root_is_pure:
+            if self.universal:
+                impl = "py2.py3"
+            else:
+                impl = self.python_tag
+            tag = (impl, "none", plat_name)
+        else:
+            impl_name = tags.interpreter_name()
+            impl_ver = tags.interpreter_version()
+            impl = impl_name + impl_ver
+            # We don't work on CPython 3.1, 3.0.
+            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
+                impl = self.py_limited_api
+                abi_tag = "abi3"
+            else:
+                abi_tag = str(get_abi_tag()).lower()
+            tag = (impl, abi_tag, plat_name)
+            # issue gh-374: allow overriding plat_name
+            supported_tags = [
+                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
+            ]
+            assert (
+                tag in supported_tags
+            ), f"would build wheel with unsupported tag {tag}"
+        return tag
+
+    def run(self):
+        build_scripts = self.reinitialize_command("build_scripts")
+        build_scripts.executable = "python"
+        build_scripts.force = True
+
+        build_ext = self.reinitialize_command("build_ext")
+        build_ext.inplace = False
+
+        if not self.skip_build:
+            self.run_command("build")
+
+        install = self.reinitialize_command("install", reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.compile = False
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        # A wheel without setuptools scripts is more cross-platform.
+        # Use the (undocumented) `no_ep` option to setuptools'
+        # install_scripts command to avoid creating entry point scripts.
+        install_scripts = self.reinitialize_command("install_scripts")
+        install_scripts.no_ep = True
+
+        # Use a custom scheme for the archive, because we have to decide
+        # at installation time which scheme to use.
+        for key in ("headers", "scripts", "data", "purelib", "platlib"):
+            setattr(install, "install_" + key, os.path.join(self.data_dir, key))
+
+        basedir_observed = ""
+
+        if os.name == "nt":
+            # win32 barfs if any of these are ''; could be '.'?
+            # (distutils.command.install:change_roots bug)
+            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
+            self.install_libbase = self.install_lib = basedir_observed
+
+        setattr(
+            install,
+            "install_purelib" if self.root_is_pure else "install_platlib",
+            basedir_observed,
+        )
+
+        log.info(f"installing to {self.bdist_dir}")
+
+        self.run_command("install")
+
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            archive_root = os.path.join(
+                self.bdist_dir, self._ensure_relative(install.install_base)
+            )
+
+        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
+        distinfo_dirname = (
+            f"{safer_name(self.distribution.get_name())}-"
+            f"{safer_version(self.distribution.get_version())}.dist-info"
+        )
+        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
+        self.egg2dist(self.egginfo_dir, distinfo_dir)
+
+        self.write_wheelfile(distinfo_dir)
+
+        # Make the archive
+        if not os.path.exists(self.dist_dir):
+            os.makedirs(self.dist_dir)
+
+        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
+        with WheelFile(wheel_path, "w", self.compression) as wf:
+            wf.write_files(archive_root)
+
+        # Add to 'Distribution.dist_files' so that the "upload" command works
+        getattr(self.distribution, "dist_files", []).append(
+            (
+                "bdist_wheel",
+                "{}.{}".format(*sys.version_info[:2]),  # like 3.7
+                wheel_path,
+            )
+        )
+
+        if not self.keep_temp:
+            log.info(f"removing {self.bdist_dir}")
+            if not self.dry_run:
+                if sys.version_info < (3, 12):
+                    rmtree(self.bdist_dir, onerror=remove_readonly)
+                else:
+                    rmtree(self.bdist_dir, onexc=remove_readonly_exc)
+
+    def write_wheelfile(
+        self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")"
+    ):
+        from email.message import Message
+
+        msg = Message()
+        msg["Wheel-Version"] = "1.0"  # of the spec
+        msg["Generator"] = generator
+        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
+        if self.build_number is not None:
+            msg["Build"] = self.build_number
+
+        # Doesn't work for bdist_wininst
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        for impl in impl_tag.split("."):
+            for abi in abi_tag.split("."):
+                for plat in plat_tag.split("."):
+                    msg["Tag"] = "-".join((impl, abi, plat))
+
+        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
+        log.info(f"creating {wheelfile_path}")
+        with open(wheelfile_path, "wb") as f:
+            BytesGenerator(f, maxheaderlen=0).flatten(msg)
+
+    def _ensure_relative(self, path):
+        # copied from dir_util, deleted
+        drive, path = os.path.splitdrive(path)
+        if path[0:1] == os.sep:
+            path = drive + path[1:]
+        return path
+
+    @property
+    def license_paths(self):
+        if setuptools_major_version >= 57:
+            # Setuptools has resolved any patterns to actual file names
+            return self.distribution.metadata.license_files or ()
+
+        files = set()
+        metadata = self.distribution.get_option_dict("metadata")
+        if setuptools_major_version >= 42:
+            # Setuptools recognizes the license_files option but does not do globbing
+            patterns = self.distribution.metadata.license_files
+        else:
+            # Prior to those, wheel is entirely responsible for handling license files
+            if "license_files" in metadata:
+                patterns = metadata["license_files"][1].split()
+            else:
+                patterns = ()
+
+        if "license_file" in metadata:
+            warnings.warn(
+                'The "license_file" option is deprecated. Use "license_files" instead.',
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            files.add(metadata["license_file"][1])
+
+        if not files and not patterns and not isinstance(patterns, list):
+            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")
+
+        for pattern in patterns:
+            for path in iglob(pattern):
+                if path.endswith("~"):
+                    log.debug(
+                        f'ignoring license file "{path}" as it looks like a backup'
+                    )
+                    continue
+
+                if path not in files and os.path.isfile(path):
+                    log.info(
+                        f'adding license file "{path}" (matched pattern "{pattern}")'
+                    )
+                    files.add(path)
+
+        return files
+
+    def egg2dist(self, egginfo_path, distinfo_path):
+        """Convert an .egg-info directory into a .dist-info directory"""
+
+        def adios(p):
+            """Appropriately delete directory, file or link."""
+            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
+                shutil.rmtree(p)
+            elif os.path.exists(p):
+                os.unlink(p)
+
+        adios(distinfo_path)
+
+        if not os.path.exists(egginfo_path):
+            # There is no egg-info. This is probably because the egg-info
+            # file/directory is not named matching the distribution name used
+            # to name the archive file. Check for this case and report
+            # accordingly.
+            import glob
+
+            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
+            possible = glob.glob(pat)
+            err = f"Egg metadata expected at {egginfo_path} but not found"
+            if possible:
+                alt = os.path.basename(possible[0])
+                err += f" ({alt} found - possible misnamed archive file?)"
+
+            raise ValueError(err)
+
+        if os.path.isfile(egginfo_path):
+            # .egg-info is a single file
+            pkginfo_path = egginfo_path
+            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
+            os.mkdir(distinfo_path)
+        else:
+            # .egg-info is a directory
+            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
+            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
+
+            # ignore common egg metadata that is useless to wheel
+            shutil.copytree(
+                egginfo_path,
+                distinfo_path,
+                ignore=lambda x, y: {
+                    "PKG-INFO",
+                    "requires.txt",
+                    "SOURCES.txt",
+                    "not-zip-safe",
+                },
+            )
+
+            # delete dependency_links if it is only whitespace
+            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
+            with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
+                dependency_links = dependency_links_file.read().strip()
+            if not dependency_links:
+                adios(dependency_links_path)
+
+        pkg_info_path = os.path.join(distinfo_path, "METADATA")
+        serialization_policy = EmailPolicy(
+            utf8=True,
+            mangle_from_=False,
+            max_line_length=0,
+        )
+        with open(pkg_info_path, "w", encoding="utf-8") as out:
+            Generator(out, policy=serialization_policy).flatten(pkg_info)
+
+        for license_path in self.license_paths:
+            filename = os.path.basename(license_path)
+            shutil.copy(license_path, os.path.join(distinfo_path, filename))
+
+        adios(egginfo_path)
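For the pure-Python branch of get_tag() above, the resulting wheel file name is easy to reproduce by hand. A sketch under assumed placeholder names (mypkg, version 1.0):

    import sys

    # root_is_pure wheels: impl from python_tag(), ABI "none", platform "any"
    impl = f"py{sys.version_info[0]}"
    tag = (impl, "none", "any")
    print("mypkg-1.0-" + "-".join(tag) + ".whl")  # mypkg-1.0-py3-none-any.whl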
diff --git a/setuptools/_vendor/wheel/cli/__init__.py b/setuptools/_vendor/wheel/cli/__init__.py
new file mode 100644
index 0000000000..a38860f5a6
--- /dev/null
+++ b/setuptools/_vendor/wheel/cli/__init__.py
@@ -0,0 +1,155 @@
+"""
+Wheel command-line utility.
+"""
+
+from __future__ import annotations
+
+import argparse
+import os
+import sys
+from argparse import ArgumentTypeError
+
+
+class WheelError(Exception):
+    pass
+
+
+def unpack_f(args):
+    from .unpack import unpack
+
+    unpack(args.wheelfile, args.dest)
+
+
+def pack_f(args):
+    from .pack import pack
+
+    pack(args.directory, args.dest_dir, args.build_number)
+
+
+def convert_f(args):
+    from .convert import convert
+
+    convert(args.files, args.dest_dir, args.verbose)
+
+
+def tags_f(args):
+    from .tags import tags
+
+    names = (
+        tags(
+            wheel,
+            args.python_tag,
+            args.abi_tag,
+            args.platform_tag,
+            args.build,
+            args.remove,
+        )
+        for wheel in args.wheel
+    )
+
+    for name in names:
+        print(name)
+
+
+def version_f(args):
+    from .. import __version__
+
+    print("wheel %s" % __version__)
+
+
+def parse_build_tag(build_tag: str) -> str:
+    if build_tag and not build_tag[0].isdigit():
+        raise ArgumentTypeError("build tag must begin with a digit")
+    elif "-" in build_tag:
+        raise ArgumentTypeError("invalid character ('-') in build tag")
+
+    return build_tag
+
+
+TAGS_HELP = """\
+Make a new wheel with given tags. Any tags unspecified will remain the same.
+Starting the tags with a "+" will append to the existing tags. Starting with a
+"-" will remove a tag (use --option=-TAG syntax). Multiple tags can be
+separated by ".". The original file will remain unless --remove is given.  The
+output filename(s) will be displayed on stdout for further processing.
+"""
+
+
+def parser():
+    p = argparse.ArgumentParser()
+    s = p.add_subparsers(help="commands")
+
+    unpack_parser = s.add_parser("unpack", help="Unpack wheel")
+    unpack_parser.add_argument(
+        "--dest", "-d", help="Destination directory", default="."
+    )
+    unpack_parser.add_argument("wheelfile", help="Wheel file")
+    unpack_parser.set_defaults(func=unpack_f)
+
+    repack_parser = s.add_parser("pack", help="Repack wheel")
+    repack_parser.add_argument("directory", help="Root directory of the unpacked wheel")
+    repack_parser.add_argument(
+        "--dest-dir",
+        "-d",
+        default=os.path.curdir,
+        help="Directory to store the wheel (default %(default)s)",
+    )
+    repack_parser.add_argument(
+        "--build-number", help="Build tag to use in the wheel name"
+    )
+    repack_parser.set_defaults(func=pack_f)
+
+    convert_parser = s.add_parser("convert", help="Convert egg or wininst to wheel")
+    convert_parser.add_argument("files", nargs="*", help="Files to convert")
+    convert_parser.add_argument(
+        "--dest-dir",
+        "-d",
+        default=os.path.curdir,
+        help="Directory to store wheels (default %(default)s)",
+    )
+    convert_parser.add_argument("--verbose", "-v", action="store_true")
+    convert_parser.set_defaults(func=convert_f)
+
+    tags_parser = s.add_parser(
+        "tags", help="Add or replace the tags on a wheel", description=TAGS_HELP
+    )
+    tags_parser.add_argument("wheel", nargs="*", help="Existing wheel(s) to retag")
+    tags_parser.add_argument(
+        "--remove",
+        action="store_true",
+        help="Remove the original files, keeping only the renamed ones",
+    )
+    tags_parser.add_argument(
+        "--python-tag", metavar="TAG", help="Specify an interpreter tag(s)"
+    )
+    tags_parser.add_argument("--abi-tag", metavar="TAG", help="Specify an ABI tag(s)")
+    tags_parser.add_argument(
+        "--platform-tag", metavar="TAG", help="Specify a platform tag(s)"
+    )
+    tags_parser.add_argument(
+        "--build", type=parse_build_tag, metavar="BUILD", help="Specify a build tag"
+    )
+    tags_parser.set_defaults(func=tags_f)
+
+    version_parser = s.add_parser("version", help="Print version and exit")
+    version_parser.set_defaults(func=version_f)
+
+    help_parser = s.add_parser("help", help="Show this help")
+    help_parser.set_defaults(func=lambda args: p.print_help())
+
+    return p
+
+
+def main():
+    p = parser()
+    args = p.parse_args()
+    if not hasattr(args, "func"):
+        p.print_help()
+    else:
+        try:
+            args.func(args)
+            return 0
+        except WheelError as e:
+            print(e, file=sys.stderr)
+
+    return 1
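Each subparser registers its handler via set_defaults(func=...), so main() dispatches with a single args.func(args) call. A sketch, assuming the module is importable as wheel.cli:

    from wheel.cli import parser

    args = parser().parse_args(["version"])
    args.func(args)  # runs version_f, printing e.g. "wheel 0.43.0"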
diff --git a/setuptools/_vendor/wheel/cli/convert.py b/setuptools/_vendor/wheel/cli/convert.py
new file mode 100644
index 0000000000..291534046a
--- /dev/null
+++ b/setuptools/_vendor/wheel/cli/convert.py
@@ -0,0 +1,273 @@
+from __future__ import annotations
+
+import os.path
+import re
+import shutil
+import tempfile
+import zipfile
+from glob import iglob
+
+from ..bdist_wheel import bdist_wheel
+from ..wheelfile import WheelFile
+from . import WheelError
+
+try:
+    from setuptools import Distribution
+except ImportError:
+    from distutils.dist import Distribution
+
+egg_info_re = re.compile(
+    r"""
+    (?P<name>.+?)-(?P<ver>.+?)
+    (-(?P<pyver>py\d\.\d+)
+     (-(?P<arch>.+?))?
+    )?.egg$""",
+    re.VERBOSE,
+)
+
+
+class _bdist_wheel_tag(bdist_wheel):
+    # allow the client to override the default generated wheel tag
+    # The default bdist_wheel implementation uses python and abi tags
+    # of the running python process. This is not suitable for
+    # generating/repackaging prebuild binaries.
+
+    full_tag_supplied = False
+    full_tag = None  # None or a (pytag, soabitag, plattag) triple
+
+    def get_tag(self):
+        if self.full_tag_supplied and self.full_tag is not None:
+            return self.full_tag
+        else:
+            return bdist_wheel.get_tag(self)
+
+
+def egg2wheel(egg_path: str, dest_dir: str) -> None:
+    filename = os.path.basename(egg_path)
+    match = egg_info_re.match(filename)
+    if not match:
+        raise WheelError(f"Invalid egg file name: {filename}")
+
+    egg_info = match.groupdict()
+    dir = tempfile.mkdtemp(suffix="_e2w")
+    if os.path.isfile(egg_path):
+        # assume we have a bdist_egg otherwise
+        with zipfile.ZipFile(egg_path) as egg:
+            egg.extractall(dir)
+    else:
+        # support buildout-style installed eggs directories
+        for pth in os.listdir(egg_path):
+            src = os.path.join(egg_path, pth)
+            if os.path.isfile(src):
+                shutil.copy2(src, dir)
+            else:
+                shutil.copytree(src, os.path.join(dir, pth))
+
+    pyver = egg_info["pyver"]
+    if pyver:
+        pyver = egg_info["pyver"] = pyver.replace(".", "")
+
+    arch = (egg_info["arch"] or "any").replace(".", "_").replace("-", "_")
+
+    # assume all binary eggs are for CPython
+    abi = "cp" + pyver[2:] if arch != "any" else "none"
+
+    root_is_purelib = egg_info["arch"] is None
+    if root_is_purelib:
+        bw = bdist_wheel(Distribution())
+    else:
+        bw = _bdist_wheel_tag(Distribution())
+
+    bw.root_is_pure = root_is_purelib
+    bw.python_tag = pyver
+    bw.plat_name_supplied = True
+    bw.plat_name = egg_info["arch"] or "any"
+    if not root_is_purelib:
+        bw.full_tag_supplied = True
+        bw.full_tag = (pyver, abi, arch)
+
+    dist_info_dir = os.path.join(dir, "{name}-{ver}.dist-info".format(**egg_info))
+    bw.egg2dist(os.path.join(dir, "EGG-INFO"), dist_info_dir)
+    bw.write_wheelfile(dist_info_dir, generator="egg2wheel")
+    wheel_name = "{name}-{ver}-{pyver}-{}-{}.whl".format(abi, arch, **egg_info)
+    with WheelFile(os.path.join(dest_dir, wheel_name), "w") as wf:
+        wf.write_files(dir)
+
+    shutil.rmtree(dir)
+
+
+def parse_wininst_info(wininfo_name, egginfo_name):
+    """Extract metadata from filenames.
+
+    Extracts the 4 metadata items needed (name, version, pyversion, arch) from
+    the installer filename and the name of the egg-info directory embedded in
+    the zipfile (if any).
+
+    The egginfo filename has the format::
+
+        name-ver(-pyver)(-arch).egg-info
+
+    The installer filename has the format::
+
+        name-ver.arch(-pyver).exe
+
+    Some things to note:
+
+    1. The installer filename is not definitive. An installer can be renamed
+       and work perfectly well as an installer. So more reliable data should
+       be used whenever possible.
+    2. The egg-info data should be preferred for the name and version, because
+       these come straight from the distutils metadata, and are mandatory.
+    3. The pyver from the egg-info data should be ignored, as it is
+       constructed from the version of Python used to build the installer,
+       which is irrelevant - the installer filename is correct here (even to
+       the point that when it's not there, any version is implied).
+    4. The architecture must be taken from the installer filename, as it is
+       not included in the egg-info data.
+    5. Architecture-neutral installers still have an architecture because the
+       installer format itself (being executable) is architecture-specific. We
+       should therefore ignore the architecture if the content is pure-python.
+    """
+
+    egginfo = None
+    if egginfo_name:
+        egginfo = egg_info_re.search(egginfo_name)
+        if not egginfo:
+            raise ValueError(f"Egg info filename {egginfo_name} is not valid")
+
+    # Parse the wininst filename
+    # 1. Distribution name (up to the first '-')
+    w_name, sep, rest = wininfo_name.partition("-")
+    if not sep:
+        raise ValueError(f"Installer filename {wininfo_name} is not valid")
+
+    # Strip '.exe'
+    rest = rest[:-4]
+    # 2. Python version (from the last '-', must start with 'py')
+    rest2, sep, w_pyver = rest.rpartition("-")
+    if sep and w_pyver.startswith("py"):
+        rest = rest2
+        w_pyver = w_pyver.replace(".", "")
+    else:
+        # Not version specific - use py2.py3. While it is possible that
+        # pure-Python code is not compatible with both Python 2 and 3, there
+        # is no way of knowing from the wininst format, so we assume the best
+        # here (the user can always manually rename the wheel to be more
+        # restrictive if needed).
+        w_pyver = "py2.py3"
+    # 3. Version and architecture
+    w_ver, sep, w_arch = rest.rpartition(".")
+    if not sep:
+        raise ValueError(f"Installer filename {wininfo_name} is not valid")
+
+    if egginfo:
+        w_name = egginfo.group("name")
+        w_ver = egginfo.group("ver")
+
+    return {"name": w_name, "ver": w_ver, "arch": w_arch, "pyver": w_pyver}
+
+
+def wininst2wheel(path, dest_dir):
+    with zipfile.ZipFile(path) as bdw:
+        # Search for egg-info in the archive
+        egginfo_name = None
+        for filename in bdw.namelist():
+            if ".egg-info" in filename:
+                egginfo_name = filename
+                break
+
+        info = parse_wininst_info(os.path.basename(path), egginfo_name)
+
+        root_is_purelib = True
+        for zipinfo in bdw.infolist():
+            if zipinfo.filename.startswith("PLATLIB"):
+                root_is_purelib = False
+                break
+        if root_is_purelib:
+            paths = {"purelib": ""}
+        else:
+            paths = {"platlib": ""}
+
+        dist_info = "{name}-{ver}".format(**info)
+        datadir = "%s.data/" % dist_info
+
+        # rewrite paths to trick ZipFile into extracting an egg
+        # XXX grab wininst .ini - between .exe, padding, and first zip file.
+        members = []
+        egginfo_name = ""
+        for zipinfo in bdw.infolist():
+            key, basename = zipinfo.filename.split("/", 1)
+            key = key.lower()
+            basepath = paths.get(key, None)
+            if basepath is None:
+                basepath = datadir + key.lower() + "/"
+            oldname = zipinfo.filename
+            newname = basepath + basename
+            zipinfo.filename = newname
+            del bdw.NameToInfo[oldname]
+            bdw.NameToInfo[newname] = zipinfo
+            # Collect member names, but omit '' (from an entry like "PLATLIB/")
+            if newname:
+                members.append(newname)
+            # Remember egg-info name for the egg2dist call below
+            if not egginfo_name:
+                if newname.endswith(".egg-info"):
+                    egginfo_name = newname
+                elif ".egg-info/" in newname:
+                    egginfo_name, sep, _ = newname.rpartition("/")
+        dir = tempfile.mkdtemp(suffix="_b2w")
+        bdw.extractall(dir, members)
+
+    # egg2wheel
+    abi = "none"
+    pyver = info["pyver"]
+    arch = (info["arch"] or "any").replace(".", "_").replace("-", "_")
+    # Wininst installers always have arch even if they are not
+    # architecture-specific (because the format itself is).
+    # So, assume the content is architecture-neutral if root is purelib.
+    if root_is_purelib:
+        arch = "any"
+    # If the installer is architecture-specific, it's almost certainly also
+    # CPython-specific.
+    if arch != "any":
+        pyver = pyver.replace("py", "cp")
+    wheel_name = "-".join((dist_info, pyver, abi, arch))
+    if root_is_purelib:
+        bw = bdist_wheel(Distribution())
+    else:
+        bw = _bdist_wheel_tag(Distribution())
+
+    bw.root_is_pure = root_is_purelib
+    bw.python_tag = pyver
+    bw.plat_name_supplied = True
+    bw.plat_name = info["arch"] or "any"
+
+    if not root_is_purelib:
+        bw.full_tag_supplied = True
+        bw.full_tag = (pyver, abi, arch)
+
+    dist_info_dir = os.path.join(dir, "%s.dist-info" % dist_info)
+    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
+    bw.write_wheelfile(dist_info_dir, generator="wininst2wheel")
+
+    wheel_path = os.path.join(dest_dir, wheel_name)
+    with WheelFile(wheel_path, "w") as wf:
+        wf.write_files(dir)
+
+    shutil.rmtree(dir)
+
+
+def convert(files, dest_dir, verbose):
+    for pat in files:
+        for installer in iglob(pat):
+            if os.path.splitext(installer)[1] == ".egg":
+                conv = egg2wheel
+            else:
+                conv = wininst2wheel
+
+            if verbose:
+                print(f"{installer}... ", flush=True)
+
+            conv(installer, dest_dir)
+            if verbose:
+                print("OK")
diff --git a/setuptools/_vendor/wheel/cli/pack.py b/setuptools/_vendor/wheel/cli/pack.py
new file mode 100644
index 0000000000..64469c0c73
--- /dev/null
+++ b/setuptools/_vendor/wheel/cli/pack.py
@@ -0,0 +1,85 @@
+from __future__ import annotations
+
+import email.policy
+import os.path
+import re
+from email.generator import BytesGenerator
+from email.parser import BytesParser
+
+from wheel.cli import WheelError
+from wheel.wheelfile import WheelFile
+
+DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")
+
+
+def pack(directory: str, dest_dir: str, build_number: str | None) -> None:
+    """Repack a previously unpacked wheel directory into a new wheel file.
+
+    The .dist-info/WHEEL file must contain one or more tags so that the target
+    wheel file name can be determined.
+
+    :param directory: The unpacked wheel directory
+    :param dest_dir: Destination directory (defaults to the current directory)
+    """
+    # Find the .dist-info directory
+    dist_info_dirs = [
+        fn
+        for fn in os.listdir(directory)
+        if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)
+    ]
+    if len(dist_info_dirs) > 1:
+        raise WheelError(f"Multiple .dist-info directories found in {directory}")
+    elif not dist_info_dirs:
+        raise WheelError(f"No .dist-info directories found in {directory}")
+
+    # Determine the target wheel filename
+    dist_info_dir = dist_info_dirs[0]
+    name_version = DIST_INFO_RE.match(dist_info_dir).group("namever")
+
+    # Read the tags and the existing build number from .dist-info/WHEEL
+    wheel_file_path = os.path.join(directory, dist_info_dir, "WHEEL")
+    with open(wheel_file_path, "rb") as f:
+        info = BytesParser(policy=email.policy.compat32).parse(f)
+        tags: list[str] = info.get_all("Tag", [])
+        existing_build_number = info.get("Build")
+
+        if not tags:
+            raise WheelError(
+                f"No tags present in {dist_info_dir}/WHEEL; cannot determine target "
+                f"wheel filename"
+            )
+
+    # Set the wheel file name and add/replace/remove the Build tag in .dist-info/WHEEL
+    build_number = build_number if build_number is not None else existing_build_number
+    if build_number is not None:
+        del info["Build"]
+        if build_number:
+            info["Build"] = build_number
+            name_version += "-" + build_number
+
+        if build_number != existing_build_number:
+            with open(wheel_file_path, "wb") as f:
+                BytesGenerator(f, maxheaderlen=0).flatten(info)
+
+    # Reassemble the tags for the wheel file
+    tagline = compute_tagline(tags)
+
+    # Repack the wheel
+    wheel_path = os.path.join(dest_dir, f"{name_version}-{tagline}.whl")
+    with WheelFile(wheel_path, "w") as wf:
+        print(f"Repacking wheel as {wheel_path}...", end="", flush=True)
+        wf.write_files(directory)
+
+    print("OK")
+
+
+def compute_tagline(tags: list[str]) -> str:
+    """Compute a tagline from a list of tags.
+
+    :param tags: A list of tags
+    :return: A tagline
+    """
+    impls = sorted({tag.split("-")[0] for tag in tags})
+    abivers = sorted({tag.split("-")[1] for tag in tags})
+    platforms = sorted({tag.split("-")[2] for tag in tags})
+    return "-".join([".".join(impls), ".".join(abivers), ".".join(platforms)])
diff --git a/setuptools/_vendor/wheel/cli/tags.py b/setuptools/_vendor/wheel/cli/tags.py
new file mode 100644
index 0000000000..88da72e9ec
--- /dev/null
+++ b/setuptools/_vendor/wheel/cli/tags.py
@@ -0,0 +1,139 @@
+from __future__ import annotations
+
+import email.policy
+import itertools
+import os
+from collections.abc import Iterable
+from email.parser import BytesParser
+
+from ..wheelfile import WheelFile
+
+
+def _compute_tags(original_tags: Iterable[str], new_tags: str | None) -> set[str]:
+    """Add or replace tags. Supports dot-separated tags"""
+    if new_tags is None:
+        return set(original_tags)
+
+    if new_tags.startswith("+"):
+        return {*original_tags, *new_tags[1:].split(".")}
+
+    if new_tags.startswith("-"):
+        return set(original_tags) - set(new_tags[1:].split("."))
+
+    return set(new_tags.split("."))
+
+
+def tags(
+    wheel: str,
+    python_tags: str | None = None,
+    abi_tags: str | None = None,
+    platform_tags: str | None = None,
+    build_tag: str | None = None,
+    remove: bool = False,
+) -> str:
+    """Change the tags on a wheel file.
+
+    The tags are left unchanged if they are not specified. To specify "none",
+    use ["none"]. To append to the previous tags, a tag should start with a
+    "+".  If a tag starts with "-", it will be removed from existing tags.
+    Processing is done left to right.
+
+    :param wheel: The paths to the wheels
+    :param python_tags: The Python tags to set
+    :param abi_tags: The ABI tags to set
+    :param platform_tags: The platform tags to set
+    :param build_tag: The build tag to set
+    :param remove: Remove the original wheel
+    """
+    with WheelFile(wheel, "r") as f:
+        assert f.filename, f"{f.filename} must be available"
+
+        wheel_info = f.read(f.dist_info_path + "/WHEEL")
+        info = BytesParser(policy=email.policy.compat32).parsebytes(wheel_info)
+
+        original_wheel_name = os.path.basename(f.filename)
+        namever = f.parsed_filename.group("namever")
+        build = f.parsed_filename.group("build")
+        original_python_tags = f.parsed_filename.group("pyver").split(".")
+        original_abi_tags = f.parsed_filename.group("abi").split(".")
+        original_plat_tags = f.parsed_filename.group("plat").split(".")
+
+    tags: list[str] = info.get_all("Tag", [])
+    existing_build_tag = info.get("Build")
+
+    impls = {tag.split("-")[0] for tag in tags}
+    abivers = {tag.split("-")[1] for tag in tags}
+    platforms = {tag.split("-")[2] for tag in tags}
+
+    if impls != set(original_python_tags):
+        msg = f"Wheel internal tags {impls!r} != filename tags {original_python_tags!r}"
+        raise AssertionError(msg)
+
+    if abivers != set(original_abi_tags):
+        msg = f"Wheel internal tags {abivers!r} != filename tags {original_abi_tags!r}"
+        raise AssertionError(msg)
+
+    if platforms != set(original_plat_tags):
+        msg = (
+            f"Wheel internal tags {platforms!r} != filename tags {original_plat_tags!r}"
+        )
+        raise AssertionError(msg)
+
+    if existing_build_tag != build:
+        msg = (
+            f"Incorrect filename '{build}' "
+            f"& *.dist-info/WHEEL '{existing_build_tag}' build numbers"
+        )
+        raise AssertionError(msg)
+
+    # Start changing as needed
+    if build_tag is not None:
+        build = build_tag
+
+    final_python_tags = sorted(_compute_tags(original_python_tags, python_tags))
+    final_abi_tags = sorted(_compute_tags(original_abi_tags, abi_tags))
+    final_plat_tags = sorted(_compute_tags(original_plat_tags, platform_tags))
+
+    final_tags = [
+        namever,
+        ".".join(final_python_tags),
+        ".".join(final_abi_tags),
+        ".".join(final_plat_tags),
+    ]
+    if build:
+        final_tags.insert(1, build)
+
+    final_wheel_name = "-".join(final_tags) + ".whl"
+
+    if original_wheel_name != final_wheel_name:
+        del info["Tag"], info["Build"]
+        for a, b, c in itertools.product(
+            final_python_tags, final_abi_tags, final_plat_tags
+        ):
+            info["Tag"] = f"{a}-{b}-{c}"
+        if build:
+            info["Build"] = build
+
+        original_wheel_path = os.path.join(
+            os.path.dirname(f.filename), original_wheel_name
+        )
+        final_wheel_path = os.path.join(os.path.dirname(f.filename), final_wheel_name)
+
+        with WheelFile(original_wheel_path, "r") as fin, WheelFile(
+            final_wheel_path, "w"
+        ) as fout:
+            fout.comment = fin.comment  # preserve the comment
+            for item in fin.infolist():
+                if item.is_dir():
+                    continue
+                if item.filename == f.dist_info_path + "/RECORD":
+                    continue
+                if item.filename == f.dist_info_path + "/WHEEL":
+                    fout.writestr(item, info.as_bytes())
+                else:
+                    fout.writestr(item, fin.read(item))
+
+        if remove:
+            os.remove(original_wheel_path)
+
+    return final_wheel_name
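The "+"/"-" prefix semantics of _compute_tags in action, as a sketch (import path assumed; sets are unordered, so printed order may vary):

    from wheel.cli.tags import _compute_tags

    print(_compute_tags(["py2", "py3"], "+py4"))  # {'py2', 'py3', 'py4'}
    print(_compute_tags(["py2", "py3"], "-py2"))  # {'py3'}
    print(_compute_tags(["py2", "py3"], "cp39"))  # {'cp39'}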
diff --git a/setuptools/_vendor/wheel/cli/unpack.py b/setuptools/_vendor/wheel/cli/unpack.py
new file mode 100644
index 0000000000..d48840e6ec
--- /dev/null
+++ b/setuptools/_vendor/wheel/cli/unpack.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from ..wheelfile import WheelFile
+
+
+def unpack(path: str, dest: str = ".") -> None:
+    """Unpack a wheel.
+
+    Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
+    is the package name and {ver} its version.
+
+    :param path: The path to the wheel.
+    :param dest: Destination directory (default to current directory).
+    """
+    with WheelFile(path) as wf:
+        namever = wf.parsed_filename.group("namever")
+        destination = Path(dest) / namever
+        print(f"Unpacking to: {destination}...", end="", flush=True)
+        for zinfo in wf.filelist:
+            wf.extract(zinfo, destination)
+
+            # Set permissions to the same values as they were set in the archive
+            # We have to do this manually due to
+            # https://github.com/python/cpython/issues/59999
+            permissions = zinfo.external_attr >> 16 & 0o777
+            destination.joinpath(zinfo.filename).chmod(permissions)
+
+    print("OK")
diff --git a/setuptools/_vendor/wheel/metadata.py b/setuptools/_vendor/wheel/metadata.py
index 341f614ceb..6aa4362808 100644
--- a/setuptools/_vendor/wheel/metadata.py
+++ b/setuptools/_vendor/wheel/metadata.py
@@ -13,7 +13,7 @@
 from email.parser import Parser
 from typing import Iterator
 
-from ..packaging.requirements import Requirement
+from .vendored.packaging.requirements import Requirement
 
 
 def _nonblank(str):
diff --git a/setuptools/_vendor/wheel/vendored/__init__.py b/setuptools/_vendor/wheel/vendored/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/wheel/vendored/packaging/__init__.py b/setuptools/_vendor/wheel/vendored/packaging/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/wheel/vendored/packaging/_elffile.py b/setuptools/_vendor/wheel/vendored/packaging/_elffile.py
new file mode 100644
index 0000000000..6fb19b30bb
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/_elffile.py
@@ -0,0 +1,108 @@
+"""
+ELF file parser.
+
+This provides a class ``ELFFile`` that parses an ELF executable in a similar
+interface to ``ZipFile``. Only the read interface is implemented.
+
+Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+import enum
+import os
+import struct
+from typing import IO, Optional, Tuple
+
+
+class ELFInvalid(ValueError):
+    pass
+
+
+class EIClass(enum.IntEnum):
+    C32 = 1
+    C64 = 2
+
+
+class EIData(enum.IntEnum):
+    Lsb = 1
+    Msb = 2
+
+
+class EMachine(enum.IntEnum):
+    I386 = 3
+    S390 = 22
+    Arm = 40
+    X8664 = 62
+    AArc64 = 183
+
+
+class ELFFile:
+    """
+    Representation of an ELF executable.
+    """
+
+    def __init__(self, f: IO[bytes]) -> None:
+        self._f = f
+
+        try:
+            ident = self._read("16B")
+        except struct.error:
+            raise ELFInvalid("unable to parse identification")
+        magic = bytes(ident[:4])
+        if magic != b"\x7fELF":
+            raise ELFInvalid(f"invalid magic: {magic!r}")
+
+        self.capacity = ident[4]  # Format for program header (bitness).
+        self.encoding = ident[5]  # Data structure encoding (endianness).
+
+        try:
+            # e_fmt: Format for program header.
+            # p_fmt: Format for section header.
+            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+            e_fmt, self._p_fmt, self._p_idx = {
+                (1, 1): ("HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
+                (2, 1): ("HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
+            }[(self.capacity, self.encoding)]
+        except KeyError:
+            raise ELFInvalid(
+                f"unrecognized capacity ({self.capacity}) or "
+                f"encoding ({self.encoding})"
+            )
+
+        try:
+            (
+                _,
+                self.machine,  # Architecture type.
+                _,
+                _,
+                self._e_phoff,  # Offset of program header.
+                _,
+                self.flags,  # Processor-specific flags.
+                _,
+                self._e_phentsize,  # Size of section.
+                self._e_phnum,  # Number of sections.
+            ) = self._read(e_fmt)
+        except struct.error as e:
+            raise ELFInvalid("unable to parse machine and section information") from e
+
+    def _read(self, fmt: str) -> Tuple[int, ...]:
+        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
+
+    @property
+    def interpreter(self) -> Optional[str]:
+        """
+        The path recorded in the ``PT_INTERP`` section header.
+        """
+        for index in range(self._e_phnum):
+            self._f.seek(self._e_phoff + self._e_phentsize * index)
+            try:
+                data = self._read(self._p_fmt)
+            except struct.error:
+                continue
+            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
+                continue
+            self._f.seek(data[self._p_idx[1]])
+            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
+        return None
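A sketch of reading the PT_INTERP interpreter from a dynamically linked Linux binary with the parser above; the import path and the /bin/ls target are assumptions:

    from wheel.vendored.packaging._elffile import ELFFile

    with open("/bin/ls", "rb") as f:           # any dynamically linked ELF
        print(ELFFile(f).interpreter)          # e.g. /lib64/ld-linux-x86-64.so.2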
diff --git a/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py b/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py
new file mode 100644
index 0000000000..1f5f4ab3e5
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py
@@ -0,0 +1,260 @@
+import collections
+import contextlib
+import functools
+import os
+import re
+import sys
+import warnings
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
+
+from ._elffile import EIClass, EIData, ELFFile, EMachine
+
+EF_ARM_ABIMASK = 0xFF000000
+EF_ARM_ABI_VER5 = 0x05000000
+EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+
+# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
+# as the type for `path` until then.
+@contextlib.contextmanager
+def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
+    try:
+        with open(path, "rb") as f:
+            yield ELFFile(f)
+    except (OSError, TypeError, ValueError):
+        yield None
+
+
+def _is_linux_armhf(executable: str) -> bool:
+    # hard-float ABI can be detected from the ELF header of the running
+    # process
+    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+    with _parse_elf(executable) as f:
+        return (
+            f is not None
+            and f.capacity == EIClass.C32
+            and f.encoding == EIData.Lsb
+            and f.machine == EMachine.Arm
+            and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
+            and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
+        )
+
+
+def _is_linux_i686(executable: str) -> bool:
+    with _parse_elf(executable) as f:
+        return (
+            f is not None
+            and f.capacity == EIClass.C32
+            and f.encoding == EIData.Lsb
+            and f.machine == EMachine.I386
+        )
+
+
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+    if "armv7l" in archs:
+        return _is_linux_armhf(executable)
+    if "i686" in archs:
+        return _is_linux_i686(executable)
+    allowed_archs = {
+        "x86_64",
+        "aarch64",
+        "ppc64",
+        "ppc64le",
+        "s390x",
+        "loongarch64",
+        "riscv64",
+    }
+    return any(arch in allowed_archs for arch in archs)
+
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+
+
+class _GLibCVersion(NamedTuple):
+    major: int
+    minor: int
+
+
+def _glibc_version_string_confstr() -> Optional[str]:
+    """
+    Primary implementation of glibc_version_string using os.confstr.
+    """
+    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+    # to be broken or missing. This strategy is used in the standard library
+    # platform module.
+    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+    try:
+        # Should be a string like "glibc 2.17".
+        version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
+        assert version_string is not None
+        _, version = version_string.rsplit()
+    except (AssertionError, AttributeError, OSError, ValueError):
+        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+        return None
+    return version
+
+
+def _glibc_version_string_ctypes() -> Optional[str]:
+    """
+    Fallback implementation of glibc_version_string using ctypes.
+    """
+    try:
+        import ctypes
+    except ImportError:
+        return None
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    #
+    # We must also handle the special case where the executable is not a
+    # dynamically linked executable. This can occur when using musl libc,
+    # for example. In this situation, dlopen() will error, leading to an
+    # OSError. Interestingly, at least in the case of musl, there is no
+    # errno set on the OSError. The single string argument used to construct
+    # OSError comes from libc itself and is therefore not portable to
+    # hard code here. In any case, failure to call dlopen() means we
+    # can proceed, so we bail on our attempt.
+    try:
+        process_namespace = ctypes.CDLL(None)
+    except OSError:
+        return None
+
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str: str = gnu_get_libc_version()
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+def _glibc_version_string() -> Optional[str]:
+    """Returns glibc version string, or None if not using glibc."""
+    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+    """Parse glibc version.
+
+    We use a regexp instead of str.split because we want to discard any
+    random junk that might come after the minor version -- this might happen
+    in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    uses version strings like "2.20-2014.11"). See gh-3588.
+    """
+    m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            f"Expected glibc version with 2 components major.minor,"
+            f" got: {version_str}",
+            RuntimeWarning,
+        )
+        return -1, -1
+    return int(m.group("major")), int(m.group("minor"))
+
+
+@functools.lru_cache
+def _get_glibc_version() -> Tuple[int, int]:
+    version_str = _glibc_version_string()
+    if version_str is None:
+        return (-1, -1)
+    return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
+    sys_glibc = _get_glibc_version()
+    if sys_glibc < version:
+        return False
+    # Check for presence of _manylinux module.
+    try:
+        import _manylinux
+    except ImportError:
+        return True
+    if hasattr(_manylinux, "manylinux_compatible"):
+        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+        if result is not None:
+            return bool(result)
+        return True
+    if version == _GLibCVersion(2, 5):
+        if hasattr(_manylinux, "manylinux1_compatible"):
+            return bool(_manylinux.manylinux1_compatible)
+    if version == _GLibCVersion(2, 12):
+        if hasattr(_manylinux, "manylinux2010_compatible"):
+            return bool(_manylinux.manylinux2010_compatible)
+    if version == _GLibCVersion(2, 17):
+        if hasattr(_manylinux, "manylinux2014_compatible"):
+            return bool(_manylinux.manylinux2014_compatible)
+    return True
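+
+# A sketch of the override hook probed above. A distributor could ship a
+# hypothetical `_manylinux` module on sys.path such as:
+#     # _manylinux.py
+#     def manylinux_compatible(major, minor, arch):
+#         # e.g. cap accepted manylinux tags at glibc 2.17
+#         return (major, minor) <= (2, 17)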
+
+
+_LEGACY_MANYLINUX_MAP = {
+    # CentOS 7 w/ glibc 2.17 (PEP 599)
+    (2, 17): "manylinux2014",
+    # CentOS 6 w/ glibc 2.12 (PEP 571)
+    (2, 12): "manylinux2010",
+    # CentOS 5 w/ glibc 2.5 (PEP 513)
+    (2, 5): "manylinux1",
+}
+
+
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+    """Generate manylinux tags compatible to the current platform.
+
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be manylinux-compatible.
+
+    :returns: An iterator of compatible manylinux tags.
+    """
+    if not _have_compatible_abi(sys.executable, archs):
+        return
+    # Oldest glibc to be supported regardless of architecture is (2, 17).
+    too_old_glibc2 = _GLibCVersion(2, 16)
+    if set(archs) & {"x86_64", "i686"}:
+        # On x86/i686 also oldest glibc to be supported is (2, 5).
+        too_old_glibc2 = _GLibCVersion(2, 4)
+    current_glibc = _GLibCVersion(*_get_glibc_version())
+    glibc_max_list = [current_glibc]
+    # We can assume compatibility across glibc major versions.
+    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+    #
+    # Build a list of maximum glibc versions so that we can
+    # output the canonical list of all glibc from current_glibc
+    # down to too_old_glibc2, including all intermediary versions.
+    for glibc_major in range(current_glibc.major - 1, 1, -1):
+        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+    for arch in archs:
+        for glibc_max in glibc_max_list:
+            if glibc_max.major == too_old_glibc2.major:
+                min_minor = too_old_glibc2.minor
+            else:
+                # For other glibc major versions oldest supported is (x, 0).
+                min_minor = -1
+            for glibc_minor in range(glibc_max.minor, min_minor, -1):
+                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+                tag = "manylinux_{}_{}".format(*glibc_version)
+                if _is_compatible(arch, glibc_version):
+                    yield f"{tag}_{arch}"
+                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+                if glibc_version in _LEGACY_MANYLINUX_MAP:
+                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+                    if _is_compatible(arch, glibc_version):
+                        yield f"{legacy_tag}_{arch}"
diff --git a/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py b/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py
new file mode 100644
index 0000000000..eb4251b5c1
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py
@@ -0,0 +1,83 @@
+"""PEP 656 support.
+
+This module implements logic to detect if the currently running Python is
+linked against musl, and what musl version is used.
+"""
+
+import functools
+import re
+import subprocess
+import sys
+from typing import Iterator, NamedTuple, Optional, Sequence
+
+from ._elffile import ELFFile
+
+
+class _MuslVersion(NamedTuple):
+    major: int
+    minor: int
+
+
+def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
+    lines = [n for n in (n.strip() for n in output.splitlines()) if n]
+    if len(lines) < 2 or lines[0][:4] != "musl":
+        return None
+    m = re.match(r"Version (\d+)\.(\d+)", lines[1])
+    if not m:
+        return None
+    return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
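+
+# Illustrative behavior (a sketch; the banner mirrors the example in
+# _get_musl_version's docstring below):
+#     >>> _parse_musl_version("musl libc (x86_64)\nVersion 1.2.2\nDynamic Program Loader")
+#     _MuslVersion(major=1, minor=2)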
+
+
+@functools.lru_cache
+def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
+    """Detect currently-running musl runtime version.
+
+    This is done by checking the specified executable's dynamic linking
+    information, and invoking the loader to parse its output for a version
+    string. If the loader is musl, the output would be something like::
+
+        musl libc (x86_64)
+        Version 1.2.2
+        Dynamic Program Loader
+    """
+    try:
+        with open(executable, "rb") as f:
+            ld = ELFFile(f).interpreter
+    except (OSError, TypeError, ValueError):
+        return None
+    if ld is None or "musl" not in ld:
+        return None
+    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
+    return _parse_musl_version(proc.stderr)
+
+
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+    """Generate musllinux tags compatible to the current platform.
+
+    :param archs: Sequence of compatible architectures.
+        The first one shall be the closest to the actual architecture and be the part of
+        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+        be musllinux-compatible.
+
+    :returns: An iterator of compatible musllinux tags.
+    """
+    sys_musl = _get_musl_version(sys.executable)
+    if sys_musl is None:  # Python not dynamically linked against musl.
+        return
+    for arch in archs:
+        for minor in range(sys_musl.minor, -1, -1):
+            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+
+
+if __name__ == "__main__":  # pragma: no cover
+    import sysconfig
+
+    plat = sysconfig.get_platform()
+    assert plat.startswith("linux-"), "not linux"
+
+    print("plat:", plat)
+    print("musl:", _get_musl_version(sys.executable))
+    print("tags:", end=" ")
+    # platform_tags expects a sequence of architectures, not a bare string.
+    for t in platform_tags([re.sub(r"[.-]", "_", plat.split("-", 1)[-1])]):
+        print(t, end="\n      ")
diff --git a/setuptools/_vendor/wheel/vendored/packaging/_parser.py b/setuptools/_vendor/wheel/vendored/packaging/_parser.py
new file mode 100644
index 0000000000..513686a219
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/_parser.py
@@ -0,0 +1,356 @@
+"""Handwritten parser of dependency specifiers.
+
+The docstring for each __parse_* function contains EBNF-inspired grammar representing
+the implementation.
+"""
+
+import ast
+from typing import Any, List, NamedTuple, Optional, Tuple, Union
+
+from ._tokenizer import DEFAULT_RULES, Tokenizer
+
+
+class Node:
+    def __init__(self, value: str) -> None:
+        self.value = value
+
+    def __str__(self) -> str:
+        return self.value
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__}('{self}')>"
+
+    def serialize(self) -> str:
+        raise NotImplementedError
+
+
+class Variable(Node):
+    def serialize(self) -> str:
+        return str(self)
+
+
+class Value(Node):
+    def serialize(self) -> str:
+        return f'"{self}"'
+
+
+class Op(Node):
+    def serialize(self) -> str:
+        return str(self)
+
+
+MarkerVar = Union[Variable, Value]
+MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
+# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
+# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
+# mypy does not support recursive type definition
+# https://github.com/python/mypy/issues/731
+MarkerAtom = Any
+MarkerList = List[Any]
+
+
+class ParsedRequirement(NamedTuple):
+    name: str
+    url: str
+    extras: List[str]
+    specifier: str
+    marker: Optional[MarkerList]
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for dependency specifier
+# --------------------------------------------------------------------------------------
+def parse_requirement(source: str) -> ParsedRequirement:
+    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
+    """
+    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
+    """
+    tokenizer.consume("WS")
+
+    name_token = tokenizer.expect(
+        "IDENTIFIER", expected="package name at the start of dependency specifier"
+    )
+    name = name_token.text
+    tokenizer.consume("WS")
+
+    extras = _parse_extras(tokenizer)
+    tokenizer.consume("WS")
+
+    url, specifier, marker = _parse_requirement_details(tokenizer)
+    tokenizer.expect("END", expected="end of dependency specifier")
+
+    return ParsedRequirement(name, url, extras, specifier, marker)
+
+
+def _parse_requirement_details(
+    tokenizer: Tokenizer,
+) -> Tuple[str, str, Optional[MarkerList]]:
+    """
+    requirement_details = AT URL (WS requirement_marker?)?
+                        | specifier WS? (requirement_marker)?
+    """
+
+    specifier = ""
+    url = ""
+    marker = None
+
+    if tokenizer.check("AT"):
+        tokenizer.read()
+        tokenizer.consume("WS")
+
+        url_start = tokenizer.position
+        url = tokenizer.expect("URL", expected="URL after @").text
+        if tokenizer.check("END", peek=True):
+            return (url, specifier, marker)
+
+        tokenizer.expect("WS", expected="whitespace after URL")
+
+        # The input might end after whitespace.
+        if tokenizer.check("END", peek=True):
+            return (url, specifier, marker)
+
+        marker = _parse_requirement_marker(
+            tokenizer, span_start=url_start, after="URL and whitespace"
+        )
+    else:
+        specifier_start = tokenizer.position
+        specifier = _parse_specifier(tokenizer)
+        tokenizer.consume("WS")
+
+        if tokenizer.check("END", peek=True):
+            return (url, specifier, marker)
+
+        marker = _parse_requirement_marker(
+            tokenizer,
+            span_start=specifier_start,
+            after=(
+                "version specifier"
+                if specifier
+                else "name and no valid version specifier"
+            ),
+        )
+
+    return (url, specifier, marker)
+
+
+def _parse_requirement_marker(
+    tokenizer: Tokenizer, *, span_start: int, after: str
+) -> MarkerList:
+    """
+    requirement_marker = SEMICOLON marker WS?
+    """
+
+    if not tokenizer.check("SEMICOLON"):
+        tokenizer.raise_syntax_error(
+            f"Expected end or semicolon (after {after})",
+            span_start=span_start,
+        )
+    tokenizer.read()
+
+    marker = _parse_marker(tokenizer)
+    tokenizer.consume("WS")
+
+    return marker
+
+
+def _parse_extras(tokenizer: Tokenizer) -> List[str]:
+    """
+    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
+    """
+    if not tokenizer.check("LEFT_BRACKET", peek=True):
+        return []
+
+    with tokenizer.enclosing_tokens(
+        "LEFT_BRACKET",
+        "RIGHT_BRACKET",
+        around="extras",
+    ):
+        tokenizer.consume("WS")
+        extras = _parse_extras_list(tokenizer)
+        tokenizer.consume("WS")
+
+    return extras
+
+
+def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
+    """
+    extras_list = identifier (wsp* ',' wsp* identifier)*
+    """
+    extras: List[str] = []
+
+    if not tokenizer.check("IDENTIFIER"):
+        return extras
+
+    extras.append(tokenizer.read().text)
+
+    while True:
+        tokenizer.consume("WS")
+        if tokenizer.check("IDENTIFIER", peek=True):
+            tokenizer.raise_syntax_error("Expected comma between extra names")
+        elif not tokenizer.check("COMMA"):
+            break
+
+        tokenizer.read()
+        tokenizer.consume("WS")
+
+        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
+        extras.append(extra_token.text)
+
+    return extras
+
+
+def _parse_specifier(tokenizer: Tokenizer) -> str:
+    """
+    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
+              | WS? version_many WS?
+    """
+    with tokenizer.enclosing_tokens(
+        "LEFT_PARENTHESIS",
+        "RIGHT_PARENTHESIS",
+        around="version specifier",
+    ):
+        tokenizer.consume("WS")
+        parsed_specifiers = _parse_version_many(tokenizer)
+        tokenizer.consume("WS")
+
+    return parsed_specifiers
+
+
+def _parse_version_many(tokenizer: Tokenizer) -> str:
+    """
+    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
+    """
+    parsed_specifiers = ""
+    while tokenizer.check("SPECIFIER"):
+        span_start = tokenizer.position
+        parsed_specifiers += tokenizer.read().text
+        if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
+            tokenizer.raise_syntax_error(
+                ".* suffix can only be used with `==` or `!=` operators",
+                span_start=span_start,
+                span_end=tokenizer.position + 1,
+            )
+        if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
+            tokenizer.raise_syntax_error(
+                "Local version label can only be used with `==` or `!=` operators",
+                span_start=span_start,
+                span_end=tokenizer.position,
+            )
+        tokenizer.consume("WS")
+        if not tokenizer.check("COMMA"):
+            break
+        parsed_specifiers += tokenizer.read().text
+        tokenizer.consume("WS")
+
+    return parsed_specifiers
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for marker expression
+# --------------------------------------------------------------------------------------
+def parse_marker(source: str) -> MarkerList:
+    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
+    retval = _parse_marker(tokenizer)
+    tokenizer.expect("END", expected="end of marker expression")
+    return retval
+
+
+def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
+    """
+    marker = marker_atom (BOOLOP marker_atom)+
+    """
+    expression = [_parse_marker_atom(tokenizer)]
+    while tokenizer.check("BOOLOP"):
+        token = tokenizer.read()
+        expr_right = _parse_marker_atom(tokenizer)
+        expression.extend((token.text, expr_right))
+    return expression
+
+
+def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
+    """
+    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
+                | WS? marker_item WS?
+    """
+
+    tokenizer.consume("WS")
+    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
+        with tokenizer.enclosing_tokens(
+            "LEFT_PARENTHESIS",
+            "RIGHT_PARENTHESIS",
+            around="marker expression",
+        ):
+            tokenizer.consume("WS")
+            marker: MarkerAtom = _parse_marker(tokenizer)
+            tokenizer.consume("WS")
+    else:
+        marker = _parse_marker_item(tokenizer)
+    tokenizer.consume("WS")
+    return marker
+
+
+def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
+    """
+    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
+    """
+    tokenizer.consume("WS")
+    marker_var_left = _parse_marker_var(tokenizer)
+    tokenizer.consume("WS")
+    marker_op = _parse_marker_op(tokenizer)
+    tokenizer.consume("WS")
+    marker_var_right = _parse_marker_var(tokenizer)
+    tokenizer.consume("WS")
+    return (marker_var_left, marker_op, marker_var_right)
+
+
+def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
+    """
+    marker_var = VARIABLE | QUOTED_STRING
+    """
+    if tokenizer.check("VARIABLE"):
+        return process_env_var(tokenizer.read().text.replace(".", "_"))
+    elif tokenizer.check("QUOTED_STRING"):
+        return process_python_str(tokenizer.read().text)
+    else:
+        tokenizer.raise_syntax_error(
+            message="Expected a marker variable or quoted string"
+        )
+
+
+def process_env_var(env_var: str) -> Variable:
+    if env_var in ("platform_python_implementation", "python_implementation"):
+        return Variable("platform_python_implementation")
+    else:
+        return Variable(env_var)
+
+
+def process_python_str(python_str: str) -> Value:
+    value = ast.literal_eval(python_str)
+    return Value(str(value))
+
+
+def _parse_marker_op(tokenizer: Tokenizer) -> Op:
+    """
+    marker_op = IN | NOT IN | OP
+    """
+    if tokenizer.check("IN"):
+        tokenizer.read()
+        return Op("in")
+    elif tokenizer.check("NOT"):
+        tokenizer.read()
+        tokenizer.expect("WS", expected="whitespace after 'not'")
+        tokenizer.expect("IN", expected="'in' after 'not'")
+        return Op("not in")
+    elif tokenizer.check("OP"):
+        return Op(tokenizer.read().text)
+    else:
+        return tokenizer.raise_syntax_error(
+            "Expected marker operator, one of "
+            "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
+        )
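+
+# Illustrative round trip (a sketch; reprs shown in the style of Node.__repr__):
+#     >>> parse_requirement('name[extra] >=1.0 ; python_version < "3.8"')
+#     ParsedRequirement(name='name', url='', extras=['extra'], specifier='>=1.0',
+#                       marker=[(<Variable('python_version')>, <Op('<')>, <Value('3.8')>)])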
diff --git a/setuptools/_vendor/wheel/vendored/packaging/_structures.py b/setuptools/_vendor/wheel/vendored/packaging/_structures.py
new file mode 100644
index 0000000000..90a6465f96
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/_structures.py
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+class InfinityType:
+    def __repr__(self) -> str:
+        return "Infinity"
+
+    def __hash__(self) -> int:
+        return hash(repr(self))
+
+    def __lt__(self, other: object) -> bool:
+        return False
+
+    def __le__(self, other: object) -> bool:
+        return False
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, self.__class__)
+
+    def __gt__(self, other: object) -> bool:
+        return True
+
+    def __ge__(self, other: object) -> bool:
+        return True
+
+    def __neg__(self: object) -> "NegativeInfinityType":
+        return NegativeInfinity
+
+
+Infinity = InfinityType()
+
+
+class NegativeInfinityType:
+    def __repr__(self) -> str:
+        return "-Infinity"
+
+    def __hash__(self) -> int:
+        return hash(repr(self))
+
+    def __lt__(self, other: object) -> bool:
+        return True
+
+    def __le__(self, other: object) -> bool:
+        return True
+
+    def __eq__(self, other: object) -> bool:
+        return isinstance(other, self.__class__)
+
+    def __gt__(self, other: object) -> bool:
+        return False
+
+    def __ge__(self, other: object) -> bool:
+        return False
+
+    def __neg__(self: object) -> InfinityType:
+        return Infinity
+
+
+NegativeInfinity = NegativeInfinityType()
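+
+# Illustrative use (a sketch): the sentinels compare below/above everything
+# else, which makes them convenient padding values in version sort keys:
+#     >>> sorted([3, Infinity, NegativeInfinity, 7])
+#     [-Infinity, 3, 7, Infinity]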
diff --git a/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py b/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py
new file mode 100644
index 0000000000..dd0d648d49
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py
@@ -0,0 +1,192 @@
+import contextlib
+import re
+from dataclasses import dataclass
+from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
+
+from .specifiers import Specifier
+
+
+@dataclass
+class Token:
+    name: str
+    text: str
+    position: int
+
+
+class ParserSyntaxError(Exception):
+    """The provided source text could not be parsed correctly."""
+
+    def __init__(
+        self,
+        message: str,
+        *,
+        source: str,
+        span: Tuple[int, int],
+    ) -> None:
+        self.span = span
+        self.message = message
+        self.source = source
+
+        super().__init__()
+
+    def __str__(self) -> str:
+        marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
+        return "\n    ".join([self.message, self.source, marker])
+
+
+DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
+    "LEFT_PARENTHESIS": r"\(",
+    "RIGHT_PARENTHESIS": r"\)",
+    "LEFT_BRACKET": r"\[",
+    "RIGHT_BRACKET": r"\]",
+    "SEMICOLON": r";",
+    "COMMA": r",",
+    "QUOTED_STRING": re.compile(
+        r"""
+            (
+                ('[^']*')
+                |
+                ("[^"]*")
+            )
+        """,
+        re.VERBOSE,
+    ),
+    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
+    "BOOLOP": r"\b(or|and)\b",
+    "IN": r"\bin\b",
+    "NOT": r"\bnot\b",
+    "VARIABLE": re.compile(
+        r"""
+            \b(
+                python_version
+                |python_full_version
+                |os[._]name
+                |sys[._]platform
+                |platform_(release|system)
+                |platform[._](version|machine|python_implementation)
+                |python_implementation
+                |implementation_(name|version)
+                |extra
+            )\b
+        """,
+        re.VERBOSE,
+    ),
+    "SPECIFIER": re.compile(
+        Specifier._operator_regex_str + Specifier._version_regex_str,
+        re.VERBOSE | re.IGNORECASE,
+    ),
+    "AT": r"\@",
+    "URL": r"[^ \t]+",
+    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
+    "VERSION_PREFIX_TRAIL": r"\.\*",
+    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
+    "WS": r"[ \t]+",
+    "END": r"$",
+}
+
+
+class Tokenizer:
+    """Context-sensitive token parsing.
+
+    Provides methods to examine the input stream to check whether the next token
+    matches.
+    """
+
+    def __init__(
+        self,
+        source: str,
+        *,
+        rules: "Dict[str, Union[str, re.Pattern[str]]]",
+    ) -> None:
+        self.source = source
+        self.rules: Dict[str, re.Pattern[str]] = {
+            name: re.compile(pattern) for name, pattern in rules.items()
+        }
+        self.next_token: Optional[Token] = None
+        self.position = 0
+
+    def consume(self, name: str) -> None:
+        """Move beyond provided token name, if at current position."""
+        if self.check(name):
+            self.read()
+
+    def check(self, name: str, *, peek: bool = False) -> bool:
+        """Check whether the next token has the provided name.
+
+        By default, if the check succeeds, the token *must* be read before
+        another check. If `peek` is set to `True`, the token is not loaded and
+        would need to be checked again.
+        """
+        assert (
+            self.next_token is None
+        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
+        assert name in self.rules, f"Unknown token name: {name!r}"
+
+        expression = self.rules[name]
+
+        match = expression.match(self.source, self.position)
+        if match is None:
+            return False
+        if not peek:
+            self.next_token = Token(name, match[0], self.position)
+        return True
+
+    def expect(self, name: str, *, expected: str) -> Token:
+        """Expect a certain token name next, failing with a syntax error otherwise.
+
+        The token is read and returned.
+        """
+        if not self.check(name):
+            raise self.raise_syntax_error(f"Expected {expected}")
+        return self.read()
+
+    def read(self) -> Token:
+        """Consume the next token and return it."""
+        token = self.next_token
+        assert token is not None
+
+        self.position += len(token.text)
+        self.next_token = None
+
+        return token
+
+    def raise_syntax_error(
+        self,
+        message: str,
+        *,
+        span_start: Optional[int] = None,
+        span_end: Optional[int] = None,
+    ) -> NoReturn:
+        """Raise ParserSyntaxError at the given position."""
+        span = (
+            self.position if span_start is None else span_start,
+            self.position if span_end is None else span_end,
+        )
+        raise ParserSyntaxError(
+            message,
+            source=self.source,
+            span=span,
+        )
+
+    @contextlib.contextmanager
+    def enclosing_tokens(
+        self, open_token: str, close_token: str, *, around: str
+    ) -> Iterator[None]:
+        if self.check(open_token):
+            open_position = self.position
+            self.read()
+        else:
+            open_position = None
+
+        yield
+
+        if open_position is None:
+            return
+
+        if not self.check(close_token):
+            self.raise_syntax_error(
+                f"Expected matching {close_token} for {open_token}, after {around}",
+                span_start=open_position,
+            )
+
+        self.read()
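+
+# Illustrative session (a sketch):
+#     >>> t = Tokenizer("foo >=1.0", rules=DEFAULT_RULES)
+#     >>> t.check("IDENTIFIER")
+#     True
+#     >>> t.read()
+#     Token(name='IDENTIFIER', text='foo', position=0)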
diff --git a/setuptools/_vendor/wheel/vendored/packaging/markers.py b/setuptools/_vendor/wheel/vendored/packaging/markers.py
new file mode 100644
index 0000000000..c96d22a5a4
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/markers.py
@@ -0,0 +1,253 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import operator
+import os
+import platform
+import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+from ._parser import (
+    MarkerAtom,
+    MarkerList,
+    Op,
+    Value,
+    Variable,
+)
+from ._parser import (
+    parse_marker as _parse_marker,
+)
+from ._tokenizer import ParserSyntaxError
+from .specifiers import InvalidSpecifier, Specifier
+from .utils import canonicalize_name
+
+__all__ = [
+    "InvalidMarker",
+    "UndefinedComparison",
+    "UndefinedEnvironmentName",
+    "Marker",
+    "default_environment",
+]
+
+Operator = Callable[[str, str], bool]
+
+
+class InvalidMarker(ValueError):
+    """
+    An invalid marker was found, users should refer to PEP 508.
+    """
+
+
+class UndefinedComparison(ValueError):
+    """
+    An invalid operation was attempted on a value that doesn't support it.
+    """
+
+
+class UndefinedEnvironmentName(ValueError):
+    """
+    An attempt was made to use a name that does not exist in the
+    environment.
+    """
+
+
+def _normalize_extra_values(results: Any) -> Any:
+    """
+    Normalize extra values.
+    """
+    if isinstance(results[0], tuple):
+        lhs, op, rhs = results[0]
+        if isinstance(lhs, Variable) and lhs.value == "extra":
+            normalized_extra = canonicalize_name(rhs.value)
+            rhs = Value(normalized_extra)
+        elif isinstance(rhs, Variable) and rhs.value == "extra":
+            normalized_extra = canonicalize_name(lhs.value)
+            lhs = Value(normalized_extra)
+        results[0] = lhs, op, rhs
+    return results
+
+
+def _format_marker(
+    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
+) -> str:
+    assert isinstance(marker, (list, tuple, str))
+
+    # Sometimes we have a structure like [[...]] which is a single item list
+    # where the single item is itself its own list. In that case we want to skip
+    # the rest of this function so that we don't get extraneous () on the
+    # outside.
+    if (
+        isinstance(marker, list)
+        and len(marker) == 1
+        and isinstance(marker[0], (list, tuple))
+    ):
+        return _format_marker(marker[0])
+
+    if isinstance(marker, list):
+        inner = (_format_marker(m, first=False) for m in marker)
+        if first:
+            return " ".join(inner)
+        else:
+            return "(" + " ".join(inner) + ")"
+    elif isinstance(marker, tuple):
+        return " ".join([m.serialize() for m in marker])
+    else:
+        return marker
+
+
+_operators: Dict[str, Operator] = {
+    "in": lambda lhs, rhs: lhs in rhs,
+    "not in": lambda lhs, rhs: lhs not in rhs,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
+    "!=": operator.ne,
+    ">=": operator.ge,
+    ">": operator.gt,
+}
+
+
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
+    try:
+        spec = Specifier("".join([op.serialize(), rhs]))
+    except InvalidSpecifier:
+        pass
+    else:
+        return spec.contains(lhs, prereleases=True)
+
+    oper: Optional[Operator] = _operators.get(op.serialize())
+    if oper is None:
+        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+
+    return oper(lhs, rhs)
+
+
+def _normalize(*values: str, key: str) -> Tuple[str, ...]:
+    # PEP 685 – Comparison of extra names for optional distribution dependencies
+    # https://peps.python.org/pep-0685/
+    # > When comparing extra names, tools MUST normalize the names being
+    # > compared using the semantics outlined in PEP 503 for names
+    if key == "extra":
+        return tuple(canonicalize_name(v) for v in values)
+
+    # other environment markers don't have such standards
+    return values
+
+
+def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
+    groups: List[List[bool]] = [[]]
+
+    for marker in markers:
+        assert isinstance(marker, (list, tuple, str))
+
+        if isinstance(marker, list):
+            groups[-1].append(_evaluate_markers(marker, environment))
+        elif isinstance(marker, tuple):
+            lhs, op, rhs = marker
+
+            if isinstance(lhs, Variable):
+                environment_key = lhs.value
+                lhs_value = environment[environment_key]
+                rhs_value = rhs.value
+            else:
+                lhs_value = lhs.value
+                environment_key = rhs.value
+                rhs_value = environment[environment_key]
+
+            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
+            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+        else:
+            assert marker in ["and", "or"]
+            if marker == "or":
+                groups.append([])
+
+    return any(all(item) for item in groups)
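+
+# Illustrative evaluation (a sketch with hypothetical atoms A, B, C):
+# splitting groups on 'or' gives 'and' the higher precedence, so the flat
+# list [A, 'or', B, 'and', C] evaluates as any(all(g) for g in [[A], [B, C]]).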
+
+
+def format_full_version(info: "sys._version_info") -> str:
+    version = "{0.major}.{0.minor}.{0.micro}".format(info)
+    kind = info.releaselevel
+    if kind != "final":
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment() -> Dict[str, str]:
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker:
+    def __init__(self, marker: str) -> None:
+        # Note: We create a Marker object without calling this constructor in
+        #       packaging.requirements.Requirement. If any additional logic is
+        #       added here, make sure to mirror/adapt Requirement.
+        try:
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'or',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'and',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f""
+
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = default_environment()
+        current_environment["extra"] = ""
+        if environment is not None:
+            current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""
+
+        return _evaluate_markers(self._markers, current_environment)
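+
+# Illustrative usage (a sketch; the first result assumes a POSIX CPython
+# >= 3.8 is running the code):
+#     >>> m = Marker('python_version >= "3.8" and os_name == "posix"')
+#     >>> m.evaluate()
+#     True
+#     >>> m.evaluate({"os_name": "nt"})
+#     False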
diff --git a/setuptools/_vendor/wheel/vendored/packaging/requirements.py b/setuptools/_vendor/wheel/vendored/packaging/requirements.py
new file mode 100644
index 0000000000..bdc43a7e98
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/requirements.py
@@ -0,0 +1,90 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from typing import Any, Iterator, Optional, Set
+
+from ._parser import parse_requirement as _parse_requirement
+from ._tokenizer import ParserSyntaxError
+from .markers import Marker, _normalize_extra_values
+from .specifiers import SpecifierSet
+from .utils import canonicalize_name
+
+
+class InvalidRequirement(ValueError):
+    """
+    An invalid requirement was found, users should refer to PEP 508.
+    """
+
+
+class Requirement:
+    """Parse a requirement.
+
+    Parse a given requirement string into its parts, such as name, specifier,
+    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+    string.
+    """
+
+    # TODO: Can we test whether something is contained within a requirement?
+    #       If so how do we do that? Do we need to test against the _name_ of
+    #       the thing as well as the version? What about the markers?
+    # TODO: Can we normalize the name and extra name?
+
+    def __init__(self, requirement_string: str) -> None:
+        try:
+            parsed = _parse_requirement(requirement_string)
+        except ParserSyntaxError as e:
+            raise InvalidRequirement(str(e)) from e
+
+        self.name: str = parsed.name
+        self.url: Optional[str] = parsed.url or None
+        self.extras: Set[str] = set(parsed.extras or [])
+        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
+        self.marker: Optional[Marker] = None
+        if parsed.marker is not None:
+            self.marker = Marker.__new__(Marker)
+            self.marker._markers = _normalize_extra_values(parsed.marker)
+
+    def _iter_parts(self, name: str) -> Iterator[str]:
+        yield name
+
+        if self.extras:
+            formatted_extras = ",".join(sorted(self.extras))
+            yield f"[{formatted_extras}]"
+
+        if self.specifier:
+            yield str(self.specifier)
+
+        if self.url:
+            yield f"@ {self.url}"
+            if self.marker:
+                yield " "
+
+        if self.marker:
+            yield f"; {self.marker}"
+
+    def __str__(self) -> str:
+        return "".join(self._iter_parts(self.name))
+
+    def __repr__(self) -> str:
+        return f""
+
+    def __hash__(self) -> int:
+        return hash(
+            (
+                self.__class__.__name__,
+                *self._iter_parts(canonicalize_name(self.name)),
+            )
+        )
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Requirement):
+            return NotImplemented
+
+        return (
+            canonicalize_name(self.name) == canonicalize_name(other.name)
+            and self.extras == other.extras
+            and self.specifier == other.specifier
+            and self.url == other.url
+            and self.marker == other.marker
+        )
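+
+# Illustrative usage (a sketch):
+#     >>> r = Requirement('requests[security]>=2.8.1; python_version < "2.7"')
+#     >>> (r.name, sorted(r.extras), str(r.specifier))
+#     ('requests', ['security'], '>=2.8.1')
+#     >>> str(r)
+#     'requests[security]>=2.8.1; python_version < "2.7"'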
diff --git a/setuptools/_vendor/wheel/vendored/packaging/specifiers.py b/setuptools/_vendor/wheel/vendored/packaging/specifiers.py
new file mode 100644
index 0000000000..6d4066ae27
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/specifiers.py
@@ -0,0 +1,1011 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
+    from packaging.version import Version
+"""
+
+import abc
+import itertools
+import re
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
+
+from .utils import canonicalize_version
+from .version import Version
+
+UnparsedVersion = Union[Version, str]
+UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
+CallableOperator = Callable[[Version, str], bool]
+
+
+def _coerce_version(version: UnparsedVersion) -> Version:
+    if not isinstance(version, Version):
+        version = Version(version)
+    return version
+
+
+class InvalidSpecifier(ValueError):
+    """
+    Raised when attempting to create a :class:`Specifier` with a specifier
+    string that is invalid.
+
+    >>> Specifier("lolwat")
+    Traceback (most recent call last):
+        ...
+    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
+    """
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def __str__(self) -> str:
+        """
+        Returns the str representation of this Specifier-like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self) -> int:
+        """
+        Returns a hash value for this Specifier-like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other: object) -> bool:
+        """
+        Returns a boolean representing whether or not the two Specifier-like
+        objects are equal.
+
+        :param other: The other object to check against.
+        """
+
+    @property
+    @abc.abstractmethod
+    def prereleases(self) -> Optional[bool]:
+        """Whether or not pre-releases as a whole are allowed.
+
+        This can be set to either ``True`` or ``False`` to explicitly enable or disable
+        prereleases or it can be set to ``None`` (the default) to use default semantics.
+        """
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        """Setter for :attr:`prereleases`.
+
+        :param value: The value to set.
+        """
+
+    @abc.abstractmethod
+    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
+        """
+        Determines if the given item is contained within this specifier.
+        """
+
+    @abc.abstractmethod
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """
+        Takes an iterable of items and filters them so that only items which
+        are contained within this specifier are allowed in it.
+        """
+
+
+class Specifier(BaseSpecifier):
+    """This class abstracts handling of version specifiers.
+
+    .. tip::
+
+        It is generally not required to instantiate this manually. You should instead
+        prefer to work with :class:`SpecifierSet` instead, which can parse
+        comma-separated version specifiers (which is what package metadata contains).
+    """
+
+    _operator_regex_str = r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        """
+    _version_regex_str = r"""
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s;)]*  # The arbitrary version can be just about anything,
+                          # we match everything except for whitespace, a
+                          # semi-colon for marker support, and a closing paren
+                          # since versions can be enclosed in them.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+
+                # You cannot use a wild card and a pre-release, post-release, a dev or
+                # local version together so group them with a | and make them optional.
+                (?:
+                    \.\*  # Wild card syntax of .*
+                    |
+                    (?:                                  # pre release
+                        [-_\.]?
+                        (alpha|beta|preview|pre|a|b|c|rc)
+                        [-_\.]?
+                        [0-9]*
+                    )?
+                    (?:                                  # post release
+                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                    )?
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+        """Initialize a Specifier instance.
+
+        :param spec:
+            The string representation of a specifier which will be parsed and
+            normalized before use.
+        :param prereleases:
+            This tells the specifier if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+        :raises InvalidSpecifier:
+            If the given specifier is invalid (i.e. bad syntax).
+        """
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
+
+        self._spec: Tuple[str, str] = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
+    @property  # type: ignore[override]
+    def prereleases(self) -> bool:
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are if they are including an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*, if it does we
+            # want to remove before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if Version(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    @property
+    def operator(self) -> str:
+        """The operator of this specifier.
+
+        >>> Specifier("==1.2.3").operator
+        '=='
+        """
+        return self._spec[0]
+
+    @property
+    def version(self) -> str:
+        """The version of this specifier.
+
+        >>> Specifier("==1.2.3").version
+        '1.2.3'
+        """
+        return self._spec[1]
+
+    def __repr__(self) -> str:
+        """A representation of the Specifier that shows all internal state.
+
+        >>> Specifier('>=1.0.0')
+        <Specifier('>=1.0.0')>
+        >>> Specifier('>=1.0.0', prereleases=False)
+        <Specifier('>=1.0.0', prereleases=False)>
+        >>> Specifier('>=1.0.0', prereleases=True)
+        <Specifier('>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the Specifier that can be round-tripped.
+
+        >>> str(Specifier('>=1.0.0'))
+        '>=1.0.0'
+        >>> str(Specifier('>=1.0.0', prereleases=False))
+        '>=1.0.0'
+        """
+        return "{}{}".format(*self._spec)
+
+    @property
+    def _canonical_spec(self) -> Tuple[str, str]:
+        canonical_version = canonicalize_version(
+            self._spec[1],
+            strip_trailing_zero=(self._spec[0] != "~="),
+        )
+        return self._spec[0], canonical_version
+
+    def __hash__(self) -> int:
+        return hash(self._canonical_spec)
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two Specifier-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
+        True
+        >>> (Specifier("==1.2.3", prereleases=False) ==
+        ...  Specifier("==1.2.3", prereleases=True))
+        True
+        >>> Specifier("==1.2.3") == "==1.2.3"
+        True
+        >>> Specifier("==1.2.3") == Specifier("==1.2.4")
+        False
+        >>> Specifier("==1.2.3") == Specifier("~=1.2.3")
+        False
+        """
+        if isinstance(other, str):
+            try:
+                other = self.__class__(str(other))
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._canonical_spec == other._canonical_spec
+
+    def _get_operator(self, op: str) -> CallableOperator:
+        operator_callable: CallableOperator = getattr(
+            self, f"_compare_{self._operators[op]}"
+        )
+        return operator_callable
+
+    def _compare_compatible(self, prospective: Version, spec: str) -> bool:
+        # Compatible releases have an equivalent combination of >= and ==. That
+        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore suffix segments.
+        prefix = _version_join(
+            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+            prospective, prefix
+        )
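+
+    # Illustrative consequence (a sketch): "~=2.2" behaves like ">=2.2,==2.*",
+    # so Specifier("~=2.2").contains("2.9") is True while
+    # Specifier("~=2.2").contains("3.0") is False.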
+
+    def _compare_equal(self, prospective: Version, spec: str) -> bool:
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # In the case of prefix matching we want to ignore local segment.
+            normalized_prospective = canonicalize_version(
+                prospective.public, strip_trailing_zero=False
+            )
+            # Get the normalized version string ignoring the trailing .*
+            normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
+            # Split the spec out by bangs and dots, and pretend that there is
+            # an implicit dot in between a release segment and a pre-release segment.
+            split_spec = _version_split(normalized_spec)
+
+            # Split the prospective version out by bangs and dots, and pretend
+            # that there is an implicit dot in between a release segment and
+            # a pre-release segment.
+            split_prospective = _version_split(normalized_prospective)
+
+            # 0-pad the prospective version before shortening it to get the correct
+            # shortened version.
+            padded_prospective, _ = _pad_version(split_prospective, split_spec)
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            shortened_prospective = padded_prospective[: len(split_spec)]
+
+            return shortened_prospective == split_spec
+        else:
+            # Convert our spec string into a Version
+            spec_version = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
+            if not spec_version.local:
+                prospective = Version(prospective.public)
+
+            return prospective == spec_version
+
+    def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
+        return not self._compare_equal(prospective, spec)
+
+    def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) <= Version(spec)
+
+    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) >= Version(spec)
+
+    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+        # not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept
+        # post-release versions for the version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # greater than the spec version *and* it's not a pre-release of the
+        # same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+        return str(prospective).lower() == str(spec).lower()
+
+    def __contains__(self, item: Union[str, Version]) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in Specifier(">=1.2.3")
+        True
+        >>> Version("1.2.3") in Specifier(">=1.2.3")
+        True
+        >>> "1.0.0" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self, item: UnparsedVersion, prereleases: Optional[bool] = None
+    ) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this Specifier. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> Specifier(">=1.2.3").contains("1.2.3")
+        True
+        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
+        True
+        >>> Specifier(">=1.2.3").contains("1.0.0")
+        False
+        >>> Specifier(">=1.2.3").contains("1.3.0a1")
+        False
+        >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
+        True
+        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
+        True
+        """
+
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version; this allows us to have a shortcut for
+        # "2.0" in Specifier(">=2")
+        normalized_item = _coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not; if we do not support prereleases, then we can short-circuit
+        # the logic if this version is a prerelease.
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable: CallableOperator = self._get_operator(self.operator)
+        return operator_callable(normalized_item, self.version)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifier.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(Specifier().contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
+        ['1.2.3', '1.3', <Version('1.4')>]
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
+        ['1.5a1']
+        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        """
+
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = _coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version: str) -> List[str]:
+    """Split version into components.
+
+    The split components are intended for version comparison. The logic does
+    not attempt to retain the original version string, so joining the
+    components back with :func:`_version_join` may not produce the original
+    version string.
+    """
+    result: List[str] = []
+
+    epoch, _, rest = version.rpartition("!")
+    result.append(epoch or "0")
+
+    for item in rest.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def _version_join(components: List[str]) -> str:
+    """Join split version components into a version string.
+
+    This function assumes the input came from :func:`_version_split`, where the
+    first component must be the epoch (either empty or numeric), and all other
+    components numeric.
+    """
+    epoch, *rest = components
+    return f"{epoch}!{'.'.join(rest)}"
+
+
+def _is_not_suffix(segment: str) -> bool:
+    return not any(
+        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+    )
+
+
+def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]) :])
+    right_split.append(right[len(right_split[0]) :])
+
+    # Insert our padding
+    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+    return (
+        list(itertools.chain.from_iterable(left_split)),
+        list(itertools.chain.from_iterable(right_split)),
+    )
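For example, padding ["1", "2"] against ["1", "2", "0", "rc1"] zero-fills the
shorter release segment so both sides can be compared elementwise (a sketch;
_pad_version is a private helper):

    from packaging.specifiers import _pad_version

    left, right = _pad_version(["1", "2"], ["1", "2", "0", "rc1"])
    print(left)    # ['1', '2', '0']
    print(right)   # ['1', '2', '0', 'rc1']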
+
+
+class SpecifierSet(BaseSpecifier):
+    """This class abstracts handling of a set of version specifiers.
+
+    It can be passed a single specifier (``>=3.0``), a comma-separated list of
+    specifiers (``>=3.0,!=3.1``), or no specifier at all.
+    """
+
+    def __init__(
+        self, specifiers: str = "", prereleases: Optional[bool] = None
+    ) -> None:
+        """Initialize a SpecifierSet instance.
+
+        :param specifiers:
+            The string representation of a specifier or a comma-separated list of
+            specifiers which will be parsed and normalized before use.
+        :param prereleases:
+            This tells the SpecifierSet if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+
+        :raises InvalidSpecifier:
+            If the given ``specifiers`` are not parseable, then this exception will be
+            raised.
+        """
+
+        # Split on `,` to break each individual specifier into its own item, and
+        # strip each item to remove leading/trailing whitespace.
+        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+        # Make each individual specifier a Specifier and save in a frozen set for later.
+        self._specs = frozenset(map(Specifier, split_specifiers))
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    @property
+    def prereleases(self) -> Optional[bool]:
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    def __repr__(self) -> str:
+        """A representation of the specifier set that shows all internal state.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> SpecifierSet('>=1.0.0,!=2.0.0')
+        <SpecifierSet('!=2.0.0,>=1.0.0')>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f""
+
+    def __str__(self) -> str:
+        """A string representation of the specifier set that can be round-tripped.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
+        '!=1.0.1,>=1.0.0'
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
+        '!=1.0.1,>=1.0.0'
+        """
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self) -> int:
+        return hash(self._specs)
+
+    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+        """Return a SpecifierSet which is a combination of the two sets.
+
+        :param other: The other object to combine with.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
+        =1.0.0')>
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
+        =1.0.0')>
+        """
+        if isinstance(other, str):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+            )
+
+        return specifier
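A short usage sketch of the intersection operator, assuming the public
packaging.specifiers API:

    from packaging.specifiers import SpecifierSet

    combined = SpecifierSet(">=1.0") & "<2.0,!=1.5"
    print(str(combined))       # '!=1.5,<2.0,>=1.0' (str() sorts the specifiers)
    print("1.4" in combined)   # True
    print("1.5" in combined)   # False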
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two SpecifierSet-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
+        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
+        False
+        """
+        if isinstance(other, (str, Specifier)):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __len__(self) -> int:
+        """Returns the number of specifiers in this specifier set."""
+        return len(self._specs)
+
+    def __iter__(self) -> Iterator[Specifier]:
+        """
+        Returns an iterator over all the underlying :class:`Specifier` instances
+        in this specifier set.
+
+        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
+        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
+        """
+        return iter(self._specs)
+
+    def __contains__(self, item: UnparsedVersion) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self,
+        item: UnparsedVersion,
+        prereleases: Optional[bool] = None,
+        installed: Optional[bool] = None,
+    ) -> bool:
+        """Return whether or not the item is contained in this SpecifierSet.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this SpecifierSet. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
+        True
+        """
+        # Ensure that our item is a Version instance.
+        if not isinstance(item, Version):
+            item = Version(item)
+
+        # Determine if we're forcing a prerelease or not; if we're not forcing
+        # one for this particular call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        #       like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        if installed and item.is_prerelease:
+            item = Version(item.base_version)
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        #       will always return True, this is an explicit design decision.
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
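Note that the installed flag only takes effect once prereleases are permitted;
it then compares against the base version, which matters for pin-style
specifiers. A sketch:

    from packaging.specifiers import SpecifierSet

    s = SpecifierSet("==1.1")
    print(s.contains("1.1a1", prereleases=True))                  # False
    print(s.contains("1.1a1", prereleases=True, installed=True))  # True: base 1.1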
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifiers in this set.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
+        ['1.3', <Version('1.4')>]
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
+        []
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+
+        An "empty" SpecifierSet will filter items based on the presence of prerelease
+        versions in the set.
+
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet("").filter(["1.5a1"]))
+        ['1.5a1']
+        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        """
+        # Determine if we're forcing a prerelease or not; if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one, this will act as a logical AND amongst
+        # each specifier.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iter(iterable)
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases.
+        else:
+            filtered: List[UnparsedVersionVar] = []
+            found_prereleases: List[UnparsedVersionVar] = []
+
+            for item in iterable:
+                parsed_version = _coerce_version(item)
+
+                # Store any item which is a pre-release for later unless we've
+                # already found a final version or we are accepting prereleases
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases
+            if not filtered and found_prereleases and prereleases is None:
+                return iter(found_prereleases)
+
+            return iter(filtered)
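Because each specifier's filter() wraps the iterable in turn, the set acts as a
logical AND; a quick sketch:

    from packaging.specifiers import SpecifierSet

    print(list(SpecifierSet(">=1.0,<2.0").filter(["0.9", "1.5", "2.0", "1.0a1"])))
    # ['1.5'] -- '0.9' and '2.0' each fail a specifier, '1.0a1' is a prerelease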
diff --git a/setuptools/_vendor/wheel/vendored/packaging/tags.py b/setuptools/_vendor/wheel/vendored/packaging/tags.py
new file mode 100644
index 0000000000..89f1926137
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/tags.py
@@ -0,0 +1,571 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import logging
+import platform
+import re
+import struct
+import subprocess
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+    Dict,
+    FrozenSet,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+    cast,
+)
+
+from . import _manylinux, _musllinux
+
+logger = logging.getLogger(__name__)
+
+PythonVersion = Sequence[int]
+MacVersion = Tuple[int, int]
+
+INTERPRETER_SHORT_NAMES: Dict[str, str] = {
+    "python": "py",  # Generic.
+    "cpython": "cp",
+    "pypy": "pp",
+    "ironpython": "ip",
+    "jython": "jy",
+}
+
+
+_32_BIT_INTERPRETER = struct.calcsize("P") == 4
+
+
+class Tag:
+    """
+    A representation of the tag triple for a wheel.
+
+    Instances are considered immutable and thus are hashable. Equality checking
+    is also supported.
+    """
+
+    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
+
+    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
+        self._interpreter = interpreter.lower()
+        self._abi = abi.lower()
+        self._platform = platform.lower()
+        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
+        # that a set calls its `.isdisjoint()` method, which may be called hundreds of
+        # times when scanning a page of links for packages with tags matching that
+        # Set[Tag]. Pre-computing the value here produces significant speedups for
+        # downstream consumers.
+        self._hash = hash((self._interpreter, self._abi, self._platform))
+
+    @property
+    def interpreter(self) -> str:
+        return self._interpreter
+
+    @property
+    def abi(self) -> str:
+        return self._abi
+
+    @property
+    def platform(self) -> str:
+        return self._platform
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Tag):
+            return NotImplemented
+
+        return (
+            (self._hash == other._hash)  # Short-circuit ASAP for perf reasons.
+            and (self._platform == other._platform)
+            and (self._abi == other._abi)
+            and (self._interpreter == other._interpreter)
+        )
+
+    def __hash__(self) -> int:
+        return self._hash
+
+    def __str__(self) -> str:
+        return f"{self._interpreter}-{self._abi}-{self._platform}"
+
+    def __repr__(self) -> str:
+        return f"<{self} @ {id(self)}>"
+
+
+def parse_tag(tag: str) -> FrozenSet[Tag]:
+    """
+    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+
+    Returning a set is required due to the possibility that the tag is a
+    compressed tag set.
+    """
+    tags = set()
+    interpreters, abis, platforms = tag.split("-")
+    for interpreter in interpreters.split("."):
+        for abi in abis.split("."):
+            for platform_ in platforms.split("."):
+                tags.add(Tag(interpreter, abi, platform_))
+    return frozenset(tags)
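For instance, a compressed tag set expands into the cross product of its
dot-separated parts (a sketch against the public packaging.tags API):

    from packaging.tags import parse_tag

    tags = parse_tag("cp38.cp39-abi3-manylinux1_x86_64")
    print(sorted(str(t) for t in tags))
    # ['cp38-abi3-manylinux1_x86_64', 'cp39-abi3-manylinux1_x86_64']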
+
+
+def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
+    value: Union[int, str, None] = sysconfig.get_config_var(name)
+    if value is None and warn:
+        logger.debug(
+            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+        )
+    return value
+
+
+def _normalize_string(string: str) -> str:
+    return string.replace(".", "_").replace("-", "_").replace(" ", "_")
+
+
+def _is_threaded_cpython(abis: List[str]) -> bool:
+    """
+    Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+    The threaded builds are indicated by a "t" in the abiflags.
+    """
+    if len(abis) == 0:
+        return False
+    # expect e.g., cp313
+    m = re.match(r"cp\d+(.*)", abis[0])
+    if not m:
+        return False
+    abiflags = m.group(1)
+    return "t" in abiflags
+
+
+def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
+    """
+    Determine if the Python version supports abi3.
+
+    PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
+    builds do not support abi3.
+    """
+    return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
+
+
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+    py_version = tuple(py_version)  # To allow for version comparison.
+    abis = []
+    version = _version_nodot(py_version[:2])
+    threading = debug = pymalloc = ucs4 = ""
+    with_debug = _get_config_var("Py_DEBUG", warn)
+    has_refcount = hasattr(sys, "gettotalrefcount")
+    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+    # extension modules is the best option.
+    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+    if with_debug or (with_debug is None and (has_refcount or has_ext)):
+        debug = "d"
+    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
+        threading = "t"
+    if py_version < (3, 8):
+        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
+        if with_pymalloc or with_pymalloc is None:
+            pymalloc = "m"
+        if py_version < (3, 3):
+            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
+            if unicode_size == 4 or (
+                unicode_size is None and sys.maxunicode == 0x10FFFF
+            ):
+                ucs4 = "u"
+    elif debug:
+        # Debug builds can also load "normal" extension modules.
+        # We can also assume no UCS-4 or pymalloc requirement.
+        abis.append(f"cp{version}{threading}")
+    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
+    return abis
+
+
+def cpython_tags(
+    python_version: Optional[PythonVersion] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a CPython interpreter.
+
+    The tags consist of:
+    - cp<python_version>-<abi>-<platform>
+    - cp<python_version>-abi3-<platform>
+    - cp<python_version>-none-<platform>
+    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.
+
+    If python_version only specifies a major version then user-provided ABIs and
+    the 'none' ABI tag will be used.
+
+    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+    their normal position and not at the beginning.
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+
+    interpreter = f"cp{_version_nodot(python_version[:2])}"
+
+    if abis is None:
+        if len(python_version) > 1:
+            abis = _cpython_abis(python_version, warn)
+        else:
+            abis = []
+    abis = list(abis)
+    # 'abi3' and 'none' are explicitly handled later.
+    for explicit_abi in ("abi3", "none"):
+        try:
+            abis.remove(explicit_abi)
+        except ValueError:
+            pass
+
+    platforms = list(platforms or platform_tags())
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+    threading = _is_threaded_cpython(abis)
+    use_abi3 = _abi3_applies(python_version, threading)
+    if use_abi3:
+        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
+    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
+
+    if use_abi3:
+        for minor_version in range(python_version[1] - 1, 1, -1):
+            for platform_ in platforms:
+                interpreter = "cp{version}".format(
+                    version=_version_nodot((python_version[0], minor_version))
+                )
+                yield Tag(interpreter, "abi3", platform_)
+
+
+def _generic_abi() -> List[str]:
+    """
+    Return the ABI tag based on EXT_SUFFIX.
+    """
+    # The following are examples of `EXT_SUFFIX`.
+    # We want to keep the parts which are related to the ABI and remove the
+    # parts which are related to the platform:
+    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
+    # - mac:     '.cpython-310-darwin.so'           => cp310
+    # - win:     '.cp310-win_amd64.pyd'             => cp310
+    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
+    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
+    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
+    #                                               => graalpy_38_native
+
+    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
+    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
+        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
+    parts = ext_suffix.split(".")
+    if len(parts) < 3:
+        # CPython 3.7 and earlier use ".pyd" on Windows.
+        return _cpython_abis(sys.version_info[:2])
+    soabi = parts[1]
+    if soabi.startswith("cpython"):
+        # non-windows
+        abi = "cp" + soabi.split("-")[1]
+    elif soabi.startswith("cp"):
+        # windows
+        abi = soabi.split("-")[0]
+    elif soabi.startswith("pypy"):
+        abi = "-".join(soabi.split("-")[:2])
+    elif soabi.startswith("graalpy"):
+        abi = "-".join(soabi.split("-")[:3])
+    elif soabi:
+        # pyston, ironpython, others?
+        abi = soabi
+    else:
+        return []
+    return [_normalize_string(abi)]
+
+
+def generic_tags(
+    interpreter: Optional[str] = None,
+    abis: Optional[Iterable[str]] = None,
+    platforms: Optional[Iterable[str]] = None,
+    *,
+    warn: bool = False,
+) -> Iterator[Tag]:
+    """
+    Yields the tags for a generic interpreter.
+
+    The tags consist of:
+    - <interpreter>-<abi>-<platform>
+
+    The "none" ABI will be added if it was not explicitly provided.
+    """
+    if not interpreter:
+        interp_name = interpreter_name()
+        interp_version = interpreter_version(warn=warn)
+        interpreter = "".join([interp_name, interp_version])
+    if abis is None:
+        abis = _generic_abi()
+    else:
+        abis = list(abis)
+    platforms = list(platforms or platform_tags())
+    if "none" not in abis:
+        abis.append("none")
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+    """
+    Yields Python versions in descending order.
+
+    After the latest version, the major-only version will be yielded, and then
+    all previous versions of that major version.
+    """
+    if len(py_version) > 1:
+        yield f"py{_version_nodot(py_version[:2])}"
+    yield f"py{py_version[0]}"
+    if len(py_version) > 1:
+        for minor in range(py_version[1] - 1, -1, -1):
+            yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+    python_version: Optional[PythonVersion] = None,
+    interpreter: Optional[str] = None,
+    platforms: Optional[Iterable[str]] = None,
+) -> Iterator[Tag]:
+    """
+    Yields the sequence of tags that are compatible with a specific version of Python.
+
+    The tags consist of:
+    - py*-none-<platform>
+    - <interpreter>-none-any  # ... if `interpreter` is provided.
+    - py*-none-any
+    """
+    if not python_version:
+        python_version = sys.version_info[:2]
+    platforms = list(platforms or platform_tags())
+    for version in _py_interpreter_range(python_version):
+        for platform_ in platforms:
+            yield Tag(version, "none", platform_)
+    if interpreter:
+        yield Tag(interpreter, "none", "any")
+    for version in _py_interpreter_range(python_version):
+        yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+        if version > (10, 5) or version < (10, 4):
+            return []
+        formats.append("fat64")
+
+    elif cpu_arch == "ppc":
+        if version > (10, 6):
+            return []
+        formats.extend(["fat32", "fat"])
+
+    if cpu_arch in {"arm64", "x86_64"}:
+        formats.append("universal2")
+
+    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+        formats.append("universal")
+
+    return formats
+
+
+def mac_platforms(
+    version: Optional[MacVersion] = None, arch: Optional[str] = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for a macOS system.
+
+    The `version` parameter is a two-item tuple specifying the macOS version to
+    generate platform tags for. The `arch` parameter is the CPU architecture to
+    generate platform tags for. Both parameters default to the appropriate value
+    for the current system.
+    """
+    version_str, _, cpu_arch = platform.mac_ver()
+    if version is None:
+        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+        if version == (10, 16):
+            # When built against an older macOS SDK, Python will report macOS 10.16
+            # instead of the real version.
+            version_str = subprocess.run(
+                [
+                    sys.executable,
+                    "-sS",
+                    "-c",
+                    "import platform; print(platform.mac_ver()[0])",
+                ],
+                check=True,
+                env={"SYSTEM_VERSION_COMPAT": "0"},
+                stdout=subprocess.PIPE,
+                text=True,
+            ).stdout
+            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+    else:
+        version = version
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+    else:
+        arch = arch
+
+    if (10, 0) <= version and version < (11, 0):
+        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+        # "minor" version number.  The major version was always 10.
+        for minor_version in range(version[1], -1, -1):
+            compat_version = 10, minor_version
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=10, minor=minor_version, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Starting with Mac OS 11, each yearly release bumps the major version
+        # number.   The minor versions are now the midyear updates.
+        for major_version in range(version[0], 10, -1):
+            compat_version = major_version, 0
+            binary_formats = _mac_binary_formats(compat_version, arch)
+            for binary_format in binary_formats:
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=major_version, minor=0, binary_format=binary_format
+                )
+
+    if version >= (11, 0):
+        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+        # releases exist.
+        #
+        # However, the "universal2" binary format can have a
+        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+        # that version of macOS.
+        if arch == "x86_64":
+            for minor_version in range(16, 3, -1):
+                compat_version = 10, minor_version
+                binary_formats = _mac_binary_formats(compat_version, arch)
+                for binary_format in binary_formats:
+                    yield "macosx_{major}_{minor}_{binary_format}".format(
+                        major=compat_version[0],
+                        minor=compat_version[1],
+                        binary_format=binary_format,
+                    )
+        else:
+            for minor_version in range(16, 3, -1):
+                compat_version = 10, minor_version
+                binary_format = "universal2"
+                yield "macosx_{major}_{minor}_{binary_format}".format(
+                    major=compat_version[0],
+                    minor=compat_version[1],
+                    binary_format=binary_format,
+                )
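Pinning both parameters makes the output deterministic on any host; a sketch:

    from packaging.tags import mac_platforms

    tags = list(mac_platforms(version=(10, 6), arch="x86_64"))
    print(tags[:3])
    # ['macosx_10_6_x86_64', 'macosx_10_6_intel', 'macosx_10_6_fat64']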
+
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+    linux = _normalize_string(sysconfig.get_platform())
+    if not linux.startswith("linux_"):
+        # we should never be here, just yield the sysconfig one and return
+        yield linux
+        return
+    if is_32bit:
+        if linux == "linux_x86_64":
+            linux = "linux_i686"
+        elif linux == "linux_aarch64":
+            linux = "linux_armv8l"
+    _, arch = linux.split("_", 1)
+    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+    yield from _manylinux.platform_tags(archs)
+    yield from _musllinux.platform_tags(archs)
+    for arch in archs:
+        yield f"linux_{arch}"
+
+
+def _generic_platforms() -> Iterator[str]:
+    yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+    """
+    Provides the platform tags for this installation.
+    """
+    if platform.system() == "Darwin":
+        return mac_platforms()
+    elif platform.system() == "Linux":
+        return _linux_platforms()
+    else:
+        return _generic_platforms()
+
+
+def interpreter_name() -> str:
+    """
+    Returns the name of the running interpreter.
+
+    Some implementations have a reserved, two-letter abbreviation which will
+    be returned when appropriate.
+    """
+    name = sys.implementation.name
+    return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+    """
+    Returns the version of the running interpreter.
+    """
+    version = _get_config_var("py_version_nodot", warn=warn)
+    if version:
+        version = str(version)
+    else:
+        version = _version_nodot(sys.version_info[:2])
+    return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+    return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+    """
+    Returns the sequence of tag triples for the running interpreter.
+
+    The order of the sequence corresponds to priority order for the
+    interpreter, from most to least important.
+    """
+
+    interp_name = interpreter_name()
+    if interp_name == "cp":
+        yield from cpython_tags(warn=warn)
+    else:
+        yield from generic_tags()
+
+    if interp_name == "pp":
+        interp = "pp3"
+    elif interp_name == "cp":
+        interp = "cp" + interpreter_version(warn=warn)
+    else:
+        interp = None
+    yield from compatible_tags(interpreter=interp)
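Callers typically rely on that priority order, e.g. to choose the best matching
wheel; a minimal sketch:

    from packaging.tags import sys_tags

    best = next(iter(sys_tags()))   # the most specific tag for this interpreter
    print(best.interpreter, best.abi, best.platform)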
diff --git a/setuptools/_vendor/wheel/vendored/packaging/utils.py b/setuptools/_vendor/wheel/vendored/packaging/utils.py
new file mode 100644
index 0000000000..c2c2f75aa8
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/utils.py
@@ -0,0 +1,172 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import re
+from typing import FrozenSet, NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidName(ValueError):
+    """
+    An invalid distribution name; users should refer to the packaging user guide.
+    """
+
+
+class InvalidWheelFilename(ValueError):
+    """
+    An invalid wheel filename was found; users should refer to PEP 427.
+    """
+
+
+class InvalidSdistFilename(ValueError):
+    """
+    An invalid sdist filename was found; users should refer to the packaging user guide.
+    """
+
+
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
+_canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+    if validate and not _validate_regex.match(name):
+        raise InvalidName(f"name is invalid: {name!r}")
+    # This is taken from PEP 503.
+    value = _canonicalize_regex.sub("-", name).lower()
+    return cast(NormalizedName, value)
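A sketch of the PEP 503 normalization, assuming the public packaging.utils API:

    from packaging.utils import canonicalize_name

    print(canonicalize_name("Foo.Bar_baz"))   # 'foo-bar-baz'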
+
+
+def is_normalized_name(name: str) -> bool:
+    return _normalized_regex.match(name) is not None
+
+
+def canonicalize_version(
+    version: Union[Version, str], *, strip_trailing_zero: bool = True
+) -> str:
+    """
+    This is very similar to Version.__str__, but has one subtle difference
+    with the way it handles the release segment.
+    """
+    if isinstance(version, str):
+        try:
+            parsed = Version(version)
+        except InvalidVersion:
+            # Legacy versions cannot be normalized
+            return version
+    else:
+        parsed = version
+
+    parts = []
+
+    # Epoch
+    if parsed.epoch != 0:
+        parts.append(f"{parsed.epoch}!")
+
+    # Release segment
+    release_segment = ".".join(str(x) for x in parsed.release)
+    if strip_trailing_zero:
+        # NB: This strips trailing '.0's to normalize
+        release_segment = re.sub(r"(\.0)+$", "", release_segment)
+    parts.append(release_segment)
+
+    # Pre-release
+    if parsed.pre is not None:
+        parts.append("".join(str(x) for x in parsed.pre))
+
+    # Post-release
+    if parsed.post is not None:
+        parts.append(f".post{parsed.post}")
+
+    # Development release
+    if parsed.dev is not None:
+        parts.append(f".dev{parsed.dev}")
+
+    # Local version segment
+    if parsed.local is not None:
+        parts.append(f"+{parsed.local}")
+
+    return "".join(parts)
+
+
+def parse_wheel_filename(
+    filename: str,
+) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
+    if not filename.endswith(".whl"):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (extension must be '.whl'): {filename}"
+        )
+
+    filename = filename[:-4]
+    dashes = filename.count("-")
+    if dashes not in (4, 5):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (wrong number of parts): {filename}"
+        )
+
+    parts = filename.split("-", dashes - 2)
+    name_part = parts[0]
+    # See PEP 427 for the rules on escaping the project name.
+    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+        raise InvalidWheelFilename(f"Invalid project name: {filename}")
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(parts[1])
+    except InvalidVersion as e:
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (invalid version): {filename}"
+        ) from e
+
+    if dashes == 5:
+        build_part = parts[2]
+        build_match = _build_tag_regex.match(build_part)
+        if build_match is None:
+            raise InvalidWheelFilename(
+                f"Invalid build number: {build_part} in '{filename}'"
+            )
+        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+    else:
+        build = ()
+    tags = parse_tag(parts[-1])
+    return (name, version, build, tags)
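A sketch of the returned 4-tuple for a typical wheel name:

    from packaging.utils import parse_wheel_filename

    name, version, build, tags = parse_wheel_filename("pip-20.0-py2.py3-none-any.whl")
    print(name, version, build)           # pip 20.0 ()
    print(sorted(str(t) for t in tags))   # ['py2-none-any', 'py3-none-any']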
+
+
+def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
+    if filename.endswith(".tar.gz"):
+        file_stem = filename[: -len(".tar.gz")]
+    elif filename.endswith(".zip"):
+        file_stem = filename[: -len(".zip")]
+    else:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+            f" {filename}"
+        )
+
+    # We are requiring a PEP 440 version, which cannot contain dashes,
+    # so we split on the last dash.
+    name_part, sep, version_part = file_stem.rpartition("-")
+    if not sep:
+        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(version_part)
+    except InvalidVersion as e:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (invalid version): {filename}"
+        ) from e
+
+    return (name, version)
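Because the split is on the last dash, dashes inside the project name survive;
a sketch:

    from packaging.utils import parse_sdist_filename

    print(parse_sdist_filename("foo-bar-1.0.tar.gz"))
    # ('foo-bar', <Version('1.0')>)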
diff --git a/setuptools/_vendor/wheel/vendored/packaging/version.py b/setuptools/_vendor/wheel/vendored/packaging/version.py
new file mode 100644
index 0000000000..cda8e99935
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/version.py
@@ -0,0 +1,561 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.version import parse, Version
+"""
+
+import itertools
+import re
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
+
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
+    NegativeInfinityType,
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
+]
+CmpKey = Tuple[
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
+]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
+class _Version(NamedTuple):
+    epoch: int
+    release: Tuple[int, ...]
+    dev: Optional[Tuple[str, int]]
+    pre: Optional[Tuple[str, int]]
+    post: Optional[Tuple[str, int]]
+    local: Optional[LocalType]
+
+
+def parse(version: str) -> "Version":
+    """Parse the given version string.
+
+    >>> parse('1.0.dev1')
+    <Version('1.0.dev1')>
+
+    :param version: The version string to parse.
+    :raises InvalidVersion: When the version string is not a valid version.
+    """
+    return Version(version)
+
+
+class InvalidVersion(ValueError):
+    """Raised when a version string is not a valid version.
+
+    >>> Version("invalid")
+    Traceback (most recent call last):
+        ...
+    packaging.version.InvalidVersion: Invalid version: 'invalid'
+    """
+
+
+class _BaseVersion:
+    _key: Tuple[Any, ...]
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    # Please keep the duplicated `isinstance` check
+    # in the six comparisons hereunder
+    # unless you find a way to avoid adding overhead function calls.
+    def __lt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other: "_BaseVersion") -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+_VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
+
+class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _key: CmpKey
+
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: '{version}'")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f""
+
+    def __str__(self) -> str:
+        """A string representation of the version that can be rounded-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        return self._version.epoch
+
+    @property
+    def release(self) -> Tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        return self._version.release
+
+    @property
+    def pre(self) -> Optional[Tuple[str, int]]:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        return self._version.pre
+
+    @property
+    def post(self) -> Optional[int]:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> Optional[int]:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> Optional[str]:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1.2.3+abc.dev1").public
+        '1.2.3'
+        """
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3+abc.dev1").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any pre or post
+        release markers.
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
+) -> Optional[Tuple[str, int]]:
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume that if we are given a number but not a letter, then this
+        # is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: Tuple[int, ...],
+    pre: Optional[Tuple[str, int]],
+    post: Optional[Tuple[str, int]],
+    dev: Optional[Tuple[str, int]],
+    local: Optional[LocalType],
+) -> CmpKey:
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop the now-leading
+    # zeros until we reach something non-zero, then re-reverse the rest back
+    # into the correct order and use that tuple as our sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: CmpPrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: CmpPrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: CmpPrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: CmpLocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
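
For orientation, the sentinel juggling in _cmpkey above is what yields the PEP 440
total ordering: dev releases sort below pre-releases, final releases below
post-releases, and a local version just above its public counterpart. A quick
sketch against the public API (assuming packaging >= 24.0 is importable):

    from packaging.version import Version

    versions = ["1.0.post1", "1.0+abc", "1.0", "1.0rc1", "1.0a0", "1.0.dev0"]
    print(sorted(versions, key=Version))
    # ['1.0.dev0', '1.0a0', '1.0rc1', '1.0', '1.0+abc', '1.0.post1']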
diff --git a/setuptools/_vendor/wheel/vendored/vendor.txt b/setuptools/_vendor/wheel/vendored/vendor.txt
new file mode 100644
index 0000000000..14666103a8
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/vendor.txt
@@ -0,0 +1 @@
+packaging==24.0
diff --git a/setuptools/_vendor/wheel/wheelfile.py b/setuptools/_vendor/wheel/wheelfile.py
index 83a31772bd..6440e90ade 100644
--- a/setuptools/_vendor/wheel/wheelfile.py
+++ b/setuptools/_vendor/wheel/wheelfile.py
@@ -9,7 +9,8 @@
 from io import StringIO, TextIOWrapper
 from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
 
-from .util import log, urlsafe_b64decode, urlsafe_b64encode
+from wheel.cli import WheelError
+from wheel.util import log, urlsafe_b64decode, urlsafe_b64encode
 
 # Non-greedy matching of an optional build number may be too clever (more
 # invalid wheel filenames will match). Separate regex for .dist-info?
@@ -193,7 +194,3 @@ def close(self):
             self.writestr(self.record_path, data.getvalue())
 
         ZipFile.close(self)
-
-
-class WheelError(Exception):
-    pass
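
With the hunk above, the vendored wheelfile module stops defining its own
WheelError and reuses the one from wheel.cli. A minimal sketch of catching it,
assuming an installed wheel >= 0.43:

    from wheel.cli import WheelError
    from wheel.wheelfile import WheelFile

    try:
        WheelFile("not-a-valid-wheel-name.zip")  # fails the wheel filename regex
    except WheelError as exc:
        print(f"invalid wheel: {exc}")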
diff --git a/setuptools/_vendor/zipp-3.19.2.dist-info/INSTALLER b/setuptools/_vendor/zipp-3.19.2.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/zipp-3.19.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/ordered_set-3.1.1.dist-info/MIT-LICENSE b/setuptools/_vendor/zipp-3.19.2.dist-info/LICENSE
similarity index 58%
rename from setuptools/_vendor/ordered_set-3.1.1.dist-info/MIT-LICENSE
rename to setuptools/_vendor/zipp-3.19.2.dist-info/LICENSE
index 25117ef4f1..1bb5a44356 100644
--- a/setuptools/_vendor/ordered_set-3.1.1.dist-info/MIT-LICENSE
+++ b/setuptools/_vendor/zipp-3.19.2.dist-info/LICENSE
@@ -1,11 +1,9 @@
-Copyright (c) 2018 Luminoso Technologies, Inc.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
 
 The above copyright notice and this permission notice shall be included in
 all copies or substantial portions of the Software.
@@ -15,5 +13,5 @@ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/setuptools/_vendor/zipp-3.19.2.dist-info/METADATA b/setuptools/_vendor/zipp-3.19.2.dist-info/METADATA
new file mode 100644
index 0000000000..1399281717
--- /dev/null
+++ b/setuptools/_vendor/zipp-3.19.2.dist-info/METADATA
@@ -0,0 +1,102 @@
+Metadata-Version: 2.1
+Name: zipp
+Version: 3.19.2
+Summary: Backport of pathlib-compatible object wrapper for zip files
+Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
+Project-URL: Homepage, https://github.com/jaraco/zipp
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-mypy ; extra == 'test'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
+Requires-Dist: jaraco.itertools ; extra == 'test'
+Requires-Dist: jaraco.functools ; extra == 'test'
+Requires-Dist: more-itertools ; extra == 'test'
+Requires-Dist: big-O ; extra == 'test'
+Requires-Dist: pytest-ignore-flaky ; extra == 'test'
+Requires-Dist: jaraco.test ; extra == 'test'
+Requires-Dist: importlib-resources ; (python_version < "3.9") and extra == 'test'
+
+.. image:: https://img.shields.io/pypi/v/zipp.svg
+   :target: https://pypi.org/project/zipp
+
+.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+
+.. image:: https://github.com/jaraco/zipp/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. .. image:: https://readthedocs.org/projects/PROJECT_RTD/badge/?version=latest
+..    :target: https://PROJECT_RTD.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/zipp
+   :target: https://tidelift.com/subscription/pkg/pypi-zipp?utm_source=pypi-zipp&utm_medium=readme
+
+
+A pathlib-compatible Zipfile object wrapper. Official backport of the standard library
+`Path object `_.
+
+
+Compatibility
+=============
+
+New features are introduced in this third-party library and later merged
+into CPython. The following table indicates which versions of this library
+were contributed to different versions in the standard library:
+
+.. list-table::
+   :header-rows: 1
+
+   * - zipp
+     - stdlib
+   * - 3.18
+     - 3.13
+   * - 3.16
+     - 3.12
+   * - 3.5
+     - 3.11
+   * - 3.2
+     - 3.10
+   * - 3.3 ??
+     - 3.9
+   * - 1.0
+     - 3.8
+
+
+Usage
+=====
+
+Use ``zipp.Path`` in place of ``zipfile.Path`` on any Python.
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
diff --git a/setuptools/_vendor/zipp-3.19.2.dist-info/RECORD b/setuptools/_vendor/zipp-3.19.2.dist-info/RECORD
new file mode 100644
index 0000000000..77c02835d8
--- /dev/null
+++ b/setuptools/_vendor/zipp-3.19.2.dist-info/RECORD
@@ -0,0 +1,15 @@
+zipp-3.19.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+zipp-3.19.2.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+zipp-3.19.2.dist-info/METADATA,sha256=UIrk_kMIHGSwsKKChYizqMw0MMZpPRZ2ZiVpQAsN_bE,3575
+zipp-3.19.2.dist-info/RECORD,,
+zipp-3.19.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+zipp-3.19.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+zipp-3.19.2.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
+zipp/__init__.py,sha256=QuI1g00G4fRAcGt-HqbV0oWIkmSgedCGGYsHHYzNa8A,13412
+zipp/__pycache__/__init__.cpython-312.pyc,,
+zipp/__pycache__/glob.cpython-312.pyc,,
+zipp/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+zipp/compat/__pycache__/__init__.cpython-312.pyc,,
+zipp/compat/__pycache__/py310.cpython-312.pyc,,
+zipp/compat/py310.py,sha256=eZpkW0zRtunkhEh8jjX3gCGe22emoKCBJw72Zt4RkhA,219
+zipp/glob.py,sha256=etWpnfEoRyfUvrUsi6sTiGmErvPwe6HzY6pT8jg_lUI,3082
diff --git a/setuptools/_vendor/zipp-3.19.2.dist-info/REQUESTED b/setuptools/_vendor/zipp-3.19.2.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/zipp-3.19.2.dist-info/WHEEL b/setuptools/_vendor/zipp-3.19.2.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/setuptools/_vendor/zipp-3.19.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt b/setuptools/_vendor/zipp-3.19.2.dist-info/top_level.txt
similarity index 100%
rename from setuptools/_vendor/zipp-3.7.0.dist-info/top_level.txt
rename to setuptools/_vendor/zipp-3.19.2.dist-info/top_level.txt
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/METADATA b/setuptools/_vendor/zipp-3.7.0.dist-info/METADATA
deleted file mode 100644
index b1308b5f6e..0000000000
--- a/setuptools/_vendor/zipp-3.7.0.dist-info/METADATA
+++ /dev/null
@@ -1,58 +0,0 @@
-Metadata-Version: 2.1
-Name: zipp
-Version: 3.7.0
-Summary: Backport of pathlib-compatible object wrapper for zip files
-Home-page: https://github.com/jaraco/zipp
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-License: UNKNOWN
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.7
-License-File: LICENSE
-Provides-Extra: docs
-Requires-Dist: sphinx ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: pytest-flake8 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
-Requires-Dist: jaraco.itertools ; extra == 'testing'
-Requires-Dist: func-timeout ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/zipp.svg
-   :target: `PyPI link`_
-
-.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
-   :target: `PyPI link`_
-
-.. _PyPI link: https://pypi.org/project/zipp
-
-.. image:: https://github.com/jaraco/zipp/workflows/tests/badge.svg
-   :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
-.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest
-..    :target: https://skeleton.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2021-informational
-   :target: https://blog.jaraco.com/skeleton
-
-
-A pathlib-compatible Zipfile object wrapper. Official backport of the standard library
-`Path object `_.
-
-
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD b/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD
deleted file mode 100644
index adc797bc2e..0000000000
--- a/setuptools/_vendor/zipp-3.7.0.dist-info/RECORD
+++ /dev/null
@@ -1,9 +0,0 @@
-__pycache__/zipp.cpython-312.pyc,,
-zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
-zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261
-zipp-3.7.0.dist-info/RECORD,,
-zipp-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-zipp-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
-zipp-3.7.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
-zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
diff --git a/setuptools/_vendor/zipp-3.7.0.dist-info/WHEEL b/setuptools/_vendor/zipp-3.7.0.dist-info/WHEEL
deleted file mode 100644
index becc9a66ea..0000000000
--- a/setuptools/_vendor/zipp-3.7.0.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.37.1)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/setuptools/_vendor/zipp.py b/setuptools/_vendor/zipp/__init__.py
similarity index 52%
rename from setuptools/_vendor/zipp.py
rename to setuptools/_vendor/zipp/__init__.py
index 26b723c1fd..d65297b835 100644
--- a/setuptools/_vendor/zipp.py
+++ b/setuptools/_vendor/zipp/__init__.py
@@ -3,13 +3,13 @@
 import zipfile
 import itertools
 import contextlib
-import sys
 import pathlib
+import re
+import stat
+import sys
 
-if sys.version_info < (3, 7):
-    from collections import OrderedDict
-else:
-    OrderedDict = dict
+from .compat.py310 import text_encoding
+from .glob import Translator
 
 
 __all__ = ['Path']
@@ -56,7 +56,7 @@ def _ancestry(path):
         path, tail = posixpath.split(path)
 
 
-_dedupe = OrderedDict.fromkeys
+_dedupe = dict.fromkeys
 """Deduplicate an iterable in original order"""
 
 
@@ -68,10 +68,95 @@ def _difference(minuend, subtrahend):
     return itertools.filterfalse(set(subtrahend).__contains__, minuend)
 
 
-class CompleteDirs(zipfile.ZipFile):
+class InitializedState:
+    """
+    Mix-in to save the initialization state for pickling.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.__args = args
+        self.__kwargs = kwargs
+        super().__init__(*args, **kwargs)
+
+    def __getstate__(self):
+        return self.__args, self.__kwargs
+
+    def __setstate__(self, state):
+        args, kwargs = state
+        super().__init__(*args, **kwargs)
+
+
+class SanitizedNames:
+    """
+    ZipFile mix-in to ensure names are sanitized.
+    """
+
+    def namelist(self):
+        return list(map(self._sanitize, super().namelist()))
+
+    @staticmethod
+    def _sanitize(name):
+        r"""
+        Ensure a relative path with posix separators and no dot names.
+
+        Modeled after
+        https://github.com/python/cpython/blob/bcc1be39cb1d04ad9fc0bd1b9193d3972835a57c/Lib/zipfile/__init__.py#L1799-L1813
+        but provides consistent cross-platform behavior.
+
+        >>> san = SanitizedNames._sanitize
+        >>> san('/foo/bar')
+        'foo/bar'
+        >>> san('//foo.txt')
+        'foo.txt'
+        >>> san('foo/.././bar.txt')
+        'foo/bar.txt'
+        >>> san('foo../.bar.txt')
+        'foo../.bar.txt'
+        >>> san('\\foo\\bar.txt')
+        'foo/bar.txt'
+        >>> san('D:\\foo.txt')
+        'D/foo.txt'
+        >>> san('\\\\server\\share\\file.txt')
+        'server/share/file.txt'
+        >>> san('\\\\?\\GLOBALROOT\\Volume3')
+        '?/GLOBALROOT/Volume3'
+        >>> san('\\\\.\\PhysicalDrive1\\root')
+        'PhysicalDrive1/root'
+
+        Retain any trailing slash.
+        >>> san('abc/')
+        'abc/'
+
+        Raises a ValueError if the result is empty.
+        >>> san('../..')
+        Traceback (most recent call last):
+        ...
+        ValueError: Empty filename
+        """
+
+        def allowed(part):
+            return part and part not in {'..', '.'}
+
+        # Remove the drive letter.
+        # Don't use ntpath.splitdrive, because that also strips UNC paths
+        bare = re.sub('^([A-Z]):', r'\1', name, flags=re.IGNORECASE)
+        clean = bare.replace('\\', '/')
+        parts = clean.split('/')
+        joined = '/'.join(filter(allowed, parts))
+        if not joined:
+            raise ValueError("Empty filename")
+        return joined + '/' * name.endswith('/')
+
+
+class CompleteDirs(InitializedState, SanitizedNames, zipfile.ZipFile):
     """
     A ZipFile subclass that ensures that implied directories
     are always included in the namelist.
+
+    >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt']))
+    ['foo/', 'foo/bar/']
+    >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/']))
+    ['foo/']
     """
 
     @staticmethod
@@ -81,7 +166,7 @@ def _implied_dirs(names):
         return _dedupe(_difference(as_dirs, names))
 
     def namelist(self):
-        names = super(CompleteDirs, self).namelist()
+        names = super().namelist()
         return names + list(self._implied_dirs(names))
 
     def _name_set(self):
@@ -97,6 +182,17 @@ def resolve_dir(self, name):
         dir_match = name not in names and dirname in names
         return dirname if dir_match else name
 
+    def getinfo(self, name):
+        """
+        Supplement getinfo for implied dirs.
+        """
+        try:
+            return super().getinfo(name)
+        except KeyError:
+            if not name.endswith('/') or name not in self._name_set():
+                raise
+            return zipfile.ZipInfo(filename=name)
+
     @classmethod
     def make(cls, source):
         """
@@ -107,7 +203,7 @@ def make(cls, source):
             return source
 
         if not isinstance(source, zipfile.ZipFile):
-            return cls(_pathlib_compat(source))
+            return cls(source)
 
         # Only allow for FastLookup when supplied zipfile is read-only
         if 'r' not in source.mode:
@@ -116,6 +212,16 @@ def make(cls, source):
         source.__class__ = cls
         return source
 
+    @classmethod
+    def inject(cls, zf: zipfile.ZipFile) -> zipfile.ZipFile:
+        """
+        Given a writable zip file zf, inject directory entries for
+        any directories implied by the presence of children.
+        """
+        for name in cls._implied_dirs(zf.namelist()):
+            zf.writestr(name, b"")
+        return zf
+
 
 class FastLookup(CompleteDirs):
     """
@@ -126,30 +232,29 @@ class FastLookup(CompleteDirs):
     def namelist(self):
         with contextlib.suppress(AttributeError):
             return self.__names
-        self.__names = super(FastLookup, self).namelist()
+        self.__names = super().namelist()
         return self.__names
 
     def _name_set(self):
         with contextlib.suppress(AttributeError):
             return self.__lookup
-        self.__lookup = super(FastLookup, self)._name_set()
+        self.__lookup = super()._name_set()
         return self.__lookup
 
 
-def _pathlib_compat(path):
-    """
-    For path-like objects, convert to a filename for compatibility
-    on Python 3.6.1 and earlier.
-    """
-    try:
-        return path.__fspath__()
-    except AttributeError:
-        return str(path)
+def _extract_text_encoding(encoding=None, *args, **kwargs):
+    # compute stack level so that the caller of the caller sees any warning.
+    is_pypy = sys.implementation.name == 'pypy'
+    stack_level = 3 + is_pypy
+    return text_encoding(encoding, stack_level), args, kwargs
 
 
 class Path:
     """
-    A pathlib-compatible interface for zip files.
+    A :class:`importlib.resources.abc.Traversable` interface for zip files.
+
+    Implements many of the features users enjoy from
+    :class:`pathlib.Path`.
 
     Consider a zip file with this structure::
 
@@ -169,13 +274,13 @@ class Path:
 
     Path accepts the zipfile object itself or a filename
 
-    >>> root = Path(zf)
+    >>> path = Path(zf)
 
     From there, several path operations are available.
 
     Directory iteration (including the zip file itself):
 
-    >>> a, b = root.iterdir()
+    >>> a, b = path.iterdir()
     >>> a
     Path('mem/abcde.zip', 'a.txt')
     >>> b
@@ -196,7 +301,7 @@ class Path:
 
     Read text:
 
-    >>> c.read_text()
+    >>> c.read_text(encoding='utf-8')
     'content of c'
 
     existence:
@@ -213,16 +318,38 @@ class Path:
     'mem/abcde.zip/b/c.txt'
 
     At the root, ``name``, ``filename``, and ``parent``
-    resolve to the zipfile. Note these attributes are not
-    valid and will raise a ``ValueError`` if the zipfile
-    has no filename.
+    resolve to the zipfile.
 
-    >>> root.name
+    >>> str(path)
+    'mem/abcde.zip/'
+    >>> path.name
     'abcde.zip'
-    >>> str(root.filename).replace(os.sep, posixpath.sep)
-    'mem/abcde.zip'
-    >>> str(root.parent)
+    >>> path.filename == pathlib.Path('mem/abcde.zip')
+    True
+    >>> str(path.parent)
     'mem'
+
+    If the zipfile has no filename, such attributes are not
+    valid and accessing them will raise an Exception.
+
+    >>> zf.filename = None
+    >>> path.name
+    Traceback (most recent call last):
+    ...
+    TypeError: ...
+
+    >>> path.filename
+    Traceback (most recent call last):
+    ...
+    TypeError: ...
+
+    >>> path.parent
+    Traceback (most recent call last):
+    ...
+    TypeError: ...
+
+    # workaround python/cpython#106763
+    >>> pass
     """
 
     __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
@@ -240,6 +367,18 @@ def __init__(self, root, at=""):
         self.root = FastLookup.make(root)
         self.at = at
 
+    def __eq__(self, other):
+        """
+        >>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo'
+        False
+        """
+        if self.__class__ is not other.__class__:
+            return NotImplemented
+        return (self.root, self.at) == (other.root, other.at)
+
+    def __hash__(self):
+        return hash((self.root, self.at))
+
     def open(self, mode='r', *args, pwd=None, **kwargs):
         """
         Open this entry as text or binary following the semantics
@@ -256,30 +395,36 @@ def open(self, mode='r', *args, pwd=None, **kwargs):
             if args or kwargs:
                 raise ValueError("encoding args invalid for binary operation")
             return stream
-        return io.TextIOWrapper(stream, *args, **kwargs)
+        # Text mode:
+        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
+        return io.TextIOWrapper(stream, encoding, *args, **kwargs)
+
+    def _base(self):
+        return pathlib.PurePosixPath(self.at or self.root.filename)
 
     @property
     def name(self):
-        return pathlib.Path(self.at).name or self.filename.name
+        return self._base().name
 
     @property
     def suffix(self):
-        return pathlib.Path(self.at).suffix or self.filename.suffix
+        return self._base().suffix
 
     @property
     def suffixes(self):
-        return pathlib.Path(self.at).suffixes or self.filename.suffixes
+        return self._base().suffixes
 
     @property
     def stem(self):
-        return pathlib.Path(self.at).stem or self.filename.stem
+        return self._base().stem
 
     @property
     def filename(self):
         return pathlib.Path(self.root.filename).joinpath(self.at)
 
     def read_text(self, *args, **kwargs):
-        with self.open('r', *args, **kwargs) as strm:
+        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
+        with self.open('r', encoding, *args, **kwargs) as strm:
             return strm.read()
 
     def read_bytes(self):
@@ -307,6 +452,33 @@ def iterdir(self):
         subs = map(self._next, self.root.namelist())
         return filter(self._is_child, subs)
 
+    def match(self, path_pattern):
+        return pathlib.PurePosixPath(self.at).match(path_pattern)
+
+    def is_symlink(self):
+        """
+        Return whether this path is a symlink.
+        """
+        info = self.root.getinfo(self.at)
+        mode = info.external_attr >> 16
+        return stat.S_ISLNK(mode)
+
+    def glob(self, pattern):
+        if not pattern:
+            raise ValueError(f"Unacceptable pattern: {pattern!r}")
+
+        prefix = re.escape(self.at)
+        tr = Translator(seps='/')
+        matches = re.compile(prefix + tr.translate(pattern)).fullmatch
+        names = (data.filename for data in self.root.filelist)
+        return map(self._next, filter(matches, names))
+
+    def rglob(self, pattern):
+        return self.glob(f'**/{pattern}')
+
+    def relative_to(self, other, *extra):
+        return posixpath.relpath(str(self), str(other.joinpath(*extra)))
+
     def __str__(self):
         return posixpath.join(self.root.filename, self.at)
 
@@ -314,7 +486,7 @@ def __repr__(self):
         return self.__repr.format(self=self)
 
     def joinpath(self, *other):
-        next = posixpath.join(self.at, *map(_pathlib_compat, other))
+        next = posixpath.join(self.at, *other)
         return self._next(self.root.resolve_dir(next))
 
     __truediv__ = joinpath
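
Taken together, the rewritten zipp.Path above behaves like a read-only
pathlib.Path over archive members, and the new CompleteDirs.inject classmethod
backfills directory entries in writable archives. A small sketch, assuming
zipp >= 3.19 is importable as a normal package:

    import io
    import zipfile
    import zipp

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as zf:
        zf.writestr("a/b.txt", "hello")
        zipp.CompleteDirs.inject(zf)  # add the implied 'a/' directory entry

    (sub,) = zipp.Path(zipfile.ZipFile(buf)).iterdir()
    print(sub.name)                                             # 'a'
    print(next(sub.glob("*.txt")).read_text(encoding="utf-8"))  # 'hello'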
diff --git a/setuptools/_vendor/zipp/compat/__init__.py b/setuptools/_vendor/zipp/compat/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/zipp/compat/py310.py b/setuptools/_vendor/zipp/compat/py310.py
new file mode 100644
index 0000000000..d5ca53e037
--- /dev/null
+++ b/setuptools/_vendor/zipp/compat/py310.py
@@ -0,0 +1,11 @@
+import sys
+import io
+
+
+def _text_encoding(encoding, stacklevel=2, /):  # pragma: no cover
+    return encoding
+
+
+text_encoding = (
+    io.text_encoding if sys.version_info > (3, 10) else _text_encoding  # type: ignore
+)
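
The shim above is the whole compatibility story for io.text_encoding: on
Python 3.10+ it participates in the EncodingWarning machinery, while on older
interpreters it degrades to returning its argument unchanged. In brief:

    from zipp.compat.py310 import text_encoding

    print(text_encoding("utf-8"))  # 'utf-8' on every supported Python
    # On 3.10+, text_encoding(None) resolves to the locale default
    # ('locale', or 'utf-8' under UTF-8 mode); before 3.10 it stays None.
    print(text_encoding(None))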
diff --git a/setuptools/_vendor/zipp/glob.py b/setuptools/_vendor/zipp/glob.py
new file mode 100644
index 0000000000..69c41d77c3
--- /dev/null
+++ b/setuptools/_vendor/zipp/glob.py
@@ -0,0 +1,106 @@
+import os
+import re
+
+
+_default_seps = os.sep + str(os.altsep) * bool(os.altsep)
+
+
+class Translator:
+    """
+    >>> Translator('xyz')
+    Traceback (most recent call last):
+    ...
+    AssertionError: Invalid separators
+
+    >>> Translator('')
+    Traceback (most recent call last):
+    ...
+    AssertionError: Invalid separators
+    """
+
+    seps: str
+
+    def __init__(self, seps: str = _default_seps):
+        assert seps and set(seps) <= set(_default_seps), "Invalid separators"
+        self.seps = seps
+
+    def translate(self, pattern):
+        """
+        Given a glob pattern, produce a regex that matches it.
+        """
+        return self.extend(self.translate_core(pattern))
+
+    def extend(self, pattern):
+        r"""
+        Extend regex for pattern-wide concerns.
+
+        Wrap the pattern in '(?s:...)', a non-capturing group with DOTALL
+        set, so '.' matches newlines (valid in filenames on Unix).
+
+        Append '\Z' to imply fullmatch even when match is used.
+        """
+        return rf'(?s:{pattern})\Z'
+
+    def translate_core(self, pattern):
+        r"""
+        Given a glob pattern, produce a regex that matches it.
+
+        >>> t = Translator()
+        >>> t.translate_core('*.txt').replace('\\\\', '')
+        '[^/]*\\.txt'
+        >>> t.translate_core('a?txt')
+        'a[^/]txt'
+        >>> t.translate_core('**/*').replace('\\\\', '')
+        '.*/[^/][^/]*'
+        """
+        self.restrict_rglob(pattern)
+        return ''.join(map(self.replace, separate(self.star_not_empty(pattern))))
+
+    def replace(self, match):
+        """
+        Perform the replacements for a match from :func:`separate`.
+        """
+        return match.group('set') or (
+            re.escape(match.group(0))
+            .replace('\\*\\*', r'.*')
+            .replace('\\*', rf'[^{re.escape(self.seps)}]*')
+            .replace('\\?', r'[^/]')
+        )
+
+    def restrict_rglob(self, pattern):
+        """
+        Raise ValueError if ** appears in anything but a full path segment.
+
+        >>> Translator().translate('**foo')
+        Traceback (most recent call last):
+        ...
+        ValueError: ** must appear alone in a path segment
+        """
+        seps_pattern = rf'[{re.escape(self.seps)}]+'
+        segments = re.split(seps_pattern, pattern)
+        if any('**' in segment and segment != '**' for segment in segments):
+            raise ValueError("** must appear alone in a path segment")
+
+    def star_not_empty(self, pattern):
+        """
+        Ensure that * will not match an empty segment.
+        """
+
+        def handle_segment(match):
+            segment = match.group(0)
+            return '?*' if segment == '*' else segment
+
+        not_seps_pattern = rf'[^{re.escape(self.seps)}]+'
+        return re.sub(not_seps_pattern, handle_segment, pattern)
+
+
+def separate(pattern):
+    """
+    Separate out character sets to avoid translating their contents.
+
+    >>> [m.group(0) for m in separate('*.txt')]
+    ['*.txt']
+    >>> [m.group(0) for m in separate('a[?]txt')]
+    ['a', '[?]', 'txt']
+    """
+    return re.finditer(r'([^\[]+)|(?P<set>[\[].*?[\]])|([\[][^\]]*$)', pattern)
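
The separate helper above keeps bracketed character sets opaque while
Translator.replace escapes everything else, so a full glob-to-regex pass looks
like this (a small sketch using the module as written):

    import re
    from zipp.glob import Translator

    matcher = re.compile(Translator(seps="/").translate("docs/*.rst")).fullmatch
    names = ["docs/intro.rst", "docs/api/core.rst", "src/readme.rst"]
    print([n for n in names if matcher(n)])  # ['docs/intro.rst']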
diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
index 18ca1e2428..cf73039f6d 100644
--- a/setuptools/extern/__init__.py
+++ b/setuptools/extern/__init__.py
@@ -78,6 +78,7 @@ def install(self):
 # ]]]
 names = (
     'autocommand',
+    'backports',
     'importlib_metadata',
     'importlib_resources',
     'inflect',

From 3ed7e2708c957ba9e513ad202d3044d8f5d45632 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 20:02:44 -0400
Subject: [PATCH 0830/1761] Remove importlib_metadata workaround.

---
 setuptools/_importlib.py | 40 +---------------------------------------
 1 file changed, 1 insertion(+), 39 deletions(-)

diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py
index ff3288102a..14384bef5d 100644
--- a/setuptools/_importlib.py
+++ b/setuptools/_importlib.py
@@ -1,48 +1,10 @@
 import sys
 
 
-def disable_importlib_metadata_finder(metadata):
-    """
-    Ensure importlib_metadata doesn't provide older, incompatible
-    Distributions.
-
-    Workaround for #3102.
-    """
-    try:
-        import importlib_metadata
-    except ImportError:
-        return
-    except AttributeError:
-        from .warnings import SetuptoolsWarning
-
-        SetuptoolsWarning.emit(
-            "Incompatibility problem.",
-            """
-            `importlib-metadata` version is incompatible with `setuptools`.
-            This problem is likely to be solved by installing an updated version of
-            `importlib-metadata`.
-            """,
-            see_url="https://github.com/python/importlib_metadata/issues/396",
-        )  # Ensure a descriptive message is shown.
-        raise  # This exception can be suppressed by _distutils_hack
-
-    if importlib_metadata is metadata:
-        return
-    to_remove = [
-        ob
-        for ob in sys.meta_path
-        if isinstance(ob, importlib_metadata.MetadataPathFinder)
-    ]
-    for item in to_remove:
-        sys.meta_path.remove(item)
-
-
 if sys.version_info < (3, 10):
     import importlib_metadata as metadata
-
-    disable_importlib_metadata_finder(metadata)
 else:
-    import importlib.metadata as metadata
+    import importlib.metadata as metadata  # noqa: F401
 
 
 if sys.version_info < (3, 9):

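With the finder workaround gone, _importlib.py reduces to the familiar
version-gated backport import. The same pattern in isolation (a sketch;
importlib_metadata is the PyPI backport):

    import sys

    if sys.version_info < (3, 10):
        import importlib_metadata as metadata  # PyPI backport
    else:
        import importlib.metadata as metadata  # stdlib

    print(metadata.version("setuptools"))
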
From f21bcab30b04843f362dfc450609f3df05be703f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 20:34:21 -0400
Subject: [PATCH 0831/1761] Remove setuptools.extern

---
 setuptools/extern/__init__.py | 96 -----------------------------------
 1 file changed, 96 deletions(-)
 delete mode 100644 setuptools/extern/__init__.py

diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py
deleted file mode 100644
index cf73039f6d..0000000000
--- a/setuptools/extern/__init__.py
+++ /dev/null
@@ -1,96 +0,0 @@
-import importlib.util
-import sys
-
-
-class VendorImporter:
-    """
-    A PEP 302 meta path importer for finding optionally-vendored
-    or otherwise naturally-installed packages from root_name.
-    """
-
-    def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
-        self.root_name = root_name
-        self.vendored_names = set(vendored_names)
-        self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
-
-    @property
-    def search_path(self):
-        """
-        Search first the vendor package then as a natural package.
-        """
-        yield self.vendor_pkg + '.'
-        yield ''
-
-    def _module_matches_namespace(self, fullname):
-        """Figure out if the target module is vendored."""
-        root, base, target = fullname.partition(self.root_name + '.')
-        return not root and any(map(target.startswith, self.vendored_names))
-
-    def load_module(self, fullname):
-        """
-        Iterate over the search path to locate and load fullname.
-        """
-        root, base, target = fullname.partition(self.root_name + '.')
-        for prefix in self.search_path:
-            extant = prefix + target
-            try:
-                __import__(extant)
-            except ImportError:
-                continue
-            mod = sys.modules[extant]
-            sys.modules[fullname] = mod
-            return mod
-        else:
-            raise ImportError(
-                "The '{target}' package is required; "
-                "normally this is bundled with this package so if you get "
-                "this warning, consult the packager of your "
-                "distribution.".format(**locals())
-            )
-
-    def create_module(self, spec):
-        return self.load_module(spec.name)
-
-    def exec_module(self, module):
-        pass
-
-    def find_spec(self, fullname, path=None, target=None):
-        """Return a module spec for vendored names."""
-        return (
-            importlib.util.spec_from_loader(fullname, self)
-            if self._module_matches_namespace(fullname)
-            else None
-        )
-
-    def install(self):
-        """
-        Install this importer into sys.meta_path if not already present.
-        """
-        if self not in sys.meta_path:
-            sys.meta_path.append(self)
-
-
-# [[[cog
-# import cog
-# from tools.vendored import yield_top_level
-# names = "\n".join(f"    {x!r}," for x in yield_top_level('setuptools'))
-# cog.outl(f"names = (\n{names}\n)")
-# ]]]
-names = (
-    'autocommand',
-    'backports',
-    'importlib_metadata',
-    'importlib_resources',
-    'inflect',
-    'jaraco',
-    'more_itertools',
-    'ordered_set',
-    'packaging',
-    'tomli',
-    'typeguard',
-    'typing_extensions',
-    'wheel',
-    'zipp',
-)
-# [[[end]]]
-VendorImporter(__name__, names, 'setuptools._vendor').install()

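For readers tracking what is being deleted: VendorImporter was a meta path
finder that resolved setuptools.extern.<name> first against the vendored
copies and then against top-level packages. A stripped-down sketch of the same
redirection idea, with hypothetical package names ('myproj.extern',
'myproj._vendor'):

    import importlib
    import importlib.util
    import sys

    class Redirector:
        # Resolve 'myproj.extern.<name>' to a vendored or top-level module.
        def __init__(self, alias_prefix, target_prefixes):
            self.alias_prefix = alias_prefix        # hypothetical: 'myproj.extern.'
            self.target_prefixes = target_prefixes  # e.g. ['myproj._vendor.', '']

        def find_spec(self, fullname, path=None, target=None):
            if not fullname.startswith(self.alias_prefix):
                return None
            return importlib.util.spec_from_loader(fullname, self)

        def create_module(self, spec):
            tail = spec.name[len(self.alias_prefix):]
            for prefix in self.target_prefixes:
                try:
                    return importlib.import_module(prefix + tail)
                except ImportError:
                    continue
            raise ImportError(f"cannot resolve {spec.name!r}")

        def exec_module(self, module):
            pass  # module was already executed by the real import above

    sys.meta_path.append(Redirector('myproj.extern.', ['myproj._vendor.', '']))
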
From bd5cf0003173c74f44fd53a3cca3ab07af3be8f4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 20:34:51 -0400
Subject: [PATCH 0832/1761] Remove check-extern env in tox.

---
 .github/workflows/main.yml |  1 -
 tools/vendored.py          | 21 ---------------------
 tox.ini                    |  6 +-----
 3 files changed, 1 insertion(+), 27 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index b8bbc750cc..b9ecc51412 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -129,7 +129,6 @@ jobs:
         job:
         - diffcov
         - docs
-        - check-extern
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
diff --git a/tools/vendored.py b/tools/vendored.py
index 63d8c577cf..36180a9c5e 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -246,25 +246,4 @@ def update_setuptools():
     install_deps(deps, vendor)
 
 
-def yield_top_level(name):
-    """Iterate over all modules and (top level) packages vendored
-    >>> roots = set(yield_top_level("setuptools"))
-    >>> examples = roots & {"jaraco", "backports", "zipp"}
-    >>> list(sorted(examples))
-    ['backports', 'jaraco', 'zipp']
-    >>> 'bin' in examples
-    False
-    """
-    vendor = Path(f"{name}/_vendor")
-    ignore = {"__pycache__", "__init__.py", ".ruff_cache", "bin"}
-
-    for item in sorted(vendor.iterdir()):
-        if item.name in ignore:
-            continue
-        if item.is_dir() and item.suffix != ".dist-info":
-            yield str(item.name)
-        if item.is_file() and item.suffix == ".py":
-            yield str(item.stem)
-
-
 __name__ == '__main__' and update_vendored()
diff --git a/tox.ini b/tox.ini
index 9ff4488cd3..8a3f6260b7 100644
--- a/tox.ini
+++ b/tox.ini
@@ -69,19 +69,15 @@ pass_env = *
 commands =
 	python tools/finalize.py
 
-[testenv:{vendor,check-extern}]
+[testenv:vendor]
 skip_install = True
-allowlist_externals = git, sh
 deps =
 	path
-	cogapp
 	jaraco.packaging
 	# workaround for pypa/pyproject-hooks#192
 	pyproject-hooks<1.1
 commands =
 	vendor: python -m tools.vendored
-	sh -c "git grep -l -F '\[\[\[cog' | xargs -t cog -I {toxinidir} -r"  # update `*.extern`
-	check-extern: git diff --exit-code
 
 [testenv:generate-validation-code]
 skip_install = True

From 9234fc35f37bd74a32501f22c698f6a8af26ffed Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 20:46:18 -0400
Subject: [PATCH 0833/1761] Update vendoring routine for pkg_resources to
 simply install the dependencies to the _vendor folder.

---
 pkg_resources/_vendor/vendored.txt | 13 -------------
 pyproject.toml                     |  3 +++
 tools/vendored.py                  | 25 ++++++++++++++-----------
 3 files changed, 17 insertions(+), 24 deletions(-)
 delete mode 100644 pkg_resources/_vendor/vendored.txt

diff --git a/pkg_resources/_vendor/vendored.txt b/pkg_resources/_vendor/vendored.txt
deleted file mode 100644
index 0c8fdc3823..0000000000
--- a/pkg_resources/_vendor/vendored.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-packaging==24
-
-platformdirs==2.6.2
-
-jaraco.text==3.7.0
-# required for jaraco.text on older Pythons
-importlib_resources==5.10.2
-# required for importlib_resources on older Pythons
-zipp==3.7.0
-# required for jaraco.functools
-more_itertools==10.2.0
-# required for jaraco.context on older Pythons
-backports.tarfile
diff --git a/pyproject.toml b/pyproject.toml
index 0709c0b143..7c517943a4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -33,6 +33,9 @@ dependencies = [
 	"importlib_metadata>=6",
 	"tomli>=2.0.1",
 	"wheel>=0.43.0",
+
+	# pkg_resources
+	"platformdirs >= 2.6.2",
 ]
 
 [project.urls]
diff --git a/tools/vendored.py b/tools/vendored.py
index 36180a9c5e..208aab8eb1 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -14,7 +14,7 @@ def remove_all(paths):
 
 
 def update_vendored():
-    # update_pkg_resources()
+    update_pkg_resources()
     update_setuptools()
 
 
@@ -195,17 +195,20 @@ def install(vendor):
 
 
 def update_pkg_resources():
+    deps = [
+        'packaging >= 24',
+        'platformdirs >= 2.6.2',
+        'jaraco.text >= 3.7',
+    ]
+    # workaround for https://github.com/pypa/pip/issues/12770
+    deps += [
+        'importlib_resources >= 5.10.2',
+        'zipp >= 3.7',
+        'backports.tarfile',
+    ]
     vendor = Path('pkg_resources/_vendor')
-    install(vendor)
-    rewrite_packaging(vendor / 'packaging', 'pkg_resources.extern')
-    repair_namespace(vendor / 'jaraco')
-    repair_namespace(vendor / 'backports')
-    rewrite_jaraco_text(vendor / 'jaraco/text', 'pkg_resources.extern')
-    rewrite_jaraco_functools(vendor / 'jaraco/functools', 'pkg_resources.extern')
-    rewrite_jaraco_context(vendor / 'jaraco', 'pkg_resources.extern')
-    rewrite_importlib_resources(vendor / 'importlib_resources', 'pkg_resources.extern')
-    rewrite_more_itertools(vendor / "more_itertools")
-    rewrite_platformdirs(vendor / "platformdirs")
+    clean(vendor)
+    install_deps(deps, vendor)
 
 
 def load_deps():

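The refactored update_pkg_resources above now just cleans the vendor directory
and delegates to install_deps, which is defined elsewhere in tools/vendored.py
and not shown in this hunk. A hedged sketch of what such a helper plausibly
does, assuming it shells out to pip with --target:

    import subprocess
    import sys
    from pathlib import Path

    def install_deps(deps, vendor: Path):
        # Assumption: pip-install the pinned deps directly into _vendor.
        subprocess.check_call([
            sys.executable, '-m', 'pip', 'install',
            '--target', str(vendor),
            *deps,
        ])
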
From d03cd0e6e21067e2802d811d5a4535f69da80788 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 20:51:30 -0400
Subject: [PATCH 0834/1761] Import dependencies naturally and ensure they're
 available by appending the vendor dir to sys.path.

---
 pkg_resources/__init__.py             | 14 ++++++++------
 pkg_resources/tests/test_resources.py |  2 +-
 2 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index d47df3f3c5..4b7887e9f4 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -74,6 +74,8 @@
 
 import _imp
 
+sys.path.append(os.path.dirname(__file__) + '/_vendor')
+
 # capture these to bypass sandboxing
 from os import utime
 from os import open as os_open
@@ -87,16 +89,16 @@
     # no write support, probably under GAE
     WRITE_SUPPORT = False
 
-from pkg_resources.extern.jaraco.text import (
+from jaraco.text import (
     yield_lines,
     drop_comment,
     join_continuation,
 )
-from pkg_resources.extern.packaging import markers as _packaging_markers
-from pkg_resources.extern.packaging import requirements as _packaging_requirements
-from pkg_resources.extern.packaging import utils as _packaging_utils
-from pkg_resources.extern.packaging import version as _packaging_version
-from pkg_resources.extern.platformdirs import user_cache_dir as _user_cache_dir
+from packaging import markers as _packaging_markers
+from packaging import requirements as _packaging_requirements
+from packaging import utils as _packaging_utils
+from packaging import version as _packaging_version
+from platformdirs import user_cache_dir as _user_cache_dir
 
 if TYPE_CHECKING:
     from _typeshed import BytesPath, StrPath, StrOrBytesPath
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index 826d691b83..9837c2719d 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -5,7 +5,7 @@
 import itertools
 
 import pytest
-from pkg_resources.extern.packaging.specifiers import SpecifierSet
+from packaging.specifiers import SpecifierSet
 
 import pkg_resources
 from pkg_resources import (

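The one-line sys.path.append above is the whole mechanism: vendored packages
become importable under their natural names, while a real installation earlier
on sys.path still takes precedence. In miniature:

    import os
    import sys

    # Appending (not prepending) means an installed 'packaging' wins; the
    # bundled copy is only found when nothing else provides it.
    sys.path.append(os.path.join(os.path.dirname(__file__), '_vendor'))

    from packaging import version
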
From c6913bf59da16669de5097825ec0a02903ea1926 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 21:00:39 -0400
Subject: [PATCH 0835/1761] Re-vendor pkg_resources packages.

---
 .../INSTALLER                                 |    0
 .../autocommand-2.2.2.dist-info/LICENSE       |  166 +
 .../autocommand-2.2.2.dist-info/METADATA      |  420 ++
 .../autocommand-2.2.2.dist-info/RECORD        |   18 +
 .../WHEEL                                     |    0
 .../autocommand-2.2.2.dist-info/top_level.txt |    1 +
 pkg_resources/_vendor/autocommand/__init__.py |   27 +
 .../_vendor/autocommand/autoasync.py          |  142 +
 .../_vendor/autocommand/autocommand.py        |   70 +
 pkg_resources/_vendor/autocommand/automain.py |   59 +
 .../_vendor/autocommand/autoparse.py          |  333 ++
 pkg_resources/_vendor/autocommand/errors.py   |   23 +
 .../backports.tarfile-1.0.0.dist-info/RECORD  |    9 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../METADATA                                  |   12 +-
 .../backports.tarfile-1.2.0.dist-info/RECORD  |   17 +
 .../REQUESTED                                 |    0
 .../WHEEL                                     |    0
 .../top_level.txt                             |    0
 pkg_resources/_vendor/backports/__init__.py   |    1 +
 .../{tarfile.py => tarfile/__init__.py}       |  129 +-
 .../_vendor/backports/tarfile/__main__.py     |    5 +
 .../tarfile/compat}/__init__.py               |    0
 .../_vendor/backports/tarfile/compat/py38.py  |   24 +
 .../RECORD                                    |   77 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../METADATA                                  |   52 +-
 .../RECORD                                    |   89 +
 .../REQUESTED                                 |    0
 .../WHEEL                                     |    2 +-
 .../top_level.txt                             |    0
 .../_vendor/importlib_resources/__init__.py   |   14 +-
 .../_vendor/importlib_resources/_adapters.py  |    4 +-
 .../_vendor/importlib_resources/_common.py    |    7 +-
 .../_vendor/importlib_resources/_compat.py    |  108 -
 .../_vendor/importlib_resources/_itertools.py |   69 +-
 .../_vendor/importlib_resources/_legacy.py    |  120 -
 .../_vendor/importlib_resources/abc.py        |    3 +-
 .../{tests/zipdata01 => compat}/__init__.py   |    0
 .../importlib_resources/compat/py38.py        |   11 +
 .../importlib_resources/compat/py39.py        |   10 +
 .../_vendor/importlib_resources/functional.py |   81 +
 .../{tests/zipdata02 => future}/__init__.py   |    0
 .../importlib_resources/future/adapters.py    |   95 +
 .../_vendor/importlib_resources/readers.py    |   90 +-
 .../_vendor/importlib_resources/simple.py     |    2 +-
 .../importlib_resources/tests/_compat.py      |   32 -
 .../importlib_resources/tests/_path.py        |   18 +-
 .../tests/compat}/__init__.py                 |    0
 .../importlib_resources/tests/compat/py312.py |   18 +
 .../importlib_resources/tests/compat/py39.py  |   10 +
 .../tests/data01/subdirectory/binary.file     |  Bin 4 -> 4 bytes
 .../subdirectory/subsubdir/resource.txt       |    1 +
 .../namespacedata01/subdirectory/binary.file  |    1 +
 .../tests/test_compatibilty_files.py          |    6 +-
 .../tests/test_contents.py                    |    2 +-
 .../importlib_resources/tests/test_custom.py  |   47 +
 .../importlib_resources/tests/test_files.py   |   23 +-
 .../tests/test_functional.py                  |  242 +
 .../importlib_resources/tests/test_open.py    |   20 +-
 .../importlib_resources/tests/test_path.py    |   19 +-
 .../importlib_resources/tests/test_read.py    |   41 +-
 .../importlib_resources/tests/test_reader.py  |   34 +-
 .../tests/test_resource.py                    |  155 +-
 .../importlib_resources/tests/update-zips.py  |   53 -
 .../_vendor/importlib_resources/tests/util.py |   79 +-
 .../_vendor/importlib_resources/tests/zip.py  |   32 +
 .../tests/zipdata01/ziptestdata.zip           |  Bin 876 -> 0 bytes
 .../tests/zipdata02/ziptestdata.zip           |  Bin 698 -> 0 bytes
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../_vendor/inflect-7.3.1.dist-info/METADATA  |  591 +++
 .../_vendor/inflect-7.3.1.dist-info/RECORD    |   13 +
 .../WHEEL                                     |    2 +-
 .../inflect-7.3.1.dist-info/top_level.txt     |    1 +
 pkg_resources/_vendor/inflect/__init__.py     | 3986 +++++++++++++++++
 .../REQUESTED => inflect/compat/__init__.py}  |    0
 pkg_resources/_vendor/inflect/compat/py38.py  |    7 +
 .../REQUESTED => inflect/py.typed}            |    0
 .../jaraco.functools-4.0.0.dist-info/RECORD   |   10 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    2 -
 .../METADATA                                  |   21 +-
 .../jaraco.functools-4.0.1.dist-info/RECORD   |   10 +
 .../WHEEL                                     |    2 +-
 .../top_level.txt                             |    0
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    2 -
 .../jaraco.text-3.12.1.dist-info/METADATA     |   95 +
 .../jaraco.text-3.12.1.dist-info/RECORD       |   20 +
 .../REQUESTED                                 |    0
 .../jaraco.text-3.12.1.dist-info/WHEEL        |    5 +
 .../top_level.txt                             |    0
 .../jaraco.text-3.7.0.dist-info/METADATA      |   55 -
 .../jaraco.text-3.7.0.dist-info/RECORD        |   10 -
 pkg_resources/_vendor/jaraco/context.py       |    2 +-
 .../_vendor/jaraco/functools/__init__.py      |    6 +-
 .../_vendor/jaraco/functools/__init__.pyi     |    3 -
 pkg_resources/_vendor/jaraco/text/__init__.py |   61 +-
 pkg_resources/_vendor/jaraco/text/layouts.py  |   25 +
 .../_vendor/jaraco/text/show-newlines.py      |   33 +
 .../_vendor/jaraco/text/strip-prefix.py       |   21 +
 .../_vendor/jaraco/text/to-dvorak.py          |    6 +
 .../_vendor/jaraco/text/to-qwerty.py          |    6 +
 .../more_itertools-10.2.0.dist-info/RECORD    |   15 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../METADATA                                  |   29 +-
 .../more_itertools-10.3.0.dist-info/RECORD    |   15 +
 .../WHEEL                                     |    0
 .../_vendor/more_itertools/__init__.py        |    2 +-
 pkg_resources/_vendor/more_itertools/more.py  |  379 +-
 pkg_resources/_vendor/more_itertools/more.pyi |   14 +
 .../_vendor/more_itertools/recipes.py         |   76 +-
 .../_vendor/more_itertools/recipes.pyi        |   10 +-
 .../_vendor/packaging-24.0.dist-info/RECORD   |   37 -
 .../INSTALLER                                 |    0
 .../LICENSE                                   |    0
 .../LICENSE.APACHE                            |    0
 .../LICENSE.BSD                               |    0
 .../METADATA                                  |    6 +-
 .../_vendor/packaging-24.1.dist-info/RECORD   |   37 +
 .../REQUESTED                                 |    0
 .../WHEEL                                     |    0
 pkg_resources/_vendor/packaging/__init__.py   |    2 +-
 pkg_resources/_vendor/packaging/_elffile.py   |    8 +-
 pkg_resources/_vendor/packaging/_manylinux.py |   22 +-
 pkg_resources/_vendor/packaging/_musllinux.py |   10 +-
 pkg_resources/_vendor/packaging/_parser.py    |   26 +-
 pkg_resources/_vendor/packaging/_tokenizer.py |   18 +-
 pkg_resources/_vendor/packaging/markers.py    |  115 +-
 pkg_resources/_vendor/packaging/metadata.py   |  153 +-
 .../_vendor/packaging/requirements.py         |    9 +-
 pkg_resources/_vendor/packaging/specifiers.py |   56 +-
 pkg_resources/_vendor/packaging/tags.py       |   43 +-
 pkg_resources/_vendor/packaging/utils.py      |   10 +-
 pkg_resources/_vendor/packaging/version.py    |   54 +-
 .../platformdirs-2.6.2.dist-info/RECORD       |   23 -
 .../platformdirs-4.2.2.dist-info/INSTALLER    |    1 +
 .../METADATA                                  |  106 +-
 .../platformdirs-4.2.2.dist-info/RECORD       |   23 +
 .../platformdirs-4.2.2.dist-info/REQUESTED    |    0
 .../WHEEL                                     |    2 +-
 .../licenses/LICENSE                          |    0
 .../_vendor/platformdirs/__init__.py          |  437 +-
 .../_vendor/platformdirs/__main__.py          |   27 +-
 pkg_resources/_vendor/platformdirs/android.py |  181 +-
 pkg_resources/_vendor/platformdirs/api.py     |  182 +-
 pkg_resources/_vendor/platformdirs/macos.py   |   98 +-
 pkg_resources/_vendor/platformdirs/unix.py    |  212 +-
 pkg_resources/_vendor/platformdirs/version.py |   16 +-
 pkg_resources/_vendor/platformdirs/windows.py |  158 +-
 .../typeguard-4.3.0.dist-info/INSTALLER       |    1 +
 .../_vendor/typeguard-4.3.0.dist-info/LICENSE |   19 +
 .../typeguard-4.3.0.dist-info/METADATA        |   81 +
 .../_vendor/typeguard-4.3.0.dist-info/RECORD  |   34 +
 .../_vendor/typeguard-4.3.0.dist-info/WHEEL   |    5 +
 .../entry_points.txt                          |    2 +
 .../typeguard-4.3.0.dist-info/top_level.txt   |    1 +
 pkg_resources/_vendor/typeguard/__init__.py   |   48 +
 pkg_resources/_vendor/typeguard/_checkers.py  |  993 ++++
 pkg_resources/_vendor/typeguard/_config.py    |  108 +
 .../_vendor/typeguard/_decorators.py          |  235 +
 .../_vendor/typeguard/_exceptions.py          |   42 +
 pkg_resources/_vendor/typeguard/_functions.py |  308 ++
 .../_vendor/typeguard/_importhook.py          |  213 +
 pkg_resources/_vendor/typeguard/_memo.py      |   48 +
 .../_vendor/typeguard/_pytest_plugin.py       |  127 +
 .../_vendor/typeguard/_suppression.py         |   86 +
 .../_vendor/typeguard/_transformer.py         | 1229 +++++
 .../_vendor/typeguard/_union_transformer.py   |   55 +
 pkg_resources/_vendor/typeguard/_utils.py     |  173 +
 pkg_resources/_vendor/typeguard/py.typed      |    0
 .../INSTALLER                                 |    1 +
 .../LICENSE                                   |  279 ++
 .../METADATA                                  |   67 +
 .../typing_extensions-4.12.2.dist-info/RECORD |    7 +
 .../typing_extensions-4.12.2.dist-info/WHEEL  |    4 +
 pkg_resources/_vendor/typing_extensions.py    | 3641 +++++++++++++++
 .../_vendor/zipp-3.19.2.dist-info/INSTALLER   |    1 +
 .../_vendor/zipp-3.19.2.dist-info/LICENSE     |   17 +
 .../_vendor/zipp-3.19.2.dist-info/METADATA    |  102 +
 .../_vendor/zipp-3.19.2.dist-info/RECORD      |   15 +
 .../_vendor/zipp-3.19.2.dist-info/REQUESTED   |    0
 .../_vendor/zipp-3.19.2.dist-info/WHEEL       |    5 +
 .../top_level.txt                             |    0
 .../_vendor/zipp-3.7.0.dist-info/METADATA     |   58 -
 .../_vendor/zipp-3.7.0.dist-info/RECORD       |    9 -
 .../_vendor/{zipp.py => zipp/__init__.py}     |  248 +-
 pkg_resources/_vendor/zipp/compat/__init__.py |    0
 pkg_resources/_vendor/zipp/compat/py310.py    |   11 +
 pkg_resources/_vendor/zipp/glob.py            |  106 +
 194 files changed, 17438 insertions(+), 1671 deletions(-)
 rename pkg_resources/_vendor/{backports.tarfile-1.0.0.dist-info => autocommand-2.2.2.dist-info}/INSTALLER (100%)
 create mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/LICENSE
 create mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/RECORD
 rename pkg_resources/_vendor/{importlib_resources-5.10.2.dist-info => autocommand-2.2.2.dist-info}/WHEEL (100%)
 create mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/top_level.txt
 create mode 100644 pkg_resources/_vendor/autocommand/__init__.py
 create mode 100644 pkg_resources/_vendor/autocommand/autoasync.py
 create mode 100644 pkg_resources/_vendor/autocommand/autocommand.py
 create mode 100644 pkg_resources/_vendor/autocommand/automain.py
 create mode 100644 pkg_resources/_vendor/autocommand/autoparse.py
 create mode 100644 pkg_resources/_vendor/autocommand/errors.py
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
 rename pkg_resources/_vendor/{importlib_resources-5.10.2.dist-info => backports.tarfile-1.2.0.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{backports.tarfile-1.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/LICENSE (100%)
 rename pkg_resources/_vendor/{backports.tarfile-1.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/METADATA (83%)
 create mode 100644 pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
 rename pkg_resources/_vendor/{backports.tarfile-1.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/REQUESTED (100%)
 rename pkg_resources/_vendor/{backports.tarfile-1.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/WHEEL (100%)
 rename pkg_resources/_vendor/{backports.tarfile-1.0.0.dist-info => backports.tarfile-1.2.0.dist-info}/top_level.txt (100%)
 rename pkg_resources/_vendor/backports/{tarfile.py => tarfile/__init__.py} (96%)
 create mode 100644 pkg_resources/_vendor/backports/tarfile/__main__.py
 rename pkg_resources/_vendor/{ => backports/tarfile/compat}/__init__.py (100%)
 create mode 100644 pkg_resources/_vendor/backports/tarfile/compat/py38.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD
 rename pkg_resources/_vendor/{jaraco.functools-4.0.0.dist-info => importlib_resources-6.4.0.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{importlib_resources-5.10.2.dist-info => importlib_resources-6.4.0.dist-info}/LICENSE (100%)
 rename pkg_resources/_vendor/{importlib_resources-5.10.2.dist-info => importlib_resources-6.4.0.dist-info}/METADATA (67%)
 create mode 100644 pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/RECORD
 rename pkg_resources/_vendor/{importlib_resources-5.10.2.dist-info => importlib_resources-6.4.0.dist-info}/REQUESTED (100%)
 rename pkg_resources/_vendor/{jaraco.functools-4.0.0.dist-info => importlib_resources-6.4.0.dist-info}/WHEEL (65%)
 rename pkg_resources/_vendor/{importlib_resources-5.10.2.dist-info => importlib_resources-6.4.0.dist-info}/top_level.txt (100%)
 delete mode 100644 pkg_resources/_vendor/importlib_resources/_compat.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/_legacy.py
 rename pkg_resources/_vendor/importlib_resources/{tests/zipdata01 => compat}/__init__.py (100%)
 create mode 100644 pkg_resources/_vendor/importlib_resources/compat/py38.py
 create mode 100644 pkg_resources/_vendor/importlib_resources/compat/py39.py
 create mode 100644 pkg_resources/_vendor/importlib_resources/functional.py
 rename pkg_resources/_vendor/importlib_resources/{tests/zipdata02 => future}/__init__.py (100%)
 create mode 100644 pkg_resources/_vendor/importlib_resources/future/adapters.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/_compat.py
 rename pkg_resources/_vendor/{jaraco => importlib_resources/tests/compat}/__init__.py (100%)
 create mode 100644 pkg_resources/_vendor/importlib_resources/tests/compat/py312.py
 create mode 100644 pkg_resources/_vendor/importlib_resources/tests/compat/py39.py
 create mode 100644 pkg_resources/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
 create mode 100644 pkg_resources/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
 create mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_custom.py
 create mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_functional.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/update-zips.py
 create mode 100644 pkg_resources/_vendor/importlib_resources/tests/zip.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip
 rename pkg_resources/_vendor/{jaraco.text-3.7.0.dist-info => inflect-7.3.1.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{jaraco.functools-4.0.0.dist-info => inflect-7.3.1.dist-info}/LICENSE (100%)
 create mode 100644 pkg_resources/_vendor/inflect-7.3.1.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/inflect-7.3.1.dist-info/RECORD
 rename pkg_resources/_vendor/{jaraco.text-3.7.0.dist-info => inflect-7.3.1.dist-info}/WHEEL (65%)
 create mode 100644 pkg_resources/_vendor/inflect-7.3.1.dist-info/top_level.txt
 create mode 100644 pkg_resources/_vendor/inflect/__init__.py
 rename pkg_resources/_vendor/{jaraco.text-3.7.0.dist-info/REQUESTED => inflect/compat/__init__.py} (100%)
 create mode 100644 pkg_resources/_vendor/inflect/compat/py38.py
 rename pkg_resources/_vendor/{packaging-24.0.dist-info/REQUESTED => inflect/py.typed} (100%)
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
 rename pkg_resources/_vendor/{more_itertools-10.2.0.dist-info => jaraco.functools-4.0.1.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{zipp-3.7.0.dist-info => jaraco.functools-4.0.1.dist-info}/LICENSE (97%)
 rename pkg_resources/_vendor/{jaraco.functools-4.0.0.dist-info => jaraco.functools-4.0.1.dist-info}/METADATA (78%)
 create mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
 rename pkg_resources/_vendor/{zipp-3.7.0.dist-info => jaraco.functools-4.0.1.dist-info}/WHEEL (65%)
 rename pkg_resources/_vendor/{jaraco.functools-4.0.0.dist-info => jaraco.functools-4.0.1.dist-info}/top_level.txt (100%)
 rename pkg_resources/_vendor/{packaging-24.0.dist-info => jaraco.text-3.12.1.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{jaraco.text-3.7.0.dist-info => jaraco.text-3.12.1.dist-info}/LICENSE (97%)
 create mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/RECORD
 rename pkg_resources/_vendor/{platformdirs-2.6.2.dist-info => jaraco.text-3.12.1.dist-info}/REQUESTED (100%)
 create mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
 rename pkg_resources/_vendor/{jaraco.text-3.7.0.dist-info => jaraco.text-3.12.1.dist-info}/top_level.txt (100%)
 delete mode 100644 pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD
 create mode 100644 pkg_resources/_vendor/jaraco/text/layouts.py
 create mode 100644 pkg_resources/_vendor/jaraco/text/show-newlines.py
 create mode 100644 pkg_resources/_vendor/jaraco/text/strip-prefix.py
 create mode 100644 pkg_resources/_vendor/jaraco/text/to-dvorak.py
 create mode 100644 pkg_resources/_vendor/jaraco/text/to-qwerty.py
 delete mode 100644 pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
 rename pkg_resources/_vendor/{platformdirs-2.6.2.dist-info => more_itertools-10.3.0.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{more_itertools-10.2.0.dist-info => more_itertools-10.3.0.dist-info}/LICENSE (100%)
 rename pkg_resources/_vendor/{more_itertools-10.2.0.dist-info => more_itertools-10.3.0.dist-info}/METADATA (95%)
 create mode 100644 pkg_resources/_vendor/more_itertools-10.3.0.dist-info/RECORD
 rename pkg_resources/_vendor/{more_itertools-10.2.0.dist-info => more_itertools-10.3.0.dist-info}/WHEEL (100%)
 delete mode 100644 pkg_resources/_vendor/packaging-24.0.dist-info/RECORD
 rename pkg_resources/_vendor/{zipp-3.7.0.dist-info => packaging-24.1.dist-info}/INSTALLER (100%)
 rename pkg_resources/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/LICENSE (100%)
 rename pkg_resources/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/LICENSE.APACHE (100%)
 rename pkg_resources/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/LICENSE.BSD (100%)
 rename pkg_resources/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/METADATA (97%)
 create mode 100644 pkg_resources/_vendor/packaging-24.1.dist-info/RECORD
 rename pkg_resources/_vendor/{zipp-3.7.0.dist-info => packaging-24.1.dist-info}/REQUESTED (100%)
 rename pkg_resources/_vendor/{packaging-24.0.dist-info => packaging-24.1.dist-info}/WHEEL (100%)
 delete mode 100644 pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD
 create mode 100644 pkg_resources/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
 rename pkg_resources/_vendor/{platformdirs-2.6.2.dist-info => platformdirs-4.2.2.dist-info}/METADATA (75%)
 create mode 100644 pkg_resources/_vendor/platformdirs-4.2.2.dist-info/RECORD
 create mode 100644 pkg_resources/_vendor/platformdirs-4.2.2.dist-info/REQUESTED
 rename pkg_resources/_vendor/{platformdirs-2.6.2.dist-info => platformdirs-4.2.2.dist-info}/WHEEL (67%)
 rename pkg_resources/_vendor/{platformdirs-2.6.2.dist-info => platformdirs-4.2.2.dist-info}/licenses/LICENSE (100%)
 create mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/INSTALLER
 create mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/LICENSE
 create mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/RECORD
 create mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/WHEEL
 create mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
 create mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/top_level.txt
 create mode 100644 pkg_resources/_vendor/typeguard/__init__.py
 create mode 100644 pkg_resources/_vendor/typeguard/_checkers.py
 create mode 100644 pkg_resources/_vendor/typeguard/_config.py
 create mode 100644 pkg_resources/_vendor/typeguard/_decorators.py
 create mode 100644 pkg_resources/_vendor/typeguard/_exceptions.py
 create mode 100644 pkg_resources/_vendor/typeguard/_functions.py
 create mode 100644 pkg_resources/_vendor/typeguard/_importhook.py
 create mode 100644 pkg_resources/_vendor/typeguard/_memo.py
 create mode 100644 pkg_resources/_vendor/typeguard/_pytest_plugin.py
 create mode 100644 pkg_resources/_vendor/typeguard/_suppression.py
 create mode 100644 pkg_resources/_vendor/typeguard/_transformer.py
 create mode 100644 pkg_resources/_vendor/typeguard/_union_transformer.py
 create mode 100644 pkg_resources/_vendor/typeguard/_utils.py
 create mode 100644 pkg_resources/_vendor/typeguard/py.typed
 create mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
 create mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
 create mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/RECORD
 create mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
 create mode 100644 pkg_resources/_vendor/typing_extensions.py
 create mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/INSTALLER
 create mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/LICENSE
 create mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/METADATA
 create mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/RECORD
 create mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/REQUESTED
 create mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/WHEEL
 rename pkg_resources/_vendor/{zipp-3.7.0.dist-info => zipp-3.19.2.dist-info}/top_level.txt (100%)
 delete mode 100644 pkg_resources/_vendor/zipp-3.7.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD
 rename pkg_resources/_vendor/{zipp.py => zipp/__init__.py} (52%)
 create mode 100644 pkg_resources/_vendor/zipp/compat/__init__.py
 create mode 100644 pkg_resources/_vendor/zipp/compat/py310.py
 create mode 100644 pkg_resources/_vendor/zipp/glob.py

diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/autocommand-2.2.2.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/LICENSE b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/LICENSE
new file mode 100644
index 0000000000..b49c3af060
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/LICENSE
@@ -0,0 +1,166 @@
+GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions.
+
+  As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+  "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+  An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+  A "Combined Work" is a work produced by combining or linking an
+Application with the Library.  The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+  The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+  The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+  1. Exception to Section 3 of the GNU GPL.
+
+  You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+  2. Conveying Modified Versions.
+
+  If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+   a) under this License, provided that you make a good faith effort to
+   ensure that, in the event an Application does not supply the
+   function or data, the facility still operates, and performs
+   whatever part of its purpose remains meaningful, or
+
+   b) under the GNU GPL, with none of the additional permissions of
+   this License applicable to that copy.
+
+  3. Object Code Incorporating Material from Library Header Files.
+
+  The object code form of an Application may incorporate material from
+a header file that is part of the Library.  You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+   a) Give prominent notice with each copy of the object code that the
+   Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the object code with a copy of the GNU GPL and this license
+   document.
+
+  4. Combined Works.
+
+  You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+   a) Give prominent notice with each copy of the Combined Work that
+   the Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the Combined Work with a copy of the GNU GPL and this license
+   document.
+
+   c) For a Combined Work that displays copyright notices during
+   execution, include the copyright notice for the Library among
+   these notices, as well as a reference directing the user to the
+   copies of the GNU GPL and this license document.
+
+   d) Do one of the following:
+
+       0) Convey the Minimal Corresponding Source under the terms of this
+       License, and the Corresponding Application Code in a form
+       suitable for, and under terms that permit, the user to
+       recombine or relink the Application with a modified version of
+       the Linked Version to produce a modified Combined Work, in the
+       manner specified by section 6 of the GNU GPL for conveying
+       Corresponding Source.
+
+       1) Use a suitable shared library mechanism for linking with the
+       Library.  A suitable mechanism is one that (a) uses at run time
+       a copy of the Library already present on the user's computer
+       system, and (b) will operate properly with a modified version
+       of the Library that is interface-compatible with the Linked
+       Version.
+
+   e) Provide Installation Information, but only if you would otherwise
+   be required to provide such information under section 6 of the
+   GNU GPL, and only to the extent that such information is
+   necessary to install and execute a modified version of the
+   Combined Work produced by recombining or relinking the
+   Application with a modified version of the Linked Version. (If
+   you use option 4d0, the Installation Information must accompany
+   the Minimal Corresponding Source and Corresponding Application
+   Code. If you use option 4d1, you must provide the Installation
+   Information in the manner specified by section 6 of the GNU GPL
+   for conveying Corresponding Source.)
+
+  5. Combined Libraries.
+
+  You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+   a) Accompany the combined library with a copy of the same work based
+   on the Library, uncombined with any other library facilities,
+   conveyed under the terms of this License.
+
+   b) Give prominent notice with the combined library that part of it
+   is a work based on the Library, and explaining where to find the
+   accompanying uncombined form of the same work.
+
+  6. Revised Versions of the GNU Lesser General Public License.
+
+  The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+  Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+  If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
+
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/METADATA b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/METADATA
new file mode 100644
index 0000000000..32214fb440
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/METADATA
@@ -0,0 +1,420 @@
+Metadata-Version: 2.1
+Name: autocommand
+Version: 2.2.2
+Summary: A library to create a command-line program from a function
+Home-page: https://github.com/Lucretiel/autocommand
+Author: Nathan West
+License: LGPLv3
+Project-URL: Homepage, https://github.com/Lucretiel/autocommand
+Project-URL: Bug Tracker, https://github.com/Lucretiel/autocommand/issues
+Platform: any
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+
+[![PyPI version](https://badge.fury.io/py/autocommand.svg)](https://badge.fury.io/py/autocommand)
+
+# autocommand
+
+A library to automatically generate and run simple argparse parsers from function signatures.
+
+## Installation
+
+Autocommand is installed via pip:
+
+```
+$ pip install autocommand
+```
+
+## Usage
+
+Autocommand turns a function into a command-line program. It converts the function's parameter signature into command-line arguments, and automatically runs the function if the module was called as `__main__`. In effect, it lets you create a smart main function.
+
+```python
+from autocommand import autocommand
+
+# This program takes exactly one argument and echoes it.
+@autocommand(__name__)
+def echo(thing):
+    print(thing)
+```
+
+```
+$ python echo.py hello
+hello
+$ python echo.py -h
+usage: echo.py [-h] thing
+
+positional arguments:
+  thing
+
+optional arguments:
+  -h, --help  show this help message and exit
+$ python echo.py hello world  # too many arguments
+usage: echo.py [-h] thing
+echo.py: error: unrecognized arguments: world
+```
+
+As you can see, autocommand converts the signature of the function into an argument spec. When you run the file as a program, autocommand collects the command-line arguments and turns them into function arguments. The function is executed with these arguments, and then the program exits with the return value of the function, via `sys.exit`. Autocommand also automatically creates a usage message, which can be invoked with `-h` or `--help`, and automatically prints an error message when provided with invalid arguments.
+
+### Types
+
+You can use a type annotation to give an argument a type. Any type (or in fact any callable) that returns an object when given a string argument can be used, though there are a few special cases that are described later.
+
+```python
+@autocommand(__name__)
+def net_client(host, port: int):
+    ...
+```
+
+Autocommand will catch `TypeErrors` raised by the type during argument parsing, so you can supply a callable and do some basic argument validation as well.
+
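+For instance (an illustrative sketch, not part of the library; `positive_int` and `countdown` are hypothetical names), a validating callable might look like:
+
+```python
+from autocommand import autocommand
+
+def positive_int(arg):
+    # Raise TypeError for bad values; autocommand catches it during
+    # argument parsing and reports a usage error instead of crashing.
+    value = int(arg)
+    if value <= 0:
+        raise TypeError('value must be positive')
+    return value
+
+@autocommand(__name__)
+def countdown(steps: positive_int):
+    print(steps)
+```
+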
+### Trailing Arguments
+
+You can add a `*args` parameter to your function to give it trailing arguments. The command will collect 0 or more trailing arguments and supply them to `args` as a tuple. If a type annotation is supplied, the type is applied to each argument.
+
+```python
+# Write the contents of each file, one by one
+@autocommand(__name__)
+def cat(*files):
+    for filename in files:
+        with open(filename) as file:
+            for line in file:
+                print(line.rstrip())
+```
+
+```
+$ python cat.py -h
+usage: cat.py [-h] [file [file ...]]
+
+positional arguments:
+  file
+
+optional arguments:
+  -h, --help  show this help message and exit
+```
+
+### Options
+
+To create `--option` switches, just assign a default. Autocommand will automatically create `--long` and `-s`hort switches.
+
+```python
+@autocommand(__name__)
+def do_with_config(argument, config='~/foo.conf'):
+    pass
+```
+
+```
+$ python example.py -h
+usage: example.py [-h] [-c CONFIG] argument
+
+positional arguments:
+  argument
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -c CONFIG, --config CONFIG
+```
+
+The option's type is automatically deduced from the default, unless one is explicitly given in an annotation:
+
+```python
+@autocommand(__name__)
+def http_connect(host, port=80):
+    print('{}:{}'.format(host, port))
+```
+
+```
+$ python http.py -h
+usage: http.py [-h] [-p PORT] host
+
+positional arguments:
+  host
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -p PORT, --port PORT
+$ python http.py localhost
+localhost:80
+$ python http.py localhost -p 8080
+localhost:8080
+$ python http.py localhost -p blah
+usage: http.py [-h] [-p PORT] host
+http.py: error: argument -p/--port: invalid int value: 'blah'
+```
+
+#### None
+
+If an option is given a default value of `None`, it reads in a value as normal, but supplies `None` if the option isn't provided.
+
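+For example (a minimal sketch of this behavior; the names are hypothetical):
+
+```python
+from autocommand import autocommand
+
+@autocommand(__name__)
+def connect(host, proxy=None):
+    # --proxy reads a string as normal, but if it isn't supplied on
+    # the command line, proxy is None.
+    print(host, proxy)
+```
+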
+#### Switches
+
+If an argument is given a default value of `True` or `False`, or
+given an explicit `bool` type, it becomes an option switch.
+
+```python
+@autocommand(__name__)
+def example(verbose=False, quiet=False):
+    pass
+```
+
+```
+$ python example.py -h
+usage: example.py [-h] [-v] [-q]
+
+optional arguments:
+  -h, --help     show this help message and exit
+  -v, --verbose
+  -q, --quiet
+```
+
+Autocommand attempts to do the "correct thing" in these cases: if the default is `True`, then supplying the switch makes the argument `False`; if the type is `bool` and the default is some other `True` value, then supplying the switch makes the argument `False`, while not supplying the switch makes the argument the default value.
+
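+For example (a minimal sketch of the first rule; `fetch` and `verify` are hypothetical names):
+
+```python
+from autocommand import autocommand
+
+@autocommand(__name__)
+def fetch(url, verify=True):
+    # verify defaults to True; supplying -v/--verify flips it to False.
+    print(url, verify)
+```
+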
+Autocommand also supports the creation of switch inverters. Pass `add_nos=True` to `autocommand` to enable this.
+
+```python
+@autocommand(__name__, add_nos=True)
+def example(verbose=False):
+    pass
+```
+
+```
+$ python example.py -h
+usage: example.py [-h] [-v] [--no-verbose]
+
+optional arguments:
+  -h, --help     show this help message and exit
+  -v, --verbose
+  --no-verbose
+```
+
+Using the `--no-` version of a switch will pass the opposite value in as a function argument. If multiple switches are present, the last one takes precedence.
+
+#### Files
+
+If the default value is a file object, such as `sys.stdout`, then autocommand just looks for a string to use as a file path. It doesn't do any special checking on the string, though (such as checking whether the file exists); it's better to let the client decide how to handle errors in this case. Instead, autocommand provides a special context manager called `smart_open`, which behaves exactly like `open` if a filename or other openable type is provided, but also lets you use already open files:
+
+```python
+from autocommand import autocommand, smart_open
+import sys
+
+# Write the contents of stdin, or a file, to stdout
+@autocommand(__name__)
+def write_out(infile=sys.stdin):
+    with smart_open(infile) as f:
+        for line in f:
+            print(line.rstrip())
+    # If a file was opened, it is closed here. If it was just stdin, it is untouched.
+```
+
+```
+$ echo "Hello World!" | python write_out.py | tee hello.txt
+Hello World!
+$ python write_out.py --infile hello.txt
+Hello World!
+```
+
+### Descriptions and docstrings
+
+The `autocommand` decorator accepts `description` and `epilog` kwargs, corresponding to the `description` and `epilog` parameters of the `ArgumentParser`. If no description is given, but the decorated function has a docstring, then it is taken as the `description` for the `ArgumentParser`. You can also provide both the description and epilog in the docstring by splitting it into two sections with a line of 4 or more `-` characters.
+
+```python
+@autocommand(__name__)
+def copy(infile=sys.stdin, outfile=sys.stdout):
+    '''
+    Copy the contents of a file (or stdin) to another file (or stdout)
+    ----------
+    Some extra documentation in the epilog
+    '''
+    with smart_open(infile) as istr:
+        with smart_open(outfile, 'w') as ostr:
+            for line in istr:
+                ostr.write(line)
+```
+
+```
+$ python copy.py -h
+usage: copy.py [-h] [-i INFILE] [-o OUTFILE]
+
+Copy the contents of a file (or stdin) to another file (or stdout)
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -i INFILE, --infile INFILE
+  -o OUTFILE, --outfile OUTFILE
+
+Some extra documentation in the epilog
+$ echo "Hello World" | python copy.py --outfile hello.txt
+$ python copy.py --infile hello.txt --outfile hello2.txt
+$ python copy.py --infile hello2.txt
+Hello World
+```
+
+### Parameter descriptions
+
+You can also attach description text to individual parameters in the annotation. To attach both a type and a description, supply them both, in any order, in a tuple:
+
+```python
+@autocommand(__name__)
+def copy_net(
+    infile: 'The name of the file to send',
+    host: 'The host to send the file to',
+    port: (int, 'The port to connect to')):
+
+    '''
+    Copy a file over raw TCP to a remote destination.
+    '''
+    # Left as an exercise to the reader
+```
+
+### Decorators and wrappers
+
+Autocommand automatically follows wrapper chains created by `@functools.wraps`. This means that you can apply other wrapping decorators to your main function, and autocommand will still correctly detect the signature.
+
+```python
+from functools import wraps
+from autocommand import autocommand
+
+def print_yielded(func):
+    '''
+    Convert a generator into a function that prints all yielded elements
+    '''
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        for thing in func(*args, **kwargs):
+            print(thing)
+    return wrapper
+
+@autocommand(__name__,
+    description= 'Print all the values from START to STOP, inclusive, in steps of STEP',
+    epilog=      'STOP and STEP default to 1')
+@print_yielded
+def seq(stop, start=1, step=1):
+    for i in range(start, stop + 1, step):
+        yield i
+```
+
+```
+$ seq.py -h
+usage: seq.py [-h] [-s START] [-S STEP] stop
+
+Print all the values from START to STOP, inclusive, in steps of STEP
+
+positional arguments:
+  stop
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -s START, --start START
+  -S STEP, --step STEP
+
+STOP and STEP default to 1
+```
+
+Even though autocommand is being applied to the `wrapper` returned by `print_yielded`, it still retrieves the signature of the underlying `seq` function to create the argument parser.
+
+### Custom Parser
+
+While autocommand's automatic parser generator is a powerful convenience, it doesn't cover all of the different features that argparse provides. If you need these features, you can provide your own parser as a kwarg to `autocommand`:
+
+```python
+from argparse import ArgumentParser
+from autocommand import autocommand
+
+parser = ArgumentParser()
+# autocommand can't do optional positional parameters
+parser.add_argument('arg', nargs='?')
+# or mutually exclusive options
+group = parser.add_mutually_exclusive_group()
+group.add_argument('-v', '--verbose', action='store_true')
+group.add_argument('-q', '--quiet', action='store_true')
+
+@autocommand(__name__, parser=parser)
+def main(arg, verbose, quiet):
+    print(arg, verbose, quiet)
+```
+
+```
+$ python parser.py -h
+usage: parser.py [-h] [-v | -q] [arg]
+
+positional arguments:
+  arg
+
+optional arguments:
+  -h, --help     show this help message and exit
+  -v, --verbose
+  -q, --quiet
+$ python parser.py
+None False False
+$ python parser.py hello
+hello False False
+$ python parser.py -v
+None True False
+$ python parser.py -q
+None False True
+$ python parser.py -vq
+usage: parser.py [-h] [-v | -q] [arg]
+parser.py: error: argument -q/--quiet: not allowed with argument -v/--verbose
+```
+
+Any parser should work fine, so long as each of the parser's arguments has a corresponding parameter in the decorated main function. The order of parameters doesn't matter, as long as they are all present. Note that when using a custom parser, autocommand doesn't modify the parser or the retrieved arguments. This means that no description/epilog will be added, and the function's type annotations and defaults (if present) will be ignored.
+
+## Testing and Library use
+
+The decorated function is only called and exited from if the first argument to `autocommand` is `'__main__'` or `True`. If it is neither of these values, or no argument is given, then a new main function is created by the decorator. This function has the signature `main(argv=None)`, and is intended to be called with arguments as if via `main(sys.argv[1:])`. The function has the attributes `parser` and `main`, which are the generated `ArgumentParser` and the original main function that was decorated. This is to facilitate testing and library use of your main. Calling the function triggers a `parse_args()` with the supplied arguments, and returns the result of the main function. Note that, while it returns instead of calling `sys.exit`, the `parse_args()` function will raise a `SystemExit` in the event of a parsing error or `-h/--help` argument.
+
+```python
+@autocommand()
+def test_prog(arg1, arg2: int, quiet=False, verbose=False):
+    if not quiet:
+        print(arg1, arg2)
+        if verbose:
+            print("LOUD NOISES")
+
+    return 0
+
+print(test_prog(['-v', 'hello', '80']))
+```
+
+```
+$ python test_prog.py
+hello 80
+LOUD NOISES
+0
+```
+
+If the function is called with no arguments, `sys.argv[1:]` is used. This is to allow the autocommand function to be used as a setuptools entry point.
+
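+For instance (a hypothetical `setup.py` fragment; the project and module names are made up), the generated main can be wired up as a console script:
+
+```python
+from setuptools import setup
+
+setup(
+    name='mypackage',
+    entry_points={
+        # 'main' is the @autocommand-decorated function; called with no
+        # arguments, it falls back to sys.argv[1:].
+        'console_scripts': ['mycommand = mypackage.cli:main'],
+    },
+)
+```
+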
+## Exceptions and limitations
+
+- There are a few possible exceptions that `autocommand` can raise. All of them derive from `autocommand.AutocommandError`.
+
+  - If an invalid annotation is given (that is, it isn't a `type`, `str`, `(type, str)`, or `(str, type)`), an `AnnotationError` is raised. The `type` may be any callable, as described in the `Types` section.
+  - If the function has a `**kwargs` parameter, a `KWargError` is raised.
+  - If, somehow, the function has a positional-only parameter, a `PositionalArgError` is raised. This means that the argument doesn't have a name, which is currently not possible with a plain `def` or `lambda`, though many built-in functions have this kind of parameter.
+
+- There are a few argparse features that are not supported by autocommand.
+
+  - It isn't possible to have an optional positional argument (as opposed to a `--option`). POSIX thinks this is bad form anyway.
+  - It isn't possible to have mutually exclusive arguments or options.
+  - It isn't possible to have subcommands or subparsers, though I'm working on a few solutions involving classes or nested function definitions to allow this.
+
+## Development
+
+Autocommand cannot be imported from the project root; this is to enforce separation of concerns and prevent accidental importing of `setup.py` or tests. To develop, install the project in editable mode:
+
+```
+$ python setup.py develop
+```
+
+This will create a link to the source files in the deployment directory, so that any source changes are reflected when it is imported.
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/RECORD b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/RECORD
new file mode 100644
index 0000000000..e6e12ea51e
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/RECORD
@@ -0,0 +1,18 @@
+autocommand-2.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+autocommand-2.2.2.dist-info/LICENSE,sha256=reeNBJgtaZctREqOFKlPh6IzTdOFXMgDSOqOJAqg3y0,7634
+autocommand-2.2.2.dist-info/METADATA,sha256=OADZuR3O6iBlpu1ieTgzYul6w4uOVrk0P0BO5TGGAJk,15006
+autocommand-2.2.2.dist-info/RECORD,,
+autocommand-2.2.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+autocommand-2.2.2.dist-info/top_level.txt,sha256=AzfhgKKS8EdAwWUTSF8mgeVQbXOY9kokHB6kSqwwqu0,12
+autocommand/__init__.py,sha256=zko5Rnvolvb-UXjCx_2ArPTGBWwUK5QY4LIQIKYR7As,1037
+autocommand/__pycache__/__init__.cpython-312.pyc,,
+autocommand/__pycache__/autoasync.cpython-312.pyc,,
+autocommand/__pycache__/autocommand.cpython-312.pyc,,
+autocommand/__pycache__/automain.cpython-312.pyc,,
+autocommand/__pycache__/autoparse.cpython-312.pyc,,
+autocommand/__pycache__/errors.cpython-312.pyc,,
+autocommand/autoasync.py,sha256=AMdyrxNS4pqWJfP_xuoOcImOHWD-qT7x06wmKN1Vp-U,5680
+autocommand/autocommand.py,sha256=hmkEmQ72HtL55gnURVjDOnsfYlGd5lLXbvT4KG496Qw,2505
+autocommand/automain.py,sha256=A2b8i754Mxc_DjU9WFr6vqYDWlhz0cn8miu8d8EsxV8,2076
+autocommand/autoparse.py,sha256=WVWmZJPcbzUKXP40raQw_0HD8qPJ2V9VG1eFFmmnFxw,11642
+autocommand/errors.py,sha256=7aa3roh9Herd6nIKpQHNWEslWE8oq7GiHYVUuRqORnA,886
diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/WHEEL b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/WHEEL
similarity index 100%
rename from pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/WHEEL
rename to pkg_resources/_vendor/autocommand-2.2.2.dist-info/WHEEL
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/top_level.txt b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/top_level.txt
new file mode 100644
index 0000000000..dda5158ff6
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+autocommand
diff --git a/pkg_resources/_vendor/autocommand/__init__.py b/pkg_resources/_vendor/autocommand/__init__.py
new file mode 100644
index 0000000000..73fbfca6b3
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2014-2016 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <https://www.gnu.org/licenses/>.
+
+# flake8 flags all these imports as unused, hence the NOQAs everywhere.
+
+from .automain import automain  # NOQA
+from .autoparse import autoparse, smart_open  # NOQA
+from .autocommand import autocommand  # NOQA
+
+try:
+    from .autoasync import autoasync  # NOQA
+except ImportError:  # pragma: no cover
+    pass
diff --git a/pkg_resources/_vendor/autocommand/autoasync.py b/pkg_resources/_vendor/autocommand/autoasync.py
new file mode 100644
index 0000000000..688f7e0554
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand/autoasync.py
@@ -0,0 +1,142 @@
+# Copyright 2014-2015 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <https://www.gnu.org/licenses/>.
+
+from asyncio import get_event_loop, iscoroutine
+from functools import wraps
+from inspect import signature
+
+
+async def _run_forever_coro(coro, args, kwargs, loop):
+    '''
+    This helper function launches an async main function that was tagged with
+    forever=True. There are two possibilities:
+
+    - The function is a normal function, which handles initializing the event
+      loop, which is then run forever
+    - The function is a coroutine, which needs to be scheduled in the event
+      loop, which is then run forever
+      - There is also the possibility that the function is a normal function
+        wrapping a coroutine function
+
+    The function is therefore called unconditionally and scheduled in the event
+    loop if the return value is a coroutine object.
+
+    The reason this is a separate function is to make absolutely sure that all
+    the objects created are garbage collected after all is said and done; we
+    do this to ensure that any exceptions raised in the tasks are collected
+    ASAP.
+    '''
+
+    # Personal note: I consider this an antipattern, as it relies on the use of
+    # unowned resources. The setup function dumps some stuff into the event
+    # loop where it just whirls in the ether without a well defined owner or
+    # lifetime. For this reason, there's a good chance I'll remove the
+    # forever=True feature from autoasync at some point in the future.
+    thing = coro(*args, **kwargs)
+    if iscoroutine(thing):
+        await thing
+
+
+def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
+    '''
+    Convert an asyncio coroutine into a function which, when called, is
+    evaluated in an event loop, and the return value returned. This is intended
+    to make it easy to write entry points into asyncio coroutines, which
+    otherwise need to be explicitly evaluated with an event loop's
+    run_until_complete.
+
+    If `loop` is given, it is used as the event loop to run the coro in. If it
+    is None (the default), the loop is retrieved using asyncio.get_event_loop.
+    This call is deferred until the decorated function is called, so that
+    callers can install custom event loops or event loop policies after
+    @autoasync is applied.
+
+    If `forever` is True, the loop is run forever after the decorated coroutine
+    is finished. Use this for servers created with asyncio.start_server and the
+    like.
+
+    If `pass_loop` is True, the event loop object is passed into the coroutine
+    as the `loop` kwarg when the wrapper function is called. In this case, the
+    wrapper function's __signature__ is updated to remove this parameter, so
+    that autoparse can still be used on it without generating a parameter for
+    `loop`.
+
+    This decorator can be used with ( @autoasync(...) ) or without
+    ( @autoasync ) arguments.
+
+    Examples:
+
+    @autoasync
+    def get_file(host, port):
+        reader, writer = yield from asyncio.open_connection(host, port)
+        data = reader.read()
+        sys.stdout.write(data.decode())
+
+    get_file(host, port)
+
+    @autoasync(forever=True, pass_loop=True)
+    def server(host, port, loop):
+        yield from loop.create_server(Proto, host, port)
+
+    server('localhost', 8899)
+
+    '''
+    if coro is None:
+        return lambda c: autoasync(
+            c, loop=loop,
+            forever=forever,
+            pass_loop=pass_loop)
+
+    # The old and new signatures are required to correctly bind the loop
+    # parameter in 100% of cases, even if it's a positional parameter.
+    # NOTE: A future release will probably require the loop parameter to be
+    # a kwonly parameter.
+    if pass_loop:
+        old_sig = signature(coro)
+        new_sig = old_sig.replace(parameters=(
+            param for name, param in old_sig.parameters.items()
+            if name != "loop"))
+
+    @wraps(coro)
+    def autoasync_wrapper(*args, **kwargs):
+        # Defer the call to get_event_loop so that, if a custom policy is
+        # installed after the autoasync decorator, it is respected at call time
+        local_loop = get_event_loop() if loop is None else loop
+
+        # Inject the 'loop' argument. We have to use this signature binding to
+        # ensure it's injected in the correct place (positional, keyword, etc)
+        if pass_loop:
+            bound_args = old_sig.bind_partial()
+            bound_args.arguments.update(
+                loop=local_loop,
+                **new_sig.bind(*args, **kwargs).arguments)
+            args, kwargs = bound_args.args, bound_args.kwargs
+
+        if forever:
+            local_loop.create_task(_run_forever_coro(
+                coro, args, kwargs, local_loop
+            ))
+            local_loop.run_forever()
+        else:
+            return local_loop.run_until_complete(coro(*args, **kwargs))
+
+    # Attach the updated signature. This allows 'pass_loop' to be used with
+    # autoparse
+    if pass_loop:
+        autoasync_wrapper.__signature__ = new_sig
+
+    return autoasync_wrapper
diff --git a/pkg_resources/_vendor/autocommand/autocommand.py b/pkg_resources/_vendor/autocommand/autocommand.py
new file mode 100644
index 0000000000..097e86de07
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand/autocommand.py
@@ -0,0 +1,70 @@
+# Copyright 2014-2015 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <https://www.gnu.org/licenses/>.
+
+from .autoparse import autoparse
+from .automain import automain
+try:
+    from .autoasync import autoasync
+except ImportError:  # pragma: no cover
+    pass
+
+
+def autocommand(
+        module, *,
+        description=None,
+        epilog=None,
+        add_nos=False,
+        parser=None,
+        loop=None,
+        forever=False,
+        pass_loop=False):
+
+    if callable(module):
+        raise TypeError('autocommand requires a module name argument')
+
+    def autocommand_decorator(func):
+        # Step 1: if requested, run it all in an asyncio event loop. autoasync
+        # patches the __signature__ of the decorated function, so that in the
+        # event that pass_loop is True, the `loop` parameter of the original
+        # function will *not* be interpreted as a command-line argument by
+        # autoparse
+        if loop is not None or forever or pass_loop:
+            func = autoasync(
+                func,
+                loop=None if loop is True else loop,
+                pass_loop=pass_loop,
+                forever=forever)
+
+        # Step 2: create parser. We do this second so that the arguments are
+        # parsed and passed *before* entering the asyncio event loop, if it
+        # exists. This simplifies the stack trace and ensures errors are
+        # reported earlier. It also ensures that errors raised during parsing &
+        # passing are still raised if `forever` is True.
+        func = autoparse(
+            func,
+            description=description,
+            epilog=epilog,
+            add_nos=add_nos,
+            parser=parser)
+
+        # Step 3: call the function automatically if __name__ == '__main__' (or
+        # if True was provided)
+        func = automain(module)(func)
+
+        return func
+
+    return autocommand_decorator
diff --git a/pkg_resources/_vendor/autocommand/automain.py b/pkg_resources/_vendor/autocommand/automain.py
new file mode 100644
index 0000000000..6cc45db66a
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand/automain.py
@@ -0,0 +1,59 @@
+# Copyright 2014-2015 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <https://www.gnu.org/licenses/>.
+
+import sys
+from .errors import AutocommandError
+
+
+class AutomainRequiresModuleError(AutocommandError, TypeError):
+    pass
+
+
+def automain(module, *, args=(), kwargs=None):
+    '''
+    This decorator automatically invokes a function if the module is being run
+    as the "__main__" module. Optionally, provide args or kwargs with which to
+    call the function. If `module` is "__main__", the function is called, and
+    the program is `sys.exit`ed with the return value. You can also pass `True`
+    to cause the function to be called unconditionally. If the function is not
+    called, it is returned unchanged by the decorator.
+
+    Usage:
+
+    @automain(__name__)  # Pass __name__ to check __name__=="__main__"
+    def main():
+        ...
+
+    If __name__ is "__main__" here, the main function is called, and then
+    sys.exit called with the return value.
+    '''
+
+    # Check that @automain(...) was called, rather than @automain
+    if callable(module):
+        raise AutomainRequiresModuleError(module)
+
+    if module == '__main__' or module is True:
+        if kwargs is None:
+            kwargs = {}
+
+        # Use a function definition instead of a lambda for a neater traceback
+        def automain_decorator(main):
+            sys.exit(main(*args, **kwargs))
+
+        return automain_decorator
+    else:
+        return lambda main: main
diff --git a/pkg_resources/_vendor/autocommand/autoparse.py b/pkg_resources/_vendor/autocommand/autoparse.py
new file mode 100644
index 0000000000..0276a3fae1
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand/autoparse.py
@@ -0,0 +1,333 @@
+# Copyright 2014-2015 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <https://www.gnu.org/licenses/>.
+
+import sys
+from re import compile as compile_regex
+from inspect import signature, getdoc, Parameter
+from argparse import ArgumentParser
+from contextlib import contextmanager
+from functools import wraps
+from io import IOBase
+from autocommand.errors import AutocommandError
+
+
+_empty = Parameter.empty
+
+
+class AnnotationError(AutocommandError):
+    '''Annotation error: annotation must be a string, type, or tuple of both'''
+
+
+class PositionalArgError(AutocommandError):
+    '''
+    Positional Arg Error: autocommand can't handle positional-only parameters
+    '''
+
+
+class KWArgError(AutocommandError):
+    '''kwarg Error: autocommand can't handle a **kwargs parameter'''
+
+
+class DocstringError(AutocommandError):
+    '''Docstring error'''
+
+
+class TooManySplitsError(DocstringError):
+    '''
+    The docstring had too many ---- section splits. Currently we only support
+    using up to a single split, to split the docstring into description and
+    epilog parts.
+    '''
+
+
+def _get_type_description(annotation):
+    '''
+    Given an annotation, return the (type, description) for the parameter.
+    If you provide an annotation that is somehow both a string and a callable,
+    the behavior is undefined.
+    '''
+    if annotation is _empty:
+        return None, None
+    elif callable(annotation):
+        return annotation, None
+    elif isinstance(annotation, str):
+        return None, annotation
+    elif isinstance(annotation, tuple):
+        try:
+            arg1, arg2 = annotation
+        except ValueError as e:
+            raise AnnotationError(annotation) from e
+        else:
+            if callable(arg1) and isinstance(arg2, str):
+                return arg1, arg2
+            elif isinstance(arg1, str) and callable(arg2):
+                return arg2, arg1
+
+    raise AnnotationError(annotation)
+
+
+def _add_arguments(param, parser, used_char_args, add_nos):
+    '''
+    Add the argument(s) to an ArgumentParser (using add_argument) for a given
+    parameter. used_char_args is the set of -short options currently already in
+    use, and is updated (if necessary) by this function. If add_nos is True,
+    this will also add an inverse switch for all boolean options. For
+    instance, for the boolean parameter "verbose", this will create --verbose
+    and --no-verbose.
+    '''
+
+    # Impl note: This function is kept separate from make_parser because it's
+    # already very long and I wanted to separate out as much as possible into
+    # its own call scope, to prevent even the possibility of subtle mutation
+    # bugs.
+    if param.kind is param.POSITIONAL_ONLY:
+        raise PositionalArgError(param)
+    elif param.kind is param.VAR_KEYWORD:
+        raise KWArgError(param)
+
+    # These are the kwargs for the add_argument function.
+    arg_spec = {}
+    is_option = False
+
+    # Get the type and default from the annotation.
+    arg_type, description = _get_type_description(param.annotation)
+
+    # Get the default value
+    default = param.default
+
+    # If there is no explicit type, and the default is present and not None,
+    # infer the type from the default.
+    if arg_type is None and default not in {_empty, None}:
+        arg_type = type(default)
+
+    # Add default. The presence of a default means this is an option, not an
+    # argument.
+    if default is not _empty:
+        arg_spec['default'] = default
+        is_option = True
+
+    # Add the type
+    if arg_type is not None:
+        # Special case for bool: make it just a --switch
+        if arg_type is bool:
+            if not default or default is _empty:
+                arg_spec['action'] = 'store_true'
+            else:
+                arg_spec['action'] = 'store_false'
+
+            # Switches are always options
+            is_option = True
+
+        # Special case for file types: make it a string type, for filename
+        elif isinstance(default, IOBase):
+            arg_spec['type'] = str
+
+        # TODO: special case for list type.
+    #   - How to specify the type of list members?
+        #       - param: [int]
+        #       - param: int =[]
+        #   - action='append' vs nargs='*'
+
+        else:
+            arg_spec['type'] = arg_type
+
+    # nargs: if the signature includes *args, collect them as trailing CLI
+    # arguments in a list. *args can't have a default value, so it can never be
+    # an option.
+    if param.kind is param.VAR_POSITIONAL:
+        # TODO: consider depluralizing metavar/name here.
+        arg_spec['nargs'] = '*'
+
+    # Add description.
+    if description is not None:
+        arg_spec['help'] = description
+
+    # Get the --flags
+    flags = []
+    name = param.name
+
+    if is_option:
+        # Add the first letter as a -short option.
+        for letter in name[0], name[0].swapcase():
+            if letter not in used_char_args:
+                used_char_args.add(letter)
+                flags.append('-{}'.format(letter))
+                break
+
+        # If the parameter is a --long option, or is a -short option that
+        # somehow failed to get a flag, add it.
+        if len(name) > 1 or not flags:
+            flags.append('--{}'.format(name))
+
+        arg_spec['dest'] = name
+    else:
+        flags.append(name)
+
+    parser.add_argument(*flags, **arg_spec)
+
+    # Create the --no- version for boolean switches
+    if add_nos and arg_type is bool:
+        parser.add_argument(
+            '--no-{}'.format(name),
+            action='store_const',
+            dest=name,
+            const=default if default is not _empty else False)
+
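
To make the boolean handling concrete, here is a hedged sketch of what the function produces for a hypothetical `verbose=False` parameter (the `example` function is not part of the vendored module):

    from argparse import ArgumentParser
    from inspect import signature

    def example(verbose=False):  # hypothetical function under inspection
        pass

    parser = ArgumentParser()
    used_char_args = {'h'}  # 'h' is reserved for --help, as in make_parser below
    for param in signature(example).parameters.values():
        _add_arguments(param, parser, used_char_args, add_nos=True)

    # -v/--verbose is a store_true switch; --no-verbose resets it. The last
    # flag on the command line wins:
    print(parser.parse_args(['--verbose', '--no-verbose']))  # Namespace(verbose=False)
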
+
+def make_parser(func_sig, description, epilog, add_nos):
+    '''
+    Given the signature of a function, create an ArgumentParser
+    '''
+    parser = ArgumentParser(description=description, epilog=epilog)
+
+    used_char_args = {'h'}
+
+    # Arrange the params so that single-character arguments are first. This
+    # ensures they don't have to get --long versions. sorted is stable, so the
+    # parameters will otherwise still be in relative order.
+    params = sorted(
+        func_sig.parameters.values(),
+        key=lambda param: len(param.name) > 1)
+
+    for param in params:
+        _add_arguments(param, parser, used_char_args, add_nos)
+
+    return parser
+
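
A brief sketch of that ordering guarantee, with hypothetical parameter names that collide on their first letter:

    from inspect import signature

    def cmd(n=1, name='world'):  # hypothetical; both names start with 'n'
        pass

    parser = make_parser(signature(cmd), 'demo', '', add_nos=False)
    # 'n' sorts first and claims -n; 'name' falls back to -N and --name.
    print(parser.parse_args(['-n', '3', '--name', 'demo']))  # Namespace(n=3, name='demo')
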
+
+_DOCSTRING_SPLIT = compile_regex(r'\n\s*-{4,}\s*\n')
+
+
+def parse_docstring(docstring):
+    '''
+    Given a docstring, parse it into a description and epilog part
+    '''
+    if docstring is None:
+        return '', ''
+
+    parts = _DOCSTRING_SPLIT.split(docstring)
+
+    if len(parts) == 1:
+        return docstring, ''
+    elif len(parts) == 2:
+        return parts[0], parts[1]
+    else:
+        raise TooManySplitsError()
+
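
For example, with a hypothetical docstring split by the `-{4,}` pattern above:

    doc = "Do the thing.\n----\nNotes shown after the argument help."
    description, epilog = parse_docstring(doc)
    assert description == "Do the thing."
    assert epilog == "Notes shown after the argument help."
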
+
+def autoparse(
+        func=None, *,
+        description=None,
+        epilog=None,
+        add_nos=False,
+        parser=None):
+    '''
+    This decorator converts a function that takes normal arguments into a
+    function which takes a single optional argument, argv, parses it using an
+    argparse.ArgumentParser, and calls the underlying function with the parsed
+    arguments. If argv is not given, sys.argv[1:] is used. This is so that the
+    function can be used as a setuptools entry point, as well as a normal main
+    function. sys.argv[1:] is not evaluated until the function is called, to
+    allow injecting different arguments for testing.
+
+    It uses the argument signature of the function to create an
+    ArgumentParser. Parameters without defaults become positional parameters,
+    while parameters *with* defaults become --options. Use annotations to set
+    the type of the parameter.
+
+    The `description` and `epilog` parameters correspond to the same respective
+    argparse parameters. If no description is given, it defaults to the
+    decorated function's docstring, if present.
+
+    If add_nos is True, every boolean option (that is, every parameter with a
+    default of True/False or a type of bool) will have a --no- version created
+    as well, which inverts the option. For instance, the --verbose option will
+    have a --no-verbose counterpart. These are not mutually exclusive;
+    whichever one appears last in the argument list will have precedence.
+
+    If a parser is given, it is used instead of one generated from the function
+    signature. In this case, no parser is created; instead, the given parser is
+    used to parse the argv argument. The parser's results' argument names must
+    match up with the parameter names of the decorated function.
+
+    The decorated function is attached to the result as the `func` attribute,
+    and the parser is attached as the `parser` attribute.
+    '''
+
+    # If @autoparse(...) is used instead of @autoparse
+    if func is None:
+        return lambda f: autoparse(
+            f, description=description,
+            epilog=epilog,
+            add_nos=add_nos,
+            parser=parser)
+
+    func_sig = signature(func)
+
+    docstr_description, docstr_epilog = parse_docstring(getdoc(func))
+
+    if parser is None:
+        parser = make_parser(
+            func_sig,
+            description or docstr_description,
+            epilog or docstr_epilog,
+            add_nos)
+
+    @wraps(func)
+    def autoparse_wrapper(argv=None):
+        if argv is None:
+            argv = sys.argv[1:]
+
+        # Get empty argument binding, to fill with parsed arguments. This
+        # object does all the heavy lifting of turning named arguments into
+        # correctly bound *args and **kwargs.
+        parsed_args = func_sig.bind_partial()
+        parsed_args.arguments.update(vars(parser.parse_args(argv)))
+
+        return func(*parsed_args.args, **parsed_args.kwargs)
+
+    # TODO: attach an updated __signature__ to autoparse_wrapper, just in case.
+
+    # Attach the wrapped function and parser, and return the wrapper.
+    autoparse_wrapper.func = func
+    autoparse_wrapper.parser = parser
+    return autoparse_wrapper
+
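
A hedged usage sketch; the `hello` function, its annotations, and the argv list are all hypothetical:

    @autoparse
    def hello(name='world', *, shout: (bool, 'uppercase the greeting') = False):
        """Print a friendly greeting."""
        msg = 'Hello, {}!'.format(name)
        print(msg.upper() if shout else msg)

    hello(['--name', 'autocommand', '--shout'])  # prints 'HELLO, AUTOCOMMAND!'
    # hello() with no argument would parse sys.argv[1:] instead, so the same
    # wrapper also works as a console-script entry point.
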
+
+@contextmanager
+def smart_open(filename_or_file, *args, **kwargs):
+    '''
+    This context manager allows you to open a filename, or to pass through an
+    already-existing file object, like sys.stdout, which shouldn't be closed
+    at the end of the context. If the filename argument is a str, bytes, or
+    int, the file object is created via a call to open with the given *args
+    and **kwargs, sent to the context, and closed at the end of the context,
+    just like "with open(filename) as f:". If it isn't one of the openable
+    types, the object is simply sent to the context unchanged, and left
+    unclosed at the end of the context. Example:
+
+        def work_with_file(name=sys.stdout):
+            with smart_open(name) as f:
+                # Works correctly if name is a str filename or sys.stdout
+                print("Some stuff", file=f)
+                # If it was a filename, f is closed at the end here.
+    '''
+    if isinstance(filename_or_file, (str, bytes, int)):
+        with open(filename_or_file, *args, **kwargs) as file:
+            yield file
+    else:
+        yield filename_or_file
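
A complementary sketch (the path is hypothetical); only the first block closes its handle on exit:

    import sys

    with smart_open('/tmp/report.txt', 'w') as f:  # opened here, closed on exit
        print('to a file', file=f)

    with smart_open(sys.stderr) as f:  # passed through unchanged, left open
        print('to a stream', file=f)
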
diff --git a/pkg_resources/_vendor/autocommand/errors.py b/pkg_resources/_vendor/autocommand/errors.py
new file mode 100644
index 0000000000..2570607399
--- /dev/null
+++ b/pkg_resources/_vendor/autocommand/errors.py
@@ -0,0 +1,23 @@
+# Copyright 2014-2016 Nathan West
+#
+# This file is part of autocommand.
+#
+# autocommand is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# autocommand is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with autocommand.  If not, see <https://www.gnu.org/licenses/>.
+
+
+class AutocommandError(Exception):
+    '''Base class for autocommand exceptions'''
+    pass
+
+# Individual modules will define errors specific to that module.
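
Per that convention, a module-specific error is just a subclass (the name here is hypothetical):

    class ExampleModuleError(AutocommandError):
        '''An error raised by one specific autocommand module.'''
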
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
deleted file mode 100644
index a6a44d8fcc..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/RECORD
+++ /dev/null
@@ -1,9 +0,0 @@
-backports.tarfile-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-backports.tarfile-1.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-backports.tarfile-1.0.0.dist-info/METADATA,sha256=XlT7JAFR04zDMIjs-EFhqc0CkkVyeh-SiVUoKXONXJ0,1876
-backports.tarfile-1.0.0.dist-info/RECORD,,
-backports.tarfile-1.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports.tarfile-1.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-backports.tarfile-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
-backports/__pycache__/tarfile.cpython-312.pyc,,
-backports/tarfile.py,sha256=IO3YX_ZYqn13VOi-3QLM0lnktn102U4d9wUrHc230LY,106920
diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/INSTALLER b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/INSTALLER
rename to pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/LICENSE
rename to pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
similarity index 83%
rename from pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
rename to pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
index e7b64c87f8..db0a2dcdbe 100644
--- a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/METADATA
+++ b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
@@ -1,16 +1,16 @@
 Metadata-Version: 2.1
 Name: backports.tarfile
-Version: 1.0.0
+Version: 1.2.0
 Summary: Backport of CPython tarfile module
-Home-page: https://github.com/jaraco/backports.tarfile
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
+Author-email: "Jason R. Coombs" 
+Project-URL: Homepage, https://github.com/jaraco/backports.tarfile
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
 License-File: LICENSE
 Provides-Extra: docs
 Requires-Dist: sphinx >=3.5 ; extra == 'docs'
@@ -19,10 +19,12 @@ Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
 Requires-Dist: furo ; extra == 'docs'
 Requires-Dist: sphinx-lint ; extra == 'docs'
 Provides-Extra: testing
-Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'testing'
 Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
 Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: jaraco.test ; extra == 'testing'
+Requires-Dist: pytest !=8.0.* ; extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
    :target: https://pypi.org/project/backports.tarfile
diff --git a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
new file mode 100644
index 0000000000..536dc2f09e
--- /dev/null
+++ b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
@@ -0,0 +1,17 @@
+backports.tarfile-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.tarfile-1.2.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+backports.tarfile-1.2.0.dist-info/METADATA,sha256=ghXFTq132dxaEIolxr3HK1mZqm9iyUmaRANZQSr6WlE,2020
+backports.tarfile-1.2.0.dist-info/RECORD,,
+backports.tarfile-1.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.tarfile-1.2.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+backports.tarfile-1.2.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81
+backports/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/__init__.py,sha256=Pwf2qUIfB0SolJPCKcx3vz3UEu_aids4g4sAfxy94qg,108491
+backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59
+backports/tarfile/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/__pycache__/__main__.cpython-312.pyc,,
+backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports/tarfile/compat/__pycache__/__init__.cpython-312.pyc,,
+backports/tarfile/compat/__pycache__/py38.cpython-312.pyc,,
+backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/REQUESTED
rename to pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/WHEEL
rename to pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL
diff --git a/pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt
similarity index 100%
rename from pkg_resources/_vendor/backports.tarfile-1.0.0.dist-info/top_level.txt
rename to pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt
diff --git a/pkg_resources/_vendor/backports/__init__.py b/pkg_resources/_vendor/backports/__init__.py
index e69de29bb2..0d1f7edf5d 100644
--- a/pkg_resources/_vendor/backports/__init__.py
+++ b/pkg_resources/_vendor/backports/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore
diff --git a/pkg_resources/_vendor/backports/tarfile.py b/pkg_resources/_vendor/backports/tarfile/__init__.py
similarity index 96%
rename from pkg_resources/_vendor/backports/tarfile.py
rename to pkg_resources/_vendor/backports/tarfile/__init__.py
index a7a9a6e7b9..8c16881cb3 100644
--- a/pkg_resources/_vendor/backports/tarfile.py
+++ b/pkg_resources/_vendor/backports/tarfile/__init__.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 #-------------------------------------------------------------------
 # tarfile.py
 #-------------------------------------------------------------------
@@ -46,7 +45,8 @@
 import struct
 import copy
 import re
-import warnings
+
+from .compat.py38 import removesuffix
 
 try:
     import pwd
@@ -637,6 +637,10 @@ def __init__(self, fileobj, offset, size, name, blockinfo=None):
     def flush(self):
         pass
 
+    @property
+    def mode(self):
+        return 'rb'
+
     def readable(self):
         return True
 
@@ -873,7 +877,7 @@ class TarInfo(object):
         pax_headers = ('A dictionary containing key-value pairs of an '
                        'associated pax extended header.'),
         sparse = 'Sparse member information.',
-        tarfile = None,
+        _tarfile = None,
         _sparse_structs = None,
         _link_target = None,
         )
@@ -902,6 +906,24 @@ def __init__(self, name=""):
         self.sparse = None      # sparse member information
         self.pax_headers = {}   # pax header information
 
+    @property
+    def tarfile(self):
+        import warnings
+        warnings.warn(
+            'The undocumented "tarfile" attribute of TarInfo objects '
+            + 'is deprecated and will be removed in Python 3.16',
+            DeprecationWarning, stacklevel=2)
+        return self._tarfile
+
+    @tarfile.setter
+    def tarfile(self, tarfile):
+        import warnings
+        warnings.warn(
+            'The undocumented "tarfile" attribute of TarInfo objects '
+            + 'is deprecated and will be removed in Python 3.16',
+            DeprecationWarning, stacklevel=2)
+        self._tarfile = tarfile
+
     @property
     def path(self):
         'In pax headers, "name" is called "path".'
@@ -1196,7 +1218,7 @@ def _create_pax_generic_header(cls, pax_headers, type, encoding):
         for keyword, value in pax_headers.items():
             keyword = keyword.encode("utf-8")
             if binary:
-                # Try to restore the original byte representation of `value'.
+                # Try to restore the original byte representation of 'value'.
                 # Needless to say, that the encoding must match the string.
                 value = value.encode(encoding, "surrogateescape")
             else:
@@ -1365,7 +1387,7 @@ def _proc_gnulong(self, tarfile):
         # Remove redundant slashes from directories. This is to be consistent
         # with frombuf().
         if next.isdir():
-            next.name = next.name.removesuffix("/")
+            next.name = removesuffix(next.name, "/")
 
         return next
 
@@ -1641,14 +1663,14 @@ class TarFile(object):
     def __init__(self, name=None, mode="r", fileobj=None, format=None,
             tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
             errors="surrogateescape", pax_headers=None, debug=None,
-            errorlevel=None, copybufsize=None):
-        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+            errorlevel=None, copybufsize=None, stream=False):
+        """Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to
            read from an existing archive, 'a' to append data to an existing
-           file or 'w' to create a new file overwriting an existing one. `mode'
+           file or 'w' to create a new file overwriting an existing one. 'mode'
            defaults to 'r'.
-           If `fileobj' is given, it is used for reading or writing data. If it
-           can be determined, `mode' is overridden by `fileobj's mode.
-           `fileobj' is not closed, when TarFile is closed.
+           If 'fileobj' is given, it is used for reading or writing data. If it
+           can be determined, 'mode' is overridden by 'fileobj's mode.
+           'fileobj' is not closed, when TarFile is closed.
         """
         modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
         if mode not in modes:
@@ -1673,6 +1695,8 @@ def __init__(self, name=None, mode="r", fileobj=None, format=None,
         self.name = os.path.abspath(name) if name else None
         self.fileobj = fileobj
 
+        self.stream = stream
+
         # Init attributes.
         if format is not None:
             self.format = format
@@ -1975,7 +1999,7 @@ def close(self):
                 self.fileobj.close()
 
     def getmember(self, name):
-        """Return a TarInfo object for member ``name``. If ``name`` can not be
+        """Return a TarInfo object for member 'name'. If 'name' can not be
            found in the archive, KeyError is raised. If a member occurs more
            than once in the archive, its last occurrence is assumed to be the
            most up-to-date version.
@@ -2003,9 +2027,9 @@ def getnames(self):
 
     def gettarinfo(self, name=None, arcname=None, fileobj=None):
         """Create a TarInfo object from the result of os.stat or equivalent
-           on an existing file. The file is either named by ``name``, or
-           specified as a file object ``fileobj`` with a file descriptor. If
-           given, ``arcname`` specifies an alternative name for the file in the
+           on an existing file. The file is either named by 'name', or
+           specified as a file object 'fileobj' with a file descriptor. If
+           given, 'arcname' specifies an alternative name for the file in the
            archive, otherwise, the name is taken from the 'name' attribute of
            'fileobj', or the 'name' argument. The name should be a text
            string.
@@ -2029,7 +2053,7 @@ def gettarinfo(self, name=None, arcname=None, fileobj=None):
         # Now, fill the TarInfo object with
         # information specific for the file.
         tarinfo = self.tarinfo()
-        tarinfo.tarfile = self  # Not needed
+        tarinfo._tarfile = self  # To be removed in 3.16.
 
         # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
         if fileobj is None:
@@ -2101,11 +2125,15 @@ def gettarinfo(self, name=None, arcname=None, fileobj=None):
         return tarinfo
 
     def list(self, verbose=True, *, members=None):
-        """Print a table of contents to sys.stdout. If ``verbose`` is False, only
-           the names of the members are printed. If it is True, an `ls -l'-like
-           output is produced. ``members`` is optional and must be a subset of the
+        """Print a table of contents to sys.stdout. If 'verbose' is False, only
+           the names of the members are printed. If it is True, an 'ls -l'-like
+           output is produced. 'members' is optional and must be a subset of the
            list returned by getmembers().
         """
+        # Convert tarinfo type to stat type.
+        type2mode = {REGTYPE: stat.S_IFREG, SYMTYPE: stat.S_IFLNK,
+                     FIFOTYPE: stat.S_IFIFO, CHRTYPE: stat.S_IFCHR,
+                     DIRTYPE: stat.S_IFDIR, BLKTYPE: stat.S_IFBLK}
         self._check()
 
         if members is None:
@@ -2115,7 +2143,8 @@ def list(self, verbose=True, *, members=None):
                 if tarinfo.mode is None:
                     _safe_print("??????????")
                 else:
-                    _safe_print(stat.filemode(tarinfo.mode))
+                    modetype = type2mode.get(tarinfo.type, 0)
+                    _safe_print(stat.filemode(modetype | tarinfo.mode))
                 _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
                                        tarinfo.gname or tarinfo.gid))
                 if tarinfo.ischr() or tarinfo.isblk():
@@ -2139,11 +2168,11 @@ def list(self, verbose=True, *, members=None):
             print()
 
     def add(self, name, arcname=None, recursive=True, *, filter=None):
-        """Add the file ``name`` to the archive. ``name`` may be any type of file
-           (directory, fifo, symbolic link, etc.). If given, ``arcname``
+        """Add the file 'name' to the archive. 'name' may be any type of file
+           (directory, fifo, symbolic link, etc.). If given, 'arcname'
            specifies an alternative name for the file in the archive.
            Directories are added recursively by default. This can be avoided by
-           setting ``recursive`` to False. ``filter`` is a function
+           setting 'recursive' to False. 'filter' is a function
            that expects a TarInfo object argument and returns the changed
            TarInfo object, if it returns None the TarInfo object will be
            excluded from the archive.
@@ -2190,13 +2219,16 @@ def add(self, name, arcname=None, recursive=True, *, filter=None):
             self.addfile(tarinfo)
 
     def addfile(self, tarinfo, fileobj=None):
-        """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
-           given, it should be a binary file, and tarinfo.size bytes are read
-           from it and added to the archive. You can create TarInfo objects
-           directly, or by using gettarinfo().
+        """Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents
+           a non zero-size regular file, the 'fileobj' argument should be a binary file,
+           and tarinfo.size bytes are read from it and added to the archive.
+           You can create TarInfo objects directly, or by using gettarinfo().
         """
         self._check("awx")
 
+        if fileobj is None and tarinfo.isreg() and tarinfo.size != 0:
+            raise ValueError("fileobj not provided for non zero-size regular file")
+
         tarinfo = copy.copy(tarinfo)
 
         buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
@@ -2218,11 +2250,12 @@ def _get_filter_function(self, filter):
         if filter is None:
             filter = self.extraction_filter
             if filter is None:
+                import warnings
                 warnings.warn(
                     'Python 3.14 will, by default, filter extracted tar '
                     + 'archives and reject files or modify their metadata. '
                     + 'Use the filter argument to control this behavior.',
-                    DeprecationWarning)
+                    DeprecationWarning, stacklevel=3)
                 return fully_trusted_filter
             if isinstance(filter, str):
                 raise TypeError(
@@ -2241,12 +2274,12 @@ def extractall(self, path=".", members=None, *, numeric_owner=False,
                    filter=None):
         """Extract all members from the archive to the current working
            directory and set owner, modification time and permissions on
-           directories afterwards. `path' specifies a different directory
-           to extract to. `members' is optional and must be a subset of the
-           list returned by getmembers(). If `numeric_owner` is True, only
+           directories afterwards. 'path' specifies a different directory
+           to extract to. 'members' is optional and must be a subset of the
+           list returned by getmembers(). If 'numeric_owner' is True, only
            the numbers for user/group names are used and not the names.
 
-           The `filter` function will be called on each member just
+           The 'filter' function will be called on each member just
            before extraction.
            It can return a changed TarInfo or None to skip the member.
            String names of common filters are accepted.
@@ -2286,13 +2319,13 @@ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
                 filter=None):
         """Extract a member from the archive to the current working directory,
            using its full name. Its file information is extracted as accurately
-           as possible. `member' may be a filename or a TarInfo object. You can
-           specify a different directory using `path'. File attributes (owner,
-           mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
+           as possible. 'member' may be a filename or a TarInfo object. You can
+           specify a different directory using 'path'. File attributes (owner,
+           mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner'
            is True, only the numbers for user/group names are used and not
            the names.
 
-           The `filter` function will be called before extraction.
+           The 'filter' function will be called before extraction.
            It can return a changed TarInfo or None to skip the member.
            String names of common filters are accepted.
         """
@@ -2357,10 +2390,10 @@ def _handle_fatal_error(self, e):
             self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
 
     def extractfile(self, member):
-        """Extract a member from the archive as a file object. ``member`` may be
-           a filename or a TarInfo object. If ``member`` is a regular file or
+        """Extract a member from the archive as a file object. 'member' may be
+           a filename or a TarInfo object. If 'member' is a regular file or
            a link, an io.BufferedReader object is returned. For all other
-           existing members, None is returned. If ``member`` does not appear
+           existing members, None is returned. If 'member' does not appear
            in the archive, KeyError is raised.
         """
         self._check("r")
@@ -2404,7 +2437,7 @@ def _extract_member(self, tarinfo, targetpath, set_attrs=True,
         if upperdirs and not os.path.exists(upperdirs):
             # Create directories that are not part of the archive with
             # default permissions.
-            os.makedirs(upperdirs)
+            os.makedirs(upperdirs, exist_ok=True)
 
         if tarinfo.islnk() or tarinfo.issym():
             self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
@@ -2557,7 +2590,8 @@ def chown(self, tarinfo, targetpath, numeric_owner):
                     os.lchown(targetpath, u, g)
                 else:
                     os.chown(targetpath, u, g)
-            except OSError as e:
+            except (OSError, OverflowError) as e:
+                # OverflowError can be raised if an ID doesn't fit in 'id_t'
                 raise ExtractError("could not change owner") from e
 
     def chmod(self, tarinfo, targetpath):
@@ -2640,7 +2674,9 @@ def next(self):
             break
 
         if tarinfo is not None:
-            self.members.append(tarinfo)
+            # if streaming the file we do not want to cache the tarinfo
+            if not self.stream:
+                self.members.append(tarinfo)
         else:
             self._loaded = True
 
@@ -2691,11 +2727,12 @@ def _getmember(self, name, tarinfo=None, normalize=False):
 
     def _load(self):
         """Read through the entire archive file and look for readable
-           members.
+           members. This should not run if the file is set to stream.
         """
-        while self.next() is not None:
-            pass
-        self._loaded = True
+        if not self.stream:
+            while self.next() is not None:
+                pass
+            self._loaded = True
 
     def _check(self, mode=None):
         """Check if TarFile is still open, and if the operation's mode
diff --git a/pkg_resources/_vendor/backports/tarfile/__main__.py b/pkg_resources/_vendor/backports/tarfile/__main__.py
new file mode 100644
index 0000000000..daf5509086
--- /dev/null
+++ b/pkg_resources/_vendor/backports/tarfile/__main__.py
@@ -0,0 +1,5 @@
+from . import main
+
+
+if __name__ == '__main__':
+    main()
diff --git a/pkg_resources/_vendor/__init__.py b/pkg_resources/_vendor/backports/tarfile/compat/__init__.py
similarity index 100%
rename from pkg_resources/_vendor/__init__.py
rename to pkg_resources/_vendor/backports/tarfile/compat/__init__.py
diff --git a/pkg_resources/_vendor/backports/tarfile/compat/py38.py b/pkg_resources/_vendor/backports/tarfile/compat/py38.py
new file mode 100644
index 0000000000..20fbbfc1c0
--- /dev/null
+++ b/pkg_resources/_vendor/backports/tarfile/compat/py38.py
@@ -0,0 +1,24 @@
+import sys
+
+
+if sys.version_info < (3, 9):
+
+    def removesuffix(self, suffix):
+        # suffix='' should not call self[:-0].
+        if suffix and self.endswith(suffix):
+            return self[: -len(suffix)]
+        else:
+            return self[:]
+
+    def removeprefix(self, prefix):
+        if self.startswith(prefix):
+            return self[len(prefix) :]
+        else:
+            return self[:]
+else:
+
+    def removesuffix(self, suffix):
+        return self.removesuffix(suffix)
+
+    def removeprefix(self, prefix):
+        return self.removeprefix(prefix)
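
The shims are module-level functions taking the string as the first argument, so call sites read `removesuffix(name, '/')` rather than `name.removesuffix('/')`; a quick sketch:

    from backports.tarfile.compat.py38 import removeprefix, removesuffix

    assert removesuffix('usr/', '/') == 'usr'
    assert removesuffix('usr', '/') == 'usr'   # no-op when the suffix is absent
    assert removeprefix('./etc', './') == 'etc'
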
diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD b/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD
deleted file mode 100644
index ba764991ee..0000000000
--- a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/RECORD
+++ /dev/null
@@ -1,77 +0,0 @@
-importlib_resources-5.10.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-importlib_resources-5.10.2.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-importlib_resources-5.10.2.dist-info/METADATA,sha256=Xo5ntATvDYUxdmW8tr8kxtfdiOC9889mOk-LE1LtZfI,4111
-importlib_resources-5.10.2.dist-info/RECORD,,
-importlib_resources-5.10.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources-5.10.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-importlib_resources-5.10.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
-importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506
-importlib_resources/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/__pycache__/_adapters.cpython-312.pyc,,
-importlib_resources/__pycache__/_common.cpython-312.pyc,,
-importlib_resources/__pycache__/_compat.cpython-312.pyc,,
-importlib_resources/__pycache__/_itertools.cpython-312.pyc,,
-importlib_resources/__pycache__/_legacy.cpython-312.pyc,,
-importlib_resources/__pycache__/abc.cpython-312.pyc,,
-importlib_resources/__pycache__/readers.cpython-312.pyc,,
-importlib_resources/__pycache__/simple.cpython-312.pyc,,
-importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
-importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457
-importlib_resources/_compat.py,sha256=dSadF6WPt8MwOqSm_NIOQPhw4x0iaMYTWxi-XS93p7M,2923
-importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884
-importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481
-importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140
-importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581
-importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576
-importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/_compat.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/_path.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/update-zips.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/util.cpython-312.pyc,,
-importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708
-importlib_resources/tests/_path.py,sha256=yZyWsQzJZQ1Z8ARAxWkjAdaVVsjlzyqxO0qjBUofJ8M,1039
-importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
-importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
-importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
-importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
-importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
-importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
-importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260
-importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968
-importlib_resources/tests/test_files.py,sha256=1Y8da-g0xOQLzuREDYUiRc_qhWlvFNeydW_mUH7l15w,3251
-importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565
-importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103
-importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408
-importlib_resources/tests/test_reader.py,sha256=nPhldbYPq3fXoQs0ZAub4atjhp2lgNyLNv2G1pg6Agw,4480
-importlib_resources/tests/test_resource.py,sha256=EMoarxTEHcrq8R41LQDsndIG8Idtm4I_LpN8DYpHtT0,8478
-importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417
-importlib_resources/tests/util.py,sha256=ARAlxZ47wC-lgR7PGlmgBoi4HnhzcykD5Is2-TAwY0I,4873
-importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876
-importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/LICENSE b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/LICENSE
rename to pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/METADATA b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/METADATA
similarity index 67%
rename from pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/METADATA
rename to pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/METADATA
index a9995f09a3..b088e721d2 100644
--- a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/METADATA
+++ b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
-Name: importlib-resources
-Version: 5.10.2
+Name: importlib_resources
+Version: 6.4.0
 Summary: Read resources from Python packages
 Home-page: https://github.com/python/importlib_resources
 Author: Barry Warsaw
@@ -11,43 +11,44 @@ Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.7
+Requires-Python: >=3.8
 License-File: LICENSE
-Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10"
+Requires-Dist: zipp >=3.1.0 ; python_version < "3.10"
 Provides-Extra: docs
-Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Requires-Dist: sphinx >=3.5 ; extra == 'docs'
+Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
+Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
 Requires-Dist: furo ; extra == 'docs'
 Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
 Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: flake8 (<5) ; extra == 'testing'
+Requires-Dist: pytest >=6 ; extra == 'testing'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
+Requires-Dist: zipp >=3.17 ; extra == 'testing'
+Requires-Dist: jaraco.test >=5.4 ; extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/importlib_resources.svg
    :target: https://pypi.org/project/importlib_resources
 
 .. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg
 
-.. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg
+.. image:: https://github.com/python/importlib_resources/actions/workflows/main.yml/badge.svg
    :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22
    :alt: tests
 
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
 
 .. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest
    :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest
 
-.. image:: https://img.shields.io/badge/skeleton-2022-informational
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
    :target: https://blog.jaraco.com/skeleton
 
 .. image:: https://tidelift.com/badges/package/pypi/importlib-resources
@@ -76,7 +77,9 @@ were contributed to different versions in the standard library:
 
    * - importlib_resources
      - stdlib
-   * - 5.9
+   * - 6.0
+     - 3.13
+   * - 5.12
      - 3.12
    * - 5.7
      - 3.11
@@ -95,10 +98,3 @@ Available as part of the Tidelift Subscription.
 This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
 
 `Learn more `_.
-
-Security Contact
-================
-
-To report a security vulnerability, please use the
-`Tidelift security contact `_.
-Tidelift will coordinate the fix and disclosure.
diff --git a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/RECORD b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/RECORD
new file mode 100644
index 0000000000..18888dea71
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/RECORD
@@ -0,0 +1,89 @@
+importlib_resources-6.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+importlib_resources-6.4.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+importlib_resources-6.4.0.dist-info/METADATA,sha256=g4eM2LuL0OiZcUVND0qwDJUpE29gOvtO3BSPXTbO9Fk,3944
+importlib_resources-6.4.0.dist-info/RECORD,,
+importlib_resources-6.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources-6.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+importlib_resources-6.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
+importlib_resources/__init__.py,sha256=uyp1kzYR6SawQBsqlyaXXfIxJx4Z2mM8MjmZn8qq2Gk,505
+importlib_resources/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/__pycache__/_adapters.cpython-312.pyc,,
+importlib_resources/__pycache__/_common.cpython-312.pyc,,
+importlib_resources/__pycache__/_itertools.cpython-312.pyc,,
+importlib_resources/__pycache__/abc.cpython-312.pyc,,
+importlib_resources/__pycache__/functional.cpython-312.pyc,,
+importlib_resources/__pycache__/readers.cpython-312.pyc,,
+importlib_resources/__pycache__/simple.cpython-312.pyc,,
+importlib_resources/_adapters.py,sha256=vprJGbUeHbajX6XCuMP6J3lMrqCi-P_MTlziJUR7jfk,4482
+importlib_resources/_common.py,sha256=blt4-ZtHnbUPzQQyPP7jLGgl_86btIW5ZhIsEhclhoA,5571
+importlib_resources/_itertools.py,sha256=eDisV6RqiNZOogLSXf6LOGHOYc79FGgPrKNLzFLmCrU,1277
+importlib_resources/abc.py,sha256=UKNU9ncEDkZRB3txcGb3WLxsL2iju9JbaLTI-dfLE_4,5162
+importlib_resources/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/compat/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/compat/__pycache__/py38.cpython-312.pyc,,
+importlib_resources/compat/__pycache__/py39.cpython-312.pyc,,
+importlib_resources/compat/py38.py,sha256=MWhut3XsAJwBYUaa5Qb2AoCrZNqcQjVThP-P1uBoE_4,230
+importlib_resources/compat/py39.py,sha256=Wfln4uQUShNz1XdCG-toG6_Y0WrlUmO9JzpvtcfQ-Cw,184
+importlib_resources/functional.py,sha256=mLU4DwSlh8_2IXWqwKOfPVxyRqAEpB3B4XTfRxr3X3M,2651
+importlib_resources/future/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/future/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/future/__pycache__/adapters.cpython-312.pyc,,
+importlib_resources/future/adapters.py,sha256=1-MF2VRcCButhcC1OMfZILU9o3kwZ4nXB2lurXpaIAw,2940
+importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/readers.py,sha256=WNKurBHHVu9EVtUhWkOj2fxH50HP7uanNFuupAqH2S8,5863
+importlib_resources/simple.py,sha256=CQ3TiIMFiJs_80o-7xJL1EpbUUVna4-NGDrSTQ3HW2Y,2584
+importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/_path.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_custom.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_functional.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/util.cpython-312.pyc,,
+importlib_resources/tests/__pycache__/zip.cpython-312.pyc,,
+importlib_resources/tests/_path.py,sha256=nkv3ek7D1U898v921rYbldDCtKri2oyYOi3EJqGjEGU,1289
+importlib_resources/tests/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/compat/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/compat/__pycache__/py312.cpython-312.pyc,,
+importlib_resources/tests/compat/__pycache__/py39.cpython-312.pyc,,
+importlib_resources/tests/compat/py312.py,sha256=qcWjpZhQo2oEsdwIlRRQHrsMGDltkFTnETeG7fLdUS8,364
+importlib_resources/tests/compat/py39.py,sha256=lRTk0RWAOEb9RzAgvdRnqJUGCBLc3qoFQwzuJSa_zP4,329
+importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data01/subdirectory/binary.file,sha256=xtRM9Bj2EOP-nh2SlP9D3vgcbNytbLsYIM_0jTqkNV0,4
+importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
+importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt,sha256=jnrBBztxYrtQck7cmVnc4xQVO4-agzAZDGSFkAWtlFw,10
+importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,,
+importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
+importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/namespacedata01/subdirectory/binary.file,sha256=cbkhEL8TXIVYHIoSj2oZwPasp1KwxskeNXGJnPCbFF0,4
+importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/test_compatibilty_files.py,sha256=95N_R7aik8cvnE6sBJpsxmP0K5plOWRIJDgbalD-Hpw,3314
+importlib_resources/tests/test_contents.py,sha256=70HW3mL_hv05Emv-OgdmwoLhXxjtuVxiWVaUpgRaRWA,930
+importlib_resources/tests/test_custom.py,sha256=QrHZqIWl0e-fsQRfm0ych8stOlKJOsAIU3rK6QOcyN0,1221
+importlib_resources/tests/test_files.py,sha256=OcShYu33kCcyXlDyZSVPkJNE08h-N_4bQOLV2QaSqX0,3472
+importlib_resources/tests/test_functional.py,sha256=ByCVViAwb2PIlKvDNJEqTZ0aLZGpFl5qa7CMCX-7HKM,8591
+importlib_resources/tests/test_open.py,sha256=ccmzbOeEa6zTd4ymZZ8yISrecfuYV0jhon-Vddqysu4,2778
+importlib_resources/tests/test_path.py,sha256=x8r2gJxG3hFM9xCOFNkgmHYXxsMldMLTSW_AZYf1l-A,2009
+importlib_resources/tests/test_read.py,sha256=7tsILQ2NoqVGFQxhHqxBwc5hWcN8b_3idojCsszTNfQ,3112
+importlib_resources/tests/test_reader.py,sha256=IcIUXaiPAtuahGV4_ZT4YXFLMMsJmcM1iOxqdIH2Aa4,5001
+importlib_resources/tests/test_resource.py,sha256=fcF8WgZ6rDCTRFnxtAUbdiaNe4G23yGovT1nb2dc7ls,7823
+importlib_resources/tests/util.py,sha256=vjVzEyX0X2RkTN-wGiQiplayp9sZom4JDjJinTNewos,4745
+importlib_resources/tests/zip.py,sha256=2MKmF8-osXBJSnqcUTuAUek_-tSB3iKmIT9qPhcsOsM,783
diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED
similarity index 100%
rename from pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/REQUESTED
rename to pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
similarity index 65%
rename from pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
rename to pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
index ba48cbcf92..bab98d6758 100644
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/WHEEL
+++ b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.41.3)
+Generator: bdist_wheel (0.43.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/top_level.txt b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
similarity index 100%
rename from pkg_resources/_vendor/importlib_resources-5.10.2.dist-info/top_level.txt
rename to pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
diff --git a/pkg_resources/_vendor/importlib_resources/__init__.py b/pkg_resources/_vendor/importlib_resources/__init__.py
index 34e3a9950c..0d029abd63 100644
--- a/pkg_resources/_vendor/importlib_resources/__init__.py
+++ b/pkg_resources/_vendor/importlib_resources/__init__.py
@@ -4,17 +4,17 @@
     as_file,
     files,
     Package,
+    Anchor,
 )
 
-from ._legacy import (
+from .functional import (
     contents,
+    is_resource,
     open_binary,
-    read_binary,
     open_text,
-    read_text,
-    is_resource,
     path,
-    Resource,
+    read_binary,
+    read_text,
 )
 
 from .abc import ResourceReader
@@ -22,11 +22,11 @@
 
 __all__ = [
     'Package',
-    'Resource',
+    'Anchor',
     'ResourceReader',
     'as_file',
-    'contents',
     'files',
+    'contents',
     'is_resource',
     'open_binary',
     'open_text',
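
After this change, the functional helpers come from the new `functional` module and `Anchor` replaces the removed `Resource`; a hedged usage sketch with a hypothetical package:

    import importlib_resources as ir

    root = ir.files('mypkg')  # Traversable anchor for the package
    text = ir.read_text('mypkg', 'config.txt', encoding='utf-8')  # functional helper
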
diff --git a/pkg_resources/_vendor/importlib_resources/_adapters.py b/pkg_resources/_vendor/importlib_resources/_adapters.py
index ea363d86a5..50688fbb66 100644
--- a/pkg_resources/_vendor/importlib_resources/_adapters.py
+++ b/pkg_resources/_vendor/importlib_resources/_adapters.py
@@ -34,9 +34,7 @@ def _io_wrapper(file, mode='r', *args, **kwargs):
         return TextIOWrapper(file, *args, **kwargs)
     elif mode == 'rb':
         return file
-    raise ValueError(
-        "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
-    )
+    raise ValueError(f"Invalid mode value '{mode}', only 'r' and 'rb' are supported")
 
 
 class CompatibilityFiles:
diff --git a/pkg_resources/_vendor/importlib_resources/_common.py b/pkg_resources/_vendor/importlib_resources/_common.py
index 3c6de1cfb2..8df6b39e41 100644
--- a/pkg_resources/_vendor/importlib_resources/_common.py
+++ b/pkg_resources/_vendor/importlib_resources/_common.py
@@ -12,8 +12,6 @@
 from typing import Union, Optional, cast
 from .abc import ResourceReader, Traversable
 
-from ._compat import wrap_spec
-
 Package = Union[types.ModuleType, str]
 Anchor = Package
 
@@ -27,6 +25,8 @@ def package_to_anchor(func):
     >>> files('a', 'b')
     Traceback (most recent call last):
     TypeError: files() takes from 0 to 1 positional arguments but 2 were given
+
+    Remove this compatibility in Python 3.14.
     """
     undefined = object()
 
@@ -109,6 +109,9 @@ def from_package(package: types.ModuleType):
     Return a Traversable object for the given package.
 
     """
+    # deferred for performance (python/cpython#109829)
+    from .future.adapters import wrap_spec
+
     spec = wrap_spec(package)
     reader = spec.loader.get_resource_reader(spec.name)
     return reader.files()
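Note: the `package_to_anchor` shim above keeps the old calling conventions for
files() working. A minimal sketch (the package name is hypothetical):

    import importlib_resources as resources

    resources.files('mypkg')          # positional anchor
    resources.files(package='mypkg')  # legacy keyword, still honored (with a warning)
    # resources.files('a', 'b')      -> TypeError, as in the doctest above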
diff --git a/pkg_resources/_vendor/importlib_resources/_compat.py b/pkg_resources/_vendor/importlib_resources/_compat.py
deleted file mode 100644
index 8b5b1d280f..0000000000
--- a/pkg_resources/_vendor/importlib_resources/_compat.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# flake8: noqa
-
-import abc
-import os
-import sys
-import pathlib
-from contextlib import suppress
-from typing import Union
-
-
-if sys.version_info >= (3, 10):
-    from zipfile import Path as ZipPath  # type: ignore
-else:
-    from ..zipp import Path as ZipPath  # type: ignore
-
-
-try:
-    from typing import runtime_checkable  # type: ignore
-except ImportError:
-
-    def runtime_checkable(cls):  # type: ignore
-        return cls
-
-
-try:
-    from typing import Protocol  # type: ignore
-except ImportError:
-    Protocol = abc.ABC  # type: ignore
-
-
-class TraversableResourcesLoader:
-    """
-    Adapt loaders to provide TraversableResources and other
-    compatibility.
-
-    Used primarily for Python 3.9 and earlier where the native
-    loaders do not yet implement TraversableResources.
-    """
-
-    def __init__(self, spec):
-        self.spec = spec
-
-    @property
-    def path(self):
-        return self.spec.origin
-
-    def get_resource_reader(self, name):
-        from . import readers, _adapters
-
-        def _zip_reader(spec):
-            with suppress(AttributeError):
-                return readers.ZipReader(spec.loader, spec.name)
-
-        def _namespace_reader(spec):
-            with suppress(AttributeError, ValueError):
-                return readers.NamespaceReader(spec.submodule_search_locations)
-
-        def _available_reader(spec):
-            with suppress(AttributeError):
-                return spec.loader.get_resource_reader(spec.name)
-
-        def _native_reader(spec):
-            reader = _available_reader(spec)
-            return reader if hasattr(reader, 'files') else None
-
-        def _file_reader(spec):
-            try:
-                path = pathlib.Path(self.path)
-            except TypeError:
-                return None
-            if path.exists():
-                return readers.FileReader(self)
-
-        return (
-            # native reader if it supplies 'files'
-            _native_reader(self.spec)
-            or
-            # local ZipReader if a zip module
-            _zip_reader(self.spec)
-            or
-            # local NamespaceReader if a namespace module
-            _namespace_reader(self.spec)
-            or
-            # local FileReader
-            _file_reader(self.spec)
-            # fallback - adapt the spec ResourceReader to TraversableReader
-            or _adapters.CompatibilityFiles(self.spec)
-        )
-
-
-def wrap_spec(package):
-    """
-    Construct a package spec with traversable compatibility
-    on the spec/loader/reader.
-
-    Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
-    from above for older Python compatibility (<3.10).
-    """
-    from . import _adapters
-
-    return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
-
-
-if sys.version_info >= (3, 9):
-    StrPath = Union[str, os.PathLike[str]]
-else:
-    # PathLike is only subscriptable at runtime in 3.9+
-    StrPath = Union[str, "os.PathLike[str]"]
diff --git a/pkg_resources/_vendor/importlib_resources/_itertools.py b/pkg_resources/_vendor/importlib_resources/_itertools.py
index cce05582ff..7b775ef5ae 100644
--- a/pkg_resources/_vendor/importlib_resources/_itertools.py
+++ b/pkg_resources/_vendor/importlib_resources/_itertools.py
@@ -1,35 +1,38 @@
-from itertools import filterfalse
+# from more_itertools 9.0
+def only(iterable, default=None, too_long=None):
+    """If *iterable* has only one item, return it.
+    If it has zero items, return *default*.
+    If it has more than one item, raise the exception given by *too_long*,
+    which is ``ValueError`` by default.
+    >>> only([], default='missing')
+    'missing'
+    >>> only([1])
+    1
+    >>> only([1, 2])  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    ...
+    ValueError: Expected exactly one item in iterable, but got 1, 2,
+     and perhaps more.
+    >>> only([1, 2], too_long=TypeError)  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    ...
+    TypeError
+    Note that :func:`only` attempts to advance *iterable* twice to ensure there
+    is only one item.  See :func:`spy` or :func:`peekable` to check
+    iterable contents less destructively.
+    """
+    it = iter(iterable)
+    first_value = next(it, default)
 
-from typing import (
-    Callable,
-    Iterable,
-    Iterator,
-    Optional,
-    Set,
-    TypeVar,
-    Union,
-)
-
-# Type and type variable definitions
-_T = TypeVar('_T')
-_U = TypeVar('_U')
-
-
-def unique_everseen(
-    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
-) -> Iterator[_T]:
-    "List unique elements, preserving order. Remember all elements ever seen."
-    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
-    # unique_everseen('ABBCcAD', str.lower) --> A B C D
-    seen: Set[Union[_T, _U]] = set()
-    seen_add = seen.add
-    if key is None:
-        for element in filterfalse(seen.__contains__, iterable):
-            seen_add(element)
-            yield element
+    try:
+        second_value = next(it)
+    except StopIteration:
+        pass
     else:
-        for element in iterable:
-            k = key(element)
-            if k not in seen:
-                seen_add(k)
-                yield element
+        msg = (
+            'Expected exactly one item in iterable, but got {!r}, {!r}, '
+            'and perhaps more.'.format(first_value, second_value)
+        )
+        raise too_long or ValueError(msg)
+
+    return first_value
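Note: the vendored helper behaves as its doctests describe; a quick sketch:

    from importlib_resources._itertools import only

    only([], default='missing')   # -> 'missing'
    only([7])                     # -> 7
    # only([1, 2]) raises ValueError: expected exactly one item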
diff --git a/pkg_resources/_vendor/importlib_resources/_legacy.py b/pkg_resources/_vendor/importlib_resources/_legacy.py
deleted file mode 100644
index b1ea8105da..0000000000
--- a/pkg_resources/_vendor/importlib_resources/_legacy.py
+++ /dev/null
@@ -1,120 +0,0 @@
-import functools
-import os
-import pathlib
-import types
-import warnings
-
-from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any
-
-from . import _common
-
-Package = Union[types.ModuleType, str]
-Resource = str
-
-
-def deprecated(func):
-    @functools.wraps(func)
-    def wrapper(*args, **kwargs):
-        warnings.warn(
-            f"{func.__name__} is deprecated. Use files() instead. "
-            "Refer to https://importlib-resources.readthedocs.io"
-            "/en/latest/using.html#migrating-from-legacy for migration advice.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return func(*args, **kwargs)
-
-    return wrapper
-
-
-def normalize_path(path: Any) -> str:
-    """Normalize a path by ensuring it is a string.
-
-    If the resulting string contains path separators, an exception is raised.
-    """
-    str_path = str(path)
-    parent, file_name = os.path.split(str_path)
-    if parent:
-        raise ValueError(f'{path!r} must be only a file name')
-    return file_name
-
-
-@deprecated
-def open_binary(package: Package, resource: Resource) -> BinaryIO:
-    """Return a file-like object opened for binary reading of the resource."""
-    return (_common.files(package) / normalize_path(resource)).open('rb')
-
-
-@deprecated
-def read_binary(package: Package, resource: Resource) -> bytes:
-    """Return the binary contents of the resource."""
-    return (_common.files(package) / normalize_path(resource)).read_bytes()
-
-
-@deprecated
-def open_text(
-    package: Package,
-    resource: Resource,
-    encoding: str = 'utf-8',
-    errors: str = 'strict',
-) -> TextIO:
-    """Return a file-like object opened for text reading of the resource."""
-    return (_common.files(package) / normalize_path(resource)).open(
-        'r', encoding=encoding, errors=errors
-    )
-
-
-@deprecated
-def read_text(
-    package: Package,
-    resource: Resource,
-    encoding: str = 'utf-8',
-    errors: str = 'strict',
-) -> str:
-    """Return the decoded string of the resource.
-
-    The decoding-related arguments have the same semantics as those of
-    bytes.decode().
-    """
-    with open_text(package, resource, encoding, errors) as fp:
-        return fp.read()
-
-
-@deprecated
-def contents(package: Package) -> Iterable[str]:
-    """Return an iterable of entries in `package`.
-
-    Note that not all entries are resources.  Specifically, directories are
-    not considered resources.  Use `is_resource()` on each entry returned here
-    to check if it is a resource or not.
-    """
-    return [path.name for path in _common.files(package).iterdir()]
-
-
-@deprecated
-def is_resource(package: Package, name: str) -> bool:
-    """True if `name` is a resource inside `package`.
-
-    Directories are *not* resources.
-    """
-    resource = normalize_path(name)
-    return any(
-        traversable.name == resource and traversable.is_file()
-        for traversable in _common.files(package).iterdir()
-    )
-
-
-@deprecated
-def path(
-    package: Package,
-    resource: Resource,
-) -> ContextManager[pathlib.Path]:
-    """A context manager providing a file path object to the resource.
-
-    If the resource does not already exist on its own on the file system,
-    a temporary file will be created. If the file was created, the file
-    will be deleted upon exiting the context manager (no exception is
-    raised if the file was deleted prior to the context manager
-    exiting).
-    """
-    return _common.as_file(_common.files(package) / normalize_path(resource))
diff --git a/pkg_resources/_vendor/importlib_resources/abc.py b/pkg_resources/_vendor/importlib_resources/abc.py
index 23b6aeafe4..7a58dd2f96 100644
--- a/pkg_resources/_vendor/importlib_resources/abc.py
+++ b/pkg_resources/_vendor/importlib_resources/abc.py
@@ -3,8 +3,9 @@
 import itertools
 import pathlib
 from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional
+from typing import runtime_checkable, Protocol
 
-from ._compat import runtime_checkable, Protocol, StrPath
+from .compat.py38 import StrPath
 
 
 __all__ = ["ResourceReader", "Traversable", "TraversableResources"]
diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py b/pkg_resources/_vendor/importlib_resources/compat/__init__.py
similarity index 100%
rename from pkg_resources/_vendor/importlib_resources/tests/zipdata01/__init__.py
rename to pkg_resources/_vendor/importlib_resources/compat/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/compat/py38.py b/pkg_resources/_vendor/importlib_resources/compat/py38.py
new file mode 100644
index 0000000000..4d548257f8
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/compat/py38.py
@@ -0,0 +1,11 @@
+import os
+import sys
+
+from typing import Union
+
+
+if sys.version_info >= (3, 9):
+    StrPath = Union[str, os.PathLike[str]]
+else:
+    # PathLike is only subscriptable at runtime in 3.9+
+    StrPath = Union[str, "os.PathLike[str]"]
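Note: the alias is meant for annotations that accept either strings or path
objects; a tiny sketch (the function itself is hypothetical):

    import os

    from importlib_resources.compat.py38 import StrPath

    def to_str(p: StrPath) -> str:
        return os.fspath(p)   # handles str and os.PathLike[str] alike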
diff --git a/pkg_resources/_vendor/importlib_resources/compat/py39.py b/pkg_resources/_vendor/importlib_resources/compat/py39.py
new file mode 100644
index 0000000000..ab87b9dc14
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/compat/py39.py
@@ -0,0 +1,10 @@
+import sys
+
+
+__all__ = ['ZipPath']
+
+
+if sys.version_info >= (3, 10):
+    from zipfile import Path as ZipPath  # type: ignore
+else:
+    from zipp import Path as ZipPath  # type: ignore
diff --git a/pkg_resources/_vendor/importlib_resources/functional.py b/pkg_resources/_vendor/importlib_resources/functional.py
new file mode 100644
index 0000000000..f59416f2dd
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/functional.py
@@ -0,0 +1,81 @@
+"""Simplified function-based API for importlib.resources"""
+
+import warnings
+
+from ._common import files, as_file
+
+
+_MISSING = object()
+
+
+def open_binary(anchor, *path_names):
+    """Open for binary reading the *resource* within *package*."""
+    return _get_resource(anchor, path_names).open('rb')
+
+
+def open_text(anchor, *path_names, encoding=_MISSING, errors='strict'):
+    """Open for text reading the *resource* within *package*."""
+    encoding = _get_encoding_arg(path_names, encoding)
+    resource = _get_resource(anchor, path_names)
+    return resource.open('r', encoding=encoding, errors=errors)
+
+
+def read_binary(anchor, *path_names):
+    """Read and return contents of *resource* within *package* as bytes."""
+    return _get_resource(anchor, path_names).read_bytes()
+
+
+def read_text(anchor, *path_names, encoding=_MISSING, errors='strict'):
+    """Read and return contents of *resource* within *package* as str."""
+    encoding = _get_encoding_arg(path_names, encoding)
+    resource = _get_resource(anchor, path_names)
+    return resource.read_text(encoding=encoding, errors=errors)
+
+
+def path(anchor, *path_names):
+    """Return the path to the *resource* as an actual file system path."""
+    return as_file(_get_resource(anchor, path_names))
+
+
+def is_resource(anchor, *path_names):
+    """Return ``True`` if there is a resource named *name* in the package,
+
+    Otherwise returns ``False``.
+    """
+    return _get_resource(anchor, path_names).is_file()
+
+
+def contents(anchor, *path_names):
+    """Return an iterable over the named resources within the package.
+
+    The iterable returns :class:`str` resources (e.g. files).
+    The iterable does not recurse into subdirectories.
+    """
+    warnings.warn(
+        "importlib.resources.contents is deprecated. "
+        "Use files(anchor).iterdir() instead.",
+        DeprecationWarning,
+        stacklevel=1,
+    )
+    return (resource.name for resource in _get_resource(anchor, path_names).iterdir())
+
+
+def _get_encoding_arg(path_names, encoding):
+    # For compatibility with versions where *encoding* was a positional
+    # argument, it needs to be given explicitly when there are multiple
+    # *path_names*.
+    # This limitation can be removed in Python 3.15.
+    if encoding is _MISSING:
+        if len(path_names) > 1:
+            raise TypeError(
+                "'encoding' argument required with multiple path names",
+            )
+        else:
+            return 'utf-8'
+    return encoding
+
+
+def _get_resource(anchor, path_names):
+    if anchor is None:
+        raise TypeError("anchor must be module or string, got None")
+    return files(anchor).joinpath(*path_names)
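Note: a minimal usage sketch for the new functional API; the package and file
names here are hypothetical:

    import importlib_resources as resources

    # Single path name: encoding defaults to UTF-8.
    text = resources.read_text('mypkg', 'data.txt')

    # Multiple path names require an explicit encoding (the compatibility
    # restriction _get_encoding_arg enforces until Python 3.15).
    text = resources.read_text('mypkg', 'subdir', 'data.txt', encoding='utf-8')

    raw = resources.read_binary('mypkg', 'data.bin')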
diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py b/pkg_resources/_vendor/importlib_resources/future/__init__.py
similarity index 100%
rename from pkg_resources/_vendor/importlib_resources/tests/zipdata02/__init__.py
rename to pkg_resources/_vendor/importlib_resources/future/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/future/adapters.py b/pkg_resources/_vendor/importlib_resources/future/adapters.py
new file mode 100644
index 0000000000..0e9764bae8
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/future/adapters.py
@@ -0,0 +1,95 @@
+import functools
+import pathlib
+from contextlib import suppress
+from types import SimpleNamespace
+
+from .. import readers, _adapters
+
+
+def _block_standard(reader_getter):
+    """
+    Wrap _adapters.TraversableResourcesLoader.get_resource_reader
+    and intercept any standard library readers.
+    """
+
+    @functools.wraps(reader_getter)
+    def wrapper(*args, **kwargs):
+        """
+        If the reader is from the standard library, return None to allow
+        likely newer implementations in this library to take precedence.
+        """
+        try:
+            reader = reader_getter(*args, **kwargs)
+        except NotADirectoryError:
+            # MultiplexedPath may fail on zip subdirectory
+            return
+        # Python 3.10+
+        mod_name = reader.__class__.__module__
+        if mod_name.startswith('importlib.') and mod_name.endswith('readers'):
+            return
+        # Python 3.8, 3.9
+        if isinstance(reader, _adapters.CompatibilityFiles) and (
+            reader.spec.loader.__class__.__module__.startswith('zipimport')
+            or reader.spec.loader.__class__.__module__.startswith(
+                '_frozen_importlib_external'
+            )
+        ):
+            return
+        return reader
+
+    return wrapper
+
+
+def _skip_degenerate(reader):
+    """
+    Mask any degenerate reader. Ref #298.
+    """
+    is_degenerate = (
+        isinstance(reader, _adapters.CompatibilityFiles) and not reader._reader
+    )
+    return reader if not is_degenerate else None
+
+
+class TraversableResourcesLoader(_adapters.TraversableResourcesLoader):
+    """
+    Adapt loaders to provide TraversableResources and other
+    compatibility.
+
+    Ensures the readers from importlib_resources are preferred
+    over stdlib readers.
+    """
+
+    def get_resource_reader(self, name):
+        return (
+            _skip_degenerate(_block_standard(super().get_resource_reader)(name))
+            or self._standard_reader()
+            or super().get_resource_reader(name)
+        )
+
+    def _standard_reader(self):
+        return self._zip_reader() or self._namespace_reader() or self._file_reader()
+
+    def _zip_reader(self):
+        with suppress(AttributeError):
+            return readers.ZipReader(self.spec.loader, self.spec.name)
+
+    def _namespace_reader(self):
+        with suppress(AttributeError, ValueError):
+            return readers.NamespaceReader(self.spec.submodule_search_locations)
+
+    def _file_reader(self):
+        try:
+            path = pathlib.Path(self.spec.origin)
+        except TypeError:
+            return None
+        if path.exists():
+            return readers.FileReader(SimpleNamespace(path=path))
+
+
+def wrap_spec(package):
+    """
+    Override _adapters.wrap_spec to use TraversableResourcesLoader
+    from above. Ensures that future behavior is always available on older
+    Pythons.
+    """
+    return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
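Note: as `_common.from_package` above shows, the adapter wraps a package's
spec before a resource reader is requested. A sketch, assuming a hypothetical
importable package `mypkg`:

    import mypkg
    from importlib_resources.future.adapters import wrap_spec

    spec = wrap_spec(mypkg)   # SpecLoaderAdapter preferring these readers
    reader = spec.loader.get_resource_reader(spec.name)
    root = reader.files()     # Traversable root for the package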
diff --git a/pkg_resources/_vendor/importlib_resources/readers.py b/pkg_resources/_vendor/importlib_resources/readers.py
index ab34db7409..4a80a774aa 100644
--- a/pkg_resources/_vendor/importlib_resources/readers.py
+++ b/pkg_resources/_vendor/importlib_resources/readers.py
@@ -1,11 +1,15 @@
 import collections
+import contextlib
+import itertools
 import pathlib
 import operator
+import re
+import warnings
 
 from . import abc
 
-from ._itertools import unique_everseen
-from ._compat import ZipPath
+from ._itertools import only
+from .compat.py39 import ZipPath
 
 
 def remove_duplicates(items):
@@ -41,8 +45,10 @@ def open_resource(self, resource):
             raise FileNotFoundError(exc.args[0])
 
     def is_resource(self, path):
-        # workaround for `zipfile.Path.is_file` returning true
-        # for non-existent paths.
+        """
+        Workaround for `zipfile.Path.is_file` returning true
+        for non-existent paths.
+        """
         target = self.files().joinpath(path)
         return target.is_file() and target.exists()
 
@@ -59,7 +65,7 @@ class MultiplexedPath(abc.Traversable):
     """
 
     def __init__(self, *paths):
-        self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
+        self._paths = list(map(_ensure_traversable, remove_duplicates(paths)))
         if not self._paths:
             message = 'MultiplexedPath must contain at least one path'
             raise FileNotFoundError(message)
@@ -67,8 +73,10 @@ def __init__(self, *paths):
             raise NotADirectoryError('MultiplexedPath only supports directories')
 
     def iterdir(self):
-        files = (file for path in self._paths for file in path.iterdir())
-        return unique_everseen(files, key=operator.attrgetter('name'))
+        children = (child for path in self._paths for child in path.iterdir())
+        by_name = operator.attrgetter('name')
+        groups = itertools.groupby(sorted(children, key=by_name), key=by_name)
+        return map(self._follow, (locs for name, locs in groups))
 
     def read_bytes(self):
         raise FileNotFoundError(f'{self} is not a file')
@@ -90,6 +98,25 @@ def joinpath(self, *descendants):
             # Just return something that will not exist.
             return self._paths[0].joinpath(*descendants)
 
+    @classmethod
+    def _follow(cls, children):
+        """
+        Construct a MultiplexedPath if needed.
+
+        If children contains a sole element, return it.
+        Otherwise, return a MultiplexedPath of the items,
+        unless one of the items is not a directory, in which case return the first.
+        """
+        subdirs, one_dir, one_file = itertools.tee(children, 3)
+
+        try:
+            return only(one_dir)
+        except ValueError:
+            try:
+                return cls(*subdirs)
+            except NotADirectoryError:
+                return next(one_file)
+
     def open(self, *args, **kwargs):
         raise FileNotFoundError(f'{self} is not a file')
 
@@ -106,7 +133,36 @@ class NamespaceReader(abc.TraversableResources):
     def __init__(self, namespace_path):
         if 'NamespacePath' not in str(namespace_path):
             raise ValueError('Invalid path')
-        self.path = MultiplexedPath(*list(namespace_path))
+        self.path = MultiplexedPath(*map(self._resolve, namespace_path))
+
+    @classmethod
+    def _resolve(cls, path_str) -> abc.Traversable:
+        r"""
+        Given an item from a namespace path, resolve it to a Traversable.
+
+        path_str might be a directory on the filesystem or a path to a
+        zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or
+        ``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``.
+        """
+        (dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir())
+        return dir
+
+    @classmethod
+    def _candidate_paths(cls, path_str):
+        yield pathlib.Path(path_str)
+        yield from cls._resolve_zip_path(path_str)
+
+    @staticmethod
+    def _resolve_zip_path(path_str):
+        for match in reversed(list(re.finditer(r'[\\/]', path_str))):
+            with contextlib.suppress(
+                FileNotFoundError,
+                IsADirectoryError,
+                NotADirectoryError,
+                PermissionError,
+            ):
+                inner = path_str[match.end() :].replace('\\', '/') + '/'
+                yield ZipPath(path_str[: match.start()], inner.lstrip('/'))
 
     def resource_path(self, resource):
         """
@@ -118,3 +174,21 @@ def resource_path(self, resource):
 
     def files(self):
         return self.path
+
+
+def _ensure_traversable(path):
+    """
+    Convert deprecated string arguments to traversables (pathlib.Path).
+
+    Remove with Python 3.15.
+    """
+    if not isinstance(path, str):
+        return path
+
+    warnings.warn(
+        "String arguments are deprecated. Pass a Traversable instead.",
+        DeprecationWarning,
+        stacklevel=3,
+    )
+
+    return pathlib.Path(path)
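Note: a sketch of the revised joinpath semantics (directory layout
hypothetical; compare test_join_path_common_subdir in test_reader.py below):

    import pathlib
    from importlib_resources.readers import MultiplexedPath

    combined = MultiplexedPath(pathlib.Path('data01'), pathlib.Path('data02'))

    # A child directory present under both roots comes back as another
    # MultiplexedPath; a name found under only one root is returned directly.
    sub = combined.joinpath('subdirectory')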
diff --git a/pkg_resources/_vendor/importlib_resources/simple.py b/pkg_resources/_vendor/importlib_resources/simple.py
index 7770c922c8..96f117fec6 100644
--- a/pkg_resources/_vendor/importlib_resources/simple.py
+++ b/pkg_resources/_vendor/importlib_resources/simple.py
@@ -88,7 +88,7 @@ def is_dir(self):
     def open(self, mode='r', *args, **kwargs):
         stream = self.parent.reader.open_binary(self.name)
         if 'b' not in mode:
-            stream = io.TextIOWrapper(*args, **kwargs)
+            stream = io.TextIOWrapper(stream, *args, **kwargs)
         return stream
 
     def joinpath(self, name):
diff --git a/pkg_resources/_vendor/importlib_resources/tests/_compat.py b/pkg_resources/_vendor/importlib_resources/tests/_compat.py
deleted file mode 100644
index e7bf06dd4e..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/_compat.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import os
-
-
-try:
-    from test.support import import_helper  # type: ignore
-except ImportError:
-    # Python 3.9 and earlier
-    class import_helper:  # type: ignore
-        from test.support import (
-            modules_setup,
-            modules_cleanup,
-            DirsOnSysPath,
-            CleanImport,
-        )
-
-
-try:
-    from test.support import os_helper  # type: ignore
-except ImportError:
-    # Python 3.9 compat
-    class os_helper:  # type:ignore
-        from test.support import temp_dir
-
-
-try:
-    # Python 3.10
-    from test.support.os_helper import unlink
-except ImportError:
-    from test.support import unlink as _unlink
-
-    def unlink(target):
-        return _unlink(os.fspath(target))
diff --git a/pkg_resources/_vendor/importlib_resources/tests/_path.py b/pkg_resources/_vendor/importlib_resources/tests/_path.py
index c630e4d3d3..1f97c96146 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/_path.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/_path.py
@@ -1,12 +1,16 @@
 import pathlib
 import functools
 
+from typing import Dict, Union
+
 
 ####
-# from jaraco.path 3.4
+# from jaraco.path 3.4.1
+
+FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']]  # type: ignore
 
 
-def build(spec, prefix=pathlib.Path()):
+def build(spec: FilesSpec, prefix=pathlib.Path()):
     """
     Build a set of files/directories, as described by the spec.
 
@@ -23,15 +27,17 @@ def build(spec, prefix=pathlib.Path()):
     ...         "baz.py": "# Some code",
     ...     }
     ... }
-    >>> tmpdir = getfixture('tmpdir')
-    >>> build(spec, tmpdir)
+    >>> target = getfixture('tmp_path')
+    >>> build(spec, target)
+    >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
+    '# Some code'
     """
     for name, contents in spec.items():
         create(contents, pathlib.Path(prefix) / name)
 
 
 @functools.singledispatch
-def create(content, path):
+def create(content: Union[str, bytes, FilesSpec], path):
     path.mkdir(exist_ok=True)
     build(content, prefix=path)  # type: ignore
 
@@ -43,7 +49,7 @@ def _(content: bytes, path):
 
 @create.register
 def _(content: str, path):
-    path.write_text(content)
+    path.write_text(content, encoding='utf-8')
 
 
 # end from jaraco.path
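Note: the spec argument to build() is a nested dict mapping names to file
contents or sub-dicts; a sketch with hypothetical paths:

    from importlib_resources.tests._path import build

    build(
        {'pkg': {'__init__.py': '', 'data.txt': 'hello'}},
        prefix='/tmp/demo',  # hypothetical, must be an existing directory
    )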
diff --git a/pkg_resources/_vendor/jaraco/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/compat/__init__.py
similarity index 100%
rename from pkg_resources/_vendor/jaraco/__init__.py
rename to pkg_resources/_vendor/importlib_resources/tests/compat/__init__.py
diff --git a/pkg_resources/_vendor/importlib_resources/tests/compat/py312.py b/pkg_resources/_vendor/importlib_resources/tests/compat/py312.py
new file mode 100644
index 0000000000..ea9a58ba2e
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/compat/py312.py
@@ -0,0 +1,18 @@
+import contextlib
+
+from .py39 import import_helper
+
+
+@contextlib.contextmanager
+def isolated_modules():
+    """
+    Save modules on entry and cleanup on exit.
+    """
+    (saved,) = import_helper.modules_setup()
+    try:
+        yield
+    finally:
+        import_helper.modules_cleanup(saved)
+
+
+vars(import_helper).setdefault('isolated_modules', isolated_modules)
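Note: the shim is consumed by the SiteDir fixture in test_files.py below; a
standalone sketch:

    from importlib_resources.tests.compat.py312 import import_helper

    with import_helper.isolated_modules():
        import mod  # hypothetical; imports made here are rolled back on exit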
diff --git a/pkg_resources/_vendor/importlib_resources/tests/compat/py39.py b/pkg_resources/_vendor/importlib_resources/tests/compat/py39.py
new file mode 100644
index 0000000000..e158eb85d3
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/compat/py39.py
@@ -0,0 +1,10 @@
+"""
+Backward-compatibility shims to support Python 3.9 and earlier.
+"""
+
+from jaraco.test.cpython import from_test_support, try_import
+
+import_helper = try_import('import_helper') or from_test_support(
+    'modules_setup', 'modules_cleanup', 'DirsOnSysPath'
+)
+os_helper = try_import('os_helper') or from_test_support('temp_dir')
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/binary.file b/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/binary.file
index eaf36c1daccfdf325514461cd1a2ffbc139b5464..5bd8bb897b13225c93a1d26baa88c96b7bd5d817 100644
GIT binary patch
literal 4
LcmZQ!Wn%{b05$*@

literal 4
LcmZQzWMT#Y01f~L

diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt b/pkg_resources/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
new file mode 100644
index 0000000000..48f587a2d0
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
@@ -0,0 +1 @@
+a resource
\ No newline at end of file
diff --git a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
new file mode 100644
index 0000000000..100f50643d
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
@@ -0,0 +1 @@
+

\ No newline at end of file
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py
index d92c7c56c9..13ad0dfb21 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py
@@ -64,11 +64,13 @@ def test_orphan_path_name(self):
 
     def test_spec_path_open(self):
         self.assertEqual(self.files.read_bytes(), b'Hello, world!')
-        self.assertEqual(self.files.read_text(), 'Hello, world!')
+        self.assertEqual(self.files.read_text(encoding='utf-8'), 'Hello, world!')
 
     def test_child_path_open(self):
         self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!')
-        self.assertEqual((self.files / 'a').read_text(), 'Hello, world!')
+        self.assertEqual(
+            (self.files / 'a').read_text(encoding='utf-8'), 'Hello, world!'
+        )
 
     def test_orphan_path_open(self):
         with self.assertRaises(FileNotFoundError):
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_contents.py b/pkg_resources/_vendor/importlib_resources/tests/test_contents.py
index 525568e8c9..7dc3b0a619 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/test_contents.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_contents.py
@@ -31,8 +31,8 @@ class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
 class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
     expected = {
         # no __init__ because of namespace design
-        # no subdirectory as incidental difference in fixture
         'binary.file',
+        'subdirectory',
         'utf-16.file',
         'utf-8.file',
     }
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_custom.py b/pkg_resources/_vendor/importlib_resources/tests/test_custom.py
new file mode 100644
index 0000000000..86c65676f1
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_custom.py
@@ -0,0 +1,47 @@
+import unittest
+import contextlib
+import pathlib
+
+import importlib_resources as resources
+from .. import abc
+from ..abc import TraversableResources, ResourceReader
+from . import util
+from .compat.py39 import os_helper
+
+
+class SimpleLoader:
+    """
+    A simple loader that only implements a resource reader.
+    """
+
+    def __init__(self, reader: ResourceReader):
+        self.reader = reader
+
+    def get_resource_reader(self, package):
+        return self.reader
+
+
+class MagicResources(TraversableResources):
+    """
+    Magically returns the resources at path.
+    """
+
+    def __init__(self, path: pathlib.Path):
+        self.path = path
+
+    def files(self):
+        return self.path
+
+
+class CustomTraversableResourcesTests(unittest.TestCase):
+    def setUp(self):
+        self.fixtures = contextlib.ExitStack()
+        self.addCleanup(self.fixtures.close)
+
+    def test_custom_loader(self):
+        temp_dir = pathlib.Path(self.fixtures.enter_context(os_helper.temp_dir()))
+        loader = SimpleLoader(MagicResources(temp_dir))
+        pkg = util.create_package_from_loader(loader)
+        files = resources.files(pkg)
+        assert isinstance(files, abc.Traversable)
+        assert list(files.iterdir()) == []
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_files.py
index d258fb5f0f..3e86ec64bc 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/test_files.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_files.py
@@ -1,4 +1,3 @@
-import typing
 import textwrap
 import unittest
 import warnings
@@ -10,7 +9,8 @@
 from . import data01
 from . import util
 from . import _path
-from ._compat import os_helper, import_helper
+from .compat.py39 import os_helper
+from .compat.py312 import import_helper
 
 
 @contextlib.contextmanager
@@ -31,13 +31,14 @@ def test_read_text(self):
         actual = files.joinpath('utf-8.file').read_text(encoding='utf-8')
         assert actual == 'Hello, UTF-8 world!\n'
 
-    @unittest.skipUnless(
-        hasattr(typing, 'runtime_checkable'),
-        "Only suitable when typing supports runtime_checkable",
-    )
     def test_traversable(self):
         assert isinstance(resources.files(self.data), Traversable)
 
+    def test_joinpath_with_multiple_args(self):
+        files = resources.files(self.data)
+        binfile = files.joinpath('subdirectory', 'binary.file')
+        self.assertTrue(binfile.is_file())
+
     def test_old_parameter(self):
         """
         Files used to take a 'package' parameter. Make sure anyone
@@ -63,13 +64,17 @@ def setUp(self):
         self.data = namespacedata01
 
 
+class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
+    ZIP_MODULE = 'namespacedata01'
+
+
 class SiteDir:
     def setUp(self):
         self.fixtures = contextlib.ExitStack()
         self.addCleanup(self.fixtures.close)
         self.site_dir = self.fixtures.enter_context(os_helper.temp_dir())
         self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir))
-        self.fixtures.enter_context(import_helper.CleanImport())
+        self.fixtures.enter_context(import_helper.isolated_modules())
 
 
 class ModulesFilesTests(SiteDir, unittest.TestCase):
@@ -84,7 +89,7 @@ def test_module_resources(self):
         _path.build(spec, self.site_dir)
         import mod
 
-        actual = resources.files(mod).joinpath('res.txt').read_text()
+        actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8')
         assert actual == spec['res.txt']
 
 
@@ -98,7 +103,7 @@ def test_implicit_files(self):
                 '__init__.py': textwrap.dedent(
                     """
                     import importlib_resources as res
-                    val = res.files().joinpath('res.txt').read_text()
+                    val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
                     """
                 ),
                 'res.txt': 'resources are the best',
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_functional.py b/pkg_resources/_vendor/importlib_resources/tests/test_functional.py
new file mode 100644
index 0000000000..69706cf7be
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_functional.py
@@ -0,0 +1,242 @@
+import unittest
+import os
+import contextlib
+
+try:
+    from test.support.warnings_helper import ignore_warnings, check_warnings
+except ImportError:
+    # older Python versions
+    from test.support import ignore_warnings, check_warnings
+
+import importlib_resources as resources
+
+# Since the functional API forwards to Traversable, we only test
+# filesystem resources here -- not zip files, namespace packages etc.
+# We do test for two kinds of Anchor, though.
+
+
+class StringAnchorMixin:
+    anchor01 = 'importlib_resources.tests.data01'
+    anchor02 = 'importlib_resources.tests.data02'
+
+
+class ModuleAnchorMixin:
+    from . import data01 as anchor01
+    from . import data02 as anchor02
+
+
+class FunctionalAPIBase:
+    def _gen_resourcetxt_path_parts(self):
+        """Yield various names of a text file in anchor02, each in a subTest"""
+        for path_parts in (
+            ('subdirectory', 'subsubdir', 'resource.txt'),
+            ('subdirectory/subsubdir/resource.txt',),
+            ('subdirectory/subsubdir', 'resource.txt'),
+        ):
+            with self.subTest(path_parts=path_parts):
+                yield path_parts
+
+    def test_read_text(self):
+        self.assertEqual(
+            resources.read_text(self.anchor01, 'utf-8.file'),
+            'Hello, UTF-8 world!\n',
+        )
+        self.assertEqual(
+            resources.read_text(
+                self.anchor02,
+                'subdirectory',
+                'subsubdir',
+                'resource.txt',
+                encoding='utf-8',
+            ),
+            'a resource',
+        )
+        for path_parts in self._gen_resourcetxt_path_parts():
+            self.assertEqual(
+                resources.read_text(
+                    self.anchor02,
+                    *path_parts,
+                    encoding='utf-8',
+                ),
+                'a resource',
+            )
+        # Use generic OSError, since e.g. attempting to read a directory can
+        # fail with PermissionError rather than IsADirectoryError
+        with self.assertRaises(OSError):
+            resources.read_text(self.anchor01)
+        with self.assertRaises(OSError):
+            resources.read_text(self.anchor01, 'no-such-file')
+        with self.assertRaises(UnicodeDecodeError):
+            resources.read_text(self.anchor01, 'utf-16.file')
+        self.assertEqual(
+            resources.read_text(
+                self.anchor01,
+                'binary.file',
+                encoding='latin1',
+            ),
+            '\x00\x01\x02\x03',
+        )
+        self.assertEqual(
+            resources.read_text(
+                self.anchor01,
+                'utf-16.file',
+                errors='backslashreplace',
+            ),
+            'Hello, UTF-16 world!\n'.encode('utf-16').decode(
+                errors='backslashreplace',
+            ),
+        )
+
+    def test_read_binary(self):
+        self.assertEqual(
+            resources.read_binary(self.anchor01, 'utf-8.file'),
+            b'Hello, UTF-8 world!\n',
+        )
+        for path_parts in self._gen_resourcetxt_path_parts():
+            self.assertEqual(
+                resources.read_binary(self.anchor02, *path_parts),
+                b'a resource',
+            )
+
+    def test_open_text(self):
+        with resources.open_text(self.anchor01, 'utf-8.file') as f:
+            self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
+        for path_parts in self._gen_resourcetxt_path_parts():
+            with resources.open_text(
+                self.anchor02,
+                *path_parts,
+                encoding='utf-8',
+            ) as f:
+                self.assertEqual(f.read(), 'a resource')
+        # Use generic OSError, since e.g. attempting to read a directory can
+        # fail with PermissionError rather than IsADirectoryError
+        with self.assertRaises(OSError):
+            resources.open_text(self.anchor01)
+        with self.assertRaises(OSError):
+            resources.open_text(self.anchor01, 'no-such-file')
+        with resources.open_text(self.anchor01, 'utf-16.file') as f:
+            with self.assertRaises(UnicodeDecodeError):
+                f.read()
+        with resources.open_text(
+            self.anchor01,
+            'binary.file',
+            encoding='latin1',
+        ) as f:
+            self.assertEqual(f.read(), '\x00\x01\x02\x03')
+        with resources.open_text(
+            self.anchor01,
+            'utf-16.file',
+            errors='backslashreplace',
+        ) as f:
+            self.assertEqual(
+                f.read(),
+                'Hello, UTF-16 world!\n'.encode('utf-16').decode(
+                    errors='backslashreplace',
+                ),
+            )
+
+    def test_open_binary(self):
+        with resources.open_binary(self.anchor01, 'utf-8.file') as f:
+            self.assertEqual(f.read(), b'Hello, UTF-8 world!\n')
+        for path_parts in self._gen_resourcetxt_path_parts():
+            with resources.open_binary(
+                self.anchor02,
+                *path_parts,
+            ) as f:
+                self.assertEqual(f.read(), b'a resource')
+
+    def test_path(self):
+        with resources.path(self.anchor01, 'utf-8.file') as path:
+            with open(str(path), encoding='utf-8') as f:
+                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
+        with resources.path(self.anchor01) as path:
+            with open(os.path.join(path, 'utf-8.file'), encoding='utf-8') as f:
+                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
+
+    def test_is_resource(self):
+        is_resource = resources.is_resource
+        self.assertTrue(is_resource(self.anchor01, 'utf-8.file'))
+        self.assertFalse(is_resource(self.anchor01, 'no_such_file'))
+        self.assertFalse(is_resource(self.anchor01))
+        self.assertFalse(is_resource(self.anchor01, 'subdirectory'))
+        for path_parts in self._gen_resourcetxt_path_parts():
+            self.assertTrue(is_resource(self.anchor02, *path_parts))
+
+    def test_contents(self):
+        with check_warnings((".*contents.*", DeprecationWarning)):
+            c = resources.contents(self.anchor01)
+        self.assertGreaterEqual(
+            set(c),
+            {'utf-8.file', 'utf-16.file', 'binary.file', 'subdirectory'},
+        )
+        with contextlib.ExitStack() as cm:
+            cm.enter_context(self.assertRaises(OSError))
+            cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
+
+            list(resources.contents(self.anchor01, 'utf-8.file'))
+
+        for path_parts in self._gen_resourcetxt_path_parts():
+            with contextlib.ExitStack() as cm:
+                cm.enter_context(self.assertRaises(OSError))
+                cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
+
+                list(resources.contents(self.anchor01, *path_parts))
+        with check_warnings((".*contents.*", DeprecationWarning)):
+            c = resources.contents(self.anchor01, 'subdirectory')
+        self.assertGreaterEqual(
+            set(c),
+            {'binary.file'},
+        )
+
+    @ignore_warnings(category=DeprecationWarning)
+    def test_common_errors(self):
+        for func in (
+            resources.read_text,
+            resources.read_binary,
+            resources.open_text,
+            resources.open_binary,
+            resources.path,
+            resources.is_resource,
+            resources.contents,
+        ):
+            with self.subTest(func=func):
+                # Rejecting None anchor
+                with self.assertRaises(TypeError):
+                    func(None)
+                # Rejecting invalid anchor type
+                with self.assertRaises((TypeError, AttributeError)):
+                    func(1234)
+                # Unknown module
+                with self.assertRaises(ModuleNotFoundError):
+                    func('$missing module$')
+
+    def test_text_errors(self):
+        for func in (
+            resources.read_text,
+            resources.open_text,
+        ):
+            with self.subTest(func=func):
+                # Multiple path arguments need explicit encoding argument.
+                with self.assertRaises(TypeError):
+                    func(
+                        self.anchor02,
+                        'subdirectory',
+                        'subsubdir',
+                        'resource.txt',
+                    )
+
+
+class FunctionalAPITest_StringAnchor(
+    unittest.TestCase,
+    FunctionalAPIBase,
+    StringAnchorMixin,
+):
+    pass
+
+
+class FunctionalAPITest_ModuleAnchor(
+    unittest.TestCase,
+    FunctionalAPIBase,
+    ModuleAnchorMixin,
+):
+    pass
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_open.py b/pkg_resources/_vendor/importlib_resources/tests/test_open.py
index 87b42c3d39..44f1018af3 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/test_open.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_open.py
@@ -15,7 +15,7 @@ def execute(self, package, path):
 class CommonTextTests(util.CommonTests, unittest.TestCase):
     def execute(self, package, path):
         target = resources.files(package).joinpath(path)
-        with target.open():
+        with target.open(encoding='utf-8'):
             pass
 
 
@@ -24,11 +24,11 @@ def test_open_binary(self):
         target = resources.files(self.data) / 'binary.file'
         with target.open('rb') as fp:
             result = fp.read()
-            self.assertEqual(result, b'\x00\x01\x02\x03')
+            self.assertEqual(result, bytes(range(4)))
 
     def test_open_text_default_encoding(self):
         target = resources.files(self.data) / 'utf-8.file'
-        with target.open() as fp:
+        with target.open(encoding='utf-8') as fp:
             result = fp.read()
             self.assertEqual(result, 'Hello, UTF-8 world!\n')
 
@@ -39,7 +39,9 @@ def test_open_text_given_encoding(self):
         self.assertEqual(result, 'Hello, UTF-16 world!\n')
 
     def test_open_text_with_errors(self):
-        # Raises UnicodeError without the 'errors' argument.
+        """
+        Raises UnicodeError without the 'errors' argument.
+        """
         target = resources.files(self.data) / 'utf-16.file'
         with target.open(encoding='utf-8', errors='strict') as fp:
             self.assertRaises(UnicodeError, fp.read)
@@ -54,11 +56,13 @@ def test_open_text_with_errors(self):
 
     def test_open_binary_FileNotFoundError(self):
         target = resources.files(self.data) / 'does-not-exist'
-        self.assertRaises(FileNotFoundError, target.open, 'rb')
+        with self.assertRaises(FileNotFoundError):
+            target.open('rb')
 
     def test_open_text_FileNotFoundError(self):
         target = resources.files(self.data) / 'does-not-exist'
-        self.assertRaises(FileNotFoundError, target.open)
+        with self.assertRaises(FileNotFoundError):
+            target.open(encoding='utf-8')
 
 
 class OpenDiskTests(OpenTests, unittest.TestCase):
@@ -77,5 +81,9 @@ class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
     pass
 
 
+class OpenNamespaceZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
+    ZIP_MODULE = 'namespacedata01'
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_path.py b/pkg_resources/_vendor/importlib_resources/tests/test_path.py
index 4f4d3943bb..c3e1cbb4ed 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/test_path.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_path.py
@@ -1,4 +1,5 @@
 import io
+import pathlib
 import unittest
 
 import importlib_resources as resources
@@ -14,16 +15,14 @@ def execute(self, package, path):
 
 class PathTests:
     def test_reading(self):
-        # Path should be readable.
-        # Test also implicitly verifies the returned object is a pathlib.Path
-        # instance.
+        """
+        Path should be readable and a pathlib.Path instance.
+        """
         target = resources.files(self.data) / 'utf-8.file'
         with resources.as_file(target) as path:
+            self.assertIsInstance(path, pathlib.Path)
             self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
-            # pathlib.Path.read_text() was introduced in Python 3.5.
-            with path.open('r', encoding='utf-8') as file:
-                text = file.read()
-            self.assertEqual('Hello, UTF-8 world!\n', text)
+            self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8'))
 
 
 class PathDiskTests(PathTests, unittest.TestCase):
@@ -53,8 +52,10 @@ def setUp(self):
 
 class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
     def test_remove_in_context_manager(self):
-        # It is not an error if the file that was temporarily stashed on the
-        # file system is removed inside the `with` stanza.
+        """
+        It is not an error if the file that was temporarily stashed on the
+        file system is removed inside the `with` stanza.
+        """
         target = resources.files(self.data) / 'utf-8.file'
         with resources.as_file(target) as path:
             path.unlink()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_read.py b/pkg_resources/_vendor/importlib_resources/tests/test_read.py
index 41dd6db5f3..97d90128cf 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/test_read.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_read.py
@@ -13,16 +13,20 @@ def execute(self, package, path):
 
 class CommonTextTests(util.CommonTests, unittest.TestCase):
     def execute(self, package, path):
-        resources.files(package).joinpath(path).read_text()
+        resources.files(package).joinpath(path).read_text(encoding='utf-8')
 
 
 class ReadTests:
     def test_read_bytes(self):
         result = resources.files(self.data).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, b'\0\1\2\3')
+        self.assertEqual(result, bytes(range(4)))
 
     def test_read_text_default_encoding(self):
-        result = resources.files(self.data).joinpath('utf-8.file').read_text()
+        result = (
+            resources.files(self.data)
+            .joinpath('utf-8.file')
+            .read_text(encoding='utf-8')
+        )
         self.assertEqual(result, 'Hello, UTF-8 world!\n')
 
     def test_read_text_given_encoding(self):
@@ -34,7 +38,9 @@ def test_read_text_given_encoding(self):
         self.assertEqual(result, 'Hello, UTF-16 world!\n')
 
     def test_read_text_with_errors(self):
-        # Raises UnicodeError without the 'errors' argument.
+        """
+        Raises UnicodeError without the 'errors' argument.
+        """
         target = resources.files(self.data) / 'utf-16.file'
         self.assertRaises(UnicodeError, target.read_text, encoding='utf-8')
         result = target.read_text(encoding='utf-8', errors='ignore')
@@ -52,17 +58,15 @@ class ReadDiskTests(ReadTests, unittest.TestCase):
 
 class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
     def test_read_submodule_resource(self):
-        submodule = import_module('ziptestdata.subdirectory')
+        submodule = import_module('data01.subdirectory')
         result = resources.files(submodule).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, b'\0\1\2\3')
+        self.assertEqual(result, bytes(range(4, 8)))
 
     def test_read_submodule_resource_by_name(self):
         result = (
-            resources.files('ziptestdata.subdirectory')
-            .joinpath('binary.file')
-            .read_bytes()
+            resources.files('data01.subdirectory').joinpath('binary.file').read_bytes()
         )
-        self.assertEqual(result, b'\0\1\2\3')
+        self.assertEqual(result, bytes(range(4, 8)))
 
 
 class ReadNamespaceTests(ReadTests, unittest.TestCase):
@@ -72,5 +76,22 @@ def setUp(self):
         self.data = namespacedata01
 
 
+class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
+    ZIP_MODULE = 'namespacedata01'
+
+    def test_read_submodule_resource(self):
+        submodule = import_module('namespacedata01.subdirectory')
+        result = resources.files(submodule).joinpath('binary.file').read_bytes()
+        self.assertEqual(result, bytes(range(12, 16)))
+
+    def test_read_submodule_resource_by_name(self):
+        result = (
+            resources.files('namespacedata01.subdirectory')
+            .joinpath('binary.file')
+            .read_bytes()
+        )
+        self.assertEqual(result, bytes(range(12, 16)))
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py
index 1c8ebeeb13..95c2fc85a4 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py
@@ -10,8 +10,7 @@
 class MultiplexedPathTest(unittest.TestCase):
     @classmethod
     def setUpClass(cls):
-        path = pathlib.Path(__file__).parent / 'namespacedata01'
-        cls.folder = str(path)
+        cls.folder = pathlib.Path(__file__).parent / 'namespacedata01'
 
     def test_init_no_paths(self):
         with self.assertRaises(FileNotFoundError):
@@ -19,7 +18,7 @@ def test_init_no_paths(self):
 
     def test_init_file(self):
         with self.assertRaises(NotADirectoryError):
-            MultiplexedPath(os.path.join(self.folder, 'binary.file'))
+            MultiplexedPath(self.folder / 'binary.file')
 
     def test_iterdir(self):
         contents = {path.name for path in MultiplexedPath(self.folder).iterdir()}
@@ -27,10 +26,12 @@ def test_iterdir(self):
             contents.remove('__pycache__')
         except (KeyError, ValueError):
             pass
-        self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'})
+        self.assertEqual(
+            contents, {'subdirectory', 'binary.file', 'utf-16.file', 'utf-8.file'}
+        )
 
     def test_iterdir_duplicate(self):
-        data01 = os.path.abspath(os.path.join(__file__, '..', 'data01'))
+        data01 = pathlib.Path(__file__).parent.joinpath('data01')
         contents = {
             path.name for path in MultiplexedPath(self.folder, data01).iterdir()
         }
@@ -60,17 +61,17 @@ def test_open_file(self):
             path.open()
 
     def test_join_path(self):
-        prefix = os.path.abspath(os.path.join(__file__, '..'))
-        data01 = os.path.join(prefix, 'data01')
+        data01 = pathlib.Path(__file__).parent.joinpath('data01')
+        prefix = str(data01.parent)
         path = MultiplexedPath(self.folder, data01)
         self.assertEqual(
             str(path.joinpath('binary.file'))[len(prefix) + 1 :],
             os.path.join('namespacedata01', 'binary.file'),
         )
-        self.assertEqual(
-            str(path.joinpath('subdirectory'))[len(prefix) + 1 :],
-            os.path.join('data01', 'subdirectory'),
-        )
+        sub = path.joinpath('subdirectory')
+        assert isinstance(sub, MultiplexedPath)
+        assert 'namespacedata01' in str(sub)
+        assert 'data01' in str(sub)
         self.assertEqual(
             str(path.joinpath('imaginary'))[len(prefix) + 1 :],
             os.path.join('namespacedata01', 'imaginary'),
@@ -81,6 +82,17 @@ def test_join_path_compound(self):
         path = MultiplexedPath(self.folder)
         assert not path.joinpath('imaginary/foo.py').exists()
 
+    def test_join_path_common_subdir(self):
+        data01 = pathlib.Path(__file__).parent.joinpath('data01')
+        data02 = pathlib.Path(__file__).parent.joinpath('data02')
+        prefix = str(data01.parent)
+        path = MultiplexedPath(data01, data02)
+        self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath)
+        self.assertEqual(
+            str(path.joinpath('subdirectory', 'subsubdir'))[len(prefix) + 1 :],
+            os.path.join('data02', 'subdirectory', 'subsubdir'),
+        )
+
     def test_repr(self):
         self.assertEqual(
             repr(MultiplexedPath(self.folder)),
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py
index 8239027167..dc2a108cde 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py
@@ -1,14 +1,11 @@
 import sys
 import unittest
 import importlib_resources as resources
-import uuid
 import pathlib
 
 from . import data01
-from . import zipdata01, zipdata02
 from . import util
 from importlib import import_module
-from ._compat import import_helper, unlink
 
 
 class ResourceTests:
@@ -69,10 +66,12 @@ def test_resource_missing(self):
 
 class ResourceCornerCaseTests(unittest.TestCase):
     def test_package_has_no_reader_fallback(self):
-        # Test odd ball packages which:
+        """
+        Test odd ball packages which:
         # 1. Do not have a ResourceReader as a loader
         # 2. Are not on the file system
         # 3. Are not in a zip file
+        """
         module = util.create_package(
             file=data01, path=data01.__file__, contents=['A', 'B', 'C']
         )
@@ -86,34 +85,32 @@ def test_package_has_no_reader_fallback(self):
 
 
 class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
-    ZIP_MODULE = zipdata01  # type: ignore
+    ZIP_MODULE = 'data01'
 
     def test_is_submodule_resource(self):
-        submodule = import_module('ziptestdata.subdirectory')
+        submodule = import_module('data01.subdirectory')
         self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())
 
     def test_read_submodule_resource_by_name(self):
         self.assertTrue(
-            resources.files('ziptestdata.subdirectory')
-            .joinpath('binary.file')
-            .is_file()
+            resources.files('data01.subdirectory').joinpath('binary.file').is_file()
         )
 
     def test_submodule_contents(self):
-        submodule = import_module('ziptestdata.subdirectory')
+        submodule = import_module('data01.subdirectory')
         self.assertEqual(
             names(resources.files(submodule)), {'__init__.py', 'binary.file'}
         )
 
     def test_submodule_contents_by_name(self):
         self.assertEqual(
-            names(resources.files('ziptestdata.subdirectory')),
+            names(resources.files('data01.subdirectory')),
             {'__init__.py', 'binary.file'},
         )
 
     def test_as_file_directory(self):
-        with resources.as_file(resources.files('ziptestdata')) as data:
-            assert data.name == 'ziptestdata'
+        with resources.as_file(resources.files('data01')) as data:
+            assert data.name == 'data01'
             assert data.is_dir()
             assert data.joinpath('subdirectory').is_dir()
             assert len(list(data.iterdir()))
@@ -121,7 +118,7 @@ def test_as_file_directory(self):
 
 
 class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
-    ZIP_MODULE = zipdata02  # type: ignore
+    ZIP_MODULE = 'data02'
 
     def test_unrelated_contents(self):
         """
@@ -129,104 +126,48 @@ def test_unrelated_contents(self):
         distinct resources. Ref python/importlib_resources#44.
         """
         self.assertEqual(
-            names(resources.files('ziptestdata.one')),
+            names(resources.files('data02.one')),
             {'__init__.py', 'resource1.txt'},
         )
         self.assertEqual(
-            names(resources.files('ziptestdata.two')),
+            names(resources.files('data02.two')),
             {'__init__.py', 'resource2.txt'},
         )
 
 
-class DeletingZipsTest(unittest.TestCase):
+class DeletingZipsTest(util.ZipSetupBase, unittest.TestCase):
     """Having accessed resources in a zip file should not keep an open
     reference to the zip.
     """
 
-    ZIP_MODULE = zipdata01
-
-    def setUp(self):
-        modules = import_helper.modules_setup()
-        self.addCleanup(import_helper.modules_cleanup, *modules)
-
-        data_path = pathlib.Path(self.ZIP_MODULE.__file__)
-        data_dir = data_path.parent
-        self.source_zip_path = data_dir / 'ziptestdata.zip'
-        self.zip_path = pathlib.Path(f'{uuid.uuid4()}.zip').absolute()
-        self.zip_path.write_bytes(self.source_zip_path.read_bytes())
-        sys.path.append(str(self.zip_path))
-        self.data = import_module('ziptestdata')
-
-    def tearDown(self):
-        try:
-            sys.path.remove(str(self.zip_path))
-        except ValueError:
-            pass
-
-        try:
-            del sys.path_importer_cache[str(self.zip_path)]
-            del sys.modules[self.data.__name__]
-        except KeyError:
-            pass
-
-        try:
-            unlink(self.zip_path)
-        except OSError:
-            # If the test fails, this will probably fail too
-            pass
-
     def test_iterdir_does_not_keep_open(self):
-        c = [item.name for item in resources.files('ziptestdata').iterdir()]
-        self.zip_path.unlink()
-        del c
+        [item.name for item in resources.files('data01').iterdir()]
 
     def test_is_file_does_not_keep_open(self):
-        c = resources.files('ziptestdata').joinpath('binary.file').is_file()
-        self.zip_path.unlink()
-        del c
+        resources.files('data01').joinpath('binary.file').is_file()
 
     def test_is_file_failure_does_not_keep_open(self):
-        c = resources.files('ziptestdata').joinpath('not-present').is_file()
-        self.zip_path.unlink()
-        del c
+        resources.files('data01').joinpath('not-present').is_file()
 
     @unittest.skip("Desired but not supported.")
     def test_as_file_does_not_keep_open(self):  # pragma: no cover
-        c = resources.as_file(resources.files('ziptestdata') / 'binary.file')
-        self.zip_path.unlink()
-        del c
+        resources.as_file(resources.files('data01') / 'binary.file')
 
     def test_entered_path_does_not_keep_open(self):
-        # This is what certifi does on import to make its bundle
-        # available for the process duration.
-        c = resources.as_file(
-            resources.files('ziptestdata') / 'binary.file'
-        ).__enter__()
-        self.zip_path.unlink()
-        del c
+        """
+        Mimic what certifi does on import to make its bundle
+        available for the process duration.
+        """
+        resources.as_file(resources.files('data01') / 'binary.file').__enter__()
 
     def test_read_binary_does_not_keep_open(self):
-        c = resources.files('ziptestdata').joinpath('binary.file').read_bytes()
-        self.zip_path.unlink()
-        del c
+        resources.files('data01').joinpath('binary.file').read_bytes()
 
     def test_read_text_does_not_keep_open(self):
-        c = resources.files('ziptestdata').joinpath('utf-8.file').read_text()
-        self.zip_path.unlink()
-        del c
-
-
-class ResourceFromNamespaceTest01(unittest.TestCase):
-    site_dir = str(pathlib.Path(__file__).parent)
+        resources.files('data01').joinpath('utf-8.file').read_text(encoding='utf-8')
 
-    @classmethod
-    def setUpClass(cls):
-        sys.path.append(cls.site_dir)
-
-    @classmethod
-    def tearDownClass(cls):
-        sys.path.remove(cls.site_dir)
 
+class ResourceFromNamespaceTests:
     def test_is_submodule_resource(self):
         self.assertTrue(
             resources.files(import_module('namespacedata01'))
@@ -245,7 +186,9 @@ def test_submodule_contents(self):
             contents.remove('__pycache__')
         except KeyError:
             pass
-        self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+        self.assertEqual(
+            contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
+        )
 
     def test_submodule_contents_by_name(self):
         contents = names(resources.files('namespacedata01'))
@@ -253,7 +196,45 @@ def test_submodule_contents_by_name(self):
             contents.remove('__pycache__')
         except KeyError:
             pass
-        self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+        self.assertEqual(
+            contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
+        )
+
+    def test_submodule_sub_contents(self):
+        contents = names(resources.files(import_module('namespacedata01.subdirectory')))
+        try:
+            contents.remove('__pycache__')
+        except KeyError:
+            pass
+        self.assertEqual(contents, {'binary.file'})
+
+    def test_submodule_sub_contents_by_name(self):
+        contents = names(resources.files('namespacedata01.subdirectory'))
+        try:
+            contents.remove('__pycache__')
+        except KeyError:
+            pass
+        self.assertEqual(contents, {'binary.file'})
+
+
+class ResourceFromNamespaceDiskTests(ResourceFromNamespaceTests, unittest.TestCase):
+    site_dir = str(pathlib.Path(__file__).parent)
+
+    @classmethod
+    def setUpClass(cls):
+        sys.path.append(cls.site_dir)
+
+    @classmethod
+    def tearDownClass(cls):
+        sys.path.remove(cls.site_dir)
+
+
+class ResourceFromNamespaceZipTests(
+    util.ZipSetupBase,
+    ResourceFromNamespaceTests,
+    unittest.TestCase,
+):
+    ZIP_MODULE = 'namespacedata01'
 
 
 if __name__ == '__main__':
diff --git a/pkg_resources/_vendor/importlib_resources/tests/update-zips.py b/pkg_resources/_vendor/importlib_resources/tests/update-zips.py
deleted file mode 100644
index 231334aa7e..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/update-zips.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""
-Generate the zip test data files.
-
-Run to build the tests/zipdataNN/ziptestdata.zip files from
-files in tests/dataNN.
-
-Replaces the file with the working copy, but does not commit anything
-to the source repo.
-"""
-
-import contextlib
-import os
-import pathlib
-import zipfile
-
-
-def main():
-    """
-    >>> from unittest import mock
-    >>> monkeypatch = getfixture('monkeypatch')
-    >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock())
-    >>> print(); main()  # print workaround for bpo-32509
-    
-    ...data01... -> ziptestdata/...
-    ...
-    ...data02... -> ziptestdata/...
-    ...
-    """
-    suffixes = '01', '02'
-    tuple(map(generate, suffixes))
-
-
-def generate(suffix):
-    root = pathlib.Path(__file__).parent.relative_to(os.getcwd())
-    zfpath = root / f'zipdata{suffix}/ziptestdata.zip'
-    with zipfile.ZipFile(zfpath, 'w') as zf:
-        for src, rel in walk(root / f'data{suffix}'):
-            dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix())
-            print(src, '->', dst)
-            zf.write(src, dst)
-
-
-def walk(datapath):
-    for dirpath, dirnames, filenames in os.walk(datapath):
-        with contextlib.suppress(ValueError):
-            dirnames.remove('__pycache__')
-        for filename in filenames:
-            res = pathlib.Path(dirpath) / filename
-            rel = res.relative_to(datapath)
-            yield res, rel
-
-
-__name__ == '__main__' and main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/util.py b/pkg_resources/_vendor/importlib_resources/tests/util.py
index b596c0ce4f..fb827d2fa0 100644
--- a/pkg_resources/_vendor/importlib_resources/tests/util.py
+++ b/pkg_resources/_vendor/importlib_resources/tests/util.py
@@ -4,11 +4,12 @@
 import sys
 import types
 import pathlib
+import contextlib
 
 from . import data01
-from . import zipdata01
 from ..abc import ResourceReader
-from ._compat import import_helper
+from .compat.py39 import import_helper, os_helper
+from . import zip as zip_
 
 
 from importlib.machinery import ModuleSpec
@@ -80,32 +81,44 @@ def execute(self, package, path):
         """
 
     def test_package_name(self):
-        # Passing in the package name should succeed.
+        """
+        Passing in the package name should succeed.
+        """
         self.execute(data01.__name__, 'utf-8.file')
 
     def test_package_object(self):
-        # Passing in the package itself should succeed.
+        """
+        Passing in the package itself should succeed.
+        """
         self.execute(data01, 'utf-8.file')
 
     def test_string_path(self):
-        # Passing in a string for the path should succeed.
+        """
+        Passing in a string for the path should succeed.
+        """
         path = 'utf-8.file'
         self.execute(data01, path)
 
     def test_pathlib_path(self):
-        # Passing in a pathlib.PurePath object for the path should succeed.
+        """
+        Passing in a pathlib.PurePath object for the path should succeed.
+        """
         path = pathlib.PurePath('utf-8.file')
         self.execute(data01, path)
 
     def test_importing_module_as_side_effect(self):
-        # The anchor package can already be imported.
+        """
+        The anchor package can already be imported.
+        """
         del sys.modules[data01.__name__]
         self.execute(data01.__name__, 'utf-8.file')
 
     def test_missing_path(self):
-        # Attempting to open or read or request the path for a
-        # non-existent path should succeed if open_resource
-        # can return a viable data stream.
+        """
+        Attempting to open or read or request the path for a
+        non-existent path should succeed if open_resource
+        can return a viable data stream.
+        """
         bytes_data = io.BytesIO(b'Hello, world!')
         package = create_package(file=bytes_data, path=FileNotFoundError())
         self.execute(package, 'utf-8.file')
@@ -129,39 +142,23 @@ def test_useless_loader(self):
 
 
 class ZipSetupBase:
-    ZIP_MODULE = None
-
-    @classmethod
-    def setUpClass(cls):
-        data_path = pathlib.Path(cls.ZIP_MODULE.__file__)
-        data_dir = data_path.parent
-        cls._zip_path = str(data_dir / 'ziptestdata.zip')
-        sys.path.append(cls._zip_path)
-        cls.data = importlib.import_module('ziptestdata')
-
-    @classmethod
-    def tearDownClass(cls):
-        try:
-            sys.path.remove(cls._zip_path)
-        except ValueError:
-            pass
-
-        try:
-            del sys.path_importer_cache[cls._zip_path]
-            del sys.modules[cls.data.__name__]
-        except KeyError:
-            pass
-
-        try:
-            del cls.data
-            del cls._zip_path
-        except AttributeError:
-            pass
+    ZIP_MODULE = 'data01'
 
     def setUp(self):
-        modules = import_helper.modules_setup()
-        self.addCleanup(import_helper.modules_cleanup, *modules)
+        self.fixtures = contextlib.ExitStack()
+        self.addCleanup(self.fixtures.close)
+
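+        # Isolate sys.modules so imports from the test zip don't leak between tests.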
+        self.fixtures.enter_context(import_helper.isolated_modules())
+
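+        # Zip the source data package into a temp dir and make it importable.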
+        temp_dir = self.fixtures.enter_context(os_helper.temp_dir())
+        modules = pathlib.Path(temp_dir) / 'zipped modules.zip'
+        src_path = pathlib.Path(__file__).parent.joinpath(self.ZIP_MODULE)
+        self.fixtures.enter_context(
+            import_helper.DirsOnSysPath(str(zip_.make_zip_file(src_path, modules)))
+        )
+
+        self.data = importlib.import_module(self.ZIP_MODULE)
 
 
 class ZipSetup(ZipSetupBase):
-    ZIP_MODULE = zipdata01  # type: ignore
+    pass
diff --git a/pkg_resources/_vendor/importlib_resources/tests/zip.py b/pkg_resources/_vendor/importlib_resources/tests/zip.py
new file mode 100644
index 0000000000..962195a901
--- /dev/null
+++ b/pkg_resources/_vendor/importlib_resources/tests/zip.py
@@ -0,0 +1,32 @@
+"""
+Generate zip test data files.
+"""
+
+import contextlib
+import os
+import pathlib
+import zipfile
+
+import zipp
+
+
+def make_zip_file(src, dst):
+    """
+    Zip the files in src into a new zipfile at dst.
+    """
+    with zipfile.ZipFile(dst, 'w') as zf:
+        for src_path, rel in walk(src):
+            dst_name = src.name / pathlib.PurePosixPath(rel.as_posix())
+            zf.write(src_path, dst_name)
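+        # Write explicit entries for directories implied by the files' paths.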
+        zipp.CompleteDirs.inject(zf)
+    return dst
+
+
+def walk(datapath):
+    for dirpath, dirnames, filenames in os.walk(datapath):
+        with contextlib.suppress(ValueError):
+            dirnames.remove('__pycache__')
+        for filename in filenames:
+            res = pathlib.Path(dirpath) / filename
+            rel = res.relative_to(datapath)
+            yield res, rel
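+
+
+# Typical use, mirroring ZipSetupBase.setUp in tests/util.py:
+#
+#     src = pathlib.Path(__file__).parent / 'data01'
+#     zip_path = make_zip_file(src, pathlib.Path(temp_dir) / 'zipped modules.zip')
+#     # putting str(zip_path) on sys.path makes 'data01' importable from the zip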
diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip b/pkg_resources/_vendor/importlib_resources/tests/zipdata01/ziptestdata.zip
deleted file mode 100644
index 9a3bb0739f87e97c1084b94d7d153680f6727738..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 876
zcmWIWW@Zs#00HOCX@Q%&m27l?Y!DU);;PJolGNgol*E!m{nC;&T|+ayw9K5;|NlG~
zQWMD
z9;rDw`8o=rA#S=B3g!7lIVp-}COK17UPc
zNtt;*xhM-3R!jMEPhCreO-3*u>5Df}T7+BJ{639e$2uhfsIs`pJ5Qf}C
xGXyDE@VNvOv@o!wQJfLgCAgysx3f@9jKpUmiW^zkK<;1z!tFpk^MROw0RS~O%0&PG

diff --git a/pkg_resources/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip b/pkg_resources/_vendor/importlib_resources/tests/zipdata02/ziptestdata.zip
deleted file mode 100644
index d63ff512d2807ef2fd259455283b81b02e0e45fb..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 698
zcmWIWW@Zs#00HOCX@Ot{ln@8fRhb1Psl_EJi6x2p@$s2?nI-Y@dIgmMI5kP5Y0A$_
z#jWw|&p#`9ff_(q7K_HB)Z+ZoqU2OVy^@L&ph*fa0WRVlP*R?c+X1opI-R&20MZDv
z&j{oIpa8N17@0(vaR(gGH(;=&5k%n(M%;#g0ulz6G@1gL$cA79E2=^00gEsw4~s!C
zUxI@ZWaIMqz|BszK;s4KsL2<9jRy!Q2E6`2cTLHjr{wAk1ZCU@!+_
G1_l6Bc%f?m

diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER b/pkg_resources/_vendor/inflect-7.3.1.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/inflect-7.3.1.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE b/pkg_resources/_vendor/inflect-7.3.1.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/LICENSE
rename to pkg_resources/_vendor/inflect-7.3.1.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/inflect-7.3.1.dist-info/METADATA b/pkg_resources/_vendor/inflect-7.3.1.dist-info/METADATA
new file mode 100644
index 0000000000..9a2097a54a
--- /dev/null
+++ b/pkg_resources/_vendor/inflect-7.3.1.dist-info/METADATA
@@ -0,0 +1,591 @@
+Metadata-Version: 2.1
+Name: inflect
+Version: 7.3.1
+Summary: Correctly generate plurals, singular nouns, ordinals, indefinite articles
+Author-email: Paul Dyson 
+Maintainer-email: "Jason R. Coombs" 
+Project-URL: Source, https://github.com/jaraco/inflect
+Keywords: plural,inflect,participle
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Linguistic
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: more-itertools >=8.5.0
+Requires-Dist: typeguard >=4.0.1
+Requires-Dist: typing-extensions ; python_version < "3.9"
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-mypy ; extra == 'test'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
+Requires-Dist: pygments ; extra == 'test'
+
+.. image:: https://img.shields.io/pypi/v/inflect.svg
+   :target: https://pypi.org/project/inflect
+
+.. image:: https://img.shields.io/pypi/pyversions/inflect.svg
+
+.. image:: https://github.com/jaraco/inflect/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/inflect/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/inflect/badge/?version=latest
+   :target: https://inflect.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/inflect
+   :target: https://tidelift.com/subscription/pkg/pypi-inflect?utm_source=pypi-inflect&utm_medium=readme
+
+NAME
+====
+
+inflect.py - Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words.
+
+SYNOPSIS
+========
+
+.. code-block:: python
+
+    import inflect
+
+    p = inflect.engine()
+
+    # METHODS:
+
+    # plural plural_noun plural_verb plural_adj singular_noun no num
+    # compare compare_nouns compare_verbs compare_adjs
+    # a an
+    # present_participle
+    # ordinal number_to_words
+    # join
+    # inflect classical gender
+    # defnoun defverb defadj defa defan
+
+
+    # UNCONDITIONALLY FORM THE PLURAL
+
+    print("The plural of ", word, " is ", p.plural(word))
+
+
+    # CONDITIONALLY FORM THE PLURAL
+
+    print("I saw", cat_count, p.plural("cat", cat_count))
+
+
+    # FORM PLURALS FOR SPECIFIC PARTS OF SPEECH
+
+    print(
+        p.plural_noun("I", N1),
+        p.plural_verb("saw", N1),
+        p.plural_adj("my", N2),
+        p.plural_noun("saw", N2),
+    )
+
+
+    # FORM THE SINGULAR OF PLURAL NOUNS
+
+    print("The singular of ", word, " is ", p.singular_noun(word))
+
+    # SELECT THE GENDER OF SINGULAR PRONOUNS
+
+    print(p.singular_noun("they"))  # 'it'
+    p.gender("feminine")
+    print(p.singular_noun("they"))  # 'she'
+
+
+    # DEAL WITH "0/1/N" -> "no/1/N" TRANSLATION:
+
+    print("There ", p.plural_verb("was", errors), p.no(" error", errors))
+
+
+    # USE DEFAULT COUNTS:
+
+    print(
+        p.num(N1, ""),
+        p.plural("I"),
+        p.plural_verb(" saw"),
+        p.num(N2),
+        p.plural_noun(" saw"),
+    )
+    print("There ", p.num(errors, ""), p.plural_verb("was"), p.no(" error"))
+
+
+    # COMPARE TWO WORDS "NUMBER-INSENSITIVELY":
+
+    if p.compare(word1, word2):
+        print("same")
+    if p.compare_nouns(word1, word2):
+        print("same noun")
+    if p.compare_verbs(word1, word2):
+        print("same verb")
+    if p.compare_adjs(word1, word2):
+        print("same adj.")
+
+
+    # ADD CORRECT "a" OR "an" FOR A GIVEN WORD:
+
+    print("Did you want ", p.a(thing), " or ", p.an(idea))
+
+
+    # CONVERT NUMERALS INTO ORDINALS (i.e. 1->1st, 2->2nd, 3->3rd, etc.)
+
+    print("It was", p.ordinal(position), " from the left\n")
+
+    # CONVERT NUMERALS TO WORDS (i.e. 1->"one", 101->"one hundred and one", etc.)
+    # RETURNS A SINGLE STRING...
+
+    words = p.number_to_words(1234)
+    # "one thousand, two hundred and thirty-four"
+    words = p.number_to_words(p.ordinal(1234))
+    # "one thousand, two hundred and thirty-fourth"
+
+
+    # GET BACK A LIST OF STRINGS, ONE FOR EACH "CHUNK"...
+
+    words = p.number_to_words(1234, wantlist=True)
+    # ("one thousand","two hundred and thirty-four")
+
+
+    # OPTIONAL PARAMETERS CHANGE TRANSLATION:
+
+    words = p.number_to_words(12345, group=1)
+    # "one, two, three, four, five"
+
+    words = p.number_to_words(12345, group=2)
+    # "twelve, thirty-four, five"
+
+    words = p.number_to_words(12345, group=3)
+    # "one twenty-three, forty-five"
+
+    words = p.number_to_words(1234, andword="")
+    # "one thousand, two hundred thirty-four"
+
+    words = p.number_to_words(1234, andword=", plus")
+    # "one thousand, two hundred, plus thirty-four"
+    # TODO: I get no comma before plus: check perl
+
+    words = p.number_to_words(555_1202, group=1, zero="oh")
+    # "five, five, five, one, two, oh, two"
+
+    words = p.number_to_words(555_1202, group=1, one="unity")
+    # "five, five, five, unity, two, oh, two"
+
+    words = p.number_to_words(123.456, group=1, decimal="mark")
+    # "one two three mark four five six"
+    # TODO: DOCBUG: perl gives commas here as do I
+
+    # LITERAL STYLE ONLY NAMES NUMBERS LESS THAN A CERTAIN THRESHOLD...
+
+    words = p.number_to_words(9, threshold=10)  # "nine"
+    words = p.number_to_words(10, threshold=10)  # "ten"
+    words = p.number_to_words(11, threshold=10)  # "11"
+    words = p.number_to_words(1000, threshold=10)  # "1,000"
+
+    # JOIN WORDS INTO A LIST:
+
+    mylist = p.join(("apple", "banana", "carrot"))
+    # "apple, banana, and carrot"
+
+    mylist = p.join(("apple", "banana"))
+    # "apple and banana"
+
+    mylist = p.join(("apple", "banana", "carrot"), final_sep="")
+    # "apple, banana and carrot"
+
+
+    # REQUIRE "CLASSICAL" PLURALS (EG: "focus"->"foci", "cherub"->"cherubim")
+
+    p.classical()  # USE ALL CLASSICAL PLURALS
+
+    p.classical(all=True)  # USE ALL CLASSICAL PLURALS
+    p.classical(all=False)  # SWITCH OFF CLASSICAL MODE
+
+    p.classical(zero=True)  #  "no error" INSTEAD OF "no errors"
+    p.classical(zero=False)  #  "no errors" INSTEAD OF "no error"
+
+    p.classical(herd=True)  #  "2 buffalo" INSTEAD OF "2 buffalos"
+    p.classical(herd=False)  #  "2 buffalos" INSTEAD OF "2 buffalo"
+
+    p.classical(persons=True)  # "2 chairpersons" INSTEAD OF "2 chairpeople"
+    p.classical(persons=False)  # "2 chairpeople" INSTEAD OF "2 chairpersons"
+
+    p.classical(ancient=True)  # "2 formulae" INSTEAD OF "2 formulas"
+    p.classical(ancient=False)  # "2 formulas" INSTEAD OF "2 formulae"
+
+
+    # INTERPOLATE "plural()", "plural_noun()", "plural_verb()", "plural_adj()", "singular_noun()",
+    # a()", "an()", "num()" AND "ordinal()" WITHIN STRINGS:
+
+    print(p.inflect("The plural of {0} is plural('{0}')".format(word)))
+    print(p.inflect("The singular of {0} is singular_noun('{0}')".format(word)))
+    print(p.inflect("I saw {0} plural('cat',{0})".format(cat_count)))
+    print(
+        p.inflect(
+            "plural('I',{0}) "
+            "plural_verb('saw',{0}) "
+            "plural('a',{1}) "
+            "plural_noun('saw',{1})".format(N1, N2)
+        )
+    )
+    print(
+        p.inflect(
+            "num({0}, False)plural('I') "
+            "plural_verb('saw') "
+            "num({1}, False)plural('a') "
+            "plural_noun('saw')".format(N1, N2)
+        )
+    )
+    print(p.inflect("I saw num({0}) plural('cat')\nnum()".format(cat_count)))
+    print(p.inflect("There plural_verb('was',{0}) no('error',{0})".format(errors)))
+    print(p.inflect("There num({0}, False)plural_verb('was') no('error')".format(errors)))
+    print(p.inflect("Did you want a('{0}') or an('{1}')".format(thing, idea)))
+    print(p.inflect("It was ordinal('{0}') from the left".format(position)))
+
+
+    # ADD USER-DEFINED INFLECTIONS (OVERRIDING INBUILT RULES):
+
+    p.defnoun("VAX", "VAXen")  # SINGULAR => PLURAL
+
+    p.defverb(
+        "will",  # 1ST PERSON SINGULAR
+        "shall",  # 1ST PERSON PLURAL
+        "will",  # 2ND PERSON SINGULAR
+        "will",  # 2ND PERSON PLURAL
+        "will",  # 3RD PERSON SINGULAR
+        "will",  # 3RD PERSON PLURAL
+    )
+
+    p.defadj("hir", "their")  # SINGULAR => PLURAL
+
+    p.defa("h")  # "AY HALWAYS SEZ 'HAITCH'!"
+
+    p.defan("horrendous.*")  # "AN HORRENDOUS AFFECTATION"
+
+
+DESCRIPTION
+===========
+
+The methods of the class ``engine`` in module ``inflect.py`` provide plural
+inflections, singular noun inflections, "a"/"an" selection for English words,
+and manipulation of numbers as words.
+
+Plural forms of all nouns, most verbs, and some adjectives are
+provided. Where appropriate, "classical" variants (for example: "brother" ->
+"brethren", "dogma" -> "dogmata", etc.) are also provided.
+
+Singular forms of nouns are also provided. The gender of singular pronouns
+can be chosen (for example "they" -> "it" or "she" or "he" or "they").
+
+Pronunciation-based "a"/"an" selection is provided for all English
+words, and most initialisms.
+
+It is also possible to inflect numerals (1,2,3) to ordinals (1st, 2nd, 3rd)
+and to English words ("one", "two", "three").
+
+In generating these inflections, ``inflect.py`` follows the Oxford
+English Dictionary and the guidelines in Fowler's Modern English
+Usage, preferring the former where the two disagree.
+
+The module is built around standard British spelling, but is designed
+to cope with common American variants as well. Slang, jargon, and
+other English dialects are *not* explicitly catered for.
+
+Where two or more inflected forms exist for a single word (typically a
+"classical" form and a "modern" form), ``inflect.py`` prefers the
+more common form (typically the "modern" one), unless "classical"
+processing has been specified
+(see `MODERN VS CLASSICAL INFLECTIONS`).
+
+FORMING PLURALS AND SINGULARS
+=============================
+
+Inflecting Plurals and Singulars
+--------------------------------
+
+All of the ``plural...`` plural inflection methods take the word to be
+inflected as their first argument and return the corresponding inflection.
+Note that all such methods expect the *singular* form of the word. The
+results of passing a plural form are undefined (and unlikely to be correct).
+Similarly, the ``si...`` singular inflection method expects the *plural*
+form of the word.
+
+The ``plural...`` methods also take an optional second argument,
+which indicates the grammatical "number" of the word (or of another word
+with which the word being inflected must agree). If the "number" argument is
+supplied and is not ``1`` (or ``"one"`` or ``"a"``, or some other adjective that
+implies the singular), the plural form of the word is returned. If the
+"number" argument *does* indicate singularity, the (uninflected) word
+itself is returned. If the number argument is omitted, the plural form
+is returned unconditionally.
+
+The ``si...`` method takes a second argument in a similar fashion. If it is
+some form of the number ``1``, or is omitted, the singular form is returned.
+Otherwise the plural is returned unaltered.
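+
+For example:
+
+.. code-block:: python
+
+    p.plural_noun("cat", 1)  # "cat"
+    p.plural_noun("cat", 2)  # "cats"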
+
+
+The various methods of ``inflect.engine`` are:
+
+
+
+``plural_noun(word, count=None)``
+
+ The method ``plural_noun()`` takes a *singular* English noun or
+ pronoun and returns its plural. Pronouns in the nominative ("I" ->
+ "we") and accusative ("me" -> "us") cases are handled, as are
+ possessive pronouns ("mine" -> "ours").
+
+
+``plural_verb(word, count=None)``
+
+ The method ``plural_verb()`` takes the *singular* form of a
+ conjugated verb (that is, one which is already in the correct "person"
+ and "mood") and returns the corresponding plural conjugation.
+
+
+``plural_adj(word, count=None)``
+
+ The method ``plural_adj()`` takes the *singular* form of
+ certain types of adjectives and returns the corresponding plural form.
+ Adjectives that are correctly handled include: "numerical" adjectives
+ ("a" -> "some"), demonstrative adjectives ("this" -> "these", "that" ->
+ "those"), and possessives ("my" -> "our", "cat's" -> "cats'", "child's"
+ -> "childrens'", etc.)
+
+
+``plural(word, count=None)``
+
+ The method ``plural()`` takes a *singular* English noun,
+ pronoun, verb, or adjective and returns its plural form. Where a word
+ has more than one inflection depending on its part of speech (for
+ example, the noun "thought" inflects to "thoughts", the verb "thought"
+ to "thought"), the (singular) noun sense is preferred to the (singular)
+ verb sense.
+
+ Hence ``plural("knife")`` will return "knives" ("knife" having been treated
+ as a singular noun), whereas ``plural("knifes")`` will return "knife"
+ ("knifes" having been treated as a 3rd person singular verb).
+
+ The inherent ambiguity of such cases suggests that,
+ where the part of speech is known, ``plural_noun``, ``plural_verb``, and
+ ``plural_adj`` should be used in preference to ``plural``.
+
+
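+ For example:
+
+ .. code-block:: python
+
+     p.plural("knife")   # "knives" (noun sense preferred)
+     p.plural("knifes")  # "knife" (verb sense)
+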
+``singular_noun(word, count=None)``
+
+ The method ``singular_noun()`` takes a *plural* English noun or
+ pronoun and returns its singular. Pronouns in the nominative ("we" ->
+ "I") and accusative ("us" -> "me") cases are handled, as are
+ possessive pronouns ("ours" -> "mine"). When third person
+ singular pronouns are returned they take the neuter gender by default
+ ("they" -> "it"), not ("they"-> "she") nor ("they" -> "he"). This can be
+ changed with ``gender()``.
+
+Note that all these methods ignore any whitespace surrounding the
+word being inflected, but preserve that whitespace when the result is
+returned. For example, ``plural(" cat  ")`` returns " cats  ".
+
+
+``gender(genderletter)``
+
+ The third person plural pronoun takes the same form for the feminine,
+ masculine and neuter (e.g. "they"). The singular, however, depends upon
+ gender (e.g. "she", "he", "it" and "they" -- "they" being the
+ gender-neutral form). By default ``singular_noun`` returns the neuter form;
+ however, the gender can be selected with the ``gender`` method. Pass the
+ first letter of the gender to ``gender`` to return the f(eminine),
+ m(asculine), n(euter) or t(hey) form of the singular, e.g.
+ gender('f') followed by singular_noun('themselves') returns 'herself'.
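+
+ For example (as in the synopsis above):
+
+ .. code-block:: python
+
+     p.gender("feminine")
+     p.singular_noun("they")  # "she"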
+
+Numbered plurals
+----------------
+
+The ``plural...`` methods return only the inflected word, not the count that
+was used to inflect it. Thus, in order to produce "I saw 3 ducks", it
+is necessary to use:
+
+.. code-block:: python
+
+    print("I saw", N, p.plural_noun(animal, N))
+
+Since the usual purpose of producing a plural is to make it agree with
+a preceding count, inflect.py provides a method
+(``no(word, count)``) which, given a word and a(n optional) count, returns the
+count followed by the correctly inflected word. Hence the previous
+example can be rewritten:
+
+.. code-block:: python
+
+    print("I saw ", p.no(animal, N))
+
+In addition, if the count is zero (or some other term which implies
+zero, such as ``"zero"``, ``"nil"``, etc.) the count is replaced by the
+word "no". Hence, if ``N`` had the value zero, the previous example
+would print (the somewhat more elegant)::
+
+    I saw no animals
+
+rather than::
+
+    I saw 0 animals
+
+Note that the name of the method is a pun: the method
+returns either a number (a *No.*) or a ``"no"``, in front of the
+inflected word.
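+
+For example:
+
+.. code-block:: python
+
+    p.no("error", 3)  # "3 errors"
+    p.no("error", 1)  # "1 error"
+    p.no("error", 0)  # "no errors"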
+
+
+Reducing the number of counts required
+--------------------------------------
+
+In some contexts, the need to supply an explicit count to the various
+``plural...`` methods makes for tiresome repetition. For example:
+
+.. code-block:: python
+
+    print(
+        plural_adj("This", errors),
+        plural_noun(" error", errors),
+        plural_verb(" was", errors),
+        " fatal.",
+    )
+
+inflect.py therefore provides a method
+(``num(count=None, show=None)``) which may be used to set a persistent "default number"
+value. If such a value is set, it is subsequently used whenever an
+optional second "number" argument is omitted. The default value thus set
+can subsequently be removed by calling ``num()`` with no arguments.
+Hence we could rewrite the previous example:
+
+.. code-block:: python
+
+    p.num(errors)
+    print(p.plural_adj("This"), p.plural_noun(" error"), p.plural_verb(" was"), "fatal.")
+    p.num()
+
+Normally, ``num()`` returns its first argument, so that it may also
+be "inlined" in contexts like:
+
+.. code-block:: python
+
+    print(p.num(errors), p.plural_noun(" error"), p.plural_verb(" was"), " detected.")
+    if severity > 1:
+        print(
+            p.plural_adj("This"), p.plural_noun(" error"), p.plural_verb(" was"), "fatal."
+        )
+
+However, in certain contexts (see `INTERPOLATING INFLECTIONS IN STRINGS`)
+it is preferable that ``num()`` return an empty string. Hence ``num()``
+provides an optional second argument. If that argument is supplied (that is, if
+it is defined) and evaluates to false, ``num`` returns an empty string
+instead of its first argument. For example:
+
+.. code-block:: python
+
+    print(p.num(errors, 0), p.no("error"), p.plural_verb(" was"), " detected.")
+    if severity > 1:
+        print(
+            p.plural_adj("This"), p.plural_noun(" error"), p.plural_verb(" was"), "fatal."
+        )
+
+
+
+Number-insensitive equality
+---------------------------
+
+inflect.py also provides a solution to the problem
+of comparing words of differing plurality through the methods
+``compare(word1, word2)``, ``compare_nouns(word1, word2)``,
+``compare_verbs(word1, word2)``, and ``compare_adjs(word1, word2)``.
+Each  of these methods takes two strings, and  compares them
+using the corresponding plural-inflection method (``plural()``, ``plural_noun()``,
+``plural_verb()``, and ``plural_adj()`` respectively).
+
+The comparison returns true if:
+
+- the strings are equal, or
+- one string is equal to a plural form of the other, or
+- the strings are two different plural forms of the one word.
+
+
+Hence all of the following return true:
+
+.. code-block:: python
+
+    p.compare("index", "index")  # RETURNS "eq"
+    p.compare("index", "indexes")  # RETURNS "s:p"
+    p.compare("index", "indices")  # RETURNS "s:p"
+    p.compare("indexes", "index")  # RETURNS "p:s"
+    p.compare("indices", "index")  # RETURNS "p:s"
+    p.compare("indices", "indexes")  # RETURNS "p:p"
+    p.compare("indexes", "indices")  # RETURNS "p:p"
+    p.compare("indices", "indices")  # RETURNS "eq"
+
+As indicated by the comments in the previous example, the actual value
+returned by the various ``compare`` methods encodes which of the
+three equality rules succeeded: "eq" is returned if the strings were
+identical, "s:p" if the strings were singular and plural respectively,
+"p:s" for plural and singular, and "p:p" for two distinct plurals.
+Inequality is indicated by returning an empty string.
+
+It should be noted that two distinct singular words which happen to take
+the same plural form are *not* considered equal, nor are cases where
+one (singular) word's plural is the other (plural) word's singular.
+Hence all of the following return false:
+
+.. code-block:: python
+
+    p.compare("base", "basis")  # ALTHOUGH BOTH -> "bases"
+    p.compare("syrinx", "syringe")  # ALTHOUGH BOTH -> "syringes"
+    p.compare("she", "he")  # ALTHOUGH BOTH -> "they"
+
+    p.compare("opus", "operas")  # ALTHOUGH "opus" -> "opera" -> "operas"
+    p.compare("taxi", "taxes")  # ALTHOUGH "taxi" -> "taxis" -> "taxes"
+
+Note too that, although the comparison is "number-insensitive", it is *not*
+case-insensitive (that is, ``compare("time", "Times")`` returns false). To
+obtain both number and case insensitivity, use the ``lower()`` method on both
+strings (that is, ``compare("time".lower(), "Times".lower())`` returns true).
+
+Related Functionality
+=====================
+
+Shout out to these libraries that provide related functionality:
+
+* `WordSet `_
+  parses identifiers like variable names into sets of words suitable for re-assembling
+  in another form.
+
+* `word2number `_ converts words to
+  a number.
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
diff --git a/pkg_resources/_vendor/inflect-7.3.1.dist-info/RECORD b/pkg_resources/_vendor/inflect-7.3.1.dist-info/RECORD
new file mode 100644
index 0000000000..73ff576be5
--- /dev/null
+++ b/pkg_resources/_vendor/inflect-7.3.1.dist-info/RECORD
@@ -0,0 +1,13 @@
+inflect-7.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+inflect-7.3.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+inflect-7.3.1.dist-info/METADATA,sha256=ZgMNY0WAZRs-U8wZiV2SMfjSKqBrMngXyDMs_CAwMwg,21079
+inflect-7.3.1.dist-info/RECORD,,
+inflect-7.3.1.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
+inflect-7.3.1.dist-info/top_level.txt,sha256=m52ujdp10CqT6jh1XQxZT6kEntcnv-7Tl7UiGNTzWZA,8
+inflect/__init__.py,sha256=Jxy1HJXZiZ85kHeLAhkmvz6EMTdFqBe-duvt34R6IOc,103796
+inflect/__pycache__/__init__.cpython-312.pyc,,
+inflect/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+inflect/compat/__pycache__/__init__.cpython-312.pyc,,
+inflect/compat/__pycache__/py38.cpython-312.pyc,,
+inflect/compat/py38.py,sha256=oObVfVnWX9_OpnOuEJn1mFbJxVhwyR5epbiTNXDDaso,160
+inflect/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/WHEEL b/pkg_resources/_vendor/inflect-7.3.1.dist-info/WHEEL
similarity index 65%
rename from pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/WHEEL
rename to pkg_resources/_vendor/inflect-7.3.1.dist-info/WHEEL
index becc9a66ea..564c6724e4 100644
--- a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/WHEEL
+++ b/pkg_resources/_vendor/inflect-7.3.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.37.1)
+Generator: setuptools (70.2.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/pkg_resources/_vendor/inflect-7.3.1.dist-info/top_level.txt b/pkg_resources/_vendor/inflect-7.3.1.dist-info/top_level.txt
new file mode 100644
index 0000000000..0fd75fab3e
--- /dev/null
+++ b/pkg_resources/_vendor/inflect-7.3.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+inflect
diff --git a/pkg_resources/_vendor/inflect/__init__.py b/pkg_resources/_vendor/inflect/__init__.py
new file mode 100644
index 0000000000..3eec27f4c6
--- /dev/null
+++ b/pkg_resources/_vendor/inflect/__init__.py
@@ -0,0 +1,3986 @@
+"""
+inflect: english language inflection
+ - correctly generate plurals, ordinals, indefinite articles
+ - convert numbers to words
+
+Copyright (C) 2010 Paul Dyson
+
+Based upon the Perl module
+`Lingua::EN::Inflect `_.
+
+methods:
+    classical inflect
+    plural plural_noun plural_verb plural_adj singular_noun no num a an
+    compare compare_nouns compare_verbs compare_adjs
+    present_participle
+    ordinal
+    number_to_words
+    join
+    defnoun defverb defadj defa defan
+
+INFLECTIONS:
+    classical inflect
+    plural plural_noun plural_verb plural_adj singular_noun compare
+    no num a an present_participle
+
+PLURALS:
+    classical inflect
+    plural plural_noun plural_verb plural_adj singular_noun no num
+    compare compare_nouns compare_verbs compare_adjs
+
+COMPARISONS:
+    classical
+    compare compare_nouns compare_verbs compare_adjs
+
+ARTICLES:
+    classical inflect num a an
+
+NUMERICAL:
+    ordinal number_to_words
+
+USER_DEFINED:
+    defnoun defverb defadj defa defan
+
+Exceptions:
+ UnknownClassicalModeError
+ BadNumValueError
+ BadChunkingOptionError
+ NumOutOfRangeError
+ BadUserDefinedPatternError
+ BadRcFileError
+ BadGenderError
+
+"""
+
+from __future__ import annotations
+
+import ast
+import collections
+import contextlib
+import functools
+import itertools
+import re
+from numbers import Number
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    Literal,
+    Match,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+    cast,
+)
+
+from more_itertools import windowed_complete
+from typeguard import typechecked
+
+from .compat.py38 import Annotated
+
+
+class UnknownClassicalModeError(Exception):
+    pass
+
+
+class BadNumValueError(Exception):
+    pass
+
+
+class BadChunkingOptionError(Exception):
+    pass
+
+
+class NumOutOfRangeError(Exception):
+    pass
+
+
+class BadUserDefinedPatternError(Exception):
+    pass
+
+
+class BadRcFileError(Exception):
+    pass
+
+
+class BadGenderError(Exception):
+    pass
+
+
+def enclose(s: str) -> str:
+    return f"(?:{s})"
+
+
+def joinstem(cutpoint: Optional[int] = 0, words: Optional[Iterable[str]] = None) -> str:
+    """
+    Join stem of each word in words into a string for regex.
+
+    Each word is truncated at cutpoint.
+
+    Cutpoint is usually negative indicating the number of letters to remove
+    from the end of each word.
+
+    >>> joinstem(-2, ["ephemeris", "iris", ".*itis"])
+    '(?:ephemer|ir|.*it)'
+
+    >>> joinstem(None, ["ephemeris"])
+    '(?:ephemeris)'
+
+    >>> joinstem(5, None)
+    '(?:)'
+    """
+    return enclose("|".join(w[:cutpoint] for w in words or []))
+
+
+def bysize(words: Iterable[str]) -> Dict[int, set]:
+    """
+    From a list of words, return a dict of sets sorted by word length.
+
+    >>> words = ['ant', 'cat', 'dog', 'pig', 'frog', 'goat', 'horse', 'elephant']
+    >>> ret = bysize(words)
+    >>> sorted(ret[3])
+    ['ant', 'cat', 'dog', 'pig']
+    >>> ret[5]
+    {'horse'}
+    """
+    res: Dict[int, set] = collections.defaultdict(set)
+    for w in words:
+        res[len(w)].add(w)
+    return res
+
+
+def make_pl_si_lists(
+    lst: Iterable[str],
+    plending: str,
+    siendingsize: Optional[int],
+    dojoinstem: bool = True,
+):
+    """
+    given a list of singular words: lst
+
+    an ending to append to make the plural: plending
+
+    the number of characters to remove from the singular
+    before appending plending: siendingsize
+
+    a flag whether to create a joinstem: dojoinstem
+
+    return:
+    a list of pluralised words: si_list (called si because this is what you need to
+    look for to make the singular)
+
+    the pluralised words as a dict of sets sorted by word length: si_bysize
+    the singular words as a dict of sets sorted by word length: pl_bysize
+    if dojoinstem is True: a regular expression that matches any of the stems: stem
+    """
+    if siendingsize is not None:
+        siendingsize = -siendingsize
+    si_list = [w[:siendingsize] + plending for w in lst]
+    pl_bysize = bysize(lst)
+    si_bysize = bysize(si_list)
+    if dojoinstem:
+        stem = joinstem(siendingsize, lst)
+        return si_list, si_bysize, pl_bysize, stem
+    else:
+        return si_list, si_bysize, pl_bysize
+
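+# Illustrative example: make_pl_si_lists(["iris"], "ides", 2) returns
+# (['irides'], {6: {'irides'}}, {4: {'iris'}}, '(?:ir)')
+# (the bysize results are defaultdicts, shown here as plain dicts).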
+
+# 1. PLURALS
+
+pl_sb_irregular_s = {
+    "corpus": "corpuses|corpora",
+    "opus": "opuses|opera",
+    "genus": "genera",
+    "mythos": "mythoi",
+    "penis": "penises|penes",
+    "testis": "testes",
+    "atlas": "atlases|atlantes",
+    "yes": "yeses",
+}
+
+pl_sb_irregular = {
+    "child": "children",
+    "chili": "chilis|chilies",
+    "brother": "brothers|brethren",
+    "infinity": "infinities|infinity",
+    "loaf": "loaves",
+    "lore": "lores|lore",
+    "hoof": "hoofs|hooves",
+    "beef": "beefs|beeves",
+    "thief": "thiefs|thieves",
+    "money": "monies",
+    "mongoose": "mongooses",
+    "ox": "oxen",
+    "cow": "cows|kine",
+    "graffito": "graffiti",
+    "octopus": "octopuses|octopodes",
+    "genie": "genies|genii",
+    "ganglion": "ganglions|ganglia",
+    "trilby": "trilbys",
+    "turf": "turfs|turves",
+    "numen": "numina",
+    "atman": "atmas",
+    "occiput": "occiputs|occipita",
+    "sabretooth": "sabretooths",
+    "sabertooth": "sabertooths",
+    "lowlife": "lowlifes",
+    "flatfoot": "flatfoots",
+    "tenderfoot": "tenderfoots",
+    "romany": "romanies",
+    "jerry": "jerries",
+    "mary": "maries",
+    "talouse": "talouses",
+    "rom": "roma",
+    "carmen": "carmina",
+}
+
+pl_sb_irregular.update(pl_sb_irregular_s)
+# pl_sb_irregular_keys = enclose('|'.join(pl_sb_irregular.keys()))
+
+pl_sb_irregular_caps = {
+    "Romany": "Romanies",
+    "Jerry": "Jerrys",
+    "Mary": "Marys",
+    "Rom": "Roma",
+}
+
+pl_sb_irregular_compound = {"prima donna": "prima donnas|prime donne"}
+
+si_sb_irregular = {v: k for (k, v) in pl_sb_irregular.items()}
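+# Plurals given as "modern|classical" alternatives (e.g. "corpuses|corpora")
+# are split below so that each form maps back to the same singular.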
+for k in list(si_sb_irregular):
+    if "|" in k:
+        k1, k2 = k.split("|")
+        si_sb_irregular[k1] = si_sb_irregular[k2] = si_sb_irregular[k]
+        del si_sb_irregular[k]
+si_sb_irregular_caps = {v: k for (k, v) in pl_sb_irregular_caps.items()}
+si_sb_irregular_compound = {v: k for (k, v) in pl_sb_irregular_compound.items()}
+for k in list(si_sb_irregular_compound):
+    if "|" in k:
+        k1, k2 = k.split("|")
+        si_sb_irregular_compound[k1] = si_sb_irregular_compound[k2] = (
+            si_sb_irregular_compound[k]
+        )
+        del si_sb_irregular_compound[k]
+
+# si_sb_irregular_keys = enclose('|'.join(si_sb_irregular.keys()))
+
+# Z's that don't double
+
+pl_sb_z_zes_list = ("quartz", "topaz")
+pl_sb_z_zes_bysize = bysize(pl_sb_z_zes_list)
+
+pl_sb_ze_zes_list = ("snooze",)
+pl_sb_ze_zes_bysize = bysize(pl_sb_ze_zes_list)
+
+
+# CLASSICAL "..is" -> "..ides"
+
+pl_sb_C_is_ides_complete = [
+    # GENERAL WORDS...
+    "ephemeris",
+    "iris",
+    "clitoris",
+    "chrysalis",
+    "epididymis",
+]
+
+pl_sb_C_is_ides_endings = [
+    # INFLAMATIONS...
+    "itis"
+]
+
+pl_sb_C_is_ides = joinstem(
+    -2, pl_sb_C_is_ides_complete + [f".*{w}" for w in pl_sb_C_is_ides_endings]
+)
+
+pl_sb_C_is_ides_list = pl_sb_C_is_ides_complete + pl_sb_C_is_ides_endings
+
+(
+    si_sb_C_is_ides_list,
+    si_sb_C_is_ides_bysize,
+    pl_sb_C_is_ides_bysize,
+) = make_pl_si_lists(pl_sb_C_is_ides_list, "ides", 2, dojoinstem=False)
+
+
+# CLASSICAL "..a" -> "..ata"
+
+pl_sb_C_a_ata_list = (
+    "anathema",
+    "bema",
+    "carcinoma",
+    "charisma",
+    "diploma",
+    "dogma",
+    "drama",
+    "edema",
+    "enema",
+    "enigma",
+    "lemma",
+    "lymphoma",
+    "magma",
+    "melisma",
+    "miasma",
+    "oedema",
+    "sarcoma",
+    "schema",
+    "soma",
+    "stigma",
+    "stoma",
+    "trauma",
+    "gumma",
+    "pragma",
+)
+
+(
+    si_sb_C_a_ata_list,
+    si_sb_C_a_ata_bysize,
+    pl_sb_C_a_ata_bysize,
+    pl_sb_C_a_ata,
+) = make_pl_si_lists(pl_sb_C_a_ata_list, "ata", 1)
+
+# UNCONDITIONAL "..a" -> "..ae"
+
+pl_sb_U_a_ae_list = (
+    "alumna",
+    "alga",
+    "vertebra",
+    "persona",
+    "vita",
+)
+(
+    si_sb_U_a_ae_list,
+    si_sb_U_a_ae_bysize,
+    pl_sb_U_a_ae_bysize,
+    pl_sb_U_a_ae,
+) = make_pl_si_lists(pl_sb_U_a_ae_list, "e", None)
+
+# CLASSICAL "..a" -> "..ae"
+
+pl_sb_C_a_ae_list = (
+    "amoeba",
+    "antenna",
+    "formula",
+    "hyperbola",
+    "medusa",
+    "nebula",
+    "parabola",
+    "abscissa",
+    "hydra",
+    "nova",
+    "lacuna",
+    "aurora",
+    "umbra",
+    "flora",
+    "fauna",
+)
+(
+    si_sb_C_a_ae_list,
+    si_sb_C_a_ae_bysize,
+    pl_sb_C_a_ae_bysize,
+    pl_sb_C_a_ae,
+) = make_pl_si_lists(pl_sb_C_a_ae_list, "e", None)
+
+
+# CLASSICAL "..en" -> "..ina"
+
+pl_sb_C_en_ina_list = ("stamen", "foramen", "lumen")
+
+(
+    si_sb_C_en_ina_list,
+    si_sb_C_en_ina_bysize,
+    pl_sb_C_en_ina_bysize,
+    pl_sb_C_en_ina,
+) = make_pl_si_lists(pl_sb_C_en_ina_list, "ina", 2)
+
+
+# UNCONDITIONAL "..um" -> "..a"
+
+pl_sb_U_um_a_list = (
+    "bacterium",
+    "agendum",
+    "desideratum",
+    "erratum",
+    "stratum",
+    "datum",
+    "ovum",
+    "extremum",
+    "candelabrum",
+)
+(
+    si_sb_U_um_a_list,
+    si_sb_U_um_a_bysize,
+    pl_sb_U_um_a_bysize,
+    pl_sb_U_um_a,
+) = make_pl_si_lists(pl_sb_U_um_a_list, "a", 2)
+
+# CLASSICAL "..um" -> "..a"
+
+pl_sb_C_um_a_list = (
+    "maximum",
+    "minimum",
+    "momentum",
+    "optimum",
+    "quantum",
+    "cranium",
+    "curriculum",
+    "dictum",
+    "phylum",
+    "aquarium",
+    "compendium",
+    "emporium",
+    "encomium",
+    "gymnasium",
+    "honorarium",
+    "interregnum",
+    "lustrum",
+    "memorandum",
+    "millennium",
+    "rostrum",
+    "spectrum",
+    "speculum",
+    "stadium",
+    "trapezium",
+    "ultimatum",
+    "medium",
+    "vacuum",
+    "velum",
+    "consortium",
+    "arboretum",
+)
+
+(
+    si_sb_C_um_a_list,
+    si_sb_C_um_a_bysize,
+    pl_sb_C_um_a_bysize,
+    pl_sb_C_um_a,
+) = make_pl_si_lists(pl_sb_C_um_a_list, "a", 2)
+
+
+# UNCONDITIONAL "..us" -> "i"
+
+pl_sb_U_us_i_list = (
+    "alumnus",
+    "alveolus",
+    "bacillus",
+    "bronchus",
+    "locus",
+    "nucleus",
+    "stimulus",
+    "meniscus",
+    "sarcophagus",
+)
+(
+    si_sb_U_us_i_list,
+    si_sb_U_us_i_bysize,
+    pl_sb_U_us_i_bysize,
+    pl_sb_U_us_i,
+) = make_pl_si_lists(pl_sb_U_us_i_list, "i", 2)
+
+# CLASSICAL "..us" -> "..i"
+
+pl_sb_C_us_i_list = (
+    "focus",
+    "radius",
+    "genius",
+    "incubus",
+    "succubus",
+    "nimbus",
+    "fungus",
+    "nucleolus",
+    "stylus",
+    "torus",
+    "umbilicus",
+    "uterus",
+    "hippopotamus",
+    "cactus",
+)
+
+(
+    si_sb_C_us_i_list,
+    si_sb_C_us_i_bysize,
+    pl_sb_C_us_i_bysize,
+    pl_sb_C_us_i,
+) = make_pl_si_lists(pl_sb_C_us_i_list, "i", 2)
+
+
+# CLASSICAL "..us" -> "..us"  (ASSIMILATED 4TH DECLENSION LATIN NOUNS)
+
+pl_sb_C_us_us = (
+    "status",
+    "apparatus",
+    "prospectus",
+    "sinus",
+    "hiatus",
+    "impetus",
+    "plexus",
+)
+pl_sb_C_us_us_bysize = bysize(pl_sb_C_us_us)
+
+# UNCONDITIONAL "..on" -> "a"
+
+pl_sb_U_on_a_list = (
+    "criterion",
+    "perihelion",
+    "aphelion",
+    "phenomenon",
+    "prolegomenon",
+    "noumenon",
+    "organon",
+    "asyndeton",
+    "hyperbaton",
+)
+(
+    si_sb_U_on_a_list,
+    si_sb_U_on_a_bysize,
+    pl_sb_U_on_a_bysize,
+    pl_sb_U_on_a,
+) = make_pl_si_lists(pl_sb_U_on_a_list, "a", 2)
+
+# CLASSICAL "..on" -> "..a"
+
+pl_sb_C_on_a_list = ("oxymoron",)
+
+(
+    si_sb_C_on_a_list,
+    si_sb_C_on_a_bysize,
+    pl_sb_C_on_a_bysize,
+    pl_sb_C_on_a,
+) = make_pl_si_lists(pl_sb_C_on_a_list, "a", 2)
+
+
+# CLASSICAL "..o" -> "..i"  (BUT NORMALLY -> "..os")
+
+pl_sb_C_o_i = [
+    "solo",
+    "soprano",
+    "basso",
+    "alto",
+    "contralto",
+    "tempo",
+    "piano",
+    "virtuoso",
+]  # list not tuple so can concat for pl_sb_U_o_os
+
+pl_sb_C_o_i_bysize = bysize(pl_sb_C_o_i)
+si_sb_C_o_i_bysize = bysize([f"{w[:-1]}i" for w in pl_sb_C_o_i])
+
+pl_sb_C_o_i_stems = joinstem(-1, pl_sb_C_o_i)
+
+# ALWAYS "..o" -> "..os"
+
+pl_sb_U_o_os_complete = {"ado", "ISO", "NATO", "NCO", "NGO", "oto"}
+si_sb_U_o_os_complete = {f"{w}s" for w in pl_sb_U_o_os_complete}
+
+
+pl_sb_U_o_os_endings = [
+    "aficionado",
+    "aggro",
+    "albino",
+    "allegro",
+    "ammo",
+    "Antananarivo",
+    "archipelago",
+    "armadillo",
+    "auto",
+    "avocado",
+    "Bamako",
+    "Barquisimeto",
+    "bimbo",
+    "bingo",
+    "Biro",
+    "bolero",
+    "Bolzano",
+    "bongo",
+    "Boto",
+    "burro",
+    "Cairo",
+    "canto",
+    "cappuccino",
+    "casino",
+    "cello",
+    "Chicago",
+    "Chimango",
+    "cilantro",
+    "cochito",
+    "coco",
+    "Colombo",
+    "Colorado",
+    "commando",
+    "concertino",
+    "contango",
+    "credo",
+    "crescendo",
+    "cyano",
+    "demo",
+    "ditto",
+    "Draco",
+    "dynamo",
+    "embryo",
+    "Esperanto",
+    "espresso",
+    "euro",
+    "falsetto",
+    "Faro",
+    "fiasco",
+    "Filipino",
+    "flamenco",
+    "furioso",
+    "generalissimo",
+    "Gestapo",
+    "ghetto",
+    "gigolo",
+    "gizmo",
+    "Greensboro",
+    "gringo",
+    "Guaiabero",
+    "guano",
+    "gumbo",
+    "gyro",
+    "hairdo",
+    "hippo",
+    "Idaho",
+    "impetigo",
+    "inferno",
+    "info",
+    "intermezzo",
+    "intertrigo",
+    "Iquico",
+    "jumbo",
+    "junto",
+    "Kakapo",
+    "kilo",
+    "Kinkimavo",
+    "Kokako",
+    "Kosovo",
+    "Lesotho",
+    "libero",
+    "libido",
+    "libretto",
+    "lido",
+    "Lilo",
+    "limbo",
+    "limo",
+    "lineno",
+    "lingo",
+    "lino",
+    "livedo",
+    "loco",
+    "logo",
+    "lumbago",
+    "macho",
+    "macro",
+    "mafioso",
+    "magneto",
+    "magnifico",
+    "Majuro",
+    "Malabo",
+    "manifesto",
+    "Maputo",
+    "Maracaibo",
+    "medico",
+    "memo",
+    "metro",
+    "Mexico",
+    "micro",
+    "Milano",
+    "Monaco",
+    "mono",
+    "Montenegro",
+    "Morocco",
+    "Muqdisho",
+    "myo",
+    "neutrino",
+    "Ningbo",
+    "octavo",
+    "oregano",
+    "Orinoco",
+    "Orlando",
+    "Oslo",
+    "panto",
+    "Paramaribo",
+    "Pardusco",
+    "pedalo",
+    "photo",
+    "pimento",
+    "pinto",
+    "pleco",
+    "Pluto",
+    "pogo",
+    "polo",
+    "poncho",
+    "Porto-Novo",
+    "Porto",
+    "pro",
+    "psycho",
+    "pueblo",
+    "quarto",
+    "Quito",
+    "repo",
+    "rhino",
+    "risotto",
+    "rococo",
+    "rondo",
+    "Sacramento",
+    "saddo",
+    "sago",
+    "salvo",
+    "Santiago",
+    "Sapporo",
+    "Sarajevo",
+    "scherzando",
+    "scherzo",
+    "silo",
+    "sirocco",
+    "sombrero",
+    "staccato",
+    "sterno",
+    "stucco",
+    "stylo",
+    "sumo",
+    "Taiko",
+    "techno",
+    "terrazzo",
+    "testudo",
+    "timpano",
+    "tiro",
+    "tobacco",
+    "Togo",
+    "Tokyo",
+    "torero",
+    "Torino",
+    "Toronto",
+    "torso",
+    "tremolo",
+    "typo",
+    "tyro",
+    "ufo",
+    "UNESCO",
+    "vaquero",
+    "vermicello",
+    "verso",
+    "vibrato",
+    "violoncello",
+    "Virgo",
+    "weirdo",
+    "WHO",
+    "WTO",
+    "Yamoussoukro",
+    "yo-yo",
+    "zero",
+    "Zibo",
+] + pl_sb_C_o_i
+
+pl_sb_U_o_os_bysize = bysize(pl_sb_U_o_os_endings)
+si_sb_U_o_os_bysize = bysize([f"{w}s" for w in pl_sb_U_o_os_endings])
+
+
+# UNCONDITIONAL "..ch" -> "..chs"
+
+pl_sb_U_ch_chs_list = ("czech", "eunuch", "stomach")
+
+(
+    si_sb_U_ch_chs_list,
+    si_sb_U_ch_chs_bysize,
+    pl_sb_U_ch_chs_bysize,
+    pl_sb_U_ch_chs,
+) = make_pl_si_lists(pl_sb_U_ch_chs_list, "s", None)
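+
+# A sketch of what make_pl_si_lists (defined earlier in this file) returns
+# here, assuming its usual suffix handling: si_sb_U_ch_chs_list holds the
+# plural spellings ("czechs", "eunuchs", "stomachs"), the *_bysize dicts
+# bucket both lists by word length for fast suffix lookup, and
+# pl_sb_U_ch_chs is a regex alternation of the singular forms.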
+
+
+# UNCONDITIONAL "..[ei]x" -> "..ices"
+
+pl_sb_U_ex_ices_list = ("codex", "murex", "silex")
+(
+    si_sb_U_ex_ices_list,
+    si_sb_U_ex_ices_bysize,
+    pl_sb_U_ex_ices_bysize,
+    pl_sb_U_ex_ices,
+) = make_pl_si_lists(pl_sb_U_ex_ices_list, "ices", 2)
+
+pl_sb_U_ix_ices_list = ("radix", "helix")
+(
+    si_sb_U_ix_ices_list,
+    si_sb_U_ix_ices_bysize,
+    pl_sb_U_ix_ices_bysize,
+    pl_sb_U_ix_ices,
+) = make_pl_si_lists(pl_sb_U_ix_ices_list, "ices", 2)
+
+# CLASSICAL "..[ei]x" -> "..ices"
+
+pl_sb_C_ex_ices_list = (
+    "vortex",
+    "vertex",
+    "cortex",
+    "latex",
+    "pontifex",
+    "apex",
+    "index",
+    "simplex",
+)
+
+(
+    si_sb_C_ex_ices_list,
+    si_sb_C_ex_ices_bysize,
+    pl_sb_C_ex_ices_bysize,
+    pl_sb_C_ex_ices,
+) = make_pl_si_lists(pl_sb_C_ex_ices_list, "ices", 2)
+
+
+pl_sb_C_ix_ices_list = ("appendix",)
+
+(
+    si_sb_C_ix_ices_list,
+    si_sb_C_ix_ices_bysize,
+    pl_sb_C_ix_ices_bysize,
+    pl_sb_C_ix_ices,
+) = make_pl_si_lists(pl_sb_C_ix_ices_list, "ices", 2)
+
+
+# ARABIC: ".." -> "..i"
+
+pl_sb_C_i_list = ("afrit", "afreet", "efreet")
+
+(si_sb_C_i_list, si_sb_C_i_bysize, pl_sb_C_i_bysize, pl_sb_C_i) = make_pl_si_lists(
+    pl_sb_C_i_list, "i", None
+)
+
+
+# HEBREW: ".." -> "..im"
+
+pl_sb_C_im_list = ("goy", "seraph", "cherub")
+
+(si_sb_C_im_list, si_sb_C_im_bysize, pl_sb_C_im_bysize, pl_sb_C_im) = make_pl_si_lists(
+    pl_sb_C_im_list, "im", None
+)
+
+
+# UNCONDITIONAL "..man" -> "..mans"
+
+pl_sb_U_man_mans_list = """
+    ataman caiman cayman ceriman
+    desman dolman farman harman hetman
+    human leman ottoman shaman talisman
+""".split()
+pl_sb_U_man_mans_caps_list = """
+    Alabaman Bahaman Burman German
+    Hiroshiman Liman Nakayaman Norman Oklahoman
+    Panaman Roman Selman Sonaman Tacoman Yakiman
+    Yokohaman Yuman
+""".split()
+
+(
+    si_sb_U_man_mans_list,
+    si_sb_U_man_mans_bysize,
+    pl_sb_U_man_mans_bysize,
+) = make_pl_si_lists(pl_sb_U_man_mans_list, "s", None, dojoinstem=False)
+(
+    si_sb_U_man_mans_caps_list,
+    si_sb_U_man_mans_caps_bysize,
+    pl_sb_U_man_mans_caps_bysize,
+) = make_pl_si_lists(pl_sb_U_man_mans_caps_list, "s", None, dojoinstem=False)
+
+# UNCONDITIONAL "..louse" -> "..lice"
+pl_sb_U_louse_lice_list = ("booklouse", "grapelouse", "louse", "woodlouse")
+
+(
+    si_sb_U_louse_lice_list,
+    si_sb_U_louse_lice_bysize,
+    pl_sb_U_louse_lice_bysize,
+) = make_pl_si_lists(pl_sb_U_louse_lice_list, "lice", 5, dojoinstem=False)
+
+pl_sb_uninflected_s_complete = [
+    # PAIRS OR GROUPS SUBSUMED TO A SINGULAR...
+    "breeches",
+    "britches",
+    "pajamas",
+    "pyjamas",
+    "clippers",
+    "gallows",
+    "hijinks",
+    "headquarters",
+    "pliers",
+    "scissors",
+    "testes",
+    "herpes",
+    "pincers",
+    "shears",
+    "proceedings",
+    "trousers",
+    # UNASSIMILATED LATIN 4th DECLENSION
+    "cantus",
+    "coitus",
+    "nexus",
+    # RECENT IMPORTS...
+    "contretemps",
+    "corps",
+    "debris",
+    "siemens",
+    # DISEASES
+    "mumps",
+    # MISCELLANEOUS OTHERS...
+    "diabetes",
+    "jackanapes",
+    "series",
+    "species",
+    "subspecies",
+    "rabies",
+    "chassis",
+    "innings",
+    "news",
+    "mews",
+    "haggis",
+]
+
+pl_sb_uninflected_s_endings = [
+    # RECENT IMPORTS...
+    "ois",
+    # DISEASES
+    "measles",
+]
+
+pl_sb_uninflected_s = pl_sb_uninflected_s_complete + [
+    f".*{w}" for w in pl_sb_uninflected_s_endings
+]
+
+pl_sb_uninflected_herd = (
+    # DON'T INFLECT IN CLASSICAL MODE, OTHERWISE NORMAL INFLECTION
+    "wildebeest",
+    "swine",
+    "eland",
+    "bison",
+    "buffalo",
+    "cattle",
+    "elk",
+    "rhinoceros",
+    "zucchini",
+    "caribou",
+    "dace",
+    "grouse",
+    "guinea fowl",
+    "guinea-fowl",
+    "haddock",
+    "hake",
+    "halibut",
+    "herring",
+    "mackerel",
+    "pickerel",
+    "pike",
+    "roe",
+    "seed",
+    "shad",
+    "snipe",
+    "teal",
+    "turbot",
+    "water fowl",
+    "water-fowl",
+)
+
+pl_sb_uninflected_complete = [
+    # SOME FISH AND HERD ANIMALS
+    "tuna",
+    "salmon",
+    "mackerel",
+    "trout",
+    "bream",
+    "sea-bass",
+    "sea bass",
+    "carp",
+    "cod",
+    "flounder",
+    "whiting",
+    "moose",
+    # OTHER ODDITIES
+    "graffiti",
+    "djinn",
+    "samuri",
+    "offspring",
+    "pence",
+    "quid",
+    "hertz",
+] + pl_sb_uninflected_s_complete
+# SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE)
+
+pl_sb_uninflected_caps = [
+    # ALL NATIONALS ENDING IN -ese
+    "Portuguese",
+    "Amoyese",
+    "Borghese",
+    "Congoese",
+    "Faroese",
+    "Foochowese",
+    "Genevese",
+    "Genoese",
+    "Gilbertese",
+    "Hottentotese",
+    "Kiplingese",
+    "Kongoese",
+    "Lucchese",
+    "Maltese",
+    "Nankingese",
+    "Niasese",
+    "Pekingese",
+    "Piedmontese",
+    "Pistoiese",
+    "Sarawakese",
+    "Shavese",
+    "Vermontese",
+    "Wenchowese",
+    "Yengeese",
+]
+
+
+pl_sb_uninflected_endings = [
+    # UNCOUNTABLE NOUNS
+    "butter",
+    "cash",
+    "furniture",
+    "information",
+    # SOME FISH AND HERD ANIMALS
+    "fish",
+    "deer",
+    "sheep",
+    # ALL NATIONALS ENDING IN -ese
+    "nese",
+    "rese",
+    "lese",
+    "mese",
+    # DISEASES
+    "pox",
+    # OTHER ODDITIES
+    "craft",
+] + pl_sb_uninflected_s_endings
+# SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE)
+
+
+pl_sb_uninflected_bysize = bysize(pl_sb_uninflected_endings)
+
+
+# SINGULAR WORDS ENDING IN ...s (ALL INFLECT WITH ...es)
+
+pl_sb_singular_s_complete = [
+    "acropolis",
+    "aegis",
+    "alias",
+    "asbestos",
+    "bathos",
+    "bias",
+    "bronchitis",
+    "bursitis",
+    "caddis",
+    "cannabis",
+    "canvas",
+    "chaos",
+    "cosmos",
+    "dais",
+    "digitalis",
+    "epidermis",
+    "ethos",
+    "eyas",
+    "gas",
+    "glottis",
+    "hubris",
+    "ibis",
+    "lens",
+    "mantis",
+    "marquis",
+    "metropolis",
+    "pathos",
+    "pelvis",
+    "polis",
+    "rhinoceros",
+    "sassafras",
+    "trellis",
+] + pl_sb_C_is_ides_complete
+
+
+pl_sb_singular_s_endings = ["ss", "us"] + pl_sb_C_is_ides_endings
+
+pl_sb_singular_s_bysize = bysize(pl_sb_singular_s_endings)
+
+si_sb_singular_s_complete = [f"{w}es" for w in pl_sb_singular_s_complete]
+si_sb_singular_s_endings = [f"{w}es" for w in pl_sb_singular_s_endings]
+si_sb_singular_s_bysize = bysize(si_sb_singular_s_endings)
+
+pl_sb_singular_s_es = ["[A-Z].*es"]
+
+pl_sb_singular_s = enclose(
+    "|".join(
+        pl_sb_singular_s_complete
+        + [f".*{w}" for w in pl_sb_singular_s_endings]
+        + pl_sb_singular_s_es
+    )
+)
+
+
+# PLURALS ENDING IN uses -> use
+
+
+si_sb_ois_oi_case = ("Bolshois", "Hanois")
+
+si_sb_uses_use_case = ("Betelgeuses", "Duses", "Meuses", "Syracuses", "Toulouses")
+
+si_sb_uses_use = (
+    "abuses",
+    "applauses",
+    "blouses",
+    "carouses",
+    "causes",
+    "chartreuses",
+    "clauses",
+    "contuses",
+    "douses",
+    "excuses",
+    "fuses",
+    "grouses",
+    "hypotenuses",
+    "masseuses",
+    "menopauses",
+    "misuses",
+    "muses",
+    "overuses",
+    "pauses",
+    "peruses",
+    "profuses",
+    "recluses",
+    "reuses",
+    "ruses",
+    "souses",
+    "spouses",
+    "suffuses",
+    "transfuses",
+    "uses",
+)
+
+si_sb_ies_ie_case = (
+    "Addies",
+    "Aggies",
+    "Allies",
+    "Amies",
+    "Angies",
+    "Annies",
+    "Annmaries",
+    "Archies",
+    "Arties",
+    "Aussies",
+    "Barbies",
+    "Barries",
+    "Basies",
+    "Bennies",
+    "Bernies",
+    "Berties",
+    "Bessies",
+    "Betties",
+    "Billies",
+    "Blondies",
+    "Bobbies",
+    "Bonnies",
+    "Bowies",
+    "Brandies",
+    "Bries",
+    "Brownies",
+    "Callies",
+    "Carnegies",
+    "Carries",
+    "Cassies",
+    "Charlies",
+    "Cheries",
+    "Christies",
+    "Connies",
+    "Curies",
+    "Dannies",
+    "Debbies",
+    "Dixies",
+    "Dollies",
+    "Donnies",
+    "Drambuies",
+    "Eddies",
+    "Effies",
+    "Ellies",
+    "Elsies",
+    "Eries",
+    "Ernies",
+    "Essies",
+    "Eugenies",
+    "Fannies",
+    "Flossies",
+    "Frankies",
+    "Freddies",
+    "Gillespies",
+    "Goldies",
+    "Gracies",
+    "Guthries",
+    "Hallies",
+    "Hatties",
+    "Hetties",
+    "Hollies",
+    "Jackies",
+    "Jamies",
+    "Janies",
+    "Jannies",
+    "Jeanies",
+    "Jeannies",
+    "Jennies",
+    "Jessies",
+    "Jimmies",
+    "Jodies",
+    "Johnies",
+    "Johnnies",
+    "Josies",
+    "Julies",
+    "Kalgoorlies",
+    "Kathies",
+    "Katies",
+    "Kellies",
+    "Kewpies",
+    "Kristies",
+    "Laramies",
+    "Lassies",
+    "Lauries",
+    "Leslies",
+    "Lessies",
+    "Lillies",
+    "Lizzies",
+    "Lonnies",
+    "Lories",
+    "Lorries",
+    "Lotties",
+    "Louies",
+    "Mackenzies",
+    "Maggies",
+    "Maisies",
+    "Mamies",
+    "Marcies",
+    "Margies",
+    "Maries",
+    "Marjories",
+    "Matties",
+    "McKenzies",
+    "Melanies",
+    "Mickies",
+    "Millies",
+    "Minnies",
+    "Mollies",
+    "Mounties",
+    "Nannies",
+    "Natalies",
+    "Nellies",
+    "Netties",
+    "Ollies",
+    "Ozzies",
+    "Pearlies",
+    "Pottawatomies",
+    "Reggies",
+    "Richies",
+    "Rickies",
+    "Robbies",
+    "Ronnies",
+    "Rosalies",
+    "Rosemaries",
+    "Rosies",
+    "Roxies",
+    "Rushdies",
+    "Ruthies",
+    "Sadies",
+    "Sallies",
+    "Sammies",
+    "Scotties",
+    "Selassies",
+    "Sherries",
+    "Sophies",
+    "Stacies",
+    "Stefanies",
+    "Stephanies",
+    "Stevies",
+    "Susies",
+    "Sylvies",
+    "Tammies",
+    "Terries",
+    "Tessies",
+    "Tommies",
+    "Tracies",
+    "Trekkies",
+    "Valaries",
+    "Valeries",
+    "Valkyries",
+    "Vickies",
+    "Virgies",
+    "Willies",
+    "Winnies",
+    "Wylies",
+    "Yorkies",
+)
+
+si_sb_ies_ie = (
+    "aeries",
+    "baggies",
+    "belies",
+    "biggies",
+    "birdies",
+    "bogies",
+    "bonnies",
+    "boogies",
+    "bookies",
+    "bourgeoisies",
+    "brownies",
+    "budgies",
+    "caddies",
+    "calories",
+    "camaraderies",
+    "cockamamies",
+    "collies",
+    "cookies",
+    "coolies",
+    "cooties",
+    "coteries",
+    "crappies",
+    "curies",
+    "cutesies",
+    "dogies",
+    "eyries",
+    "floozies",
+    "footsies",
+    "freebies",
+    "genies",
+    "goalies",
+    "groupies",
+    "hies",
+    "jalousies",
+    "junkies",
+    "kiddies",
+    "laddies",
+    "lassies",
+    "lies",
+    "lingeries",
+    "magpies",
+    "menageries",
+    "mommies",
+    "movies",
+    "neckties",
+    "newbies",
+    "nighties",
+    "oldies",
+    "organdies",
+    "overlies",
+    "pies",
+    "pinkies",
+    "pixies",
+    "potpies",
+    "prairies",
+    "quickies",
+    "reveries",
+    "rookies",
+    "rotisseries",
+    "softies",
+    "sorties",
+    "species",
+    "stymies",
+    "sweeties",
+    "ties",
+    "underlies",
+    "unties",
+    "veggies",
+    "vies",
+    "yuppies",
+    "zombies",
+)
+
+
+si_sb_oes_oe_case = (
+    "Chloes",
+    "Crusoes",
+    "Defoes",
+    "Faeroes",
+    "Ivanhoes",
+    "Joes",
+    "McEnroes",
+    "Moes",
+    "Monroes",
+    "Noes",
+    "Poes",
+    "Roscoes",
+    "Tahoes",
+    "Tippecanoes",
+    "Zoes",
+)
+
+si_sb_oes_oe = (
+    "aloes",
+    "backhoes",
+    "canoes",
+    "does",
+    "floes",
+    "foes",
+    "hoes",
+    "mistletoes",
+    "oboes",
+    "pekoes",
+    "roes",
+    "sloes",
+    "throes",
+    "tiptoes",
+    "toes",
+    "woes",
+)
+
+si_sb_z_zes = ("quartzes", "topazes")
+
+si_sb_zzes_zz = ("buzzes", "fizzes", "frizzes", "razzes")
+
+si_sb_ches_che_case = (
+    "Andromaches",
+    "Apaches",
+    "Blanches",
+    "Comanches",
+    "Nietzsches",
+    "Porsches",
+    "Roches",
+)
+
+si_sb_ches_che = (
+    "aches",
+    "avalanches",
+    "backaches",
+    "bellyaches",
+    "caches",
+    "cloches",
+    "creches",
+    "douches",
+    "earaches",
+    "fiches",
+    "headaches",
+    "heartaches",
+    "microfiches",
+    "niches",
+    "pastiches",
+    "psyches",
+    "quiches",
+    "stomachaches",
+    "toothaches",
+    "tranches",
+)
+
+si_sb_xes_xe = ("annexes", "axes", "deluxes", "pickaxes")
+
+si_sb_sses_sse_case = ("Hesses", "Jesses", "Larousses", "Matisses")
+si_sb_sses_sse = (
+    "bouillabaisses",
+    "crevasses",
+    "demitasses",
+    "impasses",
+    "mousses",
+    "posses",
+)
+
+si_sb_ves_ve_case = (
+    # *[nwl]ives -> [nwl]live
+    "Clives",
+    "Palmolives",
+)
+si_sb_ves_ve = (
+    # *[^d]eaves -> eave
+    "interweaves",
+    "weaves",
+    # *[nwl]ives -> [nwl]live
+    "olives",
+    # *[eoa]lves -> [eoa]lve
+    "bivalves",
+    "dissolves",
+    "resolves",
+    "salves",
+    "twelves",
+    "valves",
+)
+
+
+plverb_special_s = enclose(
+    "|".join(
+        [pl_sb_singular_s]
+        + pl_sb_uninflected_s
+        + list(pl_sb_irregular_s)
+        + ["(.*[csx])is", "(.*)ceps", "[A-Z].*s"]
+    )
+)
+
+_pl_sb_postfix_adj_defn = (
+    ("general", enclose(r"(?!major|lieutenant|brigadier|adjutant|.*star)\S+")),
+    ("martial", enclose("court")),
+    ("force", enclose("pound")),
+)
+
+pl_sb_postfix_adj: Iterable[str] = (
+    enclose(val + f"(?=(?:-|\\s+){key})") for key, val in _pl_sb_postfix_adj_defn
+)
+
+pl_sb_postfix_adj_stems = f"({'|'.join(pl_sb_postfix_adj)})(.*)"
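+
+# The combined pattern splits postfix-adjective compounds so that only the
+# head noun is inflected, e.g. "court martial" -> "courts martial" and
+# "attorney general" -> "attorneys general" (applied via
+# PL_SB_POSTFIX_ADJ_STEMS_RE in _plnoun below).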
+
+
+# PLURAL WORDS ENDING IN es GO TO SINGULAR is
+
+si_sb_es_is = (
+    "amanuenses",
+    "amniocenteses",
+    "analyses",
+    "antitheses",
+    "apotheoses",
+    "arterioscleroses",
+    "atheroscleroses",
+    "axes",
+    # 'bases', # bases -> basis
+    "catalyses",
+    "catharses",
+    "chasses",
+    "cirrhoses",
+    "cocces",
+    "crises",
+    "diagnoses",
+    "dialyses",
+    "diereses",
+    "electrolyses",
+    "emphases",
+    "exegeses",
+    "geneses",
+    "halitoses",
+    "hydrolyses",
+    "hypnoses",
+    "hypotheses",
+    "hystereses",
+    "metamorphoses",
+    "metastases",
+    "misdiagnoses",
+    "mitoses",
+    "mononucleoses",
+    "narcoses",
+    "necroses",
+    "nemeses",
+    "neuroses",
+    "oases",
+    "osmoses",
+    "osteoporoses",
+    "paralyses",
+    "parentheses",
+    "parthenogeneses",
+    "periphrases",
+    "photosyntheses",
+    "probosces",
+    "prognoses",
+    "prophylaxes",
+    "prostheses",
+    "preces",
+    "psoriases",
+    "psychoanalyses",
+    "psychokineses",
+    "psychoses",
+    "scleroses",
+    "scolioses",
+    "sepses",
+    "silicoses",
+    "symbioses",
+    "synopses",
+    "syntheses",
+    "taxes",
+    "telekineses",
+    "theses",
+    "thromboses",
+    "tuberculoses",
+    "urinalyses",
+)
+
+pl_prep_list = """
+    about above across after among around at athwart before behind
+    below beneath beside besides between betwixt beyond but by
+    during except for from in into near of off on onto out over
+    since till to under until unto upon with""".split()
+
+pl_prep_list_da = pl_prep_list + ["de", "du", "da"]
+
+pl_prep_bysize = bysize(pl_prep_list_da)
+
+pl_prep = enclose("|".join(pl_prep_list_da))
+
+pl_sb_prep_dual_compound = rf"(.*?)((?:-|\s+)(?:{pl_prep})(?:-|\s+))a(?:-|\s+)(.*)"
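+
+# Matches "<noun> <preposition> a <noun>" compounds (spaced or hyphenated) so
+# both heads can be inflected, e.g. "jack-in-a-box" captures the groups
+# ("jack", "-in-", "box"); _plnoun below rejoins the pieces (dropping the
+# "a") to give "jacks-in-boxes".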
+
+
+singular_pronoun_genders = {
+    "neuter",
+    "feminine",
+    "masculine",
+    "gender-neutral",
+    "feminine or masculine",
+    "masculine or feminine",
+}
+
+pl_pron_nom = {
+    # NOMINATIVE    REFLEXIVE
+    "i": "we",
+    "myself": "ourselves",
+    "you": "you",
+    "yourself": "yourselves",
+    "she": "they",
+    "herself": "themselves",
+    "he": "they",
+    "himself": "themselves",
+    "it": "they",
+    "itself": "themselves",
+    "they": "they",
+    "themself": "themselves",
+    #   POSSESSIVE
+    "mine": "ours",
+    "yours": "yours",
+    "hers": "theirs",
+    "his": "theirs",
+    "its": "theirs",
+    "theirs": "theirs",
+}
+
+si_pron: Dict[str, Dict[str, Union[str, Dict[str, str]]]] = {
+    "nom": {v: k for (k, v) in pl_pron_nom.items()}
+}
+si_pron["nom"]["we"] = "I"
+
+
+pl_pron_acc = {
+    # ACCUSATIVE    REFLEXIVE
+    "me": "us",
+    "myself": "ourselves",
+    "you": "you",
+    "yourself": "yourselves",
+    "her": "them",
+    "herself": "themselves",
+    "him": "them",
+    "himself": "themselves",
+    "it": "them",
+    "itself": "themselves",
+    "them": "them",
+    "themself": "themselves",
+}
+
+pl_pron_acc_keys = enclose("|".join(pl_pron_acc))
+pl_pron_acc_keys_bysize = bysize(pl_pron_acc)
+
+si_pron["acc"] = {v: k for (k, v) in pl_pron_acc.items()}
+
+for _thecase, _plur, _gend, _sing in (
+    ("nom", "they", "neuter", "it"),
+    ("nom", "they", "feminine", "she"),
+    ("nom", "they", "masculine", "he"),
+    ("nom", "they", "gender-neutral", "they"),
+    ("nom", "they", "feminine or masculine", "she or he"),
+    ("nom", "they", "masculine or feminine", "he or she"),
+    ("nom", "themselves", "neuter", "itself"),
+    ("nom", "themselves", "feminine", "herself"),
+    ("nom", "themselves", "masculine", "himself"),
+    ("nom", "themselves", "gender-neutral", "themself"),
+    ("nom", "themselves", "feminine or masculine", "herself or himself"),
+    ("nom", "themselves", "masculine or feminine", "himself or herself"),
+    ("nom", "theirs", "neuter", "its"),
+    ("nom", "theirs", "feminine", "hers"),
+    ("nom", "theirs", "masculine", "his"),
+    ("nom", "theirs", "gender-neutral", "theirs"),
+    ("nom", "theirs", "feminine or masculine", "hers or his"),
+    ("nom", "theirs", "masculine or feminine", "his or hers"),
+    ("acc", "them", "neuter", "it"),
+    ("acc", "them", "feminine", "her"),
+    ("acc", "them", "masculine", "him"),
+    ("acc", "them", "gender-neutral", "them"),
+    ("acc", "them", "feminine or masculine", "her or him"),
+    ("acc", "them", "masculine or feminine", "him or her"),
+    ("acc", "themselves", "neuter", "itself"),
+    ("acc", "themselves", "feminine", "herself"),
+    ("acc", "themselves", "masculine", "himself"),
+    ("acc", "themselves", "gender-neutral", "themself"),
+    ("acc", "themselves", "feminine or masculine", "herself or himself"),
+    ("acc", "themselves", "masculine or feminine", "himself or herself"),
+):
+    try:
+        si_pron[_thecase][_plur][_gend] = _sing  # type: ignore
+    except TypeError:
+        si_pron[_thecase][_plur] = {}
+        si_pron[_thecase][_plur][_gend] = _sing  # type: ignore
+
+
+si_pron_acc_keys = enclose("|".join(si_pron["acc"]))
+si_pron_acc_keys_bysize = bysize(si_pron["acc"])
+
+
+def get_si_pron(thecase, word, gender) -> str:
+    try:
+        sing = si_pron[thecase][word]
+    except KeyError:
+        raise  # not a pronoun
+    try:
+        return sing[gender]  # has several types due to gender
+    except TypeError:
+        return cast(str, sing)  # answer independent of gender
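+
+# For example (from the tables above): get_si_pron("nom", "they", "feminine")
+# -> "she", while get_si_pron("acc", "us", "neuter") -> "me", since "us"
+# maps to a single string regardless of gender.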
+
+
+# These dictionaries group verbs by first, second and third person
+# conjugations.
+
+plverb_irregular_pres = {
+    "am": "are",
+    "are": "are",
+    "is": "are",
+    "was": "were",
+    "were": "were",
+    "have": "have",
+    "has": "have",
+    "do": "do",
+    "does": "do",
+}
+
+plverb_ambiguous_pres = {
+    "act": "act",
+    "acts": "act",
+    "blame": "blame",
+    "blames": "blame",
+    "can": "can",
+    "must": "must",
+    "fly": "fly",
+    "flies": "fly",
+    "copy": "copy",
+    "copies": "copy",
+    "drink": "drink",
+    "drinks": "drink",
+    "fight": "fight",
+    "fights": "fight",
+    "fire": "fire",
+    "fires": "fire",
+    "like": "like",
+    "likes": "like",
+    "look": "look",
+    "looks": "look",
+    "make": "make",
+    "makes": "make",
+    "reach": "reach",
+    "reaches": "reach",
+    "run": "run",
+    "runs": "run",
+    "sink": "sink",
+    "sinks": "sink",
+    "sleep": "sleep",
+    "sleeps": "sleep",
+    "view": "view",
+    "views": "view",
+}
+
+plverb_ambiguous_pres_keys = re.compile(
+    rf"^({enclose('|'.join(plverb_ambiguous_pres))})((\s.*)?)$", re.IGNORECASE
+)
+
+
+plverb_irregular_non_pres = (
+    "did",
+    "had",
+    "ate",
+    "made",
+    "put",
+    "spent",
+    "fought",
+    "sank",
+    "gave",
+    "sought",
+    "shall",
+    "could",
+    "ought",
+    "should",
+)
+
+plverb_ambiguous_non_pres = re.compile(
+    r"^((?:thought|saw|bent|will|might|cut))((\s.*)?)$", re.IGNORECASE
+)
+
+# "..oes" -> "..oe" (the rest are "..oes" -> "o")
+
+pl_v_oes_oe = ("canoes", "floes", "oboes", "roes", "throes", "woes")
+pl_v_oes_oe_endings_size4 = ("hoes", "toes")
+pl_v_oes_oe_endings_size5 = ("shoes",)
+
+
+pl_count_zero = ("0", "no", "zero", "nil")
+
+
+pl_count_one = ("1", "a", "an", "one", "each", "every", "this", "that")
+
+pl_adj_special = {"a": "some", "an": "some", "this": "these", "that": "those"}
+
+pl_adj_special_keys = re.compile(
+    rf"^({enclose('|'.join(pl_adj_special))})$", re.IGNORECASE
+)
+
+pl_adj_poss = {
+    "my": "our",
+    "your": "your",
+    "its": "their",
+    "her": "their",
+    "his": "their",
+    "their": "their",
+}
+
+pl_adj_poss_keys = re.compile(rf"^({enclose('|'.join(pl_adj_poss))})$", re.IGNORECASE)
+
+
+# 2. INDEFINITE ARTICLES
+
+# THIS PATTERN MATCHES STRINGS OF CAPITALS STARTING WITH A "VOWEL-SOUND"
+# CONSONANT FOLLOWED BY ANOTHER CONSONANT, AND WHICH ARE NOT LIKELY
+# TO BE REAL WORDS (OH, ALL RIGHT THEN, IT'S JUST MAGIC!)
+
+A_abbrev = re.compile(
+    r"""
+^(?! FJO | [HLMNS]Y.  | RY[EO] | SQU
+  | ( F[LR]? | [HL] | MN? | N | RH? | S[CHKLMNPTVW]? | X(YL)?) [AEIOU])
+[FHLMNRSX][A-Z]
+""",
+    re.VERBOSE,
+)
+
+# THIS PATTERN CODES THE BEGINNINGS OF ALL ENGLISH WORDS BEGINNING WITH A
+# 'y' FOLLOWED BY A CONSONANT. ANY OTHER Y-CONSONANT PREFIX THEREFORE
+# IMPLIES AN ABBREVIATION.
+
+A_y_cons = re.compile(r"^(y(b[lor]|cl[ea]|fere|gg|p[ios]|rou|tt))", re.IGNORECASE)
+
+# EXCEPTIONS TO EXCEPTIONS
+
+A_explicit_a = re.compile(r"^((?:unabomber|unanimous|US))", re.IGNORECASE)
+
+A_explicit_an = re.compile(
+    r"^((?:euler|hour(?!i)|heir|honest|hono[ur]|mpeg))", re.IGNORECASE
+)
+
+A_ordinal_an = re.compile(r"^([aefhilmnorsx]-?th)", re.IGNORECASE)
+
+A_ordinal_a = re.compile(r"^([bcdgjkpqtuvwyz]-?th)", re.IGNORECASE)
+
+
+# NUMERICAL INFLECTIONS
+
+nth = {
+    0: "th",
+    1: "st",
+    2: "nd",
+    3: "rd",
+    4: "th",
+    5: "th",
+    6: "th",
+    7: "th",
+    8: "th",
+    9: "th",
+    11: "th",
+    12: "th",
+    13: "th",
+}
+nth_suff = set(nth.values())
+
+ordinal = dict(
+    ty="tieth",
+    one="first",
+    two="second",
+    three="third",
+    five="fifth",
+    eight="eighth",
+    nine="ninth",
+    twelve="twelfth",
+)
+
+ordinal_suff = re.compile(rf"({'|'.join(ordinal)})\Z")
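+
+# Examples of how these tables combine in the ordinal() method (defined
+# later in this file): 1 -> "1st", 2 -> "2nd", 11 -> "11th" (the 11-13
+# entries override the last-digit rule), and word forms convert via
+# ordinal_suff, e.g. "twenty" -> "twentieth" through the "ty" -> "tieth"
+# entry.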
+
+
+# NUMBERS
+
+unit = ["", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
+teen = [
+    "ten",
+    "eleven",
+    "twelve",
+    "thirteen",
+    "fourteen",
+    "fifteen",
+    "sixteen",
+    "seventeen",
+    "eighteen",
+    "nineteen",
+]
+ten = [
+    "",
+    "",
+    "twenty",
+    "thirty",
+    "forty",
+    "fifty",
+    "sixty",
+    "seventy",
+    "eighty",
+    "ninety",
+]
+mill = [
+    " ",
+    " thousand",
+    " million",
+    " billion",
+    " trillion",
+    " quadrillion",
+    " quintillion",
+    " sextillion",
+    " septillion",
+    " octillion",
+    " nonillion",
+    " decillion",
+]
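+
+# These tables drive number_to_words() (defined later in this file): unit,
+# teen and ten spell out each group of three digits and mill appends the
+# scale word per group, so 1234 comes out roughly as
+# "one thousand, two hundred and thirty-four" (exact punctuation depends on
+# the options passed).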
+
+
+# SUPPORT CLASSICAL PLURALIZATIONS
+
+def_classical = dict(
+    all=False, zero=False, herd=False, names=True, persons=False, ancient=False
+)
+
+all_classical = {k: True for k in def_classical}
+no_classical = {k: False for k in def_classical}
+
+
+# Maps strings to built-in constant types
+string_to_constant = {"True": True, "False": False, "None": None}
+
+
+# Pre-compiled regular expression objects
+DOLLAR_DIGITS = re.compile(r"\$(\d+)")
+FUNCTION_CALL = re.compile(r"((\w+)\([^)]*\)*)", re.IGNORECASE)
+PARTITION_WORD = re.compile(r"\A(\s*)(.+?)(\s*)\Z")
+PL_SB_POSTFIX_ADJ_STEMS_RE = re.compile(
+    rf"^(?:{pl_sb_postfix_adj_stems})$", re.IGNORECASE
+)
+PL_SB_PREP_DUAL_COMPOUND_RE = re.compile(
+    rf"^(?:{pl_sb_prep_dual_compound})$", re.IGNORECASE
+)
+DENOMINATOR = re.compile(r"(?P<denominator>.+)( (per|a) .+)")
+PLVERB_SPECIAL_S_RE = re.compile(rf"^({plverb_special_s})$")
+WHITESPACE = re.compile(r"\s")
+ENDS_WITH_S = re.compile(r"^(.*[^s])s$", re.IGNORECASE)
+ENDS_WITH_APOSTROPHE_S = re.compile(r"^(.*)'s?$")
+INDEFINITE_ARTICLE_TEST = re.compile(r"\A(\s*)(?:an?\s+)?(.+?)(\s*)\Z", re.IGNORECASE)
+SPECIAL_AN = re.compile(r"^[aefhilmnorsx]$", re.IGNORECASE)
+SPECIAL_A = re.compile(r"^[bcdgjkpqtuvwyz]$", re.IGNORECASE)
+SPECIAL_ABBREV_AN = re.compile(r"^[aefhilmnorsx][.-]", re.IGNORECASE)
+SPECIAL_ABBREV_A = re.compile(r"^[a-z][.-]", re.IGNORECASE)
+CONSONANTS = re.compile(r"^[^aeiouy]", re.IGNORECASE)
+ARTICLE_SPECIAL_EU = re.compile(r"^e[uw]", re.IGNORECASE)
+ARTICLE_SPECIAL_ONCE = re.compile(r"^onc?e\b", re.IGNORECASE)
+ARTICLE_SPECIAL_ONETIME = re.compile(r"^onetime\b", re.IGNORECASE)
+ARTICLE_SPECIAL_UNIT = re.compile(r"^uni([^nmd]|mo)", re.IGNORECASE)
+ARTICLE_SPECIAL_UBA = re.compile(r"^u[bcfghjkqrst][aeiou]", re.IGNORECASE)
+ARTICLE_SPECIAL_UKR = re.compile(r"^ukr", re.IGNORECASE)
+SPECIAL_CAPITALS = re.compile(r"^U[NK][AIEO]?")
+VOWELS = re.compile(r"^[aeiou]", re.IGNORECASE)
+
+DIGIT_GROUP = re.compile(r"(\d)")
+TWO_DIGITS = re.compile(r"(\d)(\d)")
+THREE_DIGITS = re.compile(r"(\d)(\d)(\d)")
+THREE_DIGITS_WORD = re.compile(r"(\d)(\d)(\d)(?=\D*\Z)")
+TWO_DIGITS_WORD = re.compile(r"(\d)(\d)(?=\D*\Z)")
+ONE_DIGIT_WORD = re.compile(r"(\d)(?=\D*\Z)")
+
+FOUR_DIGIT_COMMA = re.compile(r"(\d)(\d{3}(?:,|\Z))")
+NON_DIGIT = re.compile(r"\D")
+WHITESPACES_COMMA = re.compile(r"\s+,")
+COMMA_WORD = re.compile(r", (\S+)\s+\Z")
+WHITESPACES = re.compile(r"\s+")
+
+
+PRESENT_PARTICIPLE_REPLACEMENTS = (
+    (re.compile(r"ie$"), r"y"),
+    (
+        re.compile(r"ue$"),
+        r"u",
+    ),  # TODO: isn't ue$ -> u encompassed in the following rule?
+    (re.compile(r"([auy])e$"), r"\g<1>"),
+    (re.compile(r"ski$"), r"ski"),
+    (re.compile(r"[^b]i$"), r""),
+    (re.compile(r"^(are|were)$"), r"be"),
+    (re.compile(r"^(had)$"), r"hav"),
+    (re.compile(r"^(hoe)$"), r"\g<1>"),
+    (re.compile(r"([^e])e$"), r"\g<1>"),
+    (re.compile(r"er$"), r"er"),
+    (re.compile(r"([^aeiou][aeiouy]([bdgmnprst]))$"), r"\g<1>\g<2>"),
+)
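+
+# Tried in order by present_participle() (defined later in this file): the
+# first matching pattern rewrites the stem before "ing" is appended, e.g.
+# "die" -> "dy" -> "dying" (ie$ -> y) and "run" -> "runn" -> "running"
+# (the doubled-final-consonant rule).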
+
+DIGIT = re.compile(r"\d")
+
+
+class Words(str):
+    lowered: str
+    split_: List[str]
+    first: str
+    last: str
+
+    def __init__(self, orig) -> None:
+        self.lowered = self.lower()
+        self.split_ = self.split()
+        self.first = self.split_[0]
+        self.last = self.split_[-1]
+
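+# Words("ship of the line") behaves like the original string but precomputes
+# lowered ("ship of the line"), split_ (["ship", "of", "the", "line"]),
+# first ("ship") and last ("line") for the suffix checks used in _plnoun
+# below.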
+
+Falsish = Any  # ideally, falsish would only validate on bool(value) is False
+
+
+_STATIC_TYPE_CHECKING = TYPE_CHECKING
+# ^-- Workaround for typeguard AST manipulation:
+#     https://github.com/agronholm/typeguard/issues/353#issuecomment-1556306554
+
+if _STATIC_TYPE_CHECKING:  # pragma: no cover
+    Word = Annotated[str, "String with at least 1 character"]
+else:
+
+    class _WordMeta(type):  # Too dynamic to be supported by mypy...
+        def __instancecheck__(self, instance: Any) -> bool:
+            return isinstance(instance, str) and len(instance) >= 1
+
+    class Word(metaclass=_WordMeta):  # type: ignore[no-redef]
+        """String with at least 1 character"""
+
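+# At runtime Word is used only for isinstance checks (via typeguard):
+# isinstance("cat", Word) is True while isinstance("", Word) is False; under
+# static type checking it is simply an Annotated str.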
+
+class engine:
+    def __init__(self) -> None:
+        self.classical_dict = def_classical.copy()
+        self.persistent_count: Optional[int] = None
+        self.mill_count = 0
+        self.pl_sb_user_defined: List[Optional[Word]] = []
+        self.pl_v_user_defined: List[Optional[Word]] = []
+        self.pl_adj_user_defined: List[Optional[Word]] = []
+        self.si_sb_user_defined: List[Optional[Word]] = []
+        self.A_a_user_defined: List[Optional[Word]] = []
+        self.thegender = "neuter"
+        self.__number_args: Optional[Dict[str, str]] = None
+
+    @property
+    def _number_args(self):
+        return cast(Dict[str, str], self.__number_args)
+
+    @_number_args.setter
+    def _number_args(self, val):
+        self.__number_args = val
+
+    @typechecked
+    def defnoun(self, singular: Optional[Word], plural: Optional[Word]) -> int:
+        """
+        Set the noun plural of singular to plural.
+
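+        For example, defnoun("VAX", "VAXen") makes plural("VAX") return
+        "VAXen" and singular_noun("VAXen") return "VAX".
+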
+        """
+        self.checkpat(singular)
+        self.checkpatplural(plural)
+        self.pl_sb_user_defined.extend((singular, plural))
+        self.si_sb_user_defined.extend((plural, singular))
+        return 1
+
+    @typechecked
+    def defverb(
+        self,
+        s1: Optional[Word],
+        p1: Optional[Word],
+        s2: Optional[Word],
+        p2: Optional[Word],
+        s3: Optional[Word],
+        p3: Optional[Word],
+    ) -> int:
+        """
+        Set the verb plurals for s1, s2 and s3 to p1, p2 and p3 respectively.
+
+        Where 1, 2 and 3 represent the 1st, 2nd and 3rd person forms of the verb.
+
+        """
+        self.checkpat(s1)
+        self.checkpat(s2)
+        self.checkpat(s3)
+        self.checkpatplural(p1)
+        self.checkpatplural(p2)
+        self.checkpatplural(p3)
+        self.pl_v_user_defined.extend((s1, p1, s2, p2, s3, p3))
+        return 1
+
+    @typechecked
+    def defadj(self, singular: Optional[Word], plural: Optional[Word]) -> int:
+        """
+        Set the adjective plural of singular to plural.
+
+        """
+        self.checkpat(singular)
+        self.checkpatplural(plural)
+        self.pl_adj_user_defined.extend((singular, plural))
+        return 1
+
+    @typechecked
+    def defa(self, pattern: Optional[Word]) -> int:
+        """
+        Define the indefinite article as 'a' for words matching pattern.
+
+        """
+        self.checkpat(pattern)
+        self.A_a_user_defined.extend((pattern, "a"))
+        return 1
+
+    @typechecked
+    def defan(self, pattern: Optional[Word]) -> int:
+        """
+        Define the indefinite article as 'an' for words matching pattern.
+
+        """
+        self.checkpat(pattern)
+        self.A_a_user_defined.extend((pattern, "an"))
+        return 1
+
+    def checkpat(self, pattern: Optional[Word]) -> None:
+        """
+        check for errors in a regex pattern
+        """
+        if pattern is None:
+            return
+        try:
+            re.match(pattern, "")
+        except re.error as err:
+            raise BadUserDefinedPatternError(pattern) from err
+
+    def checkpatplural(self, pattern: Optional[Word]) -> None:
+        """
+        check for errors in a regex replace pattern
+        """
+        return
+
+    @typechecked
+    def ud_match(self, word: Word, wordlist: Sequence[Optional[Word]]) -> Optional[str]:
+        for i in range(len(wordlist) - 2, -2, -2):  # backwards through even elements
+            mo = re.search(rf"^{wordlist[i]}$", word, re.IGNORECASE)
+            if mo:
+                if wordlist[i + 1] is None:
+                    return None
+                pl = DOLLAR_DIGITS.sub(
+                    r"\\1", cast(Word, wordlist[i + 1])
+                )  # change $n to \n for expand
+                return mo.expand(pl)
+        return None
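+
+    # For example, after defnoun("(.*)ix", "$1ices"), ud_match("matrix",
+    # self.pl_sb_user_defined) expands the match to "matrices"; the list is
+    # scanned backwards, so later definitions win.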
+
+    def classical(self, **kwargs) -> None:
+        """
+        turn classical mode on and off for various categories
+
+        turn on all classical modes:
+        classical()
+        classical(all=True)
+
+        turn on or off specific classical modes:
+        e.g.
+        classical(herd=True)
+        classical(names=False)
+
+        By default all classical modes are off except names.
+
+        An unknown value in args or key in kwargs raises the exception
+        UnknownClassicalModeError.
+
+        """
+        if not kwargs:
+            self.classical_dict = all_classical.copy()
+            return
+        if "all" in kwargs:
+            if kwargs["all"]:
+                self.classical_dict = all_classical.copy()
+            else:
+                self.classical_dict = no_classical.copy()
+
+        for k, v in kwargs.items():
+            if k in def_classical:
+                self.classical_dict[k] = v
+            else:
+                raise UnknownClassicalModeError
+
+    def num(
+        self, count: Optional[int] = None, show: Optional[int] = None
+    ) -> str:  # (;$count,$show)
+        """
+        Set the number to be used in other method calls.
+
+        Returns count.
+
+        Set show to False to return '' instead.
+
+        """
+        if count is not None:
+            try:
+                self.persistent_count = int(count)
+            except ValueError as err:
+                raise BadNumValueError from err
+            if (show is None) or show:
+                return str(count)
+        else:
+            self.persistent_count = None
+        return ""
+
+    def gender(self, gender: str) -> None:
+        """
+        set the gender for the singular of plural pronouns
+
+        can be one of:
+        'neuter'                ('they' -> 'it')
+        'feminine'              ('they' -> 'she')
+        'masculine'             ('they' -> 'he')
+        'gender-neutral'        ('they' -> 'they')
+        'feminine or masculine' ('they' -> 'she or he')
+        'masculine or feminine' ('they' -> 'he or she')
+        """
+        if gender in singular_pronoun_genders:
+            self.thegender = gender
+        else:
+            raise BadGenderError
+
+    def _get_value_from_ast(self, obj):
+        """
+        Return the value of the ast object.
+        """
+        if isinstance(obj, ast.Num):
+            return obj.n
+        elif isinstance(obj, ast.Str):
+            return obj.s
+        elif isinstance(obj, ast.List):
+            return [self._get_value_from_ast(e) for e in obj.elts]
+        elif isinstance(obj, ast.Tuple):
+            return tuple([self._get_value_from_ast(e) for e in obj.elts])
+
+        # None, True and False are NameConstants in Py3.4 and above.
+        elif isinstance(obj, ast.NameConstant):
+            return obj.value
+
+        # Probably passed a variable name.
+        # Or passed a single word without wrapping it in quotes as an argument
+        # ex: p.inflect("I plural(see)") instead of p.inflect("I plural('see')")
+        raise NameError(f"name '{obj.id}' is not defined")
+
+    def _string_to_substitute(
+        self, mo: Match, methods_dict: Dict[str, Callable]
+    ) -> str:
+        """
+        Return the string to be substituted for the match.
+        """
+        matched_text, f_name = mo.groups()
+        # matched_text is the complete match string. e.g. plural_noun(cat)
+        # f_name is the function name. e.g. plural_noun
+
+        # Return matched_text if function name is not in methods_dict
+        if f_name not in methods_dict:
+            return matched_text
+
+        # Parse the matched text
+        a_tree = ast.parse(matched_text)
+
+        # get the args and kwargs from ast objects
+        args_list = [
+            self._get_value_from_ast(a)
+            for a in a_tree.body[0].value.args  # type: ignore[attr-defined]
+        ]
+        kwargs_list = {
+            kw.arg: self._get_value_from_ast(kw.value)
+            for kw in a_tree.body[0].value.keywords  # type: ignore[attr-defined]
+        }
+
+        # Call the corresponding function
+        return methods_dict[f_name](*args_list, **kwargs_list)
+
+    # 0. PERFORM GENERAL INFLECTIONS IN A STRING
+
+    @typechecked
+    def inflect(self, text: Word) -> str:
+        """
+        Perform inflections in a string.
+
+        e.g. inflect('The plural of cat is plural(cat)') returns
+        'The plural of cat is cats'
+
+        can use plural, plural_noun, plural_verb, plural_adj,
+        singular_noun, a, an, no, ordinal, number_to_words,
+        and prespart
+
+        """
+        save_persistent_count = self.persistent_count
+
+        # Dictionary of allowed methods
+        methods_dict: Dict[str, Callable] = {
+            "plural": self.plural,
+            "plural_adj": self.plural_adj,
+            "plural_noun": self.plural_noun,
+            "plural_verb": self.plural_verb,
+            "singular_noun": self.singular_noun,
+            "a": self.a,
+            "an": self.a,
+            "no": self.no,
+            "ordinal": self.ordinal,
+            "number_to_words": self.number_to_words,
+            "present_participle": self.present_participle,
+            "num": self.num,
+        }
+
+        # Regular expression to find Python's function call syntax
+        output = FUNCTION_CALL.sub(
+            lambda mo: self._string_to_substitute(mo, methods_dict), text
+        )
+        self.persistent_count = save_persistent_count
+        return output
+
+    # ## PLURAL SUBROUTINES
+
+    def postprocess(self, orig: str, inflected) -> str:
+        inflected = str(inflected)
+        if "|" in inflected:
+            word_options = inflected.split("|")
+            # When two parts of a noun need to be pluralized
+            if len(word_options[0].split(" ")) == len(word_options[1].split(" ")):
+                result = inflected.split("|")[self.classical_dict["all"]].split(" ")
+            # When only the last part of the noun needs to be pluralized
+            else:
+                result = inflected.split(" ")
+                for index, word in enumerate(result):
+                    if "|" in word:
+                        result[index] = word.split("|")[self.classical_dict["all"]]
+        else:
+            result = inflected.split(" ")
+
+        # Try to fix word wise capitalization
+        for index, word in enumerate(orig.split(" ")):
+            if word == "I":
+                # Is this the only word for exceptions like this,
+                # where the original is fully capitalized
+                # without 'meaning' capitalization?
+                # Also, this fails to handle capitalization in context.
+                continue
+            if word.capitalize() == word:
+                result[index] = result[index].capitalize()
+            if word == word.upper():
+                result[index] = result[index].upper()
+        return " ".join(result)
+
+    def partition_word(self, text: str) -> Tuple[str, str, str]:
+        mo = PARTITION_WORD.search(text)
+        if mo:
+            return mo.group(1), mo.group(2), mo.group(3)
+        else:
+            return "", "", ""
+
+    @typechecked
+    def plural(self, text: Word, count: Optional[Union[str, int, Any]] = None) -> str:
+        """
+        Return the plural of text.
+
+        If count supplied, then return text if count is one of:
+            1, a, an, one, each, every, this, that
+
+        otherwise return the plural.
+
+        Whitespace at the start and end is preserved.
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        plural = self.postprocess(
+            word,
+            self._pl_special_adjective(word, count)
+            or self._pl_special_verb(word, count)
+            or self._plnoun(word, count),
+        )
+        return f"{pre}{plural}{post}"
+
+    @typechecked
+    def plural_noun(
+        self, text: Word, count: Optional[Union[str, int, Any]] = None
+    ) -> str:
+        """
+        Return the plural of text, where text is a noun.
+
+        If count supplied, then return text if count is one of:
+            1, a, an, one, each, every, this, that
+
+        otherwise return the plural.
+
+        Whitespace at the start and end is preserved.
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        plural = self.postprocess(word, self._plnoun(word, count))
+        return f"{pre}{plural}{post}"
+
+    @typechecked
+    def plural_verb(
+        self, text: Word, count: Optional[Union[str, int, Any]] = None
+    ) -> str:
+        """
+        Return the plural of text, where text is a verb.
+
+        If count supplied, then return text if count is one of:
+            1, a, an, one, each, every, this, that
+
+        otherwise return the plural.
+
+        Whitespace at the start and end is preserved.
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        plural = self.postprocess(
+            word,
+            self._pl_special_verb(word, count) or self._pl_general_verb(word, count),
+        )
+        return f"{pre}{plural}{post}"
+
+    @typechecked
+    def plural_adj(
+        self, text: Word, count: Optional[Union[str, int, Any]] = None
+    ) -> str:
+        """
+        Return the plural of text, where text is an adjective.
+
+        If count supplied, then return text if count is one of:
+            1, a, an, one, each, every, this, that
+
+        otherwise return the plural.
+
+        Whitespace at the start and end is preserved.
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        plural = self.postprocess(word, self._pl_special_adjective(word, count) or word)
+        return f"{pre}{plural}{post}"
+
+    @typechecked
+    def compare(self, word1: Word, word2: Word) -> Union[str, bool]:
+        """
+        compare word1 and word2 for equality regardless of plurality
+
+        return values:
+        eq - the strings are equal
+        p:s - word1 is the plural of word2
+        s:p - word2 is the plural of word1
+        p:p - word1 and word2 are two different plural forms of the one word
+        False - otherwise
+
+        >>> compare = engine().compare
+        >>> compare("egg", "eggs")
+        's:p'
+        >>> compare('egg', 'egg')
+        'eq'
+
+        Words should not be empty.
+
+        >>> compare('egg', '')
+        Traceback (most recent call last):
+        ...
+        typeguard.TypeCheckError:...is not an instance of inflect.Word
+        """
+        norms = self.plural_noun, self.plural_verb, self.plural_adj
+        results = (self._plequal(word1, word2, norm) for norm in norms)
+        return next(filter(None, results), False)
+
+    @typechecked
+    def compare_nouns(self, word1: Word, word2: Word) -> Union[str, bool]:
+        """
+        compare word1 and word2 for equality regardless of plurality
+        word1 and word2 are to be treated as nouns
+
+        return values:
+        eq - the strings are equal
+        p:s - word1 is the plural of word2
+        s:p - word2 is the plural of word1
+        p:p - word1 and word2 are two different plural forms of the one word
+        False - otherwise
+
+        """
+        return self._plequal(word1, word2, self.plural_noun)
+
+    @typechecked
+    def compare_verbs(self, word1: Word, word2: Word) -> Union[str, bool]:
+        """
+        compare word1 and word2 for equality regardless of plurality
+        word1 and word2 are to be treated as verbs
+
+        return values:
+        eq - the strings are equal
+        p:s - word1 is the plural of word2
+        s:p - word2 is the plural of word1
+        p:p - word1 and word2 are two different plural forms of the one word
+        False - otherwise
+
+        """
+        return self._plequal(word1, word2, self.plural_verb)
+
+    @typechecked
+    def compare_adjs(self, word1: Word, word2: Word) -> Union[str, bool]:
+        """
+        compare word1 and word2 for equality regardless of plurality
+        word1 and word2 are to be treated as adjectives
+
+        return values:
+        eq - the strings are equal
+        p:s - word1 is the plural of word2
+        s:p - word2 is the plural of word1
+        p:p - word1 and word2 are two different plural forms of the one word
+        False - otherwise
+
+        """
+        return self._plequal(word1, word2, self.plural_adj)
+
+    @typechecked
+    def singular_noun(
+        self,
+        text: Word,
+        count: Optional[Union[int, str, Any]] = None,
+        gender: Optional[str] = None,
+    ) -> Union[str, Literal[False]]:
+        """
+        Return the singular of text, where text is a plural noun.
+
+        If count supplied, then return the singular if count is one of:
+            1, a, an, one, each, every, this, that or if count is None
+
+        otherwise return text unchanged.
+
+        Whitespace at the start and end is preserved.
+
+        >>> p = engine()
+        >>> p.singular_noun('horses')
+        'horse'
+        >>> p.singular_noun('knights')
+        'knight'
+
+        Returns False when a singular noun is passed.
+
+        >>> p.singular_noun('horse')
+        False
+        >>> p.singular_noun('knight')
+        False
+        >>> p.singular_noun('soldier')
+        False
+
+        """
+        pre, word, post = self.partition_word(text)
+        if not word:
+            return text
+        sing = self._sinoun(word, count=count, gender=gender)
+        if sing is not False:
+            plural = self.postprocess(word, sing)
+            return f"{pre}{plural}{post}"
+        return False
+
+    def _plequal(self, word1: str, word2: str, pl) -> Union[str, bool]:  # noqa: C901
+        classval = self.classical_dict.copy()
+        self.classical_dict = all_classical.copy()
+        if word1 == word2:
+            return "eq"
+        if word1 == pl(word2):
+            return "p:s"
+        if pl(word1) == word2:
+            return "s:p"
+        self.classical_dict = no_classical.copy()
+        if word1 == pl(word2):
+            return "p:s"
+        if pl(word1) == word2:
+            return "s:p"
+        self.classical_dict = classval.copy()
+
+        if pl == self.plural or pl == self.plural_noun:
+            if self._pl_check_plurals_N(word1, word2):
+                return "p:p"
+            if self._pl_check_plurals_N(word2, word1):
+                return "p:p"
+        if pl == self.plural or pl == self.plural_adj:
+            if self._pl_check_plurals_adj(word1, word2):
+                return "p:p"
+        return False
+
+    def _pl_reg_plurals(self, pair: str, stems: str, end1: str, end2: str) -> bool:
+        pattern = rf"({stems})({end1}\|\1{end2}|{end2}\|\1{end1})"
+        return bool(re.search(pattern, pair))
+
+    def _pl_check_plurals_N(self, word1: str, word2: str) -> bool:
+        stem_endings = (
+            (pl_sb_C_a_ata, "as", "ata"),
+            (pl_sb_C_is_ides, "is", "ides"),
+            (pl_sb_C_a_ae, "s", "e"),
+            (pl_sb_C_en_ina, "ens", "ina"),
+            (pl_sb_C_um_a, "ums", "a"),
+            (pl_sb_C_us_i, "uses", "i"),
+            (pl_sb_C_on_a, "ons", "a"),
+            (pl_sb_C_o_i_stems, "os", "i"),
+            (pl_sb_C_ex_ices, "exes", "ices"),
+            (pl_sb_C_ix_ices, "ixes", "ices"),
+            (pl_sb_C_i, "s", "i"),
+            (pl_sb_C_im, "s", "im"),
+            (".*eau", "s", "x"),
+            (".*ieu", "s", "x"),
+            (".*tri", "xes", "ces"),
+            (".{2,}[yia]n", "xes", "ges"),
+        )
+
+        words = map(Words, (word1, word2))
+        pair = "|".join(word.last for word in words)
+
+        return (
+            pair in pl_sb_irregular_s.values()
+            or pair in pl_sb_irregular.values()
+            or pair in pl_sb_irregular_caps.values()
+            or any(
+                self._pl_reg_plurals(pair, stems, end1, end2)
+                for stems, end1, end2 in stem_endings
+            )
+        )
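+
+    # For example, compare("indexes", "indices") arrives here as the pair
+    # "indexes|indices", which matches the (pl_sb_C_ex_ices, "exes", "ices")
+    # stem ending, so the two are reported as alternative plurals ("p:p").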
+
+    def _pl_check_plurals_adj(self, word1: str, word2: str) -> bool:
+        word1a = word1[: word1.rfind("'")] if word1.endswith(("'s", "'")) else ""
+        word2a = word2[: word2.rfind("'")] if word2.endswith(("'s", "'")) else ""
+
+        return (
+            bool(word1a)
+            and bool(word2a)
+            and (
+                self._pl_check_plurals_N(word1a, word2a)
+                or self._pl_check_plurals_N(word2a, word1a)
+            )
+        )
+
+    def get_count(self, count: Optional[Union[str, int]] = None) -> Union[str, int]:
+        if count is None and self.persistent_count is not None:
+            count = self.persistent_count
+
+        if count is not None:
+            count = (
+                1
+                if (
+                    (str(count) in pl_count_one)
+                    or (
+                        self.classical_dict["zero"]
+                        and str(count).lower() in pl_count_zero
+                    )
+                )
+                else 2
+            )
+        else:
+            count = ""
+        return count
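+
+    # get_count normalizes a count to 1 (singular), 2 (plural) or "" (no
+    # count given): get_count("each") -> 1, get_count(7) -> 2, and with the
+    # classical "zero" option enabled, get_count("no") -> 1.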
+
+    # @profile
+    def _plnoun(  # noqa: C901
+        self, word: str, count: Optional[Union[str, int]] = None
+    ) -> str:
+        count = self.get_count(count)
+
+        # DEFAULT TO PLURAL
+
+        if count == 1:
+            return word
+
+        # HANDLE USER-DEFINED NOUNS
+
+        value = self.ud_match(word, self.pl_sb_user_defined)
+        if value is not None:
+            return value
+
+        # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS
+
+        if word == "":
+            return word
+
+        word = Words(word)
+
+        if word.last.lower() in pl_sb_uninflected_complete:
+            if len(word.split_) >= 3:
+                return self._handle_long_compounds(word, count=2) or word
+            return word
+
+        if word in pl_sb_uninflected_caps:
+            return word
+
+        for k, v in pl_sb_uninflected_bysize.items():
+            if word.lowered[-k:] in v:
+                return word
+
+        if self.classical_dict["herd"] and word.last.lower() in pl_sb_uninflected_herd:
+            return word
+
+        # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.)
+
+        mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word)
+        if mo and mo.group(2) != "":
+            return f"{self._plnoun(mo.group(1), 2)}{mo.group(2)}"
+
+        if " a " in word.lowered or "-a-" in word.lowered:
+            mo = PL_SB_PREP_DUAL_COMPOUND_RE.search(word)
+            if mo and mo.group(2) != "" and mo.group(3) != "":
+                return (
+                    f"{self._plnoun(mo.group(1), 2)}"
+                    f"{mo.group(2)}"
+                    f"{self._plnoun(mo.group(3))}"
+                )
+
+        if len(word.split_) >= 3:
+            handled_words = self._handle_long_compounds(word, count=2)
+            if handled_words is not None:
+                return handled_words
+
+        # only pluralize denominators in units
+        mo = DENOMINATOR.search(word.lowered)
+        if mo:
+            index = len(mo.group("denominator"))
+            return f"{self._plnoun(word[:index])}{word[index:]}"
+
+        # handle units given in degrees (only accept if
+        # there is no more than one word following)
+        # degree Celsius => degrees Celsius but degree
+        # fahrenheit hour => degree fahrenheit hours
+        if len(word.split_) >= 2 and word.split_[-2] == "degree":
+            return " ".join([self._plnoun(word.first)] + word.split_[1:])
+
+        with contextlib.suppress(ValueError):
+            return self._handle_prepositional_phrase(
+                word.lowered,
+                functools.partial(self._plnoun, count=2),
+                '-',
+            )
+
+        # HANDLE PRONOUNS
+
+        for k, v in pl_pron_acc_keys_bysize.items():
+            if word.lowered[-k:] in v:  # ends with accusative pronoun
+                for pk, pv in pl_prep_bysize.items():
+                    if word.lowered[:pk] in pv:  # starts with a prep
+                        if word.lowered.split() == [
+                            word.lowered[:pk],
+                            word.lowered[-k:],
+                        ]:
+                            # only whitespace in between
+                            return word.lowered[:-k] + pl_pron_acc[word.lowered[-k:]]
+
+        try:
+            return pl_pron_nom[word.lowered]
+        except KeyError:
+            pass
+
+        try:
+            return pl_pron_acc[word.lowered]
+        except KeyError:
+            pass
+
+        # HANDLE ISOLATED IRREGULAR PLURALS
+
+        if word.last in pl_sb_irregular_caps:
+            llen = len(word.last)
+            return f"{word[:-llen]}{pl_sb_irregular_caps[word.last]}"
+
+        lowered_last = word.last.lower()
+        if lowered_last in pl_sb_irregular:
+            llen = len(lowered_last)
+            return f"{word[:-llen]}{pl_sb_irregular[lowered_last]}"
+
+        dash_split = word.lowered.split('-')
+        if (" ".join(dash_split[-2:])).lower() in pl_sb_irregular_compound:
+            llen = len(
+                " ".join(dash_split[-2:])
+            )  # TODO: what if 2 spaces between these words?
+            return (
+                f"{word[:-llen]}"
+                f"{pl_sb_irregular_compound[(' '.join(dash_split[-2:])).lower()]}"
+            )
+
+        if word.lowered[-3:] == "quy":
+            return f"{word[:-1]}ies"
+
+        if word.lowered[-6:] == "person":
+            if self.classical_dict["persons"]:
+                return f"{word}s"
+            else:
+                return f"{word[:-4]}ople"
+
+        # HANDLE FAMILIES OF IRREGULAR PLURALS
+
+        if word.lowered[-3:] == "man":
+            for k, v in pl_sb_U_man_mans_bysize.items():
+                if word.lowered[-k:] in v:
+                    return f"{word}s"
+            for k, v in pl_sb_U_man_mans_caps_bysize.items():
+                if word[-k:] in v:
+                    return f"{word}s"
+            return f"{word[:-3]}men"
+        if word.lowered[-5:] == "mouse":
+            return f"{word[:-5]}mice"
+        if word.lowered[-5:] == "louse":
+            v = pl_sb_U_louse_lice_bysize.get(len(word))
+            if v and word.lowered in v:
+                return f"{word[:-5]}lice"
+            return f"{word}s"
+        if word.lowered[-5:] == "goose":
+            return f"{word[:-5]}geese"
+        if word.lowered[-5:] == "tooth":
+            return f"{word[:-5]}teeth"
+        if word.lowered[-4:] == "foot":
+            return f"{word[:-4]}feet"
+        if word.lowered[-4:] == "taco":
+            return f"{word[:-5]}tacos"
+
+        if word.lowered == "die":
+            return "dice"
+
+        # HANDLE UNASSIMILATED IMPORTS
+
+        if word.lowered[-4:] == "ceps":
+            return word
+        if word.lowered[-4:] == "zoon":
+            return f"{word[:-2]}a"
+        if word.lowered[-3:] in ("cis", "sis", "xis"):
+            return f"{word[:-2]}es"
+
+        for lastlet, d, numend, post in (
+            ("h", pl_sb_U_ch_chs_bysize, None, "s"),
+            ("x", pl_sb_U_ex_ices_bysize, -2, "ices"),
+            ("x", pl_sb_U_ix_ices_bysize, -2, "ices"),
+            ("m", pl_sb_U_um_a_bysize, -2, "a"),
+            ("s", pl_sb_U_us_i_bysize, -2, "i"),
+            ("n", pl_sb_U_on_a_bysize, -2, "a"),
+            ("a", pl_sb_U_a_ae_bysize, None, "e"),
+        ):
+            if word.lowered[-1] == lastlet:  # this test to add speed
+                for k, v in d.items():
+                    if word.lowered[-k:] in v:
+                        return word[:numend] + post
+
+        # HANDLE INCOMPLETELY ASSIMILATED IMPORTS
+
+        if self.classical_dict["ancient"]:
+            if word.lowered[-4:] == "trix":
+                return f"{word[:-1]}ces"
+            if word.lowered[-3:] in ("eau", "ieu"):
+                return f"{word}x"
+            if word.lowered[-3:] in ("ynx", "inx", "anx") and len(word) > 4:
+                return f"{word[:-1]}ges"
+
+            for lastlet, d, numend, post in (
+                ("n", pl_sb_C_en_ina_bysize, -2, "ina"),
+                ("x", pl_sb_C_ex_ices_bysize, -2, "ices"),
+                ("x", pl_sb_C_ix_ices_bysize, -2, "ices"),
+                ("m", pl_sb_C_um_a_bysize, -2, "a"),
+                ("s", pl_sb_C_us_i_bysize, -2, "i"),
+                ("s", pl_sb_C_us_us_bysize, None, ""),
+                ("a", pl_sb_C_a_ae_bysize, None, "e"),
+                ("a", pl_sb_C_a_ata_bysize, None, "ta"),
+                ("s", pl_sb_C_is_ides_bysize, -1, "des"),
+                ("o", pl_sb_C_o_i_bysize, -1, "i"),
+                ("n", pl_sb_C_on_a_bysize, -2, "a"),
+            ):
+                if word.lowered[-1] == lastlet:  # this test to add speed
+                    for k, v in d.items():
+                        if word.lowered[-k:] in v:
+                            return word[:numend] + post
+
+            for d, numend, post in (
+                (pl_sb_C_i_bysize, None, "i"),
+                (pl_sb_C_im_bysize, None, "im"),
+            ):
+                for k, v in d.items():
+                    if word.lowered[-k:] in v:
+                        return word[:numend] + post
+
+        # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SIBILANTS
+
+        if lowered_last in pl_sb_singular_s_complete:
+            return f"{word}es"
+
+        for k, v in pl_sb_singular_s_bysize.items():
+            if word.lowered[-k:] in v:
+                return f"{word}es"
+
+        if word.lowered[-2:] == "es" and word[0] == word[0].upper():
+            return f"{word}es"
+
+        if word.lowered[-1] == "z":
+            for k, v in pl_sb_z_zes_bysize.items():
+                if word.lowered[-k:] in v:
+                    return f"{word}es"
+
+            if word.lowered[-2:-1] != "z":
+                return f"{word}zes"
+
+        if word.lowered[-2:] == "ze":
+            for k, v in pl_sb_ze_zes_bysize.items():
+                if word.lowered[-k:] in v:
+                    return f"{word}s"
+
+        if word.lowered[-2:] in ("ch", "sh", "zz", "ss") or word.lowered[-1] == "x":
+            return f"{word}es"
+
+        # HANDLE ...f -> ...ves
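+        # e.g. elf -> elves, leaf -> leaves (though not deaf), knife -> knives,
+        # wharf -> wharves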
+
+        if word.lowered[-3:] in ("elf", "alf", "olf"):
+            return f"{word[:-1]}ves"
+        if word.lowered[-3:] == "eaf" and word.lowered[-4:-3] != "d":
+            return f"{word[:-1]}ves"
+        if word.lowered[-4:] in ("nife", "life", "wife"):
+            return f"{word[:-2]}ves"
+        if word.lowered[-3:] == "arf":
+            return f"{word[:-1]}ves"
+
+        # HANDLE ...y
+
+        if word.lowered[-1] == "y":
+            if word.lowered[-2:-1] in "aeiou" or len(word) == 1:
+                return f"{word}s"
+
+            if self.classical_dict["names"]:
+                if word.lowered[-1] == "y" and word[0] == word[0].upper():
+                    return f"{word}s"
+
+            return f"{word[:-1]}ies"
+
+        # HANDLE ...o
+
+        if lowered_last in pl_sb_U_o_os_complete:
+            return f"{word}s"
+
+        for k, v in pl_sb_U_o_os_bysize.items():
+            if word.lowered[-k:] in v:
+                return f"{word}s"
+
+        if word.lowered[-2:] in ("ao", "eo", "io", "oo", "uo"):
+            return f"{word}s"
+
+        if word.lowered[-1] == "o":
+            return f"{word}es"
+
+        # OTHERWISE JUST ADD ...s
+
+        return f"{word}s"
+
+    @classmethod
+    def _handle_prepositional_phrase(cls, phrase, transform, sep):
+        """
+        Given a word or phrase possibly separated by sep, parse out
+        the prepositional phrase and apply the transform to the word
+        preceding the prepositional phrase.
+
+        Raise ValueError if no pivot is found or if the phrase contains
+        fewer than two separators.
+
+        >>> engine._handle_prepositional_phrase("man-of-war", str.upper, '-')
+        'MAN-of-war'
+        >>> engine._handle_prepositional_phrase("man of war", str.upper, ' ')
+        'MAN of war'
+        """
+        parts = phrase.split(sep)
+        if len(parts) < 3:
+            raise ValueError("Cannot handle words with fewer than two separators")
+
+        pivot = cls._find_pivot(parts, pl_prep_list_da)
+
+        transformed = transform(parts[pivot - 1]) or parts[pivot - 1]
+        return " ".join(
+            parts[: pivot - 1] + [sep.join([transformed, parts[pivot], ''])]
+        ) + " ".join(parts[(pivot + 1) :])
+
+    def _handle_long_compounds(self, word: Words, count: int) -> Union[str, None]:
+        """
+        Handles the plural and singular for compound `Words` that
+        have three or more words, based on the given count.
+
+        >>> engine()._handle_long_compounds(Words("pair of scissors"), 2)
+        'pairs of scissors'
+        >>> engine()._handle_long_compounds(Words("men beyond hills"), 1)
+        'man beyond hills'
+        """
+        inflection = self._sinoun if count == 1 else self._plnoun
+        solutions = (  # type: ignore
+            " ".join(
+                itertools.chain(
+                    leader,
+                    [inflection(cand, count), prep],  # type: ignore
+                    trailer,
+                )
+            )
+            for leader, (cand, prep), trailer in windowed_complete(word.split_, 2)
+            if prep in pl_prep_list_da  # type: ignore
+        )
+        return next(solutions, None)
+
+    @staticmethod
+    def _find_pivot(words, candidates):
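+        """
+        Return the index of the first interior word found in candidates.
+
+        The pivot can never be the first or the last word.
+
+        >>> engine._find_pivot(["man", "of", "war"], pl_prep_list_da)
+        1
+        """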
+        pivots = (
+            index for index in range(1, len(words) - 1) if words[index] in candidates
+        )
+        try:
+            return next(pivots)
+        except StopIteration:
+            raise ValueError("No pivot found") from None
+
+    def _pl_special_verb(  # noqa: C901
+        self, word: str, count: Optional[Union[str, int]] = None
+    ) -> Union[str, bool]:
+        if self.classical_dict["zero"] and str(count).lower() in pl_count_zero:
+            return False
+        count = self.get_count(count)
+
+        if count == 1:
+            return word
+
+        # HANDLE USER-DEFINED VERBS
+
+        value = self.ud_match(word, self.pl_v_user_defined)
+        if value is not None:
+            return value
+
+        # HANDLE IRREGULAR PRESENT TENSE (SIMPLE AND COMPOUND)
+
+        try:
+            words = Words(word)
+        except IndexError:
+            return False  # word is ''
+
+        if words.first in plverb_irregular_pres:
+            return f"{plverb_irregular_pres[words.first]}{words[len(words.first) :]}"
+
+        # HANDLE IRREGULAR FUTURE, PRETERITE AND PERFECT TENSES
+
+        if words.first in plverb_irregular_non_pres:
+            return word
+
+        # HANDLE PRESENT NEGATIONS (SIMPLE AND COMPOUND)
+
+        if words.first.endswith("n't") and words.first[:-3] in plverb_irregular_pres:
+            return (
+                f"{plverb_irregular_pres[words.first[:-3]]}n't"
+                f"{words[len(words.first) :]}"
+            )
+
+        if words.first.endswith("n't"):
+            return word
+
+        # HANDLE SPECIAL CASES
+
+        mo = PLVERB_SPECIAL_S_RE.search(word)
+        if mo:
+            return False
+        if WHITESPACE.search(word):
+            return False
+
+        if words.lowered == "quizzes":
+            return "quiz"
+
+        # HANDLE STANDARD 3RD PERSON (CHOP THE ...(e)s OFF SINGLE WORDS)
+
+        if (
+            words.lowered[-4:] in ("ches", "shes", "zzes", "sses")
+            or words.lowered[-3:] == "xes"
+        ):
+            return words[:-2]
+
+        if words.lowered[-3:] == "ies" and len(words) > 3:
+            return words.lowered[:-3] + "y"
+
+        if (
+            words.last.lower() in pl_v_oes_oe
+            or words.lowered[-4:] in pl_v_oes_oe_endings_size4
+            or words.lowered[-5:] in pl_v_oes_oe_endings_size5
+        ):
+            return words[:-1]
+
+        if words.lowered.endswith("oes") and len(words) > 3:
+            return words.lowered[:-2]
+
+        mo = ENDS_WITH_S.search(words)
+        if mo:
+            return mo.group(1)
+
+        # OTHERWISE, A REGULAR VERB (HANDLE ELSEWHERE)
+
+        return False
+
+    def _pl_general_verb(
+        self, word: str, count: Optional[Union[str, int]] = None
+    ) -> str:
+        count = self.get_count(count)
+
+        if count == 1:
+            return word
+
+        # HANDLE AMBIGUOUS PRESENT TENSES  (SIMPLE AND COMPOUND)
+
+        mo = plverb_ambiguous_pres_keys.search(word)
+        if mo:
+            return f"{plverb_ambiguous_pres[mo.group(1).lower()]}{mo.group(2)}"
+
+        # HANDLE AMBIGUOUS PRETERITE AND PERFECT TENSES
+
+        mo = plverb_ambiguous_non_pres.search(word)
+        if mo:
+            return word
+
+        # OTHERWISE, 1st OR 2ND PERSON IS UNINFLECTED
+
+        return word
+
+    def _pl_special_adjective(
+        self, word: str, count: Optional[Union[str, int]] = None
+    ) -> Union[str, bool]:
+        count = self.get_count(count)
+
+        if count == 1:
+            return word
+
+        # HANDLE USER-DEFINED ADJECTIVES
+
+        value = self.ud_match(word, self.pl_adj_user_defined)
+        if value is not None:
+            return value
+
+        # HANDLE KNOWN CASES
+
+        mo = pl_adj_special_keys.search(word)
+        if mo:
+            return pl_adj_special[mo.group(1).lower()]
+
+        # HANDLE POSSESSIVES
+
+        mo = pl_adj_poss_keys.search(word)
+        if mo:
+            return pl_adj_poss[mo.group(1).lower()]
+
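+        # Pluralize a generic possessive by pluralizing its noun and moving
+        # the apostrophe: "cat's" -> "cats'", "child's" -> "children's".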
+        mo = ENDS_WITH_APOSTROPHE_S.search(word)
+        if mo:
+            pl = self.plural_noun(mo.group(1))
+            trailing_s = "" if pl[-1] == "s" else "s"
+            return f"{pl}'{trailing_s}"
+
+        # OTHERWISE, NO IDEA
+
+        return False
+
+    def _sinoun(  # noqa: C901
+        self,
+        word: str,
+        count: Optional[Union[str, int]] = None,
+        gender: Optional[str] = None,
+    ) -> Union[str, bool]:
+        count = self.get_count(count)
+
+        # DEFAULT TO PLURAL
+
+        if count == 2:
+            return word
+
+        # SET THE GENDER
+
+        try:
+            if gender is None:
+                gender = self.thegender
+            elif gender not in singular_pronoun_genders:
+                raise BadGenderError
+        except (TypeError, IndexError) as err:
+            raise BadGenderError from err
+
+        # HANDLE USER-DEFINED NOUNS
+
+        value = self.ud_match(word, self.si_sb_user_defined)
+        if value is not None:
+            return value
+
+        # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS
+
+        if word == "":
+            return word
+
+        if word in si_sb_ois_oi_case:
+            return word[:-1]
+
+        words = Words(word)
+
+        if words.last.lower() in pl_sb_uninflected_complete:
+            if len(words.split_) >= 3:
+                return self._handle_long_compounds(words, count=1) or word
+            return word
+
+        if word in pl_sb_uninflected_caps:
+            return word
+
+        for k, v in pl_sb_uninflected_bysize.items():
+            if words.lowered[-k:] in v:
+                return word
+
+        if self.classical_dict["herd"] and words.last.lower() in pl_sb_uninflected_herd:
+            return word
+
+        if words.last.lower() in pl_sb_C_us_us:
+            return word if self.classical_dict["ancient"] else False
+
+        # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.)
+
+        mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word)
+        if mo and mo.group(2) != "":
+            return f"{self._sinoun(mo.group(1), 1, gender=gender)}{mo.group(2)}"
+
+        with contextlib.suppress(ValueError):
+            return self._handle_prepositional_phrase(
+                words.lowered,
+                functools.partial(self._sinoun, count=1, gender=gender),
+                ' ',
+            )
+
+        with contextlib.suppress(ValueError):
+            return self._handle_prepositional_phrase(
+                words.lowered,
+                functools.partial(self._sinoun, count=1, gender=gender),
+                '-',
+            )
+
+        # HANDLE PRONOUNS
+
+        for k, v in si_pron_acc_keys_bysize.items():
+            if words.lowered[-k:] in v:  # ends with accusative pronoun
+                for pk, pv in pl_prep_bysize.items():
+                    if words.lowered[:pk] in pv:  # starts with a prep
+                        if words.lowered.split() == [
+                            words.lowered[:pk],
+                            words.lowered[-k:],
+                        ]:
+                            # only whitespace in between
+                            return words.lowered[:-k] + get_si_pron(
+                                "acc", words.lowered[-k:], gender
+                            )
+
+        try:
+            return get_si_pron("nom", words.lowered, gender)
+        except KeyError:
+            pass
+
+        try:
+            return get_si_pron("acc", words.lowered, gender)
+        except KeyError:
+            pass
+
+        # HANDLE ISOLATED IRREGULAR PLURALS
+
+        if words.last in si_sb_irregular_caps:
+            llen = len(words.last)
+            return f"{word[:-llen]}{si_sb_irregular_caps[words.last]}"
+
+        if words.last.lower() in si_sb_irregular:
+            llen = len(words.last.lower())
+            return f"{word[:-llen]}{si_sb_irregular[words.last.lower()]}"
+
+        dash_split = words.lowered.split("-")
+        if (" ".join(dash_split[-2:])).lower() in si_sb_irregular_compound:
+            llen = len(
+                " ".join(dash_split[-2:])
+            )  # TODO: what if 2 spaces between these words?
+            return "{}{}".format(
+                word[:-llen],
+                si_sb_irregular_compound[(" ".join(dash_split[-2:])).lower()],
+            )
+
+        if words.lowered[-5:] == "quies":
+            return word[:-3] + "y"
+
+        if words.lowered[-7:] == "persons":
+            return word[:-1]
+        if words.lowered[-6:] == "people":
+            return word[:-4] + "rson"
+
+        # HANDLE FAMILIES OF IRREGULAR PLURALS
+
+        if words.lowered[-4:] == "mans":
+            for k, v in si_sb_U_man_mans_bysize.items():
+                if words.lowered[-k:] in v:
+                    return word[:-1]
+            for k, v in si_sb_U_man_mans_caps_bysize.items():
+                if word[-k:] in v:
+                    return word[:-1]
+        if words.lowered[-3:] == "men":
+            return word[:-3] + "man"
+        if words.lowered[-4:] == "mice":
+            return word[:-4] + "mouse"
+        if words.lowered[-4:] == "lice":
+            v = si_sb_U_louse_lice_bysize.get(len(word))
+            if v and words.lowered in v:
+                return word[:-4] + "louse"
+        if words.lowered[-5:] == "geese":
+            return word[:-5] + "goose"
+        if words.lowered[-5:] == "teeth":
+            return word[:-5] + "tooth"
+        if words.lowered[-4:] == "feet":
+            return word[:-4] + "foot"
+
+        if words.lowered == "dice":
+            return "die"
+
+        # HANDLE UNASSIMILATED IMPORTS
+
+        if words.lowered[-4:] == "ceps":
+            return word
+        if words.lowered[-3:] == "zoa":
+            return word[:-1] + "on"
+
+        for lastlet, d, unass_numend, post in (
+            ("s", si_sb_U_ch_chs_bysize, -1, ""),
+            ("s", si_sb_U_ex_ices_bysize, -4, "ex"),
+            ("s", si_sb_U_ix_ices_bysize, -4, "ix"),
+            ("a", si_sb_U_um_a_bysize, -1, "um"),
+            ("i", si_sb_U_us_i_bysize, -1, "us"),
+            ("a", si_sb_U_on_a_bysize, -1, "on"),
+            ("e", si_sb_U_a_ae_bysize, -1, ""),
+        ):
+            if words.lowered[-1] == lastlet:  # check the last letter first, for speed
+                for k, v in d.items():
+                    if words.lowered[-k:] in v:
+                        return word[:unass_numend] + post
+
+        # HANDLE INCOMPLETELY ASSIMILATED IMPORTS
+
+        if self.classical_dict["ancient"]:
+            if words.lowered[-6:] == "trices":
+                return word[:-3] + "x"
+            if words.lowered[-4:] in ("eaux", "ieux"):
+                return word[:-1]
+            if words.lowered[-5:] in ("ynges", "inges", "anges") and len(word) > 6:
+                return word[:-3] + "x"
+
+            for lastlet, d, class_numend, post in (
+                ("a", si_sb_C_en_ina_bysize, -3, "en"),
+                ("s", si_sb_C_ex_ices_bysize, -4, "ex"),
+                ("s", si_sb_C_ix_ices_bysize, -4, "ix"),
+                ("a", si_sb_C_um_a_bysize, -1, "um"),
+                ("i", si_sb_C_us_i_bysize, -1, "us"),
+                ("s", pl_sb_C_us_us_bysize, None, ""),
+                ("e", si_sb_C_a_ae_bysize, -1, ""),
+                ("a", si_sb_C_a_ata_bysize, -2, ""),
+                ("s", si_sb_C_is_ides_bysize, -3, "s"),
+                ("i", si_sb_C_o_i_bysize, -1, "o"),
+                ("a", si_sb_C_on_a_bysize, -1, "on"),
+                ("m", si_sb_C_im_bysize, -2, ""),
+                ("i", si_sb_C_i_bysize, -1, ""),
+            ):
+                if words.lowered[-1] == lastlet:  # check the last letter first, for speed
+                    for k, v in d.items():
+                        if words.lowered[-k:] in v:
+                            return word[:class_numend] + post
+
+        # HANDLE PLURALS ENDING IN uses -> use
+
+        if (
+            words.lowered[-6:] == "houses"
+            or word in si_sb_uses_use_case
+            or words.last.lower() in si_sb_uses_use
+        ):
+            return word[:-1]
+
+        # HANDLE PLURALS ENDING IN ies -> ie
+
+        if word in si_sb_ies_ie_case or words.last.lower() in si_sb_ies_ie:
+            return word[:-1]
+
+        # HANDLE PLURALS ENDING IN oes -> oe
+
+        if (
+            words.lowered[-5:] == "shoes"
+            or word in si_sb_oes_oe_case
+            or words.last.lower() in si_sb_oes_oe
+        ):
+            return word[:-1]
+
+        # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SIBILANTS
+
+        if word in si_sb_sses_sse_case or words.last.lower() in si_sb_sses_sse:
+            return word[:-1]
+
+        if words.last.lower() in si_sb_singular_s_complete:
+            return word[:-2]
+
+        for k, v in si_sb_singular_s_bysize.items():
+            if words.lowered[-k:] in v:
+                return word[:-2]
+
+        if words.lowered[-4:] == "eses" and word[0] == word[0].upper():
+            return word[:-2]
+
+        if words.last.lower() in si_sb_z_zes:
+            return word[:-2]
+
+        if words.last.lower() in si_sb_zzes_zz:
+            return word[:-2]
+
+        if words.lowered[-4:] == "zzes":
+            return word[:-3]
+
+        if word in si_sb_ches_che_case or words.last.lower() in si_sb_ches_che:
+            return word[:-1]
+
+        if words.lowered[-4:] in ("ches", "shes"):
+            return word[:-2]
+
+        if words.last.lower() in si_sb_xes_xe:
+            return word[:-1]
+
+        if words.lowered[-3:] == "xes":
+            return word[:-2]
+
+        # HANDLE ...f -> ...ves
+
+        if word in si_sb_ves_ve_case or words.last.lower() in si_sb_ves_ve:
+            return word[:-1]
+
+        if words.lowered[-3:] == "ves":
+            if words.lowered[-5:-3] in ("el", "al", "ol"):
+                return word[:-3] + "f"
+            if words.lowered[-5:-3] == "ea" and word[-6:-5] != "d":
+                return word[:-3] + "f"
+            if words.lowered[-5:-3] in ("ni", "li", "wi"):
+                return word[:-3] + "fe"
+            if words.lowered[-5:-3] == "ar":
+                return word[:-3] + "f"
+
+        # HANDLE ...y
+
+        if words.lowered[-2:] == "ys":
+            if len(words.lowered) > 2 and words.lowered[-3] in "aeiou":
+                return word[:-1]
+
+            if self.classical_dict["names"]:
+                if words.lowered[-2:] == "ys" and word[0] == word[0].upper():
+                    return word[:-1]
+
+        if words.lowered[-3:] == "ies":
+            return word[:-3] + "y"
+
+        # HANDLE ...o
+
+        if words.lowered[-2:] == "os":
+            if words.last.lower() in si_sb_U_o_os_complete:
+                return word[:-1]
+
+            for k, v in si_sb_U_o_os_bysize.items():
+                if words.lowered[-k:] in v:
+                    return word[:-1]
+
+            if words.lowered[-3:] in ("aos", "eos", "ios", "oos", "uos"):
+                return word[:-1]
+
+        if words.lowered[-3:] == "oes":
+            return word[:-2]
+
+        # UNASSIMILATED IMPORTS FINAL RULE
+
+        if word in si_sb_es_is:
+            return word[:-2] + "is"
+
+        # OTHERWISE JUST REMOVE ...s
+
+        if words.lowered[-1] == "s":
+            return word[:-1]
+
+        # COULD NOT FIND SINGULAR
+
+        return False
+
+    # ADJECTIVES
+
+    @typechecked
+    def a(self, text: Word, count: Optional[Union[int, str, Any]] = 1) -> str:
+        """
+        Return the appropriate indefinite article followed by text.
+
+        The indefinite article is either 'a' or 'an'.
+
+        If count is not one, then return count followed by text
+        instead of 'a' or 'an'.
+
+        Whitespace at the start and end is preserved.
+
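+        For example:
+
+        >>> engine().a('ant')
+        'an ant'
+        >>> engine().a('pear')
+        'a pear'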
+        """
+        mo = INDEFINITE_ARTICLE_TEST.search(text)
+        if mo:
+            word = mo.group(2)
+            if not word:
+                return text
+            pre = mo.group(1)
+            post = mo.group(3)
+            result = self._indef_article(word, count)
+            return f"{pre}{result}{post}"
+        return ""
+
+    an = a
+
+    _indef_article_cases = (
+        # HANDLE ORDINAL FORMS
+        (A_ordinal_a, "a"),
+        (A_ordinal_an, "an"),
+        # HANDLE SPECIAL CASES
+        (A_explicit_an, "an"),
+        (SPECIAL_AN, "an"),
+        (SPECIAL_A, "a"),
+        # HANDLE ABBREVIATIONS
+        (A_abbrev, "an"),
+        (SPECIAL_ABBREV_AN, "an"),
+        (SPECIAL_ABBREV_A, "a"),
+        # HANDLE CONSONANTS
+        (CONSONANTS, "a"),
+        # HANDLE SPECIAL VOWEL-FORMS
+        (ARTICLE_SPECIAL_EU, "a"),
+        (ARTICLE_SPECIAL_ONCE, "a"),
+        (ARTICLE_SPECIAL_ONETIME, "a"),
+        (ARTICLE_SPECIAL_UNIT, "a"),
+        (ARTICLE_SPECIAL_UBA, "a"),
+        (ARTICLE_SPECIAL_UKR, "a"),
+        (A_explicit_a, "a"),
+        # HANDLE SPECIAL CAPITALS
+        (SPECIAL_CAPITALS, "a"),
+        # HANDLE VOWELS
+        (VOWELS, "an"),
+        # HANDLE y...
+        # (BEFORE CERTAIN CONSONANTS IMPLIES (UNNATURALIZED) "i.." SOUND)
+        (A_y_cons, "an"),
+    )
+
+    def _indef_article(self, word: str, count: Union[int, str, Any]) -> str:
+        mycount = self.get_count(count)
+
+        if mycount != 1:
+            return f"{count} {word}"
+
+        # HANDLE USER-DEFINED VARIANTS
+
+        value = self.ud_match(word, self.A_a_user_defined)
+        if value is not None:
+            return f"{value} {word}"
+
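+        # _indef_article_cases is tried in order and the first match wins, so
+        # the specific patterns (explicit 'an' words, abbreviations) precede
+        # the generic consonant and vowel rules.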
+        matches = (
+            f'{article} {word}'
+            for regexen, article in self._indef_article_cases
+            if regexen.search(word)
+        )
+
+        # OTHERWISE, GUESS "a"
+        fallback = f'a {word}'
+        return next(matches, fallback)
+
+    # 2. TRANSLATE ZERO-QUANTIFIED $word TO "no plural($word)"
+
+    @typechecked
+    def no(self, text: Word, count: Optional[Union[int, str]] = None) -> str:
+        """
+        If count is 0, no, zero or nil, return 'no' followed by the plural
+        of text.
+
+        If count is one of:
+            1, a, an, one, each, every, this, that
+            return count followed by text.
+
+        Otherwise return count followed by the plural of text.
+
+        In the return value count is always followed by a space.
+
+        Whitespace at the start and end is preserved.
+
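+        For example:
+
+        >>> engine().no('mouse', 0)
+        'no mice'
+        >>> engine().no('cat', 3)
+        '3 cats'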
+        """
+        if count is None and self.persistent_count is not None:
+            count = self.persistent_count
+
+        if count is None:
+            count = 0
+        mo = PARTITION_WORD.search(text)
+        if mo:
+            pre = mo.group(1)
+            word = mo.group(2)
+            post = mo.group(3)
+        else:
+            pre = ""
+            word = ""
+            post = ""
+
+        if str(count).lower() in pl_count_zero:
+            count = 'no'
+        return f"{pre}{count} {self.plural(word, count)}{post}"
+
+    # PARTICIPLES
+
+    @typechecked
+    def present_participle(self, word: Word) -> str:
+        """
+        Return the present participle for word.
+
+        word is the 3rd person singular verb.
+
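+        For example:
+
+        >>> engine().present_participle('runs')
+        'running'
+        >>> engine().present_participle('eats')
+        'eating'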
+        """
+        plv = self.plural_verb(word, 2)
+        ans = plv
+
+        for regexen, repl in PRESENT_PARTICIPLE_REPLACEMENTS:
+            ans, num = regexen.subn(repl, plv)
+            if num:
+                return f"{ans}ing"
+        return f"{ans}ing"
+
+    # NUMERICAL INFLECTIONS
+
+    @typechecked
+    def ordinal(self, num: Union[Number, Word]) -> str:
+        """
+        Return the ordinal of num.
+
+        >>> ordinal = engine().ordinal
+        >>> ordinal(1)
+        '1st'
+        >>> ordinal('one')
+        'first'
+        """
+        if DIGIT.match(str(num)):
+            if isinstance(num, (float, int)) and int(num) == num:
+                n = int(num)
+            else:
+                if "." in str(num):
+                    try:
+                        # numbers after decimal,
+                        # so only need last one for ordinal
+                        n = int(str(num)[-1])
+
+                    except ValueError:  # ends with '.', so need to use whole string
+                        n = int(str(num)[:-1])
+                else:
+                    n = int(num)  # type: ignore
+            try:
+                post = nth[n % 100]
+            except KeyError:
+                post = nth[n % 10]
+            return f"{num}{post}"
+        else:
+            return self._sub_ord(num)
+
+    def millfn(self, ind: int = 0) -> str:
+        if ind > len(mill) - 1:
+            raise NumOutOfRangeError
+        return mill[ind]
+
+    def unitfn(self, units: int, mindex: int = 0) -> str:
+        return f"{unit[units]}{self.millfn(mindex)}"
+
+    def tenfn(self, tens, units, mindex=0) -> str:
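+        # tens == 1 selects the 'ten' through 'nineteen' words; otherwise
+        # join the tens and units words, hyphenated when both are present.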
+        if tens != 1:
+            tens_part = ten[tens]
+            if tens and units:
+                hyphen = "-"
+            else:
+                hyphen = ""
+            unit_part = unit[units]
+            mill_part = self.millfn(mindex)
+            return f"{tens_part}{hyphen}{unit_part}{mill_part}"
+        return f"{teen[units]}{mill[mindex]}"
+
+    def hundfn(self, hundreds: int, tens: int, units: int, mindex: int) -> str:
+        if hundreds:
+            andword = f" {self._number_args['andword']} " if tens or units else ""
+            # use unit not unitfn as simpler
+            return (
+                f"{unit[hundreds]} hundred{andword}"
+                f"{self.tenfn(tens, units)}{self.millfn(mindex)}, "
+            )
+        if tens or units:
+            return f"{self.tenfn(tens, units)}{self.millfn(mindex)}, "
+        return ""
+
+    def group1sub(self, mo: Match) -> str:
+        units = int(mo.group(1))
+        if units == 1:
+            return f" {self._number_args['one']}, "
+        elif units:
+            return f"{unit[units]}, "
+        else:
+            return f" {self._number_args['zero']}, "
+
+    def group1bsub(self, mo: Match) -> str:
+        units = int(mo.group(1))
+        if units:
+            return f"{unit[units]}, "
+        else:
+            return f" {self._number_args['zero']}, "
+
+    def group2sub(self, mo: Match) -> str:
+        tens = int(mo.group(1))
+        units = int(mo.group(2))
+        if tens:
+            return f"{self.tenfn(tens, units)}, "
+        if units:
+            return f" {self._number_args['zero']} {unit[units]}, "
+        return f" {self._number_args['zero']} {self._number_args['zero']}, "
+
+    def group3sub(self, mo: Match) -> str:
+        hundreds = int(mo.group(1))
+        tens = int(mo.group(2))
+        units = int(mo.group(3))
+        if hundreds == 1:
+            hunword = f" {self._number_args['one']}"
+        elif hundreds:
+            hunword = str(unit[hundreds])
+        else:
+            hunword = f" {self._number_args['zero']}"
+        if tens:
+            tenword = self.tenfn(tens, units)
+        elif units:
+            tenword = f" {self._number_args['zero']} {unit[units]}"
+        else:
+            tenword = f" {self._number_args['zero']} {self._number_args['zero']}"
+        return f"{hunword} {tenword}, "
+
+    def hundsub(self, mo: Match) -> str:
+        ret = self.hundfn(
+            int(mo.group(1)), int(mo.group(2)), int(mo.group(3)), self.mill_count
+        )
+        self.mill_count += 1
+        return ret
+
+    def tensub(self, mo: Match) -> str:
+        return f"{self.tenfn(int(mo.group(1)), int(mo.group(2)), self.mill_count)}, "
+
+    def unitsub(self, mo: Match) -> str:
+        return f"{self.unitfn(int(mo.group(1)), self.mill_count)}, "
+
+    def enword(self, num: str, group: int) -> str:
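+        # group == 1, 2 or 3 reads the digits in fixed-size groups via the
+        # group*sub helpers; group == 0 renders the whole number with place
+        # values (thousand, million, ...).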
+
+        if group == 1:
+            num = DIGIT_GROUP.sub(self.group1sub, num)
+        elif group == 2:
+            num = TWO_DIGITS.sub(self.group2sub, num)
+            num = DIGIT_GROUP.sub(self.group1bsub, num, 1)
+        elif group == 3:
+            num = THREE_DIGITS.sub(self.group3sub, num)
+            num = TWO_DIGITS.sub(self.group2sub, num, 1)
+            num = DIGIT_GROUP.sub(self.group1sub, num, 1)
+        elif int(num) == 0:
+            num = self._number_args["zero"]
+        elif int(num) == 1:
+            num = self._number_args["one"]
+        else:
+            num = num.lstrip().lstrip("0")
+            self.mill_count = 0
+            # convert the rightmost three-digit group on each pass, then
+            # handle any remaining two-digit or one-digit prefix
+            mo = THREE_DIGITS_WORD.search(num)
+            while mo:
+                num = THREE_DIGITS_WORD.sub(self.hundsub, num, 1)
+                mo = THREE_DIGITS_WORD.search(num)
+            num = TWO_DIGITS_WORD.sub(self.tensub, num, 1)
+            num = ONE_DIGIT_WORD.sub(self.unitsub, num, 1)
+        return num
+
+    @staticmethod
+    def _sub_ord(val):
+        new = ordinal_suff.sub(lambda match: ordinal[match.group(1)], val)
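+        # ordinal_suff rewrites a known ending ('one' -> 'first', 'ty' ->
+        # 'tieth', ...); when nothing matches, new == val and the boolean
+        # multiplication appends a plain 'th' (e.g. 'seven' -> 'seventh').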
+        return new + "th" * (new == val)
+
+    @classmethod
+    def _chunk_num(cls, num, decimal, group):
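+        # Split on the decimal point when a decimal word is in use; group
+        # mode splits on every '.', otherwise only on the first.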
+        if decimal:
+            max_split = -1 if group != 0 else 1
+            chunks = num.split(".", max_split)
+        else:
+            chunks = [num]
+        return cls._remove_last_blank(chunks)
+
+    @staticmethod
+    def _remove_last_blank(chunks):
+        """
+        Remove the last item from chunks if it's a blank string.
+
+        Return the resultant chunks and whether the last item was removed.
+        """
+        removed = chunks[-1] == ""
+        result = chunks[:-1] if removed else chunks
+        return result, removed
+
+    @staticmethod
+    def _get_sign(num):
+        return {'+': 'plus', '-': 'minus'}.get(num.lstrip()[0], '')
+
+    @typechecked
+    def number_to_words(  # noqa: C901
+        self,
+        num: Union[Number, Word],
+        wantlist: bool = False,
+        group: int = 0,
+        comma: Union[Falsish, str] = ",",
+        andword: str = "and",
+        zero: str = "zero",
+        one: str = "one",
+        decimal: Union[Falsish, str] = "point",
+        threshold: Optional[int] = None,
+    ) -> Union[str, List[str]]:
+        """
+        Return a number in words.
+
+        group = 1, 2 or 3 to group numbers before turning into words
+        comma: the separator placed between groups of words (default ',')
+
+        andword:
+            word for 'and'. Can be set to ''.
+            e.g. "one hundred and one" vs "one hundred one"
+
+        zero: word for '0'
+        one: word for '1'
+        decimal: word for decimal point
+        threshold: numbers above threshold not turned into words
+
+        Parameters are not remembered from the last call; this is a
+        departure from the Perl version.
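+
+        For example:
+
+        >>> engine().number_to_words(1234)
+        'one thousand, two hundred and thirty-four'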
+        """
+        self._number_args = {"andword": andword, "zero": zero, "one": one}
+        num = str(num)
+
+        # Handle "stylistic" conversions (up to a given threshold)...
+        if threshold is not None and float(num) > threshold:
+            spnum = num.split(".", 1)
+            while comma:
+                (spnum[0], n) = FOUR_DIGIT_COMMA.subn(r"\1,\2", spnum[0])
+                if n == 0:
+                    break
+            try:
+                return f"{spnum[0]}.{spnum[1]}"
+            except IndexError:
+                return str(spnum[0])
+
+        if group < 0 or group > 3:
+            raise BadChunkingOptionError
+
+        sign = self._get_sign(num)
+
+        if num in nth_suff:
+            num = zero
+
+        myord = num[-2:] in nth_suff
+        if myord:
+            num = num[:-2]
+
+        chunks, finalpoint = self._chunk_num(num, decimal, group)
+
+        loopstart = chunks[0] == ""
+        first: bool | None = not loopstart
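+        # loopstart doubles as a slice index below: a leading empty chunk
+        # (a number starting with the decimal point) is passed through as-is.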
+
+        def _handle_chunk(chunk):
+            nonlocal first
+
+            # remove all non numeric \D
+            chunk = NON_DIGIT.sub("", chunk)
+            if chunk == "":
+                chunk = "0"
+
+            if group == 0 and not first:
+                chunk = self.enword(chunk, 1)
+            else:
+                chunk = self.enword(chunk, group)
+
+            if chunk[-2:] == ", ":
+                chunk = chunk[:-2]
+            chunk = WHITESPACES_COMMA.sub(",", chunk)
+
+            if group == 0 and first:
+                chunk = COMMA_WORD.sub(f" {andword} \\1", chunk)
+            chunk = WHITESPACES.sub(" ", chunk)
+            chunk = chunk.strip()
+            if first:
+                first = None
+            return chunk
+
+        chunks[loopstart:] = map(_handle_chunk, chunks[loopstart:])
+
+        numchunks = []
+        if first != 0:
+            numchunks = chunks[0].split(f"{comma} ")
+
+        if myord and numchunks:
+            numchunks[-1] = self._sub_ord(numchunks[-1])
+
+        for chunk in chunks[1:]:
+            numchunks.append(decimal)
+            numchunks.extend(chunk.split(f"{comma} "))
+
+        if finalpoint:
+            numchunks.append(decimal)
+
+        if wantlist:
+            return [sign] * bool(sign) + numchunks
+
+        signout = f"{sign} " if sign else ""
+        valout = (
+            ', '.join(numchunks)
+            if group
+            else ''.join(self._render(numchunks, decimal, comma))
+        )
+        return signout + valout
+
+    @staticmethod
+    def _render(chunks, decimal, comma):
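+        # Yield `comma` between chunks of the integer part only; once the
+        # decimal word has been seen, chunks are separated by spaces alone.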
+        first_item = chunks.pop(0)
+        yield first_item
+        first = decimal is None or not first_item.endswith(decimal)
+        for nc in chunks:
+            if nc == decimal:
+                first = False
+            elif first:
+                yield comma
+            yield f" {nc}"
+
+    @typechecked
+    def join(
+        self,
+        words: Optional[Sequence[Word]],
+        sep: Optional[str] = None,
+        sep_spaced: bool = True,
+        final_sep: Optional[str] = None,
+        conj: str = "and",
+        conj_spaced: bool = True,
+    ) -> str:
+        """
+        Join words into a single string as a natural-language list.
+
+        e.g. join(['ant', 'bee', 'fly']) returns 'ant, bee, and fly'
+
+        options:
+        conj: replacement for 'and'
+        sep: separator. default ',', unless ',' is in the list then ';'
+        final_sep: final separator. default ',', unless ',' is in the list then ';'
+        conj_spaced: boolean. Should conj have spaces around it
+
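+        For example:
+
+        >>> engine().join(('apple', 'banana'))
+        'apple and banana'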
+        """
+        if not words:
+            return ""
+        if len(words) == 1:
+            return words[0]
+
+        if conj_spaced:
+            if conj == "":
+                conj = " "
+            else:
+                conj = f" {conj} "
+
+        if len(words) == 2:
+            return f"{words[0]}{conj}{words[1]}"
+
+        if sep is None:
+            if "," in "".join(words):
+                sep = ";"
+            else:
+                sep = ","
+        if final_sep is None:
+            final_sep = sep
+
+        final_sep = f"{final_sep}{conj}"
+
+        if sep_spaced:
+            sep += " "
+
+        return f"{sep.join(words[0:-1])}{final_sep}{words[-1]}"
diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED b/pkg_resources/_vendor/inflect/compat/__init__.py
similarity index 100%
rename from pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/REQUESTED
rename to pkg_resources/_vendor/inflect/compat/__init__.py
diff --git a/pkg_resources/_vendor/inflect/compat/py38.py b/pkg_resources/_vendor/inflect/compat/py38.py
new file mode 100644
index 0000000000..a2d01bd98f
--- /dev/null
+++ b/pkg_resources/_vendor/inflect/compat/py38.py
@@ -0,0 +1,7 @@
+import sys
+
+
+if sys.version_info >= (3, 9):
+    from typing import Annotated
+else:  # pragma: no cover
+    from typing_extensions import Annotated  # noqa: F401
diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/REQUESTED b/pkg_resources/_vendor/inflect/py.typed
similarity index 100%
rename from pkg_resources/_vendor/packaging-24.0.dist-info/REQUESTED
rename to pkg_resources/_vendor/inflect/py.typed
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
deleted file mode 100644
index 783aa7d2b9..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-jaraco.functools-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-4.0.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.functools-4.0.0.dist-info/METADATA,sha256=nVOe_vWvaN2iWJ2aBVkhKvmvH-gFksNCXHwCNvcj65I,3078
-jaraco.functools-4.0.0.dist-info/RECORD,,
-jaraco.functools-4.0.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-jaraco.functools-4.0.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
-jaraco/functools/__init__.pyi,sha256=N4lLbdhMtrmwiK3UuMGhYsiOLLZx69CUNOdmFPSVh6Q,3982
-jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
-jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/more_itertools-10.2.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
similarity index 97%
rename from pkg_resources/_vendor/zipp-3.7.0.dist-info/LICENSE
rename to pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
index 353924be0e..1bb5a44356 100644
--- a/pkg_resources/_vendor/zipp-3.7.0.dist-info/LICENSE
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
@@ -1,5 +1,3 @@
-Copyright Jason R. Coombs
-
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to
 deal in the Software without restriction, including without limitation the
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
similarity index 78%
rename from pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
rename to pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
index 581b308378..c865140ab2 100644
--- a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/METADATA
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
@@ -1,16 +1,16 @@
 Metadata-Version: 2.1
 Name: jaraco.functools
-Version: 4.0.0
+Version: 4.0.1
 Summary: Functools like those found in stdlib
-Home-page: https://github.com/jaraco/jaraco.functools
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
+Author-email: "Jason R. Coombs" 
+Project-URL: Homepage, https://github.com/jaraco/jaraco.functools
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
 Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
 License-File: LICENSE
 Requires-Dist: more-itertools
 Provides-Extra: docs
@@ -26,17 +26,16 @@ Requires-Dist: pytest >=6 ; extra == 'testing'
 Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
 Requires-Dist: pytest-cov ; extra == 'testing'
 Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff ; extra == 'testing'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
 Requires-Dist: jaraco.classes ; extra == 'testing'
-Requires-Dist: pytest-black >=0.3.7 ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
 
 .. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
    :target: https://pypi.org/project/jaraco.functools
 
 .. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
 
-.. image:: https://github.com/jaraco/jaraco.functools/workflows/tests/badge.svg
+.. image:: https://github.com/jaraco/jaraco.functools/actions/workflows/main.yml/badge.svg
    :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
    :alt: tests
 
@@ -44,14 +43,10 @@ Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy")
     :target: https://github.com/astral-sh/ruff
     :alt: Ruff
 
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
 .. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
    :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
 
-.. image:: https://img.shields.io/badge/skeleton-2023-informational
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
    :target: https://blog.jaraco.com/skeleton
 
 .. image:: https://tidelift.com/badges/package/pypi/jaraco.functools
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
new file mode 100644
index 0000000000..ef3bc21e92
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.functools-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-4.0.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.functools-4.0.1.dist-info/METADATA,sha256=i4aUaQDX-jjdEQK5wevhegyx8JyLfin2HyvaSk3FHso,2891
+jaraco.functools-4.0.1.dist-info/RECORD,,
+jaraco.functools-4.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+jaraco.functools-4.0.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
+jaraco/functools/__init__.pyi,sha256=gk3dsgHzo5F_U74HzAvpNivFAPCkPJ1b2-yCd62dfnw,3878
+jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
+jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
similarity index 65%
rename from pkg_resources/_vendor/zipp-3.7.0.dist-info/WHEEL
rename to pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
index becc9a66ea..bab98d6758 100644
--- a/pkg_resources/_vendor/zipp-3.7.0.dist-info/WHEEL
+++ b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.37.1)
+Generator: bdist_wheel (0.43.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
similarity index 100%
rename from pkg_resources/_vendor/jaraco.functools-4.0.0.dist-info/top_level.txt
rename to pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/packaging-24.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
similarity index 97%
rename from pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/LICENSE
rename to pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
index 353924be0e..1bb5a44356 100644
--- a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/LICENSE
+++ b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
@@ -1,5 +1,3 @@
-Copyright Jason R. Coombs
-
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to
 deal in the Software without restriction, including without limitation the
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/METADATA b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/METADATA
new file mode 100644
index 0000000000..0258a380f4
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/METADATA
@@ -0,0 +1,95 @@
+Metadata-Version: 2.1
+Name: jaraco.text
+Version: 3.12.1
+Summary: Module for text manipulation
+Author-email: "Jason R. Coombs" 
+Project-URL: Homepage, https://github.com/jaraco/jaraco.text
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: jaraco.functools
+Requires-Dist: jaraco.context >=4.1
+Requires-Dist: autocommand
+Requires-Dist: inflect
+Requires-Dist: more-itertools
+Requires-Dist: importlib-resources ; python_version < "3.9"
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-mypy ; extra == 'test'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
+Requires-Dist: pathlib2 ; (python_version < "3.10") and extra == 'test'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.text.svg
+   :target: https://pypi.org/project/jaraco.text
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.text.svg
+
+.. image:: https://github.com/jaraco/jaraco.text/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/jaraco.text/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/jaracotext/badge/?version=latest
+   :target: https://jaracotext.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/jaraco.text
+   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.text?utm_source=pypi-jaraco.text&utm_medium=readme
+
+
+This package provides handy routines for dealing with text, such as
+wrapping, substitution, trimming, stripping, prefix and suffix removal,
+line continuation, indentation, comment processing, identifier processing,
+values parsing, case insensitive comparison, and more. See the docs
+(linked in the badge above) for the detailed documentation and examples.
+
+Layouts
+=======
+
+One of the features of this package is the layouts module, which
+provides a simple example of translating keystrokes from one keyboard
+layout to another::
+
+    echo qwerty | python -m jaraco.text.to-dvorak
+    ',.pyf
+    echo  "',.pyf" | python -m jaraco.text.to-qwerty
+    qwerty
+
+Newline Reporting
+=================
+
+Need to know what newlines appear in a file?
+
+::
+
+    $ python -m jaraco.text.show-newlines README.rst
+    newline is '\n'
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/RECORD b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/RECORD
new file mode 100644
index 0000000000..19e2d8402a
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/RECORD
@@ -0,0 +1,20 @@
+jaraco.text-3.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.text-3.12.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.text-3.12.1.dist-info/METADATA,sha256=AzWdm6ViMfDOPoQMfLWn2zgBQSGJScyqeN29TcuWXVI,3658
+jaraco.text-3.12.1.dist-info/RECORD,,
+jaraco.text-3.12.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jaraco.text-3.12.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+jaraco.text-3.12.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
+jaraco/text/__init__.py,sha256=Y2YUqXR_orUoDaY4SkPRe6ZZhb5HUHB_Ah9RCNsVyho,16250
+jaraco/text/__pycache__/__init__.cpython-312.pyc,,
+jaraco/text/__pycache__/layouts.cpython-312.pyc,,
+jaraco/text/__pycache__/show-newlines.cpython-312.pyc,,
+jaraco/text/__pycache__/strip-prefix.cpython-312.pyc,,
+jaraco/text/__pycache__/to-dvorak.cpython-312.pyc,,
+jaraco/text/__pycache__/to-qwerty.cpython-312.pyc,,
+jaraco/text/layouts.py,sha256=HTC8aSTLZ7uXipyOXapRMC158juecjK6RVwitfmZ9_w,643
+jaraco/text/show-newlines.py,sha256=WGQa65e8lyhb92LUOLqVn6KaCtoeVgVws6WtSRmLk6w,904
+jaraco/text/strip-prefix.py,sha256=NfVXV8JVNo6nqcuYASfMV7_y4Eo8zMQqlCOGvAnRIVw,412
+jaraco/text/to-dvorak.py,sha256=1SNcbSsvISpXXg-LnybIHHY-RUFOQr36zcHkY1pWFqw,119
+jaraco/text/to-qwerty.py,sha256=s4UMQUnPwFn_dB5uZC27BurHOQcYondBfzIpVL5pEzw,119
diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/REQUESTED b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED
similarity index 100%
rename from pkg_resources/_vendor/platformdirs-2.6.2.dist-info/REQUESTED
rename to pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt
similarity index 100%
rename from pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/top_level.txt
rename to pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt
diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/METADATA
deleted file mode 100644
index 615a50a4ae..0000000000
--- a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/METADATA
+++ /dev/null
@@ -1,55 +0,0 @@
-Metadata-Version: 2.1
-Name: jaraco.text
-Version: 3.7.0
-Summary: Module for text manipulation
-Home-page: https://github.com/jaraco/jaraco.text
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-License: UNKNOWN
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.6
-License-File: LICENSE
-Requires-Dist: jaraco.functools
-Requires-Dist: jaraco.context (>=4.1)
-Requires-Dist: importlib-resources ; python_version < "3.9"
-Provides-Extra: docs
-Requires-Dist: sphinx ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: pytest-flake8 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/jaraco.text.svg
-   :target: `PyPI link`_
-
-.. image:: https://img.shields.io/pypi/pyversions/jaraco.text.svg
-   :target: `PyPI link`_
-
-.. _PyPI link: https://pypi.org/project/jaraco.text
-
-.. image:: https://github.com/jaraco/jaraco.text/workflows/tests/badge.svg
-   :target: https://github.com/jaraco/jaraco.text/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
-.. image:: https://readthedocs.org/projects/jaracotext/badge/?version=latest
-   :target: https://jaracotext.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2021-informational
-   :target: https://blog.jaraco.com/skeleton
-
-
diff --git a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD
deleted file mode 100644
index c698101cb4..0000000000
--- a/pkg_resources/_vendor/jaraco.text-3.7.0.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-jaraco.text-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.text-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
-jaraco.text-3.7.0.dist-info/METADATA,sha256=5mcR1dY0cJNrM-VIkAFkpjOgvgzmq6nM1GfD0gwTIhs,2136
-jaraco.text-3.7.0.dist-info/RECORD,,
-jaraco.text-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
-jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
-jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538
-jaraco/text/__pycache__/__init__.cpython-312.pyc,,
diff --git a/pkg_resources/_vendor/jaraco/context.py b/pkg_resources/_vendor/jaraco/context.py
index c42f6135d5..61b27135df 100644
--- a/pkg_resources/_vendor/jaraco/context.py
+++ b/pkg_resources/_vendor/jaraco/context.py
@@ -14,7 +14,7 @@
 
 
 if sys.version_info < (3, 12):
-    from pkg_resources.extern.backports import tarfile
+    from backports import tarfile
 else:
     import tarfile
 
diff --git a/pkg_resources/_vendor/jaraco/functools/__init__.py b/pkg_resources/_vendor/jaraco/functools/__init__.py
index f523099c72..ca6c22fa9b 100644
--- a/pkg_resources/_vendor/jaraco/functools/__init__.py
+++ b/pkg_resources/_vendor/jaraco/functools/__init__.py
@@ -7,7 +7,7 @@
 import types
 import warnings
 
-import pkg_resources.extern.more_itertools
+import more_itertools
 
 
 def compose(*funcs):
@@ -603,10 +603,10 @@ def splat(func):
     simple ``map``.
 
     >>> pairs = [(-1, 1), (0, 2)]
-    >>> pkg_resources.extern.more_itertools.consume(itertools.starmap(print, pairs))
+    >>> more_itertools.consume(itertools.starmap(print, pairs))
     -1 1
     0 2
-    >>> pkg_resources.extern.more_itertools.consume(map(splat(print), pairs))
+    >>> more_itertools.consume(map(splat(print), pairs))
     -1 1
     0 2
 
diff --git a/pkg_resources/_vendor/jaraco/functools/__init__.pyi b/pkg_resources/_vendor/jaraco/functools/__init__.pyi
index c2b9ab1757..19191bf93e 100644
--- a/pkg_resources/_vendor/jaraco/functools/__init__.pyi
+++ b/pkg_resources/_vendor/jaraco/functools/__init__.pyi
@@ -74,9 +74,6 @@ def result_invoke(
 def invoke(
     f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
 ) -> Callable[_P, _R]: ...
-def call_aside(
-    f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
-) -> Callable[_P, _R]: ...
 
 class Throttler(Generic[_R]):
     last_called: float
diff --git a/pkg_resources/_vendor/jaraco/text/__init__.py b/pkg_resources/_vendor/jaraco/text/__init__.py
index c466378ceb..0fabd0c3f0 100644
--- a/pkg_resources/_vendor/jaraco/text/__init__.py
+++ b/pkg_resources/_vendor/jaraco/text/__init__.py
@@ -6,10 +6,10 @@
 try:
     from importlib.resources import files  # type: ignore
 except ImportError:  # pragma: nocover
-    from pkg_resources.extern.importlib_resources import files  # type: ignore
+    from importlib_resources import files  # type: ignore
 
-from pkg_resources.extern.jaraco.functools import compose, method_cache
-from pkg_resources.extern.jaraco.context import ExceptionTrap
+from jaraco.functools import compose, method_cache
+from jaraco.context import ExceptionTrap
 
 
 def substitution(old, new):
@@ -66,7 +66,7 @@ class FoldedCase(str):
     >>> s in ["Hello World"]
     True
 
-    You may test for set inclusion, but candidate and elements
+    Allows testing for set inclusion, but candidate and elements
     must both be folded.
 
     >>> FoldedCase("Hello World") in {s}
@@ -92,37 +92,40 @@ class FoldedCase(str):
 
     >>> FoldedCase('hello') > FoldedCase('Hello')
     False
+
+    >>> FoldedCase('ß') == FoldedCase('ss')
+    True
     """
 
     def __lt__(self, other):
-        return self.lower() < other.lower()
+        return self.casefold() < other.casefold()
 
     def __gt__(self, other):
-        return self.lower() > other.lower()
+        return self.casefold() > other.casefold()
 
     def __eq__(self, other):
-        return self.lower() == other.lower()
+        return self.casefold() == other.casefold()
 
     def __ne__(self, other):
-        return self.lower() != other.lower()
+        return self.casefold() != other.casefold()
 
     def __hash__(self):
-        return hash(self.lower())
+        return hash(self.casefold())
 
     def __contains__(self, other):
-        return super().lower().__contains__(other.lower())
+        return super().casefold().__contains__(other.casefold())
 
     def in_(self, other):
         "Does self appear in other?"
         return self in FoldedCase(other)
 
-    # cache lower since it's likely to be called frequently.
+    # cache casefold since it's likely to be called frequently.
     @method_cache
-    def lower(self):
-        return super().lower()
+    def casefold(self):
+        return super().casefold()
 
     def index(self, sub):
-        return self.lower().index(sub.lower())
+        return self.casefold().index(sub.casefold())
 
     def split(self, splitter=' ', maxsplit=0):
         pattern = re.compile(re.escape(splitter), re.I)
@@ -224,9 +227,12 @@ def unwrap(s):
     return '\n'.join(cleaned)
 
 
+lorem_ipsum: str = (
+    files(__name__).joinpath('Lorem ipsum.txt').read_text(encoding='utf-8')
+)
 
 
-class Splitter(object):
+class Splitter:
     """object that will split a string with the given arguments for each call
 
     >>> s = Splitter(',')
@@ -276,7 +282,7 @@ class WordSet(tuple):
     >>> WordSet.parse("myABCClass")
     ('my', 'ABC', 'Class')
 
-    The result is a WordSet, so you can get the form you need.
+    The result is a WordSet, providing access to other forms.
 
     >>> WordSet.parse("myABCClass").underscore_separated()
     'my_ABC_Class'
@@ -363,7 +369,7 @@ def trim(self, item):
         return self.trim_left(item).trim_right(item)
 
     def __getitem__(self, item):
-        result = super(WordSet, self).__getitem__(item)
+        result = super().__getitem__(item)
         if isinstance(item, slice):
             result = WordSet(result)
         return result
@@ -578,7 +584,7 @@ def join_continuation(lines):
     ['foobarbaz']
 
     Not sure why, but...
-    The character preceeding the backslash is also elided.
+    The character preceding the backslash is also elided.
 
     >>> list(join_continuation(['goo\\', 'dly']))
     ['godly']
@@ -597,3 +603,22 @@ def join_continuation(lines):
             except StopIteration:
                 return
         yield item
+
+
+def read_newlines(filename, limit=1024):
+    r"""
+    >>> tmp_path = getfixture('tmp_path')
+    >>> filename = tmp_path / 'out.txt'
+    >>> _ = filename.write_text('foo\n', newline='', encoding='utf-8')
+    >>> read_newlines(filename)
+    '\n'
+    >>> _ = filename.write_text('foo\r\n', newline='', encoding='utf-8')
+    >>> read_newlines(filename)
+    '\r\n'
+    >>> _ = filename.write_text('foo\r\nbar\nbing\r', newline='', encoding='utf-8')
+    >>> read_newlines(filename)
+    ('\r', '\n', '\r\n')
+    """
+    with open(filename, encoding='utf-8') as fp:
+        fp.read(limit)
+    return fp.newlines
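
One subtlety in `read_newlines` above: `fp.newlines` is consulted after the `with` block has already closed the file, which works because a text-mode file object retains its `newlines` attribute after close. A minimal standalone sketch of the same pattern (illustrative only, not part of the patch):

    import os, tempfile

    with tempfile.NamedTemporaryFile('w', newline='', delete=False) as out:
        out.write('a\r\nb\n')
    with open(out.name, encoding='utf-8') as fp:
        fp.read()
    os.unlink(out.name)
    # The attribute survives close, which is what read_newlines relies on.
    print(fp.newlines)  # ('\n', '\r\n')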
diff --git a/pkg_resources/_vendor/jaraco/text/layouts.py b/pkg_resources/_vendor/jaraco/text/layouts.py
new file mode 100644
index 0000000000..9636f0f7b5
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/text/layouts.py
@@ -0,0 +1,25 @@
+qwerty = "-=qwertyuiop[]asdfghjkl;'zxcvbnm,./_+QWERTYUIOP{}ASDFGHJKL:\"ZXCVBNM<>?"
+dvorak = "[]',.pyfgcrl/=aoeuidhtns-;qjkxbmwvz{}\"<>PYFGCRL?+AOEUIDHTNS_:QJKXBMWVZ"
+
+
+to_dvorak = str.maketrans(qwerty, dvorak)
+to_qwerty = str.maketrans(dvorak, qwerty)
+
+
+def translate(input, translation):
+    """
+    >>> translate('dvorak', to_dvorak)
+    'ekrpat'
+    >>> translate('qwerty', to_qwerty)
+    'x,dokt'
+    """
+    return input.translate(translation)
+
+
+def _translate_stream(stream, translation):
+    """
+    >>> import io
+    >>> _translate_stream(io.StringIO('foo'), to_dvorak)
+    urr
+    """
+    print(translate(stream.read(), translation))
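
The two tables above are ordinary `str.maketrans` mappings between corresponding QWERTY and Dvorak key positions, so they are inverses of each other. A quick self-contained check (illustrative sketch; the layout strings are copied from `layouts.py` above):

    qwerty = "-=qwertyuiop[]asdfghjkl;'zxcvbnm,./_+QWERTYUIOP{}ASDFGHJKL:\"ZXCVBNM<>?"
    dvorak = "[]',.pyfgcrl/=aoeuidhtns-;qjkxbmwvz{}\"<>PYFGCRL?+AOEUIDHTNS_:QJKXBMWVZ"

    to_dvorak = str.maketrans(qwerty, dvorak)
    to_qwerty = str.maketrans(dvorak, qwerty)

    text = 'the quick brown fox'
    # Characters outside the table (here, spaces) pass through unchanged.
    assert text.translate(to_dvorak).translate(to_qwerty) == text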
diff --git a/pkg_resources/_vendor/jaraco/text/show-newlines.py b/pkg_resources/_vendor/jaraco/text/show-newlines.py
new file mode 100644
index 0000000000..e11d1ba428
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/text/show-newlines.py
@@ -0,0 +1,33 @@
+import autocommand
+import inflect
+
+from more_itertools import always_iterable
+
+import jaraco.text
+
+
+def report_newlines(filename):
+    r"""
+    Report the newlines in the indicated file.
+
+    >>> tmp_path = getfixture('tmp_path')
+    >>> filename = tmp_path / 'out.txt'
+    >>> _ = filename.write_text('foo\nbar\n', newline='', encoding='utf-8')
+    >>> report_newlines(filename)
+    newline is '\n'
+    >>> filename = tmp_path / 'out.txt'
+    >>> _ = filename.write_text('foo\nbar\r\n', newline='', encoding='utf-8')
+    >>> report_newlines(filename)
+    newlines are ('\n', '\r\n')
+    """
+    newlines = jaraco.text.read_newlines(filename)
+    count = len(tuple(always_iterable(newlines)))
+    engine = inflect.engine()
+    print(
+        engine.plural_noun("newline", count),
+        engine.plural_verb("is", count),
+        repr(newlines),
+    )
+
+
+autocommand.autocommand(__name__)(report_newlines)
diff --git a/pkg_resources/_vendor/jaraco/text/strip-prefix.py b/pkg_resources/_vendor/jaraco/text/strip-prefix.py
new file mode 100644
index 0000000000..761717a9b9
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/text/strip-prefix.py
@@ -0,0 +1,21 @@
+import sys
+
+import autocommand
+
+from jaraco.text import Stripper
+
+
+def strip_prefix():
+    r"""
+    Strip any common prefix from stdin.
+
+    >>> import io, pytest
+    >>> getfixture('monkeypatch').setattr('sys.stdin', io.StringIO('abcdef\nabc123'))
+    >>> strip_prefix()
+    def
+    123
+    """
+    sys.stdout.writelines(Stripper.strip_prefix(sys.stdin).lines)
+
+
+autocommand.autocommand(__name__)(strip_prefix)
diff --git a/pkg_resources/_vendor/jaraco/text/to-dvorak.py b/pkg_resources/_vendor/jaraco/text/to-dvorak.py
new file mode 100644
index 0000000000..a6d5da80b3
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/text/to-dvorak.py
@@ -0,0 +1,6 @@
+import sys
+
+from . import layouts
+
+
+__name__ == '__main__' and layouts._translate_stream(sys.stdin, layouts.to_dvorak)
diff --git a/pkg_resources/_vendor/jaraco/text/to-qwerty.py b/pkg_resources/_vendor/jaraco/text/to-qwerty.py
new file mode 100644
index 0000000000..abe2728662
--- /dev/null
+++ b/pkg_resources/_vendor/jaraco/text/to-qwerty.py
@@ -0,0 +1,6 @@
+import sys
+
+from . import layouts
+
+
+__name__ == '__main__' and layouts._translate_stream(sys.stdin, layouts.to_qwerty)
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
deleted file mode 100644
index 2ce6e4a6f5..0000000000
--- a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/RECORD
+++ /dev/null
@@ -1,15 +0,0 @@
-more_itertools-10.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-more_itertools-10.2.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
-more_itertools-10.2.0.dist-info/METADATA,sha256=lTIPxfD4IiP6aHzPjP4dXmzRRUmiXicAB6qnY82T-Gs,34886
-more_itertools-10.2.0.dist-info/RECORD,,
-more_itertools-10.2.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
-more_itertools/__init__.py,sha256=VodgFyRJvpnHbAMgseYRiP7r928FFOAakmQrl6J88os,149
-more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
-more_itertools/__pycache__/__init__.cpython-312.pyc,,
-more_itertools/__pycache__/more.cpython-312.pyc,,
-more_itertools/__pycache__/recipes.cpython-312.pyc,,
-more_itertools/more.py,sha256=jYdpbgXHf8yZDByPrhluxpe0D_IXRk2tfQnyfOFMi74,143045
-more_itertools/more.pyi,sha256=KTHYeqr0rFbn1GWRnv0jY64JRNnKKT0kA3kmsah8DYQ,21044
-more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-more_itertools/recipes.py,sha256=Rb3OhzJTCn2biutDEUSImbuY-8NDS1lkHt0My-uCOf4,27548
-more_itertools/recipes.pyi,sha256=T1IuEVXCqw2NeJJNW036MtWi8BVfR8Ilpf7cBmvhBaQ,4436
diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/INSTALLER b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/platformdirs-2.6.2.dist-info/INSTALLER
rename to pkg_resources/_vendor/more_itertools-10.3.0.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/LICENSE b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/more_itertools-10.2.0.dist-info/LICENSE
rename to pkg_resources/_vendor/more_itertools-10.3.0.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/METADATA b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/METADATA
similarity index 95%
rename from pkg_resources/_vendor/more_itertools-10.2.0.dist-info/METADATA
rename to pkg_resources/_vendor/more_itertools-10.3.0.dist-info/METADATA
index f54f1ff279..fb41b0cfe6 100644
--- a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/METADATA
+++ b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: more-itertools
-Version: 10.2.0
+Version: 10.3.0
 Summary: More routines for operating on iterables, beyond itertools
 Keywords: itertools,iterator,iteration,filter,peek,peekable,chunk,chunked
 Author-email: Erik Rose 
@@ -87,8 +87,6 @@ Python iterables.
 |                        | `zip_offset `_,                                                                         |
 |                        | `zip_equal `_,                                                                           |
 |                        | `zip_broadcast `_,                                                                   |
-|                        | `dotproduct `_,                                                                         |
-|                        | `convolve `_,                                                                             |
 |                        | `flatten `_,                                                                               |
 |                        | `roundrobin `_,                                                                         |
 |                        | `prepend `_,                                                                               |
@@ -101,6 +99,7 @@ Python iterables.
 |                        | `consecutive_groups `_,                                                         |
 |                        | `run_length `_,                                                                         |
 |                        | `map_reduce `_,                                                                         |
+|                        | `join_mappings `_,                                                                   |
 |                        | `exactly_n `_,                                                                           |
 |                        | `is_sorted `_,                                                                           |
 |                        | `all_equal `_,                                                                           |
@@ -131,12 +130,26 @@ Python iterables.
 |                        | `tail `_,                                                                                     |
 |                        | `unique_everseen `_,                                                               |
 |                        | `unique_justseen `_,                                                               |
+|                        | `unique `_,                                                                                 |
 |                        | `duplicates_everseen `_,                                                       |
 |                        | `duplicates_justseen `_,                                                       |
 |                        | `classify_unique `_,                                                               |
 |                        | `longest_common_prefix `_,                                                   |
 |                        | `takewhile_inclusive `_                                                        |
 +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Math                   | `dft `_,                                                                                       |
+|                        | `idft `_,                                                                                     |
+|                        | `convolve `_,                                                                             |
+|                        | `dotproduct `_,                                                                         |
+|                        | `factor `_,                                                                                 |
+|                        | `matmul `_,                                                                                 |
+|                        | `polynomial_from_roots `_,                                                   |
+|                        | `polynomial_derivative `_,                                                   |
+|                        | `polynomial_eval `_,                                                               |
+|                        | `sieve `_,                                                                                   |
+|                        | `sum_of_squares `_,                                                                 |
+|                        | `totient `_                                                                                |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
 | Combinatorics          | `distinct_permutations `_,                                                   |
 |                        | `distinct_combinations `_,                                                   |
 |                        | `circular_shifts `_,                                                               |
@@ -149,6 +162,7 @@ Python iterables.
 |                        | `gray_product  `_,                                                                    |
 |                        | `outer_product  `_,                                                                  |
 |                        | `powerset `_,                                                                             |
+|                        | `powerset_of_sets `_,                                                             |
 |                        | `random_product `_,                                                                 |
 |                        | `random_permutation `_,                                                         |
 |                        | `random_combination `_,                                                         |
@@ -180,15 +194,8 @@ Python iterables.
 |                        | `consume `_,                                                                               |
 |                        | `tabulate `_,                                                                             |
 |                        | `repeatfunc `_,                                                                         |
-|                        | `polynomial_from_roots `_,                                                   |
-|                        | `polynomial_eval `_,                                                               |
-|                        | `polynomial_derivative `_,                                                   |
-|                        | `sieve `_,                                                                                   |
-|                        | `factor `_,                                                                                 |
-|                        | `matmul `_,                                                                                 |
-|                        | `sum_of_squares `_,                                                                 |
-|                        | `totient `_,                                                                               |
 |                        | `reshape `_                                                                                |
+|                        | `doublestarmap `_                                                                    |
 +------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
 
 
diff --git a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/RECORD
new file mode 100644
index 0000000000..53183bfb30
--- /dev/null
+++ b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/RECORD
@@ -0,0 +1,15 @@
+more_itertools-10.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+more_itertools-10.3.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
+more_itertools-10.3.0.dist-info/METADATA,sha256=BFO90O-fLNiVQMpj7oIS5ztzgJUUQZ3TA32P5HH3N-A,36293
+more_itertools-10.3.0.dist-info/RECORD,,
+more_itertools-10.3.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81
+more_itertools/__init__.py,sha256=dtAbGjTDmn_ghiU5YXfhyDy0phAlXVdt5klZA5fUa-Q,149
+more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
+more_itertools/__pycache__/__init__.cpython-312.pyc,,
+more_itertools/__pycache__/more.cpython-312.pyc,,
+more_itertools/__pycache__/recipes.cpython-312.pyc,,
+more_itertools/more.py,sha256=1E5kzFncRKTDw0cYv1yRXMgDdunstLQd1QStcnL6U90,148370
+more_itertools/more.pyi,sha256=iXXeqt48Nxe8VGmIWpkVXuKpR2FYNuu2DU8nQLWCCu0,21484
+more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+more_itertools/recipes.py,sha256=WedhhfhGVgr6zii8fIbGJVmRTw0ZKRiLKnYBDGJv4nY,28591
+more_itertools/recipes.pyi,sha256=T_mdGpcFdfrP3JSWbwzYP9JyNV-Go-7RPfpxfftAWlA,4617
diff --git a/pkg_resources/_vendor/more_itertools-10.2.0.dist-info/WHEEL b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/WHEEL
similarity index 100%
rename from pkg_resources/_vendor/more_itertools-10.2.0.dist-info/WHEEL
rename to pkg_resources/_vendor/more_itertools-10.3.0.dist-info/WHEEL
diff --git a/pkg_resources/_vendor/more_itertools/__init__.py b/pkg_resources/_vendor/more_itertools/__init__.py
index aff94a9abd..9c4662fc31 100644
--- a/pkg_resources/_vendor/more_itertools/__init__.py
+++ b/pkg_resources/_vendor/more_itertools/__init__.py
@@ -3,4 +3,4 @@
 from .more import *  # noqa
 from .recipes import *  # noqa
 
-__version__ = '10.2.0'
+__version__ = '10.3.0'
diff --git a/pkg_resources/_vendor/more_itertools/more.py b/pkg_resources/_vendor/more_itertools/more.py
index d0957681f5..7b481907da 100755
--- a/pkg_resources/_vendor/more_itertools/more.py
+++ b/pkg_resources/_vendor/more_itertools/more.py
@@ -1,3 +1,4 @@
+import math
 import warnings
 
 from collections import Counter, defaultdict, deque, abc
@@ -6,6 +7,7 @@
 from heapq import heapify, heapreplace, heappop
 from itertools import (
     chain,
+    combinations,
     compress,
     count,
     cycle,
@@ -19,7 +21,7 @@
     zip_longest,
     product,
 )
-from math import exp, factorial, floor, log, perm, comb
+from math import comb, e, exp, factorial, floor, fsum, log, perm, tau
 from queue import Empty, Queue
 from random import random, randrange, uniform
 from operator import itemgetter, mul, sub, gt, lt, ge, le
@@ -61,11 +63,13 @@
     'consumer',
     'count_cycle',
     'countable',
+    'dft',
     'difference',
     'distinct_combinations',
     'distinct_permutations',
     'distribute',
     'divide',
+    'doublestarmap',
     'duplicates_everseen',
     'duplicates_justseen',
     'classify_unique',
@@ -77,6 +81,7 @@
     'groupby_transform',
     'ichunked',
     'iequals',
+    'idft',
     'ilen',
     'interleave',
     'interleave_evenly',
@@ -86,6 +91,7 @@
     'islice_extended',
     'iterate',
     'iter_suppress',
+    'join_mappings',
     'last',
     'locate',
     'longest_common_prefix',
@@ -109,6 +115,7 @@
     'partitions',
     'peekable',
     'permutation_index',
+    'powerset_of_sets',
     'product_index',
     'raise_',
     'repeat_each',
@@ -148,6 +155,9 @@
     'zip_offset',
 ]
 
+# math.sumprod is available for Python 3.12+
+_fsumprod = getattr(math, 'sumprod', lambda x, y: fsum(map(mul, x, y)))
+
 
 def chunked(iterable, n, strict=False):
     """Break *iterable* into lists of length *n*:
@@ -550,10 +560,10 @@ def one(iterable, too_short=None, too_long=None):
 
     try:
         first_value = next(it)
-    except StopIteration as e:
+    except StopIteration as exc:
         raise (
             too_short or ValueError('too few items in iterable (expected 1)')
-        ) from e
+        ) from exc
 
     try:
         second_value = next(it)
@@ -840,26 +850,31 @@ def windowed(seq, n, fillvalue=None, step=1):
     if n < 0:
         raise ValueError('n must be >= 0')
     if n == 0:
-        yield tuple()
+        yield ()
         return
     if step < 1:
         raise ValueError('step must be >= 1')
 
-    window = deque(maxlen=n)
-    i = n
-    for _ in map(window.append, seq):
-        i -= 1
-        if not i:
-            i = step
-            yield tuple(window)
-
-    size = len(window)
-    if size == 0:
+    iterable = iter(seq)
+
+    # Generate first window
+    window = deque(islice(iterable, n), maxlen=n)
+
+    # Deal with the first window not being full
+    if not window:
+        return
+    if len(window) < n:
+        yield tuple(window) + ((fillvalue,) * (n - len(window)))
         return
-    elif size < n:
-        yield tuple(chain(window, repeat(fillvalue, n - size)))
-    elif 0 < i < min(step, n):
-        window += (fillvalue,) * i
+    yield tuple(window)
+
+    # Create the filler for the next windows. The padding ensures
+    # we have just enough elements to fill the last window.
+    padding = (fillvalue,) * (n - 1 if step >= n else step - 1)
+    filler = map(window.append, chain(iterable, padding))
+
+    # Generate the rest of the windows
+    for _ in islice(filler, step - 1, None, step):
         yield tuple(window)
 
 
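
The rewritten `windowed` above builds the first window with `islice`, then lets `map(window.append, ...)` slide the deque forward, stepping through that filler with `islice(filler, step - 1, None, step)`; the `padding` tuple guarantees the final window can be completed with *fillvalue*. The observable behavior is unchanged (illustrative sketch):

    from more_itertools import windowed

    print(list(windowed([1, 2, 3, 4, 5], 3)))
    # [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
    print(list(windowed([1, 2, 3, 4], 3, step=2)))
    # [(1, 2, 3), (3, 4, None)]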
@@ -1151,8 +1166,8 @@ def interleave_evenly(iterables, lengths=None):
 
         # those iterables for which the error is negative are yielded
         # ("diagonal step" in Bresenham)
-        for i, e in enumerate(errors):
-            if e < 0:
+        for i, e_ in enumerate(errors):
+            if e_ < 0:
                 yield next(iters_secondary[i])
                 to_yield -= 1
                 errors[i] += delta_primary
@@ -1184,26 +1199,38 @@ def collapse(iterable, base_type=None, levels=None):
     ['a', ['b'], 'c', ['d']]
 
     """
+    stack = deque()
+    # Add our first node group, treat the iterable as a single node
+    stack.appendleft((0, repeat(iterable, 1)))
 
-    def walk(node, level):
-        if (
-            ((levels is not None) and (level > levels))
-            or isinstance(node, (str, bytes))
-            or ((base_type is not None) and isinstance(node, base_type))
-        ):
-            yield node
-            return
+    while stack:
+        node_group = stack.popleft()
+        level, nodes = node_group
 
-        try:
-            tree = iter(node)
-        except TypeError:
-            yield node
-            return
-        else:
-            for child in tree:
-                yield from walk(child, level + 1)
+        # Check if beyond max level
+        if levels is not None and level > levels:
+            yield from nodes
+            continue
 
-    yield from walk(iterable, 0)
+        for node in nodes:
+            # Check if done iterating
+            if isinstance(node, (str, bytes)) or (
+                (base_type is not None) and isinstance(node, base_type)
+            ):
+                yield node
+            # Otherwise try to create child nodes
+            else:
+                try:
+                    tree = iter(node)
+                except TypeError:
+                    yield node
+                else:
+                    # Save our current location
+                    stack.appendleft(node_group)
+                    # Append the new child node
+                    stack.appendleft((level + 1, tree))
+                    # Break to process child node
+                    break
 
 
 def side_effect(func, iterable, chunk_size=None, before=None, after=None):
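
The iterative `collapse` above replaces the recursive `walk` with an explicit stack of `(level, iterator)` pairs, so deeply nested inputs no longer run into Python's recursion limit; pushing the current node group back before the child preserves depth-first order. Usage is unchanged (illustrative sketch):

    from more_itertools import collapse

    nested = [[1, 2], [[3], [[4, 5]]]]
    print(list(collapse(nested)))            # [1, 2, 3, 4, 5]
    print(list(collapse(nested, levels=1)))  # [1, 2, [3], [[4, 5]]]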
@@ -1516,28 +1543,41 @@ def padded(iterable, fillvalue=None, n=None, next_multiple=False):
         [1, 2, 3, '?', '?']
 
     If *next_multiple* is ``True``, *fillvalue* will be emitted until the
-    number of items emitted is a multiple of *n*::
+    number of items emitted is a multiple of *n*:
 
         >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
         [1, 2, 3, 4, None, None]
 
     If *n* is ``None``, *fillvalue* will be emitted indefinitely.
 
+    To create an *iterable* of exactly size *n*, you can truncate with
+    :func:`islice`.
+
+        >>> list(islice(padded([1, 2, 3], '?'), 5))
+        [1, 2, 3, '?', '?']
+        >>> list(islice(padded([1, 2, 3, 4, 5, 6, 7, 8], '?'), 5))
+        [1, 2, 3, 4, 5]
+
     """
-    it = iter(iterable)
+    iterable = iter(iterable)
+    iterable_with_repeat = chain(iterable, repeat(fillvalue))
+
     if n is None:
-        yield from chain(it, repeat(fillvalue))
+        return iterable_with_repeat
     elif n < 1:
         raise ValueError('n must be at least 1')
-    else:
-        item_count = 0
-        for item in it:
-            yield item
-            item_count += 1
+    elif next_multiple:
 
-        remaining = (n - item_count) % n if next_multiple else n - item_count
-        for _ in range(remaining):
-            yield fillvalue
+        def slice_generator():
+            for first in iterable:
+                yield (first,)
+                yield islice(iterable_with_repeat, n - 1)
+
+        # While elements exist produce slices of size n
+        return chain.from_iterable(slice_generator())
+    else:
+        # Ensure the first batch is at least size n then iterate
+        return chain(islice(iterable_with_repeat, n), iterable)
 
 
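
`padded` now returns a lazily assembled iterator instead of running a counting loop: the plain case is `chain(islice(...), iterable)`, and the `next_multiple` case chains size-*n* slices off an endlessly padded source for as long as real elements keep arriving. Behavior is preserved (illustrative sketch):

    from more_itertools import padded

    print(list(padded([1, 2, 3], '?', 5)))
    # [1, 2, 3, '?', '?']
    print(list(padded([1, 2, 3, 4], n=3, next_multiple=True)))
    # [1, 2, 3, 4, None, None]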
 def repeat_each(iterable, n=2):
@@ -1592,7 +1632,9 @@ def distribute(n, iterable):
         [[1], [2], [3], [], []]
 
     This function uses :func:`itertools.tee` and may require significant
-    storage. If you need the order items in the smaller iterables to match the
+    storage.
+
+    If you need the order of items in the smaller iterables to match the
     original iterable, see :func:`divide`.
 
     """
@@ -1840,9 +1882,9 @@ def divide(n, iterable):
         >>> [list(c) for c in children]
         [[1], [2], [3], [], []]
 
-    This function will exhaust the iterable before returning and may require
-    significant storage. If order is not important, see :func:`distribute`,
-    which does not first pull the iterable into memory.
+    This function will exhaust the iterable before returning.
+    If order is not important, see :func:`distribute`, which does not first
+    pull the iterable into memory.
 
     """
     if n < 1:
@@ -3296,25 +3338,38 @@ def only(iterable, default=None, too_long=None):
     return first_value
 
 
-class _IChunk:
-    def __init__(self, iterable, n):
-        self._it = islice(iterable, n)
-        self._cache = deque()
+def _ichunk(iterable, n):
+    cache = deque()
+    chunk = islice(iterable, n)
+
+    def generator():
+        while True:
+            if cache:
+                yield cache.popleft()
+            else:
+                try:
+                    item = next(chunk)
+                except StopIteration:
+                    return
+                else:
+                    yield item
 
-    def fill_cache(self):
-        self._cache.extend(self._it)
+    def materialize_next(n=1):
+        # if n is not specified, materialize everything
+        if n is None:
+            cache.extend(chunk)
+            return len(cache)
 
-    def __iter__(self):
-        return self
+        to_cache = n - len(cache)
 
-    def __next__(self):
-        try:
-            return next(self._it)
-        except StopIteration:
-            if self._cache:
-                return self._cache.popleft()
-            else:
-                raise
+        # materialize up to n
+        if to_cache > 0:
+            cache.extend(islice(chunk, to_cache))
+
+        # return number materialized up to n
+        return min(n, len(cache))
+
+    return (generator(), materialize_next)
 
 
 def ichunked(iterable, n):
@@ -3338,19 +3393,19 @@ def ichunked(iterable, n):
     [8, 9, 10, 11]
 
     """
-    source = peekable(iter(iterable))
-    ichunk_marker = object()
+    iterable = iter(iterable)
     while True:
+        # Create new chunk
+        chunk, materialize_next = _ichunk(iterable, n)
+
         # Check to see whether we're at the end of the source iterable
-        item = source.peek(ichunk_marker)
-        if item is ichunk_marker:
+        if not materialize_next():
             return
 
-        chunk = _IChunk(source, n)
         yield chunk
 
-        # Advance the source iterable and fill previous chunk's cache
-        chunk.fill_cache()
+        # Fill previous chunk's cache
+        materialize_next(None)
 
 
 def iequals(*iterables):
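
`_ichunk` above returns a pair: a generator that drains the chunk's cache before pulling from the shared source, and a `materialize_next` callable that `ichunked` uses both to probe for end of input (`materialize_next()` returning 0) and to stash a finished chunk's remaining items (`materialize_next(None)`) before moving on. Out-of-order consumption still works (illustrative sketch):

    from more_itertools import ichunked

    first, second, third = ichunked(range(8), 3)
    # Skipped-over items are cached, so chunks may be read in any order.
    print(list(second))  # [3, 4, 5]
    print(list(first))   # [0, 1, 2]
    print(list(third))   # [6, 7]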
@@ -3666,7 +3721,8 @@ def __init__(self, func, callback_kwd='callback', wait_seconds=0.1):
         self._wait_seconds = wait_seconds
         # Lazily import concurrent.future
         self._executor = __import__(
-        ).futures.__import__("concurrent.futures").futures.ThreadPoolExecutor(max_workers=1)
+            'concurrent.futures'
+        ).futures.ThreadPoolExecutor(max_workers=1)
         self._iterator = self._reader()
 
     def __enter__(self):
@@ -3863,6 +3919,7 @@ def nth_permutation(iterable, r, index):
         raise ValueError
     else:
         c = perm(n, r)
+    assert c > 0  # factorial(n)>0, and r<=n so perm(n,r) is never zero
@@ ... @@ def value_chain(*args):
         >>> list(value_chain('12', '34', ['56', '78']))
         ['12', '34', '56', '78']
 
+    Pre- or postpend a single element to an iterable:
+
+        >>> list(value_chain(1, [2, 3, 4, 5, 6]))
+        [1, 2, 3, 4, 5, 6]
+        >>> list(value_chain([1, 2, 3, 4, 5], 6))
+        [1, 2, 3, 4, 5, 6]
 
     Multiple levels of nesting are not flattened.
 
@@ -4153,53 +4213,41 @@ def chunked_even(iterable, n):
     [[1, 2, 3], [4, 5, 6], [7]]
 
     """
+    iterable = iter(iterable)
 
-    len_method = getattr(iterable, '__len__', None)
-
-    if len_method is None:
-        return _chunked_even_online(iterable, n)
-    else:
-        return _chunked_even_finite(iterable, len_method(), n)
-
-
-def _chunked_even_online(iterable, n):
-    buffer = []
-    maxbuf = n + (n - 2) * (n - 1)
-    for x in iterable:
-        buffer.append(x)
-        if len(buffer) == maxbuf:
-            yield buffer[:n]
-            buffer = buffer[n:]
-    yield from _chunked_even_finite(buffer, len(buffer), n)
+    # Initialize a buffer to process the chunks while keeping
+    # some back to fill any underfilled chunks
+    min_buffer = (n - 1) * (n - 2)
+    buffer = list(islice(iterable, min_buffer))
 
+    # Append items until we have a completed chunk
+    for _ in islice(map(buffer.append, iterable), n, None, n):
+        yield buffer[:n]
+        del buffer[:n]
 
-def _chunked_even_finite(iterable, N, n):
-    if N < 1:
+    # Check if any chunks need additional processing
+    if not buffer:
         return
+    length = len(buffer)
 
-    # Lists are either size `full_size <= n` or `partial_size = full_size - 1`
-    q, r = divmod(N, n)
+    # Chunks are either size `full_size <= n` or `partial_size = full_size - 1`
+    q, r = divmod(length, n)
     num_lists = q + (1 if r > 0 else 0)
-    q, r = divmod(N, num_lists)
+    q, r = divmod(length, num_lists)
     full_size = q + (1 if r > 0 else 0)
     partial_size = full_size - 1
-    num_full = N - partial_size * num_lists
-    num_partial = num_lists - num_full
+    num_full = length - partial_size * num_lists
 
-    # Yield num_full lists of full_size
+    # Yield chunks of full size
     partial_start_idx = num_full * full_size
     if full_size > 0:
         for i in range(0, partial_start_idx, full_size):
-            yield list(islice(iterable, i, i + full_size))
+            yield buffer[i : i + full_size]
 
-    # Yield num_partial lists of partial_size
+    # Yield chunks of partial size
     if partial_size > 0:
-        for i in range(
-            partial_start_idx,
-            partial_start_idx + (num_partial * partial_size),
-            partial_size,
-        ):
-            yield list(islice(iterable, i, i + partial_size))
+        for i in range(partial_start_idx, length, partial_size):
+            yield buffer[i : i + partial_size]
 
 
 def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
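
The new `chunked_even` streams full chunks while holding back a buffer of at least `(n - 1) * (n - 2)` items; that reserve is always enough to rebalance whatever remains into chunks of size `full_size` or `full_size - 1`. Results match the previous implementation (illustrative sketch):

    from more_itertools import chunked_even

    print(list(chunked_even(range(10), 4)))
    # [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]
    print(list(chunked_even(range(8), 4)))
    # [[0, 1, 2, 3], [4, 5, 6, 7]]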
@@ -4418,12 +4466,12 @@ def minmax(iterable_or_value, *others, key=None, default=_marker):
 
     try:
         lo = hi = next(it)
-    except StopIteration as e:
+    except StopIteration as exc:
         if default is _marker:
             raise ValueError(
                 '`minmax()` argument is an empty iterable. '
                 'Provide a `default` value to suppress this error.'
-            ) from e
+            ) from exc
         return default
 
     # Different branches depending on the presence of key. This saves a lot
@@ -4653,3 +4701,106 @@ def filter_map(func, iterable):
         y = func(x)
         if y is not None:
             yield y
+
+
+def powerset_of_sets(iterable):
+    """Yields all possible subsets of the iterable.
+
+        >>> list(powerset_of_sets([1, 2, 3]))  # doctest: +SKIP
+        [set(), {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}]
+        >>> list(powerset_of_sets([1, 1, 0]))  # doctest: +SKIP
+        [set(), {1}, {0}, {0, 1}]
+
+    :func:`powerset_of_sets` takes care to minimize the number
+    of hash operations performed.
+    """
+    sets = tuple(map(set, dict.fromkeys(map(frozenset, zip(iterable)))))
+    for r in range(len(sets) + 1):
+        yield from starmap(set().union, combinations(sets, r))
+
+
+def join_mappings(**field_to_map):
+    """
+    Joins multiple mappings together using their common keys.
+
+    >>> user_scores = {'elliot': 50, 'claris': 60}
+    >>> user_times = {'elliot': 30, 'claris': 40}
+    >>> join_mappings(score=user_scores, time=user_times)
+    {'elliot': {'score': 50, 'time': 30}, 'claris': {'score': 60, 'time': 40}}
+    """
+    ret = defaultdict(dict)
+
+    for field_name, mapping in field_to_map.items():
+        for key, value in mapping.items():
+            ret[key][field_name] = value
+
+    return dict(ret)
+
+
+def _complex_sumprod(v1, v2):
+    """High precision sumprod() for complex numbers.
+    Used by :func:`dft` and :func:`idft`.
+    """
+
+    r1 = chain((p.real for p in v1), (-p.imag for p in v1))
+    r2 = chain((q.real for q in v2), (q.imag for q in v2))
+    i1 = chain((p.real for p in v1), (p.imag for p in v1))
+    i2 = chain((q.imag for q in v2), (q.real for q in v2))
+    return complex(_fsumprod(r1, r2), _fsumprod(i1, i2))
+
+
+def dft(xarr):
+    """Discrete Fourier Tranform. *xarr* is a sequence of complex numbers.
+    Yields the components of the corresponding transformed output vector.
+
+    >>> import cmath
+    >>> xarr = [1, 2-1j, -1j, -1+2j]
+    >>> Xarr = [2, -2-2j, -2j, 4+4j]
+    >>> all(map(cmath.isclose, dft(xarr), Xarr))
+    True
+
+    See :func:`idft` for the inverse Discrete Fourier Transform.
+    """
+    N = len(xarr)
+    roots_of_unity = [e ** (n / N * tau * -1j) for n in range(N)]
+    for k in range(N):
+        coeffs = [roots_of_unity[k * n % N] for n in range(N)]
+        yield _complex_sumprod(xarr, coeffs)
+
+
+def idft(Xarr):
+    """Inverse Discrete Fourier Tranform. *Xarr* is a sequence of
+    complex numbers. Yields the components of the corresponding
+    inverse-transformed output vector.
+
+    >>> import cmath
+    >>> xarr = [1, 2-1j, -1j, -1+2j]
+    >>> Xarr = [2, -2-2j, -2j, 4+4j]
+    >>> all(map(cmath.isclose, idft(Xarr), xarr))
+    True
+
+    See :func:`dft` for the Discrete Fourier Transform.
+    """
+    N = len(Xarr)
+    roots_of_unity = [e ** (n / N * tau * 1j) for n in range(N)]
+    for k in range(N):
+        coeffs = [roots_of_unity[k * n % N] for n in range(N)]
+        yield _complex_sumprod(Xarr, coeffs) / N
+
+
+def doublestarmap(func, iterable):
+    """Apply *func* to every item of *iterable* by dictionary unpacking
+    the item into *func*.
+
+    The difference between :func:`itertools.starmap` and :func:`doublestarmap`
+    parallels the distinction between ``func(*a)`` and ``func(**a)``.
+
+    >>> iterable = [{'a': 1, 'b': 2}, {'a': 40, 'b': 60}]
+    >>> list(doublestarmap(lambda a, b: a + b, iterable))
+    [3, 100]
+
+    ``TypeError`` will be raised if *func*'s signature doesn't match the
+    mapping contained in *iterable* or if *iterable* does not contain mappings.
+    """
+    for item in iterable:
+        yield func(**item)
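
`_complex_sumprod`, `dft`, and `idft` together give an O(n²) discrete Fourier transform pair whose real and imaginary parts are each accumulated with a single high-precision sumprod over interleaved components. The two transforms invert each other (illustrative sketch, not part of the patch):

    import cmath
    from more_itertools import dft, idft

    xarr = [1, 2 - 1j, -1j, -1 + 2j]
    roundtrip = idft(list(dft(xarr)))  # idft needs a sequence, hence list()
    assert all(cmath.isclose(a, b) for a, b in zip(roundtrip, xarr))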
diff --git a/pkg_resources/_vendor/more_itertools/more.pyi b/pkg_resources/_vendor/more_itertools/more.pyi
index 9a5fc911a3..e946023259 100644
--- a/pkg_resources/_vendor/more_itertools/more.pyi
+++ b/pkg_resources/_vendor/more_itertools/more.pyi
@@ -1,4 +1,5 @@
 """Stubs for more_itertools.more"""
+
 from __future__ import annotations
 
 from types import TracebackType
@@ -9,8 +10,9 @@ from typing import (
     ContextManager,
     Generic,
     Hashable,
     Iterable,
     Iterator,
+    Mapping,
     overload,
     Reversible,
     Sequence,
@@ -602,6 +605,7 @@ class countable(Generic[_T], Iterator[_T]):
     def __init__(self, iterable: Iterable[_T]) -> None: ...
     def __iter__(self) -> countable[_T]: ...
     def __next__(self) -> _T: ...
+    items_seen: int
 
 def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ...
 def zip_broadcast(
@@ -693,3 +697,13 @@ def filter_map(
     func: Callable[[_T], _V | None],
     iterable: Iterable[_T],
 ) -> Iterator[_V]: ...
+def powerset_of_sets(iterable: Iterable[_T]) -> Iterator[set[_T]]: ...
+def join_mappings(
+    **field_to_map: Mapping[_T, _V]
+) -> dict[_T, dict[str, _V]]: ...
+def doublestarmap(
+    func: Callable[..., _T],
+    iterable: Iterable[Mapping[str, Any]],
+) -> Iterator[_T]: ...
+def dft(xarr: Sequence[complex]) -> Iterator[complex]: ...
+def idft(Xarr: Sequence[complex]) -> Iterator[complex]: ...
diff --git a/pkg_resources/_vendor/more_itertools/recipes.py b/pkg_resources/_vendor/more_itertools/recipes.py
index 145e3cb5bd..b32fa95533 100644
--- a/pkg_resources/_vendor/more_itertools/recipes.py
+++ b/pkg_resources/_vendor/more_itertools/recipes.py
@@ -7,6 +7,7 @@
 .. [1] http://docs.python.org/library/itertools.html#recipes
 
 """
+
 import math
 import operator
 
@@ -74,6 +75,7 @@
     'totient',
     'transpose',
     'triplewise',
+    'unique',
     'unique_everseen',
     'unique_justseen',
 ]
@@ -198,7 +200,7 @@ def nth(iterable, n, default=None):
     return next(islice(iterable, n, None), default)
 
 
-def all_equal(iterable):
+def all_equal(iterable, key=None):
     """
     Returns ``True`` if all the elements are equal to each other.
 
@@ -207,9 +209,16 @@ def all_equal(iterable):
         >>> all_equal('aaab')
         False
 
+    A function that accepts a single argument and returns a transformed version
+    of each input item can be specified with *key*:
+
+        >>> all_equal('AaaA', key=str.casefold)
+        True
+        >>> all_equal([1, 2, 3], key=lambda x: x < 10)
+        True
+
     """
-    g = groupby(iterable)
-    return next(g, True) and not next(g, False)
+    return len(list(islice(groupby(iterable, key), 2))) <= 1
 
 
 def quantify(iterable, pred=bool):
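
The new one-line body of `all_equal` exploits `groupby`: consecutive items with the same (optionally *key*-transformed) value form one group, so the iterable is all-equal exactly when at most one group exists, and `islice(..., 2)` stops the scan as soon as a second group appears. The same idea in isolation (illustrative sketch):

    from itertools import groupby, islice

    def all_equal_sketch(iterable, key=None):
        # More than one group means at least two distinct key values.
        return len(list(islice(groupby(iterable, key), 2))) <= 1

    assert all_equal_sketch('AaaA', key=str.casefold)
    assert not all_equal_sketch('aaab')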
@@ -410,16 +419,11 @@ def roundrobin(*iterables):
     iterables is small).
 
     """
-    # Recipe credited to George Sakkis
-    pending = len(iterables)
-    nexts = cycle(iter(it).__next__ for it in iterables)
-    while pending:
-        try:
-            for next in nexts:
-                yield next()
-        except StopIteration:
-            pending -= 1
-            nexts = cycle(islice(nexts, pending))
+    # Algorithm credited to George Sakkis
+    iterators = map(iter, iterables)
+    for num_active in range(len(iterables), 0, -1):
+        iterators = cycle(islice(iterators, num_active))
+        yield from map(next, iterators)
 
 
 def partition(pred, iterable):
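
The rebuilt `roundrobin` is the Sakkis recipe in its modern form: when one input is exhausted, its `StopIteration` ends the inner `yield from`, and the cycle is rebuilt over the remaining `num_active` iterators. Interleaving order is unchanged (illustrative sketch):

    from more_itertools import roundrobin

    print(list(roundrobin('ABC', 'D', 'EF')))
    # ['A', 'D', 'E', 'B', 'F', 'C']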
@@ -458,16 +462,14 @@ def powerset(iterable):
 
     :func:`powerset` will operate on iterables that aren't :class:`set`
     instances, so repeated elements in the input will produce repeated elements
-    in the output. Use :func:`unique_everseen` on the input to avoid generating
-    duplicates:
+    in the output.
 
         >>> seq = [1, 1, 0]
         >>> list(powerset(seq))
         [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
-        >>> from more_itertools import unique_everseen
-        >>> list(powerset(unique_everseen(seq)))
-        [(), (1,), (0,), (1, 0)]
 
+    For a variant that efficiently yields actual :class:`set` instances, see
+    :func:`powerset_of_sets`.
     """
     s = list(iterable)
     return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
@@ -533,6 +535,25 @@ def unique_justseen(iterable, key=None):
     return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
 
 
+def unique(iterable, key=None, reverse=False):
+    """Yields unique elements in sorted order.
+
+    >>> list(unique([[1, 2], [3, 4], [1, 2]]))
+    [[1, 2], [3, 4]]
+
+    *key* and *reverse* are passed to :func:`sorted`.
+
+    >>> list(unique('ABBcCAD', str.casefold))
+    ['A', 'B', 'c', 'D']
+    >>> list(unique('ABBcCAD', str.casefold, reverse=True))
+    ['D', 'c', 'B', 'A']
+
+    The elements in *iterable* need not be hashable, but they must be
+    comparable for sorting to work.
+    """
+    return unique_justseen(sorted(iterable, key=key, reverse=reverse), key=key)
+
+
 def iter_except(func, exception, first=None):
     """Yields results from a function repeatedly until an exception is raised.
 
@@ -827,8 +848,6 @@ def iter_index(iterable, value, start=0, stop=None):
     """Yield the index of each place in *iterable* that *value* occurs,
     beginning with index *start* and ending before index *stop*.
 
-    See :func:`locate` for a more general means of finding the indexes
-    associated with particular values.
 
     >>> list(iter_index('AABCADEAF', 'A'))
     [0, 1, 4, 7]
@@ -836,6 +855,19 @@ def iter_index(iterable, value, start=0, stop=None):
     [1, 4, 7]
     >>> list(iter_index('AABCADEAF', 'A', 1, 7))  # stop index is not inclusive
     [1, 4]
+
+    The behavior for non-scalar *values* matches the built-in Python types.
+
+    >>> list(iter_index('ABCDABCD', 'AB'))
+    [0, 4]
+    >>> list(iter_index([0, 1, 2, 3, 0, 1, 2, 3], [0, 1]))
+    []
+    >>> list(iter_index([[0, 1], [2, 3], [0, 1], [2, 3]], [0, 1]))
+    [0, 2]
+
+    See :func:`locate` for a more general means of finding the indexes
+    associated with particular values.
+
     """
     seq_index = getattr(iterable, 'index', None)
     if seq_index is None:
@@ -1006,7 +1038,9 @@ def totient(n):
     >>> totient(12)
     4
     """
-    for p in unique_justseen(factor(n)):
+    # The itertools docs use unique_justseen instead of set; see
+    # https://github.com/more-itertools/more-itertools/issues/823
+    for p in set(factor(n)):
         n = n // p * (p - 1)
 
     return n
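
Replacing `unique_justseen(factor(n))` with `set(factor(n))` is safe here because only the distinct prime factors matter for Euler's totient, and `factor` yields primes in sorted order anyway. A quick sanity check (illustrative sketch):

    from more_itertools import totient

    # 1, 5, 7, 11 are the integers in [1, 12) coprime to 12.
    assert totient(12) == 4
    assert totient(9) == 6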
diff --git a/pkg_resources/_vendor/more_itertools/recipes.pyi b/pkg_resources/_vendor/more_itertools/recipes.pyi
index ed4c19db49..739acec05f 100644
--- a/pkg_resources/_vendor/more_itertools/recipes.pyi
+++ b/pkg_resources/_vendor/more_itertools/recipes.pyi
@@ -1,4 +1,5 @@
 """Stubs for more_itertools.recipes"""
+
 from __future__ import annotations
 
 from typing import (
@@ -28,7 +29,9 @@ def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
 def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
 @overload
 def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
-def all_equal(iterable: Iterable[_T]) -> bool: ...
+def all_equal(
+    iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
+) -> bool: ...
 def quantify(
     iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
 ) -> int: ...
@@ -58,6 +61,11 @@ def unique_everseen(
 def unique_justseen(
     iterable: Iterable[_T], key: Callable[[_T], object] | None = ...
 ) -> Iterator[_T]: ...
+def unique(
+    iterable: Iterable[_T],
+    key: Callable[[_T], object] | None = ...,
+    reverse: bool = False,
+) -> Iterator[_T]: ...
 @overload
 def iter_except(
     func: Callable[[], _T],
diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD b/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD
deleted file mode 100644
index bcf796c2f4..0000000000
--- a/pkg_resources/_vendor/packaging-24.0.dist-info/RECORD
+++ /dev/null
@@ -1,37 +0,0 @@
-packaging-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-packaging-24.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
-packaging-24.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
-packaging-24.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
-packaging-24.0.dist-info/METADATA,sha256=0dESdhY_wHValuOrbgdebiEw04EbX4dkujlxPdEsFus,3203
-packaging-24.0.dist-info/RECORD,,
-packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496
-packaging/__pycache__/__init__.cpython-312.pyc,,
-packaging/__pycache__/_elffile.cpython-312.pyc,,
-packaging/__pycache__/_manylinux.cpython-312.pyc,,
-packaging/__pycache__/_musllinux.cpython-312.pyc,,
-packaging/__pycache__/_parser.cpython-312.pyc,,
-packaging/__pycache__/_structures.cpython-312.pyc,,
-packaging/__pycache__/_tokenizer.cpython-312.pyc,,
-packaging/__pycache__/markers.cpython-312.pyc,,
-packaging/__pycache__/metadata.cpython-312.pyc,,
-packaging/__pycache__/requirements.cpython-312.pyc,,
-packaging/__pycache__/specifiers.cpython-312.pyc,,
-packaging/__pycache__/tags.cpython-312.pyc,,
-packaging/__pycache__/utils.cpython-312.pyc,,
-packaging/__pycache__/version.cpython-312.pyc,,
-packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
-packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590
-packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676
-packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347
-packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
-packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
-packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208
-packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036
-packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
-packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784
-packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
-packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
-packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/INSTALLER b/pkg_resources/_vendor/packaging-24.1.dist-info/INSTALLER
similarity index 100%
rename from pkg_resources/_vendor/zipp-3.7.0.dist-info/INSTALLER
rename to pkg_resources/_vendor/packaging-24.1.dist-info/INSTALLER
diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE b/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE
rename to pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE
diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE.APACHE b/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.APACHE
similarity index 100%
rename from pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE.APACHE
rename to pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.APACHE
diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE.BSD b/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.BSD
similarity index 100%
rename from pkg_resources/_vendor/packaging-24.0.dist-info/LICENSE.BSD
rename to pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.BSD
diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/METADATA b/pkg_resources/_vendor/packaging-24.1.dist-info/METADATA
similarity index 97%
rename from pkg_resources/_vendor/packaging-24.0.dist-info/METADATA
rename to pkg_resources/_vendor/packaging-24.1.dist-info/METADATA
index 10ab4390a9..255dc46e0e 100644
--- a/pkg_resources/_vendor/packaging-24.0.dist-info/METADATA
+++ b/pkg_resources/_vendor/packaging-24.1.dist-info/METADATA
@@ -1,9 +1,9 @@
 Metadata-Version: 2.1
 Name: packaging
-Version: 24.0
+Version: 24.1
 Summary: Core utilities for Python packages
 Author-email: Donald Stufft 
-Requires-Python: >=3.7
+Requires-Python: >=3.8
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
@@ -12,12 +12,12 @@ Classifier: License :: OSI Approved :: BSD License
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Typing :: Typed
diff --git a/pkg_resources/_vendor/packaging-24.1.dist-info/RECORD b/pkg_resources/_vendor/packaging-24.1.dist-info/RECORD
new file mode 100644
index 0000000000..2b1e6bd4db
--- /dev/null
+++ b/pkg_resources/_vendor/packaging-24.1.dist-info/RECORD
@@ -0,0 +1,37 @@
+packaging-24.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+packaging-24.1.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-24.1.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-24.1.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-24.1.dist-info/METADATA,sha256=X3ooO3WnCfzNSBrqQjefCD1POAF1M2WSLmsHMgQlFdk,3204
+packaging-24.1.dist-info/RECORD,,
+packaging-24.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging-24.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+packaging/__init__.py,sha256=dtw2bNmWCQ9WnMoK3bk_elL1svSlikXtLpZhCFIB9SE,496
+packaging/__pycache__/__init__.cpython-312.pyc,,
+packaging/__pycache__/_elffile.cpython-312.pyc,,
+packaging/__pycache__/_manylinux.cpython-312.pyc,,
+packaging/__pycache__/_musllinux.cpython-312.pyc,,
+packaging/__pycache__/_parser.cpython-312.pyc,,
+packaging/__pycache__/_structures.cpython-312.pyc,,
+packaging/__pycache__/_tokenizer.cpython-312.pyc,,
+packaging/__pycache__/markers.cpython-312.pyc,,
+packaging/__pycache__/metadata.cpython-312.pyc,,
+packaging/__pycache__/requirements.cpython-312.pyc,,
+packaging/__pycache__/specifiers.cpython-312.pyc,,
+packaging/__pycache__/tags.cpython-312.pyc,,
+packaging/__pycache__/utils.cpython-312.pyc,,
+packaging/__pycache__/version.cpython-312.pyc,,
+packaging/_elffile.py,sha256=_LcJW4YNKywYsl4169B2ukKRqwxjxst_8H0FRVQKlz8,3282
+packaging/_manylinux.py,sha256=Xo4V0PZz8sbuVCbTni0t1CR0AHeir_7ib4lTmV8scD4,9586
+packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
+packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
+packaging/markers.py,sha256=dWKSqn5Sp-jDmOG-W3GfLHKjwhf1IsznbT71VlBoB5M,10671
+packaging/metadata.py,sha256=KINuSkJ12u-SyoKNTy_pHNGAfMUtxNvZ53qA1zAKcKI,32349
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
+packaging/specifiers.py,sha256=rjpc3hoJuunRIT6DdH7gLTnQ5j5QKSuWjoTC5sdHtHI,39714
+packaging/tags.py,sha256=y8EbheOu9WS7s-MebaXMcHMF-jzsA_C1Lz5XRTiSy4w,18883
+packaging/utils.py,sha256=NAdYUwnlAOpkat_RthavX8a07YuVxgGL_vwrx73GSDM,5287
+packaging/version.py,sha256=V0H3SOj_8werrvorrb6QDLRhlcqSErNTTkCvvfszhDI,16198
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/REQUESTED b/pkg_resources/_vendor/packaging-24.1.dist-info/REQUESTED
similarity index 100%
rename from pkg_resources/_vendor/zipp-3.7.0.dist-info/REQUESTED
rename to pkg_resources/_vendor/packaging-24.1.dist-info/REQUESTED
diff --git a/pkg_resources/_vendor/packaging-24.0.dist-info/WHEEL b/pkg_resources/_vendor/packaging-24.1.dist-info/WHEEL
similarity index 100%
rename from pkg_resources/_vendor/packaging-24.0.dist-info/WHEEL
rename to pkg_resources/_vendor/packaging-24.1.dist-info/WHEEL
diff --git a/pkg_resources/_vendor/packaging/__init__.py b/pkg_resources/_vendor/packaging/__init__.py
index e7c0aa12ca..9ba41d8357 100644
--- a/pkg_resources/_vendor/packaging/__init__.py
+++ b/pkg_resources/_vendor/packaging/__init__.py
@@ -6,7 +6,7 @@
 __summary__ = "Core utilities for Python packages"
 __uri__ = "https://github.com/pypa/packaging"
 
-__version__ = "24.0"
+__version__ = "24.1"
 
 __author__ = "Donald Stufft and individual contributors"
 __email__ = "donald@stufft.io"
diff --git a/pkg_resources/_vendor/packaging/_elffile.py b/pkg_resources/_vendor/packaging/_elffile.py
index 6fb19b30bb..f7a02180bf 100644
--- a/pkg_resources/_vendor/packaging/_elffile.py
+++ b/pkg_resources/_vendor/packaging/_elffile.py
@@ -8,10 +8,12 @@
 ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
 """
 
+from __future__ import annotations
+
 import enum
 import os
 import struct
-from typing import IO, Optional, Tuple
+from typing import IO
 
 
 class ELFInvalid(ValueError):
@@ -87,11 +89,11 @@ def __init__(self, f: IO[bytes]) -> None:
         except struct.error as e:
             raise ELFInvalid("unable to parse machine and section information") from e
 
-    def _read(self, fmt: str) -> Tuple[int, ...]:
+    def _read(self, fmt: str) -> tuple[int, ...]:
         return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
 
     @property
-    def interpreter(self) -> Optional[str]:
+    def interpreter(self) -> str | None:
         """
         The path recorded in the ``PT_INTERP`` section header.
         """
diff --git a/pkg_resources/_vendor/packaging/_manylinux.py b/pkg_resources/_vendor/packaging/_manylinux.py
index ad62505f3f..08f651fbd8 100644
--- a/pkg_resources/_vendor/packaging/_manylinux.py
+++ b/pkg_resources/_vendor/packaging/_manylinux.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import collections
 import contextlib
 import functools
@@ -5,7 +7,7 @@
 import re
 import sys
 import warnings
-from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
+from typing import Generator, Iterator, NamedTuple, Sequence
 
 from ._elffile import EIClass, EIData, ELFFile, EMachine
 
@@ -17,7 +19,7 @@
 # `os.PathLike` is not a generic type until Python 3.9, so sticking with `str`
 # as the type for `path` until then.
 @contextlib.contextmanager
-def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
+def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]:
     try:
         with open(path, "rb") as f:
             yield ELFFile(f)
@@ -72,7 +74,7 @@ def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
 # For now, guess what the highest minor version might be, assume it will
 # be 50 for testing. Once this actually happens, update the dictionary
 # with the actual value.
-_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)
 
 
 class _GLibCVersion(NamedTuple):
@@ -80,7 +82,7 @@ class _GLibCVersion(NamedTuple):
     minor: int
 
 
-def _glibc_version_string_confstr() -> Optional[str]:
+def _glibc_version_string_confstr() -> str | None:
     """
     Primary implementation of glibc_version_string using os.confstr.
     """
@@ -90,7 +92,7 @@ def _glibc_version_string_confstr() -> Optional[str]:
     # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
     try:
         # Should be a string like "glibc 2.17".
-        version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
+        version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
         assert version_string is not None
         _, version = version_string.rsplit()
     except (AssertionError, AttributeError, OSError, ValueError):
@@ -99,7 +101,7 @@ def _glibc_version_string_confstr() -> Optional[str]:
     return version
 
 
-def _glibc_version_string_ctypes() -> Optional[str]:
+def _glibc_version_string_ctypes() -> str | None:
     """
     Fallback implementation of glibc_version_string using ctypes.
     """
@@ -143,12 +145,12 @@ def _glibc_version_string_ctypes() -> Optional[str]:
     return version_str
 
 
-def _glibc_version_string() -> Optional[str]:
+def _glibc_version_string() -> str | None:
     """Returns glibc version string, or None if not using glibc."""
     return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
 
 
-def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+def _parse_glibc_version(version_str: str) -> tuple[int, int]:
     """Parse glibc version.
 
     We use a regexp instead of str.split because we want to discard any
@@ -167,8 +169,8 @@ def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
     return int(m.group("major")), int(m.group("minor"))
 
 
-@functools.lru_cache()
-def _get_glibc_version() -> Tuple[int, int]:
+@functools.lru_cache
+def _get_glibc_version() -> tuple[int, int]:
     version_str = _glibc_version_string()
     if version_str is None:
         return (-1, -1)
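
Beyond the union spelling, this hunk also drops the parentheses from
`@functools.lru_cache()`; using the function directly as a decorator is
supported since Python 3.8, which matches packaging 24.1's Python 3.8
minimum. A toy sketch of the same idiom (deliberately simpler than the
regex-based `_parse_glibc_version` above):

    from __future__ import annotations

    import functools

    @functools.lru_cache  # bare decorator form; valid on Python 3.8+
    def parse_version_pair(version_str: str) -> tuple[int, int]:
        major, _, minor = version_str.partition(".")
        return int(major), int(minor or 0)

    print(parse_version_pair("2.17"))       # (2, 17)
    print(parse_version_pair.cache_info())  # misses=1, currsize=1 so far
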
diff --git a/pkg_resources/_vendor/packaging/_musllinux.py b/pkg_resources/_vendor/packaging/_musllinux.py
index 86419df9d7..d2bf30b563 100644
--- a/pkg_resources/_vendor/packaging/_musllinux.py
+++ b/pkg_resources/_vendor/packaging/_musllinux.py
@@ -4,11 +4,13 @@
 linked against musl, and what musl version is used.
 """
 
+from __future__ import annotations
+
 import functools
 import re
 import subprocess
 import sys
-from typing import Iterator, NamedTuple, Optional, Sequence
+from typing import Iterator, NamedTuple, Sequence
 
 from ._elffile import ELFFile
 
@@ -18,7 +20,7 @@ class _MuslVersion(NamedTuple):
     minor: int
 
 
-def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
+def _parse_musl_version(output: str) -> _MuslVersion | None:
     lines = [n for n in (n.strip() for n in output.splitlines()) if n]
     if len(lines) < 2 or lines[0][:4] != "musl":
         return None
@@ -28,8 +30,8 @@ def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
     return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
 
 
-@functools.lru_cache()
-def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
+@functools.lru_cache
+def _get_musl_version(executable: str) -> _MuslVersion | None:
     """Detect currently-running musl runtime version.
 
     This is done by checking the specified executable's dynamic linking
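
For context, the function being retyped here parses the banner that musl's
dynamic linker prints when invoked directly. A self-contained approximation
of the logic visible in this hunk (a sketch mirroring the shown lines, not
the verbatim vendored code):

    from __future__ import annotations

    import re
    from typing import NamedTuple

    class MuslVersion(NamedTuple):
        major: int
        minor: int

    def parse_musl_version(output: str) -> MuslVersion | None:
        # Keep non-empty lines; the first should be the "musl libc" banner.
        lines = [n for n in (n.strip() for n in output.splitlines()) if n]
        if len(lines) < 2 or lines[0][:4] != "musl":
            return None
        m = re.match(r"Version (\d+)\.(\d+)", lines[1])
        if not m:
            return None
        return MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))

    print(parse_musl_version("musl libc (x86_64)\nVersion 1.2.4\n"))
    # MuslVersion(major=1, minor=2)
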
diff --git a/pkg_resources/_vendor/packaging/_parser.py b/pkg_resources/_vendor/packaging/_parser.py
index 684df75457..c1238c06ea 100644
--- a/pkg_resources/_vendor/packaging/_parser.py
+++ b/pkg_resources/_vendor/packaging/_parser.py
@@ -1,11 +1,13 @@
 """Handwritten parser of dependency specifiers.
 
-The docstring for each __parse_* function contains ENBF-inspired grammar representing
+The docstring for each __parse_* function contains EBNF-inspired grammar representing
 the implementation.
 """
 
+from __future__ import annotations
+
 import ast
-from typing import Any, List, NamedTuple, Optional, Tuple, Union
+from typing import NamedTuple, Sequence, Tuple, Union
 
 from ._tokenizer import DEFAULT_RULES, Tokenizer
 
@@ -41,20 +43,16 @@ def serialize(self) -> str:
 
 MarkerVar = Union[Variable, Value]
 MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
-# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
-# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
-# mypy does not support recursive type definition
-# https://github.com/python/mypy/issues/731
-MarkerAtom = Any
-MarkerList = List[Any]
+MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]]
+MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]]
 
 
 class ParsedRequirement(NamedTuple):
     name: str
     url: str
-    extras: List[str]
+    extras: list[str]
     specifier: str
-    marker: Optional[MarkerList]
+    marker: MarkerList | None
 
 
 # --------------------------------------------------------------------------------------
@@ -87,7 +85,7 @@ def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
 
 def _parse_requirement_details(
     tokenizer: Tokenizer,
-) -> Tuple[str, str, Optional[MarkerList]]:
+) -> tuple[str, str, MarkerList | None]:
     """
     requirement_details = AT URL (WS requirement_marker?)?
                         | specifier WS? (requirement_marker)?
@@ -156,7 +154,7 @@ def _parse_requirement_marker(
     return marker
 
 
-def _parse_extras(tokenizer: Tokenizer) -> List[str]:
+def _parse_extras(tokenizer: Tokenizer) -> list[str]:
     """
     extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
     """
@@ -175,11 +173,11 @@ def _parse_extras(tokenizer: Tokenizer) -> List[str]:
     return extras
 
 
-def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
+def _parse_extras_list(tokenizer: Tokenizer) -> list[str]:
     """
     extras_list = identifier (wsp* ',' wsp* identifier)*
     """
-    extras: List[str] = []
+    extras: list[str] = []
 
     if not tokenizer.check("IDENTIFIER"):
         return extras
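
The `MarkerAtom`/`MarkerList` rewrite above is more than a spelling change:
the old `Any` aliases existed only because mypy could not handle recursive
type definitions (see the removed comment), and recent mypy releases accept
them, so the real self-referential types come back. A small sketch of the
same recursive-alias technique (the names are illustrative):

    from __future__ import annotations

    from typing import Sequence, Union

    Nested = Union[str, Sequence["Nested"]]  # self-referential, like MarkerAtom

    def flatten(node: Nested) -> list[str]:
        # Strings are leaves; anything else is a sequence of further Nested.
        if isinstance(node, str):
            return [node]
        return [leaf for child in node for leaf in flatten(child)]

    print(flatten(["a", ["b", ["c"]], "d"]))  # ['a', 'b', 'c', 'd']
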
diff --git a/pkg_resources/_vendor/packaging/_tokenizer.py b/pkg_resources/_vendor/packaging/_tokenizer.py
index dd0d648d49..89d041605c 100644
--- a/pkg_resources/_vendor/packaging/_tokenizer.py
+++ b/pkg_resources/_vendor/packaging/_tokenizer.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
 import contextlib
 import re
 from dataclasses import dataclass
-from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
+from typing import Iterator, NoReturn
 
 from .specifiers import Specifier
 
@@ -21,7 +23,7 @@ def __init__(
         message: str,
         *,
         source: str,
-        span: Tuple[int, int],
+        span: tuple[int, int],
     ) -> None:
         self.span = span
         self.message = message
@@ -34,7 +36,7 @@ def __str__(self) -> str:
         return "\n    ".join([self.message, self.source, marker])
 
 
-DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
+DEFAULT_RULES: dict[str, str | re.Pattern[str]] = {
     "LEFT_PARENTHESIS": r"\(",
     "RIGHT_PARENTHESIS": r"\)",
     "LEFT_BRACKET": r"\[",
@@ -96,13 +98,13 @@ def __init__(
         self,
         source: str,
         *,
-        rules: "Dict[str, Union[str, re.Pattern[str]]]",
+        rules: dict[str, str | re.Pattern[str]],
     ) -> None:
         self.source = source
-        self.rules: Dict[str, re.Pattern[str]] = {
+        self.rules: dict[str, re.Pattern[str]] = {
             name: re.compile(pattern) for name, pattern in rules.items()
         }
-        self.next_token: Optional[Token] = None
+        self.next_token: Token | None = None
         self.position = 0
 
     def consume(self, name: str) -> None:
@@ -154,8 +156,8 @@ def raise_syntax_error(
         self,
         message: str,
         *,
-        span_start: Optional[int] = None,
-        span_end: Optional[int] = None,
+        span_start: int | None = None,
+        span_end: int | None = None,
     ) -> NoReturn:
         """Raise ParserSyntaxError at the given position."""
         span = (
diff --git a/pkg_resources/_vendor/packaging/markers.py b/pkg_resources/_vendor/packaging/markers.py
index 8b98fca723..7ac7bb69a5 100644
--- a/pkg_resources/_vendor/packaging/markers.py
+++ b/pkg_resources/_vendor/packaging/markers.py
@@ -2,20 +2,16 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
+from __future__ import annotations
+
 import operator
 import os
 import platform
 import sys
-from typing import Any, Callable, Dict, List, Optional, Tuple, Union
-
-from ._parser import (
-    MarkerAtom,
-    MarkerList,
-    Op,
-    Value,
-    Variable,
-    parse_marker as _parse_marker,
-)
+from typing import Any, Callable, TypedDict, cast
+
+from ._parser import MarkerAtom, MarkerList, Op, Value, Variable
+from ._parser import parse_marker as _parse_marker
 from ._tokenizer import ParserSyntaxError
 from .specifiers import InvalidSpecifier, Specifier
 from .utils import canonicalize_name
@@ -50,6 +46,78 @@ class UndefinedEnvironmentName(ValueError):
     """
 
 
+class Environment(TypedDict):
+    implementation_name: str
+    """The implementation's identifier, e.g. ``'cpython'``."""
+
+    implementation_version: str
+    """
+    The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or
+    ``'7.3.13'`` for PyPy3.10 v7.3.13.
+    """
+
+    os_name: str
+    """
+    The value of :py:data:`os.name`. The name of the operating system dependent module
+    imported, e.g. ``'posix'``.
+    """
+
+    platform_machine: str
+    """
+    Returns the machine type, e.g. ``'i386'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    platform_release: str
+    """
+    The system's release, e.g. ``'2.2.0'`` or ``'NT'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    platform_system: str
+    """
+    The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    platform_version: str
+    """
+    The system's release version, e.g. ``'#3 on degas'``.
+
+    An empty string if the value cannot be determined.
+    """
+
+    python_full_version: str
+    """
+    The Python version as string ``'major.minor.patchlevel'``.
+
+    Note that unlike the Python :py:data:`sys.version`, this value will always include
+    the patchlevel (it defaults to 0).
+    """
+
+    platform_python_implementation: str
+    """
+    A string identifying the Python implementation, e.g. ``'CPython'``.
+    """
+
+    python_version: str
+    """The Python version as string ``'major.minor'``."""
+
+    sys_platform: str
+    """
+    This string contains a platform identifier that can be used to append
+    platform-specific components to :py:data:`sys.path`, for instance.
+
+    For Unix systems, except on Linux and AIX, this is the lowercased OS name as
+    returned by ``uname -s`` with the first part of the version as returned by
+    ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python
+    was built.
+    """
+
+
 def _normalize_extra_values(results: Any) -> Any:
     """
     Normalize extra values.
@@ -67,9 +135,8 @@ def _normalize_extra_values(results: Any) -> Any:
 
 
 def _format_marker(
-    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
+    marker: list[str] | MarkerAtom | str, first: bool | None = True
 ) -> str:
-
     assert isinstance(marker, (list, tuple, str))
 
     # Sometimes we have a structure like [[...]] which is a single item list
@@ -95,7 +162,7 @@ def _format_marker(
         return marker
 
 
-_operators: Dict[str, Operator] = {
+_operators: dict[str, Operator] = {
     "in": lambda lhs, rhs: lhs in rhs,
     "not in": lambda lhs, rhs: lhs not in rhs,
     "<": operator.lt,
@@ -115,14 +182,14 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
     else:
         return spec.contains(lhs, prereleases=True)
 
-    oper: Optional[Operator] = _operators.get(op.serialize())
+    oper: Operator | None = _operators.get(op.serialize())
     if oper is None:
         raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
 
     return oper(lhs, rhs)
 
 
-def _normalize(*values: str, key: str) -> Tuple[str, ...]:
+def _normalize(*values: str, key: str) -> tuple[str, ...]:
     # PEP 685 – Comparison of extra names for optional distribution dependencies
     # https://peps.python.org/pep-0685/
     # > When comparing extra names, tools MUST normalize the names being
@@ -134,8 +201,8 @@ def _normalize(*values: str, key: str) -> Tuple[str, ...]:
     return values
 
 
-def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
-    groups: List[List[bool]] = [[]]
+def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool:
+    groups: list[list[bool]] = [[]]
 
     for marker in markers:
         assert isinstance(marker, (list, tuple, str))
@@ -164,7 +231,7 @@ def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
     return any(all(item) for item in groups)
 
 
-def format_full_version(info: "sys._version_info") -> str:
+def format_full_version(info: sys._version_info) -> str:
     version = "{0.major}.{0.minor}.{0.micro}".format(info)
     kind = info.releaselevel
     if kind != "final":
@@ -172,7 +239,7 @@ def format_full_version(info: "sys._version_info") -> str:
     return version
 
 
-def default_environment() -> Dict[str, str]:
+def default_environment() -> Environment:
     iver = format_full_version(sys.implementation.version)
     implementation_name = sys.implementation.name
     return {
@@ -231,7 +298,7 @@ def __eq__(self, other: Any) -> bool:
 
         return str(self) == str(other)
 
-    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+    def evaluate(self, environment: dict[str, str] | None = None) -> bool:
         """Evaluate a marker.
 
         Return the boolean from evaluating the given marker against the
@@ -240,8 +307,14 @@ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
 
         The environment is determined from the current Python process.
         """
-        current_environment = default_environment()
+        current_environment = cast("dict[str, str]", default_environment())
         current_environment["extra"] = ""
+        # Work around platform.python_version() returning something that is not PEP 440
+        # compliant for non-tagged Python builds. We preserve default_environment()'s
+        # behavior of returning platform.python_version() verbatim, and leave it to the
+        # caller to provide a syntactically valid version if they want to override it.
+        if current_environment["python_full_version"].endswith("+"):
+            current_environment["python_full_version"] += "local"
         if environment is not None:
             current_environment.update(environment)
             # The API used to allow setting extra to None. We need to handle this
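
The workaround added above exists because `platform.python_version()` can
return something like ``3.13.0+`` for a non-tagged CPython build, and a bare
trailing ``+`` is not a valid PEP 440 version, so evaluating a marker such as
``python_full_version >= "3.9"`` against it would raise. Appending ``local``
turns it into a parseable local version. A quick demonstration via the public
API (assuming the `packaging` distribution is importable):

    from packaging.version import InvalidVersion, Version

    raw = "3.13.0+"  # the shape a non-tagged build can report
    try:
        Version(raw)
    except InvalidVersion as exc:
        print("rejected:", exc)    # bare trailing '+' is not PEP 440

    print(Version(raw + "local"))  # 3.13.0+local -- a valid local version
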
diff --git a/pkg_resources/_vendor/packaging/metadata.py b/pkg_resources/_vendor/packaging/metadata.py
index fb27493079..eb8dc844d2 100644
--- a/pkg_resources/_vendor/packaging/metadata.py
+++ b/pkg_resources/_vendor/packaging/metadata.py
@@ -1,50 +1,31 @@
+from __future__ import annotations
+
 import email.feedparser
 import email.header
 import email.message
 import email.parser
 import email.policy
-import sys
 import typing
 from typing import (
     Any,
     Callable,
-    Dict,
     Generic,
-    List,
-    Optional,
-    Tuple,
-    Type,
-    Union,
+    Literal,
+    TypedDict,
     cast,
 )
 
-from . import requirements, specifiers, utils, version as version_module
+from . import requirements, specifiers, utils
+from . import version as version_module
 
 T = typing.TypeVar("T")
-if sys.version_info[:2] >= (3, 8):  # pragma: no cover
-    from typing import Literal, TypedDict
-else:  # pragma: no cover
-    if typing.TYPE_CHECKING:
-        from typing_extensions import Literal, TypedDict
-    else:
-        try:
-            from typing_extensions import Literal, TypedDict
-        except ImportError:
-
-            class Literal:
-                def __init_subclass__(*_args, **_kwargs):
-                    pass
-
-            class TypedDict:
-                def __init_subclass__(*_args, **_kwargs):
-                    pass
 
 
 try:
     ExceptionGroup
 except NameError:  # pragma: no cover
 
-    class ExceptionGroup(Exception):  # noqa: N818
+    class ExceptionGroup(Exception):
         """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
 
         If :external:exc:`ExceptionGroup` is already defined by Python itself,
@@ -52,9 +33,9 @@ class ExceptionGroup(Exception):  # noqa: N818
         """
 
         message: str
-        exceptions: List[Exception]
+        exceptions: list[Exception]
 
-        def __init__(self, message: str, exceptions: List[Exception]) -> None:
+        def __init__(self, message: str, exceptions: list[Exception]) -> None:
             self.message = message
             self.exceptions = exceptions
 
@@ -100,32 +81,32 @@ class RawMetadata(TypedDict, total=False):
     metadata_version: str
     name: str
     version: str
-    platforms: List[str]
+    platforms: list[str]
     summary: str
     description: str
-    keywords: List[str]
+    keywords: list[str]
     home_page: str
     author: str
     author_email: str
     license: str
 
     # Metadata 1.1 - PEP 314
-    supported_platforms: List[str]
+    supported_platforms: list[str]
     download_url: str
-    classifiers: List[str]
-    requires: List[str]
-    provides: List[str]
-    obsoletes: List[str]
+    classifiers: list[str]
+    requires: list[str]
+    provides: list[str]
+    obsoletes: list[str]
 
     # Metadata 1.2 - PEP 345
     maintainer: str
     maintainer_email: str
-    requires_dist: List[str]
-    provides_dist: List[str]
-    obsoletes_dist: List[str]
+    requires_dist: list[str]
+    provides_dist: list[str]
+    obsoletes_dist: list[str]
     requires_python: str
-    requires_external: List[str]
-    project_urls: Dict[str, str]
+    requires_external: list[str]
+    project_urls: dict[str, str]
 
     # Metadata 2.0
     # PEP 426 attempted to completely revamp the metadata format
@@ -138,10 +119,10 @@ class RawMetadata(TypedDict, total=False):
 
     # Metadata 2.1 - PEP 566
     description_content_type: str
-    provides_extra: List[str]
+    provides_extra: list[str]
 
     # Metadata 2.2 - PEP 643
-    dynamic: List[str]
+    dynamic: list[str]
 
     # Metadata 2.3 - PEP 685
     # No new fields were added in PEP 685, just some edge cases were
@@ -185,12 +166,12 @@ class RawMetadata(TypedDict, total=False):
 }
 
 
-def _parse_keywords(data: str) -> List[str]:
+def _parse_keywords(data: str) -> list[str]:
     """Split a string of comma-separate keyboards into a list of keywords."""
     return [k.strip() for k in data.split(",")]
 
 
-def _parse_project_urls(data: List[str]) -> Dict[str, str]:
+def _parse_project_urls(data: list[str]) -> dict[str, str]:
     """Parse a list of label/URL string pairings separated by a comma."""
     urls = {}
     for pair in data:
@@ -230,7 +211,7 @@ def _parse_project_urls(data: List[str]) -> Dict[str, str]:
     return urls
 
 
-def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
+def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
     """Get the body of the message."""
     # If our source is a str, then our caller has managed encodings for us,
     # and we don't need to deal with it.
@@ -292,7 +273,7 @@ def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
 _RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
 
 
-def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
+def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
     """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
 
     This function returns a two-item tuple of dicts. The first dict is of
@@ -308,8 +289,8 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
     included in this dict.
 
     """
-    raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {}
-    unparsed: Dict[str, List[str]] = {}
+    raw: dict[str, str | list[str] | dict[str, str]] = {}
+    unparsed: dict[str, list[str]] = {}
 
     if isinstance(data, str):
         parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
@@ -357,7 +338,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
                 # The Header object stores its data as chunks, and each chunk
                 # can be independently encoded, so we'll need to check each
                 # of them.
-                chunks: List[Tuple[bytes, Optional[str]]] = []
+                chunks: list[tuple[bytes, str | None]] = []
                 for bin, encoding in email.header.decode_header(h):
                     try:
                         bin.decode("utf8", "strict")
@@ -499,11 +480,11 @@ def __init__(
     ) -> None:
         self.added = added
 
-    def __set_name__(self, _owner: "Metadata", name: str) -> None:
+    def __set_name__(self, _owner: Metadata, name: str) -> None:
         self.name = name
         self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
 
-    def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
+    def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
         # With Python 3.8, the caching can be replaced with functools.cached_property().
         # No need to check the cache as attribute lookup will resolve into the
         # instance's __dict__ before __get__ is called.
@@ -531,7 +512,7 @@ def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
         return cast(T, value)
 
     def _invalid_metadata(
-        self, msg: str, cause: Optional[Exception] = None
+        self, msg: str, cause: Exception | None = None
     ) -> InvalidMetadata:
         exc = InvalidMetadata(
             self.raw_name, msg.format_map({"field": repr(self.raw_name)})
@@ -606,7 +587,7 @@ def _process_description_content_type(self, value: str) -> str:
             )
         return value
 
-    def _process_dynamic(self, value: List[str]) -> List[str]:
+    def _process_dynamic(self, value: list[str]) -> list[str]:
         for dynamic_field in map(str.lower, value):
             if dynamic_field in {"name", "version", "metadata-version"}:
                 raise self._invalid_metadata(
@@ -618,8 +599,8 @@ def _process_dynamic(self, value: List[str]) -> List[str]:
 
     def _process_provides_extra(
         self,
-        value: List[str],
-    ) -> List[utils.NormalizedName]:
+        value: list[str],
+    ) -> list[utils.NormalizedName]:
         normalized_names = []
         try:
             for name in value:
@@ -641,8 +622,8 @@ def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
 
     def _process_requires_dist(
         self,
-        value: List[str],
-    ) -> List[requirements.Requirement]:
+        value: list[str],
+    ) -> list[requirements.Requirement]:
         reqs = []
         try:
             for req in value:
@@ -665,7 +646,7 @@ class Metadata:
     _raw: RawMetadata
 
     @classmethod
-    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
+    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
         """Create an instance from :class:`RawMetadata`.
 
         If *validate* is true, all metadata will be validated. All exceptions
@@ -675,7 +656,7 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
         ins._raw = data.copy()  # Mutations occur due to caching enriched values.
 
         if validate:
-            exceptions: List[Exception] = []
+            exceptions: list[Exception] = []
             try:
                 metadata_version = ins.metadata_version
                 metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
@@ -722,9 +703,7 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
         return ins
 
     @classmethod
-    def from_email(
-        cls, data: Union[bytes, str], *, validate: bool = True
-    ) -> "Metadata":
+    def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
         """Parse metadata from email headers.
 
         If *validate* is true, the metadata will be validated. All exceptions
@@ -760,66 +739,66 @@ def from_email(
     *validate* parameter)"""
     version: _Validator[version_module.Version] = _Validator()
     """:external:ref:`core-metadata-version` (required)"""
-    dynamic: _Validator[Optional[List[str]]] = _Validator(
+    dynamic: _Validator[list[str] | None] = _Validator(
         added="2.2",
     )
     """:external:ref:`core-metadata-dynamic`
     (validated against core metadata field names and lowercased)"""
-    platforms: _Validator[Optional[List[str]]] = _Validator()
+    platforms: _Validator[list[str] | None] = _Validator()
     """:external:ref:`core-metadata-platform`"""
-    supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1")
     """:external:ref:`core-metadata-supported-platform`"""
-    summary: _Validator[Optional[str]] = _Validator()
+    summary: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
-    description: _Validator[Optional[str]] = _Validator()  # TODO 2.1: can be in body
+    description: _Validator[str | None] = _Validator()  # TODO 2.1: can be in body
     """:external:ref:`core-metadata-description`"""
-    description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
+    description_content_type: _Validator[str | None] = _Validator(added="2.1")
     """:external:ref:`core-metadata-description-content-type` (validated)"""
-    keywords: _Validator[Optional[List[str]]] = _Validator()
+    keywords: _Validator[list[str] | None] = _Validator()
     """:external:ref:`core-metadata-keywords`"""
-    home_page: _Validator[Optional[str]] = _Validator()
+    home_page: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-home-page`"""
-    download_url: _Validator[Optional[str]] = _Validator(added="1.1")
+    download_url: _Validator[str | None] = _Validator(added="1.1")
     """:external:ref:`core-metadata-download-url`"""
-    author: _Validator[Optional[str]] = _Validator()
+    author: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-author`"""
-    author_email: _Validator[Optional[str]] = _Validator()
+    author_email: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-author-email`"""
-    maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
+    maintainer: _Validator[str | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-maintainer`"""
-    maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
+    maintainer_email: _Validator[str | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-maintainer-email`"""
-    license: _Validator[Optional[str]] = _Validator()
+    license: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-license`"""
-    classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    classifiers: _Validator[list[str] | None] = _Validator(added="1.1")
     """:external:ref:`core-metadata-classifier`"""
-    requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
+    requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(
         added="1.2"
     )
     """:external:ref:`core-metadata-requires-dist`"""
-    requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
+    requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator(
         added="1.2"
     )
     """:external:ref:`core-metadata-requires-python`"""
     # Because `Requires-External` allows for non-PEP 440 version specifiers, we
     # don't do any processing on the values.
-    requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    requires_external: _Validator[list[str] | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-requires-external`"""
-    project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
+    project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-project-url`"""
     # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
     # regardless of metadata version.
-    provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
+    provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator(
         added="2.1",
     )
     """:external:ref:`core-metadata-provides-extra`"""
-    provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    provides_dist: _Validator[list[str] | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-provides-dist`"""
-    obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+    obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2")
     """:external:ref:`core-metadata-obsoletes-dist`"""
-    requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    requires: _Validator[list[str] | None] = _Validator(added="1.1")
     """``Requires`` (deprecated)"""
-    provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    provides: _Validator[list[str] | None] = _Validator(added="1.1")
     """``Provides`` (deprecated)"""
-    obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+    obsoletes: _Validator[list[str] | None] = _Validator(added="1.1")
     """``Obsoletes`` (deprecated)"""
diff --git a/pkg_resources/_vendor/packaging/requirements.py b/pkg_resources/_vendor/packaging/requirements.py
index bdc43a7e98..4e068c9567 100644
--- a/pkg_resources/_vendor/packaging/requirements.py
+++ b/pkg_resources/_vendor/packaging/requirements.py
@@ -1,8 +1,9 @@
 # This file is dual licensed under the terms of the Apache License, Version
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
+from __future__ import annotations
 
-from typing import Any, Iterator, Optional, Set
+from typing import Any, Iterator
 
 from ._parser import parse_requirement as _parse_requirement
 from ._tokenizer import ParserSyntaxError
@@ -37,10 +38,10 @@ def __init__(self, requirement_string: str) -> None:
             raise InvalidRequirement(str(e)) from e
 
         self.name: str = parsed.name
-        self.url: Optional[str] = parsed.url or None
-        self.extras: Set[str] = set(parsed.extras or [])
+        self.url: str | None = parsed.url or None
+        self.extras: set[str] = set(parsed.extras or [])
         self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
-        self.marker: Optional[Marker] = None
+        self.marker: Marker | None = None
         if parsed.marker is not None:
             self.marker = Marker.__new__(Marker)
             self.marker._markers = _normalize_extra_values(parsed.marker)
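
The attributes retyped above are the whole public state of a parsed
requirement, which makes the change easy to sanity-check from the REPL
(assuming the `packaging` distribution is importable):

    from packaging.requirements import Requirement

    req = Requirement('requests[security]>=2.8.1; python_version < "3.13"')
    print(req.name)       # requests
    print(req.extras)     # {'security'}             -> set[str]
    print(req.specifier)  # >=2.8.1                  -> SpecifierSet
    print(req.marker)     # python_version < "3.13"  -> Marker | None
    print(req.url)        # None                     -> str | None
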
diff --git a/pkg_resources/_vendor/packaging/specifiers.py b/pkg_resources/_vendor/packaging/specifiers.py
index 2d015bab59..2fa75f7abb 100644
--- a/pkg_resources/_vendor/packaging/specifiers.py
+++ b/pkg_resources/_vendor/packaging/specifiers.py
@@ -8,10 +8,12 @@
     from packaging.version import Version
 """
 
+from __future__ import annotations
+
 import abc
 import itertools
 import re
-from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
+from typing import Callable, Iterable, Iterator, TypeVar, Union
 
 from .utils import canonicalize_version
 from .version import Version
@@ -64,7 +66,7 @@ def __eq__(self, other: object) -> bool:
 
     @property
     @abc.abstractmethod
-    def prereleases(self) -> Optional[bool]:
+    def prereleases(self) -> bool | None:
         """Whether or not pre-releases as a whole are allowed.
 
         This can be set to either ``True`` or ``False`` to explicitly enable or disable
@@ -79,14 +81,14 @@ def prereleases(self, value: bool) -> None:
         """
 
     @abc.abstractmethod
-    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
+    def contains(self, item: str, prereleases: bool | None = None) -> bool:
         """
         Determines if the given item is contained within this specifier.
         """
 
     @abc.abstractmethod
     def filter(
-        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
     ) -> Iterator[UnparsedVersionVar]:
         """
         Takes an iterable of items and filters them so that only items which
@@ -217,7 +219,7 @@ class Specifier(BaseSpecifier):
         "===": "arbitrary",
     }
 
-    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+    def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:
         """Initialize a Specifier instance.
 
         :param spec:
@@ -234,7 +236,7 @@ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
         if not match:
             raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
 
-        self._spec: Tuple[str, str] = (
+        self._spec: tuple[str, str] = (
             match.group("operator").strip(),
             match.group("version").strip(),
         )
@@ -318,7 +320,7 @@ def __str__(self) -> str:
         return "{}{}".format(*self._spec)
 
     @property
-    def _canonical_spec(self) -> Tuple[str, str]:
+    def _canonical_spec(self) -> tuple[str, str]:
         canonical_version = canonicalize_version(
             self._spec[1],
             strip_trailing_zero=(self._spec[0] != "~="),
@@ -364,7 +366,6 @@ def _get_operator(self, op: str) -> CallableOperator:
         return operator_callable
 
     def _compare_compatible(self, prospective: Version, spec: str) -> bool:
-
         # Compatible releases have an equivalent combination of >= and ==. That
         # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
         # implement this in terms of the other specifiers instead of
@@ -385,7 +386,6 @@ def _compare_compatible(self, prospective: Version, spec: str) -> bool:
         )
 
     def _compare_equal(self, prospective: Version, spec: str) -> bool:
-
         # We need special logic to handle prefix matching
         if spec.endswith(".*"):
             # In the case of prefix matching we want to ignore local segment.
@@ -429,21 +429,18 @@ def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
         return not self._compare_equal(prospective, spec)
 
     def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
-
         # NB: Local version identifiers are NOT permitted in the version
         # specifier, so local version labels can be universally removed from
         # the prospective version.
         return Version(prospective.public) <= Version(spec)
 
     def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
-
         # NB: Local version identifiers are NOT permitted in the version
         # specifier, so local version labels can be universally removed from
         # the prospective version.
         return Version(prospective.public) >= Version(spec)
 
     def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
-
         # Convert our spec to a Version instance, since we'll want to work with
         # it as a version.
         spec = Version(spec_str)
@@ -468,7 +465,6 @@ def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
         return True
 
     def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
-
         # Convert our spec to a Version instance, since we'll want to work with
         # it as a version.
         spec = Version(spec_str)
@@ -501,7 +497,7 @@ def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
     def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
         return str(prospective).lower() == str(spec).lower()
 
-    def __contains__(self, item: Union[str, Version]) -> bool:
+    def __contains__(self, item: str | Version) -> bool:
         """Return whether or not the item is contained in this specifier.
 
         :param item: The item to check for.
@@ -522,9 +518,7 @@ def __contains__(self, item: Union[str, Version]) -> bool:
         """
         return self.contains(item)
 
-    def contains(
-        self, item: UnparsedVersion, prereleases: Optional[bool] = None
-    ) -> bool:
+    def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool:
         """Return whether or not the item is contained in this specifier.
 
         :param item:
@@ -569,7 +563,7 @@ def contains(
         return operator_callable(normalized_item, self.version)
 
     def filter(
-        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
     ) -> Iterator[UnparsedVersionVar]:
         """Filter items in the given iterable, that match the specifier.
 
@@ -633,7 +627,7 @@ def filter(
 _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
 
 
-def _version_split(version: str) -> List[str]:
+def _version_split(version: str) -> list[str]:
     """Split version into components.
 
     The split components are intended for version comparison. The logic does
@@ -641,7 +635,7 @@ def _version_split(version: str) -> List[str]:
     components back with :func:`_version_join` may not produce the original
     version string.
     """
-    result: List[str] = []
+    result: list[str] = []
 
     epoch, _, rest = version.rpartition("!")
     result.append(epoch or "0")
@@ -655,7 +649,7 @@ def _version_split(version: str) -> List[str]:
     return result
 
 
-def _version_join(components: List[str]) -> str:
+def _version_join(components: list[str]) -> str:
     """Join split version components into a version string.
 
     This function assumes the input came from :func:`_version_split`, where the
@@ -672,7 +666,7 @@ def _is_not_suffix(segment: str) -> bool:
     )
 
 
-def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]:
     left_split, right_split = [], []
 
     # Get the release segment of our versions
@@ -700,9 +694,7 @@ class SpecifierSet(BaseSpecifier):
     specifiers (``>=3.0,!=3.1``), or no specifier at all.
     """
 
-    def __init__(
-        self, specifiers: str = "", prereleases: Optional[bool] = None
-    ) -> None:
+    def __init__(self, specifiers: str = "", prereleases: bool | None = None) -> None:
         """Initialize a SpecifierSet instance.
 
         :param specifiers:
@@ -730,7 +722,7 @@ def __init__(
         self._prereleases = prereleases
 
     @property
-    def prereleases(self) -> Optional[bool]:
+    def prereleases(self) -> bool | None:
         # If we have been given an explicit prerelease modifier, then we'll
         # pass that through here.
         if self._prereleases is not None:
@@ -787,7 +779,7 @@ def __str__(self) -> str:
     def __hash__(self) -> int:
         return hash(self._specs)
 
-    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+    def __and__(self, other: SpecifierSet | str) -> SpecifierSet:
         """Return a SpecifierSet which is a combination of the two sets.
 
         :param other: The other object to combine with.
@@ -883,8 +875,8 @@ def __contains__(self, item: UnparsedVersion) -> bool:
     def contains(
         self,
         item: UnparsedVersion,
-        prereleases: Optional[bool] = None,
-        installed: Optional[bool] = None,
+        prereleases: bool | None = None,
+        installed: bool | None = None,
     ) -> bool:
         """Return whether or not the item is contained in this SpecifierSet.
 
@@ -938,7 +930,7 @@ def contains(
         return all(s.contains(item, prereleases=prereleases) for s in self._specs)
 
     def filter(
-        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
     ) -> Iterator[UnparsedVersionVar]:
         """Filter items in the given iterable, that match the specifiers in this set.
 
@@ -995,8 +987,8 @@ def filter(
         # which will filter out any pre-releases, unless there are no final
         # releases.
         else:
-            filtered: List[UnparsedVersionVar] = []
-            found_prereleases: List[UnparsedVersionVar] = []
+            filtered: list[UnparsedVersionVar] = []
+            found_prereleases: list[UnparsedVersionVar] = []
 
             for item in iterable:
                 parsed_version = _coerce_version(item)
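
The `filter()` code shown at the end of this hunk is the no-specifier branch
of `SpecifierSet.filter`: pre-releases are held back and surface only when
nothing final made it through. Observable behavior, for reference (assuming
the `packaging` distribution is importable):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=1.0")
    print(list(spec.filter(["0.9", "1.0", "1.1a1", "1.2"])))      # ['1.0', '1.2']
    print(list(spec.filter(["0.9", "1.1a1"], prereleases=True)))  # ['1.1a1']

    empty = SpecifierSet("")  # no specifiers: the branch in this hunk
    print(list(empty.filter(["1.0", "2.0a1"])))  # ['1.0']   (prerelease dropped)
    print(list(empty.filter(["2.0a1"])))         # ['2.0a1'] (fallback kicks in)
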
diff --git a/pkg_resources/_vendor/packaging/tags.py b/pkg_resources/_vendor/packaging/tags.py
index 89f1926137..6667d29908 100644
--- a/pkg_resources/_vendor/packaging/tags.py
+++ b/pkg_resources/_vendor/packaging/tags.py
@@ -2,6 +2,8 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
+from __future__ import annotations
+
 import logging
 import platform
 import re
@@ -11,15 +13,10 @@
 import sysconfig
 from importlib.machinery import EXTENSION_SUFFIXES
 from typing import (
-    Dict,
-    FrozenSet,
     Iterable,
     Iterator,
-    List,
-    Optional,
     Sequence,
     Tuple,
-    Union,
     cast,
 )
 
@@ -30,7 +27,7 @@
 PythonVersion = Sequence[int]
 MacVersion = Tuple[int, int]
 
-INTERPRETER_SHORT_NAMES: Dict[str, str] = {
+INTERPRETER_SHORT_NAMES: dict[str, str] = {
     "python": "py",  # Generic.
     "cpython": "cp",
     "pypy": "pp",
@@ -96,7 +93,7 @@ def __repr__(self) -> str:
         return f"<{self} @ {id(self)}>"
 
 
-def parse_tag(tag: str) -> FrozenSet[Tag]:
+def parse_tag(tag: str) -> frozenset[Tag]:
     """
     Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
 
@@ -112,8 +109,8 @@ def parse_tag(tag: str) -> FrozenSet[Tag]:
     return frozenset(tags)
 
 
-def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
-    value: Union[int, str, None] = sysconfig.get_config_var(name)
+def _get_config_var(name: str, warn: bool = False) -> int | str | None:
+    value: int | str | None = sysconfig.get_config_var(name)
     if value is None and warn:
         logger.debug(
             "Config variable '%s' is unset, Python ABI tag may be incorrect", name
@@ -125,7 +122,7 @@ def _normalize_string(string: str) -> str:
     return string.replace(".", "_").replace("-", "_").replace(" ", "_")
 
 
-def _is_threaded_cpython(abis: List[str]) -> bool:
+def _is_threaded_cpython(abis: list[str]) -> bool:
     """
     Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
 
@@ -151,7 +148,7 @@ def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
     return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
 
 
-def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
+def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]:
     py_version = tuple(py_version)  # To allow for version comparison.
     abis = []
     version = _version_nodot(py_version[:2])
@@ -185,9 +182,9 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
 
 
 def cpython_tags(
-    python_version: Optional[PythonVersion] = None,
-    abis: Optional[Iterable[str]] = None,
-    platforms: Optional[Iterable[str]] = None,
+    python_version: PythonVersion | None = None,
+    abis: Iterable[str] | None = None,
+    platforms: Iterable[str] | None = None,
     *,
     warn: bool = False,
 ) -> Iterator[Tag]:
@@ -244,7 +241,7 @@ def cpython_tags(
                 yield Tag(interpreter, "abi3", platform_)
 
 
-def _generic_abi() -> List[str]:
+def _generic_abi() -> list[str]:
     """
     Return the ABI tag based on EXT_SUFFIX.
     """
@@ -286,9 +283,9 @@ def _generic_abi() -> List[str]:
 
 
 def generic_tags(
-    interpreter: Optional[str] = None,
-    abis: Optional[Iterable[str]] = None,
-    platforms: Optional[Iterable[str]] = None,
+    interpreter: str | None = None,
+    abis: Iterable[str] | None = None,
+    platforms: Iterable[str] | None = None,
     *,
     warn: bool = False,
 ) -> Iterator[Tag]:
@@ -332,9 +329,9 @@ def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
 
 
 def compatible_tags(
-    python_version: Optional[PythonVersion] = None,
-    interpreter: Optional[str] = None,
-    platforms: Optional[Iterable[str]] = None,
+    python_version: PythonVersion | None = None,
+    interpreter: str | None = None,
+    platforms: Iterable[str] | None = None,
 ) -> Iterator[Tag]:
     """
     Yields the sequence of tags that are compatible with a specific version of Python.
@@ -366,7 +363,7 @@ def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
     return "i386"
 
 
-def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
+def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> list[str]:
     formats = [cpu_arch]
     if cpu_arch == "x86_64":
         if version < (10, 4):
@@ -399,7 +396,7 @@ def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
 
 
 def mac_platforms(
-    version: Optional[MacVersion] = None, arch: Optional[str] = None
+    version: MacVersion | None = None, arch: str | None = None
 ) -> Iterator[str]:
     """
     Yields the platform tags for a macOS system.
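
All of these signatures feed the same tag machinery; the compressed-tag
parsing they rely on is easy to see in isolation (assuming the `packaging`
distribution is importable):

    from packaging.tags import Tag, parse_tag, sys_tags

    tags = parse_tag("cp312-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64")
    for tag in sorted(tags, key=str):  # a frozenset[Tag] of the two variants
        print(tag)
    # cp312-abi3-manylinux2014_x86_64
    # cp312-abi3-manylinux_2_17_x86_64

    # The lowest-common-denominator tag is emitted for every interpreter:
    print(Tag("py3", "none", "any") in set(sys_tags()))  # True
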
diff --git a/pkg_resources/_vendor/packaging/utils.py b/pkg_resources/_vendor/packaging/utils.py
index c2c2f75aa8..d33da5bb8b 100644
--- a/pkg_resources/_vendor/packaging/utils.py
+++ b/pkg_resources/_vendor/packaging/utils.py
@@ -2,8 +2,10 @@
 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
 # for complete details.
 
+from __future__ import annotations
+
 import re
-from typing import FrozenSet, NewType, Tuple, Union, cast
+from typing import NewType, Tuple, Union, cast
 
 from .tags import Tag, parse_tag
 from .version import InvalidVersion, Version
@@ -53,7 +55,7 @@ def is_normalized_name(name: str) -> bool:
 
 
 def canonicalize_version(
-    version: Union[Version, str], *, strip_trailing_zero: bool = True
+    version: Version | str, *, strip_trailing_zero: bool = True
 ) -> str:
     """
     This is very similar to Version.__str__, but has one subtle difference
@@ -102,7 +104,7 @@ def canonicalize_version(
 
 def parse_wheel_filename(
     filename: str,
-) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
+) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
     if not filename.endswith(".whl"):
         raise InvalidWheelFilename(
             f"Invalid wheel filename (extension must be '.whl'): {filename}"
@@ -143,7 +145,7 @@ def parse_wheel_filename(
     return (name, version, build, tags)
 
 
-def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
+def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
     if filename.endswith(".tar.gz"):
         file_stem = filename[: -len(".tar.gz")]
     elif filename.endswith(".zip"):
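
A quick sketch of the two filename parsers whose return types change above
(assuming the `packaging` distribution is importable):

    from packaging.utils import parse_sdist_filename, parse_wheel_filename

    name, version, build, tags = parse_wheel_filename(
        "packaging-24.1-py3-none-any.whl"
    )
    print(name, version, build)    # packaging 24.1 ()  -- empty build tag
    print([str(t) for t in tags])  # ['py3-none-any']   -- frozenset[Tag]

    print(parse_sdist_filename("packaging-24.1.tar.gz"))
    # ('packaging', <Version('24.1')>)
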
diff --git a/pkg_resources/_vendor/packaging/version.py b/pkg_resources/_vendor/packaging/version.py
index 5faab9bd0d..46bc261308 100644
--- a/pkg_resources/_vendor/packaging/version.py
+++ b/pkg_resources/_vendor/packaging/version.py
@@ -7,9 +7,11 @@
     from packaging.version import parse, Version
 """
 
+from __future__ import annotations
+
 import itertools
 import re
-from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
+from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union
 
 from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
 
@@ -35,14 +37,14 @@
 
 class _Version(NamedTuple):
     epoch: int
-    release: Tuple[int, ...]
-    dev: Optional[Tuple[str, int]]
-    pre: Optional[Tuple[str, int]]
-    post: Optional[Tuple[str, int]]
-    local: Optional[LocalType]
+    release: tuple[int, ...]
+    dev: tuple[str, int] | None
+    pre: tuple[str, int] | None
+    post: tuple[str, int] | None
+    local: LocalType | None
 
 
-def parse(version: str) -> "Version":
+def parse(version: str) -> Version:
     """Parse the given version string.
 
     >>> parse('1.0.dev1')
@@ -65,7 +67,7 @@ class InvalidVersion(ValueError):
 
 
 class _BaseVersion:
-    _key: Tuple[Any, ...]
+    _key: tuple[Any, ...]
 
     def __hash__(self) -> int:
         return hash(self._key)
@@ -73,13 +75,13 @@ def __hash__(self) -> int:
     # Please keep the duplicated `isinstance` check
     # in the six comparisons hereunder
     # unless you find a way to avoid adding overhead function calls.
-    def __lt__(self, other: "_BaseVersion") -> bool:
+    def __lt__(self, other: _BaseVersion) -> bool:
         if not isinstance(other, _BaseVersion):
             return NotImplemented
 
         return self._key < other._key
 
-    def __le__(self, other: "_BaseVersion") -> bool:
+    def __le__(self, other: _BaseVersion) -> bool:
         if not isinstance(other, _BaseVersion):
             return NotImplemented
 
@@ -91,13 +93,13 @@ def __eq__(self, other: object) -> bool:
 
         return self._key == other._key
 
-    def __ge__(self, other: "_BaseVersion") -> bool:
+    def __ge__(self, other: _BaseVersion) -> bool:
         if not isinstance(other, _BaseVersion):
             return NotImplemented
 
         return self._key >= other._key
 
-    def __gt__(self, other: "_BaseVersion") -> bool:
+    def __gt__(self, other: _BaseVersion) -> bool:
         if not isinstance(other, _BaseVersion):
             return NotImplemented
 
@@ -274,7 +276,7 @@ def epoch(self) -> int:
         return self._version.epoch
 
     @property
-    def release(self) -> Tuple[int, ...]:
+    def release(self) -> tuple[int, ...]:
         """The components of the "release" segment of the version.
 
         >>> Version("1.2.3").release
@@ -290,7 +292,7 @@ def release(self) -> Tuple[int, ...]:
         return self._version.release
 
     @property
-    def pre(self) -> Optional[Tuple[str, int]]:
+    def pre(self) -> tuple[str, int] | None:
         """The pre-release segment of the version.
 
         >>> print(Version("1.2.3").pre)
@@ -305,7 +307,7 @@ def pre(self) -> Optional[Tuple[str, int]]:
         return self._version.pre
 
     @property
-    def post(self) -> Optional[int]:
+    def post(self) -> int | None:
         """The post-release number of the version.
 
         >>> print(Version("1.2.3").post)
@@ -316,7 +318,7 @@ def post(self) -> Optional[int]:
         return self._version.post[1] if self._version.post else None
 
     @property
-    def dev(self) -> Optional[int]:
+    def dev(self) -> int | None:
         """The development number of the version.
 
         >>> print(Version("1.2.3").dev)
@@ -327,7 +329,7 @@ def dev(self) -> Optional[int]:
         return self._version.dev[1] if self._version.dev else None
 
     @property
-    def local(self) -> Optional[str]:
+    def local(self) -> str | None:
         """The local version segment of the version.
 
         >>> print(Version("1.2.3").local)
@@ -450,9 +452,8 @@ def micro(self) -> int:
 
 
 def _parse_letter_version(
-    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
-) -> Optional[Tuple[str, int]]:
-
+    letter: str | None, number: str | bytes | SupportsInt | None
+) -> tuple[str, int] | None:
     if letter:
         # We consider there to be an implicit 0 in a pre-release if there is
         # not a numeral associated with it.
@@ -488,7 +489,7 @@ def _parse_letter_version(
 _local_version_separators = re.compile(r"[\._-]")
 
 
-def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
+def _parse_local_version(local: str | None) -> LocalType | None:
     """
     Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
     """
@@ -502,13 +503,12 @@ def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
 
 def _cmpkey(
     epoch: int,
-    release: Tuple[int, ...],
-    pre: Optional[Tuple[str, int]],
-    post: Optional[Tuple[str, int]],
-    dev: Optional[Tuple[str, int]],
-    local: Optional[LocalType],
+    release: tuple[int, ...],
+    pre: tuple[str, int] | None,
+    post: tuple[str, int] | None,
+    dev: tuple[str, int] | None,
+    local: LocalType | None,
 ) -> CmpKey:
-
     # When we compare a release version, we want to compare it with all of the
     # trailing zeros removed. So we'll reverse the list, drop all the now
     # leading zeros until we come to something non zero, then take the rest
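
The `_cmpkey` comment above explains why trailing zeros are stripped before
comparison; the visible consequence is that PEP 440 treats ``1.0`` and
``1.0.0`` as equal. A short demonstration of that and of the retyped
properties (assuming the `packaging` distribution is importable):

    from packaging.version import Version, parse

    v = parse("1.2.3rc1+local.1")
    print(v.release, v.pre, v.local)  # (1, 2, 3) ('rc', 1) local.1
    print(v < Version("1.2.3"))       # True: pre-releases sort before finals

    print(Version("1.0") == Version("1.0.0"))  # True: trailing zeros are
    # dropped from the comparison key, exactly as the comment describes
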
diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD b/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD
deleted file mode 100644
index a721322694..0000000000
--- a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/RECORD
+++ /dev/null
@@ -1,23 +0,0 @@
-platformdirs-2.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-platformdirs-2.6.2.dist-info/METADATA,sha256=rDoFsb9-2tVym02IIeYCoKgGaCpY2v8xw8WWXywxhIM,9502
-platformdirs-2.6.2.dist-info/RECORD,,
-platformdirs-2.6.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-platformdirs-2.6.2.dist-info/WHEEL,sha256=NaLmgHHW_f9jTvv_wRh9vcK7c7EK9o5fwsIXMOzoGgM,87
-platformdirs-2.6.2.dist-info/licenses/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089
-platformdirs/__init__.py,sha256=td0a-fHENmnG8ess2WRoysKv9ud5j6TQ-p_iUM_uE18,12864
-platformdirs/__main__.py,sha256=VsC0t5m-6f0YVr96PVks93G3EDF8MSNY4KpUMvPahDA,1164
-platformdirs/__pycache__/__init__.cpython-312.pyc,,
-platformdirs/__pycache__/__main__.cpython-312.pyc,,
-platformdirs/__pycache__/android.cpython-312.pyc,,
-platformdirs/__pycache__/api.cpython-312.pyc,,
-platformdirs/__pycache__/macos.cpython-312.pyc,,
-platformdirs/__pycache__/unix.cpython-312.pyc,,
-platformdirs/__pycache__/version.cpython-312.pyc,,
-platformdirs/__pycache__/windows.cpython-312.pyc,,
-platformdirs/android.py,sha256=GKizhyS7ESRiU67u8UnBJLm46goau9937EchXWbPBlk,4068
-platformdirs/api.py,sha256=MXKHXOL3eh_-trSok-JUTjAR_zjmmKF3rjREVABjP8s,4910
-platformdirs/macos.py,sha256=-3UXQewbT0yMhMdkzRXfXGAntmLIH7Qt4a9Hlf8I5_Y,2655
-platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-platformdirs/unix.py,sha256=P-WQjSSieE38DXjMDa1t4XHnKJQ5idEaKT0PyXwm8KQ,6911
-platformdirs/version.py,sha256=qaN-fw_htIgKUVXoAuAEVgKxQu3tZ9qE2eiKkWIS7LA,160
-platformdirs/windows.py,sha256=LOrXLgI0CjQldDo2zhOZYGYZ6g4e_cJOCB_pF9aMRWQ,6596
diff --git a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/INSTALLER b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/METADATA b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/METADATA
similarity index 75%
rename from pkg_resources/_vendor/platformdirs-2.6.2.dist-info/METADATA
rename to pkg_resources/_vendor/platformdirs-4.2.2.dist-info/METADATA
index 608afde321..ab51ef36ad 100644
--- a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/METADATA
+++ b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/METADATA
@@ -1,49 +1,50 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: platformdirs
-Version: 2.6.2
-Summary: A small Python package for determining appropriate platform-specific dirs, e.g. a "user data dir".
+Version: 4.2.2
+Summary: A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`.
 Project-URL: Documentation, https://platformdirs.readthedocs.io
 Project-URL: Homepage, https://github.com/platformdirs/platformdirs
 Project-URL: Source, https://github.com/platformdirs/platformdirs
 Project-URL: Tracker, https://github.com/platformdirs/platformdirs/issues
 Maintainer-email: Bernát Gábor , Julian Berman , Ofek Lev , Ronny Pfannschmidt 
+License-Expression: MIT
 License-File: LICENSE
-Keywords: application,cache,directory,log,user
+Keywords: appdirs,application,cache,directory,log,user
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Requires-Python: >=3.7
-Requires-Dist: typing-extensions>=4.4; python_version < '3.8'
+Requires-Python: >=3.8
 Provides-Extra: docs
-Requires-Dist: furo>=2022.12.7; extra == 'docs'
+Requires-Dist: furo>=2023.9.10; extra == 'docs'
 Requires-Dist: proselint>=0.13; extra == 'docs'
-Requires-Dist: sphinx-autodoc-typehints>=1.19.5; extra == 'docs'
-Requires-Dist: sphinx>=5.3; extra == 'docs'
+Requires-Dist: sphinx-autodoc-typehints>=1.25.2; extra == 'docs'
+Requires-Dist: sphinx>=7.2.6; extra == 'docs'
 Provides-Extra: test
 Requires-Dist: appdirs==1.4.4; extra == 'test'
-Requires-Dist: covdefaults>=2.2.2; extra == 'test'
-Requires-Dist: pytest-cov>=4; extra == 'test'
-Requires-Dist: pytest-mock>=3.10; extra == 'test'
-Requires-Dist: pytest>=7.2; extra == 'test'
+Requires-Dist: covdefaults>=2.3; extra == 'test'
+Requires-Dist: pytest-cov>=4.1; extra == 'test'
+Requires-Dist: pytest-mock>=3.12; extra == 'test'
+Requires-Dist: pytest>=7.4.3; extra == 'test'
+Provides-Extra: type
+Requires-Dist: mypy>=1.8; extra == 'type'
 Description-Content-Type: text/x-rst
 
 The problem
 ===========
 
-.. image:: https://github.com/platformdirs/platformdirs/workflows/Test/badge.svg
-   :target: https://github.com/platformdirs/platformdirs/actions?query=workflow%3ATest
+.. image:: https://github.com/platformdirs/platformdirs/actions/workflows/check.yml/badge.svg
+   :target: https://github.com/platformdirs/platformdirs/actions
 
 When writing desktop applications, finding the right location to store user data
 and configuration varies per platform. Even for single-platform apps, there
@@ -53,7 +54,7 @@ For example, if running on macOS, you should use::
 
     ~/Library/Application Support/
 
-If on Windows (at least English Win XP) that should be::
+If on Windows (at least English Win) that should be::
 
     C:\Documents and Settings\\Application Data\Local Settings\\
 
@@ -82,6 +83,11 @@ This kind of thing is what the ``platformdirs`` package is for.
 - site config dir (``site_config_dir``)
 - user log dir (``user_log_dir``)
 - user documents dir (``user_documents_dir``)
+- user downloads dir (``user_downloads_dir``)
+- user pictures dir (``user_pictures_dir``)
+- user videos dir (``user_videos_dir``)
+- user music dir (``user_music_dir``)
+- user desktop dir (``user_desktop_dir``)
 - user runtime dir (``user_runtime_dir``)
 
 And also:
@@ -109,10 +115,20 @@ On macOS:
     '/Users/trentm/Library/Logs/SuperApp'
     >>> user_documents_dir()
     '/Users/trentm/Documents'
+    >>> user_downloads_dir()
+    '/Users/trentm/Downloads'
+    >>> user_pictures_dir()
+    '/Users/trentm/Pictures'
+    >>> user_videos_dir()
+    '/Users/trentm/Movies'
+    >>> user_music_dir()
+    '/Users/trentm/Music'
+    >>> user_desktop_dir()
+    '/Users/trentm/Desktop'
     >>> user_runtime_dir(appname, appauthor)
     '/Users/trentm/Library/Caches/TemporaryItems/SuperApp'
 
-On Windows 7:
+On Windows:
 
 .. code-block:: pycon
 
@@ -129,6 +145,16 @@ On Windows 7:
     'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'
     >>> user_documents_dir()
     'C:\\Users\\trentm\\Documents'
+    >>> user_downloads_dir()
+    'C:\\Users\\trentm\\Downloads'
+    >>> user_pictures_dir()
+    'C:\\Users\\trentm\\Pictures'
+    >>> user_videos_dir()
+    'C:\\Users\\trentm\\Videos'
+    >>> user_music_dir()
+    'C:\\Users\\trentm\\Music'
+    >>> user_desktop_dir()
+    'C:\\Users\\trentm\\Desktop'
     >>> user_runtime_dir(appname, appauthor)
     'C:\\Users\\trentm\\AppData\\Local\\Temp\\Acme\\SuperApp'
 
@@ -148,11 +174,21 @@ On Linux:
     >>> user_cache_dir(appname, appauthor)
     '/home/trentm/.cache/SuperApp'
     >>> user_log_dir(appname, appauthor)
-    '/home/trentm/.cache/SuperApp/log'
+    '/home/trentm/.local/state/SuperApp/log'
     >>> user_config_dir(appname)
     '/home/trentm/.config/SuperApp'
     >>> user_documents_dir()
     '/home/trentm/Documents'
+    >>> user_downloads_dir()
+    '/home/trentm/Downloads'
+    >>> user_pictures_dir()
+    '/home/trentm/Pictures'
+    >>> user_videos_dir()
+    '/home/trentm/Videos'
+    >>> user_music_dir()
+    '/home/trentm/Music'
+    >>> user_desktop_dir()
+    '/home/trentm/Desktop'
     >>> user_runtime_dir(appname, appauthor)
     '/run/user/{os.getuid()}/SuperApp'
     >>> site_config_dir(appname)
@@ -176,6 +212,16 @@ On Android::
     '/data/data/com.myApp/shared_prefs/SuperApp'
     >>> user_documents_dir()
     '/storage/emulated/0/Documents'
+    >>> user_downloads_dir()
+    '/storage/emulated/0/Downloads'
+    >>> user_pictures_dir()
+    '/storage/emulated/0/Pictures'
+    >>> user_videos_dir()
+    '/storage/emulated/0/DCIM/Camera'
+    >>> user_music_dir()
+    '/storage/emulated/0/Music'
+    >>> user_desktop_dir()
+    '/storage/emulated/0/Desktop'
     >>> user_runtime_dir(appname, appauthor)
     '/data/data/com.myApp/cache/SuperApp/tmp'
 
@@ -203,6 +249,16 @@ apps also support ``XDG_*`` environment variables.
     '/Users/trentm/Library/Logs/SuperApp'
     >>> dirs.user_documents_dir
     '/Users/trentm/Documents'
+    >>> dirs.user_downloads_dir
+    '/Users/trentm/Downloads'
+    >>> dirs.user_pictures_dir
+    '/Users/trentm/Pictures'
+    >>> dirs.user_videos_dir
+    '/Users/trentm/Movies'
+    >>> dirs.user_music_dir
+    '/Users/trentm/Music'
+    >>> dirs.user_desktop_dir
+    '/Users/trentm/Desktop'
     >>> dirs.user_runtime_dir
     '/Users/trentm/Library/Caches/TemporaryItems/SuperApp'
 
@@ -225,6 +281,16 @@ dirs::
     '/Users/trentm/Library/Logs/SuperApp/1.0'
     >>> dirs.user_documents_dir
     '/Users/trentm/Documents'
+    >>> dirs.user_downloads_dir
+    '/Users/trentm/Downloads'
+    >>> dirs.user_pictures_dir
+    '/Users/trentm/Pictures'
+    >>> dirs.user_videos_dir
+    '/Users/trentm/Movies'
+    >>> dirs.user_music_dir
+    '/Users/trentm/Music'
+    >>> dirs.user_desktop_dir
+    '/Users/trentm/Desktop'
     >>> dirs.user_runtime_dir
     '/Users/trentm/Library/Caches/TemporaryItems/SuperApp/1.0'
 
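
The updated README above documents the new media directories and the
version-scoped ``PlatformDirs`` object. A minimal usage sketch; the names
``SuperApp``/``Acme`` are illustrative and output paths vary per platform::

    from platformdirs import PlatformDirs, user_downloads_dir

    dirs = PlatformDirs("SuperApp", "Acme", version="1.0")
    print(dirs.user_data_dir)    # e.g. ~/.local/share/SuperApp/1.0 on Linux
    print(dirs.user_log_dir)     # e.g. ~/.local/state/SuperApp/1.0/log on Linux
    print(user_downloads_dir())  # e.g. ~/Downloads
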
diff --git a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/RECORD b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/RECORD
new file mode 100644
index 0000000000..64c0c8ea2e
--- /dev/null
+++ b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/RECORD
@@ -0,0 +1,23 @@
+platformdirs-4.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+platformdirs-4.2.2.dist-info/METADATA,sha256=zmsie01G1MtXR0wgIv5XpVeTO7idr0WWvfmxKsKWuGk,11429
+platformdirs-4.2.2.dist-info/RECORD,,
+platformdirs-4.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+platformdirs-4.2.2.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
+platformdirs-4.2.2.dist-info/licenses/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089
+platformdirs/__init__.py,sha256=EMGE8qeHRR9CzDFr8kL3tA8hdZZniYjXBVZd0UGTWK0,22225
+platformdirs/__main__.py,sha256=HnsUQHpiBaiTxwcmwVw-nFaPdVNZtQIdi1eWDtI-MzI,1493
+platformdirs/__pycache__/__init__.cpython-312.pyc,,
+platformdirs/__pycache__/__main__.cpython-312.pyc,,
+platformdirs/__pycache__/android.cpython-312.pyc,,
+platformdirs/__pycache__/api.cpython-312.pyc,,
+platformdirs/__pycache__/macos.cpython-312.pyc,,
+platformdirs/__pycache__/unix.cpython-312.pyc,,
+platformdirs/__pycache__/version.cpython-312.pyc,,
+platformdirs/__pycache__/windows.cpython-312.pyc,,
+platformdirs/android.py,sha256=xZXY9Jd46WOsxT2U6-5HsNtDZ-IQqxcEUrBLl3hYk4o,9016
+platformdirs/api.py,sha256=QBYdUac2eC521ek_y53uD1Dcq-lJX8IgSRVd4InC6uc,8996
+platformdirs/macos.py,sha256=wftsbsvq6nZ0WORXSiCrZNkRHz_WKuktl0a6mC7MFkI,5580
+platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+platformdirs/unix.py,sha256=Cci9Wqt35dAMsg6HT9nRGHSBW5obb0pR3AE1JJnsCXg,10643
+platformdirs/version.py,sha256=r7F76tZRjgQKzrpx_I0_ZMQOMU-PS7eGnHD7zEK3KB0,411
+platformdirs/windows.py,sha256=IFpiohUBwxPtCzlyKwNtxyW4Jk8haa6W8o59mfrDXVo,10125
diff --git a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/REQUESTED b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/WHEEL b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/WHEEL
similarity index 67%
rename from pkg_resources/_vendor/platformdirs-2.6.2.dist-info/WHEEL
rename to pkg_resources/_vendor/platformdirs-4.2.2.dist-info/WHEEL
index 6d803659b7..516596c767 100644
--- a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/WHEEL
+++ b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/WHEEL
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.11.1
+Generator: hatchling 1.24.2
 Root-Is-Purelib: true
 Tag: py3-none-any
diff --git a/pkg_resources/_vendor/platformdirs-2.6.2.dist-info/licenses/LICENSE b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
similarity index 100%
rename from pkg_resources/_vendor/platformdirs-2.6.2.dist-info/licenses/LICENSE
rename to pkg_resources/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
diff --git a/pkg_resources/_vendor/platformdirs/__init__.py b/pkg_resources/_vendor/platformdirs/__init__.py
index aef2821b83..3f7d9490d1 100644
--- a/pkg_resources/_vendor/platformdirs/__init__.py
+++ b/pkg_resources/_vendor/platformdirs/__init__.py
@@ -1,42 +1,43 @@
 """
-Utilities for determining application-specific dirs. See  for details and
-usage.
+Utilities for determining application-specific dirs.
+
+See  for details and usage.
+
 """
+
 from __future__ import annotations
 
 import os
 import sys
-from pathlib import Path
-
-if sys.version_info >= (3, 8):  # pragma: no cover (py38+)
-    from typing import Literal
-else:  # pragma: no cover (py38+)
-    from ..typing_extensions import Literal
+from typing import TYPE_CHECKING
 
 from .api import PlatformDirsABC
 from .version import __version__
 from .version import __version_tuple__ as __version_info__
 
+if TYPE_CHECKING:
+    from pathlib import Path
+    from typing import Literal
+
 
 def _set_platform_dir_class() -> type[PlatformDirsABC]:
     if sys.platform == "win32":
-        from .windows import Windows as Result
+        from platformdirs.windows import Windows as Result  # noqa: PLC0415
     elif sys.platform == "darwin":
-        from .macos import MacOS as Result
+        from platformdirs.macos import MacOS as Result  # noqa: PLC0415
     else:
-        from .unix import Unix as Result
+        from platformdirs.unix import Unix as Result  # noqa: PLC0415
 
     if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
-
         if os.getenv("SHELL") or os.getenv("PREFIX"):
             return Result
 
-        from .android import _android_folder
+        from platformdirs.android import _android_folder  # noqa: PLC0415
 
         if _android_folder() is not None:
-            from .android import Android
+            from platformdirs.android import Android  # noqa: PLC0415
 
-            return Android  # return to avoid redefinition of result
+            return Android  # return to avoid redefinition of the result
 
     return Result
 
@@ -49,294 +50,578 @@ def user_data_dir(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    roaming: bool = False,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> str:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
-    :param roaming: See `roaming `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: data directory tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_dir
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_data_dir
 
 
 def site_data_dir(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    multipath: bool = False,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> str:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param multipath: See `multipath `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: data directory shared by users
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_dir
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_data_dir
 
 
 def user_config_dir(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    roaming: bool = False,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> str:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
-    :param roaming: See `roaming `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: config directory tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_dir
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_config_dir
 
 
 def site_config_dir(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    multipath: bool = False,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> str:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param multipath: See `multipath `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: config directory shared by the users
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_dir
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_config_dir
 
 
 def user_cache_dir(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    opinion: bool = True,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> str:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: cache directory tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_dir
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_cache_dir
+
+
+def site_cache_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: cache directory shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_cache_dir
 
 
 def user_state_dir(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    roaming: bool = False,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> str:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
-    :param roaming: See `roaming `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: state directory tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_dir
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_state_dir
 
 
 def user_log_dir(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    opinion: bool = True,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> str:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: log directory tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_dir
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_log_dir
 
 
 def user_documents_dir() -> str:
-    """
-    :returns: documents directory tied to the user
-    """
+    """:returns: documents directory tied to the user"""
     return PlatformDirs().user_documents_dir
 
 
+def user_downloads_dir() -> str:
+    """:returns: downloads directory tied to the user"""
+    return PlatformDirs().user_downloads_dir
+
+
+def user_pictures_dir() -> str:
+    """:returns: pictures directory tied to the user"""
+    return PlatformDirs().user_pictures_dir
+
+
+def user_videos_dir() -> str:
+    """:returns: videos directory tied to the user"""
+    return PlatformDirs().user_videos_dir
+
+
+def user_music_dir() -> str:
+    """:returns: music directory tied to the user"""
+    return PlatformDirs().user_music_dir
+
+
+def user_desktop_dir() -> str:
+    """:returns: desktop directory tied to the user"""
+    return PlatformDirs().user_desktop_dir
+
+
 def user_runtime_dir(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    opinion: bool = True,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> str:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: runtime directory tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_dir
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_runtime_dir
+
+
+def site_runtime_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: runtime directory shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_runtime_dir
 
 
 def user_data_path(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    roaming: bool = False,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> Path:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
-    :param roaming: See `roaming `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: data path tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_path
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_data_path
 
 
 def site_data_path(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    multipath: bool = False,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> Path:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param multipath: See `multipath `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: data path shared by users
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_path
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_data_path
 
 
 def user_config_path(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    roaming: bool = False,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> Path:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
-    :param roaming: See `roaming `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: config path tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_path
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_config_path
 
 
 def site_config_path(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    multipath: bool = False,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> Path:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param multipath: See `multipath `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: config path shared by the users
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_path
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_config_path
+
+
+def site_cache_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: cache path shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_cache_path
 
 
 def user_cache_path(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    opinion: bool = True,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> Path:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: cache path tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_path
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_cache_path
 
 
 def user_state_path(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    roaming: bool = False,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> Path:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
-    :param roaming: See `roaming `.
+    :param roaming: See `roaming `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: state path tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_path
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_state_path
 
 
 def user_log_path(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    opinion: bool = True,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> Path:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: log path tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_path
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_log_path
 
 
 def user_documents_path() -> Path:
-    """
-    :returns: documents path tied to the user
-    """
+    """:returns: documents a path tied to the user"""
     return PlatformDirs().user_documents_path
 
 
+def user_downloads_path() -> Path:
+    """:returns: downloads path tied to the user"""
+    return PlatformDirs().user_downloads_path
+
+
+def user_pictures_path() -> Path:
+    """:returns: pictures path tied to the user"""
+    return PlatformDirs().user_pictures_path
+
+
+def user_videos_path() -> Path:
+    """:returns: videos path tied to the user"""
+    return PlatformDirs().user_videos_path
+
+
+def user_music_path() -> Path:
+    """:returns: music path tied to the user"""
+    return PlatformDirs().user_music_path
+
+
+def user_desktop_path() -> Path:
+    """:returns: desktop path tied to the user"""
+    return PlatformDirs().user_desktop_path
+
+
 def user_runtime_path(
     appname: str | None = None,
     appauthor: str | None | Literal[False] = None,
     version: str | None = None,
-    opinion: bool = True,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
 ) -> Path:
     """
     :param appname: See `appname `.
     :param appauthor: See `appauthor `.
     :param version: See `version `.
     :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
     :returns: runtime path tied to the user
     """
-    return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_path
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_runtime_path
+
+
+def site_runtime_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname `.
+    :param appauthor: See `appauthor `.
+    :param version: See `version `.
+    :param opinion: See `opinion `.
+    :param ensure_exists: See `ensure_exists `.
+    :returns: runtime path shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_runtime_path
 
 
 __all__ = [
-    "__version__",
-    "__version_info__",
-    "PlatformDirs",
     "AppDirs",
+    "PlatformDirs",
     "PlatformDirsABC",
-    "user_data_dir",
-    "user_config_dir",
-    "user_cache_dir",
-    "user_state_dir",
-    "user_log_dir",
-    "user_documents_dir",
-    "user_runtime_dir",
-    "site_data_dir",
+    "__version__",
+    "__version_info__",
+    "site_cache_dir",
+    "site_cache_path",
     "site_config_dir",
-    "user_data_path",
-    "user_config_path",
+    "site_config_path",
+    "site_data_dir",
+    "site_data_path",
+    "site_runtime_dir",
+    "site_runtime_path",
+    "user_cache_dir",
     "user_cache_path",
-    "user_state_path",
-    "user_log_path",
+    "user_config_dir",
+    "user_config_path",
+    "user_data_dir",
+    "user_data_path",
+    "user_desktop_dir",
+    "user_desktop_path",
+    "user_documents_dir",
     "user_documents_path",
+    "user_downloads_dir",
+    "user_downloads_path",
+    "user_log_dir",
+    "user_log_path",
+    "user_music_dir",
+    "user_music_path",
+    "user_pictures_dir",
+    "user_pictures_path",
+    "user_runtime_dir",
     "user_runtime_path",
-    "site_data_path",
-    "site_config_path",
+    "user_state_dir",
+    "user_state_path",
+    "user_videos_dir",
+    "user_videos_path",
 ]
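
Every module-level helper in this hunk now threads an ``ensure_exists`` flag
through to ``PlatformDirs``. A short sketch, assuming write access to the
user's home directory (``SuperApp``/``Acme`` are placeholder names)::

    from pathlib import Path
    from platformdirs import user_cache_dir

    cache = user_cache_dir("SuperApp", "Acme", ensure_exists=True)
    assert Path(cache).is_dir()  # created with parents by _optionally_create_directory
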
diff --git a/pkg_resources/_vendor/platformdirs/__main__.py b/pkg_resources/_vendor/platformdirs/__main__.py
index 0fc1edd59c..922c521358 100644
--- a/pkg_resources/_vendor/platformdirs/__main__.py
+++ b/pkg_resources/_vendor/platformdirs/__main__.py
@@ -1,3 +1,5 @@
+"""Main entry point."""
+
 from __future__ import annotations
 
 from platformdirs import PlatformDirs, __version__
@@ -9,37 +11,44 @@
     "user_state_dir",
     "user_log_dir",
     "user_documents_dir",
+    "user_downloads_dir",
+    "user_pictures_dir",
+    "user_videos_dir",
+    "user_music_dir",
     "user_runtime_dir",
     "site_data_dir",
     "site_config_dir",
+    "site_cache_dir",
+    "site_runtime_dir",
 )
 
 
 def main() -> None:
+    """Run the main entry point."""
     app_name = "MyApp"
     app_author = "MyCompany"
 
-    print(f"-- platformdirs {__version__} --")
+    print(f"-- platformdirs {__version__} --")  # noqa: T201
 
-    print("-- app dirs (with optional 'version')")
+    print("-- app dirs (with optional 'version')")  # noqa: T201
     dirs = PlatformDirs(app_name, app_author, version="1.0")
     for prop in PROPS:
-        print(f"{prop}: {getattr(dirs, prop)}")
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
 
-    print("\n-- app dirs (without optional 'version')")
+    print("\n-- app dirs (without optional 'version')")  # noqa: T201
     dirs = PlatformDirs(app_name, app_author)
     for prop in PROPS:
-        print(f"{prop}: {getattr(dirs, prop)}")
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
 
-    print("\n-- app dirs (without optional 'appauthor')")
+    print("\n-- app dirs (without optional 'appauthor')")  # noqa: T201
     dirs = PlatformDirs(app_name)
     for prop in PROPS:
-        print(f"{prop}: {getattr(dirs, prop)}")
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
 
-    print("\n-- app dirs (with disabled 'appauthor')")
+    print("\n-- app dirs (with disabled 'appauthor')")  # noqa: T201
     dirs = PlatformDirs(app_name, appauthor=False)
     for prop in PROPS:
-        print(f"{prop}: {getattr(dirs, prop)}")
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
 
 
 if __name__ == "__main__":
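
The extended ``PROPS`` tuple above drives the demo entry point; the sketch
below is equivalent to running ``python -m platformdirs`` and prints each
property for a few ``PlatformDirs`` configurations::

    from platformdirs.__main__ import main

    main()  # user/site dirs for MyApp/MyCompany, with and without a version
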
diff --git a/pkg_resources/_vendor/platformdirs/android.py b/pkg_resources/_vendor/platformdirs/android.py
index eda8093512..afd3141c72 100644
--- a/pkg_resources/_vendor/platformdirs/android.py
+++ b/pkg_resources/_vendor/platformdirs/android.py
@@ -1,19 +1,23 @@
+"""Android."""
+
 from __future__ import annotations
 
 import os
 import re
 import sys
 from functools import lru_cache
-from typing import cast
+from typing import TYPE_CHECKING, cast
 
 from .api import PlatformDirsABC
 
 
 class Android(PlatformDirsABC):
     """
-    Follows the guidance `from here `_. Makes use of the
-    `appname ` and
-    `version `.
+    Follows the guidance `from here `_.
+
+    Makes use of the `appname `, `version
+    `, `ensure_exists `.
+
     """
 
     @property
@@ -29,7 +33,8 @@ def site_data_dir(self) -> str:
     @property
     def user_config_dir(self) -> str:
         """
-        :return: config directory tied to the user, e.g. ``/data/user///shared_prefs/``
+        :return: config directory tied to the user, e.g. \
+        ``/data/user///shared_prefs/``
         """
         return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs")
 
@@ -40,9 +45,14 @@ def site_config_dir(self) -> str:
 
     @property
     def user_cache_dir(self) -> str:
-        """:return: cache directory tied to the user, e.g. e.g. ``/data/user///cache/``"""
+        """:return: cache directory tied to the user, e.g.,``/data/user///cache/``"""
         return self._append_app_name_and_version(cast(str, _android_folder()), "cache")
 
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, same as `user_cache_dir`"""
+        return self.user_cache_dir
+
     @property
     def user_state_dir(self) -> str:
         """:return: state directory tied to the user, same as `user_data_dir`"""
@@ -56,16 +66,39 @@ def user_log_dir(self) -> str:
         """
         path = self.user_cache_dir
         if self.opinion:
-            path = os.path.join(path, "log")
+            path = os.path.join(path, "log")  # noqa: PTH118
         return path
 
     @property
     def user_documents_dir(self) -> str:
-        """
-        :return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``
-        """
+        """:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``"""
         return _android_documents_folder()
 
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``"""
+        return _android_downloads_folder()
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user e.g. ``/storage/emulated/0/Pictures``"""
+        return _android_pictures_folder()
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``"""
+        return _android_videos_folder()
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user e.g. ``/storage/emulated/0/Music``"""
+        return _android_music_folder()
+
+    @property
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user e.g. ``/storage/emulated/0/Desktop``"""
+        return "/storage/emulated/0/Desktop"
+
     @property
     def user_runtime_dir(self) -> str:
         """
@@ -74,21 +107,43 @@ def user_runtime_dir(self) -> str:
         """
         path = self.user_cache_dir
         if self.opinion:
-            path = os.path.join(path, "tmp")
+            path = os.path.join(path, "tmp")  # noqa: PTH118
         return path
 
+    @property
+    def site_runtime_dir(self) -> str:
+        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
+        return self.user_runtime_dir
 
-@lru_cache(maxsize=1)
-def _android_folder() -> str | None:
-    """:return: base folder for the Android OS or None if cannot be found"""
-    try:
-        # First try to get path to android app via pyjnius
-        from jnius import autoclass
 
-        Context = autoclass("android.content.Context")  # noqa: N806
-        result: str | None = Context.getFilesDir().getParentFile().getAbsolutePath()
-    except Exception:
-        # if fails find an android folder looking path on the sys.path
+@lru_cache(maxsize=1)
+def _android_folder() -> str | None:  # noqa: C901, PLR0912
+    """:return: base folder for the Android OS or None if it cannot be found"""
+    result: str | None = None
+    # type checker isn't happy with our "import android"; just don't do this when type checking, see
+    # https://stackoverflow.com/a/61394121
+    if not TYPE_CHECKING:
+        try:
+            # First try to get a path to android app using python4android (if available)...
+            from android import mActivity  # noqa: PLC0415
+
+            context = cast("android.content.Context", mActivity.getApplicationContext())  # noqa: F821
+            result = context.getFilesDir().getParentFile().getAbsolutePath()
+        except Exception:  # noqa: BLE001
+            result = None
+    if result is None:
+        try:
+            # ...and fall back to using plain pyjnius, if python4android isn't available or doesn't deliver any useful
+            # result...
+            from jnius import autoclass  # noqa: PLC0415
+
+            context = autoclass("android.content.Context")
+            result = context.getFilesDir().getParentFile().getAbsolutePath()
+        except Exception:  # noqa: BLE001
+            result = None
+    if result is None:
+        # and if that fails too, find an android folder by looking at paths on sys.path
+        # warning: only works for apps installed under /data, not adopted storage etc.
         pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
         for path in sys.path:
             if pattern.match(path):
@@ -96,6 +151,16 @@ def _android_folder() -> str | None:
                 break
         else:
             result = None
+    if result is None:
+        # one last try: find an android folder by looking at sys.path entries, taking adopted storage
+        # paths into account
+        pattern = re.compile(r"/mnt/expand/[a-fA-F0-9-]{36}/(data|user/\d+)/(.+)/files")
+        for path in sys.path:
+            if pattern.match(path):
+                result = path.split("/files")[0]
+                break
+        else:
+            result = None
     return result
 
 
@@ -104,17 +169,81 @@ def _android_documents_folder() -> str:
     """:return: documents folder for the Android OS"""
     # Get directories with pyjnius
     try:
-        from jnius import autoclass
+        from jnius import autoclass  # noqa: PLC0415
 
-        Context = autoclass("android.content.Context")  # noqa: N806
-        Environment = autoclass("android.os.Environment")  # noqa: N806
-        documents_dir: str = Context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
-    except Exception:
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
+    except Exception:  # noqa: BLE001
         documents_dir = "/storage/emulated/0/Documents"
 
     return documents_dir
 
 
+@lru_cache(maxsize=1)
+def _android_downloads_folder() -> str:
+    """:return: downloads folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass  # noqa: PLC0415
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        downloads_dir = "/storage/emulated/0/Downloads"
+
+    return downloads_dir
+
+
+@lru_cache(maxsize=1)
+def _android_pictures_folder() -> str:
+    """:return: pictures folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass  # noqa: PLC0415
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        pictures_dir = "/storage/emulated/0/Pictures"
+
+    return pictures_dir
+
+
+@lru_cache(maxsize=1)
+def _android_videos_folder() -> str:
+    """:return: videos folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass  # noqa: PLC0415
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        videos_dir = "/storage/emulated/0/DCIM/Camera"
+
+    return videos_dir
+
+
+@lru_cache(maxsize=1)
+def _android_music_folder() -> str:
+    """:return: music folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass  # noqa: PLC0415
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        music_dir = "/storage/emulated/0/Music"
+
+    return music_dir
+
+
 __all__ = [
     "Android",
 ]
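
The ``_android_folder`` fallback chain above ends with two ``sys.path`` scans.
A standalone sketch of those regexes; the path below is fabricated for
illustration::

    import re

    plain = re.compile(r"/data/(data|user/\d+)/(.+)/files")
    adopted = re.compile(r"/mnt/expand/[a-fA-F0-9-]{36}/(data|user/\d+)/(.+)/files")

    path = "/mnt/expand/01234567-89ab-cdef-0123-456789abcdef/user/0/com.myApp/files/app"
    if plain.match(path) or adopted.match(path):
        print(path.split("/files")[0])  # the app's base folder
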
diff --git a/pkg_resources/_vendor/platformdirs/api.py b/pkg_resources/_vendor/platformdirs/api.py
index 6f6e2c2c69..c50caa648a 100644
--- a/pkg_resources/_vendor/platformdirs/api.py
+++ b/pkg_resources/_vendor/platformdirs/api.py
@@ -1,28 +1,29 @@
+"""Base API."""
+
 from __future__ import annotations
 
 import os
-import sys
 from abc import ABC, abstractmethod
 from pathlib import Path
+from typing import TYPE_CHECKING
 
-if sys.version_info >= (3, 8):  # pragma: no branch
-    from typing import Literal  # pragma: no cover
+if TYPE_CHECKING:
+    from typing import Iterator, Literal
 
 
-class PlatformDirsABC(ABC):
-    """
-    Abstract base class for platform directories.
-    """
+class PlatformDirsABC(ABC):  # noqa: PLR0904
+    """Abstract base class for platform directories."""
 
-    def __init__(
+    def __init__(  # noqa: PLR0913, PLR0917
         self,
         appname: str | None = None,
         appauthor: str | None | Literal[False] = None,
         version: str | None = None,
-        roaming: bool = False,
-        multipath: bool = False,
-        opinion: bool = True,
-    ):
+        roaming: bool = False,  # noqa: FBT001, FBT002
+        multipath: bool = False,  # noqa: FBT001, FBT002
+        opinion: bool = True,  # noqa: FBT001, FBT002
+        ensure_exists: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
         """
         Create a new platform directory.
 
@@ -32,30 +33,49 @@ def __init__(
         :param roaming: See `roaming`.
         :param multipath: See `multipath`.
         :param opinion: See `opinion`.
+        :param ensure_exists: See `ensure_exists`.
+
         """
         self.appname = appname  #: The name of the application.
         self.appauthor = appauthor
         """
-        The name of the app author or distributing body for this application. Typically, it is the owning company name.
-        Defaults to `appname`. You may pass ``False`` to disable it.
+        The name of the app author or distributing body for this application.
+
+        Typically, it is the owning company name. Defaults to `appname`. You may pass ``False`` to disable it.
+
         """
         self.version = version
         """
-        An optional version path element to append to the path. You might want to use this if you want multiple versions
-        of your app to be able to run independently. If used, this would typically be ``.``.
+        An optional version path element to append to the path.
+
+        You might want to use this if you want multiple versions of your app to be able to run independently. If used,
+        this would typically be ``.``.
+
         """
         self.roaming = roaming
         """
-        Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup
-        for roaming profiles, this user data will be synced on login (see
-        `here `_).
+        Whether to use the roaming appdata directory on Windows.
+
+        That means that for users on a Windows network setup for roaming profiles, this user data will be synced on
+        login (see
+        `here `_).
+
         """
         self.multipath = multipath
         """
-        An optional parameter only applicable to Unix/Linux which indicates that the entire list of data dirs should be
-        returned. By default, the first item would only be returned.
+        An optional parameter which indicates that the entire list of data dirs should be returned.
+
+        By default, only the first item is returned.
+
         """
         self.opinion = opinion  #: A flag indicating whether to use opinionated values.
+        self.ensure_exists = ensure_exists
+        """
+        Optionally create the directory (and any missing parents) upon access if it does not exist.
+
+        By default, no directories are created.
+
+        """
 
     def _append_app_name_and_version(self, *base: str) -> str:
         params = list(base[1:])
@@ -63,7 +83,13 @@ def _append_app_name_and_version(self, *base: str) -> str:
             params.append(self.appname)
             if self.version:
                 params.append(self.version)
-        return os.path.join(base[0], *params)
+        path = os.path.join(base[0], *params)  # noqa: PTH118
+        self._optionally_create_directory(path)
+        return path
+
+    def _optionally_create_directory(self, path: str) -> None:
+        if self.ensure_exists:
+            Path(path).mkdir(parents=True, exist_ok=True)
 
     @property
     @abstractmethod
@@ -90,6 +116,11 @@ def site_config_dir(self) -> str:
     def user_cache_dir(self) -> str:
         """:return: cache directory tied to the user"""
 
+    @property
+    @abstractmethod
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users"""
+
     @property
     @abstractmethod
     def user_state_dir(self) -> str:
@@ -105,11 +136,41 @@ def user_log_dir(self) -> str:
     def user_documents_dir(self) -> str:
         """:return: documents directory tied to the user"""
 
+    @property
+    @abstractmethod
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user"""
+
     @property
     @abstractmethod
     def user_runtime_dir(self) -> str:
         """:return: runtime directory tied to the user"""
 
+    @property
+    @abstractmethod
+    def site_runtime_dir(self) -> str:
+        """:return: runtime directory shared by users"""
+
     @property
     def user_data_path(self) -> Path:
         """:return: data path tied to the user"""
@@ -135,6 +196,11 @@ def user_cache_path(self) -> Path:
         """:return: cache path tied to the user"""
         return Path(self.user_cache_dir)
 
+    @property
+    def site_cache_path(self) -> Path:
+        """:return: cache path shared by users"""
+        return Path(self.site_cache_dir)
+
     @property
     def user_state_path(self) -> Path:
         """:return: state path tied to the user"""
@@ -147,10 +213,80 @@ def user_log_path(self) -> Path:
 
     @property
     def user_documents_path(self) -> Path:
-        """:return: documents path tied to the user"""
+        """:return: documents a path tied to the user"""
         return Path(self.user_documents_dir)
 
+    @property
+    def user_downloads_path(self) -> Path:
+        """:return: downloads path tied to the user"""
+        return Path(self.user_downloads_dir)
+
+    @property
+    def user_pictures_path(self) -> Path:
+        """:return: pictures path tied to the user"""
+        return Path(self.user_pictures_dir)
+
+    @property
+    def user_videos_path(self) -> Path:
+        """:return: videos path tied to the user"""
+        return Path(self.user_videos_dir)
+
+    @property
+    def user_music_path(self) -> Path:
+        """:return: music path tied to the user"""
+        return Path(self.user_music_dir)
+
+    @property
+    def user_desktop_path(self) -> Path:
+        """:return: desktop path tied to the user"""
+        return Path(self.user_desktop_dir)
+
     @property
     def user_runtime_path(self) -> Path:
         """:return: runtime path tied to the user"""
         return Path(self.user_runtime_dir)
+
+    @property
+    def site_runtime_path(self) -> Path:
+        """:return: runtime path shared by users"""
+        return Path(self.site_runtime_dir)
+
+    def iter_config_dirs(self) -> Iterator[str]:
+        """:yield: all user and site configuration directories."""
+        yield self.user_config_dir
+        yield self.site_config_dir
+
+    def iter_data_dirs(self) -> Iterator[str]:
+        """:yield: all user and site data directories."""
+        yield self.user_data_dir
+        yield self.site_data_dir
+
+    def iter_cache_dirs(self) -> Iterator[str]:
+        """:yield: all user and site cache directories."""
+        yield self.user_cache_dir
+        yield self.site_cache_dir
+
+    def iter_runtime_dirs(self) -> Iterator[str]:
+        """:yield: all user and site runtime directories."""
+        yield self.user_runtime_dir
+        yield self.site_runtime_dir
+
+    def iter_config_paths(self) -> Iterator[Path]:
+        """:yield: all user and site configuration paths."""
+        for path in self.iter_config_dirs():
+            yield Path(path)
+
+    def iter_data_paths(self) -> Iterator[Path]:
+        """:yield: all user and site data paths."""
+        for path in self.iter_data_dirs():
+            yield Path(path)
+
+    def iter_cache_paths(self) -> Iterator[Path]:
+        """:yield: all user and site cache paths."""
+        for path in self.iter_cache_dirs():
+            yield Path(path)
+
+    def iter_runtime_paths(self) -> Iterator[Path]:
+        """:yield: all user and site runtime paths."""
+        for path in self.iter_runtime_dirs():
+            yield Path(path)
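
The new ``iter_*`` helpers yield the user candidate before the site candidate,
so a caller can take the first directory that already holds a config file
(``settings.toml`` is a hypothetical file name)::

    from platformdirs import PlatformDirs

    dirs = PlatformDirs("SuperApp", "Acme")
    config = next(
        (p / "settings.toml" for p in dirs.iter_config_paths() if (p / "settings.toml").is_file()),
        None,  # fall back to built-in defaults when no config file exists yet
    )
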
diff --git a/pkg_resources/_vendor/platformdirs/macos.py b/pkg_resources/_vendor/platformdirs/macos.py
index a01337c776..eb1ba5df1d 100644
--- a/pkg_resources/_vendor/platformdirs/macos.py
+++ b/pkg_resources/_vendor/platformdirs/macos.py
@@ -1,42 +1,78 @@
+"""macOS."""
+
 from __future__ import annotations
 
-import os
+import os.path
+import sys
 
 from .api import PlatformDirsABC
 
 
 class MacOS(PlatformDirsABC):
     """
-    Platform directories for the macOS operating system. Follows the guidance from `Apple documentation
-    `_.
-    Makes use of the `appname ` and
-    `version `.
+    Platform directories for the macOS operating system.
+
+    Follows the guidance from
+    `Apple documentation `_.
+    Makes use of the `appname `,
+    `version `,
+    `ensure_exists `.
+
     """
 
     @property
     def user_data_dir(self) -> str:
         """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
-        return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support/"))
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support"))  # noqa: PTH111
 
     @property
     def site_data_dir(self) -> str:
-        """:return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``"""
-        return self._append_app_name_and_version("/Library/Application Support")
+        """
+        :return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``.
+          If we're using a Python binary managed by `Homebrew `_, the directory
+          will be under the Homebrew prefix, e.g. ``/opt/homebrew/share/$appname/$version``.
+          If `multipath ` is enabled, and we're in Homebrew,
+          the response is a multi-path string separated by ":", e.g.
+          ``/opt/homebrew/share/$appname/$version:/Library/Application Support/$appname/$version``
+        """
+        is_homebrew = sys.prefix.startswith("/opt/homebrew")
+        path_list = [self._append_app_name_and_version("/opt/homebrew/share")] if is_homebrew else []
+        path_list.append(self._append_app_name_and_version("/Library/Application Support"))
+        if self.multipath:
+            return os.pathsep.join(path_list)
+        return path_list[0]
 
     @property
     def user_config_dir(self) -> str:
-        """:return: config directory tied to the user, e.g. ``~/Library/Preferences/$appname/$version``"""
-        return self._append_app_name_and_version(os.path.expanduser("~/Library/Preferences/"))
+        """:return: config directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
 
     @property
     def site_config_dir(self) -> str:
-        """:return: config directory shared by the users, e.g. ``/Library/Preferences/$appname``"""
-        return self._append_app_name_and_version("/Library/Preferences")
+        """:return: config directory shared by the users, same as `site_data_dir`"""
+        return self.site_data_dir
 
     @property
     def user_cache_dir(self) -> str:
         """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
-        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))  # noqa: PTH111
+
+    @property
+    def site_cache_dir(self) -> str:
+        """
+        :return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``.
+          If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
+          will be under the Homebrew prefix, e.g. ``/opt/homebrew/var/cache/$appname/$version``.
+          If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled, and we're in Homebrew,
+          the response is a multi-path string separated by ":", e.g.
+          ``/opt/homebrew/var/cache/$appname/$version:/Library/Caches/$appname/$version``
+        """
+        is_homebrew = sys.prefix.startswith("/opt/homebrew")
+        path_list = [self._append_app_name_and_version("/opt/homebrew/var/cache")] if is_homebrew else []
+        path_list.append(self._append_app_name_and_version("/Library/Caches"))
+        if self.multipath:
+            return os.pathsep.join(path_list)
+        return path_list[0]
 
     @property
     def user_state_dir(self) -> str:
@@ -46,17 +82,47 @@ def user_state_dir(self) -> str:
     @property
     def user_log_dir(self) -> str:
         """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
-        return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))  # noqa: PTH111
 
     @property
     def user_documents_dir(self) -> str:
         """:return: documents directory tied to the user, e.g. ``~/Documents``"""
-        return os.path.expanduser("~/Documents")
+        return os.path.expanduser("~/Documents")  # noqa: PTH111
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
+        return os.path.expanduser("~/Downloads")  # noqa: PTH111
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
+        return os.path.expanduser("~/Pictures")  # noqa: PTH111
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user, e.g. ``~/Movies``"""
+        return os.path.expanduser("~/Movies")  # noqa: PTH111
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user, e.g. ``~/Music``"""
+        return os.path.expanduser("~/Music")  # noqa: PTH111
+
+    @property
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
+        return os.path.expanduser("~/Desktop")  # noqa: PTH111
 
     @property
     def user_runtime_dir(self) -> str:
         """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
-        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))  # noqa: PTH111
+
+    @property
+    def site_runtime_dir(self) -> str:
+        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
+        return self.user_runtime_dir
 
 
 __all__ = [
diff --git a/pkg_resources/_vendor/platformdirs/unix.py b/pkg_resources/_vendor/platformdirs/unix.py
index 9aca5a0305..9500ade614 100644
--- a/pkg_resources/_vendor/platformdirs/unix.py
+++ b/pkg_resources/_vendor/platformdirs/unix.py
@@ -1,30 +1,36 @@
+"""Unix."""
+
 from __future__ import annotations
 
 import os
 import sys
 from configparser import ConfigParser
 from pathlib import Path
+from typing import Iterator, NoReturn
 
 from .api import PlatformDirsABC
 
-if sys.platform.startswith("linux"):  # pragma: no branch # no op check, only to please the type checker
-    from os import getuid
-else:
+if sys.platform == "win32":
+
+    def getuid() -> NoReturn:
+        msg = "should only be used on Unix"
+        raise RuntimeError(msg)
 
-    def getuid() -> int:
-        raise RuntimeError("should only be used on Linux")
+else:
+    from os import getuid
 
 
-class Unix(PlatformDirsABC):
+class Unix(PlatformDirsABC):  # noqa: PLR0904
     """
-    On Unix/Linux, we follow the
-    `XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. The spec allows
-    overriding directories with environment variables. The examples show are the default values, alongside the name of
-    the environment variable that overrides them. Makes use of the
-    `appname <platformdirs.api.PlatformDirsABC.appname>`,
-    `version <platformdirs.api.PlatformDirsABC.version>`,
-    `multipath <platformdirs.api.PlatformDirsABC.multipath>`,
-    `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    On Unix/Linux, we follow the `XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
+
+    The spec allows overriding directories with environment variables. The examples shown are the default values,
+    alongside the name of the environment variable that overrides them. Makes use of the `appname
+    <platformdirs.api.PlatformDirsABC.appname>`, `version <platformdirs.api.PlatformDirsABC.version>`, `multipath
+    <platformdirs.api.PlatformDirsABC.multipath>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
+    <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+
     """
 
     @property
@@ -35,28 +41,28 @@ def user_data_dir(self) -> str:
         """
         path = os.environ.get("XDG_DATA_HOME", "")
         if not path.strip():
-            path = os.path.expanduser("~/.local/share")
+            path = os.path.expanduser("~/.local/share")  # noqa: PTH111
         return self._append_app_name_and_version(path)
 
+    @property
+    def _site_data_dirs(self) -> list[str]:
+        path = os.environ.get("XDG_DATA_DIRS", "")
+        if not path.strip():
+            path = f"/usr/local/share{os.pathsep}/usr/share"
+        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
+
     @property
     def site_data_dir(self) -> str:
         """
         :return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
-         enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS
-         path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
+         enabled and ``XDG_DATA_DIRS`` is set and a multi path the response is also a multi path separated by the
+         OS path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
         """
         # XDG default for $XDG_DATA_DIRS; only first, if multipath is False
-        path = os.environ.get("XDG_DATA_DIRS", "")
-        if not path.strip():
-            path = f"/usr/local/share{os.pathsep}/usr/share"
-        return self._with_multi_path(path)
-
-    def _with_multi_path(self, path: str) -> str:
-        path_list = path.split(os.pathsep)
+        dirs = self._site_data_dirs
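+        # e.g. XDG_DATA_DIRS="/usr/local/share:/usr/share" with a hypothetical
+        # appname "myapp" gives ["/usr/local/share/myapp", "/usr/share/myapp"]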
         if not self.multipath:
-            path_list = path_list[0:1]
-        path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list]
-        return os.pathsep.join(path_list)
+            return dirs[0]
+        return os.pathsep.join(dirs)
 
     @property
     def user_config_dir(self) -> str:
@@ -66,21 +72,28 @@ def user_config_dir(self) -> str:
         """
         path = os.environ.get("XDG_CONFIG_HOME", "")
         if not path.strip():
-            path = os.path.expanduser("~/.config")
+            path = os.path.expanduser("~/.config")  # noqa: PTH111
         return self._append_app_name_and_version(path)
 
+    @property
+    def _site_config_dirs(self) -> list[str]:
+        path = os.environ.get("XDG_CONFIG_DIRS", "")
+        if not path.strip():
+            path = "/etc/xdg"
+        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
+
     @property
     def site_config_dir(self) -> str:
         """
         :return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
-         is enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS
-         path separator), e.g. ``/etc/xdg/$appname/$version``
+         is enabled and ``XDG_CONFIG_DIRS`` is set and a multi path the response is also a multi path separated by
+         the OS path separator), e.g. ``/etc/xdg/$appname/$version``
         """
         # XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
-        path = os.environ.get("XDG_CONFIG_DIRS", "")
-        if not path.strip():
-            path = "/etc/xdg"
-        return self._with_multi_path(path)
+        dirs = self._site_config_dirs
+        if not self.multipath:
+            return dirs[0]
+        return os.pathsep.join(dirs)
 
     @property
     def user_cache_dir(self) -> str:
@@ -90,9 +103,14 @@ def user_cache_dir(self) -> str:
         """
         path = os.environ.get("XDG_CACHE_HOME", "")
         if not path.strip():
-            path = os.path.expanduser("~/.cache")
+            path = os.path.expanduser("~/.cache")  # noqa: PTH111
         return self._append_app_name_and_version(path)
 
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``/var/cache/$appname/$version``"""
+        return self._append_app_name_and_version("/var/cache")
+
     @property
     def user_state_dir(self) -> str:
         """
@@ -101,67 +119,144 @@ def user_state_dir(self) -> str:
         """
         path = os.environ.get("XDG_STATE_HOME", "")
         if not path.strip():
-            path = os.path.expanduser("~/.local/state")
+            path = os.path.expanduser("~/.local/state")  # noqa: PTH111
         return self._append_app_name_and_version(path)
 
     @property
     def user_log_dir(self) -> str:
-        """
-        :return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it
-        """
+        """:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it"""
         path = self.user_state_dir
         if self.opinion:
-            path = os.path.join(path, "log")
+            path = os.path.join(path, "log")  # noqa: PTH118
+            self._optionally_create_directory(path)
         return path
 
     @property
     def user_documents_dir(self) -> str:
-        """
-        :return: documents directory tied to the user, e.g. ``~/Documents``
-        """
-        documents_dir = _get_user_dirs_folder("XDG_DOCUMENTS_DIR")
-        if documents_dir is None:
-            documents_dir = os.environ.get("XDG_DOCUMENTS_DIR", "").strip()
-            if not documents_dir:
-                documents_dir = os.path.expanduser("~/Documents")
+        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
+        return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents")
 
-        return documents_dir
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
+        return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads")
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
+        return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures")
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user, e.g. ``~/Videos``"""
+        return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos")
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user, e.g. ``~/Music``"""
+        return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music")
+
+    @property
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
+        return _get_user_media_dir("XDG_DESKTOP_DIR", "~/Desktop")
 
     @property
     def user_runtime_dir(self) -> str:
         """
         :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
-         ``$XDG_RUNTIME_DIR/$appname/$version``
+         ``$XDG_RUNTIME_DIR/$appname/$version``.
+
+         For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/user/$(id -u)/$appname/$version`` if it
+         exists, otherwise ``/tmp/runtime-$(id -u)/$appname/$version``, if ``$XDG_RUNTIME_DIR``
+         is not set.
+        """
+        path = os.environ.get("XDG_RUNTIME_DIR", "")
+        if not path.strip():
+            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
+                path = f"/var/run/user/{getuid()}"
+                if not Path(path).exists():
+                    path = f"/tmp/runtime-{getuid()}"  # noqa: S108
+            else:
+                path = f"/run/user/{getuid()}"
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_runtime_dir(self) -> str:
+        """
+        :return: runtime directory shared by users, e.g. ``/run/$appname/$version`` or \
+        ``$XDG_RUNTIME_DIR/$appname/$version``.
+
+        Note that this behaves almost exactly like `user_runtime_dir` if ``$XDG_RUNTIME_DIR`` is set, but will
+        fall back to paths associated to the root user instead of a regular logged-in user if it's not set.
+
+        If you wish to ensure that a logged-in root user path is returned e.g. ``/run/user/0``, use `user_runtime_dir`
+        instead.
+
+        For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/$appname/$version`` if ``$XDG_RUNTIME_DIR`` is not set.
         """
         path = os.environ.get("XDG_RUNTIME_DIR", "")
         if not path.strip():
-            path = f"/run/user/{getuid()}"
+            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
+                path = "/var/run"
+            else:
+                path = "/run"
         return self._append_app_name_and_version(path)
 
     @property
     def site_data_path(self) -> Path:
-        """:return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
+        """:return: data path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
         return self._first_item_as_path_if_multipath(self.site_data_dir)
 
     @property
     def site_config_path(self) -> Path:
-        """:return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``"""
+        """:return: config path shared by the users, returns the first item, even if ``multipath`` is set to ``True``"""
         return self._first_item_as_path_if_multipath(self.site_config_dir)
 
+    @property
+    def site_cache_path(self) -> Path:
+        """:return: cache path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_cache_dir)
+
     def _first_item_as_path_if_multipath(self, directory: str) -> Path:
         if self.multipath:
             # If multipath is True, the first path is returned.
             directory = directory.split(os.pathsep)[0]
         return Path(directory)
 
+    def iter_config_dirs(self) -> Iterator[str]:
+        """:yield: all user and site configuration directories."""
+        yield self.user_config_dir
+        yield from self._site_config_dirs
+
+    def iter_data_dirs(self) -> Iterator[str]:
+        """:yield: all user and site data directories."""
+        yield self.user_data_dir
+        yield from self._site_data_dirs
+
+
+def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str:
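+    # Resolution order: the user-dirs.dirs config file, then the raw
+    # environment variable, then the expanded tilde fallback.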
+    media_dir = _get_user_dirs_folder(env_var)
+    if media_dir is None:
+        media_dir = os.environ.get(env_var, "").strip()
+        if not media_dir:
+            media_dir = os.path.expanduser(fallback_tilde_path)  # noqa: PTH111
+
+    return media_dir
+
 
 def _get_user_dirs_folder(key: str) -> str | None:
-    """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/"""
-    user_dirs_config_path = os.path.join(Unix().user_config_dir, "user-dirs.dirs")
-    if os.path.exists(user_dirs_config_path):
+    """
+    Return directory from user-dirs.dirs config file.
+
+    See https://freedesktop.org/wiki/Software/xdg-user-dirs/.
+
+    """
+    user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs"
+    if user_dirs_config_path.exists():
         parser = ConfigParser()
 
-        with open(user_dirs_config_path) as stream:
+        with user_dirs_config_path.open() as stream:
             # Add fake section header, so ConfigParser doesn't complain
             parser.read_string(f"[top]\n{stream.read()}")
 
@@ -170,8 +265,7 @@ def _get_user_dirs_folder(key: str) -> str | None:
 
         path = parser["top"][key].strip('"')
         # Handle relative home paths
-        path = path.replace("$HOME", os.path.expanduser("~"))
-        return path
+        return path.replace("$HOME", os.path.expanduser("~"))  # noqa: PTH111
 
     return None
 
diff --git a/pkg_resources/_vendor/platformdirs/version.py b/pkg_resources/_vendor/platformdirs/version.py
index 9f6eb98e8f..6483ddce0b 100644
--- a/pkg_resources/_vendor/platformdirs/version.py
+++ b/pkg_resources/_vendor/platformdirs/version.py
@@ -1,4 +1,16 @@
 # file generated by setuptools_scm
 # don't change, don't track in version control
-__version__ = version = '2.6.2'
-__version_tuple__ = version_tuple = (2, 6, 2)
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import Tuple, Union
+    VERSION_TUPLE = Tuple[Union[int, str], ...]
+else:
+    VERSION_TUPLE = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+
+__version__ = version = '4.2.2'
+__version_tuple__ = version_tuple = (4, 2, 2)
diff --git a/pkg_resources/_vendor/platformdirs/windows.py b/pkg_resources/_vendor/platformdirs/windows.py
index d5c27b3414..d7bc96091a 100644
--- a/pkg_resources/_vendor/platformdirs/windows.py
+++ b/pkg_resources/_vendor/platformdirs/windows.py
@@ -1,23 +1,28 @@
+"""Windows."""
+
 from __future__ import annotations
 
-import ctypes
 import os
 import sys
 from functools import lru_cache
-from typing import Callable
+from typing import TYPE_CHECKING
 
 from .api import PlatformDirsABC
 
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
 
 class Windows(PlatformDirsABC):
-    """`MSDN on where to store app data files
-    `_.
-    Makes use of the
-    `appname <platformdirs.api.PlatformDirsABC.appname>`,
-    `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`,
-    `version <platformdirs.api.PlatformDirsABC.version>`,
-    `roaming <platformdirs.api.PlatformDirsABC.roaming>`,
-    `opinion <platformdirs.api.PlatformDirsABC.opinion>`."""
+    """
+    `MSDN on where to store app data files `_.
+
+    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`, `appauthor
+    <platformdirs.api.PlatformDirsABC.appauthor>`, `version <platformdirs.api.PlatformDirsABC.version>`, `roaming
+    <platformdirs.api.PlatformDirsABC.roaming>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
+    <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+
+    """
 
     @property
     def user_data_dir(self) -> str:
@@ -41,7 +46,9 @@ def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
                 params.append(opinion_value)
             if self.version:
                 params.append(self.version)
-        return os.path.join(path, *params)
+        path = os.path.join(path, *params)  # noqa: PTH118
+        self._optionally_create_directory(path)
+        return path
 
     @property
     def site_data_dir(self) -> str:
@@ -68,6 +75,12 @@ def user_cache_dir(self) -> str:
         path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
         return self._append_parts(path, opinion_value="Cache")
 
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``"""
+        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
+        return self._append_parts(path, opinion_value="Cache")
+
     @property
     def user_state_dir(self) -> str:
         """:return: state directory tied to the user, same as `user_data_dir`"""
@@ -75,35 +88,63 @@ def user_state_dir(self) -> str:
 
     @property
     def user_log_dir(self) -> str:
-        """
-        :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it
-        """
+        """:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it"""
         path = self.user_data_dir
         if self.opinion:
-            path = os.path.join(path, "Logs")
+            path = os.path.join(path, "Logs")  # noqa: PTH118
+            self._optionally_create_directory(path)
         return path
 
     @property
     def user_documents_dir(self) -> str:
-        """
-        :return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``
-        """
+        """:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``"""
         return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))
 
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``"""
+        return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS"))
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYPICTURES"))
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYVIDEO"))
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user e.g. ``%USERPROFILE%\\Music``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYMUSIC"))
+
+    @property
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user, e.g. ``%USERPROFILE%\\Desktop``"""
+        return os.path.normpath(get_win_folder("CSIDL_DESKTOPDIRECTORY"))
+
     @property
     def user_runtime_dir(self) -> str:
         """
         :return: runtime directory tied to the user, e.g.
          ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
         """
-        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))
+        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))  # noqa: PTH118
         return self._append_parts(path)
 
+    @property
+    def site_runtime_dir(self) -> str:
+        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
+        return self.user_runtime_dir
+
 
 def get_win_folder_from_env_vars(csidl_name: str) -> str:
     """Get folder from environment variables."""
-    if csidl_name == "CSIDL_PERSONAL":  # does not have an environment name
-        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents")
+    result = get_win_folder_if_csidl_name_not_env_var(csidl_name)
+    if result is not None:
+        return result
 
     env_var_name = {
         "CSIDL_APPDATA": "APPDATA",
@@ -111,31 +152,58 @@ def get_win_folder_from_env_vars(csidl_name: str) -> str:
         "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
     }.get(csidl_name)
     if env_var_name is None:
-        raise ValueError(f"Unknown CSIDL name: {csidl_name}")
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
     result = os.environ.get(env_var_name)
     if result is None:
-        raise ValueError(f"Unset environment variable: {env_var_name}")
+        msg = f"Unset environment variable: {env_var_name}"
+        raise ValueError(msg)
     return result
 
 
+def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None:
+    """Get a folder for a CSIDL name that does not exist as an environment variable."""
+    if csidl_name == "CSIDL_PERSONAL":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_DOWNLOADS":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYPICTURES":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYVIDEO":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYMUSIC":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music")  # noqa: PTH118
+    return None
+
+
 def get_win_folder_from_registry(csidl_name: str) -> str:
-    """Get folder from the registry.
+    """
+    Get folder from the registry.
+
+    This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer
+    for all CSIDL_* names.
 
-    This is a fallback technique at best. I'm not sure if using the
-    registry for this guarantees us the correct answer for all CSIDL_*
-    names.
     """
     shell_folder_name = {
         "CSIDL_APPDATA": "AppData",
         "CSIDL_COMMON_APPDATA": "Common AppData",
         "CSIDL_LOCAL_APPDATA": "Local AppData",
         "CSIDL_PERSONAL": "Personal",
+        "CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}",
+        "CSIDL_MYPICTURES": "My Pictures",
+        "CSIDL_MYVIDEO": "My Video",
+        "CSIDL_MYMUSIC": "My Music",
     }.get(csidl_name)
     if shell_folder_name is None:
-        raise ValueError(f"Unknown CSIDL name: {csidl_name}")
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
     if sys.platform != "win32":  # only needed for mypy type checker to know that this code runs only on Windows
         raise NotImplementedError
-    import winreg
+    import winreg  # noqa: PLC0415
 
     key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
     directory, _ = winreg.QueryValueEx(key, shell_folder_name)
@@ -144,33 +212,53 @@ def get_win_folder_from_registry(csidl_name: str) -> str:
 
 def get_win_folder_via_ctypes(csidl_name: str) -> str:
     """Get folder with ctypes."""
+    # There is no 'CSIDL_DOWNLOADS'.
+    # Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead.
+    # https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
+
+    import ctypes  # noqa: PLC0415
+
     csidl_const = {
         "CSIDL_APPDATA": 26,
         "CSIDL_COMMON_APPDATA": 35,
         "CSIDL_LOCAL_APPDATA": 28,
         "CSIDL_PERSONAL": 5,
+        "CSIDL_MYPICTURES": 39,
+        "CSIDL_MYVIDEO": 14,
+        "CSIDL_MYMUSIC": 13,
+        "CSIDL_DOWNLOADS": 40,
+        "CSIDL_DESKTOPDIRECTORY": 16,
     }.get(csidl_name)
     if csidl_const is None:
-        raise ValueError(f"Unknown CSIDL name: {csidl_name}")
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
 
     buf = ctypes.create_unicode_buffer(1024)
     windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
     windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
 
-    # Downgrade to short path name if it has highbit chars.
-    if any(ord(c) > 255 for c in buf):
+    # Downgrade to short path name if it has high-bit chars.
+    if any(ord(c) > 255 for c in buf):  # noqa: PLR2004
         buf2 = ctypes.create_unicode_buffer(1024)
         if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
             buf = buf2
 
+    if csidl_name == "CSIDL_DOWNLOADS":
+        return os.path.join(buf.value, "Downloads")  # noqa: PTH118
+
     return buf.value
 
 
 def _pick_get_win_folder() -> Callable[[str], str]:
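+    # Preference order: ctypes, then the registry, then plain environment
+    # variables as a last resort.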
-    if hasattr(ctypes, "windll"):
-        return get_win_folder_via_ctypes
     try:
-        import winreg  # noqa: F401
+        import ctypes  # noqa: PLC0415
+    except ImportError:
+        pass
+    else:
+        if hasattr(ctypes, "windll"):
+            return get_win_folder_via_ctypes
+    try:
+        import winreg  # noqa: PLC0415, F401
     except ImportError:
         return get_win_folder_from_env_vars
     else:
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/INSTALLER b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/LICENSE b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/LICENSE
new file mode 100644
index 0000000000..07806f8af9
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+This is the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+Copyright (c) Alex Grönholm
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this
+software and associated documentation files (the "Software"), to deal in the Software
+without restriction, including without limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
+to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/METADATA b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/METADATA
new file mode 100644
index 0000000000..6e5750b485
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/METADATA
@@ -0,0 +1,81 @@
+Metadata-Version: 2.1
+Name: typeguard
+Version: 4.3.0
+Summary: Run-time type checker for Python
+Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
+License: MIT
+Project-URL: Documentation, https://typeguard.readthedocs.io/en/latest/
+Project-URL: Change log, https://typeguard.readthedocs.io/en/latest/versionhistory.html
+Project-URL: Source code, https://github.com/agronholm/typeguard
+Project-URL: Issue tracker, https://github.com/agronholm/typeguard/issues
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: typing-extensions >=4.10.0
+Requires-Dist: importlib-metadata >=3.6 ; python_version < "3.10"
+Provides-Extra: doc
+Requires-Dist: packaging ; extra == 'doc'
+Requires-Dist: Sphinx >=7 ; extra == 'doc'
+Requires-Dist: sphinx-autodoc-typehints >=1.2.0 ; extra == 'doc'
+Requires-Dist: sphinx-rtd-theme >=1.3.0 ; extra == 'doc'
+Provides-Extra: test
+Requires-Dist: coverage[toml] >=7 ; extra == 'test'
+Requires-Dist: pytest >=7 ; extra == 'test'
+Requires-Dist: mypy >=1.2.0 ; (platform_python_implementation != "PyPy") and extra == 'test'
+
+.. image:: https://github.com/agronholm/typeguard/actions/workflows/test.yml/badge.svg
+  :target: https://github.com/agronholm/typeguard/actions/workflows/test.yml
+  :alt: Build Status
+.. image:: https://coveralls.io/repos/agronholm/typeguard/badge.svg?branch=master&service=github
+  :target: https://coveralls.io/github/agronholm/typeguard?branch=master
+  :alt: Code Coverage
+.. image:: https://readthedocs.org/projects/typeguard/badge/?version=latest
+  :target: https://typeguard.readthedocs.io/en/latest/?badge=latest
+  :alt: Documentation
+
+This library provides run-time type checking for functions defined with
+`PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ argument (and return) type
+annotations, and any arbitrary objects. It can be used together with static type
+checkers as an additional layer of type safety, to catch type violations that could only
+be detected at run time.
+
+Two principal ways to do type checking are provided:
+
+#. The ``check_type`` function:
+
+   * like ``isinstance()``, but supports arbitrary type annotations (within limits)
+   * can be used as a ``cast()`` replacement, but with actual checking of the value
+#. Code instrumentation:
+
+   * entire modules, or individual functions (via ``@typechecked``) are recompiled, with
+     type checking code injected into them
+   * automatically checks function arguments, return values and assignments to annotated
+     local variables
+   * for generator functions (regular and async), checks yield and send values
+   * requires the original source code of the instrumented module(s) to be accessible
+
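+A rough sketch of both approaches (an editor's illustration, not part of the
+upstream README; ``greet`` is an invented example)::
+
+    from typing import List
+    from typeguard import check_type, typechecked
+
+    check_type([1, 2], List[int])  # passes; raises TypeCheckError on mismatch
+
+    @typechecked
+    def greet(name: str) -> str:
+        return "Hello, " + name
+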
+Two options are provided for code instrumentation:
+
+#. the ``@typechecked`` function:
+
+   * can be applied to functions individually
+#. the import hook (``typeguard.install_import_hook()``):
+
+   * automatically instruments targeted modules on import
+   * no manual code changes required in the target modules
+   * requires the import hook to be installed before the targeted modules are imported
+   * may clash with other import hooks
+
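+A typical import-hook pattern (again an illustrative sketch; ``mypackage`` is
+a placeholder name)::
+
+    from typeguard import install_import_hook
+
+    install_import_hook("mypackage")  # must run before mypackage is imported
+    import mypackage
+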
+See the documentation_ for further information.
+
+.. _documentation: https://typeguard.readthedocs.io/en/latest/
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/RECORD b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/RECORD
new file mode 100644
index 0000000000..801e73347c
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/RECORD
@@ -0,0 +1,34 @@
+typeguard-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+typeguard-4.3.0.dist-info/LICENSE,sha256=YWP3mH37ONa8MgzitwsvArhivEESZRbVUu8c1DJH51g,1130
+typeguard-4.3.0.dist-info/METADATA,sha256=z2dcHAp0TwhYCFU5Deh8x31nazElgujUz9tbuP0pjSE,3717
+typeguard-4.3.0.dist-info/RECORD,,
+typeguard-4.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+typeguard-4.3.0.dist-info/entry_points.txt,sha256=qp7NQ1aLtiSgMQqo6gWlfGpy0IIXzoMJmeQTLpzqFZQ,48
+typeguard-4.3.0.dist-info/top_level.txt,sha256=4z28AhuDodwRS_c1J_l8H51t5QuwfTseskYzlxp6grs,10
+typeguard/__init__.py,sha256=Onh4w38elPCjtlcU3JY9k3h70NjsxXIkAflmQn-Z0FY,2071
+typeguard/__pycache__/__init__.cpython-312.pyc,,
+typeguard/__pycache__/_checkers.cpython-312.pyc,,
+typeguard/__pycache__/_config.cpython-312.pyc,,
+typeguard/__pycache__/_decorators.cpython-312.pyc,,
+typeguard/__pycache__/_exceptions.cpython-312.pyc,,
+typeguard/__pycache__/_functions.cpython-312.pyc,,
+typeguard/__pycache__/_importhook.cpython-312.pyc,,
+typeguard/__pycache__/_memo.cpython-312.pyc,,
+typeguard/__pycache__/_pytest_plugin.cpython-312.pyc,,
+typeguard/__pycache__/_suppression.cpython-312.pyc,,
+typeguard/__pycache__/_transformer.cpython-312.pyc,,
+typeguard/__pycache__/_union_transformer.cpython-312.pyc,,
+typeguard/__pycache__/_utils.cpython-312.pyc,,
+typeguard/_checkers.py,sha256=JRrgKicdOEfIBoNEtegYCEIlhpad-a1u1Em7GCj0WCI,31360
+typeguard/_config.py,sha256=nIz8QwDa-oFO3L9O8_6srzlmd99pSby2wOM4Wb7F_B0,2846
+typeguard/_decorators.py,sha256=v6dsIeWvPhExGLP_wXF-RmDUyjZf_Ak28g7gBJ_v0-0,9033
+typeguard/_exceptions.py,sha256=ZIPeiV-FBd5Emw2EaWd2Fvlsrwi4ocwT2fVGBIAtHcQ,1121
+typeguard/_functions.py,sha256=ibgSAKa5ptIm1eR9ARG0BSozAFJPFNASZqhPVyQeqig,10393
+typeguard/_importhook.py,sha256=ugjCDvFcdWMU7UugqlJG91IpVNpEIxtRr-99s0h1k7M,6389
+typeguard/_memo.py,sha256=1juQV_vxnD2JYKbSrebiQuj4oKHz6n67v9pYA-CCISg,1303
+typeguard/_pytest_plugin.py,sha256=-fcSqkv54rIfIF8pDavY5YQPkj4OX8GMt_lL7CQSD4I,4416
+typeguard/_suppression.py,sha256=VQfzxcwIbu3if0f7VBkKM7hkYOA7tNFw9a7jMBsmMg4,2266
+typeguard/_transformer.py,sha256=9Ha7_QhdwoUni_6hvdY-hZbuEergowHrNL2vzHIakFY,44937
+typeguard/_union_transformer.py,sha256=v_42r7-6HuRX2SoFwnyJ-E5PlxXpVeUJPJR1-HU9qSo,1354
+typeguard/_utils.py,sha256=5HhO1rPn5f1M6ymkVAEv7Xmlz1cX-j0OnTMlyHqqrR8,5270
+typeguard/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/WHEEL b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/entry_points.txt b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
new file mode 100644
index 0000000000..47c9d0bd91
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[pytest11]
+typeguard = typeguard._pytest_plugin
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/top_level.txt b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..be5ec23ea2
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+typeguard
diff --git a/pkg_resources/_vendor/typeguard/__init__.py b/pkg_resources/_vendor/typeguard/__init__.py
new file mode 100644
index 0000000000..6781cad094
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/__init__.py
@@ -0,0 +1,48 @@
+import os
+from typing import Any
+
+from ._checkers import TypeCheckerCallable as TypeCheckerCallable
+from ._checkers import TypeCheckLookupCallback as TypeCheckLookupCallback
+from ._checkers import check_type_internal as check_type_internal
+from ._checkers import checker_lookup_functions as checker_lookup_functions
+from ._checkers import load_plugins as load_plugins
+from ._config import CollectionCheckStrategy as CollectionCheckStrategy
+from ._config import ForwardRefPolicy as ForwardRefPolicy
+from ._config import TypeCheckConfiguration as TypeCheckConfiguration
+from ._decorators import typechecked as typechecked
+from ._decorators import typeguard_ignore as typeguard_ignore
+from ._exceptions import InstrumentationWarning as InstrumentationWarning
+from ._exceptions import TypeCheckError as TypeCheckError
+from ._exceptions import TypeCheckWarning as TypeCheckWarning
+from ._exceptions import TypeHintWarning as TypeHintWarning
+from ._functions import TypeCheckFailCallback as TypeCheckFailCallback
+from ._functions import check_type as check_type
+from ._functions import warn_on_error as warn_on_error
+from ._importhook import ImportHookManager as ImportHookManager
+from ._importhook import TypeguardFinder as TypeguardFinder
+from ._importhook import install_import_hook as install_import_hook
+from ._memo import TypeCheckMemo as TypeCheckMemo
+from ._suppression import suppress_type_checks as suppress_type_checks
+from ._utils import Unset as Unset
+
+# Re-export imports so they look like they live directly in this package
+for value in list(locals().values()):
+    if getattr(value, "__module__", "").startswith(f"{__name__}."):
+        value.__module__ = __name__
+
+
+config: TypeCheckConfiguration
+
+
+def __getattr__(name: str) -> Any:
+    if name == "config":
+        from ._config import global_config
+
+        return global_config
+
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+
+# Automatically load checker lookup functions unless explicitly disabled
+if "TYPEGUARD_DISABLE_PLUGIN_AUTOLOAD" not in os.environ:
+    load_plugins()
diff --git a/pkg_resources/_vendor/typeguard/_checkers.py b/pkg_resources/_vendor/typeguard/_checkers.py
new file mode 100644
index 0000000000..67dd5ad4dc
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_checkers.py
@@ -0,0 +1,993 @@
+from __future__ import annotations
+
+import collections.abc
+import inspect
+import sys
+import types
+import typing
+import warnings
+from enum import Enum
+from inspect import Parameter, isclass, isfunction
+from io import BufferedIOBase, IOBase, RawIOBase, TextIOBase
+from textwrap import indent
+from typing import (
+    IO,
+    AbstractSet,
+    Any,
+    BinaryIO,
+    Callable,
+    Dict,
+    ForwardRef,
+    List,
+    Mapping,
+    MutableMapping,
+    NewType,
+    Optional,
+    Sequence,
+    Set,
+    TextIO,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
+from unittest.mock import Mock
+from weakref import WeakKeyDictionary
+
+try:
+    import typing_extensions
+except ImportError:
+    typing_extensions = None  # type: ignore[assignment]
+
+# Must use this because typing.is_typeddict does not recognize
+# TypedDict from typing_extensions, and as of version 4.12.0
+# typing_extensions.TypedDict is different from typing.TypedDict
+# on all versions.
+from typing_extensions import is_typeddict
+
+from ._config import ForwardRefPolicy
+from ._exceptions import TypeCheckError, TypeHintWarning
+from ._memo import TypeCheckMemo
+from ._utils import evaluate_forwardref, get_stacklevel, get_type_name, qualified_name
+
+if sys.version_info >= (3, 11):
+    from typing import (
+        Annotated,
+        NotRequired,
+        TypeAlias,
+        get_args,
+        get_origin,
+    )
+
+    SubclassableAny = Any
+else:
+    from typing_extensions import (
+        Annotated,
+        NotRequired,
+        TypeAlias,
+        get_args,
+        get_origin,
+    )
+    from typing_extensions import Any as SubclassableAny
+
+if sys.version_info >= (3, 10):
+    from importlib.metadata import entry_points
+    from typing import ParamSpec
+else:
+    from importlib_metadata import entry_points
+    from typing_extensions import ParamSpec
+
+TypeCheckerCallable: TypeAlias = Callable[
+    [Any, Any, Tuple[Any, ...], TypeCheckMemo], Any
+]
+TypeCheckLookupCallback: TypeAlias = Callable[
+    [Any, Tuple[Any, ...], Tuple[Any, ...]], Optional[TypeCheckerCallable]
+]
+
+checker_lookup_functions: list[TypeCheckLookupCallback] = []
+generic_alias_types: tuple[type, ...] = (type(List), type(List[Any]))
+if sys.version_info >= (3, 9):
+    generic_alias_types += (types.GenericAlias,)
+
+protocol_check_cache: WeakKeyDictionary[
+    type[Any], dict[type[Any], TypeCheckError | None]
+] = WeakKeyDictionary()
+
+# Sentinel
+_missing = object()
+
+# Lifted from mypy.sharedparse
+BINARY_MAGIC_METHODS = {
+    "__add__",
+    "__and__",
+    "__cmp__",
+    "__divmod__",
+    "__div__",
+    "__eq__",
+    "__floordiv__",
+    "__ge__",
+    "__gt__",
+    "__iadd__",
+    "__iand__",
+    "__idiv__",
+    "__ifloordiv__",
+    "__ilshift__",
+    "__imatmul__",
+    "__imod__",
+    "__imul__",
+    "__ior__",
+    "__ipow__",
+    "__irshift__",
+    "__isub__",
+    "__itruediv__",
+    "__ixor__",
+    "__le__",
+    "__lshift__",
+    "__lt__",
+    "__matmul__",
+    "__mod__",
+    "__mul__",
+    "__ne__",
+    "__or__",
+    "__pow__",
+    "__radd__",
+    "__rand__",
+    "__rdiv__",
+    "__rfloordiv__",
+    "__rlshift__",
+    "__rmatmul__",
+    "__rmod__",
+    "__rmul__",
+    "__ror__",
+    "__rpow__",
+    "__rrshift__",
+    "__rshift__",
+    "__rsub__",
+    "__rtruediv__",
+    "__rxor__",
+    "__sub__",
+    "__truediv__",
+    "__xor__",
+}
+
+
+def check_callable(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not callable(value):
+        raise TypeCheckError("is not callable")
+
+    if args:
+        try:
+            signature = inspect.signature(value)
+        except (TypeError, ValueError):
+            return
+
+        argument_types = args[0]
+        if isinstance(argument_types, list) and not any(
+            type(item) is ParamSpec for item in argument_types
+        ):
+            # The callable must not have keyword-only arguments without defaults
+            unfulfilled_kwonlyargs = [
+                param.name
+                for param in signature.parameters.values()
+                if param.kind == Parameter.KEYWORD_ONLY
+                and param.default == Parameter.empty
+            ]
+            if unfulfilled_kwonlyargs:
+                raise TypeCheckError(
+                    f"has mandatory keyword-only arguments in its declaration: "
+                    f'{", ".join(unfulfilled_kwonlyargs)}'
+                )
+
+            num_positional_args = num_mandatory_pos_args = 0
+            has_varargs = False
+            for param in signature.parameters.values():
+                if param.kind in (
+                    Parameter.POSITIONAL_ONLY,
+                    Parameter.POSITIONAL_OR_KEYWORD,
+                ):
+                    num_positional_args += 1
+                    if param.default is Parameter.empty:
+                        num_mandatory_pos_args += 1
+                elif param.kind == Parameter.VAR_POSITIONAL:
+                    has_varargs = True
+
+            if num_mandatory_pos_args > len(argument_types):
+                raise TypeCheckError(
+                    f"has too many mandatory positional arguments in its declaration; "
+                    f"expected {len(argument_types)} but {num_mandatory_pos_args} "
+                    f"mandatory positional argument(s) declared"
+                )
+            elif not has_varargs and num_positional_args < len(argument_types):
+                raise TypeCheckError(
+                    f"has too few arguments in its declaration; expected "
+                    f"{len(argument_types)} but {num_positional_args} argument(s) "
+                    f"declared"
+                )
+
+
+def check_mapping(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if origin_type is Dict or origin_type is dict:
+        if not isinstance(value, dict):
+            raise TypeCheckError("is not a dict")
+    if origin_type is MutableMapping or origin_type is collections.abc.MutableMapping:
+        if not isinstance(value, collections.abc.MutableMapping):
+            raise TypeCheckError("is not a mutable mapping")
+    elif not isinstance(value, collections.abc.Mapping):
+        raise TypeCheckError("is not a mapping")
+
+    if args:
+        key_type, value_type = args
+        if key_type is not Any or value_type is not Any:
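+            # The configured collection_check_strategy decides how many pairs
+            # are inspected (the first item only, or all of them).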
+            samples = memo.config.collection_check_strategy.iterate_samples(
+                value.items()
+            )
+            for k, v in samples:
+                try:
+                    check_type_internal(k, key_type, memo)
+                except TypeCheckError as exc:
+                    exc.append_path_element(f"key {k!r}")
+                    raise
+
+                try:
+                    check_type_internal(v, value_type, memo)
+                except TypeCheckError as exc:
+                    exc.append_path_element(f"value of key {k!r}")
+                    raise
+
+
+def check_typed_dict(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, dict):
+        raise TypeCheckError("is not a dict")
+
+    declared_keys = frozenset(origin_type.__annotations__)
+    if hasattr(origin_type, "__required_keys__"):
+        required_keys = set(origin_type.__required_keys__)
+    else:  # py3.8 and lower
+        required_keys = set(declared_keys) if origin_type.__total__ else set()
+
+    existing_keys = set(value)
+    extra_keys = existing_keys - declared_keys
+    if extra_keys:
+        keys_formatted = ", ".join(f'"{key}"' for key in sorted(extra_keys, key=repr))
+        raise TypeCheckError(f"has unexpected extra key(s): {keys_formatted}")
+
+    # Detect NotRequired fields which are hidden by get_type_hints()
+    type_hints: dict[str, type] = {}
+    for key, annotation in origin_type.__annotations__.items():
+        if isinstance(annotation, ForwardRef):
+            annotation = evaluate_forwardref(annotation, memo)
+            if get_origin(annotation) is NotRequired:
+                required_keys.discard(key)
+                annotation = get_args(annotation)[0]
+
+        type_hints[key] = annotation
+
+    missing_keys = required_keys - existing_keys
+    if missing_keys:
+        keys_formatted = ", ".join(f'"{key}"' for key in sorted(missing_keys, key=repr))
+        raise TypeCheckError(f"is missing required key(s): {keys_formatted}")
+
+    for key, argtype in type_hints.items():
+        argvalue = value.get(key, _missing)
+        if argvalue is not _missing:
+            try:
+                check_type_internal(argvalue, argtype, memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"value of key {key!r}")
+                raise
+
+
+def check_list(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, list):
+        raise TypeCheckError("is not a list")
+
+    if args and args != (Any,):
+        samples = memo.config.collection_check_strategy.iterate_samples(value)
+        for i, v in enumerate(samples):
+            try:
+                check_type_internal(v, args[0], memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"item {i}")
+                raise
+
+
+def check_sequence(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, collections.abc.Sequence):
+        raise TypeCheckError("is not a sequence")
+
+    if args and args != (Any,):
+        samples = memo.config.collection_check_strategy.iterate_samples(value)
+        for i, v in enumerate(samples):
+            try:
+                check_type_internal(v, args[0], memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"item {i}")
+                raise
+
+
+def check_set(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if origin_type is frozenset:
+        if not isinstance(value, frozenset):
+            raise TypeCheckError("is not a frozenset")
+    elif not isinstance(value, AbstractSet):
+        raise TypeCheckError("is not a set")
+
+    if args and args != (Any,):
+        samples = memo.config.collection_check_strategy.iterate_samples(value)
+        for v in samples:
+            try:
+                check_type_internal(v, args[0], memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"[{v}]")
+                raise
+
+
+def check_tuple(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    # Specialized check for NamedTuples
+    if field_types := getattr(origin_type, "__annotations__", None):
+        if not isinstance(value, origin_type):
+            raise TypeCheckError(
+                f"is not a named tuple of type {qualified_name(origin_type)}"
+            )
+
+        for name, field_type in field_types.items():
+            try:
+                check_type_internal(getattr(value, name), field_type, memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"attribute {name!r}")
+                raise
+
+        return
+    elif not isinstance(value, tuple):
+        raise TypeCheckError("is not a tuple")
+
+    if args:
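+        # A trailing Ellipsis (Tuple[X, ...]) means any number of X items.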
+        use_ellipsis = args[-1] is Ellipsis
+        tuple_params = args[: -1 if use_ellipsis else None]
+    else:
+        # Unparametrized Tuple or plain tuple
+        return
+
+    if use_ellipsis:
+        element_type = tuple_params[0]
+        samples = memo.config.collection_check_strategy.iterate_samples(value)
+        for i, element in enumerate(samples):
+            try:
+                check_type_internal(element, element_type, memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"item {i}")
+                raise
+    elif tuple_params == ((),):
+        if value != ():
+            raise TypeCheckError("is not an empty tuple")
+    else:
+        if len(value) != len(tuple_params):
+            raise TypeCheckError(
+                f"has wrong number of elements (expected {len(tuple_params)}, got "
+                f"{len(value)} instead)"
+            )
+
+        for i, (element, element_type) in enumerate(zip(value, tuple_params)):
+            try:
+                check_type_internal(element, element_type, memo)
+            except TypeCheckError as exc:
+                exc.append_path_element(f"item {i}")
+                raise
+
+
+def check_union(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    errors: dict[str, TypeCheckError] = {}
+    try:
+        for type_ in args:
+            try:
+                check_type_internal(value, type_, memo)
+                return
+            except TypeCheckError as exc:
+                errors[get_type_name(type_)] = exc
+
+        formatted_errors = indent(
+            "\n".join(f"{key}: {error}" for key, error in errors.items()), "  "
+        )
+    finally:
+        del errors  # avoid creating ref cycle
+    raise TypeCheckError(f"did not match any element in the union:\n{formatted_errors}")
+
+
+def check_uniontype(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    errors: dict[str, TypeCheckError] = {}
+    for type_ in args:
+        try:
+            check_type_internal(value, type_, memo)
+            return
+        except TypeCheckError as exc:
+            errors[get_type_name(type_)] = exc
+
+    formatted_errors = indent(
+        "\n".join(f"{key}: {error}" for key, error in errors.items()), "  "
+    )
+    raise TypeCheckError(f"did not match any element in the union:\n{formatted_errors}")
+
+
+def check_class(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isclass(value) and not isinstance(value, generic_alias_types):
+        raise TypeCheckError("is not a class")
+
+    if not args:
+        return
+
+    if isinstance(args[0], ForwardRef):
+        expected_class = evaluate_forwardref(args[0], memo)
+    else:
+        expected_class = args[0]
+
+    if expected_class is Any:
+        return
+    elif getattr(expected_class, "_is_protocol", False):
+        check_protocol(value, expected_class, (), memo)
+    elif isinstance(expected_class, TypeVar):
+        check_typevar(value, expected_class, (), memo, subclass_check=True)
+    elif get_origin(expected_class) is Union:
+        errors: dict[str, TypeCheckError] = {}
+        for arg in get_args(expected_class):
+            if arg is Any:
+                return
+
+            try:
+                check_class(value, type, (arg,), memo)
+                return
+            except TypeCheckError as exc:
+                errors[get_type_name(arg)] = exc
+        else:
+            formatted_errors = indent(
+                "\n".join(f"{key}: {error}" for key, error in errors.items()), "  "
+            )
+            raise TypeCheckError(
+                f"did not match any element in the union:\n{formatted_errors}"
+            )
+    elif not issubclass(value, expected_class):  # type: ignore[arg-type]
+        raise TypeCheckError(f"is not a subclass of {qualified_name(expected_class)}")
+
+
+def check_newtype(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    check_type_internal(value, origin_type.__supertype__, memo)
+
+
+def check_instance(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, origin_type):
+        raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}")
+
+
+def check_typevar(
+    value: Any,
+    origin_type: TypeVar,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+    *,
+    subclass_check: bool = False,
+) -> None:
+    if origin_type.__bound__ is not None:
+        annotation = (
+            Type[origin_type.__bound__] if subclass_check else origin_type.__bound__
+        )
+        check_type_internal(value, annotation, memo)
+    elif origin_type.__constraints__:
+        for constraint in origin_type.__constraints__:
+            annotation = Type[constraint] if subclass_check else constraint
+            try:
+                check_type_internal(value, annotation, memo)
+            except TypeCheckError:
+                pass
+            else:
+                break
+        else:
+            formatted_constraints = ", ".join(
+                get_type_name(constraint) for constraint in origin_type.__constraints__
+            )
+            raise TypeCheckError(
+                f"does not match any of the constraints " f"({formatted_constraints})"
+            )
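+
+# Illustrative sketch (not from the upstream source): constrained TypeVars are
+# checked against each constraint in turn.
+#
+#     from typing import TypeVar
+#     AnyStr = TypeVar("AnyStr", str, bytes)
+#     check_type("x", AnyStr)   # passes on the str constraint
+#     check_type(1, AnyStr)     # raises: "does not match any of the
+#                               # constraints (str, bytes)"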
+
+
+if typing_extensions is None:
+
+    def _is_literal_type(typ: object) -> bool:
+        return typ is typing.Literal
+
+else:
+
+    def _is_literal_type(typ: object) -> bool:
+        return typ is typing.Literal or typ is typing_extensions.Literal
+
+
+def check_literal(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    def get_literal_args(literal_args: tuple[Any, ...]) -> tuple[Any, ...]:
+        retval: list[Any] = []
+        for arg in literal_args:
+            if _is_literal_type(get_origin(arg)):
+                retval.extend(get_literal_args(arg.__args__))
+            elif arg is None or isinstance(arg, (int, str, bytes, bool, Enum)):
+                retval.append(arg)
+            else:
+                raise TypeError(
+                    f"Illegal literal value: {arg}"
+                )  # TypeError here is deliberate
+
+        return tuple(retval)
+
+    final_args = tuple(get_literal_args(args))
+    try:
+        index = final_args.index(value)
+    except ValueError:
+        pass
+    else:
+        if type(final_args[index]) is type(value):
+            return
+
+    formatted_args = ", ".join(repr(arg) for arg in final_args)
+    raise TypeCheckError(f"is not any of ({formatted_args})") from None
+
+
+def check_literal_string(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    check_type_internal(value, str, memo)
+
+
+def check_typeguard(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    check_type_internal(value, bool, memo)
+
+
+def check_none(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if value is not None:
+        raise TypeCheckError("is not None")
+
+
+def check_number(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if origin_type is complex and not isinstance(value, (complex, float, int)):
+        raise TypeCheckError("is neither complex, float nor int")
+    elif origin_type is float and not isinstance(value, (float, int)):
+        raise TypeCheckError("is neither float nor int")
+
+
+def check_io(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if origin_type is TextIO or (origin_type is IO and args == (str,)):
+        if not isinstance(value, TextIOBase):
+            raise TypeCheckError("is not a text based I/O object")
+    elif origin_type is BinaryIO or (origin_type is IO and args == (bytes,)):
+        if not isinstance(value, (RawIOBase, BufferedIOBase)):
+            raise TypeCheckError("is not a binary I/O object")
+    elif not isinstance(value, IOBase):
+        raise TypeCheckError("is not an I/O object")
+
+
+def check_protocol(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    subject: type[Any] = value if isclass(value) else type(value)
+
+    if subject in protocol_check_cache:
+        result_map = protocol_check_cache[subject]
+        if origin_type in result_map:
+            if exc := result_map[origin_type]:
+                raise exc
+            else:
+                return
+
+    # Collect a set of methods and non-method attributes present in the protocol
+    ignored_attrs = set(dir(typing.Protocol)) | {
+        "__annotations__",
+        "__non_callable_proto_members__",
+    }
+    expected_methods: dict[str, tuple[Any, Any]] = {}
+    expected_noncallable_members: dict[str, Any] = {}
+    for attrname in dir(origin_type):
+        # Skip attributes present in typing.Protocol
+        if attrname in ignored_attrs:
+            continue
+
+        member = getattr(origin_type, attrname)
+        if callable(member):
+            signature = inspect.signature(member)
+            argtypes = [
+                (p.annotation if p.annotation is not Parameter.empty else Any)
+                for p in signature.parameters.values()
+                if p.kind is not Parameter.KEYWORD_ONLY
+            ] or Ellipsis
+            return_annotation = (
+                signature.return_annotation
+                if signature.return_annotation is not Parameter.empty
+                else Any
+            )
+            expected_methods[attrname] = argtypes, return_annotation
+        else:
+            expected_noncallable_members[attrname] = member
+
+    for attrname, annotation in typing.get_type_hints(origin_type).items():
+        expected_noncallable_members[attrname] = annotation
+
+    subject_annotations = typing.get_type_hints(subject)
+
+    # Check that all required methods are present and their signatures are compatible
+    result_map = protocol_check_cache.setdefault(subject, {})
+    try:
+        for attrname, callable_args in expected_methods.items():
+            try:
+                method = getattr(subject, attrname)
+            except AttributeError:
+                if attrname in subject_annotations:
+                    raise TypeCheckError(
+                        f"is not compatible with the {origin_type.__qualname__} protocol "
+                        f"because its {attrname!r} attribute is not a method"
+                    ) from None
+                else:
+                    raise TypeCheckError(
+                        f"is not compatible with the {origin_type.__qualname__} protocol "
+                        f"because it has no method named {attrname!r}"
+                    ) from None
+
+            if not callable(method):
+                raise TypeCheckError(
+                    f"is not compatible with the {origin_type.__qualname__} protocol "
+                    f"because its {attrname!r} attribute is not a callable"
+                )
+
+            # TODO: raise exception on added keyword-only arguments without defaults
+            try:
+                check_callable(method, Callable, callable_args, memo)
+            except TypeCheckError as exc:
+                raise TypeCheckError(
+                    f"is not compatible with the {origin_type.__qualname__} protocol "
+                    f"because its {attrname!r} method {exc}"
+                ) from None
+
+        # Check that all required non-callable members are present
+        for attrname in expected_noncallable_members:
+            # TODO: implement assignability checks for non-callable members
+            if attrname not in subject_annotations and not hasattr(subject, attrname):
+                raise TypeCheckError(
+                    f"is not compatible with the {origin_type.__qualname__} protocol "
+                    f"because it has no attribute named {attrname!r}"
+                )
+    except TypeCheckError as exc:
+        result_map[origin_type] = exc
+        raise
+    else:
+        result_map[origin_type] = None
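+
+# Illustrative sketch (not from the upstream source): structural checking
+# against a Protocol, with verdicts cached per class in protocol_check_cache.
+#
+#     from typing import Protocol
+#
+#     class HasClose(Protocol):
+#         def close(self) -> None: ...
+#
+#     import io
+#     check_type(io.BytesIO(), HasClose)  # passes: a compatible close() exists
+#     check_type(object(), HasClose)      # raises: "... has no method named 'close'"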
+
+
+def check_byteslike(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, (bytearray, bytes, memoryview)):
+        raise TypeCheckError("is not bytes-like")
+
+
+def check_self(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if memo.self_type is None:
+        raise TypeCheckError("cannot be checked against Self outside of a method call")
+
+    if isclass(value):
+        if not issubclass(value, memo.self_type):
+            raise TypeCheckError(
+                f"is not an instance of the self type "
+                f"({qualified_name(memo.self_type)})"
+            )
+    elif not isinstance(value, memo.self_type):
+        raise TypeCheckError(
+            f"is not an instance of the self type ({qualified_name(memo.self_type)})"
+        )
+
+
+def check_paramspec(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    pass  # No-op for now
+
+
+def check_instanceof(
+    value: Any,
+    origin_type: Any,
+    args: tuple[Any, ...],
+    memo: TypeCheckMemo,
+) -> None:
+    if not isinstance(value, origin_type):
+        raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}")
+
+
+def check_type_internal(
+    value: Any,
+    annotation: Any,
+    memo: TypeCheckMemo,
+) -> None:
+    """
+    Check that the given object is compatible with the given type annotation.
+
+    This function should only be used by type checker callables. Applications should use
+    :func:`~.check_type` instead.
+
+    :param value: the value to check
+    :param annotation: the type annotation to check against
+    :param memo: a memo object containing configuration and information necessary for
+        looking up forward references
+    """
+
+    if isinstance(annotation, ForwardRef):
+        try:
+            annotation = evaluate_forwardref(annotation, memo)
+        except NameError:
+            if memo.config.forward_ref_policy is ForwardRefPolicy.ERROR:
+                raise
+            elif memo.config.forward_ref_policy is ForwardRefPolicy.WARN:
+                warnings.warn(
+                    f"Cannot resolve forward reference {annotation.__forward_arg__!r}",
+                    TypeHintWarning,
+                    stacklevel=get_stacklevel(),
+                )
+
+            return
+
+    if annotation is Any or annotation is SubclassableAny or isinstance(value, Mock):
+        return
+
+    # Skip type checks if value is an instance of a class that inherits from Any
+    if not isclass(value) and SubclassableAny in type(value).__bases__:
+        return
+
+    extras: tuple[Any, ...]
+    origin_type = get_origin(annotation)
+    if origin_type is Annotated:
+        annotation, *extras_ = get_args(annotation)
+        extras = tuple(extras_)
+        origin_type = get_origin(annotation)
+    else:
+        extras = ()
+
+    if origin_type is not None:
+        args = get_args(annotation)
+
+        # Compatibility hack to distinguish between unparametrized and empty tuple
+        # (tuple[()]), necessary due to https://github.com/python/cpython/issues/91137
+        if origin_type in (tuple, Tuple) and annotation is not Tuple and not args:
+            args = ((),)
+    else:
+        origin_type = annotation
+        args = ()
+
+    for lookup_func in checker_lookup_functions:
+        checker = lookup_func(origin_type, args, extras)
+        if checker:
+            checker(value, origin_type, args, memo)
+            return
+
+    if isclass(origin_type):
+        if not isinstance(value, origin_type):
+            raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}")
+    elif type(origin_type) is str:  # noqa: E721
+        warnings.warn(
+            f"Skipping type check against {origin_type!r}; this looks like a "
+            f"string-form forward reference imported from another module",
+            TypeHintWarning,
+            stacklevel=get_stacklevel(),
+        )
+
+
+# Equality checks are applied to these
+origin_type_checkers = {
+    bytes: check_byteslike,
+    AbstractSet: check_set,
+    BinaryIO: check_io,
+    Callable: check_callable,
+    collections.abc.Callable: check_callable,
+    complex: check_number,
+    dict: check_mapping,
+    Dict: check_mapping,
+    float: check_number,
+    frozenset: check_set,
+    IO: check_io,
+    list: check_list,
+    List: check_list,
+    typing.Literal: check_literal,
+    Mapping: check_mapping,
+    MutableMapping: check_mapping,
+    None: check_none,
+    collections.abc.Mapping: check_mapping,
+    collections.abc.MutableMapping: check_mapping,
+    Sequence: check_sequence,
+    collections.abc.Sequence: check_sequence,
+    collections.abc.Set: check_set,
+    set: check_set,
+    Set: check_set,
+    TextIO: check_io,
+    tuple: check_tuple,
+    Tuple: check_tuple,
+    type: check_class,
+    Type: check_class,
+    Union: check_union,
+}
+if sys.version_info >= (3, 10):
+    origin_type_checkers[types.UnionType] = check_uniontype
+    origin_type_checkers[typing.TypeGuard] = check_typeguard
+if sys.version_info >= (3, 11):
+    origin_type_checkers.update(
+        {typing.LiteralString: check_literal_string, typing.Self: check_self}
+    )
+if typing_extensions is not None:
+    # On some Python versions, these may simply be re-exports from typing,
+    # but exactly which Python versions is subject to change,
+    # so it's best to err on the safe side
+    # and update the dictionary on all Python versions
+    # if typing_extensions is installed
+    origin_type_checkers[typing_extensions.Literal] = check_literal
+    origin_type_checkers[typing_extensions.LiteralString] = check_literal_string
+    origin_type_checkers[typing_extensions.Self] = check_self
+    origin_type_checkers[typing_extensions.TypeGuard] = check_typeguard
+
+
+def builtin_checker_lookup(
+    origin_type: Any, args: tuple[Any, ...], extras: tuple[Any, ...]
+) -> TypeCheckerCallable | None:
+    checker = origin_type_checkers.get(origin_type)
+    if checker is not None:
+        return checker
+    elif is_typeddict(origin_type):
+        return check_typed_dict
+    elif isclass(origin_type) and issubclass(
+        origin_type,
+        Tuple,  # type: ignore[arg-type]
+    ):
+        # NamedTuple
+        return check_tuple
+    elif getattr(origin_type, "_is_protocol", False):
+        return check_protocol
+    elif isinstance(origin_type, ParamSpec):
+        return check_paramspec
+    elif isinstance(origin_type, TypeVar):
+        return check_typevar
+    elif origin_type.__class__ is NewType:
+        # typing.NewType on Python 3.10+
+        return check_newtype
+    elif (
+        isfunction(origin_type)
+        and getattr(origin_type, "__module__", None) == "typing"
+        and getattr(origin_type, "__qualname__", "").startswith("NewType.")
+        and hasattr(origin_type, "__supertype__")
+    ):
+        # typing.NewType on Python 3.9 and below
+        return check_newtype
+
+    return None
+
+
+checker_lookup_functions.append(builtin_checker_lookup)
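+
+# Illustrative sketch (not from the upstream source): third parties can extend
+# the dispatch above by prepending their own lookup function, which is exactly
+# what load_plugins() below does for entry points. MyType and check_mytype are
+# hypothetical placeholders.
+#
+#     def mytype_lookup(origin_type, args, extras):
+#         if origin_type is MyType:
+#             return check_mytype  # same signature as the check_* functions here
+#         return None
+#
+#     checker_lookup_functions.insert(0, mytype_lookup)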
+
+
+def load_plugins() -> None:
+    """
+    Load all type checker lookup functions from entry points.
+
+    All entry points from the ``typeguard.checker_lookup`` group are loaded, and the
+    returned lookup functions are added to :data:`typeguard.checker_lookup_functions`.
+
+    .. note:: This function is called implicitly on import, unless the
+        ``TYPEGUARD_DISABLE_PLUGIN_AUTOLOAD`` environment variable is present.
+    """
+
+    for ep in entry_points(group="typeguard.checker_lookup"):
+        try:
+            plugin = ep.load()
+        except Exception as exc:
+            warnings.warn(
+                f"Failed to load plugin {ep.name!r}: " f"{qualified_name(exc)}: {exc}",
+                stacklevel=2,
+            )
+            continue
+
+        if not callable(plugin):
+            warnings.warn(
+                f"Plugin {ep} returned a non-callable object: {plugin!r}", stacklevel=2
+            )
+            continue
+
+        checker_lookup_functions.insert(0, plugin)
diff --git a/pkg_resources/_vendor/typeguard/_config.py b/pkg_resources/_vendor/typeguard/_config.py
new file mode 100644
index 0000000000..36efad5396
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_config.py
@@ -0,0 +1,108 @@
+from __future__ import annotations
+
+from collections.abc import Iterable
+from dataclasses import dataclass
+from enum import Enum, auto
+from typing import TYPE_CHECKING, TypeVar
+
+if TYPE_CHECKING:
+    from ._functions import TypeCheckFailCallback
+
+T = TypeVar("T")
+
+
+class ForwardRefPolicy(Enum):
+    """
+    Defines how unresolved forward references are handled.
+
+    Members:
+
+    * ``ERROR``: propagate the :exc:`NameError` when the forward reference lookup fails
+    * ``WARN``: emit a :class:`~.TypeHintWarning` if the forward reference lookup fails
+    * ``IGNORE``: silently skip checks for unresolvable forward references
+    """
+
+    ERROR = auto()
+    WARN = auto()
+    IGNORE = auto()
+
+
+class CollectionCheckStrategy(Enum):
+    """
+    Specifies how thoroughly the contents of collections are type checked.
+
+    This has an effect on the following built-in checkers:
+
+    * ``AbstractSet``
+    * ``Dict``
+    * ``List``
+    * ``Mapping``
+    * ``Set``
+    * ``Tuple[<type>, ...]`` (arbitrarily sized tuples)
+
+    Members:
+
+    * ``FIRST_ITEM``: check only the first item
+    * ``ALL_ITEMS``: check all items
+    """
+
+    FIRST_ITEM = auto()
+    ALL_ITEMS = auto()
+
+    def iterate_samples(self, collection: Iterable[T]) -> Iterable[T]:
+        if self is CollectionCheckStrategy.FIRST_ITEM:
+            try:
+                return [next(iter(collection))]
+            except StopIteration:
+                return ()
+        else:
+            return collection
+
+
+@dataclass
+class TypeCheckConfiguration:
+    """
+    You can change Typeguard's behavior with these settings.
+
+    .. attribute:: typecheck_fail_callback
+       :type: Callable[[TypeCheckError, TypeCheckMemo], Any]
+
+         Callable that is called when type checking fails.
+
+         Default: ``None`` (the :exc:`~.TypeCheckError` is raised directly)
+
+    .. attribute:: forward_ref_policy
+       :type: ForwardRefPolicy
+
+         Specifies what to do when a forward reference fails to resolve.
+
+         Default: ``WARN``
+
+    .. attribute:: collection_check_strategy
+       :type: CollectionCheckStrategy
+
+         Specifies how thoroughly the contents of collections (list, dict, etc.) are
+         type checked.
+
+         Default: ``FIRST_ITEM``
+
+    .. attribute:: debug_instrumentation
+       :type: bool
+
+         If set to ``True``, the code of modules or functions instrumented by typeguard
+         is printed to ``sys.stderr`` after the instrumentation is done.
+
+         Requires Python 3.9 or newer.
+
+         Default: ``False``
+    """
+
+    forward_ref_policy: ForwardRefPolicy = ForwardRefPolicy.WARN
+    typecheck_fail_callback: TypeCheckFailCallback | None = None
+    collection_check_strategy: CollectionCheckStrategy = (
+        CollectionCheckStrategy.FIRST_ITEM
+    )
+    debug_instrumentation: bool = False
+
+
+global_config = TypeCheckConfiguration()
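+
+# Illustrative sketch (not from the upstream source): instrumented code reads
+# this shared instance, so mutating it changes behavior process-wide. It is
+# re-exported as ``typeguard.config``.
+#
+#     from typeguard import config
+#     config.collection_check_strategy = CollectionCheckStrategy.ALL_ITEMS
+#     config.forward_ref_policy = ForwardRefPolicy.ERROR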
diff --git a/pkg_resources/_vendor/typeguard/_decorators.py b/pkg_resources/_vendor/typeguard/_decorators.py
new file mode 100644
index 0000000000..cf3253351f
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_decorators.py
@@ -0,0 +1,235 @@
+from __future__ import annotations
+
+import ast
+import inspect
+import sys
+from collections.abc import Sequence
+from functools import partial
+from inspect import isclass, isfunction
+from types import CodeType, FrameType, FunctionType
+from typing import TYPE_CHECKING, Any, Callable, ForwardRef, TypeVar, cast, overload
+from warnings import warn
+
+from ._config import CollectionCheckStrategy, ForwardRefPolicy, global_config
+from ._exceptions import InstrumentationWarning
+from ._functions import TypeCheckFailCallback
+from ._transformer import TypeguardTransformer
+from ._utils import Unset, function_name, get_stacklevel, is_method_of, unset
+
+if TYPE_CHECKING:
+    from typeshed.stdlib.types import _Cell
+
+    _F = TypeVar("_F")
+
+    def typeguard_ignore(f: _F) -> _F:
+        """This decorator is a noop during static type-checking."""
+        return f
+
+else:
+    from typing import no_type_check as typeguard_ignore  # noqa: F401
+
+T_CallableOrType = TypeVar("T_CallableOrType", bound=Callable[..., Any])
+
+
+def make_cell(value: object) -> _Cell:
+    return (lambda: value).__closure__[0]  # type: ignore[index]
+
+
+def find_target_function(
+    new_code: CodeType, target_path: Sequence[str], firstlineno: int
+) -> CodeType | None:
+    target_name = target_path[0]
+    for const in new_code.co_consts:
+        if isinstance(const, CodeType):
+            if const.co_name == target_name:
+                if const.co_firstlineno == firstlineno:
+                    return const
+                elif len(target_path) > 1:
+                    target_code = find_target_function(
+                        const, target_path[1:], firstlineno
+                    )
+                    if target_code:
+                        return target_code
+
+    return None
+
+
+def instrument(f: T_CallableOrType) -> FunctionType | str:
+    if not getattr(f, "__code__", None):
+        return "no code associated"
+    elif not getattr(f, "__module__", None):
+        return "__module__ attribute is not set"
+    elif f.__code__.co_filename == "<stdin>":
+        return "cannot instrument functions defined in a REPL"
+    elif hasattr(f, "__wrapped__"):
+        return (
+            "@typechecked only supports instrumenting functions wrapped with "
+            "@classmethod, @staticmethod or @property"
+        )
+
+    target_path = [item for item in f.__qualname__.split(".") if item != "<locals>"]
+    module_source = inspect.getsource(sys.modules[f.__module__])
+    module_ast = ast.parse(module_source)
+    instrumentor = TypeguardTransformer(target_path, f.__code__.co_firstlineno)
+    instrumentor.visit(module_ast)
+
+    if not instrumentor.target_node or instrumentor.target_lineno is None:
+        return "instrumentor did not find the target function"
+
+    module_code = compile(module_ast, f.__code__.co_filename, "exec", dont_inherit=True)
+    new_code = find_target_function(
+        module_code, target_path, instrumentor.target_lineno
+    )
+    if not new_code:
+        return "cannot find the target function in the AST"
+
+    if global_config.debug_instrumentation and sys.version_info >= (3, 9):
+        # Find the matching AST node, then unparse it to source and print to stderr
+        print(
+            f"Source code of {f.__qualname__}() after instrumentation:"
+            "\n----------------------------------------------",
+            file=sys.stderr,
+        )
+        print(ast.unparse(instrumentor.target_node), file=sys.stderr)
+        print(
+            "----------------------------------------------",
+            file=sys.stderr,
+        )
+
+    closure = f.__closure__
+    if new_code.co_freevars != f.__code__.co_freevars:
+        # Create a new closure and find values for the new free variables
+        frame = cast(FrameType, inspect.currentframe())
+        frame = cast(FrameType, frame.f_back)
+        frame_locals = cast(FrameType, frame.f_back).f_locals
+        cells: list[_Cell] = []
+        for key in new_code.co_freevars:
+            if key in instrumentor.names_used_in_annotations:
+                # Find the value and make a new cell from it
+                value = frame_locals.get(key) or ForwardRef(key)
+                cells.append(make_cell(value))
+            else:
+                # Reuse the cell from the existing closure
+                assert f.__closure__
+                cells.append(f.__closure__[f.__code__.co_freevars.index(key)])
+
+        closure = tuple(cells)
+
+    new_function = FunctionType(new_code, f.__globals__, f.__name__, closure=closure)
+    new_function.__module__ = f.__module__
+    new_function.__name__ = f.__name__
+    new_function.__qualname__ = f.__qualname__
+    new_function.__annotations__ = f.__annotations__
+    new_function.__doc__ = f.__doc__
+    new_function.__defaults__ = f.__defaults__
+    new_function.__kwdefaults__ = f.__kwdefaults__
+    return new_function
+
+
+@overload
+def typechecked(
+    *,
+    forward_ref_policy: ForwardRefPolicy | Unset = unset,
+    typecheck_fail_callback: TypeCheckFailCallback | Unset = unset,
+    collection_check_strategy: CollectionCheckStrategy | Unset = unset,
+    debug_instrumentation: bool | Unset = unset,
+) -> Callable[[T_CallableOrType], T_CallableOrType]: ...
+
+
+@overload
+def typechecked(target: T_CallableOrType) -> T_CallableOrType: ...
+
+
+def typechecked(
+    target: T_CallableOrType | None = None,
+    *,
+    forward_ref_policy: ForwardRefPolicy | Unset = unset,
+    typecheck_fail_callback: TypeCheckFailCallback | Unset = unset,
+    collection_check_strategy: CollectionCheckStrategy | Unset = unset,
+    debug_instrumentation: bool | Unset = unset,
+) -> Any:
+    """
+    Instrument the target function to perform run-time type checking.
+
+    This decorator recompiles the target function, injecting code to type check
+    arguments, return values, yield values (excluding ``yield from``) and assignments to
+    annotated local variables.
+
+    This can also be used as a class decorator. This will instrument all type annotated
+    methods, including :func:`@classmethod <classmethod>`,
+    :func:`@staticmethod <staticmethod>`, and :class:`@property <property>` decorated
+    methods in the class.
+
+    .. note:: When Python is run in optimized mode (``-O`` or ``-OO``), this decorator
+        is a no-op. This is a feature meant for selectively introducing type checking
+        into a code base where the checks aren't meant to be run in production.
+
+    :param target: the function or class to enable type checking for
+    :param forward_ref_policy: override for
+        :attr:`.TypeCheckConfiguration.forward_ref_policy`
+    :param typecheck_fail_callback: override for
+        :attr:`.TypeCheckConfiguration.typecheck_fail_callback`
+    :param collection_check_strategy: override for
+        :attr:`.TypeCheckConfiguration.collection_check_strategy`
+    :param debug_instrumentation: override for
+        :attr:`.TypeCheckConfiguration.debug_instrumentation`
+
+    """
+    if target is None:
+        return partial(
+            typechecked,
+            forward_ref_policy=forward_ref_policy,
+            typecheck_fail_callback=typecheck_fail_callback,
+            collection_check_strategy=collection_check_strategy,
+            debug_instrumentation=debug_instrumentation,
+        )
+
+    if not __debug__:
+        return target
+
+    if isclass(target):
+        for key, attr in target.__dict__.items():
+            if is_method_of(attr, target):
+                retval = instrument(attr)
+                if isfunction(retval):
+                    setattr(target, key, retval)
+            elif isinstance(attr, (classmethod, staticmethod)):
+                if is_method_of(attr.__func__, target):
+                    retval = instrument(attr.__func__)
+                    if isfunction(retval):
+                        wrapper = attr.__class__(retval)
+                        setattr(target, key, wrapper)
+            elif isinstance(attr, property):
+                kwargs: dict[str, Any] = dict(doc=attr.__doc__)
+                for name in ("fset", "fget", "fdel"):
+                    property_func = kwargs[name] = getattr(attr, name)
+                    if is_method_of(property_func, target):
+                        retval = instrument(property_func)
+                        if isfunction(retval):
+                            kwargs[name] = retval
+
+                setattr(target, key, attr.__class__(**kwargs))
+
+        return target
+
+    # Find either the first Python wrapper or the actual function
+    wrapper_class: (
+        type[classmethod[Any, Any, Any]] | type[staticmethod[Any, Any]] | None
+    ) = None
+    if isinstance(target, (classmethod, staticmethod)):
+        wrapper_class = target.__class__
+        target = target.__func__
+
+    retval = instrument(target)
+    if isinstance(retval, str):
+        warn(
+            f"{retval} -- not typechecking {function_name(target)}",
+            InstrumentationWarning,
+            stacklevel=get_stacklevel(),
+        )
+        return target
+
+    if wrapper_class is None:
+        return retval
+    else:
+        return wrapper_class(retval)
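+
+# Illustrative sketch (not from the upstream source): typical decorator usage.
+#
+#     @typechecked
+#     def greet(name: str) -> str:
+#         return f"hello {name}"
+#
+#     greet("world")  # passes and returns "hello world"
+#     greet(1)        # raises TypeCheckError for argument "name"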
diff --git a/pkg_resources/_vendor/typeguard/_exceptions.py b/pkg_resources/_vendor/typeguard/_exceptions.py
new file mode 100644
index 0000000000..625437a649
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_exceptions.py
@@ -0,0 +1,42 @@
+from collections import deque
+from typing import Deque
+
+
+class TypeHintWarning(UserWarning):
+    """
+    A warning that is emitted when a type hint in string form could not be resolved to
+    an actual type.
+    """
+
+
+class TypeCheckWarning(UserWarning):
+    """Emitted by typeguard's type checkers when a type mismatch is detected."""
+
+    def __init__(self, message: str):
+        super().__init__(message)
+
+
+class InstrumentationWarning(UserWarning):
+    """Emitted when there's a problem with instrumenting a function for type checks."""
+
+    def __init__(self, message: str):
+        super().__init__(message)
+
+
+class TypeCheckError(Exception):
+    """
+    Raised by typeguard's type checkers when a type mismatch is detected.
+    """
+
+    def __init__(self, message: str):
+        super().__init__(message)
+        self._path: Deque[str] = deque()
+
+    def append_path_element(self, element: str) -> None:
+        self._path.append(element)
+
+    def __str__(self) -> str:
+        if self._path:
+            return " of ".join(self._path) + " " + str(self.args[0])
+        else:
+            return str(self.args[0])
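+
+# Illustrative sketch (not from the upstream source): checkers append path
+# elements innermost first, so __str__ renders them outward joined by "of".
+#
+#     exc = TypeCheckError("is not an instance of str")
+#     exc.append_path_element("item 0")
+#     exc.append_path_element('argument "names"')
+#     str(exc)  # 'item 0 of argument "names" is not an instance of str'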
diff --git a/pkg_resources/_vendor/typeguard/_functions.py b/pkg_resources/_vendor/typeguard/_functions.py
new file mode 100644
index 0000000000..28497856a3
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_functions.py
@@ -0,0 +1,308 @@
+from __future__ import annotations
+
+import sys
+import warnings
+from typing import Any, Callable, NoReturn, TypeVar, Union, overload
+
+from . import _suppression
+from ._checkers import BINARY_MAGIC_METHODS, check_type_internal
+from ._config import (
+    CollectionCheckStrategy,
+    ForwardRefPolicy,
+    TypeCheckConfiguration,
+)
+from ._exceptions import TypeCheckError, TypeCheckWarning
+from ._memo import TypeCheckMemo
+from ._utils import get_stacklevel, qualified_name
+
+if sys.version_info >= (3, 11):
+    from typing import Literal, Never, TypeAlias
+else:
+    from typing_extensions import Literal, Never, TypeAlias
+
+T = TypeVar("T")
+TypeCheckFailCallback: TypeAlias = Callable[[TypeCheckError, TypeCheckMemo], Any]
+
+
+@overload
+def check_type(
+    value: object,
+    expected_type: type[T],
+    *,
+    forward_ref_policy: ForwardRefPolicy = ...,
+    typecheck_fail_callback: TypeCheckFailCallback | None = ...,
+    collection_check_strategy: CollectionCheckStrategy = ...,
+) -> T: ...
+
+
+@overload
+def check_type(
+    value: object,
+    expected_type: Any,
+    *,
+    forward_ref_policy: ForwardRefPolicy = ...,
+    typecheck_fail_callback: TypeCheckFailCallback | None = ...,
+    collection_check_strategy: CollectionCheckStrategy = ...,
+) -> Any: ...
+
+
+def check_type(
+    value: object,
+    expected_type: Any,
+    *,
+    forward_ref_policy: ForwardRefPolicy = TypeCheckConfiguration().forward_ref_policy,
+    typecheck_fail_callback: TypeCheckFailCallback | None = (
+        TypeCheckConfiguration().typecheck_fail_callback
+    ),
+    collection_check_strategy: CollectionCheckStrategy = (
+        TypeCheckConfiguration().collection_check_strategy
+    ),
+) -> Any:
+    """
+    Ensure that ``value`` matches ``expected_type``.
+
+    The types from the :mod:`typing` module do not support :func:`isinstance` or
+    :func:`issubclass` so a number of type specific checks are required. This function
+    knows which checker to call for which type.
+
+    This function wraps :func:`~.check_type_internal` in the following ways:
+
+    * Respects type checking suppression (:func:`~.suppress_type_checks`)
+    * Forms a :class:`~.TypeCheckMemo` from the current stack frame
+    * Calls the configured type check fail callback if the check fails
+
+    Note that this function is independent of the globally shared configuration in
+    :data:`typeguard.config`. This means that usage within libraries is safe from being
+    affected by configuration changes made by other libraries or by the integrating
+    application. Instead, configuration options have the same default values as their
+    corresponding fields in :class:`TypeCheckConfiguration`.
+
+    :param value: value to be checked against ``expected_type``
+    :param expected_type: a class or generic type instance, or a tuple of such things
+    :param forward_ref_policy: see :attr:`TypeCheckConfiguration.forward_ref_policy`
+    :param typecheck_fail_callback:
+        see :attr:`TypeCheckConfiguration.typecheck_fail_callback`
+    :param collection_check_strategy:
+        see :attr:`TypeCheckConfiguration.collection_check_strategy`
+    :return: ``value``, unmodified
+    :raises TypeCheckError: if there is a type mismatch
+
+    """
+    if type(expected_type) is tuple:
+        expected_type = Union[expected_type]
+
+    config = TypeCheckConfiguration(
+        forward_ref_policy=forward_ref_policy,
+        typecheck_fail_callback=typecheck_fail_callback,
+        collection_check_strategy=collection_check_strategy,
+    )
+
+    if _suppression.type_checks_suppressed or expected_type is Any:
+        return value
+
+    frame = sys._getframe(1)
+    memo = TypeCheckMemo(frame.f_globals, frame.f_locals, config=config)
+    try:
+        check_type_internal(value, expected_type, memo)
+    except TypeCheckError as exc:
+        exc.append_path_element(qualified_name(value, add_class_prefix=True))
+        if config.typecheck_fail_callback:
+            config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return value
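+
+# Illustrative sketch (not from the upstream source): the default FIRST_ITEM
+# strategy samples a single element, so pass ALL_ITEMS to check exhaustively.
+#
+#     check_type([1, 2, 3], list[int])  # returns the list unchanged
+#     check_type([1, "x"], list[int])   # also passes: only the first item is
+#                                       # sampled under FIRST_ITEM
+#     check_type(
+#         [1, "x"],
+#         list[int],
+#         collection_check_strategy=CollectionCheckStrategy.ALL_ITEMS,
+#     )                                 # raises TypeCheckError for item 1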
+
+
+def check_argument_types(
+    func_name: str,
+    arguments: dict[str, tuple[Any, Any]],
+    memo: TypeCheckMemo,
+) -> Literal[True]:
+    if _suppression.type_checks_suppressed:
+        return True
+
+    for argname, (value, annotation) in arguments.items():
+        if annotation is NoReturn or annotation is Never:
+            exc = TypeCheckError(
+                f"{func_name}() was declared never to be called but it was"
+            )
+            if memo.config.typecheck_fail_callback:
+                memo.config.typecheck_fail_callback(exc, memo)
+            else:
+                raise exc
+
+        try:
+            check_type_internal(value, annotation, memo)
+        except TypeCheckError as exc:
+            qualname = qualified_name(value, add_class_prefix=True)
+            exc.append_path_element(f'argument "{argname}" ({qualname})')
+            if memo.config.typecheck_fail_callback:
+                memo.config.typecheck_fail_callback(exc, memo)
+            else:
+                raise
+
+    return True
+
+
+def check_return_type(
+    func_name: str,
+    retval: T,
+    annotation: Any,
+    memo: TypeCheckMemo,
+) -> T:
+    if _suppression.type_checks_suppressed:
+        return retval
+
+    if annotation is NoReturn or annotation is Never:
+        exc = TypeCheckError(f"{func_name}() was declared never to return but it did")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise exc
+
+    try:
+        check_type_internal(retval, annotation, memo)
+    except TypeCheckError as exc:
+        # Allow NotImplemented if this is a binary magic method (__eq__() et al)
+        if retval is NotImplemented and annotation is bool:
+            # This does not (and cannot) check if it's actually a method
+            func_name = func_name.rsplit(".", 1)[-1]
+            if func_name in BINARY_MAGIC_METHODS:
+                return retval
+
+        qualname = qualified_name(retval, add_class_prefix=True)
+        exc.append_path_element(f"the return value ({qualname})")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return retval
+
+
+def check_send_type(
+    func_name: str,
+    sendval: T,
+    annotation: Any,
+    memo: TypeCheckMemo,
+) -> T:
+    if _suppression.type_checks_suppressed:
+        return sendval
+
+    if annotation is NoReturn or annotation is Never:
+        exc = TypeCheckError(
+            f"{func_name}() was declared never to be sent a value to but it was"
+        )
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise exc
+
+    try:
+        check_type_internal(sendval, annotation, memo)
+    except TypeCheckError as exc:
+        qualname = qualified_name(sendval, add_class_prefix=True)
+        exc.append_path_element(f"the value sent to generator ({qualname})")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return sendval
+
+
+def check_yield_type(
+    func_name: str,
+    yieldval: T,
+    annotation: Any,
+    memo: TypeCheckMemo,
+) -> T:
+    if _suppression.type_checks_suppressed:
+        return yieldval
+
+    if annotation is NoReturn or annotation is Never:
+        exc = TypeCheckError(f"{func_name}() was declared never to yield but it did")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise exc
+
+    try:
+        check_type_internal(yieldval, annotation, memo)
+    except TypeCheckError as exc:
+        qualname = qualified_name(yieldval, add_class_prefix=True)
+        exc.append_path_element(f"the yielded value ({qualname})")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return yieldval
+
+
+def check_variable_assignment(
+    value: object, varname: str, annotation: Any, memo: TypeCheckMemo
+) -> Any:
+    if _suppression.type_checks_suppressed:
+        return value
+
+    try:
+        check_type_internal(value, annotation, memo)
+    except TypeCheckError as exc:
+        qualname = qualified_name(value, add_class_prefix=True)
+        exc.append_path_element(f"value assigned to {varname} ({qualname})")
+        if memo.config.typecheck_fail_callback:
+            memo.config.typecheck_fail_callback(exc, memo)
+        else:
+            raise
+
+    return value
+
+
+def check_multi_variable_assignment(
+    value: Any, targets: list[dict[str, Any]], memo: TypeCheckMemo
+) -> Any:
+    if max(len(target) for target in targets) == 1:
+        iterated_values = [value]
+    else:
+        iterated_values = list(value)
+
+    if not _suppression.type_checks_suppressed:
+        for expected_types in targets:
+            value_index = 0
+            for ann_index, (varname, expected_type) in enumerate(
+                expected_types.items()
+            ):
+                if varname.startswith("*"):
+                    varname = varname[1:]
+                    keys_left = len(expected_types) - 1 - ann_index
+                    next_value_index = len(iterated_values) - keys_left
+                    obj: object = iterated_values[value_index:next_value_index]
+                    value_index = next_value_index
+                else:
+                    obj = iterated_values[value_index]
+                    value_index += 1
+
+                try:
+                    check_type_internal(obj, expected_type, memo)
+                except TypeCheckError as exc:
+                    qualname = qualified_name(obj, add_class_prefix=True)
+                    exc.append_path_element(f"value assigned to {varname} ({qualname})")
+                    if memo.config.typecheck_fail_callback:
+                        memo.config.typecheck_fail_callback(exc, memo)
+                    else:
+                        raise
+
+    return iterated_values[0] if len(iterated_values) == 1 else iterated_values
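+
+# Illustrative sketch (not from the upstream source): roughly how the AST
+# transformer encodes ``x, *rest = value`` when both names are annotated;
+# starred names keep their "*" prefix in the target mapping.
+#
+#     check_multi_variable_assignment(value, [{"x": int, "*rest": list[str]}], memo)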
+
+
+def warn_on_error(exc: TypeCheckError, memo: TypeCheckMemo) -> None:
+    """
+    Emit a warning on a type mismatch.
+
+    This is intended to be used as an error handler in
+    :attr:`TypeCheckConfiguration.typecheck_fail_callback`.
+
+    """
+    warnings.warn(TypeCheckWarning(str(exc)), stacklevel=get_stacklevel())
diff --git a/pkg_resources/_vendor/typeguard/_importhook.py b/pkg_resources/_vendor/typeguard/_importhook.py
new file mode 100644
index 0000000000..8590540a5a
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_importhook.py
@@ -0,0 +1,213 @@
+from __future__ import annotations
+
+import ast
+import sys
+import types
+from collections.abc import Callable, Iterable
+from importlib.abc import MetaPathFinder
+from importlib.machinery import ModuleSpec, SourceFileLoader
+from importlib.util import cache_from_source, decode_source
+from inspect import isclass
+from os import PathLike
+from types import CodeType, ModuleType, TracebackType
+from typing import Sequence, TypeVar
+from unittest.mock import patch
+
+from ._config import global_config
+from ._transformer import TypeguardTransformer
+
+if sys.version_info >= (3, 12):
+    from collections.abc import Buffer
+else:
+    from typing_extensions import Buffer
+
+if sys.version_info >= (3, 11):
+    from typing import ParamSpec
+else:
+    from typing_extensions import ParamSpec
+
+if sys.version_info >= (3, 10):
+    from importlib.metadata import PackageNotFoundError, version
+else:
+    from importlib_metadata import PackageNotFoundError, version
+
+try:
+    OPTIMIZATION = "typeguard" + "".join(version("typeguard").split(".")[:3])
+except PackageNotFoundError:
+    OPTIMIZATION = "typeguard"
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+
+# The name of this function is magical: the import machinery strips frames of
+# functions with this exact name when rendering import tracebacks
+def _call_with_frames_removed(
+    f: Callable[P, T], *args: P.args, **kwargs: P.kwargs
+) -> T:
+    return f(*args, **kwargs)
+
+
+def optimized_cache_from_source(path: str, debug_override: bool | None = None) -> str:
+    return cache_from_source(path, debug_override, optimization=OPTIMIZATION)
+
+
+class TypeguardLoader(SourceFileLoader):
+    @staticmethod
+    def source_to_code(
+        data: Buffer | str | ast.Module | ast.Expression | ast.Interactive,
+        path: Buffer | str | PathLike[str] = "<string>",
+    ) -> CodeType:
+        if isinstance(data, (ast.Module, ast.Expression, ast.Interactive)):
+            tree = data
+        else:
+            if isinstance(data, str):
+                source = data
+            else:
+                source = decode_source(data)
+
+            tree = _call_with_frames_removed(
+                ast.parse,
+                source,
+                path,
+                "exec",
+            )
+
+        tree = TypeguardTransformer().visit(tree)
+        ast.fix_missing_locations(tree)
+
+        if global_config.debug_instrumentation and sys.version_info >= (3, 9):
+            print(
+                f"Source code of {path!r} after instrumentation:\n"
+                "----------------------------------------------",
+                file=sys.stderr,
+            )
+            print(ast.unparse(tree), file=sys.stderr)
+            print("----------------------------------------------", file=sys.stderr)
+
+        return _call_with_frames_removed(
+            compile, tree, path, "exec", 0, dont_inherit=True
+        )
+
+    def exec_module(self, module: ModuleType) -> None:
+        # Use a custom optimization marker – the import lock should make this monkey
+        # patch safe
+        with patch(
+            "importlib._bootstrap_external.cache_from_source",
+            optimized_cache_from_source,
+        ):
+            super().exec_module(module)
+
+
+class TypeguardFinder(MetaPathFinder):
+    """
+    Wraps another path finder and instruments the module with
+    :func:`@typechecked <typeguard.typechecked>` if :meth:`should_instrument` returns
+    ``True``.
+
+    Should not be used directly, but rather via :func:`~.install_import_hook`.
+
+    .. versionadded:: 2.6
+    """
+
+    def __init__(self, packages: list[str] | None, original_pathfinder: MetaPathFinder):
+        self.packages = packages
+        self._original_pathfinder = original_pathfinder
+
+    def find_spec(
+        self,
+        fullname: str,
+        path: Sequence[str] | None,
+        target: types.ModuleType | None = None,
+    ) -> ModuleSpec | None:
+        if self.should_instrument(fullname):
+            spec = self._original_pathfinder.find_spec(fullname, path, target)
+            if spec is not None and isinstance(spec.loader, SourceFileLoader):
+                spec.loader = TypeguardLoader(spec.loader.name, spec.loader.path)
+                return spec
+
+        return None
+
+    def should_instrument(self, module_name: str) -> bool:
+        """
+        Determine whether the module with the given name should be instrumented.
+
+        :param module_name: full name of the module that is about to be imported (e.g.
+            ``xyz.abc``)
+
+        """
+        if self.packages is None:
+            return True
+
+        for package in self.packages:
+            if module_name == package or module_name.startswith(package + "."):
+                return True
+
+        return False
+
+
+class ImportHookManager:
+    """
+    A handle that can be used to uninstall the Typeguard import hook.
+    """
+
+    def __init__(self, hook: MetaPathFinder):
+        self.hook = hook
+
+    def __enter__(self) -> None:
+        pass
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException],
+        exc_val: BaseException,
+        exc_tb: TracebackType,
+    ) -> None:
+        self.uninstall()
+
+    def uninstall(self) -> None:
+        """Uninstall the import hook."""
+        try:
+            sys.meta_path.remove(self.hook)
+        except ValueError:
+            pass  # already removed
+
+
+def install_import_hook(
+    packages: Iterable[str] | None = None,
+    *,
+    cls: type[TypeguardFinder] = TypeguardFinder,
+) -> ImportHookManager:
+    """
+    Install an import hook that instruments functions for automatic type checking.
+
+    This only affects modules loaded **after** this hook has been installed.
+
+    :param packages: an iterable of package names to instrument, or ``None`` to
+        instrument all packages
+    :param cls: a custom meta path finder class
+    :return: a context manager that uninstalls the hook on exit (or when you call
+        ``.uninstall()``)
+
+    .. versionadded:: 2.6
+
+    """
+    if packages is None:
+        target_packages: list[str] | None = None
+    elif isinstance(packages, str):
+        target_packages = [packages]
+    else:
+        target_packages = list(packages)
+
+    for finder in sys.meta_path:
+        if (
+            isclass(finder)
+            and finder.__name__ == "PathFinder"
+            and hasattr(finder, "find_spec")
+        ):
+            break
+    else:
+        raise RuntimeError("Cannot find a PathFinder in sys.meta_path")
+
+    hook = cls(target_packages, finder)
+    sys.meta_path.insert(0, hook)
+    return ImportHookManager(hook)
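+
+# Illustrative sketch (not from the upstream source): ``mypkg`` is a
+# placeholder package name.
+#
+#     with install_import_hook(["mypkg"]):
+#         import mypkg  # mypkg and its submodules get instrumented on import
+#
+#     # or keep the hook installed and remove it explicitly later:
+#     manager = install_import_hook(["mypkg"])
+#     import mypkg
+#     manager.uninstall()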
diff --git a/pkg_resources/_vendor/typeguard/_memo.py b/pkg_resources/_vendor/typeguard/_memo.py
new file mode 100644
index 0000000000..1d0d80c66d
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_memo.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from typing import Any
+
+from typeguard._config import TypeCheckConfiguration, global_config
+
+
+class TypeCheckMemo:
+    """
+    Contains information necessary for type checkers to do their work.
+
+    .. attribute:: globals
+       :type: dict[str, Any]
+
+        Dictionary of global variables to use for resolving forward references.
+
+    .. attribute:: locals
+       :type: dict[str, Any]
+
+        Dictionary of local variables to use for resolving forward references.
+
+    .. attribute:: self_type
+       :type: type | None
+
+        When running type checks within an instance method or class method, this is the
+        class object that the first argument (usually named ``self`` or ``cls``) refers
+        to.
+
+    .. attribute:: config
+       :type: TypeCheckConfiguration
+
+         Contains the configuration for a particular set of type checking operations.
+    """
+
+    __slots__ = "globals", "locals", "self_type", "config"
+
+    def __init__(
+        self,
+        globals: dict[str, Any],
+        locals: dict[str, Any],
+        *,
+        self_type: type | None = None,
+        config: TypeCheckConfiguration = global_config,
+    ):
+        self.globals = globals
+        self.locals = locals
+        self.self_type = self_type
+        self.config = config
diff --git a/pkg_resources/_vendor/typeguard/_pytest_plugin.py b/pkg_resources/_vendor/typeguard/_pytest_plugin.py
new file mode 100644
index 0000000000..7b2f494ec7
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_pytest_plugin.py
@@ -0,0 +1,127 @@
+from __future__ import annotations
+
+import sys
+import warnings
+from typing import TYPE_CHECKING, Any, Literal
+
+from typeguard._config import CollectionCheckStrategy, ForwardRefPolicy, global_config
+from typeguard._exceptions import InstrumentationWarning
+from typeguard._importhook import install_import_hook
+from typeguard._utils import qualified_name, resolve_reference
+
+if TYPE_CHECKING:
+    from pytest import Config, Parser
+
+
+def pytest_addoption(parser: Parser) -> None:
+    def add_ini_option(
+        opt_type: (
+            Literal["string", "paths", "pathlist", "args", "linelist", "bool"] | None
+        ),
+    ) -> None:
+        parser.addini(
+            group.options[-1].names()[0][2:],
+            group.options[-1].attrs()["help"],
+            opt_type,
+        )
+
+    group = parser.getgroup("typeguard")
+    group.addoption(
+        "--typeguard-packages",
+        action="store",
+        help="comma separated name list of packages and modules to instrument for "
+        "type checking, or :all: to instrument all modules loaded after typeguard",
+    )
+    add_ini_option("linelist")
+
+    group.addoption(
+        "--typeguard-debug-instrumentation",
+        action="store_true",
+        help="print all instrumented code to stderr",
+    )
+    add_ini_option("bool")
+
+    group.addoption(
+        "--typeguard-typecheck-fail-callback",
+        action="store",
+        help=(
+            "a module:varname (e.g. typeguard:warn_on_error) reference to a function "
+            "that is called (with the exception, and memo object as arguments) to "
+            "handle a TypeCheckError"
+        ),
+    )
+    add_ini_option("string")
+
+    group.addoption(
+        "--typeguard-forward-ref-policy",
+        action="store",
+        choices=list(ForwardRefPolicy.__members__),
+        help=(
+            "determines how to deal with unresolveable forward references in type "
+            "annotations"
+        ),
+    )
+    add_ini_option("string")
+
+    group.addoption(
+        "--typeguard-collection-check-strategy",
+        action="store",
+        choices=list(CollectionCheckStrategy.__members__),
+        help="determines how thoroughly to check collections (list, dict, etc)",
+    )
+    add_ini_option("string")
+
+
+def pytest_configure(config: Config) -> None:
+    def getoption(name: str) -> Any:
+        return config.getoption(name.replace("-", "_")) or config.getini(name)
+
+    packages: list[str] | None = []
+    if packages_option := config.getoption("typeguard_packages"):
+        packages = [pkg.strip() for pkg in packages_option.split(",")]
+    elif packages_ini := config.getini("typeguard-packages"):
+        packages = packages_ini
+
+    if packages:
+        if packages == [":all:"]:
+            packages = None
+        else:
+            already_imported_packages = sorted(
+                package for package in packages if package in sys.modules
+            )
+            if already_imported_packages:
+                warnings.warn(
+                    f"typeguard cannot check these packages because they are already "
+                    f"imported: {', '.join(already_imported_packages)}",
+                    InstrumentationWarning,
+                    stacklevel=1,
+                )
+
+        install_import_hook(packages=packages)
+
+    debug_option = getoption("typeguard-debug-instrumentation")
+    if debug_option:
+        global_config.debug_instrumentation = True
+
+    fail_callback_option = getoption("typeguard-typecheck-fail-callback")
+    if fail_callback_option:
+        callback = resolve_reference(fail_callback_option)
+        if not callable(callback):
+            raise TypeError(
+                f"{fail_callback_option} ({qualified_name(callback.__class__)}) is not "
+                f"a callable"
+            )
+
+        global_config.typecheck_fail_callback = callback
+
+    forward_ref_policy_option = getoption("typeguard-forward-ref-policy")
+    if forward_ref_policy_option:
+        forward_ref_policy = ForwardRefPolicy.__members__[forward_ref_policy_option]
+        global_config.forward_ref_policy = forward_ref_policy
+
+    collection_check_strategy_option = getoption("typeguard-collection-check-strategy")
+    if collection_check_strategy_option:
+        collection_check_strategy = CollectionCheckStrategy.__members__[
+            collection_check_strategy_option
+        ]
+        global_config.collection_check_strategy = collection_check_strategy
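+
+# Illustrative sketch (not from the upstream source): the options registered
+# above are given on the pytest command line (or via matching ini keys), e.g.:
+#
+#     pytest --typeguard-packages=mypkg,otherpkg \
+#            --typeguard-collection-check-strategy=ALL_ITEMS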
diff --git a/pkg_resources/_vendor/typeguard/_suppression.py b/pkg_resources/_vendor/typeguard/_suppression.py
new file mode 100644
index 0000000000..bbbfbfbe8e
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_suppression.py
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+import sys
+from collections.abc import Callable, Generator
+from contextlib import contextmanager
+from functools import update_wrapper
+from threading import Lock
+from typing import ContextManager, TypeVar, overload
+
+if sys.version_info >= (3, 10):
+    from typing import ParamSpec
+else:
+    from typing_extensions import ParamSpec
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+type_checks_suppressed = 0
+type_checks_suppress_lock = Lock()
+
+
+@overload
+def suppress_type_checks(func: Callable[P, T]) -> Callable[P, T]: ...
+
+
+@overload
+def suppress_type_checks() -> ContextManager[None]: ...
+
+
+def suppress_type_checks(
+    func: Callable[P, T] | None = None,
+) -> Callable[P, T] | ContextManager[None]:
+    """
+    Temporarily suppress all type checking.
+
+    This function has two operating modes, based on how it's used:
+
+    #. as a context manager (``with suppress_type_checks(): ...``)
+    #. as a decorator (``@suppress_type_checks``)
+
+    When used as a context manager, :func:`check_type` and any automatically
+    instrumented functions skip the actual type checking. These context managers can be
+    nested.
+
+    When used as a decorator, all type checking is suppressed while the function is
+    running.
+
+    Type checking will resume once no more context managers are active and no decorated
+    functions are running.
+
+    Both operating modes are thread-safe.
+
+    """
+
+    def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+        global type_checks_suppressed
+
+        with type_checks_suppress_lock:
+            type_checks_suppressed += 1
+
+        assert func is not None
+        try:
+            return func(*args, **kwargs)
+        finally:
+            with type_checks_suppress_lock:
+                type_checks_suppressed -= 1
+
+    def cm() -> Generator[None, None, None]:
+        global type_checks_suppressed
+
+        with type_checks_suppress_lock:
+            type_checks_suppressed += 1
+
+        try:
+            yield
+        finally:
+            with type_checks_suppress_lock:
+                type_checks_suppressed -= 1
+
+    if func is None:
+        # Context manager mode
+        return contextmanager(cm)()
+    else:
+        # Decorator mode
+        update_wrapper(wrapper, func)
+        return wrapper
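+
+# Illustrative usage sketch (comment only), covering both operating modes
+# described in the docstring; check_type refers to typeguard.check_type:
+#
+#   with suppress_type_checks():
+#       check_type("not an int", int)  # passes; checking is suppressed
+#
+#   @suppress_type_checks
+#   def handler(payload):
+#       return check_type(payload, dict)  # suppressed while handler runs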
diff --git a/pkg_resources/_vendor/typeguard/_transformer.py b/pkg_resources/_vendor/typeguard/_transformer.py
new file mode 100644
index 0000000000..13ac3630e6
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_transformer.py
@@ -0,0 +1,1229 @@
+from __future__ import annotations
+
+import ast
+import builtins
+import sys
+import typing
+from ast import (
+    AST,
+    Add,
+    AnnAssign,
+    Assign,
+    AsyncFunctionDef,
+    Attribute,
+    AugAssign,
+    BinOp,
+    BitAnd,
+    BitOr,
+    BitXor,
+    Call,
+    ClassDef,
+    Constant,
+    Dict,
+    Div,
+    Expr,
+    Expression,
+    FloorDiv,
+    FunctionDef,
+    If,
+    Import,
+    ImportFrom,
+    Index,
+    List,
+    Load,
+    LShift,
+    MatMult,
+    Mod,
+    Module,
+    Mult,
+    Name,
+    NamedExpr,
+    NodeTransformer,
+    NodeVisitor,
+    Pass,
+    Pow,
+    Return,
+    RShift,
+    Starred,
+    Store,
+    Sub,
+    Subscript,
+    Tuple,
+    Yield,
+    YieldFrom,
+    alias,
+    copy_location,
+    expr,
+    fix_missing_locations,
+    keyword,
+    walk,
+)
+from collections import defaultdict
+from collections.abc import Generator, Sequence
+from contextlib import contextmanager
+from copy import deepcopy
+from dataclasses import dataclass, field
+from typing import Any, ClassVar, cast, overload
+
+generator_names = (
+    "typing.Generator",
+    "collections.abc.Generator",
+    "typing.Iterator",
+    "collections.abc.Iterator",
+    "typing.Iterable",
+    "collections.abc.Iterable",
+    "typing.AsyncIterator",
+    "collections.abc.AsyncIterator",
+    "typing.AsyncIterable",
+    "collections.abc.AsyncIterable",
+    "typing.AsyncGenerator",
+    "collections.abc.AsyncGenerator",
+)
+anytype_names = (
+    "typing.Any",
+    "typing_extensions.Any",
+)
+literal_names = (
+    "typing.Literal",
+    "typing_extensions.Literal",
+)
+annotated_names = (
+    "typing.Annotated",
+    "typing_extensions.Annotated",
+)
+ignore_decorators = (
+    "typing.no_type_check",
+    "typeguard.typeguard_ignore",
+)
+aug_assign_functions = {
+    Add: "iadd",
+    Sub: "isub",
+    Mult: "imul",
+    MatMult: "imatmul",
+    Div: "itruediv",
+    FloorDiv: "ifloordiv",
+    Mod: "imod",
+    Pow: "ipow",
+    LShift: "ilshift",
+    RShift: "irshift",
+    BitAnd: "iand",
+    BitXor: "ixor",
+    BitOr: "ior",
+}
+
+
+@dataclass
+class TransformMemo:
+    node: Module | ClassDef | FunctionDef | AsyncFunctionDef | None
+    parent: TransformMemo | None
+    path: tuple[str, ...]
+    joined_path: Constant = field(init=False)
+    return_annotation: expr | None = None
+    yield_annotation: expr | None = None
+    send_annotation: expr | None = None
+    is_async: bool = False
+    local_names: set[str] = field(init=False, default_factory=set)
+    imported_names: dict[str, str] = field(init=False, default_factory=dict)
+    ignored_names: set[str] = field(init=False, default_factory=set)
+    load_names: defaultdict[str, dict[str, Name]] = field(
+        init=False, default_factory=lambda: defaultdict(dict)
+    )
+    has_yield_expressions: bool = field(init=False, default=False)
+    has_return_expressions: bool = field(init=False, default=False)
+    memo_var_name: Name | None = field(init=False, default=None)
+    should_instrument: bool = field(init=False, default=True)
+    variable_annotations: dict[str, expr] = field(init=False, default_factory=dict)
+    configuration_overrides: dict[str, Any] = field(init=False, default_factory=dict)
+    code_inject_index: int = field(init=False, default=0)
+
+    def __post_init__(self) -> None:
+        elements: list[str] = []
+        memo = self
+        while isinstance(memo.node, (ClassDef, FunctionDef, AsyncFunctionDef)):
+            elements.insert(0, memo.node.name)
+            if not memo.parent:
+                break
+
+            memo = memo.parent
+            if isinstance(memo.node, (FunctionDef, AsyncFunctionDef)):
+                elements.insert(0, "")
+
+        self.joined_path = Constant(".".join(elements))
+
+        # Figure out where to insert instrumentation code
+        if self.node:
+            for index, child in enumerate(self.node.body):
+                if isinstance(child, ImportFrom) and child.module == "__future__":
+                    # (module only) __future__ imports must come first
+                    continue
+                elif (
+                    isinstance(child, Expr)
+                    and isinstance(child.value, Constant)
+                    and isinstance(child.value.value, str)
+                ):
+                    continue  # docstring
+
+                self.code_inject_index = index
+                break
+
+    def get_unused_name(self, name: str) -> str:
+        memo: TransformMemo | None = self
+        while memo is not None:
+            if name in memo.local_names:
+                memo = self
+                name += "_"
+            else:
+                memo = memo.parent
+
+        self.local_names.add(name)
+        return name
+
+    def is_ignored_name(self, expression: expr | Expr | None) -> bool:
+        top_expression = (
+            expression.value if isinstance(expression, Expr) else expression
+        )
+
+        if isinstance(top_expression, Attribute) and isinstance(
+            top_expression.value, Name
+        ):
+            name = top_expression.value.id
+        elif isinstance(top_expression, Name):
+            name = top_expression.id
+        else:
+            return False
+
+        memo: TransformMemo | None = self
+        while memo is not None:
+            if name in memo.ignored_names:
+                return True
+
+            memo = memo.parent
+
+        return False
+
+    def get_memo_name(self) -> Name:
+        if not self.memo_var_name:
+            self.memo_var_name = Name(id="memo", ctx=Load())
+
+        return self.memo_var_name
+
+    def get_import(self, module: str, name: str) -> Name:
+        if module in self.load_names and name in self.load_names[module]:
+            return self.load_names[module][name]
+
+        qualified_name = f"{module}.{name}"
+        if name in self.imported_names and self.imported_names[name] == qualified_name:
+            return Name(id=name, ctx=Load())
+
+        alias = self.get_unused_name(name)
+        node = self.load_names[module][name] = Name(id=alias, ctx=Load())
+        self.imported_names[name] = qualified_name
+        return node
+
+    def insert_imports(self, node: Module | FunctionDef | AsyncFunctionDef) -> None:
+        """Insert imports needed by injected code."""
+        if not self.load_names:
+            return
+
+        # Insert imports after any "from __future__ ..." imports and any docstring
+        for modulename, names in self.load_names.items():
+            aliases = [
+                alias(orig_name, new_name.id if orig_name != new_name.id else None)
+                for orig_name, new_name in sorted(names.items())
+            ]
+            node.body.insert(self.code_inject_index, ImportFrom(modulename, aliases, 0))
+
+    def name_matches(self, expression: expr | Expr | None, *names: str) -> bool:
+        if expression is None:
+            return False
+
+        path: list[str] = []
+        top_expression = (
+            expression.value if isinstance(expression, Expr) else expression
+        )
+
+        if isinstance(top_expression, Subscript):
+            top_expression = top_expression.value
+        elif isinstance(top_expression, Call):
+            top_expression = top_expression.func
+
+        while isinstance(top_expression, Attribute):
+            path.insert(0, top_expression.attr)
+            top_expression = top_expression.value
+
+        if not isinstance(top_expression, Name):
+            return False
+
+        if top_expression.id in self.imported_names:
+            translated = self.imported_names[top_expression.id]
+        elif hasattr(builtins, top_expression.id):
+            translated = "builtins." + top_expression.id
+        else:
+            translated = top_expression.id
+
+        path.insert(0, translated)
+        joined_path = ".".join(path)
+        if joined_path in names:
+            return True
+        elif self.parent:
+            return self.parent.name_matches(expression, *names)
+        else:
+            return False
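+
+    # For example (illustrative): with "import typing as t" in scope, a node
+    # for "t.Literal" matches "typing.Literal" here, because "t" is resolved
+    # to "typing" via imported_names before the dotted path is joined.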
+
+    def get_config_keywords(self) -> list[keyword]:
+        if self.parent and isinstance(self.parent.node, ClassDef):
+            overrides = self.parent.configuration_overrides.copy()
+        else:
+            overrides = {}
+
+        overrides.update(self.configuration_overrides)
+        return [keyword(key, value) for key, value in overrides.items()]
+
+
+class NameCollector(NodeVisitor):
+    def __init__(self) -> None:
+        self.names: set[str] = set()
+
+    def visit_Import(self, node: Import) -> None:
+        for name in node.names:
+            self.names.add(name.asname or name.name)
+
+    def visit_ImportFrom(self, node: ImportFrom) -> None:
+        for name in node.names:
+            self.names.add(name.asname or name.name)
+
+    def visit_Assign(self, node: Assign) -> None:
+        for target in node.targets:
+            if isinstance(target, Name):
+                self.names.add(target.id)
+
+    def visit_NamedExpr(self, node: NamedExpr) -> Any:
+        if isinstance(node.target, Name):
+            self.names.add(node.target.id)
+
+    def visit_FunctionDef(self, node: FunctionDef) -> None:
+        pass
+
+    def visit_ClassDef(self, node: ClassDef) -> None:
+        pass
+
+
+class GeneratorDetector(NodeVisitor):
+    """Detects if a function node is a generator function."""
+
+    contains_yields: bool = False
+    in_root_function: bool = False
+
+    def visit_Yield(self, node: Yield) -> Any:
+        self.contains_yields = True
+
+    def visit_YieldFrom(self, node: YieldFrom) -> Any:
+        self.contains_yields = True
+
+    def visit_ClassDef(self, node: ClassDef) -> Any:
+        pass
+
+    def visit_FunctionDef(self, node: FunctionDef | AsyncFunctionDef) -> Any:
+        if not self.in_root_function:
+            self.in_root_function = True
+            self.generic_visit(node)
+            self.in_root_function = False
+
+    def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> Any:
+        self.visit_FunctionDef(node)
+
+
+class AnnotationTransformer(NodeTransformer):
+    type_substitutions: ClassVar[dict[str, tuple[str, str]]] = {
+        "builtins.dict": ("typing", "Dict"),
+        "builtins.list": ("typing", "List"),
+        "builtins.tuple": ("typing", "Tuple"),
+        "builtins.set": ("typing", "Set"),
+        "builtins.frozenset": ("typing", "FrozenSet"),
+    }
+
+    def __init__(self, transformer: TypeguardTransformer):
+        self.transformer = transformer
+        self._memo = transformer._memo
+        self._level = 0
+
+    def visit(self, node: AST) -> Any:
+        # Don't process Literals
+        if isinstance(node, expr) and self._memo.name_matches(node, *literal_names):
+            return node
+
+        self._level += 1
+        new_node = super().visit(node)
+        self._level -= 1
+
+        if isinstance(new_node, Expression) and not hasattr(new_node, "body"):
+            return None
+
+        # Return None if this new node matches a variation of typing.Any
+        if (
+            self._level == 0
+            and isinstance(new_node, expr)
+            and self._memo.name_matches(new_node, *anytype_names)
+        ):
+            return None
+
+        return new_node
+
+    def visit_BinOp(self, node: BinOp) -> Any:
+        self.generic_visit(node)
+
+        if isinstance(node.op, BitOr):
+            # If either branch of the BinOp has been transformed to `None`, it means
+            # that a type in the union was ignored, so the entire annotation should be
+            # ignored
+            if not hasattr(node, "left") or not hasattr(node, "right"):
+                return None
+
+            # Return Any if either side is Any
+            if self._memo.name_matches(node.left, *anytype_names):
+                return node.left
+            elif self._memo.name_matches(node.right, *anytype_names):
+                return node.right
+
+            if sys.version_info < (3, 10):
+                union_name = self.transformer._get_import("typing", "Union")
+                return Subscript(
+                    value=union_name,
+                    slice=Index(
+                        Tuple(elts=[node.left, node.right], ctx=Load()), ctx=Load()
+                    ),
+                    ctx=Load(),
+                )
+
+        return node
+
+    def visit_Attribute(self, node: Attribute) -> Any:
+        if self._memo.is_ignored_name(node):
+            return None
+
+        return node
+
+    def visit_Subscript(self, node: Subscript) -> Any:
+        if self._memo.is_ignored_name(node.value):
+            return None
+
+        # The subscript of typing(_extensions).Literal can be any arbitrary string, so
+        # don't try to evaluate it as code
+        if node.slice:
+            if isinstance(node.slice, Index):
+                # Python 3.8
+                slice_value = node.slice.value  # type: ignore[attr-defined]
+            else:
+                slice_value = node.slice
+
+            if isinstance(slice_value, Tuple):
+                if self._memo.name_matches(node.value, *annotated_names):
+                    # Only treat the first argument to typing.Annotated as a potential
+                    # forward reference
+                    items = cast(
+                        typing.List[expr],
+                        [self.visit(slice_value.elts[0])] + slice_value.elts[1:],
+                    )
+                else:
+                    items = cast(
+                        typing.List[expr],
+                        [self.visit(item) for item in slice_value.elts],
+                    )
+
+                # If this is a Union and any of the items is Any, erase the entire
+                # annotation
+                if self._memo.name_matches(node.value, "typing.Union") and any(
+                    item is None
+                    or (
+                        isinstance(item, expr)
+                        and self._memo.name_matches(item, *anytype_names)
+                    )
+                    for item in items
+                ):
+                    return None
+
+                # If all items in the subscript were Any, erase the subscript entirely
+                if all(item is None for item in items):
+                    return node.value
+
+                for index, item in enumerate(items):
+                    if item is None:
+                        items[index] = self.transformer._get_import("typing", "Any")
+
+                slice_value.elts = items
+            else:
+                self.generic_visit(node)
+
+                # If the transformer erased the slice entirely, just return the node
+                # value without the subscript (unless it's Optional, in which case erase
+                # the node entirely)
+                if self._memo.name_matches(
+                    node.value, "typing.Optional"
+                ) and not hasattr(node, "slice"):
+                    return None
+                if sys.version_info >= (3, 9) and not hasattr(node, "slice"):
+                    return node.value
+                elif sys.version_info < (3, 9) and not hasattr(node.slice, "value"):
+                    return node.value
+
+        return node
+
+    def visit_Name(self, node: Name) -> Any:
+        if self._memo.is_ignored_name(node):
+            return None
+
+        if sys.version_info < (3, 9):
+            for typename, substitute in self.type_substitutions.items():
+                if self._memo.name_matches(node, typename):
+                    new_node = self.transformer._get_import(*substitute)
+                    return copy_location(new_node, node)
+
+        return node
+
+    def visit_Call(self, node: Call) -> Any:
+        # Don't recurse into calls
+        return node
+
+    def visit_Constant(self, node: Constant) -> Any:
+        if isinstance(node.value, str):
+            expression = ast.parse(node.value, mode="eval")
+            new_node = self.visit(expression)
+            if new_node:
+                return copy_location(new_node.body, node)
+            else:
+                return None
+
+        return node
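+
+    # Net-effect sketch (illustrative): on Python 3.8, an annotation such as
+    #     dict[str, int] | None
+    # is rewritten to the equivalent of
+    #     Union[Dict[str, int], None]
+    # through visit_BinOp and the type_substitutions table, while an Any
+    # anywhere in a union erases the whole annotation (it goes unchecked).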
+
+
+class TypeguardTransformer(NodeTransformer):
+    def __init__(
+        self, target_path: Sequence[str] | None = None, target_lineno: int | None = None
+    ) -> None:
+        self._target_path = tuple(target_path) if target_path else None
+        self._memo = self._module_memo = TransformMemo(None, None, ())
+        self.names_used_in_annotations: set[str] = set()
+        self.target_node: FunctionDef | AsyncFunctionDef | None = None
+        self.target_lineno = target_lineno
+
+    def generic_visit(self, node: AST) -> AST:
+        has_non_empty_body_initially = bool(getattr(node, "body", None))
+        initial_type = type(node)
+
+        node = super().generic_visit(node)
+
+        if (
+            type(node) is initial_type
+            and has_non_empty_body_initially
+            and hasattr(node, "body")
+            and not node.body
+        ):
+            # If we still have the same node type after transformation
+            # but we've optimised its body away, we add a `pass` statement.
+            node.body = [Pass()]
+
+        return node
+
+    @contextmanager
+    def _use_memo(
+        self, node: ClassDef | FunctionDef | AsyncFunctionDef
+    ) -> Generator[None, Any, None]:
+        new_memo = TransformMemo(node, self._memo, self._memo.path + (node.name,))
+        old_memo = self._memo
+        self._memo = new_memo
+
+        if isinstance(node, (FunctionDef, AsyncFunctionDef)):
+            new_memo.should_instrument = (
+                self._target_path is None or new_memo.path == self._target_path
+            )
+            if new_memo.should_instrument:
+                # Check if the function is a generator function
+                detector = GeneratorDetector()
+                detector.visit(node)
+
+                # Extract yield, send and return types where possible from a subscripted
+                # annotation like Generator[int, str, bool]
+                return_annotation = deepcopy(node.returns)
+                if detector.contains_yields and new_memo.name_matches(
+                    return_annotation, *generator_names
+                ):
+                    if isinstance(return_annotation, Subscript):
+                        annotation_slice = return_annotation.slice
+
+                        # Python < 3.9
+                        if isinstance(annotation_slice, Index):
+                            annotation_slice = (
+                                annotation_slice.value  # type: ignore[attr-defined]
+                            )
+
+                        if isinstance(annotation_slice, Tuple):
+                            items = annotation_slice.elts
+                        else:
+                            items = [annotation_slice]
+
+                        if len(items) > 0:
+                            new_memo.yield_annotation = self._convert_annotation(
+                                items[0]
+                            )
+
+                        if len(items) > 1:
+                            new_memo.send_annotation = self._convert_annotation(
+                                items[1]
+                            )
+
+                        if len(items) > 2:
+                            new_memo.return_annotation = self._convert_annotation(
+                                items[2]
+                            )
+                else:
+                    new_memo.return_annotation = self._convert_annotation(
+                        return_annotation
+                    )
+
+        if isinstance(node, AsyncFunctionDef):
+            new_memo.is_async = True
+
+        yield
+        self._memo = old_memo
+
+    def _get_import(self, module: str, name: str) -> Name:
+        memo = self._memo if self._target_path else self._module_memo
+        return memo.get_import(module, name)
+
+    @overload
+    def _convert_annotation(self, annotation: None) -> None: ...
+
+    @overload
+    def _convert_annotation(self, annotation: expr) -> expr: ...
+
+    def _convert_annotation(self, annotation: expr | None) -> expr | None:
+        if annotation is None:
+            return None
+
+        # Convert PEP 604 unions (x | y) and generic built-in collections where
+        # necessary, and undo forward references
+        new_annotation = cast(expr, AnnotationTransformer(self).visit(annotation))
+        if isinstance(new_annotation, expr):
+            new_annotation = ast.copy_location(new_annotation, annotation)
+
+            # Store names used in the annotation
+            names = {node.id for node in walk(new_annotation) if isinstance(node, Name)}
+            self.names_used_in_annotations.update(names)
+
+        return new_annotation
+
+    def visit_Name(self, node: Name) -> Name:
+        self._memo.local_names.add(node.id)
+        return node
+
+    def visit_Module(self, node: Module) -> Module:
+        self._module_memo = self._memo = TransformMemo(node, None, ())
+        self.generic_visit(node)
+        self._module_memo.insert_imports(node)
+
+        fix_missing_locations(node)
+        return node
+
+    def visit_Import(self, node: Import) -> Import:
+        for name in node.names:
+            self._memo.local_names.add(name.asname or name.name)
+            self._memo.imported_names[name.asname or name.name] = name.name
+
+        return node
+
+    def visit_ImportFrom(self, node: ImportFrom) -> ImportFrom:
+        for name in node.names:
+            if name.name != "*":
+                alias = name.asname or name.name
+                self._memo.local_names.add(alias)
+                self._memo.imported_names[alias] = f"{node.module}.{name.name}"
+
+        return node
+
+    def visit_ClassDef(self, node: ClassDef) -> ClassDef | None:
+        self._memo.local_names.add(node.name)
+
+        # Eliminate top level classes not belonging to the target path
+        if (
+            self._target_path is not None
+            and not self._memo.path
+            and node.name != self._target_path[0]
+        ):
+            return None
+
+        with self._use_memo(node):
+            for decorator in node.decorator_list.copy():
+                if self._memo.name_matches(decorator, "typeguard.typechecked"):
+                    # Remove the decorator to prevent duplicate instrumentation
+                    node.decorator_list.remove(decorator)
+
+                    # Store any configuration overrides
+                    if isinstance(decorator, Call) and decorator.keywords:
+                        self._memo.configuration_overrides.update(
+                            {kw.arg: kw.value for kw in decorator.keywords if kw.arg}
+                        )
+
+            self.generic_visit(node)
+            return node
+
+    def visit_FunctionDef(
+        self, node: FunctionDef | AsyncFunctionDef
+    ) -> FunctionDef | AsyncFunctionDef | None:
+        """
+        Injects type checks for function arguments, and for a return of None if the
+        function is annotated to return something other than Any or None, and the body
+        ends without an explicit "return".
+
+        """
+        self._memo.local_names.add(node.name)
+
+        # Eliminate top level functions not belonging to the target path
+        if (
+            self._target_path is not None
+            and not self._memo.path
+            and node.name != self._target_path[0]
+        ):
+            return None
+
+        # Skip instrumentation if we're instrumenting the whole module and the function
+        # contains either @no_type_check or @typeguard_ignore
+        if self._target_path is None:
+            for decorator in node.decorator_list:
+                if self._memo.name_matches(decorator, *ignore_decorators):
+                    return node
+
+        with self._use_memo(node):
+            arg_annotations: dict[str, Any] = {}
+            if self._target_path is None or self._memo.path == self._target_path:
+                # Find line number we're supposed to match against
+                if node.decorator_list:
+                    first_lineno = node.decorator_list[0].lineno
+                else:
+                    first_lineno = node.lineno
+
+                for decorator in node.decorator_list.copy():
+                    if self._memo.name_matches(decorator, "typing.overload"):
+                        # Remove overloads entirely
+                        return None
+                    elif self._memo.name_matches(decorator, "typeguard.typechecked"):
+                        # Remove the decorator to prevent duplicate instrumentation
+                        node.decorator_list.remove(decorator)
+
+                        # Store any configuration overrides
+                        if isinstance(decorator, Call) and decorator.keywords:
+                            self._memo.configuration_overrides = {
+                                kw.arg: kw.value for kw in decorator.keywords if kw.arg
+                            }
+
+                if self.target_lineno == first_lineno:
+                    assert self.target_node is None
+                    self.target_node = node
+                    if node.decorator_list:
+                        self.target_lineno = node.decorator_list[0].lineno
+                    else:
+                        self.target_lineno = node.lineno
+
+                all_args = node.args.args + node.args.kwonlyargs + node.args.posonlyargs
+
+                # Ensure that any type shadowed by the positional or keyword-only
+                # argument names is ignored in this function
+                for arg in all_args:
+                    self._memo.ignored_names.add(arg.arg)
+
+                # Ensure that any type shadowed by the variable positional argument name
+                # (e.g. "args" in *args) is ignored this function
+                if node.args.vararg:
+                    self._memo.ignored_names.add(node.args.vararg.arg)
+
+                # Ensure that any type shadowed by the variable keyword argument name
+                # (e.g. "kwargs" in **kwargs) is ignored in this function
+                if node.args.kwarg:
+                    self._memo.ignored_names.add(node.args.kwarg.arg)
+
+                for arg in all_args:
+                    annotation = self._convert_annotation(deepcopy(arg.annotation))
+                    if annotation:
+                        arg_annotations[arg.arg] = annotation
+
+                if node.args.vararg:
+                    annotation_ = self._convert_annotation(node.args.vararg.annotation)
+                    if annotation_:
+                        if sys.version_info >= (3, 9):
+                            container = Name("tuple", ctx=Load())
+                        else:
+                            container = self._get_import("typing", "Tuple")
+
+                        subscript_slice: Tuple | Index = Tuple(
+                            [
+                                annotation_,
+                                Constant(Ellipsis),
+                            ],
+                            ctx=Load(),
+                        )
+                        if sys.version_info < (3, 9):
+                            subscript_slice = Index(subscript_slice, ctx=Load())
+
+                        arg_annotations[node.args.vararg.arg] = Subscript(
+                            container, subscript_slice, ctx=Load()
+                        )
+
+                if node.args.kwarg:
+                    annotation_ = self._convert_annotation(node.args.kwarg.annotation)
+                    if annotation_:
+                        if sys.version_info >= (3, 9):
+                            container = Name("dict", ctx=Load())
+                        else:
+                            container = self._get_import("typing", "Dict")
+
+                        subscript_slice = Tuple(
+                            [
+                                Name("str", ctx=Load()),
+                                annotation_,
+                            ],
+                            ctx=Load(),
+                        )
+                        if sys.version_info < (3, 9):
+                            subscript_slice = Index(subscript_slice, ctx=Load())
+
+                        arg_annotations[node.args.kwarg.arg] = Subscript(
+                            container, subscript_slice, ctx=Load()
+                        )
+
+                if arg_annotations:
+                    self._memo.variable_annotations.update(arg_annotations)
+
+            self.generic_visit(node)
+
+            if arg_annotations:
+                annotations_dict = Dict(
+                    keys=[Constant(key) for key in arg_annotations.keys()],
+                    values=[
+                        Tuple([Name(key, ctx=Load()), annotation], ctx=Load())
+                        for key, annotation in arg_annotations.items()
+                    ],
+                )
+                func_name = self._get_import(
+                    "typeguard._functions", "check_argument_types"
+                )
+                args = [
+                    self._memo.joined_path,
+                    annotations_dict,
+                    self._memo.get_memo_name(),
+                ]
+                node.body.insert(
+                    self._memo.code_inject_index, Expr(Call(func_name, args, []))
+                )
+
+            # Add a checked "return None" to the end if there's no explicit return
+            # Skip if the return annotation is None or Any
+            if (
+                self._memo.return_annotation
+                and (not self._memo.is_async or not self._memo.has_yield_expressions)
+                and not isinstance(node.body[-1], Return)
+                and (
+                    not isinstance(self._memo.return_annotation, Constant)
+                    or self._memo.return_annotation.value is not None
+                )
+            ):
+                func_name = self._get_import(
+                    "typeguard._functions", "check_return_type"
+                )
+                return_node = Return(
+                    Call(
+                        func_name,
+                        [
+                            self._memo.joined_path,
+                            Constant(None),
+                            self._memo.return_annotation,
+                            self._memo.get_memo_name(),
+                        ],
+                        [],
+                    )
+                )
+
+                # Replace a placeholder "pass" at the end
+                if isinstance(node.body[-1], Pass):
+                    copy_location(return_node, node.body[-1])
+                    del node.body[-1]
+
+                node.body.append(return_node)
+
+            # Insert code to create the call memo, if it was ever needed for this
+            # function
+            if self._memo.memo_var_name:
+                memo_kwargs: dict[str, Any] = {}
+                if self._memo.parent and isinstance(self._memo.parent.node, ClassDef):
+                    for decorator in node.decorator_list:
+                        if (
+                            isinstance(decorator, Name)
+                            and decorator.id == "staticmethod"
+                        ):
+                            break
+                        elif (
+                            isinstance(decorator, Name)
+                            and decorator.id == "classmethod"
+                        ):
+                            memo_kwargs["self_type"] = Name(
+                                id=node.args.args[0].arg, ctx=Load()
+                            )
+                            break
+                    else:
+                        if node.args.args:
+                            if node.name == "__new__":
+                                memo_kwargs["self_type"] = Name(
+                                    id=node.args.args[0].arg, ctx=Load()
+                                )
+                            else:
+                                memo_kwargs["self_type"] = Attribute(
+                                    Name(id=node.args.args[0].arg, ctx=Load()),
+                                    "__class__",
+                                    ctx=Load(),
+                                )
+
+                # Construct the function reference
+                # Nested functions get special treatment: the function name is added
+                # to free variables (and the closure of the resulting function)
+                names: list[str] = [node.name]
+                memo = self._memo.parent
+                while memo:
+                    if isinstance(memo.node, (FunctionDef, AsyncFunctionDef)):
+                        # This is a nested function. Use the function name as-is.
+                        del names[:-1]
+                        break
+                    elif not isinstance(memo.node, ClassDef):
+                        break
+
+                    names.insert(0, memo.node.name)
+                    memo = memo.parent
+
+                config_keywords = self._memo.get_config_keywords()
+                if config_keywords:
+                    memo_kwargs["config"] = Call(
+                        self._get_import("dataclasses", "replace"),
+                        [self._get_import("typeguard._config", "global_config")],
+                        config_keywords,
+                    )
+
+                self._memo.memo_var_name.id = self._memo.get_unused_name("memo")
+                memo_store_name = Name(id=self._memo.memo_var_name.id, ctx=Store())
+                globals_call = Call(Name(id="globals", ctx=Load()), [], [])
+                locals_call = Call(Name(id="locals", ctx=Load()), [], [])
+                memo_expr = Call(
+                    self._get_import("typeguard", "TypeCheckMemo"),
+                    [globals_call, locals_call],
+                    [keyword(key, value) for key, value in memo_kwargs.items()],
+                )
+                node.body.insert(
+                    self._memo.code_inject_index,
+                    Assign([memo_store_name], memo_expr),
+                )
+
+                self._memo.insert_imports(node)
+
+                # Special case the __new__() method to create a local alias from the
+                # class name to the first argument (usually "cls")
+                if (
+                    isinstance(node, FunctionDef)
+                    and node.args
+                    and self._memo.parent is not None
+                    and isinstance(self._memo.parent.node, ClassDef)
+                    and node.name == "__new__"
+                ):
+                    first_args_expr = Name(node.args.args[0].arg, ctx=Load())
+                    cls_name = Name(self._memo.parent.node.name, ctx=Store())
+                    node.body.insert(
+                        self._memo.code_inject_index,
+                        Assign([cls_name], first_args_expr),
+                    )
+
+                # Remove any placeholder "pass" at the end
+                if isinstance(node.body[-1], Pass):
+                    del node.body[-1]
+
+        return node
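+
+    # Overall rewrite sketch (illustrative; the injected names really come
+    # from _get_import / get_unused_name) for a function annotated "-> str":
+    #
+    #   def greet(name: str) -> str:
+    #       memo = TypeCheckMemo(globals(), locals())
+    #       check_argument_types('greet', {'name': (name, str)}, memo)
+    #       return check_return_type('greet', 'hi ' + name, str, memo)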
+
+    def visit_AsyncFunctionDef(
+        self, node: AsyncFunctionDef
+    ) -> FunctionDef | AsyncFunctionDef | None:
+        return self.visit_FunctionDef(node)
+
+    def visit_Return(self, node: Return) -> Return:
+        """This injects type checks into "return" statements."""
+        self.generic_visit(node)
+        if (
+            self._memo.return_annotation
+            and self._memo.should_instrument
+            and not self._memo.is_ignored_name(self._memo.return_annotation)
+        ):
+            func_name = self._get_import("typeguard._functions", "check_return_type")
+            old_node = node
+            retval = old_node.value or Constant(None)
+            node = Return(
+                Call(
+                    func_name,
+                    [
+                        self._memo.joined_path,
+                        retval,
+                        self._memo.return_annotation,
+                        self._memo.get_memo_name(),
+                    ],
+                    [],
+                )
+            )
+            copy_location(node, old_node)
+
+        return node
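+
+    # e.g. (sketch) "return compute()" in a function annotated "-> int"
+    # becomes "return check_return_type('func', compute(), int, memo)".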
+
+    def visit_Yield(self, node: Yield) -> Yield | Call:
+        """
+        This injects type checks into "yield" expressions, checking both the yielded
+        value and the value sent back to the generator, when appropriate.
+
+        """
+        self._memo.has_yield_expressions = True
+        self.generic_visit(node)
+
+        if (
+            self._memo.yield_annotation
+            and self._memo.should_instrument
+            and not self._memo.is_ignored_name(self._memo.yield_annotation)
+        ):
+            func_name = self._get_import("typeguard._functions", "check_yield_type")
+            yieldval = node.value or Constant(None)
+            node.value = Call(
+                func_name,
+                [
+                    self._memo.joined_path,
+                    yieldval,
+                    self._memo.yield_annotation,
+                    self._memo.get_memo_name(),
+                ],
+                [],
+            )
+
+        if (
+            self._memo.send_annotation
+            and self._memo.should_instrument
+            and not self._memo.is_ignored_name(self._memo.send_annotation)
+        ):
+            func_name = self._get_import("typeguard._functions", "check_send_type")
+            old_node = node
+            call_node = Call(
+                func_name,
+                [
+                    self._memo.joined_path,
+                    old_node,
+                    self._memo.send_annotation,
+                    self._memo.get_memo_name(),
+                ],
+                [],
+            )
+            copy_location(call_node, old_node)
+            return call_node
+
+        return node
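+
+    # e.g. (sketch) in "def gen() -> Generator[int, str, None]", a plain
+    # "yield x" becomes roughly
+    #     check_send_type('gen', (yield check_yield_type('gen', x, int, memo)),
+    #                     str, memo)
+    # so both the yielded value and the value sent back are verified.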
+
+    def visit_AnnAssign(self, node: AnnAssign) -> Any:
+        """
+        This injects a type check into a local variable annotation-assignment within a
+        function body.
+
+        """
+        self.generic_visit(node)
+
+        if (
+            isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef))
+            and node.annotation
+            and isinstance(node.target, Name)
+        ):
+            self._memo.ignored_names.add(node.target.id)
+            annotation = self._convert_annotation(deepcopy(node.annotation))
+            if annotation:
+                self._memo.variable_annotations[node.target.id] = annotation
+                if node.value:
+                    func_name = self._get_import(
+                        "typeguard._functions", "check_variable_assignment"
+                    )
+                    node.value = Call(
+                        func_name,
+                        [
+                            node.value,
+                            Constant(node.target.id),
+                            annotation,
+                            self._memo.get_memo_name(),
+                        ],
+                        [],
+                    )
+
+        return node
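+
+    # e.g. (sketch) "x: int = f()" becomes
+    # "x: int = check_variable_assignment(f(), 'x', int, memo)".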
+
+    def visit_Assign(self, node: Assign) -> Any:
+        """
+        This injects a type check into a local variable assignment within a function
+        body. The variable must have been annotated earlier in the function body.
+
+        """
+        self.generic_visit(node)
+
+        # Only instrument function-local assignments
+        if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)):
+            targets: list[dict[Constant, expr | None]] = []
+            check_required = False
+            for target in node.targets:
+                elts: Sequence[expr]
+                if isinstance(target, Name):
+                    elts = [target]
+                elif isinstance(target, Tuple):
+                    elts = target.elts
+                else:
+                    continue
+
+                annotations_: dict[Constant, expr | None] = {}
+                for exp in elts:
+                    prefix = ""
+                    if isinstance(exp, Starred):
+                        exp = exp.value
+                        prefix = "*"
+
+                    if isinstance(exp, Name):
+                        self._memo.ignored_names.add(exp.id)
+                        name = prefix + exp.id
+                        annotation = self._memo.variable_annotations.get(exp.id)
+                        if annotation:
+                            annotations_[Constant(name)] = annotation
+                            check_required = True
+                        else:
+                            annotations_[Constant(name)] = None
+
+                targets.append(annotations_)
+
+            if check_required:
+                # Replace missing annotations with typing.Any
+                for item in targets:
+                    for key, expression in item.items():
+                        if expression is None:
+                            item[key] = self._get_import("typing", "Any")
+
+                if len(targets) == 1 and len(targets[0]) == 1:
+                    func_name = self._get_import(
+                        "typeguard._functions", "check_variable_assignment"
+                    )
+                    target_varname = next(iter(targets[0]))
+                    node.value = Call(
+                        func_name,
+                        [
+                            node.value,
+                            target_varname,
+                            targets[0][target_varname],
+                            self._memo.get_memo_name(),
+                        ],
+                        [],
+                    )
+                elif targets:
+                    func_name = self._get_import(
+                        "typeguard._functions", "check_multi_variable_assignment"
+                    )
+                    targets_arg = List(
+                        [
+                            Dict(keys=list(target), values=list(target.values()))
+                            for target in targets
+                        ],
+                        ctx=Load(),
+                    )
+                    node.value = Call(
+                        func_name,
+                        [node.value, targets_arg, self._memo.get_memo_name()],
+                        [],
+                    )
+
+        return node
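+
+    # e.g. (sketch) with "x" annotated earlier in the body as int:
+    #     x = f()     ->  x = check_variable_assignment(f(), 'x', int, memo)
+    #     a, x = f()  ->  a, x = check_multi_variable_assignment(
+    #                         f(), [{'a': Any, 'x': int}], memo)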
+
+    def visit_NamedExpr(self, node: NamedExpr) -> Any:
+        """This injects a type check into an assignment expression (a := foo())."""
+        self.generic_visit(node)
+
+        # Only instrument function-local assignments
+        if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)) and isinstance(
+            node.target, Name
+        ):
+            self._memo.ignored_names.add(node.target.id)
+
+            # Bail out if no matching annotation is found
+            annotation = self._memo.variable_annotations.get(node.target.id)
+            if annotation is None:
+                return node
+
+            func_name = self._get_import(
+                "typeguard._functions", "check_variable_assignment"
+            )
+            node.value = Call(
+                func_name,
+                [
+                    node.value,
+                    Constant(node.target.id),
+                    annotation,
+                    self._memo.get_memo_name(),
+                ],
+                [],
+            )
+
+        return node
+
+    def visit_AugAssign(self, node: AugAssign) -> Any:
+        """
+        This injects a type check into an augmented assignment expression (a += 1).
+
+        """
+        self.generic_visit(node)
+
+        # Only instrument function-local assignments
+        if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)) and isinstance(
+            node.target, Name
+        ):
+            # Bail out if no matching annotation is found
+            annotation = self._memo.variable_annotations.get(node.target.id)
+            if annotation is None:
+                return node
+
+            # Bail out if the operator is not found (newer Python version?)
+            try:
+                operator_func_name = aug_assign_functions[node.op.__class__]
+            except KeyError:
+                return node
+
+            operator_func = self._get_import("operator", operator_func_name)
+            operator_call = Call(
+                operator_func, [Name(node.target.id, ctx=Load()), node.value], []
+            )
+            check_call = Call(
+                self._get_import("typeguard._functions", "check_variable_assignment"),
+                [
+                    operator_call,
+                    Constant(node.target.id),
+                    annotation,
+                    self._memo.get_memo_name(),
+                ],
+                [],
+            )
+            return Assign(targets=[node.target], value=check_call)
+
+        return node
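+
+    # e.g. (sketch) with "total" annotated as int, "total += n" becomes
+    # "total = check_variable_assignment(operator.iadd(total, n), 'total',
+    # int, memo)".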
+
+    def visit_If(self, node: If) -> Any:
+        """
+        This blocks names from being collected from a module-level
+        "if typing.TYPE_CHECKING:" block, so that they won't be type checked.
+
+        """
+        self.generic_visit(node)
+
+        if (
+            self._memo is self._module_memo
+            and isinstance(node.test, Name)
+            and self._memo.name_matches(node.test, "typing.TYPE_CHECKING")
+        ):
+            collector = NameCollector()
+            collector.visit(node)
+            self._memo.ignored_names.update(collector.names)
+
+        return node
diff --git a/pkg_resources/_vendor/typeguard/_union_transformer.py b/pkg_resources/_vendor/typeguard/_union_transformer.py
new file mode 100644
index 0000000000..19617e6af5
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_union_transformer.py
@@ -0,0 +1,55 @@
+"""
+Transforms lazily evaluated PEP 604 unions into typing.Union subscripts, for
+compatibility with Python versions older than 3.10.
+"""
+
+from __future__ import annotations
+
+from ast import (
+    BinOp,
+    BitOr,
+    Index,
+    Load,
+    Name,
+    NodeTransformer,
+    Subscript,
+    fix_missing_locations,
+    parse,
+)
+from ast import Tuple as ASTTuple
+from types import CodeType
+from typing import Any, Dict, FrozenSet, List, Set, Tuple, Union
+
+type_substitutions = {
+    "dict": Dict,
+    "list": List,
+    "tuple": Tuple,
+    "set": Set,
+    "frozenset": FrozenSet,
+    "Union": Union,
+}
+
+
+class UnionTransformer(NodeTransformer):
+    def __init__(self, union_name: Name | None = None):
+        self.union_name = union_name or Name(id="Union", ctx=Load())
+
+    def visit_BinOp(self, node: BinOp) -> Any:
+        self.generic_visit(node)
+        if isinstance(node.op, BitOr):
+            return Subscript(
+                value=self.union_name,
+                slice=Index(
+                    ASTTuple(elts=[node.left, node.right], ctx=Load()), ctx=Load()
+                ),
+                ctx=Load(),
+            )
+
+        return node
+
+
+def compile_type_hint(hint: str) -> CodeType:
+    parsed = parse(hint, "", "eval")
+    UnionTransformer().visit(parsed)
+    fix_missing_locations(parsed)
+    return compile(parsed, "", "eval", flags=0)
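+
+# Illustrative usage sketch (comment only): on Python < 3.10, a PEP 604 hint
+# string can be evaluated through this transformer:
+#
+#   code = compile_type_hint("int | str")
+#   hint = eval(code, {"Union": Union})  # -> typing.Union[int, str]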
diff --git a/pkg_resources/_vendor/typeguard/_utils.py b/pkg_resources/_vendor/typeguard/_utils.py
new file mode 100644
index 0000000000..9bcc8417f8
--- /dev/null
+++ b/pkg_resources/_vendor/typeguard/_utils.py
@@ -0,0 +1,173 @@
+from __future__ import annotations
+
+import inspect
+import sys
+from importlib import import_module
+from inspect import currentframe
+from types import CodeType, FrameType, FunctionType
+from typing import TYPE_CHECKING, Any, Callable, ForwardRef, Union, cast, final
+from weakref import WeakValueDictionary
+
+if TYPE_CHECKING:
+    from ._memo import TypeCheckMemo
+
+if sys.version_info >= (3, 13):
+    from typing import get_args, get_origin
+
+    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
+        return forwardref._evaluate(
+            memo.globals, memo.locals, type_params=(), recursive_guard=frozenset()
+        )
+
+elif sys.version_info >= (3, 10):
+    from typing import get_args, get_origin
+
+    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
+        return forwardref._evaluate(
+            memo.globals, memo.locals, recursive_guard=frozenset()
+        )
+
+else:
+    from typing_extensions import get_args, get_origin
+
+    evaluate_extra_args: tuple[frozenset[Any], ...] = (
+        (frozenset(),) if sys.version_info >= (3, 9) else ()
+    )
+
+    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
+        from ._union_transformer import compile_type_hint, type_substitutions
+
+        if not forwardref.__forward_evaluated__:
+            forwardref.__forward_code__ = compile_type_hint(forwardref.__forward_arg__)
+
+        try:
+            return forwardref._evaluate(memo.globals, memo.locals, *evaluate_extra_args)
+        except NameError:
+            if sys.version_info < (3, 10):
+                # Try again, with the type substitutions (list -> List etc.) in place
+                new_globals = memo.globals.copy()
+                new_globals.setdefault("Union", Union)
+                if sys.version_info < (3, 9):
+                    new_globals.update(type_substitutions)
+
+                return forwardref._evaluate(
+                    new_globals, memo.locals or new_globals, *evaluate_extra_args
+                )
+
+            raise
+
+
+_functions_map: WeakValueDictionary[CodeType, FunctionType] = WeakValueDictionary()
+
+
+def get_type_name(type_: Any) -> str:
+    name: str
+    for attrname in "__name__", "_name", "__forward_arg__":
+        candidate = getattr(type_, attrname, None)
+        if isinstance(candidate, str):
+            name = candidate
+            break
+    else:
+        origin = get_origin(type_)
+        candidate = getattr(origin, "_name", None)
+        if candidate is None:
+            candidate = type_.__class__.__name__.strip("_")
+
+        if isinstance(candidate, str):
+            name = candidate
+        else:
+            return "(unknown)"
+
+    args = get_args(type_)
+    if args:
+        if name == "Literal":
+            formatted_args = ", ".join(repr(arg) for arg in args)
+        else:
+            formatted_args = ", ".join(get_type_name(arg) for arg in args)
+
+        name += f"[{formatted_args}]"
+
+    module = getattr(type_, "__module__", None)
+    if module and module not in (None, "typing", "typing_extensions", "builtins"):
+        name = module + "." + name
+
+    return name
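+
+# e.g. (illustrative) get_type_name(int) == "int", while a parametrized
+# Literal alias formats its arguments with repr(), yielding something like
+# "Literal[1, 'x']".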
+
+
+def qualified_name(obj: Any, *, add_class_prefix: bool = False) -> str:
+    """
+    Return the qualified name (e.g. package.module.Type) for the given object.
+
+    Builtins and types from the :mod:`typing` package get special treatment by having
+    the module name stripped from the generated name.
+
+    """
+    if obj is None:
+        return "None"
+    elif inspect.isclass(obj):
+        prefix = "class " if add_class_prefix else ""
+        type_ = obj
+    else:
+        prefix = ""
+        type_ = type(obj)
+
+    module = type_.__module__
+    qualname = type_.__qualname__
+    name = qualname if module in ("typing", "builtins") else f"{module}.{qualname}"
+    return prefix + name
+
+
+def function_name(func: Callable[..., Any]) -> str:
+    """
+    Return the qualified name of the given function.
+
+    Builtins and types from the :mod:`typing` package get special treatment by having
+    the module name stripped from the generated name.
+
+    """
+    # For partial functions and objects with __call__ defined, __qualname__ does not
+    # exist
+    module = getattr(func, "__module__", "")
+    qualname = (module + ".") if module not in ("builtins", "") else ""
+    return qualname + getattr(func, "__qualname__", repr(func))
+
+
+def resolve_reference(reference: str) -> Any:
+    modulename, varname = reference.partition(":")[::2]
+    if not modulename or not varname:
+        raise ValueError(f"{reference!r} is not a module:varname reference")
+
+    obj = import_module(modulename)
+    for attr in varname.split("."):
+        obj = getattr(obj, attr)
+
+    return obj
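+
+# e.g. resolve_reference("collections.abc:Sequence.count") imports
+# collections.abc and then walks the attribute path "Sequence.count".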
+
+
+def is_method_of(obj: object, cls: type) -> bool:
+    return (
+        inspect.isfunction(obj)
+        and obj.__module__ == cls.__module__
+        and obj.__qualname__.startswith(cls.__qualname__ + ".")
+    )
+
+
+def get_stacklevel() -> int:
+    level = 1
+    frame = cast(FrameType, currentframe()).f_back
+    while frame and frame.f_globals.get("__name__", "").startswith("typeguard."):
+        level += 1
+        frame = frame.f_back
+
+    return level
+
+
+@final
+class Unset:
+    __slots__ = ()
+
+    def __repr__(self) -> str:
+        return ""
+
+
+unset = Unset()
diff --git a/pkg_resources/_vendor/typeguard/py.typed b/pkg_resources/_vendor/typeguard/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/LICENSE b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
new file mode 100644
index 0000000000..f26bcf4d2d
--- /dev/null
+++ b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
@@ -0,0 +1,279 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC.  Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
+year, the PythonLabs team moved to Digital Creations, which became
+Zope Corporation.  In 2001, the Python Software Foundation (PSF, see
+https://www.python.org/psf/) was formed, a non-profit organization
+created specifically to own Python-related Intellectual Property.
+Zope Corporation was a sponsoring member of the PSF.
+
+All Python releases are Open Source (see https://opensource.org for
+the Open Source Definition).  Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+    Release         Derived     Year        Owner       GPL-
+                    from                                compatible? (1)
+
+    0.9.0 thru 1.2              1991-1995   CWI         yes
+    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
+    1.6             1.5.2       2000        CNRI        no
+    2.0             1.6         2000        BeOpen.com  no
+    1.6.1           1.6         2001        CNRI        yes (2)
+    2.1             2.0+1.6.1   2001        PSF         no
+    2.0.1           2.0+1.6.1   2001        PSF         yes
+    2.1.1           2.1+2.0.1   2001        PSF         yes
+    2.1.2           2.1.1       2002        PSF         yes
+    2.1.3           2.1.2       2002        PSF         yes
+    2.2 and above   2.1.1       2001-now    PSF         yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+    the GPL.  All Python licenses, unlike the GPL, let you distribute
+    a modified version without making your changes open source.  The
+    GPL-compatible licenses make it possible to combine Python with
+    other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+    because its license has a choice of law clause.  According to
+    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+    is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/METADATA b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/METADATA
new file mode 100644
index 0000000000..f15e2b3877
--- /dev/null
+++ b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/METADATA
@@ -0,0 +1,67 @@
+Metadata-Version: 2.1
+Name: typing_extensions
+Version: 4.12.2
+Summary: Backported and Experimental Type Hints for Python 3.8+
+Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
+Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" 
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Topic :: Software Development
+Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues
+Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md
+Project-URL: Documentation, https://typing-extensions.readthedocs.io/
+Project-URL: Home, https://github.com/python/typing_extensions
+Project-URL: Q & A, https://github.com/python/typing/discussions
+Project-URL: Repository, https://github.com/python/typing_extensions
+
+# Typing Extensions
+
+[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)
+
+[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) –
+[PyPI](https://pypi.org/project/typing-extensions/)
+
+## Overview
+
+The `typing_extensions` module serves two related purposes:
+
+- Enable use of new type system features on older Python versions. For example,
+  `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
+  users on previous Python versions to use it too.
+- Enable experimentation with new type system PEPs before they are accepted and
+  added to the `typing` module.
+
+`typing_extensions` is treated specially by static type checkers such as
+mypy and pyright. Objects defined in `typing_extensions` are treated the same
+way as equivalent forms in `typing`.
+
+`typing_extensions` uses
+[Semantic Versioning](https://semver.org/). The
+major version will be incremented only for backwards-incompatible changes.
+Therefore, it's safe to depend
+on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,
+where `x.y` is the first version that includes all features you need.
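+For instance, a requirement like `typing_extensions >=4.12, <5` stays on the
+backwards-compatible 4.x series.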
+
+## Included items
+
+See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a
+complete listing of module contents.
+
+## Contributing
+
+See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)
+for how to contribute to `typing_extensions`.
+
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/RECORD b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/RECORD
new file mode 100644
index 0000000000..bc7b45334d
--- /dev/null
+++ b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/RECORD
@@ -0,0 +1,7 @@
+__pycache__/typing_extensions.cpython-312.pyc,,
+typing_extensions-4.12.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+typing_extensions-4.12.2.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
+typing_extensions-4.12.2.dist-info/METADATA,sha256=BeUQIa8cnYbrjWx-N8TOznM9UGW5Gm2DicVpDtRA8W0,3018
+typing_extensions-4.12.2.dist-info/RECORD,,
+typing_extensions-4.12.2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+typing_extensions.py,sha256=gwekpyG9DVG3lxWKX4ni8u7nk3We5slG98mA9F3DJQw,134451
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/WHEEL b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
new file mode 100644
index 0000000000..3b5e64b5e6
--- /dev/null
+++ b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.9.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/pkg_resources/_vendor/typing_extensions.py b/pkg_resources/_vendor/typing_extensions.py
new file mode 100644
index 0000000000..dec429ca87
--- /dev/null
+++ b/pkg_resources/_vendor/typing_extensions.py
@@ -0,0 +1,3641 @@
+import abc
+import collections
+import collections.abc
+import contextlib
+import functools
+import inspect
+import operator
+import sys
+import types as _types
+import typing
+import warnings
+
+__all__ = [
+    # Super-special typing primitives.
+    'Any',
+    'ClassVar',
+    'Concatenate',
+    'Final',
+    'LiteralString',
+    'ParamSpec',
+    'ParamSpecArgs',
+    'ParamSpecKwargs',
+    'Self',
+    'Type',
+    'TypeVar',
+    'TypeVarTuple',
+    'Unpack',
+
+    # ABCs (from collections.abc).
+    'Awaitable',
+    'AsyncIterator',
+    'AsyncIterable',
+    'Coroutine',
+    'AsyncGenerator',
+    'AsyncContextManager',
+    'Buffer',
+    'ChainMap',
+
+    # Concrete collection types.
+    'ContextManager',
+    'Counter',
+    'Deque',
+    'DefaultDict',
+    'NamedTuple',
+    'OrderedDict',
+    'TypedDict',
+
+    # Structural checks, a.k.a. protocols.
+    'SupportsAbs',
+    'SupportsBytes',
+    'SupportsComplex',
+    'SupportsFloat',
+    'SupportsIndex',
+    'SupportsInt',
+    'SupportsRound',
+
+    # One-off things.
+    'Annotated',
+    'assert_never',
+    'assert_type',
+    'clear_overloads',
+    'dataclass_transform',
+    'deprecated',
+    'Doc',
+    'get_overloads',
+    'final',
+    'get_args',
+    'get_origin',
+    'get_original_bases',
+    'get_protocol_members',
+    'get_type_hints',
+    'IntVar',
+    'is_protocol',
+    'is_typeddict',
+    'Literal',
+    'NewType',
+    'overload',
+    'override',
+    'Protocol',
+    'reveal_type',
+    'runtime',
+    'runtime_checkable',
+    'Text',
+    'TypeAlias',
+    'TypeAliasType',
+    'TypeGuard',
+    'TypeIs',
+    'TYPE_CHECKING',
+    'Never',
+    'NoReturn',
+    'ReadOnly',
+    'Required',
+    'NotRequired',
+
+    # Pure aliases, have always been in typing
+    'AbstractSet',
+    'AnyStr',
+    'BinaryIO',
+    'Callable',
+    'Collection',
+    'Container',
+    'Dict',
+    'ForwardRef',
+    'FrozenSet',
+    'Generator',
+    'Generic',
+    'Hashable',
+    'IO',
+    'ItemsView',
+    'Iterable',
+    'Iterator',
+    'KeysView',
+    'List',
+    'Mapping',
+    'MappingView',
+    'Match',
+    'MutableMapping',
+    'MutableSequence',
+    'MutableSet',
+    'NoDefault',
+    'Optional',
+    'Pattern',
+    'Reversible',
+    'Sequence',
+    'Set',
+    'Sized',
+    'TextIO',
+    'Tuple',
+    'Union',
+    'ValuesView',
+    'cast',
+    'no_type_check',
+    'no_type_check_decorator',
+]
+
+# for backward compatibility
+PEP_560 = True
+GenericMeta = type
+_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
+
+# The functions below are modified copies of typing internal helpers.
+# They are needed by _ProtocolMeta and they provide support for PEP 646.
+
+
+class _Sentinel:
+    def __repr__(self):
+        return ""
+
+
+_marker = _Sentinel()
+
+
+if sys.version_info >= (3, 10):
+    def _should_collect_from_parameters(t):
+        return isinstance(
+            t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
+        )
+elif sys.version_info >= (3, 9):
+    def _should_collect_from_parameters(t):
+        return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
+else:
+    def _should_collect_from_parameters(t):
+        return isinstance(t, typing._GenericAlias) and not t._special
+
+
+NoReturn = typing.NoReturn
+
+# Some unconstrained type variables.  These are used by the container types.
+# (These are not for export.)
+T = typing.TypeVar('T')  # Any type.
+KT = typing.TypeVar('KT')  # Key type.
+VT = typing.TypeVar('VT')  # Value type.
+T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
+T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
+
+
+if sys.version_info >= (3, 11):
+    from typing import Any
+else:
+
+    class _AnyMeta(type):
+        def __instancecheck__(self, obj):
+            if self is Any:
+                raise TypeError("typing_extensions.Any cannot be used with isinstance()")
+            return super().__instancecheck__(obj)
+
+        def __repr__(self):
+            if self is Any:
+                return "typing_extensions.Any"
+            return super().__repr__()
+
+    class Any(metaclass=_AnyMeta):
+        """Special type indicating an unconstrained type.
+        - Any is compatible with every type.
+        - Any is assumed to have all methods.
+        - All values are assumed to be instances of Any.
+        Note that all the above statements are true from the point of view of
+        static type checkers. At runtime, Any should not be used with instance
+        checks.
+        """
+        def __new__(cls, *args, **kwargs):
+            if cls is Any:
+                raise TypeError("Any cannot be instantiated")
+            return super().__new__(cls, *args, **kwargs)
+
+
+ClassVar = typing.ClassVar
+
+
+class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
+    def __repr__(self):
+        return 'typing_extensions.' + self._name
+
+
+Final = typing.Final
+
+if sys.version_info >= (3, 11):
+    final = typing.final
+else:
+    # @final exists in 3.8+, but we backport it for all versions
+    # before 3.11 to keep support for the __final__ attribute.
+    # See https://bugs.python.org/issue46342
+    def final(f):
+        """This decorator can be used to indicate to type checkers that
+        the decorated method cannot be overridden, and decorated class
+        cannot be subclassed. For example:
+
+            class Base:
+                @final
+                def done(self) -> None:
+                    ...
+            class Sub(Base):
+                def done(self) -> None:  # Error reported by type checker
+                    ...
+            @final
+            class Leaf:
+                ...
+            class Other(Leaf):  # Error reported by type checker
+                ...
+
+        There is no runtime checking of these properties. The decorator
+        sets the ``__final__`` attribute to ``True`` on the decorated object
+        to allow runtime introspection.
+        """
+        try:
+            f.__final__ = True
+        except (AttributeError, TypeError):
+            # Skip the attribute silently if it is not writable.
+            # AttributeError happens if the object has __slots__ or a
+            # read-only property, TypeError if it's a builtin class.
+            pass
+        return f
+
+
+def IntVar(name):
+    return typing.TypeVar(name)
+
+
+# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
+if sys.version_info >= (3, 10, 1):
+    Literal = typing.Literal
+else:
+    def _flatten_literal_params(parameters):
+        """An internal helper for Literal creation: flatten Literals among parameters"""
+        params = []
+        for p in parameters:
+            if isinstance(p, _LiteralGenericAlias):
+                params.extend(p.__args__)
+            else:
+                params.append(p)
+        return tuple(params)
+
+    def _value_and_type_iter(params):
+        for p in params:
+            yield p, type(p)
+
+    class _LiteralGenericAlias(typing._GenericAlias, _root=True):
+        def __eq__(self, other):
+            if not isinstance(other, _LiteralGenericAlias):
+                return NotImplemented
+            these_args_deduped = set(_value_and_type_iter(self.__args__))
+            other_args_deduped = set(_value_and_type_iter(other.__args__))
+            return these_args_deduped == other_args_deduped
+
+        def __hash__(self):
+            return hash(frozenset(_value_and_type_iter(self.__args__)))
+
+    class _LiteralForm(_ExtensionsSpecialForm, _root=True):
+        def __init__(self, doc: str):
+            self._name = 'Literal'
+            self._doc = self.__doc__ = doc
+
+        def __getitem__(self, parameters):
+            if not isinstance(parameters, tuple):
+                parameters = (parameters,)
+
+            parameters = _flatten_literal_params(parameters)
+
+            val_type_pairs = list(_value_and_type_iter(parameters))
+            try:
+                deduped_pairs = set(val_type_pairs)
+            except TypeError:
+                # unhashable parameters
+                pass
+            else:
+                # similar logic to typing._deduplicate on Python 3.9+
+                if len(deduped_pairs) < len(val_type_pairs):
+                    new_parameters = []
+                    for pair in val_type_pairs:
+                        if pair in deduped_pairs:
+                            new_parameters.append(pair[0])
+                            deduped_pairs.remove(pair)
+                    assert not deduped_pairs, deduped_pairs
+                    parameters = tuple(new_parameters)
+
+            return _LiteralGenericAlias(self, parameters)
+
+    Literal = _LiteralForm(doc="""\
+                           A type that can be used to indicate to type checkers
+                           that the corresponding value has a value literally equivalent
+                           to the provided parameter. For example:
+
+                               var: Literal[4] = 4
+
+                           The type checker understands that 'var' is literally equal to
+                           the value 4 and no other value.
+
+                           Literal[...] cannot be subclassed. There is no runtime
+                           checking verifying that the parameter is actually a value
+                           instead of a type.""")
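+    # Illustrative effect of the (value, type) deduplication implemented
+    # above: 1 and True compare equal but differ in type, so they are kept
+    # apart, while true duplicates collapse.
+    #
+    #     assert Literal[1, 2, 1] == Literal[1, 2]
+    #     assert Literal[1, True] != Literal[1]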
+
+
+_overload_dummy = typing._overload_dummy
+
+
+if hasattr(typing, "get_overloads"):  # 3.11+
+    overload = typing.overload
+    get_overloads = typing.get_overloads
+    clear_overloads = typing.clear_overloads
+else:
+    # {module: {qualname: {firstlineno: func}}}
+    _overload_registry = collections.defaultdict(
+        functools.partial(collections.defaultdict, dict)
+    )
+
+    def overload(func):
+        """Decorator for overloaded functions/methods.
+
+        In a stub file, place two or more stub definitions for the same
+        function in a row, each decorated with @overload.  For example:
+
+        @overload
+        def utf8(value: None) -> None: ...
+        @overload
+        def utf8(value: bytes) -> bytes: ...
+        @overload
+        def utf8(value: str) -> bytes: ...
+
+        In a non-stub file (i.e. a regular .py file), do the same but
+        follow it with an implementation.  The implementation should *not*
+        be decorated with @overload.  For example:
+
+        @overload
+        def utf8(value: None) -> None: ...
+        @overload
+        def utf8(value: bytes) -> bytes: ...
+        @overload
+        def utf8(value: str) -> bytes: ...
+        def utf8(value):
+            # implementation goes here
+
+        The overloads for a function can be retrieved at runtime using the
+        get_overloads() function.
+        """
+        # classmethod and staticmethod
+        f = getattr(func, "__func__", func)
+        try:
+            _overload_registry[f.__module__][f.__qualname__][
+                f.__code__.co_firstlineno
+            ] = func
+        except AttributeError:
+            # Not a normal function; ignore.
+            pass
+        return _overload_dummy
+
+    def get_overloads(func):
+        """Return all defined overloads for *func* as a sequence."""
+        # classmethod and staticmethod
+        f = getattr(func, "__func__", func)
+        if f.__module__ not in _overload_registry:
+            return []
+        mod_dict = _overload_registry[f.__module__]
+        if f.__qualname__ not in mod_dict:
+            return []
+        return list(mod_dict[f.__qualname__].values())
+
+    def clear_overloads():
+        """Clear all overloads in the registry."""
+        _overload_registry.clear()
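+    # A short usage sketch of the registry-based backport above: each
+    # @overload call files the stub under (module, qualname, first line) and
+    # returns _overload_dummy; get_overloads() then recovers the stubs.
+    #
+    #     @overload
+    #     def f(x: int) -> int: ...
+    #     @overload
+    #     def f(x: str) -> str: ...
+    #     def f(x): return x
+    #
+    #     assert len(get_overloads(f)) == 2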
+
+
+# This is not a real generic class.  Don't use outside annotations.
+Type = typing.Type
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+Awaitable = typing.Awaitable
+Coroutine = typing.Coroutine
+AsyncIterable = typing.AsyncIterable
+AsyncIterator = typing.AsyncIterator
+Deque = typing.Deque
+DefaultDict = typing.DefaultDict
+OrderedDict = typing.OrderedDict
+Counter = typing.Counter
+ChainMap = typing.ChainMap
+Text = typing.Text
+TYPE_CHECKING = typing.TYPE_CHECKING
+
+
+if sys.version_info >= (3, 13, 0, "beta"):
+    from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
+else:
+    def _is_dunder(attr):
+        return attr.startswith('__') and attr.endswith('__')
+
+    # Python <3.9 doesn't have typing._SpecialGenericAlias
+    _special_generic_alias_base = getattr(
+        typing, "_SpecialGenericAlias", typing._GenericAlias
+    )
+
+    class _SpecialGenericAlias(_special_generic_alias_base, _root=True):
+        def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
+            if _special_generic_alias_base is typing._GenericAlias:
+                # Python <3.9
+                self.__origin__ = origin
+                self._nparams = nparams
+                super().__init__(origin, nparams, special=True, inst=inst, name=name)
+            else:
+                # Python >= 3.9
+                super().__init__(origin, nparams, inst=inst, name=name)
+            self._defaults = defaults
+
+        def __setattr__(self, attr, val):
+            allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
+            if _special_generic_alias_base is typing._GenericAlias:
+                # Python <3.9
+                allowed_attrs.add("__origin__")
+            if _is_dunder(attr) or attr in allowed_attrs:
+                object.__setattr__(self, attr, val)
+            else:
+                setattr(self.__origin__, attr, val)
+
+        @typing._tp_cache
+        def __getitem__(self, params):
+            if not isinstance(params, tuple):
+                params = (params,)
+            msg = "Parameters to generic types must be types."
+            params = tuple(typing._type_check(p, msg) for p in params)
+            if (
+                self._defaults
+                and len(params) < self._nparams
+                and len(params) + len(self._defaults) >= self._nparams
+            ):
+                params = (*params, *self._defaults[len(params) - self._nparams:])
+            actual_len = len(params)
+
+            if actual_len != self._nparams:
+                if self._defaults:
+                    expected = f"at least {self._nparams - len(self._defaults)}"
+                else:
+                    expected = str(self._nparams)
+                if not self._nparams:
+                    raise TypeError(f"{self} is not a generic class")
+                raise TypeError(
+                    f"Too {'many' if actual_len > self._nparams else 'few'}"
+                    f" arguments for {self};"
+                    f" actual {actual_len}, expected {expected}"
+                )
+            return self.copy_with(params)
+
+    _NoneType = type(None)
+    Generator = _SpecialGenericAlias(
+        collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
+    )
+    AsyncGenerator = _SpecialGenericAlias(
+        collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
+    )
+    ContextManager = _SpecialGenericAlias(
+        contextlib.AbstractContextManager,
+        2,
+        name="ContextManager",
+        defaults=(typing.Optional[bool],)
+    )
+    AsyncContextManager = _SpecialGenericAlias(
+        contextlib.AbstractAsyncContextManager,
+        2,
+        name="AsyncContextManager",
+        defaults=(typing.Optional[bool],)
+    )
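+    # Net effect of the ``defaults`` machinery above: trailing type parameters
+    # may be omitted and are filled in from the defaults, so Generator[int]
+    # means Generator[int, None, None], and ContextManager[T] means
+    # ContextManager[T, Optional[bool]].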
+
+
+_PROTO_ALLOWLIST = {
+    'collections.abc': [
+        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+        'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
+    ],
+    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+    'typing_extensions': ['Buffer'],
+}
+
+
+_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
+    "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
+    "__final__",
+}
+
+
+def _get_protocol_attrs(cls):
+    attrs = set()
+    for base in cls.__mro__[:-1]:  # without object
+        if base.__name__ in {'Protocol', 'Generic'}:
+            continue
+        annotations = getattr(base, '__annotations__', {})
+        for attr in (*base.__dict__, *annotations):
+            if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
+                attrs.add(attr)
+    return attrs
+
+
+def _caller(depth=2):
+    try:
+        return sys._getframe(depth).f_globals.get('__name__', '__main__')
+    except (AttributeError, ValueError):  # For platforms without _getframe()
+        return None
+
+
+# The `__match_args__` attribute was removed from protocol members in 3.13;
+# we want to backport this change to older Python versions.
+if sys.version_info >= (3, 13):
+    Protocol = typing.Protocol
+else:
+    def _allow_reckless_class_checks(depth=3):
+        """Allow instance and class checks for special stdlib modules.
+        The abc and functools modules indiscriminately call isinstance() and
+        issubclass() on the whole MRO of a user class, which may contain protocols.
+        """
+        return _caller(depth) in {'abc', 'functools', None}
+
+    def _no_init(self, *args, **kwargs):
+        if type(self)._is_protocol:
+            raise TypeError('Protocols cannot be instantiated')
+
+    def _type_check_issubclass_arg_1(arg):
+        """Raise TypeError if `arg` is not an instance of `type`
+    in `issubclass(arg, <protocol>)`.
+
+        In most cases, this is verified by type.__subclasscheck__.
+        Checking it again unnecessarily would slow down issubclass() checks,
+        so, we don't perform this check unless we absolutely have to.
+
+        For various error paths, however,
+        we want to ensure that *this* error message is shown to the user
+        where relevant, rather than a typing.py-specific error message.
+        """
+        if not isinstance(arg, type):
+            # Same error message as for issubclass(1, int).
+            raise TypeError('issubclass() arg 1 must be a class')
+
+    # Inheriting from typing._ProtocolMeta isn't actually desirable,
+    # but is necessary to allow typing.Protocol and typing_extensions.Protocol
+    # to mix without getting TypeErrors about "metaclass conflict"
+    class _ProtocolMeta(type(typing.Protocol)):
+        # This metaclass is somewhat unfortunate,
+        # but is necessary for several reasons...
+        #
+        # NOTE: DO NOT call super() in any methods in this class
+        # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11
+        # and those are slow
+        def __new__(mcls, name, bases, namespace, **kwargs):
+            if name == "Protocol" and len(bases) < 2:
+                pass
+            elif {Protocol, typing.Protocol} & set(bases):
+                for base in bases:
+                    if not (
+                        base in {object, typing.Generic, Protocol, typing.Protocol}
+                        or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
+                        or is_protocol(base)
+                    ):
+                        raise TypeError(
+                            f"Protocols can only inherit from other protocols, "
+                            f"got {base!r}"
+                        )
+            return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
+
+        def __init__(cls, *args, **kwargs):
+            abc.ABCMeta.__init__(cls, *args, **kwargs)
+            if getattr(cls, "_is_protocol", False):
+                cls.__protocol_attrs__ = _get_protocol_attrs(cls)
+
+        def __subclasscheck__(cls, other):
+            if cls is Protocol:
+                return type.__subclasscheck__(cls, other)
+            if (
+                getattr(cls, '_is_protocol', False)
+                and not _allow_reckless_class_checks()
+            ):
+                if not getattr(cls, '_is_runtime_protocol', False):
+                    _type_check_issubclass_arg_1(other)
+                    raise TypeError(
+                        "Instance and class checks can only be used with "
+                        "@runtime_checkable protocols"
+                    )
+                if (
+                    # this attribute is set by @runtime_checkable:
+                    cls.__non_callable_proto_members__
+                    and cls.__dict__.get("__subclasshook__") is _proto_hook
+                ):
+                    _type_check_issubclass_arg_1(other)
+                    non_method_attrs = sorted(cls.__non_callable_proto_members__)
+                    raise TypeError(
+                        "Protocols with non-method members don't support issubclass()."
+                        f" Non-method members: {str(non_method_attrs)[1:-1]}."
+                    )
+            return abc.ABCMeta.__subclasscheck__(cls, other)
+
+        def __instancecheck__(cls, instance):
+            # We need this method for situations where attributes are
+            # assigned in __init__.
+            if cls is Protocol:
+                return type.__instancecheck__(cls, instance)
+            if not getattr(cls, "_is_protocol", False):
+                # i.e., it's a concrete subclass of a protocol
+                return abc.ABCMeta.__instancecheck__(cls, instance)
+
+            if (
+                not getattr(cls, '_is_runtime_protocol', False) and
+                not _allow_reckless_class_checks()
+            ):
+                raise TypeError("Instance and class checks can only be used with"
+                                " @runtime_checkable protocols")
+
+            if abc.ABCMeta.__instancecheck__(cls, instance):
+                return True
+
+            for attr in cls.__protocol_attrs__:
+                try:
+                    val = inspect.getattr_static(instance, attr)
+                except AttributeError:
+                    break
+                # this attribute is set by @runtime_checkable:
+                if val is None and attr not in cls.__non_callable_proto_members__:
+                    break
+            else:
+                return True
+
+            return False
+
+        def __eq__(cls, other):
+            # Hack so that typing.Generic.__class_getitem__
+            # treats typing_extensions.Protocol
+            # as equivalent to typing.Protocol
+            if abc.ABCMeta.__eq__(cls, other) is True:
+                return True
+            return cls is Protocol and other is typing.Protocol
+
+        # This has to be defined, or the abc-module cache
+        # complains about classes with this metaclass being unhashable,
+        # if we define only __eq__!
+        def __hash__(cls) -> int:
+            return type.__hash__(cls)
+
+    @classmethod
+    def _proto_hook(cls, other):
+        if not cls.__dict__.get('_is_protocol', False):
+            return NotImplemented
+
+        for attr in cls.__protocol_attrs__:
+            for base in other.__mro__:
+                # Check if the member appears in the class dictionary...
+                if attr in base.__dict__:
+                    if base.__dict__[attr] is None:
+                        return NotImplemented
+                    break
+
+                # ...or in annotations, if it is a sub-protocol.
+                annotations = getattr(base, '__annotations__', {})
+                if (
+                    isinstance(annotations, collections.abc.Mapping)
+                    and attr in annotations
+                    and is_protocol(other)
+                ):
+                    break
+            else:
+                return NotImplemented
+        return True
+
+    class Protocol(typing.Generic, metaclass=_ProtocolMeta):
+        __doc__ = typing.Protocol.__doc__
+        __slots__ = ()
+        _is_protocol = True
+        _is_runtime_protocol = False
+
+        def __init_subclass__(cls, *args, **kwargs):
+            super().__init_subclass__(*args, **kwargs)
+
+            # Determine if this is a protocol or a concrete subclass.
+            if not cls.__dict__.get('_is_protocol', False):
+                cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+            # Set (or override) the protocol subclass hook.
+            if '__subclasshook__' not in cls.__dict__:
+                cls.__subclasshook__ = _proto_hook
+
+            # Prohibit instantiation for protocol classes
+            if cls._is_protocol and cls.__init__ is Protocol.__init__:
+                cls.__init__ = _no_init
+
+
+if sys.version_info >= (3, 13):
+    runtime_checkable = typing.runtime_checkable
+else:
+    def runtime_checkable(cls):
+        """Mark a protocol class as a runtime protocol.
+
+        Such a protocol can be used with isinstance() and issubclass().
+        Raise TypeError if applied to a non-protocol class.
+        This allows a simple-minded structural check very similar to
+        the one-trick ponies in collections.abc such as Iterable.
+
+        For example::
+
+            @runtime_checkable
+            class Closable(Protocol):
+                def close(self): ...
+
+            assert isinstance(open('/some/file'), Closable)
+
+        Warning: this will check only the presence of the required methods,
+        not their type signatures!
+        """
+        if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
+            raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
+                            f' got {cls!r}')
+        cls._is_runtime_protocol = True
+
+        # typing.Protocol classes on <=3.11 break if we execute this block,
+        # because typing.Protocol classes on <=3.11 don't have a
+        # `__protocol_attrs__` attribute, and this block relies on the
+        # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
+        # break if we *don't* execute this block, because *they* assume that all
+        # protocol classes have a `__non_callable_proto_members__` attribute
+        # (which this block sets)
+        if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
+            # PEP 544 prohibits using issubclass()
+            # with protocols that have non-method members.
+            # See gh-113320 for why we compute this attribute here,
+            # rather than in `_ProtocolMeta.__init__`
+            cls.__non_callable_proto_members__ = set()
+            for attr in cls.__protocol_attrs__:
+                try:
+                    is_callable = callable(getattr(cls, attr, None))
+                except Exception as e:
+                    raise TypeError(
+                        f"Failed to determine whether protocol member {attr!r} "
+                        "is a method member"
+                    ) from e
+                else:
+                    if not is_callable:
+                        cls.__non_callable_proto_members__.add(attr)
+
+        return cls
+
+
+# The "runtime" alias exists for backwards compatibility.
+runtime = runtime_checkable
+
+
+# Our version of runtime-checkable protocols is faster on Python 3.8-3.11
+if sys.version_info >= (3, 12):
+    SupportsInt = typing.SupportsInt
+    SupportsFloat = typing.SupportsFloat
+    SupportsComplex = typing.SupportsComplex
+    SupportsBytes = typing.SupportsBytes
+    SupportsIndex = typing.SupportsIndex
+    SupportsAbs = typing.SupportsAbs
+    SupportsRound = typing.SupportsRound
+else:
+    @runtime_checkable
+    class SupportsInt(Protocol):
+        """An ABC with one abstract method __int__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __int__(self) -> int:
+            pass
+
+    @runtime_checkable
+    class SupportsFloat(Protocol):
+        """An ABC with one abstract method __float__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __float__(self) -> float:
+            pass
+
+    @runtime_checkable
+    class SupportsComplex(Protocol):
+        """An ABC with one abstract method __complex__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __complex__(self) -> complex:
+            pass
+
+    @runtime_checkable
+    class SupportsBytes(Protocol):
+        """An ABC with one abstract method __bytes__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __bytes__(self) -> bytes:
+            pass
+
+    @runtime_checkable
+    class SupportsIndex(Protocol):
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __index__(self) -> int:
+            pass
+
+    @runtime_checkable
+    class SupportsAbs(Protocol[T_co]):
+        """
+        An ABC with one abstract method __abs__ that is covariant in its return type.
+        """
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __abs__(self) -> T_co:
+            pass
+
+    @runtime_checkable
+    class SupportsRound(Protocol[T_co]):
+        """
+        An ABC with one abstract method __round__ that is covariant in its return type.
+        """
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __round__(self, ndigits: int = 0) -> T_co:
+            pass
+
+
+def _ensure_subclassable(mro_entries):
+    def inner(func):
+        if sys.implementation.name == "pypy" and sys.version_info < (3, 9):
+            cls_dict = {
+                "__call__": staticmethod(func),
+                "__mro_entries__": staticmethod(mro_entries)
+            }
+            t = type(func.__name__, (), cls_dict)
+            return functools.update_wrapper(t(), func)
+        else:
+            func.__mro_entries__ = mro_entries
+            return func
+    return inner
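+# Why the __mro_entries__ hook above works: per PEP 560, when a non-class
+# object (here, the TypedDict function defined below) appears in a class's
+# bases list, Python substitutes the result of its __mro_entries__, so
+# ``class Point(TypedDict): ...`` resolves to a real base class.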
+
+
+# Update this to something like >=3.13.0b1 if and when
+# PEP 728 is implemented in CPython
+_PEP_728_IMPLEMENTED = False
+
+if _PEP_728_IMPLEMENTED:
+    # The standard library TypedDict in Python 3.8 does not store runtime information
+    # about which (if any) keys are optional.  See https://bugs.python.org/issue38834
+    # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
+    # keyword with old-style TypedDict().  See https://bugs.python.org/issue42059
+    # The standard library TypedDict below Python 3.11 does not store runtime
+    # information about optional and required keys when using Required or NotRequired.
+    # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
+    # Additionally, on 3.12 we add __orig_bases__ to TypedDict
+    # to enable better runtime introspection.
+    # On 3.13 we deprecate some odd ways of creating TypedDicts.
+    # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
+    # PEP 728 (still pending) makes more changes.
+    TypedDict = typing.TypedDict
+    _TypedDictMeta = typing._TypedDictMeta
+    is_typeddict = typing.is_typeddict
+else:
+    # typing._type_check accepts a "module" keyword argument on 3.10.0 and later
+    _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
+
+    def _get_typeddict_qualifiers(annotation_type):
+        while True:
+            annotation_origin = get_origin(annotation_type)
+            if annotation_origin is Annotated:
+                annotation_args = get_args(annotation_type)
+                if annotation_args:
+                    annotation_type = annotation_args[0]
+                else:
+                    break
+            elif annotation_origin is Required:
+                yield Required
+                annotation_type, = get_args(annotation_type)
+            elif annotation_origin is NotRequired:
+                yield NotRequired
+                annotation_type, = get_args(annotation_type)
+            elif annotation_origin is ReadOnly:
+                yield ReadOnly
+                annotation_type, = get_args(annotation_type)
+            else:
+                break
+
+    class _TypedDictMeta(type):
+        def __new__(cls, name, bases, ns, *, total=True, closed=False):
+            """Create new typed dict class object.
+
+            This method is called when TypedDict is subclassed,
+            or when TypedDict is instantiated. This way
+            TypedDict supports all three syntax forms described in its docstring.
+            Subclasses and instances of TypedDict return actual dictionaries.
+            """
+            for base in bases:
+                if type(base) is not _TypedDictMeta and base is not typing.Generic:
+                    raise TypeError('cannot inherit from both a TypedDict type '
+                                    'and a non-TypedDict base class')
+
+            if any(issubclass(b, typing.Generic) for b in bases):
+                generic_base = (typing.Generic,)
+            else:
+                generic_base = ()
+
+            # typing.py generally doesn't let you inherit from plain Generic, unless
+            # the name of the class happens to be "Protocol"
+            tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
+            tp_dict.__name__ = name
+            if tp_dict.__qualname__ == "Protocol":
+                tp_dict.__qualname__ = name
+
+            if not hasattr(tp_dict, '__orig_bases__'):
+                tp_dict.__orig_bases__ = bases
+
+            annotations = {}
+            if "__annotations__" in ns:
+                own_annotations = ns["__annotations__"]
+            elif "__annotate__" in ns:
+                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+                own_annotations = ns["__annotate__"](1)
+            else:
+                own_annotations = {}
+            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+            if _TAKES_MODULE:
+                own_annotations = {
+                    n: typing._type_check(tp, msg, module=tp_dict.__module__)
+                    for n, tp in own_annotations.items()
+                }
+            else:
+                own_annotations = {
+                    n: typing._type_check(tp, msg)
+                    for n, tp in own_annotations.items()
+                }
+            required_keys = set()
+            optional_keys = set()
+            readonly_keys = set()
+            mutable_keys = set()
+            extra_items_type = None
+
+            for base in bases:
+                base_dict = base.__dict__
+
+                annotations.update(base_dict.get('__annotations__', {}))
+                required_keys.update(base_dict.get('__required_keys__', ()))
+                optional_keys.update(base_dict.get('__optional_keys__', ()))
+                readonly_keys.update(base_dict.get('__readonly_keys__', ()))
+                mutable_keys.update(base_dict.get('__mutable_keys__', ()))
+                base_extra_items_type = base_dict.get('__extra_items__', None)
+                if base_extra_items_type is not None:
+                    extra_items_type = base_extra_items_type
+
+            if closed and extra_items_type is None:
+                extra_items_type = Never
+            if closed and "__extra_items__" in own_annotations:
+                annotation_type = own_annotations.pop("__extra_items__")
+                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+                if Required in qualifiers:
+                    raise TypeError(
+                        "Special key __extra_items__ does not support "
+                        "Required"
+                    )
+                if NotRequired in qualifiers:
+                    raise TypeError(
+                        "Special key __extra_items__ does not support "
+                        "NotRequired"
+                    )
+                extra_items_type = annotation_type
+
+            annotations.update(own_annotations)
+            for annotation_key, annotation_type in own_annotations.items():
+                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+
+                if Required in qualifiers:
+                    required_keys.add(annotation_key)
+                elif NotRequired in qualifiers:
+                    optional_keys.add(annotation_key)
+                elif total:
+                    required_keys.add(annotation_key)
+                else:
+                    optional_keys.add(annotation_key)
+                if ReadOnly in qualifiers:
+                    mutable_keys.discard(annotation_key)
+                    readonly_keys.add(annotation_key)
+                else:
+                    mutable_keys.add(annotation_key)
+                    readonly_keys.discard(annotation_key)
+
+            tp_dict.__annotations__ = annotations
+            tp_dict.__required_keys__ = frozenset(required_keys)
+            tp_dict.__optional_keys__ = frozenset(optional_keys)
+            tp_dict.__readonly_keys__ = frozenset(readonly_keys)
+            tp_dict.__mutable_keys__ = frozenset(mutable_keys)
+            if not hasattr(tp_dict, '__total__'):
+                tp_dict.__total__ = total
+            tp_dict.__closed__ = closed
+            tp_dict.__extra_items__ = extra_items_type
+            return tp_dict
+
+        __call__ = dict  # static method
+
+        def __subclasscheck__(cls, other):
+            # Typed dicts are only for static structural subtyping.
+            raise TypeError('TypedDict does not support instance and class checks')
+
+        __instancecheck__ = __subclasscheck__
+
+    _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+
+    @_ensure_subclassable(lambda bases: (_TypedDict,))
+    def TypedDict(typename, fields=_marker, /, *, total=True, closed=False, **kwargs):
+        """A simple typed namespace. At runtime it is equivalent to a plain dict.
+
+        TypedDict creates a dictionary type such that a type checker will expect all
+        instances to have a certain set of keys, where each key is
+        associated with a value of a consistent type. This expectation
+        is not checked at runtime.
+
+        Usage::
+
+            class Point2D(TypedDict):
+                x: int
+                y: int
+                label: str
+
+            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
+            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
+
+            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+        The type info can be accessed via the Point2D.__annotations__ dict, and
+        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+        TypedDict supports an additional equivalent form::
+
+            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+        By default, all keys must be present in a TypedDict. It is possible
+        to override this by specifying totality::
+
+            class Point2D(TypedDict, total=False):
+                x: int
+                y: int
+
+        This means that a Point2D TypedDict can have any of the keys omitted. A type
+        checker is only expected to support a literal False or True as the value of
+        the total argument. True is the default, and makes all items defined in the
+        class body be required.
+
+        The Required and NotRequired special forms can also be used to mark
+        individual keys as being required or not required::
+
+            class Point2D(TypedDict):
+                x: int  # the "x" key must always be present (Required is the default)
+                y: NotRequired[int]  # the "y" key can be omitted
+
+        See PEP 655 for more details on Required and NotRequired.
+        """
+        if fields is _marker or fields is None:
+            if fields is _marker:
+                deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+            else:
+                deprecated_thing = "Passing `None` as the 'fields' parameter"
+
+            example = f"`{typename} = TypedDict({typename!r}, {{}})`"
+            deprecation_msg = (
+                f"{deprecated_thing} is deprecated and will be disallowed in "
+                "Python 3.15. To create a TypedDict class with 0 fields "
+                "using the functional syntax, pass an empty dictionary, e.g. "
+            ) + example + "."
+            warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
+            if closed is not False and closed is not True:
+                kwargs["closed"] = closed
+                closed = False
+            fields = kwargs
+        elif kwargs:
+            raise TypeError("TypedDict takes either a dict or keyword arguments,"
+                            " but not both")
+        if kwargs:
+            if sys.version_info >= (3, 13):
+                raise TypeError("TypedDict takes no keyword arguments")
+            warnings.warn(
+                "The kwargs-based syntax for TypedDict definitions is deprecated "
+                "in Python 3.11, will be removed in Python 3.13, and may not be "
+                "understood by third-party type checkers.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        ns = {'__annotations__': dict(fields)}
+        module = _caller()
+        if module is not None:
+            # Setting correct module is necessary to make typed dict classes pickleable.
+            ns['__module__'] = module
+
+        td = _TypedDictMeta(typename, (), ns, total=total, closed=closed)
+        td.__orig_bases__ = (TypedDict,)
+        return td
+
+    if hasattr(typing, "_TypedDictMeta"):
+        _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
+    else:
+        _TYPEDDICT_TYPES = (_TypedDictMeta,)
+
+    def is_typeddict(tp):
+        """Check if an annotation is a TypedDict class
+
+        For example::
+            class Film(TypedDict):
+                title: str
+                year: int
+
+            is_typeddict(Film)  # => True
+            is_typeddict(Union[list, str])  # => False
+        """
+        # On 3.8, this would otherwise return True
+        if hasattr(typing, "TypedDict") and tp is typing.TypedDict:
+            return False
+        return isinstance(tp, _TYPEDDICT_TYPES)
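+
+# Illustrative usage sketch (hypothetical, not from upstream): how the
+# key-classification logic above surfaces at runtime. `Movie` is an example
+# name; uncomment into a separate module to run.
+#
+#     class Movie(TypedDict, total=False):
+#         title: Required[str]
+#         year: int
+#         rating: ReadOnly[float]
+#
+#     assert Movie.__required_keys__ == frozenset({'title'})
+#     assert Movie.__optional_keys__ == frozenset({'year', 'rating'})
+#     assert Movie.__readonly_keys__ == frozenset({'rating'})
+#     assert is_typeddict(Movie)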
+
+
+if hasattr(typing, "assert_type"):
+    assert_type = typing.assert_type
+
+else:
+    def assert_type(val, typ, /):
+        """Assert (to the type checker) that the value is of the given type.
+
+        When the type checker encounters a call to assert_type(), it
+        emits an error if the value is not of the specified type::
+
+            def greet(name: str) -> None:
+                assert_type(name, str)  # ok
+                assert_type(name, int)  # type checker error
+
+        At runtime this returns the first argument unchanged and otherwise
+        does nothing.
+        """
+        return val
+

+
+if hasattr(typing, "ReadOnly"):  # 3.13+
+    get_type_hints = typing.get_type_hints
+else:  # <=3.12
+    # replaces _strip_annotations()
+    def _strip_extras(t):
+        """Strips Annotated, Required and NotRequired from a given type."""
+        if isinstance(t, _AnnotatedAlias):
+            return _strip_extras(t.__origin__)
+        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
+            return _strip_extras(t.__args__[0])
+        if isinstance(t, typing._GenericAlias):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return t.copy_with(stripped_args)
+        if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return _types.GenericAlias(t.__origin__, stripped_args)
+        if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return functools.reduce(operator.or_, stripped_args)
+
+        return t
+
+    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+        """Return type hints for an object.
+
+        This is often the same as obj.__annotations__, but it handles
+        forward references encoded as string literals, adds Optional[t] if a
+        default value equal to None is set and recursively replaces all
+        'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
+        (unless 'include_extras=True').
+
+        The argument may be a module, class, method, or function. The annotations
+        are returned as a dictionary. For classes, annotations include also
+        inherited members.
+
+        TypeError is raised if the argument is not of a type that can contain
+        annotations, and an empty dictionary is returned if no annotations are
+        present.
+
+        BEWARE -- the behavior of globalns and localns is counterintuitive
+        (unless you are familiar with how eval() and exec() work).  The
+        search order is locals first, then globals.
+
+        - If no dict arguments are passed, an attempt is made to use the
+          globals from obj (or the respective module's globals for classes),
+          and these are also used as the locals.  If the object does not appear
+          to have globals, an empty dictionary is used.
+
+        - If one dict argument is passed, it is used for both globals and
+          locals.
+
+        - If two dict arguments are passed, they specify globals and
+          locals, respectively.
+        """
+        if hasattr(typing, "Annotated"):  # 3.9+
+            hint = typing.get_type_hints(
+                obj, globalns=globalns, localns=localns, include_extras=True
+            )
+        else:  # 3.8
+            hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
+        if include_extras:
+            return hint
+        return {k: _strip_extras(t) for k, t in hint.items()}
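+
+# Illustrative sketch (hypothetical, not from upstream): the stripping
+# described above, on an example TypedDict. With the default
+# include_extras=False, the Annotated/Required/NotRequired wrappers are
+# removed from each hint.
+#
+#     class Movie(TypedDict):
+#         title: Annotated[Required[str], "display name"]
+#         year: NotRequired[int]
+#
+#     assert get_type_hints(Movie) == {'title': str, 'year': int}
+#     hints = get_type_hints(Movie, include_extras=True)
+#     # hints['title'] is still Annotated[Required[str], "display name"]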
+
+
+# Python 3.9+ has PEP 593 (Annotated)
+if hasattr(typing, 'Annotated'):
+    Annotated = typing.Annotated
+    # Not exported and not a public API, but needed for get_origin() and get_args()
+    # to work.
+    _AnnotatedAlias = typing._AnnotatedAlias
+# 3.8
+else:
+    class _AnnotatedAlias(typing._GenericAlias, _root=True):
+        """Runtime representation of an annotated type.
+
+        At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+        with extra annotations. The alias behaves like a normal typing alias,
+        instantiating is the same as instantiating the underlying type, binding
+        it to types is also the same.
+        """
+        def __init__(self, origin, metadata):
+            if isinstance(origin, _AnnotatedAlias):
+                metadata = origin.__metadata__ + metadata
+                origin = origin.__origin__
+            super().__init__(origin, origin)
+            self.__metadata__ = metadata
+
+        def copy_with(self, params):
+            assert len(params) == 1
+            new_type = params[0]
+            return _AnnotatedAlias(new_type, self.__metadata__)
+
+        def __repr__(self):
+            return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
+                    f"{', '.join(repr(a) for a in self.__metadata__)}]")
+
+        def __reduce__(self):
+            return operator.getitem, (
+                Annotated, (self.__origin__, *self.__metadata__)
+            )
+
+        def __eq__(self, other):
+            if not isinstance(other, _AnnotatedAlias):
+                return NotImplemented
+            if self.__origin__ != other.__origin__:
+                return False
+            return self.__metadata__ == other.__metadata__
+
+        def __hash__(self):
+            return hash((self.__origin__, self.__metadata__))
+
+    class Annotated:
+        """Add context specific metadata to a type.
+
+        Example: Annotated[int, runtime_check.Unsigned] indicates to the
+        hypothetical runtime_check module that this type is an unsigned int.
+        Every other consumer of this type can ignore this metadata and treat
+        this type as int.
+
+        The first argument to Annotated must be a valid type (and will be in
+        the __origin__ field), the remaining arguments are kept as a tuple in
+        the __metadata__ field.
+
+        Details:
+
+        - It's an error to call `Annotated` with less than two arguments.
+        - Nested Annotated are flattened::
+
+            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+        - Instantiating an annotated type is equivalent to instantiating the
+        underlying type::
+
+            Annotated[C, Ann1](5) == C(5)
+
+        - Annotated can be used as a generic type alias::
+
+            Optimized = Annotated[T, runtime.Optimize()]
+            Optimized[int] == Annotated[int, runtime.Optimize()]
+
+            OptimizedList = Annotated[List[T], runtime.Optimize()]
+            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+        """
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwargs):
+            raise TypeError("Type Annotated cannot be instantiated.")
+
+        @typing._tp_cache
+        def __class_getitem__(cls, params):
+            if not isinstance(params, tuple) or len(params) < 2:
+                raise TypeError("Annotated[...] should be used "
+                                "with at least two arguments (a type and an "
+                                "annotation).")
+            allowed_special_forms = (ClassVar, Final)
+            if get_origin(params[0]) in allowed_special_forms:
+                origin = params[0]
+            else:
+                msg = "Annotated[t, ...]: t must be a type."
+                origin = typing._type_check(params[0], msg)
+            metadata = tuple(params[1:])
+            return _AnnotatedAlias(origin, metadata)
+
+        def __init_subclass__(cls, *args, **kwargs):
+            raise TypeError(
+                f"Cannot subclass {cls.__module__}.Annotated"
+            )
+
+# Python 3.8 has get_origin() and get_args() but those implementations aren't
+# Annotated-aware, so we can't use those. Python 3.9's versions don't support
+# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
+if sys.version_info[:2] >= (3, 10):
+    get_origin = typing.get_origin
+    get_args = typing.get_args
+# 3.8-3.9
+else:
+    try:
+        # 3.9+
+        from typing import _BaseGenericAlias
+    except ImportError:
+        _BaseGenericAlias = typing._GenericAlias
+    try:
+        # 3.9+
+        from typing import GenericAlias as _typing_GenericAlias
+    except ImportError:
+        _typing_GenericAlias = typing._GenericAlias
+
+    def get_origin(tp):
+        """Get the unsubscripted version of a type.
+
+        This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+        and Annotated. Return None for unsupported types. Examples::
+
+            get_origin(Literal[42]) is Literal
+            get_origin(int) is None
+            get_origin(ClassVar[int]) is ClassVar
+            get_origin(Generic) is Generic
+            get_origin(Generic[T]) is Generic
+            get_origin(Union[T, int]) is Union
+            get_origin(List[Tuple[T, T]][int]) == list
+            get_origin(P.args) is P
+        """
+        if isinstance(tp, _AnnotatedAlias):
+            return Annotated
+        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
+                           ParamSpecArgs, ParamSpecKwargs)):
+            return tp.__origin__
+        if tp is typing.Generic:
+            return typing.Generic
+        return None
+
+    def get_args(tp):
+        """Get type arguments with all substitutions performed.
+
+        For unions, basic simplifications used by Union constructor are performed.
+        Examples::
+            get_args(Dict[str, int]) == (str, int)
+            get_args(int) == ()
+            get_args(Union[int, Union[T, int], str][int]) == (int, str)
+            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+            get_args(Callable[[], T][int]) == ([], int)
+        """
+        if isinstance(tp, _AnnotatedAlias):
+            return (tp.__origin__, *tp.__metadata__)
+        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
+            if getattr(tp, "_special", False):
+                return ()
+            res = tp.__args__
+            if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
+                res = (list(res[:-1]), res[-1])
+            return res
+        return ()
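+
+# Illustrative sketch (hypothetical, not from upstream): the cases that
+# motivate this backport, namely Annotated and ParamSpec support (Callable
+# is typing.Callable; the other names come from this module).
+#
+#     P = ParamSpec('P')
+#     assert get_origin(Annotated[int, "meta"]) is Annotated
+#     assert get_args(Annotated[int, "meta"]) == (int, "meta")
+#     assert get_origin(P.args) is P
+#     assert get_args(Callable[[int], str]) == ([int], str)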
+
+
+# 3.10+
+if hasattr(typing, 'TypeAlias'):
+    TypeAlias = typing.TypeAlias
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def TypeAlias(self, parameters):
+        """Special marker indicating that an assignment should
+        be recognized as a proper type alias definition by type
+        checkers.
+
+        For example::
+
+            Predicate: TypeAlias = Callable[..., bool]
+
+        It's invalid when used anywhere except as in the example above.
+        """
+        raise TypeError(f"{self} is not subscriptable")
+# 3.8
+else:
+    TypeAlias = _ExtensionsSpecialForm(
+        'TypeAlias',
+        doc="""Special marker indicating that an assignment should
+        be recognized as a proper type alias definition by type
+        checkers.
+
+        For example::
+
+            Predicate: TypeAlias = Callable[..., bool]
+
+        It's invalid when used anywhere except as in the example
+        above."""
+    )
+
+
+if hasattr(typing, "NoDefault"):
+    NoDefault = typing.NoDefault
+else:
+    class NoDefaultTypeMeta(type):
+        def __setattr__(cls, attr, value):
+            # TypeError is consistent with the behavior of NoneType
+            raise TypeError(
+                f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
+            )
+
+    class NoDefaultType(metaclass=NoDefaultTypeMeta):
+        """The type of the NoDefault singleton."""
+
+        __slots__ = ()
+
+        def __new__(cls):
+            return globals().get("NoDefault") or object.__new__(cls)
+
+        def __repr__(self):
+            return "typing_extensions.NoDefault"
+
+        def __reduce__(self):
+            return "NoDefault"
+
+    NoDefault = NoDefaultType()
+    del NoDefaultType, NoDefaultTypeMeta
+
+
+def _set_default(type_param, default):
+    type_param.has_default = lambda: default is not NoDefault
+    type_param.__default__ = default
+
+
+def _set_module(typevarlike):
+    # for pickling:
+    def_mod = _caller(depth=3)
+    if def_mod != 'typing_extensions':
+        typevarlike.__module__ = def_mod
+
+
+class _DefaultMixin:
+    """Mixin for TypeVarLike defaults."""
+
+    __slots__ = ()
+    __init__ = _set_default
+
+
+# Classes using this metaclass must provide a _backported_typevarlike ClassVar
+class _TypeVarLikeMeta(type):
+    def __instancecheck__(cls, __instance: Any) -> bool:
+        return isinstance(__instance, cls._backported_typevarlike)
+
+
+if _PEP_696_IMPLEMENTED:
+    from typing import TypeVar
+else:
+    # Add default and infer_variance parameters from PEP 696 and 695
+    class TypeVar(metaclass=_TypeVarLikeMeta):
+        """Type variable."""
+
+        _backported_typevarlike = typing.TypeVar
+
+        def __new__(cls, name, *constraints, bound=None,
+                    covariant=False, contravariant=False,
+                    default=NoDefault, infer_variance=False):
+            if hasattr(typing, "TypeAliasType"):
+                # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
+                typevar = typing.TypeVar(name, *constraints, bound=bound,
+                                         covariant=covariant, contravariant=contravariant,
+                                         infer_variance=infer_variance)
+            else:
+                typevar = typing.TypeVar(name, *constraints, bound=bound,
+                                         covariant=covariant, contravariant=contravariant)
+                if infer_variance and (covariant or contravariant):
+                    raise ValueError("Variance cannot be specified with infer_variance.")
+                typevar.__infer_variance__ = infer_variance
+
+            _set_default(typevar, default)
+            _set_module(typevar)
+
+            def _tvar_prepare_subst(alias, args):
+                if (
+                    typevar.has_default()
+                    and alias.__parameters__.index(typevar) == len(args)
+                ):
+                    args += (typevar.__default__,)
+                return args
+
+            typevar.__typing_prepare_subst__ = _tvar_prepare_subst
+            return typevar
+
+        def __init_subclass__(cls) -> None:
+            raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
+
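+# Illustrative sketch (hypothetical, not from upstream): the PEP 696
+# default machinery at runtime, using the backported TypeVar and NoDefault.
+#
+#     T = TypeVar("T", default=int)
+#     U = TypeVar("U")
+#     assert T.has_default() and T.__default__ is int
+#     assert not U.has_default() and U.__default__ is NoDefault
+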
+
+# Python 3.10+ has PEP 612
+if hasattr(typing, 'ParamSpecArgs'):
+    ParamSpecArgs = typing.ParamSpecArgs
+    ParamSpecKwargs = typing.ParamSpecKwargs
+# 3.8-3.9
+else:
+    class _Immutable:
+        """Mixin to indicate that object should not be copied."""
+        __slots__ = ()
+
+        def __copy__(self):
+            return self
+
+        def __deepcopy__(self, memo):
+            return self
+
+    class ParamSpecArgs(_Immutable):
+        """The args for a ParamSpec object.
+
+        Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
+
+        ParamSpecArgs objects have a reference back to their ParamSpec:
+
+        P.args.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.args"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecArgs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
+
+    class ParamSpecKwargs(_Immutable):
+        """The kwargs for a ParamSpec object.
+
+        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
+
+        ParamSpecKwargs objects have a reference back to their ParamSpec:
+
+        P.kwargs.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.kwargs"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecKwargs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
+
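+# Illustrative sketch (hypothetical, not from upstream): the back-reference
+# described in both docstrings above, using the ParamSpec defined later in
+# this branch.
+#
+#     P = ParamSpec('P')
+#     assert P.args.__origin__ is P and P.kwargs.__origin__ is P
+#     assert repr(P.args) == 'P.args' and repr(P.kwargs) == 'P.kwargs'
+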
+
+if _PEP_696_IMPLEMENTED:
+    from typing import ParamSpec
+
+# 3.10+
+elif hasattr(typing, 'ParamSpec'):
+
+    # Add default parameter - PEP 696
+    class ParamSpec(metaclass=_TypeVarLikeMeta):
+        """Parameter specification."""
+
+        _backported_typevarlike = typing.ParamSpec
+
+        def __new__(cls, name, *, bound=None,
+                    covariant=False, contravariant=False,
+                    infer_variance=False, default=NoDefault):
+            if hasattr(typing, "TypeAliasType"):
+                # PEP 695 implemented, can pass infer_variance to typing.TypeVar
+                paramspec = typing.ParamSpec(name, bound=bound,
+                                             covariant=covariant,
+                                             contravariant=contravariant,
+                                             infer_variance=infer_variance)
+            else:
+                paramspec = typing.ParamSpec(name, bound=bound,
+                                             covariant=covariant,
+                                             contravariant=contravariant)
+                paramspec.__infer_variance__ = infer_variance
+
+            _set_default(paramspec, default)
+            _set_module(paramspec)
+
+            def _paramspec_prepare_subst(alias, args):
+                params = alias.__parameters__
+                i = params.index(paramspec)
+                if i == len(args) and paramspec.has_default():
+                    args = [*args, paramspec.__default__]
+                if i >= len(args):
+                    raise TypeError(f"Too few arguments for {alias}")
+                # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
+                if len(params) == 1 and not typing._is_param_expr(args[0]):
+                    assert i == 0
+                    args = (args,)
+                # Convert lists to tuples to help other libraries cache the results.
+                elif isinstance(args[i], list):
+                    args = (*args[:i], tuple(args[i]), *args[i + 1:])
+                return args
+
+            paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
+            return paramspec
+
+        def __init_subclass__(cls) -> None:
+            raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
+
+# 3.8-3.9
+else:
+
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class ParamSpec(list, _DefaultMixin):
+        """Parameter specification variable.
+
+        Usage::
+
+           P = ParamSpec('P')
+
+        Parameter specification variables exist primarily for the benefit of static
+        type checkers.  They are used to forward the parameter types of one
+        callable to another callable, a pattern commonly found in higher order
+        functions and decorators.  They are only valid when used in ``Concatenate``,
+        or as the first argument to ``Callable``. In Python 3.10 and higher,
+        they are also supported in user-defined Generics at runtime.
+        See class Generic for more information on generic types.  An
+        example for annotating a decorator::
+
+           T = TypeVar('T')
+           P = ParamSpec('P')
+
+           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+               '''A type-safe decorator to add logging to a function.'''
+               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+                   logging.info(f'{f.__name__} was called')
+                   return f(*args, **kwargs)
+               return inner
+
+           @add_logging
+           def add_two(x: float, y: float) -> float:
+               '''Add two numbers together.'''
+               return x + y
+
+        Parameter specification variables defined with covariant=True or
+        contravariant=True can be used to declare covariant or contravariant
+        generic types.  These keyword arguments are valid, but their actual semantics
+        are yet to be decided.  See PEP 612 for details.
+
+        Parameter specification variables can be introspected. e.g.:
+
+           P.__name__ == 'P'
+           P.__bound__ == None
+           P.__covariant__ == False
+           P.__contravariant__ == False
+
+        Note that only parameter specification variables defined in global scope can
+        be pickled.
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        @property
+        def args(self):
+            return ParamSpecArgs(self)
+
+        @property
+        def kwargs(self):
+            return ParamSpecKwargs(self)
+
+        def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
+                     infer_variance=False, default=NoDefault):
+            list.__init__(self, [self])
+            self.__name__ = name
+            self.__covariant__ = bool(covariant)
+            self.__contravariant__ = bool(contravariant)
+            self.__infer_variance__ = bool(infer_variance)
+            if bound:
+                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
+            else:
+                self.__bound__ = None
+            _DefaultMixin.__init__(self, default)
+
+            # for pickling:
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+        def __repr__(self):
+            if self.__infer_variance__:
+                prefix = ''
+            elif self.__covariant__:
+                prefix = '+'
+            elif self.__contravariant__:
+                prefix = '-'
+            else:
+                prefix = '~'
+            return prefix + self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        # Hack to get typing._type_check to pass.
+        def __call__(self, *args, **kwargs):
+            pass
+
+
+# 3.8-3.9
+if not hasattr(typing, 'Concatenate'):
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class _ConcatenateGenericAlias(list):
+
+        # Trick Generic into looking into this for __parameters__.
+        __class__ = typing._GenericAlias
+
+        # Flag in 3.8.
+        _special = False
+
+        def __init__(self, origin, args):
+            super().__init__(args)
+            self.__origin__ = origin
+            self.__args__ = args
+
+        def __repr__(self):
+            _type_repr = typing._type_repr
+            return (f'{_type_repr(self.__origin__)}'
+                    f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
+
+        def __hash__(self):
+            return hash((self.__origin__, self.__args__))
+
+        # Hack to get typing._type_check to pass in Generic.
+        def __call__(self, *args, **kwargs):
+            pass
+
+        @property
+        def __parameters__(self):
+            return tuple(
+                tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
+            )
+
+
+# 3.8-3.9
+@typing._tp_cache
+def _concatenate_getitem(self, parameters):
+    if parameters == ():
+        raise TypeError("Cannot take a Concatenate of no types.")
+    if not isinstance(parameters, tuple):
+        parameters = (parameters,)
+    if not isinstance(parameters[-1], ParamSpec):
+        raise TypeError("The last parameter to Concatenate should be a "
+                        "ParamSpec variable.")
+    msg = "Concatenate[arg, ...]: each arg must be a type."
+    parameters = tuple(typing._type_check(p, msg) for p in parameters)
+    return _ConcatenateGenericAlias(self, parameters)
+
+
+# 3.10+
+if hasattr(typing, 'Concatenate'):
+    Concatenate = typing.Concatenate
+    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def Concatenate(self, parameters):
+        """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+        higher order function which adds, removes or transforms parameters of a
+        callable.
+
+        For example::
+
+           Callable[Concatenate[int, P], int]
+
+        See PEP 612 for detailed information.
+        """
+        return _concatenate_getitem(self, parameters)
+# 3.8
+else:
+    class _ConcatenateForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            return _concatenate_getitem(self, parameters)
+
+    Concatenate = _ConcatenateForm(
+        'Concatenate',
+        doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+        higher order function which adds, removes or transforms parameters of a
+        callable.
+
+        For example::
+
+           Callable[Concatenate[int, P], int]
+
+        See PEP 612 for detailed information.
+        """)
+
+# 3.10+
+if hasattr(typing, 'TypeGuard'):
+    TypeGuard = typing.TypeGuard
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def TypeGuard(self, parameters):
+        """Special typing form used to annotate the return type of a user-defined
+        type guard function.  ``TypeGuard`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeGuard`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the type inside ``TypeGuard``.
+
+        For example::
+
+            def is_str(val: Union[str, float]):
+                # "isinstance" type guard
+                if isinstance(val, str):
+                    # Type of ``val`` is narrowed to ``str``
+                    ...
+                else:
+                    # Else, type of ``val`` is narrowed to ``float``.
+                    ...
+
+        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+        form of ``TypeA`` (it can even be a wider form) and this may lead to
+        type-unsafe results.  The main reason is to allow for things like
+        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+        a subtype of the former, since ``List`` is invariant.  The responsibility of
+        writing type-safe type guards is left to the user.
+
+        ``TypeGuard`` also works with type variables.  For more information, see
+        PEP 647 (User-Defined Type Guards).
+        """
+        item = typing._type_check(parameters, f'{self} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+    class _TypeGuardForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type')
+            return typing._GenericAlias(self, (item,))
+
+    TypeGuard = _TypeGuardForm(
+        'TypeGuard',
+        doc="""Special typing form used to annotate the return type of a user-defined
+        type guard function.  ``TypeGuard`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeGuard`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the type inside ``TypeGuard``.
+
+        For example::
+
+            def is_str(val: Union[str, float]):
+                # "isinstance" type guard
+                if isinstance(val, str):
+                    # Type of ``val`` is narrowed to ``str``
+                    ...
+                else:
+                    # Else, type of ``val`` is narrowed to ``float``.
+                    ...
+
+        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+        form of ``TypeA`` (it can even be a wider form) and this may lead to
+        type-unsafe results.  The main reason is to allow for things like
+        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+        a subtype of the former, since ``List`` is invariant.  The responsibility of
+        writing type-safe type guards is left to the user.
+
+        ``TypeGuard`` also works with type variables.  For more information, see
+        PEP 647 (User-Defined Type Guards).
+        """)
+
+# 3.13+
+if hasattr(typing, 'TypeIs'):
+    TypeIs = typing.TypeIs
+# 3.9-3.12
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def TypeIs(self, parameters):
+        """Special typing form used to annotate the return type of a user-defined
+        type narrower function.  ``TypeIs`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeIs[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeIs`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the intersection of the type inside ``TypeIs`` and the argument's
+        previously known type.
+
+        For example::
+
+            def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+                return hasattr(val, '__await__')
+
+            def f(val: Union[int, Awaitable[int]]) -> int:
+                if is_awaitable(val):
+                    assert_type(val, Awaitable[int])
+                else:
+                    assert_type(val, int)
+
+        ``TypeIs`` also works with type variables.  For more information, see
+        PEP 742 (Narrowing types with TypeIs).
+        """
+        item = typing._type_check(parameters, f'{self} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+    class _TypeIsForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type')
+            return typing._GenericAlias(self, (item,))
+
+    TypeIs = _TypeIsForm(
+        'TypeIs',
+        doc="""Special typing form used to annotate the return type of a user-defined
+        type narrower function.  ``TypeIs`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeIs[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeIs`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the intersection of the type inside ``TypeIs`` and the argument's
+        previously known type.
+
+        For example::
+
+            def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+                return hasattr(val, '__await__')
+
+            def f(val: Union[int, Awaitable[int]]) -> int:
+                if is_awaitable(val):
+                    assert_type(val, Awaitable[int])
+                else:
+                    assert_type(val, int)
+
+        ``TypeIs`` also works with type variables.  For more information, see
+        PEP 742 (Narrowing types with TypeIs).
+        """)
+
+
+# Vendored from cpython typing._SpecialForm
+class _SpecialForm(typing._Final, _root=True):
+    __slots__ = ('_name', '__doc__', '_getitem')
+
+    def __init__(self, getitem):
+        self._getitem = getitem
+        self._name = getitem.__name__
+        self.__doc__ = getitem.__doc__
+
+    def __getattr__(self, item):
+        if item in {'__name__', '__qualname__'}:
+            return self._name
+
+        raise AttributeError(item)
+
+    def __mro_entries__(self, bases):
+        raise TypeError(f"Cannot subclass {self!r}")
+
+    def __repr__(self):
+        return f'typing_extensions.{self._name}'
+
+    def __reduce__(self):
+        return self._name
+
+    def __call__(self, *args, **kwds):
+        raise TypeError(f"Cannot instantiate {self!r}")
+
+    def __or__(self, other):
+        return typing.Union[self, other]
+
+    def __ror__(self, other):
+        return typing.Union[other, self]
+
+    def __instancecheck__(self, obj):
+        raise TypeError(f"{self} cannot be used with isinstance()")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError(f"{self} cannot be used with issubclass()")
+
+    @typing._tp_cache
+    def __getitem__(self, parameters):
+        return self._getitem(self, parameters)
+
+
+if hasattr(typing, "LiteralString"):  # 3.11+
+    LiteralString = typing.LiteralString
+else:
+    @_SpecialForm
+    def LiteralString(self, params):
+        """Represents an arbitrary literal string.
+
+        Example::
+
+          from typing_extensions import LiteralString
+
+          def query(sql: LiteralString) -> ...:
+              ...
+
+          query("SELECT * FROM table")  # ok
+          query(f"SELECT * FROM {input()}")  # not ok
+
+        See PEP 675 for details.
+
+        """
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Self"):  # 3.11+
+    Self = typing.Self
+else:
+    @_SpecialForm
+    def Self(self, params):
+        """Used to spell the type of "self" in classes.
+
+        Example::
+
+          from typing_extensions import Self
+
+          class ReturnsSelf:
+              def parse(self, data: bytes) -> Self:
+                  ...
+                  return self
+
+        """
+
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Never"):  # 3.11+
+    Never = typing.Never
+else:
+    @_SpecialForm
+    def Never(self, params):
+        """The bottom type, a type that has no members.
+
+        This can be used to define a function that should never be
+        called, or a function that never returns::
+
+            from typing_extensions import Never
+
+            def never_call_me(arg: Never) -> None:
+                pass
+
+            def int_or_str(arg: int | str) -> None:
+                never_call_me(arg)  # type checker error
+                match arg:
+                    case int():
+                        print("It's an int")
+                    case str():
+                        print("It's a str")
+                    case _:
+                        never_call_me(arg)  # ok, arg is of type Never
+
+        """
+
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, 'Required'):  # 3.11+
+    Required = typing.Required
+    NotRequired = typing.NotRequired
+elif sys.version_info[:2] >= (3, 9):  # 3.9-3.10
+    @_ExtensionsSpecialForm
+    def Required(self, parameters):
+        """A special typing construct to mark a key of a total=False TypedDict
+        as required. For example:
+
+            class Movie(TypedDict, total=False):
+                title: Required[str]
+                year: int
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+
+        There is no runtime checking that a required key is actually provided
+        when instantiating a related TypedDict.
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+
+    @_ExtensionsSpecialForm
+    def NotRequired(self, parameters):
+        """A special typing construct to mark a key of a TypedDict as
+        potentially missing. For example:
+
+            class Movie(TypedDict):
+                title: str
+                year: NotRequired[int]
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+
+else:  # 3.8
+    class _RequiredForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return typing._GenericAlias(self, (item,))
+
+    Required = _RequiredForm(
+        'Required',
+        doc="""A special typing construct to mark a key of a total=False TypedDict
+        as required. For example:
+
+            class Movie(TypedDict, total=False):
+                title: Required[str]
+                year: int
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+
+        There is no runtime checking that a required key is actually provided
+        when instantiating a related TypedDict.
+        """)
+    NotRequired = _RequiredForm(
+        'NotRequired',
+        doc="""A special typing construct to mark a key of a TypedDict as
+        potentially missing. For example:
+
+            class Movie(TypedDict):
+                title: str
+                year: NotRequired[int]
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+        """)
+
+
+if hasattr(typing, 'ReadOnly'):
+    ReadOnly = typing.ReadOnly
+elif sys.version_info[:2] >= (3, 9):  # 3.9-3.12
+    @_ExtensionsSpecialForm
+    def ReadOnly(self, parameters):
+        """A special typing construct to mark an item of a TypedDict as read-only.
+
+        For example:
+
+            class Movie(TypedDict):
+                title: ReadOnly[str]
+                year: int
+
+            def mutate_movie(m: Movie) -> None:
+                m["year"] = 1992  # allowed
+                m["title"] = "The Matrix"  # typechecker error
+
+        There is no runtime checking for this property.
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+
+else:  # 3.8
+    class _ReadOnlyForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return typing._GenericAlias(self, (item,))
+
+    ReadOnly = _ReadOnlyForm(
+        'ReadOnly',
+        doc="""A special typing construct to mark a key of a TypedDict as read-only.
+
+        For example:
+
+            class Movie(TypedDict):
+                title: ReadOnly[str]
+                year: int
+
+            def mutate_movie(m: Movie) -> None:
+                m["year"] = 1992  # allowed
+                m["title"] = "The Matrix"  # typechecker error
+
+        There is no runtime checking for this property.
+        """)
+
+
+_UNPACK_DOC = """\
+Type unpack operator.
+
+The type unpack operator takes the child types from some container type,
+such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
+example:
+
+  # For some generic class `Foo`:
+  Foo[Unpack[tuple[int, str]]]  # Equivalent to Foo[int, str]
+
+  Ts = TypeVarTuple('Ts')
+  # Specifies that `Bar` is generic in an arbitrary number of types.
+  # (Think of `Ts` as a tuple of an arbitrary number of individual
+  #  `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
+  #  `Generic[]`.)
+  class Bar(Generic[Unpack[Ts]]): ...
+  Bar[int]  # Valid
+  Bar[int, str]  # Also valid
+
+From Python 3.11, this can also be done using the `*` operator:
+
+    Foo[*tuple[int, str]]
+    class Bar(Generic[*Ts]): ...
+
+The operator can also be used along with a `TypedDict` to annotate
+`**kwargs` in a function signature. For instance:
+
+  class Movie(TypedDict):
+    name: str
+    year: int
+
+  # This function expects two keyword arguments - *name* of type `str` and
+  # *year* of type `int`.
+  def foo(**kwargs: Unpack[Movie]): ...
+
+Note that there is only some runtime checking of this operator. Not
+everything the runtime allows may be accepted by static type checkers.
+
+For more information, see PEP 646 and PEP 692.
+"""
+
+
+if sys.version_info >= (3, 12):  # PEP 692 changed the repr of Unpack[]
+    Unpack = typing.Unpack
+
+    def _is_unpack(obj):
+        return get_origin(obj) is Unpack
+
+elif sys.version_info[:2] >= (3, 9):  # 3.9-3.11
+    class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
+        def __init__(self, getitem):
+            super().__init__(getitem)
+            self.__doc__ = _UNPACK_DOC
+
+    class _UnpackAlias(typing._GenericAlias, _root=True):
+        __class__ = typing.TypeVar
+
+        @property
+        def __typing_unpacked_tuple_args__(self):
+            assert self.__origin__ is Unpack
+            assert len(self.__args__) == 1
+            arg, = self.__args__
+            if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
+                if arg.__origin__ is not tuple:
+                    raise TypeError("Unpack[...] must be used with a tuple type")
+                return arg.__args__
+            return None
+
+    @_UnpackSpecialForm
+    def Unpack(self, parameters):
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return _UnpackAlias(self, (item,))
+
+    def _is_unpack(obj):
+        return isinstance(obj, _UnpackAlias)
+
+else:  # 3.8
+    class _UnpackAlias(typing._GenericAlias, _root=True):
+        __class__ = typing.TypeVar
+
+    class _UnpackForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return _UnpackAlias(self, (item,))
+
+    Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC)
+
+    def _is_unpack(obj):
+        return isinstance(obj, _UnpackAlias)
+
+
+if _PEP_696_IMPLEMENTED:
+    from typing import TypeVarTuple
+
+elif hasattr(typing, "TypeVarTuple"):  # 3.11+
+
+    def _unpack_args(*args):
+        newargs = []
+        for arg in args:
+            subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+            if subargs is not None and not (subargs and subargs[-1] is ...):
+                newargs.extend(subargs)
+            else:
+                newargs.append(arg)
+        return newargs
+
+    # Add default parameter - PEP 696
+    class TypeVarTuple(metaclass=_TypeVarLikeMeta):
+        """Type variable tuple."""
+
+        _backported_typevarlike = typing.TypeVarTuple
+
+        def __new__(cls, name, *, default=NoDefault):
+            tvt = typing.TypeVarTuple(name)
+            _set_default(tvt, default)
+            _set_module(tvt)
+
+            def _typevartuple_prepare_subst(alias, args):
+                params = alias.__parameters__
+                typevartuple_index = params.index(tvt)
+                for param in params[typevartuple_index + 1:]:
+                    if isinstance(param, TypeVarTuple):
+                        raise TypeError(
+                            f"More than one TypeVarTuple parameter in {alias}"
+                        )
+
+                alen = len(args)
+                plen = len(params)
+                left = typevartuple_index
+                right = plen - typevartuple_index - 1
+                var_tuple_index = None
+                fillarg = None
+                for k, arg in enumerate(args):
+                    if not isinstance(arg, type):
+                        subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+                        if subargs and len(subargs) == 2 and subargs[-1] is ...:
+                            if var_tuple_index is not None:
+                                raise TypeError(
+                                    "More than one unpacked "
+                                    "arbitrary-length tuple argument"
+                                )
+                            var_tuple_index = k
+                            fillarg = subargs[0]
+                if var_tuple_index is not None:
+                    left = min(left, var_tuple_index)
+                    right = min(right, alen - var_tuple_index - 1)
+                elif left + right > alen:
+                    raise TypeError(f"Too few arguments for {alias};"
+                                    f" actual {alen}, expected at least {plen - 1}")
+                if left == alen - right and tvt.has_default():
+                    replacement = _unpack_args(tvt.__default__)
+                else:
+                    replacement = args[left: alen - right]
+
+                return (
+                    *args[:left],
+                    *([fillarg] * (typevartuple_index - left)),
+                    replacement,
+                    *([fillarg] * (plen - right - left - typevartuple_index - 1)),
+                    *args[alen - right:],
+                )
+
+            tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
+            return tvt
+
+        def __init_subclass__(self, *args, **kwds):
+            raise TypeError("Cannot subclass special typing classes")
+
+else:  # <=3.10
+    class TypeVarTuple(_DefaultMixin):
+        """Type variable tuple.
+
+        Usage::
+
+            Ts = TypeVarTuple('Ts')
+
+        In the same way that a normal type variable is a stand-in for a single
+        type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
+        type such as ``Tuple[int, str]``.
+
+        Type variable tuples can be used in ``Generic`` declarations.
+        Consider the following example::
+
+            class Array(Generic[*Ts]): ...
+
+        The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
+        where ``T1`` and ``T2`` are type variables. To use these type variables
+        as type parameters of ``Array``, we must *unpack* the type variable tuple using
+        the star operator: ``*Ts``. The signature of ``Array`` then behaves
+        as if we had simply written ``class Array(Generic[T1, T2]): ...``.
+        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
+        us to parameterise the class with an *arbitrary* number of type parameters.
+
+        Type variable tuples can be used anywhere a normal ``TypeVar`` can.
+        This includes class definitions, as shown above, as well as function
+        signatures and variable annotations::
+
+            class Array(Generic[*Ts]):
+
+                def __init__(self, shape: Tuple[*Ts]):
+                    self._shape: Tuple[*Ts] = shape
+
+                def get_shape(self) -> Tuple[*Ts]:
+                    return self._shape
+
+            shape = (Height(480), Width(640))
+            x: Array[Height, Width] = Array(shape)
+            y = abs(x)  # Inferred type is Array[Height, Width]
+            z = x + x   #        ...    is Array[Height, Width]
+            x.get_shape()  #     ...    is tuple[Height, Width]
+
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        def __iter__(self):
+            yield self.__unpacked__
+
+        def __init__(self, name, *, default=NoDefault):
+            self.__name__ = name
+            _DefaultMixin.__init__(self, default)
+
+            # for pickling:
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+            self.__unpacked__ = Unpack[self]
+
+        def __repr__(self):
+            return self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        def __init_subclass__(self, *args, **kwds):
+            if '_root' not in kwds:
+                raise TypeError("Cannot subclass special typing classes")
+
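+# Illustrative sketch (hypothetical, not from upstream): variadic generics
+# with the backported names (Generic is typing.Generic; `Array` is an
+# example name). On <=3.10 the `*Ts` syntax is unavailable, so Unpack[Ts]
+# is spelled out; runtime subscription support on these versions is limited,
+# but type checkers accept the annotations.
+#
+#     Ts = TypeVarTuple('Ts')
+#
+#     class Array(Generic[Unpack[Ts]]): ...
+#
+#     x: Array[int, str]  # an Array parameterized by two types
+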
+
+if hasattr(typing, "reveal_type"):  # 3.11+
+    reveal_type = typing.reveal_type
+else:  # <=3.10
+    def reveal_type(obj: T, /) -> T:
+        """Reveal the inferred type of a variable.
+
+        When a static type checker encounters a call to ``reveal_type()``,
+        it will emit the inferred type of the argument::
+
+            x: int = 1
+            reveal_type(x)
+
+        Running a static type checker (e.g., ``mypy``) on this example
+        will produce output similar to 'Revealed type is "builtins.int"'.
+
+        At runtime, the function prints the runtime type of the
+        argument and returns it unchanged.
+
+        """
+        print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
+        return obj
+
+
+if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"):  # 3.11+
+    _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
+else:  # <=3.10
+    _ASSERT_NEVER_REPR_MAX_LENGTH = 100
+
+
+if hasattr(typing, "assert_never"):  # 3.11+
+    assert_never = typing.assert_never
+else:  # <=3.10
+    def assert_never(arg: Never, /) -> Never:
+        """Assert to the type checker that a line of code is unreachable.
+
+        Example::
+
+            def int_or_str(arg: int | str) -> None:
+                match arg:
+                    case int():
+                        print("It's an int")
+                    case str():
+                        print("It's a str")
+                    case _:
+                        assert_never(arg)
+
+        If a type checker finds that a call to assert_never() is
+        reachable, it will emit an error.
+
+        At runtime, this throws an exception when called.
+
+        """
+        value = repr(arg)
+        if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
+            value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
+        raise AssertionError(f"Expected code to be unreachable, but got: {value}")
+
+
+if sys.version_info >= (3, 12):  # 3.12+
+    # dataclass_transform exists in 3.11 but lacks the frozen_default parameter
+    dataclass_transform = typing.dataclass_transform
+else:  # <=3.11
+    def dataclass_transform(
+        *,
+        eq_default: bool = True,
+        order_default: bool = False,
+        kw_only_default: bool = False,
+        frozen_default: bool = False,
+        field_specifiers: typing.Tuple[
+            typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
+            ...
+        ] = (),
+        **kwargs: typing.Any,
+    ) -> typing.Callable[[T], T]:
+        """Decorator that marks a function, class, or metaclass as providing
+        dataclass-like behavior.
+
+        Example:
+
+            from typing_extensions import dataclass_transform
+
+            _T = TypeVar("_T")
+
+            # Used on a decorator function
+            @dataclass_transform()
+            def create_model(cls: type[_T]) -> type[_T]:
+                ...
+                return cls
+
+            @create_model
+            class CustomerModel:
+                id: int
+                name: str
+
+            # Used on a base class
+            @dataclass_transform()
+            class ModelBase: ...
+
+            class CustomerModel(ModelBase):
+                id: int
+                name: str
+
+            # Used on a metaclass
+            @dataclass_transform()
+            class ModelMeta(type): ...
+
+            class ModelBase(metaclass=ModelMeta): ...
+
+            class CustomerModel(ModelBase):
+                id: int
+                name: str
+
+        Each of the ``CustomerModel`` classes defined in this example will now
+        behave similarly to a dataclass created with the ``@dataclasses.dataclass``
+        decorator. For example, the type checker will synthesize an ``__init__``
+        method.
+
+        The arguments to this decorator can be used to customize this behavior:
+        - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
+          True or False if it is omitted by the caller.
+        - ``order_default`` indicates whether the ``order`` parameter is
+          assumed to be True or False if it is omitted by the caller.
+        - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
+          assumed to be True or False if it is omitted by the caller.
+        - ``frozen_default`` indicates whether the ``frozen`` parameter is
+          assumed to be True or False if it is omitted by the caller.
+        - ``field_specifiers`` specifies a static list of supported classes
+          or functions that describe fields, similar to ``dataclasses.field()``.
+
+        At runtime, this decorator records its arguments in the
+        ``__dataclass_transform__`` attribute on the decorated object.
+
+        See PEP 681 for details.
+
+        """
+        def decorator(cls_or_fn):
+            cls_or_fn.__dataclass_transform__ = {
+                "eq_default": eq_default,
+                "order_default": order_default,
+                "kw_only_default": kw_only_default,
+                "frozen_default": frozen_default,
+                "field_specifiers": field_specifiers,
+                "kwargs": kwargs,
+            }
+            return cls_or_fn
+        return decorator
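+    # Illustrative sketch (not part of the vendored module): the backport's
+    # only runtime effect is recording the arguments for introspection, e.g.
+    #
+    #     @dataclass_transform(kw_only_default=True)
+    #     def make_model(cls): return cls
+    #
+    #     make_model.__dataclass_transform__["kw_only_default"]  # -> True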
+
+
+if hasattr(typing, "override"):  # 3.12+
+    override = typing.override
+else:  # <=3.11
+    _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
+
+    def override(arg: _F, /) -> _F:
+        """Indicate that a method is intended to override a method in a base class.
+
+        Usage:
+
+            class Base:
+                def method(self) -> None:
+                    pass
+
+            class Child(Base):
+                @override
+                def method(self) -> None:
+                    super().method()
+
+        When this decorator is applied to a method, the type checker will
+        validate that it overrides a method with the same name on a base class.
+        This helps prevent bugs that may occur when a base class is changed
+        without an equivalent change to a child class.
+
+        There is no runtime checking of these properties. The decorator
+        sets the ``__override__`` attribute to ``True`` on the decorated object
+        to allow runtime introspection.
+
+        See PEP 698 for details.
+
+        """
+        try:
+            arg.__override__ = True
+        except (AttributeError, TypeError):
+            # Skip the attribute silently if it is not writable.
+            # AttributeError happens if the object has __slots__ or a
+            # read-only property, TypeError if it's a builtin class.
+            pass
+        return arg
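+    # Illustrative sketch (not part of the vendored module): continuing the
+    # docstring's example, the only runtime effect is the marker attribute:
+    #
+    #     Child.method.__override__  # -> True (where the attribute is writable)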
+
+
+if hasattr(warnings, "deprecated"):
+    deprecated = warnings.deprecated
+else:
+    _T = typing.TypeVar("_T")
+
+    class deprecated:
+        """Indicate that a class, function or overload is deprecated.
+
+        When this decorator is applied to an object, the type checker
+        will generate a diagnostic on usage of the deprecated object.
+
+        Usage:
+
+            @deprecated("Use B instead")
+            class A:
+                pass
+
+            @deprecated("Use g instead")
+            def f():
+                pass
+
+            @overload
+            @deprecated("int support is deprecated")
+            def g(x: int) -> int: ...
+            @overload
+            def g(x: str) -> int: ...
+
+        The warning specified by *category* will be emitted at runtime
+        on use of deprecated objects. For functions, that happens on calls;
+        for classes, on instantiation and on creation of subclasses.
+        If the *category* is ``None``, no warning is emitted at runtime.
+        The *stacklevel* determines where the
+        warning is emitted. If it is ``1`` (the default), the warning
+        is emitted at the direct caller of the deprecated object; if it
+        is higher, it is emitted further up the stack.
+        Static type checker behavior is not affected by the *category*
+        and *stacklevel* arguments.
+
+        The deprecation message passed to the decorator is saved in the
+        ``__deprecated__`` attribute on the decorated object.
+        If applied to an overload, the decorator
+        must be after the ``@overload`` decorator for the attribute to
+        exist on the overload as returned by ``get_overloads()``.
+
+        See PEP 702 for details.
+
+        """
+        def __init__(
+            self,
+            message: str,
+            /,
+            *,
+            category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
+            stacklevel: int = 1,
+        ) -> None:
+            if not isinstance(message, str):
+                raise TypeError(
+                    "Expected an object of type str for 'message', not "
+                    f"{type(message).__name__!r}"
+                )
+            self.message = message
+            self.category = category
+            self.stacklevel = stacklevel
+
+        def __call__(self, arg: _T, /) -> _T:
+            # Make sure the inner functions created below don't
+            # retain a reference to self.
+            msg = self.message
+            category = self.category
+            stacklevel = self.stacklevel
+            if category is None:
+                arg.__deprecated__ = msg
+                return arg
+            elif isinstance(arg, type):
+                import functools
+                from types import MethodType
+
+                original_new = arg.__new__
+
+                @functools.wraps(original_new)
+                def __new__(cls, *args, **kwargs):
+                    if cls is arg:
+                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                    if original_new is not object.__new__:
+                        return original_new(cls, *args, **kwargs)
+                    # Mirrors a similar check in object.__new__.
+                    elif cls.__init__ is object.__init__ and (args or kwargs):
+                        raise TypeError(f"{cls.__name__}() takes no arguments")
+                    else:
+                        return original_new(cls)
+
+                arg.__new__ = staticmethod(__new__)
+
+                original_init_subclass = arg.__init_subclass__
+                # We need slightly different behavior if __init_subclass__
+                # is a bound method (likely if it was implemented in Python)
+                if isinstance(original_init_subclass, MethodType):
+                    original_init_subclass = original_init_subclass.__func__
+
+                    @functools.wraps(original_init_subclass)
+                    def __init_subclass__(*args, **kwargs):
+                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                        return original_init_subclass(*args, **kwargs)
+
+                    arg.__init_subclass__ = classmethod(__init_subclass__)
+                # Or otherwise, which likely means it's a builtin such as
+                # object's implementation of __init_subclass__.
+                else:
+                    @functools.wraps(original_init_subclass)
+                    def __init_subclass__(*args, **kwargs):
+                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                        return original_init_subclass(*args, **kwargs)
+
+                    arg.__init_subclass__ = __init_subclass__
+
+                arg.__deprecated__ = __new__.__deprecated__ = msg
+                __init_subclass__.__deprecated__ = msg
+                return arg
+            elif callable(arg):
+                import functools
+
+                @functools.wraps(arg)
+                def wrapper(*args, **kwargs):
+                    warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                    return arg(*args, **kwargs)
+
+                arg.__deprecated__ = wrapper.__deprecated__ = msg
+                return wrapper
+            else:
+                raise TypeError(
+                    "@deprecated decorator with non-None category must be applied to "
+                    f"a class or callable, not {arg!r}"
+                )
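+    # Illustrative sketch (not part of the vendored module): for a plain
+    # callable, the backport returns a wrapper that warns on each call, e.g.
+    #
+    #     @deprecated("Use g instead")
+    #     def f(): ...
+    #
+    #     f()               # emits DeprecationWarning("Use g instead")
+    #     f.__deprecated__  # -> "Use g instead"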
+
+
+# We have to do some monkey patching to deal with the dual nature of
+# Unpack/TypeVarTuple:
+# - We want Unpack to be a kind of TypeVar so it gets accepted in
+#   Generic[Unpack[Ts]]
+# - We want it to *not* be treated as a TypeVar for the purposes of
+#   counting generic parameters, so that when we subscript a generic,
+#   the runtime doesn't try to substitute the Unpack with the subscripted type.
+if not hasattr(typing, "TypeVarTuple"):
+    def _check_generic(cls, parameters, elen=_marker):
+        """Check correct count for parameters of a generic cls (internal helper).
+
+        This gives a nice error message in case of count mismatch.
+        """
+        if not elen:
+            raise TypeError(f"{cls} is not a generic class")
+        if elen is _marker:
+            if not hasattr(cls, "__parameters__") or not cls.__parameters__:
+                raise TypeError(f"{cls} is not a generic class")
+            elen = len(cls.__parameters__)
+        alen = len(parameters)
+        if alen != elen:
+            expect_val = elen
+            if hasattr(cls, "__parameters__"):
+                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+                num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
+                if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
+                    return
+
+                # deal with TypeVarLike defaults
+                # required TypeVarLikes cannot appear after a defaulted one.
+                if alen < elen:
+                    # since we validate TypeVarLike default in _collect_type_vars
+                    # or _collect_parameters we can safely check parameters[alen]
+                    if (
+                        getattr(parameters[alen], '__default__', NoDefault)
+                        is not NoDefault
+                    ):
+                        return
+
+                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
+                                         is not NoDefault for p in parameters)
+
+                    elen -= num_default_tv
+
+                    expect_val = f"at least {elen}"
+
+            things = "arguments" if sys.version_info >= (3, 10) else "parameters"
+            raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
+                            f" for {cls}; actual {alen}, expected {expect_val}")
+else:
+    # Python 3.11+
+
+    def _check_generic(cls, parameters, elen):
+        """Check correct count for parameters of a generic cls (internal helper).
+
+        This gives a nice error message in case of count mismatch.
+        """
+        if not elen:
+            raise TypeError(f"{cls} is not a generic class")
+        alen = len(parameters)
+        if alen != elen:
+            expect_val = elen
+            if hasattr(cls, "__parameters__"):
+                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+
+                # deal with TypeVarLike defaults
+                # required TypeVarLikes cannot appear after a defaulted one.
+                if alen < elen:
+                    # since we validate TypeVarLike default in _collect_type_vars
+                    # or _collect_parameters we can safely check parameters[alen]
+                    if (
+                        getattr(parameters[alen], '__default__', NoDefault)
+                        is not NoDefault
+                    ):
+                        return
+
+                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
+                                         is not NoDefault for p in parameters)
+
+                    elen -= num_default_tv
+
+                    expect_val = f"at least {elen}"
+
+            raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
+                            f" for {cls}; actual {alen}, expected {expect_val}")
+
+if not _PEP_696_IMPLEMENTED:
+    typing._check_generic = _check_generic
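+# Illustrative sketch (not part of the vendored module): with the helper
+# above patched in, a count mismatch such as
+#
+#     class C(Generic[T]): ...
+#     C[int, str]
+#
+# raises a TypeError like "Too many arguments for ...; actual 2, expected 1"
+# (exact wording varies by Python version), while defaulted type parameters
+# relax the lower bound to "at least N".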
+
+
+def _has_generic_or_protocol_as_origin() -> bool:
+    try:
+        frame = sys._getframe(2)
+    # - Catch AttributeError: not all Python implementations have sys._getframe()
+    # - Catch ValueError: maybe we're called from an unexpected module
+    #   and the call stack isn't deep enough
+    except (AttributeError, ValueError):
+        return False  # err on the side of leniency
+    else:
+        # If we somehow get invoked from outside typing.py,
+        # also err on the side of leniency
+        if frame.f_globals.get("__name__") != "typing":
+            return False
+        origin = frame.f_locals.get("origin")
+        # Cannot use "in" because origin may be an object with a buggy __eq__ that
+        # throws an error.
+        return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
+
+
+_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
+
+
+def _is_unpacked_typevartuple(x) -> bool:
+    if get_origin(x) is not Unpack:
+        return False
+    args = get_args(x)
+    return (
+        bool(args)
+        and len(args) == 1
+        and type(args[0]) in _TYPEVARTUPLE_TYPES
+    )
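+# Illustrative sketch (not part of the vendored module): Unpack over a
+# TypeVarTuple passes the check above; Unpack over anything else does not, e.g.
+#
+#     Ts = TypeVarTuple("Ts")
+#     _is_unpacked_typevartuple(Unpack[Ts])                 # -> True
+#     _is_unpacked_typevartuple(Unpack[typing.Tuple[int]])  # -> False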
+
+
+# Python 3.11+ _collect_type_vars was renamed to _collect_parameters
+if hasattr(typing, '_collect_type_vars'):
+    def _collect_type_vars(types, typevar_types=None):
+        """Collect all type variable contained in types in order of
+        first appearance (lexicographic order). For example::
+
+            _collect_type_vars((T, List[S, T])) == (T, S)
+        """
+        if typevar_types is None:
+            typevar_types = typing.TypeVar
+        tvars = []
+
+        # A required TypeVarLike cannot appear after a TypeVarLike with a default
+        # if it was a direct call to `Generic[]` or `Protocol[]`
+        enforce_default_ordering = _has_generic_or_protocol_as_origin()
+        default_encountered = False
+
+        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+        type_var_tuple_encountered = False
+
+        for t in types:
+            if _is_unpacked_typevartuple(t):
+                type_var_tuple_encountered = True
+            elif isinstance(t, typevar_types) and t not in tvars:
+                if enforce_default_ordering:
+                    has_default = getattr(t, '__default__', NoDefault) is not NoDefault
+                    if has_default:
+                        if type_var_tuple_encountered:
+                            raise TypeError('Type parameter with a default'
+                                            ' follows TypeVarTuple')
+                        default_encountered = True
+                    elif default_encountered:
+                        raise TypeError(f'Type parameter {t!r} without a default'
+                                        ' follows type parameter with a default')
+
+                tvars.append(t)
+            if _should_collect_from_parameters(t):
+                tvars.extend([t for t in t.__parameters__ if t not in tvars])
+        return tuple(tvars)
+
+    typing._collect_type_vars = _collect_type_vars
+else:
+    def _collect_parameters(args):
+        """Collect all type variables and parameter specifications in args
+        in order of first appearance (lexicographic order).
+
+        For example::
+
+            assert _collect_parameters((T, Callable[P, T])) == (T, P)
+        """
+        parameters = []
+
+        # A required TypeVarLike cannot appear after a TypeVarLike with default
+        # if it was a direct call to `Generic[]` or `Protocol[]`
+        enforce_default_ordering = _has_generic_or_protocol_as_origin()
+        default_encountered = False
+
+        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+        type_var_tuple_encountered = False
+
+        for t in args:
+            if isinstance(t, type):
+                # We don't want __parameters__ descriptor of a bare Python class.
+                pass
+            elif isinstance(t, tuple):
+                # `t` might be a tuple, when `ParamSpec` is substituted with
+                # `[T, int]`, or `[int, *Ts]`, etc.
+                for x in t:
+                    for collected in _collect_parameters([x]):
+                        if collected not in parameters:
+                            parameters.append(collected)
+            elif hasattr(t, '__typing_subst__'):
+                if t not in parameters:
+                    if enforce_default_ordering:
+                        has_default = (
+                            getattr(t, '__default__', NoDefault) is not NoDefault
+                        )
+
+                        if type_var_tuple_encountered and has_default:
+                            raise TypeError('Type parameter with a default'
+                                            ' follows TypeVarTuple')
+
+                        if has_default:
+                            default_encountered = True
+                        elif default_encountered:
+                            raise TypeError(f'Type parameter {t!r} without a default'
+                                            ' follows type parameter with a default')
+
+                    parameters.append(t)
+            else:
+                if _is_unpacked_typevartuple(t):
+                    type_var_tuple_encountered = True
+                for x in getattr(t, '__parameters__', ()):
+                    if x not in parameters:
+                        parameters.append(x)
+
+        return tuple(parameters)
+
+    if not _PEP_696_IMPLEMENTED:
+        typing._collect_parameters = _collect_parameters
+
+# Backport typing.NamedTuple as it exists in Python 3.13.
+# In 3.11, support for defining generic `NamedTuple`s was added.
+# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
+# In 3.12, __orig_bases__ was added to call-based NamedTuples.
+# In 3.13, kwargs-based NamedTuples were deprecated.
+if sys.version_info >= (3, 13):
+    NamedTuple = typing.NamedTuple
+else:
+    def _make_nmtuple(name, types, module, defaults=()):
+        fields = [n for n, t in types]
+        annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
+                       for n, t in types}
+        nm_tpl = collections.namedtuple(name, fields,
+                                        defaults=defaults, module=module)
+        nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
+        # The `_field_types` attribute was removed in 3.9;
+        # in earlier versions, it is the same as the `__annotations__` attribute
+        if sys.version_info < (3, 9):
+            nm_tpl._field_types = annotations
+        return nm_tpl
+
+    _prohibited_namedtuple_fields = typing._prohibited
+    _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
+
+    class _NamedTupleMeta(type):
+        def __new__(cls, typename, bases, ns):
+            assert _NamedTuple in bases
+            for base in bases:
+                if base is not _NamedTuple and base is not typing.Generic:
+                    raise TypeError(
+                        'can only inherit from a NamedTuple type and Generic')
+            bases = tuple(tuple if base is _NamedTuple else base for base in bases)
+            if "__annotations__" in ns:
+                types = ns["__annotations__"]
+            elif "__annotate__" in ns:
+                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+                types = ns["__annotate__"](1)
+            else:
+                types = {}
+            default_names = []
+            for field_name in types:
+                if field_name in ns:
+                    default_names.append(field_name)
+                elif default_names:
+                    raise TypeError(f"Non-default namedtuple field {field_name} "
+                                    f"cannot follow default field"
+                                    f"{'s' if len(default_names) > 1 else ''} "
+                                    f"{', '.join(default_names)}")
+            nm_tpl = _make_nmtuple(
+                typename, types.items(),
+                defaults=[ns[n] for n in default_names],
+                module=ns['__module__']
+            )
+            nm_tpl.__bases__ = bases
+            if typing.Generic in bases:
+                if hasattr(typing, '_generic_class_getitem'):  # 3.12+
+                    nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
+                else:
+                    class_getitem = typing.Generic.__class_getitem__.__func__
+                    nm_tpl.__class_getitem__ = classmethod(class_getitem)
+            # update from user namespace without overriding special namedtuple attributes
+            for key, val in ns.items():
+                if key in _prohibited_namedtuple_fields:
+                    raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
+                elif key not in _special_namedtuple_fields:
+                    if key not in nm_tpl._fields:
+                        setattr(nm_tpl, key, ns[key])
+                    try:
+                        set_name = type(val).__set_name__
+                    except AttributeError:
+                        pass
+                    else:
+                        try:
+                            set_name(val, nm_tpl, key)
+                        except BaseException as e:
+                            msg = (
+                                f"Error calling __set_name__ on {type(val).__name__!r} "
+                                f"instance {key!r} in {typename!r}"
+                            )
+                            # BaseException.add_note() existed on py311,
+                            # but the __set_name__ machinery didn't start
+                            # using add_note() until py312.
+                            # Making sure exceptions are raised in the same way
+                            # as in "normal" classes seems most important here.
+                            if sys.version_info >= (3, 12):
+                                e.add_note(msg)
+                                raise
+                            else:
+                                raise RuntimeError(msg) from e
+
+            if typing.Generic in bases:
+                nm_tpl.__init_subclass__()
+            return nm_tpl
+
+    _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
+
+    def _namedtuple_mro_entries(bases):
+        assert NamedTuple in bases
+        return (_NamedTuple,)
+
+    @_ensure_subclassable(_namedtuple_mro_entries)
+    def NamedTuple(typename, fields=_marker, /, **kwargs):
+        """Typed version of namedtuple.
+
+        Usage::
+
+            class Employee(NamedTuple):
+                name: str
+                id: int
+
+        This is equivalent to::
+
+            Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+        The resulting class has an extra __annotations__ attribute, giving a
+        dict that maps field names to types.  (The field names are also in
+        the _fields attribute, which is part of the namedtuple API.)
+        An alternative equivalent functional syntax is also accepted::
+
+            Employee = NamedTuple('Employee', [('name', str), ('id', int)])
+        """
+        if fields is _marker:
+            if kwargs:
+                deprecated_thing = "Creating NamedTuple classes using keyword arguments"
+                deprecation_msg = (
+                    "{name} is deprecated and will be disallowed in Python {remove}. "
+                    "Use the class-based or functional syntax instead."
+                )
+            else:
+                deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+                example = f"`{typename} = NamedTuple({typename!r}, [])`"
+                deprecation_msg = (
+                    "{name} is deprecated and will be disallowed in Python {remove}. "
+                    "To create a NamedTuple class with 0 fields "
+                    "using the functional syntax, "
+                    "pass an empty list, e.g. "
+                ) + example + "."
+        elif fields is None:
+            if kwargs:
+                raise TypeError(
+                    "Cannot pass `None` as the 'fields' parameter "
+                    "and also specify fields using keyword arguments"
+                )
+            else:
+                deprecated_thing = "Passing `None` as the 'fields' parameter"
+                example = f"`{typename} = NamedTuple({typename!r}, [])`"
+                deprecation_msg = (
+                    "{name} is deprecated and will be disallowed in Python {remove}. "
+                    "To create a NamedTuple class with 0 fields "
+                    "using the functional syntax, "
+                    "pass an empty list, e.g. "
+                ) + example + "."
+        elif kwargs:
+            raise TypeError("Either list of fields or keywords"
+                            " can be provided to NamedTuple, not both")
+        if fields is _marker or fields is None:
+            warnings.warn(
+                deprecation_msg.format(name=deprecated_thing, remove="3.15"),
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            fields = kwargs.items()
+        nt = _make_nmtuple(typename, fields, module=_caller())
+        nt.__orig_bases__ = (NamedTuple,)
+        return nt
+
+
+if hasattr(collections.abc, "Buffer"):
+    Buffer = collections.abc.Buffer
+else:
+    class Buffer(abc.ABC):  # noqa: B024
+        """Base class for classes that implement the buffer protocol.
+
+        The buffer protocol allows Python objects to expose a low-level
+        memory buffer interface. Before Python 3.12, it is not possible
+        to implement the buffer protocol in pure Python code, or even
+        to check whether a class implements the buffer protocol. In
+        Python 3.12 and higher, the ``__buffer__`` method allows access
+        to the buffer protocol from Python code, and the
+        ``collections.abc.Buffer`` ABC allows checking whether a class
+        implements the buffer protocol.
+
+        To indicate support for the buffer protocol in earlier versions,
+        inherit from this ABC, either in a stub file or at runtime,
+        or use ABC registration. This ABC provides no methods, because
+        there are no Python-accessible methods shared by pre-3.12 buffer
+        classes. It is useful primarily for static checks.
+
+        """
+
+    # As a courtesy, register the most common stdlib buffer classes.
+    Buffer.register(memoryview)
+    Buffer.register(bytearray)
+    Buffer.register(bytes)
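+    # Illustrative sketch (not part of the vendored module): after the
+    # registrations above,
+    #
+    #     isinstance(b"abc", Buffer)              # -> True
+    #     isinstance(memoryview(b"abc"), Buffer)  # -> True
+    #
+    # and third-party buffer classes can opt in via Buffer.register(...).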
+
+
+# Backport of types.get_original_bases, available on 3.12+ in CPython
+if hasattr(_types, "get_original_bases"):
+    get_original_bases = _types.get_original_bases
+else:
+    def get_original_bases(cls, /):
+        """Return the class's "original" bases prior to modification by `__mro_entries__`.
+
+        Examples::
+
+            from typing import TypeVar, Generic
+            from typing_extensions import NamedTuple, TypedDict
+
+            T = TypeVar("T")
+            class Foo(Generic[T]): ...
+            class Bar(Foo[int], float): ...
+            class Baz(list[str]): ...
+            Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
+            Spam = TypedDict("Spam", {"a": int, "b": str})
+
+            assert get_original_bases(Bar) == (Foo[int], float)
+            assert get_original_bases(Baz) == (list[str],)
+            assert get_original_bases(Eggs) == (NamedTuple,)
+            assert get_original_bases(Spam) == (TypedDict,)
+            assert get_original_bases(int) == (object,)
+        """
+        try:
+            return cls.__dict__.get("__orig_bases__", cls.__bases__)
+        except AttributeError:
+            raise TypeError(
+                f'Expected an instance of type, not {type(cls).__name__!r}'
+            ) from None
+
+
+# NewType is a class on Python 3.10+, making it pickleable
+# The error message for subclassing instances of NewType was improved on 3.11+
+if sys.version_info >= (3, 11):
+    NewType = typing.NewType
+else:
+    class NewType:
+        """NewType creates simple unique types with almost zero
+        runtime overhead. NewType(name, tp) is considered a subtype of tp
+        by static type checkers. At runtime, NewType(name, tp) returns
+        a dummy callable that simply returns its argument. Usage::
+
+            UserId = NewType('UserId', int)
+            def name_by_id(user_id: UserId) -> str:
+                ...
+            UserId('user')          # Fails type check
+            name_by_id(42)          # Fails type check
+            name_by_id(UserId(42))  # OK
+            num = UserId(5) + 1     # type: int
+        """
+
+        def __call__(self, obj, /):
+            return obj
+
+        def __init__(self, name, tp):
+            self.__qualname__ = name
+            if '.' in name:
+                name = name.rpartition('.')[-1]
+            self.__name__ = name
+            self.__supertype__ = tp
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+        def __mro_entries__(self, bases):
+            # We defined __mro_entries__ to get a better error message
+            # if a user attempts to subclass a NewType instance. bpo-46170
+            supercls_name = self.__name__
+
+            class Dummy:
+                def __init_subclass__(cls):
+                    subcls_name = cls.__name__
+                    raise TypeError(
+                        f"Cannot subclass an instance of NewType. "
+                        f"Perhaps you were looking for: "
+                        f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
+                    )
+
+            return (Dummy,)
+
+        def __repr__(self):
+            return f'{self.__module__}.{self.__qualname__}'
+
+        def __reduce__(self):
+            return self.__qualname__
+
+        if sys.version_info >= (3, 10):
+            # PEP 604 methods
+            # It doesn't make sense to have these methods on Python <3.10
+
+            def __or__(self, other):
+                return typing.Union[self, other]
+
+            def __ror__(self, other):
+                return typing.Union[other, self]
+
+
+if hasattr(typing, "TypeAliasType"):
+    TypeAliasType = typing.TypeAliasType
+else:
+    def _is_unionable(obj):
+        """Corresponds to is_unionable() in unionobject.c in CPython."""
+        return obj is None or isinstance(obj, (
+            type,
+            _types.GenericAlias,
+            _types.UnionType,
+            TypeAliasType,
+        ))
+
+    class TypeAliasType:
+        """Create named, parameterized type aliases.
+
+        This provides a backport of the new `type` statement in Python 3.12:
+
+            type ListOrSet[T] = list[T] | set[T]
+
+        is equivalent to:
+
+            T = TypeVar("T")
+            ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
+
+        The name ListOrSet can then be used as an alias for the type it refers to.
+
+        The type_params argument should contain all the type parameters used
+        in the value of the type alias. If the alias is not generic, this
+        argument is omitted.
+
+        Static type checkers should only support type aliases declared using
+        TypeAliasType that follow these rules:
+
+        - The first argument (the name) must be a string literal.
+        - The TypeAliasType instance must be immediately assigned to a variable
+          of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
+          as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').
+
+        """
+
+        def __init__(self, name: str, value, *, type_params=()):
+            if not isinstance(name, str):
+                raise TypeError("TypeAliasType name must be a string")
+            self.__value__ = value
+            self.__type_params__ = type_params
+
+            parameters = []
+            for type_param in type_params:
+                if isinstance(type_param, TypeVarTuple):
+                    parameters.extend(type_param)
+                else:
+                    parameters.append(type_param)
+            self.__parameters__ = tuple(parameters)
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+            # Setting this attribute closes the TypeAliasType from further modification
+            self.__name__ = name
+
+        def __setattr__(self, name: str, value: object, /) -> None:
+            if hasattr(self, "__name__"):
+                self._raise_attribute_error(name)
+            super().__setattr__(name, value)
+
+        def __delattr__(self, name: str, /) -> Never:
+            self._raise_attribute_error(name)
+
+        def _raise_attribute_error(self, name: str) -> Never:
+            # Match the Python 3.12 error messages exactly
+            if name == "__name__":
+                raise AttributeError("readonly attribute")
+            elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
+                raise AttributeError(
+                    f"attribute '{name}' of 'typing.TypeAliasType' objects "
+                    "is not writable"
+                )
+            else:
+                raise AttributeError(
+                    f"'typing.TypeAliasType' object has no attribute '{name}'"
+                )
+
+        def __repr__(self) -> str:
+            return self.__name__
+
+        def __getitem__(self, parameters):
+            if not isinstance(parameters, tuple):
+                parameters = (parameters,)
+            parameters = [
+                typing._type_check(
+                    item, f'Subscripting {self.__name__} requires a type.'
+                )
+                for item in parameters
+            ]
+            return typing._GenericAlias(self, tuple(parameters))
+
+        def __reduce__(self):
+            return self.__name__
+
+        def __init_subclass__(cls, *args, **kwargs):
+            raise TypeError(
+                "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
+            )
+
+        # The presence of this method convinces typing._type_check
+        # that TypeAliasTypes are types.
+        def __call__(self):
+            raise TypeError("Type alias is not callable")
+
+        if sys.version_info >= (3, 10):
+            def __or__(self, right):
+                # For forward compatibility with 3.12, reject Unions
+                # that are not accepted by the built-in Union.
+                if not _is_unionable(right):
+                    return NotImplemented
+                return typing.Union[self, right]
+
+            def __ror__(self, left):
+                if not _is_unionable(left):
+                    return NotImplemented
+                return typing.Union[left, self]
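+    # Illustrative sketch (not part of the vendored module): at runtime the
+    # backported alias can be subscripted and inspected, e.g.
+    #
+    #     T = TypeVar("T")
+    #     Alias = TypeAliasType("Alias", typing.List[T], type_params=(T,))
+    #     Alias[int]       # -> Alias[int], a typing._GenericAlias
+    #     Alias.__value__  # -> typing.List[T]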
+
+
+if hasattr(typing, "is_protocol"):
+    is_protocol = typing.is_protocol
+    get_protocol_members = typing.get_protocol_members
+else:
+    def is_protocol(tp: type, /) -> bool:
+        """Return True if the given type is a Protocol.
+
+        Example::
+
+            >>> from typing_extensions import Protocol, is_protocol
+            >>> class P(Protocol):
+            ...     def a(self) -> str: ...
+            ...     b: int
+            >>> is_protocol(P)
+            True
+            >>> is_protocol(int)
+            False
+        """
+        return (
+            isinstance(tp, type)
+            and getattr(tp, '_is_protocol', False)
+            and tp is not Protocol
+            and tp is not typing.Protocol
+        )
+
+    def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
+        """Return the set of members defined in a Protocol.
+
+        Example::
+
+            >>> from typing_extensions import Protocol, get_protocol_members
+            >>> class P(Protocol):
+            ...     def a(self) -> str: ...
+            ...     b: int
+            >>> get_protocol_members(P)
+            frozenset({'a', 'b'})
+
+        Raise a TypeError for arguments that are not Protocols.
+        """
+        if not is_protocol(tp):
+            raise TypeError(f'{tp!r} is not a Protocol')
+        if hasattr(tp, '__protocol_attrs__'):
+            return frozenset(tp.__protocol_attrs__)
+        return frozenset(_get_protocol_attrs(tp))
+
+
+if hasattr(typing, "Doc"):
+    Doc = typing.Doc
+else:
+    class Doc:
+        """Define the documentation of a type annotation using ``Annotated``, to be
+         used in class attributes, function and method parameters, return values,
+         and variables.
+
+        The value should be a positional-only string literal to allow static tools
+        like editors and documentation generators to use it.
+
+        This complements docstrings.
+
+        The string value passed is available in the attribute ``documentation``.
+
+        Example::
+
+            >>> from typing_extensions import Annotated, Doc
+            >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
+        """
+        def __init__(self, documentation: str, /) -> None:
+            self.documentation = documentation
+
+        def __repr__(self) -> str:
+            return f"Doc({self.documentation!r})"
+
+        def __hash__(self) -> int:
+            return hash(self.documentation)
+
+        def __eq__(self, other: object) -> bool:
+            if not isinstance(other, Doc):
+                return NotImplemented
+            return self.documentation == other.documentation
+
+
+_CapsuleType = getattr(_types, "CapsuleType", None)
+
+if _CapsuleType is None:
+    try:
+        import _socket
+    except ImportError:
+        pass
+    else:
+        _CAPI = getattr(_socket, "CAPI", None)
+        if _CAPI is not None:
+            _CapsuleType = type(_CAPI)
+
+if _CapsuleType is not None:
+    CapsuleType = _CapsuleType
+    __all__.append("CapsuleType")
+
+
+# Aliases for items that have always been in typing.
+# Explicitly assign these (rather than using `from typing import *` at the top),
+# so that we get a CI error if one of these is deleted from typing.py
+# in a future version of Python
+AbstractSet = typing.AbstractSet
+AnyStr = typing.AnyStr
+BinaryIO = typing.BinaryIO
+Callable = typing.Callable
+Collection = typing.Collection
+Container = typing.Container
+Dict = typing.Dict
+ForwardRef = typing.ForwardRef
+FrozenSet = typing.FrozenSet
+Generic = typing.Generic
+Hashable = typing.Hashable
+IO = typing.IO
+ItemsView = typing.ItemsView
+Iterable = typing.Iterable
+Iterator = typing.Iterator
+KeysView = typing.KeysView
+List = typing.List
+Mapping = typing.Mapping
+MappingView = typing.MappingView
+Match = typing.Match
+MutableMapping = typing.MutableMapping
+MutableSequence = typing.MutableSequence
+MutableSet = typing.MutableSet
+Optional = typing.Optional
+Pattern = typing.Pattern
+Reversible = typing.Reversible
+Sequence = typing.Sequence
+Set = typing.Set
+Sized = typing.Sized
+TextIO = typing.TextIO
+Tuple = typing.Tuple
+Union = typing.Union
+ValuesView = typing.ValuesView
+cast = typing.cast
+no_type_check = typing.no_type_check
+no_type_check_decorator = typing.no_type_check_decorator
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/INSTALLER b/pkg_resources/_vendor/zipp-3.19.2.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/pkg_resources/_vendor/zipp-3.19.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/LICENSE b/pkg_resources/_vendor/zipp-3.19.2.dist-info/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/pkg_resources/_vendor/zipp-3.19.2.dist-info/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/METADATA b/pkg_resources/_vendor/zipp-3.19.2.dist-info/METADATA
new file mode 100644
index 0000000000..1399281717
--- /dev/null
+++ b/pkg_resources/_vendor/zipp-3.19.2.dist-info/METADATA
@@ -0,0 +1,102 @@
+Metadata-Version: 2.1
+Name: zipp
+Version: 3.19.2
+Summary: Backport of pathlib-compatible object wrapper for zip files
+Author-email: "Jason R. Coombs" 
+Project-URL: Homepage, https://github.com/jaraco/zipp
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-mypy ; extra == 'test'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
+Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
+Requires-Dist: jaraco.itertools ; extra == 'test'
+Requires-Dist: jaraco.functools ; extra == 'test'
+Requires-Dist: more-itertools ; extra == 'test'
+Requires-Dist: big-O ; extra == 'test'
+Requires-Dist: pytest-ignore-flaky ; extra == 'test'
+Requires-Dist: jaraco.test ; extra == 'test'
+Requires-Dist: importlib-resources ; (python_version < "3.9") and extra == 'test'
+
+.. image:: https://img.shields.io/pypi/v/zipp.svg
+   :target: https://pypi.org/project/zipp
+
+.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+
+.. image:: https://github.com/jaraco/zipp/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. .. image:: https://readthedocs.org/projects/PROJECT_RTD/badge/?version=latest
+..    :target: https://PROJECT_RTD.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/zipp
+   :target: https://tidelift.com/subscription/pkg/pypi-zipp?utm_source=pypi-zipp&utm_medium=readme
+
+
+A pathlib-compatible Zipfile object wrapper. Official backport of the standard library
+`Path object `_.
+
+
+Compatibility
+=============
+
+New features are introduced in this third-party library and later merged
+into CPython. The following table indicates which versions of this library
+were contributed to different versions of the standard library:
+
+.. list-table::
+   :header-rows: 1
+
+   * - zipp
+     - stdlib
+   * - 3.18
+     - 3.13
+   * - 3.16
+     - 3.12
+   * - 3.5
+     - 3.11
+   * - 3.2
+     - 3.10
+   * - 3.3 ??
+     - 3.9
+   * - 1.0
+     - 3.8
+
+
+Usage
+=====
+
+Use ``zipp.Path`` in place of ``zipfile.Path`` on any Python.
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/RECORD b/pkg_resources/_vendor/zipp-3.19.2.dist-info/RECORD
new file mode 100644
index 0000000000..77c02835d8
--- /dev/null
+++ b/pkg_resources/_vendor/zipp-3.19.2.dist-info/RECORD
@@ -0,0 +1,15 @@
+zipp-3.19.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+zipp-3.19.2.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+zipp-3.19.2.dist-info/METADATA,sha256=UIrk_kMIHGSwsKKChYizqMw0MMZpPRZ2ZiVpQAsN_bE,3575
+zipp-3.19.2.dist-info/RECORD,,
+zipp-3.19.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+zipp-3.19.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+zipp-3.19.2.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
+zipp/__init__.py,sha256=QuI1g00G4fRAcGt-HqbV0oWIkmSgedCGGYsHHYzNa8A,13412
+zipp/__pycache__/__init__.cpython-312.pyc,,
+zipp/__pycache__/glob.cpython-312.pyc,,
+zipp/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+zipp/compat/__pycache__/__init__.cpython-312.pyc,,
+zipp/compat/__pycache__/py310.cpython-312.pyc,,
+zipp/compat/py310.py,sha256=eZpkW0zRtunkhEh8jjX3gCGe22emoKCBJw72Zt4RkhA,219
+zipp/glob.py,sha256=etWpnfEoRyfUvrUsi6sTiGmErvPwe6HzY6pT8jg_lUI,3082
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/REQUESTED b/pkg_resources/_vendor/zipp-3.19.2.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/WHEEL b/pkg_resources/_vendor/zipp-3.19.2.dist-info/WHEEL
new file mode 100644
index 0000000000..bab98d6758
--- /dev/null
+++ b/pkg_resources/_vendor/zipp-3.19.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt b/pkg_resources/_vendor/zipp-3.19.2.dist-info/top_level.txt
similarity index 100%
rename from pkg_resources/_vendor/zipp-3.7.0.dist-info/top_level.txt
rename to pkg_resources/_vendor/zipp-3.19.2.dist-info/top_level.txt
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/METADATA b/pkg_resources/_vendor/zipp-3.7.0.dist-info/METADATA
deleted file mode 100644
index b1308b5f6e..0000000000
--- a/pkg_resources/_vendor/zipp-3.7.0.dist-info/METADATA
+++ /dev/null
@@ -1,58 +0,0 @@
-Metadata-Version: 2.1
-Name: zipp
-Version: 3.7.0
-Summary: Backport of pathlib-compatible object wrapper for zip files
-Home-page: https://github.com/jaraco/zipp
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-License: UNKNOWN
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.7
-License-File: LICENSE
-Provides-Extra: docs
-Requires-Dist: sphinx ; extra == 'docs'
-Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
-Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest (>=6) ; extra == 'testing'
-Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
-Requires-Dist: pytest-flake8 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
-Requires-Dist: jaraco.itertools ; extra == 'testing'
-Requires-Dist: func-timeout ; extra == 'testing'
-Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/zipp.svg
-   :target: `PyPI link`_
-
-.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
-   :target: `PyPI link`_
-
-.. _PyPI link: https://pypi.org/project/zipp
-
-.. image:: https://github.com/jaraco/zipp/workflows/tests/badge.svg
-   :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
-   :target: https://github.com/psf/black
-   :alt: Code style: Black
-
-.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest
-..    :target: https://skeleton.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2021-informational
-   :target: https://blog.jaraco.com/skeleton
-
-
-A pathlib-compatible Zipfile object wrapper. Official backport of the standard library
-`Path object `_.
-
-
diff --git a/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD b/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD
deleted file mode 100644
index adc797bc2e..0000000000
--- a/pkg_resources/_vendor/zipp-3.7.0.dist-info/RECORD
+++ /dev/null
@@ -1,9 +0,0 @@
-__pycache__/zipp.cpython-312.pyc,,
-zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
-zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261
-zipp-3.7.0.dist-info/RECORD,,
-zipp-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-zipp-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
-zipp-3.7.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
-zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
diff --git a/pkg_resources/_vendor/zipp.py b/pkg_resources/_vendor/zipp/__init__.py
similarity index 52%
rename from pkg_resources/_vendor/zipp.py
rename to pkg_resources/_vendor/zipp/__init__.py
index 26b723c1fd..d65297b835 100644
--- a/pkg_resources/_vendor/zipp.py
+++ b/pkg_resources/_vendor/zipp/__init__.py
@@ -3,13 +3,13 @@
 import zipfile
 import itertools
 import contextlib
-import sys
 import pathlib
+import re
+import stat
+import sys
 
-if sys.version_info < (3, 7):
-    from collections import OrderedDict
-else:
-    OrderedDict = dict
+from .compat.py310 import text_encoding
+from .glob import Translator
 
 
 __all__ = ['Path']
@@ -56,7 +56,7 @@ def _ancestry(path):
         path, tail = posixpath.split(path)
 
 
-_dedupe = OrderedDict.fromkeys
+_dedupe = dict.fromkeys
 """Deduplicate an iterable in original order"""
 
 
@@ -68,10 +68,95 @@ def _difference(minuend, subtrahend):
     return itertools.filterfalse(set(subtrahend).__contains__, minuend)
 
 
-class CompleteDirs(zipfile.ZipFile):
+class InitializedState:
+    """
+    Mix-in to save the initialization state for pickling.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.__args = args
+        self.__kwargs = kwargs
+        super().__init__(*args, **kwargs)
+
+    def __getstate__(self):
+        return self.__args, self.__kwargs
+
+    def __setstate__(self, state):
+        args, kwargs = state
+        super().__init__(*args, **kwargs)
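+    # Illustrative sketch (not part of the vendored module): stashing the
+    # constructor arguments lets pickle rebuild the ZipFile subclass by
+    # re-running __init__, so e.g. a Path built from a filename can
+    # round-trip through pickle and re-open the underlying archive:
+    #
+    #     p = pickle.loads(pickle.dumps(Path("archive.zip")))  # hypothetical file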
+
+
+class SanitizedNames:
+    """
+    ZipFile mix-in to ensure names are sanitized.
+    """
+
+    def namelist(self):
+        return list(map(self._sanitize, super().namelist()))
+
+    @staticmethod
+    def _sanitize(name):
+        r"""
+        Ensure a relative path with posix separators and no dot names.
+
+        Modeled after
+        https://github.com/python/cpython/blob/bcc1be39cb1d04ad9fc0bd1b9193d3972835a57c/Lib/zipfile/__init__.py#L1799-L1813
+        but provides consistent cross-platform behavior.
+
+        >>> san = SanitizedNames._sanitize
+        >>> san('/foo/bar')
+        'foo/bar'
+        >>> san('//foo.txt')
+        'foo.txt'
+        >>> san('foo/.././bar.txt')
+        'foo/bar.txt'
+        >>> san('foo../.bar.txt')
+        'foo../.bar.txt'
+        >>> san('\\foo\\bar.txt')
+        'foo/bar.txt'
+        >>> san('D:\\foo.txt')
+        'D/foo.txt'
+        >>> san('\\\\server\\share\\file.txt')
+        'server/share/file.txt'
+        >>> san('\\\\?\\GLOBALROOT\\Volume3')
+        '?/GLOBALROOT/Volume3'
+        >>> san('\\\\.\\PhysicalDrive1\\root')
+        'PhysicalDrive1/root'
+
+        Retain any trailing slash.
+        >>> san('abc/')
+        'abc/'
+
+        Raises a ValueError if the result is empty.
+        >>> san('../..')
+        Traceback (most recent call last):
+        ...
+        ValueError: Empty filename
+        """
+
+        def allowed(part):
+            return part and part not in {'..', '.'}
+
+        # Remove the drive letter.
+        # Don't use ntpath.splitdrive, because that also strips UNC paths
+        bare = re.sub('^([A-Z]):', r'\1', name, flags=re.IGNORECASE)
+        clean = bare.replace('\\', '/')
+        parts = clean.split('/')
+        joined = '/'.join(filter(allowed, parts))
+        if not joined:
+            raise ValueError("Empty filename")
+        return joined + '/' * name.endswith('/')
+
+
+class CompleteDirs(InitializedState, SanitizedNames, zipfile.ZipFile):
     """
     A ZipFile subclass that ensures that implied directories
     are always included in the namelist.
+
+    >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt']))
+    ['foo/', 'foo/bar/']
+    >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/']))
+    ['foo/']
     """
 
     @staticmethod
@@ -81,7 +166,7 @@ def _implied_dirs(names):
         return _dedupe(_difference(as_dirs, names))
 
     def namelist(self):
-        names = super(CompleteDirs, self).namelist()
+        names = super().namelist()
         return names + list(self._implied_dirs(names))
 
     def _name_set(self):
@@ -97,6 +182,17 @@ def resolve_dir(self, name):
         dir_match = name not in names and dirname in names
         return dirname if dir_match else name
 
+    def getinfo(self, name):
+        """
+        Supplement getinfo for implied dirs.
+        """
+        try:
+            return super().getinfo(name)
+        except KeyError:
+            if not name.endswith('/') or name not in self._name_set():
+                raise
+            return zipfile.ZipInfo(filename=name)
+
     @classmethod
     def make(cls, source):
         """
@@ -107,7 +203,7 @@ def make(cls, source):
             return source
 
         if not isinstance(source, zipfile.ZipFile):
-            return cls(_pathlib_compat(source))
+            return cls(source)
 
         # Only allow for FastLookup when supplied zipfile is read-only
         if 'r' not in source.mode:
@@ -116,6 +212,16 @@ def make(cls, source):
         source.__class__ = cls
         return source
 
+    @classmethod
+    def inject(cls, zf: zipfile.ZipFile) -> zipfile.ZipFile:
+        """
+        Given a writable zip file zf, inject directory entries for
+        any directories implied by the presence of children.
+        """
+        for name in cls._implied_dirs(zf.namelist()):
+            zf.writestr(name, b"")
+        return zf
+
 
 class FastLookup(CompleteDirs):
     """
@@ -126,30 +232,29 @@ class FastLookup(CompleteDirs):
     def namelist(self):
         with contextlib.suppress(AttributeError):
             return self.__names
-        self.__names = super(FastLookup, self).namelist()
+        self.__names = super().namelist()
         return self.__names
 
     def _name_set(self):
         with contextlib.suppress(AttributeError):
             return self.__lookup
-        self.__lookup = super(FastLookup, self)._name_set()
+        self.__lookup = super()._name_set()
         return self.__lookup
 
 
-def _pathlib_compat(path):
-    """
-    For path-like objects, convert to a filename for compatibility
-    on Python 3.6.1 and earlier.
-    """
-    try:
-        return path.__fspath__()
-    except AttributeError:
-        return str(path)
+def _extract_text_encoding(encoding=None, *args, **kwargs):
+    # compute stack level so that the caller of the caller sees any warning.
+    is_pypy = sys.implementation.name == 'pypy'
+    stack_level = 3 + is_pypy
+    return text_encoding(encoding, stack_level), args, kwargs
 
 
 class Path:
     """
-    A pathlib-compatible interface for zip files.
+    A :class:`importlib.resources.abc.Traversable` interface for zip files.
+
+    Implements many of the features users enjoy from
+    :class:`pathlib.Path`.
 
     Consider a zip file with this structure::
 
@@ -169,13 +274,13 @@ class Path:
 
     Path accepts the zipfile object itself or a filename
 
-    >>> root = Path(zf)
+    >>> path = Path(zf)
 
     From there, several path operations are available.
 
     Directory iteration (including the zip file itself):
 
-    >>> a, b = root.iterdir()
+    >>> a, b = path.iterdir()
     >>> a
     Path('mem/abcde.zip', 'a.txt')
     >>> b
@@ -196,7 +301,7 @@ class Path:
 
     Read text:
 
-    >>> c.read_text()
+    >>> c.read_text(encoding='utf-8')
     'content of c'
 
     existence:
@@ -213,16 +318,38 @@ class Path:
     'mem/abcde.zip/b/c.txt'
 
     At the root, ``name``, ``filename``, and ``parent``
-    resolve to the zipfile. Note these attributes are not
-    valid and will raise a ``ValueError`` if the zipfile
-    has no filename.
+    resolve to the zipfile.
 
-    >>> root.name
+    >>> str(path)
+    'mem/abcde.zip/'
+    >>> path.name
     'abcde.zip'
-    >>> str(root.filename).replace(os.sep, posixpath.sep)
-    'mem/abcde.zip'
-    >>> str(root.parent)
+    >>> path.filename == pathlib.Path('mem/abcde.zip')
+    True
+    >>> str(path.parent)
     'mem'
+
+    If the zipfile has no filename, such attributes are not
+    valid and accessing them will raise an Exception.
+
+    >>> zf.filename = None
+    >>> path.name
+    Traceback (most recent call last):
+    ...
+    TypeError: ...
+
+    >>> path.filename
+    Traceback (most recent call last):
+    ...
+    TypeError: ...
+
+    >>> path.parent
+    Traceback (most recent call last):
+    ...
+    TypeError: ...
+
+    # workaround python/cpython#106763
+    >>> pass
     """
 
     __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
@@ -240,6 +367,18 @@ def __init__(self, root, at=""):
         self.root = FastLookup.make(root)
         self.at = at
 
+    def __eq__(self, other):
+        """
+        >>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo'
+        False
+        """
+        if self.__class__ is not other.__class__:
+            return NotImplemented
+        return (self.root, self.at) == (other.root, other.at)
+
+    def __hash__(self):
+        return hash((self.root, self.at))
+
     def open(self, mode='r', *args, pwd=None, **kwargs):
         """
         Open this entry as text or binary following the semantics
@@ -256,30 +395,36 @@ def open(self, mode='r', *args, pwd=None, **kwargs):
             if args or kwargs:
                 raise ValueError("encoding args invalid for binary operation")
             return stream
-        return io.TextIOWrapper(stream, *args, **kwargs)
+        # Text mode:
+        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
+        return io.TextIOWrapper(stream, encoding, *args, **kwargs)
+
+    def _base(self):
+        return pathlib.PurePosixPath(self.at or self.root.filename)
 
     @property
     def name(self):
-        return pathlib.Path(self.at).name or self.filename.name
+        return self._base().name
 
     @property
     def suffix(self):
-        return pathlib.Path(self.at).suffix or self.filename.suffix
+        return self._base().suffix
 
     @property
     def suffixes(self):
-        return pathlib.Path(self.at).suffixes or self.filename.suffixes
+        return self._base().suffixes
 
     @property
     def stem(self):
-        return pathlib.Path(self.at).stem or self.filename.stem
+        return self._base().stem
 
     @property
     def filename(self):
         return pathlib.Path(self.root.filename).joinpath(self.at)
 
     def read_text(self, *args, **kwargs):
-        with self.open('r', *args, **kwargs) as strm:
+        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
+        with self.open('r', encoding, *args, **kwargs) as strm:
             return strm.read()
 
     def read_bytes(self):
@@ -307,6 +452,33 @@ def iterdir(self):
         subs = map(self._next, self.root.namelist())
         return filter(self._is_child, subs)
 
+    def match(self, path_pattern):
+        return pathlib.PurePosixPath(self.at).match(path_pattern)
+
+    def is_symlink(self):
+        """
+        Return whether this path is a symlink.
+        """
+        info = self.root.getinfo(self.at)
+        mode = info.external_attr >> 16
+        return stat.S_ISLNK(mode)
+
+    def glob(self, pattern):
+        if not pattern:
+            raise ValueError(f"Unacceptable pattern: {pattern!r}")
+
+        prefix = re.escape(self.at)
+        tr = Translator(seps='/')
+        matches = re.compile(prefix + tr.translate(pattern)).fullmatch
+        names = (data.filename for data in self.root.filelist)
+        return map(self._next, filter(matches, names))
+
+    def rglob(self, pattern):
+        return self.glob(f'**/{pattern}')
+
+    def relative_to(self, other, *extra):
+        return posixpath.relpath(str(self), str(other.joinpath(*extra)))
+
     def __str__(self):
         return posixpath.join(self.root.filename, self.at)
 
@@ -314,7 +486,7 @@ def __repr__(self):
         return self.__repr.format(self=self)
 
     def joinpath(self, *other):
-        next = posixpath.join(self.at, *map(_pathlib_compat, other))
+        next = posixpath.join(self.at, *other)
         return self._next(self.root.resolve_dir(next))
 
     __truediv__ = joinpath
diff --git a/pkg_resources/_vendor/zipp/compat/__init__.py b/pkg_resources/_vendor/zipp/compat/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pkg_resources/_vendor/zipp/compat/py310.py b/pkg_resources/_vendor/zipp/compat/py310.py
new file mode 100644
index 0000000000..d5ca53e037
--- /dev/null
+++ b/pkg_resources/_vendor/zipp/compat/py310.py
@@ -0,0 +1,11 @@
+import sys
+import io
+
+
+def _text_encoding(encoding, stacklevel=2, /):  # pragma: no cover
+    return encoding
+
+
+text_encoding = (
+    io.text_encoding if sys.version_info > (3, 10) else _text_encoding  # type: ignore
+)
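
In effect (a sketch, not part of the diff): on Python 3.10 and later the shim
defers to io.text_encoding, which supplies a default encoding name and may
emit an EncodingWarning; on older interpreters it passes the caller's value
through unchanged. Assuming the standalone zipp package is importable:

    from zipp.compat.py310 import text_encoding

    # On 3.10+: returns 'locale' (or 'utf-8' in UTF-8 mode) when given None.
    # On 3.9 and earlier: the no-op fallback simply returns None.
    print(text_encoding(None))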
diff --git a/pkg_resources/_vendor/zipp/glob.py b/pkg_resources/_vendor/zipp/glob.py
new file mode 100644
index 0000000000..69c41d77c3
--- /dev/null
+++ b/pkg_resources/_vendor/zipp/glob.py
@@ -0,0 +1,106 @@
+import os
+import re
+
+
+_default_seps = os.sep + str(os.altsep) * bool(os.altsep)
+
+
+class Translator:
+    """
+    >>> Translator('xyz')
+    Traceback (most recent call last):
+    ...
+    AssertionError: Invalid separators
+
+    >>> Translator('')
+    Traceback (most recent call last):
+    ...
+    AssertionError: Invalid separators
+    """
+
+    seps: str
+
+    def __init__(self, seps: str = _default_seps):
+        assert seps and set(seps) <= set(_default_seps), "Invalid separators"
+        self.seps = seps
+
+    def translate(self, pattern):
+        """
+        Given a glob pattern, produce a regex that matches it.
+        """
+        return self.extend(self.translate_core(pattern))
+
+    def extend(self, pattern):
+        r"""
+        Extend regex for pattern-wide concerns.
+
+        Apply '(?s:)' to create a non-capturing group in which '.'
+        also matches newlines (newlines are valid in Unix filenames).
+
+        Append '\Z' to imply fullmatch even when match is used.
+        """
+        return rf'(?s:{pattern})\Z'
+
+    def translate_core(self, pattern):
+        r"""
+        Given a glob pattern, produce a regex that matches it.
+
+        >>> t = Translator()
+        >>> t.translate_core('*.txt').replace('\\\\', '')
+        '[^/]*\\.txt'
+        >>> t.translate_core('a?txt')
+        'a[^/]txt'
+        >>> t.translate_core('**/*').replace('\\\\', '')
+        '.*/[^/][^/]*'
+        """
+        self.restrict_rglob(pattern)
+        return ''.join(map(self.replace, separate(self.star_not_empty(pattern))))
+
+    def replace(self, match):
+        """
+        Perform the replacements for a match from :func:`separate`.
+        """
+        return match.group('set') or (
+            re.escape(match.group(0))
+            .replace('\\*\\*', r'.*')
+            .replace('\\*', rf'[^{re.escape(self.seps)}]*')
+            .replace('\\?', r'[^/]')
+        )
+
+    def restrict_rglob(self, pattern):
+        """
+        Raise ValueError if ** appears in anything but a full path segment.
+
+        >>> Translator().translate('**foo')
+        Traceback (most recent call last):
+        ...
+        ValueError: ** must appear alone in a path segment
+        """
+        seps_pattern = rf'[{re.escape(self.seps)}]+'
+        segments = re.split(seps_pattern, pattern)
+        if any('**' in segment and segment != '**' for segment in segments):
+            raise ValueError("** must appear alone in a path segment")
+
+    def star_not_empty(self, pattern):
+        """
+        Ensure that * will not match an empty segment.
+        """
+
+        def handle_segment(match):
+            segment = match.group(0)
+            return '?*' if segment == '*' else segment
+
+        not_seps_pattern = rf'[^{re.escape(self.seps)}]+'
+        return re.sub(not_seps_pattern, handle_segment, pattern)
+
+
+def separate(pattern):
+    """
+    Separate out character sets to avoid translating their contents.
+
+    >>> [m.group(0) for m in separate('*.txt')]
+    ['*.txt']
+    >>> [m.group(0) for m in separate('a[?]txt')]
+    ['a', '[?]', 'txt']
+    """
+    return re.finditer(r'([^\[]+)|(?P<set>[\[].*?[\]])|([\[][^\]]*$)', pattern)

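For readers skimming the patch, a brief usage sketch of the new translator
(not part of the diff; it assumes a standalone zipp release that ships
zipp.glob, and the pattern and entry names are invented):

    import re

    from zipp.glob import Translator

    tr = Translator(seps='/')          # Path.glob pins separators to '/'
    regex = tr.translate('**/*.txt')   # glob pattern -> anchored regex
    assert re.match(regex, 'docs/a/b.txt')      # recursive match succeeds
    assert not re.match(regex, 'docs/a/b.png')  # wrong suffix is rejected
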
From 51615db65917c4c4d6946ca870a38bbc8693b270 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 21:11:25 -0400
Subject: [PATCH 0836/1761] Remove obsolete 'rewrite' functionality from
 vendored script.

---
 tools/vendored.py | 172 +---------------------------------------------
 1 file changed, 1 insertion(+), 171 deletions(-)

diff --git a/tools/vendored.py b/tools/vendored.py
index 208aab8eb1..a2ee6fd054 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -1,8 +1,5 @@
-import re
-import shutil
 import sys
 import subprocess
-from textwrap import dedent
 
 from jaraco.packaging import metadata
 from path import Path
@@ -18,182 +15,15 @@ def update_vendored():
     update_setuptools()
 
 
-def rewrite_packaging(pkg_files, new_root):
-    """
-    Rewrite imports in packaging to redirect to vendored copies.
-    """
-    for file in pkg_files.glob('*.py'):
-        text = file.text()
-        text = re.sub(r' (pyparsing)', rf' {new_root}.\1', text)
-        text = text.replace(
-            'from six.moves.urllib import parse',
-            'from urllib import parse',
-        )
-        file.write_text(text)
-
-
-def rewrite_jaraco_text(pkg_files, new_root):
-    """
-    Rewrite imports in jaraco.text to redirect to vendored copies.
-    """
-    for file in pkg_files.glob('*.py'):
-        text = file.read_text()
-        text = re.sub(r' (jaraco\.)', rf' {new_root}.\1', text)
-        text = re.sub(r' (importlib_resources)', rf' {new_root}.\1', text)
-        # suppress loading of lorem_ipsum; ref #3072
-        text = re.sub(r'^lorem_ipsum.*\n$', '', text, flags=re.M)
-        file.write_text(text)
-
-
-def repair_namespace(pkg_files):
-    # required for zip-packaged setuptools #3084
-    pkg_files.joinpath('__init__.py').write_text('')
-
-
-def rewrite_jaraco_functools(pkg_files, new_root):
-    """
-    Rewrite imports in jaraco.functools to redirect to vendored copies.
-    """
-    for file in pkg_files.glob('*.py'):
-        text = file.read_text()
-        text = re.sub(r' (more_itertools)', rf' {new_root}.\1', text)
-        file.write_text(text)
-
-
-def rewrite_jaraco_context(pkg_files, new_root):
-    """
-    Rewrite imports in jaraco.context to redirect to vendored copies.
-    """
-    for file in pkg_files.glob('context.py'):
-        text = file.read_text()
-        text = re.sub(r' (backports)', rf' {new_root}.\1', text)
-        file.write_text(text)
-
-
-def rewrite_importlib_resources(pkg_files, new_root):
-    """
-    Rewrite imports in importlib_resources to redirect to vendored copies.
-    """
-    for file in pkg_files.glob('*.py'):
-        text = file.read_text().replace('importlib_resources.abc', '.abc')
-        text = text.replace('zipp', '..zipp')
-        file.write_text(text)
-
-
-def rewrite_importlib_metadata(pkg_files, new_root):
-    """
-    Rewrite imports in importlib_metadata to redirect to vendored copies.
-    """
-    for file in pkg_files.glob('*.py'):
-        text = file.read_text()
-        text = text.replace('import zipp', 'from .. import zipp')
-        file.write_text(text)
-
-
-def rewrite_more_itertools(pkg_files: Path):
-    """
-    Defer import of concurrent.futures. Workaround for #3090.
-    """
-    more_file = pkg_files.joinpath('more.py')
-    text = more_file.read_text()
-    text = re.sub(r'^.*concurrent.futures.*?\n', '', text, flags=re.MULTILINE)
-    text = re.sub(
-        'ThreadPoolExecutor',
-        '__import__("concurrent.futures").futures.ThreadPoolExecutor',
-        text,
-    )
-    more_file.write_text(text)
-
-
-def rewrite_wheel(pkg_files: Path):
-    """
-    Remove parts of wheel not needed by bdist_wheel, and rewrite imports to use
-    setuptools's own code or vendored dependencies.
-    """
-    shutil.rmtree(pkg_files / 'cli')
-    shutil.rmtree(pkg_files / 'vendored')
-    pkg_files.joinpath('_setuptools_logging.py').unlink()
-    pkg_files.joinpath('__main__.py').unlink()
-    pkg_files.joinpath('bdist_wheel.py').unlink()
-
-    # Rewrite vendored imports to use setuptools's own vendored libraries
-    for path in pkg_files.iterdir():
-        if path.suffix == '.py':  # type: ignore[attr-defined]
-            code = path.read_text()
-            if path.name == 'wheelfile.py':
-                code = re.sub(
-                    r"^from wheel.util import ",
-                    r"from .util import ",
-                    code,
-                    flags=re.MULTILINE,
-                )
-
-                # No need to keep the wheel.cli package just for this trivial exception
-                code = re.sub(
-                    r"^from wheel.cli import WheelError\n",
-                    r"",
-                    code,
-                    flags=re.MULTILINE,
-                )
-                code += dedent(
-                    """
-
-                    class WheelError(Exception):
-                        pass
-                    """
-                )
-            else:
-                code = re.sub(
-                    r"^from \.vendored\.([\w.]+) import ",
-                    r"from ..\1 import ",
-                    code,
-                    flags=re.MULTILINE,
-                )
-                code = re.sub(
-                    r"^from \.util import log$",
-                    r"from distutils import log$",
-                    code,
-                    flags=re.MULTILINE,
-                )
-
-            path.write_text(code)  # type: ignore[attr-defined]
-
-
-def rewrite_platformdirs(pkg_files: Path):
-    """
-    Replace some absolute imports with relative ones.
-    """
-    init = pkg_files.joinpath('__init__.py')
-    text = init.read_text()
-    text = text.replace('from platformdirs.', 'from .')
-    init.write_text(text)
-
-
 def clean(vendor):
     """
     Remove all files out of the vendor directory except the meta
     data (as pip uninstall doesn't support -t).
     """
-    ignored = ['vendored.txt', 'ruff.toml']
+    ignored = ['ruff.toml']
     remove_all(path for path in vendor.glob('*') if path.basename() not in ignored)
 
 
-def install(vendor):
-    clean(vendor)
-    install_args = [
-        sys.executable,
-        '-m',
-        'pip',
-        'install',
-        '-r',
-        str(vendor / 'vendored.txt'),
-        '-t',
-        str(vendor),
-    ]
-    subprocess.check_call(install_args)
-    (vendor / '__init__.py').write_text('')
-
-
 def update_pkg_resources():
     deps = [
         'packaging >= 24',

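An aside for reviewers (not part of the patch): with install() gone, clean()
is the only remaining destructive step, and its filter reduces to a simple
membership test. A sketch with hypothetical directory entries:

    # Hypothetical names; only the filtering logic mirrors clean().
    ignored = ['ruff.toml']
    entries = ['ruff.toml', 'zipp', 'packaging-24.1.dist-info']
    doomed = [name for name in entries if name not in ignored]
    assert doomed == ['zipp', 'packaging-24.1.dist-info']
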
From b4b6bf755eeeb11deb3783dce8582472a1819850 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 21:18:44 -0400
Subject: [PATCH 0837/1761] Auto-detect the minimum Python version needed for
 vendored packages.

---
 tools/vendored.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/tools/vendored.py b/tools/vendored.py
index a2ee6fd054..22b18e50a4 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -1,7 +1,8 @@
+import functools
 import sys
 import subprocess
 
-from jaraco.packaging import metadata
+import jaraco.packaging.metadata
 from path import Path
 
 
@@ -41,11 +42,20 @@ def update_pkg_resources():
     install_deps(deps, vendor)
 
 
+@functools.cache
+def metadata():
+    return jaraco.packaging.metadata.load('.')
+
+
 def load_deps():
     """
     Read the dependencies from `.`.
     """
-    return metadata.load('.').get_all('Requires-Dist')
+    return metadata().get_all('Requires-Dist')
+
+
+def min_python():
+    return metadata()['Requires-Python'].removeprefix('>=').strip()
 
 
 def install_deps(deps, vendor):
@@ -65,7 +75,7 @@ def install_deps(deps, vendor):
         '--target',
         str(vendor),
         '--python-version',
-        '3.8',
+        min_python(),
         '--only-binary',
         ':all:',
     ] + list(deps)

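For context, a sketch (not part of the patch) of the metadata the new helpers
consume, assuming the object returned by jaraco.packaging.metadata.load('.')
behaves like an email.message.Message; the field values below are invented:

    from email.message import Message

    md = Message()
    md['Requires-Python'] = '>=3.8'
    md['Requires-Dist'] = 'packaging>=24'
    md['Requires-Dist'] = 'wheel>=0.43'

    # min_python() strips the '>=' specifier prefix...
    assert md['Requires-Python'].removeprefix('>=').strip() == '3.8'
    # ...and load_deps() gathers every Requires-Dist entry.
    assert md.get_all('Requires-Dist') == ['packaging>=24', 'wheel>=0.43']
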
From 4f6d97344c9e15c643aa3e366b21377a2008bc11 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 23:20:10 -0400
Subject: [PATCH 0838/1761] Refresh vendored dependencies.

---
 .../platformdirs-4.2.2.dist-info/INSTALLER    |   1 +
 .../platformdirs-4.2.2.dist-info/METADATA     | 319 +++++++++
 .../platformdirs-4.2.2.dist-info/RECORD       |  23 +
 .../platformdirs-4.2.2.dist-info/REQUESTED    |   0
 .../platformdirs-4.2.2.dist-info/WHEEL        |   4 +
 .../licenses/LICENSE                          |  21 +
 setuptools/_vendor/platformdirs/__init__.py   | 627 ++++++++++++++++++
 setuptools/_vendor/platformdirs/__main__.py   |  55 ++
 setuptools/_vendor/platformdirs/android.py    | 249 +++++++
 setuptools/_vendor/platformdirs/api.py        | 292 ++++++++
 setuptools/_vendor/platformdirs/macos.py      | 130 ++++
 setuptools/_vendor/platformdirs/py.typed      |   0
 setuptools/_vendor/platformdirs/unix.py       | 275 ++++++++
 setuptools/_vendor/platformdirs/version.py    |  16 +
 setuptools/_vendor/platformdirs/windows.py    | 272 ++++++++
 15 files changed, 2284 insertions(+)
 create mode 100644 setuptools/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
 create mode 100644 setuptools/_vendor/platformdirs-4.2.2.dist-info/METADATA
 create mode 100644 setuptools/_vendor/platformdirs-4.2.2.dist-info/RECORD
 create mode 100644 setuptools/_vendor/platformdirs-4.2.2.dist-info/REQUESTED
 create mode 100644 setuptools/_vendor/platformdirs-4.2.2.dist-info/WHEEL
 create mode 100644 setuptools/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
 create mode 100644 setuptools/_vendor/platformdirs/__init__.py
 create mode 100644 setuptools/_vendor/platformdirs/__main__.py
 create mode 100644 setuptools/_vendor/platformdirs/android.py
 create mode 100644 setuptools/_vendor/platformdirs/api.py
 create mode 100644 setuptools/_vendor/platformdirs/macos.py
 create mode 100644 setuptools/_vendor/platformdirs/py.typed
 create mode 100644 setuptools/_vendor/platformdirs/unix.py
 create mode 100644 setuptools/_vendor/platformdirs/version.py
 create mode 100644 setuptools/_vendor/platformdirs/windows.py

diff --git a/setuptools/_vendor/platformdirs-4.2.2.dist-info/INSTALLER b/setuptools/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/platformdirs-4.2.2.dist-info/METADATA b/setuptools/_vendor/platformdirs-4.2.2.dist-info/METADATA
new file mode 100644
index 0000000000..ab51ef36ad
--- /dev/null
+++ b/setuptools/_vendor/platformdirs-4.2.2.dist-info/METADATA
@@ -0,0 +1,319 @@
+Metadata-Version: 2.3
+Name: platformdirs
+Version: 4.2.2
+Summary: A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`.
+Project-URL: Documentation, https://platformdirs.readthedocs.io
+Project-URL: Homepage, https://github.com/platformdirs/platformdirs
+Project-URL: Source, https://github.com/platformdirs/platformdirs
+Project-URL: Tracker, https://github.com/platformdirs/platformdirs/issues
+Maintainer-email: Bernát Gábor , Julian Berman , Ofek Lev , Ronny Pfannschmidt 
+License-Expression: MIT
+License-File: LICENSE
+Keywords: appdirs,application,cache,directory,log,user
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.8
+Provides-Extra: docs
+Requires-Dist: furo>=2023.9.10; extra == 'docs'
+Requires-Dist: proselint>=0.13; extra == 'docs'
+Requires-Dist: sphinx-autodoc-typehints>=1.25.2; extra == 'docs'
+Requires-Dist: sphinx>=7.2.6; extra == 'docs'
+Provides-Extra: test
+Requires-Dist: appdirs==1.4.4; extra == 'test'
+Requires-Dist: covdefaults>=2.3; extra == 'test'
+Requires-Dist: pytest-cov>=4.1; extra == 'test'
+Requires-Dist: pytest-mock>=3.12; extra == 'test'
+Requires-Dist: pytest>=7.4.3; extra == 'test'
+Provides-Extra: type
+Requires-Dist: mypy>=1.8; extra == 'type'
+Description-Content-Type: text/x-rst
+
+The problem
+===========
+
+.. image:: https://github.com/platformdirs/platformdirs/actions/workflows/check.yml/badge.svg
+   :target: https://github.com/platformdirs/platformdirs/actions
+
+When writing desktop applications, finding the right location to store user
+data and configuration varies per platform. Even for single-platform apps,
+there may be plenty of nuances in figuring out the right location.
+
+For example, if running on macOS, you should use::
+
+    ~/Library/Application Support/<AppName>
+
+If on Windows (at least English Win) that should be::
+
+    C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>
+
+or possibly::
+
+    C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>
+
+for roaming profiles, but that is another story.
+
+On Linux (and other Unices), according to the `XDG Basedir Spec`_, it should be::
+
+    ~/.local/share/<AppName>
+
+.. _XDG Basedir Spec: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
+
+``platformdirs`` to the rescue
+==============================
+
+This kind of thing is what the ``platformdirs`` package is for.
+``platformdirs`` will help you choose an appropriate:
+
+- user data dir (``user_data_dir``)
+- user config dir (``user_config_dir``)
+- user cache dir (``user_cache_dir``)
+- site data dir (``site_data_dir``)
+- site config dir (``site_config_dir``)
+- user log dir (``user_log_dir``)
+- user documents dir (``user_documents_dir``)
+- user downloads dir (``user_downloads_dir``)
+- user pictures dir (``user_pictures_dir``)
+- user videos dir (``user_videos_dir``)
+- user music dir (``user_music_dir``)
+- user desktop dir (``user_desktop_dir``)
+- user runtime dir (``user_runtime_dir``)
+
+And also:
+
+- Is slightly opinionated on the directory names used. Look for "OPINION" in
+  documentation and code for when an opinion is being applied.
+
+Example output
+==============
+
+On macOS:
+
+.. code-block:: pycon
+
+    >>> from platformdirs import *
+    >>> appname = "SuperApp"
+    >>> appauthor = "Acme"
+    >>> user_data_dir(appname, appauthor)
+    '/Users/trentm/Library/Application Support/SuperApp'
+    >>> site_data_dir(appname, appauthor)
+    '/Library/Application Support/SuperApp'
+    >>> user_cache_dir(appname, appauthor)
+    '/Users/trentm/Library/Caches/SuperApp'
+    >>> user_log_dir(appname, appauthor)
+    '/Users/trentm/Library/Logs/SuperApp'
+    >>> user_documents_dir()
+    '/Users/trentm/Documents'
+    >>> user_downloads_dir()
+    '/Users/trentm/Downloads'
+    >>> user_pictures_dir()
+    '/Users/trentm/Pictures'
+    >>> user_videos_dir()
+    '/Users/trentm/Movies'
+    >>> user_music_dir()
+    '/Users/trentm/Music'
+    >>> user_desktop_dir()
+    '/Users/trentm/Desktop'
+    >>> user_runtime_dir(appname, appauthor)
+    '/Users/trentm/Library/Caches/TemporaryItems/SuperApp'
+
+On Windows:
+
+.. code-block:: pycon
+
+    >>> from platformdirs import *
+    >>> appname = "SuperApp"
+    >>> appauthor = "Acme"
+    >>> user_data_dir(appname, appauthor)
+    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp'
+    >>> user_data_dir(appname, appauthor, roaming=True)
+    'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp'
+    >>> user_cache_dir(appname, appauthor)
+    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache'
+    >>> user_log_dir(appname, appauthor)
+    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'
+    >>> user_documents_dir()
+    'C:\\Users\\trentm\\Documents'
+    >>> user_downloads_dir()
+    'C:\\Users\\trentm\\Downloads'
+    >>> user_pictures_dir()
+    'C:\\Users\\trentm\\Pictures'
+    >>> user_videos_dir()
+    'C:\\Users\\trentm\\Videos'
+    >>> user_music_dir()
+    'C:\\Users\\trentm\\Music'
+    >>> user_desktop_dir()
+    'C:\\Users\\trentm\\Desktop'
+    >>> user_runtime_dir(appname, appauthor)
+    'C:\\Users\\trentm\\AppData\\Local\\Temp\\Acme\\SuperApp'
+
+On Linux:
+
+.. code-block:: pycon
+
+    >>> from platformdirs import *
+    >>> appname = "SuperApp"
+    >>> appauthor = "Acme"
+    >>> user_data_dir(appname, appauthor)
+    '/home/trentm/.local/share/SuperApp'
+    >>> site_data_dir(appname, appauthor)
+    '/usr/local/share/SuperApp'
+    >>> site_data_dir(appname, appauthor, multipath=True)
+    '/usr/local/share/SuperApp:/usr/share/SuperApp'
+    >>> user_cache_dir(appname, appauthor)
+    '/home/trentm/.cache/SuperApp'
+    >>> user_log_dir(appname, appauthor)
+    '/home/trentm/.local/state/SuperApp/log'
+    >>> user_config_dir(appname)
+    '/home/trentm/.config/SuperApp'
+    >>> user_documents_dir()
+    '/home/trentm/Documents'
+    >>> user_downloads_dir()
+    '/home/trentm/Downloads'
+    >>> user_pictures_dir()
+    '/home/trentm/Pictures'
+    >>> user_videos_dir()
+    '/home/trentm/Videos'
+    >>> user_music_dir()
+    '/home/trentm/Music'
+    >>> user_desktop_dir()
+    '/home/trentm/Desktop'
+    >>> user_runtime_dir(appname, appauthor)
+    '/run/user/{os.getuid()}/SuperApp'
+    >>> site_config_dir(appname)
+    '/etc/xdg/SuperApp'
+    >>> os.environ["XDG_CONFIG_DIRS"] = "/etc:/usr/local/etc"
+    >>> site_config_dir(appname, multipath=True)
+    '/etc/SuperApp:/usr/local/etc/SuperApp'
+
+On Android::
+
+    >>> from platformdirs import *
+    >>> appname = "SuperApp"
+    >>> appauthor = "Acme"
+    >>> user_data_dir(appname, appauthor)
+    '/data/data/com.myApp/files/SuperApp'
+    >>> user_cache_dir(appname, appauthor)
+    '/data/data/com.myApp/cache/SuperApp'
+    >>> user_log_dir(appname, appauthor)
+    '/data/data/com.myApp/cache/SuperApp/log'
+    >>> user_config_dir(appname)
+    '/data/data/com.myApp/shared_prefs/SuperApp'
+    >>> user_documents_dir()
+    '/storage/emulated/0/Documents'
+    >>> user_downloads_dir()
+    '/storage/emulated/0/Downloads'
+    >>> user_pictures_dir()
+    '/storage/emulated/0/Pictures'
+    >>> user_videos_dir()
+    '/storage/emulated/0/DCIM/Camera'
+    >>> user_music_dir()
+    '/storage/emulated/0/Music'
+    >>> user_desktop_dir()
+    '/storage/emulated/0/Desktop'
+    >>> user_runtime_dir(appname, appauthor)
+    '/data/data/com.myApp/cache/SuperApp/tmp'
+
+Note: Some Android apps, such as Termux and Pydroid, are used as shells. These
+apps are used by the end user to emulate a Linux environment. The presence of
+the ``SHELL`` environment variable is used by Platformdirs to differentiate
+between general Android apps and Android apps used as shells. Shell Android
+apps also support ``XDG_*`` environment variables.
+
+
+``PlatformDirs`` for convenience
+================================
+
+.. code-block:: pycon
+
+    >>> from platformdirs import PlatformDirs
+    >>> dirs = PlatformDirs("SuperApp", "Acme")
+    >>> dirs.user_data_dir
+    '/Users/trentm/Library/Application Support/SuperApp'
+    >>> dirs.site_data_dir
+    '/Library/Application Support/SuperApp'
+    >>> dirs.user_cache_dir
+    '/Users/trentm/Library/Caches/SuperApp'
+    >>> dirs.user_log_dir
+    '/Users/trentm/Library/Logs/SuperApp'
+    >>> dirs.user_documents_dir
+    '/Users/trentm/Documents'
+    >>> dirs.user_downloads_dir
+    '/Users/trentm/Downloads'
+    >>> dirs.user_pictures_dir
+    '/Users/trentm/Pictures'
+    >>> dirs.user_videos_dir
+    '/Users/trentm/Movies'
+    >>> dirs.user_music_dir
+    '/Users/trentm/Music'
+    >>> dirs.user_desktop_dir
+    '/Users/trentm/Desktop'
+    >>> dirs.user_runtime_dir
+    '/Users/trentm/Library/Caches/TemporaryItems/SuperApp'
+
+Per-version isolation
+=====================
+
+If you have multiple versions of your app in use that you want to be
+able to run side-by-side, then you may want version-isolation for these
+dirs::
+
+    >>> from platformdirs import PlatformDirs
+    >>> dirs = PlatformDirs("SuperApp", "Acme", version="1.0")
+    >>> dirs.user_data_dir
+    '/Users/trentm/Library/Application Support/SuperApp/1.0'
+    >>> dirs.site_data_dir
+    '/Library/Application Support/SuperApp/1.0'
+    >>> dirs.user_cache_dir
+    '/Users/trentm/Library/Caches/SuperApp/1.0'
+    >>> dirs.user_log_dir
+    '/Users/trentm/Library/Logs/SuperApp/1.0'
+    >>> dirs.user_documents_dir
+    '/Users/trentm/Documents'
+    >>> dirs.user_downloads_dir
+    '/Users/trentm/Downloads'
+    >>> dirs.user_pictures_dir
+    '/Users/trentm/Pictures'
+    >>> dirs.user_videos_dir
+    '/Users/trentm/Movies'
+    >>> dirs.user_music_dir
+    '/Users/trentm/Music'
+    >>> dirs.user_desktop_dir
+    '/Users/trentm/Desktop'
+    >>> dirs.user_runtime_dir
+    '/Users/trentm/Library/Caches/TemporaryItems/SuperApp/1.0'
+
+Be wary of using this for configuration files though; you'll need to handle
+migrating configuration files manually.
+
+Why this Fork?
+==============
+
+This repository is a friendly fork of the wonderful work started by
+`ActiveState <https://www.activestate.com>`_ who created
+``appdirs``, this package's ancestor.
+
+Maintaining an open source project is no easy task, particularly
+from within an organization, and the Python community is indebted
+to ``appdirs`` (and to Trent Mick and Jeff Rouse in particular) for
+creating an incredibly useful simple module, as evidenced by the wide
+number of users it has attracted over the years.
+
+Nonetheless, given the number of long-standing open issues
+and pull requests, and no clear path towards ensuring
+that maintenance of the package would continue or grow,
+this fork was created.
+
+Contributions are most welcome.
diff --git a/setuptools/_vendor/platformdirs-4.2.2.dist-info/RECORD b/setuptools/_vendor/platformdirs-4.2.2.dist-info/RECORD
new file mode 100644
index 0000000000..64c0c8ea2e
--- /dev/null
+++ b/setuptools/_vendor/platformdirs-4.2.2.dist-info/RECORD
@@ -0,0 +1,23 @@
+platformdirs-4.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+platformdirs-4.2.2.dist-info/METADATA,sha256=zmsie01G1MtXR0wgIv5XpVeTO7idr0WWvfmxKsKWuGk,11429
+platformdirs-4.2.2.dist-info/RECORD,,
+platformdirs-4.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+platformdirs-4.2.2.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
+platformdirs-4.2.2.dist-info/licenses/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089
+platformdirs/__init__.py,sha256=EMGE8qeHRR9CzDFr8kL3tA8hdZZniYjXBVZd0UGTWK0,22225
+platformdirs/__main__.py,sha256=HnsUQHpiBaiTxwcmwVw-nFaPdVNZtQIdi1eWDtI-MzI,1493
+platformdirs/__pycache__/__init__.cpython-312.pyc,,
+platformdirs/__pycache__/__main__.cpython-312.pyc,,
+platformdirs/__pycache__/android.cpython-312.pyc,,
+platformdirs/__pycache__/api.cpython-312.pyc,,
+platformdirs/__pycache__/macos.cpython-312.pyc,,
+platformdirs/__pycache__/unix.cpython-312.pyc,,
+platformdirs/__pycache__/version.cpython-312.pyc,,
+platformdirs/__pycache__/windows.cpython-312.pyc,,
+platformdirs/android.py,sha256=xZXY9Jd46WOsxT2U6-5HsNtDZ-IQqxcEUrBLl3hYk4o,9016
+platformdirs/api.py,sha256=QBYdUac2eC521ek_y53uD1Dcq-lJX8IgSRVd4InC6uc,8996
+platformdirs/macos.py,sha256=wftsbsvq6nZ0WORXSiCrZNkRHz_WKuktl0a6mC7MFkI,5580
+platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+platformdirs/unix.py,sha256=Cci9Wqt35dAMsg6HT9nRGHSBW5obb0pR3AE1JJnsCXg,10643
+platformdirs/version.py,sha256=r7F76tZRjgQKzrpx_I0_ZMQOMU-PS7eGnHD7zEK3KB0,411
+platformdirs/windows.py,sha256=IFpiohUBwxPtCzlyKwNtxyW4Jk8haa6W8o59mfrDXVo,10125
diff --git a/setuptools/_vendor/platformdirs-4.2.2.dist-info/REQUESTED b/setuptools/_vendor/platformdirs-4.2.2.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/platformdirs-4.2.2.dist-info/WHEEL b/setuptools/_vendor/platformdirs-4.2.2.dist-info/WHEEL
new file mode 100644
index 0000000000..516596c767
--- /dev/null
+++ b/setuptools/_vendor/platformdirs-4.2.2.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.24.2
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/setuptools/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE b/setuptools/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
new file mode 100644
index 0000000000..f35fed9191
--- /dev/null
+++ b/setuptools/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2010-202x The platformdirs developers
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/setuptools/_vendor/platformdirs/__init__.py b/setuptools/_vendor/platformdirs/__init__.py
new file mode 100644
index 0000000000..3f7d9490d1
--- /dev/null
+++ b/setuptools/_vendor/platformdirs/__init__.py
@@ -0,0 +1,627 @@
+"""
+Utilities for determining application-specific dirs.
+
+See <https://github.com/platformdirs/platformdirs> for details and usage.
+
+"""
+
+from __future__ import annotations
+
+import os
+import sys
+from typing import TYPE_CHECKING
+
+from .api import PlatformDirsABC
+from .version import __version__
+from .version import __version_tuple__ as __version_info__
+
+if TYPE_CHECKING:
+    from pathlib import Path
+    from typing import Literal
+
+
+def _set_platform_dir_class() -> type[PlatformDirsABC]:
+    if sys.platform == "win32":
+        from platformdirs.windows import Windows as Result  # noqa: PLC0415
+    elif sys.platform == "darwin":
+        from platformdirs.macos import MacOS as Result  # noqa: PLC0415
+    else:
+        from platformdirs.unix import Unix as Result  # noqa: PLC0415
+
+    if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
+        if os.getenv("SHELL") or os.getenv("PREFIX"):
+            return Result
+
+        from platformdirs.android import _android_folder  # noqa: PLC0415
+
+        if _android_folder() is not None:
+            from platformdirs.android import Android  # noqa: PLC0415
+
+            return Android  # return to avoid redefinition of a result
+
+    return Result
+
+
+PlatformDirs = _set_platform_dir_class()  #: Currently active platform
+AppDirs = PlatformDirs  #: Backwards compatibility with appdirs
+
+
+def user_data_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: data directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_data_dir
+
+
+def site_data_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: data directory shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_data_dir
+
+
+def user_config_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: config directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_config_dir
+
+
+def site_config_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: config directory shared by the users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_config_dir
+
+
+def user_cache_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: cache directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_cache_dir
+
+
+def site_cache_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: cache directory shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_cache_dir
+
+
+def user_state_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: state directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_state_dir
+
+
+def user_log_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: log directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_log_dir
+
+
+def user_documents_dir() -> str:
+    """:returns: documents directory tied to the user"""
+    return PlatformDirs().user_documents_dir
+
+
+def user_downloads_dir() -> str:
+    """:returns: downloads directory tied to the user"""
+    return PlatformDirs().user_downloads_dir
+
+
+def user_pictures_dir() -> str:
+    """:returns: pictures directory tied to the user"""
+    return PlatformDirs().user_pictures_dir
+
+
+def user_videos_dir() -> str:
+    """:returns: videos directory tied to the user"""
+    return PlatformDirs().user_videos_dir
+
+
+def user_music_dir() -> str:
+    """:returns: music directory tied to the user"""
+    return PlatformDirs().user_music_dir
+
+
+def user_desktop_dir() -> str:
+    """:returns: desktop directory tied to the user"""
+    return PlatformDirs().user_desktop_dir
+
+
+def user_runtime_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: runtime directory tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_runtime_dir
+
+
+def site_runtime_dir(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> str:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: runtime directory shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_runtime_dir
+
+
+def user_data_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: data path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_data_path
+
+
+def site_data_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: data path shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_data_path
+
+
+def user_config_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: config path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_config_path
+
+
+def site_config_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    multipath: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: config path shared by the users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        multipath=multipath,
+        ensure_exists=ensure_exists,
+    ).site_config_path
+
+
+def site_cache_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: cache path shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_cache_path
+
+
+def user_cache_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: cache path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_cache_path
+
+
+def user_state_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    roaming: bool = False,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: state path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        roaming=roaming,
+        ensure_exists=ensure_exists,
+    ).user_state_path
+
+
+def user_log_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: log path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_log_path
+
+
+def user_documents_path() -> Path:
+    """:returns: documents a path tied to the user"""
+    return PlatformDirs().user_documents_path
+
+
+def user_downloads_path() -> Path:
+    """:returns: downloads path tied to the user"""
+    return PlatformDirs().user_downloads_path
+
+
+def user_pictures_path() -> Path:
+    """:returns: pictures path tied to the user"""
+    return PlatformDirs().user_pictures_path
+
+
+def user_videos_path() -> Path:
+    """:returns: videos path tied to the user"""
+    return PlatformDirs().user_videos_path
+
+
+def user_music_path() -> Path:
+    """:returns: music path tied to the user"""
+    return PlatformDirs().user_music_path
+
+
+def user_desktop_path() -> Path:
+    """:returns: desktop path tied to the user"""
+    return PlatformDirs().user_desktop_path
+
+
+def user_runtime_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: runtime path tied to the user
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).user_runtime_path
+
+
+def site_runtime_path(
+    appname: str | None = None,
+    appauthor: str | None | Literal[False] = None,
+    version: str | None = None,
+    opinion: bool = True,  # noqa: FBT001, FBT002
+    ensure_exists: bool = False,  # noqa: FBT001, FBT002
+) -> Path:
+    """
+    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
+    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
+    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
+    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
+    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+    :returns: runtime path shared by users
+    """
+    return PlatformDirs(
+        appname=appname,
+        appauthor=appauthor,
+        version=version,
+        opinion=opinion,
+        ensure_exists=ensure_exists,
+    ).site_runtime_path
+
+
+__all__ = [
+    "AppDirs",
+    "PlatformDirs",
+    "PlatformDirsABC",
+    "__version__",
+    "__version_info__",
+    "site_cache_dir",
+    "site_cache_path",
+    "site_config_dir",
+    "site_config_path",
+    "site_data_dir",
+    "site_data_path",
+    "site_runtime_dir",
+    "site_runtime_path",
+    "user_cache_dir",
+    "user_cache_path",
+    "user_config_dir",
+    "user_config_path",
+    "user_data_dir",
+    "user_data_path",
+    "user_desktop_dir",
+    "user_desktop_path",
+    "user_documents_dir",
+    "user_documents_path",
+    "user_downloads_dir",
+    "user_downloads_path",
+    "user_log_dir",
+    "user_log_path",
+    "user_music_dir",
+    "user_music_path",
+    "user_pictures_dir",
+    "user_pictures_path",
+    "user_runtime_dir",
+    "user_runtime_path",
+    "user_state_dir",
+    "user_state_path",
+    "user_videos_dir",
+    "user_videos_path",
+]
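
All of the helpers above are thin wrappers that construct a PlatformDirs
instance per call; a brief usage sketch (the app, author, and file names are
invented, and the printed paths depend on the host platform):

    from platformdirs import user_cache_dir, user_config_path

    print(user_cache_dir("SuperApp", "Acme", version="1.0"))
    print(user_config_path("SuperApp", "Acme") / "settings.toml")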
diff --git a/setuptools/_vendor/platformdirs/__main__.py b/setuptools/_vendor/platformdirs/__main__.py
new file mode 100644
index 0000000000..922c521358
--- /dev/null
+++ b/setuptools/_vendor/platformdirs/__main__.py
@@ -0,0 +1,55 @@
+"""Main entry point."""
+
+from __future__ import annotations
+
+from platformdirs import PlatformDirs, __version__
+
+PROPS = (
+    "user_data_dir",
+    "user_config_dir",
+    "user_cache_dir",
+    "user_state_dir",
+    "user_log_dir",
+    "user_documents_dir",
+    "user_downloads_dir",
+    "user_pictures_dir",
+    "user_videos_dir",
+    "user_music_dir",
+    "user_runtime_dir",
+    "site_data_dir",
+    "site_config_dir",
+    "site_cache_dir",
+    "site_runtime_dir",
+)
+
+
+def main() -> None:
+    """Run the main entry point."""
+    app_name = "MyApp"
+    app_author = "MyCompany"
+
+    print(f"-- platformdirs {__version__} --")  # noqa: T201
+
+    print("-- app dirs (with optional 'version')")  # noqa: T201
+    dirs = PlatformDirs(app_name, app_author, version="1.0")
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+    print("\n-- app dirs (without optional 'version')")  # noqa: T201
+    dirs = PlatformDirs(app_name, app_author)
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+    print("\n-- app dirs (without optional 'appauthor')")  # noqa: T201
+    dirs = PlatformDirs(app_name)
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+    print("\n-- app dirs (with disabled 'appauthor')")  # noqa: T201
+    dirs = PlatformDirs(app_name, appauthor=False)
+    for prop in PROPS:
+        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
+
+
+if __name__ == "__main__":
+    main()
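
This entry point is what ``python -m platformdirs`` runs. A hedged sketch of driving the same dump programmatically, for a single configuration (demo names as in main()):

    from platformdirs import PlatformDirs
    from platformdirs.__main__ import PROPS

    dirs = PlatformDirs("MyApp", "MyCompany", version="1.0")
    for prop in PROPS:
        # Resolve each directory property by name, exactly as main() does.
        print(f"{prop}: {getattr(dirs, prop)}")
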
diff --git a/setuptools/_vendor/platformdirs/android.py b/setuptools/_vendor/platformdirs/android.py
new file mode 100644
index 0000000000..afd3141c72
--- /dev/null
+++ b/setuptools/_vendor/platformdirs/android.py
@@ -0,0 +1,249 @@
+"""Android."""
+
+from __future__ import annotations
+
+import os
+import re
+import sys
+from functools import lru_cache
+from typing import TYPE_CHECKING, cast
+
+from .api import PlatformDirsABC
+
+
+class Android(PlatformDirsABC):
+    """
+    Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_.
+
+    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`, `version
+    <platformdirs.api.PlatformDirsABC.version>`, `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """:return: data directory tied to the user, e.g. ``/data/user///files/``"""
+        return self._append_app_name_and_version(cast(str, _android_folder()), "files")
+
+    @property
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_config_dir(self) -> str:
+        """
+        :return: config directory tied to the user, e.g. \
+        ``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
+        """
+        return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs")
+
+    @property
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users, same as `user_config_dir`"""
+        return self.user_config_dir
+
+    @property
+    def user_cache_dir(self) -> str:
+        """:return: cache directory tied to the user, e.g.,``/data/user///cache/``"""
+        return self._append_app_name_and_version(cast(str, _android_folder()), "cache")
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, same as `user_cache_dir`"""
+        return self.user_cache_dir
+
+    @property
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_log_dir(self) -> str:
+        """
+        :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
+          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
+        """
+        path = self.user_cache_dir
+        if self.opinion:
+            path = os.path.join(path, "log")  # noqa: PTH118
+        return path
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``"""
+        return _android_documents_folder()
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``"""
+        return _android_downloads_folder()
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user e.g. ``/storage/emulated/0/Pictures``"""
+        return _android_pictures_folder()
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``"""
+        return _android_videos_folder()
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user e.g. ``/storage/emulated/0/Music``"""
+        return _android_music_folder()
+
+    @property
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user e.g. ``/storage/emulated/0/Desktop``"""
+        return "/storage/emulated/0/Desktop"
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """
+        :return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
+          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
+        """
+        path = self.user_cache_dir
+        if self.opinion:
+            path = os.path.join(path, "tmp")  # noqa: PTH118
+        return path
+
+    @property
+    def site_runtime_dir(self) -> str:
+        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
+        return self.user_runtime_dir
+
+
+@lru_cache(maxsize=1)
+def _android_folder() -> str | None:  # noqa: C901, PLR0912
+    """:return: base folder for the Android OS or None if it cannot be found"""
+    result: str | None = None
+    # type checker isn't happy with our "import android"; just don't do this when type checking, see
+    # https://stackoverflow.com/a/61394121
+    if not TYPE_CHECKING:
+        try:
+            # First try to get a path to android app using python4android (if available)...
+            from android import mActivity  # noqa: PLC0415
+
+            context = cast("android.content.Context", mActivity.getApplicationContext())  # noqa: F821
+            result = context.getFilesDir().getParentFile().getAbsolutePath()
+        except Exception:  # noqa: BLE001
+            result = None
+    if result is None:
+        try:
+            # ...and fall back to using plain pyjnius, if python4android isn't available or doesn't deliver any useful
+            # result...
+            from jnius import autoclass  # noqa: PLC0415
+
+            context = autoclass("android.content.Context")
+            result = context.getFilesDir().getParentFile().getAbsolutePath()
+        except Exception:  # noqa: BLE001
+            result = None
+    if result is None:
+        # and if that fails too, find an android folder by looking at paths on sys.path
+        # warning: only works for apps installed under /data, not adopted storage etc.
+        pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
+        for path in sys.path:
+            if pattern.match(path):
+                result = path.split("/files")[0]
+                break
+        else:
+            result = None
+    if result is None:
+        # one last try: find an android folder by looking at paths on sys.path, taking adopted storage paths into
+        # account
+        pattern = re.compile(r"/mnt/expand/[a-fA-F0-9-]{36}/(data|user/\d+)/(.+)/files")
+        for path in sys.path:
+            if pattern.match(path):
+                result = path.split("/files")[0]
+                break
+        else:
+            result = None
+    return result
+
+
+@lru_cache(maxsize=1)
+def _android_documents_folder() -> str:
+    """:return: documents folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass  # noqa: PLC0415
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        documents_dir = "/storage/emulated/0/Documents"
+
+    return documents_dir
+
+
+@lru_cache(maxsize=1)
+def _android_downloads_folder() -> str:
+    """:return: downloads folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass  # noqa: PLC0415
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        downloads_dir = "/storage/emulated/0/Downloads"
+
+    return downloads_dir
+
+
+@lru_cache(maxsize=1)
+def _android_pictures_folder() -> str:
+    """:return: pictures folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass  # noqa: PLC0415
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        pictures_dir = "/storage/emulated/0/Pictures"
+
+    return pictures_dir
+
+
+@lru_cache(maxsize=1)
+def _android_videos_folder() -> str:
+    """:return: videos folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass  # noqa: PLC0415
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        videos_dir = "/storage/emulated/0/DCIM/Camera"
+
+    return videos_dir
+
+
+@lru_cache(maxsize=1)
+def _android_music_folder() -> str:
+    """:return: music folder for the Android OS"""
+    # Get directories with pyjnius
+    try:
+        from jnius import autoclass  # noqa: PLC0415
+
+        context = autoclass("android.content.Context")
+        environment = autoclass("android.os.Environment")
+        music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath()
+    except Exception:  # noqa: BLE001
+        music_dir = "/storage/emulated/0/Music"
+
+    return music_dir
+
+
+__all__ = [
+    "Android",
+]
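
A self-contained sketch of the sys.path fallback inside _android_folder() (the sample path is illustrative; on a real device it comes from sys.path):

    import re

    pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
    sample_sys_path = ["/usr/lib/python3.11", "/data/user/0/com.example.app/files"]

    result = None
    for path in sample_sys_path:
        if pattern.match(path):
            # Keep everything before "/files" as the app's base folder.
            result = path.split("/files")[0]
            break

    print(result)  # /data/user/0/com.example.app
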
diff --git a/setuptools/_vendor/platformdirs/api.py b/setuptools/_vendor/platformdirs/api.py
new file mode 100644
index 0000000000..c50caa648a
--- /dev/null
+++ b/setuptools/_vendor/platformdirs/api.py
@@ -0,0 +1,292 @@
+"""Base API."""
+
+from __future__ import annotations
+
+import os
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Iterator, Literal
+
+
+class PlatformDirsABC(ABC):  # noqa: PLR0904
+    """Abstract base class for platform directories."""
+
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        appname: str | None = None,
+        appauthor: str | None | Literal[False] = None,
+        version: str | None = None,
+        roaming: bool = False,  # noqa: FBT001, FBT002
+        multipath: bool = False,  # noqa: FBT001, FBT002
+        opinion: bool = True,  # noqa: FBT001, FBT002
+        ensure_exists: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """
+        Create a new platform directory.
+
+        :param appname: See `appname`.
+        :param appauthor: See `appauthor`.
+        :param version: See `version`.
+        :param roaming: See `roaming`.
+        :param multipath: See `multipath`.
+        :param opinion: See `opinion`.
+        :param ensure_exists: See `ensure_exists`.
+
+        """
+        self.appname = appname  #: The name of the application.
+        self.appauthor = appauthor
+        """
+        The name of the app author or distributing body for this application.
+
+        Typically, it is the owning company name. Defaults to `appname`. You may pass ``False`` to disable it.
+
+        """
+        self.version = version
+        """
+        An optional version path element to append to the path.
+
+        You might want to use this if you want multiple versions of your app to be able to run independently. If used,
+        this would typically be ``<major>.<minor>``.
+
+        """
+        self.roaming = roaming
+        """
+        Whether to use the roaming appdata directory on Windows.
+
+        That means that for users on a Windows network setup for roaming profiles, this user data will be synced on
+        login (see
+        `here <https://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_).
+
+        """
+        self.multipath = multipath
+        """
+        An optional parameter which indicates that the entire list of data dirs should be returned.
+
+        By default, only the first item is returned.
+
+        """
+        self.opinion = opinion  #: A flag indicating whether to use opinionated values.
+        self.ensure_exists = ensure_exists
+        """
+        Optionally create the directory (and any missing parents) upon access if it does not exist.
+
+        By default, no directories are created.
+
+        """
+
+    def _append_app_name_and_version(self, *base: str) -> str:
+        params = list(base[1:])
+        if self.appname:
+            params.append(self.appname)
+            if self.version:
+                params.append(self.version)
+        path = os.path.join(base[0], *params)  # noqa: PTH118
+        self._optionally_create_directory(path)
+        return path
+
+    def _optionally_create_directory(self, path: str) -> None:
+        if self.ensure_exists:
+            Path(path).mkdir(parents=True, exist_ok=True)
+
+    @property
+    @abstractmethod
+    def user_data_dir(self) -> str:
+        """:return: data directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users"""
+
+    @property
+    @abstractmethod
+    def user_config_dir(self) -> str:
+        """:return: config directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users"""
+
+    @property
+    @abstractmethod
+    def user_cache_dir(self) -> str:
+        """:return: cache directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users"""
+
+    @property
+    @abstractmethod
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def user_runtime_dir(self) -> str:
+        """:return: runtime directory tied to the user"""
+
+    @property
+    @abstractmethod
+    def site_runtime_dir(self) -> str:
+        """:return: runtime directory shared by users"""
+
+    @property
+    def user_data_path(self) -> Path:
+        """:return: data path tied to the user"""
+        return Path(self.user_data_dir)
+
+    @property
+    def site_data_path(self) -> Path:
+        """:return: data path shared by users"""
+        return Path(self.site_data_dir)
+
+    @property
+    def user_config_path(self) -> Path:
+        """:return: config path tied to the user"""
+        return Path(self.user_config_dir)
+
+    @property
+    def site_config_path(self) -> Path:
+        """:return: config path shared by the users"""
+        return Path(self.site_config_dir)
+
+    @property
+    def user_cache_path(self) -> Path:
+        """:return: cache path tied to the user"""
+        return Path(self.user_cache_dir)
+
+    @property
+    def site_cache_path(self) -> Path:
+        """:return: cache path shared by users"""
+        return Path(self.site_cache_dir)
+
+    @property
+    def user_state_path(self) -> Path:
+        """:return: state path tied to the user"""
+        return Path(self.user_state_dir)
+
+    @property
+    def user_log_path(self) -> Path:
+        """:return: log path tied to the user"""
+        return Path(self.user_log_dir)
+
+    @property
+    def user_documents_path(self) -> Path:
+        """:return: documents a path tied to the user"""
+        return Path(self.user_documents_dir)
+
+    @property
+    def user_downloads_path(self) -> Path:
+        """:return: downloads path tied to the user"""
+        return Path(self.user_downloads_dir)
+
+    @property
+    def user_pictures_path(self) -> Path:
+        """:return: pictures path tied to the user"""
+        return Path(self.user_pictures_dir)
+
+    @property
+    def user_videos_path(self) -> Path:
+        """:return: videos path tied to the user"""
+        return Path(self.user_videos_dir)
+
+    @property
+    def user_music_path(self) -> Path:
+        """:return: music path tied to the user"""
+        return Path(self.user_music_dir)
+
+    @property
+    def user_desktop_path(self) -> Path:
+        """:return: desktop path tied to the user"""
+        return Path(self.user_desktop_dir)
+
+    @property
+    def user_runtime_path(self) -> Path:
+        """:return: runtime path tied to the user"""
+        return Path(self.user_runtime_dir)
+
+    @property
+    def site_runtime_path(self) -> Path:
+        """:return: runtime path shared by users"""
+        return Path(self.site_runtime_dir)
+
+    def iter_config_dirs(self) -> Iterator[str]:
+        """:yield: all user and site configuration directories."""
+        yield self.user_config_dir
+        yield self.site_config_dir
+
+    def iter_data_dirs(self) -> Iterator[str]:
+        """:yield: all user and site data directories."""
+        yield self.user_data_dir
+        yield self.site_data_dir
+
+    def iter_cache_dirs(self) -> Iterator[str]:
+        """:yield: all user and site cache directories."""
+        yield self.user_cache_dir
+        yield self.site_cache_dir
+
+    def iter_runtime_dirs(self) -> Iterator[str]:
+        """:yield: all user and site runtime directories."""
+        yield self.user_runtime_dir
+        yield self.site_runtime_dir
+
+    def iter_config_paths(self) -> Iterator[Path]:
+        """:yield: all user and site configuration paths."""
+        for path in self.iter_config_dirs():
+            yield Path(path)
+
+    def iter_data_paths(self) -> Iterator[Path]:
+        """:yield: all user and site data paths."""
+        for path in self.iter_data_dirs():
+            yield Path(path)
+
+    def iter_cache_paths(self) -> Iterator[Path]:
+        """:yield: all user and site cache paths."""
+        for path in self.iter_cache_dirs():
+            yield Path(path)
+
+    def iter_runtime_paths(self) -> Iterator[Path]:
+        """:yield: all user and site runtime paths."""
+        for path in self.iter_runtime_dirs():
+            yield Path(path)
diff --git a/setuptools/_vendor/platformdirs/macos.py b/setuptools/_vendor/platformdirs/macos.py
new file mode 100644
index 0000000000..eb1ba5df1d
--- /dev/null
+++ b/setuptools/_vendor/platformdirs/macos.py
@@ -0,0 +1,130 @@
+"""macOS."""
+
+from __future__ import annotations
+
+import os.path
+import sys
+
+from .api import PlatformDirsABC
+
+
+class MacOS(PlatformDirsABC):
+    """
+    Platform directories for the macOS operating system.
+
+    Follows the guidance from
+    `Apple documentation <https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
+    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`,
+    `version <platformdirs.api.PlatformDirsABC.version>`,
+    `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support"))  # noqa: PTH111
+
+    @property
+    def site_data_dir(self) -> str:
+        """
+        :return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``.
+          If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
+          will be under the Homebrew prefix, e.g. ``/opt/homebrew/share/$appname/$version``.
+          If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled, and we're in Homebrew,
+          the response is a multi-path string separated by ":", e.g.
+          ``/opt/homebrew/share/$appname/$version:/Library/Application Support/$appname/$version``
+        """
+        is_homebrew = sys.prefix.startswith("/opt/homebrew")
+        path_list = [self._append_app_name_and_version("/opt/homebrew/share")] if is_homebrew else []
+        path_list.append(self._append_app_name_and_version("/Library/Application Support"))
+        if self.multipath:
+            return os.pathsep.join(path_list)
+        return path_list[0]
+
+    @property
+    def user_config_dir(self) -> str:
+        """:return: config directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users, same as `site_data_dir`"""
+        return self.site_data_dir
+
+    @property
+    def user_cache_dir(self) -> str:
+        """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))  # noqa: PTH111
+
+    @property
+    def site_cache_dir(self) -> str:
+        """
+        :return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``.
+          If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
+          will be under the Homebrew prefix, e.g. ``/opt/homebrew/var/cache/$appname/$version``.
+          If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled, and we're in Homebrew,
+          the response is a multi-path string separated by ":", e.g.
+          ``/opt/homebrew/var/cache/$appname/$version:/Library/Caches/$appname/$version``
+        """
+        is_homebrew = sys.prefix.startswith("/opt/homebrew")
+        path_list = [self._append_app_name_and_version("/opt/homebrew/var/cache")] if is_homebrew else []
+        path_list.append(self._append_app_name_and_version("/Library/Caches"))
+        if self.multipath:
+            return os.pathsep.join(path_list)
+        return path_list[0]
+
+    @property
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))  # noqa: PTH111
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
+        return os.path.expanduser("~/Documents")  # noqa: PTH111
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
+        return os.path.expanduser("~/Downloads")  # noqa: PTH111
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
+        return os.path.expanduser("~/Pictures")  # noqa: PTH111
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user, e.g. ``~/Movies``"""
+        return os.path.expanduser("~/Movies")  # noqa: PTH111
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user, e.g. ``~/Music``"""
+        return os.path.expanduser("~/Music")  # noqa: PTH111
+
+    @property
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
+        return os.path.expanduser("~/Desktop")  # noqa: PTH111
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
+        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))  # noqa: PTH111
+
+    @property
+    def site_runtime_dir(self) -> str:
+        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
+        return self.user_runtime_dir
+
+
+__all__ = [
+    "MacOS",
+]
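
A short sketch of the Homebrew/multipath behaviour above (the multi-path output is what a Homebrew-managed interpreter would typically produce; elsewhere only the system path appears):

    from platformdirs.macos import MacOS

    dirs = MacOS(appname="MyApp", version="1.0", multipath=True)
    # Under /opt/homebrew this is typically:
    #   /opt/homebrew/share/MyApp/1.0:/Library/Application Support/MyApp/1.0
    # otherwise just /Library/Application Support/MyApp/1.0
    print(dirs.site_data_dir)
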
diff --git a/setuptools/_vendor/platformdirs/py.typed b/setuptools/_vendor/platformdirs/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/platformdirs/unix.py b/setuptools/_vendor/platformdirs/unix.py
new file mode 100644
index 0000000000..9500ade614
--- /dev/null
+++ b/setuptools/_vendor/platformdirs/unix.py
@@ -0,0 +1,275 @@
+"""Unix."""
+
+from __future__ import annotations
+
+import os
+import sys
+from configparser import ConfigParser
+from pathlib import Path
+from typing import Iterator, NoReturn
+
+from .api import PlatformDirsABC
+
+if sys.platform == "win32":
+
+    def getuid() -> NoReturn:
+        msg = "should only be used on Unix"
+        raise RuntimeError(msg)
+
+else:
+    from os import getuid
+
+
+class Unix(PlatformDirsABC):  # noqa: PLR0904
+    """
+    On Unix/Linux, we follow the `XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
+
+    The spec allows overriding directories with environment variables. The examples shown are the default values,
+    alongside the name of the environment variable that overrides them. Makes use of the `appname
+    <platformdirs.api.PlatformDirsABC.appname>`, `version <platformdirs.api.PlatformDirsABC.version>`, `multipath
+    <platformdirs.api.PlatformDirsABC.multipath>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
+    <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """
+        :return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
+         ``$XDG_DATA_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_DATA_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.local/share")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def _site_data_dirs(self) -> list[str]:
+        path = os.environ.get("XDG_DATA_DIRS", "")
+        if not path.strip():
+            path = f"/usr/local/share{os.pathsep}/usr/share"
+        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
+
+    @property
+    def site_data_dir(self) -> str:
+        """
+        :return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
+         enabled and ``XDG_DATA_DIRS`` is set to a multi path, the response is also a multi path separated by the
+         OS path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
+        """
+        # XDG default for $XDG_DATA_DIRS; only first, if multipath is False
+        dirs = self._site_data_dirs
+        if not self.multipath:
+            return dirs[0]
+        return os.pathsep.join(dirs)
+
+    @property
+    def user_config_dir(self) -> str:
+        """
+        :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
+         ``$XDG_CONFIG_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_CONFIG_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.config")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def _site_config_dirs(self) -> list[str]:
+        path = os.environ.get("XDG_CONFIG_DIRS", "")
+        if not path.strip():
+            path = "/etc/xdg"
+        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
+
+    @property
+    def site_config_dir(self) -> str:
+        """
+        :return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
+         is enabled and ``XDG_CONFIG_DIRS`` is set to a multi path, the response is also a multi path separated by
+         the OS path separator), e.g. ``/etc/xdg/$appname/$version``
+        """
+        # XDG default for $XDG_CONFIG_DIRS; only first, if multipath is False
+        dirs = self._site_config_dirs
+        if not self.multipath:
+            return dirs[0]
+        return os.pathsep.join(dirs)
+
+    @property
+    def user_cache_dir(self) -> str:
+        """
+        :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
+         ``$XDG_CACHE_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_CACHE_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.cache")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``/var/cache/$appname/$version``"""
+        return self._append_app_name_and_version("/var/cache")
+
+    @property
+    def user_state_dir(self) -> str:
+        """
+        :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
+         ``$XDG_STATE_HOME/$appname/$version``
+        """
+        path = os.environ.get("XDG_STATE_HOME", "")
+        if not path.strip():
+            path = os.path.expanduser("~/.local/state")  # noqa: PTH111
+        return self._append_app_name_and_version(path)
+
+    @property
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it"""
+        path = self.user_state_dir
+        if self.opinion:
+            path = os.path.join(path, "log")  # noqa: PTH118
+            self._optionally_create_directory(path)
+        return path
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
+        return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents")
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
+        return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads")
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
+        return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures")
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user, e.g. ``~/Videos``"""
+        return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos")
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user, e.g. ``~/Music``"""
+        return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music")
+
+    @property
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
+        return _get_user_media_dir("XDG_DESKTOP_DIR", "~/Desktop")
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """
+        :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
+         ``$XDG_RUNTIME_DIR/$appname/$version``.
+
+         For FreeBSD/OpenBSD/NetBSD, if ``$XDG_RUNTIME_DIR`` is not set, it would return
+         ``/var/run/user/$(id -u)/$appname/$version`` if that directory exists, otherwise
+         ``/tmp/runtime-$(id -u)/$appname/$version``.
+        """
+        path = os.environ.get("XDG_RUNTIME_DIR", "")
+        if not path.strip():
+            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
+                path = f"/var/run/user/{getuid()}"
+                if not Path(path).exists():
+                    path = f"/tmp/runtime-{getuid()}"  # noqa: S108
+            else:
+                path = f"/run/user/{getuid()}"
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_runtime_dir(self) -> str:
+        """
+        :return: runtime directory shared by users, e.g. ``/run/$appname/$version`` or \
+        ``$XDG_RUNTIME_DIR/$appname/$version``.
+
+        Note that this behaves almost exactly like `user_runtime_dir` if ``$XDG_RUNTIME_DIR`` is set, but will
+        fall back to paths associated with the root user instead of a regular logged-in user if it's not set.
+
+        If you wish to ensure that a logged-in root user path is returned e.g. ``/run/user/0``, use `user_runtime_dir`
+        instead.
+
+        For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/$appname/$version`` if ``$XDG_RUNTIME_DIR`` is not set.
+        """
+        path = os.environ.get("XDG_RUNTIME_DIR", "")
+        if not path.strip():
+            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
+                path = "/var/run"
+            else:
+                path = "/run"
+        return self._append_app_name_and_version(path)
+
+    @property
+    def site_data_path(self) -> Path:
+        """:return: data path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_data_dir)
+
+    @property
+    def site_config_path(self) -> Path:
+        """:return: config path shared by the users, returns the first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_config_dir)
+
+    @property
+    def site_cache_path(self) -> Path:
+        """:return: cache path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
+        return self._first_item_as_path_if_multipath(self.site_cache_dir)
+
+    def _first_item_as_path_if_multipath(self, directory: str) -> Path:
+        if self.multipath:
+            # If multipath is True, the first path is returned.
+            directory = directory.split(os.pathsep)[0]
+        return Path(directory)
+
+    def iter_config_dirs(self) -> Iterator[str]:
+        """:yield: all user and site configuration directories."""
+        yield self.user_config_dir
+        yield from self._site_config_dirs
+
+    def iter_data_dirs(self) -> Iterator[str]:
+        """:yield: all user and site data directories."""
+        yield self.user_data_dir
+        yield from self._site_data_dirs
+
+
+def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str:
+    media_dir = _get_user_dirs_folder(env_var)
+    if media_dir is None:
+        media_dir = os.environ.get(env_var, "").strip()
+        if not media_dir:
+            media_dir = os.path.expanduser(fallback_tilde_path)  # noqa: PTH111
+
+    return media_dir
+
+
+def _get_user_dirs_folder(key: str) -> str | None:
+    """
+    Return directory from user-dirs.dirs config file.
+
+    See https://freedesktop.org/wiki/Software/xdg-user-dirs/.
+
+    """
+    user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs"
+    if user_dirs_config_path.exists():
+        parser = ConfigParser()
+
+        with user_dirs_config_path.open() as stream:
+            # Add fake section header, so ConfigParser doesn't complain
+            parser.read_string(f"[top]\n{stream.read()}")
+
+        if key not in parser["top"]:
+            return None
+
+        path = parser["top"][key].strip('"')
+        # Handle relative home paths
+        return path.replace("$HOME", os.path.expanduser("~"))  # noqa: PTH111
+
+    return None
+
+
+__all__ = [
+    "Unix",
+]
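
A sketch of the XDG override rule used throughout Unix: the environment variable wins, and blank or whitespace-only values count as unset:

    import os
    from platformdirs.unix import Unix

    os.environ["XDG_DATA_HOME"] = "/tmp/xdg-data"
    print(Unix(appname="MyApp").user_data_dir)  # /tmp/xdg-data/MyApp

    os.environ["XDG_DATA_HOME"] = "   "         # whitespace-only -> treated as unset
    print(Unix(appname="MyApp").user_data_dir)  # ~/.local/share/MyApp (expanded)
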
diff --git a/setuptools/_vendor/platformdirs/version.py b/setuptools/_vendor/platformdirs/version.py
new file mode 100644
index 0000000000..6483ddce0b
--- /dev/null
+++ b/setuptools/_vendor/platformdirs/version.py
@@ -0,0 +1,16 @@
+# file generated by setuptools_scm
+# don't change, don't track in version control
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import Tuple, Union
+    VERSION_TUPLE = Tuple[Union[int, str], ...]
+else:
+    VERSION_TUPLE = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+
+__version__ = version = '4.2.2'
+__version_tuple__ = version_tuple = (4, 2, 2)
diff --git a/setuptools/_vendor/platformdirs/windows.py b/setuptools/_vendor/platformdirs/windows.py
new file mode 100644
index 0000000000..d7bc96091a
--- /dev/null
+++ b/setuptools/_vendor/platformdirs/windows.py
@@ -0,0 +1,272 @@
+"""Windows."""
+
+from __future__ import annotations
+
+import os
+import sys
+from functools import lru_cache
+from typing import TYPE_CHECKING
+
+from .api import PlatformDirsABC
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+
+class Windows(PlatformDirsABC):
+    """
+    `MSDN on where to store app data files <https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid>`_.
+
+    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`, `appauthor
+    <platformdirs.api.PlatformDirsABC.appauthor>`, `version <platformdirs.api.PlatformDirsABC.version>`, `roaming
+    <platformdirs.api.PlatformDirsABC.roaming>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
+    <platformdirs.api.PlatformDirsABC.ensure_exists>`.
+
+    """
+
+    @property
+    def user_data_dir(self) -> str:
+        """
+        :return: data directory tied to the user, e.g.
+         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
+         ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
+        """
+        const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
+        path = os.path.normpath(get_win_folder(const))
+        return self._append_parts(path)
+
+    def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
+        params = []
+        if self.appname:
+            if self.appauthor is not False:
+                author = self.appauthor or self.appname
+                params.append(author)
+            params.append(self.appname)
+            if opinion_value is not None and self.opinion:
+                params.append(opinion_value)
+            if self.version:
+                params.append(self.version)
+        path = os.path.join(path, *params)  # noqa: PTH118
+        self._optionally_create_directory(path)
+        return path
+
+    @property
+    def site_data_dir(self) -> str:
+        """:return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
+        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
+        return self._append_parts(path)
+
+    @property
+    def user_config_dir(self) -> str:
+        """:return: config directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def site_config_dir(self) -> str:
+        """:return: config directory shared by the users, same as `site_data_dir`"""
+        return self.site_data_dir
+
+    @property
+    def user_cache_dir(self) -> str:
+        """
+        :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
+         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
+        """
+        path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
+        return self._append_parts(path, opinion_value="Cache")
+
+    @property
+    def site_cache_dir(self) -> str:
+        """:return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``"""
+        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
+        return self._append_parts(path, opinion_value="Cache")
+
+    @property
+    def user_state_dir(self) -> str:
+        """:return: state directory tied to the user, same as `user_data_dir`"""
+        return self.user_data_dir
+
+    @property
+    def user_log_dir(self) -> str:
+        """:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it"""
+        path = self.user_data_dir
+        if self.opinion:
+            path = os.path.join(path, "Logs")  # noqa: PTH118
+            self._optionally_create_directory(path)
+        return path
+
+    @property
+    def user_documents_dir(self) -> str:
+        """:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``"""
+        return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))
+
+    @property
+    def user_downloads_dir(self) -> str:
+        """:return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``"""
+        return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS"))
+
+    @property
+    def user_pictures_dir(self) -> str:
+        """:return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYPICTURES"))
+
+    @property
+    def user_videos_dir(self) -> str:
+        """:return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYVIDEO"))
+
+    @property
+    def user_music_dir(self) -> str:
+        """:return: music directory tied to the user e.g. ``%USERPROFILE%\\Music``"""
+        return os.path.normpath(get_win_folder("CSIDL_MYMUSIC"))
+
+    @property
+    def user_desktop_dir(self) -> str:
+        """:return: desktop directory tied to the user, e.g. ``%USERPROFILE%\\Desktop``"""
+        return os.path.normpath(get_win_folder("CSIDL_DESKTOPDIRECTORY"))
+
+    @property
+    def user_runtime_dir(self) -> str:
+        """
+        :return: runtime directory tied to the user, e.g.
+         ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
+        """
+        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))  # noqa: PTH118
+        return self._append_parts(path)
+
+    @property
+    def site_runtime_dir(self) -> str:
+        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
+        return self.user_runtime_dir
+
+
+def get_win_folder_from_env_vars(csidl_name: str) -> str:
+    """Get folder from environment variables."""
+    result = get_win_folder_if_csidl_name_not_env_var(csidl_name)
+    if result is not None:
+        return result
+
+    env_var_name = {
+        "CSIDL_APPDATA": "APPDATA",
+        "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
+        "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
+    }.get(csidl_name)
+    if env_var_name is None:
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
+    result = os.environ.get(env_var_name)
+    if result is None:
+        msg = f"Unset environment variable: {env_var_name}"
+        raise ValueError(msg)
+    return result
+
+
+def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None:
+    """Get a folder for a CSIDL name that does not exist as an environment variable."""
+    if csidl_name == "CSIDL_PERSONAL":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_DOWNLOADS":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYPICTURES":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYVIDEO":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos")  # noqa: PTH118
+
+    if csidl_name == "CSIDL_MYMUSIC":
+        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music")  # noqa: PTH118
+    return None
+
+
+def get_win_folder_from_registry(csidl_name: str) -> str:
+    """
+    Get folder from the registry.
+
+    This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer
+    for all CSIDL_* names.
+
+    """
+    shell_folder_name = {
+        "CSIDL_APPDATA": "AppData",
+        "CSIDL_COMMON_APPDATA": "Common AppData",
+        "CSIDL_LOCAL_APPDATA": "Local AppData",
+        "CSIDL_PERSONAL": "Personal",
+        "CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}",
+        "CSIDL_MYPICTURES": "My Pictures",
+        "CSIDL_MYVIDEO": "My Video",
+        "CSIDL_MYMUSIC": "My Music",
+    }.get(csidl_name)
+    if shell_folder_name is None:
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
+    if sys.platform != "win32":  # only needed for mypy type checker to know that this code runs only on Windows
+        raise NotImplementedError
+    import winreg  # noqa: PLC0415
+
+    key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
+    directory, _ = winreg.QueryValueEx(key, shell_folder_name)
+    return str(directory)
+
+
+def get_win_folder_via_ctypes(csidl_name: str) -> str:
+    """Get folder with ctypes."""
+    # There is no 'CSIDL_DOWNLOADS'.
+    # Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead.
+    # https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
+
+    import ctypes  # noqa: PLC0415
+
+    csidl_const = {
+        "CSIDL_APPDATA": 26,
+        "CSIDL_COMMON_APPDATA": 35,
+        "CSIDL_LOCAL_APPDATA": 28,
+        "CSIDL_PERSONAL": 5,
+        "CSIDL_MYPICTURES": 39,
+        "CSIDL_MYVIDEO": 14,
+        "CSIDL_MYMUSIC": 13,
+        "CSIDL_DOWNLOADS": 40,
+        "CSIDL_DESKTOPDIRECTORY": 16,
+    }.get(csidl_name)
+    if csidl_const is None:
+        msg = f"Unknown CSIDL name: {csidl_name}"
+        raise ValueError(msg)
+
+    buf = ctypes.create_unicode_buffer(1024)
+    windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
+    windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+    # Downgrade to short path name if it has high-bit chars.
+    if any(ord(c) > 255 for c in buf):  # noqa: PLR2004
+        buf2 = ctypes.create_unicode_buffer(1024)
+        if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+            buf = buf2
+
+    if csidl_name == "CSIDL_DOWNLOADS":
+        return os.path.join(buf.value, "Downloads")  # noqa: PTH118
+
+    return buf.value
+
+
+def _pick_get_win_folder() -> Callable[[str], str]:
+    try:
+        import ctypes  # noqa: PLC0415
+    except ImportError:
+        pass
+    else:
+        if hasattr(ctypes, "windll"):
+            return get_win_folder_via_ctypes
+    try:
+        import winreg  # noqa: PLC0415, F401
+    except ImportError:
+        return get_win_folder_from_env_vars
+    else:
+        return get_win_folder_from_registry
+
+
+get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())
+
+__all__ = [
+    "Windows",
+]
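
A sketch of the strategy selection encoded by _pick_get_win_folder() (the describe_strategy helper is hypothetical; it only reports which backend would be chosen):

    def describe_strategy() -> str:
        try:
            import ctypes
        except ImportError:
            ctypes = None
        if ctypes is not None and hasattr(ctypes, "windll"):
            return "ctypes (SHGetFolderPathW)"  # real Windows interpreter
        try:
            import winreg  # noqa: F401
        except ImportError:
            return "environment variables (APPDATA, LOCALAPPDATA, ...)"
        return "registry (Shell Folders key)"

    print(describe_strategy())  # non-Windows Pythons lack ctypes.windll and winreg
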

From 3fb68280f0721d1aa142e37ac193e7ef004eacde Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Wed, 3 Jul 2024 10:48:30 -0400
Subject: [PATCH 0839/1761] DBG: Print ELF headers of extension on Linux.

---
 distutils/tests/test_build_ext.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index a88554dbbf..d2bf7f3fc8 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -166,6 +166,8 @@ def _test_xx(copy_so):
                 ["readelf", "-d", xx.__file__], universal_newlines=True
             )
             if not copy_so:
+                import pprint
+                pprint.pprint(so_headers)
                 # Linked against a library in /usr/lib{,64}
                 assert 'RPATH' not in so_headers and 'RUNPATH' not in so_headers
             else:
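
Roughly what this hunk inspects, as a standalone sketch (Linux-only; readelf ships with binutils, and the .so path is a placeholder for xx.__file__):

    import pprint
    import subprocess

    so_headers = subprocess.check_output(
        ["readelf", "-d", "/path/to/xx.cpython-312-x86_64-linux-gnu.so"],
        universal_newlines=True,
    )
    pprint.pprint(so_headers)  # the debug print added by this patch
    # Linked against a system library => no RPATH/RUNPATH entries expected:
    assert "RPATH" not in so_headers and "RUNPATH" not in so_headers
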

From 8b4b9d0c46b01511bcba053c545af7804cbf6f34 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 23:25:08 -0400
Subject: [PATCH 0840/1761] Consolidate vendored packages in the setuptools
 package.

Now that the vendoring doesn't depend on importing the setuptools package, it's safe for pkg_resources to import _from_ there without actually importing setuptools.
---
 pkg_resources/__init__.py                     |    2 +-
 .../autocommand-2.2.2.dist-info/INSTALLER     |    1 -
 .../autocommand-2.2.2.dist-info/LICENSE       |  166 -
 .../autocommand-2.2.2.dist-info/METADATA      |  420 --
 .../autocommand-2.2.2.dist-info/RECORD        |   18 -
 .../_vendor/autocommand-2.2.2.dist-info/WHEEL |    5 -
 .../autocommand-2.2.2.dist-info/top_level.txt |    1 -
 pkg_resources/_vendor/autocommand/__init__.py |   27 -
 .../_vendor/autocommand/autoasync.py          |  142 -
 .../_vendor/autocommand/autocommand.py        |   70 -
 pkg_resources/_vendor/autocommand/automain.py |   59 -
 .../_vendor/autocommand/autoparse.py          |  333 --
 pkg_resources/_vendor/autocommand/errors.py   |   23 -
 .../INSTALLER                                 |    1 -
 .../backports.tarfile-1.2.0.dist-info/LICENSE |   17 -
 .../METADATA                                  |   46 -
 .../backports.tarfile-1.2.0.dist-info/RECORD  |   17 -
 .../REQUESTED                                 |    0
 .../backports.tarfile-1.2.0.dist-info/WHEEL   |    5 -
 .../top_level.txt                             |    1 -
 pkg_resources/_vendor/backports/__init__.py   |    1 -
 .../_vendor/backports/tarfile/__init__.py     | 2937 ----------
 .../_vendor/backports/tarfile/__main__.py     |    5 -
 .../backports/tarfile/compat/__init__.py      |    0
 .../_vendor/backports/tarfile/compat/py38.py  |   24 -
 .../INSTALLER                                 |    1 -
 .../LICENSE                                   |  202 -
 .../METADATA                                  |  100 -
 .../RECORD                                    |   89 -
 .../REQUESTED                                 |    0
 .../importlib_resources-6.4.0.dist-info/WHEEL |    5 -
 .../top_level.txt                             |    1 -
 .../_vendor/importlib_resources/__init__.py   |   36 -
 .../_vendor/importlib_resources/_adapters.py  |  168 -
 .../_vendor/importlib_resources/_common.py    |  210 -
 .../_vendor/importlib_resources/_itertools.py |   38 -
 .../_vendor/importlib_resources/abc.py        |  171 -
 .../importlib_resources/compat/__init__.py    |    0
 .../importlib_resources/compat/py38.py        |   11 -
 .../importlib_resources/compat/py39.py        |   10 -
 .../_vendor/importlib_resources/functional.py |   81 -
 .../importlib_resources/future/__init__.py    |    0
 .../importlib_resources/future/adapters.py    |   95 -
 .../_vendor/importlib_resources/py.typed      |    0
 .../_vendor/importlib_resources/readers.py    |  194 -
 .../_vendor/importlib_resources/simple.py     |  106 -
 .../importlib_resources/tests/__init__.py     |    0
 .../importlib_resources/tests/_path.py        |   56 -
 .../tests/compat/__init__.py                  |    0
 .../importlib_resources/tests/compat/py312.py |   18 -
 .../importlib_resources/tests/compat/py39.py  |   10 -
 .../tests/data01/__init__.py                  |    0
 .../tests/data01/binary.file                  |  Bin 4 -> 0 bytes
 .../tests/data01/subdirectory/__init__.py     |    0
 .../tests/data01/subdirectory/binary.file     |    1 -
 .../tests/data01/utf-16.file                  |  Bin 44 -> 0 bytes
 .../tests/data01/utf-8.file                   |    1 -
 .../tests/data02/__init__.py                  |    0
 .../tests/data02/one/__init__.py              |    0
 .../tests/data02/one/resource1.txt            |    1 -
 .../subdirectory/subsubdir/resource.txt       |    1 -
 .../tests/data02/two/__init__.py              |    0
 .../tests/data02/two/resource2.txt            |    1 -
 .../tests/namespacedata01/binary.file         |  Bin 4 -> 0 bytes
 .../namespacedata01/subdirectory/binary.file  |    1 -
 .../tests/namespacedata01/utf-16.file         |  Bin 44 -> 0 bytes
 .../tests/namespacedata01/utf-8.file          |    1 -
 .../tests/test_compatibilty_files.py          |  104 -
 .../tests/test_contents.py                    |   43 -
 .../importlib_resources/tests/test_custom.py  |   47 -
 .../importlib_resources/tests/test_files.py   |  117 -
 .../tests/test_functional.py                  |  242 -
 .../importlib_resources/tests/test_open.py    |   89 -
 .../importlib_resources/tests/test_path.py    |   65 -
 .../importlib_resources/tests/test_read.py    |   97 -
 .../importlib_resources/tests/test_reader.py  |  145 -
 .../tests/test_resource.py                    |  241 -
 .../_vendor/importlib_resources/tests/util.py |  164 -
 .../_vendor/importlib_resources/tests/zip.py  |   32 -
 .../_vendor/inflect-7.3.1.dist-info/INSTALLER |    1 -
 .../_vendor/inflect-7.3.1.dist-info/LICENSE   |   17 -
 .../_vendor/inflect-7.3.1.dist-info/METADATA  |  591 --
 .../_vendor/inflect-7.3.1.dist-info/RECORD    |   13 -
 .../_vendor/inflect-7.3.1.dist-info/WHEEL     |    5 -
 .../inflect-7.3.1.dist-info/top_level.txt     |    1 -
 pkg_resources/_vendor/inflect/__init__.py     | 3986 --------------
 .../_vendor/inflect/compat/__init__.py        |    0
 pkg_resources/_vendor/inflect/compat/py38.py  |    7 -
 pkg_resources/_vendor/inflect/py.typed        |    0
 .../jaraco.context-5.3.0.dist-info/INSTALLER  |    1 -
 .../jaraco.context-5.3.0.dist-info/LICENSE    |   17 -
 .../jaraco.context-5.3.0.dist-info/METADATA   |   75 -
 .../jaraco.context-5.3.0.dist-info/RECORD     |    8 -
 .../jaraco.context-5.3.0.dist-info/WHEEL      |    5 -
 .../top_level.txt                             |    1 -
 .../INSTALLER                                 |    1 -
 .../jaraco.functools-4.0.1.dist-info/LICENSE  |   17 -
 .../jaraco.functools-4.0.1.dist-info/METADATA |   64 -
 .../jaraco.functools-4.0.1.dist-info/RECORD   |   10 -
 .../jaraco.functools-4.0.1.dist-info/WHEEL    |    5 -
 .../top_level.txt                             |    1 -
 .../jaraco.text-3.12.1.dist-info/INSTALLER    |    1 -
 .../jaraco.text-3.12.1.dist-info/LICENSE      |   17 -
 .../jaraco.text-3.12.1.dist-info/METADATA     |   95 -
 .../jaraco.text-3.12.1.dist-info/RECORD       |   20 -
 .../jaraco.text-3.12.1.dist-info/REQUESTED    |    0
 .../jaraco.text-3.12.1.dist-info/WHEEL        |    5 -
 .../top_level.txt                             |    1 -
 pkg_resources/_vendor/jaraco/context.py       |  361 --
 .../_vendor/jaraco/functools/__init__.py      |  633 ---
 .../_vendor/jaraco/functools/__init__.pyi     |  125 -
 .../_vendor/jaraco/functools/py.typed         |    0
 .../_vendor/jaraco/text/Lorem ipsum.txt       |    2 -
 pkg_resources/_vendor/jaraco/text/__init__.py |  624 ---
 pkg_resources/_vendor/jaraco/text/layouts.py  |   25 -
 .../_vendor/jaraco/text/show-newlines.py      |   33 -
 .../_vendor/jaraco/text/strip-prefix.py       |   21 -
 .../_vendor/jaraco/text/to-dvorak.py          |    6 -
 .../_vendor/jaraco/text/to-qwerty.py          |    6 -
 .../more_itertools-10.3.0.dist-info/INSTALLER |    1 -
 .../more_itertools-10.3.0.dist-info/LICENSE   |   19 -
 .../more_itertools-10.3.0.dist-info/METADATA  |  266 -
 .../more_itertools-10.3.0.dist-info/RECORD    |   15 -
 .../more_itertools-10.3.0.dist-info/WHEEL     |    4 -
 .../_vendor/more_itertools/__init__.py        |    6 -
 .../_vendor/more_itertools/__init__.pyi       |    2 -
 pkg_resources/_vendor/more_itertools/more.py  | 4806 -----------------
 pkg_resources/_vendor/more_itertools/more.pyi |  709 ---
 pkg_resources/_vendor/more_itertools/py.typed |    0
 .../_vendor/more_itertools/recipes.py         | 1046 ----
 .../_vendor/more_itertools/recipes.pyi        |  136 -
 .../packaging-24.1.dist-info/INSTALLER        |    1 -
 .../_vendor/packaging-24.1.dist-info/LICENSE  |    3 -
 .../packaging-24.1.dist-info/LICENSE.APACHE   |  177 -
 .../packaging-24.1.dist-info/LICENSE.BSD      |   23 -
 .../_vendor/packaging-24.1.dist-info/METADATA |  102 -
 .../_vendor/packaging-24.1.dist-info/RECORD   |   37 -
 .../packaging-24.1.dist-info/REQUESTED        |    0
 .../_vendor/packaging-24.1.dist-info/WHEEL    |    4 -
 pkg_resources/_vendor/packaging/__init__.py   |   15 -
 pkg_resources/_vendor/packaging/_elffile.py   |  110 -
 pkg_resources/_vendor/packaging/_manylinux.py |  262 -
 pkg_resources/_vendor/packaging/_musllinux.py |   85 -
 pkg_resources/_vendor/packaging/_parser.py    |  354 --
 .../_vendor/packaging/_structures.py          |   61 -
 pkg_resources/_vendor/packaging/_tokenizer.py |  194 -
 pkg_resources/_vendor/packaging/markers.py    |  325 --
 pkg_resources/_vendor/packaging/metadata.py   |  804 ---
 pkg_resources/_vendor/packaging/py.typed      |    0
 .../_vendor/packaging/requirements.py         |   91 -
 pkg_resources/_vendor/packaging/specifiers.py | 1009 ----
 pkg_resources/_vendor/packaging/tags.py       |  568 --
 pkg_resources/_vendor/packaging/utils.py      |  174 -
 pkg_resources/_vendor/packaging/version.py    |  563 --
 .../platformdirs-4.2.2.dist-info/INSTALLER    |    1 -
 .../platformdirs-4.2.2.dist-info/METADATA     |  319 --
 .../platformdirs-4.2.2.dist-info/RECORD       |   23 -
 .../platformdirs-4.2.2.dist-info/REQUESTED    |    0
 .../platformdirs-4.2.2.dist-info/WHEEL        |    4 -
 .../licenses/LICENSE                          |   21 -
 .../_vendor/platformdirs/__init__.py          |  627 ---
 .../_vendor/platformdirs/__main__.py          |   55 -
 pkg_resources/_vendor/platformdirs/android.py |  249 -
 pkg_resources/_vendor/platformdirs/api.py     |  292 -
 pkg_resources/_vendor/platformdirs/macos.py   |  130 -
 pkg_resources/_vendor/platformdirs/py.typed   |    0
 pkg_resources/_vendor/platformdirs/unix.py    |  275 -
 pkg_resources/_vendor/platformdirs/version.py |   16 -
 pkg_resources/_vendor/platformdirs/windows.py |  272 -
 pkg_resources/_vendor/ruff.toml               |    1 -
 .../typeguard-4.3.0.dist-info/INSTALLER       |    1 -
 .../_vendor/typeguard-4.3.0.dist-info/LICENSE |   19 -
 .../typeguard-4.3.0.dist-info/METADATA        |   81 -
 .../_vendor/typeguard-4.3.0.dist-info/RECORD  |   34 -
 .../_vendor/typeguard-4.3.0.dist-info/WHEEL   |    5 -
 .../entry_points.txt                          |    2 -
 .../typeguard-4.3.0.dist-info/top_level.txt   |    1 -
 pkg_resources/_vendor/typeguard/__init__.py   |   48 -
 pkg_resources/_vendor/typeguard/_checkers.py  |  993 ----
 pkg_resources/_vendor/typeguard/_config.py    |  108 -
 .../_vendor/typeguard/_decorators.py          |  235 -
 .../_vendor/typeguard/_exceptions.py          |   42 -
 pkg_resources/_vendor/typeguard/_functions.py |  308 --
 .../_vendor/typeguard/_importhook.py          |  213 -
 pkg_resources/_vendor/typeguard/_memo.py      |   48 -
 .../_vendor/typeguard/_pytest_plugin.py       |  127 -
 .../_vendor/typeguard/_suppression.py         |   86 -
 .../_vendor/typeguard/_transformer.py         | 1229 -----
 .../_vendor/typeguard/_union_transformer.py   |   55 -
 pkg_resources/_vendor/typeguard/_utils.py     |  173 -
 pkg_resources/_vendor/typeguard/py.typed      |    0
 .../INSTALLER                                 |    1 -
 .../LICENSE                                   |  279 -
 .../METADATA                                  |   67 -
 .../typing_extensions-4.12.2.dist-info/RECORD |    7 -
 .../typing_extensions-4.12.2.dist-info/WHEEL  |    4 -
 pkg_resources/_vendor/typing_extensions.py    | 3641 -------------
 .../_vendor/zipp-3.19.2.dist-info/INSTALLER   |    1 -
 .../_vendor/zipp-3.19.2.dist-info/LICENSE     |   17 -
 .../_vendor/zipp-3.19.2.dist-info/METADATA    |  102 -
 .../_vendor/zipp-3.19.2.dist-info/RECORD      |   15 -
 .../_vendor/zipp-3.19.2.dist-info/REQUESTED   |    0
 .../_vendor/zipp-3.19.2.dist-info/WHEEL       |    5 -
 .../zipp-3.19.2.dist-info/top_level.txt       |    1 -
 pkg_resources/_vendor/zipp/__init__.py        |  501 --
 pkg_resources/_vendor/zipp/compat/__init__.py |    0
 pkg_resources/_vendor/zipp/compat/py310.py    |   11 -
 pkg_resources/_vendor/zipp/glob.py            |  106 -
 tools/vendored.py                             |   18 -
 209 files changed, 1 insertion(+), 36957 deletions(-)
 delete mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/autocommand-2.2.2.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/autocommand/__init__.py
 delete mode 100644 pkg_resources/_vendor/autocommand/autoasync.py
 delete mode 100644 pkg_resources/_vendor/autocommand/autocommand.py
 delete mode 100644 pkg_resources/_vendor/autocommand/automain.py
 delete mode 100644 pkg_resources/_vendor/autocommand/autoparse.py
 delete mode 100644 pkg_resources/_vendor/autocommand/errors.py
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/backports/__init__.py
 delete mode 100644 pkg_resources/_vendor/backports/tarfile/__init__.py
 delete mode 100644 pkg_resources/_vendor/backports/tarfile/__main__.py
 delete mode 100644 pkg_resources/_vendor/backports/tarfile/compat/__init__.py
 delete mode 100644 pkg_resources/_vendor/backports/tarfile/compat/py38.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED
 delete mode 100644 pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/importlib_resources/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/_adapters.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/_common.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/_itertools.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/abc.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/compat/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/compat/py38.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/compat/py39.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/functional.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/future/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/future/adapters.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/py.typed
 delete mode 100644 pkg_resources/_vendor/importlib_resources/readers.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/simple.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/_path.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/compat/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/compat/py312.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/compat/py39.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data01/binary.file
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/binary.file
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data01/utf-16.file
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data01/utf-8.file
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/namespacedata01/binary.file
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-16.file
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-8.file
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_contents.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_custom.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_files.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_functional.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_open.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_path.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_read.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_reader.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/test_resource.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/util.py
 delete mode 100644 pkg_resources/_vendor/importlib_resources/tests/zip.py
 delete mode 100644 pkg_resources/_vendor/inflect-7.3.1.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/inflect-7.3.1.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/inflect-7.3.1.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/inflect-7.3.1.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/inflect-7.3.1.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/inflect-7.3.1.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/inflect/__init__.py
 delete mode 100644 pkg_resources/_vendor/inflect/compat/__init__.py
 delete mode 100644 pkg_resources/_vendor/inflect/compat/py38.py
 delete mode 100644 pkg_resources/_vendor/inflect/py.typed
 delete mode 100644 pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED
 delete mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/jaraco/context.py
 delete mode 100644 pkg_resources/_vendor/jaraco/functools/__init__.py
 delete mode 100644 pkg_resources/_vendor/jaraco/functools/__init__.pyi
 delete mode 100644 pkg_resources/_vendor/jaraco/functools/py.typed
 delete mode 100644 pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt
 delete mode 100644 pkg_resources/_vendor/jaraco/text/__init__.py
 delete mode 100644 pkg_resources/_vendor/jaraco/text/layouts.py
 delete mode 100644 pkg_resources/_vendor/jaraco/text/show-newlines.py
 delete mode 100644 pkg_resources/_vendor/jaraco/text/strip-prefix.py
 delete mode 100644 pkg_resources/_vendor/jaraco/text/to-dvorak.py
 delete mode 100644 pkg_resources/_vendor/jaraco/text/to-qwerty.py
 delete mode 100644 pkg_resources/_vendor/more_itertools-10.3.0.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/more_itertools-10.3.0.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/more_itertools-10.3.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/more_itertools-10.3.0.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/more_itertools-10.3.0.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/more_itertools/__init__.py
 delete mode 100644 pkg_resources/_vendor/more_itertools/__init__.pyi
 delete mode 100755 pkg_resources/_vendor/more_itertools/more.py
 delete mode 100644 pkg_resources/_vendor/more_itertools/more.pyi
 delete mode 100644 pkg_resources/_vendor/more_itertools/py.typed
 delete mode 100644 pkg_resources/_vendor/more_itertools/recipes.py
 delete mode 100644 pkg_resources/_vendor/more_itertools/recipes.pyi
 delete mode 100644 pkg_resources/_vendor/packaging-24.1.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.APACHE
 delete mode 100644 pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.BSD
 delete mode 100644 pkg_resources/_vendor/packaging-24.1.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/packaging-24.1.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/packaging-24.1.dist-info/REQUESTED
 delete mode 100644 pkg_resources/_vendor/packaging-24.1.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/packaging/__init__.py
 delete mode 100644 pkg_resources/_vendor/packaging/_elffile.py
 delete mode 100644 pkg_resources/_vendor/packaging/_manylinux.py
 delete mode 100644 pkg_resources/_vendor/packaging/_musllinux.py
 delete mode 100644 pkg_resources/_vendor/packaging/_parser.py
 delete mode 100644 pkg_resources/_vendor/packaging/_structures.py
 delete mode 100644 pkg_resources/_vendor/packaging/_tokenizer.py
 delete mode 100644 pkg_resources/_vendor/packaging/markers.py
 delete mode 100644 pkg_resources/_vendor/packaging/metadata.py
 delete mode 100644 pkg_resources/_vendor/packaging/py.typed
 delete mode 100644 pkg_resources/_vendor/packaging/requirements.py
 delete mode 100644 pkg_resources/_vendor/packaging/specifiers.py
 delete mode 100644 pkg_resources/_vendor/packaging/tags.py
 delete mode 100644 pkg_resources/_vendor/packaging/utils.py
 delete mode 100644 pkg_resources/_vendor/packaging/version.py
 delete mode 100644 pkg_resources/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/platformdirs-4.2.2.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/platformdirs-4.2.2.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/platformdirs-4.2.2.dist-info/REQUESTED
 delete mode 100644 pkg_resources/_vendor/platformdirs-4.2.2.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
 delete mode 100644 pkg_resources/_vendor/platformdirs/__init__.py
 delete mode 100644 pkg_resources/_vendor/platformdirs/__main__.py
 delete mode 100644 pkg_resources/_vendor/platformdirs/android.py
 delete mode 100644 pkg_resources/_vendor/platformdirs/api.py
 delete mode 100644 pkg_resources/_vendor/platformdirs/macos.py
 delete mode 100644 pkg_resources/_vendor/platformdirs/py.typed
 delete mode 100644 pkg_resources/_vendor/platformdirs/unix.py
 delete mode 100644 pkg_resources/_vendor/platformdirs/version.py
 delete mode 100644 pkg_resources/_vendor/platformdirs/windows.py
 delete mode 100644 pkg_resources/_vendor/ruff.toml
 delete mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
 delete mode 100644 pkg_resources/_vendor/typeguard-4.3.0.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/typeguard/__init__.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_checkers.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_config.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_decorators.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_exceptions.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_functions.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_importhook.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_memo.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_pytest_plugin.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_suppression.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_transformer.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_union_transformer.py
 delete mode 100644 pkg_resources/_vendor/typeguard/_utils.py
 delete mode 100644 pkg_resources/_vendor/typeguard/py.typed
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/typing_extensions.py
 delete mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/INSTALLER
 delete mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/LICENSE
 delete mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/METADATA
 delete mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/RECORD
 delete mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/REQUESTED
 delete mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/WHEEL
 delete mode 100644 pkg_resources/_vendor/zipp-3.19.2.dist-info/top_level.txt
 delete mode 100644 pkg_resources/_vendor/zipp/__init__.py
 delete mode 100644 pkg_resources/_vendor/zipp/compat/__init__.py
 delete mode 100644 pkg_resources/_vendor/zipp/compat/py310.py
 delete mode 100644 pkg_resources/_vendor/zipp/glob.py

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 4b7887e9f4..e53525032c 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -74,7 +74,7 @@
 
 import _imp
 
-sys.path.append(os.path.dirname(__file__) + '/_vendor')
+sys.path.append(os.path.dirname(__file__) + '/../setuptools/_vendor')
 
 # capture these to bypass sandboxing
 from os import utime
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/INSTALLER b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/LICENSE b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/LICENSE
deleted file mode 100644
index b49c3af060..0000000000
--- a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/LICENSE
+++ /dev/null
@@ -1,166 +0,0 @@
-GNU LESSER GENERAL PUBLIC LICENSE
-                       Version 3, 29 June 2007
-
 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
-
-  This version of the GNU Lesser General Public License incorporates
-the terms and conditions of version 3 of the GNU General Public
-License, supplemented by the additional permissions listed below.
-
-  0. Additional Definitions.
-
-  As used herein, "this License" refers to version 3 of the GNU Lesser
-General Public License, and the "GNU GPL" refers to version 3 of the GNU
-General Public License.
-
-  "The Library" refers to a covered work governed by this License,
-other than an Application or a Combined Work as defined below.
-
-  An "Application" is any work that makes use of an interface provided
-by the Library, but which is not otherwise based on the Library.
-Defining a subclass of a class defined by the Library is deemed a mode
-of using an interface provided by the Library.
-
-  A "Combined Work" is a work produced by combining or linking an
-Application with the Library.  The particular version of the Library
-with which the Combined Work was made is also called the "Linked
-Version".
-
-  The "Minimal Corresponding Source" for a Combined Work means the
-Corresponding Source for the Combined Work, excluding any source code
-for portions of the Combined Work that, considered in isolation, are
-based on the Application, and not on the Linked Version.
-
-  The "Corresponding Application Code" for a Combined Work means the
-object code and/or source code for the Application, including any data
-and utility programs needed for reproducing the Combined Work from the
-Application, but excluding the System Libraries of the Combined Work.
-
-  1. Exception to Section 3 of the GNU GPL.
-
-  You may convey a covered work under sections 3 and 4 of this License
-without being bound by section 3 of the GNU GPL.
-
-  2. Conveying Modified Versions.
-
-  If you modify a copy of the Library, and, in your modifications, a
-facility refers to a function or data to be supplied by an Application
-that uses the facility (other than as an argument passed when the
-facility is invoked), then you may convey a copy of the modified
-version:
-
-   a) under this License, provided that you make a good faith effort to
-   ensure that, in the event an Application does not supply the
-   function or data, the facility still operates, and performs
-   whatever part of its purpose remains meaningful, or
-
-   b) under the GNU GPL, with none of the additional permissions of
-   this License applicable to that copy.
-
-  3. Object Code Incorporating Material from Library Header Files.
-
-  The object code form of an Application may incorporate material from
-a header file that is part of the Library.  You may convey such object
-code under terms of your choice, provided that, if the incorporated
-material is not limited to numerical parameters, data structure
-layouts and accessors, or small macros, inline functions and templates
-(ten or fewer lines in length), you do both of the following:
-
-   a) Give prominent notice with each copy of the object code that the
-   Library is used in it and that the Library and its use are
-   covered by this License.
-
-   b) Accompany the object code with a copy of the GNU GPL and this license
-   document.
-
-  4. Combined Works.
-
-  You may convey a Combined Work under terms of your choice that,
-taken together, effectively do not restrict modification of the
-portions of the Library contained in the Combined Work and reverse
-engineering for debugging such modifications, if you also do each of
-the following:
-
-   a) Give prominent notice with each copy of the Combined Work that
-   the Library is used in it and that the Library and its use are
-   covered by this License.
-
-   b) Accompany the Combined Work with a copy of the GNU GPL and this license
-   document.
-
-   c) For a Combined Work that displays copyright notices during
-   execution, include the copyright notice for the Library among
-   these notices, as well as a reference directing the user to the
-   copies of the GNU GPL and this license document.
-
-   d) Do one of the following:
-
-       0) Convey the Minimal Corresponding Source under the terms of this
-       License, and the Corresponding Application Code in a form
-       suitable for, and under terms that permit, the user to
-       recombine or relink the Application with a modified version of
-       the Linked Version to produce a modified Combined Work, in the
-       manner specified by section 6 of the GNU GPL for conveying
-       Corresponding Source.
-
-       1) Use a suitable shared library mechanism for linking with the
-       Library.  A suitable mechanism is one that (a) uses at run time
-       a copy of the Library already present on the user's computer
-       system, and (b) will operate properly with a modified version
-       of the Library that is interface-compatible with the Linked
-       Version.
-
-   e) Provide Installation Information, but only if you would otherwise
-   be required to provide such information under section 6 of the
-   GNU GPL, and only to the extent that such information is
-   necessary to install and execute a modified version of the
-   Combined Work produced by recombining or relinking the
-   Application with a modified version of the Linked Version. (If
-   you use option 4d0, the Installation Information must accompany
-   the Minimal Corresponding Source and Corresponding Application
-   Code. If you use option 4d1, you must provide the Installation
-   Information in the manner specified by section 6 of the GNU GPL
-   for conveying Corresponding Source.)
-
-  5. Combined Libraries.
-
-  You may place library facilities that are a work based on the
-Library side by side in a single library together with other library
-facilities that are not Applications and are not covered by this
-License, and convey such a combined library under terms of your
-choice, if you do both of the following:
-
-   a) Accompany the combined library with a copy of the same work based
-   on the Library, uncombined with any other library facilities,
-   conveyed under the terms of this License.
-
-   b) Give prominent notice with the combined library that part of it
-   is a work based on the Library, and explaining where to find the
-   accompanying uncombined form of the same work.
-
-  6. Revised Versions of the GNU Lesser General Public License.
-
-  The Free Software Foundation may publish revised and/or new versions
-of the GNU Lesser General Public License from time to time. Such new
-versions will be similar in spirit to the present version, but may
-differ in detail to address new problems or concerns.
-
-  Each version is given a distinguishing version number. If the
-Library as you received it specifies that a certain numbered version
-of the GNU Lesser General Public License "or any later version"
-applies to it, you have the option of following the terms and
-conditions either of that published version or of any later version
-published by the Free Software Foundation. If the Library as you
-received it does not specify a version number of the GNU Lesser
-General Public License, you may choose any version of the GNU Lesser
-General Public License ever published by the Free Software Foundation.
-
-  If the Library as you received it specifies that a proxy can decide
-whether future versions of the GNU Lesser General Public License shall
-apply, that proxy's public statement of acceptance of any version is
-permanent authorization for you to choose that version for the
-Library.
-
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/METADATA b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/METADATA
deleted file mode 100644
index 32214fb440..0000000000
--- a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/METADATA
+++ /dev/null
@@ -1,420 +0,0 @@
-Metadata-Version: 2.1
-Name: autocommand
-Version: 2.2.2
-Summary: A library to create a command-line program from a function
-Home-page: https://github.com/Lucretiel/autocommand
-Author: Nathan West
-License: LGPLv3
-Project-URL: Homepage, https://github.com/Lucretiel/autocommand
-Project-URL: Bug Tracker, https://github.com/Lucretiel/autocommand/issues
-Platform: any
-Classifier: Development Status :: 6 - Mature
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Topic :: Software Development
-Classifier: Topic :: Software Development :: Libraries
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Requires-Python: >=3.7
-Description-Content-Type: text/markdown
-License-File: LICENSE
-
-[![PyPI version](https://badge.fury.io/py/autocommand.svg)](https://badge.fury.io/py/autocommand)
-
-# autocommand
-
-A library to automatically generate and run simple argparse parsers from function signatures.
-
-## Installation
-
-Autocommand is installed via pip:
-
-```
-$ pip install autocommand
-```
-
-## Usage
-
-Autocommand turns a function into a command-line program. It converts the function's parameter signature into command-line arguments, and automatically runs the function if the module was called as `__main__`. In effect, it lets you create a smart main function.
-
-```python
-from autocommand import autocommand
-
-# This program takes exactly one argument and echoes it.
-@autocommand(__name__)
-def echo(thing):
-    print(thing)
-```
-
-```
-$ python echo.py hello
-hello
-$ python echo.py -h
-usage: echo [-h] thing
-
-positional arguments:
-  thing
-
-optional arguments:
-  -h, --help  show this help message and exit
-$ python echo.py hello world  # too many arguments
-usage: echo.py [-h] thing
-echo.py: error: unrecognized arguments: world
-```
-
-As you can see, autocommand converts the signature of the function into an argument spec. When you run the file as a program, autocommand collects the command-line arguments and turns them into function arguments. The function is executed with these arguments, and then the program exits with the return value of the function, via `sys.exit`. Autocommand also automatically creates a usage message, which can be invoked with `-h` or `--help`, and automatically prints an error message when provided with invalid arguments.
-
-### Types
-
-You can use a type annotation to give an argument a type. Any type (or in fact any callable) that returns an object when given a string argument can be used, though there are a few special cases that are described later.
-
-```python
-@autocommand(__name__)
-def net_client(host, port: int):
-    ...
-```
-
-Autocommand will catch `TypeErrors` raised by the type during argument parsing, so you can supply a callable and do some basic argument validation as well.
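-
-A hedged sketch of that idea (the `port_number` callable is hypothetical, not part of autocommand):
-
-```python
-def port_number(value):
-    # Any callable can serve as a "type"; raising TypeError
-    # rejects the value during argument parsing.
-    port = int(value)
-    if not 1 <= port <= 65535:
-        raise TypeError('port out of range: {}'.format(port))
-    return port
-
-@autocommand(__name__)
-def net_client(host, port: port_number):
-    ...
-```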
-
-### Trailing Arguments
-
-You can add a `*args` parameter to your function to give it trailing arguments. The command will collect 0 or more trailing arguments and supply them to `args` as a tuple. If a type annotation is supplied, the type is applied to each argument.
-
-```python
-# Write the contents of each file, one by one
-@autocommand(__name__)
-def cat(*files):
-    for filename in files:
-        with open(filename) as file:
-            for line in file:
-                print(line.rstrip())
-```
-
-```
-$ python cat.py -h
-usage: ipython [-h] [file [file ...]]
-
-positional arguments:
-  file
-
-optional arguments:
-  -h, --help  show this help message and exit
-```
-
-### Options
-
-To create `--option` switches, just assign a default. Autocommand will automatically create `--long` and `-s`hort switches.
-
-```python
-@autocommand(__name__)
-def do_with_config(argument, config='~/foo.conf'):
-    pass
-```
-
-```
-$ python example.py -h
-usage: example.py [-h] [-c CONFIG] argument
-
-positional arguments:
-  argument
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -c CONFIG, --config CONFIG
-```
-
-The option's type is automatically deduced from the default, unless one is explicitly given in an annotation:
-
-```python
-@autocommand(__name__)
-def http_connect(host, port=80):
-    print('{}:{}'.format(host, port))
-```
-
-```
-$ python http.py -h
-usage: http.py [-h] [-p PORT] host
-
-positional arguments:
-  host
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -p PORT, --port PORT
-$ python http.py localhost
-localhost:80
-$ python http.py localhost -p 8080
-localhost:8080
-$ python http.py localhost -p blah
-usage: http.py [-h] [-p PORT] host
-http.py: error: argument -p/--port: invalid int value: 'blah'
-```
-
-#### None
-
-If an option is given a default value of `None`, it reads in a value as normal, but supplies `None` if the option isn't provided.
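-
-A minimal sketch of this behavior (hypothetical `greet.py`):
-
-```python
-@autocommand(__name__)
-def greet(name=None):
-    # --name reads a value as a normal option; the function
-    # receives None when the switch is omitted.
-    print('Hello, {}!'.format(name or 'anonymous'))
-```
-
-```
-$ python greet.py
-Hello, anonymous!
-$ python greet.py --name world
-Hello, world!
-```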
-
-#### Switches
-
-If an argument is given a default value of `True` or `False`, or
-given an explicit `bool` type, it becomes an option switch.
-
-```python
-    @autocommand(__name__)
-    def example(verbose=False, quiet=False):
-        pass
-```
-
-```
-$ python example.py -h
-usage: example.py [-h] [-v] [-q]
-
-optional arguments:
-  -h, --help     show this help message and exit
-  -v, --verbose
-  -q, --quiet
-```
-
-Autocommand attempts to do the "correct thing" in these cases: if the default is `True`, then supplying the switch makes the argument `False`; if the type is `bool` and the default is some other `True` value, then supplying the switch makes the argument `False`, while not supplying the switch makes the argument the default value.
-
-Autocommand also supports the creation of switch inverters. Pass `add_nos=True` to `autocommand` to enable this.
-
-```
-    @autocommand(__name__, add_nos=True)
-    def example(verbose=False):
-        pass
-```
-
-```
-$ python example.py -h
-usage: ipython [-h] [-v] [--no-verbose]
-
-optional arguments:
-  -h, --help     show this help message and exit
-  -v, --verbose
-  --no-verbose
-```
-
-Using the `--no-` version of a switch will pass the opposite value in as a function argument. If multiple switches are present, the last one takes precedence.
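-
-A hedged illustration of that precedence, assuming a variant of `example` that prints the value it receives:
-
-```python
-@autocommand(__name__, add_nos=True)
-def example(verbose=False):
-    print(verbose)
-```
-
-```
-$ python example.py -v --no-verbose
-False
-$ python example.py --no-verbose -v
-True
-```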
-
-#### Files
-
-If the default value is a file object, such as `sys.stdout`, then autocommand just looks for a string giving a file path. It doesn't do any special checking on the string, though (such as checking if the file exists); it's better to let the client decide how to handle errors in this case. Instead, it provides a special context manager called `smart_open`, which behaves exactly like `open` if a filename or other openable type is provided, but also lets you use already open files:
-
-```python
-from autocommand import autocommand, smart_open
-import sys
-
-# Write the contents of stdin, or a file, to stdout
-@autocommand(__name__)
-def write_out(infile=sys.stdin):
-    with smart_open(infile) as f:
-        for line in f:
-            print(line.rstrip())
-    # If a file was opened, it is closed here. If it was just stdin, it is untouched.
-```
-
-```
-$ echo "Hello World!" | python write_out.py | tee hello.txt
-Hello World!
-$ python write_out.py --infile hello.txt
-Hello World!
-```
-
-### Descriptions and docstrings
-
-The `autocommand` decorator accepts `description` and `epilog` kwargs, corresponding to the `description` and `epilog` parameters of the `ArgumentParser`. If no description is given, but the decorated function has a docstring, then it is taken as the `description` for the `ArgumentParser`. You can also provide both the description and epilog in the docstring by splitting it into two sections with 4 or more `-` characters.
-
-```python
-@autocommand(__name__)
-def copy(infile=sys.stdin, outfile=sys.stdout):
-    '''
-    Copy the contents of a file (or stdin) to another file (or stdout)
-    ----------
-    Some extra documentation in the epilog
-    '''
-    with smart_open(infile) as istr:
-        with smart_open(outfile, 'w') as ostr:
-            for line in istr:
-                ostr.write(line)
-```
-
-```
-$ python copy.py -h
-usage: copy.py [-h] [-i INFILE] [-o OUTFILE]
-
-Copy the contents of a file (or stdin) to another file (or stdout)
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -i INFILE, --infile INFILE
-  -o OUTFILE, --outfile OUTFILE
-
-Some extra documentation in the epilog
-$ echo "Hello World" | python copy.py --outfile hello.txt
-$ python copy.py --infile hello.txt --outfile hello2.txt
-$ python copy.py --infile hello2.txt
-Hello World
-```
-
-### Parameter descriptions
-
-You can also attach description text to individual parameters in the annotation. To attach both a type and a description, supply them both in any order in a tuple:
-
-```python
-@autocommand(__name__)
-def copy_net(
-    infile: 'The name of the file to send',
-    host: 'The host to send the file to',
-    port: (int, 'The port to connect to')):
-
-    '''
-    Copy a file over raw TCP to a remote destination.
-    '''
-    # Left as an exercise to the reader
-```
-
-### Decorators and wrappers
-
-Autocommand automatically follows wrapper chains created by `@functools.wraps`. This means that you can apply other wrapping decorators to your main function, and autocommand will still correctly detect the signature.
-
-```python
-from functools import wraps
-from autocommand import autocommand
-
-def print_yielded(func):
-    '''
-    Convert a generator into a function that prints all yielded elements
-    '''
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        for thing in func(*args, **kwargs):
-            print(thing)
-    return wrapper
-
-@autocommand(__name__,
-    description= 'Print all the values from START to STOP, inclusive, in steps of STEP',
-    epilog=      'STOP and STEP default to 1')
-@print_yielded
-def seq(stop, start=1, step=1):
-    for i in range(start, stop + 1, step):
-        yield i
-```
-
-```
-$ seq.py -h
-usage: seq.py [-h] [-s START] [-S STEP] stop
-
-Print all the values from START to STOP, inclusive, in steps of STEP
-
-positional arguments:
-  stop
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -s START, --start START
-  -S STEP, --step STEP
-
-STOP and STEP default to 1
-```
-
-Even though autocommand is being applied to the `wrapper` returned by `print_yielded`, it still retrieves the signature of the underlying `seq` function to create the argument parsing.
-
-### Custom Parser
-
-While autocommand's automatic parser generator is a powerful convenience, it doesn't cover all of the different features that argparse provides. If you need these features, you can provide your own parser as a kwarg to `autocommand`:
-
-```python
-from argparse import ArgumentParser
-from autocommand import autocommand
-
-parser = ArgumentParser()
-# autocommand can't do optional positional parameters
-parser.add_argument('arg', nargs='?')
-# or mutually exclusive options
-group = parser.add_mutually_exclusive_group()
-group.add_argument('-v', '--verbose', action='store_true')
-group.add_argument('-q', '--quiet', action='store_true')
-
-@autocommand(__name__, parser=parser)
-def main(arg, verbose, quiet):
-    print(arg, verbose, quiet)
-```
-
-```
-$ python parser.py -h
-usage: write_file.py [-h] [-v | -q] [arg]
-
-positional arguments:
-  arg
-
-optional arguments:
-  -h, --help     show this help message and exit
-  -v, --verbose
-  -q, --quiet
-$ python parser.py
-None False False
-$ python parser.py hello
-hello False False
-$ python parser.py -v
-None True False
-$ python parser.py -q
-None False True
-$ python parser.py -vq
-usage: parser.py [-h] [-v | -q] [arg]
-parser.py: error: argument -q/--quiet: not allowed with argument -v/--verbose
-```
-
-Any parser should work fine, so long as each of the parser's arguments has a corresponding parameter in the decorated main function. The order of parameters doesn't matter, as long as they are all present. Note that when using a custom parser, autocommand doesn't modify the parser or the retrieved arguments. This means that no description/epilog will be added, and the function's type annotations and defaults (if present) will be ignored.
-
-## Testing and Library use
-
-The decorated function is only called and exited from if the first argument to `autocommand` is `'__main__'` or `True`. If it is neither of these values, or no argument is given, then a new main function is created by the decorator. This function has the signature `main(argv=None)`, and is intended to be called with arguments as if via `main(sys.argv[1:])`. The function has the attributes `parser` and `main`, which are the generated `ArgumentParser` and the original main function that was decorated. This is to facilitate testing and library use of your main. Calling the function triggers a `parse_args()` with the supplied arguments, and returns the result of the main function. Note that, while it returns instead of calling `sys.exit`, the `parse_args()` function will raise a `SystemExit` in the event of a parsing error or `-h/--help` argument.
-
-```python
-    @autocommand()
-    def test_prog(arg1, arg2: int, quiet=False, verbose=False):
-        if not quiet:
-            print(arg1, arg2)
-            if verbose:
-                print("LOUD NOISES")
-
-        return 0
-
-    print(test_prog(['-v', 'hello', '80']))
-```
-
-```
-$ python test_prog.py
-hello 80
-LOUD NOISES
-0
-```
-
-If the function is called with no arguments, `sys.argv[1:]` is used. This is to allow the autocommand function to be used as a setuptools entry point.
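-
-For instance, a hypothetical `setup.py` sketch (names are illustrative) that exposes the generated main function as a console script:
-
-```python
-from setuptools import setup
-
-setup(
-    name='test-prog',
-    py_modules=['test_prog'],
-    entry_points={
-        # The console script calls test_prog() with no arguments,
-        # so sys.argv[1:] is parsed automatically.
-        'console_scripts': ['test-prog = test_prog:test_prog'],
-    },
-)
-```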
-
-## Exceptions and limitations
-
-- There are a few possible exceptions that `autocommand` can raise. All of them derive from `autocommand.AutocommandError`.
-
-  - If an invalid annotation is given (that is, it isn't a `type`, `str`, `(type, str)`, or `(str, type)`), an `AnnotationError` is raised. The `type` may be any callable, as described in the `Types` section.
-  - If the function has a `**kwargs` parameter, a `KWArgError` is raised (see the sketch after this list).
-  - If, somehow, the function has a positional-only parameter, a `PositionalArgError` is raised. This means that the argument doesn't have a name, which is currently not possible with a plain `def` or `lambda`, though many built-in functions have this kind of parameter.
-
-- There are a few argparse features that are not supported by autocommand.
-
-  - It isn't possible to have an optional positional argument (as opposed to a `--option`). POSIX thinks this is bad form anyway.
-  - It isn't possible to have mutually exclusive arguments or options.
-  - It isn't possible to have subcommands or subparsers, though I'm working on a few solutions involving classes or nested function definitions to allow this.
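-
-A short sketch of the `**kwargs` limitation (hypothetical function; presumably the error is raised when the decorator inspects the signature):
-
-```python
-from autocommand import autocommand
-
-# A **kwargs parameter can't be mapped to an argparse spec,
-# so decorating this function raises autocommand.KWArgError.
-@autocommand(__name__)
-def unsupported(**kwargs):
-    pass
-```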
-
-## Development
-
-Autocommand cannot be imported from the project root; this is to enforce separation of concerns and prevent accidental importing of `setup.py` or tests. To develop, install the project in editable mode:
-
-```
-$ python setup.py develop
-```
-
-This will create a link to the source files in the deployment directory, so that any source changes are reflected when it is imported.
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/RECORD b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/RECORD
deleted file mode 100644
index e6e12ea51e..0000000000
--- a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/RECORD
+++ /dev/null
@@ -1,18 +0,0 @@
-autocommand-2.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-autocommand-2.2.2.dist-info/LICENSE,sha256=reeNBJgtaZctREqOFKlPh6IzTdOFXMgDSOqOJAqg3y0,7634
-autocommand-2.2.2.dist-info/METADATA,sha256=OADZuR3O6iBlpu1ieTgzYul6w4uOVrk0P0BO5TGGAJk,15006
-autocommand-2.2.2.dist-info/RECORD,,
-autocommand-2.2.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-autocommand-2.2.2.dist-info/top_level.txt,sha256=AzfhgKKS8EdAwWUTSF8mgeVQbXOY9kokHB6kSqwwqu0,12
-autocommand/__init__.py,sha256=zko5Rnvolvb-UXjCx_2ArPTGBWwUK5QY4LIQIKYR7As,1037
-autocommand/__pycache__/__init__.cpython-312.pyc,,
-autocommand/__pycache__/autoasync.cpython-312.pyc,,
-autocommand/__pycache__/autocommand.cpython-312.pyc,,
-autocommand/__pycache__/automain.cpython-312.pyc,,
-autocommand/__pycache__/autoparse.cpython-312.pyc,,
-autocommand/__pycache__/errors.cpython-312.pyc,,
-autocommand/autoasync.py,sha256=AMdyrxNS4pqWJfP_xuoOcImOHWD-qT7x06wmKN1Vp-U,5680
-autocommand/autocommand.py,sha256=hmkEmQ72HtL55gnURVjDOnsfYlGd5lLXbvT4KG496Qw,2505
-autocommand/automain.py,sha256=A2b8i754Mxc_DjU9WFr6vqYDWlhz0cn8miu8d8EsxV8,2076
-autocommand/autoparse.py,sha256=WVWmZJPcbzUKXP40raQw_0HD8qPJ2V9VG1eFFmmnFxw,11642
-autocommand/errors.py,sha256=7aa3roh9Herd6nIKpQHNWEslWE8oq7GiHYVUuRqORnA,886
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/WHEEL b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/WHEEL
deleted file mode 100644
index 57e3d840d5..0000000000
--- a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.38.4)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/top_level.txt b/pkg_resources/_vendor/autocommand-2.2.2.dist-info/top_level.txt
deleted file mode 100644
index dda5158ff6..0000000000
--- a/pkg_resources/_vendor/autocommand-2.2.2.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-autocommand
diff --git a/pkg_resources/_vendor/autocommand/__init__.py b/pkg_resources/_vendor/autocommand/__init__.py
deleted file mode 100644
index 73fbfca6b3..0000000000
--- a/pkg_resources/_vendor/autocommand/__init__.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014-2016 Nathan West
-#
-# This file is part of autocommand.
-#
-# autocommand is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# autocommand is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
-
-# flake8 flags all these imports as unused, hence the NOQAs everywhere.
-
-from .automain import automain  # NOQA
-from .autoparse import autoparse, smart_open  # NOQA
-from .autocommand import autocommand  # NOQA
-
-try:
-    from .autoasync import autoasync  # NOQA
-except ImportError:  # pragma: no cover
-    pass
diff --git a/pkg_resources/_vendor/autocommand/autoasync.py b/pkg_resources/_vendor/autocommand/autoasync.py
deleted file mode 100644
index 688f7e0554..0000000000
--- a/pkg_resources/_vendor/autocommand/autoasync.py
+++ /dev/null
@@ -1,142 +0,0 @@
-# Copyright 2014-2015 Nathan West
-#
-# This file is part of autocommand.
-#
-# autocommand is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# autocommand is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
-
-from asyncio import get_event_loop, iscoroutine
-from functools import wraps
-from inspect import signature
-
-
-async def _run_forever_coro(coro, args, kwargs, loop):
-    '''
-    This helper function launches an async main function that was tagged with
-    forever=True. There are two possibilities:
-
-    - The function is a normal function, which handles initializing the event
-      loop, which is then run forever
-    - The function is a coroutine, which needs to be scheduled in the event
-      loop, which is then run forever
-      - There is also the possibility that the function is a normal function
-        wrapping a coroutine function
-
-    The function is therefore called unconditionally and scheduled in the event
-    loop if the return value is a coroutine object.
-
-    The reason this is a separate function is to make absolutely sure that all
-    the objects created are garbage collected after all is said and done; we
-    do this to ensure that any exceptions raised in the tasks are collected
-    ASAP.
-    '''
-
-    # Personal note: I consider this an antipattern, as it relies on the use of
-    # unowned resources. The setup function dumps some stuff into the event
-    # loop where it just whirls in the ether without a well defined owner or
-    # lifetime. For this reason, there's a good chance I'll remove the
-    # forever=True feature from autoasync at some point in the future.
-    thing = coro(*args, **kwargs)
-    if iscoroutine(thing):
-        await thing
-
-
-def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
-    '''
-    Convert an asyncio coroutine into a function which, when called, is
-    evaluated in an event loop, and the return value returned. This is intended
-    to make it easy to write entry points into asyncio coroutines, which
-    otherwise need to be explicitly evaluated with an event loop's
-    run_until_complete.
-
-    If `loop` is given, it is used as the event loop to run the coro in. If it
-    is None (the default), the loop is retrieved using asyncio.get_event_loop.
-    This call is deferred until the decorated function is called, so that
-    callers can install custom event loops or event loop policies after
-    @autoasync is applied.
-
-    If `forever` is True, the loop is run forever after the decorated coroutine
-    is finished. Use this for servers created with asyncio.start_server and the
-    like.
-
-    If `pass_loop` is True, the event loop object is passed into the coroutine
-    as the `loop` kwarg when the wrapper function is called. In this case, the
-    wrapper function's __signature__ is updated to remove this parameter, so
-    that autoparse can still be used on it without generating a parameter for
-    `loop`.
-
-    This decorator can be used with ( @autoasync(...) ) or without
-    ( @autoasync ) arguments.
-
-    Examples:
-
-    @autoasync
-    def get_file(host, port):
-        reader, writer = yield from asyncio.open_connection(host, port)
-        data = reader.read()
-        sys.stdout.write(data.decode())
-
-    get_file(host, port)
-
-    @autoasync(forever=True, pass_loop=True)
-    def server(host, port, loop):
-        yield from loop.create_server(Proto, host, port)
-
-    server('localhost', 8899)
-
-    '''
-    if coro is None:
-        return lambda c: autoasync(
-            c, loop=loop,
-            forever=forever,
-            pass_loop=pass_loop)
-
-    # The old and new signatures are required to correctly bind the loop
-    # parameter in 100% of cases, even if it's a positional parameter.
-    # NOTE: A future release will probably require the loop parameter to be
-    # a kwonly parameter.
-    if pass_loop:
-        old_sig = signature(coro)
-        new_sig = old_sig.replace(parameters=(
-            param for name, param in old_sig.parameters.items()
-            if name != "loop"))
-
-    @wraps(coro)
-    def autoasync_wrapper(*args, **kwargs):
-        # Defer the call to get_event_loop so that, if a custom policy is
-        # installed after the autoasync decorator, it is respected at call time
-        local_loop = get_event_loop() if loop is None else loop
-
-        # Inject the 'loop' argument. We have to use this signature binding to
-        # ensure it's injected in the correct place (positional, keyword, etc)
-        if pass_loop:
-            bound_args = old_sig.bind_partial()
-            bound_args.arguments.update(
-                loop=local_loop,
-                **new_sig.bind(*args, **kwargs).arguments)
-            args, kwargs = bound_args.args, bound_args.kwargs
-
-        if forever:
-            local_loop.create_task(_run_forever_coro(
-                coro, args, kwargs, local_loop
-            ))
-            local_loop.run_forever()
-        else:
-            return local_loop.run_until_complete(coro(*args, **kwargs))
-
-    # Attach the updated signature. This allows 'pass_loop' to be used with
-    # autoparse
-    if pass_loop:
-        autoasync_wrapper.__signature__ = new_sig
-
-    return autoasync_wrapper
diff --git a/pkg_resources/_vendor/autocommand/autocommand.py b/pkg_resources/_vendor/autocommand/autocommand.py
deleted file mode 100644
index 097e86de07..0000000000
--- a/pkg_resources/_vendor/autocommand/autocommand.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright 2014-2015 Nathan West
-#
-# This file is part of autocommand.
-#
-# autocommand is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# autocommand is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
-
-from .autoparse import autoparse
-from .automain import automain
-try:
-    from .autoasync import autoasync
-except ImportError:  # pragma: no cover
-    pass
-
-
-def autocommand(
-        module, *,
-        description=None,
-        epilog=None,
-        add_nos=False,
-        parser=None,
-        loop=None,
-        forever=False,
-        pass_loop=False):
-
-    if callable(module):
-        raise TypeError('autocommand requires a module name argument')
-
-    def autocommand_decorator(func):
-        # Step 1: if requested, run it all in an asyncio event loop. autoasync
-        # patches the __signature__ of the decorated function, so that in the
-        # event that pass_loop is True, the `loop` parameter of the original
-        # function will *not* be interpreted as a command-line argument by
-        # autoparse
-        if loop is not None or forever or pass_loop:
-            func = autoasync(
-                func,
-                loop=None if loop is True else loop,
-                pass_loop=pass_loop,
-                forever=forever)
-
-        # Step 2: create parser. We do this second so that the arguments are
-        # parsed and passed *before* entering the asyncio event loop, if it
-        # exists. This simplifies the stack trace and ensures errors are
-        # reported earlier. It also ensures that errors raised during parsing &
-        # passing are still raised if `forever` is True.
-        func = autoparse(
-            func,
-            description=description,
-            epilog=epilog,
-            add_nos=add_nos,
-            parser=parser)
-
-        # Step 3: call the function automatically if __name__ == '__main__' (or
-        # if True was provided)
-        func = automain(module)(func)
-
-        return func
-
-    return autocommand_decorator
diff --git a/pkg_resources/_vendor/autocommand/automain.py b/pkg_resources/_vendor/autocommand/automain.py
deleted file mode 100644
index 6cc45db66a..0000000000
--- a/pkg_resources/_vendor/autocommand/automain.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2014-2015 Nathan West
-#
-# This file is part of autocommand.
-#
-# autocommand is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# autocommand is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with autocommand.  If not, see <http://www.gnu.org/licenses/>.
-
-import sys
-from .errors import AutocommandError
-
-
-class AutomainRequiresModuleError(AutocommandError, TypeError):
-    pass
-
-
-def automain(module, *, args=(), kwargs=None):
-    '''
-    This decorator automatically invokes a function if the module is being run
-    as the "__main__" module. Optionally, provide args or kwargs with which to
-    call the function. If `module` is "__main__", the function is called, and
-    the program is `sys.exit`ed with the return value. You can also pass `True`
-    to cause the function to be called unconditionally. If the function is not
-    called, it is returned unchanged by the decorator.
-
-    Usage:
-
-    @automain(__name__)  # Pass __name__ to check __name__=="__main__"
-    def main():
-        ...
-
-    If __name__ is "__main__" here, the main function is called, and then
-    sys.exit is called with the return value.
-    '''
-
-    # Check that @automain(...) was called, rather than @automain
-    if callable(module):
-        raise AutomainRequiresModuleError(module)
-
-    if module == '__main__' or module is True:
-        if kwargs is None:
-            kwargs = {}
-
-        # Use a function definition instead of a lambda for a neater traceback
-        def automain_decorator(main):
-            sys.exit(main(*args, **kwargs))
-
-        return automain_decorator
-    else:
-        return lambda main: main
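Besides checking `__name__`, automain accepted `True` to force the call unconditionally, which suited generated wrapper scripts. A hedged sketch; the function and its arguments are illustrative:

    from autocommand import automain

    # Runs immediately and sys.exits with the return value, no __name__ check.
    @automain(True, args=('build',), kwargs={'verbose': True})
    def main(command, verbose=False):
        print(command, verbose)
        return 0  # becomes the process exit status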
diff --git a/pkg_resources/_vendor/autocommand/autoparse.py b/pkg_resources/_vendor/autocommand/autoparse.py
deleted file mode 100644
index 0276a3fae1..0000000000
--- a/pkg_resources/_vendor/autocommand/autoparse.py
+++ /dev/null
@@ -1,333 +0,0 @@
-# Copyright 2014-2015 Nathan West
-#
-# This file is part of autocommand.
-#
-# autocommand is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# autocommand is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with autocommand.  If not, see <https://www.gnu.org/licenses/>.
-
-import sys
-from re import compile as compile_regex
-from inspect import signature, getdoc, Parameter
-from argparse import ArgumentParser
-from contextlib import contextmanager
-from functools import wraps
-from io import IOBase
-from autocommand.errors import AutocommandError
-
-
-_empty = Parameter.empty
-
-
-class AnnotationError(AutocommandError):
-    '''Annotation error: annotation must be a string, type, or tuple of both'''
-
-
-class PositionalArgError(AutocommandError):
-    '''
-    Positional Arg Error: autocommand can't handle positional-only parameters
-    '''
-
-
-class KWArgError(AutocommandError):
-    '''kwarg Error: autocommand can't handle a **kwargs parameter'''
-
-
-class DocstringError(AutocommandError):
-    '''Docstring error'''
-
-
-class TooManySplitsError(DocstringError):
-    '''
-    The docstring had too many ---- section splits. Currently we only support
-    using up to a single split, to split the docstring into description and
-    epilog parts.
-    '''
-
-
-def _get_type_description(annotation):
-    '''
-    Given an annotation, return the (type, description) for the parameter.
-    If you provide an annotation that is somehow both a string and a callable,
-    the behavior is undefined.
-    '''
-    if annotation is _empty:
-        return None, None
-    elif callable(annotation):
-        return annotation, None
-    elif isinstance(annotation, str):
-        return None, annotation
-    elif isinstance(annotation, tuple):
-        try:
-            arg1, arg2 = annotation
-        except ValueError as e:
-            raise AnnotationError(annotation) from e
-        else:
-            if callable(arg1) and isinstance(arg2, str):
-                return arg1, arg2
-            elif isinstance(arg1, str) and callable(arg2):
-                return arg2, arg1
-
-    raise AnnotationError(annotation)
-
-
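Concretely, the three annotation shapes accepted by `_get_type_description` look like this on a signature (parameter names invented for illustration):

    def example(
            count: int,                      # type only
            label: 'how to label the run',   # description only
            rate: (float, 'ticks per sec'),  # type and description, either order
    ):
        ...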
-def _add_arguments(param, parser, used_char_args, add_nos):
-    '''
-    Add the argument(s) to an ArgumentParser (using add_argument) for a given
-    parameter. used_char_args is the set of -short options already in use,
-    and is updated (if necessary) by this function. If add_nos is True,
-    this will also add an inverse switch for all boolean options. For
-    instance, for the boolean parameter "verbose", this will create --verbose
-    and --no-verbose.
-    '''
-
-    # Impl note: This function is kept separate from make_parser because it's
-    # already very long and I wanted to separate out as much as possible into
-    # its own call scope, to prevent even the possibility of subtle mutation
-    # bugs.
-    if param.kind is param.POSITIONAL_ONLY:
-        raise PositionalArgError(param)
-    elif param.kind is param.VAR_KEYWORD:
-        raise KWArgError(param)
-
-    # These are the kwargs for the add_argument function.
-    arg_spec = {}
-    is_option = False
-
-    # Get the type and default from the annotation.
-    arg_type, description = _get_type_description(param.annotation)
-
-    # Get the default value
-    default = param.default
-
-    # If there is no explicit type, and the default is present and not None,
-    # infer the type from the default.
-    if arg_type is None and default not in {_empty, None}:
-        arg_type = type(default)
-
-    # Add default. The presence of a default means this is an option, not an
-    # argument.
-    if default is not _empty:
-        arg_spec['default'] = default
-        is_option = True
-
-    # Add the type
-    if arg_type is not None:
-        # Special case for bool: make it just a --switch
-        if arg_type is bool:
-            if not default or default is _empty:
-                arg_spec['action'] = 'store_true'
-            else:
-                arg_spec['action'] = 'store_false'
-
-            # Switches are always options
-            is_option = True
-
-        # Special case for file types: make it a string type, for filename
-        elif isinstance(default, IOBase):
-            arg_spec['type'] = str
-
-        # TODO: special case for list type.
-        #   - How to specify type of list members?
-        #       - param: [int]
-        #       - param: int =[]
-        #   - action='append' vs nargs='*'
-
-        else:
-            arg_spec['type'] = arg_type
-
-    # nargs: if the signature includes *args, collect them as trailing CLI
-    # arguments in a list. *args can't have a default value, so it can never be
-    # an option.
-    if param.kind is param.VAR_POSITIONAL:
-        # TODO: consider depluralizing metavar/name here.
-        arg_spec['nargs'] = '*'
-
-    # Add description.
-    if description is not None:
-        arg_spec['help'] = description
-
-    # Get the --flags
-    flags = []
-    name = param.name
-
-    if is_option:
-        # Add the first letter as a -short option.
-        for letter in name[0], name[0].swapcase():
-            if letter not in used_char_args:
-                used_char_args.add(letter)
-                flags.append('-{}'.format(letter))
-                break
-
-        # If the parameter is a --long option, or is a -short option that
-        # somehow failed to get a flag, add it.
-        if len(name) > 1 or not flags:
-            flags.append('--{}'.format(name))
-
-        arg_spec['dest'] = name
-    else:
-        flags.append(name)
-
-    parser.add_argument(*flags, **arg_spec)
-
-    # Create the --no- version for boolean switches
-    if add_nos and arg_type is bool:
-        parser.add_argument(
-            '--no-{}'.format(name),
-            action='store_const',
-            dest=name,
-            const=default if default is not _empty else False)
-
-
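Put together, a boolean parameter such as `verbose=False` yields a `--verbose` switch, and with add_nos also a `--no-verbose` switch sharing the same dest, so the last one on the command line wins. A minimal sketch using the package's public decorator (defined further down in this file):

    from autocommand import autoparse

    @autoparse(add_nos=True)
    def prog(verbose=False):
        print(verbose)

    # prog(['--verbose'])                  -> prints True
    # prog(['--verbose', '--no-verbose'])  -> prints False (last switch wins)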
-def make_parser(func_sig, description, epilog, add_nos):
-    '''
-    Given the signature of a function, create an ArgumentParser
-    '''
-    parser = ArgumentParser(description=description, epilog=epilog)
-
-    used_char_args = {'h'}
-
-    # Arrange the params so that single-character arguments are first. This
-    # ensures they don't have to get --long versions. sorted is stable, so the
-    # parameters will otherwise still be in relative order.
-    params = sorted(
-        func_sig.parameters.values(),
-        key=lambda param: len(param.name) > 1)
-
-    for param in params:
-        _add_arguments(param, parser, used_char_args, add_nos)
-
-    return parser
-
-
-_DOCSTRING_SPLIT = compile_regex(r'\n\s*-{4,}\s*\n')
-
-
-def parse_docstring(docstring):
-    '''
-    Given a docstring, parse it into a description and epilog part
-    '''
-    if docstring is None:
-        return '', ''
-
-    parts = _DOCSTRING_SPLIT.split(docstring)
-
-    if len(parts) == 1:
-        return docstring, ''
-    elif len(parts) == 2:
-        return parts[0], parts[1]
-    else:
-        raise TooManySplitsError()
-
-
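For example, a run of four or more dashes on its own line is the separator `parse_docstring` honors (import path shown as in the original autocommand package):

    from autocommand.autoparse import parse_docstring

    description, epilog = parse_docstring(
        "Frobnicate the inputs.\n----\nExit status is 0 on success.\n")
    # description == "Frobnicate the inputs."
    # epilog == "Exit status is 0 on success.\n"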
-def autoparse(
-        func=None, *,
-        description=None,
-        epilog=None,
-        add_nos=False,
-        parser=None):
-    '''
-    This decorator converts a function that takes normal arguments into a
-    function which takes a single optional argument, argv, parses it using an
-    argparse.ArgumentParser, and calls the underlying function with the parsed
-    arguments. If argv is not given, sys.argv[1:] is used. This is so that the
-    function can be used as a setuptools entry point, as well as a normal main
-    function. sys.argv[1:] is not evaluated until the function is called, to
-    allow injecting different arguments for testing.
-
-    It uses the argument signature of the function to create an
-    ArgumentParser. Parameters without defaults become positional parameters,
-    while parameters *with* defaults become --options. Use annotations to set
-    the type of the parameter.
-
-    The `description` and `epilog` parameters correspond to the same respective
-    argparse parameters. If no description is given, it defaults to the
-    decorated function's docstring, if present.
-
-    If add_nos is True, every boolean option (that is, every parameter with a
-    default of True/False or a type of bool) will have a --no- version created
-    as well, which inverts the option. For instance, the --verbose option will
-    have a --no-verbose counterpart. These are not mutually exclusive;
-    whichever one appears last in the argument list will have precedence.
-
-    If a parser is given, it is used instead of one generated from the function
-    signature. In this case, no parser is created; instead, the given parser is
-    used to parse the argv argument. The parsed result's argument names must
-    match up with the parameter names of the decorated function.
-
-    The decorated function is attached to the result as the `func` attribute,
-    and the parser is attached as the `parser` attribute.
-    '''
-
-    # If @autoparse(...) is used instead of @autoparse
-    if func is None:
-        return lambda f: autoparse(
-            f, description=description,
-            epilog=epilog,
-            add_nos=add_nos,
-            parser=parser)
-
-    func_sig = signature(func)
-
-    docstr_description, docstr_epilog = parse_docstring(getdoc(func))
-
-    if parser is None:
-        parser = make_parser(
-            func_sig,
-            description or docstr_description,
-            epilog or docstr_epilog,
-            add_nos)
-
-    @wraps(func)
-    def autoparse_wrapper(argv=None):
-        if argv is None:
-            argv = sys.argv[1:]
-
-        # Get empty argument binding, to fill with parsed arguments. This
-        # object does all the heavy lifting of turning named arguments
-        # into correctly bound *args and **kwargs.
-        parsed_args = func_sig.bind_partial()
-        parsed_args.arguments.update(vars(parser.parse_args(argv)))
-
-        return func(*parsed_args.args, **parsed_args.kwargs)
-
-    # TODO: attach an updated __signature__ to autoparse_wrapper, just in case.
-
-    # Attach the wrapped function and parser, and return the wrapper.
-    autoparse_wrapper.func = func
-    autoparse_wrapper.parser = parser
-    return autoparse_wrapper
-
-
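Taken together, decorating a plain function with autoparse produced a ready-made CLI; the `func` and `parser` attributes kept the undecorated function and the generated ArgumentParser reachable for tests. A minimal sketch (names illustrative):

    from autocommand import autoparse

    @autoparse
    def copy(src, dest, force=False):
        """Copy src to dest."""
        print(src, dest, force)

    copy(['a.txt', 'b.txt', '--force'])  # parses the list as argv, then calls
    copy.parser.print_usage()            # the generated ArgumentParser
    copy.func('x', 'y')                  # the original, unwrapped function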
-@contextmanager
-def smart_open(filename_or_file, *args, **kwargs):
-    '''
-    This context manager allows you to open a filename, while letting you
-    default to an already-existing file object, like sys.stdout, which
-    shouldn't be closed at the end of the context. If the filename argument is
-    a str, bytes, or int, the file object is created via a call to open with
-    the given *args and **kwargs, sent to the context, and closed at the end
-    of the context, just like "with open(filename) as f:". If it isn't one of
-    the openable types, the object is simply sent to the context unchanged and
-    left unclosed at the end of the context. Example:
-
-        def work_with_file(name=sys.stdout):
-            with smart_open(name) as f:
-                # Works correctly if name is a str filename or sys.stdout
-                print("Some stuff", file=f)
-                # If it was a filename, f is closed at the end here.
-    '''
-    if isinstance(filename_or_file, (str, bytes, int)):
-        with open(filename_or_file, *args, **kwargs) as file:
-            yield file
-    else:
-        yield filename_or_file
diff --git a/pkg_resources/_vendor/autocommand/errors.py b/pkg_resources/_vendor/autocommand/errors.py
deleted file mode 100644
index 2570607399..0000000000
--- a/pkg_resources/_vendor/autocommand/errors.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2014-2016 Nathan West
-#
-# This file is part of autocommand.
-#
-# autocommand is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# autocommand is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with autocommand.  If not, see <https://www.gnu.org/licenses/>.
-
-
-class AutocommandError(Exception):
-    '''Base class for autocommand exceptions'''
-    pass
-
-# Individual modules will define errors specific to that module.
diff --git a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/METADATA b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
deleted file mode 100644
index db0a2dcdbe..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/METADATA
+++ /dev/null
@@ -1,46 +0,0 @@
-Metadata-Version: 2.1
-Name: backports.tarfile
-Version: 1.2.0
-Summary: Backport of CPython tarfile module
-Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
-Project-URL: Homepage, https://github.com/jaraco/backports.tarfile
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
-License-File: LICENSE
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: jaraco.test ; extra == 'testing'
-Requires-Dist: pytest !=8.0.* ; extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/backports.tarfile.svg
-   :target: https://pypi.org/project/backports.tarfile
-
-.. image:: https://img.shields.io/pypi/pyversions/backports.tarfile.svg
-
-.. image:: https://github.com/jaraco/backports.tarfile/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/jaraco/backports.tarfile/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. .. image:: https://readthedocs.org/projects/backportstarfile/badge/?version=latest
-..    :target: https://backportstarfile.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
diff --git a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/RECORD b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
deleted file mode 100644
index 536dc2f09e..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/RECORD
+++ /dev/null
@@ -1,17 +0,0 @@
-backports.tarfile-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-backports.tarfile-1.2.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-backports.tarfile-1.2.0.dist-info/METADATA,sha256=ghXFTq132dxaEIolxr3HK1mZqm9iyUmaRANZQSr6WlE,2020
-backports.tarfile-1.2.0.dist-info/RECORD,,
-backports.tarfile-1.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports.tarfile-1.2.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-backports.tarfile-1.2.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
-backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81
-backports/__pycache__/__init__.cpython-312.pyc,,
-backports/tarfile/__init__.py,sha256=Pwf2qUIfB0SolJPCKcx3vz3UEu_aids4g4sAfxy94qg,108491
-backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59
-backports/tarfile/__pycache__/__init__.cpython-312.pyc,,
-backports/tarfile/__pycache__/__main__.cpython-312.pyc,,
-backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-backports/tarfile/compat/__pycache__/__init__.cpython-312.pyc,,
-backports/tarfile/compat/__pycache__/py38.cpython-312.pyc,,
-backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568
diff --git a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt b/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt
deleted file mode 100644
index 99d2be5b64..0000000000
--- a/pkg_resources/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-backports
diff --git a/pkg_resources/_vendor/backports/__init__.py b/pkg_resources/_vendor/backports/__init__.py
deleted file mode 100644
index 0d1f7edf5d..0000000000
--- a/pkg_resources/_vendor/backports/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore
diff --git a/pkg_resources/_vendor/backports/tarfile/__init__.py b/pkg_resources/_vendor/backports/tarfile/__init__.py
deleted file mode 100644
index 8c16881cb3..0000000000
--- a/pkg_resources/_vendor/backports/tarfile/__init__.py
+++ /dev/null
@@ -1,2937 +0,0 @@
-#-------------------------------------------------------------------
-# tarfile.py
-#-------------------------------------------------------------------
-# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
-# All rights reserved.
-#
-# Permission  is  hereby granted,  free  of charge,  to  any person
-# obtaining a  copy of  this software  and associated documentation
-# files  (the  "Software"),  to   deal  in  the  Software   without
-# restriction,  including  without limitation  the  rights to  use,
-# copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies  of  the  Software,  and to  permit  persons  to  whom the
-# Software  is  furnished  to  do  so,  subject  to  the  following
-# conditions:
-#
-# The above copyright  notice and this  permission notice shall  be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
-# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
-# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
-# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
-# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
-# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-#
-"""Read from and write to tar format archives.
-"""
-
-version     = "0.9.0"
-__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
-__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
-
-#---------
-# Imports
-#---------
-from builtins import open as bltn_open
-import sys
-import os
-import io
-import shutil
-import stat
-import time
-import struct
-import copy
-import re
-
-from .compat.py38 import removesuffix
-
-try:
-    import pwd
-except ImportError:
-    pwd = None
-try:
-    import grp
-except ImportError:
-    grp = None
-
-# os.symlink on Windows prior to 6.0 raises NotImplementedError
-# OSError (winerror=1314) will be raised if the caller does not hold the
-# SeCreateSymbolicLinkPrivilege privilege
-symlink_exception = (AttributeError, NotImplementedError, OSError)
-
-# from tarfile import *
-__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
-           "CompressionError", "StreamError", "ExtractError", "HeaderError",
-           "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
-           "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
-           "tar_filter", "FilterError", "AbsoluteLinkError",
-           "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
-           "LinkOutsideDestinationError"]
-
-
-#---------------------------------------------------------
-# tar constants
-#---------------------------------------------------------
-NUL = b"\0"                     # the null character
-BLOCKSIZE = 512                 # length of processing blocks
-RECORDSIZE = BLOCKSIZE * 20     # length of records
-GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
-POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string
-
-LENGTH_NAME = 100               # maximum length of a filename
-LENGTH_LINK = 100               # maximum length of a linkname
-LENGTH_PREFIX = 155             # maximum length of the prefix field
-
-REGTYPE = b"0"                  # regular file
-AREGTYPE = b"\0"                # regular file
-LNKTYPE = b"1"                  # link (inside tarfile)
-SYMTYPE = b"2"                  # symbolic link
-CHRTYPE = b"3"                  # character special device
-BLKTYPE = b"4"                  # block special device
-DIRTYPE = b"5"                  # directory
-FIFOTYPE = b"6"                 # fifo special device
-CONTTYPE = b"7"                 # contiguous file
-
-GNUTYPE_LONGNAME = b"L"         # GNU tar longname
-GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
-GNUTYPE_SPARSE = b"S"           # GNU tar sparse file
-
-XHDTYPE = b"x"                  # POSIX.1-2001 extended header
-XGLTYPE = b"g"                  # POSIX.1-2001 global header
-SOLARIS_XHDTYPE = b"X"          # Solaris extended header
-
-USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
-GNU_FORMAT = 1                  # GNU tar format
-PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
-DEFAULT_FORMAT = PAX_FORMAT
-
-#---------------------------------------------------------
-# tarfile constants
-#---------------------------------------------------------
-# File types that tarfile supports:
-SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
-                   SYMTYPE, DIRTYPE, FIFOTYPE,
-                   CONTTYPE, CHRTYPE, BLKTYPE,
-                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-                   GNUTYPE_SPARSE)
-
-# File types that will be treated as a regular file.
-REGULAR_TYPES = (REGTYPE, AREGTYPE,
-                 CONTTYPE, GNUTYPE_SPARSE)
-
-# File types that are part of the GNU tar format.
-GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-             GNUTYPE_SPARSE)
-
-# Fields from a pax header that override a TarInfo attribute.
-PAX_FIELDS = ("path", "linkpath", "size", "mtime",
-              "uid", "gid", "uname", "gname")
-
-# Fields from a pax header that are affected by hdrcharset.
-PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
-
-# Fields in a pax header that are numbers, all other fields
-# are treated as strings.
-PAX_NUMBER_FIELDS = {
-    "atime": float,
-    "ctime": float,
-    "mtime": float,
-    "uid": int,
-    "gid": int,
-    "size": int
-}
-
-#---------------------------------------------------------
-# initialization
-#---------------------------------------------------------
-if os.name == "nt":
-    ENCODING = "utf-8"
-else:
-    ENCODING = sys.getfilesystemencoding()
-
-#---------------------------------------------------------
-# Some useful functions
-#---------------------------------------------------------
-
-def stn(s, length, encoding, errors):
-    """Convert a string to a null-terminated bytes object.
-    """
-    if s is None:
-        raise ValueError("metadata cannot contain None")
-    s = s.encode(encoding, errors)
-    return s[:length] + (length - len(s)) * NUL
-
-def nts(s, encoding, errors):
-    """Convert a null-terminated bytes object to a string.
-    """
-    p = s.find(b"\0")
-    if p != -1:
-        s = s[:p]
-    return s.decode(encoding, errors)
-
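The two helpers above are inverses over the fixed-width ustar string fields, padding with NULs on the way in and truncating at the first NUL on the way out. For instance:

    assert stn("abc", 5, "utf-8", "strict") == b"abc\x00\x00"
    assert nts(b"abc\x00\x00", "utf-8", "strict") == "abc"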
-def nti(s):
-    """Convert a number field to a python number.
-    """
-    # There are two possible encodings for a number field, see
-    # itn() below.
-    if s[0] in (0o200, 0o377):
-        n = 0
-        for i in range(len(s) - 1):
-            n <<= 8
-            n += s[i + 1]
-        if s[0] == 0o377:
-            n = -(256 ** (len(s) - 1) - n)
-    else:
-        try:
-            s = nts(s, "ascii", "strict")
-            n = int(s.strip() or "0", 8)
-        except ValueError:
-            raise InvalidHeaderError("invalid header")
-    return n
-
-def itn(n, digits=8, format=DEFAULT_FORMAT):
-    """Convert a python number to a number field.
-    """
-    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
-    # octal digits followed by a null-byte, this allows values up to
-    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
-    # that if necessary. A leading 0o200 or 0o377 byte indicate this
-    # particular encoding, the following digits-1 bytes are a big-endian
-    # base-256 representation. This allows values up to (256**(digits-1))-1.
-    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
-    # number.
-    original_n = n
-    n = int(n)
-    if 0 <= n < 8 ** (digits - 1):
-        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
-    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
-        if n >= 0:
-            s = bytearray([0o200])
-        else:
-            s = bytearray([0o377])
-            n = 256 ** digits + n
-
-        for i in range(digits - 1):
-            s.insert(1, n & 0o377)
-            n >>= 8
-    else:
-        raise ValueError("overflow in number field")
-
-    return s
-
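The two encodings round-trip through nti(): values that fit use octal text, while larger or negative ones fall back to the GNU base-256 form (GNU_FORMAT only). For instance:

    assert itn(0o755, 8) == b"0000755\x00"          # octal digits + NUL
    assert nti(itn(8**7, 8, GNU_FORMAT)) == 8**7    # 0o200-marked base-256
    assert nti(itn(-1, 8, GNU_FORMAT)) == -1        # 0o377 marks negatives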
-def calc_chksums(buf):
-    """Calculate the checksum for a member's header by summing up all
-       characters except for the chksum field which is treated as if
-       it was filled with spaces. According to the GNU tar sources,
-       some tars (Sun and NeXT) calculate chksum with signed char,
-       which will be different if there are chars in the buffer with
-       the high bit set. So we calculate two checksums, unsigned and
-       signed.
-    """
-    unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
-    signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
-    return unsigned_chksum, signed_chksum
-
-def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
-    """Copy length bytes from fileobj src to fileobj dst.
-       If length is None, copy the entire content.
-    """
-    bufsize = bufsize or 16 * 1024
-    if length == 0:
-        return
-    if length is None:
-        shutil.copyfileobj(src, dst, bufsize)
-        return
-
-    blocks, remainder = divmod(length, bufsize)
-    for b in range(blocks):
-        buf = src.read(bufsize)
-        if len(buf) < bufsize:
-            raise exception("unexpected end of data")
-        dst.write(buf)
-
-    if remainder != 0:
-        buf = src.read(remainder)
-        if len(buf) < remainder:
-            raise exception("unexpected end of data")
-        dst.write(buf)
-    return
-
-def _safe_print(s):
-    encoding = getattr(sys.stdout, 'encoding', None)
-    if encoding is not None:
-        s = s.encode(encoding, 'backslashreplace').decode(encoding)
-    print(s, end=' ')
-
-
-class TarError(Exception):
-    """Base exception."""
-    pass
-class ExtractError(TarError):
-    """General exception for extract errors."""
-    pass
-class ReadError(TarError):
-    """Exception for unreadable tar archives."""
-    pass
-class CompressionError(TarError):
-    """Exception for unavailable compression methods."""
-    pass
-class StreamError(TarError):
-    """Exception for unsupported operations on stream-like TarFiles."""
-    pass
-class HeaderError(TarError):
-    """Base exception for header errors."""
-    pass
-class EmptyHeaderError(HeaderError):
-    """Exception for empty headers."""
-    pass
-class TruncatedHeaderError(HeaderError):
-    """Exception for truncated headers."""
-    pass
-class EOFHeaderError(HeaderError):
-    """Exception for end of file headers."""
-    pass
-class InvalidHeaderError(HeaderError):
-    """Exception for invalid headers."""
-    pass
-class SubsequentHeaderError(HeaderError):
-    """Exception for missing and invalid extended headers."""
-    pass
-
-#---------------------------
-# internal stream interface
-#---------------------------
-class _LowLevelFile:
-    """Low-level file object. Supports reading and writing.
-       It is used instead of a regular file object for streaming
-       access.
-    """
-
-    def __init__(self, name, mode):
-        mode = {
-            "r": os.O_RDONLY,
-            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
-        }[mode]
-        if hasattr(os, "O_BINARY"):
-            mode |= os.O_BINARY
-        self.fd = os.open(name, mode, 0o666)
-
-    def close(self):
-        os.close(self.fd)
-
-    def read(self, size):
-        return os.read(self.fd, size)
-
-    def write(self, s):
-        os.write(self.fd, s)
-
-class _Stream:
-    """Class that serves as an adapter between TarFile and
-       a stream-like object.  The stream-like object only
-       needs to have a read() or write() method that works with bytes,
-       and the method is accessed blockwise.
-       Use of gzip or bzip2 compression is possible.
-       A stream-like object could be for example: sys.stdin.buffer,
-       sys.stdout.buffer, a socket, a tape device etc.
-
-       _Stream is intended to be used only internally.
-    """
-
-    def __init__(self, name, mode, comptype, fileobj, bufsize,
-                 compresslevel):
-        """Construct a _Stream object.
-        """
-        self._extfileobj = True
-        if fileobj is None:
-            fileobj = _LowLevelFile(name, mode)
-            self._extfileobj = False
-
-        if comptype == '*':
-            # Enable transparent compression detection for the
-            # stream interface
-            fileobj = _StreamProxy(fileobj)
-            comptype = fileobj.getcomptype()
-
-        self.name     = name or ""
-        self.mode     = mode
-        self.comptype = comptype
-        self.fileobj  = fileobj
-        self.bufsize  = bufsize
-        self.buf      = b""
-        self.pos      = 0
-        self.closed   = False
-
-        try:
-            if comptype == "gz":
-                try:
-                    import zlib
-                except ImportError:
-                    raise CompressionError("zlib module is not available") from None
-                self.zlib = zlib
-                self.crc = zlib.crc32(b"")
-                if mode == "r":
-                    self.exception = zlib.error
-                    self._init_read_gz()
-                else:
-                    self._init_write_gz(compresslevel)
-
-            elif comptype == "bz2":
-                try:
-                    import bz2
-                except ImportError:
-                    raise CompressionError("bz2 module is not available") from None
-                if mode == "r":
-                    self.dbuf = b""
-                    self.cmp = bz2.BZ2Decompressor()
-                    self.exception = OSError
-                else:
-                    self.cmp = bz2.BZ2Compressor(compresslevel)
-
-            elif comptype == "xz":
-                try:
-                    import lzma
-                except ImportError:
-                    raise CompressionError("lzma module is not available") from None
-                if mode == "r":
-                    self.dbuf = b""
-                    self.cmp = lzma.LZMADecompressor()
-                    self.exception = lzma.LZMAError
-                else:
-                    self.cmp = lzma.LZMACompressor()
-
-            elif comptype != "tar":
-                raise CompressionError("unknown compression type %r" % comptype)
-
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    def __del__(self):
-        if hasattr(self, "closed") and not self.closed:
-            self.close()
-
-    def _init_write_gz(self, compresslevel):
-        """Initialize for writing with gzip compression.
-        """
-        self.cmp = self.zlib.compressobj(compresslevel,
-                                         self.zlib.DEFLATED,
-                                         -self.zlib.MAX_WBITS,
-                                         self.zlib.DEF_MEM_LEVEL,
-                                         0)
-        timestamp = struct.pack("<L", int(time.time()))
-        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
-        if self.name.endswith(".gz"):
-            self.name = self.name[:-3]
-        # Honor "directory components removed" from RFC 1952
-        self.name = os.path.basename(self.name)
-        # RFC 1952 says the FNAME field is terminated by NUL
-        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
-
-    def write(self, s):
-        """Write string s to the stream.
-        """
-        if self.comptype == "gz":
-            self.crc = self.zlib.crc32(s, self.crc)
-        self.pos += len(s)
-        if self.comptype != "tar":
-            s = self.cmp.compress(s)
-        self.__write(s)
-
-    def __write(self, s):
-        """Write string s to the stream if a whole new block
-           is ready to be written.
-        """
-        self.buf += s
-        while len(self.buf) > self.bufsize:
-            self.fileobj.write(self.buf[:self.bufsize])
-            self.buf = self.buf[self.bufsize:]
-
-    def close(self):
-        """Close the _Stream object. No operation should be
-           done on it afterwards.
-        """
-        if self.closed:
-            return
-
-        self.closed = True
-        try:
-            if self.mode == "w" and self.comptype != "tar":
-                self.buf += self.cmp.flush()
-
-            if self.mode == "w" and self.buf:
-                self.fileobj.write(self.buf)
-                self.buf = b""
-                if self.comptype == "gz":
-                    self.fileobj.write(struct.pack("<L", self.crc))
-                    self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
-        finally:
-            if not self._extfileobj:
-                self.fileobj.close()
-
-    def _init_read_gz(self):
-        """Initialize for reading a gzip compressed fileobj.
-        """
-        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
-        self.dbuf = b""
-
-        # taken from gzip.GzipFile with some alterations
-        if self.__read(2) != b"\037\213":
-            raise ReadError("not a gzip file")
-        if self.__read(1) != b"\010":
-            raise CompressionError("unsupported compression method")
-
-        flag = ord(self.__read(1))
-        self.__read(6)
-
-        if flag & 4:
-            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
-            self.read(xlen)
-        if flag & 8:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 16:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 2:
-            self.__read(2)
-
-    def tell(self):
-        """Return the stream's file pointer position.
-        """
-        return self.pos
-
-    def seek(self, pos=0):
-        """Set the stream's file pointer to pos. Negative seeking
-           is forbidden.
-        """
-        if pos - self.pos >= 0:
-            blocks, remainder = divmod(pos - self.pos, self.bufsize)
-            for i in range(blocks):
-                self.read(self.bufsize)
-            self.read(remainder)
-        else:
-            raise StreamError("seeking backwards is not allowed")
-        return self.pos
-
-    def read(self, size):
-        """Return the next size number of bytes from the stream."""
-        assert size is not None
-        buf = self._read(size)
-        self.pos += len(buf)
-        return buf
-
-    def _read(self, size):
-        """Return size bytes from the stream.
-        """
-        if self.comptype == "tar":
-            return self.__read(size)
-
-        c = len(self.dbuf)
-        t = [self.dbuf]
-        while c < size:
-            # Skip underlying buffer to avoid unaligned double buffering.
-            if self.buf:
-                buf = self.buf
-                self.buf = b""
-            else:
-                buf = self.fileobj.read(self.bufsize)
-                if not buf:
-                    break
-            try:
-                buf = self.cmp.decompress(buf)
-            except self.exception as e:
-                raise ReadError("invalid compressed data") from e
-            t.append(buf)
-            c += len(buf)
-        t = b"".join(t)
-        self.dbuf = t[size:]
-        return t[:size]
-
-    def __read(self, size):
-        """Return size bytes from stream. If internal buffer is empty,
-           read another block from the stream.
-        """
-        c = len(self.buf)
-        t = [self.buf]
-        while c < size:
-            buf = self.fileobj.read(self.bufsize)
-            if not buf:
-                break
-            t.append(buf)
-            c += len(buf)
-        t = b"".join(t)
-        self.buf = t[size:]
-        return t[:size]
-# class _Stream
-
-class _StreamProxy(object):
-    """Small proxy class that enables transparent compression
-       detection for the Stream interface (mode 'r|*').
-    """
-
-    def __init__(self, fileobj):
-        self.fileobj = fileobj
-        self.buf = self.fileobj.read(BLOCKSIZE)
-
-    def read(self, size):
-        self.read = self.fileobj.read
-        return self.buf
-
-    def getcomptype(self):
-        if self.buf.startswith(b"\x1f\x8b\x08"):
-            return "gz"
-        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
-            return "bz2"
-        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
-            return "xz"
-        else:
-            return "tar"
-
-    def close(self):
-        self.fileobj.close()
-# class StreamProxy
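This magic-byte sniffing is what backs the `'r|*'` stream mode at the TarFile level. A usage sketch (the archive name is invented):

    import tarfile  # the stdlib module this file backports

    # 'r|*' reads a non-seekable stream, auto-detecting gz/bz2/xz/plain tar.
    with tarfile.open("archive.tar.gz", mode="r|*") as tf:
        for member in tf:
            print(member.name)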
-
-#------------------------
-# Extraction file object
-#------------------------
-class _FileInFile(object):
-    """A thin wrapper around an existing file object that
-       provides a part of its data as an individual file
-       object.
-    """
-
-    def __init__(self, fileobj, offset, size, name, blockinfo=None):
-        self.fileobj = fileobj
-        self.offset = offset
-        self.size = size
-        self.position = 0
-        self.name = name
-        self.closed = False
-
-        if blockinfo is None:
-            blockinfo = [(0, size)]
-
-        # Construct a map with data and zero blocks.
-        self.map_index = 0
-        self.map = []
-        lastpos = 0
-        realpos = self.offset
-        for offset, size in blockinfo:
-            if offset > lastpos:
-                self.map.append((False, lastpos, offset, None))
-            self.map.append((True, offset, offset + size, realpos))
-            realpos += size
-            lastpos = offset + size
-        if lastpos < self.size:
-            self.map.append((False, lastpos, self.size, None))
-
-    def flush(self):
-        pass
-
-    @property
-    def mode(self):
-        return 'rb'
-
-    def readable(self):
-        return True
-
-    def writable(self):
-        return False
-
-    def seekable(self):
-        return self.fileobj.seekable()
-
-    def tell(self):
-        """Return the current file position.
-        """
-        return self.position
-
-    def seek(self, position, whence=io.SEEK_SET):
-        """Seek to a position in the file.
-        """
-        if whence == io.SEEK_SET:
-            self.position = min(max(position, 0), self.size)
-        elif whence == io.SEEK_CUR:
-            if position < 0:
-                self.position = max(self.position + position, 0)
-            else:
-                self.position = min(self.position + position, self.size)
-        elif whence == io.SEEK_END:
-            self.position = max(min(self.size + position, self.size), 0)
-        else:
-            raise ValueError("Invalid argument")
-        return self.position
-
-    def read(self, size=None):
-        """Read data from the file.
-        """
-        if size is None:
-            size = self.size - self.position
-        else:
-            size = min(size, self.size - self.position)
-
-        buf = b""
-        while size > 0:
-            while True:
-                data, start, stop, offset = self.map[self.map_index]
-                if start <= self.position < stop:
-                    break
-                else:
-                    self.map_index += 1
-                    if self.map_index == len(self.map):
-                        self.map_index = 0
-            length = min(size, stop - self.position)
-            if data:
-                self.fileobj.seek(offset + (self.position - start))
-                b = self.fileobj.read(length)
-                if len(b) != length:
-                    raise ReadError("unexpected end of data")
-                buf += b
-            else:
-                buf += NUL * length
-            size -= length
-            self.position += length
-        return buf
-
-    def readinto(self, b):
-        buf = self.read(len(b))
-        b[:len(buf)] = buf
-        return len(buf)
-
-    def close(self):
-        self.closed = True
-#class _FileInFile
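The map built in `__init__` interleaves real data runs with implicit zero runs, so sparse members read back at their full logical size. For a hypothetical 100-byte member whose only stored block is (offset=10, size=20), the map would be:

    # (is_data, logical_start, logical_stop, real_offset)
    #  (False,  0,  10, None)     -> 10 leading zero bytes
    #  (True,  10,  30, realpos)  -> 20 bytes read from the archive
    #  (False, 30, 100, None)     -> trailing zeros up to the logical size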
-
-class ExFileObject(io.BufferedReader):
-
-    def __init__(self, tarfile, tarinfo):
-        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
-                tarinfo.size, tarinfo.name, tarinfo.sparse)
-        super().__init__(fileobj)
-#class ExFileObject
-
-
-#-----------------------------
-# extraction filters (PEP 706)
-#-----------------------------
-
-class FilterError(TarError):
-    pass
-
-class AbsolutePathError(FilterError):
-    def __init__(self, tarinfo):
-        self.tarinfo = tarinfo
-        super().__init__(f'member {tarinfo.name!r} has an absolute path')
-
-class OutsideDestinationError(FilterError):
-    def __init__(self, tarinfo, path):
-        self.tarinfo = tarinfo
-        self._path = path
-        super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
-                         + 'which is outside the destination')
-
-class SpecialFileError(FilterError):
-    def __init__(self, tarinfo):
-        self.tarinfo = tarinfo
-        super().__init__(f'{tarinfo.name!r} is a special file')
-
-class AbsoluteLinkError(FilterError):
-    def __init__(self, tarinfo):
-        self.tarinfo = tarinfo
-        super().__init__(f'{tarinfo.name!r} is a link to an absolute path')
-
-class LinkOutsideDestinationError(FilterError):
-    def __init__(self, tarinfo, path):
-        self.tarinfo = tarinfo
-        self._path = path
-        super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
-                         + 'which is outside the destination')
-
-def _get_filtered_attrs(member, dest_path, for_data=True):
-    new_attrs = {}
-    name = member.name
-    dest_path = os.path.realpath(dest_path)
-    # Strip leading / (tar's directory separator) from filenames.
-    # Include os.sep (target OS directory separator) as well.
-    if name.startswith(('/', os.sep)):
-        name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
-    if os.path.isabs(name):
-        # Path is absolute even after stripping.
-        # For example, 'C:/foo' on Windows.
-        raise AbsolutePathError(member)
-    # Ensure we stay in the destination
-    target_path = os.path.realpath(os.path.join(dest_path, name))
-    if os.path.commonpath([target_path, dest_path]) != dest_path:
-        raise OutsideDestinationError(member, target_path)
-    # Limit permissions (no high bits, and go-w)
-    mode = member.mode
-    if mode is not None:
-        # Strip high bits & group/other write bits
-        mode = mode & 0o755
-        if for_data:
-            # For data, handle permissions & file types
-            if member.isreg() or member.islnk():
-                if not mode & 0o100:
-                    # Clear executable bits if not executable by user
-                    mode &= ~0o111
-                # Ensure owner can read & write
-                mode |= 0o600
-            elif member.isdir() or member.issym():
-                # Ignore mode for directories & symlinks
-                mode = None
-            else:
-                # Reject special files
-                raise SpecialFileError(member)
-        if mode != member.mode:
-            new_attrs['mode'] = mode
-    if for_data:
-        # Ignore ownership for 'data'
-        if member.uid is not None:
-            new_attrs['uid'] = None
-        if member.gid is not None:
-            new_attrs['gid'] = None
-        if member.uname is not None:
-            new_attrs['uname'] = None
-        if member.gname is not None:
-            new_attrs['gname'] = None
-        # Check link destination for 'data'
-        if member.islnk() or member.issym():
-            if os.path.isabs(member.linkname):
-                raise AbsoluteLinkError(member)
-            if member.issym():
-                target_path = os.path.join(dest_path,
-                                           os.path.dirname(name),
-                                           member.linkname)
-            else:
-                target_path = os.path.join(dest_path,
-                                           member.linkname)
-            target_path = os.path.realpath(target_path)
-            if os.path.commonpath([target_path, dest_path]) != dest_path:
-                raise LinkOutsideDestinationError(member, target_path)
-    return new_attrs
-
-def fully_trusted_filter(member, dest_path):
-    return member
-
-def tar_filter(member, dest_path):
-    new_attrs = _get_filtered_attrs(member, dest_path, False)
-    if new_attrs:
-        return member.replace(**new_attrs, deep=False)
-    return member
-
-def data_filter(member, dest_path):
-    new_attrs = _get_filtered_attrs(member, dest_path, True)
-    if new_attrs:
-        return member.replace(**new_attrs, deep=False)
-    return member
-
-_NAMED_FILTERS = {
-    "fully_trusted": fully_trusted_filter,
-    "tar": tar_filter,
-    "data": data_filter,
-}
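These named filters implement PEP 706; `"data"` is the conservative choice for archives you did not create yourself. A minimal usage sketch (paths invented; works on Python 3.12+ or with this backport):

    import tarfile  # the stdlib module this file backports

    with tarfile.open("untrusted.tar") as tf:
        # data_filter rejects absolute paths, traversal outside the
        # destination, and special files, and drops ownership metadata,
        # per _get_filtered_attrs above.
        tf.extractall(path="dest", filter="data")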
-
-#------------------
-# Exported Classes
-#------------------
-
-# Sentinel for replace() defaults, meaning "don't change the attribute"
-_KEEP = object()
-
-class TarInfo(object):
-    """Informational class which holds the details about an
-       archive member given by a tar header block.
-       TarInfo objects are returned by TarFile.getmember(),
-       TarFile.getmembers() and TarFile.gettarinfo() and are
-       usually created internally.
-    """
-
-    __slots__ = dict(
-        name = 'Name of the archive member.',
-        mode = 'Permission bits.',
-        uid = 'User ID of the user who originally stored this member.',
-        gid = 'Group ID of the user who originally stored this member.',
-        size = 'Size in bytes.',
-        mtime = 'Time of last modification.',
-        chksum = 'Header checksum.',
-        type = ('File type. type is usually one of these constants: '
-                'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
-                'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
-        linkname = ('Name of the target file name, which is only present '
-                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
-        uname = 'User name.',
-        gname = 'Group name.',
-        devmajor = 'Device major number.',
-        devminor = 'Device minor number.',
-        offset = 'The tar header starts here.',
-        offset_data = "The file's data starts here.",
-        pax_headers = ('A dictionary containing key-value pairs of an '
-                       'associated pax extended header.'),
-        sparse = 'Sparse member information.',
-        _tarfile = None,
-        _sparse_structs = None,
-        _link_target = None,
-        )
-
-    def __init__(self, name=""):
-        """Construct a TarInfo object. name is the optional name
-           of the member.
-        """
-        self.name = name        # member name
-        self.mode = 0o644       # file permissions
-        self.uid = 0            # user id
-        self.gid = 0            # group id
-        self.size = 0           # file size
-        self.mtime = 0          # modification time
-        self.chksum = 0         # header checksum
-        self.type = REGTYPE     # member type
-        self.linkname = ""      # link name
-        self.uname = ""         # user name
-        self.gname = ""         # group name
-        self.devmajor = 0       # device major number
-        self.devminor = 0       # device minor number
-
-        self.offset = 0         # the tar header starts here
-        self.offset_data = 0    # the file's data starts here
-
-        self.sparse = None      # sparse member information
-        self.pax_headers = {}   # pax header information
-
-    @property
-    def tarfile(self):
-        import warnings
-        warnings.warn(
-            'The undocumented "tarfile" attribute of TarInfo objects '
-            + 'is deprecated and will be removed in Python 3.16',
-            DeprecationWarning, stacklevel=2)
-        return self._tarfile
-
-    @tarfile.setter
-    def tarfile(self, tarfile):
-        import warnings
-        warnings.warn(
-            'The undocumented "tarfile" attribute of TarInfo objects '
-            + 'is deprecated and will be removed in Python 3.16',
-            DeprecationWarning, stacklevel=2)
-        self._tarfile = tarfile
-
-    @property
-    def path(self):
-        'In pax headers, "name" is called "path".'
-        return self.name
-
-    @path.setter
-    def path(self, name):
-        self.name = name
-
-    @property
-    def linkpath(self):
-        'In pax headers, "linkname" is called "linkpath".'
-        return self.linkname
-
-    @linkpath.setter
-    def linkpath(self, linkname):
-        self.linkname = linkname
-
-    def __repr__(self):
-        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
-
-    def replace(self, *,
-                name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
-                uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
-                deep=True, _KEEP=_KEEP):
-        """Return a deep copy of self with the given attributes replaced.
-        """
-        if deep:
-            result = copy.deepcopy(self)
-        else:
-            result = copy.copy(self)
-        if name is not _KEEP:
-            result.name = name
-        if mtime is not _KEEP:
-            result.mtime = mtime
-        if mode is not _KEEP:
-            result.mode = mode
-        if linkname is not _KEEP:
-            result.linkname = linkname
-        if uid is not _KEEP:
-            result.uid = uid
-        if gid is not _KEEP:
-            result.gid = gid
-        if uname is not _KEEP:
-            result.uname = uname
-        if gname is not _KEEP:
-            result.gname = gname
-        return result
-
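replace() exists chiefly so extraction filters can hand back adjusted members without mutating the archive's own metadata. A sketch of a custom filter built on it; the filter name is invented, and `data_filter` is the public helper defined above:

    import tarfile

    def strip_exec_bits(member, dest_path):
        # Start from the stock data filter, then clear group/other execute.
        member = tarfile.data_filter(member, dest_path)
        if member.mode is not None:
            member = member.replace(mode=member.mode & ~0o011, deep=False)
        return member

    # tf.extractall(path="dest", filter=strip_exec_bits)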
-    def get_info(self):
-        """Return the TarInfo's attributes as a dictionary.
-        """
-        if self.mode is None:
-            mode = None
-        else:
-            mode = self.mode & 0o7777
-        info = {
-            "name":     self.name,
-            "mode":     mode,
-            "uid":      self.uid,
-            "gid":      self.gid,
-            "size":     self.size,
-            "mtime":    self.mtime,
-            "chksum":   self.chksum,
-            "type":     self.type,
-            "linkname": self.linkname,
-            "uname":    self.uname,
-            "gname":    self.gname,
-            "devmajor": self.devmajor,
-            "devminor": self.devminor
-        }
-
-        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
-            info["name"] += "/"
-
-        return info
-
-    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
-        """Return a tar header as a string of 512 byte blocks.
-        """
-        info = self.get_info()
-        for name, value in info.items():
-            if value is None:
-                raise ValueError("%s may not be None" % name)
-
-        if format == USTAR_FORMAT:
-            return self.create_ustar_header(info, encoding, errors)
-        elif format == GNU_FORMAT:
-            return self.create_gnu_header(info, encoding, errors)
-        elif format == PAX_FORMAT:
-            return self.create_pax_header(info, encoding)
-        else:
-            raise ValueError("invalid format")
-
-    def create_ustar_header(self, info, encoding, errors):
-        """Return the object as a ustar header block.
-        """
-        info["magic"] = POSIX_MAGIC
-
-        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
-            raise ValueError("linkname is too long")
-
-        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
-            info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
-
-        return self._create_header(info, USTAR_FORMAT, encoding, errors)
-
-    def create_gnu_header(self, info, encoding, errors):
-        """Return the object as a GNU header block sequence.
-        """
-        info["magic"] = GNU_MAGIC
-
-        buf = b""
-        if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
-            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
-
-        if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
-            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
-
-        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
-
-    def create_pax_header(self, info, encoding):
-        """Return the object as a ustar header block. If it cannot be
-           represented this way, prepend a pax extended header sequence
-           with supplement information.
-        """
-        info["magic"] = POSIX_MAGIC
-        pax_headers = self.pax_headers.copy()
-
-        # Test string fields for values that exceed the field length or cannot
-        # be represented in ASCII encoding.
-        for name, hname, length in (
-                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
-                ("uname", "uname", 32), ("gname", "gname", 32)):
-
-            if hname in pax_headers:
-                # The pax header has priority.
-                continue
-
-            # Try to encode the string as ASCII.
-            try:
-                info[name].encode("ascii", "strict")
-            except UnicodeEncodeError:
-                pax_headers[hname] = info[name]
-                continue
-
-            if len(info[name]) > length:
-                pax_headers[hname] = info[name]
-
-        # Test number fields for values that exceed the field limit or values
-        # that like to be stored as float.
-        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
-            needs_pax = False
-
-            val = info[name]
-            val_is_float = isinstance(val, float)
-            val_int = round(val) if val_is_float else val
-            if not 0 <= val_int < 8 ** (digits - 1):
-                # Avoid overflow.
-                info[name] = 0
-                needs_pax = True
-            elif val_is_float:
-                # Put rounded value in ustar header, and full
-                # precision value in pax header.
-                info[name] = val_int
-                needs_pax = True
-
-            # The existing pax header has priority.
-            if needs_pax and name not in pax_headers:
-                pax_headers[name] = str(val)
-
-        # Create a pax extended header if necessary.
-        if pax_headers:
-            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
-        else:
-            buf = b""
-
-        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
-
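-    # Sketch of when the pax path kicks in (sizes assume BLOCKSIZE == 512;
-    # illustrative only): a name longer than LENGTH_NAME forces an extended
-    # header record in front of the ustar block.
-    #
-    #     short = TarInfo("a.txt").tobuf(PAX_FORMAT)
-    #     long_ = TarInfo("x" * 150).tobuf(PAX_FORMAT)
-    #     assert len(short) == 512        # plain ustar block suffices
-    #     assert len(long_) == 512 * 3    # xhdr block + record block + ustar
-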
-    @classmethod
-    def create_pax_global_header(cls, pax_headers):
-        """Return the object as a pax global header block sequence.
-        """
-        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
-
-    def _posix_split_name(self, name, encoding, errors):
-        """Split a name longer than 100 chars into a prefix
-           and a name part.
-        """
-        components = name.split("/")
-        for i in range(1, len(components)):
-            prefix = "/".join(components[:i])
-            name = "/".join(components[i:])
-            if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
-                    len(name.encode(encoding, errors)) <= LENGTH_NAME:
-                break
-        else:
-            raise ValueError("name is too long")
-
-        return prefix, name
-
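-    # Split sketch (hypothetical 121-char path): both halves must fit their
-    # fields, otherwise ValueError("name is too long") is raised.
-    #
-    #     ti = TarInfo("d" * 60 + "/" + "f" * 60)
-    #     buf = ti.tobuf(USTAR_FORMAT)   # stored as prefix="dd...", name="ff..."
-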
-    @staticmethod
-    def _create_header(info, format, encoding, errors):
-        """Return a header block. info is a dictionary with file
-           information; format must be one of the *_FORMAT constants.
-        """
-        has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
-        if has_device_fields:
-            devmajor = itn(info.get("devmajor", 0), 8, format)
-            devminor = itn(info.get("devminor", 0), 8, format)
-        else:
-            devmajor = stn("", 8, encoding, errors)
-            devminor = stn("", 8, encoding, errors)
-
-        # None values in metadata should cause ValueError.
-        # itn()/stn() do this for all fields except type.
-        filetype = info.get("type", REGTYPE)
-        if filetype is None:
-            raise ValueError("TarInfo.type must not be None")
-
-        parts = [
-            stn(info.get("name", ""), 100, encoding, errors),
-            itn(info.get("mode", 0) & 0o7777, 8, format),
-            itn(info.get("uid", 0), 8, format),
-            itn(info.get("gid", 0), 8, format),
-            itn(info.get("size", 0), 12, format),
-            itn(info.get("mtime", 0), 12, format),
-            b"        ", # checksum field
-            filetype,
-            stn(info.get("linkname", ""), 100, encoding, errors),
-            info.get("magic", POSIX_MAGIC),
-            stn(info.get("uname", ""), 32, encoding, errors),
-            stn(info.get("gname", ""), 32, encoding, errors),
-            devmajor,
-            devminor,
-            stn(info.get("prefix", ""), 155, encoding, errors)
-        ]
-
-        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
-        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
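-        # The 8-byte checksum field occupies offsets 148..155 of the block
-        # (512 - 364 == 148 and 512 - 357 == 155): the slice below replaces
-        # the placeholder spaces with "%06o\0" (six octal digits plus NUL),
-        # keeping the placeholder's final space at offset 155.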
-        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
-        return buf
-
-    @staticmethod
-    def _create_payload(payload):
-        """Return the string payload filled with zero bytes
-           up to the next 512 byte border.
-        """
-        blocks, remainder = divmod(len(payload), BLOCKSIZE)
-        if remainder > 0:
-            payload += (BLOCKSIZE - remainder) * NUL
-        return payload
-
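-    # Padding arithmetic sketch: payloads grow to the next multiple of
-    # BLOCKSIZE, e.g.
-    #
-    #     divmod(160, 512) == (0, 160)   # -> append 512 - 160 == 352 NULs
-    #     divmod(512, 512) == (1, 0)     # -> already aligned, append nothing
-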
-    @classmethod
-    def _create_gnu_long_header(cls, name, type, encoding, errors):
-        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
-           for name.
-        """
-        name = name.encode(encoding, errors) + NUL
-
-        info = {}
-        info["name"] = "././@LongLink"
-        info["type"] = type
-        info["size"] = len(name)
-        info["magic"] = GNU_MAGIC
-
-        # create extended header + name blocks.
-        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
-                cls._create_payload(name)
-
-    @classmethod
-    def _create_pax_generic_header(cls, pax_headers, type, encoding):
-        """Return a POSIX.1-2008 extended or global header sequence
-           that contains a list of keyword, value pairs. The values
-           must be strings.
-        """
-        # Check if one of the fields contains surrogate characters and thereby
-        # forces hdrcharset=BINARY, see _proc_pax() for more information.
-        binary = False
-        for keyword, value in pax_headers.items():
-            try:
-                value.encode("utf-8", "strict")
-            except UnicodeEncodeError:
-                binary = True
-                break
-
-        records = b""
-        if binary:
-            # Put the hdrcharset field at the beginning of the header.
-            records += b"21 hdrcharset=BINARY\n"
-
-        for keyword, value in pax_headers.items():
-            keyword = keyword.encode("utf-8")
-            if binary:
-                # Try to restore the original byte representation of 'value'.
-                # Needless to say, the encoding must match the string.
-                value = value.encode(encoding, "surrogateescape")
-            else:
-                value = value.encode("utf-8")
-
-            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
-            n = p = 0
-            while True:
-                n = l + len(str(p))
-                if n == p:
-                    break
-                p = n
-            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
-
-        # We use a hardcoded "././@PaxHeader" name like star does
-        # instead of the one that POSIX recommends.
-        info = {}
-        info["name"] = "././@PaxHeader"
-        info["type"] = type
-        info["size"] = len(records)
-        info["magic"] = POSIX_MAGIC
-
-        # Create pax header + record blocks.
-        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
-                cls._create_payload(records)
-
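-    # The record length above is self-referential: it counts its own decimal
-    # digits. A worked example for keyword "path" with a 150-byte value:
-    #
-    #     l = 4 + 150 + 3                # ' ' + '=' + '\n' -> 157
-    #     # 157 + len("0") == 158; 157 + len("158") == 160; 157 + len("160")
-    #     # == 160 -> fixed point, so the record begins with b"160 path="
-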
-    @classmethod
-    def frombuf(cls, buf, encoding, errors):
-        """Construct a TarInfo object from a 512 byte bytes object.
-        """
-        if len(buf) == 0:
-            raise EmptyHeaderError("empty header")
-        if len(buf) != BLOCKSIZE:
-            raise TruncatedHeaderError("truncated header")
-        if buf.count(NUL) == BLOCKSIZE:
-            raise EOFHeaderError("end of file header")
-
-        chksum = nti(buf[148:156])
-        if chksum not in calc_chksums(buf):
-            raise InvalidHeaderError("bad checksum")
-
-        obj = cls()
-        obj.name = nts(buf[0:100], encoding, errors)
-        obj.mode = nti(buf[100:108])
-        obj.uid = nti(buf[108:116])
-        obj.gid = nti(buf[116:124])
-        obj.size = nti(buf[124:136])
-        obj.mtime = nti(buf[136:148])
-        obj.chksum = chksum
-        obj.type = buf[156:157]
-        obj.linkname = nts(buf[157:257], encoding, errors)
-        obj.uname = nts(buf[265:297], encoding, errors)
-        obj.gname = nts(buf[297:329], encoding, errors)
-        obj.devmajor = nti(buf[329:337])
-        obj.devminor = nti(buf[337:345])
-        prefix = nts(buf[345:500], encoding, errors)
-
-        # Old V7 tar format represents a directory as a regular
-        # file with a trailing slash.
-        if obj.type == AREGTYPE and obj.name.endswith("/"):
-            obj.type = DIRTYPE
-
-        # The old GNU sparse format occupies some of the unused
-        # space in the buffer for up to 4 sparse structures.
-        # Save them for later processing in _proc_sparse().
-        if obj.type == GNUTYPE_SPARSE:
-            pos = 386
-            structs = []
-            for i in range(4):
-                try:
-                    offset = nti(buf[pos:pos + 12])
-                    numbytes = nti(buf[pos + 12:pos + 24])
-                except ValueError:
-                    break
-                structs.append((offset, numbytes))
-                pos += 24
-            isextended = bool(buf[482])
-            origsize = nti(buf[483:495])
-            obj._sparse_structs = (structs, isextended, origsize)
-
-        # Remove redundant slashes from directories.
-        if obj.isdir():
-            obj.name = obj.name.rstrip("/")
-
-        # Reconstruct a ustar longname.
-        if prefix and obj.type not in GNU_TYPES:
-            obj.name = prefix + "/" + obj.name
-        return obj
-
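-    # Round-trip sketch: frombuf() inverts tobuf() for plain ustar members
-    # (illustrative use of names defined in this module):
-    #
-    #     ti = TarInfo("hello.txt")
-    #     buf = ti.tobuf(USTAR_FORMAT)
-    #     ti2 = TarInfo.frombuf(buf, ENCODING, "surrogateescape")
-    #     assert ti2.name == "hello.txt"
-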
-    @classmethod
-    def fromtarfile(cls, tarfile):
-        """Return the next TarInfo object from TarFile object
-           tarfile.
-        """
-        buf = tarfile.fileobj.read(BLOCKSIZE)
-        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
-        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
-        return obj._proc_member(tarfile)
-
-    #--------------------------------------------------------------------------
-    # The following are methods that are called depending on the type of a
-    # member. The entry point is _proc_member() which can be overridden in a
-    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
-    # implement the following operations:
-    # 1. Set self.offset_data to the position where the data blocks begin,
-    #    if there is data that follows.
-    # 2. Set tarfile.offset to the position where the next member's header will
-    #    begin.
-    # 3. Return self or another valid TarInfo object.
-    def _proc_member(self, tarfile):
-        """Choose the right processing method depending on
-           the type and call it.
-        """
-        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
-            return self._proc_gnulong(tarfile)
-        elif self.type == GNUTYPE_SPARSE:
-            return self._proc_sparse(tarfile)
-        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
-            return self._proc_pax(tarfile)
-        else:
-            return self._proc_builtin(tarfile)
-
-    def _proc_builtin(self, tarfile):
-        """Process a builtin type or an unknown type which
-           will be treated as a regular file.
-        """
-        self.offset_data = tarfile.fileobj.tell()
-        offset = self.offset_data
-        if self.isreg() or self.type not in SUPPORTED_TYPES:
-            # Skip the following data blocks.
-            offset += self._block(self.size)
-        tarfile.offset = offset
-
-        # Patch the TarInfo object with saved global
-        # header information.
-        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
-
-        # Remove redundant slashes from directories. This is to be consistent
-        # with frombuf().
-        if self.isdir():
-            self.name = self.name.rstrip("/")
-
-        return self
-
-    def _proc_gnulong(self, tarfile):
-        """Process the blocks that hold a GNU longname
-           or longlink member.
-        """
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # Fetch the next header and process it.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError as e:
-            raise SubsequentHeaderError(str(e)) from None
-
-        # Patch the TarInfo object from the next header with
-        # the longname information.
-        next.offset = self.offset
-        if self.type == GNUTYPE_LONGNAME:
-            next.name = nts(buf, tarfile.encoding, tarfile.errors)
-        elif self.type == GNUTYPE_LONGLINK:
-            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
-
-        # Remove redundant slashes from directories. This is to be consistent
-        # with frombuf().
-        if next.isdir():
-            next.name = removesuffix(next.name, "/")
-
-        return next
-
-    def _proc_sparse(self, tarfile):
-        """Process a GNU sparse header plus extra headers.
-        """
-        # We already collected some sparse structures in frombuf().
-        structs, isextended, origsize = self._sparse_structs
-        del self._sparse_structs
-
-        # Collect sparse structures from extended header blocks.
-        while isextended:
-            buf = tarfile.fileobj.read(BLOCKSIZE)
-            pos = 0
-            for i in range(21):
-                try:
-                    offset = nti(buf[pos:pos + 12])
-                    numbytes = nti(buf[pos + 12:pos + 24])
-                except ValueError:
-                    break
-                if offset and numbytes:
-                    structs.append((offset, numbytes))
-                pos += 24
-            isextended = bool(buf[504])
-        self.sparse = structs
-
-        self.offset_data = tarfile.fileobj.tell()
-        tarfile.offset = self.offset_data + self._block(self.size)
-        self.size = origsize
-        return self
-
-    def _proc_pax(self, tarfile):
-        """Process an extended or global header as described in
-           POSIX.1-2008.
-        """
-        # Read the header information.
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # A pax header stores supplemental information for either
-        # the following file (extended) or all following files
-        # (global).
-        if self.type == XGLTYPE:
-            pax_headers = tarfile.pax_headers
-        else:
-            pax_headers = tarfile.pax_headers.copy()
-
-        # Check if the pax header contains a hdrcharset field. This tells us
-        # the encoding of the path, linkpath, uname and gname fields. Normally,
-        # these fields are UTF-8 encoded, but POSIX.1-2008 allows tar
-        # implementations to store them as raw binary strings if the
-        # translation to UTF-8 fails.
-        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
-        if match is not None:
-            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
-
-        # For the time being, we don't care about anything other than "BINARY".
-        # The only other value that is currently allowed by the standard is
-        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
-        hdrcharset = pax_headers.get("hdrcharset")
-        if hdrcharset == "BINARY":
-            encoding = tarfile.encoding
-        else:
-            encoding = "utf-8"
-
-        # Parse pax header information. A record looks like this:
-        # "%d %s=%s\n" % (length, keyword, value). length is the size
-        # of the complete record including the length field itself and
-        # the newline. keyword and value are both UTF-8 encoded strings.
-        regex = re.compile(br"(\d+) ([^=]+)=")
-        pos = 0
-        while match := regex.match(buf, pos):
-            length, keyword = match.groups()
-            length = int(length)
-            if length == 0:
-                raise InvalidHeaderError("invalid header")
-            value = buf[match.end(2) + 1:match.start(1) + length - 1]
-
-            # Normally, we could just use "utf-8" as the encoding and "strict"
-            # as the error handler, but we'd better not take the risk. For
-            # example, GNU tar <= 1.23 is known to store filenames it cannot
-            # translate to UTF-8 as raw strings (unfortunately without a
-            # hdrcharset=BINARY header).
-            # We first try the strict standard encoding, and if that fails we
-            # fall back on the user's encoding and error handler.
-            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
-                    tarfile.errors)
-            if keyword in PAX_NAME_FIELDS:
-                value = self._decode_pax_field(value, encoding, tarfile.encoding,
-                        tarfile.errors)
-            else:
-                value = self._decode_pax_field(value, "utf-8", "utf-8",
-                        tarfile.errors)
-
-            pax_headers[keyword] = value
-            pos += length
-
-        # Fetch the next header.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError as e:
-            raise SubsequentHeaderError(str(e)) from None
-
-        # Process GNU sparse information.
-        if "GNU.sparse.map" in pax_headers:
-            # GNU extended sparse format version 0.1.
-            self._proc_gnusparse_01(next, pax_headers)
-
-        elif "GNU.sparse.size" in pax_headers:
-            # GNU extended sparse format version 0.0.
-            self._proc_gnusparse_00(next, pax_headers, buf)
-
-        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
-            # GNU extended sparse format version 1.0.
-            self._proc_gnusparse_10(next, pax_headers, tarfile)
-
-        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
-            # Patch the TarInfo object with the extended header info.
-            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
-            next.offset = self.offset
-
-            if "size" in pax_headers:
-                # If the extended header replaces the size field,
-                # we need to recalculate the offset where the next
-                # header starts.
-                offset = next.offset_data
-                if next.isreg() or next.type not in SUPPORTED_TYPES:
-                    offset += next._block(next.size)
-                tarfile.offset = offset
-
-        return next
-
-    def _proc_gnusparse_00(self, next, pax_headers, buf):
-        """Process a GNU tar extended sparse header, version 0.0.
-        """
-        offsets = []
-        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
-            offsets.append(int(match.group(1)))
-        numbytes = []
-        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
-            numbytes.append(int(match.group(1)))
-        next.sparse = list(zip(offsets, numbytes))
-
-    def _proc_gnusparse_01(self, next, pax_headers):
-        """Process a GNU tar extended sparse header, version 0.1.
-        """
-        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
-        next.sparse = list(zip(sparse[::2], sparse[1::2]))
-
-    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
-        """Process a GNU tar extended sparse header, version 1.0.
-        """
-        fields = None
-        sparse = []
-        buf = tarfile.fileobj.read(BLOCKSIZE)
-        fields, buf = buf.split(b"\n", 1)
-        fields = int(fields)
-        while len(sparse) < fields * 2:
-            if b"\n" not in buf:
-                buf += tarfile.fileobj.read(BLOCKSIZE)
-            number, buf = buf.split(b"\n", 1)
-            sparse.append(int(number))
-        next.offset_data = tarfile.fileobj.tell()
-        next.sparse = list(zip(sparse[::2], sparse[1::2]))
-
-    def _apply_pax_info(self, pax_headers, encoding, errors):
-        """Replace fields with supplemental information from a previous
-           pax extended or global header.
-        """
-        for keyword, value in pax_headers.items():
-            if keyword == "GNU.sparse.name":
-                setattr(self, "path", value)
-            elif keyword == "GNU.sparse.size":
-                setattr(self, "size", int(value))
-            elif keyword == "GNU.sparse.realsize":
-                setattr(self, "size", int(value))
-            elif keyword in PAX_FIELDS:
-                if keyword in PAX_NUMBER_FIELDS:
-                    try:
-                        value = PAX_NUMBER_FIELDS[keyword](value)
-                    except ValueError:
-                        value = 0
-                if keyword == "path":
-                    value = value.rstrip("/")
-                setattr(self, keyword, value)
-
-        self.pax_headers = pax_headers.copy()
-
-    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
-        """Decode a single field from a pax record.
-        """
-        try:
-            return value.decode(encoding, "strict")
-        except UnicodeDecodeError:
-            return value.decode(fallback_encoding, fallback_errors)
-
-    def _block(self, count):
-        """Round up a byte count by BLOCKSIZE and return it,
-           e.g. _block(834) => 1024.
-        """
-        blocks, remainder = divmod(count, BLOCKSIZE)
-        if remainder:
-            blocks += 1
-        return blocks * BLOCKSIZE
-
-    def isreg(self):
-        'Return True if the TarInfo object is a regular file.'
-        return self.type in REGULAR_TYPES
-
-    def isfile(self):
-        'Return True if the TarInfo object is a regular file.'
-        return self.isreg()
-
-    def isdir(self):
-        'Return True if it is a directory.'
-        return self.type == DIRTYPE
-
-    def issym(self):
-        'Return True if it is a symbolic link.'
-        return self.type == SYMTYPE
-
-    def islnk(self):
-        'Return True if it is a hard link.'
-        return self.type == LNKTYPE
-
-    def ischr(self):
-        'Return True if it is a character device.'
-        return self.type == CHRTYPE
-
-    def isblk(self):
-        'Return True if it is a block device.'
-        return self.type == BLKTYPE
-
-    def isfifo(self):
-        'Return True if it is a FIFO.'
-        return self.type == FIFOTYPE
-
-    def issparse(self):
-        return self.sparse is not None
-
-    def isdev(self):
-        'Return True if it is one of character device, block device or FIFO.'
-        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
-# class TarInfo
-
-class TarFile(object):
-    """The TarFile Class provides an interface to tar archives.
-    """
-
-    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
-
-    dereference = False         # If true, add content of linked file to the
-                                # tar file, else the link.
-
-    ignore_zeros = False        # If true, skips empty or invalid blocks and
-                                # continues processing.
-
-    errorlevel = 1              # If 0, fatal errors only appear in debug
-                                # messages (if debug >= 0). If > 0, errors
-                                # are passed to the caller as exceptions.
-
-    format = DEFAULT_FORMAT     # The format to use when creating an archive.
-
-    encoding = ENCODING         # Encoding for 8-bit character strings.
-
-    errors = None               # Error handler for unicode conversion.
-
-    tarinfo = TarInfo           # The default TarInfo class to use.
-
-    fileobject = ExFileObject   # The file-object for extractfile().
-
-    extraction_filter = None    # The default filter for extraction.
-
-    def __init__(self, name=None, mode="r", fileobj=None, format=None,
-            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
-            errors="surrogateescape", pax_headers=None, debug=None,
-            errorlevel=None, copybufsize=None, stream=False):
-        """Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to
-           read from an existing archive, 'a' to append data to an existing
-           file or 'w' to create a new file overwriting an existing one. 'mode'
-           defaults to 'r'.
-           If 'fileobj' is given, it is used for reading or writing data. If it
-           can be determined, 'mode' is overridden by 'fileobj's mode.
-           'fileobj' is not closed, when TarFile is closed.
-        """
-        modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
-        if mode not in modes:
-            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
-        self.mode = mode
-        self._mode = modes[mode]
-
-        if not fileobj:
-            if self.mode == "a" and not os.path.exists(name):
-                # Create nonexistent files in append mode.
-                self.mode = "w"
-                self._mode = "wb"
-            fileobj = bltn_open(name, self._mode)
-            self._extfileobj = False
-        else:
-            if (name is None and hasattr(fileobj, "name") and
-                isinstance(fileobj.name, (str, bytes))):
-                name = fileobj.name
-            if hasattr(fileobj, "mode"):
-                self._mode = fileobj.mode
-            self._extfileobj = True
-        self.name = os.path.abspath(name) if name else None
-        self.fileobj = fileobj
-
-        self.stream = stream
-
-        # Init attributes.
-        if format is not None:
-            self.format = format
-        if tarinfo is not None:
-            self.tarinfo = tarinfo
-        if dereference is not None:
-            self.dereference = dereference
-        if ignore_zeros is not None:
-            self.ignore_zeros = ignore_zeros
-        if encoding is not None:
-            self.encoding = encoding
-        self.errors = errors
-
-        if pax_headers is not None and self.format == PAX_FORMAT:
-            self.pax_headers = pax_headers
-        else:
-            self.pax_headers = {}
-
-        if debug is not None:
-            self.debug = debug
-        if errorlevel is not None:
-            self.errorlevel = errorlevel
-
-        # Init datastructures.
-        self.copybufsize = copybufsize
-        self.closed = False
-        self.members = []       # list of members as TarInfo objects
-        self._loaded = False    # flag if all members have been read
-        self.offset = self.fileobj.tell()
-                                # current position in the archive file
-        self.inodes = {}        # dictionary caching the inodes of
-                                # archive members already added
-
-        try:
-            if self.mode == "r":
-                self.firstmember = None
-                self.firstmember = self.next()
-
-            if self.mode == "a":
-                # Move to the end of the archive,
-                # before the first empty block.
-                while True:
-                    self.fileobj.seek(self.offset)
-                    try:
-                        tarinfo = self.tarinfo.fromtarfile(self)
-                        self.members.append(tarinfo)
-                    except EOFHeaderError:
-                        self.fileobj.seek(self.offset)
-                        break
-                    except HeaderError as e:
-                        raise ReadError(str(e)) from None
-
-            if self.mode in ("a", "w", "x"):
-                self._loaded = True
-
-                if self.pax_headers:
-                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
-                    self.fileobj.write(buf)
-                    self.offset += len(buf)
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    #--------------------------------------------------------------------------
-    # Below are the classmethods which act as alternate constructors to the
-    # TarFile class. The open() method is the only one that is needed for
-    # public use; it is the "super"-constructor and is able to select an
-    # adequate "sub"-constructor for a particular compression using the mapping
-    # from OPEN_METH.
-    #
-    # This concept allows one to subclass TarFile without losing the comfort of
-    # the super-constructor. A sub-constructor is registered and made available
-    # by adding it to the mapping in OPEN_METH.
-
-    @classmethod
-    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
-        r"""Open a tar archive for reading, writing or appending. Return
-           an appropriate TarFile instance.
-
-           mode:
-           'r' or 'r:\*' open for reading with transparent compression
-           'r:'         open for reading exclusively uncompressed
-           'r:gz'       open for reading with gzip compression
-           'r:bz2'      open for reading with bzip2 compression
-           'r:xz'       open for reading with lzma compression
-           'a' or 'a:'  open for appending, creating the file if necessary
-           'w' or 'w:'  open for writing without compression
-           'w:gz'       open for writing with gzip compression
-           'w:bz2'      open for writing with bzip2 compression
-           'w:xz'       open for writing with lzma compression
-
-           'x' or 'x:'  create a tarfile exclusively without compression, raise
-                        an exception if the file already exists
-           'x:gz'       create a gzip compressed tarfile, raise an exception
-                        if the file already exists
-           'x:bz2'      create a bzip2 compressed tarfile, raise an exception
-                        if the file already exists
-           'x:xz'       create an lzma compressed tarfile, raise an exception
-                        if the file already exists
-
-           'r|\*'        open a stream of tar blocks with transparent compression
-           'r|'         open an uncompressed stream of tar blocks for reading
-           'r|gz'       open a gzip compressed stream of tar blocks
-           'r|bz2'      open a bzip2 compressed stream of tar blocks
-           'r|xz'       open an lzma compressed stream of tar blocks
-           'w|'         open an uncompressed stream for writing
-           'w|gz'       open a gzip compressed stream for writing
-           'w|bz2'      open a bzip2 compressed stream for writing
-           'w|xz'       open an lzma compressed stream for writing
-        """
-
-        if not name and not fileobj:
-            raise ValueError("nothing to open")
-
-        if mode in ("r", "r:*"):
-            # Find out which *open() is appropriate for opening the file.
-            def not_compressed(comptype):
-                return cls.OPEN_METH[comptype] == 'taropen'
-            error_msgs = []
-            for comptype in sorted(cls.OPEN_METH, key=not_compressed):
-                func = getattr(cls, cls.OPEN_METH[comptype])
-                if fileobj is not None:
-                    saved_pos = fileobj.tell()
-                try:
-                    return func(name, "r", fileobj, **kwargs)
-                except (ReadError, CompressionError) as e:
-                    error_msgs.append(f'- method {comptype}: {e!r}')
-                    if fileobj is not None:
-                        fileobj.seek(saved_pos)
-                    continue
-            error_msgs_summary = '\n'.join(error_msgs)
-            raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
-
-        elif ":" in mode:
-            filemode, comptype = mode.split(":", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            # Select the *open() function according to
-            # given compression.
-            if comptype in cls.OPEN_METH:
-                func = getattr(cls, cls.OPEN_METH[comptype])
-            else:
-                raise CompressionError("unknown compression type %r" % comptype)
-            return func(name, filemode, fileobj, **kwargs)
-
-        elif "|" in mode:
-            filemode, comptype = mode.split("|", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            if filemode not in ("r", "w"):
-                raise ValueError("mode must be 'r' or 'w'")
-
-            compresslevel = kwargs.pop("compresslevel", 9)
-            stream = _Stream(name, filemode, comptype, fileobj, bufsize,
-                             compresslevel)
-            try:
-                t = cls(name, filemode, stream, **kwargs)
-            except:
-                stream.close()
-                raise
-            t._extfileobj = False
-            return t
-
-        elif mode in ("a", "w", "x"):
-            return cls.taropen(name, mode, fileobj, **kwargs)
-
-        raise ValueError("undiscernible mode")
-
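-    # Typical entry points (sketch; the file names and 'sock_file' are
-    # hypothetical):
-    #
-    #     with TarFile.open("archive.tar.gz") as tar:      # "r:*" autodetects
-    #         names = tar.getnames()
-    #     with TarFile.open("out.tar.xz", "w:xz") as tar:  # seekable + compressed
-    #         tar.add("somefile")
-    #     with TarFile.open(fileobj=sock_file, mode="r|gz") as tar:
-    #         for member in tar:                           # stream, no seeking
-    #             ...
-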
-    @classmethod
-    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
-        """Open uncompressed tar archive name for reading or writing.
-        """
-        if mode not in ("r", "a", "w", "x"):
-            raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
-        return cls(name, mode, fileobj, **kwargs)
-
-    @classmethod
-    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open gzip compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if mode not in ("r", "w", "x"):
-            raise ValueError("mode must be 'r', 'w' or 'x'")
-
-        try:
-            from gzip import GzipFile
-        except ImportError:
-            raise CompressionError("gzip module is not available") from None
-
-        try:
-            fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
-        except OSError as e:
-            if fileobj is not None and mode == 'r':
-                raise ReadError("not a gzip file") from e
-            raise
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except OSError as e:
-            fileobj.close()
-            if mode == 'r':
-                raise ReadError("not a gzip file") from e
-            raise
-        except:
-            fileobj.close()
-            raise
-        t._extfileobj = False
-        return t
-
-    @classmethod
-    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open bzip2 compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if mode not in ("r", "w", "x"):
-            raise ValueError("mode must be 'r', 'w' or 'x'")
-
-        try:
-            from bz2 import BZ2File
-        except ImportError:
-            raise CompressionError("bz2 module is not available") from None
-
-        fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except (OSError, EOFError) as e:
-            fileobj.close()
-            if mode == 'r':
-                raise ReadError("not a bzip2 file") from e
-            raise
-        except:
-            fileobj.close()
-            raise
-        t._extfileobj = False
-        return t
-
-    @classmethod
-    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
-        """Open lzma compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if mode not in ("r", "w", "x"):
-            raise ValueError("mode must be 'r', 'w' or 'x'")
-
-        try:
-            from lzma import LZMAFile, LZMAError
-        except ImportError:
-            raise CompressionError("lzma module is not available") from None
-
-        fileobj = LZMAFile(fileobj or name, mode, preset=preset)
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except (LZMAError, EOFError) as e:
-            fileobj.close()
-            if mode == 'r':
-                raise ReadError("not an lzma file") from e
-            raise
-        except:
-            fileobj.close()
-            raise
-        t._extfileobj = False
-        return t
-
-    # All *open() methods are registered here.
-    OPEN_METH = {
-        "tar": "taropen",   # uncompressed tar
-        "gz":  "gzopen",    # gzip compressed tar
-        "bz2": "bz2open",   # bzip2 compressed tar
-        "xz":  "xzopen"     # lzma compressed tar
-    }
-
-    #--------------------------------------------------------------------------
-    # The public methods which TarFile provides:
-
-    def close(self):
-        """Close the TarFile. In write-mode, two finishing zero blocks are
-           appended to the archive.
-        """
-        if self.closed:
-            return
-
-        self.closed = True
-        try:
-            if self.mode in ("a", "w", "x"):
-                self.fileobj.write(NUL * (BLOCKSIZE * 2))
-                self.offset += (BLOCKSIZE * 2)
-                # fill up the end with zero-blocks
-                # (like option -b20 for tar does)
-                blocks, remainder = divmod(self.offset, RECORDSIZE)
-                if remainder > 0:
-                    self.fileobj.write(NUL * (RECORDSIZE - remainder))
-        finally:
-            if not self._extfileobj:
-                self.fileobj.close()
-
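-    # Close sketch: after the two NUL blocks, the archive is padded to a full
-    # RECORDSIZE (20 * BLOCKSIZE == 10240 bytes), e.g.
-    #
-    #     divmod(1536, 10240) == (0, 1536)   # -> write 10240 - 1536 NULs
-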
-    def getmember(self, name):
-        """Return a TarInfo object for member 'name'. If 'name' can not be
-           found in the archive, KeyError is raised. If a member occurs more
-           than once in the archive, its last occurrence is assumed to be the
-           most up-to-date version.
-        """
-        tarinfo = self._getmember(name.rstrip('/'))
-        if tarinfo is None:
-            raise KeyError("filename %r not found" % name)
-        return tarinfo
-
-    def getmembers(self):
-        """Return the members of the archive as a list of TarInfo objects. The
-           list has the same order as the members in the archive.
-        """
-        self._check()
-        if not self._loaded:    # if we want to obtain a list of
-            self._load()        # all members, we first have to
-                                # scan the whole archive.
-        return self.members
-
-    def getnames(self):
-        """Return the members of the archive as a list of their names. It has
-           the same order as the list returned by getmembers().
-        """
-        return [tarinfo.name for tarinfo in self.getmembers()]
-
-    def gettarinfo(self, name=None, arcname=None, fileobj=None):
-        """Create a TarInfo object from the result of os.stat or equivalent
-           on an existing file. The file is either named by 'name', or
-           specified as a file object 'fileobj' with a file descriptor. If
-           given, 'arcname' specifies an alternative name for the file in the
-           archive, otherwise, the name is taken from the 'name' attribute of
-           'fileobj', or the 'name' argument. The name should be a text
-           string.
-        """
-        self._check("awx")
-
-        # When fileobj is given, replace name by
-        # fileobj's real name.
-        if fileobj is not None:
-            name = fileobj.name
-
-        # Building the name of the member in the archive.
-        # Backward slashes are converted to forward slashes, and
-        # absolute paths are turned into relative paths.
-        if arcname is None:
-            arcname = name
-        drv, arcname = os.path.splitdrive(arcname)
-        arcname = arcname.replace(os.sep, "/")
-        arcname = arcname.lstrip("/")
-
-        # Now, fill the TarInfo object with
-        # information specific for the file.
-        tarinfo = self.tarinfo()
-        tarinfo._tarfile = self  # To be removed in 3.16.
-
-        # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
-        if fileobj is None:
-            if not self.dereference:
-                statres = os.lstat(name)
-            else:
-                statres = os.stat(name)
-        else:
-            statres = os.fstat(fileobj.fileno())
-        linkname = ""
-
-        stmd = statres.st_mode
-        if stat.S_ISREG(stmd):
-            inode = (statres.st_ino, statres.st_dev)
-            if not self.dereference and statres.st_nlink > 1 and \
-                    inode in self.inodes and arcname != self.inodes[inode]:
-                # Is it a hardlink to an already
-                # archived file?
-                type = LNKTYPE
-                linkname = self.inodes[inode]
-            else:
-                # The inode is added only if it is valid.
-                # For win32 it is always 0.
-                type = REGTYPE
-                if inode[0]:
-                    self.inodes[inode] = arcname
-        elif stat.S_ISDIR(stmd):
-            type = DIRTYPE
-        elif stat.S_ISFIFO(stmd):
-            type = FIFOTYPE
-        elif stat.S_ISLNK(stmd):
-            type = SYMTYPE
-            linkname = os.readlink(name)
-        elif stat.S_ISCHR(stmd):
-            type = CHRTYPE
-        elif stat.S_ISBLK(stmd):
-            type = BLKTYPE
-        else:
-            return None
-
-        # Fill the TarInfo object with all
-        # information we can get.
-        tarinfo.name = arcname
-        tarinfo.mode = stmd
-        tarinfo.uid = statres.st_uid
-        tarinfo.gid = statres.st_gid
-        if type == REGTYPE:
-            tarinfo.size = statres.st_size
-        else:
-            tarinfo.size = 0
-        tarinfo.mtime = statres.st_mtime
-        tarinfo.type = type
-        tarinfo.linkname = linkname
-        if pwd:
-            try:
-                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
-            except KeyError:
-                pass
-        if grp:
-            try:
-                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
-            except KeyError:
-                pass
-
-        if type in (CHRTYPE, BLKTYPE):
-            if hasattr(os, "major") and hasattr(os, "minor"):
-                tarinfo.devmajor = os.major(statres.st_rdev)
-                tarinfo.devminor = os.minor(statres.st_rdev)
-        return tarinfo
-
-    def list(self, verbose=True, *, members=None):
-        """Print a table of contents to sys.stdout. If 'verbose' is False, only
-           the names of the members are printed. If it is True, an 'ls -l'-like
-           output is produced. 'members' is optional and must be a subset of the
-           list returned by getmembers().
-        """
-        # Convert tarinfo type to stat type.
-        type2mode = {REGTYPE: stat.S_IFREG, SYMTYPE: stat.S_IFLNK,
-                     FIFOTYPE: stat.S_IFIFO, CHRTYPE: stat.S_IFCHR,
-                     DIRTYPE: stat.S_IFDIR, BLKTYPE: stat.S_IFBLK}
-        self._check()
-
-        if members is None:
-            members = self
-        for tarinfo in members:
-            if verbose:
-                if tarinfo.mode is None:
-                    _safe_print("??????????")
-                else:
-                    modetype = type2mode.get(tarinfo.type, 0)
-                    _safe_print(stat.filemode(modetype | tarinfo.mode))
-                _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
-                                       tarinfo.gname or tarinfo.gid))
-                if tarinfo.ischr() or tarinfo.isblk():
-                    _safe_print("%10s" %
-                            ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
-                else:
-                    _safe_print("%10d" % tarinfo.size)
-                if tarinfo.mtime is None:
-                    _safe_print("????-??-?? ??:??:??")
-                else:
-                    _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
-                                % time.localtime(tarinfo.mtime)[:6])
-
-            _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
-
-            if verbose:
-                if tarinfo.issym():
-                    _safe_print("-> " + tarinfo.linkname)
-                if tarinfo.islnk():
-                    _safe_print("link to " + tarinfo.linkname)
-            print()
-
-    def add(self, name, arcname=None, recursive=True, *, filter=None):
-        """Add the file 'name' to the archive. 'name' may be any type of file
-           (directory, fifo, symbolic link, etc.). If given, 'arcname'
-           specifies an alternative name for the file in the archive.
-           Directories are added recursively by default. This can be avoided by
-           setting 'recursive' to False. 'filter' is a function
-           that expects a TarInfo object argument and returns the changed
-           TarInfo object; if it returns None, the TarInfo object will be
-           excluded from the archive.
-        """
-        self._check("awx")
-
-        if arcname is None:
-            arcname = name
-
-        # Skip if somebody tries to archive the archive...
-        if self.name is not None and os.path.abspath(name) == self.name:
-            self._dbg(2, "tarfile: Skipped %r" % name)
-            return
-
-        self._dbg(1, name)
-
-        # Create a TarInfo object from the file.
-        tarinfo = self.gettarinfo(name, arcname)
-
-        if tarinfo is None:
-            self._dbg(1, "tarfile: Unsupported type %r" % name)
-            return
-
-        # Change or exclude the TarInfo object.
-        if filter is not None:
-            tarinfo = filter(tarinfo)
-            if tarinfo is None:
-                self._dbg(2, "tarfile: Excluded %r" % name)
-                return
-
-        # Append the tar header and data to the archive.
-        if tarinfo.isreg():
-            with bltn_open(name, "rb") as f:
-                self.addfile(tarinfo, f)
-
-        elif tarinfo.isdir():
-            self.addfile(tarinfo)
-            if recursive:
-                for f in sorted(os.listdir(name)):
-                    self.add(os.path.join(name, f), os.path.join(arcname, f),
-                            recursive, filter=filter)
-
-        else:
-            self.addfile(tarinfo)
-
-    def addfile(self, tarinfo, fileobj=None):
-        """Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents
-           a non zero-size regular file, the 'fileobj' argument should be a binary file,
-           and tarinfo.size bytes are read from it and added to the archive.
-           You can create TarInfo objects directly, or by using gettarinfo().
-        """
-        self._check("awx")
-
-        if fileobj is None and tarinfo.isreg() and tarinfo.size != 0:
-            raise ValueError("fileobj not provided for non zero-size regular file")
-
-        tarinfo = copy.copy(tarinfo)
-
-        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
-        self.fileobj.write(buf)
-        self.offset += len(buf)
-        bufsize = self.copybufsize
-        # If there's data to follow, append it.
-        if fileobj is not None:
-            copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
-            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
-            if remainder > 0:
-                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
-                blocks += 1
-            self.offset += blocks * BLOCKSIZE
-
-        self.members.append(tarinfo)
-
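-    # Sketch of the standard in-memory pattern for addfile() (names are
-    # illustrative):
-    #
-    #     data = b"hello"
-    #     ti = TarInfo("hello.txt")
-    #     ti.size = len(data)
-    #     with TarFile.open("out.tar", "w") as tar:
-    #         tar.addfile(ti, io.BytesIO(data))
-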
-    def _get_filter_function(self, filter):
-        if filter is None:
-            filter = self.extraction_filter
-            if filter is None:
-                import warnings
-                warnings.warn(
-                    'Python 3.14 will, by default, filter extracted tar '
-                    + 'archives and reject files or modify their metadata. '
-                    + 'Use the filter argument to control this behavior.',
-                    DeprecationWarning, stacklevel=3)
-                return fully_trusted_filter
-            if isinstance(filter, str):
-                raise TypeError(
-                    'String names are not supported for '
-                    + 'TarFile.extraction_filter. Use a function such as '
-                    + 'tarfile.data_filter directly.')
-            return filter
-        if callable(filter):
-            return filter
-        try:
-            return _NAMED_FILTERS[filter]
-        except KeyError:
-            raise ValueError(f"filter {filter!r} not found") from None
-
-    def extractall(self, path=".", members=None, *, numeric_owner=False,
-                   filter=None):
-        """Extract all members from the archive to the current working
-           directory and set owner, modification time and permissions on
-           directories afterwards. 'path' specifies a different directory
-           to extract to. 'members' is optional and must be a subset of the
-           list returned by getmembers(). If 'numeric_owner' is True, only
-           the numbers for user/group names are used and not the names.
-
-           The 'filter' function will be called on each member just
-           before extraction.
-           It can return a changed TarInfo or None to skip the member.
-           String names of common filters are accepted.
-        """
-        directories = []
-
-        filter_function = self._get_filter_function(filter)
-        if members is None:
-            members = self
-
-        for member in members:
-            tarinfo = self._get_extract_tarinfo(member, filter_function, path)
-            if tarinfo is None:
-                continue
-            if tarinfo.isdir():
-                # For directories, delay setting attributes until later,
-                # since permissions can interfere with extraction and
-                # extracting contents can reset mtime.
-                directories.append(tarinfo)
-            self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
-                              numeric_owner=numeric_owner)
-
-        # Reverse sort directories.
-        directories.sort(key=lambda a: a.name, reverse=True)
-
-        # Set correct owner, mtime and filemode on directories.
-        for tarinfo in directories:
-            dirpath = os.path.join(path, tarinfo.name)
-            try:
-                self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
-                self.utime(tarinfo, dirpath)
-                self.chmod(tarinfo, dirpath)
-            except ExtractError as e:
-                self._handle_nonfatal_error(e)
-
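-    # Extraction sketch: for untrusted archives, pass a named filter; the
-    # string is resolved through _NAMED_FILTERS (e.g. "data" -> data_filter):
-    #
-    #     with TarFile.open("archive.tar") as tar:
-    #         tar.extractall(path="dest", filter="data")
-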
-    def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
-                filter=None):
-        """Extract a member from the archive to the current working directory,
-           using its full name. Its file information is extracted as accurately
-           as possible. 'member' may be a filename or a TarInfo object. You can
-           specify a different directory using 'path'. File attributes (owner,
-           mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner'
-           is True, only the numbers for user/group names are used and not
-           the names.
-
-           The 'filter' function will be called before extraction.
-           It can return a changed TarInfo or None to skip the member.
-           String names of common filters are accepted.
-        """
-        filter_function = self._get_filter_function(filter)
-        tarinfo = self._get_extract_tarinfo(member, filter_function, path)
-        if tarinfo is not None:
-            self._extract_one(tarinfo, path, set_attrs, numeric_owner)
-
-    def _get_extract_tarinfo(self, member, filter_function, path):
-        """Get filtered TarInfo (or None) from member, which might be a str"""
-        if isinstance(member, str):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        unfiltered = tarinfo
-        try:
-            tarinfo = filter_function(tarinfo, path)
-        except (OSError, FilterError) as e:
-            self._handle_fatal_error(e)
-        except ExtractError as e:
-            self._handle_nonfatal_error(e)
-        if tarinfo is None:
-            self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
-            return None
-        # Prepare the link target for makelink().
-        if tarinfo.islnk():
-            tarinfo = copy.copy(tarinfo)
-            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
-        return tarinfo
-
-    def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
-        """Extract from filtered tarinfo to disk"""
-        self._check("r")
-
-        try:
-            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
-                                 set_attrs=set_attrs,
-                                 numeric_owner=numeric_owner)
-        except OSError as e:
-            self._handle_fatal_error(e)
-        except ExtractError as e:
-            self._handle_nonfatal_error(e)
-
-    def _handle_nonfatal_error(self, e):
-        """Handle non-fatal error (ExtractError) according to errorlevel"""
-        if self.errorlevel > 1:
-            raise
-        else:
-            self._dbg(1, "tarfile: %s" % e)
-
-    def _handle_fatal_error(self, e):
-        """Handle "fatal" error according to self.errorlevel"""
-        if self.errorlevel > 0:
-            raise
-        elif isinstance(e, OSError):
-            if e.filename is None:
-                self._dbg(1, "tarfile: %s" % e.strerror)
-            else:
-                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
-        else:
-            self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
-
-    def extractfile(self, member):
-        """Extract a member from the archive as a file object. 'member' may be
-           a filename or a TarInfo object. If 'member' is a regular file or
-           a link, an io.BufferedReader object is returned. For all other
-           existing members, None is returned. If 'member' does not appear
-           in the archive, KeyError is raised.
-        """
-        self._check("r")
-
-        if isinstance(member, str):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
-            # Members with unknown types are treated as regular files.
-            return self.fileobject(self, tarinfo)
-
-        elif tarinfo.islnk() or tarinfo.issym():
-            if isinstance(self.fileobj, _Stream):
-                # A small but ugly workaround for the case that someone tries
-                # to extract a (sym)link as a file-object from a non-seekable
-                # stream of tar blocks.
-                raise StreamError("cannot extract (sym)link as file object")
-            else:
-                # A (sym)link's file object is its target's file object.
-                return self.extractfile(self._find_link_target(tarinfo))
-        else:
-            # If there's no data associated with the member (directory, chrdev,
-            # blkdev, etc.), return None instead of a file object.
-            return None
-
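-    # Sketch: reading a member's content without writing it to disk
-    # ("hello.txt" is a hypothetical member name):
-    #
-    #     with TarFile.open("archive.tar") as tar:
-    #         f = tar.extractfile("hello.txt")   # None for dirs/devices
-    #         if f is not None:
-    #             content = f.read()
-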
-    def _extract_member(self, tarinfo, targetpath, set_attrs=True,
-                        numeric_owner=False):
-        """Extract the TarInfo object tarinfo to a physical
-           file called targetpath.
-        """
-        # Build the destination pathname, replacing
-        # forward slashes with platform-specific separators.
-        targetpath = targetpath.rstrip("/")
-        targetpath = targetpath.replace("/", os.sep)
-
-        # Create all upper directories.
-        upperdirs = os.path.dirname(targetpath)
-        if upperdirs and not os.path.exists(upperdirs):
-            # Create directories that are not part of the archive with
-            # default permissions.
-            os.makedirs(upperdirs, exist_ok=True)
-
-        if tarinfo.islnk() or tarinfo.issym():
-            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
-        else:
-            self._dbg(1, tarinfo.name)
-
-        if tarinfo.isreg():
-            self.makefile(tarinfo, targetpath)
-        elif tarinfo.isdir():
-            self.makedir(tarinfo, targetpath)
-        elif tarinfo.isfifo():
-            self.makefifo(tarinfo, targetpath)
-        elif tarinfo.ischr() or tarinfo.isblk():
-            self.makedev(tarinfo, targetpath)
-        elif tarinfo.islnk() or tarinfo.issym():
-            self.makelink(tarinfo, targetpath)
-        elif tarinfo.type not in SUPPORTED_TYPES:
-            self.makeunknown(tarinfo, targetpath)
-        else:
-            self.makefile(tarinfo, targetpath)
-
-        if set_attrs:
-            self.chown(tarinfo, targetpath, numeric_owner)
-            if not tarinfo.issym():
-                self.chmod(tarinfo, targetpath)
-                self.utime(tarinfo, targetpath)
-
-    #--------------------------------------------------------------------------
-    # Below are the different file methods. They are called via
-    # _extract_member() when extract() is called. They can be replaced in a
-    # subclass to implement other functionality.
-
-    def makedir(self, tarinfo, targetpath):
-        """Make a directory called targetpath.
-        """
-        try:
-            if tarinfo.mode is None:
-                # Use the system's default mode
-                os.mkdir(targetpath)
-            else:
-                # Use a safe mode for the directory, the real mode is set
-                # later in _extract_member().
-                os.mkdir(targetpath, 0o700)
-        except FileExistsError:
-            if not os.path.isdir(targetpath):
-                raise
-
-    def makefile(self, tarinfo, targetpath):
-        """Make a file called targetpath.
-        """
-        source = self.fileobj
-        source.seek(tarinfo.offset_data)
-        bufsize = self.copybufsize
-        with bltn_open(targetpath, "wb") as target:
-            if tarinfo.sparse is not None:
-                for offset, size in tarinfo.sparse:
-                    target.seek(offset)
-                    copyfileobj(source, target, size, ReadError, bufsize)
-                target.seek(tarinfo.size)
-                target.truncate()
-            else:
-                copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
-
-    def makeunknown(self, tarinfo, targetpath):
-        """Make a file from a TarInfo object with an unknown type
-           at targetpath.
-        """
-        self.makefile(tarinfo, targetpath)
-        self._dbg(1, "tarfile: Unknown file type %r, " \
-                     "extracted as regular file." % tarinfo.type)
-
-    def makefifo(self, tarinfo, targetpath):
-        """Make a fifo called targetpath.
-        """
-        if hasattr(os, "mkfifo"):
-            os.mkfifo(targetpath)
-        else:
-            raise ExtractError("fifo not supported by system")
-
-    def makedev(self, tarinfo, targetpath):
-        """Make a character or block device called targetpath.
-        """
-        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
-            raise ExtractError("special devices not supported by system")
-
-        mode = tarinfo.mode
-        if mode is None:
-            # Use mknod's default
-            mode = 0o600
-        if tarinfo.isblk():
-            mode |= stat.S_IFBLK
-        else:
-            mode |= stat.S_IFCHR
-
-        os.mknod(targetpath, mode,
-                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
-
-    def makelink(self, tarinfo, targetpath):
-        """Make a (symbolic) link called targetpath. If it cannot be created
-          (platform limitation), we try to make a copy of the referenced file
-          instead of a link.
-        """
-        try:
-            # For systems that support symbolic and hard links.
-            if tarinfo.issym():
-                if os.path.lexists(targetpath):
-                    # Avoid FileExistsError on following os.symlink.
-                    os.unlink(targetpath)
-                os.symlink(tarinfo.linkname, targetpath)
-            else:
-                if os.path.exists(tarinfo._link_target):
-                    os.link(tarinfo._link_target, targetpath)
-                else:
-                    self._extract_member(self._find_link_target(tarinfo),
-                                         targetpath)
-        except symlink_exception:
-            try:
-                self._extract_member(self._find_link_target(tarinfo),
-                                     targetpath)
-            except KeyError:
-                raise ExtractError("unable to resolve link inside archive") from None
-
-    def chown(self, tarinfo, targetpath, numeric_owner):
-        """Set owner of targetpath according to tarinfo. If numeric_owner
-           is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
-           is False, fall back to .gid/.uid when the search based on name
-           fails.
-        """
-        if hasattr(os, "geteuid") and os.geteuid() == 0:
-            # We have to be root to do so.
-            g = tarinfo.gid
-            u = tarinfo.uid
-            if not numeric_owner:
-                try:
-                    if grp and tarinfo.gname:
-                        g = grp.getgrnam(tarinfo.gname)[2]
-                except KeyError:
-                    pass
-                try:
-                    if pwd and tarinfo.uname:
-                        u = pwd.getpwnam(tarinfo.uname)[2]
-                except KeyError:
-                    pass
-            if g is None:
-                g = -1
-            if u is None:
-                u = -1
-            try:
-                if tarinfo.issym() and hasattr(os, "lchown"):
-                    os.lchown(targetpath, u, g)
-                else:
-                    os.chown(targetpath, u, g)
-            except (OSError, OverflowError) as e:
-                # OverflowError can be raised if an ID doesn't fit in 'id_t'
-                raise ExtractError("could not change owner") from e
-
-    def chmod(self, tarinfo, targetpath):
-        """Set file permissions of targetpath according to tarinfo.
-        """
-        if tarinfo.mode is None:
-            return
-        try:
-            os.chmod(targetpath, tarinfo.mode)
-        except OSError as e:
-            raise ExtractError("could not change mode") from e
-
-    def utime(self, tarinfo, targetpath):
-        """Set modification time of targetpath according to tarinfo.
-        """
-        mtime = tarinfo.mtime
-        if mtime is None:
-            return
-        if not hasattr(os, 'utime'):
-            return
-        try:
-            os.utime(targetpath, (mtime, mtime))
-        except OSError as e:
-            raise ExtractError("could not change modification time") from e
-
-    #--------------------------------------------------------------------------
-    def next(self):
-        """Return the next member of the archive as a TarInfo object, when
-           TarFile is opened for reading. Return None if there is no more
-           available.
-        """
-        self._check("ra")
-        if self.firstmember is not None:
-            m = self.firstmember
-            self.firstmember = None
-            return m
-
-        # Advance the file pointer.
-        if self.offset != self.fileobj.tell():
-            if self.offset == 0:
-                return None
-            self.fileobj.seek(self.offset - 1)
-            if not self.fileobj.read(1):
-                raise ReadError("unexpected end of data")
-
-        # Read the next block.
-        tarinfo = None
-        while True:
-            try:
-                tarinfo = self.tarinfo.fromtarfile(self)
-            except EOFHeaderError as e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-            except InvalidHeaderError as e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-                elif self.offset == 0:
-                    raise ReadError(str(e)) from None
-            except EmptyHeaderError:
-                if self.offset == 0:
-                    raise ReadError("empty file") from None
-            except TruncatedHeaderError as e:
-                if self.offset == 0:
-                    raise ReadError(str(e)) from None
-            except SubsequentHeaderError as e:
-                raise ReadError(str(e)) from None
-            except Exception as e:
-                try:
-                    import zlib
-                    if isinstance(e, zlib.error):
-                        raise ReadError(f'zlib error: {e}') from None
-                    else:
-                        raise e
-                except ImportError:
-                    raise e
-            break
-
-        if tarinfo is not None:
-            # if streaming the file we do not want to cache the tarinfo
-            if not self.stream:
-                self.members.append(tarinfo)
-        else:
-            self._loaded = True
-
-        return tarinfo
-
-    #--------------------------------------------------------------------------
-    # Little helper methods:
-
-    def _getmember(self, name, tarinfo=None, normalize=False):
-        """Find an archive member by name from bottom to top.
-           If tarinfo is given, it is used as the starting point.
-        """
-        # Ensure that all members have been loaded.
-        members = self.getmembers()
-
-        # Limit the member search list up to tarinfo.
-        skipping = False
-        if tarinfo is not None:
-            try:
-                index = members.index(tarinfo)
-            except ValueError:
-                # The given starting point might be a (modified) copy.
-                # We'll later skip members until we find an equivalent.
-                skipping = True
-            else:
-                # Happy fast path
-                members = members[:index]
-
-        if normalize:
-            name = os.path.normpath(name)
-
-        for member in reversed(members):
-            if skipping:
-                if tarinfo.offset == member.offset:
-                    skipping = False
-                continue
-            if normalize:
-                member_name = os.path.normpath(member.name)
-            else:
-                member_name = member.name
-
-            if name == member_name:
-                return member
-
-        if skipping:
-            # Starting point was not found
-            raise ValueError(tarinfo)
-
-    def _load(self):
-        """Read through the entire archive file and look for readable
-           members. This should not run if the file is set to stream.
-        """
-        if not self.stream:
-            while self.next() is not None:
-                pass
-            self._loaded = True
-
-    def _check(self, mode=None):
-        """Check if TarFile is still open, and if the operation's mode
-           corresponds to TarFile's mode.
-        """
-        if self.closed:
-            raise OSError("%s is closed" % self.__class__.__name__)
-        if mode is not None and self.mode not in mode:
-            raise OSError("bad operation for mode %r" % self.mode)
-
-    def _find_link_target(self, tarinfo):
-        """Find the target member of a symlink or hardlink member in the
-           archive.
-        """
-        if tarinfo.issym():
-            # Always search the entire archive.
-            linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
-            limit = None
-        else:
-            # Search the archive before the link, because a hard link is
-            # just a reference to an already archived file.
-            linkname = tarinfo.linkname
-            limit = tarinfo
-
-        member = self._getmember(linkname, tarinfo=limit, normalize=True)
-        if member is None:
-            raise KeyError("linkname %r not found" % linkname)
-        return member
-
-    def __iter__(self):
-        """Provide an iterator object.
-        """
-        if self._loaded:
-            yield from self.members
-            return
-
-        # Yield items using TarFile's next() method.
-        # When all members have been read, set TarFile as _loaded.
-        index = 0
-        # Fix for SF #1100429: Under rare circumstances it can
-        # happen that getmembers() is called during iteration,
-        # which will have already exhausted the next() method.
-        if self.firstmember is not None:
-            tarinfo = self.next()
-            index += 1
-            yield tarinfo
-
-        while True:
-            if index < len(self.members):
-                tarinfo = self.members[index]
-            elif not self._loaded:
-                tarinfo = self.next()
-                if not tarinfo:
-                    self._loaded = True
-                    return
-            else:
-                return
-            index += 1
-            yield tarinfo
-
-    def _dbg(self, level, msg):
-        """Write debugging output to sys.stderr.
-        """
-        if level <= self.debug:
-            print(msg, file=sys.stderr)
-
-    def __enter__(self):
-        self._check()
-        return self
-
-    def __exit__(self, type, value, traceback):
-        if type is None:
-            self.close()
-        else:
-            # An exception occurred. We must not call close() because
-            # it would try to write end-of-archive blocks and padding.
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-
-#--------------------
-# exported functions
-#--------------------
-
-def is_tarfile(name):
-    """Return True if name points to a tar archive that we
-       are able to handle, else return False.
-
-       'name' should be a string, file, or file-like object.
-    """
-    try:
-        if hasattr(name, "read"):
-            pos = name.tell()
-            t = open(fileobj=name)
-            name.seek(pos)
-        else:
-            t = open(name)
-        t.close()
-        return True
-    except TarError:
-        return False
-
-open = TarFile.open
-
-
-def main():
-    import argparse
-
-    description = 'A simple command-line interface for tarfile module.'
-    parser = argparse.ArgumentParser(description=description)
-    parser.add_argument('-v', '--verbose', action='store_true', default=False,
-                        help='Verbose output')
-    parser.add_argument('--filter', metavar='<filtername>',
-                        choices=_NAMED_FILTERS,
-                        help='Filter for extraction')
-
-    group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument('-l', '--list', metavar='<tarfile>',
-                       help='Show listing of a tarfile')
-    group.add_argument('-e', '--extract', nargs='+',
-                       metavar=('<tarfile>', '<output_dir>'),
-                       help='Extract tarfile into target dir')
-    group.add_argument('-c', '--create', nargs='+',
-                       metavar=('<name>', '<file>'),
-                       help='Create tarfile from sources')
-    group.add_argument('-t', '--test', metavar='<tarfile>',
-                       help='Test if a tarfile is valid')
-
-    args = parser.parse_args()
-
-    if args.filter and args.extract is None:
-        parser.exit(1, '--filter is only valid for extraction\n')
-
-    if args.test is not None:
-        src = args.test
-        if is_tarfile(src):
-            with open(src, 'r') as tar:
-                tar.getmembers()
-                print(tar.getmembers(), file=sys.stderr)
-            if args.verbose:
-                print('{!r} is a tar archive.'.format(src))
-        else:
-            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
-
-    elif args.list is not None:
-        src = args.list
-        if is_tarfile(src):
-            with TarFile.open(src, 'r:*') as tf:
-                tf.list(verbose=args.verbose)
-        else:
-            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
-
-    elif args.extract is not None:
-        if len(args.extract) == 1:
-            src = args.extract[0]
-            curdir = os.curdir
-        elif len(args.extract) == 2:
-            src, curdir = args.extract
-        else:
-            parser.exit(1, parser.format_help())
-
-        if is_tarfile(src):
-            with TarFile.open(src, 'r:*') as tf:
-                tf.extractall(path=curdir, filter=args.filter)
-            if args.verbose:
-                if curdir == '.':
-                    msg = '{!r} file is extracted.'.format(src)
-                else:
-                    msg = ('{!r} file is extracted '
-                           'into {!r} directory.').format(src, curdir)
-                print(msg)
-        else:
-            parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
-
-    elif args.create is not None:
-        tar_name = args.create.pop(0)
-        _, ext = os.path.splitext(tar_name)
-        compressions = {
-            # gz
-            '.gz': 'gz',
-            '.tgz': 'gz',
-            # xz
-            '.xz': 'xz',
-            '.txz': 'xz',
-            # bz2
-            '.bz2': 'bz2',
-            '.tbz': 'bz2',
-            '.tbz2': 'bz2',
-            '.tb2': 'bz2',
-        }
-        tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
-        tar_files = args.create
-
-        with TarFile.open(tar_name, tar_mode) as tf:
-            for file_name in tar_files:
-                tf.add(file_name)
-
-        if args.verbose:
-            print('{!r} file created.'.format(tar_name))
-
-if __name__ == '__main__':
-    main()
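
For reference, the vendored module deleted above tracks the stdlib
``tarfile`` CLI; a minimal sketch of the equivalent programmatic use,
assuming Python 3.12+ (or this backport) and a hypothetical archive name::

    import tarfile

    # Extract with the 'data' named filter -- the same call main() makes
    # via tf.extractall(path=curdir, filter=args.filter).
    with tarfile.open('example.tar.gz', 'r:*') as tf:
        tf.extractall(path='out', filter='data')
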
diff --git a/pkg_resources/_vendor/backports/tarfile/__main__.py b/pkg_resources/_vendor/backports/tarfile/__main__.py
deleted file mode 100644
index daf5509086..0000000000
--- a/pkg_resources/_vendor/backports/tarfile/__main__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from . import main
-
-
-if __name__ == '__main__':
-    main()
diff --git a/pkg_resources/_vendor/backports/tarfile/compat/__init__.py b/pkg_resources/_vendor/backports/tarfile/compat/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/backports/tarfile/compat/py38.py b/pkg_resources/_vendor/backports/tarfile/compat/py38.py
deleted file mode 100644
index 20fbbfc1c0..0000000000
--- a/pkg_resources/_vendor/backports/tarfile/compat/py38.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import sys
-
-
-if sys.version_info < (3, 9):
-
-    def removesuffix(self, suffix):
-        # suffix='' should not call self[:-0].
-        if suffix and self.endswith(suffix):
-            return self[: -len(suffix)]
-        else:
-            return self[:]
-
-    def removeprefix(self, prefix):
-        if self.startswith(prefix):
-            return self[len(prefix) :]
-        else:
-            return self[:]
-else:
-
-    def removesuffix(self, suffix):
-        return self.removesuffix(suffix)
-
-    def removeprefix(self, prefix):
-        return self.removeprefix(prefix)
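
A usage sketch for the shims deleted above: they are free functions that
take the string as the first argument, so call sites stay uniform on
Python 3.8 and newer (import path assumed from the vendored layout)::

    from backports.tarfile.compat.py38 import removeprefix, removesuffix

    assert removesuffix('archive.tar', '.tar') == 'archive'
    assert removeprefix('./member.txt', './') == 'member.txt'
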
diff --git a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/LICENSE b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/METADATA b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/METADATA
deleted file mode 100644
index b088e721d2..0000000000
--- a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/METADATA
+++ /dev/null
@@ -1,100 +0,0 @@
-Metadata-Version: 2.1
-Name: importlib_resources
-Version: 6.4.0
-Summary: Read resources from Python packages
-Home-page: https://github.com/python/importlib_resources
-Author: Barry Warsaw
-Author-email: barry@python.org
-Project-URL: Documentation, https://importlib-resources.readthedocs.io/
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-License-File: LICENSE
-Requires-Dist: zipp >=3.1.0 ; python_version < "3.10"
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest >=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
-Requires-Dist: zipp >=3.17 ; extra == 'testing'
-Requires-Dist: jaraco.test >=5.4 ; extra == 'testing'
-Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/importlib_resources.svg
-   :target: https://pypi.org/project/importlib_resources
-
-.. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg
-
-.. image:: https://github.com/python/importlib_resources/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest
-   :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/importlib-resources
-   :target: https://tidelift.com/subscription/pkg/pypi-importlib-resources?utm_source=pypi-importlib-resources&utm_medium=readme
-
-``importlib_resources`` is a backport of Python standard library
-`importlib.resources
-<https://docs.python.org/3/library/importlib.resources.html>`_
-module for older Pythons.
-
-The key goal of this module is to replace parts of `pkg_resources
-<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a
-solution in Python's stdlib that relies on well-defined APIs.  This makes
-reading resources included in packages easier, with more stable and consistent
-semantics.
-
-Compatibility
-=============
-
-New features are introduced in this third-party library and later merged
-into CPython. The following table indicates which versions of this library
-were contributed to different versions in the standard library:
-
-.. list-table::
-   :header-rows: 1
-
-   * - importlib_resources
-     - stdlib
-   * - 6.0
-     - 3.13
-   * - 5.12
-     - 3.12
-   * - 5.7
-     - 3.11
-   * - 5.0
-     - 3.10
-   * - 1.3
-     - 3.9
-   * - 0.5 (?)
-     - 3.7
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more <https://tidelift.com/subscription/pkg/pypi-importlib-resources?utm_source=pypi-importlib-resources&utm_medium=referral&utm_campaign=github>`_.
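
A minimal sketch of the API this README describes, assuming a package
``mypkg`` that ships a ``data.txt`` resource (both names hypothetical)::

    import importlib_resources

    resource = importlib_resources.files('mypkg') / 'data.txt'
    text = resource.read_text(encoding='utf-8')

    # as_file() yields a concrete filesystem path, extracting to a
    # temporary file when the package is imported from a zip.
    with importlib_resources.as_file(resource) as path:
        print(path)
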
diff --git a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/RECORD b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/RECORD
deleted file mode 100644
index 18888dea71..0000000000
--- a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/RECORD
+++ /dev/null
@@ -1,89 +0,0 @@
-importlib_resources-6.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-importlib_resources-6.4.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-importlib_resources-6.4.0.dist-info/METADATA,sha256=g4eM2LuL0OiZcUVND0qwDJUpE29gOvtO3BSPXTbO9Fk,3944
-importlib_resources-6.4.0.dist-info/RECORD,,
-importlib_resources-6.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources-6.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-importlib_resources-6.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
-importlib_resources/__init__.py,sha256=uyp1kzYR6SawQBsqlyaXXfIxJx4Z2mM8MjmZn8qq2Gk,505
-importlib_resources/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/__pycache__/_adapters.cpython-312.pyc,,
-importlib_resources/__pycache__/_common.cpython-312.pyc,,
-importlib_resources/__pycache__/_itertools.cpython-312.pyc,,
-importlib_resources/__pycache__/abc.cpython-312.pyc,,
-importlib_resources/__pycache__/functional.cpython-312.pyc,,
-importlib_resources/__pycache__/readers.cpython-312.pyc,,
-importlib_resources/__pycache__/simple.cpython-312.pyc,,
-importlib_resources/_adapters.py,sha256=vprJGbUeHbajX6XCuMP6J3lMrqCi-P_MTlziJUR7jfk,4482
-importlib_resources/_common.py,sha256=blt4-ZtHnbUPzQQyPP7jLGgl_86btIW5ZhIsEhclhoA,5571
-importlib_resources/_itertools.py,sha256=eDisV6RqiNZOogLSXf6LOGHOYc79FGgPrKNLzFLmCrU,1277
-importlib_resources/abc.py,sha256=UKNU9ncEDkZRB3txcGb3WLxsL2iju9JbaLTI-dfLE_4,5162
-importlib_resources/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/compat/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/compat/__pycache__/py38.cpython-312.pyc,,
-importlib_resources/compat/__pycache__/py39.cpython-312.pyc,,
-importlib_resources/compat/py38.py,sha256=MWhut3XsAJwBYUaa5Qb2AoCrZNqcQjVThP-P1uBoE_4,230
-importlib_resources/compat/py39.py,sha256=Wfln4uQUShNz1XdCG-toG6_Y0WrlUmO9JzpvtcfQ-Cw,184
-importlib_resources/functional.py,sha256=mLU4DwSlh8_2IXWqwKOfPVxyRqAEpB3B4XTfRxr3X3M,2651
-importlib_resources/future/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/future/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/future/__pycache__/adapters.cpython-312.pyc,,
-importlib_resources/future/adapters.py,sha256=1-MF2VRcCButhcC1OMfZILU9o3kwZ4nXB2lurXpaIAw,2940
-importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/readers.py,sha256=WNKurBHHVu9EVtUhWkOj2fxH50HP7uanNFuupAqH2S8,5863
-importlib_resources/simple.py,sha256=CQ3TiIMFiJs_80o-7xJL1EpbUUVna4-NGDrSTQ3HW2Y,2584
-importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/_path.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_custom.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_functional.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/util.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/zip.cpython-312.pyc,,
-importlib_resources/tests/_path.py,sha256=nkv3ek7D1U898v921rYbldDCtKri2oyYOi3EJqGjEGU,1289
-importlib_resources/tests/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/compat/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/compat/__pycache__/py312.cpython-312.pyc,,
-importlib_resources/tests/compat/__pycache__/py39.cpython-312.pyc,,
-importlib_resources/tests/compat/py312.py,sha256=qcWjpZhQo2oEsdwIlRRQHrsMGDltkFTnETeG7fLdUS8,364
-importlib_resources/tests/compat/py39.py,sha256=lRTk0RWAOEb9RzAgvdRnqJUGCBLc3qoFQwzuJSa_zP4,329
-importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data01/subdirectory/binary.file,sha256=xtRM9Bj2EOP-nh2SlP9D3vgcbNytbLsYIM_0jTqkNV0,4
-importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
-importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
-importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
-importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt,sha256=jnrBBztxYrtQck7cmVnc4xQVO4-agzAZDGSFkAWtlFw,10
-importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
-importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/namespacedata01/subdirectory/binary.file,sha256=cbkhEL8TXIVYHIoSj2oZwPasp1KwxskeNXGJnPCbFF0,4
-importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
-importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
-importlib_resources/tests/test_compatibilty_files.py,sha256=95N_R7aik8cvnE6sBJpsxmP0K5plOWRIJDgbalD-Hpw,3314
-importlib_resources/tests/test_contents.py,sha256=70HW3mL_hv05Emv-OgdmwoLhXxjtuVxiWVaUpgRaRWA,930
-importlib_resources/tests/test_custom.py,sha256=QrHZqIWl0e-fsQRfm0ych8stOlKJOsAIU3rK6QOcyN0,1221
-importlib_resources/tests/test_files.py,sha256=OcShYu33kCcyXlDyZSVPkJNE08h-N_4bQOLV2QaSqX0,3472
-importlib_resources/tests/test_functional.py,sha256=ByCVViAwb2PIlKvDNJEqTZ0aLZGpFl5qa7CMCX-7HKM,8591
-importlib_resources/tests/test_open.py,sha256=ccmzbOeEa6zTd4ymZZ8yISrecfuYV0jhon-Vddqysu4,2778
-importlib_resources/tests/test_path.py,sha256=x8r2gJxG3hFM9xCOFNkgmHYXxsMldMLTSW_AZYf1l-A,2009
-importlib_resources/tests/test_read.py,sha256=7tsILQ2NoqVGFQxhHqxBwc5hWcN8b_3idojCsszTNfQ,3112
-importlib_resources/tests/test_reader.py,sha256=IcIUXaiPAtuahGV4_ZT4YXFLMMsJmcM1iOxqdIH2Aa4,5001
-importlib_resources/tests/test_resource.py,sha256=fcF8WgZ6rDCTRFnxtAUbdiaNe4G23yGovT1nb2dc7ls,7823
-importlib_resources/tests/util.py,sha256=vjVzEyX0X2RkTN-wGiQiplayp9sZom4JDjJinTNewos,4745
-importlib_resources/tests/zip.py,sha256=2MKmF8-osXBJSnqcUTuAUek_-tSB3iKmIT9qPhcsOsM,783
diff --git a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/WHEEL b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt b/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
deleted file mode 100644
index 58ad1bd333..0000000000
--- a/pkg_resources/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-importlib_resources
diff --git a/pkg_resources/_vendor/importlib_resources/__init__.py b/pkg_resources/_vendor/importlib_resources/__init__.py
deleted file mode 100644
index 0d029abd63..0000000000
--- a/pkg_resources/_vendor/importlib_resources/__init__.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""Read resources contained within a package."""
-
-from ._common import (
-    as_file,
-    files,
-    Package,
-    Anchor,
-)
-
-from .functional import (
-    contents,
-    is_resource,
-    open_binary,
-    open_text,
-    path,
-    read_binary,
-    read_text,
-)
-
-from .abc import ResourceReader
-
-
-__all__ = [
-    'Package',
-    'Anchor',
-    'ResourceReader',
-    'as_file',
-    'files',
-    'contents',
-    'is_resource',
-    'open_binary',
-    'open_text',
-    'path',
-    'read_binary',
-    'read_text',
-]
diff --git a/pkg_resources/_vendor/importlib_resources/_adapters.py b/pkg_resources/_vendor/importlib_resources/_adapters.py
deleted file mode 100644
index 50688fbb66..0000000000
--- a/pkg_resources/_vendor/importlib_resources/_adapters.py
+++ /dev/null
@@ -1,168 +0,0 @@
-from contextlib import suppress
-from io import TextIOWrapper
-
-from . import abc
-
-
-class SpecLoaderAdapter:
-    """
-    Adapt a package spec to adapt the underlying loader.
-    """
-
-    def __init__(self, spec, adapter=lambda spec: spec.loader):
-        self.spec = spec
-        self.loader = adapter(spec)
-
-    def __getattr__(self, name):
-        return getattr(self.spec, name)
-
-
-class TraversableResourcesLoader:
-    """
-    Adapt a loader to provide TraversableResources.
-    """
-
-    def __init__(self, spec):
-        self.spec = spec
-
-    def get_resource_reader(self, name):
-        return CompatibilityFiles(self.spec)._native()
-
-
-def _io_wrapper(file, mode='r', *args, **kwargs):
-    if mode == 'r':
-        return TextIOWrapper(file, *args, **kwargs)
-    elif mode == 'rb':
-        return file
-    raise ValueError(f"Invalid mode value '{mode}', only 'r' and 'rb' are supported")
-
-
-class CompatibilityFiles:
-    """
-    Adapter for an existing or non-existent resource reader
-    to provide a compatibility .files().
-    """
-
-    class SpecPath(abc.Traversable):
-        """
-        Path tied to a module spec.
-        Can be read and exposes the resource reader children.
-        """
-
-        def __init__(self, spec, reader):
-            self._spec = spec
-            self._reader = reader
-
-        def iterdir(self):
-            if not self._reader:
-                return iter(())
-            return iter(
-                CompatibilityFiles.ChildPath(self._reader, path)
-                for path in self._reader.contents()
-            )
-
-        def is_file(self):
-            return False
-
-        is_dir = is_file
-
-        def joinpath(self, other):
-            if not self._reader:
-                return CompatibilityFiles.OrphanPath(other)
-            return CompatibilityFiles.ChildPath(self._reader, other)
-
-        @property
-        def name(self):
-            return self._spec.name
-
-        def open(self, mode='r', *args, **kwargs):
-            return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)
-
-    class ChildPath(abc.Traversable):
-        """
-        Path tied to a resource reader child.
-        Can be read but doesn't expose any meaningful children.
-        """
-
-        def __init__(self, reader, name):
-            self._reader = reader
-            self._name = name
-
-        def iterdir(self):
-            return iter(())
-
-        def is_file(self):
-            return self._reader.is_resource(self.name)
-
-        def is_dir(self):
-            return not self.is_file()
-
-        def joinpath(self, other):
-            return CompatibilityFiles.OrphanPath(self.name, other)
-
-        @property
-        def name(self):
-            return self._name
-
-        def open(self, mode='r', *args, **kwargs):
-            return _io_wrapper(
-                self._reader.open_resource(self.name), mode, *args, **kwargs
-            )
-
-    class OrphanPath(abc.Traversable):
-        """
-        Orphan path, not tied to a module spec or resource reader.
-        Can't be read and doesn't expose any meaningful children.
-        """
-
-        def __init__(self, *path_parts):
-            if len(path_parts) < 1:
-                raise ValueError('Need at least one path part to construct a path')
-            self._path = path_parts
-
-        def iterdir(self):
-            return iter(())
-
-        def is_file(self):
-            return False
-
-        is_dir = is_file
-
-        def joinpath(self, other):
-            return CompatibilityFiles.OrphanPath(*self._path, other)
-
-        @property
-        def name(self):
-            return self._path[-1]
-
-        def open(self, mode='r', *args, **kwargs):
-            raise FileNotFoundError("Can't open orphan path")
-
-    def __init__(self, spec):
-        self.spec = spec
-
-    @property
-    def _reader(self):
-        with suppress(AttributeError):
-            return self.spec.loader.get_resource_reader(self.spec.name)
-
-    def _native(self):
-        """
-        Return the native reader if it supports files().
-        """
-        reader = self._reader
-        return reader if hasattr(reader, 'files') else self
-
-    def __getattr__(self, attr):
-        return getattr(self._reader, attr)
-
-    def files(self):
-        return CompatibilityFiles.SpecPath(self.spec, self._reader)
-
-
-def wrap_spec(package):
-    """
-    Construct a package spec with traversable compatibility
-    on the spec/loader/reader.
-    """
-    return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
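
A sketch of how the adapter deleted above can be applied, mirroring the
pattern of ``from_package()`` in ``_common.py`` (package name hypothetical)::

    import mypkg
    from importlib_resources._adapters import wrap_spec

    spec = wrap_spec(mypkg)
    reader = spec.loader.get_resource_reader(spec.name)
    files = reader.files()  # a Traversable, even for legacy loaders
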
diff --git a/pkg_resources/_vendor/importlib_resources/_common.py b/pkg_resources/_vendor/importlib_resources/_common.py
deleted file mode 100644
index 8df6b39e41..0000000000
--- a/pkg_resources/_vendor/importlib_resources/_common.py
+++ /dev/null
@@ -1,210 +0,0 @@
-import os
-import pathlib
-import tempfile
-import functools
-import contextlib
-import types
-import importlib
-import inspect
-import warnings
-import itertools
-
-from typing import Union, Optional, cast
-from .abc import ResourceReader, Traversable
-
-Package = Union[types.ModuleType, str]
-Anchor = Package
-
-
-def package_to_anchor(func):
-    """
-    Replace 'package' parameter as 'anchor' and warn about the change.
-
-    Other errors should fall through.
-
-    >>> files('a', 'b')
-    Traceback (most recent call last):
-    TypeError: files() takes from 0 to 1 positional arguments but 2 were given
-
-    Remove this compatibility in Python 3.14.
-    """
-    undefined = object()
-
-    @functools.wraps(func)
-    def wrapper(anchor=undefined, package=undefined):
-        if package is not undefined:
-            if anchor is not undefined:
-                return func(anchor, package)
-            warnings.warn(
-                "First parameter to files is renamed to 'anchor'",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            return func(package)
-        elif anchor is undefined:
-            return func()
-        return func(anchor)
-
-    return wrapper
-
-
-@package_to_anchor
-def files(anchor: Optional[Anchor] = None) -> Traversable:
-    """
-    Get a Traversable resource for an anchor.
-    """
-    return from_package(resolve(anchor))
-
-
-def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
-    """
-    Return the package's loader if it's a ResourceReader.
-    """
-    # We can't use
-    # a issubclass() check here because apparently abc.'s __subclasscheck__()
-    # hook wants to create a weak reference to the object, but
-    # zipimport.zipimporter does not support weak references, resulting in a
-    # TypeError.  That seems terrible.
-    spec = package.__spec__
-    reader = getattr(spec.loader, 'get_resource_reader', None)  # type: ignore
-    if reader is None:
-        return None
-    return reader(spec.name)  # type: ignore
-
-
-@functools.singledispatch
-def resolve(cand: Optional[Anchor]) -> types.ModuleType:
-    return cast(types.ModuleType, cand)
-
-
-@resolve.register
-def _(cand: str) -> types.ModuleType:
-    return importlib.import_module(cand)
-
-
-@resolve.register
-def _(cand: None) -> types.ModuleType:
-    return resolve(_infer_caller().f_globals['__name__'])
-
-
-def _infer_caller():
-    """
-    Walk the stack and find the frame of the first caller not in this module.
-    """
-
-    def is_this_file(frame_info):
-        return frame_info.filename == __file__
-
-    def is_wrapper(frame_info):
-        return frame_info.function == 'wrapper'
-
-    not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
-    # also exclude 'wrapper' due to singledispatch in the call stack
-    callers = itertools.filterfalse(is_wrapper, not_this_file)
-    return next(callers).frame
-
-
-def from_package(package: types.ModuleType):
-    """
-    Return a Traversable object for the given package.
-
-    """
-    # deferred for performance (python/cpython#109829)
-    from .future.adapters import wrap_spec
-
-    spec = wrap_spec(package)
-    reader = spec.loader.get_resource_reader(spec.name)
-    return reader.files()
-
-
-@contextlib.contextmanager
-def _tempfile(
-    reader,
-    suffix='',
-    # gh-93353: Keep a reference to call os.remove() in late Python
-    # finalization.
-    *,
-    _os_remove=os.remove,
-):
-    # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
-    # blocks due to the need to close the temporary file to work on Windows
-    # properly.
-    fd, raw_path = tempfile.mkstemp(suffix=suffix)
-    try:
-        try:
-            os.write(fd, reader())
-        finally:
-            os.close(fd)
-        del reader
-        yield pathlib.Path(raw_path)
-    finally:
-        try:
-            _os_remove(raw_path)
-        except FileNotFoundError:
-            pass
-
-
-def _temp_file(path):
-    return _tempfile(path.read_bytes, suffix=path.name)
-
-
-def _is_present_dir(path: Traversable) -> bool:
-    """
-    Some Traversables implement ``is_dir()`` to raise an
-    exception (i.e. ``FileNotFoundError``) when the
-    directory doesn't exist. This function wraps that call
-    to always return a boolean and only return True
-    if there's a dir and it exists.
-    """
-    with contextlib.suppress(FileNotFoundError):
-        return path.is_dir()
-    return False
-
-
-@functools.singledispatch
-def as_file(path):
-    """
-    Given a Traversable object, return that object as a
-    path on the local file system in a context manager.
-    """
-    return _temp_dir(path) if _is_present_dir(path) else _temp_file(path)
-
-
-@as_file.register(pathlib.Path)
-@contextlib.contextmanager
-def _(path):
-    """
-    Degenerate behavior for pathlib.Path objects.
-    """
-    yield path
-
-
-@contextlib.contextmanager
-def _temp_path(dir: tempfile.TemporaryDirectory):
-    """
-    Wrap tempfile.TemporaryDirectory to return a pathlib object.
-    """
-    with dir as result:
-        yield pathlib.Path(result)
-
-
-@contextlib.contextmanager
-def _temp_dir(path):
-    """
-    Given a traversable dir, recursively replicate the whole tree
-    to the file system in a context manager.
-    """
-    assert path.is_dir()
-    with _temp_path(tempfile.TemporaryDirectory()) as temp_dir:
-        yield _write_contents(temp_dir, path)
-
-
-def _write_contents(target, source):
-    child = target.joinpath(source.name)
-    if source.is_dir():
-        child.mkdir()
-        for item in source.iterdir():
-            _write_contents(child, item)
-    else:
-        child.write_bytes(source.read_bytes())
-    return child
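
A sketch of the anchor resolution implemented above: ``files()`` accepts
a dotted name, an imported module, or no argument at all::

    import email  # any importable package can serve as an anchor
    from importlib_resources import files

    files('email')  # str anchor: resolved via importlib.import_module
    files(email)    # module anchor: used as-is
    files()         # no anchor: inferred from the caller's frame
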
diff --git a/pkg_resources/_vendor/importlib_resources/_itertools.py b/pkg_resources/_vendor/importlib_resources/_itertools.py
deleted file mode 100644
index 7b775ef5ae..0000000000
--- a/pkg_resources/_vendor/importlib_resources/_itertools.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# from more_itertools 9.0
-def only(iterable, default=None, too_long=None):
-    """If *iterable* has only one item, return it.
-    If it has zero items, return *default*.
-    If it has more than one item, raise the exception given by *too_long*,
-    which is ``ValueError`` by default.
-    >>> only([], default='missing')
-    'missing'
-    >>> only([1])
-    1
-    >>> only([1, 2])  # doctest: +IGNORE_EXCEPTION_DETAIL
-    Traceback (most recent call last):
-    ...
-    ValueError: Expected exactly one item in iterable, but got 1, 2,
-     and perhaps more.'
-    >>> only([1, 2], too_long=TypeError)  # doctest: +IGNORE_EXCEPTION_DETAIL
-    Traceback (most recent call last):
-    ...
-    TypeError
-    Note that :func:`only` attempts to advance *iterable* twice to ensure there
-    is only one item.  See :func:`spy` or :func:`peekable` to check
-    iterable contents less destructively.
-    """
-    it = iter(iterable)
-    first_value = next(it, default)
-
-    try:
-        second_value = next(it)
-    except StopIteration:
-        pass
-    else:
-        msg = (
-            'Expected exactly one item in iterable, but got {!r}, {!r}, '
-            'and perhaps more.'.format(first_value, second_value)
-        )
-        raise too_long or ValueError(msg)
-
-    return first_value
diff --git a/pkg_resources/_vendor/importlib_resources/abc.py b/pkg_resources/_vendor/importlib_resources/abc.py
deleted file mode 100644
index 7a58dd2f96..0000000000
--- a/pkg_resources/_vendor/importlib_resources/abc.py
+++ /dev/null
@@ -1,171 +0,0 @@
-import abc
-import io
-import itertools
-import pathlib
-from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional
-from typing import runtime_checkable, Protocol
-
-from .compat.py38 import StrPath
-
-
-__all__ = ["ResourceReader", "Traversable", "TraversableResources"]
-
-
-class ResourceReader(metaclass=abc.ABCMeta):
-    """Abstract base class for loaders to provide resource reading support."""
-
-    @abc.abstractmethod
-    def open_resource(self, resource: Text) -> BinaryIO:
-        """Return an opened, file-like object for binary reading.
-
-        The 'resource' argument is expected to represent only a file name.
-        If the resource cannot be found, FileNotFoundError is raised.
-        """
-        # This deliberately raises FileNotFoundError instead of
-        # NotImplementedError so that if this method is accidentally called,
-        # it'll still do the right thing.
-        raise FileNotFoundError
-
-    @abc.abstractmethod
-    def resource_path(self, resource: Text) -> Text:
-        """Return the file system path to the specified resource.
-
-        The 'resource' argument is expected to represent only a file name.
-        If the resource does not exist on the file system, raise
-        FileNotFoundError.
-        """
-        # This deliberately raises FileNotFoundError instead of
-        # NotImplementedError so that if this method is accidentally called,
-        # it'll still do the right thing.
-        raise FileNotFoundError
-
-    @abc.abstractmethod
-    def is_resource(self, path: Text) -> bool:
-        """Return True if the named 'path' is a resource.
-
-        Files are resources, directories are not.
-        """
-        raise FileNotFoundError
-
-    @abc.abstractmethod
-    def contents(self) -> Iterable[str]:
-        """Return an iterable of entries in `package`."""
-        raise FileNotFoundError
-
-
-class TraversalError(Exception):
-    pass
-
-
-@runtime_checkable
-class Traversable(Protocol):
-    """
-    An object with a subset of pathlib.Path methods suitable for
-    traversing directories and opening files.
-
-    Any exceptions that occur when accessing the backing resource
-    may propagate unaltered.
-    """
-
-    @abc.abstractmethod
-    def iterdir(self) -> Iterator["Traversable"]:
-        """
-        Yield Traversable objects in self
-        """
-
-    def read_bytes(self) -> bytes:
-        """
-        Read contents of self as bytes
-        """
-        with self.open('rb') as strm:
-            return strm.read()
-
-    def read_text(self, encoding: Optional[str] = None) -> str:
-        """
-        Read contents of self as text
-        """
-        with self.open(encoding=encoding) as strm:
-            return strm.read()
-
-    @abc.abstractmethod
-    def is_dir(self) -> bool:
-        """
-        Return True if self is a directory
-        """
-
-    @abc.abstractmethod
-    def is_file(self) -> bool:
-        """
-        Return True if self is a file
-        """
-
-    def joinpath(self, *descendants: StrPath) -> "Traversable":
-        """
-        Return Traversable resolved with any descendants applied.
-
-        Each descendant should be a path segment relative to self
-        and each may contain multiple levels separated by
-        ``posixpath.sep`` (``/``).
-        """
-        if not descendants:
-            return self
-        names = itertools.chain.from_iterable(
-            path.parts for path in map(pathlib.PurePosixPath, descendants)
-        )
-        target = next(names)
-        matches = (
-            traversable for traversable in self.iterdir() if traversable.name == target
-        )
-        try:
-            match = next(matches)
-        except StopIteration:
-            raise TraversalError(
-                "Target not found during traversal.", target, list(names)
-            )
-        return match.joinpath(*names)
-
-    def __truediv__(self, child: StrPath) -> "Traversable":
-        """
-        Return Traversable child in self
-        """
-        return self.joinpath(child)
-
-    @abc.abstractmethod
-    def open(self, mode='r', *args, **kwargs):
-        """
-        mode may be 'r' or 'rb' to open as text or binary. Return a handle
-        suitable for reading (same as pathlib.Path.open).
-
-        When opening as text, accepts encoding parameters such as those
-        accepted by io.TextIOWrapper.
-        """
-
-    @property
-    @abc.abstractmethod
-    def name(self) -> str:
-        """
-        The base name of this object without any parent references.
-        """
-
-
-class TraversableResources(ResourceReader):
-    """
-    The required interface for providing traversable
-    resources.
-    """
-
-    @abc.abstractmethod
-    def files(self) -> "Traversable":
-        """Return a Traversable object for the loaded package."""
-
-    def open_resource(self, resource: StrPath) -> io.BufferedReader:
-        return self.files().joinpath(resource).open('rb')
-
-    def resource_path(self, resource: Any) -> NoReturn:
-        raise FileNotFoundError(resource)
-
-    def is_resource(self, path: StrPath) -> bool:
-        return self.files().joinpath(path).is_file()
-
-    def contents(self) -> Iterator[str]:
-        return (item.name for item in self.files().iterdir())
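
A standalone sketch of the segment splitting that joinpath() above performs before walking iterdir(): each descendant may carry multiple '/'-separated levels, and all of them are flattened into one stream of names.

import itertools
import pathlib

descendants = ('subdirectory/subsubdir', 'resource.txt')
names = itertools.chain.from_iterable(
    pathlib.PurePosixPath(d).parts for d in descendants
)
# The traversal then consumes one name per directory level.
print(list(names))  # ['subdirectory', 'subsubdir', 'resource.txt']
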
diff --git a/pkg_resources/_vendor/importlib_resources/compat/__init__.py b/pkg_resources/_vendor/importlib_resources/compat/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/compat/py38.py b/pkg_resources/_vendor/importlib_resources/compat/py38.py
deleted file mode 100644
index 4d548257f8..0000000000
--- a/pkg_resources/_vendor/importlib_resources/compat/py38.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import os
-import sys
-
-from typing import Union
-
-
-if sys.version_info >= (3, 9):
-    StrPath = Union[str, os.PathLike[str]]
-else:
-    # PathLike is only subscriptable at runtime in 3.9+
-    StrPath = Union[str, "os.PathLike[str]"]
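
Why the quoted form above matters: before Python 3.9, subscripting os.PathLike at runtime raises TypeError, so the alias falls back to a string forward reference, which typing.Union accepts. A small sketch of the alias in use:

import os
from typing import Union

StrPath = Union[str, "os.PathLike[str]"]  # string form is safe pre-3.9

def as_str(path: StrPath) -> str:
    # os.fspath accepts both plain strings and PathLike objects.
    return os.fspath(path)
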
diff --git a/pkg_resources/_vendor/importlib_resources/compat/py39.py b/pkg_resources/_vendor/importlib_resources/compat/py39.py
deleted file mode 100644
index ab87b9dc14..0000000000
--- a/pkg_resources/_vendor/importlib_resources/compat/py39.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import sys
-
-
-__all__ = ['ZipPath']
-
-
-if sys.version_info >= (3, 10):
-    from zipfile import Path as ZipPath  # type: ignore
-else:
-    from zipp import Path as ZipPath  # type: ignore
diff --git a/pkg_resources/_vendor/importlib_resources/functional.py b/pkg_resources/_vendor/importlib_resources/functional.py
deleted file mode 100644
index f59416f2dd..0000000000
--- a/pkg_resources/_vendor/importlib_resources/functional.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""Simplified function-based API for importlib.resources"""
-
-import warnings
-
-from ._common import files, as_file
-
-
-_MISSING = object()
-
-
-def open_binary(anchor, *path_names):
-    """Open for binary reading the *resource* within *package*."""
-    return _get_resource(anchor, path_names).open('rb')
-
-
-def open_text(anchor, *path_names, encoding=_MISSING, errors='strict'):
-    """Open for text reading the *resource* within *package*."""
-    encoding = _get_encoding_arg(path_names, encoding)
-    resource = _get_resource(anchor, path_names)
-    return resource.open('r', encoding=encoding, errors=errors)
-
-
-def read_binary(anchor, *path_names):
-    """Read and return contents of *resource* within *package* as bytes."""
-    return _get_resource(anchor, path_names).read_bytes()
-
-
-def read_text(anchor, *path_names, encoding=_MISSING, errors='strict'):
-    """Read and return contents of *resource* within *package* as str."""
-    encoding = _get_encoding_arg(path_names, encoding)
-    resource = _get_resource(anchor, path_names)
-    return resource.read_text(encoding=encoding, errors=errors)
-
-
-def path(anchor, *path_names):
-    """Return the path to the *resource* as an actual file system path."""
-    return as_file(_get_resource(anchor, path_names))
-
-
-def is_resource(anchor, *path_names):
-    """Return ``True`` if there is a resource named *name* in the package,
-
-    Otherwise returns ``False``.
-    """
-    return _get_resource(anchor, path_names).is_file()
-
-
-def contents(anchor, *path_names):
-    """Return an iterable over the named resources within the package.
-
-    The iterable yields the :class:`str` names of resources (e.g. files).
-    The iterable does not recurse into subdirectories.
-    """
-    warnings.warn(
-        "importlib.resources.contents is deprecated. "
-        "Use files(anchor).iterdir() instead.",
-        DeprecationWarning,
-        stacklevel=1,
-    )
-    return (resource.name for resource in _get_resource(anchor, path_names).iterdir())
-
-
-def _get_encoding_arg(path_names, encoding):
-    # For compatibility with versions where *encoding* was a positional
-    # argument, it needs to be given explicitly when there are multiple
-    # *path_names*.
-    # This limitation can be removed in Python 3.15.
-    if encoding is _MISSING:
-        if len(path_names) > 1:
-            raise TypeError(
-                "'encoding' argument required with multiple path names",
-            )
-        else:
-            return 'utf-8'
-    return encoding
-
-
-def _get_resource(anchor, path_names):
-    if anchor is None:
-        raise TypeError("anchor must be module or string, got None")
-    return files(anchor).joinpath(*path_names)
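
A usage sketch for the functional wrappers above, using anchors from the test data elsewhere in this patch; note that read_text()/open_text() demand an explicit encoding once multiple path names are given. Assumes the standalone importlib_resources distribution, which re-exports these functions at package level.

import importlib_resources as resources

print(resources.read_text('importlib_resources.tests.data01', 'utf-8.file'))
print(resources.read_binary(
    'importlib_resources.tests.data02', 'one', 'resource1.txt'))
# Multiple path names without encoding= raise TypeError by design:
# resources.read_text(anchor, 'subdirectory', 'subsubdir', 'resource.txt')
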
diff --git a/pkg_resources/_vendor/importlib_resources/future/__init__.py b/pkg_resources/_vendor/importlib_resources/future/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/future/adapters.py b/pkg_resources/_vendor/importlib_resources/future/adapters.py
deleted file mode 100644
index 0e9764bae8..0000000000
--- a/pkg_resources/_vendor/importlib_resources/future/adapters.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import functools
-import pathlib
-from contextlib import suppress
-from types import SimpleNamespace
-
-from .. import readers, _adapters
-
-
-def _block_standard(reader_getter):
-    """
-    Wrap _adapters.TraversableResourcesLoader.get_resource_reader
-    and intercept any standard library readers.
-    """
-
-    @functools.wraps(reader_getter)
-    def wrapper(*args, **kwargs):
-        """
-        If the reader is from the standard library, return None to allow
-        likely newer implementations in this library to take precedence.
-        """
-        try:
-            reader = reader_getter(*args, **kwargs)
-        except NotADirectoryError:
-            # MultiplexedPath may fail on zip subdirectory
-            return
-        # Python 3.10+
-        mod_name = reader.__class__.__module__
-        if mod_name.startswith('importlib.') and mod_name.endswith('readers'):
-            return
-        # Python 3.8, 3.9
-        if isinstance(reader, _adapters.CompatibilityFiles) and (
-            reader.spec.loader.__class__.__module__.startswith('zipimport')
-            or reader.spec.loader.__class__.__module__.startswith(
-                '_frozen_importlib_external'
-            )
-        ):
-            return
-        return reader
-
-    return wrapper
-
-
-def _skip_degenerate(reader):
-    """
-    Mask any degenerate reader. Ref #298.
-    """
-    is_degenerate = (
-        isinstance(reader, _adapters.CompatibilityFiles) and not reader._reader
-    )
-    return reader if not is_degenerate else None
-
-
-class TraversableResourcesLoader(_adapters.TraversableResourcesLoader):
-    """
-    Adapt loaders to provide TraversableResources and other
-    compatibility.
-
-    Ensures the readers from importlib_resources are preferred
-    over stdlib readers.
-    """
-
-    def get_resource_reader(self, name):
-        return (
-            _skip_degenerate(_block_standard(super().get_resource_reader)(name))
-            or self._standard_reader()
-            or super().get_resource_reader(name)
-        )
-
-    def _standard_reader(self):
-        return self._zip_reader() or self._namespace_reader() or self._file_reader()
-
-    def _zip_reader(self):
-        with suppress(AttributeError):
-            return readers.ZipReader(self.spec.loader, self.spec.name)
-
-    def _namespace_reader(self):
-        with suppress(AttributeError, ValueError):
-            return readers.NamespaceReader(self.spec.submodule_search_locations)
-
-    def _file_reader(self):
-        try:
-            path = pathlib.Path(self.spec.origin)
-        except TypeError:
-            return None
-        if path.exists():
-            return readers.FileReader(SimpleNamespace(path=path))
-
-
-def wrap_spec(package):
-    """
-    Override _adapters.wrap_spec to use TraversableResourcesLoader
-    from above. Ensures that future behavior is always available on older
-    Pythons.
-    """
-    return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
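
A hedged sketch of how the future adapter above was meant to be exercised: wrap_spec() rebuilds a package's spec so resource lookups prefer this library's readers over stdlib ones. The package name is illustrative and assumes the standalone distribution.

import importlib_resources.tests.data01 as pkg
from importlib_resources.future.adapters import wrap_spec

adapted = wrap_spec(pkg)
# For an on-disk package the precedence logic typically lands on
# readers.FileReader rather than a standard-library reader.
reader = adapted.loader.get_resource_reader(pkg.__name__)
print(type(reader).__name__)
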
diff --git a/pkg_resources/_vendor/importlib_resources/py.typed b/pkg_resources/_vendor/importlib_resources/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/readers.py b/pkg_resources/_vendor/importlib_resources/readers.py
deleted file mode 100644
index 4a80a774aa..0000000000
--- a/pkg_resources/_vendor/importlib_resources/readers.py
+++ /dev/null
@@ -1,194 +0,0 @@
-import collections
-import contextlib
-import itertools
-import pathlib
-import operator
-import re
-import warnings
-
-from . import abc
-
-from ._itertools import only
-from .compat.py39 import ZipPath
-
-
-def remove_duplicates(items):
-    return iter(collections.OrderedDict.fromkeys(items))
-
-
-class FileReader(abc.TraversableResources):
-    def __init__(self, loader):
-        self.path = pathlib.Path(loader.path).parent
-
-    def resource_path(self, resource):
-        """
-        Return the file system path to prevent
-        `resources.path()` from creating a temporary
-        copy.
-        """
-        return str(self.path.joinpath(resource))
-
-    def files(self):
-        return self.path
-
-
-class ZipReader(abc.TraversableResources):
-    def __init__(self, loader, module):
-        _, _, name = module.rpartition('.')
-        self.prefix = loader.prefix.replace('\\', '/') + name + '/'
-        self.archive = loader.archive
-
-    def open_resource(self, resource):
-        try:
-            return super().open_resource(resource)
-        except KeyError as exc:
-            raise FileNotFoundError(exc.args[0])
-
-    def is_resource(self, path):
-        """
-        Workaround for `zipfile.Path.is_file` returning true
-        for non-existent paths.
-        """
-        target = self.files().joinpath(path)
-        return target.is_file() and target.exists()
-
-    def files(self):
-        return ZipPath(self.archive, self.prefix)
-
-
-class MultiplexedPath(abc.Traversable):
-    """
-    Given a series of Traversable objects, implement a merged
-    version of the interface across all objects. Useful for
-    namespace packages which may be multihomed at a single
-    name.
-    """
-
-    def __init__(self, *paths):
-        self._paths = list(map(_ensure_traversable, remove_duplicates(paths)))
-        if not self._paths:
-            message = 'MultiplexedPath must contain at least one path'
-            raise FileNotFoundError(message)
-        if not all(path.is_dir() for path in self._paths):
-            raise NotADirectoryError('MultiplexedPath only supports directories')
-
-    def iterdir(self):
-        children = (child for path in self._paths for child in path.iterdir())
-        by_name = operator.attrgetter('name')
-        groups = itertools.groupby(sorted(children, key=by_name), key=by_name)
-        return map(self._follow, (locs for name, locs in groups))
-
-    def read_bytes(self):
-        raise FileNotFoundError(f'{self} is not a file')
-
-    def read_text(self, *args, **kwargs):
-        raise FileNotFoundError(f'{self} is not a file')
-
-    def is_dir(self):
-        return True
-
-    def is_file(self):
-        return False
-
-    def joinpath(self, *descendants):
-        try:
-            return super().joinpath(*descendants)
-        except abc.TraversalError:
-            # One of the paths did not resolve (a directory does not exist).
-            # Just return something that will not exist.
-            return self._paths[0].joinpath(*descendants)
-
-    @classmethod
-    def _follow(cls, children):
-        """
-        Construct a MultiplexedPath if needed.
-
-        If children contains a sole element, return it.
-        Otherwise, return a MultiplexedPath over the items, unless one
-        of the items is not a directory, in which case return the first.
-        """
-        subdirs, one_dir, one_file = itertools.tee(children, 3)
-
-        try:
-            return only(one_dir)
-        except ValueError:
-            try:
-                return cls(*subdirs)
-            except NotADirectoryError:
-                return next(one_file)
-
-    def open(self, *args, **kwargs):
-        raise FileNotFoundError(f'{self} is not a file')
-
-    @property
-    def name(self):
-        return self._paths[0].name
-
-    def __repr__(self):
-        paths = ', '.join(f"'{path}'" for path in self._paths)
-        return f'MultiplexedPath({paths})'
-
-
-class NamespaceReader(abc.TraversableResources):
-    def __init__(self, namespace_path):
-        if 'NamespacePath' not in str(namespace_path):
-            raise ValueError('Invalid path')
-        self.path = MultiplexedPath(*map(self._resolve, namespace_path))
-
-    @classmethod
-    def _resolve(cls, path_str) -> abc.Traversable:
-        r"""
-        Given an item from a namespace path, resolve it to a Traversable.
-
-        path_str might be a directory on the filesystem or a path to a
-        zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or
-        ``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``.
-        """
-        (dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir())
-        return dir
-
-    @classmethod
-    def _candidate_paths(cls, path_str):
-        yield pathlib.Path(path_str)
-        yield from cls._resolve_zip_path(path_str)
-
-    @staticmethod
-    def _resolve_zip_path(path_str):
-        for match in reversed(list(re.finditer(r'[\\/]', path_str))):
-            with contextlib.suppress(
-                FileNotFoundError,
-                IsADirectoryError,
-                NotADirectoryError,
-                PermissionError,
-            ):
-                inner = path_str[match.end() :].replace('\\', '/') + '/'
-                yield ZipPath(path_str[: match.start()], inner.lstrip('/'))
-
-    def resource_path(self, resource):
-        """
-        Return the file system path to prevent
-        `resources.path()` from creating a temporary
-        copy.
-        """
-        return str(self.path.joinpath(resource))
-
-    def files(self):
-        return self.path
-
-
-def _ensure_traversable(path):
-    """
-    Convert deprecated string arguments to traversables (pathlib.Path).
-
-    Remove with Python 3.15.
-    """
-    if not isinstance(path, str):
-        return path
-
-    warnings.warn(
-        "String arguments are deprecated. Pass a Traversable instead.",
-        DeprecationWarning,
-        stacklevel=3,
-    )
-
-    return pathlib.Path(path)
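
A runnable sketch of MultiplexedPath above, merging two directories the way NamespaceReader does for a multihomed namespace package:

import pathlib
import tempfile
from importlib_resources.readers import MultiplexedPath

with tempfile.TemporaryDirectory() as a, tempfile.TemporaryDirectory() as b:
    pathlib.Path(a, 'one.txt').write_text('1', encoding='utf-8')
    pathlib.Path(b, 'two.txt').write_text('2', encoding='utf-8')
    merged = MultiplexedPath(pathlib.Path(a), pathlib.Path(b))
    # Children of every underlying directory appear in one merged listing.
    print(sorted(child.name for child in merged.iterdir()))  # ['one.txt', 'two.txt']
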
diff --git a/pkg_resources/_vendor/importlib_resources/simple.py b/pkg_resources/_vendor/importlib_resources/simple.py
deleted file mode 100644
index 96f117fec6..0000000000
--- a/pkg_resources/_vendor/importlib_resources/simple.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""
-Interface adapters for low-level readers.
-"""
-
-import abc
-import io
-import itertools
-from typing import BinaryIO, List
-
-from .abc import Traversable, TraversableResources
-
-
-class SimpleReader(abc.ABC):
-    """
-    The minimum, low-level interface required from a resource
-    provider.
-    """
-
-    @property
-    @abc.abstractmethod
-    def package(self) -> str:
-        """
-        The name of the package for which this reader loads resources.
-        """
-
-    @abc.abstractmethod
-    def children(self) -> List['SimpleReader']:
-        """
-        Obtain an iterable of SimpleReader for available
-        child containers (e.g. directories).
-        """
-
-    @abc.abstractmethod
-    def resources(self) -> List[str]:
-        """
-        Obtain available named resources for this virtual package.
-        """
-
-    @abc.abstractmethod
-    def open_binary(self, resource: str) -> BinaryIO:
-        """
-        Obtain a file-like object for a named resource.
-        """
-
-    @property
-    def name(self):
-        return self.package.split('.')[-1]
-
-
-class ResourceContainer(Traversable):
-    """
-    Traversable container for a package's resources via its reader.
-    """
-
-    def __init__(self, reader: SimpleReader):
-        self.reader = reader
-
-    def is_dir(self):
-        return True
-
-    def is_file(self):
-        return False
-
-    def iterdir(self):
-        files = (ResourceHandle(self, name) for name in self.reader.resources)
-        dirs = map(ResourceContainer, self.reader.children())
-        return itertools.chain(files, dirs)
-
-    def open(self, *args, **kwargs):
-        raise IsADirectoryError()
-
-
-class ResourceHandle(Traversable):
-    """
-    Handle to a named resource in a ResourceReader.
-    """
-
-    def __init__(self, parent: ResourceContainer, name: str):
-        self.parent = parent
-        self.name = name  # type: ignore
-
-    def is_file(self):
-        return True
-
-    def is_dir(self):
-        return False
-
-    def open(self, mode='r', *args, **kwargs):
-        stream = self.parent.reader.open_binary(self.name)
-        if 'b' not in mode:
-            stream = io.TextIOWrapper(stream, *args, **kwargs)
-        return stream
-
-    def joinpath(self, name):
-        raise RuntimeError("Cannot traverse into a resource")
-
-
-class TraversableReader(TraversableResources, SimpleReader):
-    """
-    A TraversableResources based on SimpleReader. Resource providers
-    may derive from this class to provide the TraversableResources
-    interface by supplying the SimpleReader interface.
-    """
-
-    def files(self):
-        return ResourceContainer(self)
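
A hedged sketch of a minimal in-memory provider built on SimpleReader above; DictReader is a hypothetical name, not part of the library.

import io
from importlib_resources.simple import SimpleReader

class DictReader(SimpleReader):  # hypothetical example class
    """Serve resources from a plain dict of bytes."""

    def __init__(self, package, data):
        self._package = package
        self._data = data

    @property
    def package(self):
        return self._package

    def children(self):
        return []  # no child containers in this sketch

    def resources(self):
        return list(self._data)

    def open_binary(self, resource):
        return io.BytesIO(self._data[resource])

reader = DictReader('demo.pkg', {'greeting.txt': b'hello'})
print(reader.name)  # 'pkg' -- the last dotted segment, per SimpleReader.name
print(reader.open_binary('greeting.txt').read())  # b'hello'
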
diff --git a/pkg_resources/_vendor/importlib_resources/tests/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/tests/_path.py b/pkg_resources/_vendor/importlib_resources/tests/_path.py
deleted file mode 100644
index 1f97c96146..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/_path.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import pathlib
-import functools
-
-from typing import Dict, Union
-
-
-####
-# from jaraco.path 3.4.1
-
-FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']]  # type: ignore
-
-
-def build(spec: FilesSpec, prefix=pathlib.Path()):
-    """
-    Build a set of files/directories, as described by the spec.
-
-    Each key represents a pathname, and the value represents
-    the content. Content may be a nested directory.
-
-    >>> spec = {
-    ...     'README.txt': "A README file",
-    ...     "foo": {
-    ...         "__init__.py": "",
-    ...         "bar": {
-    ...             "__init__.py": "",
-    ...         },
-    ...         "baz.py": "# Some code",
-    ...     }
-    ... }
-    >>> target = getfixture('tmp_path')
-    >>> build(spec, target)
-    >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
-    '# Some code'
-    """
-    for name, contents in spec.items():
-        create(contents, pathlib.Path(prefix) / name)
-
-
-@functools.singledispatch
-def create(content: Union[str, bytes, FilesSpec], path):
-    path.mkdir(exist_ok=True)
-    build(content, prefix=path)  # type: ignore
-
-
-@create.register
-def _(content: bytes, path):
-    path.write_bytes(content)
-
-
-@create.register
-def _(content: str, path):
-    path.write_text(content, encoding='utf-8')
-
-
-# end from jaraco.path
-####
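
The helpers above were vendored from jaraco.path (per the comment), so the same spec-driven tree builder can be used directly; a short usage sketch:

import pathlib
import tempfile
from jaraco.path import build  # upstream home of the vendored helpers

spec = {
    'pkg': {
        '__init__.py': '',
        'data.txt': 'hi',
    },
}
with tempfile.TemporaryDirectory() as tmp:
    build(spec, pathlib.Path(tmp))
    print(pathlib.Path(tmp, 'pkg', 'data.txt').read_text(encoding='utf-8'))  # hi
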
diff --git a/pkg_resources/_vendor/importlib_resources/tests/compat/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/compat/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/tests/compat/py312.py b/pkg_resources/_vendor/importlib_resources/tests/compat/py312.py
deleted file mode 100644
index ea9a58ba2e..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/compat/py312.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import contextlib
-
-from .py39 import import_helper
-
-
-@contextlib.contextmanager
-def isolated_modules():
-    """
-    Save modules on entry and cleanup on exit.
-    """
-    (saved,) = import_helper.modules_setup()
-    try:
-        yield
-    finally:
-        import_helper.modules_cleanup(saved)
-
-
-vars(import_helper).setdefault('isolated_modules', isolated_modules)
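
A sketch of the shim above in use: modules imported inside the block are discarded on exit, so test imports do not leak into sys.modules. Assumes the standalone distribution and the jaraco.test dependency these compat shims rely on.

import sys
from importlib_resources.tests.compat.py312 import import_helper

with import_helper.isolated_modules():
    import colorsys  # noqa: F401 -- present only inside the block
# If colorsys was not already loaded at entry, the cleanup dropped it again.
print('colorsys' in sys.modules)
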
diff --git a/pkg_resources/_vendor/importlib_resources/tests/compat/py39.py b/pkg_resources/_vendor/importlib_resources/tests/compat/py39.py
deleted file mode 100644
index e158eb85d3..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/compat/py39.py
+++ /dev/null
@@ -1,10 +0,0 @@
-"""
-Backward-compatibility shims to support Python 3.9 and earlier.
-"""
-
-from jaraco.test.cpython import from_test_support, try_import
-
-import_helper = try_import('import_helper') or from_test_support(
-    'modules_setup', 'modules_cleanup', 'DirsOnSysPath'
-)
-os_helper = try_import('os_helper') or from_test_support('temp_dir')
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data01/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/binary.file b/pkg_resources/_vendor/importlib_resources/tests/data01/binary.file
deleted file mode 100644
index eaf36c1daccfdf325514461cd1a2ffbc139b5464..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 4
LcmZQzWMT#Y01f~L

diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/binary.file b/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/binary.file
deleted file mode 100644
index 5bd8bb897b..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/data01/subdirectory/binary.file
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/utf-16.file b/pkg_resources/_vendor/importlib_resources/tests/data01/utf-16.file
deleted file mode 100644
index 2cb772295ef4b480a8d83725bd5006a0236d8f68..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 44
ucmezW&x0YAAqNQa8FUyF7(y9B7~B|i84MZBfV^^`Xc15@g+Y;liva-T)Ce>H

diff --git a/pkg_resources/_vendor/importlib_resources/tests/data01/utf-8.file b/pkg_resources/_vendor/importlib_resources/tests/data01/utf-8.file
deleted file mode 100644
index 1c0132ad90..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/data01/utf-8.file
+++ /dev/null
@@ -1 +0,0 @@
-Hello, UTF-8 world!
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data02/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data02/one/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt b/pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt
deleted file mode 100644
index 61a813e401..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/data02/one/resource1.txt
+++ /dev/null
@@ -1 +0,0 @@
-one resource
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt b/pkg_resources/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
deleted file mode 100644
index 48f587a2d0..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
+++ /dev/null
@@ -1 +0,0 @@
-a resource
\ No newline at end of file
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py b/pkg_resources/_vendor/importlib_resources/tests/data02/two/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt b/pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt
deleted file mode 100644
index a80ce46ea3..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/data02/two/resource2.txt
+++ /dev/null
@@ -1 +0,0 @@
-two resource
diff --git a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/binary.file b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/binary.file
deleted file mode 100644
index eaf36c1daccfdf325514461cd1a2ffbc139b5464..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 4
LcmZQzWMT#Y01f~L

diff --git a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
deleted file mode 100644
index 100f50643d..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
+++ /dev/null
@@ -1 +0,0 @@
-

\ No newline at end of file
diff --git a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-16.file b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-16.file
deleted file mode 100644
index 2cb772295ef4b480a8d83725bd5006a0236d8f68..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 44
ucmezW&x0YAAqNQa8FUyF7(y9B7~B|i84MZBfV^^`Xc15@g+Y;liva-T)Ce>H

diff --git a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-8.file b/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-8.file
deleted file mode 100644
index 1c0132ad90..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/namespacedata01/utf-8.file
+++ /dev/null
@@ -1 +0,0 @@
-Hello, UTF-8 world!
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py
deleted file mode 100644
index 13ad0dfb21..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_compatibilty_files.py
+++ /dev/null
@@ -1,104 +0,0 @@
-import io
-import unittest
-
-import importlib_resources as resources
-
-from importlib_resources._adapters import (
-    CompatibilityFiles,
-    wrap_spec,
-)
-
-from . import util
-
-
-class CompatibilityFilesTests(unittest.TestCase):
-    @property
-    def package(self):
-        bytes_data = io.BytesIO(b'Hello, world!')
-        return util.create_package(
-            file=bytes_data,
-            path='some_path',
-            contents=('a', 'b', 'c'),
-        )
-
-    @property
-    def files(self):
-        return resources.files(self.package)
-
-    def test_spec_path_iter(self):
-        self.assertEqual(
-            sorted(path.name for path in self.files.iterdir()),
-            ['a', 'b', 'c'],
-        )
-
-    def test_child_path_iter(self):
-        self.assertEqual(list((self.files / 'a').iterdir()), [])
-
-    def test_orphan_path_iter(self):
-        self.assertEqual(list((self.files / 'a' / 'a').iterdir()), [])
-        self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), [])
-
-    def test_spec_path_is(self):
-        self.assertFalse(self.files.is_file())
-        self.assertFalse(self.files.is_dir())
-
-    def test_child_path_is(self):
-        self.assertTrue((self.files / 'a').is_file())
-        self.assertFalse((self.files / 'a').is_dir())
-
-    def test_orphan_path_is(self):
-        self.assertFalse((self.files / 'a' / 'a').is_file())
-        self.assertFalse((self.files / 'a' / 'a').is_dir())
-        self.assertFalse((self.files / 'a' / 'a' / 'a').is_file())
-        self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir())
-
-    def test_spec_path_name(self):
-        self.assertEqual(self.files.name, 'testingpackage')
-
-    def test_child_path_name(self):
-        self.assertEqual((self.files / 'a').name, 'a')
-
-    def test_orphan_path_name(self):
-        self.assertEqual((self.files / 'a' / 'b').name, 'b')
-        self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c')
-
-    def test_spec_path_open(self):
-        self.assertEqual(self.files.read_bytes(), b'Hello, world!')
-        self.assertEqual(self.files.read_text(encoding='utf-8'), 'Hello, world!')
-
-    def test_child_path_open(self):
-        self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!')
-        self.assertEqual(
-            (self.files / 'a').read_text(encoding='utf-8'), 'Hello, world!'
-        )
-
-    def test_orphan_path_open(self):
-        with self.assertRaises(FileNotFoundError):
-            (self.files / 'a' / 'b').read_bytes()
-        with self.assertRaises(FileNotFoundError):
-            (self.files / 'a' / 'b' / 'c').read_bytes()
-
-    def test_open_invalid_mode(self):
-        with self.assertRaises(ValueError):
-            self.files.open('0')
-
-    def test_orphan_path_invalid(self):
-        with self.assertRaises(ValueError):
-            CompatibilityFiles.OrphanPath()
-
-    def test_wrap_spec(self):
-        spec = wrap_spec(self.package)
-        self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles)
-
-
-class CompatibilityFilesNoReaderTests(unittest.TestCase):
-    @property
-    def package(self):
-        return util.create_package_from_loader(None)
-
-    @property
-    def files(self):
-        return resources.files(self.package)
-
-    def test_spec_path_joinpath(self):
-        self.assertIsInstance(self.files / 'a', CompatibilityFiles.OrphanPath)
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_contents.py b/pkg_resources/_vendor/importlib_resources/tests/test_contents.py
deleted file mode 100644
index 7dc3b0a619..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_contents.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import unittest
-import importlib_resources as resources
-
-from . import data01
-from . import util
-
-
-class ContentsTests:
-    expected = {
-        '__init__.py',
-        'binary.file',
-        'subdirectory',
-        'utf-16.file',
-        'utf-8.file',
-    }
-
-    def test_contents(self):
-        contents = {path.name for path in resources.files(self.data).iterdir()}
-        assert self.expected <= contents
-
-
-class ContentsDiskTests(ContentsTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
-
-
-class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
-    pass
-
-
-class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
-    expected = {
-        # no __init__ because of namespace design
-        'binary.file',
-        'subdirectory',
-        'utf-16.file',
-        'utf-8.file',
-    }
-
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_custom.py b/pkg_resources/_vendor/importlib_resources/tests/test_custom.py
deleted file mode 100644
index 86c65676f1..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_custom.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import unittest
-import contextlib
-import pathlib
-
-import importlib_resources as resources
-from .. import abc
-from ..abc import TraversableResources, ResourceReader
-from . import util
-from .compat.py39 import os_helper
-
-
-class SimpleLoader:
-    """
-    A simple loader that only implements a resource reader.
-    """
-
-    def __init__(self, reader: ResourceReader):
-        self.reader = reader
-
-    def get_resource_reader(self, package):
-        return self.reader
-
-
-class MagicResources(TraversableResources):
-    """
-    Magically returns the resources at path.
-    """
-
-    def __init__(self, path: pathlib.Path):
-        self.path = path
-
-    def files(self):
-        return self.path
-
-
-class CustomTraversableResourcesTests(unittest.TestCase):
-    def setUp(self):
-        self.fixtures = contextlib.ExitStack()
-        self.addCleanup(self.fixtures.close)
-
-    def test_custom_loader(self):
-        temp_dir = pathlib.Path(self.fixtures.enter_context(os_helper.temp_dir()))
-        loader = SimpleLoader(MagicResources(temp_dir))
-        pkg = util.create_package_from_loader(loader)
-        files = resources.files(pkg)
-        assert isinstance(files, abc.Traversable)
-        assert list(files.iterdir()) == []
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_files.py b/pkg_resources/_vendor/importlib_resources/tests/test_files.py
deleted file mode 100644
index 3e86ec64bc..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_files.py
+++ /dev/null
@@ -1,117 +0,0 @@
-import textwrap
-import unittest
-import warnings
-import importlib
-import contextlib
-
-import importlib_resources as resources
-from ..abc import Traversable
-from . import data01
-from . import util
-from . import _path
-from .compat.py39 import os_helper
-from .compat.py312 import import_helper
-
-
-@contextlib.contextmanager
-def suppress_known_deprecation():
-    with warnings.catch_warnings(record=True) as ctx:
-        warnings.simplefilter('default', category=DeprecationWarning)
-        yield ctx
-
-
-class FilesTests:
-    def test_read_bytes(self):
-        files = resources.files(self.data)
-        actual = files.joinpath('utf-8.file').read_bytes()
-        assert actual == b'Hello, UTF-8 world!\n'
-
-    def test_read_text(self):
-        files = resources.files(self.data)
-        actual = files.joinpath('utf-8.file').read_text(encoding='utf-8')
-        assert actual == 'Hello, UTF-8 world!\n'
-
-    def test_traversable(self):
-        assert isinstance(resources.files(self.data), Traversable)
-
-    def test_joinpath_with_multiple_args(self):
-        files = resources.files(self.data)
-        binfile = files.joinpath('subdirectory', 'binary.file')
-        self.assertTrue(binfile.is_file())
-
-    def test_old_parameter(self):
-        """
-        Files used to take a 'package' parameter. Make sure anyone
-        passing by name is still supported.
-        """
-        with suppress_known_deprecation():
-            resources.files(package=self.data)
-
-
-class OpenDiskTests(FilesTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
-
-
-class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
-    pass
-
-
-class OpenNamespaceTests(FilesTests, unittest.TestCase):
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
-
-
-class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
-    ZIP_MODULE = 'namespacedata01'
-
-
-class SiteDir:
-    def setUp(self):
-        self.fixtures = contextlib.ExitStack()
-        self.addCleanup(self.fixtures.close)
-        self.site_dir = self.fixtures.enter_context(os_helper.temp_dir())
-        self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir))
-        self.fixtures.enter_context(import_helper.isolated_modules())
-
-
-class ModulesFilesTests(SiteDir, unittest.TestCase):
-    def test_module_resources(self):
-        """
-        A module can have resources found adjacent to the module.
-        """
-        spec = {
-            'mod.py': '',
-            'res.txt': 'resources are the best',
-        }
-        _path.build(spec, self.site_dir)
-        import mod
-
-        actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8')
-        assert actual == spec['res.txt']
-
-
-class ImplicitContextFilesTests(SiteDir, unittest.TestCase):
-    def test_implicit_files(self):
-        """
-        Without any parameter, files() will infer the location as the caller.
-        """
-        spec = {
-            'somepkg': {
-                '__init__.py': textwrap.dedent(
-                    """
-                    import importlib_resources as res
-                    val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
-                    """
-                ),
-                'res.txt': 'resources are the best',
-            },
-        }
-        _path.build(spec, self.site_dir)
-        assert importlib.import_module('somepkg').val == 'resources are the best'
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_functional.py b/pkg_resources/_vendor/importlib_resources/tests/test_functional.py
deleted file mode 100644
index 69706cf7be..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_functional.py
+++ /dev/null
@@ -1,242 +0,0 @@
-import unittest
-import os
-import contextlib
-
-try:
-    from test.support.warnings_helper import ignore_warnings, check_warnings
-except ImportError:
-    # older Python versions
-    from test.support import ignore_warnings, check_warnings
-
-import importlib_resources as resources
-
-# Since the functional API forwards to Traversable, we only test
-# filesystem resources here -- not zip files, namespace packages etc.
-# We do test for two kinds of Anchor, though.
-
-
-class StringAnchorMixin:
-    anchor01 = 'importlib_resources.tests.data01'
-    anchor02 = 'importlib_resources.tests.data02'
-
-
-class ModuleAnchorMixin:
-    from . import data01 as anchor01
-    from . import data02 as anchor02
-
-
-class FunctionalAPIBase:
-    def _gen_resourcetxt_path_parts(self):
-        """Yield various names of a text file in anchor02, each in a subTest"""
-        for path_parts in (
-            ('subdirectory', 'subsubdir', 'resource.txt'),
-            ('subdirectory/subsubdir/resource.txt',),
-            ('subdirectory/subsubdir', 'resource.txt'),
-        ):
-            with self.subTest(path_parts=path_parts):
-                yield path_parts
-
-    def test_read_text(self):
-        self.assertEqual(
-            resources.read_text(self.anchor01, 'utf-8.file'),
-            'Hello, UTF-8 world!\n',
-        )
-        self.assertEqual(
-            resources.read_text(
-                self.anchor02,
-                'subdirectory',
-                'subsubdir',
-                'resource.txt',
-                encoding='utf-8',
-            ),
-            'a resource',
-        )
-        for path_parts in self._gen_resourcetxt_path_parts():
-            self.assertEqual(
-                resources.read_text(
-                    self.anchor02,
-                    *path_parts,
-                    encoding='utf-8',
-                ),
-                'a resource',
-            )
-        # Use generic OSError, since e.g. attempting to read a directory can
-        # fail with PermissionError rather than IsADirectoryError
-        with self.assertRaises(OSError):
-            resources.read_text(self.anchor01)
-        with self.assertRaises(OSError):
-            resources.read_text(self.anchor01, 'no-such-file')
-        with self.assertRaises(UnicodeDecodeError):
-            resources.read_text(self.anchor01, 'utf-16.file')
-        self.assertEqual(
-            resources.read_text(
-                self.anchor01,
-                'binary.file',
-                encoding='latin1',
-            ),
-            '\x00\x01\x02\x03',
-        )
-        self.assertEqual(
-            resources.read_text(
-                self.anchor01,
-                'utf-16.file',
-                errors='backslashreplace',
-            ),
-            'Hello, UTF-16 world!\n'.encode('utf-16').decode(
-                errors='backslashreplace',
-            ),
-        )
-
-    def test_read_binary(self):
-        self.assertEqual(
-            resources.read_binary(self.anchor01, 'utf-8.file'),
-            b'Hello, UTF-8 world!\n',
-        )
-        for path_parts in self._gen_resourcetxt_path_parts():
-            self.assertEqual(
-                resources.read_binary(self.anchor02, *path_parts),
-                b'a resource',
-            )
-
-    def test_open_text(self):
-        with resources.open_text(self.anchor01, 'utf-8.file') as f:
-            self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
-        for path_parts in self._gen_resourcetxt_path_parts():
-            with resources.open_text(
-                self.anchor02,
-                *path_parts,
-                encoding='utf-8',
-            ) as f:
-                self.assertEqual(f.read(), 'a resource')
-        # Use generic OSError, since e.g. attempting to read a directory can
-        # fail with PermissionError rather than IsADirectoryError
-        with self.assertRaises(OSError):
-            resources.open_text(self.anchor01)
-        with self.assertRaises(OSError):
-            resources.open_text(self.anchor01, 'no-such-file')
-        with resources.open_text(self.anchor01, 'utf-16.file') as f:
-            with self.assertRaises(UnicodeDecodeError):
-                f.read()
-        with resources.open_text(
-            self.anchor01,
-            'binary.file',
-            encoding='latin1',
-        ) as f:
-            self.assertEqual(f.read(), '\x00\x01\x02\x03')
-        with resources.open_text(
-            self.anchor01,
-            'utf-16.file',
-            errors='backslashreplace',
-        ) as f:
-            self.assertEqual(
-                f.read(),
-                'Hello, UTF-16 world!\n'.encode('utf-16').decode(
-                    errors='backslashreplace',
-                ),
-            )
-
-    def test_open_binary(self):
-        with resources.open_binary(self.anchor01, 'utf-8.file') as f:
-            self.assertEqual(f.read(), b'Hello, UTF-8 world!\n')
-        for path_parts in self._gen_resourcetxt_path_parts():
-            with resources.open_binary(
-                self.anchor02,
-                *path_parts,
-            ) as f:
-                self.assertEqual(f.read(), b'a resource')
-
-    def test_path(self):
-        with resources.path(self.anchor01, 'utf-8.file') as path:
-            with open(str(path), encoding='utf-8') as f:
-                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
-        with resources.path(self.anchor01) as path:
-            with open(os.path.join(path, 'utf-8.file'), encoding='utf-8') as f:
-                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
-
-    def test_is_resource(self):
-        is_resource = resources.is_resource
-        self.assertTrue(is_resource(self.anchor01, 'utf-8.file'))
-        self.assertFalse(is_resource(self.anchor01, 'no_such_file'))
-        self.assertFalse(is_resource(self.anchor01))
-        self.assertFalse(is_resource(self.anchor01, 'subdirectory'))
-        for path_parts in self._gen_resourcetxt_path_parts():
-            self.assertTrue(is_resource(self.anchor02, *path_parts))
-
-    def test_contents(self):
-        with check_warnings((".*contents.*", DeprecationWarning)):
-            c = resources.contents(self.anchor01)
-        self.assertGreaterEqual(
-            set(c),
-            {'utf-8.file', 'utf-16.file', 'binary.file', 'subdirectory'},
-        )
-        with contextlib.ExitStack() as cm:
-            cm.enter_context(self.assertRaises(OSError))
-            cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
-
-            list(resources.contents(self.anchor01, 'utf-8.file'))
-
-        for path_parts in self._gen_resourcetxt_path_parts():
-            with contextlib.ExitStack() as cm:
-                cm.enter_context(self.assertRaises(OSError))
-                cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
-
-                list(resources.contents(self.anchor01, *path_parts))
-        with check_warnings((".*contents.*", DeprecationWarning)):
-            c = resources.contents(self.anchor01, 'subdirectory')
-        self.assertGreaterEqual(
-            set(c),
-            {'binary.file'},
-        )
-
-    @ignore_warnings(category=DeprecationWarning)
-    def test_common_errors(self):
-        for func in (
-            resources.read_text,
-            resources.read_binary,
-            resources.open_text,
-            resources.open_binary,
-            resources.path,
-            resources.is_resource,
-            resources.contents,
-        ):
-            with self.subTest(func=func):
-                # Rejecting None anchor
-                with self.assertRaises(TypeError):
-                    func(None)
-                # Rejecting invalid anchor type
-                with self.assertRaises((TypeError, AttributeError)):
-                    func(1234)
-                # Unknown module
-                with self.assertRaises(ModuleNotFoundError):
-                    func('$missing module$')
-
-    def test_text_errors(self):
-        for func in (
-            resources.read_text,
-            resources.open_text,
-        ):
-            with self.subTest(func=func):
-                # Multiple path arguments need explicit encoding argument.
-                with self.assertRaises(TypeError):
-                    func(
-                        self.anchor02,
-                        'subdirectory',
-                        'subsubdir',
-                        'resource.txt',
-                    )
-
-
-class FunctionalAPITest_StringAnchor(
-    unittest.TestCase,
-    FunctionalAPIBase,
-    StringAnchorMixin,
-):
-    pass
-
-
-class FunctionalAPITest_ModuleAnchor(
-    unittest.TestCase,
-    FunctionalAPIBase,
-    ModuleAnchorMixin,
-):
-    pass
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_open.py b/pkg_resources/_vendor/importlib_resources/tests/test_open.py
deleted file mode 100644
index 44f1018af3..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_open.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import unittest
-
-import importlib_resources as resources
-from . import data01
-from . import util
-
-
-class CommonBinaryTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        target = resources.files(package).joinpath(path)
-        with target.open('rb'):
-            pass
-
-
-class CommonTextTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        target = resources.files(package).joinpath(path)
-        with target.open(encoding='utf-8'):
-            pass
-
-
-class OpenTests:
-    def test_open_binary(self):
-        target = resources.files(self.data) / 'binary.file'
-        with target.open('rb') as fp:
-            result = fp.read()
-            self.assertEqual(result, bytes(range(4)))
-
-    def test_open_text_default_encoding(self):
-        target = resources.files(self.data) / 'utf-8.file'
-        with target.open(encoding='utf-8') as fp:
-            result = fp.read()
-            self.assertEqual(result, 'Hello, UTF-8 world!\n')
-
-    def test_open_text_given_encoding(self):
-        target = resources.files(self.data) / 'utf-16.file'
-        with target.open(encoding='utf-16', errors='strict') as fp:
-            result = fp.read()
-        self.assertEqual(result, 'Hello, UTF-16 world!\n')
-
-    def test_open_text_with_errors(self):
-        """
-        Raises UnicodeError without the 'errors' argument.
-        """
-        target = resources.files(self.data) / 'utf-16.file'
-        with target.open(encoding='utf-8', errors='strict') as fp:
-            self.assertRaises(UnicodeError, fp.read)
-        with target.open(encoding='utf-8', errors='ignore') as fp:
-            result = fp.read()
-        self.assertEqual(
-            result,
-            'H\x00e\x00l\x00l\x00o\x00,\x00 '
-            '\x00U\x00T\x00F\x00-\x001\x006\x00 '
-            '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
-        )
-
-    def test_open_binary_FileNotFoundError(self):
-        target = resources.files(self.data) / 'does-not-exist'
-        with self.assertRaises(FileNotFoundError):
-            target.open('rb')
-
-    def test_open_text_FileNotFoundError(self):
-        target = resources.files(self.data) / 'does-not-exist'
-        with self.assertRaises(FileNotFoundError):
-            target.open(encoding='utf-8')
-
-
-class OpenDiskTests(OpenTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
-
-
-class OpenDiskNamespaceTests(OpenTests, unittest.TestCase):
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
-
-
-class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
-    pass
-
-
-class OpenNamespaceZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
-    ZIP_MODULE = 'namespacedata01'
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_path.py b/pkg_resources/_vendor/importlib_resources/tests/test_path.py
deleted file mode 100644
index c3e1cbb4ed..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_path.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import io
-import pathlib
-import unittest
-
-import importlib_resources as resources
-from . import data01
-from . import util
-
-
-class CommonTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        with resources.as_file(resources.files(package).joinpath(path)):
-            pass
-
-
-class PathTests:
-    def test_reading(self):
-        """
-        Path should be readable and a pathlib.Path instance.
-        """
-        target = resources.files(self.data) / 'utf-8.file'
-        with resources.as_file(target) as path:
-            self.assertIsInstance(path, pathlib.Path)
-            self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
-            self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8'))
-
-
-class PathDiskTests(PathTests, unittest.TestCase):
-    data = data01
-
-    def test_natural_path(self):
-        """
-        Guarantee the internal implementation detail that
-        file-system-backed resources do not get the tempdir
-        treatment.
-        """
-        target = resources.files(self.data) / 'utf-8.file'
-        with resources.as_file(target) as path:
-            assert 'data' in str(path)
-
-
-class PathMemoryTests(PathTests, unittest.TestCase):
-    def setUp(self):
-        file = io.BytesIO(b'Hello, UTF-8 world!\n')
-        self.addCleanup(file.close)
-        self.data = util.create_package(
-            file=file, path=FileNotFoundError("package exists only in memory")
-        )
-        self.data.__spec__.origin = None
-        self.data.__spec__.has_location = False
-
-
-class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
-    def test_remove_in_context_manager(self):
-        """
-        It is not an error if the file that was temporarily stashed on the
-        file system is removed inside the `with` stanza.
-        """
-        target = resources.files(self.data) / 'utf-8.file'
-        with resources.as_file(target) as path:
-            path.unlink()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_read.py b/pkg_resources/_vendor/importlib_resources/tests/test_read.py
deleted file mode 100644
index 97d90128cf..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_read.py
+++ /dev/null
@@ -1,97 +0,0 @@
-import unittest
-import importlib_resources as resources
-
-from . import data01
-from . import util
-from importlib import import_module
-
-
-class CommonBinaryTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        resources.files(package).joinpath(path).read_bytes()
-
-
-class CommonTextTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        resources.files(package).joinpath(path).read_text(encoding='utf-8')
-
-
-class ReadTests:
-    def test_read_bytes(self):
-        result = resources.files(self.data).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, bytes(range(4)))
-
-    def test_read_text_default_encoding(self):
-        result = (
-            resources.files(self.data)
-            .joinpath('utf-8.file')
-            .read_text(encoding='utf-8')
-        )
-        self.assertEqual(result, 'Hello, UTF-8 world!\n')
-
-    def test_read_text_given_encoding(self):
-        result = (
-            resources.files(self.data)
-            .joinpath('utf-16.file')
-            .read_text(encoding='utf-16')
-        )
-        self.assertEqual(result, 'Hello, UTF-16 world!\n')
-
-    def test_read_text_with_errors(self):
-        """
-        Raises UnicodeError without the 'errors' argument.
-        """
-        target = resources.files(self.data) / 'utf-16.file'
-        self.assertRaises(UnicodeError, target.read_text, encoding='utf-8')
-        result = target.read_text(encoding='utf-8', errors='ignore')
-        self.assertEqual(
-            result,
-            'H\x00e\x00l\x00l\x00o\x00,\x00 '
-            '\x00U\x00T\x00F\x00-\x001\x006\x00 '
-            '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
-        )
-
-
-class ReadDiskTests(ReadTests, unittest.TestCase):
-    data = data01
-
-
-class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
-    def test_read_submodule_resource(self):
-        submodule = import_module('data01.subdirectory')
-        result = resources.files(submodule).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, bytes(range(4, 8)))
-
-    def test_read_submodule_resource_by_name(self):
-        result = (
-            resources.files('data01.subdirectory').joinpath('binary.file').read_bytes()
-        )
-        self.assertEqual(result, bytes(range(4, 8)))
-
-
-class ReadNamespaceTests(ReadTests, unittest.TestCase):
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
-
-
-class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
-    ZIP_MODULE = 'namespacedata01'
-
-    def test_read_submodule_resource(self):
-        submodule = import_module('namespacedata01.subdirectory')
-        result = resources.files(submodule).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, bytes(range(12, 16)))
-
-    def test_read_submodule_resource_by_name(self):
-        result = (
-            resources.files('namespacedata01.subdirectory')
-            .joinpath('binary.file')
-            .read_bytes()
-        )
-        self.assertEqual(result, bytes(range(12, 16)))
-
-
-if __name__ == '__main__':
-    unittest.main()
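For reference, the read APIs these deleted tests covered reduce to `read_bytes` and `read_text` on the traversable returned by `files()`. A short sketch under the same placeholder-package assumption:

.. code-block:: python

    from importlib import resources

    # 'mypkg' and the file names are placeholders for illustration.
    data = resources.files('mypkg')
    raw = (data / 'binary.file').read_bytes()                 # undecoded bytes
    text = (data / 'utf-8.file').read_text(encoding='utf-8')
    # The disk- and zip-backed paths the tests exercised also honor
    # errors=, mirroring test_read_text_with_errors above:
    lossy = (data / 'utf-16.file').read_text(encoding='utf-8', errors='ignore')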
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py b/pkg_resources/_vendor/importlib_resources/tests/test_reader.py
deleted file mode 100644
index 95c2fc85a4..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_reader.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import os.path
-import sys
-import pathlib
-import unittest
-
-from importlib import import_module
-from importlib_resources.readers import MultiplexedPath, NamespaceReader
-
-
-class MultiplexedPathTest(unittest.TestCase):
-    @classmethod
-    def setUpClass(cls):
-        cls.folder = pathlib.Path(__file__).parent / 'namespacedata01'
-
-    def test_init_no_paths(self):
-        with self.assertRaises(FileNotFoundError):
-            MultiplexedPath()
-
-    def test_init_file(self):
-        with self.assertRaises(NotADirectoryError):
-            MultiplexedPath(self.folder / 'binary.file')
-
-    def test_iterdir(self):
-        contents = {path.name for path in MultiplexedPath(self.folder).iterdir()}
-        try:
-            contents.remove('__pycache__')
-        except (KeyError, ValueError):
-            pass
-        self.assertEqual(
-            contents, {'subdirectory', 'binary.file', 'utf-16.file', 'utf-8.file'}
-        )
-
-    def test_iterdir_duplicate(self):
-        data01 = pathlib.Path(__file__).parent.joinpath('data01')
-        contents = {
-            path.name for path in MultiplexedPath(self.folder, data01).iterdir()
-        }
-        for remove in ('__pycache__', '__init__.pyc'):
-            try:
-                contents.remove(remove)
-            except (KeyError, ValueError):
-                pass
-        self.assertEqual(
-            contents,
-            {'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'},
-        )
-
-    def test_is_dir(self):
-        self.assertEqual(MultiplexedPath(self.folder).is_dir(), True)
-
-    def test_is_file(self):
-        self.assertEqual(MultiplexedPath(self.folder).is_file(), False)
-
-    def test_open_file(self):
-        path = MultiplexedPath(self.folder)
-        with self.assertRaises(FileNotFoundError):
-            path.read_bytes()
-        with self.assertRaises(FileNotFoundError):
-            path.read_text()
-        with self.assertRaises(FileNotFoundError):
-            path.open()
-
-    def test_join_path(self):
-        data01 = pathlib.Path(__file__).parent.joinpath('data01')
-        prefix = str(data01.parent)
-        path = MultiplexedPath(self.folder, data01)
-        self.assertEqual(
-            str(path.joinpath('binary.file'))[len(prefix) + 1 :],
-            os.path.join('namespacedata01', 'binary.file'),
-        )
-        sub = path.joinpath('subdirectory')
-        assert isinstance(sub, MultiplexedPath)
-        assert 'namespacedata01' in str(sub)
-        assert 'data01' in str(sub)
-        self.assertEqual(
-            str(path.joinpath('imaginary'))[len(prefix) + 1 :],
-            os.path.join('namespacedata01', 'imaginary'),
-        )
-        self.assertEqual(path.joinpath(), path)
-
-    def test_join_path_compound(self):
-        path = MultiplexedPath(self.folder)
-        assert not path.joinpath('imaginary/foo.py').exists()
-
-    def test_join_path_common_subdir(self):
-        data01 = pathlib.Path(__file__).parent.joinpath('data01')
-        data02 = pathlib.Path(__file__).parent.joinpath('data02')
-        prefix = str(data01.parent)
-        path = MultiplexedPath(data01, data02)
-        self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath)
-        self.assertEqual(
-            str(path.joinpath('subdirectory', 'subsubdir'))[len(prefix) + 1 :],
-            os.path.join('data02', 'subdirectory', 'subsubdir'),
-        )
-
-    def test_repr(self):
-        self.assertEqual(
-            repr(MultiplexedPath(self.folder)),
-            f"MultiplexedPath('{self.folder}')",
-        )
-
-    def test_name(self):
-        self.assertEqual(
-            MultiplexedPath(self.folder).name,
-            os.path.basename(self.folder),
-        )
-
-
-class NamespaceReaderTest(unittest.TestCase):
-    site_dir = str(pathlib.Path(__file__).parent)
-
-    @classmethod
-    def setUpClass(cls):
-        sys.path.append(cls.site_dir)
-
-    @classmethod
-    def tearDownClass(cls):
-        sys.path.remove(cls.site_dir)
-
-    def test_init_error(self):
-        with self.assertRaises(ValueError):
-            NamespaceReader(['path1', 'path2'])
-
-    def test_resource_path(self):
-        namespacedata01 = import_module('namespacedata01')
-        reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
-
-        root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
-        self.assertEqual(
-            reader.resource_path('binary.file'), os.path.join(root, 'binary.file')
-        )
-        self.assertEqual(
-            reader.resource_path('imaginary'), os.path.join(root, 'imaginary')
-        )
-
-    def test_files(self):
-        namespacedata01 = import_module('namespacedata01')
-        reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
-        root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
-        self.assertIsInstance(reader.files(), MultiplexedPath)
-        self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')")
-
-
-if __name__ == '__main__':
-    unittest.main()
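The reader tests above revolve around `MultiplexedPath`, which presents several directories as one logical tree: `iterdir()` yields the union of entries, `is_dir()` is always true, and `joinpath()` serves a file from the first component that has it. A sketch of that behaviour; `pkg_a` and `pkg_b` are hypothetical existing directories (the constructor raises if they are missing):

.. code-block:: python

    import pathlib
    from importlib_resources.readers import MultiplexedPath

    # 'pkg_a' and 'pkg_b' are placeholder directories holding portions
    # of the same namespace package.
    path = MultiplexedPath(pathlib.Path('pkg_a'), pathlib.Path('pkg_b'))

    print(path.is_dir())                             # True by definition
    print({entry.name for entry in path.iterdir()})  # union of both trees
    print(path.joinpath('resource.txt'))             # served from the first match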
diff --git a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py b/pkg_resources/_vendor/importlib_resources/tests/test_resource.py
deleted file mode 100644
index dc2a108cde..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/test_resource.py
+++ /dev/null
@@ -1,241 +0,0 @@
-import sys
-import unittest
-import importlib_resources as resources
-import pathlib
-
-from . import data01
-from . import util
-from importlib import import_module
-
-
-class ResourceTests:
-    # Subclasses are expected to set the `data` attribute.
-
-    def test_is_file_exists(self):
-        target = resources.files(self.data) / 'binary.file'
-        self.assertTrue(target.is_file())
-
-    def test_is_file_missing(self):
-        target = resources.files(self.data) / 'not-a-file'
-        self.assertFalse(target.is_file())
-
-    def test_is_dir(self):
-        target = resources.files(self.data) / 'subdirectory'
-        self.assertFalse(target.is_file())
-        self.assertTrue(target.is_dir())
-
-
-class ResourceDiskTests(ResourceTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
-
-
-class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase):
-    pass
-
-
-def names(traversable):
-    return {item.name for item in traversable.iterdir()}
-
-
-class ResourceLoaderTests(unittest.TestCase):
-    def test_resource_contents(self):
-        package = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C']
-        )
-        self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'})
-
-    def test_is_file(self):
-        package = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
-        )
-        self.assertTrue(resources.files(package).joinpath('B').is_file())
-
-    def test_is_dir(self):
-        package = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
-        )
-        self.assertTrue(resources.files(package).joinpath('D').is_dir())
-
-    def test_resource_missing(self):
-        package = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
-        )
-        self.assertFalse(resources.files(package).joinpath('Z').is_file())
-
-
-class ResourceCornerCaseTests(unittest.TestCase):
-    def test_package_has_no_reader_fallback(self):
-        """
-        Test oddball packages which:
-        # 1. Do not have a ResourceReader as a loader
-        # 2. Are not on the file system
-        # 3. Are not in a zip file
-        """
-        module = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C']
-        )
-        # Give the module a dummy loader.
-        module.__loader__ = object()
-        # Give the module a dummy origin.
-        module.__file__ = '/path/which/shall/not/be/named'
-        module.__spec__.loader = module.__loader__
-        module.__spec__.origin = module.__file__
-        self.assertFalse(resources.files(module).joinpath('A').is_file())
-
-
-class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
-    ZIP_MODULE = 'data01'
-
-    def test_is_submodule_resource(self):
-        submodule = import_module('data01.subdirectory')
-        self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())
-
-    def test_read_submodule_resource_by_name(self):
-        self.assertTrue(
-            resources.files('data01.subdirectory').joinpath('binary.file').is_file()
-        )
-
-    def test_submodule_contents(self):
-        submodule = import_module('data01.subdirectory')
-        self.assertEqual(
-            names(resources.files(submodule)), {'__init__.py', 'binary.file'}
-        )
-
-    def test_submodule_contents_by_name(self):
-        self.assertEqual(
-            names(resources.files('data01.subdirectory')),
-            {'__init__.py', 'binary.file'},
-        )
-
-    def test_as_file_directory(self):
-        with resources.as_file(resources.files('data01')) as data:
-            assert data.name == 'data01'
-            assert data.is_dir()
-            assert data.joinpath('subdirectory').is_dir()
-            assert len(list(data.iterdir()))
-        assert not data.parent.exists()
-
-
-class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
-    ZIP_MODULE = 'data02'
-
-    def test_unrelated_contents(self):
-        """
-        Test that a zip with two unrelated subpackages returns
-        distinct resources. Ref python/importlib_resources#44.
-        """
-        self.assertEqual(
-            names(resources.files('data02.one')),
-            {'__init__.py', 'resource1.txt'},
-        )
-        self.assertEqual(
-            names(resources.files('data02.two')),
-            {'__init__.py', 'resource2.txt'},
-        )
-
-
-class DeletingZipsTest(util.ZipSetupBase, unittest.TestCase):
-    """Having accessed resources in a zip file should not keep an open
-    reference to the zip.
-    """
-
-    def test_iterdir_does_not_keep_open(self):
-        [item.name for item in resources.files('data01').iterdir()]
-
-    def test_is_file_does_not_keep_open(self):
-        resources.files('data01').joinpath('binary.file').is_file()
-
-    def test_is_file_failure_does_not_keep_open(self):
-        resources.files('data01').joinpath('not-present').is_file()
-
-    @unittest.skip("Desired but not supported.")
-    def test_as_file_does_not_keep_open(self):  # pragma: no cover
-        resources.as_file(resources.files('data01') / 'binary.file')
-
-    def test_entered_path_does_not_keep_open(self):
-        """
-        Mimic what certifi does on import to make its bundle
-        available for the process duration.
-        """
-        resources.as_file(resources.files('data01') / 'binary.file').__enter__()
-
-    def test_read_binary_does_not_keep_open(self):
-        resources.files('data01').joinpath('binary.file').read_bytes()
-
-    def test_read_text_does_not_keep_open(self):
-        resources.files('data01').joinpath('utf-8.file').read_text(encoding='utf-8')
-
-
-class ResourceFromNamespaceTests:
-    def test_is_submodule_resource(self):
-        self.assertTrue(
-            resources.files(import_module('namespacedata01'))
-            .joinpath('binary.file')
-            .is_file()
-        )
-
-    def test_read_submodule_resource_by_name(self):
-        self.assertTrue(
-            resources.files('namespacedata01').joinpath('binary.file').is_file()
-        )
-
-    def test_submodule_contents(self):
-        contents = names(resources.files(import_module('namespacedata01')))
-        try:
-            contents.remove('__pycache__')
-        except KeyError:
-            pass
-        self.assertEqual(
-            contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
-        )
-
-    def test_submodule_contents_by_name(self):
-        contents = names(resources.files('namespacedata01'))
-        try:
-            contents.remove('__pycache__')
-        except KeyError:
-            pass
-        self.assertEqual(
-            contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
-        )
-
-    def test_submodule_sub_contents(self):
-        contents = names(resources.files(import_module('namespacedata01.subdirectory')))
-        try:
-            contents.remove('__pycache__')
-        except KeyError:
-            pass
-        self.assertEqual(contents, {'binary.file'})
-
-    def test_submodule_sub_contents_by_name(self):
-        contents = names(resources.files('namespacedata01.subdirectory'))
-        try:
-            contents.remove('__pycache__')
-        except KeyError:
-            pass
-        self.assertEqual(contents, {'binary.file'})
-
-
-class ResourceFromNamespaceDiskTests(ResourceFromNamespaceTests, unittest.TestCase):
-    site_dir = str(pathlib.Path(__file__).parent)
-
-    @classmethod
-    def setUpClass(cls):
-        sys.path.append(cls.site_dir)
-
-    @classmethod
-    def tearDownClass(cls):
-        sys.path.remove(cls.site_dir)
-
-
-class ResourceFromNamespaceZipTests(
-    util.ZipSetupBase,
-    ResourceFromNamespaceTests,
-    unittest.TestCase,
-):
-    ZIP_MODULE = 'namespacedata01'
-
-
-if __name__ == '__main__':
-    unittest.main()
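The `ResourceTests` mixin above boils down to the `Traversable` protocol: the object returned by `files()` answers `is_file()`/`is_dir()` without raising for missing entries, and `files()` accepts either a module object or its dotted name, as the zip and namespace variants verified. A sketch with a placeholder package:

.. code-block:: python

    from importlib import import_module, resources

    # 'mypkg' and the entry names are placeholders.
    root = resources.files('mypkg')
    print(root.joinpath('binary.file').is_file())   # True when present
    print(root.joinpath('not-a-file').is_file())    # False, no exception
    print(root.joinpath('subdirectory').is_dir())

    # The module object and the dotted name resolve to the same anchor:
    assert resources.files(import_module('mypkg')).joinpath('binary.file').is_file() \
        == root.joinpath('binary.file').is_file()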
diff --git a/pkg_resources/_vendor/importlib_resources/tests/util.py b/pkg_resources/_vendor/importlib_resources/tests/util.py
deleted file mode 100644
index fb827d2fa0..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/util.py
+++ /dev/null
@@ -1,164 +0,0 @@
-import abc
-import importlib
-import io
-import sys
-import types
-import pathlib
-import contextlib
-
-from . import data01
-from ..abc import ResourceReader
-from .compat.py39 import import_helper, os_helper
-from . import zip as zip_
-
-
-from importlib.machinery import ModuleSpec
-
-
-class Reader(ResourceReader):
-    def __init__(self, **kwargs):
-        vars(self).update(kwargs)
-
-    def get_resource_reader(self, package):
-        return self
-
-    def open_resource(self, path):
-        self._path = path
-        if isinstance(self.file, Exception):
-            raise self.file
-        return self.file
-
-    def resource_path(self, path_):
-        self._path = path_
-        if isinstance(self.path, Exception):
-            raise self.path
-        return self.path
-
-    def is_resource(self, path_):
-        self._path = path_
-        if isinstance(self.path, Exception):
-            raise self.path
-
-        def part(entry):
-            return entry.split('/')
-
-        return any(
-            len(parts) == 1 and parts[0] == path_ for parts in map(part, self._contents)
-        )
-
-    def contents(self):
-        if isinstance(self.path, Exception):
-            raise self.path
-        yield from self._contents
-
-
-def create_package_from_loader(loader, is_package=True):
-    name = 'testingpackage'
-    module = types.ModuleType(name)
-    spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package)
-    module.__spec__ = spec
-    module.__loader__ = loader
-    return module
-
-
-def create_package(file=None, path=None, is_package=True, contents=()):
-    return create_package_from_loader(
-        Reader(file=file, path=path, _contents=contents),
-        is_package,
-    )
-
-
-class CommonTests(metaclass=abc.ABCMeta):
-    """
-    Tests shared by test_open, test_path, and test_read.
-    """
-
-    @abc.abstractmethod
-    def execute(self, package, path):
-        """
-        Call the pertinent legacy API function (e.g. open_text, path)
-        on package and path.
-        """
-
-    def test_package_name(self):
-        """
-        Passing in the package name should succeed.
-        """
-        self.execute(data01.__name__, 'utf-8.file')
-
-    def test_package_object(self):
-        """
-        Passing in the package itself should succeed.
-        """
-        self.execute(data01, 'utf-8.file')
-
-    def test_string_path(self):
-        """
-        Passing in a string for the path should succeed.
-        """
-        path = 'utf-8.file'
-        self.execute(data01, path)
-
-    def test_pathlib_path(self):
-        """
-        Passing in a pathlib.PurePath object for the path should succeed.
-        """
-        path = pathlib.PurePath('utf-8.file')
-        self.execute(data01, path)
-
-    def test_importing_module_as_side_effect(self):
-        """
-        The anchor package can already be imported.
-        """
-        del sys.modules[data01.__name__]
-        self.execute(data01.__name__, 'utf-8.file')
-
-    def test_missing_path(self):
-        """
-        Attempting to open, read, or request the path for a
-        non-existent resource should succeed if open_resource
-        can return a viable data stream.
-        """
-        bytes_data = io.BytesIO(b'Hello, world!')
-        package = create_package(file=bytes_data, path=FileNotFoundError())
-        self.execute(package, 'utf-8.file')
-        self.assertEqual(package.__loader__._path, 'utf-8.file')
-
-    def test_extant_path(self):
-        # Attempting to open, read, or request the path when the
-        # path does exist should still succeed. Does not assert
-        # anything about the result.
-        bytes_data = io.BytesIO(b'Hello, world!')
-        # any path that exists
-        path = __file__
-        package = create_package(file=bytes_data, path=path)
-        self.execute(package, 'utf-8.file')
-        self.assertEqual(package.__loader__._path, 'utf-8.file')
-
-    def test_useless_loader(self):
-        package = create_package(file=FileNotFoundError(), path=FileNotFoundError())
-        with self.assertRaises(FileNotFoundError):
-            self.execute(package, 'utf-8.file')
-
-
-class ZipSetupBase:
-    ZIP_MODULE = 'data01'
-
-    def setUp(self):
-        self.fixtures = contextlib.ExitStack()
-        self.addCleanup(self.fixtures.close)
-
-        self.fixtures.enter_context(import_helper.isolated_modules())
-
-        temp_dir = self.fixtures.enter_context(os_helper.temp_dir())
-        modules = pathlib.Path(temp_dir) / 'zipped modules.zip'
-        src_path = pathlib.Path(__file__).parent.joinpath(self.ZIP_MODULE)
-        self.fixtures.enter_context(
-            import_helper.DirsOnSysPath(str(zip_.make_zip_file(src_path, modules)))
-        )
-
-        self.data = importlib.import_module(self.ZIP_MODULE)
-
-
-class ZipSetup(ZipSetupBase):
-    pass
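`ZipSetupBase` above builds a throwaway zip of a package, puts it on `sys.path`, and imports the package from the archive. Stripped of the test-support helpers, the core pattern is roughly the following sketch; the package directory name is hypothetical:

.. code-block:: python

    import importlib
    import pathlib
    import sys
    import tempfile
    import zipfile

    # 'mypkg' is a placeholder for a package directory next to this script.
    src = pathlib.Path('mypkg')
    archive = pathlib.Path(tempfile.mkdtemp()) / 'zipped modules.zip'
    with zipfile.ZipFile(archive, 'w') as zf:
        for file in src.rglob('*'):
            if file.is_file():
                arcname = pathlib.PurePosixPath(src.name) / file.relative_to(src).as_posix()
                zf.write(file, str(arcname))

    sys.path.insert(0, str(archive))   # zipimport makes the archive importable
    mypkg = importlib.import_module('mypkg')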
diff --git a/pkg_resources/_vendor/importlib_resources/tests/zip.py b/pkg_resources/_vendor/importlib_resources/tests/zip.py
deleted file mode 100644
index 962195a901..0000000000
--- a/pkg_resources/_vendor/importlib_resources/tests/zip.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""
-Generate zip test data files.
-"""
-
-import contextlib
-import os
-import pathlib
-import zipfile
-
-import zipp
-
-
-def make_zip_file(src, dst):
-    """
-    Zip the files in src into a new zipfile at dst.
-    """
-    with zipfile.ZipFile(dst, 'w') as zf:
-        for src_path, rel in walk(src):
-            dst_name = src.name / pathlib.PurePosixPath(rel.as_posix())
-            zf.write(src_path, dst_name)
-        zipp.CompleteDirs.inject(zf)
-    return dst
-
-
-def walk(datapath):
-    for dirpath, dirnames, filenames in os.walk(datapath):
-        with contextlib.suppress(ValueError):
-            dirnames.remove('__pycache__')
-        for filename in filenames:
-            res = pathlib.Path(dirpath) / filename
-            rel = res.relative_to(datapath)
-            yield res, rel
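One detail of `walk()` above is worth noting: removing `__pycache__` from `dirnames` *in place* is the documented way to prune subtrees during `os.walk`, and `contextlib.suppress(ValueError)` keeps that tidy when the entry is absent. The idiom in isolation:

.. code-block:: python

    import contextlib
    import os

    def walk_files(root):
        """Yield every file path under root, skipping __pycache__ subtrees."""
        for dirpath, dirnames, filenames in os.walk(root):
            # Mutating dirnames in place tells os.walk not to descend there.
            with contextlib.suppress(ValueError):
                dirnames.remove('__pycache__')
            for filename in filenames:
                yield os.path.join(dirpath, filename)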
diff --git a/pkg_resources/_vendor/inflect-7.3.1.dist-info/INSTALLER b/pkg_resources/_vendor/inflect-7.3.1.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/inflect-7.3.1.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/inflect-7.3.1.dist-info/LICENSE b/pkg_resources/_vendor/inflect-7.3.1.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/pkg_resources/_vendor/inflect-7.3.1.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/inflect-7.3.1.dist-info/METADATA b/pkg_resources/_vendor/inflect-7.3.1.dist-info/METADATA
deleted file mode 100644
index 9a2097a54a..0000000000
--- a/pkg_resources/_vendor/inflect-7.3.1.dist-info/METADATA
+++ /dev/null
@@ -1,591 +0,0 @@
-Metadata-Version: 2.1
-Name: inflect
-Version: 7.3.1
-Summary: Correctly generate plurals, singular nouns, ordinals, indefinite articles
-Author-email: Paul Dyson 
-Maintainer-email: "Jason R. Coombs" 
-Project-URL: Source, https://github.com/jaraco/inflect
-Keywords: plural,inflect,participle
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Natural Language :: English
-Classifier: Operating System :: OS Independent
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Topic :: Text Processing :: Linguistic
-Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
-License-File: LICENSE
-Requires-Dist: more-itertools >=8.5.0
-Requires-Dist: typeguard >=4.0.1
-Requires-Dist: typing-extensions ; python_version < "3.9"
-Provides-Extra: doc
-Requires-Dist: sphinx >=3.5 ; extra == 'doc'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
-Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
-Requires-Dist: furo ; extra == 'doc'
-Requires-Dist: sphinx-lint ; extra == 'doc'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
-Provides-Extra: test
-Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
-Requires-Dist: pytest-cov ; extra == 'test'
-Requires-Dist: pytest-mypy ; extra == 'test'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
-Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
-Requires-Dist: pygments ; extra == 'test'
-
-.. image:: https://img.shields.io/pypi/v/inflect.svg
-   :target: https://pypi.org/project/inflect
-
-.. image:: https://img.shields.io/pypi/pyversions/inflect.svg
-
-.. image:: https://github.com/jaraco/inflect/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/jaraco/inflect/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. image:: https://readthedocs.org/projects/inflect/badge/?version=latest
-   :target: https://inflect.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/inflect
-   :target: https://tidelift.com/subscription/pkg/pypi-inflect?utm_source=pypi-inflect&utm_medium=readme
-
-NAME
-====
-
-inflect.py - Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words.
-
-SYNOPSIS
-========
-
-.. code-block:: python
-
-    import inflect
-
-    p = inflect.engine()
-
-    # METHODS:
-
-    # plural plural_noun plural_verb plural_adj singular_noun no num
-    # compare compare_nouns compare_verbs compare_adjs
-    # a an
-    # present_participle
-    # ordinal number_to_words
-    # join
-    # inflect classical gender
-    # defnoun defverb defadj defa defan
-
-
-    # UNCONDITIONALLY FORM THE PLURAL
-
-    print("The plural of ", word, " is ", p.plural(word))
-
-
-    # CONDITIONALLY FORM THE PLURAL
-
-    print("I saw", cat_count, p.plural("cat", cat_count))
-
-
-    # FORM PLURALS FOR SPECIFIC PARTS OF SPEECH
-
-    print(
-        p.plural_noun("I", N1),
-        p.plural_verb("saw", N1),
-        p.plural_adj("my", N2),
-        p.plural_noun("saw", N2),
-    )
-
-
-    # FORM THE SINGULAR OF PLURAL NOUNS
-
-    print("The singular of ", word, " is ", p.singular_noun(word))
-
-    # SELECT THE GENDER OF SINGULAR PRONOUNS
-
-    print(p.singular_noun("they"))  # 'it'
-    p.gender("feminine")
-    print(p.singular_noun("they"))  # 'she'
-
-
-    # DEAL WITH "0/1/N" -> "no/1/N" TRANSLATION:
-
-    print("There ", p.plural_verb("was", errors), p.no(" error", errors))
-
-
-    # USE DEFAULT COUNTS:
-
-    print(
-        p.num(N1, ""),
-        p.plural("I"),
-        p.plural_verb(" saw"),
-        p.num(N2),
-        p.plural_noun(" saw"),
-    )
-    print("There ", p.num(errors, ""), p.plural_verb("was"), p.no(" error"))
-
-
-    # COMPARE TWO WORDS "NUMBER-INSENSITIVELY":
-
-    if p.compare(word1, word2):
-        print("same")
-    if p.compare_nouns(word1, word2):
-        print("same noun")
-    if p.compare_verbs(word1, word2):
-        print("same verb")
-    if p.compare_adjs(word1, word2):
-        print("same adj.")
-
-
-    # ADD CORRECT "a" OR "an" FOR A GIVEN WORD:
-
-    print("Did you want ", p.a(thing), " or ", p.an(idea))
-
-
-    # CONVERT NUMERALS INTO ORDINALS (i.e. 1->1st, 2->2nd, 3->3rd, etc.)
-
-    print("It was", p.ordinal(position), " from the left\n")
-
-    # CONVERT NUMERALS TO WORDS (i.e. 1->"one", 101->"one hundred and one", etc.)
-    # RETURNS A SINGLE STRING...
-
-    words = p.number_to_words(1234)
-    # "one thousand, two hundred and thirty-four"
-    words = p.number_to_words(p.ordinal(1234))
-    # "one thousand, two hundred and thirty-fourth"
-
-
-    # GET BACK A LIST OF STRINGS, ONE FOR EACH "CHUNK"...
-
-    words = p.number_to_words(1234, wantlist=True)
-    # ("one thousand","two hundred and thirty-four")
-
-
-    # OPTIONAL PARAMETERS CHANGE TRANSLATION:
-
-    words = p.number_to_words(12345, group=1)
-    # "one, two, three, four, five"
-
-    words = p.number_to_words(12345, group=2)
-    # "twelve, thirty-four, five"
-
-    words = p.number_to_words(12345, group=3)
-    # "one twenty-three, forty-five"
-
-    words = p.number_to_words(1234, andword="")
-    # "one thousand, two hundred thirty-four"
-
-    words = p.number_to_words(1234, andword=", plus")
-    # "one thousand, two hundred, plus thirty-four"
-    # TODO: I get no comma before plus: check perl
-
-    words = p.number_to_words(555_1202, group=1, zero="oh")
-    # "five, five, five, one, two, oh, two"
-
-    words = p.number_to_words(555_1202, group=1, one="unity")
-    # "five, five, five, unity, two, oh, two"
-
-    words = p.number_to_words(123.456, group=1, decimal="mark")
-    # "one two three mark four five six"
-    # TODO: DOCBUG: perl gives commas here as do I
-
-    # LITERAL STYLE ONLY NAMES NUMBERS LESS THAN A CERTAIN THRESHOLD...
-
-    words = p.number_to_words(9, threshold=10)  # "nine"
-    words = p.number_to_words(10, threshold=10)  # "ten"
-    words = p.number_to_words(11, threshold=10)  # "11"
-    words = p.number_to_words(1000, threshold=10)  # "1,000"
-
-    # JOIN WORDS INTO A LIST:
-
-    mylist = p.join(("apple", "banana", "carrot"))
-    # "apple, banana, and carrot"
-
-    mylist = p.join(("apple", "banana"))
-    # "apple and banana"
-
-    mylist = p.join(("apple", "banana", "carrot"), final_sep="")
-    # "apple, banana and carrot"
-
-
-    # REQUIRE "CLASSICAL" PLURALS (EG: "focus"->"foci", "cherub"->"cherubim")
-
-    p.classical()  # USE ALL CLASSICAL PLURALS
-
-    p.classical(all=True)  # USE ALL CLASSICAL PLURALS
-    p.classical(all=False)  # SWITCH OFF CLASSICAL MODE
-
-    p.classical(zero=True)  #  "no error" INSTEAD OF "no errors"
-    p.classical(zero=False)  #  "no errors" INSTEAD OF "no error"
-
-    p.classical(herd=True)  #  "2 buffalo" INSTEAD OF "2 buffalos"
-    p.classical(herd=False)  #  "2 buffalos" INSTEAD OF "2 buffalo"
-
-    p.classical(persons=True)  # "2 chairpersons" INSTEAD OF "2 chairpeople"
-    p.classical(persons=False)  # "2 chairpeople" INSTEAD OF "2 chairpersons"
-
-    p.classical(ancient=True)  # "2 formulae" INSTEAD OF "2 formulas"
-    p.classical(ancient=False)  # "2 formulas" INSTEAD OF "2 formulae"
-
-
-    # INTERPOLATE "plural()", "plural_noun()", "plural_verb()", "plural_adj()", "singular_noun()",
-    # a()", "an()", "num()" AND "ordinal()" WITHIN STRINGS:
-
-    print(p.inflect("The plural of {0} is plural('{0}')".format(word)))
-    print(p.inflect("The singular of {0} is singular_noun('{0}')".format(word)))
-    print(p.inflect("I saw {0} plural('cat',{0})".format(cat_count)))
-    print(
-        p.inflect(
-            "plural('I',{0}) "
-            "plural_verb('saw',{0}) "
-            "plural('a',{1}) "
-            "plural_noun('saw',{1})".format(N1, N2)
-        )
-    )
-    print(
-        p.inflect(
-            "num({0}, False)plural('I') "
-            "plural_verb('saw') "
-            "num({1}, False)plural('a') "
-            "plural_noun('saw')".format(N1, N2)
-        )
-    )
-    print(p.inflect("I saw num({0}) plural('cat')\nnum()".format(cat_count)))
-    print(p.inflect("There plural_verb('was',{0}) no('error',{0})".format(errors)))
-    print(p.inflect("There num({0}, False)plural_verb('was') no('error')".format(errors)))
-    print(p.inflect("Did you want a('{0}') or an('{1}')".format(thing, idea)))
-    print(p.inflect("It was ordinal('{0}') from the left".format(position)))
-
-
-    # ADD USER-DEFINED INFLECTIONS (OVERRIDING INBUILT RULES):
-
-    p.defnoun("VAX", "VAXen")  # SINGULAR => PLURAL
-
-    p.defverb(
-        "will",  # 1ST PERSON SINGULAR
-        "shall",  # 1ST PERSON PLURAL
-        "will",  # 2ND PERSON SINGULAR
-        "will",  # 2ND PERSON PLURAL
-        "will",  # 3RD PERSON SINGULAR
-        "will",  # 3RD PERSON PLURAL
-    )
-
-    p.defadj("hir", "their")  # SINGULAR => PLURAL
-
-    p.defa("h")  # "AY HALWAYS SEZ 'HAITCH'!"
-
-    p.defan("horrendous.*")  # "AN HORRENDOUS AFFECTATION"
-
-
-DESCRIPTION
-===========
-
-The methods of the class ``engine`` in module ``inflect.py`` provide plural
-inflections, singular noun inflections, "a"/"an" selection for English words,
-and manipulation of numbers as words.
-
-Plural forms of all nouns, most verbs, and some adjectives are
-provided. Where appropriate, "classical" variants (for example: "brother" ->
-"brethren", "dogma" -> "dogmata", etc.) are also provided.
-
-Singular forms of nouns are also provided. The gender of singular pronouns
-can be chosen (for example "they" -> "it" or "she" or "he" or "they").
-
-Pronunciation-based "a"/"an" selection is provided for all English
-words, and most initialisms.
-
-It is also possible to inflect numerals (1,2,3) to ordinals (1st, 2nd, 3rd)
-and to English words ("one", "two", "three").
-
-In generating these inflections, ``inflect.py`` follows the Oxford
-English Dictionary and the guidelines in Fowler's Modern English
-Usage, preferring the former where the two disagree.
-
-The module is built around standard British spelling, but is designed
-to cope with common American variants as well. Slang, jargon, and
-other English dialects are *not* explicitly catered for.
-
-Where two or more inflected forms exist for a single word (typically a
-"classical" form and a "modern" form), ``inflect.py`` prefers the
-more common form (typically the "modern" one), unless "classical"
-processing has been specified
-(see `MODERN VS CLASSICAL INFLECTIONS`).
-
-FORMING PLURALS AND SINGULARS
-=============================
-
-Inflecting Plurals and Singulars
---------------------------------
-
-All of the ``plural...`` plural inflection methods take the word to be
-inflected as their first argument and return the corresponding inflection.
-Note that all such methods expect the *singular* form of the word. The
-results of passing a plural form are undefined (and unlikely to be correct).
-Similarly, the ``si...`` singular inflection method expects the *plural*
-form of the word.
-
-The ``plural...`` methods also take an optional second argument,
-which indicates the grammatical "number" of the word (or of another word
-with which the word being inflected must agree). If the "number" argument is
-supplied and is not ``1`` (or ``"one"`` or ``"a"``, or some other adjective that
-implies the singular), the plural form of the word is returned. If the
-"number" argument *does* indicate singularity, the (uninflected) word
-itself is returned. If the number argument is omitted, the plural form
-is returned unconditionally.
-
-The ``si...`` method takes a second argument in a similar fashion. If it is
-some form of the number ``1``, or is omitted, the singular form is returned.
-Otherwise the plural is returned unaltered.
-
-
-The various methods of ``inflect.engine`` are:
-
-
-
-``plural_noun(word, count=None)``
-
- The method ``plural_noun()`` takes a *singular* English noun or
- pronoun and returns its plural. Pronouns in the nominative ("I" ->
- "we") and accusative ("me" -> "us") cases are handled, as are
- possessive pronouns ("mine" -> "ours").
-
-
-``plural_verb(word, count=None)``
-
- The method ``plural_verb()`` takes the *singular* form of a
- conjugated verb (that is, one which is already in the correct "person"
- and "mood") and returns the corresponding plural conjugation.
-
-
-``plural_adj(word, count=None)``
-
- The method ``plural_adj()`` takes the *singular* form of
- certain types of adjectives and returns the corresponding plural form.
- Adjectives that are correctly handled include: "numerical" adjectives
- ("a" -> "some"), demonstrative adjectives ("this" -> "these", "that" ->
- "those"), and possessives ("my" -> "our", "cat's" -> "cats'", "child's"
- -> "childrens'", etc.)
-
-
-``plural(word, count=None)``
-
- The method ``plural()`` takes a *singular* English noun,
- pronoun, verb, or adjective and returns its plural form. Where a word
- has more than one inflection depending on its part of speech (for
- example, the noun "thought" inflects to "thoughts", the verb "thought"
- to "thought"), the (singular) noun sense is preferred to the (singular)
- verb sense.
-
- Hence ``plural("knife")`` will return "knives" ("knife" having been treated
- as a singular noun), whereas ``plural("knifes")`` will return "knife"
- ("knifes" having been treated as a 3rd person singular verb).
-
- The inherent ambiguity of such cases suggests that,
- where the part of speech is known, ``plural_noun``, ``plural_verb``, and
- ``plural_adj`` should be used in preference to ``plural``.
-
-
-``singular_noun(word, count=None)``
-
- The method ``singular_noun()`` takes a *plural* English noun or
- pronoun and returns its singular. Pronouns in the nominative ("we" ->
- "I") and accusative ("us" -> "me") cases are handled, as are
- possessive pronouns ("ours" -> "mine"). When third person
- singular pronouns are returned they take the neuter gender by default
- ("they" -> "it"), not ("they"-> "she") nor ("they" -> "he"). This can be
- changed with ``gender()``.
-
-Note that all these methods ignore any whitespace surrounding the
-word being inflected, but preserve that whitespace when the result is
-returned. For example, ``plural(" cat  ")`` returns " cats  ".
-
-
-``gender(genderletter)``
-
- The third person plural pronoun takes the same form for the feminine, masculine,
- and neuter genders (e.g. "they"). The singular, however, depends upon gender
- (e.g. "she", "he", "it", and "they" -- "they" being the gender-neutral form).
- By default ``singular_noun`` returns the neuter form; the gender can be
- selected with the ``gender`` method. Pass the first letter of the gender to
- ``gender`` to select the f(eminine), m(asculine), n(euter) or t(hey)
- form of the singular, e.g. ``gender('f')`` followed by
- ``singular_noun('themselves')`` returns ``'herself'``.
-
-Numbered plurals
-----------------
-
-The ``plural...`` methods return only the inflected word, not the count that
-was used to inflect it. Thus, in order to produce "I saw 3 ducks", it
-is necessary to use:
-
-.. code-block:: python
-
-    print("I saw", N, p.plural_noun(animal, N))
-
-Since the usual purpose of producing a plural is to make it agree with
-a preceding count, inflect.py provides a method
-(``no(word, count)``) which, given a word and a(n optional) count, returns the
-count followed by the correctly inflected word. Hence the previous
-example can be rewritten:
-
-.. code-block:: python
-
-    print("I saw ", p.no(animal, N))
-
-In addition, if the count is zero (or some other term which implies
-zero, such as ``"zero"``, ``"nil"``, etc.) the count is replaced by the
-word "no". Hence, if ``N`` had the value zero, the previous example
-would print (the somewhat more elegant)::
-
-    I saw no animals
-
-rather than::
-
-    I saw 0 animals
-
-Note that the name of the method is a pun: the method
-returns either a number (a *No.*) or a ``"no"``, in front of the
-inflected word.
-
-
-Reducing the number of counts required
---------------------------------------
-
-In some contexts, the need to supply an explicit count to the various
-``plural...`` methods makes for tiresome repetition. For example:
-
-.. code-block:: python
-
-    print(
-        plural_adj("This", errors),
-        plural_noun(" error", errors),
-        plural_verb(" was", errors),
-        " fatal.",
-    )
-
-inflect.py therefore provides a method
-(``num(count=None, show=None)``) which may be used to set a persistent "default number"
-value. If such a value is set, it is subsequently used whenever an
-optional second "number" argument is omitted. The default value thus set
-can subsequently be removed by calling ``num()`` with no arguments.
-Hence we could rewrite the previous example:
-
-.. code-block:: python
-
-    p.num(errors)
-    print(p.plural_adj("This"), p.plural_noun(" error"), p.plural_verb(" was"), "fatal.")
-    p.num()
-
-Normally, ``num()`` returns its first argument, so that it may also
-be "inlined" in contexts like:
-
-.. code-block:: python
-
-    print(p.num(errors), p.plural_noun(" error"), p.plural_verb(" was"), " detected.")
-    if severity > 1:
-        print(
-            p.plural_adj("This"), p.plural_noun(" error"), p.plural_verb(" was"), "fatal."
-        )
-
-However, in certain contexts (see `INTERPOLATING INFLECTIONS IN STRINGS`)
-it is preferable that ``num()`` return an empty string. Hence ``num()``
-provides an optional second argument. If that argument is supplied (that is, if
-it is defined) and evaluates to false, ``num`` returns an empty string
-instead of its first argument. For example:
-
-.. code-block:: python
-
-    print(p.num(errors, 0), p.no("error"), p.plural_verb(" was"), " detected.")
-    if severity > 1:
-        print(
-            p.plural_adj("This"), p.plural_noun(" error"), p.plural_verb(" was"), "fatal."
-        )
-
-
-
-Number-insensitive equality
----------------------------
-
-inflect.py also provides a solution to the problem
-of comparing words of differing plurality through the methods
-``compare(word1, word2)``, ``compare_nouns(word1, word2)``,
-``compare_verbs(word1, word2)``, and ``compare_adjs(word1, word2)``.
-Each  of these methods takes two strings, and  compares them
-using the corresponding plural-inflection method (``plural()``, ``plural_noun()``,
-``plural_verb()``, and ``plural_adj()`` respectively).
-
-The comparison returns true if:
-
-- the strings are equal, or
-- one string is equal to a plural form of the other, or
-- the strings are two different plural forms of the one word.
-
-
-Hence all of the following return true:
-
-.. code-block:: python
-
-    p.compare("index", "index")  # RETURNS "eq"
-    p.compare("index", "indexes")  # RETURNS "s:p"
-    p.compare("index", "indices")  # RETURNS "s:p"
-    p.compare("indexes", "index")  # RETURNS "p:s"
-    p.compare("indices", "index")  # RETURNS "p:s"
-    p.compare("indices", "indexes")  # RETURNS "p:p"
-    p.compare("indexes", "indices")  # RETURNS "p:p"
-    p.compare("indices", "indices")  # RETURNS "eq"
-
-As indicated by the comments in the previous example, the actual value
-returned by the various ``compare`` methods encodes which of the
-three equality rules succeeded: "eq" is returned if the strings were
-identical, "s:p" if the strings were singular and plural respectively,
-"p:s" for plural and singular, and "p:p" for two distinct plurals.
-Inequality is indicated by returning an empty string.
-
-It should be noted that two distinct singular words which happen to take
-the same plural form are *not* considered equal, nor are cases where
-one (singular) word's plural is the other (plural) word's singular.
-Hence all of the following return false:
-
-.. code-block:: python
-
-    p.compare("base", "basis")  # ALTHOUGH BOTH -> "bases"
-    p.compare("syrinx", "syringe")  # ALTHOUGH BOTH -> "syringes"
-    p.compare("she", "he")  # ALTHOUGH BOTH -> "they"
-
-    p.compare("opus", "operas")  # ALTHOUGH "opus" -> "opera" -> "operas"
-    p.compare("taxi", "taxes")  # ALTHOUGH "taxi" -> "taxis" -> "taxes"
-
-Note too that, although the comparison is "number-insensitive", it is *not*
-case-insensitive (that is, ``compare("time", "Times")`` returns false). To obtain
-both number and case insensitivity, use the ``lower()`` method on both strings
-(that is, ``compare("time".lower(), "Times".lower())`` returns true).
-
-Related Functionality
-=====================
-
-Shout out to these libraries that provide related functionality:
-
-* `WordSet `_
-  parses identifiers like variable names into sets of words suitable for re-assembling
-  in another form.
-
-* `word2number `_ converts words to
-  a number.
-
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more `_.
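Taken together, the API the deleted METADATA documents composes naturally. A minimal, self-contained sketch (requires the `inflect` distribution from PyPI; expected outputs follow the documentation above):

.. code-block:: python

    import inflect

    p = inflect.engine()

    print(p.plural('cat'))                 # 'cats'
    print(p.plural('cat', 1))              # 'cat' -- a count of 1 suppresses inflection
    print(p.singular_noun('indices'))      # 'index'
    print(p.no('error', 0))                # 'no errors'
    print(p.ordinal(3))                    # '3rd'
    print(p.number_to_words(1234))         # 'one thousand, two hundred and thirty-four'
    print(p.compare('index', 'indices'))   # 's:p'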
diff --git a/pkg_resources/_vendor/inflect-7.3.1.dist-info/RECORD b/pkg_resources/_vendor/inflect-7.3.1.dist-info/RECORD
deleted file mode 100644
index 73ff576be5..0000000000
--- a/pkg_resources/_vendor/inflect-7.3.1.dist-info/RECORD
+++ /dev/null
@@ -1,13 +0,0 @@
-inflect-7.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-inflect-7.3.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-inflect-7.3.1.dist-info/METADATA,sha256=ZgMNY0WAZRs-U8wZiV2SMfjSKqBrMngXyDMs_CAwMwg,21079
-inflect-7.3.1.dist-info/RECORD,,
-inflect-7.3.1.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
-inflect-7.3.1.dist-info/top_level.txt,sha256=m52ujdp10CqT6jh1XQxZT6kEntcnv-7Tl7UiGNTzWZA,8
-inflect/__init__.py,sha256=Jxy1HJXZiZ85kHeLAhkmvz6EMTdFqBe-duvt34R6IOc,103796
-inflect/__pycache__/__init__.cpython-312.pyc,,
-inflect/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-inflect/compat/__pycache__/__init__.cpython-312.pyc,,
-inflect/compat/__pycache__/py38.cpython-312.pyc,,
-inflect/compat/py38.py,sha256=oObVfVnWX9_OpnOuEJn1mFbJxVhwyR5epbiTNXDDaso,160
-inflect/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/inflect-7.3.1.dist-info/WHEEL b/pkg_resources/_vendor/inflect-7.3.1.dist-info/WHEEL
deleted file mode 100644
index 564c6724e4..0000000000
--- a/pkg_resources/_vendor/inflect-7.3.1.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: setuptools (70.2.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/inflect-7.3.1.dist-info/top_level.txt b/pkg_resources/_vendor/inflect-7.3.1.dist-info/top_level.txt
deleted file mode 100644
index 0fd75fab3e..0000000000
--- a/pkg_resources/_vendor/inflect-7.3.1.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-inflect
diff --git a/pkg_resources/_vendor/inflect/__init__.py b/pkg_resources/_vendor/inflect/__init__.py
deleted file mode 100644
index 3eec27f4c6..0000000000
--- a/pkg_resources/_vendor/inflect/__init__.py
+++ /dev/null
@@ -1,3986 +0,0 @@
-"""
-inflect: English language inflection
- - correctly generate plurals, ordinals, indefinite articles
- - convert numbers to words
-
-Copyright (C) 2010 Paul Dyson
-
-Based upon the Perl module
-`Lingua::EN::Inflect `_.
-
-methods:
-    classical inflect
-    plural plural_noun plural_verb plural_adj singular_noun no num a an
-    compare compare_nouns compare_verbs compare_adjs
-    present_participle
-    ordinal
-    number_to_words
-    join
-    defnoun defverb defadj defa defan
-
-INFLECTIONS:
-    classical inflect
-    plural plural_noun plural_verb plural_adj singular_noun compare
-    no num a an present_participle
-
-PLURALS:
-    classical inflect
-    plural plural_noun plural_verb plural_adj singular_noun no num
-    compare compare_nouns compare_verbs compare_adjs
-
-COMPARISONS:
-    classical
-    compare compare_nouns compare_verbs compare_adjs
-
-ARTICLES:
-    classical inflect num a an
-
-NUMERICAL:
-    ordinal number_to_words
-
-USER_DEFINED:
-    defnoun defverb defadj defa defan
-
-Exceptions:
- UnknownClassicalModeError
- BadNumValueError
- BadChunkingOptionError
- NumOutOfRangeError
- BadUserDefinedPatternError
- BadRcFileError
- BadGenderError
-
-"""
-
-from __future__ import annotations
-
-import ast
-import collections
-import contextlib
-import functools
-import itertools
-import re
-from numbers import Number
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    Dict,
-    Iterable,
-    List,
-    Literal,
-    Match,
-    Optional,
-    Sequence,
-    Tuple,
-    Union,
-    cast,
-)
-
-from more_itertools import windowed_complete
-from typeguard import typechecked
-
-from .compat.py38 import Annotated
-
-
-class UnknownClassicalModeError(Exception):
-    pass
-
-
-class BadNumValueError(Exception):
-    pass
-
-
-class BadChunkingOptionError(Exception):
-    pass
-
-
-class NumOutOfRangeError(Exception):
-    pass
-
-
-class BadUserDefinedPatternError(Exception):
-    pass
-
-
-class BadRcFileError(Exception):
-    pass
-
-
-class BadGenderError(Exception):
-    pass
-
-
-def enclose(s: str) -> str:
-    return f"(?:{s})"
-
-
-def joinstem(cutpoint: Optional[int] = 0, words: Optional[Iterable[str]] = None) -> str:
-    """
-    Join stem of each word in words into a string for regex.
-
-    Each word is truncated at cutpoint.
-
-    Cutpoint is usually negative indicating the number of letters to remove
-    from the end of each word.
-
-    >>> joinstem(-2, ["ephemeris", "iris", ".*itis"])
-    '(?:ephemer|ir|.*it)'
-
-    >>> joinstem(None, ["ephemeris"])
-    '(?:ephemeris)'
-
-    >>> joinstem(5, None)
-    '(?:)'
-    """
-    return enclose("|".join(w[:cutpoint] for w in words or []))
-
-
-def bysize(words: Iterable[str]) -> Dict[int, set]:
-    """
-    From a list of words, return a dict of sets sorted by word length.
-
-    >>> words = ['ant', 'cat', 'dog', 'pig', 'frog', 'goat', 'horse', 'elephant']
-    >>> ret = bysize(words)
-    >>> sorted(ret[3])
-    ['ant', 'cat', 'dog', 'pig']
-    >>> ret[5]
-    {'horse'}
-    """
-    res: Dict[int, set] = collections.defaultdict(set)
-    for w in words:
-        res[len(w)].add(w)
-    return res
-
-
-def make_pl_si_lists(
-    lst: Iterable[str],
-    plending: str,
-    siendingsize: Optional[int],
-    dojoinstem: bool = True,
-):
-    """
-    given a list of singular words: lst
-
-    an ending to append to make the plural: plending
-
-    the number of characters to remove from the singular
-    before appending plending: siendingsize
-
-    a flag whether to create a joinstem: dojoinstem
-
-    return:
-    a list of pluralised words: si_list (called si because this is what you need to
-    look for to make the singular)
-
-    the pluralised words as a dict of sets sorted by word length: si_bysize
-    the singular words as a dict of sets sorted by word length: pl_bysize
-    if dojoinstem is True: a regular expression that matches any of the stems: stem
-    """
-    if siendingsize is not None:
-        siendingsize = -siendingsize
-    si_list = [w[:siendingsize] + plending for w in lst]
-    pl_bysize = bysize(lst)
-    si_bysize = bysize(si_list)
-    if dojoinstem:
-        stem = joinstem(siendingsize, lst)
-        return si_list, si_bysize, pl_bysize, stem
-    else:
-        return si_list, si_bysize, pl_bysize
-
-
-# 1. PLURALS
-
-pl_sb_irregular_s = {
-    "corpus": "corpuses|corpora",
-    "opus": "opuses|opera",
-    "genus": "genera",
-    "mythos": "mythoi",
-    "penis": "penises|penes",
-    "testis": "testes",
-    "atlas": "atlases|atlantes",
-    "yes": "yeses",
-}
-
-pl_sb_irregular = {
-    "child": "children",
-    "chili": "chilis|chilies",
-    "brother": "brothers|brethren",
-    "infinity": "infinities|infinity",
-    "loaf": "loaves",
-    "lore": "lores|lore",
-    "hoof": "hoofs|hooves",
-    "beef": "beefs|beeves",
-    "thief": "thiefs|thieves",
-    "money": "monies",
-    "mongoose": "mongooses",
-    "ox": "oxen",
-    "cow": "cows|kine",
-    "graffito": "graffiti",
-    "octopus": "octopuses|octopodes",
-    "genie": "genies|genii",
-    "ganglion": "ganglions|ganglia",
-    "trilby": "trilbys",
-    "turf": "turfs|turves",
-    "numen": "numina",
-    "atman": "atmas",
-    "occiput": "occiputs|occipita",
-    "sabretooth": "sabretooths",
-    "sabertooth": "sabertooths",
-    "lowlife": "lowlifes",
-    "flatfoot": "flatfoots",
-    "tenderfoot": "tenderfoots",
-    "romany": "romanies",
-    "jerry": "jerries",
-    "mary": "maries",
-    "talouse": "talouses",
-    "rom": "roma",
-    "carmen": "carmina",
-}
-
-pl_sb_irregular.update(pl_sb_irregular_s)
-# pl_sb_irregular_keys = enclose('|'.join(pl_sb_irregular.keys()))
-
-pl_sb_irregular_caps = {
-    "Romany": "Romanies",
-    "Jerry": "Jerrys",
-    "Mary": "Marys",
-    "Rom": "Roma",
-}
-
-pl_sb_irregular_compound = {"prima donna": "prima donnas|prime donne"}
-
-si_sb_irregular = {v: k for (k, v) in pl_sb_irregular.items()}
-for k in list(si_sb_irregular):
-    if "|" in k:
-        k1, k2 = k.split("|")
-        si_sb_irregular[k1] = si_sb_irregular[k2] = si_sb_irregular[k]
-        del si_sb_irregular[k]
-si_sb_irregular_caps = {v: k for (k, v) in pl_sb_irregular_caps.items()}
-si_sb_irregular_compound = {v: k for (k, v) in pl_sb_irregular_compound.items()}
-for k in list(si_sb_irregular_compound):
-    if "|" in k:
-        k1, k2 = k.split("|")
-        si_sb_irregular_compound[k1] = si_sb_irregular_compound[k2] = (
-            si_sb_irregular_compound[k]
-        )
-        del si_sb_irregular_compound[k]
-
-# si_sb_irregular_keys = enclose('|'.join(si_sb_irregular.keys()))
-
-# Z's that don't double
-
-pl_sb_z_zes_list = ("quartz", "topaz")
-pl_sb_z_zes_bysize = bysize(pl_sb_z_zes_list)
-
-pl_sb_ze_zes_list = ("snooze",)
-pl_sb_ze_zes_bysize = bysize(pl_sb_ze_zes_list)
-
-
-# CLASSICAL "..is" -> "..ides"
-
-pl_sb_C_is_ides_complete = [
-    # GENERAL WORDS...
-    "ephemeris",
-    "iris",
-    "clitoris",
-    "chrysalis",
-    "epididymis",
-]
-
-pl_sb_C_is_ides_endings = [
-    # INFLAMATIONS...
-    "itis"
-]
-
-pl_sb_C_is_ides = joinstem(
-    -2, pl_sb_C_is_ides_complete + [f".*{w}" for w in pl_sb_C_is_ides_endings]
-)
-
-pl_sb_C_is_ides_list = pl_sb_C_is_ides_complete + pl_sb_C_is_ides_endings
-
-(
-    si_sb_C_is_ides_list,
-    si_sb_C_is_ides_bysize,
-    pl_sb_C_is_ides_bysize,
-) = make_pl_si_lists(pl_sb_C_is_ides_list, "ides", 2, dojoinstem=False)
-
-
-# CLASSICAL "..a" -> "..ata"
-
-pl_sb_C_a_ata_list = (
-    "anathema",
-    "bema",
-    "carcinoma",
-    "charisma",
-    "diploma",
-    "dogma",
-    "drama",
-    "edema",
-    "enema",
-    "enigma",
-    "lemma",
-    "lymphoma",
-    "magma",
-    "melisma",
-    "miasma",
-    "oedema",
-    "sarcoma",
-    "schema",
-    "soma",
-    "stigma",
-    "stoma",
-    "trauma",
-    "gumma",
-    "pragma",
-)
-
-(
-    si_sb_C_a_ata_list,
-    si_sb_C_a_ata_bysize,
-    pl_sb_C_a_ata_bysize,
-    pl_sb_C_a_ata,
-) = make_pl_si_lists(pl_sb_C_a_ata_list, "ata", 1)
-
-# UNCONDITIONAL "..a" -> "..ae"
-
-pl_sb_U_a_ae_list = (
-    "alumna",
-    "alga",
-    "vertebra",
-    "persona",
-    "vita",
-)
-(
-    si_sb_U_a_ae_list,
-    si_sb_U_a_ae_bysize,
-    pl_sb_U_a_ae_bysize,
-    pl_sb_U_a_ae,
-) = make_pl_si_lists(pl_sb_U_a_ae_list, "e", None)
-
-# CLASSICAL "..a" -> "..ae"
-
-pl_sb_C_a_ae_list = (
-    "amoeba",
-    "antenna",
-    "formula",
-    "hyperbola",
-    "medusa",
-    "nebula",
-    "parabola",
-    "abscissa",
-    "hydra",
-    "nova",
-    "lacuna",
-    "aurora",
-    "umbra",
-    "flora",
-    "fauna",
-)
-(
-    si_sb_C_a_ae_list,
-    si_sb_C_a_ae_bysize,
-    pl_sb_C_a_ae_bysize,
-    pl_sb_C_a_ae,
-) = make_pl_si_lists(pl_sb_C_a_ae_list, "e", None)
-
-
-# CLASSICAL "..en" -> "..ina"
-
-pl_sb_C_en_ina_list = ("stamen", "foramen", "lumen")
-
-(
-    si_sb_C_en_ina_list,
-    si_sb_C_en_ina_bysize,
-    pl_sb_C_en_ina_bysize,
-    pl_sb_C_en_ina,
-) = make_pl_si_lists(pl_sb_C_en_ina_list, "ina", 2)
-
-
-# UNCONDITIONAL "..um" -> "..a"
-
-pl_sb_U_um_a_list = (
-    "bacterium",
-    "agendum",
-    "desideratum",
-    "erratum",
-    "stratum",
-    "datum",
-    "ovum",
-    "extremum",
-    "candelabrum",
-)
-(
-    si_sb_U_um_a_list,
-    si_sb_U_um_a_bysize,
-    pl_sb_U_um_a_bysize,
-    pl_sb_U_um_a,
-) = make_pl_si_lists(pl_sb_U_um_a_list, "a", 2)
-
-# CLASSICAL "..um" -> "..a"
-
-pl_sb_C_um_a_list = (
-    "maximum",
-    "minimum",
-    "momentum",
-    "optimum",
-    "quantum",
-    "cranium",
-    "curriculum",
-    "dictum",
-    "phylum",
-    "aquarium",
-    "compendium",
-    "emporium",
-    "encomium",
-    "gymnasium",
-    "honorarium",
-    "interregnum",
-    "lustrum",
-    "memorandum",
-    "millennium",
-    "rostrum",
-    "spectrum",
-    "speculum",
-    "stadium",
-    "trapezium",
-    "ultimatum",
-    "medium",
-    "vacuum",
-    "velum",
-    "consortium",
-    "arboretum",
-)
-
-(
-    si_sb_C_um_a_list,
-    si_sb_C_um_a_bysize,
-    pl_sb_C_um_a_bysize,
-    pl_sb_C_um_a,
-) = make_pl_si_lists(pl_sb_C_um_a_list, "a", 2)
-
-
-# UNCONDITIONAL "..us" -> "i"
-
-pl_sb_U_us_i_list = (
-    "alumnus",
-    "alveolus",
-    "bacillus",
-    "bronchus",
-    "locus",
-    "nucleus",
-    "stimulus",
-    "meniscus",
-    "sarcophagus",
-)
-(
-    si_sb_U_us_i_list,
-    si_sb_U_us_i_bysize,
-    pl_sb_U_us_i_bysize,
-    pl_sb_U_us_i,
-) = make_pl_si_lists(pl_sb_U_us_i_list, "i", 2)
-
-# CLASSICAL "..us" -> "..i"
-
-pl_sb_C_us_i_list = (
-    "focus",
-    "radius",
-    "genius",
-    "incubus",
-    "succubus",
-    "nimbus",
-    "fungus",
-    "nucleolus",
-    "stylus",
-    "torus",
-    "umbilicus",
-    "uterus",
-    "hippopotamus",
-    "cactus",
-)
-
-(
-    si_sb_C_us_i_list,
-    si_sb_C_us_i_bysize,
-    pl_sb_C_us_i_bysize,
-    pl_sb_C_us_i,
-) = make_pl_si_lists(pl_sb_C_us_i_list, "i", 2)
-
-
-# CLASSICAL "..us" -> "..us"  (ASSIMILATED 4TH DECLENSION LATIN NOUNS)
-
-pl_sb_C_us_us = (
-    "status",
-    "apparatus",
-    "prospectus",
-    "sinus",
-    "hiatus",
-    "impetus",
-    "plexus",
-)
-pl_sb_C_us_us_bysize = bysize(pl_sb_C_us_us)
-
-# UNCONDITIONAL "..on" -> "a"
-
-pl_sb_U_on_a_list = (
-    "criterion",
-    "perihelion",
-    "aphelion",
-    "phenomenon",
-    "prolegomenon",
-    "noumenon",
-    "organon",
-    "asyndeton",
-    "hyperbaton",
-)
-(
-    si_sb_U_on_a_list,
-    si_sb_U_on_a_bysize,
-    pl_sb_U_on_a_bysize,
-    pl_sb_U_on_a,
-) = make_pl_si_lists(pl_sb_U_on_a_list, "a", 2)
-
-# CLASSICAL "..on" -> "..a"
-
-pl_sb_C_on_a_list = ("oxymoron",)
-
-(
-    si_sb_C_on_a_list,
-    si_sb_C_on_a_bysize,
-    pl_sb_C_on_a_bysize,
-    pl_sb_C_on_a,
-) = make_pl_si_lists(pl_sb_C_on_a_list, "a", 2)
-
-
-# CLASSICAL "..o" -> "..i"  (BUT NORMALLY -> "..os")
-
-pl_sb_C_o_i = [
-    "solo",
-    "soprano",
-    "basso",
-    "alto",
-    "contralto",
-    "tempo",
-    "piano",
-    "virtuoso",
-]  # list not tuple so can concat for pl_sb_U_o_os
-
-pl_sb_C_o_i_bysize = bysize(pl_sb_C_o_i)
-si_sb_C_o_i_bysize = bysize([f"{w[:-1]}i" for w in pl_sb_C_o_i])
-
-pl_sb_C_o_i_stems = joinstem(-1, pl_sb_C_o_i)
-
-# ALWAYS "..o" -> "..os"
-
-pl_sb_U_o_os_complete = {"ado", "ISO", "NATO", "NCO", "NGO", "oto"}
-si_sb_U_o_os_complete = {f"{w}s" for w in pl_sb_U_o_os_complete}
-
-
-pl_sb_U_o_os_endings = [
-    "aficionado",
-    "aggro",
-    "albino",
-    "allegro",
-    "ammo",
-    "Antananarivo",
-    "archipelago",
-    "armadillo",
-    "auto",
-    "avocado",
-    "Bamako",
-    "Barquisimeto",
-    "bimbo",
-    "bingo",
-    "Biro",
-    "bolero",
-    "Bolzano",
-    "bongo",
-    "Boto",
-    "burro",
-    "Cairo",
-    "canto",
-    "cappuccino",
-    "casino",
-    "cello",
-    "Chicago",
-    "Chimango",
-    "cilantro",
-    "cochito",
-    "coco",
-    "Colombo",
-    "Colorado",
-    "commando",
-    "concertino",
-    "contango",
-    "credo",
-    "crescendo",
-    "cyano",
-    "demo",
-    "ditto",
-    "Draco",
-    "dynamo",
-    "embryo",
-    "Esperanto",
-    "espresso",
-    "euro",
-    "falsetto",
-    "Faro",
-    "fiasco",
-    "Filipino",
-    "flamenco",
-    "furioso",
-    "generalissimo",
-    "Gestapo",
-    "ghetto",
-    "gigolo",
-    "gizmo",
-    "Greensboro",
-    "gringo",
-    "Guaiabero",
-    "guano",
-    "gumbo",
-    "gyro",
-    "hairdo",
-    "hippo",
-    "Idaho",
-    "impetigo",
-    "inferno",
-    "info",
-    "intermezzo",
-    "intertrigo",
-    "Iquico",
-    "jumbo",
-    "junto",
-    "Kakapo",
-    "kilo",
-    "Kinkimavo",
-    "Kokako",
-    "Kosovo",
-    "Lesotho",
-    "libero",
-    "libido",
-    "libretto",
-    "lido",
-    "Lilo",
-    "limbo",
-    "limo",
-    "lineno",
-    "lingo",
-    "lino",
-    "livedo",
-    "loco",
-    "logo",
-    "lumbago",
-    "macho",
-    "macro",
-    "mafioso",
-    "magneto",
-    "magnifico",
-    "Majuro",
-    "Malabo",
-    "manifesto",
-    "Maputo",
-    "Maracaibo",
-    "medico",
-    "memo",
-    "metro",
-    "Mexico",
-    "micro",
-    "Milano",
-    "Monaco",
-    "mono",
-    "Montenegro",
-    "Morocco",
-    "Muqdisho",
-    "myo",
-    "neutrino",
-    "Ningbo",
-    "octavo",
-    "oregano",
-    "Orinoco",
-    "Orlando",
-    "Oslo",
-    "panto",
-    "Paramaribo",
-    "Pardusco",
-    "pedalo",
-    "photo",
-    "pimento",
-    "pinto",
-    "pleco",
-    "Pluto",
-    "pogo",
-    "polo",
-    "poncho",
-    "Porto-Novo",
-    "Porto",
-    "pro",
-    "psycho",
-    "pueblo",
-    "quarto",
-    "Quito",
-    "repo",
-    "rhino",
-    "risotto",
-    "rococo",
-    "rondo",
-    "Sacramento",
-    "saddo",
-    "sago",
-    "salvo",
-    "Santiago",
-    "Sapporo",
-    "Sarajevo",
-    "scherzando",
-    "scherzo",
-    "silo",
-    "sirocco",
-    "sombrero",
-    "staccato",
-    "sterno",
-    "stucco",
-    "stylo",
-    "sumo",
-    "Taiko",
-    "techno",
-    "terrazzo",
-    "testudo",
-    "timpano",
-    "tiro",
-    "tobacco",
-    "Togo",
-    "Tokyo",
-    "torero",
-    "Torino",
-    "Toronto",
-    "torso",
-    "tremolo",
-    "typo",
-    "tyro",
-    "ufo",
-    "UNESCO",
-    "vaquero",
-    "vermicello",
-    "verso",
-    "vibrato",
-    "violoncello",
-    "Virgo",
-    "weirdo",
-    "WHO",
-    "WTO",
-    "Yamoussoukro",
-    "yo-yo",
-    "zero",
-    "Zibo",
-] + pl_sb_C_o_i
-
-pl_sb_U_o_os_bysize = bysize(pl_sb_U_o_os_endings)
-si_sb_U_o_os_bysize = bysize([f"{w}s" for w in pl_sb_U_o_os_endings])
-
-
-# UNCONDITIONAL "..ch" -> "..chs"
-
-pl_sb_U_ch_chs_list = ("czech", "eunuch", "stomach")
-
-(
-    si_sb_U_ch_chs_list,
-    si_sb_U_ch_chs_bysize,
-    pl_sb_U_ch_chs_bysize,
-    pl_sb_U_ch_chs,
-) = make_pl_si_lists(pl_sb_U_ch_chs_list, "s", None)
-
-
-# UNCONDITIONAL "..[ei]x" -> "..ices"
-
-pl_sb_U_ex_ices_list = ("codex", "murex", "silex")
-(
-    si_sb_U_ex_ices_list,
-    si_sb_U_ex_ices_bysize,
-    pl_sb_U_ex_ices_bysize,
-    pl_sb_U_ex_ices,
-) = make_pl_si_lists(pl_sb_U_ex_ices_list, "ices", 2)
-
-pl_sb_U_ix_ices_list = ("radix", "helix")
-(
-    si_sb_U_ix_ices_list,
-    si_sb_U_ix_ices_bysize,
-    pl_sb_U_ix_ices_bysize,
-    pl_sb_U_ix_ices,
-) = make_pl_si_lists(pl_sb_U_ix_ices_list, "ices", 2)
-
-# CLASSICAL "..[ei]x" -> "..ices"
-
-pl_sb_C_ex_ices_list = (
-    "vortex",
-    "vertex",
-    "cortex",
-    "latex",
-    "pontifex",
-    "apex",
-    "index",
-    "simplex",
-)
-
-(
-    si_sb_C_ex_ices_list,
-    si_sb_C_ex_ices_bysize,
-    pl_sb_C_ex_ices_bysize,
-    pl_sb_C_ex_ices,
-) = make_pl_si_lists(pl_sb_C_ex_ices_list, "ices", 2)
-
-
-pl_sb_C_ix_ices_list = ("appendix",)
-
-(
-    si_sb_C_ix_ices_list,
-    si_sb_C_ix_ices_bysize,
-    pl_sb_C_ix_ices_bysize,
-    pl_sb_C_ix_ices,
-) = make_pl_si_lists(pl_sb_C_ix_ices_list, "ices", 2)
-
-
-# ARABIC: ".." -> "..i"
-
-pl_sb_C_i_list = ("afrit", "afreet", "efreet")
-
-(si_sb_C_i_list, si_sb_C_i_bysize, pl_sb_C_i_bysize, pl_sb_C_i) = make_pl_si_lists(
-    pl_sb_C_i_list, "i", None
-)
-
-
-# HEBREW: ".." -> "..im"
-
-pl_sb_C_im_list = ("goy", "seraph", "cherub")
-
-(si_sb_C_im_list, si_sb_C_im_bysize, pl_sb_C_im_bysize, pl_sb_C_im) = make_pl_si_lists(
-    pl_sb_C_im_list, "im", None
-)
-
-
-# UNCONDITIONAL "..man" -> "..mans"
-
-pl_sb_U_man_mans_list = """
-    ataman caiman cayman ceriman
-    desman dolman farman harman hetman
-    human leman ottoman shaman talisman
-""".split()
-pl_sb_U_man_mans_caps_list = """
-    Alabaman Bahaman Burman German
-    Hiroshiman Liman Nakayaman Norman Oklahoman
-    Panaman Roman Selman Sonaman Tacoman Yakiman
-    Yokohaman Yuman
-""".split()
-
-(
-    si_sb_U_man_mans_list,
-    si_sb_U_man_mans_bysize,
-    pl_sb_U_man_mans_bysize,
-) = make_pl_si_lists(pl_sb_U_man_mans_list, "s", None, dojoinstem=False)
-(
-    si_sb_U_man_mans_caps_list,
-    si_sb_U_man_mans_caps_bysize,
-    pl_sb_U_man_mans_caps_bysize,
-) = make_pl_si_lists(pl_sb_U_man_mans_caps_list, "s", None, dojoinstem=False)
-
-# UNCONDITIONAL "..louse" -> "..lice"
-pl_sb_U_louse_lice_list = ("booklouse", "grapelouse", "louse", "woodlouse")
-
-(
-    si_sb_U_louse_lice_list,
-    si_sb_U_louse_lice_bysize,
-    pl_sb_U_louse_lice_bysize,
-) = make_pl_si_lists(pl_sb_U_louse_lice_list, "lice", 5, dojoinstem=False)
-
-pl_sb_uninflected_s_complete = [
-    # PAIRS OR GROUPS SUBSUMED TO A SINGULAR...
-    "breeches",
-    "britches",
-    "pajamas",
-    "pyjamas",
-    "clippers",
-    "gallows",
-    "hijinks",
-    "headquarters",
-    "pliers",
-    "scissors",
-    "testes",
-    "herpes",
-    "pincers",
-    "shears",
-    "proceedings",
-    "trousers",
-    # UNASSIMILATED LATIN 4th DECLENSION
-    "cantus",
-    "coitus",
-    "nexus",
-    # RECENT IMPORTS...
-    "contretemps",
-    "corps",
-    "debris",
-    "siemens",
-    # DISEASES
-    "mumps",
-    # MISCELLANEOUS OTHERS...
-    "diabetes",
-    "jackanapes",
-    "series",
-    "species",
-    "subspecies",
-    "rabies",
-    "chassis",
-    "innings",
-    "news",
-    "mews",
-    "haggis",
-]
-
-pl_sb_uninflected_s_endings = [
-    # RECENT IMPORTS...
-    "ois",
-    # DISEASES
-    "measles",
-]
-
-pl_sb_uninflected_s = pl_sb_uninflected_s_complete + [
-    f".*{w}" for w in pl_sb_uninflected_s_endings
-]
-
-pl_sb_uninflected_herd = (
-    # DON'T INFLECT IN CLASSICAL MODE, OTHERWISE NORMAL INFLECTION
-    "wildebeest",
-    "swine",
-    "eland",
-    "bison",
-    "buffalo",
-    "cattle",
-    "elk",
-    "rhinoceros",
-    "zucchini",
-    "caribou",
-    "dace",
-    "grouse",
-    "guinea fowl",
-    "guinea-fowl",
-    "haddock",
-    "hake",
-    "halibut",
-    "herring",
-    "mackerel",
-    "pickerel",
-    "pike",
-    "roe",
-    "seed",
-    "shad",
-    "snipe",
-    "teal",
-    "turbot",
-    "water fowl",
-    "water-fowl",
-)
-
-pl_sb_uninflected_complete = [
-    # SOME FISH AND HERD ANIMALS
-    "tuna",
-    "salmon",
-    "mackerel",
-    "trout",
-    "bream",
-    "sea-bass",
-    "sea bass",
-    "carp",
-    "cod",
-    "flounder",
-    "whiting",
-    "moose",
-    # OTHER ODDITIES
-    "graffiti",
-    "djinn",
-    "samuri",
-    "offspring",
-    "pence",
-    "quid",
-    "hertz",
-] + pl_sb_uninflected_s_complete
-# SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE)
-
-pl_sb_uninflected_caps = [
-    # ALL NATIONALS ENDING IN -ese
-    "Portuguese",
-    "Amoyese",
-    "Borghese",
-    "Congoese",
-    "Faroese",
-    "Foochowese",
-    "Genevese",
-    "Genoese",
-    "Gilbertese",
-    "Hottentotese",
-    "Kiplingese",
-    "Kongoese",
-    "Lucchese",
-    "Maltese",
-    "Nankingese",
-    "Niasese",
-    "Pekingese",
-    "Piedmontese",
-    "Pistoiese",
-    "Sarawakese",
-    "Shavese",
-    "Vermontese",
-    "Wenchowese",
-    "Yengeese",
-]
-
-
-pl_sb_uninflected_endings = [
-    # UNCOUNTABLE NOUNS
-    "butter",
-    "cash",
-    "furniture",
-    "information",
-    # SOME FISH AND HERD ANIMALS
-    "fish",
-    "deer",
-    "sheep",
-    # ALL NATIONALS ENDING IN -ese
-    "nese",
-    "rese",
-    "lese",
-    "mese",
-    # DISEASES
-    "pox",
-    # OTHER ODDITIES
-    "craft",
-] + pl_sb_uninflected_s_endings
-# SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE)
-
-
-pl_sb_uninflected_bysize = bysize(pl_sb_uninflected_endings)
-
-
-# SINGULAR WORDS ENDING IN ...s (ALL INFLECT WITH ...es)
-
-pl_sb_singular_s_complete = [
-    "acropolis",
-    "aegis",
-    "alias",
-    "asbestos",
-    "bathos",
-    "bias",
-    "bronchitis",
-    "bursitis",
-    "caddis",
-    "cannabis",
-    "canvas",
-    "chaos",
-    "cosmos",
-    "dais",
-    "digitalis",
-    "epidermis",
-    "ethos",
-    "eyas",
-    "gas",
-    "glottis",
-    "hubris",
-    "ibis",
-    "lens",
-    "mantis",
-    "marquis",
-    "metropolis",
-    "pathos",
-    "pelvis",
-    "polis",
-    "rhinoceros",
-    "sassafras",
-    "trellis",
-] + pl_sb_C_is_ides_complete
-
-
-pl_sb_singular_s_endings = ["ss", "us"] + pl_sb_C_is_ides_endings
-
-pl_sb_singular_s_bysize = bysize(pl_sb_singular_s_endings)
-
-si_sb_singular_s_complete = [f"{w}es" for w in pl_sb_singular_s_complete]
-si_sb_singular_s_endings = [f"{w}es" for w in pl_sb_singular_s_endings]
-si_sb_singular_s_bysize = bysize(si_sb_singular_s_endings)
-
-pl_sb_singular_s_es = ["[A-Z].*es"]
-
-pl_sb_singular_s = enclose(
-    "|".join(
-        pl_sb_singular_s_complete
-        + [f".*{w}" for w in pl_sb_singular_s_endings]
-        + pl_sb_singular_s_es
-    )
-)
-
-
-# PLURALS ENDING IN uses -> use
-
-
-si_sb_ois_oi_case = ("Bolshois", "Hanois")
-
-si_sb_uses_use_case = ("Betelgeuses", "Duses", "Meuses", "Syracuses", "Toulouses")
-
-si_sb_uses_use = (
-    "abuses",
-    "applauses",
-    "blouses",
-    "carouses",
-    "causes",
-    "chartreuses",
-    "clauses",
-    "contuses",
-    "douses",
-    "excuses",
-    "fuses",
-    "grouses",
-    "hypotenuses",
-    "masseuses",
-    "menopauses",
-    "misuses",
-    "muses",
-    "overuses",
-    "pauses",
-    "peruses",
-    "profuses",
-    "recluses",
-    "reuses",
-    "ruses",
-    "souses",
-    "spouses",
-    "suffuses",
-    "transfuses",
-    "uses",
-)
-
-si_sb_ies_ie_case = (
-    "Addies",
-    "Aggies",
-    "Allies",
-    "Amies",
-    "Angies",
-    "Annies",
-    "Annmaries",
-    "Archies",
-    "Arties",
-    "Aussies",
-    "Barbies",
-    "Barries",
-    "Basies",
-    "Bennies",
-    "Bernies",
-    "Berties",
-    "Bessies",
-    "Betties",
-    "Billies",
-    "Blondies",
-    "Bobbies",
-    "Bonnies",
-    "Bowies",
-    "Brandies",
-    "Bries",
-    "Brownies",
-    "Callies",
-    "Carnegies",
-    "Carries",
-    "Cassies",
-    "Charlies",
-    "Cheries",
-    "Christies",
-    "Connies",
-    "Curies",
-    "Dannies",
-    "Debbies",
-    "Dixies",
-    "Dollies",
-    "Donnies",
-    "Drambuies",
-    "Eddies",
-    "Effies",
-    "Ellies",
-    "Elsies",
-    "Eries",
-    "Ernies",
-    "Essies",
-    "Eugenies",
-    "Fannies",
-    "Flossies",
-    "Frankies",
-    "Freddies",
-    "Gillespies",
-    "Goldies",
-    "Gracies",
-    "Guthries",
-    "Hallies",
-    "Hatties",
-    "Hetties",
-    "Hollies",
-    "Jackies",
-    "Jamies",
-    "Janies",
-    "Jannies",
-    "Jeanies",
-    "Jeannies",
-    "Jennies",
-    "Jessies",
-    "Jimmies",
-    "Jodies",
-    "Johnies",
-    "Johnnies",
-    "Josies",
-    "Julies",
-    "Kalgoorlies",
-    "Kathies",
-    "Katies",
-    "Kellies",
-    "Kewpies",
-    "Kristies",
-    "Laramies",
-    "Lassies",
-    "Lauries",
-    "Leslies",
-    "Lessies",
-    "Lillies",
-    "Lizzies",
-    "Lonnies",
-    "Lories",
-    "Lorries",
-    "Lotties",
-    "Louies",
-    "Mackenzies",
-    "Maggies",
-    "Maisies",
-    "Mamies",
-    "Marcies",
-    "Margies",
-    "Maries",
-    "Marjories",
-    "Matties",
-    "McKenzies",
-    "Melanies",
-    "Mickies",
-    "Millies",
-    "Minnies",
-    "Mollies",
-    "Mounties",
-    "Nannies",
-    "Natalies",
-    "Nellies",
-    "Netties",
-    "Ollies",
-    "Ozzies",
-    "Pearlies",
-    "Pottawatomies",
-    "Reggies",
-    "Richies",
-    "Rickies",
-    "Robbies",
-    "Ronnies",
-    "Rosalies",
-    "Rosemaries",
-    "Rosies",
-    "Roxies",
-    "Rushdies",
-    "Ruthies",
-    "Sadies",
-    "Sallies",
-    "Sammies",
-    "Scotties",
-    "Selassies",
-    "Sherries",
-    "Sophies",
-    "Stacies",
-    "Stefanies",
-    "Stephanies",
-    "Stevies",
-    "Susies",
-    "Sylvies",
-    "Tammies",
-    "Terries",
-    "Tessies",
-    "Tommies",
-    "Tracies",
-    "Trekkies",
-    "Valaries",
-    "Valeries",
-    "Valkyries",
-    "Vickies",
-    "Virgies",
-    "Willies",
-    "Winnies",
-    "Wylies",
-    "Yorkies",
-)
-
-si_sb_ies_ie = (
-    "aeries",
-    "baggies",
-    "belies",
-    "biggies",
-    "birdies",
-    "bogies",
-    "bonnies",
-    "boogies",
-    "bookies",
-    "bourgeoisies",
-    "brownies",
-    "budgies",
-    "caddies",
-    "calories",
-    "camaraderies",
-    "cockamamies",
-    "collies",
-    "cookies",
-    "coolies",
-    "cooties",
-    "coteries",
-    "crappies",
-    "curies",
-    "cutesies",
-    "dogies",
-    "eyries",
-    "floozies",
-    "footsies",
-    "freebies",
-    "genies",
-    "goalies",
-    "groupies",
-    "hies",
-    "jalousies",
-    "junkies",
-    "kiddies",
-    "laddies",
-    "lassies",
-    "lies",
-    "lingeries",
-    "magpies",
-    "menageries",
-    "mommies",
-    "movies",
-    "neckties",
-    "newbies",
-    "nighties",
-    "oldies",
-    "organdies",
-    "overlies",
-    "pies",
-    "pinkies",
-    "pixies",
-    "potpies",
-    "prairies",
-    "quickies",
-    "reveries",
-    "rookies",
-    "rotisseries",
-    "softies",
-    "sorties",
-    "species",
-    "stymies",
-    "sweeties",
-    "ties",
-    "underlies",
-    "unties",
-    "veggies",
-    "vies",
-    "yuppies",
-    "zombies",
-)
-
-
-si_sb_oes_oe_case = (
-    "Chloes",
-    "Crusoes",
-    "Defoes",
-    "Faeroes",
-    "Ivanhoes",
-    "Joes",
-    "McEnroes",
-    "Moes",
-    "Monroes",
-    "Noes",
-    "Poes",
-    "Roscoes",
-    "Tahoes",
-    "Tippecanoes",
-    "Zoes",
-)
-
-si_sb_oes_oe = (
-    "aloes",
-    "backhoes",
-    "canoes",
-    "does",
-    "floes",
-    "foes",
-    "hoes",
-    "mistletoes",
-    "oboes",
-    "pekoes",
-    "roes",
-    "sloes",
-    "throes",
-    "tiptoes",
-    "toes",
-    "woes",
-)
-
-si_sb_z_zes = ("quartzes", "topazes")
-
-si_sb_zzes_zz = ("buzzes", "fizzes", "frizzes", "razzes")
-
-si_sb_ches_che_case = (
-    "Andromaches",
-    "Apaches",
-    "Blanches",
-    "Comanches",
-    "Nietzsches",
-    "Porsches",
-    "Roches",
-)
-
-si_sb_ches_che = (
-    "aches",
-    "avalanches",
-    "backaches",
-    "bellyaches",
-    "caches",
-    "cloches",
-    "creches",
-    "douches",
-    "earaches",
-    "fiches",
-    "headaches",
-    "heartaches",
-    "microfiches",
-    "niches",
-    "pastiches",
-    "psyches",
-    "quiches",
-    "stomachaches",
-    "toothaches",
-    "tranches",
-)
-
-si_sb_xes_xe = ("annexes", "axes", "deluxes", "pickaxes")
-
-si_sb_sses_sse_case = ("Hesses", "Jesses", "Larousses", "Matisses")
-si_sb_sses_sse = (
-    "bouillabaisses",
-    "crevasses",
-    "demitasses",
-    "impasses",
-    "mousses",
-    "posses",
-)
-
-si_sb_ves_ve_case = (
-    # *[nwl]ives -> [nwl]live
-    "Clives",
-    "Palmolives",
-)
-si_sb_ves_ve = (
-    # *[^d]eaves -> eave
-    "interweaves",
-    "weaves",
-    # *[nwl]ives -> [nwl]live
-    "olives",
-    # *[eoa]lves -> [eoa]lve
-    "bivalves",
-    "dissolves",
-    "resolves",
-    "salves",
-    "twelves",
-    "valves",
-)
-
-
-plverb_special_s = enclose(
-    "|".join(
-        [pl_sb_singular_s]
-        + pl_sb_uninflected_s
-        + list(pl_sb_irregular_s)
-        + ["(.*[csx])is", "(.*)ceps", "[A-Z].*s"]
-    )
-)
-
-_pl_sb_postfix_adj_defn = (
-    ("general", enclose(r"(?!major|lieutenant|brigadier|adjutant|.*star)\S+")),
-    ("martial", enclose("court")),
-    ("force", enclose("pound")),
-)
-
-pl_sb_postfix_adj: Iterable[str] = (
-    enclose(val + f"(?=(?:-|\\s+){key})") for key, val in _pl_sb_postfix_adj_defn
-)
-
-pl_sb_postfix_adj_stems = f"({'|'.join(pl_sb_postfix_adj)})(.*)"
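-
-# e.g. "Governor General" matches the "general" entry, so _plnoun (see
-# below) pluralizes only the leading noun: "Governors General"; "major
-# general" is excluded by the lookahead and so pluralizes as an ordinary
-# noun ("major generals").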
-
-
-# PLURAL WORDS ENDING IS es GO TO SINGULAR is
-
-si_sb_es_is = (
-    "amanuenses",
-    "amniocenteses",
-    "analyses",
-    "antitheses",
-    "apotheoses",
-    "arterioscleroses",
-    "atheroscleroses",
-    "axes",
-    # 'bases', # bases -> basis
-    "catalyses",
-    "catharses",
-    "chasses",
-    "cirrhoses",
-    "cocces",
-    "crises",
-    "diagnoses",
-    "dialyses",
-    "diereses",
-    "electrolyses",
-    "emphases",
-    "exegeses",
-    "geneses",
-    "halitoses",
-    "hydrolyses",
-    "hypnoses",
-    "hypotheses",
-    "hystereses",
-    "metamorphoses",
-    "metastases",
-    "misdiagnoses",
-    "mitoses",
-    "mononucleoses",
-    "narcoses",
-    "necroses",
-    "nemeses",
-    "neuroses",
-    "oases",
-    "osmoses",
-    "osteoporoses",
-    "paralyses",
-    "parentheses",
-    "parthenogeneses",
-    "periphrases",
-    "photosyntheses",
-    "probosces",
-    "prognoses",
-    "prophylaxes",
-    "prostheses",
-    "preces",
-    "psoriases",
-    "psychoanalyses",
-    "psychokineses",
-    "psychoses",
-    "scleroses",
-    "scolioses",
-    "sepses",
-    "silicoses",
-    "symbioses",
-    "synopses",
-    "syntheses",
-    "taxes",
-    "telekineses",
-    "theses",
-    "thromboses",
-    "tuberculoses",
-    "urinalyses",
-)
-
-pl_prep_list = """
-    about above across after among around at athwart before behind
-    below beneath beside besides between betwixt beyond but by
-    during except for from in into near of off on onto out over
-    since till to under until unto upon with""".split()
-
-pl_prep_list_da = pl_prep_list + ["de", "du", "da"]
-
-pl_prep_bysize = bysize(pl_prep_list_da)
-
-pl_prep = enclose("|".join(pl_prep_list_da))
-
-pl_sb_prep_dual_compound = rf"(.*?)((?:-|\s+)(?:{pl_prep})(?:-|\s+))a(?:-|\s+)(.*)"
-
-
-singular_pronoun_genders = {
-    "neuter",
-    "feminine",
-    "masculine",
-    "gender-neutral",
-    "feminine or masculine",
-    "masculine or feminine",
-}
-
-pl_pron_nom = {
-    # NOMINATIVE    REFLEXIVE
-    "i": "we",
-    "myself": "ourselves",
-    "you": "you",
-    "yourself": "yourselves",
-    "she": "they",
-    "herself": "themselves",
-    "he": "they",
-    "himself": "themselves",
-    "it": "they",
-    "itself": "themselves",
-    "they": "they",
-    "themself": "themselves",
-    #   POSSESSIVE
-    "mine": "ours",
-    "yours": "yours",
-    "hers": "theirs",
-    "his": "theirs",
-    "its": "theirs",
-    "theirs": "theirs",
-}
-
-si_pron: Dict[str, Dict[str, Union[str, Dict[str, str]]]] = {
-    "nom": {v: k for (k, v) in pl_pron_nom.items()}
-}
-si_pron["nom"]["we"] = "I"
-
-
-pl_pron_acc = {
-    # ACCUSATIVE    REFLEXIVE
-    "me": "us",
-    "myself": "ourselves",
-    "you": "you",
-    "yourself": "yourselves",
-    "her": "them",
-    "herself": "themselves",
-    "him": "them",
-    "himself": "themselves",
-    "it": "them",
-    "itself": "themselves",
-    "them": "them",
-    "themself": "themselves",
-}
-
-pl_pron_acc_keys = enclose("|".join(pl_pron_acc))
-pl_pron_acc_keys_bysize = bysize(pl_pron_acc)
-
-si_pron["acc"] = {v: k for (k, v) in pl_pron_acc.items()}
-
-for _thecase, _plur, _gend, _sing in (
-    ("nom", "they", "neuter", "it"),
-    ("nom", "they", "feminine", "she"),
-    ("nom", "they", "masculine", "he"),
-    ("nom", "they", "gender-neutral", "they"),
-    ("nom", "they", "feminine or masculine", "she or he"),
-    ("nom", "they", "masculine or feminine", "he or she"),
-    ("nom", "themselves", "neuter", "itself"),
-    ("nom", "themselves", "feminine", "herself"),
-    ("nom", "themselves", "masculine", "himself"),
-    ("nom", "themselves", "gender-neutral", "themself"),
-    ("nom", "themselves", "feminine or masculine", "herself or himself"),
-    ("nom", "themselves", "masculine or feminine", "himself or herself"),
-    ("nom", "theirs", "neuter", "its"),
-    ("nom", "theirs", "feminine", "hers"),
-    ("nom", "theirs", "masculine", "his"),
-    ("nom", "theirs", "gender-neutral", "theirs"),
-    ("nom", "theirs", "feminine or masculine", "hers or his"),
-    ("nom", "theirs", "masculine or feminine", "his or hers"),
-    ("acc", "them", "neuter", "it"),
-    ("acc", "them", "feminine", "her"),
-    ("acc", "them", "masculine", "him"),
-    ("acc", "them", "gender-neutral", "them"),
-    ("acc", "them", "feminine or masculine", "her or him"),
-    ("acc", "them", "masculine or feminine", "him or her"),
-    ("acc", "themselves", "neuter", "itself"),
-    ("acc", "themselves", "feminine", "herself"),
-    ("acc", "themselves", "masculine", "himself"),
-    ("acc", "themselves", "gender-neutral", "themself"),
-    ("acc", "themselves", "feminine or masculine", "herself or himself"),
-    ("acc", "themselves", "masculine or feminine", "himself or herself"),
-):
-    try:
-        si_pron[_thecase][_plur][_gend] = _sing  # type: ignore
-    except TypeError:
-        si_pron[_thecase][_plur] = {}
-        si_pron[_thecase][_plur][_gend] = _sing  # type: ignore
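-
-# The try/except above upgrades a flat string entry (e.g. the inversion
-# leaves si_pron["nom"]["they"] == "they") into a per-gender dict on the
-# first TypeError, then fills in the gendered singular forms.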
-
-
-si_pron_acc_keys = enclose("|".join(si_pron["acc"]))
-si_pron_acc_keys_bysize = bysize(si_pron["acc"])
-
-
-def get_si_pron(thecase, word, gender) -> str:
-    try:
-        sing = si_pron[thecase][word]
-    except KeyError:
-        raise  # not a pronoun
-    try:
-        return sing[gender]  # has several types due to gender
-    except TypeError:
-        return cast(str, sing)  # answer independent of gender
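-
-# A quick sanity check of the lookup above (values follow from the
-# si_pron table built in the preceding loop):
-#
-#     get_si_pron("nom", "they", "feminine")  # -> 'she'
-#     get_si_pron("acc", "us", "neuter")      # -> 'me' (same for any gender)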
-
-
-# These dictionaries group verbs by first, second and third person
-# conjugations.
-
-plverb_irregular_pres = {
-    "am": "are",
-    "are": "are",
-    "is": "are",
-    "was": "were",
-    "were": "were",
-    "have": "have",
-    "has": "have",
-    "do": "do",
-    "does": "do",
-}
-
-plverb_ambiguous_pres = {
-    "act": "act",
-    "acts": "act",
-    "blame": "blame",
-    "blames": "blame",
-    "can": "can",
-    "must": "must",
-    "fly": "fly",
-    "flies": "fly",
-    "copy": "copy",
-    "copies": "copy",
-    "drink": "drink",
-    "drinks": "drink",
-    "fight": "fight",
-    "fights": "fight",
-    "fire": "fire",
-    "fires": "fire",
-    "like": "like",
-    "likes": "like",
-    "look": "look",
-    "looks": "look",
-    "make": "make",
-    "makes": "make",
-    "reach": "reach",
-    "reaches": "reach",
-    "run": "run",
-    "runs": "run",
-    "sink": "sink",
-    "sinks": "sink",
-    "sleep": "sleep",
-    "sleeps": "sleep",
-    "view": "view",
-    "views": "view",
-}
-
-plverb_ambiguous_pres_keys = re.compile(
-    rf"^({enclose('|'.join(plverb_ambiguous_pres))})((\s.*)?)$", re.IGNORECASE
-)
-
-
-plverb_irregular_non_pres = (
-    "did",
-    "had",
-    "ate",
-    "made",
-    "put",
-    "spent",
-    "fought",
-    "sank",
-    "gave",
-    "sought",
-    "shall",
-    "could",
-    "ought",
-    "should",
-)
-
-plverb_ambiguous_non_pres = re.compile(
-    r"^((?:thought|saw|bent|will|might|cut))((\s.*)?)$", re.IGNORECASE
-)
-
-# "..oes" -> "..oe" (the rest are "..oes" -> "o")
-
-pl_v_oes_oe = ("canoes", "floes", "oboes", "roes", "throes", "woes")
-pl_v_oes_oe_endings_size4 = ("hoes", "toes")
-pl_v_oes_oe_endings_size5 = ("shoes",)
-
-
-pl_count_zero = ("0", "no", "zero", "nil")
-
-
-pl_count_one = ("1", "a", "an", "one", "each", "every", "this", "that")
-
-pl_adj_special = {"a": "some", "an": "some", "this": "these", "that": "those"}
-
-pl_adj_special_keys = re.compile(
-    rf"^({enclose('|'.join(pl_adj_special))})$", re.IGNORECASE
-)
-
-pl_adj_poss = {
-    "my": "our",
-    "your": "your",
-    "its": "their",
-    "her": "their",
-    "his": "their",
-    "their": "their",
-}
-
-pl_adj_poss_keys = re.compile(rf"^({enclose('|'.join(pl_adj_poss))})$", re.IGNORECASE)
-
-
-# 2. INDEFINITE ARTICLES
-
-# THIS PATTERN MATCHES STRINGS OF CAPITALS STARTING WITH A "VOWEL-SOUND"
-# CONSONANT FOLLOWED BY ANOTHER CONSONANT, AND WHICH ARE NOT LIKELY
-# TO BE REAL WORDS (OH, ALL RIGHT THEN, IT'S JUST MAGIC!)
-
-A_abbrev = re.compile(
-    r"""
-^(?! FJO | [HLMNS]Y.  | RY[EO] | SQU
-  | ( F[LR]? | [HL] | MN? | N | RH? | S[CHKLMNPTVW]? | X(YL)?) [AEIOU])
-[FHLMNRSX][A-Z]
-""",
-    re.VERBOSE,
-)
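-
-# e.g. "FBI" matches (the initial "F" is pronounced "ef", so: "an FBI
-# agent"), while "FIAT" is rejected by the lookahead because it reads
-# as an ordinary word ("a FIAT").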
-
-# THIS PATTERN CODES THE BEGINNINGS OF ALL ENGLISH WORDS BEGINNING WITH A
-# 'y' FOLLOWED BY A CONSONANT. ANY OTHER Y-CONSONANT PREFIX THEREFORE
-# IMPLIES AN ABBREVIATION.
-
-A_y_cons = re.compile(r"^(y(b[lor]|cl[ea]|fere|gg|p[ios]|rou|tt))", re.IGNORECASE)
-
-# EXCEPTIONS TO EXCEPTIONS
-
-A_explicit_a = re.compile(r"^((?:unabomber|unanimous|US))", re.IGNORECASE)
-
-A_explicit_an = re.compile(
-    r"^((?:euler|hour(?!i)|heir|honest|hono[ur]|mpeg))", re.IGNORECASE
-)
-
-A_ordinal_an = re.compile(r"^([aefhilmnorsx]-?th)", re.IGNORECASE)
-
-A_ordinal_a = re.compile(r"^([bcdgjkpqtuvwyz]-?th)", re.IGNORECASE)
-
-
-# NUMERICAL INFLECTIONS
-
-nth = {
-    0: "th",
-    1: "st",
-    2: "nd",
-    3: "rd",
-    4: "th",
-    5: "th",
-    6: "th",
-    7: "th",
-    8: "th",
-    9: "th",
-    11: "th",
-    12: "th",
-    13: "th",
-}
-nth_suff = set(nth.values())
-
-ordinal = dict(
-    ty="tieth",
-    one="first",
-    two="second",
-    three="third",
-    five="fifth",
-    eight="eighth",
-    nine="ninth",
-    twelve="twelfth",
-)
-
-ordinal_suff = re.compile(rf"({'|'.join(ordinal)})\Z")
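-
-# Taken together: nth maps a final digit to its suffix, with 11-13
-# listed explicitly so they take "th" (21 -> "21st" but 11 -> "11th"),
-# while ordinal_suff matches word endings to rewrite, e.g. "twenty" ->
-# "twentieth" via the "ty" -> "tieth" entry.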
-
-
-# NUMBERS
-
-unit = ["", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
-teen = [
-    "ten",
-    "eleven",
-    "twelve",
-    "thirteen",
-    "fourteen",
-    "fifteen",
-    "sixteen",
-    "seventeen",
-    "eighteen",
-    "nineteen",
-]
-ten = [
-    "",
-    "",
-    "twenty",
-    "thirty",
-    "forty",
-    "fifty",
-    "sixty",
-    "seventy",
-    "eighty",
-    "ninety",
-]
-mill = [
-    " ",
-    " thousand",
-    " million",
-    " billion",
-    " trillion",
-    " quadrillion",
-    " quintillion",
-    " sextillion",
-    " septillion",
-    " octillion",
-    " nonillion",
-    " decillion",
-]
-
-
-# SUPPORT CLASSICAL PLURALIZATIONS
-
-def_classical = dict(
-    all=False, zero=False, herd=False, names=True, persons=False, ancient=False
-)
-
-all_classical = {k: True for k in def_classical}
-no_classical = {k: False for k in def_classical}
-
-
-# Maps strings to built-in constant types
-string_to_constant = {"True": True, "False": False, "None": None}
-
-
-# Pre-compiled regular expression objects
-DOLLAR_DIGITS = re.compile(r"\$(\d+)")
-FUNCTION_CALL = re.compile(r"((\w+)\([^)]*\)*)", re.IGNORECASE)
-PARTITION_WORD = re.compile(r"\A(\s*)(.+?)(\s*)\Z")
-PL_SB_POSTFIX_ADJ_STEMS_RE = re.compile(
-    rf"^(?:{pl_sb_postfix_adj_stems})$", re.IGNORECASE
-)
-PL_SB_PREP_DUAL_COMPOUND_RE = re.compile(
-    rf"^(?:{pl_sb_prep_dual_compound})$", re.IGNORECASE
-)
-DENOMINATOR = re.compile(r"(?P<denominator>.+)( (per|a) .+)")
-PLVERB_SPECIAL_S_RE = re.compile(rf"^({plverb_special_s})$")
-WHITESPACE = re.compile(r"\s")
-ENDS_WITH_S = re.compile(r"^(.*[^s])s$", re.IGNORECASE)
-ENDS_WITH_APOSTROPHE_S = re.compile(r"^(.*)'s?$")
-INDEFINITE_ARTICLE_TEST = re.compile(r"\A(\s*)(?:an?\s+)?(.+?)(\s*)\Z", re.IGNORECASE)
-SPECIAL_AN = re.compile(r"^[aefhilmnorsx]$", re.IGNORECASE)
-SPECIAL_A = re.compile(r"^[bcdgjkpqtuvwyz]$", re.IGNORECASE)
-SPECIAL_ABBREV_AN = re.compile(r"^[aefhilmnorsx][.-]", re.IGNORECASE)
-SPECIAL_ABBREV_A = re.compile(r"^[a-z][.-]", re.IGNORECASE)
-CONSONANTS = re.compile(r"^[^aeiouy]", re.IGNORECASE)
-ARTICLE_SPECIAL_EU = re.compile(r"^e[uw]", re.IGNORECASE)
-ARTICLE_SPECIAL_ONCE = re.compile(r"^onc?e\b", re.IGNORECASE)
-ARTICLE_SPECIAL_ONETIME = re.compile(r"^onetime\b", re.IGNORECASE)
-ARTICLE_SPECIAL_UNIT = re.compile(r"^uni([^nmd]|mo)", re.IGNORECASE)
-ARTICLE_SPECIAL_UBA = re.compile(r"^u[bcfghjkqrst][aeiou]", re.IGNORECASE)
-ARTICLE_SPECIAL_UKR = re.compile(r"^ukr", re.IGNORECASE)
-SPECIAL_CAPITALS = re.compile(r"^U[NK][AIEO]?")
-VOWELS = re.compile(r"^[aeiou]", re.IGNORECASE)
-
-DIGIT_GROUP = re.compile(r"(\d)")
-TWO_DIGITS = re.compile(r"(\d)(\d)")
-THREE_DIGITS = re.compile(r"(\d)(\d)(\d)")
-THREE_DIGITS_WORD = re.compile(r"(\d)(\d)(\d)(?=\D*\Z)")
-TWO_DIGITS_WORD = re.compile(r"(\d)(\d)(?=\D*\Z)")
-ONE_DIGIT_WORD = re.compile(r"(\d)(?=\D*\Z)")
-
-FOUR_DIGIT_COMMA = re.compile(r"(\d)(\d{3}(?:,|\Z))")
-NON_DIGIT = re.compile(r"\D")
-WHITESPACES_COMMA = re.compile(r"\s+,")
-COMMA_WORD = re.compile(r", (\S+)\s+\Z")
-WHITESPACES = re.compile(r"\s+")
-
-
-PRESENT_PARTICIPLE_REPLACEMENTS = (
-    (re.compile(r"ie$"), r"y"),
-    (
-        re.compile(r"ue$"),
-        r"u",
-    ),  # TODO: isn't ue$ -> u encompassed in the following rule?
-    (re.compile(r"([auy])e$"), r"\g<1>"),
-    (re.compile(r"ski$"), r"ski"),
-    (re.compile(r"[^b]i$"), r""),
-    (re.compile(r"^(are|were)$"), r"be"),
-    (re.compile(r"^(had)$"), r"hav"),
-    (re.compile(r"^(hoe)$"), r"\g<1>"),
-    (re.compile(r"([^e])e$"), r"\g<1>"),
-    (re.compile(r"er$"), r"er"),
-    (re.compile(r"([^aeiou][aeiouy]([bdgmnprst]))$"), r"\g<1>\g<2>"),
-)
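-
-# These are tried in order, first match wins (hence the TODO above about
-# overlapping rules): "lie" hits the ie$ rule and becomes "lying" once
-# "ing" is appended, while "run" falls through to the final
-# consonant-doubling rule ("runn" -> "running").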
-
-DIGIT = re.compile(r"\d")
-
-
-class Words(str):
-    lowered: str
-    split_: List[str]
-    first: str
-    last: str
-
-    def __init__(self, orig) -> None:
-        self.lowered = self.lower()
-        self.split_ = self.split()
-        self.first = self.split_[0]
-        self.last = self.split_[-1]
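-
-    # e.g. Words("Governor General") compares equal to the plain string,
-    # with .lowered == "governor general", .first == "Governor" and
-    # .last == "General".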
-
-
-Falsish = Any  # ideally, falsish would only validate on bool(value) is False
-
-
-_STATIC_TYPE_CHECKING = TYPE_CHECKING
-# ^-- Workaround for typeguard AST manipulation:
-#     https://github.com/agronholm/typeguard/issues/353#issuecomment-1556306554
-
-if _STATIC_TYPE_CHECKING:  # pragma: no cover
-    Word = Annotated[str, "String with at least 1 character"]
-else:
-
-    class _WordMeta(type):  # Too dynamic to be supported by mypy...
-        def __instancecheck__(self, instance: Any) -> bool:
-            return isinstance(instance, str) and len(instance) >= 1
-
-    class Word(metaclass=_WordMeta):  # type: ignore[no-redef]
-        """String with at least 1 character"""
-
-
-class engine:
-    def __init__(self) -> None:
-        self.classical_dict = def_classical.copy()
-        self.persistent_count: Optional[int] = None
-        self.mill_count = 0
-        self.pl_sb_user_defined: List[Optional[Word]] = []
-        self.pl_v_user_defined: List[Optional[Word]] = []
-        self.pl_adj_user_defined: List[Optional[Word]] = []
-        self.si_sb_user_defined: List[Optional[Word]] = []
-        self.A_a_user_defined: List[Optional[Word]] = []
-        self.thegender = "neuter"
-        self.__number_args: Optional[Dict[str, str]] = None
-
-    @property
-    def _number_args(self):
-        return cast(Dict[str, str], self.__number_args)
-
-    @_number_args.setter
-    def _number_args(self, val):
-        self.__number_args = val
-
-    @typechecked
-    def defnoun(self, singular: Optional[Word], plural: Optional[Word]) -> int:
-        """
-        Set the noun plural of singular to plural.
-
-        """
-        self.checkpat(singular)
-        self.checkpatplural(plural)
-        self.pl_sb_user_defined.extend((singular, plural))
-        self.si_sb_user_defined.extend((plural, singular))
-        return 1
-
-    @typechecked
-    def defverb(
-        self,
-        s1: Optional[Word],
-        p1: Optional[Word],
-        s2: Optional[Word],
-        p2: Optional[Word],
-        s3: Optional[Word],
-        p3: Optional[Word],
-    ) -> int:
-        """
-        Set the verb plurals for s1, s2 and s3 to p1, p2 and p3 respectively.
-
-        Where 1, 2 and 3 represent the 1st, 2nd and 3rd person forms of the verb.
-
-        """
-        self.checkpat(s1)
-        self.checkpat(s2)
-        self.checkpat(s3)
-        self.checkpatplural(p1)
-        self.checkpatplural(p2)
-        self.checkpatplural(p3)
-        self.pl_v_user_defined.extend((s1, p1, s2, p2, s3, p3))
-        return 1
-
-    @typechecked
-    def defadj(self, singular: Optional[Word], plural: Optional[Word]) -> int:
-        """
-        Set the adjective plural of singular to plural.
-
-        """
-        self.checkpat(singular)
-        self.checkpatplural(plural)
-        self.pl_adj_user_defined.extend((singular, plural))
-        return 1
-
-    @typechecked
-    def defa(self, pattern: Optional[Word]) -> int:
-        """
-        Define the indefinite article as 'a' for words matching pattern.
-
-        """
-        self.checkpat(pattern)
-        self.A_a_user_defined.extend((pattern, "a"))
-        return 1
-
-    @typechecked
-    def defan(self, pattern: Optional[Word]) -> int:
-        """
-        Define the indefinite article as 'an' for words matching pattern.
-
-        """
-        self.checkpat(pattern)
-        self.A_a_user_defined.extend((pattern, "an"))
-        return 1
-
-    def checkpat(self, pattern: Optional[Word]) -> None:
-        """
-        check for errors in a regex pattern
-        """
-        if pattern is None:
-            return
-        try:
-            re.match(pattern, "")
-        except re.error as err:
-            raise BadUserDefinedPatternError(pattern) from err
-
-    def checkpatplural(self, pattern: Optional[Word]) -> None:
-        """
-        check for errors in a regex replace pattern
-        """
-        return
-
-    @typechecked
-    def ud_match(self, word: Word, wordlist: Sequence[Optional[Word]]) -> Optional[str]:
-        for i in range(len(wordlist) - 2, -2, -2):  # backwards through even elements
-            mo = re.search(rf"^{wordlist[i]}$", word, re.IGNORECASE)
-            if mo:
-                if wordlist[i + 1] is None:
-                    return None
-                pl = DOLLAR_DIGITS.sub(
-                    r"\\1", cast(Word, wordlist[i + 1])
-                )  # change $n to \n for expand
-                return mo.expand(pl)
-        return None
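-
-    # ud_match scans the user-defined list backwards in (pattern,
-    # replacement) pairs, so the most recent definition wins: e.g. after
-    # defnoun('kid', 'kids') and then defnoun('kid', 'kiddies'),
-    # ud_match('kid', self.pl_sb_user_defined) returns 'kiddies'.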
-
-    def classical(self, **kwargs) -> None:
-        """
-        turn classical mode on and off for various categories
-
-        turn on all classical modes:
-        classical()
-        classical(all=True)
-
-        turn on or off specific classical modes:
-        e.g.
-        classical(herd=True)
-        classical(names=False)
-
-        By default all classical modes are off except names.
-
-        an unknown value in args or key in kwargs raises the
-        exception: UnknownClassicalModeError
-
-        """
-        if not kwargs:
-            self.classical_dict = all_classical.copy()
-            return
-        if "all" in kwargs:
-            if kwargs["all"]:
-                self.classical_dict = all_classical.copy()
-            else:
-                self.classical_dict = no_classical.copy()
-
-        for k, v in kwargs.items():
-            if k in def_classical:
-                self.classical_dict[k] = v
-            else:
-                raise UnknownClassicalModeError
-
-    def num(
-        self, count: Optional[int] = None, show: Optional[int] = None
-    ) -> str:  # (;$count,$show)
-        """
-        Set the number to be used in other method calls.
-
-        Returns count.
-
-        Set show to False to return '' instead.
-
-        """
-        if count is not None:
-            try:
-                self.persistent_count = int(count)
-            except ValueError as err:
-                raise BadNumValueError from err
-            if (show is None) or show:
-                return str(count)
-        else:
-            self.persistent_count = None
-        return ""
-
-    def gender(self, gender: str) -> None:
-        """
-        set the gender for the singular of plural pronouns
-
-        can be one of:
-        'neuter'                ('they' -> 'it')
-        'feminine'              ('they' -> 'she')
-        'masculine'             ('they' -> 'he')
-        'gender-neutral'        ('they' -> 'they')
-        'feminine or masculine' ('they' -> 'she or he')
-        'masculine or feminine' ('they' -> 'he or she')
-        """
-        if gender in singular_pronoun_genders:
-            self.thegender = gender
-        else:
-            raise BadGenderError
-
-    def _get_value_from_ast(self, obj):
-        """
-        Return the value of the ast object.
-        """
-        if isinstance(obj, ast.Num):
-            return obj.n
-        elif isinstance(obj, ast.Str):
-            return obj.s
-        elif isinstance(obj, ast.List):
-            return [self._get_value_from_ast(e) for e in obj.elts]
-        elif isinstance(obj, ast.Tuple):
-            return tuple([self._get_value_from_ast(e) for e in obj.elts])
-
-        # None, True and False are NameConstants in Py3.4 and above.
-        elif isinstance(obj, ast.NameConstant):
-            return obj.value
-
-        # Probably passed a variable name.
-        # Or passed a single word without wrapping it in quotes as an argument
-        # ex: p.inflect("I plural(see)") instead of p.inflect("I plural('see')")
-        raise NameError(f"name '{obj.id}' is not defined")
-
-    def _string_to_substitute(
-        self, mo: Match, methods_dict: Dict[str, Callable]
-    ) -> str:
-        """
-        Return the string to be substituted for the match.
-        """
-        matched_text, f_name = mo.groups()
-        # matched_text is the complete match string. e.g. plural_noun(cat)
-        # f_name is the function name. e.g. plural_noun
-
-        # Return matched_text if function name is not in methods_dict
-        if f_name not in methods_dict:
-            return matched_text
-
-        # Parse the matched text
-        a_tree = ast.parse(matched_text)
-
-        # get the args and kwargs from ast objects
-        args_list = [
-            self._get_value_from_ast(a)
-            for a in a_tree.body[0].value.args  # type: ignore[attr-defined]
-        ]
-        kwargs_list = {
-            kw.arg: self._get_value_from_ast(kw.value)
-            for kw in a_tree.body[0].value.keywords  # type: ignore[attr-defined]
-        }
-
-        # Call the corresponding function
-        return methods_dict[f_name](*args_list, **kwargs_list)
-
-    # 0. PERFORM GENERAL INFLECTIONS IN A STRING
-
-    @typechecked
-    def inflect(self, text: Word) -> str:
-        """
-        Perform inflections in a string.
-
-        e.g. inflect('The plural of cat is plural(cat)') returns
-        'The plural of cat is cats'
-
-        can use plural, plural_noun, plural_verb, plural_adj,
-        singular_noun, a, an, no, ordinal, number_to_words,
-        present_participle, and num
-
-        """
-        save_persistent_count = self.persistent_count
-
-        # Dictionary of allowed methods
-        methods_dict: Dict[str, Callable] = {
-            "plural": self.plural,
-            "plural_adj": self.plural_adj,
-            "plural_noun": self.plural_noun,
-            "plural_verb": self.plural_verb,
-            "singular_noun": self.singular_noun,
-            "a": self.a,
-            "an": self.a,
-            "no": self.no,
-            "ordinal": self.ordinal,
-            "number_to_words": self.number_to_words,
-            "present_participle": self.present_participle,
-            "num": self.num,
-        }
-
-        # Regular expression to find Python's function call syntax
-        output = FUNCTION_CALL.sub(
-            lambda mo: self._string_to_substitute(mo, methods_dict), text
-        )
-        self.persistent_count = save_persistent_count
-        return output
-
-    # ## PLURAL SUBROUTINES
-
-    def postprocess(self, orig: str, inflected) -> str:
-        inflected = str(inflected)
-        if "|" in inflected:
-            word_options = inflected.split("|")
-            # When two parts of a noun need to be pluralized
-            if len(word_options[0].split(" ")) == len(word_options[1].split(" ")):
-                result = inflected.split("|")[self.classical_dict["all"]].split(" ")
-            # When only the last part of the noun needs to be pluralized
-            else:
-                result = inflected.split(" ")
-                for index, word in enumerate(result):
-                    if "|" in word:
-                        result[index] = word.split("|")[self.classical_dict["all"]]
-        else:
-            result = inflected.split(" ")
-
-        # Try to fix word-wise capitalization
-        for index, word in enumerate(orig.split(" ")):
-            if word == "I":
-                # Is this the only word of this kind, where the original
-                # is fully capitalized without 'meaning' capitalization?
-                # This also fails to handle capitalization in context.
-                continue
-            if word.capitalize() == word:
-                result[index] = result[index].capitalize()
-            if word == word.upper():
-                result[index] = result[index].upper()
-        return " ".join(result)
-
-    def partition_word(self, text: str) -> Tuple[str, str, str]:
-        mo = PARTITION_WORD.search(text)
-        if mo:
-            return mo.group(1), mo.group(2), mo.group(3)
-        else:
-            return "", "", ""
-
-    @typechecked
-    def plural(self, text: Word, count: Optional[Union[str, int, Any]] = None) -> str:
-        """
-        Return the plural of text.
-
-        If count supplied, then return text if count is one of:
-            1, a, an, one, each, every, this, that
-
-        otherwise return the plural.
-
-        Whitespace at the start and end is preserved.
-
-        """
-        pre, word, post = self.partition_word(text)
-        if not word:
-            return text
-        plural = self.postprocess(
-            word,
-            self._pl_special_adjective(word, count)
-            or self._pl_special_verb(word, count)
-            or self._plnoun(word, count),
-        )
-        return f"{pre}{plural}{post}"
-
-    @typechecked
-    def plural_noun(
-        self, text: Word, count: Optional[Union[str, int, Any]] = None
-    ) -> str:
-        """
-        Return the plural of text, where text is a noun.
-
-        If count supplied, then return text if count is one of:
-            1, a, an, one, each, every, this, that
-
-        otherwise return the plural.
-
-        Whitespace at the start and end is preserved.
-
-        """
-        pre, word, post = self.partition_word(text)
-        if not word:
-            return text
-        plural = self.postprocess(word, self._plnoun(word, count))
-        return f"{pre}{plural}{post}"
-
-    @typechecked
-    def plural_verb(
-        self, text: Word, count: Optional[Union[str, int, Any]] = None
-    ) -> str:
-        """
-        Return the plural of text, where text is a verb.
-
-        If count supplied, then return text if count is one of:
-            1, a, an, one, each, every, this, that
-
-        otherwise return the plural.
-
-        Whitespace at the start and end is preserved.
-
-        """
-        pre, word, post = self.partition_word(text)
-        if not word:
-            return text
-        plural = self.postprocess(
-            word,
-            self._pl_special_verb(word, count) or self._pl_general_verb(word, count),
-        )
-        return f"{pre}{plural}{post}"
-
-    @typechecked
-    def plural_adj(
-        self, text: Word, count: Optional[Union[str, int, Any]] = None
-    ) -> str:
-        """
-        Return the plural of text, where text is an adjective.
-
-        If count supplied, then return text if count is one of:
-            1, a, an, one, each, every, this, that
-
-        otherwise return the plural.
-
-        Whitespace at the start and end is preserved.
-
-        """
-        pre, word, post = self.partition_word(text)
-        if not word:
-            return text
-        plural = self.postprocess(word, self._pl_special_adjective(word, count) or word)
-        return f"{pre}{plural}{post}"
-
-    @typechecked
-    def compare(self, word1: Word, word2: Word) -> Union[str, bool]:
-        """
-        compare word1 and word2 for equality regardless of plurality
-
-        return values:
-        eq - the strings are equal
-        p:s - word1 is the plural of word2
-        s:p - word2 is the plural of word1
-        p:p - word1 and word2 are two different plural forms of the same word
-        False - otherwise
-
-        >>> compare = engine().compare
-        >>> compare("egg", "eggs")
-        's:p'
-        >>> compare('egg', 'egg')
-        'eq'
-
-        Words should not be empty.
-
-        >>> compare('egg', '')
-        Traceback (most recent call last):
-        ...
-        typeguard.TypeCheckError:...is not an instance of inflect.Word
-        """
-        norms = self.plural_noun, self.plural_verb, self.plural_adj
-        results = (self._plequal(word1, word2, norm) for norm in norms)
-        return next(filter(None, results), False)
-
-    @typechecked
-    def compare_nouns(self, word1: Word, word2: Word) -> Union[str, bool]:
-        """
-        compare word1 and word2 for equality regardless of plurality
-        word1 and word2 are to be treated as nouns
-
-        return values:
-        eq - the strings are equal
-        p:s - word1 is the plural of word2
-        s:p - word2 is the plural of word1
-        p:p - word1 and word2 are two different plural forms of the same word
-        False - otherwise
-
-        """
-        return self._plequal(word1, word2, self.plural_noun)
-
-    @typechecked
-    def compare_verbs(self, word1: Word, word2: Word) -> Union[str, bool]:
-        """
-        compare word1 and word2 for equality regardless of plurality
-        word1 and word2 are to be treated as verbs
-
-        return values:
-        eq - the strings are equal
-        p:s - word1 is the plural of word2
-        s:p - word2 is the plural of word1
-        p:p - word1 and word2 are two different plural forms of the same word
-        False - otherwise
-
-        """
-        return self._plequal(word1, word2, self.plural_verb)
-
-    @typechecked
-    def compare_adjs(self, word1: Word, word2: Word) -> Union[str, bool]:
-        """
-        compare word1 and word2 for equality regardless of plurality
-        word1 and word2 are to be treated as adjectives
-
-        return values:
-        eq - the strings are equal
-        p:s - word1 is the plural of word2
-        s:p - word2 is the plural of word1
-        p:p - word1 and word2 are two different plural forms of the same word
-        False - otherwise
-
-        """
-        return self._plequal(word1, word2, self.plural_adj)
-
-    @typechecked
-    def singular_noun(
-        self,
-        text: Word,
-        count: Optional[Union[int, str, Any]] = None,
-        gender: Optional[str] = None,
-    ) -> Union[str, Literal[False]]:
-        """
-        Return the singular of text, where text is a plural noun.
-
-        If count supplied, then return the singular if count is one of:
-            1, a, an, one, each, every, this, that or if count is None
-
-        otherwise return text unchanged.
-
-        Whitespace at the start and end is preserved.
-
-        >>> p = engine()
-        >>> p.singular_noun('horses')
-        'horse'
-        >>> p.singular_noun('knights')
-        'knight'
-
-        Returns False when a singular noun is passed.
-
-        >>> p.singular_noun('horse')
-        False
-        >>> p.singular_noun('knight')
-        False
-        >>> p.singular_noun('soldier')
-        False
-
-        """
-        pre, word, post = self.partition_word(text)
-        if not word:
-            return text
-        sing = self._sinoun(word, count=count, gender=gender)
-        if sing is not False:
-            plural = self.postprocess(word, sing)
-            return f"{pre}{plural}{post}"
-        return False
-
-    def _plequal(self, word1: str, word2: str, pl) -> Union[str, bool]:  # noqa: C901
-        classval = self.classical_dict.copy()
-        self.classical_dict = all_classical.copy()
-        if word1 == word2:
-            return "eq"
-        if word1 == pl(word2):
-            return "p:s"
-        if pl(word1) == word2:
-            return "s:p"
-        self.classical_dict = no_classical.copy()
-        if word1 == pl(word2):
-            return "p:s"
-        if pl(word1) == word2:
-            return "s:p"
-        self.classical_dict = classval.copy()
-
-        if pl == self.plural or pl == self.plural_noun:
-            if self._pl_check_plurals_N(word1, word2):
-                return "p:p"
-            if self._pl_check_plurals_N(word2, word1):
-                return "p:p"
-        if pl == self.plural or pl == self.plural_adj:
-            if self._pl_check_plurals_adj(word1, word2):
-                return "p:p"
-        return False
-
-    def _pl_reg_plurals(self, pair: str, stems: str, end1: str, end2: str) -> bool:
-        pattern = rf"({stems})({end1}\|\1{end2}|{end2}\|\1{end1})"
-        return bool(re.search(pattern, pair))
-
-    def _pl_check_plurals_N(self, word1: str, word2: str) -> bool:
-        stem_endings = (
-            (pl_sb_C_a_ata, "as", "ata"),
-            (pl_sb_C_is_ides, "is", "ides"),
-            (pl_sb_C_a_ae, "s", "e"),
-            (pl_sb_C_en_ina, "ens", "ina"),
-            (pl_sb_C_um_a, "ums", "a"),
-            (pl_sb_C_us_i, "uses", "i"),
-            (pl_sb_C_on_a, "ons", "a"),
-            (pl_sb_C_o_i_stems, "os", "i"),
-            (pl_sb_C_ex_ices, "exes", "ices"),
-            (pl_sb_C_ix_ices, "ixes", "ices"),
-            (pl_sb_C_i, "s", "i"),
-            (pl_sb_C_im, "s", "im"),
-            (".*eau", "s", "x"),
-            (".*ieu", "s", "x"),
-            (".*tri", "xes", "ces"),
-            (".{2,}[yia]n", "xes", "ges"),
-        )
-
-        words = map(Words, (word1, word2))
-        pair = "|".join(word.last for word in words)
-
-        return (
-            pair in pl_sb_irregular_s.values()
-            or pair in pl_sb_irregular.values()
-            or pair in pl_sb_irregular_caps.values()
-            or any(
-                self._pl_reg_plurals(pair, stems, end1, end2)
-                for stems, end1, end2 in stem_endings
-            )
-        )
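-
-    # e.g. _pl_check_plurals_N("indexes", "indices") forms the pair
-    # "indexes|indices", which the (pl_sb_C_ex_ices, "exes", "ices")
-    # entry matches, so compare() reports "p:p" for the two plurals.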
-
-    def _pl_check_plurals_adj(self, word1: str, word2: str) -> bool:
-        word1a = word1[: word1.rfind("'")] if word1.endswith(("'s", "'")) else ""
-        word2a = word2[: word2.rfind("'")] if word2.endswith(("'s", "'")) else ""
-
-        return (
-            bool(word1a)
-            and bool(word2a)
-            and (
-                self._pl_check_plurals_N(word1a, word2a)
-                or self._pl_check_plurals_N(word2a, word1a)
-            )
-        )
-
-    def get_count(self, count: Optional[Union[str, int]] = None) -> Union[str, int]:
-        if count is None and self.persistent_count is not None:
-            count = self.persistent_count
-
-        if count is not None:
-            count = (
-                1
-                if (
-                    (str(count) in pl_count_one)
-                    or (
-                        self.classical_dict["zero"]
-                        and str(count).lower() in pl_count_zero
-                    )
-                )
-                else 2
-            )
-        else:
-            count = ""
-        return count
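-
-    # e.g. get_count("one") -> 1 and get_count(7) -> 2; with
-    # classical(zero=True), get_count("no") also yields 1, and with no
-    # count available it returns "" so callers fall through to the
-    # plural branch.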
-
-    # @profile
-    def _plnoun(  # noqa: C901
-        self, word: str, count: Optional[Union[str, int]] = None
-    ) -> str:
-        count = self.get_count(count)
-
-        # DEFAULT TO PLURAL
-
-        if count == 1:
-            return word
-
-        # HANDLE USER-DEFINED NOUNS
-
-        value = self.ud_match(word, self.pl_sb_user_defined)
-        if value is not None:
-            return value
-
-        # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS
-
-        if word == "":
-            return word
-
-        word = Words(word)
-
-        if word.last.lower() in pl_sb_uninflected_complete:
-            if len(word.split_) >= 3:
-                return self._handle_long_compounds(word, count=2) or word
-            return word
-
-        if word in pl_sb_uninflected_caps:
-            return word
-
-        for k, v in pl_sb_uninflected_bysize.items():
-            if word.lowered[-k:] in v:
-                return word
-
-        if self.classical_dict["herd"] and word.last.lower() in pl_sb_uninflected_herd:
-            return word
-
-        # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.)
-
-        mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word)
-        if mo and mo.group(2) != "":
-            return f"{self._plnoun(mo.group(1), 2)}{mo.group(2)}"
-
-        if " a " in word.lowered or "-a-" in word.lowered:
-            mo = PL_SB_PREP_DUAL_COMPOUND_RE.search(word)
-            if mo and mo.group(2) != "" and mo.group(3) != "":
-                return (
-                    f"{self._plnoun(mo.group(1), 2)}"
-                    f"{mo.group(2)}"
-                    f"{self._plnoun(mo.group(3))}"
-                )
-
-        if len(word.split_) >= 3:
-            handled_words = self._handle_long_compounds(word, count=2)
-            if handled_words is not None:
-                return handled_words
-
-        # only pluralize denominators in units
-        mo = DENOMINATOR.search(word.lowered)
-        if mo:
-            index = len(mo.group("denominator"))
-            return f"{self._plnoun(word[:index])}{word[index:]}"
-
-        # handle units given in degrees (only accept if
-        # there is no more than one word following)
-        # degree Celsius => degrees Celsius but degree
-        # fahrenheit hour => degree fahrenheit hours
-        if len(word.split_) >= 2 and word.split_[-2] == "degree":
-            return " ".join([self._plnoun(word.first)] + word.split_[1:])
-
-        with contextlib.suppress(ValueError):
-            return self._handle_prepositional_phrase(
-                word.lowered,
-                functools.partial(self._plnoun, count=2),
-                '-',
-            )
-
-        # HANDLE PRONOUNS
-
-        for k, v in pl_pron_acc_keys_bysize.items():
-            if word.lowered[-k:] in v:  # ends with accusative pronoun
-                for pk, pv in pl_prep_bysize.items():
-                    if word.lowered[:pk] in pv:  # starts with a prep
-                        if word.lowered.split() == [
-                            word.lowered[:pk],
-                            word.lowered[-k:],
-                        ]:
-                            # only whitespace in between
-                            return word.lowered[:-k] + pl_pron_acc[word.lowered[-k:]]
-
-        try:
-            return pl_pron_nom[word.lowered]
-        except KeyError:
-            pass
-
-        try:
-            return pl_pron_acc[word.lowered]
-        except KeyError:
-            pass
-
-        # HANDLE ISOLATED IRREGULAR PLURALS
-
-        if word.last in pl_sb_irregular_caps:
-            llen = len(word.last)
-            return f"{word[:-llen]}{pl_sb_irregular_caps[word.last]}"
-
-        lowered_last = word.last.lower()
-        if lowered_last in pl_sb_irregular:
-            llen = len(lowered_last)
-            return f"{word[:-llen]}{pl_sb_irregular[lowered_last]}"
-
-        dash_split = word.lowered.split('-')
-        if (" ".join(dash_split[-2:])).lower() in pl_sb_irregular_compound:
-            llen = len(
-                " ".join(dash_split[-2:])
-            )  # TODO: what if 2 spaces between these words?
-            return (
-                f"{word[:-llen]}"
-                f"{pl_sb_irregular_compound[(' '.join(dash_split[-2:])).lower()]}"
-            )
-
-        if word.lowered[-3:] == "quy":
-            return f"{word[:-1]}ies"
-
-        if word.lowered[-6:] == "person":
-            if self.classical_dict["persons"]:
-                return f"{word}s"
-            else:
-                return f"{word[:-4]}ople"
-
-        # HANDLE FAMILIES OF IRREGULAR PLURALS
-
-        if word.lowered[-3:] == "man":
-            for k, v in pl_sb_U_man_mans_bysize.items():
-                if word.lowered[-k:] in v:
-                    return f"{word}s"
-            for k, v in pl_sb_U_man_mans_caps_bysize.items():
-                if word[-k:] in v:
-                    return f"{word}s"
-            return f"{word[:-3]}men"
-        if word.lowered[-5:] == "mouse":
-            return f"{word[:-5]}mice"
-        if word.lowered[-5:] == "louse":
-            v = pl_sb_U_louse_lice_bysize.get(len(word))
-            if v and word.lowered in v:
-                return f"{word[:-5]}lice"
-            return f"{word}s"
-        if word.lowered[-5:] == "goose":
-            return f"{word[:-5]}geese"
-        if word.lowered[-5:] == "tooth":
-            return f"{word[:-5]}teeth"
-        if word.lowered[-4:] == "foot":
-            return f"{word[:-4]}feet"
-        if word.lowered[-4:] == "taco":
-            return f"{word[:-4]}tacos"
-
-        if word.lowered == "die":
-            return "dice"
-
-        # HANDLE UNASSIMILATED IMPORTS
-
-        if word.lowered[-4:] == "ceps":
-            return word
-        if word.lowered[-4:] == "zoon":
-            return f"{word[:-2]}a"
-        if word.lowered[-3:] in ("cis", "sis", "xis"):
-            return f"{word[:-2]}es"
-
-        for lastlet, d, numend, post in (
-            ("h", pl_sb_U_ch_chs_bysize, None, "s"),
-            ("x", pl_sb_U_ex_ices_bysize, -2, "ices"),
-            ("x", pl_sb_U_ix_ices_bysize, -2, "ices"),
-            ("m", pl_sb_U_um_a_bysize, -2, "a"),
-            ("s", pl_sb_U_us_i_bysize, -2, "i"),
-            ("n", pl_sb_U_on_a_bysize, -2, "a"),
-            ("a", pl_sb_U_a_ae_bysize, None, "e"),
-        ):
-            if word.lowered[-1] == lastlet:  # last-letter check first, for speed
-                for k, v in d.items():
-                    if word.lowered[-k:] in v:
-                        return word[:numend] + post
-
-        # HANDLE INCOMPLETELY ASSIMILATED IMPORTS
-
-        if self.classical_dict["ancient"]:
-            if word.lowered[-4:] == "trix":
-                return f"{word[:-1]}ces"
-            if word.lowered[-3:] in ("eau", "ieu"):
-                return f"{word}x"
-            if word.lowered[-3:] in ("ynx", "inx", "anx") and len(word) > 4:
-                return f"{word[:-1]}ges"
-
-            for lastlet, d, numend, post in (
-                ("n", pl_sb_C_en_ina_bysize, -2, "ina"),
-                ("x", pl_sb_C_ex_ices_bysize, -2, "ices"),
-                ("x", pl_sb_C_ix_ices_bysize, -2, "ices"),
-                ("m", pl_sb_C_um_a_bysize, -2, "a"),
-                ("s", pl_sb_C_us_i_bysize, -2, "i"),
-                ("s", pl_sb_C_us_us_bysize, None, ""),
-                ("a", pl_sb_C_a_ae_bysize, None, "e"),
-                ("a", pl_sb_C_a_ata_bysize, None, "ta"),
-                ("s", pl_sb_C_is_ides_bysize, -1, "des"),
-                ("o", pl_sb_C_o_i_bysize, -1, "i"),
-                ("n", pl_sb_C_on_a_bysize, -2, "a"),
-            ):
-                if word.lowered[-1] == lastlet:  # last-letter check first, for speed
-                    for k, v in d.items():
-                        if word.lowered[-k:] in v:
-                            return word[:numend] + post
-
-            for d, numend, post in (
-                (pl_sb_C_i_bysize, None, "i"),
-                (pl_sb_C_im_bysize, None, "im"),
-            ):
-                for k, v in d.items():
-                    if word.lowered[-k:] in v:
-                        return word[:numend] + post
-
-        # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SIBILANTS
-
-        if lowered_last in pl_sb_singular_s_complete:
-            return f"{word}es"
-
-        for k, v in pl_sb_singular_s_bysize.items():
-            if word.lowered[-k:] in v:
-                return f"{word}es"
-
-        if word.lowered[-2:] == "es" and word[0] == word[0].upper():
-            return f"{word}es"
-
-        if word.lowered[-1] == "z":
-            for k, v in pl_sb_z_zes_bysize.items():
-                if word.lowered[-k:] in v:
-                    return f"{word}es"
-
-            if word.lowered[-2:-1] != "z":
-                return f"{word}zes"
-
-        if word.lowered[-2:] == "ze":
-            for k, v in pl_sb_ze_zes_bysize.items():
-                if word.lowered[-k:] in v:
-                    return f"{word}s"
-
-        if word.lowered[-2:] in ("ch", "sh", "zz", "ss") or word.lowered[-1] == "x":
-            return f"{word}es"
-
-        # HANDLE ...f -> ...ves
-
-        if word.lowered[-3:] in ("elf", "alf", "olf"):
-            return f"{word[:-1]}ves"
-        if word.lowered[-3:] == "eaf" and word.lowered[-4:-3] != "d":
-            return f"{word[:-1]}ves"
-        if word.lowered[-4:] in ("nife", "life", "wife"):
-            return f"{word[:-2]}ves"
-        if word.lowered[-3:] == "arf":
-            return f"{word[:-1]}ves"
-
-        # HANDLE ...y
-
-        if word.lowered[-1] == "y":
-            if word.lowered[-2:-1] in "aeiou" or len(word) == 1:
-                return f"{word}s"
-
-            if self.classical_dict["names"]:
-                if word.lowered[-1] == "y" and word[0] == word[0].upper():
-                    return f"{word}s"
-
-            return f"{word[:-1]}ies"
-
-        # HANDLE ...o
-
-        if lowered_last in pl_sb_U_o_os_complete:
-            return f"{word}s"
-
-        for k, v in pl_sb_U_o_os_bysize.items():
-            if word.lowered[-k:] in v:
-                return f"{word}s"
-
-        if word.lowered[-2:] in ("ao", "eo", "io", "oo", "uo"):
-            return f"{word}s"
-
-        if word.lowered[-1] == "o":
-            return f"{word}es"
-
-        # OTHERWISE JUST ADD ...s
-
-        return f"{word}s"
-
-    @classmethod
-    def _handle_prepositional_phrase(cls, phrase, transform, sep):
-        """
-        Given a word or phrase possibly separated by sep, parse out
-        the prepositional phrase and apply the transform to the word
-        preceding the prepositional phrase.
-
-        Raise ValueError if no pivot is found or if the phrase
-        contains fewer than two separators.
-
-        >>> engine._handle_prepositional_phrase("man-of-war", str.upper, '-')
-        'MAN-of-war'
-        >>> engine._handle_prepositional_phrase("man of war", str.upper, ' ')
-        'MAN of war'
-        """
-        parts = phrase.split(sep)
-        if len(parts) < 3:
-            raise ValueError("Cannot handle words with fewer than two separators")
-
-        pivot = cls._find_pivot(parts, pl_prep_list_da)
-
-        transformed = transform(parts[pivot - 1]) or parts[pivot - 1]
-        return " ".join(
-            parts[: pivot - 1] + [sep.join([transformed, parts[pivot], ''])]
-        ) + " ".join(parts[(pivot + 1) :])
-
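This pivot logic is what lets hyphenated prepositional compounds inflect on the head noun rather than the tail. A short sketch via the public plural_noun method (outputs assume upstream inflect behavior, and that "de" is in pl_prep_list_da):

    import inflect

    p = inflect.engine()
    print(p.plural_noun("mother-in-law"))  # 'mothers-in-law'
    print(p.plural_noun("aide-de-camp"))   # 'aides-de-camp'
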
-    def _handle_long_compounds(self, word: Words, count: int) -> Union[str, None]:
-        """
-        Handles the plural and singular for compound `Words` that
-        have three or more words, based on the given count.
-
-        >>> engine()._handle_long_compounds(Words("pair of scissors"), 2)
-        'pairs of scissors'
-        >>> engine()._handle_long_compounds(Words("men beyond hills"), 1)
-        'man beyond hills'
-        """
-        inflection = self._sinoun if count == 1 else self._plnoun
-        solutions = (  # type: ignore
-            " ".join(
-                itertools.chain(
-                    leader,
-                    [inflection(cand, count), prep],  # type: ignore
-                    trailer,
-                )
-            )
-            for leader, (cand, prep), trailer in windowed_complete(word.split_, 2)
-            if prep in pl_prep_list_da  # type: ignore
-        )
-        return next(solutions, None)
-
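The same behavior is reachable through the public plural and singular_noun wrappers, which route compounds of three or more words through this helper; a sketch assuming upstream behavior:

    import inflect

    p = inflect.engine()
    print(p.plural("pair of scissors"))         # 'pairs of scissors'
    print(p.singular_noun("men beyond hills"))  # 'man beyond hills'
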
-    @staticmethod
-    def _find_pivot(words, candidates):
-        pivots = (
-            index for index in range(1, len(words) - 1) if words[index] in candidates
-        )
-        try:
-            return next(pivots)
-        except StopIteration:
-            raise ValueError("No pivot found") from None
-
-    def _pl_special_verb(  # noqa: C901
-        self, word: str, count: Optional[Union[str, int]] = None
-    ) -> Union[str, bool]:
-        if self.classical_dict["zero"] and str(count).lower() in pl_count_zero:
-            return False
-        count = self.get_count(count)
-
-        if count == 1:
-            return word
-
-        # HANDLE USER-DEFINED VERBS
-
-        value = self.ud_match(word, self.pl_v_user_defined)
-        if value is not None:
-            return value
-
-        # HANDLE IRREGULAR PRESENT TENSE (SIMPLE AND COMPOUND)
-
-        try:
-            words = Words(word)
-        except IndexError:
-            return False  # word is ''
-
-        if words.first in plverb_irregular_pres:
-            return f"{plverb_irregular_pres[words.first]}{words[len(words.first) :]}"
-
-        # HANDLE IRREGULAR FUTURE, PRETERITE AND PERFECT TENSES
-
-        if words.first in plverb_irregular_non_pres:
-            return word
-
-        # HANDLE PRESENT NEGATIONS (SIMPLE AND COMPOUND)
-
-        if words.first.endswith("n't") and words.first[:-3] in plverb_irregular_pres:
-            return (
-                f"{plverb_irregular_pres[words.first[:-3]]}n't"
-                f"{words[len(words.first) :]}"
-            )
-
-        if words.first.endswith("n't"):
-            return word
-
-        # HANDLE SPECIAL CASES
-
-        mo = PLVERB_SPECIAL_S_RE.search(word)
-        if mo:
-            return False
-        if WHITESPACE.search(word):
-            return False
-
-        if words.lowered == "quizzes":
-            return "quiz"
-
-        # HANDLE STANDARD 3RD PERSON (CHOP THE ...(e)s OFF SINGLE WORDS)
-
-        if (
-            words.lowered[-4:] in ("ches", "shes", "zzes", "sses")
-            or words.lowered[-3:] == "xes"
-        ):
-            return words[:-2]
-
-        if words.lowered[-3:] == "ies" and len(words) > 3:
-            return words.lowered[:-3] + "y"
-
-        if (
-            words.last.lower() in pl_v_oes_oe
-            or words.lowered[-4:] in pl_v_oes_oe_endings_size4
-            or words.lowered[-5:] in pl_v_oes_oe_endings_size5
-        ):
-            return words[:-1]
-
-        if words.lowered.endswith("oes") and len(words) > 3:
-            return words.lowered[:-2]
-
-        mo = ENDS_WITH_S.search(words)
-        if mo:
-            return mo.group(1)
-
-        # OTHERWISE, A REGULAR VERB (HANDLE ELSEWHERE)
-
-        return False
-
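_pl_special_verb covers only the irregular and sibilant-stem cases, returning False so that regular verbs fall through to _pl_general_verb below. A sketch via the public plural_verb wrapper, assuming upstream behavior:

    import inflect

    p = inflect.engine()
    print(p.plural_verb("is"))     # 'are'
    print(p.plural_verb("was"))    # 'were'
    print(p.plural_verb("runs"))   # 'run'
    print(p.plural_verb("isn't"))  # "aren't" -- negation keeps its n't suffix
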
-    def _pl_general_verb(
-        self, word: str, count: Optional[Union[str, int]] = None
-    ) -> str:
-        count = self.get_count(count)
-
-        if count == 1:
-            return word
-
-        # HANDLE AMBIGUOUS PRESENT TENSES  (SIMPLE AND COMPOUND)
-
-        mo = plverb_ambiguous_pres_keys.search(word)
-        if mo:
-            return f"{plverb_ambiguous_pres[mo.group(1).lower()]}{mo.group(2)}"
-
-        # HANDLE AMBIGUOUS PRETERITE AND PERFECT TENSES
-
-        mo = plverb_ambiguous_non_pres.search(word)
-        if mo:
-            return word
-
-        # OTHERWISE, 1st OR 2ND PERSON IS UNINFLECTED
-
-        return word
-
-    def _pl_special_adjective(
-        self, word: str, count: Optional[Union[str, int]] = None
-    ) -> Union[str, bool]:
-        count = self.get_count(count)
-
-        if count == 1:
-            return word
-
-        # HANDLE USER-DEFINED ADJECTIVES
-
-        value = self.ud_match(word, self.pl_adj_user_defined)
-        if value is not None:
-            return value
-
-        # HANDLE KNOWN CASES
-
-        mo = pl_adj_special_keys.search(word)
-        if mo:
-            return pl_adj_special[mo.group(1).lower()]
-
-        # HANDLE POSSESSIVES
-
-        mo = pl_adj_poss_keys.search(word)
-        if mo:
-            return pl_adj_poss[mo.group(1).lower()]
-
-        mo = ENDS_WITH_APOSTROPHE_S.search(word)
-        if mo:
-            pl = self.plural_noun(mo.group(1))
-            trailing_s = "" if pl[-1] == "s" else "s"
-            return f"{pl}'{trailing_s}"
-
-        # OTHERWISE, NO IDEA
-
-        return False
-
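A sketch of the adjective path through the public plural_adj wrapper (assuming upstream behavior): demonstratives and possessive pronouns map through small lookup tables, while possessive nouns are rebuilt from the pluralized noun:

    import inflect

    p = inflect.engine()
    print(p.plural_adj("my"))       # 'our'
    print(p.plural_adj("cat's"))    # "cats'"  -- plural already ends in s
    print(p.plural_adj("child's"))  # "children's"
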
-    def _sinoun(  # noqa: C901
-        self,
-        word: str,
-        count: Optional[Union[str, int]] = None,
-        gender: Optional[str] = None,
-    ) -> Union[str, bool]:
-        count = self.get_count(count)
-
-        # DEFAULT TO PLURAL
-
-        if count == 2:
-            return word
-
-        # SET THE GENDER
-
-        try:
-            if gender is None:
-                gender = self.thegender
-            elif gender not in singular_pronoun_genders:
-                raise BadGenderError
-        except (TypeError, IndexError) as err:
-            raise BadGenderError from err
-
-        # HANDLE USER-DEFINED NOUNS
-
-        value = self.ud_match(word, self.si_sb_user_defined)
-        if value is not None:
-            return value
-
-        # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS
-
-        if word == "":
-            return word
-
-        if word in si_sb_ois_oi_case:
-            return word[:-1]
-
-        words = Words(word)
-
-        if words.last.lower() in pl_sb_uninflected_complete:
-            if len(words.split_) >= 3:
-                return self._handle_long_compounds(words, count=1) or word
-            return word
-
-        if word in pl_sb_uninflected_caps:
-            return word
-
-        for k, v in pl_sb_uninflected_bysize.items():
-            if words.lowered[-k:] in v:
-                return word
-
-        if self.classical_dict["herd"] and words.last.lower() in pl_sb_uninflected_herd:
-            return word
-
-        if words.last.lower() in pl_sb_C_us_us:
-            return word if self.classical_dict["ancient"] else False
-
-        # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.)
-
-        mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word)
-        if mo and mo.group(2) != "":
-            return f"{self._sinoun(mo.group(1), 1, gender=gender)}{mo.group(2)}"
-
-        with contextlib.suppress(ValueError):
-            return self._handle_prepositional_phrase(
-                words.lowered,
-                functools.partial(self._sinoun, count=1, gender=gender),
-                ' ',
-            )
-
-        with contextlib.suppress(ValueError):
-            return self._handle_prepositional_phrase(
-                words.lowered,
-                functools.partial(self._sinoun, count=1, gender=gender),
-                '-',
-            )
-
-        # HANDLE PRONOUNS
-
-        for k, v in si_pron_acc_keys_bysize.items():
-            if words.lowered[-k:] in v:  # ends with accusative pronoun
-                for pk, pv in pl_prep_bysize.items():
-                    if words.lowered[:pk] in pv:  # starts with a prep
-                        if words.lowered.split() == [
-                            words.lowered[:pk],
-                            words.lowered[-k:],
-                        ]:
-                            # only whitespace in between
-                            return words.lowered[:-k] + get_si_pron(
-                                "acc", words.lowered[-k:], gender
-                            )
-
-        try:
-            return get_si_pron("nom", words.lowered, gender)
-        except KeyError:
-            pass
-
-        try:
-            return get_si_pron("acc", words.lowered, gender)
-        except KeyError:
-            pass
-
-        # HANDLE ISOLATED IRREGULAR PLURALS
-
-        if words.last in si_sb_irregular_caps:
-            llen = len(words.last)
-            return f"{word[:-llen]}{si_sb_irregular_caps[words.last]}"
-
-        if words.last.lower() in si_sb_irregular:
-            llen = len(words.last.lower())
-            return f"{word[:-llen]}{si_sb_irregular[words.last.lower()]}"
-
-        dash_split = words.lowered.split("-")
-        if (" ".join(dash_split[-2:])).lower() in si_sb_irregular_compound:
-            llen = len(
-                " ".join(dash_split[-2:])
-            )  # TODO: what if 2 spaces between these words?
-            return "{}{}".format(
-                word[:-llen],
-                si_sb_irregular_compound[(" ".join(dash_split[-2:])).lower()],
-            )
-
-        if words.lowered[-5:] == "quies":
-            return word[:-3] + "y"
-
-        if words.lowered[-7:] == "persons":
-            return word[:-1]
-        if words.lowered[-6:] == "people":
-            return word[:-4] + "rson"
-
-        # HANDLE FAMILIES OF IRREGULAR PLURALS
-
-        if words.lowered[-4:] == "mans":
-            for k, v in si_sb_U_man_mans_bysize.items():
-                if words.lowered[-k:] in v:
-                    return word[:-1]
-            for k, v in si_sb_U_man_mans_caps_bysize.items():
-                if word[-k:] in v:
-                    return word[:-1]
-        if words.lowered[-3:] == "men":
-            return word[:-3] + "man"
-        if words.lowered[-4:] == "mice":
-            return word[:-4] + "mouse"
-        if words.lowered[-4:] == "lice":
-            v = si_sb_U_louse_lice_bysize.get(len(word))
-            if v and words.lowered in v:
-                return word[:-4] + "louse"
-        if words.lowered[-5:] == "geese":
-            return word[:-5] + "goose"
-        if words.lowered[-5:] == "teeth":
-            return word[:-5] + "tooth"
-        if words.lowered[-4:] == "feet":
-            return word[:-4] + "foot"
-
-        if words.lowered == "dice":
-            return "die"
-
-        # HANDLE UNASSIMILATED IMPORTS
-
-        if words.lowered[-4:] == "ceps":
-            return word
-        if words.lowered[-3:] == "zoa":
-            return word[:-1] + "on"
-
-        for lastlet, d, unass_numend, post in (
-            ("s", si_sb_U_ch_chs_bysize, -1, ""),
-            ("s", si_sb_U_ex_ices_bysize, -4, "ex"),
-            ("s", si_sb_U_ix_ices_bysize, -4, "ix"),
-            ("a", si_sb_U_um_a_bysize, -1, "um"),
-            ("i", si_sb_U_us_i_bysize, -1, "us"),
-            ("a", si_sb_U_on_a_bysize, -1, "on"),
-            ("e", si_sb_U_a_ae_bysize, -1, ""),
-        ):
-            if words.lowered[-1] == lastlet:  # last-letter check first, for speed
-                for k, v in d.items():
-                    if words.lowered[-k:] in v:
-                        return word[:unass_numend] + post
-
-        # HANDLE INCOMPLETELY ASSIMILATED IMPORTS
-
-        if self.classical_dict["ancient"]:
-            if words.lowered[-6:] == "trices":
-                return word[:-3] + "x"
-            if words.lowered[-4:] in ("eaux", "ieux"):
-                return word[:-1]
-            if words.lowered[-5:] in ("ynges", "inges", "anges") and len(word) > 6:
-                return word[:-3] + "x"
-
-            for lastlet, d, class_numend, post in (
-                ("a", si_sb_C_en_ina_bysize, -3, "en"),
-                ("s", si_sb_C_ex_ices_bysize, -4, "ex"),
-                ("s", si_sb_C_ix_ices_bysize, -4, "ix"),
-                ("a", si_sb_C_um_a_bysize, -1, "um"),
-                ("i", si_sb_C_us_i_bysize, -1, "us"),
-                ("s", pl_sb_C_us_us_bysize, None, ""),
-                ("e", si_sb_C_a_ae_bysize, -1, ""),
-                ("a", si_sb_C_a_ata_bysize, -2, ""),
-                ("s", si_sb_C_is_ides_bysize, -3, "s"),
-                ("i", si_sb_C_o_i_bysize, -1, "o"),
-                ("a", si_sb_C_on_a_bysize, -1, "on"),
-                ("m", si_sb_C_im_bysize, -2, ""),
-                ("i", si_sb_C_i_bysize, -1, ""),
-            ):
-                if words.lowered[-1] == lastlet:  # last-letter check first, for speed
-                    for k, v in d.items():
-                        if words.lowered[-k:] in v:
-                            return word[:class_numend] + post
-
-        # HANDLE PLURALS ENDING IN uses -> use
-
-        if (
-            words.lowered[-6:] == "houses"
-            or word in si_sb_uses_use_case
-            or words.last.lower() in si_sb_uses_use
-        ):
-            return word[:-1]
-
-        # HANDLE PLURALS ENDING IN ies -> ie
-
-        if word in si_sb_ies_ie_case or words.last.lower() in si_sb_ies_ie:
-            return word[:-1]
-
-        # HANDLE PLURALS ENDING IN oes -> oe
-
-        if (
-            words.lowered[-5:] == "shoes"
-            or word in si_sb_oes_oe_case
-            or words.last.lower() in si_sb_oes_oe
-        ):
-            return word[:-1]
-
-        # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SIBILANTS
-
-        if word in si_sb_sses_sse_case or words.last.lower() in si_sb_sses_sse:
-            return word[:-1]
-
-        if words.last.lower() in si_sb_singular_s_complete:
-            return word[:-2]
-
-        for k, v in si_sb_singular_s_bysize.items():
-            if words.lowered[-k:] in v:
-                return word[:-2]
-
-        if words.lowered[-4:] == "eses" and word[0] == word[0].upper():
-            return word[:-2]
-
-        if words.last.lower() in si_sb_z_zes:
-            return word[:-2]
-
-        if words.last.lower() in si_sb_zzes_zz:
-            return word[:-2]
-
-        if words.lowered[-4:] == "zzes":
-            return word[:-3]
-
-        if word in si_sb_ches_che_case or words.last.lower() in si_sb_ches_che:
-            return word[:-1]
-
-        if words.lowered[-4:] in ("ches", "shes"):
-            return word[:-2]
-
-        if words.last.lower() in si_sb_xes_xe:
-            return word[:-1]
-
-        if words.lowered[-3:] == "xes":
-            return word[:-2]
-
-        # HANDLE ...f -> ...ves
-
-        if word in si_sb_ves_ve_case or words.last.lower() in si_sb_ves_ve:
-            return word[:-1]
-
-        if words.lowered[-3:] == "ves":
-            if words.lowered[-5:-3] in ("el", "al", "ol"):
-                return word[:-3] + "f"
-            if words.lowered[-5:-3] == "ea" and word[-6:-5] != "d":
-                return word[:-3] + "f"
-            if words.lowered[-5:-3] in ("ni", "li", "wi"):
-                return word[:-3] + "fe"
-            if words.lowered[-5:-3] == "ar":
-                return word[:-3] + "f"
-
-        # HANDLE ...y
-
-        if words.lowered[-2:] == "ys":
-            if len(words.lowered) > 2 and words.lowered[-3] in "aeiou":
-                return word[:-1]
-
-            if self.classical_dict["names"]:
-                if words.lowered[-2:] == "ys" and word[0] == word[0].upper():
-                    return word[:-1]
-
-        if words.lowered[-3:] == "ies":
-            return word[:-3] + "y"
-
-        # HANDLE ...o
-
-        if words.lowered[-2:] == "os":
-            if words.last.lower() in si_sb_U_o_os_complete:
-                return word[:-1]
-
-            for k, v in si_sb_U_o_os_bysize.items():
-                if words.lowered[-k:] in v:
-                    return word[:-1]
-
-            if words.lowered[-3:] in ("aos", "eos", "ios", "oos", "uos"):
-                return word[:-1]
-
-        if words.lowered[-3:] == "oes":
-            return word[:-2]
-
-        # UNASSIMILATED IMPORTS FINAL RULE
-
-        if word in si_sb_es_is:
-            return word[:-2] + "is"
-
-        # OTHERWISE JUST REMOVE ...s
-
-        if words.lowered[-1] == "s":
-            return word[:-1]
-
-        # COULD NOT FIND SINGULAR
-
-        return False
-
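_sinoun mirrors _plnoun in reverse and, unlike it, returns False when no singular can be derived; the public singular_noun wrapper passes that False through. A sketch assuming upstream behavior:

    import inflect

    p = inflect.engine()
    print(p.singular_noun("geese"))   # 'goose'
    print(p.singular_noun("people"))  # 'person'
    print(p.singular_noun("cats"))    # 'cat'
    print(p.singular_noun("cat"))     # False -- already singular
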
-    # ADJECTIVES
-
-    @typechecked
-    def a(self, text: Word, count: Optional[Union[int, str, Any]] = 1) -> str:
-        """
-        Return the appropriate indefinite article followed by text.
-
-        The indefinite article is either 'a' or 'an'.
-
-        If count is not one, then return count followed by text
-        instead of 'a' or 'an'.
-
-        Whitespace at the start and end is preserved.
-
-        """
-        mo = INDEFINITE_ARTICLE_TEST.search(text)
-        if mo:
-            word = mo.group(2)
-            if not word:
-                return text
-            pre = mo.group(1)
-            post = mo.group(3)
-            result = self._indef_article(word, count)
-            return f"{pre}{result}{post}"
-        return ""
-
-    an = a
-
-    _indef_article_cases = (
-        # HANDLE ORDINAL FORMS
-        (A_ordinal_a, "a"),
-        (A_ordinal_an, "an"),
-        # HANDLE SPECIAL CASES
-        (A_explicit_an, "an"),
-        (SPECIAL_AN, "an"),
-        (SPECIAL_A, "a"),
-        # HANDLE ABBREVIATIONS
-        (A_abbrev, "an"),
-        (SPECIAL_ABBREV_AN, "an"),
-        (SPECIAL_ABBREV_A, "a"),
-        # HANDLE CONSONANTS
-        (CONSONANTS, "a"),
-        # HANDLE SPECIAL VOWEL-FORMS
-        (ARTICLE_SPECIAL_EU, "a"),
-        (ARTICLE_SPECIAL_ONCE, "a"),
-        (ARTICLE_SPECIAL_ONETIME, "a"),
-        (ARTICLE_SPECIAL_UNIT, "a"),
-        (ARTICLE_SPECIAL_UBA, "a"),
-        (ARTICLE_SPECIAL_UKR, "a"),
-        (A_explicit_a, "a"),
-        # HANDLE SPECIAL CAPITALS
-        (SPECIAL_CAPITALS, "a"),
-        # HANDLE VOWELS
-        (VOWELS, "an"),
-        # HANDLE y...
-        # (BEFORE CERTAIN CONSONANTS IMPLIES (UNNATURALIZED) "i.." SOUND)
-        (A_y_cons, "an"),
-    )
-
-    def _indef_article(self, word: str, count: Union[int, str, Any]) -> str:
-        mycount = self.get_count(count)
-
-        if mycount != 1:
-            return f"{count} {word}"
-
-        # HANDLE USER-DEFINED VARIANTS
-
-        value = self.ud_match(word, self.A_a_user_defined)
-        if value is not None:
-            return f"{value} {word}"
-
-        matches = (
-            f'{article} {word}'
-            for regexen, article in self._indef_article_cases
-            if regexen.search(word)
-        )
-
-        # OTHERWISE, GUESS "a"
-        fallback = f'a {word}'
-        return next(matches, fallback)
-
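The case table above is ordered so that explicit and special patterns win before the generic consonant/vowel rules, with anything unmatched falling back to "a". A sketch via the public a/an methods, assuming upstream behavior:

    import inflect

    p = inflect.engine()
    print(p.a("ant"))     # 'an ant'
    print(p.a("cat"))     # 'a cat'
    print(p.an("hour"))   # 'an hour'  -- silent 'h' is special-cased
    print(p.a("eulogy"))  # 'a eulogy' -- the ARTICLE_SPECIAL_EU rule
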
-    # 2. TRANSLATE ZERO-QUANTIFIED $word TO "no plural($word)"
-
-    @typechecked
-    def no(self, text: Word, count: Optional[Union[int, str]] = None) -> str:
-        """
-        If count is 0, no, zero or nil, return 'no' followed by the plural
-        of text.
-
-        If count is one of:
-            1, a, an, one, each, every, this, that
-            return count followed by text.
-
-        Otherwise return count followed by the plural of text.
-
-        In the return value count is always followed by a space.
-
-        Whitespace at the start and end is preserved.
-
-        """
-        if count is None and self.persistent_count is not None:
-            count = self.persistent_count
-
-        if count is None:
-            count = 0
-        mo = PARTITION_WORD.search(text)
-        if mo:
-            pre = mo.group(1)
-            word = mo.group(2)
-            post = mo.group(3)
-        else:
-            pre = ""
-            word = ""
-            post = ""
-
-        if str(count).lower() in pl_count_zero:
-            count = 'no'
-        return f"{pre}{count} {self.plural(word, count)}{post}"
-
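A sketch of no() via the public API, assuming upstream behavior; note that non-zero counts are echoed back in front of the (possibly pluralized) noun:

    import inflect

    p = inflect.engine()
    print(p.no("error", 0))  # 'no errors'
    print(p.no("error", 1))  # '1 error'
    print(p.no("error", 3))  # '3 errors'
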
-    # PARTICIPLES
-
-    @typechecked
-    def present_participle(self, word: Word) -> str:
-        """
-        Return the present participle for word.
-
-        word should be the verb in its third-person singular form.
-
-        """
-        plv = self.plural_verb(word, 2)
-        ans = plv
-
-        for regexen, repl in PRESENT_PARTICIPLE_REPLACEMENTS:
-            ans, num = regexen.subn(repl, plv)
-            if num:
-                return f"{ans}ing"
-        return f"{ans}ing"
-
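The participle is built from the plural (base) form of the verb, with a handful of regex replacements for e-dropping, ie -> y, and consonant doubling before "ing" is appended. A sketch assuming upstream behavior:

    import inflect

    p = inflect.engine()
    print(p.present_participle("runs"))   # 'running' -- final consonant doubled
    print(p.present_participle("bakes"))  # 'baking'  -- trailing 'e' dropped
    print(p.present_participle("lies"))   # 'lying'   -- 'ie' becomes 'y'
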
-    # NUMERICAL INFLECTIONS
-
-    @typechecked
-    def ordinal(self, num: Union[Number, Word]) -> str:
-        """
-        Return the ordinal of num.
-
-        >>> ordinal = engine().ordinal
-        >>> ordinal(1)
-        '1st'
-        >>> ordinal('one')
-        'first'
-        """
-        if DIGIT.match(str(num)):
-            if isinstance(num, (float, int)) and int(num) == num:
-                n = int(num)
-            else:
-                if "." in str(num):
-                    try:
-                        # numbers after decimal,
-                        # so only need last one for ordinal
-                        n = int(str(num)[-1])
-
-                    except ValueError:  # ends with '.', so need to use whole string
-                        n = int(str(num)[:-1])
-                else:
-                    n = int(num)  # type: ignore
-            try:
-                post = nth[n % 100]
-            except KeyError:
-                post = nth[n % 10]
-            return f"{num}{post}"
-        else:
-            return self._sub_ord(num)
-
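The nth lookup tries n % 100 before n % 10 so that the teens take "th" rather than the suffix of their final digit. A sketch assuming upstream behavior:

    import inflect

    p = inflect.engine()
    print(p.ordinal(21))     # '21st'
    print(p.ordinal(11))     # '11th' -- not '11st'
    print(p.ordinal("two"))  # 'second'
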
-    def millfn(self, ind: int = 0) -> str:
-        if ind > len(mill) - 1:
-            raise NumOutOfRangeError
-        return mill[ind]
-
-    def unitfn(self, units: int, mindex: int = 0) -> str:
-        return f"{unit[units]}{self.millfn(mindex)}"
-
-    def tenfn(self, tens, units, mindex=0) -> str:
-        if tens != 1:
-            tens_part = ten[tens]
-            if tens and units:
-                hyphen = "-"
-            else:
-                hyphen = ""
-            unit_part = unit[units]
-            mill_part = self.millfn(mindex)
-            return f"{tens_part}{hyphen}{unit_part}{mill_part}"
-        return f"{teen[units]}{mill[mindex]}"
-
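tenfn is where the hyphen between tens and units comes from, and where 10-19 divert to the teen table. Its effect is observable through number_to_words (a sketch, assuming upstream behavior):

    import inflect

    p = inflect.engine()
    print(p.number_to_words(21))  # 'twenty-one'
    print(p.number_to_words(20))  # 'twenty'
    print(p.number_to_words(16))  # 'sixteen'
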
-    def hundfn(self, hundreds: int, tens: int, units: int, mindex: int) -> str:
-        if hundreds:
-            andword = f" {self._number_args['andword']} " if tens or units else ""
-            # use unit rather than unitfn; it is simpler here
-            return (
-                f"{unit[hundreds]} hundred{andword}"
-                f"{self.tenfn(tens, units)}{self.millfn(mindex)}, "
-            )
-        if tens or units:
-            return f"{self.tenfn(tens, units)}{self.millfn(mindex)}, "
-        return ""
-
-    def group1sub(self, mo: Match) -> str:
-        units = int(mo.group(1))
-        if units == 1:
-            return f" {self._number_args['one']}, "
-        elif units:
-            return f"{unit[units]}, "
-        else:
-            return f" {self._number_args['zero']}, "
-
-    def group1bsub(self, mo: Match) -> str:
-        units = int(mo.group(1))
-        if units:
-            return f"{unit[units]}, "
-        else:
-            return f" {self._number_args['zero']}, "
-
-    def group2sub(self, mo: Match) -> str:
-        tens = int(mo.group(1))
-        units = int(mo.group(2))
-        if tens:
-            return f"{self.tenfn(tens, units)}, "
-        if units:
-            return f" {self._number_args['zero']} {unit[units]}, "
-        return f" {self._number_args['zero']} {self._number_args['zero']}, "
-
-    def group3sub(self, mo: Match) -> str:
-        hundreds = int(mo.group(1))
-        tens = int(mo.group(2))
-        units = int(mo.group(3))
-        if hundreds == 1:
-            hunword = f" {self._number_args['one']}"
-        elif hundreds:
-            hunword = str(unit[hundreds])
-        else:
-            hunword = f" {self._number_args['zero']}"
-        if tens:
-            tenword = self.tenfn(tens, units)
-        elif units:
-            tenword = f" {self._number_args['zero']} {unit[units]}"
-        else:
-            tenword = f" {self._number_args['zero']} {self._number_args['zero']}"
-        return f"{hunword} {tenword}, "
-
-    def hundsub(self, mo: Match) -> str:
-        ret = self.hundfn(
-            int(mo.group(1)), int(mo.group(2)), int(mo.group(3)), self.mill_count
-        )
-        self.mill_count += 1
-        return ret
-
-    def tensub(self, mo: Match) -> str:
-        return f"{self.tenfn(int(mo.group(1)), int(mo.group(2)), self.mill_count)}, "
-
-    def unitsub(self, mo: Match) -> str:
-        return f"{self.unitfn(int(mo.group(1)), self.mill_count)}, "
-
-    def enword(self, num: str, group: int) -> str:
-        if group == 1:
-            num = DIGIT_GROUP.sub(self.group1sub, num)
-        elif group == 2:
-            num = TWO_DIGITS.sub(self.group2sub, num)
-            num = DIGIT_GROUP.sub(self.group1bsub, num, 1)
-        elif group == 3:
-            num = THREE_DIGITS.sub(self.group3sub, num)
-            num = TWO_DIGITS.sub(self.group2sub, num, 1)
-            num = DIGIT_GROUP.sub(self.group1sub, num, 1)
-        elif int(num) == 0:
-            num = self._number_args["zero"]
-        elif int(num) == 1:
-            num = self._number_args["one"]
-        else:
-            num = num.lstrip().lstrip("0")
-            self.mill_count = 0
-            # surely there's a better way to do the next bit
-            mo = THREE_DIGITS_WORD.search(num)
-            while mo:
-                num = THREE_DIGITS_WORD.sub(self.hundsub, num, 1)
-                mo = THREE_DIGITS_WORD.search(num)
-            num = TWO_DIGITS_WORD.sub(self.tensub, num, 1)
-            num = ONE_DIGIT_WORD.sub(self.unitsub, num, 1)
-        return num
-
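enword drives the grouping modes: group=1 spells each digit, group=2 takes digit pairs, group=3 takes triples, and group=0 walks the whole number three digits at a time through hundsub. A sketch of the grouped modes (outputs assume upstream behavior):

    import inflect

    p = inflect.engine()
    print(p.number_to_words(1234, group=1))  # 'one, two, three, four'
    print(p.number_to_words(1234, group=2))  # 'twelve, thirty-four'
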
-    @staticmethod
-    def _sub_ord(val):
-        new = ordinal_suff.sub(lambda match: ordinal[match.group(1)], val)
-        return new + "th" * (new == val)
-
-    @classmethod
-    def _chunk_num(cls, num, decimal, group):
-        if decimal:
-            max_split = -1 if group != 0 else 1
-            chunks = num.split(".", max_split)
-        else:
-            chunks = [num]
-        return cls._remove_last_blank(chunks)
-
-    @staticmethod
-    def _remove_last_blank(chunks):
-        """
-        Remove the last item from chunks if it's a blank string.
-
-        Return the resultant chunks and whether the last item was removed.
-        """
-        removed = chunks[-1] == ""
-        result = chunks[:-1] if removed else chunks
-        return result, removed
-
-    @staticmethod
-    def _get_sign(num):
-        return {'+': 'plus', '-': 'minus'}.get(num.lstrip()[0], '')
-
-    @typechecked
-    def number_to_words(  # noqa: C901
-        self,
-        num: Union[Number, Word],
-        wantlist: bool = False,
-        group: int = 0,
-        comma: Union[Falsish, str] = ",",
-        andword: str = "and",
-        zero: str = "zero",
-        one: str = "one",
-        decimal: Union[Falsish, str] = "point",
-        threshold: Optional[int] = None,
-    ) -> Union[str, List[str]]:
-        """
-        Return a number in words.
-
-        group: 1, 2 or 3 to group digits before turning them into words
-        comma: the string inserted between number groups
-
-        andword:
-            word for 'and'. Can be set to ''.
-            e.g. "one hundred and one" vs "one hundred one"
-
-        zero: word for '0'
-        one: word for '1'
-        decimal: word for decimal point
-        threshold: numbers above threshold not turned into words
-
-        Parameters are not remembered from the last call (a departure
-        from the Perl version).
-        """
-        self._number_args = {"andword": andword, "zero": zero, "one": one}
-        num = str(num)
-
-        # Handle "stylistic" conversions (up to a given threshold)...
-        if threshold is not None and float(num) > threshold:
-            spnum = num.split(".", 1)
-            while comma:
-                (spnum[0], n) = FOUR_DIGIT_COMMA.subn(r"\1,\2", spnum[0])
-                if n == 0:
-                    break
-            try:
-                return f"{spnum[0]}.{spnum[1]}"
-            except IndexError:
-                return str(spnum[0])
-
-        if group < 0 or group > 3:
-            raise BadChunkingOptionError
-
-        sign = self._get_sign(num)
-
-        if num in nth_suff:
-            num = zero
-
-        myord = num[-2:] in nth_suff
-        if myord:
-            num = num[:-2]
-
-        chunks, finalpoint = self._chunk_num(num, decimal, group)
-
-        loopstart = chunks[0] == ""
-        first: bool | None = not loopstart
-
-        def _handle_chunk(chunk):
-            nonlocal first
-
-            # remove all non-digit characters (\D)
-            chunk = NON_DIGIT.sub("", chunk)
-            if chunk == "":
-                chunk = "0"
-
-            if group == 0 and not first:
-                chunk = self.enword(chunk, 1)
-            else:
-                chunk = self.enword(chunk, group)
-
-            if chunk[-2:] == ", ":
-                chunk = chunk[:-2]
-            chunk = WHITESPACES_COMMA.sub(",", chunk)
-
-            if group == 0 and first:
-                chunk = COMMA_WORD.sub(f" {andword} \\1", chunk)
-            chunk = WHITESPACES.sub(" ", chunk)
-            chunk = chunk.strip()
-            if first:
-                first = None
-            return chunk
-
-        chunks[loopstart:] = map(_handle_chunk, chunks[loopstart:])
-
-        numchunks = []
-        if first != 0:
-            numchunks = chunks[0].split(f"{comma} ")
-
-        if myord and numchunks:
-            numchunks[-1] = self._sub_ord(numchunks[-1])
-
-        for chunk in chunks[1:]:
-            numchunks.append(decimal)
-            numchunks.extend(chunk.split(f"{comma} "))
-
-        if finalpoint:
-            numchunks.append(decimal)
-
-        if wantlist:
-            return [sign] * bool(sign) + numchunks
-
-        signout = f"{sign} " if sign else ""
-        valout = (
-            ', '.join(numchunks)
-            if group
-            else ''.join(self._render(numchunks, decimal, comma))
-        )
-        return signout + valout
-
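A sketch of the main entry point, assuming upstream behavior, showing the andword, wantlist and threshold options described in the docstring above:

    import inflect

    p = inflect.engine()
    print(p.number_to_words(1234))
    # 'one thousand, two hundred and thirty-four'
    print(p.number_to_words(1234, andword=""))
    # 'one thousand, two hundred thirty-four'
    print(p.number_to_words(1234, wantlist=True))
    # ['one thousand', 'two hundred and thirty-four']
    print(p.number_to_words(1234567, threshold=10000))
    # '1,234,567' -- above the threshold, digits are kept and comma-grouped
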
-    @staticmethod
-    def _render(chunks, decimal, comma):
-        first_item = chunks.pop(0)
-        yield first_item
-        first = decimal is None or not first_item.endswith(decimal)
-        for nc in chunks:
-            if nc == decimal:
-                first = False
-            elif first:
-                yield comma
-            yield f" {nc}"
-
-    @typechecked
-    def join(
-        self,
-        words: Optional[Sequence[Word]],
-        sep: Optional[str] = None,
-        sep_spaced: bool = True,
-        final_sep: Optional[str] = None,
-        conj: str = "and",
-        conj_spaced: bool = True,
-    ) -> str:
-        """
-        Join words into a single, list-like string.
-
-        e.g. join(['ant', 'bee', 'fly']) returns 'ant, bee, and fly'
-
-        options:
-        conj: replacement for 'and'
-        sep: separator; defaults to ',', or to ';' if any word contains ','
-        sep_spaced: boolean; should sep be followed by a space
-        final_sep: separator before conj; defaults to the value of sep
-        conj_spaced: boolean; should conj have spaces around it
-
-        """
-        if not words:
-            return ""
-        if len(words) == 1:
-            return words[0]
-
-        if conj_spaced:
-            if conj == "":
-                conj = " "
-            else:
-                conj = f" {conj} "
-
-        if len(words) == 2:
-            return f"{words[0]}{conj}{words[1]}"
-
-        if sep is None:
-            if "," in "".join(words):
-                sep = ";"
-            else:
-                sep = ","
-        if final_sep is None:
-            final_sep = sep
-
-        final_sep = f"{final_sep}{conj}"
-
-        if sep_spaced:
-            sep += " "
-
-        return f"{sep.join(words[0:-1])}{final_sep}{words[-1]}"
diff --git a/pkg_resources/_vendor/inflect/compat/__init__.py b/pkg_resources/_vendor/inflect/compat/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/inflect/compat/py38.py b/pkg_resources/_vendor/inflect/compat/py38.py
deleted file mode 100644
index a2d01bd98f..0000000000
--- a/pkg_resources/_vendor/inflect/compat/py38.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import sys
-
-
-if sys.version_info > (3, 9):
-    from typing import Annotated
-else:  # pragma: no cover
-    from typing_extensions import Annotated  # noqa: F401
diff --git a/pkg_resources/_vendor/inflect/py.typed b/pkg_resources/_vendor/inflect/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/METADATA b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/METADATA
deleted file mode 100644
index a36f7c5e82..0000000000
--- a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/METADATA
+++ /dev/null
@@ -1,75 +0,0 @@
-Metadata-Version: 2.1
-Name: jaraco.context
-Version: 5.3.0
-Summary: Useful decorators and context managers
-Home-page: https://github.com/jaraco/jaraco.context
-Author: Jason R. Coombs
-Author-email: jaraco@jaraco.com
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-License-File: LICENSE
-Requires-Dist: backports.tarfile ; python_version < "3.12"
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-mypy ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
-Requires-Dist: portend ; extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/jaraco.context.svg
-   :target: https://pypi.org/project/jaraco.context
-
-.. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg
-
-.. image:: https://github.com/jaraco/jaraco.context/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest
-   :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/jaraco.context
-   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.context?utm_source=pypi-jaraco.context&utm_medium=readme
-
-
-Highlights
-==========
-
-See the docs linked from the badge above for the full details, but here are some features that may be of interest.
-
-- ``ExceptionTrap`` provides a general-purpose wrapper for trapping exceptions and then acting on the outcome. Includes ``passes`` and ``raises`` decorators to replace the result of a wrapped function by a boolean indicating the outcome of the exception trap. See `this keyring commit `_ for an example of it in production.
-- ``suppress`` simply enables ``contextlib.suppress`` as a decorator.
-- ``on_interrupt`` is a decorator used by CLI entry points to affect the handling of a ``KeyboardInterrupt``. Inspired by `Lucretiel/autocommand#18 `_.
-- ``pushd`` is similar to pytest's ``monkeypatch.chdir`` or path's `default context `_; it changes the current working directory for the duration of the context.
-- ``tarball`` will download a tarball, extract it, change directory, yield, then clean up after. Convenient when working with web assets.
-- ``null`` is there for those times when one code branch needs a context and the other doesn't; this null context provides symmetry across those branches.
-
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more `_.
diff --git a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD
deleted file mode 100644
index 09d191f214..0000000000
--- a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/RECORD
+++ /dev/null
@@ -1,8 +0,0 @@
-jaraco.context-5.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.context-5.3.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.context-5.3.0.dist-info/METADATA,sha256=xDtguJej0tN9iEXCUvxEJh2a7xceIRVBEakBLSr__tY,4020
-jaraco.context-5.3.0.dist-info/RECORD,,
-jaraco.context-5.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-jaraco.context-5.3.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/__pycache__/context.cpython-312.pyc,,
-jaraco/context.py,sha256=REoLIxDkO5MfEYowt_WoupNCRoxBS5v7YX2PbW8lIcs,9552
diff --git a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt
deleted file mode 100644
index f6205a5f19..0000000000
--- a/pkg_resources/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-jaraco
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
deleted file mode 100644
index c865140ab2..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/METADATA
+++ /dev/null
@@ -1,64 +0,0 @@
-Metadata-Version: 2.1
-Name: jaraco.functools
-Version: 4.0.1
-Summary: Functools like those found in stdlib
-Author-email: "Jason R. Coombs" 
-Project-URL: Homepage, https://github.com/jaraco/jaraco.functools
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
-License-File: LICENSE
-Requires-Dist: more-itertools
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest >=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
-Requires-Dist: jaraco.classes ; extra == 'testing'
-Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
-   :target: https://pypi.org/project/jaraco.functools
-
-.. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
-
-.. image:: https://github.com/jaraco/jaraco.functools/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
-   :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/jaraco.functools
-   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.functools?utm_source=pypi-jaraco.functools&utm_medium=readme
-
-Additional functools in the spirit of stdlib's functools.
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more `_.
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
deleted file mode 100644
index ef3bc21e92..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/RECORD
+++ /dev/null
@@ -1,10 +0,0 @@
-jaraco.functools-4.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.functools-4.0.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.functools-4.0.1.dist-info/METADATA,sha256=i4aUaQDX-jjdEQK5wevhegyx8JyLfin2HyvaSk3FHso,2891
-jaraco.functools-4.0.1.dist-info/RECORD,,
-jaraco.functools-4.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-jaraco.functools-4.0.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/functools/__init__.py,sha256=hEAJaS2uSZRuF_JY4CxCHIYh79ZpxaPp9OiHyr9EJ1w,16642
-jaraco/functools/__init__.pyi,sha256=gk3dsgHzo5F_U74HzAvpNivFAPCkPJ1b2-yCd62dfnw,3878
-jaraco/functools/__pycache__/__init__.cpython-312.pyc,,
-jaraco/functools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
deleted file mode 100644
index f6205a5f19..0000000000
--- a/pkg_resources/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-jaraco
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/LICENSE b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/METADATA b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/METADATA
deleted file mode 100644
index 0258a380f4..0000000000
--- a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/METADATA
+++ /dev/null
@@ -1,95 +0,0 @@
-Metadata-Version: 2.1
-Name: jaraco.text
-Version: 3.12.1
-Summary: Module for text manipulation
-Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
-Project-URL: Homepage, https://github.com/jaraco/jaraco.text
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
-License-File: LICENSE
-Requires-Dist: jaraco.functools
-Requires-Dist: jaraco.context >=4.1
-Requires-Dist: autocommand
-Requires-Dist: inflect
-Requires-Dist: more-itertools
-Requires-Dist: importlib-resources ; python_version < "3.9"
-Provides-Extra: doc
-Requires-Dist: sphinx >=3.5 ; extra == 'doc'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
-Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
-Requires-Dist: furo ; extra == 'doc'
-Requires-Dist: sphinx-lint ; extra == 'doc'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
-Provides-Extra: test
-Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
-Requires-Dist: pytest-cov ; extra == 'test'
-Requires-Dist: pytest-mypy ; extra == 'test'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
-Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
-Requires-Dist: pathlib2 ; (python_version < "3.10") and extra == 'test'
-
-.. image:: https://img.shields.io/pypi/v/jaraco.text.svg
-   :target: https://pypi.org/project/jaraco.text
-
-.. image:: https://img.shields.io/pypi/pyversions/jaraco.text.svg
-
-.. image:: https://github.com/jaraco/jaraco.text/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/jaraco/jaraco.text/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. image:: https://readthedocs.org/projects/jaracotext/badge/?version=latest
-   :target: https://jaracotext.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/jaraco.text
-   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.text?utm_source=pypi-jaraco.text&utm_medium=readme
-
-
-This package provides handy routines for dealing with text, such as
-wrapping, substitution, trimming, stripping, prefix and suffix removal,
-line continuation, indentation, comment processing, identifier processing,
-values parsing, case insensitive comparison, and more. See the docs
-(linked in the badge above) for the detailed documentation and examples.
-
-Layouts
-=======
-
-One of the features of this package is the layouts module, which
-provides a simple example of translating keystrokes from one keyboard
-layout to another::
-
-    echo qwerty | python -m jaraco.text.to-dvorak
-    ',.pyf
    echo "',.pyf" | python -m jaraco.text.to-qwerty
-    qwerty
-
-Newline Reporting
-=================
-
-Need to know what newlines appear in a file?
-
-::
-
-    $ python -m jaraco.text.show-newlines README.rst
-    newline is '\n'
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more <https://tidelift.com/subscription/pkg/pypi-jaraco.text?utm_source=pypi-jaraco.text&utm_medium=referral&utm_campaign=github>`_.
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/RECORD b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/RECORD
deleted file mode 100644
index 19e2d8402a..0000000000
--- a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/RECORD
+++ /dev/null
@@ -1,20 +0,0 @@
-jaraco.text-3.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-jaraco.text-3.12.1.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-jaraco.text-3.12.1.dist-info/METADATA,sha256=AzWdm6ViMfDOPoQMfLWn2zgBQSGJScyqeN29TcuWXVI,3658
-jaraco.text-3.12.1.dist-info/RECORD,,
-jaraco.text-3.12.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jaraco.text-3.12.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-jaraco.text-3.12.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
-jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
-jaraco/text/__init__.py,sha256=Y2YUqXR_orUoDaY4SkPRe6ZZhb5HUHB_Ah9RCNsVyho,16250
-jaraco/text/__pycache__/__init__.cpython-312.pyc,,
-jaraco/text/__pycache__/layouts.cpython-312.pyc,,
-jaraco/text/__pycache__/show-newlines.cpython-312.pyc,,
-jaraco/text/__pycache__/strip-prefix.cpython-312.pyc,,
-jaraco/text/__pycache__/to-dvorak.cpython-312.pyc,,
-jaraco/text/__pycache__/to-qwerty.cpython-312.pyc,,
-jaraco/text/layouts.py,sha256=HTC8aSTLZ7uXipyOXapRMC158juecjK6RVwitfmZ9_w,643
-jaraco/text/show-newlines.py,sha256=WGQa65e8lyhb92LUOLqVn6KaCtoeVgVws6WtSRmLk6w,904
-jaraco/text/strip-prefix.py,sha256=NfVXV8JVNo6nqcuYASfMV7_y4Eo8zMQqlCOGvAnRIVw,412
-jaraco/text/to-dvorak.py,sha256=1SNcbSsvISpXXg-LnybIHHY-RUFOQr36zcHkY1pWFqw,119
-jaraco/text/to-qwerty.py,sha256=s4UMQUnPwFn_dB5uZC27BurHOQcYondBfzIpVL5pEzw,119
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/WHEEL b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt b/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt
deleted file mode 100644
index f6205a5f19..0000000000
--- a/pkg_resources/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-jaraco
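The ``Layouts`` pipeline shown in the jaraco.text README above is driven by a
pair of ``str.maketrans`` tables defined in ``layouts.py`` (whose deletion
appears later in this patch). A minimal Python equivalent of those shell
examples, assuming ``jaraco.text`` is installed from PyPI rather than taken
from the vendored copy removed here::

    # Sketch only: assumes `pip install jaraco.text`; the names match the
    # removed module's public API.
    from jaraco.text.layouts import to_dvorak, to_qwerty, translate

    print(translate('qwerty', to_dvorak))   # ',.pyf
    print(translate("',.pyf", to_qwerty))   # qwerty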
diff --git a/pkg_resources/_vendor/jaraco/context.py b/pkg_resources/_vendor/jaraco/context.py
deleted file mode 100644
index 61b27135df..0000000000
--- a/pkg_resources/_vendor/jaraco/context.py
+++ /dev/null
@@ -1,361 +0,0 @@
-from __future__ import annotations
-
-import contextlib
-import functools
-import operator
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import urllib.request
-import warnings
-from typing import Iterator
-
-
-if sys.version_info < (3, 12):
-    from backports import tarfile
-else:
-    import tarfile
-
-
-@contextlib.contextmanager
-def pushd(dir: str | os.PathLike) -> Iterator[str | os.PathLike]:
-    """
-    >>> tmp_path = getfixture('tmp_path')
-    >>> with pushd(tmp_path):
-    ...     assert os.getcwd() == os.fspath(tmp_path)
-    >>> assert os.getcwd() != os.fspath(tmp_path)
-    """
-
-    orig = os.getcwd()
-    os.chdir(dir)
-    try:
-        yield dir
-    finally:
-        os.chdir(orig)
-
-
-@contextlib.contextmanager
-def tarball(
-    url, target_dir: str | os.PathLike | None = None
-) -> Iterator[str | os.PathLike]:
-    """
-    Get a tarball, extract it, yield, then clean up.
-
-    >>> import urllib.request
-    >>> url = getfixture('tarfile_served')
-    >>> target = getfixture('tmp_path') / 'out'
-    >>> tb = tarball(url, target_dir=target)
-    >>> import pathlib
-    >>> with tb as extracted:
-    ...     contents = pathlib.Path(extracted, 'contents.txt').read_text(encoding='utf-8')
-    >>> assert not os.path.exists(extracted)
-    """
-    if target_dir is None:
-        target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
-    # In the tar command, use --strip-components=1 to strip the first path
-    #  and then use -C to cause the files to be extracted to {target_dir}.
-    #  This ensures that we always know where the files were extracted.
-    os.mkdir(target_dir)
-    try:
-        req = urllib.request.urlopen(url)
-        with tarfile.open(fileobj=req, mode='r|*') as tf:
-            tf.extractall(path=target_dir, filter=strip_first_component)
-        yield target_dir
-    finally:
-        shutil.rmtree(target_dir)
-
-
-def strip_first_component(
-    member: tarfile.TarInfo,
-    path,
-) -> tarfile.TarInfo:
-    _, member.name = member.name.split('/', 1)
-    return member
-
-
-def _compose(*cmgrs):
-    """
-    Compose any number of dependent context managers into a single one.
-
-    The last, innermost context manager may take arbitrary arguments, but
-    each successive context manager should accept the result from the
-    previous as a single parameter.
-
-    Like :func:`jaraco.functools.compose`, behavior works from right to
-    left, so the context manager should be indicated from outermost to
-    innermost.
-
-    Example, to create a context manager to change to a temporary
-    directory:
-
-    >>> temp_dir_as_cwd = _compose(pushd, temp_dir)
-    >>> with temp_dir_as_cwd() as dir:
-    ...     assert os.path.samefile(os.getcwd(), dir)
-    """
-
-    def compose_two(inner, outer):
-        def composed(*args, **kwargs):
-            with inner(*args, **kwargs) as saved, outer(saved) as res:
-                yield res
-
-        return contextlib.contextmanager(composed)
-
-    return functools.reduce(compose_two, reversed(cmgrs))
-
-
-tarball_cwd = _compose(pushd, tarball)
-
-
-@contextlib.contextmanager
-def tarball_context(*args, **kwargs):
-    warnings.warn(
-        "tarball_context is deprecated. Use tarball or tarball_cwd instead.",
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    pushd_ctx = kwargs.pop('pushd', pushd)
-    with tarball(*args, **kwargs) as tball, pushd_ctx(tball) as dir:
-        yield dir
-
-
-def infer_compression(url):
-    """
-    Given a URL or filename, infer the compression code for tar.
-
-    >>> infer_compression('http://foo/bar.tar.gz')
-    'z'
-    >>> infer_compression('http://foo/bar.tgz')
-    'z'
-    >>> infer_compression('file.bz')
-    'j'
-    >>> infer_compression('file.xz')
-    'J'
-    """
-    warnings.warn(
-        "infer_compression is deprecated with no replacement",
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    # cheat and just assume it's the last two characters
-    compression_indicator = url[-2:]
-    mapping = dict(gz='z', bz='j', xz='J')
-    # Assume 'z' (gzip) if no match
-    return mapping.get(compression_indicator, 'z')
-
-
-@contextlib.contextmanager
-def temp_dir(remover=shutil.rmtree):
-    """
-    Create a temporary directory context. Pass a custom remover
-    to override the removal behavior.
-
-    >>> import pathlib
-    >>> with temp_dir() as the_dir:
-    ...     assert os.path.isdir(the_dir)
-    ...     _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents', encoding='utf-8')
-    >>> assert not os.path.exists(the_dir)
-    """
-    temp_dir = tempfile.mkdtemp()
-    try:
-        yield temp_dir
-    finally:
-        remover(temp_dir)
-
-
-@contextlib.contextmanager
-def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
-    """
-    Check out the repo indicated by url.
-
-    If dest_ctx is supplied, it should be a context manager
-    to yield the target directory for the check out.
-    """
-    exe = 'git' if 'git' in url else 'hg'
-    with dest_ctx() as repo_dir:
-        cmd = [exe, 'clone', url, repo_dir]
-        if branch:
-            cmd.extend(['--branch', branch])
-        devnull = open(os.path.devnull, 'w')
-        stdout = devnull if quiet else None
-        subprocess.check_call(cmd, stdout=stdout)
-        yield repo_dir
-
-
-def null():
-    """
-    A null context suitable to stand in for a meaningful context.
-
-    >>> with null() as value:
-    ...     assert value is None
-
-    This context is most useful when dealing with two or more code
-    branches but only some need a context. Wrap the others in a null
-    context to provide symmetry across all options.
-    """
-    warnings.warn(
-        "null is deprecated. Use contextlib.nullcontext",
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    return contextlib.nullcontext()
-
-
-class ExceptionTrap:
-    """
-    A context manager that will catch certain exceptions and provide an
-    indication they occurred.
-
-    >>> with ExceptionTrap() as trap:
-    ...     raise Exception()
-    >>> bool(trap)
-    True
-
-    >>> with ExceptionTrap() as trap:
-    ...     pass
-    >>> bool(trap)
-    False
-
-    >>> with ExceptionTrap(ValueError) as trap:
-    ...     raise ValueError("1 + 1 is not 3")
-    >>> bool(trap)
-    True
-    >>> trap.value
-    ValueError('1 + 1 is not 3')
-    >>> trap.tb
-    <traceback object at ...>
-
-    >>> with ExceptionTrap(ValueError) as trap:
-    ...     raise Exception()
-    Traceback (most recent call last):
-    ...
-    Exception
-
-    >>> bool(trap)
-    False
-    """
-
-    exc_info = None, None, None
-
-    def __init__(self, exceptions=(Exception,)):
-        self.exceptions = exceptions
-
-    def __enter__(self):
-        return self
-
-    @property
-    def type(self):
-        return self.exc_info[0]
-
-    @property
-    def value(self):
-        return self.exc_info[1]
-
-    @property
-    def tb(self):
-        return self.exc_info[2]
-
-    def __exit__(self, *exc_info):
-        type = exc_info[0]
-        matches = type and issubclass(type, self.exceptions)
-        if matches:
-            self.exc_info = exc_info
-        return matches
-
-    def __bool__(self):
-        return bool(self.type)
-
-    def raises(self, func, *, _test=bool):
-        """
-        Wrap func and replace the result with the truth
-        value of the trap (True if an exception occurred).
-
-        First, give the decorator an alias to support Python 3.8
-        syntax.
-
-        >>> raises = ExceptionTrap(ValueError).raises
-
-        Now decorate a function that always fails.
-
-        >>> @raises
-        ... def fail():
-        ...     raise ValueError('failed')
-        >>> fail()
-        True
-        """
-
-        @functools.wraps(func)
-        def wrapper(*args, **kwargs):
-            with ExceptionTrap(self.exceptions) as trap:
-                func(*args, **kwargs)
-            return _test(trap)
-
-        return wrapper
-
-    def passes(self, func):
-        """
-        Wrap func and replace the result with the truth
-        value of the trap (True if no exception).
-
-        First, give the decorator an alias to support Python 3.8
-        syntax.
-
-        >>> passes = ExceptionTrap(ValueError).passes
-
-        Now decorate a function that always fails.
-
-        >>> @passes
-        ... def fail():
-        ...     raise ValueError('failed')
-
-        >>> fail()
-        False
-        """
-        return self.raises(func, _test=operator.not_)
-
-
-class suppress(contextlib.suppress, contextlib.ContextDecorator):
-    """
-    A version of contextlib.suppress with decorator support.
-
-    >>> @suppress(KeyError)
-    ... def key_error():
-    ...     {}['']
-    >>> key_error()
-    """
-
-
-class on_interrupt(contextlib.ContextDecorator):
-    """
-    Replace a KeyboardInterrupt with SystemExit(1)
-
-    >>> def do_interrupt():
-    ...     raise KeyboardInterrupt()
-    >>> on_interrupt('error')(do_interrupt)()
-    Traceback (most recent call last):
-    ...
-    SystemExit: 1
-    >>> on_interrupt('error', code=255)(do_interrupt)()
-    Traceback (most recent call last):
-    ...
-    SystemExit: 255
-    >>> on_interrupt('suppress')(do_interrupt)()
-    >>> with __import__('pytest').raises(KeyboardInterrupt):
-    ...     on_interrupt('ignore')(do_interrupt)()
-    """
-
-    def __init__(self, action='error', /, code=1):
-        self.action = action
-        self.code = code
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exctype, excinst, exctb):
-        if exctype is not KeyboardInterrupt or self.action == 'ignore':
-            return
-        elif self.action == 'error':
-            raise SystemExit(self.code) from excinst
-        return self.action == 'suppress'
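The deleted ``jaraco/context.py`` above is dominated by ``ExceptionTrap``,
which records whether (and which) exception was raised inside its block
rather than letting it propagate. A minimal usage sketch, assuming the
``jaraco.context`` package from PyPI, which exposes the same API as the
vendored copy removed here::

    # Sketch only: assumes `pip install jaraco.context`.
    from jaraco.context import ExceptionTrap

    with ExceptionTrap(ValueError) as trap:
        int('not a number')  # raises ValueError; the trap records it

    assert bool(trap)               # truthy because a trapped exception fired
    assert trap.type is ValueError  # exc_info is saved on the trap
    print(repr(trap.value))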
diff --git a/pkg_resources/_vendor/jaraco/functools/__init__.py b/pkg_resources/_vendor/jaraco/functools/__init__.py
deleted file mode 100644
index ca6c22fa9b..0000000000
--- a/pkg_resources/_vendor/jaraco/functools/__init__.py
+++ /dev/null
@@ -1,633 +0,0 @@
-import collections.abc
-import functools
-import inspect
-import itertools
-import operator
-import time
-import types
-import warnings
-
-import more_itertools
-
-
-def compose(*funcs):
-    """
-    Compose any number of unary functions into a single unary function.
-
-    >>> import textwrap
-    >>> expected = str.strip(textwrap.dedent(compose.__doc__))
-    >>> strip_and_dedent = compose(str.strip, textwrap.dedent)
-    >>> strip_and_dedent(compose.__doc__) == expected
-    True
-
-    Compose also allows the innermost function to take arbitrary arguments.
-
-    >>> round_three = lambda x: round(x, ndigits=3)
-    >>> f = compose(round_three, int.__truediv__)
-    >>> [f(3*x, x+1) for x in range(1,10)]
-    [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7]
-    """
-
-    def compose_two(f1, f2):
-        return lambda *args, **kwargs: f1(f2(*args, **kwargs))
-
-    return functools.reduce(compose_two, funcs)
-
-
-def once(func):
-    """
-    Decorate func so it's only ever called the first time.
-
-    This decorator can ensure that an expensive or non-idempotent function
-    will not be expensive on subsequent calls and is idempotent.
-
-    >>> add_three = once(lambda a: a+3)
-    >>> add_three(3)
-    6
-    >>> add_three(9)
-    6
-    >>> add_three('12')
-    6
-
-    To reset the stored value, simply clear the property ``saved_result``.
-
-    >>> del add_three.saved_result
-    >>> add_three(9)
-    12
-    >>> add_three(8)
-    12
-
-    Or invoke 'reset()' on it.
-
-    >>> add_three.reset()
-    >>> add_three(-3)
-    0
-    >>> add_three(0)
-    0
-    """
-
-    @functools.wraps(func)
-    def wrapper(*args, **kwargs):
-        if not hasattr(wrapper, 'saved_result'):
-            wrapper.saved_result = func(*args, **kwargs)
-        return wrapper.saved_result
-
-    wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result')
-    return wrapper
-
-
-def method_cache(method, cache_wrapper=functools.lru_cache()):
-    """
-    Wrap lru_cache to support storing the cache data in the object instances.
-
-    Abstracts the common paradigm where the method explicitly saves an
-    underscore-prefixed protected property on first call and returns that
-    subsequently.
-
-    >>> class MyClass:
-    ...     calls = 0
-    ...
-    ...     @method_cache
-    ...     def method(self, value):
-    ...         self.calls += 1
-    ...         return value
-
-    >>> a = MyClass()
-    >>> a.method(3)
-    3
-    >>> for x in range(75):
-    ...     res = a.method(x)
-    >>> a.calls
-    75
-
-    Note that the apparent behavior will be exactly like that of lru_cache
-    except that the cache is stored on each instance, so values in one
-    instance will not flush values from another, and when an instance is
-    deleted, so are the cached values for that instance.
-
-    >>> b = MyClass()
-    >>> for x in range(35):
-    ...     res = b.method(x)
-    >>> b.calls
-    35
-    >>> a.method(0)
-    0
-    >>> a.calls
-    75
-
-    Note that if method had been decorated with ``functools.lru_cache()``,
-    a.calls would have been 76 (due to the cached value of 0 having been
-    flushed by the 'b' instance).
-
-    Clear the cache with ``.cache_clear()``
-
-    >>> a.method.cache_clear()
-
-    Same for a method that hasn't yet been called.
-
-    >>> c = MyClass()
-    >>> c.method.cache_clear()
-
-    Another cache wrapper may be supplied:
-
-    >>> cache = functools.lru_cache(maxsize=2)
-    >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
-    >>> a = MyClass()
-    >>> a.method2()
-    3
-
-    Caution - do not subsequently wrap the method with another decorator, such
-    as ``@property``, which changes the semantics of the function.
-
-    See also
-    http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
-    for another implementation and additional justification.
-    """
-
-    def wrapper(self, *args, **kwargs):
-        # it's the first call, replace the method with a cached, bound method
-        bound_method = types.MethodType(method, self)
-        cached_method = cache_wrapper(bound_method)
-        setattr(self, method.__name__, cached_method)
-        return cached_method(*args, **kwargs)
-
-    # Support cache clear even before cache has been created.
-    wrapper.cache_clear = lambda: None
-
-    return _special_method_cache(method, cache_wrapper) or wrapper
-
-
-def _special_method_cache(method, cache_wrapper):
-    """
-    Because Python treats special methods differently, it's not
-    possible to use instance attributes to implement the cached
-    methods.
-
-    Instead, install the wrapper method under a different name
-    and return a simple proxy to that wrapper.
-
-    https://github.com/jaraco/jaraco.functools/issues/5
-    """
-    name = method.__name__
-    special_names = '__getattr__', '__getitem__'
-
-    if name not in special_names:
-        return None
-
-    wrapper_name = '__cached' + name
-
-    def proxy(self, /, *args, **kwargs):
-        if wrapper_name not in vars(self):
-            bound = types.MethodType(method, self)
-            cache = cache_wrapper(bound)
-            setattr(self, wrapper_name, cache)
-        else:
-            cache = getattr(self, wrapper_name)
-        return cache(*args, **kwargs)
-
-    return proxy
-
-
-def apply(transform):
-    """
-    Decorate a function with a transform function that is
-    invoked on results returned from the decorated function.
-
-    >>> @apply(reversed)
-    ... def get_numbers(start):
-    ...     "doc for get_numbers"
-    ...     return range(start, start+3)
-    >>> list(get_numbers(4))
-    [6, 5, 4]
-    >>> get_numbers.__doc__
-    'doc for get_numbers'
-    """
-
-    def wrap(func):
-        return functools.wraps(func)(compose(transform, func))
-
-    return wrap
-
-
-def result_invoke(action):
-    r"""
-    Decorate a function with an action function that is
-    invoked on the results returned from the decorated
-    function (for its side effect), then return the original
-    result.
-
-    >>> @result_invoke(print)
-    ... def add_two(a, b):
-    ...     return a + b
-    >>> x = add_two(2, 3)
-    5
-    >>> x
-    5
-    """
-
-    def wrap(func):
-        @functools.wraps(func)
-        def wrapper(*args, **kwargs):
-            result = func(*args, **kwargs)
-            action(result)
-            return result
-
-        return wrapper
-
-    return wrap
-
-
-def invoke(f, /, *args, **kwargs):
-    """
-    Call a function for its side effect after initialization.
-
-    The benefit of using the decorator instead of simply invoking a function
-    after defining it is that it makes explicit the author's intent for the
-    function to be called immediately. Whereas if one simply calls the
-    function immediately, it's less obvious if that was intentional or
-    incidental. It also avoids repeating the name - the two actions, defining
-    the function and calling it immediately are modeled separately, but linked
-    by the decorator construct.
-
-    The benefit of having a function construct (as opposed to just invoking some
-    behavior inline) is to serve as a scope in which the behavior occurs. It
-    avoids polluting the global namespace with local variables, provides an
-    anchor on which to attach documentation (docstring), keeps the behavior
-    logically separated (instead of conceptually separated or not separated at
-    all), and provides potential to re-use the behavior for testing or other
-    purposes.
-
-    This function is named as a pithy way to communicate, "call this function
-    primarily for its side effect", or "while defining this function, also
-    take it aside and call it". It exists because there's no Python construct
-    for "define and call" (nor should there be, as decorators serve this need
-    just fine). The behavior happens immediately and synchronously.
-
-    >>> @invoke
-    ... def func(): print("called")
-    called
-    >>> func()
-    called
-
-    Use functools.partial to pass parameters to the initial call
-
-    >>> @functools.partial(invoke, name='bingo')
-    ... def func(name): print('called with', name)
-    called with bingo
-    """
-    f(*args, **kwargs)
-    return f
-
-
-class Throttler:
-    """Rate-limit a function (or other callable)."""
-
-    def __init__(self, func, max_rate=float('Inf')):
-        if isinstance(func, Throttler):
-            func = func.func
-        self.func = func
-        self.max_rate = max_rate
-        self.reset()
-
-    def reset(self):
-        self.last_called = 0
-
-    def __call__(self, *args, **kwargs):
-        self._wait()
-        return self.func(*args, **kwargs)
-
-    def _wait(self):
-        """Ensure at least 1/max_rate seconds from last call."""
-        elapsed = time.time() - self.last_called
-        must_wait = 1 / self.max_rate - elapsed
-        time.sleep(max(0, must_wait))
-        self.last_called = time.time()
-
-    def __get__(self, obj, owner=None):
-        return first_invoke(self._wait, functools.partial(self.func, obj))
-
-
-def first_invoke(func1, func2):
-    """
-    Return a function that when invoked will invoke func1 without
-    any parameters (for its side effect) and then invoke func2
-    with whatever parameters were passed, returning its result.
-    """
-
-    def wrapper(*args, **kwargs):
-        func1()
-        return func2(*args, **kwargs)
-
-    return wrapper
-
-
-method_caller = first_invoke(
-    lambda: warnings.warn(
-        '`jaraco.functools.method_caller` is deprecated, '
-        'use `operator.methodcaller` instead',
-        DeprecationWarning,
-        stacklevel=3,
-    ),
-    operator.methodcaller,
-)
-
-
-def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
-    """
-    Given a callable func, trap the indicated exceptions
-    for up to 'retries' times, invoking cleanup on the
-    exception. On the final attempt, allow any exceptions
-    to propagate.
-    """
-    attempts = itertools.count() if retries == float('inf') else range(retries)
-    for _ in attempts:
-        try:
-            return func()
-        except trap:
-            cleanup()
-
-    return func()
-
-
-def retry(*r_args, **r_kwargs):
-    """
-    Decorator wrapper for retry_call. Accepts arguments to retry_call
-    except func and then returns a decorator for the decorated function.
-
-    Ex:
-
-    >>> @retry(retries=3)
-    ... def my_func(a, b):
-    ...     "this is my funk"
-    ...     print(a, b)
-    >>> my_func.__doc__
-    'this is my funk'
-    """
-
-    def decorate(func):
-        @functools.wraps(func)
-        def wrapper(*f_args, **f_kwargs):
-            bound = functools.partial(func, *f_args, **f_kwargs)
-            return retry_call(bound, *r_args, **r_kwargs)
-
-        return wrapper
-
-    return decorate
-
-
-def print_yielded(func):
-    """
-    Convert a generator into a function that prints all yielded elements.
-
-    >>> @print_yielded
-    ... def x():
-    ...     yield 3; yield None
-    >>> x()
-    3
-    None
-    """
-    print_all = functools.partial(map, print)
-    print_results = compose(more_itertools.consume, print_all, func)
-    return functools.wraps(func)(print_results)
-
-
-def pass_none(func):
-    """
-    Wrap func so it's not called if its first param is None.
-
-    >>> print_text = pass_none(print)
-    >>> print_text('text')
-    text
-    >>> print_text(None)
-    """
-
-    @functools.wraps(func)
-    def wrapper(param, /, *args, **kwargs):
-        if param is not None:
-            return func(param, *args, **kwargs)
-        return None
-
-    return wrapper
-
-
-def assign_params(func, namespace):
-    """
-    Assign parameters from namespace where func solicits.
-
-    >>> def func(x, y=3):
-    ...     print(x, y)
-    >>> assigned = assign_params(func, dict(x=2, z=4))
-    >>> assigned()
-    2 3
-
-    The usual errors are raised if a function doesn't receive
-    its required parameters:
-
-    >>> assigned = assign_params(func, dict(y=3, z=4))
-    >>> assigned()
-    Traceback (most recent call last):
-    TypeError: func() ...argument...
-
-    It even works on methods:
-
-    >>> class Handler:
-    ...     def meth(self, arg):
-    ...         print(arg)
-    >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))()
-    crystal
-    """
-    sig = inspect.signature(func)
-    params = sig.parameters.keys()
-    call_ns = {k: namespace[k] for k in params if k in namespace}
-    return functools.partial(func, **call_ns)
-
-
-def save_method_args(method):
-    """
-    Wrap a method such that when it is called, the args and kwargs are
-    saved on the method.
-
-    >>> class MyClass:
-    ...     @save_method_args
-    ...     def method(self, a, b):
-    ...         print(a, b)
-    >>> my_ob = MyClass()
-    >>> my_ob.method(1, 2)
-    1 2
-    >>> my_ob._saved_method.args
-    (1, 2)
-    >>> my_ob._saved_method.kwargs
-    {}
-    >>> my_ob.method(a=3, b='foo')
-    3 foo
-    >>> my_ob._saved_method.args
-    ()
-    >>> my_ob._saved_method.kwargs == dict(a=3, b='foo')
-    True
-
-    The arguments are stored on the instance, allowing
-    different instances to save different args.
-
-    >>> your_ob = MyClass()
-    >>> your_ob.method({str('x'): 3}, b=[4])
-    {'x': 3} [4]
-    >>> your_ob._saved_method.args
-    ({'x': 3},)
-    >>> my_ob._saved_method.args
-    ()
-    """
-    args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
-
-    @functools.wraps(method)
-    def wrapper(self, /, *args, **kwargs):
-        attr_name = '_saved_' + method.__name__
-        attr = args_and_kwargs(args, kwargs)
-        setattr(self, attr_name, attr)
-        return method(self, *args, **kwargs)
-
-    return wrapper
-
-
-def except_(*exceptions, replace=None, use=None):
-    """
-    Replace the indicated exceptions, if raised, with the indicated
-    literal replacement or evaluated expression (if present).
-
-    >>> safe_int = except_(ValueError)(int)
-    >>> safe_int('five')
-    >>> safe_int('5')
-    5
-
-    Specify a literal replacement with ``replace``.
-
-    >>> safe_int_r = except_(ValueError, replace=0)(int)
-    >>> safe_int_r('five')
-    0
-
-    Provide an expression to ``use`` to pass through particular parameters.
-
-    >>> safe_int_pt = except_(ValueError, use='args[0]')(int)
-    >>> safe_int_pt('five')
-    'five'
-
-    """
-
-    def decorate(func):
-        @functools.wraps(func)
-        def wrapper(*args, **kwargs):
-            try:
-                return func(*args, **kwargs)
-            except exceptions:
-                try:
-                    return eval(use)
-                except TypeError:
-                    return replace
-
-        return wrapper
-
-    return decorate
-
-
-def identity(x):
-    """
-    Return the argument.
-
-    >>> o = object()
-    >>> identity(o) is o
-    True
-    """
-    return x
-
-
-def bypass_when(check, *, _op=identity):
-    """
-    Decorate a function to return its parameter when ``check``.
-
-    >>> bypassed = []  # False
-
-    >>> @bypass_when(bypassed)
-    ... def double(x):
-    ...     return x * 2
-    >>> double(2)
-    4
-    >>> bypassed[:] = [object()]  # True
-    >>> double(2)
-    2
-    """
-
-    def decorate(func):
-        @functools.wraps(func)
-        def wrapper(param, /):
-            return param if _op(check) else func(param)
-
-        return wrapper
-
-    return decorate
-
-
-def bypass_unless(check):
-    """
-    Decorate a function to return its parameter unless ``check``.
-
-    >>> enabled = [object()]  # True
-
-    >>> @bypass_unless(enabled)
-    ... def double(x):
-    ...     return x * 2
-    >>> double(2)
-    4
-    >>> del enabled[:]  # False
-    >>> double(2)
-    2
-    """
-    return bypass_when(check, _op=operator.not_)
-
-
-@functools.singledispatch
-def _splat_inner(args, func):
-    """Splat args to func."""
-    return func(*args)
-
-
-@_splat_inner.register
-def _(args: collections.abc.Mapping, func):
-    """Splat kargs to func as kwargs."""
-    return func(**args)
-
-
-def splat(func):
-    """
-    Wrap func to expect its parameters to be passed positionally in a tuple.
-
-    Has a similar effect to that of ``itertools.starmap`` over
-    simple ``map``.
-
-    >>> pairs = [(-1, 1), (0, 2)]
-    >>> more_itertools.consume(itertools.starmap(print, pairs))
-    -1 1
-    0 2
-    >>> more_itertools.consume(map(splat(print), pairs))
-    -1 1
-    0 2
-
-    The approach generalizes to other iterators that don't have a "star"
-    equivalent, such as a "starfilter".
-
-    >>> list(filter(splat(operator.add), pairs))
-    [(0, 2)]
-
-    Splat also accepts a mapping argument.
-
-    >>> def is_nice(msg, code):
-    ...     return "smile" in msg or code == 0
-    >>> msgs = [
-    ...     dict(msg='smile!', code=20),
-    ...     dict(msg='error :(', code=1),
-    ...     dict(msg='unknown', code=0),
-    ... ]
-    >>> for msg in filter(splat(is_nice), msgs):
-    ...     print(msg)
-    {'msg': 'smile!', 'code': 20}
-    {'msg': 'unknown', 'code': 0}
-    """
-    return functools.wraps(func)(functools.partial(_splat_inner, func=func))
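The distinguishing feature of the removed ``method_cache`` (versus a bare
``functools.lru_cache``) is that the cache lives on each instance, so
instances neither share entries nor evict one another, and a cache dies with
its instance. A sketch of that behavior, assuming ``jaraco.functools`` is
installed from PyPI::

    # Sketch only: assumes `pip install jaraco.functools`.
    from jaraco.functools import method_cache

    class Fib:
        @method_cache
        def value(self, n):
            return n if n < 2 else self.value(n - 1) + self.value(n - 2)

    a, b = Fib(), Fib()
    print(a.value(30))     # 832040, memoized on `a` after the first call
    print(b.value(30))     # recomputed: `b` holds its own independent cache
    a.value.cache_clear()  # clears only `a`'s cache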
diff --git a/pkg_resources/_vendor/jaraco/functools/__init__.pyi b/pkg_resources/_vendor/jaraco/functools/__init__.pyi
deleted file mode 100644
index 19191bf93e..0000000000
--- a/pkg_resources/_vendor/jaraco/functools/__init__.pyi
+++ /dev/null
@@ -1,125 +0,0 @@
-from collections.abc import Callable, Hashable, Iterator
-from functools import partial
-from operator import methodcaller
-import sys
-from typing import (
-    Any,
-    Generic,
-    Protocol,
-    TypeVar,
-    overload,
-)
-
-if sys.version_info >= (3, 10):
-    from typing import Concatenate, ParamSpec
-else:
-    from typing_extensions import Concatenate, ParamSpec
-
-_P = ParamSpec('_P')
-_R = TypeVar('_R')
-_T = TypeVar('_T')
-_R1 = TypeVar('_R1')
-_R2 = TypeVar('_R2')
-_V = TypeVar('_V')
-_S = TypeVar('_S')
-_R_co = TypeVar('_R_co', covariant=True)
-
-class _OnceCallable(Protocol[_P, _R]):
-    saved_result: _R
-    reset: Callable[[], None]
-    def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
-
-class _ProxyMethodCacheWrapper(Protocol[_R_co]):
-    cache_clear: Callable[[], None]
-    def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
-
-class _MethodCacheWrapper(Protocol[_R_co]):
-    def cache_clear(self) -> None: ...
-    def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
-
-# `compose()` overloads below will cover most use cases.
-
-@overload
-def compose(
-    __func1: Callable[[_R], _T],
-    __func2: Callable[_P, _R],
-    /,
-) -> Callable[_P, _T]: ...
-@overload
-def compose(
-    __func1: Callable[[_R], _T],
-    __func2: Callable[[_R1], _R],
-    __func3: Callable[_P, _R1],
-    /,
-) -> Callable[_P, _T]: ...
-@overload
-def compose(
-    __func1: Callable[[_R], _T],
-    __func2: Callable[[_R2], _R],
-    __func3: Callable[[_R1], _R2],
-    __func4: Callable[_P, _R1],
-    /,
-) -> Callable[_P, _T]: ...
-def once(func: Callable[_P, _R]) -> _OnceCallable[_P, _R]: ...
-def method_cache(
-    method: Callable[..., _R],
-    cache_wrapper: Callable[[Callable[..., _R]], _MethodCacheWrapper[_R]] = ...,
-) -> _MethodCacheWrapper[_R] | _ProxyMethodCacheWrapper[_R]: ...
-def apply(
-    transform: Callable[[_R], _T]
-) -> Callable[[Callable[_P, _R]], Callable[_P, _T]]: ...
-def result_invoke(
-    action: Callable[[_R], Any]
-) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: ...
-def invoke(
-    f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
-) -> Callable[_P, _R]: ...
-
-class Throttler(Generic[_R]):
-    last_called: float
-    func: Callable[..., _R]
-    max_rate: float
-    def __init__(
-        self, func: Callable[..., _R] | Throttler[_R], max_rate: float = ...
-    ) -> None: ...
-    def reset(self) -> None: ...
-    def __call__(self, *args: Any, **kwargs: Any) -> _R: ...
-    def __get__(self, obj: Any, owner: type[Any] | None = ...) -> Callable[..., _R]: ...
-
-def first_invoke(
-    func1: Callable[..., Any], func2: Callable[_P, _R]
-) -> Callable[_P, _R]: ...
-
-method_caller: Callable[..., methodcaller]
-
-def retry_call(
-    func: Callable[..., _R],
-    cleanup: Callable[..., None] = ...,
-    retries: int | float = ...,
-    trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
-) -> _R: ...
-def retry(
-    cleanup: Callable[..., None] = ...,
-    retries: int | float = ...,
-    trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
-) -> Callable[[Callable[..., _R]], Callable[..., _R]]: ...
-def print_yielded(func: Callable[_P, Iterator[Any]]) -> Callable[_P, None]: ...
-def pass_none(
-    func: Callable[Concatenate[_T, _P], _R]
-) -> Callable[Concatenate[_T, _P], _R]: ...
-def assign_params(
-    func: Callable[..., _R], namespace: dict[str, Any]
-) -> partial[_R]: ...
-def save_method_args(
-    method: Callable[Concatenate[_S, _P], _R]
-) -> Callable[Concatenate[_S, _P], _R]: ...
-def except_(
-    *exceptions: type[BaseException], replace: Any = ..., use: Any = ...
-) -> Callable[[Callable[_P, Any]], Callable[_P, Any]]: ...
-def identity(x: _T) -> _T: ...
-def bypass_when(
-    check: _V, *, _op: Callable[[_V], Any] = ...
-) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
-def bypass_unless(
-    check: Any,
-) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
diff --git a/pkg_resources/_vendor/jaraco/functools/py.typed b/pkg_resources/_vendor/jaraco/functools/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt b/pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt
deleted file mode 100644
index 986f944b60..0000000000
--- a/pkg_resources/_vendor/jaraco/text/Lorem ipsum.txt	
+++ /dev/null
@@ -1,2 +0,0 @@
-Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
-Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus magna felis sollicitudin mauris. Integer in mauris eu nibh euismod gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, molestie eu, feugiat in, orci. In hac habitasse platea dictumst.
diff --git a/pkg_resources/_vendor/jaraco/text/__init__.py b/pkg_resources/_vendor/jaraco/text/__init__.py
deleted file mode 100644
index 0fabd0c3f0..0000000000
--- a/pkg_resources/_vendor/jaraco/text/__init__.py
+++ /dev/null
@@ -1,624 +0,0 @@
-import re
-import itertools
-import textwrap
-import functools
-
-try:
-    from importlib.resources import files  # type: ignore
-except ImportError:  # pragma: nocover
-    from importlib_resources import files  # type: ignore
-
-from jaraco.functools import compose, method_cache
-from jaraco.context import ExceptionTrap
-
-
-def substitution(old, new):
-    """
-    Return a function that will perform a substitution on a string
-    """
-    return lambda s: s.replace(old, new)
-
-
-def multi_substitution(*substitutions):
-    """
-    Take a sequence of pairs specifying substitutions, and create
-    a function that performs those substitutions.
-
-    >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
-    'baz'
-    """
-    substitutions = itertools.starmap(substitution, substitutions)
-    # compose function applies last function first, so reverse the
-    #  substitutions to get the expected order.
-    substitutions = reversed(tuple(substitutions))
-    return compose(*substitutions)
-
-
-class FoldedCase(str):
-    """
-    A case insensitive string class; behaves just like str
-    except compares equal when the only variation is case.
-
-    >>> s = FoldedCase('hello world')
-
-    >>> s == 'Hello World'
-    True
-
-    >>> 'Hello World' == s
-    True
-
-    >>> s != 'Hello World'
-    False
-
-    >>> s.index('O')
-    4
-
-    >>> s.split('O')
-    ['hell', ' w', 'rld']
-
-    >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
-    ['alpha', 'Beta', 'GAMMA']
-
-    Sequence membership is straightforward.
-
-    >>> "Hello World" in [s]
-    True
-    >>> s in ["Hello World"]
-    True
-
-    Allows testing for set inclusion, but candidate and elements
-    must both be folded.
-
-    >>> FoldedCase("Hello World") in {s}
-    True
-    >>> s in {FoldedCase("Hello World")}
-    True
-
-    String inclusion works as long as the FoldedCase object
-    is on the right.
-
-    >>> "hello" in FoldedCase("Hello World")
-    True
-
-    But not if the FoldedCase object is on the left:
-
-    >>> FoldedCase('hello') in 'Hello World'
-    False
-
-    In that case, use ``in_``:
-
-    >>> FoldedCase('hello').in_('Hello World')
-    True
-
-    >>> FoldedCase('hello') > FoldedCase('Hello')
-    False
-
-    >>> FoldedCase('ß') == FoldedCase('ss')
-    True
-    """
-
-    def __lt__(self, other):
-        return self.casefold() < other.casefold()
-
-    def __gt__(self, other):
-        return self.casefold() > other.casefold()
-
-    def __eq__(self, other):
-        return self.casefold() == other.casefold()
-
-    def __ne__(self, other):
-        return self.casefold() != other.casefold()
-
-    def __hash__(self):
-        return hash(self.casefold())
-
-    def __contains__(self, other):
-        return super().casefold().__contains__(other.casefold())
-
-    def in_(self, other):
-        "Does self appear in other?"
-        return self in FoldedCase(other)
-
-    # cache casefold since it's likely to be called frequently.
-    @method_cache
-    def casefold(self):
-        return super().casefold()
-
-    def index(self, sub):
-        return self.casefold().index(sub.casefold())
-
-    def split(self, splitter=' ', maxsplit=0):
-        pattern = re.compile(re.escape(splitter), re.I)
-        return pattern.split(self, maxsplit)
-
-
-# Python 3.8 compatibility
-_unicode_trap = ExceptionTrap(UnicodeDecodeError)
-
-
-@_unicode_trap.passes
-def is_decodable(value):
-    r"""
-    Return True if the supplied value is decodable (using the default
-    encoding).
-
-    >>> is_decodable(b'\xff')
-    False
-    >>> is_decodable(b'\x32')
-    True
-    """
-    value.decode()
-
-
-def is_binary(value):
-    r"""
-    Return True if the value appears to be binary (that is, it's a byte
-    string and isn't decodable).
-
-    >>> is_binary(b'\xff')
-    True
-    >>> is_binary('\xff')
-    False
-    """
-    return isinstance(value, bytes) and not is_decodable(value)
-
-
-def trim(s):
-    r"""
-    Trim something like a docstring to remove the whitespace that
-    is common due to indentation and formatting.
-
-    >>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
-    'foo = bar\n\tbar = baz'
-    """
-    return textwrap.dedent(s).strip()
-
-
-def wrap(s):
-    """
-    Wrap lines of text, retaining existing newlines as
-    paragraph markers.
-
-    >>> print(wrap(lorem_ipsum))
-    Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
-    eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad
-    minim veniam, quis nostrud exercitation ullamco laboris nisi ut
-    aliquip ex ea commodo consequat. Duis aute irure dolor in
-    reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla
-    pariatur. Excepteur sint occaecat cupidatat non proident, sunt in
-    culpa qui officia deserunt mollit anim id est laborum.
-    <BLANKLINE>
-    Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam
-    varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus
-    magna felis sollicitudin mauris. Integer in mauris eu nibh euismod
-    gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis
-    risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue,
-    eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas
-    fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla
-    a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis,
-    neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing
-    sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque
-    nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus
-    quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis,
-    molestie eu, feugiat in, orci. In hac habitasse platea dictumst.
-    """
-    paragraphs = s.splitlines()
-    wrapped = ('\n'.join(textwrap.wrap(para)) for para in paragraphs)
-    return '\n\n'.join(wrapped)
-
-
-def unwrap(s):
-    r"""
-    Given a multi-line string, return an unwrapped version.
-
-    >>> wrapped = wrap(lorem_ipsum)
-    >>> wrapped.count('\n')
-    20
-    >>> unwrapped = unwrap(wrapped)
-    >>> unwrapped.count('\n')
-    1
-    >>> print(unwrapped)
-    Lorem ipsum dolor sit amet, consectetur adipiscing ...
-    Curabitur pretium tincidunt lacus. Nulla gravida orci ...
-
-    """
-    paragraphs = re.split(r'\n\n+', s)
-    cleaned = (para.replace('\n', ' ') for para in paragraphs)
-    return '\n'.join(cleaned)
-
-
-lorem_ipsum: str = (
-    files(__name__).joinpath('Lorem ipsum.txt').read_text(encoding='utf-8')
-)
-
-
-class Splitter:
-    """object that will split a string with the given arguments for each call
-
-    >>> s = Splitter(',')
-    >>> s('hello, world, this is your, master calling')
-    ['hello', ' world', ' this is your', ' master calling']
-    """
-
-    def __init__(self, *args):
-        self.args = args
-
-    def __call__(self, s):
-        return s.split(*self.args)
-
-
-def indent(string, prefix=' ' * 4):
-    """
-    >>> indent('foo')
-    '    foo'
-    """
-    return prefix + string
-
-
-class WordSet(tuple):
-    """
-    Given an identifier, return the words that identifier represents,
-    whether in camel case, underscore-separated, etc.
-
-    >>> WordSet.parse("camelCase")
-    ('camel', 'Case')
-
-    >>> WordSet.parse("under_sep")
-    ('under', 'sep')
-
-    Acronyms should be retained
-
-    >>> WordSet.parse("firstSNL")
-    ('first', 'SNL')
-
-    >>> WordSet.parse("you_and_I")
-    ('you', 'and', 'I')
-
-    >>> WordSet.parse("A simple test")
-    ('A', 'simple', 'test')
-
-    Multiple caps should not interfere with the first cap of another word.
-
-    >>> WordSet.parse("myABCClass")
-    ('my', 'ABC', 'Class')
-
-    The result is a WordSet, providing access to other forms.
-
-    >>> WordSet.parse("myABCClass").underscore_separated()
-    'my_ABC_Class'
-
-    >>> WordSet.parse('a-command').camel_case()
-    'ACommand'
-
-    >>> WordSet.parse('someIdentifier').lowered().space_separated()
-    'some identifier'
-
-    Slices of the result should return another WordSet.
-
-    >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
-    'out_of_context'
-
-    >>> WordSet.from_class_name(WordSet()).lowered().space_separated()
-    'word set'
-
-    >>> example = WordSet.parse('figured it out')
-    >>> example.headless_camel_case()
-    'figuredItOut'
-    >>> example.dash_separated()
-    'figured-it-out'
-
-    """
-
-    _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')
-
-    def capitalized(self):
-        return WordSet(word.capitalize() for word in self)
-
-    def lowered(self):
-        return WordSet(word.lower() for word in self)
-
-    def camel_case(self):
-        return ''.join(self.capitalized())
-
-    def headless_camel_case(self):
-        words = iter(self)
-        first = next(words).lower()
-        new_words = itertools.chain((first,), WordSet(words).camel_case())
-        return ''.join(new_words)
-
-    def underscore_separated(self):
-        return '_'.join(self)
-
-    def dash_separated(self):
-        return '-'.join(self)
-
-    def space_separated(self):
-        return ' '.join(self)
-
-    def trim_right(self, item):
-        """
-        Remove the item from the end of the set.
-
-        >>> WordSet.parse('foo bar').trim_right('foo')
-        ('foo', 'bar')
-        >>> WordSet.parse('foo bar').trim_right('bar')
-        ('foo',)
-        >>> WordSet.parse('').trim_right('bar')
-        ()
-        """
-        return self[:-1] if self and self[-1] == item else self
-
-    def trim_left(self, item):
-        """
-        Remove the item from the beginning of the set.
-
-        >>> WordSet.parse('foo bar').trim_left('foo')
-        ('bar',)
-        >>> WordSet.parse('foo bar').trim_left('bar')
-        ('foo', 'bar')
-        >>> WordSet.parse('').trim_left('bar')
-        ()
-        """
-        return self[1:] if self and self[0] == item else self
-
-    def trim(self, item):
-        """
-        >>> WordSet.parse('foo bar').trim('foo')
-        ('bar',)
-        """
-        return self.trim_left(item).trim_right(item)
-
-    def __getitem__(self, item):
-        result = super().__getitem__(item)
-        if isinstance(item, slice):
-            result = WordSet(result)
-        return result
-
-    @classmethod
-    def parse(cls, identifier):
-        matches = cls._pattern.finditer(identifier)
-        return WordSet(match.group(0) for match in matches)
-
-    @classmethod
-    def from_class_name(cls, subject):
-        return cls.parse(subject.__class__.__name__)
-
-
-# for backward compatibility
-words = WordSet.parse
-
-
-def simple_html_strip(s):
-    r"""
-    Remove HTML from the string `s`.
-
-    >>> str(simple_html_strip(''))
-    ''
-
-    >>> print(simple_html_strip('A <bold>stormy</bold> day in p<b>ara</b>dise'))
-    A stormy day in paradise
-
-    >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.'))
-    Somebody  tell the truth.
-
-    >>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
-    What about
-    multiple lines?
-    """
-    html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
-    texts = (match.group(3) or '' for match in html_stripper.finditer(s))
-    return ''.join(texts)
-
-
-class SeparatedValues(str):
-    """
-    A string separated by a separator. Overrides __iter__ for getting
-    the values.
-
-    >>> list(SeparatedValues('a,b,c'))
-    ['a', 'b', 'c']
-
-    Whitespace is stripped and empty values are discarded.
-
-    >>> list(SeparatedValues(' a,   b , c,  '))
-    ['a', 'b', 'c']
-    """
-
-    separator = ','
-
-    def __iter__(self):
-        parts = self.split(self.separator)
-        return filter(None, (part.strip() for part in parts))
-
-
-class Stripper:
-    r"""
-    Given a series of lines, find the common prefix and strip it from them.
-
-    >>> lines = [
-    ...     'abcdefg\n',
-    ...     'abc\n',
-    ...     'abcde\n',
-    ... ]
-    >>> res = Stripper.strip_prefix(lines)
-    >>> res.prefix
-    'abc'
-    >>> list(res.lines)
-    ['defg\n', '\n', 'de\n']
-
-    If no prefix is common, nothing should be stripped.
-
-    >>> lines = [
-    ...     'abcd\n',
-    ...     '1234\n',
-    ... ]
-    >>> res = Stripper.strip_prefix(lines)
-    >>> res.prefix = ''
-    >>> list(res.lines)
-    ['abcd\n', '1234\n']
-    """
-
-    def __init__(self, prefix, lines):
-        self.prefix = prefix
-        self.lines = map(self, lines)
-
-    @classmethod
-    def strip_prefix(cls, lines):
-        prefix_lines, lines = itertools.tee(lines)
-        prefix = functools.reduce(cls.common_prefix, prefix_lines)
-        return cls(prefix, lines)
-
-    def __call__(self, line):
-        if not self.prefix:
-            return line
-        null, prefix, rest = line.partition(self.prefix)
-        return rest
-
-    @staticmethod
-    def common_prefix(s1, s2):
-        """
-        Return the common prefix of two lines.
-        """
-        index = min(len(s1), len(s2))
-        while s1[:index] != s2[:index]:
-            index -= 1
-        return s1[:index]
-
-
-def remove_prefix(text, prefix):
-    """
-    Remove the prefix from the text if it exists.
-
-    >>> remove_prefix('underwhelming performance', 'underwhelming ')
-    'performance'
-
-    >>> remove_prefix('something special', 'sample')
-    'something special'
-    """
-    null, prefix, rest = text.rpartition(prefix)
-    return rest
-
-
-def remove_suffix(text, suffix):
-    """
-    Remove the suffix from the text if it exists.
-
-    >>> remove_suffix('name.git', '.git')
-    'name'
-
-    >>> remove_suffix('something special', 'sample')
-    'something special'
-    """
-    rest, suffix, null = text.partition(suffix)
-    return rest
-
-
-def normalize_newlines(text):
-    r"""
-    Replace alternate newlines with the canonical newline.
-
-    >>> normalize_newlines('Lorem Ipsum\u2029')
-    'Lorem Ipsum\n'
-    >>> normalize_newlines('Lorem Ipsum\r\n')
-    'Lorem Ipsum\n'
-    >>> normalize_newlines('Lorem Ipsum\x85')
-    'Lorem Ipsum\n'
-    """
-    newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029']
-    pattern = '|'.join(newlines)
-    return re.sub(pattern, '\n', text)
-
-
-def _nonblank(str):
-    return str and not str.startswith('#')
-
-
-@functools.singledispatch
-def yield_lines(iterable):
-    r"""
-    Yield valid lines of a string or iterable.
-
-    >>> list(yield_lines(''))
-    []
-    >>> list(yield_lines(['foo', 'bar']))
-    ['foo', 'bar']
-    >>> list(yield_lines('foo\nbar'))
-    ['foo', 'bar']
-    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
-    ['foo', 'baz #comment']
-    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
-    ['foo', 'bar', 'baz', 'bing']
-    """
-    return itertools.chain.from_iterable(map(yield_lines, iterable))
-
-
-@yield_lines.register(str)
-def _(text):
-    return filter(_nonblank, map(str.strip, text.splitlines()))
-
-
-def drop_comment(line):
-    """
-    Drop comments.
-
-    >>> drop_comment('foo # bar')
-    'foo'
-
-    A hash without a space may be in a URL.
-
-    >>> drop_comment('http://example.com/foo#bar')
-    'http://example.com/foo#bar'
-    """
-    return line.partition(' #')[0]
-
-
-def join_continuation(lines):
-    r"""
-    Join lines continued by a trailing backslash.
-
-    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
-    ['foobar', 'baz']
-    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
-    ['foobar', 'baz']
-    >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
-    ['foobarbaz']
-
-    Not sure why, but...
-    The character preceding the backslash is also elided.
-
-    >>> list(join_continuation(['goo\\', 'dly']))
-    ['godly']
-
-    A terrible idea, but...
-    If no line is available to continue, suppress the lines.
-
-    >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
-    ['foo']
-    """
-    lines = iter(lines)
-    for item in lines:
-        while item.endswith('\\'):
-            try:
-                item = item[:-2].strip() + next(lines)
-            except StopIteration:
-                return
-        yield item
-
-
-def read_newlines(filename, limit=1024):
-    r"""
-    >>> tmp_path = getfixture('tmp_path')
-    >>> filename = tmp_path / 'out.txt'
-    >>> _ = filename.write_text('foo\n', newline='', encoding='utf-8')
-    >>> read_newlines(filename)
-    '\n'
-    >>> _ = filename.write_text('foo\r\n', newline='', encoding='utf-8')
-    >>> read_newlines(filename)
-    '\r\n'
-    >>> _ = filename.write_text('foo\r\nbar\nbing\r', newline='', encoding='utf-8')
-    >>> read_newlines(filename)
-    ('\r', '\n', '\r\n')
-    """
-    with open(filename, encoding='utf-8') as fp:
-        fp.read(limit)
-    return fp.newlines
diff --git a/pkg_resources/_vendor/jaraco/text/layouts.py b/pkg_resources/_vendor/jaraco/text/layouts.py
deleted file mode 100644
index 9636f0f7b5..0000000000
--- a/pkg_resources/_vendor/jaraco/text/layouts.py
+++ /dev/null
@@ -1,25 +0,0 @@
-qwerty = "-=qwertyuiop[]asdfghjkl;'zxcvbnm,./_+QWERTYUIOP{}ASDFGHJKL:\"ZXCVBNM<>?"
-dvorak = "[]',.pyfgcrl/=aoeuidhtns-;qjkxbmwvz{}\"<>PYFGCRL?+AOEUIDHTNS_:QJKXBMWVZ"
-
-
-to_dvorak = str.maketrans(qwerty, dvorak)
-to_qwerty = str.maketrans(dvorak, qwerty)
-
-
-def translate(input, translation):
-    """
-    >>> translate('dvorak', to_dvorak)
-    'ekrpat'
-    >>> translate('qwerty', to_qwerty)
-    'x,dokt'
-    """
-    return input.translate(translation)
-
-
-def _translate_stream(stream, translation):
-    """
-    >>> import io
-    >>> _translate_stream(io.StringIO('foo'), to_dvorak)
-    urr
-    """
-    print(translate(stream.read(), translation))
diff --git a/pkg_resources/_vendor/jaraco/text/show-newlines.py b/pkg_resources/_vendor/jaraco/text/show-newlines.py
deleted file mode 100644
index e11d1ba428..0000000000
--- a/pkg_resources/_vendor/jaraco/text/show-newlines.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import autocommand
-import inflect
-
-from more_itertools import always_iterable
-
-import jaraco.text
-
-
-def report_newlines(filename):
-    r"""
-    Report the newlines in the indicated file.
-
-    >>> tmp_path = getfixture('tmp_path')
-    >>> filename = tmp_path / 'out.txt'
-    >>> _ = filename.write_text('foo\nbar\n', newline='', encoding='utf-8')
-    >>> report_newlines(filename)
-    newline is '\n'
-    >>> filename = tmp_path / 'out.txt'
-    >>> _ = filename.write_text('foo\nbar\r\n', newline='', encoding='utf-8')
-    >>> report_newlines(filename)
-    newlines are ('\n', '\r\n')
-    """
-    newlines = jaraco.text.read_newlines(filename)
-    count = len(tuple(always_iterable(newlines)))
-    engine = inflect.engine()
-    print(
-        engine.plural_noun("newline", count),
-        engine.plural_verb("is", count),
-        repr(newlines),
-    )
-
-
-autocommand.autocommand(__name__)(report_newlines)
diff --git a/pkg_resources/_vendor/jaraco/text/strip-prefix.py b/pkg_resources/_vendor/jaraco/text/strip-prefix.py
deleted file mode 100644
index 761717a9b9..0000000000
--- a/pkg_resources/_vendor/jaraco/text/strip-prefix.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import sys
-
-import autocommand
-
-from jaraco.text import Stripper
-
-
-def strip_prefix():
-    r"""
-    Strip any common prefix from stdin.
-
-    >>> import io, pytest
-    >>> getfixture('monkeypatch').setattr('sys.stdin', io.StringIO('abcdef\nabc123'))
-    >>> strip_prefix()
-    def
-    123
-    """
-    sys.stdout.writelines(Stripper.strip_prefix(sys.stdin).lines)
-
-
-autocommand.autocommand(__name__)(strip_prefix)
diff --git a/pkg_resources/_vendor/jaraco/text/to-dvorak.py b/pkg_resources/_vendor/jaraco/text/to-dvorak.py
deleted file mode 100644
index a6d5da80b3..0000000000
--- a/pkg_resources/_vendor/jaraco/text/to-dvorak.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import sys
-
-from . import layouts
-
-
-__name__ == '__main__' and layouts._translate_stream(sys.stdin, layouts.to_dvorak)
diff --git a/pkg_resources/_vendor/jaraco/text/to-qwerty.py b/pkg_resources/_vendor/jaraco/text/to-qwerty.py
deleted file mode 100644
index abe2728662..0000000000
--- a/pkg_resources/_vendor/jaraco/text/to-qwerty.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import sys
-
-from . import layouts
-
-
-__name__ == '__main__' and layouts._translate_stream(sys.stdin, layouts.to_qwerty)
diff --git a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/INSTALLER b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/LICENSE b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/LICENSE
deleted file mode 100644
index 0a523bece3..0000000000
--- a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/LICENSE
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2012 Erik Rose
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/METADATA b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/METADATA
deleted file mode 100644
index fb41b0cfe6..0000000000
--- a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/METADATA
+++ /dev/null
@@ -1,266 +0,0 @@
-Metadata-Version: 2.1
-Name: more-itertools
-Version: 10.3.0
-Summary: More routines for operating on iterables, beyond itertools
-Keywords: itertools,iterator,iteration,filter,peek,peekable,chunk,chunked
-Author-email: Erik Rose <erikrose@grinchcentral.com>
-Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: Natural Language :: English
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
-Classifier: Topic :: Software Development :: Libraries
-Project-URL: Homepage, https://github.com/more-itertools/more-itertools
-
-==============
-More Itertools
-==============
-
-.. image:: https://readthedocs.org/projects/more-itertools/badge/?version=latest
-    :target: https://more-itertools.readthedocs.io/en/stable/
-
-Python's ``itertools`` library is a gem - you can compose elegant solutions
-for a variety of problems with the functions it provides. In ``more-itertools``
-we collect additional building blocks, recipes, and routines for working with
-Python iterables.
- -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Grouping | `chunked `_, | -| | `ichunked `_, | -| | `chunked_even `_, | -| | `sliced `_, | -| | `constrained_batches `_, | -| | `distribute `_, | -| | `divide `_, | -| | `split_at `_, | -| | `split_before `_, | -| | `split_after `_, | -| | `split_into `_, | -| | `split_when `_, | -| | `bucket `_, | -| | `unzip `_, | -| | `batched `_, | -| | `grouper `_, | -| | `partition `_, | -| | `transpose `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Lookahead and lookback | `spy `_, | -| | `peekable `_, | -| | `seekable `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Windowing | `windowed `_, | -| | `substrings `_, | -| | `substrings_indexes `_, | -| | `stagger `_, | -| | `windowed_complete `_, | -| | `pairwise `_, | -| | `triplewise `_, | -| | `sliding_window `_, | -| | `subslices `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Augmenting | `count_cycle `_, | -| | `intersperse `_, | -| | `padded `_, | -| | `repeat_each `_, | -| | `mark_ends `_, | -| | `repeat_last `_, | -| | `adjacent `_, | -| | `groupby_transform `_, | -| | `pad_none `_, | -| | `ncycles `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Combining | `collapse `_, | -| | `sort_together `_, | -| | `interleave `_, | -| | `interleave_longest `_, | -| | `interleave_evenly `_, | -| | `zip_offset `_, | -| | `zip_equal `_, | -| | `zip_broadcast `_, | -| | `flatten `_, | -| | `roundrobin `_, | -| | `prepend `_, | -| | `value_chain `_, | -| | `partial_product `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Summarizing | `ilen `_, | -| | `unique_to_each `_, | -| | `sample `_, | -| | `consecutive_groups `_, | -| | `run_length `_, | -| | `map_reduce `_, | -| | `join_mappings `_, | -| | `exactly_n `_, | -| | `is_sorted `_, | -| | `all_equal `_, | -| | `all_unique `_, | -| | `minmax `_, | -| | `first_true `_, | -| | `quantify `_, | -| | `iequals `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Selecting | `islice_extended `_, | -| | `first `_, | -| | `last `_, | -| | `one `_, | -| | `only `_, | -| | `strictly_n `_, | -| | `strip `_, | -| | `lstrip `_, | -| | `rstrip `_, | -| | `filter_except `_, | -| | `map_except `_, | -| | `filter_map `_, | -| | `iter_suppress `_, | -| | `nth_or_last `_, | -| | `unique_in_window `_, | -| | `before_and_after `_, | -| | `nth `_, | -| | `take `_, | -| | `tail `_, | -| | `unique_everseen `_, | -| | `unique_justseen `_, | -| | `unique `_, | -| | 
`duplicates_everseen `_, | -| | `duplicates_justseen `_, | -| | `classify_unique `_, | -| | `longest_common_prefix `_, | -| | `takewhile_inclusive `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Math | `dft `_, | -| | `idft `_, | -| | `convolve `_, | -| | `dotproduct `_, | -| | `factor `_, | -| | `matmul `_, | -| | `polynomial_from_roots `_, | -| | `polynomial_derivative `_, | -| | `polynomial_eval `_, | -| | `sieve `_, | -| | `sum_of_squares `_, | -| | `totient `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Combinatorics | `distinct_permutations `_, | -| | `distinct_combinations `_, | -| | `circular_shifts `_, | -| | `partitions `_, | -| | `set_partitions `_, | -| | `product_index `_, | -| | `combination_index `_, | -| | `permutation_index `_, | -| | `combination_with_replacement_index `_, | -| | `gray_product `_, | -| | `outer_product `_, | -| | `powerset `_, | -| | `powerset_of_sets `_, | -| | `random_product `_, | -| | `random_permutation `_, | -| | `random_combination `_, | -| | `random_combination_with_replacement `_, | -| | `nth_product `_, | -| | `nth_permutation `_, | -| | `nth_combination `_, | -| | `nth_combination_with_replacement `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Wrapping | `always_iterable `_, | -| | `always_reversible `_, | -| | `countable `_, | -| | `consumer `_, | -| | `with_iter `_, | -| | `iter_except `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Others | `locate `_, | -| | `rlocate `_, | -| | `replace `_, | -| | `numeric_range `_, | -| | `side_effect `_, | -| | `iterate `_, | -| | `difference `_, | -| | `make_decorator `_, | -| | `SequenceView `_, | -| | `time_limited `_, | -| | `map_if `_, | -| | `iter_index `_, | -| | `consume `_, | -| | `tabulate `_, | -| | `repeatfunc `_, | -| | `reshape `_ | -| | `doublestarmap `_ | -+------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ - - -Getting started -=============== - -To get started, install the library with `pip `_: - -.. code-block:: shell - - pip install more-itertools - -The recipes from the `itertools docs `_ -are included in the top-level package: - -.. code-block:: python - - >>> from more_itertools import flatten - >>> iterable = [(0, 1), (2, 3)] - >>> list(flatten(iterable)) - [0, 1, 2, 3] - -Several new recipes are available as well: - -.. code-block:: python - - >>> from more_itertools import chunked - >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8] - >>> list(chunked(iterable, 3)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8]] - - >>> from more_itertools import spy - >>> iterable = (x * x for x in range(1, 6)) - >>> head, iterable = spy(iterable, n=3) - >>> list(head) - [1, 4, 9] - >>> list(iterable) - [1, 4, 9, 16, 25] - - - -For the full listing of functions, see the `API documentation `_. 
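-
-Two more of the helpers documented later in this patch, in the same doctest
-style. (An editorial sketch, not part of the upstream README: the calls and
-outputs below are taken from the deleted module's own docstrings for
-``windowed`` and ``peekable``, so nothing here is assumed beyond what those
-docstrings state.)
-
-.. code-block:: python
-
-    >>> from more_itertools import windowed, peekable
-    >>> list(windowed([1, 2, 3, 4, 5], 3))
-    [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
-    >>> p = peekable(['a', 'b'])
-    >>> p.peek()   # look ahead without advancing the iterator
-    'a'
-    >>> next(p)    # the peeked item is still returned by next()
-    'a'
-
-Both accept arbitrary iterables, not just sequences; ``peekable`` caches the
-items it looks ahead at, as its docstring later in this patch notes.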
- - -Links elsewhere -=============== - -Blog posts about ``more-itertools``: - -* `Yo, I heard you like decorators `__ -* `Tour of Python Itertools `__ (`Alternate `__) -* `Real-World Python More Itertools `_ - - -Development -=========== - -``more-itertools`` is maintained by `@erikrose `_ -and `@bbayles `_, with help from `many others `_. -If you have a problem or suggestion, please file a bug or pull request in this -repository. Thanks for contributing! - - -Version History -=============== - -The version history can be found in `documentation `_. - diff --git a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/RECORD b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/RECORD deleted file mode 100644 index 53183bfb30..0000000000 --- a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/RECORD +++ /dev/null @@ -1,15 +0,0 @@ -more_itertools-10.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -more_itertools-10.3.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053 -more_itertools-10.3.0.dist-info/METADATA,sha256=BFO90O-fLNiVQMpj7oIS5ztzgJUUQZ3TA32P5HH3N-A,36293 -more_itertools-10.3.0.dist-info/RECORD,, -more_itertools-10.3.0.dist-info/WHEEL,sha256=rSgq_JpHF9fHR1lx53qwg_1-2LypZE_qmcuXbVUq948,81 -more_itertools/__init__.py,sha256=dtAbGjTDmn_ghiU5YXfhyDy0phAlXVdt5klZA5fUa-Q,149 -more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43 -more_itertools/__pycache__/__init__.cpython-312.pyc,, -more_itertools/__pycache__/more.cpython-312.pyc,, -more_itertools/__pycache__/recipes.cpython-312.pyc,, -more_itertools/more.py,sha256=1E5kzFncRKTDw0cYv1yRXMgDdunstLQd1QStcnL6U90,148370 -more_itertools/more.pyi,sha256=iXXeqt48Nxe8VGmIWpkVXuKpR2FYNuu2DU8nQLWCCu0,21484 -more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -more_itertools/recipes.py,sha256=WedhhfhGVgr6zii8fIbGJVmRTw0ZKRiLKnYBDGJv4nY,28591 -more_itertools/recipes.pyi,sha256=T_mdGpcFdfrP3JSWbwzYP9JyNV-Go-7RPfpxfftAWlA,4617 diff --git a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/WHEEL b/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/WHEEL deleted file mode 100644 index db4a255f3a..0000000000 --- a/pkg_resources/_vendor/more_itertools-10.3.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.8.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/pkg_resources/_vendor/more_itertools/__init__.py b/pkg_resources/_vendor/more_itertools/__init__.py deleted file mode 100644 index 9c4662fc31..0000000000 --- a/pkg_resources/_vendor/more_itertools/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""More routines for operating on iterables, beyond itertools""" - -from .more import * # noqa -from .recipes import * # noqa - -__version__ = '10.3.0' diff --git a/pkg_resources/_vendor/more_itertools/__init__.pyi b/pkg_resources/_vendor/more_itertools/__init__.pyi deleted file mode 100644 index 96f6e36c7f..0000000000 --- a/pkg_resources/_vendor/more_itertools/__init__.pyi +++ /dev/null @@ -1,2 +0,0 @@ -from .more import * -from .recipes import * diff --git a/pkg_resources/_vendor/more_itertools/more.py b/pkg_resources/_vendor/more_itertools/more.py deleted file mode 100755 index 7b481907da..0000000000 --- a/pkg_resources/_vendor/more_itertools/more.py +++ /dev/null @@ -1,4806 +0,0 @@ -import math -import warnings - -from collections import Counter, defaultdict, deque, abc -from collections.abc import Sequence -from functools import cached_property, partial, reduce, wraps -from heapq import heapify, 
heapreplace, heappop -from itertools import ( - chain, - combinations, - compress, - count, - cycle, - dropwhile, - groupby, - islice, - repeat, - starmap, - takewhile, - tee, - zip_longest, - product, -) -from math import comb, e, exp, factorial, floor, fsum, log, perm, tau -from queue import Empty, Queue -from random import random, randrange, uniform -from operator import itemgetter, mul, sub, gt, lt, ge, le -from sys import hexversion, maxsize -from time import monotonic - -from .recipes import ( - _marker, - _zip_equal, - UnequalIterablesError, - consume, - flatten, - pairwise, - powerset, - take, - unique_everseen, - all_equal, - batched, -) - -__all__ = [ - 'AbortThread', - 'SequenceView', - 'UnequalIterablesError', - 'adjacent', - 'all_unique', - 'always_iterable', - 'always_reversible', - 'bucket', - 'callback_iter', - 'chunked', - 'chunked_even', - 'circular_shifts', - 'collapse', - 'combination_index', - 'combination_with_replacement_index', - 'consecutive_groups', - 'constrained_batches', - 'consumer', - 'count_cycle', - 'countable', - 'dft', - 'difference', - 'distinct_combinations', - 'distinct_permutations', - 'distribute', - 'divide', - 'doublestarmap', - 'duplicates_everseen', - 'duplicates_justseen', - 'classify_unique', - 'exactly_n', - 'filter_except', - 'filter_map', - 'first', - 'gray_product', - 'groupby_transform', - 'ichunked', - 'iequals', - 'idft', - 'ilen', - 'interleave', - 'interleave_evenly', - 'interleave_longest', - 'intersperse', - 'is_sorted', - 'islice_extended', - 'iterate', - 'iter_suppress', - 'join_mappings', - 'last', - 'locate', - 'longest_common_prefix', - 'lstrip', - 'make_decorator', - 'map_except', - 'map_if', - 'map_reduce', - 'mark_ends', - 'minmax', - 'nth_or_last', - 'nth_permutation', - 'nth_product', - 'nth_combination_with_replacement', - 'numeric_range', - 'one', - 'only', - 'outer_product', - 'padded', - 'partial_product', - 'partitions', - 'peekable', - 'permutation_index', - 'powerset_of_sets', - 'product_index', - 'raise_', - 'repeat_each', - 'repeat_last', - 'replace', - 'rlocate', - 'rstrip', - 'run_length', - 'sample', - 'seekable', - 'set_partitions', - 'side_effect', - 'sliced', - 'sort_together', - 'split_after', - 'split_at', - 'split_before', - 'split_into', - 'split_when', - 'spy', - 'stagger', - 'strip', - 'strictly_n', - 'substrings', - 'substrings_indexes', - 'takewhile_inclusive', - 'time_limited', - 'unique_in_window', - 'unique_to_each', - 'unzip', - 'value_chain', - 'windowed', - 'windowed_complete', - 'with_iter', - 'zip_broadcast', - 'zip_equal', - 'zip_offset', -] - -# math.sumprod is available for Python 3.12+ -_fsumprod = getattr(math, 'sumprod', lambda x, y: fsum(map(mul, x, y))) - - -def chunked(iterable, n, strict=False): - """Break *iterable* into lists of length *n*: - - >>> list(chunked([1, 2, 3, 4, 5, 6], 3)) - [[1, 2, 3], [4, 5, 6]] - - By the default, the last yielded list will have fewer than *n* elements - if the length of *iterable* is not divisible by *n*: - - >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3)) - [[1, 2, 3], [4, 5, 6], [7, 8]] - - To use a fill-in value instead, see the :func:`grouper` recipe. - - If the length of *iterable* is not divisible by *n* and *strict* is - ``True``, then ``ValueError`` will be raised before the last - list is yielded. 
- - """ - iterator = iter(partial(take, n, iter(iterable)), []) - if strict: - if n is None: - raise ValueError('n must not be None when using strict mode.') - - def ret(): - for chunk in iterator: - if len(chunk) != n: - raise ValueError('iterable is not divisible by n.') - yield chunk - - return iter(ret()) - else: - return iterator - - -def first(iterable, default=_marker): - """Return the first item of *iterable*, or *default* if *iterable* is - empty. - - >>> first([0, 1, 2, 3]) - 0 - >>> first([], 'some default') - 'some default' - - If *default* is not provided and there are no items in the iterable, - raise ``ValueError``. - - :func:`first` is useful when you have a generator of expensive-to-retrieve - values and want any arbitrary one. It is marginally shorter than - ``next(iter(iterable), default)``. - - """ - for item in iterable: - return item - if default is _marker: - raise ValueError( - 'first() was called on an empty iterable, and no ' - 'default value was provided.' - ) - return default - - -def last(iterable, default=_marker): - """Return the last item of *iterable*, or *default* if *iterable* is - empty. - - >>> last([0, 1, 2, 3]) - 3 - >>> last([], 'some default') - 'some default' - - If *default* is not provided and there are no items in the iterable, - raise ``ValueError``. - """ - try: - if isinstance(iterable, Sequence): - return iterable[-1] - # Work around https://bugs.python.org/issue38525 - elif hasattr(iterable, '__reversed__') and (hexversion != 0x030800F0): - return next(reversed(iterable)) - else: - return deque(iterable, maxlen=1)[-1] - except (IndexError, TypeError, StopIteration): - if default is _marker: - raise ValueError( - 'last() was called on an empty iterable, and no default was ' - 'provided.' - ) - return default - - -def nth_or_last(iterable, n, default=_marker): - """Return the nth or the last item of *iterable*, - or *default* if *iterable* is empty. - - >>> nth_or_last([0, 1, 2, 3], 2) - 2 - >>> nth_or_last([0, 1], 2) - 1 - >>> nth_or_last([], 0, 'some default') - 'some default' - - If *default* is not provided and there are no items in the iterable, - raise ``ValueError``. - """ - return last(islice(iterable, n + 1), default=default) - - -class peekable: - """Wrap an iterator to allow lookahead and prepending elements. - - Call :meth:`peek` on the result to get the value that will be returned - by :func:`next`. This won't advance the iterator: - - >>> p = peekable(['a', 'b']) - >>> p.peek() - 'a' - >>> next(p) - 'a' - - Pass :meth:`peek` a default value to return that instead of raising - ``StopIteration`` when the iterator is exhausted. - - >>> p = peekable([]) - >>> p.peek('hi') - 'hi' - - peekables also offer a :meth:`prepend` method, which "inserts" items - at the head of the iterable: - - >>> p = peekable([1, 2, 3]) - >>> p.prepend(10, 11, 12) - >>> next(p) - 10 - >>> p.peek() - 11 - >>> list(p) - [11, 12, 1, 2, 3] - - peekables can be indexed. Index 0 is the item that will be returned by - :func:`next`, index 1 is the item after that, and so on: - The values up to the given index will be cached. - - >>> p = peekable(['a', 'b', 'c', 'd']) - >>> p[0] - 'a' - >>> p[1] - 'b' - >>> next(p) - 'a' - - Negative indexes are supported, but be aware that they will cache the - remaining items in the source iterator, which may require significant - storage. - - To check whether a peekable is exhausted, check its truth value: - - >>> p = peekable(['a', 'b']) - >>> if p: # peekable has items - ... 
list(p) - ['a', 'b'] - >>> if not p: # peekable is exhausted - ... list(p) - [] - - """ - - def __init__(self, iterable): - self._it = iter(iterable) - self._cache = deque() - - def __iter__(self): - return self - - def __bool__(self): - try: - self.peek() - except StopIteration: - return False - return True - - def peek(self, default=_marker): - """Return the item that will be next returned from ``next()``. - - Return ``default`` if there are no items left. If ``default`` is not - provided, raise ``StopIteration``. - - """ - if not self._cache: - try: - self._cache.append(next(self._it)) - except StopIteration: - if default is _marker: - raise - return default - return self._cache[0] - - def prepend(self, *items): - """Stack up items to be the next ones returned from ``next()`` or - ``self.peek()``. The items will be returned in - first in, first out order:: - - >>> p = peekable([1, 2, 3]) - >>> p.prepend(10, 11, 12) - >>> next(p) - 10 - >>> list(p) - [11, 12, 1, 2, 3] - - It is possible, by prepending items, to "resurrect" a peekable that - previously raised ``StopIteration``. - - >>> p = peekable([]) - >>> next(p) - Traceback (most recent call last): - ... - StopIteration - >>> p.prepend(1) - >>> next(p) - 1 - >>> next(p) - Traceback (most recent call last): - ... - StopIteration - - """ - self._cache.extendleft(reversed(items)) - - def __next__(self): - if self._cache: - return self._cache.popleft() - - return next(self._it) - - def _get_slice(self, index): - # Normalize the slice's arguments - step = 1 if (index.step is None) else index.step - if step > 0: - start = 0 if (index.start is None) else index.start - stop = maxsize if (index.stop is None) else index.stop - elif step < 0: - start = -1 if (index.start is None) else index.start - stop = (-maxsize - 1) if (index.stop is None) else index.stop - else: - raise ValueError('slice step cannot be zero') - - # If either the start or stop index is negative, we'll need to cache - # the rest of the iterable in order to slice from the right side. - if (start < 0) or (stop < 0): - self._cache.extend(self._it) - # Otherwise we'll need to find the rightmost index and cache to that - # point. - else: - n = min(max(start, stop) + 1, maxsize) - cache_len = len(self._cache) - if n >= cache_len: - self._cache.extend(islice(self._it, n - cache_len)) - - return list(self._cache)[index] - - def __getitem__(self, index): - if isinstance(index, slice): - return self._get_slice(index) - - cache_len = len(self._cache) - if index < 0: - self._cache.extend(self._it) - elif index >= cache_len: - self._cache.extend(islice(self._it, index + 1 - cache_len)) - - return self._cache[index] - - -def consumer(func): - """Decorator that automatically advances a PEP-342-style "reverse iterator" - to its first yield point so you don't have to call ``next()`` on it - manually. - - >>> @consumer - ... def tally(): - ... i = 0 - ... while True: - ... print('Thing number %s is %s.' % (i, (yield))) - ... i += 1 - ... - >>> t = tally() - >>> t.send('red') - Thing number 0 is red. - >>> t.send('fish') - Thing number 1 is fish. - - Without the decorator, you would have to call ``next(t)`` before - ``t.send()`` could be used. - - """ - - @wraps(func) - def wrapper(*args, **kwargs): - gen = func(*args, **kwargs) - next(gen) - return gen - - return wrapper - - -def ilen(iterable): - """Return the number of items in *iterable*. - - >>> ilen(x for x in range(1000000) if x % 3 == 0) - 333334 - - This consumes the iterable, so handle with care. 
- - """ - # This approach was selected because benchmarks showed it's likely the - # fastest of the known implementations at the time of writing. - # See GitHub tracker: #236, #230. - counter = count() - deque(zip(iterable, counter), maxlen=0) - return next(counter) - - -def iterate(func, start): - """Return ``start``, ``func(start)``, ``func(func(start))``, ... - - >>> from itertools import islice - >>> list(islice(iterate(lambda x: 2*x, 1), 10)) - [1, 2, 4, 8, 16, 32, 64, 128, 256, 512] - - """ - while True: - yield start - try: - start = func(start) - except StopIteration: - break - - -def with_iter(context_manager): - """Wrap an iterable in a ``with`` statement, so it closes once exhausted. - - For example, this will close the file when the iterator is exhausted:: - - upper_lines = (line.upper() for line in with_iter(open('foo'))) - - Any context manager which returns an iterable is a candidate for - ``with_iter``. - - """ - with context_manager as iterable: - yield from iterable - - -def one(iterable, too_short=None, too_long=None): - """Return the first item from *iterable*, which is expected to contain only - that item. Raise an exception if *iterable* is empty or has more than one - item. - - :func:`one` is useful for ensuring that an iterable contains only one item. - For example, it can be used to retrieve the result of a database query - that is expected to return a single row. - - If *iterable* is empty, ``ValueError`` will be raised. You may specify a - different exception with the *too_short* keyword: - - >>> it = [] - >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: too many items in iterable (expected 1)' - >>> too_short = IndexError('too few items') - >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - IndexError: too few items - - Similarly, if *iterable* contains more than one item, ``ValueError`` will - be raised. You may specify a different exception with the *too_long* - keyword: - - >>> it = ['too', 'many'] - >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: Expected exactly one item in iterable, but got 'too', - 'many', and perhaps more. - >>> too_long = RuntimeError - >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - RuntimeError - - Note that :func:`one` attempts to advance *iterable* twice to ensure there - is only one item. See :func:`spy` or :func:`peekable` to check iterable - contents less destructively. - - """ - it = iter(iterable) - - try: - first_value = next(it) - except StopIteration as exc: - raise ( - too_short or ValueError('too few items in iterable (expected 1)') - ) from exc - - try: - second_value = next(it) - except StopIteration: - pass - else: - msg = ( - 'Expected exactly one item in iterable, but got {!r}, {!r}, ' - 'and perhaps more.'.format(first_value, second_value) - ) - raise too_long or ValueError(msg) - - return first_value - - -def raise_(exception, *args): - raise exception(*args) - - -def strictly_n(iterable, n, too_short=None, too_long=None): - """Validate that *iterable* has exactly *n* items and return them if - it does. If it has fewer than *n* items, call function *too_short* - with those items. If it has more than *n* items, call function - *too_long* with the first ``n + 1`` items. 
- - >>> iterable = ['a', 'b', 'c', 'd'] - >>> n = 4 - >>> list(strictly_n(iterable, n)) - ['a', 'b', 'c', 'd'] - - Note that the returned iterable must be consumed in order for the check to - be made. - - By default, *too_short* and *too_long* are functions that raise - ``ValueError``. - - >>> list(strictly_n('ab', 3)) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: too few items in iterable (got 2) - - >>> list(strictly_n('abc', 2)) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: too many items in iterable (got at least 3) - - You can instead supply functions that do something else. - *too_short* will be called with the number of items in *iterable*. - *too_long* will be called with `n + 1`. - - >>> def too_short(item_count): - ... raise RuntimeError - >>> it = strictly_n('abcd', 6, too_short=too_short) - >>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - RuntimeError - - >>> def too_long(item_count): - ... print('The boss is going to hear about this') - >>> it = strictly_n('abcdef', 4, too_long=too_long) - >>> list(it) - The boss is going to hear about this - ['a', 'b', 'c', 'd'] - - """ - if too_short is None: - too_short = lambda item_count: raise_( - ValueError, - 'Too few items in iterable (got {})'.format(item_count), - ) - - if too_long is None: - too_long = lambda item_count: raise_( - ValueError, - 'Too many items in iterable (got at least {})'.format(item_count), - ) - - it = iter(iterable) - for i in range(n): - try: - item = next(it) - except StopIteration: - too_short(i) - return - else: - yield item - - try: - next(it) - except StopIteration: - pass - else: - too_long(n + 1) - - -def distinct_permutations(iterable, r=None): - """Yield successive distinct permutations of the elements in *iterable*. - - >>> sorted(distinct_permutations([1, 0, 1])) - [(0, 1, 1), (1, 0, 1), (1, 1, 0)] - - Equivalent to ``set(permutations(iterable))``, except duplicates are not - generated and thrown away. For larger input sequences this is much more - efficient. - - Duplicate permutations arise when there are duplicated elements in the - input iterable. The number of items returned is - `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of - items input, and each `x_i` is the count of a distinct item in the input - sequence. - - If *r* is given, only the *r*-length permutations are yielded. 
- - >>> sorted(distinct_permutations([1, 0, 1], r=2)) - [(0, 1), (1, 0), (1, 1)] - >>> sorted(distinct_permutations(range(3), r=2)) - [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] - - """ - - # Algorithm: https://w.wiki/Qai - def _full(A): - while True: - # Yield the permutation we have - yield tuple(A) - - # Find the largest index i such that A[i] < A[i + 1] - for i in range(size - 2, -1, -1): - if A[i] < A[i + 1]: - break - # If no such index exists, this permutation is the last one - else: - return - - # Find the largest index j greater than j such that A[i] < A[j] - for j in range(size - 1, i, -1): - if A[i] < A[j]: - break - - # Swap the value of A[i] with that of A[j], then reverse the - # sequence from A[i + 1] to form the new permutation - A[i], A[j] = A[j], A[i] - A[i + 1 :] = A[: i - size : -1] # A[i + 1:][::-1] - - # Algorithm: modified from the above - def _partial(A, r): - # Split A into the first r items and the last r items - head, tail = A[:r], A[r:] - right_head_indexes = range(r - 1, -1, -1) - left_tail_indexes = range(len(tail)) - - while True: - # Yield the permutation we have - yield tuple(head) - - # Starting from the right, find the first index of the head with - # value smaller than the maximum value of the tail - call it i. - pivot = tail[-1] - for i in right_head_indexes: - if head[i] < pivot: - break - pivot = head[i] - else: - return - - # Starting from the left, find the first value of the tail - # with a value greater than head[i] and swap. - for j in left_tail_indexes: - if tail[j] > head[i]: - head[i], tail[j] = tail[j], head[i] - break - # If we didn't find one, start from the right and find the first - # index of the head with a value greater than head[i] and swap. - else: - for j in right_head_indexes: - if head[j] > head[i]: - head[i], head[j] = head[j], head[i] - break - - # Reverse head[i + 1:] and swap it with tail[:r - (i + 1)] - tail += head[: i - r : -1] # head[i + 1:][::-1] - i += 1 - head[i:], tail[:] = tail[: r - i], tail[r - i :] - - items = sorted(iterable) - - size = len(items) - if r is None: - r = size - - if 0 < r <= size: - return _full(items) if (r == size) else _partial(items, r) - - return iter(() if r else ((),)) - - -def intersperse(e, iterable, n=1): - """Intersperse filler element *e* among the items in *iterable*, leaving - *n* items between each filler element. - - >>> list(intersperse('!', [1, 2, 3, 4, 5])) - [1, '!', 2, '!', 3, '!', 4, '!', 5] - - >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2)) - [1, 2, None, 3, 4, None, 5] - - """ - if n == 0: - raise ValueError('n must be > 0') - elif n == 1: - # interleave(repeat(e), iterable) -> e, x_0, e, x_1, e, x_2... - # islice(..., 1, None) -> x_0, e, x_1, e, x_2... - return islice(interleave(repeat(e), iterable), 1, None) - else: - # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]... - # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]... - # flatten(...) -> x_0, x_1, e, x_2, x_3... - filler = repeat([e]) - chunks = chunked(iterable, n) - return flatten(islice(interleave(filler, chunks), 1, None)) - - -def unique_to_each(*iterables): - """Return the elements from each of the input iterables that aren't in the - other input iterables. - - For example, suppose you have a set of packages, each with a set of - dependencies:: - - {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}} - - If you remove one package, which dependencies can also be removed? 
- - If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not - associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for - ``pkg_2``, and ``D`` is only needed for ``pkg_3``:: - - >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'}) - [['A'], ['C'], ['D']] - - If there are duplicates in one input iterable that aren't in the others - they will be duplicated in the output. Input order is preserved:: - - >>> unique_to_each("mississippi", "missouri") - [['p', 'p'], ['o', 'u', 'r']] - - It is assumed that the elements of each iterable are hashable. - - """ - pool = [list(it) for it in iterables] - counts = Counter(chain.from_iterable(map(set, pool))) - uniques = {element for element in counts if counts[element] == 1} - return [list(filter(uniques.__contains__, it)) for it in pool] - - -def windowed(seq, n, fillvalue=None, step=1): - """Return a sliding window of width *n* over the given iterable. - - >>> all_windows = windowed([1, 2, 3, 4, 5], 3) - >>> list(all_windows) - [(1, 2, 3), (2, 3, 4), (3, 4, 5)] - - When the window is larger than the iterable, *fillvalue* is used in place - of missing values: - - >>> list(windowed([1, 2, 3], 4)) - [(1, 2, 3, None)] - - Each window will advance in increments of *step*: - - >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2)) - [(1, 2, 3), (3, 4, 5), (5, 6, '!')] - - To slide into the iterable's items, use :func:`chain` to add filler items - to the left: - - >>> iterable = [1, 2, 3, 4] - >>> n = 3 - >>> padding = [None] * (n - 1) - >>> list(windowed(chain(padding, iterable), 3)) - [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)] - """ - if n < 0: - raise ValueError('n must be >= 0') - if n == 0: - yield () - return - if step < 1: - raise ValueError('step must be >= 1') - - iterable = iter(seq) - - # Generate first window - window = deque(islice(iterable, n), maxlen=n) - - # Deal with the first window not being full - if not window: - return - if len(window) < n: - yield tuple(window) + ((fillvalue,) * (n - len(window))) - return - yield tuple(window) - - # Create the filler for the next windows. The padding ensures - # we have just enough elements to fill the last window. - padding = (fillvalue,) * (n - 1 if step >= n else step - 1) - filler = map(window.append, chain(iterable, padding)) - - # Generate the rest of the windows - for _ in islice(filler, step - 1, None, step): - yield tuple(window) - - -def substrings(iterable): - """Yield all of the substrings of *iterable*. - - >>> [''.join(s) for s in substrings('more')] - ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more'] - - Note that non-string iterables can also be subdivided. - - >>> list(substrings([0, 1, 2])) - [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)] - - """ - # The length-1 substrings - seq = [] - for item in iter(iterable): - seq.append(item) - yield (item,) - seq = tuple(seq) - item_count = len(seq) - - # And the rest - for n in range(2, item_count + 1): - for i in range(item_count - n + 1): - yield seq[i : i + n] - - -def substrings_indexes(seq, reverse=False): - """Yield all substrings and their positions in *seq* - - The items yielded will be a tuple of the form ``(substr, i, j)``, where - ``substr == seq[i:j]``. - - This function only works for iterables that support slicing, such as - ``str`` objects. - - >>> for item in substrings_indexes('more'): - ... 
print(item) - ('m', 0, 1) - ('o', 1, 2) - ('r', 2, 3) - ('e', 3, 4) - ('mo', 0, 2) - ('or', 1, 3) - ('re', 2, 4) - ('mor', 0, 3) - ('ore', 1, 4) - ('more', 0, 4) - - Set *reverse* to ``True`` to yield the same items in the opposite order. - - - """ - r = range(1, len(seq) + 1) - if reverse: - r = reversed(r) - return ( - (seq[i : i + L], i, i + L) for L in r for i in range(len(seq) - L + 1) - ) - - -class bucket: - """Wrap *iterable* and return an object that buckets the iterable into - child iterables based on a *key* function. - - >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3'] - >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character - >>> sorted(list(s)) # Get the keys - ['a', 'b', 'c'] - >>> a_iterable = s['a'] - >>> next(a_iterable) - 'a1' - >>> next(a_iterable) - 'a2' - >>> list(s['b']) - ['b1', 'b2', 'b3'] - - The original iterable will be advanced and its items will be cached until - they are used by the child iterables. This may require significant storage. - - By default, attempting to select a bucket to which no items belong will - exhaust the iterable and cache all values. - If you specify a *validator* function, selected buckets will instead be - checked against it. - - >>> from itertools import count - >>> it = count(1, 2) # Infinite sequence of odd numbers - >>> key = lambda x: x % 10 # Bucket by last digit - >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only - >>> s = bucket(it, key=key, validator=validator) - >>> 2 in s - False - >>> list(s[2]) - [] - - """ - - def __init__(self, iterable, key, validator=None): - self._it = iter(iterable) - self._key = key - self._cache = defaultdict(deque) - self._validator = validator or (lambda x: True) - - def __contains__(self, value): - if not self._validator(value): - return False - - try: - item = next(self[value]) - except StopIteration: - return False - else: - self._cache[value].appendleft(item) - - return True - - def _get_values(self, value): - """ - Helper to yield items from the parent iterator that match *value*. - Items that don't match are stored in the local cache as they - are encountered. - """ - while True: - # If we've cached some items that match the target value, emit - # the first one and evict it from the cache. - if self._cache[value]: - yield self._cache[value].popleft() - # Otherwise we need to advance the parent iterator to search for - # a matching item, caching the rest. - else: - while True: - try: - item = next(self._it) - except StopIteration: - return - item_value = self._key(item) - if item_value == value: - yield item - break - elif self._validator(item_value): - self._cache[item_value].append(item) - - def __iter__(self): - for item in self._it: - item_value = self._key(item) - if self._validator(item_value): - self._cache[item_value].append(item) - - yield from self._cache.keys() - - def __getitem__(self, value): - if not self._validator(value): - return iter(()) - - return self._get_values(value) - - -def spy(iterable, n=1): - """Return a 2-tuple with a list containing the first *n* elements of - *iterable*, and an iterator with the same items as *iterable*. - This allows you to "look ahead" at the items in the iterable without - advancing it. 
- - There is one item in the list by default: - - >>> iterable = 'abcdefg' - >>> head, iterable = spy(iterable) - >>> head - ['a'] - >>> list(iterable) - ['a', 'b', 'c', 'd', 'e', 'f', 'g'] - - You may use unpacking to retrieve items instead of lists: - - >>> (head,), iterable = spy('abcdefg') - >>> head - 'a' - >>> (first, second), iterable = spy('abcdefg', 2) - >>> first - 'a' - >>> second - 'b' - - The number of items requested can be larger than the number of items in - the iterable: - - >>> iterable = [1, 2, 3, 4, 5] - >>> head, iterable = spy(iterable, 10) - >>> head - [1, 2, 3, 4, 5] - >>> list(iterable) - [1, 2, 3, 4, 5] - - """ - it = iter(iterable) - head = take(n, it) - - return head.copy(), chain(head, it) - - -def interleave(*iterables): - """Return a new iterable yielding from each iterable in turn, - until the shortest is exhausted. - - >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8])) - [1, 4, 6, 2, 5, 7] - - For a version that doesn't terminate after the shortest iterable is - exhausted, see :func:`interleave_longest`. - - """ - return chain.from_iterable(zip(*iterables)) - - -def interleave_longest(*iterables): - """Return a new iterable yielding from each iterable in turn, - skipping any that are exhausted. - - >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8])) - [1, 4, 6, 2, 5, 7, 3, 8] - - This function produces the same output as :func:`roundrobin`, but may - perform better for some inputs (in particular when the number of iterables - is large). - - """ - i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker)) - return (x for x in i if x is not _marker) - - -def interleave_evenly(iterables, lengths=None): - """ - Interleave multiple iterables so that their elements are evenly distributed - throughout the output sequence. - - >>> iterables = [1, 2, 3, 4, 5], ['a', 'b'] - >>> list(interleave_evenly(iterables)) - [1, 2, 'a', 3, 4, 'b', 5] - - >>> iterables = [[1, 2, 3], [4, 5], [6, 7, 8]] - >>> list(interleave_evenly(iterables)) - [1, 6, 4, 2, 7, 3, 8, 5] - - This function requires iterables of known length. Iterables without - ``__len__()`` can be used by manually specifying lengths with *lengths*: - - >>> from itertools import combinations, repeat - >>> iterables = [combinations(range(4), 2), ['a', 'b', 'c']] - >>> lengths = [4 * (4 - 1) // 2, 3] - >>> list(interleave_evenly(iterables, lengths=lengths)) - [(0, 1), (0, 2), 'a', (0, 3), (1, 2), 'b', (1, 3), (2, 3), 'c'] - - Based on Bresenham's algorithm. - """ - if lengths is None: - try: - lengths = [len(it) for it in iterables] - except TypeError: - raise ValueError( - 'Iterable lengths could not be determined automatically. ' - 'Specify them with the lengths keyword.' 
- ) - elif len(iterables) != len(lengths): - raise ValueError('Mismatching number of iterables and lengths.') - - dims = len(lengths) - - # sort iterables by length, descending - lengths_permute = sorted( - range(dims), key=lambda i: lengths[i], reverse=True - ) - lengths_desc = [lengths[i] for i in lengths_permute] - iters_desc = [iter(iterables[i]) for i in lengths_permute] - - # the longest iterable is the primary one (Bresenham: the longest - # distance along an axis) - delta_primary, deltas_secondary = lengths_desc[0], lengths_desc[1:] - iter_primary, iters_secondary = iters_desc[0], iters_desc[1:] - errors = [delta_primary // dims] * len(deltas_secondary) - - to_yield = sum(lengths) - while to_yield: - yield next(iter_primary) - to_yield -= 1 - # update errors for each secondary iterable - errors = [e - delta for e, delta in zip(errors, deltas_secondary)] - - # those iterables for which the error is negative are yielded - # ("diagonal step" in Bresenham) - for i, e_ in enumerate(errors): - if e_ < 0: - yield next(iters_secondary[i]) - to_yield -= 1 - errors[i] += delta_primary - - -def collapse(iterable, base_type=None, levels=None): - """Flatten an iterable with multiple levels of nesting (e.g., a list of - lists of tuples) into non-iterable types. - - >>> iterable = [(1, 2), ([3, 4], [[5], [6]])] - >>> list(collapse(iterable)) - [1, 2, 3, 4, 5, 6] - - Binary and text strings are not considered iterable and - will not be collapsed. - - To avoid collapsing other types, specify *base_type*: - - >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']] - >>> list(collapse(iterable, base_type=tuple)) - ['ab', ('cd', 'ef'), 'gh', 'ij'] - - Specify *levels* to stop flattening after a certain level: - - >>> iterable = [('a', ['b']), ('c', ['d'])] - >>> list(collapse(iterable)) # Fully flattened - ['a', 'b', 'c', 'd'] - >>> list(collapse(iterable, levels=1)) # Only one level flattened - ['a', ['b'], 'c', ['d']] - - """ - stack = deque() - # Add our first node group, treat the iterable as a single node - stack.appendleft((0, repeat(iterable, 1))) - - while stack: - node_group = stack.popleft() - level, nodes = node_group - - # Check if beyond max level - if levels is not None and level > levels: - yield from nodes - continue - - for node in nodes: - # Check if done iterating - if isinstance(node, (str, bytes)) or ( - (base_type is not None) and isinstance(node, base_type) - ): - yield node - # Otherwise try to create child nodes - else: - try: - tree = iter(node) - except TypeError: - yield node - else: - # Save our current location - stack.appendleft(node_group) - # Append the new child node - stack.appendleft((level + 1, tree)) - # Break to process child node - break - - -def side_effect(func, iterable, chunk_size=None, before=None, after=None): - """Invoke *func* on each item in *iterable* (or on each *chunk_size* group - of items) before yielding the item. - - `func` must be a function that takes a single argument. Its return value - will be discarded. - - *before* and *after* are optional functions that take no arguments. They - will be executed before iteration starts and after it ends, respectively. - - `side_effect` can be used for logging, updating progress bars, or anything - that is not functionally "pure." 
- - Emitting a status message: - - >>> from more_itertools import consume - >>> func = lambda item: print('Received {}'.format(item)) - >>> consume(side_effect(func, range(2))) - Received 0 - Received 1 - - Operating on chunks of items: - - >>> pair_sums = [] - >>> func = lambda chunk: pair_sums.append(sum(chunk)) - >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2)) - [0, 1, 2, 3, 4, 5] - >>> list(pair_sums) - [1, 5, 9] - - Writing to a file-like object: - - >>> from io import StringIO - >>> from more_itertools import consume - >>> f = StringIO() - >>> func = lambda x: print(x, file=f) - >>> before = lambda: print(u'HEADER', file=f) - >>> after = f.close - >>> it = [u'a', u'b', u'c'] - >>> consume(side_effect(func, it, before=before, after=after)) - >>> f.closed - True - - """ - try: - if before is not None: - before() - - if chunk_size is None: - for item in iterable: - func(item) - yield item - else: - for chunk in chunked(iterable, chunk_size): - func(chunk) - yield from chunk - finally: - if after is not None: - after() - - -def sliced(seq, n, strict=False): - """Yield slices of length *n* from the sequence *seq*. - - >>> list(sliced((1, 2, 3, 4, 5, 6), 3)) - [(1, 2, 3), (4, 5, 6)] - - By the default, the last yielded slice will have fewer than *n* elements - if the length of *seq* is not divisible by *n*: - - >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3)) - [(1, 2, 3), (4, 5, 6), (7, 8)] - - If the length of *seq* is not divisible by *n* and *strict* is - ``True``, then ``ValueError`` will be raised before the last - slice is yielded. - - This function will only work for iterables that support slicing. - For non-sliceable iterables, see :func:`chunked`. - - """ - iterator = takewhile(len, (seq[i : i + n] for i in count(0, n))) - if strict: - - def ret(): - for _slice in iterator: - if len(_slice) != n: - raise ValueError("seq is not divisible by n.") - yield _slice - - return iter(ret()) - else: - return iterator - - -def split_at(iterable, pred, maxsplit=-1, keep_separator=False): - """Yield lists of items from *iterable*, where each list is delimited by - an item where callable *pred* returns ``True``. - - >>> list(split_at('abcdcba', lambda x: x == 'b')) - [['a'], ['c', 'd', 'c'], ['a']] - - >>> list(split_at(range(10), lambda n: n % 2 == 1)) - [[0], [2], [4], [6], [8], []] - - At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, - then there is no limit on the number of splits: - - >>> list(split_at(range(10), lambda n: n % 2 == 1, maxsplit=2)) - [[0], [2], [4, 5, 6, 7, 8, 9]] - - By default, the delimiting items are not included in the output. - To include them, set *keep_separator* to ``True``. - - >>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True)) - [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']] - - """ - if maxsplit == 0: - yield list(iterable) - return - - buf = [] - it = iter(iterable) - for item in it: - if pred(item): - yield buf - if keep_separator: - yield [item] - if maxsplit == 1: - yield list(it) - return - buf = [] - maxsplit -= 1 - else: - buf.append(item) - yield buf - - -def split_before(iterable, pred, maxsplit=-1): - """Yield lists of items from *iterable*, where each list ends just before - an item for which callable *pred* returns ``True``: - - >>> list(split_before('OneTwo', lambda s: s.isupper())) - [['O', 'n', 'e'], ['T', 'w', 'o']] - - >>> list(split_before(range(10), lambda n: n % 3 == 0)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] - - At most *maxsplit* splits are done. 
If *maxsplit* is not specified or -1, - then there is no limit on the number of splits: - - >>> list(split_before(range(10), lambda n: n % 3 == 0, maxsplit=2)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]] - """ - if maxsplit == 0: - yield list(iterable) - return - - buf = [] - it = iter(iterable) - for item in it: - if pred(item) and buf: - yield buf - if maxsplit == 1: - yield [item] + list(it) - return - buf = [] - maxsplit -= 1 - buf.append(item) - if buf: - yield buf - - -def split_after(iterable, pred, maxsplit=-1): - """Yield lists of items from *iterable*, where each list ends with an - item where callable *pred* returns ``True``: - - >>> list(split_after('one1two2', lambda s: s.isdigit())) - [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']] - - >>> list(split_after(range(10), lambda n: n % 3 == 0)) - [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]] - - At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, - then there is no limit on the number of splits: - - >>> list(split_after(range(10), lambda n: n % 3 == 0, maxsplit=2)) - [[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]] - - """ - if maxsplit == 0: - yield list(iterable) - return - - buf = [] - it = iter(iterable) - for item in it: - buf.append(item) - if pred(item) and buf: - yield buf - if maxsplit == 1: - buf = list(it) - if buf: - yield buf - return - buf = [] - maxsplit -= 1 - if buf: - yield buf - - -def split_when(iterable, pred, maxsplit=-1): - """Split *iterable* into pieces based on the output of *pred*. - *pred* should be a function that takes successive pairs of items and - returns ``True`` if the iterable should be split in between them. - - For example, to find runs of increasing numbers, split the iterable when - element ``i`` is larger than element ``i + 1``: - - >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], lambda x, y: x > y)) - [[1, 2, 3, 3], [2, 5], [2, 4], [2]] - - At most *maxsplit* splits are done. If *maxsplit* is not specified or -1, - then there is no limit on the number of splits: - - >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], - ... lambda x, y: x > y, maxsplit=2)) - [[1, 2, 3, 3], [2, 5], [2, 4, 2]] - - """ - if maxsplit == 0: - yield list(iterable) - return - - it = iter(iterable) - try: - cur_item = next(it) - except StopIteration: - return - - buf = [cur_item] - for next_item in it: - if pred(cur_item, next_item): - yield buf - if maxsplit == 1: - yield [next_item] + list(it) - return - buf = [] - maxsplit -= 1 - - buf.append(next_item) - cur_item = next_item - - yield buf - - -def split_into(iterable, sizes): - """Yield a list of sequential items from *iterable* of length 'n' for each - integer 'n' in *sizes*. - - >>> list(split_into([1,2,3,4,5,6], [1,2,3])) - [[1], [2, 3], [4, 5, 6]] - - If the sum of *sizes* is smaller than the length of *iterable*, then the - remaining items of *iterable* will not be returned. - - >>> list(split_into([1,2,3,4,5,6], [2,3])) - [[1, 2], [3, 4, 5]] - - If the sum of *sizes* is larger than the length of *iterable*, fewer items - will be returned in the iteration that overruns *iterable* and further - lists will be empty: - - >>> list(split_into([1,2,3,4], [1,2,3,4])) - [[1], [2, 3], [4], []] - - When a ``None`` object is encountered in *sizes*, the returned list will - contain items up to the end of *iterable* the same way that itertools.slice - does: - - >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None])) - [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]] - - :func:`split_into` can be useful for grouping a series of items where the - sizes of the groups are not uniform. 
An example would be where in a row - from a table, multiple columns represent elements of the same feature - (e.g. a point represented by x,y,z) but, the format is not the same for - all columns. - """ - # convert the iterable argument into an iterator so its contents can - # be consumed by islice in case it is a generator - it = iter(iterable) - - for size in sizes: - if size is None: - yield list(it) - return - else: - yield list(islice(it, size)) - - -def padded(iterable, fillvalue=None, n=None, next_multiple=False): - """Yield the elements from *iterable*, followed by *fillvalue*, such that - at least *n* items are emitted. - - >>> list(padded([1, 2, 3], '?', 5)) - [1, 2, 3, '?', '?'] - - If *next_multiple* is ``True``, *fillvalue* will be emitted until the - number of items emitted is a multiple of *n*: - - >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True)) - [1, 2, 3, 4, None, None] - - If *n* is ``None``, *fillvalue* will be emitted indefinitely. - - To create an *iterable* of exactly size *n*, you can truncate with - :func:`islice`. - - >>> list(islice(padded([1, 2, 3], '?'), 5)) - [1, 2, 3, '?', '?'] - >>> list(islice(padded([1, 2, 3, 4, 5, 6, 7, 8], '?'), 5)) - [1, 2, 3, 4, 5] - - """ - iterable = iter(iterable) - iterable_with_repeat = chain(iterable, repeat(fillvalue)) - - if n is None: - return iterable_with_repeat - elif n < 1: - raise ValueError('n must be at least 1') - elif next_multiple: - - def slice_generator(): - for first in iterable: - yield (first,) - yield islice(iterable_with_repeat, n - 1) - - # While elements exist produce slices of size n - return chain.from_iterable(slice_generator()) - else: - # Ensure the first batch is at least size n then iterate - return chain(islice(iterable_with_repeat, n), iterable) - - -def repeat_each(iterable, n=2): - """Repeat each element in *iterable* *n* times. - - >>> list(repeat_each('ABC', 3)) - ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C'] - """ - return chain.from_iterable(map(repeat, iterable, repeat(n))) - - -def repeat_last(iterable, default=None): - """After the *iterable* is exhausted, keep yielding its last element. - - >>> list(islice(repeat_last(range(3)), 5)) - [0, 1, 2, 2, 2] - - If the iterable is empty, yield *default* forever:: - - >>> list(islice(repeat_last(range(0), 42), 5)) - [42, 42, 42, 42, 42] - - """ - item = _marker - for item in iterable: - yield item - final = default if item is _marker else item - yield from repeat(final) - - -def distribute(n, iterable): - """Distribute the items from *iterable* among *n* smaller iterables. - - >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6]) - >>> list(group_1) - [1, 3, 5] - >>> list(group_2) - [2, 4, 6] - - If the length of *iterable* is not evenly divisible by *n*, then the - length of the returned iterables will not be identical: - - >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7]) - >>> [list(c) for c in children] - [[1, 4, 7], [2, 5], [3, 6]] - - If the length of *iterable* is smaller than *n*, then the last returned - iterables will be empty: - - >>> children = distribute(5, [1, 2, 3]) - >>> [list(c) for c in children] - [[1], [2], [3], [], []] - - This function uses :func:`itertools.tee` and may require significant - storage. - - If you need the order items in the smaller iterables to match the - original iterable, see :func:`divide`. 
- - """ - if n < 1: - raise ValueError('n must be at least 1') - - children = tee(iterable, n) - return [islice(it, index, None, n) for index, it in enumerate(children)] - - -def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None): - """Yield tuples whose elements are offset from *iterable*. - The amount by which the `i`-th item in each tuple is offset is given by - the `i`-th item in *offsets*. - - >>> list(stagger([0, 1, 2, 3])) - [(None, 0, 1), (0, 1, 2), (1, 2, 3)] - >>> list(stagger(range(8), offsets=(0, 2, 4))) - [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)] - - By default, the sequence will end when the final element of a tuple is the - last item in the iterable. To continue until the first element of a tuple - is the last item in the iterable, set *longest* to ``True``:: - - >>> list(stagger([0, 1, 2, 3], longest=True)) - [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)] - - By default, ``None`` will be used to replace offsets beyond the end of the - sequence. Specify *fillvalue* to use some other value. - - """ - children = tee(iterable, len(offsets)) - - return zip_offset( - *children, offsets=offsets, longest=longest, fillvalue=fillvalue - ) - - -def zip_equal(*iterables): - """``zip`` the input *iterables* together, but raise - ``UnequalIterablesError`` if they aren't all the same length. - - >>> it_1 = range(3) - >>> it_2 = iter('abc') - >>> list(zip_equal(it_1, it_2)) - [(0, 'a'), (1, 'b'), (2, 'c')] - - >>> it_1 = range(3) - >>> it_2 = iter('abcd') - >>> list(zip_equal(it_1, it_2)) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - more_itertools.more.UnequalIterablesError: Iterables have different - lengths - - """ - if hexversion >= 0x30A00A6: - warnings.warn( - ( - 'zip_equal will be removed in a future version of ' - 'more-itertools. Use the builtin zip function with ' - 'strict=True instead.' - ), - DeprecationWarning, - ) - - return _zip_equal(*iterables) - - -def zip_offset(*iterables, offsets, longest=False, fillvalue=None): - """``zip`` the input *iterables* together, but offset the `i`-th iterable - by the `i`-th item in *offsets*. - - >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1))) - [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')] - - This can be used as a lightweight alternative to SciPy or pandas to analyze - data sets in which some series have a lead or lag relationship. - - By default, the sequence will end when the shortest iterable is exhausted. - To continue until the longest iterable is exhausted, set *longest* to - ``True``. - - >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True)) - [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')] - - By default, ``None`` will be used to replace offsets beyond the end of the - sequence. Specify *fillvalue* to use some other value. - - """ - if len(iterables) != len(offsets): - raise ValueError("Number of iterables and offsets didn't match") - - staggered = [] - for it, n in zip(iterables, offsets): - if n < 0: - staggered.append(chain(repeat(fillvalue, -n), it)) - elif n > 0: - staggered.append(islice(it, n, None)) - else: - staggered.append(it) - - if longest: - return zip_longest(*staggered, fillvalue=fillvalue) - - return zip(*staggered) - - -def sort_together(iterables, key_list=(0,), key=None, reverse=False): - """Return the input iterables sorted together, with *key_list* as the - priority for sorting. All iterables are trimmed to the length of the - shortest one. 
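-
-    For example (an illustrative sketch of the trimming), items beyond the
-    length of the shortest input are dropped (here, the ``2``):
-
-    >>> sort_together([(3, 1, 2), ('c', 'a')])
-    [(1, 3), ('a', 'c')]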
- - This can be used like the sorting function in a spreadsheet. If each - iterable represents a column of data, the key list determines which - columns are used for sorting. - - By default, all iterables are sorted using the ``0``-th iterable:: - - >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')] - >>> sort_together(iterables) - [(1, 2, 3, 4), ('d', 'c', 'b', 'a')] - - Set a different key list to sort according to another iterable. - Specifying multiple keys dictates how ties are broken:: - - >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')] - >>> sort_together(iterables, key_list=(1, 2)) - [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')] - - To sort by a function of the elements of the iterable, pass a *key* - function. Its arguments are the elements of the iterables corresponding to - the key list:: - - >>> names = ('a', 'b', 'c') - >>> lengths = (1, 2, 3) - >>> widths = (5, 2, 1) - >>> def area(length, width): - ... return length * width - >>> sort_together([names, lengths, widths], key_list=(1, 2), key=area) - [('c', 'b', 'a'), (3, 2, 1), (1, 2, 5)] - - Set *reverse* to ``True`` to sort in descending order. - - >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True) - [(3, 2, 1), ('a', 'b', 'c')] - - """ - if key is None: - # if there is no key function, the key argument to sorted is an - # itemgetter - key_argument = itemgetter(*key_list) - else: - # if there is a key function, call it with the items at the offsets - # specified by the key function as arguments - key_list = list(key_list) - if len(key_list) == 1: - # if key_list contains a single item, pass the item at that offset - # as the only argument to the key function - key_offset = key_list[0] - key_argument = lambda zipped_items: key(zipped_items[key_offset]) - else: - # if key_list contains multiple items, use itemgetter to return a - # tuple of items, which we pass as *args to the key function - get_key_items = itemgetter(*key_list) - key_argument = lambda zipped_items: key( - *get_key_items(zipped_items) - ) - - return list( - zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse)) - ) - - -def unzip(iterable): - """The inverse of :func:`zip`, this function disaggregates the elements - of the zipped *iterable*. - - The ``i``-th iterable contains the ``i``-th element from each element - of the zipped iterable. The first element is used to determine the - length of the remaining elements. - - >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] - >>> letters, numbers = unzip(iterable) - >>> list(letters) - ['a', 'b', 'c', 'd'] - >>> list(numbers) - [1, 2, 3, 4] - - This is similar to using ``zip(*iterable)``, but it avoids reading - *iterable* into memory. Note, however, that this function uses - :func:`itertools.tee` and thus may require significant storage. - - """ - head, iterable = spy(iter(iterable)) - if not head: - # empty iterable, e.g. 
zip([], [], []) - return () - # spy returns a one-length iterable as head - head = head[0] - iterables = tee(iterable, len(head)) - - def itemgetter(i): - def getter(obj): - try: - return obj[i] - except IndexError: - # basically if we have an iterable like - # iter([(1, 2, 3), (4, 5), (6,)]) - # the second unzipped iterable would fail at the third tuple - # since it would try to access tup[1] - # same with the third unzipped iterable and the second tuple - # to support these "improperly zipped" iterables, - # we create a custom itemgetter - # which just stops the unzipped iterables - # at first length mismatch - raise StopIteration - - return getter - - return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables)) - - -def divide(n, iterable): - """Divide the elements from *iterable* into *n* parts, maintaining - order. - - >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6]) - >>> list(group_1) - [1, 2, 3] - >>> list(group_2) - [4, 5, 6] - - If the length of *iterable* is not evenly divisible by *n*, then the - length of the returned iterables will not be identical: - - >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7]) - >>> [list(c) for c in children] - [[1, 2, 3], [4, 5], [6, 7]] - - If the length of the iterable is smaller than n, then the last returned - iterables will be empty: - - >>> children = divide(5, [1, 2, 3]) - >>> [list(c) for c in children] - [[1], [2], [3], [], []] - - This function will exhaust the iterable before returning. - If order is not important, see :func:`distribute`, which does not first - pull the iterable into memory. - - """ - if n < 1: - raise ValueError('n must be at least 1') - - try: - iterable[:0] - except TypeError: - seq = tuple(iterable) - else: - seq = iterable - - q, r = divmod(len(seq), n) - - ret = [] - stop = 0 - for i in range(1, n + 1): - start = stop - stop += q + 1 if i <= r else q - ret.append(iter(seq[start:stop])) - - return ret - - -def always_iterable(obj, base_type=(str, bytes)): - """If *obj* is iterable, return an iterator over its items:: - - >>> obj = (1, 2, 3) - >>> list(always_iterable(obj)) - [1, 2, 3] - - If *obj* is not iterable, return a one-item iterable containing *obj*:: - - >>> obj = 1 - >>> list(always_iterable(obj)) - [1] - - If *obj* is ``None``, return an empty iterable: - - >>> obj = None - >>> list(always_iterable(None)) - [] - - By default, binary and text strings are not considered iterable:: - - >>> obj = 'foo' - >>> list(always_iterable(obj)) - ['foo'] - - If *base_type* is set, objects for which ``isinstance(obj, base_type)`` - returns ``True`` won't be considered iterable. - - >>> obj = {'a': 1} - >>> list(always_iterable(obj)) # Iterate over the dict's keys - ['a'] - >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit - [{'a': 1}] - - Set *base_type* to ``None`` to avoid any special handling and treat objects - Python considers iterable as iterable: - - >>> obj = 'foo' - >>> list(always_iterable(obj, base_type=None)) - ['f', 'o', 'o'] - """ - if obj is None: - return iter(()) - - if (base_type is not None) and isinstance(obj, base_type): - return iter((obj,)) - - try: - return iter(obj) - except TypeError: - return iter((obj,)) - - -def adjacent(predicate, iterable, distance=1): - """Return an iterable over `(bool, item)` tuples where the `item` is - drawn from *iterable* and the `bool` indicates whether - that item satisfies the *predicate* or is adjacent to an item that does. 
- - For example, to find whether items are adjacent to a ``3``:: - - >>> list(adjacent(lambda x: x == 3, range(6))) - [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)] - - Set *distance* to change what counts as adjacent. For example, to find - whether items are two places away from a ``3``: - - >>> list(adjacent(lambda x: x == 3, range(6), distance=2)) - [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)] - - This is useful for contextualizing the results of a search function. - For example, a code comparison tool might want to identify lines that - have changed, but also surrounding lines to give the viewer of the diff - context. - - The predicate function will only be called once for each item in the - iterable. - - See also :func:`groupby_transform`, which can be used with this function - to group ranges of items with the same `bool` value. - - """ - # Allow distance=0 mainly for testing that it reproduces results with map() - if distance < 0: - raise ValueError('distance must be at least 0') - - i1, i2 = tee(iterable) - padding = [False] * distance - selected = chain(padding, map(predicate, i1), padding) - adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1)) - return zip(adjacent_to_selected, i2) - - -def groupby_transform(iterable, keyfunc=None, valuefunc=None, reducefunc=None): - """An extension of :func:`itertools.groupby` that can apply transformations - to the grouped data. - - * *keyfunc* is a function computing a key value for each item in *iterable* - * *valuefunc* is a function that transforms the individual items from - *iterable* after grouping - * *reducefunc* is a function that transforms each group of items - - >>> iterable = 'aAAbBBcCC' - >>> keyfunc = lambda k: k.upper() - >>> valuefunc = lambda v: v.lower() - >>> reducefunc = lambda g: ''.join(g) - >>> list(groupby_transform(iterable, keyfunc, valuefunc, reducefunc)) - [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')] - - Each optional argument defaults to an identity function if not specified. - - :func:`groupby_transform` is useful when grouping elements of an iterable - using a separate iterable as the key. To do this, :func:`zip` the iterables - and pass a *keyfunc* that extracts the first element and a *valuefunc* - that extracts the second element:: - - >>> from operator import itemgetter - >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3] - >>> values = 'abcdefghi' - >>> iterable = zip(keys, values) - >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1)) - >>> [(k, ''.join(g)) for k, g in grouper] - [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')] - - Note that the order of items in the iterable is significant. - Only adjacent items are grouped together, so if you don't want any - duplicate groups, you should sort the iterable by the key function. - - """ - ret = groupby(iterable, keyfunc) - if valuefunc: - ret = ((k, map(valuefunc, g)) for k, g in ret) - if reducefunc: - ret = ((k, reducefunc(g)) for k, g in ret) - - return ret - - -class numeric_range(abc.Sequence, abc.Hashable): - """An extension of the built-in ``range()`` function whose arguments can - be any orderable numeric type. - - With only *stop* specified, *start* defaults to ``0`` and *step* - defaults to ``1``. The output items will match the type of *stop*: - - >>> list(numeric_range(3.5)) - [0.0, 1.0, 2.0, 3.0] - - With only *start* and *stop* specified, *step* defaults to ``1``. 
The - output items will match the type of *start*: - - >>> from decimal import Decimal - >>> start = Decimal('2.1') - >>> stop = Decimal('5.1') - >>> list(numeric_range(start, stop)) - [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')] - - With *start*, *stop*, and *step* specified the output items will match - the type of ``start + step``: - - >>> from fractions import Fraction - >>> start = Fraction(1, 2) # Start at 1/2 - >>> stop = Fraction(5, 2) # End at 5/2 - >>> step = Fraction(1, 2) # Count by 1/2 - >>> list(numeric_range(start, stop, step)) - [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)] - - If *step* is zero, ``ValueError`` is raised. Negative steps are supported: - - >>> list(numeric_range(3, -1, -1.0)) - [3.0, 2.0, 1.0, 0.0] - - Be aware of the limitations of floating point numbers; the representation - of the yielded numbers may be surprising. - - ``datetime.datetime`` objects can be used for *start* and *stop*, if *step* - is a ``datetime.timedelta`` object: - - >>> import datetime - >>> start = datetime.datetime(2019, 1, 1) - >>> stop = datetime.datetime(2019, 1, 3) - >>> step = datetime.timedelta(days=1) - >>> items = iter(numeric_range(start, stop, step)) - >>> next(items) - datetime.datetime(2019, 1, 1, 0, 0) - >>> next(items) - datetime.datetime(2019, 1, 2, 0, 0) - - """ - - _EMPTY_HASH = hash(range(0, 0)) - - def __init__(self, *args): - argc = len(args) - if argc == 1: - (self._stop,) = args - self._start = type(self._stop)(0) - self._step = type(self._stop - self._start)(1) - elif argc == 2: - self._start, self._stop = args - self._step = type(self._stop - self._start)(1) - elif argc == 3: - self._start, self._stop, self._step = args - elif argc == 0: - raise TypeError( - 'numeric_range expected at least ' - '1 argument, got {}'.format(argc) - ) - else: - raise TypeError( - 'numeric_range expected at most ' - '3 arguments, got {}'.format(argc) - ) - - self._zero = type(self._step)(0) - if self._step == self._zero: - raise ValueError('numeric_range() arg 3 must not be zero') - self._growing = self._step > self._zero - - def __bool__(self): - if self._growing: - return self._start < self._stop - else: - return self._start > self._stop - - def __contains__(self, elem): - if self._growing: - if self._start <= elem < self._stop: - return (elem - self._start) % self._step == self._zero - else: - if self._start >= elem > self._stop: - return (self._start - elem) % (-self._step) == self._zero - - return False - - def __eq__(self, other): - if isinstance(other, numeric_range): - empty_self = not bool(self) - empty_other = not bool(other) - if empty_self or empty_other: - return empty_self and empty_other # True if both empty - else: - return ( - self._start == other._start - and self._step == other._step - and self._get_by_index(-1) == other._get_by_index(-1) - ) - else: - return False - - def __getitem__(self, key): - if isinstance(key, int): - return self._get_by_index(key) - elif isinstance(key, slice): - step = self._step if key.step is None else key.step * self._step - - if key.start is None or key.start <= -self._len: - start = self._start - elif key.start >= self._len: - start = self._stop - else: # -self._len < key.start < self._len - start = self._get_by_index(key.start) - - if key.stop is None or key.stop >= self._len: - stop = self._stop - elif key.stop <= -self._len: - stop = self._start - else: # -self._len < key.stop < self._len - stop = self._get_by_index(key.stop) - - return numeric_range(start, stop, step) - else: - raise TypeError( - 'numeric 
range indices must be ' - 'integers or slices, not {}'.format(type(key).__name__) - ) - - def __hash__(self): - if self: - return hash((self._start, self._get_by_index(-1), self._step)) - else: - return self._EMPTY_HASH - - def __iter__(self): - values = (self._start + (n * self._step) for n in count()) - if self._growing: - return takewhile(partial(gt, self._stop), values) - else: - return takewhile(partial(lt, self._stop), values) - - def __len__(self): - return self._len - - @cached_property - def _len(self): - if self._growing: - start = self._start - stop = self._stop - step = self._step - else: - start = self._stop - stop = self._start - step = -self._step - distance = stop - start - if distance <= self._zero: - return 0 - else: # distance > 0 and step > 0: regular euclidean division - q, r = divmod(distance, step) - return int(q) + int(r != self._zero) - - def __reduce__(self): - return numeric_range, (self._start, self._stop, self._step) - - def __repr__(self): - if self._step == 1: - return "numeric_range({}, {})".format( - repr(self._start), repr(self._stop) - ) - else: - return "numeric_range({}, {}, {})".format( - repr(self._start), repr(self._stop), repr(self._step) - ) - - def __reversed__(self): - return iter( - numeric_range( - self._get_by_index(-1), self._start - self._step, -self._step - ) - ) - - def count(self, value): - return int(value in self) - - def index(self, value): - if self._growing: - if self._start <= value < self._stop: - q, r = divmod(value - self._start, self._step) - if r == self._zero: - return int(q) - else: - if self._start >= value > self._stop: - q, r = divmod(self._start - value, -self._step) - if r == self._zero: - return int(q) - - raise ValueError("{} is not in numeric range".format(value)) - - def _get_by_index(self, i): - if i < 0: - i += self._len - if i < 0 or i >= self._len: - raise IndexError("numeric range object index out of range") - return self._start + i * self._step - - -def count_cycle(iterable, n=None): - """Cycle through the items from *iterable* up to *n* times, yielding - the number of completed cycles along with each item. If *n* is omitted the - process repeats indefinitely. - - >>> list(count_cycle('AB', 3)) - [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')] - - """ - iterable = tuple(iterable) - if not iterable: - return iter(()) - counter = count() if n is None else range(n) - return ((i, item) for i in counter for item in iterable) - - -def mark_ends(iterable): - """Yield 3-tuples of the form ``(is_first, is_last, item)``. - - >>> list(mark_ends('ABC')) - [(True, False, 'A'), (False, False, 'B'), (False, True, 'C')] - - Use this when looping over an iterable to take special action on its first - and/or last items: - - >>> iterable = ['Header', 100, 200, 'Footer'] - >>> total = 0 - >>> for is_first, is_last, item in mark_ends(iterable): - ... if is_first: - ... continue # Skip the header - ... if is_last: - ... continue # Skip the footer - ... total += item - >>> print(total) - 300 - """ - it = iter(iterable) - - try: - b = next(it) - except StopIteration: - return - - try: - for i in count(): - a = b - b = next(it) - yield i == 0, False, a - - except StopIteration: - yield i == 0, True, a - - -def locate(iterable, pred=bool, window_size=None): - """Yield the index of each item in *iterable* for which *pred* returns - ``True``. 
-
-    *pred* defaults to :func:`bool`, which will select truthy items:
-
-    >>> list(locate([0, 1, 1, 0, 1, 0, 0]))
-    [1, 2, 4]
-
-    Set *pred* to a custom function to, e.g., find the indexes for a particular
-    item.
-
-    >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b'))
-    [1, 3]
-
-    If *window_size* is given, then the *pred* function will be called with
-    that many items. This enables searching for sub-sequences:
-
-    >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
-    >>> pred = lambda *args: args == (1, 2, 3)
-    >>> list(locate(iterable, pred=pred, window_size=3))
-    [1, 5, 9]
-
-    Use with :func:`seekable` to find indexes and then retrieve the associated
-    items:
-
-    >>> from itertools import count
-    >>> from more_itertools import seekable
-    >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count())
-    >>> it = seekable(source)
-    >>> pred = lambda x: x > 100
-    >>> indexes = locate(it, pred=pred)
-    >>> i = next(indexes)
-    >>> it.seek(i)
-    >>> next(it)
-    106
-
-    """
-    if window_size is None:
-        return compress(count(), map(pred, iterable))
-
-    if window_size < 1:
-        raise ValueError('window size must be at least 1')
-
-    it = windowed(iterable, window_size, fillvalue=_marker)
-    return compress(count(), starmap(pred, it))
-
-
-def longest_common_prefix(iterables):
-    """Yield elements of the longest common prefix amongst given *iterables*.
-
-    >>> ''.join(longest_common_prefix(['abcd', 'abc', 'abf']))
-    'ab'
-
-    """
-    return (c[0] for c in takewhile(all_equal, zip(*iterables)))
-
-
-def lstrip(iterable, pred):
-    """Yield the items from *iterable*, but strip any from the beginning
-    for which *pred* returns ``True``.
-
-    For example, to remove a set of items from the start of an iterable:
-
-    >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
-    >>> pred = lambda x: x in {None, False, ''}
-    >>> list(lstrip(iterable, pred))
-    [1, 2, None, 3, False, None]
-
-    This function is analogous to :func:`str.lstrip`, and is essentially
-    a wrapper for :func:`itertools.dropwhile`.
-
-    """
-    return dropwhile(pred, iterable)
-
-
-def rstrip(iterable, pred):
-    """Yield the items from *iterable*, but strip any from the end
-    for which *pred* returns ``True``.
-
-    For example, to remove a set of items from the end of an iterable:
-
-    >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
-    >>> pred = lambda x: x in {None, False, ''}
-    >>> list(rstrip(iterable, pred))
-    [None, False, None, 1, 2, None, 3]
-
-    This function is analogous to :func:`str.rstrip`.
-
-    """
-    cache = []
-    cache_append = cache.append
-    cache_clear = cache.clear
-    for x in iterable:
-        if pred(x):
-            cache_append(x)
-        else:
-            yield from cache
-            cache_clear()
-            yield x
-
-
-def strip(iterable, pred):
-    """Yield the items from *iterable*, but strip any from the
-    beginning and end for which *pred* returns ``True``.
-
-    For example, to remove a set of items from both ends of an iterable:
-
-    >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
-    >>> pred = lambda x: x in {None, False, ''}
-    >>> list(strip(iterable, pred))
-    [1, 2, None, 3]
-
-    This function is analogous to :func:`str.strip`.
-
-    """
-    return rstrip(lstrip(iterable, pred), pred)
-
-
-class islice_extended:
-    """An extension of :func:`itertools.islice` that supports negative values
-    for *stop*, *start*, and *step*.
- - >>> iterable = iter('abcdefgh') - >>> list(islice_extended(iterable, -4, -1)) - ['e', 'f', 'g'] - - Slices with negative values require some caching of *iterable*, but this - function takes care to minimize the amount of memory required. - - For example, you can use a negative step with an infinite iterator: - - >>> from itertools import count - >>> list(islice_extended(count(), 110, 99, -2)) - [110, 108, 106, 104, 102, 100] - - You can also use slice notation directly: - - >>> iterable = map(str, count()) - >>> it = islice_extended(iterable)[10:20:2] - >>> list(it) - ['10', '12', '14', '16', '18'] - - """ - - def __init__(self, iterable, *args): - it = iter(iterable) - if args: - self._iterable = _islice_helper(it, slice(*args)) - else: - self._iterable = it - - def __iter__(self): - return self - - def __next__(self): - return next(self._iterable) - - def __getitem__(self, key): - if isinstance(key, slice): - return islice_extended(_islice_helper(self._iterable, key)) - - raise TypeError('islice_extended.__getitem__ argument must be a slice') - - -def _islice_helper(it, s): - start = s.start - stop = s.stop - if s.step == 0: - raise ValueError('step argument must be a non-zero integer or None.') - step = s.step or 1 - - if step > 0: - start = 0 if (start is None) else start - - if start < 0: - # Consume all but the last -start items - cache = deque(enumerate(it, 1), maxlen=-start) - len_iter = cache[-1][0] if cache else 0 - - # Adjust start to be positive - i = max(len_iter + start, 0) - - # Adjust stop to be positive - if stop is None: - j = len_iter - elif stop >= 0: - j = min(stop, len_iter) - else: - j = max(len_iter + stop, 0) - - # Slice the cache - n = j - i - if n <= 0: - return - - for index, item in islice(cache, 0, n, step): - yield item - elif (stop is not None) and (stop < 0): - # Advance to the start position - next(islice(it, start, start), None) - - # When stop is negative, we have to carry -stop items while - # iterating - cache = deque(islice(it, -stop), maxlen=-stop) - - for index, item in enumerate(it): - cached_item = cache.popleft() - if index % step == 0: - yield cached_item - cache.append(item) - else: - # When both start and stop are positive we have the normal case - yield from islice(it, start, stop, step) - else: - start = -1 if (start is None) else start - - if (stop is not None) and (stop < 0): - # Consume all but the last items - n = -stop - 1 - cache = deque(enumerate(it, 1), maxlen=n) - len_iter = cache[-1][0] if cache else 0 - - # If start and stop are both negative they are comparable and - # we can just slice. Otherwise we can adjust start to be negative - # and then slice. - if start < 0: - i, j = start, stop - else: - i, j = min(start - len_iter, -1), None - - for index, item in list(cache)[i:j:step]: - yield item - else: - # Advance to the stop position - if stop is not None: - m = stop + 1 - next(islice(it, m, m), None) - - # stop is positive, so if start is negative they are not comparable - # and we need the rest of the items. - if start < 0: - i = start - n = None - # stop is None and start is positive, so we just need items up to - # the start index. - elif stop is None: - i = None - n = start + 1 - # Both stop and start are positive, so they are comparable. 
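-            # (An illustrative trace: for s[5:2:-1] over an iterator of
-            # 0, 1, 2, ..., the advance above consumes items 0, 1, 2; the
-            # branch below then caches n = 5 - 2 = 3 items and slices them
-            # backward, yielding 5, 4, 3, as the equivalent list slice would.)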
-            else:
-                i = None
-                n = start - stop
-                if n <= 0:
-                    return
-
-            cache = list(islice(it, n))
-
-            yield from cache[i::step]
-
-
-def always_reversible(iterable):
-    """An extension of :func:`reversed` that supports all iterables, not
-    just those which implement the ``Reversible`` or ``Sequence`` protocols.
-
-    >>> print(*always_reversible(x for x in range(3)))
-    2 1 0
-
-    If the iterable is already reversible, this function returns the
-    result of :func:`reversed()`. If the iterable is not reversible,
-    this function will cache the remaining items in the iterable and
-    yield them in reverse order, which may require significant storage.
-    """
-    try:
-        return reversed(iterable)
-    except TypeError:
-        return reversed(list(iterable))
-
-
-def consecutive_groups(iterable, ordering=lambda x: x):
-    """Yield groups of consecutive items using :func:`itertools.groupby`.
-    The *ordering* function determines whether two items are adjacent by
-    returning their position.
-
-    By default, the ordering function is the identity function. This is
-    suitable for finding runs of numbers:
-
-    >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40]
-    >>> for group in consecutive_groups(iterable):
-    ...     print(list(group))
-    [1]
-    [10, 11, 12]
-    [20]
-    [30, 31, 32, 33]
-    [40]
-
-    For finding runs of adjacent letters, try using the :meth:`index` method
-    of a string of letters:
-
-    >>> from string import ascii_lowercase
-    >>> iterable = 'abcdfgilmnop'
-    >>> ordering = ascii_lowercase.index
-    >>> for group in consecutive_groups(iterable, ordering):
-    ...     print(list(group))
-    ['a', 'b', 'c', 'd']
-    ['f', 'g']
-    ['i']
-    ['l', 'm', 'n', 'o', 'p']
-
-    Each group of consecutive items is an iterator that shares its source
-    with *iterable*. When an output group is advanced, the previous group is
-    no longer available unless its elements are copied (e.g., into a ``list``).
-
-    >>> iterable = [1, 2, 11, 12, 21, 22]
-    >>> saved_groups = []
-    >>> for group in consecutive_groups(iterable):
-    ...     saved_groups.append(list(group))  # Copy group elements
-    >>> saved_groups
-    [[1, 2], [11, 12], [21, 22]]
-
-    """
-    for k, g in groupby(
-        enumerate(iterable), key=lambda x: x[0] - ordering(x[1])
-    ):
-        yield map(itemgetter(1), g)
-
-
-def difference(iterable, func=sub, *, initial=None):
-    """This function is the inverse of :func:`itertools.accumulate`. By default
-    it will compute the first difference of *iterable* using
-    :func:`operator.sub`:
-
-    >>> from itertools import accumulate
-    >>> iterable = accumulate([0, 1, 2, 3, 4])  # produces 0, 1, 3, 6, 10
-    >>> list(difference(iterable))
-    [0, 1, 2, 3, 4]
-
-    *func* defaults to :func:`operator.sub`, but other functions can be
-    specified. They will be applied as follows::
-
-        A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ...
-
-    For example, to do progressive division:
-
-    >>> iterable = [1, 2, 6, 24, 120]
-    >>> func = lambda x, y: x // y
-    >>> list(difference(iterable, func))
-    [1, 2, 3, 4, 5]
-
-    If the *initial* keyword is set, the first element will be skipped when
-    computing successive differences.
-
-    >>> it = [10, 11, 13, 16]  # from accumulate([1, 2, 3], initial=10)
-    >>> list(difference(it, initial=10))
-    [1, 2, 3]
-
-    """
-    a, b = tee(iterable)
-    try:
-        first = [next(b)]
-    except StopIteration:
-        return iter([])
-
-    if initial is not None:
-        first = []
-
-    return chain(first, map(func, b, a))
-
-
-class SequenceView(Sequence):
-    """Return a read-only view of the sequence object *target*.
- - :class:`SequenceView` objects are analogous to Python's built-in - "dictionary view" types. They provide a dynamic view of a sequence's items, - meaning that when the sequence updates, so does the view. - - >>> seq = ['0', '1', '2'] - >>> view = SequenceView(seq) - >>> view - SequenceView(['0', '1', '2']) - >>> seq.append('3') - >>> view - SequenceView(['0', '1', '2', '3']) - - Sequence views support indexing, slicing, and length queries. They act - like the underlying sequence, except they don't allow assignment: - - >>> view[1] - '1' - >>> view[1:-1] - ['1', '2'] - >>> len(view) - 4 - - Sequence views are useful as an alternative to copying, as they don't - require (much) extra storage. - - """ - - def __init__(self, target): - if not isinstance(target, Sequence): - raise TypeError - self._target = target - - def __getitem__(self, index): - return self._target[index] - - def __len__(self): - return len(self._target) - - def __repr__(self): - return '{}({})'.format(self.__class__.__name__, repr(self._target)) - - -class seekable: - """Wrap an iterator to allow for seeking backward and forward. This - progressively caches the items in the source iterable so they can be - re-visited. - - Call :meth:`seek` with an index to seek to that position in the source - iterable. - - To "reset" an iterator, seek to ``0``: - - >>> from itertools import count - >>> it = seekable((str(n) for n in count())) - >>> next(it), next(it), next(it) - ('0', '1', '2') - >>> it.seek(0) - >>> next(it), next(it), next(it) - ('0', '1', '2') - >>> next(it) - '3' - - You can also seek forward: - - >>> it = seekable((str(n) for n in range(20))) - >>> it.seek(10) - >>> next(it) - '10' - >>> it.relative_seek(-2) # Seeking relative to the current position - >>> next(it) - '9' - >>> it.seek(20) # Seeking past the end of the source isn't a problem - >>> list(it) - [] - >>> it.seek(0) # Resetting works even after hitting the end - >>> next(it), next(it), next(it) - ('0', '1', '2') - - Call :meth:`peek` to look ahead one item without advancing the iterator: - - >>> it = seekable('1234') - >>> it.peek() - '1' - >>> list(it) - ['1', '2', '3', '4'] - >>> it.peek(default='empty') - 'empty' - - Before the iterator is at its end, calling :func:`bool` on it will return - ``True``. After it will return ``False``: - - >>> it = seekable('5678') - >>> bool(it) - True - >>> list(it) - ['5', '6', '7', '8'] - >>> bool(it) - False - - You may view the contents of the cache with the :meth:`elements` method. - That returns a :class:`SequenceView`, a view that updates automatically: - - >>> it = seekable((str(n) for n in range(10))) - >>> next(it), next(it), next(it) - ('0', '1', '2') - >>> elements = it.elements() - >>> elements - SequenceView(['0', '1', '2']) - >>> next(it) - '3' - >>> elements - SequenceView(['0', '1', '2', '3']) - - By default, the cache grows as the source iterable progresses, so beware of - wrapping very large or infinite iterables. Supply *maxlen* to limit the - size of the cache (this of course limits how far back you can seek). 
- - >>> from itertools import count - >>> it = seekable((str(n) for n in count()), maxlen=2) - >>> next(it), next(it), next(it), next(it) - ('0', '1', '2', '3') - >>> list(it.elements()) - ['2', '3'] - >>> it.seek(0) - >>> next(it), next(it), next(it), next(it) - ('2', '3', '4', '5') - >>> next(it) - '6' - - """ - - def __init__(self, iterable, maxlen=None): - self._source = iter(iterable) - if maxlen is None: - self._cache = [] - else: - self._cache = deque([], maxlen) - self._index = None - - def __iter__(self): - return self - - def __next__(self): - if self._index is not None: - try: - item = self._cache[self._index] - except IndexError: - self._index = None - else: - self._index += 1 - return item - - item = next(self._source) - self._cache.append(item) - return item - - def __bool__(self): - try: - self.peek() - except StopIteration: - return False - return True - - def peek(self, default=_marker): - try: - peeked = next(self) - except StopIteration: - if default is _marker: - raise - return default - if self._index is None: - self._index = len(self._cache) - self._index -= 1 - return peeked - - def elements(self): - return SequenceView(self._cache) - - def seek(self, index): - self._index = index - remainder = index - len(self._cache) - if remainder > 0: - consume(self, remainder) - - def relative_seek(self, count): - index = len(self._cache) - self.seek(max(index + count, 0)) - - -class run_length: - """ - :func:`run_length.encode` compresses an iterable with run-length encoding. - It yields groups of repeated items with the count of how many times they - were repeated: - - >>> uncompressed = 'abbcccdddd' - >>> list(run_length.encode(uncompressed)) - [('a', 1), ('b', 2), ('c', 3), ('d', 4)] - - :func:`run_length.decode` decompresses an iterable that was previously - compressed with run-length encoding. It yields the items of the - decompressed iterable: - - >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] - >>> list(run_length.decode(compressed)) - ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd'] - - """ - - @staticmethod - def encode(iterable): - return ((k, ilen(g)) for k, g in groupby(iterable)) - - @staticmethod - def decode(iterable): - return chain.from_iterable(repeat(k, n) for k, n in iterable) - - -def exactly_n(iterable, n, predicate=bool): - """Return ``True`` if exactly ``n`` items in the iterable are ``True`` - according to the *predicate* function. - - >>> exactly_n([True, True, False], 2) - True - >>> exactly_n([True, True, False], 1) - False - >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3) - True - - The iterable will be advanced until ``n + 1`` truthy items are encountered, - so avoid calling it on infinite iterables. - - """ - return len(take(n + 1, filter(predicate, iterable))) == n - - -def circular_shifts(iterable): - """Return a list of circular shifts of *iterable*. - - >>> circular_shifts(range(4)) - [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)] - """ - lst = list(iterable) - return take(len(lst), windowed(cycle(lst), len(lst))) - - -def make_decorator(wrapping_func, result_index=0): - """Return a decorator version of *wrapping_func*, which is a function that - modifies an iterable. *result_index* is the position in that function's - signature where the iterable goes. - - This lets you use itertools on the "production end," i.e. at function - definition. This can augment what the function returns without changing the - function's code. 
- - For example, to produce a decorator version of :func:`chunked`: - - >>> from more_itertools import chunked - >>> chunker = make_decorator(chunked, result_index=0) - >>> @chunker(3) - ... def iter_range(n): - ... return iter(range(n)) - ... - >>> list(iter_range(9)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8]] - - To only allow truthy items to be returned: - - >>> truth_serum = make_decorator(filter, result_index=1) - >>> @truth_serum(bool) - ... def boolean_test(): - ... return [0, 1, '', ' ', False, True] - ... - >>> list(boolean_test()) - [1, ' ', True] - - The :func:`peekable` and :func:`seekable` wrappers make for practical - decorators: - - >>> from more_itertools import peekable - >>> peekable_function = make_decorator(peekable) - >>> @peekable_function() - ... def str_range(*args): - ... return (str(x) for x in range(*args)) - ... - >>> it = str_range(1, 20, 2) - >>> next(it), next(it), next(it) - ('1', '3', '5') - >>> it.peek() - '7' - >>> next(it) - '7' - - """ - - # See https://sites.google.com/site/bbayles/index/decorator_factory for - # notes on how this works. - def decorator(*wrapping_args, **wrapping_kwargs): - def outer_wrapper(f): - def inner_wrapper(*args, **kwargs): - result = f(*args, **kwargs) - wrapping_args_ = list(wrapping_args) - wrapping_args_.insert(result_index, result) - return wrapping_func(*wrapping_args_, **wrapping_kwargs) - - return inner_wrapper - - return outer_wrapper - - return decorator - - -def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None): - """Return a dictionary that maps the items in *iterable* to categories - defined by *keyfunc*, transforms them with *valuefunc*, and - then summarizes them by category with *reducefunc*. - - *valuefunc* defaults to the identity function if it is unspecified. - If *reducefunc* is unspecified, no summarization takes place: - - >>> keyfunc = lambda x: x.upper() - >>> result = map_reduce('abbccc', keyfunc) - >>> sorted(result.items()) - [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])] - - Specifying *valuefunc* transforms the categorized items: - - >>> keyfunc = lambda x: x.upper() - >>> valuefunc = lambda x: 1 - >>> result = map_reduce('abbccc', keyfunc, valuefunc) - >>> sorted(result.items()) - [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])] - - Specifying *reducefunc* summarizes the categorized items: - - >>> keyfunc = lambda x: x.upper() - >>> valuefunc = lambda x: 1 - >>> reducefunc = sum - >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc) - >>> sorted(result.items()) - [('A', 1), ('B', 2), ('C', 3)] - - You may want to filter the input iterable before applying the map/reduce - procedure: - - >>> all_items = range(30) - >>> items = [x for x in all_items if 10 <= x <= 20] # Filter - >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1 - >>> categories = map_reduce(items, keyfunc=keyfunc) - >>> sorted(categories.items()) - [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])] - >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum) - >>> sorted(summaries.items()) - [(0, 90), (1, 75)] - - Note that all items in the iterable are gathered into a list before the - summarization step, which may require significant storage. - - The returned object is a :obj:`collections.defaultdict` with the - ``default_factory`` set to ``None``, such that it behaves like a normal - dictionary. 
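-
-    As a quick doctest-style sketch of that last point, a missing key raises
-    ``KeyError`` instead of creating a new entry:
-
-    >>> result = map_reduce('abb', lambda x: x.upper())
-    >>> result['C']
-    Traceback (most recent call last):
-    ...
-    KeyError: 'C'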
-
-    """
-    valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc
-
-    ret = defaultdict(list)
-    for item in iterable:
-        key = keyfunc(item)
-        value = valuefunc(item)
-        ret[key].append(value)
-
-    if reducefunc is not None:
-        for key, value_list in ret.items():
-            ret[key] = reducefunc(value_list)
-
-    ret.default_factory = None
-    return ret
-
-
-def rlocate(iterable, pred=bool, window_size=None):
-    """Yield the index of each item in *iterable* for which *pred* returns
-    ``True``, starting from the right and moving left.
-
-    *pred* defaults to :func:`bool`, which will select truthy items:
-
-    >>> list(rlocate([0, 1, 1, 0, 1, 0, 0]))  # Truthy at 1, 2, and 4
-    [4, 2, 1]
-
-    Set *pred* to a custom function to, e.g., find the indexes for a particular
-    item:
-
-    >>> iterable = iter('abcb')
-    >>> pred = lambda x: x == 'b'
-    >>> list(rlocate(iterable, pred))
-    [3, 1]
-
-    If *window_size* is given, then the *pred* function will be called with
-    that many items. This enables searching for sub-sequences:
-
-    >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
-    >>> pred = lambda *args: args == (1, 2, 3)
-    >>> list(rlocate(iterable, pred=pred, window_size=3))
-    [9, 5, 1]
-
-    Beware, this function won't return anything for infinite iterables.
-    If *iterable* is reversible, ``rlocate`` will reverse it and search from
-    the right. Otherwise, it will search from the left and return the results
-    in reverse order.
-
-    See :func:`locate` for other example applications.
-
-    """
-    if window_size is None:
-        try:
-            len_iter = len(iterable)
-            return (len_iter - i - 1 for i in locate(reversed(iterable), pred))
-        except TypeError:
-            pass
-
-    return reversed(list(locate(iterable, pred, window_size)))
-
-
-def replace(iterable, pred, substitutes, count=None, window_size=1):
-    """Yield the items from *iterable*, replacing the items for which *pred*
-    returns ``True`` with the items from the iterable *substitutes*.
-
-    >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1]
-    >>> pred = lambda x: x == 0
-    >>> substitutes = (2, 3)
-    >>> list(replace(iterable, pred, substitutes))
-    [1, 1, 2, 3, 1, 1, 2, 3, 1, 1]
-
-    If *count* is given, the number of replacements will be limited:
-
-    >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0]
-    >>> pred = lambda x: x == 0
-    >>> substitutes = [None]
-    >>> list(replace(iterable, pred, substitutes, count=2))
-    [1, 1, None, 1, 1, None, 1, 1, 0]
-
-    Use *window_size* to control the number of items passed as arguments to
-    *pred*. This allows for locating and replacing subsequences.
-
-    >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5]
-    >>> window_size = 3
-    >>> pred = lambda *args: args == (0, 1, 2)  # 3 items passed to pred
-    >>> substitutes = [3, 4]  # Splice in these items
-    >>> list(replace(iterable, pred, substitutes, window_size=window_size))
-    [3, 4, 5, 3, 4, 5]
-
-    """
-    if window_size < 1:
-        raise ValueError('window_size must be at least 1')
-
-    # Save the substitutes iterable, since it's used more than once
-    substitutes = tuple(substitutes)
-
-    # Add padding such that the number of windows matches the length of the
-    # iterable
-    it = chain(iterable, [_marker] * (window_size - 1))
-    windows = windowed(it, window_size)
-
-    n = 0
-    for w in windows:
-        # If the current window matches our predicate (and we haven't hit
-        # our maximum number of replacements), splice in the substitutes
-        # and then consume the following windows that overlap with this one.
-        # For example, if the iterable is (0, 1, 2, 3, 4...)
-        # and the window size is 2, we have (0, 1), (1, 2), (2, 3)...
- # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2) - if pred(*w): - if (count is None) or (n < count): - n += 1 - yield from substitutes - consume(windows, window_size - 1) - continue - - # If there was no match (or we've reached the replacement limit), - # yield the first item from the window. - if w and (w[0] is not _marker): - yield w[0] - - -def partitions(iterable): - """Yield all possible order-preserving partitions of *iterable*. - - >>> iterable = 'abc' - >>> for part in partitions(iterable): - ... print([''.join(p) for p in part]) - ['abc'] - ['a', 'bc'] - ['ab', 'c'] - ['a', 'b', 'c'] - - This is unrelated to :func:`partition`. - - """ - sequence = list(iterable) - n = len(sequence) - for i in powerset(range(1, n)): - yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))] - - -def set_partitions(iterable, k=None): - """ - Yield the set partitions of *iterable* into *k* parts. Set partitions are - not order-preserving. - - >>> iterable = 'abc' - >>> for part in set_partitions(iterable, 2): - ... print([''.join(p) for p in part]) - ['a', 'bc'] - ['ab', 'c'] - ['b', 'ac'] - - - If *k* is not given, every set partition is generated. - - >>> iterable = 'abc' - >>> for part in set_partitions(iterable): - ... print([''.join(p) for p in part]) - ['abc'] - ['a', 'bc'] - ['ab', 'c'] - ['b', 'ac'] - ['a', 'b', 'c'] - - """ - L = list(iterable) - n = len(L) - if k is not None: - if k < 1: - raise ValueError( - "Can't partition in a negative or zero number of groups" - ) - elif k > n: - return - - def set_partitions_helper(L, k): - n = len(L) - if k == 1: - yield [L] - elif n == k: - yield [[s] for s in L] - else: - e, *M = L - for p in set_partitions_helper(M, k - 1): - yield [[e], *p] - for p in set_partitions_helper(M, k): - for i in range(len(p)): - yield p[:i] + [[e] + p[i]] + p[i + 1 :] - - if k is None: - for k in range(1, n + 1): - yield from set_partitions_helper(L, k) - else: - yield from set_partitions_helper(L, k) - - -class time_limited: - """ - Yield items from *iterable* until *limit_seconds* have passed. - If the time limit expires before all items have been yielded, the - ``timed_out`` parameter will be set to ``True``. - - >>> from time import sleep - >>> def generator(): - ... yield 1 - ... yield 2 - ... sleep(0.2) - ... yield 3 - >>> iterable = time_limited(0.1, generator()) - >>> list(iterable) - [1, 2] - >>> iterable.timed_out - True - - Note that the time is checked before each item is yielded, and iteration - stops if the time elapsed is greater than *limit_seconds*. If your time - limit is 1 second, but it takes 2 seconds to generate the first item from - the iterable, the function will run for 2 seconds and not yield anything. - As a special case, when *limit_seconds* is zero, the iterator never - returns anything. - - """ - - def __init__(self, limit_seconds, iterable): - if limit_seconds < 0: - raise ValueError('limit_seconds must be positive') - self.limit_seconds = limit_seconds - self._iterable = iter(iterable) - self._start_time = monotonic() - self.timed_out = False - - def __iter__(self): - return self - - def __next__(self): - if self.limit_seconds == 0: - self.timed_out = True - raise StopIteration - item = next(self._iterable) - if monotonic() - self._start_time > self.limit_seconds: - self.timed_out = True - raise StopIteration - - return item - - -def only(iterable, default=None, too_long=None): - """If *iterable* has only one item, return it. - If it has zero items, return *default*. 
- If it has more than one item, raise the exception given by *too_long*, - which is ``ValueError`` by default. - - >>> only([], default='missing') - 'missing' - >>> only([1]) - 1 - >>> only([1, 2]) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: Expected exactly one item in iterable, but got 1, 2, - and perhaps more.' - >>> only([1, 2], too_long=TypeError) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - TypeError - - Note that :func:`only` attempts to advance *iterable* twice to ensure there - is only one item. See :func:`spy` or :func:`peekable` to check - iterable contents less destructively. - """ - it = iter(iterable) - first_value = next(it, default) - - try: - second_value = next(it) - except StopIteration: - pass - else: - msg = ( - 'Expected exactly one item in iterable, but got {!r}, {!r}, ' - 'and perhaps more.'.format(first_value, second_value) - ) - raise too_long or ValueError(msg) - - return first_value - - -def _ichunk(iterable, n): - cache = deque() - chunk = islice(iterable, n) - - def generator(): - while True: - if cache: - yield cache.popleft() - else: - try: - item = next(chunk) - except StopIteration: - return - else: - yield item - - def materialize_next(n=1): - # if n not specified materialize everything - if n is None: - cache.extend(chunk) - return len(cache) - - to_cache = n - len(cache) - - # materialize up to n - if to_cache > 0: - cache.extend(islice(chunk, to_cache)) - - # return number materialized up to n - return min(n, len(cache)) - - return (generator(), materialize_next) - - -def ichunked(iterable, n): - """Break *iterable* into sub-iterables with *n* elements each. - :func:`ichunked` is like :func:`chunked`, but it yields iterables - instead of lists. - - If the sub-iterables are read in order, the elements of *iterable* - won't be stored in memory. - If they are read out of order, :func:`itertools.tee` is used to cache - elements as necessary. - - >>> from itertools import count - >>> all_chunks = ichunked(count(), 4) - >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks) - >>> list(c_2) # c_1's elements have been cached; c_3's haven't been - [4, 5, 6, 7] - >>> list(c_1) - [0, 1, 2, 3] - >>> list(c_3) - [8, 9, 10, 11] - - """ - iterable = iter(iterable) - while True: - # Create new chunk - chunk, materialize_next = _ichunk(iterable, n) - - # Check to see whether we're at the end of the source iterable - if not materialize_next(): - return - - yield chunk - - # Fill previous chunk's cache - materialize_next(None) - - -def iequals(*iterables): - """Return ``True`` if all given *iterables* are equal to each other, - which means that they contain the same elements in the same order. - - The function is useful for comparing iterables of different data types - or iterables that do not support equality checks. - - >>> iequals("abc", ['a', 'b', 'c'], ('a', 'b', 'c'), iter("abc")) - True - - >>> iequals("abc", "acb") - False - - Not to be confused with :func:`all_equal`, which checks whether all - elements of iterable are equal to each other. - - """ - return all(map(all_equal, zip_longest(*iterables, fillvalue=object()))) - - -def distinct_combinations(iterable, r): - """Yield the distinct combinations of *r* items taken from *iterable*. - - >>> list(distinct_combinations([0, 0, 1], 2)) - [(0, 0), (0, 1)] - - Equivalent to ``set(combinations(iterable))``, except duplicates are not - generated and thrown away. 
For larger input sequences this is much more - efficient. - - """ - if r < 0: - raise ValueError('r must be non-negative') - elif r == 0: - yield () - return - pool = tuple(iterable) - generators = [unique_everseen(enumerate(pool), key=itemgetter(1))] - current_combo = [None] * r - level = 0 - while generators: - try: - cur_idx, p = next(generators[-1]) - except StopIteration: - generators.pop() - level -= 1 - continue - current_combo[level] = p - if level + 1 == r: - yield tuple(current_combo) - else: - generators.append( - unique_everseen( - enumerate(pool[cur_idx + 1 :], cur_idx + 1), - key=itemgetter(1), - ) - ) - level += 1 - - -def filter_except(validator, iterable, *exceptions): - """Yield the items from *iterable* for which the *validator* function does - not raise one of the specified *exceptions*. - - *validator* is called for each item in *iterable*. - It should be a function that accepts one argument and raises an exception - if that item is not valid. - - >>> iterable = ['1', '2', 'three', '4', None] - >>> list(filter_except(int, iterable, ValueError, TypeError)) - ['1', '2', '4'] - - If an exception other than one given by *exceptions* is raised by - *validator*, it is raised like normal. - """ - for item in iterable: - try: - validator(item) - except exceptions: - pass - else: - yield item - - -def map_except(function, iterable, *exceptions): - """Transform each item from *iterable* with *function* and yield the - result, unless *function* raises one of the specified *exceptions*. - - *function* is called to transform each item in *iterable*. - It should accept one argument. - - >>> iterable = ['1', '2', 'three', '4', None] - >>> list(map_except(int, iterable, ValueError, TypeError)) - [1, 2, 4] - - If an exception other than one given by *exceptions* is raised by - *function*, it is raised like normal. - """ - for item in iterable: - try: - yield function(item) - except exceptions: - pass - - -def map_if(iterable, pred, func, func_else=lambda x: x): - """Evaluate each item from *iterable* using *pred*. If the result is - equivalent to ``True``, transform the item with *func* and yield it. - Otherwise, transform the item with *func_else* and yield it. - - *pred*, *func*, and *func_else* should each be functions that accept - one argument. By default, *func_else* is the identity function. - - >>> from math import sqrt - >>> iterable = list(range(-5, 5)) - >>> iterable - [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4] - >>> list(map_if(iterable, lambda x: x > 3, lambda x: 'toobig')) - [-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig'] - >>> list(map_if(iterable, lambda x: x >= 0, - ... lambda x: f'{sqrt(x):.2f}', lambda x: None)) - [None, None, None, None, None, '0.00', '1.00', '1.41', '1.73', '2.00'] - """ - for item in iterable: - yield func(item) if pred(item) else func_else(item) - - -def _sample_unweighted(iterable, k): - # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li: - # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))". - - # Fill up the reservoir (collection of samples) with the first `k` samples - reservoir = take(k, iterable) - - # Generate random number that's the largest in a sample of k U(0,1) numbers - # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic - W = exp(log(random()) / k) - - # The number of elements to skip before changing the reservoir is a random - # number with a geometric distribution. Sample it using random() and logs. 
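-    # (Sketch of the draw: floor(log(U) / log(1 - W)) with U = random() is
-    # the standard inverse-transform sample from a geometric distribution
-    # with success probability W.)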
- next_index = k + floor(log(random()) / log(1 - W)) - - for index, element in enumerate(iterable, k): - if index == next_index: - reservoir[randrange(k)] = element - # The new W is the largest in a sample of k U(0, `old_W`) numbers - W *= exp(log(random()) / k) - next_index += floor(log(random()) / log(1 - W)) + 1 - - return reservoir - - -def _sample_weighted(iterable, k, weights): - # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. : - # "Weighted random sampling with a reservoir". - - # Log-transform for numerical stability for weights that are small/large - weight_keys = (log(random()) / weight for weight in weights) - - # Fill up the reservoir (collection of samples) with the first `k` - # weight-keys and elements, then heapify the list. - reservoir = take(k, zip(weight_keys, iterable)) - heapify(reservoir) - - # The number of jumps before changing the reservoir is a random variable - # with an exponential distribution. Sample it using random() and logs. - smallest_weight_key, _ = reservoir[0] - weights_to_skip = log(random()) / smallest_weight_key - - for weight, element in zip(weights, iterable): - if weight >= weights_to_skip: - # The notation here is consistent with the paper, but we store - # the weight-keys in log-space for better numerical stability. - smallest_weight_key, _ = reservoir[0] - t_w = exp(weight * smallest_weight_key) - r_2 = uniform(t_w, 1) # generate U(t_w, 1) - weight_key = log(r_2) / weight - heapreplace(reservoir, (weight_key, element)) - smallest_weight_key, _ = reservoir[0] - weights_to_skip = log(random()) / smallest_weight_key - else: - weights_to_skip -= weight - - # Equivalent to [element for weight_key, element in sorted(reservoir)] - return [heappop(reservoir)[1] for _ in range(k)] - - -def sample(iterable, k, weights=None): - """Return a *k*-length list of elements chosen (without replacement) - from the *iterable*. Like :func:`random.sample`, but works on iterables - of unknown length. - - >>> iterable = range(100) - >>> sample(iterable, 5) # doctest: +SKIP - [81, 60, 96, 16, 4] - - An iterable with *weights* may also be given: - - >>> iterable = range(100) - >>> weights = (i * i + 1 for i in range(100)) - >>> sampled = sample(iterable, 5, weights=weights) # doctest: +SKIP - [79, 67, 74, 66, 78] - - The algorithm can also be used to generate weighted random permutations. - The relative weight of each item determines the probability that it - appears late in the permutation. - - >>> data = "abcdefgh" - >>> weights = range(1, len(data) + 1) - >>> sample(data, k=len(data), weights=weights) # doctest: +SKIP - ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f'] - """ - if k == 0: - return [] - - iterable = iter(iterable) - if weights is None: - return _sample_unweighted(iterable, k) - else: - weights = iter(weights) - return _sample_weighted(iterable, k, weights) - - -def is_sorted(iterable, key=None, reverse=False, strict=False): - """Returns ``True`` if the items of iterable are in sorted order, and - ``False`` otherwise. *key* and *reverse* have the same meaning that they do - in the built-in :func:`sorted` function. - - >>> is_sorted(['1', '2', '3', '4', '5'], key=int) - True - >>> is_sorted([5, 4, 3, 1, 2], reverse=True) - False - - If *strict*, tests for strict sorting, that is, returns ``False`` if equal - elements are found: - - >>> is_sorted([1, 2, 2]) - True - >>> is_sorted([1, 2, 2], strict=True) - False - - The function returns ``False`` after encountering the first out-of-order - item. 
If there are no out-of-order items, the iterable is exhausted.
-    """
-
-    compare = (le if reverse else ge) if strict else (lt if reverse else gt)
-    it = iterable if key is None else map(key, iterable)
-    return not any(starmap(compare, pairwise(it)))
-
-
-class AbortThread(BaseException):
-    pass
-
-
-class callback_iter:
-    """Convert a function that uses callbacks to an iterator.
-
-    Let *func* be a function that takes a `callback` keyword argument.
-    For example:
-
-    >>> def func(callback=None):
-    ...     for i, c in [(1, 'a'), (2, 'b'), (3, 'c')]:
-    ...         if callback:
-    ...             callback(i, c)
-    ...     return 4
-
-
-    Use ``with callback_iter(func)`` to get an iterator over the parameters
-    that are delivered to the callback.
-
-    >>> with callback_iter(func) as it:
-    ...     for args, kwargs in it:
-    ...         print(args)
-    (1, 'a')
-    (2, 'b')
-    (3, 'c')
-
-    The function will be called in a background thread. The ``done`` property
-    indicates whether it has completed execution.
-
-    >>> it.done
-    True
-
-    If it completes successfully, its return value will be available
-    in the ``result`` property.
-
-    >>> it.result
-    4
-
-    Notes:
-
-    * If the function uses some keyword argument besides ``callback``, supply
-      *callback_kwd*.
-    * If it finished executing, but raised an exception, accessing the
-      ``result`` property will raise the same exception.
-    * If it hasn't finished executing, accessing the ``result``
-      property from within the ``with`` block will raise ``RuntimeError``.
-    * If it hasn't finished executing, accessing the ``result`` property from
-      outside the ``with`` block will raise a
-      ``more_itertools.AbortThread`` exception.
-    * Provide *wait_seconds* to adjust how frequently it is polled for
-      output.
-
-    """
-
-    def __init__(self, func, callback_kwd='callback', wait_seconds=0.1):
-        self._func = func
-        self._callback_kwd = callback_kwd
-        self._aborted = False
-        self._future = None
-        self._wait_seconds = wait_seconds
-        # Lazily import concurrent.futures
-        self._executor = __import__(
-            'concurrent.futures'
-        ).futures.ThreadPoolExecutor(max_workers=1)
-        self._iterator = self._reader()
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self._aborted = True
-        self._executor.shutdown()
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        return next(self._iterator)
-
-    @property
-    def done(self):
-        if self._future is None:
-            return False
-        return self._future.done()
-
-    @property
-    def result(self):
-        if not self.done:
-            raise RuntimeError('Function has not yet completed')
-
-        return self._future.result()
-
-    def _reader(self):
-        q = Queue()
-
-        def callback(*args, **kwargs):
-            if self._aborted:
-                raise AbortThread('canceled by user')
-
-            q.put((args, kwargs))
-
-        self._future = self._executor.submit(
-            self._func, **{self._callback_kwd: callback}
-        )
-
-        while True:
-            try:
-                item = q.get(timeout=self._wait_seconds)
-            except Empty:
-                pass
-            else:
-                q.task_done()
-                yield item
-
-            if self._future.done():
-                break
-
-        remaining = []
-        while True:
-            try:
-                item = q.get_nowait()
-            except Empty:
-                break
-            else:
-                q.task_done()
-                remaining.append(item)
-        q.join()
-        yield from remaining
-
-
-def windowed_complete(iterable, n):
-    """
-    Yield ``(beginning, middle, end)`` tuples, where:
-
-    * Each ``middle`` has *n* items from *iterable*
-    * Each ``beginning`` has the items before the ones in ``middle``
-    * Each ``end`` has the items after the ones in ``middle``
-
-    >>> iterable = range(7)
-    >>> n = 3
-    >>> for beginning, middle, end in windowed_complete(iterable, n):
-    ...     print(beginning, middle, end)
-    () (0, 1, 2) (3, 4, 5, 6)
-    (0,) (1, 2, 3) (4, 5, 6)
-    (0, 1) (2, 3, 4) (5, 6)
-    (0, 1, 2) (3, 4, 5) (6,)
-    (0, 1, 2, 3) (4, 5, 6) ()
-
-    Note that *n* must be at least 0 and at most equal to the length of
-    *iterable*.
-
-    This function will exhaust the iterable and may require significant
-    storage.
-    """
-    if n < 0:
-        raise ValueError('n must be >= 0')
-
-    seq = tuple(iterable)
-    size = len(seq)
-
-    if n > size:
-        raise ValueError('n must be <= len(seq)')
-
-    for i in range(size - n + 1):
-        beginning = seq[:i]
-        middle = seq[i : i + n]
-        end = seq[i + n :]
-        yield beginning, middle, end
-
-
-def all_unique(iterable, key=None):
-    """
-    Returns ``True`` if all the elements of *iterable* are unique (no two
-    elements are equal).
-
-    >>> all_unique('ABCB')
-    False
-
-    If a *key* function is specified, it will be used to make comparisons.
-
-    >>> all_unique('ABCb')
-    True
-    >>> all_unique('ABCb', str.lower)
-    False
-
-    The function returns as soon as the first non-unique element is
-    encountered. Iterables with a mix of hashable and unhashable items can
-    be used, but the function will be slower for unhashable items.
-    """
-    seenset = set()
-    seenset_add = seenset.add
-    seenlist = []
-    seenlist_add = seenlist.append
-    for element in map(key, iterable) if key else iterable:
-        try:
-            if element in seenset:
-                return False
-            seenset_add(element)
-        except TypeError:
-            if element in seenlist:
-                return False
-            seenlist_add(element)
-    return True
-
-
-def nth_product(index, *args):
-    """Equivalent to ``list(product(*args))[index]``.
-
-    The products of *args* can be ordered lexicographically.
-    :func:`nth_product` computes the product at sort position *index* without
-    computing the previous products.
-
-    >>> nth_product(8, range(2), range(2), range(2), range(2))
-    (1, 0, 0, 0)
-
-    ``IndexError`` will be raised if the given *index* is invalid.
-    """
-    pools = list(map(tuple, reversed(args)))
-    ns = list(map(len, pools))
-
-    c = reduce(mul, ns)
-
-    if index < 0:
-        index += c
-
-    if not 0 <= index < c:
-        raise IndexError
-
-    result = []
-    for pool, n in zip(pools, ns):
-        result.append(pool[index % n])
-        index //= n
-
-    return tuple(reversed(result))
-
-
-def nth_permutation(iterable, r, index):
-    """Equivalent to ``list(permutations(iterable, r))[index]``
-
-    The subsequences of *iterable* that are of length *r* where order is
-    important can be ordered lexicographically. :func:`nth_permutation`
-    computes the subsequence at sort position *index* directly, without
-    computing the previous subsequences.
-
-    >>> nth_permutation('ghijk', 2, 5)
-    ('h', 'i')
-
-    ``ValueError`` will be raised if *r* is negative or greater than the length
-    of *iterable*.
-    ``IndexError`` will be raised if the given *index* is invalid.
-    """
-    pool = list(iterable)
-    n = len(pool)
-
-    if r is None or r == n:
-        r, c = n, factorial(n)
-    elif not 0 <= r < n:
-        raise ValueError
-    else:
-        c = perm(n, r)
-    assert c > 0  # factorial(n)>0, and r<n so perm(n,r) is never zero
-
-    if index < 0:
-        index += c
-
-    if not 0 <= index < c:
-        raise IndexError
-
-    result = [0] * r
-    q = index * factorial(n) // c if r < n else index
-    for d in range(1, n + 1):
-        q, i = divmod(q, d)
-        if 0 <= n - d < r:
-            result[n - d] = i
-        if q == 0:
-            break
-
-    return tuple(map(pool.pop, result))
-
-
-def nth_combination_with_replacement(iterable, r, index):
-    """Equivalent to
-    ``list(combinations_with_replacement(iterable, r))[index]``.
-
-    The subsequences with repetition of *iterable* that are of length *r*
-    can be ordered lexicographically. :func:`nth_combination_with_replacement`
-    computes the subsequence at sort position *index* directly, without
-    computing the previous subsequences with replacement.
-
-    >>> nth_combination_with_replacement(range(5), 3, 5)
-    (0, 1, 1)
-
-    ``ValueError`` will be raised if *r* is negative or greater than the length
-    of *iterable*.
-    ``IndexError`` will be raised if the given *index* is invalid.
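-
-    >>> nth_combination_with_replacement(range(5), 3, 35)
-    Traceback (most recent call last):
-        ...
-    IndexError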
- """ - pool = tuple(iterable) - n = len(pool) - if (r < 0) or (r > n): - raise ValueError - - c = comb(n + r - 1, r) - - if index < 0: - index += c - - if (index < 0) or (index >= c): - raise IndexError - - result = [] - i = 0 - while r: - r -= 1 - while n >= 0: - num_combs = comb(n + r - 1, r) - if index < num_combs: - break - n -= 1 - i += 1 - index -= num_combs - result.append(pool[i]) - - return tuple(result) - - -def value_chain(*args): - """Yield all arguments passed to the function in the same order in which - they were passed. If an argument itself is iterable then iterate over its - values. - - >>> list(value_chain(1, 2, 3, [4, 5, 6])) - [1, 2, 3, 4, 5, 6] - - Binary and text strings are not considered iterable and are emitted - as-is: - - >>> list(value_chain('12', '34', ['56', '78'])) - ['12', '34', '56', '78'] - - Pre- or postpend a single element to an iterable: - - >>> list(value_chain(1, [2, 3, 4, 5, 6])) - [1, 2, 3, 4, 5, 6] - >>> list(value_chain([1, 2, 3, 4, 5], 6)) - [1, 2, 3, 4, 5, 6] - - Multiple levels of nesting are not flattened. - - """ - for value in args: - if isinstance(value, (str, bytes)): - yield value - continue - try: - yield from value - except TypeError: - yield value - - -def product_index(element, *args): - """Equivalent to ``list(product(*args)).index(element)`` - - The products of *args* can be ordered lexicographically. - :func:`product_index` computes the first index of *element* without - computing the previous products. - - >>> product_index([8, 2], range(10), range(5)) - 42 - - ``ValueError`` will be raised if the given *element* isn't in the product - of *args*. - """ - index = 0 - - for x, pool in zip_longest(element, args, fillvalue=_marker): - if x is _marker or pool is _marker: - raise ValueError('element is not a product of args') - - pool = tuple(pool) - index = index * len(pool) + pool.index(x) - - return index - - -def combination_index(element, iterable): - """Equivalent to ``list(combinations(iterable, r)).index(element)`` - - The subsequences of *iterable* that are of length *r* can be ordered - lexicographically. :func:`combination_index` computes the index of the - first *element*, without computing the previous combinations. - - >>> combination_index('adf', 'abcdefg') - 10 - - ``ValueError`` will be raised if the given *element* isn't one of the - combinations of *iterable*. - """ - element = enumerate(element) - k, y = next(element, (None, None)) - if k is None: - return 0 - - indexes = [] - pool = enumerate(iterable) - for n, x in pool: - if x == y: - indexes.append(n) - tmp, y = next(element, (None, None)) - if tmp is None: - break - else: - k = tmp - else: - raise ValueError('element is not a combination of iterable') - - n, _ = last(pool, default=(n, None)) - - # Python versions below 3.8 don't have math.comb - index = 1 - for i, j in enumerate(reversed(indexes), start=1): - j = n - j - if i <= j: - index += comb(j, i) - - return comb(n + 1, k + 1) - index - - -def combination_with_replacement_index(element, iterable): - """Equivalent to - ``list(combinations_with_replacement(iterable, r)).index(element)`` - - The subsequences with repetition of *iterable* that are of length *r* can - be ordered lexicographically. :func:`combination_with_replacement_index` - computes the index of the first *element*, without computing the previous - combinations with replacement. 
-
-    >>> combination_with_replacement_index('adf', 'abcdefg')
-    20
-
-    ``ValueError`` will be raised if the given *element* isn't one of the
-    combinations with replacement of *iterable*.
-    """
-    element = tuple(element)
-    l = len(element)
-    element = enumerate(element)
-
-    k, y = next(element, (None, None))
-    if k is None:
-        return 0
-
-    indexes = []
-    pool = tuple(iterable)
-    for n, x in enumerate(pool):
-        while x == y:
-            indexes.append(n)
-            tmp, y = next(element, (None, None))
-            if tmp is None:
-                break
-            else:
-                k = tmp
-        if y is None:
-            break
-    else:
-        raise ValueError(
-            'element is not a combination with replacement of iterable'
-        )
-
-    n = len(pool)
-    occupations = [0] * n
-    for p in indexes:
-        occupations[p] += 1
-
-    index = 0
-    cumulative_sum = 0
-    for k in range(1, n):
-        cumulative_sum += occupations[k - 1]
-        j = l + n - 1 - k - cumulative_sum
-        i = n - k
-        if i <= j:
-            index += comb(j, i)
-
-    return index
-
-
-def permutation_index(element, iterable):
-    """Equivalent to ``list(permutations(iterable, r)).index(element)``
-
-    The subsequences of *iterable* that are of length *r* where order is
-    important can be ordered lexicographically. :func:`permutation_index`
-    computes the index of the first *element* directly, without computing
-    the previous permutations.
-
-    >>> permutation_index([1, 3, 2], range(5))
-    19
-
-    ``ValueError`` will be raised if the given *element* isn't one of the
-    permutations of *iterable*.
-    """
-    index = 0
-    pool = list(iterable)
-    for i, x in zip(range(len(pool), -1, -1), element):
-        r = pool.index(x)
-        index = index * i + r
-        del pool[r]
-
-    return index
-
-
-class countable:
-    """Wrap *iterable* and keep a count of how many items have been consumed.
-
-    The ``items_seen`` attribute starts at ``0`` and increments as the iterable
-    is consumed:
-
-    >>> iterable = map(str, range(10))
-    >>> it = countable(iterable)
-    >>> it.items_seen
-    0
-    >>> next(it), next(it)
-    ('0', '1')
-    >>> list(it)
-    ['2', '3', '4', '5', '6', '7', '8', '9']
-    >>> it.items_seen
-    10
-    """
-
-    def __init__(self, iterable):
-        self._it = iter(iterable)
-        self.items_seen = 0
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        item = next(self._it)
-        self.items_seen += 1
-
-        return item
-
-
-def chunked_even(iterable, n):
-    """Break *iterable* into lists of approximately length *n*.
-    Items are distributed such that the lengths of the lists differ by at most
-    1 item.
-
-    >>> iterable = [1, 2, 3, 4, 5, 6, 7]
-    >>> n = 3
-    >>> list(chunked_even(iterable, n))  # List lengths: 3, 2, 2
-    [[1, 2, 3], [4, 5], [6, 7]]
-    >>> list(chunked(iterable, n))  # List lengths: 3, 3, 1
-    [[1, 2, 3], [4, 5, 6], [7]]
-
-    """
-    iterable = iter(iterable)
-
-    # Initialize a buffer to process the chunks while keeping
-    # some back to fill any underfilled chunks
-    min_buffer = (n - 1) * (n - 2)
-    buffer = list(islice(iterable, min_buffer))
-
-    # Append items until we have a completed chunk
-    for _ in islice(map(buffer.append, iterable), n, None, n):
-        yield buffer[:n]
-        del buffer[:n]
-
-    # Check if any chunks need additional processing
-    if not buffer:
-        return
-    length = len(buffer)
-
-    # Chunks are either size `full_size <= n` or `partial_size = full_size - 1`
-    q, r = divmod(length, n)
-    num_lists = q + (1 if r > 0 else 0)
-    q, r = divmod(length, num_lists)
-    full_size = q + (1 if r > 0 else 0)
-    partial_size = full_size - 1
-    num_full = length - partial_size * num_lists
-
-    # Yield chunks of full size
-    partial_start_idx = num_full * full_size
-    if full_size > 0:
-        for i in range(0, partial_start_idx, full_size):
-            yield buffer[i : i + full_size]
-
-    # Yield chunks of partial size
-    if partial_size > 0:
-        for i in range(partial_start_idx, length, partial_size):
-            yield buffer[i : i + partial_size]
-
-
-def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
-    """A version of :func:`zip` that "broadcasts" any scalar
-    (i.e., non-iterable) items into output tuples.
-
-    >>> iterable_1 = [1, 2, 3]
-    >>> iterable_2 = ['a', 'b', 'c']
-    >>> scalar = '_'
-    >>> list(zip_broadcast(iterable_1, iterable_2, scalar))
-    [(1, 'a', '_'), (2, 'b', '_'), (3, 'c', '_')]
-
-    The *scalar_types* keyword argument determines what types are considered
-    scalar. It is set to ``(str, bytes)`` by default. Set it to ``None`` to
-    treat strings and byte strings as iterable:
-
-    >>> list(zip_broadcast('abc', 0, 'xyz', scalar_types=None))
-    [('a', 0, 'x'), ('b', 0, 'y'), ('c', 0, 'z')]
-
-    If the *strict* keyword argument is ``True``, then
-    ``UnequalIterablesError`` will be raised if any of the iterables have
-    different lengths.
-    """
-
-    def is_scalar(obj):
-        if scalar_types and isinstance(obj, scalar_types):
-            return True
-        try:
-            iter(obj)
-        except TypeError:
-            return True
-        else:
-            return False
-
-    size = len(objects)
-    if not size:
-        return
-
-    new_item = [None] * size
-    iterables, iterable_positions = [], []
-    for i, obj in enumerate(objects):
-        if is_scalar(obj):
-            new_item[i] = obj
-        else:
-            iterables.append(iter(obj))
-            iterable_positions.append(i)
-
-    if not iterables:
-        yield tuple(objects)
-        return
-
-    zipper = _zip_equal if strict else zip
-    for item in zipper(*iterables):
-        for i, new_item[i] in zip(iterable_positions, item):
-            pass
-        yield tuple(new_item)
-
-
-def unique_in_window(iterable, n, key=None):
-    """Yield the items from *iterable* that haven't been seen recently.
-    *n* is the size of the lookback window.
-
-    >>> iterable = [0, 1, 0, 2, 3, 0]
-    >>> n = 3
-    >>> list(unique_in_window(iterable, n))
-    [0, 1, 2, 3, 0]
-
-    The *key* function, if provided, will be used to determine uniqueness:
-
-    >>> list(unique_in_window('abAcda', 3, key=lambda x: x.lower()))
-    ['a', 'b', 'c', 'd', 'a']
-
-    The items in *iterable* must be hashable.
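-
-    Because only the *n* most recent items are tracked, long or infinite
-    iterables can be processed:
-
-    >>> from itertools import count
-    >>> it = unique_in_window(count(), 3)
-    >>> next(it), next(it), next(it)
-    (0, 1, 2)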
-
-    """
-    if n <= 0:
-        raise ValueError('n must be greater than 0')
-
-    window = deque(maxlen=n)
-    counts = defaultdict(int)
-    use_key = key is not None
-
-    for item in iterable:
-        if len(window) == n:
-            to_discard = window[0]
-            if counts[to_discard] == 1:
-                del counts[to_discard]
-            else:
-                counts[to_discard] -= 1
-
-        k = key(item) if use_key else item
-        if k not in counts:
-            yield item
-        counts[k] += 1
-        window.append(k)
-
-
-def duplicates_everseen(iterable, key=None):
-    """Yield duplicate elements after their first appearance.
-
-    >>> list(duplicates_everseen('mississippi'))
-    ['s', 'i', 's', 's', 'i', 'p', 'i']
-    >>> list(duplicates_everseen('AaaBbbCccAaa', str.lower))
-    ['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a']
-
-    This function is analogous to :func:`unique_everseen` and is subject to
-    the same performance considerations.
-
-    """
-    seen_set = set()
-    seen_list = []
-    use_key = key is not None
-
-    for element in iterable:
-        k = key(element) if use_key else element
-        try:
-            if k not in seen_set:
-                seen_set.add(k)
-            else:
-                yield element
-        except TypeError:
-            if k not in seen_list:
-                seen_list.append(k)
-            else:
-                yield element
-
-
-def duplicates_justseen(iterable, key=None):
-    """Yields serially-duplicate elements after their first appearance.
-
-    >>> list(duplicates_justseen('mississippi'))
-    ['s', 's', 'p']
-    >>> list(duplicates_justseen('AaaBbbCccAaa', str.lower))
-    ['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a']
-
-    This function is analogous to :func:`unique_justseen`.
-
-    """
-    return flatten(g for _, g in groupby(iterable, key) for _ in g)
-
-
-def classify_unique(iterable, key=None):
-    """Classify each element in terms of its uniqueness.
-
-    For each element in the input iterable, return a 3-tuple consisting of:
-
-    1. The element itself
-    2. ``False`` if the element is equal to the one preceding it in the input,
-       ``True`` otherwise (i.e. the equivalent of :func:`unique_justseen`)
-    3. ``False`` if this element has been seen anywhere in the input before,
-       ``True`` otherwise (i.e. the equivalent of :func:`unique_everseen`)
-
-    >>> list(classify_unique('otto'))    # doctest: +NORMALIZE_WHITESPACE
-    [('o', True, True),
-     ('t', True, True),
-     ('t', False, False),
-     ('o', True, False)]
-
-    This function is analogous to :func:`unique_everseen` and is subject to
-    the same performance considerations.
-
-    """
-    seen_set = set()
-    seen_list = []
-    use_key = key is not None
-    previous = None
-
-    for i, element in enumerate(iterable):
-        k = key(element) if use_key else element
-        is_unique_justseen = not i or previous != k
-        previous = k
-        is_unique_everseen = False
-        try:
-            if k not in seen_set:
-                seen_set.add(k)
-                is_unique_everseen = True
-        except TypeError:
-            if k not in seen_list:
-                seen_list.append(k)
-                is_unique_everseen = True
-        yield element, is_unique_justseen, is_unique_everseen
-
-
-def minmax(iterable_or_value, *others, key=None, default=_marker):
-    """Returns both the smallest and largest items from an iterable
-    or from two or more arguments.
-
-    >>> minmax([3, 1, 5])
-    (1, 5)
-
-    >>> minmax(4, 2, 6)
-    (2, 6)
-
-    If a *key* function is provided, it will be used to transform the input
-    items for comparison.
-
-    >>> minmax([5, 30], key=str)  # '30' sorts before '5'
-    (30, 5)
-
-    If a *default* value is provided, it will be returned if there are no
-    input items.
-
-    >>> minmax([], default=(0, 0))
-    (0, 0)
-
-    Otherwise ``ValueError`` is raised.
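-
-    >>> minmax([])
-    Traceback (most recent call last):
-        ...
-    ValueError: `minmax()` argument is an empty iterable. Provide a `default` value to suppress this error.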
- - This function is based on the - `recipe `__ by - Raymond Hettinger and takes care to minimize the number of comparisons - performed. - """ - iterable = (iterable_or_value, *others) if others else iterable_or_value - - it = iter(iterable) - - try: - lo = hi = next(it) - except StopIteration as exc: - if default is _marker: - raise ValueError( - '`minmax()` argument is an empty iterable. ' - 'Provide a `default` value to suppress this error.' - ) from exc - return default - - # Different branches depending on the presence of key. This saves a lot - # of unimportant copies which would slow the "key=None" branch - # significantly down. - if key is None: - for x, y in zip_longest(it, it, fillvalue=lo): - if y < x: - x, y = y, x - if x < lo: - lo = x - if hi < y: - hi = y - - else: - lo_key = hi_key = key(lo) - - for x, y in zip_longest(it, it, fillvalue=lo): - x_key, y_key = key(x), key(y) - - if y_key < x_key: - x, y, x_key, y_key = y, x, y_key, x_key - if x_key < lo_key: - lo, lo_key = x, x_key - if hi_key < y_key: - hi, hi_key = y, y_key - - return lo, hi - - -def constrained_batches( - iterable, max_size, max_count=None, get_len=len, strict=True -): - """Yield batches of items from *iterable* with a combined size limited by - *max_size*. - - >>> iterable = [b'12345', b'123', b'12345678', b'1', b'1', b'12', b'1'] - >>> list(constrained_batches(iterable, 10)) - [(b'12345', b'123'), (b'12345678', b'1', b'1'), (b'12', b'1')] - - If a *max_count* is supplied, the number of items per batch is also - limited: - - >>> iterable = [b'12345', b'123', b'12345678', b'1', b'1', b'12', b'1'] - >>> list(constrained_batches(iterable, 10, max_count = 2)) - [(b'12345', b'123'), (b'12345678', b'1'), (b'1', b'12'), (b'1',)] - - If a *get_len* function is supplied, use that instead of :func:`len` to - determine item size. - - If *strict* is ``True``, raise ``ValueError`` if any single item is bigger - than *max_size*. Otherwise, allow single items to exceed *max_size*. - """ - if max_size <= 0: - raise ValueError('maximum size must be greater than zero') - - batch = [] - batch_size = 0 - batch_count = 0 - for item in iterable: - item_len = get_len(item) - if strict and item_len > max_size: - raise ValueError('item size exceeds maximum size') - - reached_count = batch_count == max_count - reached_size = item_len + batch_size > max_size - if batch_count and (reached_size or reached_count): - yield tuple(batch) - batch.clear() - batch_size = 0 - batch_count = 0 - - batch.append(item) - batch_size += item_len - batch_count += 1 - - if batch: - yield tuple(batch) - - -def gray_product(*iterables): - """Like :func:`itertools.product`, but return tuples in an order such - that only one element in the generated tuple changes from one iteration - to the next. - - >>> list(gray_product('AB','CD')) - [('A', 'C'), ('B', 'C'), ('B', 'D'), ('A', 'D')] - - This function consumes all of the input iterables before producing output. - If any of the input iterables have fewer than two items, ``ValueError`` - is raised. - - For information on the algorithm, see - `this section `__ - of Donald Knuth's *The Art of Computer Programming*. - """ - all_iterables = tuple(tuple(x) for x in iterables) - iterable_count = len(all_iterables) - for iterable in all_iterables: - if len(iterable) < 2: - raise ValueError("each iterable must have two or more items") - - # This is based on "Algorithm H" from section 7.2.1.1, page 20. 
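-    # It yields the tuples in a "loose Gray code" order: between consecutive
-    # outputs, only one coordinate of the product changes, by one position.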
- # a holds the indexes of the source iterables for the n-tuple to be yielded - # f is the array of "focus pointers" - # o is the array of "directions" - a = [0] * iterable_count - f = list(range(iterable_count + 1)) - o = [1] * iterable_count - while True: - yield tuple(all_iterables[i][a[i]] for i in range(iterable_count)) - j = f[0] - f[0] = 0 - if j == iterable_count: - break - a[j] = a[j] + o[j] - if a[j] == 0 or a[j] == len(all_iterables[j]) - 1: - o[j] = -o[j] - f[j] = f[j + 1] - f[j + 1] = j + 1 - - -def partial_product(*iterables): - """Yields tuples containing one item from each iterator, with subsequent - tuples changing a single item at a time by advancing each iterator until it - is exhausted. This sequence guarantees every value in each iterable is - output at least once without generating all possible combinations. - - This may be useful, for example, when testing an expensive function. - - >>> list(partial_product('AB', 'C', 'DEF')) - [('A', 'C', 'D'), ('B', 'C', 'D'), ('B', 'C', 'E'), ('B', 'C', 'F')] - """ - - iterators = list(map(iter, iterables)) - - try: - prod = [next(it) for it in iterators] - except StopIteration: - return - yield tuple(prod) - - for i, it in enumerate(iterators): - for prod[i] in it: - yield tuple(prod) - - -def takewhile_inclusive(predicate, iterable): - """A variant of :func:`takewhile` that yields one additional element. - - >>> list(takewhile_inclusive(lambda x: x < 5, [1, 4, 6, 4, 1])) - [1, 4, 6] - - :func:`takewhile` would return ``[1, 4]``. - """ - for x in iterable: - yield x - if not predicate(x): - break - - -def outer_product(func, xs, ys, *args, **kwargs): - """A generalized outer product that applies a binary function to all - pairs of items. Returns a 2D matrix with ``len(xs)`` rows and ``len(ys)`` - columns. - Also accepts ``*args`` and ``**kwargs`` that are passed to ``func``. - - Multiplication table: - - >>> list(outer_product(mul, range(1, 4), range(1, 6))) - [(1, 2, 3, 4, 5), (2, 4, 6, 8, 10), (3, 6, 9, 12, 15)] - - Cross tabulation: - - >>> xs = ['A', 'B', 'A', 'A', 'B', 'B', 'A', 'A', 'B', 'B'] - >>> ys = ['X', 'X', 'X', 'Y', 'Z', 'Z', 'Y', 'Y', 'Z', 'Z'] - >>> rows = list(zip(xs, ys)) - >>> count_rows = lambda x, y: rows.count((x, y)) - >>> list(outer_product(count_rows, sorted(set(xs)), sorted(set(ys)))) - [(2, 3, 0), (1, 0, 4)] - - Usage with ``*args`` and ``**kwargs``: - - >>> animals = ['cat', 'wolf', 'mouse'] - >>> list(outer_product(min, animals, animals, key=len)) - [('cat', 'cat', 'cat'), ('cat', 'wolf', 'wolf'), ('cat', 'wolf', 'mouse')] - """ - ys = tuple(ys) - return batched( - starmap(lambda x, y: func(x, y, *args, **kwargs), product(xs, ys)), - n=len(ys), - ) - - -def iter_suppress(iterable, *exceptions): - """Yield each of the items from *iterable*. If the iteration raises one of - the specified *exceptions*, that exception will be suppressed and iteration - will stop. - - >>> from itertools import chain - >>> def breaks_at_five(x): - ... while True: - ... if x >= 5: - ... raise RuntimeError - ... yield x - ... x += 1 - >>> it_1 = iter_suppress(breaks_at_five(1), RuntimeError) - >>> it_2 = iter_suppress(breaks_at_five(2), RuntimeError) - >>> list(chain(it_1, it_2)) - [1, 2, 3, 4, 2, 3, 4] - """ - try: - yield from iterable - except exceptions: - return - - -def filter_map(func, iterable): - """Apply *func* to every element of *iterable*, yielding only those which - are not ``None``. 
-
-    >>> elems = ['1', 'a', '2', 'b', '3']
-    >>> list(filter_map(lambda s: int(s) if s.isnumeric() else None, elems))
-    [1, 2, 3]
-    """
-    for x in iterable:
-        y = func(x)
-        if y is not None:
-            yield y
-
-
-def powerset_of_sets(iterable):
-    """Yields all possible subsets of the iterable.
-
-    >>> list(powerset_of_sets([1, 2, 3]))  # doctest: +SKIP
-    [set(), {1}, {2}, {3}, {1, 2}, {1, 3}, {2, 3}, {1, 2, 3}]
-    >>> list(powerset_of_sets([1, 1, 0]))  # doctest: +SKIP
-    [set(), {1}, {0}, {0, 1}]
-
-    :func:`powerset_of_sets` takes care to minimize the number
-    of hash operations performed.
-    """
-    sets = tuple(map(set, dict.fromkeys(map(frozenset, zip(iterable)))))
-    for r in range(len(sets) + 1):
-        yield from starmap(set().union, combinations(sets, r))
-
-
-def join_mappings(**field_to_map):
-    """
-    Joins multiple mappings together using their common keys.
-
-    >>> user_scores = {'elliot': 50, 'claris': 60}
-    >>> user_times = {'elliot': 30, 'claris': 40}
-    >>> join_mappings(score=user_scores, time=user_times)
-    {'elliot': {'score': 50, 'time': 30}, 'claris': {'score': 60, 'time': 40}}
-    """
-    ret = defaultdict(dict)
-
-    for field_name, mapping in field_to_map.items():
-        for key, value in mapping.items():
-            ret[key][field_name] = value
-
-    return dict(ret)
-
-
-def _complex_sumprod(v1, v2):
-    """High precision sumprod() for complex numbers.
-    Used by :func:`dft` and :func:`idft`.
-    """
-
-    r1 = chain((p.real for p in v1), (-p.imag for p in v1))
-    r2 = chain((q.real for q in v2), (q.imag for q in v2))
-    i1 = chain((p.real for p in v1), (p.imag for p in v1))
-    i2 = chain((q.imag for q in v2), (q.real for q in v2))
-    return complex(_fsumprod(r1, r2), _fsumprod(i1, i2))
-
-
-def dft(xarr):
-    """Discrete Fourier Transform. *xarr* is a sequence of complex numbers.
-    Yields the components of the corresponding transformed output vector.
-
-    >>> import cmath
-    >>> xarr = [1, 2-1j, -1j, -1+2j]
-    >>> Xarr = [2, -2-2j, -2j, 4+4j]
-    >>> all(map(cmath.isclose, dft(xarr), Xarr))
-    True
-
-    See :func:`idft` for the inverse Discrete Fourier Transform.
-    """
-    N = len(xarr)
-    roots_of_unity = [e ** (n / N * tau * -1j) for n in range(N)]
-    for k in range(N):
-        coeffs = [roots_of_unity[k * n % N] for n in range(N)]
-        yield _complex_sumprod(xarr, coeffs)
-
-
-def idft(Xarr):
-    """Inverse Discrete Fourier Transform. *Xarr* is a sequence of
-    complex numbers. Yields the components of the corresponding
-    inverse-transformed output vector.
-
-    >>> import cmath
-    >>> xarr = [1, 2-1j, -1j, -1+2j]
-    >>> Xarr = [2, -2-2j, -2j, 4+4j]
-    >>> all(map(cmath.isclose, idft(Xarr), xarr))
-    True
-
-    See :func:`dft` for the Discrete Fourier Transform.
-    """
-    N = len(Xarr)
-    roots_of_unity = [e ** (n / N * tau * 1j) for n in range(N)]
-    for k in range(N):
-        coeffs = [roots_of_unity[k * n % N] for n in range(N)]
-        yield _complex_sumprod(Xarr, coeffs) / N
-
-
-def doublestarmap(func, iterable):
-    """Apply *func* to every item of *iterable* by dictionary unpacking
-    the item into *func*.
-
-    The difference between :func:`itertools.starmap` and :func:`doublestarmap`
-    parallels the distinction between ``func(*a)`` and ``func(**a)``.
-
-    >>> iterable = [{'a': 1, 'b': 2}, {'a': 40, 'b': 60}]
-    >>> list(doublestarmap(lambda a, b: a + b, iterable))
-    [3, 100]
-
-    ``TypeError`` will be raised if *func*'s signature doesn't match the
-    mapping contained in *iterable* or if *iterable* does not contain mappings.
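-
-    A function with default arguments can absorb keys that are present in only
-    some of the mappings:
-
-    >>> list(doublestarmap(lambda a, b=0: a + b, [{'a': 1}, {'a': 1, 'b': 2}]))
-    [1, 3]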
- """ - for item in iterable: - yield func(**item) diff --git a/pkg_resources/_vendor/more_itertools/more.pyi b/pkg_resources/_vendor/more_itertools/more.pyi deleted file mode 100644 index e946023259..0000000000 --- a/pkg_resources/_vendor/more_itertools/more.pyi +++ /dev/null @@ -1,709 +0,0 @@ -"""Stubs for more_itertools.more""" - -from __future__ import annotations - -from types import TracebackType -from typing import ( - Any, - Callable, - Container, - ContextManager, - Generic, - Hashable, - Mapping, - Iterable, - Iterator, - Mapping, - overload, - Reversible, - Sequence, - Sized, - Type, - TypeVar, - type_check_only, -) -from typing_extensions import Protocol - -# Type and type variable definitions -_T = TypeVar('_T') -_T1 = TypeVar('_T1') -_T2 = TypeVar('_T2') -_U = TypeVar('_U') -_V = TypeVar('_V') -_W = TypeVar('_W') -_T_co = TypeVar('_T_co', covariant=True) -_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]]) -_Raisable = BaseException | Type[BaseException] - -@type_check_only -class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ... - -@type_check_only -class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ... - -@type_check_only -class _SupportsSlicing(Protocol[_T_co]): - def __getitem__(self, __k: slice) -> _T_co: ... - -def chunked( - iterable: Iterable[_T], n: int | None, strict: bool = ... -) -> Iterator[list[_T]]: ... -@overload -def first(iterable: Iterable[_T]) -> _T: ... -@overload -def first(iterable: Iterable[_T], default: _U) -> _T | _U: ... -@overload -def last(iterable: Iterable[_T]) -> _T: ... -@overload -def last(iterable: Iterable[_T], default: _U) -> _T | _U: ... -@overload -def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ... -@overload -def nth_or_last(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ... - -class peekable(Generic[_T], Iterator[_T]): - def __init__(self, iterable: Iterable[_T]) -> None: ... - def __iter__(self) -> peekable[_T]: ... - def __bool__(self) -> bool: ... - @overload - def peek(self) -> _T: ... - @overload - def peek(self, default: _U) -> _T | _U: ... - def prepend(self, *items: _T) -> None: ... - def __next__(self) -> _T: ... - @overload - def __getitem__(self, index: int) -> _T: ... - @overload - def __getitem__(self, index: slice) -> list[_T]: ... - -def consumer(func: _GenFn) -> _GenFn: ... -def ilen(iterable: Iterable[_T]) -> int: ... -def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ... -def with_iter( - context_manager: ContextManager[Iterable[_T]], -) -> Iterator[_T]: ... -def one( - iterable: Iterable[_T], - too_short: _Raisable | None = ..., - too_long: _Raisable | None = ..., -) -> _T: ... -def raise_(exception: _Raisable, *args: Any) -> None: ... -def strictly_n( - iterable: Iterable[_T], - n: int, - too_short: _GenFn | None = ..., - too_long: _GenFn | None = ..., -) -> list[_T]: ... -def distinct_permutations( - iterable: Iterable[_T], r: int | None = ... -) -> Iterator[tuple[_T, ...]]: ... -def intersperse( - e: _U, iterable: Iterable[_T], n: int = ... -) -> Iterator[_T | _U]: ... -def unique_to_each(*iterables: Iterable[_T]) -> list[list[_T]]: ... -@overload -def windowed( - seq: Iterable[_T], n: int, *, step: int = ... -) -> Iterator[tuple[_T | None, ...]]: ... -@overload -def windowed( - seq: Iterable[_T], n: int, fillvalue: _U, step: int = ... -) -> Iterator[tuple[_T | _U, ...]]: ... -def substrings(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ... -def substrings_indexes( - seq: Sequence[_T], reverse: bool = ... 
-) -> Iterator[tuple[Sequence[_T], int, int]]: ... - -class bucket(Generic[_T, _U], Container[_U]): - def __init__( - self, - iterable: Iterable[_T], - key: Callable[[_T], _U], - validator: Callable[[_U], object] | None = ..., - ) -> None: ... - def __contains__(self, value: object) -> bool: ... - def __iter__(self) -> Iterator[_U]: ... - def __getitem__(self, value: object) -> Iterator[_T]: ... - -def spy( - iterable: Iterable[_T], n: int = ... -) -> tuple[list[_T], Iterator[_T]]: ... -def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ... -def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ... -def interleave_evenly( - iterables: list[Iterable[_T]], lengths: list[int] | None = ... -) -> Iterator[_T]: ... -def collapse( - iterable: Iterable[Any], - base_type: type | None = ..., - levels: int | None = ..., -) -> Iterator[Any]: ... -@overload -def side_effect( - func: Callable[[_T], object], - iterable: Iterable[_T], - chunk_size: None = ..., - before: Callable[[], object] | None = ..., - after: Callable[[], object] | None = ..., -) -> Iterator[_T]: ... -@overload -def side_effect( - func: Callable[[list[_T]], object], - iterable: Iterable[_T], - chunk_size: int, - before: Callable[[], object] | None = ..., - after: Callable[[], object] | None = ..., -) -> Iterator[_T]: ... -def sliced( - seq: _SupportsSlicing[_T], n: int, strict: bool = ... -) -> Iterator[_T]: ... -def split_at( - iterable: Iterable[_T], - pred: Callable[[_T], object], - maxsplit: int = ..., - keep_separator: bool = ..., -) -> Iterator[list[_T]]: ... -def split_before( - iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ... -) -> Iterator[list[_T]]: ... -def split_after( - iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ... -) -> Iterator[list[_T]]: ... -def split_when( - iterable: Iterable[_T], - pred: Callable[[_T, _T], object], - maxsplit: int = ..., -) -> Iterator[list[_T]]: ... -def split_into( - iterable: Iterable[_T], sizes: Iterable[int | None] -) -> Iterator[list[_T]]: ... -@overload -def padded( - iterable: Iterable[_T], - *, - n: int | None = ..., - next_multiple: bool = ..., -) -> Iterator[_T | None]: ... -@overload -def padded( - iterable: Iterable[_T], - fillvalue: _U, - n: int | None = ..., - next_multiple: bool = ..., -) -> Iterator[_T | _U]: ... -@overload -def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ... -@overload -def repeat_last(iterable: Iterable[_T], default: _U) -> Iterator[_T | _U]: ... -def distribute(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ... -@overload -def stagger( - iterable: Iterable[_T], - offsets: _SizedIterable[int] = ..., - longest: bool = ..., -) -> Iterator[tuple[_T | None, ...]]: ... -@overload -def stagger( - iterable: Iterable[_T], - offsets: _SizedIterable[int] = ..., - longest: bool = ..., - fillvalue: _U = ..., -) -> Iterator[tuple[_T | _U, ...]]: ... - -class UnequalIterablesError(ValueError): - def __init__(self, details: tuple[int, int, int] | None = ...) -> None: ... - -@overload -def zip_equal(__iter1: Iterable[_T1]) -> Iterator[tuple[_T1]]: ... -@overload -def zip_equal( - __iter1: Iterable[_T1], __iter2: Iterable[_T2] -) -> Iterator[tuple[_T1, _T2]]: ... -@overload -def zip_equal( - __iter1: Iterable[_T], - __iter2: Iterable[_T], - __iter3: Iterable[_T], - *iterables: Iterable[_T], -) -> Iterator[tuple[_T, ...]]: ... 
-@overload -def zip_offset( - __iter1: Iterable[_T1], - *, - offsets: _SizedIterable[int], - longest: bool = ..., - fillvalue: None = None, -) -> Iterator[tuple[_T1 | None]]: ... -@overload -def zip_offset( - __iter1: Iterable[_T1], - __iter2: Iterable[_T2], - *, - offsets: _SizedIterable[int], - longest: bool = ..., - fillvalue: None = None, -) -> Iterator[tuple[_T1 | None, _T2 | None]]: ... -@overload -def zip_offset( - __iter1: Iterable[_T], - __iter2: Iterable[_T], - __iter3: Iterable[_T], - *iterables: Iterable[_T], - offsets: _SizedIterable[int], - longest: bool = ..., - fillvalue: None = None, -) -> Iterator[tuple[_T | None, ...]]: ... -@overload -def zip_offset( - __iter1: Iterable[_T1], - *, - offsets: _SizedIterable[int], - longest: bool = ..., - fillvalue: _U, -) -> Iterator[tuple[_T1 | _U]]: ... -@overload -def zip_offset( - __iter1: Iterable[_T1], - __iter2: Iterable[_T2], - *, - offsets: _SizedIterable[int], - longest: bool = ..., - fillvalue: _U, -) -> Iterator[tuple[_T1 | _U, _T2 | _U]]: ... -@overload -def zip_offset( - __iter1: Iterable[_T], - __iter2: Iterable[_T], - __iter3: Iterable[_T], - *iterables: Iterable[_T], - offsets: _SizedIterable[int], - longest: bool = ..., - fillvalue: _U, -) -> Iterator[tuple[_T | _U, ...]]: ... -def sort_together( - iterables: Iterable[Iterable[_T]], - key_list: Iterable[int] = ..., - key: Callable[..., Any] | None = ..., - reverse: bool = ..., -) -> list[tuple[_T, ...]]: ... -def unzip(iterable: Iterable[Sequence[_T]]) -> tuple[Iterator[_T], ...]: ... -def divide(n: int, iterable: Iterable[_T]) -> list[Iterator[_T]]: ... -def always_iterable( - obj: object, - base_type: type | tuple[type | tuple[Any, ...], ...] | None = ..., -) -> Iterator[Any]: ... -def adjacent( - predicate: Callable[[_T], bool], - iterable: Iterable[_T], - distance: int = ..., -) -> Iterator[tuple[bool, _T]]: ... -@overload -def groupby_transform( - iterable: Iterable[_T], - keyfunc: None = None, - valuefunc: None = None, - reducefunc: None = None, -) -> Iterator[tuple[_T, Iterator[_T]]]: ... -@overload -def groupby_transform( - iterable: Iterable[_T], - keyfunc: Callable[[_T], _U], - valuefunc: None, - reducefunc: None, -) -> Iterator[tuple[_U, Iterator[_T]]]: ... -@overload -def groupby_transform( - iterable: Iterable[_T], - keyfunc: None, - valuefunc: Callable[[_T], _V], - reducefunc: None, -) -> Iterable[tuple[_T, Iterable[_V]]]: ... -@overload -def groupby_transform( - iterable: Iterable[_T], - keyfunc: Callable[[_T], _U], - valuefunc: Callable[[_T], _V], - reducefunc: None, -) -> Iterable[tuple[_U, Iterator[_V]]]: ... -@overload -def groupby_transform( - iterable: Iterable[_T], - keyfunc: None, - valuefunc: None, - reducefunc: Callable[[Iterator[_T]], _W], -) -> Iterable[tuple[_T, _W]]: ... -@overload -def groupby_transform( - iterable: Iterable[_T], - keyfunc: Callable[[_T], _U], - valuefunc: None, - reducefunc: Callable[[Iterator[_T]], _W], -) -> Iterable[tuple[_U, _W]]: ... -@overload -def groupby_transform( - iterable: Iterable[_T], - keyfunc: None, - valuefunc: Callable[[_T], _V], - reducefunc: Callable[[Iterable[_V]], _W], -) -> Iterable[tuple[_T, _W]]: ... -@overload -def groupby_transform( - iterable: Iterable[_T], - keyfunc: Callable[[_T], _U], - valuefunc: Callable[[_T], _V], - reducefunc: Callable[[Iterable[_V]], _W], -) -> Iterable[tuple[_U, _W]]: ... - -class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]): - @overload - def __init__(self, __stop: _T) -> None: ... 
- @overload - def __init__(self, __start: _T, __stop: _T) -> None: ... - @overload - def __init__(self, __start: _T, __stop: _T, __step: _U) -> None: ... - def __bool__(self) -> bool: ... - def __contains__(self, elem: object) -> bool: ... - def __eq__(self, other: object) -> bool: ... - @overload - def __getitem__(self, key: int) -> _T: ... - @overload - def __getitem__(self, key: slice) -> numeric_range[_T, _U]: ... - def __hash__(self) -> int: ... - def __iter__(self) -> Iterator[_T]: ... - def __len__(self) -> int: ... - def __reduce__( - self, - ) -> tuple[Type[numeric_range[_T, _U]], tuple[_T, _T, _U]]: ... - def __repr__(self) -> str: ... - def __reversed__(self) -> Iterator[_T]: ... - def count(self, value: _T) -> int: ... - def index(self, value: _T) -> int: ... # type: ignore - -def count_cycle( - iterable: Iterable[_T], n: int | None = ... -) -> Iterable[tuple[int, _T]]: ... -def mark_ends( - iterable: Iterable[_T], -) -> Iterable[tuple[bool, bool, _T]]: ... -def locate( - iterable: Iterable[_T], - pred: Callable[..., Any] = ..., - window_size: int | None = ..., -) -> Iterator[int]: ... -def lstrip( - iterable: Iterable[_T], pred: Callable[[_T], object] -) -> Iterator[_T]: ... -def rstrip( - iterable: Iterable[_T], pred: Callable[[_T], object] -) -> Iterator[_T]: ... -def strip( - iterable: Iterable[_T], pred: Callable[[_T], object] -) -> Iterator[_T]: ... - -class islice_extended(Generic[_T], Iterator[_T]): - def __init__(self, iterable: Iterable[_T], *args: int | None) -> None: ... - def __iter__(self) -> islice_extended[_T]: ... - def __next__(self) -> _T: ... - def __getitem__(self, index: slice) -> islice_extended[_T]: ... - -def always_reversible(iterable: Iterable[_T]) -> Iterator[_T]: ... -def consecutive_groups( - iterable: Iterable[_T], ordering: Callable[[_T], int] = ... -) -> Iterator[Iterator[_T]]: ... -@overload -def difference( - iterable: Iterable[_T], - func: Callable[[_T, _T], _U] = ..., - *, - initial: None = ..., -) -> Iterator[_T | _U]: ... -@overload -def difference( - iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U -) -> Iterator[_U]: ... - -class SequenceView(Generic[_T], Sequence[_T]): - def __init__(self, target: Sequence[_T]) -> None: ... - @overload - def __getitem__(self, index: int) -> _T: ... - @overload - def __getitem__(self, index: slice) -> Sequence[_T]: ... - def __len__(self) -> int: ... - -class seekable(Generic[_T], Iterator[_T]): - def __init__( - self, iterable: Iterable[_T], maxlen: int | None = ... - ) -> None: ... - def __iter__(self) -> seekable[_T]: ... - def __next__(self) -> _T: ... - def __bool__(self) -> bool: ... - @overload - def peek(self) -> _T: ... - @overload - def peek(self, default: _U) -> _T | _U: ... - def elements(self) -> SequenceView[_T]: ... - def seek(self, index: int) -> None: ... - def relative_seek(self, count: int) -> None: ... - -class run_length: - @staticmethod - def encode(iterable: Iterable[_T]) -> Iterator[tuple[_T, int]]: ... - @staticmethod - def decode(iterable: Iterable[tuple[_T, int]]) -> Iterator[_T]: ... - -def exactly_n( - iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ... -) -> bool: ... -def circular_shifts(iterable: Iterable[_T]) -> list[tuple[_T, ...]]: ... -def make_decorator( - wrapping_func: Callable[..., _U], result_index: int = ... -) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ... 
-@overload -def map_reduce( - iterable: Iterable[_T], - keyfunc: Callable[[_T], _U], - valuefunc: None = ..., - reducefunc: None = ..., -) -> dict[_U, list[_T]]: ... -@overload -def map_reduce( - iterable: Iterable[_T], - keyfunc: Callable[[_T], _U], - valuefunc: Callable[[_T], _V], - reducefunc: None = ..., -) -> dict[_U, list[_V]]: ... -@overload -def map_reduce( - iterable: Iterable[_T], - keyfunc: Callable[[_T], _U], - valuefunc: None = ..., - reducefunc: Callable[[list[_T]], _W] = ..., -) -> dict[_U, _W]: ... -@overload -def map_reduce( - iterable: Iterable[_T], - keyfunc: Callable[[_T], _U], - valuefunc: Callable[[_T], _V], - reducefunc: Callable[[list[_V]], _W], -) -> dict[_U, _W]: ... -def rlocate( - iterable: Iterable[_T], - pred: Callable[..., object] = ..., - window_size: int | None = ..., -) -> Iterator[int]: ... -def replace( - iterable: Iterable[_T], - pred: Callable[..., object], - substitutes: Iterable[_U], - count: int | None = ..., - window_size: int = ..., -) -> Iterator[_T | _U]: ... -def partitions(iterable: Iterable[_T]) -> Iterator[list[list[_T]]]: ... -def set_partitions( - iterable: Iterable[_T], k: int | None = ... -) -> Iterator[list[list[_T]]]: ... - -class time_limited(Generic[_T], Iterator[_T]): - def __init__( - self, limit_seconds: float, iterable: Iterable[_T] - ) -> None: ... - def __iter__(self) -> islice_extended[_T]: ... - def __next__(self) -> _T: ... - -@overload -def only( - iterable: Iterable[_T], *, too_long: _Raisable | None = ... -) -> _T | None: ... -@overload -def only( - iterable: Iterable[_T], default: _U, too_long: _Raisable | None = ... -) -> _T | _U: ... -def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ... -def distinct_combinations( - iterable: Iterable[_T], r: int -) -> Iterator[tuple[_T, ...]]: ... -def filter_except( - validator: Callable[[Any], object], - iterable: Iterable[_T], - *exceptions: Type[BaseException], -) -> Iterator[_T]: ... -def map_except( - function: Callable[[Any], _U], - iterable: Iterable[_T], - *exceptions: Type[BaseException], -) -> Iterator[_U]: ... -def map_if( - iterable: Iterable[Any], - pred: Callable[[Any], bool], - func: Callable[[Any], Any], - func_else: Callable[[Any], Any] | None = ..., -) -> Iterator[Any]: ... -def sample( - iterable: Iterable[_T], - k: int, - weights: Iterable[float] | None = ..., -) -> list[_T]: ... -def is_sorted( - iterable: Iterable[_T], - key: Callable[[_T], _U] | None = ..., - reverse: bool = False, - strict: bool = False, -) -> bool: ... - -class AbortThread(BaseException): - pass - -class callback_iter(Generic[_T], Iterator[_T]): - def __init__( - self, - func: Callable[..., Any], - callback_kwd: str = ..., - wait_seconds: float = ..., - ) -> None: ... - def __enter__(self) -> callback_iter[_T]: ... - def __exit__( - self, - exc_type: Type[BaseException] | None, - exc_value: BaseException | None, - traceback: TracebackType | None, - ) -> bool | None: ... - def __iter__(self) -> callback_iter[_T]: ... - def __next__(self) -> _T: ... - def _reader(self) -> Iterator[_T]: ... - @property - def done(self) -> bool: ... - @property - def result(self) -> Any: ... - -def windowed_complete( - iterable: Iterable[_T], n: int -) -> Iterator[tuple[_T, ...]]: ... -def all_unique( - iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... -) -> bool: ... -def nth_product(index: int, *args: Iterable[_T]) -> tuple[_T, ...]: ... -def nth_combination_with_replacement( - iterable: Iterable[_T], r: int, index: int -) -> tuple[_T, ...]: ... 
-def nth_permutation( - iterable: Iterable[_T], r: int, index: int -) -> tuple[_T, ...]: ... -def value_chain(*args: _T | Iterable[_T]) -> Iterable[_T]: ... -def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ... -def combination_index( - element: Iterable[_T], iterable: Iterable[_T] -) -> int: ... -def combination_with_replacement_index( - element: Iterable[_T], iterable: Iterable[_T] -) -> int: ... -def permutation_index( - element: Iterable[_T], iterable: Iterable[_T] -) -> int: ... -def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ... - -class countable(Generic[_T], Iterator[_T]): - def __init__(self, iterable: Iterable[_T]) -> None: ... - def __iter__(self) -> countable[_T]: ... - def __next__(self) -> _T: ... - items_seen: int - -def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[list[_T]]: ... -def zip_broadcast( - *objects: _T | Iterable[_T], - scalar_types: type | tuple[type | tuple[Any, ...], ...] | None = ..., - strict: bool = ..., -) -> Iterable[tuple[_T, ...]]: ... -def unique_in_window( - iterable: Iterable[_T], n: int, key: Callable[[_T], _U] | None = ... -) -> Iterator[_T]: ... -def duplicates_everseen( - iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... -) -> Iterator[_T]: ... -def duplicates_justseen( - iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... -) -> Iterator[_T]: ... -def classify_unique( - iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... -) -> Iterator[tuple[_T, bool, bool]]: ... - -class _SupportsLessThan(Protocol): - def __lt__(self, __other: Any) -> bool: ... - -_SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan) - -@overload -def minmax( - iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None -) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ... -@overload -def minmax( - iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan] -) -> tuple[_T, _T]: ... -@overload -def minmax( - iterable_or_value: Iterable[_SupportsLessThanT], - *, - key: None = None, - default: _U, -) -> _U | tuple[_SupportsLessThanT, _SupportsLessThanT]: ... -@overload -def minmax( - iterable_or_value: Iterable[_T], - *, - key: Callable[[_T], _SupportsLessThan], - default: _U, -) -> _U | tuple[_T, _T]: ... -@overload -def minmax( - iterable_or_value: _SupportsLessThanT, - __other: _SupportsLessThanT, - *others: _SupportsLessThanT, -) -> tuple[_SupportsLessThanT, _SupportsLessThanT]: ... -@overload -def minmax( - iterable_or_value: _T, - __other: _T, - *others: _T, - key: Callable[[_T], _SupportsLessThan], -) -> tuple[_T, _T]: ... -def longest_common_prefix( - iterables: Iterable[Iterable[_T]], -) -> Iterator[_T]: ... -def iequals(*iterables: Iterable[Any]) -> bool: ... -def constrained_batches( - iterable: Iterable[_T], - max_size: int, - max_count: int | None = ..., - get_len: Callable[[_T], object] = ..., - strict: bool = ..., -) -> Iterator[tuple[_T]]: ... -def gray_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ... -def partial_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ... -def takewhile_inclusive( - predicate: Callable[[_T], bool], iterable: Iterable[_T] -) -> Iterator[_T]: ... -def outer_product( - func: Callable[[_T, _U], _V], - xs: Iterable[_T], - ys: Iterable[_U], - *args: Any, - **kwargs: Any, -) -> Iterator[tuple[_V, ...]]: ... -def iter_suppress( - iterable: Iterable[_T], - *exceptions: Type[BaseException], -) -> Iterator[_T]: ... 
-def filter_map( - func: Callable[[_T], _V | None], - iterable: Iterable[_T], -) -> Iterator[_V]: ... -def powerset_of_sets(iterable: Iterable[_T]) -> Iterator[set[_T]]: ... -def join_mappings( - **field_to_map: Mapping[_T, _V] -) -> dict[_T, dict[str, _V]]: ... -def doublestarmap( - func: Callable[..., _T], - iterable: Iterable[Mapping[str, Any]], -) -> Iterator[_T]: ... -def dft(xarr: Sequence[complex]) -> Iterator[complex]: ... -def idft(Xarr: Sequence[complex]) -> Iterator[complex]: ... diff --git a/pkg_resources/_vendor/more_itertools/py.typed b/pkg_resources/_vendor/more_itertools/py.typed deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pkg_resources/_vendor/more_itertools/recipes.py b/pkg_resources/_vendor/more_itertools/recipes.py deleted file mode 100644 index b32fa95533..0000000000 --- a/pkg_resources/_vendor/more_itertools/recipes.py +++ /dev/null @@ -1,1046 +0,0 @@ -"""Imported from the recipes section of the itertools documentation. - -All functions taken from the recipes section of the itertools library docs -[1]_. -Some backward-compatible usability improvements have been made. - -.. [1] http://docs.python.org/library/itertools.html#recipes - -""" - -import math -import operator - -from collections import deque -from collections.abc import Sized -from functools import partial, reduce -from itertools import ( - chain, - combinations, - compress, - count, - cycle, - groupby, - islice, - product, - repeat, - starmap, - tee, - zip_longest, -) -from random import randrange, sample, choice -from sys import hexversion - -__all__ = [ - 'all_equal', - 'batched', - 'before_and_after', - 'consume', - 'convolve', - 'dotproduct', - 'first_true', - 'factor', - 'flatten', - 'grouper', - 'iter_except', - 'iter_index', - 'matmul', - 'ncycles', - 'nth', - 'nth_combination', - 'padnone', - 'pad_none', - 'pairwise', - 'partition', - 'polynomial_eval', - 'polynomial_from_roots', - 'polynomial_derivative', - 'powerset', - 'prepend', - 'quantify', - 'reshape', - 'random_combination_with_replacement', - 'random_combination', - 'random_permutation', - 'random_product', - 'repeatfunc', - 'roundrobin', - 'sieve', - 'sliding_window', - 'subslices', - 'sum_of_squares', - 'tabulate', - 'tail', - 'take', - 'totient', - 'transpose', - 'triplewise', - 'unique', - 'unique_everseen', - 'unique_justseen', -] - -_marker = object() - - -# zip with strict is available for Python 3.10+ -try: - zip(strict=True) -except TypeError: - _zip_strict = zip -else: - _zip_strict = partial(zip, strict=True) - -# math.sumprod is available for Python 3.12+ -_sumprod = getattr(math, 'sumprod', lambda x, y: dotproduct(x, y)) - - -def take(n, iterable): - """Return first *n* items of the iterable as a list. - - >>> take(3, range(10)) - [0, 1, 2] - - If there are fewer than *n* items in the iterable, all of them are - returned. - - >>> take(10, range(3)) - [0, 1, 2] - - """ - return list(islice(iterable, n)) - - -def tabulate(function, start=0): - """Return an iterator over the results of ``func(start)``, - ``func(start + 1)``, ``func(start + 2)``... - - *func* should be a function that accepts one integer argument. - - If *start* is not specified it defaults to 0. It will be incremented each - time the iterator is advanced. - - >>> square = lambda x: x ** 2 - >>> iterator = tabulate(square, -3) - >>> take(4, iterator) - [9, 4, 1, 0] - - """ - return map(function, count(start)) - - -def tail(n, iterable): - """Return an iterator over the last *n* items of *iterable*. 
-
-    >>> t = tail(3, 'ABCDEFG')
-    >>> list(t)
-    ['E', 'F', 'G']
-
-    """
-    # If the given iterable has a length, then we can use islice to get its
-    # final elements. Note that if the iterable is not actually Iterable,
-    # either islice or deque will throw a TypeError. This is why we don't
-    # check if it is Iterable.
-    if isinstance(iterable, Sized):
-        yield from islice(iterable, max(0, len(iterable) - n), None)
-    else:
-        yield from iter(deque(iterable, maxlen=n))
-
-
-def consume(iterator, n=None):
-    """Advance *iterable* by *n* steps. If *n* is ``None``, consume it
-    entirely.
-
-    Efficiently exhausts an iterator without returning values. Defaults to
-    consuming the whole iterator, but an optional second argument may be
-    provided to limit consumption.
-
-    >>> i = (x for x in range(10))
-    >>> next(i)
-    0
-    >>> consume(i, 3)
-    >>> next(i)
-    4
-    >>> consume(i)
-    >>> next(i)
-    Traceback (most recent call last):
-      File "<stdin>", line 1, in <module>
-    StopIteration
-
-    If the iterator has fewer items remaining than the provided limit, the
-    whole iterator will be consumed.
-
-    >>> i = (x for x in range(3))
-    >>> consume(i, 5)
-    >>> next(i)
-    Traceback (most recent call last):
-      File "<stdin>", line 1, in <module>
-    StopIteration
-
-    """
-    # Use functions that consume iterators at C speed.
-    if n is None:
-        # feed the entire iterator into a zero-length deque
-        deque(iterator, maxlen=0)
-    else:
-        # advance to the empty slice starting at position n
-        next(islice(iterator, n, n), None)
-
-
-def nth(iterable, n, default=None):
-    """Returns the nth item or a default value.
-
-    >>> l = range(10)
-    >>> nth(l, 3)
-    3
-    >>> nth(l, 20, "zebra")
-    'zebra'
-
-    """
-    return next(islice(iterable, n, None), default)
-
-
-def all_equal(iterable, key=None):
-    """
-    Returns ``True`` if all the elements are equal to each other.
-
-    >>> all_equal('aaaa')
-    True
-    >>> all_equal('aaab')
-    False
-
-    A function that accepts a single argument and returns a transformed version
-    of each input item can be specified with *key*:
-
-    >>> all_equal('AaaA', key=str.casefold)
-    True
-    >>> all_equal([1, 2, 3], key=lambda x: x < 10)
-    True
-
-    """
-    return len(list(islice(groupby(iterable, key), 2))) <= 1
-
-
-def quantify(iterable, pred=bool):
-    """Return how many times the predicate is true.
-
-    >>> quantify([True, False, True])
-    2
-
-    """
-    return sum(map(pred, iterable))
-
-
-def pad_none(iterable):
-    """Returns the sequence of elements and then returns ``None`` indefinitely.
-
-    >>> take(5, pad_none(range(3)))
-    [0, 1, 2, None, None]
-
-    Useful for emulating the behavior of the built-in :func:`map` function.
-
-    See also :func:`padded`.
-
-    """
-    return chain(iterable, repeat(None))
-
-
-padnone = pad_none
-
-
-def ncycles(iterable, n):
-    """Returns the sequence elements *n* times
-
-    >>> list(ncycles(["a", "b"], 3))
-    ['a', 'b', 'a', 'b', 'a', 'b']
-
-    """
-    return chain.from_iterable(repeat(tuple(iterable), n))
-
-
-def dotproduct(vec1, vec2):
-    """Returns the dot product of the two iterables.
-
-    >>> dotproduct([10, 10], [20, 20])
-    400
-
-    """
-    return sum(map(operator.mul, vec1, vec2))
-
-
-def flatten(listOfLists):
-    """Return an iterator flattening one level of nesting in a list of lists.
-
-    >>> list(flatten([[0, 1], [2, 3]]))
-    [0, 1, 2, 3]
-
-    See also :func:`collapse`, which can flatten multiple levels of nesting.
-
-    """
-    return chain.from_iterable(listOfLists)
-
-
-def repeatfunc(func, times=None, *args):
-    """Call *func* with *args* repeatedly, returning an iterable over the
-    results.
- - If *times* is specified, the iterable will terminate after that many - repetitions: - - >>> from operator import add - >>> times = 4 - >>> args = 3, 5 - >>> list(repeatfunc(add, times, *args)) - [8, 8, 8, 8] - - If *times* is ``None`` the iterable will not terminate: - - >>> from random import randrange - >>> times = None - >>> args = 1, 11 - >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP - [2, 4, 8, 1, 8, 4] - - """ - if times is None: - return starmap(func, repeat(args)) - return starmap(func, repeat(args, times)) - - -def _pairwise(iterable): - """Returns an iterator of paired items, overlapping, from the original - - >>> take(4, pairwise(count())) - [(0, 1), (1, 2), (2, 3), (3, 4)] - - On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`. - - """ - a, b = tee(iterable) - next(b, None) - return zip(a, b) - - -try: - from itertools import pairwise as itertools_pairwise -except ImportError: - pairwise = _pairwise -else: - - def pairwise(iterable): - return itertools_pairwise(iterable) - - pairwise.__doc__ = _pairwise.__doc__ - - -class UnequalIterablesError(ValueError): - def __init__(self, details=None): - msg = 'Iterables have different lengths' - if details is not None: - msg += (': index 0 has length {}; index {} has length {}').format( - *details - ) - - super().__init__(msg) - - -def _zip_equal_generator(iterables): - for combo in zip_longest(*iterables, fillvalue=_marker): - for val in combo: - if val is _marker: - raise UnequalIterablesError() - yield combo - - -def _zip_equal(*iterables): - # Check whether the iterables are all the same size. - try: - first_size = len(iterables[0]) - for i, it in enumerate(iterables[1:], 1): - size = len(it) - if size != first_size: - raise UnequalIterablesError(details=(first_size, i, size)) - # All sizes are equal, we can use the built-in zip. - return zip(*iterables) - # If any one of the iterables didn't have a length, start reading - # them until one runs out. - except TypeError: - return _zip_equal_generator(iterables) - - -def grouper(iterable, n, incomplete='fill', fillvalue=None): - """Group elements from *iterable* into fixed-length groups of length *n*. - - >>> list(grouper('ABCDEF', 3)) - [('A', 'B', 'C'), ('D', 'E', 'F')] - - The keyword arguments *incomplete* and *fillvalue* control what happens for - iterables whose length is not a multiple of *n*. - - When *incomplete* is `'fill'`, the last group will contain instances of - *fillvalue*. - - >>> list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x')) - [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')] - - When *incomplete* is `'ignore'`, the last group will not be emitted. - - >>> list(grouper('ABCDEFG', 3, incomplete='ignore', fillvalue='x')) - [('A', 'B', 'C'), ('D', 'E', 'F')] - - When *incomplete* is `'strict'`, a subclass of `ValueError` will be raised. - - >>> it = grouper('ABCDEFG', 3, incomplete='strict') - >>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - UnequalIterablesError - - """ - args = [iter(iterable)] * n - if incomplete == 'fill': - return zip_longest(*args, fillvalue=fillvalue) - if incomplete == 'strict': - return _zip_equal(*args) - if incomplete == 'ignore': - return zip(*args) - else: - raise ValueError('Expected fill, strict, or ignore') - - -def roundrobin(*iterables): - """Yields an item from each iterable, alternating between them. 
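
# [Editor's sketch] The three 'incomplete' policies of grouper() above map
# onto stdlib primitives: 'fill' -> zip_longest, 'ignore' -> zip, and
# 'strict' -> a length-checked zip. Demonstrated here with the 'fill' case.
from itertools import zip_longest

def groups(s, n, fillvalue='x'):
    args = [iter(s)] * n            # n references to the *same* iterator
    return list(zip_longest(*args, fillvalue=fillvalue))

assert groups('ABCDEFG', 3) == [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
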
- - >>> list(roundrobin('ABC', 'D', 'EF')) - ['A', 'D', 'E', 'B', 'F', 'C'] - - This function produces the same output as :func:`interleave_longest`, but - may perform better for some inputs (in particular when the number of - iterables is small). - - """ - # Algorithm credited to George Sakkis - iterators = map(iter, iterables) - for num_active in range(len(iterables), 0, -1): - iterators = cycle(islice(iterators, num_active)) - yield from map(next, iterators) - - -def partition(pred, iterable): - """ - Returns a 2-tuple of iterables derived from the input iterable. - The first yields the items that have ``pred(item) == False``. - The second yields the items that have ``pred(item) == True``. - - >>> is_odd = lambda x: x % 2 != 0 - >>> iterable = range(10) - >>> even_items, odd_items = partition(is_odd, iterable) - >>> list(even_items), list(odd_items) - ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9]) - - If *pred* is None, :func:`bool` is used. - - >>> iterable = [0, 1, False, True, '', ' '] - >>> false_items, true_items = partition(None, iterable) - >>> list(false_items), list(true_items) - ([0, False, ''], [1, True, ' ']) - - """ - if pred is None: - pred = bool - - t1, t2, p = tee(iterable, 3) - p1, p2 = tee(map(pred, p)) - return (compress(t1, map(operator.not_, p1)), compress(t2, p2)) - - -def powerset(iterable): - """Yields all possible subsets of the iterable. - - >>> list(powerset([1, 2, 3])) - [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)] - - :func:`powerset` will operate on iterables that aren't :class:`set` - instances, so repeated elements in the input will produce repeated elements - in the output. - - >>> seq = [1, 1, 0] - >>> list(powerset(seq)) - [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)] - - For a variant that efficiently yields actual :class:`set` instances, see - :func:`powerset_of_sets`. - """ - s = list(iterable) - return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1)) - - -def unique_everseen(iterable, key=None): - """ - Yield unique elements, preserving order. - - >>> list(unique_everseen('AAAABBBCCDAABBB')) - ['A', 'B', 'C', 'D'] - >>> list(unique_everseen('ABBCcAD', str.lower)) - ['A', 'B', 'C', 'D'] - - Sequences with a mix of hashable and unhashable items can be used. - The function will be slower (i.e., `O(n^2)`) for unhashable items. - - Remember that ``list`` objects are unhashable - you can use the *key* - parameter to transform the list to a tuple (which is hashable) to - avoid a slowdown. - - >>> iterable = ([1, 2], [2, 3], [1, 2]) - >>> list(unique_everseen(iterable)) # Slow - [[1, 2], [2, 3]] - >>> list(unique_everseen(iterable, key=tuple)) # Faster - [[1, 2], [2, 3]] - - Similarly, you may want to convert unhashable ``set`` objects with - ``key=frozenset``. For ``dict`` objects, - ``key=lambda x: frozenset(x.items())`` can be used. 
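
# [Editor's sketch] Quick property check of the powerset() construction
# above: chaining combinations(s, r) for r = 0..len(s) yields exactly
# 2**len(s) subsets (with multiplicity, since inputs are not deduplicated).
from itertools import chain, combinations

s = [1, 2, 3]
subsets = list(chain.from_iterable(combinations(s, r) for r in range(len(s) + 1)))
assert len(subsets) == 2 ** len(s) == 8
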
- - """ - seenset = set() - seenset_add = seenset.add - seenlist = [] - seenlist_add = seenlist.append - use_key = key is not None - - for element in iterable: - k = key(element) if use_key else element - try: - if k not in seenset: - seenset_add(k) - yield element - except TypeError: - if k not in seenlist: - seenlist_add(k) - yield element - - -def unique_justseen(iterable, key=None): - """Yields elements in order, ignoring serial duplicates - - >>> list(unique_justseen('AAAABBBCCDAABBB')) - ['A', 'B', 'C', 'D', 'A', 'B'] - >>> list(unique_justseen('ABBCcAD', str.lower)) - ['A', 'B', 'C', 'A', 'D'] - - """ - if key is None: - return map(operator.itemgetter(0), groupby(iterable)) - - return map(next, map(operator.itemgetter(1), groupby(iterable, key))) - - -def unique(iterable, key=None, reverse=False): - """Yields unique elements in sorted order. - - >>> list(unique([[1, 2], [3, 4], [1, 2]])) - [[1, 2], [3, 4]] - - *key* and *reverse* are passed to :func:`sorted`. - - >>> list(unique('ABBcCAD', str.casefold)) - ['A', 'B', 'c', 'D'] - >>> list(unique('ABBcCAD', str.casefold, reverse=True)) - ['D', 'c', 'B', 'A'] - - The elements in *iterable* need not be hashable, but they must be - comparable for sorting to work. - """ - return unique_justseen(sorted(iterable, key=key, reverse=reverse), key=key) - - -def iter_except(func, exception, first=None): - """Yields results from a function repeatedly until an exception is raised. - - Converts a call-until-exception interface to an iterator interface. - Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel - to end the loop. - - >>> l = [0, 1, 2] - >>> list(iter_except(l.pop, IndexError)) - [2, 1, 0] - - Multiple exceptions can be specified as a stopping condition: - - >>> l = [1, 2, 3, '...', 4, 5, 6] - >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError))) - [7, 6, 5] - >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError))) - [4, 3, 2] - >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError))) - [] - - """ - try: - if first is not None: - yield first() - while 1: - yield func() - except exception: - pass - - -def first_true(iterable, default=None, pred=None): - """ - Returns the first true value in the iterable. - - If no true value is found, returns *default* - - If *pred* is not None, returns the first item for which - ``pred(item) == True`` . - - >>> first_true(range(10)) - 1 - >>> first_true(range(10), pred=lambda x: x > 5) - 6 - >>> first_true(range(10), default='missing', pred=lambda x: x > 9) - 'missing' - - """ - return next(filter(pred, iterable), default) - - -def random_product(*args, repeat=1): - """Draw an item at random from each of the input iterables. - - >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP - ('c', 3, 'Z') - - If *repeat* is provided as a keyword argument, that many items will be - drawn from each iterable. - - >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP - ('a', 2, 'd', 3) - - This equivalent to taking a random selection from - ``itertools.product(*args, **kwarg)``. - - """ - pools = [tuple(pool) for pool in args] * repeat - return tuple(choice(pool) for pool in pools) - - -def random_permutation(iterable, r=None): - """Return a random *r* length permutation of the elements in *iterable*. - - If *r* is not specified or is ``None``, then *r* defaults to the length of - *iterable*. 
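
# [Editor's sketch] The dual-path dedup strategy of unique_everseen() above,
# reduced to its core: O(1) set membership for hashable items, with an O(n)
# list-scan fallback when hashing raises TypeError.
def dedup(items):
    seen_set, seen_list = set(), []
    for item in items:
        try:
            if item not in seen_set:
                seen_set.add(item)
                yield item
        except TypeError:              # unhashable, e.g. a list
            if item not in seen_list:
                seen_list.append(item)
                yield item

assert list(dedup(['a', 'a', [1], [1], 'b'])) == ['a', [1], 'b']
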
- - >>> random_permutation(range(5)) # doctest:+SKIP - (3, 4, 0, 1, 2) - - This equivalent to taking a random selection from - ``itertools.permutations(iterable, r)``. - - """ - pool = tuple(iterable) - r = len(pool) if r is None else r - return tuple(sample(pool, r)) - - -def random_combination(iterable, r): - """Return a random *r* length subsequence of the elements in *iterable*. - - >>> random_combination(range(5), 3) # doctest:+SKIP - (2, 3, 4) - - This equivalent to taking a random selection from - ``itertools.combinations(iterable, r)``. - - """ - pool = tuple(iterable) - n = len(pool) - indices = sorted(sample(range(n), r)) - return tuple(pool[i] for i in indices) - - -def random_combination_with_replacement(iterable, r): - """Return a random *r* length subsequence of elements in *iterable*, - allowing individual elements to be repeated. - - >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP - (0, 0, 1, 2, 2) - - This equivalent to taking a random selection from - ``itertools.combinations_with_replacement(iterable, r)``. - - """ - pool = tuple(iterable) - n = len(pool) - indices = sorted(randrange(n) for i in range(r)) - return tuple(pool[i] for i in indices) - - -def nth_combination(iterable, r, index): - """Equivalent to ``list(combinations(iterable, r))[index]``. - - The subsequences of *iterable* that are of length *r* can be ordered - lexicographically. :func:`nth_combination` computes the subsequence at - sort position *index* directly, without computing the previous - subsequences. - - >>> nth_combination(range(5), 3, 5) - (0, 3, 4) - - ``ValueError`` will be raised If *r* is negative or greater than the length - of *iterable*. - ``IndexError`` will be raised if the given *index* is invalid. - """ - pool = tuple(iterable) - n = len(pool) - if (r < 0) or (r > n): - raise ValueError - - c = 1 - k = min(r, n - r) - for i in range(1, k + 1): - c = c * (n - k + i) // i - - if index < 0: - index += c - - if (index < 0) or (index >= c): - raise IndexError - - result = [] - while r: - c, n, r = c * r // n, n - 1, r - 1 - while index >= c: - index -= c - c, n = c * (n - r) // n, n - 1 - result.append(pool[-1 - n]) - - return tuple(result) - - -def prepend(value, iterator): - """Yield *value*, followed by the elements in *iterator*. - - >>> value = '0' - >>> iterator = ['1', '2', '3'] - >>> list(prepend(value, iterator)) - ['0', '1', '2', '3'] - - To prepend multiple values, see :func:`itertools.chain` - or :func:`value_chain`. - - """ - return chain([value], iterator) - - -def convolve(signal, kernel): - """Convolve the iterable *signal* with the iterable *kernel*. - - >>> signal = (1, 2, 3, 4, 5) - >>> kernel = [3, 2, 1] - >>> list(convolve(signal, kernel)) - [3, 8, 14, 20, 26, 14, 5] - - Note: the input arguments are not interchangeable, as the *kernel* - is immediately consumed and stored. - - """ - # This implementation intentionally doesn't match the one in the itertools - # documentation. - kernel = tuple(kernel)[::-1] - n = len(kernel) - window = deque([0], maxlen=n) * n - for x in chain(signal, repeat(0, n - 1)): - window.append(x) - yield _sumprod(kernel, window) - - -def before_and_after(predicate, it): - """A variant of :func:`takewhile` that allows complete access to the - remainder of the iterator. 
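
# [Editor's sketch] Cross-check of nth_combination() above against the
# naive enumeration it is documented to match: its docstring example
# nth_combination(range(5), 3, 5) == (0, 3, 4) agrees with direct indexing.
from itertools import combinations

assert list(combinations(range(5), 3))[5] == (0, 3, 4)
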
- - >>> it = iter('ABCdEfGhI') - >>> all_upper, remainder = before_and_after(str.isupper, it) - >>> ''.join(all_upper) - 'ABC' - >>> ''.join(remainder) # takewhile() would lose the 'd' - 'dEfGhI' - - Note that the first iterator must be fully consumed before the second - iterator can generate valid results. - """ - it = iter(it) - transition = [] - - def true_iterator(): - for elem in it: - if predicate(elem): - yield elem - else: - transition.append(elem) - return - - # Note: this is different from itertools recipes to allow nesting - # before_and_after remainders into before_and_after again. See tests - # for an example. - remainder_iterator = chain(transition, it) - - return true_iterator(), remainder_iterator - - -def triplewise(iterable): - """Return overlapping triplets from *iterable*. - - >>> list(triplewise('ABCDE')) - [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')] - - """ - for (a, _), (b, c) in pairwise(pairwise(iterable)): - yield a, b, c - - -def sliding_window(iterable, n): - """Return a sliding window of width *n* over *iterable*. - - >>> list(sliding_window(range(6), 4)) - [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)] - - If *iterable* has fewer than *n* items, then nothing is yielded: - - >>> list(sliding_window(range(3), 4)) - [] - - For a variant with more features, see :func:`windowed`. - """ - it = iter(iterable) - window = deque(islice(it, n - 1), maxlen=n) - for x in it: - window.append(x) - yield tuple(window) - - -def subslices(iterable): - """Return all contiguous non-empty subslices of *iterable*. - - >>> list(subslices('ABC')) - [['A'], ['A', 'B'], ['A', 'B', 'C'], ['B'], ['B', 'C'], ['C']] - - This is similar to :func:`substrings`, but emits items in a different - order. - """ - seq = list(iterable) - slices = starmap(slice, combinations(range(len(seq) + 1), 2)) - return map(operator.getitem, repeat(seq), slices) - - -def polynomial_from_roots(roots): - """Compute a polynomial's coefficients from its roots. - - >>> roots = [5, -4, 3] # (x - 5) * (x + 4) * (x - 3) - >>> polynomial_from_roots(roots) # x^3 - 4 * x^2 - 17 * x + 60 - [1, -4, -17, 60] - """ - factors = zip(repeat(1), map(operator.neg, roots)) - return list(reduce(convolve, factors, [1])) - - -def iter_index(iterable, value, start=0, stop=None): - """Yield the index of each place in *iterable* that *value* occurs, - beginning with index *start* and ending before index *stop*. - - - >>> list(iter_index('AABCADEAF', 'A')) - [0, 1, 4, 7] - >>> list(iter_index('AABCADEAF', 'A', 1)) # start index is inclusive - [1, 4, 7] - >>> list(iter_index('AABCADEAF', 'A', 1, 7)) # stop index is not inclusive - [1, 4] - - The behavior for non-scalar *values* matches the built-in Python types. - - >>> list(iter_index('ABCDABCD', 'AB')) - [0, 4] - >>> list(iter_index([0, 1, 2, 3, 0, 1, 2, 3], [0, 1])) - [] - >>> list(iter_index([[0, 1], [2, 3], [0, 1], [2, 3]], [0, 1])) - [0, 2] - - See :func:`locate` for a more general means of finding the indexes - associated with particular values. - - """ - seq_index = getattr(iterable, 'index', None) - if seq_index is None: - # Slow path for general iterables - it = islice(iterable, start, stop) - for i, element in enumerate(it, start): - if element is value or element == value: - yield i - else: - # Fast path for sequences - stop = len(iterable) if stop is None else stop - i = start - 1 - try: - while True: - yield (i := seq_index(value, i + 1, stop)) - except ValueError: - pass - - -def sieve(n): - """Yield the primes less than n. 
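
# [Editor's sketch] Worked check of polynomial_from_roots() above: the
# coefficients [1, -4, -17, 60] from its docstring must vanish at each
# root of (x - 5)(x + 4)(x - 3).
def poly_eval(coeffs, x):
    result = 0
    for c in coeffs:
        result = result * x + c   # Horner's rule
    return result

assert all(poly_eval([1, -4, -17, 60], r) == 0 for r in (5, -4, 3))
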
- - >>> list(sieve(30)) - [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] - """ - if n > 2: - yield 2 - start = 3 - data = bytearray((0, 1)) * (n // 2) - limit = math.isqrt(n) + 1 - for p in iter_index(data, 1, start, limit): - yield from iter_index(data, 1, start, p * p) - data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p))) - start = p * p - yield from iter_index(data, 1, start) - - -def _batched(iterable, n, *, strict=False): - """Batch data into tuples of length *n*. If the number of items in - *iterable* is not divisible by *n*: - * The last batch will be shorter if *strict* is ``False``. - * :exc:`ValueError` will be raised if *strict* is ``True``. - - >>> list(batched('ABCDEFG', 3)) - [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)] - - On Python 3.13 and above, this is an alias for :func:`itertools.batched`. - """ - if n < 1: - raise ValueError('n must be at least one') - it = iter(iterable) - while batch := tuple(islice(it, n)): - if strict and len(batch) != n: - raise ValueError('batched(): incomplete batch') - yield batch - - -if hexversion >= 0x30D00A2: - from itertools import batched as itertools_batched - - def batched(iterable, n, *, strict=False): - return itertools_batched(iterable, n, strict=strict) - -else: - batched = _batched - - batched.__doc__ = _batched.__doc__ - - -def transpose(it): - """Swap the rows and columns of the input matrix. - - >>> list(transpose([(1, 2, 3), (11, 22, 33)])) - [(1, 11), (2, 22), (3, 33)] - - The caller should ensure that the dimensions of the input are compatible. - If the input is empty, no output will be produced. - """ - return _zip_strict(*it) - - -def reshape(matrix, cols): - """Reshape the 2-D input *matrix* to have a column count given by *cols*. - - >>> matrix = [(0, 1), (2, 3), (4, 5)] - >>> cols = 3 - >>> list(reshape(matrix, cols)) - [(0, 1, 2), (3, 4, 5)] - """ - return batched(chain.from_iterable(matrix), cols) - - -def matmul(m1, m2): - """Multiply two matrices. - - >>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)])) - [(49, 80), (41, 60)] - - The caller should ensure that the dimensions of the input matrices are - compatible with each other. - """ - n = len(m2[0]) - return batched(starmap(_sumprod, product(m1, transpose(m2))), n) - - -def factor(n): - """Yield the prime factors of n. - - >>> list(factor(360)) - [2, 2, 2, 3, 3, 5] - """ - for prime in sieve(math.isqrt(n) + 1): - while not n % prime: - yield prime - n //= prime - if n == 1: - return - if n > 1: - yield n - - -def polynomial_eval(coefficients, x): - """Evaluate a polynomial at a specific value. - - Example: evaluating x^3 - 4 * x^2 - 17 * x + 60 at x = 2.5: - - >>> coefficients = [1, -4, -17, 60] - >>> x = 2.5 - >>> polynomial_eval(coefficients, x) - 8.125 - """ - n = len(coefficients) - if n == 0: - return x * 0 # coerce zero to the type of x - powers = map(pow, repeat(x), reversed(range(n))) - return _sumprod(coefficients, powers) - - -def sum_of_squares(it): - """Return the sum of the squares of the input values. - - >>> sum_of_squares([10, 20, 30]) - 1400 - """ - return _sumprod(*tee(it)) - - -def polynomial_derivative(coefficients): - """Compute the first derivative of a polynomial. 
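
# [Editor's sketch] Hand expansion of the matmul() docstring example above:
# each output entry is the sum-product of a row of m1 with a column of m2.
m1 = [(7, 5), (3, 5)]
m2 = [(2, 5), (7, 9)]
cols = list(zip(*m2))   # transpose m2 into its columns
result = [tuple(sum(a * b for a, b in zip(row, col)) for col in cols)
          for row in m1]
assert result == [(49, 80), (41, 60)]
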
- - Example: evaluating the derivative of x^3 - 4 * x^2 - 17 * x + 60 - - >>> coefficients = [1, -4, -17, 60] - >>> derivative_coefficients = polynomial_derivative(coefficients) - >>> derivative_coefficients - [3, -8, -17] - """ - n = len(coefficients) - powers = reversed(range(1, n)) - return list(map(operator.mul, coefficients, powers)) - - -def totient(n): - """Return the count of natural numbers up to *n* that are coprime with *n*. - - >>> totient(9) - 6 - >>> totient(12) - 4 - """ - # The itertools docs use unique_justseen instead of set; see - # https://github.com/more-itertools/more-itertools/issues/823 - for p in set(factor(n)): - n = n // p * (p - 1) - - return n diff --git a/pkg_resources/_vendor/more_itertools/recipes.pyi b/pkg_resources/_vendor/more_itertools/recipes.pyi deleted file mode 100644 index 739acec05f..0000000000 --- a/pkg_resources/_vendor/more_itertools/recipes.pyi +++ /dev/null @@ -1,136 +0,0 @@ -"""Stubs for more_itertools.recipes""" - -from __future__ import annotations - -from typing import ( - Any, - Callable, - Iterable, - Iterator, - overload, - Sequence, - Type, - TypeVar, -) - -# Type and type variable definitions -_T = TypeVar('_T') -_T1 = TypeVar('_T1') -_T2 = TypeVar('_T2') -_U = TypeVar('_U') - -def take(n: int, iterable: Iterable[_T]) -> list[_T]: ... -def tabulate( - function: Callable[[int], _T], start: int = ... -) -> Iterator[_T]: ... -def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ... -def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ... -@overload -def nth(iterable: Iterable[_T], n: int) -> _T | None: ... -@overload -def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ... -def all_equal( - iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... -) -> bool: ... -def quantify( - iterable: Iterable[_T], pred: Callable[[_T], bool] = ... -) -> int: ... -def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ... -def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ... -def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ... -def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ... -def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ... -def repeatfunc( - func: Callable[..., _U], times: int | None = ..., *args: Any -) -> Iterator[_U]: ... -def pairwise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T]]: ... -def grouper( - iterable: Iterable[_T], - n: int, - incomplete: str = ..., - fillvalue: _U = ..., -) -> Iterator[tuple[_T | _U, ...]]: ... -def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ... -def partition( - pred: Callable[[_T], object] | None, iterable: Iterable[_T] -) -> tuple[Iterator[_T], Iterator[_T]]: ... -def powerset(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ... -def unique_everseen( - iterable: Iterable[_T], key: Callable[[_T], _U] | None = ... -) -> Iterator[_T]: ... -def unique_justseen( - iterable: Iterable[_T], key: Callable[[_T], object] | None = ... -) -> Iterator[_T]: ... -def unique( - iterable: Iterable[_T], - key: Callable[[_T], object] | None = ..., - reverse: bool = False, -) -> Iterator[_T]: ... -@overload -def iter_except( - func: Callable[[], _T], - exception: Type[BaseException] | tuple[Type[BaseException], ...], - first: None = ..., -) -> Iterator[_T]: ... -@overload -def iter_except( - func: Callable[[], _T], - exception: Type[BaseException] | tuple[Type[BaseException], ...], - first: Callable[[], _U], -) -> Iterator[_T | _U]: ... 
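
# [Editor's sketch] What the two iter_except() overloads above select, shown
# at runtime (assumes more_itertools is installed): without `first` the
# element type is preserved; a `first` callable unions in its return type.
from more_itertools import iter_except

l = [0, 1, 2]
ints = iter_except(l.pop, IndexError)                     # Iterator[int]
mixed = iter_except(l.pop, IndexError, lambda: "start")   # Iterator[int | str]
assert next(mixed) == "start"
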
-@overload -def first_true( - iterable: Iterable[_T], *, pred: Callable[[_T], object] | None = ... -) -> _T | None: ... -@overload -def first_true( - iterable: Iterable[_T], - default: _U, - pred: Callable[[_T], object] | None = ..., -) -> _T | _U: ... -def random_product( - *args: Iterable[_T], repeat: int = ... -) -> tuple[_T, ...]: ... -def random_permutation( - iterable: Iterable[_T], r: int | None = ... -) -> tuple[_T, ...]: ... -def random_combination(iterable: Iterable[_T], r: int) -> tuple[_T, ...]: ... -def random_combination_with_replacement( - iterable: Iterable[_T], r: int -) -> tuple[_T, ...]: ... -def nth_combination( - iterable: Iterable[_T], r: int, index: int -) -> tuple[_T, ...]: ... -def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[_T | _U]: ... -def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ... -def before_and_after( - predicate: Callable[[_T], bool], it: Iterable[_T] -) -> tuple[Iterator[_T], Iterator[_T]]: ... -def triplewise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T, _T]]: ... -def sliding_window( - iterable: Iterable[_T], n: int -) -> Iterator[tuple[_T, ...]]: ... -def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ... -def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ... -def iter_index( - iterable: Iterable[_T], - value: Any, - start: int | None = ..., - stop: int | None = ..., -) -> Iterator[int]: ... -def sieve(n: int) -> Iterator[int]: ... -def batched( - iterable: Iterable[_T], n: int, *, strict: bool = False -) -> Iterator[tuple[_T]]: ... -def transpose( - it: Iterable[Iterable[_T]], -) -> Iterator[tuple[_T, ...]]: ... -def reshape( - matrix: Iterable[Iterable[_T]], cols: int -) -> Iterator[tuple[_T, ...]]: ... -def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ... -def factor(n: int) -> Iterator[int]: ... -def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ... -def sum_of_squares(it: Iterable[_T]) -> _T: ... -def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ... -def totient(n: int) -> int: ... diff --git a/pkg_resources/_vendor/packaging-24.1.dist-info/INSTALLER b/pkg_resources/_vendor/packaging-24.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e38a..0000000000 --- a/pkg_resources/_vendor/packaging-24.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE b/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE deleted file mode 100644 index 6f62d44e4e..0000000000 --- a/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made -under the terms of *both* these licenses. diff --git a/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.APACHE b/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.APACHE deleted file mode 100644 index f433b1a53f..0000000000 --- a/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.APACHE +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS diff --git a/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.BSD b/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.BSD deleted file mode 100644 index 42ce7b75c9..0000000000 --- a/pkg_resources/_vendor/packaging-24.1.dist-info/LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Donald Stufft and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pkg_resources/_vendor/packaging-24.1.dist-info/METADATA b/pkg_resources/_vendor/packaging-24.1.dist-info/METADATA deleted file mode 100644 index 255dc46e0e..0000000000 --- a/pkg_resources/_vendor/packaging-24.1.dist-info/METADATA +++ /dev/null @@ -1,102 +0,0 @@ -Metadata-Version: 2.1 -Name: packaging -Version: 24.1 -Summary: Core utilities for Python packages -Author-email: Donald Stufft -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: License :: OSI Approved :: BSD License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Typing :: Typed -Project-URL: Documentation, https://packaging.pypa.io/ -Project-URL: Source, https://github.com/pypa/packaging - -packaging -========= - -.. start-intro - -Reusable core utilities for various Python Packaging -`interoperability specifications `_. - -This library provides utilities that implement the interoperability -specifications which have clearly one correct behaviour (eg: :pep:`440`) -or benefit greatly from having a single shared implementation (eg: :pep:`425`). - -.. end-intro - -The ``packaging`` project includes the following: version handling, specifiers, -markers, requirements, tags, utilities. - -Documentation -------------- - -The `documentation`_ provides information and the API for the following: - -- Version Handling -- Specifiers -- Markers -- Requirements -- Tags -- Utilities - -Installation ------------- - -Use ``pip`` to install these utilities:: - - pip install packaging - -The ``packaging`` library uses calendar-based versioning (``YY.N``). - -Discussion ----------- - -If you run into bugs, you can file them in our `issue tracker`_. - -You can also join ``#pypa`` on Freenode to ask questions or get involved. - - -.. _`documentation`: https://packaging.pypa.io/ -.. _`issue tracker`: https://github.com/pypa/packaging/issues - - -Code of Conduct ---------------- - -Everyone interacting in the packaging project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. - -.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md - -Contributing ------------- - -The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as -well as how to report a potential security issue. 
The documentation for this -project also covers information about `project development`_ and `security`_. - -.. _`project development`: https://packaging.pypa.io/en/latest/development/ -.. _`security`: https://packaging.pypa.io/en/latest/security/ - -Project History ---------------- - -Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for -recent changes and project history. - -.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ - diff --git a/pkg_resources/_vendor/packaging-24.1.dist-info/RECORD b/pkg_resources/_vendor/packaging-24.1.dist-info/RECORD deleted file mode 100644 index 2b1e6bd4db..0000000000 --- a/pkg_resources/_vendor/packaging-24.1.dist-info/RECORD +++ /dev/null @@ -1,37 +0,0 @@ -packaging-24.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -packaging-24.1.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 -packaging-24.1.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 -packaging-24.1.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 -packaging-24.1.dist-info/METADATA,sha256=X3ooO3WnCfzNSBrqQjefCD1POAF1M2WSLmsHMgQlFdk,3204 -packaging-24.1.dist-info/RECORD,, -packaging-24.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging-24.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -packaging/__init__.py,sha256=dtw2bNmWCQ9WnMoK3bk_elL1svSlikXtLpZhCFIB9SE,496 -packaging/__pycache__/__init__.cpython-312.pyc,, -packaging/__pycache__/_elffile.cpython-312.pyc,, -packaging/__pycache__/_manylinux.cpython-312.pyc,, -packaging/__pycache__/_musllinux.cpython-312.pyc,, -packaging/__pycache__/_parser.cpython-312.pyc,, -packaging/__pycache__/_structures.cpython-312.pyc,, -packaging/__pycache__/_tokenizer.cpython-312.pyc,, -packaging/__pycache__/markers.cpython-312.pyc,, -packaging/__pycache__/metadata.cpython-312.pyc,, -packaging/__pycache__/requirements.cpython-312.pyc,, -packaging/__pycache__/specifiers.cpython-312.pyc,, -packaging/__pycache__/tags.cpython-312.pyc,, -packaging/__pycache__/utils.cpython-312.pyc,, -packaging/__pycache__/version.cpython-312.pyc,, -packaging/_elffile.py,sha256=_LcJW4YNKywYsl4169B2ukKRqwxjxst_8H0FRVQKlz8,3282 -packaging/_manylinux.py,sha256=Xo4V0PZz8sbuVCbTni0t1CR0AHeir_7ib4lTmV8scD4,9586 -packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694 -packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236 -packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 -packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273 -packaging/markers.py,sha256=dWKSqn5Sp-jDmOG-W3GfLHKjwhf1IsznbT71VlBoB5M,10671 -packaging/metadata.py,sha256=KINuSkJ12u-SyoKNTy_pHNGAfMUtxNvZ53qA1zAKcKI,32349 -packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947 -packaging/specifiers.py,sha256=rjpc3hoJuunRIT6DdH7gLTnQ5j5QKSuWjoTC5sdHtHI,39714 -packaging/tags.py,sha256=y8EbheOu9WS7s-MebaXMcHMF-jzsA_C1Lz5XRTiSy4w,18883 -packaging/utils.py,sha256=NAdYUwnlAOpkat_RthavX8a07YuVxgGL_vwrx73GSDM,5287 -packaging/version.py,sha256=V0H3SOj_8werrvorrb6QDLRhlcqSErNTTkCvvfszhDI,16198 diff --git a/pkg_resources/_vendor/packaging-24.1.dist-info/REQUESTED b/pkg_resources/_vendor/packaging-24.1.dist-info/REQUESTED deleted file mode 100644 index e69de29bb2..0000000000 diff --git 
a/pkg_resources/_vendor/packaging-24.1.dist-info/WHEEL b/pkg_resources/_vendor/packaging-24.1.dist-info/WHEEL deleted file mode 100644 index 3b5e64b5e6..0000000000 --- a/pkg_resources/_vendor/packaging-24.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/pkg_resources/_vendor/packaging/__init__.py b/pkg_resources/_vendor/packaging/__init__.py deleted file mode 100644 index 9ba41d8357..0000000000 --- a/pkg_resources/_vendor/packaging/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "24.1" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD-2-Clause or Apache-2.0" -__copyright__ = "2014 %s" % __author__ diff --git a/pkg_resources/_vendor/packaging/_elffile.py b/pkg_resources/_vendor/packaging/_elffile.py deleted file mode 100644 index f7a02180bf..0000000000 --- a/pkg_resources/_vendor/packaging/_elffile.py +++ /dev/null @@ -1,110 +0,0 @@ -""" -ELF file parser. - -This provides a class ``ELFFile`` that parses an ELF executable in a similar -interface to ``ZipFile``. Only the read interface is implemented. - -Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca -ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html -""" - -from __future__ import annotations - -import enum -import os -import struct -from typing import IO - - -class ELFInvalid(ValueError): - pass - - -class EIClass(enum.IntEnum): - C32 = 1 - C64 = 2 - - -class EIData(enum.IntEnum): - Lsb = 1 - Msb = 2 - - -class EMachine(enum.IntEnum): - I386 = 3 - S390 = 22 - Arm = 40 - X8664 = 62 - AArc64 = 183 - - -class ELFFile: - """ - Representation of an ELF executable. - """ - - def __init__(self, f: IO[bytes]) -> None: - self._f = f - - try: - ident = self._read("16B") - except struct.error: - raise ELFInvalid("unable to parse identification") - magic = bytes(ident[:4]) - if magic != b"\x7fELF": - raise ELFInvalid(f"invalid magic: {magic!r}") - - self.capacity = ident[4] # Format for program header (bitness). - self.encoding = ident[5] # Data structure encoding (endianness). - - try: - # e_fmt: Format for program header. - # p_fmt: Format for section header. - # p_idx: Indexes to find p_type, p_offset, and p_filesz. - e_fmt, self._p_fmt, self._p_idx = { - (1, 1): ("HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB. - (2, 1): ("HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB. - }[(self.capacity, self.encoding)] - except KeyError: - raise ELFInvalid( - f"unrecognized capacity ({self.capacity}) or " - f"encoding ({self.encoding})" - ) - - try: - ( - _, - self.machine, # Architecture type. - _, - _, - self._e_phoff, # Offset of program header. - _, - self.flags, # Processor-specific flags. - _, - self._e_phentsize, # Size of section. - self._e_phnum, # Number of sections. - ) = self._read(e_fmt) - except struct.error as e: - raise ELFInvalid("unable to parse machine and section information") from e - - def _read(self, fmt: str) -> tuple[int, ...]: - return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) - - @property - def interpreter(self) -> str | None: - """ - The path recorded in the ``PT_INTERP`` section header. 
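
# [Editor's sketch] The header probe performed by ELFFile above, done by
# hand: read the 16-byte e_ident, check the magic, then pull bitness and
# endianness. "/bin/ls" is an assumption -- any local ELF binary works.
import struct

with open("/bin/ls", "rb") as f:
    ident = struct.unpack("16B", f.read(16))
assert bytes(ident[:4]) == b"\x7fELF"
ei_class, ei_data = ident[4], ident[5]   # 1/2 = 32/64-bit, 1/2 = LSB/MSB
print(f"class={ei_class}, data={ei_data}")
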
- """ - for index in range(self._e_phnum): - self._f.seek(self._e_phoff + self._e_phentsize * index) - try: - data = self._read(self._p_fmt) - except struct.error: - continue - if data[self._p_idx[0]] != 3: # Not PT_INTERP. - continue - self._f.seek(data[self._p_idx[1]]) - return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") - return None diff --git a/pkg_resources/_vendor/packaging/_manylinux.py b/pkg_resources/_vendor/packaging/_manylinux.py deleted file mode 100644 index 08f651fbd8..0000000000 --- a/pkg_resources/_vendor/packaging/_manylinux.py +++ /dev/null @@ -1,262 +0,0 @@ -from __future__ import annotations - -import collections -import contextlib -import functools -import os -import re -import sys -import warnings -from typing import Generator, Iterator, NamedTuple, Sequence - -from ._elffile import EIClass, EIData, ELFFile, EMachine - -EF_ARM_ABIMASK = 0xFF000000 -EF_ARM_ABI_VER5 = 0x05000000 -EF_ARM_ABI_FLOAT_HARD = 0x00000400 - - -# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` -# as the type for `path` until then. -@contextlib.contextmanager -def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]: - try: - with open(path, "rb") as f: - yield ELFFile(f) - except (OSError, TypeError, ValueError): - yield None - - -def _is_linux_armhf(executable: str) -> bool: - # hard-float ABI can be detected from the ELF header of the running - # process - # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf - with _parse_elf(executable) as f: - return ( - f is not None - and f.capacity == EIClass.C32 - and f.encoding == EIData.Lsb - and f.machine == EMachine.Arm - and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 - and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD - ) - - -def _is_linux_i686(executable: str) -> bool: - with _parse_elf(executable) as f: - return ( - f is not None - and f.capacity == EIClass.C32 - and f.encoding == EIData.Lsb - and f.machine == EMachine.I386 - ) - - -def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: - if "armv7l" in archs: - return _is_linux_armhf(executable) - if "i686" in archs: - return _is_linux_i686(executable) - allowed_archs = { - "x86_64", - "aarch64", - "ppc64", - "ppc64le", - "s390x", - "loongarch64", - "riscv64", - } - return any(arch in allowed_archs for arch in archs) - - -# If glibc ever changes its major version, we need to know what the last -# minor version was, so we can build the complete list of all versions. -# For now, guess what the highest minor version might be, assume it will -# be 50 for testing. Once this actually happens, update the dictionary -# with the actual value. -_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50) - - -class _GLibCVersion(NamedTuple): - major: int - minor: int - - -def _glibc_version_string_confstr() -> str | None: - """ - Primary implementation of glibc_version_string using os.confstr. - """ - # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely - # to be broken or missing. This strategy is used in the standard library - # platform module. - # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 - try: - # Should be a string like "glibc 2.17". - version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION") - assert version_string is not None - _, version = version_string.rsplit() - except (AssertionError, AttributeError, OSError, ValueError): - # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... 
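
# [Editor's sketch] The confstr probe above, as a standalone snippet; on
# non-glibc platforms os.confstr may be missing, reject the name, or
# return a bad value, so all three failure modes are caught.
import os

try:
    raw = os.confstr("CS_GNU_LIBC_VERSION")   # e.g. "glibc 2.17"
except (AttributeError, ValueError, OSError):
    raw = None
print(raw.rsplit()[-1] if raw else None)      # -> "2.17" on glibc
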
- return None - return version - - -def _glibc_version_string_ctypes() -> str | None: - """ - Fallback implementation of glibc_version_string using ctypes. - """ - try: - import ctypes - except ImportError: - return None - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - # - # We must also handle the special case where the executable is not a - # dynamically linked executable. This can occur when using musl libc, - # for example. In this situation, dlopen() will error, leading to an - # OSError. Interestingly, at least in the case of musl, there is no - # errno set on the OSError. The single string argument used to construct - # OSError comes from libc itself and is therefore not portable to - # hard code here. In any case, failure to call dlopen() means we - # can proceed, so we bail on our attempt. - try: - process_namespace = ctypes.CDLL(None) - except OSError: - return None - - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str: str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -def _glibc_version_string() -> str | None: - """Returns glibc version string, or None if not using glibc.""" - return _glibc_version_string_confstr() or _glibc_version_string_ctypes() - - -def _parse_glibc_version(version_str: str) -> tuple[int, int]: - """Parse glibc version. - - We use a regexp instead of str.split because we want to discard any - random junk that might come after the minor version -- this might happen - in patched/forked versions of glibc (e.g. Linaro's version of glibc - uses version strings like "2.20-2014.11"). See gh-3588. - """ - m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) - if not m: - warnings.warn( - f"Expected glibc version with 2 components major.minor," - f" got: {version_str}", - RuntimeWarning, - ) - return -1, -1 - return int(m.group("major")), int(m.group("minor")) - - -@functools.lru_cache -def _get_glibc_version() -> tuple[int, int]: - version_str = _glibc_version_string() - if version_str is None: - return (-1, -1) - return _parse_glibc_version(version_str) - - -# From PEP 513, PEP 600 -def _is_compatible(arch: str, version: _GLibCVersion) -> bool: - sys_glibc = _get_glibc_version() - if sys_glibc < version: - return False - # Check for presence of _manylinux module. 
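
# [Editor's sketch] The ctypes fallback above, condensed: dlopen(NULL)
# yields a handle to the running process itself, so the symbol lookup
# answers "which libc am I actually linked against?". Returns None for
# musl or statically linked builds; assumes a Linux-like platform.
import ctypes

def glibc_version_via_ctypes():
    try:
        fn = ctypes.CDLL(None).gnu_get_libc_version
    except (OSError, AttributeError):
        return None                     # not dynamically linked to glibc
    fn.restype = ctypes.c_char_p
    return fn().decode("ascii")         # e.g. "2.17"

print(glibc_version_via_ctypes())
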
- try: - import _manylinux - except ImportError: - return True - if hasattr(_manylinux, "manylinux_compatible"): - result = _manylinux.manylinux_compatible(version[0], version[1], arch) - if result is not None: - return bool(result) - return True - if version == _GLibCVersion(2, 5): - if hasattr(_manylinux, "manylinux1_compatible"): - return bool(_manylinux.manylinux1_compatible) - if version == _GLibCVersion(2, 12): - if hasattr(_manylinux, "manylinux2010_compatible"): - return bool(_manylinux.manylinux2010_compatible) - if version == _GLibCVersion(2, 17): - if hasattr(_manylinux, "manylinux2014_compatible"): - return bool(_manylinux.manylinux2014_compatible) - return True - - -_LEGACY_MANYLINUX_MAP = { - # CentOS 7 w/ glibc 2.17 (PEP 599) - (2, 17): "manylinux2014", - # CentOS 6 w/ glibc 2.12 (PEP 571) - (2, 12): "manylinux2010", - # CentOS 5 w/ glibc 2.5 (PEP 513) - (2, 5): "manylinux1", -} - - -def platform_tags(archs: Sequence[str]) -> Iterator[str]: - """Generate manylinux tags compatible to the current platform. - - :param archs: Sequence of compatible architectures. - The first one shall be the closest to the actual architecture and be the part of - platform tag after the ``linux_`` prefix, e.g. ``x86_64``. - The ``linux_`` prefix is assumed as a prerequisite for the current platform to - be manylinux-compatible. - - :returns: An iterator of compatible manylinux tags. - """ - if not _have_compatible_abi(sys.executable, archs): - return - # Oldest glibc to be supported regardless of architecture is (2, 17). - too_old_glibc2 = _GLibCVersion(2, 16) - if set(archs) & {"x86_64", "i686"}: - # On x86/i686 also oldest glibc to be supported is (2, 5). - too_old_glibc2 = _GLibCVersion(2, 4) - current_glibc = _GLibCVersion(*_get_glibc_version()) - glibc_max_list = [current_glibc] - # We can assume compatibility across glibc major versions. - # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 - # - # Build a list of maximum glibc versions so that we can - # output the canonical list of all glibc from current_glibc - # down to too_old_glibc2, including all intermediary versions. - for glibc_major in range(current_glibc.major - 1, 1, -1): - glibc_minor = _LAST_GLIBC_MINOR[glibc_major] - glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) - for arch in archs: - for glibc_max in glibc_max_list: - if glibc_max.major == too_old_glibc2.major: - min_minor = too_old_glibc2.minor - else: - # For other glibc major versions oldest supported is (x, 0). - min_minor = -1 - for glibc_minor in range(glibc_max.minor, min_minor, -1): - glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) - tag = "manylinux_{}_{}".format(*glibc_version) - if _is_compatible(arch, glibc_version): - yield f"{tag}_{arch}" - # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. - if glibc_version in _LEGACY_MANYLINUX_MAP: - legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] - if _is_compatible(arch, glibc_version): - yield f"{legacy_tag}_{arch}" diff --git a/pkg_resources/_vendor/packaging/_musllinux.py b/pkg_resources/_vendor/packaging/_musllinux.py deleted file mode 100644 index d2bf30b563..0000000000 --- a/pkg_resources/_vendor/packaging/_musllinux.py +++ /dev/null @@ -1,85 +0,0 @@ -"""PEP 656 support. - -This module implements logic to detect if the currently running Python is -linked against musl, and what musl version is used. 
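
# [Editor's sketch] The ordering the manylinux platform_tags() loop above
# emits on, say, glibc 2.17/x86_64 (hand-derived; _manylinux is a private
# module, shown for illustration only):
#   manylinux_2_17_x86_64, manylinux2014_x86_64, manylinux_2_16_x86_64,
#   ..., manylinux_2_12_x86_64, manylinux2010_x86_64, ...,
#   manylinux_2_5_x86_64, manylinux1_x86_64
from packaging import _manylinux

for tag in _manylinux.platform_tags(["x86_64"]):
    print(tag)
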
-""" - -from __future__ import annotations - -import functools -import re -import subprocess -import sys -from typing import Iterator, NamedTuple, Sequence - -from ._elffile import ELFFile - - -class _MuslVersion(NamedTuple): - major: int - minor: int - - -def _parse_musl_version(output: str) -> _MuslVersion | None: - lines = [n for n in (n.strip() for n in output.splitlines()) if n] - if len(lines) < 2 or lines[0][:4] != "musl": - return None - m = re.match(r"Version (\d+)\.(\d+)", lines[1]) - if not m: - return None - return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) - - -@functools.lru_cache -def _get_musl_version(executable: str) -> _MuslVersion | None: - """Detect currently-running musl runtime version. - - This is done by checking the specified executable's dynamic linking - information, and invoking the loader to parse its output for a version - string. If the loader is musl, the output would be something like:: - - musl libc (x86_64) - Version 1.2.2 - Dynamic Program Loader - """ - try: - with open(executable, "rb") as f: - ld = ELFFile(f).interpreter - except (OSError, TypeError, ValueError): - return None - if ld is None or "musl" not in ld: - return None - proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) - return _parse_musl_version(proc.stderr) - - -def platform_tags(archs: Sequence[str]) -> Iterator[str]: - """Generate musllinux tags compatible to the current platform. - - :param archs: Sequence of compatible architectures. - The first one shall be the closest to the actual architecture and be the part of - platform tag after the ``linux_`` prefix, e.g. ``x86_64``. - The ``linux_`` prefix is assumed as a prerequisite for the current platform to - be musllinux-compatible. - - :returns: An iterator of compatible musllinux tags. - """ - sys_musl = _get_musl_version(sys.executable) - if sys_musl is None: # Python not dynamically linked against musl. - return - for arch in archs: - for minor in range(sys_musl.minor, -1, -1): - yield f"musllinux_{sys_musl.major}_{minor}_{arch}" - - -if __name__ == "__main__": # pragma: no cover - import sysconfig - - plat = sysconfig.get_platform() - assert plat.startswith("linux-"), "not linux" - - print("plat:", plat) - print("musl:", _get_musl_version(sys.executable)) - print("tags:", end=" ") - for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): - print(t, end="\n ") diff --git a/pkg_resources/_vendor/packaging/_parser.py b/pkg_resources/_vendor/packaging/_parser.py deleted file mode 100644 index c1238c06ea..0000000000 --- a/pkg_resources/_vendor/packaging/_parser.py +++ /dev/null @@ -1,354 +0,0 @@ -"""Handwritten parser of dependency specifiers. - -The docstring for each __parse_* function contains EBNF-inspired grammar representing -the implementation. 
-""" - -from __future__ import annotations - -import ast -from typing import NamedTuple, Sequence, Tuple, Union - -from ._tokenizer import DEFAULT_RULES, Tokenizer - - -class Node: - def __init__(self, value: str) -> None: - self.value = value - - def __str__(self) -> str: - return self.value - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}('{self}')>" - - def serialize(self) -> str: - raise NotImplementedError - - -class Variable(Node): - def serialize(self) -> str: - return str(self) - - -class Value(Node): - def serialize(self) -> str: - return f'"{self}"' - - -class Op(Node): - def serialize(self) -> str: - return str(self) - - -MarkerVar = Union[Variable, Value] -MarkerItem = Tuple[MarkerVar, Op, MarkerVar] -MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]] -MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]] - - -class ParsedRequirement(NamedTuple): - name: str - url: str - extras: list[str] - specifier: str - marker: MarkerList | None - - -# -------------------------------------------------------------------------------------- -# Recursive descent parser for dependency specifier -# -------------------------------------------------------------------------------------- -def parse_requirement(source: str) -> ParsedRequirement: - return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) - - -def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: - """ - requirement = WS? IDENTIFIER WS? extras WS? requirement_details - """ - tokenizer.consume("WS") - - name_token = tokenizer.expect( - "IDENTIFIER", expected="package name at the start of dependency specifier" - ) - name = name_token.text - tokenizer.consume("WS") - - extras = _parse_extras(tokenizer) - tokenizer.consume("WS") - - url, specifier, marker = _parse_requirement_details(tokenizer) - tokenizer.expect("END", expected="end of dependency specifier") - - return ParsedRequirement(name, url, extras, specifier, marker) - - -def _parse_requirement_details( - tokenizer: Tokenizer, -) -> tuple[str, str, MarkerList | None]: - """ - requirement_details = AT URL (WS requirement_marker?)? - | specifier WS? (requirement_marker)? - """ - - specifier = "" - url = "" - marker = None - - if tokenizer.check("AT"): - tokenizer.read() - tokenizer.consume("WS") - - url_start = tokenizer.position - url = tokenizer.expect("URL", expected="URL after @").text - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - tokenizer.expect("WS", expected="whitespace after URL") - - # The input might end after whitespace. - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - marker = _parse_requirement_marker( - tokenizer, span_start=url_start, after="URL and whitespace" - ) - else: - specifier_start = tokenizer.position - specifier = _parse_specifier(tokenizer) - tokenizer.consume("WS") - - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - marker = _parse_requirement_marker( - tokenizer, - span_start=specifier_start, - after=( - "version specifier" - if specifier - else "name and no valid version specifier" - ), - ) - - return (url, specifier, marker) - - -def _parse_requirement_marker( - tokenizer: Tokenizer, *, span_start: int, after: str -) -> MarkerList: - """ - requirement_marker = SEMICOLON marker WS? 
- """ - - if not tokenizer.check("SEMICOLON"): - tokenizer.raise_syntax_error( - f"Expected end or semicolon (after {after})", - span_start=span_start, - ) - tokenizer.read() - - marker = _parse_marker(tokenizer) - tokenizer.consume("WS") - - return marker - - -def _parse_extras(tokenizer: Tokenizer) -> list[str]: - """ - extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? - """ - if not tokenizer.check("LEFT_BRACKET", peek=True): - return [] - - with tokenizer.enclosing_tokens( - "LEFT_BRACKET", - "RIGHT_BRACKET", - around="extras", - ): - tokenizer.consume("WS") - extras = _parse_extras_list(tokenizer) - tokenizer.consume("WS") - - return extras - - -def _parse_extras_list(tokenizer: Tokenizer) -> list[str]: - """ - extras_list = identifier (wsp* ',' wsp* identifier)* - """ - extras: list[str] = [] - - if not tokenizer.check("IDENTIFIER"): - return extras - - extras.append(tokenizer.read().text) - - while True: - tokenizer.consume("WS") - if tokenizer.check("IDENTIFIER", peek=True): - tokenizer.raise_syntax_error("Expected comma between extra names") - elif not tokenizer.check("COMMA"): - break - - tokenizer.read() - tokenizer.consume("WS") - - extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") - extras.append(extra_token.text) - - return extras - - -def _parse_specifier(tokenizer: Tokenizer) -> str: - """ - specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS - | WS? version_many WS? - """ - with tokenizer.enclosing_tokens( - "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", - around="version specifier", - ): - tokenizer.consume("WS") - parsed_specifiers = _parse_version_many(tokenizer) - tokenizer.consume("WS") - - return parsed_specifiers - - -def _parse_version_many(tokenizer: Tokenizer) -> str: - """ - version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? - """ - parsed_specifiers = "" - while tokenizer.check("SPECIFIER"): - span_start = tokenizer.position - parsed_specifiers += tokenizer.read().text - if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): - tokenizer.raise_syntax_error( - ".* suffix can only be used with `==` or `!=` operators", - span_start=span_start, - span_end=tokenizer.position + 1, - ) - if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): - tokenizer.raise_syntax_error( - "Local version label can only be used with `==` or `!=` operators", - span_start=span_start, - span_end=tokenizer.position, - ) - tokenizer.consume("WS") - if not tokenizer.check("COMMA"): - break - parsed_specifiers += tokenizer.read().text - tokenizer.consume("WS") - - return parsed_specifiers - - -# -------------------------------------------------------------------------------------- -# Recursive descent parser for marker expression -# -------------------------------------------------------------------------------------- -def parse_marker(source: str) -> MarkerList: - return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) - - -def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: - retval = _parse_marker(tokenizer) - tokenizer.expect("END", expected="end of marker expression") - return retval - - -def _parse_marker(tokenizer: Tokenizer) -> MarkerList: - """ - marker = marker_atom (BOOLOP marker_atom)+ - """ - expression = [_parse_marker_atom(tokenizer)] - while tokenizer.check("BOOLOP"): - token = tokenizer.read() - expr_right = _parse_marker_atom(tokenizer) - expression.extend((token.text, expr_right)) - return expression - - -def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: - """ - marker_atom = WS? 
LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? - | WS? marker_item WS? - """ - - tokenizer.consume("WS") - if tokenizer.check("LEFT_PARENTHESIS", peek=True): - with tokenizer.enclosing_tokens( - "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", - around="marker expression", - ): - tokenizer.consume("WS") - marker: MarkerAtom = _parse_marker(tokenizer) - tokenizer.consume("WS") - else: - marker = _parse_marker_item(tokenizer) - tokenizer.consume("WS") - return marker - - -def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: - """ - marker_item = WS? marker_var WS? marker_op WS? marker_var WS? - """ - tokenizer.consume("WS") - marker_var_left = _parse_marker_var(tokenizer) - tokenizer.consume("WS") - marker_op = _parse_marker_op(tokenizer) - tokenizer.consume("WS") - marker_var_right = _parse_marker_var(tokenizer) - tokenizer.consume("WS") - return (marker_var_left, marker_op, marker_var_right) - - -def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: - """ - marker_var = VARIABLE | QUOTED_STRING - """ - if tokenizer.check("VARIABLE"): - return process_env_var(tokenizer.read().text.replace(".", "_")) - elif tokenizer.check("QUOTED_STRING"): - return process_python_str(tokenizer.read().text) - else: - tokenizer.raise_syntax_error( - message="Expected a marker variable or quoted string" - ) - - -def process_env_var(env_var: str) -> Variable: - if env_var in ("platform_python_implementation", "python_implementation"): - return Variable("platform_python_implementation") - else: - return Variable(env_var) - - -def process_python_str(python_str: str) -> Value: - value = ast.literal_eval(python_str) - return Value(str(value)) - - -def _parse_marker_op(tokenizer: Tokenizer) -> Op: - """ - marker_op = IN | NOT IN | OP - """ - if tokenizer.check("IN"): - tokenizer.read() - return Op("in") - elif tokenizer.check("NOT"): - tokenizer.read() - tokenizer.expect("WS", expected="whitespace after 'not'") - tokenizer.expect("IN", expected="'in' after 'not'") - return Op("not in") - elif tokenizer.check("OP"): - return Op(tokenizer.read().text) - else: - return tokenizer.raise_syntax_error( - "Expected marker operator, one of " - "<=, <, !=, ==, >=, >, ~=, ===, in, not in" - ) diff --git a/pkg_resources/_vendor/packaging/_structures.py b/pkg_resources/_vendor/packaging/_structures.py deleted file mode 100644 index 90a6465f96..0000000000 --- a/pkg_resources/_vendor/packaging/_structures.py +++ /dev/null @@ -1,61 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- - -class InfinityType: - def __repr__(self) -> str: - return "Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return False - - def __le__(self, other: object) -> bool: - return False - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return True - - def __ge__(self, other: object) -> bool: - return True - - def __neg__(self: object) -> "NegativeInfinityType": - return NegativeInfinity - - -Infinity = InfinityType() - - -class NegativeInfinityType: - def __repr__(self) -> str: - return "-Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return True - - def __le__(self, other: object) -> bool: - return True - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return False - - def __ge__(self, other: object) -> bool: - return False - - def __neg__(self: object) -> InfinityType: - return Infinity - - -NegativeInfinity = NegativeInfinityType() diff --git a/pkg_resources/_vendor/packaging/_tokenizer.py b/pkg_resources/_vendor/packaging/_tokenizer.py deleted file mode 100644 index 89d041605c..0000000000 --- a/pkg_resources/_vendor/packaging/_tokenizer.py +++ /dev/null @@ -1,194 +0,0 @@ -from __future__ import annotations - -import contextlib -import re -from dataclasses import dataclass -from typing import Iterator, NoReturn - -from .specifiers import Specifier - - -@dataclass -class Token: - name: str - text: str - position: int - - -class ParserSyntaxError(Exception): - """The provided source text could not be parsed correctly.""" - - def __init__( - self, - message: str, - *, - source: str, - span: tuple[int, int], - ) -> None: - self.span = span - self.message = message - self.source = source - - super().__init__() - - def __str__(self) -> str: - marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" - return "\n ".join([self.message, self.source, marker]) - - -DEFAULT_RULES: dict[str, str | re.Pattern[str]] = { - "LEFT_PARENTHESIS": r"\(", - "RIGHT_PARENTHESIS": r"\)", - "LEFT_BRACKET": r"\[", - "RIGHT_BRACKET": r"\]", - "SEMICOLON": r";", - "COMMA": r",", - "QUOTED_STRING": re.compile( - r""" - ( - ('[^']*') - | - ("[^"]*") - ) - """, - re.VERBOSE, - ), - "OP": r"(===|==|~=|!=|<=|>=|<|>)", - "BOOLOP": r"\b(or|and)\b", - "IN": r"\bin\b", - "NOT": r"\bnot\b", - "VARIABLE": re.compile( - r""" - \b( - python_version - |python_full_version - |os[._]name - |sys[._]platform - |platform_(release|system) - |platform[._](version|machine|python_implementation) - |python_implementation - |implementation_(name|version) - |extra - )\b - """, - re.VERBOSE, - ), - "SPECIFIER": re.compile( - Specifier._operator_regex_str + Specifier._version_regex_str, - re.VERBOSE | re.IGNORECASE, - ), - "AT": r"\@", - "URL": r"[^ \t]+", - "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", - "VERSION_PREFIX_TRAIL": r"\.\*", - "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*", - "WS": r"[ \t]+", - "END": r"$", -} - - -class Tokenizer: - """Context-sensitive token parsing. - - Provides methods to examine the input stream to check whether the next token - matches. 
- """ - - def __init__( - self, - source: str, - *, - rules: dict[str, str | re.Pattern[str]], - ) -> None: - self.source = source - self.rules: dict[str, re.Pattern[str]] = { - name: re.compile(pattern) for name, pattern in rules.items() - } - self.next_token: Token | None = None - self.position = 0 - - def consume(self, name: str) -> None: - """Move beyond provided token name, if at current position.""" - if self.check(name): - self.read() - - def check(self, name: str, *, peek: bool = False) -> bool: - """Check whether the next token has the provided name. - - By default, if the check succeeds, the token *must* be read before - another check. If `peek` is set to `True`, the token is not loaded and - would need to be checked again. - """ - assert ( - self.next_token is None - ), f"Cannot check for {name!r}, already have {self.next_token!r}" - assert name in self.rules, f"Unknown token name: {name!r}" - - expression = self.rules[name] - - match = expression.match(self.source, self.position) - if match is None: - return False - if not peek: - self.next_token = Token(name, match[0], self.position) - return True - - def expect(self, name: str, *, expected: str) -> Token: - """Expect a certain token name next, failing with a syntax error otherwise. - - The token is *not* read. - """ - if not self.check(name): - raise self.raise_syntax_error(f"Expected {expected}") - return self.read() - - def read(self) -> Token: - """Consume the next token and return it.""" - token = self.next_token - assert token is not None - - self.position += len(token.text) - self.next_token = None - - return token - - def raise_syntax_error( - self, - message: str, - *, - span_start: int | None = None, - span_end: int | None = None, - ) -> NoReturn: - """Raise ParserSyntaxError at the given position.""" - span = ( - self.position if span_start is None else span_start, - self.position if span_end is None else span_end, - ) - raise ParserSyntaxError( - message, - source=self.source, - span=span, - ) - - @contextlib.contextmanager - def enclosing_tokens( - self, open_token: str, close_token: str, *, around: str - ) -> Iterator[None]: - if self.check(open_token): - open_position = self.position - self.read() - else: - open_position = None - - yield - - if open_position is None: - return - - if not self.check(close_token): - self.raise_syntax_error( - f"Expected matching {close_token} for {open_token}, after {around}", - span_start=open_position, - ) - - self.read() diff --git a/pkg_resources/_vendor/packaging/markers.py b/pkg_resources/_vendor/packaging/markers.py deleted file mode 100644 index 7ac7bb69a5..0000000000 --- a/pkg_resources/_vendor/packaging/markers.py +++ /dev/null @@ -1,325 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import operator -import os -import platform -import sys -from typing import Any, Callable, TypedDict, cast - -from ._parser import MarkerAtom, MarkerList, Op, Value, Variable -from ._parser import parse_marker as _parse_marker -from ._tokenizer import ParserSyntaxError -from .specifiers import InvalidSpecifier, Specifier -from .utils import canonicalize_name - -__all__ = [ - "InvalidMarker", - "UndefinedComparison", - "UndefinedEnvironmentName", - "Marker", - "default_environment", -] - -Operator = Callable[[str, str], bool] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. - """ - - -class Environment(TypedDict): - implementation_name: str - """The implementation's identifier, e.g. ``'cpython'``.""" - - implementation_version: str - """ - The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or - ``'7.3.13'`` for PyPy3.10 v7.3.13. - """ - - os_name: str - """ - The value of :py:data:`os.name`. The name of the operating system dependent module - imported, e.g. ``'posix'``. - """ - - platform_machine: str - """ - Returns the machine type, e.g. ``'i386'``. - - An empty string if the value cannot be determined. - """ - - platform_release: str - """ - The system's release, e.g. ``'2.2.0'`` or ``'NT'``. - - An empty string if the value cannot be determined. - """ - - platform_system: str - """ - The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``. - - An empty string if the value cannot be determined. - """ - - platform_version: str - """ - The system's release version, e.g. ``'#3 on degas'``. - - An empty string if the value cannot be determined. - """ - - python_full_version: str - """ - The Python version as string ``'major.minor.patchlevel'``. - - Note that unlike the Python :py:data:`sys.version`, this value will always include - the patchlevel (it defaults to 0). - """ - - platform_python_implementation: str - """ - A string identifying the Python implementation, e.g. ``'CPython'``. - """ - - python_version: str - """The Python version as string ``'major.minor'``.""" - - sys_platform: str - """ - This string contains a platform identifier that can be used to append - platform-specific components to :py:data:`sys.path`, for instance. - - For Unix systems, except on Linux and AIX, this is the lowercased OS name as - returned by ``uname -s`` with the first part of the version as returned by - ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python - was built. - """ - - -def _normalize_extra_values(results: Any) -> Any: - """ - Normalize extra values. 
- """ - if isinstance(results[0], tuple): - lhs, op, rhs = results[0] - if isinstance(lhs, Variable) and lhs.value == "extra": - normalized_extra = canonicalize_name(rhs.value) - rhs = Value(normalized_extra) - elif isinstance(rhs, Variable) and rhs.value == "extra": - normalized_extra = canonicalize_name(lhs.value) - lhs = Value(normalized_extra) - results[0] = lhs, op, rhs - return results - - -def _format_marker( - marker: list[str] | MarkerAtom | str, first: bool | None = True -) -> str: - assert isinstance(marker, (list, tuple, str)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself it's own list. In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. - if ( - isinstance(marker, list) - and len(marker) == 1 - and isinstance(marker[0], (list, tuple)) - ): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators: dict[str, Operator] = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs: str, op: Op, rhs: str) -> bool: - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs, prereleases=True) - - oper: Operator | None = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") - - return oper(lhs, rhs) - - -def _normalize(*values: str, key: str) -> tuple[str, ...]: - # PEP 685 – Comparison of extra names for optional distribution dependencies - # https://peps.python.org/pep-0685/ - # > When comparing extra names, tools MUST normalize the names being - # > compared using the semantics outlined in PEP 503 for names - if key == "extra": - return tuple(canonicalize_name(v) for v in values) - - # other environment markers don't have such standards - return values - - -def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool: - groups: list[list[bool]] = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, str)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - environment_key = lhs.value - lhs_value = environment[environment_key] - rhs_value = rhs.value - else: - lhs_value = lhs.value - environment_key = rhs.value - rhs_value = environment[environment_key] - - lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info: sys._version_info) -> str: - version = "{0.major}.{0.minor}.{0.micro}".format(info) - kind = info.releaselevel - if kind != "final": - version += kind[0] + str(info.serial) - return version - - -def default_environment() -> Environment: - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - return { - 
"implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": ".".join(platform.python_version_tuple()[:2]), - "sys_platform": sys.platform, - } - - -class Marker: - def __init__(self, marker: str) -> None: - # Note: We create a Marker object without calling this constructor in - # packaging.requirements.Requirement. If any additional logic is - # added here, make sure to mirror/adapt Requirement. - try: - self._markers = _normalize_extra_values(_parse_marker(marker)) - # The attribute `_markers` can be described in terms of a recursive type: - # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]] - # - # For example, the following expression: - # python_version > "3.6" or (python_version == "3.6" and os_name == "unix") - # - # is parsed into: - # [ - # (, ')>, ), - # 'and', - # [ - # (, , ), - # 'or', - # (, , ) - # ] - # ] - except ParserSyntaxError as e: - raise InvalidMarker(str(e)) from e - - def __str__(self) -> str: - return _format_marker(self._markers) - - def __repr__(self) -> str: - return f"" - - def __hash__(self) -> int: - return hash((self.__class__.__name__, str(self))) - - def __eq__(self, other: Any) -> bool: - if not isinstance(other, Marker): - return NotImplemented - - return str(self) == str(other) - - def evaluate(self, environment: dict[str, str] | None = None) -> bool: - """Evaluate a marker. - - Return the boolean from evaluating the given marker against the - environment. environment is an optional argument to override all or - part of the determined environment. - - The environment is determined from the current Python process. - """ - current_environment = cast("dict[str, str]", default_environment()) - current_environment["extra"] = "" - # Work around platform.python_version() returning something that is not PEP 440 - # compliant for non-tagged Python builds. We preserve default_environment()'s - # behavior of returning platform.python_version() verbatim, and leave it to the - # caller to provide a syntactically valid version if they want to override it. - if current_environment["python_full_version"].endswith("+"): - current_environment["python_full_version"] += "local" - if environment is not None: - current_environment.update(environment) - # The API used to allow setting extra to None. We need to handle this - # case for backwards compatibility. - if current_environment["extra"] is None: - current_environment["extra"] = "" - - return _evaluate_markers(self._markers, current_environment) diff --git a/pkg_resources/_vendor/packaging/metadata.py b/pkg_resources/_vendor/packaging/metadata.py deleted file mode 100644 index eb8dc844d2..0000000000 --- a/pkg_resources/_vendor/packaging/metadata.py +++ /dev/null @@ -1,804 +0,0 @@ -from __future__ import annotations - -import email.feedparser -import email.header -import email.message -import email.parser -import email.policy -import typing -from typing import ( - Any, - Callable, - Generic, - Literal, - TypedDict, - cast, -) - -from . import requirements, specifiers, utils -from . 
import version as version_module - -T = typing.TypeVar("T") - - -try: - ExceptionGroup -except NameError: # pragma: no cover - - class ExceptionGroup(Exception): - """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11. - - If :external:exc:`ExceptionGroup` is already defined by Python itself, - that version is used instead. - """ - - message: str - exceptions: list[Exception] - - def __init__(self, message: str, exceptions: list[Exception]) -> None: - self.message = message - self.exceptions = exceptions - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" - -else: # pragma: no cover - ExceptionGroup = ExceptionGroup - - -class InvalidMetadata(ValueError): - """A metadata field contains invalid data.""" - - field: str - """The name of the field that contains invalid data.""" - - def __init__(self, field: str, message: str) -> None: - self.field = field - super().__init__(message) - - -# The RawMetadata class attempts to make as few assumptions about the underlying -# serialization formats as possible. The idea is that as long as a serialization -# formats offer some very basic primitives in *some* way then we can support -# serializing to and from that format. -class RawMetadata(TypedDict, total=False): - """A dictionary of raw core metadata. - - Each field in core metadata maps to a key of this dictionary (when data is - provided). The key is lower-case and underscores are used instead of dashes - compared to the equivalent core metadata field. Any core metadata field that - can be specified multiple times or can hold multiple values in a single - field have a key with a plural name. See :class:`Metadata` whose attributes - match the keys of this dictionary. - - Core metadata fields that can be specified multiple times are stored as a - list or dict depending on which is appropriate for the field. Any fields - which hold multiple values in a single field are stored as a list. - - """ - - # Metadata 1.0 - PEP 241 - metadata_version: str - name: str - version: str - platforms: list[str] - summary: str - description: str - keywords: list[str] - home_page: str - author: str - author_email: str - license: str - - # Metadata 1.1 - PEP 314 - supported_platforms: list[str] - download_url: str - classifiers: list[str] - requires: list[str] - provides: list[str] - obsoletes: list[str] - - # Metadata 1.2 - PEP 345 - maintainer: str - maintainer_email: str - requires_dist: list[str] - provides_dist: list[str] - obsoletes_dist: list[str] - requires_python: str - requires_external: list[str] - project_urls: dict[str, str] - - # Metadata 2.0 - # PEP 426 attempted to completely revamp the metadata format - # but got stuck without ever being able to build consensus on - # it and ultimately ended up withdrawn. - # - # However, a number of tools had started emitting METADATA with - # `2.0` Metadata-Version, so for historical reasons, this version - # was skipped. - - # Metadata 2.1 - PEP 566 - description_content_type: str - provides_extra: list[str] - - # Metadata 2.2 - PEP 643 - dynamic: list[str] - - # Metadata 2.3 - PEP 685 - # No new fields were added in PEP 685, just some edge case were - # tightened up to provide better interoptability. 
-
-
-_STRING_FIELDS = {
-    "author",
-    "author_email",
-    "description",
-    "description_content_type",
-    "download_url",
-    "home_page",
-    "license",
-    "maintainer",
-    "maintainer_email",
-    "metadata_version",
-    "name",
-    "requires_python",
-    "summary",
-    "version",
-}
-
-_LIST_FIELDS = {
-    "classifiers",
-    "dynamic",
-    "obsoletes",
-    "obsoletes_dist",
-    "platforms",
-    "provides",
-    "provides_dist",
-    "provides_extra",
-    "requires",
-    "requires_dist",
-    "requires_external",
-    "supported_platforms",
-}
-
-_DICT_FIELDS = {
-    "project_urls",
-}
-
-
-def _parse_keywords(data: str) -> list[str]:
-    """Split a string of comma-separated keywords into a list of keywords."""
-    return [k.strip() for k in data.split(",")]
-
-
-def _parse_project_urls(data: list[str]) -> dict[str, str]:
-    """Parse a list of label/URL string pairings separated by a comma."""
-    urls = {}
-    for pair in data:
-        # Our logic is slightly tricky here as we want to try and do
-        # *something* reasonable with malformed data.
-        #
-        # The main thing that we have to worry about is data that does
-        # not have a ',' at all to split the label from the value. There
-        # isn't a singular right answer here, and we will fail validation
-        # later on (if the caller is validating) so it doesn't *really*
-        # matter, but since the missing value has to be an empty str
-        # and our return value is dict[str, str], if we let the key
-        # be the missing value, then they'd have multiple '' values that
-        # overwrite each other in an accumulating dict.
-        #
-        # The other potential issue is that it's possible to have the
-        # same label multiple times in the metadata, with no solid "right"
-        # answer with what to do in that case. As such, we'll do the only
-        # thing we can, which is treat the field as unparseable and add it
-        # to our list of unparsed fields.
-        parts = [p.strip() for p in pair.split(",", 1)]
-        parts.extend([""] * (max(0, 2 - len(parts))))  # Ensure 2 items
-
-        # TODO: The spec doesn't say anything about if the keys should be
-        #       considered case sensitive or not... logically they should
-        #       be case-preserving and case-insensitive, but doing that
-        #       would open up more cases where we might have duplicate
-        #       entries.
-        label, url = parts
-        if label in urls:
-            # The label already exists in our set of urls, so this field
-            # is unparseable, and we can just add the whole thing to our
-            # unparseable data and stop processing it.
-            raise KeyError("duplicate labels in project urls")
-        urls[label] = url
-
-    return urls
-
-
-def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
-    """Get the body of the message."""
-    # If our source is a str, then our caller has managed encodings for us,
-    # and we don't need to deal with it.
-    if isinstance(source, str):
-        payload: str = msg.get_payload()
-        return payload
-    # If our source is a bytes, then we're managing the encoding and we need
-    # to deal with it.
-    else:
-        bpayload: bytes = msg.get_payload(decode=True)
-        try:
-            return bpayload.decode("utf8", "strict")
-        except UnicodeDecodeError:
-            raise ValueError("payload in an invalid encoding")
-
-
-# The various parse_FORMAT functions here are intended to be as lenient as
-# possible in their parsing, while still returning a correctly typed
-# RawMetadata.
-#
-# To aid in this, we also generally want to do as little touching of the
-# data as possible, except where there are possibly some historic holdovers
-# that make valid data awkward to work with.
-# -# While this is a lower level, intermediate format than our ``Metadata`` -# class, some light touch ups can make a massive difference in usability. - -# Map METADATA fields to RawMetadata. -_EMAIL_TO_RAW_MAPPING = { - "author": "author", - "author-email": "author_email", - "classifier": "classifiers", - "description": "description", - "description-content-type": "description_content_type", - "download-url": "download_url", - "dynamic": "dynamic", - "home-page": "home_page", - "keywords": "keywords", - "license": "license", - "maintainer": "maintainer", - "maintainer-email": "maintainer_email", - "metadata-version": "metadata_version", - "name": "name", - "obsoletes": "obsoletes", - "obsoletes-dist": "obsoletes_dist", - "platform": "platforms", - "project-url": "project_urls", - "provides": "provides", - "provides-dist": "provides_dist", - "provides-extra": "provides_extra", - "requires": "requires", - "requires-dist": "requires_dist", - "requires-external": "requires_external", - "requires-python": "requires_python", - "summary": "summary", - "supported-platform": "supported_platforms", - "version": "version", -} -_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} - - -def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]: - """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). - - This function returns a two-item tuple of dicts. The first dict is of - recognized fields from the core metadata specification. Fields that can be - parsed and translated into Python's built-in types are converted - appropriately. All other fields are left as-is. Fields that are allowed to - appear multiple times are stored as lists. - - The second dict contains all other fields from the metadata. This includes - any unrecognized fields. It also includes any fields which are expected to - be parsed into a built-in type but were not formatted appropriately. Finally, - any fields that are expected to appear only once but are repeated are - included in this dict. - - """ - raw: dict[str, str | list[str] | dict[str, str]] = {} - unparsed: dict[str, list[str]] = {} - - if isinstance(data, str): - parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) - else: - parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) - - # We have to wrap parsed.keys() in a set, because in the case of multiple - # values for a key (a list), the key will appear multiple times in the - # list of keys, but we're avoiding that by using get_all(). - for name in frozenset(parsed.keys()): - # Header names in RFC are case insensitive, so we'll normalize to all - # lower case to make comparisons easier. - name = name.lower() - - # We use get_all() here, even for fields that aren't multiple use, - # because otherwise someone could have e.g. two Name fields, and we - # would just silently ignore it rather than doing something about it. - headers = parsed.get_all(name) or [] - - # The way the email module works when parsing bytes is that it - # unconditionally decodes the bytes as ascii using the surrogateescape - # handler. When you pull that data back out (such as with get_all() ), - # it looks to see if the str has any surrogate escapes, and if it does - # it wraps it in a Header object instead of returning the string. - # - # As such, we'll look for those Header objects, and fix up the encoding. 
-        value = []
-        # Flag if we have run into any issues processing the headers, thus
-        # signalling that the data belongs in 'unparsed'.
-        valid_encoding = True
-        for h in headers:
-            # It's unclear if this can return more types than just a Header or
-            # a str, so we'll just assert here to make sure.
-            assert isinstance(h, (email.header.Header, str))
-
-            # If it's a header object, we need to do our little dance to get
-            # the real data out of it. In cases where there is invalid data
-            # we're going to end up with mojibake, but there's no obvious, good
-            # way around that without reimplementing parts of the Header object
-            # ourselves.
-            #
-            # That should be fine since, if mojibake happens, this key is
-            # going into the unparsed dict anyways.
-            if isinstance(h, email.header.Header):
-                # The Header object stores its data as chunks, and each chunk
-                # can be independently encoded, so we'll need to check each
-                # of them.
-                chunks: list[tuple[bytes, str | None]] = []
-                for bin, encoding in email.header.decode_header(h):
-                    try:
-                        bin.decode("utf8", "strict")
-                    except UnicodeDecodeError:
-                        # Enable mojibake.
-                        encoding = "latin1"
-                        valid_encoding = False
-                    else:
-                        encoding = "utf8"
-                    chunks.append((bin, encoding))
-
-                # Turn our chunks back into a Header object, then let that
-                # Header object do the right thing to turn them into a
-                # string for us.
-                value.append(str(email.header.make_header(chunks)))
-            # This is already a string, so just add it.
-            else:
-                value.append(h)
-
-        # We've processed all of our values to get them into a list of str,
-        # but we may have mojibake data, in which case this is an unparsed
-        # field.
-        if not valid_encoding:
-            unparsed[name] = value
-            continue
-
-        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
-        if raw_name is None:
-            # This is a bit of a weird situation, we've encountered a key that
-            # we don't know what it means, so we don't know whether it's meant
-            # to be a list or not.
-            #
-            # Since we can't really tell one way or another, we'll just leave it
-            # as a list, even though it may be a single item list, because that's
-            # what makes the most sense for email headers.
-            unparsed[name] = value
-            continue
-
-        # If this is one of our string fields, then we'll check to see if our
-        # value is a list of a single item. If it is then we'll assume that
-        # it was emitted as a single string, and unwrap the str from inside
-        # the list.
-        #
-        # If it's any other kind of data, then we haven't the faintest clue
-        # what we should parse it as, and we have to just add it to our list
-        # of unparsed stuff.
-        if raw_name in _STRING_FIELDS and len(value) == 1:
-            raw[raw_name] = value[0]
-        # If this is one of our list of string fields, then we can just assign
-        # the value, since email *only* has strings, and our get_all() call
-        # above ensures that this is a list.
-        elif raw_name in _LIST_FIELDS:
-            raw[raw_name] = value
-        # Special Case: Keywords
-        # The keywords field is implemented in the metadata spec as a str,
-        # but it conceptually is a list of strings, and is serialized using
-        # ", ".join(keywords), so we'll do some light data massaging to turn
-        # this into what it logically is.
-        elif raw_name == "keywords" and len(value) == 1:
-            raw[raw_name] = _parse_keywords(value[0])
-        # Special Case: Project-URL
-        # The project urls field is implemented in the metadata spec as a list of
-        # specially-formatted strings that represent a key and a value, which
-        # is fundamentally a mapping, however the email format doesn't support
-        # mappings in a sane way, so it was crammed into a list of strings
-        # instead.
-        #
-        # We will do a little light data massaging to turn this into a map as
-        # it logically should be.
-        elif raw_name == "project_urls":
-            try:
-                raw[raw_name] = _parse_project_urls(value)
-            except KeyError:
-                unparsed[name] = value
-        # Nothing that we've done has managed to parse this, so it'll just
-        # throw it in our unparseable data and move on.
-        else:
-            unparsed[name] = value
-
-    # We need to support getting the Description from the message payload in
-    # addition to getting it from the headers. This does mean, though, there
-    # is the possibility of it being set both ways, in which case we put both
-    # in 'unparsed' since we don't know which is right.
-    try:
-        payload = _get_payload(parsed, data)
-    except ValueError:
-        unparsed.setdefault("description", []).append(
-            parsed.get_payload(decode=isinstance(data, bytes))
-        )
-    else:
-        if payload:
-            # Check to see if we've already got a description, if so then both
-            # it and this body move to unparseable.
-            if "description" in raw:
-                description_header = cast(str, raw.pop("description"))
-                unparsed.setdefault("description", []).extend(
-                    [description_header, payload]
-                )
-            elif "description" in unparsed:
-                unparsed["description"].append(payload)
-            else:
-                raw["description"] = payload
-
-    # We need to cast our `raw` to a metadata, because a TypedDict only supports
-    # literal key names, but we're computing our key names on purpose; the
-    # way this function is implemented, our `TypedDict` can only have valid key
-    # names.
-    return cast(RawMetadata, raw), unparsed
-
-
-_NOT_FOUND = object()
-
-
-# Keep the two values in sync.
-_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
-_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
-
-_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
-
-
-class _Validator(Generic[T]):
-    """Validate a metadata field.
-
-    All _process_*() methods correspond to a core metadata field. The method is
-    called with the field's raw value. If the raw value is valid it is returned
-    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
-    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
-    as appropriate).
-    """
-
-    name: str
-    raw_name: str
-    added: _MetadataVersion
-
-    def __init__(
-        self,
-        *,
-        added: _MetadataVersion = "1.0",
-    ) -> None:
-        self.added = added
-
-    def __set_name__(self, _owner: Metadata, name: str) -> None:
-        self.name = name
-        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
-
-    def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
-        # With Python 3.8, the caching can be replaced with functools.cached_property().
-        # No need to check the cache as attribute lookup will resolve into the
-        # instance's __dict__ before __get__ is called.
-        cache = instance.__dict__
-        value = instance._raw.get(self.name)
-
-        # To make the _process_* methods easier, we'll check if the value is None
-        # and if this field is NOT a required attribute, and if both of those
-        # things are true, we'll skip the converter. This will mean that the
-        # converters never have to deal with the None union.
-        if self.name in _REQUIRED_ATTRS or value is not None:
-            try:
-                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
-            except AttributeError:
-                pass
-            else:
-                value = converter(value)
-
-        cache[self.name] = value
-        try:
-            del instance._raw[self.name]  # type: ignore[misc]
-        except KeyError:
-            pass
-
-        return cast(T, value)
-
-    def _invalid_metadata(
-        self, msg: str, cause: Exception | None = None
-    ) -> InvalidMetadata:
-        exc = InvalidMetadata(
-            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
-        )
-        exc.__cause__ = cause
-        return exc
-
-    def _process_metadata_version(self, value: str) -> _MetadataVersion:
-        # Implicitly makes Metadata-Version required.
-        if value not in _VALID_METADATA_VERSIONS:
-            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
-        return cast(_MetadataVersion, value)
-
-    def _process_name(self, value: str) -> str:
-        if not value:
-            raise self._invalid_metadata("{field} is a required field")
-        # Validate the name as a side-effect.
-        try:
-            utils.canonicalize_name(value, validate=True)
-        except utils.InvalidName as exc:
-            raise self._invalid_metadata(
-                f"{value!r} is invalid for {{field}}", cause=exc
-            )
-        else:
-            return value
-
-    def _process_version(self, value: str) -> version_module.Version:
-        if not value:
-            raise self._invalid_metadata("{field} is a required field")
-        try:
-            return version_module.parse(value)
-        except version_module.InvalidVersion as exc:
-            raise self._invalid_metadata(
-                f"{value!r} is invalid for {{field}}", cause=exc
-            )
-
-    def _process_summary(self, value: str) -> str:
-        """Check the field contains no newlines."""
-        if "\n" in value:
-            raise self._invalid_metadata("{field} must be a single line")
-        return value
-
-    def _process_description_content_type(self, value: str) -> str:
-        content_types = {"text/plain", "text/x-rst", "text/markdown"}
-        message = email.message.EmailMessage()
-        message["content-type"] = value
-
-        content_type, parameters = (
-            # Defaults to `text/plain` if parsing failed.
-            message.get_content_type().lower(),
-            message["content-type"].params,
-        )
-        # Check if content-type is valid or defaulted to `text/plain` and thus was
-        # not parseable.
-        if content_type not in content_types or content_type not in value.lower():
-            raise self._invalid_metadata(
-                f"{{field}} must be one of {list(content_types)}, not {value!r}"
-            )
-
-        charset = parameters.get("charset", "UTF-8")
-        if charset != "UTF-8":
-            raise self._invalid_metadata(
-                f"{{field}} can only specify the UTF-8 charset, not {charset!r}"
-            )
-
-        markdown_variants = {"GFM", "CommonMark"}
-        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
- if content_type == "text/markdown" and variant not in markdown_variants: - raise self._invalid_metadata( - f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " - f"not {variant!r}", - ) - return value - - def _process_dynamic(self, value: list[str]) -> list[str]: - for dynamic_field in map(str.lower, value): - if dynamic_field in {"name", "version", "metadata-version"}: - raise self._invalid_metadata( - f"{value!r} is not allowed as a dynamic field" - ) - elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: - raise self._invalid_metadata(f"{value!r} is not a valid dynamic field") - return list(map(str.lower, value)) - - def _process_provides_extra( - self, - value: list[str], - ) -> list[utils.NormalizedName]: - normalized_names = [] - try: - for name in value: - normalized_names.append(utils.canonicalize_name(name, validate=True)) - except utils.InvalidName as exc: - raise self._invalid_metadata( - f"{name!r} is invalid for {{field}}", cause=exc - ) - else: - return normalized_names - - def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: - try: - return specifiers.SpecifierSet(value) - except specifiers.InvalidSpecifier as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - - def _process_requires_dist( - self, - value: list[str], - ) -> list[requirements.Requirement]: - reqs = [] - try: - for req in value: - reqs.append(requirements.Requirement(req)) - except requirements.InvalidRequirement as exc: - raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc) - else: - return reqs - - -class Metadata: - """Representation of distribution metadata. - - Compared to :class:`RawMetadata`, this class provides objects representing - metadata fields instead of only using built-in types. Any invalid metadata - will cause :exc:`InvalidMetadata` to be raised (with a - :py:attr:`~BaseException.__cause__` attribute as appropriate). - """ - - _raw: RawMetadata - - @classmethod - def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata: - """Create an instance from :class:`RawMetadata`. - - If *validate* is true, all metadata will be validated. All exceptions - related to validation will be gathered and raised as an :class:`ExceptionGroup`. - """ - ins = cls() - ins._raw = data.copy() # Mutations occur due to caching enriched values. - - if validate: - exceptions: list[Exception] = [] - try: - metadata_version = ins.metadata_version - metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) - except InvalidMetadata as metadata_version_exc: - exceptions.append(metadata_version_exc) - metadata_version = None - - # Make sure to check for the fields that are present, the required - # fields (so their absence can be reported). - fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS - # Remove fields that have already been checked. - fields_to_check -= {"metadata_version"} - - for key in fields_to_check: - try: - if metadata_version: - # Can't use getattr() as that triggers descriptor protocol which - # will fail due to no value for the instance argument. 
- try: - field_metadata_version = cls.__dict__[key].added - except KeyError: - exc = InvalidMetadata(key, f"unrecognized field: {key!r}") - exceptions.append(exc) - continue - field_age = _VALID_METADATA_VERSIONS.index( - field_metadata_version - ) - if field_age > metadata_age: - field = _RAW_TO_EMAIL_MAPPING[key] - exc = InvalidMetadata( - field, - "{field} introduced in metadata version " - "{field_metadata_version}, not {metadata_version}", - ) - exceptions.append(exc) - continue - getattr(ins, key) - except InvalidMetadata as exc: - exceptions.append(exc) - - if exceptions: - raise ExceptionGroup("invalid metadata", exceptions) - - return ins - - @classmethod - def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata: - """Parse metadata from email headers. - - If *validate* is true, the metadata will be validated. All exceptions - related to validation will be gathered and raised as an :class:`ExceptionGroup`. - """ - raw, unparsed = parse_email(data) - - if validate: - exceptions: list[Exception] = [] - for unparsed_key in unparsed: - if unparsed_key in _EMAIL_TO_RAW_MAPPING: - message = f"{unparsed_key!r} has invalid data" - else: - message = f"unrecognized field: {unparsed_key!r}" - exceptions.append(InvalidMetadata(unparsed_key, message)) - - if exceptions: - raise ExceptionGroup("unparsed", exceptions) - - try: - return cls.from_raw(raw, validate=validate) - except ExceptionGroup as exc_group: - raise ExceptionGroup( - "invalid or unparsed metadata", exc_group.exceptions - ) from None - - metadata_version: _Validator[_MetadataVersion] = _Validator() - """:external:ref:`core-metadata-metadata-version` - (required; validated to be a valid metadata version)""" - name: _Validator[str] = _Validator() - """:external:ref:`core-metadata-name` - (required; validated using :func:`~packaging.utils.canonicalize_name` and its - *validate* parameter)""" - version: _Validator[version_module.Version] = _Validator() - """:external:ref:`core-metadata-version` (required)""" - dynamic: _Validator[list[str] | None] = _Validator( - added="2.2", - ) - """:external:ref:`core-metadata-dynamic` - (validated against core metadata field names and lowercased)""" - platforms: _Validator[list[str] | None] = _Validator() - """:external:ref:`core-metadata-platform`""" - supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1") - """:external:ref:`core-metadata-supported-platform`""" - summary: _Validator[str | None] = _Validator() - """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" - description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body - """:external:ref:`core-metadata-description`""" - description_content_type: _Validator[str | None] = _Validator(added="2.1") - """:external:ref:`core-metadata-description-content-type` (validated)""" - keywords: _Validator[list[str] | None] = _Validator() - """:external:ref:`core-metadata-keywords`""" - home_page: _Validator[str | None] = _Validator() - """:external:ref:`core-metadata-home-page`""" - download_url: _Validator[str | None] = _Validator(added="1.1") - """:external:ref:`core-metadata-download-url`""" - author: _Validator[str | None] = _Validator() - """:external:ref:`core-metadata-author`""" - author_email: _Validator[str | None] = _Validator() - """:external:ref:`core-metadata-author-email`""" - maintainer: _Validator[str | None] = _Validator(added="1.2") - """:external:ref:`core-metadata-maintainer`""" - maintainer_email: _Validator[str | None] = _Validator(added="1.2") - 
""":external:ref:`core-metadata-maintainer-email`""" - license: _Validator[str | None] = _Validator() - """:external:ref:`core-metadata-license`""" - classifiers: _Validator[list[str] | None] = _Validator(added="1.1") - """:external:ref:`core-metadata-classifier`""" - requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator( - added="1.2" - ) - """:external:ref:`core-metadata-requires-dist`""" - requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator( - added="1.2" - ) - """:external:ref:`core-metadata-requires-python`""" - # Because `Requires-External` allows for non-PEP 440 version specifiers, we - # don't do any processing on the values. - requires_external: _Validator[list[str] | None] = _Validator(added="1.2") - """:external:ref:`core-metadata-requires-external`""" - project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2") - """:external:ref:`core-metadata-project-url`""" - # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation - # regardless of metadata version. - provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator( - added="2.1", - ) - """:external:ref:`core-metadata-provides-extra`""" - provides_dist: _Validator[list[str] | None] = _Validator(added="1.2") - """:external:ref:`core-metadata-provides-dist`""" - obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2") - """:external:ref:`core-metadata-obsoletes-dist`""" - requires: _Validator[list[str] | None] = _Validator(added="1.1") - """``Requires`` (deprecated)""" - provides: _Validator[list[str] | None] = _Validator(added="1.1") - """``Provides`` (deprecated)""" - obsoletes: _Validator[list[str] | None] = _Validator(added="1.1") - """``Obsoletes`` (deprecated)""" diff --git a/pkg_resources/_vendor/packaging/py.typed b/pkg_resources/_vendor/packaging/py.typed deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pkg_resources/_vendor/packaging/requirements.py b/pkg_resources/_vendor/packaging/requirements.py deleted file mode 100644 index 4e068c9567..0000000000 --- a/pkg_resources/_vendor/packaging/requirements.py +++ /dev/null @@ -1,91 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import annotations - -from typing import Any, Iterator - -from ._parser import parse_requirement as _parse_requirement -from ._tokenizer import ParserSyntaxError -from .markers import Marker, _normalize_extra_values -from .specifiers import SpecifierSet -from .utils import canonicalize_name - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -class Requirement: - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? 
- - def __init__(self, requirement_string: str) -> None: - try: - parsed = _parse_requirement(requirement_string) - except ParserSyntaxError as e: - raise InvalidRequirement(str(e)) from e - - self.name: str = parsed.name - self.url: str | None = parsed.url or None - self.extras: set[str] = set(parsed.extras or []) - self.specifier: SpecifierSet = SpecifierSet(parsed.specifier) - self.marker: Marker | None = None - if parsed.marker is not None: - self.marker = Marker.__new__(Marker) - self.marker._markers = _normalize_extra_values(parsed.marker) - - def _iter_parts(self, name: str) -> Iterator[str]: - yield name - - if self.extras: - formatted_extras = ",".join(sorted(self.extras)) - yield f"[{formatted_extras}]" - - if self.specifier: - yield str(self.specifier) - - if self.url: - yield f"@ {self.url}" - if self.marker: - yield " " - - if self.marker: - yield f"; {self.marker}" - - def __str__(self) -> str: - return "".join(self._iter_parts(self.name)) - - def __repr__(self) -> str: - return f"" - - def __hash__(self) -> int: - return hash( - ( - self.__class__.__name__, - *self._iter_parts(canonicalize_name(self.name)), - ) - ) - - def __eq__(self, other: Any) -> bool: - if not isinstance(other, Requirement): - return NotImplemented - - return ( - canonicalize_name(self.name) == canonicalize_name(other.name) - and self.extras == other.extras - and self.specifier == other.specifier - and self.url == other.url - and self.marker == other.marker - ) diff --git a/pkg_resources/_vendor/packaging/specifiers.py b/pkg_resources/_vendor/packaging/specifiers.py deleted file mode 100644 index 2fa75f7abb..0000000000 --- a/pkg_resources/_vendor/packaging/specifiers.py +++ /dev/null @@ -1,1009 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -""" -.. testsetup:: - - from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier - from packaging.version import Version -""" - -from __future__ import annotations - -import abc -import itertools -import re -from typing import Callable, Iterable, Iterator, TypeVar, Union - -from .utils import canonicalize_version -from .version import Version - -UnparsedVersion = Union[Version, str] -UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) -CallableOperator = Callable[[Version, str], bool] - - -def _coerce_version(version: UnparsedVersion) -> Version: - if not isinstance(version, Version): - version = Version(version) - return version - - -class InvalidSpecifier(ValueError): - """ - Raised when attempting to create a :class:`Specifier` with a specifier - string that is invalid. - - >>> Specifier("lolwat") - Traceback (most recent call last): - ... - packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat' - """ - - -class BaseSpecifier(metaclass=abc.ABCMeta): - @abc.abstractmethod - def __str__(self) -> str: - """ - Returns the str representation of this Specifier-like object. This - should be representative of the Specifier itself. - """ - - @abc.abstractmethod - def __hash__(self) -> int: - """ - Returns a hash value for this Specifier-like object. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Returns a boolean representing whether or not the two Specifier-like - objects are equal. - - :param other: The other object to check against. 
- """ - - @property - @abc.abstractmethod - def prereleases(self) -> bool | None: - """Whether or not pre-releases as a whole are allowed. - - This can be set to either ``True`` or ``False`` to explicitly enable or disable - prereleases or it can be set to ``None`` (the default) to use default semantics. - """ - - @prereleases.setter - def prereleases(self, value: bool) -> None: - """Setter for :attr:`prereleases`. - - :param value: The value to set. - """ - - @abc.abstractmethod - def contains(self, item: str, prereleases: bool | None = None) -> bool: - """ - Determines if the given item is contained within this specifier. - """ - - @abc.abstractmethod - def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None - ) -> Iterator[UnparsedVersionVar]: - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. - """ - - -class Specifier(BaseSpecifier): - """This class abstracts handling of version specifiers. - - .. tip:: - - It is generally not required to instantiate this manually. You should instead - prefer to work with :class:`SpecifierSet` instead, which can parse - comma-separated version specifiers (which is what package metadata contains). - """ - - _operator_regex_str = r""" - (?P(~=|==|!=|<=|>=|<|>|===)) - """ - _version_regex_str = r""" - (?P - (?: - # The identity operators allow for an escape hatch that will - # do an exact string match of the version you wish to install. - # This will not be parsed by PEP 440 and we cannot determine - # any semantic meaning from it. This operator is discouraged - # but included entirely as an escape hatch. - (?<====) # Only match for the identity operator - \s* - [^\s;)]* # The arbitrary version can be just about anything, - # we match everything except for whitespace, a - # semi-colon for marker support, and a closing paren - # since versions can be enclosed in them. - ) - | - (?: - # The (non)equality operators allow for wild card and local - # versions to be specified so we have to define these two - # operators separately to enable that. - (?<===|!=) # Only match for equals and not equals - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - - # You cannot use a wild card and a pre-release, post-release, a dev or - # local version together so group them with a | and make them optional. - (?: - \.\* # Wild card syntax of .* - | - (?: # pre release - [-_\.]? - (alpha|beta|preview|pre|a|b|c|rc) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - )? - ) - | - (?: - # The compatible operator requires at least two digits in the - # release segment. - (?<=~=) # Only match for the compatible operator - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) - (?: # pre release - [-_\.]? - (alpha|beta|preview|pre|a|b|c|rc) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - | - (?: - # All other operators only allow a sub set of what the - # (non)equality operators do. Specifically they do not allow - # local versions to be specified nor do they allow the prefix - # matching wild cards. 
- (?=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - "===": "arbitrary", - } - - def __init__(self, spec: str = "", prereleases: bool | None = None) -> None: - """Initialize a Specifier instance. - - :param spec: - The string representation of a specifier which will be parsed and - normalized before use. - :param prereleases: - This tells the specifier if it should accept prerelease versions if - applicable or not. The default of ``None`` will autodetect it from the - given specifiers. - :raises InvalidSpecifier: - If the given specifier is invalid (i.e. bad syntax). - """ - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier(f"Invalid specifier: '{spec}'") - - self._spec: tuple[str, str] = ( - match.group("operator").strip(), - match.group("version").strip(), - ) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515 - @property # type: ignore[override] - def prereleases(self) -> bool: - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if Version(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - @property - def operator(self) -> str: - """The operator of this specifier. - - >>> Specifier("==1.2.3").operator - '==' - """ - return self._spec[0] - - @property - def version(self) -> str: - """The version of this specifier. - - >>> Specifier("==1.2.3").version - '1.2.3' - """ - return self._spec[1] - - def __repr__(self) -> str: - """A representation of the Specifier that shows all internal state. - - >>> Specifier('>=1.0.0') - =1.0.0')> - >>> Specifier('>=1.0.0', prereleases=False) - =1.0.0', prereleases=False)> - >>> Specifier('>=1.0.0', prereleases=True) - =1.0.0', prereleases=True)> - """ - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"<{self.__class__.__name__}({str(self)!r}{pre})>" - - def __str__(self) -> str: - """A string representation of the Specifier that can be round-tripped. - - >>> str(Specifier('>=1.0.0')) - '>=1.0.0' - >>> str(Specifier('>=1.0.0', prereleases=False)) - '>=1.0.0' - """ - return "{}{}".format(*self._spec) - - @property - def _canonical_spec(self) -> tuple[str, str]: - canonical_version = canonicalize_version( - self._spec[1], - strip_trailing_zero=(self._spec[0] != "~="), - ) - return self._spec[0], canonical_version - - def __hash__(self) -> int: - return hash(self._canonical_spec) - - def __eq__(self, other: object) -> bool: - """Whether or not the two Specifier-like objects are equal. - - :param other: The other object to check against. - - The value of :attr:`prereleases` is ignored. - - >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") - True - >>> (Specifier("==1.2.3", prereleases=False) == - ... 
Specifier("==1.2.3", prereleases=True)) - True - >>> Specifier("==1.2.3") == "==1.2.3" - True - >>> Specifier("==1.2.3") == Specifier("==1.2.4") - False - >>> Specifier("==1.2.3") == Specifier("~=1.2.3") - False - """ - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._canonical_spec == other._canonical_spec - - def _get_operator(self, op: str) -> CallableOperator: - operator_callable: CallableOperator = getattr( - self, f"_compare_{self._operators[op]}" - ) - return operator_callable - - def _compare_compatible(self, prospective: Version, spec: str) -> bool: - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore suffix segments. - prefix = _version_join( - list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( - prospective, prefix - ) - - def _compare_equal(self, prospective: Version, spec: str) -> bool: - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - normalized_prospective = canonicalize_version( - prospective.public, strip_trailing_zero=False - ) - # Get the normalized version string ignoring the trailing .* - normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) - # Split the spec out by bangs and dots, and pretend that there is - # an implicit dot in between a release segment and a pre-release segment. - split_spec = _version_split(normalized_spec) - - # Split the prospective version out by bangs and dots, and pretend - # that there is an implicit dot in between a release segment and - # a pre-release segment. - split_prospective = _version_split(normalized_prospective) - - # 0-pad the prospective version before shortening it to get the correct - # shortened version. - padded_prospective, _ = _pad_version(split_prospective, split_spec) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - shortened_prospective = padded_prospective[: len(split_spec)] - - return shortened_prospective == split_spec - else: - # Convert our spec string into a Version - spec_version = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. - if not spec_version.local: - prospective = Version(prospective.public) - - return prospective == spec_version - - def _compare_not_equal(self, prospective: Version, spec: str) -> bool: - return not self._compare_equal(prospective, spec) - - def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. 
- return Version(prospective.public) <= Version(spec) - - def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) >= Version(spec) - - def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is technically greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: - return str(prospective).lower() == str(spec).lower() - - def __contains__(self, item: str | Version) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: The item to check for. - - This is used for the ``in`` operator and behaves the same as - :meth:`contains` with no ``prereleases`` argument passed. - - >>> "1.2.3" in Specifier(">=1.2.3") - True - >>> Version("1.2.3") in Specifier(">=1.2.3") - True - >>> "1.0.0" in Specifier(">=1.2.3") - False - >>> "1.3.0a1" in Specifier(">=1.2.3") - False - >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) - True - """ - return self.contains(item) - - def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool: - """Return whether or not the item is contained in this specifier. 
- - :param item: - The item to check for, which can be a version string or a - :class:`Version` instance. - :param prereleases: - Whether or not to match prereleases with this Specifier. If set to - ``None`` (the default), it uses :attr:`prereleases` to determine - whether or not prereleases are allowed. - - >>> Specifier(">=1.2.3").contains("1.2.3") - True - >>> Specifier(">=1.2.3").contains(Version("1.2.3")) - True - >>> Specifier(">=1.2.3").contains("1.0.0") - False - >>> Specifier(">=1.2.3").contains("1.3.0a1") - False - >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1") - True - >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True) - True - """ - - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version, this allows us to have a shortcut for - # "2.0" in Specifier(">=2") - normalized_item = _coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if normalized_item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - operator_callable: CallableOperator = self._get_operator(self.operator) - return operator_callable(normalized_item, self.version) - - def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None - ) -> Iterator[UnparsedVersionVar]: - """Filter items in the given iterable, that match the specifier. - - :param iterable: - An iterable that can contain version strings and :class:`Version` instances. - The items in the iterable will be filtered according to the specifier. - :param prereleases: - Whether or not to allow prereleases in the returned iterator. If set to - ``None`` (the default), it will be intelligently decide whether to allow - prereleases or not (based on the :attr:`prereleases` attribute, and - whether the only versions matching are prereleases). - - This method is smarter than just ``filter(Specifier().contains, [...])`` - because it implements the rule from :pep:`440` that a prerelease item - SHOULD be accepted if no other versions match the given specifier. - - >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) - ['1.3'] - >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")])) - ['1.2.3', '1.3', ] - >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"])) - ['1.5a1'] - >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - """ - - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. - for version in iterable: - parsed_version = _coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later in case nothing - # else matches this specifier. - if parsed_version.is_prerelease and not ( - prereleases or self.prereleases - ): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. 
- else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version: str) -> list[str]: - """Split version into components. - - The split components are intended for version comparison. The logic does - not attempt to retain the original version string, so joining the - components back with :func:`_version_join` may not produce the original - version string. - """ - result: list[str] = [] - - epoch, _, rest = version.rpartition("!") - result.append(epoch or "0") - - for item in rest.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _version_join(components: list[str]) -> str: - """Join split version components into a version string. - - This function assumes the input came from :func:`_version_split`, where the - first component must be the epoch (either empty or numeric), and all other - components numeric. - """ - epoch, *rest = components - return f"{epoch}!{'.'.join(rest)}" - - -def _is_not_suffix(segment: str) -> bool: - return not any( - segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") - ) - - -def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]: - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]) :]) - right_split.append(right[len(right_split[0]) :]) - - # Insert our padding - left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) - right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) - - return ( - list(itertools.chain.from_iterable(left_split)), - list(itertools.chain.from_iterable(right_split)), - ) - - -class SpecifierSet(BaseSpecifier): - """This class abstracts handling of a set of version specifiers. - - It can be passed a single specifier (``>=3.0``), a comma-separated list of - specifiers (``>=3.0,!=3.1``), or no specifier at all. - """ - - def __init__(self, specifiers: str = "", prereleases: bool | None = None) -> None: - """Initialize a SpecifierSet instance. - - :param specifiers: - The string representation of a specifier or a comma-separated list of - specifiers which will be parsed and normalized before use. - :param prereleases: - This tells the SpecifierSet if it should accept prerelease versions if - applicable or not. The default of ``None`` will autodetect it from the - given specifiers. - - :raises InvalidSpecifier: - If the given ``specifiers`` are not parseable than this exception will be - raised. - """ - - # Split on `,` to break each individual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. - split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Make each individual specifier a Specifier and save in a frozen set for later. 
- self._specs = frozenset(map(Specifier, split_specifiers)) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - @property - def prereleases(self) -> bool | None: - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - def __repr__(self) -> str: - """A representation of the specifier set that shows all internal state. - - Note that the ordering of the individual specifiers within the set may not - match the input string. - - >>> SpecifierSet('>=1.0.0,!=2.0.0') - =1.0.0')> - >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False) - =1.0.0', prereleases=False)> - >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True) - =1.0.0', prereleases=True)> - """ - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"" - - def __str__(self) -> str: - """A string representation of the specifier set that can be round-tripped. - - Note that the ordering of the individual specifiers within the set may not - match the input string. - - >>> str(SpecifierSet(">=1.0.0,!=1.0.1")) - '!=1.0.1,>=1.0.0' - >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False)) - '!=1.0.1,>=1.0.0' - """ - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self) -> int: - return hash(self._specs) - - def __and__(self, other: SpecifierSet | str) -> SpecifierSet: - """Return a SpecifierSet which is a combination of the two sets. - - :param other: The other object to combine with. - - >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1' - =1.0.0')> - >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1') - =1.0.0')> - """ - if isinstance(other, str): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." - ) - - return specifier - - def __eq__(self, other: object) -> bool: - """Whether or not the two SpecifierSet-like objects are equal. - - :param other: The other object to check against. - - The value of :attr:`prereleases` is ignored. - - >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1") - True - >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) == - ... 
SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)) - True - >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1" - True - >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0") - False - >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2") - False - """ - if isinstance(other, (str, Specifier)): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __len__(self) -> int: - """Returns the number of specifiers in this specifier set.""" - return len(self._specs) - - def __iter__(self) -> Iterator[Specifier]: - """ - Returns an iterator over all the underlying :class:`Specifier` instances - in this specifier set. - - >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str) - [, =1.0.0')>] - """ - return iter(self._specs) - - def __contains__(self, item: UnparsedVersion) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: The item to check for. - - This is used for the ``in`` operator and behaves the same as - :meth:`contains` with no ``prereleases`` argument passed. - - >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1") - True - >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1") - True - >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1") - False - >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1") - False - >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True) - True - """ - return self.contains(item) - - def contains( - self, - item: UnparsedVersion, - prereleases: bool | None = None, - installed: bool | None = None, - ) -> bool: - """Return whether or not the item is contained in this SpecifierSet. - - :param item: - The item to check for, which can be a version string or a - :class:`Version` instance. - :param prereleases: - Whether or not to match prereleases with this SpecifierSet. If set to - ``None`` (the default), it uses :attr:`prereleases` to determine - whether or not prereleases are allowed. - - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3") - True - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3")) - True - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1") - False - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1") - False - >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1") - True - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True) - True - """ - # Ensure that our item is a Version instance. - if not isinstance(item, Version): - item = Version(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. - # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - if installed and item.is_prerelease: - item = Version(item.base_version) - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. 
- # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all(s.contains(item, prereleases=prereleases) for s in self._specs) - - def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None - ) -> Iterator[UnparsedVersionVar]: - """Filter items in the given iterable, that match the specifiers in this set. - - :param iterable: - An iterable that can contain version strings and :class:`Version` instances. - The items in the iterable will be filtered according to the specifier. - :param prereleases: - Whether or not to allow prereleases in the returned iterator. If set to - ``None`` (the default), it will be intelligently decide whether to allow - prereleases or not (based on the :attr:`prereleases` attribute, and - whether the only versions matching are prereleases). - - This method is smarter than just ``filter(SpecifierSet(...).contains, [...])`` - because it implements the rule from :pep:`440` that a prerelease item - SHOULD be accepted if no other versions match the given specifier. - - >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) - ['1.3'] - >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")])) - ['1.3', ] - >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"])) - [] - >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - - An "empty" SpecifierSet will filter items based on the presence of prerelease - versions in the set. - - >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) - ['1.3'] - >>> list(SpecifierSet("").filter(["1.5a1"])) - ['1.5a1'] - >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - """ - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iter(iterable) - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases. 
- else: - filtered: list[UnparsedVersionVar] = [] - found_prereleases: list[UnparsedVersionVar] = [] - - for item in iterable: - parsed_version = _coerce_version(item) - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return iter(found_prereleases) - - return iter(filtered) diff --git a/pkg_resources/_vendor/packaging/tags.py b/pkg_resources/_vendor/packaging/tags.py deleted file mode 100644 index 6667d29908..0000000000 --- a/pkg_resources/_vendor/packaging/tags.py +++ /dev/null @@ -1,568 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import logging -import platform -import re -import struct -import subprocess -import sys -import sysconfig -from importlib.machinery import EXTENSION_SUFFIXES -from typing import ( - Iterable, - Iterator, - Sequence, - Tuple, - cast, -) - -from . import _manylinux, _musllinux - -logger = logging.getLogger(__name__) - -PythonVersion = Sequence[int] -MacVersion = Tuple[int, int] - -INTERPRETER_SHORT_NAMES: dict[str, str] = { - "python": "py", # Generic. - "cpython": "cp", - "pypy": "pp", - "ironpython": "ip", - "jython": "jy", -} - - -_32_BIT_INTERPRETER = struct.calcsize("P") == 4 - - -class Tag: - """ - A representation of the tag triple for a wheel. - - Instances are considered immutable and thus are hashable. Equality checking - is also supported. - """ - - __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] - - def __init__(self, interpreter: str, abi: str, platform: str) -> None: - self._interpreter = interpreter.lower() - self._abi = abi.lower() - self._platform = platform.lower() - # The __hash__ of every single element in a Set[Tag] will be evaluated each time - # that a set calls its `.disjoint()` method, which may be called hundreds of - # times when scanning a page of links for packages with tags matching that - # Set[Tag]. Pre-computing the value here produces significant speedups for - # downstream consumers. - self._hash = hash((self._interpreter, self._abi, self._platform)) - - @property - def interpreter(self) -> str: - return self._interpreter - - @property - def abi(self) -> str: - return self._abi - - @property - def platform(self) -> str: - return self._platform - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Tag): - return NotImplemented - - return ( - (self._hash == other._hash) # Short-circuit ASAP for perf reasons. - and (self._platform == other._platform) - and (self._abi == other._abi) - and (self._interpreter == other._interpreter) - ) - - def __hash__(self) -> int: - return self._hash - - def __str__(self) -> str: - return f"{self._interpreter}-{self._abi}-{self._platform}" - - def __repr__(self) -> str: - return f"<{self} @ {id(self)}>" - - -def parse_tag(tag: str) -> frozenset[Tag]: - """ - Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. - - Returning a set is required due to the possibility that the tag is a - compressed tag set. 
- """ - tags = set() - interpreters, abis, platforms = tag.split("-") - for interpreter in interpreters.split("."): - for abi in abis.split("."): - for platform_ in platforms.split("."): - tags.add(Tag(interpreter, abi, platform_)) - return frozenset(tags) - - -def _get_config_var(name: str, warn: bool = False) -> int | str | None: - value: int | str | None = sysconfig.get_config_var(name) - if value is None and warn: - logger.debug( - "Config variable '%s' is unset, Python ABI tag may be incorrect", name - ) - return value - - -def _normalize_string(string: str) -> str: - return string.replace(".", "_").replace("-", "_").replace(" ", "_") - - -def _is_threaded_cpython(abis: list[str]) -> bool: - """ - Determine if the ABI corresponds to a threaded (`--disable-gil`) build. - - The threaded builds are indicated by a "t" in the abiflags. - """ - if len(abis) == 0: - return False - # expect e.g., cp313 - m = re.match(r"cp\d+(.*)", abis[0]) - if not m: - return False - abiflags = m.group(1) - return "t" in abiflags - - -def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: - """ - Determine if the Python version supports abi3. - - PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`) - builds do not support abi3. - """ - return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading - - -def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]: - py_version = tuple(py_version) # To allow for version comparison. - abis = [] - version = _version_nodot(py_version[:2]) - threading = debug = pymalloc = ucs4 = "" - with_debug = _get_config_var("Py_DEBUG", warn) - has_refcount = hasattr(sys, "gettotalrefcount") - # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled - # extension modules is the best option. - # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 - has_ext = "_d.pyd" in EXTENSION_SUFFIXES - if with_debug or (with_debug is None and (has_refcount or has_ext)): - debug = "d" - if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn): - threading = "t" - if py_version < (3, 8): - with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) - if with_pymalloc or with_pymalloc is None: - pymalloc = "m" - if py_version < (3, 3): - unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) - if unicode_size == 4 or ( - unicode_size is None and sys.maxunicode == 0x10FFFF - ): - ucs4 = "u" - elif debug: - # Debug builds can also load "normal" extension modules. - # We can also assume no UCS-4 or pymalloc requirement. - abis.append(f"cp{version}{threading}") - abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}") - return abis - - -def cpython_tags( - python_version: PythonVersion | None = None, - abis: Iterable[str] | None = None, - platforms: Iterable[str] | None = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a CPython interpreter. - - The tags consist of: - - cp-- - - cp-abi3- - - cp-none- - - cp-abi3- # Older Python versions down to 3.2. - - If python_version only specifies a major version then user-provided ABIs and - the 'none' ABItag will be used. - - If 'abi3' or 'none' are specified in 'abis' then they will be yielded at - their normal position and not at the beginning. 
- """ - if not python_version: - python_version = sys.version_info[:2] - - interpreter = f"cp{_version_nodot(python_version[:2])}" - - if abis is None: - if len(python_version) > 1: - abis = _cpython_abis(python_version, warn) - else: - abis = [] - abis = list(abis) - # 'abi3' and 'none' are explicitly handled later. - for explicit_abi in ("abi3", "none"): - try: - abis.remove(explicit_abi) - except ValueError: - pass - - platforms = list(platforms or platform_tags()) - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - - threading = _is_threaded_cpython(abis) - use_abi3 = _abi3_applies(python_version, threading) - if use_abi3: - yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) - yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) - - if use_abi3: - for minor_version in range(python_version[1] - 1, 1, -1): - for platform_ in platforms: - interpreter = "cp{version}".format( - version=_version_nodot((python_version[0], minor_version)) - ) - yield Tag(interpreter, "abi3", platform_) - - -def _generic_abi() -> list[str]: - """ - Return the ABI tag based on EXT_SUFFIX. - """ - # The following are examples of `EXT_SUFFIX`. - # We want to keep the parts which are related to the ABI and remove the - # parts which are related to the platform: - # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 - # - mac: '.cpython-310-darwin.so' => cp310 - # - win: '.cp310-win_amd64.pyd' => cp310 - # - win: '.pyd' => cp37 (uses _cpython_abis()) - # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 - # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' - # => graalpy_38_native - - ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) - if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": - raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") - parts = ext_suffix.split(".") - if len(parts) < 3: - # CPython3.7 and earlier uses ".pyd" on Windows. - return _cpython_abis(sys.version_info[:2]) - soabi = parts[1] - if soabi.startswith("cpython"): - # non-windows - abi = "cp" + soabi.split("-")[1] - elif soabi.startswith("cp"): - # windows - abi = soabi.split("-")[0] - elif soabi.startswith("pypy"): - abi = "-".join(soabi.split("-")[:2]) - elif soabi.startswith("graalpy"): - abi = "-".join(soabi.split("-")[:3]) - elif soabi: - # pyston, ironpython, others? - abi = soabi - else: - return [] - return [_normalize_string(abi)] - - -def generic_tags( - interpreter: str | None = None, - abis: Iterable[str] | None = None, - platforms: Iterable[str] | None = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a generic interpreter. - - The tags consist of: - - -- - - The "none" ABI will be added if it was not explicitly provided. - """ - if not interpreter: - interp_name = interpreter_name() - interp_version = interpreter_version(warn=warn) - interpreter = "".join([interp_name, interp_version]) - if abis is None: - abis = _generic_abi() - else: - abis = list(abis) - platforms = list(platforms or platform_tags()) - if "none" not in abis: - abis.append("none") - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - - -def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: - """ - Yields Python versions in descending order. - - After the latest version, the major-only version will be yielded, and then - all previous versions of that major version. 
- """ - if len(py_version) > 1: - yield f"py{_version_nodot(py_version[:2])}" - yield f"py{py_version[0]}" - if len(py_version) > 1: - for minor in range(py_version[1] - 1, -1, -1): - yield f"py{_version_nodot((py_version[0], minor))}" - - -def compatible_tags( - python_version: PythonVersion | None = None, - interpreter: str | None = None, - platforms: Iterable[str] | None = None, -) -> Iterator[Tag]: - """ - Yields the sequence of tags that are compatible with a specific version of Python. - - The tags consist of: - - py*-none- - - -none-any # ... if `interpreter` is provided. - - py*-none-any - """ - if not python_version: - python_version = sys.version_info[:2] - platforms = list(platforms or platform_tags()) - for version in _py_interpreter_range(python_version): - for platform_ in platforms: - yield Tag(version, "none", platform_) - if interpreter: - yield Tag(interpreter, "none", "any") - for version in _py_interpreter_range(python_version): - yield Tag(version, "none", "any") - - -def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: - if not is_32bit: - return arch - - if arch.startswith("ppc"): - return "ppc" - - return "i386" - - -def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> list[str]: - formats = [cpu_arch] - if cpu_arch == "x86_64": - if version < (10, 4): - return [] - formats.extend(["intel", "fat64", "fat32"]) - - elif cpu_arch == "i386": - if version < (10, 4): - return [] - formats.extend(["intel", "fat32", "fat"]) - - elif cpu_arch == "ppc64": - # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? - if version > (10, 5) or version < (10, 4): - return [] - formats.append("fat64") - - elif cpu_arch == "ppc": - if version > (10, 6): - return [] - formats.extend(["fat32", "fat"]) - - if cpu_arch in {"arm64", "x86_64"}: - formats.append("universal2") - - if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: - formats.append("universal") - - return formats - - -def mac_platforms( - version: MacVersion | None = None, arch: str | None = None -) -> Iterator[str]: - """ - Yields the platform tags for a macOS system. - - The `version` parameter is a two-item tuple specifying the macOS version to - generate platform tags for. The `arch` parameter is the CPU architecture to - generate platform tags for. Both parameters default to the appropriate value - for the current system. - """ - version_str, _, cpu_arch = platform.mac_ver() - if version is None: - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - if version == (10, 16): - # When built against an older macOS SDK, Python will report macOS 10.16 - # instead of the real version. - version_str = subprocess.run( - [ - sys.executable, - "-sS", - "-c", - "import platform; print(platform.mac_ver()[0])", - ], - check=True, - env={"SYSTEM_VERSION_COMPAT": "0"}, - stdout=subprocess.PIPE, - text=True, - ).stdout - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - else: - version = version - if arch is None: - arch = _mac_arch(cpu_arch) - else: - arch = arch - - if (10, 0) <= version and version < (11, 0): - # Prior to Mac OS 11, each yearly release of Mac OS bumped the - # "minor" version number. The major version was always 10. 
- for minor_version in range(version[1], -1, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=10, minor=minor_version, binary_format=binary_format - ) - - if version >= (11, 0): - # Starting with Mac OS 11, each yearly release bumps the major version - # number. The minor versions are now the midyear updates. - for major_version in range(version[0], 10, -1): - compat_version = major_version, 0 - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=major_version, minor=0, binary_format=binary_format - ) - - if version >= (11, 0): - # Mac OS 11 on x86_64 is compatible with binaries from previous releases. - # Arm64 support was introduced in 11.0, so no Arm binaries from previous - # releases exist. - # - # However, the "universal2" binary format can have a - # macOS version earlier than 11.0 when the x86_64 part of the binary supports - # that version of macOS. - if arch == "x86_64": - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - else: - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_format = "universal2" - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - - -def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: - linux = _normalize_string(sysconfig.get_platform()) - if not linux.startswith("linux_"): - # we should never be here, just yield the sysconfig one and return - yield linux - return - if is_32bit: - if linux == "linux_x86_64": - linux = "linux_i686" - elif linux == "linux_aarch64": - linux = "linux_armv8l" - _, arch = linux.split("_", 1) - archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch]) - yield from _manylinux.platform_tags(archs) - yield from _musllinux.platform_tags(archs) - for arch in archs: - yield f"linux_{arch}" - - -def _generic_platforms() -> Iterator[str]: - yield _normalize_string(sysconfig.get_platform()) - - -def platform_tags() -> Iterator[str]: - """ - Provides the platform tags for this installation. - """ - if platform.system() == "Darwin": - return mac_platforms() - elif platform.system() == "Linux": - return _linux_platforms() - else: - return _generic_platforms() - - -def interpreter_name() -> str: - """ - Returns the name of the running interpreter. - - Some implementations have a reserved, two-letter abbreviation which will - be returned when appropriate. - """ - name = sys.implementation.name - return INTERPRETER_SHORT_NAMES.get(name) or name - - -def interpreter_version(*, warn: bool = False) -> str: - """ - Returns the version of the running interpreter. - """ - version = _get_config_var("py_version_nodot", warn=warn) - if version: - version = str(version) - else: - version = _version_nodot(sys.version_info[:2]) - return version - - -def _version_nodot(version: PythonVersion) -> str: - return "".join(map(str, version)) - - -def sys_tags(*, warn: bool = False) -> Iterator[Tag]: - """ - Returns the sequence of tag triples for the running interpreter. 
- - The order of the sequence corresponds to priority order for the - interpreter, from most to least important. - """ - - interp_name = interpreter_name() - if interp_name == "cp": - yield from cpython_tags(warn=warn) - else: - yield from generic_tags() - - if interp_name == "pp": - interp = "pp3" - elif interp_name == "cp": - interp = "cp" + interpreter_version(warn=warn) - else: - interp = None - yield from compatible_tags(interpreter=interp) diff --git a/pkg_resources/_vendor/packaging/utils.py b/pkg_resources/_vendor/packaging/utils.py deleted file mode 100644 index d33da5bb8b..0000000000 --- a/pkg_resources/_vendor/packaging/utils.py +++ /dev/null @@ -1,174 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import re -from typing import NewType, Tuple, Union, cast - -from .tags import Tag, parse_tag -from .version import InvalidVersion, Version - -BuildTag = Union[Tuple[()], Tuple[int, str]] -NormalizedName = NewType("NormalizedName", str) - - -class InvalidName(ValueError): - """ - An invalid distribution name; users should refer to the packaging user guide. - """ - - -class InvalidWheelFilename(ValueError): - """ - An invalid wheel filename was found, users should refer to PEP 427. - """ - - -class InvalidSdistFilename(ValueError): - """ - An invalid sdist filename was found, users should refer to the packaging user guide. - """ - - -# Core metadata spec for `Name` -_validate_regex = re.compile( - r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE -) -_canonicalize_regex = re.compile(r"[-_.]+") -_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$") -# PEP 427: The build number must start with a digit. -_build_tag_regex = re.compile(r"(\d+)(.*)") - - -def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName: - if validate and not _validate_regex.match(name): - raise InvalidName(f"name is invalid: {name!r}") - # This is taken from PEP 503. - value = _canonicalize_regex.sub("-", name).lower() - return cast(NormalizedName, value) - - -def is_normalized_name(name: str) -> bool: - return _normalized_regex.match(name) is not None - - -def canonicalize_version( - version: Version | str, *, strip_trailing_zero: bool = True -) -> str: - """ - This is very similar to Version.__str__, but has one subtle difference - with the way it handles the release segment. 
- """ - if isinstance(version, str): - try: - parsed = Version(version) - except InvalidVersion: - # Legacy versions cannot be normalized - return version - else: - parsed = version - - parts = [] - - # Epoch - if parsed.epoch != 0: - parts.append(f"{parsed.epoch}!") - - # Release segment - release_segment = ".".join(str(x) for x in parsed.release) - if strip_trailing_zero: - # NB: This strips trailing '.0's to normalize - release_segment = re.sub(r"(\.0)+$", "", release_segment) - parts.append(release_segment) - - # Pre-release - if parsed.pre is not None: - parts.append("".join(str(x) for x in parsed.pre)) - - # Post-release - if parsed.post is not None: - parts.append(f".post{parsed.post}") - - # Development release - if parsed.dev is not None: - parts.append(f".dev{parsed.dev}") - - # Local version segment - if parsed.local is not None: - parts.append(f"+{parsed.local}") - - return "".join(parts) - - -def parse_wheel_filename( - filename: str, -) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]: - if not filename.endswith(".whl"): - raise InvalidWheelFilename( - f"Invalid wheel filename (extension must be '.whl'): {filename}" - ) - - filename = filename[:-4] - dashes = filename.count("-") - if dashes not in (4, 5): - raise InvalidWheelFilename( - f"Invalid wheel filename (wrong number of parts): {filename}" - ) - - parts = filename.split("-", dashes - 2) - name_part = parts[0] - # See PEP 427 for the rules on escaping the project name. - if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: - raise InvalidWheelFilename(f"Invalid project name: {filename}") - name = canonicalize_name(name_part) - - try: - version = Version(parts[1]) - except InvalidVersion as e: - raise InvalidWheelFilename( - f"Invalid wheel filename (invalid version): {filename}" - ) from e - - if dashes == 5: - build_part = parts[2] - build_match = _build_tag_regex.match(build_part) - if build_match is None: - raise InvalidWheelFilename( - f"Invalid build number: {build_part} in '{filename}'" - ) - build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) - else: - build = () - tags = parse_tag(parts[-1]) - return (name, version, build, tags) - - -def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]: - if filename.endswith(".tar.gz"): - file_stem = filename[: -len(".tar.gz")] - elif filename.endswith(".zip"): - file_stem = filename[: -len(".zip")] - else: - raise InvalidSdistFilename( - f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" - f" {filename}" - ) - - # We are requiring a PEP 440 version, which cannot contain dashes, - # so we split on the last dash. - name_part, sep, version_part = file_stem.rpartition("-") - if not sep: - raise InvalidSdistFilename(f"Invalid sdist filename: {filename}") - - name = canonicalize_name(name_part) - - try: - version = Version(version_part) - except InvalidVersion as e: - raise InvalidSdistFilename( - f"Invalid sdist filename (invalid version): {filename}" - ) from e - - return (name, version) diff --git a/pkg_resources/_vendor/packaging/version.py b/pkg_resources/_vendor/packaging/version.py deleted file mode 100644 index 46bc261308..0000000000 --- a/pkg_resources/_vendor/packaging/version.py +++ /dev/null @@ -1,563 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -""" -.. 
-
-    from packaging.version import parse, Version
-"""
-
-from __future__ import annotations
-
-import itertools
-import re
-from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union
-
-from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
-
-__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
-
-LocalType = Tuple[Union[int, str], ...]
-
-CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
-CmpLocalType = Union[
-    NegativeInfinityType,
-    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
-]
-CmpKey = Tuple[
-    int,
-    Tuple[int, ...],
-    CmpPrePostDevType,
-    CmpPrePostDevType,
-    CmpPrePostDevType,
-    CmpLocalType,
-]
-VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
-
-
-class _Version(NamedTuple):
-    epoch: int
-    release: tuple[int, ...]
-    dev: tuple[str, int] | None
-    pre: tuple[str, int] | None
-    post: tuple[str, int] | None
-    local: LocalType | None
-
-
-def parse(version: str) -> Version:
-    """Parse the given version string.
-
-    >>> parse('1.0.dev1')
-    <Version('1.0.dev1')>
-
-    :param version: The version string to parse.
-    :raises InvalidVersion: When the version string is not a valid version.
-    """
-    return Version(version)
-
-
-class InvalidVersion(ValueError):
-    """Raised when a version string is not a valid version.
-
-    >>> Version("invalid")
-    Traceback (most recent call last):
-        ...
-    packaging.version.InvalidVersion: Invalid version: 'invalid'
-    """
-
-
-class _BaseVersion:
-    _key: tuple[Any, ...]
-
-    def __hash__(self) -> int:
-        return hash(self._key)
-
-    # Please keep the duplicated `isinstance` check
-    # in the six comparisons hereunder
-    # unless you find a way to avoid adding overhead function calls.
-    def __lt__(self, other: _BaseVersion) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key < other._key
-
-    def __le__(self, other: _BaseVersion) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key <= other._key
-
-    def __eq__(self, other: object) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key == other._key
-
-    def __ge__(self, other: _BaseVersion) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key >= other._key
-
-    def __gt__(self, other: _BaseVersion) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key > other._key
-
-    def __ne__(self, other: object) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key != other._key
-
-
-# Deliberately not anchored to the start and end of the string, to make it
-# easier for 3rd party code to reuse
-_VERSION_PATTERN = r"""
-    v?
-    (?:
-        (?:(?P<epoch>[0-9]+)!)?                           # epoch
-        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
-        (?P<pre>                                          # pre-release
-            [-_\.]?
-            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
-            [-_\.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_\.]?
-                (?P<post_l>post|rev|r)
-                [-_\.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_\.]?
-            (?P<dev_l>dev)
-            [-_\.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
-"""
-
-VERSION_PATTERN = _VERSION_PATTERN
-"""
-A string containing the regular expression used to match a valid version.
-
-The pattern is not anchored at either end, and is intended for embedding in larger
-expressions (for example, matching a version number as part of a file name). The
-regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
-flags set.
-
-:meta hide-value:
-"""
-
-
-class Version(_BaseVersion):
-    """This class abstracts handling of a project's versions.
-
-    A :class:`Version` instance is comparison aware and can be compared and
-    sorted using the standard Python interfaces.
-
-    >>> v1 = Version("1.0a5")
-    >>> v2 = Version("1.0")
-    >>> v1
-    <Version('1.0a5')>
-    >>> v2
-    <Version('1.0')>
-    >>> v1 < v2
-    True
-    >>> v1 == v2
-    False
-    >>> v1 > v2
-    False
-    >>> v1 >= v2
-    False
-    >>> v1 <= v2
-    True
-    """
-
-    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
-    _key: CmpKey
-
-    def __init__(self, version: str) -> None:
-        """Initialize a Version object.
-
-        :param version:
-            The string representation of a version which will be parsed and normalized
-            before use.
-        :raises InvalidVersion:
-            If the ``version`` does not conform to PEP 440 in any way then this
-            exception will be raised.
-        """
-
-        # Validate the version and parse it into pieces
-        match = self._regex.search(version)
-        if not match:
-            raise InvalidVersion(f"Invalid version: '{version}'")
-
-        # Store the parsed out pieces of the version
-        self._version = _Version(
-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
-            release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
-            post=_parse_letter_version(
-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
-            ),
-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
-            local=_parse_local_version(match.group("local")),
-        )
-
-        # Generate a key which will be used for sorting
-        self._key = _cmpkey(
-            self._version.epoch,
-            self._version.release,
-            self._version.pre,
-            self._version.post,
-            self._version.dev,
-            self._version.local,
-        )
-
-    def __repr__(self) -> str:
-        """A representation of the Version that shows all internal state.
-
-        >>> Version('1.0.0')
-        <Version('1.0.0')>
-        """
-        return f"<Version('{str(self)}')>"
-
-    def __str__(self) -> str:
-        """A string representation of the version that can be rounded-tripped.
-
-        >>> str(Version("1.0a5"))
-        '1.0a5'
-        """
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        # Pre-release
-        if self.pre is not None:
-            parts.append("".join(str(x) for x in self.pre))
-
-        # Post-release
-        if self.post is not None:
-            parts.append(f".post{self.post}")
-
-        # Development release
-        if self.dev is not None:
-            parts.append(f".dev{self.dev}")
-
-        # Local version segment
-        if self.local is not None:
-            parts.append(f"+{self.local}")
-
-        return "".join(parts)
-
-    @property
-    def epoch(self) -> int:
-        """The epoch of the version.
-
-        >>> Version("2.0.0").epoch
-        0
-        >>> Version("1!2.0.0").epoch
-        1
-        """
-        return self._version.epoch
-
-    @property
-    def release(self) -> tuple[int, ...]:
-        """The components of the "release" segment of the version.
-
-        >>> Version("1.2.3").release
-        (1, 2, 3)
-        >>> Version("2.0.0").release
-        (2, 0, 0)
-        >>> Version("1!2.0.0.post0").release
-        (2, 0, 0)
-
-        Includes trailing zeroes but not the epoch or any pre-release / development /
-        post-release suffixes.
-        """
-        return self._version.release
-
-    @property
-    def pre(self) -> tuple[str, int] | None:
-        """The pre-release segment of the version.
-
-        >>> print(Version("1.2.3").pre)
-        None
-        >>> Version("1.2.3a1").pre
-        ('a', 1)
-        >>> Version("1.2.3b1").pre
-        ('b', 1)
-        >>> Version("1.2.3rc1").pre
-        ('rc', 1)
-        """
-        return self._version.pre
-
-    @property
-    def post(self) -> int | None:
-        """The post-release number of the version.
-
-        >>> print(Version("1.2.3").post)
-        None
-        >>> Version("1.2.3.post1").post
-        1
-        """
-        return self._version.post[1] if self._version.post else None
-
-    @property
-    def dev(self) -> int | None:
-        """The development number of the version.
-
-        >>> print(Version("1.2.3").dev)
-        None
-        >>> Version("1.2.3.dev1").dev
-        1
-        """
-        return self._version.dev[1] if self._version.dev else None
-
-    @property
-    def local(self) -> str | None:
-        """The local version segment of the version.
-
-        >>> print(Version("1.2.3").local)
-        None
-        >>> Version("1.2.3+abc").local
-        'abc'
-        """
-        if self._version.local:
-            return ".".join(str(x) for x in self._version.local)
-        else:
-            return None
-
-    @property
-    def public(self) -> str:
-        """The public portion of the version.
-
-        >>> Version("1.2.3").public
-        '1.2.3'
-        >>> Version("1.2.3+abc").public
-        '1.2.3'
-        >>> Version("1.2.3+abc.dev1").public
-        '1.2.3'
-        """
-        return str(self).split("+", 1)[0]
-
-    @property
-    def base_version(self) -> str:
-        """The "base version" of the version.
-
-        >>> Version("1.2.3").base_version
-        '1.2.3'
-        >>> Version("1.2.3+abc").base_version
-        '1.2.3'
-        >>> Version("1!1.2.3+abc.dev1").base_version
-        '1!1.2.3'
-
-        The "base version" is the public version of the project without any pre or post
-        release markers.
-        """
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        return "".join(parts)
-
-    @property
-    def is_prerelease(self) -> bool:
-        """Whether this version is a pre-release.
-
-        >>> Version("1.2.3").is_prerelease
-        False
-        >>> Version("1.2.3a1").is_prerelease
-        True
-        >>> Version("1.2.3b1").is_prerelease
-        True
-        >>> Version("1.2.3rc1").is_prerelease
-        True
-        >>> Version("1.2.3dev1").is_prerelease
-        True
-        """
-        return self.dev is not None or self.pre is not None
-
-    @property
-    def is_postrelease(self) -> bool:
-        """Whether this version is a post-release.
-
-        >>> Version("1.2.3").is_postrelease
-        False
-        >>> Version("1.2.3.post1").is_postrelease
-        True
-        """
-        return self.post is not None
-
-    @property
-    def is_devrelease(self) -> bool:
-        """Whether this version is a development release.
-
-        >>> Version("1.2.3").is_devrelease
-        False
-        >>> Version("1.2.3.dev1").is_devrelease
-        True
-        """
-        return self.dev is not None
-
-    @property
-    def major(self) -> int:
-        """The first item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").major
-        1
-        """
-        return self.release[0] if len(self.release) >= 1 else 0
-
-    @property
-    def minor(self) -> int:
-        """The second item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").minor
-        2
-        >>> Version("1").minor
-        0
-        """
-        return self.release[1] if len(self.release) >= 2 else 0
-
-    @property
-    def micro(self) -> int:
-        """The third item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").micro
-        3
-        >>> Version("1").micro
-        0
-        """
-        return self.release[2] if len(self.release) >= 3 else 0
-
-
-def _parse_letter_version(
-    letter: str | None, number: str | bytes | SupportsInt | None
-) -> tuple[str, int] | None:
-    if letter:
-        # We consider there to be an implicit 0 in a pre-release if there is
-        # not a numeral associated with it.
-        if number is None:
-            number = 0
-
-        # We normalize any letters to their lower case form
-        letter = letter.lower()
-
-        # We consider some words to be alternate spellings of other words and
-        # in those cases we want to normalize the spellings to our preferred
-        # spelling.
-        if letter == "alpha":
-            letter = "a"
-        elif letter == "beta":
-            letter = "b"
-        elif letter in ["c", "pre", "preview"]:
-            letter = "rc"
-        elif letter in ["rev", "r"]:
-            letter = "post"
-
-        return letter, int(number)
-    if not letter and number:
-        # We assume if we are given a number, but we are not given a letter
-        # then this is using the implicit post release syntax (e.g. 1.0-1)
-        letter = "post"
-
-        return letter, int(number)
-
-    return None
-
-
-_local_version_separators = re.compile(r"[\._-]")
-
-
-def _parse_local_version(local: str | None) -> LocalType | None:
-    """
-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
-    """
-    if local is not None:
-        return tuple(
-            part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_separators.split(local)
-        )
-    return None
-
-
-def _cmpkey(
-    epoch: int,
-    release: tuple[int, ...],
-    pre: tuple[str, int] | None,
-    post: tuple[str, int] | None,
-    dev: tuple[str, int] | None,
-    local: LocalType | None,
-) -> CmpKey:
-    # When we compare a release version, we want to compare it with all of the
-    # trailing zeros removed. So we'll use a reverse the list, drop all the now
-    # leading zeros until we come to something non zero, then take the rest
-    # re-reverse it back into the correct order and make it a tuple and use
-    # that for our sorting key.
-    _release = tuple(
-        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
-    )
-
-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
-    # We'll do this by abusing the pre segment, but we _only_ want to do this
-    # if there is not a pre or a post segment. If we have one of those then
-    # the normal sorting rules will handle this case correctly.
-    if pre is None and post is None and dev is not None:
-        _pre: CmpPrePostDevType = NegativeInfinity
-    # Versions without a pre-release (except as noted above) should sort after
-    # those with one.
-    elif pre is None:
-        _pre = Infinity
-    else:
-        _pre = pre
-
-    # Versions without a post segment should sort before those with one.
-    if post is None:
-        _post: CmpPrePostDevType = NegativeInfinity
-
-    else:
-        _post = post
-
-    # Versions without a development segment should sort after those with one.
-    if dev is None:
-        _dev: CmpPrePostDevType = Infinity
-
-    else:
-        _dev = dev
-
-    if local is None:
-        # Versions without a local segment should sort before those with one.
-        _local: CmpLocalType = NegativeInfinity
-    else:
-        # Versions with a local segment need that segment parsed to implement
-        # the sorting rules in PEP440.
-        # - Alpha numeric segments sort before numeric segments
-        # - Alpha numeric segments sort lexicographically
-        # - Numeric segments sort numerically
-        # - Shorter versions sort before longer versions when the prefixes
-        #   match exactly
-        _local = tuple(
-            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
-        )
-
-    return epoch, _release, _pre, _post, _dev, _local
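
For reference, the ordering rules that `_cmpkey` encodes above are observable
through the public `Version` API. A minimal sketch, assuming the PyPI
`packaging` distribution (which this vendored copy tracks) is installed:

    from packaging.version import Version

    # dev releases sort before pre-releases, which sort before the final
    # release, which sorts before post-releases
    assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")
    # trailing zeros in the release segment are stripped for comparison
    assert Version("1.0") == Version("1.0.0")
    # a local segment sorts after the same public version
    assert Version("1.0") < Version("1.0+local")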
diff --git a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/INSTALLER b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/METADATA b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/METADATA
deleted file mode 100644
index ab51ef36ad..0000000000
--- a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/METADATA
+++ /dev/null
@@ -1,319 +0,0 @@
-Metadata-Version: 2.3
-Name: platformdirs
-Version: 4.2.2
-Summary: A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`.
-Project-URL: Documentation, https://platformdirs.readthedocs.io
-Project-URL: Homepage, https://github.com/platformdirs/platformdirs
-Project-URL: Source, https://github.com/platformdirs/platformdirs
-Project-URL: Tracker, https://github.com/platformdirs/platformdirs/issues
-Maintainer-email: Bernát Gábor <gaborjbernat@gmail.com>, Julian Berman <Julian@GrayVines.com>, Ofek Lev <oss@ofek.dev>, Ronny Pfannschmidt <opensource@ronnypfannschmidt.de>
-License-Expression: MIT
-License-File: LICENSE
-Keywords: appdirs,application,cache,directory,log,user
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Requires-Python: >=3.8
-Provides-Extra: docs
-Requires-Dist: furo>=2023.9.10; extra == 'docs'
-Requires-Dist: proselint>=0.13; extra == 'docs'
-Requires-Dist: sphinx-autodoc-typehints>=1.25.2; extra == 'docs'
-Requires-Dist: sphinx>=7.2.6; extra == 'docs'
-Provides-Extra: test
-Requires-Dist: appdirs==1.4.4; extra == 'test'
-Requires-Dist: covdefaults>=2.3; extra == 'test'
-Requires-Dist: pytest-cov>=4.1; extra == 'test'
-Requires-Dist: pytest-mock>=3.12; extra == 'test'
-Requires-Dist: pytest>=7.4.3; extra == 'test'
-Provides-Extra: type
-Requires-Dist: mypy>=1.8; extra == 'type'
-Description-Content-Type: text/x-rst
-
-The problem
-===========
-
-.. image:: https://github.com/platformdirs/platformdirs/actions/workflows/check.yml/badge.svg
-   :target: https://github.com/platformdirs/platformdirs/actions
-
-When writing desktop application, finding the right location to store user data
-and configuration varies per platform. Even for single-platform apps, there
-may by plenty of nuances in figuring out the right location.
-
-For example, if running on macOS, you should use::
-
-    ~/Library/Application Support/<AppName>
-
-If on Windows (at least English Win) that should be::
-
-    C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>
-
-or possibly::
-
-    C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>
-
-for `roaming profiles <https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-vista/cc766489(v=ws.10)>`_ but that is another story.
-
-On Linux (and other Unices), according to the `XDG Basedir Spec`_, it should be::
-
-    ~/.local/share/<AppName>
-
-.. _XDG Basedir Spec: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
-
-``platformdirs`` to the rescue
-==============================
-
-This kind of thing is what the ``platformdirs`` package is for.
-``platformdirs`` will help you choose an appropriate:
-
-- user data dir (``user_data_dir``)
-- user config dir (``user_config_dir``)
-- user cache dir (``user_cache_dir``)
-- site data dir (``site_data_dir``)
-- site config dir (``site_config_dir``)
-- user log dir (``user_log_dir``)
-- user documents dir (``user_documents_dir``)
-- user downloads dir (``user_downloads_dir``)
-- user pictures dir (``user_pictures_dir``)
-- user videos dir (``user_videos_dir``)
-- user music dir (``user_music_dir``)
-- user desktop dir (``user_desktop_dir``)
-- user runtime dir (``user_runtime_dir``)
-
-And also:
-
-- Is slightly opinionated on the directory names used. Look for "OPINION" in
-  documentation and code for when an opinion is being applied.
-
-Example output
-==============
-
-On macOS:
-
-.. code-block:: pycon
-
-    >>> from platformdirs import *
-    >>> appname = "SuperApp"
-    >>> appauthor = "Acme"
-    >>> user_data_dir(appname, appauthor)
-    '/Users/trentm/Library/Application Support/SuperApp'
-    >>> site_data_dir(appname, appauthor)
-    '/Library/Application Support/SuperApp'
-    >>> user_cache_dir(appname, appauthor)
-    '/Users/trentm/Library/Caches/SuperApp'
-    >>> user_log_dir(appname, appauthor)
-    '/Users/trentm/Library/Logs/SuperApp'
-    >>> user_documents_dir()
-    '/Users/trentm/Documents'
-    >>> user_downloads_dir()
-    '/Users/trentm/Downloads'
-    >>> user_pictures_dir()
-    '/Users/trentm/Pictures'
-    >>> user_videos_dir()
-    '/Users/trentm/Movies'
-    >>> user_music_dir()
-    '/Users/trentm/Music'
-    >>> user_desktop_dir()
-    '/Users/trentm/Desktop'
-    >>> user_runtime_dir(appname, appauthor)
-    '/Users/trentm/Library/Caches/TemporaryItems/SuperApp'
-
-On Windows:
-
-.. code-block:: pycon
-
-    >>> from platformdirs import *
-    >>> appname = "SuperApp"
-    >>> appauthor = "Acme"
-    >>> user_data_dir(appname, appauthor)
-    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp'
-    >>> user_data_dir(appname, appauthor, roaming=True)
-    'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp'
-    >>> user_cache_dir(appname, appauthor)
-    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache'
-    >>> user_log_dir(appname, appauthor)
-    'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'
-    >>> user_documents_dir()
-    'C:\\Users\\trentm\\Documents'
-    >>> user_downloads_dir()
-    'C:\\Users\\trentm\\Downloads'
-    >>> user_pictures_dir()
-    'C:\\Users\\trentm\\Pictures'
-    >>> user_videos_dir()
-    'C:\\Users\\trentm\\Videos'
-    >>> user_music_dir()
-    'C:\\Users\\trentm\\Music'
-    >>> user_desktop_dir()
-    'C:\\Users\\trentm\\Desktop'
-    >>> user_runtime_dir(appname, appauthor)
-    'C:\\Users\\trentm\\AppData\\Local\\Temp\\Acme\\SuperApp'
-
-On Linux:
-
-.. code-block:: pycon
-
-    >>> from platformdirs import *
-    >>> appname = "SuperApp"
-    >>> appauthor = "Acme"
-    >>> user_data_dir(appname, appauthor)
-    '/home/trentm/.local/share/SuperApp'
-    >>> site_data_dir(appname, appauthor)
-    '/usr/local/share/SuperApp'
-    >>> site_data_dir(appname, appauthor, multipath=True)
-    '/usr/local/share/SuperApp:/usr/share/SuperApp'
-    >>> user_cache_dir(appname, appauthor)
-    '/home/trentm/.cache/SuperApp'
-    >>> user_log_dir(appname, appauthor)
-    '/home/trentm/.local/state/SuperApp/log'
-    >>> user_config_dir(appname)
-    '/home/trentm/.config/SuperApp'
-    >>> user_documents_dir()
-    '/home/trentm/Documents'
-    >>> user_downloads_dir()
-    '/home/trentm/Downloads'
-    >>> user_pictures_dir()
-    '/home/trentm/Pictures'
-    >>> user_videos_dir()
-    '/home/trentm/Videos'
-    >>> user_music_dir()
-    '/home/trentm/Music'
-    >>> user_desktop_dir()
-    '/home/trentm/Desktop'
-    >>> user_runtime_dir(appname, appauthor)
-    '/run/user/{os.getuid()}/SuperApp'
-    >>> site_config_dir(appname)
-    '/etc/xdg/SuperApp'
-    >>> os.environ["XDG_CONFIG_DIRS"] = "/etc:/usr/local/etc"
-    >>> site_config_dir(appname, multipath=True)
-    '/etc/SuperApp:/usr/local/etc/SuperApp'
-
-On Android::
-
-    >>> from platformdirs import *
-    >>> appname = "SuperApp"
-    >>> appauthor = "Acme"
-    >>> user_data_dir(appname, appauthor)
-    '/data/data/com.myApp/files/SuperApp'
-    >>> user_cache_dir(appname, appauthor)
-    '/data/data/com.myApp/cache/SuperApp'
-    >>> user_log_dir(appname, appauthor)
-    '/data/data/com.myApp/cache/SuperApp/log'
-    >>> user_config_dir(appname)
-    '/data/data/com.myApp/shared_prefs/SuperApp'
-    >>> user_documents_dir()
-    '/storage/emulated/0/Documents'
-    >>> user_downloads_dir()
-    '/storage/emulated/0/Downloads'
-    >>> user_pictures_dir()
-    '/storage/emulated/0/Pictures'
-    >>> user_videos_dir()
-    '/storage/emulated/0/DCIM/Camera'
-    >>> user_music_dir()
-    '/storage/emulated/0/Music'
-    >>> user_desktop_dir()
-    '/storage/emulated/0/Desktop'
-    >>> user_runtime_dir(appname, appauthor)
-    '/data/data/com.myApp/cache/SuperApp/tmp'
-
-Note: Some android apps like Termux and Pydroid are used as shells. These
-apps are used by the end user to emulate Linux environment. Presence of
-``SHELL`` environment variable is used by Platformdirs to differentiate
-between general android apps and android apps used as shells. Shell android
-apps also support ``XDG_*`` environment variables.
-
-
-``PlatformDirs`` for convenience
-================================
-
-.. code-block:: pycon
-
-    >>> from platformdirs import PlatformDirs
-    >>> dirs = PlatformDirs("SuperApp", "Acme")
-    >>> dirs.user_data_dir
-    '/Users/trentm/Library/Application Support/SuperApp'
-    >>> dirs.site_data_dir
-    '/Library/Application Support/SuperApp'
-    >>> dirs.user_cache_dir
-    '/Users/trentm/Library/Caches/SuperApp'
-    >>> dirs.user_log_dir
-    '/Users/trentm/Library/Logs/SuperApp'
-    >>> dirs.user_documents_dir
-    '/Users/trentm/Documents'
-    >>> dirs.user_downloads_dir
-    '/Users/trentm/Downloads'
-    >>> dirs.user_pictures_dir
-    '/Users/trentm/Pictures'
-    >>> dirs.user_videos_dir
-    '/Users/trentm/Movies'
-    >>> dirs.user_music_dir
-    '/Users/trentm/Music'
-    >>> dirs.user_desktop_dir
-    '/Users/trentm/Desktop'
-    >>> dirs.user_runtime_dir
-    '/Users/trentm/Library/Caches/TemporaryItems/SuperApp'
-
-Per-version isolation
-=====================
-
-If you have multiple versions of your app in use that you want to be
-able to run side-by-side, then you may want version-isolation for these
-dirs::
-
-    >>> from platformdirs import PlatformDirs
-    >>> dirs = PlatformDirs("SuperApp", "Acme", version="1.0")
-    >>> dirs.user_data_dir
-    '/Users/trentm/Library/Application Support/SuperApp/1.0'
-    >>> dirs.site_data_dir
-    '/Library/Application Support/SuperApp/1.0'
-    >>> dirs.user_cache_dir
-    '/Users/trentm/Library/Caches/SuperApp/1.0'
-    >>> dirs.user_log_dir
-    '/Users/trentm/Library/Logs/SuperApp/1.0'
-    >>> dirs.user_documents_dir
-    '/Users/trentm/Documents'
-    >>> dirs.user_downloads_dir
-    '/Users/trentm/Downloads'
-    >>> dirs.user_pictures_dir
-    '/Users/trentm/Pictures'
-    >>> dirs.user_videos_dir
-    '/Users/trentm/Movies'
-    >>> dirs.user_music_dir
-    '/Users/trentm/Music'
-    >>> dirs.user_desktop_dir
-    '/Users/trentm/Desktop'
-    >>> dirs.user_runtime_dir
-    '/Users/trentm/Library/Caches/TemporaryItems/SuperApp/1.0'
-
-Be wary of using this for configuration files though; you'll need to handle
-migrating configuration files manually.
-
-Why this Fork?
-==============
-
-This repository is a friendly fork of the wonderful work started by
-`ActiveState <https://www.activestate.com>`_ who created
-``appdirs``, this package's ancestor.
-
-Maintaining an open source project is no easy task, particularly
-from within an organization, and the Python community is indebted
-to ``appdirs`` (and to Trent Mick and Jeff Rouse in particular) for
-creating an incredibly useful simple module, as evidenced by the wide
-number of users it has attracted over the years.
-
-Nonetheless, given the number of long-standing open issues
-and pull requests, and no clear path towards `ensuring
-that maintenance of the package would continue or grow
-<https://github.com/ActiveState/appdirs/issues/79>`_, this fork was
-created.
-
-Contributions are most welcome.
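
One detail the examples above leave implicit: on Unix the lookups consult the
``XDG_*`` environment variables at call time, so an override takes effect on
the next call. A small sketch, assuming the PyPI ``platformdirs`` package on
Linux (the override path is hypothetical):

    import os
    from platformdirs import user_config_dir

    os.environ["XDG_CONFIG_HOME"] = "/tmp/xdg-demo"
    print(user_config_dir("SuperApp"))  # '/tmp/xdg-demo/SuperApp'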
diff --git a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/RECORD b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/RECORD
deleted file mode 100644
index 64c0c8ea2e..0000000000
--- a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/RECORD
+++ /dev/null
@@ -1,23 +0,0 @@
-platformdirs-4.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-platformdirs-4.2.2.dist-info/METADATA,sha256=zmsie01G1MtXR0wgIv5XpVeTO7idr0WWvfmxKsKWuGk,11429
-platformdirs-4.2.2.dist-info/RECORD,,
-platformdirs-4.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-platformdirs-4.2.2.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
-platformdirs-4.2.2.dist-info/licenses/LICENSE,sha256=KeD9YukphQ6G6yjD_czwzv30-pSHkBHP-z0NS-1tTbY,1089
-platformdirs/__init__.py,sha256=EMGE8qeHRR9CzDFr8kL3tA8hdZZniYjXBVZd0UGTWK0,22225
-platformdirs/__main__.py,sha256=HnsUQHpiBaiTxwcmwVw-nFaPdVNZtQIdi1eWDtI-MzI,1493
-platformdirs/__pycache__/__init__.cpython-312.pyc,,
-platformdirs/__pycache__/__main__.cpython-312.pyc,,
-platformdirs/__pycache__/android.cpython-312.pyc,,
-platformdirs/__pycache__/api.cpython-312.pyc,,
-platformdirs/__pycache__/macos.cpython-312.pyc,,
-platformdirs/__pycache__/unix.cpython-312.pyc,,
-platformdirs/__pycache__/version.cpython-312.pyc,,
-platformdirs/__pycache__/windows.cpython-312.pyc,,
-platformdirs/android.py,sha256=xZXY9Jd46WOsxT2U6-5HsNtDZ-IQqxcEUrBLl3hYk4o,9016
-platformdirs/api.py,sha256=QBYdUac2eC521ek_y53uD1Dcq-lJX8IgSRVd4InC6uc,8996
-platformdirs/macos.py,sha256=wftsbsvq6nZ0WORXSiCrZNkRHz_WKuktl0a6mC7MFkI,5580
-platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-platformdirs/unix.py,sha256=Cci9Wqt35dAMsg6HT9nRGHSBW5obb0pR3AE1JJnsCXg,10643
-platformdirs/version.py,sha256=r7F76tZRjgQKzrpx_I0_ZMQOMU-PS7eGnHD7zEK3KB0,411
-platformdirs/windows.py,sha256=IFpiohUBwxPtCzlyKwNtxyW4Jk8haa6W8o59mfrDXVo,10125
diff --git a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/REQUESTED b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/WHEEL b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/WHEEL
deleted file mode 100644
index 516596c767..0000000000
--- a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/WHEEL
+++ /dev/null
@@ -1,4 +0,0 @@
-Wheel-Version: 1.0
-Generator: hatchling 1.24.2
-Root-Is-Purelib: true
-Tag: py3-none-any
diff --git a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE b/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
deleted file mode 100644
index f35fed9191..0000000000
--- a/pkg_resources/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2010-202x The platformdirs developers
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/pkg_resources/_vendor/platformdirs/__init__.py b/pkg_resources/_vendor/platformdirs/__init__.py
deleted file mode 100644
index 3f7d9490d1..0000000000
--- a/pkg_resources/_vendor/platformdirs/__init__.py
+++ /dev/null
@@ -1,627 +0,0 @@
-"""
-Utilities for determining application-specific dirs.
-
-See <https://github.com/platformdirs/platformdirs> for details and usage.
-
-"""
-
-from __future__ import annotations
-
-import os
-import sys
-from typing import TYPE_CHECKING
-
-from .api import PlatformDirsABC
-from .version import __version__
-from .version import __version_tuple__ as __version_info__
-
-if TYPE_CHECKING:
-    from pathlib import Path
-    from typing import Literal
-
-
-def _set_platform_dir_class() -> type[PlatformDirsABC]:
-    if sys.platform == "win32":
-        from platformdirs.windows import Windows as Result  # noqa: PLC0415
-    elif sys.platform == "darwin":
-        from platformdirs.macos import MacOS as Result  # noqa: PLC0415
-    else:
-        from platformdirs.unix import Unix as Result  # noqa: PLC0415
-
-    if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
-        if os.getenv("SHELL") or os.getenv("PREFIX"):
-            return Result
-
-        from platformdirs.android import _android_folder  # noqa: PLC0415
-
-        if _android_folder() is not None:
-            from platformdirs.android import Android  # noqa: PLC0415
-
-            return Android  # return to avoid redefinition of a result
-
-    return Result
-
-
-PlatformDirs = _set_platform_dir_class()  #: Currently active platform
-AppDirs = PlatformDirs  #: Backwards compatibility with appdirs
-
-
-def user_data_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    roaming: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: data directory tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        roaming=roaming,
-        ensure_exists=ensure_exists,
-    ).user_data_dir
-
-
-def site_data_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    multipath: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: data directory shared by users
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        multipath=multipath,
-        ensure_exists=ensure_exists,
-    ).site_data_dir
-
-
-def user_config_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    roaming: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: config directory tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        roaming=roaming,
-        ensure_exists=ensure_exists,
-    ).user_config_dir
-
-
-def site_config_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    multipath: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: config directory shared by the users
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        multipath=multipath,
-        ensure_exists=ensure_exists,
-    ).site_config_dir
-
-
-def user_cache_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: cache directory tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).user_cache_dir
-
-
-def site_cache_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: cache directory tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).site_cache_dir
-
-
-def user_state_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    roaming: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: state directory tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        roaming=roaming,
-        ensure_exists=ensure_exists,
-    ).user_state_dir
-
-
-def user_log_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: log directory tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).user_log_dir
-
-
-def user_documents_dir() -> str:
-    """:returns: documents directory tied to the user"""
-    return PlatformDirs().user_documents_dir
-
-
-def user_downloads_dir() -> str:
-    """:returns: downloads directory tied to the user"""
-    return PlatformDirs().user_downloads_dir
-
-
-def user_pictures_dir() -> str:
-    """:returns: pictures directory tied to the user"""
-    return PlatformDirs().user_pictures_dir
-
-
-def user_videos_dir() -> str:
-    """:returns: videos directory tied to the user"""
-    return PlatformDirs().user_videos_dir
-
-
-def user_music_dir() -> str:
-    """:returns: music directory tied to the user"""
-    return PlatformDirs().user_music_dir
-
-
-def user_desktop_dir() -> str:
-    """:returns: desktop directory tied to the user"""
-    return PlatformDirs().user_desktop_dir
-
-
-def user_runtime_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: runtime directory tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).user_runtime_dir
-
-
-def site_runtime_dir(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> str:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: runtime directory shared by users
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).site_runtime_dir
-
-
-def user_data_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    roaming: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: data path tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        roaming=roaming,
-        ensure_exists=ensure_exists,
-    ).user_data_path
-
-
-def site_data_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    multipath: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: data path shared by users
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        multipath=multipath,
-        ensure_exists=ensure_exists,
-    ).site_data_path
-
-
-def user_config_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    roaming: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: config path tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        roaming=roaming,
-        ensure_exists=ensure_exists,
-    ).user_config_path
-
-
-def site_config_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    multipath: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: config path shared by the users
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        multipath=multipath,
-        ensure_exists=ensure_exists,
-    ).site_config_path
-
-
-def site_cache_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: cache directory tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).site_cache_path
-
-
-def user_cache_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: cache path tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).user_cache_path
-
-
-def user_state_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    roaming: bool = False,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param roaming: See `roaming <platformdirs.api.PlatformDirsABC.roaming>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: state path tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        roaming=roaming,
-        ensure_exists=ensure_exists,
-    ).user_state_path
-
-
-def user_log_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: log path tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).user_log_path
-
-
-def user_documents_path() -> Path:
-    """:returns: documents a path tied to the user"""
-    return PlatformDirs().user_documents_path
-
-
-def user_downloads_path() -> Path:
-    """:returns: downloads path tied to the user"""
-    return PlatformDirs().user_downloads_path
-
-
-def user_pictures_path() -> Path:
-    """:returns: pictures path tied to the user"""
-    return PlatformDirs().user_pictures_path
-
-
-def user_videos_path() -> Path:
-    """:returns: videos path tied to the user"""
-    return PlatformDirs().user_videos_path
-
-
-def user_music_path() -> Path:
-    """:returns: music path tied to the user"""
-    return PlatformDirs().user_music_path
-
-
-def user_desktop_path() -> Path:
-    """:returns: desktop path tied to the user"""
-    return PlatformDirs().user_desktop_path
-
-
-def user_runtime_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: runtime path tied to the user
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).user_runtime_path
-
-
-def site_runtime_path(
-    appname: str | None = None,
-    appauthor: str | None | Literal[False] = None,
-    version: str | None = None,
-    opinion: bool = True,  # noqa: FBT001, FBT002
-    ensure_exists: bool = False,  # noqa: FBT001, FBT002
-) -> Path:
-    """
-    :param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
-    :param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
-    :param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
-    :param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
-    :param ensure_exists: See `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-    :returns: runtime path shared by users
-    """
-    return PlatformDirs(
-        appname=appname,
-        appauthor=appauthor,
-        version=version,
-        opinion=opinion,
-        ensure_exists=ensure_exists,
-    ).site_runtime_path
-
-
-__all__ = [
-    "AppDirs",
-    "PlatformDirs",
-    "PlatformDirsABC",
-    "__version__",
-    "__version_info__",
-    "site_cache_dir",
-    "site_cache_path",
-    "site_config_dir",
-    "site_config_path",
-    "site_data_dir",
-    "site_data_path",
-    "site_runtime_dir",
-    "site_runtime_path",
-    "user_cache_dir",
-    "user_cache_path",
-    "user_config_dir",
-    "user_config_path",
-    "user_data_dir",
-    "user_data_path",
-    "user_desktop_dir",
-    "user_desktop_path",
-    "user_documents_dir",
-    "user_documents_path",
-    "user_downloads_dir",
-    "user_downloads_path",
-    "user_log_dir",
-    "user_log_path",
-    "user_music_dir",
-    "user_music_path",
-    "user_pictures_dir",
-    "user_pictures_path",
-    "user_runtime_dir",
-    "user_runtime_path",
-    "user_state_dir",
-    "user_state_path",
-    "user_videos_dir",
-    "user_videos_path",
-]
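
Each module-level helper above builds a fresh ``PlatformDirs`` instance per
call, so holding one instance is the cheaper pattern when several directories
are needed. A usage sketch, assuming the PyPI ``platformdirs`` package (paths
shown are illustrative Linux results):

    from platformdirs import PlatformDirs

    dirs = PlatformDirs(appname="SuperApp", appauthor="Acme", version="1.0")
    print(dirs.user_data_dir)   # e.g. '/home/user/.local/share/SuperApp/1.0'
    print(dirs.user_cache_dir)  # e.g. '/home/user/.cache/SuperApp/1.0'
    print(dirs.user_log_dir)    # e.g. '/home/user/.local/state/SuperApp/1.0/log'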
diff --git a/pkg_resources/_vendor/platformdirs/__main__.py b/pkg_resources/_vendor/platformdirs/__main__.py
deleted file mode 100644
index 922c521358..0000000000
--- a/pkg_resources/_vendor/platformdirs/__main__.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""Main entry point."""
-
-from __future__ import annotations
-
-from platformdirs import PlatformDirs, __version__
-
-PROPS = (
-    "user_data_dir",
-    "user_config_dir",
-    "user_cache_dir",
-    "user_state_dir",
-    "user_log_dir",
-    "user_documents_dir",
-    "user_downloads_dir",
-    "user_pictures_dir",
-    "user_videos_dir",
-    "user_music_dir",
-    "user_runtime_dir",
-    "site_data_dir",
-    "site_config_dir",
-    "site_cache_dir",
-    "site_runtime_dir",
-)
-
-
-def main() -> None:
-    """Run the main entry point."""
-    app_name = "MyApp"
-    app_author = "MyCompany"
-
-    print(f"-- platformdirs {__version__} --")  # noqa: T201
-
-    print("-- app dirs (with optional 'version')")  # noqa: T201
-    dirs = PlatformDirs(app_name, app_author, version="1.0")
-    for prop in PROPS:
-        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
-
-    print("\n-- app dirs (without optional 'version')")  # noqa: T201
-    dirs = PlatformDirs(app_name, app_author)
-    for prop in PROPS:
-        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
-
-    print("\n-- app dirs (without optional 'appauthor')")  # noqa: T201
-    dirs = PlatformDirs(app_name)
-    for prop in PROPS:
-        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
-
-    print("\n-- app dirs (with disabled 'appauthor')")  # noqa: T201
-    dirs = PlatformDirs(app_name, appauthor=False)
-    for prop in PROPS:
-        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201
-
-
-if __name__ == "__main__":
-    main()
diff --git a/pkg_resources/_vendor/platformdirs/android.py b/pkg_resources/_vendor/platformdirs/android.py
deleted file mode 100644
index afd3141c72..0000000000
--- a/pkg_resources/_vendor/platformdirs/android.py
+++ /dev/null
@@ -1,249 +0,0 @@
-"""Android."""
-
-from __future__ import annotations
-
-import os
-import re
-import sys
-from functools import lru_cache
-from typing import TYPE_CHECKING, cast
-
-from .api import PlatformDirsABC
-
-
-class Android(PlatformDirsABC):
-    """
-    Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_.
-
-    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`, `version
-    <platformdirs.api.PlatformDirsABC.version>`, `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-
-    """
-
-    @property
-    def user_data_dir(self) -> str:
-        """:return: data directory tied to the user, e.g. ``/data/user///files/``"""
-        return self._append_app_name_and_version(cast(str, _android_folder()), "files")
-
-    @property
-    def site_data_dir(self) -> str:
-        """:return: data directory shared by users, same as `user_data_dir`"""
-        return self.user_data_dir
-
-    @property
-    def user_config_dir(self) -> str:
-        """
-        :return: config directory tied to the user, e.g. \
-        ``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
-        """
-        return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs")
-
-    @property
-    def site_config_dir(self) -> str:
-        """:return: config directory shared by the users, same as `user_config_dir`"""
-        return self.user_config_dir
-
-    @property
-    def user_cache_dir(self) -> str:
-        """:return: cache directory tied to the user, e.g.,``/data/user///cache/``"""
-        return self._append_app_name_and_version(cast(str, _android_folder()), "cache")
-
-    @property
-    def site_cache_dir(self) -> str:
-        """:return: cache directory shared by users, same as `user_cache_dir`"""
-        return self.user_cache_dir
-
-    @property
-    def user_state_dir(self) -> str:
-        """:return: state directory tied to the user, same as `user_data_dir`"""
-        return self.user_data_dir
-
-    @property
-    def user_log_dir(self) -> str:
-        """
-        :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
-          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
-        """
-        path = self.user_cache_dir
-        if self.opinion:
-            path = os.path.join(path, "log")  # noqa: PTH118
-        return path
-
-    @property
-    def user_documents_dir(self) -> str:
-        """:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``"""
-        return _android_documents_folder()
-
-    @property
-    def user_downloads_dir(self) -> str:
-        """:return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``"""
-        return _android_downloads_folder()
-
-    @property
-    def user_pictures_dir(self) -> str:
-        """:return: pictures directory tied to the user e.g. ``/storage/emulated/0/Pictures``"""
-        return _android_pictures_folder()
-
-    @property
-    def user_videos_dir(self) -> str:
-        """:return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``"""
-        return _android_videos_folder()
-
-    @property
-    def user_music_dir(self) -> str:
-        """:return: music directory tied to the user e.g. ``/storage/emulated/0/Music``"""
-        return _android_music_folder()
-
-    @property
-    def user_desktop_dir(self) -> str:
-        """:return: desktop directory tied to the user e.g. ``/storage/emulated/0/Desktop``"""
-        return "/storage/emulated/0/Desktop"
-
-    @property
-    def user_runtime_dir(self) -> str:
-        """
-        :return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
-          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
-        """
-        path = self.user_cache_dir
-        if self.opinion:
-            path = os.path.join(path, "tmp")  # noqa: PTH118
-        return path
-
-    @property
-    def site_runtime_dir(self) -> str:
-        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
-        return self.user_runtime_dir
-
-
-@lru_cache(maxsize=1)
-def _android_folder() -> str | None:  # noqa: C901, PLR0912
-    """:return: base folder for the Android OS or None if it cannot be found"""
-    result: str | None = None
-    # type checker isn't happy with our "import android", just don't do this when type checking see
-    # https://stackoverflow.com/a/61394121
-    if not TYPE_CHECKING:
-        try:
-            # First try to get a path to android app using python4android (if available)...
-            from android import mActivity  # noqa: PLC0415
-
-            context = cast("android.content.Context", mActivity.getApplicationContext())  # noqa: F821
-            result = context.getFilesDir().getParentFile().getAbsolutePath()
-        except Exception:  # noqa: BLE001
-            result = None
-    if result is None:
-        try:
-            # ...and fall back to using plain pyjnius, if python4android isn't available or doesn't deliver any useful
-            # result...
-            from jnius import autoclass  # noqa: PLC0415
-
-            context = autoclass("android.content.Context")
-            result = context.getFilesDir().getParentFile().getAbsolutePath()
-        except Exception:  # noqa: BLE001
-            result = None
-    if result is None:
-        # and if that fails, too, find an android folder looking at path on the sys.path
-        # warning: only works for apps installed under /data, not adopted storage etc.
-        pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
-        for path in sys.path:
-            if pattern.match(path):
-                result = path.split("/files")[0]
-                break
-        else:
-            result = None
-    if result is None:
-        # one last try: find an android folder looking at path on the sys.path taking adopted storage paths into
-        # account
-        pattern = re.compile(r"/mnt/expand/[a-fA-F0-9-]{36}/(data|user/\d+)/(.+)/files")
-        for path in sys.path:
-            if pattern.match(path):
-                result = path.split("/files")[0]
-                break
-        else:
-            result = None
-    return result
-
-
-@lru_cache(maxsize=1)
-def _android_documents_folder() -> str:
-    """:return: documents folder for the Android OS"""
-    # Get directories with pyjnius
-    try:
-        from jnius import autoclass  # noqa: PLC0415
-
-        context = autoclass("android.content.Context")
-        environment = autoclass("android.os.Environment")
-        documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
-    except Exception:  # noqa: BLE001
-        documents_dir = "/storage/emulated/0/Documents"
-
-    return documents_dir
-
-
-@lru_cache(maxsize=1)
-def _android_downloads_folder() -> str:
-    """:return: downloads folder for the Android OS"""
-    # Get directories with pyjnius
-    try:
-        from jnius import autoclass  # noqa: PLC0415
-
-        context = autoclass("android.content.Context")
-        environment = autoclass("android.os.Environment")
-        downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
-    except Exception:  # noqa: BLE001
-        downloads_dir = "/storage/emulated/0/Downloads"
-
-    return downloads_dir
-
-
-@lru_cache(maxsize=1)
-def _android_pictures_folder() -> str:
-    """:return: pictures folder for the Android OS"""
-    # Get directories with pyjnius
-    try:
-        from jnius import autoclass  # noqa: PLC0415
-
-        context = autoclass("android.content.Context")
-        environment = autoclass("android.os.Environment")
-        pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath()
-    except Exception:  # noqa: BLE001
-        pictures_dir = "/storage/emulated/0/Pictures"
-
-    return pictures_dir
-
-
-@lru_cache(maxsize=1)
-def _android_videos_folder() -> str:
-    """:return: videos folder for the Android OS"""
-    # Get directories with pyjnius
-    try:
-        from jnius import autoclass  # noqa: PLC0415
-
-        context = autoclass("android.content.Context")
-        environment = autoclass("android.os.Environment")
-        videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath()
-    except Exception:  # noqa: BLE001
-        videos_dir = "/storage/emulated/0/DCIM/Camera"
-
-    return videos_dir
-
-
-@lru_cache(maxsize=1)
-def _android_music_folder() -> str:
-    """:return: music folder for the Android OS"""
-    # Get directories with pyjnius
-    try:
-        from jnius import autoclass  # noqa: PLC0415
-
-        context = autoclass("android.content.Context")
-        environment = autoclass("android.os.Environment")
-        music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath()
-    except Exception:  # noqa: BLE001
-        music_dir = "/storage/emulated/0/Music"
-
-    return music_dir
-
-
-__all__ = [
-    "Android",
-]
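The deleted `_android_folder` above tries three strategies in order: python4android's `mActivity`, pyjnius' `autoclass`, and finally pattern-matching the interpreter's import paths. A minimal, self-contained sketch of that last strategy, assuming only the standard library (the function name is ours; off-device it simply returns None):

    from __future__ import annotations

    import re
    import sys

    def guess_android_app_root() -> str | None:
        """Best-effort port of the sys.path fallback used above."""
        patterns = [
            # apps installed under /data
            re.compile(r"/data/(data|user/\d+)/(.+)/files"),
            # apps moved to adopted storage, mounted under /mnt/expand/<uuid>/
            re.compile(r"/mnt/expand/[a-fA-F0-9-]{36}/(data|user/\d+)/(.+)/files"),
        ]
        for pattern in patterns:
            for path in sys.path:
                if pattern.match(path):
                    return path.split("/files")[0]
        return None

    print(guess_android_app_root())  # prints None on a desktop interpreter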
diff --git a/pkg_resources/_vendor/platformdirs/api.py b/pkg_resources/_vendor/platformdirs/api.py
deleted file mode 100644
index c50caa648a..0000000000
--- a/pkg_resources/_vendor/platformdirs/api.py
+++ /dev/null
@@ -1,292 +0,0 @@
-"""Base API."""
-
-from __future__ import annotations
-
-import os
-from abc import ABC, abstractmethod
-from pathlib import Path
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Iterator, Literal
-
-
-class PlatformDirsABC(ABC):  # noqa: PLR0904
-    """Abstract base class for platform directories."""
-
-    def __init__(  # noqa: PLR0913, PLR0917
-        self,
-        appname: str | None = None,
-        appauthor: str | None | Literal[False] = None,
-        version: str | None = None,
-        roaming: bool = False,  # noqa: FBT001, FBT002
-        multipath: bool = False,  # noqa: FBT001, FBT002
-        opinion: bool = True,  # noqa: FBT001, FBT002
-        ensure_exists: bool = False,  # noqa: FBT001, FBT002
-    ) -> None:
-        """
-        Create a new platform directory.
-
-        :param appname: See `appname`.
-        :param appauthor: See `appauthor`.
-        :param version: See `version`.
-        :param roaming: See `roaming`.
-        :param multipath: See `multipath`.
-        :param opinion: See `opinion`.
-        :param ensure_exists: See `ensure_exists`.
-
-        """
-        self.appname = appname  #: The name of the application.
-        self.appauthor = appauthor
-        """
-        The name of the app author or distributing body for this application.
-
-        Typically, it is the owning company name. Defaults to `appname`. You may pass ``False`` to disable it.
-
-        """
-        self.version = version
-        """
-        An optional version path element to append to the path.
-
-        You might want to use this if you want multiple versions of your app to be able to run independently. If used,
-        this would typically be ``<major>.<minor>``.
-
-        """
-        self.roaming = roaming
-        """
-        Whether to use the roaming appdata directory on Windows.
-
-        That means that for users on a Windows network setup for roaming profiles, this user data will be synced on
-        login (see
-        `here <https://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_).
-
-        """
-        self.multipath = multipath
-        """
-        An optional parameter which indicates that the entire list of data dirs should be returned.
-
-        By default, only the first item is returned.
-
-        """
-        self.opinion = opinion  #: A flag indicating whether to use opinionated values.
-        self.ensure_exists = ensure_exists
-        """
-        Optionally create the directory (and any missing parents) upon access if it does not exist.
-
-        By default, no directories are created.
-
-        """
-
-    def _append_app_name_and_version(self, *base: str) -> str:
-        params = list(base[1:])
-        if self.appname:
-            params.append(self.appname)
-            if self.version:
-                params.append(self.version)
-        path = os.path.join(base[0], *params)  # noqa: PTH118
-        self._optionally_create_directory(path)
-        return path
-
-    def _optionally_create_directory(self, path: str) -> None:
-        if self.ensure_exists:
-            Path(path).mkdir(parents=True, exist_ok=True)
-
-    @property
-    @abstractmethod
-    def user_data_dir(self) -> str:
-        """:return: data directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def site_data_dir(self) -> str:
-        """:return: data directory shared by users"""
-
-    @property
-    @abstractmethod
-    def user_config_dir(self) -> str:
-        """:return: config directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def site_config_dir(self) -> str:
-        """:return: config directory shared by the users"""
-
-    @property
-    @abstractmethod
-    def user_cache_dir(self) -> str:
-        """:return: cache directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def site_cache_dir(self) -> str:
-        """:return: cache directory shared by users"""
-
-    @property
-    @abstractmethod
-    def user_state_dir(self) -> str:
-        """:return: state directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def user_log_dir(self) -> str:
-        """:return: log directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def user_documents_dir(self) -> str:
-        """:return: documents directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def user_downloads_dir(self) -> str:
-        """:return: downloads directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def user_pictures_dir(self) -> str:
-        """:return: pictures directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def user_videos_dir(self) -> str:
-        """:return: videos directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def user_music_dir(self) -> str:
-        """:return: music directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def user_desktop_dir(self) -> str:
-        """:return: desktop directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def user_runtime_dir(self) -> str:
-        """:return: runtime directory tied to the user"""
-
-    @property
-    @abstractmethod
-    def site_runtime_dir(self) -> str:
-        """:return: runtime directory shared by users"""
-
-    @property
-    def user_data_path(self) -> Path:
-        """:return: data path tied to the user"""
-        return Path(self.user_data_dir)
-
-    @property
-    def site_data_path(self) -> Path:
-        """:return: data path shared by users"""
-        return Path(self.site_data_dir)
-
-    @property
-    def user_config_path(self) -> Path:
-        """:return: config path tied to the user"""
-        return Path(self.user_config_dir)
-
-    @property
-    def site_config_path(self) -> Path:
-        """:return: config path shared by the users"""
-        return Path(self.site_config_dir)
-
-    @property
-    def user_cache_path(self) -> Path:
-        """:return: cache path tied to the user"""
-        return Path(self.user_cache_dir)
-
-    @property
-    def site_cache_path(self) -> Path:
-        """:return: cache path shared by users"""
-        return Path(self.site_cache_dir)
-
-    @property
-    def user_state_path(self) -> Path:
-        """:return: state path tied to the user"""
-        return Path(self.user_state_dir)
-
-    @property
-    def user_log_path(self) -> Path:
-        """:return: log path tied to the user"""
-        return Path(self.user_log_dir)
-
-    @property
-    def user_documents_path(self) -> Path:
-        """:return: documents path tied to the user"""
-        return Path(self.user_documents_dir)
-
-    @property
-    def user_downloads_path(self) -> Path:
-        """:return: downloads path tied to the user"""
-        return Path(self.user_downloads_dir)
-
-    @property
-    def user_pictures_path(self) -> Path:
-        """:return: pictures path tied to the user"""
-        return Path(self.user_pictures_dir)
-
-    @property
-    def user_videos_path(self) -> Path:
-        """:return: videos path tied to the user"""
-        return Path(self.user_videos_dir)
-
-    @property
-    def user_music_path(self) -> Path:
-        """:return: music path tied to the user"""
-        return Path(self.user_music_dir)
-
-    @property
-    def user_desktop_path(self) -> Path:
-        """:return: desktop path tied to the user"""
-        return Path(self.user_desktop_dir)
-
-    @property
-    def user_runtime_path(self) -> Path:
-        """:return: runtime path tied to the user"""
-        return Path(self.user_runtime_dir)
-
-    @property
-    def site_runtime_path(self) -> Path:
-        """:return: runtime path shared by users"""
-        return Path(self.site_runtime_dir)
-
-    def iter_config_dirs(self) -> Iterator[str]:
-        """:yield: all user and site configuration directories."""
-        yield self.user_config_dir
-        yield self.site_config_dir
-
-    def iter_data_dirs(self) -> Iterator[str]:
-        """:yield: all user and site data directories."""
-        yield self.user_data_dir
-        yield self.site_data_dir
-
-    def iter_cache_dirs(self) -> Iterator[str]:
-        """:yield: all user and site cache directories."""
-        yield self.user_cache_dir
-        yield self.site_cache_dir
-
-    def iter_runtime_dirs(self) -> Iterator[str]:
-        """:yield: all user and site runtime directories."""
-        yield self.user_runtime_dir
-        yield self.site_runtime_dir
-
-    def iter_config_paths(self) -> Iterator[Path]:
-        """:yield: all user and site configuration paths."""
-        for path in self.iter_config_dirs():
-            yield Path(path)
-
-    def iter_data_paths(self) -> Iterator[Path]:
-        """:yield: all user and site data paths."""
-        for path in self.iter_data_dirs():
-            yield Path(path)
-
-    def iter_cache_paths(self) -> Iterator[Path]:
-        """:yield: all user and site cache paths."""
-        for path in self.iter_cache_dirs():
-            yield Path(path)
-
-    def iter_runtime_paths(self) -> Iterator[Path]:
-        """:yield: all user and site runtime paths."""
-        for path in self.iter_runtime_dirs():
-            yield Path(path)
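`PlatformDirsABC` keeps the abstract surface small: platforms implement only the string-returning ``*_dir`` properties, while the ``*_path`` twins and ``iter_*`` helpers are derived mechanically. A toy sketch of that pattern, assuming nothing beyond the standard library (the class names are invented for illustration):

    from abc import ABC, abstractmethod
    from pathlib import Path

    class DirsABC(ABC):
        @property
        @abstractmethod
        def user_data_dir(self) -> str:
            """Concrete platforms return a plain string here."""

        @property
        def user_data_path(self) -> Path:
            # Derived accessor: only the str property varies per platform.
            return Path(self.user_data_dir)

    class FakePlatform(DirsABC):
        @property
        def user_data_dir(self) -> str:
            return "/tmp/example-app/1.0"

    print(FakePlatform().user_data_path)  # /tmp/example-app/1.0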
diff --git a/pkg_resources/_vendor/platformdirs/macos.py b/pkg_resources/_vendor/platformdirs/macos.py
deleted file mode 100644
index eb1ba5df1d..0000000000
--- a/pkg_resources/_vendor/platformdirs/macos.py
+++ /dev/null
@@ -1,130 +0,0 @@
-"""macOS."""
-
-from __future__ import annotations
-
-import os.path
-import sys
-
-from .api import PlatformDirsABC
-
-
-class MacOS(PlatformDirsABC):
-    """
-    Platform directories for the macOS operating system.
-
-    Follows the guidance from
-    `Apple documentation
-    <https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
-    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`,
-    `version <platformdirs.api.PlatformDirsABC.version>`,
-    `ensure_exists <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-
-    """
-
-    @property
-    def user_data_dir(self) -> str:
-        """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
-        return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support"))  # noqa: PTH111
-
-    @property
-    def site_data_dir(self) -> str:
-        """
-        :return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``.
-          If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
-          will be under the Homebrew prefix, e.g. ``/opt/homebrew/share/$appname/$version``.
-          If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled, and we're in Homebrew,
-          the response is a multi-path string separated by ":", e.g.
-          ``/opt/homebrew/share/$appname/$version:/Library/Application Support/$appname/$version``
-        """
-        is_homebrew = sys.prefix.startswith("/opt/homebrew")
-        path_list = [self._append_app_name_and_version("/opt/homebrew/share")] if is_homebrew else []
-        path_list.append(self._append_app_name_and_version("/Library/Application Support"))
-        if self.multipath:
-            return os.pathsep.join(path_list)
-        return path_list[0]
-
-    @property
-    def user_config_dir(self) -> str:
-        """:return: config directory tied to the user, same as `user_data_dir`"""
-        return self.user_data_dir
-
-    @property
-    def site_config_dir(self) -> str:
-        """:return: config directory shared by the users, same as `site_data_dir`"""
-        return self.site_data_dir
-
-    @property
-    def user_cache_dir(self) -> str:
-        """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
-        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))  # noqa: PTH111
-
-    @property
-    def site_cache_dir(self) -> str:
-        """
-        :return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``.
-          If we're using a Python binary managed by `Homebrew <https://brew.sh>`_, the directory
-          will be under the Homebrew prefix, e.g. ``/opt/homebrew/var/cache/$appname/$version``.
-          If `multipath <platformdirs.api.PlatformDirsABC.multipath>` is enabled, and we're in Homebrew,
-          the response is a multi-path string separated by ":", e.g.
-          ``/opt/homebrew/var/cache/$appname/$version:/Library/Caches/$appname/$version``
-        """
-        is_homebrew = sys.prefix.startswith("/opt/homebrew")
-        path_list = [self._append_app_name_and_version("/opt/homebrew/var/cache")] if is_homebrew else []
-        path_list.append(self._append_app_name_and_version("/Library/Caches"))
-        if self.multipath:
-            return os.pathsep.join(path_list)
-        return path_list[0]
-
-    @property
-    def user_state_dir(self) -> str:
-        """:return: state directory tied to the user, same as `user_data_dir`"""
-        return self.user_data_dir
-
-    @property
-    def user_log_dir(self) -> str:
-        """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
-        return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))  # noqa: PTH111
-
-    @property
-    def user_documents_dir(self) -> str:
-        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
-        return os.path.expanduser("~/Documents")  # noqa: PTH111
-
-    @property
-    def user_downloads_dir(self) -> str:
-        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
-        return os.path.expanduser("~/Downloads")  # noqa: PTH111
-
-    @property
-    def user_pictures_dir(self) -> str:
-        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
-        return os.path.expanduser("~/Pictures")  # noqa: PTH111
-
-    @property
-    def user_videos_dir(self) -> str:
-        """:return: videos directory tied to the user, e.g. ``~/Movies``"""
-        return os.path.expanduser("~/Movies")  # noqa: PTH111
-
-    @property
-    def user_music_dir(self) -> str:
-        """:return: music directory tied to the user, e.g. ``~/Music``"""
-        return os.path.expanduser("~/Music")  # noqa: PTH111
-
-    @property
-    def user_desktop_dir(self) -> str:
-        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
-        return os.path.expanduser("~/Desktop")  # noqa: PTH111
-
-    @property
-    def user_runtime_dir(self) -> str:
-        """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
-        return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))  # noqa: PTH111
-
-    @property
-    def site_runtime_dir(self) -> str:
-        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
-        return self.user_runtime_dir
-
-
-__all__ = [
-    "MacOS",
-]
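The Homebrew branch in ``site_data_dir`` and ``site_cache_dir`` boils down to: prepend the Homebrew location when ``sys.prefix`` lives under ``/opt/homebrew``, then either join every candidate with the path separator (``multipath``) or keep only the first. A standalone sketch of that selection logic (``example-app`` is a placeholder appname):

    import os
    import sys

    def macos_site_data_dirs(appname: str, *, multipath: bool) -> str:
        is_homebrew = sys.prefix.startswith("/opt/homebrew")
        bases = ["/opt/homebrew/share"] if is_homebrew else []
        bases.append("/Library/Application Support")
        paths = [os.path.join(base, appname) for base in bases]
        # multipath=True keeps every candidate, ":"-joined; otherwise only
        # the highest-priority entry is returned.
        return os.pathsep.join(paths) if multipath else paths[0]

    print(macos_site_data_dirs("example-app", multipath=True))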
diff --git a/pkg_resources/_vendor/platformdirs/py.typed b/pkg_resources/_vendor/platformdirs/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/platformdirs/unix.py b/pkg_resources/_vendor/platformdirs/unix.py
deleted file mode 100644
index 9500ade614..0000000000
--- a/pkg_resources/_vendor/platformdirs/unix.py
+++ /dev/null
@@ -1,275 +0,0 @@
-"""Unix."""
-
-from __future__ import annotations
-
-import os
-import sys
-from configparser import ConfigParser
-from pathlib import Path
-from typing import Iterator, NoReturn
-
-from .api import PlatformDirsABC
-
-if sys.platform == "win32":
-
-    def getuid() -> NoReturn:
-        msg = "should only be used on Unix"
-        raise RuntimeError(msg)
-
-else:
-    from os import getuid
-
-
-class Unix(PlatformDirsABC):  # noqa: PLR0904
-    """
-    On Unix/Linux, we follow the `XDG Basedir Spec
-    <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
-
-    The spec allows overriding directories with environment variables. The examples shown are the default values,
-    alongside the name of the environment variable that overrides them. Makes use of the `appname
-    <platformdirs.api.PlatformDirsABC.appname>`, `version <platformdirs.api.PlatformDirsABC.version>`, `multipath
-    <platformdirs.api.PlatformDirsABC.multipath>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
-    <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-
-    """
-
-    @property
-    def user_data_dir(self) -> str:
-        """
-        :return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
-         ``$XDG_DATA_HOME/$appname/$version``
-        """
-        path = os.environ.get("XDG_DATA_HOME", "")
-        if not path.strip():
-            path = os.path.expanduser("~/.local/share")  # noqa: PTH111
-        return self._append_app_name_and_version(path)
-
-    @property
-    def _site_data_dirs(self) -> list[str]:
-        path = os.environ.get("XDG_DATA_DIRS", "")
-        if not path.strip():
-            path = f"/usr/local/share{os.pathsep}/usr/share"
-        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
-
-    @property
-    def site_data_dir(self) -> str:
-        """
-        :return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
-         enabled and ``XDG_DATA_DIRS`` is set and a multi path the response is also a multi path separated by the
-         OS path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
-        """
-        # XDG default for $XDG_DATA_DIRS; only first, if multipath is False
-        dirs = self._site_data_dirs
-        if not self.multipath:
-            return dirs[0]
-        return os.pathsep.join(dirs)
-
-    @property
-    def user_config_dir(self) -> str:
-        """
-        :return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
-         ``$XDG_CONFIG_HOME/$appname/$version``
-        """
-        path = os.environ.get("XDG_CONFIG_HOME", "")
-        if not path.strip():
-            path = os.path.expanduser("~/.config")  # noqa: PTH111
-        return self._append_app_name_and_version(path)
-
-    @property
-    def _site_config_dirs(self) -> list[str]:
-        path = os.environ.get("XDG_CONFIG_DIRS", "")
-        if not path.strip():
-            path = "/etc/xdg"
-        return [self._append_app_name_and_version(p) for p in path.split(os.pathsep)]
-
-    @property
-    def site_config_dir(self) -> str:
-        """
-        :return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
-         is enabled and ``XDG_CONFIG_DIRS`` is set and a multi path the response is also a multi path separated by
-         the OS path separator), e.g. ``/etc/xdg/$appname/$version``
-        """
-        # XDG default for $XDG_CONFIG_DIRS; only first, if multipath is False
-        dirs = self._site_config_dirs
-        if not self.multipath:
-            return dirs[0]
-        return os.pathsep.join(dirs)
-
-    @property
-    def user_cache_dir(self) -> str:
-        """
-        :return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
-         ``$XDG_CACHE_HOME/$appname/$version``
-        """
-        path = os.environ.get("XDG_CACHE_HOME", "")
-        if not path.strip():
-            path = os.path.expanduser("~/.cache")  # noqa: PTH111
-        return self._append_app_name_and_version(path)
-
-    @property
-    def site_cache_dir(self) -> str:
-        """:return: cache directory shared by users, e.g. ``/var/cache/$appname/$version``"""
-        return self._append_app_name_and_version("/var/cache")
-
-    @property
-    def user_state_dir(self) -> str:
-        """
-        :return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
-         ``$XDG_STATE_HOME/$appname/$version``
-        """
-        path = os.environ.get("XDG_STATE_HOME", "")
-        if not path.strip():
-            path = os.path.expanduser("~/.local/state")  # noqa: PTH111
-        return self._append_app_name_and_version(path)
-
-    @property
-    def user_log_dir(self) -> str:
-        """:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it"""
-        path = self.user_state_dir
-        if self.opinion:
-            path = os.path.join(path, "log")  # noqa: PTH118
-            self._optionally_create_directory(path)
-        return path
-
-    @property
-    def user_documents_dir(self) -> str:
-        """:return: documents directory tied to the user, e.g. ``~/Documents``"""
-        return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents")
-
-    @property
-    def user_downloads_dir(self) -> str:
-        """:return: downloads directory tied to the user, e.g. ``~/Downloads``"""
-        return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads")
-
-    @property
-    def user_pictures_dir(self) -> str:
-        """:return: pictures directory tied to the user, e.g. ``~/Pictures``"""
-        return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures")
-
-    @property
-    def user_videos_dir(self) -> str:
-        """:return: videos directory tied to the user, e.g. ``~/Videos``"""
-        return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos")
-
-    @property
-    def user_music_dir(self) -> str:
-        """:return: music directory tied to the user, e.g. ``~/Music``"""
-        return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music")
-
-    @property
-    def user_desktop_dir(self) -> str:
-        """:return: desktop directory tied to the user, e.g. ``~/Desktop``"""
-        return _get_user_media_dir("XDG_DESKTOP_DIR", "~/Desktop")
-
-    @property
-    def user_runtime_dir(self) -> str:
-        """
-        :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
-         ``$XDG_RUNTIME_DIR/$appname/$version``.
-
-         For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/user/$(id -u)/$appname/$version`` if it
-         exists, otherwise ``/tmp/runtime-$(id -u)/$appname/$version``, if ``$XDG_RUNTIME_DIR``
-         is not set.
-        """
-        path = os.environ.get("XDG_RUNTIME_DIR", "")
-        if not path.strip():
-            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
-                path = f"/var/run/user/{getuid()}"
-                if not Path(path).exists():
-                    path = f"/tmp/runtime-{getuid()}"  # noqa: S108
-            else:
-                path = f"/run/user/{getuid()}"
-        return self._append_app_name_and_version(path)
-
-    @property
-    def site_runtime_dir(self) -> str:
-        """
-        :return: runtime directory shared by users, e.g. ``/run/$appname/$version`` or \
-        ``$XDG_RUNTIME_DIR/$appname/$version``.
-
-        Note that this behaves almost exactly like `user_runtime_dir` if ``$XDG_RUNTIME_DIR`` is set, but will
-        fall back to paths associated with the root user instead of a regular logged-in user if it's not set.
-
-        If you wish to ensure that a logged-in root user path is returned e.g. ``/run/user/0``, use `user_runtime_dir`
-        instead.
-
-        For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/$appname/$version`` if ``$XDG_RUNTIME_DIR`` is not set.
-        """
-        path = os.environ.get("XDG_RUNTIME_DIR", "")
-        if not path.strip():
-            if sys.platform.startswith(("freebsd", "openbsd", "netbsd")):
-                path = "/var/run"
-            else:
-                path = "/run"
-        return self._append_app_name_and_version(path)
-
-    @property
-    def site_data_path(self) -> Path:
-        """:return: data path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
-        return self._first_item_as_path_if_multipath(self.site_data_dir)
-
-    @property
-    def site_config_path(self) -> Path:
-        """:return: config path shared by the users, returns the first item, even if ``multipath`` is set to ``True``"""
-        return self._first_item_as_path_if_multipath(self.site_config_dir)
-
-    @property
-    def site_cache_path(self) -> Path:
-        """:return: cache path shared by users. Only return the first item, even if ``multipath`` is set to ``True``"""
-        return self._first_item_as_path_if_multipath(self.site_cache_dir)
-
-    def _first_item_as_path_if_multipath(self, directory: str) -> Path:
-        if self.multipath:
-            # If multipath is True, the first path is returned.
-            directory = directory.split(os.pathsep)[0]
-        return Path(directory)
-
-    def iter_config_dirs(self) -> Iterator[str]:
-        """:yield: all user and site configuration directories."""
-        yield self.user_config_dir
-        yield from self._site_config_dirs
-
-    def iter_data_dirs(self) -> Iterator[str]:
-        """:yield: all user and site data directories."""
-        yield self.user_data_dir
-        yield from self._site_data_dirs
-
-
-def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str:
-    media_dir = _get_user_dirs_folder(env_var)
-    if media_dir is None:
-        media_dir = os.environ.get(env_var, "").strip()
-        if not media_dir:
-            media_dir = os.path.expanduser(fallback_tilde_path)  # noqa: PTH111
-
-    return media_dir
-
-
-def _get_user_dirs_folder(key: str) -> str | None:
-    """
-    Return directory from user-dirs.dirs config file.
-
-    See https://freedesktop.org/wiki/Software/xdg-user-dirs/.
-
-    """
-    user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs"
-    if user_dirs_config_path.exists():
-        parser = ConfigParser()
-
-        with user_dirs_config_path.open() as stream:
-            # Add fake section header, so ConfigParser doesn't complain
-            parser.read_string(f"[top]\n{stream.read()}")
-
-        if key not in parser["top"]:
-            return None
-
-        path = parser["top"][key].strip('"')
-        # Handle relative home paths
-        return path.replace("$HOME", os.path.expanduser("~"))  # noqa: PTH111
-
-    return None
-
-
-__all__ = [
-    "Unix",
-]
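``user-dirs.dirs`` is a sectionless key=value file, which ``ConfigParser`` rejects as-is; ``_get_user_dirs_folder`` therefore prepends a fake ``[top]`` header before parsing and expands ``$HOME`` by hand. The same trick in isolation, on an inline sample instead of a real config file:

    import os
    from configparser import ConfigParser

    sample = 'XDG_DOCUMENTS_DIR="$HOME/Documents"\n'

    parser = ConfigParser()
    # Fake section header so ConfigParser accepts bare key=value lines.
    parser.read_string("[top]\n" + sample)

    raw = parser["top"]["XDG_DOCUMENTS_DIR"].strip('"')
    print(raw.replace("$HOME", os.path.expanduser("~")))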
diff --git a/pkg_resources/_vendor/platformdirs/version.py b/pkg_resources/_vendor/platformdirs/version.py
deleted file mode 100644
index 6483ddce0b..0000000000
--- a/pkg_resources/_vendor/platformdirs/version.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# file generated by setuptools_scm
-# don't change, don't track in version control
-TYPE_CHECKING = False
-if TYPE_CHECKING:
-    from typing import Tuple, Union
-    VERSION_TUPLE = Tuple[Union[int, str], ...]
-else:
-    VERSION_TUPLE = object
-
-version: str
-__version__: str
-__version_tuple__: VERSION_TUPLE
-version_tuple: VERSION_TUPLE
-
-__version__ = version = '4.2.2'
-__version_tuple__ = version_tuple = (4, 2, 2)
diff --git a/pkg_resources/_vendor/platformdirs/windows.py b/pkg_resources/_vendor/platformdirs/windows.py
deleted file mode 100644
index d7bc96091a..0000000000
--- a/pkg_resources/_vendor/platformdirs/windows.py
+++ /dev/null
@@ -1,272 +0,0 @@
-"""Windows."""
-
-from __future__ import annotations
-
-import os
-import sys
-from functools import lru_cache
-from typing import TYPE_CHECKING
-
-from .api import PlatformDirsABC
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-
-
-class Windows(PlatformDirsABC):
-    """
-    `MSDN on where to store app data files <https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid>`_.
-
-    Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>`, `appauthor
-    <platformdirs.api.PlatformDirsABC.appauthor>`, `version <platformdirs.api.PlatformDirsABC.version>`, `roaming
-    <platformdirs.api.PlatformDirsABC.roaming>`, `opinion <platformdirs.api.PlatformDirsABC.opinion>`, `ensure_exists
-    <platformdirs.api.PlatformDirsABC.ensure_exists>`.
-
-    """
-
-    @property
-    def user_data_dir(self) -> str:
-        """
-        :return: data directory tied to the user, e.g.
-         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
-         ``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
-        """
-        const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
-        path = os.path.normpath(get_win_folder(const))
-        return self._append_parts(path)
-
-    def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
-        params = []
-        if self.appname:
-            if self.appauthor is not False:
-                author = self.appauthor or self.appname
-                params.append(author)
-            params.append(self.appname)
-            if opinion_value is not None and self.opinion:
-                params.append(opinion_value)
-            if self.version:
-                params.append(self.version)
-        path = os.path.join(path, *params)  # noqa: PTH118
-        self._optionally_create_directory(path)
-        return path
-
-    @property
-    def site_data_dir(self) -> str:
-        """:return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
-        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
-        return self._append_parts(path)
-
-    @property
-    def user_config_dir(self) -> str:
-        """:return: config directory tied to the user, same as `user_data_dir`"""
-        return self.user_data_dir
-
-    @property
-    def site_config_dir(self) -> str:
-        """:return: config directory shared by the users, same as `site_data_dir`"""
-        return self.site_data_dir
-
-    @property
-    def user_cache_dir(self) -> str:
-        """
-        :return: cache directory tied to the user (when opinionated, with a ``Cache`` folder inside ``$appname``), e.g.
-         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
-        """
-        path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
-        return self._append_parts(path, opinion_value="Cache")
-
-    @property
-    def site_cache_dir(self) -> str:
-        """:return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``"""
-        path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
-        return self._append_parts(path, opinion_value="Cache")
-
-    @property
-    def user_state_dir(self) -> str:
-        """:return: state directory tied to the user, same as `user_data_dir`"""
-        return self.user_data_dir
-
-    @property
-    def user_log_dir(self) -> str:
-        """:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it"""
-        path = self.user_data_dir
-        if self.opinion:
-            path = os.path.join(path, "Logs")  # noqa: PTH118
-            self._optionally_create_directory(path)
-        return path
-
-    @property
-    def user_documents_dir(self) -> str:
-        """:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``"""
-        return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))
-
-    @property
-    def user_downloads_dir(self) -> str:
-        """:return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``"""
-        return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS"))
-
-    @property
-    def user_pictures_dir(self) -> str:
-        """:return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``"""
-        return os.path.normpath(get_win_folder("CSIDL_MYPICTURES"))
-
-    @property
-    def user_videos_dir(self) -> str:
-        """:return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``"""
-        return os.path.normpath(get_win_folder("CSIDL_MYVIDEO"))
-
-    @property
-    def user_music_dir(self) -> str:
-        """:return: music directory tied to the user e.g. ``%USERPROFILE%\\Music``"""
-        return os.path.normpath(get_win_folder("CSIDL_MYMUSIC"))
-
-    @property
-    def user_desktop_dir(self) -> str:
-        """:return: desktop directory tied to the user, e.g. ``%USERPROFILE%\\Desktop``"""
-        return os.path.normpath(get_win_folder("CSIDL_DESKTOPDIRECTORY"))
-
-    @property
-    def user_runtime_dir(self) -> str:
-        """
-        :return: runtime directory tied to the user, e.g.
-         ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
-        """
-        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))  # noqa: PTH118
-        return self._append_parts(path)
-
-    @property
-    def site_runtime_dir(self) -> str:
-        """:return: runtime directory shared by users, same as `user_runtime_dir`"""
-        return self.user_runtime_dir
-
-
-def get_win_folder_from_env_vars(csidl_name: str) -> str:
-    """Get folder from environment variables."""
-    result = get_win_folder_if_csidl_name_not_env_var(csidl_name)
-    if result is not None:
-        return result
-
-    env_var_name = {
-        "CSIDL_APPDATA": "APPDATA",
-        "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
-        "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
-    }.get(csidl_name)
-    if env_var_name is None:
-        msg = f"Unknown CSIDL name: {csidl_name}"
-        raise ValueError(msg)
-    result = os.environ.get(env_var_name)
-    if result is None:
-        msg = f"Unset environment variable: {env_var_name}"
-        raise ValueError(msg)
-    return result
-
-
-def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None:
-    """Get a folder for a CSIDL name that does not exist as an environment variable."""
-    if csidl_name == "CSIDL_PERSONAL":
-        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents")  # noqa: PTH118
-
-    if csidl_name == "CSIDL_DOWNLOADS":
-        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads")  # noqa: PTH118
-
-    if csidl_name == "CSIDL_MYPICTURES":
-        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures")  # noqa: PTH118
-
-    if csidl_name == "CSIDL_MYVIDEO":
-        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos")  # noqa: PTH118
-
-    if csidl_name == "CSIDL_MYMUSIC":
-        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music")  # noqa: PTH118
-    return None
-
-
-def get_win_folder_from_registry(csidl_name: str) -> str:
-    """
-    Get folder from the registry.
-
-    This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer
-    for all CSIDL_* names.
-
-    """
-    shell_folder_name = {
-        "CSIDL_APPDATA": "AppData",
-        "CSIDL_COMMON_APPDATA": "Common AppData",
-        "CSIDL_LOCAL_APPDATA": "Local AppData",
-        "CSIDL_PERSONAL": "Personal",
-        "CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}",
-        "CSIDL_MYPICTURES": "My Pictures",
-        "CSIDL_MYVIDEO": "My Video",
-        "CSIDL_MYMUSIC": "My Music",
-    }.get(csidl_name)
-    if shell_folder_name is None:
-        msg = f"Unknown CSIDL name: {csidl_name}"
-        raise ValueError(msg)
-    if sys.platform != "win32":  # only needed for mypy type checker to know that this code runs only on Windows
-        raise NotImplementedError
-    import winreg  # noqa: PLC0415
-
-    key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
-    directory, _ = winreg.QueryValueEx(key, shell_folder_name)
-    return str(directory)
-
-
-def get_win_folder_via_ctypes(csidl_name: str) -> str:
-    """Get folder with ctypes."""
-    # There is no 'CSIDL_DOWNLOADS'.
-    # Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead.
-    # https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid
-
-    import ctypes  # noqa: PLC0415
-
-    csidl_const = {
-        "CSIDL_APPDATA": 26,
-        "CSIDL_COMMON_APPDATA": 35,
-        "CSIDL_LOCAL_APPDATA": 28,
-        "CSIDL_PERSONAL": 5,
-        "CSIDL_MYPICTURES": 39,
-        "CSIDL_MYVIDEO": 14,
-        "CSIDL_MYMUSIC": 13,
-        "CSIDL_DOWNLOADS": 40,
-        "CSIDL_DESKTOPDIRECTORY": 16,
-    }.get(csidl_name)
-    if csidl_const is None:
-        msg = f"Unknown CSIDL name: {csidl_name}"
-        raise ValueError(msg)
-
-    buf = ctypes.create_unicode_buffer(1024)
-    windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
-    windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
-
-    # Downgrade to short path name if it has high-bit chars.
-    if any(ord(c) > 255 for c in buf):  # noqa: PLR2004
-        buf2 = ctypes.create_unicode_buffer(1024)
-        if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
-            buf = buf2
-
-    if csidl_name == "CSIDL_DOWNLOADS":
-        return os.path.join(buf.value, "Downloads")  # noqa: PTH118
-
-    return buf.value
-
-
-def _pick_get_win_folder() -> Callable[[str], str]:
-    try:
-        import ctypes  # noqa: PLC0415
-    except ImportError:
-        pass
-    else:
-        if hasattr(ctypes, "windll"):
-            return get_win_folder_via_ctypes
-    try:
-        import winreg  # noqa: PLC0415, F401
-    except ImportError:
-        return get_win_folder_from_env_vars
-    else:
-        return get_win_folder_from_registry
-
-
-get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())
-
-__all__ = [
-    "Windows",
-]
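``_pick_get_win_folder`` probes capabilities once at import time (ctypes with ``windll``, then ``winreg``, then environment variables) and wraps the winner in ``lru_cache`` so each CSIDL name is resolved at most once. A cross-platform sketch of that select-then-memoize pattern, with dummy strategies standing in for the real lookups:

    from functools import lru_cache
    from typing import Callable

    def _via_ctypes(name: str) -> str:
        return f"ctypes:{name}"

    def _via_env(name: str) -> str:
        return f"env:{name}"

    def _pick_strategy() -> Callable[[str], str]:
        # Probe optional backends in priority order, falling back gracefully.
        try:
            import ctypes
        except ImportError:
            return _via_env
        return _via_ctypes if hasattr(ctypes, "windll") else _via_env

    # Resolve the strategy once, then memoize every folder lookup.
    get_folder = lru_cache(maxsize=None)(_pick_strategy())
    print(get_folder("CSIDL_APPDATA"))  # "env:CSIDL_APPDATA" off Windows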
diff --git a/pkg_resources/_vendor/ruff.toml b/pkg_resources/_vendor/ruff.toml
deleted file mode 100644
index 00fee625a5..0000000000
--- a/pkg_resources/_vendor/ruff.toml
+++ /dev/null
@@ -1 +0,0 @@
-exclude = ["*"]
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/INSTALLER b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/LICENSE b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/LICENSE
deleted file mode 100644
index 07806f8af9..0000000000
--- a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/LICENSE
+++ /dev/null
@@ -1,19 +0,0 @@
-This is the MIT license: http://www.opensource.org/licenses/mit-license.php
-
-Copyright (c) Alex Grönholm
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this
-software and associated documentation files (the "Software"), to deal in the Software
-without restriction, including without limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
-to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or
-substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
-INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
-FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/METADATA b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/METADATA
deleted file mode 100644
index 6e5750b485..0000000000
--- a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/METADATA
+++ /dev/null
@@ -1,81 +0,0 @@
-Metadata-Version: 2.1
-Name: typeguard
-Version: 4.3.0
-Summary: Run-time type checker for Python
-Author-email: Alex Grönholm 
-License: MIT
-Project-URL: Documentation, https://typeguard.readthedocs.io/en/latest/
-Project-URL: Change log, https://typeguard.readthedocs.io/en/latest/versionhistory.html
-Project-URL: Source code, https://github.com/agronholm/typeguard
-Project-URL: Issue tracker, https://github.com/agronholm/typeguard/issues
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
-License-File: LICENSE
-Requires-Dist: typing-extensions >=4.10.0
-Requires-Dist: importlib-metadata >=3.6 ; python_version < "3.10"
-Provides-Extra: doc
-Requires-Dist: packaging ; extra == 'doc'
-Requires-Dist: Sphinx >=7 ; extra == 'doc'
-Requires-Dist: sphinx-autodoc-typehints >=1.2.0 ; extra == 'doc'
-Requires-Dist: sphinx-rtd-theme >=1.3.0 ; extra == 'doc'
-Provides-Extra: test
-Requires-Dist: coverage[toml] >=7 ; extra == 'test'
-Requires-Dist: pytest >=7 ; extra == 'test'
-Requires-Dist: mypy >=1.2.0 ; (platform_python_implementation != "PyPy") and extra == 'test'
-
-.. image:: https://github.com/agronholm/typeguard/actions/workflows/test.yml/badge.svg
-  :target: https://github.com/agronholm/typeguard/actions/workflows/test.yml
-  :alt: Build Status
-.. image:: https://coveralls.io/repos/agronholm/typeguard/badge.svg?branch=master&service=github
-  :target: https://coveralls.io/github/agronholm/typeguard?branch=master
-  :alt: Code Coverage
-.. image:: https://readthedocs.org/projects/typeguard/badge/?version=latest
-  :target: https://typeguard.readthedocs.io/en/latest/?badge=latest
-  :alt: Documentation
-
-This library provides run-time type checking for functions defined with
-`PEP 484 <https://www.python.org/dev/peps/pep-0484/>`_ argument (and return) type
-annotations, and any arbitrary objects. It can be used together with static type
-checkers as an additional layer of type safety, to catch type violations that could only
-be detected at run time.
-
-Two principal ways to do type checking are provided:
-
-#. The ``check_type`` function:
-
-   * like ``isinstance()``, but supports arbitrary type annotations (within limits)
-   * can be used as a ``cast()`` replacement, but with actual checking of the value
-#. Code instrumentation:
-
-   * entire modules, or individual functions (via ``@typechecked``) are recompiled, with
-     type checking code injected into them
-   * automatically checks function arguments, return values and assignments to annotated
-     local variables
-   * for generator functions (regular and async), checks yield and send values
-   * requires the original source code of the instrumented module(s) to be accessible
-
-Two options are provided for code instrumentation:
-
-#. the ``@typechecked`` function:
-
-   * can be applied to functions individually
-#. the import hook (``typeguard.install_import_hook()``):
-
-   * automatically instruments targeted modules on import
-   * no manual code changes required in the target modules
-   * requires the import hook to be installed before the targeted modules are imported
-   * may clash with other import hooks
-
-See the documentation_ for further information.
-
-.. _documentation: https://typeguard.readthedocs.io/en/latest/
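The two entry points the description above names are quick to exercise (assuming ``typeguard`` and its dependencies are importable; this diff is removing them from the vendored tree, not from PyPI):

    from typeguard import TypeCheckError, check_type, typechecked

    # check_type: isinstance()-like checking for arbitrary annotations.
    check_type([1, 2, 3], list[int])  # passes silently and returns the value
    try:
        check_type(["a"], list[int])
    except TypeCheckError as exc:
        print(exc)  # reports that item 0 is not an int

    # @typechecked: instruments the function to validate calls at run time.
    @typechecked
    def double(x: int) -> int:
        return x * 2

    try:
        double("2")
    except TypeCheckError as exc:
        print(exc)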
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/RECORD b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/RECORD
deleted file mode 100644
index 801e73347c..0000000000
--- a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/RECORD
+++ /dev/null
@@ -1,34 +0,0 @@
-typeguard-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-typeguard-4.3.0.dist-info/LICENSE,sha256=YWP3mH37ONa8MgzitwsvArhivEESZRbVUu8c1DJH51g,1130
-typeguard-4.3.0.dist-info/METADATA,sha256=z2dcHAp0TwhYCFU5Deh8x31nazElgujUz9tbuP0pjSE,3717
-typeguard-4.3.0.dist-info/RECORD,,
-typeguard-4.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-typeguard-4.3.0.dist-info/entry_points.txt,sha256=qp7NQ1aLtiSgMQqo6gWlfGpy0IIXzoMJmeQTLpzqFZQ,48
-typeguard-4.3.0.dist-info/top_level.txt,sha256=4z28AhuDodwRS_c1J_l8H51t5QuwfTseskYzlxp6grs,10
-typeguard/__init__.py,sha256=Onh4w38elPCjtlcU3JY9k3h70NjsxXIkAflmQn-Z0FY,2071
-typeguard/__pycache__/__init__.cpython-312.pyc,,
-typeguard/__pycache__/_checkers.cpython-312.pyc,,
-typeguard/__pycache__/_config.cpython-312.pyc,,
-typeguard/__pycache__/_decorators.cpython-312.pyc,,
-typeguard/__pycache__/_exceptions.cpython-312.pyc,,
-typeguard/__pycache__/_functions.cpython-312.pyc,,
-typeguard/__pycache__/_importhook.cpython-312.pyc,,
-typeguard/__pycache__/_memo.cpython-312.pyc,,
-typeguard/__pycache__/_pytest_plugin.cpython-312.pyc,,
-typeguard/__pycache__/_suppression.cpython-312.pyc,,
-typeguard/__pycache__/_transformer.cpython-312.pyc,,
-typeguard/__pycache__/_union_transformer.cpython-312.pyc,,
-typeguard/__pycache__/_utils.cpython-312.pyc,,
-typeguard/_checkers.py,sha256=JRrgKicdOEfIBoNEtegYCEIlhpad-a1u1Em7GCj0WCI,31360
-typeguard/_config.py,sha256=nIz8QwDa-oFO3L9O8_6srzlmd99pSby2wOM4Wb7F_B0,2846
-typeguard/_decorators.py,sha256=v6dsIeWvPhExGLP_wXF-RmDUyjZf_Ak28g7gBJ_v0-0,9033
-typeguard/_exceptions.py,sha256=ZIPeiV-FBd5Emw2EaWd2Fvlsrwi4ocwT2fVGBIAtHcQ,1121
-typeguard/_functions.py,sha256=ibgSAKa5ptIm1eR9ARG0BSozAFJPFNASZqhPVyQeqig,10393
-typeguard/_importhook.py,sha256=ugjCDvFcdWMU7UugqlJG91IpVNpEIxtRr-99s0h1k7M,6389
-typeguard/_memo.py,sha256=1juQV_vxnD2JYKbSrebiQuj4oKHz6n67v9pYA-CCISg,1303
-typeguard/_pytest_plugin.py,sha256=-fcSqkv54rIfIF8pDavY5YQPkj4OX8GMt_lL7CQSD4I,4416
-typeguard/_suppression.py,sha256=VQfzxcwIbu3if0f7VBkKM7hkYOA7tNFw9a7jMBsmMg4,2266
-typeguard/_transformer.py,sha256=9Ha7_QhdwoUni_6hvdY-hZbuEergowHrNL2vzHIakFY,44937
-typeguard/_union_transformer.py,sha256=v_42r7-6HuRX2SoFwnyJ-E5PlxXpVeUJPJR1-HU9qSo,1354
-typeguard/_utils.py,sha256=5HhO1rPn5f1M6ymkVAEv7Xmlz1cX-j0OnTMlyHqqrR8,5270
-typeguard/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/WHEEL b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/entry_points.txt b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
deleted file mode 100644
index 47c9d0bd91..0000000000
--- a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/entry_points.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-[pytest11]
-typeguard = typeguard._pytest_plugin
diff --git a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/top_level.txt b/pkg_resources/_vendor/typeguard-4.3.0.dist-info/top_level.txt
deleted file mode 100644
index be5ec23ea2..0000000000
--- a/pkg_resources/_vendor/typeguard-4.3.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-typeguard
diff --git a/pkg_resources/_vendor/typeguard/__init__.py b/pkg_resources/_vendor/typeguard/__init__.py
deleted file mode 100644
index 6781cad094..0000000000
--- a/pkg_resources/_vendor/typeguard/__init__.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import os
-from typing import Any
-
-from ._checkers import TypeCheckerCallable as TypeCheckerCallable
-from ._checkers import TypeCheckLookupCallback as TypeCheckLookupCallback
-from ._checkers import check_type_internal as check_type_internal
-from ._checkers import checker_lookup_functions as checker_lookup_functions
-from ._checkers import load_plugins as load_plugins
-from ._config import CollectionCheckStrategy as CollectionCheckStrategy
-from ._config import ForwardRefPolicy as ForwardRefPolicy
-from ._config import TypeCheckConfiguration as TypeCheckConfiguration
-from ._decorators import typechecked as typechecked
-from ._decorators import typeguard_ignore as typeguard_ignore
-from ._exceptions import InstrumentationWarning as InstrumentationWarning
-from ._exceptions import TypeCheckError as TypeCheckError
-from ._exceptions import TypeCheckWarning as TypeCheckWarning
-from ._exceptions import TypeHintWarning as TypeHintWarning
-from ._functions import TypeCheckFailCallback as TypeCheckFailCallback
-from ._functions import check_type as check_type
-from ._functions import warn_on_error as warn_on_error
-from ._importhook import ImportHookManager as ImportHookManager
-from ._importhook import TypeguardFinder as TypeguardFinder
-from ._importhook import install_import_hook as install_import_hook
-from ._memo import TypeCheckMemo as TypeCheckMemo
-from ._suppression import suppress_type_checks as suppress_type_checks
-from ._utils import Unset as Unset
-
-# Re-export imports so they look like they live directly in this package
-for value in list(locals().values()):
-    if getattr(value, "__module__", "").startswith(f"{__name__}."):
-        value.__module__ = __name__
-
-
-config: TypeCheckConfiguration
-
-
-def __getattr__(name: str) -> Any:
-    if name == "config":
-        from ._config import global_config
-
-        return global_config
-
-    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
-
-
-# Automatically load checker lookup functions unless explicitly disabled
-if "TYPEGUARD_DISABLE_PLUGIN_AUTOLOAD" not in os.environ:
-    load_plugins()
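The lazy ``config`` attribute above relies on the module-level ``__getattr__`` hook from PEP 562, which runs only when normal attribute lookup on the module fails. A self-contained sketch of the mechanism (the ``lazy_mod`` module is fabricated for the demo):

    import sys
    import types
    from typing import Any

    def _build_config() -> dict:
        print("building config lazily")
        return {"debug": False}

    def _module_getattr(name: str) -> Any:
        if name == "config":
            return _build_config()
        raise AttributeError(f"module 'lazy_mod' has no attribute {name!r}")

    mod = types.ModuleType("lazy_mod")
    mod.__getattr__ = _module_getattr  # PEP 562 hook lives in the module dict
    sys.modules["lazy_mod"] = mod

    import lazy_mod
    print(lazy_mod.config)  # __getattr__ fires: "building config lazily"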
diff --git a/pkg_resources/_vendor/typeguard/_checkers.py b/pkg_resources/_vendor/typeguard/_checkers.py
deleted file mode 100644
index 67dd5ad4dc..0000000000
--- a/pkg_resources/_vendor/typeguard/_checkers.py
+++ /dev/null
@@ -1,993 +0,0 @@
-from __future__ import annotations
-
-import collections.abc
-import inspect
-import sys
-import types
-import typing
-import warnings
-from enum import Enum
-from inspect import Parameter, isclass, isfunction
-from io import BufferedIOBase, IOBase, RawIOBase, TextIOBase
-from textwrap import indent
-from typing import (
-    IO,
-    AbstractSet,
-    Any,
-    BinaryIO,
-    Callable,
-    Dict,
-    ForwardRef,
-    List,
-    Mapping,
-    MutableMapping,
-    NewType,
-    Optional,
-    Sequence,
-    Set,
-    TextIO,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-from unittest.mock import Mock
-from weakref import WeakKeyDictionary
-
-try:
-    import typing_extensions
-except ImportError:
-    typing_extensions = None  # type: ignore[assignment]
-
-# Must use this because typing.is_typeddict does not recognize
-# TypedDict from typing_extensions, and as of version 4.12.0
-# typing_extensions.TypedDict is different from typing.TypedDict
-# on all versions.
-from typing_extensions import is_typeddict
-
-from ._config import ForwardRefPolicy
-from ._exceptions import TypeCheckError, TypeHintWarning
-from ._memo import TypeCheckMemo
-from ._utils import evaluate_forwardref, get_stacklevel, get_type_name, qualified_name
-
-if sys.version_info >= (3, 11):
-    from typing import (
-        Annotated,
-        NotRequired,
-        TypeAlias,
-        get_args,
-        get_origin,
-    )
-
-    SubclassableAny = Any
-else:
-    from typing_extensions import (
-        Annotated,
-        NotRequired,
-        TypeAlias,
-        get_args,
-        get_origin,
-    )
-    from typing_extensions import Any as SubclassableAny
-
-if sys.version_info >= (3, 10):
-    from importlib.metadata import entry_points
-    from typing import ParamSpec
-else:
-    from importlib_metadata import entry_points
-    from typing_extensions import ParamSpec
-
-TypeCheckerCallable: TypeAlias = Callable[
-    [Any, Any, Tuple[Any, ...], TypeCheckMemo], Any
-]
-TypeCheckLookupCallback: TypeAlias = Callable[
-    [Any, Tuple[Any, ...], Tuple[Any, ...]], Optional[TypeCheckerCallable]
-]
-
-checker_lookup_functions: list[TypeCheckLookupCallback] = []
-generic_alias_types: tuple[type, ...] = (type(List), type(List[Any]))
-if sys.version_info >= (3, 9):
-    generic_alias_types += (types.GenericAlias,)
-
-protocol_check_cache: WeakKeyDictionary[
-    type[Any], dict[type[Any], TypeCheckError | None]
-] = WeakKeyDictionary()
-
-# Sentinel
-_missing = object()
-
-# Lifted from mypy.sharedparse
-BINARY_MAGIC_METHODS = {
-    "__add__",
-    "__and__",
-    "__cmp__",
-    "__divmod__",
-    "__div__",
-    "__eq__",
-    "__floordiv__",
-    "__ge__",
-    "__gt__",
-    "__iadd__",
-    "__iand__",
-    "__idiv__",
-    "__ifloordiv__",
-    "__ilshift__",
-    "__imatmul__",
-    "__imod__",
-    "__imul__",
-    "__ior__",
-    "__ipow__",
-    "__irshift__",
-    "__isub__",
-    "__itruediv__",
-    "__ixor__",
-    "__le__",
-    "__lshift__",
-    "__lt__",
-    "__matmul__",
-    "__mod__",
-    "__mul__",
-    "__ne__",
-    "__or__",
-    "__pow__",
-    "__radd__",
-    "__rand__",
-    "__rdiv__",
-    "__rfloordiv__",
-    "__rlshift__",
-    "__rmatmul__",
-    "__rmod__",
-    "__rmul__",
-    "__ror__",
-    "__rpow__",
-    "__rrshift__",
-    "__rshift__",
-    "__rsub__",
-    "__rtruediv__",
-    "__rxor__",
-    "__sub__",
-    "__truediv__",
-    "__xor__",
-}
-
-
-def check_callable(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if not callable(value):
-        raise TypeCheckError("is not callable")
-
-    if args:
-        try:
-            signature = inspect.signature(value)
-        except (TypeError, ValueError):
-            return
-
-        argument_types = args[0]
-        if isinstance(argument_types, list) and not any(
-            type(item) is ParamSpec for item in argument_types
-        ):
-            # The callable must not have keyword-only arguments without defaults
-            unfulfilled_kwonlyargs = [
-                param.name
-                for param in signature.parameters.values()
-                if param.kind == Parameter.KEYWORD_ONLY
-                and param.default == Parameter.empty
-            ]
-            if unfulfilled_kwonlyargs:
-                raise TypeCheckError(
-                    f"has mandatory keyword-only arguments in its declaration: "
-                    f'{", ".join(unfulfilled_kwonlyargs)}'
-                )
-
-            num_positional_args = num_mandatory_pos_args = 0
-            has_varargs = False
-            for param in signature.parameters.values():
-                if param.kind in (
-                    Parameter.POSITIONAL_ONLY,
-                    Parameter.POSITIONAL_OR_KEYWORD,
-                ):
-                    num_positional_args += 1
-                    if param.default is Parameter.empty:
-                        num_mandatory_pos_args += 1
-                elif param.kind == Parameter.VAR_POSITIONAL:
-                    has_varargs = True
-
-            if num_mandatory_pos_args > len(argument_types):
-                raise TypeCheckError(
-                    f"has too many mandatory positional arguments in its declaration; "
-                    f"expected {len(argument_types)} but {num_mandatory_pos_args} "
-                    f"mandatory positional argument(s) declared"
-                )
-            elif not has_varargs and num_positional_args < len(argument_types):
-                raise TypeCheckError(
-                    f"has too few arguments in its declaration; expected "
-                    f"{len(argument_types)} but {num_positional_args} argument(s) "
-                    f"declared"
-                )
-
-
-def check_mapping(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if origin_type is Dict or origin_type is dict:
-        if not isinstance(value, dict):
-            raise TypeCheckError("is not a dict")
-    elif origin_type is MutableMapping or origin_type is collections.abc.MutableMapping:
-        if not isinstance(value, collections.abc.MutableMapping):
-            raise TypeCheckError("is not a mutable mapping")
-    elif not isinstance(value, collections.abc.Mapping):
-        raise TypeCheckError("is not a mapping")
-
-    if args:
-        key_type, value_type = args
-        if key_type is not Any or value_type is not Any:
-            samples = memo.config.collection_check_strategy.iterate_samples(
-                value.items()
-            )
-            for k, v in samples:
-                try:
-                    check_type_internal(k, key_type, memo)
-                except TypeCheckError as exc:
-                    exc.append_path_element(f"key {k!r}")
-                    raise
-
-                try:
-                    check_type_internal(v, value_type, memo)
-                except TypeCheckError as exc:
-                    exc.append_path_element(f"value of key {k!r}")
-                    raise
-
-
-def check_typed_dict(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if not isinstance(value, dict):
-        raise TypeCheckError("is not a dict")
-
-    declared_keys = frozenset(origin_type.__annotations__)
-    if hasattr(origin_type, "__required_keys__"):
-        required_keys = set(origin_type.__required_keys__)
-    else:  # py3.8 and lower
-        required_keys = set(declared_keys) if origin_type.__total__ else set()
-
-    existing_keys = set(value)
-    extra_keys = existing_keys - declared_keys
-    if extra_keys:
-        keys_formatted = ", ".join(f'"{key}"' for key in sorted(extra_keys, key=repr))
-        raise TypeCheckError(f"has unexpected extra key(s): {keys_formatted}")
-
-    # Detect NotRequired fields which are hidden by get_type_hints()
-    type_hints: dict[str, type] = {}
-    for key, annotation in origin_type.__annotations__.items():
-        if isinstance(annotation, ForwardRef):
-            annotation = evaluate_forwardref(annotation, memo)
-            if get_origin(annotation) is NotRequired:
-                required_keys.discard(key)
-                annotation = get_args(annotation)[0]
-
-        type_hints[key] = annotation
-
-    missing_keys = required_keys - existing_keys
-    if missing_keys:
-        keys_formatted = ", ".join(f'"{key}"' for key in sorted(missing_keys, key=repr))
-        raise TypeCheckError(f"is missing required key(s): {keys_formatted}")
-
-    for key, argtype in type_hints.items():
-        argvalue = value.get(key, _missing)
-        if argvalue is not _missing:
-            try:
-                check_type_internal(argvalue, argtype, memo)
-            except TypeCheckError as exc:
-                exc.append_path_element(f"value of key {key!r}")
-                raise
-
-
-def check_list(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if not isinstance(value, list):
-        raise TypeCheckError("is not a list")
-
-    if args and args != (Any,):
-        samples = memo.config.collection_check_strategy.iterate_samples(value)
-        for i, v in enumerate(samples):
-            try:
-                check_type_internal(v, args[0], memo)
-            except TypeCheckError as exc:
-                exc.append_path_element(f"item {i}")
-                raise
-
-
-def check_sequence(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if not isinstance(value, collections.abc.Sequence):
-        raise TypeCheckError("is not a sequence")
-
-    if args and args != (Any,):
-        samples = memo.config.collection_check_strategy.iterate_samples(value)
-        for i, v in enumerate(samples):
-            try:
-                check_type_internal(v, args[0], memo)
-            except TypeCheckError as exc:
-                exc.append_path_element(f"item {i}")
-                raise
-
-
-def check_set(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if origin_type is frozenset:
-        if not isinstance(value, frozenset):
-            raise TypeCheckError("is not a frozenset")
-    elif not isinstance(value, AbstractSet):
-        raise TypeCheckError("is not a set")
-
-    if args and args != (Any,):
-        samples = memo.config.collection_check_strategy.iterate_samples(value)
-        for v in samples:
-            try:
-                check_type_internal(v, args[0], memo)
-            except TypeCheckError as exc:
-                exc.append_path_element(f"[{v}]")
-                raise
-
-
-def check_tuple(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    # Specialized check for NamedTuples
-    if field_types := getattr(origin_type, "__annotations__", None):
-        if not isinstance(value, origin_type):
-            raise TypeCheckError(
-                f"is not a named tuple of type {qualified_name(origin_type)}"
-            )
-
-        for name, field_type in field_types.items():
-            try:
-                check_type_internal(getattr(value, name), field_type, memo)
-            except TypeCheckError as exc:
-                exc.append_path_element(f"attribute {name!r}")
-                raise
-
-        return
-    elif not isinstance(value, tuple):
-        raise TypeCheckError("is not a tuple")
-
-    if args:
-        use_ellipsis = args[-1] is Ellipsis
-        tuple_params = args[: -1 if use_ellipsis else None]
-    else:
-        # Unparametrized Tuple or plain tuple
-        return
-
-    if use_ellipsis:
-        element_type = tuple_params[0]
-        samples = memo.config.collection_check_strategy.iterate_samples(value)
-        for i, element in enumerate(samples):
-            try:
-                check_type_internal(element, element_type, memo)
-            except TypeCheckError as exc:
-                exc.append_path_element(f"item {i}")
-                raise
-    elif tuple_params == ((),):
-        if value != ():
-            raise TypeCheckError("is not an empty tuple")
-    else:
-        if len(value) != len(tuple_params):
-            raise TypeCheckError(
-                f"has wrong number of elements (expected {len(tuple_params)}, got "
-                f"{len(value)} instead)"
-            )
-
-        for i, (element, element_type) in enumerate(zip(value, tuple_params)):
-            try:
-                check_type_internal(element, element_type, memo)
-            except TypeCheckError as exc:
-                exc.append_path_element(f"item {i}")
-                raise
-
-
-def check_union(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    errors: dict[str, TypeCheckError] = {}
-    try:
-        for type_ in args:
-            try:
-                check_type_internal(value, type_, memo)
-                return
-            except TypeCheckError as exc:
-                errors[get_type_name(type_)] = exc
-
-        formatted_errors = indent(
-            "\n".join(f"{key}: {error}" for key, error in errors.items()), "  "
-        )
-    finally:
-        del errors  # avoid creating ref cycle
-    raise TypeCheckError(f"did not match any element in the union:\n{formatted_errors}")
-
-
-def check_uniontype(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    errors: dict[str, TypeCheckError] = {}
-    for type_ in args:
-        try:
-            check_type_internal(value, type_, memo)
-            return
-        except TypeCheckError as exc:
-            errors[get_type_name(type_)] = exc
-
-    formatted_errors = indent(
-        "\n".join(f"{key}: {error}" for key, error in errors.items()), "  "
-    )
-    raise TypeCheckError(f"did not match any element in the union:\n{formatted_errors}")
-
-
-def check_class(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if not isclass(value) and not isinstance(value, generic_alias_types):
-        raise TypeCheckError("is not a class")
-
-    if not args:
-        return
-
-    if isinstance(args[0], ForwardRef):
-        expected_class = evaluate_forwardref(args[0], memo)
-    else:
-        expected_class = args[0]
-
-    if expected_class is Any:
-        return
-    elif getattr(expected_class, "_is_protocol", False):
-        check_protocol(value, expected_class, (), memo)
-    elif isinstance(expected_class, TypeVar):
-        check_typevar(value, expected_class, (), memo, subclass_check=True)
-    elif get_origin(expected_class) is Union:
-        errors: dict[str, TypeCheckError] = {}
-        for arg in get_args(expected_class):
-            if arg is Any:
-                return
-
-            try:
-                check_class(value, type, (arg,), memo)
-                return
-            except TypeCheckError as exc:
-                errors[get_type_name(arg)] = exc
-        else:
-            formatted_errors = indent(
-                "\n".join(f"{key}: {error}" for key, error in errors.items()), "  "
-            )
-            raise TypeCheckError(
-                f"did not match any element in the union:\n{formatted_errors}"
-            )
-    elif not issubclass(value, expected_class):  # type: ignore[arg-type]
-        raise TypeCheckError(f"is not a subclass of {qualified_name(expected_class)}")
-
-
-def check_newtype(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    check_type_internal(value, origin_type.__supertype__, memo)
-
-
-def check_instance(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if not isinstance(value, origin_type):
-        raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}")
-
-
-def check_typevar(
-    value: Any,
-    origin_type: TypeVar,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-    *,
-    subclass_check: bool = False,
-) -> None:
-    if origin_type.__bound__ is not None:
-        annotation = (
-            Type[origin_type.__bound__] if subclass_check else origin_type.__bound__
-        )
-        check_type_internal(value, annotation, memo)
-    elif origin_type.__constraints__:
-        for constraint in origin_type.__constraints__:
-            annotation = Type[constraint] if subclass_check else constraint
-            try:
-                check_type_internal(value, annotation, memo)
-            except TypeCheckError:
-                pass
-            else:
-                break
-        else:
-            formatted_constraints = ", ".join(
-                get_type_name(constraint) for constraint in origin_type.__constraints__
-            )
-            raise TypeCheckError(
-                f"does not match any of the constraints " f"({formatted_constraints})"
-            )
-
-
-if typing_extensions is None:
-
-    def _is_literal_type(typ: object) -> bool:
-        return typ is typing.Literal
-
-else:
-
-    def _is_literal_type(typ: object) -> bool:
-        return typ is typing.Literal or typ is typing_extensions.Literal
-
-
-def check_literal(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    def get_literal_args(literal_args: tuple[Any, ...]) -> tuple[Any, ...]:
-        retval: list[Any] = []
-        for arg in literal_args:
-            if _is_literal_type(get_origin(arg)):
-                retval.extend(get_literal_args(arg.__args__))
-            elif arg is None or isinstance(arg, (int, str, bytes, bool, Enum)):
-                retval.append(arg)
-            else:
-                raise TypeError(
-                    f"Illegal literal value: {arg}"
-                )  # TypeError here is deliberate
-
-        return tuple(retval)
-
-    final_args = tuple(get_literal_args(args))
-    try:
-        index = final_args.index(value)
-    except ValueError:
-        pass
-    else:
-        if type(final_args[index]) is type(value):
-            return
-
-    formatted_args = ", ".join(repr(arg) for arg in final_args)
-    raise TypeCheckError(f"is not any of ({formatted_args})") from None
-
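-# Illustrative sketch (hypothetical helper): the exact-type comparison above
-# means equality alone is not enough, since ``True == 1`` and yet ``True`` must
-# not satisfy ``Literal[1]``.
-def _example_literal_check() -> None:  # pragma: no cover
-    from ._functions import check_type
-
-    check_type(1, typing.Literal[1])  # passes: equal value, exact same type
-    # check_type(True, typing.Literal[1]) would raise TypeCheckError, because
-    # bool is not int even though the values compare equal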
-
-def check_literal_string(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    check_type_internal(value, str, memo)
-
-
-def check_typeguard(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    check_type_internal(value, bool, memo)
-
-
-def check_none(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if value is not None:
-        raise TypeCheckError("is not None")
-
-
-def check_number(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if origin_type is complex and not isinstance(value, (complex, float, int)):
-        raise TypeCheckError("is neither complex, float or int")
-    elif origin_type is float and not isinstance(value, (float, int)):
-        raise TypeCheckError("is neither float or int")
-
-
-def check_io(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if origin_type is TextIO or (origin_type is IO and args == (str,)):
-        if not isinstance(value, TextIOBase):
-            raise TypeCheckError("is not a text based I/O object")
-    elif origin_type is BinaryIO or (origin_type is IO and args == (bytes,)):
-        if not isinstance(value, (RawIOBase, BufferedIOBase)):
-            raise TypeCheckError("is not a binary I/O object")
-    elif not isinstance(value, IOBase):
-        raise TypeCheckError("is not an I/O object")
-
-
-def check_protocol(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    subject: type[Any] = value if isclass(value) else type(value)
-
-    if subject in protocol_check_cache:
-        result_map = protocol_check_cache[subject]
-        if origin_type in result_map:
-            if exc := result_map[origin_type]:
-                raise exc
-            else:
-                return
-
-    # Collect a set of methods and non-method attributes present in the protocol
-    ignored_attrs = set(dir(typing.Protocol)) | {
-        "__annotations__",
-        "__non_callable_proto_members__",
-    }
-    expected_methods: dict[str, tuple[Any, Any]] = {}
-    expected_noncallable_members: dict[str, Any] = {}
-    for attrname in dir(origin_type):
-        # Skip attributes present in typing.Protocol
-        if attrname in ignored_attrs:
-            continue
-
-        member = getattr(origin_type, attrname)
-        if callable(member):
-            signature = inspect.signature(member)
-            argtypes = [
-                (p.annotation if p.annotation is not Parameter.empty else Any)
-                for p in signature.parameters.values()
-                if p.kind is not Parameter.KEYWORD_ONLY
-            ] or Ellipsis
-            return_annotation = (
-                signature.return_annotation
-                if signature.return_annotation is not Parameter.empty
-                else Any
-            )
-            expected_methods[attrname] = argtypes, return_annotation
-        else:
-            expected_noncallable_members[attrname] = member
-
-    for attrname, annotation in typing.get_type_hints(origin_type).items():
-        expected_noncallable_members[attrname] = annotation
-
-    subject_annotations = typing.get_type_hints(subject)
-
-    # Check that all required methods are present and their signatures are compatible
-    result_map = protocol_check_cache.setdefault(subject, {})
-    try:
-        for attrname, callable_args in expected_methods.items():
-            try:
-                method = getattr(subject, attrname)
-            except AttributeError:
-                if attrname in subject_annotations:
-                    raise TypeCheckError(
-                        f"is not compatible with the {origin_type.__qualname__} protocol "
-                        f"because its {attrname!r} attribute is not a method"
-                    ) from None
-                else:
-                    raise TypeCheckError(
-                        f"is not compatible with the {origin_type.__qualname__} protocol "
-                        f"because it has no method named {attrname!r}"
-                    ) from None
-
-            if not callable(method):
-                raise TypeCheckError(
-                    f"is not compatible with the {origin_type.__qualname__} protocol "
-                    f"because its {attrname!r} attribute is not a callable"
-                )
-
-            # TODO: raise exception on added keyword-only arguments without defaults
-            try:
-                check_callable(method, Callable, callable_args, memo)
-            except TypeCheckError as exc:
-                raise TypeCheckError(
-                    f"is not compatible with the {origin_type.__qualname__} protocol "
-                    f"because its {attrname!r} method {exc}"
-                ) from None
-
-        # Check that all required non-callable members are present
-        for attrname in expected_noncallable_members:
-            # TODO: implement assignability checks for non-callable members
-            if attrname not in subject_annotations and not hasattr(subject, attrname):
-                raise TypeCheckError(
-                    f"is not compatible with the {origin_type.__qualname__} protocol "
-                    f"because it has no attribute named {attrname!r}"
-                )
-    except TypeCheckError as exc:
-        result_map[origin_type] = exc
-        raise
-    else:
-        result_map[origin_type] = None
-
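-# Illustrative sketch (hypothetical protocol): how the structural check above
-# behaves; a type that lacks a declared method fails the check.
-def _example_protocol_check() -> None:  # pragma: no cover
-    from ._memo import TypeCheckMemo
-
-    class HasLen(typing.Protocol):
-        def __len__(self) -> int: ...
-
-    memo = TypeCheckMemo(globals(), locals())
-    check_protocol([1, 2, 3], HasLen, (), memo)  # passes: list defines __len__
-    # check_protocol(42, HasLen, (), memo) would raise TypeCheckError, since
-    # int has no method named __len__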
-
-def check_byteslike(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if not isinstance(value, (bytearray, bytes, memoryview)):
-        raise TypeCheckError("is not bytes-like")
-
-
-def check_self(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if memo.self_type is None:
-        raise TypeCheckError("cannot be checked against Self outside of a method call")
-
-    if isclass(value):
-        if not issubclass(value, memo.self_type):
-            raise TypeCheckError(
-                f"is not an instance of the self type "
-                f"({qualified_name(memo.self_type)})"
-            )
-    elif not isinstance(value, memo.self_type):
-        raise TypeCheckError(
-            f"is not an instance of the self type ({qualified_name(memo.self_type)})"
-        )
-
-
-def check_paramspec(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    pass  # No-op for now
-
-
-def check_instanceof(
-    value: Any,
-    origin_type: Any,
-    args: tuple[Any, ...],
-    memo: TypeCheckMemo,
-) -> None:
-    if not isinstance(value, origin_type):
-        raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}")
-
-
-def check_type_internal(
-    value: Any,
-    annotation: Any,
-    memo: TypeCheckMemo,
-) -> None:
-    """
-    Check that the given object is compatible with the given type annotation.
-
-    This function should only be used by type checker callables. Applications should use
-    :func:`~.check_type` instead.
-
-    :param value: the value to check
-    :param annotation: the type annotation to check against
-    :param memo: a memo object containing configuration and information necessary for
-        looking up forward references
-    """
-
-    if isinstance(annotation, ForwardRef):
-        try:
-            annotation = evaluate_forwardref(annotation, memo)
-        except NameError:
-            if memo.config.forward_ref_policy is ForwardRefPolicy.ERROR:
-                raise
-            elif memo.config.forward_ref_policy is ForwardRefPolicy.WARN:
-                warnings.warn(
-                    f"Cannot resolve forward reference {annotation.__forward_arg__!r}",
-                    TypeHintWarning,
-                    stacklevel=get_stacklevel(),
-                )
-
-            return
-
-    if annotation is Any or annotation is SubclassableAny or isinstance(value, Mock):
-        return
-
-    # Skip type checks if value is an instance of a class that inherits from Any
-    if not isclass(value) and SubclassableAny in type(value).__bases__:
-        return
-
-    extras: tuple[Any, ...]
-    origin_type = get_origin(annotation)
-    if origin_type is Annotated:
-        annotation, *extras_ = get_args(annotation)
-        extras = tuple(extras_)
-        origin_type = get_origin(annotation)
-    else:
-        extras = ()
-
-    if origin_type is not None:
-        args = get_args(annotation)
-
-        # Compatibility hack to distinguish between unparametrized and empty tuple
-        # (tuple[()]), necessary due to https://github.com/python/cpython/issues/91137
-        if origin_type in (tuple, Tuple) and annotation is not Tuple and not args:
-            args = ((),)
-    else:
-        origin_type = annotation
-        args = ()
-
-    for lookup_func in checker_lookup_functions:
-        checker = lookup_func(origin_type, args, extras)
-        if checker:
-            checker(value, origin_type, args, memo)
-            return
-
-    if isclass(origin_type):
-        if not isinstance(value, origin_type):
-            raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}")
-    elif type(origin_type) is str:  # noqa: E721
-        warnings.warn(
-            f"Skipping type check against {origin_type!r}; this looks like a "
-            f"string-form forward reference imported from another module",
-            TypeHintWarning,
-            stacklevel=get_stacklevel(),
-        )
-
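-# Illustrative sketch (hedged): how a checker callable can drive the dispatcher
-# above directly; applications would normally go through check_type() instead.
-def _example_internal_dispatch() -> None:  # pragma: no cover
-    from ._memo import TypeCheckMemo
-
-    memo = TypeCheckMemo(globals(), locals())
-    check_type_internal([1, 2], list, memo)    # dispatches to check_list
-    check_type_internal({"a": 1}, dict, memo)  # dispatches to check_mapping
-    # check_type_internal("x", int, memo) would raise TypeCheckError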
-
-# Equality checks are applied to these
-origin_type_checkers = {
-    bytes: check_byteslike,
-    AbstractSet: check_set,
-    BinaryIO: check_io,
-    Callable: check_callable,
-    collections.abc.Callable: check_callable,
-    complex: check_number,
-    dict: check_mapping,
-    Dict: check_mapping,
-    float: check_number,
-    frozenset: check_set,
-    IO: check_io,
-    list: check_list,
-    List: check_list,
-    typing.Literal: check_literal,
-    Mapping: check_mapping,
-    MutableMapping: check_mapping,
-    None: check_none,
-    collections.abc.Mapping: check_mapping,
-    collections.abc.MutableMapping: check_mapping,
-    Sequence: check_sequence,
-    collections.abc.Sequence: check_sequence,
-    collections.abc.Set: check_set,
-    set: check_set,
-    Set: check_set,
-    TextIO: check_io,
-    tuple: check_tuple,
-    Tuple: check_tuple,
-    type: check_class,
-    Type: check_class,
-    Union: check_union,
-}
-if sys.version_info >= (3, 10):
-    origin_type_checkers[types.UnionType] = check_uniontype
-    origin_type_checkers[typing.TypeGuard] = check_typeguard
-if sys.version_info >= (3, 11):
-    origin_type_checkers.update(
-        {typing.LiteralString: check_literal_string, typing.Self: check_self}
-    )
-if typing_extensions is not None:
-    # On some Python versions, these may simply be re-exports from typing,
-    # but exactly which Python versions is subject to change,
-    # so it's best to err on the safe side
-    # and update the dictionary on all Python versions
-    # if typing_extensions is installed
-    origin_type_checkers[typing_extensions.Literal] = check_literal
-    origin_type_checkers[typing_extensions.LiteralString] = check_literal_string
-    origin_type_checkers[typing_extensions.Self] = check_self
-    origin_type_checkers[typing_extensions.TypeGuard] = check_typeguard
-
-
-def builtin_checker_lookup(
-    origin_type: Any, args: tuple[Any, ...], extras: tuple[Any, ...]
-) -> TypeCheckerCallable | None:
-    checker = origin_type_checkers.get(origin_type)
-    if checker is not None:
-        return checker
-    elif is_typeddict(origin_type):
-        return check_typed_dict
-    elif isclass(origin_type) and issubclass(
-        origin_type,
-        Tuple,  # type: ignore[arg-type]
-    ):
-        # NamedTuple
-        return check_tuple
-    elif getattr(origin_type, "_is_protocol", False):
-        return check_protocol
-    elif isinstance(origin_type, ParamSpec):
-        return check_paramspec
-    elif isinstance(origin_type, TypeVar):
-        return check_typevar
-    elif origin_type.__class__ is NewType:
-        # typing.NewType on Python 3.10+
-        return check_newtype
-    elif (
-        isfunction(origin_type)
-        and getattr(origin_type, "__module__", None) == "typing"
-        and getattr(origin_type, "__qualname__", "").startswith("NewType.")
-        and hasattr(origin_type, "__supertype__")
-    ):
-        # typing.NewType on Python 3.9 and below
-        return check_newtype
-
-    return None
-
-
-checker_lookup_functions.append(builtin_checker_lookup)
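-
-
-# Illustrative sketch (hypothetical type and checker): third-party code can
-# prepend its own lookup function, which is what load_plugins() below does for
-# entry points.
-def _example_register_custom_checker() -> None:  # pragma: no cover
-    class PositiveInt(int):
-        pass
-
-    def check_positive_int(
-        value: Any, origin_type: Any, args: tuple[Any, ...], memo: TypeCheckMemo
-    ) -> None:
-        if not isinstance(value, int) or value <= 0:
-            raise TypeCheckError("is not a positive integer")
-
-    def lookup(
-        origin_type: Any, args: tuple[Any, ...], extras: tuple[Any, ...]
-    ) -> TypeCheckerCallable | None:
-        return check_positive_int if origin_type is PositiveInt else None
-
-    checker_lookup_functions.insert(0, lookup)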
-
-
-def load_plugins() -> None:
-    """
-    Load all type checker lookup functions from entry points.
-
-    All entry points from the ``typeguard.checker_lookup`` group are loaded, and the
-    returned lookup functions are added to :data:`typeguard.checker_lookup_functions`.
-
-    .. note:: This function is called implicitly on import, unless the
-        ``TYPEGUARD_DISABLE_PLUGIN_AUTOLOAD`` environment variable is present.
-    """
-
-    for ep in entry_points(group="typeguard.checker_lookup"):
-        try:
-            plugin = ep.load()
-        except Exception as exc:
-            warnings.warn(
-                f"Failed to load plugin {ep.name!r}: " f"{qualified_name(exc)}: {exc}",
-                stacklevel=2,
-            )
-            continue
-
-        if not callable(plugin):
-            warnings.warn(
-                f"Plugin {ep} returned a non-callable object: {plugin!r}", stacklevel=2
-            )
-            continue
-
-        checker_lookup_functions.insert(0, plugin)
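-
-
-# Illustrative sketch (hypothetical distribution): a plugin would expose its
-# lookup function through this entry point group, e.g. in pyproject.toml:
-#
-#   [project.entry-points."typeguard.checker_lookup"]
-#   myplugin = "myplugin._typeguard:checker_lookup"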
diff --git a/pkg_resources/_vendor/typeguard/_config.py b/pkg_resources/_vendor/typeguard/_config.py
deleted file mode 100644
index 36efad5396..0000000000
--- a/pkg_resources/_vendor/typeguard/_config.py
+++ /dev/null
@@ -1,108 +0,0 @@
-from __future__ import annotations
-
-from collections.abc import Iterable
-from dataclasses import dataclass
-from enum import Enum, auto
-from typing import TYPE_CHECKING, TypeVar
-
-if TYPE_CHECKING:
-    from ._functions import TypeCheckFailCallback
-
-T = TypeVar("T")
-
-
-class ForwardRefPolicy(Enum):
-    """
-    Defines how unresolved forward references are handled.
-
-    Members:
-
-    * ``ERROR``: propagate the :exc:`NameError` when the forward reference lookup fails
-    * ``WARN``: emit a :class:`~.TypeHintWarning` if the forward reference lookup fails
-    * ``IGNORE``: silently skip checks for unresolvable forward references
-    """
-
-    ERROR = auto()
-    WARN = auto()
-    IGNORE = auto()
-
-
-class CollectionCheckStrategy(Enum):
-    """
-    Specifies how thoroughly the contents of collections are type checked.
-
-    This has an effect on the following built-in checkers:
-
-    * ``AbstractSet``
-    * ``Dict``
-    * ``List``
-    * ``Mapping``
-    * ``Set``
-    * ``Tuple[<type>, ...]`` (arbitrarily sized tuples)
-
-    Members:
-
-    * ``FIRST_ITEM``: check only the first item
-    * ``ALL_ITEMS``: check all items
-    """
-
-    FIRST_ITEM = auto()
-    ALL_ITEMS = auto()
-
-    def iterate_samples(self, collection: Iterable[T]) -> Iterable[T]:
-        if self is CollectionCheckStrategy.FIRST_ITEM:
-            try:
-                return [next(iter(collection))]
-            except StopIteration:
-                return ()
-        else:
-            return collection
-
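-# Illustrative sketch (hypothetical data): how the two strategies sample a
-# collection.
-def _example_iterate_samples() -> None:  # pragma: no cover
-    items = ["a", "b", "c"]
-    assert list(CollectionCheckStrategy.FIRST_ITEM.iterate_samples(items)) == ["a"]
-    assert list(CollectionCheckStrategy.ALL_ITEMS.iterate_samples(items)) == items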
-
-@dataclass
-class TypeCheckConfiguration:
-    """
-    You can change Typeguard's behavior with these settings.
-
-    .. attribute:: typecheck_fail_callback
-       :type: Callable[[TypeCheckError, TypeCheckMemo], Any]
-
-         Callable that is called when type checking fails.
-
-         Default: ``None`` (the :exc:`~.TypeCheckError` is raised directly)
-
-    .. attribute:: forward_ref_policy
-       :type: ForwardRefPolicy
-
-         Specifies what to do when a forward reference fails to resolve.
-
-         Default: ``WARN``
-
-    .. attribute:: collection_check_strategy
-       :type: CollectionCheckStrategy
-
-         Specifies how thoroughly the contents of collections (list, dict, etc.) are
-         type checked.
-
-         Default: ``FIRST_ITEM``
-
-    .. attribute:: debug_instrumentation
-       :type: bool
-
-         If set to ``True``, the code of modules or functions instrumented by typeguard
-         is printed to ``sys.stderr`` after the instrumentation is done.
-
-         Requires Python 3.9 or newer.
-
-         Default: ``False``
-    """
-
-    forward_ref_policy: ForwardRefPolicy = ForwardRefPolicy.WARN
-    typecheck_fail_callback: TypeCheckFailCallback | None = None
-    collection_check_strategy: CollectionCheckStrategy = (
-        CollectionCheckStrategy.FIRST_ITEM
-    )
-    debug_instrumentation: bool = False
-
-
-global_config = TypeCheckConfiguration()
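-
-
-# Illustrative usage (hedged): applications typically adjust this shared object
-# once at startup; the upstream package re-exports it as ``typeguard.config``:
-#
-#   from typeguard import config
-#   config.forward_ref_policy = ForwardRefPolicy.ERROR
-#   config.collection_check_strategy = CollectionCheckStrategy.ALL_ITEMS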
diff --git a/pkg_resources/_vendor/typeguard/_decorators.py b/pkg_resources/_vendor/typeguard/_decorators.py
deleted file mode 100644
index cf3253351f..0000000000
--- a/pkg_resources/_vendor/typeguard/_decorators.py
+++ /dev/null
@@ -1,235 +0,0 @@
-from __future__ import annotations
-
-import ast
-import inspect
-import sys
-from collections.abc import Sequence
-from functools import partial
-from inspect import isclass, isfunction
-from types import CodeType, FrameType, FunctionType
-from typing import TYPE_CHECKING, Any, Callable, ForwardRef, TypeVar, cast, overload
-from warnings import warn
-
-from ._config import CollectionCheckStrategy, ForwardRefPolicy, global_config
-from ._exceptions import InstrumentationWarning
-from ._functions import TypeCheckFailCallback
-from ._transformer import TypeguardTransformer
-from ._utils import Unset, function_name, get_stacklevel, is_method_of, unset
-
-if TYPE_CHECKING:
-    from typeshed.stdlib.types import _Cell
-
-    _F = TypeVar("_F")
-
-    def typeguard_ignore(f: _F) -> _F:
-        """This decorator is a noop during static type-checking."""
-        return f
-
-else:
-    from typing import no_type_check as typeguard_ignore  # noqa: F401
-
-T_CallableOrType = TypeVar("T_CallableOrType", bound=Callable[..., Any])
-
-
-def make_cell(value: object) -> _Cell:
-    return (lambda: value).__closure__[0]  # type: ignore[index]
-
-
-def find_target_function(
-    new_code: CodeType, target_path: Sequence[str], firstlineno: int
-) -> CodeType | None:
-    target_name = target_path[0]
-    for const in new_code.co_consts:
-        if isinstance(const, CodeType):
-            if const.co_name == target_name:
-                if const.co_firstlineno == firstlineno:
-                    return const
-                elif len(target_path) > 1:
-                    target_code = find_target_function(
-                        const, target_path[1:], firstlineno
-                    )
-                    if target_code:
-                        return target_code
-
-    return None
-
-
-def instrument(f: T_CallableOrType) -> FunctionType | str:
-    if not getattr(f, "__code__", None):
-        return "no code associated"
-    elif not getattr(f, "__module__", None):
-        return "__module__ attribute is not set"
-    elif f.__code__.co_filename == "<stdin>":
-        return "cannot instrument functions defined in a REPL"
-    elif hasattr(f, "__wrapped__"):
-        return (
-            "@typechecked only supports instrumenting functions wrapped with "
-            "@classmethod, @staticmethod or @property"
-        )
-
-    target_path = [item for item in f.__qualname__.split(".") if item != ""]
-    module_source = inspect.getsource(sys.modules[f.__module__])
-    module_ast = ast.parse(module_source)
-    instrumentor = TypeguardTransformer(target_path, f.__code__.co_firstlineno)
-    instrumentor.visit(module_ast)
-
-    if not instrumentor.target_node or instrumentor.target_lineno is None:
-        return "instrumentor did not find the target function"
-
-    module_code = compile(module_ast, f.__code__.co_filename, "exec", dont_inherit=True)
-    new_code = find_target_function(
-        module_code, target_path, instrumentor.target_lineno
-    )
-    if not new_code:
-        return "cannot find the target function in the AST"
-
-    if global_config.debug_instrumentation and sys.version_info >= (3, 9):
-        # Find the matching AST node, then unparse it to source and print to stderr
-        print(
-            f"Source code of {f.__qualname__}() after instrumentation:"
-            "\n----------------------------------------------",
-            file=sys.stderr,
-        )
-        print(ast.unparse(instrumentor.target_node), file=sys.stderr)
-        print(
-            "----------------------------------------------",
-            file=sys.stderr,
-        )
-
-    closure = f.__closure__
-    if new_code.co_freevars != f.__code__.co_freevars:
-        # Create a new closure and find values for the new free variables
-        frame = cast(FrameType, inspect.currentframe())
-        frame = cast(FrameType, frame.f_back)
-        frame_locals = cast(FrameType, frame.f_back).f_locals
-        cells: list[_Cell] = []
-        for key in new_code.co_freevars:
-            if key in instrumentor.names_used_in_annotations:
-                # Find the value and make a new cell from it
-                value = frame_locals.get(key) or ForwardRef(key)
-                cells.append(make_cell(value))
-            else:
-                # Reuse the cell from the existing closure
-                assert f.__closure__
-                cells.append(f.__closure__[f.__code__.co_freevars.index(key)])
-
-        closure = tuple(cells)
-
-    new_function = FunctionType(new_code, f.__globals__, f.__name__, closure=closure)
-    new_function.__module__ = f.__module__
-    new_function.__name__ = f.__name__
-    new_function.__qualname__ = f.__qualname__
-    new_function.__annotations__ = f.__annotations__
-    new_function.__doc__ = f.__doc__
-    new_function.__defaults__ = f.__defaults__
-    new_function.__kwdefaults__ = f.__kwdefaults__
-    return new_function
-
-
-@overload
-def typechecked(
-    *,
-    forward_ref_policy: ForwardRefPolicy | Unset = unset,
-    typecheck_fail_callback: TypeCheckFailCallback | Unset = unset,
-    collection_check_strategy: CollectionCheckStrategy | Unset = unset,
-    debug_instrumentation: bool | Unset = unset,
-) -> Callable[[T_CallableOrType], T_CallableOrType]: ...
-
-
-@overload
-def typechecked(target: T_CallableOrType) -> T_CallableOrType: ...
-
-
-def typechecked(
-    target: T_CallableOrType | None = None,
-    *,
-    forward_ref_policy: ForwardRefPolicy | Unset = unset,
-    typecheck_fail_callback: TypeCheckFailCallback | Unset = unset,
-    collection_check_strategy: CollectionCheckStrategy | Unset = unset,
-    debug_instrumentation: bool | Unset = unset,
-) -> Any:
-    """
-    Instrument the target function to perform run-time type checking.
-
-    This decorator recompiles the target function, injecting code to type check
-    arguments, return values, yield values (excluding ``yield from``) and assignments to
-    annotated local variables.
-
-    This can also be used as a class decorator. This will instrument all type annotated
-    methods, including :func:`@classmethod <classmethod>`,
-    :func:`@staticmethod <staticmethod>` and :class:`@property <property>` decorated
-    methods in the class.
-
-    .. note:: When Python is run in optimized mode (``-O`` or ``-OO``), this decorator
-        is a no-op. This is a feature meant for selectively introducing type checking
-        into a code base where the checks aren't meant to be run in production.
-
-    :param target: the function or class to enable type checking for
-    :param forward_ref_policy: override for
-        :attr:`.TypeCheckConfiguration.forward_ref_policy`
-    :param typecheck_fail_callback: override for
-        :attr:`.TypeCheckConfiguration.typecheck_fail_callback`
-    :param collection_check_strategy: override for
-        :attr:`.TypeCheckConfiguration.collection_check_strategy`
-    :param debug_instrumentation: override for
-        :attr:`.TypeCheckConfiguration.debug_instrumentation`
-
-    """
-    if target is None:
-        return partial(
-            typechecked,
-            forward_ref_policy=forward_ref_policy,
-            typecheck_fail_callback=typecheck_fail_callback,
-            collection_check_strategy=collection_check_strategy,
-            debug_instrumentation=debug_instrumentation,
-        )
-
-    if not __debug__:
-        return target
-
-    if isclass(target):
-        for key, attr in target.__dict__.items():
-            if is_method_of(attr, target):
-                retval = instrument(attr)
-                if isfunction(retval):
-                    setattr(target, key, retval)
-            elif isinstance(attr, (classmethod, staticmethod)):
-                if is_method_of(attr.__func__, target):
-                    retval = instrument(attr.__func__)
-                    if isfunction(retval):
-                        wrapper = attr.__class__(retval)
-                        setattr(target, key, wrapper)
-            elif isinstance(attr, property):
-                kwargs: dict[str, Any] = dict(doc=attr.__doc__)
-                for name in ("fset", "fget", "fdel"):
-                    property_func = kwargs[name] = getattr(attr, name)
-                    if is_method_of(property_func, target):
-                        retval = instrument(property_func)
-                        if isfunction(retval):
-                            kwargs[name] = retval
-
-                setattr(target, key, attr.__class__(**kwargs))
-
-        return target
-
-    # Find either the first Python wrapper or the actual function
-    wrapper_class: (
-        type[classmethod[Any, Any, Any]] | type[staticmethod[Any, Any]] | None
-    ) = None
-    if isinstance(target, (classmethod, staticmethod)):
-        wrapper_class = target.__class__
-        target = target.__func__
-
-    retval = instrument(target)
-    if isinstance(retval, str):
-        warn(
-            f"{retval} -- not typechecking {function_name(target)}",
-            InstrumentationWarning,
-            stacklevel=get_stacklevel(),
-        )
-        return target
-
-    if wrapper_class is None:
-        return retval
-    else:
-        return wrapper_class(retval)
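-
-
-# Illustrative usage (hypothetical function):
-#
-#   @typechecked
-#   def greet(name: str) -> str:
-#       return "Hello, " + name
-#
-#   greet("world")  # passes
-#   greet(123)      # raises TypeCheckError at call time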
diff --git a/pkg_resources/_vendor/typeguard/_exceptions.py b/pkg_resources/_vendor/typeguard/_exceptions.py
deleted file mode 100644
index 625437a649..0000000000
--- a/pkg_resources/_vendor/typeguard/_exceptions.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from collections import deque
-from typing import Deque
-
-
-class TypeHintWarning(UserWarning):
-    """
-    A warning that is emitted when a type hint in string form could not be resolved to
-    an actual type.
-    """
-
-
-class TypeCheckWarning(UserWarning):
-    """Emitted by typeguard's type checkers when a type mismatch is detected."""
-
-    def __init__(self, message: str):
-        super().__init__(message)
-
-
-class InstrumentationWarning(UserWarning):
-    """Emitted when there's a problem with instrumenting a function for type checks."""
-
-    def __init__(self, message: str):
-        super().__init__(message)
-
-
-class TypeCheckError(Exception):
-    """
-    Raised by typeguard's type checkers when a type mismatch is detected.
-    """
-
-    def __init__(self, message: str):
-        super().__init__(message)
-        self._path: Deque[str] = deque()
-
-    def append_path_element(self, element: str) -> None:
-        self._path.append(element)
-
-    def __str__(self) -> str:
-        if self._path:
-            return " of ".join(self._path) + " " + str(self.args[0])
-        else:
-            return str(self.args[0])
diff --git a/pkg_resources/_vendor/typeguard/_functions.py b/pkg_resources/_vendor/typeguard/_functions.py
deleted file mode 100644
index 28497856a3..0000000000
--- a/pkg_resources/_vendor/typeguard/_functions.py
+++ /dev/null
@@ -1,308 +0,0 @@
-from __future__ import annotations
-
-import sys
-import warnings
-from typing import Any, Callable, NoReturn, TypeVar, Union, overload
-
-from . import _suppression
-from ._checkers import BINARY_MAGIC_METHODS, check_type_internal
-from ._config import (
-    CollectionCheckStrategy,
-    ForwardRefPolicy,
-    TypeCheckConfiguration,
-)
-from ._exceptions import TypeCheckError, TypeCheckWarning
-from ._memo import TypeCheckMemo
-from ._utils import get_stacklevel, qualified_name
-
-if sys.version_info >= (3, 11):
-    from typing import Literal, Never, TypeAlias
-else:
-    from typing_extensions import Literal, Never, TypeAlias
-
-T = TypeVar("T")
-TypeCheckFailCallback: TypeAlias = Callable[[TypeCheckError, TypeCheckMemo], Any]
-
-
-@overload
-def check_type(
-    value: object,
-    expected_type: type[T],
-    *,
-    forward_ref_policy: ForwardRefPolicy = ...,
-    typecheck_fail_callback: TypeCheckFailCallback | None = ...,
-    collection_check_strategy: CollectionCheckStrategy = ...,
-) -> T: ...
-
-
-@overload
-def check_type(
-    value: object,
-    expected_type: Any,
-    *,
-    forward_ref_policy: ForwardRefPolicy = ...,
-    typecheck_fail_callback: TypeCheckFailCallback | None = ...,
-    collection_check_strategy: CollectionCheckStrategy = ...,
-) -> Any: ...
-
-
-def check_type(
-    value: object,
-    expected_type: Any,
-    *,
-    forward_ref_policy: ForwardRefPolicy = TypeCheckConfiguration().forward_ref_policy,
-    typecheck_fail_callback: TypeCheckFailCallback | None = (
-        TypeCheckConfiguration().typecheck_fail_callback
-    ),
-    collection_check_strategy: CollectionCheckStrategy = (
-        TypeCheckConfiguration().collection_check_strategy
-    ),
-) -> Any:
-    """
-    Ensure that ``value`` matches ``expected_type``.
-
-    The types from the :mod:`typing` module do not support :func:`isinstance` or
-    :func:`issubclass` so a number of type specific checks are required. This function
-    knows which checker to call for which type.
-
-    This function wraps :func:`~.check_type_internal` in the following ways:
-
-    * Respects type checking suppression (:func:`~.suppress_type_checks`)
-    * Forms a :class:`~.TypeCheckMemo` from the current stack frame
-    * Calls the configured type check fail callback if the check fails
-
-    Note that this function is independent of the globally shared configuration in
-    :data:`typeguard.config`. This means that usage within libraries is safe from being
-    affected by configuration changes made by other libraries or by the integrating
-    application. Instead, configuration options have the same default values as their
-    corresponding fields in :class:`TypeCheckConfiguration`.
-
-    :param value: value to be checked against ``expected_type``
-    :param expected_type: a class or generic type instance, or a tuple of such things
-    :param forward_ref_policy: see :attr:`TypeCheckConfiguration.forward_ref_policy`
-    :param typecheck_fail_callback:
-        see :attr:`TypeCheckConfiguration.typecheck_fail_callback`
-    :param collection_check_strategy:
-        see :attr:`TypeCheckConfiguration.collection_check_strategy`
-    :return: ``value``, unmodified
-    :raises TypeCheckError: if there is a type mismatch
-
-    """
-    if type(expected_type) is tuple:
-        expected_type = Union[expected_type]
-
-    config = TypeCheckConfiguration(
-        forward_ref_policy=forward_ref_policy,
-        typecheck_fail_callback=typecheck_fail_callback,
-        collection_check_strategy=collection_check_strategy,
-    )
-
-    if _suppression.type_checks_suppressed or expected_type is Any:
-        return value
-
-    frame = sys._getframe(1)
-    memo = TypeCheckMemo(frame.f_globals, frame.f_locals, config=config)
-    try:
-        check_type_internal(value, expected_type, memo)
-    except TypeCheckError as exc:
-        exc.append_path_element(qualified_name(value, add_class_prefix=True))
-        if config.typecheck_fail_callback:
-            config.typecheck_fail_callback(exc, memo)
-        else:
-            raise
-
-    return value
-
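-# Illustrative usage (hypothetical values):
-def _example_check_type() -> None:  # pragma: no cover
-    from typing import List
-
-    check_type([1, 2, 3], List[int])  # returns the list unchanged
-    try:
-        check_type(["a"], List[int])  # item 0 fails the element check
-    except TypeCheckError as exc:
-        print(exc)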
-
-def check_argument_types(
-    func_name: str,
-    arguments: dict[str, tuple[Any, Any]],
-    memo: TypeCheckMemo,
-) -> Literal[True]:
-    if _suppression.type_checks_suppressed:
-        return True
-
-    for argname, (value, annotation) in arguments.items():
-        if annotation is NoReturn or annotation is Never:
-            exc = TypeCheckError(
-                f"{func_name}() was declared never to be called but it was"
-            )
-            if memo.config.typecheck_fail_callback:
-                memo.config.typecheck_fail_callback(exc, memo)
-            else:
-                raise exc
-
-        try:
-            check_type_internal(value, annotation, memo)
-        except TypeCheckError as exc:
-            qualname = qualified_name(value, add_class_prefix=True)
-            exc.append_path_element(f'argument "{argname}" ({qualname})')
-            if memo.config.typecheck_fail_callback:
-                memo.config.typecheck_fail_callback(exc, memo)
-            else:
-                raise
-
-    return True
-
-
-def check_return_type(
-    func_name: str,
-    retval: T,
-    annotation: Any,
-    memo: TypeCheckMemo,
-) -> T:
-    if _suppression.type_checks_suppressed:
-        return retval
-
-    if annotation is NoReturn or annotation is Never:
-        exc = TypeCheckError(f"{func_name}() was declared never to return but it did")
-        if memo.config.typecheck_fail_callback:
-            memo.config.typecheck_fail_callback(exc, memo)
-        else:
-            raise exc
-
-    try:
-        check_type_internal(retval, annotation, memo)
-    except TypeCheckError as exc:
-        # Allow NotImplemented if this is a binary magic method (__eq__() et al)
-        if retval is NotImplemented and annotation is bool:
-            # This does not (and cannot) check if it's actually a method
-            func_name = func_name.rsplit(".", 1)[-1]
-            if func_name in BINARY_MAGIC_METHODS:
-                return retval
-
-        qualname = qualified_name(retval, add_class_prefix=True)
-        exc.append_path_element(f"the return value ({qualname})")
-        if memo.config.typecheck_fail_callback:
-            memo.config.typecheck_fail_callback(exc, memo)
-        else:
-            raise
-
-    return retval
-
-
-def check_send_type(
-    func_name: str,
-    sendval: T,
-    annotation: Any,
-    memo: TypeCheckMemo,
-) -> T:
-    if _suppression.type_checks_suppressed:
-        return sendval
-
-    if annotation is NoReturn or annotation is Never:
-        exc = TypeCheckError(
-            f"{func_name}() was declared never to be sent a value to but it was"
-        )
-        if memo.config.typecheck_fail_callback:
-            memo.config.typecheck_fail_callback(exc, memo)
-        else:
-            raise exc
-
-    try:
-        check_type_internal(sendval, annotation, memo)
-    except TypeCheckError as exc:
-        qualname = qualified_name(sendval, add_class_prefix=True)
-        exc.append_path_element(f"the value sent to generator ({qualname})")
-        if memo.config.typecheck_fail_callback:
-            memo.config.typecheck_fail_callback(exc, memo)
-        else:
-            raise
-
-    return sendval
-
-
-def check_yield_type(
-    func_name: str,
-    yieldval: T,
-    annotation: Any,
-    memo: TypeCheckMemo,
-) -> T:
-    if _suppression.type_checks_suppressed:
-        return yieldval
-
-    if annotation is NoReturn or annotation is Never:
-        exc = TypeCheckError(f"{func_name}() was declared never to yield but it did")
-        if memo.config.typecheck_fail_callback:
-            memo.config.typecheck_fail_callback(exc, memo)
-        else:
-            raise exc
-
-    try:
-        check_type_internal(yieldval, annotation, memo)
-    except TypeCheckError as exc:
-        qualname = qualified_name(yieldval, add_class_prefix=True)
-        exc.append_path_element(f"the yielded value ({qualname})")
-        if memo.config.typecheck_fail_callback:
-            memo.config.typecheck_fail_callback(exc, memo)
-        else:
-            raise
-
-    return yieldval
-
-
-def check_variable_assignment(
-    value: object, varname: str, annotation: Any, memo: TypeCheckMemo
-) -> Any:
-    if _suppression.type_checks_suppressed:
-        return value
-
-    try:
-        check_type_internal(value, annotation, memo)
-    except TypeCheckError as exc:
-        qualname = qualified_name(value, add_class_prefix=True)
-        exc.append_path_element(f"value assigned to {varname} ({qualname})")
-        if memo.config.typecheck_fail_callback:
-            memo.config.typecheck_fail_callback(exc, memo)
-        else:
-            raise
-
-    return value
-
-
-def check_multi_variable_assignment(
-    value: Any, targets: list[dict[str, Any]], memo: TypeCheckMemo
-) -> Any:
-    if max(len(target) for target in targets) == 1:
-        iterated_values = [value]
-    else:
-        iterated_values = list(value)
-
-    if not _suppression.type_checks_suppressed:
-        for expected_types in targets:
-            value_index = 0
-            for ann_index, (varname, expected_type) in enumerate(
-                expected_types.items()
-            ):
-                if varname.startswith("*"):
-                    varname = varname[1:]
-                    keys_left = len(expected_types) - 1 - ann_index
-                    next_value_index = len(iterated_values) - keys_left
-                    obj: object = iterated_values[value_index:next_value_index]
-                    value_index = next_value_index
-                else:
-                    obj = iterated_values[value_index]
-                    value_index += 1
-
-                try:
-                    check_type_internal(obj, expected_type, memo)
-                except TypeCheckError as exc:
-                    qualname = qualified_name(obj, add_class_prefix=True)
-                    exc.append_path_element(f"value assigned to {varname} ({qualname})")
-                    if memo.config.typecheck_fail_callback:
-                        memo.config.typecheck_fail_callback(exc, memo)
-                    else:
-                        raise
-
-    return iterated_values[0] if len(iterated_values) == 1 else iterated_values
-
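-# Illustrative sketch (hedged): for an annotated unpacking such as
-# ``a, *b = (1, 2, 3)`` the instrumented code would call
-#
-#   check_multi_variable_assignment((1, 2, 3), [{"a": int, "*b": list[int]}], memo)
-#
-# which checks 1 against int and [2, 3] against list[int], then returns
-# [1, 2, 3] for the actual assignment.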
-
-def warn_on_error(exc: TypeCheckError, memo: TypeCheckMemo) -> None:
-    """
-    Emit a warning on a type mismatch.
-
-    This is intended to be used as an error handler in
-    :attr:`TypeCheckConfiguration.typecheck_fail_callback`.
-
-    """
-    warnings.warn(TypeCheckWarning(str(exc)), stacklevel=get_stacklevel())
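-
-
-# Illustrative usage (hedged):
-#
-#   check_type("not an int", int, typecheck_fail_callback=warn_on_error)
-#   # emits a TypeCheckWarning instead of raising TypeCheckError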
diff --git a/pkg_resources/_vendor/typeguard/_importhook.py b/pkg_resources/_vendor/typeguard/_importhook.py
deleted file mode 100644
index 8590540a5a..0000000000
--- a/pkg_resources/_vendor/typeguard/_importhook.py
+++ /dev/null
@@ -1,213 +0,0 @@
-from __future__ import annotations
-
-import ast
-import sys
-import types
-from collections.abc import Callable, Iterable
-from importlib.abc import MetaPathFinder
-from importlib.machinery import ModuleSpec, SourceFileLoader
-from importlib.util import cache_from_source, decode_source
-from inspect import isclass
-from os import PathLike
-from types import CodeType, ModuleType, TracebackType
-from typing import Sequence, TypeVar
-from unittest.mock import patch
-
-from ._config import global_config
-from ._transformer import TypeguardTransformer
-
-if sys.version_info >= (3, 12):
-    from collections.abc import Buffer
-else:
-    from typing_extensions import Buffer
-
-if sys.version_info >= (3, 11):
-    from typing import ParamSpec
-else:
-    from typing_extensions import ParamSpec
-
-if sys.version_info >= (3, 10):
-    from importlib.metadata import PackageNotFoundError, version
-else:
-    from importlib_metadata import PackageNotFoundError, version
-
-try:
-    OPTIMIZATION = "typeguard" + "".join(version("typeguard").split(".")[:3])
-except PackageNotFoundError:
-    OPTIMIZATION = "typeguard"
-
-P = ParamSpec("P")
-T = TypeVar("T")
-
-
-# The name of this function is magical
-def _call_with_frames_removed(
-    f: Callable[P, T], *args: P.args, **kwargs: P.kwargs
-) -> T:
-    return f(*args, **kwargs)
-
-
-def optimized_cache_from_source(path: str, debug_override: bool | None = None) -> str:
-    return cache_from_source(path, debug_override, optimization=OPTIMIZATION)
-
-
-class TypeguardLoader(SourceFileLoader):
-    @staticmethod
-    def source_to_code(
-        data: Buffer | str | ast.Module | ast.Expression | ast.Interactive,
-        path: Buffer | str | PathLike[str] = "<string>",
-    ) -> CodeType:
-        if isinstance(data, (ast.Module, ast.Expression, ast.Interactive)):
-            tree = data
-        else:
-            if isinstance(data, str):
-                source = data
-            else:
-                source = decode_source(data)
-
-            tree = _call_with_frames_removed(
-                ast.parse,
-                source,
-                path,
-                "exec",
-            )
-
-        tree = TypeguardTransformer().visit(tree)
-        ast.fix_missing_locations(tree)
-
-        if global_config.debug_instrumentation and sys.version_info >= (3, 9):
-            print(
-                f"Source code of {path!r} after instrumentation:\n"
-                "----------------------------------------------",
-                file=sys.stderr,
-            )
-            print(ast.unparse(tree), file=sys.stderr)
-            print("----------------------------------------------", file=sys.stderr)
-
-        return _call_with_frames_removed(
-            compile, tree, path, "exec", 0, dont_inherit=True
-        )
-
-    def exec_module(self, module: ModuleType) -> None:
-        # Use a custom optimization marker – the import lock should make this monkey
-        # patch safe
-        with patch(
-            "importlib._bootstrap_external.cache_from_source",
-            optimized_cache_from_source,
-        ):
-            super().exec_module(module)
-
-
-class TypeguardFinder(MetaPathFinder):
-    """
-    Wraps another path finder and instruments the module with
-    :func:`@typechecked <typeguard.typechecked>` if :meth:`should_instrument` returns
-    ``True``.
-
-    Should not be used directly, but rather via :func:`~.install_import_hook`.
-
-    .. versionadded:: 2.6
-    """
-
-    def __init__(self, packages: list[str] | None, original_pathfinder: MetaPathFinder):
-        self.packages = packages
-        self._original_pathfinder = original_pathfinder
-
-    def find_spec(
-        self,
-        fullname: str,
-        path: Sequence[str] | None,
-        target: types.ModuleType | None = None,
-    ) -> ModuleSpec | None:
-        if self.should_instrument(fullname):
-            spec = self._original_pathfinder.find_spec(fullname, path, target)
-            if spec is not None and isinstance(spec.loader, SourceFileLoader):
-                spec.loader = TypeguardLoader(spec.loader.name, spec.loader.path)
-                return spec
-
-        return None
-
-    def should_instrument(self, module_name: str) -> bool:
-        """
-        Determine whether the module with the given name should be instrumented.
-
-        :param module_name: full name of the module that is about to be imported (e.g.
-            ``xyz.abc``)
-
-        """
-        if self.packages is None:
-            return True
-
-        for package in self.packages:
-            if module_name == package or module_name.startswith(package + "."):
-                return True
-
-        return False
-
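-# Illustrative sketch (hypothetical subclass): the matching logic can be
-# customized by overriding should_instrument() and passing the subclass to
-# install_import_hook() via its ``cls`` parameter.
-class _ExampleFinder(TypeguardFinder):
-    def should_instrument(self, module_name: str) -> bool:
-        # Instrument everything except modules whose name ends in "_test"
-        return not module_name.endswith("_test")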
-
-class ImportHookManager:
-    """
-    A handle that can be used to uninstall the Typeguard import hook.
-    """
-
-    def __init__(self, hook: MetaPathFinder):
-        self.hook = hook
-
-    def __enter__(self) -> None:
-        pass
-
-    def __exit__(
-        self,
-        exc_type: type[BaseException],
-        exc_val: BaseException,
-        exc_tb: TracebackType,
-    ) -> None:
-        self.uninstall()
-
-    def uninstall(self) -> None:
-        """Uninstall the import hook."""
-        try:
-            sys.meta_path.remove(self.hook)
-        except ValueError:
-            pass  # already removed
-
-
-def install_import_hook(
-    packages: Iterable[str] | None = None,
-    *,
-    cls: type[TypeguardFinder] = TypeguardFinder,
-) -> ImportHookManager:
-    """
-    Install an import hook that instruments functions for automatic type checking.
-
-    This only affects modules loaded **after** this hook has been installed.
-
-    :param packages: an iterable of package names to instrument, or ``None`` to
-        instrument all packages
-    :param cls: a custom meta path finder class
-    :return: a context manager that uninstalls the hook on exit (or when you call
-        ``.uninstall()``)
-
-    .. versionadded:: 2.6
-
-    """
-    if packages is None:
-        target_packages: list[str] | None = None
-    elif isinstance(packages, str):
-        target_packages = [packages]
-    else:
-        target_packages = list(packages)
-
-    for finder in sys.meta_path:
-        if (
-            isclass(finder)
-            and finder.__name__ == "PathFinder"
-            and hasattr(finder, "find_spec")
-        ):
-            break
-    else:
-        raise RuntimeError("Cannot find a PathFinder in sys.meta_path")
-
-    hook = cls(target_packages, finder)
-    sys.meta_path.insert(0, hook)
-    return ImportHookManager(hook)
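[Reviewer note: a minimal sketch of how the hook removed above was used. The
package name "myapp" and the SelectiveFinder subclass are hypothetical; the
``cls`` parameter and the context-manager behaviour come from the deleted code.]

    from typeguard import TypeguardFinder, install_import_hook

    class SelectiveFinder(TypeguardFinder):
        # Hypothetical override: skip generated modules, otherwise defer to
        # the package-list matching implemented in should_instrument() above.
        def should_instrument(self, module_name: str) -> bool:
            if module_name.endswith("_pb2"):
                return False
            return super().should_instrument(module_name)

    # ImportHookManager doubles as a context manager: on exit the finder is
    # removed from sys.meta_path again.
    with install_import_hook(["myapp"], cls=SelectiveFinder):
        import myapp  # instrumented with runtime type checks on import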
diff --git a/pkg_resources/_vendor/typeguard/_memo.py b/pkg_resources/_vendor/typeguard/_memo.py
deleted file mode 100644
index 1d0d80c66d..0000000000
--- a/pkg_resources/_vendor/typeguard/_memo.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from __future__ import annotations
-
-from typing import Any
-
-from typeguard._config import TypeCheckConfiguration, global_config
-
-
-class TypeCheckMemo:
-    """
-    Contains information necessary for type checkers to do their work.
-
-    .. attribute:: globals
-       :type: dict[str, Any]
-
-        Dictionary of global variables to use for resolving forward references.
-
-    .. attribute:: locals
-       :type: dict[str, Any]
-
-        Dictionary of local variables to use for resolving forward references.
-
-    .. attribute:: self_type
-       :type: type | None
-
-        When running type checks within an instance method or class method, this is the
-        class object that the first argument (usually named ``self`` or ``cls``) refers
-        to.
-
-    .. attribute:: config
-       :type: TypeCheckConfiguration
-
-        Contains the configuration for a particular set of type checking operations.
-    """
-
-    __slots__ = "globals", "locals", "self_type", "config"
-
-    def __init__(
-        self,
-        globals: dict[str, Any],
-        locals: dict[str, Any],
-        *,
-        self_type: type | None = None,
-        config: TypeCheckConfiguration = global_config,
-    ):
-        self.globals = globals
-        self.locals = locals
-        self.self_type = self_type
-        self.config = config
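[Reviewer note: a brief illustration, not part of the patch, of how the memo
deleted above was built by instrumented code; the function is made up.]

    from typeguard import TypeCheckMemo

    def example(x: int) -> None:
        # Instrumented code captured the enclosing namespaces so that string
        # annotations and forward references could be resolved later.
        memo = TypeCheckMemo(globals(), locals())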
diff --git a/pkg_resources/_vendor/typeguard/_pytest_plugin.py b/pkg_resources/_vendor/typeguard/_pytest_plugin.py
deleted file mode 100644
index 7b2f494ec7..0000000000
--- a/pkg_resources/_vendor/typeguard/_pytest_plugin.py
+++ /dev/null
@@ -1,127 +0,0 @@
-from __future__ import annotations
-
-import sys
-import warnings
-from typing import TYPE_CHECKING, Any, Literal
-
-from typeguard._config import CollectionCheckStrategy, ForwardRefPolicy, global_config
-from typeguard._exceptions import InstrumentationWarning
-from typeguard._importhook import install_import_hook
-from typeguard._utils import qualified_name, resolve_reference
-
-if TYPE_CHECKING:
-    from pytest import Config, Parser
-
-
-def pytest_addoption(parser: Parser) -> None:
-    def add_ini_option(
-        opt_type: (
-            Literal["string", "paths", "pathlist", "args", "linelist", "bool"] | None
-        ),
-    ) -> None:
-        parser.addini(
-            group.options[-1].names()[0][2:],
-            group.options[-1].attrs()["help"],
-            opt_type,
-        )
-
-    group = parser.getgroup("typeguard")
-    group.addoption(
-        "--typeguard-packages",
-        action="store",
-        help="comma separated name list of packages and modules to instrument for "
-        "type checking, or :all: to instrument all modules loaded after typeguard",
-    )
-    add_ini_option("linelist")
-
-    group.addoption(
-        "--typeguard-debug-instrumentation",
-        action="store_true",
-        help="print all instrumented code to stderr",
-    )
-    add_ini_option("bool")
-
-    group.addoption(
-        "--typeguard-typecheck-fail-callback",
-        action="store",
-        help=(
-            "a module:varname (e.g. typeguard:warn_on_error) reference to a function "
-            "that is called (with the exception, and memo object as arguments) to "
-            "handle a TypeCheckError"
-        ),
-    )
-    add_ini_option("string")
-
-    group.addoption(
-        "--typeguard-forward-ref-policy",
-        action="store",
-        choices=list(ForwardRefPolicy.__members__),
-        help=(
-            "determines how to deal with unresolveable forward references in type "
-            "annotations"
-        ),
-    )
-    add_ini_option("string")
-
-    group.addoption(
-        "--typeguard-collection-check-strategy",
-        action="store",
-        choices=list(CollectionCheckStrategy.__members__),
-        help="determines how thoroughly to check collections (list, dict, etc)",
-    )
-    add_ini_option("string")
-
-
-def pytest_configure(config: Config) -> None:
-    def getoption(name: str) -> Any:
-        return config.getoption(name.replace("-", "_")) or config.getini(name)
-
-    packages: list[str] | None = []
-    if packages_option := config.getoption("typeguard_packages"):
-        packages = [pkg.strip() for pkg in packages_option.split(",")]
-    elif packages_ini := config.getini("typeguard-packages"):
-        packages = packages_ini
-
-    if packages:
-        if packages == [":all:"]:
-            packages = None
-        else:
-            already_imported_packages = sorted(
-                package for package in packages if package in sys.modules
-            )
-            if already_imported_packages:
-                warnings.warn(
-                    f"typeguard cannot check these packages because they are already "
-                    f"imported: {', '.join(already_imported_packages)}",
-                    InstrumentationWarning,
-                    stacklevel=1,
-                )
-
-        install_import_hook(packages=packages)
-
-    debug_option = getoption("typeguard-debug-instrumentation")
-    if debug_option:
-        global_config.debug_instrumentation = True
-
-    fail_callback_option = getoption("typeguard-typecheck-fail-callback")
-    if fail_callback_option:
-        callback = resolve_reference(fail_callback_option)
-        if not callable(callback):
-            raise TypeError(
-                f"{fail_callback_option} ({qualified_name(callback.__class__)}) is not "
-                f"a callable"
-            )
-
-        global_config.typecheck_fail_callback = callback
-
-    forward_ref_policy_option = getoption("typeguard-forward-ref-policy")
-    if forward_ref_policy_option:
-        forward_ref_policy = ForwardRefPolicy.__members__[forward_ref_policy_option]
-        global_config.forward_ref_policy = forward_ref_policy
-
-    collection_check_strategy_option = getoption("typeguard-collection-check-strategy")
-    if collection_check_strategy_option:
-        collection_check_strategy = CollectionCheckStrategy.__members__[
-            collection_check_strategy_option
-        ]
-        global_config.collection_check_strategy = collection_check_strategy
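[Reviewer note: for context, how the options registered above were typically
supplied; the package names are made up.]

    # On the command line (comma-separated, as parsed in pytest_configure()):
    #     pytest --typeguard-packages=mypkg,otherpkg
    #
    # Or via the "linelist" ini option added by add_ini_option():
    #     [pytest]
    #     typeguard-packages =
    #         mypkg
    #         otherpkg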
diff --git a/pkg_resources/_vendor/typeguard/_suppression.py b/pkg_resources/_vendor/typeguard/_suppression.py
deleted file mode 100644
index bbbfbfbe8e..0000000000
--- a/pkg_resources/_vendor/typeguard/_suppression.py
+++ /dev/null
@@ -1,86 +0,0 @@
-from __future__ import annotations
-
-import sys
-from collections.abc import Callable, Generator
-from contextlib import contextmanager
-from functools import update_wrapper
-from threading import Lock
-from typing import ContextManager, TypeVar, overload
-
-if sys.version_info >= (3, 10):
-    from typing import ParamSpec
-else:
-    from typing_extensions import ParamSpec
-
-P = ParamSpec("P")
-T = TypeVar("T")
-
-type_checks_suppressed = 0
-type_checks_suppress_lock = Lock()
-
-
-@overload
-def suppress_type_checks(func: Callable[P, T]) -> Callable[P, T]: ...
-
-
-@overload
-def suppress_type_checks() -> ContextManager[None]: ...
-
-
-def suppress_type_checks(
-    func: Callable[P, T] | None = None,
-) -> Callable[P, T] | ContextManager[None]:
-    """
-    Temporarily suppress all type checking.
-
-    This function has two operating modes, based on how it's used:
-
-    #. as a context manager (``with suppress_type_checks(): ...``)
-    #. as a decorator (``@suppress_type_checks``)
-
-    When used as a context manager, :func:`check_type` and any automatically
-    instrumented functions skip the actual type checking. These context managers can be
-    nested.
-
-    When used as a decorator, all type checking is suppressed while the function is
-    running.
-
-    Type checking will resume once no more context managers are active and no decorated
-    functions are running.
-
-    Both operating modes are thread-safe.
-
-    """
-
-    def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
-        global type_checks_suppressed
-
-        with type_checks_suppress_lock:
-            type_checks_suppressed += 1
-
-        assert func is not None
-        try:
-            return func(*args, **kwargs)
-        finally:
-            with type_checks_suppress_lock:
-                type_checks_suppressed -= 1
-
-    def cm() -> Generator[None, None, None]:
-        global type_checks_suppressed
-
-        with type_checks_suppress_lock:
-            type_checks_suppressed += 1
-
-        try:
-            yield
-        finally:
-            with type_checks_suppress_lock:
-                type_checks_suppressed -= 1
-
-    if func is None:
-        # Context manager mode
-        return contextmanager(cm)()
-    else:
-        # Decorator mode
-        update_wrapper(wrapper, func)
-        return wrapper
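[Reviewer note: a short usage sketch for the two operating modes documented in
the docstring above; illustrative only.]

    from typeguard import check_type, suppress_type_checks

    # Context-manager mode: suppression is counted, so nesting is safe
    with suppress_type_checks():
        check_type("not an int", int)  # skipped, no TypeCheckError

    # Decorator mode: checks stay off while the function runs
    @suppress_type_checks
    def load_untrusted_blob() -> None:
        check_type(object(), str)  # also skipped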
diff --git a/pkg_resources/_vendor/typeguard/_transformer.py b/pkg_resources/_vendor/typeguard/_transformer.py
deleted file mode 100644
index 13ac3630e6..0000000000
--- a/pkg_resources/_vendor/typeguard/_transformer.py
+++ /dev/null
@@ -1,1229 +0,0 @@
-from __future__ import annotations
-
-import ast
-import builtins
-import sys
-import typing
-from ast import (
-    AST,
-    Add,
-    AnnAssign,
-    Assign,
-    AsyncFunctionDef,
-    Attribute,
-    AugAssign,
-    BinOp,
-    BitAnd,
-    BitOr,
-    BitXor,
-    Call,
-    ClassDef,
-    Constant,
-    Dict,
-    Div,
-    Expr,
-    Expression,
-    FloorDiv,
-    FunctionDef,
-    If,
-    Import,
-    ImportFrom,
-    Index,
-    List,
-    Load,
-    LShift,
-    MatMult,
-    Mod,
-    Module,
-    Mult,
-    Name,
-    NamedExpr,
-    NodeTransformer,
-    NodeVisitor,
-    Pass,
-    Pow,
-    Return,
-    RShift,
-    Starred,
-    Store,
-    Sub,
-    Subscript,
-    Tuple,
-    Yield,
-    YieldFrom,
-    alias,
-    copy_location,
-    expr,
-    fix_missing_locations,
-    keyword,
-    walk,
-)
-from collections import defaultdict
-from collections.abc import Generator, Sequence
-from contextlib import contextmanager
-from copy import deepcopy
-from dataclasses import dataclass, field
-from typing import Any, ClassVar, cast, overload
-
-generator_names = (
-    "typing.Generator",
-    "collections.abc.Generator",
-    "typing.Iterator",
-    "collections.abc.Iterator",
-    "typing.Iterable",
-    "collections.abc.Iterable",
-    "typing.AsyncIterator",
-    "collections.abc.AsyncIterator",
-    "typing.AsyncIterable",
-    "collections.abc.AsyncIterable",
-    "typing.AsyncGenerator",
-    "collections.abc.AsyncGenerator",
-)
-anytype_names = (
-    "typing.Any",
-    "typing_extensions.Any",
-)
-literal_names = (
-    "typing.Literal",
-    "typing_extensions.Literal",
-)
-annotated_names = (
-    "typing.Annotated",
-    "typing_extensions.Annotated",
-)
-ignore_decorators = (
-    "typing.no_type_check",
-    "typeguard.typeguard_ignore",
-)
-aug_assign_functions = {
-    Add: "iadd",
-    Sub: "isub",
-    Mult: "imul",
-    MatMult: "imatmul",
-    Div: "itruediv",
-    FloorDiv: "ifloordiv",
-    Mod: "imod",
-    Pow: "ipow",
-    LShift: "ilshift",
-    RShift: "irshift",
-    BitAnd: "iand",
-    BitXor: "ixor",
-    BitOr: "ior",
-}
-
-
-@dataclass
-class TransformMemo:
-    node: Module | ClassDef | FunctionDef | AsyncFunctionDef | None
-    parent: TransformMemo | None
-    path: tuple[str, ...]
-    joined_path: Constant = field(init=False)
-    return_annotation: expr | None = None
-    yield_annotation: expr | None = None
-    send_annotation: expr | None = None
-    is_async: bool = False
-    local_names: set[str] = field(init=False, default_factory=set)
-    imported_names: dict[str, str] = field(init=False, default_factory=dict)
-    ignored_names: set[str] = field(init=False, default_factory=set)
-    load_names: defaultdict[str, dict[str, Name]] = field(
-        init=False, default_factory=lambda: defaultdict(dict)
-    )
-    has_yield_expressions: bool = field(init=False, default=False)
-    has_return_expressions: bool = field(init=False, default=False)
-    memo_var_name: Name | None = field(init=False, default=None)
-    should_instrument: bool = field(init=False, default=True)
-    variable_annotations: dict[str, expr] = field(init=False, default_factory=dict)
-    configuration_overrides: dict[str, Any] = field(init=False, default_factory=dict)
-    code_inject_index: int = field(init=False, default=0)
-
-    def __post_init__(self) -> None:
-        elements: list[str] = []
-        memo = self
-        while isinstance(memo.node, (ClassDef, FunctionDef, AsyncFunctionDef)):
-            elements.insert(0, memo.node.name)
-            if not memo.parent:
-                break
-
-            memo = memo.parent
-            if isinstance(memo.node, (FunctionDef, AsyncFunctionDef)):
-                elements.insert(0, "")
-
-        self.joined_path = Constant(".".join(elements))
-
-        # Figure out where to insert instrumentation code
-        if self.node:
-            for index, child in enumerate(self.node.body):
-                if isinstance(child, ImportFrom) and child.module == "__future__":
-                    # (module only) __future__ imports must come first
-                    continue
-                elif (
-                    isinstance(child, Expr)
-                    and isinstance(child.value, Constant)
-                    and isinstance(child.value.value, str)
-                ):
-                    continue  # docstring
-
-                self.code_inject_index = index
-                break
-
-    def get_unused_name(self, name: str) -> str:
-        memo: TransformMemo | None = self
-        while memo is not None:
-            if name in memo.local_names:
-                memo = self
-                name += "_"
-            else:
-                memo = memo.parent
-
-        self.local_names.add(name)
-        return name
-
-    def is_ignored_name(self, expression: expr | Expr | None) -> bool:
-        top_expression = (
-            expression.value if isinstance(expression, Expr) else expression
-        )
-
-        if isinstance(top_expression, Attribute) and isinstance(
-            top_expression.value, Name
-        ):
-            name = top_expression.value.id
-        elif isinstance(top_expression, Name):
-            name = top_expression.id
-        else:
-            return False
-
-        memo: TransformMemo | None = self
-        while memo is not None:
-            if name in memo.ignored_names:
-                return True
-
-            memo = memo.parent
-
-        return False
-
-    def get_memo_name(self) -> Name:
-        if not self.memo_var_name:
-            self.memo_var_name = Name(id="memo", ctx=Load())
-
-        return self.memo_var_name
-
-    def get_import(self, module: str, name: str) -> Name:
-        if module in self.load_names and name in self.load_names[module]:
-            return self.load_names[module][name]
-
-        qualified_name = f"{module}.{name}"
-        if name in self.imported_names and self.imported_names[name] == qualified_name:
-            return Name(id=name, ctx=Load())
-
-        alias = self.get_unused_name(name)
-        node = self.load_names[module][name] = Name(id=alias, ctx=Load())
-        self.imported_names[name] = qualified_name
-        return node
-
-    def insert_imports(self, node: Module | FunctionDef | AsyncFunctionDef) -> None:
-        """Insert imports needed by injected code."""
-        if not self.load_names:
-            return
-
-        # Insert imports after any "from __future__ ..." imports and any docstring
-        for modulename, names in self.load_names.items():
-            aliases = [
-                alias(orig_name, new_name.id if orig_name != new_name.id else None)
-                for orig_name, new_name in sorted(names.items())
-            ]
-            node.body.insert(self.code_inject_index, ImportFrom(modulename, aliases, 0))
-
-    def name_matches(self, expression: expr | Expr | None, *names: str) -> bool:
-        if expression is None:
-            return False
-
-        path: list[str] = []
-        top_expression = (
-            expression.value if isinstance(expression, Expr) else expression
-        )
-
-        if isinstance(top_expression, Subscript):
-            top_expression = top_expression.value
-        elif isinstance(top_expression, Call):
-            top_expression = top_expression.func
-
-        while isinstance(top_expression, Attribute):
-            path.insert(0, top_expression.attr)
-            top_expression = top_expression.value
-
-        if not isinstance(top_expression, Name):
-            return False
-
-        if top_expression.id in self.imported_names:
-            translated = self.imported_names[top_expression.id]
-        elif hasattr(builtins, top_expression.id):
-            translated = "builtins." + top_expression.id
-        else:
-            translated = top_expression.id
-
-        path.insert(0, translated)
-        joined_path = ".".join(path)
-        if joined_path in names:
-            return True
-        elif self.parent:
-            return self.parent.name_matches(expression, *names)
-        else:
-            return False
-
-    def get_config_keywords(self) -> list[keyword]:
-        if self.parent and isinstance(self.parent.node, ClassDef):
-            overrides = self.parent.configuration_overrides.copy()
-        else:
-            overrides = {}
-
-        overrides.update(self.configuration_overrides)
-        return [keyword(key, value) for key, value in overrides.items()]
-
-
-class NameCollector(NodeVisitor):
-    def __init__(self) -> None:
-        self.names: set[str] = set()
-
-    def visit_Import(self, node: Import) -> None:
-        for name in node.names:
-            self.names.add(name.asname or name.name)
-
-    def visit_ImportFrom(self, node: ImportFrom) -> None:
-        for name in node.names:
-            self.names.add(name.asname or name.name)
-
-    def visit_Assign(self, node: Assign) -> None:
-        for target in node.targets:
-            if isinstance(target, Name):
-                self.names.add(target.id)
-
-    def visit_NamedExpr(self, node: NamedExpr) -> Any:
-        if isinstance(node.target, Name):
-            self.names.add(node.target.id)
-
-    def visit_FunctionDef(self, node: FunctionDef) -> None:
-        pass
-
-    def visit_ClassDef(self, node: ClassDef) -> None:
-        pass
-
-
-class GeneratorDetector(NodeVisitor):
-    """Detects if a function node is a generator function."""
-
-    contains_yields: bool = False
-    in_root_function: bool = False
-
-    def visit_Yield(self, node: Yield) -> Any:
-        self.contains_yields = True
-
-    def visit_YieldFrom(self, node: YieldFrom) -> Any:
-        self.contains_yields = True
-
-    def visit_ClassDef(self, node: ClassDef) -> Any:
-        pass
-
-    def visit_FunctionDef(self, node: FunctionDef | AsyncFunctionDef) -> Any:
-        if not self.in_root_function:
-            self.in_root_function = True
-            self.generic_visit(node)
-            self.in_root_function = False
-
-    def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> Any:
-        self.visit_FunctionDef(node)
-
-
-class AnnotationTransformer(NodeTransformer):
-    type_substitutions: ClassVar[dict[str, tuple[str, str]]] = {
-        "builtins.dict": ("typing", "Dict"),
-        "builtins.list": ("typing", "List"),
-        "builtins.tuple": ("typing", "Tuple"),
-        "builtins.set": ("typing", "Set"),
-        "builtins.frozenset": ("typing", "FrozenSet"),
-    }
-
-    def __init__(self, transformer: TypeguardTransformer):
-        self.transformer = transformer
-        self._memo = transformer._memo
-        self._level = 0
-
-    def visit(self, node: AST) -> Any:
-        # Don't process Literals
-        if isinstance(node, expr) and self._memo.name_matches(node, *literal_names):
-            return node
-
-        self._level += 1
-        new_node = super().visit(node)
-        self._level -= 1
-
-        if isinstance(new_node, Expression) and not hasattr(new_node, "body"):
-            return None
-
-        # Return None if this new node matches a variation of typing.Any
-        if (
-            self._level == 0
-            and isinstance(new_node, expr)
-            and self._memo.name_matches(new_node, *anytype_names)
-        ):
-            return None
-
-        return new_node
-
-    def visit_BinOp(self, node: BinOp) -> Any:
-        self.generic_visit(node)
-
-        if isinstance(node.op, BitOr):
-            # If either branch of the BinOp has been transformed to `None`, it means
-            # that a type in the union was ignored, so the entire annotation should be
-            # ignored
-            if not hasattr(node, "left") or not hasattr(node, "right"):
-                return None
-
-            # Return Any if either side is Any
-            if self._memo.name_matches(node.left, *anytype_names):
-                return node.left
-            elif self._memo.name_matches(node.right, *anytype_names):
-                return node.right
-
-            if sys.version_info < (3, 10):
-                union_name = self.transformer._get_import("typing", "Union")
-                return Subscript(
-                    value=union_name,
-                    slice=Index(
-                        Tuple(elts=[node.left, node.right], ctx=Load()), ctx=Load()
-                    ),
-                    ctx=Load(),
-                )
-
-        return node
-
-    def visit_Attribute(self, node: Attribute) -> Any:
-        if self._memo.is_ignored_name(node):
-            return None
-
-        return node
-
-    def visit_Subscript(self, node: Subscript) -> Any:
-        if self._memo.is_ignored_name(node.value):
-            return None
-
-        # The subscript of typing(_extensions).Literal can be any arbitrary string, so
-        # don't try to evaluate it as code
-        if node.slice:
-            if isinstance(node.slice, Index):
-                # Python 3.8
-                slice_value = node.slice.value  # type: ignore[attr-defined]
-            else:
-                slice_value = node.slice
-
-            if isinstance(slice_value, Tuple):
-                if self._memo.name_matches(node.value, *annotated_names):
-                    # Only treat the first argument to typing.Annotated as a potential
-                    # forward reference
-                    items = cast(
-                        typing.List[expr],
-                        [self.visit(slice_value.elts[0])] + slice_value.elts[1:],
-                    )
-                else:
-                    items = cast(
-                        typing.List[expr],
-                        [self.visit(item) for item in slice_value.elts],
-                    )
-
-                # If this is a Union and any of the items is Any, erase the entire
-                # annotation
-                if self._memo.name_matches(node.value, "typing.Union") and any(
-                    item is None
-                    or (
-                        isinstance(item, expr)
-                        and self._memo.name_matches(item, *anytype_names)
-                    )
-                    for item in items
-                ):
-                    return None
-
-                # If all items in the subscript were Any, erase the subscript entirely
-                if all(item is None for item in items):
-                    return node.value
-
-                for index, item in enumerate(items):
-                    if item is None:
-                        items[index] = self.transformer._get_import("typing", "Any")
-
-                slice_value.elts = items
-            else:
-                self.generic_visit(node)
-
-                # If the transformer erased the slice entirely, just return the node
-                # value without the subscript (unless it's Optional, in which case
-                # erase the node entirely)
-                if self._memo.name_matches(
-                    node.value, "typing.Optional"
-                ) and not hasattr(node, "slice"):
-                    return None
-                if sys.version_info >= (3, 9) and not hasattr(node, "slice"):
-                    return node.value
-                elif sys.version_info < (3, 9) and not hasattr(node.slice, "value"):
-                    return node.value
-
-        return node
-
-    def visit_Name(self, node: Name) -> Any:
-        if self._memo.is_ignored_name(node):
-            return None
-
-        if sys.version_info < (3, 9):
-            for typename, substitute in self.type_substitutions.items():
-                if self._memo.name_matches(node, typename):
-                    new_node = self.transformer._get_import(*substitute)
-                    return copy_location(new_node, node)
-
-        return node
-
-    def visit_Call(self, node: Call) -> Any:
-        # Don't recurse into calls
-        return node
-
-    def visit_Constant(self, node: Constant) -> Any:
-        if isinstance(node.value, str):
-            expression = ast.parse(node.value, mode="eval")
-            new_node = self.visit(expression)
-            if new_node:
-                return copy_location(new_node.body, node)
-            else:
-                return None
-
-        return node
-
-
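[Reviewer note: to make the rewriting above concrete, an illustrative example
of AnnotationTransformer's effect on Python 3.8, per the type_substitutions
table and the BitOr handling; simplified.]

    # Source annotation:
    #     def f(x: dict[str, int | None]) -> Any: ...
    # Annotation actually used for the runtime check (roughly):
    #     typing.Dict[str, typing.Union[int, None]]
    # The Any return annotation is erased, so no return check is emitted.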
-class TypeguardTransformer(NodeTransformer):
-    def __init__(
-        self, target_path: Sequence[str] | None = None, target_lineno: int | None = None
-    ) -> None:
-        self._target_path = tuple(target_path) if target_path else None
-        self._memo = self._module_memo = TransformMemo(None, None, ())
-        self.names_used_in_annotations: set[str] = set()
-        self.target_node: FunctionDef | AsyncFunctionDef | None = None
-        self.target_lineno = target_lineno
-
-    def generic_visit(self, node: AST) -> AST:
-        has_non_empty_body_initially = bool(getattr(node, "body", None))
-        initial_type = type(node)
-
-        node = super().generic_visit(node)
-
-        if (
-            type(node) is initial_type
-            and has_non_empty_body_initially
-            and hasattr(node, "body")
-            and not node.body
-        ):
-            # If we still have the same node type after transformation
-            # but we've optimised its body away, we add a `pass` statement.
-            node.body = [Pass()]
-
-        return node
-
-    @contextmanager
-    def _use_memo(
-        self, node: ClassDef | FunctionDef | AsyncFunctionDef
-    ) -> Generator[None, Any, None]:
-        new_memo = TransformMemo(node, self._memo, self._memo.path + (node.name,))
-        old_memo = self._memo
-        self._memo = new_memo
-
-        if isinstance(node, (FunctionDef, AsyncFunctionDef)):
-            new_memo.should_instrument = (
-                self._target_path is None or new_memo.path == self._target_path
-            )
-            if new_memo.should_instrument:
-                # Check if the function is a generator function
-                detector = GeneratorDetector()
-                detector.visit(node)
-
-                # Extract yield, send and return types where possible from a subscripted
-                # annotation like Generator[int, str, bool]
-                return_annotation = deepcopy(node.returns)
-                if detector.contains_yields and new_memo.name_matches(
-                    return_annotation, *generator_names
-                ):
-                    if isinstance(return_annotation, Subscript):
-                        annotation_slice = return_annotation.slice
-
-                        # Python < 3.9
-                        if isinstance(annotation_slice, Index):
-                            annotation_slice = (
-                                annotation_slice.value  # type: ignore[attr-defined]
-                            )
-
-                        if isinstance(annotation_slice, Tuple):
-                            items = annotation_slice.elts
-                        else:
-                            items = [annotation_slice]
-
-                        if len(items) > 0:
-                            new_memo.yield_annotation = self._convert_annotation(
-                                items[0]
-                            )
-
-                        if len(items) > 1:
-                            new_memo.send_annotation = self._convert_annotation(
-                                items[1]
-                            )
-
-                        if len(items) > 2:
-                            new_memo.return_annotation = self._convert_annotation(
-                                items[2]
-                            )
-                else:
-                    new_memo.return_annotation = self._convert_annotation(
-                        return_annotation
-                    )
-
-        if isinstance(node, AsyncFunctionDef):
-            new_memo.is_async = True
-
-        yield
-        self._memo = old_memo
-
-    def _get_import(self, module: str, name: str) -> Name:
-        memo = self._memo if self._target_path else self._module_memo
-        return memo.get_import(module, name)
-
-    @overload
-    def _convert_annotation(self, annotation: None) -> None: ...
-
-    @overload
-    def _convert_annotation(self, annotation: expr) -> expr: ...
-
-    def _convert_annotation(self, annotation: expr | None) -> expr | None:
-        if annotation is None:
-            return None
-
-        # Convert PEP 604 unions (x | y) and generic built-in collections where
-        # necessary, and undo forward references
-        new_annotation = cast(expr, AnnotationTransformer(self).visit(annotation))
-        if isinstance(new_annotation, expr):
-            new_annotation = ast.copy_location(new_annotation, annotation)
-
-            # Store names used in the annotation
-            names = {node.id for node in walk(new_annotation) if isinstance(node, Name)}
-            self.names_used_in_annotations.update(names)
-
-        return new_annotation
-
-    def visit_Name(self, node: Name) -> Name:
-        self._memo.local_names.add(node.id)
-        return node
-
-    def visit_Module(self, node: Module) -> Module:
-        self._module_memo = self._memo = TransformMemo(node, None, ())
-        self.generic_visit(node)
-        self._module_memo.insert_imports(node)
-
-        fix_missing_locations(node)
-        return node
-
-    def visit_Import(self, node: Import) -> Import:
-        for name in node.names:
-            self._memo.local_names.add(name.asname or name.name)
-            self._memo.imported_names[name.asname or name.name] = name.name
-
-        return node
-
-    def visit_ImportFrom(self, node: ImportFrom) -> ImportFrom:
-        for name in node.names:
-            if name.name != "*":
-                alias = name.asname or name.name
-                self._memo.local_names.add(alias)
-                self._memo.imported_names[alias] = f"{node.module}.{name.name}"
-
-        return node
-
-    def visit_ClassDef(self, node: ClassDef) -> ClassDef | None:
-        self._memo.local_names.add(node.name)
-
-        # Eliminate top level classes not belonging to the target path
-        if (
-            self._target_path is not None
-            and not self._memo.path
-            and node.name != self._target_path[0]
-        ):
-            return None
-
-        with self._use_memo(node):
-            for decorator in node.decorator_list.copy():
-                if self._memo.name_matches(decorator, "typeguard.typechecked"):
-                    # Remove the decorator to prevent duplicate instrumentation
-                    node.decorator_list.remove(decorator)
-
-                    # Store any configuration overrides
-                    if isinstance(decorator, Call) and decorator.keywords:
-                        self._memo.configuration_overrides.update(
-                            {kw.arg: kw.value for kw in decorator.keywords if kw.arg}
-                        )
-
-            self.generic_visit(node)
-            return node
-
-    def visit_FunctionDef(
-        self, node: FunctionDef | AsyncFunctionDef
-    ) -> FunctionDef | AsyncFunctionDef | None:
-        """
-        Injects type checks for function arguments, and for a return of None if the
-        function is annotated to return something other than Any or None, and the body
-        ends without an explicit "return".
-
-        """
-        self._memo.local_names.add(node.name)
-
-        # Eliminate top level functions not belonging to the target path
-        if (
-            self._target_path is not None
-            and not self._memo.path
-            and node.name != self._target_path[0]
-        ):
-            return None
-
-        # Skip instrumentation if we're instrumenting the whole module and the function
-        # contains either @no_type_check or @typeguard_ignore
-        if self._target_path is None:
-            for decorator in node.decorator_list:
-                if self._memo.name_matches(decorator, *ignore_decorators):
-                    return node
-
-        with self._use_memo(node):
-            arg_annotations: dict[str, Any] = {}
-            if self._target_path is None or self._memo.path == self._target_path:
-                # Find line number we're supposed to match against
-                if node.decorator_list:
-                    first_lineno = node.decorator_list[0].lineno
-                else:
-                    first_lineno = node.lineno
-
-                for decorator in node.decorator_list.copy():
-                    if self._memo.name_matches(decorator, "typing.overload"):
-                        # Remove overloads entirely
-                        return None
-                    elif self._memo.name_matches(decorator, "typeguard.typechecked"):
-                        # Remove the decorator to prevent duplicate instrumentation
-                        node.decorator_list.remove(decorator)
-
-                        # Store any configuration overrides
-                        if isinstance(decorator, Call) and decorator.keywords:
-                            self._memo.configuration_overrides = {
-                                kw.arg: kw.value for kw in decorator.keywords if kw.arg
-                            }
-
-                if self.target_lineno == first_lineno:
-                    assert self.target_node is None
-                    self.target_node = node
-                    if node.decorator_list:
-                        self.target_lineno = node.decorator_list[0].lineno
-                    else:
-                        self.target_lineno = node.lineno
-
-                all_args = node.args.args + node.args.kwonlyargs + node.args.posonlyargs
-
-                # Ensure that any types shadowed by the positional or keyword-only
-                # argument names are ignored in this function
-                for arg in all_args:
-                    self._memo.ignored_names.add(arg.arg)
-
-                # Ensure that any type shadowed by the variable positional argument
-                # name (e.g. "args" in *args) is ignored in this function
-                if node.args.vararg:
-                    self._memo.ignored_names.add(node.args.vararg.arg)
-
-                # Ensure that any type shadowed by the variable keyword argument name
-                # (e.g. "kwargs" in **kwargs) is ignored in this function
-                if node.args.kwarg:
-                    self._memo.ignored_names.add(node.args.kwarg.arg)
-
-                for arg in all_args:
-                    annotation = self._convert_annotation(deepcopy(arg.annotation))
-                    if annotation:
-                        arg_annotations[arg.arg] = annotation
-
-                if node.args.vararg:
-                    annotation_ = self._convert_annotation(node.args.vararg.annotation)
-                    if annotation_:
-                        if sys.version_info >= (3, 9):
-                            container = Name("tuple", ctx=Load())
-                        else:
-                            container = self._get_import("typing", "Tuple")
-
-                        subscript_slice: Tuple | Index = Tuple(
-                            [
-                                annotation_,
-                                Constant(Ellipsis),
-                            ],
-                            ctx=Load(),
-                        )
-                        if sys.version_info < (3, 9):
-                            subscript_slice = Index(subscript_slice, ctx=Load())
-
-                        arg_annotations[node.args.vararg.arg] = Subscript(
-                            container, subscript_slice, ctx=Load()
-                        )
-
-                if node.args.kwarg:
-                    annotation_ = self._convert_annotation(node.args.kwarg.annotation)
-                    if annotation_:
-                        if sys.version_info >= (3, 9):
-                            container = Name("dict", ctx=Load())
-                        else:
-                            container = self._get_import("typing", "Dict")
-
-                        subscript_slice = Tuple(
-                            [
-                                Name("str", ctx=Load()),
-                                annotation_,
-                            ],
-                            ctx=Load(),
-                        )
-                        if sys.version_info < (3, 9):
-                            subscript_slice = Index(subscript_slice, ctx=Load())
-
-                        arg_annotations[node.args.kwarg.arg] = Subscript(
-                            container, subscript_slice, ctx=Load()
-                        )
-
-                if arg_annotations:
-                    self._memo.variable_annotations.update(arg_annotations)
-
-            self.generic_visit(node)
-
-            if arg_annotations:
-                annotations_dict = Dict(
-                    keys=[Constant(key) for key in arg_annotations.keys()],
-                    values=[
-                        Tuple([Name(key, ctx=Load()), annotation], ctx=Load())
-                        for key, annotation in arg_annotations.items()
-                    ],
-                )
-                func_name = self._get_import(
-                    "typeguard._functions", "check_argument_types"
-                )
-                args = [
-                    self._memo.joined_path,
-                    annotations_dict,
-                    self._memo.get_memo_name(),
-                ]
-                node.body.insert(
-                    self._memo.code_inject_index, Expr(Call(func_name, args, []))
-                )
-
-            # Add a checked "return None" to the end if there's no explicit return
-            # Skip if the return annotation is None or Any
-            if (
-                self._memo.return_annotation
-                and (not self._memo.is_async or not self._memo.has_yield_expressions)
-                and not isinstance(node.body[-1], Return)
-                and (
-                    not isinstance(self._memo.return_annotation, Constant)
-                    or self._memo.return_annotation.value is not None
-                )
-            ):
-                func_name = self._get_import(
-                    "typeguard._functions", "check_return_type"
-                )
-                return_node = Return(
-                    Call(
-                        func_name,
-                        [
-                            self._memo.joined_path,
-                            Constant(None),
-                            self._memo.return_annotation,
-                            self._memo.get_memo_name(),
-                        ],
-                        [],
-                    )
-                )
-
-                # Replace a placeholder "pass" at the end
-                if isinstance(node.body[-1], Pass):
-                    copy_location(return_node, node.body[-1])
-                    del node.body[-1]
-
-                node.body.append(return_node)
-
-            # Insert code to create the call memo, if it was ever needed for this
-            # function
-            if self._memo.memo_var_name:
-                memo_kwargs: dict[str, Any] = {}
-                if self._memo.parent and isinstance(self._memo.parent.node, ClassDef):
-                    for decorator in node.decorator_list:
-                        if (
-                            isinstance(decorator, Name)
-                            and decorator.id == "staticmethod"
-                        ):
-                            break
-                        elif (
-                            isinstance(decorator, Name)
-                            and decorator.id == "classmethod"
-                        ):
-                            memo_kwargs["self_type"] = Name(
-                                id=node.args.args[0].arg, ctx=Load()
-                            )
-                            break
-                    else:
-                        if node.args.args:
-                            if node.name == "__new__":
-                                memo_kwargs["self_type"] = Name(
-                                    id=node.args.args[0].arg, ctx=Load()
-                                )
-                            else:
-                                memo_kwargs["self_type"] = Attribute(
-                                    Name(id=node.args.args[0].arg, ctx=Load()),
-                                    "__class__",
-                                    ctx=Load(),
-                                )
-
-                # Construct the function reference
-                # Nested functions get special treatment: the function name is added
-                # to free variables (and the closure of the resulting function)
-                names: list[str] = [node.name]
-                memo = self._memo.parent
-                while memo:
-                    if isinstance(memo.node, (FunctionDef, AsyncFunctionDef)):
-                        # This is a nested function. Use the function name as-is.
-                        del names[:-1]
-                        break
-                    elif not isinstance(memo.node, ClassDef):
-                        break
-
-                    names.insert(0, memo.node.name)
-                    memo = memo.parent
-
-                config_keywords = self._memo.get_config_keywords()
-                if config_keywords:
-                    memo_kwargs["config"] = Call(
-                        self._get_import("dataclasses", "replace"),
-                        [self._get_import("typeguard._config", "global_config")],
-                        config_keywords,
-                    )
-
-                self._memo.memo_var_name.id = self._memo.get_unused_name("memo")
-                memo_store_name = Name(id=self._memo.memo_var_name.id, ctx=Store())
-                globals_call = Call(Name(id="globals", ctx=Load()), [], [])
-                locals_call = Call(Name(id="locals", ctx=Load()), [], [])
-                memo_expr = Call(
-                    self._get_import("typeguard", "TypeCheckMemo"),
-                    [globals_call, locals_call],
-                    [keyword(key, value) for key, value in memo_kwargs.items()],
-                )
-                node.body.insert(
-                    self._memo.code_inject_index,
-                    Assign([memo_store_name], memo_expr),
-                )
-
-                self._memo.insert_imports(node)
-
-                # Special case the __new__() method to create a local alias from the
-                # class name to the first argument (usually "cls")
-                if (
-                    isinstance(node, FunctionDef)
-                    and node.args
-                    and self._memo.parent is not None
-                    and isinstance(self._memo.parent.node, ClassDef)
-                    and node.name == "__new__"
-                ):
-                    first_args_expr = Name(node.args.args[0].arg, ctx=Load())
-                    cls_name = Name(self._memo.parent.node.name, ctx=Store())
-                    node.body.insert(
-                        self._memo.code_inject_index,
-                        Assign([cls_name], first_args_expr),
-                    )
-
-                # Remove any placeholder "pass" at the end
-                if isinstance(node.body[-1], Pass):
-                    del node.body[-1]
-
-        return node
-
-    def visit_AsyncFunctionDef(
-        self, node: AsyncFunctionDef
-    ) -> FunctionDef | AsyncFunctionDef | None:
-        return self.visit_FunctionDef(node)
-
-    def visit_Return(self, node: Return) -> Return:
-        """This injects type checks into "return" statements."""
-        self.generic_visit(node)
-        if (
-            self._memo.return_annotation
-            and self._memo.should_instrument
-            and not self._memo.is_ignored_name(self._memo.return_annotation)
-        ):
-            func_name = self._get_import("typeguard._functions", "check_return_type")
-            old_node = node
-            retval = old_node.value or Constant(None)
-            node = Return(
-                Call(
-                    func_name,
-                    [
-                        self._memo.joined_path,
-                        retval,
-                        self._memo.return_annotation,
-                        self._memo.get_memo_name(),
-                    ],
-                    [],
-                )
-            )
-            copy_location(node, old_node)
-
-        return node
-
-    def visit_Yield(self, node: Yield) -> Yield | Call:
-        """
-        This injects type checks into "yield" expressions, checking both the yielded
-        value and the value sent back to the generator, when appropriate.
-
-        """
-        self._memo.has_yield_expressions = True
-        self.generic_visit(node)
-
-        if (
-            self._memo.yield_annotation
-            and self._memo.should_instrument
-            and not self._memo.is_ignored_name(self._memo.yield_annotation)
-        ):
-            func_name = self._get_import("typeguard._functions", "check_yield_type")
-            yieldval = node.value or Constant(None)
-            node.value = Call(
-                func_name,
-                [
-                    self._memo.joined_path,
-                    yieldval,
-                    self._memo.yield_annotation,
-                    self._memo.get_memo_name(),
-                ],
-                [],
-            )
-
-        if (
-            self._memo.send_annotation
-            and self._memo.should_instrument
-            and not self._memo.is_ignored_name(self._memo.send_annotation)
-        ):
-            func_name = self._get_import("typeguard._functions", "check_send_type")
-            old_node = node
-            call_node = Call(
-                func_name,
-                [
-                    self._memo.joined_path,
-                    old_node,
-                    self._memo.send_annotation,
-                    self._memo.get_memo_name(),
-                ],
-                [],
-            )
-            copy_location(call_node, old_node)
-            return call_node
-
-        return node
-
-    def visit_AnnAssign(self, node: AnnAssign) -> Any:
-        """
-        This injects a type check into a local variable annotation-assignment within a
-        function body.
-
-        """
-        self.generic_visit(node)
-
-        if (
-            isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef))
-            and node.annotation
-            and isinstance(node.target, Name)
-        ):
-            self._memo.ignored_names.add(node.target.id)
-            annotation = self._convert_annotation(deepcopy(node.annotation))
-            if annotation:
-                self._memo.variable_annotations[node.target.id] = annotation
-                if node.value:
-                    func_name = self._get_import(
-                        "typeguard._functions", "check_variable_assignment"
-                    )
-                    node.value = Call(
-                        func_name,
-                        [
-                            node.value,
-                            Constant(node.target.id),
-                            annotation,
-                            self._memo.get_memo_name(),
-                        ],
-                        [],
-                    )
-
-        return node
-
-    def visit_Assign(self, node: Assign) -> Any:
-        """
-        This injects a type check into a local variable assignment within a function
-        body. The variable must have been annotated earlier in the function body.
-
-        """
-        self.generic_visit(node)
-
-        # Only instrument function-local assignments
-        if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)):
-            targets: list[dict[Constant, expr | None]] = []
-            check_required = False
-            for target in node.targets:
-                elts: Sequence[expr]
-                if isinstance(target, Name):
-                    elts = [target]
-                elif isinstance(target, Tuple):
-                    elts = target.elts
-                else:
-                    continue
-
-                annotations_: dict[Constant, expr | None] = {}
-                for exp in elts:
-                    prefix = ""
-                    if isinstance(exp, Starred):
-                        exp = exp.value
-                        prefix = "*"
-
-                    if isinstance(exp, Name):
-                        self._memo.ignored_names.add(exp.id)
-                        name = prefix + exp.id
-                        annotation = self._memo.variable_annotations.get(exp.id)
-                        if annotation:
-                            annotations_[Constant(name)] = annotation
-                            check_required = True
-                        else:
-                            annotations_[Constant(name)] = None
-
-                targets.append(annotations_)
-
-            if check_required:
-                # Replace missing annotations with typing.Any
-                for item in targets:
-                    for key, expression in item.items():
-                        if expression is None:
-                            item[key] = self._get_import("typing", "Any")
-
-                if len(targets) == 1 and len(targets[0]) == 1:
-                    func_name = self._get_import(
-                        "typeguard._functions", "check_variable_assignment"
-                    )
-                    target_varname = next(iter(targets[0]))
-                    node.value = Call(
-                        func_name,
-                        [
-                            node.value,
-                            target_varname,
-                            targets[0][target_varname],
-                            self._memo.get_memo_name(),
-                        ],
-                        [],
-                    )
-                elif targets:
-                    func_name = self._get_import(
-                        "typeguard._functions", "check_multi_variable_assignment"
-                    )
-                    targets_arg = List(
-                        [
-                            Dict(keys=list(target), values=list(target.values()))
-                            for target in targets
-                        ],
-                        ctx=Load(),
-                    )
-                    node.value = Call(
-                        func_name,
-                        [node.value, targets_arg, self._memo.get_memo_name()],
-                        [],
-                    )
-
-        return node
-
-    def visit_NamedExpr(self, node: NamedExpr) -> Any:
-        """This injects a type check into an assignment expression (a := foo())."""
-        self.generic_visit(node)
-
-        # Only instrument function-local assignments
-        if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)) and isinstance(
-            node.target, Name
-        ):
-            self._memo.ignored_names.add(node.target.id)
-
-            # Bail out if no matching annotation is found
-            annotation = self._memo.variable_annotations.get(node.target.id)
-            if annotation is None:
-                return node
-
-            func_name = self._get_import(
-                "typeguard._functions", "check_variable_assignment"
-            )
-            node.value = Call(
-                func_name,
-                [
-                    node.value,
-                    Constant(node.target.id),
-                    annotation,
-                    self._memo.get_memo_name(),
-                ],
-                [],
-            )
-
-        return node
-
-    def visit_AugAssign(self, node: AugAssign) -> Any:
-        """
-        This injects a type check into an augmented assignment expression (a += 1).
-
-        """
-        self.generic_visit(node)
-
-        # Only instrument function-local assignments
-        if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)) and isinstance(
-            node.target, Name
-        ):
-            # Bail out if no matching annotation is found
-            annotation = self._memo.variable_annotations.get(node.target.id)
-            if annotation is None:
-                return node
-
-            # Bail out if the operator is not found (newer Python version?)
-            try:
-                operator_func_name = aug_assign_functions[node.op.__class__]
-            except KeyError:
-                return node
-
-            operator_func = self._get_import("operator", operator_func_name)
-            operator_call = Call(
-                operator_func, [Name(node.target.id, ctx=Load()), node.value], []
-            )
-            check_call = Call(
-                self._get_import("typeguard._functions", "check_variable_assignment"),
-                [
-                    operator_call,
-                    Constant(node.target.id),
-                    annotation,
-                    self._memo.get_memo_name(),
-                ],
-                [],
-            )
-            return Assign(targets=[node.target], value=check_call)
-
-        return node
-
-    def visit_If(self, node: If) -> Any:
-        """
-        This blocks names from being collected from a module-level
-        "if typing.TYPE_CHECKING:" block, so that they won't be type checked.
-
-        """
-        self.generic_visit(node)
-
-        if (
-            self._memo is self._module_memo
-            and isinstance(node.test, Name)
-            and self._memo.name_matches(node.test, "typing.TYPE_CHECKING")
-        ):
-            collector = NameCollector()
-            collector.visit(node)
-            self._memo.ignored_names.update(collector.names)
-
-        return node
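[Reviewer note: a simplified before/after sketch of the instrumentation the
transformer deleted above performed; helper names come from the deleted
typeguard modules, and the real output also covers yields and assignments.]

    # Before
    def greet(name: str) -> str:
        return "hi " + name

    # After (roughly)
    def greet(name: str) -> str:
        memo = TypeCheckMemo(globals(), locals())
        check_argument_types("greet", {"name": (name, str)}, memo)
        return check_return_type("greet", "hi " + name, str, memo)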
diff --git a/pkg_resources/_vendor/typeguard/_union_transformer.py b/pkg_resources/_vendor/typeguard/_union_transformer.py
deleted file mode 100644
index 19617e6af5..0000000000
--- a/pkg_resources/_vendor/typeguard/_union_transformer.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-Transforms lazily evaluated PEP 604 unions into typing.Unions, for compatibility with
-Python versions older than 3.10.
-"""
-
-from __future__ import annotations
-
-from ast import (
-    BinOp,
-    BitOr,
-    Index,
-    Load,
-    Name,
-    NodeTransformer,
-    Subscript,
-    fix_missing_locations,
-    parse,
-)
-from ast import Tuple as ASTTuple
-from types import CodeType
-from typing import Any, Dict, FrozenSet, List, Set, Tuple, Union
-
-type_substitutions = {
-    "dict": Dict,
-    "list": List,
-    "tuple": Tuple,
-    "set": Set,
-    "frozenset": FrozenSet,
-    "Union": Union,
-}
-
-
-class UnionTransformer(NodeTransformer):
-    def __init__(self, union_name: Name | None = None):
-        self.union_name = union_name or Name(id="Union", ctx=Load())
-
-    def visit_BinOp(self, node: BinOp) -> Any:
-        self.generic_visit(node)
-        if isinstance(node.op, BitOr):
-            return Subscript(
-                value=self.union_name,
-                slice=Index(
-                    ASTTuple(elts=[node.left, node.right], ctx=Load()), ctx=Load()
-                ),
-                ctx=Load(),
-            )
-
-        return node
-
-
-def compile_type_hint(hint: str) -> CodeType:
-    parsed = parse(hint, "<string>", "eval")
-    UnionTransformer().visit(parsed)
-    fix_missing_locations(parsed)
-    return compile(parsed, "<string>", "eval", flags=0)
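
A self-contained sketch of the same transformation, assuming Python 3.9 or newer (the deleted module additionally wraps the tuple in ast.Index for 3.8 compatibility); the Rewriter name is illustrative:

    import ast
    from typing import Union

    class Rewriter(ast.NodeTransformer):
        def visit_BinOp(self, node):
            self.generic_visit(node)
            if isinstance(node.op, ast.BitOr):
                # X | Y  ->  Union[X, Y]
                return ast.Subscript(
                    value=ast.Name(id="Union", ctx=ast.Load()),
                    slice=ast.Tuple(elts=[node.left, node.right], ctx=ast.Load()),
                    ctx=ast.Load(),
                )
            return node

    tree = Rewriter().visit(ast.parse("int | str", "<string>", "eval"))
    ast.fix_missing_locations(tree)
    hint = eval(compile(tree, "<string>", "eval"), {"Union": Union})
    assert hint == Union[int, str]
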
diff --git a/pkg_resources/_vendor/typeguard/_utils.py b/pkg_resources/_vendor/typeguard/_utils.py
deleted file mode 100644
index 9bcc8417f8..0000000000
--- a/pkg_resources/_vendor/typeguard/_utils.py
+++ /dev/null
@@ -1,173 +0,0 @@
-from __future__ import annotations
-
-import inspect
-import sys
-from importlib import import_module
-from inspect import currentframe
-from types import CodeType, FrameType, FunctionType
-from typing import TYPE_CHECKING, Any, Callable, ForwardRef, Union, cast, final
-from weakref import WeakValueDictionary
-
-if TYPE_CHECKING:
-    from ._memo import TypeCheckMemo
-
-if sys.version_info >= (3, 13):
-    from typing import get_args, get_origin
-
-    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
-        return forwardref._evaluate(
-            memo.globals, memo.locals, type_params=(), recursive_guard=frozenset()
-        )
-
-elif sys.version_info >= (3, 10):
-    from typing import get_args, get_origin
-
-    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
-        return forwardref._evaluate(
-            memo.globals, memo.locals, recursive_guard=frozenset()
-        )
-
-else:
-    from typing_extensions import get_args, get_origin
-
-    evaluate_extra_args: tuple[frozenset[Any], ...] = (
-        (frozenset(),) if sys.version_info >= (3, 9) else ()
-    )
-
-    def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any:
-        from ._union_transformer import compile_type_hint, type_substitutions
-
-        if not forwardref.__forward_evaluated__:
-            forwardref.__forward_code__ = compile_type_hint(forwardref.__forward_arg__)
-
-        try:
-            return forwardref._evaluate(memo.globals, memo.locals, *evaluate_extra_args)
-        except NameError:
-            if sys.version_info < (3, 10):
-                # Try again, with the type substitutions (list -> List etc.) in place
-                new_globals = memo.globals.copy()
-                new_globals.setdefault("Union", Union)
-                if sys.version_info < (3, 9):
-                    new_globals.update(type_substitutions)
-
-                return forwardref._evaluate(
-                    new_globals, memo.locals or new_globals, *evaluate_extra_args
-                )
-
-            raise
-
-
-_functions_map: WeakValueDictionary[CodeType, FunctionType] = WeakValueDictionary()
-
-
-def get_type_name(type_: Any) -> str:
-    name: str
-    for attrname in "__name__", "_name", "__forward_arg__":
-        candidate = getattr(type_, attrname, None)
-        if isinstance(candidate, str):
-            name = candidate
-            break
-    else:
-        origin = get_origin(type_)
-        candidate = getattr(origin, "_name", None)
-        if candidate is None:
-            candidate = type_.__class__.__name__.strip("_")
-
-        if isinstance(candidate, str):
-            name = candidate
-        else:
-            return "(unknown)"
-
-    args = get_args(type_)
-    if args:
-        if name == "Literal":
-            formatted_args = ", ".join(repr(arg) for arg in args)
-        else:
-            formatted_args = ", ".join(get_type_name(arg) for arg in args)
-
-        name += f"[{formatted_args}]"
-
-    module = getattr(type_, "__module__", None)
-    if module and module not in (None, "typing", "typing_extensions", "builtins"):
-        name = module + "." + name
-
-    return name
-
-
-def qualified_name(obj: Any, *, add_class_prefix: bool = False) -> str:
-    """
-    Return the qualified name (e.g. package.module.Type) for the given object.
-
-    Builtins and types from the :mod:`typing` package get special treatment by having
-    the module name stripped from the generated name.
-
-    """
-    if obj is None:
-        return "None"
-    elif inspect.isclass(obj):
-        prefix = "class " if add_class_prefix else ""
-        type_ = obj
-    else:
-        prefix = ""
-        type_ = type(obj)
-
-    module = type_.__module__
-    qualname = type_.__qualname__
-    name = qualname if module in ("typing", "builtins") else f"{module}.{qualname}"
-    return prefix + name
-
-
-def function_name(func: Callable[..., Any]) -> str:
-    """
-    Return the qualified name of the given function.
-
-    Builtins and types from the :mod:`typing` package get special treatment by having
-    the module name stripped from the generated name.
-
-    """
-    # For partial functions and objects with __call__ defined, __qualname__ does not
-    # exist
-    module = getattr(func, "__module__", "")
-    qualname = (module + ".") if module not in ("builtins", "") else ""
-    return qualname + getattr(func, "__qualname__", repr(func))
-
-
-def resolve_reference(reference: str) -> Any:
-    modulename, varname = reference.partition(":")[::2]
-    if not modulename or not varname:
-        raise ValueError(f"{reference!r} is not a module:varname reference")
-
-    obj = import_module(modulename)
-    for attr in varname.split("."):
-        obj = getattr(obj, attr)
-
-    return obj
-
-
-def is_method_of(obj: object, cls: type) -> bool:
-    return (
-        inspect.isfunction(obj)
-        and obj.__module__ == cls.__module__
-        and obj.__qualname__.startswith(cls.__qualname__ + ".")
-    )
-
-
-def get_stacklevel() -> int:
-    level = 1
-    frame = cast(FrameType, currentframe()).f_back
-    while frame and frame.f_globals.get("__name__", "").startswith("typeguard."):
-        level += 1
-        frame = frame.f_back
-
-    return level
-
-
-@final
-class Unset:
-    __slots__ = ()
-
-    def __repr__(self) -> str:
-        return ""
-
-
-unset = Unset()
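
resolve_reference above accepts "module:varname" strings and walks dotted attributes after the import; a condensed, runnable copy (named resolve here purely for illustration):

    from importlib import import_module

    def resolve(reference: str):
        modulename, varname = reference.partition(":")[::2]
        obj = import_module(modulename)
        for attr in varname.split("."):
            obj = getattr(obj, attr)
        return obj

    import os.path
    assert resolve("os.path:join") is os.path.join
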
diff --git a/pkg_resources/_vendor/typeguard/py.typed b/pkg_resources/_vendor/typeguard/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/LICENSE b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
deleted file mode 100644
index f26bcf4d2d..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/LICENSE
+++ /dev/null
@@ -1,279 +0,0 @@
-A. HISTORY OF THE SOFTWARE
-==========================
-
-Python was created in the early 1990s by Guido van Rossum at Stichting
-Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
-as a successor of a language called ABC.  Guido remains Python's
-principal author, although it includes many contributions from others.
-
-In 1995, Guido continued his work on Python at the Corporation for
-National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
-in Reston, Virginia where he released several versions of the
-software.
-
-In May 2000, Guido and the Python core development team moved to
-BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
-year, the PythonLabs team moved to Digital Creations, which became
-Zope Corporation.  In 2001, the Python Software Foundation (PSF, see
-https://www.python.org/psf/) was formed, a non-profit organization
-created specifically to own Python-related Intellectual Property.
-Zope Corporation was a sponsoring member of the PSF.
-
-All Python releases are Open Source (see https://opensource.org for
-the Open Source Definition).  Historically, most, but not all, Python
-releases have also been GPL-compatible; the table below summarizes
-the various releases.
-
-    Release         Derived     Year        Owner       GPL-
-                    from                                compatible? (1)
-
-    0.9.0 thru 1.2              1991-1995   CWI         yes
-    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
-    1.6             1.5.2       2000        CNRI        no
-    2.0             1.6         2000        BeOpen.com  no
-    1.6.1           1.6         2001        CNRI        yes (2)
-    2.1             2.0+1.6.1   2001        PSF         no
-    2.0.1           2.0+1.6.1   2001        PSF         yes
-    2.1.1           2.1+2.0.1   2001        PSF         yes
-    2.1.2           2.1.1       2002        PSF         yes
-    2.1.3           2.1.2       2002        PSF         yes
-    2.2 and above   2.1.1       2001-now    PSF         yes
-
-Footnotes:
-
-(1) GPL-compatible doesn't mean that we're distributing Python under
-    the GPL.  All Python licenses, unlike the GPL, let you distribute
-    a modified version without making your changes open source.  The
-    GPL-compatible licenses make it possible to combine Python with
-    other software that is released under the GPL; the others don't.
-
-(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
-    because its license has a choice of law clause.  According to
-    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
-    is "not incompatible" with the GPL.
-
-Thanks to the many outside volunteers who have worked under Guido's
-direction to make these releases possible.
-
-
-B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
-===============================================================
-
-Python software and documentation are licensed under the
-Python Software Foundation License Version 2.
-
-Starting with Python 3.8.6, examples, recipes, and other code in
-the documentation are dual licensed under the PSF License Version 2
-and the Zero-Clause BSD license.
-
-Some software incorporated into Python is under different licenses.
-The licenses are listed with code falling under that license.
-
-
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
-All Rights Reserved" are retained in Python alone or in any derivative version
-prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee.  This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
--------------------------------------------
-
-BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
-
-1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
-office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
-Individual or Organization ("Licensee") accessing and otherwise using
-this software in source or binary form and its associated
-documentation ("the Software").
-
-2. Subject to the terms and conditions of this BeOpen Python License
-Agreement, BeOpen hereby grants Licensee a non-exclusive,
-royalty-free, world-wide license to reproduce, analyze, test, perform
-and/or display publicly, prepare derivative works, distribute, and
-otherwise use the Software alone or in any derivative version,
-provided, however, that the BeOpen Python License is retained in the
-Software, alone or in any derivative version prepared by Licensee.
-
-3. BeOpen is making the Software available to Licensee on an "AS IS"
-basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
-SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
-AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
-DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-5. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-6. This License Agreement shall be governed by and interpreted in all
-respects by the law of the State of California, excluding conflict of
-law provisions.  Nothing in this License Agreement shall be deemed to
-create any relationship of agency, partnership, or joint venture
-between BeOpen and Licensee.  This License Agreement does not grant
-permission to use BeOpen trademarks or trade names in a trademark
-sense to endorse or promote products or services of Licensee, or any
-third party.  As an exception, the "BeOpen Python" logos available at
-http://www.pythonlabs.com/logos.html may be used according to the
-permissions granted on that web page.
-
-7. By copying, installing or otherwise using the software, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
----------------------------------------
-
-1. This LICENSE AGREEMENT is between the Corporation for National
-Research Initiatives, having an office at 1895 Preston White Drive,
-Reston, VA 20191 ("CNRI"), and the Individual or Organization
-("Licensee") accessing and otherwise using Python 1.6.1 software in
-source or binary form and its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, CNRI
-hereby grants Licensee a nonexclusive, royalty-free, world-wide
-license to reproduce, analyze, test, perform and/or display publicly,
-prepare derivative works, distribute, and otherwise use Python 1.6.1
-alone or in any derivative version, provided, however, that CNRI's
-License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
-1995-2001 Corporation for National Research Initiatives; All Rights
-Reserved" are retained in Python 1.6.1 alone or in any derivative
-version prepared by Licensee.  Alternately, in lieu of CNRI's License
-Agreement, Licensee may substitute the following text (omitting the
-quotes): "Python 1.6.1 is made available subject to the terms and
-conditions in CNRI's License Agreement.  This Agreement together with
-Python 1.6.1 may be located on the internet using the following
-unique, persistent identifier (known as a handle): 1895.22/1013.  This
-Agreement may also be obtained from a proxy server on the internet
-using the following URL: http://hdl.handle.net/1895.22/1013".
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python 1.6.1 or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python 1.6.1.
-
-4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
-basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. This License Agreement shall be governed by the federal
-intellectual property law of the United States, including without
-limitation the federal copyright law, and, to the extent such
-U.S. federal law does not apply, by the law of the Commonwealth of
-Virginia, excluding Virginia's conflict of law provisions.
-Notwithstanding the foregoing, with regard to derivative works based
-on Python 1.6.1 that incorporate non-separable material that was
-previously distributed under the GNU General Public License (GPL), the
-law of the Commonwealth of Virginia shall govern this License
-Agreement only as to issues arising under or with respect to
-Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
-License Agreement shall be deemed to create any relationship of
-agency, partnership, or joint venture between CNRI and Licensee.  This
-License Agreement does not grant permission to use CNRI trademarks or
-trade name in a trademark sense to endorse or promote products or
-services of Licensee, or any third party.
-
-8. By clicking on the "ACCEPT" button where indicated, or by copying,
-installing or otherwise using Python 1.6.1, Licensee agrees to be
-bound by the terms and conditions of this License Agreement.
-
-        ACCEPT
-
-
-CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
---------------------------------------------------
-
-Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
-The Netherlands.  All rights reserved.
-
-Permission to use, copy, modify, and distribute this software and its
-documentation for any purpose and without fee is hereby granted,
-provided that the above copyright notice appear in all copies and that
-both that copyright notice and this permission notice appear in
-supporting documentation, and that the name of Stichting Mathematisch
-Centrum or CWI not be used in advertising or publicity pertaining to
-distribution of the software without specific, written prior
-permission.
-
-STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
-THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
-FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
-----------------------------------------------------------------------
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/METADATA b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/METADATA
deleted file mode 100644
index f15e2b3877..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/METADATA
+++ /dev/null
@@ -1,67 +0,0 @@
-Metadata-Version: 2.1
-Name: typing_extensions
-Version: 4.12.2
-Summary: Backported and Experimental Type Hints for Python 3.8+
-Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
-Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" 
-Requires-Python: >=3.8
-Description-Content-Type: text/markdown
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Environment :: Console
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Python Software Foundation License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
-Classifier: Topic :: Software Development
-Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues
-Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md
-Project-URL: Documentation, https://typing-extensions.readthedocs.io/
-Project-URL: Home, https://github.com/python/typing_extensions
-Project-URL: Q & A, https://github.com/python/typing/discussions
-Project-URL: Repository, https://github.com/python/typing_extensions
-
-# Typing Extensions
-
-[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing)
-
-[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) –
-[PyPI](https://pypi.org/project/typing-extensions/)
-
-## Overview
-
-The `typing_extensions` module serves two related purposes:
-
-- Enable use of new type system features on older Python versions. For example,
-  `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
-  users on previous Python versions to use it too.
-- Enable experimentation with new type system PEPs before they are accepted and
-  added to the `typing` module.
-
-`typing_extensions` is treated specially by static type checkers such as
-mypy and pyright. Objects defined in `typing_extensions` are treated the same
-way as equivalent forms in `typing`.
-
-`typing_extensions` uses
-[Semantic Versioning](https://semver.org/). The
-major version will be incremented only for backwards-incompatible changes.
-Therefore, it's safe to depend
-on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,
-where `x.y` is the first version that includes all features you need.
-
-## Included items
-
-See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a
-complete listing of module contents.
-
-## Contributing
-
-See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)
-for how to contribute to `typing_extensions`.
-
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/RECORD b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/RECORD
deleted file mode 100644
index bc7b45334d..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/RECORD
+++ /dev/null
@@ -1,7 +0,0 @@
-__pycache__/typing_extensions.cpython-312.pyc,,
-typing_extensions-4.12.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-typing_extensions-4.12.2.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
-typing_extensions-4.12.2.dist-info/METADATA,sha256=BeUQIa8cnYbrjWx-N8TOznM9UGW5Gm2DicVpDtRA8W0,3018
-typing_extensions-4.12.2.dist-info/RECORD,,
-typing_extensions-4.12.2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-typing_extensions.py,sha256=gwekpyG9DVG3lxWKX4ni8u7nk3We5slG98mA9F3DJQw,134451
diff --git a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/WHEEL b/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
deleted file mode 100644
index 3b5e64b5e6..0000000000
--- a/pkg_resources/_vendor/typing_extensions-4.12.2.dist-info/WHEEL
+++ /dev/null
@@ -1,4 +0,0 @@
-Wheel-Version: 1.0
-Generator: flit 3.9.0
-Root-Is-Purelib: true
-Tag: py3-none-any
diff --git a/pkg_resources/_vendor/typing_extensions.py b/pkg_resources/_vendor/typing_extensions.py
deleted file mode 100644
index dec429ca87..0000000000
--- a/pkg_resources/_vendor/typing_extensions.py
+++ /dev/null
@@ -1,3641 +0,0 @@
-import abc
-import collections
-import collections.abc
-import contextlib
-import functools
-import inspect
-import operator
-import sys
-import types as _types
-import typing
-import warnings
-
-__all__ = [
-    # Super-special typing primitives.
-    'Any',
-    'ClassVar',
-    'Concatenate',
-    'Final',
-    'LiteralString',
-    'ParamSpec',
-    'ParamSpecArgs',
-    'ParamSpecKwargs',
-    'Self',
-    'Type',
-    'TypeVar',
-    'TypeVarTuple',
-    'Unpack',
-
-    # ABCs (from collections.abc).
-    'Awaitable',
-    'AsyncIterator',
-    'AsyncIterable',
-    'Coroutine',
-    'AsyncGenerator',
-    'AsyncContextManager',
-    'Buffer',
-    'ChainMap',
-
-    # Concrete collection types.
-    'ContextManager',
-    'Counter',
-    'Deque',
-    'DefaultDict',
-    'NamedTuple',
-    'OrderedDict',
-    'TypedDict',
-
-    # Structural checks, a.k.a. protocols.
-    'SupportsAbs',
-    'SupportsBytes',
-    'SupportsComplex',
-    'SupportsFloat',
-    'SupportsIndex',
-    'SupportsInt',
-    'SupportsRound',
-
-    # One-off things.
-    'Annotated',
-    'assert_never',
-    'assert_type',
-    'clear_overloads',
-    'dataclass_transform',
-    'deprecated',
-    'Doc',
-    'get_overloads',
-    'final',
-    'get_args',
-    'get_origin',
-    'get_original_bases',
-    'get_protocol_members',
-    'get_type_hints',
-    'IntVar',
-    'is_protocol',
-    'is_typeddict',
-    'Literal',
-    'NewType',
-    'overload',
-    'override',
-    'Protocol',
-    'reveal_type',
-    'runtime',
-    'runtime_checkable',
-    'Text',
-    'TypeAlias',
-    'TypeAliasType',
-    'TypeGuard',
-    'TypeIs',
-    'TYPE_CHECKING',
-    'Never',
-    'NoReturn',
-    'ReadOnly',
-    'Required',
-    'NotRequired',
-
-    # Pure aliases, have always been in typing
-    'AbstractSet',
-    'AnyStr',
-    'BinaryIO',
-    'Callable',
-    'Collection',
-    'Container',
-    'Dict',
-    'ForwardRef',
-    'FrozenSet',
-    'Generator',
-    'Generic',
-    'Hashable',
-    'IO',
-    'ItemsView',
-    'Iterable',
-    'Iterator',
-    'KeysView',
-    'List',
-    'Mapping',
-    'MappingView',
-    'Match',
-    'MutableMapping',
-    'MutableSequence',
-    'MutableSet',
-    'NoDefault',
-    'Optional',
-    'Pattern',
-    'Reversible',
-    'Sequence',
-    'Set',
-    'Sized',
-    'TextIO',
-    'Tuple',
-    'Union',
-    'ValuesView',
-    'cast',
-    'no_type_check',
-    'no_type_check_decorator',
-]
-
-# for backward compatibility
-PEP_560 = True
-GenericMeta = type
-_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
-
-# The functions below are modified copies of typing internal helpers.
-# They are needed by _ProtocolMeta and they provide support for PEP 646.
-
-
-class _Sentinel:
-    def __repr__(self):
-        return ""
-
-
-_marker = _Sentinel()
-
-
-if sys.version_info >= (3, 10):
-    def _should_collect_from_parameters(t):
-        return isinstance(
-            t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
-        )
-elif sys.version_info >= (3, 9):
-    def _should_collect_from_parameters(t):
-        return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
-else:
-    def _should_collect_from_parameters(t):
-        return isinstance(t, typing._GenericAlias) and not t._special
-
-
-NoReturn = typing.NoReturn
-
-# Some unconstrained type variables.  These are used by the container types.
-# (These are not for export.)
-T = typing.TypeVar('T')  # Any type.
-KT = typing.TypeVar('KT')  # Key type.
-VT = typing.TypeVar('VT')  # Value type.
-T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
-T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
-
-
-if sys.version_info >= (3, 11):
-    from typing import Any
-else:
-
-    class _AnyMeta(type):
-        def __instancecheck__(self, obj):
-            if self is Any:
-                raise TypeError("typing_extensions.Any cannot be used with isinstance()")
-            return super().__instancecheck__(obj)
-
-        def __repr__(self):
-            if self is Any:
-                return "typing_extensions.Any"
-            return super().__repr__()
-
-    class Any(metaclass=_AnyMeta):
-        """Special type indicating an unconstrained type.
-        - Any is compatible with every type.
-        - Any assumed to have all methods.
-        - All values assumed to be instances of Any.
-        Note that all the above statements are true from the point of view of
-        static type checkers. At runtime, Any should not be used with instance
-        checks.
-        """
-        def __new__(cls, *args, **kwargs):
-            if cls is Any:
-                raise TypeError("Any cannot be instantiated")
-            return super().__new__(cls, *args, **kwargs)
-
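
On pre-3.11 interpreters this backport makes Any a real class, so it can be subclassed (for example, for mock objects) while Any itself still cannot be instantiated or used with isinstance(). A brief sketch, assuming typing_extensions is importable; MockAnything is an illustrative name:

    from typing_extensions import Any

    class MockAnything(Any):  # subclassing Any is allowed under the 3.11 semantics
        pass

    m = MockAnything()  # subclasses instantiate normally; Any() raises TypeError
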
-
-ClassVar = typing.ClassVar
-
-
-class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
-    def __repr__(self):
-        return 'typing_extensions.' + self._name
-
-
-Final = typing.Final
-
-if sys.version_info >= (3, 11):
-    final = typing.final
-else:
-    # @final exists in 3.8+, but we backport it for all versions
-    # before 3.11 to keep support for the __final__ attribute.
-    # See https://bugs.python.org/issue46342
-    def final(f):
-        """This decorator can be used to indicate to type checkers that
-        the decorated method cannot be overridden, and decorated class
-        cannot be subclassed. For example:
-
-            class Base:
-                @final
-                def done(self) -> None:
-                    ...
-            class Sub(Base):
-                def done(self) -> None:  # Error reported by type checker
-                    ...
-            @final
-            class Leaf:
-                ...
-            class Other(Leaf):  # Error reported by type checker
-                ...
-
-        There is no runtime checking of these properties. The decorator
-        sets the ``__final__`` attribute to ``True`` on the decorated object
-        to allow runtime introspection.
-        """
-        try:
-            f.__final__ = True
-        except (AttributeError, TypeError):
-            # Skip the attribute silently if it is not writable.
-            # AttributeError happens if the object has __slots__ or a
-            # read-only property, TypeError if it's a builtin class.
-            pass
-        return f
-
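
The `__final__` attribute set above is the only runtime-visible effect of the backport; a quick sketch:

    from typing_extensions import final

    @final
    class Leaf:
        pass

    assert getattr(Leaf, "__final__", False) is True
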
-
-def IntVar(name):
-    return typing.TypeVar(name)
-
-
-# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
-if sys.version_info >= (3, 10, 1):
-    Literal = typing.Literal
-else:
-    def _flatten_literal_params(parameters):
-        """An internal helper for Literal creation: flatten Literals among parameters"""
-        params = []
-        for p in parameters:
-            if isinstance(p, _LiteralGenericAlias):
-                params.extend(p.__args__)
-            else:
-                params.append(p)
-        return tuple(params)
-
-    def _value_and_type_iter(params):
-        for p in params:
-            yield p, type(p)
-
-    class _LiteralGenericAlias(typing._GenericAlias, _root=True):
-        def __eq__(self, other):
-            if not isinstance(other, _LiteralGenericAlias):
-                return NotImplemented
-            these_args_deduped = set(_value_and_type_iter(self.__args__))
-            other_args_deduped = set(_value_and_type_iter(other.__args__))
-            return these_args_deduped == other_args_deduped
-
-        def __hash__(self):
-            return hash(frozenset(_value_and_type_iter(self.__args__)))
-
-    class _LiteralForm(_ExtensionsSpecialForm, _root=True):
-        def __init__(self, doc: str):
-            self._name = 'Literal'
-            self._doc = self.__doc__ = doc
-
-        def __getitem__(self, parameters):
-            if not isinstance(parameters, tuple):
-                parameters = (parameters,)
-
-            parameters = _flatten_literal_params(parameters)
-
-            val_type_pairs = list(_value_and_type_iter(parameters))
-            try:
-                deduped_pairs = set(val_type_pairs)
-            except TypeError:
-                # unhashable parameters
-                pass
-            else:
-                # similar logic to typing._deduplicate on Python 3.9+
-                if len(deduped_pairs) < len(val_type_pairs):
-                    new_parameters = []
-                    for pair in val_type_pairs:
-                        if pair in deduped_pairs:
-                            new_parameters.append(pair[0])
-                            deduped_pairs.remove(pair)
-                    assert not deduped_pairs, deduped_pairs
-                    parameters = tuple(new_parameters)
-
-            return _LiteralGenericAlias(self, parameters)
-
-    Literal = _LiteralForm(doc="""\
-                           A type that can be used to indicate to type checkers
-                           that the corresponding value has a value literally equivalent
-                           to the provided parameter. For example:
-
-                               var: Literal[4] = 4
-
-                           The type checker understands that 'var' is literally equal to
-                           the value 4 and no other value.
-
-                           Literal[...] cannot be subclassed. There is no runtime
-                           checking verifying that the parameter is actually a value
-                           instead of a type.""")
-
-
-_overload_dummy = typing._overload_dummy
-
-
-if hasattr(typing, "get_overloads"):  # 3.11+
-    overload = typing.overload
-    get_overloads = typing.get_overloads
-    clear_overloads = typing.clear_overloads
-else:
-    # {module: {qualname: {firstlineno: func}}}
-    _overload_registry = collections.defaultdict(
-        functools.partial(collections.defaultdict, dict)
-    )
-
-    def overload(func):
-        """Decorator for overloaded functions/methods.
-
-        In a stub file, place two or more stub definitions for the same
-        function in a row, each decorated with @overload.  For example:
-
-        @overload
-        def utf8(value: None) -> None: ...
-        @overload
-        def utf8(value: bytes) -> bytes: ...
-        @overload
-        def utf8(value: str) -> bytes: ...
-
-        In a non-stub file (i.e. a regular .py file), do the same but
-        follow it with an implementation.  The implementation should *not*
-        be decorated with @overload.  For example:
-
-        @overload
-        def utf8(value: None) -> None: ...
-        @overload
-        def utf8(value: bytes) -> bytes: ...
-        @overload
-        def utf8(value: str) -> bytes: ...
-        def utf8(value):
-            # implementation goes here
-
-        The overloads for a function can be retrieved at runtime using the
-        get_overloads() function.
-        """
-        # classmethod and staticmethod
-        f = getattr(func, "__func__", func)
-        try:
-            _overload_registry[f.__module__][f.__qualname__][
-                f.__code__.co_firstlineno
-            ] = func
-        except AttributeError:
-            # Not a normal function; ignore.
-            pass
-        return _overload_dummy
-
-    def get_overloads(func):
-        """Return all defined overloads for *func* as a sequence."""
-        # classmethod and staticmethod
-        f = getattr(func, "__func__", func)
-        if f.__module__ not in _overload_registry:
-            return []
-        mod_dict = _overload_registry[f.__module__]
-        if f.__qualname__ not in mod_dict:
-            return []
-        return list(mod_dict[f.__qualname__].values())
-
-    def clear_overloads():
-        """Clear all overloads in the registry."""
-        _overload_registry.clear()
-
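
A minimal round-trip through the registry defined above (the stdlib versions on 3.11+ behave the same), assuming typing_extensions is importable:

    from typing_extensions import get_overloads, overload

    @overload
    def utf8(value: None) -> None: ...
    @overload
    def utf8(value: bytes) -> bytes: ...
    def utf8(value):  # the implementation itself is not registered
        return value

    assert len(get_overloads(utf8)) == 2
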
-
-# This is not a real generic class.  Don't use outside annotations.
-Type = typing.Type
-
-# Various ABCs mimicking those in collections.abc.
-# A few are simply re-exported for completeness.
-Awaitable = typing.Awaitable
-Coroutine = typing.Coroutine
-AsyncIterable = typing.AsyncIterable
-AsyncIterator = typing.AsyncIterator
-Deque = typing.Deque
-DefaultDict = typing.DefaultDict
-OrderedDict = typing.OrderedDict
-Counter = typing.Counter
-ChainMap = typing.ChainMap
-Text = typing.Text
-TYPE_CHECKING = typing.TYPE_CHECKING
-
-
-if sys.version_info >= (3, 13, 0, "beta"):
-    from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
-else:
-    def _is_dunder(attr):
-        return attr.startswith('__') and attr.endswith('__')
-
-    # Python <3.9 doesn't have typing._SpecialGenericAlias
-    _special_generic_alias_base = getattr(
-        typing, "_SpecialGenericAlias", typing._GenericAlias
-    )
-
-    class _SpecialGenericAlias(_special_generic_alias_base, _root=True):
-        def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
-            if _special_generic_alias_base is typing._GenericAlias:
-                # Python <3.9
-                self.__origin__ = origin
-                self._nparams = nparams
-                super().__init__(origin, nparams, special=True, inst=inst, name=name)
-            else:
-                # Python >= 3.9
-                super().__init__(origin, nparams, inst=inst, name=name)
-            self._defaults = defaults
-
-        def __setattr__(self, attr, val):
-            allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
-            if _special_generic_alias_base is typing._GenericAlias:
-                # Python <3.9
-                allowed_attrs.add("__origin__")
-            if _is_dunder(attr) or attr in allowed_attrs:
-                object.__setattr__(self, attr, val)
-            else:
-                setattr(self.__origin__, attr, val)
-
-        @typing._tp_cache
-        def __getitem__(self, params):
-            if not isinstance(params, tuple):
-                params = (params,)
-            msg = "Parameters to generic types must be types."
-            params = tuple(typing._type_check(p, msg) for p in params)
-            if (
-                self._defaults
-                and len(params) < self._nparams
-                and len(params) + len(self._defaults) >= self._nparams
-            ):
-                params = (*params, *self._defaults[len(params) - self._nparams:])
-            actual_len = len(params)
-
-            if actual_len != self._nparams:
-                if self._defaults:
-                    expected = f"at least {self._nparams - len(self._defaults)}"
-                else:
-                    expected = str(self._nparams)
-                if not self._nparams:
-                    raise TypeError(f"{self} is not a generic class")
-                raise TypeError(
-                    f"Too {'many' if actual_len > self._nparams else 'few'}"
-                    f" arguments for {self};"
-                    f" actual {actual_len}, expected {expected}"
-                )
-            return self.copy_with(params)
-
-    _NoneType = type(None)
-    Generator = _SpecialGenericAlias(
-        collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
-    )
-    AsyncGenerator = _SpecialGenericAlias(
-        collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
-    )
-    ContextManager = _SpecialGenericAlias(
-        contextlib.AbstractContextManager,
-        2,
-        name="ContextManager",
-        defaults=(typing.Optional[bool],)
-    )
-    AsyncContextManager = _SpecialGenericAlias(
-        contextlib.AbstractAsyncContextManager,
-        2,
-        name="AsyncContextManager",
-        defaults=(typing.Optional[bool],)
-    )
-
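
The defaults wired up above let trailing type parameters be omitted; a sketch of the effect, assuming typing_extensions 4.12:

    from typing_extensions import Generator

    # The last two parameters default to NoneType, so these are equivalent:
    assert Generator[int] == Generator[int, None, None]
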
-
-_PROTO_ALLOWLIST = {
-    'collections.abc': [
-        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
-        'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
-    ],
-    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
-    'typing_extensions': ['Buffer'],
-}
-
-
-_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
-    "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
-    "__final__",
-}
-
-
-def _get_protocol_attrs(cls):
-    attrs = set()
-    for base in cls.__mro__[:-1]:  # without object
-        if base.__name__ in {'Protocol', 'Generic'}:
-            continue
-        annotations = getattr(base, '__annotations__', {})
-        for attr in (*base.__dict__, *annotations):
-            if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
-                attrs.add(attr)
-    return attrs
-
-
-def _caller(depth=2):
-    try:
-        return sys._getframe(depth).f_globals.get('__name__', '__main__')
-    except (AttributeError, ValueError):  # For platforms without _getframe()
-        return None
-
-
-# `__match_args__` attribute was removed from protocol members in 3.13,
-# we want to backport this change to older Python versions.
-if sys.version_info >= (3, 13):
-    Protocol = typing.Protocol
-else:
-    def _allow_reckless_class_checks(depth=3):
-        """Allow instance and class checks for special stdlib modules.
-        The abc and functools modules indiscriminately call isinstance() and
-        issubclass() on the whole MRO of a user class, which may contain protocols.
-        """
-        return _caller(depth) in {'abc', 'functools', None}
-
-    def _no_init(self, *args, **kwargs):
-        if type(self)._is_protocol:
-            raise TypeError('Protocols cannot be instantiated')
-
-    def _type_check_issubclass_arg_1(arg):
-        """Raise TypeError if `arg` is not an instance of `type`
-        in `issubclass(arg, <protocol>)`.
-
-        In most cases, this is verified by type.__subclasscheck__.
-        Checking it again unnecessarily would slow down issubclass() checks,
-        so, we don't perform this check unless we absolutely have to.
-
-        For various error paths, however,
-        we want to ensure that *this* error message is shown to the user
-        where relevant, rather than a typing.py-specific error message.
-        """
-        if not isinstance(arg, type):
-            # Same error message as for issubclass(1, int).
-            raise TypeError('issubclass() arg 1 must be a class')
-
-    # Inheriting from typing._ProtocolMeta isn't actually desirable,
-    # but is necessary to allow typing.Protocol and typing_extensions.Protocol
-    # to mix without getting TypeErrors about "metaclass conflict"
-    class _ProtocolMeta(type(typing.Protocol)):
-        # This metaclass is somewhat unfortunate,
-        # but is necessary for several reasons...
-        #
-        # NOTE: DO NOT call super() in any methods in this class
-        # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11
-        # and those are slow
-        def __new__(mcls, name, bases, namespace, **kwargs):
-            if name == "Protocol" and len(bases) < 2:
-                pass
-            elif {Protocol, typing.Protocol} & set(bases):
-                for base in bases:
-                    if not (
-                        base in {object, typing.Generic, Protocol, typing.Protocol}
-                        or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
-                        or is_protocol(base)
-                    ):
-                        raise TypeError(
-                            f"Protocols can only inherit from other protocols, "
-                            f"got {base!r}"
-                        )
-            return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
-
-        def __init__(cls, *args, **kwargs):
-            abc.ABCMeta.__init__(cls, *args, **kwargs)
-            if getattr(cls, "_is_protocol", False):
-                cls.__protocol_attrs__ = _get_protocol_attrs(cls)
-
-        def __subclasscheck__(cls, other):
-            if cls is Protocol:
-                return type.__subclasscheck__(cls, other)
-            if (
-                getattr(cls, '_is_protocol', False)
-                and not _allow_reckless_class_checks()
-            ):
-                if not getattr(cls, '_is_runtime_protocol', False):
-                    _type_check_issubclass_arg_1(other)
-                    raise TypeError(
-                        "Instance and class checks can only be used with "
-                        "@runtime_checkable protocols"
-                    )
-                if (
-                    # this attribute is set by @runtime_checkable:
-                    cls.__non_callable_proto_members__
-                    and cls.__dict__.get("__subclasshook__") is _proto_hook
-                ):
-                    _type_check_issubclass_arg_1(other)
-                    non_method_attrs = sorted(cls.__non_callable_proto_members__)
-                    raise TypeError(
-                        "Protocols with non-method members don't support issubclass()."
-                        f" Non-method members: {str(non_method_attrs)[1:-1]}."
-                    )
-            return abc.ABCMeta.__subclasscheck__(cls, other)
-
-        def __instancecheck__(cls, instance):
-            # We need this method for situations where attributes are
-            # assigned in __init__.
-            if cls is Protocol:
-                return type.__instancecheck__(cls, instance)
-            if not getattr(cls, "_is_protocol", False):
-                # i.e., it's a concrete subclass of a protocol
-                return abc.ABCMeta.__instancecheck__(cls, instance)
-
-            if (
-                not getattr(cls, '_is_runtime_protocol', False) and
-                not _allow_reckless_class_checks()
-            ):
-                raise TypeError("Instance and class checks can only be used with"
-                                " @runtime_checkable protocols")
-
-            if abc.ABCMeta.__instancecheck__(cls, instance):
-                return True
-
-            for attr in cls.__protocol_attrs__:
-                try:
-                    val = inspect.getattr_static(instance, attr)
-                except AttributeError:
-                    break
-                # this attribute is set by @runtime_checkable:
-                if val is None and attr not in cls.__non_callable_proto_members__:
-                    break
-            else:
-                return True
-
-            return False
-
-        def __eq__(cls, other):
-            # Hack so that typing.Generic.__class_getitem__
-            # treats typing_extensions.Protocol
-            # as equivalent to typing.Protocol
-            if abc.ABCMeta.__eq__(cls, other) is True:
-                return True
-            return cls is Protocol and other is typing.Protocol
-
-        # This has to be defined, or the abc-module cache
-        # complains about classes with this metaclass being unhashable,
-        # if we define only __eq__!
-        def __hash__(cls) -> int:
-            return type.__hash__(cls)
-
-    @classmethod
-    def _proto_hook(cls, other):
-        if not cls.__dict__.get('_is_protocol', False):
-            return NotImplemented
-
-        for attr in cls.__protocol_attrs__:
-            for base in other.__mro__:
-                # Check if the members appears in the class dictionary...
-                if attr in base.__dict__:
-                    if base.__dict__[attr] is None:
-                        return NotImplemented
-                    break
-
-                # ...or in annotations, if it is a sub-protocol.
-                annotations = getattr(base, '__annotations__', {})
-                if (
-                    isinstance(annotations, collections.abc.Mapping)
-                    and attr in annotations
-                    and is_protocol(other)
-                ):
-                    break
-            else:
-                return NotImplemented
-        return True
-
-    class Protocol(typing.Generic, metaclass=_ProtocolMeta):
-        __doc__ = typing.Protocol.__doc__
-        __slots__ = ()
-        _is_protocol = True
-        _is_runtime_protocol = False
-
-        def __init_subclass__(cls, *args, **kwargs):
-            super().__init_subclass__(*args, **kwargs)
-
-            # Determine if this is a protocol or a concrete subclass.
-            if not cls.__dict__.get('_is_protocol', False):
-                cls._is_protocol = any(b is Protocol for b in cls.__bases__)
-
-            # Set (or override) the protocol subclass hook.
-            if '__subclasshook__' not in cls.__dict__:
-                cls.__subclasshook__ = _proto_hook
-
-            # Prohibit instantiation for protocol classes
-            if cls._is_protocol and cls.__init__ is Protocol.__init__:
-                cls.__init__ = _no_init
-
-
-if sys.version_info >= (3, 13):
-    runtime_checkable = typing.runtime_checkable
-else:
-    def runtime_checkable(cls):
-        """Mark a protocol class as a runtime protocol.
-
-        Such protocol can be used with isinstance() and issubclass().
-        Raise TypeError if applied to a non-protocol class.
-        This allows a simple-minded structural check very similar to
-        one trick ponies in collections.abc such as Iterable.
-
-        For example::
-
-            @runtime_checkable
-            class Closable(Protocol):
-                def close(self): ...
-
-            assert isinstance(open('/some/file'), Closable)
-
-        Warning: this will check only the presence of the required methods,
-        not their type signatures!
-        """
-        if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
-            raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
-                            f' got {cls!r}')
-        cls._is_runtime_protocol = True
-
-        # typing.Protocol classes on <=3.11 break if we execute this block,
-        # because typing.Protocol classes on <=3.11 don't have a
-        # `__protocol_attrs__` attribute, and this block relies on the
-        # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
-        # break if we *don't* execute this block, because *they* assume that all
-        # protocol classes have a `__non_callable_proto_members__` attribute
-        # (which this block sets)
-        if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
-            # PEP 544 prohibits using issubclass()
-            # with protocols that have non-method members.
-            # See gh-113320 for why we compute this attribute here,
-            # rather than in `_ProtocolMeta.__init__`
-            cls.__non_callable_proto_members__ = set()
-            for attr in cls.__protocol_attrs__:
-                try:
-                    is_callable = callable(getattr(cls, attr, None))
-                except Exception as e:
-                    raise TypeError(
-                        f"Failed to determine whether protocol member {attr!r} "
-                        "is a method member"
-                    ) from e
-                else:
-                    if not is_callable:
-                        cls.__non_callable_proto_members__.add(attr)
-
-        return cls
-
-
-# The "runtime" alias exists for backwards compatibility.
-runtime = runtime_checkable
-
-
-# Our version of runtime-checkable protocols is faster on Python 3.8-3.11
-if sys.version_info >= (3, 12):
-    SupportsInt = typing.SupportsInt
-    SupportsFloat = typing.SupportsFloat
-    SupportsComplex = typing.SupportsComplex
-    SupportsBytes = typing.SupportsBytes
-    SupportsIndex = typing.SupportsIndex
-    SupportsAbs = typing.SupportsAbs
-    SupportsRound = typing.SupportsRound
-else:
-    @runtime_checkable
-    class SupportsInt(Protocol):
-        """An ABC with one abstract method __int__."""
-        __slots__ = ()
-
-        @abc.abstractmethod
-        def __int__(self) -> int:
-            pass
-
-    @runtime_checkable
-    class SupportsFloat(Protocol):
-        """An ABC with one abstract method __float__."""
-        __slots__ = ()
-
-        @abc.abstractmethod
-        def __float__(self) -> float:
-            pass
-
-    @runtime_checkable
-    class SupportsComplex(Protocol):
-        """An ABC with one abstract method __complex__."""
-        __slots__ = ()
-
-        @abc.abstractmethod
-        def __complex__(self) -> complex:
-            pass
-
-    @runtime_checkable
-    class SupportsBytes(Protocol):
-        """An ABC with one abstract method __bytes__."""
-        __slots__ = ()
-
-        @abc.abstractmethod
-        def __bytes__(self) -> bytes:
-            pass
-
-    @runtime_checkable
-    class SupportsIndex(Protocol):
-        __slots__ = ()
-
-        @abc.abstractmethod
-        def __index__(self) -> int:
-            pass
-
-    @runtime_checkable
-    class SupportsAbs(Protocol[T_co]):
-        """
-        An ABC with one abstract method __abs__ that is covariant in its return type.
-        """
-        __slots__ = ()
-
-        @abc.abstractmethod
-        def __abs__(self) -> T_co:
-            pass
-
-    @runtime_checkable
-    class SupportsRound(Protocol[T_co]):
-        """
-        An ABC with one abstract method __round__ that is covariant in its return type.
-        """
-        __slots__ = ()
-
-        @abc.abstractmethod
-        def __round__(self, ndigits: int = 0) -> T_co:
-            pass
-
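
These are ordinary runtime-checkable protocols, so isinstance() performs a structural check for the one required method; a brief sketch (Page is an illustrative class):

    from typing_extensions import SupportsIndex

    class Page:
        def __index__(self) -> int:
            return 3

    assert isinstance(Page(), SupportsIndex)
    assert not isinstance(3.5, SupportsIndex)  # float defines no __index__
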
-
-def _ensure_subclassable(mro_entries):
-    def inner(func):
-        if sys.implementation.name == "pypy" and sys.version_info < (3, 9):
-            cls_dict = {
-                "__call__": staticmethod(func),
-                "__mro_entries__": staticmethod(mro_entries)
-            }
-            t = type(func.__name__, (), cls_dict)
-            return functools.update_wrapper(t(), func)
-        else:
-            func.__mro_entries__ = mro_entries
-            return func
-    return inner
-
-
-# Update this to something like >=3.13.0b1 if and when
-# PEP 728 is implemented in CPython
-_PEP_728_IMPLEMENTED = False
-
-if _PEP_728_IMPLEMENTED:
-    # The standard library TypedDict in Python 3.8 does not store runtime information
-    # about which (if any) keys are optional.  See https://bugs.python.org/issue38834
-    # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
-    # keyword with old-style TypedDict().  See https://bugs.python.org/issue42059
-    # The standard library TypedDict below Python 3.11 does not store runtime
-    # information about optional and required keys when using Required or NotRequired.
-    # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
-    # And on 3.12 we add __orig_bases__ to TypedDict
-    # to enable better runtime introspection.
-    # On 3.13 we deprecate some odd ways of creating TypedDicts.
-    # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
-    # PEP 728 (still pending) makes more changes.
-    TypedDict = typing.TypedDict
-    _TypedDictMeta = typing._TypedDictMeta
-    is_typeddict = typing.is_typeddict
-else:
-    # 3.10.0 and later
-    _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
-
-    def _get_typeddict_qualifiers(annotation_type):
-        while True:
-            annotation_origin = get_origin(annotation_type)
-            if annotation_origin is Annotated:
-                annotation_args = get_args(annotation_type)
-                if annotation_args:
-                    annotation_type = annotation_args[0]
-                else:
-                    break
-            elif annotation_origin is Required:
-                yield Required
-                annotation_type, = get_args(annotation_type)
-            elif annotation_origin is NotRequired:
-                yield NotRequired
-                annotation_type, = get_args(annotation_type)
-            elif annotation_origin is ReadOnly:
-                yield ReadOnly
-                annotation_type, = get_args(annotation_type)
-            else:
-                break
-
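-    # Illustrative behaviour of the helper above: qualifiers are yielded
-    # outermost-first as the annotation is unwrapped (calls are examples only):
-    #
-    #     list(_get_typeddict_qualifiers(Required[ReadOnly[int]]))
-    #     # -> [Required, ReadOnly]
-    #     list(_get_typeddict_qualifiers(Annotated[NotRequired[str], "meta"]))
-    #     # -> [NotRequired]
-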
-    class _TypedDictMeta(type):
-        def __new__(cls, name, bases, ns, *, total=True, closed=False):
-            """Create new typed dict class object.
-
-            This method is called when TypedDict is subclassed,
-            or when TypedDict is instantiated. This way
-            TypedDict supports all three syntax forms described in its docstring.
-            Subclasses and instances of TypedDict return actual dictionaries.
-            """
-            for base in bases:
-                if type(base) is not _TypedDictMeta and base is not typing.Generic:
-                    raise TypeError('cannot inherit from both a TypedDict type '
-                                    'and a non-TypedDict base class')
-
-            if any(issubclass(b, typing.Generic) for b in bases):
-                generic_base = (typing.Generic,)
-            else:
-                generic_base = ()
-
-            # typing.py generally doesn't let you inherit from plain Generic, unless
-            # the name of the class happens to be "Protocol"
-            tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
-            tp_dict.__name__ = name
-            if tp_dict.__qualname__ == "Protocol":
-                tp_dict.__qualname__ = name
-
-            if not hasattr(tp_dict, '__orig_bases__'):
-                tp_dict.__orig_bases__ = bases
-
-            annotations = {}
-            if "__annotations__" in ns:
-                own_annotations = ns["__annotations__"]
-            elif "__annotate__" in ns:
-                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
-                own_annotations = ns["__annotate__"](1)
-            else:
-                own_annotations = {}
-            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
-            if _TAKES_MODULE:
-                own_annotations = {
-                    n: typing._type_check(tp, msg, module=tp_dict.__module__)
-                    for n, tp in own_annotations.items()
-                }
-            else:
-                own_annotations = {
-                    n: typing._type_check(tp, msg)
-                    for n, tp in own_annotations.items()
-                }
-            required_keys = set()
-            optional_keys = set()
-            readonly_keys = set()
-            mutable_keys = set()
-            extra_items_type = None
-
-            for base in bases:
-                base_dict = base.__dict__
-
-                annotations.update(base_dict.get('__annotations__', {}))
-                required_keys.update(base_dict.get('__required_keys__', ()))
-                optional_keys.update(base_dict.get('__optional_keys__', ()))
-                readonly_keys.update(base_dict.get('__readonly_keys__', ()))
-                mutable_keys.update(base_dict.get('__mutable_keys__', ()))
-                base_extra_items_type = base_dict.get('__extra_items__', None)
-                if base_extra_items_type is not None:
-                    extra_items_type = base_extra_items_type
-
-            if closed and extra_items_type is None:
-                extra_items_type = Never
-            if closed and "__extra_items__" in own_annotations:
-                annotation_type = own_annotations.pop("__extra_items__")
-                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
-                if Required in qualifiers:
-                    raise TypeError(
-                        "Special key __extra_items__ does not support "
-                        "Required"
-                    )
-                if NotRequired in qualifiers:
-                    raise TypeError(
-                        "Special key __extra_items__ does not support "
-                        "NotRequired"
-                    )
-                extra_items_type = annotation_type
-
-            annotations.update(own_annotations)
-            for annotation_key, annotation_type in own_annotations.items():
-                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
-
-                if Required in qualifiers:
-                    required_keys.add(annotation_key)
-                elif NotRequired in qualifiers:
-                    optional_keys.add(annotation_key)
-                elif total:
-                    required_keys.add(annotation_key)
-                else:
-                    optional_keys.add(annotation_key)
-                if ReadOnly in qualifiers:
-                    mutable_keys.discard(annotation_key)
-                    readonly_keys.add(annotation_key)
-                else:
-                    mutable_keys.add(annotation_key)
-                    readonly_keys.discard(annotation_key)
-
-            tp_dict.__annotations__ = annotations
-            tp_dict.__required_keys__ = frozenset(required_keys)
-            tp_dict.__optional_keys__ = frozenset(optional_keys)
-            tp_dict.__readonly_keys__ = frozenset(readonly_keys)
-            tp_dict.__mutable_keys__ = frozenset(mutable_keys)
-            if not hasattr(tp_dict, '__total__'):
-                tp_dict.__total__ = total
-            tp_dict.__closed__ = closed
-            tp_dict.__extra_items__ = extra_items_type
-            return tp_dict
-
-        __call__ = dict  # static method
-
-        def __subclasscheck__(cls, other):
-            # Typed dicts are only for static structural subtyping.
-            raise TypeError('TypedDict does not support instance and class checks')
-
-        __instancecheck__ = __subclasscheck__
-
-    _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
-
-    @_ensure_subclassable(lambda bases: (_TypedDict,))
-    def TypedDict(typename, fields=_marker, /, *, total=True, closed=False, **kwargs):
-        """A simple typed namespace. At runtime it is equivalent to a plain dict.
-
-        TypedDict creates a dictionary type such that a type checker will expect all
-        instances to have a certain set of keys, where each key is
-        associated with a value of a consistent type. This expectation
-        is not checked at runtime.
-
-        Usage::
-
-            class Point2D(TypedDict):
-                x: int
-                y: int
-                label: str
-
-            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
-            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
-
-            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
-
-        The type info can be accessed via the Point2D.__annotations__ dict, and
-        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
-        TypedDict supports an additional equivalent form::
-
-            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
-
-        By default, all keys must be present in a TypedDict. It is possible
-        to override this by specifying totality::
-
-            class Point2D(TypedDict, total=False):
-                x: int
-                y: int
-
-        This means that a Point2D TypedDict can have any of the keys omitted. A type
-        checker is only expected to support a literal False or True as the value of
-        the total argument. True is the default, and makes all items defined in the
-        class body be required.
-
-        The Required and NotRequired special forms can also be used to mark
-        individual keys as being required or not required::
-
-            class Point2D(TypedDict):
-                x: int  # the "x" key must always be present (Required is the default)
-                y: NotRequired[int]  # the "y" key can be omitted
-
-        See PEP 655 for more details on Required and NotRequired.
-        """
-        if fields is _marker or fields is None:
-            if fields is _marker:
-                deprecated_thing = "Failing to pass a value for the 'fields' parameter"
-            else:
-                deprecated_thing = "Passing `None` as the 'fields' parameter"
-
-            example = f"`{typename} = TypedDict({typename!r}, {{}})`"
-            deprecation_msg = (
-                f"{deprecated_thing} is deprecated and will be disallowed in "
-                "Python 3.15. To create a TypedDict class with 0 fields "
-                "using the functional syntax, pass an empty dictionary, e.g. "
-            ) + example + "."
-            warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
-            if closed is not False and closed is not True:
-                kwargs["closed"] = closed
-                closed = False
-            fields = kwargs
-        elif kwargs:
-            raise TypeError("TypedDict takes either a dict or keyword arguments,"
-                            " but not both")
-        if kwargs:
-            if sys.version_info >= (3, 13):
-                raise TypeError("TypedDict takes no keyword arguments")
-            warnings.warn(
-                "The kwargs-based syntax for TypedDict definitions is deprecated "
-                "in Python 3.11, will be removed in Python 3.13, and may not be "
-                "understood by third-party type checkers.",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-
-        ns = {'__annotations__': dict(fields)}
-        module = _caller()
-        if module is not None:
-            # Setting correct module is necessary to make typed dict classes pickleable.
-            ns['__module__'] = module
-
-        td = _TypedDictMeta(typename, (), ns, total=total, closed=closed)
-        td.__orig_bases__ = (TypedDict,)
-        return td
-
-    if hasattr(typing, "_TypedDictMeta"):
-        _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
-    else:
-        _TYPEDDICT_TYPES = (_TypedDictMeta,)
-
-    def is_typeddict(tp):
-        """Check if an annotation is a TypedDict class
-
-        For example::
-            class Film(TypedDict):
-                title: str
-                year: int
-
-            is_typeddict(Film)  # => True
-            is_typeddict(Union[list, str])  # => False
-        """
-        # On 3.8, this would otherwise return True
-        if hasattr(typing, "TypedDict") and tp is typing.TypedDict:
-            return False
-        return isinstance(tp, _TYPEDDICT_TYPES)
-
-
-if hasattr(typing, "assert_type"):
-    assert_type = typing.assert_type
-
-else:
-    def assert_type(val, typ, /):
-        """Assert (to the type checker) that the value is of the given type.
-
-        When the type checker encounters a call to assert_type(), it
-        emits an error if the value is not of the specified type::
-
-            def greet(name: str) -> None:
-                assert_type(name, str)  # ok
-                assert_type(name, int)  # type checker error
-
-        At runtime this returns the first argument unchanged and otherwise
-        does nothing.
-        """
-        return val
-
-
-if hasattr(typing, "ReadOnly"):  # 3.13+
-    get_type_hints = typing.get_type_hints
-else:  # <=3.12
-    # replaces _strip_annotations()
-    def _strip_extras(t):
-        """Strips Annotated, Required and NotRequired from a given type."""
-        if isinstance(t, _AnnotatedAlias):
-            return _strip_extras(t.__origin__)
-        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
-            return _strip_extras(t.__args__[0])
-        if isinstance(t, typing._GenericAlias):
-            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
-            if stripped_args == t.__args__:
-                return t
-            return t.copy_with(stripped_args)
-        if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
-            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
-            if stripped_args == t.__args__:
-                return t
-            return _types.GenericAlias(t.__origin__, stripped_args)
-        if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
-            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
-            if stripped_args == t.__args__:
-                return t
-            return functools.reduce(operator.or_, stripped_args)
-
-        return t
-
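-    # Sketch of what _strip_extras does (illustrative calls, assuming List is
-    # imported from typing):
-    #
-    #     _strip_extras(Annotated[int, "meta"]) is int              # True
-    #     _strip_extras(List[Annotated[int, "meta"]]) == List[int]  # True
-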
-    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
-        """Return type hints for an object.
-
-        This is often the same as obj.__annotations__, but it handles
-        forward references encoded as string literals, adds Optional[t] if a
-        default value equal to None is set and recursively replaces all
-        'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
-        (unless 'include_extras=True').
-
-        The argument may be a module, class, method, or function. The annotations
-        are returned as a dictionary. For classes, annotations include also
-        inherited members.
-
-        TypeError is raised if the argument is not of a type that can contain
-        annotations, and an empty dictionary is returned if no annotations are
-        present.
-
-        BEWARE -- the behavior of globalns and localns is counterintuitive
-        (unless you are familiar with how eval() and exec() work).  The
-        search order is locals first, then globals.
-
-        - If no dict arguments are passed, an attempt is made to use the
-          globals from obj (or the respective module's globals for classes),
-          and these are also used as the locals.  If the object does not appear
-          to have globals, an empty dictionary is used.
-
-        - If one dict argument is passed, it is used for both globals and
-          locals.
-
-        - If two dict arguments are passed, they specify globals and
-          locals, respectively.
-        """
-        if hasattr(typing, "Annotated"):  # 3.9+
-            hint = typing.get_type_hints(
-                obj, globalns=globalns, localns=localns, include_extras=True
-            )
-        else:  # 3.8
-            hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
-        if include_extras:
-            return hint
-        return {k: _strip_extras(t) for k, t in hint.items()}
-
-
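-# Illustrative difference from plain typing.get_type_hints (the Movie class
-# is a hypothetical example):
-#
-#     class Movie(TypedDict, total=False):
-#         title: Required[str]
-#
-#     get_type_hints(Movie)                       # {'title': <class 'str'>}
-#     get_type_hints(Movie, include_extras=True)  # {'title': Required[str]}
-
-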
-# Python 3.9+ has PEP 593 (Annotated)
-if hasattr(typing, 'Annotated'):
-    Annotated = typing.Annotated
-    # Not exported and not a public API, but needed for get_origin() and get_args()
-    # to work.
-    _AnnotatedAlias = typing._AnnotatedAlias
-# 3.8
-else:
-    class _AnnotatedAlias(typing._GenericAlias, _root=True):
-        """Runtime representation of an annotated type.
-
-        At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
-        with extra annotations. The alias behaves like a normal typing alias,
-        instantiating is the same as instantiating the underlying type, binding
-        it to types is also the same.
-        """
-        def __init__(self, origin, metadata):
-            if isinstance(origin, _AnnotatedAlias):
-                metadata = origin.__metadata__ + metadata
-                origin = origin.__origin__
-            super().__init__(origin, origin)
-            self.__metadata__ = metadata
-
-        def copy_with(self, params):
-            assert len(params) == 1
-            new_type = params[0]
-            return _AnnotatedAlias(new_type, self.__metadata__)
-
-        def __repr__(self):
-            return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
-                    f"{', '.join(repr(a) for a in self.__metadata__)}]")
-
-        def __reduce__(self):
-            return operator.getitem, (
-                Annotated, (self.__origin__, *self.__metadata__)
-            )
-
-        def __eq__(self, other):
-            if not isinstance(other, _AnnotatedAlias):
-                return NotImplemented
-            if self.__origin__ != other.__origin__:
-                return False
-            return self.__metadata__ == other.__metadata__
-
-        def __hash__(self):
-            return hash((self.__origin__, self.__metadata__))
-
-    class Annotated:
-        """Add context specific metadata to a type.
-
-        Example: Annotated[int, runtime_check.Unsigned] indicates to the
-        hypothetical runtime_check module that this type is an unsigned int.
-        Every other consumer of this type can ignore this metadata and treat
-        this type as int.
-
-        The first argument to Annotated must be a valid type (and will be in
-        the __origin__ field), the remaining arguments are kept as a tuple in
-        the __metadata__ field.
-
-        Details:
-
-        - It's an error to call `Annotated` with less than two arguments.
-        - Nested Annotated are flattened::
-
-            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
-
-        - Instantiating an annotated type is equivalent to instantiating the
-        underlying type::
-
-            Annotated[C, Ann1](5) == C(5)
-
-        - Annotated can be used as a generic type alias::
-
-            Optimized = Annotated[T, runtime.Optimize()]
-            Optimized[int] == Annotated[int, runtime.Optimize()]
-
-            OptimizedList = Annotated[List[T], runtime.Optimize()]
-            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
-        """
-
-        __slots__ = ()
-
-        def __new__(cls, *args, **kwargs):
-            raise TypeError("Type Annotated cannot be instantiated.")
-
-        @typing._tp_cache
-        def __class_getitem__(cls, params):
-            if not isinstance(params, tuple) or len(params) < 2:
-                raise TypeError("Annotated[...] should be used "
-                                "with at least two arguments (a type and an "
-                                "annotation).")
-            allowed_special_forms = (ClassVar, Final)
-            if get_origin(params[0]) in allowed_special_forms:
-                origin = params[0]
-            else:
-                msg = "Annotated[t, ...]: t must be a type."
-                origin = typing._type_check(params[0], msg)
-            metadata = tuple(params[1:])
-            return _AnnotatedAlias(origin, metadata)
-
-        def __init_subclass__(cls, *args, **kwargs):
-            raise TypeError(
-                f"Cannot subclass {cls.__module__}.Annotated"
-            )
-
-# Python 3.8 has get_origin() and get_args() but those implementations aren't
-# Annotated-aware, so we can't use those. Python 3.9's versions don't support
-# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
-if sys.version_info[:2] >= (3, 10):
-    get_origin = typing.get_origin
-    get_args = typing.get_args
-# 3.8-3.9
-else:
-    try:
-        # 3.9+
-        from typing import _BaseGenericAlias
-    except ImportError:
-        _BaseGenericAlias = typing._GenericAlias
-    try:
-        # 3.9+
-        from typing import GenericAlias as _typing_GenericAlias
-    except ImportError:
-        _typing_GenericAlias = typing._GenericAlias
-
-    def get_origin(tp):
-        """Get the unsubscripted version of a type.
-
-        This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
-        and Annotated. Return None for unsupported types. Examples::
-
-            get_origin(Literal[42]) is Literal
-            get_origin(int) is None
-            get_origin(ClassVar[int]) is ClassVar
-            get_origin(Generic) is Generic
-            get_origin(Generic[T]) is Generic
-            get_origin(Union[T, int]) is Union
-            get_origin(List[Tuple[T, T]][int]) == list
-            get_origin(P.args) is P
-        """
-        if isinstance(tp, _AnnotatedAlias):
-            return Annotated
-        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
-                           ParamSpecArgs, ParamSpecKwargs)):
-            return tp.__origin__
-        if tp is typing.Generic:
-            return typing.Generic
-        return None
-
-    def get_args(tp):
-        """Get type arguments with all substitutions performed.
-
-        For unions, basic simplifications used by Union constructor are performed.
-        Examples::
-            get_args(Dict[str, int]) == (str, int)
-            get_args(int) == ()
-            get_args(Union[int, Union[T, int], str][int]) == (int, str)
-            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
-            get_args(Callable[[], T][int]) == ([], int)
-        """
-        if isinstance(tp, _AnnotatedAlias):
-            return (tp.__origin__, *tp.__metadata__)
-        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
-            if getattr(tp, "_special", False):
-                return ()
-            res = tp.__args__
-            if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
-                res = (list(res[:-1]), res[-1])
-            return res
-        return ()
-
-
-# 3.10+
-if hasattr(typing, 'TypeAlias'):
-    TypeAlias = typing.TypeAlias
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
-    @_ExtensionsSpecialForm
-    def TypeAlias(self, parameters):
-        """Special marker indicating that an assignment should
-        be recognized as a proper type alias definition by type
-        checkers.
-
-        For example::
-
-            Predicate: TypeAlias = Callable[..., bool]
-
-        It's invalid when used anywhere except as in the example above.
-        """
-        raise TypeError(f"{self} is not subscriptable")
-# 3.8
-else:
-    TypeAlias = _ExtensionsSpecialForm(
-        'TypeAlias',
-        doc="""Special marker indicating that an assignment should
-        be recognized as a proper type alias definition by type
-        checkers.
-
-        For example::
-
-            Predicate: TypeAlias = Callable[..., bool]
-
-        It's invalid when used anywhere except as in the example
-        above."""
-    )
-
-
-if hasattr(typing, "NoDefault"):
-    NoDefault = typing.NoDefault
-else:
-    class NoDefaultTypeMeta(type):
-        def __setattr__(cls, attr, value):
-            # TypeError is consistent with the behavior of NoneType
-            raise TypeError(
-                f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
-            )
-
-    class NoDefaultType(metaclass=NoDefaultTypeMeta):
-        """The type of the NoDefault singleton."""
-
-        __slots__ = ()
-
-        def __new__(cls):
-            return globals().get("NoDefault") or object.__new__(cls)
-
-        def __repr__(self):
-            return "typing_extensions.NoDefault"
-
-        def __reduce__(self):
-            return "NoDefault"
-
-    NoDefault = NoDefaultType()
-    del NoDefaultType, NoDefaultTypeMeta
-
-
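-# NoDefault is the sentinel meaning "no default was supplied"; the backported
-# type variables below test it by identity (illustrative):
-#
-#     repr(NoDefault)            # 'typing_extensions.NoDefault'
-#     # has_default() is implemented as: default is not NoDefault
-
-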
-def _set_default(type_param, default):
-    type_param.has_default = lambda: default is not NoDefault
-    type_param.__default__ = default
-
-
-def _set_module(typevarlike):
-    # for pickling:
-    def_mod = _caller(depth=3)
-    if def_mod != 'typing_extensions':
-        typevarlike.__module__ = def_mod
-
-
-class _DefaultMixin:
-    """Mixin for TypeVarLike defaults."""
-
-    __slots__ = ()
-    __init__ = _set_default
-
-
-# Classes using this metaclass must provide a _backported_typevarlike ClassVar
-class _TypeVarLikeMeta(type):
-    def __instancecheck__(cls, __instance: Any) -> bool:
-        return isinstance(__instance, cls._backported_typevarlike)
-
-
-if _PEP_696_IMPLEMENTED:
-    from typing import TypeVar
-else:
-    # Add default and infer_variance parameters from PEP 696 and 695
-    class TypeVar(metaclass=_TypeVarLikeMeta):
-        """Type variable."""
-
-        _backported_typevarlike = typing.TypeVar
-
-        def __new__(cls, name, *constraints, bound=None,
-                    covariant=False, contravariant=False,
-                    default=NoDefault, infer_variance=False):
-            if hasattr(typing, "TypeAliasType"):
-                # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
-                typevar = typing.TypeVar(name, *constraints, bound=bound,
-                                         covariant=covariant, contravariant=contravariant,
-                                         infer_variance=infer_variance)
-            else:
-                typevar = typing.TypeVar(name, *constraints, bound=bound,
-                                         covariant=covariant, contravariant=contravariant)
-                if infer_variance and (covariant or contravariant):
-                    raise ValueError("Variance cannot be specified with infer_variance.")
-                typevar.__infer_variance__ = infer_variance
-
-            _set_default(typevar, default)
-            _set_module(typevar)
-
-            def _tvar_prepare_subst(alias, args):
-                if (
-                    typevar.has_default()
-                    and alias.__parameters__.index(typevar) == len(args)
-                ):
-                    args += (typevar.__default__,)
-                return args
-
-            typevar.__typing_prepare_subst__ = _tvar_prepare_subst
-            return typevar
-
-        def __init_subclass__(cls) -> None:
-            raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
-
-
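-# Illustrative use of the backported TypeVar (the names are examples):
-#
-#     T = TypeVar("T", default=int)
-#     T.has_default()    # True
-#     T.__default__      # <class 'int'>
-#     U = TypeVar("U")
-#     U.has_default()    # False
-
-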
-# Python 3.10+ has PEP 612
-if hasattr(typing, 'ParamSpecArgs'):
-    ParamSpecArgs = typing.ParamSpecArgs
-    ParamSpecKwargs = typing.ParamSpecKwargs
-# 3.8-3.9
-else:
-    class _Immutable:
-        """Mixin to indicate that object should not be copied."""
-        __slots__ = ()
-
-        def __copy__(self):
-            return self
-
-        def __deepcopy__(self, memo):
-            return self
-
-    class ParamSpecArgs(_Immutable):
-        """The args for a ParamSpec object.
-
-        Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
-
-        ParamSpecArgs objects have a reference back to their ParamSpec:
-
-        P.args.__origin__ is P
-
-        This type is meant for runtime introspection and has no special meaning to
-        static type checkers.
-        """
-        def __init__(self, origin):
-            self.__origin__ = origin
-
-        def __repr__(self):
-            return f"{self.__origin__.__name__}.args"
-
-        def __eq__(self, other):
-            if not isinstance(other, ParamSpecArgs):
-                return NotImplemented
-            return self.__origin__ == other.__origin__
-
-    class ParamSpecKwargs(_Immutable):
-        """The kwargs for a ParamSpec object.
-
-        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
-
-        ParamSpecKwargs objects have a reference back to their ParamSpec:
-
-        P.kwargs.__origin__ is P
-
-        This type is meant for runtime introspection and has no special meaning to
-        static type checkers.
-        """
-        def __init__(self, origin):
-            self.__origin__ = origin
-
-        def __repr__(self):
-            return f"{self.__origin__.__name__}.kwargs"
-
-        def __eq__(self, other):
-            if not isinstance(other, ParamSpecKwargs):
-                return NotImplemented
-            return self.__origin__ == other.__origin__
-
-
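-# Illustrative introspection of ParamSpecArgs/ParamSpecKwargs (P is an
-# example name):
-#
-#     P = ParamSpec("P")
-#     P.args.__origin__ is P    # True
-#     repr(P.kwargs)            # 'P.kwargs'
-
-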
-if _PEP_696_IMPLEMENTED:
-    from typing import ParamSpec
-
-# 3.10+
-elif hasattr(typing, 'ParamSpec'):
-
-    # Add default parameter - PEP 696
-    class ParamSpec(metaclass=_TypeVarLikeMeta):
-        """Parameter specification."""
-
-        _backported_typevarlike = typing.ParamSpec
-
-        def __new__(cls, name, *, bound=None,
-                    covariant=False, contravariant=False,
-                    infer_variance=False, default=NoDefault):
-            if hasattr(typing, "TypeAliasType"):
-                # PEP 695 implemented (3.12+), can pass infer_variance to typing.ParamSpec
-                paramspec = typing.ParamSpec(name, bound=bound,
-                                             covariant=covariant,
-                                             contravariant=contravariant,
-                                             infer_variance=infer_variance)
-            else:
-                paramspec = typing.ParamSpec(name, bound=bound,
-                                             covariant=covariant,
-                                             contravariant=contravariant)
-                paramspec.__infer_variance__ = infer_variance
-
-            _set_default(paramspec, default)
-            _set_module(paramspec)
-
-            def _paramspec_prepare_subst(alias, args):
-                params = alias.__parameters__
-                i = params.index(paramspec)
-                if i == len(args) and paramspec.has_default():
-                    args = [*args, paramspec.__default__]
-                if i >= len(args):
-                    raise TypeError(f"Too few arguments for {alias}")
-                # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
-                if len(params) == 1 and not typing._is_param_expr(args[0]):
-                    assert i == 0
-                    args = (args,)
-                # Convert lists to tuples to help other libraries cache the results.
-                elif isinstance(args[i], list):
-                    args = (*args[:i], tuple(args[i]), *args[i + 1:])
-                return args
-
-            paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
-            return paramspec
-
-        def __init_subclass__(cls) -> None:
-            raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
-
-# 3.8-3.9
-else:
-
-    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
-    class ParamSpec(list, _DefaultMixin):
-        """Parameter specification variable.
-
-        Usage::
-
-           P = ParamSpec('P')
-
-        Parameter specification variables exist primarily for the benefit of static
-        type checkers.  They are used to forward the parameter types of one
-        callable to another callable, a pattern commonly found in higher order
-        functions and decorators.  They are only valid when used in ``Concatenate``,
-        or as the first argument to ``Callable``. In Python 3.10 and higher,
-        they are also supported in user-defined Generics at runtime.
-        See class Generic for more information on generic types.  An
-        example for annotating a decorator::
-
-           T = TypeVar('T')
-           P = ParamSpec('P')
-
-           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
-               '''A type-safe decorator to add logging to a function.'''
-               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
-                   logging.info(f'{f.__name__} was called')
-                   return f(*args, **kwargs)
-               return inner
-
-           @add_logging
-           def add_two(x: float, y: float) -> float:
-               '''Add two numbers together.'''
-               return x + y
-
-        Parameter specification variables defined with covariant=True or
-        contravariant=True can be used to declare covariant or contravariant
-        generic types.  These keyword arguments are valid, but their actual semantics
-        are yet to be decided.  See PEP 612 for details.
-
-        Parameter specification variables can be introspected, e.g.:
-
-           P.__name__ == 'P'
-           P.__bound__ == None
-           P.__covariant__ == False
-           P.__contravariant__ == False
-
-        Note that only parameter specification variables defined in global scope can
-        be pickled.
-        """
-
-        # Trick Generic into treating this as a TypeVar when collecting __parameters__.
-        __class__ = typing.TypeVar
-
-        @property
-        def args(self):
-            return ParamSpecArgs(self)
-
-        @property
-        def kwargs(self):
-            return ParamSpecKwargs(self)
-
-        def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
-                     infer_variance=False, default=NoDefault):
-            list.__init__(self, [self])
-            self.__name__ = name
-            self.__covariant__ = bool(covariant)
-            self.__contravariant__ = bool(contravariant)
-            self.__infer_variance__ = bool(infer_variance)
-            if bound:
-                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
-            else:
-                self.__bound__ = None
-            _DefaultMixin.__init__(self, default)
-
-            # for pickling:
-            def_mod = _caller()
-            if def_mod != 'typing_extensions':
-                self.__module__ = def_mod
-
-        def __repr__(self):
-            if self.__infer_variance__:
-                prefix = ''
-            elif self.__covariant__:
-                prefix = '+'
-            elif self.__contravariant__:
-                prefix = '-'
-            else:
-                prefix = '~'
-            return prefix + self.__name__
-
-        def __hash__(self):
-            return object.__hash__(self)
-
-        def __eq__(self, other):
-            return self is other
-
-        def __reduce__(self):
-            return self.__name__
-
-        # Hack to get typing._type_check to pass.
-        def __call__(self, *args, **kwargs):
-            pass
-
-
-# 3.8-3.9
-if not hasattr(typing, 'Concatenate'):
-    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
-    class _ConcatenateGenericAlias(list):
-
-        # Trick Generic into looking into this for __parameters__.
-        __class__ = typing._GenericAlias
-
-        # Flag in 3.8.
-        _special = False
-
-        def __init__(self, origin, args):
-            super().__init__(args)
-            self.__origin__ = origin
-            self.__args__ = args
-
-        def __repr__(self):
-            _type_repr = typing._type_repr
-            return (f'{_type_repr(self.__origin__)}'
-                    f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
-
-        def __hash__(self):
-            return hash((self.__origin__, self.__args__))
-
-        # Hack to get typing._type_check to pass in Generic.
-        def __call__(self, *args, **kwargs):
-            pass
-
-        @property
-        def __parameters__(self):
-            return tuple(
-                tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
-            )
-
-
-# 3.8-3.9
-@typing._tp_cache
-def _concatenate_getitem(self, parameters):
-    if parameters == ():
-        raise TypeError("Cannot take a Concatenate of no types.")
-    if not isinstance(parameters, tuple):
-        parameters = (parameters,)
-    if not isinstance(parameters[-1], ParamSpec):
-        raise TypeError("The last parameter to Concatenate should be a "
-                        "ParamSpec variable.")
-    msg = "Concatenate[arg, ...]: each arg must be a type."
-    parameters = tuple(typing._type_check(p, msg) for p in parameters)
-    return _ConcatenateGenericAlias(self, parameters)
-
-
-# 3.10+
-if hasattr(typing, 'Concatenate'):
-    Concatenate = typing.Concatenate
-    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
-    @_ExtensionsSpecialForm
-    def Concatenate(self, parameters):
-        """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
-        higher order function which adds, removes or transforms parameters of a
-        callable.
-
-        For example::
-
-           Callable[Concatenate[int, P], int]
-
-        See PEP 612 for detailed information.
-        """
-        return _concatenate_getitem(self, parameters)
-# 3.8
-else:
-    class _ConcatenateForm(_ExtensionsSpecialForm, _root=True):
-        def __getitem__(self, parameters):
-            return _concatenate_getitem(self, parameters)
-
-    Concatenate = _ConcatenateForm(
-        'Concatenate',
-        doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
-        higher order function which adds, removes or transforms parameters of a
-        callable.
-
-        For example::
-
-           Callable[Concatenate[int, P], int]
-
-        See PEP 612 for detailed information.
-        """)
-
-# 3.10+
-if hasattr(typing, 'TypeGuard'):
-    TypeGuard = typing.TypeGuard
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
-    @_ExtensionsSpecialForm
-    def TypeGuard(self, parameters):
-        """Special typing form used to annotate the return type of a user-defined
-        type guard function.  ``TypeGuard`` only accepts a single type argument.
-        At runtime, functions marked this way should return a boolean.
-
-        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
-        type checkers to determine a more precise type of an expression within a
-        program's code flow.  Usually type narrowing is done by analyzing
-        conditional code flow and applying the narrowing to a block of code.  The
-        conditional expression here is sometimes referred to as a "type guard".
-
-        Sometimes it would be convenient to use a user-defined boolean function
-        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
-        return type to alert static type checkers to this intention.
-
-        Using  ``-> TypeGuard`` tells the static type checker that for a given
-        function:
-
-        1. The return value is a boolean.
-        2. If the return value is ``True``, the type of its argument
-        is the type inside ``TypeGuard``.
-
-        For example::
-
-            def is_str(val: Union[str, float]):
-                # "isinstance" type guard
-                if isinstance(val, str):
-                    # Type of ``val`` is narrowed to ``str``
-                    ...
-                else:
-                    # Else, type of ``val`` is narrowed to ``float``.
-                    ...
-
-        Strict type narrowing is not enforced: for a guard declared as
-        ``def f(val: TypeA) -> TypeGuard[TypeB]``, ``TypeB`` need not be a
-        narrower form of ``TypeA`` (it can even be a wider form) and this may lead to
-        type-unsafe results.  The main reason is to allow for things like
-        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
-        a subtype of the former, since ``List`` is invariant.  The responsibility of
-        writing type-safe type guards is left to the user.
-
-        ``TypeGuard`` also works with type variables.  For more information, see
-        PEP 647 (User-Defined Type Guards).
-        """
-        item = typing._type_check(parameters, f'{self} accepts only a single type.')
-        return typing._GenericAlias(self, (item,))
-# 3.8
-else:
-    class _TypeGuardForm(_ExtensionsSpecialForm, _root=True):
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type')
-            return typing._GenericAlias(self, (item,))
-
-    TypeGuard = _TypeGuardForm(
-        'TypeGuard',
-        doc="""Special typing form used to annotate the return type of a user-defined
-        type guard function.  ``TypeGuard`` only accepts a single type argument.
-        At runtime, functions marked this way should return a boolean.
-
-        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
-        type checkers to determine a more precise type of an expression within a
-        program's code flow.  Usually type narrowing is done by analyzing
-        conditional code flow and applying the narrowing to a block of code.  The
-        conditional expression here is sometimes referred to as a "type guard".
-
-        Sometimes it would be convenient to use a user-defined boolean function
-        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
-        return type to alert static type checkers to this intention.
-
-        Using  ``-> TypeGuard`` tells the static type checker that for a given
-        function:
-
-        1. The return value is a boolean.
-        2. If the return value is ``True``, the type of its argument
-        is the type inside ``TypeGuard``.
-
-        For example::
-
-            def is_str(val: Union[str, float]):
-                # "isinstance" type guard
-                if isinstance(val, str):
-                    # Type of ``val`` is narrowed to ``str``
-                    ...
-                else:
-                    # Else, type of ``val`` is narrowed to ``float``.
-                    ...
-
-        Strict type narrowing is not enforced: for a guard declared as
-        ``def f(val: TypeA) -> TypeGuard[TypeB]``, ``TypeB`` need not be a
-        narrower form of ``TypeA`` (it can even be a wider form) and this may lead to
-        type-unsafe results.  The main reason is to allow for things like
-        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
-        a subtype of the former, since ``List`` is invariant.  The responsibility of
-        writing type-safe type guards is left to the user.
-
-        ``TypeGuard`` also works with type variables.  For more information, see
-        PEP 647 (User-Defined Type Guards).
-        """)
-
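-# A minimal end-to-end TypeGuard example (the function names are
-# illustrative, not part of the module):
-#
-#     def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
-#         return all(isinstance(x, str) for x in val)
-#
-#     def process(val: List[object]) -> None:
-#         if is_str_list(val):
-#             print(" ".join(val))    # val is narrowed to List[str] here
-
-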
-# 3.13+
-if hasattr(typing, 'TypeIs'):
-    TypeIs = typing.TypeIs
-# 3.9-3.12
-elif sys.version_info[:2] >= (3, 9):
-    @_ExtensionsSpecialForm
-    def TypeIs(self, parameters):
-        """Special typing form used to annotate the return type of a user-defined
-        type narrower function.  ``TypeIs`` only accepts a single type argument.
-        At runtime, functions marked this way should return a boolean.
-
-        ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
-        type checkers to determine a more precise type of an expression within a
-        program's code flow.  Usually type narrowing is done by analyzing
-        conditional code flow and applying the narrowing to a block of code.  The
-        conditional expression here is sometimes referred to as a "type guard".
-
-        Sometimes it would be convenient to use a user-defined boolean function
-        as a type guard.  Such a function should use ``TypeIs[...]`` as its
-        return type to alert static type checkers to this intention.
-
-        Using  ``-> TypeIs`` tells the static type checker that for a given
-        function:
-
-        1. The return value is a boolean.
-        2. If the return value is ``True``, the type of its argument
-        is the intersection of the type inside ``TypeIs`` and the argument's
-        previously known type.
-
-        For example::
-
-            def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
-                return hasattr(val, '__await__')
-
-            def f(val: Union[int, Awaitable[int]]) -> int:
-                if is_awaitable(val):
-                    assert_type(val, Awaitable[int])
-                else:
-                    assert_type(val, int)
-
-        ``TypeIs`` also works with type variables.  For more information, see
-        PEP 742 (Narrowing types with TypeIs).
-        """
-        item = typing._type_check(parameters, f'{self} accepts only a single type.')
-        return typing._GenericAlias(self, (item,))
-# 3.8
-else:
-    class _TypeIsForm(_ExtensionsSpecialForm, _root=True):
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type')
-            return typing._GenericAlias(self, (item,))
-
-    TypeIs = _TypeIsForm(
-        'TypeIs',
-        doc="""Special typing form used to annotate the return type of a user-defined
-        type narrower function.  ``TypeIs`` only accepts a single type argument.
-        At runtime, functions marked this way should return a boolean.
-
-        ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
-        type checkers to determine a more precise type of an expression within a
-        program's code flow.  Usually type narrowing is done by analyzing
-        conditional code flow and applying the narrowing to a block of code.  The
-        conditional expression here is sometimes referred to as a "type guard".
-
-        Sometimes it would be convenient to use a user-defined boolean function
-        as a type guard.  Such a function should use ``TypeIs[...]`` as its
-        return type to alert static type checkers to this intention.
-
-        Using  ``-> TypeIs`` tells the static type checker that for a given
-        function:
-
-        1. The return value is a boolean.
-        2. If the return value is ``True``, the type of its argument
-        is the intersection of the type inside ``TypeIs`` and the argument's
-        previously known type.
-
-        For example::
-
-            def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
-                return hasattr(val, '__await__')
-
-            def f(val: Union[int, Awaitable[int]]) -> int:
-                if is_awaitable(val):
-                    assert_type(val, Awaitable[int])
-                else:
-                    assert_type(val, int)
-
-        ``TypeIs`` also works with type variables.  For more information, see
-        PEP 742 (Narrowing types with TypeIs).
-        """)
-
-
-# Vendored from CPython typing._SpecialForm
-class _SpecialForm(typing._Final, _root=True):
-    __slots__ = ('_name', '__doc__', '_getitem')
-
-    def __init__(self, getitem):
-        self._getitem = getitem
-        self._name = getitem.__name__
-        self.__doc__ = getitem.__doc__
-
-    def __getattr__(self, item):
-        if item in {'__name__', '__qualname__'}:
-            return self._name
-
-        raise AttributeError(item)
-
-    def __mro_entries__(self, bases):
-        raise TypeError(f"Cannot subclass {self!r}")
-
-    def __repr__(self):
-        return f'typing_extensions.{self._name}'
-
-    def __reduce__(self):
-        return self._name
-
-    def __call__(self, *args, **kwds):
-        raise TypeError(f"Cannot instantiate {self!r}")
-
-    def __or__(self, other):
-        return typing.Union[self, other]
-
-    def __ror__(self, other):
-        return typing.Union[other, self]
-
-    def __instancecheck__(self, obj):
-        raise TypeError(f"{self} cannot be used with isinstance()")
-
-    def __subclasscheck__(self, cls):
-        raise TypeError(f"{self} cannot be used with issubclass()")
-
-    @typing._tp_cache
-    def __getitem__(self, parameters):
-        return self._getitem(self, parameters)
-
-
-if hasattr(typing, "LiteralString"):  # 3.11+
-    LiteralString = typing.LiteralString
-else:
-    @_SpecialForm
-    def LiteralString(self, params):
-        """Represents an arbitrary literal string.
-
-        Example::
-
-          from typing_extensions import LiteralString
-
-          def query(sql: LiteralString) -> ...:
-              ...
-
-          query("SELECT * FROM table")  # ok
-          query(f"SELECT * FROM {input()}")  # not ok
-
-        See PEP 675 for details.
-
-        """
-        raise TypeError(f"{self} is not subscriptable")
-
-
-if hasattr(typing, "Self"):  # 3.11+
-    Self = typing.Self
-else:
-    @_SpecialForm
-    def Self(self, params):
-        """Used to spell the type of "self" in classes.
-
-        Example::
-
-          from typing import Self
-
-          class ReturnsSelf:
-              def parse(self, data: bytes) -> Self:
-                  ...
-                  return self
-
-        """
-
-        raise TypeError(f"{self} is not subscriptable")
-
-
-if hasattr(typing, "Never"):  # 3.11+
-    Never = typing.Never
-else:
-    @_SpecialForm
-    def Never(self, params):
-        """The bottom type, a type that has no members.
-
-        This can be used to define a function that should never be
-        called, or a function that never returns::
-
-            from typing_extensions import Never
-
-            def never_call_me(arg: Never) -> None:
-                pass
-
-            def int_or_str(arg: int | str) -> None:
-                never_call_me(arg)  # type checker error
-                match arg:
-                    case int():
-                        print("It's an int")
-                    case str():
-                        print("It's a str")
-                    case _:
-                        never_call_me(arg)  # ok, arg is of type Never
-
-        """
-
-        raise TypeError(f"{self} is not subscriptable")
-
-
-if hasattr(typing, 'Required'):  # 3.11+
-    Required = typing.Required
-    NotRequired = typing.NotRequired
-elif sys.version_info[:2] >= (3, 9):  # 3.9-3.10
-    @_ExtensionsSpecialForm
-    def Required(self, parameters):
-        """A special typing construct to mark a key of a total=False TypedDict
-        as required. For example:
-
-            class Movie(TypedDict, total=False):
-                title: Required[str]
-                year: int
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-
-        There is no runtime checking that a required key is actually provided
-        when instantiating a related TypedDict.
-        """
-        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
-        return typing._GenericAlias(self, (item,))
-
-    @_ExtensionsSpecialForm
-    def NotRequired(self, parameters):
-        """A special typing construct to mark a key of a TypedDict as
-        potentially missing. For example:
-
-            class Movie(TypedDict):
-                title: str
-                year: NotRequired[int]
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-        """
-        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
-        return typing._GenericAlias(self, (item,))
-
-else:  # 3.8
-    class _RequiredForm(_ExtensionsSpecialForm, _root=True):
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type.')
-            return typing._GenericAlias(self, (item,))
-
-    Required = _RequiredForm(
-        'Required',
-        doc="""A special typing construct to mark a key of a total=False TypedDict
-        as required. For example:
-
-            class Movie(TypedDict, total=False):
-                title: Required[str]
-                year: int
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-
-        There is no runtime checking that a required key is actually provided
-        when instantiating a related TypedDict.
-        """)
-    NotRequired = _RequiredForm(
-        'NotRequired',
-        doc="""A special typing construct to mark a key of a TypedDict as
-        potentially missing. For example:
-
-            class Movie(TypedDict):
-                title: str
-                year: NotRequired[int]
-
-            m = Movie(
-                title='The Matrix',  # typechecker error if key is omitted
-                year=1999,
-            )
-        """)
-
-
-if hasattr(typing, 'ReadOnly'):  # 3.13+
-    ReadOnly = typing.ReadOnly
-elif sys.version_info[:2] >= (3, 9):  # 3.9-3.12
-    @_ExtensionsSpecialForm
-    def ReadOnly(self, parameters):
-        """A special typing construct to mark an item of a TypedDict as read-only.
-
-        For example:
-
-            class Movie(TypedDict):
-                title: ReadOnly[str]
-                year: int
-
-            def mutate_movie(m: Movie) -> None:
-                m["year"] = 1992  # allowed
-                m["title"] = "The Matrix"  # typechecker error
-
-        There is no runtime checking for this property.
-        """
-        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
-        return typing._GenericAlias(self, (item,))
-
-else:  # 3.8
-    class _ReadOnlyForm(_ExtensionsSpecialForm, _root=True):
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type.')
-            return typing._GenericAlias(self, (item,))
-
-    ReadOnly = _ReadOnlyForm(
-        'ReadOnly',
-        doc="""A special typing construct to mark a key of a TypedDict as read-only.
-
-        For example:
-
-            class Movie(TypedDict):
-                title: ReadOnly[str]
-                year: int
-
-            def mutate_movie(m: Movie) -> None:
-                m["year"] = 1992  # allowed
-                m["title"] = "The Matrix"  # typechecker error
-
-        There is no runtime checking for this property.
-        """)
-
-
-_UNPACK_DOC = """\
-Type unpack operator.
-
-The type unpack operator takes the child types from some container type,
-such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
-example:
-
-  # For some generic class `Foo`:
-  Foo[Unpack[tuple[int, str]]]  # Equivalent to Foo[int, str]
-
-  Ts = TypeVarTuple('Ts')
-  # Specifies that `Bar` is generic in an arbitrary number of types.
-  # (Think of `Ts` as a tuple of an arbitrary number of individual
-  #  `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
-  #  `Generic[]`.)
-  class Bar(Generic[Unpack[Ts]]): ...
-  Bar[int]  # Valid
-  Bar[int, str]  # Also valid
-
-From Python 3.11, this can also be done using the `*` operator:
-
-    Foo[*tuple[int, str]]
-    class Bar(Generic[*Ts]): ...
-
-The operator can also be used along with a `TypedDict` to annotate
-`**kwargs` in a function signature. For instance:
-
-  class Movie(TypedDict):
-    name: str
-    year: int
-
-  # This function expects two keyword arguments - *name* of type `str` and
-  # *year* of type `int`.
-  def foo(**kwargs: Unpack[Movie]): ...
-
-Note that there is only some runtime checking of this operator. Not
-everything the runtime allows may be accepted by static type checkers.
-
-For more information, see PEP 646 and PEP 692.
-"""
-
-
-if sys.version_info >= (3, 12):  # PEP 692 changed the repr of Unpack[]
-    Unpack = typing.Unpack
-
-    def _is_unpack(obj):
-        return get_origin(obj) is Unpack
-
-elif sys.version_info[:2] >= (3, 9):  # 3.9+
-    class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
-        def __init__(self, getitem):
-            super().__init__(getitem)
-            self.__doc__ = _UNPACK_DOC
-
-    class _UnpackAlias(typing._GenericAlias, _root=True):
-        __class__ = typing.TypeVar
-
-        @property
-        def __typing_unpacked_tuple_args__(self):
-            assert self.__origin__ is Unpack
-            assert len(self.__args__) == 1
-            arg, = self.__args__
-            if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
-                if arg.__origin__ is not tuple:
-                    raise TypeError("Unpack[...] must be used with a tuple type")
-                return arg.__args__
-            return None
-
-    @_UnpackSpecialForm
-    def Unpack(self, parameters):
-        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
-        return _UnpackAlias(self, (item,))
-
-    def _is_unpack(obj):
-        return isinstance(obj, _UnpackAlias)
-
-else:  # 3.8
-    class _UnpackAlias(typing._GenericAlias, _root=True):
-        __class__ = typing.TypeVar
-
-    class _UnpackForm(_ExtensionsSpecialForm, _root=True):
-        def __getitem__(self, parameters):
-            item = typing._type_check(parameters,
-                                      f'{self._name} accepts only a single type.')
-            return _UnpackAlias(self, (item,))
-
-    Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC)
-
-    def _is_unpack(obj):
-        return isinstance(obj, _UnpackAlias)
-
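The Unpack backport above only constructs an alias at runtime; a minimal sketch of its two documented uses (variadic generics and ``**kwargs`` typing), assuming typing_extensions is importable:

    from typing import Generic
    from typing_extensions import TypedDict, TypeVarTuple, Unpack

    Ts = TypeVarTuple("Ts")

    class Batch(Generic[Unpack[Ts]]):   # variadic generic
        pass

    print(Batch[int])        # Batch[int]
    print(Batch[int, str])   # Batch[int, str] -- any arity is accepted

    class Movie(TypedDict):
        name: str
        year: int

    def make_movie(**kwargs: Unpack[Movie]) -> None:
        print(kwargs)        # checkers see name: str and year: int

    make_movie(name="Heat", year=1995)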
-
-if _PEP_696_IMPLEMENTED:
-    from typing import TypeVarTuple
-
-elif hasattr(typing, "TypeVarTuple"):  # 3.11+
-
-    def _unpack_args(*args):
-        newargs = []
-        for arg in args:
-            subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
-            if subargs is not None and not (subargs and subargs[-1] is ...):
-                newargs.extend(subargs)
-            else:
-                newargs.append(arg)
-        return newargs
-
-    # Add default parameter - PEP 696
-    class TypeVarTuple(metaclass=_TypeVarLikeMeta):
-        """Type variable tuple."""
-
-        _backported_typevarlike = typing.TypeVarTuple
-
-        def __new__(cls, name, *, default=NoDefault):
-            tvt = typing.TypeVarTuple(name)
-            _set_default(tvt, default)
-            _set_module(tvt)
-
-            def _typevartuple_prepare_subst(alias, args):
-                params = alias.__parameters__
-                typevartuple_index = params.index(tvt)
-                for param in params[typevartuple_index + 1:]:
-                    if isinstance(param, TypeVarTuple):
-                        raise TypeError(
-                            f"More than one TypeVarTuple parameter in {alias}"
-                        )
-
-                alen = len(args)
-                plen = len(params)
-                left = typevartuple_index
-                right = plen - typevartuple_index - 1
-                var_tuple_index = None
-                fillarg = None
-                for k, arg in enumerate(args):
-                    if not isinstance(arg, type):
-                        subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
-                        if subargs and len(subargs) == 2 and subargs[-1] is ...:
-                            if var_tuple_index is not None:
-                                raise TypeError(
-                                    "More than one unpacked "
-                                    "arbitrary-length tuple argument"
-                                )
-                            var_tuple_index = k
-                            fillarg = subargs[0]
-                if var_tuple_index is not None:
-                    left = min(left, var_tuple_index)
-                    right = min(right, alen - var_tuple_index - 1)
-                elif left + right > alen:
-                    raise TypeError(f"Too few arguments for {alias};"
-                                    f" actual {alen}, expected at least {plen - 1}")
-                if left == alen - right and tvt.has_default():
-                    replacement = _unpack_args(tvt.__default__)
-                else:
-                    replacement = args[left: alen - right]
-
-                return (
-                    *args[:left],
-                    *([fillarg] * (typevartuple_index - left)),
-                    replacement,
-                    *([fillarg] * (plen - right - left - typevartuple_index - 1)),
-                    *args[alen - right:],
-                )
-
-            tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
-            return tvt
-
-        def __init_subclass__(self, *args, **kwds):
-            raise TypeError("Cannot subclass special typing classes")
-
-else:  # <=3.10
-    class TypeVarTuple(_DefaultMixin):
-        """Type variable tuple.
-
-        Usage::
-
-            Ts = TypeVarTuple('Ts')
-
-        In the same way that a normal type variable is a stand-in for a single
-        type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
-        type such as ``Tuple[int, str]``.
-
-        Type variable tuples can be used in ``Generic`` declarations.
-        Consider the following example::
-
-            class Array(Generic[*Ts]): ...
-
-        The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
-        where ``T1`` and ``T2`` are type variables. To use these type variables
-        as type parameters of ``Array``, we must *unpack* the type variable tuple using
-        the star operator: ``*Ts``. The signature of ``Array`` then behaves
-        as if we had simply written ``class Array(Generic[T1, T2]): ...``.
-        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
-        us to parameterise the class with an *arbitrary* number of type parameters.
-
-        Type variable tuples can be used anywhere a normal ``TypeVar`` can.
-        This includes class definitions, as shown above, as well as function
-        signatures and variable annotations::
-
-            class Array(Generic[*Ts]):
-
-                def __init__(self, shape: Tuple[*Ts]):
-                    self._shape: Tuple[*Ts] = shape
-
-                def get_shape(self) -> Tuple[*Ts]:
-                    return self._shape
-
-            shape = (Height(480), Width(640))
-            x: Array[Height, Width] = Array(shape)
-            y = abs(x)  # Inferred type is Array[Height, Width]
-            z = x + x   #        ...    is Array[Height, Width]
-            x.get_shape()  #     ...    is tuple[Height, Width]
-
-        """
-
-        # Trick Generic __parameters__.
-        __class__ = typing.TypeVar
-
-        def __iter__(self):
-            yield self.__unpacked__
-
-        def __init__(self, name, *, default=NoDefault):
-            self.__name__ = name
-            _DefaultMixin.__init__(self, default)
-
-            # for pickling:
-            def_mod = _caller()
-            if def_mod != 'typing_extensions':
-                self.__module__ = def_mod
-
-            self.__unpacked__ = Unpack[self]
-
-        def __repr__(self):
-            return self.__name__
-
-        def __hash__(self):
-            return object.__hash__(self)
-
-        def __eq__(self, other):
-            return self is other
-
-        def __reduce__(self):
-            return self.__name__
-
-        def __init_subclass__(self, *args, **kwds):
-            if '_root' not in kwds:
-                raise TypeError("Cannot subclass special typing classes")
-
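The ``_typevartuple_prepare_subst`` hook above is what makes PEP 696 defaults participate in subscription; a small sketch, assuming Python 3.11+ (the branch this hook lives in) and a typing_extensions recent enough to accept the ``default=`` parameter:

    from typing import Generic
    from typing_extensions import TypeVar, TypeVarTuple, Unpack

    T = TypeVar("T")
    # default= is the PEP 696 extension the backport adds.
    Ts = TypeVarTuple("Ts", default=Unpack[tuple[int, str]])

    class Frame(Generic[T, Unpack[Ts]]):
        pass

    print(Frame[bool, bytes])  # explicit arguments are used as given
    print(Frame[bool])         # Ts falls back to its default: int, str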
-
-if hasattr(typing, "reveal_type"):  # 3.11+
-    reveal_type = typing.reveal_type
-else:  # <=3.10
-    def reveal_type(obj: T, /) -> T:
-        """Reveal the inferred type of a variable.
-
-        When a static type checker encounters a call to ``reveal_type()``,
-        it will emit the inferred type of the argument::
-
-            x: int = 1
-            reveal_type(x)
-
-        Running a static type checker (e.g., ``mypy``) on this example
-        will produce output similar to 'Revealed type is "builtins.int"'.
-
-        At runtime, the function prints the runtime type of the
-        argument and returns it unchanged.
-
-        """
-        print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
-        return obj
-
-
-if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"):  # 3.11+
-    _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
-else:  # <=3.10
-    _ASSERT_NEVER_REPR_MAX_LENGTH = 100
-
-
-if hasattr(typing, "assert_never"):  # 3.11+
-    assert_never = typing.assert_never
-else:  # <=3.10
-    def assert_never(arg: Never, /) -> Never:
-        """Assert to the type checker that a line of code is unreachable.
-
-        Example::
-
-            def int_or_str(arg: int | str) -> None:
-                match arg:
-                    case int():
-                        print("It's an int")
-                    case str():
-                        print("It's a str")
-                    case _:
-                        assert_never(arg)
-
-        If a type checker finds that a call to assert_never() is
-        reachable, it will emit an error.
-
-        At runtime, this raises an AssertionError when called.
-
-        """
-        value = repr(arg)
-        if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
-            value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
-        raise AssertionError(f"Expected code to be unreachable, but got: {value}")
-
-
-if sys.version_info >= (3, 12):  # 3.12+
-    # dataclass_transform exists in 3.11 but lacks the frozen_default parameter
-    dataclass_transform = typing.dataclass_transform
-else:  # <=3.11
-    def dataclass_transform(
-        *,
-        eq_default: bool = True,
-        order_default: bool = False,
-        kw_only_default: bool = False,
-        frozen_default: bool = False,
-        field_specifiers: typing.Tuple[
-            typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
-            ...
-        ] = (),
-        **kwargs: typing.Any,
-    ) -> typing.Callable[[T], T]:
-        """Decorator that marks a function, class, or metaclass as providing
-        dataclass-like behavior.
-
-        Example:
-
-            from typing_extensions import dataclass_transform
-
-            _T = TypeVar("_T")
-
-            # Used on a decorator function
-            @dataclass_transform()
-            def create_model(cls: type[_T]) -> type[_T]:
-                ...
-                return cls
-
-            @create_model
-            class CustomerModel:
-                id: int
-                name: str
-
-            # Used on a base class
-            @dataclass_transform()
-            class ModelBase: ...
-
-            class CustomerModel(ModelBase):
-                id: int
-                name: str
-
-            # Used on a metaclass
-            @dataclass_transform()
-            class ModelMeta(type): ...
-
-            class ModelBase(metaclass=ModelMeta): ...
-
-            class CustomerModel(ModelBase):
-                id: int
-                name: str
-
-        Each of the ``CustomerModel`` classes defined in this example will now
-        behave similarly to a dataclass created with the ``@dataclasses.dataclass``
-        decorator. For example, the type checker will synthesize an ``__init__``
-        method.
-
-        The arguments to this decorator can be used to customize this behavior:
-        - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
-          True or False if it is omitted by the caller.
-        - ``order_default`` indicates whether the ``order`` parameter is
-          assumed to be True or False if it is omitted by the caller.
-        - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
-          assumed to be True or False if it is omitted by the caller.
-        - ``frozen_default`` indicates whether the ``frozen`` parameter is
-          assumed to be True or False if it is omitted by the caller.
-        - ``field_specifiers`` specifies a static list of supported classes
-          or functions that describe fields, similar to ``dataclasses.field()``.
-
-        At runtime, this decorator records its arguments in the
-        ``__dataclass_transform__`` attribute on the decorated object.
-
-        See PEP 681 for details.
-
-        """
-        def decorator(cls_or_fn):
-            cls_or_fn.__dataclass_transform__ = {
-                "eq_default": eq_default,
-                "order_default": order_default,
-                "kw_only_default": kw_only_default,
-                "frozen_default": frozen_default,
-                "field_specifiers": field_specifiers,
-                "kwargs": kwargs,
-            }
-            return cls_or_fn
-        return decorator
-
-
-if hasattr(typing, "override"):  # 3.12+
-    override = typing.override
-else:  # <=3.11
-    _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
-
-    def override(arg: _F, /) -> _F:
-        """Indicate that a method is intended to override a method in a base class.
-
-        Usage:
-
-            class Base:
-                def method(self) -> None:
-                    pass
-
-            class Child(Base):
-                @override
-                def method(self) -> None:
-                    super().method()
-
-        When this decorator is applied to a method, the type checker will
-        validate that it overrides a method with the same name on a base class.
-        This helps prevent bugs that may occur when a base class is changed
-        without an equivalent change to a child class.
-
-        There is no runtime checking of these properties. The decorator
-        sets the ``__override__`` attribute to ``True`` on the decorated object
-        to allow runtime introspection.
-
-        See PEP 698 for details.
-
-        """
-        try:
-            arg.__override__ = True
-        except (AttributeError, TypeError):
-            # Skip the attribute silently if it is not writable.
-            # AttributeError happens if the object has __slots__ or a
-            # read-only property, TypeError if it's a builtin class.
-            pass
-        return arg
-
-
-if hasattr(warnings, "deprecated"):
-    deprecated = warnings.deprecated
-else:
-    _T = typing.TypeVar("_T")
-
-    class deprecated:
-        """Indicate that a class, function or overload is deprecated.
-
-        When this decorator is applied to an object, the type checker
-        will generate a diagnostic on usage of the deprecated object.
-
-        Usage:
-
-            @deprecated("Use B instead")
-            class A:
-                pass
-
-            @deprecated("Use g instead")
-            def f():
-                pass
-
-            @overload
-            @deprecated("int support is deprecated")
-            def g(x: int) -> int: ...
-            @overload
-            def g(x: str) -> int: ...
-
-        The warning specified by *category* will be emitted at runtime
-        on use of deprecated objects. For functions, that happens on calls;
-        for classes, on instantiation and on creation of subclasses.
-        If the *category* is ``None``, no warning is emitted at runtime.
-        The *stacklevel* determines where the
-        warning is emitted. If it is ``1`` (the default), the warning
-        is emitted at the direct caller of the deprecated object; if it
-        is higher, it is emitted further up the stack.
-        Static type checker behavior is not affected by the *category*
-        and *stacklevel* arguments.
-
-        The deprecation message passed to the decorator is saved in the
-        ``__deprecated__`` attribute on the decorated object.
-        If applied to an overload, the decorator
-        must be after the ``@overload`` decorator for the attribute to
-        exist on the overload as returned by ``get_overloads()``.
-
-        See PEP 702 for details.
-
-        """
-        def __init__(
-            self,
-            message: str,
-            /,
-            *,
-            category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
-            stacklevel: int = 1,
-        ) -> None:
-            if not isinstance(message, str):
-                raise TypeError(
-                    "Expected an object of type str for 'message', not "
-                    f"{type(message).__name__!r}"
-                )
-            self.message = message
-            self.category = category
-            self.stacklevel = stacklevel
-
-        def __call__(self, arg: _T, /) -> _T:
-            # Make sure the inner functions created below don't
-            # retain a reference to self.
-            msg = self.message
-            category = self.category
-            stacklevel = self.stacklevel
-            if category is None:
-                arg.__deprecated__ = msg
-                return arg
-            elif isinstance(arg, type):
-                import functools
-                from types import MethodType
-
-                original_new = arg.__new__
-
-                @functools.wraps(original_new)
-                def __new__(cls, *args, **kwargs):
-                    if cls is arg:
-                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
-                    if original_new is not object.__new__:
-                        return original_new(cls, *args, **kwargs)
-                    # Mirrors a similar check in object.__new__.
-                    elif cls.__init__ is object.__init__ and (args or kwargs):
-                        raise TypeError(f"{cls.__name__}() takes no arguments")
-                    else:
-                        return original_new(cls)
-
-                arg.__new__ = staticmethod(__new__)
-
-                original_init_subclass = arg.__init_subclass__
-                # We need slightly different behavior if __init_subclass__
-                # is a bound method (likely if it was implemented in Python)
-                if isinstance(original_init_subclass, MethodType):
-                    original_init_subclass = original_init_subclass.__func__
-
-                    @functools.wraps(original_init_subclass)
-                    def __init_subclass__(*args, **kwargs):
-                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
-                        return original_init_subclass(*args, **kwargs)
-
-                    arg.__init_subclass__ = classmethod(__init_subclass__)
-                # Or otherwise, which likely means it's a builtin such as
-                # object's implementation of __init_subclass__.
-                else:
-                    @functools.wraps(original_init_subclass)
-                    def __init_subclass__(*args, **kwargs):
-                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
-                        return original_init_subclass(*args, **kwargs)
-
-                    arg.__init_subclass__ = __init_subclass__
-
-                arg.__deprecated__ = __new__.__deprecated__ = msg
-                __init_subclass__.__deprecated__ = msg
-                return arg
-            elif callable(arg):
-                import functools
-
-                @functools.wraps(arg)
-                def wrapper(*args, **kwargs):
-                    warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
-                    return arg(*args, **kwargs)
-
-                arg.__deprecated__ = wrapper.__deprecated__ = msg
-                return wrapper
-            else:
-                raise TypeError(
-                    "@deprecated decorator with non-None category must be applied to "
-                    f"a class or callable, not {arg!r}"
-                )
-
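As the docstring above notes, the warning is emitted on call (for functions) or instantiation (for classes); a short sketch of that runtime behavior, assuming typing_extensions exports deprecated:

    import warnings
    from typing_extensions import deprecated

    @deprecated("Use new_api() instead")
    def old_api() -> None:
        pass

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        old_api()

    assert old_api.__deprecated__ == "Use new_api() instead"
    assert issubclass(caught[0].category, DeprecationWarning)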
-
-# We have to do some monkey patching to deal with the dual nature of
-# Unpack/TypeVarTuple:
-# - We want Unpack to be a kind of TypeVar so it gets accepted in
-#   Generic[Unpack[Ts]]
-# - We want it to *not* be treated as a TypeVar for the purposes of
-#   counting generic parameters, so that when we subscript a generic,
-#   the runtime doesn't try to substitute the Unpack with the subscripted type.
-if not hasattr(typing, "TypeVarTuple"):
-    def _check_generic(cls, parameters, elen=_marker):
-        """Check correct count for parameters of a generic cls (internal helper).
-
-        This gives a nice error message in case of count mismatch.
-        """
-        if not elen:
-            raise TypeError(f"{cls} is not a generic class")
-        if elen is _marker:
-            if not hasattr(cls, "__parameters__") or not cls.__parameters__:
-                raise TypeError(f"{cls} is not a generic class")
-            elen = len(cls.__parameters__)
-        alen = len(parameters)
-        if alen != elen:
-            expect_val = elen
-            if hasattr(cls, "__parameters__"):
-                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
-                num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
-                if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
-                    return
-
-                # deal with TypeVarLike defaults
-                # required TypeVarLikes cannot appear after a defaulted one.
-                if alen < elen:
-                    # since we validate TypeVarLike default in _collect_type_vars
-                    # or _collect_parameters we can safely check parameters[alen]
-                    if (
-                        getattr(parameters[alen], '__default__', NoDefault)
-                        is not NoDefault
-                    ):
-                        return
-
-                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
-                                         is not NoDefault for p in parameters)
-
-                    elen -= num_default_tv
-
-                    expect_val = f"at least {elen}"
-
-            things = "arguments" if sys.version_info >= (3, 10) else "parameters"
-            raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
-                            f" for {cls}; actual {alen}, expected {expect_val}")
-else:
-    # Python 3.11+
-
-    def _check_generic(cls, parameters, elen):
-        """Check correct count for parameters of a generic cls (internal helper).
-
-        This gives a nice error message in case of count mismatch.
-        """
-        if not elen:
-            raise TypeError(f"{cls} is not a generic class")
-        alen = len(parameters)
-        if alen != elen:
-            expect_val = elen
-            if hasattr(cls, "__parameters__"):
-                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
-
-                # deal with TypeVarLike defaults
-                # required TypeVarLikes cannot appear after a defaulted one.
-                if alen < elen:
-                    # since we validate TypeVarLike default in _collect_type_vars
-                    # or _collect_parameters we can safely check parameters[alen]
-                    if (
-                        getattr(parameters[alen], '__default__', NoDefault)
-                        is not NoDefault
-                    ):
-                        return
-
-                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
-                                         is not NoDefault for p in parameters)
-
-                    elen -= num_default_tv
-
-                    expect_val = f"at least {elen}"
-
-            raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
-                            f" for {cls}; actual {alen}, expected {expect_val}")
-
-if not _PEP_696_IMPLEMENTED:
-    typing._check_generic = _check_generic
-
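The default-aware arity check above is what lets trailing defaulted parameters be omitted when subscripting a generic; a sketch, assuming typing_extensions's TypeVar with the PEP 696 ``default=`` parameter:

    from typing import Generic
    from typing_extensions import TypeVar

    T = TypeVar("T")
    U = TypeVar("U", default=str)   # defaulted parameters must come last

    class Pair(Generic[T, U]):
        pass

    print(Pair[int, bytes])  # both parameters supplied
    print(Pair[int])         # U falls back to its default; no TypeError
    try:
        Pair[int, bytes, float]
    except TypeError as exc:
        print(exc)           # Too many arguments for Pair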
-
-def _has_generic_or_protocol_as_origin() -> bool:
-    try:
-        frame = sys._getframe(2)
-    # - Catch AttributeError: not all Python implementations have sys._getframe()
-    # - Catch ValueError: maybe we're called from an unexpected module
-    #   and the call stack isn't deep enough
-    except (AttributeError, ValueError):
-        return False  # err on the side of leniency
-    else:
-        # If we somehow get invoked from outside typing.py,
-        # also err on the side of leniency
-        if frame.f_globals.get("__name__") != "typing":
-            return False
-        origin = frame.f_locals.get("origin")
-        # Cannot use "in" because origin may be an object with a buggy __eq__ that
-        # throws an error.
-        return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
-
-
-_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
-
-
-def _is_unpacked_typevartuple(x) -> bool:
-    if get_origin(x) is not Unpack:
-        return False
-    args = get_args(x)
-    return (
-        bool(args)
-        and len(args) == 1
-        and type(args[0]) in _TYPEVARTUPLE_TYPES
-    )
-
-
-# Python 3.11+ _collect_type_vars was renamed to _collect_parameters
-if hasattr(typing, '_collect_type_vars'):
-    def _collect_type_vars(types, typevar_types=None):
-        """Collect all type variable contained in types in order of
-        first appearance (lexicographic order). For example::
-
-            _collect_type_vars((T, List[S, T])) == (T, S)
-        """
-        if typevar_types is None:
-            typevar_types = typing.TypeVar
-        tvars = []
-
-        # A required TypeVarLike cannot appear after a TypeVarLike with a default
-        # if it was a direct call to `Generic[]` or `Protocol[]`
-        enforce_default_ordering = _has_generic_or_protocol_as_origin()
-        default_encountered = False
-
-        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
-        type_var_tuple_encountered = False
-
-        for t in types:
-            if _is_unpacked_typevartuple(t):
-                type_var_tuple_encountered = True
-            elif isinstance(t, typevar_types) and t not in tvars:
-                if enforce_default_ordering:
-                    has_default = getattr(t, '__default__', NoDefault) is not NoDefault
-                    if has_default:
-                        if type_var_tuple_encountered:
-                            raise TypeError('Type parameter with a default'
-                                            ' follows TypeVarTuple')
-                        default_encountered = True
-                    elif default_encountered:
-                        raise TypeError(f'Type parameter {t!r} without a default'
-                                        ' follows type parameter with a default')
-
-                tvars.append(t)
-            if _should_collect_from_parameters(t):
-                tvars.extend([t for t in t.__parameters__ if t not in tvars])
-        return tuple(tvars)
-
-    typing._collect_type_vars = _collect_type_vars
-else:
-    def _collect_parameters(args):
-        """Collect all type variables and parameter specifications in args
-        in order of first appearance (lexicographic order).
-
-        For example::
-
-            assert _collect_parameters((T, Callable[P, T])) == (T, P)
-        """
-        parameters = []
-
-        # A required TypeVarLike cannot appear after a TypeVarLike with default
-        # if it was a direct call to `Generic[]` or `Protocol[]`
-        enforce_default_ordering = _has_generic_or_protocol_as_origin()
-        default_encountered = False
-
-        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
-        type_var_tuple_encountered = False
-
-        for t in args:
-            if isinstance(t, type):
-                # We don't want __parameters__ descriptor of a bare Python class.
-                pass
-            elif isinstance(t, tuple):
-                # `t` might be a tuple, when `ParamSpec` is substituted with
-                # `[T, int]`, or `[int, *Ts]`, etc.
-                for x in t:
-                    for collected in _collect_parameters([x]):
-                        if collected not in parameters:
-                            parameters.append(collected)
-            elif hasattr(t, '__typing_subst__'):
-                if t not in parameters:
-                    if enforce_default_ordering:
-                        has_default = (
-                            getattr(t, '__default__', NoDefault) is not NoDefault
-                        )
-
-                        if type_var_tuple_encountered and has_default:
-                            raise TypeError('Type parameter with a default'
-                                            ' follows TypeVarTuple')
-
-                        if has_default:
-                            default_encountered = True
-                        elif default_encountered:
-                            raise TypeError(f'Type parameter {t!r} without a default'
-                                            ' follows type parameter with a default')
-
-                    parameters.append(t)
-            else:
-                if _is_unpacked_typevartuple(t):
-                    type_var_tuple_encountered = True
-                for x in getattr(t, '__parameters__', ()):
-                    if x not in parameters:
-                        parameters.append(x)
-
-        return tuple(parameters)
-
-    if not _PEP_696_IMPLEMENTED:
-        typing._collect_parameters = _collect_parameters
-
-# Backport typing.NamedTuple as it exists in Python 3.13.
-# In 3.11, the ability to define generic `NamedTuple`s was added.
-# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
-# In 3.12, we added __orig_bases__ to call-based NamedTuples.
-# In 3.13, we deprecated kwargs-based NamedTuples.
-if sys.version_info >= (3, 13):
-    NamedTuple = typing.NamedTuple
-else:
-    def _make_nmtuple(name, types, module, defaults=()):
-        fields = [n for n, t in types]
-        annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
-                       for n, t in types}
-        nm_tpl = collections.namedtuple(name, fields,
-                                        defaults=defaults, module=module)
-        nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
-        # The `_field_types` attribute was removed in 3.9;
-        # in earlier versions, it is the same as the `__annotations__` attribute
-        if sys.version_info < (3, 9):
-            nm_tpl._field_types = annotations
-        return nm_tpl
-
-    _prohibited_namedtuple_fields = typing._prohibited
-    _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
-
-    class _NamedTupleMeta(type):
-        def __new__(cls, typename, bases, ns):
-            assert _NamedTuple in bases
-            for base in bases:
-                if base is not _NamedTuple and base is not typing.Generic:
-                    raise TypeError(
-                        'can only inherit from a NamedTuple type and Generic')
-            bases = tuple(tuple if base is _NamedTuple else base for base in bases)
-            if "__annotations__" in ns:
-                types = ns["__annotations__"]
-            elif "__annotate__" in ns:
-                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
-                types = ns["__annotate__"](1)
-            else:
-                types = {}
-            default_names = []
-            for field_name in types:
-                if field_name in ns:
-                    default_names.append(field_name)
-                elif default_names:
-                    raise TypeError(f"Non-default namedtuple field {field_name} "
-                                    f"cannot follow default field"
-                                    f"{'s' if len(default_names) > 1 else ''} "
-                                    f"{', '.join(default_names)}")
-            nm_tpl = _make_nmtuple(
-                typename, types.items(),
-                defaults=[ns[n] for n in default_names],
-                module=ns['__module__']
-            )
-            nm_tpl.__bases__ = bases
-            if typing.Generic in bases:
-                if hasattr(typing, '_generic_class_getitem'):  # 3.12+
-                    nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
-                else:
-                    class_getitem = typing.Generic.__class_getitem__.__func__
-                    nm_tpl.__class_getitem__ = classmethod(class_getitem)
-            # update from user namespace without overriding special namedtuple attributes
-            for key, val in ns.items():
-                if key in _prohibited_namedtuple_fields:
-                    raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
-                elif key not in _special_namedtuple_fields:
-                    if key not in nm_tpl._fields:
-                        setattr(nm_tpl, key, ns[key])
-                    try:
-                        set_name = type(val).__set_name__
-                    except AttributeError:
-                        pass
-                    else:
-                        try:
-                            set_name(val, nm_tpl, key)
-                        except BaseException as e:
-                            msg = (
-                                f"Error calling __set_name__ on {type(val).__name__!r} "
-                                f"instance {key!r} in {typename!r}"
-                            )
-                            # BaseException.add_note() existed on py311,
-                            # but the __set_name__ machinery didn't start
-                            # using add_note() until py312.
-                            # Making sure exceptions are raised in the same way
-                            # as in "normal" classes seems most important here.
-                            if sys.version_info >= (3, 12):
-                                e.add_note(msg)
-                                raise
-                            else:
-                                raise RuntimeError(msg) from e
-
-            if typing.Generic in bases:
-                nm_tpl.__init_subclass__()
-            return nm_tpl
-
-    _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
-
-    def _namedtuple_mro_entries(bases):
-        assert NamedTuple in bases
-        return (_NamedTuple,)
-
-    @_ensure_subclassable(_namedtuple_mro_entries)
-    def NamedTuple(typename, fields=_marker, /, **kwargs):
-        """Typed version of namedtuple.
-
-        Usage::
-
-            class Employee(NamedTuple):
-                name: str
-                id: int
-
-        This is equivalent to::
-
-            Employee = collections.namedtuple('Employee', ['name', 'id'])
-
-        The resulting class has an extra __annotations__ attribute, giving a
-        dict that maps field names to types.  (The field names are also in
-        the _fields attribute, which is part of the namedtuple API.)
-        An alternative equivalent functional syntax is also accepted::
-
-            Employee = NamedTuple('Employee', [('name', str), ('id', int)])
-        """
-        if fields is _marker:
-            if kwargs:
-                deprecated_thing = "Creating NamedTuple classes using keyword arguments"
-                deprecation_msg = (
-                    "{name} is deprecated and will be disallowed in Python {remove}. "
-                    "Use the class-based or functional syntax instead."
-                )
-            else:
-                deprecated_thing = "Failing to pass a value for the 'fields' parameter"
-                example = f"`{typename} = NamedTuple({typename!r}, [])`"
-                deprecation_msg = (
-                    "{name} is deprecated and will be disallowed in Python {remove}. "
-                    "To create a NamedTuple class with 0 fields "
-                    "using the functional syntax, "
-                    "pass an empty list, e.g. "
-                ) + example + "."
-        elif fields is None:
-            if kwargs:
-                raise TypeError(
-                    "Cannot pass `None` as the 'fields' parameter "
-                    "and also specify fields using keyword arguments"
-                )
-            else:
-                deprecated_thing = "Passing `None` as the 'fields' parameter"
-                example = f"`{typename} = NamedTuple({typename!r}, [])`"
-                deprecation_msg = (
-                    "{name} is deprecated and will be disallowed in Python {remove}. "
-                    "To create a NamedTuple class with 0 fields "
-                    "using the functional syntax, "
-                    "pass an empty list, e.g. "
-                ) + example + "."
-        elif kwargs:
-            raise TypeError("Either list of fields or keywords"
-                            " can be provided to NamedTuple, not both")
-        if fields is _marker or fields is None:
-            warnings.warn(
-                deprecation_msg.format(name=deprecated_thing, remove="3.15"),
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            fields = kwargs.items()
-        nt = _make_nmtuple(typename, fields, module=_caller())
-        nt.__orig_bases__ = (NamedTuple,)
-        return nt
-
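For reference, both spellings the backport accepts, in a minimal sketch assuming typing_extensions.NamedTuple is importable:

    from typing_extensions import NamedTuple

    class Employee(NamedTuple):
        name: str
        id: int = 0                  # defaulted fields must come last

    Point = NamedTuple("Point", [("x", int), ("y", int)])

    assert Employee("Alice").id == 0
    assert Employee.__annotations__ == {"name": str, "id": int}
    assert Point(1, 2).x == 1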
-
-if hasattr(collections.abc, "Buffer"):
-    Buffer = collections.abc.Buffer
-else:
-    class Buffer(abc.ABC):  # noqa: B024
-        """Base class for classes that implement the buffer protocol.
-
-        The buffer protocol allows Python objects to expose a low-level
-        memory buffer interface. Before Python 3.12, it is not possible
-        to implement the buffer protocol in pure Python code, or even
-        to check whether a class implements the buffer protocol. In
-        Python 3.12 and higher, the ``__buffer__`` method allows access
-        to the buffer protocol from Python code, and the
-        ``collections.abc.Buffer`` ABC allows checking whether a class
-        implements the buffer protocol.
-
-        To indicate support for the buffer protocol in earlier versions,
-        inherit from this ABC, either in a stub file or at runtime,
-        or use ABC registration. This ABC provides no methods, because
-        there are no Python-accessible methods shared by pre-3.12 buffer
-        classes. It is useful primarily for static checks.
-
-        """
-
-    # As a courtesy, register the most common stdlib buffer classes.
-    Buffer.register(memoryview)
-    Buffer.register(bytearray)
-    Buffer.register(bytes)
-
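Because the fallback is a plain ABC with the common builtins registered, isinstance checks behave much as they do on 3.12+; a quick sketch:

    from typing_extensions import Buffer

    assert isinstance(b"bytes", Buffer)
    assert isinstance(bytearray(b"data"), Buffer)
    assert isinstance(memoryview(b"view"), Buffer)
    assert not isinstance("text", Buffer)

    class MyBuf:
        pass

    Buffer.register(MyBuf)              # virtual subclassing via registration
    assert isinstance(MyBuf(), Buffer)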
-
-# Backport of types.get_original_bases, available on 3.12+ in CPython
-if hasattr(_types, "get_original_bases"):
-    get_original_bases = _types.get_original_bases
-else:
-    def get_original_bases(cls, /):
-        """Return the class's "original" bases prior to modification by `__mro_entries__`.
-
-        Examples::
-
-            from typing import TypeVar, Generic
-            from typing_extensions import NamedTuple, TypedDict
-
-            T = TypeVar("T")
-            class Foo(Generic[T]): ...
-            class Bar(Foo[int], float): ...
-            class Baz(list[str]): ...
-            Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
-            Spam = TypedDict("Spam", {"a": int, "b": str})
-
-            assert get_original_bases(Bar) == (Foo[int], float)
-            assert get_original_bases(Baz) == (list[str],)
-            assert get_original_bases(Eggs) == (NamedTuple,)
-            assert get_original_bases(Spam) == (TypedDict,)
-            assert get_original_bases(int) == (object,)
-        """
-        try:
-            return cls.__dict__.get("__orig_bases__", cls.__bases__)
-        except AttributeError:
-            raise TypeError(
-                f'Expected an instance of type, not {type(cls).__name__!r}'
-            ) from None
-
-
-# NewType is a class on Python 3.10+, making it pickleable
-# The error message for subclassing instances of NewType was improved on 3.11+
-if sys.version_info >= (3, 11):
-    NewType = typing.NewType
-else:
-    class NewType:
-        """NewType creates simple unique types with almost zero
-        runtime overhead. NewType(name, tp) is considered a subtype of tp
-        by static type checkers. At runtime, NewType(name, tp) returns
-        a dummy callable that simply returns its argument. Usage::
-            UserId = NewType('UserId', int)
-            def name_by_id(user_id: UserId) -> str:
-                ...
-            UserId('user')          # Fails type check
-            name_by_id(42)          # Fails type check
-            name_by_id(UserId(42))  # OK
-            num = UserId(5) + 1     # type: int
-        """
-
-        def __call__(self, obj, /):
-            return obj
-
-        def __init__(self, name, tp):
-            self.__qualname__ = name
-            if '.' in name:
-                name = name.rpartition('.')[-1]
-            self.__name__ = name
-            self.__supertype__ = tp
-            def_mod = _caller()
-            if def_mod != 'typing_extensions':
-                self.__module__ = def_mod
-
-        def __mro_entries__(self, bases):
-            # We defined __mro_entries__ to get a better error message
-            # if a user attempts to subclass a NewType instance. bpo-46170
-            supercls_name = self.__name__
-
-            class Dummy:
-                def __init_subclass__(cls):
-                    subcls_name = cls.__name__
-                    raise TypeError(
-                        f"Cannot subclass an instance of NewType. "
-                        f"Perhaps you were looking for: "
-                        f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
-                    )
-
-            return (Dummy,)
-
-        def __repr__(self):
-            return f'{self.__module__}.{self.__qualname__}'
-
-        def __reduce__(self):
-            return self.__qualname__
-
-        if sys.version_info >= (3, 10):
-            # PEP 604 methods
-            # It doesn't make sense to have these methods on Python <3.10
-
-            def __or__(self, other):
-                return typing.Union[self, other]
-
-            def __ror__(self, other):
-                return typing.Union[other, self]
-
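The ``__mro_entries__`` hook above exists purely to improve the error message when someone tries to subclass a NewType instance; a sketch of what that looks like:

    from typing_extensions import NewType

    UserId = NewType("UserId", int)

    try:
        class AdminId(UserId):   # not allowed; NewTypes are not classes
            pass
    except TypeError as exc:
        print(exc)  # suggests: AdminId = NewType('AdminId', UserId)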
-
-if hasattr(typing, "TypeAliasType"):
-    TypeAliasType = typing.TypeAliasType
-else:
-    def _is_unionable(obj):
-        """Corresponds to is_unionable() in unionobject.c in CPython."""
-        return obj is None or isinstance(obj, (
-            type,
-            _types.GenericAlias,
-            _types.UnionType,
-            TypeAliasType,
-        ))
-
-    class TypeAliasType:
-        """Create named, parameterized type aliases.
-
-        This provides a backport of the new `type` statement in Python 3.12:
-
-            type ListOrSet[T] = list[T] | set[T]
-
-        is equivalent to:
-
-            T = TypeVar("T")
-            ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
-
-        The name ListOrSet can then be used as an alias for the type it refers to.
-
-        The type_params argument should contain all the type parameters used
-        in the value of the type alias. If the alias is not generic, this
-        argument is omitted.
-
-        Static type checkers should only support type aliases declared using
-        TypeAliasType that follow these rules:
-
-        - The first argument (the name) must be a string literal.
-        - The TypeAliasType instance must be immediately assigned to a variable
-          of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
-          as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').
-
-        """
-
-        def __init__(self, name: str, value, *, type_params=()):
-            if not isinstance(name, str):
-                raise TypeError("TypeAliasType name must be a string")
-            self.__value__ = value
-            self.__type_params__ = type_params
-
-            parameters = []
-            for type_param in type_params:
-                if isinstance(type_param, TypeVarTuple):
-                    parameters.extend(type_param)
-                else:
-                    parameters.append(type_param)
-            self.__parameters__ = tuple(parameters)
-            def_mod = _caller()
-            if def_mod != 'typing_extensions':
-                self.__module__ = def_mod
-            # Setting this attribute closes the TypeAliasType from further modification
-            self.__name__ = name
-
-        def __setattr__(self, name: str, value: object, /) -> None:
-            if hasattr(self, "__name__"):
-                self._raise_attribute_error(name)
-            super().__setattr__(name, value)
-
-        def __delattr__(self, name: str, /) -> Never:
-            self._raise_attribute_error(name)
-
-        def _raise_attribute_error(self, name: str) -> Never:
-            # Match the Python 3.12 error messages exactly
-            if name == "__name__":
-                raise AttributeError("readonly attribute")
-            elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
-                raise AttributeError(
-                    f"attribute '{name}' of 'typing.TypeAliasType' objects "
-                    "is not writable"
-                )
-            else:
-                raise AttributeError(
-                    f"'typing.TypeAliasType' object has no attribute '{name}'"
-                )
-
-        def __repr__(self) -> str:
-            return self.__name__
-
-        def __getitem__(self, parameters):
-            if not isinstance(parameters, tuple):
-                parameters = (parameters,)
-            parameters = [
-                typing._type_check(
-                    item, f'Subscripting {self.__name__} requires a type.'
-                )
-                for item in parameters
-            ]
-            return typing._GenericAlias(self, tuple(parameters))
-
-        def __reduce__(self):
-            return self.__name__
-
-        def __init_subclass__(cls, *args, **kwargs):
-            raise TypeError(
-                "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
-            )
-
-        # The presence of this method convinces typing._type_check
-        # that TypeAliasTypes are types.
-        def __call__(self):
-            raise TypeError("Type alias is not callable")
-
-        if sys.version_info >= (3, 10):
-            def __or__(self, right):
-                # For forward compatibility with 3.12, reject Unions
-                # that are not accepted by the built-in Union.
-                if not _is_unionable(right):
-                    return NotImplemented
-                return typing.Union[self, right]
-
-            def __ror__(self, left):
-                if not _is_unionable(left):
-                    return NotImplemented
-                return typing.Union[left, self]
-
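A sketch of the backported alias in use, assuming typing_extensions is importable (the ``type`` statement itself requires 3.12):

    from typing import List, Set, Union
    from typing_extensions import TypeAliasType, TypeVar

    T = TypeVar("T")
    ListOrSet = TypeAliasType(
        "ListOrSet", Union[List[T], Set[T]], type_params=(T,)
    )

    print(ListOrSet)        # ListOrSet
    print(ListOrSet[int])   # ListOrSet[int]
    print(ListOrSet.__value__)

    try:
        ListOrSet.__value__ = int   # attributes freeze once __name__ is set
    except AttributeError as exc:
        print(exc)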
-
-if hasattr(typing, "is_protocol"):
-    is_protocol = typing.is_protocol
-    get_protocol_members = typing.get_protocol_members
-else:
-    def is_protocol(tp: type, /) -> bool:
-        """Return True if the given type is a Protocol.
-
-        Example::
-
-            >>> from typing_extensions import Protocol, is_protocol
-            >>> class P(Protocol):
-            ...     def a(self) -> str: ...
-            ...     b: int
-            >>> is_protocol(P)
-            True
-            >>> is_protocol(int)
-            False
-        """
-        return (
-            isinstance(tp, type)
-            and getattr(tp, '_is_protocol', False)
-            and tp is not Protocol
-            and tp is not typing.Protocol
-        )
-
-    def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
-        """Return the set of members defined in a Protocol.
-
-        Example::
-
-            >>> from typing_extensions import Protocol, get_protocol_members
-            >>> class P(Protocol):
-            ...     def a(self) -> str: ...
-            ...     b: int
-            >>> get_protocol_members(P)
-            frozenset({'a', 'b'})
-
-        Raise a TypeError for arguments that are not Protocols.
-        """
-        if not is_protocol(tp):
-            raise TypeError(f'{tp!r} is not a Protocol')
-        if hasattr(tp, '__protocol_attrs__'):
-            return frozenset(tp.__protocol_attrs__)
-        return frozenset(_get_protocol_attrs(tp))
-
-
-if hasattr(typing, "Doc"):
-    Doc = typing.Doc
-else:
-    class Doc:
-        """Define the documentation of a type annotation using ``Annotated``, to be
-        used in class attributes, function and method parameters, return values,
-        and variables.
-
-        The value should be a positional-only string literal to allow static tools
-        like editors and documentation generators to use it.
-
-        This complements docstrings.
-
-        The string value passed is available in the attribute ``documentation``.
-
-        Example::
-
-            >>> from typing_extensions import Annotated, Doc
-            >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
-        """
-        def __init__(self, documentation: str, /) -> None:
-            self.documentation = documentation
-
-        def __repr__(self) -> str:
-            return f"Doc({self.documentation!r})"
-
-        def __hash__(self) -> int:
-            return hash(self.documentation)
-
-        def __eq__(self, other: object) -> bool:
-            if not isinstance(other, Doc):
-                return NotImplemented
-            return self.documentation == other.documentation
-
-
-_CapsuleType = getattr(_types, "CapsuleType", None)
-
-if _CapsuleType is None:
-    try:
-        import _socket
-    except ImportError:
-        pass
-    else:
-        _CAPI = getattr(_socket, "CAPI", None)
-        if _CAPI is not None:
-            _CapsuleType = type(_CAPI)
-
-if _CapsuleType is not None:
-    CapsuleType = _CapsuleType
-    __all__.append("CapsuleType")
-
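The fallback above works because capsules have no public constructor, so the type is recovered from a known capsule instance; the same trick in isolation, assuming CPython, where the _socket extension module exposes its C API as a capsule:

    import _socket

    CapsuleType = type(_socket.CAPI)   # <class 'PyCapsule'>
    assert isinstance(_socket.CAPI, CapsuleType)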
-
-# Aliases for items that have always been in typing.
-# Explicitly assign these (rather than using `from typing import *` at the top),
-# so that we get a CI error if one of these is deleted from typing.py
-# in a future version of Python
-AbstractSet = typing.AbstractSet
-AnyStr = typing.AnyStr
-BinaryIO = typing.BinaryIO
-Callable = typing.Callable
-Collection = typing.Collection
-Container = typing.Container
-Dict = typing.Dict
-ForwardRef = typing.ForwardRef
-FrozenSet = typing.FrozenSet
-Generic = typing.Generic
-Hashable = typing.Hashable
-IO = typing.IO
-ItemsView = typing.ItemsView
-Iterable = typing.Iterable
-Iterator = typing.Iterator
-KeysView = typing.KeysView
-List = typing.List
-Mapping = typing.Mapping
-MappingView = typing.MappingView
-Match = typing.Match
-MutableMapping = typing.MutableMapping
-MutableSequence = typing.MutableSequence
-MutableSet = typing.MutableSet
-Optional = typing.Optional
-Pattern = typing.Pattern
-Reversible = typing.Reversible
-Sequence = typing.Sequence
-Set = typing.Set
-Sized = typing.Sized
-TextIO = typing.TextIO
-Tuple = typing.Tuple
-Union = typing.Union
-ValuesView = typing.ValuesView
-cast = typing.cast
-no_type_check = typing.no_type_check
-no_type_check_decorator = typing.no_type_check_decorator
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/INSTALLER b/pkg_resources/_vendor/zipp-3.19.2.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/pkg_resources/_vendor/zipp-3.19.2.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/LICENSE b/pkg_resources/_vendor/zipp-3.19.2.dist-info/LICENSE
deleted file mode 100644
index 1bb5a44356..0000000000
--- a/pkg_resources/_vendor/zipp-3.19.2.dist-info/LICENSE
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/METADATA b/pkg_resources/_vendor/zipp-3.19.2.dist-info/METADATA
deleted file mode 100644
index 1399281717..0000000000
--- a/pkg_resources/_vendor/zipp-3.19.2.dist-info/METADATA
+++ /dev/null
@@ -1,102 +0,0 @@
-Metadata-Version: 2.1
-Name: zipp
-Version: 3.19.2
-Summary: Backport of pathlib-compatible object wrapper for zip files
-Author-email: "Jason R. Coombs" 
-Project-URL: Homepage, https://github.com/jaraco/zipp
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-Description-Content-Type: text/x-rst
-License-File: LICENSE
-Provides-Extra: doc
-Requires-Dist: sphinx >=3.5 ; extra == 'doc'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
-Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
-Requires-Dist: furo ; extra == 'doc'
-Requires-Dist: sphinx-lint ; extra == 'doc'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
-Provides-Extra: test
-Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
-Requires-Dist: pytest-cov ; extra == 'test'
-Requires-Dist: pytest-mypy ; extra == 'test'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
-Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'test'
-Requires-Dist: jaraco.itertools ; extra == 'test'
-Requires-Dist: jaraco.functools ; extra == 'test'
-Requires-Dist: more-itertools ; extra == 'test'
-Requires-Dist: big-O ; extra == 'test'
-Requires-Dist: pytest-ignore-flaky ; extra == 'test'
-Requires-Dist: jaraco.test ; extra == 'test'
-Requires-Dist: importlib-resources ; (python_version < "3.9") and extra == 'test'
-
-.. image:: https://img.shields.io/pypi/v/zipp.svg
-   :target: https://pypi.org/project/zipp
-
-.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
-
-.. image:: https://github.com/jaraco/zipp/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. .. image:: https://readthedocs.org/projects/PROJECT_RTD/badge/?version=latest
-..    :target: https://PROJECT_RTD.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/zipp
-   :target: https://tidelift.com/subscription/pkg/pypi-zipp?utm_source=pypi-zipp&utm_medium=readme
-
-
-A pathlib-compatible Zipfile object wrapper. Official backport of the standard library
-`Path object `_.
-
-
-Compatibility
-=============
-
-New features are introduced in this third-party library and later merged
-into CPython. The following table indicates which versions of this library
-were contributed to different versions of the standard library:
-
-.. list-table::
-   :header-rows: 1
-
-   * - zipp
-     - stdlib
-   * - 3.18
-     - 3.13
-   * - 3.16
-     - 3.12
-   * - 3.5
-     - 3.11
-   * - 3.2
-     - 3.10
-   * - 3.3 ??
-     - 3.9
-   * - 1.0
-     - 3.8
-
-
-Usage
-=====
-
-Use ``zipp.Path`` in place of ``zipfile.Path`` on any Python.
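
A minimal sketch of that drop-in usage (the archive built here is
hypothetical)::

    import io
    import zipfile
    import zipp

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as zf:
        zf.writestr("pkg/data.txt", "hello")

    root = zipp.Path(zipfile.ZipFile(buf))
    print([p.name for p in root.joinpath("pkg").iterdir()])  # ['data.txt']
    print(root.joinpath("pkg", "data.txt").read_text())      # 'hello'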
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more `_.
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/RECORD b/pkg_resources/_vendor/zipp-3.19.2.dist-info/RECORD
deleted file mode 100644
index 77c02835d8..0000000000
--- a/pkg_resources/_vendor/zipp-3.19.2.dist-info/RECORD
+++ /dev/null
@@ -1,15 +0,0 @@
-zipp-3.19.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-zipp-3.19.2.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
-zipp-3.19.2.dist-info/METADATA,sha256=UIrk_kMIHGSwsKKChYizqMw0MMZpPRZ2ZiVpQAsN_bE,3575
-zipp-3.19.2.dist-info/RECORD,,
-zipp-3.19.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-zipp-3.19.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-zipp-3.19.2.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
-zipp/__init__.py,sha256=QuI1g00G4fRAcGt-HqbV0oWIkmSgedCGGYsHHYzNa8A,13412
-zipp/__pycache__/__init__.cpython-312.pyc,,
-zipp/__pycache__/glob.cpython-312.pyc,,
-zipp/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-zipp/compat/__pycache__/__init__.cpython-312.pyc,,
-zipp/compat/__pycache__/py310.cpython-312.pyc,,
-zipp/compat/py310.py,sha256=eZpkW0zRtunkhEh8jjX3gCGe22emoKCBJw72Zt4RkhA,219
-zipp/glob.py,sha256=etWpnfEoRyfUvrUsi6sTiGmErvPwe6HzY6pT8jg_lUI,3082
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/REQUESTED b/pkg_resources/_vendor/zipp-3.19.2.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/WHEEL b/pkg_resources/_vendor/zipp-3.19.2.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/pkg_resources/_vendor/zipp-3.19.2.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/pkg_resources/_vendor/zipp-3.19.2.dist-info/top_level.txt b/pkg_resources/_vendor/zipp-3.19.2.dist-info/top_level.txt
deleted file mode 100644
index e82f676f82..0000000000
--- a/pkg_resources/_vendor/zipp-3.19.2.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-zipp
diff --git a/pkg_resources/_vendor/zipp/__init__.py b/pkg_resources/_vendor/zipp/__init__.py
deleted file mode 100644
index d65297b835..0000000000
--- a/pkg_resources/_vendor/zipp/__init__.py
+++ /dev/null
@@ -1,501 +0,0 @@
-import io
-import posixpath
-import zipfile
-import itertools
-import contextlib
-import pathlib
-import re
-import stat
-import sys
-
-from .compat.py310 import text_encoding
-from .glob import Translator
-
-
-__all__ = ['Path']
-
-
-def _parents(path):
-    """
-    Given a path with elements separated by
-    posixpath.sep, generate all parents of that path.
-
-    >>> list(_parents('b/d'))
-    ['b']
-    >>> list(_parents('/b/d/'))
-    ['/b']
-    >>> list(_parents('b/d/f/'))
-    ['b/d', 'b']
-    >>> list(_parents('b'))
-    []
-    >>> list(_parents(''))
-    []
-    """
-    return itertools.islice(_ancestry(path), 1, None)
-
-
-def _ancestry(path):
-    """
-    Given a path with elements separated by
-    posixpath.sep, generate all elements of that path
-
-    >>> list(_ancestry('b/d'))
-    ['b/d', 'b']
-    >>> list(_ancestry('/b/d/'))
-    ['/b/d', '/b']
-    >>> list(_ancestry('b/d/f/'))
-    ['b/d/f', 'b/d', 'b']
-    >>> list(_ancestry('b'))
-    ['b']
-    >>> list(_ancestry(''))
-    []
-    """
-    path = path.rstrip(posixpath.sep)
-    while path and path != posixpath.sep:
-        yield path
-        path, tail = posixpath.split(path)
-
-
-_dedupe = dict.fromkeys
-"""Deduplicate an iterable in original order"""
-
-
-def _difference(minuend, subtrahend):
-    """
-    Return items in minuend not in subtrahend, retaining order
-    with O(1) lookup.
-    """
-    return itertools.filterfalse(set(subtrahend).__contains__, minuend)
-
-
-class InitializedState:
-    """
-    Mix-in to save the initialization state for pickling.
-    """
-
-    def __init__(self, *args, **kwargs):
-        self.__args = args
-        self.__kwargs = kwargs
-        super().__init__(*args, **kwargs)
-
-    def __getstate__(self):
-        return self.__args, self.__kwargs
-
-    def __setstate__(self, state):
-        args, kwargs = state
-        super().__init__(*args, **kwargs)
-
-
-class SanitizedNames:
-    """
-    ZipFile mix-in to ensure names are sanitized.
-    """
-
-    def namelist(self):
-        return list(map(self._sanitize, super().namelist()))
-
-    @staticmethod
-    def _sanitize(name):
-        r"""
-        Ensure a relative path with posix separators and no dot names.
-
-        Modeled after
-        https://github.com/python/cpython/blob/bcc1be39cb1d04ad9fc0bd1b9193d3972835a57c/Lib/zipfile/__init__.py#L1799-L1813
-        but provides consistent cross-platform behavior.
-
-        >>> san = SanitizedNames._sanitize
-        >>> san('/foo/bar')
-        'foo/bar'
-        >>> san('//foo.txt')
-        'foo.txt'
-        >>> san('foo/.././bar.txt')
-        'foo/bar.txt'
-        >>> san('foo../.bar.txt')
-        'foo../.bar.txt'
-        >>> san('\\foo\\bar.txt')
-        'foo/bar.txt'
-        >>> san('D:\\foo.txt')
-        'D/foo.txt'
-        >>> san('\\\\server\\share\\file.txt')
-        'server/share/file.txt'
-        >>> san('\\\\?\\GLOBALROOT\\Volume3')
-        '?/GLOBALROOT/Volume3'
-        >>> san('\\\\.\\PhysicalDrive1\\root')
-        'PhysicalDrive1/root'
-
-        Retain any trailing slash.
-        >>> san('abc/')
-        'abc/'
-
-        Raises a ValueError if the result is empty.
-        >>> san('../..')
-        Traceback (most recent call last):
-        ...
-        ValueError: Empty filename
-        """
-
-        def allowed(part):
-            return part and part not in {'..', '.'}
-
-        # Remove the drive letter.
-        # Don't use ntpath.splitdrive, because that also strips UNC paths
-        bare = re.sub('^([A-Z]):', r'\1', name, flags=re.IGNORECASE)
-        clean = bare.replace('\\', '/')
-        parts = clean.split('/')
-        joined = '/'.join(filter(allowed, parts))
-        if not joined:
-            raise ValueError("Empty filename")
-        return joined + '/' * name.endswith('/')
-
-
-class CompleteDirs(InitializedState, SanitizedNames, zipfile.ZipFile):
-    """
-    A ZipFile subclass that ensures that implied directories
-    are always included in the namelist.
-
-    >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt']))
-    ['foo/', 'foo/bar/']
-    >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/']))
-    ['foo/']
-    """
-
-    @staticmethod
-    def _implied_dirs(names):
-        parents = itertools.chain.from_iterable(map(_parents, names))
-        as_dirs = (p + posixpath.sep for p in parents)
-        return _dedupe(_difference(as_dirs, names))
-
-    def namelist(self):
-        names = super().namelist()
-        return names + list(self._implied_dirs(names))
-
-    def _name_set(self):
-        return set(self.namelist())
-
-    def resolve_dir(self, name):
-        """
-        If the name represents a directory, return that name
-        as a directory (with the trailing slash).
-        """
-        names = self._name_set()
-        dirname = name + '/'
-        dir_match = name not in names and dirname in names
-        return dirname if dir_match else name
-
-    def getinfo(self, name):
-        """
-        Supplement getinfo for implied dirs.
-        """
-        try:
-            return super().getinfo(name)
-        except KeyError:
-            if not name.endswith('/') or name not in self._name_set():
-                raise
-            return zipfile.ZipInfo(filename=name)
-
-    @classmethod
-    def make(cls, source):
-        """
-        Given a source (filename or zipfile), return an
-        appropriate CompleteDirs subclass.
-        """
-        if isinstance(source, CompleteDirs):
-            return source
-
-        if not isinstance(source, zipfile.ZipFile):
-            return cls(source)
-
-        # Only allow for FastLookup when supplied zipfile is read-only
-        if 'r' not in source.mode:
-            cls = CompleteDirs
-
-        source.__class__ = cls
-        return source
-
-    @classmethod
-    def inject(cls, zf: zipfile.ZipFile) -> zipfile.ZipFile:
-        """
-        Given a writable zip file zf, inject directory entries for
-        any directories implied by the presence of children.
-        """
-        for name in cls._implied_dirs(zf.namelist()):
-            zf.writestr(name, b"")
-        return zf
-
-
-class FastLookup(CompleteDirs):
-    """
-    ZipFile subclass to ensure implicit
-    dirs exist and are resolved rapidly.
-    """
-
-    def namelist(self):
-        with contextlib.suppress(AttributeError):
-            return self.__names
-        self.__names = super().namelist()
-        return self.__names
-
-    def _name_set(self):
-        with contextlib.suppress(AttributeError):
-            return self.__lookup
-        self.__lookup = super()._name_set()
-        return self.__lookup
-
-
-def _extract_text_encoding(encoding=None, *args, **kwargs):
-    # compute stack level so that the caller of the caller sees any warning.
-    is_pypy = sys.implementation.name == 'pypy'
-    stack_level = 3 + is_pypy
-    return text_encoding(encoding, stack_level), args, kwargs
-
-
-class Path:
-    """
-    A :class:`importlib.resources.abc.Traversable` interface for zip files.
-
-    Implements many of the features users enjoy from
-    :class:`pathlib.Path`.
-
-    Consider a zip file with this structure::
-
-        .
-        ├── a.txt
-        └── b
-            ├── c.txt
-            └── d
-                └── e.txt
-
-    >>> data = io.BytesIO()
-    >>> zf = zipfile.ZipFile(data, 'w')
-    >>> zf.writestr('a.txt', 'content of a')
-    >>> zf.writestr('b/c.txt', 'content of c')
-    >>> zf.writestr('b/d/e.txt', 'content of e')
-    >>> zf.filename = 'mem/abcde.zip'
-
-    Path accepts the zipfile object itself or a filename
-
-    >>> path = Path(zf)
-
-    From there, several path operations are available.
-
-    Directory iteration (including the zip file itself):
-
-    >>> a, b = path.iterdir()
-    >>> a
-    Path('mem/abcde.zip', 'a.txt')
-    >>> b
-    Path('mem/abcde.zip', 'b/')
-
-    name property:
-
-    >>> b.name
-    'b'
-
-    join with divide operator:
-
-    >>> c = b / 'c.txt'
-    >>> c
-    Path('mem/abcde.zip', 'b/c.txt')
-    >>> c.name
-    'c.txt'
-
-    Read text:
-
-    >>> c.read_text(encoding='utf-8')
-    'content of c'
-
-    existence:
-
-    >>> c.exists()
-    True
-    >>> (b / 'missing.txt').exists()
-    False
-
-    Coercion to string:
-
-    >>> import os
-    >>> str(c).replace(os.sep, posixpath.sep)
-    'mem/abcde.zip/b/c.txt'
-
-    At the root, ``name``, ``filename``, and ``parent``
-    resolve to the zipfile.
-
-    >>> str(path)
-    'mem/abcde.zip/'
-    >>> path.name
-    'abcde.zip'
-    >>> path.filename == pathlib.Path('mem/abcde.zip')
-    True
-    >>> str(path.parent)
-    'mem'
-
-    If the zipfile has no filename, such attributes are not
-    valid and accessing them will raise an Exception.
-
-    >>> zf.filename = None
-    >>> path.name
-    Traceback (most recent call last):
-    ...
-    TypeError: ...
-
-    >>> path.filename
-    Traceback (most recent call last):
-    ...
-    TypeError: ...
-
-    >>> path.parent
-    Traceback (most recent call last):
-    ...
-    TypeError: ...
-
-    # workaround python/cpython#106763
-    >>> pass
-    """
-
-    __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
-
-    def __init__(self, root, at=""):
-        """
-        Construct a Path from a ZipFile or filename.
-
-        Note: When the source is an existing ZipFile object,
-        its type (__class__) will be mutated to a
-        specialized type. If the caller wishes to retain the
-        original type, the caller should either create a
-        separate ZipFile object or pass a filename.
-        """
-        self.root = FastLookup.make(root)
-        self.at = at
-
-    def __eq__(self, other):
-        """
-        >>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo'
-        False
-        """
-        if self.__class__ is not other.__class__:
-            return NotImplemented
-        return (self.root, self.at) == (other.root, other.at)
-
-    def __hash__(self):
-        return hash((self.root, self.at))
-
-    def open(self, mode='r', *args, pwd=None, **kwargs):
-        """
-        Open this entry as text or binary following the semantics
-        of ``pathlib.Path.open()`` by passing arguments through
-        to io.TextIOWrapper().
-        """
-        if self.is_dir():
-            raise IsADirectoryError(self)
-        zip_mode = mode[0]
-        if not self.exists() and zip_mode == 'r':
-            raise FileNotFoundError(self)
-        stream = self.root.open(self.at, zip_mode, pwd=pwd)
-        if 'b' in mode:
-            if args or kwargs:
-                raise ValueError("encoding args invalid for binary operation")
-            return stream
-        # Text mode:
-        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
-        return io.TextIOWrapper(stream, encoding, *args, **kwargs)
-
-    def _base(self):
-        return pathlib.PurePosixPath(self.at or self.root.filename)
-
-    @property
-    def name(self):
-        return self._base().name
-
-    @property
-    def suffix(self):
-        return self._base().suffix
-
-    @property
-    def suffixes(self):
-        return self._base().suffixes
-
-    @property
-    def stem(self):
-        return self._base().stem
-
-    @property
-    def filename(self):
-        return pathlib.Path(self.root.filename).joinpath(self.at)
-
-    def read_text(self, *args, **kwargs):
-        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
-        with self.open('r', encoding, *args, **kwargs) as strm:
-            return strm.read()
-
-    def read_bytes(self):
-        with self.open('rb') as strm:
-            return strm.read()
-
-    def _is_child(self, path):
-        return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
-
-    def _next(self, at):
-        return self.__class__(self.root, at)
-
-    def is_dir(self):
-        return not self.at or self.at.endswith("/")
-
-    def is_file(self):
-        return self.exists() and not self.is_dir()
-
-    def exists(self):
-        return self.at in self.root._name_set()
-
-    def iterdir(self):
-        if not self.is_dir():
-            raise ValueError("Can't listdir a file")
-        subs = map(self._next, self.root.namelist())
-        return filter(self._is_child, subs)
-
-    def match(self, path_pattern):
-        return pathlib.PurePosixPath(self.at).match(path_pattern)
-
-    def is_symlink(self):
-        """
-        Return whether this path is a symlink.
-        """
-        info = self.root.getinfo(self.at)
-        mode = info.external_attr >> 16
-        return stat.S_ISLNK(mode)
-
-    def glob(self, pattern):
-        if not pattern:
-            raise ValueError(f"Unacceptable pattern: {pattern!r}")
-
-        prefix = re.escape(self.at)
-        tr = Translator(seps='/')
-        matches = re.compile(prefix + tr.translate(pattern)).fullmatch
-        names = (data.filename for data in self.root.filelist)
-        return map(self._next, filter(matches, names))
-
-    def rglob(self, pattern):
-        return self.glob(f'**/{pattern}')
-
-    def relative_to(self, other, *extra):
-        return posixpath.relpath(str(self), str(other.joinpath(*extra)))
-
-    def __str__(self):
-        return posixpath.join(self.root.filename, self.at)
-
-    def __repr__(self):
-        return self.__repr.format(self=self)
-
-    def joinpath(self, *other):
-        next = posixpath.join(self.at, *other)
-        return self._next(self.root.resolve_dir(next))
-
-    __truediv__ = joinpath
-
-    @property
-    def parent(self):
-        if not self.at:
-            return self.filename.parent
-        parent_at = posixpath.dirname(self.at.rstrip('/'))
-        if parent_at:
-            parent_at += '/'
-        return self._next(parent_at)
diff --git a/pkg_resources/_vendor/zipp/compat/__init__.py b/pkg_resources/_vendor/zipp/compat/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/pkg_resources/_vendor/zipp/compat/py310.py b/pkg_resources/_vendor/zipp/compat/py310.py
deleted file mode 100644
index d5ca53e037..0000000000
--- a/pkg_resources/_vendor/zipp/compat/py310.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import sys
-import io
-
-
-def _text_encoding(encoding, stacklevel=2, /):  # pragma: no cover
-    return encoding
-
-
-text_encoding = (
-    io.text_encoding if sys.version_info > (3, 10) else _text_encoding  # type: ignore
-)
diff --git a/pkg_resources/_vendor/zipp/glob.py b/pkg_resources/_vendor/zipp/glob.py
deleted file mode 100644
index 69c41d77c3..0000000000
--- a/pkg_resources/_vendor/zipp/glob.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import os
-import re
-
-
-_default_seps = os.sep + str(os.altsep) * bool(os.altsep)
-
-
-class Translator:
-    """
-    >>> Translator('xyz')
-    Traceback (most recent call last):
-    ...
-    AssertionError: Invalid separators
-
-    >>> Translator('')
-    Traceback (most recent call last):
-    ...
-    AssertionError: Invalid separators
-    """
-
-    seps: str
-
-    def __init__(self, seps: str = _default_seps):
-        assert seps and set(seps) <= set(_default_seps), "Invalid separators"
-        self.seps = seps
-
-    def translate(self, pattern):
-        """
-        Given a glob pattern, produce a regex that matches it.
-        """
-        return self.extend(self.translate_core(pattern))
-
-    def extend(self, pattern):
-        r"""
-        Extend regex for pattern-wide concerns.
-
-        Apply '(?s:)' to create a non-capturing group that
-        matches newlines (valid on Unix).
-
-        Append '\Z' to imply fullmatch even when match is used.
-        """
-        return rf'(?s:{pattern})\Z'
-
-    def translate_core(self, pattern):
-        r"""
-        Given a glob pattern, produce a regex that matches it.
-
-        >>> t = Translator()
-        >>> t.translate_core('*.txt').replace('\\\\', '')
-        '[^/]*\\.txt'
-        >>> t.translate_core('a?txt')
-        'a[^/]txt'
-        >>> t.translate_core('**/*').replace('\\\\', '')
-        '.*/[^/][^/]*'
-        """
-        self.restrict_rglob(pattern)
-        return ''.join(map(self.replace, separate(self.star_not_empty(pattern))))
-
-    def replace(self, match):
-        """
-        Perform the replacements for a match from :func:`separate`.
-        """
-        return match.group('set') or (
-            re.escape(match.group(0))
-            .replace('\\*\\*', r'.*')
-            .replace('\\*', rf'[^{re.escape(self.seps)}]*')
-            .replace('\\?', r'[^/]')
-        )
-
-    def restrict_rglob(self, pattern):
-        """
-        Raise ValueError if ** appears in anything but a full path segment.
-
-        >>> Translator().translate('**foo')
-        Traceback (most recent call last):
-        ...
-        ValueError: ** must appear alone in a path segment
-        """
-        seps_pattern = rf'[{re.escape(self.seps)}]+'
-        segments = re.split(seps_pattern, pattern)
-        if any('**' in segment and segment != '**' for segment in segments):
-            raise ValueError("** must appear alone in a path segment")
-
-    def star_not_empty(self, pattern):
-        """
-        Ensure that * will not match an empty segment.
-        """
-
-        def handle_segment(match):
-            segment = match.group(0)
-            return '?*' if segment == '*' else segment
-
-        not_seps_pattern = rf'[^{re.escape(self.seps)}]+'
-        return re.sub(not_seps_pattern, handle_segment, pattern)
-
-
-def separate(pattern):
-    """
-    Separate out character sets to avoid translating their contents.
-
-    >>> [m.group(0) for m in separate('*.txt')]
-    ['*.txt']
-    >>> [m.group(0) for m in separate('a[?]txt')]
-    ['a', '[?]', 'txt']
-    """
-    return re.finditer(r'([^\[]+)|(?P<set>[\[].*?[\]])|([\[][^\]]*$)', pattern)
diff --git a/tools/vendored.py b/tools/vendored.py
index 22b18e50a4..6ac06db332 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -12,7 +12,6 @@ def remove_all(paths):
 
 
 def update_vendored():
-    update_pkg_resources()
     update_setuptools()
 
 
@@ -25,23 +24,6 @@ def clean(vendor):
     remove_all(path for path in vendor.glob('*') if path.basename() not in ignored)
 
 
-def update_pkg_resources():
-    deps = [
-        'packaging >= 24',
-        'platformdirs >= 2.6.2',
-        'jaraco.text >= 3.7',
-    ]
-    # workaround for https://github.com/pypa/pip/issues/12770
-    deps += [
-        'importlib_resources >= 5.10.2',
-        'zipp >= 3.7',
-        'backports.tarfile',
-    ]
-    vendor = Path('pkg_resources/_vendor')
-    clean(vendor)
-    install_deps(deps, vendor)
-
-
 @functools.cache
 def metadata():
     return jaraco.packaging.metadata.load('.')
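
An aside on the removed zipp helpers: they lean on two small stdlib idioms. dict.fromkeys deduplicates an iterable while preserving order, and itertools.filterfalse against a set yields an order-preserving difference with O(1) membership tests. A minimal standalone sketch (the sample values are illustrative):

    import itertools

    _dedupe = dict.fromkeys  # dict keys preserve insertion order

    def _difference(minuend, subtrahend):
        # items of minuend not in subtrahend, order retained, O(1) lookup
        return itertools.filterfalse(set(subtrahend).__contains__, minuend)

    dirs = ['foo/', 'foo/bar/', 'foo/']
    print(list(_dedupe(_difference(dirs, ['foo/']))))  # ['foo/bar/']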

From 1523957495248b3cd10cbcd27105ffd8523be319 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 23:28:51 -0400
Subject: [PATCH 0841/1761] Remove pkg_resources.extern.

---
 pkg_resources/extern/__init__.py | 104 -------------------------------
 1 file changed, 104 deletions(-)
 delete mode 100644 pkg_resources/extern/__init__.py

diff --git a/pkg_resources/extern/__init__.py b/pkg_resources/extern/__init__.py
deleted file mode 100644
index daa978ff72..0000000000
--- a/pkg_resources/extern/__init__.py
+++ /dev/null
@@ -1,104 +0,0 @@
-from __future__ import annotations
-from importlib.machinery import ModuleSpec
-import importlib.util
-import sys
-from types import ModuleType
-from typing import Iterable, Sequence
-
-
-class VendorImporter:
-    """
-    A PEP 302 meta path importer for finding optionally-vendored
-    or otherwise naturally-installed packages from root_name.
-    """
-
-    def __init__(
-        self,
-        root_name: str,
-        vendored_names: Iterable[str] = (),
-        vendor_pkg: str | None = None,
-    ):
-        self.root_name = root_name
-        self.vendored_names = set(vendored_names)
-        self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
-
-    @property
-    def search_path(self):
-        """
-        Search first the vendor package then as a natural package.
-        """
-        yield self.vendor_pkg + '.'
-        yield ''
-
-    def _module_matches_namespace(self, fullname):
-        """Figure out if the target module is vendored."""
-        root, base, target = fullname.partition(self.root_name + '.')
-        return not root and any(map(target.startswith, self.vendored_names))
-
-    def load_module(self, fullname: str):
-        """
-        Iterate over the search path to locate and load fullname.
-        """
-        root, base, target = fullname.partition(self.root_name + '.')
-        for prefix in self.search_path:
-            extant = prefix + target
-            try:
-                __import__(extant)
-            except ImportError:
-                continue
-            mod = sys.modules[extant]
-            sys.modules[fullname] = mod
-            return mod
-        else:
-            raise ImportError(
-                "The '{target}' package is required; "
-                "normally this is bundled with this package so if you get "
-                "this warning, consult the packager of your "
-                "distribution.".format(**locals())
-            )
-
-    def create_module(self, spec: ModuleSpec):
-        return self.load_module(spec.name)
-
-    def exec_module(self, module: ModuleType):
-        pass
-
-    def find_spec(
-        self,
-        fullname: str,
-        path: Sequence[str] | None = None,
-        target: ModuleType | None = None,
-    ):
-        """Return a module spec for vendored names."""
-        return (
-            # This should fix itself next mypy release https://github.com/python/typeshed/pull/11890
-            importlib.util.spec_from_loader(fullname, self)  # type: ignore[arg-type]
-            if self._module_matches_namespace(fullname)
-            else None
-        )
-
-    def install(self):
-        """
-        Install this importer into sys.meta_path if not already present.
-        """
-        if self not in sys.meta_path:
-            sys.meta_path.append(self)
-
-
-# [[[cog
-# import cog
-# from tools.vendored import yield_top_level
-# names = "\n".join(f"    {x!r}," for x in yield_top_level('pkg_resources'))
-# cog.outl(f"names = (\n{names}\n)")
-# ]]]
-names = (
-    'backports',
-    'importlib_resources',
-    'jaraco',
-    'more_itertools',
-    'packaging',
-    'platformdirs',
-    'zipp',
-)
-# [[[end]]]
-VendorImporter(__name__, names).install()
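
For context on what is being removed: VendorImporter resolved imports under pkg_resources.extern by trying the vendored copy first and falling back to the naturally installed package. A simplified sketch of that lookup (not a drop-in replacement; names mirror the deleted code):

    import importlib

    def resolve(target, vendor_pkg='pkg_resources._vendor'):
        # mirror VendorImporter.search_path: vendored copy first, then top-level
        for prefix in (vendor_pkg + '.', ''):
            try:
                return importlib.import_module(prefix + target)
            except ImportError:
                continue
        raise ImportError(target)

    # e.g. resolve('packaging') preferred pkg_resources._vendor.packaging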

From c4086b9e25fb10946d517770eb28662939d6d1e2 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 23:39:30 -0400
Subject: [PATCH 0842/1761] Clean up references to extern modules.

---
 conftest.py |  3 ---
 mypy.ini    | 10 ++++------
 2 files changed, 4 insertions(+), 9 deletions(-)

diff --git a/conftest.py b/conftest.py
index 99d020b733..532e83112a 100644
--- a/conftest.py
+++ b/conftest.py
@@ -32,11 +32,8 @@ def pytest_configure(config):
     'setuptools/tests/mod_with_constant.py',
     'setuptools/_distutils',
     '_distutils_hack',
-    'setuptools/extern',
-    'pkg_resources/extern',
     'pkg_resources/tests/data',
     'setuptools/_vendor',
-    'pkg_resources/_vendor',
     'setuptools/config/_validate_pyproject',
     'setuptools/modified.py',
     'setuptools/tests/bdist_wheel_testdata',
diff --git a/mypy.ini b/mypy.ini
index 3581693038..7ca6de8eed 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -12,7 +12,7 @@ exclude = (?x)(
 	| ^.tox/
 	| ^.eggs/
 	| ^pkg_resources/tests/data/my-test-package-source/setup.py$ # Duplicate module name
-	| ^.+?/(_vendor|extern)/ # Vendored
+	| ^setuptools/_vendor/ # Vendored
 	| ^setuptools/_distutils/ # Vendored
 	| ^setuptools/config/_validate_pyproject/ # Auto-generated
     | ^setuptools/tests/bdist_wheel_testdata/  # Duplicate module name
@@ -31,15 +31,13 @@ disable_error_code = attr-defined
 [mypy-pkg_resources.tests.*]
 disable_error_code = import-not-found
 
-# - Avoid raising issues when importing from "extern" modules, as those are added to path dynamically.
-#   https://github.com/pypa/setuptools/pull/3979#discussion_r1367968993
 # - distutils._modified has different errors on Python 3.8 [import-untyped], on Python 3.9+ [import-not-found]
 # - All jaraco modules are still untyped
 # - _validate_project sometimes complains about trove_classifiers (#4296)
-[mypy-pkg_resources.extern.*,setuptools.extern.*,distutils._modified,jaraco.*,trove_classifiers]
+[mypy-distutils._modified,jaraco.*,trove_classifiers]
 ignore_missing_imports = True
 
-# Even when excluding vendored/generated modules, there might be problems: https://github.com/python/mypy/issues/11936#issuecomment-1466764006
-[mypy-setuptools._vendor.packaging._manylinux,setuptools.config._validate_pyproject.*]
+# Even when excluding generated modules, there might be problems: https://github.com/python/mypy/issues/11936#issuecomment-1466764006
+[mypy-setuptools.config._validate_pyproject.*]
 follow_imports = silent
 # silent => ignore errors when following imports

From ed15a3b1e278c22dcdd7b8a88f6ac05375c0e64a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 23:42:04 -0400
Subject: [PATCH 0843/1761] Add news fragment.

---
 newsfragments/2825.removal.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/2825.removal.rst

diff --git a/newsfragments/2825.removal.rst b/newsfragments/2825.removal.rst
new file mode 100644
index 0000000000..38a9dac9b0
--- /dev/null
+++ b/newsfragments/2825.removal.rst
@@ -0,0 +1 @@
+Now setuptools declares its own dependencies. Dependencies are still vendored for bootstrapping purposes, but installed dependencies are preferred. Downstream packagers can de-vendor by simply removing the ``setuptools/_vendor`` directory.
\ No newline at end of file

From cb4b6708a0b5dc6db9e090595c79d8eb95fcbcdb Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 23:51:11 -0400
Subject: [PATCH 0844/1761] Suppress type errors around wheel.

---
 mypy.ini | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/mypy.ini b/mypy.ini
index 7ca6de8eed..c4b30d1acd 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -34,7 +34,8 @@ disable_error_code = import-not-found
 # - distutils._modified has different errors on Python 3.8 [import-untyped], on Python 3.9+ [import-not-found]
 # - All jaraco modules are still untyped
 # - _validate_project sometimes complains about trove_classifiers (#4296)
-[mypy-distutils._modified,jaraco.*,trove_classifiers]
+# - wheel appears to be untyped
+[mypy-distutils._modified,jaraco.*,trove_classifiers,wheel.*]
 ignore_missing_imports = True
 
 # Even when excluding generated modules, there might be problems: https://github.com/python/mypy/issues/11936#issuecomment-1466764006

From e7c320b614d71b95a366a33a99296384b85b8527 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 23:52:23 -0400
Subject: [PATCH 0845/1761] Fix type error in vendored tool.

---
 tools/vendored.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/vendored.py b/tools/vendored.py
index 6ac06db332..5f8a6c0906 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -24,7 +24,7 @@ def clean(vendor):
     remove_all(path for path in vendor.glob('*') if path.basename() not in ignored)
 
 
-@functools.cache
+@functools.lru_cache
 def metadata():
     return jaraco.packaging.metadata.load('.')
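
functools.cache is new in Python 3.9, while setuptools still supports Python 3.8 at this point; for a zero-argument function like metadata(), a bare functools.lru_cache memoizes just the same. A quick sketch of the equivalence (payload is illustrative):

    import functools

    @functools.lru_cache  # usable bare since 3.8; functools.cache is 3.9+
    def metadata():
        print('loading...')              # runs only on the first call
        return {'Name': 'setuptools'}    # illustrative payload

    metadata(); metadata()               # 'loading...' printed once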
 

From 3fd66b903901db002315055f8757dcbf8c4fa63f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 2 Jul 2024 23:54:55 -0400
Subject: [PATCH 0846/1761] Fix incorrect type declaration in _python_requires.

---
 setuptools/config/_apply_pyprojecttoml.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 8c1a81dda5..6cc59d2b95 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -203,7 +203,7 @@ def _project_urls(dist: Distribution, val: dict, _root_dir):
     _set_config(dist, "project_urls", val)
 
 
-def _python_requires(dist: Distribution, val: dict, _root_dir):
+def _python_requires(dist: Distribution, val: str, _root_dir):
     from packaging.specifiers import SpecifierSet
 
     _set_config(dist, "python_requires", SpecifierSet(val))
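
The corrected annotation reflects that requires-python arrives from pyproject.toml as a string and is parsed into a packaging SpecifierSet. A quick illustration of that parse (values are illustrative):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet('>=3.8')   # the string form taken from pyproject.toml
    print('3.12' in spec)          # True
    print('3.7' in spec)           # False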

From bb17215ed3fada4ca0ba513684e39511513c1bbb Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 3 Jul 2024 00:24:23 -0400
Subject: [PATCH 0847/1761] Fix type errors in pkg_resources, now that
 packaging types are recognized properly.

---
 pkg_resources/__init__.py | 19 ++++++++++++-------
 1 file changed, 12 insertions(+), 7 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index e53525032c..0fbd3c1765 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -540,8 +540,7 @@ def get_distribution(dist: Distribution | _PkgReqType) -> Distribution:
     if isinstance(dist, str):
         dist = Requirement.parse(dist)
     if isinstance(dist, Requirement):
-        # Bad type narrowing, dist has to be a Requirement here, so get_provider has to return Distribution
-        dist = get_provider(dist)  # type: ignore[assignment]
+        dist = get_provider(dist)
     if not isinstance(dist, Distribution):
         raise TypeError("Expected str, Requirement, or Distribution", dist)
     return dist
@@ -1120,7 +1119,7 @@ def markers_pass(self, req: Requirement, extras: tuple[str, ...] | None = None):
         evaluation. Otherwise, return True.
         """
         extra_evals = (
-            req.marker.evaluate({'extra': extra})
+            req.marker.evaluate({'extra': extra})  # type: ignore
             for extra in self.get(req, ()) + (extras or (None,))
         )
         return not req.marker or any(extra_evals)
@@ -3432,6 +3431,10 @@ class RequirementParseError(_packaging_requirements.InvalidRequirement):
 
 
 class Requirement(_packaging_requirements.Requirement):
+    # prefer variable length tuple to set (as found in
+    # packaging.requirements.Requirement)
+    extras: tuple[str, ...]  # type: ignore[assignment]
+
     def __init__(self, requirement_string: str):
         """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
         super().__init__(requirement_string)
@@ -3439,8 +3442,7 @@ def __init__(self, requirement_string: str):
         project_name = safe_name(self.name)
         self.project_name, self.key = project_name, project_name.lower()
         self.specs = [(spec.operator, spec.version) for spec in self.specifier]
-        # packaging.requirements.Requirement uses a set for its extras. We use a variable-length tuple
-        self.extras: tuple[str] = tuple(map(safe_extra, self.extras))
+        self.extras = tuple(map(safe_extra, self.extras))
         self.hashCmp = (
             self.key,
             self.url,
@@ -3456,7 +3458,7 @@ def __eq__(self, other: object):
     def __ne__(self, other):
         return not self == other
 
-    def __contains__(self, item: Distribution | str | tuple[str, ...]) -> bool:
+    def __contains__(self, item: Distribution | str) -> bool:
         if isinstance(item, Distribution):
             if item.key != self.key:
                 return False
@@ -3466,7 +3468,10 @@ def __contains__(self, item: Distribution | str | tuple[str, ...]) -> bool:
         # Allow prereleases always in order to match the previous behavior of
         # this method. In the future this should be smarter and follow PEP 440
         # more accurately.
-        return self.specifier.contains(item, prereleases=True)
+        return self.specifier.contains(
+            item,  # type: ignore[arg-type]
+            prereleases=True,
+        )
 
     def __hash__(self):
         return self.__hash
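
Two of the suppressions above stem from pkg_resources deliberately widening packaging's types: extras become a variable-length tuple rather than packaging's set, and containment checks always admit prereleases. The underlying packaging behaviour, for reference (the sample requirement is illustrative):

    from packaging.requirements import Requirement

    req = Requirement('pkg[foo,bar]>=1.0')
    print(req.extras)                            # {'foo', 'bar'}, a set
    print(req.specifier.contains('1.1rc1'))      # False: prereleases excluded
    print(req.specifier.contains('1.1rc1', prereleases=True))  # True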

From 242ef247e999bfe6e7a3a8eb4d984cb948ad0b4d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 3 Jul 2024 00:32:30 -0400
Subject: [PATCH 0848/1761] Suppress coverage errors.

---
 setuptools/_importlib.py          | 4 ++--
 setuptools/command/bdist_wheel.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py
index 14384bef5d..b2d5b5b84a 100644
--- a/setuptools/_importlib.py
+++ b/setuptools/_importlib.py
@@ -2,12 +2,12 @@
 
 
 if sys.version_info < (3, 10):
-    import importlib_metadata as metadata
+    import importlib_metadata as metadata  # pragma: no cover
 else:
     import importlib.metadata as metadata  # noqa: F401
 
 
 if sys.version_info < (3, 9):
-    import importlib_resources as resources
+    import importlib_resources as resources  # pragma: no cover
 else:
     import importlib.resources as resources  # noqa: F401
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 50248cdc25..14cdd6e934 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -67,7 +67,7 @@ def python_tag() -> str:
 def get_platform(archive_root: str | None) -> str:
     """Return our platform name 'win32', 'linux_x86_64'"""
     result = sysconfig.get_platform()
-    if result.startswith("macosx") and archive_root is not None:
+    if result.startswith("macosx") and archive_root is not None:  # pragma: no cover
         from wheel.macosx_libfile import calculate_macosx_platform_tag
 
         result = calculate_macosx_platform_tag(archive_root, result)

From d25f6d985b66b8153abee52ad7e6d00f5580d532 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 3 Jul 2024 11:22:23 -0400
Subject: [PATCH 0849/1761] Moved the dependencies to a 'core' extra to avoid
 the danger of cyclic dependencies at build time.

---
 newsfragments/2825.removal.rst |  2 +-
 pyproject.toml                 | 24 +++++++++++++-----------
 tools/vendored.py              | 12 ++++++++++--
 tox.ini                        |  1 +
 4 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/newsfragments/2825.removal.rst b/newsfragments/2825.removal.rst
index 38a9dac9b0..f7a2460d4b 100644
--- a/newsfragments/2825.removal.rst
+++ b/newsfragments/2825.removal.rst
@@ -1 +1 @@
-Now setuptools declares its own dependencies. Dependencies are still vendored for bootstrapping purposes, but installed dependencies are preferred. Downstream packagers can de-vendor by simply removing the ``setuptools/_vendor`` directory.
\ No newline at end of file
+Now setuptools declares its own dependencies in the ``core`` extra. Dependencies are still vendored for bootstrapping purposes, but installed dependencies are preferred. Downstream packagers can de-vendor by simply removing the ``setuptools/_vendor`` directory.
diff --git a/pyproject.toml b/pyproject.toml
index 7c517943a4..b02f3930ae 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,17 +25,6 @@ classifiers = [
 keywords = ["CPAN PyPI distutils eggs package management"]
 requires-python = ">=3.8"
 dependencies = [
-	"packaging>=24",
-	"ordered-set>=3.1.1",
-	"more_itertools>=8.8",
-	"jaraco.text>=3.7",
-	"importlib_resources>=5.10.2",
-	"importlib_metadata>=6",
-	"tomli>=2.0.1",
-	"wheel>=0.43.0",
-
-	# pkg_resources
-	"platformdirs >= 2.6.2",
 ]
 
 [project.urls]
@@ -107,6 +96,19 @@ doc = [
 ]
 ssl = []
 certs = []
+core = [
+	"packaging>=24",
+	"ordered-set>=3.1.1",
+	"more_itertools>=8.8",
+	"jaraco.text>=3.7",
+	"importlib_resources>=5.10.2",
+	"importlib_metadata>=6",
+	"tomli>=2.0.1",
+	"wheel>=0.43.0",
+
+	# pkg_resources
+	"platformdirs >= 2.6.2",
+]
 
 [project.entry-points."distutils.commands"]
 alias = "setuptools.command.alias:alias"
diff --git a/tools/vendored.py b/tools/vendored.py
index 5f8a6c0906..2101e7c20f 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -1,4 +1,5 @@
 import functools
+import re
 import sys
 import subprocess
 
@@ -29,11 +30,18 @@ def metadata():
     return jaraco.packaging.metadata.load('.')
 
 
+def upgrade_core(dep):
+    """
+    Remove 'extra == "core"' from any dependency.
+    """
+    return re.sub('''(;| and) extra == ['"]core['"]''', '', dep)
+
+
 def load_deps():
     """
-    Read the dependencies from `.`.
+    Read the dependencies from `.[core]`.
     """
-    return metadata().get_all('Requires-Dist')
+    return list(map(upgrade_core, metadata().get_all('Requires-Dist')))
 
 
 def min_python():
diff --git a/tox.ini b/tox.ini
index 8a3f6260b7..00e38fbb9a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -10,6 +10,7 @@ commands =
 usedevelop = True
 extras =
 	test
+	core
 pass_env =
 	SETUPTOOLS_USE_DISTUTILS
 	SETUPTOOLS_ENFORCE_DEPRECATION
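
To see what upgrade_core() does to the published metadata: once the dependencies move into the extra, each Requires-Dist entry gains an 'extra == "core"' marker, and the regex strips just that clause while leaving any other markers alone. For example (the inputs are illustrative):

    import re

    def upgrade_core(dep):
        return re.sub('''(;| and) extra == ['"]core['"]''', '', dep)

    print(upgrade_core('packaging>=24; extra == "core"'))
    # packaging>=24
    print(upgrade_core('tomli>=2.0.1; python_version < "3.11" and extra == "core"'))
    # tomli>=2.0.1; python_version < "3.11"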

From fa7ee9130dd012ddfb4bf22e39e442a1e73ff0ed Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 3 Jul 2024 11:50:50 -0400
Subject: [PATCH 0850/1761] Ensure that package data from vendored packages
 gets installed.

---
 pyproject.toml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index b02f3930ae..183c534e0a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -185,6 +185,10 @@ exclude = [
 ]
 namespaces = true
 
+[tool.setuptools.package-data]
+# ensure that `setuptools/_vendor/jaraco/text/Lorem ipsum.txt` is installed
+"*" = ["*.txt"]
+
 [tool.distutils.sdist]
 formats = "zip"
 

From 4a3406baf94b1ef8122364b417c9564344a52921 Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Thu, 4 Jul 2024 22:53:21 +0200
Subject: [PATCH 0851/1761] CI: also set CC/CXX when pip installing with
 mingw+clang

Now that setuptools with mingw support is on PyPI, pip installing
MarkupSafe uncovered the same issue as was fixed in 23174730a61af359f.

Apply the same workaround here too.
---
 .github/workflows/main.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index b6b757dbf5..1f63867d85 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -143,6 +143,9 @@ jobs:
             git
       - name: Install Dependencies
         shell: msys2 {0}
+        env:
+          CC: ${{ matrix.cc }}
+          CXX: ${{ matrix.cxx }}
         run: |
           export VIRTUALENV_NO_SETUPTOOLS=1
 

From bacd9c6f92ed1926644f5743d7139d16ee65801b Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Thu, 4 Jul 2024 22:25:54 +0200
Subject: [PATCH 0852/1761] sysconfig: skip customize_compiler() with MSVC
 Python again

Enabling customize_compiler() for the mingw compiler class in
2ad8784dfeb81682 also enabled it when the mingw compiler class was
used with an MSVC-built CPython.

MSVC CPython, however, doesn't have any of the config vars that
customize_compiler() requires. And while it would be nice if the env
vars were considered in that scenario too, it's not clear how that
could be implemented without sysconfig-provided fallbacks (if CC
isn't set but CFLAGS is, there is no way to pass things to
set_executables(), etc.).

Given that, just restore the previous behaviour: skip
customize_compiler() with MSVC Python in all cases, and add a test.

Fixes https://github.com/pypa/setuptools/issues/4456
---
 distutils/sysconfig.py                 |  4 +++-
 distutils/tests/test_mingwccompiler.py | 10 ++++++++++
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 4ba0be5602..7ebe67687e 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -293,7 +293,9 @@ def customize_compiler(compiler):  # noqa: C901
     Mainly needed on Unix, so we can plug in the information that
     varies across Unices and is stored in Python's Makefile.
     """
-    if compiler.compiler_type in ["unix", "cygwin", "mingw32"]:
+    if compiler.compiler_type in ["unix", "cygwin"] or (
+        compiler.compiler_type == "mingw32" and is_mingw()
+    ):
         _customize_macos()
 
         (
diff --git a/distutils/tests/test_mingwccompiler.py b/distutils/tests/test_mingwccompiler.py
index d81360e782..fd201cd773 100644
--- a/distutils/tests/test_mingwccompiler.py
+++ b/distutils/tests/test_mingwccompiler.py
@@ -2,6 +2,7 @@
 
 from distutils.util import split_quoted, is_mingw
 from distutils.errors import DistutilsPlatformError, CCompilerError
+from distutils import sysconfig
 
 
 class TestMingw32CCompiler:
@@ -43,3 +44,12 @@ def test_cygwincc_error(self, monkeypatch):
 
         with pytest.raises(CCompilerError):
             distutils.cygwinccompiler.Mingw32CCompiler()
+
+    def test_customize_compiler_with_msvc_python(self):
+        from distutils.cygwinccompiler import Mingw32CCompiler
+
+        # In case we have an MSVC Python build, but still want to use
+        # Mingw32CCompiler, then customize_compiler() shouldn't fail at least.
+        # https://github.com/pypa/setuptools/issues/4456
+        compiler = Mingw32CCompiler()
+        sysconfig.customize_compiler(compiler)

From 7d66cde95cdf32b70d457064f5be0b2c5f02ce62 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 4 Jul 2024 20:45:49 -0400
Subject: [PATCH 0853/1761] Move paths_on_pythonpath out of the command.test
 module.

---
 setuptools/_path.py                 | 30 ++++++++++++++++++++++++++
 setuptools/command/test.py          | 33 +++--------------------------
 setuptools/tests/test_develop.py    |  9 ++++----
 setuptools/tests/test_namespaces.py | 13 ++++++------
 4 files changed, 44 insertions(+), 41 deletions(-)

diff --git a/setuptools/_path.py b/setuptools/_path.py
index fb8ef0e198..7ce6919136 100644
--- a/setuptools/_path.py
+++ b/setuptools/_path.py
@@ -1,7 +1,11 @@
+import contextlib
 import os
 import sys
 from typing import Union
 
+from setuptools.extern.more_itertools import unique_everseen
+
+
 if sys.version_info >= (3, 9):
     StrPath = Union[str, os.PathLike[str]]  #  Same as _typeshed.StrPath
 else:
@@ -38,3 +42,29 @@ def normpath(filename: StrPath) -> str:
     # See pkg_resources.normalize_path for notes about cygwin
     file = os.path.abspath(filename) if sys.platform == 'cygwin' else filename
     return os.path.normcase(os.path.realpath(os.path.normpath(file)))
+
+
+@contextlib.contextmanager
+def paths_on_pythonpath(paths):
+    """
+    Add the indicated paths to the head of the PYTHONPATH environment
+    variable so that subprocesses will also see the packages at
+    these paths.
+
+    Do this in a context that restores the value on exit.
+    """
+    nothing = object()
+    orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
+    current_pythonpath = os.environ.get('PYTHONPATH', '')
+    try:
+        prefix = os.pathsep.join(unique_everseen(paths))
+        to_join = filter(None, [prefix, current_pythonpath])
+        new_path = os.pathsep.join(to_join)
+        if new_path:
+            os.environ['PYTHONPATH'] = new_path
+        yield
+    finally:
+        if orig_pythonpath is nothing:
+            os.environ.pop('PYTHONPATH', None)
+        else:
+            os.environ['PYTHONPATH'] = orig_pythonpath
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
index af1349e1c6..1ef69c47fd 100644
--- a/setuptools/command/test.py
+++ b/setuptools/command/test.py
@@ -1,4 +1,3 @@
-import os
 import operator
 import sys
 import contextlib
@@ -18,8 +17,8 @@
     require,
 )
 from .._importlib import metadata
+from .._path import paths_on_pythonpath
 from setuptools import Command
-from setuptools.extern.more_itertools import unique_everseen
 from setuptools.extern.jaraco.functools import pass_none
 
 
@@ -150,7 +149,7 @@ def project_on_sys_path(self, include_dists=()):
             working_set.__init__()
             add_activation_listener(lambda dist: dist.activate())
             require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
-            with self.paths_on_pythonpath([project_path]):
+            with paths_on_pythonpath([project_path]):
                 yield
         finally:
             sys.path[:] = old_path
@@ -158,32 +157,6 @@ def project_on_sys_path(self, include_dists=()):
             sys.modules.update(old_modules)
             working_set.__init__()
 
-    @staticmethod
-    @contextlib.contextmanager
-    def paths_on_pythonpath(paths):
-        """
-        Add the indicated paths to the head of the PYTHONPATH environment
-        variable so that subprocesses will also see the packages at
-        these paths.
-
-        Do this in a context that restores the value on exit.
-        """
-        nothing = object()
-        orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
-        current_pythonpath = os.environ.get('PYTHONPATH', '')
-        try:
-            prefix = os.pathsep.join(unique_everseen(paths))
-            to_join = filter(None, [prefix, current_pythonpath])
-            new_path = os.pathsep.join(to_join)
-            if new_path:
-                os.environ['PYTHONPATH'] = new_path
-            yield
-        finally:
-            if orig_pythonpath is nothing:
-                os.environ.pop('PYTHONPATH', None)
-            else:
-                os.environ['PYTHONPATH'] = orig_pythonpath
-
     @staticmethod
     def install_dists(dist):
         """
@@ -218,7 +191,7 @@ def run(self):
         self.announce('running "%s"' % cmd)
 
         paths = map(operator.attrgetter('location'), installed_dists)
-        with self.paths_on_pythonpath(paths):
+        with paths_on_pythonpath(paths):
             with self.project_on_sys_path():
                 self.run_tests()
 
diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py
index d36447edbb..6f17beb703 100644
--- a/setuptools/tests/test_develop.py
+++ b/setuptools/tests/test_develop.py
@@ -6,12 +6,11 @@
 import pathlib
 import platform
 
-from setuptools.command import test
-
 import pytest
 
 from setuptools.command.develop import develop
 from setuptools.dist import Distribution
+from setuptools._path import paths_on_pythonpath
 from . import contexts
 from . import namespaces
 
@@ -124,7 +123,7 @@ def install_develop(src_dir, target):
             str(target),
         ]
         with src_dir.as_cwd():
-            with test.test.paths_on_pythonpath([str(target)]):
+            with paths_on_pythonpath([str(target)]):
                 subprocess.check_call(develop_cmd)
 
     @pytest.mark.skipif(
@@ -163,7 +162,7 @@ def test_namespace_package_importable(self, tmpdir):
             '-c',
             'import myns.pkgA; import myns.pkgB',
         ]
-        with test.test.paths_on_pythonpath([str(target)]):
+        with paths_on_pythonpath([str(target)]):
             subprocess.check_call(try_import)
 
         # additionally ensure that pkg_resources import works
@@ -172,5 +171,5 @@ def test_namespace_package_importable(self, tmpdir):
             '-c',
             'import pkg_resources',
         ]
-        with test.test.paths_on_pythonpath([str(target)]):
+        with paths_on_pythonpath([str(target)]):
             subprocess.check_call(pkg_resources_imp)
diff --git a/setuptools/tests/test_namespaces.py b/setuptools/tests/test_namespaces.py
index 76b5af296a..56689301da 100644
--- a/setuptools/tests/test_namespaces.py
+++ b/setuptools/tests/test_namespaces.py
@@ -1,8 +1,9 @@
 import sys
 import subprocess
 
+from setuptools._path import paths_on_pythonpath
+
 from . import namespaces
-from setuptools.command import test
 
 
 class TestNamespaces:
@@ -45,7 +46,7 @@ def test_mixed_site_and_non_site(self, tmpdir):
             '-c',
             'import myns.pkgA; import myns.pkgB',
         ]
-        with test.test.paths_on_pythonpath(map(str, targets)):
+        with paths_on_pythonpath(map(str, targets)):
             subprocess.check_call(try_import)
 
     def test_pkg_resources_import(self, tmpdir):
@@ -65,7 +66,7 @@ def test_pkg_resources_import(self, tmpdir):
             str(target),
             str(pkg),
         ]
-        with test.test.paths_on_pythonpath([str(target)]):
+        with paths_on_pythonpath([str(target)]):
             subprocess.check_call(install_cmd)
         namespaces.make_site_dir(target)
         try_import = [
@@ -73,7 +74,7 @@ def test_pkg_resources_import(self, tmpdir):
             '-c',
             'import pkg_resources',
         ]
-        with test.test.paths_on_pythonpath([str(target)]):
+        with paths_on_pythonpath([str(target)]):
             subprocess.check_call(try_import)
 
     def test_namespace_package_installed_and_cwd(self, tmpdir):
@@ -102,7 +103,7 @@ def test_namespace_package_installed_and_cwd(self, tmpdir):
             '-c',
             'import pkg_resources; import myns.pkgA',
         ]
-        with test.test.paths_on_pythonpath([str(target)]):
+        with paths_on_pythonpath([str(target)]):
             subprocess.check_call(pkg_resources_imp, cwd=str(pkg_A))
 
     def test_packages_in_the_same_namespace_installed_and_cwd(self, tmpdir):
@@ -133,5 +134,5 @@ def test_packages_in_the_same_namespace_installed_and_cwd(self, tmpdir):
             '-c',
             'import pkg_resources; import myns.pkgA; import myns.pkgB',
         ]
-        with test.test.paths_on_pythonpath([str(target)]):
+        with paths_on_pythonpath([str(target)]):
             subprocess.check_call(pkg_resources_imp, cwd=str(pkg_B))
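
With the helper now public in setuptools._path, call sites follow the pattern below: prepend directories to PYTHONPATH for the duration of the block so spawned interpreters see them, then restore the prior value on exit. A usage sketch (the path and module name are illustrative):

    import subprocess
    import sys

    from setuptools._path import paths_on_pythonpath

    with paths_on_pythonpath(['/tmp/target']):
        # the child process inherits the augmented PYTHONPATH
        subprocess.check_call([sys.executable, '-c', 'import myns.pkgA'])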

From b5bd38edc1cec78b6c1baca047aebf23c6635a48 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 4 Jul 2024 20:46:04 -0400
Subject: [PATCH 0854/1761] Remove test module and associated tests.

---
 pyproject.toml                      |   1 -
 setuptools/command/test.py          | 223 ----------------------------
 setuptools/tests/test_egg_info.py   |  11 --
 setuptools/tests/test_setuptools.py |  37 -----
 setuptools/tests/test_test.py       |  39 -----
 setuptools/tests/test_virtualenv.py |  74 ---------
 6 files changed, 385 deletions(-)
 delete mode 100644 setuptools/command/test.py
 delete mode 100644 setuptools/tests/test_test.py

diff --git a/pyproject.toml b/pyproject.toml
index 00e7ee169f..f5441ddf0a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -118,7 +118,6 @@ rotate = "setuptools.command.rotate:rotate"
 saveopts = "setuptools.command.saveopts:saveopts"
 sdist = "setuptools.command.sdist:sdist"
 setopt = "setuptools.command.setopt:setopt"
-test = "setuptools.command.test:test"
 upload_docs = "setuptools.command.upload_docs:upload_docs"
 
 [project.entry-points."setuptools.finalize_distribution_options"]
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
deleted file mode 100644
index 1ef69c47fd..0000000000
--- a/setuptools/command/test.py
+++ /dev/null
@@ -1,223 +0,0 @@
-import operator
-import sys
-import contextlib
-import itertools
-import unittest
-from distutils.errors import DistutilsError, DistutilsOptionError
-from distutils import log
-from unittest import TestLoader
-
-from pkg_resources import (
-    resource_listdir,
-    resource_exists,
-    normalize_path,
-    working_set,
-    evaluate_marker,
-    add_activation_listener,
-    require,
-)
-from .._importlib import metadata
-from .._path import paths_on_pythonpath
-from setuptools import Command
-from setuptools.extern.jaraco.functools import pass_none
-
-
-class ScanningLoader(TestLoader):
-    def __init__(self):
-        TestLoader.__init__(self)
-        self._visited = set()
-
-    def loadTestsFromModule(self, module, pattern=None):
-        """Return a suite of all tests cases contained in the given module
-
-        If the module is a package, load tests from all the modules in it.
-        If the module has an ``additional_tests`` function, call it and add
-        the return value to the tests.
-        """
-        if module in self._visited:
-            return None
-        self._visited.add(module)
-
-        tests = []
-        tests.append(TestLoader.loadTestsFromModule(self, module))
-
-        if hasattr(module, "additional_tests"):
-            tests.append(module.additional_tests())
-
-        if hasattr(module, '__path__'):
-            for file in resource_listdir(module.__name__, ''):
-                if file.endswith('.py') and file != '__init__.py':
-                    submodule = module.__name__ + '.' + file[:-3]
-                else:
-                    if resource_exists(module.__name__, file + '/__init__.py'):
-                        submodule = module.__name__ + '.' + file
-                    else:
-                        continue
-                tests.append(self.loadTestsFromName(submodule))
-
-        if len(tests) != 1:
-            return self.suiteClass(tests)
-        else:
-            return tests[0]  # don't create a nested suite for only one return
-
-
-# adapted from jaraco.classes.properties:NonDataProperty
-class NonDataProperty:
-    def __init__(self, fget):
-        self.fget = fget
-
-    def __get__(self, obj, objtype=None):
-        if obj is None:
-            return self
-        return self.fget(obj)
-
-
-class test(Command):
-    """Command to run unit tests after in-place build"""
-
-    description = "run unit tests after in-place build (deprecated)"
-
-    user_options = [
-        ('test-module=', 'm', "Run 'test_suite' in specified module"),
-        (
-            'test-suite=',
-            's',
-            "Run single test, case or suite (e.g. 'module.test_suite')",
-        ),
-        ('test-runner=', 'r', "Test runner to use"),
-    ]
-
-    def initialize_options(self):
-        self.test_suite = None
-        self.test_module = None
-        self.test_loader = None
-        self.test_runner = None
-
-    def finalize_options(self):
-        if self.test_suite and self.test_module:
-            msg = "You may specify a module or a suite, but not both"
-            raise DistutilsOptionError(msg)
-
-        if self.test_suite is None:
-            if self.test_module is None:
-                self.test_suite = self.distribution.test_suite
-            else:
-                self.test_suite = self.test_module + ".test_suite"
-
-        if self.test_loader is None:
-            self.test_loader = getattr(self.distribution, 'test_loader', None)
-        if self.test_loader is None:
-            self.test_loader = "setuptools.command.test:ScanningLoader"
-        if self.test_runner is None:
-            self.test_runner = getattr(self.distribution, 'test_runner', None)
-
-    @NonDataProperty
-    def test_args(self):
-        return list(self._test_args())
-
-    def _test_args(self):
-        if not self.test_suite:
-            yield 'discover'
-        if self.verbose:
-            yield '--verbose'
-        if self.test_suite:
-            yield self.test_suite
-
-    def with_project_on_sys_path(self, func):
-        """
-        Backward compatibility for project_on_sys_path context.
-        """
-        with self.project_on_sys_path():
-            func()
-
-    @contextlib.contextmanager
-    def project_on_sys_path(self, include_dists=()):
-        self.run_command('egg_info')
-
-        # Build extensions in-place
-        self.reinitialize_command('build_ext', inplace=True)
-        self.run_command('build_ext')
-
-        ei_cmd = self.get_finalized_command("egg_info")
-
-        old_path = sys.path[:]
-        old_modules = sys.modules.copy()
-
-        try:
-            project_path = normalize_path(ei_cmd.egg_base)
-            sys.path.insert(0, project_path)
-            working_set.__init__()
-            add_activation_listener(lambda dist: dist.activate())
-            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
-            with paths_on_pythonpath([project_path]):
-                yield
-        finally:
-            sys.path[:] = old_path
-            sys.modules.clear()
-            sys.modules.update(old_modules)
-            working_set.__init__()
-
-    @staticmethod
-    def install_dists(dist):
-        """
-        Install the requirements indicated by self.distribution and
-        return an iterable of the dists that were built.
-        """
-        ir_d = dist.fetch_build_eggs(dist.install_requires)
-        tr_d = dist.fetch_build_eggs(dist.tests_require or [])
-        er_d = dist.fetch_build_eggs(
-            v
-            for k, v in dist.extras_require.items()
-            if k.startswith(':') and evaluate_marker(k[1:])
-        )
-        return itertools.chain(ir_d, tr_d, er_d)
-
-    def run(self):
-        self.announce(
-            "WARNING: Testing via this command is deprecated and will be "
-            "removed in a future version. Users looking for a generic test "
-            "entry point independent of test runner are encouraged to use "
-            "tox.",
-            log.WARN,
-        )
-
-        installed_dists = self.install_dists(self.distribution)
-
-        cmd = ' '.join(self._argv)
-        if self.dry_run:
-            self.announce('skipping "%s" (dry run)' % cmd)
-            return
-
-        self.announce('running "%s"' % cmd)
-
-        paths = map(operator.attrgetter('location'), installed_dists)
-        with paths_on_pythonpath(paths):
-            with self.project_on_sys_path():
-                self.run_tests()
-
-    def run_tests(self):
-        test = unittest.main(
-            None,
-            None,
-            self._argv,
-            testLoader=self._resolve_as_ep(self.test_loader),
-            testRunner=self._resolve_as_ep(self.test_runner),
-            exit=False,
-        )
-        if not test.result.wasSuccessful():
-            msg = 'Test failed: %s' % test.result
-            self.announce(msg, log.ERROR)
-            raise DistutilsError(msg)
-
-    @property
-    def _argv(self):
-        return ['unittest'] + self.test_args
-
-    @staticmethod
-    @pass_none
-    def _resolve_as_ep(val):
-        """
-        Load the indicated attribute value, called, as a as if it were
-        specified as an entry point.
-        """
-        return metadata.EntryPoint(value=val, name=None, group=None).load()()
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 8616f813c1..856dd127bc 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -395,17 +395,6 @@ def parametrize(*test_list, **format_dict):
         setup_requires =
             barbazquux; {mismatch_marker}
 
-        """,
-        """
-        tests_require_with_markers
-        {'cmd': ['test'], 'output': "Ran 0 tests in"}
-
-        tests_require=["barbazquux;{mismatch_marker}"],
-
-        [options]
-        tests_require =
-            barbazquux; {mismatch_marker}
-
         """,
         """
         extras_require_with_extra
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index b1ca2396bd..3aa9bf048e 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -221,43 +221,6 @@ def testInvalidIncludeExclude(self):
             self.dist.exclude(package_dir=['q'])
 
 
-class TestCommandTests:
-    def testTestIsCommand(self):
-        test_cmd = makeSetup().get_command_obj('test')
-        assert isinstance(test_cmd, distutils.cmd.Command)
-
-    def testLongOptSuiteWNoDefault(self):
-        ts1 = makeSetup(script_args=['test', '--test-suite=foo.tests.suite'])
-        ts1 = ts1.get_command_obj('test')
-        ts1.ensure_finalized()
-        assert ts1.test_suite == 'foo.tests.suite'
-
-    def testDefaultSuite(self):
-        ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
-        ts2.ensure_finalized()
-        assert ts2.test_suite == 'bar.tests.suite'
-
-    def testDefaultWModuleOnCmdLine(self):
-        ts3 = makeSetup(
-            test_suite='bar.tests', script_args=['test', '-m', 'foo.tests']
-        ).get_command_obj('test')
-        ts3.ensure_finalized()
-        assert ts3.test_module == 'foo.tests'
-        assert ts3.test_suite == 'foo.tests.test_suite'
-
-    def testConflictingOptions(self):
-        ts4 = makeSetup(
-            script_args=['test', '-m', 'bar.tests', '-s', 'foo.tests.suite']
-        ).get_command_obj('test')
-        with pytest.raises(DistutilsOptionError):
-            ts4.ensure_finalized()
-
-    def testNoSuite(self):
-        ts5 = makeSetup().get_command_obj('test')
-        ts5.ensure_finalized()
-        assert ts5.test_suite is None
-
-
 @pytest.fixture
 def example_source(tmpdir):
     tmpdir.mkdir('foo')
diff --git a/setuptools/tests/test_test.py b/setuptools/tests/test_test.py
deleted file mode 100644
index 989996f3ba..0000000000
--- a/setuptools/tests/test_test.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import pytest
-from jaraco import path
-
-from setuptools.command.test import test
-from setuptools.dist import Distribution
-
-from .textwrap import DALS
-
-
-@pytest.mark.usefixtures('tmpdir_cwd')
-def test_tests_are_run_once(capfd):
-    params = dict(
-        packages=['dummy'],
-    )
-    files = {
-        'setup.py': 'from setuptools import setup; setup('
-        + ','.join(f'{name}={params[name]!r}' for name in params)
-        + ')',
-        'dummy': {
-            '__init__.py': '',
-            'test_dummy.py': DALS(
-                """
-                import unittest
-                class TestTest(unittest.TestCase):
-                    def test_test(self):
-                        print('Foo')
-                """
-            ),
-        },
-    }
-    path.build(files)
-    dist = Distribution(params)
-    dist.script_name = 'setup.py'
-    cmd = test(dist)
-    cmd.ensure_finalized()
-    cmd.run()
-    out, err = capfd.readouterr()
-    assert out.endswith('Foo\n')
-    assert len(out.split('Foo')) == 2
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py
index b84702aa70..21e5273529 100644
--- a/setuptools/tests/test_virtualenv.py
+++ b/setuptools/tests/test_virtualenv.py
@@ -111,80 +111,6 @@ def test_pip_upgrade_from_source(
     venv.run(["pip", "install", "--no-cache-dir", "--upgrade", str(setuptools_sdist)])
 
 
-def _check_test_command_install_requirements(venv, tmpdir):
-    """
-    Check the test command will install all required dependencies.
-    """
-
-    def sdist(distname, version):
-        dist_path = tmpdir.join('%s-%s.tar.gz' % (distname, version))
-        make_nspkg_sdist(str(dist_path), distname, version)
-        return dist_path
-
-    dependency_links = [
-        pathlib.Path(str(dist_path)).as_uri()
-        for dist_path in (
-            sdist('foobar', '2.4'),
-            sdist('bits', '4.2'),
-            sdist('bobs', '6.0'),
-            sdist('pieces', '0.6'),
-        )
-    ]
-    with tmpdir.join('setup.py').open('w') as fp:
-        fp.write(
-            DALS(
-                '''
-            from setuptools import setup
-
-            setup(
-                dependency_links={dependency_links!r},
-                install_requires=[
-                    'barbazquux1; sys_platform in ""',
-                    'foobar==2.4',
-                ],
-                setup_requires='bits==4.2',
-                tests_require="""
-                    bobs==6.0
-                """,
-                extras_require={{
-                    'test': ['barbazquux2'],
-                    ':"" in sys_platform': 'pieces==0.6',
-                    ':python_version > "1"': """
-                        pieces
-                        foobar
-                    """,
-                }}
-            )
-            '''.format(dependency_links=dependency_links)
-            )
-        )
-    with tmpdir.join('test.py').open('w') as fp:
-        fp.write(
-            DALS(
-                """
-            import foobar
-            import bits
-            import bobs
-            import pieces
-
-            open('success', 'w').close()
-            """
-            )
-        )
-
-    cmd = ["python", 'setup.py', 'test', '-s', 'test']
-    venv.run(cmd, cwd=str(tmpdir))
-    assert tmpdir.join('success').check()
-
-
-def test_test_command_install_requirements(venv, tmpdir, tmpdir_cwd):
-    # Ensure pip is installed.
-    venv.run(["python", "-c", "import pip"])
-    # disable index URL so bits and bobs aren't requested from PyPI
-    with contexts.environment(PYTHONPATH=None, PIP_NO_INDEX="1"):
-        _check_test_command_install_requirements(venv, tmpdir)
-
-
 def test_no_missing_dependencies(bare_venv, request):
     """
     Quick and dirty test to ensure all external dependencies are vendored.

From 67a0a043e8f5fa3bd201cc36697bd1ad66747264 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 5 Jul 2024 08:39:04 -0400
Subject: [PATCH 0855/1761] Add some doctests to paths_on_pythonpath, giving it
 full test coverage.

---
 setuptools/_path.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/setuptools/_path.py b/setuptools/_path.py
index 7ce6919136..7a521ed705 100644
--- a/setuptools/_path.py
+++ b/setuptools/_path.py
@@ -52,6 +52,18 @@ def paths_on_pythonpath(paths):
     these paths.
 
     Do this in a context that restores the value on exit.
+
+    >>> getfixture('monkeypatch').setenv('PYTHONPATH', 'anything')
+    >>> with paths_on_pythonpath(['foo', 'bar']):
+    ...     assert 'foo' in os.environ['PYTHONPATH']
+    ...     assert 'anything' in os.environ['PYTHONPATH']
+    >>> os.environ['PYTHONPATH']
+    'anything'
+
+    >>> getfixture('monkeypatch').delenv('PYTHONPATH')
+    >>> with paths_on_pythonpath(['foo', 'bar']):
+    ...     assert 'foo' in os.environ['PYTHONPATH']
+    >>> os.environ.get('PYTHONPATH')
     """
     nothing = object()
     orig_pythonpath = os.environ.get('PYTHONPATH', nothing)

From 6929cc4b127bbef49006c30a02beedaed47be42b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 5 Jul 2024 08:47:00 -0400
Subject: [PATCH 0856/1761] Add news fragment.

---
 newsfragments/931.removal.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/931.removal.rst

diff --git a/newsfragments/931.removal.rst b/newsfragments/931.removal.rst
new file mode 100644
index 0000000000..cffc7456d5
--- /dev/null
+++ b/newsfragments/931.removal.rst
@@ -0,0 +1 @@
+The test command has been removed. Users relying on 'setup.py test' will need to migrate to another test runner or pin setuptools to a version prior to this release.
\ No newline at end of file

From 6404823b4a16f340b7786793cd1256fe451f47eb Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 5 Jul 2024 08:58:44 -0400
Subject: [PATCH 0857/1761] Removed support for tests_require and other test
 command parameters.

---
 pyproject.toml                                |  4 ----
 setuptools/config/setupcfg.py                 |  1 -
 setuptools/dist.py                            | 11 ----------
 setuptools/tests/config/setupcfg_examples.txt |  1 -
 .../tests/config/test_apply_pyprojecttoml.py  | 21 -------------------
 setuptools/tests/config/test_setupcfg.py      |  6 ------
 setuptools/tests/test_bdist_wheel.py          |  1 -
 7 files changed, 45 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index f5441ddf0a..9f3c41127b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -129,19 +129,15 @@ eager_resources = "setuptools.dist:assert_string_list"
 namespace_packages = "setuptools.dist:check_nsp"
 extras_require = "setuptools.dist:check_extras"
 install_requires = "setuptools.dist:check_requirements"
-tests_require = "setuptools.dist:check_requirements"
 setup_requires = "setuptools.dist:check_requirements"
 python_requires = "setuptools.dist:check_specifier"
 entry_points = "setuptools.dist:check_entry_points"
-test_suite = "setuptools.dist:check_test_suite"
 zip_safe = "setuptools.dist:assert_bool"
 package_data = "setuptools.dist:check_package_data"
 exclude_package_data = "setuptools.dist:check_package_data"
 include_package_data = "setuptools.dist:assert_bool"
 packages = "setuptools.dist:check_packages"
 dependency_links = "setuptools.dist:assert_string_list"
-test_loader = "setuptools.dist:check_importable"
-test_runner = "setuptools.dist:check_importable"
 use_2to3 = "setuptools.dist:invalid_unless_false"
 
 [project.entry-points."egg_info.writers"]
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 80ebe3d9bd..7dbcbe60ec 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -646,7 +646,6 @@ def parsers(self):
                 self._parse_requirements_list, "install_requires"
             ),
             'setup_requires': self._parse_list_semicolon,
-            'tests_require': self._parse_list_semicolon,
             'packages': self._parse_packages,
             'entry_points': self._parse_file_in_root,
             'py_modules': parse_list,
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 32e8d43c64..a93c26deb5 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -170,11 +170,6 @@ def check_entry_points(dist, attr, value):
         raise DistutilsSetupError(e) from e
 
 
-def check_test_suite(dist, attr, value):
-    if not isinstance(value, str):
-        raise DistutilsSetupError("test_suite must be a string")
-
-
 def check_package_data(dist, attr, value):
     """Verify that value is a dictionary of package names to glob lists"""
     if not isinstance(value, dict):
@@ -235,12 +230,6 @@ class Distribution(_Distribution):
         EasyInstall and requests one of your extras, the corresponding
         additional requirements will be installed if needed.
 
-     'test_suite' -- the name of a test suite to run for the 'test' command.
-        If the user runs 'python setup.py test', the package will be installed,
-        and the named test suite will be run.  The format is the same as
-        would be used on a 'unittest.py' command line.  That is, it is the
-        dotted name of an object to import and call to generate a test suite.
-
      'package_data' -- a dictionary mapping package names to lists of filenames
         or globs to use to find data files contained in the named packages.
         If the dictionary has filenames or globs listed under '""' (the empty
diff --git a/setuptools/tests/config/setupcfg_examples.txt b/setuptools/tests/config/setupcfg_examples.txt
index 5db3565464..6aab887ff1 100644
--- a/setuptools/tests/config/setupcfg_examples.txt
+++ b/setuptools/tests/config/setupcfg_examples.txt
@@ -15,7 +15,6 @@ https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/se
 https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg
 https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg
 https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg
-https://github.com/tqdm/tqdm/raw/fc69d5dcf578f7c7986fa76841a6b793f813df35/setup.cfg
 https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg
 https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg
 https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 6b3ee9cf1e..68da71fd99 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -51,7 +51,6 @@ def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
 
     dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example)
     dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example)
-    _port_tests_require(dist_cfg)
 
     pkg_info_toml = core_metadata(dist_toml)
     pkg_info_cfg = core_metadata(dist_cfg)
@@ -84,12 +83,6 @@ def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
 
     assert set(dist_toml.install_requires) == set(dist_cfg.install_requires)
     if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)):
-        if (
-            "testing" in dist_toml.extras_require
-            and "testing" not in dist_cfg.extras_require
-        ):
-            # ini2toml can automatically convert `tests_require` to `testing` extra
-            dist_toml.extras_require.pop("testing")
         extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()}
         extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()}
         assert extra_req_toml == extra_req_cfg
@@ -467,8 +460,6 @@ def core_metadata(dist) -> str:
     skip_prefixes += ("Project-URL: Homepage,", "Home-page:")
     # May be missing in original (relying on default) but backfilled in the TOML
     skip_prefixes += ("Description-Content-Type:",)
-    # ini2toml can automatically convert `tests_require` to `testing` extra
-    skip_lines.add("Provides-Extra: testing")
     # Remove empty lines
     skip_lines.add("")
 
@@ -479,15 +470,3 @@ def core_metadata(dist) -> str:
         result.append(line + "\n")
 
     return "".join(result)
-
-
-def _port_tests_require(dist):
-    """
-    ``ini2toml`` "forward fix" deprecated tests_require definitions by moving
-    them into an extra called ``testing``.
-    """
-    tests_require = getattr(dist, "tests_require", None) or []
-    if tests_require:
-        dist.tests_require = []
-        dist.extras_require.setdefault("testing", []).extend(tests_require)
-        dist._finalize_requires()
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index bf9777c668..502b6eb5dc 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -465,7 +465,6 @@ def test_basic(self, tmpdir):
             'scripts = bin/one.py, bin/two.py\n'
             'eager_resources = bin/one.py, bin/two.py\n'
             'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n'
-            'tests_require = mock==0.7.2; pytest\n'
             'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
             'dependency_links = http://some.com/here/1, '
             'http://some.com/there/2\n'
@@ -494,7 +493,6 @@ def test_basic(self, tmpdir):
                 'spack ==1.1, ==1.3',
                 'there',
             ])
-            assert dist.tests_require == ['mock==0.7.2', 'pytest']
             assert dist.python_requires == '>=1.0, !=2.8'
             assert dist.py_modules == ['module1', 'module2']
 
@@ -521,9 +519,6 @@ def test_multiline(self, tmpdir):
             '  docutils>=0.3\n'
             '  pack ==1.1, ==1.3\n'
             '  hey\n'
-            'tests_require = \n'
-            '  mock==0.7.2\n'
-            '  pytest\n'
             'setup_requires = \n'
             '  docutils>=0.3\n'
             '  spack ==1.1, ==1.3\n'
@@ -552,7 +547,6 @@ def test_multiline(self, tmpdir):
                 'spack ==1.1, ==1.3',
                 'there',
             ])
-            assert dist.tests_require == ['mock==0.7.2', 'pytest']
 
     def test_package_dir_fail(self, tmpdir):
         fake_env(tmpdir, '[options]\npackage_dir = a b\n')
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 232b66d368..46fcff548e 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -98,7 +98,6 @@
                 setup_requires=["setuptools"],
                 install_requires=["quux", "splort"],
                 extras_require={"simple": ["simple.dist"]},
-                tests_require=["foo", "bar>=10.0.0"],
                 entry_points={
                     "console_scripts": [
                         "complex-dist=complexdist:main",

From 9e873c2f24b44c9e3f9cfd4b5dd07e591929a80b Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Mon, 8 Jul 2024 07:02:59 -0400
Subject: [PATCH 0858/1761] TST: Clarify RPATH testing asserts.

Still need to get the test compiling in the alternate location.  Did I add -L/tmp?
---
 distutils/tests/test_build_ext.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index d2bf7f3fc8..d376404ce1 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -165,14 +165,20 @@ def _test_xx(copy_so):
             so_headers = subprocess.check_output(
                 ["readelf", "-d", xx.__file__], universal_newlines=True
             )
+            import pprint
+            pprint.pprint(so_headers)
+            rpaths = [
+                rpath
+                for line in so_headers.split("\n") if "RPATH" in line or "RUNPATH" in line
+                for rpath in line.split()[2][1:-1].split(":")
+            ]
             if not copy_so:
-                import pprint
-                pprint.pprint(so_headers)
+                pprint.pprint(rpaths)
                 # Linked against a library in /usr/lib{,64}
-                assert 'RPATH' not in so_headers and 'RUNPATH' not in so_headers
+                assert "/usr/lib" not in rpaths and "/usr/lib64" not in rpaths
             else:
                 # Linked against a library in /tmp
-                assert 'RPATH' in so_headers or 'RUNPATH' in so_headers
+                assert "/tmp" in rpaths
                 # The import is the real test here
 
     def test_solaris_enable_shared(self):

From bd5b61400c501cab6fb5322dee113b91a91a0a9d Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Mon, 8 Jul 2024 07:06:24 -0400
Subject: [PATCH 0859/1761] FIX: Sort library names by length to avoid links

I think /usr/lib/libz.so links to /usr/lib/libz.so.10, which in turn links to /usr/lib/libz.so.10.5.1 (numbers may differ), with the last being the actual library.  Ensure that is the one I copy for linking.
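
A minimal sketch of the idea, with a hypothetical symlink chain (real version numbers differ):

    # Candidates as glob might return them; the fully versioned file is
    # the actual library, the shorter names are symlinks to it.
    candidates = [
        '/usr/lib/libz.so',
        '/usr/lib/libz.so.10',
        '/usr/lib/libz.so.10.5.1',
    ]
    candidates.sort(key=len)
    assert candidates[-1] == '/usr/lib/libz.so.10.5.1'

Sorting by length works here because each symlink's name is a strict prefix of the real file's name.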
---
 distutils/tests/test_build_ext.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index d376404ce1..b775a4dc27 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -113,6 +113,7 @@ def test_build_ext(self, copy_so):
                 )
             elif sys.platform == 'linux':
                 libz_so = glob.glob('/usr/lib*/libz.so*')
+                libz_so.sort(key=lambda lib_path: len(lib_path))
                 shutil.copyfile(libz_so[-1], '/tmp/libxx_z.so')
 
                 xx_ext = Extension(

From a2a64ec4795d8c441e0602bdf0b3feca5d4fd263 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 8 Jul 2024 14:37:33 -0400
Subject: [PATCH 0860/1761] Explicitly declare the 'core' extra as 'for
 informational purposes'

---
 newsfragments/2825.removal.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/2825.removal.rst b/newsfragments/2825.removal.rst
index f7a2460d4b..e775fcf318 100644
--- a/newsfragments/2825.removal.rst
+++ b/newsfragments/2825.removal.rst
@@ -1 +1 @@
-Now setuptools declares its own dependencies in the ``core`` extra. Dependencies are still vendored for bootstrapping purposes, but installed dependencies are preferred. Downstream packagers can de-vendor by simply removing the ``setuptools/_vendor`` directory.
+Now setuptools declares its own dependencies in the ``core`` extra. Dependencies are still vendored for bootstrapping purposes, but setuptools will prefer installed dependencies if present. The ``core`` extra is used for informational purposes and should *not* be declared in package metadata (e.g. ``build-requires``). Downstream packagers can de-vendor by simply removing the ``setuptools/_vendor`` directory.

From c4e64c194285e73895a858fa226cd5225beebfed Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 9 Jul 2024 11:44:04 -0400
Subject: [PATCH 0861/1761] Add news fragment.

---
 newsfragments/+4571b0f4.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/+4571b0f4.bugfix.rst

diff --git a/newsfragments/+4571b0f4.bugfix.rst b/newsfragments/+4571b0f4.bugfix.rst
new file mode 100644
index 0000000000..94c7cacdb7
--- /dev/null
+++ b/newsfragments/+4571b0f4.bugfix.rst
@@ -0,0 +1 @@
+Bugfix for building Cython extension on Windows (pypa/distutils#268).
\ No newline at end of file

From 356e9a00c4f04c1c6cf76a4a5f97325d0ab4df46 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 9 Jul 2024 11:49:44 -0400
Subject: [PATCH 0862/1761] =?UTF-8?q?Bump=20version:=2070.2.0=20=E2=86=92?=
 =?UTF-8?q?=2070.3.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg                   |  2 +-
 NEWS.rst                           | 15 +++++++++++++++
 newsfragments/+4571b0f4.bugfix.rst |  1 -
 newsfragments/4137.feature.rst     |  1 -
 pyproject.toml                     |  2 +-
 5 files changed, 17 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/+4571b0f4.bugfix.rst
 delete mode 100644 newsfragments/4137.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 96806f9494..2149b39451 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 70.2.0
+current_version = 70.3.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index df3c50f6a2..565b1f4b80 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,18 @@
+v70.3.0
+=======
+
+Features
+--------
+
+- Support for loading distutils from the standard library is now deprecated, including use of SETUPTOOLS_USE_DISTUTILS=stdlib and importing distutils before importing setuptools. (#4137)
+
+
+Bugfixes
+--------
+
+- Bugfix for building Cython extension on Windows (pypa/distutils#268).
+
+
 v70.2.0
 =======
 
diff --git a/newsfragments/+4571b0f4.bugfix.rst b/newsfragments/+4571b0f4.bugfix.rst
deleted file mode 100644
index 94c7cacdb7..0000000000
--- a/newsfragments/+4571b0f4.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Bugfix for building Cython extension on Windows (pypa/distutils#268).
\ No newline at end of file
diff --git a/newsfragments/4137.feature.rst b/newsfragments/4137.feature.rst
deleted file mode 100644
index 89ca0cb758..0000000000
--- a/newsfragments/4137.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Support for loading distutils from the standard library is now deprecated, including use of SETUPTOOLS_USE_DISTUTILS=stdlib and importing distutils before importing setuptools.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 00e7ee169f..2672b0d97c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "70.2.0"
+version = "70.3.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From c9c550fc1c646b0b95d2f9973349a37d2b01c6f1 Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Tue, 9 Jul 2024 13:58:09 -0400
Subject: [PATCH 0863/1761] BUG: Resolve links before sorting library paths.

Maybe this will work better? I should look into whether ELF `.so` files store their basename.
---
 distutils/tests/test_build_ext.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index b775a4dc27..d7296f2c18 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -1,7 +1,7 @@
 import contextlib
 import glob
 import importlib
-import os
+import os.path
 import platform
 import re
 import shutil
@@ -112,8 +112,8 @@ def test_build_ext(self, copy_so):
                     runtime_library_dirs=['/usr/lib'],
                 )
             elif sys.platform == 'linux':
-                libz_so = glob.glob('/usr/lib*/libz.so*')
-                libz_so.sort(key=lambda lib_path: len(lib_path))
+                libz_so = {os.path.realpath(name) for name in glob.iglob('/usr/lib*/libz.so*')}
+                libz_so = sorted(libz_so, key=lambda lib_path: len(lib_path))
                 shutil.copyfile(libz_so[-1], '/tmp/libxx_z.so')
 
                 xx_ext = Extension(

From 225f7fe32f5a07b99ff3fb403f517e6dbf208d68 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 9 Jul 2024 14:43:09 -0400
Subject: [PATCH 0864/1761] Rewrite marker evaluation to address two type check
 failures.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

1. Evaluating `req.marker` early works around a mypy limitation: it cannot detect that `not req.marker` is checked before the generator is entered.

2. Replacing `None` with `""` honors the type spec (`dict-item`) and avoids relying on the [deprecated code path](https://github.com/pypa/packaging/blob/793ee28b4c70886e8de5731e24e444388916b3ee/src/packaging/markers.py#L322-L323) that allowed `None`.
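
For context, a minimal sketch of marker evaluation with `packaging` (the marker string here is illustrative):

    from packaging.markers import Marker

    marker = Marker('extra == "test"')
    assert marker.evaluate({'extra': 'test'}) is True
    # Using "" rather than None keeps the environment value a str,
    # matching packaging's declared types.
    assert marker.evaluate({'extra': ''}) is False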
---
 pkg_resources/__init__.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 0fbd3c1765..8aa190cab2 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -1118,11 +1118,10 @@ def markers_pass(self, req: Requirement, extras: tuple[str, ...] | None = None):
         Return False if the req has a marker and fails
         evaluation. Otherwise, return True.
         """
-        extra_evals = (
-            req.marker.evaluate({'extra': extra})  # type: ignore
-            for extra in self.get(req, ()) + (extras or (None,))
+        return not req.marker or any(
+            req.marker.evaluate({'extra': extra})
+            for extra in self.get(req, ()) + (extras or ("",))
         )
-        return not req.marker or any(extra_evals)
 
 
 class Environment:

From 0e1e7078560708b450ff5fdd59b9c1f425b15595 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 9 Jul 2024 14:45:44 -0400
Subject: [PATCH 0865/1761] Update `Requirement.__contains__` type spec to
 accept `UnparsedVersion`.

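For illustration, a minimal sketch of the membership test this annotation covers (`UnparsedVersion` is `Version | str` in `packaging.specifiers`):

    from pkg_resources import Requirement

    req = Requirement.parse('foo>=1.2')
    assert '1.4' in req      # a plain version string now type-checks
    assert '1.0' not in req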
---
 pkg_resources/__init__.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 8aa190cab2..b8cf28cdb5 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -89,6 +89,7 @@
     # no write support, probably under GAE
     WRITE_SUPPORT = False
 
+import packaging.specifiers
 from jaraco.text import (
     yield_lines,
     drop_comment,
@@ -3457,7 +3458,9 @@ def __eq__(self, other: object):
     def __ne__(self, other):
         return not self == other
 
-    def __contains__(self, item: Distribution | str) -> bool:
+    def __contains__(
+        self, item: Distribution | packaging.specifiers.UnparsedVersion
+    ) -> bool:
         if isinstance(item, Distribution):
             if item.key != self.key:
                 return False

From 930ebe55b0a68d9da16fed8a4fa5a305b47945cb Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 9 Jul 2024 15:52:32 -0400
Subject: [PATCH 0866/1761] Rely on os.path.join and os.path.dirname when
 adding the vendored path and only add it if it's not already present.

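The one-liner below relies on list repetition by a bool: `False * [x]` is `[]` and `True * [x]` is `[x]`, so `extend` only appends when the path is missing. A minimal standalone sketch of the idiom:

    import sys

    def extend_once(path: str) -> None:
        # bool * [item] yields [] (False) or [item] (True)
        sys.path.extend((path not in sys.path) * [path])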
---
 pkg_resources/__init__.py | 2 +-
 setuptools/__init__.py    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index b8cf28cdb5..0857970fec 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -74,7 +74,7 @@
 
 import _imp
 
-sys.path.append(os.path.dirname(__file__) + '/../setuptools/_vendor')
+sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path])  # fmt: skip
 
 # capture these to bypass sandboxing
 from os import utime
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 2917c6a811..8b0e494f01 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -6,7 +6,7 @@
 import sys
 from typing import TYPE_CHECKING
 
-sys.path.append(os.path.dirname(__file__) + '/_vendor')
+sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path])  # fmt: skip
 
 import _distutils_hack.override  # noqa: F401
 import distutils.core

From 9eb89def774d29239f5d01d95a33b6f84bbbc536 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 9 Jul 2024 15:59:32 -0400
Subject: [PATCH 0867/1761] Resolve a version from an item, addressing missed
 `arg-type` check.

---
 pkg_resources/__init__.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 0857970fec..822232a7fb 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -3465,13 +3465,15 @@ def __contains__(
             if item.key != self.key:
                 return False
 
-            item = item.version
+            version = item.version
+        else:
+            version = item
 
         # Allow prereleases always in order to match the previous behavior of
         # this method. In the future this should be smarter and follow PEP 440
         # more accurately.
         return self.specifier.contains(
-            item,  # type: ignore[arg-type]
+            version,
             prereleases=True,
         )
 

From 17d5d8ba9e4812eb07d20253078b3e3600ea0ba6 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 9 Jul 2024 23:11:44 +0200
Subject: [PATCH 0868/1761] Fix 92989c2i / #4450

Restore the previous functionality, where the function did not return
explicitly in case of exceptions, and hence returned `None`.
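
Note that `pytest.skip` raises an exception rather than returning, so the restored `return None` is unreachable at runtime; it documents the implicit-`None` branch for readers and tools. A sketch (names hypothetical):

    import pytest

    def skip_or_value(value):
        if value is None:
            pytest.skip("unsupported here")  # raises pytest's Skipped
            return None  # unreachable; makes the None branch explicit
        return value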
---
 setuptools/tests/test_sdist.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 1be568d3fa..096cdc3924 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -124,6 +124,7 @@ def symlink_or_skip_test(src, dst):
         os.symlink(src, dst)
     except (OSError, NotImplementedError):
         pytest.skip("symlink not supported in OS")
+        return None
     return dst
 
 

From b7410ab911ad8faebe5f8be5f21b47bf2167c243 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 10 Jul 2024 14:38:24 -0400
Subject: [PATCH 0869/1761] Pin pytest-ruff on Windows.

Workaround for businho/pytest-ruff#28; Closes #4467.
---
 pyproject.toml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index 2672b0d97c..b8d0116b54 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -70,6 +70,9 @@ test = [
 	"pyproject-hooks!=1.1",
 
 	"jaraco.test",
+
+	# workaround for businho/pytest-ruff#28
+	'pytest-ruff < 0.4; platform_system == "Windows"',
 ]
 doc = [
 	# upstream

From 7a810b97b706cc98f15de700d7666df7ebab38d9 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 10 Jul 2024 14:57:45 -0400
Subject: [PATCH 0870/1761] Move workaround for #3921 to the local section,
 restoring upstream to match upstream.

---
 pyproject.toml | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index b8d0116b54..a584dd025c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -39,8 +39,7 @@ test = [
 	"pytest-cov",
 	"pytest-mypy",
 	"pytest-enabler >= 2.2",
-	# workaround for pypa/setuptools#3921
-	'pytest-ruff >= 0.3.2; sys_platform != "cygwin"',
+	"pytest-ruff >= 0.2.1",
 
 	# local
 	"virtualenv>=13.0.0",
@@ -66,6 +65,10 @@ test = [
 	"importlib_metadata",
 	"pytest-subprocess",
 
+	# require newer pytest-ruff than upstream for pypa/setuptools#4368
+	# also exclude cygwin for pypa/setuptools#3921
+	'pytest-ruff >= 0.3.2; sys_platform != "cygwin"',
+
 	# workaround for pypa/setuptools#4333
 	"pyproject-hooks!=1.1",
 

From 33c4896dbaeda2fd7a5fef701431dea05bb83bab Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 10 Jul 2024 15:34:09 -0400
Subject: [PATCH 0871/1761] Exclude pytest-ruff (and thus ruff), which cannot
 build on cygwin.

Ref pypa/setuptools#3921
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index ad67d3b12d..1307e1fa20 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,7 +32,7 @@ test = [
 	"pytest-cov",
 	"pytest-mypy",
 	"pytest-enabler >= 2.2",
-	"pytest-ruff >= 0.2.1",
+	"pytest-ruff >= 0.2.1; sys_platform != 'cygwin'",
 
 	# local
 ]

From 1bcd839f162be4c99dd4dd144c85a937548392bb Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 10 Jul 2024 15:42:56 -0400
Subject: [PATCH 0872/1761] Revert "Disable cygwin tests for now. Ref
 pypa/setuptools#3921"

This reverts commit d3e5de05f6afe958d0fde20945ed0f7a2dfef270.
---
 .github/workflows/main.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 1f63867d85..70d70bc6f1 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -92,8 +92,6 @@ jobs:
         run: tox -e ${{ matrix.job }}
 
   test_cygwin:
-    # disabled due to lack of Rust support pypa/setuptools#3921
-    if: ${{ false }}
     strategy:
       matrix:
         python:

From d6abb6409b881ad13e232a61ead5facf761ee779 Mon Sep 17 00:00:00 2001
From: Marco Treglia 
Date: Thu, 11 Jul 2024 17:19:30 +0200
Subject: [PATCH 0873/1761] Rename arguments on _EditableFinder and
 _EditableNamespaceFinder

This commit renames the `find_spec` arguments in _EditableFinder and
_EditableNamespaceFinder to comply with the standard
`importlib.abc.MetaPathFinder` interface.

https://docs.python.org/3/library/importlib.html#importlib.machinery.PathFinder.find_spec
https://github.com/abravalheri/typeshed/blob/eecac02ef7fdbe82b9dc57fa3ca4bb9d341116d7/stdlib/_typeshed/importlib.pyi

The `find_module` arguments were not renamed because the method `find_module`
was deprecated in Python 3.10 and removed in Python 3.12.

https://docs.python.org/3/reference/import.html#the-meta-path
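
For reference, a finder matching the standard interface looks roughly like this (sketch only; the real finders build specs from the editable install's mapping):

    from __future__ import annotations

    import importlib.abc
    from importlib.machinery import ModuleSpec

    class ExampleFinder(importlib.abc.MetaPathFinder):
        @classmethod
        def find_spec(cls, fullname: str, path=None, target=None) -> ModuleSpec | None:
            # Standard positional names: fullname, path, target.
            return None  # defer to the next finder on sys.meta_path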
---
 setuptools/command/editable_wheel.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index ae31bb4c79..d2ca2b48a6 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -805,7 +805,7 @@ def _get_root(self):
 
 class _EditableFinder:  # MetaPathFinder
     @classmethod
-    def find_spec(cls, fullname: str, _path=None, _target=None) -> ModuleSpec | None:
+    def find_spec(cls, fullname: str, path=None, target=None) -> ModuleSpec | None:  # type: ignore
         # Top-level packages and modules (we know these exist in the FS)
         if fullname in MAPPING:
             pkg_path = MAPPING[fullname]
@@ -851,7 +851,7 @@ def _paths(cls, fullname: str) -> list[str]:
         return [*paths, PATH_PLACEHOLDER]
 
     @classmethod
-    def find_spec(cls, fullname: str, _target=None) -> ModuleSpec | None:
+    def find_spec(cls, fullname: str, target=None) -> ModuleSpec | None:  # type: ignore
         if fullname in NAMESPACES:
             spec = ModuleSpec(fullname, None, is_package=True)
             spec.submodule_search_locations = cls._paths(fullname)

From f21f19a1eb5ea1e070e601ff4f05d762469a59ad Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Fri, 12 Jul 2024 21:16:45 +0200
Subject: [PATCH 0874/1761] find_library_file: port from doctest to normal test
 and fix for cygwin

Starting with b42197ceb9ac8a0cd95b530, UnixCCompiler gained improved support
for Cygwin, which made the doctest fail.

The doctest is hard to read as is, and adding more platform-specific
exceptions wouldn't help, so convert it to a normal test and special-case
Cygwin to make the test pass there again.
---
 distutils/tests/test_unixccompiler.py | 30 +++++++++++++++++++++++++++
 distutils/unixccompiler.py            | 16 --------------
 2 files changed, 30 insertions(+), 16 deletions(-)

diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index d2c88e9116..afd08bbb87 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -315,3 +315,33 @@ def test_has_function(self):
         self.cc.output_dir = 'scratch'
         os.chdir(self.mkdtemp())
         self.cc.has_function('abort')
+
+    def test_find_library_file(self, monkeypatch):
+        compiler = UnixCCompiler()
+        compiler._library_root = lambda dir: dir
+        monkeypatch.setattr(os.path, 'exists', lambda d: 'existing' in d)
+
+        dylibname = 'libabc.dylib' if sys.platform != 'cygwin' else 'cygabc.dll'
+        dirs = ('/foo/bar/missing', '/foo/bar/existing')
+        assert (
+            compiler.find_library_file(dirs, 'abc').replace('\\', '/')
+            == '/foo/bar/existing/' + dylibname
+        )
+        assert (
+            compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
+            == '/foo/bar/existing/' + dylibname
+        )
+
+        monkeypatch.setattr(
+            os.path,
+            'exists',
+            lambda d: 'existing' in d and '.a' in d and '.dll.a' not in d,
+        )
+        assert (
+            compiler.find_library_file(dirs, 'abc').replace('\\', '/')
+            == '/foo/bar/existing/libabc.a'
+        )
+        assert (
+            compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
+            == '/foo/bar/existing/libabc.a'
+        )
diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index 7e68596b26..6450fff547 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -371,22 +371,6 @@ def find_library_file(self, dirs, lib, debug=False):
         data to go on: GCC seems to prefer the shared library, so
         assume that *all* Unix C compilers do,
         ignoring even GCC's "-static" option.
-
-        >>> compiler = UnixCCompiler()
-        >>> compiler._library_root = lambda dir: dir
-        >>> monkeypatch = getfixture('monkeypatch')
-        >>> monkeypatch.setattr(os.path, 'exists', lambda d: 'existing' in d)
-        >>> dirs = ('/foo/bar/missing', '/foo/bar/existing')
-        >>> compiler.find_library_file(dirs, 'abc').replace('\\', '/')
-        '/foo/bar/existing/libabc.dylib'
-        >>> compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
-        '/foo/bar/existing/libabc.dylib'
-        >>> monkeypatch.setattr(os.path, 'exists',
-        ...     lambda d: 'existing' in d and '.a' in d)
-        >>> compiler.find_library_file(dirs, 'abc').replace('\\', '/')
-        '/foo/bar/existing/libabc.a'
-        >>> compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
-        '/foo/bar/existing/libabc.a'
         """
         lib_names = (
             self.library_filename(lib, lib_type=type)

From 7d78834c7b9e12a5a0b5b084d43383c548adb39d Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Fri, 12 Jul 2024 20:56:07 +0200
Subject: [PATCH 0875/1761] test_customize_compiler_with_msvc_python: skip for
 cygwin

Mingw32CCompiler() checks that the default compiler isn't Cygwin's, so it
can't be used under Cygwin; skip the test there.
---
 distutils/tests/test_mingwccompiler.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/distutils/tests/test_mingwccompiler.py b/distutils/tests/test_mingwccompiler.py
index fd201cd773..28f4762b2a 100644
--- a/distutils/tests/test_mingwccompiler.py
+++ b/distutils/tests/test_mingwccompiler.py
@@ -45,6 +45,7 @@ def test_cygwincc_error(self, monkeypatch):
         with pytest.raises(CCompilerError):
             distutils.cygwinccompiler.Mingw32CCompiler()
 
+    @pytest.mark.skipif('sys.platform == "cygwin"')
     def test_customize_compiler_with_msvc_python(self):
         from distutils.cygwinccompiler import Mingw32CCompiler
 

From 1b344658b9c406be8bb6cb48ee5d6c1b9ea7d765 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 10:42:00 -0400
Subject: [PATCH 0876/1761] =?UTF-8?q?Bump=20version:=2070.3.0=20=E2=86=92?=
 =?UTF-8?q?=2071.0.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 NEWS.rst                       | 9 +++++++++
 newsfragments/2825.removal.rst | 1 -
 pyproject.toml                 | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/2825.removal.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 2149b39451..24c407b586 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 70.3.0
+current_version = 71.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 565b1f4b80..52b10f837b 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v71.0.0
+=======
+
+Deprecations and Removals
+-------------------------
+
+- Now setuptools declares its own dependencies in the ``core`` extra. Dependencies are still vendored for bootstrapping purposes, but setuptools will prefer installed dependencies if present. The ``core`` extra is used for informational purposes and should *not* be declared in package metadata (e.g. ``build-requires``). Downstream packagers can de-vendor by simply removing the ``setuptools/_vendor`` directory. (#2825)
+
+
 v70.3.0
 =======
 
diff --git a/newsfragments/2825.removal.rst b/newsfragments/2825.removal.rst
deleted file mode 100644
index e775fcf318..0000000000
--- a/newsfragments/2825.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Now setuptools declares its own dependencies in the ``core`` extra. Dependencies are still vendored for bootstrapping purposes, but setuptools will prefer installed dependencies if present. The ``core`` extra is used for informational purposes and should *not* be declared in package metadata (e.g. ``build-requires``). Downstream packagers can de-vendor by simply removing the ``setuptools/_vendor`` directory.
diff --git a/pyproject.toml b/pyproject.toml
index 5955df5287..54931e2ff2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "70.3.0"
+version = "71.0.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 6c6e2e168eed329e3f0ca5089f8f5ad283dfd74a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 15:14:41 -0400
Subject: [PATCH 0877/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Closes #4473.
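
The substitution swaps equality for identity when comparing types, which is what linters flag (e.g. ruff's E721) and what exact-type checks intend. A small sketch:

    class Base: ...
    class Sub(Base): ...

    obj = Sub()
    assert type(obj) is Sub       # exact-type check via identity
    assert type(obj) is not Base  # subclasses are deliberately excluded
    assert isinstance(obj, Base)  # use isinstance when subclasses count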
---
 pkg_resources/tests/test_pkg_resources.py | 2 +-
 setuptools/command/editable_wheel.py      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 4c4c68dfff..424d5ac44b 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -279,7 +279,7 @@ def test_distribution_version_missing(
     assert expected_text in msg
     # Check that the message portion contains the path.
     assert metadata_path in msg, str((metadata_path, msg))
-    assert type(dist) == expected_dist_type
+    assert type(dist) is expected_dist_type
 
 
 @pytest.mark.xfail(
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index f6f0657e8c..13c450cb49 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -299,7 +299,7 @@ def _run_build_subcommands(self) -> None:
         build = self.get_finalized_command("build")
         for name in build.get_sub_commands():
             cmd = self.get_finalized_command(name)
-            if name == "build_py" and type(cmd) != build_py_cls:
+            if name == "build_py" and type(cmd) is not build_py_cls:
                 self._safely_run(name)
             else:
                 self.run_command(name)

From 9c53695500f496d91ba372e61668392d01a68a97 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 16:54:25 -0400
Subject: [PATCH 0878/1761] Update intersphinx link to point to redirected
 target.

---
 docs/conf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/conf.py b/docs/conf.py
index a0e3398d54..0f85661789 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -106,7 +106,7 @@
 
 intersphinx_mapping.update({
     'pip': ('https://pip.pypa.io/en/latest', None),
-    'build': ('https://pypa-build.readthedocs.io/en/latest', None),
+    'build': ('https://build.pypa.io/en/latest', None),
     'PyPUG': ('https://packaging.python.org/en/latest/', None),
     'packaging': ('https://packaging.pypa.io/en/latest/', None),
     'twine': ('https://twine.readthedocs.io/en/stable/', None),

From aa41ab5de437a96bd62f31c1c1fe5633850e80f4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 17:07:55 -0400
Subject: [PATCH 0879/1761] Pin Sphinx to <7.4 as workaround for
 sphinx-doc/sphinx#12613. Closes #4474.

---
 pyproject.toml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index 54931e2ff2..8b6fd48edd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -99,6 +99,9 @@ doc = [
 
 	# workaround for pypa/setuptools#4333
 	"pyproject-hooks!=1.1",
+
+	# workaround for sphinx-doc/sphinx#12613
+	"sphinx < 7.4",
 ]
 ssl = []
 certs = []

From 8482e6b2a75ba5b74d2cb88c85383b6dcb7c5f94 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 19:58:22 -0400
Subject: [PATCH 0880/1761] Revert "Ensure that package data from vendored
 packages gets installed."

This reverts commit fa7ee9130dd012ddfb4bf22e39e442a1e73ff0ed.
---
 pyproject.toml | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 8b6fd48edd..9bf9cc6df9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -194,10 +194,6 @@ exclude = [
 ]
 namespaces = true
 
-[tool.setuptools.package-data]
-# ensure that `setuptools/_vendor/jaraco/text/Lorem ipsum.txt` is installed
-"*" = ["*.txt"]
-
 [tool.distutils.sdist]
 formats = "zip"
 

From 1a52f11e3c28b3776a5d5184c536a280f7061acd Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 20:52:03 -0400
Subject: [PATCH 0881/1761] Revert "Disable inclusion of package data as it
 causes 'tests' to be included as data. Fixes #2505."

This reverts commit fbc9bd6fdbce132b9b5136345b0f6b3a1f6debaf.
---
 pyproject.toml | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 9bf9cc6df9..a117304491 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -178,9 +178,7 @@ PKG-INFO = "setuptools.command.egg_info:write_pkg_info"
 "dependency_links.txt" = "setuptools.command.egg_info:overwrite_arg"
 
 [tool.setuptools]
-# disabled as it causes tests to be included #2505
-# include_package_data = true
-include-package-data = false
+include-package-data = true
 
 [tool.setuptools.packages.find]
 include = [

From 5be48b997dc49f9c75f6615cb5bfab2d15323104 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 21:06:26 -0400
Subject: [PATCH 0882/1761] Add test asserting cli scripts are included in
 wheel.

Captures missed expectation in #4475.
---
 setuptools/tests/test_setuptools.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 0c5b1f18fa..176c7862de 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -307,3 +307,10 @@ def test_its_own_wheel_does_not_contain_tests(setuptools_wheel):
 
     for member in contents:
         assert '/tests/' not in member
+
+
+def test_wheel_includes_cli_scripts(setuptools_wheel):
+    with ZipFile(setuptools_wheel) as zipfile:
+        contents = [f.replace(os.sep, '/') for f in zipfile.namelist()]
+
+    assert any('cli-64.exe' in member for member in contents)

From 9f07e225b6e283bb5c9497518ad59ed104181d34 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 21:14:33 -0400
Subject: [PATCH 0883/1761] Remove test as it's redundant to the check in
 test_its_own_wheel_does_not_contain_tests.

---
 setuptools/tests/config/test_apply_pyprojecttoml.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 6b3ee9cf1e..ff9c5fc66e 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -12,7 +12,6 @@
 from inspect import cleandoc
 from pathlib import Path
 from unittest.mock import Mock
-from zipfile import ZipFile
 
 import pytest
 from ini2toml.api import LiteTranslator
@@ -422,11 +421,6 @@ def test_example_file_in_sdist(self, setuptools_sdist):
         with tarfile.open(setuptools_sdist) as tar:
             assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames())
 
-    def test_example_file_not_in_wheel(self, setuptools_wheel):
-        """Meta test to ensure auxiliary test files are not in wheel"""
-        with ZipFile(setuptools_wheel) as zipfile:
-            assert not any(name.endswith(EXAMPLES_FILE) for name in zipfile.namelist())
-
 
 class TestInteropCommandLineParsing:
     def test_version(self, tmp_path, monkeypatch, capsys):

From 75116176d417bcb65033da0373432d6d8086ab37 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 21:20:30 -0400
Subject: [PATCH 0884/1761] Mark the file as xfail for now.

It's more important to be able to include the important resources than to exclude unwanted ones. Ref #4475.
---
 setuptools/tests/test_setuptools.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 176c7862de..613a52d042 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -301,6 +301,7 @@ def test_findall_missing_symlink(tmpdir, can_symlink):
         assert found == []
 
 
+@pytest.mark.xfail(reason="unable to exclude tests; #4475 #3260")
 def test_its_own_wheel_does_not_contain_tests(setuptools_wheel):
     with ZipFile(setuptools_wheel) as zipfile:
         contents = [f.replace(os.sep, '/') for f in zipfile.namelist()]

From cf298e76bae4781ca4a1a85e7bb8ea6c8f260611 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 21:39:57 -0400
Subject: [PATCH 0885/1761] Add news fragment.

---
 newsfragments/4475.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4475.bugfix.rst

diff --git a/newsfragments/4475.bugfix.rst b/newsfragments/4475.bugfix.rst
new file mode 100644
index 0000000000..73e323eb2d
--- /dev/null
+++ b/newsfragments/4475.bugfix.rst
@@ -0,0 +1 @@
+Restored package data that went missing in 71.0. This change also incidentally causes tests to be installed once again.
\ No newline at end of file

From f2a6bb190202577595a45ceebdedb0016b7cf864 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 17 Jul 2024 21:48:13 -0400
Subject: [PATCH 0886/1761] =?UTF-8?q?Bump=20version:=2071.0.0=20=E2=86=92?=
 =?UTF-8?q?=2071.0.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4475.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4475.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 24c407b586..3e6e8fd332 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 71.0.0
+current_version = 71.0.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 52b10f837b..1b569cc88a 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v71.0.1
+=======
+
+Bugfixes
+--------
+
+- Restored package data that went missing in 71.0. This change also incidentally causes tests to be installed once again. (#4475)
+
+
 v71.0.0
 =======
 
diff --git a/newsfragments/4475.bugfix.rst b/newsfragments/4475.bugfix.rst
deleted file mode 100644
index 73e323eb2d..0000000000
--- a/newsfragments/4475.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restored package data that went missing in 71.0. This change also incidentally causes tests to be installed once again.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index a117304491..b3399570bc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "71.0.0"
+version = "71.0.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From ea5ce1a2e1406a51bd235c8afd854716d4b8a775 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 09:31:58 -0400
Subject: [PATCH 0887/1761] Update changelog to reflect common experience seen
 in #4478 and #4483.

---
 NEWS.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEWS.rst b/NEWS.rst
index 1b569cc88a..974aae24fa 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -13,7 +13,7 @@ v71.0.0
 Deprecations and Removals
 -------------------------
 
-- Now setuptools declares its own dependencies in the ``core`` extra. Dependencies are still vendored for bootstrapping purposes, but setuptools will prefer installed dependencies if present. The ``core`` extra is used for informational purposes and should *not* be declared in package metadata (e.g. ``build-requires``). Downstream packagers can de-vendor by simply removing the ``setuptools/_vendor`` directory. (#2825)
+- Now setuptools declares its own dependencies in the ``core`` extra. Dependencies are still vendored for bootstrapping purposes, but setuptools will prefer installed dependencies if present. The ``core`` extra is used for informational purposes and should *not* be declared in package metadata (e.g. ``build-requires``). Downstream packagers can de-vendor by simply removing the ``setuptools/_vendor`` directory. Since Setuptools now prefers installed dependencies, those installing to an environment with old, incompatible dependencies will not work. In that case, either uninstall the incompatible dependencies or upgrade them to satisfy those declared in ``core``. (#2825)
 
 
 v70.3.0

From 284e8afc5a481a1ac40405111058421a0c68c683 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 09:14:31 -0400
Subject: [PATCH 0888/1761] Add a failing test covering the missed expectation.

Ref #4480
---
 setuptools/tests/test_setuptools.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 613a52d042..9b7285459a 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -1,5 +1,6 @@
 """Tests for the 'setuptools' package"""
 
+import re
 import sys
 import os
 import distutils.core
@@ -315,3 +316,13 @@ def test_wheel_includes_cli_scripts(setuptools_wheel):
         contents = [f.replace(os.sep, '/') for f in zipfile.namelist()]
 
     assert any('cli-64.exe' in member for member in contents)
+
+
+@pytest.mark.xfail(reason="#4480")
+def test_wheel_includes_vendored_metadata(setuptools_wheel):
+    with ZipFile(setuptools_wheel) as zipfile:
+        contents = [f.replace(os.sep, '/') for f in zipfile.namelist()]
+
+    assert any(
+        re.search(r'_vendor/.*\.dist-info/METADATA', member) for member in contents
+    )

From 65e00b6e96c531c2b0be023475bb956ebc976c39 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 09:22:17 -0400
Subject: [PATCH 0889/1761] Include all vendored files in the sdist.

Closes #4480
---
 MANIFEST.in                         | 2 +-
 newsfragments/4480.bugfix.rst       | 1 +
 setuptools/tests/test_setuptools.py | 1 -
 3 files changed, 2 insertions(+), 2 deletions(-)
 create mode 100644 newsfragments/4480.bugfix.rst

diff --git a/MANIFEST.in b/MANIFEST.in
index c4f12dc68a..092612cb21 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -2,7 +2,7 @@ recursive-include setuptools *.py *.exe *.xml *.tmpl
 recursive-include tests *.py
 recursive-include setuptools/tests *.html
 recursive-include docs *.py *.txt *.rst *.conf *.css *.css_t Makefile indexsidebar.html
-recursive-include setuptools/_vendor *.py *.txt
+recursive-include setuptools/_vendor *
 recursive-include pkg_resources *.py *.txt
 recursive-include pkg_resources/tests/data *
 recursive-include tools *
diff --git a/newsfragments/4480.bugfix.rst b/newsfragments/4480.bugfix.rst
new file mode 100644
index 0000000000..a43949fa7d
--- /dev/null
+++ b/newsfragments/4480.bugfix.rst
@@ -0,0 +1 @@
+Include all vendored files in the sdist.
\ No newline at end of file
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 9b7285459a..566af6980e 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -318,7 +318,6 @@ def test_wheel_includes_cli_scripts(setuptools_wheel):
     assert any('cli-64.exe' in member for member in contents)
 
 
-@pytest.mark.xfail(reason="#4480")
 def test_wheel_includes_vendored_metadata(setuptools_wheel):
     with ZipFile(setuptools_wheel) as zipfile:
         contents = [f.replace(os.sep, '/') for f in zipfile.namelist()]

From 17b735a260dc6e51cce1edbeb21eaaa5a32ef188 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 09:51:06 -0400
Subject: [PATCH 0890/1761] =?UTF-8?q?Bump=20version:=2071.0.1=20=E2=86=92?=
 =?UTF-8?q?=2071.0.2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4480.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4480.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 3e6e8fd332..6c082474d9 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 71.0.1
+current_version = 71.0.2
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 974aae24fa..cd8adc74a1 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v71.0.2
+=======
+
+Bugfixes
+--------
+
+- Include all vendored files in the sdist. (#4480)
+
+
 v71.0.1
 =======
 
diff --git a/newsfragments/4480.bugfix.rst b/newsfragments/4480.bugfix.rst
deleted file mode 100644
index a43949fa7d..0000000000
--- a/newsfragments/4480.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Include all vendored files in the sdist.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index b3399570bc..c814f09b42 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "71.0.1"
+version = "71.0.2"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 299d27655f3f3a06d698b9ae06a3e3ad13943e81 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 10:42:40 -0400
Subject: [PATCH 0891/1761] Reset the backports module when enabling vendored
 packages.

Closes #4476
---
 newsfragments/4476.bugfix.rst | 1 +
 pkg_resources/__init__.py     | 2 ++
 ruff.toml                     | 5 +++++
 setuptools/__init__.py        | 2 ++
 4 files changed, 10 insertions(+)
 create mode 100644 newsfragments/4476.bugfix.rst

diff --git a/newsfragments/4476.bugfix.rst b/newsfragments/4476.bugfix.rst
new file mode 100644
index 0000000000..96122578c8
--- /dev/null
+++ b/newsfragments/4476.bugfix.rst
@@ -0,0 +1 @@
+Reset the backports module when enabling vendored packages.
\ No newline at end of file
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 822232a7fb..6b273d4d23 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -75,6 +75,8 @@
 import _imp
 
 sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path])  # fmt: skip
+# workaround for #4476
+sys.modules.pop('backports', None)
 
 # capture these to bypass sandboxing
 from os import utime
diff --git a/ruff.toml b/ruff.toml
index be78969cdb..af3c567d83 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -43,6 +43,11 @@ ignore = [
 	"ISC002",
 ]
 
+# Suppress nuisance warnings about E402 due to workaround for #4476
+[lint.per-file-ignores]
+"setuptools/__init__.py" = ["E402"]
+"pkg_resources/__init__.py" = ["E402"]
+
 [format]
 # Enable preview to get hugged parenthesis unwrapping
 preview = true
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 8b0e494f01..afca08be9c 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -7,6 +7,8 @@
 from typing import TYPE_CHECKING
 
 sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path])  # fmt: skip
+# workaround for #4476
+sys.modules.pop('backports', None)
 
 import _distutils_hack.override  # noqa: F401
 import distutils.core

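The two ``sys.modules.pop('backports', None)`` lines address a namespace-package staleness problem: if ``backports`` was imported before the vendor directory landed on ``sys.path``, its cached namespace ``__path__`` does not include the vendored copy, so ``import backports.tarfile`` fails. A minimal sketch of the pattern, with a hypothetical vendor location (the real code computes the path relative to the package):

    import os
    import sys

    # Hypothetical vendor location for illustration only.
    vendor_path = os.path.join(os.path.dirname(__file__), '_vendor')
    if vendor_path not in sys.path:
        sys.path.append(vendor_path)

    # Evict any cached 'backports' namespace package so the next
    # `import backports.tarfile` rescans sys.path and finds the vendored copy.
    sys.modules.pop('backports', None)
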
From 6d915ca1b67d43609714a70bff23526a362dd0f1 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 12:48:33 -0400
Subject: [PATCH 0892/1761] =?UTF-8?q?Bump=20version:=2071.0.2=20=E2=86=92?=
 =?UTF-8?q?=2071.0.3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4476.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4476.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 6c082474d9..5648405125 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 71.0.2
+current_version = 71.0.3
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index cd8adc74a1..6c72073293 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v71.0.3
+=======
+
+Bugfixes
+--------
+
+- Reset the backports module when enabling vendored packages. (#4476)
+
+
 v71.0.2
 =======
 
diff --git a/newsfragments/4476.bugfix.rst b/newsfragments/4476.bugfix.rst
deleted file mode 100644
index 96122578c8..0000000000
--- a/newsfragments/4476.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Reset the backports module when enabling vendored packages.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index c814f09b42..6e93ec1fb5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "71.0.2"
+version = "71.0.3"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From b4963ff1983a16976d8aab56488a222633f7c827 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 14:17:55 -0400
Subject: [PATCH 0893/1761] Rely on pytest defaults for reporting.

Closes #4491
---
 pytest.ini | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pytest.ini b/pytest.ini
index e49b81561a..292b65864c 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -4,7 +4,6 @@ addopts=
 	--doctest-modules
 	--import-mode importlib
 	--doctest-glob=pkg_resources/api_tests.txt
-	-r sxX
 consider_namespace_packages=true
 filterwarnings=
 	# Fail on warnings

From 6e90cbd45013d0b89f5f5f6f250fc2e70bd146d1 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 14:37:50 -0400
Subject: [PATCH 0894/1761] Revert "Pin Sphinx to <7.4 as workaround for
 sphinx-doc/sphinx#12613. Closes #4474."

This reverts commit aa41ab5de437a96bd62f31c1c1fe5633850e80f4.

A fix has been released in 7.4.6 (sphinx-doc/sphinx#12615).
---
 pyproject.toml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 6e93ec1fb5..bcd2416334 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -99,9 +99,6 @@ doc = [
 
 	# workaround for pypa/setuptools#4333
 	"pyproject-hooks!=1.1",
-
-	# workaround for sphinx-doc/sphinx#12613
-	"sphinx < 7.4",
 ]
 ssl = []
 certs = []

From dc56040f4ef01fba97624ac0186463c7876e5ec2 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 21:34:41 -0400
Subject: [PATCH 0895/1761] Revert "Fix error when integrating with pip
 (#3823)"

This reverts commit 19cbbadd304c41873e6d1fd531b964eaf675672d, reversing
changes made to bd37bfc622a6f2220c2e4e30b18f2cd2904b7da6.
---
 setuptools/command/egg_info.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 9e63a934e6..fd9b54a411 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -255,8 +255,7 @@ def finalize_options(self):
         # to the version info
         #
         pd = self.distribution._patched_dist
-        key = getattr(pd, "key", None) or getattr(pd, "name", None)
-        if pd is not None and key == self.egg_name.lower():
+        if pd is not None and pd.key == self.egg_name.lower():
             pd._version = self.egg_version
             pd._parsed_version = packaging.version.Version(self.egg_version)
             self.distribution._patched_dist = None

From fbba8dec6d2fa251e826cb6024740f21d5505d30 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 22:04:21 -0400
Subject: [PATCH 0896/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 pkg_resources/__init__.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 6b273d4d23..dbcc87d771 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -2628,6 +2628,7 @@ def _normalize_cached(filename: StrPath) -> str: ...
     @overload
     def _normalize_cached(filename: BytesPath) -> bytes: ...
     def _normalize_cached(filename: StrOrBytesPath) -> str | bytes: ...
+
 else:
 
     @functools.lru_cache(maxsize=None)

From c7844325a6b933c8e71be0532848905651c194f7 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 18 Jul 2024 21:52:03 -0400
Subject: [PATCH 0897/1761] Removed lingering unused code around
 Distribution._patched_dist.

Fixes #4489
---
 newsfragments/4489.bugfix.rst  |  1 +
 setuptools/command/egg_info.py | 10 ----------
 setuptools/dist.py             | 18 ------------------
 3 files changed, 1 insertion(+), 28 deletions(-)
 create mode 100644 newsfragments/4489.bugfix.rst

diff --git a/newsfragments/4489.bugfix.rst b/newsfragments/4489.bugfix.rst
new file mode 100644
index 0000000000..3f11d73393
--- /dev/null
+++ b/newsfragments/4489.bugfix.rst
@@ -0,0 +1 @@
+Removed lingering unused code around Distribution._patched_dist.
\ No newline at end of file
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index fd9b54a411..30b62f5f2e 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -250,16 +250,6 @@ def finalize_options(self):
         #
         self.distribution.metadata.version = self.egg_version
 
-        # If we bootstrapped around the lack of a PKG-INFO, as might be the
-        # case in a fresh checkout, make sure that any special tags get added
-        # to the version info
-        #
-        pd = self.distribution._patched_dist
-        if pd is not None and pd.key == self.egg_name.lower():
-            pd._version = self.egg_version
-            pd._parsed_version = packaging.version.Version(self.egg_version)
-            self.distribution._patched_dist = None
-
     def _get_egg_basename(self, py_version=PY_MAJOR, platform=None):
         """Compute filename of the output egg. Private API."""
         return _egg_basename(self.egg_name, self.egg_version, py_version, platform)
diff --git a/setuptools/dist.py b/setuptools/dist.py
index bcab50ba65..b4496ab986 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -6,7 +6,6 @@
 import os
 import re
 import sys
-from contextlib import suppress
 from glob import iglob
 from pathlib import Path
 from typing import TYPE_CHECKING, MutableMapping
@@ -28,7 +27,6 @@
 from packaging.version import Version
 
 from . import _entry_points
-from . import _normalization
 from . import _reqs
 from . import command as _  # noqa  -- imported for side-effects
 from ._importlib import metadata
@@ -269,24 +267,9 @@ class Distribution(_Distribution):
         'extras_require': dict,
     }
 
-    _patched_dist = None
     # Used by build_py, editable_wheel and install_lib commands for legacy namespaces
     namespace_packages: list[str]  #: :meta private: DEPRECATED
 
-    def patch_missing_pkg_info(self, attrs):
-        # Fake up a replacement for the data that would normally come from
-        # PKG-INFO, but which might not yet be built if this is a fresh
-        # checkout.
-        #
-        if not attrs or 'name' not in attrs or 'version' not in attrs:
-            return
-        name = _normalization.safe_name(str(attrs['name'])).lower()
-        with suppress(metadata.PackageNotFoundError):
-            dist = metadata.distribution(name)
-            if dist is not None and not dist.read_text('PKG-INFO'):
-                dist._version = _normalization.safe_version(str(attrs['version']))
-                self._patched_dist = dist
-
     def __init__(self, attrs: MutableMapping | None = None) -> None:
         have_package_data = hasattr(self, "package_data")
         if not have_package_data:
@@ -295,7 +278,6 @@ def __init__(self, attrs: MutableMapping | None = None) -> None:
         self.dist_files: list[tuple[str, str, str]] = []
         # Filter-out setuptools' specific options.
         self.src_root = attrs.pop("src_root", None)
-        self.patch_missing_pkg_info(attrs)
         self.dependency_links = attrs.pop('dependency_links', [])
         self.setup_requires = attrs.pop('setup_requires', [])
         for ep in metadata.entry_points(group='distutils.setup_keywords'):

From 8608d384c84ece28a10831d1f5144544c8c5f692 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= 
Date: Thu, 18 Jul 2024 21:07:53 +0200
Subject: [PATCH 0898/1761] Add Python version specifiers to [core]
 dependencies

Add Python version specifiers to importlib_metadata, importlib_resources
and tomli dependencies, to require them only on Python versions on which
they are actually used.
---
 newsfragments/4492.misc.rst | 1 +
 pyproject.toml              | 6 +++---
 2 files changed, 4 insertions(+), 3 deletions(-)
 create mode 100644 newsfragments/4492.misc.rst

diff --git a/newsfragments/4492.misc.rst b/newsfragments/4492.misc.rst
new file mode 100644
index 0000000000..9da07e43f9
--- /dev/null
+++ b/newsfragments/4492.misc.rst
@@ -0,0 +1 @@
+Now backports in ``core`` dependencies are installed only on Python versions requiring them.
diff --git a/pyproject.toml b/pyproject.toml
index bcd2416334..61a9f9462a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -107,9 +107,9 @@ core = [
 	"ordered-set>=3.1.1",
 	"more_itertools>=8.8",
 	"jaraco.text>=3.7",
-	"importlib_resources>=5.10.2",
-	"importlib_metadata>=6",
-	"tomli>=2.0.1",
+	"importlib_resources>=5.10.2; python_version < '3.9'",
+	"importlib_metadata>=6; python_version < '3.10'",
+	"tomli>=2.0.1; python_version < '3.11'",
 	"wheel>=0.43.0",
 
 	# pkg_resources

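The specifiers added here are PEP 508 environment markers; installers evaluate them against the running interpreter, so each backport is pulled in only where the stdlib equivalent is missing. A small sketch using the ``packaging`` library's public ``Marker`` API:

    from packaging.markers import Marker

    for spec in (
        "python_version < '3.9'",   # importlib_resources backport
        "python_version < '3.10'",  # importlib_metadata backport
        "python_version < '3.11'",  # tomli backport
    ):
        # evaluate() consults the current interpreter's environment by default
        print(spec, '->', Marker(spec).evaluate())
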
From f087fb4ca05eb08c46abdd2cd67b18a3f33e3c79 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 19 Jul 2024 16:32:46 +0200
Subject: [PATCH 0899/1761] "preserve" does not require preview any more
 (jaraco/skeleton#133)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* "preserve" does not require preview any more
* Update URL in ruff.toml comment

---------

Co-authored-by: Bartosz Sławecki 
---
 ruff.toml | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 70612985a7..7da4bee791 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -22,7 +22,5 @@ ignore = [
 ]
 
 [format]
-# Enable preview, required for quote-style = "preserve"
-preview = true
-# https://docs.astral.sh/ruff/settings/#format-quote-style
+# https://docs.astral.sh/ruff/settings/#format_quote-style
 quote-style = "preserve"

From 30f940e74b599400347d1162b7096f184cc46d31 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 19 Jul 2024 16:34:53 +0200
Subject: [PATCH 0900/1761] Enforce ruff/Perflint rule PERF401
 (jaraco/skeleton#132)

---
 ruff.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ruff.toml b/ruff.toml
index 7da4bee791..f1d03f8379 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,6 +1,7 @@
 [lint]
 extend-select = [
 	"C901",
+	"PERF401",
 	"W",
 ]
 ignore = [

From ab34814ca3ffe511ad63bb9589da06fd76758db8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 12:33:01 -0400
Subject: [PATCH 0901/1761] Re-enable preview, this time not for one specific
 feature, but for all features in preview.

Ref jaraco/skeleton#133
---
 ruff.toml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/ruff.toml b/ruff.toml
index f1d03f8379..922aa1f198 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -23,5 +23,8 @@ ignore = [
 ]
 
 [format]
+# Enable preview to get hugged parenthesis unwrapping and other nice surprises
+# See https://github.com/jaraco/skeleton/pull/133#issuecomment-2239538373
+preview = true
 # https://docs.astral.sh/ruff/settings/#format_quote-style
 quote-style = "preserve"

From 3debdcc4bcf9904457c9154694b2fcbd1f773886 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 16:21:30 -0400
Subject: [PATCH 0902/1761] Simply accept pathlib objects and convert them
 early for compatibility with upstream.

---
 distutils/_msvccompiler.py        | 4 ++--
 distutils/extension.py            | 3 +--
 distutils/filelist.py             | 2 +-
 distutils/tests/test_build_ext.py | 3 +--
 distutils/tests/test_extension.py | 4 ++--
 5 files changed, 7 insertions(+), 9 deletions(-)

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index 8c6fb5b5e6..4f081c7e92 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -367,9 +367,9 @@ def compile(  # noqa: C901
                 src = os.path.abspath(src)
 
             if ext in self._c_extensions:
-                input_opt = "/Tc" + str(src)
+                input_opt = "/Tc" + src
             elif ext in self._cpp_extensions:
-                input_opt = "/Tp" + str(src)
+                input_opt = "/Tp" + src
                 add_cpp_opts = True
             elif ext in self._rc_extensions:
                 # compile .RC to .RES file
diff --git a/distutils/extension.py b/distutils/extension.py
index 3f950d5a73..6b195598f6 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -4,7 +4,6 @@
 modules in setup scripts."""
 
 import os
-import pathlib
 import warnings
 
 # This class is really only used by the "build_ext" command, so it might
@@ -116,7 +115,7 @@ def __init__(
             )
 
         self.name = name
-        self.sources = list(map(pathlib.Path, sources))
+        self.sources = list(map(os.fspath, sources))
         self.include_dirs = include_dirs or []
         self.define_macros = define_macros or []
         self.undef_macros = undef_macros or []
diff --git a/distutils/filelist.py b/distutils/filelist.py
index d0ad54aba6..3205762654 100644
--- a/distutils/filelist.py
+++ b/distutils/filelist.py
@@ -251,7 +251,7 @@ def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
         pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
         self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern)
         for i in range(len(self.files) - 1, -1, -1):
-            if pattern_re.search(str(self.files[i])):
+            if pattern_re.search(self.files[i]):
                 self.debug_print(" removing " + self.files[i])
                 del self.files[i]
                 files_found = True
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index 7e8a4ddb29..ae66bc4eb8 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -4,7 +4,6 @@
 import textwrap
 import site
 import contextlib
-import pathlib
 import platform
 import tempfile
 import importlib
@@ -336,7 +335,7 @@ def test_get_source_files(self):
         dist = Distribution({'name': 'xx', 'ext_modules': modules})
         cmd = self.build_ext(dist)
         cmd.ensure_finalized()
-        assert cmd.get_source_files() == [pathlib.Path('xxx')]
+        assert cmd.get_source_files() == ['xxx']
 
     def test_unicode_module_names(self):
         modules = [
diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py
index 023c7f9fc3..3fbea21e28 100644
--- a/distutils/tests/test_extension.py
+++ b/distutils/tests/test_extension.py
@@ -75,9 +75,9 @@ def test_extension_init(self):
         with pytest.raises(AssertionError):
             Extension('name', ['file', 1])
         ext = Extension('name', ['file1', 'file2'])
-        assert ext.sources == [Path('file1'), Path('file2')]
+        assert ext.sources == ['file1', 'file2']
         ext = Extension('name', [Path('file1'), Path('file2')])
-        assert ext.sources == [Path('file1'), Path('file2')]
+        assert ext.sources == ['file1', 'file2']
 
         # others arguments have defaults
         for attr in (

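The change normalizes path-like arguments once, at the boundary: ``os.fspath`` passes ``str`` through unchanged and converts ``os.PathLike`` objects (such as ``pathlib.Path``) to ``str``, so downstream string operations like ``"/Tc" + src`` keep working. A sketch of the approach:

    import os
    from pathlib import Path

    def normalize_sources(sources):
        # os.fspath: str in -> str out; Path in -> str out
        return [os.fspath(src) for src in sources]

    assert normalize_sources(['file1', Path('file2')]) == ['file1', 'file2']
    # Downstream concatenation now works for every element:
    opts = ['/Tc' + src for src in normalize_sources([Path('a.c')])]
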
From 48f95c0a1b8aa033d5a489eff8c4a6abc881b743 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 17:49:09 -0400
Subject: [PATCH 0903/1761] =?UTF-8?q?Bump=20version:=2071.0.3=20=E2=86=92?=
 =?UTF-8?q?=2071.0.4?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4489.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4489.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 5648405125..c74a2d56c8 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 71.0.3
+current_version = 71.0.4
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 6c72073293..9065b038b7 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v71.0.4
+=======
+
+Bugfixes
+--------
+
+- Removed lingering unused code around Distribution._patched_dist. (#4489)
+
+
 v71.0.3
 =======
 
diff --git a/newsfragments/4489.bugfix.rst b/newsfragments/4489.bugfix.rst
deleted file mode 100644
index 3f11d73393..0000000000
--- a/newsfragments/4489.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Removed lingering unused code around Distribution._patched_dist.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index bcd2416334..020f57ee17 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "71.0.3"
+version = "71.0.4"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 3aba4d4b2abd6b15760f1532871e2321acd2d658 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sat, 20 Jul 2024 15:59:27 -0400
Subject: [PATCH 0904/1761] Update mypy to 1.11

---
 mypy.ini                             |  3 ++-
 pyproject.toml                       |  5 ++++-
 setuptools/command/editable_wheel.py |  7 ++-----
 setuptools/command/install.py        |  9 +++++++--
 setuptools/command/install_lib.py    |  2 +-
 setuptools/command/upload_docs.py    |  2 +-
 setuptools/tests/test_wheel.py       | 10 ++++++++--
 7 files changed, 25 insertions(+), 13 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index c4b30d1acd..4fba13c286 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -38,7 +38,8 @@ disable_error_code = import-not-found
 [mypy-distutils._modified,jaraco.*,trove_classifiers,wheel.*]
 ignore_missing_imports = True
 
-# Even when excluding generated modules, there might be problems: https://github.com/python/mypy/issues/11936#issuecomment-1466764006
+# Even when excluding a module, import issues can show up due to following import
+# https://github.com/python/mypy/issues/11936#issuecomment-1466764006
 [mypy-setuptools.config._validate_pyproject.*]
 follow_imports = silent
 # silent => ignore errors when following imports
diff --git a/pyproject.toml b/pyproject.toml
index 020f57ee17..58e991c50e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -59,7 +59,10 @@ test = [
 	# for tools/finalize.py
 	'jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin"',
 	"pytest-home >= 0.5",
-	"mypy==1.10.0", # pin mypy version so a new version doesn't suddenly cause the CI to fail
+	# pin mypy version so a new version doesn't suddenly cause the CI to fail,
+	# until types-setuptools is removed from typeshed.
+	# For help with static-typing issues, or mypy update, ping @Avasam 
+	"mypy==1.11",
 	# No Python 3.11 dependencies require tomli, but needed for type-checking since we import it directly
 	"tomli",
 	# No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 13c450cb49..49fd609b15 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -443,8 +443,7 @@ def __init__(
     ):
         self.auxiliary_dir = Path(auxiliary_dir)
         self.build_lib = Path(build_lib).resolve()
-        # TODO: Update typeshed distutils stubs to overload non-None return type by default
-        self._file = dist.get_command_obj("build_py").copy_file  # type: ignore[union-attr]
+        self._file = dist.get_command_obj("build_py").copy_file
         super().__init__(dist, name, [self.auxiliary_dir])
 
     def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
@@ -462,9 +461,7 @@ def _create_file(self, relative_output: str, src_file: str, link=None):
         dest = self.auxiliary_dir / relative_output
         if not dest.parent.is_dir():
             dest.parent.mkdir(parents=True)
-        # TODO: Update typeshed distutils stubs so distutils.cmd.Command.copy_file, accepts PathLike
-        # same with methods used by copy_file
-        self._file(src_file, dest, link=link)  # type: ignore[arg-type]
+        self._file(src_file, dest, link=link)
 
     def _create_links(self, outputs, output_mapping):
         self.auxiliary_dir.mkdir(parents=True, exist_ok=True)
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index c49fcda939..f1ea2adf1d 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -1,9 +1,12 @@
+from __future__ import annotations
+
+from collections.abc import Callable
 from distutils.errors import DistutilsArgError
 import inspect
 import glob
 import platform
 import distutils.command.install as orig
-from typing import cast
+from typing import Any, ClassVar, cast
 
 import setuptools
 from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
@@ -29,7 +32,9 @@ class install(orig.install):
         'old-and-unmanageable',
         'single-version-externally-managed',
     ]
-    new_commands = [
+    # Type the same as distutils.command.install.install.sub_commands
+    # Must keep the second tuple item potentially None due to invariance
+    new_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] = [
         ('install_egg_info', lambda self: True),
         ('install_scripts', lambda self: True),
     ]
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index 5e74be247e..9a4b0472f0 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -97,7 +97,7 @@ def copy_tree(
         exclude = self.get_exclusions()
 
         if not exclude:
-            return orig.install_lib.copy_tree(self, infile, outfile)  # type: ignore[arg-type] # Fixed upstream
+            return orig.install_lib.copy_tree(self, infile, outfile)
 
         # Exclude namespace package __init__.py* files from the output
 
diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py
index 3fbbb62553..32c9abd796 100644
--- a/setuptools/command/upload_docs.py
+++ b/setuptools/command/upload_docs.py
@@ -50,7 +50,7 @@ def has_sphinx(self):
             and metadata.entry_points(group='distutils.commands', name='build_sphinx')
         )
 
-    sub_commands = [('build_sphinx', has_sphinx)]  # type: ignore[list-item] # TODO: Fix in typeshed distutils stubs
+    sub_commands = [('build_sphinx', has_sphinx)]
 
     def initialize_options(self):
         upload.initialize_options(self)
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index e58ccd8d18..cc5d54b6d9 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -1,5 +1,7 @@
 """wheel tests"""
 
+from __future__ import annotations
+
 from distutils.sysconfig import get_config_var
 from distutils.util import get_platform
 import contextlib
@@ -11,6 +13,7 @@
 import shutil
 import subprocess
 import sys
+from typing import Any
 import zipfile
 
 import pytest
@@ -176,7 +179,10 @@ def __repr__(self):
         return '%s(**%r)' % (self._id, self._fields)
 
 
-WHEEL_INSTALL_TESTS = (
+# Using Any to avoid possible type union issues later in test
+# making a TypedDict is not worth in a test and anonymous/inline TypedDict are experimental
+# https://github.com/python/mypy/issues/9884
+WHEEL_INSTALL_TESTS: tuple[dict[str, Any], ...] = (
     dict(
         id='basic',
         file_defs={'foo': {'__init__.py': ''}},
@@ -547,7 +553,7 @@ def __repr__(self):
 @pytest.mark.parametrize(
     'params',
     WHEEL_INSTALL_TESTS,
-    ids=list(params['id'] for params in WHEEL_INSTALL_TESTS),
+    ids=[params['id'] for params in WHEEL_INSTALL_TESTS],
 )
 def test_wheel_install(params):
     project_name = params.get('name', 'foo')

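The comment about invariance in ``install.py`` above reflects a general typing rule: ``list[T]`` is invariant in ``T``, so a list typed without the ``| None`` member is not assignable where the distutils-style ``sub_commands`` type (whose predicate slot may be ``None``, meaning "always run") is expected. A minimal sketch of the constraint, using toy classes rather than setuptools' actual ones:

    from __future__ import annotations

    from collections.abc import Callable
    from typing import Any, ClassVar

    class Base:
        # distutils convention: (command_name, predicate); None means "always run"
        sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] = [
            ('build_py', None),
        ]

    class Derived(Base):
        # Keeping "| None" lets these entries combine with sub_commands;
        # a narrower list[tuple[str, Callable[[Any], bool]]] would be rejected
        # because list is invariant in its element type.
        new_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] = [
            ('install_scripts', lambda self: True),
        ]
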
From e7575ae7b31c987916bf4724a979b91a8bfa3244 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 21 Jul 2024 00:10:17 -0400
Subject: [PATCH 0905/1761] Update pyproject.toml

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 58e991c50e..db64f2bebd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -62,7 +62,7 @@ test = [
 	# pin mypy version so a new version doesn't suddenly cause the CI to fail,
 	# until types-setuptools is removed from typeshed.
 	# For help with static-typing issues, or mypy update, ping @Avasam 
-	"mypy==1.11",
+	"mypy==1.11.*",
 	# No Python 3.11 dependencies require tomli, but needed for type-checking since we import it directly
 	"tomli",
 	# No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly

From 022cedbf2c535e6fb81adc97afbfdc1e3e39cb8a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 21 Jul 2024 11:59:21 -0400
Subject: [PATCH 0906/1761] Switch to uv for vendoring.

More durable workaround for pypa/pip#12770. Ref #4492.
---
 tools/vendored.py | 9 +--------
 tox.ini           | 1 +
 2 files changed, 2 insertions(+), 8 deletions(-)

diff --git a/tools/vendored.py b/tools/vendored.py
index 2101e7c20f..2525d5fdce 100644
--- a/tools/vendored.py
+++ b/tools/vendored.py
@@ -1,6 +1,5 @@
 import functools
 import re
-import sys
 import subprocess
 
 import jaraco.packaging.metadata
@@ -52,14 +51,8 @@ def install_deps(deps, vendor):
     """
     Install the deps to vendor.
     """
-    # workaround for https://github.com/pypa/pip/issues/12770
-    deps += [
-        'zipp >= 3.7',
-        'backports.tarfile',
-    ]
     install_args = [
-        sys.executable,
-        '-m',
+        'uv',
         'pip',
         'install',
         '--target',
diff --git a/tox.ini b/tox.ini
index 00e38fbb9a..f457ff1fee 100644
--- a/tox.ini
+++ b/tox.ini
@@ -77,6 +77,7 @@ deps =
 	jaraco.packaging
 	# workaround for pypa/pyproject-hooks#192
 	pyproject-hooks<1.1
+	uv
 commands =
 	vendor: python -m tools.vendored
 

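With this patch the vendoring helper shells out to uv's pip-compatible interface instead of ``python -m pip``, keeping the same argument shape. A sketch of the resulting invocation, with an illustrative dependency list and target (not the real ones):

    import subprocess

    deps = ['packaging', 'wheel >= 0.43.0']   # illustrative, not the real list
    vendor_dir = 'setuptools/_vendor'

    # uv's `pip install` subcommand mirrors pip's CLI, including --target
    subprocess.check_call(['uv', 'pip', 'install', '--target', vendor_dir, *deps])
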
From 08bd31115732ece3cca50bd93f338e1b90dead34 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 21 Jul 2024 12:02:22 -0400
Subject: [PATCH 0907/1761] =?UTF-8?q?Bump=20version:=2071.0.4=20=E2=86=92?=
 =?UTF-8?q?=2071.1.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 17 +++++++++++++++++
 newsfragments/4409.feature.rst |  3 ---
 newsfragments/4492.misc.rst    |  1 -
 pyproject.toml                 |  2 +-
 5 files changed, 19 insertions(+), 6 deletions(-)
 delete mode 100644 newsfragments/4409.feature.rst
 delete mode 100644 newsfragments/4492.misc.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index c74a2d56c8..486c1c369c 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 71.0.4
+current_version = 71.1.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 9065b038b7..9fbcf41427 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,20 @@
+v71.1.0
+=======
+
+Features
+--------
+
+- Added return types to typed public functions -- by :user:`Avasam`
+
+  Marked `pkg_resources` as ``py.typed`` -- by :user:`Avasam` (#4409)
+
+
+Misc
+----
+
+- #4492
+
+
 v71.0.4
 =======
 
diff --git a/newsfragments/4409.feature.rst b/newsfragments/4409.feature.rst
deleted file mode 100644
index 9dd157092c..0000000000
--- a/newsfragments/4409.feature.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Added return types to typed public functions -- by :user:`Avasam`
-
-Marked `pkg_resources` as ``py.typed`` -- by :user:`Avasam`
diff --git a/newsfragments/4492.misc.rst b/newsfragments/4492.misc.rst
deleted file mode 100644
index 9da07e43f9..0000000000
--- a/newsfragments/4492.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Now backports in ``core`` dependencies are installed only on Python versions requiring them.
diff --git a/pyproject.toml b/pyproject.toml
index 717431bd81..2d8d49d184 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "71.0.4"
+version = "71.1.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 63c89f93d6d43ff96ce5f7f5a862395f924905d0 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 21 Jul 2024 12:17:34 -0400
Subject: [PATCH 0908/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 setuptools/tests/test_setuptools.py | 1 -
 setuptools/tests/test_virtualenv.py | 5 -----
 2 files changed, 6 deletions(-)

diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 073d14507f..9865ee847c 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -5,7 +5,6 @@
 import os
 import distutils.core
 import distutils.cmd
-from distutils.errors import DistutilsOptionError
 from distutils.errors import DistutilsSetupError
 from distutils.core import Extension
 from zipfile import ZipFile
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py
index 21e5273529..4554581ed0 100644
--- a/setuptools/tests/test_virtualenv.py
+++ b/setuptools/tests/test_virtualenv.py
@@ -4,14 +4,9 @@
 from urllib.request import urlopen
 from urllib.error import URLError
 
-import pathlib
 
 import pytest
 
-from . import contexts
-from .textwrap import DALS
-from .test_easy_install import make_nspkg_sdist
-
 
 @pytest.fixture(autouse=True)
 def pytest_virtualenv_works(venv):

From 61c9f88d84f659c8964b8a209b7ea89475d7ad31 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 22 Jul 2024 02:58:29 -0400
Subject: [PATCH 0909/1761] Merge overloaded method definitions from typeshed

---
 setuptools/__init__.py     | 22 +++++++++++++++++-----
 setuptools/command/test.py | 19 ++++++++++++++++---
 2 files changed, 33 insertions(+), 8 deletions(-)

diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index afca08be9c..69c1f5acb9 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -1,10 +1,13 @@
 """Extensions to the 'distutils' for large or complex distributions"""
 
+from __future__ import annotations
+
 import functools
 import os
 import re
 import sys
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, TypeVar, overload
+
 
 sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path])  # fmt: skip
 # workaround for #4476
@@ -15,7 +18,7 @@
 from distutils.errors import DistutilsOptionError
 
 from . import logging, monkey
-from . import version as _version_module
+from .version import __version__ as __version__
 from .depends import Require
 from .discovery import PackageFinder, PEP420PackageFinder
 from .dist import Distribution
@@ -33,11 +36,10 @@
     'find_namespace_packages',
 ]
 
-__version__ = _version_module.__version__
+_CommandT = TypeVar("_CommandT", bound="_Command")
 
 bootstrap_install_from = None
 
-
 find_packages = PackageFinder.find
 find_namespace_packages = PEP420PackageFinder.find
 
@@ -221,7 +223,17 @@ def ensure_string_list(self, option):
                     "'%s' must be a list of strings (got %r)" % (option, val)
                 )
 
-    def reinitialize_command(self, command, reinit_subcommands=False, **kw):
+    @overload  # type:ignore[override] # Extra **kw param
+    def reinitialize_command(
+        self, command: str, reinit_subcommands: bool = False, **kw
+    ) -> _Command: ...
+    @overload
+    def reinitialize_command(
+        self, command: _CommandT, reinit_subcommands: bool = False, **kw
+    ) -> _CommandT: ...
+    def reinitialize_command(
+        self, command: str | _Command, reinit_subcommands: bool = False, **kw
+    ) -> _Command:
         cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
         vars(cmd).update(kw)
         return cmd
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
index fbdf9fb942..a1e30ee966 100644
--- a/setuptools/command/test.py
+++ b/setuptools/command/test.py
@@ -1,8 +1,11 @@
+from __future__ import annotations
+from collections.abc import Callable
 import os
 import operator
 import sys
 import contextlib
 import itertools
+from typing import TYPE_CHECKING, Generic, TypeVar, overload
 import unittest
 from distutils.errors import DistutilsError, DistutilsOptionError
 from distutils import log
@@ -22,6 +25,12 @@
 from more_itertools import unique_everseen
 from jaraco.functools import pass_none
 
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+_T = TypeVar("_T")
+_R = TypeVar("_R")
+
 
 class ScanningLoader(TestLoader):
     def __init__(self):
@@ -63,11 +72,15 @@ def loadTestsFromModule(self, module, pattern=None):
 
 
 # adapted from jaraco.classes.properties:NonDataProperty
-class NonDataProperty:
-    def __init__(self, fget):
+class NonDataProperty(Generic[_T, _R]):
+    def __init__(self, fget: Callable[[_T], _R]):
         self.fget = fget
 
-    def __get__(self, obj, objtype=None):
+    @overload
+    def __get__(self, obj: None, objtype: object = None) -> Self: ...
+    @overload
+    def __get__(self, obj: _T, objtype: object = None) -> _R: ...
+    def __get__(self, obj: _T | None, objtype: object = None) -> Self | _R:
         if obj is None:
             return self
         return self.fget(obj)

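The ``@overload`` pair added to ``reinitialize_command`` encodes a name-or-instance API: a ``str`` argument yields some ``Command``, while a command instance comes back as its own precise type via the ``_CommandT`` type variable. A self-contained sketch of the pattern, with toy classes standing in for the distutils ones:

    from __future__ import annotations

    from typing import TypeVar, overload

    class Command: ...

    class BuildPy(Command): ...

    _CommandT = TypeVar("_CommandT", bound=Command)

    @overload
    def reinitialize_command(command: str) -> Command: ...
    @overload
    def reinitialize_command(command: _CommandT) -> _CommandT: ...
    def reinitialize_command(command: str | Command) -> Command:
        # Toy lookup: resolve names to fresh instances, pass instances through.
        return BuildPy() if isinstance(command, str) else command

    cmd = reinitialize_command(BuildPy())  # type checkers infer BuildPy here
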
From 99d2c722ca5d58ef1360ed86a3252cc16bd84dfd Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 22 Jul 2024 09:39:50 -0400
Subject: [PATCH 0910/1761] Add documentation clarifying how to reliably
 install setuptools with its dependencies.

Ref #4483
---
 docs/userguide/quickstart.rst | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst
index c4875d71fc..b0f1401e0e 100644
--- a/docs/userguide/quickstart.rst
+++ b/docs/userguide/quickstart.rst
@@ -7,7 +7,7 @@ Installation
 
 You can install the latest version of ``setuptools`` using :pypi:`pip`::
 
-    pip install --upgrade setuptools
+    pip install --upgrade setuptools[core]
 
 Most of the times, however, you don't have to...
 
@@ -56,6 +56,16 @@ containing a ``build-system`` section similar to the example below:
 This section declares what are your build system dependencies, and which
 library will be used to actually do the packaging.
 
+.. note::
+
+   Package maintainers might be tempted to use ``setuptools[core]`` as the
+   requirement, given the guidance above. Avoid doing so, as the extra
+   is currently considered an internal implementation detail and is likely
+   to go away in the future, and the Setuptools team will not offer
+   support for compatibility problems arising from packages published with
+   this extra declared. Vendored packages will satisfy the dependencies in
+   the most common isolated build scenarios.
+
 .. note::
 
    Historically this documentation has unnecessarily listed ``wheel``

From ab0822fd0c053f56e97c59b10438a5b25727001d Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 25 Jul 2024 11:57:06 -0400
Subject: [PATCH 0911/1761] Remove redundant packaging aliases in pkg_resources

---
 pkg_resources/__init__.py | 34 +++++++++++++++++-----------------
 1 file changed, 17 insertions(+), 17 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 3c88d8d3d7..0b7ac13b55 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -92,16 +92,16 @@
     # no write support, probably under GAE
     WRITE_SUPPORT = False
 
-import packaging.specifiers
 from jaraco.text import (
     yield_lines,
     drop_comment,
     join_continuation,
 )
-from packaging import markers as _packaging_markers
-from packaging import requirements as _packaging_requirements
-from packaging import utils as _packaging_utils
-from packaging import version as _packaging_version
+import packaging.markers
+import packaging.requirements
+import packaging.specifiers
+import packaging.utils
+import packaging.version
 from platformdirs import user_cache_dir as _user_cache_dir
 
 if TYPE_CHECKING:
@@ -156,7 +156,7 @@ class PEP440Warning(RuntimeWarning):
     """
 
 
-parse_version = _packaging_version.Version
+parse_version = packaging.version.Version
 
 _state_vars: dict[str, str] = {}
 
@@ -801,7 +801,7 @@ def add(
             return
 
         self.by_key[dist.key] = dist
-        normalized_name = _packaging_utils.canonicalize_name(dist.key)
+        normalized_name = packaging.utils.canonicalize_name(dist.key)
         self.normalized_to_canonical_keys[normalized_name] = dist.key
         if dist.key not in keys:
             keys.append(dist.key)
@@ -1561,8 +1561,8 @@ def safe_version(version: str) -> str:
     """
     try:
         # normalize the version
-        return str(_packaging_version.Version(version))
-    except _packaging_version.InvalidVersion:
+        return str(packaging.version.Version(version))
+    except packaging.version.InvalidVersion:
         version = version.replace(' ', '.')
         return re.sub('[^A-Za-z0-9.]+', '-', version)
 
@@ -1639,9 +1639,9 @@ def evaluate_marker(text: str, extra: str | None = None) -> bool:
     This implementation uses the 'pyparsing' module.
     """
     try:
-        marker = _packaging_markers.Marker(text)
+        marker = packaging.markers.Marker(text)
         return marker.evaluate()
-    except _packaging_markers.InvalidMarker as e:
+    except packaging.markers.InvalidMarker as e:
         raise SyntaxError(e) from e
 
 
@@ -3001,12 +3001,12 @@ def parsed_version(self):
         if not hasattr(self, "_parsed_version"):
             try:
                 self._parsed_version = parse_version(self.version)
-            except _packaging_version.InvalidVersion as ex:
+            except packaging.version.InvalidVersion as ex:
                 info = f"(package: {self.project_name})"
                 if hasattr(ex, "add_note"):
                     ex.add_note(info)  # PEP 678
                     raise
-                raise _packaging_version.InvalidVersion(f"{str(ex)} {info}") from None
+                raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None
 
         return self._parsed_version
 
@@ -3014,7 +3014,7 @@ def parsed_version(self):
     def _forgiving_parsed_version(self):
         try:
             return self.parsed_version
-        except _packaging_version.InvalidVersion as ex:
+        except packaging.version.InvalidVersion as ex:
             self._parsed_version = parse_version(_forgiving_version(self.version))
 
             notes = "\n".join(getattr(ex, "__notes__", []))  # PEP 678
@@ -3194,7 +3194,7 @@ def from_filename(
 
     def as_requirement(self):
         """Return a ``Requirement`` that matches this distribution exactly"""
-        if isinstance(self.parsed_version, _packaging_version.Version):
+        if isinstance(self.parsed_version, packaging.version.Version):
             spec = "%s==%s" % (self.project_name, self.parsed_version)
         else:
             spec = "%s===%s" % (self.project_name, self.parsed_version)
@@ -3452,11 +3452,11 @@ def parse_requirements(strs: _NestedStr) -> map[Requirement]:
     return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs))))
 
 
-class RequirementParseError(_packaging_requirements.InvalidRequirement):
+class RequirementParseError(packaging.requirements.InvalidRequirement):
     "Compatibility wrapper for InvalidRequirement"
 
 
-class Requirement(_packaging_requirements.Requirement):
+class Requirement(packaging.requirements.Requirement):
     # prefer variable length tuple to set (as found in
     # packaging.requirements.Requirement)
     extras: tuple[str, ...]  # type: ignore[assignment]

From 5e1b3c414779317bc3e105d9bae82ce70c22dbf9 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 28 Jul 2024 17:06:09 -0400
Subject: [PATCH 0912/1761] =?UTF-8?q?Bump=20version:=2071.1.0=20=E2=86=92?=
 =?UTF-8?q?=2072.0.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/931.removal.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/931.removal.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 486c1c369c..6254b96778 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 71.1.0
+current_version = 72.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 9fbcf41427..fe753cfee7 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v72.0.0
+=======
+
+Deprecations and Removals
+-------------------------
+
+- The test command has been removed. Users relying on 'setup.py test' will need to migrate to another test runner or pin setuptools before this version. (#931)
+
+
 v71.1.0
 =======
 
diff --git a/newsfragments/931.removal.rst b/newsfragments/931.removal.rst
deleted file mode 100644
index cffc7456d5..0000000000
--- a/newsfragments/931.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-The test command has been removed. Users relying on 'setup.py test' will need to migrate to another test runner or pin setuptools before this version.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 41889d9854..10aa918d9a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "71.1.0"
+version = "72.0.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From a6726b95f7a50dc5945e012050f00450c883fdcd Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Jul 2024 10:44:18 -0400
Subject: [PATCH 0913/1761] Add celery and requests to the packages that test
 integration. Ref #4520

---
 setuptools/tests/integration/test_pip_install_sdist.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py
index 17bf2af9d2..ee70b1c286 100644
--- a/setuptools/tests/integration/test_pip_install_sdist.py
+++ b/setuptools/tests/integration/test_pip_install_sdist.py
@@ -54,6 +54,8 @@
     ("pyyaml", LATEST),  # cython + custom build_ext + custom distclass
     ("charset-normalizer", LATEST),  # uses mypyc, used by aiohttp
     ("protobuf", LATEST),
+    ("requests", LATEST),
+    ("celery", LATEST),
     # When adding packages to this list, make sure they expose a `__version__`
     # attribute, or modify the tests below
 ]

From c437aaa8d5b969a9fe8c8147463bfcb85b31ab26 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Jul 2024 10:07:27 -0400
Subject: [PATCH 0914/1761] Restore the tests command and deprecate access to
 the module.

Closes #4520; Closes #4519.
---
 newsfragments/4520.feature.rst |  1 +
 setuptools/command/test.py     | 42 ++++++++++++++++++++++++++++++++++
 2 files changed, 43 insertions(+)
 create mode 100644 newsfragments/4520.feature.rst
 create mode 100644 setuptools/command/test.py

diff --git a/newsfragments/4520.feature.rst b/newsfragments/4520.feature.rst
new file mode 100644
index 0000000000..d221b05aab
--- /dev/null
+++ b/newsfragments/4520.feature.rst
@@ -0,0 +1 @@
+Restore the tests command and deprecate access to the module. (#4519)
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
new file mode 100644
index 0000000000..38e1164c27
--- /dev/null
+++ b/setuptools/command/test.py
@@ -0,0 +1,42 @@
+from setuptools import Command
+from setuptools.warnings import SetuptoolsDeprecationWarning
+
+
+def __getattr__(name):
+    if name == 'test':
+        SetuptoolsDeprecationWarning.emit(
+            "The test command is disabled and references to it are deprecated.",
+            "Please remove any references to `setuptools.command.test` in all "
+            "supported versions of the affected package.",
+            due_date=(2024, 11, 15),
+            stacklevel=2,
+        )
+        return _test
+    raise AttributeError(name)
+
+
+class _test(Command):
+    """
+    Stub to warn when test command is referenced or used.
+    """
+
+    description = "stub for old test command (do not use)"
+
+    user_options = [
+        ('test-module=', 'm', "Run 'test_suite' in specified module"),
+        (
+            'test-suite=',
+            's',
+            "Run single test, case or suite (e.g. 'module.test_suite')",
+        ),
+        ('test-runner=', 'r', "Test runner to use"),
+    ]
+
+    def initialize_options(self):
+        pass
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        raise RuntimeError("Support for the test command was removed in Setuptools 72")

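The stub relies on PEP 562's module-level ``__getattr__``: importing ``setuptools.command.test`` stays silent, and the warning fires only when the ``test`` attribute is actually resolved. A generic sketch of the same deprecate-on-access pattern, using the stdlib ``warnings`` module rather than setuptools' internal warning class:

    # mymodule.py -- hypothetical module demonstrating deprecate-on-access
    import warnings

    class _test:
        """Stand-in for the deprecated object."""

    def __getattr__(name):
        # Called only when `name` is not found by normal module lookup (PEP 562).
        if name == 'test':
            warnings.warn(
                "the test command is deprecated",
                DeprecationWarning,
                stacklevel=2,
            )
            return _test
        raise AttributeError(name)
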
From 441799f8b45a1a01c608db49333403db1b0d7100 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Jul 2024 10:50:22 -0400
Subject: [PATCH 0915/1761] =?UTF-8?q?Bump=20version:=2072.0.0=20=E2=86=92?=
 =?UTF-8?q?=2072.1.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 NEWS.rst                       | 9 +++++++++
 newsfragments/4520.feature.rst | 1 -
 pyproject.toml                 | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4520.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 6254b96778..2b055f2389 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 72.0.0
+current_version = 72.1.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index fe753cfee7..e89dfcba6d 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v72.1.0
+=======
+
+Features
+--------
+
+- Restore the tests command and deprecate access to the module. (#4519) (#4520)
+
+
 v72.0.0
 =======
 
diff --git a/newsfragments/4520.feature.rst b/newsfragments/4520.feature.rst
deleted file mode 100644
index d221b05aab..0000000000
--- a/newsfragments/4520.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restore the tests command and deprecate access to the module. (#4519)
diff --git a/pyproject.toml b/pyproject.toml
index 10aa918d9a..034de18f54 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "72.0.0"
+version = "72.1.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From b57ccedfff09cb307ba7171c21445b89e8940a8d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Jul 2024 12:22:46 -0400
Subject: [PATCH 0916/1761] Reword docs on 'Build system requirement' to use
 imperative voice.

---
 docs/userguide/dependency_management.rst | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/docs/userguide/dependency_management.rst b/docs/userguide/dependency_management.rst
index a2c0c890f3..1a0c3d8037 100644
--- a/docs/userguide/dependency_management.rst
+++ b/docs/userguide/dependency_management.rst
@@ -18,10 +18,11 @@ Build system requirement
 ========================
 
 After organizing all the scripts and files and getting ready for packaging,
-there needs to be a way to specify what programs and libraries are actually needed
-to do the packaging (in our case, ``setuptools`` of course).
-This needs to be specified in your ``pyproject.toml`` file
-(if you have forgot what this is, go to :doc:`/userguide/quickstart` or :doc:`/build_meta`):
+there needs to be a way to specify what programs and libraries (build backend)
+are actually needed to build the package for distribution. For Setuptools, the
+requisite library is ``setuptools``. Specify the build backend in a
+``pyproject.toml`` file (see also :doc:`/userguide/quickstart` or
+:doc:`/build_meta`):
 
 .. code-block:: toml
 
@@ -29,13 +30,13 @@ This needs to be specified in your ``pyproject.toml`` file
     requires = ["setuptools"]
     #...
 
-Please note that you should also include here any other ``setuptools`` plugin
-(e.g., :pypi:`setuptools-scm`, :pypi:`setuptools-golang`, :pypi:`setuptools-rust`)
+Please note also to include any other ``setuptools`` plugins
+(e.g., :pypi:`setuptools_scm`, :pypi:`setuptools-golang`, :pypi:`setuptools-rust`)
 or build-time dependency (e.g., :pypi:`Cython`, :pypi:`cppy`, :pypi:`pybind11`).
 
 .. note::
     In previous versions of ``setuptools``,
-    this used to be accomplished with the ``setup_requires`` keyword but is
+    the ``setup_requires`` keyword performed a similar function but is
     now considered deprecated in favor of the :pep:`517` style described above.
     To peek into how this legacy keyword is used, consult our :doc:`guide on
     deprecated practice (WIP) `.

From b688433ed2f0053db6e6fe4a795c17364097f67a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 29 Jul 2024 12:39:57 -0400
Subject: [PATCH 0917/1761] Add guidance on pinning the upper bound in
 build-system.requires.

Closes #4521
---
 docs/userguide/dependency_management.rst | 33 +++++++++++++++++++++++-
 1 file changed, 32 insertions(+), 1 deletion(-)

diff --git a/docs/userguide/dependency_management.rst b/docs/userguide/dependency_management.rst
index 1a0c3d8037..b61405eb8b 100644
--- a/docs/userguide/dependency_management.rst
+++ b/docs/userguide/dependency_management.rst
@@ -30,10 +30,41 @@ requisite library is ``setuptools``. Specify the build backend in a
     requires = ["setuptools"]
     #...
 
-Please note also to include any other ``setuptools`` plugins
+Also include any other ``setuptools`` plugins
 (e.g., :pypi:`setuptools_scm`, :pypi:`setuptools-golang`, :pypi:`setuptools-rust`)
 or build-time dependency (e.g., :pypi:`Cython`, :pypi:`cppy`, :pypi:`pybind11`).
 
+.. code-block:: toml
+
+    [build-system]
+    requires = ["setuptools", "cython", "setuptools_scm"]
+
+
+If the project depends on a feature introduced in a specific version of Setuptools,
+it is good practice to specify it as a lower bound:
+
+.. code-block:: toml
+
+    [build-system]
+    requires = ["setuptools >= 61.2"]
+
+Some may be tempted to also include an upper-bound for yet unreleased major
+versions (e.g. ``setuptools <= 70``) or pin to a specific version (e.g.
+``setuptools == 70.0.4``) in order to avoid the project being uninstallable
+should those backward-incompatible changes affect this release of the project.
+Setuptools maintainers recommend strongly against this precautionary approach.
+The team primarily maintains one release, the latest monotonically-increasing
+release, and encourages users to use that latest release (work at HEAD). As a
+result, the team is cognizant of and takes responsibility for making
+backward-incompatible changes and aims to mitigate the impact of any breaking
+changes prior to releasing that change. By pinning against an unreleased
+version, it causes toil (maintenance burden) for each and every project that
+does the pinning (and the consumers that use it) and increases the risk of
+erosion if maintenance is unsustained. This tradeoff between reproducibility
+and compatibility is especially stark because Setuptools frequently releases
+backward-incompatible releases for a variety of reasons, many which won't
+affect a given project.
+
 .. note::
     In previous versions of ``setuptools``,
     the ``setup_requires`` keyword performed a similar function but is

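To make the bound tradeoff concrete, here is a small sketch using the :pypi:`packaging` library (chosen for illustration; pip's resolver applies the same specifier semantics):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    lower_only = SpecifierSet(">=61.2")
    assert Version("72.1.0") in lower_only      # future releases stay installable
    assert Version("61.1") not in lower_only    # the required feature is guaranteed

    capped = SpecifierSet(">=61.2,<=70")
    assert Version("72.1.0") not in capped      # the cap excludes every later release

The lower bound encodes a fact about the project (it needs a feature from 61.2); the cap encodes a guess about the future, which is why the guidance above recommends against it.
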
From 8192dfca15616c940070b12527a6ac0565ddbe8c Mon Sep 17 00:00:00 2001
From: connortann <71127464+connortann@users.noreply.github.com>
Date: Tue, 30 Jul 2024 10:24:08 +0100
Subject: [PATCH 0918/1761] [Docs] Minor grammar fix in
 dependency_management.rst

---
 docs/userguide/dependency_management.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/dependency_management.rst b/docs/userguide/dependency_management.rst
index b61405eb8b..38ca7bdff7 100644
--- a/docs/userguide/dependency_management.rst
+++ b/docs/userguide/dependency_management.rst
@@ -62,7 +62,7 @@ version, it causes toil (maintenance burden) for each and every project that
 does the pinning (and the consumers that use it) and increases the risk of
 erosion if maintenance is unsustained. This tradeoff between reproducibility
 and compatibility is especially stark because Setuptools frequently releases
-backward-incompatible releases for a variety of reasons, many which won't
+backward-incompatible releases for a variety of reasons, many of which won't
 affect a given project.
 
 .. note::

From 54936c73b07ff74ce16370957550ad6423bb21ba Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 31 Jul 2024 14:37:44 -0400
Subject: [PATCH 0919/1761] Pin towncrier in docs build.

Closes #4533; Ref sphinx-contrib/sphinxcontrib-towncrier#92
---
 pyproject.toml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index 034de18f54..0c7d5e752a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -102,6 +102,9 @@ doc = [
 
 	# workaround for pypa/setuptools#4333
 	"pyproject-hooks!=1.1",
+
+	# workaround for sphinx-contrib/sphinxcontrib-towncrier#92
+	"towncrier<24.7",
 ]
 ssl = []
 certs = []

From 80d391c3ef5de7fef07d7a9b81b9fa6658c2d32b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 31 Jul 2024 20:13:58 -0400
Subject: [PATCH 0920/1761] Changed the import of ``ctypes.wintypes`` from
 ``__import__`` to a regular ``import`` statement

---
 newsfragments/4534.misc.rst   | 1 +
 setuptools/windows_support.py | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)
 create mode 100644 newsfragments/4534.misc.rst

diff --git a/newsfragments/4534.misc.rst b/newsfragments/4534.misc.rst
new file mode 100644
index 0000000000..f7c1a1d314
--- /dev/null
+++ b/newsfragments/4534.misc.rst
@@ -0,0 +1 @@
+Changed the import of ``ctypes.wintypes`` from ``__import__`` to a regular ``import`` statement -- by :user:`Avasam`
diff --git a/setuptools/windows_support.py b/setuptools/windows_support.py
index 8299ac1cdf..7a2b53a291 100644
--- a/setuptools/windows_support.py
+++ b/setuptools/windows_support.py
@@ -8,7 +8,7 @@ def windows_only(func):
 
 
 @windows_only
-def hide_file(path):
+def hide_file(path: str) -> None:
     """
     Set the hidden attribute on a file or directory.
 
@@ -17,8 +17,8 @@ def hide_file(path):
     `path` must be text.
     """
     import ctypes
+    import ctypes.wintypes
 
-    __import__('ctypes.wintypes')
     SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
     SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
     SetFileAttributes.restype = ctypes.wintypes.BOOL

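The replaced ``__import__('ctypes.wintypes')`` worked because importing a submodule binds it as an attribute of the already-imported ``ctypes`` package; the regular ``import`` statement performs exactly the same binding, just legibly. A hedged, Windows-only sketch of the resulting call shape (``FILE_ATTRIBUTE_HIDDEN`` is the Win32 flag value, spelled out here for illustration):

    import ctypes
    import ctypes.wintypes  # binds the submodule as the attribute ctypes.wintypes

    FILE_ATTRIBUTE_HIDDEN = 0x02  # Win32 file-attribute flag

    def hide(path: str) -> None:
        SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
        SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
        SetFileAttributes.restype = ctypes.wintypes.BOOL
        if not SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN):
            raise ctypes.WinError()
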
From 9228ad687a9b514898ca4583c94db83e0c354623 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 17 May 2024 13:26:50 +0100
Subject: [PATCH 0921/1761] Use local schemas to define validation

---
 setuptools/config/_validate_pyproject/NOTICE |   2 +-
 setuptools/config/distutils.schema.json      |  26 ++
 setuptools/config/setuptools.schema.json     | 352 +++++++++++++++++++
 tools/generate_validation_code.py            |  12 +-
 tox.ini                                      |   3 +-
 5 files changed, 389 insertions(+), 6 deletions(-)
 create mode 100644 setuptools/config/distutils.schema.json
 create mode 100644 setuptools/config/setuptools.schema.json

diff --git a/setuptools/config/_validate_pyproject/NOTICE b/setuptools/config/_validate_pyproject/NOTICE
index 121ba5fd22..74e8821fc8 100644
--- a/setuptools/config/_validate_pyproject/NOTICE
+++ b/setuptools/config/_validate_pyproject/NOTICE
@@ -1,7 +1,7 @@
 The code contained in this directory was automatically generated using the
 following command:
 
-    python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose
+    python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose -t distutils=setuptools/config/distutils.schema.json -t setuptools=setuptools/config/setuptools.schema.json
 
 Please avoid changing it manually.
 
diff --git a/setuptools/config/distutils.schema.json b/setuptools/config/distutils.schema.json
new file mode 100644
index 0000000000..93cd2e868a
--- /dev/null
+++ b/setuptools/config/distutils.schema.json
@@ -0,0 +1,26 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+
+  "$id": "https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html",
+  "title": "``tool.distutils`` table",
+  "$$description": [
+    "**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``",
+    "subtables to configure arguments for ``distutils`` commands.",
+    "Originally, ``distutils`` allowed developers to configure arguments for",
+    "``setup.py`` commands via `distutils configuration files",
+    "`_.",
+    "See also `the old Python docs _`."
+  ],
+
+  "type": "object",
+  "properties": {
+    "global": {
+      "type": "object",
+      "description": "Global options applied to all ``distutils`` commands"
+    }
+  },
+  "patternProperties": {
+    ".+": {"type": "object"}
+  },
+  "$comment": "TODO: Is there a practical way of making this schema more specific?"
+}
diff --git a/setuptools/config/setuptools.schema.json b/setuptools/config/setuptools.schema.json
new file mode 100644
index 0000000000..adc203ddb0
--- /dev/null
+++ b/setuptools/config/setuptools.schema.json
@@ -0,0 +1,352 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+
+  "$id": "https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html",
+  "title": "``tool.setuptools`` table",
+  "$$description": [
+    "``setuptools``-specific configurations that can be set by users that require",
+    "customization.",
+    "These configurations are completely optional and probably can be skipped when",
+    "creating simple packages. They are equivalent to some of the `Keywords",
+    "`_",
+    "used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.",
+    "It considers only ``setuptools`` `parameters",
+    "`_",
+    "that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``",
+    "and ``setup_requires`` (incompatible with modern workflows/standards)."
+  ],
+
+  "type": "object",
+  "additionalProperties": false,
+  "properties": {
+    "platforms": {
+      "type": "array",
+      "items": {"type": "string"}
+    },
+    "provides": {
+      "$$description": [
+        "Package and virtual package names contained within this package",
+        "**(not supported by pip)**"
+      ],
+      "type": "array",
+      "items": {"type": "string", "format": "pep508-identifier"}
+    },
+    "obsoletes": {
+      "$$description": [
+        "Packages which this package renders obsolete",
+        "**(not supported by pip)**"
+      ],
+      "type": "array",
+      "items": {"type": "string", "format": "pep508-identifier"}
+    },
+    "zip-safe": {
+      "$$description": [
+        "Whether the project can be safely installed and run from a zip file.",
+        "**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
+        "``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
+      ],
+      "type": "boolean"
+    },
+    "script-files": {
+      "$$description": [
+        "Legacy way of defining scripts (entry-points are preferred).",
+        "Equivalent to the ``script`` keyword in ``setup.py``",
+        "(it was renamed to avoid confusion with entry-point based ``project.scripts``",
+        "defined in :pep:`621`).",
+        "**DISCOURAGED**: generic script wrappers are tricky and may not work properly.",
+        "Whenever possible, please use ``project.scripts`` instead."
+      ],
+      "type": "array",
+      "items": {"type": "string"},
+      "$comment": "TODO: is this field deprecated/should be removed?"
+    },
+    "eager-resources": {
+      "$$description": [
+        "Resources that should be extracted together, if any of them is needed,",
+        "or if any C extensions included in the project are imported.",
+        "**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
+        "``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
+      ],
+      "type": "array",
+      "items": {"type": "string"}
+    },
+    "packages": {
+      "$$description": [
+        "Packages that should be included in the distribution.",
+        "It can be given either as a list of package identifiers",
+        "or as a ``dict``-like structure with a single key ``find``",
+        "which corresponds to a dynamic call to",
+        "``setuptools.config.expand.find_packages`` function.",
+        "The ``find`` key is associated with a nested ``dict``-like structure that can",
+        "contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,",
+        "mimicking the keyword arguments of the associated function."
+      ],
+      "oneOf": [
+        {
+          "title": "Array of Python package identifiers",
+          "type": "array",
+          "items": {"$ref": "#/definitions/package-name"}
+        },
+        {"$ref": "#/definitions/find-directive"}
+      ]
+    },
+    "package-dir": {
+      "$$description": [
+        ":class:`dict`-like structure mapping from package names to directories where their",
+        "code can be found.",
+        "The empty string (as key) means that all packages are contained inside",
+        "the given directory will be included in the distribution."
+      ],
+      "type": "object",
+      "additionalProperties": false,
+      "propertyNames": {
+        "anyOf": [{"const": ""}, {"$ref": "#/definitions/package-name"}]
+      },
+      "patternProperties": {
+        "^.*$": {"type": "string" }
+      }
+    },
+    "package-data": {
+      "$$description": [
+        "Mapping from package names to lists of glob patterns.",
+        "Usually this option is not needed when using ``include-package-data = true``",
+        "For more information on how to include data files, check ``setuptools`` `docs",
+        "`_."
+      ],
+      "type": "object",
+      "additionalProperties": false,
+      "propertyNames": {
+        "anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
+      },
+      "patternProperties": {
+        "^.*$": {"type": "array", "items": {"type": "string"}}
+      }
+    },
+    "include-package-data": {
+      "$$description": [
+        "Automatically include any data files inside the package directories",
+        "that are specified by ``MANIFEST.in``",
+        "For more information on how to include data files, check ``setuptools`` `docs",
+        "`_."
+      ],
+      "type": "boolean"
+    },
+    "exclude-package-data": {
+      "$$description": [
+        "Mapping from package names to lists of glob patterns that should be excluded",
+        "For more information on how to include data files, check ``setuptools`` `docs",
+        "`_."
+      ],
+      "type": "object",
+      "additionalProperties": false,
+      "propertyNames": {
+        "anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
+      },
+      "patternProperties": {
+          "^.*$": {"type": "array", "items": {"type": "string"}}
+      }
+    },
+    "namespace-packages": {
+      "type": "array",
+      "items": {"type": "string", "format": "python-module-name"},
+      "$comment": "https://setuptools.pypa.io/en/latest/userguide/package_discovery.html",
+      "description": "**DEPRECATED**: use implicit namespaces instead (:pep:`420`)."
+    },
+    "py-modules": {
+      "description": "Modules that setuptools will manipulate",
+      "type": "array",
+      "items": {"type": "string", "format": "python-module-name"},
+      "$comment": "TODO: clarify the relationship with ``packages``"
+    },
+    "data-files": {
+      "$$description": [
+        "``dict``-like structure where each key represents a directory and",
+        "the value is a list of glob patterns that should be installed in them.",
+        "**DISCOURAGED**: please notice this might not work as expected with wheels.",
+        "Whenever possible, consider using data files inside the package directories",
+        "(or create a new namespace package that only contains data files).",
+        "See `data files support",
+        "`_."
+      ],
+      "type": "object",
+      "patternProperties": {
+          "^.*$": {"type": "array", "items": {"type": "string"}}
+      }
+    },
+    "cmdclass": {
+      "$$description": [
+        "Mapping of distutils-style command names to ``setuptools.Command`` subclasses",
+        "which in turn should be represented by strings with a qualified class name",
+        "(i.e., \"dotted\" form with module), e.g.::\n\n",
+        "    cmdclass = {mycmd = \"pkg.subpkg.module.CommandClass\"}\n\n",
+        "The command class should be a directly defined at the top-level of the",
+        "containing module (no class nesting)."
+      ],
+      "type": "object",
+      "patternProperties": {
+          "^.*$": {"type": "string", "format": "python-qualified-identifier"}
+      }
+    },
+    "license-files": {
+      "type": "array",
+      "items": {"type": "string"},
+      "$$description": [
+        "**PROVISIONAL**: list of glob patterns for all license files being distributed.",
+        "(likely to become standard with :pep:`639`).",
+        "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"
+      ],
+      "$comment": "TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?"
+    },
+    "dynamic": {
+      "type": "object",
+      "description": "Instructions for loading :pep:`621`-related metadata dynamically",
+      "additionalProperties": false,
+      "properties": {
+        "version": {
+          "$$description": [
+            "A version dynamically loaded via either the ``attr:`` or ``file:``",
+            "directives. Please make sure the given file or attribute respects :pep:`440`.",
+            "Also ensure to set ``project.dynamic`` accordingly."
+          ],
+          "oneOf": [
+            {"$ref": "#/definitions/attr-directive"},
+            {"$ref": "#/definitions/file-directive"}
+          ]
+        },
+        "classifiers": {"$ref": "#/definitions/file-directive"},
+        "description": {"$ref": "#/definitions/file-directive"},
+        "entry-points": {"$ref": "#/definitions/file-directive"},
+        "dependencies": {"$ref": "#/definitions/file-directive-for-dependencies"},
+        "optional-dependencies": {
+          "type": "object",
+          "propertyNames": {"type": "string", "format": "pep508-identifier"},
+          "additionalProperties": false,
+          "patternProperties": {
+            ".+": {"$ref": "#/definitions/file-directive-for-dependencies"}
+          }
+        },
+        "readme": {
+          "type": "object",
+          "anyOf": [
+            {"$ref": "#/definitions/file-directive"},
+            {
+              "type": "object",
+              "properties": {
+                "content-type": {"type": "string"},
+                "file": { "$ref": "#/definitions/file-directive/properties/file" }
+              },
+              "additionalProperties": false}
+          ],
+          "required": ["file"]
+        }
+      }
+    }
+  },
+
+  "definitions": {
+    "package-name": {
+      "$id": "#/definitions/package-name",
+      "title": "Valid package name",
+      "description": "Valid package name (importable or :pep:`561`).",
+      "type": "string",
+      "anyOf": [
+        {"type": "string", "format": "python-module-name"},
+        {"type": "string", "format": "pep561-stub-name"}
+      ]
+    },
+    "file-directive": {
+      "$id": "#/definitions/file-directive",
+      "title": "'file:' directive",
+      "description":
+        "Value is read from a file (or list of files and then concatenated)",
+      "type": "object",
+      "additionalProperties": false,
+      "properties": {
+        "file": {
+          "oneOf": [
+            {"type": "string"},
+            {"type": "array", "items": {"type": "string"}}
+          ]
+        }
+      },
+      "required": ["file"]
+    },
+    "file-directive-for-dependencies": {
+      "title": "'file:' directive for dependencies",
+      "allOf": [
+        {
+          "$$description": [
+            "**BETA**: subset of the ``requirements.txt`` format",
+            "without ``pip`` flags and options",
+            "(one :pep:`508`-compliant string per line,",
+            "lines that are blank or start with ``#`` are excluded).",
+            "See `dynamic metadata",
+            "`_."
+          ]
+        },
+        {"$ref": "#/definitions/file-directive"}
+      ]
+    },
+    "attr-directive": {
+      "title": "'attr:' directive",
+      "$id": "#/definitions/attr-directive",
+      "$$description": [
+        "Value is read from a module attribute. Supports callables and iterables;",
+        "unsupported types are cast via ``str()``"
+      ],
+      "type": "object",
+      "additionalProperties": false,
+      "properties": {
+        "attr": {"type": "string", "format": "python-qualified-identifier"}
+      },
+      "required": ["attr"]
+    },
+    "find-directive": {
+      "$id": "#/definitions/find-directive",
+      "title": "'find:' directive",
+      "type": "object",
+      "additionalProperties": false,
+      "properties": {
+        "find": {
+          "type": "object",
+          "$$description": [
+            "Dynamic `package discovery",
+            "`_."
+          ],
+          "additionalProperties": false,
+          "properties": {
+            "where": {
+              "description":
+                "Directories to be searched for packages (Unix-style relative path)",
+              "type": "array",
+              "items": {"type": "string"}
+            },
+            "exclude": {
+              "type": "array",
+              "$$description": [
+                "Exclude packages that match the values listed in this field.",
+                "Can container shell-style wildcards (e.g. ``'pkg.*'``)"
+              ],
+              "items": {"type": "string"}
+            },
+            "include": {
+              "type": "array",
+              "$$description": [
+                "Restrict the found packages to just the ones listed in this field.",
+                "Can container shell-style wildcards (e.g. ``'pkg.*'``)"
+              ],
+              "items": {"type": "string"}
+            },
+            "namespaces": {
+              "type": "boolean",
+              "$$description": [
+                "When ``True``, directories without a ``__init__.py`` file will also",
+                "be scanned for :pep:`420`-style implicit namespaces"
+              ]
+            }
+          }
+        }
+      }
+    }
+  }
+}
diff --git a/tools/generate_validation_code.py b/tools/generate_validation_code.py
index ca933e8f3b..fcf4f5392c 100644
--- a/tools/generate_validation_code.py
+++ b/tools/generate_validation_code.py
@@ -1,17 +1,17 @@
 from __future__ import annotations
 
-from os import PathLike
+import itertools
 import subprocess
 import sys
-
 from pathlib import Path
 
 
-def generate_pyproject_validation(dest: str | PathLike[str]):
+def generate_pyproject_validation(dest: Path, schemas: Path):
     """
     Generates validation code for ``pyproject.toml`` based on JSON schemas and the
     ``validate-pyproject`` library.
     """
+    schema_args = (("-t", f"{f.name.partition('.')[0]}={f}") for f in schemas)
     cmd = [
         sys.executable,
         "-m",
@@ -21,13 +21,17 @@ def generate_pyproject_validation(dest: str | PathLike[str]):
         "setuptools",
         "distutils",
         "--very-verbose",
+        *itertools.chain.from_iterable(schema_args),
     ]
     subprocess.check_call(cmd)
     print(f"Validation code generated at: {dest}")
 
 
 def main():
-    generate_pyproject_validation(Path("setuptools/config/_validate_pyproject"))
+    generate_pyproject_validation(
+        Path("setuptools/config/_validate_pyproject"),
+        schemas=Path("setuptools/config").glob("*.schema.json"),
+    )
 
 
 __name__ == '__main__' and main()
diff --git a/tox.ini b/tox.ini
index f457ff1fee..8320a3814a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -84,7 +84,8 @@ commands =
 [testenv:generate-validation-code]
 skip_install = True
 deps =
-	validate-pyproject[all]==0.17
+	# validate-pyproject[all]==0.17
+	validate-pyproject[all] @ https://github.com/abravalheri/validate-pyproject/archive/refs/heads/allow-overwrite.zip
 commands =
 	python -m tools.generate_validation_code
 

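The ``schema_args`` generator in ``tools/generate_validation_code.py`` turns each ``*.schema.json`` file into a ``-t name=path`` pair, taking the plugin name from everything before the first dot of the filename; that is how the two ``-t`` options in the regenerated NOTICE come about. A quick stand-alone check of that transformation (paths are hypothetical):

    import itertools
    from pathlib import Path

    schemas = [
        Path("setuptools/config/distutils.schema.json"),
        Path("setuptools/config/setuptools.schema.json"),
    ]
    schema_args = (("-t", f"{f.name.partition('.')[0]}={f}") for f in schemas)
    flat = list(itertools.chain.from_iterable(schema_args))
    # ['-t', 'distutils=setuptools/config/distutils.schema.json',
    #  '-t', 'setuptools=setuptools/config/setuptools.schema.json']
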
From b1386b03b57ab0da05b8131f65856fd17cb9b805 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 17 May 2024 14:13:16 +0100
Subject: [PATCH 0922/1761] Fix typing errors

---
 tools/generate_validation_code.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/tools/generate_validation_code.py b/tools/generate_validation_code.py
index fcf4f5392c..82c7f011d9 100644
--- a/tools/generate_validation_code.py
+++ b/tools/generate_validation_code.py
@@ -4,9 +4,10 @@
 import subprocess
 import sys
 from pathlib import Path
+from typing import Iterable
 
 
-def generate_pyproject_validation(dest: Path, schemas: Path):
+def generate_pyproject_validation(dest: Path, schemas: Iterable[Path]) -> bool:
     """
     Generates validation code for ``pyproject.toml`` based on JSON schemas and the
     ``validate-pyproject`` library.
@@ -25,10 +26,11 @@ def generate_pyproject_validation(dest: Path, schemas: Path):
     ]
     subprocess.check_call(cmd)
     print(f"Validation code generated at: {dest}")
+    return True
 
 
-def main():
-    generate_pyproject_validation(
+def main() -> bool:
+    return generate_pyproject_validation(
         Path("setuptools/config/_validate_pyproject"),
         schemas=Path("setuptools/config").glob("*.schema.json"),
     )

From 47ee01b46f57ca1008b7d4c02b6fbff07631f299 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 17 May 2024 15:50:13 +0100
Subject: [PATCH 0923/1761] Add missing license information

---
 setuptools/config/NOTICE | 10 ++++++++++
 1 file changed, 10 insertions(+)
 create mode 100644 setuptools/config/NOTICE

diff --git a/setuptools/config/NOTICE b/setuptools/config/NOTICE
new file mode 100644
index 0000000000..01864511b0
--- /dev/null
+++ b/setuptools/config/NOTICE
@@ -0,0 +1,10 @@
+The following files include code from opensource projects
+(either as direct copies or modified versions):
+
+- `setuptools.schema.json`, `distutils.schema.json`:
+    - project: `validate-pyproject` - licensed under MPL-2.0
+      (https://github.com/abravalheri/validate-pyproject):
+
+      This Source Code Form is subject to the terms of the Mozilla Public
+      License, v. 2.0. If a copy of the MPL was not distributed with this file,
+      You can obtain one at https://mozilla.org/MPL/2.0/.

From 8120448416651da6c224c5c36a620cc39381fc8e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 May 2024 10:31:45 +0100
Subject: [PATCH 0924/1761] Use updated validate-pyproject

---
 tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index 8320a3814a..4b63c8b967 100644
--- a/tox.ini
+++ b/tox.ini
@@ -84,7 +84,7 @@ commands =
 [testenv:generate-validation-code]
 skip_install = True
 deps =
-	# validate-pyproject[all]==0.17
+	# validate-pyproject[all]==0.18
 	validate-pyproject[all] @ https://github.com/abravalheri/validate-pyproject/archive/refs/heads/allow-overwrite.zip
 commands =
 	python -m tools.generate_validation_code

From a78df652f25242fb74697ea53340827576fb6f81 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 May 2024 10:42:22 +0100
Subject: [PATCH 0925/1761] Ensure license files are included in the
 distributions

---
 MANIFEST.in | 2 +-
 setup.py    | 7 ++++---
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/MANIFEST.in b/MANIFEST.in
index 092612cb21..f3ed8c6dc4 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -10,7 +10,7 @@ recursive-include newsfragments *
 include *.py
 include *.rst
 include MANIFEST.in
-include LICENSE
+global-include LICEN[CS]E* COPYING* NOTICE* AUTHORS*
 include launcher.c
 include msvc-build-launcher.cmd
 include mypy.ini
diff --git a/setup.py b/setup.py
index 542edaea68..1cd9e36c15 100755
--- a/setup.py
+++ b/setup.py
@@ -10,9 +10,10 @@
 here = os.path.dirname(__file__)
 
 
-package_data = dict(
-    setuptools=['script (dev).tmpl', 'script.tmpl', 'site-patch.py'],
-)
+package_data = {
+    "": ["LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*"],
+    "setuptools": ['script (dev).tmpl', 'script.tmpl', 'site-patch.py'],
+}
 
 force_windows_specific_files = os.environ.get(
     "SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES", "1"

From 1d2fd37368fc2381e21cd5466aeb222fc11db2db Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 May 2024 11:17:45 +0100
Subject: [PATCH 0926/1761] Add schemas to sdist

---
 MANIFEST.in | 1 +
 1 file changed, 1 insertion(+)

diff --git a/MANIFEST.in b/MANIFEST.in
index f3ed8c6dc4..0643e7ee2d 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -17,4 +17,5 @@ include mypy.ini
 include pytest.ini
 include tox.ini
 include setuptools/tests/config/setupcfg_examples.txt
+include setuptools/config/*.schema.json
 global-exclude *.py[cod] __pycache__

From d6601bada764bdaa5c4e4e4e83f0cb3c34adc013 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 1 Aug 2024 11:12:54 +0100
Subject: [PATCH 0927/1761] Use stable version of validate-pyproject

---
 .../config/_validate_pyproject/fastjsonschema_validations.py   | 2 +-
 setuptools/config/_validate_pyproject/formats.py               | 2 +-
 tox.ini                                                        | 3 +--
 3 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
index 1c58a55ea8..1f6c971e0b 100644
--- a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
+++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
@@ -10,7 +10,7 @@
 # *** PLEASE DO NOT MODIFY DIRECTLY: Automatically generated code *** 
 
 
-VERSION = "2.19.1"
+VERSION = "2.20.0"
 from decimal import Decimal
 import re
 from .fastjsonschema_exceptions import JsonSchemaValueException
diff --git a/setuptools/config/_validate_pyproject/formats.py b/setuptools/config/_validate_pyproject/formats.py
index aacf4092b0..5a0599cbb5 100644
--- a/setuptools/config/_validate_pyproject/formats.py
+++ b/setuptools/config/_validate_pyproject/formats.py
@@ -91,9 +91,9 @@ def pep508(value: str) -> bool:
         """
         try:
             _req.Requirement(value)
+            return True
         except _req.InvalidRequirement:
             return False
-        return True
 
 except ImportError:  # pragma: no cover
     _logger.warning(
diff --git a/tox.ini b/tox.ini
index 4b63c8b967..bc0540b0d4 100644
--- a/tox.ini
+++ b/tox.ini
@@ -84,8 +84,7 @@ commands =
 [testenv:generate-validation-code]
 skip_install = True
 deps =
-	# validate-pyproject[all]==0.18
-	validate-pyproject[all] @ https://github.com/abravalheri/validate-pyproject/archive/refs/heads/allow-overwrite.zip
+	validate-pyproject[all]==0.18
 commands =
 	python -m tools.generate_validation_code
 

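The ``formats.py`` tweak moves ``return True`` inside the ``try`` block; behavior is unchanged, but the success path now sits next to the call that can fail. The same validator shape written stand-alone against :pypi:`packaging` (an assumption for the sketch; the generated module guards this import behind a fallback):

    from packaging.requirements import InvalidRequirement, Requirement

    def pep508(value: str) -> bool:
        """Return True if *value* is a valid PEP 508 requirement string."""
        try:
            Requirement(value)
            return True
        except InvalidRequirement:
            return False

    assert pep508("requests>=2.0; python_version >= '3.8'")
    assert not pep508("not a valid requirement!!!")
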
From 4b98bf009287262413a634250921b7f791e7610e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 15 Jul 2024 11:53:38 -0400
Subject: [PATCH 0928/1761] Prefer f-strings

---
 distutils/tests/test_unixccompiler.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index afd08bbb87..42b98dfe08 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -321,15 +321,15 @@ def test_find_library_file(self, monkeypatch):
         compiler._library_root = lambda dir: dir
         monkeypatch.setattr(os.path, 'exists', lambda d: 'existing' in d)
 
-        dylibname = 'libabc.dylib' if sys.platform != 'cygwin' else 'cygabc.dll'
+        libname = 'libabc.dylib' if sys.platform != 'cygwin' else 'cygabc.dll'
         dirs = ('/foo/bar/missing', '/foo/bar/existing')
         assert (
             compiler.find_library_file(dirs, 'abc').replace('\\', '/')
-            == '/foo/bar/existing/' + dylibname
+            == f'/foo/bar/existing/{libname}'
         )
         assert (
             compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
-            == '/foo/bar/existing/' + dylibname
+            == f'/foo/bar/existing/{libname}'
         )
 
         monkeypatch.setattr(

From f350a98aaf9f800859f8e83422ff056157beec5d Mon Sep 17 00:00:00 2001
From: Michael Carlstrom 
Date: Thu, 18 Jul 2024 22:58:46 -0400
Subject: [PATCH 0929/1761] Allow path objects

---
 distutils/command/install_data.py    | 11 +++++++++++
 distutils/tests/test_install_data.py | 26 +++++++++++++++++---------
 2 files changed, 28 insertions(+), 9 deletions(-)

diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index 624c0b901b..6b7c9aefd9 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -6,6 +6,7 @@
 # contributed by Bastian Kleineidam
 
 import os
+from pathlib import Path
 
 from ..core import Command
 from ..util import change_root, convert_path
@@ -56,6 +57,16 @@ def run(self):
                     )
                 (out, _) = self.copy_file(f, self.install_dir)
                 self.outfiles.append(out)
+            elif isinstance(f, Path):
+                # it's a simple file, so copy it
+                f = convert_path(str(f))
+                if self.warn_dir:
+                    self.warn(
+                        "setup script did not provide a directory for "
+                        f"'{f}' -- installing right in '{self.install_dir}'"
+                    )
+                (out, _) = self.copy_file(f, self.install_dir)
+                self.outfiles.append(out)
             else:
                 # it's a tuple with path to install to and a list of files
                 dir = convert_path(f[0])
diff --git a/distutils/tests/test_install_data.py b/distutils/tests/test_install_data.py
index f34070b10b..43fd98ed6e 100644
--- a/distutils/tests/test_install_data.py
+++ b/distutils/tests/test_install_data.py
@@ -1,6 +1,7 @@
 """Tests for distutils.command.install_data."""
 
 import os
+from pathlib import Path
 from distutils.command.install_data import install_data
 from distutils.tests import support
 
@@ -18,22 +19,27 @@ def test_simple_run(self):
 
         # data_files can contain
         #  - simple files
+        #  - a Path object
         #  - a tuple with a path, and a list of file
         one = os.path.join(pkg_dir, 'one')
         self.write_file(one, 'xxx')
         inst2 = os.path.join(pkg_dir, 'inst2')
         two = os.path.join(pkg_dir, 'two')
         self.write_file(two, 'xxx')
+        three = Path(pkg_dir) / 'three'
+        self.write_file(three, 'xxx')
 
-        cmd.data_files = [one, (inst2, [two])]
-        assert cmd.get_inputs() == [one, (inst2, [two])]
+        cmd.data_files = [one, (inst2, [two]), three]
+        assert cmd.get_inputs() == [one, (inst2, [two]), three]
 
         # let's run the command
         cmd.ensure_finalized()
         cmd.run()
 
         # let's check the result
-        assert len(cmd.get_outputs()) == 2
+        assert len(cmd.get_outputs()) == 3
+        rthree = os.path.split(three)[-1]
+        assert os.path.exists(os.path.join(inst, rthree))
         rtwo = os.path.split(two)[-1]
         assert os.path.exists(os.path.join(inst2, rtwo))
         rone = os.path.split(one)[-1]
@@ -46,21 +52,23 @@ def test_simple_run(self):
         cmd.run()
 
         # let's check the result
-        assert len(cmd.get_outputs()) == 2
+        assert len(cmd.get_outputs()) == 3
+        assert os.path.exists(os.path.join(inst, rthree))
         assert os.path.exists(os.path.join(inst2, rtwo))
         assert os.path.exists(os.path.join(inst, rone))
         cmd.outfiles = []
 
         # now using root and empty dir
         cmd.root = os.path.join(pkg_dir, 'root')
-        inst4 = os.path.join(pkg_dir, 'inst4')
-        three = os.path.join(cmd.install_dir, 'three')
-        self.write_file(three, 'xx')
-        cmd.data_files = [one, (inst2, [two]), ('inst3', [three]), (inst4, [])]
+        inst5 = os.path.join(pkg_dir, 'inst5')
+        four = os.path.join(cmd.install_dir, 'four')
+        self.write_file(four, 'xx')
+        cmd.data_files = [one, (inst2, [two]), ('inst5', [four]), (inst5, [])]
         cmd.ensure_finalized()
         cmd.run()
 
         # let's check the result
-        assert len(cmd.get_outputs()) == 4
+        assert len(cmd.get_outputs()) == 5
+        assert os.path.exists(os.path.join(inst, rthree))
         assert os.path.exists(os.path.join(inst2, rtwo))
         assert os.path.exists(os.path.join(inst, rone))

From 202ff81fb5adcbde9cf990f27a6c9fbdc4ad7602 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 15:17:08 -0400
Subject: [PATCH 0930/1761] Need to include 'three' in the input.

---
 distutils/tests/test_install_data.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/tests/test_install_data.py b/distutils/tests/test_install_data.py
index 43fd98ed6e..e340c75800 100644
--- a/distutils/tests/test_install_data.py
+++ b/distutils/tests/test_install_data.py
@@ -63,7 +63,7 @@ def test_simple_run(self):
         inst5 = os.path.join(pkg_dir, 'inst5')
         four = os.path.join(cmd.install_dir, 'four')
         self.write_file(four, 'xx')
-        cmd.data_files = [one, (inst2, [two]), ('inst5', [four]), (inst5, [])]
+        cmd.data_files = [one, (inst2, [two]), three, ('inst5', [four]), (inst5, [])]
         cmd.ensure_finalized()
         cmd.run()
 

From c2d9fba3913a4394c9222199eb8518197df2ff06 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 15:25:43 -0400
Subject: [PATCH 0931/1761] Consolidate str and Path handling.

---
 distutils/command/install_data.py | 14 ++------------
 1 file changed, 2 insertions(+), 12 deletions(-)

diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index 6b7c9aefd9..a53786b9f6 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -47,19 +47,9 @@ def finalize_options(self):
     def run(self):
         self.mkpath(self.install_dir)
         for f in self.data_files:
-            if isinstance(f, str):
+            if isinstance(f, (str, Path)):
                 # it's a simple file, so copy it
-                f = convert_path(f)
-                if self.warn_dir:
-                    self.warn(
-                        "setup script did not provide a directory for "
-                        f"'{f}' -- installing right in '{self.install_dir}'"
-                    )
-                (out, _) = self.copy_file(f, self.install_dir)
-                self.outfiles.append(out)
-            elif isinstance(f, Path):
-                # it's a simple file, so copy it
-                f = convert_path(str(f))
+                f = convert_path(os.fspath(f))
                 if self.warn_dir:
                     self.warn(
                         "setup script did not provide a directory for "

From a6a55cfe05f891aa83253a389083d746e09de4fa Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 15 Jul 2024 19:59:32 -0400
Subject: [PATCH 0932/1761] Remove r string now that docstring is gone.

---
 distutils/unixccompiler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index 6450fff547..3254c53f2f 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -366,7 +366,7 @@ def _library_root(dir):
         return os.path.join(match.group(1), dir[1:]) if apply_root else dir
 
     def find_library_file(self, dirs, lib, debug=False):
-        r"""
+        """
         Second-guess the linker with not much hard
         data to go on: GCC seems to prefer the shared library, so
         assume that *all* Unix C compilers do,

From 29281a6441f5c95baf451b8c6a05b5ac9dd95796 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 15:37:03 -0400
Subject: [PATCH 0933/1761] Expand convert_path to also accept pathlib.Path
 objects.

---
 distutils/command/install_data.py |  2 +-
 distutils/tests/test_util.py      |  5 +++++
 distutils/util.py                 | 10 +++++++++-
 3 files changed, 15 insertions(+), 2 deletions(-)

diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index a53786b9f6..735adf2b52 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -49,7 +49,7 @@ def run(self):
         for f in self.data_files:
             if isinstance(f, (str, Path)):
                 # it's a simple file, so copy it
-                f = convert_path(os.fspath(f))
+                f = convert_path(f)
                 if self.warn_dir:
                     self.warn(
                         "setup script did not provide a directory for "
diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index 0de4e1a59c..a614a1dae5 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -5,6 +5,7 @@
 import email.policy
 import io
 import os
+import pathlib
 import sys
 import sysconfig as stdlib_sysconfig
 import unittest.mock as mock
@@ -72,6 +73,7 @@ def _join(path):
         os.path.join = _join
 
         assert convert_path('/home/to/my/stuff') == '/home/to/my/stuff'
+        assert convert_path(pathlib.Path('/home/to/my/stuff')) == '/home/to/my/stuff'
 
         # win
         os.sep = '\\'
@@ -85,8 +87,11 @@ def _join(*path):
             convert_path('/home/to/my/stuff')
         with pytest.raises(ValueError):
             convert_path('home/to/my/stuff/')
+        with pytest.raises(ValueError):
+            convert_path(pathlib.Path('/home/to/my/stuff'))
 
         assert convert_path('home/to/my/stuff') == 'home\\to\\my\\stuff'
+        assert convert_path(pathlib.Path('home/to/my/stuff')) == 'home\\to\\my\\stuff'
         assert convert_path('.') == os.curdir
 
     def test_change_root(self):
diff --git a/distutils/util.py b/distutils/util.py
index 9db89b0979..635d715ad6 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -7,6 +7,7 @@
 import functools
 import importlib.util
 import os
+import pathlib
 import re
 import string
 import subprocess
@@ -116,7 +117,14 @@ def split_version(s):
     return [int(n) for n in s.split('.')]
 
 
-def convert_path(pathname):
+def convert_path(pathname: str | pathlib.Path) -> str:
+    """
+    Allow for pathlib.Path inputs and then make native.
+    """
+    return make_native(os.fspath(pathname))
+
+
+def make_native(pathname: str) -> str:
     """Return 'pathname' as a name that will work on the native filesystem,
     i.e. split it on '/' and put it back together again using the current
     directory separator.  Needed because filenames in the setup script are

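After this split, ``convert_path`` is just ``os.fspath`` followed by the old separator-normalizing logic (now ``make_native``), so callers may pass either type. A small usage sketch against the patched ``distutils.util``:

    import pathlib
    from distutils.util import convert_path

    # str and Path inputs normalize identically on every platform
    assert convert_path("pkg/data/file.txt") == convert_path(
        pathlib.Path("pkg/data/file.txt")
    )
    # POSIX: 'pkg/data/file.txt'; Windows: 'pkg\\data\\file.txt'
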
From 0edebdc35ddbc3c6ef4e723c51825a5a85bf71e2 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 14:54:16 -0400
Subject: [PATCH 0934/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/compat/py38.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/distutils/compat/py38.py b/distutils/compat/py38.py
index 2d44211147..03ec73ef0e 100644
--- a/distutils/compat/py38.py
+++ b/distutils/compat/py38.py
@@ -14,6 +14,7 @@ def removeprefix(self, prefix):
             return self[len(prefix) :]
         else:
             return self[:]
+
 else:
 
     def removesuffix(self, suffix):

From 2a7122a885ddf34664ed9f6edb88c66a8510ef68 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 15:37:53 -0400
Subject: [PATCH 0935/1761] Prefer simply 'pathlib' for import.

---
 distutils/command/install_data.py    | 4 ++--
 distutils/tests/test_install_data.py | 9 +++++----
 2 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index 735adf2b52..cb03d0e5c6 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -6,7 +6,7 @@
 # contributed by Bastian Kleineidam
 
 import os
-from pathlib import Path
+import pathlib
 
 from ..core import Command
 from ..util import change_root, convert_path
@@ -47,7 +47,7 @@ def finalize_options(self):
     def run(self):
         self.mkpath(self.install_dir)
         for f in self.data_files:
-            if isinstance(f, (str, Path)):
+            if isinstance(f, (str, pathlib.Path)):
                 # it's a simple file, so copy it
                 f = convert_path(f)
                 if self.warn_dir:
diff --git a/distutils/tests/test_install_data.py b/distutils/tests/test_install_data.py
index e340c75800..4b15a26945 100644
--- a/distutils/tests/test_install_data.py
+++ b/distutils/tests/test_install_data.py
@@ -1,12 +1,13 @@
 """Tests for distutils.command.install_data."""
 
 import os
-from pathlib import Path
-from distutils.command.install_data import install_data
-from distutils.tests import support
+import pathlib
 
 import pytest
 
+from distutils.command.install_data import install_data
+from distutils.tests import support
+
 
 @pytest.mark.usefixtures('save_env')
 class TestInstallData(
@@ -26,7 +27,7 @@ def test_simple_run(self):
         inst2 = os.path.join(pkg_dir, 'inst2')
         two = os.path.join(pkg_dir, 'two')
         self.write_file(two, 'xxx')
-        three = Path(pkg_dir) / 'three'
+        three = pathlib.Path(pkg_dir) / 'three'
         self.write_file(three, 'xxx')
 
         cmd.data_files = [one, (inst2, [two]), three]

From dcb1bf8308f46d1b8e5235382122f0a4060fb65b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 15:10:31 -0400
Subject: [PATCH 0936/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Fix PERF401 errors.
---
 distutils/bcppcompiler.py        |  3 +--
 distutils/ccompiler.py           | 16 ++++++----------
 distutils/command/bdist.py       |  7 ++++---
 distutils/command/build_ext.py   |  5 +----
 distutils/command/check.py       |  7 +++----
 distutils/command/install_lib.py |  4 +---
 distutils/command/sdist.py       |  8 ++++----
 distutils/dist.py                | 15 +++------------
 distutils/msvc9compiler.py       |  4 +---
 distutils/msvccompiler.py        |  4 +---
 10 files changed, 25 insertions(+), 48 deletions(-)

diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py
index e47dca5d09..9157b43328 100644
--- a/distutils/bcppcompiler.py
+++ b/distutils/bcppcompiler.py
@@ -236,8 +236,7 @@ def link(  # noqa: C901
                 temp_dir = os.path.dirname(objects[0])  # preserve tree structure
                 def_file = os.path.join(temp_dir, f'{modname}.def')
                 contents = ['EXPORTS']
-                for sym in export_symbols or []:
-                    contents.append(f'  {sym}=_{sym}')
+                contents.extend(f'  {sym}=_{sym}' for sym in export_symbols)
                 self.execute(write_file, (def_file, contents), f"writing {def_file}")
 
             # Borland C++ has problems with '/' in paths
diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 9d5297b944..42b0812454 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -1124,10 +1124,10 @@ def show_compilers():
     # commands that use it.
     from distutils.fancy_getopt import FancyGetopt
 
-    compilers = []
-    for compiler in compiler_class.keys():
-        compilers.append(("compiler=" + compiler, None, compiler_class[compiler][2]))
-    compilers.sort()
+    compilers = sorted(
+        ("compiler=" + compiler, None, compiler_class[compiler][2])
+        for compiler in compiler_class.keys()
+    )
     pretty_printer = FancyGetopt(compilers)
     pretty_printer.print_help("List of available compilers:")
 
@@ -1218,8 +1218,7 @@ def gen_preprocess_options(macros, include_dirs):
                 # shell at all costs when we spawn the command!
                 pp_opts.append("-D{}={}".format(*macro))
 
-    for dir in include_dirs:
-        pp_opts.append(f"-I{dir}")
+    pp_opts.extend(f"-I{dir}" for dir in include_dirs)
     return pp_opts
 
 
@@ -1230,10 +1229,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
     directories.  Returns a list of command-line options suitable for use
     with some compiler (depending on the two format strings passed in).
     """
-    lib_opts = []
-
-    for dir in library_dirs:
-        lib_opts.append(compiler.library_dir_option(dir))
+    lib_opts = [compiler.library_dir_option(dir) for dir in library_dirs]
 
     for dir in runtime_library_dirs:
         lib_opts.extend(always_iterable(compiler.runtime_library_dir_option(dir)))
diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index 1738f4e56b..f334075159 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -15,9 +15,10 @@ def show_formats():
     """Print list of available formats (arguments to "--format" option)."""
     from ..fancy_getopt import FancyGetopt
 
-    formats = []
-    for format in bdist.format_commands:
-        formats.append(("formats=" + format, None, bdist.format_commands[format][1]))
+    formats = [
+        ("formats=" + format, None, bdist.format_commands[format][1])
+        for format in bdist.format_commands
+    ]
     pretty_printer = FancyGetopt(formats)
     pretty_printer.print_help("List of available distribution formats:")
 
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 18e1601a28..cf475fe824 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -465,10 +465,7 @@ def get_outputs(self):
         # And build the list of output (built) filenames.  Note that this
         # ignores the 'inplace' flag, and assumes everything goes in the
         # "build" tree.
-        outputs = []
-        for ext in self.extensions:
-            outputs.append(self.get_ext_fullpath(ext.name))
-        return outputs
+        return [self.get_ext_fullpath(ext.name) for ext in self.extensions]
 
     def build_extensions(self):
         # First, sanity-check the 'extensions' list
diff --git a/distutils/command/check.py b/distutils/command/check.py
index 58b3f949f9..93d754e73d 100644
--- a/distutils/command/check.py
+++ b/distutils/command/check.py
@@ -100,10 +100,9 @@ def check_metadata(self):
         """
         metadata = self.distribution.metadata
 
-        missing = []
-        for attr in 'name', 'version':
-            if not getattr(metadata, attr, None):
-                missing.append(attr)
+        missing = [
+            attr for attr in ('name', 'version') if not getattr(metadata, attr, None)
+        ]
 
         if missing:
             self.warn("missing required meta-data: {}".format(', '.join(missing)))
diff --git a/distutils/command/install_lib.py b/distutils/command/install_lib.py
index 54a12d38a8..01579d46b4 100644
--- a/distutils/command/install_lib.py
+++ b/distutils/command/install_lib.py
@@ -161,9 +161,7 @@ def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
         build_dir = getattr(build_cmd, cmd_option)
 
         prefix_len = len(build_dir) + len(os.sep)
-        outputs = []
-        for file in build_files:
-            outputs.append(os.path.join(output_dir, file[prefix_len:]))
+        outputs = [os.path.join(output_dir, file[prefix_len:]) for file in build_files]
 
         return outputs
 
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index 04333dd214..e8abb73920 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -24,10 +24,10 @@ def show_formats():
     from ..archive_util import ARCHIVE_FORMATS
     from ..fancy_getopt import FancyGetopt
 
-    formats = []
-    for format in ARCHIVE_FORMATS.keys():
-        formats.append(("formats=" + format, None, ARCHIVE_FORMATS[format][2]))
-    formats.sort()
+    formats = sorted(
+        ("formats=" + format, None, ARCHIVE_FORMATS[format][2])
+        for format in ARCHIVE_FORMATS.keys()
+    )
     FancyGetopt(formats).print_help("List of available source distribution formats:")
 
 
diff --git a/distutils/dist.py b/distutils/dist.py
index d7d4ca8fc8..0a57d60be9 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -745,10 +745,7 @@ def print_commands(self):
         for cmd in std_commands:
             is_std.add(cmd)
 
-        extra_commands = []
-        for cmd in self.cmdclass.keys():
-            if cmd not in is_std:
-                extra_commands.append(cmd)
+        extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std]
 
         max_length = 0
         for cmd in std_commands + extra_commands:
@@ -776,10 +773,7 @@ def get_command_list(self):
         for cmd in std_commands:
             is_std.add(cmd)
 
-        extra_commands = []
-        for cmd in self.cmdclass.keys():
-            if cmd not in is_std:
-                extra_commands.append(cmd)
+        extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std]
 
         rv = []
         for cmd in std_commands + extra_commands:
@@ -1301,7 +1295,4 @@ def fix_help_options(options):
     """Convert a 4-tuple 'help_options' list as found in various command
     classes to the 3-tuple form required by FancyGetopt.
     """
-    new_options = []
-    for help_tuple in options:
-        new_options.append(help_tuple[0:3])
-    return new_options
+    return [opt[0:3] for opt in options]
diff --git a/distutils/msvc9compiler.py b/distutils/msvc9compiler.py
index f860a8d383..4c70848730 100644
--- a/distutils/msvc9compiler.py
+++ b/distutils/msvc9compiler.py
@@ -640,9 +640,7 @@ def link(  # noqa: C901
                 else:
                     ldflags = self.ldflags_shared
 
-            export_opts = []
-            for sym in export_symbols or []:
-                export_opts.append("/EXPORT:" + sym)
+            export_opts = [f"/EXPORT:{sym}" for sym in export_symbols or []]
 
             ld_args = (
                 ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
diff --git a/distutils/msvccompiler.py b/distutils/msvccompiler.py
index 2bf94e60c9..2a5e61d78d 100644
--- a/distutils/msvccompiler.py
+++ b/distutils/msvccompiler.py
@@ -534,9 +534,7 @@ def link(  # noqa: C901
                 else:
                     ldflags = self.ldflags_shared
 
-            export_opts = []
-            for sym in export_symbols or []:
-                export_opts.append("/EXPORT:" + sym)
+            export_opts = [f"/EXPORT:{sym}" for sym in export_symbols or []]
 
             ld_args = (
                 ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]

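Ruff's PERF401 flags loops whose only effect is ``list.append``; the mechanical rewrite used throughout this patch is a comprehension (or ``extend`` with a generator, or ``sorted`` over a generator where the old code sorted afterwards). A minimal before/after with made-up data:

    include_dirs = ["/usr/include", "/opt/include"]

    # before: loop-and-append, the shape PERF401 flags
    pp_opts = []
    for dir in include_dirs:
        pp_opts.append(f"-I{dir}")

    # after: comprehension, identical result
    assert pp_opts == [f"-I{dir}" for dir in include_dirs]
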
From 6099343a445935b13136da23a4b92d816dd5ee29 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 15:44:54 -0400
Subject: [PATCH 0937/1761] Extract a singledispatchmethod _copy for handling
 the copy of each data file.
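
A minimal sketch of the dispatch pattern (names here are hypothetical): the
undecorated method is the fallback, and register() adds overloads chosen by
the type of the first non-self argument.

    import functools

    class Installer:
        @functools.singledispatchmethod
        def _copy(self, f):
            # fallback: f is a (directory, [files]) tuple
            print('tuple form:', f)

        @_copy.register
        def _(self, f: str):
            # overload chosen when f is a plain string
            print('simple file:', f)

    Installer()._copy('data.txt')            # simple file: data.txt
    Installer()._copy(('share', ['a.txt']))  # tuple form: ('share', ['a.txt'])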

---
 distutils/command/install_data.py | 69 ++++++++++++++++++-------------
 1 file changed, 40 insertions(+), 29 deletions(-)

diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index cb03d0e5c6..e13b5ca664 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -5,13 +5,19 @@
 
 # contributed by Bastian Kleineidam
 
+import functools
 import os
 import pathlib
 
+from typing import Tuple, Iterable
+
 from ..core import Command
 from ..util import change_root, convert_path
 
 
+StrPath = str | pathlib.Path
+
+
 class install_data(Command):
     description = "install data files"
 
@@ -47,36 +53,41 @@ def finalize_options(self):
     def run(self):
         self.mkpath(self.install_dir)
         for f in self.data_files:
-            if isinstance(f, (str, pathlib.Path)):
-                # it's a simple file, so copy it
-                f = convert_path(f)
-                if self.warn_dir:
-                    self.warn(
-                        "setup script did not provide a directory for "
-                        f"'{f}' -- installing right in '{self.install_dir}'"
-                    )
-                (out, _) = self.copy_file(f, self.install_dir)
+            self._copy(f)
+
+    @functools.singledispatchmethod
+    def _copy(self, f: StrPath | Tuple[StrPath, Iterable[StrPath]]):
+        # it's a tuple with path to install to and a list of files
+        dir = convert_path(f[0])
+        if not os.path.isabs(dir):
+            dir = os.path.join(self.install_dir, dir)
+        elif self.root:
+            dir = change_root(self.root, dir)
+        self.mkpath(dir)
+
+        if f[1] == []:
+            # If there are no files listed, the user must be
+            # trying to create an empty directory, so add the
+            # directory to the list of output files.
+            self.outfiles.append(dir)
+        else:
+            # Copy files, adding them to the list of output files.
+            for data in f[1]:
+                data = convert_path(data)
+                (out, _) = self.copy_file(data, dir)
                 self.outfiles.append(out)
-            else:
-                # it's a tuple with path to install to and a list of files
-                dir = convert_path(f[0])
-                if not os.path.isabs(dir):
-                    dir = os.path.join(self.install_dir, dir)
-                elif self.root:
-                    dir = change_root(self.root, dir)
-                self.mkpath(dir)
-
-                if f[1] == []:
-                    # If there are no files listed, the user must be
-                    # trying to create an empty directory, so add the
-                    # directory to the list of output files.
-                    self.outfiles.append(dir)
-                else:
-                    # Copy files, adding them to the list of output files.
-                    for data in f[1]:
-                        data = convert_path(data)
-                        (out, _) = self.copy_file(data, dir)
-                        self.outfiles.append(out)
+
+    @_copy.register
+    def _(self, f: StrPath):
+        # it's a simple file, so copy it
+        f = convert_path(f)
+        if self.warn_dir:
+            self.warn(
+                "setup script did not provide a directory for "
+                f"'{f}' -- installing right in '{self.install_dir}'"
+            )
+        (out, _) = self.copy_file(f, self.install_dir)
+        self.outfiles.append(out)
 
     def get_inputs(self):
         return self.data_files or []

From 3a2d7a62e355db2d1e67af38ba2be03c63099480 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 16:12:53 -0400
Subject: [PATCH 0938/1761] Use explicit registration for compatibility with
 older Pythons.

Prior to 3.11, singledispatch[method] doesn't know about unions.
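
A sketch of the two spellings (assuming Python 3.8+ for singledispatchmethod):

    import functools
    import os

    class Example:
        @functools.singledispatchmethod
        def handle(self, f):
            return 'tuple form'

        # On 3.11+ a single union annotation would suffice:
        #     @handle.register
        #     def _(self, f: str | os.PathLike): ...
        # Portable spelling: one register() call per union member.
        @handle.register(str)
        @handle.register(os.PathLike)
        def _(self, f):
            return 'path form'
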
---
 distutils/command/install_data.py | 15 +++++++--------
 distutils/util.py                 |  2 ++
 2 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index e13b5ca664..bd2932ab0b 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -5,19 +5,17 @@
 
 # contributed by Bastian Kleineidam
 
+from __future__ import annotations
+
 import functools
 import os
-import pathlib
 
-from typing import Tuple, Iterable
+from typing import Iterable
 
 from ..core import Command
 from ..util import change_root, convert_path
 
 
-StrPath = str | pathlib.Path
-
-
 class install_data(Command):
     description = "install data files"
 
@@ -56,7 +54,7 @@ def run(self):
             self._copy(f)
 
     @functools.singledispatchmethod
-    def _copy(self, f: StrPath | Tuple[StrPath, Iterable[StrPath]]):
+    def _copy(self, f: tuple[str | os.PathLike, Iterable[str | os.PathLike]]):
         # it's a tuple with path to install to and a list of files
         dir = convert_path(f[0])
         if not os.path.isabs(dir):
@@ -77,8 +75,9 @@ def _copy(self, f: StrPath | Tuple[StrPath, Iterable[StrPath]]):
                 (out, _) = self.copy_file(data, dir)
                 self.outfiles.append(out)
 
-    @_copy.register
-    def _(self, f: StrPath):
+    @_copy.register(str)
+    @_copy.register(os.PathLike)
+    def _(self, f: str | os.PathLike):
         # it's a simple file, so copy it
         f = convert_path(f)
         if self.warn_dir:
diff --git a/distutils/util.py b/distutils/util.py
index 635d715ad6..95503e957c 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -4,6 +4,8 @@
 one of the other *util.py modules.
 """
 
+from __future__ import annotations
+
 import functools
 import importlib.util
 import os

From a5e3e5f47e333b0c3d04a5f35cf8a3a67a4ffb62 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 16:13:35 -0400
Subject: [PATCH 0939/1761] Prefer os.PathLike in convert_path

---
 distutils/util.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/distutils/util.py b/distutils/util.py
index 95503e957c..7751eb9465 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -9,7 +9,6 @@
 import functools
 import importlib.util
 import os
-import pathlib
 import re
 import string
 import subprocess
@@ -119,7 +118,7 @@ def split_version(s):
     return [int(n) for n in s.split('.')]
 
 
-def convert_path(pathname: str | pathlib.Path) -> str:
+def convert_path(pathname: str | os.PathLike) -> str:
     """
     Allow for pathlib.Path inputs and then make native.
     """

From 87f77315d5fb2b00e88db0bd6931a2f94c10cdfd Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 16:39:26 -0400
Subject: [PATCH 0940/1761] convert_path needs to accept None for Setuptools' sake.
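
pass_none short-circuits on a None first argument; a sketch of its behavior
(decorator body paraphrased from distutils._functools):

    import functools

    def pass_none(func):
        @functools.wraps(func)
        def wrapper(param, *args, **kwargs):
            if param is not None:
                return func(param, *args, **kwargs)
        return wrapper

    @pass_none
    def shout(text):
        return text.upper()

    assert shout(None) is None
    assert shout('abc') == 'ABC'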

---
 distutils/util.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/distutils/util.py b/distutils/util.py
index 7751eb9465..68565534ca 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -16,6 +16,7 @@
 import sysconfig
 import tempfile
 
+from ._functools import pass_none
 from ._log import log
 from ._modified import newer
 from .errors import DistutilsByteCompileError, DistutilsPlatformError
@@ -118,9 +119,16 @@ def split_version(s):
     return [int(n) for n in s.split('.')]
 
 
+@pass_none
 def convert_path(pathname: str | os.PathLike) -> str:
     """
     Allow for pathlib.Path inputs and then make native.
+
+    Also if None is passed, will just pass it through as
+    Setuptools relies on this behavior.
+
+    >>> convert_path(None) is None
+    True
     """
     return make_native(os.fspath(pathname))
 

From 2e6a69e92aa2eb5a85e068d8aa8e92427855a9fc Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 16:50:07 -0400
Subject: [PATCH 0941/1761] In test_convert_path, utilize posixpath.join and
 ntpath.join for maximum compatibility with other libraries.

---
 distutils/tests/test_util.py | 14 ++++----------
 1 file changed, 4 insertions(+), 10 deletions(-)

diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index a614a1dae5..b24f1fb4cd 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -4,8 +4,10 @@
 import email.generator
 import email.policy
 import io
+import ntpath
 import os
 import pathlib
+import posixpath
 import sys
 import sysconfig as stdlib_sysconfig
 import unittest.mock as mock
@@ -66,22 +68,14 @@ def test_get_platform(self):
     def test_convert_path(self):
         # linux/mac
         os.sep = '/'
-
-        def _join(path):
-            return '/'.join(path)
-
-        os.path.join = _join
+        os.path.join = posixpath.join
 
         assert convert_path('/home/to/my/stuff') == '/home/to/my/stuff'
         assert convert_path(pathlib.Path('/home/to/my/stuff')) == '/home/to/my/stuff'
 
         # win
         os.sep = '\\'
-
-        def _join(*path):
-            return '\\'.join(path)
-
-        os.path.join = _join
+        os.path.join = ntpath.join
 
         with pytest.raises(ValueError):
             convert_path('/home/to/my/stuff')

From 90cbfbe72f5ed07d1fd589bfb7c1660f66159756 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 17:06:10 -0400
Subject: [PATCH 0942/1761] Wrap paths in PurePosixPath to ensure that any
 WindowsPaths don't get backslashes.
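
A small illustration, using pure paths so it behaves the same on any OS:

    import pathlib

    win = pathlib.PureWindowsPath('pkg/data')
    assert str(win) == 'pkg\\data'                        # Windows flavour uses backslashes
    assert str(pathlib.PurePosixPath(win)) == 'pkg/data'  # forward slashes retained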

---
 distutils/util.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/distutils/util.py b/distutils/util.py
index 68565534ca..f8e4fbb778 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -9,6 +9,7 @@
 import functools
 import importlib.util
 import os
+import pathlib
 import re
 import string
 import subprocess
@@ -130,7 +131,9 @@ def convert_path(pathname: str | os.PathLike) -> str:
     >>> convert_path(None) is None
     True
     """
-    return make_native(os.fspath(pathname))
+    # wrap in PurePosixPath to retain forward slashes on Windows
+    # see https://github.com/pypa/distutils/pull/272#issuecomment-2240100013
+    return make_native(os.fspath(pathlib.PurePosixPath(pathname)))
 
 
 def make_native(pathname: str) -> str:

From e2cbf97dda5929ae835c52921461c09bda521a06 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 19 Jul 2024 17:48:30 -0400
Subject: [PATCH 0943/1761] convert_path no longer fails if passed a path with
 a trailing slash. Instead, trailing slashes are stripped just as they are
 with pathlib.Path.

Ref https://github.com/pypa/distutils/pull/272#issuecomment-2240252653.
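
For example, pathlib already treats a trailing slash as insignificant:

    import pathlib

    assert str(pathlib.PurePosixPath('home/to/my/stuff/')) == 'home/to/my/stuff'
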
---
 distutils/tests/test_util.py | 2 --
 distutils/util.py            | 2 --
 2 files changed, 4 deletions(-)

diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index b24f1fb4cd..c05df03a4b 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -79,8 +79,6 @@ def test_convert_path(self):
 
         with pytest.raises(ValueError):
             convert_path('/home/to/my/stuff')
-        with pytest.raises(ValueError):
-            convert_path('home/to/my/stuff/')
         with pytest.raises(ValueError):
             convert_path(pathlib.Path('/home/to/my/stuff'))
 
diff --git a/distutils/util.py b/distutils/util.py
index f8e4fbb778..50ebb1c155 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -151,8 +151,6 @@ def make_native(pathname: str) -> str:
         return pathname
     if pathname[0] == '/':
         raise ValueError(f"path '{pathname}' cannot be absolute")
-    if pathname[-1] == '/':
-        raise ValueError(f"path '{pathname}' cannot end with '/'")
 
     paths = pathname.split('/')
     while '.' in paths:

From 9d6ca5fe777e7bc2ab611814adac989789d7f023 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Jul 2024 14:25:46 -0400
Subject: [PATCH 0944/1761] convert_path now converts to a platform-native
 pathlib.Path, but then calls `.as_posix()` on it.

This change will have the unintended effect of adding support for backslashes on Windows. Maybe that's fine, or maybe it should be prohibited.
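
Roughly, the effect (a sketch, not the exact code):

    import pathlib

    # .as_posix() renders any flavour of path with forward slashes ...
    assert pathlib.PureWindowsPath('pkg\\data').as_posix() == 'pkg/data'
    # ... which is how backslash input now slips through on Windows.
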
---
 distutils/util.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/distutils/util.py b/distutils/util.py
index 50ebb1c155..3163eb4804 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -123,17 +123,17 @@ def split_version(s):
 @pass_none
 def convert_path(pathname: str | os.PathLike) -> str:
     """
-    Allow for pathlib.Path inputs and then make native.
+    Allow for pathlib.Path inputs, coax to posix, and then make native.
 
-    Also if None is passed, will just pass it through as
+    If None is passed, will just pass it through as
     Setuptools relies on this behavior.
 
     >>> convert_path(None) is None
     True
     """
-    # wrap in PurePosixPath to retain forward slashes on Windows
+    # Use .as_posix() to retain forward slashes on Windows
     # see https://github.com/pypa/distutils/pull/272#issuecomment-2240100013
-    return make_native(os.fspath(pathlib.PurePosixPath(pathname)))
+    return make_native(pathlib.Path(pathname).as_posix())
 
 
 def make_native(pathname: str) -> str:

From 4eaa02f2fff8f8e6d4eac5442988dc586fe56004 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Jul 2024 14:45:06 -0400
Subject: [PATCH 0945/1761] Separate test_convert_path into two tests to avoid
 interactions in monkeypatching os.path.

---
 distutils/tests/test_util.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index c05df03a4b..434c9b8085 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -65,15 +65,14 @@ def test_get_platform(self):
             with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}):
                 assert get_platform() == 'win-arm64'
 
-    def test_convert_path(self):
-        # linux/mac
+    def test_convert_path_unix(self):
         os.sep = '/'
         os.path.join = posixpath.join
 
         assert convert_path('/home/to/my/stuff') == '/home/to/my/stuff'
         assert convert_path(pathlib.Path('/home/to/my/stuff')) == '/home/to/my/stuff'
 
-        # win
+    def test_convert_path_windows(self):
         os.sep = '\\'
         os.path.join = ntpath.join
 

From 5469f24a5978acb5c7397a4b2e2b2ffd78c7c9e6 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Jul 2024 15:16:56 -0400
Subject: [PATCH 0946/1761] Remove expectation that a ValueError is raised for
 data_files paths that are absolute but drive-relative on Windows.

---
 distutils/tests/test_util.py | 5 -----
 distutils/util.py            | 2 --
 2 files changed, 7 deletions(-)

diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index 434c9b8085..16fcfe3e8c 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -76,11 +76,6 @@ def test_convert_path_windows(self):
         os.sep = '\\'
         os.path.join = ntpath.join
 
-        with pytest.raises(ValueError):
-            convert_path('/home/to/my/stuff')
-        with pytest.raises(ValueError):
-            convert_path(pathlib.Path('/home/to/my/stuff'))
-
         assert convert_path('home/to/my/stuff') == 'home\\to\\my\\stuff'
         assert convert_path(pathlib.Path('home/to/my/stuff')) == 'home\\to\\my\\stuff'
         assert convert_path('.') == os.curdir
diff --git a/distutils/util.py b/distutils/util.py
index 3163eb4804..c7e73011f6 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -149,8 +149,6 @@ def make_native(pathname: str) -> str:
         return pathname
     if not pathname:
         return pathname
-    if pathname[0] == '/':
-        raise ValueError(f"path '{pathname}' cannot be absolute")
 
     paths = pathname.split('/')
     while '.' in paths:

From 28e7431142f4bc380518f4691076af2bd81222b7 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Jul 2024 15:33:26 -0400
Subject: [PATCH 0947/1761] Simplify convert_path by relying on the logic in
 PurePath.

The test for convert_path no longer runs on all operating systems, as it's too difficult (if not impossible) to monkeypatch PurePath reliably.
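
The PurePath behavior it now relies on, in brief:

    import os
    import pathlib

    # '.' segments are dropped and separators are made native:
    assert os.fspath(pathlib.PurePath('foo/./bar')) == os.path.join('foo', 'bar')
    assert os.fspath(pathlib.PurePath('.')) == '.'
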
---
 distutils/tests/test_util.py | 10 ++--------
 distutils/util.py            | 36 +++++++-----------------------------
 2 files changed, 9 insertions(+), 37 deletions(-)

diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index 16fcfe3e8c..25e26e017d 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -4,10 +4,8 @@
 import email.generator
 import email.policy
 import io
-import ntpath
 import os
 import pathlib
-import posixpath
 import sys
 import sysconfig as stdlib_sysconfig
 import unittest.mock as mock
@@ -65,17 +63,13 @@ def test_get_platform(self):
             with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}):
                 assert get_platform() == 'win-arm64'
 
+    @pytest.mark.skipif('platform.system() == "Windows"')
     def test_convert_path_unix(self):
-        os.sep = '/'
-        os.path.join = posixpath.join
-
         assert convert_path('/home/to/my/stuff') == '/home/to/my/stuff'
         assert convert_path(pathlib.Path('/home/to/my/stuff')) == '/home/to/my/stuff'
 
+    @pytest.mark.skipif('platform.system() != "Windows"')
     def test_convert_path_windows(self):
-        os.sep = '\\'
-        os.path.join = ntpath.join
-
         assert convert_path('home/to/my/stuff') == 'home\\to\\my\\stuff'
         assert convert_path(pathlib.Path('home/to/my/stuff')) == 'home\\to\\my\\stuff'
         assert convert_path('.') == os.curdir
diff --git a/distutils/util.py b/distutils/util.py
index c7e73011f6..4cc6bd283c 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -122,43 +122,21 @@ def split_version(s):
 
 @pass_none
 def convert_path(pathname: str | os.PathLike) -> str:
-    """
-    Allow for pathlib.Path inputs, coax to posix, and then make native.
+    r"""
+    Allow for pathlib.Path inputs, coax to a native path string.
 
     If None is passed, will just pass it through as
     Setuptools relies on this behavior.
 
     >>> convert_path(None) is None
     True
-    """
-    # Use .as_posix() to retain forward slashes on Windows
-    # see https://github.com/pypa/distutils/pull/272#issuecomment-2240100013
-    return make_native(pathlib.Path(pathname).as_posix())
-
-
-def make_native(pathname: str) -> str:
-    """Return 'pathname' as a name that will work on the native filesystem,
-    i.e. split it on '/' and put it back together again using the current
-    directory separator.  Needed because filenames in the setup script are
-    always supplied in Unix style, and have to be converted to the local
-    convention before we can actually use them in the filesystem.  Raises
-    ValueError on non-Unix-ish systems if 'pathname' either starts or
-    ends with a slash.
-    """
-    if os.sep == '/':
-        return pathname
-    if not pathname:
-        return pathname
 
-    paths = pathname.split('/')
-    while '.' in paths:
-        paths.remove('.')
-    if not paths:
-        return os.curdir
-    return os.path.join(*paths)
+    Removes empty paths.
 
-
-# convert_path ()
+    >>> convert_path('foo/./bar').replace('\\', '/')
+    'foo/bar'
+    """
+    return os.fspath(pathlib.PurePath(pathname))
 
 
 def change_root(new_root, pathname):

From 8cada19d07e286558e5193841f66483f631170b3 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Jul 2024 15:43:45 -0400
Subject: [PATCH 0948/1761] Harmonize convert_path tests across Unix and
 Windows.

---
 distutils/tests/test_util.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index 25e26e017d..d1254ca18b 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -67,11 +67,12 @@ def test_get_platform(self):
     def test_convert_path_unix(self):
         assert convert_path('/home/to/my/stuff') == '/home/to/my/stuff'
         assert convert_path(pathlib.Path('/home/to/my/stuff')) == '/home/to/my/stuff'
+        assert convert_path('.') == os.curdir
 
     @pytest.mark.skipif('platform.system() != "Windows"')
     def test_convert_path_windows(self):
-        assert convert_path('home/to/my/stuff') == 'home\\to\\my\\stuff'
-        assert convert_path(pathlib.Path('home/to/my/stuff')) == 'home\\to\\my\\stuff'
+        assert convert_path('/home/to/my/stuff') == r'\home\to\my\stuff'
+        assert convert_path(pathlib.Path('/home/to/my/stuff')) == r'\home\to\my\stuff'
         assert convert_path('.') == os.curdir
 
     def test_change_root(self):

From 8f2498a1b072f3206b85f786016b9defbb4b796c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Jul 2024 15:52:05 -0400
Subject: [PATCH 0949/1761] Consolidate convert_path tests and just generate
 the expected value in a platform-sensitive way. Should fix failures on mingw.

---
 distutils/tests/test_util.py | 14 ++++----------
 1 file changed, 4 insertions(+), 10 deletions(-)

diff --git a/distutils/tests/test_util.py b/distutils/tests/test_util.py
index d1254ca18b..00c9743ed0 100644
--- a/distutils/tests/test_util.py
+++ b/distutils/tests/test_util.py
@@ -63,16 +63,10 @@ def test_get_platform(self):
             with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}):
                 assert get_platform() == 'win-arm64'
 
-    @pytest.mark.skipif('platform.system() == "Windows"')
-    def test_convert_path_unix(self):
-        assert convert_path('/home/to/my/stuff') == '/home/to/my/stuff'
-        assert convert_path(pathlib.Path('/home/to/my/stuff')) == '/home/to/my/stuff'
-        assert convert_path('.') == os.curdir
-
-    @pytest.mark.skipif('platform.system() != "Windows"')
-    def test_convert_path_windows(self):
-        assert convert_path('/home/to/my/stuff') == r'\home\to\my\stuff'
-        assert convert_path(pathlib.Path('/home/to/my/stuff')) == r'\home\to\my\stuff'
+    def test_convert_path(self):
+        expected = os.sep.join(('', 'home', 'to', 'my', 'stuff'))
+        assert convert_path('/home/to/my/stuff') == expected
+        assert convert_path(pathlib.Path('/home/to/my/stuff')) == expected
         assert convert_path('.') == os.curdir
 
     def test_change_root(self):

From 65675e5f102f5887827c9edd0372a9cbbd924784 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 1 Aug 2024 21:07:41 -0400
Subject: [PATCH 0950/1761] Harmonize '-shared' parameter for C and C++ in
 Mingw32CCompiler

---
 distutils/cygwinccompiler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py
index f0704ee7e4..10a0f7a872 100644
--- a/distutils/cygwinccompiler.py
+++ b/distutils/cygwinccompiler.py
@@ -288,7 +288,7 @@ def __init__(self, verbose=False, dry_run=False, force=False):
         self.set_executables(
             compiler=f'{self.cc} -O -Wall',
             compiler_so=f'{self.cc} -shared -O -Wall',
-            compiler_so_cxx=f'{self.cxx} -mdll -O -Wall',
+            compiler_so_cxx=f'{self.cxx} -shared -O -Wall',
             compiler_cxx=f'{self.cxx} -O -Wall',
             linker_exe=f'{self.cc}',
             linker_so=f'{self.linker_dll} {shared_option}',

From 52cd70b8dd7f5c3405ab0d4a9719b4cb0493fe9e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 1 Aug 2024 21:37:36 -0400
Subject: [PATCH 0951/1761] In sysconfig.customize_compiler, initialize
 ldcxxshared from config vars.

---
 distutils/sysconfig.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 31bdbec1bb..847a26ba49 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -304,6 +304,7 @@ def customize_compiler(compiler):  # noqa: C901
             cflags,
             ccshared,
             ldshared,
+            ldcxxshared,
             shlib_suffix,
             ar,
             ar_flags,
@@ -313,13 +314,13 @@ def customize_compiler(compiler):  # noqa: C901
             'CFLAGS',
             'CCSHARED',
             'LDSHARED',
+            'LDCXXSHARED',
             'SHLIB_SUFFIX',
             'AR',
             'ARFLAGS',
         )
 
         cxxflags = cflags
-        ldcxxshared = ""
 
         if 'CC' in os.environ:
             newcc = os.environ['CC']

From 1f3188ca5a69f3aff440a13dcfcdef76f53ecab4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 2 Aug 2024 10:23:15 -0400
Subject: [PATCH 0952/1761] Refactored customize_compiler to reduce logical
 branches and extract _add_flags logic.

Reduces cyclomatic complexity so it passes QA checks.
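
The helper folds the repeated "append $<TYPE>FLAGS if set" branches into one
place; for example (hypothetical environment value):

    import os
    from distutils.sysconfig import _add_flags  # private helper added here

    os.environ['LDFLAGS'] = '-L/opt/lib'
    assert _add_flags('gcc -shared', 'LD') == 'gcc -shared -L/opt/lib'
    del os.environ['LDFLAGS']
    assert _add_flags('gcc -shared', 'LD') == 'gcc -shared'
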
---
 distutils/sysconfig.py | 66 ++++++++++++++++++++++--------------------
 1 file changed, 34 insertions(+), 32 deletions(-)

diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 847a26ba49..8929deab5a 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -287,7 +287,7 @@ def _customize_macos():
     )
 
 
-def customize_compiler(compiler):  # noqa: C901
+def customize_compiler(compiler):
     """Do any platform-specific customization of a CCompiler instance.
 
     Mainly needed on Unix, so we can plug in the information that
@@ -329,40 +329,32 @@ def customize_compiler(compiler):  # noqa: C901
                 #       command for LDSHARED as well
                 ldshared = newcc + ldshared[len(cc) :]
             cc = newcc
-        if 'CXX' in os.environ:
-            cxx = os.environ['CXX']
-        if 'LDSHARED' in os.environ:
-            ldshared = os.environ['LDSHARED']
-        if 'LDCXXSHARED' in os.environ:
-            ldcxxshared = os.environ['LDCXXSHARED']
-        if 'CPP' in os.environ:
-            cpp = os.environ['CPP']
-        else:
-            cpp = cc + " -E"  # not always
-        if 'LDFLAGS' in os.environ:
-            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
-            ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS']
-        if 'CFLAGS' in os.environ:
-            cflags = cflags + ' ' + os.environ['CFLAGS']
-            ldshared = ldshared + ' ' + os.environ['CFLAGS']
-        if 'CXXFLAGS' in os.environ:
-            cxxflags = os.environ['CXXFLAGS']
-            ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS']
-        if 'CPPFLAGS' in os.environ:
-            cpp = cpp + ' ' + os.environ['CPPFLAGS']
-            cflags = cflags + ' ' + os.environ['CPPFLAGS']
-            cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS']
-            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
-            ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS']
-        if 'AR' in os.environ:
-            ar = os.environ['AR']
-        if 'ARFLAGS' in os.environ:
-            archiver = ar + ' ' + os.environ['ARFLAGS']
-        else:
-            archiver = ar + ' ' + ar_flags
+        cxx = os.environ.get('CXX', cxx)
+        ldshared = os.environ.get('LDSHARED', ldshared)
+        ldcxxshared = os.environ.get('LDCXXSHARED', ldcxxshared)
+        cpp = os.environ.get(
+            'CPP',
+            cc + " -E",  # not always
+        )
 
+        ldshared = _add_flags(ldshared, 'LD')
+        ldcxxshared = _add_flags(ldcxxshared, 'LD')
+        cflags = _add_flags(cflags, 'C')
+        ldshared = _add_flags(ldshared, 'C')
+        cxxflags = os.environ.get('CXXFLAGS', cxxflags)
+        ldcxxshared = _add_flags(ldcxxshared, 'CXX')
+        cpp = _add_flags(cpp, 'CPP')
+        cflags = _add_flags(cflags, 'CPP')
+        cxxflags = _add_flags(cxxflags, 'CPP')
+        ldshared = _add_flags(ldshared, 'CPP')
+        ldcxxshared = _add_flags(ldcxxshared, 'CPP')
+
+        ar = os.environ.get('AR', ar)
+
+        archiver = ar + ' ' + os.environ.get('ARFLAGS', ar_flags)
         cc_cmd = cc + ' ' + cflags
         cxx_cmd = cxx + ' ' + cxxflags
+
         compiler.set_executables(
             preprocessor=cpp,
             compiler=cc_cmd,
@@ -577,3 +569,13 @@ def get_config_var(name):
 
         warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2)
     return get_config_vars().get(name)
+
+
+def _add_flags(value: str, type: str) -> str:
+    """
+    Add any flags from the environment for the given type.
+
+    type is the prefix to FLAGS in the environment key (e.g. "C" for "CFLAGS").
+    """
+    flags = os.environ.get(f'{type}FLAGS')
+    return f'{value} {flags}' if flags else value

From 3b86d4babdd5ffba14e10194c5b97b6001f532f8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 20 Jul 2024 18:40:07 -0400
Subject: [PATCH 0953/1761] Use simple import

---
 distutils/tests/test_extension.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py
index c5219d7bd7..733709966c 100644
--- a/distutils/tests/test_extension.py
+++ b/distutils/tests/test_extension.py
@@ -1,8 +1,8 @@
 """Tests for distutils.extension."""
 
 import os
+import pathlib
 import warnings
-from pathlib import Path
 
 from distutils.extension import Extension, read_setup_file
 
@@ -77,7 +77,7 @@ def test_extension_init(self):
             Extension('name', ['file', 1])
         ext = Extension('name', ['file1', 'file2'])
         assert ext.sources == ['file1', 'file2']
-        ext = Extension('name', [Path('file1'), Path('file2')])
+        ext = Extension('name', [pathlib.Path('file1'), pathlib.Path('file2')])
         assert ext.sources == ['file1', 'file2']
 
         # others arguments have defaults

From d13da588f6b303da509638c341f4dd37ee49da47 Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Sun, 21 Jul 2024 17:23:15 +0200
Subject: [PATCH 0954/1761] CI: add a job for running tests under MSVC CPython
 with GCC as the default compiler

The tests currently assume everywhere that there is only one compiler per platform.
While it would be possible to parametrize all the tests, that would make things more
complex, and we'd also have to decide which compiler is required for running the tests
and which one is optional, etc.

To avoid all this, introduce a DISTUTILS_TEST_DEFAULT_COMPILER env var which can be
used to override the default compiler type for the whole test run. This keeps the tests
as-is and makes sure all tests run against the alternative compiler. Also add it to
pass_env for tox, so it gets passed through to pytest, if set.

The added CI job installs an ucrt targeting GCC via MSYS2, and forces the MSVC CPython
to use it via DISTUTILS_TEST_DEFAULT_COMPILER=mingw32.
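
A local run with the override might look like, e.g.:

    DISTUTILS_TEST_DEFAULT_COMPILER=mingw32 tox
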
---
 .github/workflows/main.yml | 22 ++++++++++++++++++++++
 conftest.py                | 23 +++++++++++++++++++++++
 tox.ini                    |  2 ++
 3 files changed, 47 insertions(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 70d70bc6f1..2f4ec6b478 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -163,6 +163,28 @@ jobs:
           source /tmp/venv/bin/activate
           pytest
 
+  test_msvc_python_mingw:
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Setup Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.12
+      - name: Install tox
+        run: python -m pip install tox
+      - name: Install GCC
+        uses: msys2/setup-msys2@v2
+        with:
+          msystem: ucrt64
+          install: mingw-w64-ucrt-x86_64-cc
+      - name: Run
+        run: |
+          $env:MSYS2_ROOT = msys2 -c 'cygpath -m /'
+          $env:PATH = "$env:MSYS2_ROOT/ucrt64/bin;$env:PATH"
+          $env:DISTUTILS_TEST_DEFAULT_COMPILER = "mingw32"
+          tox
+
   ci_setuptools:
     # Integration testing with setuptools
     strategy:
diff --git a/conftest.py b/conftest.py
index 6639aa65bd..fd6cb6d61b 100644
--- a/conftest.py
+++ b/conftest.py
@@ -162,3 +162,26 @@ def disable_macos_customization(monkeypatch):
     from distutils import sysconfig
 
     monkeypatch.setattr(sysconfig, '_customize_macos', lambda: None)
+
+
+@pytest.fixture(autouse=True, scope="session")
+def monkey_patch_get_default_compiler():
+    """
+    Monkey patch distutils get_default_compiler to allow overriding the
+    default compiler. Mainly to test mingw32 with a MSVC Python.
+    """
+    from distutils import ccompiler
+
+    default_compiler = os.environ.get("DISTUTILS_TEST_DEFAULT_COMPILER")
+
+    if default_compiler is not None:
+
+        def patched_get_default_compiler(*args, **kwargs):
+            return default_compiler
+
+        original = ccompiler.get_default_compiler
+        ccompiler.get_default_compiler = patched_get_default_compiler
+        yield
+        ccompiler.get_default_compiler = original
+    else:
+        yield
diff --git a/tox.ini b/tox.ini
index d4bcc4165d..5483587625 100644
--- a/tox.ini
+++ b/tox.ini
@@ -5,6 +5,8 @@ setenv =
 	PYTHONWARNDEFAULTENCODING = 1
 	# pypa/distutils#99
 	VIRTUALENV_NO_SETUPTOOLS = 1
+pass_env =
+	DISTUTILS_TEST_DEFAULT_COMPILER
 commands =
 	pytest {posargs}
 usedevelop = True

From 8307b56117acf27a3b15413c0d488d35717b6d0b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 2 Aug 2024 10:27:14 -0400
Subject: [PATCH 0955/1761] Don't add flags when the value is None, such as
 on PyPy, where LDCXXSHARED is unset.
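
With @pass_none applied, the helper now short-circuits, roughly:

    _add_flags(None, 'LD')           # -> None (config var unset, e.g. on PyPy)
    _add_flags('g++ -shared', 'LD')  # -> 'g++ -shared' plus $LDFLAGS, if set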

---
 distutils/sysconfig.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 8929deab5a..fbdd5d73ae 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -571,6 +571,7 @@ def get_config_var(name):
     return get_config_vars().get(name)
 
 
+@pass_none
 def _add_flags(value: str, type: str) -> str:
     """
     Add any flags from the environment for the given type.

From d4a685ac7ba5b43505f91c814f5508819b1087c8 Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Sun, 21 Jul 2024 18:13:03 +0200
Subject: [PATCH 0956/1761] mingw: make get_msvcr() a noop

This was added back in the day to make mingw use the same CRT as CPython
(https://bugs.python.org/issue870382), but at least with newer mingw-w64 and
ucrt switching the CRT at "runtime" isn't supported anymore. To build a
compatible extension you have to use a ucrt mingw-w64 build, so things match
up and link against the same CRT.

CPython 3.5+ uses ucrt (see https://wiki.python.org/moin/WindowsCompilers), so
anything besides that is no longer relevant, which only leaves vcruntime140.

Since it's not clear what linking against vcruntime140 solves, and there have
been reports of it needing to be patched out:

* https://github.com/pypa/setuptools/issues/4101
* https://github.com/pypa/distutils/issues/204#issuecomment-1420892028

let's just make it return nothing. Keep get_msvcr() around for now to avoid breaking
code which patched it.

Fixes #204
---
 distutils/cygwinccompiler.py            | 40 ++---------------------
 distutils/tests/test_cygwinccompiler.py | 42 -------------------------
 2 files changed, 2 insertions(+), 80 deletions(-)

diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py
index 7b812fd055..f3e593a40e 100644
--- a/distutils/cygwinccompiler.py
+++ b/distutils/cygwinccompiler.py
@@ -9,13 +9,11 @@
 import copy
 import os
 import pathlib
-import re
 import shlex
 import sys
 import warnings
 from subprocess import check_output
 
-from ._collections import RangeMap
 from .errors import (
     CCompilerError,
     CompileError,
@@ -26,42 +24,10 @@
 from .unixccompiler import UnixCCompiler
 from .version import LooseVersion, suppress_known_deprecation
 
-_msvcr_lookup = RangeMap.left(
-    {
-        # MSVC 7.0
-        1300: ['msvcr70'],
-        # MSVC 7.1
-        1310: ['msvcr71'],
-        # VS2005 / MSVC 8.0
-        1400: ['msvcr80'],
-        # VS2008 / MSVC 9.0
-        1500: ['msvcr90'],
-        # VS2010 / MSVC 10.0
-        1600: ['msvcr100'],
-        # VS2012 / MSVC 11.0
-        1700: ['msvcr110'],
-        # VS2013 / MSVC 12.0
-        1800: ['msvcr120'],
-        # VS2015 / MSVC 14.0
-        1900: ['vcruntime140'],
-        2000: RangeMap.undefined_value,
-    },
-)
-
 
 def get_msvcr():
-    """Include the appropriate MSVC runtime library if Python was built
-    with MSVC 7.0 or later.
-    """
-    match = re.search(r'MSC v\.(\d{4})', sys.version)
-    try:
-        msc_ver = int(match.group(1))
-    except AttributeError:
-        return []
-    try:
-        return _msvcr_lookup[msc_ver]
-    except KeyError:
-        raise ValueError(f"Unknown MS Compiler version {msc_ver} ")
+    """No longer needed, but kept for backward compatibility."""
+    return []
 
 
 _runtime_library_dirs_msg = (
@@ -109,8 +75,6 @@ def __init__(self, verbose=False, dry_run=False, force=False):
             linker_so=(f'{self.linker_dll} -mcygwin {shared_option}'),
         )
 
-        # Include the appropriate MSVC runtime library if Python was built
-        # with MSVC 7.0 or later.
         self.dll_libraries = get_msvcr()
 
     @property
diff --git a/distutils/tests/test_cygwinccompiler.py b/distutils/tests/test_cygwinccompiler.py
index 2e1640b757..677bc0ac99 100644
--- a/distutils/tests/test_cygwinccompiler.py
+++ b/distutils/tests/test_cygwinccompiler.py
@@ -71,50 +71,8 @@ def test_check_config_h(self):
         assert check_config_h()[0] == CONFIG_H_OK
 
     def test_get_msvcr(self):
-        # []
-        sys.version = (
-            '2.6.1 (r261:67515, Dec  6 2008, 16:42:21) '
-            '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]'
-        )
         assert get_msvcr() == []
 
-        # MSVC 7.0
-        sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1300 32 bits (Intel)]'
-        )
-        assert get_msvcr() == ['msvcr70']
-
-        # MSVC 7.1
-        sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1310 32 bits (Intel)]'
-        )
-        assert get_msvcr() == ['msvcr71']
-
-        # VS2005 / MSVC 8.0
-        sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1400 32 bits (Intel)]'
-        )
-        assert get_msvcr() == ['msvcr80']
-
-        # VS2008 / MSVC 9.0
-        sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.1500 32 bits (Intel)]'
-        )
-        assert get_msvcr() == ['msvcr90']
-
-        sys.version = (
-            '3.10.0 (tags/v3.10.0:b494f59, Oct  4 2021, 18:46:30) '
-            '[MSC v.1929 32 bit (Intel)]'
-        )
-        assert get_msvcr() == ['vcruntime140']
-
-        # unknown
-        sys.version = (
-            '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) [MSC v.2000 32 bits (Intel)]'
-        )
-        with pytest.raises(ValueError):
-            get_msvcr()
-
     @pytest.mark.skipif('sys.platform != "cygwin"')
     def test_dll_libraries_not_none(self):
         from distutils.cygwinccompiler import CygwinCCompiler

From f577bcb5e15dde5411128939ce6336e1a20b9736 Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Sun, 21 Jul 2024 13:22:12 +0200
Subject: [PATCH 0957/1761] venv_install_options: add missing clear_argv
 fixture

Otherwise the test fails if arguments are passed to pytest, for example --no-cov:

FAILED distutils/tests/test_dist.py::TestDistributionBehavior::test_venv_install_options -
      distutils.errors.DistutilsArgError: option --no-cov not recognized
---
 distutils/tests/test_dist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/tests/test_dist.py b/distutils/tests/test_dist.py
index 5bd206fec1..4d78a19803 100644
--- a/distutils/tests/test_dist.py
+++ b/distutils/tests/test_dist.py
@@ -88,7 +88,7 @@ def test_command_packages_cmdline(self, clear_argv):
         'distutils' not in Distribution.parse_config_files.__module__,
         reason='Cannot test when virtualenv has monkey-patched Distribution',
     )
-    def test_venv_install_options(self, tmp_path):
+    def test_venv_install_options(self, tmp_path, clear_argv):
         sys.argv.append("install")
         file = str(tmp_path / 'file')
 

From 1f999b9c92f6850de5b9ba6be249324fed403103 Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Sun, 21 Jul 2024 21:08:19 +0200
Subject: [PATCH 0958/1761] Remove unused RangeMap

Its last use in cygwinccompiler was just removed.
---
 distutils/_collections.py | 145 --------------------------------------
 1 file changed, 145 deletions(-)

diff --git a/distutils/_collections.py b/distutils/_collections.py
index d11a83467c..863030b3cf 100644
--- a/distutils/_collections.py
+++ b/distutils/_collections.py
@@ -1,11 +1,7 @@
 from __future__ import annotations
 
 import collections
-import functools
 import itertools
-import operator
-from collections.abc import Mapping
-from typing import Any
 
 
 # from jaraco.collections 3.5.1
@@ -60,144 +56,3 @@ def __contains__(self, other):
 
     def __len__(self):
         return len(list(iter(self)))
-
-
-# from jaraco.collections 5.0.1
-class RangeMap(dict):
-    """
-    A dictionary-like object that uses the keys as bounds for a range.
-    Inclusion of the value for that range is determined by the
-    key_match_comparator, which defaults to less-than-or-equal.
-    A value is returned for a key if it is the first key that matches in
-    the sorted list of keys.
-
-    One may supply keyword parameters to be passed to the sort function used
-    to sort keys (i.e. key, reverse) as sort_params.
-
-    Create a map that maps 1-3 -> 'a', 4-6 -> 'b'
-
-    >>> r = RangeMap({3: 'a', 6: 'b'})  # boy, that was easy
-    >>> r[1], r[2], r[3], r[4], r[5], r[6]
-    ('a', 'a', 'a', 'b', 'b', 'b')
-
-    Even float values should work so long as the comparison operator
-    supports it.
-
-    >>> r[4.5]
-    'b'
-
-    Notice that the way rangemap is defined, it must be open-ended
-    on one side.
-
-    >>> r[0]
-    'a'
-    >>> r[-1]
-    'a'
-
-    One can close the open-end of the RangeMap by using undefined_value
-
-    >>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'})
-    >>> r[0]
-    Traceback (most recent call last):
-    ...
-    KeyError: 0
-
-    One can get the first or last elements in the range by using RangeMap.Item
-
-    >>> last_item = RangeMap.Item(-1)
-    >>> r[last_item]
-    'b'
-
-    .last_item is a shortcut for Item(-1)
-
-    >>> r[RangeMap.last_item]
-    'b'
-
-    Sometimes it's useful to find the bounds for a RangeMap
-
-    >>> r.bounds()
-    (0, 6)
-
-    RangeMap supports .get(key, default)
-
-    >>> r.get(0, 'not found')
-    'not found'
-
-    >>> r.get(7, 'not found')
-    'not found'
-
-    One often wishes to define the ranges by their left-most values,
-    which requires use of sort params and a key_match_comparator.
-
-    >>> r = RangeMap({1: 'a', 4: 'b'},
-    ...     sort_params=dict(reverse=True),
-    ...     key_match_comparator=operator.ge)
-    >>> r[1], r[2], r[3], r[4], r[5], r[6]
-    ('a', 'a', 'a', 'b', 'b', 'b')
-
-    That wasn't nearly as easy as before, so an alternate constructor
-    is provided:
-
-    >>> r = RangeMap.left({1: 'a', 4: 'b', 7: RangeMap.undefined_value})
-    >>> r[1], r[2], r[3], r[4], r[5], r[6]
-    ('a', 'a', 'a', 'b', 'b', 'b')
-
-    """
-
-    def __init__(
-        self,
-        source,
-        sort_params: Mapping[str, Any] = {},
-        key_match_comparator=operator.le,
-    ):
-        dict.__init__(self, source)
-        self.sort_params = sort_params
-        self.match = key_match_comparator
-
-    @classmethod
-    def left(cls, source):
-        return cls(
-            source, sort_params=dict(reverse=True), key_match_comparator=operator.ge
-        )
-
-    def __getitem__(self, item):
-        sorted_keys = sorted(self.keys(), **self.sort_params)
-        if isinstance(item, RangeMap.Item):
-            result = self.__getitem__(sorted_keys[item])
-        else:
-            key = self._find_first_match_(sorted_keys, item)
-            result = dict.__getitem__(self, key)
-            if result is RangeMap.undefined_value:
-                raise KeyError(key)
-        return result
-
-    def get(self, key, default=None):
-        """
-        Return the value for key if key is in the dictionary, else default.
-        If default is not given, it defaults to None, so that this method
-        never raises a KeyError.
-        """
-        try:
-            return self[key]
-        except KeyError:
-            return default
-
-    def _find_first_match_(self, keys, item):
-        is_match = functools.partial(self.match, item)
-        matches = list(filter(is_match, keys))
-        if matches:
-            return matches[0]
-        raise KeyError(item)
-
-    def bounds(self):
-        sorted_keys = sorted(self.keys(), **self.sort_params)
-        return (sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item])
-
-    # some special values for the RangeMap
-    undefined_value = type('RangeValueUndefined', (), {})()
-
-    class Item(int):
-        "RangeMap Item"
-
-    first_item = Item(0)
-    last_item = Item(-1)

From 32e5fea1c5b5e407292494908dfca5d6195e426e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 2 Aug 2024 11:32:01 -0400
Subject: [PATCH 0959/1761] Rely on monkeysession to monkeypatch.

---
 conftest.py | 16 ++++++----------
 1 file changed, 6 insertions(+), 10 deletions(-)

diff --git a/conftest.py b/conftest.py
index fd6cb6d61b..352366cb8f 100644
--- a/conftest.py
+++ b/conftest.py
@@ -165,7 +165,7 @@ def disable_macos_customization(monkeypatch):
 
 
 @pytest.fixture(autouse=True, scope="session")
-def monkey_patch_get_default_compiler():
+def monkey_patch_get_default_compiler(monkeysession):
     """
     Monkey patch distutils get_default_compiler to allow overriding the
     default compiler. Mainly to test mingw32 with a MSVC Python.
@@ -174,14 +174,10 @@ def monkey_patch_get_default_compiler():
 
     default_compiler = os.environ.get("DISTUTILS_TEST_DEFAULT_COMPILER")
 
-    if default_compiler is not None:
+    if default_compiler is None:
+        return
 
-        def patched_get_default_compiler(*args, **kwargs):
-            return default_compiler
+    def patched_getter(*args, **kwargs):
+        return default_compiler
 
-        original = ccompiler.get_default_compiler
-        ccompiler.get_default_compiler = patched_get_default_compiler
-        yield
-        ccompiler.get_default_compiler = original
-    else:
-        yield
+    monkeysession.setattr(ccompiler, 'get_default_compiler', patched_getter)

From b7ee725f3a5ba336cfca89f55d8a3b7d3bf7d27b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 2 Aug 2024 11:41:51 -0400
Subject: [PATCH 0960/1761] Apply isort. Ref pypa/distutils#240

---
 distutils/ccompiler.py                 | 2 +-
 distutils/command/install_data.py      | 1 -
 distutils/spawn.py                     | 1 -
 distutils/tests/test_archive_util.py   | 2 +-
 distutils/tests/test_build.py          | 3 +--
 distutils/tests/test_build_ext.py      | 2 +-
 distutils/tests/test_extension.py      | 1 -
 distutils/tests/test_install_data.py   | 5 ++---
 distutils/tests/test_mingwccompiler.py | 8 ++++----
 distutils/tests/test_spawn.py          | 2 +-
 distutils/tests/test_sysconfig.py      | 2 +-
 11 files changed, 12 insertions(+), 17 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 42b0812454..bc4743bcbf 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -22,7 +22,7 @@
 )
 from .file_util import move_file
 from .spawn import spawn
-from .util import execute, split_quoted, is_mingw
+from .util import execute, is_mingw, split_quoted
 
 
 class CCompiler:
diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index bd2932ab0b..a90ec3b4d0 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -9,7 +9,6 @@
 
 import functools
 import os
-
 from typing import Iterable
 
 from ..core import Command
diff --git a/distutils/spawn.py b/distutils/spawn.py
index 50d30a2761..107b011397 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -12,7 +12,6 @@
 import subprocess
 import sys
 import warnings
-
 from typing import Mapping
 
 from ._log import log
diff --git a/distutils/tests/test_archive_util.py b/distutils/tests/test_archive_util.py
index abbcd36cb0..389eba16e8 100644
--- a/distutils/tests/test_archive_util.py
+++ b/distutils/tests/test_archive_util.py
@@ -18,10 +18,10 @@
 from distutils.spawn import spawn
 from distutils.tests import support
 from os.path import splitdrive
-from test.support import patch
 
 import path
 import pytest
+from test.support import patch
 
 from .compat.py38 import check_warnings
 from .unix_compat import UID_0_SUPPORT, grp, pwd, require_uid_0, require_unix_id
diff --git a/distutils/tests/test_build.py b/distutils/tests/test_build.py
index 8fb1bc1b77..d379aca0bb 100644
--- a/distutils/tests/test_build.py
+++ b/distutils/tests/test_build.py
@@ -4,8 +4,7 @@
 import sys
 from distutils.command.build import build
 from distutils.tests import support
-from sysconfig import get_config_var
-from sysconfig import get_platform
+from sysconfig import get_config_var, get_platform
 
 
 class TestBuild(support.TempdirManager):
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index 6c4c4ba869..8bd3cef855 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -25,11 +25,11 @@
     fixup_build_ext,
 )
 from io import StringIO
-from test import support
 
 import jaraco.path
 import path
 import pytest
+from test import support
 
 from .compat import py38 as import_helper
 
diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py
index 733709966c..41872e04e8 100644
--- a/distutils/tests/test_extension.py
+++ b/distutils/tests/test_extension.py
@@ -3,7 +3,6 @@
 import os
 import pathlib
 import warnings
-
 from distutils.extension import Extension, read_setup_file
 
 import pytest
diff --git a/distutils/tests/test_install_data.py b/distutils/tests/test_install_data.py
index 4b15a26945..c800f86c64 100644
--- a/distutils/tests/test_install_data.py
+++ b/distutils/tests/test_install_data.py
@@ -2,12 +2,11 @@
 
 import os
 import pathlib
-
-import pytest
-
 from distutils.command.install_data import install_data
 from distutils.tests import support
 
+import pytest
+
 
 @pytest.mark.usefixtures('save_env')
 class TestInstallData(
diff --git a/distutils/tests/test_mingwccompiler.py b/distutils/tests/test_mingwccompiler.py
index 28f4762b2a..3e3ad5058c 100644
--- a/distutils/tests/test_mingwccompiler.py
+++ b/distutils/tests/test_mingwccompiler.py
@@ -1,8 +1,8 @@
-import pytest
-
-from distutils.util import split_quoted, is_mingw
-from distutils.errors import DistutilsPlatformError, CCompilerError
 from distutils import sysconfig
+from distutils.errors import CCompilerError, DistutilsPlatformError
+from distutils.util import is_mingw, split_quoted
+
+import pytest
 
 
 class TestMingw32CCompiler:
diff --git a/distutils/tests/test_spawn.py b/distutils/tests/test_spawn.py
index 2576bdd53d..fd7b669cbf 100644
--- a/distutils/tests/test_spawn.py
+++ b/distutils/tests/test_spawn.py
@@ -7,10 +7,10 @@
 from distutils.errors import DistutilsExecError
 from distutils.spawn import find_executable, spawn
 from distutils.tests import support
-from test.support import unix_shell
 
 import path
 import pytest
+from test.support import unix_shell
 
 from .compat import py38 as os_helper
 
diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index edc17992ef..49274a36ae 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -9,12 +9,12 @@
 from distutils import sysconfig
 from distutils.ccompiler import new_compiler  # noqa: F401
 from distutils.unixccompiler import UnixCCompiler
-from test.support import swap_item
 
 import jaraco.envs
 import path
 import pytest
 from jaraco.text import trim
+from test.support import swap_item
 
 
 def _gen_makefile(root, contents):

From d4ad24b09ed3fdeaef2e7f493e5446898fe5c504 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 2 Aug 2024 11:51:46 -0400
Subject: [PATCH 0961/1761] Add news fragment.

---
 newsfragments/4538.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4538.feature.rst

diff --git a/newsfragments/4538.feature.rst b/newsfragments/4538.feature.rst
new file mode 100644
index 0000000000..9c36ad5209
--- /dev/null
+++ b/newsfragments/4538.feature.rst
@@ -0,0 +1 @@
+Merged with distutils@d7ffdb9c7 including: Support for Pathlike objects in data files and extensions (pypa/distutils#272, pypa/distutils#237), native support for C++ compilers (pypa/distuils#228) and removed unused get_msvcr() (pypa/distutils#274).

From 780a782ccec6724fcec3f36de120fac07cb1fbba Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 2 Aug 2024 17:11:51 -0400
Subject: [PATCH 0962/1761] Correct reference in news fragment. Ref #4539

---
 newsfragments/4538.feature.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4538.feature.rst b/newsfragments/4538.feature.rst
index 9c36ad5209..d5e37e74f4 100644
--- a/newsfragments/4538.feature.rst
+++ b/newsfragments/4538.feature.rst
@@ -1 +1 @@
-Merged with distutils@d7ffdb9c7 including: Support for Pathlike objects in data files and extensions (pypa/distutils#272, pypa/distutils#237), native support for C++ compilers (pypa/distuils#228) and removed unused get_msvcr() (pypa/distutils#274).
+Merged with pypa/distutils@b7ee725f3 including: Support for Pathlike objects in data files and extensions (pypa/distutils#272, pypa/distutils#237), native support for C++ compilers (pypa/distuils#228) and removed unused get_msvcr() (pypa/distutils#274).

From 76942cfe9ab14b6d30eac50b9b3dff8cc9241b76 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 2 Aug 2024 17:32:45 -0400
Subject: [PATCH 0963/1761] =?UTF-8?q?Bump=20version:=2072.1.0=20=E2=86=92?=
 =?UTF-8?q?=2072.2.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 NEWS.rst                       | 9 +++++++++
 newsfragments/4538.feature.rst | 1 -
 pyproject.toml                 | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4538.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 2b055f2389..7215487880 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 72.1.0
+current_version = 72.2.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index e89dfcba6d..a2d5eeba36 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v72.2.0
+=======
+
+Features
+--------
+
+- Merged with pypa/distutils@b7ee725f3 including: Support for Pathlike objects in data files and extensions (pypa/distutils#272, pypa/distutils#237), native support for C++ compilers (pypa/distuils#228) and removed unused get_msvcr() (pypa/distutils#274). (#4538)
+
+
 v72.1.0
 =======
 
diff --git a/newsfragments/4538.feature.rst b/newsfragments/4538.feature.rst
deleted file mode 100644
index d5e37e74f4..0000000000
--- a/newsfragments/4538.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Merged with pypa/distutils@b7ee725f3 including: Support for Pathlike objects in data files and extensions (pypa/distutils#272, pypa/distutils#237), native support for C++ compilers (pypa/distuils#228) and removed unused get_msvcr() (pypa/distutils#274).
diff --git a/pyproject.toml b/pyproject.toml
index 0c7d5e752a..95efd1e9b3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "72.1.0"
+version = "72.2.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 40c56e200263566a9f49ee7e7c6c17c7627592ec Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 3 Aug 2024 20:38:56 +0200
Subject: [PATCH 0964/1761] Let _show_help() use commands instead of
 self.commands

Depending on the context, _show_help() is called with a different
commands argument. I believe the intent was to adapt the help message
to the context. However, it would operate on self.commands instead
of the commands argument, which would defeat that purpose.
---
 distutils/dist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index 0a57d60be9..115302b3e7 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -658,7 +658,7 @@ def _show_help(
             )
             print()
 
-        for command in self.commands:
+        for command in commands:
             if isinstance(command, type) and issubclass(command, Command):
                 klass = command
             else:

From bb5f936f4b73d4c10f3a0a87a63f54997c4c5740 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 29 Jun 2024 12:37:17 +0200
Subject: [PATCH 0965/1761] Apply ruff/tryceratops rule TRY300

TRY300 Consider moving this statement to an `else` block
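
For illustration, a minimal function written the way the rule prefers:
keeping only the failing call inside `try` means the `except` cannot
accidentally catch exceptions raised by the success path.

    from __future__ import annotations

    def parse_or_none(text: str) -> int | None:
        try:
            value = int(text)    # only the call that can fail
        except ValueError:
            return None
        else:
            return value         # success path lives in `else`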
---
 distutils/cygwinccompiler.py | 5 +++--
 distutils/file_util.py       | 3 ++-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py
index ce412e8329..88b9bc6588 100644
--- a/distutils/cygwinccompiler.py
+++ b/distutils/cygwinccompiler.py
@@ -309,6 +309,9 @@ def check_config_h():
     fn = sysconfig.get_config_h_filename()
     try:
         config_h = pathlib.Path(fn).read_text(encoding='utf-8')
+    except OSError as exc:
+        return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}")
+    else:
         substring = '__GNUC__'
         if substring in config_h:
             code = CONFIG_H_OK
@@ -317,8 +320,6 @@ def check_config_h():
             code = CONFIG_H_NOTOK
             mention_inflected = 'does not mention'
         return code, f"{fn!r} {mention_inflected} {substring!r}"
-    except OSError as exc:
-        return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}")
 
 
 def is_cygwincc(cc):
diff --git a/distutils/file_util.py b/distutils/file_util.py
index b19a5dcfa4..85ee4dafcb 100644
--- a/distutils/file_util.py
+++ b/distutils/file_util.py
@@ -140,12 +140,13 @@ def copy_file(  # noqa: C901
         if not (os.path.exists(dst) and os.path.samefile(src, dst)):
             try:
                 os.link(src, dst)
-                return (dst, 1)
             except OSError:
                 # If hard linking fails, fall back on copying file
                 # (some special filesystems don't support hard linking
                 #  even under Unix, see issue #8876).
                 pass
+            else:
+                return (dst, 1)
     elif link == 'sym':
         if not (os.path.exists(dst) and os.path.samefile(src, dst)):
             os.symlink(src, dst)

From 51dc58b0f6cb5d16d610caf308cc82a1e05199c3 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 29 Jun 2024 12:51:47 +0200
Subject: [PATCH 0966/1761] Apply ruff/tryceratops rule TRY301

TRY301 Abstract `raise` to an inner function
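
Roughly, the rule flags a raise inside a `try` whose own `except`
immediately catches it, and suggests moving the raise into a helper.
An illustrative sketch (RuntimeError stands in for the distutils error
type):

    def _require_v8(version: float) -> None:
        if version < 8.0:
            raise KeyError("sdkinstallrootv2.0")

    def load_macros(version: float) -> None:
        try:
            _require_v8(version)   # the raise now lives in a helper
        except KeyError as exc:
            raise RuntimeError("unsupported toolchain") from exc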
---
 distutils/command/install_lib.py | 6 +++---
 distutils/msvc9compiler.py       | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/distutils/command/install_lib.py b/distutils/command/install_lib.py
index 01579d46b4..4c1230a286 100644
--- a/distutils/command/install_lib.py
+++ b/distutils/command/install_lib.py
@@ -81,9 +81,9 @@ def finalize_options(self):
         if not isinstance(self.optimize, int):
             try:
                 self.optimize = int(self.optimize)
-                if self.optimize not in (0, 1, 2):
-                    raise AssertionError
-            except (ValueError, AssertionError):
+            except ValueError:
+                pass
+            if self.optimize not in (0, 1, 2):
                 raise DistutilsOptionError("optimize must be 0, 1, or 2")
 
     def run(self):
diff --git a/distutils/msvc9compiler.py b/distutils/msvc9compiler.py
index 4c70848730..b41f54f2b3 100644
--- a/distutils/msvc9compiler.py
+++ b/distutils/msvc9compiler.py
@@ -154,7 +154,7 @@ def load_macros(self, version):
             if version >= 8.0:
                 self.set_macro("FrameworkSDKDir", NET_BASE, "sdkinstallrootv2.0")
             else:
-                raise KeyError("sdkinstallrootv2.0")
+                raise KeyError("sdkinstallrootv2.0")  # noqa: TRY301
         except KeyError:
             raise DistutilsPlatformError(
                 """Python was built with Visual Studio 2008;

From e1664778ccadd96d71ce0b38542202ea6b209d2a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 29 Jun 2024 12:58:03 +0200
Subject: [PATCH 0967/1761] Enforce ruff/tryceratops rules (TRY)

---
 distutils/extension.py | 2 +-
 ruff.toml              | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/distutils/extension.py b/distutils/extension.py
index b302082f7a..33159079c1 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -105,7 +105,7 @@ def __init__(
         **kw,  # To catch unknown keywords
     ):
         if not isinstance(name, str):
-            raise AssertionError("'name' must be a string")
+            raise AssertionError("'name' must be a string")  # noqa: TRY004
         if not (
             isinstance(sources, list)
             and all(isinstance(v, (str, os.PathLike)) for v in sources)
diff --git a/ruff.toml b/ruff.toml
index b7850b6ddd..84accd2878 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -8,9 +8,11 @@ extend-select = [
 	"ISC",
 	"RUF010",
 	"RUF100",
+	"TRY",
 	"UP",
 ]
 ignore = [
+	"TRY003",
 	# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
 	"W191",
 	"E111",

From ffa18a49d7b2596a470aef14f92a7ea6e6332e71 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 29 Jun 2024 11:13:48 +0200
Subject: [PATCH 0968/1761] Enforce ruff/flake8-2020 rule (YTT301)

YTT301 `sys.version[0]` referenced (python10), use `sys.version_info`

The Python documentation discourages the use of `sys.version`:
	Do not extract version information out of it, rather,
	use `version_info` and [...]
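
The failure mode is concrete: on a hypothetical Python 10, the first
character of `sys.version` would be "1", not the major version.

    import sys

    print(sys.version[0])           # "3" today, but "1" on Python 10.x
    print(sys.version_info.major)   # 3 today, 10 on Python 10.x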
---
 distutils/sysconfig.py | 2 +-
 ruff.toml              | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index fbdd5d73ae..28a7c571dc 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -236,7 +236,7 @@ def get_python_lib(plat_specific=False, standard_lib=False, prefix=None):
         if prefix is None:
             prefix = PREFIX
         if standard_lib:
-            return os.path.join(prefix, "lib-python", sys.version[0])
+            return os.path.join(prefix, "lib-python", str(sys.version_info.major))
         return os.path.join(prefix, 'site-packages')
 
     early_prefix = prefix
diff --git a/ruff.toml b/ruff.toml
index b7850b6ddd..b23dd90333 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -9,6 +9,7 @@ extend-select = [
 	"RUF010",
 	"RUF100",
 	"UP",
+	"YTT",
 ]
 ignore = [
 	# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules

From fec08a87e69f296e85f89aad82ca49f45833adbd Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 4 Aug 2024 09:15:49 +0200
Subject: [PATCH 0969/1761] Fix example code in documentation

---
 docs/userguide/package_discovery.rst | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docs/userguide/package_discovery.rst b/docs/userguide/package_discovery.rst
index 6a7bdbf8da..c33877e1f6 100644
--- a/docs/userguide/package_discovery.rst
+++ b/docs/userguide/package_discovery.rst
@@ -88,8 +88,9 @@ exactly to the directory structure, you also need to configure ``package_dir``:
             package_dir = {
                 "mypkg": "lib",  # mypkg.module corresponds to lib/module.py
                 "mypkg.subpkg1": "lib1",  # mypkg.subpkg1.module1 corresponds to lib1/module1.py
-                "mypkg.subpkg2": "lib2"   # mypkg.subpkg2.module2 corresponds to lib2/module2.py
+                "mypkg.subpkg2": "lib2",  # mypkg.subpkg2.module2 corresponds to lib2/module2.py
                 # ...
+            }
         )
 
 .. tab:: pyproject.toml

From ab13bf2c34ce2e1bce60cc8a752b9a2065a83df5 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sat, 3 Aug 2024 21:36:13 +0200
Subject: [PATCH 0970/1761] Enforce flake8-bugbear (B) and isort (I) ruff rules

Ignore B028 and B904.
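
For reference, sketches of the two ignored rules (summaries as
understood from the flake8-bugbear docs):

    from __future__ import annotations

    import warnings

    def to_int(text: str) -> int:
        try:
            value = int(text)
        except ValueError:
            # B904 wants `raise ... from exc` (or `from None`) here
            raise RuntimeError(f"bad value: {text!r}")
        else:
            return value

    # B028 wants an explicit stacklevel= so the warning points at the caller
    warnings.warn("deprecated")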
---
 ruff.toml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/ruff.toml b/ruff.toml
index b7850b6ddd..41c9459bdb 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -5,6 +5,8 @@ extend-select = [
 	"W",
 
 	# local
+	"B",
+	"I",
 	"ISC",
 	"RUF010",
 	"RUF100",
@@ -26,6 +28,10 @@ ignore = [
 	"COM819",
 	"ISC001",
 	"ISC002",
+
+	# local
+	"B028",
+	"B904",
 ]
 
 [format]

From eb87dd6caef32791d1ee14c843e3abeeaf4b3a94 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 8 Aug 2024 15:13:30 +0100
Subject: [PATCH 0971/1761] Fix requirement normalisation inconsistency on
 macOS

---
 setuptools/tests/test_build_meta.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index ecb1dcfd87..ab8255e80d 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -436,16 +436,17 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
         }
         assert license == "---- placeholder MIT license ----"
 
-        metadata = metadata.replace("(", "").replace(")", "")
+        metadata = metadata.replace("(", "").replace(")", "").replace("'", '"')
         # ^-- compatibility hack for pypa/wheel#552
+        #     + normalise all quotes to `"` to avoid inconsistency in #4547
 
         for line in (
             "Summary: This is a Python package",
             "License: MIT",
             "Classifier: Intended Audience :: Developers",
             "Requires-Dist: appdirs",
-            "Requires-Dist: tomli >=1 ; extra == 'all'",
-            "Requires-Dist: importlib ; python_version == \"2.6\" and extra == 'all'",
+            "Requires-Dist: tomli >=1 ; extra == \"all\"",
+            "Requires-Dist: importlib ; python_version == \"2.6\" and extra == \"all\"",
         ):
             assert line in metadata
 

From 7735e68e62caacc1476bb41a4d8904777f09a821 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 8 Aug 2024 15:21:05 +0100
Subject: [PATCH 0972/1761] Debug failures of CI in macOS

---
 setuptools/tests/test_build_meta.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index ab8255e80d..8d6b0ffc23 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -448,7 +448,7 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
             "Requires-Dist: tomli >=1 ; extra == \"all\"",
             "Requires-Dist: importlib ; python_version == \"2.6\" and extra == \"all\"",
         ):
-            assert line in metadata
+            assert line in metadata, (line, metadata)
 
         assert metadata.strip().endswith("This is a ``README``")
         assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"

From 19753d93c37d0a8ce3843e4e7d7627a0bdf88439 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 8 Aug 2024 15:41:41 +0100
Subject: [PATCH 0973/1761] Normalise requirement string in test using
 packaging

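The normalisation now relied on, illustratively (exact spacing may vary
across packaging versions):

    from packaging.requirements import Requirement

    print(str(Requirement("tomli >=1 ; extra == 'all'")))
    # expected (approximately): tomli>=1; extra == "all"
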
---
 setuptools/tests/test_build_meta.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 8d6b0ffc23..63476d5155 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -12,6 +12,7 @@
 
 import pytest
 from jaraco import path
+from packaging.requirements import Requirement
 
 from .textwrap import DALS
 
@@ -445,8 +446,9 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
             "License: MIT",
             "Classifier: Intended Audience :: Developers",
             "Requires-Dist: appdirs",
-            "Requires-Dist: tomli >=1 ; extra == \"all\"",
-            "Requires-Dist: importlib ; python_version == \"2.6\" and extra == \"all\"",
+            "Requires-Dist: " + str(Requirement('tomli>=1 ; extra == "all"')),
+            "Requires-Dist: "
+            + str(Requirement('importlib; python_version=="2.6" and extra =="all"')),
         ):
             assert line in metadata, (line, metadata)
 

From bd04b1592cec7c793e5e461e0bf192efa1094f01 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 8 Aug 2024 15:56:39 +0100
Subject: [PATCH 0974/1761] Bump test dependency on wheel for consistent tests

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 95efd1e9b3..1ce17e63ab 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -44,7 +44,7 @@ test = [
 
 	# local
 	"virtualenv>=13.0.0",
-	"wheel",
+	"wheel>=0.44.0",  # Consistent requirement normalisation in METADATA (see #4547)
 	"pip>=19.1", # For proper file:// URLs support.
 	"packaging>=23.2",
 	"jaraco.envs>=2.2",

From c3e7be1b828fab1432b255d523b4db780d46132b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 8 Aug 2024 16:00:07 +0100
Subject: [PATCH 0975/1761] Remove unused hack in test

---
 setuptools/tests/test_build_meta.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 63476d5155..16f75f8763 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -437,10 +437,6 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
         }
         assert license == "---- placeholder MIT license ----"
 
-        metadata = metadata.replace("(", "").replace(")", "").replace("'", '"')
-        # ^-- compatibility hack for pypa/wheel#552
-        #     + normalise all quotes to `"` to avoid inconsistency in #4547
-
         for line in (
             "Summary: This is a Python package",
             "License: MIT",

From 9da51c4ad5abf7b6248a84faaf6c0833ff506519 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 8 Aug 2024 16:23:01 +0100
Subject: [PATCH 0976/1761] Add newsfragment

---
 newsfragments/4546.misc.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4546.misc.rst

diff --git a/newsfragments/4546.misc.rst b/newsfragments/4546.misc.rst
new file mode 100644
index 0000000000..f056a2b379
--- /dev/null
+++ b/newsfragments/4546.misc.rst
@@ -0,0 +1,2 @@
+Added lower bound to test dependency on ``wheel`` (0.44.0) to avoid
+small inconsistencies in ``Requires-Dist`` normalisation for ``METADATA``.

From d13f9c570e0b382fe8eb69e95ea11de83a898070 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 8 Aug 2024 11:56:45 -0400
Subject: [PATCH 0977/1761] Rename `_InstallerTypeT` and mark type aliases with
 `TypeAlias` (#4436)

* Rename _InstallerTypeT & Mark type aliases with TypeAlias to help static checkers
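
A minimal sketch of the PEP 613 convention applied here; the future
import keeps the annotation lazy, so typing_extensions is only needed
at type-check time:

    from __future__ import annotations

    from typing import TYPE_CHECKING, Union

    if TYPE_CHECKING:
        from typing_extensions import TypeAlias

    # Marked as an alias, so checkers don't treat it as a plain global.
    _PkgReqType: TypeAlias = Union[str, "Requirement"]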
---
 pkg_resources/__init__.py                 | 46 +++++++++++------------
 setuptools/_path.py                       | 12 ++++--
 setuptools/_reqs.py                       | 10 +++--
 setuptools/build_meta.py                  |  7 +++-
 setuptools/compat/py311.py                |  3 +-
 setuptools/config/_apply_pyprojecttoml.py |  7 ++--
 6 files changed, 50 insertions(+), 35 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 0b7ac13b55..a87fdf8570 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -106,7 +106,7 @@
 
 if TYPE_CHECKING:
     from _typeshed import BytesPath, StrPath, StrOrBytesPath
-    from typing_extensions import Self
+    from typing_extensions import Self, TypeAlias
 
 warnings.warn(
     "pkg_resources is deprecated as an API. "
@@ -118,20 +118,20 @@
 _T = TypeVar("_T")
 _DistributionT = TypeVar("_DistributionT", bound="Distribution")
 # Type aliases
-_NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
-_InstallerTypeT = Callable[["Requirement"], "_DistributionT"]
-_InstallerType = Callable[["Requirement"], Union["Distribution", None]]
-_PkgReqType = Union[str, "Requirement"]
-_EPDistType = Union["Distribution", _PkgReqType]
-_MetadataType = Union["IResourceProvider", None]
-_ResolvedEntryPoint = Any  # Can be any attribute in the module
-_ResourceStream = Any  # TODO / Incomplete: A readable file-like object
+_NestedStr: TypeAlias = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
+_StrictInstallerType: TypeAlias = Callable[["Requirement"], "_DistributionT"]
+_InstallerType: TypeAlias = Callable[["Requirement"], Union["Distribution", None]]
+_PkgReqType: TypeAlias = Union[str, "Requirement"]
+_EPDistType: TypeAlias = Union["Distribution", _PkgReqType]
+_MetadataType: TypeAlias = Union["IResourceProvider", None]
+_ResolvedEntryPoint: TypeAlias = Any  # Can be any attribute in the module
+_ResourceStream: TypeAlias = Any  # TODO / Incomplete: A readable file-like object
 # Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
-_ModuleLike = Union[object, types.ModuleType]
+_ModuleLike: TypeAlias = Union[object, types.ModuleType]
 # Any: Should be _ModuleLike but we end up with issues where _ModuleLike doesn't have _ZipLoaderModule's __loader__
-_ProviderFactoryType = Callable[[Any], "IResourceProvider"]
-_DistFinderType = Callable[[_T, str, bool], Iterable["Distribution"]]
-_NSHandlerType = Callable[[_T, str, str, types.ModuleType], Union[str, None]]
+_ProviderFactoryType: TypeAlias = Callable[[Any], "IResourceProvider"]
+_DistFinderType: TypeAlias = Callable[[_T, str, bool], Iterable["Distribution"]]
+_NSHandlerType: TypeAlias = Callable[[_T, str, str, types.ModuleType], Union[str, None]]
 _AdapterT = TypeVar(
     "_AdapterT", _DistFinderType[Any], _ProviderFactoryType, _NSHandlerType[Any]
 )
@@ -814,7 +814,7 @@ def resolve(
         self,
         requirements: Iterable[Requirement],
         env: Environment | None,
-        installer: _InstallerTypeT[_DistributionT],
+        installer: _StrictInstallerType[_DistributionT],
         replace_conflicting: bool = False,
         extras: tuple[str, ...] | None = None,
     ) -> list[_DistributionT]: ...
@@ -824,7 +824,7 @@ def resolve(
         requirements: Iterable[Requirement],
         env: Environment | None = None,
         *,
-        installer: _InstallerTypeT[_DistributionT],
+        installer: _StrictInstallerType[_DistributionT],
         replace_conflicting: bool = False,
         extras: tuple[str, ...] | None = None,
     ) -> list[_DistributionT]: ...
@@ -841,7 +841,7 @@ def resolve(
         self,
         requirements: Iterable[Requirement],
         env: Environment | None = None,
-        installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+        installer: _InstallerType | None | _StrictInstallerType[_DistributionT] = None,
         replace_conflicting: bool = False,
         extras: tuple[str, ...] | None = None,
     ) -> list[Distribution] | list[_DistributionT]:
@@ -947,7 +947,7 @@ def find_plugins(
         self,
         plugin_env: Environment,
         full_env: Environment | None,
-        installer: _InstallerTypeT[_DistributionT],
+        installer: _StrictInstallerType[_DistributionT],
         fallback: bool = True,
     ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
     @overload
@@ -956,7 +956,7 @@ def find_plugins(
         plugin_env: Environment,
         full_env: Environment | None = None,
         *,
-        installer: _InstallerTypeT[_DistributionT],
+        installer: _StrictInstallerType[_DistributionT],
         fallback: bool = True,
     ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
     @overload
@@ -971,7 +971,7 @@ def find_plugins(
         self,
         plugin_env: Environment,
         full_env: Environment | None = None,
-        installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+        installer: _InstallerType | None | _StrictInstallerType[_DistributionT] = None,
         fallback: bool = True,
     ) -> tuple[
         list[Distribution] | list[_DistributionT],
@@ -1217,7 +1217,7 @@ def best_match(
         self,
         req: Requirement,
         working_set: WorkingSet,
-        installer: _InstallerTypeT[_DistributionT],
+        installer: _StrictInstallerType[_DistributionT],
         replace_conflicting: bool = False,
     ) -> _DistributionT: ...
     @overload
@@ -1232,7 +1232,7 @@ def best_match(
         self,
         req: Requirement,
         working_set: WorkingSet,
-        installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+        installer: _InstallerType | None | _StrictInstallerType[_DistributionT] = None,
         replace_conflicting: bool = False,
     ) -> Distribution | None:
         """Find distribution best matching `req` and usable on `working_set`
@@ -1265,7 +1265,7 @@ def best_match(
     def obtain(
         self,
         requirement: Requirement,
-        installer: _InstallerTypeT[_DistributionT],
+        installer: _StrictInstallerType[_DistributionT],
     ) -> _DistributionT: ...
     @overload
     def obtain(
@@ -1285,7 +1285,7 @@ def obtain(
         installer: Callable[[Requirement], None]
         | _InstallerType
         | None
-        | _InstallerTypeT[_DistributionT] = None,
+        | _StrictInstallerType[_DistributionT] = None,
     ) -> Distribution | None:
         """Obtain a distribution matching `requirement` (e.g. via download)
 
diff --git a/setuptools/_path.py b/setuptools/_path.py
index d00db25464..bd9378761b 100644
--- a/setuptools/_path.py
+++ b/setuptools/_path.py
@@ -1,15 +1,21 @@
+from __future__ import annotations
+
 import contextlib
 import os
 import sys
-from typing import Union
+from typing import Union, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
 
 from more_itertools import unique_everseen
 
 
 if sys.version_info >= (3, 9):
-    StrPath = Union[str, os.PathLike[str]]  #  Same as _typeshed.StrPath
+    StrPath: TypeAlias = Union[str, os.PathLike[str]]  #  Same as _typeshed.StrPath
 else:
-    StrPath = Union[str, os.PathLike]
+    StrPath: TypeAlias = Union[str, os.PathLike]
 
 
 def ensure_directory(path):
diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py
index 1b64d9df79..cdea95fa3d 100644
--- a/setuptools/_reqs.py
+++ b/setuptools/_reqs.py
@@ -1,11 +1,15 @@
-from functools import lru_cache
-from typing import Callable, Iterable, Iterator, TypeVar, Union, overload
+from __future__ import annotations
 
+from functools import lru_cache
+from typing import Callable, Iterable, Iterator, TypeVar, Union, overload, TYPE_CHECKING
 import jaraco.text as text
 from packaging.requirements import Requirement
 
+if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
 _T = TypeVar("_T")
-_StrOrIter = Union[str, Iterable[str]]
+_StrOrIter: TypeAlias = Union[str, Iterable[str]]
 
 
 parse_req: Callable[[str], Requirement] = lru_cache()(Requirement)
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index c52c872fd0..d8ab28be74 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -38,7 +38,7 @@
 import tempfile
 import warnings
 from pathlib import Path
-from typing import Dict, Iterator, List, Optional, Union, Iterable
+from typing import TYPE_CHECKING, Dict, Iterator, List, Union, Iterable
 
 import setuptools
 import distutils
@@ -48,6 +48,9 @@
 from .warnings import SetuptoolsDeprecationWarning
 from distutils.util import strtobool
 
+if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
 
 __all__ = [
     'get_requires_for_build_sdist',
@@ -142,7 +145,7 @@ def suppress_known_deprecation():
         yield
 
 
-_ConfigSettings = Optional[Dict[str, Union[str, List[str], None]]]
+_ConfigSettings: TypeAlias = Union[Dict[str, Union[str, List[str], None]], None]
 """
 Currently the user can run::
 
diff --git a/setuptools/compat/py311.py b/setuptools/compat/py311.py
index 5069c441c4..cd5abc5407 100644
--- a/setuptools/compat/py311.py
+++ b/setuptools/compat/py311.py
@@ -6,9 +6,10 @@
 
 if TYPE_CHECKING:
     from _typeshed import StrOrBytesPath, ExcInfo
+    from typing_extensions import TypeAlias
 
 # Same as shutil._OnExcCallback from typeshed
-_OnExcCallback = Callable[[Callable[..., Any], str, BaseException], object]
+_OnExcCallback: TypeAlias = Callable[[Callable[..., Any], str, BaseException], object]
 
 
 def shutil_rmtree(
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 6cc59d2b95..20f36e85aa 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -33,11 +33,12 @@
     from distutils.dist import _OptionsList
     from setuptools._importlib import metadata
     from setuptools.dist import Distribution
+    from typing_extensions import TypeAlias
 
 EMPTY: Mapping = MappingProxyType({})  # Immutable dict-like
-_ProjectReadmeValue = Union[str, Dict[str, str]]
-_CorrespFn = Callable[["Distribution", Any, StrPath], None]
-_Correspondence = Union[str, _CorrespFn]
+_ProjectReadmeValue: TypeAlias = Union[str, Dict[str, str]]
+_CorrespFn: TypeAlias = Callable[["Distribution", Any, StrPath], None]
+_Correspondence: TypeAlias = Union[str, _CorrespFn]
 
 _logger = logging.getLogger(__name__)
 

From 7923172aaeae3e766d9c8c21a1acc58bfc163c06 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 8 Aug 2024 13:27:55 -0400
Subject: [PATCH 0978/1761] Runtime changes for typeshed parameter annotations
 merge (#4505)

* Runtime changes for typeshed merge
* Update setuptools/command/editable_wheel.py
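
The from_param reordering matters because a str is itself an Iterable
of str, so the isinstance checks must run from most to least specific.
A standalone sketch:

    from __future__ import annotations

    from collections.abc import Iterable

    def classify(param: str | Iterable[str] | None) -> str:
        if isinstance(param, str):       # must precede the Iterable check
            return "string"
        if isinstance(param, Iterable):  # list, tuple, generator, ...
            return "iterable"
        return "none"

    assert classify("python") == "string"  # not treated as six characters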
---
 newsfragments/4505.feature.rst       |  1 +
 setuptools/command/build_ext.py      |  3 +--
 setuptools/command/easy_install.py   | 14 ++++++++++----
 setuptools/command/editable_wheel.py |  5 +++--
 setuptools/dist.py                   |  2 +-
 setuptools/extension.py              |  5 +++--
 6 files changed, 19 insertions(+), 11 deletions(-)
 create mode 100644 newsfragments/4505.feature.rst

diff --git a/newsfragments/4505.feature.rst b/newsfragments/4505.feature.rst
new file mode 100644
index 0000000000..e032dd997e
--- /dev/null
+++ b/newsfragments/4505.feature.rst
@@ -0,0 +1 @@
+Changed the order of type checks in ``setuptools.command.easy_install.CommandSpec.from_param`` to support any `collections.abc.Iterable` of `str` as the param -- by :user:`Avasam`
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index 508704f3c0..da8d56fac2 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -8,7 +8,6 @@
 from typing import Iterator
 from pathlib import Path
 
-from distutils.command.build_ext import build_ext as _du_build_ext
 from distutils.ccompiler import new_compiler
 from distutils.sysconfig import customize_compiler, get_config_var
 from distutils import log
@@ -24,7 +23,7 @@
     # also. Ref #1229.
     __import__('Cython.Compiler.Main')
 except ImportError:
-    _build_ext = _du_build_ext
+    from distutils.command.build_ext import build_ext as _build_ext
 
 # make sure _config_vars is initialized
 get_config_var("LDSHARED")
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 36114d40ed..74b79d1adc 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -12,6 +12,7 @@
 
 from __future__ import annotations
 
+from collections.abc import Iterable
 from glob import glob
 from distutils.util import get_platform
 from distutils.util import convert_path, subst_vars
@@ -26,6 +27,7 @@
 from distutils.command import install
 import sys
 import os
+from typing import TYPE_CHECKING
 import zipimport
 import shutil
 import tempfile
@@ -78,6 +80,8 @@
 from .._path import ensure_directory
 from jaraco.text import yield_lines
 
+if TYPE_CHECKING:
+    from typing_extensions import Self
 
 # Turn on PEP440Warnings
 warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
@@ -2055,19 +2059,21 @@ def _sys_executable(cls):
         return os.environ.get('__PYVENV_LAUNCHER__', _default)
 
     @classmethod
-    def from_param(cls, param):
+    def from_param(cls, param: Self | str | Iterable[str] | None) -> Self:
         """
         Construct a CommandSpec from a parameter to build_scripts, which may
         be None.
         """
         if isinstance(param, cls):
             return param
-        if isinstance(param, list):
+        if isinstance(param, str):
+            return cls.from_string(param)
+        if isinstance(param, Iterable):
             return cls(param)
         if param is None:
             return cls.from_environment()
-        # otherwise, assume it's a string.
-        return cls.from_string(param)
+        # AttributeError is raised for backwards compatibility; this should really be a TypeError
+        raise AttributeError(f"Argument has an unsupported type {type(param)}")
 
     @classmethod
     def from_environment(cls):
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 49fd609b15..5ad8ecdd6a 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -466,8 +466,9 @@ def _create_file(self, relative_output: str, src_file: str, link=None):
     def _create_links(self, outputs, output_mapping):
         self.auxiliary_dir.mkdir(parents=True, exist_ok=True)
         link_type = "sym" if _can_symlink_files(self.auxiliary_dir) else "hard"
-        mappings = {self._normalize_output(k): v for k, v in output_mapping.items()}
-        mappings.pop(None, None)  # remove files that are not relative to build_lib
+        normalised = ((self._normalize_output(k), v) for k, v in output_mapping.items())
+        # remove files that are not relative to build_lib
+        mappings = {k: v for k, v in normalised if k is not None}
 
         for output in outputs:
             relative = self._normalize_output(output)
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 6a29b2b2b7..6c73ae792f 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -55,7 +55,7 @@ def assert_string_list(dist, attr, value):
     try:
         # verify that value is a list or tuple to exclude unordered
         # or single-use iterables
-        assert isinstance(value, (list, tuple))
+        assert isinstance(value, sequence)
         # verify that elements of value are strings
         assert ''.join(value) != value
     except (TypeError, ValueError, AttributeError, AssertionError) as e:
diff --git a/setuptools/extension.py b/setuptools/extension.py
index 25420f42de..8a8a9206fe 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -1,3 +1,4 @@
+from __future__ import annotations
 import re
 import functools
 import distutils.core
@@ -126,10 +127,10 @@ class Extension(_Extension):
       specified on Windows. (since v63)
     """
 
-    def __init__(self, name, sources, *args, **kw):
+    def __init__(self, name: str, sources, *args, py_limited_api: bool = False, **kw):
         # The *args is needed for compatibility as calls may use positional
         # arguments. py_limited_api may be set only via keyword.
-        self.py_limited_api = kw.pop("py_limited_api", False)
+        self.py_limited_api = py_limited_api
         super().__init__(name, sources, *args, **kw)
 
     def _convert_pyx_sources_to_lang(self):

From aed69d1c0e75d149762d8cb8e81e26b15b7d4555 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 8 Aug 2024 13:30:19 -0400
Subject: [PATCH 0979/1761] Add Type annotations developer guideline (#4397)

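One of the added guidelines, shown as a small example: a plain
generator function can use the leaner Iterator annotation.

    from typing import Iterator

    def chunks(text: str, size: int) -> Iterator[str]:
        # Equivalent to Generator[str, None, None] for callers,
        # without exposing send/return types that are never used.
        for start in range(0, len(text), size):
            yield text[start : start + size]
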
---
 docs/development/developer-guide.rst | 32 ++++++++++++++++++++++++++++
 1 file changed, 32 insertions(+)

diff --git a/docs/development/developer-guide.rst b/docs/development/developer-guide.rst
index 8c9142fc30..4566fef8d0 100644
--- a/docs/development/developer-guide.rst
+++ b/docs/development/developer-guide.rst
@@ -131,3 +131,35 @@ simple Python script ``tools/vendor.py``.
 To refresh the dependencies, run the following command::
 
     $ tox -e vendor
+
+----------------
+Type annotations
+----------------
+
+Most standards and best practices are enforced by
+`Ruff `_'s ``ANN2``, ``FA``, ``PYI``, ``UP``
+and ``YTT`` rules.
+
+Explicit return types have to be added for typed public functions whose
+parameters are *all* annotated. This is enforced by ``ANN2``, but it's worth noting
+that this is due to mypy inferring ``Any`` even for simple return types. Mypy also
+doesn't count functions with missing parameter annotations as "typed". (see
+`python/mypy#4409 `_,
+`python/mypy#10149 `_ and
+`python/mypy#6646 `_).
+Otherwise, return annotations can be omitted to reduce verbosity,
+especially for complex return types.
+
+Instead of typing an explicit return type annotation as
+``Generator[..., None, None]``, we'll prefer using an ``Iterator`` as it is more
+concise and conceptually easier to deal with. Returning a ``Generator`` with no
+``yield`` type or ``send`` type can sometimes be considered as exposing
+implementation details. See
+`Y058 `_.
+
+Avoid importing private type-checking-only symbols. These are often
+`typeshed `_ internal details and are not
+guaranteed to be stable.
+Importing from ``_typeshed`` or ``typing_extensions`` is fine, but if you find
+yourself importing the same symbol in ``TYPE_CHECKING`` blocks a lot, consider
+implementing an alias directly in ``setuptools``.

From 896be0165bcdb717da49d570b2e8300502fdf92f Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 8 Aug 2024 14:14:05 -0400
Subject: [PATCH 0980/1761] Fix most mypy `attr-defined` & pyright
 `reportAttributeAccessIssue` by explicitly defining attributes on classes
 (#4535)

* Fix most mypy attr-defined by explicitly defining attributes on classes
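
The recurring fix, sketched with stand-in classes: re-declaring the
attribute on the subclass narrows the type that checkers see, with no
runtime effect.

    from __future__ import annotations

    class DistutilsDist:                      # stand-in for distutils' Distribution
        pass

    class SetuptoolsDist(DistutilsDist):      # stand-in for setuptools' Distribution
        include_package_data: bool | None = None

    class BaseCommand:                        # stand-in for distutils' Command
        distribution: DistutilsDist

    class build_py(BaseCommand):
        # Same attribute, narrower type; checkers now accept
        # self.distribution.include_package_data.
        distribution: SetuptoolsDist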
---
 mypy.ini                              | 6 +++---
 setuptools/command/build_ext.py       | 2 ++
 setuptools/command/build_py.py        | 4 +++-
 setuptools/command/editable_wheel.py  | 1 +
 setuptools/command/egg_info.py        | 6 ++++--
 setuptools/command/install.py         | 3 +++
 setuptools/command/install_lib.py     | 4 ++++
 setuptools/command/install_scripts.py | 3 +++
 setuptools/command/sdist.py           | 2 ++
 setuptools/command/upload.py          | 3 +++
 setuptools/config/pyprojecttoml.py    | 2 +-
 setuptools/dist.py                    | 2 ++
 setuptools/extension.py               | 9 +++++++++
 setuptools/tests/__init__.py          | 9 ++++-----
 setuptools/tests/test_build_ext.py    | 1 +
 15 files changed, 45 insertions(+), 12 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index 4fba13c286..19b4de03a3 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -20,9 +20,9 @@ exclude = (?x)(
 # Too many false-positives
 disable_error_code = overload-overlap
 
-# Ignoring attr-defined because setuptools wraps a lot of distutils classes, adding new attributes,
-# w/o updating all the attributes and return types from the base classes for type-checkers to understand
-# Especially with setuptools.dist.command vs distutils.dist.command vs setuptools._distutils.dist.command
+# DistributionMetadata.license_files and DistributionMetadata.license_file
+# are dynamically patched in setuptools/_core_metadata.py
+# and no DistributionMetadata subclass exists in setuptools
 [mypy-setuptools.*]
 disable_error_code = attr-defined
 
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index da8d56fac2..0a2af0dd53 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -12,6 +12,7 @@
 from distutils.sysconfig import customize_compiler, get_config_var
 from distutils import log
 
+from setuptools.dist import Distribution
 from setuptools.errors import BaseError
 from setuptools.extension import Extension, Library
 
@@ -83,6 +84,7 @@ def get_abi3_suffix():
 
 
 class build_ext(_build_ext):
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
     editable_mode: bool = False
     inplace: bool = False
 
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index 15a4f63fdd..0cdb64f424 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -14,8 +14,9 @@
 from typing import Iterable, Iterator
 
 from more_itertools import unique_everseen
-from ..warnings import SetuptoolsDeprecationWarning
 
+from ..dist import Distribution
+from ..warnings import SetuptoolsDeprecationWarning
 
 _IMPLICIT_DATA_FILES = ('*.pyi', 'py.typed')
 
@@ -34,6 +35,7 @@ class build_py(orig.build_py):
     'py_modules' and 'packages' in the same setup operation.
     """
 
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
     editable_mode: bool = False
     existing_egg_info_dir: str | None = None  #: Private API, internal use only.
 
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 5ad8ecdd6a..abc7b9f2c8 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -153,6 +153,7 @@ def run(self):
             self._create_wheel_file(bdist_wheel)
         except Exception:
             traceback.print_exc()
+            # TODO: Fix false-positive [attr-defined] in typeshed
             project = self.distribution.name or self.distribution.get_name()
             _DebuggingTips.emit(project=project)
             raise
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 30b62f5f2e..8a2d42ea04 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -24,13 +24,14 @@
 from setuptools.command.sdist import walk_revctrl
 from setuptools.command.setopt import edit_config
 from setuptools.command import bdist_egg
+from setuptools.dist import Distribution
 import setuptools.unicode_utils as unicode_utils
 from setuptools.glob import glob
 
-import packaging
+import packaging.requirements
+import packaging.version
 from ..warnings import SetuptoolsDeprecationWarning
 
-
 PY_MAJOR = '{}.{}'.format(*sys.version_info)
 
 
@@ -520,6 +521,7 @@ def _safe_path(self, path):
 
 
 class manifest_maker(sdist):
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
     template = "MANIFEST.in"
 
     def initialize_options(self):
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index f1ea2adf1d..12ef06f542 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -9,6 +9,7 @@
 from typing import Any, ClassVar, cast
 
 import setuptools
+from ..dist import Distribution
 from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
 from .bdist_egg import bdist_egg as bdist_egg_cls
 
@@ -20,6 +21,8 @@
 class install(orig.install):
     """Use easy_install to install the package, w/dependencies"""
 
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
     user_options = orig.install.user_options + [
         ('old-and-unmanageable', None, "Try not to use this!"),
         (
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index 3c77c6ebc6..d1577384df 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -3,12 +3,16 @@
 import sys
 from itertools import product, starmap
 import distutils.command.install_lib as orig
+
 from .._path import StrPath
+from ..dist import Distribution
 
 
 class install_lib(orig.install_lib):
     """Don't add compiled flags to filenames of non-Python files"""
 
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
     def run(self):
         self.build()
         outfiles = self.install()
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index f44281b49b..fa5de91f46 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -6,11 +6,14 @@
 import sys
 
 from .._path import ensure_directory
+from ..dist import Distribution
 
 
 class install_scripts(orig.install_scripts):
     """Do normal script install, plus any egg_info wrapper scripts"""
 
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
     def initialize_options(self):
         orig.install_scripts.initialize_options(self)
         self.no_ep = False
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index a834ba4a78..ca0f712792 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -5,6 +5,7 @@
 from itertools import chain
 
 from .._importlib import metadata
+from ..dist import Distribution
 from .build import _ORIGINAL_SUBCOMMANDS
 
 _default_revctrl = list
@@ -43,6 +44,7 @@ class sdist(orig.sdist):
         ),
     ]
 
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
     negative_opt = {}
 
     README_EXTENSIONS = ['', '.rst', '.txt', '.md']
diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
index 1cca47cea9..e05c82ae0f 100644
--- a/setuptools/command/upload.py
+++ b/setuptools/command/upload.py
@@ -1,12 +1,15 @@
 from distutils import log
 from distutils.command import upload as orig
 
+from setuptools.dist import Distribution
 from setuptools.errors import RemovedCommandError
 
 
 class upload(orig.upload):
     """Formerly used to upload packages to PyPI."""
 
+    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
+
     def run(self):
         msg = (
             "The upload command has been removed, use twine to upload "
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index a83e43bb35..b7d521b7ad 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -121,7 +121,7 @@ def read_configuration(
     # the default would be an improvement.
     # `ini2toml` backfills include_package_data=False when nothing is explicitly given,
     # therefore setting a default here is backwards compatible.
-    if dist and getattr(dist, "include_package_data", None) is not None:
+    if dist and dist.include_package_data is not None:
         setuptools_table.setdefault("include-package-data", dist.include_package_data)
     else:
         setuptools_table.setdefault("include-package-data", True)
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 6c73ae792f..03017e56e1 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -265,6 +265,8 @@ def __init__(self, attrs: MutableMapping | None = None) -> None:
             self.package_data: dict[str, list[str]] = {}
         attrs = attrs or {}
         self.dist_files: list[tuple[str, str, str]] = []
+        self.include_package_data: bool | None = None
+        self.exclude_package_data: dict[str, list[str]] | None = None
         # Filter-out setuptools' specific options.
         self.src_root = attrs.pop("src_root", None)
         self.dependency_links = attrs.pop('dependency_links', [])
diff --git a/setuptools/extension.py b/setuptools/extension.py
index 8a8a9206fe..96d392ef2b 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -127,6 +127,15 @@ class Extension(_Extension):
       specified on Windows. (since v63)
     """
 
+    # These 4 are set and used in setuptools/command/build_ext.py
+    # The lack of a default value and risk of `AttributeError` is purposeful
+    # to avoid people forgetting to call finalize_options if they modify the extension list.
+    # See example/rationale in https://github.com/pypa/setuptools/issues/4529.
+    _full_name: str  #: Private API, internal use only.
+    _links_to_dynamic: bool  #: Private API, internal use only.
+    _needs_stub: bool  #: Private API, internal use only.
+    _file_name: str  #: Private API, internal use only.
+
     def __init__(self, name: str, sources, *args, py_limited_api: bool = False, **kw):
         # The *args is needed for compatibility as calls may use positional
         # arguments. py_limited_api may be set only via keyword.
diff --git a/setuptools/tests/__init__.py b/setuptools/tests/__init__.py
index 738ebf43be..415ece4234 100644
--- a/setuptools/tests/__init__.py
+++ b/setuptools/tests/__init__.py
@@ -6,10 +6,9 @@
 
 __all__ = ['fail_on_ascii']
 
-locale_encoding = (
-    locale.getencoding()
-    if sys.version_info >= (3, 11)
-    else locale.getpreferredencoding(False)
-)
+if sys.version_info >= (3, 11):
+    locale_encoding = locale.getencoding()
+else:
+    locale_encoding = locale.getpreferredencoding(False)
 is_ascii = locale_encoding == 'ANSI_X3.4-1968'
 fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index 0482fb5d5c..943fc6df2f 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -189,6 +189,7 @@ def get_build_ext_cmd(self, optional: bool, **opts):
         dist = Distribution(dict(ext_modules=[extension]))
         dist.script_name = 'setup.py'
         cmd = build_ext(dist)
+        # TODO: False-positive [attr-defined], raise upstream
         vars(cmd).update(build_lib=".build/lib", build_temp=".build/tmp", **opts)
         cmd.ensure_finalized()
         return cmd

From ab8290c35fba23c116a261bd2c9641fd2d68251b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 8 Aug 2024 14:15:38 -0400
Subject: [PATCH 0981/1761] `bdist_wheel` typing improvement (#4383)

* bdist_wheel typing improvement
* Extract compression conversion logic to _zip_compression
* Add newsfragment
* Update mypy.ini
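
Loosely, the extracted helper resolves either the symbolic name or the
raw zipfile constant. A standalone sketch, assuming the usual
stored/deflated mapping:

    from __future__ import annotations

    from zipfile import ZIP_DEFLATED, ZIP_STORED

    SUPPORTED = {"stored": ZIP_STORED, "deflated": ZIP_DEFLATED}

    def zip_compression(compression: int | str) -> int:
        if isinstance(compression, int) and compression in SUPPORTED.values():
            return compression                   # already a valid constant
        resolved = SUPPORTED.get(str(compression))
        if resolved is not None:
            return resolved                      # symbolic name resolved
        raise ValueError(f"Unsupported compression: {compression!r}")

    assert zip_compression("deflated") == ZIP_DEFLATED
    assert zip_compression(ZIP_STORED) == ZIP_STORED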
---
 mypy.ini                          |  1 -
 newsfragments/4383.bugfix.rst     |  1 +
 setuptools/command/bdist_wheel.py | 59 ++++++++++++++++++-------------
 3 files changed, 36 insertions(+), 25 deletions(-)
 create mode 100644 newsfragments/4383.bugfix.rst

diff --git a/mypy.ini b/mypy.ini
index 19b4de03a3..569c7f0ace 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -15,7 +15,6 @@ exclude = (?x)(
 	| ^setuptools/_vendor/ # Vendored
 	| ^setuptools/_distutils/ # Vendored
 	| ^setuptools/config/_validate_pyproject/ # Auto-generated
-    | ^setuptools/tests/bdist_wheel_testdata/  # Duplicate module name
 	)
 # Too many false-positives
 disable_error_code = overload-overlap
diff --git a/newsfragments/4383.bugfix.rst b/newsfragments/4383.bugfix.rst
new file mode 100644
index 0000000000..e5fd603abb
--- /dev/null
+++ b/newsfragments/4383.bugfix.rst
@@ -0,0 +1 @@
+Prevent an error in ``bdist_wheel`` if ``compression`` is set to a `str` (even if valid) after finalizing options but before running the command. -- by :user:`Avasam`
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 5b9bcec60c..de6440f22f 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -23,13 +23,14 @@
 from zipfile import ZIP_DEFLATED, ZIP_STORED
 
 from .. import Command, __version__
+from .egg_info import egg_info as egg_info_cls
 from wheel.metadata import pkginfo_to_metadata
 from packaging import tags
 from packaging import version as _packaging_version
 from wheel.wheelfile import WheelFile
 
 if TYPE_CHECKING:
-    import types
+    from _typeshed import ExcInfo
 
 
 def safe_name(name: str) -> str:
@@ -152,12 +153,14 @@ def safer_version(version: str) -> str:
 def remove_readonly(
     func: Callable[..., object],
     path: str,
-    excinfo: tuple[type[Exception], Exception, types.TracebackType],
+    excinfo: ExcInfo,
 ) -> None:
     remove_readonly_exc(func, path, excinfo[1])
 
 
-def remove_readonly_exc(func: Callable[..., object], path: str, exc: Exception) -> None:
+def remove_readonly_exc(
+    func: Callable[..., object], path: str, exc: BaseException
+) -> None:
     os.chmod(path, stat.S_IWRITE)
     func(path)
 
@@ -232,40 +235,35 @@ class bdist_wheel(Command):
 
     def initialize_options(self) -> None:
         self.bdist_dir: str | None = None
-        self.data_dir = None
+        self.data_dir: str | None = None
         self.plat_name: str | None = None
-        self.plat_tag = None
+        self.plat_tag: str | None = None
         self.format = "zip"
         self.keep_temp = False
         self.dist_dir: str | None = None
-        self.egginfo_dir = None
+        self.egginfo_dir: str | None = None
         self.root_is_pure: bool | None = None
-        self.skip_build = None
+        self.skip_build = False
         self.relative = False
         self.owner = None
         self.group = None
         self.universal: bool = False
-        self.compression: str | int = "deflated"
+        self.compression: int | str = "deflated"
         self.python_tag: str = python_tag()
         self.build_number: str | None = None
         self.py_limited_api: str | Literal[False] = False
         self.plat_name_supplied = False
 
-    def finalize_options(self):
-        if self.bdist_dir is None:
+    def finalize_options(self) -> None:
+        if not self.bdist_dir:
             bdist_base = self.get_finalized_command("bdist").bdist_base
             self.bdist_dir = os.path.join(bdist_base, "wheel")
 
-        egg_info = self.distribution.get_command_obj("egg_info")
+        egg_info = cast(egg_info_cls, self.distribution.get_command_obj("egg_info"))
         egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`
 
         self.data_dir = self.wheel_dist_name + ".data"
-        self.plat_name_supplied = self.plat_name is not None
-
-        try:
-            self.compression = self.supported_compressions[self.compression]
-        except KeyError:
-            raise ValueError(f"Unsupported compression: {self.compression}") from None
+        self.plat_name_supplied = bool(self.plat_name)
 
         need_options = ("dist_dir", "plat_name", "skip_build")
 
@@ -293,21 +291,21 @@ def finalize_options(self):
             raise ValueError("Build tag (build-number) must start with a digit.")
 
     @property
-    def wheel_dist_name(self):
+    def wheel_dist_name(self) -> str:
         """Return distribution full name with - replaced with _"""
-        components = (
+        components = [
             safer_name(self.distribution.get_name()),
             safer_version(self.distribution.get_version()),
-        )
+        ]
         if self.build_number:
-            components += (self.build_number,)
+            components.append(self.build_number)
         return "-".join(components)
 
     def get_tag(self) -> tuple[str, str, str]:
         # bdist sets self.plat_name if unset, we should only use it for purepy
         # wheels if the user supplied it.
-        if self.plat_name_supplied:
-            plat_name = cast(str, self.plat_name)
+        if self.plat_name_supplied and self.plat_name:
+            plat_name = self.plat_name
         elif self.root_is_pure:
             plat_name = "any"
         else:
@@ -431,7 +429,7 @@ def run(self):
             os.makedirs(self.dist_dir)
 
         wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
-        with WheelFile(wheel_path, "w", self.compression) as wf:
+        with WheelFile(wheel_path, "w", self._zip_compression()) as wf:
             wf.write_files(archive_root)
 
         # Add to 'Distribution.dist_files' so that the "upload" command works
@@ -595,3 +593,16 @@ def adios(p: str) -> None:
             shutil.copy(license_path, os.path.join(distinfo_path, filename))
 
         adios(egginfo_path)
+
+    def _zip_compression(self) -> int:
+        if (
+            isinstance(self.compression, int)
+            and self.compression in self.supported_compressions.values()
+        ):
+            return self.compression
+
+        compression = self.supported_compressions.get(str(self.compression))
+        if compression is not None:
+            return compression
+
+        raise ValueError(f"Unsupported compression: {self.compression!r}")

From f9398b1f2bd6cce0aa4efa01d8b69da021205151 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 8 Aug 2024 14:20:53 -0400
Subject: [PATCH 0982/1761] Mark abstract base classes and methods (#4503)

* Mark abstract base classes and methods
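
The practical effect, in a self-contained sketch: an abstract base
cannot be instantiated until every abstract method is implemented.

    from abc import ABC, abstractmethod

    class Command(ABC):
        @abstractmethod
        def run(self) -> None: ...

    class Complete(Command):
        def run(self) -> None:
            print("running")

    Complete().run()   # fine
    # Command(), or a subclass without run(), raises TypeError on instantiation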
---
 newsfragments/4503.feature.rst |  1 +
 pkg_resources/__init__.py      |  3 +-
 setuptools/__init__.py         | 65 ++++++++++++++++++++--------------
 setuptools/command/setopt.py   |  7 +++-
 setuptools/sandbox.py          |  3 +-
 5 files changed, 49 insertions(+), 30 deletions(-)
 create mode 100644 newsfragments/4503.feature.rst

diff --git a/newsfragments/4503.feature.rst b/newsfragments/4503.feature.rst
new file mode 100644
index 0000000000..9c2e433242
--- /dev/null
+++ b/newsfragments/4503.feature.rst
@@ -0,0 +1 @@
+Mark abstract base classes and methods with `abc.ABC` and `abc.abstractmethod` -- by :user:`Avasam`
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index a87fdf8570..f4206c493d 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -22,6 +22,7 @@
 
 from __future__ import annotations
 
+from abc import ABC
 import sys
 
 if sys.version_info < (3, 8):  # noqa: UP036 # Check for unsupported versions
@@ -311,7 +312,7 @@ def get_supported_platform():
 ]
 
 
-class ResolutionError(Exception):
+class ResolutionError(Exception, ABC):
     """Abstract base for dependency resolution errors"""
 
     def __repr__(self):
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 69c1f5acb9..84294edb31 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+from abc import ABC, abstractmethod
 import functools
 import os
 import re
@@ -119,7 +120,7 @@ def setup(**attrs):
     _Command = monkey.get_unpatched(distutils.core.Command)
 
 
-class Command(_Command):
+class Command(_Command, ABC):
     """
     Setuptools internal actions are organized using a *command design pattern*.
     This means that each action (or group of closely related actions) executed during
@@ -132,42 +133,25 @@ class Command(_Command):
     When creating a new command from scratch, custom defined classes **SHOULD** inherit
     from ``setuptools.Command`` and implement a few mandatory methods.
     Between these mandatory methods, are listed:
-
-    .. method:: initialize_options(self)
-
-        Set or (reset) all options/attributes/caches used by the command
-        to their default values. Note that these values may be overwritten during
-        the build.
-
-    .. method:: finalize_options(self)
-
-        Set final values for all options/attributes used by the command.
-        Most of the time, each option/attribute/cache should only be set if it does not
-        have any value yet (e.g. ``if self.attr is None: self.attr = val``).
-
-    .. method:: run(self)
-
-        Execute the actions intended by the command.
-        (Side effects **SHOULD** only take place when ``run`` is executed,
-        for example, creating new files or writing to the terminal output).
+    :meth:`initialize_options`, :meth:`finalize_options` and :meth:`run`.
 
     A useful analogy for command classes is to think of them as subroutines with local
-    variables called "options".  The options are "declared" in ``initialize_options()``
-    and "defined" (given their final values, aka "finalized") in ``finalize_options()``,
+    variables called "options".  The options are "declared" in :meth:`initialize_options`
+    and "defined" (given their final values, aka "finalized") in :meth:`finalize_options`,
     both of which must be defined by every command class. The "body" of the subroutine,
-    (where it does all the work) is the ``run()`` method.
-    Between ``initialize_options()`` and ``finalize_options()``, ``setuptools`` may set
+    (where it does all the work) is the :meth:`run` method.
+    Between :meth:`initialize_options` and :meth:`finalize_options`, ``setuptools`` may set
     the values for options/attributes based on user's input (or circumstance),
     which means that the implementation should be careful to not overwrite values in
-    ``finalize_options`` unless necessary.
+    :meth:`finalize_options` unless necessary.
 
     Please note that other commands (or other parts of setuptools) may also overwrite
     the values of the command's options/attributes multiple times during the build
     process.
-    Therefore it is important to consistently implement ``initialize_options()`` and
-    ``finalize_options()``. For example, all derived attributes (or attributes that
+    Therefore it is important to consistently implement :meth:`initialize_options` and
+    :meth:`finalize_options`. For example, all derived attributes (or attributes that
     depend on the value of other attributes) **SHOULD** be recomputed in
-    ``finalize_options``.
+    :meth:`finalize_options`.
 
     When overwriting existing commands, custom defined classes **MUST** abide by the
     same APIs implemented by the original class. They also **SHOULD** inherit from the
@@ -238,6 +222,33 @@ def reinitialize_command(
         vars(cmd).update(kw)
         return cmd
 
+    @abstractmethod
+    def initialize_options(self) -> None:
+        """
+        Set or (reset) all options/attributes/caches used by the command
+        to their default values. Note that these values may be overwritten during
+        the build.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def finalize_options(self) -> None:
+        """
+        Set final values for all options/attributes used by the command.
+        Most of the time, each option/attribute/cache should only be set if it does not
+        have any value yet (e.g. ``if self.attr is None: self.attr = val``).
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def run(self) -> None:
+        """
+        Execute the actions intended by the command.
+        (Side effects **SHOULD** only take place when :meth:`run` is executed,
+        for example, creating new files or writing to the terminal output).
+        """
+        raise NotImplementedError
+
 
 def _find_all_simple(path):
     """
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index b78d845e60..0cd67c0f67 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -1,3 +1,4 @@
+from abc import ABC, abstractmethod
 from distutils.util import convert_path
 from distutils import log
 from distutils.errors import DistutilsOptionError
@@ -68,7 +69,7 @@ def edit_config(filename, settings, dry_run=False):
             opts.write(f)
 
 
-class option_base(Command):
+class option_base(Command, ABC):
     """Abstract base class for commands that mess with config files"""
 
     user_options = [
@@ -103,6 +104,10 @@ def finalize_options(self):
             )
         (self.filename,) = filenames
 
+    @abstractmethod
+    def run(self) -> None:
+        raise NotImplementedError
+
 
 class setopt(option_base):
     """Save command-line options to a file"""
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 147b26749e..31ba1e3f8d 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+from abc import ABC
 import os
 import sys
 import tempfile
@@ -265,7 +266,7 @@ def run_setup(setup_script, args):
             # Normal exit, just return
 
 
-class AbstractSandbox:
+class AbstractSandbox(ABC):
     """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
 
     _active = False

From 06acfd262258d809242c74179477af324389e1c7 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Thu, 8 Aug 2024 23:14:35 +0200
Subject: [PATCH 0983/1761] Update to the latest ruff version
 (jaraco/skeleton#137)

---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5a4a7e9166..ff54405ead 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.1.8
+  rev: v0.5.6
   hooks:
   - id: ruff
   - id: ruff-format

From dd30b7600f33ce06a479a73002b950f4a3947759 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 8 Aug 2024 17:19:17 -0400
Subject: [PATCH 0984/1761] Add Protocols to, and remove @overload from,
 `.coveragerc` `exclude_also` (jaraco/skeleton#135)

Co-authored-by: Jason R. Coombs 
---
 .coveragerc | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/.coveragerc b/.coveragerc
index 35b98b1df9..2e3f4dd791 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -8,6 +8,8 @@ disable_warnings =
 [report]
 show_missing = True
 exclude_also =
-	# jaraco/skeleton#97
-	@overload
+	# Exclude common false positives per
+	# https://coverage.readthedocs.io/en/latest/excluding.html#advanced-exclusion
+	# Ref jaraco/skeleton#97 and jaraco/skeleton#135
+	class .*\bProtocol\):
 	if TYPE_CHECKING:
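
Both patterns exclude typing-only code from coverage reporting; when a matched
line introduces a block, coverage skips the entire block. For example
(illustrative snippet, not from the patch), both blocks below would be
excluded:

```python
from typing import TYPE_CHECKING, Protocol

if TYPE_CHECKING:  # excluded by the `if TYPE_CHECKING:` pattern
    from collections.abc import Iterator


class Reader(Protocol):  # excluded by `class .*\bProtocol\):`
    def read(self) -> bytes: ...
```
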

From 01d4d6b4bd44a127f90c3bf833d84929d6e8968a Mon Sep 17 00:00:00 2001
From: biredel <67849440+biredel@users.noreply.github.com>
Date: Fri, 9 Aug 2024 11:12:42 +0200
Subject: [PATCH 0985/1761] docs: add keyword changelog to history html

While search engines get worse, meta tags must get better again.
---
 docs/history.rst | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/docs/history.rst b/docs/history.rst
index 4f302ca06b..7a4bb4dbee 100644
--- a/docs/history.rst
+++ b/docs/history.rst
@@ -5,6 +5,9 @@
 History
 *******
 
+.. meta::
+   :keywords: changelog
+
 .. towncrier-draft-entries:: DRAFT, unreleased as on |today|
 
 .. include:: ../NEWS (links).rst

From a6f7484b25628ae351d4b96fa6254ada387ece10 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 9 Aug 2024 09:54:25 -0400
Subject: [PATCH 0986/1761] Remove redundant abstractmethod `run` in
 `setuptools.command.setopt.option_base` (#4549)

---
 setuptools/command/setopt.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index 0cd67c0f67..3d8181d2fa 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -1,4 +1,4 @@
-from abc import ABC, abstractmethod
+from abc import ABC
 from distutils.util import convert_path
 from distutils import log
 from distutils.errors import DistutilsOptionError
@@ -104,10 +104,6 @@ def finalize_options(self):
             )
         (self.filename,) = filenames
 
-    @abstractmethod
-    def run(self) -> None:
-        raise NotImplementedError
-
 
 class setopt(option_base):
     """Save command-line options to a file"""

From 7b17c15ab57e0731491b1b6e64aa0eb6a7e71df0 Mon Sep 17 00:00:00 2001
From: Cal Jacobson 
Date: Sun, 11 Aug 2024 13:16:13 -0500
Subject: [PATCH 0987/1761] Handle failures to find a user home directory

In certain environments (notably the [bazel sandbox](https://bazel.build/docs/sandboxing) on Windows), it is possible for `pathlib.Path('~').expanduser()` to fail to find the user home directory and [raise a `RuntimeError`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.expanduser). This causes `distutils` (and ultimately, `setuptools`) to fail.

With this patch, we catch and handle the exception by logging a warning and continuing without the user's config file.

Motivated by https://github.com/bazelbuild/rules_python/pull/1067
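
A minimal standalone sketch of the failure mode and the guard (illustration
only; the actual change lives in `Distribution._gen_paths`, and the
`user_config_path` helper here is hypothetical):

```python
import logging
import pathlib


def user_config_path(filename):
    """Return the per-user config path, or None if no home directory exists."""
    try:
        # Raises RuntimeError when the home directory cannot be resolved,
        # e.g. HOME/USERPROFILE unset inside the bazel sandbox on Windows.
        home = pathlib.Path('~').expanduser()
    except RuntimeError:
        logging.warning(
            "Failed to locate user home directory. Skipping user config."
        )
        return None
    return home / filename
```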
---
 distutils/dist.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index 115302b3e7..9b16058ddc 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -354,7 +354,12 @@ def _gen_paths(self):
         prefix = '.' * (os.name == 'posix')
         filename = prefix + 'pydistutils.cfg'
         if self.want_user_cfg:
-            yield pathlib.Path('~').expanduser() / filename
+            try:
+                user_home = pathlib.Path('~').expanduser()
+            except RuntimeError:
+                self.announce("Failed to locate user home directory. Skipping user config.", logging.WARNING)
+            else:
+                yield user_home / filename
 
         # All platforms support local setup.cfg
         yield pathlib.Path('setup.cfg')

From 6f0511fb9cbffe655102c356cb381a470ee95b53 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 12 Aug 2024 11:47:36 +0100
Subject: [PATCH 0988/1761] Remove outdated import error check for warnings

---
 distutils/dist.py | 11 ++---------
 1 file changed, 2 insertions(+), 9 deletions(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index 115302b3e7..eacb8148e8 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -10,16 +10,12 @@
 import pathlib
 import re
 import sys
+import warnings
 from collections.abc import Iterable
 from email import message_from_file
 
 from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
 
-try:
-    import warnings
-except ImportError:
-    warnings = None
-
 from ._log import log
 from .debug import DEBUG
 from .errors import (
@@ -249,10 +245,7 @@ def __init__(self, attrs=None):  # noqa: C901
                 attrs['license'] = attrs['licence']
                 del attrs['licence']
                 msg = "'licence' distribution option is deprecated; use 'license'"
-                if warnings is not None:
-                    warnings.warn(msg)
-                else:
-                    sys.stderr.write(msg + "\n")
+                warnings.warn(msg)
 
             # Now work on the rest of the attributes.  Any attribute that's
             # not already defined is invalid!

From 8c6015c5d75641ca42d06db5021763c4f12c1d32 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 12 Aug 2024 12:01:07 +0100
Subject: [PATCH 0989/1761] Satisfy ruff import format

---
 distutils/dist.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index eacb8148e8..ea19add23d 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -14,9 +14,8 @@
 from collections.abc import Iterable
 from email import message_from_file
 
-from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
-
 from ._log import log
+from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
 from .debug import DEBUG
 from .errors import (
     DistutilsArgError,

From 5349827daa8a1de35fbaac7b87b632a12100d2cd Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 9 Aug 2024 15:20:30 -0400
Subject: [PATCH 0990/1761] Configure Ruff/Isort for consistent imports
 accounting for distutils hack

---
 ruff.toml | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/ruff.toml b/ruff.toml
index 09f256d5cb..b55b4e8067 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -14,6 +14,7 @@ extend-select = [
 	"ANN2", # missing-return-type-*
 	"FA", # flake8-future-annotations
 	"F404", # late-future-import
+	"I", # isort
 	"PYI", # flake8-pyi
 	"UP", # pyupgrade
 	"TRY",
@@ -56,6 +57,16 @@ ignore = [
 "setuptools/__init__.py" = ["E402"]
 "pkg_resources/__init__.py" = ["E402"]
 
+[lint.isort]
+combine-as-imports = true
+split-on-trailing-comma = false
+# Force Ruff/isort to always import setuptools before distutils in tests as long as distutils_hack is supported
+# This also ensures _distutils_hack is imported before distutils
+# https://github.com/pypa/setuptools/issues/4137
+section-order = ["future", "standard-library", "eager", "third-party", "first-party", "local-folder", "delayed"]
+sections.eager = ["_distutils_hack"]
+sections.delayed = ["distutils"]
+
 [lint.flake8-annotations]
 ignore-fully-untyped = true
 

From 9081bcf14925cb6575a03184737ae79247acb3e8 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 9 Aug 2024 15:21:19 -0400
Subject: [PATCH 0991/1761] Apply Ruff/Isort

---
 _distutils_hack/__init__.py                   |   3 +-
 conftest.py                                   |   1 -
 pkg_resources/__init__.py                     |  76 ++++++-------
 .../tests/test_find_distributions.py          |   5 +-
 pkg_resources/tests/test_pkg_resources.py     |  23 ++--
 pkg_resources/tests/test_resources.py         |  18 ++--
 pkg_resources/tests/test_working_set.py       |   2 +-
 setuptools/__init__.py                        |  10 +-
 setuptools/_core_metadata.py                  |   7 +-
 setuptools/_entry_points.py                   |   9 +-
 setuptools/_imp.py                            |   6 +-
 setuptools/_importlib.py                      |   1 -
 setuptools/_path.py                           |   3 +-
 setuptools/_reqs.py                           |   3 +-
 setuptools/archive_util.py                    |  11 +-
 setuptools/build_meta.py                      |  14 +--
 setuptools/command/__init__.py                |   3 +-
 setuptools/command/_requirestxt.py            |   2 +-
 setuptools/command/alias.py                   |   4 +-
 setuptools/command/bdist_egg.py               |  15 +--
 setuptools/command/bdist_rpm.py               |   4 +-
 setuptools/command/bdist_wheel.py             |  11 +-
 setuptools/command/build.py                   |   1 +
 setuptools/command/build_clib.py              |   2 +-
 setuptools/command/build_ext.py               |  16 +--
 setuptools/command/build_py.py                |  15 +--
 setuptools/command/develop.py                 |  15 ++-
 setuptools/command/dist_info.py               |   5 +-
 setuptools/command/easy_install.py            | 102 +++++++++---------
 setuptools/command/editable_wheel.py          |  28 +----
 setuptools/command/egg_info.py                |  33 +++---
 setuptools/command/install.py                 |  10 +-
 setuptools/command/install_egg_info.py        |   7 +-
 setuptools/command/install_lib.py             |   5 +-
 setuptools/command/install_scripts.py         |   6 +-
 setuptools/command/register.py                |   6 +-
 setuptools/command/rotate.py                  |   7 +-
 setuptools/command/sdist.py                   |   7 +-
 setuptools/command/setopt.py                  |  13 +--
 setuptools/command/upload.py                  |   6 +-
 setuptools/command/upload_docs.py             |  18 ++--
 setuptools/compat/py310.py                    |   1 -
 setuptools/compat/py311.py                    |   4 +-
 setuptools/config/_apply_pyprojecttoml.py     |  19 ++--
 setuptools/config/expand.py                   |  21 ++--
 setuptools/config/pyprojecttoml.py            |   6 +-
 setuptools/config/setupcfg.py                 |  14 +--
 setuptools/depends.py                         |   9 +-
 setuptools/discovery.py                       |   8 +-
 setuptools/dist.py                            |  30 +++---
 setuptools/errors.py                          |   1 -
 setuptools/extension.py                       |  10 +-
 setuptools/glob.py                            |   2 +-
 setuptools/installer.py                       |   7 +-
 setuptools/launch.py                          |   2 +-
 setuptools/logging.py                         |   6 +-
 setuptools/modified.py                        |   2 +-
 setuptools/monkey.py                          |   1 -
 setuptools/msvc.py                            |  13 +--
 setuptools/namespaces.py                      |   4 +-
 setuptools/package_index.py                   |  46 ++++----
 setuptools/sandbox.py                         |  19 ++--
 setuptools/tests/__init__.py                  |   1 -
 setuptools/tests/compat/py39.py               |   1 -
 setuptools/tests/config/downloads/preload.py  |   1 -
 .../tests/config/test_apply_pyprojecttoml.py  |   9 +-
 setuptools/tests/config/test_expand.py        |   3 +-
 setuptools/tests/config/test_pyprojecttoml.py |   7 +-
 setuptools/tests/config/test_setupcfg.py      |   8 +-
 setuptools/tests/contexts.py                  |   8 +-
 setuptools/tests/environment.py               |   4 +-
 setuptools/tests/fixtures.py                  |   6 +-
 setuptools/tests/integration/helpers.py       |   2 +-
 setuptools/tests/server.py                    |   4 +-
 setuptools/tests/test_archive_util.py         |   2 +-
 setuptools/tests/test_bdist_deprecations.py   |   2 +-
 setuptools/tests/test_bdist_wheel.py          |   5 +-
 setuptools/tests/test_build.py                |   2 +-
 setuptools/tests/test_build_clib.py           |   5 +-
 setuptools/tests/test_build_ext.py            |   9 +-
 setuptools/tests/test_build_meta.py           |  10 +-
 setuptools/tests/test_build_py.py             |   4 +-
 setuptools/tests/test_config_discovery.py     |  16 +--
 setuptools/tests/test_core_metadata.py        |   8 +-
 setuptools/tests/test_develop.py              |  10 +-
 setuptools/tests/test_dist.py                 |  17 ++-
 setuptools/tests/test_dist_info.py            |   2 +-
 setuptools/tests/test_distutils_adoption.py   |   3 +-
 setuptools/tests/test_easy_install.py         |  36 +++----
 setuptools/tests/test_editable_install.py     |  18 ++--
 setuptools/tests/test_egg_info.py             |  13 +--
 setuptools/tests/test_extern.py               |   3 +-
 setuptools/tests/test_find_packages.py        |   3 +-
 setuptools/tests/test_find_py_modules.py      |   2 +-
 setuptools/tests/test_install_scripts.py      |   1 +
 setuptools/tests/test_integration.py          |   3 +-
 setuptools/tests/test_logging.py              |   4 +-
 setuptools/tests/test_manifest.py             |  14 +--
 setuptools/tests/test_msvc14.py               |   6 +-
 setuptools/tests/test_namespaces.py           |   2 +-
 setuptools/tests/test_packageindex.py         |   7 +-
 setuptools/tests/test_register.py             |   8 +-
 setuptools/tests/test_sdist.py                |  21 ++--
 setuptools/tests/test_setuptools.py           |  14 +--
 setuptools/tests/test_upload.py               |   8 +-
 setuptools/tests/test_virtualenv.py           |   5 +-
 setuptools/tests/test_warnings.py             |   1 -
 setuptools/tests/test_wheel.py                |  16 +--
 setuptools/tests/test_windows_wrappers.py     |   6 +-
 setuptools/unicode_utils.py                   |   2 +-
 setuptools/wheel.py                           |  11 +-
 tools/build_launchers.py                      |   3 +-
 tools/finalize.py                             |   3 +-
 tools/ppc64le-patch.py                        |   2 +-
 114 files changed, 542 insertions(+), 577 deletions(-)

diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
index 35ab5cad49..b05d04e98e 100644
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -1,7 +1,6 @@
 # don't import any costly modules
-import sys
 import os
-
+import sys
 
 report_url = (
     "https://github.com/pypa/setuptools/issues/new?"
diff --git a/conftest.py b/conftest.py
index 532e83112a..01ed2fa2e6 100644
--- a/conftest.py
+++ b/conftest.py
@@ -2,7 +2,6 @@
 
 import pytest
 
-
 pytest_plugins = 'setuptools.tests.fixtures'
 
 
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index f4206c493d..8bbf249371 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -22,68 +22,66 @@
 
 from __future__ import annotations
 
-from abc import ABC
 import sys
+from abc import ABC
 
 if sys.version_info < (3, 8):  # noqa: UP036 # Check for unsupported versions
     raise RuntimeError("Python 3.8 or later is required")
 
-import os
+import _imp
+import collections
+import email.parser
+import errno
+import functools
+import importlib
+import importlib.abc
+import importlib.machinery
+import inspect
 import io
-import time
+import ntpath
+import operator
+import os
+import pkgutil
+import platform
+import plistlib
+import posixpath
 import re
+import stat
+import tempfile
+import textwrap
+import time
 import types
+import warnings
+import zipfile
+import zipimport
+from pkgutil import get_importer
 from typing import (
+    TYPE_CHECKING,
     Any,
     BinaryIO,
-    Literal,
+    Callable,
     Dict,
+    Iterable,
     Iterator,
+    Literal,
     Mapping,
     MutableSequence,
     NamedTuple,
     NoReturn,
-    Tuple,
-    Union,
-    TYPE_CHECKING,
     Protocol,
-    Callable,
-    Iterable,
+    Tuple,
     TypeVar,
+    Union,
     overload,
 )
-import zipfile
-import zipimport
-import warnings
-import stat
-import functools
-import pkgutil
-import operator
-import platform
-import collections
-import plistlib
-import email.parser
-import errno
-import tempfile
-import textwrap
-import inspect
-import ntpath
-import posixpath
-import importlib
-import importlib.abc
-import importlib.machinery
-from pkgutil import get_importer
-
-import _imp
 
 sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path])  # fmt: skip
 # workaround for #4476
 sys.modules.pop('backports', None)
 
 # capture these to bypass sandboxing
-from os import utime
-from os import open as os_open
-from os.path import isdir, split
+from os import open as os_open, utime  # isort: skip
+from os.path import isdir, split  # isort: skip
 
 try:
     from os import mkdir, rename, unlink
@@ -93,20 +91,16 @@
     # no write support, probably under GAE
     WRITE_SUPPORT = False
 
-from jaraco.text import (
-    yield_lines,
-    drop_comment,
-    join_continuation,
-)
 import packaging.markers
 import packaging.requirements
 import packaging.specifiers
 import packaging.utils
 import packaging.version
+from jaraco.text import drop_comment, join_continuation, yield_lines
 from platformdirs import user_cache_dir as _user_cache_dir
 
 if TYPE_CHECKING:
-    from _typeshed import BytesPath, StrPath, StrOrBytesPath
+    from _typeshed import BytesPath, StrOrBytesPath, StrPath
     from typing_extensions import Self, TypeAlias
 
 warnings.warn(
diff --git a/pkg_resources/tests/test_find_distributions.py b/pkg_resources/tests/test_find_distributions.py
index 8263ca6c41..301b36d6cd 100644
--- a/pkg_resources/tests/test_find_distributions.py
+++ b/pkg_resources/tests/test_find_distributions.py
@@ -1,8 +1,9 @@
-from pathlib import Path
 import shutil
+from pathlib import Path
+
 import pytest
-import pkg_resources
 
+import pkg_resources
 
 TESTS_DATA_DIR = Path(__file__).parent / 'data'
 
diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 424d5ac44b..023adf60b0 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -1,28 +1,23 @@
 from __future__ import annotations
 
 import builtins
-import sys
-import tempfile
-import os
-import zipfile
 import datetime
+import os
 import plistlib
-import subprocess
 import stat
-import distutils.dist
-import distutils.command.install_egg_info
-
+import subprocess
+import sys
+import tempfile
+import zipfile
 from unittest import mock
 
-from pkg_resources import (
-    DistInfoDistribution,
-    Distribution,
-    EggInfoDistribution,
-)
-
 import pytest
 
 import pkg_resources
+from pkg_resources import DistInfoDistribution, Distribution, EggInfoDistribution
+
+import distutils.command.install_egg_info
+import distutils.dist
 
 
 class EggRemover(str):
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index 9837c2719d..3b67296952 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -1,23 +1,23 @@
+import itertools
 import os
-import sys
-import string
 import platform
-import itertools
+import string
+import sys
 
 import pytest
 from packaging.specifiers import SpecifierSet
 
 import pkg_resources
 from pkg_resources import (
-    parse_requirements,
-    VersionConflict,
-    parse_version,
     Distribution,
     EntryPoint,
     Requirement,
-    safe_version,
-    safe_name,
+    VersionConflict,
     WorkingSet,
+    parse_requirements,
+    parse_version,
+    safe_name,
+    safe_version,
 )
 
 
@@ -862,8 +862,8 @@ def test_path_order(self, symlinked_tmpdir):
             (subpkg / '__init__.py').write_text(vers_str % number, encoding='utf-8')
 
         with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"):
-            import nspkg.subpkg
             import nspkg
+            import nspkg.subpkg
         expected = [str(site.realpath() / 'nspkg') for site in site_dirs]
         assert nspkg.__path__ == expected
         assert nspkg.subpkg.__version__ == 1
diff --git a/pkg_resources/tests/test_working_set.py b/pkg_resources/tests/test_working_set.py
index 57f62b5492..0537bb69a4 100644
--- a/pkg_resources/tests/test_working_set.py
+++ b/pkg_resources/tests/test_working_set.py
@@ -1,7 +1,7 @@
+import functools
 import inspect
 import re
 import textwrap
-import functools
 
 import pytest
 
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 84294edb31..73de2a03d3 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -2,30 +2,30 @@
 
 from __future__ import annotations
 
-from abc import ABC, abstractmethod
 import functools
 import os
 import re
 import sys
+from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING, TypeVar, overload
 
-
 sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path])  # fmt: skip
 # workaround for #4476
 sys.modules.pop('backports', None)
 
 import _distutils_hack.override  # noqa: F401
-import distutils.core
-from distutils.errors import DistutilsOptionError
 
 from . import logging, monkey
-from .version import __version__ as __version__
 from .depends import Require
 from .discovery import PackageFinder, PEP420PackageFinder
 from .dist import Distribution
 from .extension import Extension
+from .version import __version__ as __version__
 from .warnings import SetuptoolsDeprecationWarning
 
+import distutils.core
+from distutils.errors import DistutilsOptionError
+
 __all__ = [
     'setup',
     'Distribution',
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 82ec19fc75..2e9c48a77b 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -13,15 +13,16 @@
 from email.message import Message
 from tempfile import NamedTemporaryFile
 
-from distutils.util import rfc822_escape
-
-from . import _normalization, _reqs
 from packaging.markers import Marker
 from packaging.requirements import Requirement
 from packaging.utils import canonicalize_name, canonicalize_version
 from packaging.version import Version
+
+from . import _normalization, _reqs
 from .warnings import SetuptoolsDeprecationWarning
 
+from distutils.util import rfc822_escape
+
 
 def get_metadata_version(self):
     mv = getattr(self, 'metadata_version', None)
diff --git a/setuptools/_entry_points.py b/setuptools/_entry_points.py
index 5de12582be..e785fc7df8 100644
--- a/setuptools/_entry_points.py
+++ b/setuptools/_entry_points.py
@@ -1,13 +1,14 @@
 import functools
-import operator
 import itertools
+import operator
 
-from .errors import OptionError
-from jaraco.text import yield_lines
 from jaraco.functools import pass_none
+from jaraco.text import yield_lines
+from more_itertools import consume
+
 from ._importlib import metadata
 from ._itertools import ensure_unique
-from more_itertools import consume
+from .errors import OptionError
 
 
 def ensure_valid(ep):
diff --git a/setuptools/_imp.py b/setuptools/_imp.py
index 38b146fc4d..bddbf6a683 100644
--- a/setuptools/_imp.py
+++ b/setuptools/_imp.py
@@ -3,14 +3,12 @@
 from the deprecated imp module.
 """
 
-import os
-import importlib.util
 import importlib.machinery
+import importlib.util
+import os
 import tokenize
-
 from importlib.util import module_from_spec
 
-
 PY_SOURCE = 1
 PY_COMPILED = 2
 C_EXTENSION = 3
diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py
index b2d5b5b84a..5317be0fa0 100644
--- a/setuptools/_importlib.py
+++ b/setuptools/_importlib.py
@@ -1,6 +1,5 @@
 import sys
 
-
 if sys.version_info < (3, 10):
     import importlib_metadata as metadata  # pragma: no cover
 else:
diff --git a/setuptools/_path.py b/setuptools/_path.py
index bd9378761b..dd4a9db8cb 100644
--- a/setuptools/_path.py
+++ b/setuptools/_path.py
@@ -3,7 +3,7 @@
 import contextlib
 import os
 import sys
-from typing import Union, TYPE_CHECKING
+from typing import TYPE_CHECKING, Union
 
 if TYPE_CHECKING:
     from typing_extensions import TypeAlias
@@ -11,7 +11,6 @@
 
 from more_itertools import unique_everseen
 
-
 if sys.version_info >= (3, 9):
     StrPath: TypeAlias = Union[str, os.PathLike[str]]  #  Same as _typeshed.StrPath
 else:
diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py
index cdea95fa3d..2d09244b43 100644
--- a/setuptools/_reqs.py
+++ b/setuptools/_reqs.py
@@ -1,7 +1,8 @@
 from __future__ import annotations
 
 from functools import lru_cache
-from typing import Callable, Iterable, Iterator, TypeVar, Union, overload, TYPE_CHECKING
+from typing import TYPE_CHECKING, Callable, Iterable, Iterator, TypeVar, Union, overload
+
 import jaraco.text as text
 from packaging.requirements import Requirement
 
diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py
index 6b8460bd92..e4acd75f9b 100644
--- a/setuptools/archive_util.py
+++ b/setuptools/archive_util.py
@@ -1,15 +1,16 @@
 """Utilities for extracting common archive formats"""
 
-import zipfile
-import tarfile
+import contextlib
 import os
-import shutil
 import posixpath
-import contextlib
-from distutils.errors import DistutilsError
+import shutil
+import tarfile
+import zipfile
 
 from ._path import ensure_directory
 
+from distutils.errors import DistutilsError
+
 __all__ = [
     "unpack_archive",
     "unpack_zipfile",
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index d8ab28be74..7663306862 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -28,24 +28,26 @@
 
 from __future__ import annotations
 
+import contextlib
 import io
 import os
 import shlex
-import sys
-import tokenize
 import shutil
-import contextlib
+import sys
 import tempfile
+import tokenize
 import warnings
 from pathlib import Path
-from typing import TYPE_CHECKING, Dict, Iterator, List, Union, Iterable
+from typing import TYPE_CHECKING, Dict, Iterable, Iterator, List, Union
 
 import setuptools
-import distutils
+
 from . import errors
-from ._path import same_path, StrPath
+from ._path import StrPath, same_path
 from ._reqs import parse_strings
 from .warnings import SetuptoolsDeprecationWarning
+
+import distutils
 from distutils.util import strtobool
 
 if TYPE_CHECKING:
diff --git a/setuptools/command/__init__.py b/setuptools/command/__init__.py
index 5acd7687d6..bf011e896d 100644
--- a/setuptools/command/__init__.py
+++ b/setuptools/command/__init__.py
@@ -1,6 +1,7 @@
-from distutils.command.bdist import bdist
 import sys
 
+from distutils.command.bdist import bdist
+
 if 'egg' not in bdist.format_commands:
     try:
         bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file")
diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py
index ef35d183e8..b87476d6f4 100644
--- a/setuptools/command/_requirestxt.py
+++ b/setuptools/command/_requirestxt.py
@@ -14,10 +14,10 @@
 from itertools import filterfalse
 from typing import Dict, Mapping, TypeVar
 
-from .. import _reqs
 from jaraco.text import yield_lines
 from packaging.requirements import Requirement
 
+from .. import _reqs
 
 # dict can work as an ordered set
 _T = TypeVar("_T")
diff --git a/setuptools/command/alias.py b/setuptools/command/alias.py
index e7b4d5456a..4eed652381 100644
--- a/setuptools/command/alias.py
+++ b/setuptools/command/alias.py
@@ -1,6 +1,6 @@
-from distutils.errors import DistutilsOptionError
+from setuptools.command.setopt import config_file, edit_config, option_base
 
-from setuptools.command.setopt import edit_config, option_base, config_file
+from distutils.errors import DistutilsOptionError
 
 
 def shquote(arg):
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 559f7d6032..f3b7150208 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -2,20 +2,21 @@
 
 Build .egg distributions"""
 
-from distutils.dir_util import remove_tree, mkpath
-from distutils import log
-from types import CodeType
-import sys
+import marshal
 import os
 import re
+import sys
 import textwrap
-import marshal
+from sysconfig import get_path, get_python_version
+from types import CodeType
 
-from setuptools.extension import Library
 from setuptools import Command
+from setuptools.extension import Library
+
 from .._path import ensure_directory
 
-from sysconfig import get_path, get_python_version
+from distutils import log
+from distutils.dir_util import mkpath, remove_tree
 
 
 def _get_purelib():
diff --git a/setuptools/command/bdist_rpm.py b/setuptools/command/bdist_rpm.py
index 70ed6b6097..abf2b88bfc 100644
--- a/setuptools/command/bdist_rpm.py
+++ b/setuptools/command/bdist_rpm.py
@@ -1,7 +1,7 @@
-import distutils.command.bdist_rpm as orig
-
 from ..warnings import SetuptoolsDeprecationWarning
 
+import distutils.command.bdist_rpm as orig
+
 
 class bdist_rpm(orig.bdist_rpm):
     """
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index de6440f22f..eca5568aad 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -16,19 +16,20 @@
 import warnings
 from email.generator import BytesGenerator, Generator
 from email.policy import EmailPolicy
-from distutils import log
 from glob import iglob
 from shutil import rmtree
 from typing import TYPE_CHECKING, Callable, Iterable, Literal, Sequence, cast
 from zipfile import ZIP_DEFLATED, ZIP_STORED
 
-from .. import Command, __version__
-from .egg_info import egg_info as egg_info_cls
+from packaging import tags, version as _packaging_version
 from wheel.metadata import pkginfo_to_metadata
-from packaging import tags
-from packaging import version as _packaging_version
 from wheel.wheelfile import WheelFile
 
+from .. import Command, __version__
+from .egg_info import egg_info as egg_info_cls
+
+from distutils import log
+
 if TYPE_CHECKING:
     from _typeshed import ExcInfo
 
diff --git a/setuptools/command/build.py b/setuptools/command/build.py
index bc765a17ae..fd53fae8ca 100644
--- a/setuptools/command/build.py
+++ b/setuptools/command/build.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 from typing import Protocol
+
 from distutils.command.build import build as _build
 
 _ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"}
diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py
index acd4d1d3ba..5366b0c5c6 100644
--- a/setuptools/command/build_clib.py
+++ b/setuptools/command/build_clib.py
@@ -1,6 +1,6 @@
 import distutils.command.build_clib as orig
-from distutils.errors import DistutilsSetupError
 from distutils import log
+from distutils.errors import DistutilsSetupError
 
 try:
     from distutils._modified import newer_pairwise_group
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index 0a2af0dd53..51c1771a33 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -1,24 +1,26 @@
 from __future__ import annotations
 
+import itertools
 import os
 import sys
-import itertools
 from importlib.machinery import EXTENSION_SUFFIXES
 from importlib.util import cache_from_source as _compiled_file_name
-from typing import Iterator
 from pathlib import Path
-
-from distutils.ccompiler import new_compiler
-from distutils.sysconfig import customize_compiler, get_config_var
-from distutils import log
+from typing import Iterator
 
 from setuptools.dist import Distribution
 from setuptools.errors import BaseError
 from setuptools.extension import Extension, Library
 
+from distutils import log
+from distutils.ccompiler import new_compiler
+from distutils.sysconfig import customize_compiler, get_config_var
+
 try:
     # Attempt to use Cython for building extensions, if available
-    from Cython.Distutils.build_ext import build_ext as _build_ext  # type: ignore[import-not-found] # Cython not installed on CI tests
+    from Cython.Distutils.build_ext import (  # type: ignore[import-not-found] # Cython not installed on CI tests
+        build_ext as _build_ext,
+    )
 
     # Additionally, assert that the compiler module will load
     # also. Ref #1229.
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index 0cdb64f424..e6d9656f10 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -1,15 +1,12 @@
 from __future__ import annotations
 
-from functools import partial
-from glob import glob
-from distutils.util import convert_path
-import distutils.command.build_py as orig
-import os
 import fnmatch
-import textwrap
-import distutils.errors
 import itertools
+import os
 import stat
+import textwrap
+from functools import partial
+from glob import glob
 from pathlib import Path
 from typing import Iterable, Iterator
 
@@ -18,6 +15,10 @@
 from ..dist import Distribution
 from ..warnings import SetuptoolsDeprecationWarning
 
+import distutils.command.build_py as orig
+import distutils.errors
+from distutils.util import convert_path
+
 _IMPLICIT_DATA_FILES = ('*.pyi', 'py.typed')
 
 
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 55f24f396c..1938434b3a 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -1,17 +1,16 @@
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import DistutilsOptionError
-import os
 import glob
+import os
 
-from setuptools.command.easy_install import easy_install
-from setuptools import _normalization
-from setuptools import _path
-from setuptools import namespaces
 import setuptools
+from setuptools import _normalization, _path, namespaces
+from setuptools.command.easy_install import easy_install
 
 from ..unicode_utils import _read_utf8_with_fallback
 
+from distutils import log
+from distutils.errors import DistutilsOptionError
+from distutils.util import convert_path
+
 
 class develop(namespaces.DevelopInstaller, easy_install):
     """Set up package for development"""
diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py
index 2adc1c46f3..1db3fbf6bd 100644
--- a/setuptools/command/dist_info.py
+++ b/setuptools/command/dist_info.py
@@ -6,14 +6,15 @@
 import os
 import shutil
 from contextlib import contextmanager
-from distutils import log
-from distutils.core import Command
 from pathlib import Path
 from typing import cast
 
 from .. import _normalization
 from .egg_info import egg_info as egg_info_cls
 
+from distutils import log
+from distutils.core import Command
+
 
 class dist_info(Command):
     """
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 74b79d1adc..54d1e48449 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -12,73 +12,69 @@
 
 from __future__ import annotations
 
-from collections.abc import Iterable
-from glob import glob
-from distutils.util import get_platform
-from distutils.util import convert_path, subst_vars
-from distutils.errors import (
-    DistutilsArgError,
-    DistutilsOptionError,
-    DistutilsError,
-    DistutilsPlatformError,
-)
-from distutils import log, dir_util
-from distutils.command.build_scripts import first_line_re
-from distutils.command import install
-import sys
+import configparser
+import contextlib
+import io
 import os
-from typing import TYPE_CHECKING
-import zipimport
-import shutil
-import tempfile
-import zipfile
-import re
-import stat
 import random
-import textwrap
-import warnings
+import re
+import shlex
+import shutil
 import site
+import stat
 import struct
-import contextlib
 import subprocess
-import shlex
-import io
-import configparser
+import sys
 import sysconfig
-
+import tempfile
+import textwrap
+import warnings
+import zipfile
+import zipimport
+from collections.abc import Iterable
+from glob import glob
 from sysconfig import get_path
+from typing import TYPE_CHECKING
 
-from setuptools import Command
-from setuptools.sandbox import run_setup
-from setuptools.command import setopt
-from setuptools.archive_util import unpack_archive
-from setuptools.package_index import (
-    PackageIndex,
-    parse_requirement_arg,
-    URL_SCHEME,
-)
-from setuptools.command import bdist_egg, egg_info
-from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
-from setuptools.wheel import Wheel
+from jaraco.text import yield_lines
+
+import pkg_resources
 from pkg_resources import (
-    normalize_path,
-    resource_string,
-    get_distribution,
-    find_distributions,
-    Environment,
-    Requirement,
+    DEVELOP_DIST,
     Distribution,
-    PathMetadata,
-    EggMetadata,
-    WorkingSet,
     DistributionNotFound,
+    EggMetadata,
+    Environment,
+    PathMetadata,
+    Requirement,
     VersionConflict,
-    DEVELOP_DIST,
+    WorkingSet,
+    find_distributions,
+    get_distribution,
+    normalize_path,
+    resource_string,
 )
-import pkg_resources
-from ..compat import py39, py311
+from setuptools import Command
+from setuptools.archive_util import unpack_archive
+from setuptools.command import bdist_egg, egg_info, setopt
+from setuptools.package_index import URL_SCHEME, PackageIndex, parse_requirement_arg
+from setuptools.sandbox import run_setup
+from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
+from setuptools.wheel import Wheel
+
 from .._path import ensure_directory
-from jaraco.text import yield_lines
+from ..compat import py39, py311
+
+from distutils import dir_util, log
+from distutils.command import install
+from distutils.command.build_scripts import first_line_re
+from distutils.errors import (
+    DistutilsArgError,
+    DistutilsError,
+    DistutilsOptionError,
+    DistutilsPlatformError,
+)
+from distutils.util import convert_path, get_platform, subst_vars
 
 if TYPE_CHECKING:
     from typing_extensions import Self
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index abc7b9f2c8..46852c1a94 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -12,8 +12,8 @@
 
 from __future__ import annotations
 
-import logging
 import io
+import logging
 import os
 import shutil
 import traceback
@@ -23,32 +23,14 @@
 from itertools import chain, starmap
 from pathlib import Path
 from tempfile import TemporaryDirectory
-from typing import (
-    TYPE_CHECKING,
-    Iterable,
-    Iterator,
-    Mapping,
-    Protocol,
-    TypeVar,
-    cast,
-)
-
-from .. import (
-    Command,
-    _normalization,
-    _path,
-    errors,
-    namespaces,
-)
+from typing import TYPE_CHECKING, Iterable, Iterator, Mapping, Protocol, TypeVar, cast
+
+from .. import Command, _normalization, _path, errors, namespaces
 from .._path import StrPath
 from ..compat import py39
 from ..discovery import find_package_path
 from ..dist import Distribution
-from ..warnings import (
-    InformationOnly,
-    SetuptoolsDeprecationWarning,
-    SetuptoolsWarning,
-)
+from ..warnings import InformationOnly, SetuptoolsDeprecationWarning, SetuptoolsWarning
 from .build import build as build_cls
 from .build_py import build_py as build_py_cls
 from .dist_info import dist_info as dist_info_cls
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 8a2d42ea04..794ecd3dc3 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -2,35 +2,36 @@
 
 Create a distribution's .egg-info directory and contents"""
 
-from distutils.filelist import FileList as _FileList
-from distutils.errors import DistutilsInternalError
-from distutils.util import convert_path
-from distutils import log
-import distutils.errors
-import distutils.filelist
+import collections
 import functools
 import os
 import re
 import sys
 import time
-import collections
 
-from .._importlib import metadata
-from .. import _entry_points, _normalization
-from . import _requirestxt
+import packaging
+import packaging.requirements
+import packaging.version
 
+import setuptools.unicode_utils as unicode_utils
 from setuptools import Command
-from setuptools.command.sdist import sdist
-from setuptools.command.sdist import walk_revctrl
-from setuptools.command.setopt import edit_config
 from setuptools.command import bdist_egg
+from setuptools.command.sdist import sdist, walk_revctrl
+from setuptools.command.setopt import edit_config
 from setuptools.dist import Distribution
-import setuptools.unicode_utils as unicode_utils
 from setuptools.glob import glob
 
-import packaging.requirements
-import packaging.version
+from .. import _entry_points, _normalization
+from .._importlib import metadata
 from ..warnings import SetuptoolsDeprecationWarning
+from . import _requirestxt
+
+import distutils.errors
+import distutils.filelist
+from distutils import log
+from distutils.errors import DistutilsInternalError
+from distutils.filelist import FileList as _FileList
+from distutils.util import convert_path
 
 PY_MAJOR = '{}.{}'.format(*sys.version_info)
 
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index 12ef06f542..e2ec1abdde 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -1,18 +1,20 @@
 from __future__ import annotations
 
-from collections.abc import Callable
-from distutils.errors import DistutilsArgError
-import inspect
 import glob
+import inspect
 import platform
-import distutils.command.install as orig
+from collections.abc import Callable
 from typing import Any, ClassVar, cast
 
 import setuptools
+
 from ..dist import Distribution
 from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
 from .bdist_egg import bdist_egg as bdist_egg_cls
 
+import distutils.command.install as orig
+from distutils.errors import DistutilsArgError
+
 # Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
 # now. See https://github.com/pypa/setuptools/issues/199/
 _install = orig.install
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
index a1d2e8184f..be47254f00 100644
--- a/setuptools/command/install_egg_info.py
+++ b/setuptools/command/install_egg_info.py
@@ -1,11 +1,12 @@
-from distutils import log, dir_util
 import os
 
-from setuptools import Command
-from setuptools import namespaces
+from setuptools import Command, namespaces
 from setuptools.archive_util import unpack_archive
+
 from .._path import ensure_directory
 
+from distutils import dir_util, log
+
 
 class install_egg_info(namespaces.Installer, Command):
     """Install an .egg-info directory for the package"""
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index d1577384df..292f07ab6e 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -1,12 +1,14 @@
 from __future__ import annotations
+
 import os
 import sys
 from itertools import product, starmap
-import distutils.command.install_lib as orig
 
 from .._path import StrPath
 from ..dist import Distribution
 
+import distutils.command.install_lib as orig
+
 
 class install_lib(orig.install_lib):
     """Don't add compiled flags to filenames of non-Python files"""
@@ -107,6 +109,7 @@ def copy_tree(
         # Exclude namespace package __init__.py* files from the output
 
         from setuptools.archive_util import unpack_directory
+
         from distutils import log
 
         outfiles: list[str] = []
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index fa5de91f46..7b90611d1c 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -1,13 +1,14 @@
 from __future__ import annotations
 
-from distutils import log
-import distutils.command.install_scripts as orig
 import os
 import sys
 
 from .._path import ensure_directory
 from ..dist import Distribution
 
+import distutils.command.install_scripts as orig
+from distutils import log
+
 
 class install_scripts(orig.install_scripts):
     """Do normal script install, plus any egg_info wrapper scripts"""
@@ -32,6 +33,7 @@ def run(self) -> None:
     def _install_ep_scripts(self):
         # Delay import side-effects
         from pkg_resources import Distribution, PathMetadata
+
         from . import easy_install as ei
 
         ei_cmd = self.get_finalized_command("egg_info")
diff --git a/setuptools/command/register.py b/setuptools/command/register.py
index beee9782e7..575790e5f2 100644
--- a/setuptools/command/register.py
+++ b/setuptools/command/register.py
@@ -1,8 +1,8 @@
-from distutils import log
-import distutils.command.register as orig
-
 from setuptools.errors import RemovedCommandError
 
+import distutils.command.register as orig
+from distutils import log
+
 
 class register(orig.register):
     """Formerly used to register packages on PyPI."""
diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py
index 064d7959ff..dcdfafbcf7 100644
--- a/setuptools/command/rotate.py
+++ b/setuptools/command/rotate.py
@@ -1,13 +1,14 @@
 from __future__ import annotations
 
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import DistutilsOptionError
 import os
 import shutil
 
 from setuptools import Command
 
+from distutils import log
+from distutils.errors import DistutilsOptionError
+from distutils.util import convert_path
+
 
 class rotate(Command):
     """Delete older distributions"""
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index ca0f712792..23393c0797 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -1,13 +1,14 @@
-from distutils import log
-import distutils.command.sdist as orig
-import os
 import contextlib
+import os
 from itertools import chain
 
 from .._importlib import metadata
 from ..dist import Distribution
 from .build import _ORIGINAL_SUBCOMMANDS
 
+import distutils.command.sdist as orig
+from distutils import log
+
 _default_revctrl = list
 
 
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index 3d8181d2fa..b2653bd466 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -1,14 +1,15 @@
-from abc import ABC
-from distutils.util import convert_path
-from distutils import log
-from distutils.errors import DistutilsOptionError
-import distutils
-import os
 import configparser
+import os
+from abc import ABC
 
 from .. import Command
 from ..unicode_utils import _cfg_read_utf8_with_fallback
 
+import distutils
+from distutils import log
+from distutils.errors import DistutilsOptionError
+from distutils.util import convert_path
+
 __all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
 
 
diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
index e05c82ae0f..649b41fa30 100644
--- a/setuptools/command/upload.py
+++ b/setuptools/command/upload.py
@@ -1,9 +1,9 @@
-from distutils import log
-from distutils.command import upload as orig
-
 from setuptools.dist import Distribution
 from setuptools.errors import RemovedCommandError
 
+from distutils import log
+from distutils.command import upload as orig
+
 
 class upload(orig.upload):
     """Formerly used to upload packages to PyPI."""
diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py
index 32c9abd796..3c2946cfc8 100644
--- a/setuptools/command/upload_docs.py
+++ b/setuptools/command/upload_docs.py
@@ -4,23 +4,23 @@
 sites other than PyPi such as devpi).
 """
 
-from base64 import standard_b64encode
-from distutils import log
-from distutils.errors import DistutilsOptionError
-import os
-import zipfile
-import tempfile
-import shutil
-import itertools
 import functools
 import http.client
+import itertools
+import os
+import shutil
+import tempfile
 import urllib.parse
+import zipfile
+from base64 import standard_b64encode
 
 from .._importlib import metadata
 from ..warnings import SetuptoolsDeprecationWarning
-
 from .upload import upload
 
+from distutils import log
+from distutils.errors import DistutilsOptionError
+
 
 def _encode(s):
     return s.encode('utf-8', 'surrogateescape')
diff --git a/setuptools/compat/py310.py b/setuptools/compat/py310.py
index cc875c004b..b3912f8e02 100644
--- a/setuptools/compat/py310.py
+++ b/setuptools/compat/py310.py
@@ -1,6 +1,5 @@
 import sys
 
-
 __all__ = ['tomllib']
 
 
diff --git a/setuptools/compat/py311.py b/setuptools/compat/py311.py
index cd5abc5407..52b58af32a 100644
--- a/setuptools/compat/py311.py
+++ b/setuptools/compat/py311.py
@@ -2,10 +2,10 @@
 
 import shutil
 import sys
-from typing import Any, Callable, TYPE_CHECKING
+from typing import TYPE_CHECKING, Any, Callable
 
 if TYPE_CHECKING:
-    from _typeshed import StrOrBytesPath, ExcInfo
+    from _typeshed import ExcInfo, StrOrBytesPath
     from typing_extensions import TypeAlias
 
 # Same as shutil._OnExcCallback from typeshed
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 20f36e85aa..8224a02631 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -17,23 +17,19 @@
 from inspect import cleandoc
 from itertools import chain
 from types import MappingProxyType
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    Dict,
-    Mapping,
-    Union,
-)
+from typing import TYPE_CHECKING, Any, Callable, Dict, Mapping, Union
+
 from .._path import StrPath
 from ..errors import RemovedConfigError
 from ..warnings import SetuptoolsWarning
 
 if TYPE_CHECKING:
-    from distutils.dist import _OptionsList
+    from typing_extensions import TypeAlias
+
     from setuptools._importlib import metadata
     from setuptools.dist import Distribution
-    from typing_extensions import TypeAlias
+
+    from distutils.dist import _OptionsList
 
 EMPTY: Mapping = MappingProxyType({})  # Immutable dict-like
 _ProjectReadmeValue: TypeAlias = Union[str, Dict[str, str]]
@@ -262,9 +258,10 @@ def _copy_command_options(pyproject: dict, dist: Distribution, filename: StrPath
 
 
 def _valid_command_options(cmdclass: Mapping = EMPTY) -> dict[str, set[str]]:
-    from .._importlib import metadata
     from setuptools.dist import Distribution
 
+    from .._importlib import metadata
+
     valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}
 
     unloaded_entry_points = metadata.entry_points(group='distutils.commands')
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index de6339fa42..dfee1c5d37 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -25,28 +25,20 @@
 import os
 import pathlib
 import sys
-from glob import iglob
 from configparser import ConfigParser
+from glob import iglob
 from importlib.machinery import ModuleSpec, all_suffixes
 from itertools import chain
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    Iterable,
-    Iterator,
-    Mapping,
-    TypeVar,
-)
 from pathlib import Path
 from types import ModuleType
+from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Mapping, TypeVar
 
-from distutils.errors import DistutilsOptionError
-
-from .._path import same_path as _same_path, StrPath
+from .._path import StrPath, same_path as _same_path
 from ..discovery import find_package_path
 from ..warnings import SetuptoolsWarning
 
+from distutils.errors import DistutilsOptionError
+
 if TYPE_CHECKING:
     from setuptools.dist import Distribution
 
@@ -287,8 +279,9 @@ def find_packages(
 
     :rtype: list
     """
+    from more_itertools import always_iterable, unique_everseen
+
     from setuptools.discovery import construct_package_dir
-    from more_itertools import unique_everseen, always_iterable
 
     if namespaces:
         from setuptools.discovery import PEP420PackageFinder as PackageFinder
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index b7d521b7ad..943b9f5a00 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -21,13 +21,13 @@
 from ..errors import FileError, InvalidConfigError
 from ..warnings import SetuptoolsWarning
 from . import expand as _expand
-from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _MissingDynamic
-from ._apply_pyprojecttoml import apply as _apply
+from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _MissingDynamic, apply as _apply
 
 if TYPE_CHECKING:
-    from setuptools.dist import Distribution
     from typing_extensions import Self
 
+    from setuptools.dist import Distribution
+
 _logger = logging.getLogger(__name__)
 
 
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 5a3b7478a0..e825477043 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -15,12 +15,11 @@
 import functools
 import os
 from collections import defaultdict
-from functools import partial
-from functools import wraps
+from functools import partial, wraps
 from typing import (
     TYPE_CHECKING,
-    Callable,
     Any,
+    Callable,
     Dict,
     Generic,
     Iterable,
@@ -30,20 +29,21 @@
     Union,
 )
 
-from .._path import StrPath
-from ..errors import FileError, OptionError
 from packaging.markers import default_environment as marker_env
 from packaging.requirements import InvalidRequirement, Requirement
 from packaging.specifiers import SpecifierSet
 from packaging.version import InvalidVersion, Version
+
+from .._path import StrPath
+from ..errors import FileError, OptionError
 from ..warnings import SetuptoolsDeprecationWarning
 from . import expand
 
 if TYPE_CHECKING:
-    from distutils.dist import DistributionMetadata
-
     from setuptools.dist import Distribution
 
+    from distutils.dist import DistributionMetadata
+
 SingleCommandOptions = Dict["str", Tuple["str", Any]]
 """Dict that associate the name of the options of a particular command to a
 tuple. The first element of the tuple indicates the origin of the option value
diff --git a/setuptools/depends.py b/setuptools/depends.py
index 871a0925ef..9398b95331 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -1,13 +1,12 @@
-import sys
-import marshal
 import contextlib
 import dis
+import marshal
+import sys
 
-
-from . import _imp
-from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
 from packaging.version import Version
 
+from . import _imp
+from ._imp import PY_COMPILED, PY_FROZEN, PY_SOURCE, find_module
 
 __all__ = ['Require', 'find_module']
 
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
index 3179852c69..577be2f16b 100644
--- a/setuptools/discovery.py
+++ b/setuptools/discovery.py
@@ -44,16 +44,12 @@
 from fnmatch import fnmatchcase
 from glob import glob
 from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Iterable,
-    Iterator,
-    Mapping,
-)
+from typing import TYPE_CHECKING, Iterable, Iterator, Mapping
 
 import _distutils_hack.override  # noqa: F401
 
 from ._path import StrPath
+
 from distutils import log
 from distutils.util import convert_path
 
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 03017e56e1..b2250e3476 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -10,31 +10,33 @@
 from pathlib import Path
 from typing import TYPE_CHECKING, MutableMapping
 
-import distutils.cmd
-import distutils.command
-import distutils.core
-import distutils.dist
-import distutils.log
-from distutils.debug import DEBUG
-from distutils.errors import DistutilsOptionError, DistutilsSetupError
-from distutils.fancy_getopt import translate_longopt
-from distutils.util import strtobool
-
 from more_itertools import partition, unique_everseen
 from ordered_set import OrderedSet
 from packaging.markers import InvalidMarker, Marker
 from packaging.specifiers import InvalidSpecifier, SpecifierSet
 from packaging.version import Version
 
-from . import _entry_points
-from . import _reqs
-from . import command as _  # noqa  -- imported for side-effects
+from . import (
+    _entry_points,
+    _reqs,
+    command as _,  # noqa: F401 # imported for side-effects
+)
 from ._importlib import metadata
-from .config import setupcfg, pyprojecttoml
+from .config import pyprojecttoml, setupcfg
 from .discovery import ConfigDiscovery
 from .monkey import get_unpatched
 from .warnings import InformationOnly, SetuptoolsDeprecationWarning
 
+import distutils.cmd
+import distutils.command
+import distutils.core
+import distutils.dist
+import distutils.log
+from distutils.debug import DEBUG
+from distutils.errors import DistutilsOptionError, DistutilsSetupError
+from distutils.fancy_getopt import translate_longopt
+from distutils.util import strtobool
+
 __all__ = ['Distribution']
 
 sequence = tuple, list
diff --git a/setuptools/errors.py b/setuptools/errors.py
index 67a5a1df10..dd4e58e9fc 100644
--- a/setuptools/errors.py
+++ b/setuptools/errors.py
@@ -5,7 +5,6 @@
 
 from distutils import errors as _distutils_errors
 
-
 # Re-export errors from distutils to facilitate the migration to PEP632
 
 ByteCompileError = _distutils_errors.DistutilsByteCompileError
diff --git a/setuptools/extension.py b/setuptools/extension.py
index 96d392ef2b..b9fff2367f 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -1,13 +1,15 @@
 from __future__ import annotations
-import re
+
 import functools
-import distutils.core
-import distutils.errors
-import distutils.extension
+import re
 from typing import TYPE_CHECKING
 
 from .monkey import get_unpatched
 
+import distutils.core
+import distutils.errors
+import distutils.extension
+
 
 def _have_cython():
     """
diff --git a/setuptools/glob.py b/setuptools/glob.py
index a184c0b643..ffe0ae92cb 100644
--- a/setuptools/glob.py
+++ b/setuptools/glob.py
@@ -6,9 +6,9 @@
  * Hidden files are not ignored.
 """
 
+import fnmatch
 import os
 import re
-import fnmatch
 
 __all__ = ["glob", "iglob", "escape"]
 
diff --git a/setuptools/installer.py b/setuptools/installer.py
index a6aff723c2..ce3559cd93 100644
--- a/setuptools/installer.py
+++ b/setuptools/installer.py
@@ -3,13 +3,14 @@
 import subprocess
 import sys
 import tempfile
-from distutils import log
-from distutils.errors import DistutilsError
 from functools import partial
 
 from . import _reqs
-from .wheel import Wheel
 from .warnings import SetuptoolsDeprecationWarning
+from .wheel import Wheel
+
+from distutils import log
+from distutils.errors import DistutilsError
 
 
 def _fixup_find_links(find_links):
diff --git a/setuptools/launch.py b/setuptools/launch.py
index 0208fdf33b..56c7d035f1 100644
--- a/setuptools/launch.py
+++ b/setuptools/launch.py
@@ -6,8 +6,8 @@
 # Note that setuptools gets imported implicitly by the
 # invocation of this script using python -m setuptools.launch
 
-import tokenize
 import sys
+import tokenize
 
 
 def run():
diff --git a/setuptools/logging.py b/setuptools/logging.py
index ceca99ca76..e9674c5a81 100644
--- a/setuptools/logging.py
+++ b/setuptools/logging.py
@@ -1,9 +1,11 @@
-import sys
 import inspect
 import logging
-import distutils.log
+import sys
+
 from . import monkey
 
+import distutils.log
+
 
 def _not_warning(record):
     return record.levelno < logging.WARNING
diff --git a/setuptools/modified.py b/setuptools/modified.py
index af6ceeac89..245a61580b 100644
--- a/setuptools/modified.py
+++ b/setuptools/modified.py
@@ -1,7 +1,7 @@
 from ._distutils._modified import (
     newer,
-    newer_pairwise,
     newer_group,
+    newer_pairwise,
     newer_pairwise_group,
 )
 
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index e513f95245..abcc2755be 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -14,7 +14,6 @@
 
 import distutils.filelist
 
-
 _T = TypeVar("_T")
 
 __all__: list[str] = []
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 2768059213..ca332d59aa 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -13,19 +13,20 @@
 
 from __future__ import annotations
 
-import json
-from os import listdir, pathsep
-from os.path import join, isfile, isdir, dirname
-from subprocess import CalledProcessError
 import contextlib
-import platform
 import itertools
+import json
+import platform
 import subprocess
-import distutils.errors
+from os import listdir, pathsep
+from os.path import dirname, isdir, isfile, join
+from subprocess import CalledProcessError
 from typing import TYPE_CHECKING
 
 from more_itertools import unique_everseen
 
+import distutils.errors
+
 # https://github.com/python/mypy/issues/8166
 if not TYPE_CHECKING and platform.system() == 'Windows':
     import winreg
diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py
index 2f2c1cfbe1..e82439e3ef 100644
--- a/setuptools/namespaces.py
+++ b/setuptools/namespaces.py
@@ -1,9 +1,9 @@
-import os
-from distutils import log
 import itertools
+import os
 
 from .compat import py39
 
+from distutils import log
 
 flatten = itertools.chain.from_iterable
 
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index c24c783762..a66cbb2e61 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1,49 +1,49 @@
 """PyPI and direct package downloading."""
 
-import sys
-import subprocess
-import os
-import re
-import io
-import shutil
-import socket
 import base64
-import hashlib
-import itertools
 import configparser
+import hashlib
 import html
 import http.client
+import io
+import itertools
+import os
+import re
+import shutil
+import socket
+import subprocess
+import sys
+import urllib.error
 import urllib.parse
 import urllib.request
-import urllib.error
+from fnmatch import translate
 from functools import wraps
 
+from more_itertools import unique_everseen
+
 import setuptools
 from pkg_resources import (
-    CHECKOUT_DIST,
-    Distribution,
     BINARY_DIST,
-    normalize_path,
+    CHECKOUT_DIST,
+    DEVELOP_DIST,
+    EGG_DIST,
     SOURCE_DIST,
+    Distribution,
     Environment,
+    Requirement,
     find_distributions,
+    normalize_path,
+    parse_version,
     safe_name,
     safe_version,
     to_filename,
-    Requirement,
-    DEVELOP_DIST,
-    EGG_DIST,
-    parse_version,
 )
-from distutils import log
-from distutils.errors import DistutilsError
-from fnmatch import translate
 from setuptools.wheel import Wheel
 
-from more_itertools import unique_everseen
-
-from .unicode_utils import _read_utf8_with_fallback, _cfg_read_utf8_with_fallback
+from .unicode_utils import _cfg_read_utf8_with_fallback, _read_utf8_with_fallback
 
+from distutils import log
+from distutils.errors import DistutilsError
 
 EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
 HREF = re.compile(r"""href\s*=\s*['"]?([^'"> ]+)""", re.I)
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 31ba1e3f8d..5ba32a89b3 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -1,22 +1,23 @@
 from __future__ import annotations
 
-from abc import ABC
-import os
-import sys
-import tempfile
-import operator
+import builtins
+import contextlib
 import functools
 import itertools
-import re
-import contextlib
+import operator
+import os
 import pickle
+import re
+import sys
+import tempfile
 import textwrap
-import builtins
+from abc import ABC
 
 import pkg_resources
-from distutils.errors import DistutilsError
 from pkg_resources import working_set
 
+from distutils.errors import DistutilsError
+
 if sys.platform.startswith('java'):
     import org.python.modules.posix.PosixModule as _os
 else:
diff --git a/setuptools/tests/__init__.py b/setuptools/tests/__init__.py
index 415ece4234..eb70bfb711 100644
--- a/setuptools/tests/__init__.py
+++ b/setuptools/tests/__init__.py
@@ -3,7 +3,6 @@
 
 import pytest
 
-
 __all__ = ['fail_on_ascii']
 
 if sys.version_info >= (3, 11):
diff --git a/setuptools/tests/compat/py39.py b/setuptools/tests/compat/py39.py
index 9c86065cd2..1fdb9dac1f 100644
--- a/setuptools/tests/compat/py39.py
+++ b/setuptools/tests/compat/py39.py
@@ -1,4 +1,3 @@
 from jaraco.test.cpython import from_test_support, try_import
 
-
 os_helper = try_import('os_helper') or from_test_support('can_symlink')
diff --git a/setuptools/tests/config/downloads/preload.py b/setuptools/tests/config/downloads/preload.py
index d559beff33..8eeb5dd75d 100644
--- a/setuptools/tests/config/downloads/preload.py
+++ b/setuptools/tests/config/downloads/preload.py
@@ -13,7 +13,6 @@
 
 from . import retrieve_file, urls_from_file
 
-
 if __name__ == "__main__":
     urls = urls_from_file(Path(sys.argv[1]))
     list(map(retrieve_file, urls))
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 6650a657d3..156823533a 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -15,20 +15,17 @@
 
 import pytest
 from ini2toml.api import LiteTranslator
-
 from packaging.metadata import Metadata
 
 import setuptools  # noqa ensure monkey patch to metadata
-from setuptools.dist import Distribution
-from setuptools.config import setupcfg, pyprojecttoml
-from setuptools.config import expand
-from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
 from setuptools.command.egg_info import write_requirements
+from setuptools.config import expand, pyprojecttoml, setupcfg
+from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
+from setuptools.dist import Distribution
 from setuptools.errors import RemovedConfigError
 
 from .downloads import retrieve_file, urls_from_file
 
-
 HERE = Path(__file__).parent
 EXAMPLES_FILE = "setupcfg_examples.txt"
 
diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
index b309a1ce7c..f51d2008d0 100644
--- a/setuptools/tests/config/test_expand.py
+++ b/setuptools/tests/config/test_expand.py
@@ -4,10 +4,11 @@
 
 import pytest
 
-from distutils.errors import DistutilsOptionError
 from setuptools.config import expand
 from setuptools.discovery import find_package_path
 
+from distutils.errors import DistutilsOptionError
+
 
 def write_files(files, root_dir):
     for file, content in files.items():
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
index bf8cae5a24..bb15ce10de 100644
--- a/setuptools/tests/config/test_pyprojecttoml.py
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -7,18 +7,17 @@
 import tomli_w
 from path import Path
 
+import setuptools  # noqa: F401 # force distutils.core to be patched
 from setuptools.config.pyprojecttoml import (
     _ToolsTypoInMetadata,
-    read_configuration,
-    expand_configuration,
     apply_configuration,
+    expand_configuration,
+    read_configuration,
     validate,
 )
 from setuptools.dist import Distribution
 from setuptools.errors import OptionError
 
-
-import setuptools  # noqa -- force distutils.core to be patched
 import distutils.core
 
 EXAMPLE = """
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 0074fe2308..2c50ee2db7 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -5,14 +5,16 @@
 from unittest.mock import Mock, patch
 
 import pytest
+from packaging.requirements import InvalidRequirement
 
-from distutils.errors import DistutilsOptionError, DistutilsFileError
-from setuptools.dist import Distribution, _Distribution
 from setuptools.config.setupcfg import ConfigHandler, read_configuration
-from packaging.requirements import InvalidRequirement
+from setuptools.dist import Distribution, _Distribution
 from setuptools.warnings import SetuptoolsDeprecationWarning
+
 from ..textwrap import DALS
 
+from distutils.errors import DistutilsFileError, DistutilsOptionError
+
 
 class ErrConfigHandler(ConfigHandler):
     """Erroneous handler. Fails to implement required methods."""
diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py
index 112cdf4b28..97cceea0e7 100644
--- a/setuptools/tests/contexts.py
+++ b/setuptools/tests/contexts.py
@@ -1,10 +1,10 @@
-import tempfile
+import contextlib
+import io
 import os
 import shutil
-import sys
-import contextlib
 import site
-import io
+import sys
+import tempfile
 
 from filelock import FileLock
 
diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py
index b9de4fda6b..ed5499ef7d 100644
--- a/setuptools/tests/environment.py
+++ b/setuptools/tests/environment.py
@@ -1,8 +1,8 @@
 import os
-import sys
 import subprocess
+import sys
 import unicodedata
-from subprocess import Popen as _Popen, PIPE as _PIPE
+from subprocess import PIPE as _PIPE, Popen as _Popen
 
 import jaraco.envs
 
diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index a2870f11e1..a5472984b5 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -1,11 +1,11 @@
-import os
 import contextlib
-import sys
+import os
 import subprocess
+import sys
 from pathlib import Path
 
-import pytest
 import path
+import pytest
 
 from . import contexts, environment
 
diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py
index 615c43b2e0..77b196e029 100644
--- a/setuptools/tests/integration/helpers.py
+++ b/setuptools/tests/integration/helpers.py
@@ -8,8 +8,8 @@
 import os
 import subprocess
 import tarfile
-from zipfile import ZipFile
 from pathlib import Path
+from zipfile import ZipFile
 
 
 def run(cmd, env=None):
diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py
index 8cb735f1e6..15bbc3b1f0 100644
--- a/setuptools/tests/server.py
+++ b/setuptools/tests/server.py
@@ -1,9 +1,9 @@
 """Basic http server for tests to simulate PyPI or custom indexes"""
 
+import http.server
 import os
-import time
 import threading
-import http.server
+import time
 import urllib.parse
 import urllib.request
 
diff --git a/setuptools/tests/test_archive_util.py b/setuptools/tests/test_archive_util.py
index 06d7f05aa0..e3efc62889 100644
--- a/setuptools/tests/test_archive_util.py
+++ b/setuptools/tests/test_archive_util.py
@@ -1,5 +1,5 @@
-import tarfile
 import io
+import tarfile
 
 import pytest
 
diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py
index f2b99b053b..d9d67b0616 100644
--- a/setuptools/tests/test_bdist_deprecations.py
+++ b/setuptools/tests/test_bdist_deprecations.py
@@ -5,8 +5,8 @@
 
 import pytest
 
-from setuptools.dist import Distribution
 from setuptools import SetuptoolsDeprecationWarning
+from setuptools.dist import Distribution
 
 
 @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 3ca277ba15..a1e3d9a73e 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -16,9 +16,9 @@
 
 import jaraco.path
 import pytest
+from packaging import tags
 
 import setuptools
-from distutils.core import run_setup
 from setuptools.command.bdist_wheel import (
     bdist_wheel,
     get_abi_tag,
@@ -26,7 +26,8 @@
     remove_readonly_exc,
 )
 from setuptools.dist import Distribution
-from packaging import tags
+
+from distutils.core import run_setup
 
 DEFAULT_FILES = {
     "dummy_dist-1.0.dist-info/top_level.txt",
diff --git a/setuptools/tests/test_build.py b/setuptools/tests/test_build.py
index 141522efd4..f0f1d9dcf2 100644
--- a/setuptools/tests/test_build.py
+++ b/setuptools/tests/test_build.py
@@ -1,6 +1,6 @@
 from setuptools import Command
-from setuptools.dist import Distribution
 from setuptools.command.build import build
+from setuptools.dist import Distribution
 
 
 def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
diff --git a/setuptools/tests/test_build_clib.py b/setuptools/tests/test_build_clib.py
index 2c5b956c7c..b5315df4f6 100644
--- a/setuptools/tests/test_build_clib.py
+++ b/setuptools/tests/test_build_clib.py
@@ -1,12 +1,13 @@
+import random
 from unittest import mock
 
 import pytest
 
-import random
-from distutils.errors import DistutilsSetupError
 from setuptools.command.build_clib import build_clib
 from setuptools.dist import Distribution
 
+from distutils.errors import DistutilsSetupError
+
 
 class TestBuildCLib:
     @mock.patch('setuptools.command.build_clib.newer_pairwise_group')
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index 943fc6df2f..814fbd86aa 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -1,21 +1,20 @@
 import os
 import sys
-import distutils.command.build_ext as orig
-from distutils.sysconfig import get_config_var
 from importlib.util import cache_from_source as _compiled_file_name
 
+import pytest
 from jaraco import path
 
 from setuptools.command.build_ext import build_ext, get_abi3_suffix
 from setuptools.dist import Distribution
-from setuptools.extension import Extension
 from setuptools.errors import CompileError
+from setuptools.extension import Extension
 
 from . import environment
 from .textwrap import DALS
 
-import pytest
-
+import distutils.command.build_ext as orig
+from distutils.sysconfig import get_config_var
 
 IS_PYPY = '__pypy__' in sys.builtin_module_names
 
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 16f75f8763..c34f711662 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -1,14 +1,14 @@
+import contextlib
+import importlib
 import os
-import sys
+import re
 import shutil
 import signal
+import sys
 import tarfile
-import importlib
-import contextlib
 from concurrent import futures
-import re
-from zipfile import ZipFile
 from pathlib import Path
+from zipfile import ZipFile
 
 import pytest
 from jaraco import path
diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index 6900fdefbd..b8cd77dc0b 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -1,12 +1,12 @@
 import os
-import stat
 import shutil
+import stat
 import warnings
 from pathlib import Path
 from unittest.mock import Mock
 
-import pytest
 import jaraco.path
+import pytest
 
 from setuptools import SetuptoolsDeprecationWarning
 from setuptools.dist import Distribution
diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py
index ff9e672b68..af172953e3 100644
--- a/setuptools/tests/test_config_discovery.py
+++ b/setuptools/tests/test_config_discovery.py
@@ -3,22 +3,22 @@
 from configparser import ConfigParser
 from itertools import product
 
+import jaraco.path
+import pytest
+from path import Path
+
+import setuptools  # noqa: F401 # force distutils.core to be patched
 from setuptools.command.sdist import sdist
-from setuptools.dist import Distribution
 from setuptools.discovery import find_package_path, find_parent_package
+from setuptools.dist import Distribution
 from setuptools.errors import PackageDiscoveryError
 
-import setuptools  # noqa -- force distutils.core to be patched
-import distutils.core
-
-import pytest
-import jaraco.path
-from path import Path
-
 from .contexts import quiet
 from .integration.helpers import get_sdist_members, get_wheel_members, run
 from .textwrap import DALS
 
+import distutils.core
+
 
 class TestFindParentPackage:
     def test_single_package(self, tmp_path):
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index eaabf20a9d..3e2ce65ea3 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -1,17 +1,15 @@
 import functools
-import io
 import importlib
+import io
 from email import message_from_string
 
 import pytest
-
 from packaging.metadata import Metadata
 
-from setuptools import sic, _reqs
-from setuptools.dist import Distribution
+from setuptools import _reqs, sic
 from setuptools._core_metadata import rfc822_escape, rfc822_unescape
 from setuptools.command.egg_info import egg_info, write_requirements
-
+from setuptools.dist import Distribution
 
 EXAMPLE_BASE_INFO = dict(
     name="package",
diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py
index 6f17beb703..929fa9c285 100644
--- a/setuptools/tests/test_develop.py
+++ b/setuptools/tests/test_develop.py
@@ -1,18 +1,18 @@
 """develop tests"""
 
 import os
-import sys
-import subprocess
 import pathlib
 import platform
+import subprocess
+import sys
 
 import pytest
 
+from setuptools._path import paths_on_pythonpath
 from setuptools.command.develop import develop
 from setuptools.dist import Distribution
-from setuptools._path import paths_on_pythonpath
-from . import contexts
-from . import namespaces
+
+from . import contexts, namespaces
 
 SETUP_PY = """\
 from setuptools import setup
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 99cd582501..a5a5e7606d 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -1,20 +1,19 @@
 import collections
-import re
 import os
-import urllib.request
+import re
 import urllib.parse
-from distutils.errors import DistutilsSetupError
-from setuptools.dist import (
-    check_package_data,
-    check_specifier,
-)
+import urllib.request
+
+import pytest
+
 from setuptools import Distribution
+from setuptools.dist import check_package_data, check_specifier
 
-from .textwrap import DALS
 from .test_easy_install import make_nspkg_sdist
 from .test_find_packages import ensure_files
+from .textwrap import DALS
 
-import pytest
+from distutils.errors import DistutilsSetupError
 
 
 def test_dist_fetch_build_egg(tmpdir):
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
index 44be6c3284..6e109c9db2 100644
--- a/setuptools/tests/test_dist_info.py
+++ b/setuptools/tests/test_dist_info.py
@@ -11,8 +11,8 @@
 
 import pkg_resources
 from setuptools.archive_util import unpack_archive
-from .textwrap import DALS
 
+from .textwrap import DALS
 
 read = partial(pathlib.Path.read_text, encoding="utf-8")
 
diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py
index 74883d2199..0b020ba9fc 100644
--- a/setuptools/tests/test_distutils_adoption.py
+++ b/setuptools/tests/test_distutils_adoption.py
@@ -1,11 +1,10 @@
 import os
-import sys
 import platform
+import sys
 import textwrap
 
 import pytest
 
-
 IS_PYPY = '__pypy__' in sys.builtin_module_names
 
 _TEXT_KWARGS = {"text": True, "encoding": "utf-8"}  # For subprocess.run
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 7346a07929..ca6af9667e 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -1,42 +1,42 @@
 """Easy install Tests"""
 
-import sys
-import os
-import tempfile
-import site
 import contextlib
-import tarfile
-import logging
-import itertools
-import distutils.errors
 import io
-from typing import NamedTuple
-import zipfile
-import time
+import itertools
+import logging
+import os
+import pathlib
 import re
+import site
 import subprocess
-import pathlib
+import sys
+import tarfile
+import tempfile
+import time
 import warnings
+import zipfile
 from pathlib import Path
+from typing import NamedTuple
 from unittest import mock
 
 import pytest
 from jaraco import path
 
-from setuptools import sandbox
-from setuptools.sandbox import run_setup
+import pkg_resources
 import setuptools.command.easy_install as ei
+from pkg_resources import Distribution as PRDistribution, normalize_path, working_set
+from setuptools import sandbox
 from setuptools.command.easy_install import PthDistributions
 from setuptools.dist import Distribution
-from pkg_resources import normalize_path, working_set
-from pkg_resources import Distribution as PRDistribution
-from setuptools.tests.server import MockServer, path_to_url
+from setuptools.sandbox import run_setup
 from setuptools.tests import fail_on_ascii
-import pkg_resources
+from setuptools.tests.server import MockServer, path_to_url
 
 from . import contexts
 from .textwrap import DALS
 
+import distutils.errors
+
 
 @pytest.fixture(autouse=True)
 def pip_disable_index(monkeypatch):
diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index 24c10a5054..287367ac18 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -1,8 +1,8 @@
 import os
+import platform
 import stat
-import sys
 import subprocess
-import platform
+import sys
 from copy import deepcopy
 from importlib import import_module
 from importlib.machinery import EXTENSION_SUFFIXES
@@ -11,31 +11,31 @@
 from unittest.mock import Mock
 from uuid import uuid4
 
-from distutils.core import run_setup
-
 import jaraco.envs
 import jaraco.path
 import pytest
 from path import Path as _Path
 
-from . import contexts, namespaces
-
 from setuptools._importlib import resources as importlib_resources
 from setuptools.command.editable_wheel import (
     _DebuggingTips,
-    _LinkTree,
-    _TopLevelFinder,
     _encode_pth,
-    _find_virtual_namespaces,
     _find_namespaces,
     _find_package_roots,
+    _find_virtual_namespaces,
     _finder_template,
+    _LinkTree,
+    _TopLevelFinder,
     editable_wheel,
 )
 from setuptools.dist import Distribution
 from setuptools.extension import Extension
 from setuptools.warnings import SetuptoolsDeprecationWarning
 
+from . import contexts, namespaces
+
+from distutils.core import run_setup
+
 
 @pytest.fixture(params=["strict", "lenient"])
 def editable_opts(request):
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 856dd127bc..6e8d0c68c3 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -1,11 +1,11 @@
 from __future__ import annotations
 
-import sys
 import ast
-import os
 import glob
+import os
 import re
 import stat
+import sys
 import time
 from pathlib import Path
 from unittest import mock
@@ -14,16 +14,11 @@
 from jaraco import path
 
 from setuptools import errors
-from setuptools.command.egg_info import (
-    egg_info,
-    manifest_maker,
-    write_entries,
-)
+from setuptools.command.egg_info import egg_info, manifest_maker, write_entries
 from setuptools.dist import Distribution
 
-from . import environment
+from . import contexts, environment
 from .textwrap import DALS
-from . import contexts
 
 
 class Environment(str):
diff --git a/setuptools/tests/test_extern.py b/setuptools/tests/test_extern.py
index da01b25b98..105279d084 100644
--- a/setuptools/tests/test_extern.py
+++ b/setuptools/tests/test_extern.py
@@ -1,9 +1,10 @@
 import importlib
 import pickle
 
-from setuptools import Distribution
 import ordered_set
 
+from setuptools import Distribution
+
 
 def test_reimport_extern():
     ordered_set2 = importlib.import_module(ordered_set.__name__)
diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py
index 0f4e2bef89..9fd9f8f663 100644
--- a/setuptools/tests/test_find_packages.py
+++ b/setuptools/tests/test_find_packages.py
@@ -6,8 +6,7 @@
 
 import pytest
 
-from setuptools import find_packages
-from setuptools import find_namespace_packages
+from setuptools import find_namespace_packages, find_packages
 from setuptools.discovery import FlatLayoutPackageFinder
 
 from .compat.py39 import os_helper
diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py
index 2459c98fa3..8034b54429 100644
--- a/setuptools/tests/test_find_py_modules.py
+++ b/setuptools/tests/test_find_py_modules.py
@@ -6,8 +6,8 @@
 
 from setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder
 
-from .test_find_packages import ensure_files
 from .compat.py39 import os_helper
+from .test_find_packages import ensure_files
 
 
 class TestModuleFinder:
diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py
index 595b7ade67..2ae5496525 100644
--- a/setuptools/tests/test_install_scripts.py
+++ b/setuptools/tests/test_install_scripts.py
@@ -6,6 +6,7 @@
 
 from setuptools.command.install_scripts import install_scripts
 from setuptools.dist import Distribution
+
 from . import contexts
 
 
diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py
index 78f459d018..b0e7d67b5e 100644
--- a/setuptools/tests/test_integration.py
+++ b/setuptools/tests/test_integration.py
@@ -10,11 +10,10 @@
 
 import pytest
 
-from setuptools.command.easy_install import easy_install
 from setuptools.command import easy_install as easy_install_pkg
+from setuptools.command.easy_install import easy_install
 from setuptools.dist import Distribution
 
-
 pytestmark = pytest.mark.skipif(
     'platform.python_implementation() == "PyPy" and platform.system() == "Windows"',
     reason="pypa/setuptools#2496",
diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py
index cf89b3bd00..e01df7277c 100644
--- a/setuptools/tests/test_logging.py
+++ b/setuptools/tests/test_logging.py
@@ -5,7 +5,6 @@
 
 import pytest
 
-
 IS_PYPY = '__pypy__' in sys.builtin_module_names
 
 
@@ -25,6 +24,7 @@
 def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level):
     """Make sure the correct verbosity level is set (issue #3038)"""
     import setuptools  # noqa: F401  # import setuptools to monkeypatch distutils
+
     import distutils  # <- load distutils after all the patches take place
 
     logger = logging.Logger(__name__)
@@ -61,7 +61,9 @@ def test_patching_does_not_cause_problems():
     # Ensure `dist.log` is only patched if necessary
 
     import _distutils_hack
+
     import setuptools.logging
+
     from distutils import dist
 
     setuptools.logging.configure()
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
index f3eba733d9..ae5572b83c 100644
--- a/setuptools/tests/test_manifest.py
+++ b/setuptools/tests/test_manifest.py
@@ -3,22 +3,22 @@
 from __future__ import annotations
 
 import contextlib
+import io
+import itertools
+import logging
 import os
 import shutil
 import sys
 import tempfile
-import itertools
-import io
-import logging
-from distutils import log
-from distutils.errors import DistutilsTemplateError
+
+import pytest
 
 from setuptools.command.egg_info import FileList, egg_info, translate_pattern
 from setuptools.dist import Distribution
 from setuptools.tests.textwrap import DALS
 
-import pytest
-
+from distutils import log
+from distutils.errors import DistutilsTemplateError
 
 IS_PYPY = '__pypy__' in sys.builtin_module_names
 
diff --git a/setuptools/tests/test_msvc14.py b/setuptools/tests/test_msvc14.py
index 4b8344539f..57d3cc38e8 100644
--- a/setuptools/tests/test_msvc14.py
+++ b/setuptools/tests/test_msvc14.py
@@ -3,10 +3,12 @@
 """
 
 import os
-from distutils.errors import DistutilsPlatformError
-import pytest
 import sys
 
+import pytest
+
+from distutils.errors import DistutilsPlatformError
+
 
 @pytest.mark.skipif(sys.platform != "win32", reason="These tests are only for win32")
 class TestMSVC14:
diff --git a/setuptools/tests/test_namespaces.py b/setuptools/tests/test_namespaces.py
index 56689301da..a0f4120bf7 100644
--- a/setuptools/tests/test_namespaces.py
+++ b/setuptools/tests/test_namespaces.py
@@ -1,5 +1,5 @@
-import sys
 import subprocess
+import sys
 
 from setuptools._path import paths_on_pythonpath
 
diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
index f5f37e0563..e1f8458674 100644
--- a/setuptools/tests/test_packageindex.py
+++ b/setuptools/tests/test_packageindex.py
@@ -1,13 +1,14 @@
-import distutils.errors
-import urllib.request
-import urllib.error
 import http.client
+import urllib.error
+import urllib.request
 from inspect import cleandoc
 
 import pytest
 
 import setuptools.package_index
 
+import distutils.errors
+
 
 class TestPackageIndex:
     def test_regex(self):
diff --git a/setuptools/tests/test_register.py b/setuptools/tests/test_register.py
index ed85e9bbd3..0c7d109d31 100644
--- a/setuptools/tests/test_register.py
+++ b/setuptools/tests/test_register.py
@@ -1,11 +1,11 @@
-from setuptools.command.register import register
-from setuptools.dist import Distribution
-from setuptools.errors import RemovedCommandError
-
 from unittest import mock
 
 import pytest
 
+from setuptools.command.register import register
+from setuptools.dist import Distribution
+from setuptools.errors import RemovedCommandError
+
 
 class TestRegister:
     def test_register_exception(self):
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 096cdc3924..2df202fd18 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -1,32 +1,31 @@
 """sdist tests"""
 
+import contextlib
+import io
+import logging
 import os
 import sys
+import tarfile
 import tempfile
 import unicodedata
-import contextlib
-import io
-import tarfile
-import logging
-import distutils
 from inspect import cleandoc
 from unittest import mock
 
+import jaraco.path
 import pytest
 
-from distutils.core import run_setup
-from setuptools import Command
+from setuptools import Command, SetuptoolsDeprecationWarning
 from setuptools._importlib import metadata
-from setuptools import SetuptoolsDeprecationWarning
-from setuptools.command.sdist import sdist
 from setuptools.command.egg_info import manifest_maker
+from setuptools.command.sdist import sdist
 from setuptools.dist import Distribution
 from setuptools.extension import Extension
 from setuptools.tests import fail_on_ascii
-from .text import Filenames
 
-import jaraco.path
+from .text import Filenames
 
+import distutils
+from distutils.core import run_setup
 
 SETUP_ATTRS = {
     'name': 'sdist_test',
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 9865ee847c..6af1d98c6b 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -1,22 +1,22 @@
 """Tests for the 'setuptools' package"""
 
+import os
 import re
 import sys
-import os
-import distutils.core
-import distutils.cmd
-from distutils.errors import DistutilsSetupError
-from distutils.core import Extension
 from zipfile import ZipFile
 
 import pytest
+from packaging.version import Version
 
 import setuptools
-import setuptools.dist
 import setuptools.depends as dep
+import setuptools.dist
 from setuptools.depends import Require
 
-from packaging.version import Version
+import distutils.cmd
+import distutils.core
+from distutils.core import Extension
+from distutils.errors import DistutilsSetupError
 
 
 @pytest.fixture(autouse=True)
diff --git a/setuptools/tests/test_upload.py b/setuptools/tests/test_upload.py
index 4ed59bc24d..cbcd455c41 100644
--- a/setuptools/tests/test_upload.py
+++ b/setuptools/tests/test_upload.py
@@ -1,11 +1,11 @@
-from setuptools.command.upload import upload
-from setuptools.dist import Distribution
-from setuptools.errors import RemovedCommandError
-
 from unittest import mock
 
 import pytest
 
+from setuptools.command.upload import upload
+from setuptools.dist import Distribution
+from setuptools.errors import RemovedCommandError
+
 
 class TestUpload:
     def test_upload_exception(self):
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py
index 4554581ed0..cdc10f5004 100644
--- a/setuptools/tests/test_virtualenv.py
+++ b/setuptools/tests/test_virtualenv.py
@@ -1,9 +1,8 @@
 import os
-import sys
 import subprocess
-from urllib.request import urlopen
+import sys
 from urllib.error import URLError
-
+from urllib.request import urlopen
 
 import pytest
 
diff --git a/setuptools/tests/test_warnings.py b/setuptools/tests/test_warnings.py
index ac6d07795e..41193d4f71 100644
--- a/setuptools/tests/test_warnings.py
+++ b/setuptools/tests/test_warnings.py
@@ -4,7 +4,6 @@
 
 from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
 
-
 _EXAMPLES = {
     "default": dict(
         args=("Hello {x}", "\n\t{target} {v:.1f}"),
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index cc5d54b6d9..ee46f664e4 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -2,31 +2,31 @@
 
 from __future__ import annotations
 
-from distutils.sysconfig import get_config_var
-from distutils.util import get_platform
 import contextlib
-import pathlib
-import stat
 import glob
 import inspect
 import os
+import pathlib
 import shutil
+import stat
 import subprocess
 import sys
-from typing import Any
 import zipfile
+from typing import Any
 
 import pytest
 from jaraco import path
-
-from pkg_resources import Distribution, PathMetadata, PY_MAJOR
-from packaging.utils import canonicalize_name
 from packaging.tags import parse_tag
+from packaging.utils import canonicalize_name
+
+from pkg_resources import PY_MAJOR, Distribution, PathMetadata
 from setuptools.wheel import Wheel
 
 from .contexts import tempdir
 from .textwrap import DALS
 
+from distutils.sysconfig import get_config_var
+from distutils.util import get_platform
 
 WHEEL_INFO_TESTS = (
     ('invalid.whl', ValueError),
diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
index b272689351..4a112baf75 100644
--- a/setuptools/tests/test_windows_wrappers.py
+++ b/setuptools/tests/test_windows_wrappers.py
@@ -13,15 +13,15 @@
 """
 
 import pathlib
-import sys
 import platform
-import textwrap
 import subprocess
+import sys
+import textwrap
 
 import pytest
 
-from setuptools.command.easy_install import nt_quote_arg
 import pkg_resources
+from setuptools.command.easy_install import nt_quote_arg
 
 pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only")
 
diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
index 696b34c46a..862d79e898 100644
--- a/setuptools/unicode_utils.py
+++ b/setuptools/unicode_utils.py
@@ -1,5 +1,5 @@
-import unicodedata
 import sys
+import unicodedata
 from configparser import ConfigParser
 
 from .compat import py39
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index a05cd98d1f..69a73df244 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -1,26 +1,25 @@
 """Wheels support."""
 
+import contextlib
 import email
-import itertools
 import functools
+import itertools
 import os
 import posixpath
 import re
 import zipfile
-import contextlib
 
-from packaging.version import Version as parse_version
 from packaging.tags import sys_tags
 from packaging.utils import canonicalize_name
-
-from distutils.util import get_platform
+from packaging.version import Version as parse_version
 
 import setuptools
-from setuptools.command.egg_info import write_requirements, _egg_basename
 from setuptools.archive_util import _unpack_zipfile_obj
+from setuptools.command.egg_info import _egg_basename, write_requirements
 
 from .unicode_utils import _read_utf8_with_fallback
 
+from distutils.util import get_platform
 
 WHEEL_NAME = re.compile(
     r"""^(?P.+?)-(?P\d.*?)
diff --git a/tools/build_launchers.py b/tools/build_launchers.py
index c673445365..a8b85c5f55 100644
--- a/tools/build_launchers.py
+++ b/tools/build_launchers.py
@@ -17,15 +17,14 @@
 - C++ ATL for latest v143 build tools (ARM64)
 """
 
-import os
 import functools
 import itertools
+import os
 import pathlib
 import shutil
 import subprocess
 import tempfile
 
-
 BUILD_TARGETS = ["cli", "gui"]
 GUI = {"cli": 0, "gui": 1}
 BUILD_PLATFORMS = ["Win32", "x64", "arm64"]
diff --git a/tools/finalize.py b/tools/finalize.py
index 3ba5d16ac7..d646e67cd0 100644
--- a/tools/finalize.py
+++ b/tools/finalize.py
@@ -5,14 +5,13 @@
 __requires__ = ['bump2version', 'towncrier', 'jaraco.develop>=7.21']
 
 
-import subprocess
 import pathlib
 import re
+import subprocess
 import sys
 
 from jaraco.develop import towncrier
 
-
 bump_version_command = [
     sys.executable,
     '-m',
diff --git a/tools/ppc64le-patch.py b/tools/ppc64le-patch.py
index 2a8ff8e0a0..a0b04ce502 100644
--- a/tools/ppc64le-patch.py
+++ b/tools/ppc64le-patch.py
@@ -6,9 +6,9 @@
 TODO: is someone tracking this issue? Maybe just move to bionic?
 """
 
-import subprocess
 import collections
 import os
+import subprocess
 
 
 def patch():

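The reordering above is mechanical (isort-style), but it encodes one deliberate rule: standard library first, then third-party, then first-party setuptools modules, with `distutils` imports pushed to the end of every module. A minimal sketch of the resulting layout (an illustrative module, not taken from the diff):

    import os   # standard library
    import sys

    from packaging.version import Version   # third-party

    from setuptools.dist import Distribution   # first-party

    # delayed on purpose: distutils should only be imported after setuptools
    # has installed its _distutils_hack patches
    import distutils.core

Sorting `distutils` back into the standard-library group would undo that guarantee, which is presumably why it gets its own trailing section rather than the default placement.
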
From 153c0fe9d5001ee6cc42860970b753e333a8858e Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 12 Aug 2024 11:37:04 -0400
Subject: [PATCH 0992/1761] Add missing --fix args to ruff check in pre-commit (#4552)

---
 .pre-commit-config.yaml | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ffcfca062f..aaefd5fbbb 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,7 @@
 repos:
-- repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.4.9
-  hooks:
-  - id: ruff
-  - id: ruff-format
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.5.7
+    hooks:
+      - id: ruff
+        args: [--fix]
+      - id: ruff-format

From 7aa30d0d4da1db84a32d1d1e9afab49a20b1c717 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 12 Aug 2024 11:38:03 -0400
Subject: [PATCH 0993/1761] Remove sandbox Python 2 builtins.file support
 (#4554)

---
 newsfragments/4554.misc.rst |  1 +
 setuptools/sandbox.py       | 17 -----------------
 2 files changed, 1 insertion(+), 17 deletions(-)
 create mode 100644 newsfragments/4554.misc.rst

diff --git a/newsfragments/4554.misc.rst b/newsfragments/4554.misc.rst
new file mode 100644
index 0000000000..9992f93441
--- /dev/null
+++ b/newsfragments/4554.misc.rst
@@ -0,0 +1 @@
+Removed ``setuptools.sandbox``'s Python 2 ``builtins.file`` support -- by :user:`Avasam`
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 5ba32a89b3..9a101b7137 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -22,10 +22,6 @@
     import org.python.modules.posix.PosixModule as _os
 else:
     _os = sys.modules[os.name]
-try:
-    _file = file  # type: ignore[name-defined] # Check for global variable
-except NameError:
-    _file = None
 _open = open
 
 
@@ -285,15 +281,11 @@ def _copy(self, source):
 
     def __enter__(self):
         self._copy(self)
-        if _file:
-            builtins.file = self._file
         builtins.open = self._open
         self._active = True
 
     def __exit__(self, exc_type, exc_value, traceback):
         self._active = False
-        if _file:
-            builtins.file = _file
         builtins.open = _open
         self._copy(_os)
 
@@ -326,8 +318,6 @@ def wrap(self, path, *args, **kw):
 
         return wrap
 
-    if _file:
-        _file = _mk_single_path_wrapper('file', _file)
     _open = _mk_single_path_wrapper('open', _open)
     for __name in [
         "stat",
@@ -444,13 +434,6 @@ def _violation(self, operation, *args, **kw):
 
         raise SandboxViolation(operation, args, kw)
 
-    if _file:
-
-        def _file(self, path, mode='r', *args, **kw):
-            if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
-                self._violation("file", path, mode, *args, **kw)
-            return _file(path, mode, *args, **kw)
-
     def _open(self, path, mode='r', *args, **kw):
         if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
             self._violation("open", path, mode, *args, **kw)

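With the Python 2 `file` builtin gone, `open` is the only builtin the sandbox still swaps. A condensed sketch of the surviving pattern (the class name and error type here are illustrative, not the real implementation):

    import builtins

    _open = open  # saved original, restored on exit

    class WriteGuard:
        def __enter__(self):
            builtins.open = self._checked_open
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            builtins.open = _open

        def _checked_open(self, path, mode='r', *args, **kw):
            # read-only modes pass through; anything else is a violation
            if mode not in ('r', 'rt', 'rb', 'rU', 'U'):
                raise OSError(f"sandbox violation: open({path!r}, {mode!r})")
            return _open(path, mode, *args, **kw)

Inside `with WriteGuard():`, a plain `open("f.txt", "w")` fails while reads proceed untouched, which is the behaviour the remaining `_open` wrapper in the diff preserves.
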
From 8df185a23411df11e90c7a19501dfeee7d92f3a8 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Jun 2024 19:27:45 +0100
Subject: [PATCH 0994/1761] Prevent abi3 from being used with no-GIL
 interpreter

---
 setuptools/command/bdist_wheel.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index eca5568aad..cc3331fa98 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -26,6 +26,7 @@
 from wheel.wheelfile import WheelFile
 
 from .. import Command, __version__
+from ..warnings import SetuptoolsWarning
 from .egg_info import egg_info as egg_info_cls
 
 from distutils import log
@@ -279,6 +280,15 @@ def finalize_options(self) -> None:
         ):
             raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
 
+        if "t" in sys.abiflags:
+            SetuptoolsWarning.emit(
+                summary=f"Ignoring `py_limited_api={self.py_limited_api!r}`.",
+                details="`Py_LIMITED_API` is currently incompatible with "
+                f"`Py_GIL_DISABLED` ({sys.abiflags=!r}).",
+                see_url="https://github.com/python/cpython/issues/111506",
+            )
+            self.py_limited_api = False
+
         # Support legacy [wheel] section for setting universal
         wheel = self.distribution.get_option_dict("wheel")
         if "universal" in wheel:

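One caveat with this approach, addressed in the very next patch: `sys.abiflags` is simply absent on Windows builds of CPython, so the check cannot run there as written. A defensive version would be:

    import sys

    # sys.abiflags does not exist on Windows, so guard the attribute access
    abiflags = getattr(sys, "abiflags", "")
    print("free-threaded" if "t" in abiflags else "GIL-enabled")
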
From 1f960af5b16b5e4c19d781ab613d00329026d7f4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Jun 2024 10:00:44 +0100
Subject: [PATCH 0995/1761] Avoid sys.abiflags, which is missing on Windows

---
 setuptools/command/bdist_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index cc3331fa98..8e58c28309 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -280,7 +280,7 @@ def finalize_options(self) -> None:
         ):
             raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
 
-        if "t" in sys.abiflags:
+        if sysconfig.get_config_var("Py_GIL_DISABLED"):
             SetuptoolsWarning.emit(
                 summary=f"Ignoring `py_limited_api={self.py_limited_api!r}`.",
                 details="`Py_LIMITED_API` is currently incompatible with "

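Unlike `sys.abiflags`, the `sysconfig` query works on every platform: `Py_GIL_DISABLED` is 1 on free-threaded (PEP 703) builds, 0 on regular 3.13+ builds, and None on older interpreters that predate the variable. A standalone check:

    import sysconfig

    if sysconfig.get_config_var("Py_GIL_DISABLED"):
        print("free-threaded build: Py_LIMITED_API/abi3 unsupported")
    else:
        print("regular build: py_limited_api may be used")
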
From f64a665985c3188f3480ed3ac9e8a0ce775705fd Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Jun 2024 10:04:05 +0100
Subject: [PATCH 0996/1761] Fix py_limited_api check

---
 setuptools/command/bdist_wheel.py | 30 +++++++++++++++++-------------
 1 file changed, 17 insertions(+), 13 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 8e58c28309..6dd2e379bc 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -275,19 +275,7 @@ def finalize_options(self) -> None:
             self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
         )
 
-        if self.py_limited_api and not re.match(
-            PY_LIMITED_API_PATTERN, self.py_limited_api
-        ):
-            raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
-
-        if sysconfig.get_config_var("Py_GIL_DISABLED"):
-            SetuptoolsWarning.emit(
-                summary=f"Ignoring `py_limited_api={self.py_limited_api!r}`.",
-                details="`Py_LIMITED_API` is currently incompatible with "
-                f"`Py_GIL_DISABLED` ({sys.abiflags=!r}).",
-                see_url="https://github.com/python/cpython/issues/111506",
-            )
-            self.py_limited_api = False
+        self._validate_py_limited_api()
 
         # Support legacy [wheel] section for setting universal
         wheel = self.distribution.get_option_dict("wheel")
@@ -301,6 +289,22 @@ def finalize_options(self) -> None:
         if self.build_number is not None and not self.build_number[:1].isdigit():
             raise ValueError("Build tag (build-number) must start with a digit.")
 
+    def _validate_py_limited_api(self) -> None:
+        if not self.py_limited_api:
+            return
+
+        if not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api):
+            raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
+
+        if sysconfig.get_config_var("Py_GIL_DISABLED"):
+            SetuptoolsWarning.emit(
+                summary=f"Ignoring `py_limited_api={self.py_limited_api!r}`.",
+                details="`Py_LIMITED_API` is currently incompatible with "
+                f"`Py_GIL_DISABLED` ({sys.abiflags=!r}).",
+                see_url="https://github.com/python/cpython/issues/111506",
+            )
+            self.py_limited_api = False
+
     @property
     def wheel_dist_name(self) -> str:
         """Return distribution full name with - replaced with _"""

From 34a753a2adc4f8e30ecd235d1cb7727bdf0edc34 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 12 Aug 2024 16:39:32 +0100
Subject: [PATCH 0997/1761] Raise a clean error when py_limited_api is not
 supported

---
 setuptools/command/bdist_wheel.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 6dd2e379bc..8f06786659 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -26,7 +26,6 @@
 from wheel.wheelfile import WheelFile
 
 from .. import Command, __version__
-from ..warnings import SetuptoolsWarning
 from .egg_info import egg_info as egg_info_cls
 
 from distutils import log
@@ -297,13 +296,12 @@ def _validate_py_limited_api(self) -> None:
             raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
 
         if sysconfig.get_config_var("Py_GIL_DISABLED"):
-            SetuptoolsWarning.emit(
-                summary=f"Ignoring `py_limited_api={self.py_limited_api!r}`.",
-                details="`Py_LIMITED_API` is currently incompatible with "
-                f"`Py_GIL_DISABLED` ({sys.abiflags=!r}).",
-                see_url="https://github.com/python/cpython/issues/111506",
+            raise ValueError(
+                f"`py_limited_api={self.py_limited_api!r}` not supported. "
+                "`Py_LIMITED_API` is currently incompatible with "
+                f"`Py_GIL_DISABLED` ({sys.abiflags=!r}). "
+                "See https://github.com/python/cpython/issues/111506."
             )
-            self.py_limited_api = False
 
     @property
     def wheel_dist_name(self) -> str:

From 13e43c6e0f01b2a3d68cc2021c7bd44f36becf62 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 12 Aug 2024 16:50:30 +0100
Subject: [PATCH 0998/1761] Add news fragment

---
 newsfragments/4420.bugfix.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4420.bugfix.rst

diff --git a/newsfragments/4420.bugfix.rst b/newsfragments/4420.bugfix.rst
new file mode 100644
index 0000000000..c5f75fcddb
--- /dev/null
+++ b/newsfragments/4420.bugfix.rst
@@ -0,0 +1,2 @@
+Raises an exception when ``py_limited_api`` is used in a build with
+``Py_GIL_DISABLED``. This is currently not supported (python/cpython#111506).

From 3841656c61bad87f922fcba50445b503209b69c2 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 12 Aug 2024 12:13:19 -0400
Subject: [PATCH 0999/1761] Loosen restrictions on mypy (jaraco/skeleton#136)

Based on changes downstream in setuptools.
---
 mypy.ini | 15 ++++++++++++---
 1 file changed, 12 insertions(+), 3 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index b6f972769e..83b0d15c6b 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,5 +1,14 @@
 [mypy]
-ignore_missing_imports = True
-# required to support namespace packages
-# https://github.com/python/mypy/issues/14057
+# Is the project well-typed?
+strict = False
+
+# Early opt-in even when strict = False
+warn_unused_ignores = True
+warn_redundant_casts = True
+enable_error_code = ignore-without-code
+
+# Support namespace packages per https://github.com/python/mypy/issues/14057
 explicit_package_bases = True
+
+# Disable overload-overlap due to many false-positives
+disable_error_code = overload-overlap

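The practical effect of `enable_error_code = ignore-without-code` is easiest to see on a two-line example (illustrative, not from this repository): mypy now rejects bare suppressions and insists on a specific error code.

    x: int = "oops"  # type: ignore              # flagged: no error code given
    y: int = "oops"  # type: ignore[assignment]  # accepted: code is explicit
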
From 1a27fd5b8815e65571e6c028d6bef2c1daf61688 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 12 Aug 2024 12:16:15 -0400
Subject: [PATCH 1000/1761] Split the test dependencies into four classes
 (test, cover, type, check). (jaraco/skeleton#139)

---
 pyproject.toml | 25 ++++++++++++++++++++-----
 tox.ini        |  4 ++++
 2 files changed, 24 insertions(+), 5 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 1307e1fa20..31057d85f1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,14 +28,10 @@ Source = "https://github.com/PROJECT_PATH"
 test = [
 	# upstream
 	"pytest >= 6, != 8.1.*",
-	"pytest-checkdocs >= 2.4",
-	"pytest-cov",
-	"pytest-mypy",
-	"pytest-enabler >= 2.2",
-	"pytest-ruff >= 0.2.1; sys_platform != 'cygwin'",
 
 	# local
 ]
+
 doc = [
 	# upstream
 	"sphinx >= 3.5",
@@ -47,4 +43,23 @@ doc = [
 	# local
 ]
 
+check = [
+	"pytest-checkdocs >= 2.4",
+	"pytest-ruff >= 0.2.1; sys_platform != 'cygwin'",
+]
+
+cover = [
+	"pytest-cov",
+]
+
+enabler = [
+	"pytest-enabler >= 2.2",
+]
+
+type = [
+	"pytest-mypy",
+]
+
+
+
 [tool.setuptools_scm]
diff --git a/tox.ini b/tox.ini
index cc4db36e76..01f0975f5c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -8,6 +8,10 @@ commands =
 usedevelop = True
 extras =
 	test
+	check
+	cover
+	enabler
+	type
 
 [testenv:diffcov]
 description = run tests and check that diff from main is covered

From eb78a68460c66f446d3055d32e86f203698185e0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 12 Aug 2024 18:43:04 +0100
Subject: [PATCH 1001/1761] Prevent warning from deprecated entrypoint in wheel

---
 setuptools/config/_apply_pyprojecttoml.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 8224a02631..9c1ac0ccee 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -276,6 +276,10 @@ def _valid_command_options(cmdclass: Mapping = EMPTY) -> dict[str, set[str]]:
 
 
 def _load_ep(ep: metadata.EntryPoint) -> tuple[str, type] | None:
+    if ep.value.startswith("wheel.bdist_wheel"):
+        # Ignore deprecated entrypoint from wheel and avoid warning pypa/wheel#631
+        return None
+
     # Ignore all the errors
     try:
         return (ep.name, ep.load())

From 3961909a430485f92a70a6ba47eb76e43cf2a579 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 12 Aug 2024 19:01:16 +0100
Subject: [PATCH 1002/1761] Add todo annotation

---
 setuptools/config/_apply_pyprojecttoml.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 9c1ac0ccee..7b9c0b1a59 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -278,6 +278,7 @@ def _valid_command_options(cmdclass: Mapping = EMPTY) -> dict[str, set[str]]:
 def _load_ep(ep: metadata.EntryPoint) -> tuple[str, type] | None:
     if ep.value.startswith("wheel.bdist_wheel"):
         # Ignore deprecated entrypoint from wheel and avoid warning pypa/wheel#631
+        # TODO: remove check when `bdist_wheel` has been fully removed from pypa/wheel
         return None
 
     # Ignore all the errors

From e3cd584c4bb563fd88210953b75e46a9c9132108 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 12 Aug 2024 19:03:31 +0100
Subject: [PATCH 1003/1761] Add news fragment

---
 newsfragments/4559.misc.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4559.misc.rst

diff --git a/newsfragments/4559.misc.rst b/newsfragments/4559.misc.rst
new file mode 100644
index 0000000000..34b5a30664
--- /dev/null
+++ b/newsfragments/4559.misc.rst
@@ -0,0 +1,2 @@
+Prevent deprecation warning from ``pypa/wheel#631`` to accidentally
+trigger when validating ``pyproject.toml``.

From 35e3265f846c8db714c0fbfa0640eb98842776b0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 13 Aug 2024 13:25:50 +0100
Subject: [PATCH 1004/1761] Use --force-reinstall in integration test and skip
 wheel

---
 setuptools/tests/integration/test_pip_install_sdist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py
index ee70b1c286..2d59337aff 100644
--- a/setuptools/tests/integration/test_pip_install_sdist.py
+++ b/setuptools/tests/integration/test_pip_install_sdist.py
@@ -137,7 +137,7 @@ def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel
     # Use a virtualenv to simulate PEP 517 isolation
     # but install fresh setuptools wheel to ensure the version under development
     env = EXTRA_ENV_VARS.get(package, {})
-    run([*venv_pip, "install", "wheel", "-I", setuptools_wheel])
+    run([*venv_pip, "install", "--force-reinstall", setuptools_wheel])
     run([*venv_pip, "install", *INSTALL_OPTIONS, sdist], env)
 
     # Execute a simple script to make sure the package was installed correctly

From ea11bacbaf4c6e06d707af9170818e23659c1d12 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 13 Aug 2024 13:32:25 +0100
Subject: [PATCH 1005/1761] Add news fragment

---
 newsfragments/4565.misc.rst | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 newsfragments/4565.misc.rst

diff --git a/newsfragments/4565.misc.rst b/newsfragments/4565.misc.rst
new file mode 100644
index 0000000000..031f8d66ca
--- /dev/null
+++ b/newsfragments/4565.misc.rst
@@ -0,0 +1,3 @@
+Replace ``pip install -I`` with ``pip install --force-reinstall`` in
+integration tests. Additionally, remove ``wheel`` from virtual environment as
+it is no longer a build dependency.

From 0a8f477f79aaf5e861375dbb373b25415cc59bf1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 13 Aug 2024 17:00:31 +0100
Subject: [PATCH 1006/1761] Modify schemas to allow dashes in module names

---
 setuptools/config/setuptools.schema.json | 6 +++---
 tox.ini                                  | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/setuptools/config/setuptools.schema.json b/setuptools/config/setuptools.schema.json
index adc203ddb0..50ee6217ee 100644
--- a/setuptools/config/setuptools.schema.json
+++ b/setuptools/config/setuptools.schema.json
@@ -148,14 +148,14 @@
     },
     "namespace-packages": {
       "type": "array",
-      "items": {"type": "string", "format": "python-module-name"},
+      "items": {"type": "string", "format": "python-module-name-relaxed"},
       "$comment": "https://setuptools.pypa.io/en/latest/userguide/package_discovery.html",
       "description": "**DEPRECATED**: use implicit namespaces instead (:pep:`420`)."
     },
     "py-modules": {
       "description": "Modules that setuptools will manipulate",
       "type": "array",
-      "items": {"type": "string", "format": "python-module-name"},
+      "items": {"type": "string", "format": "python-module-name-relaxed"},
       "$comment": "TODO: clarify the relationship with ``packages``"
     },
     "data-files": {
@@ -250,7 +250,7 @@
       "description": "Valid package name (importable or :pep:`561`).",
       "type": "string",
       "anyOf": [
-        {"type": "string", "format": "python-module-name"},
+        {"type": "string", "format": "python-module-name-relaxed"},
         {"type": "string", "format": "pep561-stub-name"}
       ]
     },
diff --git a/tox.ini b/tox.ini
index bc0540b0d4..6fc71eb16a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -84,7 +84,7 @@ commands =
 [testenv:generate-validation-code]
 skip_install = True
 deps =
-	validate-pyproject[all]==0.18
+	validate-pyproject[all]==0.19
 commands =
 	python -m tools.generate_validation_code
 

From fa7da9fbda79a1462ffa599361c159b9fa0be75d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 13 Aug 2024 17:05:57 +0100
Subject: [PATCH 1007/1761] Update generated validation code for pyproject.toml

---
 .../_validate_pyproject/error_reporting.py    |  6 +--
 .../fastjsonschema_validations.py             | 48 +++++++++----------
 .../config/_validate_pyproject/formats.py     | 23 ++++++++-
 3 files changed, 48 insertions(+), 29 deletions(-)

diff --git a/setuptools/config/_validate_pyproject/error_reporting.py b/setuptools/config/_validate_pyproject/error_reporting.py
index a6753604f5..3591231c09 100644
--- a/setuptools/config/_validate_pyproject/error_reporting.py
+++ b/setuptools/config/_validate_pyproject/error_reporting.py
@@ -6,7 +6,7 @@
 import typing
 from contextlib import contextmanager
 from textwrap import indent, wrap
-from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Union, cast
+from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Union
 
 from .fastjsonschema_exceptions import JsonSchemaValueException
 
@@ -316,9 +316,7 @@ def _label(self, path: Sequence[str]) -> str:
     def _value(self, value: Any, path: Sequence[str]) -> str:
         if path[-1] == "type" and not self._is_property(path):
             type_ = self._jargon(value)
-            return (
-                f"[{', '.join(type_)}]" if isinstance(value, list) else cast(str, type_)
-            )
+            return f"[{', '.join(type_)}]" if isinstance(type_, list) else type_
         return repr(value)
 
     def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]:
diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
index 1f6c971e0b..7e403af4b7 100644
--- a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
+++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
@@ -31,7 +31,7 @@ def validate(data, custom_formats={}, name_prefix=None):
 
 def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
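
The substantive change buried in these regenerated raise statements is small: inside the inlined schema payloads, the ``package-name``, ``namespace-packages``, and ``py-modules`` entries move from the ``python-module-name`` format to ``python-module-name-relaxed``. As a rough sketch only (the real rules live in validate-pyproject's format registry; the dash tolerance shown here is an assumption for illustration), the relaxed variant accepts names the strict identifier rule rejects:

    import keyword

    def python_module_name(value: str) -> bool:
        # Strict check: every dotted component must be a valid,
        # non-keyword Python identifier ("pkg.sub_mod" passes,
        # "pip-run" fails).
        return all(
            part.isidentifier() and not keyword.iskeyword(part)
            for part in value.split(".")
        )

    def python_module_name_relaxed(value: str) -> bool:
        # Assumed relaxed semantics: additionally tolerate dashes,
        # which the strict identifier rule rejects; everything else
        # is unchanged.
        return python_module_name(value.replace("-", "_"))

    print(python_module_name("pip-run"))          # False under the strict format
    print(python_module_name_relaxed("pip-run"))  # True under the relaxed one
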
@@ -86,7 +86,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_bui
             data_keys.remove("tool")
             data__tool = data["tool"]
             if not isinstance(data__tool, (dict)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either 
as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = 
"pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either 
as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = 
"pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
             data__tool_is_dict = isinstance(data__tool, dict)
             if data__tool_is_dict:
                 data__tool_keys = set(data__tool.keys())
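
The context lines above show the pattern this generated validator repeats throughout: an ``isinstance`` guard whose failure raises ``JsonSchemaValueException`` with the complete sub-schema inlined as ``definition`` and the failed JSON Schema keyword as ``rule``. That inlining is why a one-word format change rewrites these multi-kilobyte raise statements. A minimal sketch of the pattern, with a stand-in exception class (the generated module supplies the real one):

    class JsonSchemaValueException(ValueError):
        # Stand-in with the same keyword interface as the raises above.
        def __init__(self, message, value=None, name=None, definition=None, rule=None):
            super().__init__(message)
            self.message, self.value, self.name = message, value, name
            self.definition, self.rule = definition, rule

    def validate_tool(data, name_prefix=None):
        # Same shape as the generated guard: type-check, then raise with
        # the sub-schema as ``definition`` and the failed keyword as ``rule``.
        data__tool = data.get("tool")
        if not isinstance(data__tool, dict):
            raise JsonSchemaValueException(
                (name_prefix or "data") + ".tool must be object",
                value=data__tool,
                name=(name_prefix or "data") + ".tool",
                definition={"type": "object"},  # generated code inlines the whole table schema here
                rule="type",
            )

    try:
        validate_tool({"tool": ["not", "a", "table"]})
    except JsonSchemaValueException as exc:
        print(exc, exc.rule)  # data.tool must be object / type
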
@@ -99,12 +99,12 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_bui
                     data__tool__setuptools = data__tool["setuptools"]
                     validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools")
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
     return data
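
Every validator in this generated module follows the same shape as the ``return data`` exit above: test one structural predicate, and on failure raise ``JsonSchemaValueException`` with the enforced schema fragment inlined as the ``definition`` argument, so the error message is self-describing without re-reading any schema at runtime. Below is a minimal sketch of that shape, assuming the upstream ``fastjsonschema`` exception class (setuptools actually vendors it); the function name and the one-key schema fragment are illustrative, not part of this patch.

from fastjsonschema.exceptions import JsonSchemaValueException

def validate_tool_table(data, custom_formats={}, name_prefix=None):
    # Generated checks inline the schema fragment being enforced, so the
    # raised exception carries its own documentation.
    if not isinstance(data, dict):
        raise JsonSchemaValueException(
            "" + (name_prefix or "data") + " must be object",
            value=data,
            name="" + (name_prefix or "data") + "",
            definition={'type': 'object', 'additionalProperties': False},
            rule='type',
        )
    return data
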
 
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
@@ -181,7 +181,7 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
             if data__packages_one_of_count1 < 2:
                 try:
                     if not isinstance(data__packages, (list, tuple)):
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, rule='type')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, rule='type')
                     data__packages_is_list = isinstance(data__packages, (list, tuple))
                     if data__packages_is_list:
                         data__packages_len = len(data__packages)
@@ -195,12 +195,12 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                     data__packages_one_of_count1 += 1
                 except JsonSchemaValueException: pass
             if data__packages_one_of_count1 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf')
         if "package-dir" in data_keys:
             data_keys.remove("package-dir")
             data__packagedir = data["package-dir"]
             if not isinstance(data__packagedir, (dict)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type')
             data__packagedir_is_dict = isinstance(data__packagedir, dict)
             if data__packagedir_is_dict:
                 data__packagedir_keys = set(data__packagedir.keys())
@@ -211,7 +211,7 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                         if not isinstance(data__packagedir_val, (str)):
                             raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + " must be string", value=data__packagedir_val, name="" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type')
                 if data__packagedir_keys:
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties')
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties')
                 data__packagedir_len = len(data__packagedir)
                 if data__packagedir_len != 0:
                     data__packagedir_property_names = True
@@ -230,11 +230,11 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                                     data__packagedir_key_any_of_count2 += 1
                                 except JsonSchemaValueException: pass
                             if not data__packagedir_key_any_of_count2:
-                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir cannot be validated by any definition", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, rule='anyOf')
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir cannot be validated by any definition", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, rule='anyOf')
                         except JsonSchemaValueException:
                             data__packagedir_property_names = False
                     if not data__packagedir_property_names:
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames')
         if "package-data" in data_keys:
             data_keys.remove("package-data")
             data__packagedata = data["package-data"]
@@ -342,30 +342,30 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
             data_keys.remove("namespace-packages")
             data__namespacepackages = data["namespace-packages"]
             if not isinstance(data__namespacepackages, (list, tuple)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, rule='type')
             data__namespacepackages_is_list = isinstance(data__namespacepackages, (list, tuple))
             if data__namespacepackages_is_list:
                 data__namespacepackages_len = len(data__namespacepackages)
                 for data__namespacepackages_x, data__namespacepackages_item in enumerate(data__namespacepackages):
                     if not isinstance(data__namespacepackages_item, (str)):
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be string", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be string", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='type')
                     if isinstance(data__namespacepackages_item, str):
-                        if not custom_formats["python-module-name"](data__namespacepackages_item):
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be python-module-name", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+                        if not custom_formats["python-module-name-relaxed"](data__namespacepackages_item):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be python-module-name-relaxed", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
         if "py-modules" in data_keys:
             data_keys.remove("py-modules")
             data__pymodules = data["py-modules"]
             if not isinstance(data__pymodules, (list, tuple)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules must be array", value=data__pymodules, name="" + (name_prefix or "data") + ".py-modules", definition={'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules must be array", value=data__pymodules, name="" + (name_prefix or "data") + ".py-modules", definition={'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, rule='type')
             data__pymodules_is_list = isinstance(data__pymodules, (list, tuple))
             if data__pymodules_is_list:
                 data__pymodules_len = len(data__pymodules)
                 for data__pymodules_x, data__pymodules_item in enumerate(data__pymodules):
                     if not isinstance(data__pymodules_item, (str)):
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be string", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be string", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='type')
                     if isinstance(data__pymodules_item, str):
-                        if not custom_formats["python-module-name"](data__pymodules_item):
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+                        if not custom_formats["python-module-name-relaxed"](data__pymodules_item):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name-relaxed", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
         if "data-files" in data_keys:
             data_keys.remove("data-files")
             data__datafiles = data["data-files"]
@@ -524,7 +524,7 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                 if data__dynamic_keys:
                     raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 
'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}, rule='additionalProperties')
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' 
directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with 
wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one 
:pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': 
"'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might 
not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', 
'(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
     return data
 
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_properties_file(data, custom_formats={}, name_prefix=None):
@@ -673,15 +673,15 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
 
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_package_name(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (str)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='type')
     data_any_of_count9 = 0
     if not data_any_of_count9:
         try:
             if not isinstance(data, (str)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='type')
             if isinstance(data, str):
-                if not custom_formats["python-module-name"](data):
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-module-name", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+                if not custom_formats["python-module-name-relaxed"](data):
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-module-name-relaxed", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
             data_any_of_count9 += 1
         except JsonSchemaValueException: pass
     if not data_any_of_count9:
@@ -694,7 +694,7 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
             data_any_of_count9 += 1
         except JsonSchemaValueException: pass
     if not data_any_of_count9:
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='anyOf')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='anyOf')
     return data
 
 def validate_https___setuptools_pypa_io_en_latest_deprecated_distutils_configfile_html(data, custom_formats={}, name_prefix=None):
diff --git a/setuptools/config/_validate_pyproject/formats.py b/setuptools/config/_validate_pyproject/formats.py
index 5a0599cbb5..153b1f0b27 100644
--- a/setuptools/config/_validate_pyproject/formats.py
+++ b/setuptools/config/_validate_pyproject/formats.py
@@ -83,7 +83,9 @@ def pep508_identifier(name: str) -> bool:
         from packaging import requirements as _req
     except ImportError:  # pragma: no cover
         # let's try setuptools vendored version
-        from setuptools._vendor.packaging import requirements as _req  # type: ignore
+        from setuptools._vendor.packaging import (  # type: ignore[no-redef]
+            requirements as _req,
+        )
 
     def pep508(value: str) -> bool:
         """See :ref:`PyPA's dependency specifiers `
@@ -289,6 +291,25 @@ def python_module_name(value: str) -> bool:
     return python_qualified_identifier(value)
 
 
+def python_module_name_relaxed(value: str) -> bool:
+    """Similar to :obj:`python_module_name`, but relaxed to also accept
+    dash characters (``-``) and cover special cases like ``pip-run``.
+
+    It is recommended, however, that beginners avoid dash characters,
+    as using them correctly requires advanced knowledge about Python internals.
+
+    The following are disallowed:
+
+    * names starting/ending in dashes,
+    * names ending in ``-stubs`` (they could collide with :obj:`pep561_stub_name`).
+    """
+    if value.startswith("-") or value.endswith("-"):
+        return False
+    if value.endswith("-stubs"):
+        return False  # Avoid collision with PEP 561
+    return python_module_name(value.replace("-", "_"))
+
+
 def python_entrypoint_group(value: str) -> bool:
     """See ``Data model > group`` in the :ref:`PyPA's entry-points specification
     `.

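For illustration, a minimal standalone sketch of what the new relaxed format
accepts and rejects (the real implementation lives in
setuptools/config/_validate_pyproject/formats.py above; the helper names
``_module_name`` and ``_module_name_relaxed`` here are illustrative stand-ins,
and ``_module_name`` only approximates ``python_module_name``):

    # Stdlib-only approximation of the relaxed module-name format.
    def _module_name(value: str) -> bool:
        # each dotted part must be a valid Python identifier
        return all(part.isidentifier() for part in value.split("."))

    def _module_name_relaxed(value: str) -> bool:
        if value.startswith("-") or value.endswith("-"):
            return False  # leading/trailing dashes are disallowed
        if value.endswith("-stubs"):
            return False  # reserved for PEP 561 stub packages
        return _module_name(value.replace("-", "_"))

    assert _module_name_relaxed("pip-run")            # dashes tolerated
    assert _module_name_relaxed("abc-d.λ-xyz-e")      # dotted + non-ASCII ok
    assert not _module_name_relaxed("-pip-run")       # leading dash
    assert not _module_name_relaxed("pip-run-stubs")  # PEP 561 collision
    assert not _module_name_relaxed("pip run")        # space is never valid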
From 0aac59ed259a0c238eab381a00eafdb77da9b37f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 14 Aug 2024 13:14:39 +0100
Subject: [PATCH 1008/1761] Add unit tests for py-modules

---
 .../tests/config/test_apply_pyprojecttoml.py  | 27 +++++++++++++++++++
 1 file changed, 27 insertions(+)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 156823533a..78959b6454 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -297,6 +297,33 @@ def test_default_patterns(self, tmp_path):
         assert set(dist.metadata.license_files) == {*license_files, "LICENSE.txt"}
 
 
+class TestPyModules:
+    # https://github.com/pypa/setuptools/issues/4316
+
+    def dist(self, name):
+        toml_config = f"""
+        [project]
+        name = "test"
+        version = "42.0"
+        [tool.setuptools]
+        py-modules = [{name!r}]
+        """
+        pyproject = Path("pyproject.toml")
+        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
+        return pyprojecttoml.apply_configuration(Distribution({}), pyproject)
+
+    @pytest.mark.parametrize("module", ["pip-run", "abc-d.λ-xyz-e"])
+    def test_valid_module_name(self, tmp_path, monkeypatch, module):
+        monkeypatch.chdir(tmp_path)
+        assert module in self.dist(module).py_modules
+
+    @pytest.mark.parametrize("module", ["pip run", "-pip-run", "pip-run-stubs"])
+    def test_invalid_module_name(self, tmp_path, monkeypatch, module):
+        monkeypatch.chdir(tmp_path)
+        with pytest.raises(ValueError, match="py-modules"):
+            self.dist(module).py_modules
+
+
 class TestDeprecatedFields:
     def test_namespace_packages(self, tmp_path):
         pyproject = tmp_path / "pyproject.toml"

From 49d7438a23b7ec7cfca20c4d0ae5083030f631bd Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 16 Aug 2024 15:28:12 +0100
Subject: [PATCH 1009/1761] Replace OrderedSet with dict

Since Python 3.7, ``dict`` preserves insertion order.
This means that ``dict`` is a capable implementation of an ordered set,
despite the method names not lining up completely.
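
For illustration (a sketch in plain stdlib, not part of the patch): a ``dict``
whose values are all ``None`` behaves as an insertion-ordered set, with
``setdefault`` standing in for ``add``:

    extras: dict = {}                       # replaces OrderedSet()
    for extra in ["pdf", "rest", "pdf"]:
        extras.setdefault(extra)            # add-if-absent; first position wins
    assert list(extras) == ["pdf", "rest"]  # iteration follows insertion order
    assert "pdf" in extras                  # O(1) membership, like a set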
---
 setuptools/dist.py                       | 5 ++---
 setuptools/tests/config/test_setupcfg.py | 2 +-
 setuptools/tests/test_core_metadata.py   | 2 +-
 setuptools/tests/test_dist.py            | 4 ++--
 4 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index b2250e3476..715e8fbb73 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -11,7 +11,6 @@
 from typing import TYPE_CHECKING, MutableMapping
 
 from more_itertools import partition, unique_everseen
-from ordered_set import OrderedSet
 from packaging.markers import InvalidMarker, Marker
 from packaging.specifiers import InvalidSpecifier, SpecifierSet
 from packaging.version import Version
@@ -251,7 +250,7 @@ class Distribution(_Distribution):
     _DISTUTILS_UNSUPPORTED_METADATA = {
         'long_description_content_type': lambda: None,
         'project_urls': dict,
-        'provides_extras': OrderedSet,
+        'provides_extras': dict,  # behaves like an ordered set
         'license_file': lambda: None,
         'license_files': lambda: None,
         'install_requires': list,
@@ -349,7 +348,7 @@ def _finalize_requires(self):
                 # Setuptools allows a weird "<name>:<env markers> syntax for extras
                 extra = extra.split(':')[0]
                 if extra:
-                    self.metadata.provides_extras.add(extra)
+                    self.metadata.provides_extras.setdefault(extra)
 
     def _normalize_requires(self):
         """Make sure requirement-related attributes exist and are normalized"""
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 2c50ee2db7..4f0a7349f5 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -675,7 +675,7 @@ def test_extras_require(self, tmpdir):
                 'pdf': ['ReportLab>=1.2', 'RXP'],
                 'rest': ['docutils>=0.3', 'pack==1.1,==1.3'],
             }
-            assert dist.metadata.provides_extras == set(['pdf', 'rest'])
+            assert set(dist.metadata.provides_extras) == {'pdf', 'rest'}
 
     @pytest.mark.parametrize(
         "config",
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index 3e2ce65ea3..34828ac750 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -188,7 +188,7 @@ def test_read_metadata(name, attrs):
         ('requires', dist_class.get_requires),
         ('classifiers', dist_class.get_classifiers),
         ('project_urls', lambda s: getattr(s, 'project_urls', {})),
-        ('provides_extras', lambda s: getattr(s, 'provides_extras', set())),
+        ('provides_extras', lambda s: getattr(s, 'provides_extras', {})),
     ]
 
     for attr, getter in tested_attrs:
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index a5a5e7606d..597785b519 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -77,12 +77,12 @@ def test_provides_extras_deterministic_order():
     extras['b'] = ['bar']
     attrs = dict(extras_require=extras)
     dist = Distribution(attrs)
-    assert dist.metadata.provides_extras == ['a', 'b']
+    assert list(dist.metadata.provides_extras) == ['a', 'b']
     attrs['extras_require'] = collections.OrderedDict(
         reversed(list(attrs['extras_require'].items()))
     )
     dist = Distribution(attrs)
-    assert dist.metadata.provides_extras == ['b', 'a']
+    assert list(dist.metadata.provides_extras) == ['b', 'a']
 
 
 CHECK_PACKAGE_DATA_TESTS = (

From d081fbdfc92da4b8bcf01455445cf739ca51d5f1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 16 Aug 2024 15:33:52 +0100
Subject: [PATCH 1010/1761] Remove bundled ordered_set

---
 pyproject.toml                                |   1 -
 .../ordered_set-4.1.0.dist-info/INSTALLER     |   1 -
 .../ordered_set-4.1.0.dist-info/METADATA      | 158 ------
 .../ordered_set-4.1.0.dist-info/RECORD        |   8 -
 .../ordered_set-4.1.0.dist-info/REQUESTED     |   0
 .../_vendor/ordered_set-4.1.0.dist-info/WHEEL |   4 -
 setuptools/_vendor/ordered_set/__init__.py    | 536 ------------------
 setuptools/_vendor/ordered_set/py.typed       |   0
 setuptools/tests/test_extern.py               |  12 +-
 9 files changed, 3 insertions(+), 717 deletions(-)
 delete mode 100644 setuptools/_vendor/ordered_set-4.1.0.dist-info/INSTALLER
 delete mode 100644 setuptools/_vendor/ordered_set-4.1.0.dist-info/METADATA
 delete mode 100644 setuptools/_vendor/ordered_set-4.1.0.dist-info/RECORD
 delete mode 100644 setuptools/_vendor/ordered_set-4.1.0.dist-info/REQUESTED
 delete mode 100644 setuptools/_vendor/ordered_set-4.1.0.dist-info/WHEEL
 delete mode 100644 setuptools/_vendor/ordered_set/__init__.py
 delete mode 100644 setuptools/_vendor/ordered_set/py.typed

diff --git a/pyproject.toml b/pyproject.toml
index 1ce17e63ab..89c2fe890b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -110,7 +110,6 @@ ssl = []
 certs = []
 core = [
 	"packaging>=24",
-	"ordered-set>=3.1.1",
 	"more_itertools>=8.8",
 	"jaraco.text>=3.7",
 	"importlib_resources>=5.10.2; python_version < '3.9'",
diff --git a/setuptools/_vendor/ordered_set-4.1.0.dist-info/INSTALLER b/setuptools/_vendor/ordered_set-4.1.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/setuptools/_vendor/ordered_set-4.1.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/setuptools/_vendor/ordered_set-4.1.0.dist-info/METADATA b/setuptools/_vendor/ordered_set-4.1.0.dist-info/METADATA
deleted file mode 100644
index 7aea136818..0000000000
--- a/setuptools/_vendor/ordered_set-4.1.0.dist-info/METADATA
+++ /dev/null
@@ -1,158 +0,0 @@
-Metadata-Version: 2.1
-Name: ordered-set
-Version: 4.1.0
-Summary: An OrderedSet is a custom MutableSet that remembers its order, so that every
-Author-email: Elia Robyn Lake 
-Requires-Python: >=3.7
-Description-Content-Type: text/markdown
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Dist: pytest ; extra == "dev"
-Requires-Dist: black ; extra == "dev"
-Requires-Dist: mypy ; extra == "dev"
-Project-URL: Home, https://github.com/rspeer/ordered-set
-Provides-Extra: dev
-
-[![Pypi](https://img.shields.io/pypi/v/ordered-set.svg)](https://pypi.python.org/pypi/ordered-set)
-
-An OrderedSet is a mutable data structure that is a hybrid of a list and a set.
-It remembers the order of its entries, and every entry has an index number that
-can be looked up.
-
-## Installation
-
-`ordered_set` is available on PyPI and packaged as a wheel. You can list it
-as a dependency of your project, in whatever form that takes.
-
-To install it into your current Python environment:
-
-    pip install ordered-set
-
-To install the code for development, after checking out the repository:
-
-    pip install flit
-    flit install
-
-## Usage examples
-
-An OrderedSet is created and used like a set:
-
-    >>> from ordered_set import OrderedSet
-
-    >>> letters = OrderedSet('abracadabra')
-
-    >>> letters
-    OrderedSet(['a', 'b', 'r', 'c', 'd'])
-
-    >>> 'r' in letters
-    True
-
-It is efficient to find the index of an entry in an OrderedSet, or find an
-entry by its index. To help with this use case, the `.add()` method returns
-the index of the added item, whether it was already in the set or not.
-
-    >>> letters.index('r')
-    2
-
-    >>> letters[2]
-    'r'
-
-    >>> letters.add('r')
-    2
-
-    >>> letters.add('x')
-    5
-
-OrderedSets implement the union (`|`), intersection (`&`), and difference (`-`)
-operators like sets do.
-
-    >>> letters |= OrderedSet('shazam')
-
-    >>> letters
-    OrderedSet(['a', 'b', 'r', 'c', 'd', 'x', 's', 'h', 'z', 'm'])
-
-    >>> letters & set('aeiou')
-    OrderedSet(['a'])
-
-    >>> letters -= 'abcd'
-
-    >>> letters
-    OrderedSet(['r', 'x', 's', 'h', 'z', 'm'])
-
-The `__getitem__()` and `index()` methods have been extended to accept any
-iterable except a string, returning a list, to perform NumPy-like "fancy
-indexing".
-
-    >>> letters = OrderedSet('abracadabra')
-
-    >>> letters[[0, 2, 3]]
-    ['a', 'r', 'c']
-
-    >>> letters.index(['a', 'r', 'c'])
-    [0, 2, 3]
-
-OrderedSet implements `__getstate__` and `__setstate__` so it can be pickled,
-and implements the abstract base classes `collections.MutableSet` and
-`collections.Sequence`.
-
-OrderedSet can be used as a generic collection type, similar to the collections
-in the `typing` module like List, Dict, and Set. For example, you can annotate
-a variable as having the type `OrderedSet[str]` or `OrderedSet[Tuple[int,
-str]]`.
-
-
-## OrderedSet in data science applications
-
-An OrderedSet can be used as a bi-directional mapping between a sparse
-vocabulary and dense index numbers. As of version 3.1, it accepts NumPy arrays
-of index numbers as well as lists.
-
-This combination of features makes OrderedSet a simple implementation of many
-of the things that `pandas.Index` is used for, and many of its operations are
-faster than the equivalent pandas operations.
-
-For further compatibility with pandas.Index, `get_loc` (the pandas method for
-looking up a single index) and `get_indexer` (the pandas method for fancy
-indexing in reverse) are both aliases for `index` (which handles both cases
-in OrderedSet).
-
-
-## Authors
-
-OrderedSet was implemented by Elia Robyn Lake (maiden name: Robyn Speer).
-Jon Crall contributed changes and tests to make it fit the Python set API.
-Roman Inflianskas added the original type annotations.
-
-
-## Comparisons
-
-The original implementation of OrderedSet was a [recipe posted to ActiveState
-Recipes][recipe] by Raymond Hettiger, released under the MIT license.
-
-[recipe]: https://code.activestate.com/recipes/576694-orderedset/
-
-Hettiger's implementation kept its content in a doubly-linked list referenced by a
-dict. As a result, looking up an item by its index was an O(N) operation, while
-deletion was O(1).
-
-This version makes different trade-offs for the sake of efficient lookups. Its
-content is a standard Python list instead of a doubly-linked list. This
-provides O(1) lookups by index at the expense of O(N) deletion, as well as
-slightly faster iteration.
-
-In Python 3.6 and later, the built-in `dict` type is inherently ordered. If you
-ignore the dictionary values, that also gives you a simple ordered set, with
-fast O(1) insertion, deletion, iteration and membership testing. However, `dict`
-does not provide the list-like random access features of OrderedSet. You
-would have to convert it to a list in O(N) to look up the index of an entry or
-look up an entry by its index.
-
diff --git a/setuptools/_vendor/ordered_set-4.1.0.dist-info/RECORD b/setuptools/_vendor/ordered_set-4.1.0.dist-info/RECORD
deleted file mode 100644
index a9875cde4e..0000000000
--- a/setuptools/_vendor/ordered_set-4.1.0.dist-info/RECORD
+++ /dev/null
@@ -1,8 +0,0 @@
-ordered_set-4.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-ordered_set-4.1.0.dist-info/METADATA,sha256=FqVN_VUTJTCDQ-vtnmXrbgapDjciET-54gSNJ47sro8,5340
-ordered_set-4.1.0.dist-info/RECORD,,
-ordered_set-4.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ordered_set-4.1.0.dist-info/WHEEL,sha256=jPMR_Dzkc4X4icQtmz81lnNY_kAsfog7ry7qoRvYLXw,81
-ordered_set/__init__.py,sha256=ytazgKsyBKi9uFtBt938yXxQtdat1VCC681s9s0CMqg,17146
-ordered_set/__pycache__/__init__.cpython-312.pyc,,
-ordered_set/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/ordered_set-4.1.0.dist-info/REQUESTED b/setuptools/_vendor/ordered_set-4.1.0.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/ordered_set-4.1.0.dist-info/WHEEL b/setuptools/_vendor/ordered_set-4.1.0.dist-info/WHEEL
deleted file mode 100644
index c727d14823..0000000000
--- a/setuptools/_vendor/ordered_set-4.1.0.dist-info/WHEEL
+++ /dev/null
@@ -1,4 +0,0 @@
-Wheel-Version: 1.0
-Generator: flit 3.6.0
-Root-Is-Purelib: true
-Tag: py3-none-any
diff --git a/setuptools/_vendor/ordered_set/__init__.py b/setuptools/_vendor/ordered_set/__init__.py
deleted file mode 100644
index e86c70ed80..0000000000
--- a/setuptools/_vendor/ordered_set/__init__.py
+++ /dev/null
@@ -1,536 +0,0 @@
-"""
-An OrderedSet is a custom MutableSet that remembers its order, so that every
-entry has an index that can be looked up. It can also act like a Sequence.
-
-Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger,
-and released under the MIT license.
-"""
-import itertools as it
-from typing import (
-    Any,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
-    MutableSet,
-    AbstractSet,
-    Sequence,
-    Set,
-    TypeVar,
-    Union,
-    overload,
-)
-
-SLICE_ALL = slice(None)
-__version__ = "4.1.0"
-
-
-T = TypeVar("T")
-
-# SetLike[T] is either a set of elements of type T, or a sequence, which
-# we will convert to an OrderedSet by adding its elements in order.
-SetLike = Union[AbstractSet[T], Sequence[T]]
-OrderedSetInitializer = Union[AbstractSet[T], Sequence[T], Iterable[T]]
-
-
-def _is_atomic(obj: Any) -> bool:
-    """
-    Returns True for objects which are iterable but should not be iterated in
-    the context of indexing an OrderedSet.
-
-    When we index by an iterable, usually that means we're being asked to look
-    up a list of things.
-
-    However, in the case of the .index() method, we shouldn't handle strings
-    and tuples like other iterables. They're not sequences of things to look
-    up, they're the single, atomic thing we're trying to find.
-
-    As an example, oset.index('hello') should give the index of 'hello' in an
-    OrderedSet of strings. It shouldn't give the indexes of each individual
-    character.
-    """
-    return isinstance(obj, str) or isinstance(obj, tuple)
-
-
-class OrderedSet(MutableSet[T], Sequence[T]):
-    """
-    An OrderedSet is a custom MutableSet that remembers its order, so that
-    every entry has an index that can be looked up.
-
-    Example:
-        >>> OrderedSet([1, 1, 2, 3, 2])
-        OrderedSet([1, 2, 3])
-    """
-
-    def __init__(self, initial: OrderedSetInitializer[T] = None):
-        self.items: List[T] = []
-        self.map: Dict[T, int] = {}
-        if initial is not None:
-            # In terms of duck-typing, the default __ior__ is compatible with
-            # the types we use, but it doesn't expect all the types we
-            # support as values for `initial`.
-            self |= initial  # type: ignore
-
-    def __len__(self):
-        """
-        Returns the number of unique elements in the ordered set
-
-        Example:
-            >>> len(OrderedSet([]))
-            0
-            >>> len(OrderedSet([1, 2]))
-            2
-        """
-        return len(self.items)
-
-    @overload
-    def __getitem__(self, index: slice) -> "OrderedSet[T]":
-        ...
-
-    @overload
-    def __getitem__(self, index: Sequence[int]) -> List[T]:
-        ...
-
-    @overload
-    def __getitem__(self, index: int) -> T:
-        ...
-
-    # concrete implementation
-    def __getitem__(self, index):
-        """
-        Get the item at a given index.
-
-        If `index` is a slice, you will get back that slice of items, as a
-        new OrderedSet.
-
-        If `index` is a list or a similar iterable, you'll get a list of
-        items corresponding to those indices. This is similar to NumPy's
-        "fancy indexing". The result is not an OrderedSet because you may ask
-        for duplicate indices, and the number of elements returned should be
-        the number of elements asked for.
-
-        Example:
-            >>> oset = OrderedSet([1, 2, 3])
-            >>> oset[1]
-            2
-        """
-        if isinstance(index, slice) and index == SLICE_ALL:
-            return self.copy()
-        elif isinstance(index, Iterable):
-            return [self.items[i] for i in index]
-        elif isinstance(index, slice) or hasattr(index, "__index__"):
-            result = self.items[index]
-            if isinstance(result, list):
-                return self.__class__(result)
-            else:
-                return result
-        else:
-            raise TypeError("Don't know how to index an OrderedSet by %r" % index)
-
-    def copy(self) -> "OrderedSet[T]":
-        """
-        Return a shallow copy of this object.
-
-        Example:
-            >>> this = OrderedSet([1, 2, 3])
-            >>> other = this.copy()
-            >>> this == other
-            True
-            >>> this is other
-            False
-        """
-        return self.__class__(self)
-
-    # Define the gritty details of how an OrderedSet is serialized as a pickle.
-    # We leave off type annotations, because the only code that should interact
-    # with these is a generalized tool such as pickle.
-    def __getstate__(self):
-        if len(self) == 0:
-            # In pickle, the state can't be an empty list.
-            # We need to return a truthy value, or else __setstate__ won't be run.
-            #
-            # This could have been done more gracefully by always putting the state
-            # in a tuple, but this way is backwards- and forwards- compatible with
-            # previous versions of OrderedSet.
-            return (None,)
-        else:
-            return list(self)
-
-    def __setstate__(self, state):
-        if state == (None,):
-            self.__init__([])
-        else:
-            self.__init__(state)
-
-    def __contains__(self, key: Any) -> bool:
-        """
-        Test if the item is in this ordered set.
-
-        Example:
-            >>> 1 in OrderedSet([1, 3, 2])
-            True
-            >>> 5 in OrderedSet([1, 3, 2])
-            False
-        """
-        return key in self.map
-
-    # Technically type-incompatible with MutableSet, because we return an
-    # int instead of nothing. This is also one of the things that makes
-    # OrderedSet convenient to use.
-    def add(self, key: T) -> int:
-        """
-        Add `key` as an item to this OrderedSet, then return its index.
-
-        If `key` is already in the OrderedSet, return the index it already
-        had.
-
-        Example:
-            >>> oset = OrderedSet()
-            >>> oset.append(3)
-            0
-            >>> print(oset)
-            OrderedSet([3])
-        """
-        if key not in self.map:
-            self.map[key] = len(self.items)
-            self.items.append(key)
-        return self.map[key]
-
-    append = add
-
-    def update(self, sequence: SetLike[T]) -> int:
-        """
-        Update the set with the given iterable sequence, then return the index
-        of the last element inserted.
-
-        Example:
-            >>> oset = OrderedSet([1, 2, 3])
-            >>> oset.update([3, 1, 5, 1, 4])
-            4
-            >>> print(oset)
-            OrderedSet([1, 2, 3, 5, 4])
-        """
-        item_index = 0
-        try:
-            for item in sequence:
-                item_index = self.add(item)
-        except TypeError:
-            raise ValueError(
-                "Argument needs to be an iterable, got %s" % type(sequence)
-            )
-        return item_index
-
-    @overload
-    def index(self, key: Sequence[T]) -> List[int]:
-        ...
-
-    @overload
-    def index(self, key: T) -> int:
-        ...
-
-    # concrete implementation
-    def index(self, key):
-        """
-        Get the index of a given entry, raising an IndexError if it's not
-        present.
-
-        `key` can be an iterable of entries that is not a string, in which case
-        this returns a list of indices.
-
-        Example:
-            >>> oset = OrderedSet([1, 2, 3])
-            >>> oset.index(2)
-            1
-        """
-        if isinstance(key, Iterable) and not _is_atomic(key):
-            return [self.index(subkey) for subkey in key]
-        return self.map[key]
-
-    # Provide some compatibility with pd.Index
-    get_loc = index
-    get_indexer = index
-
-    def pop(self, index=-1) -> T:
-        """
-        Remove and return item at index (default last).
-
-        Raises KeyError if the set is empty.
-        Raises IndexError if index is out of range.
-
-        Example:
-            >>> oset = OrderedSet([1, 2, 3])
-            >>> oset.pop()
-            3
-        """
-        if not self.items:
-            raise KeyError("Set is empty")
-
-        elem = self.items[index]
-        del self.items[index]
-        del self.map[elem]
-        return elem
-
-    def discard(self, key: T) -> None:
-        """
-        Remove an element.  Do not raise an exception if absent.
-
-        The MutableSet mixin uses this to implement the .remove() method, which
-        *does* raise an error when asked to remove a non-existent item.
-
-        Example:
-            >>> oset = OrderedSet([1, 2, 3])
-            >>> oset.discard(2)
-            >>> print(oset)
-            OrderedSet([1, 3])
-            >>> oset.discard(2)
-            >>> print(oset)
-            OrderedSet([1, 3])
-        """
-        if key in self:
-            i = self.map[key]
-            del self.items[i]
-            del self.map[key]
-            for k, v in self.map.items():
-                if v >= i:
-                    self.map[k] = v - 1
-
-    def clear(self) -> None:
-        """
-        Remove all items from this OrderedSet.
-        """
-        del self.items[:]
-        self.map.clear()
-
-    def __iter__(self) -> Iterator[T]:
-        """
-        Example:
-            >>> list(iter(OrderedSet([1, 2, 3])))
-            [1, 2, 3]
-        """
-        return iter(self.items)
-
-    def __reversed__(self) -> Iterator[T]:
-        """
-        Example:
-            >>> list(reversed(OrderedSet([1, 2, 3])))
-            [3, 2, 1]
-        """
-        return reversed(self.items)
-
-    def __repr__(self) -> str:
-        if not self:
-            return "%s()" % (self.__class__.__name__,)
-        return "%s(%r)" % (self.__class__.__name__, list(self))
-
-    def __eq__(self, other: Any) -> bool:
-        """
-        Returns true if the containers have the same items. If `other` is a
-        Sequence, then order is checked, otherwise it is ignored.
-
-        Example:
-            >>> oset = OrderedSet([1, 3, 2])
-            >>> oset == [1, 3, 2]
-            True
-            >>> oset == [1, 2, 3]
-            False
-            >>> oset == [2, 3]
-            False
-            >>> oset == OrderedSet([3, 2, 1])
-            False
-        """
-        if isinstance(other, Sequence):
-            # Check that this OrderedSet contains the same elements, in the
-            # same order, as the other object.
-            return list(self) == list(other)
-        try:
-            other_as_set = set(other)
-        except TypeError:
-            # If `other` can't be converted into a set, it's not equal.
-            return False
-        else:
-            return set(self) == other_as_set
-
-    def union(self, *sets: SetLike[T]) -> "OrderedSet[T]":
-        """
-        Combines all unique items.
-        Each items order is defined by its first appearance.
-
-        Example:
-            >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0])
-            >>> print(oset)
-            OrderedSet([3, 1, 4, 5, 2, 0])
-            >>> oset.union([8, 9])
-            OrderedSet([3, 1, 4, 5, 2, 0, 8, 9])
-            >>> oset | {10}
-            OrderedSet([3, 1, 4, 5, 2, 0, 10])
-        """
-        cls: type = OrderedSet
-        if isinstance(self, OrderedSet):
-            cls = self.__class__
-        containers = map(list, it.chain([self], sets))
-        items = it.chain.from_iterable(containers)
-        return cls(items)
-
-    def __and__(self, other: SetLike[T]) -> "OrderedSet[T]":
-        # the parent implementation of this is backwards
-        return self.intersection(other)
-
-    def intersection(self, *sets: SetLike[T]) -> "OrderedSet[T]":
-        """
-        Returns elements in common between all sets. Order is defined only
-        by the first set.
-
-        Example:
-            >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3])
-            >>> print(oset)
-            OrderedSet([1, 2, 3])
-            >>> oset.intersection([2, 4, 5], [1, 2, 3, 4])
-            OrderedSet([2])
-            >>> oset.intersection()
-            OrderedSet([1, 2, 3])
-        """
-        cls: type = OrderedSet
-        items: OrderedSetInitializer[T] = self
-        if isinstance(self, OrderedSet):
-            cls = self.__class__
-        if sets:
-            common = set.intersection(*map(set, sets))
-            items = (item for item in self if item in common)
-        return cls(items)
-
-    def difference(self, *sets: SetLike[T]) -> "OrderedSet[T]":
-        """
-        Returns all elements that are in this set but not the others.
-
-        Example:
-            >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]))
-            OrderedSet([1, 3])
-            >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3]))
-            OrderedSet([1])
-            >>> OrderedSet([1, 2, 3]) - OrderedSet([2])
-            OrderedSet([1, 3])
-            >>> OrderedSet([1, 2, 3]).difference()
-            OrderedSet([1, 2, 3])
-        """
-        cls = self.__class__
-        items: OrderedSetInitializer[T] = self
-        if sets:
-            other = set.union(*map(set, sets))
-            items = (item for item in self if item not in other)
-        return cls(items)
-
-    def issubset(self, other: SetLike[T]) -> bool:
-        """
-        Report whether another set contains this set.
-
-        Example:
-            >>> OrderedSet([1, 2, 3]).issubset({1, 2})
-            False
-            >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4})
-            True
-            >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5})
-            False
-        """
-        if len(self) > len(other):  # Fast check for obvious cases
-            return False
-        return all(item in other for item in self)
-
-    def issuperset(self, other: SetLike[T]) -> bool:
-        """
-        Report whether this set contains another set.
-
-        Example:
-            >>> OrderedSet([1, 2]).issuperset([1, 2, 3])
-            False
-            >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3})
-            True
-            >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3})
-            False
-        """
-        if len(self) < len(other):  # Fast check for obvious cases
-            return False
-        return all(item in self for item in other)
-
-    def symmetric_difference(self, other: SetLike[T]) -> "OrderedSet[T]":
-        """
-        Return the symmetric difference of two OrderedSets as a new set.
-        That is, the new set will contain all elements that are in exactly
-        one of the sets.
-
-        Their order will be preserved, with elements from `self` preceding
-        elements from `other`.
-
-        Example:
-            >>> this = OrderedSet([1, 4, 3, 5, 7])
-            >>> other = OrderedSet([9, 7, 1, 3, 2])
-            >>> this.symmetric_difference(other)
-            OrderedSet([4, 5, 9, 2])
-        """
-        cls: type = OrderedSet
-        if isinstance(self, OrderedSet):
-            cls = self.__class__
-        diff1 = cls(self).difference(other)
-        diff2 = cls(other).difference(self)
-        return diff1.union(diff2)
-
-    def _update_items(self, items: list) -> None:
-        """
-        Replace the 'items' list of this OrderedSet with a new one, updating
-        self.map accordingly.
-        """
-        self.items = items
-        self.map = {item: idx for (idx, item) in enumerate(items)}
-
-    def difference_update(self, *sets: SetLike[T]) -> None:
-        """
-        Update this OrderedSet to remove items from one or more other sets.
-
-        Example:
-            >>> this = OrderedSet([1, 2, 3])
-            >>> this.difference_update(OrderedSet([2, 4]))
-            >>> print(this)
-            OrderedSet([1, 3])
-
-            >>> this = OrderedSet([1, 2, 3, 4, 5])
-            >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6]))
-            >>> print(this)
-            OrderedSet([3, 5])
-        """
-        items_to_remove = set()  # type: Set[T]
-        for other in sets:
-            items_as_set = set(other)  # type: Set[T]
-            items_to_remove |= items_as_set
-        self._update_items([item for item in self.items if item not in items_to_remove])
-
-    def intersection_update(self, other: SetLike[T]) -> None:
-        """
-        Update this OrderedSet to keep only items in another set, preserving
-        their order in this set.
-
-        Example:
-            >>> this = OrderedSet([1, 4, 3, 5, 7])
-            >>> other = OrderedSet([9, 7, 1, 3, 2])
-            >>> this.intersection_update(other)
-            >>> print(this)
-            OrderedSet([1, 3, 7])
-        """
-        other = set(other)
-        self._update_items([item for item in self.items if item in other])
-
-    def symmetric_difference_update(self, other: SetLike[T]) -> None:
-        """
-        Update this OrderedSet to remove items from another set, then
-        add items from the other set that were not present in this set.
-
-        Example:
-            >>> this = OrderedSet([1, 4, 3, 5, 7])
-            >>> other = OrderedSet([9, 7, 1, 3, 2])
-            >>> this.symmetric_difference_update(other)
-            >>> print(this)
-            OrderedSet([4, 5, 9, 2])
-        """
-        items_to_add = [item for item in other if item not in self]
-        items_to_remove = set(other)
-        self._update_items(
-            [item for item in self.items if item not in items_to_remove] + items_to_add
-        )
diff --git a/setuptools/_vendor/ordered_set/py.typed b/setuptools/_vendor/ordered_set/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/tests/test_extern.py b/setuptools/tests/test_extern.py
index 105279d084..d7eb3c62c1 100644
--- a/setuptools/tests/test_extern.py
+++ b/setuptools/tests/test_extern.py
@@ -1,20 +1,14 @@
 import importlib
 import pickle
 
-import ordered_set
+import packaging
 
 from setuptools import Distribution
 
 
 def test_reimport_extern():
-    ordered_set2 = importlib.import_module(ordered_set.__name__)
-    assert ordered_set is ordered_set2
-
-
-def test_orderedset_pickle_roundtrip():
-    o1 = ordered_set.OrderedSet([1, 2, 5])
-    o2 = pickle.loads(pickle.dumps(o1))
-    assert o1 == o2
+    packaging2 = importlib.import_module(packaging.__name__)
+    assert packaging is packaging2
 
 
 def test_distribution_picklable():

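As context for the reimport test above (illustrative only, any stdlib module
works): ``importlib.import_module`` returns the object already cached in
``sys.modules``, so a second import of ``packaging`` must yield the very same
module object:

    import importlib
    import sys
    import json  # hypothetical stand-in for the module under test

    assert sys.modules["json"] is json
    assert importlib.import_module("json") is json  # cache hit: same object
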
From dbfcf800b2d130066319ee6dc54f485ff3a09dc6 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 16 Aug 2024 15:44:28 +0100
Subject: [PATCH 1011/1761] Add newsfragment

---
 newsfragments/4574.removal.rst | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 newsfragments/4574.removal.rst

diff --git a/newsfragments/4574.removal.rst b/newsfragments/4574.removal.rst
new file mode 100644
index 0000000000..17c8f61ec1
--- /dev/null
+++ b/newsfragments/4574.removal.rst
@@ -0,0 +1,4 @@
+``setuptools`` is replacing the usages of :pypi:`ordered_set` with simple
+instances of ``dict[Hashable, None]``. This is done to remove the extra
+dependency and it is possible because, since Python 3.7, ``dict`` maintains
+insertion order.

From df98503bfd237518f57432bf5d4b5b74e8555aab Mon Sep 17 00:00:00 2001
From: Christoph Reiter 
Date: Sat, 17 Aug 2024 12:09:57 +0200
Subject: [PATCH 1012/1761] Remove unused wininst-*.exe files

They were used by bdist_wininst, which was removed long ago in
https://github.com/pypa/distutils/pull/170
---
 distutils/command/wininst-10.0-amd64.exe | Bin 222208 -> 0 bytes
 distutils/command/wininst-10.0.exe       | Bin 190976 -> 0 bytes
 distutils/command/wininst-14.0-amd64.exe | Bin 587776 -> 0 bytes
 distutils/command/wininst-14.0.exe       | Bin 458240 -> 0 bytes
 distutils/command/wininst-6.0.exe        | Bin 61440 -> 0 bytes
 distutils/command/wininst-7.1.exe        | Bin 65536 -> 0 bytes
 distutils/command/wininst-8.0.exe        | Bin 61440 -> 0 bytes
 distutils/command/wininst-9.0-amd64.exe  | Bin 224256 -> 0 bytes
 distutils/command/wininst-9.0.exe        | Bin 196096 -> 0 bytes
 9 files changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 distutils/command/wininst-10.0-amd64.exe
 delete mode 100644 distutils/command/wininst-10.0.exe
 delete mode 100644 distutils/command/wininst-14.0-amd64.exe
 delete mode 100644 distutils/command/wininst-14.0.exe
 delete mode 100644 distutils/command/wininst-6.0.exe
 delete mode 100644 distutils/command/wininst-7.1.exe
 delete mode 100644 distutils/command/wininst-8.0.exe
 delete mode 100644 distutils/command/wininst-9.0-amd64.exe
 delete mode 100644 distutils/command/wininst-9.0.exe

diff --git a/distutils/command/wininst-10.0-amd64.exe b/distutils/command/wininst-10.0-amd64.exe
deleted file mode 100644
index 6fa0dce16315854223a9fefc3ac9f3cf71730a6a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 222208
zcmeFadw5e-zWALs4GqwmqC~2q1gVM^wOSmD1&smAn`IHwx-pF
zZIn6VT+W%BGrluk#!=6l87z#Kh9YgbUqn#66wz^qpjK2UDDL0qyLQqRbv$#P^Lzhz
zpXcSF`?Buqdtcx6-7D4KU*>Q+9F9ExEX(0&<}UyI>iXYYavY9hhOIot@z?$vhBxPg
zHw>RT@1{k*1q*MxapASM_-e1c_14=WzUyxAEsWmkyXjV6VDc2-Ew^2N!#Rb814=SY
z|1N*jbMMa`zbE^5`^Y!;yvzI2qST(Gy1%*SBkuc7N$&YT-Cx=Bp1N<@vzPn(b0_Qx
za9>(v?wP0VJ5~PDqIcB&ikoWZN!$NyCN)8aELiqxZDrN}ecF1he8dv|mKO@O(--YGft*-DwZl(ZYk-lT3OMIIe*4s$vl
zm4_Cmqw6^Ck2)R8PLhP9|FztZ>lk`;(s#ej$#wW$|GdaKksI!eki9;@sAWJhcOi;C
z|9lR|oO2dle{JMihak
zT~N)DxvEKx{zf_fISUsptfi`gNelUdd8zqkx!%_Q|EGTi0(xUdQ@Jg08V
zxhn-=S?I~=;PQ*UgEM0I218YGN&^cA@d!;E!X(R0hUl4yC)BBik<
zdc59fMF!~!@1@gn9SPkPHr7}>(gSqkwH3l|)(-!HSBj#;S3EB--XoXiIaaW{PH+HNL
zhO)K+xJiAxQ$y&}Cw===N$($Odisr?DXp9gB=xBRm^K?Q+XR+7R^+RO%Ue?0d60rC
zRrtu-=6U{+l3(!TdH%tYW!%GwK#AY-UP;^GM7YGS-u1-P62C%RNcFif*!W@8ZFwi~
zmVR7DJ5qpCzBQQ8@&Yzg&b3SGhG}_ElIPeFE9#|pPs2h%Q^MOgCD)Ovk!Kq4WuU7i
zF3;7C4VL%dWSQLt0Bd=7$#dg@O!acR`Z-d4kiz(yaAHoO#g}&&!wjVyn*4Ygg$`P#P$y@w`0o6lXivq0Y@AZYMgo=!t?#
zMiSt4`cR$Z+SR&qt)7@xrzZmLQ$j|2sC2VbUa&N$DQ^I>uAwFRnI7w~VCuv@b^D)D
zWv#XP!7X)*2U)XZUNa+J$&*t1pYc5J(&5>;@ha%Iy3XJ9+iAbBPFkL)8-WsC
zk5xD#mxSrh7^53gOKR*sX#e1ra6+>}Mt`$;m`FfR!vQcCvRUT9eQ?Y8y2WQ&v!vXd$V|`k6AJ>yWyOMn
zvbl_IBq_(p91#%Z9g^ZRO=$^oJ&S@tx^z=xuyngXs*MDXKHX?H|MsOwFHb``Qmnf1
z!^jB%1K~i0z_8B{F`AWJ(T|yE)^
z!%%Fkb*V9S?`1|{ue-b@WULiL0CE9V2w?GEzExYASEO63ngT<<0&AZrP{?>sv~fqN
zQ0*4o39$D+6D&;%ES3h&w8qYgoH+K}$nia;6cS^|6G39#MYAHVx{J<@_M>Ud0*}>o
z)wM0=F1RK26UJz34~lWw)t9hw!LR##EBct%}C`H~TJ
z6}LtT>navTb@J9Y3)7Wz+nj|^Zi^mk&5gBvl|ENQbD7rcmH~pNK{1D8?0~47!e~V2
zktU~5WLY}5X`p%s)zJqX~f!PJef_CJFe
zfc!qVC17;ui7T!04$FIU2!o^DFm-2#d5P+|TPB`Y-jXg2BqrskD&-wQ;KH@&@G;b&
z1d4H!dCz?gM;e{oCd2a!uyipc;NsO~SP>c9NYqJnyK}3c`y&mO8O<+bQFSJSlo(+*xM`P^Svddd%vZazqiGWVPzCwxZY<
z0~Wp~aG$UGuA#Mb70rHpI`6fusBhibQr;0T@>VIDvb^`uF!cwkSLSh^8t!ekIvweg
zXy`Y$sfIpA1PWgR#K=`n2M97Aw7~$sjK533@T-zJHopkGvUb=s(K>#S1NHk_L=PCQ
zypgo1w9a!1j5pLVzw1lpIk?PJk9VqTjk?ZpFl6Hm%R7ywNMp-gLz&F6Vkj7ePKJg?
zwY=|sAK1PujRC}&;Y9Vaa3a_wIQL7+JgRIGu#!+A33o#5@=~TJf_3sRQc@Sl6Z3nJ
z*1(`>vuv9+N~wkuh3ya0Orr32f62v~2pCF9QRP1sUENhcM;FskYPPah!X-6=)j6b=
z{Z2-lV-+z2Db)oyQ)9BFbYohRR~9uy~U2+*3D8n+Zi`km)aP7yA2E
z$!4L$`qe^?BFvFN2t_@Vm{g3=u1-wyh2oQTRwocV3GX__r&#s&nTjezxSVZyyI4bu
zZk!|2!5lz3t5Th$)BTUBWo)**XUYI#tMyi>^_Ek4wobc!o)g89(HDzLiWP&@$c*au
zJ!dn>o)Stb1`*AJaK#|?NJ|Ot0vS$1DJoeZ2*fS#RjN~!Xx>&YH4q;cv)Tu3p2pvS
z|C26C-eqSC
zNG=hHy%uu`g)W*L6Urr+FxH9!w$(EFy<=$}3M$zY@Q8ZfZ(CQ7Opt;JHCM3oIlc-p
zb_E!ix|q3lWAE>|+SWIH?OD-e3GZnNL$6!jsnSludqk!Ne;eJFSF@A0+DTzqWSnCs
zJ)12>(n)sGgV|D4blXYSW=oku*Ym*pc-6<4Y|&23`<9(_Og4#y&{~oZ)Kpo3_3vqD
z0|hK>JPj}LP%RH)DpgC?(<31
z?KPnX^}Goka`9ZJLz2$^!*DQcd`{Ayn|P{ShuTc;cInOynly7fx-V?BXx3|@FxL2D
zU-VnZVt>9+v66vjN1)Jf{Itet)Q-E>+#u3E96$RW-qWs(sWc1Dj;;0`Wfl3SP3g1P
zsNW~+O+CTd8JS-s#9#i39uLluRawGoG4N_*cXcg>(kFVn8Zv$aaw2EDn$kv!x-D;=
zo%W=i7M4}!Eq2oVnPR{t9`sq>>2`7(l9(kds3y7C@?IjzMzu8jg31ElrM+o&k`@mZ
zS>EG&6R)xpM_S$-5>Y>6s@+%i#fJ2pHb=$Y?P+CX5ZKR3PPGh5^
z)rR>Zkan0Kl|UU>CCVohFkXS{R+8S5aSXA*>g@H?9Jx5x@$%=)3BQ=J$?}dHm+M&Z
znY?2#3NPRfBS|;58StxezZ`47t5TLXPu>!RuFIVc^UXujj$-*!STqKWH6a7$Z8v`|
z*fll@Bdi~bZRa)r25~?yFI7oxh-CvS65X)fJu759=s`+~j`C-E`VZa};h`_-^kwl1
zSGFxG%%qJV&FB{INNv0Mk2}X
zzWgJ30hF$i21dVN(|YeBIdztd-K
zA@q;4lOD+?0b_?G8C|j#t`wKr_r#lXp3dti0mm8OYx^7o?>0Dt&dizMErjcHyRnn(
zT!icn2-O*D2WdyIGFZybR3cO@E3@kjdkvWtqnHGp5RPuRHV+l~sn#BwRf?ijv-(tq
zmnBfVayg1Tp_6Z=(5pcE--RMXJ0b^vc~P$8MWIk*gMyO5J5E_pC%Mtg8~&#s$YY}L
z=F6bW@dp)!Gy5oZP?1np!rLL{9yUacGRM~`IPzvDKCvw3p4z9ztO)pgWxTBu0v<@S
zA!B=nNL}jfwG4?K9D@Kr*#3r^LTz}HAEbcM-7Lf!LvjS5(6fa?15p9dPQk~Gz``#B
za?5~9S1ldTR3QF@hK}fGy_8E^?n?40?4$6MQOX6XsFtkH=_X(!Yn>Kr9i^?wJIP_T
zBCteKS7Du`vEU1ThJ9uJS*23RM|+BzUzR~4C>|_Pc*34JZN6>KbTShN9OAy&FT|a3
zC)l!ym2dsNIk*PF^?N7w;JStsS!x~ryMrg>bWkKm8GmZU;tv?ffPsQv;TS|!l3}>*
z6--T-J%f-|(AZj$V%EvBbw)P581}GjAKMF=n>pFR_UoA6HYiPB;pHfsEpHN&KpX8e^)2xs(
zhU>@)J=64&5=ty8HV%oKQEbtbGT$(w{yuqJM$#`PsG1wktu|S)k>ZdnxuU{=#n0<|sJ4eY@pLlfq
zdTfp)uvscZA^6#Z^_3lyR4lnoK(bGb?2GDj>^@9yt0X4q?_tm&1{
zUug{5$Q8yve4$LqHqrkf&+v2PMVlXOHf4Vx4>bo~RMAaJWdEJ#_&GX85$p+_iklRV
z%{;`YKkILh(U;*aIdCb1)Qv-t%X#_a8eXDRj0^0A4c8lFs?h}veLyHta}0y7cD8HH
zz=TRW#Ut#qyi+caVQym_*ul@xhUI-i(65Ap?IrS0=~<8n11lPIjTq8H+2RTb8&8+C
za5q*3%MX-Jbsxs=<#wPt9(7d5&x`FnRj++RcfMY~*sZ+kp2i`RuCKg%`siaL$AQZc
z$_h1K{Q`3PiV?Cc*Yp|pz?emD_7`d=0IGF*X**ADxy1_)g`AGkDQ-P+cixM#Ml2m-
zY|!FGdTqX5y3;&vfy}ZC0?VvFz?)z88J-#~R+pzCCSXW-AM-O=xX}+I!Y!8fK9wK2
zF<^Lj53^%frX_SXa$cJo;ANz``pmoE6Ip2r0oR1-Qb5>l-Xl&`+nH?~QBHE*xz2oi
zrML+3QYx8cOjh#lr6wA?Y5JK~1dtUSG1
z+?DI0f_drVW$%nAHYizfl|1zDO2IZ5`A!uT|j{jP4Xj7_Q5`Js53K
zDXG|DPh=qe2r8vcm(c*j1(vslceSah6zrwE(UGya`Np|(=K`{WdyQ4)Ta(?nMtd?f
z%$Y=wEMac4k1`KOCH@5dX7X3VpQQTvtKhH9R_QWQ8Sx7kzLqz3K2~e2UxtXA(6Y7KzQ~wg0+lK
zgl*_)GuW{ezai?ixdnZ1I%JVi+6gMw9G5{w<8vMXyX$ggP`qA#Kslihg`}bh=ZT>Y
zM&4#TD;?-FgjqaVCs}t|-a8?z6%I!oMT1S2_lFb;C(a&^2`_{)Cmg@!QA+=U+-0^L
zmAX5Rnof~O9up7UK7rMP?O5?seX=yo8?_in&i!*@fGN2zye;rCp|MI
zOd0$`Lf?#Yrp&!Yo^-kR$~p9`1^?rN;%l
z1TZqnFe}E2BdkKn(*R4KK*{J}>DoZ))RKzQ_R>JfSk2jLli8+!FI}~z8xxT&z~j`j
zQ11KXG8;%;7`pp90n;oc(`%6F{wd9Vg@0Bd8T=3GVZ}0X@Q%d}U*>7}JrJ!wTm%Pr
z8einu{0K`tY@B=$w_P=c`%u7WtNUiPNSE~qTfdffqUs*l1=T~lnS!2B-Cjz-7^s
zvD0WyTLB|OK`nyL1!y13yM|<&QPlK+$$Qt*Tm5Q*gDu+_cyUJUh=1kLUd{7lQ3g1{
zny~S%IJI@6Il@MYnFN`Mw&DbbP;3jfsxlYL51_}-5y7W+Z+{5<^x+Y_&$N)@h1Ona{HxHEX0ezp?Vcdrnfn|_QM#uRYL
z1njlIblZCj3}n$NvmMFAu|sUUA7j^t#i|7$E9&DRm=PAC?28t3Uw`0*OBV#a~Q8lAziI1)O3+I6c^39N(n16@neycG0v)Mg!aFj
zEn_D}MdpxEY7*QaJB~ek%9e%VPVeajIAxSkDCEnL{&kvpZ?n7xM>JCwTwlf;$B$XlbVpL5(2;?eHGXpq($hS$5U?l}&{~+G@`e&WIkCN$Aq6}QPr!iXR{h>p
zP+4S@wbs+PkCd0b;xac-xyIA@7Eht1S)^h8TPzVSWVWex5|MqIrqU){aKhK*1Y`q5
zhci3X5WH->W_kPbj=oq)l$zeQN`W2=(s;$3gL_8Mk34xFwjzmq1yortkV*4evq6d~
zr6!5|rYOE7x*v`)9T}R?Ev@-}RVZAG4Gts$25}&iTj~nXYeP$9G#I!`twtVH8*%i7
z{UI$SfD4s-#Z)fCUFs^JUa;}K=maXCO67fGzbE=sK90(I;t^Fj4NkoOK?%?u<7VSo
zP3%iJM?FY+?FHbMkzhe%eMX4okfUbW^tGApwCmz-kX6IgmUj~gK~(NnLd88oFi9#M
zUje_Z{8%9ffR0sjbB&Y4u598NBrf%yNm`RPjpwY%`^q>r1NYOf@D4>_OP=j4?gEM-hh0g(X%BlWNUEEg8r?TdIgm
zsHDL1KFM1kF*zsA^RIcF?X7k4TSR)8}p+Fd63y
z3kBP+*yB^2Mx+#RQcuhjF5`<;$^OGTvv#ypBB$zvAWuG?kL{GD`wTOFKDcaV&z@UgxxP*$z
z+&9DGV1Q0Zpu+L?Acj!SJm1clqKbO=Vf^A_F^7_Bcz*_ELfum)IUTb4))R?3V8b~7
zMzLy&X);RmDxpylYegZyfOd|a_^B%SbqEW1`(f<>bRUp9{xk(%A91Q@DwqYu_RiEb
z04d0uENh>y3x52lX;|!Wsa)Mz9j=f
zc$`A~#Yi>&3smcBePxgSdeRx^=KxXwvmiTIv3wYf*x90wM3tCN+^AriBu$MlLmF=p
z%W|PhNl2GgIxPUf|!t0nNcqgt#B76!Kkw+j+(GOK6Wk1Vo?Y6@7Gg8@F<^$5m
zQ4{Z-wI!>>gIkn{{`QGqFd{qgyU$P)AFD>!XW}=Lu4a*)al+dz&DtO=e2-E0O#E3q
z1~UAFVr8Vn{nKqJA-w)|DKT5ST5PZSeKQr`yDoJB2HOiS3Re2`nMbb7KK-fZ>32Lq
z`)6ecpsa^k+_)_PNa(=rG~Ej|R>6(WonP
z)xsXX9Newcz>PdeY3zEDxOlm^vxcDlN-zvjkZgXq2Qv-(I8tohE(l;h8iJ7>0NFt^
zXjx}}hMiI3*2N9H56CE!r&%@cs;6|XdG(m;e3Mx!!cu%7@4~C*D@Sn8;-GXzL@3bL
zx0{x0C>t9$V;AES!xB0IB3kFua_Ah~x%&|^zQ
zNfyN@+oS8vpHH#LU1$MvC%R&hj|dC=0z_CSzZtTk@LEKP<^5tfK*jbK9K}|TWG~vg
zK=?|$J0D11+%?l@&+RQKpj(~L;u8X8!*xn7IX15>Bc*QKgF%_11dCh&Chz5q_GV%^
z338yRpTg&|56B7!(cZJ-%Peg&tG+oZo;JEtBx4!=6J9fGC_AoQruf$@7(o4M8AmJQ
z$+++y%IO(Ti;R#?Wm)Tcgcr-(LPO;pAZbW!aY>2mj{eaC9!BOYblEPCq_Cu_M+Z~v
z4y0k_YpmbbE-hk{_4e@`DVTH1U=(IM=zKL>S<~8~Qb1oZnP{d5*~E)=`|c&k(>sz9
zGPE+ak#-+B)E=l3kruWgAe!n8Q_Uc6iMFcx4i*JpN9g<|UvlchUUMrwbb^lYPt~1Z8$I-p>g%}h2H
z0yNkbd^(5Q-1ybc*^YttPgeS7nlc0uQ$E4pMhgC6u8AogMg1#O%41wt*2$oRBNDtx
zof^Iiftplx%X?cUb*AP05*VU*6mUUVVU1M$RS6sgjc4K0$K&~sK|wK>eMbjgw3P*1}jvpEBJSVQJ+$%M+CydYoS
z_(A|Aq3nqnIU1qe$~TlTI6glqt9P9~MTURHatC?}tE_=PXOihlWSUf;l4E(FwkxoN
z&5aDjk3v4g`wx35n63441~fQ8=2ITuu=yDpxMz19r6KOnjC-OtZlm~Ucq~9m^@WF9
z&Dw^?>g=o)JU*)Gg~wCboN*a=Y>`ZX)G|8(n5;Sc9WdE_ViqPJD~51A%BrIeOm>rc
z6ik-c6|yi{Vb|J=)az60Pl_;F3th|UJn?*gdBe*>#
zeilM;JTDD2M>MJEn@?aKl}_Yg$xnB$JaMON
zUd$;1NfYKsbDb!N{=#r-C0WDf1))^s9|mOsOvJYOyhaZs77Y+76+7gN3}Y7aZ0O461>oUQ!}^p__GsMiI3f{`8NL{!}@c_cVK}dJ!*fUd+dr(7)R6i%Nkv0cHBB
z6^I{)>6V#F2%Q*9pi#CPpC-S&!@S`gwj1$b14S{u@OL<5GW%f_#WKI6z8D7f!k_Ok
z>`fuP7F|gs!Q#BhX_vK5XK(%`kNHTWBB_rFL08QMny$~QF`xPkaa^MDF}5lQA&ci-
z5{k9C1pr#*+lx!I*cu7d57oY=C*O6Nk3koG2J2~auJJmug2tDaW9EYE
zsaD&HHh#BTHYPV`zoW3ju*|6}ETM%zD1z}$=ccVFk;wt^B4kvoyPv0V5){Ywt={JP
z-GN*Y;dOfAHBM0I*7cB1^#c)(1ZTwi-&FgrDpytbZTznPU3O}F|a!Qh8E+*1a4085ifi#wY
z6Xhil&mt_a$q9jz@VC3!y6FjlQ?@yLff}4&LKuxU6}xR&j$q35SEwRmY1{rO*UG`f
zep<#G%#DaRT)Bqdt>K4+!TF4<@G%^M_`q9Mrs3{nXAvJn9xv($A;4rVV2l)6LLq%P
zz5-Q>Y)TEWyhBv(*)yew1H{338h*jt4(>W>S8Pu~*a$LX?>2Fx0ID);jpcnw04Xn;
z^(qL+wc4ZA^M
z1{hLSwr@=v0LR|_>DF|0&%Xdt2S;@Wxfzsq1{F%uEyT{b&6j_>CcW&fswtal-*Hw
zehAl9n?KuWEwlA=UH#rq5YUm)vIT@k#Gy#1+GsDggm^Lz=F7Z7L2I&$ZQ}SzNDr~e
zO!lj7k|zkIgbedfZ)H9cO#PL%;kw_*gaQ4xuj;!%AR=JJ2nr>
zI-H$><9Jt|9(+54j<+`jddrntm<8jTC`lh?0>6uFP&Ash_L|GE4DT;5l&y&?8Kt?C
z06qzkW?`;l#<;?U14H{oe-VS}-@Rk|D{qWZ~cOxtLqmNRut2rOB58
z9?#T|i0|TV@-u^Y;y3|b;pM7L*QI1+MA5^NktnQy08Gz%nUso$I!@x?+RY!Ms_2zT&U(zK`0l+AOm7vg)0E~0Jxr-%-
zEsV=sh6kSuB>c13!T+mlsVg&F^u8F9=3gkNR@nY|Ku-4Mhhy8gJ~ThLoF6IaSN@UY9K_?;|PnB2({M(QcVf
z?HR-cQkZhsD6T*6&2-;WSX#cdw`DhlnMv@uIhnKy{58@*?1pZ+xE-+@I%zK0BI{@$
z;CN1tU)|gm{Hu=&ZNZ&zkz^zJUn!uPtoH$|toOO~S??Fw3oBdT9jn$Kok*FTA5p1S
z;E{5Q+FLmLLZSY|$z7s=WYS`KiPg0W0#Wh&Vayd{oxN{-J|ljqMn)n&XUOt*C(Cxl
z8Z+1`bmLtM&peJ(q*icE4$TsC%2+*QLME$^-5CzhzNsN1*H1+K~ro7L*
zA#KY)7TON82_i)DEHVF!)Km*e%`Szj3r@7_E~2i{PQ{bWQL19(3Ui8GDP%5H4-tR5
zrEVM{h3LHt=(Uq4dOeSt$QxFMpkXZBhY0aXTEhF0IB+xjUS|nkCJK#{5GA+cfhHZ_
z?hEJg`hgN6h4XM=;@483D*UD38mZjR@@`h(DEx76%JBofE^JEhwusr~x6Rwk*%jh;
z=KKmfR+F!Cy&nSkUWen2G(+N@jG;Gip`G}S1jr`}U!a0{$u@=?ceO(sV%`pBe^kNU
zd$9sq>Nh~r@c!AZ@di`?j(3SSR#ZID_*im~$mM7dq-THZkBZb>*&vUN`wQh9(UXkA
z``G}F9m?TzB*VKzHiM;5j>P^a3ctrF)Ui0G1J}Ap;L}+0MSKkYSwO5c_Y+*w!?>96
zpZ8W0DJ0<)l{~W0{&G|H1Ol2B`w6Dye1?v&@dk0~TeM`Vh%-!bqN6;^b60#~vES(2
z-0a~=eoJWlvT#%Vw9?|uBV_wlpd$Ip8o
z5A;4B?0xL+eLU3rc)0iRi{8gCdmoSVK7Q5vXxWck#bgZ1!Xc?0eAh?EU
z0Li|t6~{*7G2&$q|KR|cE>QN|*9Js1W3!&znv|8K!S7a;iUnSC3<{P(bt
zPbz`~*J=i?AZS_9G`_=tu~jp628`FWWV$FrWSx+BlEvs+KpiyNwdCHSV4}g@1!1WY
zD+KyCZ56UFg&m$0kB*(pL>8(&ThVk2V^d%2%UK>Xn6t>Egljlz)!ru&wTy1&L+yaEC5VY7U$0(}
zL%w!!t8mna1Fg=;DW2s6HC%YOIm_FVCe9zusTlmTK>23PS{Hpwi+$nq+y}Qq8DHqh
zk6nTC@iUZ|&_d`pPcE6(l9xtDeC$KVF
zdRl1tMhnSrX+hGmEx7;JEfoB(TPXTpw=ftW&`2*F1cF&uC`cX!2hLORWuTel+pf+&
z@aX@&xQQ8tHf20pe2MbCiO(8@{lYg*Ohg9+wP750ALdjIFkV&ldOGXX8
zSAC6Q{wK|ctd8hkf`mbTH=(|(7(;d&&~%VIBu=MFM}N~d0zbUyfcf|YZNzq%3Fbh$
zBQsOQTJl{tcz{uM;y@Vj3h4pomJGv`9$4j!oQ^Kj2nn!Yf>bd~qnH5rU?U_u-5JK&
zt|z|#rOiBAU*?&T_cd6+J;mSPp21n}f#!!QZDey0UBE%#PXB#du_@=9t=L>-xAN^g
z@IP(k+xg&s+RET>rQrW*E5-kmqiCrQM`hq*b5sw6d%4Pn&zJu<#^n4aWA&Yt%~(Bn
zf}w0|+swr{6my9mOflD$8RinM67GV@zLmNDM_mYmJ)`&(CWE*cGQ{2bUBu0y3uqx>
zwqjv6pUrI48}SP2)0yN6ZCDI9BvbAYoRG&MjifpiKbEtp5b+#Xl@rM=&(UMaye)g9
zpKzk1`0v8_-i)nbBN;C3(Ejej9+_Ya&f(}R6PJdi#T}toOI`@Q#CeC}uFey^
zi$LGvPh`D_6Mff$i8ud&wwws=^Xf;9ZRV*Y7(2IEk<)vt
zoSrRojB0{XU*g@DQqjMazbzr{)jn~guPn1y8ZvYoysR`95#W+JK4g9BIGPbXKhVC5
ziR>gQB6S?wLZtWkmX9FN*gm3%ZWkAG5r~_U`uuK+f)nI;*XRplO{Gx3qFm{&lVxet8#?SVr{wG
zI<>^5Zs$(V^89U`&YfCpRS{qru_}G)fmKD(sspS1p2jy2p~hO(j~HOvI<58*t$dsl
zG=|n|Ml0%Qn{9q_YVL4O3BNm*@Vo
zp(M$a;AjF%?Ed{$by&J&AMFi`q>
z$VviJt3z9wqcu6AUFrFn@k%MCYHCrH)2^eHZm=sYnn{J~(sos0!qPldW54R6GQFr$
z<5-B&x4zNfNtwzgeM@h~8oc1)(stxv?(G+9iAhJA|Mb*TPaWL7WpCor9BrwV6N)9B
zhzFSSEua#dd8eh$c72{+(!k7Ax_4tbUFj22Lq*>b^fWDbS6O-O91SG>i&A4hKYK1O;E*GEvvE8a
zmLgew#$g=G)v|E@t>B@q;yh2|Pvt3gXr`ypkXbf%;J?tmO5ruZ-*T;w%UxxTXc5nT
z{3x#d!%2^8^rhiFD*TvNTbRbWak`iQgb;jIdXxNmd_sjag@W?MbJUfX_+|P$(njjB
zITa2$Rb`#oA(FeB)g0knRNxbzDa^5Uq{C=qUVL_(otJzXmzmOfOdF0XSji@?BQ=yF
zvN{COmxhhd$Z8%mAY7=)aJdBVz_-Q@7u{PLG_*i4F&LlDVc#7mXJlqDet7^HP8?ag
zHlmfM9L=BJU}KcYe!`AVn>MdMUA1ZRp#_;bf$*-K7owfx+MIu4qq9w{j(fb4Pv72S
z2h;dOPP8Ltp|&DA+?X`d*nzWEgz9Br73DNR1%c!nZ7ESUfibnCq6HKS$7{>BeGtEU
zROzk7#@(ZIV|r2L#|trACRK>DwseQtGD7IEcC;~JRC-G3-J^|3qm5T8k9ew%=*~$M
zXhO=Fzoby<$9%C)eY@@}9Mx(zkQ~2xbhr?bSWWUFn@^k7Dr)tpZ0h$kELUK-l4F^kR!=8H
z2L#4k$w9%t=GDj#7&rQjI|8xpfh$;WGqtOQn&SCE&QGmo_|?WaL1T$K9G}(|h*!_#
zdtoyWzo#?EVkllcXJrv}n)9V4PwbG#^Y8>P=V^E!S0Lx4K3u`w)9}2LkQj!wR5sc=
zB6H-6v~H@%{dTY<_w||kdzt%QRHDshvDGts#SAALaki7Ep_3+2<3ZFEhs{Do)+RMF
zXJ>4UyK?`+Hv;h+i?q5g-7s>(^(M5FngcOu2Uj~faU4tHVAzC*+J@V>PNZ
z)|nH}IapOBp)Zk=K+_F7wd6ilc++DILOYr@o8JWB&SbR
z3H6}`4ZI9_^AUbegMF$C#RdSfW~gK4rU_&Ui9q8KjNSYIR)0{;R_FU5^*08^J4in*pnDI1-
z;52HA4Si@S-!A1HSDCzhQR&+B4;9EaKso=-WUG9Tg`S0aadv{=f#g*j8q^obsf!VR
z#yU=;k=^0#A!mpA>@a&az&bJryb!WWj@d*_XyeS;#7PB8zX&@`+EFw*X14BW9L8t^
z@!5J?zIe@LB)GsLcd3~;WhTZh`H};C1U<_~5;3NNT12AMKsKz1jLCYXK5|ihplQ?~
zt4hZfi?{&d!u8o%c{yv-w8vw9@o9D%YA||E(3Cie#($^puyKn?NKcj=K@m`rfeBYj
zW3afVflz2Zsk9O_`6+w1j760PEuKV_ZHE=vHyJNTLtQ@+GtLFscORz*H78${r
zT6`iv2%4dIzxW-2@&jPhKjw~6(c@y5oajj`X0Tf2HN_lI$CI&*6ROOOgj9(FoJ+m!eq+V(an?Eu=XFNdMHqale^KF7Ksu*`+^|zvG)7KaK67!1m?+M@@6wp>c?c01df8;i!f5AHUSoG)NHS0&B7n_Grewnu+NnG(-fcZ8EbTpudy
zZ=$a)Uu?hBOnN|iq-yS0O_
zGHAdZ)?)AFYO(eG1MxFb7efD)lSkgp_am-qF*kpv1P*U!9Np^&K)o+f@nsO2qtDEZ
zt83cwA$nV4WvG@&ZU`-PAD39l9Z895I2riKouly7y*_i7Z;MeU1ITOKLT~9Cjg7J0
z`P#O9rJrO3;0ket{4+J7#IQ(K&-3dKZbr6NwkySOF|?jKpLxb7>sgToy*M~O($jDw
zEw<&0C8V@oz_?9V&(pw>1CC(j$3aik$HB@Jx0Fh*pt$fMA$7PVG~+F^PVBXus{w^g
z0Kw8DL0JcF3!HhIE-(TvwxWeN-e8JKBy?f-|92A+I$0@xN?ep9=<8D!WLdr8hI&S<
z5~s>)rj)^JTfRigLkb0+hMYor%Fy!-oNCh-QyB>lCA8%aq=v})*cUD$JQ@1BNpq0)3)_E{T63s>R%TpjClM#fw1kzxD|
zVHa;Kq}G6uh&ad)c>2uv5Y|D)Ci4Z1{a-WZ;m-{AV
zQF$6RWbRv0Cc+M@Xd-ng0$f4;zeE81b1pbWpuQf^xMFMeYaD$kPrHW+?B5+$pOp1t
ztkc7J@*+c8^5T;t@fml^9iyjw#D)Mm7+k{c>c{O
zHub9(jHz0@XikPNhky5opaY|7vG65*I=Sch=Z@ff-3P0h1OQcwV{4A2pSFt>_>s21
znw-tJckyux|Dh))&(jkR$Wk(#c({%_@j;xjO5y>&S!P-FbE%;MD%1wKM15+<
znLmTGh1`Q8iba<0X7IhFmeLrwh;5l+TmG>$k&^EY6_~@wpnnQxWAZ#DYPWUDkq7KC
zjA(~cW*T`-MteN?CKi`xc}L~OJN_o;B^AxBT)Swh?dGZcWYJlw`492k^=%$Q7pphm
z>;$uKiD6OMwU7@Ds^9WFAADQ!oiTaw+;~G4G$}a*V(yq}Z%1oA%cs3XoKtQF8$bD4
z%SRd08qEqejJ2ivX;l>IS4P~o$#bGmCX3-tp#NN%PT_J=tr>oZFXIgn17VZmumiGp0C>P}}>q_X{vOR-&!BMoZmAv0@j^h2fI>{3F(qf>IMV^%ss
zAdjAP6oI6bE#O*uRBG&!le0BcbBXZ>Jq_z9IX5uox8iQ(hl3KXbB?N$`Xl)Pqbkxv
z$j>UmjXzKacx_Uyg31*gXQ*7csnyS1v^iu2E#&PpS7G7w(Da#f^ao;*zeOAOA@kE@
z7KZAHA>0W+&8NpXjHWF|>{ZoACa^^D7l&ym-dw?C9A=H(W56(NM&erxS23NY<4Wmg
z1hl8&l#JlZ0!KKjEuZ}j{0LURg+J6dZvdiQIIlWBy(k!GK^i|tOhKX!Fzm$g!vr+X
z;RAu#B?D!#YA;&Vf;F?Qm8KQU`z~OCe<1PwE>B~XoZ9)Y@<*jAdCqcaApToP3>&j&
z_F-7f$TQMo6*JC3)AXf2D%#Abm>kyJ)+sFcO7;PT
zc8J^9j9kF@$H*(vQ03eBrJ8ol>IjZluWd^a{W5{!h_sL+h;Sn;C*&lVzFG}0$>Ku|
zkF4sqjEyXi)xapEIXgsVC>hbI867O2365d#3_3VswTxQZW(J)2AgX4LH}dLwx(G_3
z!J0dMBTXv#5?7Fu$f{lTv~pB}Eqg?aVLk%USL-cf_RvYr2>
z`^CMjjabc)a;o&4p9@lrD%Li~(|G)WPI;X?A2iSTx^V7;>&Vx-qsD)S>p4oU_u~5h
z4IIA(kN#i4@tp6*v5iti11P9I2nJ^&v~u`RtB>Fqj#Tc;%^c>JKLl7SVhDp*Fe
zn@Bw-JDrS2=U+r>*hXqwKAV{KN+Kwpf)>ThVfHk%(j=a!`?n!S*wC0<=DGiI9+A7^
zN#+cCUYN#-eEpQV7)0|tk(kZY3k)ZgDvtc8m|4{nVx@7GJby$#au;up+)b6|X`K8a
zv2B>QR_aooWO3PxdnsVlV-;YWUgITMEla;k<(w9RPNj|@L#Y!#p4
znAt^~Ts|7ZJnb@H*(pHIEFY8yN-T1co63y70Wtpl(oSOc6UX_@z8
zXS=kkd4lUJCl@OYEGG^$bs3XBDWtaj2(Z
zBh3KW7l>3$?F14EM?+4Mj-1PmLum*_1#;N~6lZbkm^%cuSH7!IyZ1GP+Rn^+
zA$G}e5qIp8V~Ajm_Tz+mAa7LV8Lu7O*<44*Cw0Vj`%!(Kz-BFx=XP#(&IoAn>8waQ
zqIq=HFLqB&UbLW@MUHIN1^D4VHsMlF61K=5ug4qQArO}hudkT{8mwiMx2nf
zDp+2XPR>SgfrC3cpR4)V<*Z>
z>_-)71&>OfBN~>YjgBccBqm%hwkc$hWj%)b24`Pw`Ja6$1vn0J1?V`
z?N4pn`?~xF!z;p}a5iQi6!~S(Qx`nR4~Y5fU!`Q<69j=NTZ|O7)rooq
ziVnr38BFp045iM_mMV!=c%m+&Mo%9Tt0<24H2ewm1tJG8`QY?9y8hCcyG
zvR{u+dzP%{F&cjxOf1Qn`{|#AWsxQL&Nh3Vm!16Fxq+O;!M0H8j_604aROg}Yibvi
zXcd|YzR&utELuRspB^)FgOxiLeWX>sffjlrNW{(QA!A@$o;qXD$Sa8xap24&$$kjN
z#~@hRa7D-mx0O3?$CwIs*zEpK!1CcT*xgmT@yKFV43K6#SIpF^}`z2bltO`Sh
z(^wZYJ{CHR2OpLQpkgf%#9{K49J{T4iWSi@zr-C6J`k+@AlhH6KQfi6F7jxxFZt@5
z*Bbg=$P%$FbrVm@^fu^A)=@XU=BC#O6
zH1D#eyb`i6P4`=qS29PwE57Gp)=An6Ld{y~MnfQ1k_$?;ZSQHUt@dih=YjYwf6|=W
zwbEC$($BPb-p`|-g^jCU4zO|S*7(uK#PpnP@1}FN>}}i;JzRc(<;;M@cxyykVr*_Y
z*Z7Jk5`3M>mGa}|R&s96)LSR3Z{R4HB(I;Ml`P5DGtl74Rv)ELzV`p9V|+p~tHqR?!3#
z5~EKCuiM_$SZ57|0-{hEbX8!j;WKeQ3c#n5vgc#0&#aaL&Tc?aU<4lz$d?@Q=|-Ve
zZwW#?vE6`6zF3zO!HFlf$;PlmvZ&!sY$|yIj0--C#y8D+g2+8iaRu$R5Su$b9(XcW
zVNT{MGfj
z%g1kBOqP$|L_GCwU&|S53D*W(hONyvkR$OT{K_Zir^yL1oczEqdFX~ZYf1AwyV+Qq
zx*UHhDr0S`ncXYvv>$BG;W|rQU)mwh&#CM0)%6i|ynwG>OkMrzdcL|2
zRo7$G)uFEYUz0Z9Q`gtjwOw5gs~!XDI#FG9b-h$w!|HmOy4I-c73w-wU8k$-Om+Rf
zy4vHvMm^6_*Xz{vdUc(vuJhE@o^NEQjB2jB&Q#YEUzd~ub^S`EbgApR>iVj>u2I)!
zb$v=*f2FPuscW6OE>zd+)pfeM>gsxtx{gxUQ`EIcU31m-GX<-EsB7`7GLix6`X#GO
z>$H#5wNqWUs_SZXJ)o}3)b$E=ovN<4s_O!EJ-<)-MD<*uu2-nO0;>0O)U}LjI{%_o
zgp*pyr8)RnKD}sFl;1~4wqlNAh<@YW+_|1%4G^8sXEMZZMK7GaHugBVx(UfMvNC>6%$MsigjK>q<+8X82crk7iVM;r~NNmrpE`NS=oHfbn`-2k}P@E_M|jcFLQo#NCvaF1d6^H+R()lsHmT5rln#&^v#ua~&0WABVCK
zA`Bp|J&??geD6RqCo+&DT^YFg<@f{1+=yqcK?2%+F8c>)&jZQ4NTHpSr&96+9Lba`
zdb$1Jl!Xq*!m3OK626M+%m*w{cgSc}@SqRXfk5a}Nd!RhFN+@@v+(i(ukq4g*qA!N
z%?Nx1LE&3u!bV^~m!x(pxM^J^c1K^Ovtg+pGWEXgEz>y-5dpIBZ_PC5@D%3_@Dv9I
z`1Sf^9q$ZdpI)Dm63u?z!$yJKU)a;ypTEo91SAe1EO_dGVqQZ@$Ac8crI8q4G
zW2xLeNKyzQEw2YjSp?PeB8a_ZfvEyqPP7L>`8I+GqwneBU&K#f0B6rn9neL`f*^jP
z8af3*0B*3|jYbI7py+mmo<%l#dhxCBgFEC78$aMe(9(k@8$ZcD_z7qF6Zm!ud;xfP
z0HB{bK#&kJC9PA^c1qf2#-%VM=(`+5hV7Br2y&A+=)3T<68zZyf}nxsW>M9l#rAY-
z2OMbSEfK!(^1G{RYw`~63}Xt{wueV-3Dve3Ovg-CqIOz
z|AOG%a5*tF*rfOmQYlzb#q?K8l%oXKY%$}2shMZeKQ;z3m7b&n4T-?%2_0qKmR^>;|TAJIGH;DR!S=htz
zk$h64d(+3B-$J@j9P%>{0lEsB%th3=7oOfbQ~?u1w?}*QNIM8$#>r?f=>}=a)6gie
zvOBr6;k~AkL6O@tuQ%Cqpj4zblQi8fF*WmA)mwt8tAUQTOUSv|GVasV9Hj|9eo!gL
z*#!{x^6F_E3&MJ4^)+nu#g|@Ln0hAH5gnu_lSOk6Ci$6&+)w$0q@yS`=rZVlMdB!N
zPnE$!CG)BcJ~8BY;A-_vVzqp9z_DuTL^Pkdp@#CJ?)Xj48M=yv^v7-}DsQmDt@Z#Y?!F@31{7VZ-puC60TJF-v=E
zr8nr!)@}Zu$i}ulCjZ_&=JKm}N>rDb(`L)DU!KOh)T*bt!i;1RuCNn)=1(#SrIKKH
zI7_?kGpA$89KAN^Bi^q|FKx#d?~)+hE?Skwt_GfCf@PkD{|7FRjK}SahDYSRHdvwR
z0g5w&UFP32l}1vjZGw*yj?{crPX+$|DwEycZZpC`&+E(?Dv_|Ypf6L!Nu$3==cz%}
z4h%KNo9us#F2q-u2U9v6o;>W5W89MFCAxDn#)N+J*0ETwaVI}}+&|Vb78V*CciQE{
ziGo3)h??Q2-uh2o0VdnN3W}@H<`C28p8zXy5e2SZxw7}8)lakQC9ZWg5(5}I*+%V_no44lk128!g3gVNB
zxeaAW|0FKryAD!ClC?zGk^e3Aj0pv*D%BcW?tLZJGOTSvkvPNRQm5_~&_-+4q`~Pw
z+sBaDN7<-ovt89nQeI!;V^>!WxKXLEd~52nlKPF9bo__rE!X0@I*>U@1&ak5X*b2I
z)!3qYf^TPjdJG2a8wcL&GnO+rIG=l+{G^o}w1HC*r0JF3LBzO5jyLMsNosU$`x{clZFbFMGmpjV{5=jRU2Jq5qv18S~;uZ7xVkb(G$#7
zvxrZ0h&H_~OOLGi(`
z;~m?b7wNBgURobZm8FMj^_>zO8#zbg5Srgg^4JsZaAI`Dk{w#*hDFE6HoCRSLkmxh
z=2fn}eQ0cB9x#rcs9Wp&2iCZwC-4^X1m7vQ(z94#A8{VafFYs106y-eP+s&Dt~t@M
zusEW>@j~HgioY?;)0g^h{O0VI#o`@G#nuO7E>C37FN-|x5eO%WR!uW3-K7_(E{&^w
zABnz-hBxhi{7eyiU@zK;Ss%S2m(=9`W0)_Ok!RuD$!nrR$i~ZD`e8KU&rHgAPuOWv
zE7WKHNHf~wPV<@TRpK5;!^_=(e6OTm%@WVFxh@)B>Imt>&yyDa{9|*Oi83e*jBk=U
zdwy6rhrI2X?#mJQeRojasZ>*G<%<$4tt0TxyMW&Y`3Y9lIZ{OTr#+c)w!u7Y(eT0x
zkbbmjwLq@t&m@55FgOQ+qY$Dw25)PG8K>Sssb
z9Sg}t!vmB-`Zr_)5k)#yE^}yjAQJ{#VDTr;Ua(nX9a8OR$zC4t=JviSCbl=eKMF5u
z!K8=h)BfH_a17qakHOL%DXIqXpuFQipYqy8={ND!%Yh^CVroqJVWD`pPH%wr5eE3@
zD!#$q$L&Z{f%bNY7x*-;`hoTqUJlCJlghiK%~|U%|0f<{+~Yp#JPL2R4%!pG`LDP_
z`^QoeWAKhu@QyS5XSzQ{oO8G3gqYM1CwTvSh|3`F?dKH
zz%lRcPdBWbkNpeh3=E9~!)ZbQBI8a)iJWVFdOR-Ju`GK;VhR-w!IEjd=^1r12WgH%J8R)aVIB^yWb6{akEL>jq
zo|)1oU8#UB;Ti4Y!9Xso_~imt0IRUgx0Q*QvES`cRvdDw1UN;
zV{9~;vIS4d<0-Fx2_doIyTcNrjH=GENPRfKU(JC3HA#buU8(H)s8LXe-=*wFUCrzt
z4deq?CoYkp)*jPsly+@yixJd
zf=aMeE)wbe%I}^{HY#QQp2v(fSJxKsCqvnVI
z`TZ7=Q{DJ;z3HOXaxje=h1WiZ#)Bw0OIPN{79!Ap*&ur}xPFM?j4GELg-2Cm?En7Z
z+fhC+-bLL$!1X(=Mzop^pnX(g;?0XB+~1P}dGJ{ZNWR{;yU2nU#K&Ud-SxCDnYtJu
zn*z_}hN3q7LmsKnlAl~qOBbJUO;PA+rXtBU$Vx`T^9w+Eh&V(kOA?dB!1&UR1m&?`
z`a)^hB7LS|6kb;ku;b6SN$Ixwk~pLAXc0_%SlN+W{0y#dX|$O1g>kq2=9|Xg`Qn)I
zZmo^xrCRbhs(R;iLklUL%6C;$W6y#bg@~BT$_#1roHiq;B%^dv)E?*~|0v(?Dd
z@sv{TNLP=MNvgb8E)J-#GC<~>K&O5ZaWLam-}k7TWRNd1yZA_lz(05|(o$BvyH+Xn
zsk>*;R8W%`g7x@#O`LLDDd%8UpIEy5k6$WxMF^w|J1WSkbzBl;)6==+I&`HAnDo3o
zrrv%-N^3O@O46%#sGC1mZJMm$nIUsZK6UTJz;dD7g1+a
zBXzA;Q7Y+Gc)E(7dT*CQf`&lU@?#jbw
ztG}BWbm0YjlP8tReBP==E>~NTiI<2cWH
zmkS~;hNuYMjQT1b@oFZhiBGoJ|2kZtSl`T3y_@kZuAzF99A8b9W?ECV;MK{kT3yA0
z;gYxg@9uKq?L=jQsc;`OvTAg?6^6yNzZ>ah=`jO0}65O+w>ijQAh7b{(?w6w0c&$LabvEtjgM
z3>eN>fdzH;nTtBcgSrBb+6|taLDuuO>&%k8p>^I;cjYbEKxK0@QUp1*qVB4Tp?h0X
z6=F{e=5g00G`LJmt1vi>D|V#HoM9oNkUg1^M&EG%hI|lnxsUtG>wOhG=OX|vE=21EF-Hj`+
z&`Yh|^OPd89k0FfNY8jbpe(o
zvThb!y>+KlG22chG>@`9R$KA5&R{Lfdv>Mam3qRI9bZIuk_bJ;hqN>nIkgZu%hW`gJb&bh(5GjDt;+tW
z!uMl89gG$xS~^SnRi$fds~j31Dp~s+AKu1M8d45XoOq#vw-IZojhp{_P6b)MX7a>5
zIx%DP;xUG2&8&!`OAZRWw5jLHx4mCvqE*rJH5r%g){x&B%S=>G6-Ew)Fd<(AjVjpTY!ysbgJ>xC-@?^hluG-p$j;f9%9dsQgO6xtA?
zP`~l$WUo+F73tnf#gSEYR5OgiW?FqYs66>nR_1AHR5Pbyt%|*iyVKc*_OD)_RGZGe
z5?R6hiA=lgr;Ca*m1{F|!DI8QA#UJ88QAOvsmB;^oow+j7&yf^cTX^~P${w5Zc+8S
zl5Bdp`b}YL6IA#E!4YKY10{|rrMs*W_^A73KYN}kAa(R8Z0bzb#bp{p#T+}Qg)R-f
zo=mbmy!580uY_VnqRmPCVDhz${^QS=T;3p4;%+?8)-9_ej-7eT
zZnA*7FB4!+OT=3W`+2c$;=u)E(TkVDKPh>e07G-9?(8fBoRhGq;J^qdoWy{DY^XVEN7a6QhQ*Kk2clNT|GkzvIS>j0@NJk_gIyJB!WnmS~kM%q>E(o4P~b>?hfL;zp<#+9v2rV|Jd(qnfc8&yv+vn7&VSt43;9Q
zslxWVbq}ZH3$w2)t>er3c0W3HW{WXEI!b`+QZ_si0?E#}(bN#SvjO=8W5Wiy{M!?&<%Ubp+L$*!xVvEycschu8a@po^
zUWJz7)ri3iSQ@#bZQ+8XT;w!EnA@G+ARoCoT$KJ#{Gi%yRRc>*XvtFE~z#Q
z(dIrnk6_kRGf44>)K&Lu(oy3pXfSdwkYGB&p@L|pW*mMdd1YGOptF3)ph=-kJ#;X%
zZPav(b~*Mc1qm|ql;UT%7tZ-FwQ>?{xLKt;IW<-H`IK#rVg4qyns+~n=fL8p#K`@lj|?jwd`_Ekr+AIQL;1k`6A_U<
z50*R!7M917SHQyH+oT7s3*#Z5UFM$+;Rp4JchG*)2eI%F7JiO}hq3So7A6{akm6wg
z{vU@x^Jmq%b8%itTtj2udV_0H~p&U)JVC{m)m3T6f%JG8AG~C>KRFG$KaJp
z7>r)f%sTD};&Cuo;M0E8qf2Ob^C&Eby_~nL`Gg=NUlAH!)Cc-E!9)2jqxQgaP9VL<
znDn~lG|Er@g;{)}d>{Cr;qg+Kc&P!hqWuLlJXHfDZ?wu5(IW`vN0~AD7o|Q0&Vc#o
z79k8?!otp*o()UBfAqnuV6>H?083-^Zw=OU+lKwYpRw>VmOkG*QCdb|J|q>I?+4}a
z+U)QNDpQebceY-k3Vhd2uQHr8UN;JFsfX%sqB`roYnRgZQFzWnO#M^`$KOjB?348?
zP2luK-O+|c-wr0U2K7Y=;=%kpSQu>oHy#$P9q{`E=3j1N=2JIkdKnGqM&ZdFVE!fb
z9cmu6Iqdoi^ru1~Z+&rOzru_15jU8>BR{|sGBgZmZ7(9Fc7j?_(hm7nj<^T5jufiM
zYM9K2uDh#l6dyONIM;~e>Jx8or}0=DjJZFRz^xNitCS>|k+LzA3j2QP!^6EkL=dZ3
z$5<@Vt@`ZS=TCD%43sH)V^arF+@a|*8$qpJ(Q*)bsNHr2~c89r~avaYcnNnEv>p+bb3-KRVE$>@6S
z*~%wLLZ1-+7A|xuXlC6u&JWY#QZmvtB+4T*>@T#wXcl!v)`R)tkC^${5U!rgm{())
z8Z;;`c>a?RIJ+h22IkZ51N(sINo6Q5YsJ6j*MhrfY)Cbq%OySJnlH5f#={cnIvBD$
z3{R*8?V(r|eaay~mj3N0nf7bBOD4T{evMaW`)0=z%9itG&>IVqC3;16x6`)r%lSQy
z(C;Nfil?ud6gJmWBjO9q``e$12nffSI-4zXDHmFiH~A5)PEDheCW@o)$k){m8@5)E
zh8?S}gsng7myh8_XD4bc5-p#e=9MTZRm&GpV1FQTBex?_=l8;SMYV=LY5%*7&s&6E
zS1J}tup`Qs8;Z{g))HD+lKy(O(4v%ko5!F&I-&68PwR+Rr&&~^-Xkn!8;Zbs|7yIr
zq;|QKMVXX*s;f&5#gdJ|r=8?;GZksFz6VE2b-3O0`XQlIG~m0a&aQ`Oo5*BdPm7l;
zIxbi?GAZ4`%|;EL{Ek+i50)5o9l6?7PEBk
zMiDtKhs)RGyT_L(h36CGR`FyMM`F!yhwR>%^Hu=!l@EY@zpOD2&OUDDw1VJyZz|C
z{2@=JfhuY?KD2Q(+iz&Zkb6xgdtXLH_;Ki}+uFwKR2ysc4vtHW!j&CVQ#c&2vheq1
zavq%f$=W!v>xQMz0H4rJxzc}zSk{Bm0y4J{m#2{_FGe%C#`C^)FLUV
zmV|H%A9J<;&I!=Al;T(T0c
z@#UHfnIcDq3o9sDy7(WA@3^u;RN~gNvLlQ(9;OXkc|4l@P9;vD?fz2QWyY83qpF2x
zg_P@>MDBdLv23Sb&nmnLhgj6w8`<0)bMGI!MdsNE1+{=K~^MSNXDA*Ylw
z*xWBEOuWnM_xUq$&u1}mqRA*&b
zRInP~76uYK|P$uF|FRD*EJvj|+W
z;0{%9QWkBQRpQ9{6)DQXxcGdT&r|T~O9!4hBDSCn{PNcT=k)^1&;V?qu9c=AJPc
z@lfv6e>a|$*W$~gG~gZ68zCDyYwdPh?zeOC^suh7)Op!N4S~;cfxD<3#HfTVcs|!&
z2DN|g*=oEmswv=kW26+Y7kurFF70gnl(UgyjHJ`xbpv|bXRln&?7N?ki*4$&!>?GH
z>ZpPfg_3HkX1QudeO1D-lP6y=`5kMlo}g}Vv5}VQwdm)_s1J-cmC};pCwwHVoj6?^
z#@MbTt0ZOKmaZo0hSk2XpI2-S=9)8JHj#_hyJ%x$b4g>C@j#<8EPQna^}GNX#Jnev@QWGvyL)KjS{8G-IWrD#0i0hGH*LvXx8X*K5=4}lD7
z5>hR7_^PLBenp%h`V1G7O|Hl3q;D0{u}_Hg*FqI2{aU9hd0=RG>~T>4;Cb>B*?Gzi
z@cc1S1oQkNNVR>;RqCp85#F7_?X>f42&!4P;8XtcF{qMLCyz(z=ECw-qccX~k!K+L
z(W5VVoc<)X2=33lFnHn7%D%OVpGr)oJBQ$Pw=my_#g|1h$Ye^Eckd$I3&LEdmm#LNlL{)p(J}8&n1N<=IaugPn%TR9K;j1$%Yf%T}arqCQm9CjlW2e
z`0?w?MDk}(Cte2i<|-k6Nms@OgO?xi^V8@QB4+#EzPx{`CRP7uzg>2iV(77?HHxS?
zDI|j90je|^k)17GbAI9JHJkevC~$9g;8V;uGwiW!1k`ug}r^sYx-GuzFbwP50^j7Cz=im-cTJ(SW+{2*S
zDI9oa>H@#!9M!hVN7*<;kmtIVUBCrWy<&0RPr?&2ZFg>p2u|DI4um_e*R6g=1zY=z
z()=3e=&wl*J6+GLT4+kHBcl=M^w9gsuWuK;(FY?Q1
zZ}}Jyw#2-Uk}khLIp9qhRPlk*d4aCIOD?qvG)+>Q^Vv$SNfxW+j{tDKE-5s4n
z)J>Ym+`k*vPoZ*B54{qVbM||=T!YU*O)uL*i}B#gN4g$W3PJK?l8hq(RU_94gHN-)
zlTM)=v%91BRcwzVti*{vRH$6CNR26y}Zs(sJojXvMJA
z0*(aEj2~4$#wKaEqU57Qj_IU%=8gHM9d|5|0q;NAi`ZGp1^@`<9!ja3fF5i{?H>Do`{9M@FIVWV+5bBJK>oU@aqAD&dp&R%Xd!z5}$u+*F@T^mFSP^#nw7I{Up^$yH#JEQ!Kjyn*5u8tqQ9eQB
z6b03WKyb}^vBt(<>m6WN>z8s=M7LWkhT)~H;QkG)w=g~*jaiR31?CsPdORE~ervkZ
z!ln4H)tVh&E~~w|AE9;ULK640+bV%oYrouysZk@_CDp>RY_D!+ydhwsH1=R8tvl}Km8Gl`Ep1L;r^|MUbtx&Gq+A;HfZ4L=nH#<}v
z<+VwwSfw$zPe9CD;ck^$nW&lEo;th%=O%A93U9TDJciCng*5JwJ*t_MV>C)(>OW)*
zg_6W6!YN_lp%RG7cEv9Z@VbZi5$}I|>X4E1QvKbVPy
z9rwnh6&j5A;G+uO=X1~`Ul-pD$~Q1B-pG#R7P-hSw`y&F6_xb*{I4;qjh2+xXAzed
zjv->57xeG?iD&k@35SJuj>0=Gfc1?vjK>y#JWbDAm{hL+v_yp?SxUEH^4`@oeZd
zDe`@!z`&;3gZX9Uc&#$KfOW6yBx0$fUL;S{a)dvIo#)S0S#;S}GKk6q&&Rck0`6%_
zm{!~BkJZ!2WV^3ZW{~)64O9?1;|h(!%f`U}`EE`MDjEE$3l{|B0zXdRlv(<%9^-ve
z@%#;oO2F^8Oz@7XlW|_0leVfO@aRy`{*1qL4iHo6-|?uG$!?YVgrxaA;6Q23(6Bus
z7fu0p8q6Br!ll4D@u+PR5w`p>--Xp8D45rjK*!oxE_$(qfwi7l#kfN!>o{0{kPfZa
zA&DC%XFR{j+MyvER_$iBpRysbNw($LaK%!ofu*SrHEuL{Ou9S7PHkJgxE`OtQk>8F
zOixG1XMqLKpNvPDrwq98T0Q3yB;abfg)$e;X)P?DWUZUI_l85RY(t=gpmIF*ZG$9d
zb(pMT=f={Lld@%Rx~)Zci$xn{E7-?3`Dcv1nTAvoOf7;{Lv%+y6Rw_VERSMMDEn|h
z)?e7~-k3xK8a0;?#+Qtu~+sbr}+u16(beF;rVKvNjdyf8F(
z40cn%O!%E)CJy}04&WSoaYr6}zyo~11$tXO_&yd`Gi?TQ0`KXz00$q;9A*y=7);va
z*uhlX!lAEy9RvcWb<0uB$x-{3g|3CUw)HKftBuyd2X&N99FZ2LVCgo(*;UQNOZk?g
z@-2JBEh{YxXOx+{x|4^Tg&8ET2#(7jRR{3v296_V4pN4W^Dq{e+Tp?SPly)`bAa!R
zfrI}boc|yUlGgxXkV72$(77BajTs;Z@C7rAf8Ih4zTW}L>hT}KQGnYW#2s;K0Ul=%
zf~^4oy9K5+Zh@S;L&?CD!MQ!i$&rJwBak|B2%yCkW^+)dP?#E6H|`43!mQc{-xC15
zrXb|jLHOT#On{z0p_uX<Z8
zdQ3oxZh@SbgR?*RKpy1&Pnb5yImG3L#itAMVSZ5Zf5&J8Zi~OT)j%CP{w1pg;{T1I
zd_ehwY7GX%qzB#;f1n@2kUpp!kRB-CkUVHtikBT6E<-IEq9T;ey7SK0)j^sg${TBv}*a?aT?HPOt2*U`P_boS&Sc#q&tJ^;a)R%?1qs3nVpvN8{(0LEIKtIO#p!WhjkUS&<
z1AH*R1NHNRoT|#+)K*fHhXG&xnwtDoC0R@X00v*MlLz0c(?8;q)z;UL$Hc-Qep!?%
z`1l(L0p|!+u)Pj)Cnhob1uzKg%OS&jzmVuKd_@g2?kJ96!uKXH{72!c<(Tj_tZ@2g
znDA+=aO-|dI2tRQy$=(PgL@1QpMa2vn1qy!oZ>hooQnDc4J{o#10xgj$y29USkJJr
zpXK1>;y%Z7o|lhbKu}0nZkHib~2Vs%q*e4b7`s+Shb+
z_4KdbFfcT_X>4L@W^Q3=Wo=_?XYb(Xm
z2V?i&x}bO)kfsfYw+Fx8hkVfJKyom8VDv+L<{;keARZc#pfp)PYS6gJ0%9D&uNTN8
zlqwYG3=T*mM#2WfI08$tfEt2IiP7o+LZK@G%K@hy$Ui3U?qHOJN)KIC-GD~>10E-s
zB?yD`LD$%$l0oC|pL*gws3!-IHgwH^##9y<7r5e?0ptqf2FMcVa|9SFGn6MN1Zojb
zeL^MrpEc!pkVmK`VXkpd`7kZ$kEGl|o}jA}lrO%2+SQT0h5oU(BanqkYytS7+Oh&=
zWdSLGdkml-Dz)XIc6;DsN(+TTdNDB$fb%H#e_H6j+n3-!>HX*S1!?=Y-Jvv&>RlGJ
zQ*D4D`#?6p#9(R~sxv5`eE+|+kE8M&)tkUS!BAPfw86e@^i~+w>?s(f>nw
zP|t$;?EfXb3;(3&4B9BB4R`=uu0SVrS7i@s?SJ;9qrUm4#*XqY@{hj%eSO^oIfw2A
zZ-aE*K_7-{=mHQ~rI{~Xg_QH&WQ2%y17%iYt(*lf<(5Pi`
zpy8+;9r>8@2x7G-C|8h1XneqoInek8wVnUBYr&uPBnbP{o}m2xPkj!c)}c`lstM}@
zdzc)IZJ05_?!TsUl;8hL`}ucD&^;_v=YQAspZfmOPa&&7qlP;eccJmt8puFb3+SK0
zQJrH(aHyvKt?fvkz+d+J-?tY?JEo07?FPEuLs$NPr+}#sfxqnczt<1_^ML-l9BFpL
z(*4Jt0NyPm(fT2cLkvG;-+YaUKU!~-bBL)AHDYudk;MM{hm4VKF`dV
zGiT47(~RE!D_3tc`gWu1i9K$&8~r+SzR~E%jUG1o38VXsK1=s~nGeXAuvoXz6}U+|
zxuaeoZ%m&}%a!30JF_gsm)wbpmAJV`Hgii!Vxs=1)HK>!V!vAZlsSi7joi7Z#CNT>
zfMxu>Narj*m14rV^VideJ(+v=*w`*0KKB?)3^HPb=!0^Mt!W5_puOe4Bkxrw^j5t+
z-o)bK;$CumEQh$nqi?Ez9-Ba%my0gP{
zw|Tn8_gI{sKpv!zwjwFW|MtU&#diHSmg|Q5ud93aT1j+=0J~E_S6r=5{@2yVO7S}De|z&&{*&9a>R)XE>_K7wIj4Bo78G3NbpG?OnZ>F6
zv*AxJ{a@?yQ9VFXlHECMilTLLFFSDjHAlSvQz>h#AFqnlggab~I*+E6t8Q3*<4vDm
z^M#vlS^LE=eR%X$$tG9ja_OIV@=f-c`b$9bOH{G-O-YwtSdf)vIJoxR09&Y*0
zBad!-?D6kz|K1bdfAR-Ee5&!v*Ux_b%W2
zz5h3_yxRWT*M4{4_piTk@DFeP_n|)?eyii{KmGa0{~Z0xv3K5m@A&&4oKS!7f>zH9
zR;RR}_3tkKzdQZEyZ%3FL9yradO`8uU4Fim%h!-6u(MJmD2wq%zm~m_0<}w&iq!FC
z=l~xD9mCF*3oEZSufePO+J;EgglpNPs8V+(SFc=LSG`C(Xx$fg-WPK|RJ*kDTBX7b
z!MaH81cJFShwCQDceC9{i8;*PNmj36a$OT#HsQKpWr)ud>vXI594*lm_#COudP99B-!P>f$Xop~HieQ%IqH`M
z*M;(iSV~PMXY*
zsn@vp_^fq(WoT(-pw@lVV&ma++1kzO7z>6%N_?m|+^~?Z&bkrV^NaWnd8HKJB+%&|
zs$H_M+9nACDB;cES{;aBxiyjfj76I;VKZf!vM>M%7PtPf6DSXX%+pA4qZ9DK)h
z86VlLteH@=99~be>Z8+rVePe)+cys4pR0p>C${sn)k6)9
zJayum_0}$pKkB6_66Bl0L2Z@M@RdB6s*@})t_#*Is){CO^^@W={-|)if7}^vVK5|x
zb?sepD)uag$uYWCJ9Seb_I$Lz%g2jj`nx^v_Uq1mY~7`A%3<|uuvTeot5??ryPU^*
zs~0W|x{VYQPHa^U8;%0$t<0=WMRhrE`mGy?Gl2$nSep>72HnraC?a*#Q-s~kOjAq$
z7kAn8trFvLbLWl=uKSX3sCsd={X($yFXK(jaAV^aM5(?*hItc0rdD3EvI$g`nJ0+Q9S!{?X)gVZ;aEcqI$meUt#pwr3G5|N6*Vs
zEuIA$eSS%a)=Q%LOs(e`y|84C*2hNAyZ*g#=Z|%``JG?lhHp1|VM)0T-)3~1pY2As
z<=Yz7-TdDk)!lpcHBsHoe>keU<*SJ5ZuDa`PY6XKTGBs=M{m
z9M#?WUK7<_`)G{nZhcimb+`TfQT<9C|J0~{h1SO!ea;nbd9sXNRP5GYhV?(&El*lh
zcjNcQ=}AVve2$x6W%L;Zrace2^-);jmbb&|^WFM8Xmsm;U!2}%^x36ud$bz8dNT0K5hM5JtbvWN+0jJi781F5)%?ryxyb{NtYJ-RX!re?CAVs
zc9;3bjG+BZ*Id10=Nu;BT%8o|m)8gDE?syHd!^3gv-V6cStxT)!;RZVHV
zda*)#-VoAMc>$_HBE$4EYnt1RV5|$X$*&#
zg~KPU&ld7Kh1tmEJm1a6fB7uGDw|U@XY;M;r+?8mzGQ38^t(&$OCRasT;T-^!pqru
zv}QsexO9Qc=(sp9Sme4b2vsj!AiKOaM5;se3)p3oc~@Y;GG?7jrzV^~F=s;MlEAsS
z6T^Xp#Pv~!la{2?0!Voum6qFErA_vRos49a5%8*v@_{NNcYw-RpVl_07572C)u8gh
zYEUk_kp8J(I5Vf88kKvY8s+3FZ^wZ4l(yv7q;Q{#-Zc5HTa{YpDCG?7tp=7Sse!qD
z)WFI8!cK3G>OI+$<0L1lWYSBX+&k>%A(=e%B@g7K%p1-e)l2oy9k2R3<5X|jR^rPe
zzRWSbRC?}wmF~<_Hk}qK&FSN)KC3u?b{{n?GG7hLy;2Qxu2B6Z%`Qp*(f8i>}Kgm0Z?4oSEHM
z4RXe-m~!|%Ic{2hX^%ulCH|PWNA^}J&TyqmiVumuRq~SCSDhXh={Y@jgy(c8!!w|b
zyqbDw@p3*>+Kp#>>wAZFn@E_`64XE^$z#*W5r1R)VmrCyf4qvf3!M+0XLiOV&(fgI21+;cpmopGM@_Q7qbWK5@(O1x9V&y{L)V4fPCJ6DZ%%2Zm%pf<0`
zV-xM1ndwn|oU=P^=B&PIcw~wi9+<3#=T1_?or%i&udsDeV8f;BaO-Un6GyAIa^7H8zQ_0I@6>DEjQoawdPi+sB-D2h##sS*W^ns}D
zNSiu?deQf@v9Hm(yqV`<>wy%{i1L1(5xL2p5rp%G-L}&H#nu)Qj=i;eMOfh%%mt;K
zkpz_-F#6ymH8_$;*%MU1a@tP(NLXnT-FD+>yR&G!v9#ToXxsUuY^i-!>cTGlR_EER
z@3?VI*DvD`GWUjLGg;kvXm(O@X-Ip5c#`j+*|arz~y
zeszgq8M$;F2V%-s8v5
zadvQ7O1r%x7{C5hlYZj_dE%qgio2CsgXGNeNU>~L{i$(rTK})&_kWN}V*S^7{F&qX
zsbPT;jQd%P`)O)$M{4`Pw*IaCu*ZXw+_c_kW*zSyr52m8cf_Wb)k|d+WV8+mr&pxq
z@VM3FFMG36uOZjl^aeO;0CwqE9PFsU-RZWTO`ApQD%O1v@uYJ-+cY5Dzak|^##x7r
zMEWiSC2wiiZ(so92;;q6x7IV(+qUTLoa9Bq4rH%+!FWCPn(O9+dZyo*YqKr$)8AI=
z7s%Ibnf0}=Ki594fs9RAlcTm`$8LXWCVeP^U~{$~+4y!otklcMqc%QUC%XR)B+LN%
zUw^`-sLb{Bzn>4a(i!dRjJ6@IY5r*a+jg*M>oYw`r85^u
zXD*P=wKTmfwN>iDL%f&ouafp`vCtm#;O{HtrQh^tY?b+V
zG<-Yd_^ncB5MEIC53#!x5A#dDIxtb@2s`M5Ppc8m58b&5^(FJUcJ8gZ+y~fl*gmDl
zxUTmFAJP9#t~=LG{HcSI)S&ePI=CXWCAIbr_o_(LZ6T!&zRsQ}2f6Pubr5`0sZEI0
z9SO_WH;DP^pfY!M<=!I==K4N}>pS;KkC=M_TkobUww_8mxJN`j)wQ1esd4_U$Ip$(
z@_2v77HoW5@$(3>!TQs6^%3)(*!q<=vU*SRAbZ}T$wQBJYR|`AyQBW&-Mh)N%qd4k
z9#bO&+tkS1N7YEDB|6^=i@n(T)wbS8-#^+q8O@K(mLm__IuXB&ahao%l$SR0)^*;A
zi2ccLrF0#viiub1Vh8tZ5s-^a&e{C6#wd5V-<0hVdd?Tpj_-5-OhmF|IN4{gi_~CPr=WHB9d#Rxd2Zsk$r24%%
zw6nH#+L!BdCUbMszOC-~-Iuu#bNDjWDfC=}F_U>o7W0yxlm!gk1dUR|s_MJJQANOFFso~{*H9YrH?!zut?ucjZ
z&50%&CZ?TC{&IT0XNtK;k$WZ?OUt;UkFJv>pL{P9-`9Ey7jLgUjdOpkdk^<^RI@v8
zADX0wMpD#J+F~eeF|>|(oG(TB=qtW*ukvv%_SHq_lwn8D8JRaScTVRXG<|YvYq#}f
z#~Nb;sV9evj#HoDp69d34BMWuZPHVDyBUvlzu?|Dde0o62Z?J3b{gq2_iwl3+-=;;
ze-{bbe8kyrs?H1Z|A3kQ*LB*hSmt9NbJTvM$%J13fB#%ZHOIy0)`gvSQDB)*W^?M%sEGlB|Z54^Tt6?hYv%)L~c5qxtH=9?NcWRNdzt^-Gf{Oo{tr
zSqr|?9hXHH(c@g)x=MF$!;{tUh?nc$K(2WM)NuDX$jNH}QnzK?lS=-jU+FQen>mI(
zACGZ%*4Na7ZOiArl*Z@bjF+5G4`joI_jnM^xrb+!PJ2bGnDHe>pMevW-zp@b5+i!UA@6ke?f-X
zaJn7h#>Q-Urtu+jr5>)wCJ%NjW{rw#RqVWea_6;1>g=JfJ1TMqxrB4
z#<$5EcT%>Ej`9X^Eg$?u2RsR`rS
zp?e>nncYVX3JhbNC6oR>M9JE{#MLZuX?tVdR?gVKbzGiPYyaguQ_UTY|BR^r7W})<
zREM)(l@WcWD(=?SPr`22c<`>r()e;^j^rM3eb*&Tsbxtr^Xj8AGe^dZI&3O-e2X6kY(Cwx(SH+E
zWe`*hNw>k9RuOV;RI*e@-TlXI_H+ya%
z&q2I=|8-86YjM}#T-bPG)|Tyf{YolnBUv`Qj59l!14UxSn)TR{eb!*(GIeF+=@{%$
z?;+>fc2DYTLlp{^690emTkhn0A%;xZVz@PYs_uw8Ncc
zxa(|^pN?@JHTrCiI%xA_`qimiKk{Tu(*2U}3DYMShr-e8l6E)O{%Buz>%v{n;=dF?t#_yAK)2=<%haDHX8!x(x@1y+nFn`X>PX5@CDXBk>zjK$dcV4t!
zVy=(+9>dH0b0F7a=Ae%lo0@Fq9uYH^=CXDXHy1qZB9EGZIQ$+#4qYd!3>JXhU)wt5
zH$Udu;!oB0p0WnS{ik`>WbOCHe2;nn86F$I#2a5XZr%I!`UdvY-Mv`fLXY}5l569#
zzq7FIV$sGG%@=d?xO`n+>`^O_SvD?f6Y*n^op(jLUvrdN>vyq7Z9*2CxE_MX!u&>J2sE;c5m&~l)KF*E1>Ri_>2VQukffGezRnc
z?PC*E3vRozJKf61C8!d9pK>rJ?0(#;@YfXM)^s*uaN8ae_AcC}61JJN?9q;!FDRwy
z{dj!6t*Z8Qf%WyTP3XZ9n{ysYk1~8~fjDaHqi%(|<28m~Ak_V3NUOXSwzC
zrosINe{OKQ!TSx~VQ`JX>kKY7IN#uOgOd%8HaN`S0D~Wx{2nrB%k`uwXPd$MD=3|~8YMOHP+q+xs1B;4+WI20DO!1L)IG^#e&KHNv?eCzeD(Xp
z%WIZ&Nx7m|h&Sg#!5Y%5PMFmYic}XYC)V1z)q!APRb`zjl^A9J)cio8PVb?5Q9@~L
zWuUa07X@`0_p+O0C>RM!Ci2zO31z{`fN*{_XY=f@t*(j4Zj*np^)W|YORI9G*9C(n
zCbdC)&xr&~fz`E=a*Td4=`N}aNf5QjnXZ4@Z+@pw?!O?u%Y#e8lJsoe_rmQK>*lYF
zRG|+SeO7ItA!KS5w+F=SieOzmZwVAP=BpuzWx@IeDzyqT2q<;4pqSXK<+rbhbgYrwmJ|tPd6ys?#MF@LZ2S7`(RA
z&qJ!HnzvAS&9KugF`>M!vZg*nv5ccq`&DsGz|Y&q5qg@$Eg??w6AHt9d^nSf?AAnmgo(
zozu+qwRQR22+pE4Z1`r$k*k!^+12?TKX2%EmT0k@_g9L6sHM7=OvOdLoTcl>txfj1
zk-Bx0QkTyuF0*6Ixh9`QGYhICbX}!hH@(e$#YL$t5>lF@ur?&4&~}d~mo~^)cDCa*l5-8@)rGn*lJp6Kk^duSD0{-f7P`zt?moG>A6a7Vu
zmtl$*heLB{YIz|_eLSHwSh+NKlKU^T{n0_(fs_{7C?%8?@41+2MYzq*3U;YE;S#!};nOU5k-Q-Zl?QA?K=R6U%uAoHx<+CD+}h
z`!%jpRr<0QC~jQD7Q~c3Z->fmMxg{d(I);{Vd(S@Nn|3i-yQs{@5neatr0d}n&79uL~k
z)m5U|-lDD)O&i=ZI%fAuQ;Yoqxx@8XD^F0&HS3c9A-d;wGFi09y8j-`B7t>>6L6PkRJ|<{pG6_lD=z|Za}3+
zXd1AdE-W?th--^l*8`&ZxP-Ajm;|}O>r{U1{019Ok-E|yRktcGSL=C8Ohvd?+xofc
zw~|VCC)9ljdI0qY>-0;Bj5a!>Op5Ms?Pk%nCg(qmX;ZnFF-xFP&q*jPWNzrDL>|Ht
z%GIR*<8qI<$lH%x3M-k(l@`t{QYyJ??&;@)92vOX5y>r<%t8iO3v`{-C$S+e12P&F
zG%Q}s1WBnWt}U24o2&M_wxvhY03+uTw%-UwbNFR@g&~T$s?xQPfSK~D%Oqc>crt$4
z8T)@0o-oIp+QpG&6n??1>P2<6^=J#+*+KmRGfy;~ZGl{axZ21K1c$H6ljndL2EB25
zQk<@g-f^`X?tnoXUPl`9$Hw1g{Mq=e{`bZ|#H6*|#NW^8Ta7-<=(ig^)98&hKSr-G
zdWO+UjNZrSQ;j~<=su&{l`*f;)698g^p3@DJv9x}uYv04a@LFBK65@Vv-|VA%=xt=
z-1CXCf#KWD`P|vvhi^9L8+$mv&YT}>;ul+pJ^og^vT;n_^y-AKpUt}?pY-iny!XV3
zrnL#$-t2j+ciGj;YtQzKQt$D3?b)~ceGc1JALj4>&i!8v{9g_9Tm!Zb$z>)$`8+@AWI&|mcTg>RYejz1j+C2w*Kk#dR1Dx#o_vvMuyh4e>yBYlxR
z2#>;9rE^i-M3;S&<(yo9>^X^7!U_&VBwiWX#4Q!!HA*ewei|a~A45dXH+rGb{h)*?
zGwyQ?GVRjw%kCtSwp`UEo+XG6q575Vq0EiB&cBpH^3#M!o>v-N1xi_NK;-;tMB?EQ
zrQQ+vE~9@Fl>BW*B)xAT;_thNNL|%TX+Xlj&I)U;>x?*^_J~%75_iF=jD5R?&*{MuKd7t$}{-bDf-Vh{vXHx
zFE<_7G)(<|)4@$w^&J1_zNtf8#s%fFmp}EgPks93YhIpwvj6Azz0&@UQj-quu*6`I
z!90W03{EweYcR*)Sc4e`Qw$~=wDBL`>z4b7!9xby4em3z%iwl{Ee0Pn*lci(!A669
zx!^l7^&B?tRR${z&NJvYSYj~GV2(kbL9fAM&%60=H`r!yyTKNNTMafFyxrhBgKG>n
z8EiBdHdtk_!r(lEB?fa0`V4vv+W3{xJNB6T8r)@YyTN9IYYeV17%^C3aH>I{!Pq*E
zsqTL`wcju6DdKwId7F$5d#0?y05*D~ZbWE=84
z@+ZWL-*b>zNXUenh`WgG-z}z}H~yE~Z~tDr-W=OZyemw+e=XW^=1*q4knBsoO)+5q
ze;gn6Edx*7kzfv6db!t?L*{96Nd5dP>nNMMn!g~&j(OBE@X#Rcwcqh*{9v$B*>D~-
zt1E{pl?xAngIU;^2TubpMylWy;3gyt-wa;JLiq|f>(c6a7SY$iS;JOi_<+t?Fo&r9Tt(5OwkJ=4>`wYHq1aASqbtd_NZv|J5
zC0+Px@QiWn69OL#zJ+XscYuq|R_aN30DNaW>B5hL(M)@&hjc_aM9B
zd%?+h{Bi`I3r3LRaKXfU=;y)(e^kWY&F~%I3&q^?
z!1sY^(}@=@=tow-1sBdBZ}0&4>?PD2ybYW%i@Jj6fIK5m`{9C1kz;V43#ca$^}a_v
z2~H@ZuHZS~<~igIE_m-1gokehAD_!Upz!VB%?z3g;A_D#-2MdMdEm@XG3LQb!Ivt@
zC;SL_=0a=re}c5bcY)Qn
zW2^Ah;9GZKtMCrccPHgKL0GT=NrM-G%aAO1Blry@8{Q251{n`;2Xi*kMsUH`k!kRQ
zU={n$mBIz@K&s#y!PI8T4o?Gj-a}u8?*fl*#vWJ+JqD)VOMii9fFDD0;8Vc`$W*xC
zawHGF0&GS~;9J2pTc~rm;KRr~cndiATS`^H)4*b+3f=~e+$!sp{N5aV4rzq%1>Z*2
zz`ggej(I1G`Y!4Xo&jEfw7{o_pF!>qA
zA9xB_i+JH-@Gz1A?*QlQrk}ve!9B=W_+IdWUr;~rY2Z`HRCp_R`m?kbJR97El)yKG
zeSV33!jr(8ka_Sm;9;Z!-T~IP(XQ|a_%afP?*~8q9QFyX0Cyry@Lk{~d$3h_Dflq5
z4&DMze4hG;7lGeHn&D4^zeTpf4}e$hrT*ayz}3ihc1*(*;I+s`cnG`~*$UqZzJhFnw}V4|!*~eK0Dp??fbRn5zKWf|*MW~CNxc%(cJKh=
zh3B+mSHGow;k&_CkP>)1IPQ0}7d#id{s8%hH-XcF6g{Y-NF;WOk@{43!H?s!E?b!-o(z~+rTw{WPIpN9fFfO
zutRt*Sc~}KVQ>S|2p8OeG{Xfq{F$`G9efGd3*Qg+J3_s{Q^1cShv3t|Ymp=H5IFjO
zXkYkR@IfT4PlCqX@GQ9Cprc$b;A!A&WIWst-h$-9*Md8dY4BZO!e974IXn^Ee2i-c
zT<}d~0sIg+=^d_@@G9^K5`haIc#pb(3*L2{x`j7`Iqy?0xZu4f7;oWQ!DAwD!51Be
zFGMD&eITFxQd8kM368pt_o4FO4}uBoJ?4ieg3FN#_-62DNE3WFxU#pSZilZ1llnMn
zH(YQ$vJWo!Y4!)}fLDNbA=!QDr{E;s4IK~91+$WQ{~zuLZ$OIRtHD1arEp(AN9{xc
z@HQ}%;;7B=M({VtK6pF0Zh)f>!Ue|+q-@EId!WOOr5BzE=JB5Kc(~xKA;b^&gY_AX
z3d1Ad1DV7H-v)-+^QsjtxD)aAqwT=gkqmfN7WFgIA&*?kz^9NR@dFMWMf~s_a1k;O
z9srLZ6>!1N_}Ehdz5+Zio4w=U<>0T7jqrWor_XTIgYXLQ_*h3h2^ak2S&rH(?%*0^
zKm2xZ$T&wGfMFeMd!S5lP;ZK5_COK*wd^0F}>bAoLS4?JKF}UDaQ+S88KehqBh-AR`
zfu-yLH5M-THZm2S#a>R?=Q*koE;x!knp)vL@XZT|6MhK%;f3tqFo3dvQ!XMNcrMuM
z6T|~g0zZM2!1KVH5I=kk_!2S?z8{>Qhi$=|z}GIu&fy2Z>pkQet;Z;r<6HrGIAWA3tovNdb!?#wMYs)3~ocx;DTcpQP*(6
zA0j32R`7LX9{eEKKY%^Jz2NCc6+BxI3Bv`SZ=l}bd%<-}sW-UboMn#M4lf69LE7PK
z!9${L@N>v&coTT;O^#YC?%p7v7(^oQD)5Of(Z}FVg0CWL;qBo2$b)e8Wk;QXY=e&li;=zXD)2aR0A8}r
zQ45fi!Pp5HM$+Jd6TU*7!gIidU!~6BAuxO!b}4?qOK!(r;pJcn`{X9JNdQfQg%Fcevnpkq&s;z1TBy9NqyoZ{a$Z
zPMd=H-=ePIMPQ$;s2_MK7>;!x
z_-AB1{0Mk<3-tgW4+fBF@Fwu$N3j`rDfk)^f*$}29>X5sMc|K-&F~%IlpkRO@LX{8
zPv}4JZ14xjUifjaYA0<37i`;wZNpQ3#x>>Vlzk}m2JU{A>mXb(@t5Qio&wH5a^a=m
zE@T>9@TxZQ310yI87YS!0f#(C+rcxyCy++?v^})-^Yl}AHnC1?Z%{6{A8bVq!UZ2WNLk?9z}bJGE!bPd559<`!S{jZy-C{eso?LBT=)U7
z*ME~|coKLPQUM;-_Zik1HXdohi?GyX3vrX@MiD?aty8pc~rw-
z_6iwJp8{V(a^d^I0corW!M)&8BmiFvzMRhbF8nxn&j|KIfNutuk0cN972xbqtntA8
z;H}6Z_y%w%as<8$eCRaRQ{mgejMG^k%1Tf^uyCA5jfWS3pFW$lJ$MD!jI4k^38qb8
zO$#n~9r7T21^5{9Bzy<>iHWRh!ZUI_>Wj!;`0e2PlUc`tt0}BuA;;l@Wk})(>;e2d
zk^)}?ej7=Hw}5XWS@3DO#Q!nYXyJL_myv1kb>PZ#sW-vM5G5$k;L
zXW`;-o1j!ghyMvQT$PYXhOu7Vng9~m&R>K8vETtXc
z>p<@;(t-<)M)tzxdl>SKi;5F2$~P<4zy;-76x-o~@{NXLa6$RLLI>+?g7S@lsZN4M
z*<(JBH6lUT=iSe`fuQW!-Ub(xz0NbZ2N#q*%J;zqWgqeq?u!LwukkQkQ1;lq9WE$)
zVQ=N$Q&8_`&HbQ8_8OiCmwl>-BkSO@N44xPy#_9L1#%F66)5{ax5EW*MHXgyQS^A4UwDQ-DXi07erGkd2luHxxR>Xlbs
zQ7^vuqWZxPexO#bUablW3e@@MpRXoOnxxJ;>n!Gd8H!)7vi_CCqUQ;9?;f7S6oAD16}79o)}!U){0E$uU?<#1_x1sPyt~$II7xwUIw2i4=1;u4
z#Bal$c>E~|cjECaTb??Bg6i_F?px$^Y`FKzu96hqEz2*M&)+M{Qjty%Tpg%n}j@N`QzGRUAv{+UCT#DI8KkS{o$}^R)lH}NDIfdpL7M+
z(p{9lR&*aRy4Z2oaIWPaKJ20?V@x@9nMLo~-w)fET$H~?Ouf1~dC1oTq^0G-+=~8x0FLhGXHsjKB`<@s-ZFN%l+|j^oJOW0qWAEn8JpK;7+-;|m
z>yv(OZL{0-q#lPzJ!;$UY`^q}B@f-D#~5P!dRe|VRgY!w=5@7~lfxa=J>Ko>>B7|+
zQqE|4IF`5t7RYAtbRf}e37ktXDu%|R7W4Kf4Q
zZv`r67H&SIlG$UuWukg#0w%Q&OIYTOlE;Idd(1Hm;4F^C7jy
ze+^t>Z{VB{SBa-kmtYYtlD6wtLROowRoZW7`Mc(OVzi(_5|dge?%`ZQav
z&vnKlzIsa2r)@BzZL@}))~oUOUKBGj6cDG35%D9&sf`4&qgL16FD=xB9ezU{rA_}E
z8(Ko@(vL#mrx~m2XxFQ?E!X2Rg|?U96?S!%Q9uOwaCR25uOSwCTk-JfQ{qS_@F
zRMszcwpMWHza}aP*{!IioM?pX~EZi?A7--aG0PW1HfJBV;|hu*s&0
zIvyX#w7TGbHINGXm$8Q-y8>MutdE|IJ?(Db#-?AT!KJ~FFC>3299>yIvu0`SwZXd4
zzJ}`jMY1o_g`*c&hU$Z(FPe5vH14i}qw&Z1I4SjM=XA;Iv~!|`Cit{-Y<*tj&-dpS
N70sMIW8OSN{vUE=L&yLC

diff --git a/distutils/command/wininst-10.0.exe b/distutils/command/wininst-10.0.exe
deleted file mode 100644
index afc3bc6c14847283f32f53327184c6c2806efe0a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 190976
[base85-encoded literal data for the 190976-byte previous contents of distutils/command/wininst-10.0.exe omitted]
zqa+#&y!*1pQTNfii(M$*xwdI8?-Ci}D5#xYU=h8Z%?Hf8XydR*r*U$9*MvTna}fsI
z8M=}d2V_7RrS(BL#Wz!E{NU7jJoT)&Vn#R1`ja{N_#I>|R~JLwHJj83y%RR^nZ`Ss
zo}1sMA@#1;%|G}seb?*cAABUf3q>iUfMOcewwVT?JY3DCgm4V
zVm8q@Y>lIg>)y|^X`C*3UcpB;TVqxVGy$uEJ7998l`o$X3Slf{W>a=On~vzscSRZt
zSivNCO%y{gZ5F`tI#?`*1`vF!o=Zpl-ui=isDrojJ+o~ybqn!}8Ih56@7ZXc7)hA}
zM-q~woKZB2o~1!NU=(FF=D|^N1;WW0u3%#7kyNbY+;BL=)WEF;i2AD@c
z^=nv}?+WNqVojTd=5U9N5ph5%`z>cKSKZp6M=dT5uiEC#j)>M2vs#5rje%%0_O
z>M87)*+1aA$PzzyU?wBm#^LHG-{T&Oy|G%+Qd~OC-PA+A1+gXPwR#;bJ+#CLogkVz
z;^SK4M0)o_2gbH}5?+aobu@vE{e3h#T(B_%k>DvmLx|uh+t$>1uzx
zA46D(teCgqWx~bIrTi9afp~co40g-8^6W5j%94~T=JjIsTZ)Kql=D%*_QcM=zNp1R
zY+uTr*o;WF5JhorlE@B%Fe9AhP4P~gr6QDtuE3hWxeJZ?d=w^>Y2Z{k98W`|a`Olx
zAstE-hq+Me*P%Y4R(K)%UmO#$FTY0e!X@zSU+6{EXuvHfMNDy5us5*Cjskh9K+rUb
zk2H|PH~nWp)QEYUZ9*DAei+4_V?j>4`xO2jngy7nOL2Iq#9fUQ?&@<(4d{BqQ!?wvDI?UbJV~uZn7{iHc}+1?A;6x)9#Rxgh9h#h*WbNZzLiEb
z;#6>nv51RR=ECRy@>zj{GVT+Ys7x2}Zk73q*?sp%;F*LtDUb_X1+Y=f#?yqBz{(f_
z6q^Z%f{o?nQt(d1U|8_Arge%PqHNJ6yc-4#9HW0v<`~U7h~`c+3i4o+KD^jlghrC*
z6Q1Pfm|N7{$;Mhgnl(8UF2|5!M*kG~B1NPpKK>xm6Y3E+orQbn+cDxQj_FZ!yX>vjdo7e0FqQ7|UOyV1c?D3MoAl3grjyETlo)
zr+?(2({}iO02w4UMv>dR3>NMpv$RFH2fiq3fL(j!IW)gdI7MfMTsWmyC2K)0pvq6x8g2prTJUE4vP>`h1@uXu4eTd7mz;Xa4Cm3qVE-)xA8
z6;=0PHFjtOt4=*gxCpzrIzy-hgAk@Y5sB6}{}YvH)gB>ztNsLnd+^>b(>5+e7A@$-
zFB=24u6LCf!JF_^Yy`>uVzHO7Z*a@0V!%P^>cwyYy}CtZBs~lAx|;ssuzzGOFS0VQ
zUm!OPLEH^Hy(s)vBzPE+MPppC7L=*L*W8p!3ogM2DB{mZs~dfQ8FVdJkbI=$ATn5l
zr!g)5&c=9qi!UBU+(rSx$@nd1A3$du^RdCL!{+J+6|V(n;aBISt4%#P2ZA$>ojAk9
zkS9tQ6br_|V|9ZnK>}@hk@P%ozZT<*w=e;cMl4&2DnI~g0|5wm+DPD4=O|O3pz7#^
zUBCYrg|6?!uLa=!Z4^@K@VMnGbhpVHt~qss%G4gt!>?ZK(278I^p1mGwSh{D(;lW2
zYhDRoj4L$p2K=071X_7%ZtW5NM8-!*RWDjzk;BTZEWrX+}Q|C*$4fAg!uDipkjh=D-i&2&t9h4
zn)h_M=o4Jq&U+EK4b_4CY8xm67qDHT^E%)x$ZndGdZX*I;rL+7QINTO9=j7y_10OS
zGC^1oYKXJ%ZRIjR&GAO490f<%&$zcE^H=Q=te8(5Ii=7_{I*Qv90BgJM}bwlirIgI
zHI#=48TP&Uj5mrxd-2*DTnZ2fLE1(cZ=_L5x?=vxvmnH#msK&-rQrGeEq1XK9E~@<
z%ew%4w2YlPfRA>JAk6i&0mx|8czIHQEc*N&j}%~dY2`c$8Erdgf`r2?o(fFs3YM^!
z(<_ZueDP~jb9k&g)a2RJ7FTUAO?=gRr;B-&*&ihsvX0|Ym0o2KRTHd5;c5f1(YTr!
zWndDk@XDD(G5fHZsDf5Y^K^rf=o~Y3fm})y8T2yoUi5Z6h#$fBZvWNbp@sOBf{6&g
zF$s?l$0VYN5LGKwwG7x_LK9;Z)?tDti7{Txy1S@>aKuPMX&dOANZMyhqHMgTw<~#(ZNkv&Ervm{ZT2C
zkqNvid^v``9*^B1oAeoyQZJl`AT|&2NNk8a<9MDtp2wYa9G7%qNKY0&EupDM%GZt+
zAsb$C(vi6ffYcEqy}bjg0cYHb5$w7Tqx-U={nTNh9FQg3^(8XXj)UD021s+3awRoF
zn81fLgz!N&ZHI`|j{`^8Bzy_=CN4Fbvjm4~&*Ge(q&f^*;#gwvw!g&|(4Z41B7y%!
z1n-KGUSbVM!%nWT2q0eLJ6M$Z2$hPZB-p;qs9u6q+hWF5yTiE_1dLZVz@
zUg;3WlN<>mkGPLjh(yB=68RUE>W2TeuQ8HuRc+5wH@@8P5pmV}B+*e3jzM|V}A
z0h^xR!R`jIltHar`Za{DUh*nUGFLa`Cs}(jeLdW?nJ$Zh6gB`e{gAlI>XA
zbppOJ7K1Ee`4!xbR9*)X?Krm^yR4GR%@=)-u1t9xW-c#vqt8(CIPa0aVEJFl_O@C1
z$O*nMh^Tyn2!B8H81ewslofECyc-{eBcJqcadp%iKKuTY-oq#$<9)3BUU-v=fV!B{
z;5~%E9RwC!JWb@`w9S0N+)7PGSZXxvmtJ>TjLrKXGL*50U*}MS7x67(Xf=(FV`j?D
zfLQN@ok9Uzn#$8~fvBsnZe5%cWmV>kE|i^lxyR7XWem)eD!_`1;)?4ky4kl*;4l(^
zgK-%cF;9R{=!&xwOX
zjQnIoD%G>;qN{Lv89XOB+3S(cgvl_>0UfKKxlx$kJ0_U$RAw-UCKtIl+A4jVpm|qz__xE{SjCLisrEW??vFCT|X%=>=B}^htOpCZygzgp%K8;Y2E3SBikx`8B2fxx6yAYz!9!AQb
z<@V-cb~Y-taB?F#z(*V{rYWwBef3(DC45Gu<#%T7H5`SNSmVy_vNxZ2f7IR+`_J1m
z7sCCe7#!#+Nu7PnWUszUXt)Sj%9w(sdO5HlCsEly#}~S;9IeYg7W#nNGRIMC<94QtO86_#h&=sD2#>C&$=K=%oSyj
zaceR5a-xjaz<*GNr$q|fLbcBCF@|Zxs!Sp^29?H;v
zFj?RK-e4*Rs~k9(9tTh8H<lmwYMA^-hF{*&tfCLaci?4n@l_
zT){|7e7U9UQj2PEt1X?$*O;
z=x@Zhxt49fN?%Blg|rOg1{d-LNy34FaT#VDYs@suA8Dx~KOZy0c@!Tr&2qDrs%KN(
znnEo9IWcS)?cHd^bg#J~XRJKOn6v{U14~;z#vF1Cq)X;wskz>7u&(1<)
z()OION~a*nH?@F0fg9(?iPnM%Wx&bNic_2Cu(i{bEOG#IOphzg<@IX=Pa@C1E*79g2b`(#19ZCS*-ND2+TAQX_Bg2O3Ir|GOnNh!QQ0aOglg
zPBa0_HaWdM2KN(zUCj{z%GVh7m}(wR@AuGS2Hbhx6ut9&S`TtwjyuoOq2Pu)&xeto
zn<>YJUpJMi*y>G{3VwgXL{IETCn_dLpwqVP!R`TaTbUGktjX`8dsvroaTzk4Yz
ziK-Y8(RAcr5#2@ML54RbN=8#LolBPW)b
zZ$XA%hdR#(*JyFp_R=kTYYy?0i}@MYAWEq~&FZbb_|F$vHx7U&n3Gl}({NCEc?MmG56*KM_&+4j20dvjr%fY@NZNGdPa00ulI8MwGEw
zG_;)MM>D}tIy^+18iumK7xyD7al0sYB5zeM|FFL$nyDhop*h9z}wO
zIekOfH80^EhhdSs>Y~OJc=0~^eV*+$GaF^vphND~mZZB4{AG0DIE?SJ8pd@N|N28FZF+cr}^Ty@lWn})q9TDcg9M{v6~H{!QV$JKnjFju|aVSgjbmC1hT
z2i;%qg83c)4%OnjKRlf7RGfHoRat0bW|KGFGCjT}--Mno?S5>=eQr_c9EbhQEZ1Z<
zg$kYH!p#igdm?@3;{tIBg789(0HKtFa20!s
zzVnX>Y%~LK!cKTE_(E;Yt;_G~P%cz+J8;--lheHPWdFF8rbP&FWC`cjzL*3i4;5saa%6}e7_>b%pS8pfgOgiCuga4>Sdh)L1R
zDRD`G>+xb0yN{iNsL)7+@a_OAF<~~YD)+?Ey|c^b@{2Gr^L#$en8NSD)TO4+B1b1=g_OD=qWvTeHxU$YFrP5xNNS8xDiG4Ke{R9G{U126BZuB@^fWkzMVW
zjD4~jR|L%b5hzqwJc$WYSktRfDE8u2OBKsIi+ckNL;$JmuZRPL>{rp`NLSGe3^@gc
zmYvNsE6v@}H0;hDICXhP;LgR-ChihFKEnx^U)LSv>(u3~BYt!{*a=ih`DMQP2{aOI
zvrppfgi|FN1mIJc5wgJtPLYt5xC_2Va`s{rz}s+y!`Q)3%mP@?3th2HsK$&K$}707
zFV*`bwc-zej9=1*t>7Ww5cWVsj0${(Zb2&$HibhEZ;S9jW#&nShZc^L8t8scIzoH_
z;6kfhckqxn8-yI5PD!Pc+v5e80T!y&(1e-{2#-?XI5h?)c-W{Pq9P8m!mu64;Ig&}
zC_}9}1e<1K5zHldyQxfh6h5dSYD->Nh1&vM=|K69og@tK44=0knVMu~2soR1Ru_gQV)Xd(VEOR#6IQTb3x(}&jX;%(Zsd_+s0!=^gT
zmb|#Z>dj6ZcI81v8UC>8i#SY6f)d^o*VJ0~pSBR;lreIWLag3FsE%}VWU{ygSZM~D
z4@2V3r@J=ln0SO{p;HcDe(-p{%PHIa&1om{UA8>%BfEc}AAx>)AxFC6!e>t_F7{5D
z0<09XMz|EA8@ri<3a7i7{Li}i_2BupPl)TzSWirbhBl6)h35`
zL9Y=-!@UOeez|{N3|tuC+G>{=$?&m6m+b08>u!p5=tGTpvFZME{rVsfS-L;1#u@>U
zrlz3aoT|f8;0|I~Euq`#fy=i|1Y*BL1?z7NK%4=@`O|vgNP`hZd?=?35nw&jm1ofgd9aL7;Cu^H!SFGTj;ZS
z$&mz%vkS~YfR_(#b~~=as7k{f5o*;mgTxm$6Cc#FY0C5LT7Cr%z}2E|v$8eMv<=pZ?06Yx8{Cq24nDH#
ziZ0c&DLpucQQTeI!WqlE%U&-jtIV1GxfDpE{>W#z|B2tCq^Hucb$*Y%c?YG$%E$B3
z8aVCE8AH`o-MRBH9-M_ej$;&l0yNiRTSgtvk>F`sewhW6db8c;zj>Try*bM!;KOg)
zU;uZiO_uIaei+w9{_
zPGtyC_7G5p1yU}b5jjWfn-+Oy5{2~C+v*A&KVbesOM`fSewU*V7l%Q>@&IPHqf`V(
z8!euoa6)y*`{y|P;d{a6YhltF=R3Y2JQyRK&alNvq2>FJM>JfBF6I?gwDn^RKo9hjHuBW}MoFKEnQquOmsjsFdx%MgcdQ?7(IL
z%tc~g{+$kHcu12PGbEad}5X@Dv
zCOkVg!pjQ3x}^}c@QYFv`=Iz{GhWOxP3<+$4N2*=N^V0oN@LpgP
zT>5<}@N>M?53{`48Qhzlu^rIVJ^VF(0|%tQ1331Yjw7FqmmF~>sBMsxTa&u
zf|ZiAW0$nUUxZUlI8G!=ly(G~y!iAbG<7+k*|ZKhz^d<0Lmo3dm_A~fO*P1|9dJwf
[... remainder of preceding GIT binary patch literal omitted: base85-encoded binary data, not human-readable ...]

diff --git a/distutils/command/wininst-14.0-amd64.exe b/distutils/command/wininst-14.0-amd64.exe
deleted file mode 100644
index 253c2e2eccefa79393827f44f85680536906574a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 587776
[... base85-encoded binary patch data omitted: 587776-byte literal for the deleted wininst-14.0-amd64.exe ...]
zf4yfxDHb;EQ%(Dq{FLm5TMv3P$KXPC3xq?jycAy<_sJk!i#TI(IGak-Lz{9THea12GSCBds|xMZmMJ;_o5q&h1KSp{ zDx@tF*JjuVLG&$ht1A=ckK|p#1=RNK$BEDr+QyHJsyjpM!cL68CzzNV*Elkk2tA9f z8|$mpe>ILK&mE{$tce}QS2Z=5(@#yJQnO^;e^uHQG^Q1oXbYRQQom5pYqQan9J#3kSFc$*rHJj=0BMu-HG-+q5Ih# zyf=6Hs6_C$xzf(m2(lu3KC(U$I(50RO4t$z3NMlMNj7T|c?&g0mzJ*FaE_D`>PVEQ zW-<+uV-M4Ln|D()^ia}uyrY|~t=d0tv)tW5o$ zi#*w1xia;_>|BR+(oZ!GeVc0_uEmD z*Hq8<)N{3ZzNVfptLIbd`Jj5Xt6YWZd8m30QqRNHvq(LUQqRHad8~T+)bj-OELP8x z)zhy3sVaP$dY+-4rRsUMdJa)fyS>i^rM8c%=UwV~K}cc(>Up+$7OUss>giU`{R(os zdj3;A*QsZ#dcLBb!a>bX!o=d0&+>RF?nKUdG7dj3>BOVsly_5Ao;smVL)*{Pn( z)bj=P{Ed3vtDdduIY~VaSI;8#9HpK^)br$C@%>cz>vIJE9g27JREbxp=QN&(N#<|* zbkA`O2o&nCqG0K#+=3K4RBvf1oW6%+hM&iBKj(M8R}`kix*re zK%uzzm4JM9F6*1;U{JHqYS#$}nt=_q+sl`V1H;^WiaK$auY8?v{si%jf5U;r_e24s zsmgWkUHle#W^*ljnwmKkkt5 z=gu2hIN}~sLC?Osw$+(tme|ew{AATkHx=cAaMjMoDBiqXhE*aNA+yTaW0eo3O2eGW^8QxhneCk|C-vpnJ{Moe4%=$Ew-Hh@8R(G68u{Qi=@nXj zcR-=DFL1Yi!y{n-+PPiYXqEg|DNX(Iu@qUOuJb-CQ{J$i@@7bRH9h62qqwsUEH9UA zRtLXAJ^#SSvga#6P% z&99`58F>69o#t>G-pw!DnLqsplodL9mTWTr^{(7g+sqFw$)=Hl583`|Zst%u4CmHn zp+r9TE6EaQuwpLr?f0Fyd;gqcJ_kmA0PJ`GPqG2s0Jf97x(-@7fPI7yIoKda$T^QZ z#pX?#2GJ*kEzK*P4rQC5C{q``qj+SG8|HnqSn3zD>qpfpshV9)5r@ou(oR z+u7(a_-VL~F-7m-pVZ4*<6Fc~`W1nb*D_n@le06F1NBzx5*f*%4OP*;`ATT#VklVdd_JV38lKN}y7=61H|cx?Kx z7ri9CHEguBDYP=@j5*K7IaGCttjp%azRx15fbIcpyz2VsQ}{dYHrC zQ~JnddI|JyYYRIW71;Z1GO=aPv)JV)bK?p%J@W+H7PyY)+q{WRWex)@Lxn+RIhpzB z3(amJKD})rB(zNTLSewdQzDNVLSYelhf5r3;^iG~Z{!Xge=6I2~2k{qy-;kr0nu|8U4 zoIfCfOQ0X;3!E*HisspLxYm;N2QZ(b&q9(x@tcYrw@9cT!rg~=u99_i{?q2!!p-K> zu*de?bNnY?ztelhUL^qx9X3qe$cu<^EZiK;YuFGQh;Dru(csm>Af|*>oZmJq6o2<4 zp~z;GLYX(c2Zb-1WThV8Z^=*GuQJn&DQR}wZ-im1NH?c5P?oJ|Y{n(e4|}y_BmqoG zFhM?rY3YHO+?+c}-3KN)Y?Ul{jMUVz;K}s;>&1}Vvk9m1CQGSI`}fBA#UCYCJ}zf0 za_Fy~{nQ7iKjIga?>b&;UC|c1o-=Jl+?VlFF;_y|sU^StI)X1wE1o6c5<8sa)S3D& zl<$y_Y=ulPIf_QOEW~Y&E&5Vw!5{cfJr`#-Po%`CWUo zeXohJ>dl|TPpXj&5Md|VZ<^MjF3p#)Z6w0-{tb5{KeuYqHMnnWlZ-pYDrNmWEh%*1 z5&ws?H-V43I3NFWup}Je6O$N(&&Btc|AsZ#<4vO)T#uDZTS62*Z^AY3oa$eOGp{mXtU zt!m-FelbBcNWgMT3B=rLMBhoA-TU~RA|gnacNTS`u@H%z`eYY`_f8%(k#Im7^+=<% z=IiqAE*2yNd+&LFNOYLzGEr2b*lPhzqS)gn;fGtm5U79~rS(bxJtUf)tHtz1eHh!918-v?I%j&46-<;;q zG7*O}63Y`b!y+d%bQ-mqc@uClYfEO#zOFTRSZLm$9>GJLKtZIo_4{i-=N*VSR&8g7 z@4-xQRHR8rog~tsyGAAM_294Q!VADcwcpI}SCy4H>rg2BYrmO!y?wR6cK6lS&#ds*wq0+}ytcegOBq^8$8*EC zXgX`Z^#|>%%KL10wv_eRUb}nZ%vN+GMb->I_6IDkt?#QnmSFcq$iJsy^cZ z=fdK(8Z?2Zsso@nZnl(e*`jxw_8=Ep_SvRcY%O-H+ zcKKQ_U%9iac+)Iv>!#r?!6Pd|(R`iYX3cf;n;htGKeArkT#?;UR&r3KRmN2aS3qN37tjKeG(A6sZnNW2F^*T-T5$lOQz(`{pLy-Q-wf8#@>nCYo!FojGLXt_HDx( zxhmO0xm12uPC)^_+A_zitYYc&i>G(RO(p-HMk}}zuz@g2b)#ow`ex@<&Sw+P_$LY8kd%d!SH2JvtGKhxfn}9bidazLTvAds>Ia^T=ls3W`9h6mW!h8TW0qCl z3X=TJoPc?163fGqF~wHsR;H4(FOc2klway5 zeKYNWB(p6?KT*Nq4DU0w)JNydUtrhH&(QpHBz~+$^h2t)4~NknMo29@M6Z|qw2D0- zK6O1rJC%F$jp18G_690a7c;fE1@5bK-gKtyB78Gm;e4T4l0qY~^~H znnrGXqn)a!K}%(85|5Lro5dGa z@N~%nb+WN*3mz`~ZA5$^S8C~#KYSJ!3MlqmW>xkZ7*cetaSk5yy>&-5U~3jx8DXzm zp?#3a3l`#s?9=3t81jzn=U(SPNbSZ(Sv4}C$sN@s^O2)0u$|mdne|NJ^-Sc8H)hk2GenGgu~N7$ur?r%bV-X3szDN8B~4djj zZ>UuB$Ux#3n4(gI-wyrDo^=?qor7Or`IpG&{&s6?QTMjGrCrlK4gSl$ozO~5Y5l!8 z;QI+z!tOo+IeHY#8+ti7;0zK?%V8Qe+%~-*{L1b2hQRU>!;h@{?DxCzOxoBhekcYY z70blNKvqAL6ue1V!BGrv4>V*#5Us`?74kQhANqaiOLHs2*W_1(XAeqU5rto1dfq|_AiJQmGRJkl;B$Y-c_mUGm-ebu-Ty(n;kHrs+TkP#8EK~oiA(LG9#qePXr2?!X zs8{h`8Xc#fRd6ermLdOUYa}Xd7~sMlg(?@kKsA-tbMrH74|WvsgK#YgTL?J&c-QtAd3T^) zq^tKwNagF*7X7>We@v<58o6OZ>U;vr8|%f*6)$JH|5aMgtg8@bYIlmI^Q;Qiky%!C zw4$~HEM!=B-Y$QakGSd!N+OVItF(ewT-&|WBiZs`D{hkT7IJZYvEt(EdI0w-)1>q zXo~l+54Mhw0%DUPl~W-29qE?c({J@J%WV@#PWUV42mHH;e@FB06#kX*Zwmhk`F9!r 
zO89pp|El>{#6KVZCh@O;fAjdq?I!4A&y=pN2~J(@t1J`fx3CeRet*};v+fRrCw~*3 z+&L>e`9F&%cde4|@4}ORV0daX^Ap!dvZ=BX6j|+%dc3| zSm3#La6H$o_wH0?*)y2J!tdmt@Q-=JA+i1WRhKu&8Amu9XQYXF+l^05kU}Y5=JaCA zF@q8`n&drB8c#Y9AL^1Prc-qY&lW{x-a}i^AMEQRilKBKFGf%OnYthBM3zL~=TR$k z1+wo=!sg^%PU%o*mt8DOr=R*c^*DI}BAMWL3;#3UVzY-9#`9}k(>5D1W4*e`RHjY- z%-STj3NoubU|{4b)k2pAr~B<^LgT*JjJU5|9)~H>)-&JL?ansM?TAkAN5G^NfJ}LC$ zBO>Cx06gG)#KSPsgJ*_Ol2=OJFN9FUj8E$alRGhu-$>qba6a_}yTq{PWUA7ywX^LU zg3Nu#=YZU#LZ+=8#cxLZ)5M}G<(<5Jn+$sUD!Doo%KM7%u0$4h9E#r;Js6e2VNnr3 z#J_dRAHmi}-L0#(`*(R>B)Mx9I?kK7pvKBkN70tnW9b11%;0>!Zw+HKi+lue+Baeg z{R01Htev4YDe<9XnIKi+Q(ia3Q!%fQuPbjVu~5Jc4VzS&s`H>!or_GJ=%xPMl9Qpe ziO$N9=o0~bi|EW*Pd(d7JCF{&QOEy;4n18)NDl`VAtSJs?f;uDN2G@vnA9?dWl@q} znAL+9$ulK*9sU&oa2@6+)&Vys?Hp)E_G^!Dd|_6^1qbwm$>QINjegol)uEJPm}ag zH+`n0BbiD1WRrfrqz`t}FPC(rLP@VM>1Rv2*vG@qFetkiZkXmXV@k>ZQd`ZkrL6mt zE8JpDz}ctR?Bopw3nC4-!1-K>MfyI#}j>Q{{Xt&czfX z+~-`eAOE85JbL_1H9h7usQoP{oF@UXcS89p+TG$7QSgr0YSjh6yqGq_CRfJB@li46 zOY_`-6BH>%F!xOYbNvDsCe=m2Z!E;0D(BU268LLQ;O}fO<}x?TeY@hkRGCxI9f`29 zLX#{0Az8S44B+QiA-jPh?qaV2fx6B32!l-n0N6xF0X8QdE)P}Y6}TlX4}{+AAz;^6 zfK>XMLx0WS*k{WNty1ROp>ny|^W_FmK}9Eni{(E005(Q#YFGyw7L1_ZuEemZxaI6c z_i60Ku`Tz!M4|X|ntx!^r5Q+IOEZ}t)|orbUSv=$@~NCFV3Em*xLsJC9Lmf6mvJU| z0Q$WU##>PQmD7n-89DV_>T9d8p8MNA!ML3lVCx?k(}?o<&G_sJ=Y@R++Z>B0u>FA$ zR+3sD1~DmW)u-pLm<(|i`ws=Z$Z>0-)?eusF!sh$S?E=Nrs(xwhe@?S-2D_P-HS_d zt^5hP{0w?Tq{Kg41=mvbJM5K_+eNS~3q{=|$ljU{Jfw)7BuumMV&k!}S&Kulju8a3s-jMn_Wj;mR-M2jvQ zMB7f33q*#4^mgGLpg9ONYLSZ-Zy0%?7_(peN!l=nhujH(;1^C78phq2=O#id4cGI- z-uHb`^=l%)n3FaBc`iv*%Ja#H3GWDowqP)PGQ|Er#`CA>({Bzi1%&)02w4V%Y<^yb zZDgpaDJTXC6vqQanTF#30SGOBX?Ig>OK9&^d=(`Yw7(Ii(@*oG6nFXDE~AxeQc*6m zu%+6sr{EUb`k%rr`!_k5HMso+wHn-BVw#!8k6tSEn{aMh$4XmMbXz>83Xb*p28ia& zwi;y1A;%S)#B1=12X|*}boCt3+RUNaz)pI*Wvm z;4z|)*L7lG9m#u(@`?5OWmDyHsq$%ErQX)|lP=X#C0ufAJXd(-!v9)lSn71w_@JqC zj?_7IzdDcaR_Bld)TwXeUw)H(w&&Go%SuOgL8R?&PMjxsbpv0eClYgN5@* z2K?PP&)7@A+juA_+fOY1(b_5O7T;Plw)y?25jL}d)lbv3fjX7 z7AK&lNfYp%mTI!Q>ojQ2Tf17z$f;v&e2|T5LZofNhNof+c3F8n(F9i(vk*50hO7;= zZwh_f3%`1m;oLyPhr+K>yAs`iuB%jB`4@0fb1h}DY|(c_0q1XB;TM_TeKT($R&$DRCoL!27N|a;!?Vn8a_pAN6 z8UBX0+ADIqFc_3uTh2lHii)T^Yeu7|T|@u|S?O5*hF-qW8*P8Y|H0TXbMq^fXYL-) zyIKo#E3C&?-aI7Y|Kr%fa|YBj*3@(^nA5l2^Zk25D4f2WS`on$8{SUCIW;(#djVFf zuH*Jxz{zRIe(BsaK6?s)0#^dafpN#kQ~?;~gKE0$5_o?qm=%s_a|n8q?|;+irb8?y<$ zZTmHJ3y9@VR7RmYirEWkT1rEws{`<{TN||sYL%NM=-n{Cmj^^nEn9p^F5dl8!1#m|g}x)K86e#kppAZyG?@M5EtRoA)1ha2%hk7*-PPkMAX;V9(ZPQ zW)YqYg=7{-d88R9QoOiO7RCRi$+98Tivc9Fz7!JiUtAXMUl!&WThDD<#nQ%eEkDqz zkV<}8I4{U&AncvRPrM(=mlNPP@@G2Qb0R-jpB9BZlO=-SNSKESV^xutBe@rQO861{ z34Fi{qLLtMpPl$aal1jQy&BHUJuYi(=A3MX3#XmXrSvUSF-SDPe@gZjgjfd9x@5g-a*57v{V6 z2h~nh(=v1eUBk+^iJKhHoVRl_UN-0yn3P%b zy)Sr1AmkZM5iSZ=wW^Pgq0xx9r-+#d?bybso3%8Hc~w=E%Zk8Q9#vlhqcyTB8lPJ8 zePQrIu|y%~vzHK?&-wNZ==EoBBd{R4xV&j>mOY?;+_xw~gBJkbMgpvX_C&(#L^~Sx zydpnR!_R4+8mwce`skYP#|L9%`jZrxm*oq7Q2Q|#W4c0Xu{ok^^h#i;nctMHb)Y+> zck`RF>>>5z>M0Y1UwEpeFe`4(ENfv^i###nooZTZQD2IAx#OvlZ^-AofHS{EtV=jU z%F)M2>y_st$%5$4+r;JMW0juK!Kkn?%L#jiNXgL74)xK8iELrdK2e}TwHIK}@x6p2 zRV|`OU4E{Nr)P&mIZX}uFz^xnXA6YnMz;j5vc;LKmfZ9OH68J+#r+%b6W)l0+(!)= z%wfm#XI)TUnq;Rt4~AZ?-||h!)J6KJgds!9SNrhUB_?s4{pcafQDa4wViY=SC@aq! 
zrq;%8W$)19WqMhCJobMoyW~e@-_TU76-mi<&0K)s^~K=zLiVVB)Pp*!D~B8)kH4Ws z$&qw&G8`a})Z^7%{pornPxyc2LEu#4|?DsKUb|ITYR9L zJp_BfEk*o_!;BA7|7MaT%gekqu8Pg^ystsTsur&~+npw3 zC_g92@sngxi{!nA?iUmDe0BF}GytXbBeI1V>svgJRf)EtxAjWgtxNL^T!~2A?*1#a zuQ8nwFc$s-IS>9@9Vk5sW8z9?e0&OP7%NzBYekFN3D)BiOzYzltsmsJuH`@B7e6Wg z8F)U|=sW;h-hb^gaIxfdF1!qdK5$JfIn;!JB?6nQ6ZH9oW{rqJYSF0%5l5m` z!m~rk;ljxqg_92sBEY^YyRwHgX>K#KLS3SP(F&wBX06Z|N~{ryaULbZVizDYV!=0O z5TFcp)~UCz5X@>M_m{!a-hDuSf3Wn`5LNWHK2JpQb-4Wb)iqs^` z*@kv@VOSr+fZE zB6g(D@d8|1i;B6^J|!S;5RgSQm=jUTOs541=e&F&n}fFt>o{+au)K+6BLln<6biBo z0FLJ(otGVX!%L|xuJ z>~2E=#3RU;qe=D-G@ZVfnngVYgR3{`cFX&!M>)Z4gI0kxfSy^bKz$jtpUUcJ9dqL{ zPOE?{U>!5>DfQ)4w8^3Q9Y!S2r2moYt1F@z=J>>nHwKfdX5=4*kMYxcmnF#--o7Z_S8|YEIC~=W+v; z&o{72UW-eW74|||7E3G=L$uXd}PjI%@ej;(T-)G>L4mS+M7q7() z4iRt`>po1ahD7Tdt@<^jSj%_zn?23R^PyVinV~*;rxVx|Yu~h|5i?dgYt;!KRz;js zE-hevdlFeeHQ?PwW>=(Ykt-)GQmdJ**2wZjEWTxhJ{guWr~C$|{3S8aw(7*PU;0Rg zt@?lPVN6E*jaW5F-9~@885791>cqc&&Ai-9fyr7ji!bX%UoP}QsLwVbmDps0EXs&P z#pei%^W;;pnod{az4hR$hY0Jp+Uti(S}?HmIE^E9D^INg7ovI^LjDP(5QI?SU+`#} z>P1JlUMvxQ67f1Zi*#$dP8K?QAEDIA`xBN$PU9velhCTmMB^n7D)lg*yD>WqFrfSjB0)x&Al5ujGFNx(JV4v2=;3DVp8_JyP=Ef?ZST2A! z zeg;oiY6wfcGYnD?a}|xlrI`X#yLvqlE1z5A2TKKo$qMLY@Q_3W_oZjunTTDWV{b1k zDqgDrj`y%e&VN#Sn!O)@l2wUVTNjcT&xEz?B_VD4gC6eNkKj+OV%=SkN%Gig`{-d) zIb~r)kVT(lAg3bU%v_V9#xOp{;09RN#7l>9YhI|+;6YPqkvfD-HS?RZ?L#3&-lRO( zdJ&a@zxFdBy|O--)Xr>n@}f&xQCn~wC?m=k$&A!u9(Yq1Ao?ZR zXU@gB+#+Q21lcv&+lL6rFW_;k?xe1nA*(F+KYbbY8SS1kXkE4u!IL!&EmBW^WjI{- z_CCd>s!LfysBiNb?~~wk08l%V&4wIyepNZ_x;1x$>OR@IqBAdJBIwP= z1ImN-t!5(K`n0fn8#nUV?s-^P74X@;rMte^<4h5u(D)fk7c#dp2?_rVc z9|#p>toES_tJb=wcs6he*N#-tHsDzKMWOjGK?I{1W=$XQbovllg(G>t=Z>~6GZmaL zyzTo9!bZINrIGP*(5;M3dn#J^0)2E79!@0?xipb~O(NQ4iytS1e|eHxh49Y@D(?;%4KI53Vc*p7LWH4uFyn8c<00O3usjss8aD2?- zqmiMo%UCKGDi+Cla4SB;ems;fkU0%A$G8aiP}iaPlQg`9Pu9cQ`8UY=C(9lWI+vBs zoGVLSc|9*syn*AkHH;_^0(9B^k|<#Ui#W_=1iLgy6H z4+&0Z8b+r0QR+S7%nUhH&yWEen*{VM!JQlnLKEjFtSqCFmF2pXeOQ@R+1rQ#RJNA5 z|5=$v+IZYt{m-0^=Y=;|i&d6x(py#km7TAt{#&g2$}?iKH0|3gu`YY&O?yt6p=-f{ z519O~WZW(ky~nB-J41wr;j6}FTm=OkWVi_8%@30}J0Ad5ku7+Qo8?9@`r$YcI&k4G z`}{?!=rF_L4C`5Xn>&VsX~rE|vl=I;JBmv^H^IA1%ZeLC3Q|2leC!D3osO9@P$8J#$dL)|B|H`tzw#*6grn22rsI zeB!^x|1#aqaCVeAyIA3oIBNI0w6)S-`!DGqwAH9-3-_YsE8Dk~4~hC~TSc_;*G7fY zlnq~FkKihXk*{#jYhDINczRESC(J1#-hBEO|Apx4_RA@9SGSz~BZJoFE5)2UwS0K^(G1+J!~JV~l9= zfcj;DX1Ufekgi6(JFTs{UGQJWvjW@fM%BxRH949k>$$&7L>1U5Z7c)4G-!SU?j;fJeT4`dMxHv0dO}^9T(^%*8l(mkf(qtW zk$&+>YgkGl-T0BqX*EOrn+=>6xEffEy~RS^;rzv1n9mYL#eN%mPAz3)uT}Q0ii19_ zd`)oHKiQ|H`kNKp^fB)-zBwTJxVomZ+FaFgJWF&Q7G@U@3-}(90h0j_Hxo7qt`6P3 z7dXir>FEn$rEJ_8a?fB2%i>bEi|?9}X|PuUAgD+2o_K$@d}vMvBYlze z>sPhL*{XYq9A(xDe3z*rt@R%naPpRsYG_oiP>p=5R+;lgpL9P$Q~Y{STomYR1+`Ze zjuiP>SUo7}P}4_gfEG(Vqu6tFWG*;_daLKCQvl}El9*?z6dAK7 zcu3+j)tobfea~o;^KG}s=xjLw7R6vtNcdT`<)b8neg<75M zH6<5Yb;5@*^*BKmv%K-?Syh=y*mFBq=Da1gGc}c zP|di}P)%1{_* zthZVyfd)+5x4OO6UvX>crRr6RCy?Q4`GDr56jq`JD^YbMl>%54(<=25K*&v*8!PlG zfw|Re?%3UiF_*r7#_FKwjnDCX#_d_A4aZSOadSxsS+u7sIfc{bHyZf7$H^p@8>e#D zQBHBgdG`<#>L{{{LLDdLQoiHVU?4JyyF2J@L35nT^{@#wuNk7OcL89SwHeRUdqX6 z$Xr}rB98uJ*|aYQj{sdebDPj=Xg$g`SbttK8CRDV(Tj8E&g2a}gtkz}1nZ9LbfA0= z9Bty9(V>otImd-M&Y5#~sH4iN7r%b=;2I4d<8ZR@iin9kmq_M!#Os$pD}f>ju)p>m zA%Tsdj`OU#)5&bznX4PQz`9dR)f%b0qe!V2F=F;q{AJ7bFD=!3@Nap ztki*tb8iWCoHhFzCf3=ZjzW7_DEcZq*jltt)1SBXR6|$lE_g;vDixwsZ^8xv=vV+U zs*YB5s6I2?5_?O!(Y4l$MmX=87lq5w$0bt1-*g4uUsG%RsZdA2xD>i}HTo^gwY6)Ls!eb>oov_A8SXXquJulR8f&Gh6$5i`5OjqslP?Ux; z=bi*i3|D~xr%erHU1vhf3UH`v9Rp)Pn^&I)cu+`AY@iv7^QDFVn7*DS;cra%PznFb zg!e&)vF#@OiG<%G{0jddxSpD7T#^ws(i+;k&#Et_nO8Kci{xFszGp@+UQmpbGt^Us 
z7qzJ^N5fk$@4$XI;@!%0(vg2gs)IeW#V$3y|HnzRwSpsGXItpg?0ALgpYNX<-EHc0 zZi(#Izu*b6BTU(|_0@0&<>eh3t6wC({WKEV+hx`J$k=dsQA1TpP3Hvr$cX=yr#iX3 z*Eu~{RueC;*_%6eEH70DPX|H|h2?cw?{RuUa_VlquUUxhjLUS|RKushKeD{d@Wap< z8TN5n5!b2;-XlAwT68^``!aHak2Xy@=b54{v1&mxMT35MCL2|tH=2Na*GIg7yDCdZ zKA(uZ8L!ZBuS5$f?UP+vUPWH467M@!sJA(RdUe@RoRkzKxMQ2T+rR}(m z%!_a#vjf5J#Y>}wKdMBnV)gg)J8|D1<|?J1$XJ?yhWdwbd?p}o2G zv7xD!a&@riJeZV#;ASLXtrZ_w1_-So&+PjkX`H@zv| z!+};4Ct1rXh5dSNS(=mKs^Z&uN>EmoaB!Dzaqx9t3CA^k*>~2I;Kp_4!r*b`4!3Q} zUx!+5fLeB_NBc1ZrNJF`9@AxqF#Onj^6ToT`f#eQbiSrO42pw0s55G<$X$j88+%y3 z8ZposuBzy;sXIq?44b+Y?RvB0&7h>SCZu{8bt;!v%|nwq@GBV;o-PXt_pD#a z40m|K&+KO>^3}ds!!$AAjGMw$DPFM`bgs!_O6(A@&3VNR^*91lB905AGFx`2zrZ^s zQh&w1q$$cXG`*=QwvkV^uN<4ev{silX2!NAVnpk(x;&a0dy|+HJR-@rZx*fF1i*P~ zcp5kwjQPz#)T{;>Q1pijO0zmbgEGI_I%Ymb@7FcQuP~r2qLu`KP_`NtAjyKc_!-tq zo;RP($>`%^#4dHQfkn%>GbkAQiivINIWfJS9cmEuINm!c>|f>wYzFXhK8z@Q0U^jB zcx-nP=u!{&)g%BP!vkK1<2@_A{7^n#mf^-U#adCqI5&;zj13po;&?Jhgv9KeK$qvc zXJm(Xgth1?-LQ6k)C{9iC%&tPV;3YR$k`>svP(S;R|$mX??B)W?y`;vHrpp?YzE0! zwM(rdnK6_?GT?3@TgRIGG9aeNd{g9sRFSlH{Aoo{_xtv&$+zlcxfUHpo_n~vK2}H1 z%SukLuZQ#XGmA)<3cdr3F4Qis-KE;}%I)#cT>#TSN{^b$N=Di_YWHxsqI)>Ffa1aq z-5O*?+ohfpR^WCyeik@<^&cXgkF;e+(504A9uun72@|Tu#Z_BtIXUN10Rs^8lO zKyx^)8H!};B6gz4`&$lB6A1qCo#lK4pH%%p)ktWr2s)fd65;Uvx z`RFx&D}8(2_OMCXngjha48zq-`nreELyO^_ILWvlgMiQb|*iaO#SNE3&o;xA41 zC;o!x*X_-7DvcQz#3t0L!Y(|1(Q zXzYm_1~wlR;d8bP6JH3C3lw2RSiAg^eUqc@f@E($DY>^#m{!U%Dx&z#7VztM2K*I` z)7laq1QOX9LrS{>wTD~{M-HW`HwJTlBBqsQ0{-hS@Q2c>mY^J}72!l#@)T|YEy7~@ z;A(-&Th&1lQ&Ec-DIn#U?cTJNHS2w57Jn8Lt_SmUwLfmu zEkFJQ!XhNJO4YFeBJ(?*eNX3PEFF)3j&)XjoVj8;uZS(l=Xeq$to-0`-P1bw5Cz^M zDK4hAbg7ZBm}hxTS<64~H5=<4eL64JI|Uie#>SWx`Z%*@{fgx43`YU7|!9SuN6^KkM6i~vSirhQVN8&3T!fGTUL8UNs9VntL{zLQ#idNk4_V{ z);YSmFYzem4pD1dnr@X0)Z$%=45G7m7#c~?!vzRBgSwY$6WLqnn^QH&nOeZ%LOvb9N z^?B3yYah*>g5mypT^4 zNU^a`H8yM2GOv<$s@nE2-FaFySA14H$@uOmlWVS>{+g1>WswUrgZ-TJ%k%9 z_8%gPU)HuD4r$Kj+Kyp)Y}_?)N-#LWh6Ajsg0T0vtdue zo~=j-A(fP7znMO^FqN)92~+9hF)Dwfvi$8We)RHeGnTjZy*Wqg+Bpx{gIvB`GK#{; z-TGTA;14UgwDBkj@+AFQMlKYaV&BI5bM|%eXS)13O#bu}AJ8{5vrds6ST%tm61Yh4 z$7>X7yPo3Sqlh%Rmp1kA`UK)z6O&XOiVTvRcL zAT!0CB|>Wy)?9!I&A>e%{KP0ITGa}K6{#D(nZvHJ3tDG0h1GmAfX{+Y?1Y=dkBp^X~FSh zix(7(J;gqC?5VbO+-YMo=Zay=75EPTz=(H}1f=idB_P_06U4QE^z5ISh&?@NiPH`E z$PbT0P0?n#x7ig$5cCc=S=-cU>p+u>ge0P$)9ZVAs~BOGx5!cw?{Aor;Ug*_rGAos zRI+dla#!d{_#9J)@aF=AT$q(X@nh}CV(Ql=c%G;TGt5 ze`GY4g;lz~T%p9yOHVcAwiexcB(^m#Vk>A4@TRtE`I9cUaXn{Pxl)IHt;exVuwbO; zR@DkKNsO&LlO%$I;?jwZ)LdWKz2^M4705`K6#&kUzASMsUh9jx>59Y}TEu61CIIJ6 zeMA&FpI{R|oQHw{V4(VFwd`BoW@;uhb^~ZO_Gk|%KNF_F3c+Gx_cqJg9YiyEPSVqg zw*>v6uTRtOGUa{$N6;49cT8|($zZ!c@|fgt4A4iC7CGw&S#SFDZe+Z#^KQJB+}UN; z#NB0McH57ZIrDdEbrXA_9))GYzYl~j-&yW_T^63di~g2x^@skIWz`kxnd!6YjJD5v zm0a7FZZyBiFc?$Y9GA`HsIv{5nojeQ2?Wa>pz^IHgKWR5!W>fX-P&axZhk{1k{p1g ziuaL)mokZZrEHG+H!F7W`|-n2YCLwJM&BV_O1a9jypJC?(`+jOuO`46CBW2>#Y#f&M2wvgOTfWy-E@}25U zb7VMBpxo)-vl^A#`($%Gw^M=a@qKl$wnv2;=%kv1+B((!mQ{)F4|K6NQ1vw(8R&Lj zS3FSdK}w)|1B>6+sEpq_Y^ntufHySKfKqdmPqM@ewM{sRwfN#(*8(*V9u()6=(nQd8*h0)eK`6aZHr0D$|61Cs*ORu$mG$-DLUq6&wcFj^jjSq<0Qv}Gt6 zj@B72Awy&A;Py?vx{Z_@XdkRLau}iy5XqE9rucC63Q_+ZWsb+9CaIxOVS)k0QBxFc zR-&e5ENs;DccT7#P4dj(!QghQKH9~N{?Tv&3$ui5&CZ(DQJ^EC9WW9xN365)b$FW( zi8$fvQ}|>0S;IUrBTPz8X=e=u0jHFUf5ywbzWj$LCw4PAf0hFJ_FwAe3HNxp1!mnJ z-PcrDu&RMuHpw#!>Y1F&YQk;}*EOT%E+HNoI1@0{E&J1ttJ7sucd`P`Eu1CbR4rL3 zlqiHpO4fh{0G3SqMI!CT1~krOd_e+79`C=&X2|j9HuOlpn=?t4h0W%?c=~Q=ty;o9 zVTaMz1y0x@8VEz~90aTHo1k|&5?*?AVzD?HAxN68mNOn>7_xtVBDVr9;^@u}v57n= z;BaQLUj5?<7Zbd*P~T-?C##?l-N_a}W76WjlsFFt~Md)p&t${z9*e`71Y zZ=coy(#!^gL{0xnJE8gE=u95X_fY>)_wgp=RHQ62dP6JBfkhH 
za@`e6J8oK3HL+n5;oe%oZP8W3stezu#aB9TGZp@U3vypx2WIhV&w%w z7}={c*sBh`;sLh9V={t0(OR{{Z`YNIO9Z7D_gR5mRXVeBx@&wF2`;5&zK(-p-w1b1D~PJ(-xc<>MXJDTEUm0h<=75eh#St zLy{n_r^~VN%sDq#Y?Un3qomEQ`&nMzjh*c#%0PrvKd9<(sY5uAOH{{F2m6U_mVPWV z)Hxm_mQLS-@=)iQ){-yb?4iyH!PDgx=!kbW2bOwBpXQ306zUrrgP0^VuRGd6qe9`g z(5jfjxB{wJS?EFa*7j%D`dX-SignjSA^y$_tjJ<%BR0vZ`v~le+Gy1crW3s7xk@@k zYtfKGl{0h}cl*-eUj4e@!ZVO2wF*ik$tbI}V~)7jGrX}v4P0X=+F~ufNyFr^LRS&c z=wxdE5=Qo6X5FGI%UgG!(Z^_!@HIZ@gjJWT2lyoL5@p=8<_?yYm=qK@ZOuhci^(gw zfl;#csiR=5Mw(9KqZ1gg>GIntaykEvZdd9uN z6Qss>f1L4%^o*OysLjmURL+;oO%csf^W-~SjobK>dk7gzGTdB`4|_=Hnba&$gC)rN7R>bVQfZ0JnysJs`nP*>)xo&kWU8G$ibneg-kx<{nc(+pvB;fjGHB6#5>Te7%l2>8PfTJWLa~fJz@{K zz%bonk+kp)=#TW)GBqDdLB_j#yCdT?_y}g{b(fOeU8MdXwPOz0D|Rm-SLVZD<LMFs*sw8G>9FTwOr+f%RAI2m|ScLjrbdUnPUs>Lq%)# zGilUWuQ%`N+ZO@jT4E}=qfLzysWAcksYHijXv4&H0xDHw=BxwH3YEtCnYCQBt=G$+ zY4Yc=P^X9LgMZX+!908Jf*K#~1st+0nYd4g)7^xzdQVNr5CsBCGZhnAB#~gLuf)AY zoGJJ;1!F#8pb_s^c`nk%F5eGnPp;5NyPeD+ZCpYXeA zIa}Fj5Bn2J*CiiNqtgW?G|vO3_~|AQKuOOdUDct-Nw$A!q(-$-Er6q>G#FQGIvz}%}V&?Ml#lh3eVcC6>MI2Aa zf#Qzd!2j+GU@b~kB9@yqvsE=pk~*0UN?mTgJP1ST zP1f{?Thn0@YZyYCS}&1qiG_sW<89CtvasH7tv1I4`k)~6hEKMi|8b-Z?_>KsKeBP|srk5lp|h`jYuB3L`@~XA4s&?i zyO@eDg}&LgMZD({FhMUYK7yEYEO7T=WXZ`i|Le1z!#E5j$@qytV}|CU~WJkK|{r_!(ST~>1H zD!%PJ0hQ9jQCWJreFfdW7G?)M*QhN3qv@pEX1(g=RJU_{b$jgNwYmS@>a%AK zl>P8TcA%4e3e<5lO(F$Y_209!7}6LZP^`#hnX~G)5~~-8**b8jRrfaC48>5>UZG>K zl9(+qow@e4T9|u8bzll7YKE$3WfwS2LxRkJ)p=5Kj1SjSyirk|SYH}NFd&y1F-$ijRPUA{Oh-)g{TBs>Y8~{oyCBf;S z@`J@%X0qyS7BoE$QFYlr08y^$=!#QXmirQQ>v(;~0EpCUiD*4KU7j#i>h-?%7y2Um z>WR55>ZiwU1LgGmmm~`>r{nbbJB;_1UcO&L*CcAx@hNN`oQU_mm>C->)j6Iay1GE7 z94w#5QZhbR!S=&LtP!Iz6j8;y4n$aQvaBbjpHS7NMJ_5f!!BaQdVyz8@yicg%AmDv z%J+AvYJdIHtx}bYN9HM6jM*JjC-7IRg1-qqz2R&H2&Qy$k@NiPxZnNSTZtYC3ACQ$NwPU zdp^HYton*H5o5Vfdqoc#<1s`O11ePm?L>)jJnxSlw))VJU0rhE5I{AO@o_zI^a zrul5S9Y&}6(*pd}$coPSiTbM0GJ}>LPd!k5% z(Rxh~_-fO#$a8$gua^4zCu|VhTWWF4yT)W_vhe4=x6@(KZ_L0$rKugWk`fB<RPpAq<1yveTE4D2LxKfr zun;k3jryV<{tx~lWRUJhZ5cv(vj2~*0YHU%~t0V6cA^J_BY*DSbTG9Pn2Jdg<;n%+5QJecY zBh@$EMA>~6n(rW@1v4G(>Uyi2JwR3DVw^^8QSZn}jX4Y%Q{|k7!e0Pe z9UoTV91@tpY2~fJ)~xQNg38EQWPP_X8w0?NU0YWpj^~PoJ$rQ)6sa$cYGJ5Egl$>G z3&er7=;(u^%SNr#POHntaHqer3H28yb$MfZ^u%|~rpi(4aL+lRp+}~Fv1~gkLO986 z6PH%mDvGlL)D_omz=g6$Cj=QPm(4o$=HipYLA?r0IOX z&!zE}Y#Pi35}4btq0Fhs2XmDiW|WUXS0#*8yjIsBlT4s^PVA#8v@K|4PVd8tGd{4(v9Q^8e6ZSqs6wAqm z?M4SA*V&?kVHNjFMdB>^MA3xc9TEMkr^by8#)WBk^;%<6W|0Cz%*MxlSCgw;^-Lxb z-KsV!#)3X~nu$Whne&<8Z z@uM1G&$?gIP%#dM%a7)1iWf3{i*JFSe3Q7k9WE_fJmM}|@|E0@Yt@gZo9MtVCt^?b z>*Wo+opBsd9M5N$HSDaN`A2Oe!|miic>V)r4ek1p^3gE9;nE**qM}vhL-|r;VSFxQ ze<1b<>r2g;!b)dvaCpsanT5fj){1koME!AID0Y&4x@ZT+%b#OI7_Oh%kUjR|;5kxR z#5?i_PX=1!wF{*MobFr&my8XgKL3k6+O&%8>CnHE7GEzP@!mDJWnUP4oiHalL+>LY zR!!cqg~7w9t|VLA-r;9z=q)6v#UyDR!dS%CzYE6j-=vO0CDiV@kqj9=cBkhBO2^IS z!#>Nu46T*)bD++5s9wc59R6lSHfS4nE}4Y6HLHvUwlX-N=Ta{_auD9)swS#-a?+%a zzr7dl^<=1tavm3%`lR$Mfh@I>^_&Io0Xo0)dpb{=tc3U*LgtkO{S_~$o%yIBYE!vp|e#LoMo|1TEH1&-aMj2g2|`d2h<%}j2?Q8t+w#4O+9apQHPu-K}j z#hdh^TZAS5zAy^zutRa5SG+B--?{6L==nnbMeK)!mRkku*bV;zAT=$Y4hg@`*b$re z?0nf{3=U*tAD8zyA7#$Bp}l8YceRlrwD&A4auNXyj_Tw%1Pc`O=9z@bMztHe5CZl< ztV<5I>bDUuJ?py#8q*&_affsW9;#kac?5hV^Z#hwwUxFBy>8tl8Yus=83IdWYLkqW z%IYM)95#s0(1!A+#pP?p9v|%6P`-9?dDH6hwI|?@y}aoJ@~4emK7WP==8UC#d zm$>GAKy4)hA_ncmpOOM&+9|M0fFC93yOMgGt+98B(0=q@QlCGH-9tQ3vo~vQfv)r$ zEj_Ra10REdNS^?_q|PM>ZqKq$5p-Je=WO{iqHIiikbN!SH1CnNP01ZhVz=Y~O6v4y z($Zf~rGH0Ir{9&9{zoFwZ@i@a`M|S#s<()Vk7O7p+g|>jZ(qXS3+yv7B>I~?9(}gU z!Z)fR8V5q2rwe(iIfgu2=``fY>Dp{e18##%-5c0}&`}wyxpPrF%Xk2TDbbruph@xr znSYB^Z;(mgCG|082ko8{Z!-*pH=0hUbjB6&S%t`Rq|?C*i?utuVCHITJ&dB+&qLA) 
zK5;5anA%Sz5tZYVb`RvdUMe+SGAwQEq38PpsyAZ;&d;~2`I}`=<8P*Y4&CN@TtA}; zy_5v}cB_&Sl1o3ns8%8P*#^G_)u58sFLmg*pDeRyg#Jhi~rz}uCcKM_f* zi~rCeP5{S;K18H&<_0tQYedNYP#uA>P3jZjeB0j>2WhiRjkg-WvzD(E3z{H-SA~a)LhTMuFpo9=dEO9p$ z34ex(%Q12>20=#6;ZW0vCUPm}fcokj0McZk9c)eu7HF#kq@RQdHwW$vRO?&~cP8#+ zWJmcXx;n`3W=)^QzxY83q^EhGSn_A4|50SDkc_g$`|=N|J8UU073YFoIoU_s=N^&X zM@f;bt2J^&`))2Eu&W!%`O0L#!4sGdREhIZxeXv%^1Q0-Reo*rUnChCGLG|)9cQoW=+YYN**tu zECsjBL1R*qZ{IF0ob}Xy?4zjtZ2Lt1o?|V!-LzJl#ZPP zCorFP@+tKcVWTn(aJWr-V&J!x*-;w$C26V`Sveodk6lg`s6i2q+JS1BxUP*=5T`8x zdg{Qb3w{ym^jq~E^s{zd#@*zQt}K_lRijqL#6p00W=`yRf}z+1tL_N`{!#0#x_c9W zJ9JW>RsWoeu7)iD^I=c`&6S%Pp1J3jgE(r!h|wR475VqP=XV;lGc9SkQC@BDv(DLM zE&n3Vf8;8wOx_>wDGPdM7FXm#ojvT+Vi%B=3_0;pp>@9aQG2%Q&JBt3i22P1{$I;o zAzHbI-=6&9`pzJ2GO)zXA(IPGuC+)M*Cbji)&eW;ujyQoeH$IKua!U5^5+8iQzm~- zkw1mLQ=vB0RG?9Mv! z{(`(3i&(n5i0m9R{{m;_a&e^4hno@D1P-zXhB|WXzC6ln=cD`rUBw!+*mpXsg@yi87_M>TPzVw z*xpS%|dB>plN)`&AJbU*MNn` zaQ7>_low4GZ;)i2Unqz>ZuUIQKrh`1E~*KFnQiLB2Lwr|Sku&XgVVs!ns=ZmCk{aa zu$on;2px{6pJ_jV!>*f5dkaU4La-iy1_My+moOgzW8rY9Aee9{)vSK+4oS26!c_OV zlxIlBnqB*|e8!6e+Y661n|9&;r~Ou%-r5b^Us7I-8w5eZ@o#iFe)4Gq*ET+FPo3I> zz~E#cZ@m;kHG7asC+`&6E^lbj^3FHRsZ6ej7|&gYSD@?TWfUhbgFKbtyk7jMk-tZ> z68;GZY-K@_?OWNf%Xj!|O2-E$=|hjB)k9j&&D(aPEbyokjn;52p8#II6$|jXM`d8O zx0R(tlLh15J==zCZhx<9rCRh5jfhino?yJ~OAkWrTAT~{m*K%F1E)G-tY}-K^EuoF zM4pexUKNa6FZQxt%ox)Y>{$_NJUBj@q8FzX<+8sva{S41R=s%Jpsd^i&7r%80+;&m z3)x5?O)$g9MjBVa@K3ejtmbefN#>`JXqj%Ch$YplYFWzc)($Yhbk$Eqz zMa^X1$NoTrIyT~cEzVYY5|QSlb)agyUlU;t={sF%cc;1rv4Kgm6N0;&0+J>h>7D8y zA{iwtoXg1-FO$cT3Kyz2!F68Hsf5JZbfV+?lj^`698X=fm#d%<(>Z4uO zZP4L6u%b%AP&P^+8_iN|bF9_83EQ5mN0V7?fiRUyi z6|d{G5@j{FdsrIQvY-15p0Sa}BKP2wiubd5GBdlINIU+%s}k*{mrvM64{>dy)2jKy z0jf#Joyj*m<(awAErXc2vH7POOKBbwOKO`hN!4+PxHUw{yPIQK+JAL!FP+zN)4+w+OIj=R(+?ZOeq=yk6l%<9H;wIX z@)t)-GW9DNs4IDSWDil2U)w$Vd4)BeFDD;xh%A@0tSrxN6wZ=qz1VwH`dfP1HiPp3 zqu8l%wi}<4=&Gyg*XoCP{zjL13G~sruCBY$)&1uhy6n^y<_p^kd-lj@XhhFzVI|f@ zQD%}SPL%ahUEv^daFohxD_0g4uyvMKLp4(nt?)i3r>yuzO8rJ?unnEvU!*Q}<;5tn z{w(3}&?j}&`*wjmG!gdREuSoA$Dt>HKG>=|<80#MZvo*2K*%YC%-mLBHrK3@7j0AT zVy5mf|JQymHT`~?eWCQ*qnl3ld!OiXLnHbTo9OqqM1h!sM7IeG3gkVxg8h1*k?j2! 
z!X0(*|0XrMy?;u=;h|6KsATVdEuZxMgkY-obBNQup9-A+wfF8mFnvCvTiK5BjXsiy zsk!leY>nC7%LA|M^4W3065o+>pg&h_MShZ{+6RH@hezw9Vu3Nq3h+XiF}|pzuUJp8 zUA{oP%!R&0bJG@xTq`%q-($b24yFw`f1&P;hAt}Qswm@_tL9QaRkP>U6_&6K);=8^ z&pj7N$2alRO7Tj0V|q&y1b?8n(bL~QFJ8+V(*f(byuDXRX>TQ!IO|e1B<_}&T;6wd zuAxUsuJwW9ZMvQHyyMF}gA*;EjYzabWj~QEZ^^S4{m=P`5)Q9$w7l$tQbM1MAGMCB zH>--9Di`G~)|Gj-lZS_~iyM?=2pC%k*WWyt*YN-ZwZ8An5Ni%O4og6 zj~3~oQRNdOHz9}_nTkQKmzYENFhyho)5yHK&lVTb)m^I#r>R^%0bsPpr@D|C%c~`bRVUP0ypMh zlL2!&$#2z|tu(W}Nkk(u#|cvT7yQteM?<8Cls=wDVt1Tqmq zp1NLwVeKlnm+DV$N8SdCGEoX3>I2ppe>_#^;|Hqqi}X5QBX_qtWu77}bG=$7UJ-Za z&77lAbQ||aSsn(eMU+~TA%H6%O%z9CcMz5=n{^iZLzb2-6WlCIOP-pNED}dwEapC^ z{UsY}B_vBko+{QE22oG^nFOzgad4Q~tWJDEWJqqgzylE(3*fyL z%}o-lM9UARKe9#@=rik7uQ18N!+BcI{{C39XS1HtJ-ZW#bRM^BAMi1&tn_5lyNv`- z0{URGTpZo50Cy3HN$cJeok4f+FJd7CFHiLDD@>CQ(7T(d!1T^WUXa~ys&|~#UMyYI z%h84^hr8_L;AT~?ST?mmZ{6Y?6UW@Al3uYw>SM5wEAQ8GWh`eO7SkIx_?P+UY;3K3 zrDV3^_Y+jxDOB5Eh1sh_rXK>$PIxjqBVN#rVt++p91b1NJ5E zoH97#7;;!fx=)Q>s(Jr>oWOFWFTDR5J?qR2B*ph%7a>ouuuLSsxM$27t8z_XM$>XM zFNl-Zl1W|gFeHYDP0uX(x=vas9XgG0@I;esTsa>y1CB}L94k2|n4CqD)58_Ac#zwp zxJt>XB6s9?RVhDJV)C~95jm5%_YG>sPn%R0@vddl!D1w^;HL*B z4wbpcT6hdW+cvl7nw|_7c68`R-n_UXv^qBsS`Fpx$_zdX1woKSX~qlb76eWsm-VI# zAlF1M7Qr^j1Jx}XvoqA+Hqdpsk$%n31)9En)SsI*ke8b8Y+xwr@?Q5xgzXT|pM~}g z3d-JlEXiuZLu3t&IFWF2+|ca`c@9)xJe-`uk3W%}p`8@s$^&Z=Hjg}%7S5_dCeE%1 z=fZe&Tz-W-GYbKrz>;PIT_gVWuqPuC{#e3H9sLjbaC`&Er+*ujyX{JTyIZm}noe^2 z$tl2k1DK54v@%s-Kp!vGl*J=HV}@V-j-!F2!;7VUCzuWI~55>dB4Zr~ZEf$0k{ z_^<=U_!kOM3&|Bwj)f3J%3H5KBg(W?;T083AtJmJYpw@^Xt+jBV77?C`J- z%SH{u=HV}lQ=&fzwJv%zPJRqi|1_BL_J^r31=EL*YM6cp8|e2 z3Z_GT8YaEOv#$~Y;#SL*Y_KYwM#SH>vi4M#NPHs@>A{U(M1H6x&A+L%PE>W9X}y|bDfH3RtsDwS zZPnY@Z%a*&K9qtYFCB_;Nhpe?@e;S-m4!vrv_=EQj$L<4>bXSMBM>c1K;#XMLTsz;h;Uaqj3=VrmhwgywbH0BHW_Zy~B7z)f+3Bv#;^N<^IYsdv6}7uKZn+ zriMPE1qoDxTxVW2`?GT+g~lgd1%g7BI+yX0UIizOS@K}z&mPF;1nIoY zGgtQ*GcJ(5z5>5k?5E<@%g2#Z-EcL$nf~x5QIiHKFzT(qnDz5(1Cc-`;Ee(znzTaK z2@?-(gpsfIm5eVDYOvpf9k;sF0PQO6u5}wz_RKkW#$y744XvY0Ai`tmW`D)DrvIlQB(>Z=j0EZBTNXOV(tkq_q zA}O0v$Lmr}U}s$*`}bdP*=AK3-<94m@Go$xv;01im!oh zk%v+0rG$TxjDHGGK8+{P1D3uKEP<@>yjFuNzVRH{lKGZd?RUE`!Tms1VMd^L&80Xx!>x%(c2%tu&g;3z3Vek{geuzZN=8eLuF}_mOM6ryr!h0}k8$CKyw+ zW^7hQuwCTDRNSo4*fWh{GB888e@P3KxZ$UfjS^vhTKqja-j(^g$@+byn<-g8eGepB zCcR={4(u)&n`LynyvP2Uf&49KDABNmz6=ycB@+mC$G?m>B{%aqQX{>~+a|OxB?2N( zepyn62xjJ%3<_n7H+Olr+>@+U=#*;BQW#<#EDHxTHRAbuo16Xj>2*40e}W9E_bIYx z!BvIP>$`h8>6_=JYILY(kEg%?nELL9mv8Uth8Ni^$v;k|kLTpd)iWSUVcC7KYQW=J zLTPa(5??%D<&X>N%2C&orZ;>cQBWMX{Ml*cv-T^0Lbvj_a!> z);2#Y-rw=$&?lH$KP%tUG(jJz`E%p{AA4T{-&B?TpEfPDY$vMF> z@j5B%E8=?iKTb)Jrpou?5{&vyOXFQVX$KE`SUWxy8qsK2S7H5u=JVo&zfkH!j4v>> zplkMeFLm``*)s+4+et*1&B zDd-TzV!~!S7z9VeSLMM6YCmsYgE=+cGb^4qOBkhfolD+ianjBA5i~8N$uLj4J$wIJ z^Ek|jBL99Gzh%Nq*@#kk^V?Ea&>QAP&0}b(Ty|BS8FlgI?Z`p-{7uE8ZGR5sBDeRA zlxr(00H!Yw&f#Cn3x~VTePAsOW~Kl0nAur|h$u(17g@qz8ICA~8Gxt2m1%Zyw&Iib z8Fw9b%`^-yr^ECt%wCTXE6F}-9CSLmPSD$QdfN^hucr9{!ZD_K2*tdN;zG}*Jz(Zk z@M3>Vwr2Zt+3Wr0Nrjk=-UvjPM#jVA-4YTKo(03TA^}1iP3Y(!w zedn-7FfGiO1^e}H;FI^z2WOR@1?}%wln? z4L!HC7)*`z7HUYbxXLb)0FnLzvy1v5={1=T@I6EBs*;~NoWck6U1kwR8R;{_H_hgVz%{<3UtE~gwnR{WLNYPm@c_mZg? 
zfa6k%3)T#H_mY|H4%Y9Lz#l4dxRV|33AkLDYI854@j8>&)CPM%8yvB11FRbN5~(L9 z`#9#QUXOhkdmH!?!|5Z1s`K#KKf4rLi^qFdYYkTr{Rr7zU)o*G~{^lmXo7dch+u@dsO>Big2ZG4C3qMBeC~UeYL~(@@{1>qrk7$W*7GA9Z_gT1a zksg%W%8m-5v26Hu%+uSE729^Kr(uC0A6XK)9pBug;bUoSOz{!du6E4O+mROAcC2Vu zJ9aM7@bTz9F~!F|+9(?}U8d;mNQrGbh6lAnUXz#Opi}%EY&fN1jj$-a)MTff^AOJO zVeo0GmN4WZ&y)Vtl?I{LSZdzJD=aj8f(u?+s@Je2SeAK}ZgLxME0R1_aTus!aD<2i zm=$^x`~E~W4yvGWQU)78u@RHh9V-+<%VF0n=}P1ga!$9@)Swut2v6=D-y$hrD~jYK zm1Gmy0IXF}4VV{NYs>Ys@=n5Ji{F(hndMSl+0sFHhTGg(zTsFQ&cJK{)q0Bb8QwOV zPDho#?$SH#fjR8lU&9zdUXQ~BFQ&{FAp{QYrh=eZ9GyQ%ddlG6sIKF{q2O~P5*=fw zr&n02zhh_$z~skx1x)V7g<+CrMhL~#&2$4yPLK-FB*G+4crs$LI=u=>G-S?~kx3h% z2&`MB9>~U!NlRtY;l+eZ?-IKI7WZ<20r(7EV1E<{Yj^s>!&Iz_)Bm6d6z&=$J&8NQ z=?LI-I1=KdRYK~0c&#CI3435j9apU(wS;EPrxH?W>Kyo_DJ})4w8xU|KX@j6FEQ?t z-6@GQGECe*0@EC{2cd>MhE>CYW=V)eQ>6iVRc{EaDsMM11<88MjBIm$#EXxx5inL1 zr%IdPq>m*N+-KQ5`DxPpNz_@Vr&~?MBElgf*pag8@MXNKo2{03fMRc~#l}DEJHTk% zfeAmba|!Ku1UxltNR2>Kj4X(rMu|#g%B;H_08k7naWQVnu&}Y;Y3JyqHn?2)T}jlh z>WHEk-COD8@r;`H=V$P_J8v86bI}yfZ1PqeNX`vw5$vm%n$9_AEh2RAq6dpUCh778 zm@g5Yq|4nTG=i;K9FbtJeGy2o*_U;$Z-t@PoCylOra|Hz!M(%_O0Cd(>$-UwsRrKB2B~zt)$j8_QZ--%cBJ%HMzKPxd5bh! z#lCMnHaBwLQuWs8F>>EJa(e55(~Z;$*|+}X(g;?*Fe1UqJ`E(8#nHLG6~@kZ=2C@T z^=^$`M?!B&6%qTEO0XrPJE^zW8azz?_s9`7kLJrWCBz%zogd8j?o|8B-R_v3VLt&ii~FHv%=LVtkw%poSw%~>EncA0j< zibY5Ym%Od60ldJa!Pmf^hQlPe7Kc0DOED4nGPe37w3OY}Pr3}D36y;kN%PIrZkMov z7JTAdQ*0uKASC??5v~ByR_q*a-U5#cPrt2WPjdBZj)%2xvejOaU~}iEybxE{NVI< zL+oIPpQgYtT(9BN+hPI3Bf)fNuF$Ylg&F|EJe3Yt2GGG$Z_hUpqQ6WC7@loWVq~cT zMVVg5$q}JY_6KE!=ciV1_;1nRSPm;LmT=6uUV-C}dL5HcM|h5+bz#;ExHK>u-vU;1 zcY}LJ$KQqFQJZC%+|Ll{Be0)+b+d+-zra=kUYsg@+uhH`8T;AIa9)RJ^_NnOUc-f` zArLE;WdYqxiB}Plwr#^ORIlNK%2>ki*wqRQjla>bbUSM3JPZ#9(_xxkL#hnJZ7Ll) z(9Z%WvHBVXiXU;Ffg)31gtZk8ifDML6%2pW%b$eu!AmiNA?Z2=hV%70_TCyE2CXky zmZ{xK?{gh^7X~K|%WG}uv3q7~IJcs`z)6hsSg$DxM$XV{_y`tRAXRzGYXKdsLohVW z(qLGG8e#{-xTy*Z*XuQ$6cZR)%m14+7?#7X0SqzIW7Drxp!lO+$0XDdo)QuCn8Hu3 z;5c8eV=rv3Si*s`Rzec+{qGoM3<^!hMC5Csc=%m~AHcNdQ_QpKD8Chrs_%}xO#$n7T zF+lt4HEf<845+0(Kpcj*H`;>Xg&Q?g&Or^agJFcqY!mbvy2S*BR=WC|=^6}6XE7LJ zrmH8ZEGO!9*ic7cmkFdqG`!RbhD^Qu&u@w?43AD$xT(2J!&4RN2nU1KZ!Gny7&O=$ z?SIz_C%PPQl(o^lQuXF;of%8k{4*w%Sod0XgNBe>P(vU>__Dz6)w+L00mCG{hMqBn zVIvj^7!2QDufg#2j99?XiZvh5Yp|mRh=`bVukT9~C`Rgad{G_&iYR!g6%3n7H7wPj z{8+*;7E7Uw5;OHWPKyMC*1!0&*8Qsk@50~&GaGDLie{Jsrlpoo({OU@jSMH19kw&Z z8tsgWiWH1oq1RwR4T1ejG#YvF3&WQVrC+t0hAO>A!}%bzw>>5mIXVPP_o|a0^g;S+yh7E(S8FxY zUmuJSKA>a4x1l32{7J82GHQq&41Mhi4CC|~zAKFd46S7T_Nz23J%}3GCG!d2fz!PK zl(^s$1q!EL$EZk9L_vpEF!a;Qe|=hPVOXL{>1VIh@N^UE2$T^)+@$p{Uhhn?eisFv zTJ^czQ#BZ#ye^jfT4_@#U8mPjfEvPL$v7Y0emr#u{wX^A*Txe5vO*Ppy@mqRaBSgk z)lYU`p~HVoZ2QTCixmvi>UD6aBP<3w(!e{1+B>+oM4TllY3wN9;G2ZuUh0)G^+ zw(9F9y&WsAiY2e#V^t{q{}9|;SzMTc8p1-_G5wmS*YN$7v4r6TPJ!XQi5d(Kp@z=G z(2D2g>ouGs!w~(rFoJ&lFkgY9pI*o7QzJkT1uwON;l;}}Ed2)M$I?bPP1PJFdL7A; zVCan82$i=LPWmdT>}lUxQn(gxt4aC4P0%oM^%RDYh0%@*;cF0Ry>iN^kUmJm5xa1f6`}3Qzi&y3O%pnhwQ|iHzb=FDqbsLtqb7 zM=kV0`oFOXo>mrXc$$YAg6JKK@#8gm4gF&Z!<*w27=CbQFuZViEMRD*8y?kbn1CAE zr5gy}fjuvP5{GgXD8}e@?3oY&iYR!g6%23Tz#GMq@u2)z!oaEWyh5*IcqACK{uZP0 zV-z@PWj{7tqTysgNi6%<`12I1UZ>YE5H%cu{`HexgW;viVhKa#`3ek==`~D54V{Of z6>sL~HSD`Iw!HaxjsnG7yoM(a>Ie%(6ui_52B%*Bs2IcWi7KsA^g7lTM}Xmo_pgDP z{m9nEA>-n`t$NsX<29@gM2jJHqSjz>ZD}z2Abl}fq3cfl{A^s)XvAPF6xU%l01n}c zcb<#iIOj+ln3s%09w}EE*RU(TT-a5{Rec(_{2Rh}%@$UMZQNG9kUGQ~r$0-5Toe5n zVvl1#1;y}h`;X;WnT*2#yxw;ynw!|iQlIRl*u>z=IBZaetFyKckG8tE)Z>Hl-gqw> z(i`u2dQj5;-0u~5!|h#pP~N+;&6UJ^mQYN8!`>F&yb8>(AHr}UJXVa)m%^#X_&g-& z`&sO}yzkz_O$7%bR3+8N(N>l9HiTib*w*pFa(ZV{&lEri$|_}Zjl;wF9p;8&cRtUF z`8hQWcP17hY~FePb%WD_6zS>%$YIr-*wzdtC6L>qp 
zZ|$~wPxMplusqAyW<;rI@jH9_&7zrZy8D}R58aXITPt)&=D>&x z2&Trv0U!JXF3Bf=^9>9lo*eol*MO(r(WB_YO{buQj)n?NQKTXBtHnwg} zWBAr6F zQY{dIN1R+W*6&Q>{7r?q+ZVmU#@(1pLV+`7LjkefQ>=AAP%YW-w^c_vt7C{-iIu6Cm3#RyhnqFcxQLwrc%wJHnkqjPj*Xaf^;EAVkDb zEb3rujObrG=)kD%yt@d@Py~V$ff0&85FGL7F3NHjff~A~1tUhaKP|GXtOz zW-_**P>t>aMv$qvLBwFQ1Esk|I_*H(k%`l*+y#(0n60IAd5&n^6zW@G#~uiD>_I?N z&~%D+<8X^r)G%vs!_++rM+YyBBaS9s0uUh;9@VOzdiG4<0i2FK1aR&cF@K(EC@MAmp1#)2&x|I zH8A)n##$R!t$i%P2I+zSnoYH&LKGc=*J3=GC^(Y!bSy~o!e42riEI$uvSlb;kMN?K zRFd@cX%z183A)GCa7I1!oSNpIDAJTgk>;W((hQCw&4IMYZGJO~H2;YrjTl9m8BwGu zh$79YQKb3t)X1>E8%3HIqDXUp6lrEhk!E}pX@*9T#y=!7?C(dBrap=^4@HsYmMGE` zMUm#rDAJgtNVD^l$oP3BiZs7(od)N7*4dN1dsZ<2gx_=vC?rgs&opXc(ONA z(WzAZz3C086zAVnoJ{EcJA0%M=Os#r^Bg6_InHIY)@_znphXnz{9d`S2+928hll+R z&!5r>97Om{#es8}iunv-{4qXH_U0_1<|cd7mFvmMHBGtpplbkLN)aj;cq!YY;iZJV zG2o@FzBOK^+}jzv-1MD_mxl%i;AQIIsPOVvv`EGaYb@~c*#|OS2%HdjNvmoD?%5mL zhI{t1)^JY~I|KLQ8b8d-2!Olvq^RJ23M~qR+xLE0xKF+_7;dE>RBq7VZq*MeUuq5a zjJnRieP}OoQ_qlufdO#O926DY&!I(u{ouRz!uEso#cja7aDCfwFMP2z+_$(p1NSJ6 z&&}xpa95;81@{WHC=l)+cZ7v|%9WN(&o&7f;Qe^=?hu5CP2zR(&EGip172TsF7 zb|Bx+92gZIo0 znq6l290~{HUCF|77KzyT4Lk7NUCU-*YUP6`Ypal&$EjKF2PhHFk{SbV#Z8-jedSdU z+E&nSymBtKpPdNWv3E2VE3zrJd0mb1l{6X(JT!1}s)I@Q^~s+MJK^5o;7uAYalPBi6RHP3=zx~uXn z%N$7`3!hkg@!9J+Pr^F$I=o6qZj=&0OF8}KcBa2DTzV=!iAtBto*OQ0h_>XnGyU^d zg@HAwEngxn+=8!~*UM5L9x-y;m$ftfu(0X1#z2P1kA(l znWf_p0J1x>)R}Ah)@`W+lQw~?!Mn6L#|&z~uR4CWc|~7i{_M074t%;?y^;?(Jx978 z=lN?UWHCu{q^^vf>?|&JetcTh?FkjU_-!G1H0`rIyo!?@-V3w0Rd2J@JPB-8W!^ZP%nN7|-`Wa0>8MfRem(78hs~-~YEOK6hgB2!czUj=vUpD^i zxoBUbc%HeOMCR=<6%& z>f-E$61s8tN%Xy77uP#-8+~JJ<5szbk6Y(D!-gfX60Q!;)V-utZ}|NLRNeNvD-!6% zb9gbX$=3zDZxYAB?Y}|pNjNSHCuOt6ZSSXMX#scPraT_2aM3`*CWaC z+Uw7wUZ06xf2Q2)E%mJ1|F}_Q153T!@4a|)f)8}*|Lfp`@74$N!6$eVnGZaPHbQTID)fxfUzeLgjjaay?hMj#92?DA!YzYk%e1lUG~3+gk%Hr`b!&KW4m=c?j+=HJ){;8YhR;SF`!-BF4%+bk z$YYO(DaSz@-h>lLYI#Iw*b^iF|&p3!uv8*2QMsyuj@O0fz7eKBG0i^(?C>s15^E@vh9f`p{xXdBTZfeLNoFHf>dE) zhRJd-!j78i3OI4%$dPHvY*zt?*{%$Aw#$>S!Msjvv@Gi}ywRKBE*dEuOtMrXa-OMX zy`}mfx+Zp7TdLnxZ~h?PIA_uZ?5pS}gMJ1G2jeU?klpCO8>QdbOeXId>ehHze-0j& zKu}n>Fk#`%GoV1%T<6hNxlmxrSE+F z#-a40tTbOHe&c;-2xVh{xh(wUnbsCqP40y`YYQfsd_Aa@Nohn1=M=Or6>~O499i^_ zAIAJVL-9zHvx~5BWLMX)K;qQ=21&$%3}qNPM;?YwYm%0lHDX_%jCuBE-v{crR6CoG zony^bUw35h31F33dhVgf4OTh%9i3HjF#p4kot^4@7i{71TyXOEM3Y~j>1>hx3N237hWAwFxSytP?pN4M_bW8h&AHElcy$+~x@VWT zXOFCdLkoxLhU!NtGf**hc82BN`|)64wiL`_6mXb9bL7a}Cce(N4R3mkm6?rD{M1GQ z!BUL_{7hoONTCJ%hbW+!Mzd6(jH@7l{|mf+_EL zlPZ+u*FT9QA?25cXv^eRUz$1zLosKTzUrP(%lO0Ad(N`9VWYo zcbM#=8z#HJEHjB?t_tFv)2Lox>r3JtRdyv3@3fX(Ct+#%_?KOiPG)p33Sf-CDvS{% zukL6=UbUd(DY4C-OK3x0(Lg7AKe={{eU8+cD^+ULv)zttq{#k5_#Jy|s`Jypb)o8q z@ImH-+4+ekcV&sYa-_G2Qj=V%gj_*<(r`>5ZVs))NE4D0`;Iy$DKW@O`Q}mi#yKOD z5`)Rb7~x7pW%iu7qr;RAfWrNsI-q4q$=5 zD=aX!a-^cYGK08bUMp^ZP_W7i6Vex$WKdY*|HU(wc-JT+WG5h1hmisUUp%Go#Z!9T z8<3ayLe^S0fiGmORgOcNe+F9X07OKLwbpcEI8|#2XW^lh)_Q86s4ux4&|1n|zh-j_D~JQpm3Kfo4b=<#z#q@J zP*zh5U}k?3B%X9NHRpokNKG{pvw#PgblNrw53SXd|C^tKni>T!B@nt(YHC7@#va`c z$IE85GVaGrK?UaI=BGL^_rNg=CP-QJ(=)$*NM?tA%#;qHtXiNPWI44mHxaXV^G;I~ z)>6_MLP2Ug(^&EnLP0*pUFBTnTDG2UvD6pj8?9kkZ5ga#HuQQC1(DY3N;Q$zq7^)L z^Edg%Ih(ZBb@XExdJ&tF`I3o9`rd)c!+FFP-SAK@EX+5Iy=Hw4#vsXIR7R@W@{+wo zn(J*`<&3iGT4PMjRF}p5G_^?`f_Wdu6G6%4u61S!Ekm7W2rZe;lZ2L0PK(fzWvRJK zPM+jjM452L6=|{xTXJQVZx+*J(?e*o5=22P$j6urZjx+QmcIKHro6mQYivc5^Hg4Z zh}T&SCn54sB3Jb5M|n_8u6SUjW{4c zBhs=WHg;Ma>MRq7q224>Ydm=OkBkSmlAmbnt+bQ4$@1)QkaE0NFQ5Q9)+t)qxDHOePqH zcgkD>G2}KyQ7uLZ>hKN+wlNbu9aos)mg+9*%_?FLSmz7r=awK7Jr}8{$iBqmdy$eJ z_GjYZGdwU>#T$8i*L62od=p+vXRMGl-61Nv)%fmTc#H9!9pk$~jPL%Ru^=|Udr8WB 
zQ7+(BLjl?VuOh?%Z>rn}Ry#469qK%d`T)kKqnzE*2V5bBdA8)Ttcb(BZGD)S92d;I zQ&r|AWLyjUF{SxF4fS$i`6^2De=Uq7Lk;!7$W!G71nyey8VvX}o{bLihl&f z6TfyPrVC}{`-s47R$|zB0dn&nL{m*}K8Gir&BDk{NX>*C!=UdeWF)R-Qj<7}Nlm)> z8&M6+G@E{|3*xA|sCMA(uSTiK6Gw%RnqzkT;-%)k>Nch3Rl$sRvC4QsqH<_tQTaD? zUWIe6-mi*Eo$qEsUsqtmt{VOzNMBFabDx6T#CNG}@!d}#Fyp&(dyPpnd2UhF^}mSl z8rzYll*)FQYXXH^mNIvxYwL3i6QwjgOWHb#>LG3IBX*-3#&^)sA+(v~`{YzJrYtT3dgJ52USWtPn|CKSzzzwDl8sBB*oMI7gG9${;~?3JIz{ zOj*0-)JfhdC7bVdrmvmJv$K4+Fm-)X2z5Q#Qa@4A)*`86Uc^?nv0REP#hL?)DiLbf zmq`kF817X4&7w9aTdomU80(+%1EMN~*4Y=S+^3IXgLQWA07j%aST>3^M7)p&eXTL# zw(nF%)a0Lm58Kk&+xKV%Jct59>Fh_&`Pu30vlNvXL!JGuQD>huJfzNk^9xmHyD==K z(RGJ)_FtgaseogvvoGlph0Z?r6{F6+2LeP9jn;h~(Afr_WgZAYvNJ<3{*%2jzjkj+ zc78)Ljdb?8bI&r$&ZkLUX|i(}o}!VRq_MLM8k^<=Nn36EZcD5 z>Na)>Ej2mg)j-!VcuyNJjDO;TY)i)vajqW13aTyMWyW{m2in`gUYMNUV6(@uN=LD+ zjelw*dm0eM5QMduBk|2xH{A=@=a>k8N9tO~g<7ROS+HYWRGi$UG9Rp{*dSxWo+k_aU(%sZwPr1o49!y7CF9|N z4&mXS^F!g`UcJIXV}(Y%zbMy-Rcbc2uwk`K8(;LUqWOC^zDO5}GW@M?7wH(dC?y%I`^Od%WCH)$A_1M2E z&Kbz{C#x*~XQyFCp$h|DW_+2+!{PZVdG{sbc$3r7_AGE<)oI3Q;I#17E4b_cT+NkX z;X-5W`Pe2Smn?gGspH_jj8D68ertv=gA;gg>S^>qtZEl)|yZ^I)3D?cO4n zeQ|7LoUxzDcu7iRI6q?LV*#INdIZYvKtDaTB5XgEd9efKHs5k&%Dp9gxksWu{#?pb zbp@A1I%wGh4rDW}FkyC*7wLa_O^*`ibX75F5pn~ygV7ut3f>6ljBpO+Y18Z5H9Xz@Q81q1 zBZj$N!qQfoWf|9sbon(stj?KXbXGdkSwmXsEL*Lnvix9ze0b>+{VoCh@@4v+tGDax z4-M_gV7`sVu3e8~on0~euQ;CCm8iFCu(5!yGo5quW;t)jo8vq$&*>bN zH_tgB@AhZm=uc|}&0Jm=T)aEwdn~n>yj$gaYL4O@$E8-~%yIRFPC$AX)(FwVrFy9r zJkx=Y%xMYtFj`IC2PuI_d1ks&9yi*BO)mNI_{es)p*~+;-g)fP80}Mcwb;`}PDN0M!7SoKzzL9*T~Pvt_n3<% z)KZ+#2)OUt2JW+g7DdzSe&+=4^Ns-SpY7KM?(4DnLcx8-+b3{;L~!4~4czB$)^Pv7 zH&5XHh~R!e8@PWBzIO`W&);?e_eTQvtKm2uQr8x2)NudPmJ_(|DDKHNQth03g6y2t zspM_7AYV0gW*AJJ)$29%-|+@RKka6cO&xi?4fYPy0>=3;c>c?7VDUVSo0n(eCwG-| zkBIs4@~^3zzM#Yx^-^PB^P7H0oo1A}J&z=etKnf>#ze9!Ekw{TCkO!?RpS3h28 zAcI6Rfa>Lh%mAW_@r@Zke-4pJwSc^L$qAVev;aAn5mbR2W(3V3Bgj&ZLzr8#U8xAR z6E7P=2}qzBLizZ5p8Qp|h3KoMkJBkG4pOAeJIR7*IN{A9W)Fp<(e6QQ(1TWFt8R$kV32+lUZ4kd0_vl9d%{H-F`s3q(vxs5NOpQ)$^u}}5Z@5yB4&^+Im z%934s*8LW=zQbf%G*~Qv7c-JJJJqBBb+eR5>RQ%D+?BWaKP(dI`-zRHiEp4uUP{N^{E;KvQ6t@v@a z%#W!$KmLMb;Xm$)iu3NrVoO9>cooW$Id9JW5jgK`Bn``Xa=ehE&3n~WmG{1cwcG(c zY{q;{hxPD?X{~tgeg|G9O3O2TiF)|LyQAX0fzj~Zw>2v7t@A|Sy=Ejm4tej@3sv6R zg&B_y@ZP{#o#4H?Yg_T&3q_3grs}-+i`T=?2vKol%_C7r!%~!`=;3=KaOAm28kQrE zg&t17K;_5%m?iB1Kb}3a6a0Ai>Q?-Crp%B1bbkB=>EY4rM7W4!lI|EJM%2S!;&e4d z57$NDyjPGkEa!#L!)&878xvb`Qr(Jcm6NM%={dA?v2rRKSMKoK?6r9oKScxOt1Z=- zr#0oSu~fs&fQ^%}n;hfhP3#HdWOD7Gaq@MD3KM3ST$rn}HtfQEbjzMj=!oE zOOCk&uM(xl&(zfO9BoiSXL+z;G95^m?We)=7&c6{d2UIg!Qp6asNBsneiVJr@(pIA z<>i^chi{KMT26-7{hV|RH~p6SqI_-z9XepCZ!AaDAg-n{)X*7UxmUALNI7B^AdO)Z zeKrc4zKDj=<++=k$>q7PhZsiVY!S4Fr7{ks2LukKxw^NdUF>ADHu;vu$TXH74};Li zw}X_ibZ>bq&DFg{Bjm?Xn9R6ht-`n`>w{@6?c0?0R-|Q|i^Cy#oH%&v96EvQnsh6U z8_cnbvvIA&IU}FZ*qWl2XmKh7Y|21h-gDK_vQ@cWV0$l>?j6p_PQ z*$ILC-E->EY=k(W~M?9nm)zNQoW{aQ&Y{%)}`{yszJ?_b*(@%f6VIN1l;8(CK0 zh{9w}?tW(^P9BT2MouoiE+Qvy+C+Pp*k-FS-r|SRaDk((bR89Km3u- zwD%U|mac7|IpGZM)!tWwCIK*Bi1;H01x766~24jz$u?@2+RmFphm`h{}6 zL%F(?>u;3njdT?bCf+&PnOd_R(VY?G@-+GxKtJ7a5Rhy5a#ro~1Zu?cc=n5l1!VJf zaehY0KEml<&K%X>NS}5sNx_CjE#g<4*N_Ct0T8kS8vU7w5O| zHGXF{FV0Kiadh7Zo5qbzrPxLV=H+}eOG7qt&+Q-grZ#g2II!phxhbc zwll%hTu{;lsN>y5JRM8KP85PPfp-bp`Kg_6MsdzlS$K0^C{3XQ)e!e)htxNpDuJJo z68Tu$5;~D0Smy*ct{{&^t?4dJi`2NpLB_@vu*Oll=mcvPXA?n;rZsgGHy=-lgqz1c ziV8O!q)lnW`cI0kel`AgDB8^9I!2qOD}$(dH4fE^=O29`ea9HLeKy1hz398zeN(iguFW;>_sjIM&|(R&mPI8@7%wAaD(viL7}0i zo6GU_ai!4v@$kmPzGCw7G$A?fSui#6T0HRL8+_Ro>Fv{CQ!h6`W5Y4@G+uZuiBz~N zkrNwy-FWdH6CU}kQsNVc73wZPK;?TV0bhfYt8LAQn8G9013L*x$;o5gs}$atY2R8> 
ztS#SKdgIZRSS+sdb#sXKQ)%|CUS7OkE@izd+2Q#;)$Z%*5PvU!-sLcF<%NSLCvvd* z?OP?3NRdRSbSq!8o)^uQm5A74vAs*La>RlhT5oeZ(ldCEBfZptyR>3)gY?)LI68*> zHxrk}Ir~yf1Q$+_n2559czc^(B9bj>Ayq6U+3MnaN!+*w zSi1gIb@4#fw{`Kjg{B?tOCDLcbf^AhA1`j;{Tr&rWjR}HIfy-qhq^c<=KYPeEGw$u zt8zxUx+A8x6}!#}k!TiQ7eCUTcgDPa{EE|1E&cJ9CmJgg))sJ;ys#c;5H?m}YnAJO zyQoA~z2tCOLW^O_nBAGqOahfbq#NN3thoI41)5dirX2=Y^CNfx=woaITm-UFfy_n< zj=DA%$ZP~Y@GJ~qqx(+v`&TMll%{b}g19yu6TvyRlsHM;XkaGdAL);*uUo1w_LE_( z@}z@`GxCIyi|$vsNM$mI87FTM7x7}f%*9`~n~Rti@mgup&@f!|E*6xICoU@D>*5qH zdfC85t5hyp2QF%;%8Mf|D$uwH23&>8N-{6SR?ht9lwY)*8GTw94tZ(8@x>vXmou^D zjR}K)(Y!I>)G)m9hk3s=-jI(2G#Dw;9c8b2kHPzU~@P=DS-gGc#PbTL@ z1Q+1SxuI*z4+M;i2DCGI*F2a?9pOji*j$>&6>j3mvNqK%r) z3OQ;TfVUVmaj)s@#t=p9)@)=qjG!31X=rQ)yR|ut8s8_TD(v$yvI;x*C=8>*aho!X z>Ny|`w>@xk0Jk-}jt6ej^w=Oia4coywlD)KFyDL{RpGQqWqf`VWLA92FIr|b_YK2E zm&^&^qP52b7iqdKMl$Ot@X*Shzi1v3dWYemF0+0GJcN0|5PHut;n?WC%eu!@@4a}| zai#bATf%VLqcZ}y?S$Uzu&v%L`4=s--slmAi!Q%0fQx=%dM|sM_evP|n{Z9Uas^$# zSFV4BAx_W5X8C#nR+#A7t6bfQ%6GU_(EB~g)zejeo&diU*`I@XQqZC^*3}e$4afOd ze!xlx7I}&SSGlmjtJse$N)RJVkVbIS)^eZ8Qjz-uK62P>PTz1=cuUI+idf zRdG#&gBH=)@|jqaRyU!At4)FT2SCG;6Ipf|K4S(o7 zc-8I4!U|e`8wEkhDu=g9mL!}*f|1R{{T-FGUJb&o7dVnEoXU@_rzS>*(E;ZV1<3t} z8Ia=`kYiXWqKpnDK_LuNH0Un1TFoY}{JrJUoc`jA-@7k4KtT%HcMe zN1?%!#!_F=B0V`;21SuE5C~^G#G)i7C#OAb)07o@Px z-Axfn9OwZ_K(V{x{kCSyMeFH~_i=i|sq4S&HR$pFuil zV&Zn2sC(Uf3{fV_7$ztIM0Ooq02mMq8U{la!4m+;avUHIK>9O4ES7r8Ba7H_Itxn` zKsp%A%OSKJlIM?D$Bu;ab6PbwGR}L-P3;Mr-7D-uV4d;f(>KsC!$g zBlE^;h@p<~#-ILoOn9SztL8?=`M&RBfOGO9LU=gNQ_{`*EcH0Uvz6vO4bNj>4j7+T z!>}?wPl7>ZeCFY=Y<%91Saincnb^)@e7*+TmyFN5ErHLAuvO3ay#m`%Wxs6h74khM zZzy|SVcj~LxUJAr@Duqf-^l!x9prvIk$%S0&jsYKe2ravlNVRP*3DA5*QUaOB?dCP0~FqAAw-zlL!uf&H0 z%QC(Nk}(S|TdX=~3Iy7M?)-rroUqPna~EW>vtW{yX=h(=p)BhFbNL!ukh9oKUR^co zT?u%*vrt@R&wZ0zq_)!0qDe3#ecj~yXCRqTNu$&io>Zp@3rkFJc!C#I)z~bYO5-Q% z6mETSS$^hI0#gm=nJROhdi=XdY`rE;+$B@%W$=`VtJ`by+>l`N%soUjo6Tr8iObyV zyqbP4qn|=9ceCpTyZ93?Oi#Ch*3I_JwU+7^k)7QjE(T5`0qx3o#L~V%%D!2n-R??8 zyQ@GeUi`tg@-;@gdb@bo_dHSVpH%*nM7h8C9?tgr9tV)5M6AFiaKKfsb#|3RxHtJ` zN}r#O%k`lW%NUK)>SywEcA`a=+r!=zd9NDxwn z&fm{u=#GQON`{Vym9G-pH(Vw5@hwBU&d})GFz46VM@M4Nqmpe)hDx^MM260|QKL`V z>|ZB+GB7|=>9gs~_S5H>$_Qxf(9r_a_ z;CQLPgajdF=#BqlGISQq%+NA)(HD9NW$2v<)S}Qi=Z0Tr zA9Y~Jp!U(d*qA9#!9*`ZsgD}s5-9Vc2l==wG5^Tn4~sv4|CT0>T>~WxLh>s3URTQS zT}!h|m*ufpIhu+a>AVo(9?qw!O&0T^7u{uG`qVir`+eW(udtc4!LP7c(0(+RW}yV# zXf7?$hw0V#c)=Iv9Kn7E<2N2tRk`oEYWTA88N9oTufI4x1D9Tw`tcd%xJj(LETgv9 z+VNH#IIe{iP#2FFY-|Z5`+b_9%b^!o7(OWV!njdc)ZPLJgx6@&EY8(ac#S5_aCY!= zyp^8W$KdWG4Mf~m1@0}=;!VDFMv%{T<^g0D*WwfcrZ@d`Wf3lXrvsu;Py^sNXCk0> zZIdxhP>*F$pTVH+OHkj#i`b~jmt6+XlYHsyNxUor_o?jOQhynckVN|>k&ZBtWWc-6ed{M`a8_LTGofz;xm&SIf$e?7dsJxn@YJ6PY$17fn4e5@ z<_H(YW|;3)c=t2-MJsUsY058;cOQ@x`u>P`hYFl8UUB@>SDb*QP&u{-&cWN&)jc6g zc4tWPezMc=-xTKi^`C_QUiew~?`u8}|Gn_b@ZX0^;lDrS4gY<(FZ}nOd%}PBd>#IK z&%NQl@7ss(EQp(OJRxGZxhLd!Z~7?o`+Y=@5a~Z8I)r$CBjG>9`#psB5bt*ozC*mf zp70#vy_fJC;{Dr%*AVZoC47c>{{`VO#QUv;zo7A$;*iJU#EG}&2#5XTna6yM=eI1! 
zg8!YodjXv8_EN~JE$}Gd%ie*%>?sJ@Mp5Dr9`1<;6A%%mka+`3``wU41Mv|b78^e- zsf~xP1H#2=@jbF=MZR@xHqQbq+M_w|O0(Ehh11!}HMdl(%wbI<-#fHe4i<+tN@t^i zXdPk(U1V}~LwLb1)CPA~UbLXy)L>~6wGbmeCBcpd3<;$7?};}V8c6MxTNyUKUPQb- znj8Xev#DkkZ!3Ww6>k?CKiJWtsPHDR7^2b9F)37g-oM|_p0f&U$PwUiqs5%7P&?$U=#VYcA#Xv0fZCx5>d2S9i#|61b+UeGDcnr%`O=LtCVUPg zadk(JkUWIR0^WgePleg*Yy68xaaB1c=b03Uh_ft12rqcO#;q__Vdrv9j}oWZ?r*?< z&fjRuePhv^Hf7s#7R`r*=^r}%M|>?$IX*=bAxZ#9J=PO*9}4WxN)S8h(D%VN2>_;R z{q^!xPf`u*}9`u(%->-V+~_51BG+|@F-e5~J3|5U$kfl;oe+53fhpW=0H z)9;hC_M~`CT6$Ck!V)UZi2*#X^P?um0w5coSQkd`R>Q#V4~v z;v`;E<#U8RSEhv?w=uMA z)8B}A6CGNj=9M<_GVz8`ymGYh^0>Uq@e^qi!ow?gsVQK1d0Br`i8s;WMK%mMI+TK@ zQN~a_*Ur3)s!!&$EvGh43x(&SDyK@|hspZnj64m=Y55Gvll3>f@TLtU=X69)g~3TZ z{7{auoEkY?!O29NQ=l+j502r4)8G7HjWtf%?2k<$adNc#VbF@b0uqI(PFr$fnycyl0Ck3Px#+qFHq{({9UUK8((bp+8#n9K& zjndP7c>x15bh<=6-O5nua)favm};f2-ZpnUnOCr`ScN%FWLrs34rTneTY448izs+@ z$pyVf?TM6s@)=70MSA{oL*?&-`kB%WYCr;fqR|0%OSyUh|I1?#!v{yNzpeb6WLdjg z`Vr@)C;+^uf2$dd0R1A>f-Xwuv9nWNj_56*o)x z8kg=GA{?B4#~{UGT4%YeQ8uX=L9uC3I&iLrgJ*LY4!`U?4sM?sH4X;$=pYUTppF>f z;5(e7qwv8q<3i%#BZUtv^>O7w3s~WGjYuZf4KgbraAX!fCeP!I>~Oz~3LGQ>c3?Ja z+x*bIdn8<38JZtPguw;GF!&)2{E%KQM8ObWW^2g!^H>=fkTXF%(Q$m-37;NxkErnR zV{!-aaS|$Ni#OVfk9TnBlENJK=VvCa*>}F`^%a+h6 z-Qih6qRippFS*pygU8?c;@n2(2#%)s^V95ZYZCbhsxFe`F^GNWf8vGwv?ROGkm}nS zsKW?%#F@>vfKJezsH$oO7|%_mm%*;7Q9KB&#~3nIn3#hlWV`NV^4DRYmNt7X|p;n#ML$V zGT59B$4*<5&cwb-DE}{|3-y8zjDeN+4r8b+?8A?`Z9kr+we3X|0kg1kZCiM<(zd_p z1z#PuZFd>)wjcm+)AS-z<+fcNfHzCMJ>Q79{xagUNsiW-dmU#|Fw7WVvIZUNmg>1x9M zc5xjg{6tCk2@>{b+|OMQ2f@B}qz3q%qZ#0;P^GqE8FWvDeD2^E%JoC#`nGa?UAeAN zuK!i8FDci5Dc2{J>*LDx5xUBH7^|pual5S!ff2UaM7|%_=h+$`dtahW?#JDOil-CAHh5+E5}m5s*O?x^I~t#Qol}GjzAJC{C+Il z<~F(#h3jGy=}F9*fTIkYbfgxMy_K8f$v#KLR`EWdssaih!+#Ly_;y_vsbh8l&TicA%i=d zH;Y^e43QbUcq^anN4UDhsJs{1@MD-|PbVv7am(4+gr3{tr7q7gud#D%WXKgFJuPde zI6Or-gGrnTp+%*qvHa2y3Az25&KO1S2} z)p@x7@vrFNTGdW^{Wg$Z=4j!1?9I-@wfULo;o60$QtjcxmyzDUhnb&;qZi^5T``44 zB!b&0qGj%|<=i^SQu8oO5GD#O_qlOP;*b*$%MtP%=_L@XPKtB$jv5-Td?{Aj%Or@- zDZD6<9E04nrI$brK?{)*LOdn*-Z8c!$q60v5G7(-WbO)s7U{wSquF1O!JEJLH}m2e z5}0>KB`{w?I4+WqQwjPxI9E+royA=@?K)NV#|YNj|F7e)-n~3Vu&yWFa^zvX?;jn9 z^|AUG!TKTM+D3qt6kr;8xo2hX0TWwuZUPhkR%haS6eboAsXS~gch7-h)miw7CkfOo zG7B@^%~%*o6#Jz&N=^=oer;#^=0Xis#MW5(}>)7QQDc z7N&iN1S&7HFuIr=N{UvQwRpE$EU`|(`@-Lk19&$mq&*7oHa&41!22HNbi>mZdspWu zhq6E}b8P1Zv&eTi#BX_VpG_=*WxorS{i--;AJwvVB^AcSc^_q5z9_xaUMBJ8T{iI| z;Y<^s`?k}?i(R<70lGBCkf4h^BZxq%l4NIjP6BZ6eVSv1q|;9TLN5iV>n?Gh^I z0AyPM*8r2WBxNL=l`-z0-2a^ydyU1bg(01+`5K{5= z*ZFr6H(75wMlBBU6FXW@O}|hW)>hMtS<}Z;(;aHl9cX%y=j8JoVv{tMH9sMpk4v?S z3F(eF0w5t>f#7T$qst($_e;Q3>Rr*J!-({L~G9=!8Vv4wki zPcdRJ7B=9%MSAmhhj9k%>e6d5j}={8&ehxU^=UC}%lJpy+m@#WYk0XKwr$C6cUw-; z+wx9YO!4x~!|lb(gC}Wtxg@r2`K;Y-Nz~i2^3<5(Wy(VuUP8zRX0!1E4k&{}^4i6{ z2v7SJyR6dx z!=25Y<2w)Gw@+Hzl_eS>)#7cl>2#`#8-lslRB^knpEQ@f9YAk0@sZ>CUE9Bc#*uTEX)BCu>;l!QL_~uff~UScayfWAg~r&=hPu2|Eo# zj>%ILj~E(LkT1r?IZWmu;t#V6=EXhP5sY?1R~K_mgyQ%bA|eTdKb!)}RSc;V(<^_K^2e zyx{gX$2p7n8ow(WDp1S4qjzv_Y*k@iz*=rZ7Gf_-`P^n#Ma}#3EBM@YJ+FCAhXOfeU}9C%xk+k$1~em$su{>Ua!J4 zUm-qUFwcBu38s2`c;?AgxsM*t3=gV1BG0t+3Ivrh1n~?JmuB=I+#>S~Q9mTl$ZM7e zxdVCO%p;L-KG~DtYV>N>Z)Hwroz@3Ei+*8VYi3#{O&Z-tm3O}G(i!Z5iMfvkGW|~S zOo>k(NtP{Lsq$r;vTRLnjV~8d`H*GLv@OdPE)L|&GqJXDgk;(3T?${m0S~iazAWk) zNGwbBfnZtoW!ti>XRF-d`LYtS%t^BB7Fm{^M#8ELH0uH-HOvP$9z~d4g6HXr5BtVb zf(NzN@E&6#SpF*A*CVj;p;$8$Y11!O<=HhMas=_FD$l-uJqGfu;#HMz+LUJ=9C4uN zz>E90!8f`*D_9iBH{W2+zB7CiMxO2cO5vN&(pvLP-|m5^Zy(>h-7PrxQRSN}f~t?q zH|ZgA1o4e3)Bd|Lf=pw2P@buUmXAuZt$09|Y~}7;vgFdFsSu#DB?rk?#2%PrOXw?0 zHa67+@kWzNt8liN@gJYN)p9ppVk9sJQ^wl#1hxxUi^Y9pPC;=?WK5M{EOMzQJq`w2 
zdAa4(YGfioz0)rK&yoApyfZAP+UR++zyH}5Y-Xg>G&XbocWN4V%Vi&7`GU_qaNEZY zaSe`YK(BXZ@|gV@#%J!doN6^``S(f!P*Y+^irtltXoE?X7xOVZGhwQojdB1gt ztQlyNHu<7Wxv;4-S|8_gH#pNsF${KOZm=wi$FAxgy}8P$^DZ$a z4&&SC$b17VX~O{!h2j&i8-29W$ z8gpX+X?+`!puEP~CDpM|T zUjt^)Qz5HGfLVo9e(&GdRpvcjlzEXC72XtOUUlF-@gy(a%lMz{vV&`}V!C8Cbr7=N zVZ68cr^^HE4leFa5d$(e>|fQ@nNb&C-GHuVss4;)jDMBgzs}heUtC}Mdf3G#j2#nt z^TMhm%r&B`C*3y8zQTW){afrXuXDf|N%+=ZoW0e)!W75;Eh(Rb=^$Rta(h%%Mp ztM%gNSASlc-W!wra&0&9jZz9r<-9?8b%mT_qVhOiPhnMFjgeEFqdcCW6>+NarmvhJ zMSir@jLHdO#$C&&je)NKbO{$r3*#7{HJW8dnu?reO6vjim zI51C;u@ZKFV?dThJjLA02G>VYsZ|dOU0@z_uhZY)_Q~wvf=kft(=T{^xj; zrM`gxx_}mbWROnr^)E+c1)&+v-T+Eyk)diTck5tXg`(R83upHwd$0KmCW&x$q7Lu* z&`gjG-tOoO0Ww0D4}9iUQq8jtG{P;z zZY*2yX%3vdqoa@`V>Ln>{2LRpXTery=qD!JM^#}HWe>^ zgF&OEdN1$;*G$AuzPR7g2Iu*F*~KYn#Mzcwn%?F`L?M=dIoLK>6#S4@)jr#8HVwZn zh0pZyaYG#DAH7c^At!!-DeSrPZiW#+KU6+5;R%4))?K7dJFjz3aXp*U}C(M&2uxPwpqA6Db8wH*1WhMdWXb`Z^Ptx>=~+KvgCuS-#UnTETQw<&^0K(J8HDn zE(*IB0o4`j^o&DpPERe9>AgIBmMu`#)Xi8iHgEBXnfjcnKP>JG> zMI#nbOlAZ`BxI7B$#tr=Zm-pIp;j*oM$`FdFdnXRD5+w&cjNy1F#yb*@eTsCxyPobk@4? z#pL89Q5cSDqTGWAooZw7iq5LT84J$@MqB(%oe{gEenJCQV{~iONkP+sGU`Z)Y&0=N zx)@CH&X{hL?Hj%0Wc!xNT-V_JvVC905MQc&{XNt}saWNuR}ZG_xAst^T8VPc)Z1qXUP<_exS4x z@=as1`W`}fNh>#mzJ*&Xmg~^r6EKk~!VTvO#Bq|0jx2N{L}0(e+OVi1ywkImr) z^ij5VN!9b{>jEH3Uquy-;TfEFw^Ut1U-xNWx!~6Dh5Qj&^YPHI0GOBCyDEpG)^XH~ z@JPz@F9rj(!FUVGgOEh3o&#zLD)%tTWHQp>(*a*qjtzjn?6iEr?Q=$sCJTI&1^%)H zjsV`#Ri&`Na7hnYzCZ!2l~l1rulHSsO+ng<+jQ7FfUR<1(i3!s4t<%Rdth89s9lG` zMuRfXA=nrJt2`qNV@R~&6pSqON;;2OOC$v>A4y~928-Xp0pd~j3=0-N%MX~8O_39{ zH>6&e0&&{k*s;dC&@FZb?@x|Yg1Y2}k5IIF z%~ZSv6_5~+xlXf_dtFfOnw*` zqDPQlI2(JIZ13>q)Ap4&_-hw-ufe=~VW~@v40K`ha?cd^8&?`7iwkk#$#rqJ{Q zyqDb-8qX!1g+Xu}SZ8BS1GgRFIoW$I7b6TGxmpo%dx6-_uI@)d^z^dG`CM*_d~mms z1AB(Cr(KP0W*hBkk`fpsi%qRmY-;ncshPe4?1-qzmN?VZFFw!xozYdvM{EiByrv%c zyl-YRpT|c(Vi`SG&y3#R{<}Yn9`kwF@&uok)*GAmpRjmXCdK9F78Y+wLKg1|!Qy=% z$KnnC*66*php^i5jWTXa`ZS&4ZBR+ci$_EGKwRnV8SZdSLa|3RZ8Bmhk1s ziGnND0UdPu5lE9fJJDrttGq}a{B4PSe}&{r$Mxm7y!#$>R%&FZTU`%{ezQy6=ZR(f z!V5rh9@}VQ22I|t7_KB5tNO!1Oqf{S4~Ohxc)uNZcVoUlo=5w^`*}mjT!5G_#QT+i z_qz?nBzwo|Tv|`OpDE@)3fy=*q4og6HR&WHOw`={i z@c_-5cvet`!Y&~zxG41I4a^Fb5G(jX&XXv=KiuGFzdJr|u+5p{rEPExF-hPGyHL+Q zJgy>M5Fbm+VVGM@yDVOE7vM!Oh7N<}|NqFCmR|`d>?zB_`Q&JA-qqTCVri? 
z|KQ#8h4`JBnZ@tdpCW#5A)d$JU!ff$UP-EM#zxFMg2tBqh2EJ(_%%Hdhx;r)T%Y6B zxl`g;1$SD5r`3Wx*ky31#4_MB3cr;;3+7e-+!UTJKBM3%^jUDP`se2GSo&-SZ=1`R znNQs%YHkfn{1JOMv90Cyr+JhFZ%bVs-ixt9G@eq%`*XaM+po{bm8xlb2YfEWw`lNu zsT!dv2|kPAD>ZnbR6SC_7l`(2aEDZli~1=2Y=)D&DwVHD)ucWH{0_h?DhZY?Rd)(l z4#93B*jN~D1?)nCO()nSsrn@Wa}w+lf_bEBcob84GYK}DVAofk7G4G*J>SwFwiSzB zpKNOxv#nS3!nWStoozjp*w&LmMYx_oo6z5XLkl?xOFUPwk_#QG;7PHd%gmNyS!as- z2MrwwOnEr+)#8LGM9Re;xZJ-mmmB%BAjxHvRknj`eF#qm*E)m>a^Qq_r zSLo5z&nWmX^|lKe`|0M;8g++0tCEX zgM$Ds6L1hP%0wO#Suaug(S8T?iY;5Qqydh7f z4SC|=ujBV)FtLavVA+qEu44Jwzs2#hVq>tdw9kKG5a_l01WS7sv$U_|JdA#mY8D+9 zT&Gs(of4wu~^H#>!%eKJ2b=xX@1cZzPjVa=F zbbB9OpA6>%=*97ExU@1EUKCI}3@5uH*%FC{vZWPla63m)lM(R5cK>jYle%R!X+}av z9wVoJ1t!EfTSzw@$q0=A2#<^Wd*F`&ng&i(sfuLyh0E zu>r+bF$@p%A6|kosj3G`Ynl)Nt|cgwqFTINjW<$Li?>T!up}WbEBMOo>ow-ae|REs z7){XL)njE}O?w%A_0OX>;`ul&Z7;zhC_36Jwy|E2p*NV&w^obI1?93*M5O&=uoO!& zUyc546C4Zh;erBXE$arvbiNC`C{@D@C|QsQxTD-nQh(vcSpW zsF##PB?-j{N+QP>{mZ|w<_nxE7V5BV41;21eZ?Fdwg#}(LNT(wqC|%hZ?jq`M%Gtc zqC+3y{Mc63SBw_0%2UHj0o3$z?byHZhM^*zTrLKXRdGWxuKbWngQ&e=)O3{`)8=q; zOCfuX_*HG7U;L5ZNEG32Y%n&AEX;(;WBI4X{xI>&wqr<0DmrLKDCt#Z zQprfFrRDgb7nupZ4%;XalOB+WY>HI3h$A}GOxM~iok^2^jD0!L8_$$hp{eLqpV+8DCm6)o&1a_$e_h0 z%a=k+4pzo#CBf6zp!Q!|(&Kg@gBGET(^7xKG(+qW_ZS4 z^srdnMuXrChmHjKPDegoEifCap`|c?_xeCOn+ru`YZb=f3!$%%y)Yq<$wF3rO_DQZ$IxHpO(1vEq$ba zQKUEer;UV_UfR1bF8xD9w7sOC8khdIKFU8i4*v`KDF5>hV*6)cALXx%OTUAJ6$}2^ z*xfK3#Yd=zKb1GS*Rr5E#B|-aA;_v zat=Wq-dj12vMa9^QAr#)rlgJCsi8}|qR}_Wza@|r`uB%6xQU}T9XDNfL(6JNkkI_+ z;NQ^7zv6@g&K7nezRS{Kp>#7-Wb^XLOfz4E?u1jV|JFkI6_ynGZ{3II{G`xQT+|6+ zxK;*w_^>bZvH-u(s zpEK}TwWCbj8w>EqPf>aBIW5_jQj(u{Sj!nn$y}WcYMz<-S zkUX+{~zMj5bI-Jg%!0UCaj<9QhphO0kz8k}2F<`k#m2cO*s!|s%=aplC#2Dg|EB6SI7gPUdp{db-lGlDANVjEFx zx+kR(^bH^xy|rLD#!Nz&Oi!Q+tsxRJ=;EfMPV zf4q#(s&IOTGKaat>`=$0bc_UMc2<;}=z+^;?rPFZVS+vn*GsjX+@7H{88U16#{{ps z4>z+2Lrk`2&cSTKUJMR0uI%}V%1#kwyW-0x_;Hjy9A*2z&+w%Mr(Y5|zgr6&)I%FW zAKlL_7={+CB9OFlrR2E2nUSbmt>4e3 zayzwtA31CJCj>9(DD>nM#&$fy8>R7JuR?+(V;JJw@R>b98?+aZQ`8V zOTlqtJU>ynR|}k7@i-H_m!jiw9-gRdxTg@#;U#hXoY_mkaX7DY)d*kSW;Jp-W_OY6_^T{;gGA@UI1)FT2FHF!c z+I(G1^~BFtQO_B)GU@9iAGZDO-ZWl@#H{93oR&^&-8aP5eRiU{wfTE=aa`TzI@-PN zMMmA6o8Ic~a+d!SZ7n`6rgk8hR;ksABzo^#>iqa| zZog6I%NHl8Q(MoL$JeQ?XWi?JE)wnZf0CM@&iSHFgVrueRHwGq4Hb2YwT|`%bFJ$> z-gR2FB9&fq46W^dEVjL?E=tf|Z7uxu!ni)x*23=Xm5la6L=#kI>EorMPNTi9M0ING zT*3s_mJwvP5|4C+o_68q~ z?c=gUb!uzuxcEA?HMV=5f4)bjsq_SOj*6@Eu>iuFEQ%0C8+Zc zzu|To<2pZ4owNpH&3B19&2c4oPirO^Uv16g+{U;n>iY0(xK*<5DbZQ*tH)?(!H+$G zT5)`EaHtfZ*puKIkDn<2$$I`@A+V#!pP49szt58QdglBh2*Q{wxh-uNm_}Gcb(f|? zxMrMe?FEfB9XO@kwC4;svQ{mFH;I5jb;|$kTZXSn)1Jx7V4PJ?Zn30C(bn{MrT=

zkDlYfgSwEDL?`mV?_2kIRaZ_^y!d-=LPRT4!x=l}?kc4E*nOFtbb19E%B zO@!Eu@1_Bpz%bFxsPmfAXDK<7rjJqT->Wl4|2xc>YvYT;oR(joDf(Y%q?izjgeZJA z&w!KA|3Tj!ag2AnQq;SlG&h=0iDNWRerxxiS$k;8(vjNw*g}JJ?Obf-y%dXBv7Oz| zsylWq)hE$4Y>fYG66}WNq#BnpFTxCG7&XbVo>U@=47CAAuGfNi4h5R?`OS$m zYei>iV%{VFR?EuQ$~%{0+DO)m)6diNH>}0Ea2EKbFv0h;R3E0GcXjfOmqpbZ)JA(k+UTH$Q4WFsEAu%oW*Wa|K0$C%%<3tS z5s5)&(UsUeOSDEqv1d_T}U+IK0gaXCr$;mP(KEyq6Gonz0b$hQx7x=5B+sVwA0>_EEXrD zH9dPQMNr$wGyIfy_|lBqj>xetaVCOk-~5%QiDjKp?v?3yFJS~fPDO6HbxrtpNHu(? z1;VMm34w67F9#nxGk>w3R=!kub)^+qzvHamVe)Iq2`ltH_+7*6{?nU>FtyM^tFpA<%v9w8tOf59WQO&t z{`yZO`+4Xu0IjH}F?AyOYQ$6QGol)3p?hW8H^>+oQ_FBtm4h}~rLQb_#!YtAZX;_Q zHv>$lNH+LXLG3|siQ@xD6q ztEZr*AsL6IXbVB;zk#p5LSDE6dxe^O?Cb4or;~7{Ba)59^>KPjn=`8>pOl2<9ZRrP zYE5ZOK_K|HJ}Xs@0yQe4y9A5@zNQ^~sJ@2L3;A_2al=kx#ugS-;A+Wdd5c*1n*K`x z5C;lzvNZ%;!Mb05fQT= zl>T5gH>N8k**COyNRFe?nfXa1yDKHdms2|gpCspyPa4(;lp+~z>KC3`#2zRE@lE>w zXgd@5sEaG`=Y(*%;RuFHR*4oHk7zs+gPMh1+?51^h+@64V6@x=JOHcWQJrNyqEr&lvj6X!`Rx^g+W!B4Ka&0Z<~PTiH*em& zdGp?z_Sl(1hEs+S{`NucxpInSZ2|Q}#yeyExtVp?yhEraGJa>bO1MZC6mOl&<|Lv= z1~K~0r;R^}3?2R?I;^D6dS8MAYktn&(8T$uu3q{DqRG5EMmp?K>N&t;i~c^6IvO)2 z0p~S;bX8U7X-~(MOomls>8jejH=&R*J}9V2)N0XpL$zM3QMf8yx$%EvIKJF=;e5W_ z<`n14tchS-2njBywdwG|xa_K8V6wWzaqdXbEJDJHY*4*{RW=&3y%$4VwtmE- z3FNDZSUw5hEDZv18n+smmJG+L{A+?^J>9iGZZ3@19rY-ro3Jh|CH6Ev20O|Lg!kls zf<`Sn& z|KCix=s4C&S%+Cn8&CIbp__rhf6rdURy(eaM-{#{|0EK#pU5@~s-DAosa6k9mU_Fa z0keUmX^8+P9@Z-*GESD9nB6OVjy0@8*O#Rh!0ip|48t%oq3M>Cu{l<1F!M{yIb7_bw1HDuSs6?1#swTt4 zicjtUSeeM=Us7KDcD)JytggVNOZH#E`2|`2e5*PymQ&u`1CIq2UTD31OLPEu(lk%J zf@?~g+6>Be_ypdDN9l#|G+`0e4i$srmG2Q%Re+t!26f>+Hp-`iatsgWK>=u-trtwJ zuF48WNNh=5xfs?rmj(ZmKlkIv;$mb%S5=$i)&YDXkZ-)Gz00sgqIkm|a7Po9!Qa}6 z_zRIZsW`cuui3mW9=;*Z8~%szE;k!mr{mTKbgr6`qpBoM&vr$I^lq#fU7Y92jr&U92<#R+gX)SC0$9;c7`}?-E32`fz?X~4Yg6D$ zYf_%Qavm=$=i#z)4t7;-k{}vu#T!09{!>m!*p20XDi26j`ESWXZysLa!5a>05W_bV z#B#$8h}t)xLRhQUCRQY^Bd$F8_DuwGYx)~x3jIXC?B3n2oA$-67i(eKoKl>e4^2HQ z9Hb^Pvf9`3eqL?v2DHDfiWB_R)fIEI{im^NaK+(xiOV^!#1pB@b}iZ`3uVKhuBE*^ zxoxfj_wZs*?i+TV>vjrzt*$sR(2O@iK`pw>($4~a2ElH?Ui6tQIeeDq9~t~?fd3Fz zRh&pY*2#Hx*H7O46VvsR-;)Qre)20k)RZ#g^#Cz;{a3RZW=)EW8{=5vc68bH8!`Z& z2?hOuxy@?GOsNq4#`xvJm@x*1EA#JaFI-vM&Risye`wyE+Ba%hy8|O%aU7MnPv?nq zK#}@S=hsQogQt+6uwovEVF5d4mVp6;2@*Bc%J-Ps8xXn?AFkPmRt5iw7dfnG=&zoY z@<#x5>^U8f_W;={6OL+Yhl|yofk0!Z<)oRj?Rn@}zO_1ZW6rR^aB7D{SX_}jvD273 zX01$3fHE%lJx=8ht<~ruB(L$$YC$hi6gPwuu%#UM;xaVG#e( z4{gM=OY}Pz&Ce4G4jgL7=;OLwtVcJf)2Ya62mkAmXh%r0qf$#uEw1Q+Xv(yBkTLf1b>1U~sKGUvMfH zZgj_~e2^lSnr|6HcSzJHscEo~lOa1KieqwI!(EVsGj@|3^_VuDlQE#0k6aU|LM%6_;BHZ!(6jC^pGuZtJu{+lXiDTuH} z9ER0rvieJ;Cjare5{l?bC@n!FBwC^t)Z<0#2+~D1_T8K-?dN3G0?tjUo*K+c(zxiS zp9|QUM`$0CnWDCtA$AFJMkQCtC-`N?{wZE`1_cwn_g^U3#Vsh_QeSzJSFoGsWxWQw zWni}y@q)ld90*2^p|O|MVX2-BtPwY7aSm125HpqYa@)b%VH7A(+q;bFUn_9-iA(F9|R#(w0?CoDosh==_romj88*Gwl3?m`dg! zaj^W4B=URn$I9H+MNAtXO*jZla}#Ay&)98npJ=EJ#tkw^Y^|~W=?fFGD|~gaFWlfv znY}Oi_#r(J@)zE33w(SR1L$21C_|)0hM0m55lqQj7(7 zVC!82Cy?E^(;cbKR>xmyD!-QMB(paX>7gpKD?eC8YS$H}l4Gc(60e7aCs+KsfqY%LSLcF zF|arPrl3!Bp-c4P4mt2F9B~IuL8BhxR7(mvMb}Jun^}|F#WPC9c6se^6tWR? 
z%^^wZb5KZXIIEBZsc(u@EH#`}MvBxoMJm=+>9?7_@L6*#*L+v_toe==t{X8DKI=x8 zb?bck;Bpk+Dt+Pi_CozTu$QDc>ehEWBNBq8tM!I^|2X{e`9rZ?7{i6RYLRO#4gS z7xHP2{b?p|11HaMg(uF(NcPqnNd|z4H@cSIx=^w#nYhqV*d#DmO#+dlFd~3(Ss0H^ z0*7N@Eq#p$6r|GICV_!e`pX7Gr_$plX`j?Obi`WeL*|+2ifiYDJ@mUtw;cA|xU@;y zUE)CxtjS44hsm~Df#dTDBy(EK*=AITM*T#|q`yO8ay>6Y%jQg7U=oW-l-IM(t8_x0 zp`W?0Im~=oEJ?y~XUcI0zgLnbwfiEsN?p83#okDh{U$ZK!&elnjHp)|LtQM`dZj7Znw}ZfUk^K47We<3tP7P;^-iU_s-$P0x zRd+GG$h4kcdx0b_lGeFZF4DwQlyfCid=@N7f91d>Sj7Q=cdJioI0>{`@fNj(oNKLqWqNCn>)%DQzZYEGhSz zl&4Gz>W=}po0Q+0lnPQpCgo=)rJ9uMP0GzC#Y@T@lk#JeawRDXP0EEPWezERlj1We z^GUhNq@1c#EIq7M11>g+N0>yde5o2xXA%dPL{a%re1b{r`j?Eff^ z#HvNPg|(9PIZL89d~uhz^1f;ihcdtNKCe7D+}8W--7k0uRh6IB#UKreZ}giju#v%D<6<19iekz z@;Jx|T^?c==u*e%0^DgX1xEA24V^Irx96mcFEX(hYeg-4x>gkl5+b!7F6%XQ@-OA< zWEOAuIn)VrmyG7CwM`Y7?_*u7#Kynd_kP>dm-l65^&>JsZ3k_zU>r)WWJ}cl8sA;3 zib<`F{G@^}+IUCu_uJ-*OpU51b%r{rTqlx+nyL+`k*R-Ecj^pNyJheuGCZN?rh`!D zS~WZk1UCKC(mzjits0j3`K0vE=eSm#oB6p&e+Hsyu2s`BJ_)#L596Ec{<*GI=VfG3 zZ=`>o?pk$z=I52^pU-oxIw$kuI)jX9+Thqc)wB{*yYCS&x((p1diL3n{9SD#Prq%j zW0`-hReCVxqfiPh1MT0`pCnJK|NBC}v#?c>-}B$obgZW6-%aW)epkx8ie4d9ZB0j9 zt>9wH$4FY*H{#k%Sn5L?qh(9_n(61ueXU}6;9t?(`QEsKtWIUYEXCDB_a zDRrdGl$4n~%;CX3w8K%@U&{9%%A!<=DDECw80%%)^IErd(GXf9Ul`j5`^C%KWY&em zD`h4Zre0_8%FZwG6_I1|Oi%cj5`<{D>5+2``s?2|ZmI>k;eH4XjH>SBE*E38!qm;;!i9ADS3} zL-Yt>DCMr`8`#w*)$op4S$3+Ud8%#t zz$^hHd5Qj*{h%v6vV^5W7Ac30s404eGwecL>fdimAvQ@X7;0B|v?$ov_C6>1U8@NC zF6>+@JmBWN@6xM9foqk7yGyMG6tP}owfN{x&F2eUs|phZIc%_u2_36N8{bW(x|sye znvvi*|5m@(Idts;NFv9~M2_7mNDlbK2;mP;qpAwNtXah|^n)cYict-}!Wd!$yAh;j z2C>+*Zy-o3|K58g63|x!h(y7ECojG*2D(=MGV{o6%zxQDG86OHo5z{1#4L2_S*Qd% z-#!a>l1(+drDvg@kyr4{ER^EkFbl6{7B-~MLWwJb#VD%j|8X87SAP7@{c*h+k6uUX zFTJ#yBh#$tRpzEbDDncQ9iY3AWc3+*PEupv-LP^Itx;I zfAMF}dj*x9NT#Kqqlv=_;Ztm%+P#jmIz^FB;E8_fM5$ODFfN2$>=!45ZYC01Z>V+o z#tbdEO7=5M(!Bu;sPbQTai&qVx|xn?<+@s>M?~WO5t&m|$?fVC^0QbtMXme*;+I70 z*`QXwDMP2vQ;jWNYg_Olsy4NM1qp7~y17E>69;dcVomQRSJqg67ho_@C+;d1{2h>W zP5AOIRYrskUwH9Kpz`2leu9e|z1uuxGAb^vTwHmi@8>0*?^-R~>mU~WYhO{hYqUu~ zi^{pWxp!$h8k|U;Tw&dz=Z>1h+S{+g6R9QrtO`ni+%DUQAd%;LI**iLH$M}&s9my= zThoTADLbvE#rKjcQJIiNs=*z1NP{k14uU@}Bj$lv`MRF1AdfHd{APYUk;`8tV9rAn z2P~9INCZiM)UPNm%?s9a1GOgfU%w93sB6|`B6{=pw6rXlqzZJEbU5^_| zFyCq3bcP|5&a=s3SMzG3J@r{cy2FRZbXN#Mm5z{C)h529xOu{KRIeLSmhMvv1Nu1M zwV2Uwt=arGa0y8>vPSY;P|F(WJR?J&d;GQx-hy1=2#H?7KEOjUu0z8-E3*e?La%>@ zgvqI81&_D#7zyN~+KNn?qvq=mme41shgX@mGSCv4{*}5=UM)*TL_Ph6OnysHuO2tg zf@1Ysd5%~zd#Lq^S;hwJgE3sx#FKa+TXW z3o2EGc@`d_j?vGW^VuP$E-`o1^~Cc=y*Cb?cTO75dr>f`4@x*;T^Hl770vg~07Up6 zY;c2mgflVuUGusvgvX8|s9%!dsXvmA4#z1-_$B*VMl1ZFycvcZG5@R3Dq~yKNPjq!1~HU|DirkB6!iU`Y?gwh{9c27<=_jx%Ps!al{r(r^ceP zpIzc4L`^|_O}wb{b!b_~o9kU3k8%dXK4rf*m$99K@VeJKqS@Qj;t!;~azF6tSs)Bv zSUB%A+;F@j)_KPeHQ(jnZb+O^R&qy-{O9lncXp`P*O0Xo7-g_ICR)&z`_fAsVvkIg zRF}S%7)}2nZCP*0u}!eGkdx3irGOWSjv3BSUidif3l`BYi0?#)WV#p*EWRek(Gf5_ zyvB+&(hIlM%rOwUjCj$IZBT3%)rtLz1k0Gzv8Ym73FNEay}Cc%cvG>y=GT*zIBaE& ztaWd1Ueldt4!mJpc{ zw?Lw+!nLxOpJ8<;zEfEfzl2$v-v7ZjL+*hsY@g^6Q>c{!D1^cBpY48 zpB~TEXih{m5lLuq0g;*NZ>N!}7XOdER&b{c=Vif)tiVOgUjz6$|4>3r6BYi}tuio{ z7WNi?tY^6COESmov>gBZLg$c5RZUuYwSEpJx_y>3HlAt=1P``Gb)?B(NRn2ldeNX3 zn6zTa#_L{gXrYu;qdqau(s%WH=|tooV|jkoAo2$6wDPZ8EXX{oLoQ-+$kuI;^l5t}S<6XPn^PNP{te=@#k35|%V<40v2&p4NLn1n zA}e-Pg)0Fi3;WqcB3;FsTg4XsX3pWrS7!E)k$i__W>m<3sHHQC^^)ok<4s7tzlOoq z^-*p(_5br!Q~x1mcWi2JPtnkGYH8>iV) zZmKxzbXt9D3?oi(vay$4vYjui_^m;~lT8`Pg!LFB59zz~rCd{Cy% zHaP;{pnk3mE!)IaCT+Elc5oe8^s$~r*jqJHM9kDeB|8Cjk*`qVn9HEcUU9YP=Tv*A zj^VcOM4udYi~k&vZaMWHUpdZe9Bd=j3>Hl7T4#z3ds=%&9WMHKsaN#z5C62@-p2OPts#B0l2$p2tm2-`7dp>ytywBP?QdB-iWu*SjQI 
zy*7x==R)7u+uVjNZiADCi2X@qfY|z=Sj$!Ce9qa$q{2Ndy&ZS2ajbr~4^beG*vvx# z524M0-0%(1v@9m}5`AEQbZ2asaZ^S`3)BL$s~9Hu{}5(qPs98`$Nu=9359tF0>{FI z@mDSq02pE0*pkyDO7h}sVsCRzukH)`uWL$*z%%t;s2b&Ys+j+T{~rG2yEXe~LZ;S#e&%sSs8K z>bH-|?A*i+1V25Gv-74L#>1LW?2b%vb#kSUoCKRnb9ANIQt72s3PS&)ozS!gvfWaI`Q<&GvtB3gAHM6+d##lJ? zQ;MxRH6I_GRG9P4)$Us%)%rcrujm0)W(Q6aC)ZBkA^A8e;LgEsf@_toz|pi>W1!H! zxQ4q*LzfwxXl!(DGq^txE50uTD~`~eS^&k3Uohi-M0d97Fsg8@zPhlFHKDNb>SF&m z$7;n0=s6yqP*`7iw4-Ss7(hc%UwO>JKxa*43h-)#ISpW;9fGj4s0cJULTLOMM%J)R z+FItAvjd$~lZH5&7U}ejYCo6-a}#Q;et0GLtaX?r`p1a;D%5BPgT4;G(_LI9u@B>Q zzPRGev3EsPz`S==9^(q^!F1Dqxd5I37Dxl<_5|hi%QgNhqN@SmfZm~#=%Hs5bs0y~ zuPFjz&E z5`W%P;!QmzUfEOPw4M^rki^8C)z>^M`dBt!OxR7=Cnu~(DQIt0yHh6Yd^(UhVT)zL zZjuR`Hn(zNDdl?@xPov)Azs@tUvidp{Z=p>AduENd#yNWO6L4(Gb_bsPwI>(LGyXt z(5hhqtyfwGtpYV@#pR&E30m#N1G}*VPNyM^*=vW<59tL&47S8eQWn(n>MKw1PeqKq zW*XPAOl#Eh&Bgm{-euLS)=2?OM(&U$ZFRq_ENn_9=V)xGLD+CHLCeH+DK<{>AG48j z#1rk@#&A$)U5(+6Q@w``4OkEZ+e4%b&%NSBjV}wX#V%c2cj+a z7uTvj_Fy;}A2@lP61%T*t`hJMcde98#YlYl6Zyff}AU4t3jELCHD>1+8gZ<00SD0T0{nrsI6 z=sT9@8+~C%tnY&S$W7S#MFQDXoeQ6DY8JyeFX+%wON}o8=rzGhoyImAH7}Cb!C_>DiKbVZYRX-f~WBKY;e%zI_3Ii@NwvG%Ki6(V*HG`~HQxu$x7Oz&j zcyIL@Zsf84>H$-DG;dPa5h|3a61iOq$#}5rLKd{P3B&KCs>l-YCBT=>ajQtxgq@EU z-PkTu!4Z0c1W^O$gb&lkT+Q+xc}k|DOl3m79VN@O8%zFujxt3 zxFgWqdE_7bLYJ7z94GI; zFz^0h$+8q{g z2zzjLZZ}NUYPUKvr6BGnTZf`YZVKrg9vzvGEmtx}9?~7d#X~*#fF^tx)PR+fv$N`} zPVg6tPac6EiGv#K>O1%2pd&9luOK`x&+47NN@)mTr^9YF>{O4bW~W!fP8(RQV5uqo zAe~t~Tm-K5oW*J3yHSG?UsGQ>u1*ASB2h#Ja7cs$lvXk$zPiGV(#d$yR}j>m+`K?e zxGG%|Haf;dee7LEyl4aY!V?O@6Y|WVVmH};yE+9JWCKuZwIee}mWe3F?V_m-h=|U6 zwdg#)*lTfHX}67liS;9*_4twaXH;Ha94Pf#ecjd@h`W7!7^~S=H|PyOR5k#6hs|RH zfVc}YbceZwF)~0Vj_zc<$O*_+zk>?@-Flq4m)WlBhnfD=Ses=Jp!?H(AE37<8k@I> zYymA0%zM*!+ON36OaGhz6|h4rV%%fg*3)1>=LeOr=sw~VH@v_g_1dJ*TM}VkZ1TTj z?g8Y#q!@~cJRyUmZj@&$IR%K6o@GrQ*`5Bk9YFtU(FW?fiUzpDXX?oty-Uw7&hcTU zPW7u_2?{sLTG#Ym!~MlzQT&8_v-n|Z#)9jz!wxF3dZ&r(e=%J!h(Tl* z>FdBw{QO13p)u0~t9D0z_DkK7lN_s$?_>1_{9-?$=2StrcWkQe4Bohh_~L8pCsvNT zR@Ue1h6LWxWE>kUxdSKZTrgO>!fI0?Uf?WU$TwGN$=pAH1`6DjH_Ptv(u44omPHwS z1%A??rC$k%qxnigCQGuAMel<{fv|66#YCUXqk-9eRUOnn2-^5*|E{Vg~eHcvf#oYxOpeDjWgMXC2%ks;)8tp2w%OF z{~5iW_-adIM*mY{Dw7Nm3syof-Po!4;MPhQ-e6b$yxuFfW5XOf#O!eL=M}8n%X@w9 zW>?G3JjaUlu3++@Vq};u-%bmT1rdw&w`8@uEDPxw~Zh$Qt3z znUbwOM<*U@Um$?fX4ku_3jFk_-N!+MkNDHUeSPQd5+{^2J|q3I#C^De$YjdXgFSfmrz-A}ZljWknHUq5nf7d575oY?qt><#I}J?rT})g_}=oHj5>KtGzowM!~D(1-aosHoMRT2RH>@;tYm+5ut!Ma zzc`;7(@I2>-PhQ2%hQ3RU!yM`RlA7n#o<57EF6TzDSNvl=pc^BwGTW5uDszTBCy!v zE%r)!y$$DzYMv0(2(^b=MMF93VsUx!wCt=nH$1f}oIkjC;PRbV>FE4qUZ68KNVEms z2I@By4P2i+vJ0UVGXWmo<RuM%cb=~9=Th@EcAlxa$w}%sF;QMxwg}V0w-S_>UL*$>;(uhop~%i6Bith5 zi{_B=qEnk$Ha$D@0>h+lvB7OTLv-9`2qkCI^3eqgu!m*-9D&q&p#XqXZ4)f@Rm2a^y%)fKsRI=AsD z^*$#A*s9IeU+QK2o9OOn89!wcB#5nA??1{L`2ifNN~noC zaV4K&>U<+rCqeo&f5OKwEs&JgDr!Pb&}v?;Q`vcDX*(QdYrD664LmFVM|{JM%93}v ziD+v(2(7X5r|R?;m$g}!NufAeO&J!MicR( z3z(ULhpiBdElOmbrI@`8jysPM17exi8g=s(`0;O$K9-LfObgzv8~FN`j%LB$Ois!& z+T`Okrf88{iVE(;A-8zh)1To3#lm)-X&M%dv;Wi|Y#gMo z>9Lmf5@_i&f+PssZInvDeFF{9E|F-D?759^h`n@A;`B_ctTKoP=7 z6GcJuLVk3BrPyE+=bOZ9C9z1TVe1B>KjzFGWDi1usD0rq(?l*!$cdgv9mQnLmWhK0 z=jf+loe)k^bV!{81*18B9Ts6wFxp3=3iV2hDiq284+q~8pW+ky_e=ZtHTnJD=skaX ze->+z8*54SZ#D;0qDDMu#R6qvM`~C*`^N!Az|YT6^K+>A zDV9zp){7|;^EB98m?%R7s>-C=r^UY=dO_=g1d_VYV}dmVo6WR79Becg{@Zgn+n)S%;IJ6M457d-+7 zjtM#Z=iz>WxA(x?E8uNG9u^P57Vx$nysd*4IK{rZnPP)f+wugWRO2&<%qzG`zM%TV zJqe&^ZZ{uC^oXCW|vacaPU#7~MJLhxVN9HuJpUT&(>ygaUtG1NM#o#y;j5-Nh_0+8nz~T0TsD z-CGv?ZMdOPrL}@pb&EBv5VP_48?k{pH9sJ!n{|?Oxm9=hS+8Zf>kE4dq^duwzflzj zJ-V0P!q~5%QZGV}MDM*MJ>J%hL>m#qR3!1>$K3Y#g~YNN{~tF0C|e^)RKdMN9gANA 
zIV;_bGi9DRf~>MxC%%csLyyLVoL4U63MzkoFkTcmt?~3%mO_z%6SQE6oI3a`m~7Gg zs6AMg`mD|WJD;;-gVdNOzn0F%i9-L(1f_nFBl)_So}``+pveOC}$WP#3L>b%3PAOH0{&^X77qYo1!+EG zBscv5nT7q}Hl!DmUYpxYXzmsfR1T?}ujeAXqH2xsFzR=Nejsa4qz-9dknYf7lOz3G ze2`XA<@H|^sXlz7=6aE{yL?sa0x=&36^C_ac=5j{=CNjYEUwj}BgVK(ZaoXiEqTJP zqA}p+*R2190JpY@AKDJ$dsNhv)cFrtn{#YUvLUx?_<=D<`_2It2m&2)T zI&8hTYxn$^+_+nYqpJoF)l~w%Z5lp4RHZoGwy6Xx|-|9RX7DE6X@=$@Jit6Jy% z)xAW@aHe9d*EL16Sl&&$<<03WKO+^`E&F?HnQNA4{D78&-?e3Ks$%jb`&^Ug^Z2xu z_dWSv^tsPBwd_mlb4_NSlP$OZHPz?F9$P+0pYM=@GSPd&c6!TKrS(}R(GF~s`eH!a zv~EGp6WJcCeF;mi9FnNVwAGhA+(x#lC~xLOBb8n9D#R2K&nkqUCC2fa>Q&#dUK~db zT5qod!CkeF>G%+_a%N-@cKmyfi<-VUlM{-VOmK&bhIoF>caIt_dJX+Ktx!&+$^wQ4he_f7_OR<47?u6*Zu^+>{r($G9z;@Ni z#W-Y>`W~u7Uswl>E>wH|_;q}h=#;_**7n$NajYBmAgwktOOxmJnMETbB5)IB?TMcO29KWrEih&{XWYZ@|Ih?AB}? zOtEoLUpaC<$GCA0;pwU%;RsZzNhEy!hl3#DF`T*B_jE(T9uB66=WqZLo_qY;kf7IF zt&h@i0$id;M+b*J2pYe2fuq$GK$7U_XsTm;Gm)zCiGrNSMWQ1_MScXkoMu0!WbNu$ zdM6%lfGnE>^cNeTfY!!w9Q~N9OT4*g7dNO9RKRG}C1au54JS>IKuhcn%-igUO~Y~8 zAV*)(lRe=LkB`ksTc+I*+!Tlpuc-W{jHw3O(bi;~ISVZ5R( zSA=d1$%y~S(b7Yx{QJ^DOm*7TtL!V%I@n@&5Zk;NGQ6$!@NQ5OzD(=jpo|XQmg?X- zT<|hG2ubS@mkGL#{UuHQMSUqExV;%3qE2oE4B$y@}R8W)QHk0~@4lf~mM*{h4p62Y{*5p)My))W+3*~o@ZGBA`2i?|# z>1~~s(N>BcWfw=IpR3QS*{K-i;Q2E5&#|Xa*A}`(o{xfC2Vvdg*IUXUks_nWW zF334O;6V8c07ghiyHm~( za4^`{T+NtcdMn5JT`6ctl-O@TGqgT9Zb1qhH*y*@WM9!$YM_ba%@+@$gt-j!um@eZ zZ7R2 zxmv9NxlS_VEEz2^WiCR-z#>Hi>T(<7u8NU@@p!YIzkB>6xKQT0-bS`UtFBjp%YLKj z`Va%zSwPkkyK(>#1oA)e>>m0L9UjpC-=+2ckMw`eckO>}eOM#leU@w+0VmlAzyx+A zrtja3fzNPgV`keJ@W=sC4-9(%FnuEiGUo?poZGa4rDoP+)au`+x;OU^Y3xi^K-N8M z0c}^wHmb08>5%SnU0A=J`^YT;P<}g|VvhH>oZ>&We8^M>hR1yx8sr6#8c1 z1>KGMFd;YAsc{H$Y2A1_HB`r#q3WRb)-|)8DV^_I=lDncsU2k%PBX`Qrx3yU2Ok5zjVvqCb4>>7IhejM_H`5v8w zmNtUI*M21L0T6 zNb_uV^F2!j#x`-|t!rssfsR7Y$(gy|_!H_3IQXuPlIk>>JkEObWU~F_{l+kyE&krD zsDTknhN;enQmsGEa?kkd)`|C)Y<#y=p6^;REVlXJjj!dq`h(P_8-K#+*_BAOaz!h( za`b6%QHP@R%vw2>fVsMfqP0V0pHXTOrGw5aH7gCK5gMl0C7o+PYCQ0!Hi;?U5-(ba z+KfI7jTePj-w+L+eemnJ8cR`&D=GJ|jVKHpB|q8z(XYhzv}XH@)h@EMW#t2sitEP# zJjT|`HnWBqi@~5`*8-?@qUdDY$My)n(*WFC9S2A;`10(MGHe9IuVD5(ZFZB7+!Z4* zi$u2As!$DLlB(ZS>#tHcW^52MqQJm`L3-?hY71K?ZV1_|&7I5R>g4`JtYyv(QfFzn zMeCrleh)*vphj$1T$S?+0uJH*8`R-HmDAm>*qM&ygRS-B_qcP0<@t|9Hd;D3w8cND z1;?G`Yx=cUeJ&BmZ-}FTXB9&I_*JY|)fZle`zx}Bwgg&|QE#!hwl7_fwSvK8fj5FZ zM%MH}8u6a{!Gr`ctSC!`iuZRe=C}HNy0=a=CT1r0VoXE+bUSRPQVl*7?8T z*=vnNM=om-f5u8@b7I(PZBW0*wRxE}VMvP_@L0A)UcX_i_P^Yqwu;j;BP_Wrd2WmS z*0eO8mcR>NE=q7>15@U?AiR*H!DkleknOqNSsaBBFrtOqU3zgw_D{@8&>?d!(#vP% zHScj3R%AiG_rD@2!T z=7x!TQ4^Nifx4d!N(IDZ<2J2{Kvgq~wPQ1GVghLj#1;c0z9&-&57sVBQ$~c91pPEn zYV&%wc}UGN8DXd`^WjkZ-VX}&IC^JoQA?1q)26J^v!<;h_Pu~0cZZH-qc7J$Rx9Xb zwq0_20Cw8HmYEoRgvQIa5c@VVK1Y4X)Z=_0?#G$sD{m=Io^i+RFOtLcd=@qrUalfS z=9&M?gaFFkr4h%xv__>g4nlmd!D1VO?|;tCQ^N^_g+)` z&2JG{W~172hSa!0O}uwMX6?<~B}n20HZYD*9b;hhwbj~;-t0tz7$RwVKJ|}Z!IvuX z1O3!Gp4zg6YpCaVq)w|sb=(6iZL#MU{EoUcFiIkwN=F?b&THZ{Rs&h+llJHF4-#9> z0Bx%4Q`Jq;)p4e={Fid35f!DC%q{LgDY=2ZMn&#Hu7uWQHv2Xw1!GtO<;vHnNP;YLJrBa z-2=N|j>dZGEBA>e`}zaB?zzdj6XQ6qYShI{^oE~i^!04aM0)J25cijU;UwKUTb!&} zeR8kqY$2WX>CQe6a>d(sgSsC)!PjFWRh!AB-r?8o?!6B%d%mT=9GfQZmYTKQC6!uV zsPkA2Wg0M#??~21O|es%xw2ZdD{;c$sE$ed5uJeR)?*zahr7@Y%+03Q=Em6t760Mt zEBJYtpu9p*&gvs{)kE8;)YNuHP&YnM4N5jH{fJG&z+B`Auih7ggK? zIPL1%d%ljxwn{Ki~N^_XJ_yR|lWTlNB*EDZhI8)px!_|E~5iQSLHP$ni@ zLvkgYf#nv+@y_}Jx#uE`36#&8=47qMha@{*^vk<79E0LTPXG~cM1R4U3B;_TGqgL! z3}G>^(*dafK;lK0>q7nw-bJWR;3_vSJRGyjgmO-28BZ29fIgY)Tw;HwV^D5##&J~xis23oM|o$h!!Ec#)-kN^LR>awd9A!Pj^!}J;Ld8! zm1B1&Y5sSJX^9pDKJF6boW53$$6jHdg5bG8bW+!d;5uCWT^AH+)zuVSDWjmF6H)u! 
zn$j&C6ZRiuyI#!W*s;Ya4h{*|<9Ye`VKh6n^*omt`TNzgOX!91aJzpkC7a2+&RyE> zvKMZtIVuJzvOXYNboVtAonkeS>wV(Rg~Io9E=vYjM@Y`%4ZztcfLF8HYJj6+o38=p zwh%3Na*vU@C}C_(#Mf=H^eF&E411DSate%F$Z;sygb&iVw*aacW>mn985&Lk(*7)zEU#39{>J!`tDfK zTH4~~)|Lv1b8vt-s^i(G`8Y{Xd$+!iz_sdq`ZOVum+i9F-&YeE6&{|QWjyS-T=u=Z z@umHT`-gH=(~J2RkL5{3ZBJFp!k3wIV%OLMZp%L*nBQ+s%Er!=8x%ixJ#Dvu(6#@* zr>E1L)L%0TA)_7Ul{eYye}b=c?|8?GA&$t|U=4p8Id^t4uzfPnhE*S(XZY#-7U}#e zetO=5X?$tBzgU3uL}mb_2Wx&$)z*beg)#J#l|2`p;$B-I6B_XpUv=S-a22iqmiYeA zE64R5;R(gMYf5)afEov9v<)zWY;$n~oliVD2x%g~tP^0?3ovU^lqmDo@ZNNtU72qd z7kszCemh&@+9L?x#kleO8$D5kxulC++-0^bZh}XPG_g_}8Y!pGStLe>!L#tYE zx>vY1p~sQGcD|_9LB-t5YxBKpg#FvL4x*=-UE&mT;)LFJH$t7gw~;;j5$aPI9IXyR zv^@O-hPlB3-Z4@+g&rVMX(gh~&jc?S_J?MlaW3%`mx&g%+djjOxk^fixAz$(YS30g-hY1>3ui=_@mTCKed5>9s@PVPKNw2b=5K({8#7_;9t+X4L%2c ztmxXli2caX*U+X&Pq4BI@2ppX`-HtuN@kB;1ACuQRaA*BSVJ9wtpV#;z|w-lngn|{ zyEYMZkCjj!)yUFy{F@=y+tmsGF_9y+N9azq;GVdiMFTSqnB^ zBvBq^N}06)4-8ob{-lfPWnebNx-A2~MP0Hk%-d#W;E=%?6OI{31llB5ZQ+PJH3L)Z zKqg_S*}v&!077I<>w!-JXq_ngI*f{%sjn}dlU=I;cK_|tM-A+XyAn%)Snlg3;AsK1 z29;xxdM%alb}~jECXc;Bl&N2TCCUc3cyKHc*;(qIP*d7Tclw`^(I3Z(Lo+&TR*6s3 zIm~)G=UB15bX)9Lq)FTWH|h{^*llTZe)W&ZT09z{#wS6-n`FlR>Ix>aEDgvMiI7ox z3qDi0;gvGY=Lk1M+y6qHaKx8hvhw?0NQ4^}#jDPfJ>j~9-Xas1qQRgNv@ucLWiJ+X zUFy+&bQ+~e@A5aJ4{3~jf=Lc){6p1akt9|xBjAKaW9(-AgB|{LgVf?whH5gzW*K41 zT4u0^6eQG%8J3))2um9deCQdftZE$f_=4Pr8UM>DQOaB^wac;KN{rjoM?^+uoCl~Q zl8tEmpvjq`RVK^<8$S{*Z>Gh~?cDKUamPm@%z)8VuEu(eHF~=mRmIL&Jm#_U`RdX@ zzP8Lm4A)cD&MT2(B=6^^eOq4c2sLs={a!;gW4ZYX15~-`>PXqx_oQ;diC zB*v<=f1asZ7? z*8Tm^JnR1MFweTb8_l!s?;7*mD0ME^&&b=!n63xL-m4?_9=gGRm)Lu7{QUi=unOte zdj;NE7tiE8${U`J`xCf{0#$+t%r zdXYkIuF-*M5($)0{Rn`1veK=1(Qj@Q#^4A&NG_3ds=*W6rr9MGEMsfsp$voB*J`Yr z^m3CCh*TCP2;Ny;Quv1y=6)53z|*9-joC9tQt`{rs_Jsw;r`~T;aWJ!qU-fs zDICnH9+lI7skaG}{j6xd6gMt|85q4T1EaV>VPaf3?~DV`**(f($F(+AMPc3arX%z_ z209h#FoD%TNU*98mwSkG*zgUwg5l~8A?rES4i4lQ7|d=3rFd-}&RGC^bZ~^n6+-|A zttV})e88i$FDB~xdd!*L^x%h&2HAA7vh0zdh?7%65$_;f)`QvtiGk?x2peE7Sj+{B zxe&R$P&=jR7>I)CB$i`$rPcqaI*WQ@4<+PwTVKP<_yt!aqWKoZgh)oJZ8!HxhDw@2 znIz1h&gvQzI-!!4db+fcz=Hoe2~Uu~TbY=VKPLI5eV!jO=}uLSJ^;3UrCxui+&)Q= z^=FP*eHvJOh{ljOCzBwP>(B#Ua;vS5#4>p3ZK6wz-X#sNH+tXG)VyREh;>Y9=798G zr~!MLcP~QlqK{Yh25c+8_C#S=h-ys_^^$}pC+RM8Vrw>DMs+;3&%0-(D7_f+dLya@V0#BbdPzLk5^i4!dILe2yE%x;mUm-2xl^(mWcOXd2?G+foLKC zBHWb6Q)Es~_^B)&s$O?AakmXys^;;QJ2ZQIWk_dmG@U7_xoh3rxWr9nZG8i7(apsy~l#4S1)yOPler&hb=j zb2;v5b5-H@d`~k!fxezl+@J4Sx-ph>;yM=r-aX$$noRMlFyV|<6F z>P!Fm9)d`hwoHhOD{xiqo$Ga1t!d~z(KJ0_)%_$ zr$g%A)7FGCLAYvG@b?;*=3-pA6bqgEvmzGqBPf+Pt23il@{~Xu=CB(gvTW`?!3mbjnZ`2KaU-&P| zL|^THkSBzfz?mUY4zo&|Azv4kT|!<}>l^x5EpDN58PVp5E0b8P=9XQD zwd~9(-)yzGa$gQZRj#U+?e48{?LjDx$lURa*dxvpZ@Y8%K=%9faJPJxZ@uGfd7mJA zttxi@zIgD%(eB92+0{!oYLC0EJHw$-x-d$8+DMX$CLFExJC$xe{Stp@M$p8r&apUK zzuzq-)vH&FSVw{;E9l9lm&{(%XOG?Isx6LtAm%MH(@+k%mU2t&iOqtF(7M163HW?T zf`WHHZ^2W`uF;HqR*zVEfrc_h{CU7N6QcAh8-3g?xuxaPeCzE5x}G@? 
zbbSQ67J{x#ndpkX1g#iv=)1|LI&PMIo2(w{4@g>WOXF|l%WM@rO&>UxJjMidb$-Cs zhxihI-WzXGxv=Y#+}2KhN<o!dxgx>Hu9$$~s8kgLR51T}mASZ!BJyn|mZ?I_kPYP-jH8c#jnhcW)X-;ON z&c$K;l2p?tHb-&$158~gdNa>1aoAfdN@weSq03sfqfoGiOC!VgOO2wgtX`Qxxy|#y zm7&_he#A;Y9GfE?+P9p`5FWcQIfNe~=-5q(?6;4x7Q}=K65Wndz3%qb|EAmeW@Aqk zp7a_`9_yo;$X{d%v4;XI=|rfx#^H{79G>_2CmH3l*Wu>SKw{6yaqd_~W3!f`KX8PG zQD%bWzZ>y;4I7igJymZwn!3Qr_`{#P`&G%so#b|b=uXnh-^4G)BfV)zbWSE(FD30N!qeZdu86gg9@|bE$x2J(uFZ0a3aC`?=&w%%!6bI+q5e=2GFVParsezi^=m}>=IevyU?T0mq*)&Hgvy+Tzx0~JGOkqpo zY_?QWGP|ZKjVXm^r>OAsI;im81xYHL%IT$ULMHE_Na>m-b;&f6ZU1NTAbm3|>0A32 z9ZeOUDk87|gva_YofVy1GCPA2ktvN4?Xek=*ByT98yFD+d?q8B3nS76zKs!W{C~@c zbekEB$eYfHexMoA#{ZrX+4W{JBGE<&Bl2c4qP0f2f=NS=3Mn0M9&nWvtrje&Ork}5Mv9Z4uK>zf%nGWdHDYOs6I!i<4JlY9BP z)gl+DmpQ*+<(1_@=D|$W&Yk!83p$!)pAv6d+v@~x)nAQ2HwWC1#Fp!^BnpJIH$M`O9?tTK|I6|LNx9d4EZy+F`TXvY9 zrY$CpX`*9->mRjet^Z{e=uQZ5>0EJrV8{Or!Y?Z7hVb*R{|}(NfgI*8-RjcTms)Ce zb?$80mAiC(^Se1nAvw+N@3UL?q7-9!e`RhjgoY*ki`}^!-H^aWF)1x=^;NC)Ii?`i z;(z^)qiF>Az(w3kL{QO}V4OSYL>8JwFNi)Rq3}w?VKO>?3~AJ@m2mCc;Lg$GMhAMW zoluY!<6vlmdXr?{IQ>ny*=W0w#svj(SM9ZYmGd+?8cP1>#@>>XMSP8`a{Rd`HWRl^ zP&dXgI>ZnN9)>i@?|7@GZ{GOInn8Ji9q5#Z3;Zt$i}aG)%JGQRwW%W8UvOeGBFA0L zCQUDZhp1~y4^Fv;3=Dr3XUCYFgApx*8!9MHuu z29e2oY6WBDF1b=eQsG@CaPwayV^&L)SMQjX=bgOckM4U0;^03NUA7ji!@wt6yZiV}C>4@gyKZWy zvl42jrcgJn9SPr}wR<)sTDyE}dSmPfgH&jg+loYe=Mc5cdV1PKrgeeO(=>P>xiQw4 ztRCx2clgJRZB6HbAY^caN$_r~^1{#7k;*Pkkn0ZrMbIAg^M@6!g7DE7cGKgztvD!5 z)8OsU8kwe0rUtLHOB8qpkuRhn60M9@;H@|MPg{Zakd4WE55(lb6?g~1WJ>EfMl80s z?sE6r;trn+5{(uwp~2Jm{9_7}hgdD~mLwv-0M3L`r#m9UPYEKU%hNP^$B^(XMB9ff zJrL~|bU8t^njnQ}wRX$UBdV$W2PpGQ*Hg+on}!Zh<_S@}Y$&JI=<;Ny9H7e!G-jys zobGg09vfZFltswY8a-VF0iQA`M{Dy=r;7<~o}fr;^W=~hF-S_OUNgX3Aqgn*^z5{0 zC#}D(s(sSk;Jya*c>I;Aw+0QHOuZLvHQtq|@n$DxTvCm9M!FiWXAE^$7(w%4@lJs(v#e)`fJK^rggDIV!&`GzA@@)}G6d4?eH(jSE zb!wfSJh;Ol&Ct*;$ZMKTPhQzJ6V*&q5Aqgmhui#n(edn zOr4GWWJ0I6znUSu;Hu;h4w{o3!VH~WAL*^AGP4rhPU!T$Q@7J}dXnGh^aL2A)04D} zt+eR$H1Qctpqevb^cQcqGCDk+w+m#&E2nNsR46cnVCdb&ZQ z)RS^t5e2nqrJh|sO1+GFGxySE)Fz@&MPe_F>v@_|?;Ghss#T{=OVPupXFw0WE0gq) z^OGb!$XZ)1N=O3R8ieQ+Wy9-dBLb z%EFDZsP|l)m2;YBHCwmcEd-KMmWS;Gy|>*IcwNN&$w56o8X8WqnxxAIPAZ$tM}HxW zh%E6RgapYNZPd*sUjw`*rrVErPHFnsSE0k)b!UQ$RitsTdmz^yxS039=VDi#uDMu{ zX1)^_yM8M2zLt8lu75FTwVGu1z{C1$?OUKRx{-OSk^Q2c$*97fNz7z(C{Ys$b>m*7 zBw3fO<~uTYQ)5HJK+DsJl20V1K%?mR?zdZkSK&JY@@Vj49*l-h6e`xmA&l*`ddKPv z-_$4^>NH!uR~t%b_XKG5r@9n5UOEnPTzz?x92>3{atw%g9Xf)YiB|6IW(>7*ZI*WFh``kOX5x%tqs@j<=L}oUFkw}Mn zCzfDG=8VBs?F9}p@=~Gt+vL>9Uw#@R-#jZh@(X@sMn1`fyxsMB)0QQ$bsPYQvaU|D zpsy5(lf55l+`=;YNRR8}=rFS@7&Ap^Fc0)=p(;I0C zy^MPNVy%3+Ec$zTo%*~J<|Ut(na|^+zsmT$%Km(<`8+22vy9I_w?ChwKRcQ*yUTK8 zEPf^NSG2}y!I9EfL@TaPFzyjx=ov?6ybLZ=q;~dgGv8AWPKlhY) zPfv-rNMbkJJST>E#)Kb>HXR@2JTZrqQpOOUfgQAY`=Yise1}7?~s;gf4Mvc-J^b$ zpI4%!_&JH6I91*&1{{y3L6>PqSgMRwksJ#R$k4$%-61X~I^`gk!9_7nzZ7G_1qLzBR@ zr=?e5wr=NeDioj3)Aie6{kGqKe4@P^X>S&mk|#fN42n;21&#MoA1|X25zSk6_VVWD z?U!iA`7ia)$y!=nQF6<;Tcit~Lg^Y1dI}fKcgk)haI{89FX{ep4Xxm(K=8vV=_zch zcA}iqosN6y4uKm6NS@D;_RgbOKM7n z#d_Db@rF#ef}d8-2JqRMS{Ppw+?S0$wbg#kk!MlK7sTb}X@0%dz@<4@!Q$v8M+^EJ zRbv}F+_5|>k*mICXF+|-u7di0YrOI7Rs!7yU4W%42xjMShsB%Nl2Sg(d7J#|vxsz` zH~3Z;u{Gx3rvohYcC08+Prxi3E4r+H_|)%@J62%Pd=bKjW;nucaI@_>M#h7TEZO9A z<{SLfE~8TTRyIvVJv1e8E%p-d;iY=%zX{|{-1Z4LWKU4Uaxd0_6SNO@WSWx`iaH+w z-ua!O+v6|N&JT9%#6ECys zc+uZb%E6wu%V<@bniKZU@%%rJ6!P1?O6IzHf4Yw5#x-&3ULJ{d!U5GG9l=RMs@o=L zb`U$XW6`7`qGaasi;*P4NjP?yz|c-82+qPBm@v~I)%eo2KpR{3w2by2v$Ta69N8s> zY#KK2X({mM2G{59X~~mcD3ueg;QB&Ws9k=YmITU|0ny9dSM>JC(K4C!AV|7;^Ly;t zc8)eF_?e8(Ey#*)A!DLn9L4?V`;uo(`MN!=qy5J&-ReekZ+uSxXy4N+0IbW~(<%V0 
zE7;R20IUOmZSw2vM^G!e5S>pInSQ3+NhlXV#a;TcN<8qInim|Z%4Q^4T$IJ0s5<`! zD=6;|SI>ee_M4;C$LC8P2k9zXXCJ_0Y?In~6f3|slpsZot<^HET&Eqk1mF1TB{!B# zFcYo%I1JDC_(wK3kmU}dNg=ehbZgfMeoUT zyBYxiv5TsdKD2S1^(Au6uYV_glHz~>fRi+U1p?qq9?RF6z$fa>Cdy&8v{u*8{XXhl z#$3waioYTD4wq!CB#XpU5jO#uR47CATCA60J5GXg`guAc4`+5QxsaULqGMjr9hLt& zFNw+r2tNlZyAr6JOJ6ml59>S{mCN;`jmo=W!Wxyu^}*I^P#^guE z3P^JvM|}x&E(4wQ;RHIJN(^z*(P?BZ?wLR}m_X;9^4zXonr@?0CV)X_Em!d-(fKvs z+p;t|2LfagowtS3&PktKo78XPLhUs4+fBSC@NDa5sEp#Wk+;S~6;#-PMAME<})7*hdO(0>F4>TVPu?8d&=hhg{@cvqotOTk-e z;GKPL5?+I8f%knbrL{3#tbR@o8`F>K$L{dn-y^)a-vM4d+r={l%rZhl)I{BY7Chq@ z2zka%M#Kf{)*JDn&(GoXvTg1F<@+8m61p^YA$RDDzN#G(+R%B%H?A1@h%y)uRbHVNb`4n2v-{5LUvRyr|@; zc+sywO|UD+ai@KmvZnX93!!&NoqK!|@$p2-$dRR!)#??lgAnvunar0F zM-R1a_wPN{^8*BmZ>s0CAf?sUB5hEvo>;TC7Y5 zJ623DOt>HaNWw3~W;s^W7Os|?zi@(VI9g9db!?Un-_}dA69$P&y1-e-x85gZ$L#oWRbJsGJ+ z*4P=0>WZdYho!jo$$R;_`zVxti%TXdIrVAs>zi^2NV_d@69IRm3{Uh*t0kn=u?W{XT$ zE&T1%L-cshXvHxx7FGQie&jkBSrojQHCX0HH4{e6_)h^lI zQsVS)=*b<}Ox!xKsDsY|(OC7pbMfe1C$=>$c8C;gR_rKcbG(sj=wSF_{6_=$```_qbaC>xd{TS*U><@t(pDR{%iG?0fy4M zJRbd8@X~|N;(`pHMepfGRQpbDi=h3^`W0@kQjg03OrHMF?Arr=t8V%kqtR1HB!wLgdy!4Aj8~^cgRjjq9v^vmC+^ zE{T&;E{R{C%{??X6>IADWF?&LS$b-n;-mO*e?cHPx4^jt&Mj~rxtu`YE?GH6Mno8K znOo5peY{y?GQadIdlrOBnsv&PC1uPz5m2JsG|8Rm-tZZ+!D-sTXOXgnr#TyWL5d)% zo62XyIc}H zi92eMwAdW|m`dYCN7o8xyxuejH6X({O;bMWC3TO)rZvv7tI3XktKlLmQp0m3H@3Mm zOI@Y|i3nR@4SEO2V!0C07+4?miJxc8|FkT#Ra*n1hZOpIO*CM){B*x*%s-We0C;Mu zmFGP&;&#Rh$QXM7rgyYs^(UifyA03Xh+7;|H%LX5d+LAZ$&_uzL>l_R%GKo7Z}0ptrwFS0uQdq zk#GFV8^|ZW5;;8&HBqbMLH4RLZ-s{pQ`glbdC2k9r~{43HUsX5^_n%{uSdwP>xHo_ zlCAhG%3d>B_Ow*l-BWugJ2zE!v0YZ{wUC0k#dCIIvZkdB3B0Lep{r+?vUOE&O!}I0 z9gfqWZ5>`S2m0VzQ9)sUf*GsjWC)kV4ws0P162W`;gBQ=%O07JdS$faxJ#-CSD0F# zrdAQ-w2&@w%+S76OQ3u$sq0I3jPC6_!8lCASl%7RKX!w0PzH>%C7zoNBQsRP*l~6m zjEDXgFs8+Ow>KApM;(1~m)a$JM$M-jp># zqkrNL^MHGi)C>{`Zu`;yq3u1Onp(cN(a@2WL`4)E3Mh&w3RXlDYC^X+RBYH4A=nET zkP=W-RIFe_K}A%MVj+Mi*eEt^2x1AMf~eT#{pOrQ%f0vi)_d#wvQ}o#%$}Juv#0Ob zvxhW+Te2;Qq;En8GWK9rP>$V$Y9$TQfRRd+^6@-h$ozc}DP31((HfHHQY=s?Xu2z# z=t!Eo|58j&ZnA|3>7tLOss||z5l9df%4cz#7u`zs8=*-%SCi>GASi_tagiFylqQvw zD7AV>I$7Y7O2p8y1Xb6Pk49cQpqEK>_oX_^F?Yh8pcYRpe76$cMxJleJ_5sHwBA5FzXGo|;35 znm@b&YvIpDprfdgYCS$)vpNHBAyHxl`XVV%yNagvV>D%bZFq?H9$-&Z~Z!6H{+o+0W@jb!Z>WB(uhZl)KBU_fu<2@($`hpHT?Hw=lH*ue{A0LL`Kwbl#Nzcix~v3)y=t zgh(f;Q#yOmjoG^nK_fbQE+_}q2?#F97n**@LAdPhzNT!({iDG?$7~87TU)T!;yaPS zn6hJNmfE4LWPeHRp`hSW5NU1Mm5kgYU$Ff3sd+Jjv{>0b)HA7Z7z84{E*Ei3`AOa#zyR*=|o@fj>SV-Boii zOevM2g|-*hKqnF875q8ZmHeyKuPce56+p=kT9E~&pP`=>yhTYqL%zr@q)cS_Fmo?O zHazZ!MN~s}z(`GuOx`z?HT&bH*>yW`kgHWNJpD_1k{=Y-U|wg=UoZ!>7+>kPLgxIy zLy4a{Qv(7u2Dl8j{DmTo zLs7wAZp4UGw!Hy95R_z#rDW6qNvc)J=+Q)ue-5HE{?VnqoajD6>w@{Mh-w5w6)i8{$U9-KJS({Hi?^ zzq^&+e%epcrpdR;170oNtKp_~s}cRqE@46r(Ot@gI?5Jq2_HP%G1zXlagivxn6g}y zFei~`OZo5{0~R*%1c#gIuPmV{Sw}VXIcgNcey;I#5?xsr+vY>U`vU5a5E@E)u;_(I z87@Uek(YjM$-v5mDr*eWYBHG(Rx=&x5S-0eNP<<;#H4RMJ~10Co}Hx5Me^ek`T+}R zeIwg+uxl`asz=&tR*ir^rhR{1;2<;6fbD4~>d3K*yx*Nkcb|OO0&oecE6IM*Ts9fY zU~)CdoaL{Pj!w#|`U*qlimXY@Wexyh10p)@!=hIINXA}rm!EDEOnTxYl7^3Li3!SE zie1fW@prC&JLWruG8VprnNG3#n(PlB2_a`{vd^%End6H-4Wbovc|lez%?f4j`ynr3 zwkG6G4vs2To3Q*j*vk6II*){QFN)1Inw&l{c=E6(9nyd$(LSpKjdTUdln*&`BoQeG zjyWiZ_lKbFgx(r=Z17i+WuTfUx9rjp=wP9kR9Ig{&dS=EW{O#*_6s>JQ3? 
zP*=mSpP;Horu>yO9Z1E{*=kzn>&eFQn)V2qSTN9fVL2o=bfA%GL6-joajDW^PGZu> za6>|5@<@PtXnPdR=}?(CYk~ePcs7TZ&rcB)SpIu{az;1hXB}2r|B|0*61Oowje8Ec zvD`yAhn((9-r>FuHccCKFoHsBs7wpX9I;^pt ziUy*7@k%;`AF=`xG1Zu8@VVZf;k7{fU&oD%TyxE1e|XLU|XZ0s>S}$NUyd#6s|Tyav6IAS-06ZyVLH%1t5G7aKLGa#zK}C5W8B zp|oPng<^bAFa|j@6>s1u`aZQAHVW+YR$l5Z&U3vqz}=s*DgQtc*uLQnVXp zP+f`qnp43KHwE8A1>epOcV!`b8-!CqF>Br1fe85$PA&-_)O3o?~+z%fHsiVT-MGA#_&ssDmv&Xb5)WGBRx1SRyjmF zTJTNL8eyaW&TOJktxwAUy#IXtwBv*<<}o$rl5@~zc3#(@y?oJ%Qg?AdsoYX z=8}R!@6=P#c*3l~HF;>B1Zxn-R?UT+4&EFhQA)mem~yD?98sWlgXwS+(9~VksRXE~ zvfel|kkle+=xD>~VhaXZ8`-6y=#Lbpfw0!xn^|jzfgv0MSFqvl$R_=OtZn4HVNE-o z8Pf;@jhgqH#%T22);hd+GNqH04lmLV$tXzp(@+~pba=(V3+xPdm{Yrx=I5@7;;txQ zfijS(Vr&EhmR1}IPFss7KrldvC}o;nL~R#j51?UH2M-jJ7h|v|2a~ss=J8=9HXwI0 z&sQ`wK;3ID0k%*Di;)q>)AXH9YOq2eNgLQKSyWhwv$E_W0@Le(6!Z%+sSa22*9Np4 zZmUemLaERv?5)!phDvyhltxm;9wu9b$fz~ufvukOcuuArG5+0gR4#++d%_?nlaYI+ zjd{bQL|xs(f^0kv%`y;%{5IhUdD1R3KqdTmwopCB$75g6=;y1~=v%u9&Xw3`+^kB# zG}Gig8s2`Uq=(HWvRZnLR~K=?5foRaE)1$yI{>OI{EVgdNPH{<3slJK%Pjlm)1|#v z4go^Jw<3WdL^zsaAw8&AMhzg9$ttFZP1*>~7QES;uqhT>KV@U&V+Rv0r`~LnQvA!k z*?#vdy#?%zQg8OJuA>nLzA|t2crzu+KEUi(9pVH9({YMEv)_F7W>5Oy%^r})-ZFsl zeX83VVe8nQ#%Jj`gNNeWogGw)?v0d;!{7o3RA8vW2HwR`yCZzqnB}6}aBHul;YWPD3Lobv zW8I}Y5o~`1YM%qcIE)VQ1~ISU{{Vk{bva0mCO$dWoBB207e)l#)l>vel>iD(EanApC2Qg-=MZ0QO?o$^Rw(@8D<5qqA)`nSxEY5+)qNaCD!2A~~J> z8req%IfF05jkHktPrK6bJ#&FChhHxKx0&=&uqk)(F9VJgDjR>`n+t^+%BGU`)XktE z=N*#N?MjkIb3&Q?9$0#_f0|jLkZ3hut3H9{?H%Rc4Y|{@#d5@nOZ$nt5X6#7fY(B@d?!2mM-CZR7 zr)up;Ks4eGU?}H(_Plv5+YhZ99TP{F3aQJj%~?PW#AZ6u(5xrUDX1@PL-f>X_Hc*hT>4d+!)egIU4ij)dFIVbN& zQ$0uF!@BvDzo$wOfz^|X@_x;@34h}v>WdsR8U5u?d7md2;G4zQUREU-AB88em}L@d z^_1egr&C@6mQeP4Q{Lr0o%}lQsplIIx$8wL1{M z`3M$js6(tuWI;9&v8t8X-FYp)M8<0D(#X0UTf1C@gWUw`R>|HATueJMC6hBI$dbLb zK+_IW`G`9u7-R(Wz77W^&xr?aR^y~tVWnz=?u0=IC&mWPq9i=qD1e^{=>c3ewxI;T z_*iQZ!7p}Zg@zw2%CKk>iy|yq#G(LQuW0G~;C!5AXTEEPTWA?9FL)Sciwvz<1}I-A zYZebcn{Sov&k4LqE+AH`LlGi0Y-$z-R`3E)j4Ga}b6}Np6T)eVfAIXZ4-Ga{olS~QrFQR5gX&@yU`vT3xe8lRR;Q>@Q6AX!%+GHZ0s5T9O4$c8Hu zPKL|{z(QJ|J6dEBehcf%fC>e+eGV;amJeAol3>1a&=Ud_>vKtZ`3gh?4e817!Wb<) zS4A{cVtvETtZP?gLEAF|x?|t?5+%jDhtQS7Pe(EFh5?~&lV}}?Kt$_E_B0yvsY%3q ziH{adSrEFNAE6XQS)@oP+XLz&L>-CtLyD?xK_@RzRRhsyKs85OUQTyS*AKmemlE1$AXM?{kI|(cuvZ6AU{$;VO z1G=Gd+K^JpT#+c%W=Ifqvh;!QUJj|490>2x6_a0|!^rIA&@z1dpdt{kK(RiD5VAm_ zK8GS?6`D+ow~(kU19RewWg%1qd9e3!0Ia&0k-kdbd# zC^7=U3{c)qfIL7gKCU~mwHw>+HPUvkXA%Q{wTYO|R3 zE9)e=^{v=2-|(~MT_T1;&Qij&uOd4v$qQajXo<9cij4%1h+L}iHJ{OTWf8hwwFd1_ zB#2Vl4ncUn<`ZJ4+6NqE#39KxCRr32L20Q-k8gENq-9M!{M+$0Uy$ZeO*M~dZ4x-J zf@F?qA)T2PqC-I?lK*OUwVL=AuUe7*5O0`u!W#vaXk>jZPc#&KII;4)G}HSM6v*YL(aN0l2GRSFwx<~OOCzk!vcYQ)uKRHh2A?24>Wz`8xJ2k_aQ}a~8K{ zo8MtmD>+~kKt3OMjpU2uQ*vz_p%IofbipK#VU3`QoGERW(D}nbvE6!I6v(JJg z(M$nqUqBKvp_&PJX3Gi72gLWHTcbJ{T(Zx}e+aYP5{FA*HQ=jw&V%|Uc$k3GmrEwh zC&A6BubaTc#!2=Sn}zBw;Bp#(y-tazQ6-TLZmdZGcq27LWX($Uqk(ObVpI+ypcxTb zfI&_%Nd5<%@*e)b(CIe5%g~7;)8{5K2|g3m+&oz(vIyUQkf{QALMDY4WCE!fG70`k zCN36(vfF1VGVwtsYwBBsPOyMJxF!WcHGnq~?p3Y}yl&D7rhH_gb>0zaH#Xy#KFL#WD9Cu12qv#J9b52?f`A4blfL&Slw zK2=!>ovQYD#SZEU*hj*Yp(>ukq8>mr%tu>OhFqBz)+qff3fKzd?JWa=W)UT9MoMUa z#qkitH&Pv`Y7c%ilIsm8fDr)2li^Ium8f)p3-?@9bO6QHN-$YiB7~{@h4>`34jzVU zS41w4pV1iwp_`&Tv59e+17i!pUy;KhRhsbNxf?tM}jR?G_!d#8MYvf;*p z2kP9|7o`-&Fkh(vOHAghoI`FmL@r3cpjeJzC~YYQ#nO_-&fXnzh?~g3cmu?qZ~~_Q zv9L^!Wt-H2Ln%Y0V9XoX1EUd8o5b^KaI@Y%mg&5%XW=-3Ay*|@!Jo|0M6gl+Or@{p=+158M3!DXz*no-7wh3ci`L}xO{ zOhrOJ?M}wFnUy~pTp|xpV;kN{FyXUJ{GbuPlDc_-wFPyi#FY75^F&a}?z5Uq2Ia+# zd_K=9GjxhWD17>>>g&ozkvZCWe&4{M2a10zlx!T zeF#khXVeEn14~RCpn<}v)-=GU$`~3vYC(gQstgURVQHelco2lpU=?VfMrbhYB~1e| 
zus6{FgjiSJLcqC8q9g6`h3G5^l_L0jtWKqxoD!8&&YA`DAj693+f{)>pe2JsId{%m4f2^)Ko%1i*XQB`r}-zst_&#%9x2+i3{ z&NbQM8dwotksp4I-xGS_KC&m(v+ZlJ_`SZi4VGFpRaX&$EC+mK;V2zB><-K(=j}+A zUbhDRxQo$igC>}Z*AYIPglCqtct8LJttGnP!0k5o7MHWV`I@*H6lAm&!2*9MVdU80 zz=X~uF4JNRj}buff^LXZfX z(y%cQDX`~+d8pGsi(fD0h_s0#Ya8%l0p7c;mnt$|J83UPcquSm+i5RlcqubppfZW5 z4li}a3sVpA;-EX?uY`J0%Pv>`R%F3f7ngGe5@D4_M4Rk#B?h713ZVhLTns|F6+%-< z%J?h7AE}eHvSnI~rvf}>FR1aQ2(P|VpArkTIG7e1?dq56DIJe77#k{(<{l8!Z+X>O2OO+<5Q7wbC`rfvuF_=_ug&1m|!+kIs!=t zL70b!jO^VlO7MC&M^Z>Gem3YvgU>^fuji|C@DdI?(?Shab=uknR%P0%0jnZy)r3`n zwrWWf#N|pNbF$#+P7EEg=rIulYO=gAmUJiSX~QtpscuK?I>fFGss$Q0#6rZJ5JXMF z_kg@RD9Oil3{|qbfs{>ajRDlT707_Df^`Lv|EJb*Cd7y7GL$R_)nzE&7H;>8bR+qH zk)Kr^Wxcu)*$lFbs@9Zsq8vTwE@pzxh_MKVx-iEZb)+_%m_V*!8YFravaysM&H;CY zJno~t5r<1;5e90aS(uSwhMu~p^*u%sPm)782y0CQXi3TdI1F$- z;7E*K0f+@F57r>sN_2|jt~wm_-u2DVroC%-v;vG(2wtr#gY+knuhOLb=#(&SU`xyJ zKCNrW6fCYBEpsI{EO(hZz+}F|h=L)D7S5zD@W^yXpjs+>;9mt6Yzk;-02k|*W+B%3 z<^k3R?}PLrJG<<)h`pS35Pva53Ug4@DuU3wa3` zG+-jPjMjn53iT~0A&`N#+3c+Rz^cC(a`|Bzxs6GHHWY!VB z2uZ@x)q)hE-2k;g6|1o}=mNA3kC1c}I%TEe|BOdx1q~>>%7CylYd868VEw8}9gXKw zJ`Icy**fI%)4!u$iO^K`J9BToVG-WcE<$h=fFMbwQ$B6+S}~d%9w?TIRMW}U3%Z_j z8;rv{f%|0~L77`AGm0{IP-Zk`?xxHb%G^hp2PpFpWyVqF5z0J9nemi)k}?x1^E729 zQD!n_o~O(d%DhOK>6DpCnU^W^DrHJ3Glw#-QRa2Z%%{wolzE#n?^0$FWtLFpead`D znWdEZgfgE|W*KFcQ|3#`d_|dWC{sq6?RB#L(9dg#8(3-!(!o_Tqj9;3{|$!@uMzK za#pYBE3rkm+R2A1_F5OEd>0FRq+nWA$o(G>xx5X2h1o}R%`k<=vpe{7CO#lV7>4&AoXpk}k#^ zB;{&#ug*`;n;-a+RqGnLpH&Mzd(bjZy!~rjB#4a}OdVh2P%=zaWT?P*swUYHQE)O} zb%dM&36_n^yl4*IkUgkl`{YBk!sjvebW&roG!$6Z!d%twGoQU&9T6ZEQbQy|AROTj z@fH7DkaGcil!M``YdXnTHP@Su)kkk{dsJUnN{-P*Nm`x6zMPL1M{ureyL_xts@1R> zQKFs?y#~!M^1o(!;CX5^RH67ghrPuIUx3YErIWz4*pDrg+@3>K*ZTYL)BqfmmIcxZ>DobcI=Bbitw@vyB1+okE&_R7DYoMtJ#MvBveJ)$n4htWFxT2Q$Q=V+D zri*E|E3W!vSE)e|lvH^ieyxj@b}^V0EA^^!;L}`xz?HN62&wp}XAO=ZFq>*7efRV3_k7|EHrF>GhT7kHVL&{K} z&4z3exq(h-4jewu#jTBOSJPaE=pt~B6{@vSL_3Q19Cn{JR2n3o<$}N}hQ2-^oEJfO z5Co5=uLnGN#M7L38i1DO@G&4h95zQw!#@dL$O%Q>MuO_Mq^APOD#;fiXDvle9I10O z(0et}i3uSwoFK<&_%|&I#Ma4I6j+KIOmUST^Bo2dsiHRqtj{Q|B4l<&s${V8|3pk7 z1}D<}af`mXE#raZ7RIuvBpP1OR5+>Q{|3vg0)Hb+Gi3R^EWcVANDdh!F?^1gldtPv zgWC+KVICfLO@pl8vixQw-%pJ~w&#gdNwn%$5Rm-G0N7k!jC2_C)O~x_XRC?Ydq;&U zj8N9_?}yu2!iIeK`G2H|q~zb7$YZi;ycUi3{{-=|+|E*QX+v%l`ir7yC~n>etdPPq zY+uy{R(2Zd{w}a$hlmRBhq^d^M`G>h-ybd3fROkONZg(Ih-iOf{LVnD6ZA)*)vCqB z+4Pra;gB(3z%IbPlm5Eg0N<*7RLR&EQ82E*J2H;bHhWUC_rgm7%ukrv@R$D0$3raC zcM}bGXEPD``@Ub|3)p8M|sI6!mCC7 zG}fN0mu%#JV46#%FP~g9s$uDW7WkDM%6>JtTBLzg=~uO+EI3_F7?vD{Qsdbety1=1 z(F)`+3TTi839T9=U0~`kiOxhSdL^zNwD(52r0fYVj>`3WawoAV0&)#1sH6SU z8Zm09Kvdgrq-}H5bN`6inCPBX80wUP$wzAhx)IYX=e0(V9V{Kgr4@qYukj;;_6q4fVt|!{z}l`I=>~oSbUAwI3)bt92fn|aL^)30~8Fj{7U0!$ws_p{|xZ$nAUe!E<$tbOZiXC zzW>Ao{3pg+4&zF$5MoSOxv*u%$jB?31FwG@^33TNcJeWJvVs2=qp7myylP344CXWk z-!5_Z4@Aa)A>vvgkXxn{$+#TbOx|Yym{$HR$Udo6LM;%ZS|I|+97?(;#;nG!Abar7 zu0Y7PzLM@&%uN2z(X0~pAe%Euo#)dOz1bSkJT#qag`j4CkZu3J`Ubc1Z8?lBZiS&F z1nBRMZG{N9LV7)9kf{)|!9f0_6|{(~+Z34^=9u}PjDOHPKo?UEQQ3qbeYwTL;O2iJ z&NU(Al8X5+#HJ<$`34#SPZsU{FU`3{mvxbg(%b_RA?wv#VNLCM=f8WeZ{^*x0%rdw zrU<$Sn!nIEiJ*?t{tI#FzYxJq2tq5A9J<7CVY-bhRFj;(xthx)^JqetH_jh4i4^F? 
zrbsJrSXV4MMMQ+L`QF(7$FQxJyUj(zeBWwSOEtjU+oZw$-~7cahtLRqi41J-L%Wq3 zSj+gAG4M76GZ`4lz{WiKE~4XmF|g6!iwWn(z)=hw#K2w*?7%=p2Kq3tF`bW#>GTzs z(6BuNjTktLfo=?(!9ZUIZf0N<1LGKYo`F{xc$8VrZ5h~$fuk6xzKX`1 zGx;oSgez(PYYa?b-~k2(G0>NRlNmUQfrA*>i-GMKsLa4G{&f1~47|-iDFc%k7{kEL z3|zv%$qck(U_S=7XW(_F92XgQgn?TbxSD}W80g8s_e}om7`HwHwHWx9X(uuUmNM`< z12Y+Tnt=xx_?t=hr-&|hM~3b)#$C$5n+!~5U?Nl0RyKp za1jFo7`U5(js4x6sgFkYW~QHs7`T9eFPG8fEn(nw2BtGGnSpT(j9_2@1AQ4dmw^)* zXve@o3^ZV%76a><@4sT;eFo+;Fq46a3_QfZtqg1|mnT!sWejwYD|hG-7A5l;a7~qh zjq#rCqWv4)jgZ6m-)3O?AO$L3lvss=%3?a5fW+46HO4RBpiG4ewWRUtaWs5yL8sFg zP97$|G%cMwsfe4fj3sq{q;jP4^Yst>pf>z7pOYS7`d^Ts&CJJ$#p>H@%&5`QwKncTDXLPMV=n?Y`XC zs$lu2q*Z@iuADgO{V_fyvi938W{2^@uF}39M`Bt6YWmh`vqrT zQR?|ge={$_++=txf0#@0P^C^c(|hpahc9(B>#(P)lS^esuV|r{mEyd7T6Mo6zUv)a2loY0JpOJQH%i60iKYW*X+qz|+7w#z#tqOV+ zlqOmo>#F6ZEZn`0mt3MX%6a;D#iI>n&JL$!v-0Og?%ZGWGBf)H$I|xd^R-7bd#*hg zQD3*SbJu&^qdF-6np4H=%W{Q%?iF!Q`N5q#Dj>n;;fiy0cWln=sNTuCaa-ea^iVDP z)I{5(eR4(Sd69jOxxSh)XlbRxmdk46wLcV|vf7Q^>uvnU%=8a#!=n#QHX8`C5X|Zyj!HcW}`CEmvM;4@H?x zmlE#Peyq5Hlh%jN8VNqUdUn!2@a69{^VEk}vTT-Uh2<4-_wGY}A5N&)c4X3o_~~Nz z&^9>*;oSJ2ujJATSh%KJ-hpE7$&x zZ@!jnlVeU1H)90Kr_{EB^=IgX2L}f`#NU5n@rPr&_MTk+72Q7>by-);6>np)zAMiv zjJzV+QCyhIzcAz6=m{q#f9dr|xS_{L75BY~e{u>O9VS}!dhH&#J=-Dq*uf9?pKf^9 zNu@s8!^B9KIJJ02(3XJVy^>s~sihZjV{}pe$DMqGQg2O4sY-G@Y;f&`#o)4+tW_J^ z`3)}QCM{HFnf7lxn{{;IAp84avn@(#xK6yfb(`Q;WG5Qb5mTf8Lh67a6=8v5<456!|eXKUm)1 zV{E|tB5v3zta&uPZsQ(`k=|!|gD|{_D+hzx7D` zxZn2Dn4uZVA12ra{B66GyQYx43kT-XX-S$h=cPAf1m1N|Kd^7{iKXq{aw01fB(5dg z;OQ(cb(Nj-qt6@)4(@+Oa6-S$!dnyL?|hZ3&%9~{_h7?1{*kTyJh%UvSpFq2@KKoI zE5`&cugf_fc4}sB)3!H0gZhC?{E*IEr7^CayH`pkxdo(6bnSmEbw){t%57tHG#n0e z6V&>(pm%J030WAD$g;x#IDxvdc3R-H#?L*&H^*Q`zg( zgx6NOdakj`$N%pAt^2;yB<`{P54#Ez*S~d^R3^_iE$`cP!jRhz7e|;(3Dvc_S?zRU zb4rq@LtOi!Js&NX&dC{I*|*oaB5p(i+M8LHj8z@=h1a&28xzX%+EaG=N1xqiw*_=+ za0oCwc=yzSQI!LBbQ9W6{Ni+?+HGyS$UqS*wL&<=;U0I-H2G%uDvxMt&g9MD$A7%} z(AZ%0xGle`FRCpZ$=~2~?X;p}Gra{XXF_y&X3@ zT)QRyXxux-arq&~6vsf*pBwsbtCGuqY3i$f+m4iQx8$LGTi@;dyLisVgcmbhQzkyT z5HqGUp;9iq(aXT$Q@-BgZXJQ}3x+-j?>)((Gav1ZjK64an{#U7%Dr?Pc8cDGjv4to z;I&-*MCp)zZ6}8~XBD{=y*Rgjcz}1gL#;E$vyZ4>?R?>Wm$}yI-Ob(i+t>RT&Rip1 zZk&3#e$7XhG~;KMuN40{tY_c-tRTGYv_eH;n=k&#l`q|Lo8IOY z^o*04jSG=5;>^Ps^8ALT%t?|9h{gPSN{959>Pa*=<#~JSgP^;QeXRE?KMSP zz1N=@(UD~oJnNV6NU78CzFwwB$7P+$+f>nmAI91;dwEWQ-#oeX*6{}?6CZw>Vv~7t z_1hXR@r?0;)e~=BH+2YH7+UFZI(_rh-@y{9Z(Pahko z_qFTLP?;RPj9mAWU4J~vHEa79`!NO?dKQYG174(+aF1Hi^xxn%tSC2gkK?xXw~q9= zup@QV^i3yB`{&xlA2}7TpyD*9!!8R>v`Gm!YB*`X)uWyrAJsV|b&F$4&6TdXgOZZ? 
zUOA#-pJHy>RFq$3?qBaKv!2U*o(pzQn3q?Pm{Yua+JH@>h9~^9rCnAZ|D1HhcHce| zyQ`CU$2w15yYkW79}5e+jf$AK+x6fS2Rpv#pZr5#dhru-Q~P%R!j<(OdUWD} zyCvLM9pv}=mimAry?avqxs2}53nq&m&%C+PY5Se8hx`0WUa48Sxis%Y!mtCM^3p?Q1G-WZU4PHuh7(rvxooxXElXtzCWointIUKCWkmi=^@ z?X~LIImhs=Bb++LbnIPTG>O%6{_c_GsWY>l$Uj|^Aw!3ebaZPcI<2A zFOtu;UF&Ris?0CbH2G~9&|qojQq$aD25J?DKkd8Q=H$0mK5YjT_!`UQ|8V!ak{)e3 zbIvueo-1hG7DOFia&7cTtl!>Xy`pk+(yK3{D(XrwpLHPPMJL}lzJlYHF(uqf7eT*u z4lbKV^$7TL+1({n?^18Cw(}E=4wV{p8enOEuxyd{ieKq}Ih$&o1|oij4eAr?rM`WZ z47u<)%k`4aoBp{)m$J$qWMm~=cj28spS1MiS>b~IL#Chj?UCkwy5GsGi39C+E;+kd z`IEY(OGH$~#sWzal#tTo6-gCsa!;kP9IsEPC52F`2`SXv8u1t0sqq#c3s_s{9WoHfq zX%bnZCVuzcanz0L8_T=cv)=kjhdc}aRbsO{^==L)>;(ggQbapH%cVD1Zv3i{!*?Do zdiZ#(nw!{de!&*b=Y#uBxy`g#Fv~sc;J`wMJqLw{9V#D;`!-0JqGT@$&@qa+IA=yw zR<@h){DFCCgYK2+%L0uCU$q!xlNs!#cJo8w!OvNXP3~uIvrSf8lh$j!wP3s4_XfW^ zzWL7MJjdOADy9!u;kInm&VZ%0i=6GkCLLWhTG2o!df%ccqZEp{k(TuM#nO}czOVFh z$TAvfwQg>xYLQ%e7v%OA^>X+Tx3g0EJ$123?z5?CU&wWS%<6zi?h&)?F3nk?Y#4B^ z&C}qEqkhb*cZnNOSu@W!)>`Gmr*BJRk2oeK^|_~az1Ev^YV0BH?=OU>({0VaE?7Mw zs_%Gd%lRwdP-W=q7VG6B>`#AZ3|4j98Mxo*(YM>7#dAi6<=gAIUhUM=>TT!Bc}2-# z7w@gS^3IkntKVFhmFkgSKUAU`Q z)aIevmSuHHrJBPy)pF(A7QAFePaQAEeShCs=v`dlcjCdkIoj;k_96A!#}_Yh-=HJ8 zvvElFey3=z+ZAuW+qv##m*=Uu_jVVb(Eokx#fImSr&TGLmcw0AZ+AF+r60eGL#FBR zimI8m7ap5Di5E!g#||6z{oe0lZaglykonZ|T8?>`dZbIt^n`C6&65)@EWD}v(L7st zx?=H+T;H>)Z8c&&5A7=A9zBNoQ|~q8M)r=fP2z>_M=x!8Gtj`vP~%kF9*gq-x*uJj zUA4?nq5rwK2v*93i7x3UU0T&4(^)zTOWi7i zq6$kETKrCP-=|fbT>Cp^F;yax^IshC}h1c(U@d@<+A6^o(?HymnS~ec-^5~ z_&C<1z2*ZWqt(2!9W5C)v>Ivu}(0gWnOcnv}O=ikGT89?99}d@Nd3VsPG2 zjVCuAyp#L>g6kgVH+-#diyzrX=WTIf_b+nzHrpOwGn!TIzSm~++z#qy2|SgkYikVs zU4oqFzWMom#o|DzM^Z_3xb2n+;cfa`#;n~u{nYI2Ke?_6&(*ZM3a9V+DVN^Si=zF1 zw7KqwR?l3`s(b6JHaB7Bs*NKBr*|&wsuHX%yCl~?SGi^Dzg3*>mS)%gT+*1Bw19D4 zLgH+&zk5f|pL;Bpe7kV=k^S-5slqB1+l>+2e1TC0>i+@gaWUixuXg6#K#$IxSar~A zz%I8sx%^0eoU%<1Gjq(i`)2>Tv*YKPVST-gs{f+cq^Ja|xB$24%X`~C^5826hHO7t_c}^& z;qi&rwu1&mn13=(t8vul9RH-~6>+Hg^xXK7qH1l>%b3LP)p)1k&xULm zyDYw=&ij}c13T9(cHVn->K+N4E4SW@lgt0>nU32RPtJ6U^e+}!rr)XmB!_<~=iYyv zedpzh*ABa*rrn91lz74I{&0oAi|T}1qO8x}eAAHLOK$z(@M_Sj_0hTtipJ`=Jk7tT=|OS+>st3-EzWeZNfLsoyzvO zIr*Zsi}q2a0S<9*)OWb>Q}4g`F)jPHf2Y$GYopPt1Mlz9t4X-}X|tF6jv)8QG*7Fl zxEr?%Hy>P50b9q~|-1n0Qj(zy){3OA#yC>gXf48CctZ%FI&MXLb4Y^{THszeyj};{kg0?pHc?@UzW3+aa)f>YN4npzi^+ zPpqFzxpw`UQ(`2b=&IN%fFMirOO<-{i^6pg>L;avLm|ZH$xlj|EC+hG(CjAH8^}*enJfofnxTGD8aNox zunBL#(xH5$w7)|SIXp}5dkS)H^AUbpUHabTo_US&*>06Jbc0YoDGeOF7Jq$UHASD3 zO8ARrS!;_1x=-n}sfc?n8uO`ke%8fU#p9MEi@68(qJ3n6QV-+^xz~{p`CX=e{lH zz9Z+pE9Wkhb7#waPb%lWF6X{0=boTF%-1yXsBLgN(`BFATUPPp%J=4LU8&-%IfB?; z!}d9>tSPfyG+KIWUdNc2jpgZ_lK46Za8N%f4IJp+(A%S_}arA(GXFF7~4WkY^a z%49ic$>H0`xgjJ?{iHN-kS>m`>)~;in~2+C3l;i|CKsm2PfD3A2Td9_yXDf;kax?4 zCu-NoPfD3A2cmn6{G^o0a%fA#CO2CS?;+Mq;zs5~|l%VGTe*AB^7igzMxh~ZiHouU&o2&=nUVHzu zx#L{4&yl2k4qKX3uX%ZODEN(4aOYgVZ<*gIa!m{O4!QWf=zKJN-(D`i>NgJVUg?Sb z=@xPxlRT*`YE87s?GkR>H&(#R;BDt1+D82v^H1(Flb@6_Sq|jh9r;NqljYE6YVDCD zy;E&2>zwW|>dV3XxktuMHlJ6_y?7Mq`>*@)Bx#?=@QH9ArrotsZU0^=EaVPyTdDhfkT_2R_YY)xl~DKrasPIA1AWcWX&Y_uV1qZv=eAeqtuQzN>wltM92- z%-wYr>yJI}++VoY-u3;9@%T96FBn?*`}7*ll|JCNYg6tAHoBdP#;;KqURFInMDwbB z=IN>H9`Cp@(ss}E!PnkBTYIoq)WJo|bX**gSvK#b8{YWoe5jeQqeFiVe}DFS9zUqZu&X889Q=}v-`**>-N}CY#zE74f2Q0W zU0%Ov?!zx`yL){r|1iXV#{6TS_1$~K*hFnrO&+B={Ktlvh@?Fs_uQ`f_H}%|Gcx)3 zkHwuFF4VducAI`DZT^CD3kr9>ng6&gKV;5cy}lo3Og(ITCO>-6M}FYE6?)D^T_#=X z{l3&{v<>f^hEw5Vqx<8&j)|8_QgHtA0rUVrb&E;PfvwLQDY;q>R{P$}I-Z`= zj=%l*-0PQbTAEu#&Gwm7Y2tKzzIIxG>YGQUMbQsVM6R>HkaBl==4s7PrQ|cLsw>y+ zxArND-8rs8y|R0$*~pU#u1C!sjSgB)JA8HL#D?;I0@u_$Uf>4(I~jwXpAj4%X~aKO 
znR>!_;CZ!q-kN`JmS+eLuQv5eiQIAg$)5$`&$=79L{#&=ZByfoPkc&Nj*7l)x4Xh- z_tCavUoJf{>SA#B<@SdU2y_qq43><(>bGgluaUy|8yhR!j3)ChJvh*%DrUP~JbPf` zZQt~fix%k*>i+Pv^PZ&kSNm+aG$eMzTK|39JM#BFdU{?x|F(ALX_(e{(Vu%oJnlT7!YkjS0)Z22~=Y^@{qrw36#IRDY!P%fUSHHV+2?{!|J z(v6pt^3p!{9QyMB)ED@*;yw>Nh0z#a$obKlGqt|l2H~@JOYL?9>&_h)Z`-BF?^pJS z_eY#l+eSRy9&)ibHsE&Q^gc41*xxqJ7S}@F_{nOX?^(FO?P{;#7cI_QZTm=}*Rw+^ zzir|U_Wjf2;)8E~a_M=hsQEdaUo40J2l<%YP;_YHG6~W%e)8CbZ3>sqa*i512d8cm$yLyB^ z#>WewpGMT_-^Q<2{kWxOo4M%Hq~4;5vtBJNUi0x(4_@TK?F-M(dN|4E^Ph=J{}gd| zg`@uz#6|5Ant6>lx8EjyS*O9T+->!?|I&YFa%hWlRMnx_@EJ8{V{TYqE8g|aIm-Of z-j^{W^q=f2b~$c5$mv3w<+_j4Di=?Q_p>f~ZR2`YI;Kaajc4RE_U_ODHtXC1+WGOv z1?J}Nj_)tMy``IQzoBTjld|1O@6n5OiesdLBRzkWiC+H7yA-oM@%?Qx$EzPY9jo^n zy;gDC@orNebmb=>IyCQe53gxAu5=#q@l89otCJ_cG|~!Wr(V4GLMyw$*TEG1R30#xDJI%%0A3Cv3Vni|pBIa;W%dq~S~xCqV{&%(()FooPPP}Y7$pHM032$A>Z*4^+JWk_S)sM8n$`4?Qokn z^kRp$&xh1WBHruI6vjkvf3SY!g5|w^Q%el3cRBBr3Qg~i&z*QjIdtNqTg~SwyShsq zgDd*+Q}f(=zt}zXUC&Nq6WXfVxgN^hbT7hpxph0;nI5AS?+_gEo$@_&pX(?;x%MT` zZ>+s_c&`44S$5ke4_&x$;c$kTKtFHaQ4^M0`?D4|H%;)c6uR%-Be$AskGJy)9VZ>OO-&W-zKmCMo7Q3H#5nsi=x zcxCtKAE$ZS^4m<2g*b8oGE2{xvWmFd*q}f7z4^8;I9DOhB(Ws>`DyoqTrQXRjvnh2 zue|Qo=a_z%_-&v11$PUy-|Bv{TUU0(q{FPEE|L3-xsslw{yG*7UD^HPunMdX*U;|r zE3bYpa1L=49o!4GyAXp}wUrC5c-cUDH+$x_wN`r(?L? zos=yHyzO=<^qpbQ?CKPM?|$^iUw^G1?5bO} z+^myZdeWc)qkQ#KJ)e9~IU1wq82&Wx?UTLT&h44<=lbHlEBO}epOr_W_!n0+>{OM&^4;5CnSmR<<56L*~7?|Gc!-G}q8|ESC9hant`F+cAmb2Z2r;%)6!iJTz+0L zz-IiX^`2@OTkntNo!a~G##&C$>GvF;a=pt^yFJMcHwva5dS+hL#&Y;n)b~&HSI8f1 zC@$k)csR>u?}pIpI?i1u7rlyzGyZwXE_MBlbGe4)8D6)ZT$!{o$uY>jWR!m4=A^=@ zYQfzN-|=>HN*1r1TtDo4uh5grJBA1o%id|#Y|E03(c3+Du)5TC+usK(w~W)7yUd(t zx9?GrAoR_#UORg4|CRKs7rRkq?|X539$;R%;UmUvNJ*|4YQ_!7qza`&Z;fZuo9@ z@wv{b3$9CFS@?ad{rzUYW32H>uN=#xTYoN#=k_0_Z@b5$oiYFeDML3QLY3 z80S3XbH=>8+=NW5|8Sn-)i08kAA3ZfFDS@`U!d3BB0=T3Yx=S$m9EeBhL2k}IJ(cx z)z0E1*vF5hnPZZjtGrTA8{M$m{8FsfD{)EMHg1a5pXcVT zM+LE|53}_Dj!l0wp}4-zCec5{S{Rle|3@BHiuCaybY zOnS9G>9OChnq{}^ZSx#9v!t#bwo7G}7m-H%_&Dq8e9wy+4mBjW9p3l z++xSI{o;x=>MLH~vQO2om=-JPV>#5w?#+TFW8I^l_xoEizb0e%b7TI9awYyYpYkai z0;~??`dNgAWfgO;4u-tjIen#yUj_S)ls|Mz`S`M}!l(qj@>$iszFU-RxBJ(Th)JOyf*q^4>C>KWZF691muKO!GS}mWy}tJ~Nc#5qWs0;%rk`VErp1f`hiZxW zjat`(FDjk3k4s#$sBx{J((DK_Ll+bT5fw{h7g&nIvhGriMl?oIqoT3JiXA&KnxIjG z9gPJwwrC7?G)5O}*nihG=NfiFbLV^i?&tNo|9GCG!)M>;I&IFJIWu$4nO(=bTTkCr z44Craev5_&wzta3w7WDW=xoE;)1mz-pZ{{)veED_(sp{U+Uxt+ze)M=Bzb)E-A~W@ z&Zyro`PY?a%4*b?HQxWT!7Kar-)qOUQBVEaF{;+kc%|>`n|aOK>}okcd0^6-RsrbL+M`Ha7UEU^n)i$|i8(n!~#vTz)=zVxzr&dGd$;Gk+TT zFehU{-`&YG7w){iBV_ZtN%`U1Y~3g9y4JA6`nsMgv)21J$;~O;@=crhn;-S`ncO{V z()k{T9&c578iqF&`fmzPAF+N?;aA(`u8Y50m_1-+u;#%x3&VcD^rC}D=K+0v=1rZ@ zeOu3BS>kCAw=Mj2pM5v{;#!t+MPCd~Y@7Dk%HWO4?${`_D5AgT9+>-njW6LfY6fQ`J)@9txas;@G|# zd%tmYeCXfy;YnZLwMUXl?e6$kM|J+b#g)!oe7-qrx^hlRJ6UsYfo$J5gw#Qzt z4EaHCw}LRA4r9C~UoE_mnNak@Cfl7hQ*RnVR*kNhQrvso4{N%`4={^d5mc~3H@?Za zn)jDJ_UPQ|_9g$XE%qH&WE1LJ!9dhzM8*jmmn8!3HuH(a|QcS!FJTj#;g@!A`L7EUcG&e+#( z(f;4!`JUtC!C5w|e!TirSpB3{`gd6s?tYVP`rKS|UOi%Zf6JcV&+6^H=-82FkN4+i zZ2a}%clweaeexC!*>Pv4Mfcn}7UB>fERYj57v?T;2(5s$g4DG(^H*^(TpFk1v|Iw$ zos&k>Ir4Gjk~lr*$fbi-0~*aGb2?59pJKr$3O?yXdm`5r{Mg0@=2(jV9AD36LWz5(M53S3Ni;^^+G=nsobO~u5 zX#wdG(qhspq@|=4q-(aYe9Wwv%1LcVTaqeB+mqUpI*_`Mx{~^m29O4khLVPpMv#so zRg%V##*-$JYDrT_Ge{?pW|2-M%_jYZG>3En=@QcAq`9Q)Nb^XylID}{A}t`@M_Nc) zOj<%(N?JiGugB_JLF!5xL7G9jgtUP43Mp5g;*$oD#*^ld=93nZ9w99zEg>~)K*yg{ zLFz&pN~$GYLRv^#N@^o#>9~?ANwY}{NJ~jA8nW0-8g-25Az87Y3XM9Qg@Vs9>O^&ln)3h; zp69r}eL1cTeD(m3Hn5HSoP|u*s&y(T50WEp-+LXI9d^xs?x=XHYHY?Ce9cx z4UZ?(3izku0U(Q#9S&Aq5TSYqWsEvjm718Mh)&i>Q5DgN$@)~CSXLz-YnASw<5T*k 
z(cxks);Sizn15yJrshgJEB-&Fg6SmaQc|HNViNS)L{+pp39es?B$Za$Pj$TFUy8!& z-5AbTR|;d1lt%l1StMn2vNrRdDh>BLH6}%&Q>UcrG~!`W`UP;xu<-Ceu@qou$F;Im zROL@6jO|})*X)3(CFGRJNty%&wjq52whE=OlGvGIdQh=^)$JocuM$F;q|&J3)G_~7 zsnx@MvQjZWW4(S`Q5>BfBYV~Xy8p6k9YE(&ZY9v(N5a<+e>!Og_g=ANys525$ zOq#!GC`hNW@%)$BRfhYI3WxjsQ9b@k`c=;Nlz5$56=QPRb5eh%Xxml+$#aTu=r0t$ zYJIdWL2EqiIfXZN2R?}^z23yCP^BnXmv3tK(&<#0R9_S&AOTKcU37e=Nn|mE!Y4UN zi#@L*Bx(%w^ok(ppD16Sp>ATC+}z;HN7N7u>fBMQ(y5XlybgO?j*C-k)X>{IVmF6A zO4u-ArAm{m$xKR4)hpn5s`Xkp#nIo6%B2J|i}Z2I5R6MpP7p5`%1@Y`oSe9ULxO@Z zTnBbpfqqshpjeV>rG)ZG1&ziktiBf}cG$Q0LHB8RgS;O*%Yr%gi9IK2E>!fFI^g9R4FP&k~%3$t*g>|{CDDgB0IY9 za8mb+`+!cXOGtym%MQ56$tlnr)G_3vO^r%Sh^{nqBU9p2HDf=Amcg7l1Db?J(bfl= zaVG`GMaRQM22A?+WL*k;NPylQkV{O4YZhp#2KqI)DZtNotWnXWB||GqNMsKUz;L(X z=B7zaOoZEuNJye8!*yMqn3$l|C+Okg?tl%0oP((W{1kBNC8yhS(b`l_m89hy6s(LG zTr49flu;}n`o$(DtMC>kCK+}dN)wfwEZ!kSMnWi8yuSkN+KXxTZcN4dAN21AA(-n6 zwWLW-*WkSeI;@wjy&~!Eh;jFBMoz~7fRT~uy?Q7(Zpgp^14D)l6p0tw~M74^r~@Sqx2g~=?enx3ResN!$Yl{t67VyRYPv(=jUgRuBQtZiUI5uuAvdNEx6t;F2h1= z!vRxRz+cWn8LtG#aT(eSa2B<}w>D>CVF4xxjsGz=S1T)PTGr$&$e`x5tZ7rS8k?I# z4qt!$wK>PRxMYQ9Wpfssy`5|5uq=oHnS_RBV=net*;x>agKXmC<1H*|XQXS>wc0A_ z+vE8}3;yh3v>_G75)wJ`fVk-Jc^%hK;q$rljE-ss8?OC?VsQ0fgL;79X63|ETY)u%Ue_Y?u!G3XND} zD~VTwV)@p=hWTxT4a5Be88`^E)U}61Kx;u5l(GE2c?Kt}QSgc=go)7;{ z@h@KPuu2T8dcJ4NFY87fF6$KCN;Jppe%HhN$oc89^ItV_PxKZ2IQi8a^#TX)sLL|H zwo99JvN z?77MXY>{ajtC>GHTQ_IN&`qoQ)#|l#z~?2A*R$$uesA!+Iw!9<%XVzLXy}JsgSss` zKJ2TD-u>^W+HSh)A@93i_w??(lqSOq>uqTDDDi4%`GtFsr~K>BMyf(I37x;NX?ZKu ztV@qSoFChTMt?W%#D=C1`p>awNA~;~%E&Nnbx+_5PzTrMMvgOt#yL)-dH+&Bdq_8vniyu8RWVf4iG^tKR zve_>_hd+0$2(W1QxaIKN*Y<{Ir-k)S&2z_H+Be-dVWLd)O1@&KZD#7axg2+M=bpZ) z*;Br4Dx4ncwrj?X=_@V#{et7IHe7o*@7wl&T0e?+vGd*a9jvzc@$ktS5oLqoudeBk zHgWd2fzX0t-rW0YM0ta4!xl8$oyEoO?xZ+-{^;e2Ny7S;!9Sh8po|;zb>u|PT zuGdO)ixYgUy}6}DXYUUwe0_7Luk1F#wa zBq^yuoAT~M?+&#-*kSSIzwD>}wdg{+d*El=9b5NPxNchn?|IL>cKXJ^X@&7aR)3zb zb4K4r?Z^2|gzm?yrCtAKnqk|c9&Mh{_Q@Y_*8F(q$<1cv`(wLjW$)_EI~4pf@5a{V z7BSC$8#v}e&-!DA#x>q^&GV#t=ZyGe-Ns%$y5sr2gqV)7KX;x*X1&>QZphzxivG(g zhArMw=lyos7aJV`n z6nSg6w+#dCZkrQ5Tz>!8#ia?CJMy|ayQXc<8QHc^_vNXz#}AnIpr2#aSVeS^hXAuHLF9n=b9%{i6ON zSN*(0fq$J^+$D0M;M(BD5Azg_duN>c)BD`P8*TX}!~WbcGxF~9^jRnGuN@iYT+<`u zzBl&iocxbxVU~WM-}K(K-Kob9Ym?WW7Bu7ScfCwMIH0)bd|XLI-u5{66We|mT9jCK z#fpX8!GMt-GY|BC*6V6maOt&iN87Y~WjWce|ClSAFHZ=%X|a6$vQ#d+@116tHs?A2 zH+h-a0bPuZ)xloV-)zZf+-b+4w|y4>)uD+?m!P`!Yp!i{waHZ;pzFT8kxDhA$8fZ<)LNU zt{->U=Hc2q_?W&pF6m*dyk&zb%4;53b2BX0t)2YxCjHmGd#^S+ThnH`cA4Atu+p%d zcL%;{R@-?)@PV6`x6}#wJL;P+9_=~P#xwRe?yDxN@1@>8Ro=;Y>SQeboo4AVZ=PQm z=aH8Cz`60n*q~?KoyrQH&Z*bHI$=SOKCeMXrKh#5Z=LLMr`kBK@BQS&?=yFG{n^_q z=+&h}^I1g&GV@y#{$3n*M0Gf7^OfJH-+0))KoxITuz%?(>oFG(&UO?VjNH1k*Z5;A zJ2ib)M+kg+cEkGgwe1e9n=*3EDQBI$=Oo`12MVkYbYffgmHYO--&1<|-`Sq>ulU3D-udaZ6Plf?Vf(&pH@|($p2NwHEm|I67g5j6 zhdd`=O|Nk9xO{Wm66gCDEu#nN{%D@J!+%8QOJ}av==}Z<|1-1OTib0r-|na8Kbsxt za;nx(C;$4s-}M2OD^Aw9ess>DdtI}J9-fmI;oQUfGqYKZ@BZ@M`t1kv2AR=u?|NN~ zO$)ubCkk&GRQYkXb!6uYZ<-go>loVW&p!?)l)o zb)ss}5!-8f7w%p$uxxec{a4q=O^UAFWtH=d6>YImaPk@ZNB14tJN>rn+Fw0FR-T{i zu=3@)l;f`sFTUS$jO(4qt;=UUt^N8@_QkCSNAPD(?f6XMK%ia3NYmPYqXOq4(yVL$VyqPn) z8gM9U!hMjLB_}hh$H~kaak5%AT#Z_vaW!gp;A&Vnay2b_ay2b^u4bJ8&a6%tXIA%f z&de%-Gq+0R%aYRM09wHlt~YBwz5YBwt9YFnGjEUay07EK&w z7EOI*mQ9DtESqU%mNwb4IySkoI?eaU>eyb8)oJlqR=0&^jk+y6)TrAE`;i#fO5w^S zx8u0nX)?~!y8)NAXbD_LpPO?hm%8IFB#ASFrn7aUU{+7ZrNF%4D7fQhpGS$IqQOU+ zL1g}sqJIj_^{L?QH4@^bi1CxT49*GW2dlbB2qDEa4e7?Y!JK3w%ulKyKF(IgK}any zeun2vah5X?N~DI6ZV*ZjelcKAg?Y?G@MR85^l6J^|38_(vBc6|wt-UpS0#Yoe34-n z0~@HxXZVPYeE)l&*<+CSEQIM&S;RD?Gy@0v!?-~#7dS959G^9C*fJAzpw~c7qtbAJ 
z+(6DB{h_vFL6HNwaPheWQ1nlL;~>U?r<;M`r-7rO!La|$v@yN^PWpqPkewio9tw&Y zIMBCKf+hw(a)Y5HSQ-&943$b^%*Pk3xL+}#|CH8$Hyz9y-OvO*&JOr=>7wun3wKit zOiod#HOZ-Q@i-`!tjkoSB*VBEja}k6IeNfohgzpg);V!Q#nu92JfQ}L4TKgBuhK*N)`)FA4$deAoV9A$a4tU+C{)Y7BS%RqQ#{7@lvd*ymT{Z>#aH8dbyZuEx@{ZXp$jJzlbB zYxD|({s#M_1o0RsIKRr;Yy;2Zbzn>d>oyMlsvra&LFW3xkukL>#CD3u5xyTBK93cT zZ#w*CBXCY|WbpW4nx-xoa>o8M8Cp90o(q&j@eggNdP##IW|UYiY&DXrA&!*5ul(7w z@gzK!=*&30X@IeiPLjW~$WnL(`7>XB#iNn_umX&`_5schYgzjC?8AkKK3;GZ`c?7A9x;^sjNyYJ{6O#xhm^v^K9i+0kQ>4UgMS$G zcRnWRgae~Lbk;R$nwi(CZDCobu2sGI4de|QSvPLdw3$tF+ZHWbeb!phrfs|S9Xi_C zck0~5!O_XtrE9nDJ$m-)?b^r9y{`u^czSvJ`1<+x3kd8#ASifXNa&!j!Qt?;+Y!S@ zj2!j(=t!k13g*{ij7?0^Bx}d%^eL%n=^2^hznC!b%SkYgE#3x*fAQ&@l@i8{ z;TSpqN5Y2fP;wUEvEwDNvKkk^XZ9G5)o%pZN0DA>$o%^@VybM)zBhFOsg&+63h!yh z!c8EpFlXGO7SrE;t>GzyNN-7dTxR}KI+6x0ty=DVl)f)%0BK7pUs60Dqz&rEGhxx;VUoVmw^YR=ouO79a0m*>yL88!wh60eo=nopF_1 zS2}hm|NqlVhTa^n0n+u}GzXW;9R4kRc2$aoJ=Vj&6u7&<9voK{;<`cOy1;If;=iTG z?lIW?yJ>u>ME@Q?6LMyEp~;mcU`OIVsV}zD5uaZ^mHWTfcZOICtPNobQXBZM%a8dP z+aTT_q(VA)rzNeu#3z;XPn7A?^?`AtAq^eW1m1HyLTXqq*s_!0s*mSNG!ScBRgaWZ zQg*=b?hfk$`zNU^jvuYgbcC}}1u2e&zj2TUt`x=Gu`S@a)LFTc<+k-elXuSI1?7FCgH+^M75sQd*Uz-+W$%Me$pB?SY+x2>qOJk z!{Es!IURgB@ta1#-zQb4gGEK5s_3z>@+Ay{29%y6!jgx;!Z2wKGn-Gua>HWhaB%_H4Pnoqimw1Bjb zw21UK((9yB`ge#Qkv=2k`mp+tliHBlllCBWCG{i?C5(g4y>(g;!|X*{WxG=nsYG@CSsbO~u5X(4Gbsg!;R zv4NCxr+Ou|Cv_zaC5<6XB+VerAuS*+CLNrr;ew&R3y1dyhQZq5@c2~DUzflQR;6%Y zi&4R&HrPeQ;{oCH2oP4oKA|u8$EWH<1AK<~mEj?sR2@-&m4@?IM{&U_9p|Ohfo5|3 zA^-lViJVtz9Gt^iE(8`d4@^$ueAUrnx)2A>2FCAh1PsB_!}nB-e&7xHVfy%Xj4>|yv;2&HEME+1^uuudp#)fNe@HnBG#D7` z&@=?&-T(7F)|D}+e_olk{!pN@iRKKOJmV$_<;r&=KcTv zQVl5J=v4h@&q@EP`hS`N;12J<`ZpC;I#vHyHT+~)BPIQV>6QQJpJ@tHIOD$>aG(F5 z{FCA<{;zKbS(B%HHTCOh)3ayHob}D@IdgO7&Hr}6!bOXhe7AJj@)avr<*r_{cHR07 z8}l}8-m-Pu_8s}(@BCrc?jL_D*t7TNeftmmQh4yt;UhKBX z|2%;D9}TenZ}0#AcK-k4`v129?tiw61Kj`X`(Lg1!(LA6|Li|u`?weUKhOipCRhHe zoD}|-4^j_?Iz?vuS3~xn{gdLG?$3X=zos$pv&AR+|3)%Q-=)&hm}?B5nXeo^OP?^* zzwBr=bM^55+1?sM$vNX^_DP)X$7ca}41l|TxbcU3eYnNPCj_{UQ)%GV9PY^BW*qM4 z;Z_{(!r=xS?z`bu8}72{Mm~H5yObFG*mDf$&(wz0l{A1fo-~Uzm$ZPin6#9X>&Mcw zC-o$aAdM%@Ak8LSLYhZfKw3n4h15VQ51@QVrF27ym82P@*`&Fo`J{!U#iUX?rPbIX zkmaW!btR1;)sjl_2PP}TnR)5L9U{KRiSHkHi73Kg+4VpUJ(l;SqgY< zASMCEw%~Od_S6CQk!07>#AhJBy`W2lhp8AvERCWla5(^F6sXarrZ^|3rhu51KuRY@ zot6+S=A?>=fpI?Rj@C;Tr$~gC>0wntvI1XPq4fMR)Y15+8N5nwnjGh?g8Z?}@Xigr zx+TVde4^tM5@QrpU}+>VOs!8&)j`hq-cK^To0X85seo5y(^LtGIQ%HplgYXV@f|A3 zKNM?Bug40EQEL+D?OIMOhv}XSVXvQ5mbEfpw&NU*7%JdRI(-V;^S~r1C>##hc@2)= zRjnuF6oty$MxhsnzaY>j z>|)eFjY^9p(j~y~og&aV1Qva0Qgq3Qm=L^`iesYa#QhMD732)Hhvmk1&J>C2*c63W zOL(9w%cobXard||cpuL(Sp$_X9=(stgY^Jazm3$Iwu_Ra=BKQYfA_3Ap?PYq>Hx z&R05L!~oE?#MMafOqQ6aj#DKnqB5Zx_3Ck{Y7Nu|8#}~u=F|CFNLoxD*M@O&Bo>}tY+z@EfDV8v(DP!l_b&!}e4HL+v-A`^Q!uz^@A zk9<(o{c|;8EPtp82LWqM7|Wk+!nl9=CM@P}!u`QsYQo|rEXeqyEeJib_e3o648LgIdKw6MIz#CZPV{+Cqp2yxwpp;~w@g6)qKUqOuLD;^&g;sEHgu)G14 z_7M;Q-k+<+@x)U8S=D$6aVV4)k4HW+*4F^wLgEpycUXTV!~>u;iN~Xw-DWr|zhrkI z4u|%O?N6$2Y5yX~E^!92RG-rEkhmG7fez=HaAXlzzp;_kPy4f#l5sG;iK}p>QO}R# zJBlNLp9;suI&u9TrXl5pqnL>>GAsG8Qs6oX$%mB(M_Msg)Pzc9S(2nBBt?52vN;rOMtTjhSBYRpwHUn;Z+bRYX;Eq>Zx1^!qfjPp-Nu&VvzR$SGN>+NxMFplEme(Ne* zXw`6f@f*fW$JuceiB7Cx9B)@aTxn$xJ62Wgc&1^Aab7_qhE$8S2H)FIC7pjRPt|ZA zmj~Mc>k&SE=HogW;~9_ZZE%%|agT8IwGRB$P$JVa{$qbUZ<68YVe7&(in&jP(?DBb z{(3sv_`WaZgX@RlV8dA{T#dul?T&|>P5UHFSr0L=Mx}kj*&nP6wniCO=rdP69F`H& zN)z|y<8)YmVSKNmdVE~%ge!WmWng_s?o;8|8Xbjr1hB=Ss~!$l^RRbOq#ho_;~W@! 
[GIT binary patch data omitted]

diff --git a/distutils/command/wininst-14.0.exe b/distutils/command/wininst-14.0.exe
deleted file mode 100644
index 46f5f356676c800f99742deb6bf4c0a96aa166c0..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 458240
[GIT binary patch data omitted]
zFkMesiRt=9@WrIbztHiNFED!Zz-jq*rl75(rv7}bs*zocESv1uLk7oJ=y^N@JfHvP z5WvrGjRZI<65y_a*Z|v+B@$pS%0vc*7BVQ@9|iDZc-b2muA3Db;Hpjl>nIZeG!ceV zq5$3(3Gns&*Z_+=0lYB;u#PbN=9b7RPmBaO{U|hy?*wpA2;idx@UbX>Uy!Fts8O~Y z1>g(F5=p~1C{swEAlGzXeh{*gZ*Qd?f!OVcSaW_FOljFzpVl5N-42*tPbA&HPY_-o z(rQki+`pV)xjUoNCDHa#jAM!LlGDerofuuowl_+tNHWQvA$~94L67mI4W})0vLt~% zKF2@|%;a`~v5r*(O_;yJh=u^K8(;ul^kV>D13JY7TowjA=*IxgI2zzCVZg6pjQzo+ zm~}M3PoHj(;`$#0co&*3CMgQTfcyLyz|LsE1xm$0lRKGpoMzzrG86(3!oL^89yxU- z!p|rfW`-mfIFhs6Nr?Y|FEG@s$A!!>57YQqnnY%w-{6+52CSevRV=cP`)J|0YO-&t znVbOlMdHROgMx5gJnS9- zo0j`@mak_U!zSYnO1;;noeiV&2q=w|>s}KzW zaN^hd#d?v---7(HX|Nh!K!cZYAsVd2Ri^=2r`XegbqpHVFtO2TFq3(ZmS}JjTZlW* z;1|&}*aI45Y(??~-b>34M^Is^p7A0!9(RHYX`sS68Cw^4;m}HS7=|wf9s2PHPKS3N zJAw|CtyNHURvMdhD9S9xG8pxeyw#5$0^yU{?*0aiRtO0GUO*4_8Z55JEi^$J==0r) z#O%qhAkFe^zE|$U6HfGe>u>m{)~l}&D5b^Xt-c302zJgBcpqMYC-40a;Md))=KhCp z1=A%oR?{6e6s7p836Q|VSM9+)q*1buV+MvPz}AK*@RHp+8EZr0_M`bUaPjTS4jV|= zV(wlq52S;q!OJ(Q)l}HH2KPDD2Ns__?wf_fwYjQ5s$R|~OB$UaQX|15TTdzH*Vq$$ zzI{~84Y0v`Ua7$?7C2O1WqtBSJK4|}AfC`v0@38iFy%E|l zO$c`Yo%taHlfBi`z6C=<0+@ZTU)7N!kn>e1@lPZ9OS`u^ zmC|s$o!E9v-(E<^FCO#@fx;8Pa zdXPw|_n>`=aqrDHO-?gL^KQ8KN``2xD(@>bB}00z04ze(1|o&5-O<6_}veS z#+x0x@%{x>@&B2|+wwK&nc2Hj75Bgn6p zc|;F7KJ9l)2JKh;;n-u)lm4HF_FcoYfAy+kb^x6WQ5}GElGq(Ur0z5Zl}|W<7U*7l zv=%tB1MtxPecAOr4LXl}kkgs3@F&6qIF{yW3rB~1Sjo;AQmw}v0P@1uR9A#k*ia5h zSA_e9!M^hV2OC-l*UR0H^HTJ_wMHZ`%FX8GUKp!fI)Yb}+r>~lQ`jmFOE3A4VZd{Z zl9dfsGOoGfx!+KOT4}=LE`}R=PwOAFj1u)gQ}O~#Pcq&jhlhg)ij|hbIO;xNxm98p z@iOdyII2!7EX>#wcOdH#g5CMVm3qpP)) zxVO}*UDx9!))BSU>>9h#9;v^#pk9XmmU5DdUlyzQ&$=7M|M_<(ulRMjqdM3A@>s<$ z2p50msV#m)tm6NLZO?jV_{;s#wWlV}O+s@^8*@5BO&)HWQ~J4c!>|2me0T9@(R{a- z_^!39QT3-+L|6SJ7vC>d@xKZef9^>vJ_g@?vM)yMzrdNgdc!~S+ml!O7{#xSRs4bt zdBw+Qcvr0A7lw2m#AY6Eh@IC0mP<8;*jk*=WUH}hFR<^eM$KQrSRYyQlUMr4qm{liT>8XL zr3<>(PNP`PHMZ5}&O=PBFP;18x}Vlh&-p02pRS{Rx;@Sy*geZ)63kGGeq!+lK8#U( zez^D^C$ab#{d7~T;&)h#+As5+yxPYo{=Qhn&kPqYbt*pGPuCs0pPquxI^Xr@{S?Yk zPLw))nbB7>sjp@nudhD87wtaS=&RVn^DHYe8+7k?PfWU>q{1s>6#hO!7Ucx^osay$QW-#5yhmG2=^hVeIpnq6L!Tx^=@dq8Hq{E>7X?Mq@{Yfaj=4hqwIb@Xn$Gc8S>Cvsu z>puHvrI&_FKjV~@o_e&>Kl;w7`yu25=`k8vlznnbI%SDvhn4I@fd$|5p zr?UPhuP$}Fk5ZRD;kvwy<2fQV{gd04Ke8?{dd0+})n#hHXqSZlN9t1d_EFm9j)O*B z#{K%Ix66;DezaMZ%cL~yM~c}iwjMAr_Ett?eDX>U;FN=yrC$;*z5B^2JqG8!cC^yp z_|~Yq&vR1h9;5UXM=O11xOAyg=_j`jA03+B!UHhZfz*ffzev5gLoEnExA5L<_4X>$ zt~7K|2kD?W$IwBoH10RNiIyH2)DY0cmioun25oNagbmn*)(xfwR418q_z4)% z8tSS2-idE#BYx1$N$QLMHtx*m@Lp{FL40c=zU}^oPK_0O*@~2F3jzyFvU+m`->&@1 zpvIMV9f>Qk-Dc3namuloay)DferaH{?)GTf z5Z@NidY3vWK&{)sTQ|}gd1}9(e7-Gw{WzTBM+9eG!aa3cG&N2>Hi^f>W^Ndp0Vfxm z7<~K2*5lCTjsF<5@hv{7w26VuJ;%dlVi=p?qG)XXOZc|0zIL%d7zP$_%fMViLzqY! 
z!q{VI2;4znC>Pf-Cb#x3~(hLmlI9V7( z^8%+q#?djz3S-djK1no)fx+R%qqf4je;YJd)CmKjNiGoP0JK^3m`}b0C+nLlZcf7w z`QyGUwGLbM)v_&nvWysirP$q^qwjt>>W2l`26(mHoi0aYI@UJ=c#S+T`QUp;7XA^fO#x zUF8&bb@h-3)jAS61KcWGM{U(C&zccsc`L6II2A_%>vap|A5Zy5Q~vXL{FPT;LV`t|Wep*9?8j)# zL|g<44Z+s(2qGaMdUP^r9lq3k7!@NPDz53t+SElqkllBNP4a8*&*0 z6DcN}nh5n(ofSpL6Mu<@5)}7Oc@GYq6!oE4-N-!#Gw=ew`qsEc%@T*b`(o>0=a3%f zVcquV0Y8$lrhW9UUt{U>=wJ6nLB@`yuZn#$a#ZeK3A`+7JG<{);9P2(78md?8LiG3 zQN4Ak75gmG5puENpxZv(vnky>V}#o5`7UcoFA7qO?s~W>wZ(en$g!54YToFY#Ndq4 z+H1H=1BlF=dEUIybTkt7yb^_D7CPYPDD=s9eukP}@N*RU^6sCZrc-~8Lf?A(XQ*lE z&rv9Q>u0Fxtv^Shhj;x9HT9n2LMZ}V4C&jOik)qr1_>M?faBGM?wjPX6f7grmm8c_ z=*=6!2K2L*S_l?2E;gTs1SA%_YZ{e=ir~=eL>wGN{FKLeD=$$+B%8b>9s&%(Eok0K zc~gHv-t&J#-ft){5bqX05NAZ$pa^NvG48$sG(B7y8wsBwZ=OA>qKM+7euWc|lJS>> zKOQraJ^dWEj(-ogoX*K(mf?#iqv=F5t%%6<+ZdT*BUbo45W6Nu@|cLFN0f2?iDtSe zBGY&9^XW{2*od7@6x$Xfc}&FKM!=m=tNiXnGkp+|>B<ePoJSJibBg*(5 zt~Q-2559~ctdDYDfUvN2PLHnT^v1I*-msWz&Sp{kl`XQjA8*SEpas>sJcj zL+>g0I$a;tuMgmSbRJWqNh-DTqr3nloA&T@Zw|ze@9IogSSv9BM+(aR$qES;WI#q5s*NhS129} zrv#xHrUaqS-bPI#DY3C1G73rrAC92J(c1IFtufovM(14p(Dv+$rqSDT%^4Bxc@;#- zvD>ppr&LlT?U%fgH@ewTn_{x(t#sgD{QoWXob@-bR~(#zqS*6}(<9i^iv~Y7dyYm;A}KLA z$!OU5e?5kok;kx^u{AT_F0H6n>KdmjAGav=SH{(cSML7z>rbXeeb7Up zG}IdURodg+aii)Qly-}|tFoTQH`&DW?PO%CnL{OgOeJj<#WXEG1;zZR3Y`FM zXbaE{ZY@N)btg=@la=lN3k)fkkhC%0$?iv>eI;;_-$yibGW5{)&HGjE^O7 zC343pjX0;vUfVOG514A=^J?R3de%18#^-^rzkyW^j#N>9E-Rd~EaI>8H+nwV-$i%L zO&R8}9c&&@cd$7;0sEiv63bv}X(F{Q&bO$YVfD1%g!2WY6^))l@W|R!bwH~A8X~Of zEvXvLdk~VIb~s$sID($`LbsJpwD%mkRZggF^BgL4Th^LA?eJ=<-RWtc?6y3IZ&%A* zJcn?<+U%(an%uV#z=t_N_jNe^y?QzB3z+5oV6YHCS$i{7qLSMDAP!>b0UmLyJu~XY z<=ZuRY?Hejqty~Sj$p2btgc9+vzW&iT^gOOGhokTgrw3Y zWV(ay^TzIUpM&p(!9`Xay3y+cOm-I9YnySxfOr!G0?OtT9HG9VPV0{7SO~2-_G>(d zIJgmuhW`cBGJ{B*S^;>~{1|Gr6^5wwIqr@}txr*0G_`i&%@3oN>!vxQF*&J(IBEB+5N%$^ zT_kNBWAlUVF(?nLcpfpEBTy)gFs26K6vC@w@#+hdeFTZf!|$jJb)qE71Y{88dv-o< zIEhxhqm$?n+?)uB&ITz3i3Z~iC)xcl5N3pA0fx5)54pz?M=V9PP#A#0U{2u6O`8Ifv!AO^9N4RNOLWsL@ zd5O0qc=MnMYnD-U%0z_XgHM>nJ6LZaGcXPNmeG5 zUNfsKQL4tqB@=sPgR%3WVhWEd%1=k~mSuX&2KuX!B$$_&RhBH>a}V7w94M{LyFlIH zWN}|3)N~#W1F*?2DuIMWuLV=9f3Y~%gag)>^ub08hcaVC&|8*TQ3&Fty88RRV+p$);)#MAMVh?{F*Z`KlUv*BDqTB<6CfQGfV7E2Kq&}e!c zo`Don-1eYIkwVTPbf&Q3IwcDz$3cyNFoDSYzptvh zr!$#M5N@AaCQ#p>2Y27XNH(hvYTY3Z-qIVfM6=`>cu{$9Xe_>nW-lu=`{%iY zbca0X$8_v+JH~XN%|amZXFSXCl;Nqyb0?nr@jQx0OPg7D2cQk>pF>|)>Hc|2clxIn zjp$FC$Q6t>R_cKr(&nLFGHouwJqbSAj2h|6&Dxr{u#sgHF*F0 za}guxVCsS$_0OtknVx4_6naws)DMv98H@E8ChzVVN0sZSf39S-v{V1o^GdbppTE4K z(%`iV)&ALl2H%2bFP;-ugj1|Mm8ZKq1bmBu<>Gm>f6Z6jQ!4 zZG3(uL^}l$VxZ-6na@)ZUiK;kgjc3_P>(%)^t5M@z3fC<}i2 z2Ks3RhC2CNcAsQUs~C$Hvf08j=} zCZqgM*F&aj%ru3r)KS0gE7SFDg|7deKn2=oVj$Jh^fDh! 
zuVyr5o%NCL_+UDk(~=FQGhb8*VKDQ~Dtd1$P1W6u$A#xUJpaP;1fGB6c^!|IzTWC~ z^fgx2psLajFQ{4ndori-T7=7EX)u1=4zK_I{CI9TBil~mxDNU8A%je}p}14R$B*CA z2uz(-KMXIV(0zFxZ>S*r=*NH-l>tTX(`dpM@w|&?6P{1;?85U6o*(gO>C-?^H0tK0>V3QGb0nLZ<7l7bZOu>_e=Tba#@#Nsi$D^fh;>ULtx@Ax5lplli*KFXj7>@%_F`nD- z+>U1@o`2$z7bFcmQsaDU8VE~nD3t_^ASWeQEd(#Zl2JOM!O8r= ztsmnADb7AtkV5Te2$I1*ke#rP!nb3HaB-Z$Jpg~h+$Q|hyL;kqoI4DEv1C+#DsguS z28SHSPwi8QyT_i=YV5kjfZk8~9BQ+yb3dF&zEw!ju%Su%+ff?n9K#{#2Vhn6lh!NI*ZZQ+QKNZrvwg+YD#cCpfqTy-?BwD$D)|lb0ery# z2z}{eCEELhTDy1DXkJ`+fa13J!0*x)r`GNTAHWT2em-K}<%>>Nqj_;_vH46U=Hp83 zmiWL=*B1A%f_{n*K&Q>mro+*OdpJ}GCSPf>>11~0M#^-_?9@RTlB&v8S7FQoA>V}S^=5`$QR>x6~bR1R5qgZQazoRRqxu&6d&baky$vWffTd|!)mtUEj zZl6f4l_A@c?}pwd&M{T%VY+su>Pw5qVY9x;5hJBNrZR5CSYmuPMwPr=c0(2I=wVW*;8+`OSBCO7%Bu{2=!<9Dqg-hK-UeWdaW^be#1lruz6VL3+j|PdqjW`K zw3bEEQBy)ouo5(IW54VMCYJ*km^`G#sxQq&ji?)p3Uj?bI7Vx5AfbEc33TP1KCoM8BVFj{%6KTtMC|DWQspFE<9H)tT zVUGd#Mj9(KFpM^&4#C`6*E9vjtK>P3Gey14uy8~jiyw!0oNSb;u{bBIRI|<^!Bj8Y zJVxjE&bYQl@YsxZu0;mM3fSRbfxOs$so9A?vp_{ArNNL^C{e}@%V*F-C12sh9>Uh=*K(yx~7F77WO%o z2~0z}3aaiJr^(G^m2*d zxz2vJ;IZ4Wyjy6;edM<|EL@j$Tw94B3Q{& zO6X8nS34r~>9GIGg#L2mDMFtZp+5>j=ru5x%YPaD1CPa3`B z7m!-;_$eN#vf|O_UX|8chiPcdv<_NzHdc`CL0U&9X&uu@>lkOddZD?_SlNmg4Al9P zOpd-n4Ji$Z=J6F`@X0ifPxw32JT~%org>1l&^(N*Lj+HfV^F?uO9)dw!jMo=K4{mI z;5pv`pRKoqF!h6C74@Tp9J~%9|E@9Q-qlFnU3-#q*KqiDbu0lSQ$ptBg()GI+O7lM z{%Xi1L^Cxc5id*)ISVgL4H+ia&d6&QKoRML_MZ$0Li?3i(Um3lyrHLPf3j)69=nS= z}Gr6+`X`#KpAVu17KROi=f)DNeX8KOsu+b|L)MJz$(nJ4<8{E4X9DI)yg00Q`G+tk z&Xi&6iSMCP@O?X8W{tp$fS32?!uV21+qn*Kg-2*BBoK`*bGKIK=TAwd zU9OnjC3ZnxW}F*`R`I`mT>5fVN1QVi>PZ(gnn!pBOwe3;O;FBpaL|*_IqyIoXgDY6 zxOClEv^jCkW3VH3#yLYS1`Kgdhy^bi&N+nr(u{L19*!3hG}AD0{qYsxm*K=MumWuQ zOPAQ?CY}Lfm(Q;X$}VXfv@N>``=Wg=5xFvKOI>J#hl;fE@LqF8$+gCcdoT|&*JAc` zKCHSGun1!gdl5}?l*v;W3*wWfLl6$cJC7g?lc&AVF>U$)v!|~!(a7&4ptZN+r4cJw z?#7O1Pp<*y7)hHa@G^-GAw5L1r%X0`3MJ+d5s^=GuyVW%#W>cqOqo69x1BxFXC|9H zP0UR+R;B<$(tkH(i*?SNgP1d=<~wsdwz#~CTKgzIcdE7bu#KIMnbkpeOvP4vm@UEm zGn+is+9UFvMF+*tjH~C27chG=*yhjAhbNX^YyyQjNZ707E@7{LNmR5gH80=1S(!)K z67wqdvZ+)eC33FFIf#@5ML8%?prtaQ%D+F~IX&l)P#+=|$0rm|OgO5}sj_`@s&k&u zvQ(zaa!{F5C1M~V36$G@PL-)-fH~C8W_pFjw>W_$KE1 zE1kt6=GqR~)dl7PWHB!CAapMVo_|PQt9!H~URwuQ)CFEE;~6mVm3~D~Ud!U38eZE| z`&;tbaL5o1uLT!mQ$~R~h}XU};-wQ_TRIIe#A`)U@uK0i5QWzsA?pC~S_MR17kKRt z$y`_dt0U&R>dG!LS3J*vF<0@Qf-+YN`~+hh)-adw&Tq+F=VL7EGIK3I6Wl<|^<6K# zbi!Qcr2vMQ>t3=cYMAS%3HT;K_S#^)5OYn1sQW+5T<{<4Q@pgJoD;YG1qfsN769YF0yM9 zb8RH+6fxJ43%kTzfV^SjAAFQ-d9Wj1Tf3x7ytamCz@KN#*nX8G{bUpFX39mg%EJnPxovaEPUi*@Q!`q!{NbcJZkEBv3MQN}a^m6yGvH%`f z*|b>QwysN9<#$q)x?LPKxUUn~w!6}ipq}GZrNt{e2wvL4zUibeE$l#CsM2XGs(bf= z*mNWS!Jg70R$kvR!L)_tcT$+G2fW~F9|OKM&l+;{Tp!>sd1kI()9$Zw4qWg=6%K-{ zj-+*|I(XBS?=rWAxDVP4?q<5&T=Wh`TpXT}c<6n!{Lni+W945Zj*K0W6;`Zz_U4LH z-t-WDJS-K9y!@>!UU~avr1Ey&S<0IOlL`*kgsCil8x4yIf4gRk^7c7QaKMz^B`KuE zgo(JcXnAX;*M7d3N7weA>s>a64)djZmxbZhkKq+eVmNarKya^)v`1}~oM{trs7&u3 zh!=P7hO`OfY_cJ3Vg!shblNO0_eK386oIQ-G6Xn@#6oRWIt~mzbrWmg#ptqgYuRgN zCawNujs&C->4O{m9Ryy9(i z!%`&! 
zA7x&!)hY~Rdtt&?=~alUVClexBxw^$CysR(K$esgU@2xzz!9%&a7yc9oYJ};)t>{8 z=4s=Wx^B@|_nZc=(k%&}V&%Y@HBpwu3T26~(qV2Zt5`r3+9b_YTFs zi#cV_0klln+=Uv-wr6Q4W7uG%z4xILAZ-AaKFPwuF;AuJM7(yNvLQ&-Ic4uJ;*|Yp zu1eXk1wko$E=*X#nRYU)3CGQ}0VvCa#ZauDK=V|D#kkeKm9S{dR0;nMUIP-ot+3c} z9i>o(g%hbdC;adgobZbPEwi+kts#6nVX>O=YcP(k!$kYI>Ha$ii^ncgDSIVe15!5l z_=i-TQ}*3kIA!&KR>r?94Q1O4i_00q1|#iNe?I}zwqxmR@n_pr3v4rF^BmFm4M-J;%H!K;8F@EzDnA5h znac0Y4NB!hj6JCqs617~V4Fd|j`r$?XJuE(9cf#NMewxoUDG%cQn?VhiEA7ouP{=P z#*xQ04%RFu*dG4c$8(pe4RSqR1G0Tvfie)Av`L^ea_wUxQgz-Sn+kb@^aQlrAd~5) zPDSl&Cs6VkPj{(3uDl;OT_X7}_na8Xe_x@~Fk7W~HC_YqcuETzX*ni+ZyxbRNgPaH!0`vCujH6172EW_8dYZG}$am6SpiI`v4^ zd4nui${S=QpydX6caEk(+6kSX8C!R$Jr3M?V(sxe3!QZrs}#Qtuiftg8A#PR#lM5y zk0zNT04-BI)2g9(d!bXz__|9ZFTLZ$NUjPUn@(DQ!K)s>U&_A^V1%V_csXFd&&a<^ zLiu--{ChLZ8w3{@&VOH&e^X#{q44kMmwPvBYoRXs3p4m&){h;*lVp>2Ca0)zbxJF@ zm=Nv0n?bZon+VHDJl&JA&a?;@V2@m%mZ*0R7w=>inGGrPHC;u!?2AS=oEvE zJw)_4izm>&NahTUEE{ApYKv3TbqpsUl$}%~ZlY6Gu!5&csaG>*p|epdVEV8=OP}@| z$Y8_~md!g3Sbn65K6D+p(n3ZziJ?0dNfeaGhLn!nMnMBWZja;j;4|!XK*z|#JfJ#r zYm56>j(ZpA`S&!Ho*&V@7$<|C?P>Y&wOwuBNFP4aPZmCb+qVbDsa^YijqRck@=E&( z*mK=+`_6~=aG4!frl{oWxP3`Gk=yrdp7C+DuhrI7dajiMlCs$^R~ZMa2oj0YX#dDpDUX~pGjm8^rpwPCub!-a9wt1_eq$y@(A6XsL> zS?@4vyGFhw3$tx_}rMYT**YxaDoKsS_e&=Xmw(9$s(JMFGD>^G>{i+S6DeH?Qu=fX#dPRSFfm+`K(hjDO4Kt%tPf zM)TU~K7kVVEu6~^hw24c6F4>B`a`*kOlVtjGl-uPlm&%}l3qSBN-&~3!+;(J1q-oBjvWL->ulZ2)) z`#>qjs-B}CibGF0gb;%2(CisYG1%U}3YQwjh%-H6daHXr4-Qj;?cridt9zr#Xv4#Z$m?J8|duKTRl}sIwc`>Q16P+B;{0IA_9oeWEvQA|%Zs^&)KC zbSu;H=P$r;SgjJb)1+2mxKdE3SwgR<*RmHv!rpL=q<*qVzFJVIImH3@Ew{L{XCA%erD5vjb3 z|J93{Z<2OC!ax5`0)1YqX!ZQYwBG=2iqk$3Vk8*t(b7ciIoNiOn zClPk<=(0pOFhiy5E89Aj2+3IMgGBfNYqtR;!Zm3KPL)2LiJ$foVQdP5M@XAUFtnEl z!_5c|m+qRmQVlMQGlf zmQVdTI4y%pghAVbQgRa3AZ3ZrviT26$qmp9I3>6K={P9qlL#Mt(q)P8eTC@3Y*N>wTO z-FtsfN?r;bi*x0$`a5vEQ> zaH{kUH2(k+ArhS(5@8hyhV~MnB?-adlI{F965%_nCjG7wVG*=4PWyp*!MV5-i7<6* zP(IGXxht98!*NT?A5tRx9a6d6-s}nVXDD;RW)h?@$0l6${eUylnH}ctk5^};yLUc*^YId1ofcaa zNk%|gQwXqaz7ivwY*Su_8SEgeq`?Y&jTFO?=?m+pip)YF`zkk4~cV>jShr!?|&WUvz+%aT&#A`m@4S27f6I*5AXvupC{fKcduHa$BIL%l7 zj`cobtlM-Fh~ZaeoKogfIPWDlK)$BT^pjC0nW)LWp`2SNhZ$Vc0ROp5Zh*hxK12|p zuQbSq%xCW^4e;&t%2#?os)E4@^ni>w#W_olvW=xM8#*)MU<5Zv_ch_ro7iD= z?e{^WGup ze;W}qK=>O(#Q@>M4!)FDI}y&aa!$VPoVNI?6{e(o>RNw_rgYj`-19fV}a-RXibEx)PIS! zIHRSzzlMu#x@>UhH&`Ikl&O;VALlW;UJa0gx3bqo?U~qD`~tQ3!2y`~QC2l~e+?Vk zbmXz@X_VeQBKwN`7A9`qYXjDJtXyWh8ZT{I<8D#8Ab6Q?d_!ClfXdh$5|GNTPEe`* zL}lkxJ}^V6av&<3<(*RNaY5&s0dhN@^Yx*D=17@arNFbLAw5HN};aa z$)WQDhE59@IwoM~fPkU8wxL|dlfvao(kH_^d^K$KjrNxDQX*Cf63C5JOfUKs?#z$7Kf<(j0YnN~$PelMhs zrVp)4mj+{pt2;68h zKYsNDu)e(=trrHwI)rFF7qm{!7dj<2jjDdcp3c$Eg|7^G7Cr3bqWw6QyW6wKiK4v& zOaN*>JxWFUs#k(gTYJMSxzp+HH+SOE5s!@u&@-OET50=gpJ30>V^0}s{Sus=$c=Rq zZiL4yzirPL)uo;h28%xQs*?lMd)t8x>W#<04n^{f%lblteFRSMNFygk!Y7oN; zdzel6&JOV!jEx!$udhd_q!Gy7S|>^@&+S=!9_DkSb~$)1!u#8_F6(S_X>pFGl7 z@j5<|yp!0N@0jN-YE4dDF*A8m$#h#!zO8(F3X=+zHO1pFzSEdKdeT2>#CP<+nvSLL z!86txk3@*m_u25oJAg{J(MC}ah?Y$Tr<6{X?o`! ztDqw|(@b*iag`gEk1U-W?}!sxt|%HTs}PMPA!4KR4pJwo>*Z@{q<@~F5;6DRtVO=z zGMnw$JkEOW!740}>%88!(r(qe3i+O-dOvl#TJQYlJFfQ!XbZHrakXCXpvBkXQ*H5u zXmP=lSTI}gOe&nAwD~@@?i0}Fw)h44lMEd-`?V#VR3G;>X`21+w$%^uZ}t&r_EEO@ z`T3JVy3q1Z#Hkdx;<=Maf$y;<%v-;63S3j%EehO;tTha9f)sdcxJrRPJkW=*;tmQlWh zl3TAxg-hl6gCmW1ao2rz$c)-OVXix9RuHz(y>$C9l`6A=gZmC%zUlWWKiXIR`>TBAzkKZXs{c@5{Way! 
z{k_VM_Lcws(^P+zA&KMOU?YTMhVTZPIXW&B?(K>sW zSLY5zSz<_aO0;``S0{#eQw(B=XsEUrs`U8Mkq#=KS`a(UVUQ*VsEh_W_7)=OKut9h zVV~*Iu$nYo(z`f6%M972#mPE*j+izPx=EH;JRxZ(4)9(Hzm%?$5aG&ZUAip-S6JYD zbTiJ{M#{&X9A{3lG}}*??+x@ininR6#P-f|BU=EQp4u?pC-iToxE zS`2#-P}nm=t5I}q#mXhjkF9bL@5Io2Fro`+4(bw`2g2K+`Qze~h~|OtE}&Te1UjeY z?6zpmIGJe9?h2Y;bqUS?wng)iypu@HC&RixO>>vf?B5p6mfVww=CRN&pm{I)QRj_* zJ)ksQ=(4tG@@ZN&b#F%&@7|Y+xf7DS=-#Um;C~wPEG&<~O(I0WP}iK; z46j4J4^;9T@p?mZJTuJ26CHKI`t@wsm6*3$?Xg;ZJh>aF3<-kDU%HA)EvAhpj>_F$ zeTP)}s-=rm!M3uKfJ#IVR6gq}D({^XRQ`IbW2(&QDk>=_1(oJsJ4U4*N?w=P7OMh* z8c6(13{{nezG5gT4UV3o)r3J@uXiuPPnPJ2fy-q>Xan8*+gmtO=d^mv9{YIMY|@Rd zM~d&IOGQw*@F`66VjbkdvmO1?ZBxbfqalA>j&}0X%>bP{E`VsH|qFoa={5e~wD59NZ8>$ZQLa4AwM@V#HeVr)m_= zq7@Wtt9WDBq#F&6*wW=tYx7oR((M5=+=y z3{H!LR8H$b=e$;@WnbksM}MiON!8XjJ_vSi^t#Uh9lnE6&1w0F3?i28E{o)}?1CcZ zwCsVl=Ctf}Sw2d?OiDU%jC>h>a~#F~4VNj&s{@(cE%v(o$6f3YU$ML3>!w?^U3vV) zK8b_wa&52dZn01OeB8A?$5-r+_jj|lCC6XvY8>*HYg^deVy|v)Q(Lbi-s_0-I)->1 zv8uV8rbcOCJz-s(%y}JxjX2mAEKpYBQGywwW)%o#@8Qm2rXTAZ=AqWkVa{vm941{U ztNwlYAD8C78JKA0Qt`88*Fh>}WxHbZ;(oEke4ucYJJfYCq+?8Wmbd?wmn}N)ycl7f z0h(Eecnq+i&E^Bk_8`63EPiM{YCj!n-Oz-i)gi9S^wqNmdaI`6ndKFJ)H~b&2VlZ> zfKy#leC@azVlF{rCAi*%D0y8U~ItF}Rs34me9=b=3w}ioSYo zq_=7|o^+mlZl0P2OJBBc2u9O3l@#YiJ6)ivmqttHYdToYP=v|(b{O8^rzO5}n>|j7 zhffNbCHJn>V?1VfN;hY6G;yKAE+DhLq0)n#2p!=_h_SpcG|pMZ7e37l-RfLvKz?Ff z(zb-!!-rk-^>ElwI3O$A`x=F3NJU5o4Yz4E6pyWL;|se(Kg)y<2j|1!XRl1=PdJrL z*_qzg2xCcNiK2NP;wB#27P!A$f}G;@wka~(0-I#uOz92|Css17Jt8YlTuC%>R_;UO zUFI*AT@bq8`5HQi*L+~4%T!omC4KP(9P1CwUldM}^Lv{jL-(7T3x{K(zgb4& zQw=x=EZ!-;O@;ANMAkU&2;SZtIN^x-=(0h$SYlI~nv{IWqeX_ElVxH)R*>slNhEW= zy_b+aY)=xu5I+@Pqp*bCGJp01f8tU2lVu>XR*{k=M$`j7@C1ibr zCoykc+8^{{Sv4gfq0%Beh?fmxH7|1c0P%K~n~2=x(hjwHy`(MWC4G=9C8ulqxHvmL zI=gZMzfv|U6H28$KIZUx^dOzMBHrYPs%;7}R#xKEeO7gZuwGvkDKzNG6G;O0y`9S; z9p8wb0nV{R_&U_y&j$vG3k0{`@q;)$-n1w;ul{fYWo#dyWMZtiAKyIG@jcJP&LffR z5FD&ek2mDiI|m94VN~Y7-t-?}x zI-OT%kEUNN)vj|6bjOxqa~9VA2D*FW4Z(N|OWMYAO{$@Mqmyc!s27#tID|?VD+{R~ z$yj^)?|R9idwkTEG@ZBq9(o;vS8f_zGX$^oYvS17c=ngz>enQ)zXVvnW&-_{C8;5c zt!dAKvB@?d*O@3q-3<%sdUtQJ7bSrX_qEfIR0C^g+XQ5t;t6)#r6izyH3@97S1*1v zO(#uUgpsJopmUri6hYCAkv@m{LKdSBBXLrV`{tyYNl-)|uE8L&hu%%bmGu4*OYue0 zPG`XK#eXs3{J)thIR@m@V4)7FL!rn@w#>!`2JDe+U8I#?>`I9X>0t>>N+X zN0B`w0%IPsQ1FMzT_-k3afCl2h4mq*g5c?C|L`LBzeQeD0;_}4a+uLv1MAKLUt1z% z1r;cVKO#4!_X zHF+BP*0KqQ>Ovh&uCSgXy-<>;wBk?U4l%5!ImGdy;0{|B<9ZQ&=9o|yc6iH(_0N>z zpM5L%Yf-&B6vO=B8ljQS9@wII~^)P zpf7@RCF6o)-a1Ut9DAWwAkkTrNSV<@BO{m9i-`=bYR0rA9s4H8G%Sgzm^uXk5ax_} zu~xgu5#N~gka^rh%X8R)ks3`9@)WO48t*viAKTPo`2{0lgg7xw*Ai>ya>OL%r<|az#Rl3tk>O2} z&YFs8xP_+Sqc1Wfcu<@^G_=uaHMnmyA1Szr=g~h?I*2vg1(mo?`C-Z#nkPzXfa2C0 zXo0K^33VvM9g1ur6$&DNFTT?=GNlmgky(%~&0wns(TKFq!?a)AYOXB|5x3~)889l> z7Nqhl-J^|b^{_^M(hD7jR%GnsC?Yd9^-`dO+QFGuMAEit^yW2Io<@viF*R+Wo@F^` zGc9QDVMsWfv`t)p*b&|IhSK4%qQHdR5TVF~AxS8T(K!Z4FQX~AuCnPs(bju`xp8pKh+3h>0BI2R?p(mjVj+vmN~o)a5O-#M?xDwRNgG`(Ui32MSIeJP@RqHW&%y&mgYl%EMXM}HvQGv=- zpy%5GNvXVqa9cF0AT1{V5Fb^QahBrR0NC0ApuesPvxi~nseYB-K3LcjV((Fg@kysE zE&LwUp${Y~Dg1oAl4si_36!Ky8CnZT3Jir3HDBRZNJ@c}6oDTi&|CO{HVZP&VYXp% z&V<7^n9};l7?S|$AWGOb%4Lnt%$DX1Apg?#(_!91$4nHwAuGZKZ+!8Yk`sI+Gs?wI zFv?EYZNU2Bkd)4zd15`;eCo-7#Y9UiLWz&jn$$jT87)PR-A zvAnu|Ok#46Lfl&fw%Fnxf$t^BgXHgL-2L#Ks(x?4Qez_-|9ez=9aC&Q^V}1wBhsp1 zC~`z6FSid)o)15HUPq|SkVorz7yDF6wjrm(kTmwQU;-x<2h7sfHl@4L4mPEyHiES!K_TFh;>gi<0$DvXu=J;Gr&QI z>h+FrVUyQ=+M)Uod*oZhRBjVHTj%aUK_Q2~5Wgrb+N;YoR>Uz`vR!y29i#BCO?`Ya zWC-VzR1}W_Mqre(h2)W3H8+4$Jj5w^tV}SHP8xn&a8B7Po%@N(DKug-PBE@d3~jI( zvQQ=ItR5r;!SmlG?VOkc7OBq{j*WGMMeTM+;ElMWDB8IC0-+(qSw!4X6b+LBR2baM zsnwy5yUijpg(3Vw*f4?@ObMtJe!sV$&1^tua9zA^;(lz7|6P 
zW#DJxOQ`8d;JpAomLi&>38Yy8fzs0kgd&=nU;|i_kA0ggmp%B>=0Xuo>j~sr1>|i% z5Q=DeoaKV;94r^uAlv4Gh^A7Oivm*U2l6d}(8XbE@?l+IxyFozX5lsg{_BHv?lvplqe6^`GBAT`mNIGnO45ZNygd&>$Lm*oekSF~> zD1z-0UlRxIkL6PC1L;X^L=o<5*Q7vpBEBGBMuNprp!?D_dmwu#Zk9HV0^Lb#Oc;D9 z?i@MpOv;l2-2>O0#<^rE%Zpfsi4~d#B-&EJRO<^3wuQu8%X+xd{*`aGv=+w5FUX}Z zRFL9?8dVmNk6ufIB?gV8b0=eof-|z9A%?3@65s z7;d&m1;g;i(FXyJXmie<0wY4_?OJ9KHiQArJunN?n(bc4M^djbp}J7ZK5;)9s^5WX zJ&lF5k3lfsTUKZ-(ks)SQ)_E=_=s1+*O4CP~VI z9|q2s1h0oyyV*+`*vM6w3MAF6MdOsZq3U{Rj#46e+|h?Do)CM);obcX9EK|PzHX7R z;wkh5(Nd#u>EG#-bLm82lLHHY`;;s zmcF<$2I!^ua!n&;tYF)lT>nBSsW)=yf8XRP7eZ(FLZ5P_FJq-8`NHl|!iKA1#`0Ux z6iRrcCLE_#)bO9saJ)2Q`7DILna@-rl=!hGCQ*~(ZB2NrCY&@IB_~>}c$2F|$Vt@1 zd<*rP<5Wfi!wavl-^6*$lG{f=aUxm&Yv|01$}1Gho2Uw)I@(%!zSd zY8$kWX*_jWg=Ys2OfXg;-R3mXZOF4igv~%{m}>K6By^y@EYL)O3h#*ZOmys0M8_WK zsk2CQ?1ezdVN;)Gr*)sA^>BUyZu?0^JC@*K{Nz*aiJyGRJ>#cz@YDW+Z9aXT@e>dI zuTO<%{KP|_a!mt2f#!^#c-TEk7~>}vCi9aF%lJtSXZ$3GGk%i8iJ#VRenReypX3EDv-oas`7k+ z63Y0A@e%l`j`&LEr#HF243=j61PKU1>d#N}eEei3A?fuRD+Kfab5YFFVzRQaIkLHy zOnX*OL9*y*E(|koDo8SKf>gKcBdIPiSzokw*)YtW0q7pb*$_McK84=>Q=HJS!TmTa zevUm}$1dsgGhy{>6zhm7J^kvrxD&1WF#Qy2V19^hdPdPgqF{^1$sNdiOt!Z(neU!1 z<{T7@Jed&sQ*mfs5&BDTkX;e_t%}fp&+9&u3w=KCCZW%!-Vpk|4p~|;AwRrZ_)vFv zXKB$#I%CB`?C=vUjf&8}gg!~=XV~$p4qbSaV-O;)+_>@`L;GPei*1-JoE z0v_8;uUcR%9<|B&-#*~&>@^rL9p9I8{t#Y_+UD$!bEXeigccVZIMoLn1`Vw(us_Z% z@FyUnF|J+C{=nb)fS<)KIc?4u_r!SyXCGMOA=^TnCbGjTyU;s`%Ls!xEKJlpL*T$m z^WLXAWSX;2z1YXNIz*_0IgXnu(SXL+Ee06Eq;Fxv2G+85ok!aYw0&f>M-M~%g26Ql zU_q~6gC1Q{57v2;y{th8DXmYcd6WGz{;0MZT&Y{QMXVNf4H#cj|HTbp_{^rycht`d zawspwIA}Fsbk2vJr2JnHkoiK(WD=jXZHMEHZH8mOs>5-kIvgWZ9gZ6dl;Iem>Tujx zz=vamsl#z&0UwSLrVhuAn&BAX>Try3bvQ=2IvgWhGaMsE9gY#B4#x;rhhv1R!!g3u z;TYkX;TSRMaEusrINm4^#|Tr0%Yiu_McgzNqE`C5mA}+1nE7T32KFZHy^Blb1$f?J}V@r(RYN>x@D!qGScJ&R9-g zu2~q+YdL5gK)IfX$W%45;A>e=L}-F9^aEE0#&v{|sf%Y*@P-l=>I-{P31iI1iYvHV z3EN4!8w)d*-;Ay*>xam-Ruhwl7$wCsns7J|Qp4}ogr7?}as9Ba;IB&lgH-5(tCX;C zgt0Nat{_vWg>DG?VVj(y8`c#}Q6P6xDAx_^3eHkO-zCM5#_+m=enM@cJNykkh9jG2 zDat1_XcqUp zWuy-P>yB2wR#6?+DM{K7Wkz_mK|Jp8o7rw8^B-@%hBq2{R2bYT@asO{A3Gh%oIc=J z*{gPp^he`zAMm~Ha`qpb&gHX`AB$&vz+bl0F8qO4 z_<&z+2kcJ_8wW=H&Vl1d?bzGCtuORttnU=Kj}Q2xP8)kKd>6^Y7{VATSo@KKB^mgZ zb~*bKqnZPwesem7&Glt`dZ)nSe8AUq+J0d^;D$~)O|n<*Bs!`or%2id7Ey#&(+TD=O?hc*qLQn~$lm`JM9o zG8Og()-GCn{pmYPh3(xgU;nxdQ(?2)7RSS(=4+G zww5*NN-zA_rn|cIzWa4Uzj*0kJuY}$5(^_jPV1t|Z7X_|yfvj{8`k1qy>`PDv{xX- zIVVix5X)C|xv%JHyy)yeMSoKgwCHaK@}h+!ktqAkDc&1W{#N*kT(&bS48<2b_B#4#cc5luf0^EAAuh{anrk8C;GGGNtgNv;s{#u$(ZG=QKNWJkq{>@-`k? zDrKA)@q-+4B@6MSOL0m_CPKu>DWwNPb;Ws8yq_N}DVm=^ySv!syIE4ver^jjR(=m! 
zOv*WEtk{j0ENRyfo@d1te1fS*D=98fQ_u{qAuSFi=$f9zpX@B@95s@*n~N7BL;H{{ z=}w-Ry$?E9sHHi7w347(2;J}P=bi^JF( zm7O*_ zD!qqm%oeI#K9B6E2(#}bFVI>hO222`W(AO3@ zf6|Pw6+=tF9n`&+^^?{sNyPOs_R_bQU_*?7D>#3&N}IozSpi-;5rv(5O-jf%7Sft7 zg}P;!8ia^>VInpctSCWAEyO0v94Onoz2tf@W=qLmlmE8#3fPVwNN99f_N5Qr0aIlb z7*Sf*tT4_E=$Vr&9%IGX$SzBI95~S~vx>o#yXD{mqb>U=>7aSdSyGCcY7$Z@%=0s6 zo)K!~5JU<^xKuIX0W3}Ju%DJC&F0zKjX);Ua!_R;hMNb@ACm{~zz(3DF%V4=xb-$l zs7V$Y)?wr4hhB>+Ng6CAH%H59(!zX+cJxyc8F->tmS`zU zgyS5PC{}2YoM{H0C>B{hNr@UEnG;HA6BDkw48(&?=Rq)GH{37mYRjKG=ESWXbK)DE z6Cb-m;Y8Seuwl|d+d&`wk(x-Ew)n!|>LC244#Gvha5hECKmotNu%|6RjgYX1Mvcgl zUV${yh|{%5)?S?EG(Dx*3A3cKwkhsMet}YyBE0Eui;z1_{W0!7no^ghYdc>nD>9UV zzt}ELF5hvE(SS`{=~6Pb2hrvn?9<9~gksoSBom7!bT~!GA`RHqoM#WofvGA*rKLgG zQzCz07@$qE)@JF9myda=%Uw;T9?If9#mZH=vVr@`Tq5=4GpB0SF|6 z3}j+UXjyXO~wvNn$zW^%atNSDBZ{|r%O?a%$AG% zp3OA6f^E1`B)G*Oz1-z=AG{z_hgno_>2kW2N;;_K25DNC)8)wNuBJ{yNh3Q+`sk|9 zaIB%m-rL5&)vmAUj3cQ-$lb*c{GcK!wBXbU0)y+Yq`?G>)gZKRmd~kIJH4<+>7jn5 zMEjCyfa3y}krNO&EHLoZ0f0vmLk0%72LQ)D*}%Xh0f0vm=LH5A1Aq&AOo4kXbG zn@N)Tk?+xfz}Oia2>AH`z$1y7WN_lCJ@YU!ktr7E7;5Mo!-P@Va}1@AY0fbWzXobV zfO8BpkVJcqA&(N#!H7MS;n!zrGdx$D;k{P{&hQYDXfy21GGtZ6u73=voga({0@oL^ zP!;^htN|3M(JWt-U5sTEU8$&NFU=)5PLUcmQU@zvi7-sA ztgZcWc-aP|D{(4qmxMhHaQcMxnUysU)b1IMo%D0Uq6U;D{YLe*E*MRL7ZjZ!gNhTBxq0y!FS4x7a8nEqtjLHt3zb)dH|%1FKS~wd>-N z+Ajx|eMzK=YQ)~Ak*J4`-f*#(x(l9w9{{alleQrZ=+vVPov>6kCoJ3xMPQ^Ci~B@E zV<~7Fle8^r04L?fBuh+^m2x-g#-)Wh2aN0O+p6D|mffRbK2feTPzzzML1!S~Rp8al zsv*I?ke@ssPX{CH-?+#b0n~0Bdr%KT;7|k(LJVP3zkS#-WRhmGuBvjSFXKhNj0Yc2 z#tlrWs~MN=yC1ymJ5WqVhcO-QFCEyVR^XKdyaL5SQ7kHeMu`!Qd9gqGuxpMU8{Oe3 zuqEMVoKbhVt_Dv^uhB}BBhD3eq4VwK2yj-ez+acO!c}=4eHN9Xim|RrisR}?x)ha% zIgzzO`tE74#oKm7=yL77M3$kVo6*VYJZb#^e(F;0{VQaQW2ICd#(Iu%6WhE?NPk5q zgJP0)65cU{cSycS=g#uY^^RW>e%^{;>_6Wt9eG+tz0g^N^93G48oS4eS{cTcg9^Go z!!$+cUb2ct;w?B-vML>N)0wknVoo#p=-8_uy*_tQo=R=3ugiyC==hdly`@zL|A=Jd z3Id{+zF67`h)ycTf=(y7EK))0<$sKXw5Ia9E?v3_Yv8^a36_tIs}ZW-p_!2uoYoy2wt7>zRIhS7=$q! zaG&9FHG|VLu#+t#UAhCwv*eI;>74PjDjqA&i_gWvIvQ(8lr_!~QiV5gm~>@~hQT^3 z`u+FRqHoU%R5YfWnxgB27yW&|ZWNv2E4oj>qP;jP;H&k-aY1W+LwAb)WTRT^`=Bqh zCwN7hqCW^;^ufN}sP!UW(SrjPeE|-T_-Z{ZF=(xC?oQErHmJ3J1RKEH*ZRNxv_*d! zyy#4&ZeMOH9SoA6!FYwiR_PIf8Ep(^o?;F)xKMRv(`&y4}Q}ma?i#BMB7WT!X zA4iLuvZXE9vx;%V00UQ3DzqLk%>?HWaMzO-lZoL{a^8cm9O!v1PU{|*RdPCZVVj;i z64w@8kEqNlON;UK8rt(BuR}mEd|hpA!CA7>ma)crYpTi(=!tl@hnKb_P@&R!NLbxi zWo;>Kx!72-nnhn@toRRmnPaT@8+*ClSW%6aq@CCfUD}f9xCki02SG87-_m}_RO~~> z?&W^iy==rrUgSIZzsT3x;vOV!T5s{{r4Im9-RLZmV5>!Gs1C4#(YPw-nClh;5IKHfGv~qus`g?+e%o5_5gw zNLMvgpNcWJ*h8x#XLzv(cat<81)>;61@{w>t2W&tEVP$aK`9V*lMTj7QV#&wLj&lV z1_vgNctU(QwyJU5FOj~zOC@Kb{YPF!96mcfgwJS)Fp#J~2`aWvCt*w5F`P1YS$9d} z77_axE8fBsjI?aftY)UL=P2yQO+O>d>u5pxN3o$v!&t;ud*_hAPYUKB-jXzWW3k%D|m?*YAj=@y?4qKK+6(! z`peDwz7mc=`%lL~5S)!)8h`E1o|Iqc_OhetkZ~owflh&vv-_F1=-}RQ=1p0hrDpYJ zY0k2f`XZJ)SBwQslWIUr);6Ij#uqFHmD&^=Y(3m(*dpD-IB-#z&EW2vaKt$b zM;nU6kZ5I%`)pgteD_GN1KM+o*CA!4OWqOK*b8067xF13 zqmai08Ho1Md8E&`kd6d_^G!YuZ9<}%Y-1Oct)~u`Z{BK4Kz(u6zbXP}r?%R|^UYhJ zI&YSmU5FO9KL8%Pqmwo? 
zSni(;8drE5DrbU7w8g%maxva$dp&wkgQd}iw~HGrrL<*A_%fowQgejfYI`(P&ITau zt#`Uo@YY~ib0a<)*!+M_DmGYNY-R!T2@Yox8Z4`Bq@T(g5kRL98!V4Bvw-OYfn}iv z%SSiTPbE!AXw!3p<*pm)r!t+Auz;#&_Ope~6gF7izmff{qkT{fmj5nfKd)|KKhN)A zKTpuvzXr=c&SpRN(th{`%k8t+PX+CDYG4zc@7WtoVdz|61Do@l#@?<#7;SiJsJt9+ zsUTQG9#m>9ujg+yJd&LteU2w4!W+vU=kaTJJT*j<$m3~-W-J$Y{EIxE zTBPY(9#0KoEdMi)UnQqULo{8&G6ldbC?p6pyFY zGnR+)_`BqIv|5v!(P$+#rm_5M_A#$Y&Y$An<3S+edj9sloEVMF&Vc5d=dClAKf@#c z%Ok1jn=sCStn*$qmfz0bo~PZabz_MW+=YmFq5%8&=KX_*veTeT@O^K=V*I_GhZ1MF zFTr<3!7MZp&3<_VI|Mo&LBt&e=ir4V#7jp~0qhXy00a@2aPT%~Kqu!elm{N@03{Cg4*8jB7C$tixWv$_?O1Bs-6i&6D!6#~tp^9DCt-!Io%-D6@Lz zJI>BGH#$a&jy z@w!XP>sH3=NbloymzLM9jMtIg$Lq|}q42tu@jBvtypCJje7tUDypH^Rylz!_-O6|! ziG93oRe9aYc)fP4hS#ksuUi?f*Q&g3WxQ@>yk5&sK7-e-jMuG<*K5Zryl!Q@?oxQY zcC5ndE``@!3a{6yyzWwX9YK(bj9KYa5P03C@VZOk^;#{js~?&j$(+|43eN-(f_r#cjg@KYV?Bg+9PT-^Z~M7P^jwf?Slz zfBAlSao(#4Ew~9G&l}5k!{#i`dxApp5b_UW`3EfIUJ9`ykGUPSgDY&75Y7zL3+wt(Yz5GRM+<%~WHQDtex3(Xmk~ zzZ5kIY?QhLQ5vIE4pwcjIkpQ%slG^ff<~#$B>{|53C|*5jZx}Z0CmkMRmvcLN28Px z1*&GAU`DBLi+z9{7^Ug~B9rqsGDU7sah6^=JPX3J)?x5bfeUzO3o*4lnPU`3S^YpK7&aaGD_`Sp)pEL#UkSWpiyc? zUoK{RMyVD|9)lXCCV&YPNfW>*bpdMEsZpx048)C6GuIxEQEJm)N#>tKqf`+x3}BS{ z;z=sU&nT6_3-7j3YLEu$Q${H=C7m)#oia-OuQf`|DundzzEP?lCF`P5>bqqmeoh&s zP8p^C=ZsQ+wk!PFfl(@cDpwynGfLg_Ph`~AC>4pkkKZV@0o#qJcLXy^J;`AL8l{|a z`cpj$g-BSf$JQ3t0tl2rs273OkDhPpIS541_=QcT|;cdN8p~4>3{#K&wKlLnq(5 znCEiq)s)LvK${SiRuw873wP|M?F2bUZ3~?*d-kKVRjVcZ> z@5fUCdeIZ@mFVyQl0Heb^b&6 zbx@t2?W@OroTEd=R zX;;}@;BAVDV(2x&YD+8QUtL3It*auWPq4nnoXCdR!rew3zut<@zykW(B8;yO5H~Nn zOzK0$*1KnxUO3vGQXK(GyXD#~eA++QfD{r7Rh0W3} zCcTN!#odVGE2+l6iE`K<6hY*IHDk9%3JVK;F z9~M>-ZK#W-A#0%?y$%c$!F%2Pv>mmr42=U6K=K6-qfoOXwBwlv8P7CJ*KnSx<~;M= zRm3w$D)m=L)B_lWBi7vuoI~4y7@bVU)!>`Uq`_1;e4);chYN7j#4SIH=-w6cuz=O%?# z35WqV$+S90rPXMKDs5;*9^Tr}%0Y!6Kdsj0cTTI!tfZX~ItKATb>!q*V?hbXnp=ku zMf%e2gH2b`02C=43v+z&3hrdDu2;@7w@o=;PIQz&tN zD)ufE%f;lb-XQ%;$#rtGUw zD@aO?a~!3mI**~mtj_;Hu}YnLc2?)bO8V>6^qYgF$DSz2W#etM!0U~#oPPf_Yz$%N zVB+5AM_=X7Y<#+Z;wQ|iXcs6y42Zo_^7%G)Mo<&Oj|sZJXqTi6+y1>J?so%3&;dR7 z^T@LS=+t=KPvKRF!|s(7oJ6DoY#|dhMX0hHgHe_0=M`DE~db@047~VX<`%f#W(@9- z8L3`xGgx}|@9Oyp+Zl9zLd0>grnwMbiUaq`o5(LV@}X67_;ANLKvXpjF3qKTmOzK! 
z?y{^)$9b&R-^*~v08CoZCbXVicyshcWrg|kiIzlm;mtwcg*OM??82K~l=?!J8kgSS zt{A7aS-JA&uYT^_&+xnQrmx?XHz8sz#vHowCY5fyNv5p7UW`H5BH=hhERI9qq96Ir z8;K%Eh#1iNvM^H~1-!+1G})D^L9yRYzl!7<~`NcR@`}q0|wafjE!8r zt?k|L-+qw%x96HaUAhqNn&^H22b}qemVNN?+gLCJ+o<{>#DR06X3O4!X!6~@hhH5K zn+1P?yRdCQnhR{;)r1_kdxTG*^F>Rod3;g1wTXsQeszvC9YYk|ComA+-T{PDfl}-( zP*=HUes>By`etQkN`JYJwG(u`BWAcEb)F`hKFrojRW~NA5VlraEy4a}~v-L49Z1BE>r8z5orwW@bsHXbm87 zx|lNtp3_GPN9hUzcy=EwwcM*R>>C(Q8N+UGdMupw!QlW-jr8&Lsvq|@+)AW54Wu!y zZpIxu(t7yIX4~SwqfH!~CAlwmehHJ}%sqq{*ndvuGdlt__c* zs(1I{=s5Z2pN-`YVyMqY>(S<-^$E32x3X{Cu>;RPx;JRwIRjgYFIOtOb~YD$`UZE` ziNws0EDx=HgLZ*3*cm?9=Ug+f%@>eKzOcV5VWefqc?wDx?O5hv#`0^?UP|}@)I$l+ zMED!Dr<3LUnI?u@?kFj0HQ}dg!k^KE_n@3;4=2m%PEE{D*r~&Ff-6r}Lt{BP<7DnS ze=fL6ap~TGus3M)CUPjqP#|BTQ0}(#=K`}5`VdPGx1B#1oF%(-AKr8;0u`6;tPS`U z&JeM|+~64N=hof8y|%-REL~jF#(O&wK#oMiIr|Le<$a8MDlx>?%ZVY_X$C)+?qirs z_W`#-ns7hw9Zqd-2sd`tvCP&!uzmIKw+waWju$1 z#T<`;9#uxqB+k+n0uuC;b+T z0p=FRl&BUzzUjr>x09pZ!U673+n@#DALe$Q+z>Oj>*TD4@Rb@zovbD{~hF(pN5Q6Uz0KOICGN zU(vDDzlpx`edXnw+UqNC zyA+O1l6K#&aP04|uk;l<(pN5KSPej5k$S+rPkVi3WPZo`%I8_CzB1r}Zs;pd@^q*4 z71{N;tZDq;)mQGF8(d%6SKMA-SivkSml(@W=_~ANu@3Z=7vBk> zuY_C{P+$2owuwUF@Y7dzVJU{Ds($*)JGUv)e}eSot%~&j{qz;wLYCy6RAjQBjnz+_ z!AgLQET&pFIV{N@HaWsEShU7U*QcXG)|eS-ReE!g$??UDu(86zI=;qznCU!S$>Seg z%_C8R2OQ+&oBtnoZv)>{k@b%!P1=UGkRk=Ef)t91mR$=0Z4F8TB!E~ovDq^qHm8T= zJToLjG~4?P&NdXgrGptb)ooA?sm`$S-d=>94X>D>;(GQK;}MP8v61>PExh7p_TT(* zJcTvzj-+_a=hxnUse!JN4p{+7+3sdzQX!|}CN)NbM4!H%$-JIy)H7VKr;1I&Ju=ef zp7s2&XFVO(f3==RU)S0*n|;tvd&c*y=iPIxXDV+`4%&08UXPnC?5921J?nX-pL$ZM ze>!^i&)>>$*Lh6;JoQ?ne`H=|Pr60>r-S;Z>>TTf_K#1mr-}`J<8=MQ>zVW~*YoX$ zNdNq~pY}xihu4#Mj`c+QC!p8kX10FX6YU>f&nGaJ^ycf4*bh8}&6}L^OMTZ!uG`GY zyxWx0ZkQs@XGc~=mO-zi(B&~fTF@hC*nXqbX7DDf4zujKO}VEVr%rc-Vb#2X-kFnI zy%=d0_Es&FNgTQIn!>J>7L$QJ!{Nd9V+g&1n6U8J8&6kux+aiyvy4KFVa*o~dlS@I zVQgF5cGn0Nr(DtQA@EF1y3}RU8{*Jf2N-O2@Cg9%i7MKfNRb2WZwwZp#m2@yl z^B)q;mw@I=beca<8=?6$*waAsjJ`Bq5<~MbI?a=TG^?G92sa>g4hO?Y{k9-AqFI2{ zlh2vdw-eaQ0alO~1ZN-N7wo2>@DGW?&Tbg5Ap4n8bfA8yV>XV~qxX7H_!&;&N}_N% zQMiaGJfGd->q+4ucsDSGJ2-_=WiJXZJspL|6NPU8g>TR)JnDf6h5t5)Q~2_}6ux0V z3Ww(i3V(P%ffH$NqVRJ73tJz=8@L#jI=1n8PUO7W`^Ou6 z7fUrq4cq_|BBO^j*bWE*f^B|{N2kzYMxCn zb3EP*OzJN=sZnu+)CV{-yH7{zY$Ejnka~em>X&OGq+Vtjh|~)PB=wfrg4B0%FtMc0 z;mkahGxIrAoZ_7dyo;N&K{7|*0U$|3(L-()Tp=@sTZD()O}_3EF6c`E)q*b zJY&d>seQRRR#s2Y<;Dn-rPiOu$ZA_7ysOxEKaDW>OG1RiO6zl>x|eGB)4e3ES#N3W zoTbwq(s}?k|0f9Ahs}S+*}Rz8{Dvr-Z(rJz_D8|U1GD*uoXt`3P|D-AK=RwRRLYif zmfBnHjZj~X=hUa$28hBjGi9vQ&eW+tm_B z>vEg1(Jr0V5TXOn+S(tjf5T~ALA0g`QWGi9+|`rT33xLwtr@2^D(*w;bClm@D3G;PHGAW5G3B#XUa4<>&EILXG3d;%4 zCE~(WjDWa|J7MmMeY-yijP-Go9 z=*1R9i>1XDbN49*rOdHFFE%P#>~E)HkX+cOSdX@Su{_eY--}}BQN!{rbSW=_!OPQO zLW$mA+OvhvM+@H(DZF&x!adr#BwBbvq_BJ7!aZ7hWwh|O-}BZ2?S%ssMsFdqJ>pY4 z%;YWRu+Y`SDG^_<|4ZLF(_{x2FtCrLLSK9+FG7{Cyky`quoQ{rX%>6I5reCN% zvD}F(u1e%M_1+1@0Mm&9JQjI6!ts~Ni&BofSRpS;MX)(}QCj_?5%QvRdC^FDQAYiu zaq^<^i(|ROwyS6RU{fwgkqaivjx?o7E=a8}NS6yT!T9=u335SpY-#D1RD*O& z8c~t>5Yilch~7gV;@U5-sl~v_n;IeS|L+PES!g0jRrZ!pp!u2<*{;q_m6w~9xMF#R zMVW3ck!Pf^I_QJoN$J)qgLLZ>gFNGAc>Pt#Gj3A?FkLsNYwasS(}f?lfF}=Ae`=9u znAN%I>)i9#tz%PlI7~&CP5`mPEg{S_Q$!^>1Ma;GFQEa z6^)yH?SXso=2V)_F~qfU`d2HmU|y$YMA2T+Vjf|S^QF#_zB1#DucKV#r(8#s znTpvy^HFnoh4tqIgcBS0%_&3p^#{qfpdl_Zv{S`pQHF$ucNRQk&~rgu)Nf27FSyZ-hYd(}^QSM;Cvo8EZ|SE-^O zT#K}v=y2)aQuH}uuPX^4P5VrQuXmZ%{Kx1{R7^40K5{|*r27PMPb2hD>^~6^v9IK2 z-jnc3%zuojSQf%R7WZY`^ELYZ_74oTFz%I-#}bR)m?191KxNVHS<)YJalM;CP1c&x zn>;7y^&$^7`LXhTn*1L;XLure0ur$_fQKjfLkz=EJ!?eS-u2AUOBZW}K)F>1A2!6^ zUqNK#VKF%+ADp34@?q)rdQQnYK}j9}xi>NE-0bn=M9hcTl~@SY*3^oNQlQv|bqexp 
zEPnga_u8#S5keB$e=p-r?v3{2`9vPH^_3iIG#|XH4n7N~-{`d)7>ud5EyZ-!1&2`A zy1Nk*d&5lpwb!Ir>pn(aT>NGAe}T3tCfUCbZ!l>uS2oa-I>T(+BKsHM(H2(rDCW1` zG;IB~z4LQZlWcxV+GUufY_*^AeW`G`3Wtk|uzxX0F-cv^ms{i;By-pl%Ct4QW&cfp zBb!wJ&G>c$wkDzH-p(ZoeBdgibySx@vSX`L*+kE3q1o1GoTSb-4{MyIblB~_4yxgp z*QVFtuZU39!oNHIA#et$Tb!sh5tLuq<#eX zY&C-tO}1U0q|(kqqV397SkC7g}FS@wc3G;*jBFe=EL~%1hjVG!|O@M;XR0qeUKEwDGM8;G{n$8l|dZxrxX zH_>l-Q#StAZNV+6?P6?%lGUG@Ltn~ks7~ZfK!ABX0k5#EQkVF5S?l1?20qNjudEJv zLmu9%=6963cuGOX;WuImjIt*{^Roi9DRfyx^4Os#?=6rNX632()`s&gIHX7W>CgIdNqo?XvG_#!n= zRO1r$*m*s^-#~$xjC!nfMzjWX4b|&XT}G|0X4@_uB5(V!U9M0YFY`W-RCvn~+7orp zvY)W}`MuFianUXPG2d7AuM zEZFA+4LJ-`e8-?pS^zX6*c$Ulp0B$JuMJ!Rg_5ZpP@yEPTvYBAigE~NN{H_Ay5Tir z_YMR1LE?>2S8$2fP7)6cN`Nh>1g@g73H?~7qeL}ifKjYcdHKK z?gIm9XgGB;35pD5Zf?bJG8i<;ez(A5QX?+_9Z}aW;qRGb%1;^AXv$BOlcaW7 z7r6$hso^P|w*z^L$q4Q=VIfK1N;C2(VX^z4Tb|-g1i`T#N~)h+o?0oFr`4B_kjvBM z@{w|RMt%7>xqN(1x^EEfoJKA~sGX~f)8HwMYahJhkW8qCu-9(^2>x=ccMZ5A5T+`35NNgdj8cc9w$K z8DHz6@8GM1ZyQXpWd!O9(q)IJ7gLX+!C3D~ zvo!*(LR?YdFg4-E9hZf5kOs3@=*PA#x${!IX1D|4j$L_Ps*JmDT$cKI>GHe`dEPjA z-uU`?6Xbc>mARHEb#}srNH_8P1=n)Z0U+0b6D)g!P`Qa#kVsZ}of#2c^9HipuaZJ|`_rWPdNr!P*~ zCN*VX-EG^|kY}!Itfw1snr)5OOP`ODn$xp(;+BuBt?(b@O7|wBpUA}wU9GX<7$NNSEr!Wa8;QN{>kq!HFxf%o$358 zh)elAyz1Noh*DRcRq8+=BcMOL0UGm|`YY%q`5o95l^WsjvR`SI+QV`YPUn0gGAu!c z%+?m?_r}Z*lbd~}`m*ogR)U-LGFvmZCpW!>MmB7#&kyI6rF&Bq04VwdZ;-Vi;DubN z$K5qsd}z9N3af&%4A}6ZYYo81n{urIP4QjaP?i?DAg3%1*DHmFB0Re{C8z9^FFvR2 zdv6?qtLJwpt;&Zip788a4&zp8Tr#e-4m+IO;=LSq#<5|Un9Oh>h|ZE#<3=RwgSZ|G z@qkma$_{3g9i1Db<765ct)MN)7#DmMQclS~7`zDYFu%(~@ZI?_Izl>;w``DdQaKWA z0+}E?uMg+hcCpG|5}9xW@)J&`0~(q3gKmOMPGlf5g(GBw@#PaGe}7LheXo(}0Gxpa zB9r5Rvmn#=5i*4ZnT~QY{rwlQWEysu2y_o(sn`!M%fsoLt`*lH1){pF%lgMoW0F=Q39gS?uo^xJ;elk^g_TS?tz%Mh3V}tl7!~JB;k@cx z%oS3uVMV-hw>cDNJ0|5C@jSjx^NivXy_LRYvSu}y1hGIX)`*zXBdlcK(=f5?OkEq8<1C; z)kgL(tfQOuD+|_}74t@fYX=Ku{+w6{Zb?V!eNhLr!I>M)IF6dx$`bL;h!?FGSDo!c z>18Hv#Y*Kxj+y|mL(V@steGS1gaewt=Abuj67E9l0@|aASWRZoOP#|p--C2Q{%h(& zClJRL8qQRu;)pC9Dzjk(z#=5JHxQ^mlZlU#B1AIL+n~wBr;!pP6X}jt-PT6myf<;5 zrbWRb(U$<;`X`J!=6dUtf_D|>=>CIEY$i7)Z52EUU!jA$(eWvwV{JN7Ou=oPUp==>5Yv3LZ( zyRfu6LifE8NQGB|h5Zer2#$~+=1QMP%@Y`eCQ9qx?&3OWBfg|{gce_h^Sqi$SYAX( z@s$S@RDm?dkyQBAK+U!bQXdzH8DCvPON3#K({a3BG?C}S#a5q#jTaFfBjr~G9%~8U z44Jn|=08Hy&IX%y9Oh7XeyGc$cql;-N72Y^+oGq$S`K1~ZT0^gwUh1ONKe~A8xan+ zgN^tm%0EDp^=WJe?Z_9g9S|v1KT(ow2e)pB+72jNU)urYB-_CqPe5|`skCj210xq# z06p{yb@>L$;XmfPkgNv>0Yh{f%nlv+5nPM;70bNDhNj>KJmT#3ceJWiS~{QQ>r-M4 z{s7h>v(HK20kFf3T3^O8h29y|C!xuR&)l` z?<_)Rul1ov2z6fXTV&4{IExh|wMPS2=w|T%f=;wG!mam07@Mdzkz3U7E(|u3#OK)M ziN41CUq@GN`S)4t=p`C_4CV!~sP1Ph1e2gy*#jykr7#v`sC-GSt|4sXH8fUWFE|hO zf;BY6La8cevF$YAg)$_azP&3G^SpT%$8VDJgZ z0fPa(;06PN5Nzech9tutWQEzl|27ptqsVMT#)8F~v4F}6V*wQen^1n04{$OTP)#O3 z0YN8YX;0bpOjH1=Fc#2z7z?N@!V�VkkI>$}~d(uZJ57cs;^Uz@cHejzJwU6!4Wk zrwnrow5g(%2F(hwT!0IuJ19bM%oW zFrWDM1m=^pnjJu3`e~vfIX4Vq(351_#SN&OC1Zr(pUxlziLC|@g5~R@LXffzECg?R zw673+g1UVZ7lL8m3~tbgNXfV~yt(U*V)%;5A&Ap{1{{7jH#CL5 zhN$d1qvZGlYS(+Vhk*P&Nx6?WZq#{akaD91PN$P{*Mf)oNI54GNg&Y6GvP`zSt0=u z(=_;UHLWOhK^_(1Bw~dK7vh|)!m_?05TRLc3mOD*$z`JumkB}~%AAA6Gg(n}@kv7$ z;xkF<5}HiNqMVf1l6=y{<}R{llL+zS00i8IF)C>+sN6K3)g+5w=Jl|)lDLsIo`kja zu+`rNDvuFTO)WMGI6;~lVQocLk{HT%qS9kXla4R<7UFsv*jPDmh|LyZ(N{#yHH55U zUfsEl_}Tsz0YahlMOWr_(!oF!Um8iQGnm*U@;a<#@nFl$U-yz)| zDqP*B$55p63$DV^BUn>Y;S>%B^UqwxPjGeZo^mO?CTP|4B9tA|>68$m3uV>lvIe}c_z1rT33W6*a6*2V= zA|oS56Seo=7piqpiJ0f~TqIG$wQv{a1Xx z;e!ri@^i|itLWCcwfM>Rgty{>Cg^5-pQ+zzZ1dG)M6i`Y<`a571>5o%RZwsqDfJrT zYl7`L>_4C}n209CPDGR)!YG6dd8|0;mzX;Gla03v5Tb5D4}~ump_|{!C8VyyM^S_h z&sm=2BtQmx7R4~dLNR8?9qp76`}3FFe#l!87!dmd3g`~vs883Kt8Ncb3c*7olt7 
zokdnj7AS3yxiF%TQkcul4A#0!AhSeAl4(T9T=J!$F&feCFCq7WZ$Mj^M(BlF0_~ol z1ot9Q_;Q34X&uQg=$*2kbONQ5Nc2212n)$!?oL6)g@uI5DOQc=2S=%dV97mr?pf(lQE6pFk|z9$Wi6W&k2~O_S35IE*dey zxr&>R-|jhNqv9@l z-ISIo`t~ccu{FCG>YQMWxmG?tdt}p5%Zm79MlvX%+AA4iPi4s5v}O!mh?!Su&-&25w>o!O zHkOt49o}qk4OMFwMg+R1cZV;87w7kUQJv@#Jfj@8HA0l2DdISDXctT#F#O5G5*? zP!Kr;z|EFJH<32cGaYm+#Q@^$j?HM9oA2xSbP1_(p8eD6A=XU+c$h3ThK51bSovlr z{i3b$mJEgOJ%X|3WOdZ-@?Ju&hI#O@SznF*<-km>E!+xZ9QILWAaiGOvo}#B7?hn# z09n@|s~D1E$3x+k3j}25=I@J+5xtrx$4}#ziRr@Z0Z{JvGn6EvkNoN z?vM#@PzA)}V2H`oh6ESB4o!lW4}ke_%{C5|uBI^SwC|1`_`(wQ>OS5`ZSOs@e{S8B zZ}AS=I2vOmtEnu&cd5}7?GQ@=JjbQ$48BV|#wtSw;w#?zF1fK#fRpX2v)j#%ypIXc z*Uetsgmfi=$Bg3p`4r3aO$c^FU?W0TVQGFW^%HHocx+N~#ZatZ zxugnWA)Z#OE)8SD9yfHLNBlpadFV#7oWBPi%DyWVxYf=zLuWXEof7h+WcU`##scQT zr8i)E7>Rai7kfkJokK-Tp!nVb}RX+cs zE#;kiZM!PuWR92(;itg>~hT_s8T{)Z4~Eu_&-LCMQ7ko_{x3 zya?Wqi0Xq_vdVGHwTQp76Nz2Yj}I6rBv1Yx-*!-i93iPGQS99C;S{)W$zRN>t^y_C zQH2H;bpydR`8qmT=Ipv?OXuElhmNJarFxd~A$-9w3FNma+wF(%o1`?kvYH9KsSm;7 z1Se_|e1qpm-|ESflEk=GI=9h zNV?&*MIL9q_q>DN;2hKzihm;mgyC$2$jdOIJ2zv|b2+(Z3PXN21r`44IM5ubze2qF6)pn^V ztK|Gw?args@$jRdxk9S>#Gn+Il|hJ|-|E~$Ai$<_{=pSlMg3%DJ_ccpz_h~FQl3-6P55CT!yGVZ6@adj+X?yk~}*ScOe`AvkYO0d=@fqyb4Mv`E8`pq9=-|_YfbsksOiW*+b`c_9)KA z&fE7ujym7Qw`nKb+)T<2_;8)F9kVwrkC$4F!;YaZO;P{{Ll2^`^y8yO0Pw+|3#{7) zX$@{$7N1+wzB~`LOrc&qj)3)2i&@#DK0A)OLrH==FjS%WX45?Q1e>-!Ql8zk?65oxHV~_lEKOAmzC@`h zEGPR8k=L0fF%(A%oO4p({nN`)EdYN|$&qSQ3M}x&D@ZdGD|e^BB`_uT?zClz)feOz zq*kXX6LGF@<7BwMQV6hk1Q94lJi_v`C^(1_vWU?02Bv2O6KL{*x%)4eN;M`|$D_s- z+(TCx#A=Ap*xw14!(Hf9+t{YFsuD+n`c^3pBVNT0Ap*W0xCYJ8lfwvyRvLpR1v;bG zJC*A#_Up|)GiHE*^&O)_zTOf|O~7SHtkDOO;g$1m(el;2QQ{~N?qm^f0a&TV6qSK@B|3S#t&|u3Cc{i;;f+X zmR7>Wh!C=A2wmO-p_%L^0ctQ1!o^YM6(3agkJ4EHyMbBH3Ru2HTrgHmAq(IV$1fQ> zs$DX6kS-Z}qOmN{KrU)Tk2{Mj@Ir%rl2eyNF6N*Y`NUzRjH8Vql`m2VOJ?19lRk* zlX+$<<>!Y7FOcu+)$j-IIeo*6wT2&`(7WMIt)%@!>*I}ejW~oO`_oaQcd)=zU1?UD zL&M;Au!iVDfg*VeNFSB`9Q%D@hn?C)Tla$WS=b0lr2=BURLxID&UvL#gXo;s$5#J4 zRG`o(*R7@g5`cnadgON5ZMnQNtaWstg8UAy$1cqXb%wFEjyv=q6*n6QA0S1bW!~7i zjw;|>gk4nv3yRKi(8)exOZz5jhwmlquI6&r0Y88MY#P?-F<>>IVr!Owh|<`ah5Fq} zxVRPGz!`SlhgSau+-jOLzm(m95C)T~JYuL*IgHn^$OSEg0byWl2^FdZX4}4E84CI_T*}Aw4;ra2n*yM$@-Z|~ z+59ps=7K1Lt=7Sck+x8+@=dGEVPpB?m~&Co1Ff8GsAc-71eRE>+$3;-YOz>978UVo z7PFa?pMQdCXTdor)6N(dYOTF(J>=EK_ffv{b_lJlO+iyJdCt)(RGHJ04xh-I=nNh) z?qz1&IRc4m(D)VJsLCGeti9r3FwV2C)y}i7rSq)nRFyEsQPvXOlal(G{84wq>t8i* zpx18f*sSe&j+JteFY4cLc2wb=fwelFQhkUFJi?t5vSgyXNf?f~sg@%N*u2-&S?1P6 z4xLT7ocCF9x{$5DWQFGpP9hP@4=gL`FsmjiUBS^vg4*H6sb#(YL}jt^mS`b0n>CU9 zHvx_Be30Zyq%OtL*N4pE#NZC33@l_heoP(D(Sx1(87j|lQ_feb&EixgpF8M8r*bha z*X1qJW)G`qX)&8CAfOGIh1xsrG@`wiN3+XCj^H%ZAf1HW`znsAI2K9$dMSSQFJ9>k zHGC`}SpR}fQB-QZ;M>9SeXGpMaWs>GG5cwza$KxlMMtn_p{ZabqM>ghKX$W7ka|x|0+S*0VV;tVFaCTfSsAw#Lh31xow5;wim#!e1WahIKH{%vGGeJal= ztPv;77ohMWv_?Tmua$+7p!U`Vzu)GnKtc;Vp$QrGBycQ%IO{ zmXXhI`aBNHn+k9~%^K{o%OOGxbo?*m5?LlSik=8vyXpk(9##*VFZ|vLU>~fEL+7M~ zGI*EK2%Y%Q_b_>-uusPmoUwqzo`g#Y9kBKOMB%c5OM)yvj|spNto{&=dnjCdsAd_8 zkf|}N%{xIEiCqYsvfbyqyC;VNS$Q*1z{7fqh ze)qnpB#M*ukkd~hatJvA64uZsx=5plDpR~Iq)hFAGQ|l?BfE;o%ttnqsfS3JN{pyP zSpy@v7#*;T3V=`bC|MXBse~W zJ$)%gTQ;kF2`>YSD`;^?2XNJL3xNM;3@zjWZ|2N+byaajOdnjs`PE<0FW1Q)MC zxqqo|1VF4lgz<*MMsdkqZv2evg#_J;l)^>{BtBaYBwEpLJ&-sgD&z!N1|(8}#K;&V z%7Dc3C=&Z3NGt#nZr^Cs`u7eD#hArA#ReyA3NGr8B#&OgNmAG^HoxnI4Uyy#fYC`p z*jyCFCO?ABO~7U$$HqSZHu1f%5wtN1Z2t0Roi?>A5qsXWp3c9*mUVXu`|pdfcCTHD zNdAE9xxlQ|F@7)H$2#r1%4<^J)N?hR-Z~3h*hxrwpH4 z@c9uwGw@l0PfVQ*Khdho`qX*DY3d|j%-~v4=ZgUF3O;Y)^A~)!;jbz8T;#(S#X75m1^a~5s_VVEV-$H=mJ7+DvDJT_dS8od@=s-g6$1>+hsT}ZFxiGFLjZlGGCS2m>TwYb^EPxPrp zyRkv9WnFA7muatF^X={4Bzm_O<=~*ToxO)bd|E=s@KQrjDpr`2A#Yfv-8Ige 
z?52|UQOOXNYTQtHhC&mxLUL>&a9bf7#uiU78XC$hZy0J}h<$B$r9Z{(BZ6O5tX5lK zO8)W1F*aDV`VutyeB!Eo*fX@ggMCytYsVuHge?;$>`2Tx4!%39fABvb@PIPwD>U@A zk{{?$TcS|k>J4?h5y9Q~A5LDxo|sKG&9U_7eEP!&Ivb1aWlYxJgkbrY4khGU30?ZOOqjQJ_mcL^U~P?5W9akk3Ag= z=e^(b4~LGN5k7w#5YDU5#KQRka;-mn$XSN)Su-GkeY1KuVjMUB zp1hKX5$B&r>j6Fx!gp8km&t$7U#^IKIh((HmzTuoBG$FVzIXHY-^ac$ioWL&6Q@3j z<$)HP5W@$$aUz0A+8Pn3G{B~RMvFpkD&MgtO=J1JeDwnJEn>h{A`of4V+7)f%8rq; zBct9iPKFVeVksaxDZT6ypcFbH#_4HF!?=PdrI9Y@un_JPsr|(}jm?TYc1J2U%bY28 zM0cbiZ&4iSEi{rNGsXYMnQ+|`?ZIQEktUr))Q&drNpT6hf%4eiBG_PX8%>weiM_9u zFoKMq4v%567PX`6C1YkyN~SY4xr>5!)YhaM5V!-| zZ_cAg%=D!sck~Y0k;`7e9S-Qnkv)QTjD?g8jjF|k1r)U-jpkeLklLD4246yL&G!Z# zkperMMTjAh#;zxH+qBRfO%=mF)Gk6rB;bNHQeXS|;%gEFqOraPgWPS_M_)zjwLn>2 zg%9Ld>#l@2kh&b!Ii+-_t*-IVuneI}i7nLp zLz&5YwZsyvbqZdP5VZPVz?0%iPj-zYk|nje=(=D7Vh>?l1(N7#d=M-_;cZ^UPYN=G zusGwfG(()8;6Jbiz%Q7qD6SAf*96z%QR-UyfYWP}x_)&3B~sTN);c<}Bz677J4EWb z%V$Rc%+`j``*;{c51a7-K!(s;t*#78!EdOjhR}-=TkgGouU*f8FMY#S-M(!1yW)m!r1GoYMe3$^#qHckrPw@L-an}K;AU>5VWV45`d$z2 z+}QomvP^#6d;9W>g7+g)yY3y}mQogCiZH+G-E%<&ZgsDYmrg8NVNs{!iucy)^TfA0 zegBF$%odG3+30GD8Vf%1Bq1=&3$`c|4bzw?S$1W}#fUIw3AOW~@VhP=3KfD^9F}71 zYpK!+kM+^bpoX;$RwZ!B9IsnCafA0d>BIu>1nEST)!#w1avN)}slpvYjd86GKH@P% zFs2+CBr<|q@h1;IlJU)7&<`5({0E#J8%ETgu=$49-V-;8fS8bu95kE}BkaUg{LN(k z2FVc6qG0Wb+q@Ulp18xCR(s+uueJ7s&zn?x;vR2&?TLlHo8-GKp*N|bcs;*#Vu^R0 zbixaNg%kJs?DUQVLj)s|8UiC~&Jw*jBwlzj@{?>raBFB`M4#6MeQ3kLTK6XY5`Bt^ zKGTUlKlD_UV++$C#v9d*aBIAA=2i!n(i%uUeeshIoOt6SG!j2!s1#>?eS`!uji{9F zF>_D}fB|TNP7>(EUvWYuAU#4Ti3lYTp(G-dM1+!vP!b4r8xiUaQCoymo+ZHCJD!Mz zGWYtXiT4q5iJvh9i)~#D!T6*p2*wQ&A7GhfeXTf+ufu2&1|z%6(qO?+zDsS5kXBa^ zfn87{U1{~M5fF2-Yb5xz-Zf5kjmP3^P+tAK;UaomWR+B4X^5Lz?ZdDjDIBs)o#8c4 zE%cdN9OGa^qk2(?^>xG(LMC92;-M78j$%Gwip7w*#WjH|){x%P!BpfY|FI6Wcmg6w z!9DLX;FZGh8qe{_;5elNKSC4OVwa@}?@RP}>lAGe?mdkGq$5z3$WoweWxpn=Sv*06Nu(N|Y{(BUMkN)sIR@Vl zPGcUW@Ja|gAeV?TAMe96*BK&Swt@^La1B-OpN!@Hjeq6aMij(T%abS()HgLqxLc9VvO-C|Q4l;%DKj3U1yL+Xm-Ob?aGgaHn_U_9)C`Z#cm z*WOR)`+kCv9Bj1x>VxPXtW8pEyNZ>-I~gEs6N{JrF&eQeokz2_$z>@es_Qd3{{YLH z1NFbi0%t;oGbql3n%Ay!jgs;v`LT=B^N3WI)nWg;^%nuvYz2E@&(`{jR{X^glzm?F zD|7^9Umf<>eu=8;5O&}(e7~~{f9x&R>F{IiQvOcvzZF@B)O*cJAWxn8x@ulcQ55$s zo24Y#58>d+`EX3!MMqBHO{vThM^2mv=OXCZyv!m74_tDMP1*f9f(|A(17}1j4Z_#C z1IWRZ)VD$Lb%Fg7Q2{M-`Krh0s*opJ1?-SB{gSnuSopI|L;VdNIWt4@zCYW=Mw57{oYXr;Y?cVP()R`wOg#T9z zpO#o(ufaW@JHe=(P2ZSSeJRS_DDCK)1$VF=(>;dWpVy^jFRbWM4hwHAK+hw79mGx8 zHn_fF2O`d6Ua0x|Ba76PRDA(9Fn)6hjO_G+s2K3XVY|W->YyS@xDM}Rd>+Avo*&mf z^h}?hYu}X4`5YdmVco+NQ$)nWw0ac0Y~f)#0u;pDV4WJq-rOMTJB_gaq1U5r9uUA7 z0RY#8L8PH|ZxX*idBX*OgWT--vjZ|P7Nnd0+gRM*6G)8(wu11)@EkM62SnIBx|<5~ zE2ot3jLdafVzRu3OHE_4KpV0fy;i%!>`MSUhrMtfcHmkN{gxj7dIY^r7SzZsvd|oH zO3Nj!v>1HD*k^jfCD9!J{29j`cSC$lk^Th^@zM8x5%2j1ANU`InI+Qj z=fq3Usv1)~>6tu7OEkxWy>p~11tXC$Ln#<1#xJZh4bF5WsUDa7q=nR3qcJE0jWK6 z;rC8lc?8S*qEraJO6xn@l(_Q6xZx7jN^!hi2tvK)bHeJyv|d>8`-csf31g9|>V?eH zhtC$70y;f}M$ip;xJW1+D{bEdTr1>>4_}3NW#4g8j?tSUb&c>|FLjOeT@#vtW?}7Q z9CEoOguJ+XC^dXl_-Z%%@+i+vxdvg>*QY};59T*A-fuQ0UY#nw=iayF|Tbi)P-rqB-Ym(bO)gD^`|v!xJ%$r6)xAc@JiD zkmLH=npA93<4!lJDV0q}Kaz$12lQvOZjv$kJ_Pk}F%Yg0a+yQJ`H#>L(8p3KCxqf5 zk?TQA2tV!4H0`3hNVQMJN2ZCzd$Zh?G@2#zk$K{7mtG>iQlbs(xUf?W@N(XX2!9~+J+@vD-Esk?JKfsxvjWF5Ztfa0u zN#{`Q^QH(T?l{+KFX&lq*q6ZrN+Yap8V{?RLNlTbHaKbXNKGZ_KhO3ac|afx%OtxK zq3`X^RBv2aAnY3#Taty~U?_BML>^DWxFHSk$Gjk>gulkYcXLEJ{CR$kd`e3Q`;0ZI z*oaBX^`$Jc7UR_0ggov>hEi8yhnqHHzPg!j#K2o~5PO0H2&EwV@?q>j`Zboo%@W{D zo)HTx;KszK_+mC>D=I;tWMAUOUxuBA&_q;sf1Yh0KwXZri4Q_wV6FIDD7*65#eQVI zKMyuZyiVc;Di#A4k!lxK4^?;%*H9`{!dfmfLM=|iEEcc0@>NrdD^HsC%xcdnS9jj_RJN*G&Z?br<)lyO7;;DOHY}?WppZp}&gC?~YUsWyMeo 
zYX1oKKSAyBcn50dU}7+=_!2C@CCID`-2EB^z|wzSjT)doFKskSL2C$Z(G)ZyBd`7cE~$ zc!yy|^$t;&uJ8Oig-qaXccRIE1|-qsvfwq}_p0#M2uUhBb1_wTRkXq>kqU`bCkPtQ zI31`qy(_0teV&vm!_zJUt6P9kcKZPXh=XgoeuMQ_DZil$CyehqpIBM;9l*&|&>_qQ zqgRy{_6roDSql-Gow&k_U5rhx(f|PF5kOlHfOBI3zB>RwS1*9?Qy~JFK>$B5jK%N- zj`l_{ycyP?{xJLwMPf0$PX~B20sPM%0H2QqxCZVP{Q-Qo7r-nIP~$}ln+{*N=o$lC zqM)Xlk9!I*M;s;1k~{4jb+eTqW6Z2DU_Nl!S>?Nm_%k}o3<$5HXTAxw@Os;kbr?3%sZ+k8EC|LR z07vmHr-zFC$L_h-KIOSjT!xw{I4k?1w0~ODiG(}bvJt@LxsP3jV>2z|jkto3X520n zGp@6n$WzRQp~0o4q4U`A`gh^$he6KjQ#4(4blE=IFEkZVj1^WjUwe~N}b7B;}& zlTC_o8{)nFW)YuFR9~AXr8LrlG;!Ep~b)taYj!1u`=PvEG{+c8}LofiShhV*Sm0m*pZ5Uzc&Y3KtK)%Tr>`CS7=n1twt8aP33 z4~MLL676PbJgd?{kqvg3krQK7KUYywy!AVQR83M|du-7!YT1uD~J8zt- z86MnsN$8y_ia`&4@tTb4?m^qYxzY9ynP;MH=DmGrJ5{4?Dq2W_{UUs8wB5I=CvCY% z+6?3!I`W%|wp=P{wB34))AmDrX+VAsAQ9Tm#YHF@ZC^naLEE2G8P2t5PuuP&ZE^pO zPTL&SN$5Qd?Sgkbp+~e8a`WG!?Ocs329^Y^_w=Ey&bP1P!bhU*yZF|~@&JY;34%U+ z`%fqy{qyZlP#S!DZxqNi01~0?x8(Usw`q-FCCDOZdkvL2N7~*5>o>^*@a+~X9(wYv zD;B-8<69CddxruLq)Lh>j!8#hSA?0N9OD)5JgIDo6-pRgUo`_E`D(hcFPT6VI=qDY zkcIj28VJFjJsxH$46Txq!r6sYZv#(kU&;y03zIK)HL=F!Tdo-mJqHdUurLOJxIPHH zI^?tnn9qd3kt+y+aU6jRZ1whoz(=W33amOJC~!xF0=Cm5AmO#300IT{M}Zn3ptPZ8 zLIFF!^FRac9g!^;PwpW_h!}HYaQW_VZ(RNWt^YK=(TUfm#pMvJ>x4@N#|0i5>4XcM zeZ~W`SX`uo?wFJCPJRh_8rca-Waq00-w_^ukY*T!4_WnD_cWQSoJq?3U! z@SLxL)sC`1r(g=f0Y+lKuNe%SbMTWZ)_6<&dR z(DvZYV)lvt0$Z73?~R@WUhmtO+|yO(+iq<80K`UC&8zm>R96~o(3^1$9-JoX8cR#D zw-oY9$Kw!{;<(8>7SWEZOg%&rgR~+H7|b-YK@iAaq?U%hwl%(KKu@9wiq6h#HZ>FK zsE6po&5#%z1n?r#B{vqjVLt$j4!{865I$<7JP-HU<-sZ}?rv0aA$y{Wi~rHyX+$f} zws#gh*0*$OaL_MVtc2AC!=?GC!lv=wc@=)rw@3yVI%o8y|Wj#NDv3T zlP-FvG%4CU_w?$WZ@-P{oyB_ZJcHD8>7CL2^-hxF-1sY!X2rM_E!soDtN+~|!rs6D zxeIJs7~4flp!IN(l%Cm)p%1!?5RCJ~E5>VsX%5c< zNs&rfcvmhBub99y@NOd)9NCqvWte>0z8l4nis`$x@ITT=R`0&EebZmxeMx&>=P_X_6H9(_mtIb4MEzN3zb^c_shTo9@}OS;xITu7qt z#)VfP1Mj;fJOidA-nrEO153qQI8rfE@z0RHdf)Yyiq!{W`Ys|Bvygf&eRsA}(KoqA zzm1R1S$zGN%oXT(SjIw2Rp%#=8WwkWc$W~e^XR&JO2z4q;YUT7Y+N^;UVyEFM{tY_5(sOL(X{q%r;Vmn zE)19l%&M;)8i6v6uf0Q?ZMyR?xw)NX&0oX6TIgqeYkm#kMtj$scs4bUD^&ItBg9^I zPlKx7<|N+c9iXaAJF>2V+^B@{ z22{TuH_4Y0`5Q)P4>>$Vvs~t!O+S44#lY0K9+ALwYG9q-M*VPnrq&OTBh(MHPLK8j zaM_^a@*UhfICcm1hkx(~j$LsHyB#$vIjkICV*d!sZCdHnrP7FmWroebD;)7pkaMtc zHBQPI%U#+oVp&M1qj}hYlyleu?-**K6JXIC|4v>WKSkX1Wn$+? 
ztG-0CVbu=W7fkMCbe@JXnWXJ`N=p+nN&9mU&k|1w(*DGXWQrA`z>xj}fF?&Aq&J%Jn%>6A>Q194xh zcNR6v!CL}1)I}=>Yt6D!vyw!kOt{+?+2AHeu8(B_H~SFOls20|&qT^%@UloR!)sI7 zT1*jt?~cQj;s|euw)?)z+P?F+bUfa=dJT$NUypOM$8{Lix>Y=FIudL2MDJ4T>(k9{ zwnR_xRjAh|2rRq{+-yN4JsDMK*s;poT%0OHmH56=%be+EmkpRXp)z+R+;MT92M(MO zP9tZ}o4m>NbtMfS&6kn0#j1mJ$9+^c0)tqp>N%y028+(&E}w`b;_twA0}cpPQP=|2 ze-yvuwUk!uW?8V{<0SUxRK%ybO4(YXu8dGC0BY^ndAx#F$I0{A?`iJ^lum^unnAOI z^~fHm#wEAm4Frol3756fhJc6&R?hx>g;0_rGYucwn1rg4Ptp#Hg1yUvTMhsP;`Fj$ z%|9cL*L)Fq?9?B*xx1LIFNXAB-+xJ)4@yU9s&;k-%Y6$wZ-~}->@#$;r*Fe>qrHT5 zl%a$+6v8(I)I)ehqAF%Dp(p%rS%2_f)``pEPwx%?O%xLFFFO4yVM@XLWD*FQbrv|D(R}%Yw4r6=lJ8P*Ur*F+fUtT!!-6 z`e&GUu4Nmh5v423Sy~#C){z-_Zx3r{^R}RHOko>o#kd`lz`W$ z5K09FF|5uW`2#@1|HN8111znKP_`B#y0^NTSJIA3YAhIegQTVu+>rU5vJC8o-flQ=N=ZgOC+#tC2>@a-FmjHt~Jx&RNT z&Jaq)1Gd`?p+r11;32#M0fju6F56#r8p0oy!m1le4c~1>bCn%>Dnj^{QX|{K75bn( zaj6)z@TK*b*fTWcBVMSFqQyYy4wA0TX?Gcn-VVIyJ-g~)H)t8tGoGX2;pq?#)H!g3 z#NLb(9j>Yt2#j6YLhYD#|h4wpe``T^zhn09tm{RXhIx=ISzHPEz3>NfgjRXvNu zcL~RmLbe~CtiTWHS3;U0VTVYlK+349R4jO^)+2i%3qos0#`48wfwJITAeV4;+5988 zo;*d}q-hUM%QN%DgVVBv+v^x5d$ZLz7iO@v?Y0Bve_n=Rhl7%7B^nYe~xr_^jh zVBn^&;_OWbFx#GqI3MU+mpl|Pob4UHW5Uh6DWKO8U-hBOz^l?sHu zeQ?KckTQj8R<_%w=*72d&TbI6IB2bL?}w!tOUxQ4SXKwlt^nN9XQ;3Soo0n!mqV6B z)`gT(-y;PcC75L6iV+JJfhMq}Q@0OlDtD-JRzfJW1dkFwm9Tgi+d0R)vlKPh!q`$7 zK8_f4)Ob)?o|28fh#whWYsbMK`a-o1&0G`uN&(d zT(@QYSezAf;df{XF7!q;^M-`_#96p=qTYFmA3Pa90b(1aQ(|7b8l0z~yIp|eeKJPz{xi6V8h%nBPq#(5M~srEXYOl?N>4GCHflPh$Dr~Z+U%1^;WYLRoHhs(6= zN~0Kd)8SDX=gOInBO5LFB^^XS3JbiH`ML_KgHn?&j~1lI#AT63AX2{U zOTiJf3^csed6c$#Z!@w&xFk*lCK&fl4G$3G92L0z^6CuiwJ! zSiHg}+dj`t<*m*R&MbkD8M*T+b0>N`;kJu!y!7RuBQkQz!rq}g?d~{AGMghrrEjlOTSMavb)=}X~gjV83b7vz?8cvD1fYH}E{`s0GjiQ;g` zC`-c|3pG-_gR(V>`sSE7PEHU}uJ!FMOf!n%84{O~Q9=nMAfgEa+qr0A%^Bu$gm=-u!EOiHQxEHDQz(so1)i!CX_w^2*W<~DvyB1fB(`qER`4&N{BR|qJu{v%2A-l!c!lir zTV%gW&;DghcFGu#uBBW!F#@QtPTdSkGOGv4xQ}`~$P&7un9(^*;R%J`=eggYA{=41 z1oMmyRw=f;7+H$h68a)fD*z_=5x}qahIt>xUo1?@%^S?-=rBhMnCTIioS@Wqwq0E6 zU0gEtTCZ_BuJBoDY%n@tIX#}W!)4+k(RZLiY1SK>Pzxg2x7JR(mO|Y?c3n-}^*-em_M&WfoIsoSzKF9KFmaGSg#j)ybmK`k zi^B_AhnTu*rx|?DhlUn1=q?nHkt{2vr4Vxl_BVP&3t(G-^w6EGKgJX+oVw8<#Pr8v z4ysIhF%FlSlp{o+D?QiZM0h}>ZG=G1F5giyZ1kiMAZU#pLg;tdjzND4dsjkz3zfk+ zw~vtuAC}n|DdOc%b(uz{fqUM6GK3_j#d&%lv5$BnO{Z$ z^#NQZL`>NMXMX|^8QXobOTDr$*Ruz&hu>hZ4S$qyiNXf~7S9xkwIV=g;X(y;Ho_AQ zUyWVN;d@8{-UGev29L3jZKtLQGEAXpq;L{OKHEM$CwgX7VKJkXaUnCJMhGBWf0R4g z_afQHD+0wVG#_ZAvQP)FogPr)uOju5Dxd9%D`W-H$|uHFOu4j*w_#~s$m&r= zq~e$KikHPy%%_;1TU1j7wvh>{=i7&2H3Z1WTg+z&b<$wTgUWS0tGQ=7*b$}#C<|M# zXl&lur4P>7r4Yg%-fzY&Q27=tgZWav$WjW;#NO!(Vfj-*OX6AQ^YxE@csABdLJ<(p zSU$05c|I-M<4KNt6W#3Ru$FIgh zHdlVTkhbmCZ%!CX!o#VszU_u%L$yz1O#D|owuOADVB_=HTuj<76Y>6lI2pt;we3J&r6R0Uy5Pr^sIK!UJ{(`sIYYHg0GnSu;bR7SV zW8tcH8riuaYJ|Zeeni1@Q^$^-M3G=$2$ZU zXl-NwNtZ$e*u6^$5uY&-i^;Ke9_0ue5NlDFZtp_yuP%4hxHuRpQMnoDU~FU-S4~9< zbQ!^8+^jno2uJO3<+!NNKpl@P#3kG;$6iW}88=V@V9~L#07%|retd>H7qK~vo;av^ z&x+UdlN7);+E31|I*3{w#8bpWF(aq%u*!`m=vMzz;0UVyk0ipi8g-cb#RMIuo(k)n zg^lQu%I7|P0>=$8TX1~&mR8zY9szQ*F;E+DLbfdM)WN^Q%w>#hYoOeT1|PkHyuxq^Y<$) zaQnEkJzFmN%`TnA(wztdIU zqiI&gUq1SZ`71OVCx4+;p8UmPV3w<{2>^yu!)lw8eP&YRoPOWX^^999`69!tG?$fs z=$Vc{Z7(qkH%T>-(I|d+Eavf~!sa>xx?n0io){$#_Jc_d%N&~_u9;03Ejo|3_fbuM z(C^DW`-?ZDrtDGp;@nfXq{h_Bc3%x%|eQ9yh%2IBiM5crD)8KcLT>|7QKi*Y^3}O}*D3)~~*rxx;gg|MJ(^7h8-!{x2p=kKQW$WF%?! 
zT__r2lm3B@98YDMjAyAzPR4ETYe_zRyD$(P_-%&g>y$ z9(QKDjd|Jn=4?lq*Qx%_Qcxud2ZE~x8d(BSVq93$m#hEhbtyo~B zF#x{o1e3Y4bDS%tkMij`7oQephcM^~N95Xt_pb)`<)h9u;l3$8UC?=n7Tkbk{ARaGrBnwr2XICvs?jEvx5 zCwS5BN2hI(l~{o03Q&2N&Gi^c2t86vGSENvM3ksTwV$=ZGzr~~8ME^EzqYpY^;yUB zNK3yjzm9*gET}j$!=Opl-C z?QbPepIDWYRjOdO@QfVTI*sWf!B1DQw;>#4f&*&@34>ow9J>o%2cb#bPKk zb+VApcNToK;bv==^q9o`vv6zGqmphJCzuBy=GrI z!}iY&-C0MO>ZucbBg$%imxB0)tfjMMvKYWp_4-(&dg}SU5sZh#4B>R0LDWO$;qjdp z*@~r@j&NVitB9B!e7+!(h~EXnSUN?qr1*fUz2{U(Sllm%=3?%Y>l^QNmxb_m2FGtSe z)(=k4>+Oo(^4(hI@fKG`E|9q6BS^fdCUA7Q@9Xl_q(x*j*GBx82D-CvDy`OsjYV(! zXOo_7;#Hgout*Q3_@Z8`>7 zJl`glN3CjrZ!kIQ^g0$BcguL{!W`_r=3;_e_zF1{`6|)ke5VDDBqRKt@q#U-slT(b zi=t~};Lo3MEVL~-s0&BZ1A}EOMs3|MsymslPGd1(aOW1a`En|uywQE|@xYZX&L%75 z>U2!gSKr}+&V9DRqWlXSOTFbh%AZpc5Mqb8m=`U_r-lgJ(czYkY$zsTeCHhbC8jFw zXk)gjrTS&+ZD)d5U*B=)_&U(T=1Dzf0M6h;IlzH)bAm7EkfB|EWFpxWFDv}x5*4z5 zsc9hNT@;#M6AVc-&d~hpf+2|`8ERTd1*wlPG=n?a9gl3TfhcGF8%~VdHkx# zp%|Zin=;jb8MFP93OQR_lZ>M-9k#OON%#$8RgXIGEy>IQtGn@GV8bHG!6J_Mua>Sq zhGWRDp`1$(v@^kOK{?m6D_S2Fdfjxhp`)=H+pCT2>Y|}C74Md*7;n8r<Zs(8L*%}#Y+5xMN8=2A)qW;lG)S!qT{aT4f12Zy9(;_SdkqlKTKOM{bY0P=KlfgJqw_?c!85RDkJcr%E-SMdL((C3Pw1A>IrGvQG8n zG)8qvuT1@{&0UdsY6-Qf)>K3YJccb?AbtV)JD=`uU_um)TEQiXE-Se47~InC zKqzyBel~&kj7KhvP$(c`6~u0diJ&t-LaDN7zd{=&0ceQWvDirlcMPPmbCEA4=v{P zXU#40XRrPXOC17}{HMM>(_ZKT9TNiIIOGU?qZ3MIq~4sPmY^`Vv>>C%#=Li_x|V|w zglH86Q4YD-9R-?XNz4`trIHTq$8E2Ej(K)N^v1IrqPH?{N6Bq1_EfHq=v#zxy8&$B zIDYDyTk`~x625-b(J8ciZ(%dGPVR=2;NQy7cbwJ1^k5;(7@9jVIuV(^<}sQ2Ctr~) zR(Lh#V8Vc6gl>{AxXgmCHIIj8Lh-M&9 zP_LaY96om(oRiJhDPQyRd7S#4d=6BPa}WbNP@N}%O1M*K%Pi`CU1~spw+#&N>+%`C zn~Ohn?)nQf@yhl~Tl)1}XfsBA$4*GqN8k|Md$t@j2Va;1vB7l=5=^!SVft5^@m=A7 z{B2H7X*Qhg;hTBE<9K5+nSsS>FiB?7y}^g(NO2~+QgX1gFK{`3#2KhMZYTonaBb5$(r@ePgYQh`=i{ZZ+~HLZrOJ7Eu!`f`fP#$17>buOSXv^qObezWsED<8zOwOivxJ2dII@J zOi4|V^hi&yR^h1eM|*9#U){+1U>X|KcN~BN<4i7&hhj|r5F4bZTETWVbrrdYs?B?- zv6x?zkn&*4-L`!FjJ`J}{st4}V!wq%=%(bBzd6lD6GfTgM$Qc5)oNp6;PE-U;U)Sj z#2n_yKiMYi3M5hPy9A$^A=%Bu+&E#Sl>bFIfQOiT3MsZk>dnnSB6mxsn9RpuoxE^`{XzfJfiq+mz@2acq!{#|4&8-vpf=N{bC&e`L{%HGhG1)Y0LOoDtR zr~d{_fp5v6k11K^U0ajO6<`KE4`IjMTgT_uveIMBUzN`9^eLRuY_zsU#VR^9Nzss{( zDUGuF*)_#x112;*HZ%e$``VMB5k4lT1Q)zNA^g9{(I)jwC!3EWF8o0&v`(ze!ixPD zXmnz#T-tpKHzC-&U;E3Lyi))$1ns4*n;!qW^)E2~6})E*hj&{v4&= z`zQkamC2&cw|zu5T_el5rQO$&=Lhf+c@sqvJuv=A3W-a{4Fb^J2jKaM05M=kitX=o z^7P{XUaSFrmV(`ok)I%)p`RGgFXOPpe0>N+3Hv16K49x_*m(lgHHz~1#qR$3KF^o9 z^4e5RUv~~28^59!=*W%F`7ALds)F;0To#cGx?eAq#&9{kMoU*@d~7Un2L5(h;gO)f zgKG?LzrN%3*BX&GUW6?JB6vI_Gh^)`3~=S3d%Hg9-V%s7i{4*(<BUoY)`eXCL0T~vC=p9Scml1|?v zyo=o0Bl_*Vk-I^_H@H$6Uxo)XxGG~y;clB(Uw_poRfT3XC~Sk-qE-)e=?T-L}I!|<-1f7az1UmMe)$2%_dg0y>|1ZC)jK0t1ogA z&{oin;RseEMTbZ2KD>ME&fub6A5I-4x}tgOrKA@-9OU|9E`zHv%x)FIbhYF#v{~A1 z5C|fe6q<+m;R@eB~a_trrhoHS>m@bUb8l>3Z6}=sOrlYM-Fw%XjkE2weM7GjCy7k_7vp!My2!yy!W?~0BM%$g^ zwB?2v@mG;z{;w{7I~t9>X*{rQ0u}5s3%0(_^&maq)FGA_HqvyuN3Gq$?3;6 zd|_?nv9K#>ek#|W#Hc%{6I8MOAoktC$?{p9%LjTlI9a|){j@@ljxS=rRx`!PcC;C3 zK;6v7r3w?1|3TJ{>cV}12|ojKn5azI%u zSJqNDH9bSX^;YEt!NgA{0aS&2-f7H==f$)!crg}}tXn%}-J17jbU`PM0~xbZj8hlv5eH)MoZTM|Obt1n!21`L12c<%CK*)xx!C z3~MaF_(Idl0qXIz+wv@T_~Hc5XAVEdSF z6Aiz;Nuimpl-4p)_FND8=Dyb5a666Sep_1_znDSa!N%N@@$1bL)x+VG+$JBcnONt# z($3{yuKVdEt(l_uyja3qjN}vS&W*N|?FB6NKr&_Ef0rWx;{Q3PF&8bpID z)sMa;vtU0H@*lF!o9v_YN|jKeuuBL*Bs09Ws#?e)q`+#>H&RoBgMkz_^wmXj)$^@g zM@$ms@v0QF+HGQ&AMB%=gUp7dF#m!c=(FKi?Lt^MbyW50T-^N;H09sx9WoX4P?UZ4nWSaA+d@!cVtNO}M@fkAF7%VR{gZd9>wNIDPcuT1lcevT~csbfe;TAb*Jx6`A zUVLz(Ugrwu#inx9_k#L6UjtJH`+lq5mGp_fO#&6}4rpcN7M6YWivDg-6Io!gu2mI- zb?Z8f^!0+Y%sSbRf!UwC;?2qWyv@0qDn3th-YJeZ=Nv)HY|g>?vF7CR)ojiXjdq{r 
[... base85-encoded binary patch data omitted; not human-readable ...]
zBS0N67SsikKs_)GYzfW>^}*#Jodw7M4Ztk0HJA;y0r!E1U>?{OECAbs7r_o-G1w6- z1C78c&={-+>35wkz|P=1unWiyz#br=0d@uT!EPX>uL;;5YEw`Int|qEchC~-0os5) zL0hmF=nVD-J;6SpFDM2B!M}O8Dwla6m7fBT%H~CouZtkHk7Xd0wJMOxWi?3U=LJY*;T=eIjvMI2&)YRXsoNNWVmL1(C2fIgr)7zk>BGEftwJx*;f32GfM9n=Mv zgL>cwuqBuc>Vt>CRv@iM7=W}M(Hbm(ejD%#Xb6^pZNbN2d+-I=0sIJd1U0Nt9>6xB zF(?8%fqlTvpbgjsbO1%5C)gGA2fKk0pa~ccnu2Md8Mqki4rYKoz^!0Ua39zU%m;ge z7r{PY2`C1igMGnwU_Vf85Y{(AeQ+RX3|fQc;2>}SXam}UgF#nt2YkR2Y_0jEvN&!f*nC$ za3~lGs-VG(2i3qdP#s(hYJwS{7PuAE0r!C&!F+HiSOTh`VSEf~f-gWV@FS=LY7B*W zunjmA>;tNxp&SNkg3h28=mT~H1HqwS5~w0VxdJu8RiGA_Meo7g^xhQVp!eWKdJmS+ zd$5Y$n`)V4@3QiJ(K2ZWR8jBpZN5l*5f>M5u;9rYCK2x<(cbKh|sJ;4~E z(<98mg>_LjQYL)`|DINy=%MrA^w8XmF-A#dj@Az8p;-kz5lZvy6c+ssikYK%0zI^D zMi0$3=%INZBU97wap;Lvnx|ic&?C%K>6|n@Y-NNvRjHoFlUCN~q4^6vG(V*$5;38N z)(YuS)W?T~i&h5cktzKRQ-+h)MCggYnhZTb_3L^GygFJrFQa)CJ#@mCo-k#&*@`Xu ztqJKzDa}VI-J`iKJqb#8%U-bX9-X>FArO6Mq$ z&NWj?(5#!ql;T43P&VeIav{c;llo&S8)D`7mC6IHn^XTvWkQS|ocd-e7bJItoXUn6 zHoYLH@*qadqbbf2+4gQr*a|YN3tXj=H)a7 zpz+m~^n@-O6FFdu&AFVzPj3{)osIn|2>IV)on zPO2Y*el(vKLii~knVeRV1Uc0k3P0Hm;loU`6RLldk1TCjT@q|ie5g)QE>o<9x<#%~ z?s6z$ES4kr6ryrWWu2uvt#eZOrZi&p(1jZdIjdJRBV%h-lphohS~Ve;eIaM{Y9ybI ztj;j=tj;j~7(PBMZ&*E{**Jxnh1e6WH7-M}4zPOLxSZMZab#@?%Wqas*zX-i^6Aw$ zFB_W|;_t|h{)Kot@@0YbJ2X2L;!MAN6XI;g*IgE8mi8>p_I&wm9EZmKvGB1t2!0E{ zonvY2%I6gG+nWyuOK(@De^dsTAI$Gj%DPEu#?qLzCnJ^Nh(>=#xkxoYh@S(Wr_2xf zt)37+S7lp3ez1IJe%SMMp2d%agMME`?T8)zH_apFAIky3KQ}%sw8}~)lj;_8*OS+0 z?z-_gz|!s8?ov%?8V?^nZ<)KS%rbW!_&7Cot7#t4+PTo03Ssm>$rbW;v{FuS9M6YW zfmVf7EawC}6P0mcdD2h^==X6{j#-UuYKPTC7Uq$BJ_u<+>B7puXx=_ELn{+PdeN^b z1v6}&QHVS34KsIGNoMYh;pd*rjHj}`QY`F|XM#I+jq+n0U-p>Yk&VoIHqsxf%uAO4 z6i?N}CSD1$YIdHGnek3iud21-<|; zg1!h>Pw*quKY?u=F((1rL%%E72kH<~BfKi04b;^C;>W1m51=P_1Qg-DDi{GZ<+%;i zYG6Fn)CZ-3^FUkZHwPC(O>M0+)Gfdas5gUK!4=>>koxd^@E~{*+y&Aa1NEgPP%i>K zVP7453^lD`_(H7#zJPiQ_z}DcYB=HiG}s2b1d6~P!9L(I&;~pQI)H~jPw)=t4;F(F zAhntCU;&s0-U0*RpC-5%>KkANcpuyfmV*1h%V0j33tj|IgC$@ISOsncpM!hAci;n1 zZIlzY64VE)Kx6PYXbxTj2Y?SjTksy}3RZ%?U^y5Ho(Ij5k0xL&)KV}IYFc}m4z&!l zg}OU9ALm<6>0jKzCxa5vPez&vmpcoxh7i$Q90D!{{FHJA@_ z$TwQ6s)c$!NNZ1nNZ*;Q870A89Vm66rm?_$yzdLPhdLfC!~6E2Db#yGOOX2TH0bMs z!=Uy6Jz>urbcT8gs1J1)&L)hI%My3UxZT9BNC@ z66&A9VPGQY3?_g+;5aZF;WhvRpp^pH5}1wf^alq(odVJt zsx4>>^+K>1?`=R&sMEkQs0V}oP|pP4!Chl80_w?NJQxFVFxLr4gE|1rgL)vi80zUD ztyPP_45))a4ZQCRZiPA!+z0glBGenfi{SU5GxU3aRZvHQ&p|o(4r~T$xH@rLy)9n7~$OdiUo6O+?v55asS-=8x5 zDSViiJc=*hOfKW&D#(-T?b4V@aEI-lGBZ;f=?nYbG!~_qhhUz~0-5<_WteGh#YXWV zjmGaxhyACWYGMA!=B>fJn`|9`trD^M7u!!~V^Z4trQa$y*kx-4Gy-93N3^d^<%s;n zEy{d+*}6d}-=DMbHrW@(#cW@m>C=BfpH8GS$f<2$YZvreYpP*19-_G!?ckG~#)))p zNRS8f;b8h~wM&pwd1G=GJ|?F#aDx66zD;BLY$u<|WBGEx zN22)h$<`@Yc!c!@rXq8+1{1}HiLIkV@$C&;H)1Q5Y(0ssUa~a>R$kdU5L=yO>mSTj zHfv(5gtT@jMxIc6&1PU!;{|y%pT}&yhyDxeaC95X{g#hSdkQ ze#87?Yj|v>m#t0E$s6>2d>Nwu4QnT?rZlKo>2FZ8k}ItLvDIR>{>N5~*%}Y?m#q)5 z^2g*XoJ>ytX}&~jS8R2et>Mv0SvKFKwvX9pP%~RhO=|&--;=uWdupMW@60@3Ft{}T? 
zZHx6Zw3bM*XX!{Mej4PoUdmS8*?J?(Z?<;IR^i#29?K85Udqn2u{G6L_$aIgvX!L9 zabP;^f8#i?HPUH(d1Y&x%wIPDrMgJ1k1!u5HI=FcHI;K_HiGw`$?0UCps(QT8q<&G z`*5~y%KWi}IV%(T(h88O|JF=|eV!N6=?%h)+{}L1k&rWCCB*DecW*>GqWPMa7x-Gvx1ug*HoxZ`6zk2u6=6euC}%V_a|1oN_llk z!_MNAEX7Zmo`Qdf6UB%g#I9mT>Cri1Q}^rI@;XQxUE8uGLlvGX>BRA_p&~%QQ`1)O8mzZrTO3p<2tuMorK(V;{B}frT)tCzkk@J;Z zF*S}=a?<3V7ZM3gdcOHQ*2-=+U1Ichtk|YGech!^I0>>W;ibLtjvjB zCF%5Vg-+7P!FWGnsm0y(l8&)48*dm59L^O?d|S3BOR@=9d5rC4g!dum-+tI4k-VAt zGT2%J?`tM@Znr~n&*!?Np7gOdaBy}k9bB%{%ir*Ir@$%un=d&f*^zi;^R}h<`jcqbxu2p7B#s{*{=0k1doGB%0f5?*2TCtEx4&d|N27 z3+bqP;f>{R&NI1X?5Z=8(72AtlBcNuS%$3Cn69EKUN z%vW?la@EH+I9r4-WqBIctv+-?a<2c1h(0QaU)Hs-&610fOp|$YPoEbJ=U$B0(Yta{ z@*wN_rbE9Xy^4RYc`)&kq`F6Q3;g+}ir+464MOrVow9~h5N=|9CJX!h2 z5R`}GI}YmHlJqM(++p31sL$W(Pa5?{d=oKN**_`yqWZ3c^aWO>F}1*gE=(;j>ts1^ zUSQfJrWWYgjHv~Rc9!wx1y+q?YJui;rTlw=fqR)+U`7B_3oLHQ)B@Aa+~MsD%uZoy zfyGu#Eim@^ZQi^<{T)m#&|J#Y0&Pv0T43?x65hVR!0k*eFd~Yn1-|RU)B=;<+~VyE zOgqNZ0=d~tE%2Q?QwvOLr?jtR?5{Uje3Yy@z|;cEQkhzyvpZ7@%{%m|7tBw3v5aU|Atk3$$6s)B^PtOf4|hg{cJ=bZ2UT z85~m!tU84D0FS^lIa3Q1^34;N8 z9~P&4pKQqmBwInmZ~++L(7IGW48Bqo6owydlS!;ALsWRMwbUxGBh8#}Wz} zf-;cSdgH+k;B=7o7Sh4?;9{^XxC*4T@(o}&a4Tp6W`i_G*bSP2`@rsC9*Fei(;aEd z6WVxQ%wXgv3mBf(DAmH)luM%-Pcu#(@#}*9jTu}WSEo|ftWLeIWnGs#2^)RW*j^Qa z)`$O7^JPC_Umz0uz4(Y-ytLt$$mV8>p5zMC=YNsSYc;5G*B!n|6)s{Xp&B14P4ozB zFv?k3^s}Hfb=)IPpPi%2==v$Yqo@#>VU?5VH`WsL9AiS8X!Cmj7)WxSk(_R2>HDtFHlA9i} z@sZIm?<5Nf=j^>*99#qJJ>1+p+ylIQ?7V#39NotupLqQS4W_p9^i(E56MGJh-eY_` zJRMxT9PNEPylBqO-hcg~i9O!=ah?=crr)5^#GI#>$0#p5H}86L4I0dxlZQL7h!d;%Mg!5$sKe_*w`LPMoxMcpq3Hqd) zxeM-Tz{l_6spR);>@HndHXK7 zJ%PU_k~z@iHMu`Q9v6j5T3@ffbb@o@i#^>vAHuf)BL0JXA-E`mO1DVd*hfktN+f17 zKrqRLOXCCZfkn!mfG|EU@WJ5t2wp!fLZ4Ihx+?G8foHFlXS0T zK(HL&hMwr-?BVY0;pRBpjLM3)hm+3)J1<9n&t!b_F-8g8VlMSJ3$-u{HBXU$*9iRn0$GErvc3mk`yW%2|0H-*4^myDI;Tey9FBp!*N5=4d zD(@DoP)FadU2PAXC+1!rYxDRK)6#V1rF z;Mh@d1grAQkBft3A2Y(1b?muB;=%Jud8`5sogVRfxQBlSmBJxrA7D%`N|d zSBfLAXCY0Zg3*u%!l&3p@D4ZpiumtEGP8&$*`vn?U)UB+h)lsZp+L$Q&#ox~kOE51hKW|hd81S#E}gFFq9%1D?0q%LI-A1tNYe81-{ z4~tL4MaEy(FDlPe$Q160baKVd% zoXQ)0u2WeidnklxvT{S8hUHVEJ{<5L=@N%~QE+z(Zs9=;>09I?W$$i*$jPK3@n1M2 z7bPg9v`Ss?4hoA9AHlIcqQr$}Zpj@Qpe$)l4B4Y7DJj(tbV6@@|w!vu%ewHFHX}HK? zB(wRQnPL*Ptjf&hZBrF>;6GWO%#Z9E(ncXgS(YY?rt!Ul<$!v(_$Gdc2(BT?;2=6R zEriG43*Up8CN)#=Ekq(FM@X(Fn%zX=1QBUAO=+5PaCDn9-vuZ;v9HoAj8Ze=UnW@7 z79$unnaV?bp0E;)1{%Q=xr9-BgNlJdgklnhtFxqu)VSa~x~Rt@{wgTmOq@u_ROSHE z1T9gxq7Pa*YN1AA5H7S<-)NZ<`Sy;lV|1qz|E-+*TGPG;alctTW~2?oU(^*)P*biRI=>PI&OXf(;s~u`b1@4&X*p(EK)wh6?_qy&wbXj z4nefg4`6!HFl_T>Rl~Dkupso_LdmeuVugUR9di?u7Ey3Qd^|M_U-cICIpd2{XHUEAN8GXTpQ1(HeBVrWzkS1=vewg_&>}bpYaz53I>bvF zi`g~bK82zarsj|okzwq1HeN#%E=O|Ln`~%LV1qvGu56pW(r3*KCywJ=5x%`_FoRra zTGEvxFq9tnk8fAA(xV(cLW`RaM;`@Oj)_q3k=%TI#W*C=&w`ZH@lrQNI&>ZZ^MR)0 zQgi}Tp}rW~!JM)lAc?~w4|?L4q$@7itx?8u#j=FpQp?4te->E_4Eo zn|a>d{}cUSefoUS6T-j7LsERlzUgq30wCx$8ICgj#$5|P(4+9P?nc=~K$r9yReRo? 
z(p|o_LSsvnrvAphNQ^*P5eU_SHZy~!S(szd`-Xf9rkY3T%a1qe&HrmR8~Pb#6Cn-l z`sH|lZqCOCE9kyFa#}fd#btlA{J}~Mu1}QVdOo2K4^CDNeif|l)1`^bu=2J+ia<~_ z=7W4VDgMv>a6UT&_f|r@gZv|?u@38U5T+0|5Y7swegd();V!Sj}}E&PJ;* ztDO=%a9haJEvrK}_M3lCZ);vfa_6VFtLB&AT-)3Au;z*B`%~4$dB3mdbZU91{*g|v zMEj;VPd)m&((YvX4<&ayK6`vWdTeetFh#wC}!wn{Yclxc7eC!riO36@wnNrQ6*|)KF!g+8I`NG?L&6%Kv~9t zk#{Z~zi0bCfA_DlvVIBv4;-)S-l>>4{Kl@`mgU`taFt`z#+F`e8&rHMxAst1ld}iM zuUmcWddqDm@;2tFnrqB&9&n&s=e+4t?fvtn%lo!+jqu&oDd_ZYaoFMIb`{fFOueU) zFS?o0OaFGwf;s!AcF8^5V%y_Wnf*)hsyAO!Pa5!^Yq>OAb4~jUy@$tjr_2x!kJeZ# z3tge&AM&JjY*bWmkj%KuQaZ<-3x4(I>K3x^x2YJNuGN?|;cUw{=gap=J)a&i_#^M+ zqUrOGetgjBez1%9uHozHw-!rXOaAc+a8QAU0pUj`s|IZ z|J2^OHRWlf{m1f{0Z#M7_Y~&^-c^Xz9DeK6tbc&3)-RW)>)szWwYldncB(z}^|epV z73o|u%Z-?DaE@H0SrT?M^KsBMz4w)qGcJ|)?3Piy?@)H>g?7UaPMoQG$YQAfi6eVt z#|lKV^Zq=1@YH&pM~9oOzqvnt@Tl8=>`lD+vtjppYO~u`C=9eieoY@5x@yFfXx(hT zDJk05^&bCRr@6964yUz$qxugHD_g#}xkY1M#4DA~p9@<=PMQ$>wkTYhHq~0z;rA9% zG1H7nK3Da=wZJdsuKtzL_rv86j=q0ib8@Nu$s_GZ0@p|>Z z<7N2q8{rf3x8HA(D;Z;b!1-=(-7;4rt-XJbZe|^rqGordCh)x4zw*k2^W^*g13U69<0uJJefk%0d4f zb)|(?*Nbx(Z7lE8DW~$geTAVvb$=D|e8ZM030fmdp0LrJ+AcxuKBFd zr&hN~$$OO3d;j+~Mu&BNT6*(G-IKR}J5p0Iz3YQ}+9fw{%^mru z?a7(nbwB#|!^C~^#8F4uKk?I;A3LRG-Q}Sz!u+*WTGefoE{n_wes`-bD#-D=tZmXG zt*pOp>X!P#Yd#c}*Z!*Gk-OV1H79qec=pEY#u@X*8WfJ!H!1hN z6rr}#3#lwS=|GP zUCBm0zpI-~(^vm$n7LTwbYMqN;?9%XA6CEoz4&#t>Fmlg(gWx2{*p6k(z8q_2lFdY z^2krI({iG$2G{uY67|tk*N(O^Hh69A^x%N{r1MjKV*j+1M%BK3-Rt<_LaWUQD~uNm z$WmYaPkv^*V@uNYk}G$b+0+~uR+s!*aniYZ$`3~8?1uHK^lk2u{K6qM_~|^cN8x#6 zpL;3`4OiJ7*IP2X+GKlSxy>ovb6Ga7wa2?I+p$LL+L|S;HMuMP&Q_(4i-JGIT=ssL z=rif~F+*9_mOGLq^VjOFS>c&cTG?vnl?yjNJbb>g=))WT(%`sGPm-f5bA4*5o z8tAp{U}D(vZmf;zPj)85cSL=XKJj}p-AuD+La0r#`<9#)DvvT3w(ojnNB_W2`Mvf& zG~M%8vE-0=wn3)sfacs?rp}lCF!V0&>6H3UM56T3-^r4`N1RQqs4eVv0_A1|d z%)uqjUa^(ZWUHFgz|XIr{C=wN)92|cif<3kD!TH)dg1Kj>MPbK`Q+~(EZvbkCS&$c zt(_U&E`7+=oL`iq?=h(+Ku=cdbX8Iwy;!d>-rm6PwW^8TAE#qwr_<~b%SM%2uWj)} zy!2cyw=F&0D8J)EyQ$wD_lwA=PP8vCm-%(}5E}-hTI*~O8}+zv%#ASMGLCv%&CXgL zoU`bLckYjE&(+?aP+N0$)sDiN+iS}IewweIlfA_FNK~cOhF&$j=KPr)b$PdQ>@B&` zq<0p*oE|>3t_;gkKQ|-9r`o5R^tIb>uMenhE!=4oz9QYSdsb%8yqt57-)B~S8gk|J zo$o(YU(b))nfA%=z`7BdnFnUrr0?2iYTW0gq58mnPQ5xS60Ot^J&bjJQyewcdbU$a z;(GvhZSAttFTq5>5zGlFR>f>FC%d=WMf1NscPoZ>F%nD~&-z@Kh zYHNcbJJmH!rudj@^p;9mJx<=SW}9>VvM{3+TYL0cc%mx5=+&kr#m|B(KNXnNJSji% zFu%>m;vKq@*$ca;9#|ncYFZro&afzQsMC{?DT$xH%Z-v&)cU2K8>H#|*EAdFxBGK6 z^Il{cZ0dhSvM}zG=}M~qgQgU!#FV5rn|RVmwIW+n{aImAiv!b_sLg5X(L8T#YrSPd zZ?wEKXRYqTnsGXz?>lOGuPoEh5^vNRKFVJ^c1w5TQI{Tf>YTW_OV3V$op)>ZHTo&& z&yIx;cC~+#8`-IRMU6qrYO=+KH zgO~nzY3R{++Czr@QZ^E zw^QK9o>R3(nAaLl6`z@Qu+RF7Z!Gp~9@MvB$@(jT!rzNlKJTmEWmRicE%jdt}XOSmpDQ4qkM97-#r0oMgt!nnfkWsuj~6C zm2Xr0>VI(9lc@5~zxNBz`>nZd%43_+-D_Hk9{)9dhA20G+f4Xq<#>b7awV!vk7+2RvzvBm^l(K(Dp56F9A;|p+M<%SX4*X;5MUHj^fZmZ9wRjqn=AwTo_p45$NTH9>c zl>FgmQ+aMi>*k5;MjyBO>4zJy*IN!;utYsz%;L!pb(cmiy1wlB&uI(qy&bt|mW$eg zT$A(j=ZCJJeM--F){}jm({9|YnCWw6WqQc=v2%vBZ#Pdj?)KcQ;zv8z9us9>Qw#g$ zeazNf)03X%j4|lB*I>7NkJyW7uvxufs1{@bUGOW0~BIkN3+&v#j`?T2jo<#f6IfxEnW7HlP=m9(XI4&fV1JT7Jqx^ zS#4Nx^w7e}{Ml8;C+<%6JN4&)jFVLVrQ!K`i!N&Z^4H#6g9&Scm-Ty5IjABa%Iv63 z>CZLhht4hZ(D)&=>V#Zno=R!nsXL?ZmrZ%qKk-4;c>NIH%n2PlKUi+7Zj-71Vd}fx zBQF+w|6tFK3*q(}aS7e!*RICS*t6QOeJk-lN482f7Pp_*qxyQsk;@dDcZ<&NyEH0A zW5nKmS>Ih;+CDdYs8%sI??9xDXJ4Iw=W8Fmm^f$N+TVuXm^W8?sA|Z4KaXYCCFcDT z=juQHd|4;r(@GacB(%vtJQy7?VLT_x6Mz!l5CU1%R5=!db?@C zlsd*O|$wA4twzD3_hIVYak z+)}sbLd1SW&kJfnu335IZ!+W#%j<4^O!W-=%V^Nu@oqhi>Y3X%KYYGs-_|Yjb&Nai zyEk)2Y53q3NjH1fc8OWp>}~g z{B!b+-^T4r^&EA(!}B^>*P+3hPV?^m{4Ded2 zSvcrvZb~_R{+r)5zxWTU2lJPW53vk-f2W(;#wqJhY(Jr1c3S0>b)qoz 
z)%Co#t<~J`kHkN`(DG`z{*T!wclUB?)xpMO+P1WWu=8d5Dk*`VbFRnzbV+f!+smyJ zMy+XIvvcsCPgf?la%}OJ+q94cFRv}>?od%}_bzm7a^Lp@IetqP`+xUo;!~H55#wHVZ81yc5HzCXmurj8U*9<>)y8hd zok`u##2j2OZTg+RE}xa(=`>Mm|E7nJJGXf~GdX)qX=G9B1A2+O{5KUI9eh4jCGGRh z4Zp23G}X4wvW^*RymUv~u(^d(?~dqGDevQQ^wz2uGu1R#Kgrwwy^HRa&KCQPoJZWW z7f+1&=gi;to^D?eKDNuweOFSh>BO%JIvVz}Me|Ck|B33BYyQ#;>a$LB>@oG|vmIim z)Gm7(vTfkT(A8V??#=I)T#>i6>h{ylYj2j%*F5anJ9Yo`6Tjz))t8^@w4&1y{m?1< zM6X^So$8!^(yp@O-I5=o?>~N4Tar6=_N_S=TFWxVhel-=dIc^egfkB}C! z@((SChb+<1eKRyhbkQ|ZfAZ}>yT_Zur^XFy_V~sawUSXcb?1EArj;8tFgN?d$O9R& zd&e)`*`5F1wqF^(fc(Heq2iA2^<6iHPwZZ9xqECSH{@dJ*tAo{L2bJps?8mL@T|%8 zW2@KYo!HjWTs3cFK=b(;=XJ^t?ALy3+E+e(o^OO}tJ6W9b{!5A51&?Hw_N4k)D{^x zMfo+i^?Oa-KW9OU!?|5DPd(mNombL7N&V91mfZURYc#W$KGe%-KVypSagFHV;uWE? zwNFC)RidKC)*9n06?3GPWv_xccUge$jFbxUW0?l4tX&!+NON{c=)UU;mPh zXzqlFTr-h;&cUN$C7RcQ9%oLje6QED{8Glg;*4$=O0y45JUG0a#Ub69M^5+;EjT9I z^Jm^{(fU&d4>vpfNGE>(&GmoW9yR#qn~8hX?sYd*RJ5J_YlybNs?ee7y3tccq)hS4 zeyn$0d!=UG&stngj~~=G?tjs8rNcapEjK%>yo!iyQTX|7@PtWe((s}VGV7@^Q7wM| zTw*kB!L8m^`gc?O!tam1^8V-p`O=d$@7oFqmrwp4Gru`<=BtnjSIgI8WR zj)%Y8o%@xno059ypkNyXzs1=sh z?Z_>@Zq=uJb2|MLTDb3d$gA!NQ?_hSjb5p>L9@lqXL{C>IQ8&3&AAD!SDzX^ z>rq}xoA3Ac&e1t+Wb@cI6GmbrCLPn>vVeEC@6LhpksElm$?*?z3py7|@87At)!C-gKa4?lG(e7Lio!p#Mw~e#KOXp=6Umsm)FxI==L_euE z_Eu1h-H#SIrQ-(WK51i|kz3k)cKVuGMi~=-;&!&*C;o76r*+Z#CyA3L)yZVWC3cbr zSN-%hu1+`Iyg1YF*Bu9(L??G92ED9)xV`#y@$Y9UXPe$VcR)I6QqC_9PMOc-QCG~T z#eRw$Y!#Iw>g89Xt*+U}z}P1Gfs^&?^OMvM{2A*rwKhs>dA!%_x0|gB4=*rYk+57n zYe2iq{D1V)mmD+OS(!ZSK#h&!b#mR5YUh)7=Zt>vt?V`IMY2cpr@^TXg&yL0_k4`c zuQFVyvPAE=?RJyu*{5vE3vIH_>2^I{>#DV8$FkN-)?D+y!f85|S~0WXTNYFOl3?u|{ugWod@zg)2K-J%9M&=9>>iE8~Jo{iBkfbh7oyt(@qQp4nE< zz_+ELNe5G#*t^3`?0%AdirO(93jh;}G|k+LZ9-L6lUV)>!@iSGE*$b^dtY4vie1E=AQuYq%;GwfK#&pZr zsim3w;gWt%(foj#NghtMGQH?>$<_Emy~VHn4DA1~Gf_P)i#=VIXqUFux^&c1@sk$Y zxZHF3M(OEO?G|>7@H_sUePVTnpRByxQ0&oJ$2v8jhmm;02+sKa(G1QYYqpj5qMYF6 zKjwPhxLI^{at zsh34qrS-!Z=hU+>{SR?_E$yay;Pr1tI}5j3rmqO^nVHr7@wuG5PnDVP@4UV;fkHJCE_IuTMt-s+^fs+>J=@r%8MtbzJ5MRTDV8%ydoySJFD*y1M6xN zP4%4`ran_zNu<5k>_~pREZ=$C))hu!Cl>bV@v11l>RIuUO$DDSgUg@Pn6$}%ctUqa z@yG59XGFk1vhtvN^GH=fF-qjrwZu?)YcWPwjU- zDC`iK`^I44tBbAQX4JLX-+jBj!gYn=Fuyi!K3kf!&0Ct%F6FVCjhj=|;AMSEhhF+| z=@9LAMJ?{?R*Dq_(4AK?iuR=?RTnJO*oZqx%&3Oez&6j z=-*1^zG>8N%guH!+tH)O>T`GTcb80p-CVn6_4r=&WL>wermqcqB^@&FeXO!e&yRsl z<|DMGil-Xa_BlB1jK!Oa>-!GcyeD@3l7gwezrR&+<*KFMpEdTkMNYb(kl&`A`}2 zVfoXj+_|zTiO;`_u<}xkd~Gu@c<&62kZ40mQ14w~flp75m2NLB2@O5*Ygo5t$HJ>3 zmpGReiChdfwHxEz#eMXOgk+D(kdN-hnnkXDM}Bn6xWCQm?D~vREne((v~~a6VV3D> zyQ}>iY+L#&M!G!dWxr@~C!d!Qp5ywfhEGVyA2I&O9Zm0d1&h6g{BqoLdYeaMPp01X zdo;NEJCXN;iD7@v_1!ve|DwoJ>?3A0P9VeZe{8|6(hE+Yz-|dxpp}nn3 z;e_#5&a58x&*@vUHW#$2UR~t2H~aFkbD0+}eSiLvcB_|H9Mt2l&W*OeUT~}9HPxfy z6?@m%E|310_+#&p7Av2ccUZH1#D=w@KD$W}U&7R`?b;gi zKId7|^u0X|#_W;rHrT!A|7-6{z-zj`zE6^y2!bGpIW$$XGtcMTOb87^f*7hH2|*-j z62u%s%`IxEsuHudw4tb)Tk}v&ONpTxt(vMDs^;PQ?Q`!Xxh?(w-}imK=l_1sck-<4 zv*%&0z1G@m@3Zf|msi~w8@Q7DwZ-zmorkRW+#Z^Hx6z&@YWN?^Mn3vz>5g|l*tjP% ze}hl;OPfMg?f87$*HhMiea7#z3hT6WZ7TH1o0imh>&g1bTdG99zB%T$&lj^6GTY8g z-LSn@;rN~1d!61fpZ|@_|IOR_C4E%-z_dymKfOMq&%0fvb4BdS17AP;!Abdxy&D?- zKFYVl)U@q|i6>zIXnSJk=9>cweklBLC>NLD-zBbN7uON9;P# zZ~v;bgEM@?zdoAQVD8#6>CGwzRnBn9Eu!WA?_PV|@6B#SKN-KgJ1J@6q;G#v@ny=44&8@x4eeJi_)qK< zH}vO_-wMjz?YJ|!sL99cx?RsdtZvl)rL*2ka6KP z>2S~U(Vw)BeG+o}^8A-SnKn7Is`PtR+qjUs+ss!!sCT7q@Oz!JL#Gv<{FLjNcj&in zo3zfqeA}ya^ys`N*Q$TtN2`}|dQz*>2iPl@(%*i2T*KcM9iQ3p-nwxYy1cH&G3u%r zrN`IhQZJU9_@(bgb>*P#A?KHSKb-hwwKk1feEMhmwru?&e@tCeao+qUJFzkY7h zuV0mZyVLNESEOXB)v0_K04D#)T%{Ab%&33jUhf417THR{0I<{J%YwBVQpGC1rDJMm 
zv|q5|Xrm{ekC<4o!v6DF@A#kky78JFtL5qyTji`6@>0RRzJue3d-!L3^U32wnFrtR zy8K4{-vj5hezaEkuw}(wM~1Y!?sN3Ul!UVp4c9l8mv40>w*(I zKfAZ@O4*i6rdKa2-)Biw|5Ez<9d}H4MLaX_o%e+1%^ycMi2dTdMVHS#oc`JymH6{j zhkhtFKU#5Wn~%Nh7h7%}KQ`#+!dXLl^t@fU&6RGCZge*1r;PKdciik9v2WVZk48%$ zesf|>sGPdH!D}OjO!;ijq3b8Q)jpASe#yEre+1OI@apPeV=Eu-fAZs@<=21l)smhb zqxUHbKYW}MT;;uA#9XoL+qF8TPV8HCm3jJ&ubQTvYdYuH7i~)0#ulyHd~xowa&1P` zT-Ur#_;+UHgVb)#uAI5OyJNpuE05%TSU1b>=l)g z$MFW=VE9+VCFv&sBH+J_{G>d5tm0b>{`piusTjPE1nBS=pg;Hm$u|Yi6#mU{{Q%Pe zQSjeIetZq&i_@XLl~DgO)Q>Nee4!TkvWVY<`tjwHFAg30;y7XH<#0U#;{ajsUqF5g zpf7G{@WmHdrRSjjaJ&x(;2dq~V)!~1~iGO^};H$v@ofH4~D#911pG&WYOWHdH&<6gS$WQQz z4|II1;7)u`4+s8nbjnvi{2?d)aahLpW%xgWOUCO>Kuh?qB0s_ZYk=DDFL2_26#V#f zy7Xx${s+O|4E`-}2|c|9Xb1m&3vT8lQDe>{*`b^`tg8p z_%9+q!G8|`#<}!7C;mskufc!9iT{D{gRV<&f=lRW8sJsili_a&|2nt? zUy}im@ZUgwg8%2u|AS8aCn8NS(tik-q@MtYfd2~elk$23YQg`W6aORO*Wo|q#D5C> zP2v9nt{-4JAPW9_$WQS3y!pS^iT^%GQy=M9!1V--1BAi<3-S|ubO+Rcf36e%!{Jxq z|K5rJ0q{45eKvB;r|o)3I3lq|G#$Pzc11>K>D?C$#_o$w1WRP#PJMljXegpn9PW%sozZv{n;g$ou1!xEV1LP<8f8PAx@5Fxs(!7N9tKgFK z;{oCDUqXI@{~myv@W1QC{|NXs_)j|VKM?*V@P7`sEMOYoRrv29Kk3i&=KnWN{P#ne zSCIZwxTL;GfY$K;j{IaiUGsmj6aTLH|DzNCX-Lxo>371d0GI*j0RI!p-`j(#=I!B8 z*}EcD)7!(pqPI6y-P^-f#XFR$?d=ib@9jkedV6>Vc-Nz9d3yv^@-9u)@b>Vn>OGbU zLDT)cJ&}Jb@>eJM11ox$BKdu)c=smx&zd{%YW_CjM&SuQvWd@aM|!a!EO^vMQkkq&!!du5wA){N09K^)uy(=|Ub4^u1R3Qtd4KA z8QdM*8~0~^NqKmT_3-fY@ba*E_;~nw_<8ty1bPH{&>kTk@gzTvDO(>22U#Bt2azwk ztPfOz7)249u{6Ws%x%WNpwPsWq>LbPh8qW;@q5(dgDvuD6V9rq;WNvOpg#Dt0OkE9 zJ|QhJsaSXlz7Wgkozeoo-rO%M8AoFg+dnlSEra~Tcu+FFIzTlE$w{Oz*Egl)&^h^u zaPk2Z&XXsTuiRbr40U|Ol}Wzqb)|Qff`0YO#BVr5>BBvH&XO|{g7Dkk_%-na+|=jn zOCRgEr5ztTxxzB>6--iMkR!^`*i2Fm>8|zL+s+PHM}3_`kvVKopR^R*=jJSzbPm6r zO*;Ct9j>@O_$I+wM22-r9c8=XtbEOyk%r>4^?pG~srY&AzRj$*kT$r|l^jb)6|OiJ zeq0ql8)NGeJm(0LvRq^DY>hJ}H3_{>?4Lk>7`!)r`5hkyCE+wNn&SAL)bXJMz9V*q zJHDXAxBNKk?~21I_h(-}4Z`7Ml+yb=b$|7n0uB_tuNLi6c=ZiZGTn}kiApF4qS0725zbmVF zK9WBX>0;f=N`ybXSXm^^-<4H7A1P}B(&f69l?VT}Vr7vue^*xVe59;}NO!GRStQRx z_-&v8SG@Itf5pe0zL@TQt);bYX`L8(?`o;Q^b1U2tZW@j)KK)B;$y*DHe!e|Ianmp{By zY;at7C`nV`7DfWkmJEY}N>PO+!rEAQFSvz~z{_r7&~&J42t$*p=2pFJND%d~WIC%3 z>N971t#r{=9X5oK`R~Z<;1&KQ)5Vd50VTt%@&ZeQIqs^cRWi&<7lg1|7g0wB89S7< zz=!f9V`=$=Ge~=bz+bF(IKncs9Z4;I72)+X?eO8l6Z>GOsr1D@jJ)%HuK0pt@$9$|swIFtuK1C~;z{`VJ0Zk*Zv2qDCFAqm^4ECu;`lTF5YKp8 z@Z$21Reo_i^AGXk{vm!wr5Bf9=oWv#t^c;BVFW!-$HzVX zfLpx#_#J9iGQWHLJH1{U|MNe@hp`q5d)hwt@-O^D{Oo^-7dS`x7o6kbUVdReNBjY} z1ppVFyN^@wz>@7OK2ON_{*nDG?~`Kh?&<6qFD~b3%8TQ>4SI3>*nh}h(JjCGc=Vl6 zvV8aW&z%Uhv}h zTJM*Pcdy@nZOM4|{`Fh;;`kohUmQPef5~|F_Q#wq8Sh^I+h)-TQZYSjl+z{C|A#;`sc}UK~$vC>ih0&yQ|##Jl*|2Df=~adXLd_x_*U zSh9Tg_Ahp;-+lbY9V?mNJ>K(p$$0njJAVJ-_?7<v1F}SEZrf5@kmG3Fs>(c1KttVuvp&d2#Z4Ag~h^-B5W7J{_eSea&H6TktQ0T z0~!MY0X~4^h$GJqlt7VWQ-BJH0`vl80Hy-ytv=MqPkpFhq_yGw))uV607n4Z0LuZ>0mA|D zfK~t%&>T<;;0Jj4g%5QZa2#*|uo;jGm!Ll~9K#;5zC%4cG(N3|I~DN8K~w z_65WN+5&h$FrX5^2XGhTbQutYv^>fSfm;i%C!lCE+6^ec*xmwE+UP?y0E7X00x|%T z0ZRd!0Y?Dm05<{UHz6&61+)e90SpH$1$+Y71~>#b4!8(-2q^zK+5u<|Fac43o`BJS zd4Q#WJitCcKHw+7O~7M78MLz&pgAA}5Dn-9$N)?RECOr>d`MSJ2xzhZf5;wO2@C4c>K*G*C;@@*-0 z*J&tjSPf4}%S?)d(F<3ZY%Lb&y!RA2);dF<-mZwS8Xgg8nGv}0hO{d-?P&{ld6l>! 
zm5R2Jn~g{`?zbWBaokQw0?2(*D5(nRAui-g!)12EoOj8UCMm-QW0NA4+$&1%@uGH- zu$ZhwXH%(_UgWMK+;^G*dwI)jl$7w-^06T&kn&@bG5(3JTBwIM=cq=fSvLs*hiGRe zuAg)ygpKrBS&j-({Etp-q$)@J&bGTs zTT#5NNK?uq!n*CoLJn1*w6IvuJ|HEKZigl&l6@G8T95n*xD+%aDT&fZEXf!{_F&#Y zyU8si#c{L9gCum041O*0DeaMQMD6c+IPPm~2d{$$3tfFEhL_onR`e&=8lf?LQ6ugG z4W-`uYq;Z1E|mOg%TDcEKBeU=6t1_8?oF;*#Qt9xxpT|uTjALW{l{fd5#dxW( zawT=Oc9!sKEgq!zy$R~6fjBvwH1JtFbrKhm(yt9}CZwo?o)|ZbbS5s7{2cG8iO*7g zOX8d|1_ zr3t`xnkTu%>S+@x-s*KYRmYQ1yraXRR2T_um6k9VIS!L|NAaDouZLfxA$PWsO}A&I zkjwL2T&9b&hcRi`TMWw>Ozvo|;G{|Cg}l@cWCX(oV!kF(RlUd<;tosn)8>V6;tfwr zA-8x+F7D7KtbZ055=DKFoa9zQS4oC5MJwDn6-u>^Xya@~1{uSaxPuAz){#*jNc@h8 zVOrKKObs-gg ztJ=y%SRuSrXhfILXjd~S&ZGEUUZGS>WXmWQk$1rDKy7g0Gwy^X(~fmDtq)w1f2fY%`ADT%xh9#R`^?EiAF;SQrk>I#gKeQ+!sn6hk{cxMHvn4yM zGRXaggVWpKwzIBOKN8}Q3=VD!zTFk&p><@hI4Ln2B-#=;mJPE?recf7wMs#xBYdr` zQ&vjn)S+M{)XN^ofawXb(jUAt1_X(*rKqJ|QAxNd@-N}uPWpF`h%DAGx46*M#2DNS zZjB?A=^dMr8JdFjVke!vqe~V|QbUN(MYU8G@mZXFRdK1%G8=vV+S?^oV$+_=8tMR< z-QkX%sVLlC8A=^=3+YOI<(gwaM^~yY8PA9e+)@>bD@D6fCh=RCA~%eB*$7u9;x{q6 zKm5JCpCL3hEq1td1vPHbv}BHj+t)l?XpT*LmKN4~@2=zuS`tZh^k|vg|EaX@>yA;Y zf6Ga`L&Kvf>bj!`shOAx7K@?UTbe?6niYhx;JiRU>DE9|Ev$FP;8wQ!2C8=mF{Hdh zLP$HGhRnvC!!CD5l0z~f%{;HDaA+n^{jnsE)j>!#LV{ElC>8kXpXdms9+DK~@;|Wt z-jMrI;cc)~Yz?X!JjgKyj)a-W`y0_GTBZ)^k6YyiVX}4UozcHHcGyFy>(7blN{LpV zIwd)Jb+*`ekn!0|YIn5OfkNt@tC!tzMIUK1Cb5H}eEQAb@wv~GrIQ=~@aQyVaxV#(l_o|?#BZIE_g4Kg9CwOad^y!U3}uh9!pLpEgwUX~mhg9_GF+GohpZx6 zH8qj+rbEkiWF$YcMmCy^81)JXFNP;arw9f=uym%si&{$ZIGHM;NQ&BRjkkkP!=a8l zSXKtbT4P9>_*CDvsQ#*h1`-`V8bDExh@W)DseNLYsCPUpHbz3jtRowc>V=Nk^>j+e z7R6X@%%rC>##*@iFZ#Ex`p$^<7eltf z%=W|s?IuwCA>qaSLEf`*kn!&ykD$K~|EEw-0QXGfH-(7WC?ryhn z|L^*pg%=)pR&GeOR@AOQoa^y_7FMDS@PvGY-w)r{3g;0Wec0lphv=AC_oV-pKRV*S zm){@b5e+Cl?#16dJZ*%0spVc26+dp+xO&dkt^CpPvSfwB9lN;jA$bcKonyJHk^akN zr|;kzh|}HpFQyN5rr(70-z>ebv0YOU zsMk*qIQ`e75o3=PoGZMC-v}M;YS~|k{5J#qpZhop3VQDIZ&vi5oxFFSgv6x2{gV3+ z7??6BH7)%OXbFRd49y-k+|lmPu<(eMty)L6Y5QtayY?NTJ9dhR?cAknTsP||&wqB` z{#&hkE_wJnihVA51pT+F{g27~{{~?Yp57c|N`O4aAs33y{WFYlJ_SgB3+bN+M9#SO zq)7Qh&J~w+#otMM@wCK0cs}|6>?!Jw^R+!%Q4gK24e#XcVL!M5fFP&em7^+NiV%h) zZ1}9l@jGXMaEVgZ5H2CsNVtJ;-b+{ti>tj~oT05=cr)8E@@*FPa6jr>?c zPlvrLv!`=~-b>m3*j@%IejMnp`vF>6lG`KcU7m|3dhq>V#P53Q77k z@ZYHZEPfmOjf`7mFyKKD~HBD7$!^6%~^O3sR=5$P)4Cz5D-dUF1~1b%j0J_CNU?>ElJM1LV%P zQ6pyAs9JB?D1I{B6fX2YX#kgd`wg@w*GB!Z)J84;&_><**hURlWusy@+NhU9Y}AG(-c-?P8`Yq* z7quqLn;IAHjjvO%n*bnlr6m9+L5c^EZ~_wmlZv_B@O}*XjSG@(6x9#V7l0;F0|5g7 z>3}pqDqt`m3y=vI2FL~s1yn{^|GAH+#UFwwDhKKrCBrS*RuAiU7|@Hx4vJ9Bl0`#&c<2;t-mF8QR;^~g0# zQMtv^7t|_6*$7i~K81BkQQeFAL+X{H0*a+?h;*b+O#vPh5Q--kpP0EnWZ8V_(k1HL zxpUNs6DO$c+qY8_CQP8h!on#0t_;O64ArPnBPuX3kb>rc&px5npL;^BNN|*N{lE55 z|Fvw_|F`A){mB#3Q#g1j_Mdo5hFkup{X_g?#FK1Kh-h~GCod(#Ex#)tsR8j;MI=-4 z$32{s%Uv?Bm_Yv^4$(_hnD>sELJ>pw;O&)RuEq%E>N=*WbI zGTl3ZHp&0c(C)>;yIZmTZA;|4?Oz2=za3O%kDN_>7>5MPH~R<{84N7`|zPYytlpI@h|-gNJ{j78NDR^tGaTf)xSjir>e|F zIEh8wbN#8mtIrxkXRVRe*rF!J8ezvchcfQ<1(6O|IIFS}I0HT{T&@fXK#h)Y@;|_> ze-8*LIB29hp%H*ZI}boQu5i#xSXa04B*eIK=VKR5;$^4neUnA84!T9ZOZE>F;UQ*( zYd%~jJ{OQz>m3fkaB=*RsoMu&Q&-+w0dIl>fxxTK^r?>cS(TBr<0l)ji918zY71vE^F-$(u#3 z4jOt!o5kke)B2pHI_h#ztMmUe+ANXJF$2kz#|%I{R$b1TNUc@TPbcn57V|K9)%Ui9jwl=WI_$&@Vzj;@3wMug*zPiGpI!59|~Uz{(|6= z60;EF@V{)OPD5xAHH=C_D>CqwfVV+t&tRk^H8)3CICAty%s{-iM`$+Uf&gjG^r?6! 
zr4L425JE{g;Z_e45JKv9q$N4~JM$)6X`l5!h$`N0msQ5mUz=|p`db(M9Zx;7`aAaN zK#+wKN~;-r5ZMSH|7Z&JV11 z7CpC}i>R<-?QBzmQj3qrKerhe4GHroMX?wcPYcgER4a=*$!tioMxD@g5Gb4cxgK{q z_@A7ow?VtEdHSNc)Cbt;XSJ2g7{?5G9&M0$*#=y}HFwEOCp_cX%p|xSg3^)@KT zD3zq@htMGFZI~7JtQ;~j&#N=YqQfkUK1mswSltqi@r*l!p-jRj+_}cT#toj&0SK0I zK#Lzxw}IDRK}Um7JDE!7kH36eW5x$|5JSC@9E1$Lk{{U(jgi6>SOVC!dVFC|5rFO zks&UL@@&SS`Ts({NiQAzyP?Iu9pgNf1poi%f71wH%^czZ8#UdTPN3hUXV7QpYjhPR zjOogZXJ#@VFnP=t<}h=H`GvX7sBAAbgPqAPV~?{J*lX;3?ihET%jQQ4Ukdw$D56~;&DkoQw4LL&YC#TBq${)xd$$9ca`6Z>b(n;yABr0jjaOF*9 zma<>DuXw4|)w(LD>S`x7P93CXs}t0z>TLBrb+Niu-JpJ!#jHkJ7v7gY=R52l@{El3vxY8}Y`Q#x!G= zvBmho_{q3z_?y+u7N%(GW|-ODj5QO@H_VadSaYfQp}EQ2X6`i)na9k=_P+Kv?QhxV z+H>t6+c(;G*$>-K*n#LEa4?KtZMq4~(Fz?-N74i7OnL-8o?b|=p*PTb=x^xL^e=P~ z{ebpiCNj&JPnnGv(N{2rr`g}w`dlL}n(M=j=Vo)uxHa4!?i=nT_Y?OUcb9vKe~s_Q z&*vBM8~E?}^ZaE#PzVzS2>5vo;R|7}@U3uLs3(St-NX^%IB~MLT6`$_Nr6%`$&{j` zBq>80Cgn)er8UwKsX(eISCgB|CUDV1PL_wt{{|OqJADYIk*nI!&Fc=BiuNuYieT>J{~&>Y-KEg0&`^sJ#X( zjMY|Yo3-z?GulP%SM7#&M|-H1(#z{LbqN@0qelZDr9zGP|0y%>}?jp81Vg%KoxF!k%tlXU1#OoR(>mZb|pR*zcwf(xsS&OfyD=)g_YY#UwC;nH**k^DeW9SqUs`10H^1 zt}!>6J4|`DG8@P?WjVGr+nMdo4rGV2)7W+FHug()A23zO{=!~mi&ziNhpP;X&Eht2 zM?n9#If^gE2k?z}niqMUZ_oGU`||1hU_OVR#V_W+;>!pP1y+y*Lx>dG3*CeyU@%Qs zE_@>F6V3>Q!bRZ^;hx|vmJww!LToSg6*I)4;wW*FI9+@fIQ>H0DSjuO6fcTCQhngI zozzK+lg3Hwq;I5CQYpEe++9wWXUeOA%gypmc`q7WwoUmjd_--j>Jrxr7i?szgJ6ZRkcQ%qS>|9TCz4so2h-GeTJF! zjaFJ`bWsn}d+9^vA#>+4?O;xNTxVVdUrT0(vy<5w?ECD8 zYy++-Cvhg%lIzIz=90NIE}Of?-Q*r|Hoh!hjemu2&I`QB59OEgpYp%(O$57;0Ia?t zd?cI_JjLpwB-WDz$u31mJuwfzm5P9;m*pmMD|sw%bPDsVsuH4%!W`SI>{L!DKPZuRF=2~-uxeI*h0>0z&F zuWxT|m+hVG-Rw#BH2W0$O#5Pt-fsILaH&GVr=lJET4bJ7rt4ZGltib|Z_yX&8caRR z5sB%@#4(9XDl?gx!Mx9`V0JS5nD3ag;QNo5(wH61Se^}G+pz=KH`r0^BsP~_#eT+a z$DFvsdU55r7TkKSjT|Q@%GZ@gN}jqyt*W2bI~lKGPH)3?gH$~3FN3q2`GZa82JoNo z$M`xzWze)8JnDPts4_;Yqh}aL4a#n_``Z2N{`LTSpuLto$R2EO2uh*tJbI~vULx#~ zpqOZTtUb=&(;jb6v?tqBz#Fpc*>rRLX_K@~`bs(= z9hHttr=>#aqI6BVDczMGOE$TT>@Qc9YsvNHhTxODtjZz4SCrfded>i?4ZtiLB9D~E z$&=-`KIagjOuaVaSqdVk1@*z22K7qOSlYCjeF5i;x%ar1y_$if?K&6fntTa|= zMO1VpOo>$5E3uG%@k(DMMaclS9gX=pRhg;GQx+;qmDS28$_C|-a!fg=`~tctt5#I2 zskJd@8>7(PYf! zR$4pA$sUlD1GP+Tv^HLwqRrMm(DJm6+F9)6#v{ zx7TCzZhC_LmcCHmqaW7&4aHbyd<*$`$+%{eGb@>O&9_V%IOz%MqjGR(I`AAopQL@E zM+{&_F;g(#K4S8n+}6gHXH(c=prF;@u4mYv*#~S>t~D0}IXDn=Z58(^m(QKx&T`Fp zi8uIEzPcbo^2G?lgm;Ak;hfMwY$5Vucky*`f;db3L@W{?i`AuINV<*EcHpwITtkkQ zyUBg!H{{{+6kuy1@N^Lrc1L@nRn!OQbM+&}cgA@07xSt)$iCNJV5eqc9~g8KO?xvH znTBj8JB!`R{sii-$SG)DJ8*)T+;VOgcZfR<3H^vG&({FY@4@%s2lB(f?Mf;7vp}^!NSCC0lCNA&t|Ui-?{|`?f$z@+$3G;0 zBbQeygVOAvu6EF^hA88dNy>C3PuZq?sq9mJv*frJIEe^%c1O zS@k+}s$|ViKdZlCoPi9kYX+M{3Y%t2XhvPlp3sd3Kr@_(&p`$sOkEVtitSK0=N#SJ*2a6(>ob zK|8r1g`>sCl@@4OntEF8t1Z$#*El^%|44t$IA^plM?qHP5G+|6%a`GNOW&enp-+`! 
zBiOFsM|tcH_6Q{PW%f2}<0^7>xyD=)w~*V(eGN+bjeEfPfSUU7qxebuCVnsfGk>2C z5;_Q3psF{8=|Y}`o8yql7lbPUB}R(_#i>#?`4u@{9tQe3AYYM-Rgjy3 zvyGA$%NynM;8c`SUugu+G!h)>6Xle}ajq$Upf{z}X3z|VLgyQ!j#no^^P8c*4~bM( zdsXYBje%aU6B?c;_(c_HclE(1w&)&4bz=_tLnx=U+0o24E7;#7eaWY&J#c*KSLp8a z38o%vXZt{oOa%ux&pt$dKI6XQn()c|Dt;S!;wLl{B80KRG-0RkwQy3nE8G_2r5Vx^ z@bE9CAEhgjk6aO$X$#$FfSeoWT7tK2XE?Sk^u-D6X7&c_&&}h0;O2oBtl_uw-|-hgXXS+6 zz{win2xfj|u?{FJ9s2qraU=A)3!+VWMQSbelO{>~rIXUHQh;0!IEs;9le6SlrJFKL zc~{v9efyDWfQJtTJ*o1 z)9i*-%{|i}dI9=TgmZClYSI$jnw~>{Mt@FkrFTMK-cKLKYULO-rav%~d8RLu!7PJz zOtF4!UtlYTU4dD41@o&MSCxB}>k5uImRrPqi5Yf|d(742JMszqJpK?Q`%V5Op(!-l zaIu5fMeHH=hF*&wJrjqBBgL`MZD)x~pxgG8#!6Fw*SXUB(jMuCR8_7c2g{9R8oc|g z912dhSNRdEkg~vJH|Rv~s0-8~x@3%ne)y4nGc?0|Xoc6&D-TPz@SbhHW57H9$$JZGRk0GxM)thPnH11cmcx{|EN86+w z)_%jh3WlytrF#$-OKoGnVh^xKtuET#Yy~^%pU*{6Ievt5KkOV`y5nL@u%H8~a{xD|z zG5#cfhK~_`6n+*i3BL-zV^wrVctCD>%=56$D^}$)g&!qdeNXqcQ@cE6utt+L$2NM;1XhuEbLQ zp4R!X{g4}FLL&F|b-(&GR@tY3!C#=$8Q@w0`ZS_T)1KD(wbOJN#$ciu zTvA|_w?q!mn}ZX-rf2B$^bdj86M7N&L2ZM@D*JU~nDMr;+SqD*56g$ASp}Al7<0La z^5Px+GuWT_*Px%oKr*+%jI`qr7?tB`(L81PnysJ-e2Mu`%3i@f0rrsZ>}Q}idBR?c zFSY5O^b-0bx+W-~87r`nSUqJCX$$@5OW`1-kq<_u6>Jsb!S}X{H^loQBdJol6aj4} z3i?_sH0_>{wu!J$Z?YO@yPeI z&SX(^Ev)jBnL${^yv4l39F!hI4j7nkBMmQmS$mkhjh!lV^yg#tGhr*VuzNyXr2wlJ64s2$#%2dxCuqY#v?~UpYaarM=i#@Z{TUPdpR@9AZXzG(ef zGG@_U7N6?KyonX`_w29iJ+=)r`H9e&KjAiVTe*XHc|X1i-lkL0I9Qmg?7 zxX*hEH6ia6Ar#h;(XfbI6^dZl>MafsmqE9GB9@jaNL66Tnu7W9we&=)F24+GQz^Y1 zI9PT4TfI=f1RKNyy|hsQ`iE?outxMVQjI0XN@KtAr*Yq);=L?g>InTStedZbLf&R% zHk!?bHFG;#VDVBLSBCS4Tw4S?+*+(4^x66kdJv)Y zbT6EfrKmRaSb8J9i#|l3f`#J>eVw@nNmH7w%{F4Q*tu*yE|i~u)?Vgk2`!{9SOtA8 zS5&9NO1fP8RI3H)@~56{d=9$wGf!K*+<`-Z9mMW}w5iU$Z>_{W$BO3~M&JQg4wjE> zzLwBJI12t8DaMFP(bsa)GRV(?ux#vw6|o%FyLFW&klgXgY^-Hh!NRdaIjDTAoP|B{ zH)xg~uyKS#Pfk|u7`OW{&mDO&0(^_Nny#ykfttf}1EY4;#Dj~Px| z2eEVP#wzG*`3KlIF3ERb$M;Yw!qQk1_@)&>X{oe@?f7-@>i*DKM=Rq&0rQm)l(ouw z<#T8yM?eMVm5W#}J;3_HTdktjfW5zk%43|{sO{Aliz)`GqttO&Z@de88@H~!FgUDvZ=eO?S*`6#%?ZQT=GoQ38y4m^92@hSENb{cz(v&JKXF*}J1J{5Zc3t(;A0%|RUb+X9*C&7I_cr?bpGF=f_Q*0!6{ZHXrEsPLq*XR-B;&A7-pZV0eqt`cCS8iH2#cG{ z8f+MBC1cqSuu@vfu4nUMk+5+UxB#v?Y?M8?zR;)AVU1p5@vE;vNx#8L83g^7hK;ft zxa?d=uEkh~+~zC67FJ()MQ91$J`+4KVLM z6}(`P|6QypHNp((CcP~!#wa|5Y;A&`cf;Hm3j5!7xj?=EyTD_agSEYv(oe|(?dPJ$ zTVZ+m4t(#4Qd)f-dmD$K3Df{hHMcLdZ?K;ybVA{K3;Zuj3v@N6KC^~B$5w=F{FwI@ zc8Fhz`@}=g#J&?xh^NJ1xr6!{(H;G5mOgwP*3xr&fHA?$h3;Up^x?N+Zw75LS1YGig2Xg1B45CgcMSH~B3*>EY!91#sQI~>L)t{!EWXvl zqSTYvx2nZVfIhz#>!=@?hfGtfa!+EFD{*05C&+b*uM0}E^S#jSU-ea(OBPYmvni)EJGr!!-)K>U)49lE*ysWwH(&$BiIc<-61w&_Mn9 z*Ri5b#}3$7tS{fSEWBMot0&}h;PEdjZD1oCYRTKTu)6+4*`(~jdg%w)4LqTJm&G2) zJJ>Z`4*Sy=*yT8+AJ>1;ujw_6`bKNe>k1>!*lz5Doou8z5o@E5&9BXBup;7OE=xB1 z(E)UOrZe^#Dzh}(1uM4M+(e(S^>PN6vze-b#iv6we%v6lccGy@? 
zGY=qVTCsz{H9lhxf?JdYpGf0!xucLWt@%#C)B@}%HWD~-gIHf~1YLWQ{FXc%Hsn=U zZJft?)LVb14d*Lm9BhosV0ZZzeES}>&=f|nx!PjXuu0p8-geR7(8pt+ z?_2W(X8)fiWv_-^p`P{u_Dt*qtt55l;JY3;6lh}*okLe*T7h$JX5NCHyi=$qs$x8L zLFbC!i-jVTKxL^_HjiE4IM{nf>Tko|wjNY}0=nyc-OqTH=u$In_yrr-ed(7N0anXy z%s}YEDOjJ5fNY+|o#Z#j>y^98SnNWV)4FS{{=U8!>%{KZ>G}}!Im}Ej+d}43xi(9m zu0lUyk8!23^S7KoEmVN*ZHTl>3I~@=gTyS7dn!q=$lh081%*+1TU|FYjLpV*thURU zoy-SjG_;7N_BD1Y&ql4b`r3mYK(D92r59obegr$}cI;XG2K!kQo663%>@oY;Q`nun z$!f4r_vhXMMn2+5pX01aHvrqVq zQlw1f+sJoA_<6hprXr+LSs@;)s}b1Ye+11X7}kyb+AKX9>z-Wf1lF{-w-3ZB5*Z2| zxb=aw>;}tCM`i&myGNnnJz;`jTN@7B8q0-qDO?WsAvXl8p>VXd31;>}?1Em9{**qJ zcgf#lSFI_u=)PKK?0!*2Hf!D5f^Ni&VguA#&@Z~H6O3W7VQ)5fg4=joYso!KoVZ7P z*IW$B`+@Kvf85y#rxff(HckVVYb{Pw-cjBIPfym;H7}#AG0GeYEA0yVHq!PWZ;SWN z#_Za_^O#kyK<{5It(RKLyPu5MKiLgvNm z-dJ_kG+FZ#ll*WLu^ZQ?`_Nz0mtZ9i#_o3v6Ax?N7Ume!75c$JxWX_#Cn&xM?8po*}c#lTX$5c6+4)-K=UBtVqj6&iCg=J;ojSzcJD)ijzL?XY5= z3AumX_|3Qn=}%)toMjF($3WW}VLwa8Biexr3Hu9s=s)SUY#?_9mY(Ub6K{vj=nNk! z_rw0#DwJ`Y;4a>qm}#j>bO!d9$WBeJuoC=XJyy{>KsATJpH2v8vHNlvtF~Lhec1VZ zL_e{T7znFuu-F*0K@@c{OpL@%d@OdD;w`&nhM0{}pCC@f>TaI65bK`RSeb6X>Sq^Z z^bttt0`VMV(N*k3-G(&slzgS~Qh-!L3X&Q~O(BzHoXLo=H0>_f6YhhNOT}54;aJs9 z#O~)i(tJy+DradHE#*njo5$*l^f05B@iHVMIs3qo@kqy=uyD@PfzTa;ak`-?beM@a z$B@q)X9}>(qVrdYj*#PR@wJ=KaPRAnq2>A*WsLGhC8MfQ1NQFvMgyZUFu)q3p<=xp z2K#qgXaSv!E=G6M*~jQ>48RJSn(6p$l4a!JU9Puf3wZ~ts0BEavJ`t=A7K}FJ$6&J z@H=qQVIM5NN1fb(Aiz=^iiTs^aZW?L;44bwZtIMTSKuaD2|7nT*vNT1gI`bq>8*P z9_mN?LyHK+*@hsTY-vb0rD<4eWSnpa!HOc1j-sRKSUL_?w|F`c7H^8e-K`iCVy&+Q z+eRGQlZ|H+ak3$WO@~Dwn;i)Y{sf$Im=3GQ9L$ab*kwHqu633xgl*T-3px&}K=8&sF& z4|oZ}J-!&1Th(qN;h!5g{}0c&)u(bGsYQeZb737hG3*h?2;jkE^4hug3MKVlrmD*OVhr?+6C zvcYN^VAjHpbW@xu(#;5*4U4sQq*EXh?T?|SU9)AB69@$sb_IQ<{G%? zL(>MEQUFep1mhG5j}s&jI6V>z?J^OkMzXLjoM4|0%j!b=a_C+gAk~TG={O|N1^YFe zH+l%0E9Mq(X9GeFHyGWi3!-Q-%jEMt}}uL4%1*3X_G^!UXK_&S4fp_gMp7ZyU1*E7RlP>K9-U zxP`Ts4RbpHvpX0k)p%CN&UX|nz&$a`Q!v9vVs=l*%wEVYXV)cixPDLi;{9QJz*OMWU=Nq}$CCGzjy306VA?(;i zuwj!u0Dm*k3^E%+TFYjL8EHnFaj<45!;+m1EB0h_ra2$l-D>Fao3XAtVCgx9=4G51 zxC^b`7pHmxVclzp(>yXx@n z%do86#knusTMax=v@f`0AUI<~a77s$F%sM`4xBIr_maNBfn+8l7%ne2S)gi9XQ%fy~aWLY6( zSrOzI1v%!=1#&@LLyqQT=wFeLVR2kMY=Y^qo9A$oVL6|VHQQ>~<2S!`$1>m$oFt`Sf(?=25 zRfz@XNW^YR7S_KLz&+;R)X{RRKM9{h0r9w(8%_~s?p@5?lDi#Wz+(tcba2Dz;D8&{&EPUeV7V($i&RRpX#qHsR12#B9egGlyPb(RUziR~GhZXR zVxG1^+Xh?R1)N2?Yw?^~`W(>02GBtP!F?|76-T{+z;_&QJRf*104@W8zc}D*KJZik z+ynw2alio?{pA?>LTC@Z80Scg?PQGS0ZZ2TV&o!m-e5An8j|fY>@5u;{gWZ#=U~?* zA5z;Nk~tdt^fSdg*fEMke~E?_C>^>=9(Z?=T?&TEM<&Jwj~udV-)VHf#9;ThED9l9qMrI(jlOi9MDQGcGeGIt`=an-nC|`FDPIR zmk+)A0?uh%Lk+jEXZa8(Hf)MNa2g1#1_7@Pfms?-V1hCkxSR=W&Idkofzb^(ow5g3 z*W+l-1xSTk%0pPlvn+Wf{&YU@O<|f9CUCD?Th7kj-R)#4+N`ET;^FMqzFpNi+eICp%*Y95_aESf>g#>m+ zwmdUy&h)HFv*c4I&7M6+C!d@y&(hD9&zLQb7^RWVnlm-Mue*B-g{|w{;xOsq&HYmC zaPUyK)MDg6JJ3D#2c%JlUPvwD^L~6vcvOFQbKlftq@NxLO)W=S_*G_V3DTHdnW?2n z?;E@GspH4J_;mD)DJ-@9YrB^eM$^e;^ zR|(I^6W{31o-z!WLU{Od1H!5}jm_D2max}KOFdn1OIT~9}fJ( zfqyvg4+s9?z<Sg1-XN#tq`wQicEWNuJ7umc>{Myo0EvDCPXioH&tm7!6j?_kx-t$GEb<|GPrWOi ztMFU{#_K(kwBjTbTt)#nLFHT(W(vMbsqbL)&91@VH9eA&lC+K*t5*SVG^R+2wUUdJ zD39pd5gtvS%@L%MCNy$>b- zZl=g`F49wFiafzZdTWDOCR@drBC81LZeWAjH2!Rq<-v5h^)VLe+>BOTZ#8Qn z!a5R~SZz5nfMJwJz|l6SbXT)yi5fBH0%96~qqdwK!9a&nbp8Uuf#wJ`?E)J@^fHp8 zH|CZFH8%0LaR<9vbE&is~&oB|S8% zgQ#2-S8GbVW{zmz*%W!%Y^-`}wW=+hF%jW8+7#JnFLgvI7uSdx=7dU8b!~)lRszne zEmy}dOiGFk)#_D<`5V$b8FQW!b!m#UK=OuQMyE7I)^m~d)&R>+w1DQ-@j0uP8td6m zDl--WQJQy=^fuCyi}N&#%h3+4^yMJE1GRWI3d^@~6Y|R?c}I0Ea~}57i;ggr$2Euu&M?zSEh10kwH#MJlsI7Co1x%~nqSi8&wvTB=lFD=iF1REChIOuIyj6c$ 
zZAp7Vwcf@;Y2HcDXjtsc=)Aee$0{`!c~g2w)dB=sw9eKMdXnJqR&dfJJ72PIGx>7fZ7xVSn|xnGs7#uVpi$)QS;fQ+9@FMA^v z<+d62`-5DDQB?|hTrSCZn5uIk{y3^)G-_L0s~0g5YFWJ#gasB>uf_yn=**FBYSwM1 z3dYRM>g9HUghN)!aeN6%v?9wa5h8gJr9kkK2({yB>Hddy4wj(5ZkJn22GlTU0D5GS zlJ#;(QT~lRmOC|yK32RXNWInK9ip<7F zuna0=^&)~mWm{X3z&M^S81pFgCwoRq&33RUx7Hu)G%2tl$)v9}>FZ4Tn_1uHOYCE&^xFkvG-m$T~nhpcY^OTn1cIvnzqsOil{Y&M3YScraF@~R1>d8l|;=dP*szO zc32X8xsOoJQ2Ce;eJ#HvtomBM(qE{&2@cdKL-cjJj#m9mzHuF2A0gD2Qhmn|eZ8(H zUs=yrDp1)@&2D_gK0%ZaN6j#^@hp^;6=SAqtrhDo34TF76oiA(T1$0WhtYg5-smNi zRwk?aQ%6{(jmjfBXC6_4p_?*RuZdt6tg!+(YEEK}r+VQ*sB5+YQlK1=YN~fV0{Z4i8FY_< znL;-#m7skdtzwwiQdzX%F+?M!jWb%OkJgQg)^(1S8vlj-CIxhw6l66r)IlQ&ZK-r` zq%4UGiUt`)*JR1uVoxYGiyH{6AQ{%T^*nHsS;uAt6Vj^RMjrwxAc4FawdHFRLFt!j z1B42;4*jFH**t3|+lbJ$CIT)C2sxMm#H#I!GOO1j`^p^1R#ClW2)c5JPO{6(O)t&?Q5y%COi?s%rbW3ALW zz(ezdoRB}5a~Siqw2BAI)k#{ZR;uAT1=CE-i9~!@I6V+-NPTj&B?IEg&AFUNbx4c4$jhm?fDXG;< z4rJR=Lk98I*~IHfeFX7x#ah%laE;dUT;gL6UU0pZ@vHEQd_0;{Vo)Uu2`GOvv#xd!rZH#K=y;L?x-UVob^RYs?yIk%Q4e4V|j=u4JFQ7-RzraSmHU z8o7zgQPM&1KL<8aHHez%TUovWb70m2OuV^DD}hEAg7NG+d|*z{DKc)Be4st3>R9-o zN_1p1^^{g(Sfmg;E^#i1My^EfhOESq&n$K-@TRf?H!M5LhLH@zYGL8)A5I07^J zaK3|H_73FFIvDU*9n?4#h*1uq)YED}Pb3Yl6REFOWK-xQ>lutps?5(J-$p5&ULzEq z`jJ|PKOu6Rh@wN}0VcC|z+Lo_%+}c-Q!bG}VA~JL)vwE#TgBlsX?+Y`#8$)z8 zZ);J&_23%K5em6RZ8?Q{Jo>6FMR+oldgd$=w@cNvLYX&PPBO84By=%Pj4~g!Wi@5# zF-2`-C_mep7%orY53RNvB&BTu1X`1mS&|t`>JAChdVuWoVy#1sqgIs68!ePKGg%YT z5Yrf!TS^N|u*kW!KiF4z+@PujJy~lGN;bLe}&KDWtWMZ$nnG z+4M0iIMg)rJP!~vA&oC0RIS4poZSn=GMKWn!+G1}hUaId>BSg*N#$gJt&V8r&} z0qVU=ffWzqj7^8FfpAgaivmH!_S#yZLe5vvse4Z|={%)STiC zA$LIGjSl{9mu2~Fl~nd7bOH zHdLwc9O|AK0~?*sX=(LNYW)RI88?X3H?YCfhy2l`Bwnc#%bW&aK?4iI0aaweP&E@X zP2>wm`*=H|u?&rd(JttM#;GYfcYf(=aN&(T`O5wn z;JFn(6d15E-kZc|k)k{zxECph$g9wG$U&gPi`@Y>Bqzdg4k9GWR#eg96y!2OfP-Cc z!(iLASD}BVl0K)x_tL=D$SuXUDb_$ohL-GnpjvienoV82gPdW|7#Lv85KVQ1*;IRn z(RkoRVE5iNmeJ4!yJhsGNZOm!TZ&-SCPGhbw2}X^j^CO>SpR?1pCkIi!ZJ4KTH$zU z3FCW%k)f{Ws5~Clg>)X)Wm7PSgl3V)O!kuo6${?Dp*zozl+A=fehD98<$~y(PaYs! 
zRe%ThX{D4R(0HCLpeKqZo@ciRkMM=qHNxX#Jmv|H_15zU(D5F=o<4GYFYssxjv^C5gmi6@Nn%ZukS|N8G>a$kxs=WiiH+Qm8j+zK zs~amiw^j|c;13g6%YhBaCTtLaNBQHUN*q?xAl+MpvlKUo%&|IYKdoPW4QgycsZ9Eg z7IsqNvjX(s$?+6Z?0Xdk#?oOi3OP=OYi@?ZDWr7>UEmWtgq*FzTy3ind0(}4jY0=} zY_;IG7-+O@@u=2;tmQdme>K85pWNl+S;9`*LxHNm= zR7zeF4v%CKu-!x1&Bk?fGFN{K;>sc))5c;wFBF2<^-uwB5Jh%CCU0x8#*BHGt+2+- zkzd$0VL5eRJr6W^t6Y#LnqXttN?{iusfjZ=*7KNYXa2z5tOgVbH6(d|p^a^~9T0(m z^b%V?mhQ;X077GHrOiLitz7|vG)aM%lCf2IMG(#Ih4dM80H1~;)uFSXEf`$|XKT~WAiywv?RKO1gf2UF?2-;;YXd=W`M>O#q3>jZ9&0CNDSRKZ;3<&2> zOPP2E#W+rQ?LZ$0*K?h|zyoH*uK*lhwo*B5MtTXN!PYYrU5N;GfFQ})!i`I*ukn)Z zWxiXD<;GBfyJ9868(PBJ0xYx@mE0t=N@Iwg+wx`82)jk^Ab$Ljo*bvj>u$@8l{*&+vX zXwBnTMNw{}Rp$unjYVJ9wCH&Y<^3EzEB0X=h+Ic0Z=q7@iFkFMjTIirScM9Yq#eG( zBPL1U6G-7strW6#wNSYd2FAm5a1&ric{Ql?XnDT}Xyn9|%cICLj=Tx8!6*EOBxf=kiJ>$aYkKm;ANG&I@diG0iEEO_Rq8Wz_&=zVOhS>i7n-HZ+>4oiU{ zVGhmRAvRbbN2pO-ss@uu^;QujWUKk(qd#>;5l>ljl8D)!el&Zq#f71*L65;}vID$pg}XV(!BDE=O%2)2rhZ5P*j>&0Qd`{k zxyV%qw@`Vr2?6>4Vlp^V@QiIf5wMER2)1QVKyePs=YzmPM8=Y&dE216_Rv^fjF4gA zH%N`UNT~y@A-q#5&Fh8i!E2BZcZ8hQ4#?&VqCEUfcVuAN>2$zmeuoe8R54xC;<~cP z`l%=}ErxA38*51-{K4DO4m8*b#sEQWc@%0dNK#vFBf|$^K|YpWN3)rl#`8NLt0H`> z7pXkdCK)lhf4hOY(GEq9uBTef2B7V8Yqa^^%453OFy(*5WF40>v{QLK#=sb#!E_;c z`>bHSOp$e#=m)edljn-FAg)ySIcpu&NtOjzub`LC&faR2&+-y5>OW|NRcl3Og|luw-Tsibc#>#!iuzYPc2Gi(k>v&)&hSqkWZAr>#_!Z~Vuwp|wcX|r7xj(!~WQ)fszBo5o?6WT`Ou(8(j z5Acy{@SwaP&HITjs1wd=$|7mrX}+LVns*croZd}&SDLpEkHH?&yl;?FIClVq!d0c< zZHN?o7hV^VpdIW9OrS}Hv?BgujV(awpZsiW*vXc)P*GdZKk>0vgm)3r5C|iwRPj=$ zweljj__BGJljw-zv1L&RD5_7!TPI$zJOY-;l2)Q4Nk^C#ITIZi?Cx+QajdBaMl{hF zDM_hu(K;eNmI!+l?S30Vgvd9xNfnq(6bxwB= zz(r98ih$tK8HNQl^-VRigqqV`g__2un%P24V_&Kvf9x^FUoilsH26C3&lI@^=ED^E zg*kEs#$PE%ok_pTq(5NNZ!L@5g0%rl(@R45lS;ND^#z)8Yjc3jZ{-y96`O+RZm*udr!qn(I$Wxkw=yL>>5h{6I2!{#xKLfEVCE{(OAYYgnZjcUye8-zT z1e6l@(>Q9)L2Amz9wOJR5{F!w7Mlb)B9^qf$-nW8Hw^nR|8q9fWCyj(P_qEg_3)CnB9JF7mP@XOT?Qv!j zDrpg3-AXbRChO*Ki)EZ%!oRdL&0uw$I|;9+=tnM(c2MO!3|KTG=V9QI&N&Zf>)di4 zX6coxBhKzoHGzC`#JNS(s8KaNaXYJ(&_(8DQod5R&=z_pLK~IujMW)6s>UhbBT6+2 z412dmgt2mZ5H&&;ED8Tc#x-jt%PfWI!Ep>P zY6yM>i{VO1FHD2DJXt`G{2%3g*+u=n$QiO^4XnwzoEfcTb+=2N_ zpl0?P1~)OFG+FS8 zVbBKM6grxN}cEwnv! 
zfC_{#hpjhuBZ|n}AjU)e+G?9S+1dzm*Yt4LODc(cVe?nUf!?*{QSUMV^0N5@5mi>WJ$n zUE3t~8qY>h)eQBS73Yr|Ye498agqOK7K55*k@erqLj3$J8vhrwNYcKH_#~WOk^t|M zr|2EiaMO%zlp)w*iA;LXSxa%_1e$qDO)JiQ44zQRxh@xn8LmmN&ML47$oQn;jgS9V z{jK5qEAS*s(K&z?6c5`1j5;@(N za19$!0yB}S@U)!hC)jjpUJ5D`latFL6beNlY8NSdfg!gYllTZDXW`vPKFUZ347y7A zA03i-yuUpnf!(-~--6lEJ-;ppo2R!LVZS8HqIwDi%LZXJ<>>$BbOT0DMAQH3kdr7J zarg_;&}9CU^Xz=X$IpO3XPcaN$(2= zr_j+R4Yv;@JKo&W#g-)nby)R*R>NY62*+L%{LRI+rp1qODcJ&Z#;f1S?0{lOxH=U3 z0Q`G406FVivBi_R#a^s)1&R*}Yc8?gd>z&*nMzUvxYu}58uOw0{aRmpcoYhO`UbT*&KrLV8GqUnF%vO5Ad`b5`%Z>ow7wI}MykgTjU!dV2H0x1N8H`dXTUcVU%Q!0kpMQ3hAcd1OPV6S2N?0VvX-~bvL zoux)K_9;WU1#yl#rp$>h`B9L=zfvH5<882t3tERBw2r3njpGn?wgx0yO#Xr>qqV0 zd2DJvHt3fz4c-G)0(OP?bJ*#S7DbuQi z8rGkR?S~4BwSnGX=J$BqQi-hr_;kaY=85?7e@aS8Qu0AQ+)55n5;Y=!h^0%{B;jUt zGr6_oa<^i)6w_o*06|@Cg2-69!ez{%hCzhH=(}=* z8qpfxC7^W+$B{AtaQXu*T4HTV%@F=;EQ_GvylLs8()nne+4UG0TwwT>Lq^<%%|ua+ z6B~;JS{h$Tt$7nCyte^Y20pCqDC~|`@%Tm)Pn{`zfW8F|dwlJ-T^=qAbel4b#Rg9N>4CD=MmXy#Fy*Zn!SGzXb^Bs4P4H<1nTef4ntS;ujO3EH}j} zJm)%de9CD8&=ehr9%1s;j(&A$e@ti(G`}L9uh2W**F5D{C-g7WPcVfCaN)jbrf`3H zBddg4NLkTCu2NY6LLmcTXinp=tjKl}oib_sg+p}{cX)Y=4Yk3>LX9+UnHSd37#~^` zi|8@h&3IIr_X-~IMH+oeE?SJdaT4W~E%nZ2A~(zoSe#mx$juB*;sTPn`C=|S7&G+c zS{feum=Y9nyqe$*H`SBgtV0@#vH-ZR6cA(Pwi3jKcRmcI!6i^`xm964n+nZ|kr)K$fpfJiHV7p(F%wm+*|7iXK=s~Ko@)jaa@eII^Gf_RN_X$l%bH7(HAEe?r`JmGm`Z;dM;JMP zMn=EY?NCN+?MYmnjm&@GYN*5%F4M_~F3?01s6LB3r-ssNC`>@%P;XBvJcL3nMs7T+ zlN4DbhHDNmcj2;zQb%2P8aE%_ILeW92lsh__(BF>cLg;(>4AlTx64G1;9QN9;fXxE z3(@xjI&DTDFL60ZJnOnJmdF$a<>QBW$vzC0O+}H#8)Cs|J&SJ1wDhW^Z+@(zSAK$` zw;=rBKMX9Qft{Q5E#di*|4QEk3Ou=r+#6Q3HsSjoF>=h&gzxM9dFGgM0*HuE^y0$3 zN2@J+Uuc>+&4^|%CcTM~H5?Y}<`OA=%NK z6x4VKb8;yQ<5j;*gq_1ikxlbsR50JM3Lh9VJ|%0xtGh;SJgtl7M$3$+vD$-mgE)H8(zlWl`*!IcPkjc@gxEK`~thG{XdEqKh&y#vsB6zJQb-#Ypyge7OJt`Uh(x;3&`!S8c*O%+*dzoW5Zg2j z7i9RJWzxL&Y}5D>4FZq;^#l~pO~CiRoPf*!Xab#{p8zZ@2|!|}+)+2`neDmf{y)%Y z@7(M3i*9gw?8b&!3}Z7IZ3A!fR*o#St}Xad^w8CC-2t8t6wl@q(Thm%f_ZxhTPYYw zn+VE8EkRYa2YMoN!<)kie?t;)a!A3lE|77_z0QE9q8l>NvB1;Ib&A$S*J1O#MaM)% z5NND0Od$VCKdowt7jQBBzFL6Y(?--XZ7?<1H6Em^hE zL?4+Vgxh#OjSLmgyzQQv4{AM8EBCt#g2-(7!r=l!bGszxEe5KnE7+&%51Eky6MK9)s9$ql+mfm5Z4;w8$bh-1f+Q*cpf> z%~YDOblDS{?lHr|&eCP~@K_ek;uB&Z%g)8H%9zybaf1CLE#k?U@Hgbxl}N)%_j)F- zgc<}MU0z;f5gBfacyV5LnP!UBWTM+J6W?{#=XfrF&&DXrvwH^~t!ZiA!W#pUFW1zM z&`iS)h5xEyvwk)k=&)jr$uK#X!eh;=>);TKHisvuhm^CqrFcQ3Y|s)!?vSkU7UvILuu)Qq0>wWF#?k@VKBNn8Uoj|=+hkU zsJz8@>&IEC<28TTv4CdpyVBMzRs?BG-emM9-5VP-8y)%ccEV63!iuHU`YCvZ8MFdV zpyEFI5#|vhWi>n$+j>cF|72pUqPio*%2!gR^l&}66iL)Bc}y$d-Iv3hVP*_Oowb@0_B%eo^UWeRF^NCOJ$l2S_AmPjC}=+ zQO84N%NSJRbTUuQdr@b25bU+V>YOz_7Z!_N8O|C{ z$e@N6j;~2g8!SQwZpg_-n*vPp#cESbuxVzfWf5cx+yvp+#{~!nKAisO7*1{faN?`} z7HjxV>&X-A=?CTaK(?)?yTG#R{=ZpI_I9+M&~{#F-f3yxw_rmO81#N&ODsoe-fIA zL%%>DB`W3>(SG%tktg5dm}eMjGH!NZJQJdS^FhY^!6>Sq z8AW@$XF}wKw-ZDB?}VrRx57(nIxnrkfzCGh{~P(K1@r%(DiLuLk&?5+PWH zJgqA_2hjoxp4*-~hQJ+;v(%A_>d=NFKt4oKlN*JT8JQO}!IMT+<1EMr9r`bCg-_pn z>cyYVUcNNS(BZbeeDqLG{vbCSmP#;Q)l=|@=t|zVNU8C1jViwt-`0xx*^mZPCB>1X zdFL?mXRbZH)nnsz)Bc3`QFDJ1O!PxfASu>i1oi_gTi#g=Fll^X%fn@y#+wG_9gYCC zuBBgj{AkOsa3^Y9`Ny5)vH$U)fujj=1wDcDT)DodaC|_F;<*C^aZq#&rMGa9po}uc zTj!46$fv1*JZmO49Vy!L%EZrdHUB813FkMTFqQv@2`j^WbQ;?T1sFka(fJ^r^!2H= zh^ZndN8|i8iA>})Ugqb3e19^ zKohI62bB*~G5v?Dm{WieBUH@yfC(`wMiC250DK&$VpJpX6$3y8;1XcpDBOLhRm|6b z7vojTfYBN$VGFJ1Gw>_DDdUt0#M-3MXHhW;o@SE zQ{Yj~#U(tuA{fIh4vt1eu&o5a2J$~VsVvm+yEI!a-80l-D0lqKHA9JnJ1%PE^JlIR zlBki&C|-oG-E}+3iSwm!6eJ+L07nZ~ z6mY`euS9n%<6{bPPMrJ49`px+DvQGJ6y!Tszeko`Xk)l0rJi&Hl~viTrF*nIjX(2L zJt3;fzoWs<{0#JjxNdhu-($JZNd;qrH2fsq|7>cZ7+vufQ^T|d)3gFXk$qY!ep;$O 
zO-u2oX>FD1iks%4tO8xQ0eChG&@W8nujj^($5mq@Kl@c-ZXC@`CZd8n=rw{n;#Z`! zP}oTNI{jXPpGBFWh?Xv+O$PP;n3B(MT%-2JgRkFV5ibm-MQT_Dj`I5TtvsBq0VeQ8<$f8^Cg3P7*;8!TeEhZv-#!S)N*Io&p?*Y70y8y|dMNG4y!&D_W%vCQR?huvlQ4={-x7J)dAI35IHs z;h`hx&b&*o2%a_!p(p3%D)i(`+NH$KmmROvDF#SiD}uqUi9~BP#`nhFM!rfJL%|`= zQsYk*C?rR!#(S$==UNuYkOy2tS14*6ZOHRQ8INCr5QN$7FskWCO)hZ9?W-4^M0aa6 z55*;fIm&?o^4LJ|3wni4bYB3|;CIA{qAI2^nQ3wGgf!FmP_n_Hf*CT@F5=cGXg9^r znUHH9N&*5z0BHTkYVeVei0_)O01}>4lkhQhZhOJM!6_h;nP!cMjW)-|Myr&?S-0`y z3+sW(Iq$cJ$nXZ2w~fEk42x2u8NG9i;dh7Zd&&~3nk&Yq%y>y3!sRLG_{zzqp~T|| z%%%z-?hfFTo2dmSGZc$WGE)l!BQB%wDKPwRH5f`4mC$dmFr=^OUx)r*rgQ|}HlQ?? z(RZSBB%|*@shZKZGaPX;JeAI^sF6k0xEqc*)<_ITL^UGA5pf8VG_sp!10v*8P>VC=8pn&=pg=i=Fj4T&=igMk&pp-Z9#w2OyUuIA-JEAVNOWMP$^5YD!4L(h1rKa*Q!%S>)MhpQn3VT1f`HI zRWVo+{xu;5Y83UMFRDY90}58!bHMVa-wdc}14+;zR856#gy#Sh^RlJ0upCnHwC@=3 zdjoF=UNTr6%xWv05z&%Vi4M#->J8dKXH3IPGTqO5R$%X69*Y>FPNA5_Ib)sr_dcE< zt7s+NTdq8|*bSnWVVV(&)-`>Wbdl~QjDS6LI^zaMB+DswIGb{ zO+x*S+fQ`IxhK3?lmj`!JYX%R!-yKqZr{Rp!p}|m{Qw^+MBjjZz0D)6wn-Te>zq$n zjED7)U^-fwqpjV(hncLdaSXgoBtvVaSz{AoA)$m=26I;EJ7e`D+#mRu+N(feaJS!-b!Zta&AX4S97}*Ue6m)=+z1foWA@Ca>G&N- z2XdY;vuDdQ*OZmna3gp`hs$>ar{tAR4prwM|7-fr*D21g&*I zSH608uGl6OIy`XapDo*bx0Rd+E`2=nBUU*;EReKcc@bAjq4Va24Ef?(i84#*vxa zUVo1LE-U|n|N75qVw}`0=EaDNr>NW{1;aOjAgn`EE4(j23z2+gG7Nl1@bEB-SOXs8 zm)sr}-H{0qbu3^#IpJ_waLB(+8=W2Mze)Qmz<%wJa*h{e6cQ zmd9(yKx@HQ+h(Ud_$iUQRdl;0XU(mls)BK}C`DK=IOgC7OZOx>;Ac)Lb+n}-TPn7t zPPWw9mb%zdi7j=tr7dh}OIzw@OWkd$hb@&-Dtd@-LgP1B=8*=|YXJOA?s8>^c+TL4 z=)r?H?vd0O1%&bRo<#{}1g=P5Bd>zP9wz?(bOMGNnSBoQV*`H>)pJwgrNq&c8n{PX zS(;tyf*L*hq;KHgGd4V^(DmTu(@-SR`QWCkRiN#voby?$hTX+od0Bkw6_hPel_6 z+{M7de$(d5$G=g{n`sX!#-%)WI>q@k7RTYE{1@)T>Da^19zR1a4NByUP{+m*_;h{opj=C z6*F}c%m$;LYU)m@p_ava>ZWuHHjnX-@w4Av{um|2;GS?1q!`@CP=YTOqL6b>D220w z_i+_`PR-{w$tlBZP6eW+6-;VKCwU3JUg*ZAVle#X6?JF%2}7}NJ3xX2AC@1G-4sjh z>1&N^@HNh8Wcc6Rid>2B=WN9B-4hL2F*RYR37v!trHW0KObi!*FKXrt?y7$|3J=Sj zUoO#!L%ZtlQIWV=k$mnw)A$N=b!bydV@2YJWjR+a>WtbNMa)1ZmG0JKIRe*8; z{aHR@m`4Em%RpHIU^!qtAOf%mFcB~SK!5)5_JaW<0Am0dfJK1ifR6wN0Y3n402%;o z0i8c)m;gW&U@U+IECM*Au6{FqVHZ&M8N(=$rU3>3EP(TXazFuKBj7DS9$+@01=`bJ z0@}p_1_K5Hd;lH*G2rnwhPem01-J-EL)mP=tAIBE9|E=m4gh`t4E-E*BlSSq0iXZ` z0A2<-0)9XrrGO>iWj^37zrFrfpcJqRuodtgfc}1fuIvMBwdqWFR{A7edc=&Z z^eMVISqtR8-LrcUUfiV2q#5b+Moo*Fk&$jI6>zBlwV5+!PybW7P%vZmv50J{5i1%AYH=l?|UgL-%6yivE3H>>G$SsYDF^Cc5B5q6WwqPf0e@n%>=# zWC8*h#wCHV>M{qU><3_yK3+QYa?FiVL7`K?F^KMxRryany4 zKg>qrgZw(i(?x^lMSxJ`t1utjkxDWE{>T>~bpxyb#3J8__98rw2DHZbn=yVjJZAx3 zM13j7Z;9v0fUd}ILFx#24KNh>n`jRPO2z_khvB*gw*&Mt&Dk z;^%U}2;}S0-U-i1fVPYZQ)%M@=}6`R2BQ81#zzoHG7ZoJ`7e+Xzh4JLAzzF3u6RxZ zbVhzX#-D=cC4j-mU&i=|en}8Rk@Q9W08$UY8-S6>Keov~9{E<7zriN|SCH?I`eO7? z^iBf2fc!^DvF%H80b$7BKt0iu0%(u?TQ>PGKt2Td(>D3fKpyJv`VCT|*8~`j{5`ZM zc_sq97@5S;L6E;5`5@FEv&lal`R>SnhLrea1Vkc#3++k%;{lzJf6pfW#mEmr{-RC( zvyktD{63_lPi8$O{&SG;hx|dLq)%@GMj`+7Kdk>E^iT4i z2#^DR6H*3{15hD<9rYytF#vDmSJ~u0ANgS9Pub+pBJYd**GNfT`G9ET@1i})|L@lS zQJegy0!IP-ZAb|}4-kPo9OCEJ|GPH%+t>dEoBT6??+^TMk-7m^0Ai7Ui1sAUzgz!@ zZStQCoUXwC7^x%RHNa5he@1(f$5=oI8D!__-V~0{P$2 zp5*^`>;HS3{HFn@2k^f{O8kBu5QY40v?uwe0Xif9fldBPkROcvk2d+wM!qld-y!t? 
zya5=A{FDE%{!7q5(K`w70`NB@bpYf7!jS(7^+Zn!pgr=d|FHhg*yKM0^}SHP6DiSa z0t`p~KH8H!|8D&sx5+;pINgE&Ia1=65fF)d4ce1FjR$l>{(YPL7b8Ci`AatW&qBTr z^81mx1I&OpQO5-g93`717+7mzR>wkky{`U2M z)h7Qr!08A4LrA57HvyxNw=y#P&Q3erZ?ti0#dLIXkhOAhV%j@7h`pS`m`+X(Dw&gr z@pf`>^mKZG@o{pHw{~)6IygB<+B)SjDv&O7azy)Fv~N%Cy<0iCP$ug zrp(DAXG)%tK6|>3l}};Qr_5|#PX8gZy_Cx5PRf`uRX%k_wrq0Yf?r(`)E}A$3C;=+3E09o|R9Y@r=5x^i24F&FY@-PiRRb z&6+fOfqhDIvYN=vUNCF&oQ&*eeG_>$Jo~WE>ayWsrcaem&X_Z0rX2@EKdToibPF;u zK{kET)buR*l&mT8Su?U{P0~$a8U5^;v**m4Ew|Od$(-J73_IWLE$w_4_&aaL?5T6+ z$+Ks?n*OYxDRX9JW~FCm&zLh?J}X_v&Y225S)yXZ01=>ZG0yma%Ybq~0bnD5>PXJB zGAHR~Oon?rH$96wp}9=fPn!n1)3dVXWC?T2M#+pRGty`4Fvu+U*bJ`$HjHc?9Pn99 z_|wSU{zKht&^0?9oSQmhiVovULLYLwGtE27^6)vC3$kWRXLa)aAt6D1UJM8blE+QT znkf&PngMbs1R#dSIzxLU(BBr&U3chfE2a(84)=~-m~M<86UZo;a7M#SVX_$mvzaMj zeq!jYO8S%i;a@AXrGKq$|JtA@`sZo;=fw#BWXw^f6zv_bMg$sY{kWrLd*%hEKNH4G zVse=x#=#-i!NJi%&3bKS(8p7%H?N)g;fwi~Jhl{E zUeLMv+_n4*r{D35-Q)V*qOX^@^eOn)s!oSiPL=KLbWgrR8~w_+_pgK<%v*f+VuzZW zKhBEZeYU>Lc*c;lE~8E1)|pdZeK`3^?a}n*qf#9_FJw4RnKDTF=r?zXxm-S5nI@YN zdoyg}xvW>Fe{|}mD0J9aam*8=`{|jxTYUUz>+Yb8b>9po`{jL|fmF`S>ckRBD(vQC@_^`mySCa2E z;hPI?M-9Tu| z^T#(2Z3sMDVEO2%%lrWk8R;8aUElUz=W%_%yLM@xL9-<9WK3P&qW8udR9gFHEuO*jCe>-wvN4mOtZ|hqH2f0Q0-#)(D>Bq^#`&{(8 zzv#@0^w_h%kD2sM+z;`)U)C`PHs1)?e>Uvnt0OAcRt=g}dTPt>_2oPa6)X0&yFr~|1tCxC-rAxjy+|vmIq&#yB+Pmn@!$noFjKV zJMG&IHz)nz@$gF8x}z6*bzN7nW7pQovUY=ZrsTTs>Z?rsZts^F`wHcTg4=s`9(vF1 z#vajor@z*P3_Ex0t9hqC@al0%Y-oEqyLHPcKjkT>t{JSI<-T>S_SKdrJ#KzrbzR+a zJJaIp4_y{Vtd`cD-Yi+h-gD^uwAgvZ_~gkCj!#d|d1YWm`+qsloISVW*{9e1&X~r& zdQo;_#E;YGT>bXp!#fAx2tT;D-MKe*Otfr1e}2j8Q=3YQtAa=0yE^Lb^!=x%Cm;Ca zN9Wx`Vg`N_ebLXoDz;;buNp>(22Olc99D8?;?=~f)3@GvV|M=q2WLdB*g5HV2g9@j z8&A*aKJW%RdhI;76QW@)_kY{gv1`8`PKgJV`wsiI{O+5##_sYHYj>u8YOO5pf3jjX z_u&QqPTQ}H+fh9A{T}zG)UMyG)wftZ%W<2t>-r%BJ!*{8U9#FFGfuCKIJDOD)q)$_ z{k~op+;NZF`){0n%l+WFpZDImyy%6ims*}ZedhI{H_qN|-1#Fr+i4UZtE8>RD7=CHq?7<_nr?y`nd;Y?-mRD7E%Bq(%+Yf9z{Nes9 zOHY0?tlGLez~|m~-z_iRcPwD_PR%EqcPX3(R_t5tT={*kreEmR_tj}T3r0*T zb?nHFU*k8Y_e%#`?%MQ*TS>>OPGik?9N|DZ*Qx5GwBmR~3zzQ6d?a?71Z$1l|Xz2j0E*>H4c0qF{ zvwr?tVRt4aR&MX>`OEH?DtE8*c{|VGl(MelYlscKf9X-59q%0;{;n=9X9fGTzCSSP>NhUqkGztQc{?C|X8nWvz4z}a?*EZ~ zm5*t_MwgYpAK1`tpE=KC!If>@gYSG3YF%(Y`(U(1ySU@wu+ZLD#yE{yP#3Xea`m!4 zql%CCBwTV>?zKj>-@|OMbo-?ELhvE?!yAKR>-WF#=BIDB_~C7H8&~E;YIOg~NN)0@ z*~c{3=Ov8azt1aUm}i|-xK#OyRjkIuZ3zM%h|B@>_C|K(qYil01Pw5sCVppC~*JQ}#%u-|3Xd-D?x zd>xYh>DHKa24#zF>$)C&wA(fR_;%T-@pmS8WYkA}Kjgx!6&}U9@YZARJ9Z1ZRg{@g zloK|uYFOpKcbtFe^Ty%b%*T0o9S?L^9`?$({bSj6mU-b9E@X`DJgSe^ge3#r*7xby z^G6?MaBF7Wx0?QjjVmW_=T2#Mzt#3|{g27@cS_fMT72i++ZP(D54ddKYWCSX^Gg5q zz3=ok-d-^C*cZ{6XXbPq-`KZz)b;BFuT0zMa(L;K1WQ8K^!sr?-~YyOOYydj(^usM z^w_weSHbqfHy>`e^5o?c_s=hUVmWzW=C+(CW50QKuTf7i<;=*6hs zowMilciDA4Gy1oRnehV+QLoPXX8e?qm5+`~f4P48#O@Oh&3TpAMzntWtCro&d&)C6 zeqajQpBFX0o5&?2xI^g>ul`0458t8dMokxOOIT9)XhCA`@yW4W4hQF^)puL>!KWTO zFTd^eSw|+}=dt}qJ$`rc2aGSY_GY zrQ*Uy=i2*A(!MNCA2xecbVk388ogy;>zB8=xOUSfC?tOALt5Qj@afwhM;~}|TE|se zdiGxa-L(V9?`<+y)J(qeq_EqaUoL!i{eb7=icj5#7?$@~^3AFt-zq9H8@-OtP)7YS z^woJ!#_gV&ez|_^lEW{#YJQy?9R1+y?XCrN8(MD)JTYW>))U2QTK_L;iyda4%@d^@ zjB>oZ)zzh@__*^oi_BtU+fhyh@ohZbRGyNaH@@S3{Z69W)Q25hHLI&6E&6=eV$iVE zmYJJ-_zXLGvs34JA9d-~X=3LuTK4Ps{-oO-im!g|{oC#t?aS{4wYIK%(B_jKU$Vo1xp*9VV&xiBi@(v~U-+69rz^$2QUi{IaRp82>XDU8@b6fYP{qOWV zI<8NT*tp5v7WUlOwd=dT$i40>Uhv&Dzjv1_`~0FNKJN8Oi^0D2KCkpCnY+{fz4G7s ze)-W${R++RWj^@Vm{+!^bk2S^@N$+<_to?KM#ay~y4y|{`TjZm{L7{#qxQrsZ7XqK z{KDjui?--;7Od_(bpEl8;#Z#>JF;*`-vb%TB9_d$@+f#t^~y)HGhW|4b6o8_t>fta zY*6t18Iq-6O&;PkYs$3G{U*g1RZl!y`APau-%XvmPtw8~$aP8NVU*EX!arRHCJ41h&dEx25UYuU= 
zvy=O)H-ks?xFeO{{59#rk7_5?PrQBeMnB7${M*&b2Yz1jUFwWmbp>snl$}2H_`{5M zo^EqawED|BJnHyr)$c<~KYXa)p4u2)&D>vVD8AQYP+ncDp; z(OOHZ!mVG$9p7;HO-Sl!`Q276kLp|^PK3{T{re7Q3QzX=*6|0`+h47k_juOIzC-=q zYVTXUdi~%}-u^sc>O1#tbzOTn=h~XavI84Ve!1ksx7!4--?ZS-2Z}kn*R^q)_wI=O z{oh}F>i&BHK_;`ygqRg+*WKTk!JT}w_Jf?|mmUn|UK=hp?e2CYKY!|bhC?1>Ui)Q7 z=bTd)FXtwlSe-ZJlX&CH-tCsTXPtX}W5tba@9dLr{XsnKvxl>{e7CVa0;a?}Fb353smHQ_|d+l>>&=mAvzv|mv%dZ?TT=V(v zV%pe4w+E~{Nb5g+`hfzj-1W0xzuMh8`JKsczF2qVrOOj$cKcx%Y%HQzb zJzd$Nf?4*>j9_&?w+XfH+`!LpFMH?bL8q3zp1$ka%Ri1C_2$VTzJc>zm)*Se)4VaG z-K%b^*hBG+n;QM zcUgKcZ(@n_>CBC3d)xW^l2Wj(KJ=^BJnc>mz_`;dd^GZIFx*soNzs~Me zCY}_#vEagQ>*hqPw4QmqL_O`-jxSwIitG8Uhp)=A>ySg>5^S+aJKdHU?)>BhK->B*B z-cNAM>{+`%y6%-L{bG*>o*RW`LN7zrX4%5C`-D0!SR>I-`uXgo~p7Y zjO@P7(vY3cZEt($rM8W#*Tf(69DG30rTT@i`HAm-Re6m&wj(uAxhLgz+NqxtcPvp4 zJJ-I}n(=~ivTM|`i~py+?|^D5X&atI1W`dy!2)V5Afg~x5U~-Op!6a|FoqNege0cW zbg_WFqAOxq!~)2wND)OvQ0yHWhy}&9z^Y)u0*d_4+?$Y~?*Hxi&U?P|7cPf8GtZP~ z=4n&z%;aU=n2Sr+trOizdt;K<)8?;%K)21&GlI?1Y-wvIlW9PD$}NLZ;kEA z)62ef9zDXP&kXmEhdx9P8Iz)$wkoz{Wa@ybEwq`J8e`3KoKKyiwA{+z(jyG#q zXzH;gWoO9ycSE0d?icAcH=*+5x|^XdUmdAtc(2nen^0F6U*&|tiw75%|84nr#rjz+ z3uaTr5Vf?3Z6`BNYTP?bDYX(DKIo&_MQ@wCctGoj>lwPs<{y>TTFF7$ewtxcL->PfVdd>@36Qf@7n!d0i#Q2P$U}f}ziu%iEg%tw=yXNhxc{Z^3hsAOG zW>)g9^vc(c*&n>C?AXNf36w?Ovv)k-syD2gRl1eH%Wz$mK6hE!y!%r}RSQSC9J~Gd z+r?^{n_m>=&3Dn-GtfNGzB#M>b04pwrYAE*N8gXPZZI(zK)eKK8?M3kM@HVW8xkb?R|2$Zs3+%RVy@)xQj_R-@a%t~B9GmByR6?)rfB(*JQIDRzYP?hEm3VvUgar;#YAlz3xp3_F$>=tD1XEVXf_=^>LDbcF~omGw1GH`m^0(^Td-oJ-azm?auUD zT1#6px)w%`FWlE`m!HaiaN<(Mf#N3HG57d?IX(`SJ*?2WvH#|jK;tTl176k0=3=GS zqSA7v{@}xng|iCI4!v=F^VXu189hv>MQM!AE4rNTUX`EMt!~(8;ezD>;jX%;nFID8 z;hIf}en{Uyc`#2s^%kwT;jWJ9yu76=)sGYo+FAN6qqgYI*jSBAyL+IfaVeVn*41dI z_FWjEb)rku6tiSDf6EJ2FeNh5tI?3p3|-2xxc5F3x%1UGzcHX>%wN)_@$| zKIPu#%Dp$wM%}Jx{A$muix?-U`gSmEh131HMdc1^W|Fa;20Z_>t1IJ?)`Cl(o##y+ zPNn}@(oIL_Qg_-i|L{U1ns8~sG437B>&$06=TtXokEptonp>Va}3k6ST^)E*e?NgnDa{>AFn^LO^7 zB(123sqqm(`<`jv=(b6-6>c`j8UC9_THf0pn;e#R*>h_k<$X9$z3ltvQ2&rcoUkkX z_*U}-k?JqM-!X_@d3)p&o%``YVUK29Z94W?xUQt3sn5|8`?Gd z1rcgxm91HY{bXk|uEqFaAt@TT24v!hxdRw1=!!f<< z)7VO_TlMa0iM|s%6_jO#88gf0>VJ0OO?R6rhqQDxZQ)7Q+tj$vj?e)%jYb`X=9M{}Ds0!b%Ic@#bfl zGm1jO4XCEVk-s1ARx;sC_jS91IzKirqc*^Y6omR2SklK>g)SLs=5vP4Kd1IvOrzth zpsZyc^pp!1t5$WnS$4vuxuJH|UyW5)4itKHFU(1BOG`9cw>;JG#*DI_UOrVrbz&Pu zx0wy}jpjL(eiI5`^fpW_tn8eal(NJiHE=u1?wf1Y{2<%v%C;9VAt9}NzTqAEw8z(i zw9_^x4conDr`}&#`A)Qw>=@>|+M3MT59Kes|3>6HuQ8Hial-V&=iQcNM^+jp%Qk4FkL$Cu z_^Wo(+T%vq)p3*a8>}TC;#wnWolEHF41No!Hl6%7&ZBc(Xo5qThuQK6zJ}+2*IPwd zt9`;Yb7*a1sdZJEb^1B2!6zDByQXBV@3l7Ndhk`G=~!v$ENBj09aH0T`S^(t-#zJk zy%lSxRV1fqZ>g?K^}KL3TlaNM^R17~S2l@5D}y8BUJS7HEvydoNZP5d-7}zv-q3!- ztVQ>y45e@9v_xhtfCT{mE1E{`<<@LUa?YNrTkgTrbL9iCQRzq4ne z*)q+1{w3#O`^$aw{{EWaG~y{ICVcd?xKCEYoR1FBG?1$GG#x+Fr}(|M^V)(qPPJ%m zLWAYY7azWVZz(IiTb{h&>Xq~RcsMokwWF%0U6Yn+ulW$v)BbPzQ0i&E==8l9`l2mXmDAUmy-?3Uh3AS5l9J}p zSM?7MIx*WmrZzQ*Usa`N<}t9lRRUwUf!U6!$nepzRMay)(b8viPUwb>g+4bQHJ;Ny z+tBFWP?q)kr>c~@IU2=vM-A8R+hJK9xfB)vE=R__xh--&@Fm1RSmR`B&aJkpS$Iw( z9j5;*w-1~l)cg<64YJGjS|lZhjo6uP{Onv#QA_pCriu?&ZEn=I%wLopS-dqrs3m`= z=G6U3)(b}&4$B>{q4#c}snZw@%NU8XsOGR&WckN*<`bgM8;?fy^g;v`tT|+M${n*-|7Y##u7uTHB$}0b6yee_pwB-B*QTecA zyeoQN>|Qucn%xo;|Bw?|crGERvED~>!e?je=pxOWyj?wazFjr#>ex-gT8M8kl(L8k zos#Yds7@t;8k+kas$VI4rM7lKerFH;rP{r`ihA6fv|ek=(u(eW4Ks()?S1z@F6+lD{Md8+`-{Ckr?%?m z8E5K9T$A-C2lej#-D0SI(Yp9P@z30>-JG6GTtBLE(xr{-Y`T3pHqD`a^3-KX-6o&1 z@t#8UQWvJi+sMpD0_X@@CM88 z#%8lG4Gnd39g;qLKJCTUA%ll~&@+uaY%=nA^`H^o<~o^7?K;nFo?+vtg6K2mA1`hj zJz@7j(YCdx<^{a`EEyP>BYrq`Ys@Ai!)Py$kS~ph`+-D@q;ykKZ 
[... GIT binary patch payload elided (continuation of the preceding deleted binary file) ...]

diff --git a/distutils/command/wininst-7.1.exe b/distutils/command/wininst-7.1.exe
deleted file mode 100644
index 1433bc1ad3775ec9277e13ca8bdca3a5dbc23643..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 65536
[... 65536-byte base85-encoded payload elided ...]

diff --git a/distutils/command/wininst-9.0-amd64.exe b/distutils/command/wininst-9.0-amd64.exe
deleted file mode 100644
index 94fbd4341b2a140fb0aa38ed1e10ff69600d4911..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 224256
[... 224256-byte base85-encoded payload elided; payload continues ...]
zbX&Gj-{sX${m}>8Pen%k0Q>7OT#jVom%b6c(jTAfB}RVaJARx3IpDBne*ft1A;vue zJh0R^Q9pFgei?&eN7gTarfji?<=__X5M)K3CLNlgWgpwLUADzvF|}ZRlRrAIVBn%B zgEd^j2alL%TK9Eq5V8}4R`STe;;g_~=&i>CXV7v7J#PdWUDa+j;r-2up^>eLllh?Q zDl(SLfRV)~_48MlMaI2y!j4*toLVpHDDPF}7ukJO`KJA$%{5ZW&eW~tCRUP~#)GkB z9}A%1;@&TL5WUrjyArQ{<+udJvoGMlL1{;PT%Hl=NXWg?;Jwx1r`JgHR{*$ zMtT;AmcM6K`Q9)ZTB%Qi96ujv?G@3kjM8A(dKMEX7>QP+;dGu7_mdlU&LW+(WCm}( z7USM&^4l67LG+E}5ozR1lB^mteqG2cVgQC2OUmS}Zr(6)H6j#?oom?q(kiQRn08q5 z?^1rxhVEHwt+3v_xj83L+8pq;%=h>`?^tURyXcFw#3;Ov6zq?d`;2=xi5YNEVvF?C zGP8w(j3uokMtA!X&q%hqd4uo6U2ncmo_;&Rs3~Kl?Zo$WyIr*==Xkiq-X;k=5RbA!flmIjJ<9Wyi1C4wC%0sFp@uW~+blXg${%w%S6GQd{PclgJ<oxGmPU)X~8lF+mM`0m73~yCAx|3=8e~vu1*XCC_N*&CHcVF zm>xl*c)XE_kgaTE;x_pa-o{MWL}ZB!%GU5=P3A<{8;SKYR+1;zSn_w08K+!W$)B7@ zT+v{Ji`Zl?9j&34!D6iyP8hB{$Uyxod1A<_)zS+#74lXs&JCQkMvgWZ)trLw6uOZaW^4gO;>~J6C0@45BtTlodJzi* za@^-3Lc5}mLfuF_OOse8etl}pPpH-*eokQZ4_Ysa=#fMf_Ob{vNgVZG7O|+R;$dy* ztQR*_&f)n7hpdywKjS8}QdY>QI=PI;Ww+o=5D+|1moVPOn?f+rxFTz z%R-i$`)jG$jGpD1Q?$5W&>G}Vt!Z=zQ*8ARdD_Ud=zUBd+cV06_?wQ;ePRh~a>%OY z|E!WJV2Y$~Q+&lf{*(Kr6ktv$F zh2?tV-W$NYQGc2YRkZ7Pqy8ak;K?68oWExHGX7SFkLB;RM#G)3Aj~OG$rVU!M6T}C z8lm$-@f*C!Y5vkR;iKix2>DYWL7p4Vr@&L^CVnD6*Cm$l#7Xf2Dz%@Q{8gja73u$s5wUj$|vCacs4l~L$8C_h&H;b-CSinNWTX?d%E96#r}tc zoOLBR@f9dCYg$ZC7nLSH7K9z7#}YsFDSrY|ay@lz;^&f!9$%kWs-HGQx6QDT9o#hYC$FDjlF2;e2?@)9x_Cj#cn(jMqi#_R=#J+Te!b~0H zzOk~;3g07F?+W#M(6c37=VFf__p!up$SY{f5kl}LIsLiisSA_Sdn6qqN!7^d)_1e?v_ zAwoXrBs8ss^JVTOe!&`vdXO6o*gjPkn`N)4m)FVa0avx)suo-|{=JMc7bhxPzDJ)o z1AU&EoAyf53|V}m?kj=Nnba^J^Kb0Ns|g`2lXs{3`9|5bKUDGv->ZG^81=ImuApzf zQGXK;RgGmqNxYh8)B1BsGr5AkPsN7}LxyQRDtuKGJU$dM=zWa4wvl7;K>u0ze(mM4 zcK68eY}9lmrtwmzatd4hC1c=adD?XG0-R>M|z6&m%kundF*ShU}LXGlkQN zNkkQSPiDR>_Go$%Kq}2tx@d<6BY9!9?^B~*Z|_sBELF9>V)x=+n z+|`_b&C5=YU>Mraa=cpAcacGM5uElYQl6Dow*RE>5^u2|U^d;LCbbK#ZW0QaBAyY3 zx=NQS(sd=r*PiXdZ7|qfQ@l`=hB8#6)jdq0rAw)2LdNmt5#~&@mhaVCAHc5W9?^vY7^OKwPW@gsn&}cMi0l=4=D|y6ILtytEpClw1ERJ zN?*H;*E6OEIkb_Z;?!VKhdvpl@4V>Y6u>Y|1IO4fr)L`KCOC)Io;Zr7fe;KAiv{5g zv4%0U^j{{5Zdp{aECkpw%b5#{(&k$A6q+_|_qR5Q<)2(_98u0#{?}UqKafr9(+6j% z3{TWP^ViF6DHcmW)#k(gt97>WgZdAA3@JGzj?M_%r)?{x{3r@@{#gNQ zQxc1+J|%)@;jpRxx)0~C&<(f`zZ?>=tsJa`}3k4Ybo=%bPBZ`H>ee$k_klj_rb%o`#8OiMhd=FAD@1pM<0Jym+oUhcOU=2C1fHU_GW!f z^B;u{-p6llzIPw}eKe9|uO)Xnjr68j`TULD%+TzbpY@>GCu$vCh&ef?=4&B*6(*VlJ{+M}%GZ6n|q!)4XV4x}PE=e6}DxPm83)5!``72G)BHLAQ}e_yBzOQFFn-+J z8hvk(xD554yA6Fb?8t#_=E28UK9_Zt6FKidK``Lk%yHp8*F?NxQsKg{v{-1jTWoDs zTZpP$w$Zo3$)}Io^*#sQEvJ4+@Q8ykFlSmH*9Uy^`G13tc+jdI-M$TCt#qX~h-*#X z>Trp;fsL@^7jeZZ@sImsV{UW@8YiYOZHvck0{j=&q;1hbSFvw?FnG#iGt- z&xI{rbO2K($4_@_ZH;1UjRL3JA3BInsI@cX+ckHXq2ndtCn0wuz24Bwar$>m&kobK zYfd7BR>S(I>3f$G9C+3ho2Bn&;4_}hJ4|fgfYlf{ECrB}1g>*jC^h%jAg4X+spJLs|MF?Bf)_XxqX+Ooz$alqA@ieusSace4aSdBWIml@5GDQNzhb}_(%9k%N<;8=B^!Pd?u$F_UFz$kuxJ81Pxyhhr%CXWD&0rqfvCQ zldbUH(UYw|?=9vf*DS1rei5)=?hi2dKr~W|%hcb=MH$ohnBxudLQ0cl%JRiAI z7li7#C?M3|7&t^@^dq~q?()1>`*!H^)x-)+$A9{!@_Jy??Mr=9K4h)?mgV0GTKkro zzAxq+X?k$Fd}lVRF78EZY?zXKnS1zBJ_UVm&*c`Oo}WthnZEsV?8Nmt|0445nRO-+ znH{=9BhnfwZFk_;e(N0Xp#KGS{~^Q3ofs+-KSX0-dZ=*M>|>@ch0vsGEbFvqR~@G6 zU9*m~=~ENpflY^#=ut(-Qo2S47Q69r(Esd$$uWj*V(bMybVQ)0A4c>TMmI#OU!uAq zVRpg9*p&=$%pPEJQPha;cJzM9zC@G&!z;yZZ;i6p=J(U=a^fhvs z^WSO#AJKTF3a37(k0g%dJ?2TiuC7miILH2wr$5NC^5@~nw)$AqdMJs7>%c_blS}sS zJ3f(#eVi&GzoXMT8ijWE1Xs5K3MxHISS=Ui8V`!=^zrJ8VVMG2!_}k27`wz}xfk>e;L$fZ3oX$Yri6n+YBUSoT`bmurZ&GvIAscSYGs)L_q^Z%vyX}0hG z&^gUEod>)TLvimgGJX5gY{x5C-|zZ_@?$r1WNz3PCsqVVEWW}_ZKX{u!y&hr@%-Ve9jU^9a?Oj>I~3#M z9=DK8N2>5Cd`Gl$7JxSB(gJU~ta=W1D4;%0S<;u~g3ZMKO9_SLJa6O#vB(LlXP?}& zlRB)GrPaP!Qg3T6tNW}^diJP0yKn_vfLewy72*5?MYo%kA`F^clA|udH`2Heg{aK* 
zgb8@2H2*<0hlA7miI|C2GUXdjNCNX!^WWN*@=d|k9y8Ipq^_FT95Ep zdwio~*FDbSY;7h2Lp;BgXLZT*nl0uEBA=3khUGLCFT6*_Kc0ULKk8Ff0=j-V@2B!r zy~%7cN{wNP8fRay$`6 zXBy-LfB=$gU^GU_^oZxX`B5jyN*ga6X(t^nNkd6mD$$Du*ofJDA#x)DgN|$WUWWoh z@ImCOl3dly2S1(x^RbZfn=1VNPnbJGzcWQt6-WKb?lUgj%`0MO;wHvlKQm8N}Y)__> z=n%i6>dWzAsqhXSLFBv&vnGE64;(nnmLE??UB#3txN&jQKuC6ag}*3LSk@d}RuAq5 z&B?1E{w7<#`X}*|ZPKmJ?UAztH>(G1P+HTa@#1nGY2&-Jp`+?B z%0KnVbc-BaF?Jp*50F!hv* zQahhv&Z#S}ko>m~Sazgj+0UUY;w$_J8jX3Y{s@VOJM>6>j`l&##9pHF3V&7`$*V42 z=_K`+q?hGP*LrmkV+3=^!FAv-lIicQE>~s!{E}NF^ukR4vH|1`7*(r77Bng6neGi! zu1n@f!xru)?ogAN2I^Y=B#*GVxFkbxOLQK-RFb$>T_V#2W3jg>#fMPbw%TI=?r~C( z^+L{4-_S`1Fs3CA-7mB+HoRO$x{-2FHnzU)gomY;cg7?>@xaB=XzQnu_qYsh%OQpS-_Ypp}d*_hYKik#Z?w!e#a0Lqd z{%R=@kQe~oUpvn;Q-yck>vqMwTgj(&4r%}hDeFuXPA6UF<5z@fEe3WaPmc z3<)PfZdn<6$cdNskv`$TbZo~x!x3?c3P+zR>r=dZ0wL0 zXM92e{rWCH#Ia+>uooph7P;HKoh0($vhs)UbaUqzyq#o3xJ~1XNu&_SvhaLvqXiVtE(wD_%=O+wCJ_QfsM{VqQPOa}pY`k7miQqzH$4RDMA+ z=i}LuzJv(k;$al=JWBLrtFqR!0bT^3EZZo8c5tyB`D<{X;_td}WPs!iG|oInyanY6 zKGnNkyGps%fu@Rk*SRBR*IAgU6URa_>q>K-$I*I#;F+3U$D;;_74fF&a#Z?FdZmuh zZ*~N)#X%V~t~9v;bogIJazk6Wo{{y(FCEKy))Hm1Rtws`M)^aK8G@cv?KvkyQ!ffO z|6fCdgozuMt*W;*Kz8uw?t`9|=NoIy1jr|S;nr`errr&VSOSLEV z*sOUpp%>%EudyvBn@!^Pm4D^lYpWko;q zlx~X@TJ3TaDt(9pe|oM^Df7o0#mSp*_}b?<2Rv@vh*0A~6VJUe!?ztK#|u8MvhI6& z<;5OX@=(&==uTTg8aG77tx}3W45<~n5&>?2Y1y5_g)2nm&d!8TR_PRjn>IVUi zi@>|Gh~=AQwQ}*~jm@`GM<8CGJMtlfI`lLLKl9Q;lwN)4Wd&={JnmfetO)p4EXt{h zUzD{w=?Qo?M0ew$%RsmYMVjgN>XyU^&#QL7N5!pg==K1=RsVE{be7Wu^-nM5Dd5|% zFf4~4lDBENxdj@{Os4L1_~CDf)1MwHI3+0PebEd~@9a(!oCZAimDJJ$N8A&ZWeall zlMvvI&7zPY`4L_uTx_H;5Gy?8@+@SwljQI4qxhTrfruSb_)?<39K&C=yPwG392V@Q zm{7NHPc?k^GT0d~996^x7vHU^FfH+|otZJzOcQA)E=N#eQw!9wK*d?99tT;4+>6ye zv1@U@Lqtcdr`#^2Hg~YiN_=OO(_|#icSa?@M!h(7R}62SC(noSY&@6$xV${){e%7~ z`~^QaUFBQ+!b7JZ^69&%xYK5O#qimrMn?0!@CSCj;oq}=z1PUE#7_aH(EoV66diW4F~OO{LdB(U@VBe^iT|K_GFGE`X~ z23Wu2Gx{$uc;Eu4RsQ&m{i@>Ex~XIe{{{V(vSX3#JddUi`aURqACvT6-OO^$C2)iL zDnU9hT`%2#1$7Tz?iG}<3h%xY6j6k9Xxas2aIU9Ff=LkGG=D*`xz35FI7UoJNn~}7*!%k+(glzenV-vD(btK>C zJ%lEWU$){1`nb*dNWIvyl_@F*NneIIUb-pwkAbcOb-+cP4N4 z=Q-!Hlxc5EDQlY;x zhO9x{DpPrBDn(rW2dO6=m!DW$>yPv6xcnhs$D$AX&&!HpHZ7ZZ=$akoX^rWa?=4;I znA4HoaZ~h1o5!=9$U0CJpOV^@uvW7O=rw@*iTS>3TlezA)rUQnfOyFLnLl2+TyAoz z=`e5X{D~}nrK@?WJ)ixxr}OqJC=uk&m9qETRn`i7@vxaN$GE6lpHIxN_<(O?q}U%{ zEKHuy2V2}Cmq>Ngo-|pI`lv9&*I8hsLcLh_RiRg_T!JrNdcyhZ9_gs!TCQ;{px%n> zB)r=y&_>)cY=D^v_%$nH7jkpu6~oLMn`{5NA9X&V3cTl%y}`cGEMx?lgK;xE*^g@@ zqb=EiilOs&q`2T?m07czNJdqq!y38cGSA#~O0=dJwkv@JX+cN!N1LUWPs$w_B;RcE z&i~@3_b+>uxPyet2QC8M0~$7x13=3KtnHs8b^})C-T`ud?S6pRh1uK?z>>~^t{4aA z^v({`yBsstJ3IeXJ!m;?WKiE@Z;SZ_U-Dk@^0S$~zHf+^&opp!>+eHry_YaYw_Xb5 zwr+iZ6FWxFZ97hu zwh?Y5pfJbS`Li+XX=6#n+E7--pMG3O=#GpE)+9pllRu?i#LiF9wV}Jywyu5>Ve1qe z*~d%yX7w^$#YWNBSws#n!Ul&C`*}`I(=p^s77!fb6DSF_^?FtFUCQ>_s57Qh^{UR8 z#=Y-xc1(WXc79)RelG?v&IIOP#V>bFat%qmGM5`+OH<83#IvrGXZ~USKbu9r{t44TqH%AA@W6XNMZv|Si^*M}D>I1+-@gSkrz29dzlf>0;F3`+Zc&}>h z@%_T|_rdymMmO+qk36Sl@(>dEEa9VMUL!7o^J${PZ+w_B^_Q%=CC%7X)~4B z0!%Syf1TVOpbb_eIdKP)u zx#cUn=6EP8TeIZ@d2-XLr+@P!&5i*CcB~Ki4|HJB%$FNdZrmvB7%0-fruTAb2E0NT zu%`zDwzXAxm)-pTiHAE|nv|Tg>=^0>7O3HlAw@#IDNb ze=h&?xMtRjkNwVhO!m@!i?c^G${{#2`l&neA2S{wF61eSq{Z14`l>lIx=p^o2bsSF z#g_Nd2~=IihNr${rcU|X3TBIP8hv#QIxidpWcy{*kLB04 z7tCzTk}H{4_*!S}SR5$1qz#oaots~BN%92a*-PCx!4cwN#oWXFrJP!7o)`~h&m_XR z`>A4eq{I6X(7u(Mo@6_=b;ZGpWc4n>MYE(ir5>Y{SX=ZN==kK} zsQGt#+~K_1{soKc2hXIvWC1Bz;X`Zt7fT978|ws9$(%O*%L2zaMjY*0VaZ8%?Kt(O&G-G$`!j|`G{l<81 zkGfE#QkmVx7iUnnwj@j{RzG}4O0wCO8@Dxu>bYMjJ{I=5pzS{+%EQQKB6>E{UnXSN9GS^rT%;VE0JF5OtMdXC*OwFrx#Ld{Pj!I{f8D;nT`5bB9kdo;`&72i?96bBBiFq3;a$b2i-b4}Y#(QmZ{{ 
zeH-S0{Fbcpp^^1LY?0NTx2jA35qTcO)qN(cULb|tf0i9Cihh>Q4NJbgkt3cLUY|Vd zd13a+yyxw{J+J$AHK!ggDG0{rf0C^`WjvQ7Om4b}V=J-*Z}{r0Q}a21Z zx>MY{WWR*sBQG$;cCt1+NPWqiPv($x4oPNA0|W1I@4Tyna#==0e`?w3omveeFdD>z z#%3)|*!q@QmPMR#2(Zb7v2f6j~a)2s66rx_a_(_cMqyA!XChO|mC6Ge^# z17lg4gTA-IxuSP@>!%pe9ws8Xy=6>#%RuXA=$d*+P_l)UXhE4hkJb@2a2U}Q8N3W3 zC#62;Z@c&VBmu1~=b{i|&q)Gw7x$ha50328?c-WEirjiX&>X%Ulh$upO7O)E-)dS> zv2ly^b*;p1!>h(%89FRW1p231L!dnVnzaS-f0J0=E*W-`ijVLU)ognOt+SEZ{cfYM z`#p^qJAd3i)!$@8T6m_0bjo%)piv+@bh2dO8$S;iS+ck)pgTW)s=mBKF4UzT$(ths-(ths-Qr>$4`QxCV^8g?NZi3|Z0#c{- z1~TAo6e!Uo*>G+QpRDf@v-huD_lIrhaDS8JQQI0=U5+L18l|;%h#eW9EG_^LRQP4g z#)PaxLcWdM1xuJ+{JN*JDsiVlctntw&w23^YKIve_bS*_hzLer)dr_Sp3#wed)BKi zJ4~S4l$OJOMG3qa({k7#RiVE>&$+tTqPqPEM&sN1?xCz@_Kk>sYeVh(Sv%i6vNO6T6&{aB7mV(4het>E zWQ9kn@t+CVA~5vuWN>s-_adfAe1PHp=+=G7Y=5*PD-hFHBqnpB`5WL-skmR%4(YpD z^UnZU%MT|O`@ZFe6Q3y$b-9c{%sWDc+NLk;@P}0DH_cyS-rvf)Zq-%FaK^mDd9&*B zbg3Wl+^T8tm!Kp)zBbo<5nLd-at|aUhoH*Ra=3ntd%yBiA+x%ldgw=@>wjyF@89H05vq%W~Yc*?npT?V|x_ z1QHiAR5E7}qisJ{b@3rGkX?E=6Q5|PS4!mF+23}cYsl%G%T^ zM*VMiieFrbtdNsGlPI zupAf-XY-la=q&oTq&<={icUK|xX@kZj0y&*O?HqLbKA|U4MzwBhcAWWeVBBZ46r1h6(K77Fm_PQa z@c^kEp6g4bC1e!ecB39wO)!5O+RmG2RG}`&YOF#`KQ2hXrI3|>6Bm!^-9IySIv_1S zpUR+78}4GjsU`??{I`u%7>b|aBYW9Sgu(Cj$g6(OZc02t(z?#FPQhWZ@@`^CtidHg zA~mV5U!#+d+Wk%ZAwR7cRr}=#Qnan88aoxSkzmAT%T{hpx~0ez2-Bdgalj<*28#t7 znL|mamK)t>GOlt6sg^pc%0BV?_C-v1*i;;)g1%S6hgt30jTX139hEYvY8~4=)4Fh4 z(Dypu*7I$EXTAP5O+6*wVzZ~3v74!4Rq_Jadi+us=(E0s-%#Im`dd_^E_U+@xRUBr>g8<;^E%esM!no36ZCy-)c=BqYTsU?9mi);M z`o0W%pf=E%@JYi=$tQ{d23NYVQTj4_6x$t6s;SKn%M7zGzq2=b)@y2|llVL7wfHxX z8Baa+F)NBp>VIo&VD=V0*c6>Zz3_@%(Q5q-Rf^PBsWTYQRc0@jR%3(_dqoYB9aec+ z7z^oM4U4zh`ZAfrafiV-3ikj<)$z&1GHlg_)Z}0F!32gQM#vdZju~*vf-OLD^@12z zo|+F$VTeVj#WmI6#1@#E*QuYAP7o~zty@tz9hA?t{zGQaD*I9c`wBbR0KWyF$IDj8 zqXz-mdY9q@NvME53?%f}cQPd8A_dMCszIH;qH+s33&V3a}j; zWix|ogg2p9mNR7E*LA!r7@ujjeBh;bK2sHH1R3=xy7X|(RIShpmyqK8;Z=X6lQQmf z6e&L7Gppm@_ec#hr2eVrTG>z7sN-$QqSgR?V8!r>J6Umu2u z(VBISgl@6w-(AByJ%Q$w6VM$zY2RCCjCu!cU=1G zvJ_dSAI%+P%}A45ja7J8&`Xf5u1=ZDW1KERrT~&WL~UK4PL>)X+BL2AHyvi8mB6Jx9BVI zC?P#CJ~r$L(3f~sFMRnQj}ck0C47R=$yBycsmM_bjBlzsn^M7+54lY~G}fg?^M*9- z!_}=lTeKc zG?Sti_uXtwibYL3KsCb+5<%J|&bX{WG@5HBWMs;A2IAA*1k zTd*S_52#(>@)-?wobmnMMH6MVaq0-!YL(5>?s%||sWWkdEVED~(Q_H`w!pB_F7-=9 zW|_WO+z8`&8$pw;ajKPHCJxJR!%iZgYg}_5>n<6gB_)Ds8L<-=@Lso4h9Vt$54jc# z;wWuK6y`&$w9X9QQJ)i}TH0DJ7M}0R5S?sV7i+dMTK(Au8zrU88EwUO+ptBvEPF`+ z6kE@#;^-dl!j*E_dczF>n7EujaIYGE>^NiESFPJVFEVv8BD>gpm&rq-|OCgI9iE}=?KYE?ky@Nq8){`ZbTJGVI74|PCc(t2(GE*GWlE# z39HYC$Y*v%3_ITks$%&L{<$VAMNEQt;nNi|k-p;WB*(z*o%xhG2)4&FFY7=T=w97M8}&VKXH_Jx+Sr7CGiO4nrE~ zaDG}7Lj`XMiI=YTXkn3+37W4Z0Ev*VFJh_;S=$2E=L`cloWuF&t!dR;=xFxI-7=!O zZM`0iXTh(!%_C^tE*x&A+6P2Yi6ZDK(EC0cd9QJXQas#cGRXw8_j9|4>;2qMo5?NX zHA2vXf;5eu(;^x4yh#V09p7(Ri`Rj`mBGxj^_!zI*v5L{x1zSr;3_`ZUAY6cBb2aSodNJ_ zSP~3I{~#mtS&%&`GS4)9&44IR#e(ddDAf9-)W`)_aN@A6lZ)YJ%M<&U)W{#mQ}18P zdpVG{5^n`vYHH;7bRKlDR{pu9bB;TlpDMhR)M&m8ho}oiC9XI^j{Nq}6*vwG_WEhY z>K*~icKS)SVJL>L~|Rwr^WYpA#2G%y^iYvP-|;r}CjUW$HJQo7XuKcF=m zCwJ@bvFDrlE^auW$9o6EXvU&C$@aHj=#VUDCIPmi;J!ruH)HAG(z!B6y2p>)7TgnI6hsQO)FR~ z8Y8c&rG9vcX}`^~-=^Ac*V}J1?6*tox0&{vX}`_0-_Fu+R;|XTRC|J*@Q5UEmO>Ub zZ8ANCbbE|EV3aYkWQ=4p!?f)T57!#3Aru)SOFe?0HnsLsn-Lcr71m=+BYRMqhw3WQ z!$`CUsQf&+`pY-=&u5@%Jc5eG$;*D=OI zlQwSImRb3AR3!w=jcHpVMygQs!&d&yq=NObjj6(UdSuq9rHr4Ab$gzWEOX)7i}`iN z>PIV?gPUN34EZ#8!BrGV6`n`Vn0KhOL&yeGlZ<^p@7G#)Cd3pz&C1^;eG$6pM*)(K z5}nZ+^#bcesxV8Su<52rrU}Gz2OrQOXhP8Qtfx!HG_z(m8&8(RN`jiJMYkjT#jkPu z>-NYjo6G$)2#%)JDC~s*T(>9K$+@guvd&kw?9Z(7l?s>x`0E5ex*P>$JRA zmVw%uPC9BHyCRt@Clg<>hjUab)QM>RF=oi-<*ot|!Bd5Qz65N?yjO{ZfzBUmOIpih 
zb0~w-!Ioe&bz`mnAvyJc2+Lzue$0M3ed2dy?=Zg`-`NX-S&Jws-F&L1O+PRF&3h1jJoT#4sE7boq-PH87q zk^}&9Cy=3Q%8QJi*xepb!|HZ*n!7gT3JMdZS3BjXWocp1lU)Oei zAkpCBg%K&&luadNoo(Kmq(`a3MHlD;l&4YEeNt6Ce=^yY@8P3*0)6yBEBvvpFu#PH zYCPB$Z^>xam^tGqZb;&3k{B=CkNJ>_&(sx%%HQ$pP7=~X&A6wJs?Vfqinu63pM)3u z<>@}M=1JBXvHC zcy#QN;~@+Tra}{H9$P>A*!gX;2^8{WtXL@wZys_bm z=@lo<3zzAzA=Y8!jz9_CUs8`*y2)(WT-S9b*jXcS>zGhF} zj5I}dc0(7L6$9a8IAP!}B}UyZ<9@fk+gz^4_~Ssq61u_d^f+=v-0ZIc%h^j5s7^p7 zVuiLwNKg|U;v}NXqYXkDIeVD2WmWT$Vt6b&B)C{+Pi@QjNch&91U&LvA7h#4&eyY0 zR&|T_dRO3>!s=Vue2M=-$g0pv#{WPe%g6F3$TV8MN{F~SMRy~(SjTdC$y#qp6Hspbqe9vmRp zK$qV|8<7#qPC3S}8qX#S621@!#wh!68E){5E$D(PF#m9_|-GZ1* zYevIk@@{#b7qJ;ngGrPKk5|{jBO%mJNL9tHY;}Dr7tB|aLAfYRXQ?6-QZ`DoDZ)OD z$+&k1;<);%R_zmnYxlbX#Yj8K$IXCD#Ub7N%5Y0a_GM$MZD=CJcBb)mrZb&P580VU z@d}zmIlGu^d8$iy5gq(hu77;Dx`u!zE#YzT+s9UmFptY2=Y5-n3Hw?iY%x94&#f3U z&8U~o@|ee?EaNs+dng-$`p345a;NlNo2q@AjV0q`F{l`1)So2}HG51eZ@;>jeOSQy zokl4~oqX?wZ!#GJC%q;H((DKeElN;f{LYfc#@;w_)~J zX7eR9=x@pe619ddYmb`naiC``ACkiX>GW=p5;sva6dSWY7^^00g)K)I^>SiH`g=dW z&-0%}A&-6XYw8v%mU$yyIvAXFrkVU!>#1(W2Yb|RR4js{ZZ){fk78B$G7?kg#Q2yw z(_@C|*%MupijGa4q#bVVrwMv3Rw1M6RhUMqux+x04FHStQ-$w`*+Lp*I1fiPdZtQR`u@U_}sW#cuwZGC6$K5C{EVyP3 zc<5lu-jOR8L?%`vqFBj_{HtlsXwaiZH_Ej<&BG|&DZ-f%s8ELh!0A?mm$}t|x-nFv z0={i12$PA==^8ua<$&l?C+Emy-pY*&zOCW+MfRdX#F_3I0&;PL<_@XCQtHxHn^oaw zE&Jr7-?z$WsG}+oT{vZZi&K`MZ*oLVmqn&Y6@CWFfP#Z>zHLT5J7P5D4iJyx!U0DS zAh3X4#;0C@^sEd^RjTkeRAvL>fMEoZzGQeP(4fgj$r&`NUS}N!UI8$GYvMXpH%r=F zt8V940^ijFwJ#YNs79mNXm_2+tE>)}^I21#u#+%1+^$y>q$Fd2S!4&w2T3|za%~L8 zE_NkSB(avY7|(I^p*d?hf!n?`O9`{sWu`jxT6}<=5?0{-4ypi_D7`wd3?cJx44>Y; zK;7M<*JyaWr*=%v*zoE6b%jq!uTM09mdX1Gd+Q$S6U#vFrD=`ikfrH8d*7Y0l>)=J zU7H4a1uX}xO^-gU*QP`>U;Zy^Q<1Dq2QEu!5kIlEL9*PLjRBBl>9?;OxGX_D&a(8$ z3oJ|97?hB8G;!jL`qy}NmZi0-6-pL3q28H6#=QO+$?4l_)QjCaP(A~9qcfJX8LzVT z`HlP9P2U-R=PfeRlt2=zh&w5Z(hly2U{R4p>68N&C2@+h(Lm+tMd{(E(u>mDqQ*_* zVik*06${U*$iG6dQ$^!46X{`|89U=FTBIeTAqGr--zVXUP{qk-kQg4V1~g03s_;$f zk+r?nomz6U?i^r#tn%&CcVoP3uO?maJb3En?@9x!!fm10D5`#w*T_&i#~XHzb|=U0 z$N@$ktV8xV@igm{TXI;eTF)LgpXY)N@`y=&WgjU`LS7pl0G5_d|8nqBM*cevEQZ)4Eu z(DYEnEjuvHv&!tCE;kE{USk3~*c!Cyj|e|C|Lo?YtTIOQCsLP{{}!8SXQjE8O^x7T zesVUTw95Y71|`9~Wr4YVhpRevO7bciH5z0Oj3pX&{ce0h7Awh;dU);E+4XItI_!F# zN`X#VFzTnu*0E-NC|1$|`|HT1UItfF~Jx!xp#90-- zMZQkpt6tafRGyNh=2J+hp@)@Tm}{}Rh6PqTE36Ne{=@WO1q?Kvh*gW8*i7$mQ!~BD zAl5T=F&7o2S=)NG=vEue5c*m*ix*(!@Xv!RHvNobg`*eIFU@3|Bcm$3D=0NbdzWX^N+ABQ9M})|hCfES-`ZnL*oRXB1e})l58S@= z<=ay%4gLqmA%Ao&Vs*3aJETr{QASyGI*}7Ayc4A9V`QHftLoYGsdm$TW5Ql>!KQF` z<6@WS!Pn)+oxWk%sgGGQjNc!C>j?2y@j+4rqOI{fR7_sLpp?%Hhri50;WHl{CKOyu5X zLT+C3XinsdU{g9?XLl@6Hi}%>D7&+eXSKPD0G_V~k9{qya2xHS-1&s(oZy7&tl;@n z=lN)!huC2|2N1S%8Tu=ciut==|J|qm?$v+y=)Yg-zq|F{E&|rc)GDa0Amp5AKG+)i zR?OP2qi7$vmz*NX$*)9FhL_U@_44-+K6VoBI1*5^xs9NE8>c;aob~d>XPCxczq(pz zlMqmX%NgKs3Y%D*9m*?a#|p7j{db+){hJFvsIn~M{Sw-|WlW6Xpj zS8k{{+qlcN^j@83jq4Jb;Lbmi!y30&o}<6uIeJGnYtoP8$>$Ca#Rez17P9rUi}t~2 ztmoUp+5Wl(uA14mZjFpGqhmj~lrLF<sO6$Vx>Y8fSY!sioj|zJJ$x3TFzFf%5=wj6cWR^9swZM1w?3)s2gBW<_ z^q_TnaoH!q(p1GMb1pLDR~J~5z6|-^n{~F`z1iP2eYeGV5;|V9}*ka_S0_lS|lUwx4AJqtS`_sYMKF!$vAY?lHqO#$ zJDKFyjwTrp=(2Cq{8k5T`i!3_IS{1mS$}1#S<^1su4u_h)M3djU*|=rP>XR5@&k$) zKlz7JvBlQ*O!4kHGP)?&Etj&x0_UA03b%FYddSZd!)&y@!1V2#{|{T$63){-=!f23 zG6I|Fdwc!|j^-u9rL`_-Ejnd7vF<3^@}T&kd#RDSeys<7Rd!=;YSC&oX5Og8nnxYm zY}${pJElZ2R~>0j1EDkYz^=#;HR@x|15X~V)+W-ry2Vr={qmsdJo#c84o2tigC?_y zcYVjQQBa+FS1j}P3S_A9n2RzVxR>j|jcPi%mfGbar|Mx69x&zyTHJ$F)+iHpqXu>t z-O0_hu`Bnvl9*VC6Ux`6NMCbkK2_LwHU^WPnyJj7Iq?Q;J)+#pHTHMI1yfth=$Loq z9KsF+Db`8sT8F5rKMK@&@7jnK^n%M0>OQ#6@6CqJc{lp<&*8kr>}#=gFys^D?1r@~ z`q2opWxv}JVI_!G{KQaIl(_v>u-~J%XTo)lHO%I 
z`AHR?;S&;(vaw-!QzZ{qsb)bV$_$qp+=9WnScH{6Xu{t6u}12S1LtgC{?9m(Kl^5V znbpY;^UM%NKLE}YaBdMe_qlckilEU6fLoLbS4PLadx(I0!U5nOc@S{LMkSgYU5OdZ z^9aK@Tvc~Nd>xNpgIE(VbEZ7Hdw--*oo(k)hi8f!TxSO7Q6Dc>d1OYQfeaPqPOP1ov%+gz?) zn~vOg>{qEkS)NV)6R?{h7ZpshLLb5^LPHBuh0inMvJ*Oh^N7*hw1p0spN)a{&pQ|f zsHFgrW8{1x9DDp7zYF5Ur2Q^}A}3lO;~y!WDtT%bS_Gkf6r_nxy0Z+OuuF9U-O{oY zej6pQmf18HO;sB4UyvbGlbUD^>(Y)+vV)P9Mo+Y|yG*Z3^vT@A9L0yswR0Q?0<{J- zdI$LN+1;Kg73&1pU$eqexGB?5?aqmrJ~I0sK7fUl}yXGCXD6+QrsNzEv= zDV{1^%6rn(og&jYZG1u%i^bw)PBcs^1YR{A zS?&061w2YLM#bD29c`Ve`y&<7@gGu2qL|58h4X8}FX@#&c_uiyMm~KcJF}@I$0v5E z%P_*ovdZqIIUHuU2ox3M3dXWGM7>AwRE(3>2D7fk##77eBibr+QD7CfRrVhcot6ia zXr_5;NWRhXkynnmvB49m5;Bjz2ibK_B^F?FTOc>rjV{Q9+VCDC4xhf2dX3cg6 zmFq${IHG*yZ+Br6cd02n#>xk_C8yxq#9H_O{<^hm(3hZPT1WI6W}LSRFHxE3Ome#3 z(RgJ+uKda5Cxmoj{=$`S$5(12T{3_tSlEq6u;*DWv6KQ<=*hiS z-jn{)P6=;yh1J?K{LtYG1lkW}-hyyuAY> z{SoKMtU;Nt#tT!k1OAzw4Q9AQ&IBjsD) z&dfE#Bb@IkOr7&fMiXgNfGJl4fgE^q%^E;6J-fMdC26RW(OlTuCRvHnam|Ql*6aG= z?!9-<^-1&ex{pO+ERCR`ZaIT!p`4eB0sJ{jQl^j+BJ=|5uJ_WO zg=e)|`QIS{lid80>x_EsD2tEB4B9s8S(dOm&&W1wMh87Ro_Aq*s3Gm0?iwwa%|k8rJYzc$IU~XUvt>EaK3;yKtcyJv$Y?nZNtP-(!R0a)+;$ zpImY78k8#hvP?8!?crdkhAWCz3fJ~W|23le=6_AC_LLx;_M-zO{>4=;uJS3#71wlU z0UVDoEG2XNsX$2{?2}P?9b|CoMK(umxxPK~@2s+@6>;xfEv$Klzb!}SqP~>acYXWA z{c0!W)+QIR0@nF)Xh|V(bVgftG>I@-<@?A8aDT=&j);oYN_+z*3QDW6Y^Dl*4&Y~+ zG4|Na!p5bx%4C1xQ z+k9>FZpxQlIy$o0^NsmJV;X_7KmZyQtI zzq0k)HLfTRVvp3YEA5Bf%&h4OM#t=RMM{^6bB|hq*B`X7SM+mSyfLoWjE!Sm@-#6s zU9oW`c=slUA}s7}A6Fs;+Q;R}j~Sapu7P!vin8$8((T!2KD59%pRFn#vhT)5U>~iI zaT?37E6U>l;A}j6SF0bf8?LJHo&!A!KKfImIYwif?C(%uC=%HsO}PqGPw zOL&41i6R;?C^k}~K@Hg;d0?X(*;Eu%RH)G)VnwB}D_9YOn`kzVtN7Jg+i&}os%>q* zZPjWKsFwr;g52>IMFsD(tO`gAL6rTy&&;!%B|`uG{eQe(WapXt<;={P&pC5u-`GM< z-W@_&Kg;LdwVlX_pBf&^Q#xE}&T7UcZ5CKeJXfBezksU9mHO;n9yF>M1D#XT)&+qh zote0eRO9S+-mPlk2_#tEx2jq;B-W`WNF_Wb5zKDdcGu@eKG7WctZnbBlB$|afk6?R zh3DxiXZDYuM=i=)Y7yK>NfdJ;#h?dINgQjRMVu5a|3Rp~34V5rO2|oY+g%%uQUQNh zu3=|7Rh?j=#_g96*F9akp{1lv6L)#uEX%Ev^P>bbY77Cj<0Ej`J0d%sA=oKP!8{N) z-lby`slzM;Ka+gUZrM`!Kbij%RMn*dxoxe%RDevQnM7fz{ zL4i-l8mSt=>^NdG0ZO>*ZBrt(Wi1 zZTJip^g}D|v7OiC)`y@i1KSCDxc6d$+P=xAtSOi|_HMNrZ>Kt(>#fFSb~o)fGwFyY zgWgR#7AGCQpjdzatEm!46$OVor&$jyU$IoWFbM1Nfs92D-n)@m%!U-eewA827rUSX3flQrR0xi#JuV9L&99Pe_2 z!CL-FAM2HjImi4GetH6_uyekpGbZ*}q&FwTa&h^cA>Z}mo>6VR zP}}K`K3v>NJeeuJT4$wN_eVYMHfYAol6J7)#24|mS78U;-XcZRG+F7*&`p4Idw87)1HulU47JDgE}Y(rrkhnk0W z`AgRMw;YI{OwUM&B zQ7~tEMCXr~LGt)4SHCruYx_cuO7w$*C&Xv(|d0 zx-&kA@#6=3UikH>t;d~R?NoQ_@4Q;?iaT>__IF6#a!tm)b;h$j)lMtJb7!u(`4sO1 zFSWngySLi9c&`t1uTBikGIp=fDSlygPI-+^!hS0+c5bw>0h~kL!6JFIvbBaeyMGVL zj?uBD;m%PDuvfH3EWNFZbI@qiS=g^YwTY6INDZX?Ui7Qa3Lgit2#NJ-@RnMQa^ZtS z@Myl(p3;~oFoyb7HGFqN$aX6^%B`Ti@X!;4E;!wezC2Y@)o)T8K4$ZA?Zpd3&=Yvn z+0UhrmnEvc$L!>(J_4q$;tc-scm8@_oH925mD)&R*(H3=CfPMg5s=K{3J*^7m+vzV z4mhvNiG+tYxaZBIw4XPSZ*YjuyBj9M$@?8ytHnAWV0b28VslxmuKn@ePBH0_cZbAS za9E56?+CCV85}xBwueEV>P<+x$2V$H@noxMy!4R_?pe}txpR671&(06_Pv@J%$JW6 z&EZE7DaQNbn-$biisFi4lvDa7k+RJw2^RZG_7mQhEt#fP1)NpRThA7$XHd!19?}5; z@qJtE#*e7E!9o1ZJ}g10*5M>;Us>xYRbBn{XGG|1=V23$nUYiG?VTBJ6}YWNa&;>eVBfL5--Q04s~~H&t;l zjb)mkWm{DyI{8#VPxFtLT8l~jv}Ue_Qzu8+uOFpivN~(UL;FYCKe?01o0_s&(Qj4E z#wl#N6Q5_()%%(3fb4Wl#b7|b@`tq^ov;A+85ZET?Iiq8@R9q9g$a>6cT1|FEH=d= z=DFOx!#gEyLC+8ZsR>r_cSf*;zcYh*{GA(mL};Q<{DFosGr5GuF3M-$cmoSXerzri z2}e=v791_IPJCfio*Nq2a98F?j!IbW(q6HP39c;D{%5EB|7pd}6aQ&e-N zDfyJ(S^O;t4(0EN-~j%XT1|x}huoOuW)0&3-1-f7SLWHN!YhsGcg-Vfn3uZ4GH^bCVL>6>p z+cwXUP!5P5$Ho>2C_Cv=IJ?FkMZRiJa*;Srfw5XH*vSK_d$^Fa7ySe8>pZcB^}o|p z7UiTdxm{IN0aHw@KLg3bx#&H*Nj8Dt968BxZjuj}4{|R0*X6*{`!1B%O@FSvHmT#G zDmf?rKv-BriLkI@Qd>aDdN98vDCrs44xaa54sZeXit?IqnAc6uC(?iZy6I3g^r)4c 
z`3N=HnBalH;c}m#b`QsD-nV4~&^l6x3^&<{w>Z{2Z?iyWeN9s&-m|pHK%o<4k*(8L z@E8WP`bB?1&M;q~ISFor5(3=2y!aE%^X`QfE{CAZfE_W;#gDT9p8fCRJohwl`gM!r z!$e%Npsm!CK~zGZ>1CzI`Ruo{9!#;!B$M(A@ z`eTorki0`Oz?km5F;M?8C$sk$sf)suJ9roPBB$?;eRg3un6J*i+KhjX#A-Ix;_$(N z^Uosk(-N6juyCb`f@e<_;34l<#`8nzTp^eC#}^hsG~Bz(57*~v*{H~}jZJp<76J3P>;40i zxu-e@;zxQW+R$()IWl=qw3gQw_M3y)dY$Sa=z6<4vsKHpTGMQC$$F8Sy64*^Z$hJ2 zF8P`vm_z%GR0-yLQ^Gs?XJA8C^d1Tg)bFBmFGVGhkx&`IpYY(V0)_|&>mcPN6E#ie zoGsx_U!{lW9PoGUk*HBrR?eEkz@7w*EshTsfzh!<%3>U49uth1WIuIe1{HjPh9KGR zB;!xX*ccPko0S=+KktyW^H3&9{wWhkEB_!y5IIVn94<}I z{`%ef+U^Fv_?kn$k!0$j=2$-xI@#A&70gp_>9C-oT_O+o}61gLHUt7M_C~_Ju zYv+eQJOto`d1>f|u<(vuSRR9N{-}YgGu5UEGUYVbh1tk9^QD2VK8RTg;7|lkmPX~Y zK@H7O^_xSA^FVQG?hR z1_9|%@RabOL+G_Q8|9KowBTE;k%-t!3whU0_IvuS5W6dRV&Gw)om&(f#N$~TG5DLM zYD5o~)!J&@B})!O-+~yeIWvB&zHzM$2N2a+rT)8AiWc?DRm8NG5KZ8oa$(+5v(kMl zFSU;7d0FG^(fVcY`n|nF(`rZMhPY5_6s{G6+K+=JV6G>BL){UG;ggD2`TuK zN1lUSf37SJgKIt8FgYf#Hy(qpvF@aXze5I0xfPNNw{1}5i1g72p9@CGJXy7xYMJ^A6}l0Y-Ujo%hf9rkBg`|>Tcbo<~~+ZPs9MSqHlO0K{Yq3-!XDLH^nyD*ILr9KAS7+ zMuZWNAqU%LpcEiOQ=sD*6xnHDkv03;as0c)#bf0#-PjkB32FFf$mj|mzBFaSL zidk3Z=SjB3bnlPp39&#kA7et8QvA+prDm9!qCy0t6)Wx(gnF>XPM~t=-)FS%jR`u|jGmXO&i}Bo|M-=fhJYL2$PFn&%gfs&;ls zV@J}xGwlk5EE_<0{~<(YKR--{WFj;ntK(!}Nj#pTLwue%V<#f3k?Iq6O{YUDJ4j*= zGqFRQS>9r5FL=q#%QV8 zXRK+J(}FuHZw$Uyd26tva$fL_%E>{~Tgk$yM^)C|*ek@94%B&Nt!u^^dm2B9CXLr? zSj)Rr6+1(D4doMqcQ%xd3(m7!zA}1+SfL+|rfrcgl9F_TiZx2J!{5~huj20=+&>h~ zQ{bAB#`Xn-SMB*u-HcX;eyO)oNKdaFe`1(fJ3jDnMP@0vXtGX@UcAT@YPYEPJ)#_1 z=gag#zQfY;IRyIJ7gPPTgMEPmyRcZ0O)h7)#&ZavMz>lI91n_VH6GW?)YEGGh;uI? zoHujuv+f^GINvuc2=MpD;A#B5#cC21lsreWJlPeSuqBQRO12E>OU1_WVZSs@nr<|L z5+kgpleoa52|ac^E|I@F+wJJBnc>6;v1JYr>380*YA%oGgcBv9Q#5noBgc^?|3dER z^L(lNvg3DhDy)>&2*M8kp`$RNIp%JHLD>gST&?83Z=Q4L% zNlR7zT;11pLYz#^$5gd28~0mH0)0BGN@6-kwvh~V)~Hko%Nc~eU} zu}W>kFV*z;e?dqA+q>Ut6kIu+xIWk`oTxSyol?y_ogiI$vSeM_>TJ*Tez*AVwd&Vw zFuIFYKzMG+lTsz~Fiz~jm>3*hfIrzus0(9nJ^Li2>x*3`ZIzvSa9XO{?_C(8)BVZ}3j&6Q==%bH)ydv#xCMoEX-u#7be zv`*G6scozLV%!LPQ_s-Y6X4{wUgf~jF;+CCc!urS%etn6t@{rM+tG3`wufJ4`z)(* zhgk!F^Vb+ZP^Zt0AGp&X!OCTI8*4IHQVlO`HEpGsmPy5<^bD1CBI~VYK4rORDk3ZZ z=(a=Z#`R>`XkvZvOx8m8NVT;j;j=F8^m`OGxqb|Lomk~-4i--a6{KN`L~y%BDqMo1 z)SVeqO`}5n?B}P}c;Z;S*h`EexP!{6*p$bOmLCC?BSM!~S9}({1W{E@mt!vVw$Qn&68aW=BjL4`Bc{4HxY8h8ue5v z2^HEUaxU|8d6lfaE3C$?{Ms$seXMpPrGx9jiPF%;)t=A&4!|Cs>P!GUL%0G1enEVJ z0YBl_a2nz7@Rlyt0PqYR7%*joJa*Q0`=whYBDwp7<*37MoA)!T@dSpqssfz>V_?K; z`jUM?);JCaBN=US^qQRl`Zx=hUWSI<1aHFST%MXVfwx*seQ<>nZuCeqUnOQm<4YK6Z- zvZQbf~E=g)aWrlJPr5tob9SQhXW{<7qOnO2h4wzIdNAEi*U*eb)zLsf1 zjR5~Mf_ctryJWA7SRG?&dqhS>R_x8{{=H1e`cA1bv)gJMED?k&xPZTNLIc8yUh`Q) z^Q`+bH5n9zM#zM+nnVc%F0!hmAt`#;hc-XgVmiT{tK=vhiY%ukmmIG8x2s z8Q&VuHpwOSEb)zx?{uLw7DJAOrtcH9&xCb}1w_BL@j(d6^nggepwOANXPb1eUVB*J zW^x^$5c($Am!*ihmdAdL2X0?;dF(+6lNq45hAzJFLzuwwc<-(tVODzJDXKt;607Mo zmcEibGG9Y|V6A$#yc2r?uS{B^CzD)Jeo*j9&9=lA6XuVOT;liai&rr|`#CbcJHt$w z;B0@}lbRXMb~XZmZ&w{RNY|P9hi3Zc+qvq#t0$AI`k+u!^uo={8IFw1N$BC#x1&Je z;T&k9Bj=eB%r4m(tdQwSEj;hi3nNudS6#}fOJbe)N+0vQ_S@9;H>?uLBBgKGZK|wB zAxi)L50aVdtLv-{>_JnkrZ=cU`nnJVhx%?-@JziP6wm~-9>}Y!1l-DfNLdS-_+%-F zUl7e+5*?WlKi}<*^=^XT4Q#hlOh>fs$c7fDsCMp#NSV$-I^y%~>3yNcEgC!@2_Ahf z&r45nyC4QqNEPw|px72z&CbWCJaB;$$M&jBE6e zoLa;{z$6w5Uh%K51oJ@(=w|J05B&gg2=&z^jBdCy^On$ga=fls6Ff8aB3&oMGE7g% z_#k&#ev4&!+FvDC=tvIAo781|mTl9UIO8y1>@ot+t9#DY8H7gCSu>Ghe}oKug6WliB~;D3r`8T{GBuima_B>yk*t92!i z?~(AU6@R89G^o|B?fSm#k@2fjuRj`o6>#3GiCleT1S|MXHw0^6ZS)Dbo-Lf-Qbtu! 
zE7ZF>q-0Pm4t_DM`Bm+FRRq^9`0Y>=;;Yu1bPsq2ASnWz~MdkVFf1?$lV;6I_6jDe<3US!@0SEbH2% z*6x4rkmjt3-P@lbi5f~5EbCVI*p?o)^MNqAKv`c&TLsFRW>>7{MLeP>*8@2|G(#RT z1+VA^Wx*A)tSJ4`Gf>tV17%%hVhfb@CMeQ#e@a1Fd#xq?l-CX3Es~2_v3Zs(Zl_vJ zdD<}@AZxD+WU&@X{Q^?C$&3o47GT?HXJEyDlKcV9!mzj9vd>4zQ!?Z%sk1birJ6ad z1U%etUUDM!*jp!K*lK%QgLB43v!4_=Yg!V{f~m19b_chx-PIX5OBT%*Im$~deQb`B zx$ua7yJEA|B#f0TxI$&ZVX&R0z%1?Of?133kL}}+2D7$k#|6QbZeZ42wud8vS&x#y zZdi~8X02ug4p3mTWb7kd63i;c?aN?=x3hM|PUS<7pa!#s@MX7LAYj%p@RT>nvCYUD#`!-{ZNvF{Jny!Sc8nPs)XGoG35muxKv%i}(n3gwcy;V2U_I@@UeFPTRAOjy_N+CVl0iI3H0$vP!xRCSP4$1FD%S>ef)tR22pI6ZzeT8Z;u)UQ|(f zL6D&Tr$#@J%<%;Ssrfu#DKFJDHItd~<<+T1Z)7e_w!K&d$7)g@MCpALv;ACYj z5xY1|!CZ~&C!w`{cGe~ir9f+MSxsk9Zj#Z8og|g0P(U2}L{Ajhiq-zdfe6cgD6fV$ zpPT0wPtuSO`zd)*(NSs+vQnS;}YE`;_Kq9u zmTjzqKsM}XU~E>?A~NK@k+V_rQ;`(5Bq-Z9!YU^lL71j1GMdaN%?pl^HPm1BC02*( zf;}O(P*zrl{o<`8h;8RjstjAp9d+I#QhCRlN|bonTx4<&Q@OuQgS<$MjD-T4zU0+b zaF671y>Xk>v`K#po=ZhggXa?$1U49#KK(UbAsjsbkqH8x+Yc4$Bl%oOEb!bJ680h; zMcx~2&lkFe4iJ{xFf+4s^LR=4%PcMf3%2&2{o*{+ya};u=zk$PO?wybWw+Fznslfu zxGHHEcuZQUp|u$t8jBXwIpV4NlZw{|JvAk}t6I2hXo?n~tuEM)+q~5!N|3gd)YA6o ztTex%J^|HAA#E@V=ST#}38d}Y!;m&%6~hUQvy_8RMv3ICJw;39At2Uq;4>5FsZkr`5mTE~f9X_K$zLR?& zi;8?Fp|{9l6DA!A^!DrG1{~67N%YP1ov^+Vh2D#;f$x*-!G?5O?t;$>l!* z;wA&c0mE_AwT|4iXL%zDnu(;CX zi&C&u>{43Xt;xl|HZ7vLX@Hw7s2A8+yrTh@Z?$F|=P*(A_8dBuDK$-MYY0QoA5G-m zuY^nu*?_tk;wscXlF(pnxq`KMpg39XK_F1LPoB&G+V%pu>4R|mI!?g?wO!ysZM!wp z<{q(c13pXg89hj)lgI`=f=cQ!SSm*m*yX_pYt)4rZR?kc*lsobOPc0Qg!)J^r*fpc zB~%0iP_a6AT&z&TZE~NL?N8=h0woHWBZ5?Oty&oEOaraWQza(}lf%8WP59C&(2(%g-7z*jF9egDbwwd^mw_Wkof*#Dvv*I3K^I7UQAQi??4X6CL zBQ%<-%}=R~*(OLAo5_%>TT;={cyV_4IJ*P4%RDcOjVFRszPT(`g;Q_|SEMssGNS5@ zr{FK>#3ogMS8O1ksi_(`%5rAIM} zF7743!=c0H3#1xHg$|7kPow&UIsZ*6u$=g!^TlB}=L+tOEMoyjdSj;um(zh>DbG=;as;~p zt~+z%1-)xq46j5BKJP6G)_q^A#RkjR)xP6$Gca@Am-#mIXBqLO3xpP<&?&Q!+=g?a*qt4@hd*jgG4V=rY)a@z_EWu5S-{{CaDd1d4>(=r6F?EMdC8i0FQUv8ALy9$^;_xB^ta=dmJVj? 
z&-#dtDcJ!z%y>10g%%M~_GKAbUn<~yM32K)Rq^bZej4s`WWZvP@rUXns(*61I)xwq z$%l23i*4SFh(z@Co+nAbIT&#Ea+!NaF88D`uw-Ls360?0GCL;JVr|K830i1X?W@{+ zsH)*mmOLAZmT)gvsQsIt$_y^D_xUVTNApk-TBlV-2&6RmC2pN(NaUulX3x-gZ0W-y(r|!_?jyeF zDWpH?P4;6qrMwE+%CaEB@QJ6BVMthrtVLm2zoo*I zP4-7AT~4h&TTl41t=u9U4#Ox}X+@(%!8NqsQPMdBbQ48SR^J{$vA2(IVBa>m78TI- zmmWoHH&O5m$t2NO5;+I$l5ago>R`zO)_ua7>W)L}+iHATN@EFhbFFHgDffc%p6MYk zIgqU{TK!MfkG53yiGoRP-k*__Ej^Gq{|GW3^+G(Fgt9_|tQTbaoF4A%Euvy> zQ$D;Ja{^guQO>nqpf}Y7tkO>R62)qINU~g$Ma?kow_!Rnz^Bei28ZS))<{9lN-W=s zz396$WzJ(}M9||2Y|&LsY;f`rs=O-qGk$rg%xV&Yu4->EIqOv}N{ErdSO<(%3mT{- zO{}>{?Au~y!U|j@CdBMDzMlF@ujC&89d?v<>xK3a{ry|W_< zUubgNOgl8Gev5Z>VxCZIl|TA?u_(5bt>uj@-GVu%B0EEArLuRZPxG<7JcspMF)x{D zYwjV=g$EJ?GBRdw_Mglu;*U1!9gC@XS&StStac<&u7W}zKr~xwq>#JGBSQyvZ8G3mQEi=O_M{zRKYIIjdVLQ zPiavDl@l&BHAVgP8yV)lGR)PY_n(*T(ObM;2K>H~)v^dA3jRtK416CC$_GC0zPnzE z=Ozlad?~}pr}aQP;XBM6fDqL#_pEv)~WSBC5HtE*qfp`Gqck6geuNl4VA^ma|@qPHH~;{GEShU)$KUL zMO?MVzvMf)?r~qS?1dtwt?4T)4-ZAi_rh0Y^Gui0qJPi+$_)_A+NSsw*K%8FwX*~J zWhu_(7>d!HOKethC6HZG)DsNn@>^WqAx~rq=kjwQ0pVQs%BC5Jj$Vv+igU@{pT@W> zCp)-`hf=wA=PKU6#xO27(><%6(Tq#>e#5u)ujZKzEpB^59c&sc7AcyTufz&sOH_B20|Ffn!gR>k#G+Qx&&wpz$Jgyd_->)uEiWjcvM$ zIWU$tm#!R^(U6hB+fBT|4Ns4juzM7Mv#El!>6Q;~rETu_|1T#l0W9}WYRt>>Prl}3 zr~Z&If-+(!xncTaV=9bAC?n=^!}Q1BQeoK=_W6IAJoU#TsW6(B5nJbm>5reK!T?V* zVo$qa`s2r`uzU%7$PLpUV^d*0r44i4F#U0AD(nggyV?!YAH7pygCuN>8>T(KZ&ZFlO`?lsUZm_UgI4zwHmOv- zM*mn#Ed7BRLt?dBiVqj4VQ#a%9l^IsI#Q)}WneA<)g@0JSbV09+&K-8zV?0kGNdmM zpQcVfdUIx9ok*Cv4-}KtZ>Zm}Z=09XHsNCC>N~}(nMf{Y7AMX6$yv4388Pw`d4r4V zhikdaU0t@Vp`xfddV418DEH|w*c{5O@oaDK76m&Syw?WDG>k|yvC7Xg{!?D6(NjTOU#f$^lRugO;+xS+i>2VTPJ8y?K z#r~S#gCI{eTinp`&r9B?>i^EY->E5@4j?^Zhcd(u0NdX5^5IrBs-fO{_(4@ zlqP&Fo6_ql0|7OhK~s|+(v_aBs14rHR+Bt`(3Q5}CpxV@RImX}0y-4v08TuI!jFm9 zf)PyAQDmhPAnvTLD7i%GAO8VSs&o`s4jrYRj$$>52{Ur)Nwoa*T5!Qhv~*;HnNiK< z3}+3y9K26a0|Ts{0lbm=e@YZz=IFn?o|$p-1-VSK8UFC8aE#N%zSi{N8IcQ4h}?o2 z8dpm7fqW)7oY~8`j(yFK^&L6r*wxW0E!!3ypQ*mS8oq<(44y)M03WqMYme&>}BiG|YQYoZ-p?89`3SeGkYbX=4I++8{knT<*-fS(o1TfSS=qmpxU=4kKcVqimgX zu?g*zq=|w*ej;c7Ar%tEIpJWqz7W)4TW|;fMMeW;U!O~H+J2cfWHvk_JpKd&HmT2^ zl**waH>K-dXQ}2FSZdS$rP=Ra_(exHuVIw6*7ATFIyOZCxYyyT0mvCVEwk606KT!depA?*%0 zuNiv+@xKX(*&2I5N?gA_-PFKlg0!)a^!Rt&$|^?uYrY@9j-c3u#^2p5Q*S`fmb$WS zwa%6rN1(&`qOPF06E{X;tiAdJvltycghoJ-8_Vvkt_8v2;3^ zeL3K53phrYBqLpAUDzhSW*}-D0XXy+Kmg@-;&rZhd-n#3r0@>^ewrCV4fr-tJlL0Y5qvDpeDw$*E*0T^aAAF3=yx^3k2%)dR>{IJd|`C`@>= zt%8)QTNwnmL4GIsE(52dS!s5QYaFhY?Pf#gPI9e0M9i6vSGik51#%hxI4bYH%h0MX zbK+@Cb4ftsO5)adH{UivJ&8JC0HFt)(@(GAOvy6M`W9XUhg+gEi}Zj$-X~2{cX63wu(*f|4aJ&xug?~{O_eRyj*e*WKd-oi&Zu)JTg4|qf1ytM4FA= zjHkjzdH*Tcq5}-WQ(3`2;r614U8o=M3b$wCG!_nes@eu5cf z2HP)lOY`1IKG^^asdsZ=h1r&u?B(H)g?CH6^B%|Tca)l~KUvejCWWl!L5C8qkk+F< z_-*d&1%8Bl=OY-$rfiM()j2=XPZx9N)^Cm7EJmEYn`d*?b%R>*3$kTSVhj25BER>o z**@|8v++e#%v*5?g8b3S4iq7Y8p#<>4S35$y(gd1i`iy-s?8=SM+dD+3VLW`imJny zKNVgobsiKRFnUp;dg&2qS3C7rEhd*WGA@yvrSij@+c{B;rhwj^!ksbtOOBLj=P|(~ z3R8B9iZD;49^)$3BD(4>GEf(hC(jx3IYU0ZQiRh%msoepW+Lxxm3$kB-b%N9muuZE z>;Rfu$^(Vzu3I%{(h<6@zr?whnj>@I^1$&T7E5;E1rm0Xw2G~36i+yCc@|^$knNuu z@QmbqzDGK6qxx_CN$B`o^?(U_RR?__*{M&^-94O3Vr2k3rz2e}L3&LduC!eZp|kxR z3E;q#T?iu=j1GBW8XXxO{R}_xeqxK4{Ub~G0bY;wZlC?7EI_4_?=^|YXnVQmdkGo@WgN&ooyjfb$4xOFY63oJdYB@pae= z5GbFs)?c4D{U3N?0Gp?vpEM;@4JLs1GQuBplR2_s!lr>(}qBO&Vl7p*p^Wh!2x3WkteAAl;!vEde!FH zFafPypL4Q_<*DI9D)z$~$B|9I8Jk-xd7jHhO}H~BloyCxl?zDoZA`4E;*}ZFcDDtv z@;m?glyqF=Ir(ue$_^jMzt_jO;nE&zH7FH}GBETl`L4Sn)ub-#y}Y%)$!LE6&TNToGtH4@nKQ zl~CSdis*W=D@o@teIvRfVjsf|urTUb#d2FN`%0CW&4!+q%Ij2UfA(SpI)TWC(QG8| zxGjubB=30hGuZr`z)u=IB=0846QRogD9bDBGEXv&es8l3eBPVr{O#5j{lZo4f|GE1O 
z39IzP;HrT?{7tsi$ZHs+|HgwL6j%5Fx}F8lYxqEc)hIkr_`tDNqX3nH<}?aW5I$hJ zMr#kiM}R9~yx>3|kRH;jUr{^Rw}i*pLn>abIuD!u{!x!ECTZwQrubbi(6PsGdaFS( zO?-9aSBcD&ypBK-4KwJwzLX^`sr~*Mfi(n64AEG(HN3PR_HM?bTXfRM~aX>8uzaNdc6GhU~gWco*P9weM+iLM!#9A-0 z;9B#ZlUOgO#lrIEM)gY*>k$I!l1&c@sQ5`Jhh0BD4yB!71;+Y==WrW4SQ<`P3+Sr; z3rXzvNe7D|K9IVc(8D{kgSFwEJ%VS1zsU?c?ff=!j=!|2bB0v)D`2GJHJ^WR+}^ zu3_O=>&rx!XYeLI03YDjmVhb$8c&u&-v8GAl+cQ_<{~LU){icMUkl6g*JLH?Slz_YiREakP`))vEAzLcMqmOS>*g*9@DUJT3 z+KSW3(EBhmwCb=^lEFc%(%-~h_(iMEDd+e9Lm6eDE#_93A9GAa5P7w8pxW8ZQIYv) zjA)hZ6Ja`H={3y2;kNUZKZ>mujf#_#x*sM5ZAPkfW1`^njr3vX&#b1q36ki^N$ad3 zp&azCA$p*m=)vpsO(o7ZT&-5w5$OQwN(2_uiig95p&Z_r1H;waVP=6D;+y^HHDoc;9~X&))T^u&J*bCUolYKtRWCE#3nHhY@2pnnhvlO02 zWcAq|o$z=`7|UeXgq}w5MTL%lx4L$k!9Td_!@IYb!?a~(6!lK$9ea(~&7d2$201=t z)r4QoE&Ec&uRr7WWz~|Q3?T#MsQ4&5A(N6lp~chO6EU*2q)b9Kst-Bs(2WDSM<5hE zwBTUY?7yJ5Rit(DdWC)~ne*h3gd%@>GBzv^~I{a&! zJ_+X5L@JA=dUm3R9+0xNafy~_Yw%79lE=`{xs9)dhA^j*e~-&-C?6{9Qb)W`S!>x= zlnf!o-CJVu{s}nNM6k!|o>RmEI&A`~j0>3rFGf(GTUivmu(DTh9365Va$l7#GIJ6P zriJ}lgn)J9C8667@je96AuGjC_d#N1>`KPPEk{J|;;mK1r71ZG&WefIqx&$k=TDaN zSrWyOIS6VFt0WVVDX?)ngT)j$kL+2bT33ohHXA?KaOdsSIe!xtKT&|jIV3h@A+i0D z(Z>kC&%uhG%?JV6Z5LfIzib0l$ZX_djB8KsB^Yaey_^x3$!fsAllK{PI9Fdv{m9T1Uan^#*eFX?l)~zJddQ&5*bHB1103 zt)G@k<3*F0=aJ}G&Q+WzX)*^Z6mjinuo$aJ>qBQJ3ihv-1`BoFQn3IR&4;Od1jSoKThG2hUXBD(?a`` zj<3Wr1hSoJKw~8b{afJjR+eq`MK8`QTPvFzf*W=?-nVKemwIJ;zX)<;b%VNF1B`;X z2G8osLel9B%GRO`Fby6952aSx^uVOXHASG8r-P7R~ z%p&QqX9(lknLAK3<{3QwsDTf`frfgfHAV3c!-MLae^a?|40u!&w`h^%S0PeI@ zIsjV+8G%ze{7cK+;sW070XC*UWJt@;0af>N$#Y0EE;HwtWZ=!=VduQdQ0vhf+dKAt z{7^zuql^NO@=h?c${CQ?ep&4Dn6Gx#>q-aldb8iZ^nvf$WbXI+Q&A$E#5Vi^NN0CP> zTWT|unG(C1DD2JVUz+^AglfFDudQcra7*UcmVv!0R)j7`#6dN#@b7QyNz%|~i)#Ln zk*WVJyeK-b*Yt%SC9LTSzv;B*u7iKUtamTu5206TmM0GBe+vSht^V+C_Q$r|nz9u& zO)G;uyyzd?>JN7$Rsu*)pFVw2$Y(BG2YpY`$CSOx^Chw zyhamic=UBb0<4zno)zNQ6TDeiiWB{#hP;Lcht|g`WOt)Y&XA`h3M=VF@-TR{Orw|Y zV#yRU;eBLG&sKkxZ||Dh3h`;E95p&*nRm+6hdgV+I*G7iWiId60JTSb7zJCPTbB<666D)i()n#Ls7WIs;lb_Y% z;aSZ+3~+0aLAytx3x$)}y?}4AF)bVYHm=5bSKiihsf>R7^yIRDy+;5rn(}>qXHQuY zZ>;+M!R5&3I}rd&gNCaPjZYl8K}EVjv1TfgY!fKPP>~o1Q8aE>r*$=Cnl$7z8e+J% z=owkE_MQsR>Y7IysBWex_q=r-PR?5#v03yqHz(NN5nDHa)=95;{E?EQaA+wxJmH{X zKW~L@iq#>cHAewuE*vR67Oz{LxQo-panzcPe6)O6|J6pf6sxwQC+=+$pR!Emm|D+f zyol9W4VGQp=`g;J=sic&NJLQIMf+V?C4}W#NarzOlSt%=i_oWO`ouKhROO`>n$E;} zo{Uk~kz0l7m)PH^g63+ZhjL(iQ0{`-`iiTk25RL@OLv^2c8xoFUgwcl{6^i&=p@Jl ztNk`fqQJk~P!wZy#cmR0DiDR_FaGDqr0b~{DwkAIl3b_if#{Anm=~NyS8p@Toy}Hw zn933Rnh4Y6m50<^1m`}i&)7tTc1;cw=w&*L$|O6i_rGc6rDxM&OEfK?rso;7tR3lW3-O3Pv-hD{_KTwPr>*`AM8OezvbqL;zzV)W>C`1@ zU!7{4q=DgBNQ8j1Tb1#xMa3j!QM4NGA^U(gZZ+Pi9UohbLG3tbHQuBhd#%Rna754u zTJNlE&h&5u+&Eg;()nLI-~{Lu%}Nvu;?}Ta#|s&9iD_1jbuSN(qOi;@4ex5IIbp$> ziO_pabq0M{Uc=6{%kXtVq@8A1(R)hb4EDqE{=uA{kumX5b|QO!-WxiT>pf1x}Alm9>8>bCcEt~LR}7>p$dw45x3I$2$OFHE%Sfkun(UOVHPN3- zx%N^iL2#yhZy@-H{zBr~OIdF5+@wC#o{yw^ZdB{>^g9of64UQp7p&eL&P;^H;8QL> z3uBU_n)s298#O=^Yy-KduH>wF;Vz%$}n8NbZ$ zER-@u10j2NID=zk&RRT-p`pkxby#GfRD_T*=P*qpzcdwDsGAqf`O~?X8IfnDbbn-_ zR7ZBh`?UQ7!Q}?Oa|orZ)N8?jzVa;g9Dg_9SIgHXUMq93xmDWh&FlXt4A-$vLpz^M zISz+}P_9~!{YzFSXQ3`oByKKd#rD`!ErBVG1FU8`+UhH-b~0S;uS?JS@4raT`%IJf zEH`hP6roP43o|uwfpLX&I*{Z zeu?&mQ5m4ai8l7fuM_Pho1qDj zUzUgYa$;?MfjP}7t=f6h7a4I2ImQPk3WhF~lUTtud~$~NS-!V?k%CM33OG~qeNOEx zpLf%Ob8KgAjogXcP*q-zp;pf-@21cm6vb?(jk^*ht88!EoxOcgf2J=|J1gFU+h&n! 
zQodz1PGM$JHLGzVj%x3lR#SucIKPvf!P)QgejY;M3*!KF+53OP*^2dRB~PFamiHq} z&BGt%+MYKP1rw-$puW>5*YqpN!~c)n&MzuF2PtT8Shb6L6~Y^=3?U#mIHCpPyp)HwI_N6VtgYLl#)QL_4*)=<0K3 zNE96WyI{U@Ubnrwt;Umeo|nlZq9661<8_)E=m~8|PVU+e;{jQ-Ju3C#z|{Dy7>dtm+v1E zEnyA3fPm>9SP4a3Cieyejp+#9l4=kY0m!12Q-sn^q%zv<%ox)UszV=ezTYUgD$Y`G zTc}T}TjF`iR_Vsjor!{VbRuG$==j~fQD=p6)TZ|sjOfMgR~uhE#De$~HgbQ!`P}E- zFK7y6G;5$Q63TU667qt51cmcB^Rjt@RQVRlxGA&)H4LROV?F^VO1tqeOIz^kQXn2M4pIOlmG&31@C+lP*GPv>APpX_iO(p+#I z2Qp6JJLX==4wBDpR3X*5BmL1Up~!dU_U@u(fB0j#&y{NI<1#QG19J;)ZVL``2PR)l zlCD7J3^&&`Dz(|dkqfgNuB~s=u%Fts{t(#Q)NHrb43>x((05wxj?;qY*sZIxS(viR zcr)G%7Cn&7Fv*ZOZ`;lW{XUPnx`FYylYkva{Jc9XAJ6}qu^Fz)wjD~uPd2IDvLrQK zq|+t}j-?!@6E+YMRvwsqK~_stq_9_mJZNd5P{F& zNj*#bxSzrua5SbloX^F4a-+m~i|`_K@i+Q1)(rQOVx4}7M+_ZgfyDLpyCDJ zotCNVvnf&Vw`YYj3+6^fKSR3sxru^0j%Q@^TRzPS&O1VR5Ao&mZnPRtHFRh-$|F0h zsb324_(%^&7xqcR9(N1om(J^dE0}t;fRwi|5zKBibT-qAH z468cD43YF4i$>Rqpw75c77g8yw>e_4Vp9nnkBewz{S`HdH=+3ugSz{99 z$|QJ(`*ci#DkcGdJcJXyK0kdD$N(`)u8vk+Y7SJ{4wl9{)swI5Ibdknys$2Fpi1UI zxez{tE~>CHS7i>!=Ie9Tu{|L-)`b5Z6PId`@Lh^Plm=-;SO+xsbIw5=H!Z|lS zBr+=Dv98vtI!V6oxck$2vxn}}Jk}eWpyB)cj^Rban$J3#T)=u^Hn)X;;&ag?trGZo z4jl|wfvtUi^^9=uie~r69gLuHb-CGkl*c~IBs24)BdycRxCYHYN8;^Z!+P9 z5`K2~@ID>tdcq}4pH52qKtC52rRQJmhO?lPtqvca9zM(sXIdn~-NH@%a@_E9)2gNE zn6hKrP1&dGY*|lI6*p64zzXrf3r-Md(mW1zT3$4!rN zyqn8KUAf5SlOARN`=-U?yQ0Wml^*2{Hwq?M+)lq2KK`&M&$>~#eVL5X+AYfcZj>K& zMQQIAF5?^m>XqOSCk#yqGY*ID!QVmZc(`!m>LZawO zRuQ0=7U{?nbpq`Du$HPHySbdym5ZPTCe*A(UZUtwdzmLcBSU96ph_{JSXo5;>n#7P)?S-I-*&9ZZj&sV=RjzYD_ zIEvK0#xYziFpg3c(vCjrX5&$%rWuD{U2Pl_)TPETS=AUvox+rrlrTe`XB_3~hsH5W z4Kt2;>ICg@8lXRwZ``u#sBGh2B<@bV$2+o)s4t9rvAA~_x2z264dZ@B-0j9Kb6ve? z+^ym^yn-XMS3PO`H;emGOSQD-ztt1zY^L@W_uTUS?tb)0_!wnLKa?oSqCud?U)Dxw9>Lv^j z5))1!zVcqdV_UK+OXnX`IqI%{XOuQ)VPe8p-aE8&@5)MVh&!+=?3SubyE&sK(P~$; zh4#chTn`Ng$9B;#!qU8RxMsn7ukUWqRK{(cyb|NnZhZ3bIobFu5g*+{-SmUHaCuZK z1cK*f9)G((GI{f;D^Caw8g*qM`kaErW4mp{s}U6ArE#xJrU&ROU-X7Q%DHTi#_$vz zc_`O*UX3+Lr#O$uz|x=aiYmgRm9KD`<$IHwUnfwy9G1l>kKC^1NVRRmMkW4!*Z%}R~3smL?9Y`+Xf;8qIc%I zVfm@BKBxw83{Zb2LZ6jlMsI_9_zVy~~8pN@=zaF>#j9Z?J zTaD)ptWC$`UWN@xMcS}ctPFh}{}_gMQL}(#yoZB4)yZk$3n$0hoO_KkzdCX%QiNc3 zb>w)bCBNI>TkHRy~?S^KC@*U6yYFFmZ@!5xVFRZ<2bOLT(BUnQhV z?G_2&P2X3&{ePrKt5FVHT{Kx!2*EAAR}*EmuHX0~eEw>D%8bu&e4aEuCmWwqe12to zEaOv-&m!aV9mhA8&?Fe50lpT8%RVgJk_^*3ouIF8XF=G0{NlT1XsS`im_Q8Kv8xTsLY!w)d0eMsT#}4o zm|$W&lZsJA43qb=D3ZIk=zcej;$5z9jR8MqExr|t8 z;jM&s^}r;1+2x0aFHVNr%dSY~e_2}khOY2Sli`=Ah1Ye3PfUhikrrM>IBQLjkcu|# z%x_`4k=*3#-}SMj>^U{gvF>VC2Mzmu1-fCi1y zFhOuYQZ6(Hi6gMg^JNkxM{I7A#qgCz_X0~!vRUSdmP>9*rE2~ut~kBB@&YPVuO6cX z*@*kEUYV4`_`^FhLtm2z&q6f!kH2EK$Zp&$H@8u1LWhUoxCC1Ss}m*nq4ZaLF^JFK z1?xbco4WPCmAZ+Ln|a)l#ey5af>ER|FTVwfbLCII{3%3{D}I7J%4yyU=z5NGV74fA zwRnsO0bbzU+=0dMK3rGTKInR3wJOVY+9gT&!=A5yG`4YIc5r)5J@%qxKSUZAeMzs6 z1v@$BdfbW@*5r#C zE9`lqU?&&%BjbuA6Ym`z`wDJP z!-@me%dgoj`PH7a7eoVVH#cq#y@{Yr(w1#)5jw53=Vw=5wV=lt*H&W9EU_XUb5`g( zHcWTy6kg$9_{B4Nis#%k)9p_KYSW+UMUB0on%Al1ri1fI z@E&LQVDH(x!w*QZ?1LRH_Z2UeS~D=uv%-m(Sy}d$6hAQSomRr?Yu^4pN%S_8)z+$Vbgl*uiVB6iaW*(=k>6M z-rd170Ar!N+KeN0^vZZlg4FLoIFWy?(6cGM-rp`0f-=tta(zXre+}1Gw7i0`!5&&# zwKx6buxiWL6rr~RmCD$>$yRZ+vFSK`Z0_eVhq5odt3vsar=^n(FY_;4hYiqxzftQ| zR?~KBWsWy#W4^z`SvPp=!jI&c!-Xp{^_ZVaz8rx5L@rg5nI7g1TqdEYf`w*0>EXy@ zGMKTu$#$O}(X7y2qn@6GU$8n*f3G+W%VF=0l5Oa8*V7P2R^%}u40X}vWUteNDon=l zvkjd*l^Gh27GE^)!l6Q4#|4j1tPmZL@HawT*b&iXn`}&qWf|1&ls0(__}%}s&0`;x z%2|zngB%X4^_$P=c2`@=bszUKMU2uQMG&iND%VsXKFw|WBsaO5ds(VUReEiYpJtMU zE_9=gIjp3cOi4c|+Zu>!r4g2`o=^irajaWOqJ#u_-Oo|*YI4I;enL@w6R6vgDWRrMA*4$9u$$CIEF~+e=l14+3r}fgA zH%cF}LsiiUneo%ZuSo70q3ZA}LNXa*!zC@vWhTvO>AkXtO0l{RNUklK5Ys8(NNdaF 
zC1#EaSdyHhvbLJNN_HHx0JvB z9A&5F_4&DT^5m!2oF0_fGrdft+iVFuO5WOVz@DGeaNw#1xec*v8x9;d_gS`uPY%DNQP_e&l&@&srzd1)ESK|-8UTPpO?0ml>qd!% zQm{#5$rrf8IyH;gD~9scsc&$F@5s-vn&g%^c|{84*uHXkm1OtoK21Qgzn{YoC5Njd z2eHW&pCE%NtjI`VMb1=DTp~lMyUQI<#*y6TcCw`w{EG{5y8n)oipB@5fm9&rM8OvX z!1}-d={gyM>#p_}kE?KRfACuWDeX#8FQ-%60HB(q)prm=<7eU&o6hFxC+h|7wXS2N zB-(3)Ipd4m@w=H~#22*$82m4T1FaXv6-@`I;J0{szx}4X!BG!^Fx?mvq46Dy$eG>WG5=BNvsWRs0r^l$t@)K-^uiY z3C@+^uL$lL56frjztgwB-)i?;Yf(-zr6_{t-uu&`Frct!~MzmUsek zx`&M6yoXNLm#?gG+Tt<8e{sqLhrQgz7@6b{?vN&?75>wG&kb`Lt)=aQ7dB?o?H z-ni%dK@u09Mo!zjl3zr2!D%hSLuDOHvke|EH|nF`j%q6k$t4PV4B0BGCMYM|(-Ng?oGH=ti#V*{NEKds_PYRrArffxi4_{)Yv=TO_Jukno|38B^ zbvi_#y*uV5DK8KTmGC=Lj=8{VW=cO5YFh?yN((UJJMs(VMNnkGLfFObc%bYqXyM)w zxX`+TdXS;i5l5C(u@2YO>`BxPibDqk?c8q|dkac1 z8=TK>b3WUe(-ywQD@&2of;ADEfjq>qT<+_D2FiYZwo3A@#vJhrzm zgQuC|h_F~Yi4Kh>BG`ITeHJIMSUAO5AI-6?kK!Cts`VZ>wP+p24EhDy6Z?2SYUaQ& zYZ}$X!wiX7!|eA7QK!zc#pUK3QGmUMi!HfZp%YvHQb)KH=sOAuEC@OnI73G&o<9wXua;pn}iw?#50uLc;Uum#3q_E8j^tc9XX z1JgW`x1>-^Aa_V=!8WCv;v53m_^5W)J)`$p@401BUwp3e&88|1+$r>amgsZ1-cExH zWTVh+MsH4H%eys+`SH~SQS%Q+LJg1{fB?V}|!Alt2Lg-|pP z-N()K;5;Lg^K~!Fl4(g8y)m*t%_L7LX{sWroRa zC6hrgSZ!Ip7Vc5u-eB~F4Ph7(Dv;O?5)nui4|z&g+i{F#qRx>$7PN9L{{2u<<>z(3 zHSX_JiSg+G4sgsABGpd6AlX>Bv9yYh{C#OOxa*~15Tg(?G;7bkPN;Uc7OQJ~n5+n-KDL+S)6goZd9;gTq4*Yhl@@C2e2 z1Z2fRN0!DH)Fn;Hp%@Y>bE#rz$_1B8&z5AUB}%A9FYBDg>66`9aRR9bjkD=GM$slV zGX*E-fQ}w$v#e~^XOAfY^P!$6YH83XV<~Zzh~pF9_aDVa!It_gs}W!2QobwhyrMq- zwgYf7jc)KeA3v`251^1An`J5vM&4hP$ppQhjfR%uRAr3+mpcSUTLUA4;~%*VXcz+k zi)C;wl}I^QYdp0jNqDsIBl4r0{UdHYP-7H_6D%-tfbc8XsJKL$eI?EQ8V8K^L#!P= zX?EvZ)ueFiEW6d?3!IN*yRgkjwE}(_p=0XA{!Xi2eBvn1nG0*(C+hq9bKdi}^zM#E z#N22l{6W5AJu0hI`>F>l;oXAAPX^MgsZ?!Ws283JH#gFqmigNRNBuB=DV$M;i+ARJ z@PqUGZO2mlH*aHrk~cVC_NAr<-}i(ETW3hiPXTxjY5J_=0&vC>&oydokbS6Pq!UkO zVYA;gO@=a$1krP|L#@p{qP$P%A}uF{v1){G5-Z`E)_zo2{+8n0=M73-WR(2?FA?0CjK+9(fUm`TG zz1_4ryRgNRX+2cJKqxC!z4mpH=DyBpI>JDgmCj_Tj-@Ct~qd! z599g+@X^p)_bUtgz<(Qw%@;!2A7bR?S5mM?M01P_v#c+=pTb9Ru2>!4yzD5j-Waa& zN1YN1qilWumnbhj=H~v+CY%(Zrm#(upOaHBaFPoP{qApB5Zym4Ok>6}qagU{zdISR zZlks>j^ToX2Ct^03szYa@(rY50OyD{UdozCS&PP59%<}gzP0lO(Z)6(IT_JxxgVOP zD3s>CLkNx$u1|BnE3_bi>^kA6vOL7Ce<6+`fV=iRu+5LEQ*>T)QnC=*Jb_7WAs^8= zqal%EM(a~gY{^G5wg#2<{me0TK4+6tnn9hvY)%T_8OfM`yI7**zze7ne+qmK-4A?b zcROtd*T9nKcF*hF?rS7M_Bqvbw?05Ms3y0<*O|=iTDABUV}Rv+_HE$@f;-vR-rZd5 zvMzeq9?#sO(qM0Tg)amITZWp`WzVv;pR)EMK!RoTJjMxHbu3)Dvj%kKkKSIFSmivRJ%~zMHo2! 
zSge|y&^P>JS}__oWp+(ebFLsv}1AoXW< z^hdWFE*r(inQSuY^W9sC5u$rQG#7B@@P-UDm{4SfK=L`bnY2^)4&v>ywc@`?#sxs%y_AjUysioQ3Mr3o0C$nCZcb z^FMo^>8nMFyB z+ZygjVyhc_FOu9v65yS8Ti`;;eXisl;4j^c=h@(gdX3yWvmoj2Hpz8lLSBR6wGN_v zI9*?UeE9Tz@p7DP60RN;P`)#s^{l$sYaaH={TA}@alo#y*}kCNJN7eYV=`%luB{Q; z@q!$G7C%((!~W(Mvz{Q%xq4qWHfi(UA2)MRKR*}9PjIeXoqFg5SIm9j3@3TbSN+%# zwDjf#;szXU^Y!i?>D(mQ_E|F3%?f^(>TJ9i^f6!cy1A8M71iNa#@YBA;H_Rf(Uy`8eqC>z6+Bj=ee519bvFK$-iG|xY{{9Og5WEu1xPg)`^}lWylo8Q zMCHf5oa-I+{eFX^KiM61h`B?kA2|fF#i3N*V5gs(jgtyaEy8eVZ> z;^@hDGY{?WUh`{b%Lst&UC$smLN_b8mpfNykS};_-tt|+W0l$+@v&K+$QeP?pz1@$f2Y;-sdDPzwzTG zbCR##?8__xY?mw~#$8#0U zKMp50|2PWMP?d}{pt83jU>f~`DbBj?67;U`S){QkJW`Ffl+mA(JVy5-=lc9(;jr7?B;jql-3= zhP!#!OH!GXFq{uzVb2>jqLJtFDX@YQk~_$Zk|h+zjw(138UBKKHSJ2y!J9ZOu~UOh;=is_xOPs`Ud&aV9B#EGfozNYUUP zfjzSf4uFxR;&p^Q=AD4YH-^ikolSChTMgzV0=U%r?t+0M7_TFn5{z{Dk)uYCruoBt ziA^+A5?9zl6(o!c1^`uA^K*x+6Myz6<jFxb?bb{?Ipui3 zr&|X~tBwA#G}6rv?8;^B78DWce1@oSe|SK#0MS*=Z`unNaPu%5xwAUWnZg3B);3uU zS@oJSkBb^_FaFZM>+Wp&1;Fy<-K`ET3UgrZrf42r*u2$Q!k&mdQQ5sB9RDuM#jIz~ zU-+2lQA&{rK$ikzJkR;YNS;+j8qY<6m4YF?!YdknQiW+R>nlHdhiaHmP>%Fah5h0R z-N4H#AsnAO$6Xfa-*8*Pk8lddK7on~UC!rmqgDK- z?W``tOU*-!OL*oOgL#fH&fuBrZ0c^8kQzD(2QNj1zO`QxLSm{Qu(V(`E2p!!c^gjDIvcZuKr|fVO_%_Phf<{RtsdWv@J$UTrUeouJfZ|niX&1;QuDbIj$G0(-ug*;~(L&#nR)f?MpWb#^L^y3*Y zdh=XtbmKWQa5D+Z440I0Hm@~C2G4-u&LR4tIANY^ImD2j9xMLLS=Aw?N5 zh}`RHh%UOwYZml3+eUA6KL37t-It@cA8B(1KJ|_n6TXPcCC*+ae2{dv$=ly~Pg~P| z>cP>Kd#ftL+z?I!GWla!3Gd#(Cy_PK1p-iQFY|vRdsU;q`G2MCb%3e+elL;F-T$dn#ul;eQBQ0{D8HznEyUvR)TCIqajJ`msb{^ znYOHEExbp$`#_0Y$l`n#U(Ijv@ey!a=g|1kN7|=MR6LV&17)Acl)>8D-kh8tCBCnO z>idcbj0k0yUK#lTq3kcJ?(NnLag)=Mad=vS*cP5IQq?)gWTy&aJC!h&B!_R56qZf$ z(+gpnV?x+?>n6C>)3x}ik9CW=$O8H*+M4;%-BuL%kcQR*_c7UVvaL9|a4=uF?e_bC z%Y$^n8$6K^z&k5$Td*rah=3LFnQx%Y@%YSd&2Q23tR@+h7mZ{==?Sm7TR^&%I7y4^ z7Phut%vvW!jkTPO|KdoK_XS9ZqbO-RK3@tMs0MyHS6{;yTZGHRe8K@86iKd=Ah)%H zAcyho{vQDM+QtMW&f%@SF$)W3hz)+zzLb}SF+-h=U+@+@8429vYq_GAFb^`~;x%$D zDK0Jgz4PL6Vu>uu@-D5pqacwsT$T^yS94y? 
z(J&^<*@W_v5jk1rY+1;gFd{zpA+K|a&93yo7ziIk@@2IK!&YbG)oJ9(*^W<)=u0cp zN5u?`zVwK`d`kn>r_u7_-tKI=ms~Mv_Fu3P+=qb-sifIjeky79R_Y9jG<&0pL7JVR zUZzFmTNd;O;YKhw&=RdzeMh35Le|h=A*=2~fu2#ZHawJWmOAhF1t2QG=XgtR58%M^ zi);)OwptRxU$r_L1-EPoc)zppZvsKnjjXQ&clXP#B{4NYO9BU0yyMUyA*R>nKKhgRK7MTWWXE>Dw(cvFR~jCZf6s z8KM3B=#<5l*Nx=@(q1BZtCV4%4O2G6BkeE^3`Tl-?R^^M8{-J{+TsAh4t3v>+Hwz} zk^)(6h*$VHfW!3hfE+?j3vC~G*80z)ATlOJCHJsb)sE$Wt*D5OD2F)WYR6>@(2kXF zioh?Wi`x#4`~Ob;2qnSZ`?%S%h`chIPte4u^v^_dHvNZaM8J=xmAu%!oY7uSTnE{b zBgGa6_;%{x|BZ@+$Iur1Dn(xAE&qek7Tl+h8*RZ5^#aeDs$SrQ+9*b846dOzr7_@~ z3^8a7rcj2`7=#`rk=>>?Sh6_CpBdk<#+&#UlB+!-#u~rR)|waNQWmEu8jFr0akAjd z#&DW9=XJCZ;&P3>a<6-L;7dQ^FB+fwr7HJ+NxY-TPTY&cK5Rz0b$p{F#fNsNo@Czj z>C!@B=xuF8R7o$@;556XA65lS&JJy+OE6caN-GQHY$6U(HC9^r+0BHIdz=&)}_ zH__g8Zpl>3QFJ2bS=0X=MIn?iq=mNmk*}04qdU3`1Z{g2Q9GQ>AD^whidSkB%fxrq zUnLDli>fq2_{Y=WpU|NVfr>cr3*v(qj?iapysJA+Lf`Qw1@^8lqCn$udk9Yw+un~; zCJ%$|2>nIMP;YB_iz0WS=?L|dw$LkQ<6Qze(XdRD*7gxKi#?q`l@E&c{iii=RptLcfs~SOo(QKakDH=G zLLK6BPB{ijC@I@kBK@Q{lq9A^Dn^NvpK~InL^`iaB~oJ2mX%2bIq!t`MDYtKXGuvp z8w--&k?|KTQlT>_OPsw7Xhl*r5#;nGK&eENgxb%+lA$i=2+akV^4-T&gQH~3JS2KB z(Nu&B0=*FXP&rAr#_8Tf^-UMW>6-=&R?|XMNL$Isde=$iS>Z)hWhIE&3Y2|jOhskiQWK}8lCnE$sg7tZ)!9Dx zNu`5&ffB8?y`?C`Q>CR+-5sdE|4P2!+u{2uI;x9@$kegtyo>;_hbH)fFDfPro>cJz zhlJk41Tesy>R+!$2NdEcM;J+qkF@&C(hYi`I#Dtj_XKf;EQDDq|&Q zm%7=2s1DQq&Aij5Kw8V>|X}JC33Y{Y_ zs}kh1Xqu4Qmx!uK>2Rb6rLG6Fdp5A$X*Ytuj-+?yag(i0q4eD=M`QWH5uT!6Dl($)Q(y$?x@1 zXoEyB^xZ>BE)Bkxs4VvcHJ;#;2(CVPES5)czWKgDE6Tfv8c8CuSqiMDv9!Q_B9MY` zY1ArWXDcf0*=~Il(7MVWi4llBZJCIouKfH9A#aUY4HV{Cm94HS(7!}|`A?!x$+M0; zjB`}xwDD)vg^;b)B$s(iElT`F(r&u<5Xhz^ZJ0f+MD~15ocn9>68Rum0@LzQdQ2`V zeRVV*DtFzAXQsQeEGg@%W9)mPfZ#De9{uIvlt)i_q+l8!$S-Uj&WT>Z3(oy!7P;G& zZ=rdLp0EKEJdr3YtJJe>q7Vrg1~e|A|2&9d@@X^^p>k;1C^)1Q~oHo?{=5-OK z;+QMTBUj3qt8#bDOR7eo!dticO+4idTB8{e7?}RgQ6B?2+p)Jsyml z44h#Nr+(W`Or$h1qsMM*IG)< zH_n#}GgE^fByg9-yB|dhJH^mz7`>r}@KUVp`eb37C0UR7GE)L2M1dhzN$kTZbw9FK-#I5v3oz>D zit-z^jfvPkx%{Q#NL1Vj<{^oG4%5LouSlS;l@?bfZNC+KI&-mUd!s*HD(4`hy>Q$rD8>_Tk!A zSH{Xd8X*G~@7jmU{@&-{4b2UnI2jmZ-H8KoF7(W^%6YK|QLY44qc}R^e$To3VHw9k z=Tf;d$N^u*5eGM47pQR@e>Ty09pW==Abk1~$AY8)@JE~Pw*?bBXtGn;blXll*e z1G>p@-U7gW0>JK)5{~$C8P0zB<|gj!;{!%X%<^O0=(-8TfZagr0I$s$8|{ja;!%M$ zCj7&dpL44GZ7iXh3UO6_BCfQkQh!a(qRqW zI*SULC*sYy)kq6Y%1qrdF-ty#&)`Qg*NeG`U`64`28qmx>A})e-lMiYG8UD!})hF zJ39P;XJuYwVrsB7^B_=5fXHtNw?u8UrpIif0gjk`?zQi}7;5c$PX=9@*}3snrrqo? 
zli^kC=$)8(!c#Mw;jOUalt$M5LfL9E*t%cQ`r^+G6~~<$lG8u^uv(O+4-cHDj1?=y zc2N*!j@sCaVht8%!o%WDX^(XiJ6>`+DA$WcVqmDU^K-84UeTN+_Hurqxab+R)>uj3 z;m4e7owC~HS_$kA$^mj&b^InJ`pjlAgY#kuvpm=9{_55ovmhCRefPTn1!Jt|#ex=s zk*zGAu+Zyp>=~mijb_~wepNk=Z zaPGMGgww-NpV&yo>X}0EolRJoJA&iyq0mG$S?bd*v{t7sb{|LYMF*EH`Uhzywv|;1#%VX`#u(}(j!TIUsv&q#ZRK2sYK3t|g$#GN z@0OGvR45G$e}VGR9;IU0UQSsolX27h4n8>4P2pdSeg|5Q)|f4^je#8NAkMp4v9=OK z_fY_J97*fQXiyw&6OyhuJ0# zSsYH0ky)IFaLzdvUZdycR5T~@=+2Nf+CB#p&Gxo?;@d7;oHemlbABP^!Lh2<&S#T3 z6-fRtXg#rzZUx4fDUs2fjusS<1{X_P&6IbDV}HiObsEtY&!P2pqyP0I_6MG23l2We z`Rwb~^J?a7^89pT=w@aV^Qpz_0&gH;vZmC+gb5gmtxuMmjI4+VIP2&7VKRz?&kGqz zSe~h>R*irxu3keg8p{?!39P27gzRWG5 z5BOq3Z=GGkW#&Pdjm2$0V6d}xhc)shG1WEBV{agJ;&7~{&6u%1|8HlV+g?-a1_(r|N(R{S{wc_&z@&TM2Mv13X7Iry8kp+a@)B z;ayLuo-pSLzccMkKktje6a_=+-1yF8h&&L5(vgCWdjh7<{hhhXd3PH&apqR+q|DdC z-Ef1KXznsMKTnZz4~Bop2IC5KQIe52-a&E3@bR+kWSl!*l+E%BDs2oK@1RjeA50L= z4v876*Ze%(_a!0V;hry%5GS1+I0rx3A_2;fi+3rnn9N9g%QORs;8oVv} zE4b6dQs3#H$2b!y_lto%4yFgUC98O87e!(8C*QavfwD;JGQ_0ki?hSM_>pd?kLRe5 z)+L#^r%e(D|9orETTxJd{iYCg!2!$S73KDWv{*#iR%s&i$?pf0N?Ry2T$a5#Y-CB9 z*;`}|ICx`q`Z@mR1oiQFaEzUE5oKkuY{dvwyx9aMs#Vg{c*q!YYN@pO=y|GWRW9Y* zg>;yW(_sdaGY*}`C!ff0o z1(JD%J4In?_OjCYL+Dha7sV=5L{LO1U(ds;;UYu%RvM|?%G|5#e&scIS8dMnvsqK< zZ2CF1<~JU3Hg5nGy^Y@(i_Kn0D#2GH)}5miN%F}c53urRyWv5NZWXahZbBlUL4gL7x9Z=D8CXNMh?FSviOzk zQ1^i+I|A7&Z9|XndCw&<%cKaBg7PBUqmh%SA2a!(;16`_QPqf3h1Mvz=1PQ2hsIlDaQE3{(7HZVfzsJn z!Y4WA>k{m_8Jg)k_`-TepfA^(o%F42R1FDU&l_YOg+OYI_|iu zzInH`S?R6scTe^8%@dVVVc>kVK%9M=sa2So>WW7eP~#6Y+}ntq4BnE|>d(A?wTRBZ zI-N?mWUX%}UUXlKHNiiwAPJC3frjf&Zjl1ETAR@}6qcSXw^E7T2^vtObWc#P2@PeL zsV5oMRq0IA39=n1-F#IE(#JX3$f*spa0etdxu=|k`JK|_qakaM`l8AMaoZ;60ppGX zs#9?@gZ}-I;GBQT))(g)y>RyVD5tWmZ%{^@MzkH@s+jr^zH#6b8?&S$wx?5l*mohg zfR8^2N;`}(Fk{*NQV5Jl1ayJkqivYYITukZ zKm$>vwJIVWPG+~o1D|*e?hWi8+-}?3x{}5==Zdx_k^I3;Ge!F)_Kt?F+d##;n?}$$ zk2>G;bXEf`;~=*!$Y)#KC}6$F@uIgW%W?sZs5cI4@_LceLvQmn*hkKVMvr}FTcVpd zUAD9AY=0&j%cMwq9D>dB9&JO984+H`!Er`FJ=ALcFG|_-vQ!?P!>XR^7p8Dd+L#qaCbOUNnovDLQ91qi6K@ zTfap#9Q_4yeqdl1`T!1cnp7goa^9HAR5?`-+lxn-UDj0tp~^p%kM17=PV+nOW%MAOBY2WXGLxcjZg)AYpnM^ne1G97AFm`NcJ@r_8er)kVc*9T-&XJJLwt= zQwKSlSXm>XO?29mcX1%ud?P$jG%s!3M-Q<3-b^mY{?`PJX z%QJIPCNTrU{hZHkIO0nkly0PQsws64)#Dqfn?J&3!masIi!oX1MF$hUk=A(fMg|Zh zUkP6=4e=sCkhH?rtF;2#zY3q*t19%W3cdZqITdd2nRP>XW-dCfBFQ^2JOGQHR=dWY z2qdb;aI(tHDVMVb8rnmgi}s=GAzu2z ze=)=&fXE5d_1-5KVy8XCS2X$t48qG1pzaG~%^gH!4hsCV!rUFcQbre`?+7EfF}xq7 zR`lA+2s_uF3Ghica1d92t3i4rJg|L`-jMX&1RcNy87B9hz_1ELo)k&ksDNo3%!@!9 zRndjM_}{|&E(LGYnase{Y4PG;@bj787x~KC(&jZWMJmhs5d)J4`vJe0io0{Ag?|rc z;I@)GbRHrQ8u%^F)d#!5GNt^EndJy7iJg z?>dp3@)IN8-I(#byi2yNGI#`fnH!JpUXx(_E%?z%_7cKn`uA`5guQMowpzfp|0J_f^$DkC zxt{0`wgqe>P}wQ)+j`Sgg}FVvV__73L9x=II|#iiiyYBRURZQ*(%S_sGZMTl^N(_^ zeQpom*oZMVnt&3&C+uG{S zEaaD#omrzhLeD|4cCtg^S4SNbmL5x%m3DeR0U3#4pL>KPub z(MfZ*BA8wGF{T5$ENc2}_YV=n8Sc*Od%RLMT`qLPw>WSAocS4MCba|FW5Y8s+(Tfc zz+;BJIbR4OIGYX$uVE0qIq&YVxZ(|;Z^CSk-us+*lhl_qe&q#EEt-60BqMDC?^~_8G+i}Xj^6rA zC>(CTTT&M7`XO?a2CBz-&z7Vwns3eZE7J-0ThC)Lisq)mJ7$LCG{`%-Wn6Xv=7AL0#kx@%`=9KJ$zu42sH%6Q+HRMDIR z0!tmyVG=|pO0+d`QDdS*ymET(*zmQ2=lwer#ZuUOM)-0)e*P^RWMP4TCRsOsfZ?Qo zlmBEPjAR`Di3qz5Qh`W-Zzm9bi6w<`ne!FBlw=jsn>sC)bnyq`FOZwgS&PYcbXdh6RoG?tX- z72W)?+WTf27rNBu_ed*GTAyk4jg_nR$J)DK{S;SUQrCY{Dk7=vaL{?Lz3KBE<4Nw< z3A*0&8D!@USHpCtB=2}gV`w$A%L`rNjXcm;`23N%8`^e2eXT2^i)1zt)8gStdjZgQAlAu)g>p^ zRCY#=q&!ZtL}RwU#a5X{G9JI2a%I(((y@6)akeIn+6lsg(PkgSNZ^_Pp_l$*gC zC^0S(xObCUeq={73Un!~*O&2XZ*y&WL|XV7{wT*%tIymp^|9_vu$JF6;9JL_T}jd-{sziImvWrnGXW0mvHPZ@*d^TZq0 z1a9a427OLq-ONdhXYAJ4OSsFE`;gnX4>@|btbJ?jr57@M(Tloq(Ep^qr27o(%2L4q zbc7xx)2}>!fw3zrwW> 
z-W1311~UY8kMa9A2;SI-b-?omL1UQ#`XcA;TtA~PDSi_|3qJWZK6R2M+r8+VimUk0 zQFkY!y*my;wW_F7c*Sg_=6Ae}w#jtMLy*A3Y$ZZ?^=DeH*( zA7L}fMstt0J21DRK?_y0(lPIJq^QQXg+vEHJMFZh1Z2lS(oTq_l}J17$oRBEZFE{W zOl_Adk#^d|__Wew(t6i*=PmRpYo^S9-3~jJrl4q&_>S#H?JRj%usY_ESn~X~Lmu6J z@4B-jkEpVAJ@UbhB@gRuTs^To*GisAo%2X6c}B;x+G{hzr3rwsfq4yYu{HBjNgB|=n@zxi3X5IFTcU1tkf)_`lE<25JH&HhgPfnh}C8mZnFg-;A$*5F=jq&2u_N{oyXrygERC!~GpflNso=qG7X zNE4u%`UJ%}n#wJIQRzOYBD_JtQEn^TDsAG_O!6e}k=A6Rr_cHFW^d5S4WHvZ+A7#? zjB2^!4-zZ)(D23O4YqE}cWNbAmgP0eawk(dHa^B{ksU zSAjGCU9O(Lr2zG;^YL9`ahY^n_dMu-nW2@x+en`r7|JstaE18_dNbTT2#+m)4Sx6W zMLkdpU*vb6TzG-C;5p&42J=PIi%;HNk#M?a)hk(vNA~;Aj`)wh=TADRhQFZsT+$>O zNyNp5I9?C8MecYn#>+D6_|4f|LIYj?SswRiMDI5Oy>M%oFE=5I-viKo&zO|6u!suu zz#?Si8LX*^cs{-X6jvOPq*vN|6bG zvE5>sBu}Rzqgkx?ITxkbF>Md2wEBXN9_v?+@=-Yt51frx5f4VS7p-G1fPuZN!Mwqn zc$(RSjhO)1ydn2rB@5G`?7p(4ESL8=)ISb&YZBg3hB+b`N7&+IU{XM*a8E@RtQzRO z%yM=8($1+e>qL?5nK3QuOx`P3lB}d8o6|8<)_`a>cavLaJ|oJAeDi!}mnb}sHZMbd zEy9sGx7{80H{XzobA`q;Ao>258JUrcyRp6zjg0I>1(#(as~z_i_q*#JHEL({V40T;vz-Wftry zq;TApq}N_!UQQP3cm2VWk(R{?;d>RQvQcdYb508>6)r@HsxyJ2x(lKS%R+>y(2r@M zH5nSFy}tK}GCmvA!M1m#iUWnE%1h$*qNG;q)2oEk!|NL!nWC{%%Q#zF&;B+1t=iDp zVGZJ&P3W$dNf>1k!-dYZFsxWIM=iNi2Ra!y>O(bBICJ?hhVJFjB#-WNyCN zm9xJKf(bVaL)fKlt?*Eat{oS*DFH8Af`JRHHdv_BbhjtnRecK&5-+;@3bYUZ?G_`+ z-7bh4*1f#h^rNHvh5g`HMdp4k%YY_ukzckolZw>de&^Z{lOOz(@bRHX7!Voz*Ec6R zFgclHZaB(vfrG6BxGO6}DCuqWlTu`539kb)PO4Y;#zlV|y{*XHKqJ$N_-nuNAt{j!M%X&7J#2%}%ESJLqIR6cdWxr7Y`D^q6rvtM4j|#}5 zWczP`EV>B| zEqJWq$6d|w0PF~38}6u3HT;C(#re-}qp$Lpmw+j~@|TC-^~lzo-7OO$=(;-Y=h}QM z6>^8t0ktvwl1MY7kEs0*cAeZi&F_3I<&otGZ`;HcBImV=F}d7e_!s+%Lj1>j=5W<9VKbs&}n@+d%Q^-DAYIyZ&I1I_zPy3uj86fJj%bD z1Mc_pzvY;b09zG0E*=-k{jpQ0<@sLPk1b?l=)$}C_nb&a4~12_XQ=TIQ?Wo z4f1wuqqbcJ=tSwfib+!T0E!&+iENe2qIxjf;erlF2e~7!BS!QsRTy-eDeF;T+l)@O z-+XBe-SL@wn`b!8jFbydO}?OpUUWLX=KN%C+VM9pOU-X7jiA(-f_ivAOb$!DT>c=t zku0?IE|PH}eM0Ba69cAvIFN+!fJnv&;>?W4*&?<}MbqPT`d;^$GaSts_Yv(gTOO49 z-P@O6r!Z>^*n*qYyiB72G|=#|j-mV`FF*1tEVdSC^GfQ9oW=-czA#$iq+ALd;WP{g&Eb3C8tw zRB%~(V%_U`H{gXL|4if6yqO8cE8bPkv8i<_WBmyWIn&pgn3oy;L~SO7zM^$welLAq zo}+TEj^+%K)7iKb>e8NhyUzR}sv>pbgH|9k+o~q+Da{sL*&!dUI~rx%svxESTV57b znH^Covl*z&E{idpBcn`blK71bzJJK-!CtW9HVru?KykmPxGmij$9aZzmM}i{tr0PO z*=D6L`&uQgC9wvH(rN9ZnLfBZ+NoH+?H33(IUB!`;V@gI_pldTbm&hcw3UC(#w`M) z|DEQ~jT%>L{^-pK`(9!#3W2BzQ~o zoMcF{^}L+jG!MBqI~yH{aF`4j<%KP376Ejqm-$)@HLMRvZ1>aIxEE1QIC&Bawdh$gdR|Nh;I!;H1bpRG_@zV-TN4!r}jw3p)7iSNPwL#h0LH3 z*%B%uG&jRqBf9Od&d3*7l;?F~pgZzmb7J~TPJJjkpI%_=GuP01NcMvSA{Tmu9Ouyt z$e{KlDsAHZE>dunQoEk>=J5%d9qt_t`DXx{pXEE!}9Hlq!)RySgCDf+wF4=Dd z{q*6q7nf|1wxO>WZ=L<0r0bd~FT3yb7ng(@R0$Db%CSL=GUXn#I30G0s|{(1vb9pz zm%Pe3WOe{OsR|z4Spop^fJ%qkqY&7v-!Ma2PSUN$HPP*)&=QL8`Yw~M;1ZOx54VU9 z*g$uy;+rTpjp>hYD%m0j#bB4Irb|qh1|Nb3%|ecmo8B&$?U$Ct92R4}hbN6%2M)C? 
[base85 GIT binary patch data omitted: continuation of the preceding binary literal; not human-readable.]

diff --git a/distutils/command/wininst-9.0.exe b/distutils/command/wininst-9.0.exe
deleted file mode 100644
index 2ec261f9fdcd034cff9602af0c81fb7953ce2e2b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 196096
[base85 GIT binary patch data omitted: 196096-byte reverse literal for the deleted wininst-9.0.exe; continues beyond this section.]
zzUGgDZu7@m0ztSV&5DJ=(Pj@?(~;R@3Iw0M{uJKqJ9&v0)8z4n%F>w( zAFTk5u;|Z(g=bX=z-yumMs@3v)4&@rJnYB9`7ceHh85&3v4E{0SE61)mISbZ5PRF| zNiT}Ff{e#ItRR%^7M0kCf)#{b*b0LC{-Mz*N^N61$oGgIoLH)R0sv$OS!*P0CJ`m= zAPI&Yq#PpEu!FELMocwGW;qMU*(@}RMRm_MLwMZ9K3kErT6Nn%22UiS5GpFl2C_o8 zfq2OVG8;CKnbHQ5*V_g%;Xm6zWE;Q|GD<#O${zAK*+br;%J6Kct51(b*+VAk=80s? zj##Gi_CYVB$QD=$bP2)h=q!7^f^6sFww^A!z>J!kfnEl8Gz7nO4{HBzNbONC}H&y-ZyHX6R)I*gtm|g zgyTDfo~K8-H|7=}!s-BP2oF1Z-6l-mkq$n)+ZsYsdk$XpwPa6ANCq;xEg@t>>1zq$ zdAwVA{ud1}CCVH^G693^54@UXyPUnA-2D)fm;1xj3RaiQHn%9eiv^y&?gLu8hrjKc zq_5lU!2?K<3rV@3WB+xtrMcY_;I(Cdw|KNlX%nFxk}zA-gmN<-T%+Y@;0a3BW<<)W zkYyRMa3i`K`n7M!nN(izAxt8PHx4&~>C0jYk}NhN8QS%cuiNF<$~SbA%d-S+75~Cw zUycp=5dv?~ykeMOXeu(vkzV`bm%((hNXN8N+3%L?3vP%!XLo}O??Q=;nmna zsPi*q8 zLUit2ueda>ybRps<)K=nM<#N1JEK`zEpYAh5GF7aC^AKaoBWQ#vLZ4N8%xkDOqxr) z{T<>EFE`~U@Z%PwLC&UCEd#*@@zM&|)Upz-9!A?-ASo*mQ}NT<YS}Fik=c|H+h- z1W1!ITjdi4Y|6>??{FZ|>>D8pL6JO6XI!#*<9;d%mu%#Bu=dyhOP6eVS$pL6?vU2pXtGb_7rLzL=+^IEFqEQmCd z=MkHD4cw%&RJhT$cU6Rh7?-I#pVJRlF5#g0K(S^I;xQuoV z``p=0h)YJ7+={Hmm9g!|N8tG9_L~vKh1~vS{~c$TaPJhSXt1dL2o>BijsIrhFHUr_ zSIZDoB%A@gS~x);JPBNZAKjAxA13`1jJ~>kY9~MuR#NvK5R_g6aQBaI`diu>7mSZP zdf)=iegJ*LhO6#2SuNB#%Hy>XDrwa#v;JwK2f2h~$yYJ#QBzZ$2uQybk=8s?vADE^Xe zt9s+;EVFi-eP^>sm#pREX58S&4N%{|11y#wmu)}h-kVDUU{K>vK55x zwaf@JA=*f<88Q9{2=|4fV1j_5VeXB`X=snv91Ic+EpCV}`q$x9a)3qAnCx|n@By6V zac$kDM59_yO{to|KItrvwqLFR7!)DO2vSF~H0Nv~1_kP=bJ@S2ihTbLsz2sUY-x7m z1PxJ0TMN%5_dt9AI+cG1p~8*!H}Dd!C;VZ?8}b8Fu353>#`&(WZ_Cc!SLwe9RyV^P z7!wnRjbuQ}X4V0*Q9)^9#$UkXN+&bPUSCD!FjDNWF`dda-sA=D0OyNgJ%F7|2h~hc zgA;Z+T%P?Aa_$$p8u{wrMyz@Wrr}Qe6m@W5rhQsyFfy|DQ5Qg0M*rMJomGxPZV1|; zc@=fidsi`f7pa)Iu*^M44H6^#&k~c8aOCH&<34kd>bK>;uOB%HQJDkX^}CDHh>ocG z-9q)tM-EqreOCqhRD{OqY#4wRQ}g7?x}Xx~Ed*(KovSif*1^(h=J6u`q6U^$S%b|G zdn5zv(+7} zn@S(Y8o)}DWec^c^WP7@rX|TqSN43QERq9T)?N;1ZsURAfp!bZ2-2q>>OuqY}`_qGRoi`pk@$?K&9>1$LrbqD?JO;9~TlCSp z1+V4W_4>GAd7lq2uiOJ`r;XqX{CzrDA(rFOpFwVaG@KlxLNmD&EupuaX2)3EK-E{DINpUVmV{k{##vtc#*J5{Q;2^SXYC39h|+Z z$bWwLDhrNF+qVu3erFeq)%4n&fyu|}Y*yY~lG$wN5mAn(vNJGQAI+(-`6h`3lpgJN z>WAOpx<ofMeX5D)pez*7K;e-U5~(L>@7>DL~1lIa|QJsO5mv?LkQJ8$W<~t zW<`=sf5s$M-VKjnYye)`16ZcLg|r@!@WW{nH}-_&v_JNSv=U@ePe@LCq!(lr>c-T* z|3_+o2(?D?9lhWYvj&)7=m9L#CiI46C9k0;B&S_OT6Ddt5_OI(>;W7}e!Vw*nGT=X z6W&OEx;H$X#@orkxHM0qX!5yWxrj72TimE{)IVcF$Z2A0)y?$B-EmSpy;kf~ku_VpMb@ zJcee5wKPNd3Y&j-D>=C{klriERDD%<6;-1Aky5MTD8ahE+VUn4&9dgWy$XZF- zAfyZcyYU{Ez0XzDm{4|ei@omOND6u$-et1<_|U!*CM{bC@_Y6+{oej~nOC!FsPaB-5Ot*{eGrVy@YmVMXO{zex8{aM7i(%irEL2!4sWLf=CkdEs1z?=$og zG@oDGsVwq~!Ajw5<$9v;a=%Eh*AE6_a*Nf3>&!(1)+T9lb1KgGa@XC{72bszMS5!2 zZlc7i2dstLcy`^cU1fZ8*}CE+k}p~crC`rqv1aen5-AlgU>~?w|0?8FPDO{*5R7*6 zT1u;|yD{$_!XbN%T~dK0VCIlmnTw74{=bgfJ$Zm1dsP8+R^5WmH7jOZSdy@-2^HoB@7 z%%U?1h$qH^*ez!9LMMghF1U-yfIlVaB8J@a!%Ux;8LUNpfb+~W#?}+C01asP{N+a=+0*q=GMNc#bW&pxhI;aZ+|nivoF zC*^7~-OmeH2lGdKPj3!;^-6H%D+&rMv}^ z4AeWvI}qL=1+FX8=HKD=c&er>i^M&UwuU`{nd5EC0#^bFMMB6@7uWzjF?+aL4PR{+ z;m+XB@l*U?UBoo~#mY&+DJz4EUUaB~&|3cQT1~$FRUD&YhD~_UkzS@@r{ZM|UbN?z zJ%)8Rb)PgeWDEJvJm<1PTQ+ScI3*k9+2EHyd#|`6B@8F7RcRFLt=bW;P7%{F&C>kD zFsMc#ZaV~zsbxOU*wkyn%hSWl)56Oqh*TXAL3ihumX)ZVWH+Ig93sd4;e!j=f#fIZ zDYY4VX+vM;+6Z&4bm87$3TOZxoeI#T6XIrn+U>ZB3u@pLovVv%R(#|I*dO*N5=i98mYKd@ znfix(_Z5txI!5MA<3rBO?U89PI0ZMsZpE3NXyNi!nNg7t4O^Pjps_kIO0IxI-%(;j zFZka?;59F6h|tZQovSPuTyWXOd`Q!4PwnIr;DOG7Kw%2B(<#oDHDl2-M!O85_A3#) zTB%*gj2aIirVXYDQ^y+?O%K~QG|isPa!co43USYUx zp45W2vYZMBJ7jjysJaZg#w*AH`MM`(hnT_SkYO|Q82*TCyrcbOU7TwjE3Z?h!Nik~ zRW{d%TpK{_P+jJV1OWS|jrH9ly0BD$ugOKKsZ0W5{uksU1hCY21P?!cXsr?A04*!< z)nw6%Cz_ya+BmRitYlG|cn-`j9gK?f0P%0h>qx6d^RS8sLnn4X 
zLC>YT2SdId3z8X%MejNEkR*`E>s#6Q;>kOi#LzkLkS)~qx|4X^3x39p9V{ z{S&YLy=GjZm6vvL?fA&gFpX*PxD6Xf2(L_oSsQotcVQT~#T`L|c$tJP1o?bX8Cxno#xwkUmg-)7#oKttrOa}=l9`C&_-<}vzIFT~ zxsUh~;Q`K+0hdEHX{I<#=&4i*n=<%!&}ndfs;wxEv?z6Oc%GGSx(?QcvUk3A2cmZl zz%}F=D>skMeGjBRT(@BI7C)>(hnl(?k2Jq`iAV5GPVEte=@CDwq5eDb{oo9%ztg$` zeiN3UzG^4l%blGM&fZSP$?y=TwprSoZ7b|bWFj@6Q|t6p<@)9e zafoj6q}Z?)aGQt-T7^7@H;%1QD(X0cNy1fLxn07uK&jUXXC9wzA_{S99blL!8N()m z#%20=Z`uMxqrM8W-D0{!UQ7Cu4Muhmo&{(`w4$;)FQR0~L*p(wSxv}6?05jSh<^jp zm;r2J8`#4!8;+E7pn=luOW!Oo?W8{lBLbfh2077E|~4Sm@8rwd}QTbETA&+P5uF$Uant?bfvVvnxl! z?18)f&hskyW=O}4bYr@C=be36IVUTB_svM>5ft*zku%FL>=PXkI^BFW?HmUNiq6G2wcY#)F-|~UsLnf$c7HR1VVwUopAjDxWZGPhi^wQ>$~Vym%s-c z;r&%Rz6Ea^8?h~0zKOnS(M?}yqU7(RCzR$mU@JUz-`OM!oI^Ju>dA^v@7tGTDYh+k zHa}=ZF18PiVeBatAE4XF?T{jL0=0C}M#93IUaywqN_oUFT~nKD)0e z$+FwI_d#1u4$kT})8GQd1JU$YQs{BxE}*Ua7viyQT57{U$`5_Q2{_aIZ#<~;mQvu? z;-tX5rSSOWKX0?wZ>RL+S`@72)zROBmh;ZH*Nz9_YRw}5GWe=f^Ei7HWfxf92SLum zYd%mb;QUvuS(b+#PM8ncik6Yg(H7@DZoIFf_j!3gFy8CveR|%HjQ4tapOhCg-XEd& z(Rr9GCBF6aJ~;1(^bR?>jF#WwmCIntIb0s7Sq2v!sH7)+7#`uv14TQ; zw;Aa>>67qO`YTet1F1)Xs6CG89I=xyg{RVA(Ye~lOGO@SLSrw@tR#xJC?mlTUbG4# z@6OP~v=LO;;aU_1=iJ8u8z!CZcsUwT%mOV~DoBCS-P_#O-renFcXx}=K{ofd@Kmav zScam@Tf_-;Uw_lG7V$5BSjv0ZRN)OjASv5!f_6$_zAsvF;s@Q*e-Q`Z-ob-6Zi(}K z8N=0oA=S7xKMWoOQ49Zt1ULZX2fMxQ=lp;<&VRuQFB2CA$?%}!6j&xZd;<{fny|aT zlvA_FcXfDb1G<(_HxR3f7SZ~ab~#oGBHUgoTG4j+r8cFwslPoohFuVK7+LN=usvHZEt6;B5EFwme>_DuhcvQzX-JiDy zTr&Bldd*c=RlBh1;lR2U<{@V@Li;3BrZ!wWvVdx!?_{7I+ku!l){@z#tbhRsH;fTQ zXEy9E%DCCShjrcy>^{@1g@7;jmx?8L<1VKy*89ue?);ugxMexywgPo-WdHh1Sw zEEQuYpVGC_dS7hl&WB@^FxE8Oxyy}?1_c8iTiQ*&q3ZnO>Taz7EetL!gB(S7yI@u( z!CS9#jsHS21v0bOfF(m}Mu>G~h6-++;TUuqwFK#l;sqEpTD%!m`P6xq;)SgoaDY#} z=fa?e=VF_t#T!?i*{qDke;fWU%WU?|6xu5A5DLo?#O>g_D0hR2JhC>@z`|jdRn*Rd zzb01uTWxBBc9+`X7j`9=*qS*Tu9`kB?M>P8TbVNL&pWBNZ=^rkFdmZ|nUw=~62~^u zAKmdfuK9scSm0uefjN+oN5H0>!ggOG0 z>Km!v4te@!VAfKk`|{P>mj#Lsw9Q)fed0D!CP}#(D1J{(X3?5Ta@2yz7w@edt`@&X zBDQQ%VCE3Ga(+)cDqdEig#$?r!ZIejYFSBmetWq#9Grz8e~1T@Le2Lq<`0-mD6c1B zLlB-;rwSo?4g_W`%We{JFdaoEtWA5t2V*KCg_7Rkgp*nZqA~kyInHT}7PVL&c!w?F zRXH}GlIJjw)wWO@h${(X!K+t2G;CAVOBl)sk%JKfF7c;@JwH7(+xS!^_-6o*^El(_ zKtLz%DoqxtfLHT!Dd9-?ZMSH-Q?~{LEaD0NIE|A3h~#qZ)-o~TK|COYT^)W)tB8Dj zmzsp{VZaywWj36;0Kq!Ew4*=~w6N{>0=|`}qr`9VsKQAv!gxfcjCCh}4AP=cvrtre zUmAQvtW7eP1?m+j0f8+z#e}fRbVahHsuq8PJJ1Ya^$qM)rr=;1p0RLp^JWRU@18U) z@x4-D_&Xnx=1(jw_4U;j!YQUegaR^9kA1!{$X}9uwg|WFnx3TQfIfdgDJ}`q)+Al= z!e4A!@#;)|efEzKL9!%eGzC9uz<02}1c}ms8CM0b$YHva#4>TU4S5*8bhB=8_CFSi zg^;cLTHr6=Q-&h1#Ssx$JfK(TQITFO1FkkJYW$)G!uXWdc@Ls3LEzbU) zs{#Z+YZ%=)hELU5vhWE~_BAH_W#HLRT+76)S+mxK-^6zf32WZf{$fmQnzF z`bo?)*nw&d%}k z3ryI)$XI2A>%K`iUN$~=tZzW}zIr-7q@L&V?cYLwAcmQXinPN3O@xiTuF3c164=yQ z{EV0|-w4d5>eQ17;D0u`wx+-@p=Z!15ird>t5iIMMNv5d%$xc1PrTMyW#Yb{$W9f= zqrh4y|22uBL@Ov2bMS$BWCm>V<;wLq>m%|2E$>z;^Gb0rsPGBu4oRRb61m_9Gq{If z{bM;@JKf}aIm{G^9q~xNl87_;uQs*CE8qalmN-j>F^9;K9N6op3`LK;hkiWL6^TBAT8j^WQ4ASvJYLc3vXA@Jkbdo%o#w1-@+sY$=4;m8AlLUepPfK zTtgR#S6j42h|_@mjuzh|;Zze!P!Y<{Ubg}~#D>?ve6acg{W+W2n(szgVl;}uve5#121PeBO!>4qOXofP_jK|=C@I@D??nDz{|ap#hg_B9 zdQ86O4s5Ceh_)05`#^o_Y9hzimF?>cr=A40*-)TO2<1S(9sCZ91K`e3O$yIs7j|TK zz!aFc^LAj?2GyVTCE|E)yJJvCfJ025dn+)N3Rt`ZRQa!*f)l|eDaE}5+3YW#2Q{Ey zmmgRG8X3De!?5UAgkTYuq4+972aQJqb}~jsOe8MOFp#j%$?T)B2dDI~2ZLu7E#3Cu zks=$X*P76c9Ej*z&BAfBE6z6@i9>F_+v0N|DxH6~b>MN| zAr*69%i|-P4z~N7%q{13(?C(u8sq%C%|P=p!rkGc_s;F6#j*11N);J||8=Cc68^sf z{|DkWkstY{bkw@Qfmvr0aTSOlE2i61zG8yPdMKPO&w6cK@*$D zLDO1@NUX-tg;o(wP7J`fz_gsTNyC~O+qRui-_d2ZMCGIEm}wV5PSTTzJAk1 zKBJ~B5MDxv{YK@q>AS)Y#Q1p7=#09gNK4z(<%Y2yBnEt4a1>&i(82(60izlx~9^We0~6(!J$mT3C5KJu)Z9N 
zDr@7gjf!oVBw_zPH!_guja+dvGZId8jTO3K$0oxB)oULPjG z3*adh4X$?1@*)~r%WsS>(hAqAtAStdBHzLz>DtHPDa6-a<`y@ovU06P80Fd=y_d)` zr-8k_Yq|1aS<4Tx8*F&=JUO>}=|xHf?Xd+a-UN>V743nFBM5pI@VykM*q*aE#a=fI zGAmeQ#l)K7wr?xSs`gTLV*|!x6*as1y ztLD0C6`v^+f`y+q+930>ep-#ge)}{;y?(FNYR7p>+brk#np64t(-wq%N3n*1VzAO0 zPsP9#kH2Bxdx63x(%s|Gr~G>|sJJF%AmkN%^%C?hGJ=(@YhH&RPg%bH-O7{_aR3J$ zP-XfgNS_oQVeAB0=Em0y41&W1yr~;_DFYu@*R}^6Xl0yz$iMc0xiTr4Kn@4HUOO ze0A{qv#DO1MJ?VRE^L3rginFlf)1Es8Q!Z)ZqM(J%(M%uZhVAWZ_2kF-lLbnKE z`$=8DAJ4mL@h7v*E#BfJ()j6ZFYQllZNi(_fk!N*(I2Fn+`xKDRtuW~>-X}*o&cj( z3;z~ilxpGLz~-<#rILhm~+?&jZiQO>_dga^McQ|xif z&|VAfY$6s2~jJJJVfas`d>i8~nWluHMp?~rqvz9)Ntfetg!#nTaX(EKXU&^Xz5%*mR zNfjZpJ4_|# zjv*2t&h?yIHa-R6{^;qLnIVO%I8XkdM{Zmvb=LW;PqK%l#))q6AkdG(q)L z&Zv`EBKW42!1d6qvZ_6>cJbguM9$+-a-oIT0(k_YrA93++Ttk*eHL;irDiO*&kxZhfisVkDem@>?W-_ND1NXWT zz}P@xhB|cuIyIg87|#YM%pRBng4VxNn`4!cA=fYVy2p@EGsD}SeQ3eCbHKmirr^w5 zY^38ELnolfx*Bq|ApXS)EEeDJhWvY}GsEdiwAk0BKphFWmc*971_)1t#}F# zv{Mx-=cd7l)($%A&aH#PR<<5ZMS4-~FR_3R6rRKZ1YS~Jf$yA!8TPuPs38{1#VK?- zg4#_y4Nx*579t|O@}@dfFhjv@n}fXN8^ zPKr;TM!O@lDw!?M{d?ruarj)UN&u_05e6%PjI<}PBAiMUQo*k?WZFF=?mkUIv9JTR zP0A@uBP0DhPHGL_(~gA~#8>t|MD0nEU2=JG5~yr+eE#*%NrXs{Z`8{KpPbENYO4KkX3j!y`GZG%Z zbO@BjFWJ?3{0cnq_)S9(N#yaH);)eRX#7%?4DhFW{H7V>7wc0~P42pI2hQuQni4&B zk9>`0?lpGrBhK|0yXp~GlI{&!Z*hA~C893GEnG%LqC6VCCQN1{&k^v&Fhi=X5Tiuz`o5rGk{ZA^E>H)3+zv@SKM<37h4f^ z7TT-Zd%Xu{L0CVawXXgcC^~~hHn;Z%tiNR9H<369iAip6b|i70p6GCUr$iESkm%pz zCg2pecWNa43LQAj?Y%LQn1sYq?@f3IJeU5}30Y8v+Y7y3Fa0AV>Nv-{y_u2pf9Svy z+}??i#O-?GM7Q_4Na7|vG0W|p6iIxv8|@UgcXA|sr4F3#_GU#A@75EixxF(ZiF5VD zT(`F{l6X@$+I+XSD3U%#2cGHn&Wa=s)DvgBy|W{UUw$N+FxTy!6G?1GqRs?daJ@B> zzE=mn)9t-2lK7&YxWMh58%caxPh8~o7Do~T-DsD(z4IdJ%XQ#oZtv}p#6@~ymD@Wl zk~m9Gyx;B3i6q|8jkenDg>gi0$`LxS;`TZtiSc^kYPUBxlGt%vGGUF|n-@tuibTnT z^|k2aZZATC>X@4({Q5dhza^5sSx;}^^n9e3keN?BuBU&W(=oT|cvg28K~uQfTM)@Y z=yw_g>uG<^?VS-xrxVGPPB$go-U*TP>-2O85TuheLw54i?joMytTC9j^#1>$MD=S< zhh)&xKhV=RQ+k!fK`Muy)uv~WLgKC(fGoA5lj=w=jDL?zW*Cg80lMsCGAyXHu%#=| z>oA>TF(#hCEJfGtgZ!H$>iO3p*79$P@bPb&_!j-%C@T3iLoDXs@uHM}Cy06cJ5dzz zZ zDK!3FAU@>ZMdB#`E)|FQcbRyNf2%|b|K2bD%D>g(MgCR9ANY5**u=kU#Lw|dBy6n( zdqj{@w^A<>kMJXP9I=WYsjrCp`H}jDxSJoTJBU(#q#ht{<40GXpBGSw>w0=!+nadfby!k7Wf+UGn{?aW4Tq);Q7$hj zZ8Ya*t#Rb$Je)K!tHJD=HpKTSZXV=1eP`6>V70k%bVHijI?Vy=uiPDz9SEL&6P)o$ zn3_oYaT5tDj8ZH=~<+v%rZPa9)UZqXRIgUG&-LaM>#4sW1MGIF(fHwVr-f?8ONm z93CKTv(-0K%+AHo|L0Rok(vU3iA4PrH%mMB)HS#O&$d*;N9ML%M%Y?x4am3J%8X zKoRBs*TdSQ&>dmtysXhydp>K;WwqyX9!{*pz<0ioc2n(nCj$0$1Z&KV_J+ag zIojC#Li<2XCZ3S36-WMv(}CGmHJKPpOq-23I-NK%gv`P&bdWea_^IX9snVLDSlm$2 z{3I}tJq9mWC#Qv9qy!NZppKa-9>F7cfQxVP^TN&me?$fQiQU|T_O<*Udr%EdB0J;L zKs|M#WGYlGayaAD^^^-Ab4rRcK0{9l$&@r_{CGX(ZJCnpjGv&V{9UGGIO8YkDKB!$ z%JI&rS)5WTHd0D=S9I_m3XI%1XNNUGVC|WlnlxW(PE9&4pde{&=U=%m-iWm^_r>4P z5uzh78ow_)qK`7H;#MSc-)r*?C=t0-h}I5+cuktUj;{FtCSQMD|u5l%2=Xhj=710*vnfe;9Z=Y8YZ8Ogu)0soD2ClXdf0GMmR{ zlX>jX>F|-0i=H2`j`5y3R$Tg3k&z-TtH-{?v*V~mXkRB+C! zqwVR>8V5HFEGUAF*n)jQZ2mbK(!{~ZKr^-+V&{~$MW1CN+kN|SbF5T9$LDstvD+V< zV`_}Ue%t`;+1dYK9j~1NV_{`NI=1f`6F95YY~N%Z|KMi`m9QtnI9;3Fgev4?OKVM0 zX&FS>bZjtKTHrPUPPYne2QLP5JHj{NZc8{zoNU6F#5n{X4p}&GNF

4O*Kp59>M2 z5?pL{RwUKz#7U%mum=6Od7;`|(XwuyjZP52SWb(|qVzyfnz5)#>Ry1Cs;96ASOUSU z%&U6o5M3M1gw91pDS1`TbHWHc!I$Q>kvkagpztcJ8{d>`^T|QqnNqRm7(QZq!D4ay zd$eMdu)LuV1}Rb#6S-CaxG74kJQyr{qmnP9(mxhMb%XSf&*+*%x zP6nZ5iAEdeEOsaZlS!?G12An?&f+A+2|`C3G)?lFwR8aarcl;o9oLtC)2lohUc6ox z8WBpA+6ggmQ$CRh6}%`# ziH8|cOH_x6e_f!x%&PY{5MDUhA-teT24x=fJ8DJ6x;e4Cm}T8KaWJ9C(T(bd64hUR z8HFl}L>6!uRN_};?ZR2)4k=%7_k=q(nl*%o^rqm9{9uhWFq%R#D5(@m+EL~mR@HtG zJJ~zV)6|v(Pj09*vP&p?9^b=CqQ^u5Yh+l97bW0@zs6>gCuU2%`Bg{W!1rtA_xSP~ z>Mi7)ul5%DXeWK-7NhZzHit8^C?m5>ti(q8rTIUcNYRG2p)F7YW7%){l^eU2)r zulcpwbMK7YH|{m{qIdvA&pGo#QQaJ3}+Mr&XVBSr>~**cgXIKCfT5KL4_X>g5z0 z-4wI=uB_UQxy74bBDUSf%*5AYjRX3Y5tNjo4MBbti6SZv$Dfsn-h8x+yNX19cx5`L zrQxWP0~I41u5hX(dB`Cg9eCi=OZY6AXXalv2j>n5<4i5Q<6y%TNBMiDSE>n>PO~!A zzI6yrux9R7hGZYYjQ-2r!8G zLY;%mIQv#lT&zqg*Mf2_vCG9-U9~f`7;}+ocE%~I?OWmRCoZ!?8H(FgXYIdi3eGf9 zNRWDR$Zy}e991YV$H+d7gVV?c83nPvep6N=!Y|a9kL5eK~GK%DeY7YRi z0Wq@wC9@I2wP0Y3Y;y95uLmJ}&?QLuDZo@$lCvPmASEdVvgHz4GLR+5$i72nBPeXa z;27E2GTV-9`z1(8Vp1H=0*670BL=b)xYdYyVJH*~iILqVvoAyTWihfh%j}`Z9(oB< z)&Wd)r8o;x3{p~JAX_Mrr2<)MjO<*QJq+2yVq}k!*$9?aaQP)jIZMOGmF6r+Ge}8` zf$RXFz?I=ZHatc)`I$ua2xO0lk^MuNJrda?FG0$EOiH@5Al)D(JqEHuiEI>*jf#iugYu<+1e#Y*+j~x>!h>bq(REb7|7O2 zWS;`rr!le@%k0mP{aK9cTV(d<$o~8iq@*z^9nOLdgOrXK$i9H?fNGxtvQsg#56kQ? zko`rB?3ZNrX=IvCLcDL5Y^o3uq_BUWRvb*_IkRKtGYC2foAjt8rr={ zIRGe-auTG(8q0r?+5L^>AIfZFEcZvseN4*d&VtVkQfO%RCZ$j!gQ`~$Yb;+Sv-=y% zi89+5%l(nkj>V$tI_)etZID7kyEiGX018|=15#p*<)6#!{>JhenQe^a{z#e6q?~gW zoHIzFq1~I5$r4#7NQpI;9WuMWv3v^4a8$$?%l(l;K}1#81!utpgA^Lty-E4CMAijT zXgv4Mu9w-ynC_ijDYK1n-5)8pFqkpMaVQt>J;r%x6jD^z2hM^I3}QcsK@E9GN6kM3 zu{8X9Q*%US8zP{0_RBKc5Ci=YOKWr0b;4P2g2y{V!}3@&{e2RfG1K=hY_81iPaIq) zvkh_3A1w(?%O}o)PYkxin(5oIO$sWFnZ7qE%`&?`aqugdZHR;ZNck?nRM%(Dg3k<6 zV$JmPB{E~C?@h{$GP^%W`ZH)i^NckDSRM)4@f=>-n zVrlw{L}q9Qy-Arbv-=(YGTRvc{gGm4QcgJwP8p=c()2^P7LIOYXa~JXc~xfjJN{+1 zG5-4_r4C@K>q}?Bmj)@ZH2uXAnV}u@Cgm2H-S7C9*~a+qkCgLgh?Mirg7XF`u{8a| zfC5(x?VvX)FUjnF$G^-r#(#gLJRrdgIncY1+hw+{7TD`d7}Ze=g%E}B!`|l|e{`8r zpLdz!UvimFY;&33`Gd>U_?*l1(2Fk9LcHIB_wb)xrq5q+nGS4qnc9BuGAVdphWA3e z=i)u|Cw%u8m+6%)F4JFr=Q1tE`#ii)#`{%xKZG)Nql{mpjGt|Gnac2f6W-JC&SrKx zkK~{@bg(3U1Xm!4KKBp#^Q8QlBY$2_!7{8EzJYBe-i*6KE6Dtj<-A^5W8a!(-#R?= zw33>Al-5Em!9^p(>S;XI-^=!kYM3(1%`pLHO8^)%%j04KTrB}$&Mdb8U`UT5=+rzF z6GyaETb;|TN{xMMwH3hDnAqN7gFh>7hQi4Eq> z@*yz+Y!Uzl&hpD*0({IihH98N%ZK($4_zr%t5clIQ3HqP?Ym;kFK z0IZzl!(sxINdVY6%P;Sjo*S5+H0Sa(rY8-+X)&>pw<$1hH~?Y~K*4s6>Jb1K5fek3 z1Q-c`k^R#1D*&n0>CWZp$_f;h4&d}Yz&N+K7MY`vIVx7>Qe=)s=IB_NGmtq3nPd87 z$}#R6YuRmiHS;UyS9bq;tO+b_mN(e8`7ZiDY@3^3hcs&qy^c54ecb6et}M52#l+}2 zp7{mu5J4`<9ilHl=RZ*zRzaSRVr6beCUhRpiCCFGMv%MU!B!pT`D|YAk>}p_l;INdV|Po=;)|BuM~hJsz!Ja_HEZ>OSf8oHWQe z857wqwlq{@5#;$aCcx7Y0D6z-vzP#DB>*%Z&*%M;vw+FzaC$lnaynun%aX{j5b~Ug z31F81#NRJs0({0cglcF%p40u3!yY``XPllhG|)WIg{sfQ#P*EDhSiYg%a{NUO8_W9 zp0hCl7E1uAK%R5`(vt%q)!pgzbQ><7 zbD>{yeosaTcbC)CMLVX%>n_0Bqxy3FA>WN)8uTE~*L|}fRjMx{>mss-_V`M%!Z#d5 zMLtxuIhQhfE)10<)0qCy>G{y$$%jDKcR+p37KLi8eR$CK1$a*a7z4jAz+WXm?7?r0 z@1Fojb${&id~A?IvY;=r2P87AfBF#s#S)-@0WeVl7y^Lyxw}P(K~5r*qd7gAK@Q1+ zzT~{mmW67pfjqGVz)lI!zW~@E0So~Uo1E1EQr(|BJ)axokSyp+j$0zbI>-}S08Ewu z{R@D>62K4uvB?RMb;5nx={aqXL$aVRIn8WgsK)sB^fUe?z$M1N1Te;bY;x{ma?UwD z=UCi87F3fg=u6M_5*rpop4bAwAp!aq0H3oRAtC@`)AK5TRQCm^=Yqi=k_COq`ISV5 zMG*^tz5weafFTC@0xXpPhA@atj+24tMt(j-0#;2BKRvCapK)*h!0GwGAozor1fOE# zLpAKAp4h_TfCT7YSUe{I3}F$QV6u>??h{VW34wnbY%`K~8MN zVWvc8C=Pwe87Bey7Z!06zz`O($@v!Ha^^8PXPut2EDInws$(k-S28wF|H8s50U#`@V+)H9*&5-2u&5?&AvQfb0i?RW za(ccp*b`fE_z5GcHWY`x?7_WkjNt12g~go`zz`O($(anGO4SVuss|R-Gu-|01brg& z3wzyLkONWEVf`U{{ZQBdLOWSL9CLb(F;ih=tv(iWL>@TJM8eGKi7i2%lK}lokVhqe 
zAwgo-;$8r$?vI?Fj|`^LlA|xrToRd~dGrOykO2KlkT5*aptgntiA~PC08-r{rzd2P z6MO0Lf<$I$9(~FAfduGZf>cWYLxRL6=QbwiQ>W)sgPhn)k8u*2p?UNrCr$$NFF}r- zV(u6cBsMvJ1CZ)I<@B85&I4&u9eeHZBZ?W}w`pl@k z{!_MtxR75;CqMfFz<&o!EiPCpOX>7yUjTB2003Aj=ivmXbWPq|0Kicx(KI3TnH33TkXqGe9A%jEpK%QS1B z%T)CbmuY{K%k;-RF4OkCuqp3%neyPlX{o3$n3jQy^|8WQ54>qx}$lNf? z+*lL`ybZJB5RuN`#@P2quwySE0?SZle`}n-EskjTO$6$334{6*g>7caZHVLo$5E8I z7yp~^e>?s^kN=zT{~7#$693hij~)-qeC#-W1%3s7PvZ9^emn5nf!{OuJ%is){C48E z3%^}KYYrXcOxW!efrrhlv{g!W$}N;-20rl_d*nEqD_bCZmZAY{GW3xG(I>kW0)le!eE(*Xj68!*3dXQ}CODUk82>sP54u z{3hYohF=?gt@yR}Y#Yg=$ARxj{NIfKefj@M6J#G`3s41#gu|n zxLr?$r$6iX6iVMjXu@x~#h+!Oi4(i@#HVE9aoj~H+hiIdA?uJ>zR89++#N`%+E1B; z;eJlHB7GzM!QIE(b4C%EGRrMqMP_&^y}_UR4uXU)Ph9Ssi2EHA*wf7LU~#9%i}1H6 z5nSHQcNY@QB21-kz`A1GXUH&VIU9*C;LUQ2hX@7AzcPFPcg*pSzLDrd=1{k&&@*lD z3c>l|sZ>o-sHell4SBi2jR*#SMThP}=x&4{-*JD>`TCmC2su%MxDys%%EmJZaPTvM ze*($;GE&UKk+|qsc84Lza4iEKV2oSOXzxu2;A)_vWh4CChABE8nRLk`!v}}xBqnwXDFX6X@p>=FyJW%J!QqEPZ=*Mq?xaU}fm}ICZ0rSm zodgd39B}ZqL++A(`5QP4!e#l{4!9tj3!?4YOqKpl*9t^}?Yv9Ic;1HSM2Wo;{e`x; zS+l~3%f1bl1}ot^GrrQfSJ`RbcEG-^Idiw>3RmM!W(T+ppHSeqjc=>K8Q;h3sWk&m zl|~8^2$W&Kq60tUzJ%T1bTK;eGP?qn_mkN7I>%mKOFlB+NA%^F&s^*gQmhbH-O(D% z6zyKzgmu2Hyt@t|)W~1bLHjmKugch|hHj;=L(Q2jHAF4nEjRFgnTqHE#KiMIHwB7_>(ZC$XbWa0S#h zy?Yuy^XF3}C-{ne8&``flAQ0a8ZCCl`dRw#nuC=uD0?8AG%!(N6Wah8i>>0 zRA04kOQ;N(U9O^{N?eV$+~)fXu20|h9Y(fL2$6od_(f*52eHVcH(vI&swR|Sh`H4~ z@-kOa;tj*2cld2~Jf^t3acKD8S$Q*l|G26>A$IHd-kntWTsOr7`(Mv?zYBGlv`7t0quks(xu zqDR9QJ{*aWZ*uq~-se0;{bg}WWzJyfQrsq8irai&z@>LdzIvprM9jI7$Sy}01h*YVawR&xmRRY0RT+|>Z{KFCB+v4dJ)%_kPdacnj0M3@ zeg#2L?0|FR*L*{xMH?MGLB6EoYH~1&o)cza}qnrvLkmos{1+HC4G933`Y+rS+ zKW{12e2ecke=C%H`2M3qE6#W~5?9>>&m8-py-=Vn5{K80r|0e{e@lQ_tvHTgO+%24 zj@}_VGi1}{~>gZZ5*p2P5|CH+YmdO+#;hhHv&z6C6L zxcrbPY2*en)BASpj=Lmq*QZAM6*lbZII_YUx;*OVpqw zuav}O>C=BuT5su1WLd`zCTMvHpCZ?hv|`jRQjHA1Q#@44qtg33tm5Cn)v#Fd5MNWa zoK3J+!~BA~7TEQ+mE-OyY7M3lC{)T5m$Y?=^9a7p@!r!DQ<8I>$eL-QDGfJi!%8+` zKqT~sff0>yE+vUqgM!>=a-w@x1%3PEF{nkC0L%2pzg=>XuTZx(gUIoA;NYQdSkP)~) zxsBZ?Qd-iA}tw=F?(?UWprAjur0UD$$mG7Jy< zdsPa9V4j$6K=&rJs~9H!9jAnAV4m0y7AR}!?Q+688Q*i@GcyS6XOQ}Y<;2Akzn(bf zby#S=3ENeO<&PNAL4G%{x|}HV4M)6Ub)hC4ahG~b9fE+~GlTp`UBs6fU*0z_+&pKv zz==o1Q{s6lXD$pNw?ukJ@NS0XxXdQV7N=U~C3Y!ywm}wFY4yfcGoaVAVU2;^DlNEp zxYgL)!8JBGqA(T1FG-_Gh^>`YM0WbX*NazQ+g>vmSFIbkS3 z#>ve(q{77(uNIF>B}f;AghDr|nTYn#!MNYz_|~VS49XB!?5zIIakulDYs;W_)@}*~ z_RYM>R@$ga%rF{bvw15tG)&t`ZN#+i5Lf_@H@!A-32&*@eOp!rWwOOAPwzYd;$Su#N8x zbk6qxQoi!2*i?1Twd#tSi}!ZI%&;GKq$M(FweTv%dxYX)EM??Pmkg^I{zk0omWZHI zjge*mDD`uivNU{EbTb*xp#~-+9YHb~E7|iiJj>Tz$FuAtmO#`h3V2J7xZxnYHEX-U zv*JjpMO%7nRusTNGXp+O^>DVN2SP>L*5N5C%!#$Qx(<;=rtHD#dHaP>lJ zc$MbG&#~@6<|F<$5(|{UOdX0^ypEU2DEFw`({dlneNTOC;gJxf8YC^OOEGUPL5?72O8~x0Zx0rRk_{p@ z_~Pm|(>|&RWFqEUINq=1cyj}JT7H3#Xr#}@m7bdYM{wzGy@~y|9HYn|QM))xCB7-& zft~ZEK+5+$mM{IYS=O-}>APc&^xv|A> zsbh>B#IQV#5(rXSGc+&%iq%C^Mk=m`)bDz0B4~S$~TSF zQz7VLjd49ZI}DWxf7^ov&-NgZ%?R9SCAjXUZU*4U5&e))2hgOlal0yZP{Gf6y*CqB z!(=2+kv;2-)U(tI%kzk1kkIkpp-yP*a@y$t$!eoU|C0hAEP@Zw?3~<&yC0CmY@{Y9 zm>IsHOALq-qe}oEtz#3G3bA&pnA}#*BG$2ZNHowHsnunWATk|!>w-DCChN;xFy@5(jSB3%&w+Cy#QnXFUa@>cy~{98gxoDap`6G(ZdP%nRLxIN zO_zJ()ihmR4 zi-C?=C!H}i+^u?88WA{Y?` zN&sY+i8R`@GVDa6I<}NQhFU43Ee{6}B!*xyVpgySs_C`H)5t2M-1r?{b;f!;0STOC zGU8y!kU(RMr=06lEmc}RK<<{7Dv&{%1C!gVDw;QMg1DL7MxHKjLI!uTJyLJu$<;kq zs*IIIT|iEqYTakn z-563Eqf`w2pwJB4*&JN}*m4)YEe#9dQ*3P7itJhI6cS;5DlXFigH3J`bSK7*Aq*#- zXtfSv$f|wr%E(H&Kf{&XpCQMUOET=+gaJU8&%qdqY@Tk4;77{Vt32ef8fFdHnrhtml*^W2c+V zOue3v<3{EO&Bj;oCnh0A%Eab3!?41jFy_f?ZQChO&fgs0EM0=CjbR_9=2C_2)gG++edGAzPpt9y)0#rDdA+xO_cHw0$9eF 
zWFC-NcKfYDw$vy5j3ZM&MRFg*IXB{H6x?jMjd1k{lO_2TX?{e%SH!5t7ih}dFv_KO z@B!;u-1s71JwB0FjXG&{8Fg|}>763pqx24W9EQi?NYXvpjCjVw8slNSzswj{&Bu?= z=7){X=N}(0@WJDk@%_fn;`@xdz&}!YC!6;#y_3&tO794~XX%|~Jg)qlCCo*l03`B9 zB0nVZMj|y5*-Js%K9n|q(srY?zLXXpd}I;^gl6Z;Oz{Y8s=IfTgB;Zilk=EwR*I-ooj=z z!p^wS4$3H-*?bT%XJXUCWV}Y-`_{5&6MGW=FzuuD&BUg%bPel*!F&lG@S$Y`qR3Iw zb{L6d0!tZUrO0m)i)Ari`j%mwA1;c;Pl%=zZ z(!?@tD4-{s&GMXf5`4qhRMeE|P$ftzjTFUTriqJ}B)^o#uE)O$KNG_mrhR4jXWF-w zJqbaTuOsY6%wHoGIq>Ih70f3Fmld;lZQ8yU5j1P3BDkV@sCN9UVofI|kan9yhS54O zf}<{i5sZ>C9Sm^%$Ii*B5Rv(nh6V}2 zVXW0c_pAs|*JfkLO~`T*B(;&nHEp04TK(k;%ZqIo^AqNG$a(i#2(Q@MhWumA)172; z%CwH!r4%p-0^qfD45c(INMrmNAH+m3a0zD=6%k?7yW$^mX(A*4!OxCxCbnIT8;DpU zcg>4F8qjV(C>eXp71EU5^u=z-2Exc1<4r87N*`#9zv7SOJVFg-8V?LkDPDHil?O_O z<|gL95l_oz#8-IYIO2)N7$h|x3?jcfBYi4Zb`)Q2S>^avSqky(v4Q+8i|{GaAf*@) z%PdDMXUi0PgL%~$4gE)w(I?^wT|o1)GVx5f7w(`Sr-iA z{Z*urH0B%F1yDxW@(aFAkJOgAhN2yVW!}KF)^$AoS&kx1CLcOhoYi(Kf=X{cC=yr? ze}$w3;g!N?cJ-&`Ac#T~mgAKqdey)ErXGEKIilddakOas~23o1$xgf`k72HiDEZvczJZWv)X@GYN07Fg|B7q;?=z=zDoTJe{K=5&2qlNyCVb+ulL17)oILF2EAqjgF)EpxD z<02uFoV#%p9fn8cdrCZ-}j6y0{EypM*eTy>1*jAI7M^r4uCgU=ED45l@y%c|#_=ai$!3||- zHd0(0V?*uIn{4b(lk|qItt7pb`Qb+ zMUK1<(Mgc|apBqydF|w^I@B#%$?QSA`j{iHl@I2{N81qy_oj8(U*<8#kw@r*c?e_L zd)M9ch~-6sI0okOYLqx#+In4CYe1RfOooM-m9P_)qfW{|6~XroKTC4E@Vft{+>T}r zXU82Vm9yR_x1$Jl3}Khs5lk|<9R=DL7tm2|NAp*f+cn`<3cK8nmNHS1+pWd-U&!q! znu$$&P1$AC2eLaaCcD#=(e0A=%kM1gVj?!WhE@L5oI2NPi$rqyeJA6gLp_Focm2 zLpG4_CMHN^1S{K6B|+l0-w3oINTdkjzV<+~nevl%BV2~BhX@iYDaLbh4hAN!fOB#x#c_EBoPAdyP$C`e>A z@j)~Q66yOvK_ZLs&3{>I4NS1W1c|huN#DW5gyAFlgjN${4se)&$=E@_8{fq2Bl*JH z2u9w%8yV*p+ZM<7jpKXA@d0tXZyfIx z$7|x^G{(n~hsn=St+u+-OBG|N^b2RgO%qGL>KhxAk;)L_T;e{;X89Ot9wPD)S#LSS zl#`c|nDUibf8c-u+}ChDZ$xsp;ar;|xovRY{~5{U_+@EHV`?EbM@Y5><*aT}{2{w? z7H$523Rr@=SdBwJtCuv?5rL`jNW1nhL6o&#JOtG&^LVzkk9$wAK3a9vO@shuim%FW z#fh&?kUoHha;hl}nu0CSZYn2!1nDvLBv0HjaEM(gfJ@hyf(%rv==$YG+*r>)A|0&* z9~<9jQcGL#CF_nE;u~%ED_dR0j!X?@dY&?sH94kF#{~r>hlL^{^x{&Ip};G>uK_F^ z$E75=MoXIQ$WyEn&suRsCNxIdppn^%YWYK3hg2Brgl=-pfR{WHq$x;~YCSI2R<*i{ zDkCX9$SpQUhp0-mcX88_m;|XYuH*5~ilAP;dk_8et<8k{?S%%Vy91@)YvHTqq%(NS z?GkInGnRVlcMw~Se(xvMA%-bb`rHvwR$mPJTJHWsa`y*z?tU+H{|M@SVW9MaoI&n= z8MN+Z85oY>RGanL1nHhH4N^&)=+c6wIPth*wuA1`X4+Xg=x5ShG9@)wdr5sAxk&?A z{)YHVHbZ=aV`#uF{KxAd6- za0~rLMeDE%!$}&sjrY%_*O4{G%3J!4611Q0pl$|2fLS)2^5FeK`wCqF%TRW69d#JD ze?Y?<+Q@7IHAw>m%vxO&6#0Zl(>tLJU^vBPs`Iofl25{wr>nT&oyjJ0vE;c(+m>|oT9jUxLeKe1h{JY1Nal%cVP|U ztOd!?g2Xu?bG&*w5YS#3FD^1npv2C?1sM$)T9*mOO)NvGTD!9A!JN!GHMCT_S0V{g zk{Y7u!*ai6 zhbh;JxcD}{5ra%%0T~M&wn>l<6D;ngE`Xw^aIqs2ft(9Sq(QoO3s8xi7DI}!{uphwYs}pByz+>Jq0RrQfrROjOnzRCEE&E zg{9A+iEB6*MBtAbAw^((DAx`KSP9nW8E(bAZm9 z4OE*of|MMg#QG~Bk+fF}{HYDlJ;h`IWJ9l&P7&-M3Y>5-5NfP0{d8@L2CJ@_Iutp# zX?DhY52&*1va+hMdSFBIa7}wHdbHO9qZMgSWp8G%ZA^6AB#u;P$Bf2@QiU9+EWf(| zxp8tGn3gOx-<5juptn+Lnq@WqGr%%j3xU3Ui_WhUI`mDZw1)Dudv&^cZw$LB?KJOU zN;_Apv-v^gMR$%SU3r?#tZPV+o)`fod%%y6Ok`c#$|e;M+%A$Y84n3vuo)ZS&Iztz zx&~fV8gDDb4TlXbLu$T8E|tVnm-3Ti`QfqrKxyrM%+vsbuT9Fw7fw^zm9N`iv>uy? 
z77lDZE_t^;(o&`7)wIovoPVp+wIZ7c&|xK@ocweAQgM^OlW>FK`oQ@aqzK`?u##46 z0pmt^Yu`Fu6R}R#?)acZ+UkuDY8KIQO`yKMM#kKL>fIb96ya3|2qspC^C4J2hC;IR z=mgT{(v-c96cae)q%lIv9;C!Ime~{dAjyVxAU;?*eI`)NdCde4(l35~c(uj|(t;9D zp(w9T*IbG(V;%HVeRhG;xOrkgd_dXZW$p-}IAH&~u8D8PmugKsn(M?F;+s^mjKsyG zq*}$@`&abnJ$cbckY3*lBq?R#DRLGby=fB~ZQWl-&2UyN`v01N*pw_yrz}{qSW0$7 z*eV6UtB%9=6leXw(CRF>0Ujf)9)?yIfj|>SQ5$hT+&~vh3OBKy^nh=>ivH_}1WScJ zaB2m>_pnp>6lc`~;RrTII|r3&OUvW$mg?`umd9gXvJ^$NRF!JVz5ryPhn)H6a-2cX zr=Xa(h7f-6Ht8e~?T<(=gFjglYCr-$9-;b4kfC5b!<}=4S!`p0Go@&(4M+l%(yO$j zt(V{r1PVb0qQ5+dg#b8faMEwAG*QkR|F^a1Y^Xp8DsX^i;%+VifL7uM1#ex$fI7j| zBi_&I;-Le=^czqYUd2aGAn@WJpTI@n)gS+y<$rk5KNS`RgsC{84{*DawI|JX5=AgL z@PMDQ)dlF|rVbRbJEqBY05B>MfeWxT#!dJGB4#(*SvjBz*>Mq?vYmL*XDMF{#b#p} zi*pUT+%GK0Cq`UsDzZU%3wMvL~v6 zI3gG_XJHd?^fllFV`fwN(gA%yCAPgvZ+}O)>;F z29d0(W8@&~o9G^@x`L8W?ICpu=I9Bm(ci?fWq~!i6zN@B!%D8Lz6c;EURgQDMALzQ z+$pF+Ju?1Y>6O(X>c5IXCldUsEPj<&+)IC4?Rlger1V{(A7+3W zVu_4W#N!N@Rx+fr)G>(_w337Sy@jSVzYwN@@(M0vB$S2P3#0A$Vm9JRCj^uzZt|aR1XPn0)UjBXpvyW80vomEm^#a}ab-T`*YS0i@p^(q~nJ^&`#G%eVcr_*i-M$*ZSF=X7GuvoUf><4 zND+_7r+pYM?8xYMqxp6)03?Vjm?981x;BNn0cA1>6Or zUKBN`aS%wLb2{b@k`J~5j7Xwb6&NlpyDydMuTo7#s$_^Dz^8iPaJ-G!lf#-FCt9MN z`>0Iut$a2aHwe`1(6mH5!6&U%WAs8A#L5(JSgz8*B&jR9jyol|BG5X!>I&s`GSlmb zoL43em&Uhdh8Or1dgIx(*e%ngz~4F+-#rRL@C+y(44+=sAkEICRaKY954^^09^js;2cW< zP0EN`e|dm)Cj5$(!vRDcElHe1zXBn#gWUhg00t^)4x85ra-MpN-~f<=LwdApZ0Gei z;F}<4h{SK}3_08<3gomoxPJyY)+Hq409i9cio7q$e?I<;Bw?WMFczF0WDaR|RJBBh z_EGVXG#ZaWv@z@lyi>_ayEpi@_H<7#j~{;<_g9;?bsuR{vNATjtO&5rt*mgsTF zQcGEqdS=gx9`g9!AP@uZmg!X;gQ&zl`;p0t*V&-V$9%Aw>|@ZzuT}cSYItA_*070; zF~9*&oVicR;c~dFvH+N00gn@lJnlM=P$;#jXES9 zo_7xiM6>+Fgh4aJgrFIcKOz_os?!ifjnjU9CTB8;&(W!Vc=;tIXWAhM3@#73DgNEWLUN|2#bm=hj=%u3fHq(?KZ&R$dVdXZE zPd&4oP$w~Zn_7xvsqrxnAJ&~z?FV_IDOAd@TyiX!+BDyYt9;|Bf&&S0+_MzQ&2EK zE-h8g8lw%201m72PgX<(Iak)Vmh0}}aAa(bDwPk66@10&oAuCOf5h#<-n-l-DiWP*d4&RXO|jvJih7zOok~MJ1!TbXsX&h-{N?He3|&QKz8{8REi4bCr?me@S`-L)=0qB~f% z@+oev*5L58X7G6wurbFe#|B#{7^8}-42d&u72-#sK4CgP&EF;y66!9x%dL-~Hac5f zE1Xbwv1SOkbPNF6s4-FtsSgj~{Y-@ySar83%M17eIk9HPVOY*-RjuxZ!V7(2xXvr0 zbaxgeC&x<_-84InX!e`vFHUs$NQTb|s7S(VT@eXRTIF)i8sPw#peV2v<*#9xCfs~P4ec=rS;D;-{-d$8B0F^YtxbWIk6{1$t{N=l7pU6l(7 zQ%qBOgjj0<6~2IqOvC>S7GP_T3anmX=HsY`>W+lq8uR9n?NW) z)R=i+Rf`*ekgu~T>^bS?I#jJvOUq>(iCDmfVoFFvLf8HAN|zHc11sF z03tvn?%{|ha{=q#k17`o&G8J-RTqMS1|zZ`*31i{6JLfxy25^MKs; z;3ZSL)~yNBJeV<}4IJ8@3&r+ak9O*L6?7gz13`>r?I9UiDz@1CTR72i)tlVHv*^XeQ=X%mJpRoXv|RQur3L(k?!T!hK*YDYm$-5cx-tSRIfE5{# z_xDRG=(3WKD~Va z-t&jukVLT$Wl5I2Mzu`FBpjNYoFd!!QJW@E4C@o}9HmVx&m8AjA23f>mYSqJWi+?w zRQ?Wn*9il_Eel>GMCHT?psPSK9>kc&Q{o};Lf}GR1kCbJ#9pOS4BH@|!Z35Rhk61e zYg};;>1{BhVjCm@-_^MvI|dwAr?&ROaqcvL30P^5I_~@L;R}t$RvZ+uA>wC{%+etq z;!epzkv2IwOb8HE$;nnXxo>i8XqXVF?6ozeNJV+b4Mv)}cJ8~==nu308d1-**(QC2 zAy>K&n}kZ2-$+yEqbLJuCnT zNM_d|0hSh}geM;sfCP_r_IJv1#~P`mffTu8W{xl;57P$yVGSL|R))vblc|hGbElgPe0{{HePKKP&(Vx_0(gIePCFa6yM|l>?CA zN@styBLShV4hiG{B-q-?p8*wiKbxfVBw7WV0WE{qEK_|5Tr2d;X&>(8axw-*0SZ@n zxJCggc|)Bx-5hMDT6!B}=h!ARTnh!>CvZCrDT8P_S?anAh5c{KS{+Q^n8#y$0bRXxU-d0K-0Je1Nl+ zDBdBU65S`m51C<84CM;J@D2eA(fQLZvvo{`I}Vfh;4le!5i?`$Z;JO1i}$Au23v^W z&mySC&k{T*BQInm@3Fl(Lu^eUV%n5+N%|M$5m12=-=P|r;v2A|VM@9N0vyr>%;l&} zDqgMG|GVM05Np6@f^>d*Mb*&s@sDWsSHZHSq%{{D4j253!hsV|d3AXFwZiGeq|Xqp zlL{w8A|3{Tq&T)-|nEw~xgqr}eFz6EyrfcK7NpEa~c^_CR zmCX3S8e>w!{*hya@orzC<)hqLD(oSl*0s9Z;9Y2SLeG<+*Xl_MB2r!==Bd0?-G(s% zM@FODGoslVA7Ij*qZ6+tUA@Rbv+fesXe|4t_+zG$22;stusxcxc>qmu%sC=iz%M7* z&WL?vr5%1Q*as(J7TY31FP=dQo?%`(QGf{lG!k+Kum))LIVqRaESged!f&p6cFnye z5@aNrqsJ=i`xDY$$eqN4AT~qmU;`XeZdeMi%Oxb0CDy0JCL95P#K%ILN(s^^AU7xg zc3Aj{a>izNV^~Qef6lBLn}GC5Meu1KTE~ 
zd4u5j*zdPKD_cYiD>*Ir6XooP#p=V4hw(7!4I)()Qh5lXkf}L2t5cwUd~HI5G&~Ve z(5Y50L&+r@!FRn0kku$TWfH}qYcT7{(62mZF;hB>83|I8Q%Dq8_8l6oh*4jdcF!h@ z)Sm@+(h6~>4udj@WD4P|NXV_||ff5~n<_te}p3!N@T|0{D= z+99vLc9UOZGxm3q*FkL64;gpRrr7cd>o{yAk^~w(u7>^EyKi@jQ{jk1Y&PubrTLZj z1c zPY%CT_=nta)!wqn0<4>!7z4r6SXtr*de779N+^!Dq);R24!S}?STKx*xPluLdP{xdx+s08KO+aD&JQ+58*xAgO#O*nJ?S>@gN$CrZ4>- zi%(iW`J>=PBd`jgqdul2{jEoVd|*o1(AvLvH`+_0Np#muMCQBQywI(t)_s_{xV_6v zLIBz5BYfhYFhhD8coe%dFlJIO));P4KUPscR)v&9>e3bi*%{1l7psgXVf8=Wudo)L z^zKpX&PhS62B|=_00#@S3T(A*+*GWW6@C@I2(K-?#*`wf{pBvEjtlS< zG+{dL)t)P%y`wH7N}P-+q6_6sHHPHRps(gBy-lIii5-Hm&*%uohLzPrHfQ1~nE%e_%S=z(?Bb@IAljS`C`2J9@RDj>sJp`D{ z>ObU+;0iX)>IKf>q-H~MLM8wKQSpF{Oqc}5E9Oio`s)SpK3}9uyzG4Wh?m+D^E2N8gAQGHj6%#JFKv4 z%5I^I@Z&h8i8mktlFBotl5=4tflxj572~k&7>J^P@y@~+7L}XiCqN5umPJ5j;q{uA zm*9!fPDx!nh+# zzlexHCkm2vUsy@Ns!)nu1YO<;OIKuVPjy{RC1JC?!UGA_7ywoltn&0-mx zKqRF|sbC4AHq{^{;6v1%OSNoA6(J(I7*+yxa41zoKyg7Wk30U5n1dF|Fgo1YlnOFP zQ}#6o%y1|UzyvHwX9;6m!t@E0(kRk7+ONL|^*O|nBLpBqBa1MeBETwp0L&YCCrS-Z zqcRpcn@e+{1cVJWL+(U|9#QkpKotUUT;zQdrLBmcB7I7!Al)$Q4>?GUi%}g(4FwTA zJ{iM;J(MI&Y%LaV8Wk#m(26vl5x0LO&cI-@ZQiRf=%pmG*L~^%d);+33)0lWaOqvJ z=N)O6A*R2eU0!EN+U<3pfxYep_a*+VoF%qCLkt1CFrYi>r2|JXg#09oRS7 zmV`4h4KTId`zm!rpk3XB_T@l3z@YU-G?0a9JH}!#9&J`-;opM#eSf;4!!@^jeN9J-{dIWAdfOJ&yEFaRnm zH+_%`sVI1Y4#5#wAGQaR_IwS~(1)e|SYS73L-DjNz(ZH+%zKo&lh0gb510@>_UO~- zEQF&MY~3$mPAQ9WyXfu?rVywpmf(KDAtiX2l+JJsI_fE-E-IU&vY12i!s-qw0aL+) zsDNsg%5>!bM%Rx30~{J~1fv7f416S%96(hJY3hAjQ(lW+Sq+j}^TJ+y?V%$#i7-pv zJr@mzfSK%)vzth;oiYs4cV(-#KR!hcnBZ_t7FWvnNhu#PBI0ClQOiot)vE9tg(|TI zhF)udiE!vdEv@cF!$o?4{Ma|C^kr_VPZ?W^kBf>TtXdkv@<4n*fV~PUQ3a4$v=J!n zBl*cod%+xGzAeb5F;J&vhK#++;rmk_+5pAFq@>Y4u1W9>D& zC^M~Jl(L!u7%i`%!U~Z2NO#xTN7VWg@#T&aoj%f!?Lm9wdHj&Wv+VRHe+o8dz9SHl z56{xM%&rP~a2Iow9pSTTq*oj{J`8RuaW;@m(mj-we7{{Hv`M@U^YJ(z3(?Nb$ZfNC zXz9+ffu4;O?)+n(mHYtQ42Nss#{E+0HtwflXXmTq>hM*Aui9}9f|tSKB%FXnzXsle zcCT%P&Nt7CRnO9`ZO3fE%P=;!U_#!uU|L0N!DZ}O&Yo-8^A+}_BM7$O_3ZgJdv0XU z57~1Qdw#~ATiNp~_T0sud)c#+J!v!47JP_3kFh5S<7~mF*^`(eTks|JY+%nu_LSK3 z8hbXeXES@YvS%B6ax*Bciapir>CK*g?CH;*0qhybo?7Dq$p+4C5C(gAB* zFdh7}1=9&ZTku}?q#bu#@K@{^!pbGe-xf^kqAfTVK5PRAyD4f&2>OLXUWQ!)KcOcm ze318c+TLH9ypl};2%c$T(|ISHn;d16wjBq!MZgI1w|EmbH?+c6tS>tZ5|D(dCP$l9NmF!Jm8^D+Gt~7skC@{uy}T81 zh~Z0f##M2jfpUYS90#b@M6xwUY{foqFN3WIZWN-Q^#1d#lF-;@hBcuqttqR-gawz> zAbr9p59G!UC5R;rdp=8?#F2y$l(@I_D>VQ@?i8s3Ut;_v=!p~Jrc-G%m`NyaobeLQ z8C$@rft!aw2n!kLNlAtDkq_75|9?Ai= zXXI$fQtf}xGp#Q7Ff=>=jg~Q#rMnd7ss+L=@~m?$1IbKMl@G|{J^F`+OJ`n0vH%+k zWu8NHrzfCptCx0{zDAfSAXyqR9{G{d{W)u54&;=LL+V^=A)_BdnMiA`R3*QTeY|5i z>l|r0GBAM5f&uDpP&9+V*QJHz8|uSXNU5v`&ZH4WS2_n}L z;cei@z>?&A@VKd|ia?je5mj4Rs3vS#An%zVJqd|gs!i933?5 zCQB78j~aXB?F8BB{irLd6Ui#dCNK_U5=}hnB92&M9g)hOBpAlUB*Gvm<%Q0C^iOin zg{HOAK5MhQ&uV=RIG`zPt{TT@aATG9d@NO;;-t_HBTkE2r>0b37Zvv|w6;V)30>uW zkriE_fZkoR<2Icwt%KB4=x&Y#thZUQ^M^}F$j1e8)<~$#*FZVCy+FvV9j8wjEl8x~ zNWcVUlAVmCqa11>Ncz+W5DAf9!K>qvA1F5idYNPekbv+h1&tQ5$+YZm2kOm$#Btd!{GwqJmGG`#^|qbr{NC5eG2yq z+)}u?aM5sqaBgtFSx^s@@iE+bxN^9qaP#42!ZjklNqCQj3xe~8yN7;hg1Z2B1nw)i zjc~8P{fPYP;VR(*(1tB|zYezweZ3y;L%6MQhv0sM<9>1G{NVb+jfI;EHy^GLt`zQd zxVPaxh1(1FE!>ZA_uv90XD$RT25vrFE?gPh+i)MleF=9E?j+o0xNC4NaNej-FStQ) zW8tR4&4tT@TLDME{!};oc#K6B+(eA^CAe?lcENoH_cq*GxK(fkaC70Nz|BVd;dlqa z`NG}C|KH#)!ySR!0k;M2eYn5FmBP_417%KxOHj&Yp2{Q06u~uq9l?2>iQt}J3`5z= zA|1gio}9$}J!%s79pXIfXWO(Uj{K>VnQ*PAoVnJ0&RkTXKAqP`XBX-h^7)0!wS$6- z9;bNm=>^Hz`r>JsG1<9##aE6?9tbVS&Rg_=zwD8nmzj%9b$J=uLOm}O<`rp!Gmxj^ z-^o+)i_6RAM|KPtF?{6kQAjffZi&9AD1DKB-h%vPT!y`-^A@t2Q@)v~QHQ_WB0WE^ zFh8H4m$g{G9O3l8s6f9kJ3V(^W`1slzHnYaI-kYKIm|1{$}i*>3OpB;7#BTdUd*(q 
zGZU5Xgc;K&&xnjq#8m_=ENWVO{Isd_+PxM3#F>#ZW+o+|tWNRTy%qmxUE(t{rzJ$k z&Co^7oHir%zW5IYE9oK=5~3q#(rC;1vy9_)QwE8^30F{Ih~ zdD_f$^mfK@3eU^e76}Wpv~r)yV=v?_&dV>()3SOh@KJ=giLsOQe4;!EF>DMHFa(iY zrjWOg<(rq!Ytxq^^YjI|dX6i~(&y%m8aX^8HEZQ08R%?4|=>~1geXkk6W0ZHzj`|YtqBQBMTP^OZ0hs(fxr) z8?7&59f;mQsN5ezI~IXB)VPNSqWtNCzHoV59UaE!01(tfHc#m2la}mW#@#X#^bD;Oe0k~%=Iy*zVFmq8xI-jmxlD;T=p*BxgvOr&` z9aN;(YZqi-?#wIB#*h~AsLJEqOjNi?J0pKddLGSnp#b${ZFh`up?(n(AqOPO$S>9| z%uO#UlH(57FQa~D=~7r|6bppk%&hDpZA$iv^ui1z%+1v*osI0Zz>LfDg+lGZ{3Qhf zavrXY%FSN5SUXj}jAxiDAN`mo=(Xr=jKM;EF8S)0Wg`?rCs$L>o8i{P{7k+$9W(MF zbF}Eb390~4GNBRF1GO2HnJ-}V&CbJ;N-Zr!ub@qTnJ3nUe1Z$Y-3mNDR%CCU3`_j- z0@g2lVS3&oJy*a`M{ouZ2BB!cDxngsqYi{eSf8io(WFv(3UqY79o{=Zs-wQt8TLA} zjzA_#-T)qZ`wI2X2_OVAhH8tmvatdcrx&rg`#)x5&jJ!GUK&u8?QjK~q$b<9hdZ!&p=It?pW&QfDHI4le^2P4K($~Z3(fW1(e zvxL9mI?@Bck%hcILmQm>MDUU)g7FbME;ydB8N)Z}X!irX0l)US&+sj)Nou~J&B$j1 ziHW_0CP-HLQay-_%*^bC*;tOX=?fQv7}OSKFUsPJJOOZQxtK;)Cq zzF63?r#;nhPZT(IW`t%|_CJm4e<`3w5l*U%X4$Gd>5?USOijs=- zg^_uY8B4PBke(qcyxZwA1_WA6o28)pGq9ZGvtb&PNK~9|nI65!>s3)LR((c44!}?AR*^OxxEFuyyh=8_ zX^?$#~QYPdXx|k%M|fDf&g3a$gdM>fp5E?GQp`YGvuYa)AHQG z-@c?%Um9&} zLXf?7+5|)zA~CunxF~ob#Q~uRYE)aWd>&B=fRykK-@_F@?Vv*9YG}O%{Wq-u)ECJ4 znB4S5_)oMYh+mo91?*EUM9IovhZckW$!7%Z6i}8FBCpeS8uwxG7qYfq{-$Pk0TS5K*Yp5pkrhznvh94 zNN5>n%w$xLJqkfHxU<~cflfBWx^5clGt>At^x zJ>BEYM-DD=f4MoQV9ZAgEg!7QcxPn!Mc)s1U0UAP@?Asu`IG+`GG&LyzLj6Bav!$q zmsk57cqPMcXP-Z`TXSNc+kNA5df(-(SK8^2#Nef#O=I^!ur^7`Cvm75l4thlw{ z?zO}EifMD4{LkmQEnGN3bNf#pwfT@XFMO_F_LRoR`QH^jx9I&7jV|LSpH@9{*XVO{ z@pjLTZg1K?CU^aplg=F8b1~x9-p_u@JvUOA^MmfV&zVc96Hff=vyk%x$8ndZmrg%> zs7HGJf$hy-^&4<-Tk@OhzCNz`Xy2}Pb~z7Hm%GmU^1OF#kj3kZ7gy##(Pc{3tbg@M zubMC{bH^)@mzKIcchPCD_T>7(e&1a;8NYb$(d|3jK59Jh?x@qdr1uZIFB^S})4a6F zZhE(Gb{be_;06gAIEch zKlm|Z&*{jIet7!qx^okj96a&io#r#AaxB?VZRhhw$CNMnynfgG3q`|J(LeWb88vT; z=ck8P`g}kBIoE_wBb^`f^YhyOg4X+R@b;{fZN_}9$LY-7?>44?>wD|+-1UdgKi+?R z{noEGovrIRVO#1NpRb08&)K*0^W3j1wT4~)-LdV!KfQn4;quRuU+`g*zx(yy#V0p( zA9zt^=y9p2tJlJx%EB{VpO~}6XVdJQ6<)`E8#mZIUVCf{=lR7u?j_N$X_`)cpnfsy z52wEOs@<~FQWo4ix=3I8+}PYlesNozx3u@^dksTQnP#uJ;CJll?-%9&u>01n>-%4d z+P|~scQ0+7FMV+4%&ON;ymzqrTW^)^1Bb+RKo+cjL+Y;IThu%~)6LeavOD z*Ph)yoclj9&^2j)_}AYI^4j<1-)Da{M3u8`&L_6B)gzAAZx`P=KeW%5%g=7D&UkC! z9}BO&{Xvf4`Pvfa&2AoVKQ-3ZYFy-A*e!)~{oB(A*7>j4_2ZTyUpyb$dx!U1FP;3m z&;IXz-g*7f%6>mw^g4a=)C-e-JpFs?w(qm@3>O~#Pmld;o*lTmr>0g4R>E#p$}^xjmNs=(+K?lRjr>8dOuOx4DF+4_x%g z%^vgX<0ogwT`bO<_;k*REth7WIiKnELj;UqJejy<@8)mb*>idI@h>M^Y}-Qu|Jb*$ zqWbG2A+K#q{P=^ff?UVefBl-<+5bGAa(U)~Mdt?$Uy-?O*VE|-oqK1cy*?y=@RR$! 
zzIyK^@0#8}xXw0TcOFJKAaX?SNB*{8?8utLVMA2kt){>ABCtDp88a6a)=LG!Y|M_x})I=f}K z|JChJp54Aa@QpHqYwG&me+$^my>;>Su&w_*I(BnfTJhDu++5oaPeq=7$9MLB-VZ8U z_fz+G*J@+F{3N}2^Zt*oNWcG5e?tl~T&~f7`OSq-x1^<6-;IeLbZl{c_T7S|TNaN9 zyFPoccBqHD*OJh{t~bWU{P3lFTJ3W)3;r9TU)+52#^60Wszl z`L(&sclqVb!J*f`9B*5GqiBDulvC3Co5=BlFVAwFw!A5N)dI_l!=_c&2F|?bRMGwQ zh&{e$gEZjd>hqxoe7@NjI;D9}zn4FG!}HrW%-uY=V{>9hoYjd7Zs#3Iyiz>JJlB7dza$gL`kEIv2+ z?AU*}T^;t)H`}?7%F250?Nt%^+_QUTXRVitqt2hto!xiZu`9AVh_%7QK8iNx)H@9|CZ_bJWS55E3M_4V)GINxH~>%L`^IdJFV%Ol<%e0{L- zzsnaN`7E~JRDSQY*5QL=u3Q;=Idh}?H>(%UlxFtV--!SD#+S|?R&VaT=+&~2fg9g_ zeAkw58gIRO`R=%5H_kkNS316T@#fOIv%h?EqQ|>mt`05x*VdrGp}%)`A2Tv$aNnZh z5$<1IDTw{Ee)06NhL{z_U#2abdiM5F&DAR>k8MA8%UpK0;pwh#{M&1QdB>rGjT=mn zd&*+c2DrHAhW0x6RQC}^U*AEK)=yjHvU%pJ%G=A6)*M|h<=7I%h&m}JZw&%1famlN0C#)NG_W9F+#};&}Fb|ut=Z(>?N_!ry zKflrK+Kp9nKd;tL&U-aB_lb>(f;6`4xXtb!19D~tsfXyF>e9IUlQ%w!-TQK8?^i#3 zY;eWChP_Asc+XsKU2yqs<$&u~&+ogk*T1d)6Q8FH6$4j&`RY@}?fTxRV^38S-VJ(<=Kqs9)lPY*%Un|T z$2ecwD_!^4nLteWBiBDmFr2~8Mk_QB))b|?K$in&k2*~ zcolpwFmUqW#y)+E-+%P+KJ)v2=JiDHx6=RHtNMq31^l@^`;kL`jOl7yf3w@i1Hb6< z>Xafs|Jmca4+y#Ezv87`J>nZz^lXTU4?R2d<*=9jemH!0tJk>kzePXgRrbQf8RII) z>l2(O%<4IJY<|hxPmZm*Fve+f!056ADI>Ohw=U$@#ivGo@6=_~D?cv|`sC%!!S_a7 zf9&wH!v;=?UohbL$2Ruw|K?R~_Zva|27R?`@S~T%9uhOZ?eXV4Ck|>3d~R6H(rrWk zdFapKpTGa)6P4zF7To-0)^l4@`xd=9>QZ4~@N30GrcGa3_)4q(1uU-0QnzI`gX^)vO)QwhLiDO}x1G&*v_S?{53yWWPVZ|M{6GFP-yV zeD-?g<}*9aT|d3%%VFQCK3q^g>f4Pcatf}VO6wJL((m5fZytO7?SrcZU#N|X2&g-j zoKk&f{JNU=|8}Zsoz$gr)4$`7zWdF~7^%aDe(wThKlkWkQA=L5oq|2g$`(eo#VKV*yS zJ)KB-Oe4W!nl8yHelYpX#n8CN%kuHzuym=Bt1mXzHant={z&+2&GyjYM^C~G?xaQ9 z6zfGDU}cG+^Lh}SH#Ae0*@i70AhR$3>Q-109PqkYgOUF-W2Jl5uD+^D*07}#;n(8D zeulnFaHr-a51y<(vbJH#+Pgq4V@)&O{edMH`6}8TPIz+sor{% zTCJg7e2wGIvG!xCOl=gO^;<+Qq_#H1O>{&|aa?(1VcPXpA`9JW1hI4IXoWVu_49Qqa*Qol_hsWQG`%g|8G1wf* zHD=TOCQ>xFC70ow=a)+Po82cSt4eM-C{TI%^0*J&^*B{^mWdT3_`SJMHfM-6hIej| zT|U|SQYE&p=9WqjmCR+ayrVahSQ{OrHx3~&bxu;c!nDWUWX>#_M8#yjI5Zp+ZPze$ z;Hjxj@}MlAxPLUm^HbfBN{K-;g~gPDf@VEl1Ve_W$*A*XowTNaQ_Ks zRsXA(HqrIpaIE#_zd>fDnb$J@Da&0k<=4HH5MBFsTlYOO(&BBU=aVCE>RYHOEQqD$ zV6S-`c52LFe`>&pz{eeKX&aV#r0r9(BVuKRQ{Ns*<2SHWY!IdVLOWTiwY_a{srb$0 znV+Xygp?gv&Kr<=@V;qRUG$s~*1RPW`t`BlOC_5Nd9u!5Z$#WZKerdLDj2^vHn->g1AHH}M@* zJ<)Wo#RnP-r{%8nM6Zww^Q*C_Yk#`@!EdKn1|}yipi>}g+ju5cOrsbx4dufTq-nH$zWY5+h6tLjs-@E;+>YC z&FQ6qL>|Q_bGR?IdV#`UCAO#G9@X;bca3Jhp6~lwpOCLaV@G;0;A2VN z$i3&zWa$1B5<1svz_T(jm&533p5V_sEioV@SyrJIhf$i9-GB{?X!lE+ z^J-Z#hD)3j;Vz0+GWW{|;`;{6_Jv5{PK}!LTy?8I?H4@y?)EkJ(QPIhYxAd{Z{PAE zEU;8#%&C8K9ld{rS$=Qp{=N{B(4S=&UYuj%rjg8(bkpNVDq*&d?)7{mvuDD6k6P=! 
z^u;hT>a(*gwI|f*3bqT?ZdH{TQ4ln7`+oKF%&`mv9?jsd9)_eq`A)q(~;{Exuvfi}H<{$DYy!-_j-bJjhFmEAOOX zV7Nt#jW%{{+=X?Is&BQwPd#AuJlA64D?R`Ct*n}%terQ;N*he-53vX}(1kT$F_Z3Y zbE$gU5`oR`s&CrRG(+oCJCgnCfy(}iBmS4k9`i7HKVgn|W<$f6ZYv#0=XzWw=(uU= zbNWG=H0oa$HzL)%CnaUpoutM?I)O)*H*VFG+@KCyT?ScBS}t5Ib(l01bxT@McIpi5x2jL*u#aHh z@+Qtji6lJgJC1t%R9iqq)R#hN&DyNu zA4_FA`)kv24|@vf+ZYlWvt+xRUtg@NovfUH-D7qcRLa{p6EH?M@=lv9!eIY~`rZ-; z9;+cs=4CY}6)hPz|HswC$JNJ2O`ph3(%A2ND7{$LQLR7Go^xHLZt#>y- zP01&>3X(1)qT`=!t0+!P(mGr~Q2(l}%XPAo{vVZ$OiQA5a2Yplj>l0A_qMBjTbPS` z_Hn$wvQdY&u{uO6CqghOCW~WGy_Zo>f1HaU;G@Sqs|DN{q3SWieT}nQII% z!nbCboJY!7YXrYNE|cufp7l33|K;MsaUUl;dDoOa=WHbRrG#9jXC-yY*v>M4tEIW= z;<*n)FRxBSa6h_UXJ>9+U9PB{``X?44xgXL52yX2&eho6rVBKb)O#2?q+BPJB|r3; zQ`WupF!227Y$wvb`@`}j(cY%h9I3u3lz9>?xgFo>BNN+pl}!X5tXq)s{t)=<_?&7d z?)uh~M<#Y3TnyA9pSKQC?9~yBd2GPZm(CPNl1Se!SIjjR(IqwBE0uShj#+YNatzRkv@}@&42Lx7*sy4GQvHm|_!UM^iKCGbhHfbZ^`#!@QaPHuC9P zf0~!|n3;3n>^3=r#t9Rh$XsT6#v_}UxY)R*JRTk5!kxGK;an0769C5j)Vs8Yq-;r2 zs|#4;a%aR#?r`qz_+eSaJ$<=BcIaI$V}xKdb)CyCRql!*eWq96LzMYv?ED>tWdlD; za;r9Pqu%O6#<+X0hJMHU!>Wn(fp!xfmLUtGFK0je{Q0A|>%mZJ@SXm=LlJS3-?CFF z+iN=v0!mBl_De@(Y3$4@+eqE`?G{6If9$aZa~TIW#3^~TTRq%og z!tt~<3t`NQ5Y>;bwoWcgmBlDU{k>H^rW^E} zhdu=rXFse5ta?xDO^SBw=jyt`9%UqdV%7s#t zN%k5aJ$;mePL_O#LD>ewz;E4}*8O2&QrCB6aecI?r!To!&Xn=wg#og4MMuCalM27r3@` zOkbfZ8Cv0qi;$H~tqbuO;%;^7XZj*Pt1Nx=hkwu$JLkshA*LVS=~M4pR+aK=p{}mY zXUtuUlf5IJ#{C2RG=`A$9^bbi^1d6Xa;FI;b?Fm|{@&MQiNST`I?Pe@oAp{Y4NE7} zWk!wC8ZJyzvwyXvdYGoaF-LGCjhV_J4xR#!ZO?D1?6~B=eY@p0{;k3^mF#b=&a+lb z_OdxOeq!AF{^sW8tY4T~o?-@X%~Ym?rdziB6yaiSNeW^Kdafmitusjn zXuh<{D!w}?6CFu&u=~K_L!`QPk`^pS_FXvXzW32pNeR|65>ex>NRhJ->c#FkzY@og zJmq$Nk+Q2SxrBE?^e2zt(OX>BTADlacu!zw&+X*o{=l?5pn*@IZDI%iils82%tlW^ zPmYg!>iw<*ipTET+Fp5PzjH&I!<18^>R3NkjVYf+ zLw|?1da7@r&Vt38KdS=YW*+W35O4qx9bLPnbGSEd0;xTf9@z&f6~PMZS;}C zm|FAKsS;-V-#1veE14dv5_X;5yESxb*|T}3cJSRZ@1pV9gY{-Fe~vr9XmJpG9z;jJ zz@skp?m2eTn@c%HZ!JGs%?ZyupMM*B?{$S_+$$%uD?c{3EUpZX-1wR4lJm=uqU;+x z!=CRCgSLLnxhDU`tlIYjGOzl+H{#S{-@Sq9$;?y zux+35qm|S2k7b_ipYZ(DFXgU$MF$vcJu2NKN+rFNayW)v^(SmtDIV|vC*BYXAgewxfgz^8#8ydU+Z$|?d0pZ zH*cNjyG4Upx}!+xb2r*mVX*ri`v7UHVCv-zxfItG|1$#eLTU4SY#GHe7cy-PD$eeF z^*((n^m=Z`jW0PNRf89#HfLP?7MP#a=+1r4pM3Lq(RTIh55p%DPwy2;&^Sh!L^o}b zJQ(j37kcqcy!`Ux(`0Jlv3*>P5!tq};YaAZLbun@*ntDOBV7)$`Dk9M`ZKmPFP)TF=o5gP_0T{h#~xgwEo-xzYY z$X9;xoIKHe)8R%*m9H0-!rcLPp0ib_JOnYUOtOW|-(y&Za@YswcrSS;%h&W_E2)B1 zZsmzxmSIi0d32++Lt|YGG9;`k<&^p67;Tiv;>@8JnK5?JF~d&}Of?K9>zMkB^U1z? 
z&Jcb0i2pOWw-4#Q=yf?R3Vg{|&CI=Kx>IELOpdY6DK+=cH$G9NJ&-Fo>AKR*x;J-7 zJCUndAUA{cI`8gNd2K&b*hq2MV1|#$XG&G)md|fFa)_%`_K4`Vl%~`9%lEeU4IZz& zUAn3B6y-F=ohBH>aXhDPq*okcdg?M zE3zB><1KlbitbsTR_^^ud1tR1V)SU=R{B<6ttN6gzQUS?J`UR0g0ROm?E4+YP6>?Gg-LYmkt@;i=6X2xX&2|;NqvoE{mJh+xeeH>?Bw47nVp|g8` zpm}aa<_oT+_gcy0I`#zuS9H&l6Q`N zwaPx8Lq-V`09_ zT+ZCYN`kp3qr^0`f0?Av047eYVuS3gQhP)g_Ssab-%+#Ao7Lt`ek>WvWVwV>aNd`( zQtXQ#C=c0JHaL0;C*^jP$2{2Ybp5s4??z3wjk-_g*KYiM&A$|KVl>H3lW;*vmGEb5wi9^pV+Mc;Y=8GxXIsL8?>Yg=Z|7SO4k z*jg*JU13B<^}Cyi;QZsySM?lHi|^Ew?eL);2(oXrN}nL7SX$FEZ~+ZpFDn;e7dtlx zA4gD$wY3BN1cC+vs;rBuzY?yZdJ9C?ecata&)o}a4eA$QCMZ zgpqOw_&vahAP99g!axZcVpNvkuz>S`qVYq)$pM~%a0>7r0S8ubs8f*5l+xf204D`_ z2*FUo7J5{m>Q2Q6xCSUBKotv474RbPlYt*r%}{MYn5doszX13_`HV^e!fygU9r!^F zi;5d?DNr|Ei%JZ_uR;Da;0NVWDo`_}E(A&nE*zXZ;OD>(4M=4Q zjva71QGc6;A8+lf&90EA2bW8K>3ln z11LSXByie*S61b(2N(nOk64wzCty(s?}hZy@`5T26)410Ujj-3E({zF@C<~b<$=XE zRBV9Htja$S@P5D}tMYdO3|d#zbwJVbo(87|_&xBW@-zg;gfLK%k`Uw%3SU%vA^i5L z{6PtZY8T*gplEwVfl~tf3iwg^n}dTDvDDeC@;?dq5a36v@^=L+2)GR>+Mg1^X#xHQ z{HXlVKC>C>e|1&<-he?4)P1Y+N86MOa4}FcegrrLz_Y-Q%Ks=hXlLq-Rr#L)EDd;c zRsJyAsGwg{HvvWaQ#?2gz@LF1t&cG{mUZ$6EljF?5I(Rfe+R(4fUAL``Ne=!0sId5 z(e?zxF;p;asdHB4e+sZH;E7fFgAx;!FyKz0Xn#rurvv!se;EHgkUlDZOK@0-pAQrP z7Ya@e@HB*@@;?HO6>#dR`~v_>0UlnJzawBiz_mb8dBuWL1-u0OsQmwF{NGxYzb(Y! zhWKSb(fE0zWFxe;WVS zSLJUFadtxdBA}$;!okS{egXWbJWRo{13tSd{~*8z06$oj|8c;(0XG6g+cOTFI^a*h zkIMg_#{b<_`P)Mr9*AEB6m9QlaLRz+0zWE$3vk;3UtE=c2w)k&Pgmvd23QF26`=Is zlE7&L{_!8i|5ZpIEw2?gPKaLslmuKDI2_<-5RR6|1RNXSG@|i;Z&m(I5Y7+b^+3_` zo(87|_yh2x^8Baq-@htt zSP*bKP&#ml;IsgL2Yyuk|1|z@tjga8;&4Iy5};`O2yhC3UjjcW|D)ix0?u5O{|Ug- zfX7zl?+jP~a5GREaPidDu5S&A8pUI@t?aY z|F!Wyxhj8mh$9T~yMS&2mkdq^@GpdcoCIMZCn4EPjzPAPlQ3Y&$q_bk5;7)o9I}m^ zM2>-c1Hwv9Lb`>V6X75y!7`FlBkbfPRLtaIh#VB1ft(ch!+@U+gscDh?Bk6EomG4~glD3^pQ8w&YtKv8 zpu4}Cix*yBejOkBz_l869xz%?ekTKVdun0KJ7wY!V=?`JV@SP5)ri?w!f z2eny3Ch%wTr@Vv!!h#@2(C@VaZCo!KtgDkZEbg;$1RZ7WX)Ua;%~UEBnEvySezXlW1xySjK7i(f2^Mb*^t3q!D?`}gk^ zgjHvIL08?&8H=-Zfg&OxEi#ZC1xS$!Z} z5KcrC!68;i7}A4~kc5$tkdka5AtRw6p(3FrVIW~8!IFrO$dOpC&wnk?wY<<=nBioB zlNC-jIN9Oc3MU7g+u)Rg6K#7}So!P(D{Qg9wTw|cVtadNAUph`PRIy!mH(``A*?h+ z@I7}m2COHu1Eu2M@lo~gHMADy1~PK9gI;9ogkRqai~119RC&Y<9Je8Ch6hv*oCG*5 zI0kTEQV3z8_EXA>8pD`~cSznXx|?b#yA8V6<8FfzgY|eyX9)Z1fD^-|hg67wh{(lI zeU6wsdBCp-SZ?u{ZJ6#>hFmJ791%XNjm5s<#K(WS8vhdSPCT9<;I?EsrCn7+n5|a&yU>D_p_> ztSMVaQE?&N_+Lahl?b09!f8Y}oe1X<;d~-IM1+Tl@CXsUM}((|@C*@tMueXe;TJ@B zmI%)g;dvtbiU_|Z!f%N1J0iS5gcphM5)pn+gg+4Bk3{$r5&lesmx=HfBK(yIe^v8CA@@Wb~Q(GLBX?cA5DO9kVi1q1JN?G1q6x(H)cqbZ>{I7HyAZG2AEB?%er^ z@_pm=>EVHI8~NsjF{v?)VoUSJ=DorayLnCwVG_zib-nv&H4Ve5MRn1UAxx`0gl}M+=Bd^?O4$PAoeB8I z*}QJj&Ye=}h42G}@VsVv&$miLm^11K_VSHNt*Ien#Cu))b|WcN>+w|44*K;N@ekR& z9>3VNWH)_+LACATi04g(JO$#g#5=S-f$N$B+U;WRZMU8`92_&&mKH+ zRgM4N5T?Q#!mrAN*3Jk#v^5^Wl)0ht$uG%CzyH4YY%4=#EZpB@jJ|{t;XJzb+s2Ou zG3R`s{9~Eca_ajIK5rYube)9wPRj3)>aM|JmOuT&inkr{KR$#h+KZ+?WL+bY@Bbqm zup$BH;Yd`X^+qu5O^9XY31v-A*R!d9XNECn*a1%-y>COrZ%c%iB)d06=MQ1R)gk|* z3K6(sBL~lkAxy+p0=6THpZ0jK(;Vj=&Cb@`XE%cQgAPkceYv+Xj45s<$65 zKdX33NcV6k^qVWDkyDI%i>D z74qB0KO{Wz%3Tc9DrGY-c`<*DBhw|Fljnf&JrKY2P(G>75;w;Q_q8PvbipE*Z6ale|Ml}*d;h_e+@57f<<*%eK zNv7!?cq6p0!05uj2qu>v3}p&t4S)~!(=W)dP+m}=4sin+cOz8 zT4}i_Z%x0LyR>QPr^Q|h?XVkXxbML(BFsaCd5LgE{c8?};B2)*mNU1Ruk}ap5rsc1h3{^0No}%VKH`8AABCrz1CTF+*4{m75M*L`H}1&%M&!!u{4vDPs1)ep;QmYb*<8olD`CI%gtv z5}VDW=L2Gfq>MkMDCcyw8F(CjxGc9)_07N8NuxZ>e}l#1?A#RjXbk51Am%js|3Qx9 z=j<}0KQ;{GpM)^}Yl!Y|p}!;)H5dN;KXh-_cePF!#?;+K$9n<>+kN3}D;11SE>ym3 zUT>0k6DnkeF$pyUdB48=a##rNX9)p#-0-o@f?3ZM`a=~V+_w0j&ucp1hYeLx{)UjA zVmm95L8$?P;Oh9I-n;MrR=``icfK-L@&MZ0T{Y43*v%#*MjVIcJA31%*%_ 
zDjrFO(zJ9bxPNpgLw#IkJ~7z0%I$0%#?&_ecKh)%J^Q$c49PGiQGy`<=O1OJ-^2e= z7BeA#ancW=lg%TT0%^kiY_6T@k=EIJIyE16G`7;PXnfPOw)8KoRf$VFpw5=X%Ih?^ zT_(XOL+hf6vLeoBW>lKzg-%#%k8(}n#kcn#pL})g9nDCo8M#`ubl0x!dFSI%e>{|A2}A;EQRCTbgwo=y}gXLzx%pWw$k^X8Yd+vtCY3rS=Jxon6y z-OZ~ZmrTVV$nfNc&rtTgAIV&om8%+DqEt6H-LP8rXQ^_G(~7ww(rofg{HcCd#r0Fu zoVe3Sw6%NhE$_rDK4*ppuj1-0YGn;ej(nU;6D!uXf2^7pIO&^1PQASMRZ;le@#`uX z{L|*bp4x8O_xF?hJpNuSI_1#i+q1t~*o-lSLZjQ%%7b5O#6PQXppz)Sw&}f%>g9zm zA@66~c7>Z)Soi$6s#W%sl5U%tdFIP~@~xG|DhjE3H3=?a!^@YsRo^~;QmPa-vl3`W zB`NVZs!8Un-Dl0b1J4%hymAjk@zyW3xew}GG(3@Xipe9jFdthK%JNy~3M;d>!u89{ zb79PIC#Y%7a@TBz&daSg@O#s&Veq*(fr2X;1en_eize5NaMKCW}J$;Kk3t4CX{?t)#j z*u4>U^0!ONuDO>~%B%VBvGgt4dn{>t2^KEs-1JE248Rd zwnfZ1GfTeR>P*>0x#F(_tO$3M)pxZ^57qbYwiGB3r5%oYygNg_bosdlqu4Dki+#v$ z&7Bi!EyId*%L|8|49&FlOfpC)bUqEbM6)yKB16UD9J())YLzVtBA*-s482CIDK}oP z(I^OaI(gUYeCUmyGZUvGoOBA>qi032>Rk0sx0&9F^r%%%J6l}!y4>j4r(1XO)3kQ+ zL4Nv|!{s~V$WyKba`|dTYS_+x`{L=GUX>FSB~+za!cOhfZ&Z9YWpKnve5F|@oA!0h z;!4B8OMa7+yKmZPr5ra>tW@%js+VTvRd5z-|AD>p6%*=SJ@E*VXJiRciZ=<~H=GMZ9L(x9N&%xa6*`iPC8k z&8AB@rD&DoudJ$d$RBFOsDw6AlVYzmBfrGVG%vQ-C^R+*dcDvtFJ#|y{q~{8C)#Jm zoQ2|g(j`cyqOJ&Gwku>wpJ1w_{(i90tE+Zxx~%3HylLPAiP|0YXw?VzHOLl{hxct@yWPTxE8g#_BFLP% zyS!U+595&-jjkchp~>3w`y2Um%$&YNYA0DPZEFoSyO9+TYOIrBt=MkkNyZggOU*N; zc9QcTxz4%rAHBC!l#PziJJnmwOUpOKNV#=A6x%<#;lkzClT7OAwwt?ho3eL*ckrQU zi>y}9JI~b2ka;<0sOEIjJ->F%nB^np^t4kRmJ~V@4BwJ93ezd#v2|ROAJX^HDm9iX z6!2g55xJhYyRY-a9>tp*)leExLdM?Trq_-M>s0mX~P78_sdxO;8g?=w1nm{L1J z+u>F;!-tFEGMu45I&?H+S=xCm86C>rcnC}^pHEZ2a)?_tAng7b$CWCL%4^Ijd3jpq z<<6UA1i0_)3eMKk37rb!=lj_yf4*Jy$8SwCw18n?@7a*l*l0U#*gly*4SncVgz* zhrYMCNNQZw2N#aK<_Q&=hv5&)U%pj~Zl0Z9-g5V6c<(~qlqJ1#n$N3Py@M;Ei_86F zua8b^b)=+3C|wKvTyA@;ooumDJ4uVjm(5;6GgI7Ux&HE)Qchk&mM`pyMh$DC~H|XZ+Z0KlRkRpYr*?*Z3hUb6#(rSt@sN zACx{}8%rU0b(ypL-Fby}y?w3jqh$*5Un>+^P7A8E4$Php=U-$SwmEqErV@)*C)-Zx zms_9N=}dIkQR?vOM3k_v6i%Le8vl5{p-p1HX5$^^{40BK959=YKe`D-d2n?;at#w^ z1OCi6!B+UwGWzWRJ-YF~i1H?) za9tw2iwNrxVR|CWK!lkI`TWk$()rT^X%lvaMsz+%uq$XtRL6wv)DWgN9iBhwZgw^0 zk`_rE9L8KY2hX3aS>=dP?Jg#gAxsw?v|r2Q2wPO%k(=Cm)f&QOA99Hw(2+R)cX}&C zc*yDR@VskO44eH1G1*J-{_W=V@eomYWr(mG5mqF^M~N`Q;_Y)5xIs+p7AS9{)2F;6 zV`VJLFyFHR*paRwMaz|5%3%he zMEEWd9w5SlM40Nv#SA|)cwdS}=SymgCsG2_`RfEQC3e_k0J#1x1?|LbwH6M3LSr{On*$-hm&8w0bL8)k+uHMP)w51$lo zAFK%e6flCxpaopO9^GBqz)o{~1XHC(z(Z4dIh))doP>Zy-mN5D**}D78AkHBD+JqH zwurI|6%S#GufzDJxUAG|(GjuR9rC+I$gg`sw_@8mj15i*xfnczNtGpp%if?ZUYQ!g zWa<+5EoQmmdPQMAjDwK=wCuZxahQK>6(isoBJ8o~-#8QUCq2XgVXle+EJsLxslUGG zc5wAK}H3rh})emD@8_@Q9 zP?K}ct6S~cFedLZp?#3VfGA2wXdec^Fuw{4moDq^;`mi|GnQe@*-m)h&U~dask1oX z^%KTR5yW@gH@H$6$Z&-p=4a^e_<=kV+mJyZm0AZyog_`CQd{h|{ckRRxsHqElxS3O}&xaAwJ;BvI7J(Cq(uGAp4vGv=Olr`K+kr)h~b zMPHrSl5jkhAXhXd?(*w zm_K86^~BG6HMt?Vsgw#8Lg!g6MQPIl<9j-o+k_QEuf>^MrB#*|PqFr}dB&$+th?H2rC+XFDDW9xN@>DiBfOz9?}t zzcbC~*VG`OsjRqjV~^{oZr-t{R{fl9dxS@`1lzNmK;`!Av{G$>Pim>! 
z=>m%UYJu}5@kH~9(aT3Cf>dC>=pwW?%rDZNoQgO`YlG`zx2*Fa-!50(Fr##}7RHM( zv_E`rh~R%!}C_xGLyZ!T#xGA2AujcFRr z4ZSmH&OaxgyCSb4el2F!dv4)rrjw&~C(r&S@v2V7F%q69Ipizyc^7y8;B0!Z>`j!O z89Al5dY!Xs?k;8-lOVpbHcVz)+L0KQl4%^Tr=B5AdN>0cj_j9u=;m)5y=iD(-Pvry z`P#Tbr^58bw-c{+yDW_@ZgZ&P(~4(coDm}}y2jV>D=*x@UZIm$ChTPrMQ^Z`P&<}- z7?Vu{_30h$c~>iawjmAX(+U8?_ZT6Kqt6~8JsP>2r8+R5?z2QS71l>Jk*l5?H-9w} zBPNL*7bIE#J;`f&)9VB5aKEXC_S%?Ixw7lI|JQy>g|_4E#=Jg8R?l6B1K(9}$|YV* zb*i%(HJ1PI!^ri=5aw(g^#5CVDQDFLEf3V5moIi>+xuKcf#p&5ux8ONG4=Xpi4*UR zK65rH_C7T9LSE}~w;pGkyjkMWO&PJm@+Y)IH+$ps!>(SH9z6TMySJeSXYbuz_L zds%1t;jC`eOFO?$_{@C2-kz0O{^pLLO6Oa)!q48aL8Qq=+f5&A!`0;E*i~{`9=&m8 ztK_@c&Dxzuj?8e=hHYwWx;IJNGp`Z#x!B3+BkAYssRd{6=kRGpl2qhSc;;YSt6yYH zALr0Wq3u(pFOTID3gOs8FK8kcv3 zFji@;xCVqhXcmyvU?QLbVc4P zMl9=m#0?)j4&OIA%F+JK|G0+ilV%2duZfoh`C#T=7&AX^81t}DO_Fi%H#K7=z5T>< z=Zeo3#pLGe&g7LdW1ZC)su&sr%^WX-gwJXu&PsGsR=<)hwa;kV?Q3FYd@A`qKH&}f zF}1Ap)Ca)_9o>1o8b`Peoz+O|Rui~yaMkE8S*+1m|DXBMvpc#~qNexY8n5f{PG*?C z+R1jfjFC!FE9dH|dkJ3dhc+`E)0J^Pqk759Ritg_~S=J0d5JJ=Ux)ylmTY zD4-(NEBHaxeMgl}(u)~q#l|h!M}Ex8GW)7sNGQ(TTHqleZ;z4OJeH|(KEfxmY=}&J zBaOZ`X4YS`pfW{tXeZxRr+mL1>0c}5l5bL*%*CiMgtk4b5vQ+igz2LFW4)dArZ1>I2uzOs`A3mr|Ow%@mcAzyvs{;L9+)-YU7p^ z6BR~Qx|}(NZp)^e{5r~@8X=bMLgGK8;^VoITYq$$dhxyc=?B06JoMn~dw+L9Htm*b zF<}`m_C~WuZ^;X?SXAO3Uw`@N-1gc`lOKJ~xv{D-8`9_K6DDN02E@db?BAhYUSDwj zf<(!%e!XPWXk%0uuZCi1&raOyiM~eu|_@ec81zIQGsJ=59=cpBUl~wJk28= z7`5yB`o5P+7 zv#~oGI5dl_>5SxcBit_s-Te7=Z;)bDGnPv+RgPVG&cXG<(H5->9Z#o)}(;=8Q$~XtG|zoGA|`4B}i;A%JRQ75|*{etyCWNMfei;af)YL z(E1_!@Pz$GtuB`xMx5u4FZpaPDeVoNQi~;z;(mSYD5cdEF69kII*Q@y<*x_crhPj@ z_nD5>aa6hf$f%Xa)93cbC~OQ5hDGBJPpYYt#$*L zvNm{rfbXxCdlCuyIb-nt3HmuCMDZP2c8HgeKhJc*EAA3oSCvW@|KJzK@r(^jYO&hi<;}&?rRh&IAg5yv;;K4SKQ&_NFO$c;{{F60 zUNKZ#?xKql)4U?l|Btyr*`?mR2Kh{?yt=lh{x{W@cyIO?@@b`67(WlL9{2vf;CAP; z!gb{gHoXU)8K+JjWEOWW@^jJX;#8Wr@L)rJ>G6A2?7o3Ym+VK~50l<$=jyM;Iy$v# zH{UOIMTUakMcI7y9DRCEHLs_dVy?#UI|sG6+m{hJl|~C8%&brD zAI**Dm)OUxIP2(gSSS7Io}VKQ3#}PXci|39ZNMdaOqrYvmA-J*TRir3*D$7YFX%hf zvTvCFek=RZ)TnyHyBS6jnKG6stJhv$(Hj&}ecwOtix&#^H5@u%9ILB(#vjvi^l=jP z1yhbEadQ(|MfsLrc4MoTKg=|AbGCV_B({lL+){oWFLGm1>*C}y^;G?G=kWPJ(~VM@ z!+{%bYUPQ)jol-huQBcULCq}T0(?(1C=NfRchI`#xXDNZlYFkvJj3~oVfr^JDLqKV zhB4(M@V?i>OV21zUcGm(h+@Rs2PEb8p|cwVUsT?Gfz=5A^>QnwvwB-fAn)jL%|T3J z4D>(CPhB^t8BI?3)ye0!DZW!?dVj-}!I7=;!=U16dW?I2&cG5WJ;|0U?TfhBXKxEU zIqf2&1g$BI94!?Sr%Ty58#pwUPn95Bie%K9Iq!by-*nW3j(yJ}XQ!@0Y>l_WO2)#$ ziUiIxS3US{BLT6C&AqMAV}^W523jT|kv`Gm8FAEB>} z=qn~VZ->t9fp#&Xs(QfH^#E)_4~H!rd$3w?U{OyX9Eg^du&u2yc;L_lwO|}LMLQci zbRT^5nl|jtx#rgaQ!eObeLD}>QJ)Y2=j-6>?SmldzApGQ)O`tim*PCUoLqn(6eU4( z6ZUkrv-a{umG$6Iw{pXKAs;k-1ky#*L?A6R9dK^I7l=R_C_l=J@*w(Fu+g(E1fc18 zIpKWrqZf1LcK1_k9tFk3c*G;vf)?SP6R| zR<6Pxu8=-htx$qZ^ub61y5ltLs|tFXHi$dy(`yH(5MqnCzzOf+6bgtgr2=AL3TgYg zK~j(+Bn;_8HfV16`a%_;CQu=$7gP^w36+I9L)D=L(1w6RQ{k9_nwAnuwGFkD4y!5P zuYqM4C9nfyf?iiJFws?7jjN!e2}V@_!F8M_sPqT4$)iC?=VLj<1^j#~@`y<`4zXPI zpNqgD2CM$GOdO)R>Zdx3L*!QdEa^CegmeQLIRzyZ^+p<6I{Hlv7{<+8n3!2u+1R&o zY}>vA%ej+_dlwHcAOCKFJ%U2QBBEmA5_={0N$o!%eejTstQ<~WK~YIrMO95*LsLsz zM_2E#zJZ~U@evc#qh{t7$1JU^ZEWrA9UPsGJG;2LxqEnedHeYK`3D3Bod`Y|atb*n zvApkpfre2UJ8R%}s3h1us_`F@5tno%Ge(N^71PX2Zv!S$)6=;N`9P3gC>k8h!9{BB`eNbIm z)Cd7s^g#DO1}~^lgRZj>!D0v4;DGXp(`^*Qf^n0zV!$55SS@`uP5e4K!lu^vZLYzN z1q^xE!`IdqHI9OHgQQ>!eY{FI>_1Pi4uemABYyi|P`5@a3_Fxt!Orxssr9NIi*;p% zO_;$L0yG9}bB}d#a<+ri(dvLowyz7`8i@_;8jNNJErpgz5LGh%UmoI8~3f<8eAE@Z!qGjbos5@G3F|^+3HrsY??!FF=SZ}b>0LE<~ zI&=Ul&^un&VC&mUl&HOa*Bz_`pu#(BKxl6t*hU(Rci7ko{rByG=7W|^1kD3YiD2Pk z&Ge4#YPgsb8V>v1qw3bMH}|?CfbP2op|p^W*uH;LM}MbvjCi^I;gkGFK5rk}f5W#I z?W0>1@>N)n#|S}qt9|IqfwVCQSN>k321!CNpvtN6QIqWe+4KlR3M 
z6b*Z5d%HtB*xLCy*{qAZ+D3K(U>XWF9e}pXpOJ}gXHk6n!2i`+99QUePCkyHgl>!W z0%zj*|J*LCOsFzD6qk-5@^xJu>`f3XgV=#}7+ZW#fep^_gB%11t6IST0@N7`i!WfH zK+o}Y^Yum>(BJC+!yylCFM@Bw|7E|oa|5ZlyP?*ppbltUd>j5R8gQ2G)43Y;{P6K3t<1{5U$3YVt>+-l$_ zFj%*=gy$RCS1{{P(rV9nOrN%CJFYZ63hILV)r z6c&H|H|HAvzxL?=-7EiT1fV>J&6aMA08V1FTs4-#Lim|2K7AABO=};{AORG(SSa z!XqN1qGMuD$HgZkCMBn&o=HnTn~|AyF8ln2i#fS@`Iia`i;7E1%gQS%tEw;8)YjEE zG&VK2w6?W(THUX~pFS^t`TFhq%8#GF{*Pb8{`p1gzj_h-pRWJ^bpHRx?f;)% zg#S4l|04W9U4O8h1W&7iu$mX%qSOkZy+u(Jbog|c}2JK-nJy#4K9dl^STFusOQ1Kt%;i)(^@c5Ze- zzIHBX0oShidclMkOf;=wLVdun9MtFcH9Id~VEKcsysYp{e_exzT}XJ1#|_#LA8TD4 zSD4-Kg37JMafO#A8<;0s!_Y49wvIQ4_WK&-1J8iaZtKzkb7uqVsL;hA%8IJWaTF476^WC5YxJA*6z+wx4+^6 zfIfh-3$Dr2$qSVp+9QBMxC6X@tqWemF1`UU*&~7^B;#Eh6-&BgXPQLe&}sZ*4B3TMqdj@`_`Ht3XCqL|Bay$ z{`d)F2`?{OUs$;>_`7=eafb>O%zfIz#D+WA9sGUG8*VICAb3c2-8Ha+>tyeQT9!h` zEXs?H_toLPb`7QA!1nf9!$KNsx#9(c?*d>`Yqf0a#Q*ymcC~WA`uiH{zcVP0pf<5v zR%ziX&B9Y0k^)p)Qyz)LV+AA>sHv(d5{##6h!0Q&Rei*A^}6ONmRb$x#$y9y3s5v2 z1Sp!{#{rb$kmq=+i9Ezp1>_o@$|Eg!s)m%}sXB52Pfd|XJk>`W@zek@!&4PR7f%fl z#Z@W+RQWKhjmJ|3gaJ?CH4snrk;S|6h@uh>d4i`3$W1)eL|X7vA1MW@q^*Wz0aY{5 zM-qT)>l{V`R;ewXY9pFJbq(YZDWFRF`UoGM$|J0Js)>-{sSdJu2Mt$1?&GN0(4h;*1iJ1-2q0!-?Tgb^_W+o~M zHn~PbL_&2#V_}wsk5@|9I27hxU~)!RHx{zR@`c8R0v7_)GmegqR8*7!9v=Q49)Hct zDClUbX>-GO1RZT1Lj!KaKuZ@<1)l%6N9YpB>b^YwJKJG>Mg8uyqlc82>NGRXB}j2U z`lUw#3A-OK^Y*eaBbZ7Q6o-2r%w3tmtiBn#Tb-|ulZ&?*OliY^kFA+M{JX;6ir{`B z5g`v-YkaYEK>=LCmIO&)MZf6%>+$o~uV3iZ`>S*fN1u|zM_$9k;bfuzK0G(#--okC z{v94kl)qcd-{Hjh$Ho5paF&#RAHI_M@57@C{th1^DxY-C-{Cij!VT;GeK@x1@9@}l z{6#HtNEtY^?VW+1TJSKQz7Ab^dd!{7zhSA z6ap2l?vITHk}XTZHp1heNGDtPtW$N8ZPAv>#>0@-oiYg|4C8 z6f>KELa}|=( z^1^=Fw<;_Tj_cpAj+r|^P=8SVIu!}7xNXO?o$OYq0P z(9WJI3(E`j_Z}OT7nVQi3t@R-{sjf{Lj84L4(BhFpIi}^7wTUcomhVGW8^nnFme7n zFP>O_Le0eTd2_<@Li@WR5tbLWZ+6njHHOD=zU%dfPHot*H)3Ws65o^O)&Fa`yysCI2p_a zaj+P4f;Hgp!0q56@CbKzXmUXr@_78Ca?x92T3pk91DhjMLp0l1a1X4f;Hf3&;s6|yg2lHa2l8l z4$&_Mz;5syco=K}H-UYWw+rk9m9*i0=*{3d`ubLIH+T>{3-$r?R>YKplfeaG0k|6U zfemGIOyk99fr0I}lJ&q^=cFP1j6^(^K;qA%=} z%v|f`R0K&63pJ!lnnXI2P0i=|g*0zVn-a8itxlPrm}DlKq?(GgWODqsq%TmXfk4SI zb9vbn-K#^Ica)`BUE^bs=sTsfK|3_!1C*Ls($$eltYmj!RfH1ttQza+(AOM|8Hwn> z)O#BV$oC{W!P2R|lqwN3-K9-z@X$Gi=hr#6q3mV9Kbp0?)li}hWB!(eW6Nk^b6U1*}8CTki83+xk`#e^1&QG9CSC zuUFw+t2B#Kz4}b$T%MY5q5fHRU$oebDqJMSiXt)0u zZ~i5~^Yczsi(JGn!Q7I!FKvCK2EWbMDp2?9sfYe|d(fIHRaZ({-_|wSH`cHwrGUasL{1U%|RTs@D!OJ@;aX?3`iFW_n}hm`DSo;pe|mHFA+A zoShM9WR*T&`zs`GY?FbQD{U`%9hXaTitQiQo4l7%-#S(Px{aM<&iCq>tc!Q`;kw4` z^1RJ05gGFj%BkT<@JfI^#dky$S=g_+tW&c1Dl9%_$0|)#{d^buGebre)a1pQd1ufO z@9o#Z{iR85Tv0VT)i;ZHbu?gia#HeH;@Iiw^3;G?5tgev&3y6ZcK5F)ff%idFYHNm zd-1i3Q$1Uv%`sb?gq?i7G#85FCKos{6U)cFisbg!*xsrrVHQuP=dR=RN*@zI^ZdE> zzLBqD&+!#30{Pqhv_C14aq8+=8q$9^&|cU?rY?|%y>DJ0G|R`F4QkGfW#y}OE*^@^ zXZM?(mdsXr?zXv3De;rEWbzr&8{oSc%`fREvI%7hto+3X{P{3ZDi6`r*3sD2lQOUR zsapn!?Z#hZyi8NPbVxTd)LKw92P5^Vwl%47{IuGm`n!pg5qrQ|BrcQV{IhLMQ_8hx zRPx~L$vkp)S#vtY-tuEgg4?-!lrrI#-s*X&5HmHl_xjp+>(ryh#mt3eE&jedd91y% z%;);V#!fDG9dp;UvkjgW&>b|3tCQx^qT*K~b=hQ!ov^$#YUDB{zuQ>mU&LB6xhtE= z`0Z|GGi0Ejxq{eQH;QL|9ARQj^=g}9s-v#Ey*H(edtB>+d^SM){AT6VVlO|c;cVgD z2A%JvRv2nmM?xE=xrZ=w%31Dh4sX6FG=Rw^^Vm+Aj`j3te2ufd_Elb={J{1>=uU#+eKS&@ z;65;S4c{M(Q+}=UCSBp>`}l4m%ppy$IC_`Bg|UtMy81+2lB=Th*-6LRJ~=;N-soTL z<|il6J;^@bavjUWOXj7yC}?hq+s{sO1>WQ=#2cfSxy`o4jTqOTp8h^N%Ar(C%nz)s z+S@<#cDOdwdNK1a-@5KixwgY;4s)p6%g*LHyqjlphFa>)A)1NpjsE9{&s-0%Y&SNO zZ;mJ{#2y)wrOx`Fh6*VMs1~9zO4Z-?axG=R@F+f``F_;688?Jm$qUJ1O1>l&1+OU9(5Isj%f=36*Y^@~nI{Gz#c;{`&@Z zP(XE6J5--RpgOPj=mx0d*8|Bn0_owmBIZnRyBEI`s`}gwr0<`A()}w?{ayf){}+(F zl1@?o_mj28C1`4r$P06_tB5s^WM|YlZbDE>fWIPdNMQ# 
zJq5b=3I1Oe^d9J`&<7unn<{8IbRE<{2OtL?i}N;+^iVnFYf#-FJ`XMILyh^MD*Xw= z<|ny72FILwWWasx{hI14wbHLP{yAs`;(rf30$u|D3#L%%^S}bI8u)oGMyC6(wcxwp zVenJ%8&FQVb3p@013$;OqYXQK;_!cR`hY|G4z}l`x@XeEDKm20F&qt7pEMMfhJY!uP+?t1^S- zEG?3LCH!@C}TkO$)Q zvF*dJLQA|_ZPBDz5HK0@)?06xJ$v?;S6+F=Jn_U6X5+?utH0e-8MmR}aiH&Q;8s*gmc4s9!+iaHdQSETxH7%HDec_UGKZl%*g z`9^D}&@z=iT6<~jJpL|)6N@jk^^nw^e?UA~&k-79rmA!_dw=p5&L5y-CFOh?O&qT; zR;|*dZ+_EKr>`8Xt)s-y$N6QWi^jFDBGplTI;~XurVS0*_H}SHQ*VJUDY|bdH;2<_ z`_R)m-}WtWiuYcFd*@F=mXX9m}C;1sWY+$BKgXeJE3KHqt5^0oHat z#B_W<_7Y!G(4R8Kp&f^Oo8)ANf4<#Xtg~C%9c~|v@DU#2`@^teK6Gl&AsXS}_{kNV zr8}yl59=ddTy`AJ=PZAC*io;Ifp*$9D<1auVZS6tb@br%tBX?ytIqe^;bCX_V~*OR zvsQl`Wl8cGr!Jpwgq|WCveX`RX06rPP~J8J{+H9|mFjdkYxRzK+YIG%9w?W`1JGl2 zd77kk8twGPE|kxC;Mge6-s;B5dY!z$`pBh=a<&=D=R6P_)ZF6f^Sw^`GzS#ZXD$EA zMb%f?R)+OWsm|GEfj;MZGJeKtT>IQ?;0BM3x$D?yZQVy7;N@J>L2HRGpjC)y|yXJNq1;&w0G_ zgI2sB_Ye8K^9MD}hFrfmkC(jzdH>K3ckcVW_&Ig?{`6hlyV}ae{VQ+&+}9Quhdk?u z&T{oUqSX0BM?GH&Pj4D6g<7+7zPsK0tCEK-r~Trqia+9UA|Jr^)wt`?;S7kFl{0riHJ;blDtBpx2VWm+wJFc~Ob7$TH$7iG}TUK&XY$H$I>`LxDoa&COo(lltd?eC$-`>3z$ z{qV@NA?flYuHX6@sg%*ve?AOOTHm;XC$vY{)A?>;pV(w-@3+i_wAVS_2Hx!iwI?cJ!)Wug_QD2>adD^ZT}*2a4A7cB7Rgzn*m=OAYqN z$Jdz=t&Nu)$5%H`FcCY>gJzz!PI*Jd_PXpkie>9hq0k0CO&=Y>Zhaq}$dA6sNUX5+ zl(#tVC}tP($vS2P-*4sR^0vHJYFyV)S_+Bpk?oYKR4d>{?b(2p<;zj86w?{CHrR&_ zrC34itE6rZ%nWhL)ErSbV;tE`P~7`9LW9`h_ss}n`ZhM?zI)q;?kwM9?O`-C)JSd4 z@~1DZ>OE$dTdiUOp~N`<7{Tt|v39wEJ`K*0LAxu|5%9$c{h%>cXj`Zvd|LC%;cd&e zA7}TvDy(FzzG>%l`MPHOBbkwP?eMQ8p&5C?Hs?Fp&D%5WyzTmUy%pBvKHR!)b-Fuy zNmYMu_r=-vm3#vxd(LYALyY>e>cwr@)w9-ASMhP$?ygR@YI+-qE~%=Xbzaq+KRrGg zt>G8!**@(*jUHKPUvv4$nPtbbexP8>(QCSz-qc_6c^5t~q~8m$ZEr;?n-d1bZnHES zZR=BO_)<+;$4jc(vUS~SGOJR(Rnh*gSi838UQ*TBmd>WC=G2^ Date: Mon, 19 Aug 2024 16:10:01 -0400 Subject: [PATCH 1013/1761] Add test capturing missed expectation. Ref pypa/distutils#284 --- distutils/tests/test_modified.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/distutils/tests/test_modified.py b/distutils/tests/test_modified.py index 2bd82346cf..4f1a7caf9a 100644 --- a/distutils/tests/test_modified.py +++ b/distutils/tests/test_modified.py @@ -117,3 +117,11 @@ def test_newer_pairwise_group(groups_target): newer = newer_pairwise_group([groups_target.newer], [groups_target.target]) assert older == ([], []) assert newer == ([groups_target.newer], [groups_target.target]) + + +@pytest.mark.xfail(reason="pypa/distutils#284") +def test_newer_group_no_sources_no_target(tmp_path): + """ + Consider no sources and no target "newer". + """ + assert newer_group([], str(tmp_path / 'does-not-exist')) From 30b7331b07fbc404959cb37ac311afdfb90813be Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 19 Aug 2024 16:11:46 -0400 Subject: [PATCH 1014/1761] Ensure a missing target is still indicated as 'sources are newer' even when there are no sources. 
Closes pypa/distutils#284 --- distutils/_modified.py | 2 +- distutils/tests/test_modified.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/distutils/_modified.py b/distutils/_modified.py index 6532aa1073..b7bdaa2943 100644 --- a/distutils/_modified.py +++ b/distutils/_modified.py @@ -63,7 +63,7 @@ def missing_as_newer(source): return missing == 'newer' and not os.path.exists(source) ignored = os.path.exists if missing == 'ignore' else None - return any( + return not os.path.exists(target) or any( missing_as_newer(source) or _newer(source, target) for source in filter(ignored, sources) ) diff --git a/distutils/tests/test_modified.py b/distutils/tests/test_modified.py index 4f1a7caf9a..e35cec2d6f 100644 --- a/distutils/tests/test_modified.py +++ b/distutils/tests/test_modified.py @@ -119,7 +119,6 @@ def test_newer_pairwise_group(groups_target): assert newer == ([groups_target.newer], [groups_target.target]) -@pytest.mark.xfail(reason="pypa/distutils#284") def test_newer_group_no_sources_no_target(tmp_path): """ Consider no sources and no target "newer". From 4147b093d0aea4f57757c699a0b25bbc3aab2580 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 19 Aug 2024 16:31:35 -0400 Subject: [PATCH 1015/1761] =?UTF-8?q?Bump=20version:=2072.2.0=20=E2=86=92?= =?UTF-8?q?=2073.0.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 34 ++++++++++++++++++++++++++++++ newsfragments/+61911d95.bugfix.rst | 1 - newsfragments/4383.bugfix.rst | 1 - newsfragments/4420.bugfix.rst | 2 -- newsfragments/4503.feature.rst | 1 - newsfragments/4505.feature.rst | 1 - newsfragments/4534.misc.rst | 1 - newsfragments/4546.misc.rst | 2 -- newsfragments/4554.misc.rst | 1 - newsfragments/4559.misc.rst | 2 -- newsfragments/4565.misc.rst | 3 --- newsfragments/4574.removal.rst | 4 ---- pyproject.toml | 2 +- 14 files changed, 36 insertions(+), 21 deletions(-) delete mode 100644 newsfragments/+61911d95.bugfix.rst delete mode 100644 newsfragments/4383.bugfix.rst delete mode 100644 newsfragments/4420.bugfix.rst delete mode 100644 newsfragments/4503.feature.rst delete mode 100644 newsfragments/4505.feature.rst delete mode 100644 newsfragments/4534.misc.rst delete mode 100644 newsfragments/4546.misc.rst delete mode 100644 newsfragments/4554.misc.rst delete mode 100644 newsfragments/4559.misc.rst delete mode 100644 newsfragments/4565.misc.rst delete mode 100644 newsfragments/4574.removal.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 7215487880..8cd611b4a2 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 72.2.0 +current_version = 73.0.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index a2d5eeba36..cfb8a2379f 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,37 @@ +v73.0.0 +======= + +Features +-------- + +- Mark abstract base classes and methods with `abc.ABC` and `abc.abstractmethod` -- by :user:`Avasam` (#4503) +- Changed the order of type checks in ``setuptools.command.easy_install.CommandSpec.from_param`` to support any `collections.abc.Iterable` of `str` param -- by :user:`Avasam` (#4505) + + +Bugfixes +-------- + +- Prevent an error in ``bdist_wheel`` if ``compression`` is set to a `str` (even if valid) after finalizing options but before running the command. -- by :user:`Avasam` (#4383) +- Raises an exception when ``py_limited_api`` is used in a build with + ``Py_GIL_DISABLED``. This is currently not supported (python/cpython#111506). 
(#4420) +- Synced with pypa/distutils@30b7331 including fix for modified check on empty sources (pypa/distutils#284). + + +Deprecations and Removals +------------------------- + +- ``setuptools`` is replacing the usages of :pypi:`ordered_set` with simple + instances of ``dict[Hashable, None]``. This is done to remove the extra + dependency and it is possible because since Python 3.7, ``dict`` maintain + insertion order. (#4574) + + +Misc +---- + +- #4534, #4546, #4554, #4559, #4565 + + v72.2.0 ======= diff --git a/newsfragments/+61911d95.bugfix.rst b/newsfragments/+61911d95.bugfix.rst deleted file mode 100644 index f542998cd5..0000000000 --- a/newsfragments/+61911d95.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Synced with pypa/distutils@30b7331 including fix for modified check on empty sources (pypa/distutils#284). \ No newline at end of file diff --git a/newsfragments/4383.bugfix.rst b/newsfragments/4383.bugfix.rst deleted file mode 100644 index e5fd603abb..0000000000 --- a/newsfragments/4383.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Prevent an error in ``bdist_wheel`` if ``compression`` is set to a `str` (even if valid) after finalizing options but before running the command. -- by :user:`Avasam` diff --git a/newsfragments/4420.bugfix.rst b/newsfragments/4420.bugfix.rst deleted file mode 100644 index c5f75fcddb..0000000000 --- a/newsfragments/4420.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Raises an exception when ``py_limited_api`` is used in a build with -``Py_GIL_DISABLED``. This is currently not supported (python/cpython#111506). diff --git a/newsfragments/4503.feature.rst b/newsfragments/4503.feature.rst deleted file mode 100644 index 9c2e433242..0000000000 --- a/newsfragments/4503.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Mark abstract base classes and methods with `abc.ABC` and `abc.abstractmethod` -- by :user:`Avasam` diff --git a/newsfragments/4505.feature.rst b/newsfragments/4505.feature.rst deleted file mode 100644 index e032dd997e..0000000000 --- a/newsfragments/4505.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Changed the order of type checks in ``setuptools.command.easy_install.CommandSpec.from_param`` to support any `collections.abc.Iterable` of `str` param -- by :user:`Avasam` diff --git a/newsfragments/4534.misc.rst b/newsfragments/4534.misc.rst deleted file mode 100644 index f7c1a1d314..0000000000 --- a/newsfragments/4534.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Changed the import of ``ctypes.wintypes`` from ``__import__`` to a regular ``import`` statement -- by :user:`Avasam` diff --git a/newsfragments/4546.misc.rst b/newsfragments/4546.misc.rst deleted file mode 100644 index f056a2b379..0000000000 --- a/newsfragments/4546.misc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added lower bound to test dependency on ``wheel`` (0.44.0) to avoid -small inconsistencies in ``Requires-Dist`` normalisation for ``METADATA``. diff --git a/newsfragments/4554.misc.rst b/newsfragments/4554.misc.rst deleted file mode 100644 index 9992f93441..0000000000 --- a/newsfragments/4554.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Removed ``setputools.sandbox``'s Python 2 ``builtins.file`` support -- by :user:`Avasam` diff --git a/newsfragments/4559.misc.rst b/newsfragments/4559.misc.rst deleted file mode 100644 index 34b5a30664..0000000000 --- a/newsfragments/4559.misc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Prevent deprecation warning from ``pypa/wheel#631`` to accidentally -trigger when validating ``pyproject.toml``. 
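The ``dict[Hashable, None]`` idiom mentioned in the release notes above
needs nothing beyond standard ``dict`` semantics. A minimal sketch, not
taken from the code base:

    # dict preserves insertion order (guaranteed since Python 3.7),
    # so a plain dict can stand in for an ordered set.
    items = ['packaging', 'wheel', 'packaging', 'tomli']
    ordered_unique = dict.fromkeys(items)  # de-duplicates, keeps first-seen order
    assert list(ordered_unique) == ['packaging', 'wheel', 'tomli']

    ordered_unique.setdefault('pip')  # set-style "add"; the value stays None
    assert 'pip' in ordered_unique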
diff --git a/newsfragments/4565.misc.rst b/newsfragments/4565.misc.rst deleted file mode 100644 index 031f8d66ca..0000000000 --- a/newsfragments/4565.misc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Replace ``pip install -I`` with ``pip install --force-reinstall`` in -integration tests. Additionally, remove ``wheel`` from virtual environment as -it is no longer a build dependency. diff --git a/newsfragments/4574.removal.rst b/newsfragments/4574.removal.rst deleted file mode 100644 index 17c8f61ec1..0000000000 --- a/newsfragments/4574.removal.rst +++ /dev/null @@ -1,4 +0,0 @@ -``setuptools`` is replacing the usages of :pypi:`ordered_set` with simple -instances of ``dict[Hashable, None]``. This is done to remove the extra -dependency and it is possible because since Python 3.7, ``dict`` maintain -insertion order. diff --git a/pyproject.toml b/pyproject.toml index 89c2fe890b..c9f8e61bbe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "72.2.0" +version = "73.0.0" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From 429ac589e5f290282f91b420350b002a2c519699 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 19 Aug 2024 18:59:56 -0400 Subject: [PATCH 1016/1761] Override distribution attribute type in all distutils-based commands --- setuptools/command/bdist_rpm.py | 3 +++ setuptools/command/build.py | 4 ++++ setuptools/command/build_clib.py | 4 ++++ setuptools/command/egg_info.py | 2 -- setuptools/command/register.py | 4 ++++ 5 files changed, 15 insertions(+), 2 deletions(-) diff --git a/setuptools/command/bdist_rpm.py b/setuptools/command/bdist_rpm.py index abf2b88bfc..e0d4caf2e9 100644 --- a/setuptools/command/bdist_rpm.py +++ b/setuptools/command/bdist_rpm.py @@ -1,3 +1,4 @@ +from ..dist import Distribution from ..warnings import SetuptoolsDeprecationWarning import distutils.command.bdist_rpm as orig @@ -12,6 +13,8 @@ class bdist_rpm(orig.bdist_rpm): disable eggs in RPM distributions. """ + distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution + def run(self): SetuptoolsDeprecationWarning.emit( "Deprecated command", diff --git a/setuptools/command/build.py b/setuptools/command/build.py index fd53fae8ca..0c5e544804 100644 --- a/setuptools/command/build.py +++ b/setuptools/command/build.py @@ -2,12 +2,16 @@ from typing import Protocol +from ..dist import Distribution + from distutils.command.build import build as _build _ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"} class build(_build): + distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution + # copy to avoid sharing the object with parent class sub_commands = _build.sub_commands[:] diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py index 5366b0c5c6..9db57ac8a2 100644 --- a/setuptools/command/build_clib.py +++ b/setuptools/command/build_clib.py @@ -1,3 +1,5 @@ +from ..dist import Distribution + import distutils.command.build_clib as orig from distutils import log from distutils.errors import DistutilsSetupError @@ -25,6 +27,8 @@ class build_clib(orig.build_clib): the compiler. 
""" + distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution + def build_libraries(self, libraries): for lib_name, build_info in libraries: sources = build_info.get('sources') diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index 794ecd3dc3..09255a3240 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -18,7 +18,6 @@ from setuptools.command import bdist_egg from setuptools.command.sdist import sdist, walk_revctrl from setuptools.command.setopt import edit_config -from setuptools.dist import Distribution from setuptools.glob import glob from .. import _entry_points, _normalization @@ -522,7 +521,6 @@ def _safe_path(self, path): class manifest_maker(sdist): - distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution template = "MANIFEST.in" def initialize_options(self): diff --git a/setuptools/command/register.py b/setuptools/command/register.py index 575790e5f2..93ef04aa0e 100644 --- a/setuptools/command/register.py +++ b/setuptools/command/register.py @@ -1,5 +1,7 @@ from setuptools.errors import RemovedCommandError +from ..dist import Distribution + import distutils.command.register as orig from distutils import log @@ -7,6 +9,8 @@ class register(orig.register): """Formerly used to register packages on PyPI.""" + distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution + def run(self): msg = ( "The register command has been removed, use twine to upload " From fbc75bcc13558d3ce2a4a7ed90a6e6c32abbf947 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 9 Aug 2024 15:30:13 -0400 Subject: [PATCH 1017/1761] Raise `TypeError` in `easy_install.CommandSpec.from_param` --- newsfragments/4548.feature.rst | 1 + setuptools/command/easy_install.py | 3 +-- setuptools/tests/test_easy_install.py | 10 ++++++++++ 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 newsfragments/4548.feature.rst diff --git a/newsfragments/4548.feature.rst b/newsfragments/4548.feature.rst new file mode 100644 index 0000000000..7fcf5f4377 --- /dev/null +++ b/newsfragments/4548.feature.rst @@ -0,0 +1 @@ +Changed the type of error raised by ``setuptools.command.easy_install.CommandSpec.from_param`` on unsupported argument from `AttributeError` to `TypeError` -- by :user:`Avasam` diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 54d1e48449..2770a72b92 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -2068,8 +2068,7 @@ def from_param(cls, param: Self | str | Iterable[str] | None) -> Self: return cls(param) if param is None: return cls.from_environment() - # AttributeError to keep backwards compatibility, this should really be a TypeError though - raise AttributeError(f"Argument has an unsupported type {type(param)}") + raise TypeError(f"Argument has an unsupported type {type(param)}") @classmethod def from_environment(cls): diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index ca6af9667e..de257db80c 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -1332,6 +1332,16 @@ def test_from_simple_string_uses_shlex(self): assert len(cmd) == 2 assert '"' not in cmd.as_header() + def test_from_param_raises_expected_error(self) -> None: + """ + from_param should raise its own TypeError when the argument's type is unsupported + """ + with pytest.raises(TypeError) as 
exc_info: + ei.CommandSpec.from_param(object()) # type: ignore[arg-type] # We want a type error here + assert ( + str(exc_info.value) == "Argument has an unsupported type " + ), exc_info.value + class TestWindowsScriptWriter: def test_header(self): From b7ee00da2cfa8208c47812fb657392e8b88f620c Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 20 Aug 2024 13:46:39 -0400 Subject: [PATCH 1018/1761] Remove ABCMeta metaclass, keep abstractmethods --- newsfragments/4579.bugfix.rst | 1 + pkg_resources/__init__.py | 3 +-- setuptools/__init__.py | 4 ++-- setuptools/command/setopt.py | 3 +-- setuptools/sandbox.py | 3 +-- 5 files changed, 6 insertions(+), 8 deletions(-) create mode 100644 newsfragments/4579.bugfix.rst diff --git a/newsfragments/4579.bugfix.rst b/newsfragments/4579.bugfix.rst new file mode 100644 index 0000000000..bd5ad8c203 --- /dev/null +++ b/newsfragments/4579.bugfix.rst @@ -0,0 +1 @@ +Remove `abc.ABCMeta` metaclass from abstract classes. `pypa/setuptools#4503 `_ had an unintended consequence of causing potential ``TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases`` -- by :user:`Avasam` diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 8bbf249371..76aa5e77ba 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -23,7 +23,6 @@ from __future__ import annotations import sys -from abc import ABC if sys.version_info < (3, 8): # noqa: UP036 # Check for unsupported versions raise RuntimeError("Python 3.8 or later is required") @@ -306,7 +305,7 @@ def get_supported_platform(): ] -class ResolutionError(Exception, ABC): +class ResolutionError(Exception): """Abstract base for dependency resolution errors""" def __repr__(self): diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 73de2a03d3..1d3156ff10 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -6,7 +6,7 @@ import os import re import sys -from abc import ABC, abstractmethod +from abc import abstractmethod from typing import TYPE_CHECKING, TypeVar, overload sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path]) # fmt: skip @@ -120,7 +120,7 @@ def setup(**attrs): _Command = monkey.get_unpatched(distutils.core.Command) -class Command(_Command, ABC): +class Command(_Command): """ Setuptools internal actions are organized using a *command design pattern*. This means that each action (or group of closely related actions) executed during diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py index b2653bd466..e351af22f0 100644 --- a/setuptools/command/setopt.py +++ b/setuptools/command/setopt.py @@ -1,6 +1,5 @@ import configparser import os -from abc import ABC from .. 
import Command from ..unicode_utils import _cfg_read_utf8_with_fallback @@ -70,7 +69,7 @@ def edit_config(filename, settings, dry_run=False): opts.write(f) -class option_base(Command, ABC): +class option_base(Command): """Abstract base class for commands that mess with config files""" user_options = [ diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index 9a101b7137..7d545f1004 100644 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -11,7 +11,6 @@ import sys import tempfile import textwrap -from abc import ABC import pkg_resources from pkg_resources import working_set @@ -263,7 +262,7 @@ def run_setup(setup_script, args): # Normal exit, just return -class AbstractSandbox(ABC): +class AbstractSandbox: """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" _active = False From ebddeb36f72c9d758b5cc0e9f81f8a66aa837d96 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 20 Aug 2024 14:08:21 -0400 Subject: [PATCH 1019/1761] =?UTF-8?q?Bump=20version:=2073.0.0=20=E2=86=92?= =?UTF-8?q?=2073.0.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 9 +++++++++ newsfragments/4579.bugfix.rst | 1 - pyproject.toml | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 newsfragments/4579.bugfix.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 8cd611b4a2..51d22156e4 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 73.0.0 +current_version = 73.0.1 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index cfb8a2379f..3c36fbfa5a 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,12 @@ +v73.0.1 +======= + +Bugfixes +-------- + +- Remove `abc.ABCMeta` metaclass from abstract classes. `pypa/setuptools#4503 `_ had an unintended consequence of causing potential ``TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases`` -- by :user:`Avasam` (#4579) + + v73.0.0 ======= diff --git a/newsfragments/4579.bugfix.rst b/newsfragments/4579.bugfix.rst deleted file mode 100644 index bd5ad8c203..0000000000 --- a/newsfragments/4579.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Remove `abc.ABCMeta` metaclass from abstract classes. `pypa/setuptools#4503 `_ had an unintended consequence of causing potential ``TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases`` -- by :user:`Avasam` diff --git a/pyproject.toml b/pyproject.toml index c9f8e61bbe..390da0c634 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "73.0.0" +version = "73.0.1" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From f1350e413775a9e79e20779cc9705e28a1c55900 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 21 Aug 2024 07:05:32 -0400 Subject: [PATCH 1020/1761] Add upstream and local sections for 'type' extra, since many projects will have 'types-*' dependencies. 
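A hypothetical sketch of the resulting convention (the
``types-docutils`` entry is illustrative only and not part of this
change): requirements inherited from the skeleton stay under
``# upstream``, while project-specific additions go under ``# local``,
so future merges from the skeleton apply cleanly:

    type = [
        # upstream
        "pytest-mypy",

        # local
        "types-docutils",  # illustrative project-local stub dependency
    ]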
--- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 31057d85f1..3866a3237e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,10 @@ enabler = [ ] type = [ + # upstream "pytest-mypy", + + # local ] From 6452a6ba6e2db0e546fdc030091c7fbcb6fe90b0 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 21 Aug 2024 10:34:56 -0400 Subject: [PATCH 1021/1761] Type-check on all Python versions --- mypy.ini | 14 +++++++---- setuptools/__init__.py | 11 +++++++-- setuptools/build_meta.py | 5 ++-- setuptools/command/build_ext.py | 2 +- setuptools/command/sdist.py | 4 +++- setuptools/config/setupcfg.py | 5 +++- setuptools/dist.py | 4 +++- setuptools/errors.py | 41 +++++++++++++++++++-------------- setuptools/extension.py | 4 +++- 9 files changed, 59 insertions(+), 31 deletions(-) diff --git a/mypy.ini b/mypy.ini index 569c7f0ace..43bb9d56c9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,7 +1,7 @@ [mypy] # CI should test for all versions, local development gets hints for oldest supported -# Some upstream typeshed distutils stubs fixes are necessary before we can start testing on Python 3.12 -python_version = 3.8 +# But our testing setup doesn't allow passing CLI arguments, so local devs have to set this manually. +# python_version = 3.8 strict = False warn_unused_ignores = True warn_redundant_casts = True @@ -30,15 +30,19 @@ disable_error_code = attr-defined [mypy-pkg_resources.tests.*] disable_error_code = import-not-found -# - distutils._modified has different errors on Python 3.8 [import-untyped], on Python 3.9+ [import-not-found] +# - distutils doesn't exist on Python 3.12, unfortunately, this means typing +# will be missing for subclasses of distutils on Python 3.12 until either: +# - support for `SETUPTOOLS_USE_DISTUTILS=stdlib` is dropped (#3625) +# for setuptools to import `_distutils` directly +# - or non-stdlib distutils typings are exposed # - All jaraco modules are still untyped # - _validate_project sometimes complains about trove_classifiers (#4296) # - wheel appears to be untyped -[mypy-distutils._modified,jaraco.*,trove_classifiers,wheel.*] +[mypy-distutils.*,jaraco.*,trove_classifiers,wheel.*] ignore_missing_imports = True # Even when excluding a module, import issues can show up due to following import # https://github.com/python/mypy/issues/11936#issuecomment-1466764006 -[mypy-setuptools.config._validate_pyproject.*] +[mypy-setuptools.config._validate_pyproject.*,setuptools._distutils.*] follow_imports = silent # silent => ignore errors when following imports diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 1d3156ff10..1c39fd9dab 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -1,4 +1,9 @@ """Extensions to the 'distutils' for large or complex distributions""" +# mypy: disable_error_code=override +# Command.reinitialize_command has an extra **kw param that distutils doesn't have +# Can't disable on the exact line because distutils doesn't exists on Python 3.12 +# and mypy isn't aware of distutils_hack, causing distutils.core.Command to be Any, +# and a [unused-ignore] to be raised on 3.12+ from __future__ import annotations @@ -114,8 +119,10 @@ def setup(**attrs): setup.__doc__ = distutils.core.setup.__doc__ if TYPE_CHECKING: + from typing_extensions import TypeAlias + # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962 - _Command = distutils.core.Command + _Command: TypeAlias = distutils.core.Command else: _Command = 
monkey.get_unpatched(distutils.core.Command) @@ -207,7 +214,7 @@ def ensure_string_list(self, option): "'%s' must be a list of strings (got %r)" % (option, val) ) - @overload # type:ignore[override] # Extra **kw param + @overload def reinitialize_command( self, command: str, reinit_subcommands: bool = False, **kw ) -> _Command: ... diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index 7663306862..a6b85afc42 100644 --- a/setuptools/build_meta.py +++ b/setuptools/build_meta.py @@ -387,9 +387,10 @@ def _build_with_temp_dir( # Build in a temporary directory, then copy to the target. os.makedirs(result_directory, exist_ok=True) - temp_opts = {"prefix": ".tmp-", "dir": result_directory} - with tempfile.TemporaryDirectory(**temp_opts) as tmp_dist_dir: + with tempfile.TemporaryDirectory( + prefix=".tmp-", dir=result_directory + ) as tmp_dist_dir: sys.argv = [ *sys.argv[:1], *self._global_args(config_settings), diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 51c1771a33..d44d7b8ae1 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -32,7 +32,7 @@ get_config_var("LDSHARED") # Not publicly exposed in typeshed distutils stubs, but this is done on purpose # See https://github.com/pypa/setuptools/pull/4228#issuecomment-1959856400 -from distutils.sysconfig import _config_vars as _CONFIG_VARS # type: ignore # noqa +from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa: E402 def _customize_compiler_for_shlib(compiler): diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 23393c0797..68afab89b4 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import contextlib import os from itertools import chain @@ -46,7 +48,7 @@ class sdist(orig.sdist): ] distribution: Distribution # override distutils.dist.Distribution with setuptools.dist.Distribution - negative_opt = {} + negative_opt: dict[str, str] = {} README_EXTENSIONS = ['', '.rst', '.txt', '.md'] READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS) diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index e825477043..072b787062 100644 --- a/setuptools/config/setupcfg.py +++ b/setuptools/config/setupcfg.py @@ -24,9 +24,11 @@ Generic, Iterable, Iterator, + List, Tuple, TypeVar, Union, + cast, ) from packaging.markers import default_environment as marker_env @@ -108,7 +110,8 @@ def _apply( filenames = [*other_files, filepath] try: - _Distribution.parse_config_files(dist, filenames=filenames) # type: ignore[arg-type] # TODO: fix in distutils stubs + # TODO: Temporary cast until mypy 1.12 is released with upstream fixes from typeshed + _Distribution.parse_config_files(dist, filenames=cast(List[str], filenames)) handlers = parse_configuration( dist, dist.command_options, ignore_option_errors=ignore_option_errors ) diff --git a/setuptools/dist.py b/setuptools/dist.py index 715e8fbb73..68f877decd 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -195,8 +195,10 @@ def check_packages(dist, attr, value): if TYPE_CHECKING: + from typing_extensions import TypeAlias + # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962 - _Distribution = distutils.core.Distribution + _Distribution: TypeAlias = distutils.core.Distribution else: _Distribution = get_unpatched(distutils.core.Distribution) diff --git a/setuptools/errors.py b/setuptools/errors.py index dd4e58e9fc..90fcf7170e 
100644 --- a/setuptools/errors.py +++ b/setuptools/errors.py @@ -3,29 +3,36 @@ Provides exceptions used by setuptools modules. """ +from __future__ import annotations + +from typing import TYPE_CHECKING + from distutils import errors as _distutils_errors +if TYPE_CHECKING: + from typing_extensions import TypeAlias + # Re-export errors from distutils to facilitate the migration to PEP632 -ByteCompileError = _distutils_errors.DistutilsByteCompileError -CCompilerError = _distutils_errors.CCompilerError -ClassError = _distutils_errors.DistutilsClassError -CompileError = _distutils_errors.CompileError -ExecError = _distutils_errors.DistutilsExecError -FileError = _distutils_errors.DistutilsFileError -InternalError = _distutils_errors.DistutilsInternalError -LibError = _distutils_errors.LibError -LinkError = _distutils_errors.LinkError -ModuleError = _distutils_errors.DistutilsModuleError -OptionError = _distutils_errors.DistutilsOptionError -PlatformError = _distutils_errors.DistutilsPlatformError -PreprocessError = _distutils_errors.PreprocessError -SetupError = _distutils_errors.DistutilsSetupError -TemplateError = _distutils_errors.DistutilsTemplateError -UnknownFileError = _distutils_errors.UnknownFileError +ByteCompileError: TypeAlias = _distutils_errors.DistutilsByteCompileError +CCompilerError: TypeAlias = _distutils_errors.CCompilerError +ClassError: TypeAlias = _distutils_errors.DistutilsClassError +CompileError: TypeAlias = _distutils_errors.CompileError +ExecError: TypeAlias = _distutils_errors.DistutilsExecError +FileError: TypeAlias = _distutils_errors.DistutilsFileError +InternalError: TypeAlias = _distutils_errors.DistutilsInternalError +LibError: TypeAlias = _distutils_errors.LibError +LinkError: TypeAlias = _distutils_errors.LinkError +ModuleError: TypeAlias = _distutils_errors.DistutilsModuleError +OptionError: TypeAlias = _distutils_errors.DistutilsOptionError +PlatformError: TypeAlias = _distutils_errors.DistutilsPlatformError +PreprocessError: TypeAlias = _distutils_errors.PreprocessError +SetupError: TypeAlias = _distutils_errors.DistutilsSetupError +TemplateError: TypeAlias = _distutils_errors.DistutilsTemplateError +UnknownFileError: TypeAlias = _distutils_errors.UnknownFileError # The root error class in the hierarchy -BaseError = _distutils_errors.DistutilsError +BaseError: TypeAlias = _distutils_errors.DistutilsError class InvalidConfigError(OptionError): diff --git a/setuptools/extension.py b/setuptools/extension.py index b9fff2367f..dcc7709982 100644 --- a/setuptools/extension.py +++ b/setuptools/extension.py @@ -27,8 +27,10 @@ def _have_cython(): # for compatibility have_pyrex = _have_cython if TYPE_CHECKING: + from typing_extensions import TypeAlias + # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962 - _Extension = distutils.core.Extension + _Extension: TypeAlias = distutils.core.Extension else: _Extension = get_unpatched(distutils.core.Extension) From 6171c87b27dff3fca981a3afad796ea2609a4f96 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 12 Aug 2024 14:56:21 -0400 Subject: [PATCH 1022/1761] Pin Ruff to a lower bound rather than pinning pytest-ruff to an upper-bound --- pyproject.toml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bae68252e4..dcb581b09e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ test = [ # local "virtualenv>=13.0.0", - "wheel>=0.44.0", # Consistent requirement normalisation in METADATA (see 
#4547) + "wheel>=0.44.0", # Consistent requirement normalisation in METADATA (see #4547) "pip>=19.1", # For proper file:// URLs support. "packaging>=23.2", "jaraco.envs>=2.2", @@ -64,17 +64,10 @@ test = [ "importlib_metadata", "pytest-subprocess", - # require newer pytest-ruff than upstream for pypa/setuptools#4368 - # also exclude cygwin for pypa/setuptools#3921 - 'pytest-ruff >= 0.3.2; sys_platform != "cygwin"', - # workaround for pypa/setuptools#4333 "pyproject-hooks!=1.1", "jaraco.test", - - # workaround for businho/pytest-ruff#28 - 'pytest-ruff < 0.4; platform_system == "Windows"', ] doc = [ @@ -118,8 +111,14 @@ core = [ ] check = [ + # upstream "pytest-checkdocs >= 2.4", "pytest-ruff >= 0.2.1; sys_platform != 'cygwin'", + + # local + + # workaround for businho/pytest-ruff#28 + "ruff >= 0.5.2; sys_platform != 'cygwin'", ] cover = [ From c4fb5937b869fbc20ac01e7e201464fb94e6481e Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 29 Jun 2024 12:14:17 +0200 Subject: [PATCH 1023/1761] Apply ruff/Perflint rule PERF102 PERF102 When using only the values of a dict use the `values()` method --- distutils/command/install.py | 2 +- distutils/dist.py | 8 ++------ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/distutils/command/install.py b/distutils/command/install.py index 1fc09eef89..b83e061e02 100644 --- a/distutils/command/install.py +++ b/distutils/command/install.py @@ -680,7 +680,7 @@ def create_home_path(self): if not self.user: return home = convert_path(os.path.expanduser("~")) - for _name, path in self.config_vars.items(): + for path in self.config_vars.values(): if str(path).startswith(home) and not os.path.isdir(path): self.debug_print(f"os.makedirs('{path}', 0o700)") os.makedirs(path, 0o700) diff --git a/distutils/dist.py b/distutils/dist.py index 115302b3e7..6cc7cd0c50 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -741,9 +741,7 @@ def print_commands(self): import distutils.command std_commands = distutils.command.__all__ - is_std = set() - for cmd in std_commands: - is_std.add(cmd) + is_std = set(std_commands) extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std] @@ -769,9 +767,7 @@ def get_command_list(self): import distutils.command std_commands = distutils.command.__all__ - is_std = set() - for cmd in std_commands: - is_std.add(cmd) + is_std = set(std_commands) extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std] From 7556b83700bee5565c72c0426b0156d9d5292203 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 29 Jun 2024 12:11:54 +0200 Subject: [PATCH 1024/1761] Apply ruff/Perflint rule PERF402 PERF402 Use `list` or `list.copy` to create a copy of a list --- distutils/command/build_ext.py | 3 +-- distutils/cygwinccompiler.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py index cf475fe824..a7e3038be6 100644 --- a/distutils/command/build_ext.py +++ b/distutils/command/build_ext.py @@ -638,8 +638,7 @@ def swig_sources(self, sources, extension): # Do not override commandline arguments if not self.swig_opts: - for o in extension.swig_opts: - swig_cmd.append(o) + swig_cmd.extend(extension.swig_opts) for source in swig_sources: target = swig_targets[source] diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py index ce412e8329..3743f14efa 100644 --- a/distutils/cygwinccompiler.py 
+++ b/distutils/cygwinccompiler.py @@ -172,8 +172,7 @@ def link( # Generate .def file contents = [f"LIBRARY {os.path.basename(output_filename)}", "EXPORTS"] - for sym in export_symbols: - contents.append(sym) + contents.extend(export_symbols) self.execute(write_file, (def_file, contents), f"writing {def_file}") # next add options for def-file From 9d86ea810f1728ac157623754001d0194ba422e8 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 29 Jun 2024 12:18:12 +0200 Subject: [PATCH 1025/1761] Apply ruff/Perflint rule PERF403 PERF403 Use a dictionary comprehension instead of a for-loop --- distutils/archive_util.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/distutils/archive_util.py b/distutils/archive_util.py index 07cd97f4d0..cc4699b1a3 100644 --- a/distutils/archive_util.py +++ b/distutils/archive_util.py @@ -266,8 +266,7 @@ def make_archive( raise ValueError(f"unknown archive format '{format}'") func = format_info[0] - for arg, val in format_info[1]: - kwargs[arg] = val + kwargs.update(format_info[1]) if format != 'zip': kwargs['owner'] = owner From 7f22d8bc039cca08d80b5ea8d48d7e7673d524cc Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 29 Jun 2024 12:24:57 +0200 Subject: [PATCH 1026/1761] Enforce ruff/Perflint rules (PERF) --- ruff.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ruff.toml b/ruff.toml index 41c9459bdb..a8215c0c8a 100644 --- a/ruff.toml +++ b/ruff.toml @@ -8,11 +8,13 @@ extend-select = [ "B", "I", "ISC", + "PERF", "RUF010", "RUF100", "UP", ] ignore = [ + "PERF203", # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules "W191", "E111", From 2b5815cea160e3656d48fb528a451f6ae01d23ef Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Mon, 1 Jul 2024 19:09:12 +0200 Subject: [PATCH 1027/1761] Add `# local` to ignore conflicts with upstrea --- ruff.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ruff.toml b/ruff.toml index a8215c0c8a..0d55637d20 100644 --- a/ruff.toml +++ b/ruff.toml @@ -14,7 +14,9 @@ extend-select = [ "UP", ] ignore = [ + # local "PERF203", + # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules "W191", "E111", From 27c258fe7cfd9f4979b84e3fa74177b0a7d57426 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Aug 2024 12:05:39 -0400 Subject: [PATCH 1028/1761] Move static-checkers-only dependencies into their dedicated extras --- pyproject.toml | 13 +++++-------- .../tests/integration/test_pip_install_sdist.py | 5 ++--- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index dcb581b09e..13a2e92341 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,14 +54,6 @@ test = [ # for tools/finalize.py 'jaraco.develop >= 7.21; python_version >= "3.9" and sys_platform != "cygwin"', "pytest-home >= 0.5", - # pin mypy version so a new version doesn't suddenly cause the CI to fail, - # until types-setuptools is removed from typeshed. 
- # For help with static-typing issues, or mypy update, ping @Avasam - "mypy==1.11.*", - # No Python 3.11 dependencies require tomli, but needed for type-checking since we import it directly - "tomli", - # No Python 3.12 dependencies require importlib_metadata, but needed for type-checking since we import it directly - "importlib_metadata", "pytest-subprocess", # workaround for pypa/setuptools#4333 @@ -134,6 +126,11 @@ type = [ "pytest-mypy", # local + + # pin mypy version so a new version doesn't suddenly cause the CI to fail, + # until types-setuptools is removed from typeshed. + # For help with static-typing issues, or mypy update, ping @Avasam + "mypy==1.11.*", ] diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py index 2d59337aff..e5203d18f9 100644 --- a/setuptools/tests/integration/test_pip_install_sdist.py +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -202,13 +202,12 @@ def build_deps(package, sdist_file): "Manually" install them, since pip will not install build deps with `--no-build-isolation`. """ - import tomli as toml - # delay importing, since pytest discovery phase may hit this file from a # testenv without tomli + from setuptools.compat.py310 import tomllib archive = Archive(sdist_file) - info = toml.loads(_read_pyproject(archive)) + info = tomllib.loads(_read_pyproject(archive)) deps = info.get("build-system", {}).get("requires", []) deps += EXTRA_BUILD_DEPS.get(package, []) # Remove setuptools from requirements (and deduplicate) From ff643f213ef1e4da9a1c7eb480a3cd8551347d90 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 22 Aug 2024 12:21:00 -0400 Subject: [PATCH 1029/1761] disable coverage for integration tests --- .coveragerc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.coveragerc b/.coveragerc index c8d1cbbd6e..be34df9d9d 100644 --- a/.coveragerc +++ b/.coveragerc @@ -7,6 +7,9 @@ omit = */_vendor/* */tools/* */setuptools/_distutils/* + # See #4588 for integration tests coverage + */setuptools/tests/integration/* + */setuptools/tests/test_integration.py disable_warnings = couldnt-parse From 57b8aa81d77416805dcaaa22d5d45fef3e8b331c Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Sun, 25 Aug 2024 09:29:10 +0100 Subject: [PATCH 1030/1761] Add `--fix` flag to ruff pre-commit hook for automatic suggestion of fixes (jaraco/skeleton#140) * Add `--fix` flag to ruff pre-commit hook for automatic suggestion of fixes. This is documented in https://github.com/astral-sh/ruff-pre-commit?tab=readme-ov-file#using-ruff-with-pre-commit and should be safe to apply, because it requires the developer to "manually approve" the suggested changes via `git add`. * Add --unsafe-fixes to ruff pre-commit hoot --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ff54405ead..8ec58e22fa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,4 +3,5 @@ repos: rev: v0.5.6 hooks: - id: ruff + args: [--fix, --unsafe-fixes] - id: ruff-format From d3e83beaec3bdf4a628f2f0ae0a52d21c84e346f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 25 Aug 2024 06:33:23 -0400 Subject: [PATCH 1031/1761] Disable mypy for now. 
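For context, a hedged sketch of the pytest-enabler mechanism this
relies on (my reading of that plugin's config handling): a plugin is
activated only when its section supplies ``addopts``, so declaring the
section while withholding the option keeps mypy installed but dormant:

    [tool.pytest-enabler.mypy]
    addopts = "--mypy"  # what an *enabled* mypy plugin would look like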
Ref jaraco/skeleton#143 --- pyproject.toml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3866a3237e..1d81b1cc4a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,5 +64,8 @@ type = [ ] - [tool.setuptools_scm] + + +[tool.pytest-enabler.mypy] +# Disabled due to jaraco/skeleton#143 From 3fcabf10b810c8585b858fb81fc3cd8c5efe898d Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 25 Aug 2024 13:26:38 -0400 Subject: [PATCH 1032/1761] Move overload-overlap disablement to its own line for easier diffs and simpler relevant comments. Ref #142 --- mypy.ini | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mypy.ini b/mypy.ini index 83b0d15c6b..2806c330e7 100644 --- a/mypy.ini +++ b/mypy.ini @@ -10,5 +10,6 @@ enable_error_code = ignore-without-code # Support namespace packages per https://github.com/python/mypy/issues/14057 explicit_package_bases = True -# Disable overload-overlap due to many false-positives -disable_error_code = overload-overlap +disable_error_code = + # Disable due to many false positives + overload-overlap From a34397a2951db73b24aae168cb4108791f9a3d69 Mon Sep 17 00:00:00 2001 From: Kagami Sascha Rosylight Date: Sun, 25 Aug 2024 20:18:22 +0200 Subject: [PATCH 1033/1761] Use arm64 MSVC on arm64 Windows --- distutils/_msvccompiler.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index b0322410c5..2f098a4455 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -33,7 +33,7 @@ LibError, LinkError, ) -from .util import get_platform +from .util import get_platform, get_host_platform def _find_vc2015(): @@ -250,6 +250,10 @@ def initialize(self, plat_name=None): # Get the vcvarsall.bat spec for the requested platform. plat_spec = PLAT_TO_VCVARS[plat_name] + # Use the native MSVC host if the host platform would need expensive emulation for x86. + if plat_name == get_host_platform() and plat_spec == 'x86_arm64': + plat_spec = plat_spec[4:] + vc_env = _get_vc_env(plat_spec) if not vc_env: raise DistutilsPlatformError( From 9824bd88e4f9dd6e5ffe21cc8edebab352e6ddc5 Mon Sep 17 00:00:00 2001 From: Cal Jacobson Date: Sun, 25 Aug 2024 23:18:34 -0500 Subject: [PATCH 1034/1761] Update distutils/dist.py Co-authored-by: Anderson Bravalheri --- distutils/dist.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/distutils/dist.py b/distutils/dist.py index 9b16058ddc..bc631c4257 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -355,11 +355,9 @@ def _gen_paths(self): filename = prefix + 'pydistutils.cfg' if self.want_user_cfg: try: - user_home = pathlib.Path('~').expanduser() + yield pathlib.Path('~').expanduser() / filename except RuntimeError: - self.announce("Failed to locate user home directory. Skipping user config.", logging.WARNING) - else: - yield user_home / filename + warnings.warn("Failed to locate user home directory. 
Skipping user config.") # All platforms support local setup.cfg yield pathlib.Path('setup.cfg') From d54bfb030bf334bded0ed131922cdb8a4cfd1fb9 Mon Sep 17 00:00:00 2001 From: Cal Jacobson Date: Mon, 26 Aug 2024 00:03:31 -0500 Subject: [PATCH 1035/1761] format --- distutils/dist.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/distutils/dist.py b/distutils/dist.py index 0d3c40153a..19f4cc5c52 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -349,7 +349,9 @@ def _gen_paths(self): try: yield pathlib.Path('~').expanduser() / filename except RuntimeError: - warnings.warn("Failed to locate user home directory. Skipping user config.") + warnings.warn( + "Failed to locate user home directory. Skipping user config." + ) # All platforms support local setup.cfg yield pathlib.Path('setup.cfg') From 4c990b9eab4d33faf517fcc2080662ebde0d2841 Mon Sep 17 00:00:00 2001 From: Kagami Sascha Rosylight Date: Mon, 26 Aug 2024 12:25:03 +0200 Subject: [PATCH 1036/1761] Check MSVC arm64 variant on arm64 host (#4555) --- newsfragments/4553.feature.rst | 1 + setuptools/msvc.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 newsfragments/4553.feature.rst diff --git a/newsfragments/4553.feature.rst b/newsfragments/4553.feature.rst new file mode 100644 index 0000000000..43ea1eeac9 --- /dev/null +++ b/newsfragments/4553.feature.rst @@ -0,0 +1 @@ +Added detection of ARM64 variant of MSVC -- by :user:`saschanaz` diff --git a/setuptools/msvc.py b/setuptools/msvc.py index ca332d59aa..57f09417ca 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -26,6 +26,7 @@ from more_itertools import unique_everseen import distutils.errors +from distutils.util import get_platform # https://github.com/python/mypy/issues/8166 if not TYPE_CHECKING and platform.system() == 'Windows': @@ -89,8 +90,9 @@ def _msvc14_find_vc2017(): if not root: return None, None + variant = 'arm64' if get_platform() == 'win-arm64' else 'x86.x64' suitable_components = ( - "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + f"Microsoft.VisualStudio.Component.VC.Tools.{variant}", "Microsoft.VisualStudio.Workload.WDExpress", ) From 9c37a8ad7398c435cc2a84c1f7532f2cb9ad599c Mon Sep 17 00:00:00 2001 From: Kagami Sascha Rosylight Date: Mon, 26 Aug 2024 18:49:53 +0200 Subject: [PATCH 1037/1761] conditionally construct PLAT_TO_VCVARS --- distutils/_msvccompiler.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index 2f098a4455..a66e5d5b19 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -179,14 +179,25 @@ def _find_exe(exe, paths=None): # A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. Always cross-compile from x86 to work with the -# lighter-weight MSVC installs that do not include native 64-bit tools. -PLAT_TO_VCVARS = { - 'win32': 'x86', - 'win-amd64': 'x86_amd64', - 'win-arm32': 'x86_arm', - 'win-arm64': 'x86_arm64', -} +# 'vcvarsall.bat'. +if get_platform() == get_host_platform() and get_host_platform() == "win-arm64": + # Use the native MSVC host if the host platform would need expensive + # emulation for x86. + PLAT_TO_VCVARS = { + 'win32': 'arm64_x86', + 'win-amd64': 'arm64_amd64', + 'win-arm32': 'arm64_arm', + 'win-arm64': 'arm64', + } +else: + # Always cross-compile from x86 to work with the lighter-weight MSVC + # installs that do not include native 64-bit tools. 
+ PLAT_TO_VCVARS = { + 'win32': 'x86', + 'win-amd64': 'x86_amd64', + 'win-arm32': 'x86_arm', + 'win-arm64': 'x86_arm64', + } class MSVCCompiler(CCompiler): @@ -250,10 +261,6 @@ def initialize(self, plat_name=None): # Get the vcvarsall.bat spec for the requested platform. plat_spec = PLAT_TO_VCVARS[plat_name] - # Use the native MSVC host if the host platform would need expensive emulation for x86. - if plat_name == get_host_platform() and plat_spec == 'x86_arm64': - plat_spec = plat_spec[4:] - vc_env = _get_vc_env(plat_spec) if not vc_env: raise DistutilsPlatformError( From b55552e658789e714d572805ade271c91c94425b Mon Sep 17 00:00:00 2001 From: Kagami Sascha Rosylight Date: Mon, 26 Aug 2024 18:56:09 +0200 Subject: [PATCH 1038/1761] function-ify --- distutils/_msvccompiler.py | 45 +++++++++++++++++++++----------------- 1 file changed, 25 insertions(+), 20 deletions(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index a66e5d5b19..28131a0160 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -178,26 +178,31 @@ def _find_exe(exe, paths=None): return exe -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. -if get_platform() == get_host_platform() and get_host_platform() == "win-arm64": - # Use the native MSVC host if the host platform would need expensive - # emulation for x86. - PLAT_TO_VCVARS = { - 'win32': 'arm64_x86', - 'win-amd64': 'arm64_amd64', - 'win-arm32': 'arm64_arm', - 'win-arm64': 'arm64', - } -else: - # Always cross-compile from x86 to work with the lighter-weight MSVC - # installs that do not include native 64-bit tools. - PLAT_TO_VCVARS = { - 'win32': 'x86', - 'win-amd64': 'x86_amd64', - 'win-arm32': 'x86_arm', - 'win-arm64': 'x86_arm64', - } +def _get_plat_to_vcvars(): + # A map keyed by get_platform() return values to values accepted by + # 'vcvarsall.bat'. + host_platform = get_host_platform() + if host_platform == get_platform() and get_host_platform() == "win-arm64": + # Use the native MSVC host if the host platform would need expensive + # emulation for x86. + return { + 'win32': 'arm64_x86', + 'win-amd64': 'arm64_amd64', + 'win-arm32': 'arm64_arm', + 'win-arm64': 'arm64', + } + else: + # Always cross-compile from x86 to work with the lighter-weight MSVC + # installs that do not include native 64-bit tools. + return { + 'win32': 'x86', + 'win-amd64': 'x86_amd64', + 'win-arm32': 'x86_arm', + 'win-arm64': 'x86_arm64', + } + + +PLAT_TO_VCVARS = _get_plat_to_vcvars() class MSVCCompiler(CCompiler): From a46c2401187c80b8647d3f3d049bed96c88b054d Mon Sep 17 00:00:00 2001 From: Kagami Sascha Rosylight Date: Mon, 26 Aug 2024 18:57:54 +0200 Subject: [PATCH 1039/1761] nit --- distutils/_msvccompiler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index 28131a0160..6f2d8ceee4 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -182,7 +182,7 @@ def _get_plat_to_vcvars(): # A map keyed by get_platform() return values to values accepted by # 'vcvarsall.bat'. host_platform = get_host_platform() - if host_platform == get_platform() and get_host_platform() == "win-arm64": + if host_platform == get_platform() and host_platform == "win-arm64": # Use the native MSVC host if the host platform would need expensive # emulation for x86. 
return { From 71dd4afed8c56ae99d49410054d8fb6a12ad6ecd Mon Sep 17 00:00:00 2001 From: Kagami Sascha Rosylight Date: Mon, 26 Aug 2024 19:00:14 +0200 Subject: [PATCH 1040/1761] python supports comparison chains --- distutils/_msvccompiler.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index 6f2d8ceee4..5a5888901c 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -181,8 +181,7 @@ def _find_exe(exe, paths=None): def _get_plat_to_vcvars(): # A map keyed by get_platform() return values to values accepted by # 'vcvarsall.bat'. - host_platform = get_host_platform() - if host_platform == get_platform() and host_platform == "win-arm64": + if get_platform() == get_host_platform() == "win-arm64": # Use the native MSVC host if the host platform would need expensive # emulation for x86. return { From c3c0f06e53e6571c579586855c0330c678e6985f Mon Sep 17 00:00:00 2001 From: Kagami Sascha Rosylight Date: Mon, 26 Aug 2024 20:22:06 +0200 Subject: [PATCH 1041/1761] apply feedback --- distutils/_msvccompiler.py | 42 ++++++++++++++++---------------------- 1 file changed, 18 insertions(+), 24 deletions(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index 5a5888901c..e7a17684ca 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -178,30 +178,24 @@ def _find_exe(exe, paths=None): return exe -def _get_plat_to_vcvars(): - # A map keyed by get_platform() return values to values accepted by - # 'vcvarsall.bat'. - if get_platform() == get_host_platform() == "win-arm64": - # Use the native MSVC host if the host platform would need expensive - # emulation for x86. - return { - 'win32': 'arm64_x86', - 'win-amd64': 'arm64_amd64', - 'win-arm32': 'arm64_arm', - 'win-arm64': 'arm64', - } - else: - # Always cross-compile from x86 to work with the lighter-weight MSVC - # installs that do not include native 64-bit tools. - return { - 'win32': 'x86', - 'win-amd64': 'x86_amd64', - 'win-arm32': 'x86_arm', - 'win-arm64': 'x86_arm64', - } - - -PLAT_TO_VCVARS = _get_plat_to_vcvars() +if get_host_platform() == "win-arm64": + # Use the native MSVC host if the host platform would need expensive + # emulation for x86. + PLAT_TO_VCVARS = { + 'win32': 'arm64_x86', + 'win-amd64': 'arm64_amd64', + 'win-arm32': 'arm64_arm', + 'win-arm64': 'arm64', + } +else: + # Always cross-compile from x86 to work with the lighter-weight MSVC + # installs that do not include native 64-bit tools. + PLAT_TO_VCVARS = { + 'win32': 'x86', + 'win-amd64': 'x86_amd64', + 'win-arm32': 'x86_arm', + 'win-arm64': 'x86_arm64', + } class MSVCCompiler(CCompiler): From a13507cc6170a63271ac2a79d588c5a962dcf2fb Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 26 Aug 2024 16:05:57 -0400 Subject: [PATCH 1042/1761] Simply suppress the exception. --- distutils/dist.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/distutils/dist.py b/distutils/dist.py index 19f4cc5c52..60edc5b514 100644 --- a/distutils/dist.py +++ b/distutils/dist.py @@ -346,12 +346,8 @@ def _gen_paths(self): prefix = '.' * (os.name == 'posix') filename = prefix + 'pydistutils.cfg' if self.want_user_cfg: - try: + with contextlib.suppress(RuntimeError): yield pathlib.Path('~').expanduser() / filename - except RuntimeError: - warnings.warn( - "Failed to locate user home directory. Skipping user config." 
- ) # All platforms support local setup.cfg yield pathlib.Path('setup.cfg') From b413f919ce8b14fb14e6516475a2976ed4f33362 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 26 Aug 2024 16:16:15 -0400 Subject: [PATCH 1043/1761] Sort imports --- distutils/_msvccompiler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index e7a17684ca..faebb44b52 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -33,7 +33,7 @@ LibError, LinkError, ) -from .util import get_platform, get_host_platform +from .util import get_host_platform, get_platform def _find_vc2015(): From 334a7fca2fcf216fdabf1adacc0c6ad04947daec Mon Sep 17 00:00:00 2001 From: Kagami Sascha Rosylight Date: Mon, 26 Aug 2024 22:31:15 +0200 Subject: [PATCH 1044/1761] restore the lost comment --- distutils/_msvccompiler.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index faebb44b52..115a28f7b2 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -178,6 +178,8 @@ def _find_exe(exe, paths=None): return exe +# A map keyed by get_platform() return values to values accepted by +# 'vcvarsall.bat'. if get_host_platform() == "win-arm64": # Use the native MSVC host if the host platform would need expensive # emulation for x86. From 93a4dc928319e26466e2772f510374b57bc09bab Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 26 Aug 2024 16:50:01 -0400 Subject: [PATCH 1045/1761] Define the variable in one place. --- distutils/_msvccompiler.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index faebb44b52..cb10bd561c 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -178,24 +178,28 @@ def _find_exe(exe, paths=None): return exe -if get_host_platform() == "win-arm64": - # Use the native MSVC host if the host platform would need expensive - # emulation for x86. - PLAT_TO_VCVARS = { +PLAT_TO_VCVARS = ( + { + # Use the native MSVC host if the host platform would need expensive + # emulation for x86. 'win32': 'arm64_x86', 'win-amd64': 'arm64_amd64', 'win-arm32': 'arm64_arm', 'win-arm64': 'arm64', } -else: - # Always cross-compile from x86 to work with the lighter-weight MSVC - # installs that do not include native 64-bit tools. - PLAT_TO_VCVARS = { + if get_host_platform() == "win-arm64" + else { + # Always cross-compile from x86 to work with the lighter-weight MSVC + # installs that do not include native 64-bit tools. 'win32': 'x86', 'win-amd64': 'x86_amd64', 'win-arm32': 'x86_arm', 'win-arm64': 'x86_arm64', } +) +""" +Maps get_platform() results to values expected by vcvarsall.bat. +""" class MSVCCompiler(CCompiler): From dde3ad3dfa5c04e1bf85c907a42b2efc0cb088f3 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 26 Aug 2024 17:10:16 -0400 Subject: [PATCH 1046/1761] Extract logic around the mappings into a function to compute the spec. --- distutils/_msvccompiler.py | 64 ++++++++++++++++++++++---------------- 1 file changed, 38 insertions(+), 26 deletions(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index cb10bd561c..9052f265b5 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -178,28 +178,41 @@ def _find_exe(exe, paths=None): return exe -PLAT_TO_VCVARS = ( - { - # Use the native MSVC host if the host platform would need expensive - # emulation for x86. 
- 'win32': 'arm64_x86', - 'win-amd64': 'arm64_amd64', - 'win-arm32': 'arm64_arm', - 'win-arm64': 'arm64', - } - if get_host_platform() == "win-arm64" - else { - # Always cross-compile from x86 to work with the lighter-weight MSVC - # installs that do not include native 64-bit tools. - 'win32': 'x86', - 'win-amd64': 'x86_amd64', - 'win-arm32': 'x86_arm', - 'win-arm64': 'x86_arm64', - } -) -""" -Maps get_platform() results to values expected by vcvarsall.bat. -""" +_vcvars_names = { + 'win32': 'x86', + 'win-amd64': 'amd64', + 'win-arm32': 'arm', + 'win-arm64': 'arm64', +} + + +def _get_vcvars_spec(host_platform, platform): + """ + Given a host platform and platform, determine the spec for vcvarsall. + + Uses the native MSVC host if the host platform would need expensive + emulation for x86. + + >>> _get_vcvars_spec('win-arm64', 'win32') + 'arm64_x86' + >>> _get_vcvars_spec('win-arm64', 'win-amd64') + 'arm64_amd64' + + Always cross-compile from x86 to work with the lighter-weight MSVC + installs that do not include native 64-bit tools. + + >>> _get_vcvars_spec('win32', 'win32') + 'x86' + >>> _get_vcvars_spec('win-arm32', 'win-arm32') + 'x86_arm' + >>> _get_vcvars_spec('win-amd64', 'win-arm64') + 'x86_arm64' + """ + if host_platform != 'win-arm64': + host_platform = 'win32' + vc_hp = _vcvars_names[host_platform] + vc_plat = _vcvars_names[platform] + return vc_hp if vc_hp == vc_plat else f'{vc_hp}_{vc_plat}' class MSVCCompiler(CCompiler): @@ -255,13 +268,12 @@ def initialize(self, plat_name=None): if plat_name is None: plat_name = get_platform() # sanity check for platforms to prevent obscure errors later. - if plat_name not in PLAT_TO_VCVARS: + if plat_name not in _vcvars_names: raise DistutilsPlatformError( - f"--plat-name must be one of {tuple(PLAT_TO_VCVARS)}" + f"--plat-name must be one of {tuple(_vcvars_names)}" ) - # Get the vcvarsall.bat spec for the requested platform. - plat_spec = PLAT_TO_VCVARS[plat_name] + plat_spec = _get_vcvars_spec(get_host_platform(), get_platform()) vc_env = _get_vc_env(plat_spec) if not vc_env: From 761162d7eb116db5d855f0ffda62768e1f35c42c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 26 Aug 2024 17:16:34 -0400 Subject: [PATCH 1047/1761] Tweak docstring --- distutils/_msvccompiler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py index 9052f265b5..7400fbaa2b 100644 --- a/distutils/_msvccompiler.py +++ b/distutils/_msvccompiler.py @@ -198,8 +198,8 @@ def _get_vcvars_spec(host_platform, platform): >>> _get_vcvars_spec('win-arm64', 'win-amd64') 'arm64_amd64' - Always cross-compile from x86 to work with the lighter-weight MSVC - installs that do not include native 64-bit tools. + Otherwise, always cross-compile from x86 to work with the + lighter-weight MSVC installs that do not include native 64-bit tools. >>> _get_vcvars_spec('win32', 'win32') 'x86' From b690b3e88937277cbd7abc3545dbaacf49cebc50 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 27 Aug 2024 02:50:26 -0400 Subject: [PATCH 1048/1761] Remove legacy msvc compiler modules. 
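
These modules targeted Visual Studio 2008 and older and have carried a
DeprecationWarning for some time; distutils/_msvccompiler.py remains as
the supported implementation. As a rough migration sketch (the
make_compiler helper below is illustrative only, not part of this
change), code that still imports the legacy names can switch to the
maintained module:

    # Hypothetical sketch: use the maintained MSVC support instead of
    # the removed msvccompiler/msvc9compiler modules.
    from distutils._msvccompiler import MSVCCompiler

    def make_compiler(verbose=False, dry_run=False, force=False):
        compiler = MSVCCompiler(verbose=verbose, dry_run=dry_run, force=force)
        # initialize() locates cl.exe/link.exe by querying vcvarsall.bat
        # for the current platform and raises DistutilsPlatformError when
        # no compatible Visual Studio installation can be found.
        compiler.initialize()
        return compiler
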
--- distutils/msvc9compiler.py | 822 ------------------------------------- distutils/msvccompiler.py | 687 ------------------------------- 2 files changed, 1509 deletions(-) delete mode 100644 distutils/msvc9compiler.py delete mode 100644 distutils/msvccompiler.py diff --git a/distutils/msvc9compiler.py b/distutils/msvc9compiler.py deleted file mode 100644 index b41f54f2b3..0000000000 --- a/distutils/msvc9compiler.py +++ /dev/null @@ -1,822 +0,0 @@ -"""distutils.msvc9compiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio 2008. - -The module is compatible with VS 2005 and VS 2008. You can find legacy support -for older versions of VS in distutils.msvccompiler. -""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) -# ported to VS2005 and VS 2008 by Christian Heimes - -import os -import re -import subprocess -import sys -import warnings -import winreg - -from ._log import log -from .ccompiler import CCompiler, gen_lib_options -from .errors import ( - CompileError, - DistutilsExecError, - DistutilsPlatformError, - LibError, - LinkError, -) -from .util import get_platform - -warnings.warn( - "msvc9compiler is deprecated and slated to be removed " - "in the future. Please discontinue use or file an issue " - "with pypa/distutils describing your use case.", - DeprecationWarning, -) - -RegOpenKeyEx = winreg.OpenKeyEx -RegEnumKey = winreg.EnumKey -RegEnumValue = winreg.EnumValue -RegError = winreg.error - -HKEYS = ( - winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT, -) - -NATIVE_WIN64 = sys.platform == 'win32' and sys.maxsize > 2**32 -if NATIVE_WIN64: - # Visual C++ is a 32-bit application, so we need to look in - # the corresponding registry branch, if we're running a - # 64-bit Python on Win64 - VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f" - WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows" - NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework" -else: - VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" - WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" - NET_BASE = r"Software\Microsoft\.NETFramework" - -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is -# the param to cross-compile on x86 targeting amd64.) -PLAT_TO_VCVARS = { - 'win32': 'x86', - 'win-amd64': 'amd64', -} - - -class Reg: - """Helper class to read values from the registry""" - - def get_value(cls, path, key): - for base in HKEYS: - d = cls.read_values(base, path) - if d and key in d: - return d[key] - raise KeyError(key) - - get_value = classmethod(get_value) - - def read_keys(cls, base, key): - """Return list of registry keys.""" - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i += 1 - return L - - read_keys = classmethod(read_keys) - - def read_values(cls, base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. 
- """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) - i += 1 - return d - - read_values = classmethod(read_values) - - def convert_mbcs(s): - dec = getattr(s, "decode", None) - if dec is not None: - try: - s = dec("mbcs") - except UnicodeError: - pass - return s - - convert_mbcs = staticmethod(convert_mbcs) - - -class MacroExpander: - def __init__(self, version): - self.macros = {} - self.vsbase = VS_BASE % version - self.load_macros(version) - - def set_macro(self, macro, path, key): - self.macros[f"$({macro})"] = Reg.get_value(path, key) - - def load_macros(self, version): - self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir") - self.set_macro("FrameworkDir", NET_BASE, "installroot") - try: - if version >= 8.0: - self.set_macro("FrameworkSDKDir", NET_BASE, "sdkinstallrootv2.0") - else: - raise KeyError("sdkinstallrootv2.0") # noqa: TRY301 - except KeyError: - raise DistutilsPlatformError( - """Python was built with Visual Studio 2008; -extensions must be built with a compiler than can generate compatible binaries. -Visual Studio 2008 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""" - ) - - if version >= 9.0: - self.set_macro("FrameworkVersion", self.vsbase, "clr version") - self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder") - else: - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = Reg.get_value(base, rf"{p}\{key}") - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - - -def get_build_version(): - """Return the version of MSVC that was used to build Python. - - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - prefix = "MSC v." - i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - if majorVersion >= 13: - # v13 was skipped and should be v14 - majorVersion += 1 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. 
- if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - - -def removeDuplicates(variable): - """Remove duplicate values of an environment variable.""" - oldList = variable.split(os.pathsep) - newList = [] - for i in oldList: - if i not in newList: - newList.append(i) - newVariable = os.pathsep.join(newList) - return newVariable - - -def find_vcvarsall(version): - """Find the vcvarsall.bat file - - At first it tries to find the productdir of VS 2008 in the registry. If - that fails it falls back to the VS90COMNTOOLS env var. - """ - vsbase = VS_BASE % version - try: - productdir = Reg.get_value(rf"{vsbase}\Setup\VC", "productdir") - except KeyError: - log.debug("Unable to find productdir in registry") - productdir = None - - if not productdir or not os.path.isdir(productdir): - toolskey = f"VS{version:0.0f}0COMNTOOLS" - toolsdir = os.environ.get(toolskey, None) - - if toolsdir and os.path.isdir(toolsdir): - productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") - productdir = os.path.abspath(productdir) - if not os.path.isdir(productdir): - log.debug(f"{productdir} is not a valid directory") - return None - else: - log.debug(f"Env var {toolskey} is not set or invalid") - if not productdir: - log.debug("No productdir found") - return None - vcvarsall = os.path.join(productdir, "vcvarsall.bat") - if os.path.isfile(vcvarsall): - return vcvarsall - log.debug("Unable to find vcvarsall.bat") - return None - - -def query_vcvarsall(version, arch="x86"): - """Launch vcvarsall.bat and read the settings from its environment""" - vcvarsall = find_vcvarsall(version) - interesting = {"include", "lib", "libpath", "path"} - result = {} - - if vcvarsall is None: - raise DistutilsPlatformError("Unable to find vcvarsall.bat") - log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) - popen = subprocess.Popen( - f'"{vcvarsall}" {arch} & set', - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - try: - stdout, stderr = popen.communicate() - if popen.wait() != 0: - raise DistutilsPlatformError(stderr.decode("mbcs")) - - stdout = stdout.decode("mbcs") - for line in stdout.split("\n"): - line = Reg.convert_mbcs(line) - if '=' not in line: - continue - line = line.strip() - key, value = line.split('=', 1) - key = key.lower() - if key in interesting: - if value.endswith(os.pathsep): - value = value[:-1] - result[key] = removeDuplicates(value) - - finally: - popen.stdout.close() - popen.stderr.close() - - if len(result) != len(interesting): - raise ValueError(str(list(result.keys()))) - - return result - - -# More globals -VERSION = get_build_version() -# MACROS = MacroExpander(VERSION) - - -class MSVCCompiler(CCompiler): - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. 
- src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, verbose=False, dry_run=False, force=False): - super().__init__(verbose, dry_run, force) - self.__version = VERSION - self.__root = r"Software\Microsoft\VisualStudio" - # self.__macros = MACROS - self.__paths = [] - # target platform (.plat_name is consistent with 'bdist') - self.plat_name = None - self.__arch = None # deprecated name - self.initialized = False - - def initialize(self, plat_name=None): # noqa: C901 - # multi-init means we would need to check platform same each time... - assert not self.initialized, "don't init multiple times" - if self.__version < 8.0: - raise DistutilsPlatformError( - f"VC {self.__version:0.1f} is not supported by this module" - ) - if plat_name is None: - plat_name = get_platform() - # sanity check for platforms to prevent obscure errors later. - ok_plats = 'win32', 'win-amd64' - if plat_name not in ok_plats: - raise DistutilsPlatformError(f"--plat-name must be one of {ok_plats}") - - if ( - "DISTUTILS_USE_SDK" in os.environ - and "MSSdk" in os.environ - and self.find_exe("cl.exe") - ): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - # On x86, 'vcvars32.bat amd64' creates an env that doesn't work; - # to cross compile, you use 'x86_amd64'. - # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross - # compile use 'x86' (ie, it runs the x86 compiler directly) - if plat_name in (get_platform(), 'win32'): - # native build or cross-compile to win32 - plat_spec = PLAT_TO_VCVARS[plat_name] - else: - # cross compile from win32 -> some 64bit - plat_spec = ( - PLAT_TO_VCVARS[get_platform()] + '_' + PLAT_TO_VCVARS[plat_name] - ) - - vc_env = query_vcvarsall(VERSION, plat_spec) - - self.__paths = vc_env['path'].split(os.pathsep) - os.environ['lib'] = vc_env['lib'] - os.environ['include'] = vc_env['include'] - - if len(self.__paths) == 0: - raise DistutilsPlatformError( - f"Python was built with {self.__product}, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." 
- ) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - # self.set_path_env_var('lib') - # self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ";".join(self.__paths) - - self.preprocess_options = None - if self.__arch == "x86": - self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/DNDEBUG'] - self.compile_options_debug = [ - '/nologo', - '/Od', - '/MDd', - '/W3', - '/Z7', - '/D_DEBUG', - ] - else: - # Win64 - self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG'] - self.compile_options_debug = [ - '/nologo', - '/Od', - '/MDd', - '/W3', - '/GS-', - '/Z7', - '/D_DEBUG', - ] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'] - self.ldflags_static = ['/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, source_filenames, strip_dir=False, output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - (base, ext) = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base) :] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError(f"Don't know how to compile {src_name}") - if strip_dir: - base = os.path.basename(base) - if ext in self._rc_extensions: - obj_names.append(os.path.join(output_dir, base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append(os.path.join(output_dir, base + self.res_extension)) - else: - obj_names.append(os.path.join(output_dir, base + self.obj_extension)) - return obj_names - - def compile( # noqa: C901 - self, - sources, - output_dir=None, - macros=None, - include_dirs=None, - debug=False, - extra_preargs=None, - extra_postargs=None, - depends=None, - ): - if not self.initialized: - self.initialize() - compile_info = self._setup_compile( - output_dir, macros, include_dirs, sources, depends, extra_postargs - ) - macros, objects, extra_postargs, pp_opts, build = compile_info - - compile_opts = extra_preargs or [] - compile_opts.append('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) - except DistutilsExecError as msg: - raise 
CompileError(msg) - continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext(os.path.basename(src)) - rc_file = os.path.join(rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError(f"Don't know how to compile {src} to {obj}") - - output_opt = "/Fo" + obj - try: - self.spawn( - [self.cc] - + compile_opts - + pp_opts - + [input_opt, output_opt] - + extra_postargs - ) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - def create_static_lib( - self, objects, output_libname, output_dir=None, debug=False, target_lang=None - ): - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? - try: - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def link( # noqa: C901 - self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=False, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None, - ): - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) - (libraries, library_dirs, runtime_library_dirs) = fixed_args - - if runtime_library_dirs: - self.warn( - "I don't know what to do with 'runtime_library_dirs': " - + str(runtime_library_dirs) - ) - - lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [f"/EXPORT:{sym}" for sym in export_symbols or []] - - ld_args = ( - ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] - ) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. 
- build_temp = os.path.dirname(objects[0]) - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename) - ) - implib_file = os.path.join(build_temp, self.library_filename(dll_name)) - ld_args.append('/IMPLIB:' + implib_file) - - self.manifest_setup_ldargs(output_filename, build_temp, ld_args) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - # embed the manifest - # XXX - this is somewhat fragile - if mt.exe fails, distutils - # will still consider the DLL up-to-date, but it will not have a - # manifest. Maybe we should link to a temp file? OTOH, that - # implies a build environment error that shouldn't go undetected. - mfinfo = self.manifest_get_embed_info(target_desc, ld_args) - if mfinfo is not None: - mffilename, mfid = mfinfo - out_arg = f'-outputresource:{output_filename};{mfid}' - try: - self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg]) - except DistutilsExecError as msg: - raise LinkError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): - # If we need a manifest at all, an embedded manifest is recommended. - # See MSDN article titled - # "Understanding manifest generation for C/C++ programs" - # (currently at https://learn.microsoft.com/en-us/cpp/build/understanding-manifest-generation-for-c-cpp-programs) - # Ask the linker to generate the manifest in the temp dir, so - # we can check it, and possibly embed it, later. - temp_manifest = os.path.join( - build_temp, os.path.basename(output_filename) + ".manifest" - ) - ld_args.append('/MANIFESTFILE:' + temp_manifest) - - def manifest_get_embed_info(self, target_desc, ld_args): - # If a manifest should be embedded, return a tuple of - # (manifest_filename, resource_id). Returns None if no manifest - # should be embedded. See https://bugs.python.org/issue7833 for why - # we want to avoid any manifest for extension modules if we can) - for arg in ld_args: - if arg.startswith("/MANIFESTFILE:"): - temp_manifest = arg.split(":", 1)[1] - break - else: - # no /MANIFESTFILE so nothing to do. - return None - if target_desc == CCompiler.EXECUTABLE: - # by default, executables always get the manifest with the - # CRT referenced. - mfid = 1 - else: - # Extension modules try and avoid any manifest if possible. - mfid = 2 - temp_manifest = self._remove_visual_c_ref(temp_manifest) - if temp_manifest is None: - return None - return temp_manifest, mfid - - def _remove_visual_c_ref(self, manifest_file): - try: - # Remove references to the Visual C runtime, so they will - # fall through to the Visual C dependency of Python.exe. - # This way, when installed for a restricted user (e.g. - # runtimes are not in WinSxS folder, but in Python's own - # folder), the runtimes do not need to be in every folder - # with .pyd's. - # Returns either the filename of the modified manifest or - # None if no manifest should be embedded. 
- manifest_f = open(manifest_file) - try: - manifest_buf = manifest_f.read() - finally: - manifest_f.close() - pattern = re.compile( - r"""|)""", - re.DOTALL, - ) - manifest_buf = re.sub(pattern, "", manifest_buf) - pattern = r"\s*" - manifest_buf = re.sub(pattern, "", manifest_buf) - # Now see if any other assemblies are referenced - if not, we - # don't want a manifest embedded. - pattern = re.compile( - r"""|)""", - re.DOTALL, - ) - if re.search(pattern, manifest_buf) is None: - return None - - manifest_f = open(manifest_file, 'w') - try: - manifest_f.write(manifest_buf) - return manifest_file - finally: - manifest_f.close() - except OSError: - pass - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++" - ) - - def library_option(self, lib): - return self.library_filename(lib) - - def find_library_file(self, dirs, lib, debug=False): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. - - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - return exe diff --git a/distutils/msvccompiler.py b/distutils/msvccompiler.py deleted file mode 100644 index 2a5e61d78d..0000000000 --- a/distutils/msvccompiler.py +++ /dev/null @@ -1,687 +0,0 @@ -"""distutils.msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio. 
-""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) - -import os -import sys -import warnings - -from ._log import log -from .ccompiler import CCompiler, gen_lib_options -from .errors import ( - CompileError, - DistutilsExecError, - DistutilsPlatformError, - LibError, - LinkError, -) - -_can_read_reg = False -try: - import winreg - - _can_read_reg = True - hkey_mod = winreg - - RegOpenKeyEx = winreg.OpenKeyEx - RegEnumKey = winreg.EnumKey - RegEnumValue = winreg.EnumValue - RegError = winreg.error - -except ImportError: - try: - import win32api - import win32con - - _can_read_reg = True - hkey_mod = win32con - - RegOpenKeyEx = win32api.RegOpenKeyEx - RegEnumKey = win32api.RegEnumKey - RegEnumValue = win32api.RegEnumValue - RegError = win32api.error - except ImportError: - log.info( - "Warning: Can't read registry to find the " - "necessary compiler setting\n" - "Make sure that Python modules winreg, " - "win32api or win32con are installed." - ) - pass - -if _can_read_reg: - HKEYS = ( - hkey_mod.HKEY_USERS, - hkey_mod.HKEY_CURRENT_USER, - hkey_mod.HKEY_LOCAL_MACHINE, - hkey_mod.HKEY_CLASSES_ROOT, - ) - - -warnings.warn( - "msvccompiler is deprecated and slated to be removed " - "in the future. Please discontinue use or file an issue " - "with pypa/distutils describing your use case.", - DeprecationWarning, -) - - -def read_keys(base, key): - """Return list of registry keys.""" - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i += 1 - return L - - -def read_values(base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. - """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[convert_mbcs(name)] = convert_mbcs(value) - i += 1 - return d - - -def convert_mbcs(s): - dec = getattr(s, "decode", None) - if dec is not None: - try: - s = dec("mbcs") - except UnicodeError: - pass - return s - - -class MacroExpander: - def __init__(self, version): - self.macros = {} - self.load_macros(version) - - def set_macro(self, macro, path, key): - for base in HKEYS: - d = read_values(base, path) - if d: - self.macros[f"$({macro})"] = d[key] - break - - def load_macros(self, version): - vsbase = rf"Software\Microsoft\VisualStudio\{version:0.1f}" - self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir") - net = r"Software\Microsoft\.NETFramework" - self.set_macro("FrameworkDir", net, "installroot") - try: - if version > 7.0: - self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") - else: - self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") - except KeyError: - raise DistutilsPlatformError( - """Python was built with Visual Studio 2003; -extensions must be built with a compiler than can generate compatible binaries. -Visual Studio 2003 was not found on this system. 
If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""" - ) - - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = read_values(base, rf"{p}\{key}") - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - - -def get_build_version(): - """Return the version of MSVC that was used to build Python. - - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - prefix = "MSC v." - i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - if majorVersion >= 13: - # v13 was skipped and should be v14 - majorVersion += 1 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - - -def get_build_architecture(): - """Return the processor architecture. - - Possible results are "Intel" or "AMD64". - """ - - prefix = " bit (" - i = sys.version.find(prefix) - if i == -1: - return "Intel" - j = sys.version.find(")", i) - return sys.version[i + len(prefix) : j] - - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. - if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - - -class MSVCCompiler(CCompiler): - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - compiler_type = 'msvc' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, verbose=False, dry_run=False, force=False): - super().__init__(verbose, dry_run, force) - self.__version = get_build_version() - self.__arch = get_build_architecture() - if self.__arch == "Intel": - # x86 - if self.__version >= 7: - self.__root = r"Software\Microsoft\VisualStudio" - self.__macros = MacroExpander(self.__version) - else: - self.__root = r"Software\Microsoft\Devstudio" - self.__product = f"Visual Studio version {self.__version}" - else: - # Win64. 
Assume this was built with the platform SDK - self.__product = "Microsoft SDK compiler %s" % (self.__version + 6) - - self.initialized = False - - def initialize(self): - self.__paths = [] - if ( - "DISTUTILS_USE_SDK" in os.environ - and "MSSdk" in os.environ - and self.find_exe("cl.exe") - ): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - self.__paths = self.get_msvc_paths("path") - - if len(self.__paths) == 0: - raise DistutilsPlatformError( - f"Python was built with {self.__product}, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." - ) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - self.set_path_env_var('lib') - self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ";".join(self.__paths) - - self.preprocess_options = None - if self.__arch == "Intel": - self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GX', '/DNDEBUG'] - self.compile_options_debug = [ - '/nologo', - '/Od', - '/MDd', - '/W3', - '/GX', - '/Z7', - '/D_DEBUG', - ] - else: - # Win64 - self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG'] - self.compile_options_debug = [ - '/nologo', - '/Od', - '/MDd', - '/W3', - '/GS-', - '/Z7', - '/D_DEBUG', - ] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'] - else: - self.ldflags_shared_debug = [ - '/DLL', - '/nologo', - '/INCREMENTAL:no', - '/pdb:None', - '/DEBUG', - ] - self.ldflags_static = ['/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, source_filenames, strip_dir=False, output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - (base, ext) = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base) :] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError(f"Don't know how to compile {src_name}") - if strip_dir: - base = os.path.basename(base) - if ext in self._rc_extensions: - obj_names.append(os.path.join(output_dir, base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append(os.path.join(output_dir, base + self.res_extension)) - else: - obj_names.append(os.path.join(output_dir, base + self.obj_extension)) - return obj_names - - def compile( # noqa: C901 - self, - sources, - output_dir=None, - macros=None, - include_dirs=None, - debug=False, - extra_preargs=None, - extra_postargs=None, - depends=None, - ): - if not self.initialized: - self.initialize() - compile_info = self._setup_compile( - output_dir, macros, include_dirs, sources, depends, 
extra_postargs - ) - macros, objects, extra_postargs, pp_opts, build = compile_info - - compile_opts = extra_preargs or [] - compile_opts.append('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) - except DistutilsExecError as msg: - raise CompileError(msg) - continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext(os.path.basename(src)) - rc_file = os.path.join(rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) - - except DistutilsExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError(f"Don't know how to compile {src} to {obj}") - - output_opt = "/Fo" + obj - try: - self.spawn( - [self.cc] - + compile_opts - + pp_opts - + [input_opt, output_opt] - + extra_postargs - ) - except DistutilsExecError as msg: - raise CompileError(msg) - - return objects - - def create_static_lib( - self, objects, output_libname, output_dir=None, debug=False, target_lang=None - ): - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? 
- try: - self.spawn([self.lib] + lib_args) - except DistutilsExecError as msg: - raise LibError(msg) - else: - log.debug("skipping %s (up-to-date)", output_filename) - - def link( # noqa: C901 - self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=False, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None, - ): - if not self.initialized: - self.initialize() - (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) - (libraries, library_dirs, runtime_library_dirs) = fixed_args - - if runtime_library_dirs: - self.warn( - "I don't know what to do with 'runtime_library_dirs': " - + str(runtime_library_dirs) - ) - - lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [f"/EXPORT:{sym}" for sym in export_symbols or []] - - ld_args = ( - ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] - ) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. - if export_symbols is not None: - (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename) - ) - implib_file = os.path.join( - os.path.dirname(objects[0]), self.library_filename(dll_name) - ) - ld_args.append('/IMPLIB:' + implib_file) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except DistutilsExecError as msg: - raise LinkError(msg) - - else: - log.debug("skipping %s (up-to-date)", output_filename) - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++" - ) - - def library_option(self, lib): - return self.library_filename(lib) - - def find_library_file(self, dirs, lib, debug=False): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. 
- - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - return exe - - def get_msvc_paths(self, path, platform='x86'): - """Get a list of devstudio directories (include, lib or path). - - Return a list of strings. The list will be empty if unable to - access the registry or appropriate registry keys not found. - """ - if not _can_read_reg: - return [] - - path = path + " dirs" - if self.__version >= 7: - key = rf"{self.__root}\{self.__version:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" - else: - key = ( - rf"{self.__root}\6.0\Build System\Components\Platforms" - rf"\Win32 ({platform})\Directories" - ) - - for base in HKEYS: - d = read_values(base, key) - if d: - if self.__version >= 7: - return self.__macros.sub(d[path]).split(";") - else: - return d[path].split(";") - # MSVC 6 seems to create the registry entries we need only when - # the GUI is run. - if self.__version == 6: - for base in HKEYS: - if read_values(base, rf"{self.__root}\6.0") is not None: - self.warn( - "It seems you have Visual Studio 6 installed, " - "but the expected registry settings are not present.\n" - "You must at least run the Visual Studio GUI once " - "so that these entries are created." - ) - break - return [] - - def set_path_env_var(self, name): - """Set environment variable 'name' to an MSVC path type value. - - This is equivalent to a SET command prior to execution of spawned - commands. - """ - - if name == "lib": - p = self.get_msvc_paths("library") - else: - p = self.get_msvc_paths(name) - if p: - os.environ[name] = ';'.join(p) - - -if get_build_version() >= 8.0: - log.debug("Importing new compiler from distutils.msvc9compiler") - OldMSVCCompiler = MSVCCompiler - # get_build_architecture not really relevant now we support cross-compile - from distutils.msvc9compiler import ( - MacroExpander, - MSVCCompiler, - ) From 7af6f9719eb4ca5ff49cbcf13d52c70ea2c66e22 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 27 Aug 2024 03:03:20 -0400 Subject: [PATCH 1049/1761] Remove associated tests --- distutils/tests/test_msvc9compiler.py | 184 -------------------------- 1 file changed, 184 deletions(-) delete mode 100644 distutils/tests/test_msvc9compiler.py diff --git a/distutils/tests/test_msvc9compiler.py b/distutils/tests/test_msvc9compiler.py deleted file mode 100644 index 6f6aabee4d..0000000000 --- a/distutils/tests/test_msvc9compiler.py +++ /dev/null @@ -1,184 +0,0 @@ -"""Tests for distutils.msvc9compiler.""" - -import os -import sys -from distutils.errors import DistutilsPlatformError -from distutils.tests import support - -import pytest - -# A manifest with the only assembly reference being the msvcrt assembly, so -# should have the assembly completely stripped. Note that although the -# assembly has a reference the assembly is removed - that is -# currently a "feature", not a bug :) -_MANIFEST_WITH_ONLY_MSVC_REFERENCE = """\ - - - - - - - - - - - - - - - - - -""" - -# A manifest with references to assemblies other than msvcrt. 
When processed, -# this assembly should be returned with just the msvcrt part removed. -_MANIFEST_WITH_MULTIPLE_REFERENCES = """\ - - - - - - - - - - - - - - - - - - - - - - -""" - -_CLEANED_MANIFEST = """\ - - - - - - - - - - - - - - - - - - -""" - -if sys.platform == "win32": - from distutils.msvccompiler import get_build_version - - if get_build_version() >= 8.0: - SKIP_MESSAGE = None - else: - SKIP_MESSAGE = "These tests are only for MSVC8.0 or above" -else: - SKIP_MESSAGE = "These tests are only for win32" - - -@pytest.mark.skipif('SKIP_MESSAGE', reason=SKIP_MESSAGE) -class Testmsvc9compiler(support.TempdirManager): - def test_no_compiler(self): - # makes sure query_vcvarsall raises - # a DistutilsPlatformError if the compiler - # is not found - from distutils.msvc9compiler import query_vcvarsall - - def _find_vcvarsall(version): - return None - - from distutils import msvc9compiler - - old_find_vcvarsall = msvc9compiler.find_vcvarsall - msvc9compiler.find_vcvarsall = _find_vcvarsall - try: - with pytest.raises(DistutilsPlatformError): - query_vcvarsall('wont find this version') - finally: - msvc9compiler.find_vcvarsall = old_find_vcvarsall - - def test_reg_class(self): - from distutils.msvc9compiler import Reg - - with pytest.raises(KeyError): - Reg.get_value('xxx', 'xxx') - - # looking for values that should exist on all - # windows registry versions. - path = r'Control Panel\Desktop' - v = Reg.get_value(path, 'dragfullwindows') - assert v in ('0', '1', '2') - - import winreg - - HKCU = winreg.HKEY_CURRENT_USER - keys = Reg.read_keys(HKCU, 'xxxx') - assert keys is None - - keys = Reg.read_keys(HKCU, r'Control Panel') - assert 'Desktop' in keys - - def test_remove_visual_c_ref(self): - from distutils.msvc9compiler import MSVCCompiler - - tempdir = self.mkdtemp() - manifest = os.path.join(tempdir, 'manifest') - f = open(manifest, 'w') - try: - f.write(_MANIFEST_WITH_MULTIPLE_REFERENCES) - finally: - f.close() - - compiler = MSVCCompiler() - compiler._remove_visual_c_ref(manifest) - - # see what we got - f = open(manifest) - try: - # removing trailing spaces - content = '\n'.join([line.rstrip() for line in f]) - finally: - f.close() - - # makes sure the manifest was properly cleaned - assert content == _CLEANED_MANIFEST - - def test_remove_entire_manifest(self): - from distutils.msvc9compiler import MSVCCompiler - - tempdir = self.mkdtemp() - manifest = os.path.join(tempdir, 'manifest') - f = open(manifest, 'w') - try: - f.write(_MANIFEST_WITH_ONLY_MSVC_REFERENCE) - finally: - f.close() - - compiler = MSVCCompiler() - got = compiler._remove_visual_c_ref(manifest) - assert got is None From 4e4e23cfcf7194d9d02ec262cfec33aa6701c28e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 27 Aug 2024 03:22:44 -0400 Subject: [PATCH 1050/1761] Remove monkeypatching of _msvccompiler. The monkeypatch indicates that the functionality was ported from Python 3.8, so the version in distutils should be adequate now. 
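
For reference, anything that still needs the environment the patched
function produced can call the distutils implementation directly; a
minimal sketch (the msvc_env helper and the 'x86_amd64' spec are
illustrative, not part of this change, and _get_vc_env is the same
private helper that the removed monkeypatch replaced by name):

    # Sketch: query the MSVC environment through distutils now that
    # setuptools no longer substitutes msvc14_get_vc_env for it.
    from distutils._msvccompiler import _get_vc_env
    from distutils.errors import DistutilsPlatformError

    def msvc_env(plat_spec='x86_amd64'):
        # plat_spec is a vcvarsall.bat argument such as 'x86' or
        # 'arm64_amd64'; the result maps lowercased variable names
        # (path, lib, include, ...) to their values.
        try:
            return _get_vc_env(plat_spec)
        except DistutilsPlatformError:
            # Raised when no suitable vcvarsall.bat can be found.
            return {}
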
--- setuptools/monkey.py | 37 - setuptools/msvc.py | 1750 ------------------------------------------ 2 files changed, 1787 deletions(-) delete mode 100644 setuptools/msvc.py diff --git a/setuptools/monkey.py b/setuptools/monkey.py index abcc2755be..e45ab9d961 100644 --- a/setuptools/monkey.py +++ b/setuptools/monkey.py @@ -4,12 +4,10 @@ from __future__ import annotations -import functools import inspect import platform import sys import types -from importlib import import_module from typing import TypeVar import distutils.filelist @@ -84,8 +82,6 @@ def patch_all(): 'distutils.command.build_ext' ].Extension = setuptools.extension.Extension - patch_for_msvc_specialized_compiler() - def _patch_distribution_metadata(): from . import _core_metadata @@ -121,36 +117,3 @@ def patch_func(replacement, target_mod, func_name): def get_unpatched_function(candidate): return candidate.unpatched - - -def patch_for_msvc_specialized_compiler(): - """ - Patch functions in distutils to use standalone Microsoft Visual C++ - compilers. - """ - from . import msvc - - if platform.system() != 'Windows': - # Compilers only available on Microsoft Windows - return - - def patch_params(mod_name, func_name): - """ - Prepare the parameters for patch_func to patch indicated function. - """ - repl_prefix = 'msvc14_' - repl_name = repl_prefix + func_name.lstrip('_') - repl = getattr(msvc, repl_name) - mod = import_module(mod_name) - if not hasattr(mod, func_name): - raise ImportError(func_name) - return repl, mod, func_name - - # Python 3.5+ - msvc14 = functools.partial(patch_params, 'distutils._msvccompiler') - - try: - # Patch distutils._msvccompiler._get_vc_env - patch_func(*msvc14('_get_vc_env')) - except ImportError: - pass diff --git a/setuptools/msvc.py b/setuptools/msvc.py deleted file mode 100644 index 57f09417ca..0000000000 --- a/setuptools/msvc.py +++ /dev/null @@ -1,1750 +0,0 @@ -""" -Improved support for Microsoft Visual C++ compilers. - -Known supported compilers: --------------------------- -Microsoft Visual C++ 14.X: - Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) - Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) - Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64) - -This may also support compilers shipped with compatible Visual Studio versions. -""" - -from __future__ import annotations - -import contextlib -import itertools -import json -import platform -import subprocess -from os import listdir, pathsep -from os.path import dirname, isdir, isfile, join -from subprocess import CalledProcessError -from typing import TYPE_CHECKING - -from more_itertools import unique_everseen - -import distutils.errors -from distutils.util import get_platform - -# https://github.com/python/mypy/issues/8166 -if not TYPE_CHECKING and platform.system() == 'Windows': - import winreg - from os import environ -else: - # Mock winreg and environ so the module can be imported on this platform. 
- - class winreg: - HKEY_USERS = None - HKEY_CURRENT_USER = None - HKEY_LOCAL_MACHINE = None - HKEY_CLASSES_ROOT = None - - environ: dict[str, str] = dict() - - -def _msvc14_find_vc2015(): - """Python 3.8 "distutils/_msvccompiler.py" backport""" - try: - key = winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, - r"Software\Microsoft\VisualStudio\SxS\VC7", - 0, - winreg.KEY_READ | winreg.KEY_WOW64_32KEY, - ) - except OSError: - return None, None - - best_version = 0 - best_dir = None - with key: - for i in itertools.count(): - try: - v, vc_dir, vt = winreg.EnumValue(key, i) - except OSError: - break - if v and vt == winreg.REG_SZ and isdir(vc_dir): - try: - version = int(float(v)) - except (ValueError, TypeError): - continue - if version >= 14 and version > best_version: - best_version, best_dir = version, vc_dir - return best_version, best_dir - - -def _msvc14_find_vc2017(): - """Python 3.8 "distutils/_msvccompiler.py" backport - - Returns "15, path" based on the result of invoking vswhere.exe - If no install is found, returns "None, None" - - The version is returned to avoid unnecessarily changing the function - result. It may be ignored when the path is not None. - - If vswhere.exe is not available, by definition, VS 2017 is not - installed. - """ - root = environ.get("ProgramFiles(x86)") or environ.get("ProgramFiles") - if not root: - return None, None - - variant = 'arm64' if get_platform() == 'win-arm64' else 'x86.x64' - suitable_components = ( - f"Microsoft.VisualStudio.Component.VC.Tools.{variant}", - "Microsoft.VisualStudio.Workload.WDExpress", - ) - - for component in suitable_components: - # Workaround for `-requiresAny` (only available on VS 2017 > 15.6) - with contextlib.suppress(CalledProcessError, OSError, UnicodeDecodeError): - path = ( - subprocess.check_output([ - join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), - "-latest", - "-prerelease", - "-requires", - component, - "-property", - "installationPath", - "-products", - "*", - ]) - .decode(encoding="mbcs", errors="strict") - .strip() - ) - - path = join(path, "VC", "Auxiliary", "Build") - if isdir(path): - return 15, path - - return None, None # no suitable component found - - -PLAT_SPEC_TO_RUNTIME = { - 'x86': 'x86', - 'x86_amd64': 'x64', - 'x86_arm': 'arm', - 'x86_arm64': 'arm64', -} - - -def _msvc14_find_vcvarsall(plat_spec): - """Python 3.8 "distutils/_msvccompiler.py" backport""" - _, best_dir = _msvc14_find_vc2017() - vcruntime = None - - if plat_spec in PLAT_SPEC_TO_RUNTIME: - vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec] - else: - vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86' - - if best_dir: - vcredist = join( - best_dir, - "..", - "..", - "redist", - "MSVC", - "**", - vcruntime_plat, - "Microsoft.VC14*.CRT", - "vcruntime140.dll", - ) - try: - import glob - - vcruntime = glob.glob(vcredist, recursive=True)[-1] - except (ImportError, OSError, LookupError): - vcruntime = None - - if not best_dir: - best_version, best_dir = _msvc14_find_vc2015() - if best_version: - vcruntime = join( - best_dir, - 'redist', - vcruntime_plat, - "Microsoft.VC140.CRT", - "vcruntime140.dll", - ) - - if not best_dir: - return None, None - - vcvarsall = join(best_dir, "vcvarsall.bat") - if not isfile(vcvarsall): - return None, None - - if not vcruntime or not isfile(vcruntime): - vcruntime = None - - return vcvarsall, vcruntime - - -def _msvc14_get_vc_env(plat_spec): - """Python 3.8 "distutils/_msvccompiler.py" backport""" - if "DISTUTILS_USE_SDK" in environ: - return {key.lower(): value for key, value in 
environ.items()} - - vcvarsall, vcruntime = _msvc14_find_vcvarsall(plat_spec) - if not vcvarsall: - raise distutils.errors.DistutilsPlatformError("Unable to find vcvarsall.bat") - - try: - out = subprocess.check_output( - 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec), - stderr=subprocess.STDOUT, - ).decode('utf-16le', errors='replace') - except subprocess.CalledProcessError as exc: - raise distutils.errors.DistutilsPlatformError( - "Error executing {}".format(exc.cmd) - ) from exc - - env = { - key.lower(): value - for key, _, value in (line.partition('=') for line in out.splitlines()) - if key and value - } - - if vcruntime: - env['py_vcruntime_redist'] = vcruntime - return env - - -def msvc14_get_vc_env(plat_spec): - """ - Patched "distutils._msvccompiler._get_vc_env" for support extra - Microsoft Visual C++ 14.X compilers. - - Set environment without use of "vcvarsall.bat". - - Parameters - ---------- - plat_spec: str - Target architecture. - - Return - ------ - dict - environment - """ - - # Always use backport from CPython 3.8 - try: - return _msvc14_get_vc_env(plat_spec) - except distutils.errors.DistutilsPlatformError as exc: - _augment_exception(exc, 14.0) - raise - - -def _augment_exception(exc, version, arch=''): - """ - Add details to the exception message to help guide the user - as to what action will resolve it. - """ - # Error if MSVC++ directory not found or environment not set - message = exc.args[0] - - if "vcvarsall" in message.lower() or "visual c" in message.lower(): - # Special error message if MSVC++ not installed - tmpl = 'Microsoft Visual C++ {version:0.1f} or greater is required.' - message = tmpl.format(**locals()) - msdownload = 'www.microsoft.com/download/details.aspx?id=%d' - if version == 9.0: - if arch.lower().find('ia64') > -1: - # For VC++ 9.0, if IA64 support is needed, redirect user - # to Windows SDK 7.0. - # Note: No download link available from Microsoft. - message += ' Get it with "Microsoft Windows SDK 7.0"' - else: - # For VC++ 9.0 redirect user to Vc++ for Python 2.7 : - # This redirection link is maintained by Microsoft. - # Contact vspython@microsoft.com if it needs updating. - message += ' Get it from http://aka.ms/vcpython27' - elif version == 10.0: - # For VC++ 10.0 Redirect user to Windows SDK 7.1 - message += ' Get it with "Microsoft Windows SDK 7.1": ' - message += msdownload % 8279 - elif version >= 14.0: - # For VC++ 14.X Redirect user to latest Visual C++ Build Tools - message += ( - ' Get it with "Microsoft C++ Build Tools": ' - r'https://visualstudio.microsoft.com' - r'/visual-cpp-build-tools/' - ) - - exc.args = (message,) - - -class PlatformInfo: - """ - Current and Target Architectures information. - - Parameters - ---------- - arch: str - Target architecture. - """ - - current_cpu = environ.get('processor_architecture', '').lower() - - def __init__(self, arch): - self.arch = arch.lower().replace('x64', 'amd64') - - @property - def target_cpu(self): - """ - Return Target CPU architecture. - - Return - ------ - str - Target CPU - """ - return self.arch[self.arch.find('_') + 1 :] - - def target_is_x86(self): - """ - Return True if target CPU is x86 32 bits.. - - Return - ------ - bool - CPU is x86 32 bits - """ - return self.target_cpu == 'x86' - - def current_is_x86(self): - """ - Return True if current CPU is x86 32 bits.. - - Return - ------ - bool - CPU is x86 32 bits - """ - return self.current_cpu == 'x86' - - def current_dir(self, hidex86=False, x64=False): - """ - Current platform specific subfolder. 
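
The key move in `_msvc14_get_vc_env` above is running `vcvarsall.bat` and `set` in a single `cmd` invocation and scraping the resulting environment. Isolated as a hedged sketch (Windows-only; the caller supplies the `vcvarsall` path, e.g. from the finder functions above):

    import subprocess

    def scrape_vc_env(vcvarsall: str, plat_spec: str) -> dict:
        # cmd /u emits UTF-16LE; '&& set' dumps the environment vcvarsall built.
        out = subprocess.check_output(
            'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
            stderr=subprocess.STDOUT,
        ).decode('utf-16le', errors='replace')
        return {
            key.lower(): value
            for key, _, value in (line.partition('=') for line in out.splitlines())
            if key and value
        }
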
- - Parameters - ---------- - hidex86: bool - return '' and not '\x86' if architecture is x86. - x64: bool - return '\x64' and not '\amd64' if architecture is amd64. - - Return - ------ - str - subfolder: '\target', or '' (see hidex86 parameter) - """ - return ( - '' - if (self.current_cpu == 'x86' and hidex86) - else r'\x64' - if (self.current_cpu == 'amd64' and x64) - else r'\%s' % self.current_cpu - ) - - def target_dir(self, hidex86=False, x64=False): - r""" - Target platform specific subfolder. - - Parameters - ---------- - hidex86: bool - return '' and not '\x86' if architecture is x86. - x64: bool - return '\x64' and not '\amd64' if architecture is amd64. - - Return - ------ - str - subfolder: '\current', or '' (see hidex86 parameter) - """ - return ( - '' - if (self.target_cpu == 'x86' and hidex86) - else r'\x64' - if (self.target_cpu == 'amd64' and x64) - else r'\%s' % self.target_cpu - ) - - def cross_dir(self, forcex86=False): - r""" - Cross platform specific subfolder. - - Parameters - ---------- - forcex86: bool - Use 'x86' as current architecture even if current architecture is - not x86. - - Return - ------ - str - subfolder: '' if target architecture is current architecture, - '\current_target' if not. - """ - current = 'x86' if forcex86 else self.current_cpu - return ( - '' - if self.target_cpu == current - else self.target_dir().replace('\\', '\\%s_' % current) - ) - - -class RegistryInfo: - """ - Microsoft Visual Studio related registry information. - - Parameters - ---------- - platform_info: PlatformInfo - "PlatformInfo" instance. - """ - - HKEYS = ( - winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT, - ) - - def __init__(self, platform_info): - self.pi = platform_info - - @property - def visualstudio(self): - """ - Microsoft Visual Studio root registry key. - - Return - ------ - str - Registry key - """ - return 'VisualStudio' - - @property - def sxs(self): - """ - Microsoft Visual Studio SxS registry key. - - Return - ------ - str - Registry key - """ - return join(self.visualstudio, 'SxS') - - @property - def vc(self): - """ - Microsoft Visual C++ VC7 registry key. - - Return - ------ - str - Registry key - """ - return join(self.sxs, 'VC7') - - @property - def vs(self): - """ - Microsoft Visual Studio VS7 registry key. - - Return - ------ - str - Registry key - """ - return join(self.sxs, 'VS7') - - @property - def vc_for_python(self): - """ - Microsoft Visual C++ for Python registry key. - - Return - ------ - str - Registry key - """ - return r'DevDiv\VCForPython' - - @property - def microsoft_sdk(self): - """ - Microsoft SDK registry key. - - Return - ------ - str - Registry key - """ - return 'Microsoft SDKs' - - @property - def windows_sdk(self): - """ - Microsoft Windows/Platform SDK registry key. - - Return - ------ - str - Registry key - """ - return join(self.microsoft_sdk, 'Windows') - - @property - def netfx_sdk(self): - """ - Microsoft .NET Framework SDK registry key. - - Return - ------ - str - Registry key - """ - return join(self.microsoft_sdk, 'NETFXSDK') - - @property - def windows_kits_roots(self): - """ - Microsoft Windows Kits Roots registry key. - - Return - ------ - str - Registry key - """ - return r'Windows Kits\Installed Roots' - - def microsoft(self, key, x86=False): - """ - Return key in Microsoft software registry. - - Parameters - ---------- - key: str - Registry key path where look. - x86: str - Force x86 software registry. 
- - Return - ------ - str - Registry key - """ - node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node' - return join('Software', node64, 'Microsoft', key) - - def lookup(self, key, name): - """ - Look for values in registry in Microsoft software registry. - - Parameters - ---------- - key: str - Registry key path where look. - name: str - Value name to find. - - Return - ------ - str - value - """ - key_read = winreg.KEY_READ - openkey = winreg.OpenKey - closekey = winreg.CloseKey - ms = self.microsoft - for hkey in self.HKEYS: - bkey = None - try: - bkey = openkey(hkey, ms(key), 0, key_read) - except OSError: - if not self.pi.current_is_x86(): - try: - bkey = openkey(hkey, ms(key, True), 0, key_read) - except OSError: - continue - else: - continue - try: - return winreg.QueryValueEx(bkey, name)[0] - except OSError: - pass - finally: - if bkey: - closekey(bkey) - return None - - -class SystemInfo: - """ - Microsoft Windows and Visual Studio related system information. - - Parameters - ---------- - registry_info: RegistryInfo - "RegistryInfo" instance. - vc_ver: float - Required Microsoft Visual C++ version. - """ - - # Variables and properties in this class use originals CamelCase variables - # names from Microsoft source files for more easy comparison. - WinDir = environ.get('WinDir', '') - ProgramFiles = environ.get('ProgramFiles', '') - ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles) - - def __init__(self, registry_info, vc_ver=None): - self.ri = registry_info - self.pi = self.ri.pi - - self.known_vs_paths = self.find_programdata_vs_vers() - - # Except for VS15+, VC version is aligned with VS version - self.vs_ver = self.vc_ver = vc_ver or self._find_latest_available_vs_ver() - - def _find_latest_available_vs_ver(self): - """ - Find the latest VC version - - Return - ------ - float - version - """ - reg_vc_vers = self.find_reg_vs_vers() - - if not (reg_vc_vers or self.known_vs_paths): - raise distutils.errors.DistutilsPlatformError( - 'No Microsoft Visual C++ version found' - ) - - vc_vers = set(reg_vc_vers) - vc_vers.update(self.known_vs_paths) - return sorted(vc_vers)[-1] - - def find_reg_vs_vers(self): - """ - Find Microsoft Visual Studio versions available in registry. - - Return - ------ - list of float - Versions - """ - ms = self.ri.microsoft - vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs) - vs_vers = [] - for hkey, key in itertools.product(self.ri.HKEYS, vckeys): - try: - bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ) - except OSError: - continue - with bkey: - subkeys, values, _ = winreg.QueryInfoKey(bkey) - for i in range(values): - with contextlib.suppress(ValueError): - ver = float(winreg.EnumValue(bkey, i)[0]) - if ver not in vs_vers: - vs_vers.append(ver) - for i in range(subkeys): - with contextlib.suppress(ValueError): - ver = float(winreg.EnumKey(bkey, i)) - if ver not in vs_vers: - vs_vers.append(ver) - return sorted(vs_vers) - - def find_programdata_vs_vers(self): - r""" - Find Visual studio 2017+ versions from information in - "C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances". - - Return - ------ - dict - float version as key, path as value. 
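
`RegistryInfo.lookup` above probes several hives and, on 64-bit hosts, retries under the `Wow6432Node` view before giving up. The fallback, condensed into a Windows-only sketch (key paths are illustrative):

    import winreg

    def reg_lookup(subkey: str, name: str):
        views = (
            rf'Software\Microsoft\{subkey}',
            rf'Software\Wow6432Node\Microsoft\{subkey}',  # 32-bit view
        )
        for hkey in (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE):
            for path in views:
                try:
                    with winreg.OpenKey(hkey, path, 0, winreg.KEY_READ) as key:
                        return winreg.QueryValueEx(key, name)[0]
                except OSError:
                    continue
        return None
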
- """ - vs_versions = {} - instances_dir = r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances' - - try: - hashed_names = listdir(instances_dir) - - except OSError: - # Directory not exists with all Visual Studio versions - return vs_versions - - for name in hashed_names: - try: - # Get VS installation path from "state.json" file - state_path = join(instances_dir, name, 'state.json') - with open(state_path, 'rt', encoding='utf-8') as state_file: - state = json.load(state_file) - vs_path = state['installationPath'] - - # Raises OSError if this VS installation does not contain VC - listdir(join(vs_path, r'VC\Tools\MSVC')) - - # Store version and path - vs_versions[self._as_float_version(state['installationVersion'])] = ( - vs_path - ) - - except (OSError, KeyError): - # Skip if "state.json" file is missing or bad format - continue - - return vs_versions - - @staticmethod - def _as_float_version(version): - """ - Return a string version as a simplified float version (major.minor) - - Parameters - ---------- - version: str - Version. - - Return - ------ - float - version - """ - return float('.'.join(version.split('.')[:2])) - - @property - def VSInstallDir(self): - """ - Microsoft Visual Studio directory. - - Return - ------ - str - path - """ - # Default path - default = join( - self.ProgramFilesx86, 'Microsoft Visual Studio %0.1f' % self.vs_ver - ) - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default - - @property - def VCInstallDir(self): - """ - Microsoft Visual C++ directory. - - Return - ------ - str - path - """ - path = self._guess_vc() or self._guess_vc_legacy() - - if not isdir(path): - msg = 'Microsoft Visual C++ directory not found' - raise distutils.errors.DistutilsPlatformError(msg) - - return path - - def _guess_vc(self): - """ - Locate Visual C++ for VS2017+. - - Return - ------ - str - path - """ - if self.vs_ver <= 14.0: - return '' - - try: - # First search in known VS paths - vs_dir = self.known_vs_paths[self.vs_ver] - except KeyError: - # Else, search with path from registry - vs_dir = self.VSInstallDir - - guess_vc = join(vs_dir, r'VC\Tools\MSVC') - - # Subdir with VC exact version as name - try: - # Update the VC version with real one instead of VS version - vc_ver = listdir(guess_vc)[-1] - self.vc_ver = self._as_float_version(vc_ver) - return join(guess_vc, vc_ver) - except (OSError, IndexError): - return '' - - def _guess_vc_legacy(self): - """ - Locate Visual C++ for versions prior to 2017. - - Return - ------ - str - path - """ - default = join( - self.ProgramFilesx86, r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver - ) - - # Try to get "VC++ for Python" path from registry as default path - reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver) - python_vc = self.ri.lookup(reg_path, 'installdir') - default_vc = join(python_vc, 'VC') if python_vc else default - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc - - @property - def WindowsSdkVersion(self): - """ - Microsoft Windows SDK versions for specified MSVC++ version. 
- - Return - ------ - tuple of str - versions - """ - if self.vs_ver <= 9.0: - return '7.0', '6.1', '6.0a' - elif self.vs_ver == 10.0: - return '7.1', '7.0a' - elif self.vs_ver == 11.0: - return '8.0', '8.0a' - elif self.vs_ver == 12.0: - return '8.1', '8.1a' - elif self.vs_ver >= 14.0: - return '10.0', '8.1' - return None - - @property - def WindowsSdkLastVersion(self): - """ - Microsoft Windows SDK last version. - - Return - ------ - str - version - """ - return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib')) - - @property - def WindowsSdkDir(self): # noqa: C901 # is too complex (12) # FIXME - """ - Microsoft Windows SDK directory. - - Return - ------ - str - path - """ - sdkdir = '' - for ver in self.WindowsSdkVersion: - # Try to get it from registry - loc = join(self.ri.windows_sdk, 'v%s' % ver) - sdkdir = self.ri.lookup(loc, 'installationfolder') - if sdkdir: - break - if not sdkdir or not isdir(sdkdir): - # Try to get "VC++ for Python" version from registry - path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) - install_base = self.ri.lookup(path, 'installdir') - if install_base: - sdkdir = join(install_base, 'WinSDK') - if not sdkdir or not isdir(sdkdir): - # If fail, use default new path - for ver in self.WindowsSdkVersion: - intver = ver[: ver.rfind('.')] - path = r'Microsoft SDKs\Windows Kits\%s' % intver - d = join(self.ProgramFiles, path) - if isdir(d): - sdkdir = d - if not sdkdir or not isdir(sdkdir): - # If fail, use default old path - for ver in self.WindowsSdkVersion: - path = r'Microsoft SDKs\Windows\v%s' % ver - d = join(self.ProgramFiles, path) - if isdir(d): - sdkdir = d - if not sdkdir: - # If fail, use Platform SDK - sdkdir = join(self.VCInstallDir, 'PlatformSDK') - return sdkdir - - @property - def WindowsSDKExecutablePath(self): - """ - Microsoft Windows SDK executable directory. - - Return - ------ - str - path - """ - # Find WinSDK NetFx Tools registry dir name - if self.vs_ver <= 11.0: - netfxver = 35 - arch = '' - else: - netfxver = 40 - hidex86 = True if self.vs_ver <= 12.0 else False - arch = self.pi.current_dir(x64=True, hidex86=hidex86) - fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-')) - - # list all possibles registry paths - regpaths = [] - if self.vs_ver >= 14.0: - for ver in self.NetFxSdkVersion: - regpaths += [join(self.ri.netfx_sdk, ver, fx)] - - for ver in self.WindowsSdkVersion: - regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)] - - # Return installation folder from the more recent path - for path in regpaths: - execpath = self.ri.lookup(path, 'installationfolder') - if execpath: - return execpath - - return None - - @property - def FSharpInstallDir(self): - """ - Microsoft Visual F# directory. - - Return - ------ - str - path - """ - path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver) - return self.ri.lookup(path, 'productdir') or '' - - @property - def UniversalCRTSdkDir(self): - """ - Microsoft Universal CRT SDK directory. - - Return - ------ - str - path - """ - # Set Kit Roots versions for specified MSVC++ version - vers = ('10', '81') if self.vs_ver >= 14.0 else () - - # Find path of the more recent Kit - for ver in vers: - sdkdir = self.ri.lookup(self.ri.windows_kits_roots, 'kitsroot%s' % ver) - if sdkdir: - return sdkdir or '' - - return None - - @property - def UniversalCRTSdkLastVersion(self): - """ - Microsoft Universal C Runtime SDK last version. 
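
`WindowsSdkDir` above is a four-stage cascade: per-version registry entries, the "VC++ for Python" install dir, guessed `Program Files` layouts, and finally the Platform SDK beside VC. Its shape, reduced to a skeleton (the parameters stand in for the registry and filesystem probes shown above):

    import os

    def first_dir(candidates):
        # First candidate that is a real directory, else None.
        return next((d for d in candidates if d and os.path.isdir(d)), None)

    def find_sdk_dir(registry_hits, guessed_paths, platform_sdk):
        return first_dir(registry_hits) or first_dir(guessed_paths) or platform_sdk
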
- - Return - ------ - str - version - """ - return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib')) - - @property - def NetFxSdkVersion(self): - """ - Microsoft .NET Framework SDK versions. - - Return - ------ - tuple of str - versions - """ - # Set FxSdk versions for specified VS version - return ( - ('4.7.2', '4.7.1', '4.7', '4.6.2', '4.6.1', '4.6', '4.5.2', '4.5.1', '4.5') - if self.vs_ver >= 14.0 - else () - ) - - @property - def NetFxSdkDir(self): - """ - Microsoft .NET Framework SDK directory. - - Return - ------ - str - path - """ - sdkdir = '' - for ver in self.NetFxSdkVersion: - loc = join(self.ri.netfx_sdk, ver) - sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') - if sdkdir: - break - return sdkdir - - @property - def FrameworkDir32(self): - """ - Microsoft .NET Framework 32bit directory. - - Return - ------ - str - path - """ - # Default path - guess_fw = join(self.WinDir, r'Microsoft.NET\Framework') - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw - - @property - def FrameworkDir64(self): - """ - Microsoft .NET Framework 64bit directory. - - Return - ------ - str - path - """ - # Default path - guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64') - - # Try to get path from registry, if fail use default path - return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw - - @property - def FrameworkVersion32(self): - """ - Microsoft .NET Framework 32bit versions. - - Return - ------ - tuple of str - versions - """ - return self._find_dot_net_versions(32) - - @property - def FrameworkVersion64(self): - """ - Microsoft .NET Framework 64bit versions. - - Return - ------ - tuple of str - versions - """ - return self._find_dot_net_versions(64) - - def _find_dot_net_versions(self, bits): - """ - Find Microsoft .NET Framework versions. - - Parameters - ---------- - bits: int - Platform number of bits: 32 or 64. - - Return - ------ - tuple of str - versions - """ - # Find actual .NET version in registry - reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits) - dot_net_dir = getattr(self, 'FrameworkDir%d' % bits) - ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or '' - - # Set .NET versions for specified MSVC++ version - if self.vs_ver >= 12.0: - return ver, 'v4.0' - elif self.vs_ver >= 10.0: - return 'v4.0.30319' if ver.lower()[:2] != 'v4' else ver, 'v3.5' - elif self.vs_ver == 9.0: - return 'v3.5', 'v2.0.50727' - elif self.vs_ver == 8.0: - return 'v3.0', 'v2.0.50727' - return None - - @staticmethod - def _use_last_dir_name(path, prefix=''): - """ - Return name of the last dir in path or '' if no dir found. - - Parameters - ---------- - path: str - Use dirs in this path - prefix: str - Use only dirs starting by this prefix - - Return - ------ - str - name - """ - matching_dirs = ( - dir_name - for dir_name in reversed(listdir(path)) - if isdir(join(path, dir_name)) and dir_name.startswith(prefix) - ) - return next(matching_dirs, None) or '' - - -class EnvironmentInfo: - """ - Return environment variables for specified Microsoft Visual C++ version - and platform : Lib, Include, Path and libpath. - - This function is compatible with Microsoft Visual C++ 9.0 to 14.X. - - Script created by analysing Microsoft environment configuration files like - "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ... - - Parameters - ---------- - arch: str - Target architecture. - vc_ver: float - Required Microsoft Visual C++ version. If not set, autodetect the last - version. 
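
`_use_last_dir_name` above picks the last matching subdirectory name; it is how the newest SDK or .NET version directory gets chosen. A standalone version that sorts explicitly rather than relying on `listdir` order:

    import os

    def use_last_dir_name(path: str, prefix: str = '') -> str:
        matching = (
            name
            for name in sorted(os.listdir(path), reverse=True)
            if name.startswith(prefix) and os.path.isdir(os.path.join(path, name))
        )
        return next(matching, '')
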
- vc_min_ver: float - Minimum Microsoft Visual C++ version. - """ - - # Variables and properties in this class use originals CamelCase variables - # names from Microsoft source files for more easy comparison. - - def __init__(self, arch, vc_ver=None, vc_min_ver=0): - self.pi = PlatformInfo(arch) - self.ri = RegistryInfo(self.pi) - self.si = SystemInfo(self.ri, vc_ver) - - if self.vc_ver < vc_min_ver: - err = 'No suitable Microsoft Visual C++ version found' - raise distutils.errors.DistutilsPlatformError(err) - - @property - def vs_ver(self): - """ - Microsoft Visual Studio. - - Return - ------ - float - version - """ - return self.si.vs_ver - - @property - def vc_ver(self): - """ - Microsoft Visual C++ version. - - Return - ------ - float - version - """ - return self.si.vc_ver - - @property - def VSTools(self): - """ - Microsoft Visual Studio Tools. - - Return - ------ - list of str - paths - """ - paths = [r'Common7\IDE', r'Common7\Tools'] - - if self.vs_ver >= 14.0: - arch_subdir = self.pi.current_dir(hidex86=True, x64=True) - paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow'] - paths += [r'Team Tools\Performance Tools'] - paths += [r'Team Tools\Performance Tools%s' % arch_subdir] - - return [join(self.si.VSInstallDir, path) for path in paths] - - @property - def VCIncludes(self): - """ - Microsoft Visual C++ & Microsoft Foundation Class Includes. - - Return - ------ - list of str - paths - """ - return [ - join(self.si.VCInstallDir, 'Include'), - join(self.si.VCInstallDir, r'ATLMFC\Include'), - ] - - @property - def VCLibraries(self): - """ - Microsoft Visual C++ & Microsoft Foundation Class Libraries. - - Return - ------ - list of str - paths - """ - if self.vs_ver >= 15.0: - arch_subdir = self.pi.target_dir(x64=True) - else: - arch_subdir = self.pi.target_dir(hidex86=True) - paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir] - - if self.vs_ver >= 14.0: - paths += [r'Lib\store%s' % arch_subdir] - - return [join(self.si.VCInstallDir, path) for path in paths] - - @property - def VCStoreRefs(self): - """ - Microsoft Visual C++ store references Libraries. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0: - return [] - return [join(self.si.VCInstallDir, r'Lib\store\references')] - - @property - def VCTools(self): - """ - Microsoft Visual C++ Tools. - - Return - ------ - list of str - paths - """ - si = self.si - tools = [join(si.VCInstallDir, 'VCPackages')] - - forcex86 = True if self.vs_ver <= 10.0 else False - arch_subdir = self.pi.cross_dir(forcex86) - if arch_subdir: - tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)] - - if self.vs_ver == 14.0: - path = 'Bin%s' % self.pi.current_dir(hidex86=True) - tools += [join(si.VCInstallDir, path)] - - elif self.vs_ver >= 15.0: - host_dir = ( - r'bin\HostX86%s' if self.pi.current_is_x86() else r'bin\HostX64%s' - ) - tools += [join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))] - - if self.pi.current_cpu != self.pi.target_cpu: - tools += [ - join(si.VCInstallDir, host_dir % self.pi.current_dir(x64=True)) - ] - - else: - tools += [join(si.VCInstallDir, 'Bin')] - - return tools - - @property - def OSLibraries(self): - """ - Microsoft Windows SDK Libraries. 
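
For VS 2017+, `VCTools` above selects `bin\HostX86` or `bin\HostX64` plus the target subdirectory, adding the host-native directory as well when cross-compiling. The path selection, as an illustrative sketch:

    import os

    def msvc_bin_dirs(vc_dir: str, host_is_x86: bool, target: str) -> list:
        host = 'HostX86' if host_is_x86 else 'HostX64'
        dirs = [os.path.join(vc_dir, 'bin', host, target)]
        native = 'x86' if host_is_x86 else 'x64'
        if target != native:
            # Cross tools also want the host-native bin dir available.
            dirs.append(os.path.join(vc_dir, 'bin', host, native))
        return dirs
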
- - Return - ------ - list of str - paths - """ - if self.vs_ver <= 10.0: - arch_subdir = self.pi.target_dir(hidex86=True, x64=True) - return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] - - else: - arch_subdir = self.pi.target_dir(x64=True) - lib = join(self.si.WindowsSdkDir, 'lib') - libver = self._sdk_subdir - return [join(lib, '%sum%s' % (libver, arch_subdir))] - - @property - def OSIncludes(self): - """ - Microsoft Windows SDK Include. - - Return - ------ - list of str - paths - """ - include = join(self.si.WindowsSdkDir, 'include') - - if self.vs_ver <= 10.0: - return [include, join(include, 'gl')] - - else: - if self.vs_ver >= 14.0: - sdkver = self._sdk_subdir - else: - sdkver = '' - return [ - join(include, '%sshared' % sdkver), - join(include, '%sum' % sdkver), - join(include, '%swinrt' % sdkver), - ] - - @property - def OSLibpath(self): - """ - Microsoft Windows SDK Libraries Paths. - - Return - ------ - list of str - paths - """ - ref = join(self.si.WindowsSdkDir, 'References') - libpath = [] - - if self.vs_ver <= 9.0: - libpath += self.OSLibraries - - if self.vs_ver >= 11.0: - libpath += [join(ref, r'CommonConfiguration\Neutral')] - - if self.vs_ver >= 14.0: - libpath += [ - ref, - join(self.si.WindowsSdkDir, 'UnionMetadata'), - join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'), - join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'), - join(ref, 'Windows.Networking.Connectivity.WwanContract', '1.0.0.0'), - join( - self.si.WindowsSdkDir, - 'ExtensionSDKs', - 'Microsoft.VCLibs', - '%0.1f' % self.vs_ver, - 'References', - 'CommonConfiguration', - 'neutral', - ), - ] - return libpath - - @property - def SdkTools(self): - """ - Microsoft Windows SDK Tools. - - Return - ------ - list of str - paths - """ - return list(self._sdk_tools()) - - def _sdk_tools(self): - """ - Microsoft Windows SDK Tools paths generator. - - Return - ------ - generator of str - paths - """ - if self.vs_ver < 15.0: - bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86' - yield join(self.si.WindowsSdkDir, bin_dir) - - if not self.pi.current_is_x86(): - arch_subdir = self.pi.current_dir(x64=True) - path = 'Bin%s' % arch_subdir - yield join(self.si.WindowsSdkDir, path) - - if self.vs_ver in (10.0, 11.0): - if self.pi.target_is_x86(): - arch_subdir = '' - else: - arch_subdir = self.pi.current_dir(hidex86=True, x64=True) - path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir - yield join(self.si.WindowsSdkDir, path) - - elif self.vs_ver >= 15.0: - path = join(self.si.WindowsSdkDir, 'Bin') - arch_subdir = self.pi.current_dir(x64=True) - sdkver = self.si.WindowsSdkLastVersion - yield join(path, '%s%s' % (sdkver, arch_subdir)) - - if self.si.WindowsSDKExecutablePath: - yield self.si.WindowsSDKExecutablePath - - @property - def _sdk_subdir(self): - """ - Microsoft Windows SDK version subdir. - - Return - ------ - str - subdir - """ - ucrtver = self.si.WindowsSdkLastVersion - return ('%s\\' % ucrtver) if ucrtver else '' - - @property - def SdkSetup(self): - """ - Microsoft Windows SDK Setup. - - Return - ------ - list of str - paths - """ - if self.vs_ver > 9.0: - return [] - - return [join(self.si.WindowsSdkDir, 'Setup')] - - @property - def FxTools(self): - """ - Microsoft .NET Framework Tools. 
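
`OSIncludes` and `_sdk_subdir` above encode the Windows 10 SDK layout, where headers live under a versioned directory. Composing those include paths, standalone:

    import os

    def sdk_include_dirs(sdk_dir: str, sdk_ver: str) -> list:
        # Windows 10 SDKs nest headers under a '10.0.xxxxx.0\' directory;
        # older SDKs (vs_ver <= 10.0 in the deleted code) do not.
        ver = sdk_ver + '\\' if sdk_ver else ''
        include = os.path.join(sdk_dir, 'include')
        return [os.path.join(include, ver + sub) for sub in ('shared', 'um', 'winrt')]
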
- - Return - ------ - list of str - paths - """ - pi = self.pi - si = self.si - - if self.vs_ver <= 10.0: - include32 = True - include64 = not pi.target_is_x86() and not pi.current_is_x86() - else: - include32 = pi.target_is_x86() or pi.current_is_x86() - include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64' - - tools = [] - if include32: - tools += [join(si.FrameworkDir32, ver) for ver in si.FrameworkVersion32] - if include64: - tools += [join(si.FrameworkDir64, ver) for ver in si.FrameworkVersion64] - return tools - - @property - def NetFxSDKLibraries(self): - """ - Microsoft .Net Framework SDK Libraries. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0 or not self.si.NetFxSdkDir: - return [] - - arch_subdir = self.pi.target_dir(x64=True) - return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)] - - @property - def NetFxSDKIncludes(self): - """ - Microsoft .Net Framework SDK Includes. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0 or not self.si.NetFxSdkDir: - return [] - - return [join(self.si.NetFxSdkDir, r'include\um')] - - @property - def VsTDb(self): - """ - Microsoft Visual Studio Team System Database. - - Return - ------ - list of str - paths - """ - return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')] - - @property - def MSBuild(self): - """ - Microsoft Build Engine. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 12.0: - return [] - elif self.vs_ver < 15.0: - base_path = self.si.ProgramFilesx86 - arch_subdir = self.pi.current_dir(hidex86=True) - else: - base_path = self.si.VSInstallDir - arch_subdir = '' - - path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir) - build = [join(base_path, path)] - - if self.vs_ver >= 15.0: - # Add Roslyn C# & Visual Basic Compiler - build += [join(base_path, path, 'Roslyn')] - - return build - - @property - def HTMLHelpWorkshop(self): - """ - Microsoft HTML Help Workshop. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 11.0: - return [] - - return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')] - - @property - def UCRTLibraries(self): - """ - Microsoft Universal C Runtime SDK Libraries. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0: - return [] - - arch_subdir = self.pi.target_dir(x64=True) - lib = join(self.si.UniversalCRTSdkDir, 'lib') - ucrtver = self._ucrt_subdir - return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))] - - @property - def UCRTIncludes(self): - """ - Microsoft Universal C Runtime SDK Include. - - Return - ------ - list of str - paths - """ - if self.vs_ver < 14.0: - return [] - - include = join(self.si.UniversalCRTSdkDir, 'include') - return [join(include, '%sucrt' % self._ucrt_subdir)] - - @property - def _ucrt_subdir(self): - """ - Microsoft Universal C Runtime SDK version subdir. - - Return - ------ - str - subdir - """ - ucrtver = self.si.UniversalCRTSdkLastVersion - return ('%s\\' % ucrtver) if ucrtver else '' - - @property - def FSharp(self): - """ - Microsoft Visual F#. - - Return - ------ - list of str - paths - """ - if 11.0 > self.vs_ver > 12.0: - return [] - - return [self.si.FSharpInstallDir] - - @property - def VCRuntimeRedist(self): - """ - Microsoft Visual C++ runtime redistributable dll. 
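
`VCRuntimeRedist` above walks the Cartesian product of candidate redist prefixes and CRT directory names until `vcruntime140.dll` turns up. The search idiom, isolated:

    import itertools
    import os

    def find_first_file(prefixes, crt_dirs, filename):
        # Try every (prefix, crt_dir) combination; first existing file wins.
        for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
            path = os.path.join(prefix, crt_dir, filename)
            if os.path.isfile(path):
                return path
        return None
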
- - Return - ------ - str - path - """ - vcruntime = 'vcruntime%d0.dll' % self.vc_ver - arch_subdir = self.pi.target_dir(x64=True).strip('\\') - - # Installation prefixes candidates - prefixes = [] - tools_path = self.si.VCInstallDir - redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist')) - if isdir(redist_path): - # Redist version may not be exactly the same as tools - redist_path = join(redist_path, listdir(redist_path)[-1]) - prefixes += [redist_path, join(redist_path, 'onecore')] - - prefixes += [join(tools_path, 'redist')] # VS14 legacy path - - # CRT directory - crt_dirs = ( - 'Microsoft.VC%d.CRT' % (self.vc_ver * 10), - # Sometime store in directory with VS version instead of VC - 'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10), - ) - - # vcruntime path - for prefix, crt_dir in itertools.product(prefixes, crt_dirs): - path = join(prefix, arch_subdir, crt_dir, vcruntime) - if isfile(path): - return path - return None - - def return_env(self, exists=True): - """ - Return environment dict. - - Parameters - ---------- - exists: bool - It True, only return existing paths. - - Return - ------ - dict - environment - """ - env = dict( - include=self._build_paths( - 'include', - [ - self.VCIncludes, - self.OSIncludes, - self.UCRTIncludes, - self.NetFxSDKIncludes, - ], - exists, - ), - lib=self._build_paths( - 'lib', - [ - self.VCLibraries, - self.OSLibraries, - self.FxTools, - self.UCRTLibraries, - self.NetFxSDKLibraries, - ], - exists, - ), - libpath=self._build_paths( - 'libpath', - [self.VCLibraries, self.FxTools, self.VCStoreRefs, self.OSLibpath], - exists, - ), - path=self._build_paths( - 'path', - [ - self.VCTools, - self.VSTools, - self.VsTDb, - self.SdkTools, - self.SdkSetup, - self.FxTools, - self.MSBuild, - self.HTMLHelpWorkshop, - self.FSharp, - ], - exists, - ), - ) - if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist): - env['py_vcruntime_redist'] = self.VCRuntimeRedist - return env - - def _build_paths(self, name, spec_path_lists, exists): - """ - Given an environment variable name and specified paths, - return a pathsep-separated string of paths containing - unique, extant, directories from those paths and from - the environment variable. Raise an error if no paths - are resolved. - - Parameters - ---------- - name: str - Environment variable name - spec_path_lists: list of str - Paths - exists: bool - It True, only return existing paths. - - Return - ------ - str - Pathsep-separated paths - """ - # flatten spec_path_lists - spec_paths = itertools.chain.from_iterable(spec_path_lists) - env_paths = environ.get(name, '').split(pathsep) - paths = itertools.chain(spec_paths, env_paths) - extant_paths = list(filter(isdir, paths)) if exists else paths - if not extant_paths: - msg = "%s environment variable is empty" % name.upper() - raise distutils.errors.DistutilsPlatformError(msg) - unique_paths = unique_everseen(extant_paths) - return pathsep.join(unique_paths) From 481bccfdc0a869a90b032d2cf12c4c40c6da2558 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 27 Aug 2024 03:29:34 -0400 Subject: [PATCH 1051/1761] Remove associated tests --- setuptools/tests/test_msvc14.py | 85 --------------------------------- 1 file changed, 85 deletions(-) delete mode 100644 setuptools/tests/test_msvc14.py diff --git a/setuptools/tests/test_msvc14.py b/setuptools/tests/test_msvc14.py deleted file mode 100644 index 57d3cc38e8..0000000000 --- a/setuptools/tests/test_msvc14.py +++ /dev/null @@ -1,85 +0,0 @@ -""" -Tests for msvc support module (msvc14 unit tests). 
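
`_build_paths` above does the real work of `return_env`: flatten the per-component lists, append the inherited environment value, keep only existing directories, and deduplicate while preserving order. Without the `more_itertools` dependency, the same can be written as:

    import itertools
    import os

    def build_paths(name: str, spec_path_lists) -> str:
        spec = itertools.chain.from_iterable(spec_path_lists)
        inherited = os.environ.get(name, '').split(os.pathsep)
        extant = (p for p in itertools.chain(spec, inherited) if os.path.isdir(p))
        # dict.fromkeys preserves insertion order, giving unique_everseen cheaply.
        return os.pathsep.join(dict.fromkeys(extant))
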
-""" - -import os -import sys - -import pytest - -from distutils.errors import DistutilsPlatformError - - -@pytest.mark.skipif(sys.platform != "win32", reason="These tests are only for win32") -class TestMSVC14: - """Python 3.8 "distutils/tests/test_msvccompiler.py" backport""" - - def test_no_compiler(self): - import setuptools.msvc as _msvccompiler - - # makes sure query_vcvarsall raises - # a DistutilsPlatformError if the compiler - # is not found - - def _find_vcvarsall(plat_spec): - return None, None - - old_find_vcvarsall = _msvccompiler._msvc14_find_vcvarsall - _msvccompiler._msvc14_find_vcvarsall = _find_vcvarsall - try: - pytest.raises( - DistutilsPlatformError, - _msvccompiler._msvc14_get_vc_env, - 'wont find this version', - ) - finally: - _msvccompiler._msvc14_find_vcvarsall = old_find_vcvarsall - - def test_get_vc_env_unicode(self): - import setuptools.msvc as _msvccompiler - - test_var = 'ṰḖṤṪ┅ṼẨṜ' - test_value = '₃⁴₅' - - # Ensure we don't early exit from _get_vc_env - old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None) - os.environ[test_var] = test_value - try: - env = _msvccompiler._msvc14_get_vc_env('x86') - assert test_var.lower() in env - assert test_value == env[test_var.lower()] - finally: - os.environ.pop(test_var) - if old_distutils_use_sdk: - os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk - - def test_get_vc2017(self): - import setuptools.msvc as _msvccompiler - - # This function cannot be mocked, so pass it if we find VS 2017 - # and mark it skipped if we do not. - version, path = _msvccompiler._msvc14_find_vc2017() - if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') == 'Visual Studio 2017': - assert version - if version: - assert version >= 15 - assert os.path.isdir(path) - else: - pytest.skip("VS 2017 is not installed") - - def test_get_vc2015(self): - import setuptools.msvc as _msvccompiler - - # This function cannot be mocked, so pass it if we find VS 2015 - # and mark it skipped if we do not. 
- version, path = _msvccompiler._msvc14_find_vc2015() - if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') in [ - 'Visual Studio 2015', - 'Visual Studio 2017', - ]: - assert version - if version: - assert version >= 14 - assert os.path.isdir(path) - else: - pytest.skip("VS 2015 is not installed") From 64ffb2d1f5c41ed8b965554e3d2640dd14cd5dcf Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 27 Aug 2024 09:21:39 +0100 Subject: [PATCH 1052/1761] Remove '_vendor' directory --- conftest.py | 5 - distutils/_vendor/__init__.py | 0 .../packaging-24.0.dist-info/INSTALLER | 1 - .../_vendor/packaging-24.0.dist-info/LICENSE | 3 - .../packaging-24.0.dist-info/LICENSE.APACHE | 177 --- .../packaging-24.0.dist-info/LICENSE.BSD | 23 - .../_vendor/packaging-24.0.dist-info/METADATA | 102 -- .../_vendor/packaging-24.0.dist-info/RECORD | 37 - .../packaging-24.0.dist-info/REQUESTED | 0 .../_vendor/packaging-24.0.dist-info/WHEEL | 4 - distutils/_vendor/packaging/__init__.py | 15 - distutils/_vendor/packaging/_elffile.py | 108 -- distutils/_vendor/packaging/_manylinux.py | 260 ----- distutils/_vendor/packaging/_musllinux.py | 83 -- distutils/_vendor/packaging/_parser.py | 356 ------ distutils/_vendor/packaging/_structures.py | 61 - distutils/_vendor/packaging/_tokenizer.py | 192 ---- distutils/_vendor/packaging/markers.py | 252 ---- distutils/_vendor/packaging/metadata.py | 825 ------------- distutils/_vendor/packaging/py.typed | 0 distutils/_vendor/packaging/requirements.py | 90 -- distutils/_vendor/packaging/specifiers.py | 1017 ----------------- distutils/_vendor/packaging/tags.py | 571 --------- distutils/_vendor/packaging/utils.py | 172 --- distutils/_vendor/packaging/version.py | 563 --------- distutils/_vendor/ruff.toml | 1 - distutils/dist.py | 8 +- pyproject.toml | 2 +- 28 files changed, 8 insertions(+), 4920 deletions(-) delete mode 100644 distutils/_vendor/__init__.py delete mode 100644 distutils/_vendor/packaging-24.0.dist-info/INSTALLER delete mode 100644 distutils/_vendor/packaging-24.0.dist-info/LICENSE delete mode 100644 distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE delete mode 100644 distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD delete mode 100644 distutils/_vendor/packaging-24.0.dist-info/METADATA delete mode 100644 distutils/_vendor/packaging-24.0.dist-info/RECORD delete mode 100644 distutils/_vendor/packaging-24.0.dist-info/REQUESTED delete mode 100644 distutils/_vendor/packaging-24.0.dist-info/WHEEL delete mode 100644 distutils/_vendor/packaging/__init__.py delete mode 100644 distutils/_vendor/packaging/_elffile.py delete mode 100644 distutils/_vendor/packaging/_manylinux.py delete mode 100644 distutils/_vendor/packaging/_musllinux.py delete mode 100644 distutils/_vendor/packaging/_parser.py delete mode 100644 distutils/_vendor/packaging/_structures.py delete mode 100644 distutils/_vendor/packaging/_tokenizer.py delete mode 100644 distutils/_vendor/packaging/markers.py delete mode 100644 distutils/_vendor/packaging/metadata.py delete mode 100644 distutils/_vendor/packaging/py.typed delete mode 100644 distutils/_vendor/packaging/requirements.py delete mode 100644 distutils/_vendor/packaging/specifiers.py delete mode 100644 distutils/_vendor/packaging/tags.py delete mode 100644 distutils/_vendor/packaging/utils.py delete mode 100644 distutils/_vendor/packaging/version.py delete mode 100644 distutils/_vendor/ruff.toml diff --git a/conftest.py b/conftest.py index 352366cb8f..930a595274 100644 --- a/conftest.py +++ b/conftest.py @@ -16,11 +16,6 @@ ]) 
-collect_ignore_glob = [ - 'distutils/_vendor/**/*', -] - - @pytest.fixture def save_env(): orig = os.environ.copy() diff --git a/distutils/_vendor/__init__.py b/distutils/_vendor/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/distutils/_vendor/packaging-24.0.dist-info/INSTALLER b/distutils/_vendor/packaging-24.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e38a..0000000000 --- a/distutils/_vendor/packaging-24.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/distutils/_vendor/packaging-24.0.dist-info/LICENSE b/distutils/_vendor/packaging-24.0.dist-info/LICENSE deleted file mode 100644 index 6f62d44e4e..0000000000 --- a/distutils/_vendor/packaging-24.0.dist-info/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made -under the terms of *both* these licenses. diff --git a/distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE b/distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE deleted file mode 100644 index f433b1a53f..0000000000 --- a/distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS diff --git a/distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD b/distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD deleted file mode 100644 index 42ce7b75c9..0000000000 --- a/distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Donald Stufft and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/distutils/_vendor/packaging-24.0.dist-info/METADATA b/distutils/_vendor/packaging-24.0.dist-info/METADATA deleted file mode 100644 index 10ab4390a9..0000000000 --- a/distutils/_vendor/packaging-24.0.dist-info/METADATA +++ /dev/null @@ -1,102 +0,0 @@ -Metadata-Version: 2.1 -Name: packaging -Version: 24.0 -Summary: Core utilities for Python packages -Author-email: Donald Stufft -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: License :: OSI Approved :: BSD License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Typing :: Typed -Project-URL: Documentation, https://packaging.pypa.io/ -Project-URL: Source, https://github.com/pypa/packaging - -packaging -========= - -.. start-intro - -Reusable core utilities for various Python Packaging -`interoperability specifications `_. - -This library provides utilities that implement the interoperability -specifications which have clearly one correct behaviour (eg: :pep:`440`) -or benefit greatly from having a single shared implementation (eg: :pep:`425`). - -.. end-intro - -The ``packaging`` project includes the following: version handling, specifiers, -markers, requirements, tags, utilities. - -Documentation -------------- - -The `documentation`_ provides information and the API for the following: - -- Version Handling -- Specifiers -- Markers -- Requirements -- Tags -- Utilities - -Installation ------------- - -Use ``pip`` to install these utilities:: - - pip install packaging - -The ``packaging`` library uses calendar-based versioning (``YY.N``). - -Discussion ----------- - -If you run into bugs, you can file them in our `issue tracker`_. - -You can also join ``#pypa`` on Freenode to ask questions or get involved. - - -.. _`documentation`: https://packaging.pypa.io/ -.. _`issue tracker`: https://github.com/pypa/packaging/issues - - -Code of Conduct ---------------- - -Everyone interacting in the packaging project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. - -.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md - -Contributing ------------- - -The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as -well as how to report a potential security issue. The documentation for this -project also covers information about `project development`_ and `security`_. - -.. _`project development`: https://packaging.pypa.io/en/latest/development/ -.. _`security`: https://packaging.pypa.io/en/latest/security/ - -Project History ---------------- - -Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for -recent changes and project history. - -.. 
_`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ - diff --git a/distutils/_vendor/packaging-24.0.dist-info/RECORD b/distutils/_vendor/packaging-24.0.dist-info/RECORD deleted file mode 100644 index bcf796c2f4..0000000000 --- a/distutils/_vendor/packaging-24.0.dist-info/RECORD +++ /dev/null @@ -1,37 +0,0 @@ -packaging-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -packaging-24.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 -packaging-24.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 -packaging-24.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 -packaging-24.0.dist-info/METADATA,sha256=0dESdhY_wHValuOrbgdebiEw04EbX4dkujlxPdEsFus,3203 -packaging-24.0.dist-info/RECORD,, -packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496 -packaging/__pycache__/__init__.cpython-312.pyc,, -packaging/__pycache__/_elffile.cpython-312.pyc,, -packaging/__pycache__/_manylinux.cpython-312.pyc,, -packaging/__pycache__/_musllinux.cpython-312.pyc,, -packaging/__pycache__/_parser.cpython-312.pyc,, -packaging/__pycache__/_structures.cpython-312.pyc,, -packaging/__pycache__/_tokenizer.cpython-312.pyc,, -packaging/__pycache__/markers.cpython-312.pyc,, -packaging/__pycache__/metadata.cpython-312.pyc,, -packaging/__pycache__/requirements.cpython-312.pyc,, -packaging/__pycache__/specifiers.cpython-312.pyc,, -packaging/__pycache__/tags.cpython-312.pyc,, -packaging/__pycache__/utils.cpython-312.pyc,, -packaging/__pycache__/version.cpython-312.pyc,, -packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266 -packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590 -packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676 -packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347 -packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 -packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292 -packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208 -packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036 -packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933 -packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784 -packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950 -packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268 -packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236 diff --git a/distutils/_vendor/packaging-24.0.dist-info/REQUESTED b/distutils/_vendor/packaging-24.0.dist-info/REQUESTED deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/distutils/_vendor/packaging-24.0.dist-info/WHEEL b/distutils/_vendor/packaging-24.0.dist-info/WHEEL deleted file mode 100644 index 3b5e64b5e6..0000000000 --- a/distutils/_vendor/packaging-24.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/distutils/_vendor/packaging/__init__.py b/distutils/_vendor/packaging/__init__.py deleted file mode 100644 index 
e7c0aa12ca..0000000000 --- a/distutils/_vendor/packaging/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "24.0" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD-2-Clause or Apache-2.0" -__copyright__ = "2014 %s" % __author__ diff --git a/distutils/_vendor/packaging/_elffile.py b/distutils/_vendor/packaging/_elffile.py deleted file mode 100644 index 6fb19b30bb..0000000000 --- a/distutils/_vendor/packaging/_elffile.py +++ /dev/null @@ -1,108 +0,0 @@ -""" -ELF file parser. - -This provides a class ``ELFFile`` that parses an ELF executable in a similar -interface to ``ZipFile``. Only the read interface is implemented. - -Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca -ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html -""" - -import enum -import os -import struct -from typing import IO, Optional, Tuple - - -class ELFInvalid(ValueError): - pass - - -class EIClass(enum.IntEnum): - C32 = 1 - C64 = 2 - - -class EIData(enum.IntEnum): - Lsb = 1 - Msb = 2 - - -class EMachine(enum.IntEnum): - I386 = 3 - S390 = 22 - Arm = 40 - X8664 = 62 - AArc64 = 183 - - -class ELFFile: - """ - Representation of an ELF executable. - """ - - def __init__(self, f: IO[bytes]) -> None: - self._f = f - - try: - ident = self._read("16B") - except struct.error: - raise ELFInvalid("unable to parse identification") - magic = bytes(ident[:4]) - if magic != b"\x7fELF": - raise ELFInvalid(f"invalid magic: {magic!r}") - - self.capacity = ident[4] # Format for program header (bitness). - self.encoding = ident[5] # Data structure encoding (endianness). - - try: - # e_fmt: Format for program header. - # p_fmt: Format for section header. - # p_idx: Indexes to find p_type, p_offset, and p_filesz. - e_fmt, self._p_fmt, self._p_idx = { - (1, 1): ("HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB. - (2, 1): ("HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB. - }[(self.capacity, self.encoding)] - except KeyError: - raise ELFInvalid( - f"unrecognized capacity ({self.capacity}) or " - f"encoding ({self.encoding})" - ) - - try: - ( - _, - self.machine, # Architecture type. - _, - _, - self._e_phoff, # Offset of program header. - _, - self.flags, # Processor-specific flags. - _, - self._e_phentsize, # Size of section. - self._e_phnum, # Number of sections. - ) = self._read(e_fmt) - except struct.error as e: - raise ELFInvalid("unable to parse machine and section information") from e - - def _read(self, fmt: str) -> Tuple[int, ...]: - return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) - - @property - def interpreter(self) -> Optional[str]: - """ - The path recorded in the ``PT_INTERP`` section header. - """ - for index in range(self._e_phnum): - self._f.seek(self._e_phoff + self._e_phentsize * index) - try: - data = self._read(self._p_fmt) - except struct.error: - continue - if data[self._p_idx[0]] != 3: # Not PT_INTERP. 
- continue - self._f.seek(data[self._p_idx[1]]) - return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") - return None diff --git a/distutils/_vendor/packaging/_manylinux.py b/distutils/_vendor/packaging/_manylinux.py deleted file mode 100644 index ad62505f3f..0000000000 --- a/distutils/_vendor/packaging/_manylinux.py +++ /dev/null @@ -1,260 +0,0 @@ -import collections -import contextlib -import functools -import os -import re -import sys -import warnings -from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple - -from ._elffile import EIClass, EIData, ELFFile, EMachine - -EF_ARM_ABIMASK = 0xFF000000 -EF_ARM_ABI_VER5 = 0x05000000 -EF_ARM_ABI_FLOAT_HARD = 0x00000400 - - -# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` -# as the type for `path` until then. -@contextlib.contextmanager -def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]: - try: - with open(path, "rb") as f: - yield ELFFile(f) - except (OSError, TypeError, ValueError): - yield None - - -def _is_linux_armhf(executable: str) -> bool: - # hard-float ABI can be detected from the ELF header of the running - # process - # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf - with _parse_elf(executable) as f: - return ( - f is not None - and f.capacity == EIClass.C32 - and f.encoding == EIData.Lsb - and f.machine == EMachine.Arm - and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 - and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD - ) - - -def _is_linux_i686(executable: str) -> bool: - with _parse_elf(executable) as f: - return ( - f is not None - and f.capacity == EIClass.C32 - and f.encoding == EIData.Lsb - and f.machine == EMachine.I386 - ) - - -def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: - if "armv7l" in archs: - return _is_linux_armhf(executable) - if "i686" in archs: - return _is_linux_i686(executable) - allowed_archs = { - "x86_64", - "aarch64", - "ppc64", - "ppc64le", - "s390x", - "loongarch64", - "riscv64", - } - return any(arch in allowed_archs for arch in archs) - - -# If glibc ever changes its major version, we need to know what the last -# minor version was, so we can build the complete list of all versions. -# For now, guess what the highest minor version might be, assume it will -# be 50 for testing. Once this actually happens, update the dictionary -# with the actual value. -_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) - - -class _GLibCVersion(NamedTuple): - major: int - minor: int - - -def _glibc_version_string_confstr() -> Optional[str]: - """ - Primary implementation of glibc_version_string using os.confstr. - """ - # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely - # to be broken or missing. This strategy is used in the standard library - # platform module. - # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 - try: - # Should be a string like "glibc 2.17". - version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION") - assert version_string is not None - _, version = version_string.rsplit() - except (AssertionError, AttributeError, OSError, ValueError): - # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... - return None - return version - - -def _glibc_version_string_ctypes() -> Optional[str]: - """ - Fallback implementation of glibc_version_string using ctypes. 
- """ - try: - import ctypes - except ImportError: - return None - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - # - # We must also handle the special case where the executable is not a - # dynamically linked executable. This can occur when using musl libc, - # for example. In this situation, dlopen() will error, leading to an - # OSError. Interestingly, at least in the case of musl, there is no - # errno set on the OSError. The single string argument used to construct - # OSError comes from libc itself and is therefore not portable to - # hard code here. In any case, failure to call dlopen() means we - # can proceed, so we bail on our attempt. - try: - process_namespace = ctypes.CDLL(None) - except OSError: - return None - - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str: str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -def _glibc_version_string() -> Optional[str]: - """Returns glibc version string, or None if not using glibc.""" - return _glibc_version_string_confstr() or _glibc_version_string_ctypes() - - -def _parse_glibc_version(version_str: str) -> Tuple[int, int]: - """Parse glibc version. - - We use a regexp instead of str.split because we want to discard any - random junk that might come after the minor version -- this might happen - in patched/forked versions of glibc (e.g. Linaro's version of glibc - uses version strings like "2.20-2014.11"). See gh-3588. - """ - m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) - if not m: - warnings.warn( - f"Expected glibc version with 2 components major.minor," - f" got: {version_str}", - RuntimeWarning, - ) - return -1, -1 - return int(m.group("major")), int(m.group("minor")) - - -@functools.lru_cache() -def _get_glibc_version() -> Tuple[int, int]: - version_str = _glibc_version_string() - if version_str is None: - return (-1, -1) - return _parse_glibc_version(version_str) - - -# From PEP 513, PEP 600 -def _is_compatible(arch: str, version: _GLibCVersion) -> bool: - sys_glibc = _get_glibc_version() - if sys_glibc < version: - return False - # Check for presence of _manylinux module. 
- try: - import _manylinux - except ImportError: - return True - if hasattr(_manylinux, "manylinux_compatible"): - result = _manylinux.manylinux_compatible(version[0], version[1], arch) - if result is not None: - return bool(result) - return True - if version == _GLibCVersion(2, 5): - if hasattr(_manylinux, "manylinux1_compatible"): - return bool(_manylinux.manylinux1_compatible) - if version == _GLibCVersion(2, 12): - if hasattr(_manylinux, "manylinux2010_compatible"): - return bool(_manylinux.manylinux2010_compatible) - if version == _GLibCVersion(2, 17): - if hasattr(_manylinux, "manylinux2014_compatible"): - return bool(_manylinux.manylinux2014_compatible) - return True - - -_LEGACY_MANYLINUX_MAP = { - # CentOS 7 w/ glibc 2.17 (PEP 599) - (2, 17): "manylinux2014", - # CentOS 6 w/ glibc 2.12 (PEP 571) - (2, 12): "manylinux2010", - # CentOS 5 w/ glibc 2.5 (PEP 513) - (2, 5): "manylinux1", -} - - -def platform_tags(archs: Sequence[str]) -> Iterator[str]: - """Generate manylinux tags compatible to the current platform. - - :param archs: Sequence of compatible architectures. - The first one shall be the closest to the actual architecture and be the part of - platform tag after the ``linux_`` prefix, e.g. ``x86_64``. - The ``linux_`` prefix is assumed as a prerequisite for the current platform to - be manylinux-compatible. - - :returns: An iterator of compatible manylinux tags. - """ - if not _have_compatible_abi(sys.executable, archs): - return - # Oldest glibc to be supported regardless of architecture is (2, 17). - too_old_glibc2 = _GLibCVersion(2, 16) - if set(archs) & {"x86_64", "i686"}: - # On x86/i686 also oldest glibc to be supported is (2, 5). - too_old_glibc2 = _GLibCVersion(2, 4) - current_glibc = _GLibCVersion(*_get_glibc_version()) - glibc_max_list = [current_glibc] - # We can assume compatibility across glibc major versions. - # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 - # - # Build a list of maximum glibc versions so that we can - # output the canonical list of all glibc from current_glibc - # down to too_old_glibc2, including all intermediary versions. - for glibc_major in range(current_glibc.major - 1, 1, -1): - glibc_minor = _LAST_GLIBC_MINOR[glibc_major] - glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) - for arch in archs: - for glibc_max in glibc_max_list: - if glibc_max.major == too_old_glibc2.major: - min_minor = too_old_glibc2.minor - else: - # For other glibc major versions oldest supported is (x, 0). - min_minor = -1 - for glibc_minor in range(glibc_max.minor, min_minor, -1): - glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) - tag = "manylinux_{}_{}".format(*glibc_version) - if _is_compatible(arch, glibc_version): - yield f"{tag}_{arch}" - # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. - if glibc_version in _LEGACY_MANYLINUX_MAP: - legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] - if _is_compatible(arch, glibc_version): - yield f"{legacy_tag}_{arch}" diff --git a/distutils/_vendor/packaging/_musllinux.py b/distutils/_vendor/packaging/_musllinux.py deleted file mode 100644 index 86419df9d7..0000000000 --- a/distutils/_vendor/packaging/_musllinux.py +++ /dev/null @@ -1,83 +0,0 @@ -"""PEP 656 support. - -This module implements logic to detect if the currently running Python is -linked against musl, and what musl version is used. 
-""" - -import functools -import re -import subprocess -import sys -from typing import Iterator, NamedTuple, Optional, Sequence - -from ._elffile import ELFFile - - -class _MuslVersion(NamedTuple): - major: int - minor: int - - -def _parse_musl_version(output: str) -> Optional[_MuslVersion]: - lines = [n for n in (n.strip() for n in output.splitlines()) if n] - if len(lines) < 2 or lines[0][:4] != "musl": - return None - m = re.match(r"Version (\d+)\.(\d+)", lines[1]) - if not m: - return None - return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) - - -@functools.lru_cache() -def _get_musl_version(executable: str) -> Optional[_MuslVersion]: - """Detect currently-running musl runtime version. - - This is done by checking the specified executable's dynamic linking - information, and invoking the loader to parse its output for a version - string. If the loader is musl, the output would be something like:: - - musl libc (x86_64) - Version 1.2.2 - Dynamic Program Loader - """ - try: - with open(executable, "rb") as f: - ld = ELFFile(f).interpreter - except (OSError, TypeError, ValueError): - return None - if ld is None or "musl" not in ld: - return None - proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) - return _parse_musl_version(proc.stderr) - - -def platform_tags(archs: Sequence[str]) -> Iterator[str]: - """Generate musllinux tags compatible to the current platform. - - :param archs: Sequence of compatible architectures. - The first one shall be the closest to the actual architecture and be the part of - platform tag after the ``linux_`` prefix, e.g. ``x86_64``. - The ``linux_`` prefix is assumed as a prerequisite for the current platform to - be musllinux-compatible. - - :returns: An iterator of compatible musllinux tags. - """ - sys_musl = _get_musl_version(sys.executable) - if sys_musl is None: # Python not dynamically linked against musl. - return - for arch in archs: - for minor in range(sys_musl.minor, -1, -1): - yield f"musllinux_{sys_musl.major}_{minor}_{arch}" - - -if __name__ == "__main__": # pragma: no cover - import sysconfig - - plat = sysconfig.get_platform() - assert plat.startswith("linux-"), "not linux" - - print("plat:", plat) - print("musl:", _get_musl_version(sys.executable)) - print("tags:", end=" ") - for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): - print(t, end="\n ") diff --git a/distutils/_vendor/packaging/_parser.py b/distutils/_vendor/packaging/_parser.py deleted file mode 100644 index 684df75457..0000000000 --- a/distutils/_vendor/packaging/_parser.py +++ /dev/null @@ -1,356 +0,0 @@ -"""Handwritten parser of dependency specifiers. - -The docstring for each __parse_* function contains ENBF-inspired grammar representing -the implementation. 
-""" - -import ast -from typing import Any, List, NamedTuple, Optional, Tuple, Union - -from ._tokenizer import DEFAULT_RULES, Tokenizer - - -class Node: - def __init__(self, value: str) -> None: - self.value = value - - def __str__(self) -> str: - return self.value - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}('{self}')>" - - def serialize(self) -> str: - raise NotImplementedError - - -class Variable(Node): - def serialize(self) -> str: - return str(self) - - -class Value(Node): - def serialize(self) -> str: - return f'"{self}"' - - -class Op(Node): - def serialize(self) -> str: - return str(self) - - -MarkerVar = Union[Variable, Value] -MarkerItem = Tuple[MarkerVar, Op, MarkerVar] -# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] -# MarkerList = List[Union["MarkerList", MarkerAtom, str]] -# mypy does not support recursive type definition -# https://github.com/python/mypy/issues/731 -MarkerAtom = Any -MarkerList = List[Any] - - -class ParsedRequirement(NamedTuple): - name: str - url: str - extras: List[str] - specifier: str - marker: Optional[MarkerList] - - -# -------------------------------------------------------------------------------------- -# Recursive descent parser for dependency specifier -# -------------------------------------------------------------------------------------- -def parse_requirement(source: str) -> ParsedRequirement: - return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) - - -def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: - """ - requirement = WS? IDENTIFIER WS? extras WS? requirement_details - """ - tokenizer.consume("WS") - - name_token = tokenizer.expect( - "IDENTIFIER", expected="package name at the start of dependency specifier" - ) - name = name_token.text - tokenizer.consume("WS") - - extras = _parse_extras(tokenizer) - tokenizer.consume("WS") - - url, specifier, marker = _parse_requirement_details(tokenizer) - tokenizer.expect("END", expected="end of dependency specifier") - - return ParsedRequirement(name, url, extras, specifier, marker) - - -def _parse_requirement_details( - tokenizer: Tokenizer, -) -> Tuple[str, str, Optional[MarkerList]]: - """ - requirement_details = AT URL (WS requirement_marker?)? - | specifier WS? (requirement_marker)? - """ - - specifier = "" - url = "" - marker = None - - if tokenizer.check("AT"): - tokenizer.read() - tokenizer.consume("WS") - - url_start = tokenizer.position - url = tokenizer.expect("URL", expected="URL after @").text - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - tokenizer.expect("WS", expected="whitespace after URL") - - # The input might end after whitespace. - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - marker = _parse_requirement_marker( - tokenizer, span_start=url_start, after="URL and whitespace" - ) - else: - specifier_start = tokenizer.position - specifier = _parse_specifier(tokenizer) - tokenizer.consume("WS") - - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - marker = _parse_requirement_marker( - tokenizer, - span_start=specifier_start, - after=( - "version specifier" - if specifier - else "name and no valid version specifier" - ), - ) - - return (url, specifier, marker) - - -def _parse_requirement_marker( - tokenizer: Tokenizer, *, span_start: int, after: str -) -> MarkerList: - """ - requirement_marker = SEMICOLON marker WS? 
- """ - - if not tokenizer.check("SEMICOLON"): - tokenizer.raise_syntax_error( - f"Expected end or semicolon (after {after})", - span_start=span_start, - ) - tokenizer.read() - - marker = _parse_marker(tokenizer) - tokenizer.consume("WS") - - return marker - - -def _parse_extras(tokenizer: Tokenizer) -> List[str]: - """ - extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? - """ - if not tokenizer.check("LEFT_BRACKET", peek=True): - return [] - - with tokenizer.enclosing_tokens( - "LEFT_BRACKET", - "RIGHT_BRACKET", - around="extras", - ): - tokenizer.consume("WS") - extras = _parse_extras_list(tokenizer) - tokenizer.consume("WS") - - return extras - - -def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: - """ - extras_list = identifier (wsp* ',' wsp* identifier)* - """ - extras: List[str] = [] - - if not tokenizer.check("IDENTIFIER"): - return extras - - extras.append(tokenizer.read().text) - - while True: - tokenizer.consume("WS") - if tokenizer.check("IDENTIFIER", peek=True): - tokenizer.raise_syntax_error("Expected comma between extra names") - elif not tokenizer.check("COMMA"): - break - - tokenizer.read() - tokenizer.consume("WS") - - extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") - extras.append(extra_token.text) - - return extras - - -def _parse_specifier(tokenizer: Tokenizer) -> str: - """ - specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS - | WS? version_many WS? - """ - with tokenizer.enclosing_tokens( - "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", - around="version specifier", - ): - tokenizer.consume("WS") - parsed_specifiers = _parse_version_many(tokenizer) - tokenizer.consume("WS") - - return parsed_specifiers - - -def _parse_version_many(tokenizer: Tokenizer) -> str: - """ - version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? - """ - parsed_specifiers = "" - while tokenizer.check("SPECIFIER"): - span_start = tokenizer.position - parsed_specifiers += tokenizer.read().text - if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): - tokenizer.raise_syntax_error( - ".* suffix can only be used with `==` or `!=` operators", - span_start=span_start, - span_end=tokenizer.position + 1, - ) - if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): - tokenizer.raise_syntax_error( - "Local version label can only be used with `==` or `!=` operators", - span_start=span_start, - span_end=tokenizer.position, - ) - tokenizer.consume("WS") - if not tokenizer.check("COMMA"): - break - parsed_specifiers += tokenizer.read().text - tokenizer.consume("WS") - - return parsed_specifiers - - -# -------------------------------------------------------------------------------------- -# Recursive descent parser for marker expression -# -------------------------------------------------------------------------------------- -def parse_marker(source: str) -> MarkerList: - return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) - - -def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: - retval = _parse_marker(tokenizer) - tokenizer.expect("END", expected="end of marker expression") - return retval - - -def _parse_marker(tokenizer: Tokenizer) -> MarkerList: - """ - marker = marker_atom (BOOLOP marker_atom)+ - """ - expression = [_parse_marker_atom(tokenizer)] - while tokenizer.check("BOOLOP"): - token = tokenizer.read() - expr_right = _parse_marker_atom(tokenizer) - expression.extend((token.text, expr_right)) - return expression - - -def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: - """ - marker_atom = WS? 
LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? - | WS? marker_item WS? - """ - - tokenizer.consume("WS") - if tokenizer.check("LEFT_PARENTHESIS", peek=True): - with tokenizer.enclosing_tokens( - "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", - around="marker expression", - ): - tokenizer.consume("WS") - marker: MarkerAtom = _parse_marker(tokenizer) - tokenizer.consume("WS") - else: - marker = _parse_marker_item(tokenizer) - tokenizer.consume("WS") - return marker - - -def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: - """ - marker_item = WS? marker_var WS? marker_op WS? marker_var WS? - """ - tokenizer.consume("WS") - marker_var_left = _parse_marker_var(tokenizer) - tokenizer.consume("WS") - marker_op = _parse_marker_op(tokenizer) - tokenizer.consume("WS") - marker_var_right = _parse_marker_var(tokenizer) - tokenizer.consume("WS") - return (marker_var_left, marker_op, marker_var_right) - - -def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: - """ - marker_var = VARIABLE | QUOTED_STRING - """ - if tokenizer.check("VARIABLE"): - return process_env_var(tokenizer.read().text.replace(".", "_")) - elif tokenizer.check("QUOTED_STRING"): - return process_python_str(tokenizer.read().text) - else: - tokenizer.raise_syntax_error( - message="Expected a marker variable or quoted string" - ) - - -def process_env_var(env_var: str) -> Variable: - if env_var in ("platform_python_implementation", "python_implementation"): - return Variable("platform_python_implementation") - else: - return Variable(env_var) - - -def process_python_str(python_str: str) -> Value: - value = ast.literal_eval(python_str) - return Value(str(value)) - - -def _parse_marker_op(tokenizer: Tokenizer) -> Op: - """ - marker_op = IN | NOT IN | OP - """ - if tokenizer.check("IN"): - tokenizer.read() - return Op("in") - elif tokenizer.check("NOT"): - tokenizer.read() - tokenizer.expect("WS", expected="whitespace after 'not'") - tokenizer.expect("IN", expected="'in' after 'not'") - return Op("not in") - elif tokenizer.check("OP"): - return Op(tokenizer.read().text) - else: - return tokenizer.raise_syntax_error( - "Expected marker operator, one of " - "<=, <, !=, ==, >=, >, ~=, ===, in, not in" - ) diff --git a/distutils/_vendor/packaging/_structures.py b/distutils/_vendor/packaging/_structures.py deleted file mode 100644 index 90a6465f96..0000000000 --- a/distutils/_vendor/packaging/_structures.py +++ /dev/null @@ -1,61 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- - -class InfinityType: - def __repr__(self) -> str: - return "Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return False - - def __le__(self, other: object) -> bool: - return False - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return True - - def __ge__(self, other: object) -> bool: - return True - - def __neg__(self: object) -> "NegativeInfinityType": - return NegativeInfinity - - -Infinity = InfinityType() - - -class NegativeInfinityType: - def __repr__(self) -> str: - return "-Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return True - - def __le__(self, other: object) -> bool: - return True - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return False - - def __ge__(self, other: object) -> bool: - return False - - def __neg__(self: object) -> InfinityType: - return Infinity - - -NegativeInfinity = NegativeInfinityType() diff --git a/distutils/_vendor/packaging/_tokenizer.py b/distutils/_vendor/packaging/_tokenizer.py deleted file mode 100644 index dd0d648d49..0000000000 --- a/distutils/_vendor/packaging/_tokenizer.py +++ /dev/null @@ -1,192 +0,0 @@ -import contextlib -import re -from dataclasses import dataclass -from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union - -from .specifiers import Specifier - - -@dataclass -class Token: - name: str - text: str - position: int - - -class ParserSyntaxError(Exception): - """The provided source text could not be parsed correctly.""" - - def __init__( - self, - message: str, - *, - source: str, - span: Tuple[int, int], - ) -> None: - self.span = span - self.message = message - self.source = source - - super().__init__() - - def __str__(self) -> str: - marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" - return "\n ".join([self.message, self.source, marker]) - - -DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { - "LEFT_PARENTHESIS": r"\(", - "RIGHT_PARENTHESIS": r"\)", - "LEFT_BRACKET": r"\[", - "RIGHT_BRACKET": r"\]", - "SEMICOLON": r";", - "COMMA": r",", - "QUOTED_STRING": re.compile( - r""" - ( - ('[^']*') - | - ("[^"]*") - ) - """, - re.VERBOSE, - ), - "OP": r"(===|==|~=|!=|<=|>=|<|>)", - "BOOLOP": r"\b(or|and)\b", - "IN": r"\bin\b", - "NOT": r"\bnot\b", - "VARIABLE": re.compile( - r""" - \b( - python_version - |python_full_version - |os[._]name - |sys[._]platform - |platform_(release|system) - |platform[._](version|machine|python_implementation) - |python_implementation - |implementation_(name|version) - |extra - )\b - """, - re.VERBOSE, - ), - "SPECIFIER": re.compile( - Specifier._operator_regex_str + Specifier._version_regex_str, - re.VERBOSE | re.IGNORECASE, - ), - "AT": r"\@", - "URL": r"[^ \t]+", - "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", - "VERSION_PREFIX_TRAIL": r"\.\*", - "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*", - "WS": r"[ \t]+", - "END": r"$", -} - - -class Tokenizer: - """Context-sensitive token parsing. - - Provides methods to examine the input stream to check whether the next token - matches. 
- """ - - def __init__( - self, - source: str, - *, - rules: "Dict[str, Union[str, re.Pattern[str]]]", - ) -> None: - self.source = source - self.rules: Dict[str, re.Pattern[str]] = { - name: re.compile(pattern) for name, pattern in rules.items() - } - self.next_token: Optional[Token] = None - self.position = 0 - - def consume(self, name: str) -> None: - """Move beyond provided token name, if at current position.""" - if self.check(name): - self.read() - - def check(self, name: str, *, peek: bool = False) -> bool: - """Check whether the next token has the provided name. - - By default, if the check succeeds, the token *must* be read before - another check. If `peek` is set to `True`, the token is not loaded and - would need to be checked again. - """ - assert ( - self.next_token is None - ), f"Cannot check for {name!r}, already have {self.next_token!r}" - assert name in self.rules, f"Unknown token name: {name!r}" - - expression = self.rules[name] - - match = expression.match(self.source, self.position) - if match is None: - return False - if not peek: - self.next_token = Token(name, match[0], self.position) - return True - - def expect(self, name: str, *, expected: str) -> Token: - """Expect a certain token name next, failing with a syntax error otherwise. - - The token is *not* read. - """ - if not self.check(name): - raise self.raise_syntax_error(f"Expected {expected}") - return self.read() - - def read(self) -> Token: - """Consume the next token and return it.""" - token = self.next_token - assert token is not None - - self.position += len(token.text) - self.next_token = None - - return token - - def raise_syntax_error( - self, - message: str, - *, - span_start: Optional[int] = None, - span_end: Optional[int] = None, - ) -> NoReturn: - """Raise ParserSyntaxError at the given position.""" - span = ( - self.position if span_start is None else span_start, - self.position if span_end is None else span_end, - ) - raise ParserSyntaxError( - message, - source=self.source, - span=span, - ) - - @contextlib.contextmanager - def enclosing_tokens( - self, open_token: str, close_token: str, *, around: str - ) -> Iterator[None]: - if self.check(open_token): - open_position = self.position - self.read() - else: - open_position = None - - yield - - if open_position is None: - return - - if not self.check(close_token): - self.raise_syntax_error( - f"Expected matching {close_token} for {open_token}, after {around}", - span_start=open_position, - ) - - self.read() diff --git a/distutils/_vendor/packaging/markers.py b/distutils/_vendor/packaging/markers.py deleted file mode 100644 index 8b98fca723..0000000000 --- a/distutils/_vendor/packaging/markers.py +++ /dev/null @@ -1,252 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -import operator -import os -import platform -import sys -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - -from ._parser import ( - MarkerAtom, - MarkerList, - Op, - Value, - Variable, - parse_marker as _parse_marker, -) -from ._tokenizer import ParserSyntaxError -from .specifiers import InvalidSpecifier, Specifier -from .utils import canonicalize_name - -__all__ = [ - "InvalidMarker", - "UndefinedComparison", - "UndefinedEnvironmentName", - "Marker", - "default_environment", -] - -Operator = Callable[[str, str], bool] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. - """ - - -def _normalize_extra_values(results: Any) -> Any: - """ - Normalize extra values. - """ - if isinstance(results[0], tuple): - lhs, op, rhs = results[0] - if isinstance(lhs, Variable) and lhs.value == "extra": - normalized_extra = canonicalize_name(rhs.value) - rhs = Value(normalized_extra) - elif isinstance(rhs, Variable) and rhs.value == "extra": - normalized_extra = canonicalize_name(lhs.value) - lhs = Value(normalized_extra) - results[0] = lhs, op, rhs - return results - - -def _format_marker( - marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True -) -> str: - - assert isinstance(marker, (list, tuple, str)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself it's own list. In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. 
- if ( - isinstance(marker, list) - and len(marker) == 1 - and isinstance(marker[0], (list, tuple)) - ): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators: Dict[str, Operator] = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs: str, op: Op, rhs: str) -> bool: - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs, prereleases=True) - - oper: Optional[Operator] = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") - - return oper(lhs, rhs) - - -def _normalize(*values: str, key: str) -> Tuple[str, ...]: - # PEP 685 – Comparison of extra names for optional distribution dependencies - # https://peps.python.org/pep-0685/ - # > When comparing extra names, tools MUST normalize the names being - # > compared using the semantics outlined in PEP 503 for names - if key == "extra": - return tuple(canonicalize_name(v) for v in values) - - # other environment markers don't have such standards - return values - - -def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: - groups: List[List[bool]] = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, str)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - environment_key = lhs.value - lhs_value = environment[environment_key] - rhs_value = rhs.value - else: - lhs_value = lhs.value - environment_key = rhs.value - rhs_value = environment[environment_key] - - lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info: "sys._version_info") -> str: - version = "{0.major}.{0.minor}.{0.micro}".format(info) - kind = info.releaselevel - if kind != "final": - version += kind[0] + str(info.serial) - return version - - -def default_environment() -> Dict[str, str]: - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - return { - "implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": ".".join(platform.python_version_tuple()[:2]), - "sys_platform": sys.platform, - } - - -class Marker: - def __init__(self, marker: str) -> None: - # Note: We create a Marker object without calling this constructor in - # packaging.requirements.Requirement. If any additional logic is - # added here, make sure to mirror/adapt Requirement. 
- try: - self._markers = _normalize_extra_values(_parse_marker(marker)) - # The attribute `_markers` can be described in terms of a recursive type: - # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]] - # - # For example, the following expression: - # python_version > "3.6" or (python_version == "3.6" and os_name == "unix") - # - # is parsed into: - # [ - # (, ')>, ), - # 'and', - # [ - # (, , ), - # 'or', - # (, , ) - # ] - # ] - except ParserSyntaxError as e: - raise InvalidMarker(str(e)) from e - - def __str__(self) -> str: - return _format_marker(self._markers) - - def __repr__(self) -> str: - return f"" - - def __hash__(self) -> int: - return hash((self.__class__.__name__, str(self))) - - def __eq__(self, other: Any) -> bool: - if not isinstance(other, Marker): - return NotImplemented - - return str(self) == str(other) - - def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: - """Evaluate a marker. - - Return the boolean from evaluating the given marker against the - environment. environment is an optional argument to override all or - part of the determined environment. - - The environment is determined from the current Python process. - """ - current_environment = default_environment() - current_environment["extra"] = "" - if environment is not None: - current_environment.update(environment) - # The API used to allow setting extra to None. We need to handle this - # case for backwards compatibility. - if current_environment["extra"] is None: - current_environment["extra"] = "" - - return _evaluate_markers(self._markers, current_environment) diff --git a/distutils/_vendor/packaging/metadata.py b/distutils/_vendor/packaging/metadata.py deleted file mode 100644 index fb27493079..0000000000 --- a/distutils/_vendor/packaging/metadata.py +++ /dev/null @@ -1,825 +0,0 @@ -import email.feedparser -import email.header -import email.message -import email.parser -import email.policy -import sys -import typing -from typing import ( - Any, - Callable, - Dict, - Generic, - List, - Optional, - Tuple, - Type, - Union, - cast, -) - -from . import requirements, specifiers, utils, version as version_module - -T = typing.TypeVar("T") -if sys.version_info[:2] >= (3, 8): # pragma: no cover - from typing import Literal, TypedDict -else: # pragma: no cover - if typing.TYPE_CHECKING: - from typing_extensions import Literal, TypedDict - else: - try: - from typing_extensions import Literal, TypedDict - except ImportError: - - class Literal: - def __init_subclass__(*_args, **_kwargs): - pass - - class TypedDict: - def __init_subclass__(*_args, **_kwargs): - pass - - -try: - ExceptionGroup -except NameError: # pragma: no cover - - class ExceptionGroup(Exception): # noqa: N818 - """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11. - - If :external:exc:`ExceptionGroup` is already defined by Python itself, - that version is used instead. 
- """ - - message: str - exceptions: List[Exception] - - def __init__(self, message: str, exceptions: List[Exception]) -> None: - self.message = message - self.exceptions = exceptions - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" - -else: # pragma: no cover - ExceptionGroup = ExceptionGroup - - -class InvalidMetadata(ValueError): - """A metadata field contains invalid data.""" - - field: str - """The name of the field that contains invalid data.""" - - def __init__(self, field: str, message: str) -> None: - self.field = field - super().__init__(message) - - -# The RawMetadata class attempts to make as few assumptions about the underlying -# serialization formats as possible. The idea is that as long as a serialization -# formats offer some very basic primitives in *some* way then we can support -# serializing to and from that format. -class RawMetadata(TypedDict, total=False): - """A dictionary of raw core metadata. - - Each field in core metadata maps to a key of this dictionary (when data is - provided). The key is lower-case and underscores are used instead of dashes - compared to the equivalent core metadata field. Any core metadata field that - can be specified multiple times or can hold multiple values in a single - field have a key with a plural name. See :class:`Metadata` whose attributes - match the keys of this dictionary. - - Core metadata fields that can be specified multiple times are stored as a - list or dict depending on which is appropriate for the field. Any fields - which hold multiple values in a single field are stored as a list. - - """ - - # Metadata 1.0 - PEP 241 - metadata_version: str - name: str - version: str - platforms: List[str] - summary: str - description: str - keywords: List[str] - home_page: str - author: str - author_email: str - license: str - - # Metadata 1.1 - PEP 314 - supported_platforms: List[str] - download_url: str - classifiers: List[str] - requires: List[str] - provides: List[str] - obsoletes: List[str] - - # Metadata 1.2 - PEP 345 - maintainer: str - maintainer_email: str - requires_dist: List[str] - provides_dist: List[str] - obsoletes_dist: List[str] - requires_python: str - requires_external: List[str] - project_urls: Dict[str, str] - - # Metadata 2.0 - # PEP 426 attempted to completely revamp the metadata format - # but got stuck without ever being able to build consensus on - # it and ultimately ended up withdrawn. - # - # However, a number of tools had started emitting METADATA with - # `2.0` Metadata-Version, so for historical reasons, this version - # was skipped. - - # Metadata 2.1 - PEP 566 - description_content_type: str - provides_extra: List[str] - - # Metadata 2.2 - PEP 643 - dynamic: List[str] - - # Metadata 2.3 - PEP 685 - # No new fields were added in PEP 685, just some edge case were - # tightened up to provide better interoptability. 
- - -_STRING_FIELDS = { - "author", - "author_email", - "description", - "description_content_type", - "download_url", - "home_page", - "license", - "maintainer", - "maintainer_email", - "metadata_version", - "name", - "requires_python", - "summary", - "version", -} - -_LIST_FIELDS = { - "classifiers", - "dynamic", - "obsoletes", - "obsoletes_dist", - "platforms", - "provides", - "provides_dist", - "provides_extra", - "requires", - "requires_dist", - "requires_external", - "supported_platforms", -} - -_DICT_FIELDS = { - "project_urls", -} - - -def _parse_keywords(data: str) -> List[str]: - """Split a string of comma-separate keyboards into a list of keywords.""" - return [k.strip() for k in data.split(",")] - - -def _parse_project_urls(data: List[str]) -> Dict[str, str]: - """Parse a list of label/URL string pairings separated by a comma.""" - urls = {} - for pair in data: - # Our logic is slightly tricky here as we want to try and do - # *something* reasonable with malformed data. - # - # The main thing that we have to worry about, is data that does - # not have a ',' at all to split the label from the Value. There - # isn't a singular right answer here, and we will fail validation - # later on (if the caller is validating) so it doesn't *really* - # matter, but since the missing value has to be an empty str - # and our return value is dict[str, str], if we let the key - # be the missing value, then they'd have multiple '' values that - # overwrite each other in a accumulating dict. - # - # The other potentional issue is that it's possible to have the - # same label multiple times in the metadata, with no solid "right" - # answer with what to do in that case. As such, we'll do the only - # thing we can, which is treat the field as unparseable and add it - # to our list of unparsed fields. - parts = [p.strip() for p in pair.split(",", 1)] - parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items - - # TODO: The spec doesn't say anything about if the keys should be - # considered case sensitive or not... logically they should - # be case-preserving and case-insensitive, but doing that - # would open up more cases where we might have duplicate - # entries. - label, url = parts - if label in urls: - # The label already exists in our set of urls, so this field - # is unparseable, and we can just add the whole thing to our - # unparseable data and stop processing it. - raise KeyError("duplicate labels in project urls") - urls[label] = url - - return urls - - -def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str: - """Get the body of the message.""" - # If our source is a str, then our caller has managed encodings for us, - # and we don't need to deal with it. - if isinstance(source, str): - payload: str = msg.get_payload() - return payload - # If our source is a bytes, then we're managing the encoding and we need - # to deal with it. - else: - bpayload: bytes = msg.get_payload(decode=True) - try: - return bpayload.decode("utf8", "strict") - except UnicodeDecodeError: - raise ValueError("payload in an invalid encoding") - - -# The various parse_FORMAT functions here are intended to be as lenient as -# possible in their parsing, while still returning a correctly typed -# RawMetadata. -# -# To aid in this, we also generally want to do as little touching of the -# data as possible, except where there are possibly some historic holdovers -# that make valid data awkward to work with. 
-# -# While this is a lower level, intermediate format than our ``Metadata`` -# class, some light touch ups can make a massive difference in usability. - -# Map METADATA fields to RawMetadata. -_EMAIL_TO_RAW_MAPPING = { - "author": "author", - "author-email": "author_email", - "classifier": "classifiers", - "description": "description", - "description-content-type": "description_content_type", - "download-url": "download_url", - "dynamic": "dynamic", - "home-page": "home_page", - "keywords": "keywords", - "license": "license", - "maintainer": "maintainer", - "maintainer-email": "maintainer_email", - "metadata-version": "metadata_version", - "name": "name", - "obsoletes": "obsoletes", - "obsoletes-dist": "obsoletes_dist", - "platform": "platforms", - "project-url": "project_urls", - "provides": "provides", - "provides-dist": "provides_dist", - "provides-extra": "provides_extra", - "requires": "requires", - "requires-dist": "requires_dist", - "requires-external": "requires_external", - "requires-python": "requires_python", - "summary": "summary", - "supported-platform": "supported_platforms", - "version": "version", -} -_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} - - -def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]: - """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). - - This function returns a two-item tuple of dicts. The first dict is of - recognized fields from the core metadata specification. Fields that can be - parsed and translated into Python's built-in types are converted - appropriately. All other fields are left as-is. Fields that are allowed to - appear multiple times are stored as lists. - - The second dict contains all other fields from the metadata. This includes - any unrecognized fields. It also includes any fields which are expected to - be parsed into a built-in type but were not formatted appropriately. Finally, - any fields that are expected to appear only once but are repeated are - included in this dict. - - """ - raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {} - unparsed: Dict[str, List[str]] = {} - - if isinstance(data, str): - parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) - else: - parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) - - # We have to wrap parsed.keys() in a set, because in the case of multiple - # values for a key (a list), the key will appear multiple times in the - # list of keys, but we're avoiding that by using get_all(). - for name in frozenset(parsed.keys()): - # Header names in RFC are case insensitive, so we'll normalize to all - # lower case to make comparisons easier. - name = name.lower() - - # We use get_all() here, even for fields that aren't multiple use, - # because otherwise someone could have e.g. two Name fields, and we - # would just silently ignore it rather than doing something about it. - headers = parsed.get_all(name) or [] - - # The way the email module works when parsing bytes is that it - # unconditionally decodes the bytes as ascii using the surrogateescape - # handler. When you pull that data back out (such as with get_all() ), - # it looks to see if the str has any surrogate escapes, and if it does - # it wraps it in a Header object instead of returning the string. - # - # As such, we'll look for those Header objects, and fix up the encoding. 
- value = [] - # Flag if we have run into any issues processing the headers, thus - # signalling that the data belongs in 'unparsed'. - valid_encoding = True - for h in headers: - # It's unclear if this can return more types than just a Header or - # a str, so we'll just assert here to make sure. - assert isinstance(h, (email.header.Header, str)) - - # If it's a header object, we need to do our little dance to get - # the real data out of it. In cases where there is invalid data - # we're going to end up with mojibake, but there's no obvious, good - # way around that without reimplementing parts of the Header object - # ourselves. - # - # That should be fine since, if mojibacked happens, this key is - # going into the unparsed dict anyways. - if isinstance(h, email.header.Header): - # The Header object stores it's data as chunks, and each chunk - # can be independently encoded, so we'll need to check each - # of them. - chunks: List[Tuple[bytes, Optional[str]]] = [] - for bin, encoding in email.header.decode_header(h): - try: - bin.decode("utf8", "strict") - except UnicodeDecodeError: - # Enable mojibake. - encoding = "latin1" - valid_encoding = False - else: - encoding = "utf8" - chunks.append((bin, encoding)) - - # Turn our chunks back into a Header object, then let that - # Header object do the right thing to turn them into a - # string for us. - value.append(str(email.header.make_header(chunks))) - # This is already a string, so just add it. - else: - value.append(h) - - # We've processed all of our values to get them into a list of str, - # but we may have mojibake data, in which case this is an unparsed - # field. - if not valid_encoding: - unparsed[name] = value - continue - - raw_name = _EMAIL_TO_RAW_MAPPING.get(name) - if raw_name is None: - # This is a bit of a weird situation, we've encountered a key that - # we don't know what it means, so we don't know whether it's meant - # to be a list or not. - # - # Since we can't really tell one way or another, we'll just leave it - # as a list, even though it may be a single item list, because that's - # what makes the most sense for email headers. - unparsed[name] = value - continue - - # If this is one of our string fields, then we'll check to see if our - # value is a list of a single item. If it is then we'll assume that - # it was emitted as a single string, and unwrap the str from inside - # the list. - # - # If it's any other kind of data, then we haven't the faintest clue - # what we should parse it as, and we have to just add it to our list - # of unparsed stuff. - if raw_name in _STRING_FIELDS and len(value) == 1: - raw[raw_name] = value[0] - # If this is one of our list of string fields, then we can just assign - # the value, since email *only* has strings, and our get_all() call - # above ensures that this is a list. - elif raw_name in _LIST_FIELDS: - raw[raw_name] = value - # Special Case: Keywords - # The keywords field is implemented in the metadata spec as a str, - # but it conceptually is a list of strings, and is serialized using - # ", ".join(keywords), so we'll do some light data massaging to turn - # this into what it logically is. 
- elif raw_name == "keywords" and len(value) == 1: - raw[raw_name] = _parse_keywords(value[0]) - # Special Case: Project-URL - # The project urls is implemented in the metadata spec as a list of - # specially-formatted strings that represent a key and a value, which - # is fundamentally a mapping, however the email format doesn't support - # mappings in a sane way, so it was crammed into a list of strings - # instead. - # - # We will do a little light data massaging to turn this into a map as - # it logically should be. - elif raw_name == "project_urls": - try: - raw[raw_name] = _parse_project_urls(value) - except KeyError: - unparsed[name] = value - # Nothing that we've done has managed to parse this, so it'll just - # throw it in our unparseable data and move on. - else: - unparsed[name] = value - - # We need to support getting the Description from the message payload in - # addition to getting it from the the headers. This does mean, though, there - # is the possibility of it being set both ways, in which case we put both - # in 'unparsed' since we don't know which is right. - try: - payload = _get_payload(parsed, data) - except ValueError: - unparsed.setdefault("description", []).append( - parsed.get_payload(decode=isinstance(data, bytes)) - ) - else: - if payload: - # Check to see if we've already got a description, if so then both - # it, and this body move to unparseable. - if "description" in raw: - description_header = cast(str, raw.pop("description")) - unparsed.setdefault("description", []).extend( - [description_header, payload] - ) - elif "description" in unparsed: - unparsed["description"].append(payload) - else: - raw["description"] = payload - - # We need to cast our `raw` to a metadata, because a TypedDict only support - # literal key names, but we're computing our key names on purpose, but the - # way this function is implemented, our `TypedDict` can only have valid key - # names. - return cast(RawMetadata, raw), unparsed - - -_NOT_FOUND = object() - - -# Keep the two values in sync. -_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] -_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] - -_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"]) - - -class _Validator(Generic[T]): - """Validate a metadata field. - - All _process_*() methods correspond to a core metadata field. The method is - called with the field's raw value. If the raw value is valid it is returned - in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field). - If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause - as appropriate). - """ - - name: str - raw_name: str - added: _MetadataVersion - - def __init__( - self, - *, - added: _MetadataVersion = "1.0", - ) -> None: - self.added = added - - def __set_name__(self, _owner: "Metadata", name: str) -> None: - self.name = name - self.raw_name = _RAW_TO_EMAIL_MAPPING[name] - - def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T: - # With Python 3.8, the caching can be replaced with functools.cached_property(). - # No need to check the cache as attribute lookup will resolve into the - # instance's __dict__ before __get__ is called. - cache = instance.__dict__ - value = instance._raw.get(self.name) - - # To make the _process_* methods easier, we'll check if the value is None - # and if this field is NOT a required attribute, and if both of those - # things are true, we'll skip the the converter. 
This will mean that the - # converters never have to deal with the None union. - if self.name in _REQUIRED_ATTRS or value is not None: - try: - converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}") - except AttributeError: - pass - else: - value = converter(value) - - cache[self.name] = value - try: - del instance._raw[self.name] # type: ignore[misc] - except KeyError: - pass - - return cast(T, value) - - def _invalid_metadata( - self, msg: str, cause: Optional[Exception] = None - ) -> InvalidMetadata: - exc = InvalidMetadata( - self.raw_name, msg.format_map({"field": repr(self.raw_name)}) - ) - exc.__cause__ = cause - return exc - - def _process_metadata_version(self, value: str) -> _MetadataVersion: - # Implicitly makes Metadata-Version required. - if value not in _VALID_METADATA_VERSIONS: - raise self._invalid_metadata(f"{value!r} is not a valid metadata version") - return cast(_MetadataVersion, value) - - def _process_name(self, value: str) -> str: - if not value: - raise self._invalid_metadata("{field} is a required field") - # Validate the name as a side-effect. - try: - utils.canonicalize_name(value, validate=True) - except utils.InvalidName as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - else: - return value - - def _process_version(self, value: str) -> version_module.Version: - if not value: - raise self._invalid_metadata("{field} is a required field") - try: - return version_module.parse(value) - except version_module.InvalidVersion as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - - def _process_summary(self, value: str) -> str: - """Check the field contains no newlines.""" - if "\n" in value: - raise self._invalid_metadata("{field} must be a single line") - return value - - def _process_description_content_type(self, value: str) -> str: - content_types = {"text/plain", "text/x-rst", "text/markdown"} - message = email.message.EmailMessage() - message["content-type"] = value - - content_type, parameters = ( - # Defaults to `text/plain` if parsing failed. - message.get_content_type().lower(), - message["content-type"].params, - ) - # Check if content-type is valid or defaulted to `text/plain` and thus was - # not parseable. - if content_type not in content_types or content_type not in value.lower(): - raise self._invalid_metadata( - f"{{field}} must be one of {list(content_types)}, not {value!r}" - ) - - charset = parameters.get("charset", "UTF-8") - if charset != "UTF-8": - raise self._invalid_metadata( - f"{{field}} can only specify the UTF-8 charset, not {list(charset)}" - ) - - markdown_variants = {"GFM", "CommonMark"} - variant = parameters.get("variant", "GFM") # Use an acceptable default. 
- if content_type == "text/markdown" and variant not in markdown_variants: - raise self._invalid_metadata( - f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " - f"not {variant!r}", - ) - return value - - def _process_dynamic(self, value: List[str]) -> List[str]: - for dynamic_field in map(str.lower, value): - if dynamic_field in {"name", "version", "metadata-version"}: - raise self._invalid_metadata( - f"{value!r} is not allowed as a dynamic field" - ) - elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: - raise self._invalid_metadata(f"{value!r} is not a valid dynamic field") - return list(map(str.lower, value)) - - def _process_provides_extra( - self, - value: List[str], - ) -> List[utils.NormalizedName]: - normalized_names = [] - try: - for name in value: - normalized_names.append(utils.canonicalize_name(name, validate=True)) - except utils.InvalidName as exc: - raise self._invalid_metadata( - f"{name!r} is invalid for {{field}}", cause=exc - ) - else: - return normalized_names - - def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: - try: - return specifiers.SpecifierSet(value) - except specifiers.InvalidSpecifier as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - - def _process_requires_dist( - self, - value: List[str], - ) -> List[requirements.Requirement]: - reqs = [] - try: - for req in value: - reqs.append(requirements.Requirement(req)) - except requirements.InvalidRequirement as exc: - raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc) - else: - return reqs - - -class Metadata: - """Representation of distribution metadata. - - Compared to :class:`RawMetadata`, this class provides objects representing - metadata fields instead of only using built-in types. Any invalid metadata - will cause :exc:`InvalidMetadata` to be raised (with a - :py:attr:`~BaseException.__cause__` attribute as appropriate). - """ - - _raw: RawMetadata - - @classmethod - def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": - """Create an instance from :class:`RawMetadata`. - - If *validate* is true, all metadata will be validated. All exceptions - related to validation will be gathered and raised as an :class:`ExceptionGroup`. - """ - ins = cls() - ins._raw = data.copy() # Mutations occur due to caching enriched values. - - if validate: - exceptions: List[Exception] = [] - try: - metadata_version = ins.metadata_version - metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) - except InvalidMetadata as metadata_version_exc: - exceptions.append(metadata_version_exc) - metadata_version = None - - # Make sure to check for the fields that are present, the required - # fields (so their absence can be reported). - fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS - # Remove fields that have already been checked. - fields_to_check -= {"metadata_version"} - - for key in fields_to_check: - try: - if metadata_version: - # Can't use getattr() as that triggers descriptor protocol which - # will fail due to no value for the instance argument. 
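# [Editor's note] Illustrative sketch, not part of this patch. from_raw()
# gathers every field failure into one ExceptionGroup instead of stopping at
# the first; catching the built-in ExceptionGroup requires Python 3.11+:
from packaging.metadata import Metadata

try:
    Metadata.from_raw(
        {"metadata_version": "2.3", "name": "demo", "version": "not-a-version"}
    )
except ExceptionGroup as eg:
    print([str(e) for e in eg.exceptions])
# e.g. ["'not-a-version' is invalid for 'Version'"]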
- try: - field_metadata_version = cls.__dict__[key].added - except KeyError: - exc = InvalidMetadata(key, f"unrecognized field: {key!r}") - exceptions.append(exc) - continue - field_age = _VALID_METADATA_VERSIONS.index( - field_metadata_version - ) - if field_age > metadata_age: - field = _RAW_TO_EMAIL_MAPPING[key] - exc = InvalidMetadata( - field, - "{field} introduced in metadata version " - "{field_metadata_version}, not {metadata_version}", - ) - exceptions.append(exc) - continue - getattr(ins, key) - except InvalidMetadata as exc: - exceptions.append(exc) - - if exceptions: - raise ExceptionGroup("invalid metadata", exceptions) - - return ins - - @classmethod - def from_email( - cls, data: Union[bytes, str], *, validate: bool = True - ) -> "Metadata": - """Parse metadata from email headers. - - If *validate* is true, the metadata will be validated. All exceptions - related to validation will be gathered and raised as an :class:`ExceptionGroup`. - """ - raw, unparsed = parse_email(data) - - if validate: - exceptions: list[Exception] = [] - for unparsed_key in unparsed: - if unparsed_key in _EMAIL_TO_RAW_MAPPING: - message = f"{unparsed_key!r} has invalid data" - else: - message = f"unrecognized field: {unparsed_key!r}" - exceptions.append(InvalidMetadata(unparsed_key, message)) - - if exceptions: - raise ExceptionGroup("unparsed", exceptions) - - try: - return cls.from_raw(raw, validate=validate) - except ExceptionGroup as exc_group: - raise ExceptionGroup( - "invalid or unparsed metadata", exc_group.exceptions - ) from None - - metadata_version: _Validator[_MetadataVersion] = _Validator() - """:external:ref:`core-metadata-metadata-version` - (required; validated to be a valid metadata version)""" - name: _Validator[str] = _Validator() - """:external:ref:`core-metadata-name` - (required; validated using :func:`~packaging.utils.canonicalize_name` and its - *validate* parameter)""" - version: _Validator[version_module.Version] = _Validator() - """:external:ref:`core-metadata-version` (required)""" - dynamic: _Validator[Optional[List[str]]] = _Validator( - added="2.2", - ) - """:external:ref:`core-metadata-dynamic` - (validated against core metadata field names and lowercased)""" - platforms: _Validator[Optional[List[str]]] = _Validator() - """:external:ref:`core-metadata-platform`""" - supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """:external:ref:`core-metadata-supported-platform`""" - summary: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" - description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body - """:external:ref:`core-metadata-description`""" - description_content_type: _Validator[Optional[str]] = _Validator(added="2.1") - """:external:ref:`core-metadata-description-content-type` (validated)""" - keywords: _Validator[Optional[List[str]]] = _Validator() - """:external:ref:`core-metadata-keywords`""" - home_page: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-home-page`""" - download_url: _Validator[Optional[str]] = _Validator(added="1.1") - """:external:ref:`core-metadata-download-url`""" - author: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-author`""" - author_email: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-author-email`""" - maintainer: _Validator[Optional[str]] = _Validator(added="1.2") - """:external:ref:`core-metadata-maintainer`""" - maintainer_email: 
_Validator[Optional[str]] = _Validator(added="1.2") - """:external:ref:`core-metadata-maintainer-email`""" - license: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-license`""" - classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """:external:ref:`core-metadata-classifier`""" - requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator( - added="1.2" - ) - """:external:ref:`core-metadata-requires-dist`""" - requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator( - added="1.2" - ) - """:external:ref:`core-metadata-requires-python`""" - # Because `Requires-External` allows for non-PEP 440 version specifiers, we - # don't do any processing on the values. - requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-requires-external`""" - project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-project-url`""" - # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation - # regardless of metadata version. - provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator( - added="2.1", - ) - """:external:ref:`core-metadata-provides-extra`""" - provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-provides-dist`""" - obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-obsoletes-dist`""" - requires: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """``Requires`` (deprecated)""" - provides: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """``Provides`` (deprecated)""" - obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """``Obsoletes`` (deprecated)""" diff --git a/distutils/_vendor/packaging/py.typed b/distutils/_vendor/packaging/py.typed deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/distutils/_vendor/packaging/requirements.py b/distutils/_vendor/packaging/requirements.py deleted file mode 100644 index bdc43a7e98..0000000000 --- a/distutils/_vendor/packaging/requirements.py +++ /dev/null @@ -1,90 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from typing import Any, Iterator, Optional, Set - -from ._parser import parse_requirement as _parse_requirement -from ._tokenizer import ParserSyntaxError -from .markers import Marker, _normalize_extra_values -from .specifiers import SpecifierSet -from .utils import canonicalize_name - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -class Requirement: - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? 
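# [Editor's note] Illustrative sketch, not part of this patch. Typical use
# of the Requirement class defined below, via the canonical `packaging`
# import path:
from packaging.requirements import Requirement

req = Requirement('requests[security]>=2.8.1,==2.8.*; python_version > "2.7"')
print(req.name)            # requests
print(sorted(req.extras))  # ['security']
print(str(req.specifier))  # ==2.8.*,>=2.8.1
print(str(req.marker))     # python_version > "2.7"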
- - def __init__(self, requirement_string: str) -> None: - try: - parsed = _parse_requirement(requirement_string) - except ParserSyntaxError as e: - raise InvalidRequirement(str(e)) from e - - self.name: str = parsed.name - self.url: Optional[str] = parsed.url or None - self.extras: Set[str] = set(parsed.extras or []) - self.specifier: SpecifierSet = SpecifierSet(parsed.specifier) - self.marker: Optional[Marker] = None - if parsed.marker is not None: - self.marker = Marker.__new__(Marker) - self.marker._markers = _normalize_extra_values(parsed.marker) - - def _iter_parts(self, name: str) -> Iterator[str]: - yield name - - if self.extras: - formatted_extras = ",".join(sorted(self.extras)) - yield f"[{formatted_extras}]" - - if self.specifier: - yield str(self.specifier) - - if self.url: - yield f"@ {self.url}" - if self.marker: - yield " " - - if self.marker: - yield f"; {self.marker}" - - def __str__(self) -> str: - return "".join(self._iter_parts(self.name)) - - def __repr__(self) -> str: - return f"" - - def __hash__(self) -> int: - return hash( - ( - self.__class__.__name__, - *self._iter_parts(canonicalize_name(self.name)), - ) - ) - - def __eq__(self, other: Any) -> bool: - if not isinstance(other, Requirement): - return NotImplemented - - return ( - canonicalize_name(self.name) == canonicalize_name(other.name) - and self.extras == other.extras - and self.specifier == other.specifier - and self.url == other.url - and self.marker == other.marker - ) diff --git a/distutils/_vendor/packaging/specifiers.py b/distutils/_vendor/packaging/specifiers.py deleted file mode 100644 index 2d015bab59..0000000000 --- a/distutils/_vendor/packaging/specifiers.py +++ /dev/null @@ -1,1017 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -""" -.. testsetup:: - - from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier - from packaging.version import Version -""" - -import abc -import itertools -import re -from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union - -from .utils import canonicalize_version -from .version import Version - -UnparsedVersion = Union[Version, str] -UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) -CallableOperator = Callable[[Version, str], bool] - - -def _coerce_version(version: UnparsedVersion) -> Version: - if not isinstance(version, Version): - version = Version(version) - return version - - -class InvalidSpecifier(ValueError): - """ - Raised when attempting to create a :class:`Specifier` with a specifier - string that is invalid. - - >>> Specifier("lolwat") - Traceback (most recent call last): - ... - packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat' - """ - - -class BaseSpecifier(metaclass=abc.ABCMeta): - @abc.abstractmethod - def __str__(self) -> str: - """ - Returns the str representation of this Specifier-like object. This - should be representative of the Specifier itself. - """ - - @abc.abstractmethod - def __hash__(self) -> int: - """ - Returns a hash value for this Specifier-like object. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Returns a boolean representing whether or not the two Specifier-like - objects are equal. - - :param other: The other object to check against. 
- """ - - @property - @abc.abstractmethod - def prereleases(self) -> Optional[bool]: - """Whether or not pre-releases as a whole are allowed. - - This can be set to either ``True`` or ``False`` to explicitly enable or disable - prereleases or it can be set to ``None`` (the default) to use default semantics. - """ - - @prereleases.setter - def prereleases(self, value: bool) -> None: - """Setter for :attr:`prereleases`. - - :param value: The value to set. - """ - - @abc.abstractmethod - def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: - """ - Determines if the given item is contained within this specifier. - """ - - @abc.abstractmethod - def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None - ) -> Iterator[UnparsedVersionVar]: - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. - """ - - -class Specifier(BaseSpecifier): - """This class abstracts handling of version specifiers. - - .. tip:: - - It is generally not required to instantiate this manually. You should instead - prefer to work with :class:`SpecifierSet` instead, which can parse - comma-separated version specifiers (which is what package metadata contains). - """ - - _operator_regex_str = r""" - (?P(~=|==|!=|<=|>=|<|>|===)) - """ - _version_regex_str = r""" - (?P - (?: - # The identity operators allow for an escape hatch that will - # do an exact string match of the version you wish to install. - # This will not be parsed by PEP 440 and we cannot determine - # any semantic meaning from it. This operator is discouraged - # but included entirely as an escape hatch. - (?<====) # Only match for the identity operator - \s* - [^\s;)]* # The arbitrary version can be just about anything, - # we match everything except for whitespace, a - # semi-colon for marker support, and a closing paren - # since versions can be enclosed in them. - ) - | - (?: - # The (non)equality operators allow for wild card and local - # versions to be specified so we have to define these two - # operators separately to enable that. - (?<===|!=) # Only match for equals and not equals - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - - # You cannot use a wild card and a pre-release, post-release, a dev or - # local version together so group them with a | and make them optional. - (?: - \.\* # Wild card syntax of .* - | - (?: # pre release - [-_\.]? - (alpha|beta|preview|pre|a|b|c|rc) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - )? - ) - | - (?: - # The compatible operator requires at least two digits in the - # release segment. - (?<=~=) # Only match for the compatible operator - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) - (?: # pre release - [-_\.]? - (alpha|beta|preview|pre|a|b|c|rc) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - | - (?: - # All other operators only allow a sub set of what the - # (non)equality operators do. Specifically they do not allow - # local versions to be specified nor do they allow the prefix - # matching wild cards. 
- (?=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - "===": "arbitrary", - } - - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: - """Initialize a Specifier instance. - - :param spec: - The string representation of a specifier which will be parsed and - normalized before use. - :param prereleases: - This tells the specifier if it should accept prerelease versions if - applicable or not. The default of ``None`` will autodetect it from the - given specifiers. - :raises InvalidSpecifier: - If the given specifier is invalid (i.e. bad syntax). - """ - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier(f"Invalid specifier: '{spec}'") - - self._spec: Tuple[str, str] = ( - match.group("operator").strip(), - match.group("version").strip(), - ) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515 - @property # type: ignore[override] - def prereleases(self) -> bool: - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if Version(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - @property - def operator(self) -> str: - """The operator of this specifier. - - >>> Specifier("==1.2.3").operator - '==' - """ - return self._spec[0] - - @property - def version(self) -> str: - """The version of this specifier. - - >>> Specifier("==1.2.3").version - '1.2.3' - """ - return self._spec[1] - - def __repr__(self) -> str: - """A representation of the Specifier that shows all internal state. - - >>> Specifier('>=1.0.0') - =1.0.0')> - >>> Specifier('>=1.0.0', prereleases=False) - =1.0.0', prereleases=False)> - >>> Specifier('>=1.0.0', prereleases=True) - =1.0.0', prereleases=True)> - """ - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"<{self.__class__.__name__}({str(self)!r}{pre})>" - - def __str__(self) -> str: - """A string representation of the Specifier that can be round-tripped. - - >>> str(Specifier('>=1.0.0')) - '>=1.0.0' - >>> str(Specifier('>=1.0.0', prereleases=False)) - '>=1.0.0' - """ - return "{}{}".format(*self._spec) - - @property - def _canonical_spec(self) -> Tuple[str, str]: - canonical_version = canonicalize_version( - self._spec[1], - strip_trailing_zero=(self._spec[0] != "~="), - ) - return self._spec[0], canonical_version - - def __hash__(self) -> int: - return hash(self._canonical_spec) - - def __eq__(self, other: object) -> bool: - """Whether or not the two Specifier-like objects are equal. - - :param other: The other object to check against. - - The value of :attr:`prereleases` is ignored. - - >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") - True - >>> (Specifier("==1.2.3", prereleases=False) == - ... 
Specifier("==1.2.3", prereleases=True)) - True - >>> Specifier("==1.2.3") == "==1.2.3" - True - >>> Specifier("==1.2.3") == Specifier("==1.2.4") - False - >>> Specifier("==1.2.3") == Specifier("~=1.2.3") - False - """ - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._canonical_spec == other._canonical_spec - - def _get_operator(self, op: str) -> CallableOperator: - operator_callable: CallableOperator = getattr( - self, f"_compare_{self._operators[op]}" - ) - return operator_callable - - def _compare_compatible(self, prospective: Version, spec: str) -> bool: - - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore suffix segments. - prefix = _version_join( - list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( - prospective, prefix - ) - - def _compare_equal(self, prospective: Version, spec: str) -> bool: - - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - normalized_prospective = canonicalize_version( - prospective.public, strip_trailing_zero=False - ) - # Get the normalized version string ignoring the trailing .* - normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) - # Split the spec out by bangs and dots, and pretend that there is - # an implicit dot in between a release segment and a pre-release segment. - split_spec = _version_split(normalized_spec) - - # Split the prospective version out by bangs and dots, and pretend - # that there is an implicit dot in between a release segment and - # a pre-release segment. - split_prospective = _version_split(normalized_prospective) - - # 0-pad the prospective version before shortening it to get the correct - # shortened version. - padded_prospective, _ = _pad_version(split_prospective, split_spec) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - shortened_prospective = padded_prospective[: len(split_spec)] - - return shortened_prospective == split_spec - else: - # Convert our spec string into a Version - spec_version = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. - if not spec_version.local: - prospective = Version(prospective.public) - - return prospective == spec_version - - def _compare_not_equal(self, prospective: Version, spec: str) -> bool: - return not self._compare_equal(prospective, spec) - - def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. 
- return Version(prospective.public) <= Version(spec) - - def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) >= Version(spec) - - def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is technically greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: - return str(prospective).lower() == str(spec).lower() - - def __contains__(self, item: Union[str, Version]) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: The item to check for. - - This is used for the ``in`` operator and behaves the same as - :meth:`contains` with no ``prereleases`` argument passed. 
- - >>> "1.2.3" in Specifier(">=1.2.3") - True - >>> Version("1.2.3") in Specifier(">=1.2.3") - True - >>> "1.0.0" in Specifier(">=1.2.3") - False - >>> "1.3.0a1" in Specifier(">=1.2.3") - False - >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) - True - """ - return self.contains(item) - - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: - The item to check for, which can be a version string or a - :class:`Version` instance. - :param prereleases: - Whether or not to match prereleases with this Specifier. If set to - ``None`` (the default), it uses :attr:`prereleases` to determine - whether or not prereleases are allowed. - - >>> Specifier(">=1.2.3").contains("1.2.3") - True - >>> Specifier(">=1.2.3").contains(Version("1.2.3")) - True - >>> Specifier(">=1.2.3").contains("1.0.0") - False - >>> Specifier(">=1.2.3").contains("1.3.0a1") - False - >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1") - True - >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True) - True - """ - - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version, this allows us to have a shortcut for - # "2.0" in Specifier(">=2") - normalized_item = _coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if normalized_item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - operator_callable: CallableOperator = self._get_operator(self.operator) - return operator_callable(normalized_item, self.version) - - def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None - ) -> Iterator[UnparsedVersionVar]: - """Filter items in the given iterable, that match the specifier. - - :param iterable: - An iterable that can contain version strings and :class:`Version` instances. - The items in the iterable will be filtered according to the specifier. - :param prereleases: - Whether or not to allow prereleases in the returned iterator. If set to - ``None`` (the default), it will be intelligently decide whether to allow - prereleases or not (based on the :attr:`prereleases` attribute, and - whether the only versions matching are prereleases). - - This method is smarter than just ``filter(Specifier().contains, [...])`` - because it implements the rule from :pep:`440` that a prerelease item - SHOULD be accepted if no other versions match the given specifier. - - >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) - ['1.3'] - >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")])) - ['1.2.3', '1.3', ] - >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"])) - ['1.5a1'] - >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - """ - - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. 
- for version in iterable: - parsed_version = _coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later in case nothing - # else matches this specifier. - if parsed_version.is_prerelease and not ( - prereleases or self.prereleases - ): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version: str) -> List[str]: - """Split version into components. - - The split components are intended for version comparison. The logic does - not attempt to retain the original version string, so joining the - components back with :func:`_version_join` may not produce the original - version string. - """ - result: List[str] = [] - - epoch, _, rest = version.rpartition("!") - result.append(epoch or "0") - - for item in rest.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _version_join(components: List[str]) -> str: - """Join split version components into a version string. - - This function assumes the input came from :func:`_version_split`, where the - first component must be the epoch (either empty or numeric), and all other - components numeric. - """ - epoch, *rest = components - return f"{epoch}!{'.'.join(rest)}" - - -def _is_not_suffix(segment: str) -> bool: - return not any( - segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") - ) - - -def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]) :]) - right_split.append(right[len(right_split[0]) :]) - - # Insert our padding - left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) - right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) - - return ( - list(itertools.chain.from_iterable(left_split)), - list(itertools.chain.from_iterable(right_split)), - ) - - -class SpecifierSet(BaseSpecifier): - """This class abstracts handling of a set of version specifiers. - - It can be passed a single specifier (``>=3.0``), a comma-separated list of - specifiers (``>=3.0,!=3.1``), or no specifier at all. - """ - - def __init__( - self, specifiers: str = "", prereleases: Optional[bool] = None - ) -> None: - """Initialize a SpecifierSet instance. - - :param specifiers: - The string representation of a specifier or a comma-separated list of - specifiers which will be parsed and normalized before use. - :param prereleases: - This tells the SpecifierSet if it should accept prerelease versions if - applicable or not. The default of ``None`` will autodetect it from the - given specifiers. 
- - :raises InvalidSpecifier: - If the given ``specifiers`` are not parseable than this exception will be - raised. - """ - - # Split on `,` to break each individual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. - split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Make each individual specifier a Specifier and save in a frozen set for later. - self._specs = frozenset(map(Specifier, split_specifiers)) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - @property - def prereleases(self) -> Optional[bool]: - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - def __repr__(self) -> str: - """A representation of the specifier set that shows all internal state. - - Note that the ordering of the individual specifiers within the set may not - match the input string. - - >>> SpecifierSet('>=1.0.0,!=2.0.0') - =1.0.0')> - >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False) - =1.0.0', prereleases=False)> - >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True) - =1.0.0', prereleases=True)> - """ - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"" - - def __str__(self) -> str: - """A string representation of the specifier set that can be round-tripped. - - Note that the ordering of the individual specifiers within the set may not - match the input string. - - >>> str(SpecifierSet(">=1.0.0,!=1.0.1")) - '!=1.0.1,>=1.0.0' - >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False)) - '!=1.0.1,>=1.0.0' - """ - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self) -> int: - return hash(self._specs) - - def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": - """Return a SpecifierSet which is a combination of the two sets. - - :param other: The other object to combine with. - - >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1' - =1.0.0')> - >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1') - =1.0.0')> - """ - if isinstance(other, str): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." - ) - - return specifier - - def __eq__(self, other: object) -> bool: - """Whether or not the two SpecifierSet-like objects are equal. - - :param other: The other object to check against. 
- - The value of :attr:`prereleases` is ignored. - - >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1") - True - >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) == - ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)) - True - >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1" - True - >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0") - False - >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2") - False - """ - if isinstance(other, (str, Specifier)): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __len__(self) -> int: - """Returns the number of specifiers in this specifier set.""" - return len(self._specs) - - def __iter__(self) -> Iterator[Specifier]: - """ - Returns an iterator over all the underlying :class:`Specifier` instances - in this specifier set. - - >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str) - [, =1.0.0')>] - """ - return iter(self._specs) - - def __contains__(self, item: UnparsedVersion) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: The item to check for. - - This is used for the ``in`` operator and behaves the same as - :meth:`contains` with no ``prereleases`` argument passed. - - >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1") - True - >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1") - True - >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1") - False - >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1") - False - >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True) - True - """ - return self.contains(item) - - def contains( - self, - item: UnparsedVersion, - prereleases: Optional[bool] = None, - installed: Optional[bool] = None, - ) -> bool: - """Return whether or not the item is contained in this SpecifierSet. - - :param item: - The item to check for, which can be a version string or a - :class:`Version` instance. - :param prereleases: - Whether or not to match prereleases with this SpecifierSet. If set to - ``None`` (the default), it uses :attr:`prereleases` to determine - whether or not prereleases are allowed. - - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3") - True - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3")) - True - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1") - False - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1") - False - >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1") - True - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True) - True - """ - # Ensure that our item is a Version instance. - if not isinstance(item, Version): - item = Version(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. 
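# [Editor's note] Illustrative sketch, not part of this patch. The
# `installed` flag handled just below compares an installed prerelease by
# its base version:
from packaging.specifiers import SpecifierSet

s = SpecifierSet(">=1.0")
print(s.contains("1.0.dev1", prereleases=True))                  # False
print(s.contains("1.0.dev1", prereleases=True, installed=True))  # True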
- # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - if installed and item.is_prerelease: - item = Version(item.base_version) - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. - # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all(s.contains(item, prereleases=prereleases) for s in self._specs) - - def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None - ) -> Iterator[UnparsedVersionVar]: - """Filter items in the given iterable, that match the specifiers in this set. - - :param iterable: - An iterable that can contain version strings and :class:`Version` instances. - The items in the iterable will be filtered according to the specifier. - :param prereleases: - Whether or not to allow prereleases in the returned iterator. If set to - ``None`` (the default), it will be intelligently decide whether to allow - prereleases or not (based on the :attr:`prereleases` attribute, and - whether the only versions matching are prereleases). - - This method is smarter than just ``filter(SpecifierSet(...).contains, [...])`` - because it implements the rule from :pep:`440` that a prerelease item - SHOULD be accepted if no other versions match the given specifier. - - >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) - ['1.3'] - >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")])) - ['1.3', ] - >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"])) - [] - >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - - An "empty" SpecifierSet will filter items based on the presence of prerelease - versions in the set. - - >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) - ['1.3'] - >>> list(SpecifierSet("").filter(["1.5a1"])) - ['1.5a1'] - >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - """ - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iter(iterable) - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases. 
- else: - filtered: List[UnparsedVersionVar] = [] - found_prereleases: List[UnparsedVersionVar] = [] - - for item in iterable: - parsed_version = _coerce_version(item) - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return iter(found_prereleases) - - return iter(filtered) diff --git a/distutils/_vendor/packaging/tags.py b/distutils/_vendor/packaging/tags.py deleted file mode 100644 index 89f1926137..0000000000 --- a/distutils/_vendor/packaging/tags.py +++ /dev/null @@ -1,571 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import logging -import platform -import re -import struct -import subprocess -import sys -import sysconfig -from importlib.machinery import EXTENSION_SUFFIXES -from typing import ( - Dict, - FrozenSet, - Iterable, - Iterator, - List, - Optional, - Sequence, - Tuple, - Union, - cast, -) - -from . import _manylinux, _musllinux - -logger = logging.getLogger(__name__) - -PythonVersion = Sequence[int] -MacVersion = Tuple[int, int] - -INTERPRETER_SHORT_NAMES: Dict[str, str] = { - "python": "py", # Generic. - "cpython": "cp", - "pypy": "pp", - "ironpython": "ip", - "jython": "jy", -} - - -_32_BIT_INTERPRETER = struct.calcsize("P") == 4 - - -class Tag: - """ - A representation of the tag triple for a wheel. - - Instances are considered immutable and thus are hashable. Equality checking - is also supported. - """ - - __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] - - def __init__(self, interpreter: str, abi: str, platform: str) -> None: - self._interpreter = interpreter.lower() - self._abi = abi.lower() - self._platform = platform.lower() - # The __hash__ of every single element in a Set[Tag] will be evaluated each time - # that a set calls its `.disjoint()` method, which may be called hundreds of - # times when scanning a page of links for packages with tags matching that - # Set[Tag]. Pre-computing the value here produces significant speedups for - # downstream consumers. - self._hash = hash((self._interpreter, self._abi, self._platform)) - - @property - def interpreter(self) -> str: - return self._interpreter - - @property - def abi(self) -> str: - return self._abi - - @property - def platform(self) -> str: - return self._platform - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Tag): - return NotImplemented - - return ( - (self._hash == other._hash) # Short-circuit ASAP for perf reasons. - and (self._platform == other._platform) - and (self._abi == other._abi) - and (self._interpreter == other._interpreter) - ) - - def __hash__(self) -> int: - return self._hash - - def __str__(self) -> str: - return f"{self._interpreter}-{self._abi}-{self._platform}" - - def __repr__(self) -> str: - return f"<{self} @ {id(self)}>" - - -def parse_tag(tag: str) -> FrozenSet[Tag]: - """ - Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. - - Returning a set is required due to the possibility that the tag is a - compressed tag set. 
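# [Editor's note] Illustrative sketch, not part of this patch. A compressed
# tag expands to the cross product of its dotted components:
from packaging.tags import parse_tag

tags = parse_tag("cp38.cp39-abi3-manylinux1_x86_64.manylinux2014_x86_64")
print(len(tags))  # 4 (2 interpreters x 1 abi x 2 platforms)
print(sorted(map(str, tags))[0])  # cp38-abi3-manylinux1_x86_64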
- """ - tags = set() - interpreters, abis, platforms = tag.split("-") - for interpreter in interpreters.split("."): - for abi in abis.split("."): - for platform_ in platforms.split("."): - tags.add(Tag(interpreter, abi, platform_)) - return frozenset(tags) - - -def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: - value: Union[int, str, None] = sysconfig.get_config_var(name) - if value is None and warn: - logger.debug( - "Config variable '%s' is unset, Python ABI tag may be incorrect", name - ) - return value - - -def _normalize_string(string: str) -> str: - return string.replace(".", "_").replace("-", "_").replace(" ", "_") - - -def _is_threaded_cpython(abis: List[str]) -> bool: - """ - Determine if the ABI corresponds to a threaded (`--disable-gil`) build. - - The threaded builds are indicated by a "t" in the abiflags. - """ - if len(abis) == 0: - return False - # expect e.g., cp313 - m = re.match(r"cp\d+(.*)", abis[0]) - if not m: - return False - abiflags = m.group(1) - return "t" in abiflags - - -def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: - """ - Determine if the Python version supports abi3. - - PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`) - builds do not support abi3. - """ - return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading - - -def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: - py_version = tuple(py_version) # To allow for version comparison. - abis = [] - version = _version_nodot(py_version[:2]) - threading = debug = pymalloc = ucs4 = "" - with_debug = _get_config_var("Py_DEBUG", warn) - has_refcount = hasattr(sys, "gettotalrefcount") - # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled - # extension modules is the best option. - # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 - has_ext = "_d.pyd" in EXTENSION_SUFFIXES - if with_debug or (with_debug is None and (has_refcount or has_ext)): - debug = "d" - if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn): - threading = "t" - if py_version < (3, 8): - with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) - if with_pymalloc or with_pymalloc is None: - pymalloc = "m" - if py_version < (3, 3): - unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) - if unicode_size == 4 or ( - unicode_size is None and sys.maxunicode == 0x10FFFF - ): - ucs4 = "u" - elif debug: - # Debug builds can also load "normal" extension modules. - # We can also assume no UCS-4 or pymalloc requirement. - abis.append(f"cp{version}{threading}") - abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}") - return abis - - -def cpython_tags( - python_version: Optional[PythonVersion] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a CPython interpreter. - - The tags consist of: - - cp-- - - cp-abi3- - - cp-none- - - cp-abi3- # Older Python versions down to 3.2. - - If python_version only specifies a major version then user-provided ABIs and - the 'none' ABItag will be used. - - If 'abi3' or 'none' are specified in 'abis' then they will be yielded at - their normal position and not at the beginning. 
- """ - if not python_version: - python_version = sys.version_info[:2] - - interpreter = f"cp{_version_nodot(python_version[:2])}" - - if abis is None: - if len(python_version) > 1: - abis = _cpython_abis(python_version, warn) - else: - abis = [] - abis = list(abis) - # 'abi3' and 'none' are explicitly handled later. - for explicit_abi in ("abi3", "none"): - try: - abis.remove(explicit_abi) - except ValueError: - pass - - platforms = list(platforms or platform_tags()) - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - - threading = _is_threaded_cpython(abis) - use_abi3 = _abi3_applies(python_version, threading) - if use_abi3: - yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) - yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) - - if use_abi3: - for minor_version in range(python_version[1] - 1, 1, -1): - for platform_ in platforms: - interpreter = "cp{version}".format( - version=_version_nodot((python_version[0], minor_version)) - ) - yield Tag(interpreter, "abi3", platform_) - - -def _generic_abi() -> List[str]: - """ - Return the ABI tag based on EXT_SUFFIX. - """ - # The following are examples of `EXT_SUFFIX`. - # We want to keep the parts which are related to the ABI and remove the - # parts which are related to the platform: - # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 - # - mac: '.cpython-310-darwin.so' => cp310 - # - win: '.cp310-win_amd64.pyd' => cp310 - # - win: '.pyd' => cp37 (uses _cpython_abis()) - # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 - # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' - # => graalpy_38_native - - ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) - if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": - raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") - parts = ext_suffix.split(".") - if len(parts) < 3: - # CPython3.7 and earlier uses ".pyd" on Windows. - return _cpython_abis(sys.version_info[:2]) - soabi = parts[1] - if soabi.startswith("cpython"): - # non-windows - abi = "cp" + soabi.split("-")[1] - elif soabi.startswith("cp"): - # windows - abi = soabi.split("-")[0] - elif soabi.startswith("pypy"): - abi = "-".join(soabi.split("-")[:2]) - elif soabi.startswith("graalpy"): - abi = "-".join(soabi.split("-")[:3]) - elif soabi: - # pyston, ironpython, others? - abi = soabi - else: - return [] - return [_normalize_string(abi)] - - -def generic_tags( - interpreter: Optional[str] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a generic interpreter. - - The tags consist of: - - -- - - The "none" ABI will be added if it was not explicitly provided. - """ - if not interpreter: - interp_name = interpreter_name() - interp_version = interpreter_version(warn=warn) - interpreter = "".join([interp_name, interp_version]) - if abis is None: - abis = _generic_abi() - else: - abis = list(abis) - platforms = list(platforms or platform_tags()) - if "none" not in abis: - abis.append("none") - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - - -def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: - """ - Yields Python versions in descending order. - - After the latest version, the major-only version will be yielded, and then - all previous versions of that major version. 
- """ - if len(py_version) > 1: - yield f"py{_version_nodot(py_version[:2])}" - yield f"py{py_version[0]}" - if len(py_version) > 1: - for minor in range(py_version[1] - 1, -1, -1): - yield f"py{_version_nodot((py_version[0], minor))}" - - -def compatible_tags( - python_version: Optional[PythonVersion] = None, - interpreter: Optional[str] = None, - platforms: Optional[Iterable[str]] = None, -) -> Iterator[Tag]: - """ - Yields the sequence of tags that are compatible with a specific version of Python. - - The tags consist of: - - py*-none- - - -none-any # ... if `interpreter` is provided. - - py*-none-any - """ - if not python_version: - python_version = sys.version_info[:2] - platforms = list(platforms or platform_tags()) - for version in _py_interpreter_range(python_version): - for platform_ in platforms: - yield Tag(version, "none", platform_) - if interpreter: - yield Tag(interpreter, "none", "any") - for version in _py_interpreter_range(python_version): - yield Tag(version, "none", "any") - - -def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: - if not is_32bit: - return arch - - if arch.startswith("ppc"): - return "ppc" - - return "i386" - - -def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: - formats = [cpu_arch] - if cpu_arch == "x86_64": - if version < (10, 4): - return [] - formats.extend(["intel", "fat64", "fat32"]) - - elif cpu_arch == "i386": - if version < (10, 4): - return [] - formats.extend(["intel", "fat32", "fat"]) - - elif cpu_arch == "ppc64": - # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? - if version > (10, 5) or version < (10, 4): - return [] - formats.append("fat64") - - elif cpu_arch == "ppc": - if version > (10, 6): - return [] - formats.extend(["fat32", "fat"]) - - if cpu_arch in {"arm64", "x86_64"}: - formats.append("universal2") - - if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: - formats.append("universal") - - return formats - - -def mac_platforms( - version: Optional[MacVersion] = None, arch: Optional[str] = None -) -> Iterator[str]: - """ - Yields the platform tags for a macOS system. - - The `version` parameter is a two-item tuple specifying the macOS version to - generate platform tags for. The `arch` parameter is the CPU architecture to - generate platform tags for. Both parameters default to the appropriate value - for the current system. - """ - version_str, _, cpu_arch = platform.mac_ver() - if version is None: - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - if version == (10, 16): - # When built against an older macOS SDK, Python will report macOS 10.16 - # instead of the real version. - version_str = subprocess.run( - [ - sys.executable, - "-sS", - "-c", - "import platform; print(platform.mac_ver()[0])", - ], - check=True, - env={"SYSTEM_VERSION_COMPAT": "0"}, - stdout=subprocess.PIPE, - text=True, - ).stdout - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - else: - version = version - if arch is None: - arch = _mac_arch(cpu_arch) - else: - arch = arch - - if (10, 0) <= version and version < (11, 0): - # Prior to Mac OS 11, each yearly release of Mac OS bumped the - # "minor" version number. The major version was always 10. 
- for minor_version in range(version[1], -1, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=10, minor=minor_version, binary_format=binary_format - ) - - if version >= (11, 0): - # Starting with Mac OS 11, each yearly release bumps the major version - # number. The minor versions are now the midyear updates. - for major_version in range(version[0], 10, -1): - compat_version = major_version, 0 - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=major_version, minor=0, binary_format=binary_format - ) - - if version >= (11, 0): - # Mac OS 11 on x86_64 is compatible with binaries from previous releases. - # Arm64 support was introduced in 11.0, so no Arm binaries from previous - # releases exist. - # - # However, the "universal2" binary format can have a - # macOS version earlier than 11.0 when the x86_64 part of the binary supports - # that version of macOS. - if arch == "x86_64": - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - else: - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_format = "universal2" - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - - -def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: - linux = _normalize_string(sysconfig.get_platform()) - if not linux.startswith("linux_"): - # we should never be here, just yield the sysconfig one and return - yield linux - return - if is_32bit: - if linux == "linux_x86_64": - linux = "linux_i686" - elif linux == "linux_aarch64": - linux = "linux_armv8l" - _, arch = linux.split("_", 1) - archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch]) - yield from _manylinux.platform_tags(archs) - yield from _musllinux.platform_tags(archs) - for arch in archs: - yield f"linux_{arch}" - - -def _generic_platforms() -> Iterator[str]: - yield _normalize_string(sysconfig.get_platform()) - - -def platform_tags() -> Iterator[str]: - """ - Provides the platform tags for this installation. - """ - if platform.system() == "Darwin": - return mac_platforms() - elif platform.system() == "Linux": - return _linux_platforms() - else: - return _generic_platforms() - - -def interpreter_name() -> str: - """ - Returns the name of the running interpreter. - - Some implementations have a reserved, two-letter abbreviation which will - be returned when appropriate. - """ - name = sys.implementation.name - return INTERPRETER_SHORT_NAMES.get(name) or name - - -def interpreter_version(*, warn: bool = False) -> str: - """ - Returns the version of the running interpreter. - """ - version = _get_config_var("py_version_nodot", warn=warn) - if version: - version = str(version) - else: - version = _version_nodot(sys.version_info[:2]) - return version - - -def _version_nodot(version: PythonVersion) -> str: - return "".join(map(str, version)) - - -def sys_tags(*, warn: bool = False) -> Iterator[Tag]: - """ - Returns the sequence of tag triples for the running interpreter. 
- - The order of the sequence corresponds to priority order for the - interpreter, from most to least important. - """ - - interp_name = interpreter_name() - if interp_name == "cp": - yield from cpython_tags(warn=warn) - else: - yield from generic_tags() - - if interp_name == "pp": - interp = "pp3" - elif interp_name == "cp": - interp = "cp" + interpreter_version(warn=warn) - else: - interp = None - yield from compatible_tags(interpreter=interp) diff --git a/distutils/_vendor/packaging/utils.py b/distutils/_vendor/packaging/utils.py deleted file mode 100644 index c2c2f75aa8..0000000000 --- a/distutils/_vendor/packaging/utils.py +++ /dev/null @@ -1,172 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import re -from typing import FrozenSet, NewType, Tuple, Union, cast - -from .tags import Tag, parse_tag -from .version import InvalidVersion, Version - -BuildTag = Union[Tuple[()], Tuple[int, str]] -NormalizedName = NewType("NormalizedName", str) - - -class InvalidName(ValueError): - """ - An invalid distribution name; users should refer to the packaging user guide. - """ - - -class InvalidWheelFilename(ValueError): - """ - An invalid wheel filename was found, users should refer to PEP 427. - """ - - -class InvalidSdistFilename(ValueError): - """ - An invalid sdist filename was found, users should refer to the packaging user guide. - """ - - -# Core metadata spec for `Name` -_validate_regex = re.compile( - r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE -) -_canonicalize_regex = re.compile(r"[-_.]+") -_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$") -# PEP 427: The build number must start with a digit. -_build_tag_regex = re.compile(r"(\d+)(.*)") - - -def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName: - if validate and not _validate_regex.match(name): - raise InvalidName(f"name is invalid: {name!r}") - # This is taken from PEP 503. - value = _canonicalize_regex.sub("-", name).lower() - return cast(NormalizedName, value) - - -def is_normalized_name(name: str) -> bool: - return _normalized_regex.match(name) is not None - - -def canonicalize_version( - version: Union[Version, str], *, strip_trailing_zero: bool = True -) -> str: - """ - This is very similar to Version.__str__, but has one subtle difference - with the way it handles the release segment. 
- """ - if isinstance(version, str): - try: - parsed = Version(version) - except InvalidVersion: - # Legacy versions cannot be normalized - return version - else: - parsed = version - - parts = [] - - # Epoch - if parsed.epoch != 0: - parts.append(f"{parsed.epoch}!") - - # Release segment - release_segment = ".".join(str(x) for x in parsed.release) - if strip_trailing_zero: - # NB: This strips trailing '.0's to normalize - release_segment = re.sub(r"(\.0)+$", "", release_segment) - parts.append(release_segment) - - # Pre-release - if parsed.pre is not None: - parts.append("".join(str(x) for x in parsed.pre)) - - # Post-release - if parsed.post is not None: - parts.append(f".post{parsed.post}") - - # Development release - if parsed.dev is not None: - parts.append(f".dev{parsed.dev}") - - # Local version segment - if parsed.local is not None: - parts.append(f"+{parsed.local}") - - return "".join(parts) - - -def parse_wheel_filename( - filename: str, -) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: - if not filename.endswith(".whl"): - raise InvalidWheelFilename( - f"Invalid wheel filename (extension must be '.whl'): {filename}" - ) - - filename = filename[:-4] - dashes = filename.count("-") - if dashes not in (4, 5): - raise InvalidWheelFilename( - f"Invalid wheel filename (wrong number of parts): {filename}" - ) - - parts = filename.split("-", dashes - 2) - name_part = parts[0] - # See PEP 427 for the rules on escaping the project name. - if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: - raise InvalidWheelFilename(f"Invalid project name: {filename}") - name = canonicalize_name(name_part) - - try: - version = Version(parts[1]) - except InvalidVersion as e: - raise InvalidWheelFilename( - f"Invalid wheel filename (invalid version): {filename}" - ) from e - - if dashes == 5: - build_part = parts[2] - build_match = _build_tag_regex.match(build_part) - if build_match is None: - raise InvalidWheelFilename( - f"Invalid build number: {build_part} in '{filename}'" - ) - build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) - else: - build = () - tags = parse_tag(parts[-1]) - return (name, version, build, tags) - - -def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: - if filename.endswith(".tar.gz"): - file_stem = filename[: -len(".tar.gz")] - elif filename.endswith(".zip"): - file_stem = filename[: -len(".zip")] - else: - raise InvalidSdistFilename( - f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" - f" {filename}" - ) - - # We are requiring a PEP 440 version, which cannot contain dashes, - # so we split on the last dash. - name_part, sep, version_part = file_stem.rpartition("-") - if not sep: - raise InvalidSdistFilename(f"Invalid sdist filename: {filename}") - - name = canonicalize_name(name_part) - - try: - version = Version(version_part) - except InvalidVersion as e: - raise InvalidSdistFilename( - f"Invalid sdist filename (invalid version): {filename}" - ) from e - - return (name, version) diff --git a/distutils/_vendor/packaging/version.py b/distutils/_vendor/packaging/version.py deleted file mode 100644 index 5faab9bd0d..0000000000 --- a/distutils/_vendor/packaging/version.py +++ /dev/null @@ -1,563 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -""" -.. 
testsetup::
-
-    from packaging.version import parse, Version
-"""
-
-import itertools
-import re
-from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
-
-from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
-
-__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
-
-LocalType = Tuple[Union[int, str], ...]
-
-CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
-CmpLocalType = Union[
-    NegativeInfinityType,
-    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
-]
-CmpKey = Tuple[
-    int,
-    Tuple[int, ...],
-    CmpPrePostDevType,
-    CmpPrePostDevType,
-    CmpPrePostDevType,
-    CmpLocalType,
-]
-VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
-
-
-class _Version(NamedTuple):
-    epoch: int
-    release: Tuple[int, ...]
-    dev: Optional[Tuple[str, int]]
-    pre: Optional[Tuple[str, int]]
-    post: Optional[Tuple[str, int]]
-    local: Optional[LocalType]
-
-
-def parse(version: str) -> "Version":
-    """Parse the given version string.
-
-    >>> parse('1.0.dev1')
-    <Version('1.0.dev1')>
-
-    :param version: The version string to parse.
-    :raises InvalidVersion: When the version string is not a valid version.
-    """
-    return Version(version)
-
-
-class InvalidVersion(ValueError):
-    """Raised when a version string is not a valid version.
-
-    >>> Version("invalid")
-    Traceback (most recent call last):
-        ...
-    packaging.version.InvalidVersion: Invalid version: 'invalid'
-    """
-
-
-class _BaseVersion:
-    _key: Tuple[Any, ...]
-
-    def __hash__(self) -> int:
-        return hash(self._key)
-
-    # Please keep the duplicated `isinstance` check
-    # in the six comparisons hereunder
-    # unless you find a way to avoid adding overhead function calls.
-    def __lt__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key < other._key
-
-    def __le__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key <= other._key
-
-    def __eq__(self, other: object) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key == other._key
-
-    def __ge__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key >= other._key
-
-    def __gt__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key > other._key
-
-    def __ne__(self, other: object) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key != other._key
-
-
-# Deliberately not anchored to the start and end of the string, to make it
-# easier for 3rd party code to reuse
-_VERSION_PATTERN = r"""
-    v?
-    (?:
-        (?:(?P<epoch>[0-9]+)!)?                           # epoch
-        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
-        (?P<pre>                                          # pre-release
-            [-_\.]?
-            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
-            [-_\.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_\.]?
-                (?P<post_l>post|rev|r)
-                [-_\.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_\.]?
-            (?P<dev_l>dev)
-            [-_\.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
-"""
-
-VERSION_PATTERN = _VERSION_PATTERN
-"""
-A string containing the regular expression used to match a valid version.
-
-The pattern is not anchored at either end, and is intended for embedding in larger
-expressions (for example, matching a version number as part of a file name). The
-regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
-flags set.
-
-:meta hide-value:
-"""
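As the docstring notes, the pattern must be embedded with its flags intact; a minimal sketch, assuming the public ``packaging`` distribution (where this vendored module lives on) and a made-up ``mypkg`` filename:

    import re

    from packaging.version import VERSION_PATTERN

    # VERSION_PATTERN carries no anchors, so supply your own; VERBOSE and
    # IGNORECASE are required for the pattern to compile as intended.
    stem = re.compile(
        r"^mypkg-(?P<ver>" + VERSION_PATTERN + r")$",
        re.VERBOSE | re.IGNORECASE,
    )
    match = stem.match("mypkg-1.0rc2")
    assert match is not None and match.group("ver") == "1.0rc2"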
-
-
-class Version(_BaseVersion):
-    """This class abstracts handling of a project's versions.
-
-    A :class:`Version` instance is comparison aware and can be compared and
-    sorted using the standard Python interfaces.
-
-    >>> v1 = Version("1.0a5")
-    >>> v2 = Version("1.0")
-    >>> v1
-    <Version('1.0a5')>
-    >>> v2
-    <Version('1.0')>
-    >>> v1 < v2
-    True
-    >>> v1 == v2
-    False
-    >>> v1 > v2
-    False
-    >>> v1 >= v2
-    False
-    >>> v1 <= v2
-    True
-    """
-
-    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
-    _key: CmpKey
-
-    def __init__(self, version: str) -> None:
-        """Initialize a Version object.
-
-        :param version:
-            The string representation of a version which will be parsed and normalized
-            before use.
-        :raises InvalidVersion:
-            If the ``version`` does not conform to PEP 440 in any way then this
-            exception will be raised.
-        """
-
-        # Validate the version and parse it into pieces
-        match = self._regex.search(version)
-        if not match:
-            raise InvalidVersion(f"Invalid version: '{version}'")
-
-        # Store the parsed out pieces of the version
-        self._version = _Version(
-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
-            release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
-            post=_parse_letter_version(
-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
-            ),
-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
-            local=_parse_local_version(match.group("local")),
-        )
-
-        # Generate a key which will be used for sorting
-        self._key = _cmpkey(
-            self._version.epoch,
-            self._version.release,
-            self._version.pre,
-            self._version.post,
-            self._version.dev,
-            self._version.local,
-        )
-
-    def __repr__(self) -> str:
-        """A representation of the Version that shows all internal state.
-
-        >>> Version('1.0.0')
-        <Version('1.0.0')>
-        """
-        return f"<Version('{self}')>"
-
-    def __str__(self) -> str:
-        """A string representation of the version that can be rounded-tripped.
-
-        >>> str(Version("1.0a5"))
-        '1.0a5'
-        """
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        # Pre-release
-        if self.pre is not None:
-            parts.append("".join(str(x) for x in self.pre))
-
-        # Post-release
-        if self.post is not None:
-            parts.append(f".post{self.post}")
-
-        # Development release
-        if self.dev is not None:
-            parts.append(f".dev{self.dev}")
-
-        # Local version segment
-        if self.local is not None:
-            parts.append(f"+{self.local}")
-
-        return "".join(parts)
-
-    @property
-    def epoch(self) -> int:
-        """The epoch of the version.
-
-        >>> Version("2.0.0").epoch
-        0
-        >>> Version("1!2.0.0").epoch
-        1
-        """
-        return self._version.epoch
-
-    @property
-    def release(self) -> Tuple[int, ...]:
-        """The components of the "release" segment of the version.
-
-        >>> Version("1.2.3").release
-        (1, 2, 3)
-        >>> Version("2.0.0").release
-        (2, 0, 0)
-        >>> Version("1!2.0.0.post0").release
-        (2, 0, 0)
-
-        Includes trailing zeroes but not the epoch or any pre-release / development /
-        post-release suffixes.
-        """
-        return self._version.release
-
-    @property
-    def pre(self) -> Optional[Tuple[str, int]]:
-        """The pre-release segment of the version.
-
-        >>> print(Version("1.2.3").pre)
-        None
-        >>> Version("1.2.3a1").pre
-        ('a', 1)
-        >>> Version("1.2.3b1").pre
-        ('b', 1)
-        >>> Version("1.2.3rc1").pre
-        ('rc', 1)
-        """
-        return self._version.pre
-
-    @property
-    def post(self) -> Optional[int]:
-        """The post-release number of the version.
-
-        >>> print(Version("1.2.3").post)
-        None
-        >>> Version("1.2.3.post1").post
-        1
-        """
-        return self._version.post[1] if self._version.post else None
-
-    @property
-    def dev(self) -> Optional[int]:
-        """The development number of the version.
-
-        >>> print(Version("1.2.3").dev)
-        None
-        >>> Version("1.2.3.dev1").dev
-        1
-        """
-        return self._version.dev[1] if self._version.dev else None
-
-    @property
-    def local(self) -> Optional[str]:
-        """The local version segment of the version.
-
-        >>> print(Version("1.2.3").local)
-        None
-        >>> Version("1.2.3+abc").local
-        'abc'
-        """
-        if self._version.local:
-            return ".".join(str(x) for x in self._version.local)
-        else:
-            return None
-
-    @property
-    def public(self) -> str:
-        """The public portion of the version.
-
-        >>> Version("1.2.3").public
-        '1.2.3'
-        >>> Version("1.2.3+abc").public
-        '1.2.3'
-        >>> Version("1.2.3+abc.dev1").public
-        '1.2.3'
-        """
-        return str(self).split("+", 1)[0]
-
-    @property
-    def base_version(self) -> str:
-        """The "base version" of the version.
-
-        >>> Version("1.2.3").base_version
-        '1.2.3'
-        >>> Version("1.2.3+abc").base_version
-        '1.2.3'
-        >>> Version("1!1.2.3+abc.dev1").base_version
-        '1!1.2.3'
-
-        The "base version" is the public version of the project without any pre or post
-        release markers.
-        """
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        return "".join(parts)
-
-    @property
-    def is_prerelease(self) -> bool:
-        """Whether this version is a pre-release.
-
-        >>> Version("1.2.3").is_prerelease
-        False
-        >>> Version("1.2.3a1").is_prerelease
-        True
-        >>> Version("1.2.3b1").is_prerelease
-        True
-        >>> Version("1.2.3rc1").is_prerelease
-        True
-        >>> Version("1.2.3dev1").is_prerelease
-        True
-        """
-        return self.dev is not None or self.pre is not None
-
-    @property
-    def is_postrelease(self) -> bool:
-        """Whether this version is a post-release.
-
-        >>> Version("1.2.3").is_postrelease
-        False
-        >>> Version("1.2.3.post1").is_postrelease
-        True
-        """
-        return self.post is not None
-
-    @property
-    def is_devrelease(self) -> bool:
-        """Whether this version is a development release.
-
-        >>> Version("1.2.3").is_devrelease
-        False
-        >>> Version("1.2.3.dev1").is_devrelease
-        True
-        """
-        return self.dev is not None
-
-    @property
-    def major(self) -> int:
-        """The first item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").major
-        1
-        """
-        return self.release[0] if len(self.release) >= 1 else 0
-
-    @property
-    def minor(self) -> int:
-        """The second item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").minor
-        2
-        >>> Version("1").minor
-        0
-        """
-        return self.release[1] if len(self.release) >= 2 else 0
-
-    @property
-    def micro(self) -> int:
-        """The third item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").micro
-        3
-        >>> Version("1").micro
-        0
-        """
-        return self.release[2] if len(self.release) >= 3 else 0
-
-
-def _parse_letter_version(
-    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
-) -> Optional[Tuple[str, int]]:
-
-    if letter:
-        # We consider there to be an implicit 0 in a pre-release if there is
-        # not a numeral associated with it.
-        if number is None:
-            number = 0
-
-        # We normalize any letters to their lower case form
-        letter = letter.lower()
-
-        # We consider some words to be alternate spellings of other words and
-        # in those cases we want to normalize the spellings to our preferred
-        # spelling.
-        if letter == "alpha":
-            letter = "a"
-        elif letter == "beta":
-            letter = "b"
-        elif letter in ["c", "pre", "preview"]:
-            letter = "rc"
-        elif letter in ["rev", "r"]:
-            letter = "post"
-
-        return letter, int(number)
-    if not letter and number:
-        # We assume if we are given a number, but we are not given a letter
-        # then this is using the implicit post release syntax (e.g. 1.0-1)
-        letter = "post"
-
-        return letter, int(number)
-
-    return None
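The normalizations above are observable through the public API; a small sketch using the ``packaging`` distribution that this vendored copy defers to:

    from packaging.version import Version

    # Alternate spellings collapse to the canonical forms chosen above.
    assert str(Version("1.0alpha1")) == "1.0a1"
    assert str(Version("1.0preview2")) == "1.0rc2"
    assert str(Version("1.0-rev3")) == "1.0.post3"
    assert Version("1.0-1") == Version("1.0.post1")  # implicit post release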
-
-
-_local_version_separators = re.compile(r"[\._-]")
-
-
-def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
-    """
-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
-    """
-    if local is not None:
-        return tuple(
-            part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_separators.split(local)
-        )
-    return None
-
-
-def _cmpkey(
-    epoch: int,
-    release: Tuple[int, ...],
-    pre: Optional[Tuple[str, int]],
-    post: Optional[Tuple[str, int]],
-    dev: Optional[Tuple[str, int]],
-    local: Optional[LocalType],
-) -> CmpKey:
-
-    # When we compare a release version, we want to compare it with all of the
-    # trailing zeros removed. So we'll reverse the list, drop all the now-leading
-    # zeros until we come to something non-zero, then re-reverse the rest back
-    # into the correct order, and use that tuple as our sorting key.
-    _release = tuple(
-        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
-    )
-
-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
-    # We'll do this by abusing the pre segment, but we _only_ want to do this
-    # if there is not a pre or a post segment. If we have one of those then
-    # the normal sorting rules will handle this case correctly.
-    if pre is None and post is None and dev is not None:
-        _pre: CmpPrePostDevType = NegativeInfinity
-    # Versions without a pre-release (except as noted above) should sort after
-    # those with one.
-    elif pre is None:
-        _pre = Infinity
-    else:
-        _pre = pre
-
-    # Versions without a post segment should sort before those with one.
-    if post is None:
-        _post: CmpPrePostDevType = NegativeInfinity
-
-    else:
-        _post = post
-
-    # Versions without a development segment should sort after those with one.
-    if dev is None:
-        _dev: CmpPrePostDevType = Infinity
-
-    else:
-        _dev = dev
-
-    if local is None:
-        # Versions without a local segment should sort before those with one.
-        _local: CmpLocalType = NegativeInfinity
-    else:
-        # Versions with a local segment need that segment parsed to implement
-        # the sorting rules in PEP440.
-        # - Alpha numeric segments sort before numeric segments
-        # - Alpha numeric segments sort lexicographically
-        # - Numeric segments sort numerically
-        # - Shorter versions sort before longer versions when the prefixes
-        #   match exactly
-        _local = tuple(
-            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
-        )
-
-    return epoch, _release, _pre, _post, _dev, _local
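Taken together, these rules produce the familiar PEP 440 total ordering; a quick sketch of the result, again via the public ``packaging`` package:

    from packaging.version import Version

    sample = ["1.0", "1.0.post1", "1.0a1", "1.0.dev0", "1.0+local", "1.0rc1"]
    # For a given release: dev sorts first, then pre-releases, the release
    # itself, its local variants, and finally the post releases.
    assert sorted(sample, key=Version) == [
        "1.0.dev0", "1.0a1", "1.0rc1", "1.0", "1.0+local", "1.0.post1"
    ]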
diff --git a/distutils/_vendor/ruff.toml b/distutils/_vendor/ruff.toml
deleted file mode 100644
index 00fee625a5..0000000000
--- a/distutils/_vendor/ruff.toml
+++ /dev/null
@@ -1 +0,0 @@
-exclude = ["*"]
diff --git a/distutils/dist.py b/distutils/dist.py
index 60edc5b514..6191ae78ab 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -15,7 +15,6 @@
 from email import message_from_file
 
 from ._log import log
-from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
 from .debug import DEBUG
 from .errors import (
     DistutilsArgError,
@@ -26,6 +25,13 @@
 from .fancy_getopt import FancyGetopt, translate_longopt
 from .util import check_environ, rfc822_escape, strtobool
 
+try:
+    from packaging.utils import canonicalize_name, canonicalize_version
+except ImportError as ex:  # pragma: no cover
+    raise ImportError(
+        "Distutils should be distributed alongside setuptools and its dependencies"
+    ) from ex
+
 # Regex to define acceptable Distutils command names.  This is not *quite*
 # the same as a Python NAME -- I don't allow leading underscores.  The fact
 # that they're very similar is no coincidence; the default naming scheme is
diff --git a/pyproject.toml b/pyproject.toml
index 2d8d47fd9c..c9b358b544 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,7 +17,7 @@ classifiers = [
 	"Programming Language :: Python :: 3 :: Only",
 ]
 requires-python = ">=3.8"
-dependencies = []
+dependencies = ["packaging"]  # setuptools dependency
 dynamic = ["version"]
 
 [project.urls]

From defa7257bcb835f01792aa60bf84e3e6ca00207a Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 05:09:09 -0400
Subject: [PATCH 1053/1761] Avoid issues with _build_ext being Any (#4599)

---
 setuptools/command/build_ext.py    | 24 +++++++++++++-----------
 setuptools/tests/test_build_ext.py |  3 +--
 2 files changed, 14 insertions(+), 13 deletions(-)

diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index d44d7b8ae1..fe3e7a1c94 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -6,7 +6,7 @@
 from importlib.machinery import EXTENSION_SUFFIXES
 from importlib.util import cache_from_source as _compiled_file_name
 from pathlib import Path
-from typing import Iterator
+from typing import TYPE_CHECKING, Iterator
 
 from setuptools.dist import Distribution
 from setuptools.errors import BaseError
@@ -16,17 +16,19 @@
 from distutils.ccompiler import new_compiler
 from distutils.sysconfig import customize_compiler, get_config_var
 
-try:
-    # Attempt to use Cython for building extensions, if available
-    from Cython.Distutils.build_ext import (  # type: ignore[import-not-found] # Cython not installed on CI tests
-        build_ext as _build_ext,
-    )
-
-    # Additionally, assert that the compiler module will load
-    # also. Ref #1229.
-    __import__('Cython.Compiler.Main')
-except ImportError:
+if TYPE_CHECKING:
+    # Cython not installed on CI tests, causing _build_ext to be `Any`
     from distutils.command.build_ext import build_ext as _build_ext
+else:
+    try:
+        # Attempt to use Cython for building extensions, if available
+        from Cython.Distutils.build_ext import build_ext as _build_ext
+
+        # Additionally, assert that the compiler module will load
+        # also. Ref #1229.
+        __import__('Cython.Compiler.Main')
+    except ImportError:
+        from distutils.command.build_ext import build_ext as _build_ext
 
 # make sure _config_vars is initialized
 get_config_var("LDSHARED")
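The pattern above generalizes to any optional accelerated dependency: without the ``TYPE_CHECKING`` split, a type checker that cannot resolve the optional import infers ``Any`` for the name, and every subclass of it silently loses checking. A minimal sketch of the same shape, using ``ujson`` purely as an illustrative optional package:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # The checker always resolves the well-typed stdlib module...
        import json
    else:
        try:
            # ...while at runtime a faster drop-in may be preferred.
            import ujson as json  # illustrative optional dependency
        except ImportError:
            import json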
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index 814fbd86aa..dab8b41cc9 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -183,12 +183,11 @@ def get_build_ext_cmd(self, optional: bool, **opts):
             "eggs.c": "#include missingheader.h\n",
             ".build": {"lib": {}, "tmp": {}},
         }
-        path.build(files)
+        path.build(files)  # type: ignore[arg-type] # jaraco/path#232
         extension = Extension('spam.eggs', ['eggs.c'], optional=optional)
         dist = Distribution(dict(ext_modules=[extension]))
         dist.script_name = 'setup.py'
         cmd = build_ext(dist)
-        # TODO: False-positive [attr-defined], raise upstream
         vars(cmd).update(build_lib=".build/lib", build_temp=".build/tmp", **opts)
         cmd.ensure_finalized()
         return cmd

From d52ca0492f4c6bac24c90a5e9dc759243ceb50a8 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 05:09:47 -0400
Subject: [PATCH 1054/1761] `pkg_resources`: use
 `_typeshed.importlib.LoaderProtocol` (#4597)

---
 pkg_resources/__init__.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 76aa5e77ba..0e3a9ca2cd 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -100,6 +100,7 @@
 
 if TYPE_CHECKING:
     from _typeshed import BytesPath, StrOrBytesPath, StrPath
+    from _typeshed.importlib import LoaderProtocol
     from typing_extensions import Self, TypeAlias
 
 warnings.warn(
@@ -131,11 +132,6 @@
 )
 
 
-# Use _typeshed.importlib.LoaderProtocol once available https://github.com/python/typeshed/pull/11890
-class _LoaderProtocol(Protocol):
-    def load_module(self, fullname: str, /) -> types.ModuleType: ...
-
-
 class _ZipLoaderModule(Protocol):
     __loader__: zipimport.zipimporter
 
@@ -1644,7 +1640,7 @@ class NullProvider:
 
     egg_name: str | None = None
     egg_info: str | None = None
-    loader: _LoaderProtocol | None = None
+    loader: LoaderProtocol | None = None
 
     def __init__(self, module: _ModuleLike):
         self.loader = getattr(module, '__loader__', None)
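``_typeshed.importlib.LoaderProtocol`` exists only for type checkers, but structurally it asks for the same single method the removed local protocol declared; a rough sketch of a conforming object:

    import types

    class DummyLoader:
        # Anything with a load_module() of this shape satisfies the
        # protocol, and is therefore acceptable as NullProvider.loader.
        def load_module(self, fullname: str) -> types.ModuleType:
            return types.ModuleType(fullname)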

From 9aec16bac638f2cac742e70abdd6b99c680eae73 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 05:11:55 -0400
Subject: [PATCH 1055/1761] Make get_ext_filename typesafe (#4592)

---
 newsfragments/4592.misc.rst     |  1 +
 setuptools/command/build_ext.py | 18 +++++++++++-------
 2 files changed, 12 insertions(+), 7 deletions(-)
 create mode 100644 newsfragments/4592.misc.rst

diff --git a/newsfragments/4592.misc.rst b/newsfragments/4592.misc.rst
new file mode 100644
index 0000000000..79d36a9d82
--- /dev/null
+++ b/newsfragments/4592.misc.rst
@@ -0,0 +1 @@
+If somehow the ``EXT_SUFFIX`` configuration variable and the ``SETUPTOOLS_EXT_SUFFIX`` environment variable are both missing, ``setuptools.command.build_ext.get_ext_filename`` will now raise an `OSError` instead of a `TypeError` -- by :user:`Avasam`
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index fe3e7a1c94..e2a88ce218 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -157,21 +157,25 @@ def _get_output_mapping(self) -> Iterator[tuple[str, str]]:
                 output_cache = _compiled_file_name(regular_stub, optimization=opt)
                 yield (output_cache, inplace_cache)
 
-    def get_ext_filename(self, fullname):
+    def get_ext_filename(self, fullname: str) -> str:
         so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX')
         if so_ext:
             filename = os.path.join(*fullname.split('.')) + so_ext
         else:
             filename = _build_ext.get_ext_filename(self, fullname)
-            so_ext = get_config_var('EXT_SUFFIX')
+            ext_suffix = get_config_var('EXT_SUFFIX')
+            if not isinstance(ext_suffix, str):
+                raise OSError(
+                    "Configuration variable EXT_SUFFIX not found for this platform "
+                    + "and environment variable SETUPTOOLS_EXT_SUFFIX is missing"
+                )
+            so_ext = ext_suffix
 
         if fullname in self.ext_map:
             ext = self.ext_map[fullname]
-            use_abi3 = ext.py_limited_api and get_abi3_suffix()
-            if use_abi3:
-                filename = filename[: -len(so_ext)]
-                so_ext = get_abi3_suffix()
-                filename = filename + so_ext
+            abi3_suffix = get_abi3_suffix()
+            if ext.py_limited_api and abi3_suffix:  # Use abi3
+                filename = filename[: -len(so_ext)] + abi3_suffix
             if isinstance(ext, Library):
                 fn, ext = os.path.splitext(filename)
                 return self.shlib_compiler.library_filename(fn, libtype)
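Restated outside the command class (a hand-written sketch, not the patched method itself), the suffix lookup now fails loudly instead of concatenating ``None``:

    import os
    from distutils.sysconfig import get_config_var

    def ext_suffix() -> str:
        # The environment variable wins, e.g. when cross-compiling.
        so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX')
        if so_ext:
            return so_ext
        suffix = get_config_var('EXT_SUFFIX')
        if not isinstance(suffix, str):
            raise OSError("EXT_SUFFIX missing and SETUPTOOLS_EXT_SUFFIX unset")
        return suffix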

From e30a556008ff0f4181a0ad00442647fa5a4927f1 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 05:37:33 -0400
Subject: [PATCH 1056/1761] `pkg_resources` fully type all collection
 attributes (#4598)

---
 newsfragments/4598.feature.rst |  1 +
 pkg_resources/__init__.py      | 54 +++++++++++++++++-----------------
 2 files changed, 28 insertions(+), 27 deletions(-)
 create mode 100644 newsfragments/4598.feature.rst

diff --git a/newsfragments/4598.feature.rst b/newsfragments/4598.feature.rst
new file mode 100644
index 0000000000..ee2ea40dfe
--- /dev/null
+++ b/newsfragments/4598.feature.rst
@@ -0,0 +1 @@
+Fully typed all collection attributes in ``pkg_resources`` -- by :user:`Avasam`
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 0e3a9ca2cd..a8ca8ab818 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -1,6 +1,3 @@
-# TODO: Add Generic type annotations to initialized collections.
-# For now we'd simply use implicit Any/Unknown which would add redundant annotations
-# mypy: disable-error-code="var-annotated"
 """
 Package resource API
 --------------------
@@ -621,13 +618,13 @@ def resource_listdir(self, resource_name: str) -> list[str]:
 class WorkingSet:
     """A collection of active distributions on sys.path (or a similar list)"""
 
-    def __init__(self, entries: Iterable[str] | None = None):
+    def __init__(self, entries: Iterable[str] | None = None) -> None:
         """Create working set from list of path entries (default=sys.path)"""
         self.entries: list[str] = []
-        self.entry_keys = {}
-        self.by_key = {}
-        self.normalized_to_canonical_keys = {}
-        self.callbacks = []
+        self.entry_keys: dict[str | None, list[str]] = {}
+        self.by_key: dict[str, Distribution] = {}
+        self.normalized_to_canonical_keys: dict[str, str] = {}
+        self.callbacks: list[Callable[[Distribution], object]] = []
 
         if entries is None:
             entries = sys.path
@@ -864,14 +861,16 @@ def resolve(
         # set of processed requirements
         processed = set()
         # key -> dist
-        best = {}
-        to_activate = []
+        best: dict[str, Distribution] = {}
+        to_activate: list[Distribution] = []
 
         req_extras = _ReqExtras()
 
         # Mapping of requirement to set of distributions that required it;
         # useful for reporting info about conflicts.
-        required_by = collections.defaultdict(set)
+        required_by: collections.defaultdict[Requirement, set[str]] = (
+            collections.defaultdict(set)
+        )
 
         while requirements:
             # process dependencies breadth-first
@@ -1128,7 +1127,7 @@ def __init__(
         search_path: Iterable[str] | None = None,
         platform: str | None = get_supported_platform(),
         python: str | None = PY_MAJOR,
-    ):
+    ) -> None:
         """Snapshot distributions available on a search path
 
         Any distributions found on `search_path` are added to the environment.
@@ -1145,7 +1144,7 @@ def __init__(
         wish to map *all* distributions, not just those compatible with the
         running platform or Python version.
         """
-        self._distmap = {}
+        self._distmap: dict[str, list[Distribution]] = {}
         self.platform = platform
         self.python = python
         self.scan(search_path)
@@ -1342,8 +1341,9 @@ class ResourceManager:
 
     extraction_path: str | None = None
 
-    def __init__(self):
-        self.cached_files = {}
+    def __init__(self) -> None:
+        # acts like a set
+        self.cached_files: dict[str, Literal[True]] = {}
 
     def resource_exists(
         self, package_or_requirement: _PkgReqType, resource_name: str
@@ -1642,7 +1642,7 @@ class NullProvider:
     egg_info: str | None = None
     loader: LoaderProtocol | None = None
 
-    def __init__(self, module: _ModuleLike):
+    def __init__(self, module: _ModuleLike) -> None:
         self.loader = getattr(module, '__loader__', None)
         self.module_path = os.path.dirname(getattr(module, '__file__', ''))
 
@@ -1859,7 +1859,7 @@ def _parents(path):
 class EggProvider(NullProvider):
     """Provider based on a virtual filesystem"""
 
-    def __init__(self, module: _ModuleLike):
+    def __init__(self, module: _ModuleLike) -> None:
         super().__init__(module)
         self._setup_prefix()
 
@@ -1925,7 +1925,7 @@ def _get(self, path) -> bytes:
     def _listdir(self, path):
         return []
 
-    def __init__(self):
+    def __init__(self) -> None:
         pass
 
 
@@ -1991,7 +1991,7 @@ class ZipProvider(EggProvider):
     # ZipProvider's loader should always be a zipimporter or equivalent
     loader: zipimport.zipimporter
 
-    def __init__(self, module: _ZipLoaderModule):
+    def __init__(self, module: _ZipLoaderModule) -> None:
         super().__init__(module)
         self.zip_pre = self.loader.archive + os.sep
 
@@ -2170,7 +2170,7 @@ class FileMetadata(EmptyProvider):
     the provided location.
     """
 
-    def __init__(self, path: StrPath):
+    def __init__(self, path: StrPath) -> None:
         self.path = path
 
     def _get_metadata_path(self, name):
@@ -2219,7 +2219,7 @@ class PathMetadata(DefaultProvider):
         dist = Distribution.from_filename(egg_path, metadata=metadata)
     """
 
-    def __init__(self, path: str, egg_info: str):
+    def __init__(self, path: str, egg_info: str) -> None:
         self.module_path = path
         self.egg_info = egg_info
 
@@ -2227,7 +2227,7 @@ def __init__(self, path: str, egg_info: str):
 class EggMetadata(ZipProvider):
     """Metadata provider for .egg files"""
 
-    def __init__(self, importer: zipimport.zipimporter):
+    def __init__(self, importer: zipimport.zipimporter) -> None:
         """Create a metadata provider from a zipimporter"""
 
         self.zip_pre = importer.archive + os.sep
@@ -2354,7 +2354,7 @@ class NoDists:
     def __bool__(self):
         return False
 
-    def __call__(self, fullpath):
+    def __call__(self, fullpath: object):
         return iter(())
 
 
@@ -2704,7 +2704,7 @@ def __init__(
         attrs: Iterable[str] = (),
         extras: Iterable[str] = (),
         dist: Distribution | None = None,
-    ):
+    ) -> None:
         if not MODULE(module_name):
             raise ValueError("Invalid module name", module_name)
         self.name = name
@@ -2898,7 +2898,7 @@ def __init__(
         py_version: str | None = PY_MAJOR,
         platform: str | None = None,
         precedence: int = EGG_DIST,
-    ):
+    ) -> None:
         self.project_name = safe_name(project_name or 'Unknown')
         if version is not None:
             self._version = safe_version(version)
@@ -3451,7 +3451,7 @@ class Requirement(packaging.requirements.Requirement):
     # packaging.requirements.Requirement)
     extras: tuple[str, ...]  # type: ignore[assignment]
 
-    def __init__(self, requirement_string: str):
+    def __init__(self, requirement_string: str) -> None:
         """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
         super().__init__(requirement_string)
         self.unsafe_name = self.name
@@ -3554,7 +3554,7 @@ def split_sections(s: _NestedStr) -> Iterator[tuple[str | None, list[str]]]:
     header, they're returned in a first ``section`` of ``None``.
     """
     section = None
-    content = []
+    content: list[str] = []
     for line in yield_lines(s):
         if line.startswith("["):
             if line.endswith("]"):

From 5c829acbf55e1604df2291ab7c49374c8442805f Mon Sep 17 00:00:00 2001
From: Chris Barker 
Date: Tue, 27 Aug 2024 02:42:25 -0700
Subject: [PATCH 1057/1761] change VERSION to __version__ in the example
 (#4590)

Change VERSION to __version__ in the example.

Change "string, list or tuple" to "string" in the footnote about attr: it is only referenced by the dynamic version, and the version should only be a string (PEP 440).

---------

Co-authored-by: Anderson Bravalheri 
---
 docs/userguide/pyproject_config.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 749afe2344..4f60ad9324 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -192,7 +192,7 @@ corresponding entry is required in the ``tool.setuptools.dynamic`` table
    dynamic = ["version", "readme"]
    # ...
    [tool.setuptools.dynamic]
-   version = {attr = "my_package.VERSION"}
+   version = {attr = "my_package.__version__"}  # any module attribute compatible with ast.literal_eval
    readme = {file = ["README.rst", "USAGE.rst"]}
 
 In the ``dynamic`` table, the ``attr`` directive [#directives]_ will read an
@@ -280,7 +280,7 @@ not installed yet. You may also need to manually add the project directory to
    directive for ``tool.setuptools.dynamic.version``.
 
 .. [#attr] ``attr`` is meant to be used when the module attribute is statically
-   specified (e.g. as a string, list or tuple). As a rule of thumb, the
+   specified (e.g. as a string). As a rule of thumb, the
    attribute should be able to be parsed with :func:`ast.literal_eval`, and
    should not be modified or re-assigned.
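Concretely, an attribute that satisfies this rule of thumb is just a literal in the package (``my_package`` being the placeholder name used above):

    # my_package/__init__.py
    # A plain string literal: ast.literal_eval() can read it without
    # importing, and hence without executing, the package.
    __version__ = "1.2.3"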
 

From 59ec6f98fa5f4fb96b687a62dff079f18e67a42d Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 05:45:51 -0400
Subject: [PATCH 1058/1761] Made setuptools.package_index.Credential a
 NamedTuple (#4585)

---
 newsfragments/4585.feature.rst |  1 +
 setuptools/package_index.py    | 22 +++++++++++-----------
 2 files changed, 12 insertions(+), 11 deletions(-)
 create mode 100644 newsfragments/4585.feature.rst

diff --git a/newsfragments/4585.feature.rst b/newsfragments/4585.feature.rst
new file mode 100644
index 0000000000..566bca75f8
--- /dev/null
+++ b/newsfragments/4585.feature.rst
@@ -0,0 +1 @@
+Made ``setuptools.package_index.Credential`` a `typing.NamedTuple` -- by :user:`Avasam`
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index a66cbb2e61..1510e01934 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -18,6 +18,7 @@
 import urllib.request
 from fnmatch import translate
 from functools import wraps
+from typing import NamedTuple
 
 from more_itertools import unique_everseen
 
@@ -1001,21 +1002,20 @@ def _encode_auth(auth):
     return encoded.replace('\n', '')
 
 
-class Credential:
-    """
-    A username/password pair. Use like a namedtuple.
+class Credential(NamedTuple):
     """
+    A username/password pair.
 
-    def __init__(self, username, password):
-        self.username = username
-        self.password = password
+    Displayed as `username:password` when converted to a string.
+    >>> str(Credential('username', 'password'))
+    'username:password'
+    """
 
-    def __iter__(self):
-        yield self.username
-        yield self.password
+    username: str
+    password: str
 
-    def __str__(self):
-        return '%(username)s:%(password)s' % vars(self)
+    def __str__(self) -> str:
+        return f'{self.username}:{self.password}'
 
 
 class PyPIConfig(configparser.RawConfigParser):
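Besides the doctest, the ``NamedTuple`` keeps the iteration behaviour the hand-written ``__iter__`` used to provide; a quick sketch:

    from setuptools.package_index import Credential

    cred = Credential('alice', 's3cret')
    username, password = cred  # still unpacks like a plain tuple
    assert (username, password) == (cred.username, cred.password)
    assert str(cred) == 'alice:s3cret'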

From d12330dbfceb3c5c2185d2d6dbf6222a719ab2e6 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 06:07:12 -0400
Subject: [PATCH 1059/1761] Initial pyright config (#4192)

* Bump importlib_metadata in type tests

* New pyright specific workflow

* Add missing spaces in comparison

* Fix requirements

* Typo and fix cygwin

* Update .github/workflows/pyright.yml

* get_ext_filename doesn't need to be modified for this PR

---------

Co-authored-by: Anderson Bravalheri 
Co-authored-by: Anderson Bravalheri 
---
 .github/workflows/pyright.yml             | 74 +++++++++++++++++++++++
 pkg_resources/__init__.py                 | 12 ++++
 pkg_resources/tests/test_pkg_resources.py |  6 +-
 pkg_resources/tests/test_resources.py     |  8 +--
 pyproject.toml                            |  4 ++
 pyrightconfig.json                        | 32 ++++++++++
 setuptools/__init__.py                    |  3 +-
 setuptools/_reqs.py                       |  8 +--
 setuptools/command/build.py               |  6 ++
 setuptools/command/build_clib.py          |  4 +-
 setuptools/command/easy_install.py        |  6 +-
 setuptools/command/editable_wheel.py      |  4 +-
 setuptools/config/pyprojecttoml.py        |  8 ++-
 setuptools/monkey.py                      | 31 ++++++----
 setuptools/package_index.py               |  2 +-
 setuptools/sandbox.py                     |  2 +-
 setuptools/tests/test_config_discovery.py |  6 +-
 setuptools/tests/test_setuptools.py       |  2 +-
 18 files changed, 183 insertions(+), 35 deletions(-)
 create mode 100644 .github/workflows/pyright.yml
 create mode 100644 pyrightconfig.json

diff --git a/.github/workflows/pyright.yml b/.github/workflows/pyright.yml
new file mode 100644
index 0000000000..bb25f1ba82
--- /dev/null
+++ b/.github/workflows/pyright.yml
@@ -0,0 +1,74 @@
+# Split workflow file to not interfere with skeleton
+name: pyright
+
+on:
+  merge_group:
+  push:
+    branches-ignore:
+    # temporary GH branches relating to merge queues (jaraco/skeleton#93)
+    - gh-readonly-queue/**
+    tags:
+    # required if branches-ignore is supplied (jaraco/skeleton#103)
+    - '**'
+  pull_request:
+  workflow_dispatch:
+
+concurrency:
+  group: >-
+    ${{ github.workflow }}-
+    ${{ github.ref_type }}-
+    ${{ github.event.pull_request.number || github.sha }}
+  cancel-in-progress: true
+
+env:
+  # pin pyright version so a new version doesn't suddenly cause the CI to fail,
+  # until types-setuptools is removed from typeshed.
+  # For help with static-typing issues, or pyright update, ping @Avasam
+  PYRIGHT_VERSION: "1.1.377"
+
+  # Environment variable to support color support (jaraco/skeleton#66)
+  FORCE_COLOR: 1
+
+  # Suppress noisy pip warnings
+  PIP_DISABLE_PIP_VERSION_CHECK: 'true'
+  PIP_NO_PYTHON_VERSION_WARNING: 'true'
+  PIP_NO_WARN_SCRIPT_LOCATION: 'true'
+
+jobs:
+  pyright:
+    strategy:
+      # https://blog.jaraco.com/efficient-use-of-ci-resources/
+      matrix:
+        python:
+          - "3.8"
+          - "3.12"
+        platform:
+          - ubuntu-latest
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 10
+    steps:
+      - uses: actions/checkout@v4
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python }}
+          allow-prereleases: true
+      - name: Install typed dependencies
+        run: python -m pip install -e .[core,type]
+      - name: Inform how to run locally
+        run: |
+          echo 'To run this test locally with npm pre-installed, run:'
+          echo '> npx -y pyright@${{ env.PYRIGHT_VERSION }} --threads'
+          echo 'You can also instead install "Pyright for Python" which will install npm for you:'
+          if [ "$PYRIGHT_VERSION" == 'latest' ]; then
+            echo '> pip install -U pyright'
+          else
+            echo '> pip install pyright==${{ env.PYRIGHT_VERSION }}'
+          fi
+          echo 'pyright --threads'
+        shell: bash
+      - name: Run pyright
+        uses: jakebailey/pyright-action@v2
+        with:
+          version: ${{ env.PYRIGHT_VERSION }}
+          extra-args: --threads
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index a8ca8ab818..ab46d1dc42 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -561,24 +561,30 @@ def get_entry_info(dist: _EPDistType, group: str, name: str) -> EntryPoint | Non
 class IMetadataProvider(Protocol):
     def has_metadata(self, name: str) -> bool:
         """Does the package's distribution contain the named metadata?"""
+        ...
 
     def get_metadata(self, name: str) -> str:
         """The named metadata resource as a string"""
+        ...
 
     def get_metadata_lines(self, name: str) -> Iterator[str]:
         """Yield named metadata resource as list of non-blank non-comment lines
 
         Leading and trailing whitespace is stripped from each line, and lines
         with ``#`` as the first non-blank character are omitted."""
+        ...
 
     def metadata_isdir(self, name: str) -> bool:
         """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
+        ...
 
     def metadata_listdir(self, name: str) -> list[str]:
         """List of metadata names in the directory (like ``os.listdir()``)"""
+        ...
 
     def run_script(self, script_name: str, namespace: dict[str, Any]) -> None:
         """Execute the named script in the supplied namespace dictionary"""
+        ...
 
 
 class IResourceProvider(IMetadataProvider, Protocol):
@@ -590,6 +596,7 @@ def get_resource_filename(
         """Return a true filesystem path for `resource_name`
 
         `manager` must be a ``ResourceManager``"""
+        ...
 
     def get_resource_stream(
         self, manager: ResourceManager, resource_name: str
@@ -597,6 +604,7 @@ def get_resource_stream(
         """Return a readable file-like object for `resource_name`
 
         `manager` must be a ``ResourceManager``"""
+        ...
 
     def get_resource_string(
         self, manager: ResourceManager, resource_name: str
@@ -604,15 +612,19 @@ def get_resource_string(
         """Return the contents of `resource_name` as :obj:`bytes`
 
         `manager` must be a ``ResourceManager``"""
+        ...
 
     def has_resource(self, resource_name: str) -> bool:
         """Does the package contain the named resource?"""
+        ...
 
     def resource_isdir(self, resource_name: str) -> bool:
         """Is the named resource a directory?  (like ``os.path.isdir()``)"""
+        ...
 
     def resource_listdir(self, resource_name: str) -> list[str]:
         """List of resource names in the directory (like ``os.listdir()``)"""
+        ...
 
 
 class WorkingSet:
diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 023adf60b0..18adb3c9d2 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -70,7 +70,7 @@ def teardown_class(cls):
             finalizer()
 
     def test_resource_listdir(self):
-        import mod
+        import mod  # pyright: ignore[reportMissingImports] # Temporary package for test
 
         zp = pkg_resources.ZipProvider(mod)
 
@@ -84,7 +84,7 @@ def test_resource_listdir(self):
         assert zp.resource_listdir('nonexistent') == []
         assert zp.resource_listdir('nonexistent/') == []
 
-        import mod2
+        import mod2  # pyright: ignore[reportMissingImports] # Temporary package for test
 
         zp2 = pkg_resources.ZipProvider(mod2)
 
@@ -100,7 +100,7 @@ def test_resource_filename_rewrites_on_change(self):
         same size and modification time, it should not be overwritten on a
         subsequent call to get_resource_filename.
         """
-        import mod
+        import mod  # pyright: ignore[reportMissingImports] # Temporary package for test
 
         manager = pkg_resources.ResourceManager()
         zp = pkg_resources.ZipProvider(mod)
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index 3b67296952..f5e793fb90 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -817,11 +817,11 @@ def test_two_levels_deep(self, symlinked_tmpdir):
             (pkg1 / '__init__.py').write_text(self.ns_str, encoding='utf-8')
             (pkg2 / '__init__.py').write_text(self.ns_str, encoding='utf-8')
         with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"):
-            import pkg1
+            import pkg1  # pyright: ignore[reportMissingImports] # Temporary package for test
         assert "pkg1" in pkg_resources._namespace_packages
         # attempt to import pkg2 from site-pkgs2
         with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"):
-            import pkg1.pkg2
+            import pkg1.pkg2  # pyright: ignore[reportMissingImports] # Temporary package for test
         # check the _namespace_packages dict
         assert "pkg1.pkg2" in pkg_resources._namespace_packages
         assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"]
@@ -862,8 +862,8 @@ def test_path_order(self, symlinked_tmpdir):
             (subpkg / '__init__.py').write_text(vers_str % number, encoding='utf-8')
 
         with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"):
-            import nspkg
-            import nspkg.subpkg
+            import nspkg  # pyright: ignore[reportMissingImports] # Temporary package for test
+            import nspkg.subpkg  # pyright: ignore[reportMissingImports] # Temporary package for test
         expected = [str(site.realpath() / 'nspkg') for site in site_dirs]
         assert nspkg.__path__ == expected
         assert nspkg.subpkg.__version__ == 1
diff --git a/pyproject.toml b/pyproject.toml
index ced13342d2..fbcd4b48ef 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -131,6 +131,10 @@ type = [
 	# until types-setuptools is removed from typeshed.
 	# For help with static-typing issues, or mypy update, ping @Avasam 
 	"mypy==1.11.*",
+	# Typing fixes in versions newer than we require at runtime
+	"importlib_metadata>=7.0.2; python_version < '3.10'",
+	# Imported unconditionally in tools/finalize.py
+	'jaraco.develop >= 7.21; sys_platform != "cygwin"',
 ]
 
 
diff --git a/pyrightconfig.json b/pyrightconfig.json
new file mode 100644
index 0000000000..cd04c33371
--- /dev/null
+++ b/pyrightconfig.json
@@ -0,0 +1,32 @@
+{
+	"$schema": "https://raw.githubusercontent.com/microsoft/pyright/main/packages/vscode-pyright/schemas/pyrightconfig.schema.json",
+	"exclude": [
+		"build",
+		".tox",
+		".eggs",
+		"**/_vendor", // Vendored
+		"setuptools/_distutils", // Vendored
+		"setuptools/config/_validate_pyproject/**", // Auto-generated
+	],
+	// Our testing setup doesn't allow passing CLI arguments, so local devs have to set this manually.
+	// "pythonVersion": "3.8",
+	// For now we don't mind if mypy's `type: ignore` comments accidentally suppresses pyright issues
+	"enableTypeIgnoreComments": true,
+	"typeCheckingMode": "basic",
+	// Too many issues caused by dynamic patching, still worth fixing when we can
+	"reportAttributeAccessIssue": "warning",
+	// Fails on Python 3.12 due to missing distutils and on cygwin CI tests
+	"reportAssignmentType": "warning",
+	"reportMissingImports": "warning",
+	"reportOptionalCall": "warning",
+	// FIXME: A handful of reportOperatorIssue spread throughout the codebase
+	"reportOperatorIssue": "warning",
+	// Deferred initialization (initialize_options/finalize_options) causes many "potentially None" issues
+	// TODO: Fix with type-guards or by changing how it's initialized
+	"reportArgumentType": "warning", // A lot of these are caused by jaraco.path.build's spec argument not being a Mapping https://github.com/jaraco/jaraco.path/pull/3
+	"reportCallIssue": "warning",
+	"reportGeneralTypeIssues": "warning",
+	"reportOptionalIterable": "warning",
+	"reportOptionalMemberAccess": "warning",
+	"reportOptionalOperand": "warning",
+}
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 1c39fd9dab..ab373c51d6 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -12,6 +12,7 @@
 import re
 import sys
 from abc import abstractmethod
+from collections.abc import Mapping
 from typing import TYPE_CHECKING, TypeVar, overload
 
 sys.path.extend(((vendor_path := os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setuptools', '_vendor')) not in sys.path) * [vendor_path])  # fmt: skip
@@ -59,7 +60,7 @@ class MinimalDistribution(distutils.core.Distribution):
         fetch_build_eggs interface.
         """
 
-        def __init__(self, attrs):
+        def __init__(self, attrs: Mapping[str, object]):
             _incl = 'dependency_links', 'setup_requires'
             filtered = {k: attrs[k] for k in set(_incl) & set(attrs)}
             super().__init__(filtered)
diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py
index 2d09244b43..71ea23dea9 100644
--- a/setuptools/_reqs.py
+++ b/setuptools/_reqs.py
@@ -28,15 +28,13 @@ def parse_strings(strs: _StrOrIter) -> Iterator[str]:
     return text.join_continuation(map(text.drop_comment, text.yield_lines(strs)))
 
 
+# These overloads are only needed because of a mypy false-positive; pyright gets it right
+# https://github.com/python/mypy/issues/3737
 @overload
 def parse(strs: _StrOrIter) -> Iterator[Requirement]: ...
-
-
 @overload
 def parse(strs: _StrOrIter, parser: Callable[[str], _T]) -> Iterator[_T]: ...
-
-
-def parse(strs, parser=parse_req):
+def parse(strs: _StrOrIter, parser: Callable[[str], _T] = parse_req) -> Iterator[_T]:  # type: ignore[assignment]
     """
     Replacement for ``pkg_resources.parse_requirements`` that uses ``packaging``.
     """
diff --git a/setuptools/command/build.py b/setuptools/command/build.py
index 0c5e544804..f60fcbda15 100644
--- a/setuptools/command/build.py
+++ b/setuptools/command/build.py
@@ -87,12 +87,15 @@ def finalize_options(self):
 
     def initialize_options(self):
         """(Required by the original :class:`setuptools.Command` interface)"""
+        ...
 
     def finalize_options(self):
         """(Required by the original :class:`setuptools.Command` interface)"""
+        ...
 
     def run(self):
         """(Required by the original :class:`setuptools.Command` interface)"""
+        ...
 
     def get_source_files(self) -> list[str]:
         """
@@ -104,6 +107,7 @@ def get_source_files(self) -> list[str]:
         with all the files necessary to build the distribution.
         All files should be strings relative to the project root directory.
         """
+        ...
 
     def get_outputs(self) -> list[str]:
         """
@@ -117,6 +121,7 @@ def get_outputs(self) -> list[str]:
            in ``get_output_mapping()`` plus files that are generated during the build
            and don't correspond to any source file already present in the project.
         """
+        ...
 
     def get_output_mapping(self) -> dict[str, str]:
         """
@@ -127,3 +132,4 @@ def get_output_mapping(self) -> dict[str, str]:
         Destination files should be strings in the form of
         ``"{build_lib}/destination/file/path"``.
         """
+        ...
diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py
index 9db57ac8a2..d532762ebe 100644
--- a/setuptools/command/build_clib.py
+++ b/setuptools/command/build_clib.py
@@ -5,7 +5,9 @@
 from distutils.errors import DistutilsSetupError
 
 try:
-    from distutils._modified import newer_pairwise_group
+    from distutils._modified import (  # pyright: ignore[reportMissingImports]
+        newer_pairwise_group,
+    )
 except ImportError:
     # fallback for SETUPTOOLS_USE_DISTUTILS=stdlib
     from .._distutils._modified import newer_pairwise_group
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 54d1e48449..46c0a231eb 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1792,7 +1792,7 @@ def auto_chmod(func, arg, exc):
         return func(arg)
     et, ev, _ = sys.exc_info()
     # TODO: This code doesn't make sense. What is it trying to do?
-    raise (ev[0], ev[1] + (" %s %s" % (func, arg)))
+    raise (ev[0], ev[1] + (" %s %s" % (func, arg)))  # pyright: ignore[reportOptionalSubscript, reportIndexIssue]
 
 
 def update_dist_caches(dist_path, fix_zipimporter_caches):
@@ -2018,7 +2018,9 @@ def is_python_script(script_text, filename):
 
 
 try:
-    from os import chmod as _chmod
+    from os import (
+        chmod as _chmod,  # pyright: ignore[reportAssignmentType] # Losing type-safety w/ pyright, but that's ok
+    )
 except ImportError:
     # Jython compatibility
     def _chmod(*args: object, **kwargs: object) -> None:  # type: ignore[misc] # Mypy re-uses the imported definition anyway
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 46852c1a94..9eaa62aba0 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -39,6 +39,8 @@
 from .install_scripts import install_scripts as install_scripts_cls
 
 if TYPE_CHECKING:
+    from typing_extensions import Self
+
     from .._vendor.wheel.wheelfile import WheelFile
 
 _P = TypeVar("_P", bound=StrPath)
@@ -379,7 +381,7 @@ def _select_strategy(
 class EditableStrategy(Protocol):
     def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]): ...
 
-    def __enter__(self): ...
+    def __enter__(self) -> Self: ...
 
     def __exit__(self, _exc_type, _exc_value, _traceback): ...
 
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 943b9f5a00..a381e38eae 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -303,7 +303,10 @@ def _obtain(self, dist: Distribution, field: str, package_dir: Mapping[str, str]
     def _obtain_version(self, dist: Distribution, package_dir: Mapping[str, str]):
         # Since plugins can set version, let's silently skip if it cannot be obtained
         if "version" in self.dynamic and "version" in self.dynamic_cfg:
-            return _expand.version(self._obtain(dist, "version", package_dir))
+            return _expand.version(
+                # We already do an early check for the presence of "version"
+                self._obtain(dist, "version", package_dir)  # pyright: ignore[reportArgumentType]
+            )
         return None
 
     def _obtain_readme(self, dist: Distribution) -> dict[str, str] | None:
@@ -313,9 +316,10 @@ def _obtain_readme(self, dist: Distribution) -> dict[str, str] | None:
         dynamic_cfg = self.dynamic_cfg
         if "readme" in dynamic_cfg:
             return {
+                # We already do an early check for the presence of "readme"
                 "text": self._obtain(dist, "readme", {}),
                 "content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"),
-            }
+            }  # pyright: ignore[reportReturnType]
 
         self._ensure_previously_set(dist, "readme")
         return None
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index abcc2755be..a69ccd3312 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -10,11 +10,13 @@
 import sys
 import types
 from importlib import import_module
-from typing import TypeVar
+from typing import Type, TypeVar, cast, overload
 
 import distutils.filelist
 
 _T = TypeVar("_T")
+_UnpatchT = TypeVar("_UnpatchT", type, types.FunctionType)
+
 
 __all__: list[str] = []
 """
@@ -37,25 +39,30 @@ def _get_mro(cls):
     return inspect.getmro(cls)
 
 
-def get_unpatched(item: _T) -> _T:
-    lookup = (
-        get_unpatched_class
-        if isinstance(item, type)
-        else get_unpatched_function
-        if isinstance(item, types.FunctionType)
-        else lambda item: None
-    )
-    return lookup(item)
+@overload
+def get_unpatched(item: _UnpatchT) -> _UnpatchT: ...
+@overload
+def get_unpatched(item: object) -> None: ...
+def get_unpatched(
+    item: type | types.FunctionType | object,
+) -> type | types.FunctionType | None:
+    if isinstance(item, type):
+        return get_unpatched_class(item)
+    if isinstance(item, types.FunctionType):
+        return get_unpatched_function(item)
+    return None
 
 
-def get_unpatched_class(cls):
+def get_unpatched_class(cls: type[_T]) -> type[_T]:
     """Protect against re-patching the distutils if reloaded
 
     Also ensures that no other distutils extension monkeypatched the distutils
     first.
     """
     external_bases = (
-        cls for cls in _get_mro(cls) if not cls.__module__.startswith('setuptools')
+        cast(Type[_T], cls)
+        for cls in _get_mro(cls)
+        if not cls.__module__.startswith('setuptools')
     )
     base = next(external_bases)
     if not base.__module__.startswith('distutils'):
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 1510e01934..9e01d5e082 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1072,7 +1072,7 @@ def open_with_auth(url, opener=urllib.request.urlopen):
     if scheme in ('http', 'https'):
         auth, address = _splituser(netloc)
     else:
-        auth = None
+        auth, address = (None, None)
 
     if not auth:
         cred = PyPIConfig().find_credential(url)
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 7d545f1004..98bd26ab9e 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -18,7 +18,7 @@
 from distutils.errors import DistutilsError
 
 if sys.platform.startswith('java'):
-    import org.python.modules.posix.PosixModule as _os
+    import org.python.modules.posix.PosixModule as _os  # pyright: ignore[reportMissingImports]
 else:
     _os = sys.modules[os.name]
 _open = open
diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py
index af172953e3..6b60c7e7f7 100644
--- a/setuptools/tests/test_config_discovery.py
+++ b/setuptools/tests/test_config_discovery.py
@@ -2,6 +2,7 @@
 import sys
 from configparser import ConfigParser
 from itertools import product
+from typing import cast
 
 import jaraco.path
 import pytest
@@ -618,7 +619,10 @@ def _get_dist(dist_path, attrs):
     script = dist_path / 'setup.py'
     if script.exists():
         with Path(dist_path):
-            dist = distutils.core.run_setup("setup.py", {}, stop_after="init")
+            dist = cast(
+                Distribution,
+                distutils.core.run_setup("setup.py", {}, stop_after="init"),
+            )
     else:
         dist = Distribution(attrs)
 
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 6af1d98c6b..72b8ed47f1 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -53,7 +53,7 @@ def testExtractConst(self):
         def f1():
             global x, y, z
             x = "test"
-            y = z
+            y = z  # pyright: ignore[reportUnboundVariable] # Explicitly testing for this runtime issue
 
         fc = f1.__code__
 

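A note on the `typing.overload` pattern used in `setuptools/_reqs.py` and `setuptools/monkey.py` above: mypy cannot infer a generic parameter whose default has a different concrete type (https://github.com/python/mypy/issues/3737), so the overloads spell out both call shapes explicitly. A minimal, self-contained sketch of the same pattern, using hypothetical names rather than setuptools code:

    from typing import Callable, Iterable, Iterator, TypeVar, overload

    _T = TypeVar("_T")

    def _identity(value: str) -> str:
        return value

    @overload
    def convert(items: Iterable[str]) -> Iterator[str]: ...
    @overload
    def convert(items: Iterable[str], parser: Callable[[str], _T]) -> Iterator[_T]: ...
    def convert(items, parser=_identity):
        # Without the overloads, mypy unifies _T with the default's return
        # type (str) and rejects callers whose parser returns anything else.
        return map(parser, items)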
From 2d63f5c750b26bfc65675378e2814384517f7942 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 06:09:32 -0400
Subject: [PATCH 1060/1761] Type str/repr dunders (#4582)

Co-authored-by: Anderson Bravalheri 
---
 pkg_resources/__init__.py        | 14 +++++++-------
 setuptools/sandbox.py            |  2 +-
 setuptools/tests/test_sandbox.py |  2 +-
 setuptools/tests/test_wheel.py   |  2 +-
 4 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index ab46d1dc42..f64c0b18fe 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -301,7 +301,7 @@ def get_supported_platform():
 class ResolutionError(Exception):
     """Abstract base for dependency resolution errors"""
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return self.__class__.__name__ + repr(self.args)
 
 
@@ -377,7 +377,7 @@ def requirers_str(self):
     def report(self):
         return self._template.format(**locals())
 
-    def __str__(self):
+    def __str__(self) -> str:
         return self.report()
 
 
@@ -2725,7 +2725,7 @@ def __init__(
         self.extras = tuple(extras)
         self.dist = dist
 
-    def __str__(self):
+    def __str__(self) -> str:
         s = "%s = %s" % (self.name, self.module_name)
         if self.attrs:
             s += ':' + '.'.join(self.attrs)
@@ -2733,7 +2733,7 @@ def __str__(self):
             s += ' [%s]' % ','.join(self.extras)
         return s
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "EntryPoint.parse(%r)" % str(self)
 
     @overload
@@ -3157,13 +3157,13 @@ def egg_name(self):
             filename += '-' + self.platform
         return filename
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         if self.location:
             return "%s (%s)" % (self, self.location)
         else:
             return str(self)
 
-    def __str__(self):
+    def __str__(self) -> str:
         try:
             version = getattr(self, 'version', None)
         except ValueError:
@@ -3508,7 +3508,7 @@ def __contains__(
     def __hash__(self):
         return self.__hash
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return "Requirement.parse(%r)" % str(self)
 
     @staticmethod
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 98bd26ab9e..dc24e17d7b 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -509,6 +509,6 @@ class SandboxViolation(DistutilsError):
         """
     ).lstrip()
 
-    def __str__(self):
+    def __str__(self) -> str:
         cmd, args, kwargs = self.args
         return self.tmpl.format(**locals())
diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py
index 75dfcba2f1..20db6baaa6 100644
--- a/setuptools/tests/test_sandbox.py
+++ b/setuptools/tests/test_sandbox.py
@@ -75,7 +75,7 @@ def test_unpickleable_exception(self):
         class CantPickleThis(Exception):
             "This Exception is unpickleable because it's not in globals"
 
-            def __repr__(self):
+            def __repr__(self) -> str:
                 return 'CantPickleThis%r' % (self.args,)
 
         with setuptools.sandbox.ExceptionSaver() as saved_exc:
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index ee46f664e4..ac736e2947 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -175,7 +175,7 @@ def __init__(self, id, **kwargs):
         self._id = id
         self._fields = kwargs
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return '%s(**%r)' % (self._id, self._fields)
 
 

From 9b2d09cb1c3d18642576bba4092f4b67763df400 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 20 Aug 2024 16:47:04 -0400
Subject: [PATCH 1061/1761] Type all get/set dunders

---
 pkg_resources/__init__.py           | 14 +++++++++++---
 setuptools/command/build_py.py      |  2 +-
 setuptools/command/develop.py       |  2 +-
 setuptools/command/test.py          | 10 +++++++++-
 setuptools/config/expand.py         |  2 +-
 setuptools/config/setupcfg.py       |  2 +-
 setuptools/tests/test_build_meta.py |  3 ++-
 7 files changed, 26 insertions(+), 9 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index f64c0b18fe..be39007579 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -1094,7 +1094,15 @@ def _added_new(self, dist):
         for callback in self.callbacks:
             callback(dist)
 
-    def __getstate__(self):
+    def __getstate__(
+        self,
+    ) -> tuple[
+        list[str],
+        dict[str | None, list[str]],
+        dict[str, Distribution],
+        dict[str, str],
+        list[Callable[[Distribution], object]],
+    ]:
         return (
             self.entries[:],
             self.entry_keys.copy(),
@@ -1103,7 +1111,7 @@ def __getstate__(self):
             self.callbacks[:],
         )
 
-    def __setstate__(self, e_k_b_n_c):
+    def __setstate__(self, e_k_b_n_c) -> None:
         entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c
         self.entries = entries[:]
         self.entry_keys = keys.copy()
@@ -3171,7 +3179,7 @@ def __str__(self) -> str:
         version = version or "[unknown version]"
         return "%s %s" % (self.project_name, version)
 
-    def __getattr__(self, attr):
+    def __getattr__(self, attr: str):
         """Delegate all unrecognized public attributes to .metadata provider"""
         if attr.startswith('_'):
             raise AttributeError(attr)
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index e6d9656f10..584d2c15ac 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -81,7 +81,7 @@ def run(self):
         # output files are.
         self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=False))
 
-    def __getattr__(self, attr):
+    def __getattr__(self, attr: str):
         "lazily compute data files"
         if attr == 'data_files':
             self.data_files = self._get_data_files()
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 1938434b3a..4ecbd5a1e8 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -188,7 +188,7 @@ class VersionlessRequirement:
     def __init__(self, dist):
         self.__dist = dist
 
-    def __getattr__(self, name):
+    def __getattr__(self, name: str):
         return getattr(self.__dist, name)
 
     def as_requirement(self):
diff --git a/setuptools/command/test.py b/setuptools/command/test.py
index 38e1164c27..89886efa4b 100644
--- a/setuptools/command/test.py
+++ b/setuptools/command/test.py
@@ -1,8 +1,16 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    pass
+
 from setuptools import Command
 from setuptools.warnings import SetuptoolsDeprecationWarning
 
 
-def __getattr__(name):
+# Would restrict to Literal["test"], but mypy doesn't support it: https://github.com/python/mypy/issues/8203
+def __getattr__(name: str) -> type[_test]:
     if name == 'test':
         SetuptoolsDeprecationWarning.emit(
             "The test command is disabled and references to it are deprecated.",
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index dfee1c5d37..d12cc1d8b0 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -61,7 +61,7 @@ def _find_assignments(self) -> Iterator[tuple[ast.AST, ast.AST]]:
             elif isinstance(statement, ast.AnnAssign) and statement.value:
                 yield (statement.target, statement.value)
 
-    def __getattr__(self, attr):
+    def __getattr__(self, attr: str):
         """Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
         try:
             return next(
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 072b787062..54469f74a3 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -280,7 +280,7 @@ def parsers(self):
             '%s must provide .parsers property' % self.__class__.__name__
         )
 
-    def __setitem__(self, option_name, value):
+    def __setitem__(self, option_name, value) -> None:
         target_obj = self.target_obj
 
         # Translate alias into real name.
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index c34f711662..cfd4cd453a 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -8,6 +8,7 @@
 import tarfile
 from concurrent import futures
 from pathlib import Path
+from typing import Any, Callable
 from zipfile import ZipFile
 
 import pytest
@@ -44,7 +45,7 @@ def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.pool = futures.ProcessPoolExecutor(max_workers=1)
 
-    def __getattr__(self, name):
+    def __getattr__(self, name: str) -> Callable[..., Any]:
         """Handles arbitrary function invocations on the build backend."""
 
         def method(*args, **kw):

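The typed module-level `__getattr__` added to `setuptools/command/test.py` relies on PEP 562, which lets a module intercept lookups of names it does not define, typically to emit deprecation warnings lazily. A minimal sketch of the mechanism, with a hypothetical module rather than the setuptools one:

    # legacy.py -- PEP 562: called only when normal module lookup fails
    from __future__ import annotations

    import warnings

    class _OldAPI:
        pass

    def __getattr__(name: str) -> type[_OldAPI]:
        if name == "OldAPI":
            warnings.warn("OldAPI is deprecated", DeprecationWarning, stacklevel=2)
            return _OldAPI
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")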
From 2e2e918f7a6292bba2b4b0a40ba0d87c579dc334 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 27 Aug 2024 11:11:54 +0100
Subject: [PATCH 1062/1761] Remove unnecessary conditional on TYPE_CHECKING

---
 setuptools/command/test.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/setuptools/command/test.py b/setuptools/command/test.py
index 89886efa4b..341b11a20e 100644
--- a/setuptools/command/test.py
+++ b/setuptools/command/test.py
@@ -1,10 +1,5 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    pass
-
 from setuptools import Command
 from setuptools.warnings import SetuptoolsDeprecationWarning
 

From 2c47de2bc8a1a0a11816e34b311c856b4b1df78e Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 06:28:02 -0400
Subject: [PATCH 1063/1761] Type all comparison/operators dunders (#4583)

---
 pkg_resources/__init__.py | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index be39007579..4e9b83d83d 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -1312,7 +1312,7 @@ def __iter__(self) -> Iterator[str]:
             if self[key]:
                 yield key
 
-    def __iadd__(self, other: Distribution | Environment):
+    def __iadd__(self, other: Distribution | Environment) -> Self:
         """In-place addition of a distribution or environment"""
         if isinstance(other, Distribution):
             self.add(other)
@@ -1324,7 +1324,7 @@ def __iadd__(self, other: Distribution | Environment):
             raise TypeError("Can't add %r to environment" % (other,))
         return self
 
-    def __add__(self, other: Distribution | Environment):
+    def __add__(self, other: Distribution | Environment) -> Self:
         """Add an environment or distribution to an environment"""
         new = self.__class__([], platform=None, python=None)
         for env in self, other:
@@ -2371,7 +2371,7 @@ class NoDists:
     []
     """
 
-    def __bool__(self):
+    def __bool__(self) -> Literal[False]:
         return False
 
     def __call__(self, fullpath: object):
@@ -2970,28 +2970,28 @@ def hashcmp(self):
             self.platform or '',
         )
 
-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash(self.hashcmp)
 
-    def __lt__(self, other: Distribution):
+    def __lt__(self, other: Distribution) -> bool:
         return self.hashcmp < other.hashcmp
 
-    def __le__(self, other: Distribution):
+    def __le__(self, other: Distribution) -> bool:
         return self.hashcmp <= other.hashcmp
 
-    def __gt__(self, other: Distribution):
+    def __gt__(self, other: Distribution) -> bool:
         return self.hashcmp > other.hashcmp
 
-    def __ge__(self, other: Distribution):
+    def __ge__(self, other: Distribution) -> bool:
         return self.hashcmp >= other.hashcmp
 
-    def __eq__(self, other: object):
+    def __eq__(self, other: object) -> bool:
         if not isinstance(other, self.__class__):
             # It's not a Distribution, so they are not equal
             return False
         return self.hashcmp == other.hashcmp
 
-    def __ne__(self, other: object):
+    def __ne__(self, other: object) -> bool:
         return not self == other
 
     # These properties have to be lazy so that we don't have to load any
@@ -3488,10 +3488,10 @@ def __init__(self, requirement_string: str) -> None:
         )
         self.__hash = hash(self.hashCmp)
 
-    def __eq__(self, other: object):
+    def __eq__(self, other: object) -> bool:
         return isinstance(other, Requirement) and self.hashCmp == other.hashCmp
 
-    def __ne__(self, other):
+    def __ne__(self, other: object) -> bool:
         return not self == other
 
     def __contains__(
@@ -3513,7 +3513,7 @@ def __contains__(
             prereleases=True,
         )
 
-    def __hash__(self):
+    def __hash__(self) -> int:
         return self.__hash
 
     def __repr__(self) -> str:

From c11cc27ac8c3e5ecab512b28b711aee94cab079b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 07:00:39 -0400
Subject: [PATCH 1064/1761] Type context manager dunders (#4581)

---
 _distutils_hack/__init__.py          |  4 +--
 setuptools/command/editable_wheel.py | 37 ++++++++++++++++++++++------
 setuptools/config/expand.py          | 13 +++++++---
 setuptools/config/pyprojecttoml.py   |  8 +++++-
 setuptools/sandbox.py                | 23 ++++++++++++++---
 5 files changed, 68 insertions(+), 17 deletions(-)

diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
index b05d04e98e..30ac3a7403 100644
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -217,10 +217,10 @@ def add_shim():
 
 
 class shim:
-    def __enter__(self):
+    def __enter__(self) -> None:
         insert_shim()
 
-    def __exit__(self, exc, value, tb):
+    def __exit__(self, exc: object, value: object, tb: object) -> None:
         _remove_shim()
 
 
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 9eaa62aba0..4a8b3abb43 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -23,6 +23,7 @@
 from itertools import chain, starmap
 from pathlib import Path
 from tempfile import TemporaryDirectory
+from types import TracebackType
 from typing import TYPE_CHECKING, Iterable, Iterator, Mapping, Protocol, TypeVar, cast
 
 from .. import Command, _normalization, _path, errors, namespaces
@@ -383,7 +384,13 @@ def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
 
     def __enter__(self) -> Self: ...
 
-    def __exit__(self, _exc_type, _exc_value, _traceback): ...
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+        /,
+    ) -> object: ...
 
 
 class _StaticPth:
@@ -397,7 +404,7 @@ def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
         contents = _encode_pth(f"{entries}\n")
         wheel.writestr(f"__editable__.{self.name}.pth", contents)
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         msg = f"""
         Editable install will be performed using .pth file to extend `sys.path` with:
         {list(map(os.fspath, self.path_entries))!r}
@@ -405,7 +412,13 @@ def __enter__(self):
         _logger.warning(msg + _LENIENT_WARNING)
         return self
 
-    def __exit__(self, _exc_type, _exc_value, _traceback): ...
+    def __exit__(
+        self,
+        _exc_type: object,
+        _exc_value: object,
+        _traceback: object,
+    ) -> None:
+        pass
 
 
 class _LinkTree(_StaticPth):
@@ -463,12 +476,17 @@ def _create_links(self, outputs, output_mapping):
         for relative, src in mappings.items():
             self._create_file(relative, src, link=link_type)
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         msg = "Strict editable install will be performed using a link tree.\n"
         _logger.warning(msg + _STRICT_WARNING)
         return self
 
-    def __exit__(self, _exc_type, _exc_value, _traceback):
+    def __exit__(
+        self,
+        _exc_type: object,
+        _exc_value: object,
+        _traceback: object,
+    ) -> None:
         msg = f"""\n
         Strict editable installation performed using the auxiliary directory:
             {self.auxiliary_dir}
@@ -524,12 +542,17 @@ def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
         for file, content in self.get_implementation():
             wheel.writestr(file, content)
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         msg = "Editable install will be performed using a meta path finder.\n"
         _logger.warning(msg + _LENIENT_WARNING)
         return self
 
-    def __exit__(self, _exc_type, _exc_value, _traceback):
+    def __exit__(
+        self,
+        _exc_type: object,
+        _exc_value: object,
+        _traceback: object,
+    ) -> None:
         msg = """\n
         Please be careful with folders in your working directory with the same
         name as your package as they may take precedence during imports.
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index d12cc1d8b0..e11bcf9b42 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -30,7 +30,7 @@
 from importlib.machinery import ModuleSpec, all_suffixes
 from itertools import chain
 from pathlib import Path
-from types import ModuleType
+from types import ModuleType, TracebackType
 from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Mapping, TypeVar
 
 from .._path import StrPath, same_path as _same_path
@@ -40,6 +40,8 @@
 from distutils.errors import DistutilsOptionError
 
 if TYPE_CHECKING:
+    from typing_extensions import Self
+
     from setuptools.dist import Distribution
 
 _K = TypeVar("_K")
@@ -385,10 +387,15 @@ def __call__(self):
             self._called = True
             self._dist.set_defaults(name=False)  # Skip name, we can still be parsing
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         return self
 
-    def __exit__(self, _exc_type, _exc_value, _traceback):
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+    ) -> None:
         if self._called:
             self._dist.set_defaults.analyse_name()  # Now we can set a default name
 
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index a381e38eae..3449a4bfb7 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -15,6 +15,7 @@
 import os
 from contextlib import contextmanager
 from functools import partial
+from types import TracebackType
 from typing import TYPE_CHECKING, Any, Callable, Mapping
 
 from .._path import StrPath
@@ -434,7 +435,12 @@ def __enter__(self) -> Self:
 
         return super().__enter__()
 
-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+    ) -> None:
         """When exiting the context, if values of ``packages``, ``py_modules`` and
         ``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``.
         """
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index dc24e17d7b..9c2c78a32c 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -11,6 +11,8 @@
 import sys
 import tempfile
 import textwrap
+from types import TracebackType
+from typing import TYPE_CHECKING
 
 import pkg_resources
 from pkg_resources import working_set
@@ -24,6 +26,9 @@
 _open = open
 
 
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
 __all__ = [
     "AbstractSandbox",
     "DirectorySandbox",
@@ -118,10 +123,15 @@ class ExceptionSaver:
     later.
     """
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         return self
 
-    def __exit__(self, type, exc, tb):
+    def __exit__(
+        self,
+        type: type[BaseException] | None,
+        exc: BaseException | None,
+        tb: TracebackType | None,
+    ) -> bool:
         if not exc:
             return False
 
@@ -278,12 +288,17 @@ def _copy(self, source):
         for name in self._attrs:
             setattr(os, name, getattr(source, name))
 
-    def __enter__(self):
+    def __enter__(self) -> None:
         self._copy(self)
         builtins.open = self._open
         self._active = True
 
-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(
+        self,
+        exc_type: object,
+        exc_value: object,
+        traceback: object,
+    ) -> None:
         self._active = False
         builtins.open = _open
         self._copy(_os)

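The signatures above follow the conventional typing of context managers: `__enter__` returns `Self` when the manager yields itself, and `__exit__` takes the exception triple and returns `bool` if it may suppress exceptions or `None` if it never does. A minimal sketch of those conventions (hypothetical class; `Self` comes from `typing_extensions` before Python 3.11):

    from __future__ import annotations

    from types import TracebackType

    from typing_extensions import Self

    class Resource:
        def __enter__(self) -> Self:
            return self

        def __exit__(
            self,
            exc_type: type[BaseException] | None,
            exc_value: BaseException | None,
            traceback: TracebackType | None,
        ) -> None:
            # Returning None (falsy) lets any in-flight exception propagate.
            pass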
From 63d55b7d27314b5638175e1583e800c29ce5b41e Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 07:05:47 -0400
Subject: [PATCH 1065/1761] Link to issues in mypy.ini for non py.typed
 dependencies (#4561)

---
 mypy.ini | 24 ++++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index 43bb9d56c9..fe6b23844a 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -8,13 +8,16 @@ warn_redundant_casts = True
 # required to support namespace packages: https://github.com/python/mypy/issues/14057
 explicit_package_bases = True
 exclude = (?x)(
+	# Avoid scanning Python files in generated folders
 	^build/
 	| ^.tox/
 	| ^.eggs/
-	| ^pkg_resources/tests/data/my-test-package-source/setup.py$ # Duplicate module name
-	| ^setuptools/_vendor/ # Vendored
-	| ^setuptools/_distutils/ # Vendored
-	| ^setuptools/config/_validate_pyproject/ # Auto-generated
+	| ^setuptools/config/_validate_pyproject/
+	# These are vendored
+	| ^setuptools/_vendor/
+	| ^setuptools/_distutils/
+	# Duplicate module name
+	| ^pkg_resources/tests/data/my-test-package-source/setup.py$
 	)
 # Too many false-positives
 disable_error_code = overload-overlap
@@ -35,10 +38,15 @@ disable_error_code = import-not-found
 #     - support for `SETUPTOOLS_USE_DISTUTILS=stdlib` is dropped (#3625)
 #       for setuptools to import `_distutils` directly
 #     - or non-stdlib distutils typings are exposed
-# - All jaraco modules are still untyped
-# - _validate_project sometimes complains about trove_classifiers (#4296)
-# - wheel appears to be untyped
-[mypy-distutils.*,jaraco.*,trove_classifiers,wheel.*]
+# - The following are not marked as py.typed:
+#  - jaraco.develop: https://github.com/jaraco/jaraco.develop/issues/22
+#  - jaraco.envs: https://github.com/jaraco/jaraco.envs/issues/7
+#  - jaraco.packaging: https://github.com/jaraco/jaraco.packaging/issues/20
+#  - jaraco.path: https://github.com/jaraco/jaraco.path/issues/2
+#  - jaraco.test: https://github.com/jaraco/jaraco.test/issues/7
+#  - jaraco.text: https://github.com/jaraco/jaraco.text/issues/17
+#  - wheel: does not intend to expose a programmatic API https://github.com/pypa/wheel/pull/610#issuecomment-2081687671
+[mypy-distutils.*,jaraco.develop,jaraco.envs,jaraco.packaging.*,jaraco.path,jaraco.test.*,jaraco.text,wheel.*]
 ignore_missing_imports = True
 
 # Even when excluding a module, import issues can show up due to following import

From f6e6f864dcbd51e197d5486c63fd9e169d046e43 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 27 Aug 2024 15:03:24 +0100
Subject: [PATCH 1066/1761] Test setuptools own sdist does not include tox
 files

---
 setuptools/tests/test_sdist.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 2df202fd18..7fc9dae872 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -907,3 +907,12 @@ def run_sdist(monkeypatch, project):
     archive = next((project / "dist").glob("*.tar.gz"))
     with tarfile.open(str(archive)) as tar:
         return set(tar.getnames())
+
+
+def test_sanity_check_setuptools_own_sdist(setuptools_sdist):
+    with tarfile.open(setuptools_sdist) as tar:
+        files = tar.getnames()
+
+    # setuptools sdist should not include the .tox folder
+    tox_files = [name for name in files if ".tox" in name]
+    assert len(tox_files) == 0

From cdb58b466a96b66620de656c7343b3344bf580a1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 27 Aug 2024 15:03:42 +0100
Subject: [PATCH 1067/1761] Prune .tox directory

---
 MANIFEST.in | 1 +
 1 file changed, 1 insertion(+)

diff --git a/MANIFEST.in b/MANIFEST.in
index 0643e7ee2d..cb6ce2300e 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -19,3 +19,4 @@ include tox.ini
 include setuptools/tests/config/setupcfg_examples.txt
 include setuptools/config/*.schema.json
 global-exclude *.py[cod] __pycache__
+prune .tox

From 196d44b8a1bd626764e9679c1b4b77ccc1d7ed3f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 27 Aug 2024 11:31:46 -0400
Subject: [PATCH 1068/1761] Apply downstream VS 2017 support.

Ref pypa/setuptools#2663 and pypa/setuptools#3950 and pypa/setuptools#4600
---
 distutils/_msvccompiler.py | 56 ++++++++++++++++++++++----------------
 1 file changed, 32 insertions(+), 24 deletions(-)

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index b0322410c5..14efed8321 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -79,32 +79,40 @@ def _find_vc2017():
     if not root:
         return None, None
 
-    try:
-        path = subprocess.check_output(
-            [
-                os.path.join(
-                    root, "Microsoft Visual Studio", "Installer", "vswhere.exe"
-                ),
-                "-latest",
-                "-prerelease",
-                "-requires",
-                "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
-                "-property",
-                "installationPath",
-                "-products",
-                "*",
-            ],
-            encoding="mbcs",
-            errors="strict",
-        ).strip()
-    except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
-        return None, None
+    variant = 'arm64' if get_platform() == 'win-arm64' else 'x86.x64'
+    suitable_components = (
+        f"Microsoft.VisualStudio.Component.VC.Tools.{variant}",
+        "Microsoft.VisualStudio.Workload.WDExpress",
+    )
+
+    for component in suitable_components:
+        # Workaround for `-requiresAny` (only available on VS 2017 > 15.6)
+        with contextlib.suppress(
+            subprocess.CalledProcessError, OSError, UnicodeDecodeError
+        ):
+            path = (
+                subprocess.check_output([
+                    os.path.join(
+                        root, "Microsoft Visual Studio", "Installer", "vswhere.exe"
+                    ),
+                    "-latest",
+                    "-prerelease",
+                    "-requires",
+                    component,
+                    "-property",
+                    "installationPath",
+                    "-products",
+                    "*",
+                ])
+                .decode(encoding="mbcs", errors="strict")
+                .strip()
+            )
 
-    path = os.path.join(path, "VC", "Auxiliary", "Build")
-    if os.path.isdir(path):
-        return 15, path
+            path = os.path.join(path, "VC", "Auxiliary", "Build")
+            if os.path.isdir(path):
+                return 15, path
 
-    return None, None
+    return None, None  # no suitable component found
 
 
 PLAT_SPEC_TO_RUNTIME = {

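The reworked `_find_vc2017` compensates for `-requiresAny` being unavailable on older vswhere releases by probing each component separately and treating any failure as "try the next candidate". The control flow, reduced to a sketch with hypothetical commands:

    from __future__ import annotations

    import contextlib
    import subprocess

    def first_output(commands: list[list[str]]) -> str | None:
        """Return the output of the first command that succeeds, else None."""
        for cmd in commands:
            # Any failure mode simply moves on to the next candidate.
            with contextlib.suppress(subprocess.CalledProcessError, OSError):
                out = subprocess.check_output(cmd).decode().strip()
                if out:
                    return out
        return None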
From f3f2ca5cc5dfb6d00020bff0455ad1a7a294da3d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 27 Aug 2024 16:31:23 +0100
Subject: [PATCH 1069/1761] Remove custom manifest_maker.prune_file_list and
 rely on default implementation in sdist

It seems that `manifest_maker.prune_file_list` is basically
re-implementing the same exclusion patterns as already implemented by
`distutils.command.sdist`.
So we should be able to remove this duplication.
---
 setuptools/command/egg_info.py | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 09255a3240..280eb5e807 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -606,16 +606,6 @@ def _add_referenced_files(self):
             log.debug("adding file referenced by config '%s'", rf)
         self.filelist.extend(referenced)
 
-    def prune_file_list(self):
-        build = self.get_finalized_command('build')
-        base_dir = self.distribution.get_fullname()
-        self.filelist.prune(build.build_base)
-        self.filelist.prune(base_dir)
-        sep = re.escape(os.sep)
-        self.filelist.exclude_pattern(
-            r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=True
-        )
-
     def _safe_data_files(self, build_py):
         """
         The parent class implementation of this method

From 16c6f7231c70d1689e5167bb8541029008a85f76 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 27 Aug 2024 15:19:07 +0100
Subject: [PATCH 1070/1761] Automatically exclude top-level .tox|.nox|.venv from
 sdist

---
 MANIFEST.in                    |  1 -
 setuptools/command/sdist.py    |  7 +++++++
 setuptools/tests/test_sdist.py | 36 +++++++++++++++++++++++++++++++++-
 3 files changed, 42 insertions(+), 2 deletions(-)

diff --git a/MANIFEST.in b/MANIFEST.in
index cb6ce2300e..0643e7ee2d 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -19,4 +19,3 @@ include tox.ini
 include setuptools/tests/config/setupcfg_examples.txt
 include setuptools/config/*.schema.json
 global-exclude *.py[cod] __pycache__
-prune .tox
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index 68afab89b4..fa9a2c4d81 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -2,6 +2,7 @@
 
 import contextlib
 import os
+import re
 from itertools import chain
 
 from .._importlib import metadata
@@ -156,6 +157,12 @@ def _add_defaults_data_files(self):
         except TypeError:
             log.warn("data_files contains unexpected objects")
 
+    def prune_file_list(self):
+        super().prune_file_list()
+        # Prevent accidental inclusion of test-related cache dirs at the project root
+        sep = re.escape(os.sep)
+        self.filelist.exclude_pattern(r"^(\.tox|\.nox|\.venv)" + sep, is_regex=True)
+
     def check_readme(self):
         for f in self.READMES:
             if os.path.exists(f):
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 7fc9dae872..6df5c8f0f8 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -9,6 +9,7 @@
 import tempfile
 import unicodedata
 from inspect import cleandoc
+from pathlib import Path
 from unittest import mock
 
 import jaraco.path
@@ -425,6 +426,39 @@ def test_defaults_case_sensitivity(self, source_dir):
         assert 'setup.py' not in manifest, manifest
         assert 'setup.cfg' not in manifest, manifest
 
+    def test_exclude_dev_only_cache_folders(self, source_dir):
+        included = {
+            # Emulate problem in https://github.com/pypa/setuptools/issues/4601
+            "MANIFEST.in": "global-include LICEN[CS]E* COPYING* NOTICE* AUTHORS*",
+            # For the sake of being conservative and limiting unforeseen side-effects
+            # we just exclude dev-only cache folders at the root of the repository
+            "test/.venv/lib/python3.9/site-packages/bar-2.dist-info/AUTHORS.rst": "",
+            "src/.nox/py/lib/python3.12/site-packages/bar-2.dist-info/COPYING.txt": "",
+            "doc/.tox/default/lib/python3.11/site-packages/foo-4.dist-info/LICENSE": "",
+        }
+
+        excluded = {
+            # .tox/.nox/.venv are well-known folders present at the root of Python repos
+            # and therefore should be excluded
+            ".tox/release/lib/python3.11/site-packages/foo-4.dist-info/LICENSE": "",
+            ".nox/py/lib/python3.12/site-packages/bar-2.dist-info/COPYING.txt": "",
+            ".venv/lib/python3.9/site-packages/bar-2.dist-info/AUTHORS.rst": "",
+        }
+
+        for file, content in {**excluded, **included}.items():
+            Path(source_dir, file).parent.mkdir(parents=True, exist_ok=True)
+            Path(source_dir, file).write_text(content, encoding="utf-8")
+
+        cmd = self.setup_with_extension()
+        self.assert_package_data_in_manifest(cmd)
+        manifest = cmd.filelist.files
+        for path in excluded:
+            assert os.path.exists(path)
+            assert path not in manifest
+        for path in included:
+            assert os.path.exists(path)
+            assert path in manifest
+
     @fail_on_ascii
     def test_manifest_is_written_with_utf8_encoding(self):
         # Test for #303.
@@ -915,4 +949,4 @@ def test_sanity_check_setuptools_own_sdist(setuptools_sdist):
 
     # setuptools sdist should not include the .tox folder
     tox_files = [name for name in files if ".tox" in name]
-    assert len(tox_files) == 0
+    assert len(tox_files) == 0, f"not empty {tox_files}"

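The override anchors the regular expression at the start of the path and requires a separator immediately after the directory name, so only top-level `.tox`/`.nox`/`.venv` folders are pruned, while nested folders and similarly named files survive. A quick check, assuming the same regex as the patch:

    import os
    import re

    sep = re.escape(os.sep)
    pattern = re.compile(r"^(\.tox|\.nox|\.venv)" + sep)

    assert pattern.search(os.path.join(".tox", "py311", "log.txt"))
    assert not pattern.search(os.path.join("doc", ".tox", "log.txt"))  # not top-level
    assert not pattern.search(".tox-coveragerc.txt")  # no separator after the name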
From 5f1160f4a0f3fababf5222edbf536f1690d94ed1 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 27 Aug 2024 11:43:34 -0400
Subject: [PATCH 1071/1761] Apply error message from downstream.

Ref pypa/setuptools#4600 and pypa/setuptools#2334
---
 distutils/_msvccompiler.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index 14efed8321..425bd72bb5 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -148,7 +148,11 @@ def _get_vc_env(plat_spec):
 
     vcvarsall, _ = _find_vcvarsall(plat_spec)
     if not vcvarsall:
-        raise DistutilsPlatformError("Unable to find vcvarsall.bat")
+        raise DistutilsPlatformError(
+            'Microsoft Visual C++ 14.0 or greater is required. '
+            'Get it with "Microsoft C++ Build Tools": '
+            'https://visualstudio.microsoft.com/visual-cpp-build-tools/'
+        )
 
     try:
         out = subprocess.check_output(

From 9b774633ceb5082c3483e629f76cdc0b3cc5e237 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 27 Aug 2024 17:00:55 +0100
Subject: [PATCH 1072/1761] Test against false positive matches

---
 setuptools/tests/test_sdist.py | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 6df5c8f0f8..5beb110016 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -429,12 +429,19 @@ def test_defaults_case_sensitivity(self, source_dir):
     def test_exclude_dev_only_cache_folders(self, source_dir):
         included = {
             # Emulate problem in https://github.com/pypa/setuptools/issues/4601
-            "MANIFEST.in": "global-include LICEN[CS]E* COPYING* NOTICE* AUTHORS*",
+            "MANIFEST.in": (
+                "global-include LICEN[CS]E* COPYING* NOTICE* AUTHORS*\n"
+                "global-include *.txt\n"
+            ),
             # For the sake of being conservative and limiting unforeseen side-effects
-            # we just exclude dev-only cache folders at the root of the repository
+            # we just exclude dev-only cache folders at the root of the repository:
             "test/.venv/lib/python3.9/site-packages/bar-2.dist-info/AUTHORS.rst": "",
             "src/.nox/py/lib/python3.12/site-packages/bar-2.dist-info/COPYING.txt": "",
             "doc/.tox/default/lib/python3.11/site-packages/foo-4.dist-info/LICENSE": "",
+            # Let's test against false positives with similarly named files:
+            ".venv-requirements.txt": "",
+            ".tox-coveragerc.txt": "",
+            ".noxy/coveragerc.txt": "",
         }
 
         excluded = {
@@ -454,10 +461,10 @@ def test_exclude_dev_only_cache_folders(self, source_dir):
         manifest = cmd.filelist.files
         for path in excluded:
             assert os.path.exists(path)
-            assert path not in manifest
+            assert path not in manifest, (path, manifest)
         for path in included:
             assert os.path.exists(path)
-            assert path in manifest
+            assert path in manifest, (path, manifest)
 
     @fail_on_ascii
     def test_manifest_is_written_with_utf8_encoding(self):

From 67e854fecc49089300ee196bee3f23227747f0c3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 27 Aug 2024 17:35:09 +0100
Subject: [PATCH 1073/1761] Account for windows path separators in tests

---
 setuptools/tests/test_sdist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 5beb110016..f628f3eb4a 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -458,7 +458,7 @@ def test_exclude_dev_only_cache_folders(self, source_dir):
 
         cmd = self.setup_with_extension()
         self.assert_package_data_in_manifest(cmd)
-        manifest = cmd.filelist.files
+        manifest = {f.replace(os.sep, '/') for f in cmd.filelist.files}
         for path in excluded:
             assert os.path.exists(path)
             assert path not in manifest, (path, manifest)

From 30311bd00df732291879488ff92f2b586122e8a8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 27 Aug 2024 12:37:55 -0400
Subject: [PATCH 1074/1761] Add news fragment.

---
 newsfragments/4606.removal.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4606.removal.rst

diff --git a/newsfragments/4606.removal.rst b/newsfragments/4606.removal.rst
new file mode 100644
index 0000000000..682bc193e6
--- /dev/null
+++ b/newsfragments/4606.removal.rst
@@ -0,0 +1 @@
+Synced with pypa/distutils@58fe058e4, including consolidating Visual Studio 2017 support (#4600, pypa/distutils#289), removal of deprecated legacy MSVC compiler modules (pypa/distutils#287), suppressing of errors when the home directory is missing (pypa/distutils#278), removal of wininst binaries (pypa/distutils#282).

From ef2957a952244e4dfd179d29e1eaaa2cd83a1e26 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 12:50:35 -0400
Subject: [PATCH 1075/1761] Reraise sensible errors from auto_chmod

---
 newsfragments/4593.feature.rst     |  1 +
 setuptools/command/easy_install.py | 13 ++++++++-----
 2 files changed, 9 insertions(+), 5 deletions(-)
 create mode 100644 newsfragments/4593.feature.rst

diff --git a/newsfragments/4593.feature.rst b/newsfragments/4593.feature.rst
new file mode 100644
index 0000000000..4e3e4ed66b
--- /dev/null
+++ b/newsfragments/4593.feature.rst
@@ -0,0 +1 @@
+Reraise error from ``setuptools.command.easy_install.auto_chmod`` instead of nonsensical ``TypeError: 'Exception' object is not subscriptable`` -- by :user:`Avasam`
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 46c0a231eb..bdf9fa242b 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -34,7 +34,7 @@
 from collections.abc import Iterable
 from glob import glob
 from sysconfig import get_path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Callable, TypeVar
 
 from jaraco.text import yield_lines
 
@@ -89,6 +89,8 @@
     'get_exe_prefixes',
 ]
 
+_T = TypeVar("_T")
+
 
 def is_64bit():
     return struct.calcsize("P") == 8
@@ -1786,13 +1788,14 @@ def _first_line_re():
     return re.compile(first_line_re.pattern.decode())
 
 
-def auto_chmod(func, arg, exc):
+# Must match shutil._OnExcCallback
+def auto_chmod(func: Callable[..., _T], arg: str, exc: BaseException) -> _T:
+    """shutils onexc callback to automatically call chmod for certain functions."""
+    # Only retry for scenarios known to have an issue
     if func in [os.unlink, os.remove] and os.name == 'nt':
         chmod(arg, stat.S_IWRITE)
         return func(arg)
-    et, ev, _ = sys.exc_info()
-    # TODO: This code doesn't make sense. What is it trying to do?
-    raise (ev[0], ev[1] + (" %s %s" % (func, arg)))  # pyright: ignore[reportOptionalSubscript, reportIndexIssue]
+    raise exc
 
 
 def update_dist_caches(dist_path, fix_zipimporter_caches):

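With the exception instance now re-raised directly, `auto_chmod` matches the `onexc` callback signature that `shutil.rmtree` accepts on Python 3.12+ (earlier versions take `onerror`, which receives an exc-info tuple instead). A sketch of the intended usage, with a hypothetical directory:

    import shutil

    from setuptools.command.easy_install import auto_chmod

    # On Windows, retries os.unlink/os.remove after making the file writable;
    # in every other case the original exception is re-raised unchanged.
    shutil.rmtree("build/stubborn-dir", onexc=auto_chmod)  # Python 3.12+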
From 8ec5b5aeef7a6cd80ca8c3291ad66acc3986069b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 27 Aug 2024 18:03:22 +0100
Subject: [PATCH 1076/1761] Add missing news fragment for PR 4603

---
 newsfragments/4603.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4603.feature.rst

diff --git a/newsfragments/4603.feature.rst b/newsfragments/4603.feature.rst
new file mode 100644
index 0000000000..45dde0b5ad
--- /dev/null
+++ b/newsfragments/4603.feature.rst
@@ -0,0 +1 @@
+Automatically exclude ``.tox|.nox|.venv`` directories from ``sdist``.

From 18a44d8f5660df9e23ee823a073b2d3238bc8293 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 27 Aug 2024 15:00:13 -0400
Subject: [PATCH 1077/1761] Add news fragment.

---
 newsfragments/4600.removal.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4600.removal.rst

diff --git a/newsfragments/4600.removal.rst b/newsfragments/4600.removal.rst
new file mode 100644
index 0000000000..9164225381
--- /dev/null
+++ b/newsfragments/4600.removal.rst
@@ -0,0 +1 @@
+Removed the monkeypatching of distutils._msvccompiler. Now all compiler logic is consolidated in distutils.
\ No newline at end of file

From 98ad794354efecf4ed1f629d4e5f02feae00d2ae Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 27 Aug 2024 15:01:54 -0400
Subject: [PATCH 1078/1761] =?UTF-8?q?Bump=20version:=2073.0.1=20=E2=86=92?=
 =?UTF-8?q?=2074.0.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 27 +++++++++++++++++++++++++++
 newsfragments/4548.feature.rst |  1 -
 newsfragments/4553.feature.rst |  1 -
 newsfragments/4585.feature.rst |  1 -
 newsfragments/4592.misc.rst    |  1 -
 newsfragments/4593.feature.rst |  1 -
 newsfragments/4598.feature.rst |  1 -
 newsfragments/4600.removal.rst |  1 -
 newsfragments/4603.feature.rst |  1 -
 newsfragments/4606.removal.rst |  1 -
 pyproject.toml                 |  2 +-
 12 files changed, 29 insertions(+), 11 deletions(-)
 delete mode 100644 newsfragments/4548.feature.rst
 delete mode 100644 newsfragments/4553.feature.rst
 delete mode 100644 newsfragments/4585.feature.rst
 delete mode 100644 newsfragments/4592.misc.rst
 delete mode 100644 newsfragments/4593.feature.rst
 delete mode 100644 newsfragments/4598.feature.rst
 delete mode 100644 newsfragments/4600.removal.rst
 delete mode 100644 newsfragments/4603.feature.rst
 delete mode 100644 newsfragments/4606.removal.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 51d22156e4..4377121819 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 73.0.1
+current_version = 74.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 3c36fbfa5a..6bb9eeaf57 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,30 @@
+v74.0.0
+=======
+
+Features
+--------
+
+- Changed the type of error raised by ``setuptools.command.easy_install.CommandSpec.from_param`` on unsupported argument from `AttributeError` to `TypeError` -- by :user:`Avasam` (#4548)
+- Added detection of ARM64 variant of MSVC -- by :user:`saschanaz` (#4553)
+- Made ``setuptools.package_index.Credential`` a `typing.NamedTuple` -- by :user:`Avasam` (#4585)
+- Reraise error from ``setuptools.command.easy_install.auto_chmod`` instead of nonsensical ``TypeError: 'Exception' object is not subscriptable`` -- by :user:`Avasam` (#4593)
+- Fully typed all collection attributes in ``pkg_resources`` -- by :user:`Avasam` (#4598)
+- Automatically exclude ``.tox|.nox|.venv`` directories from ``sdist``. (#4603)
+
+
+Deprecations and Removals
+-------------------------
+
+- Removed the monkeypatching of distutils._msvccompiler. Now all compiler logic is consolidated in distutils. (#4600)
+- Synced with pypa/distutils@58fe058e4, including consolidating Visual Studio 2017 support (#4600, pypa/distutils#289), removal of deprecated legacy MSVC compiler modules (pypa/distutils#287), suppressing of errors when the home directory is missing (pypa/distutils#278), removal of wininst binaries (pypa/distutils#282). (#4606)
+
+
+Misc
+----
+
+- #4592
+
+
 v73.0.1
 =======
 
diff --git a/newsfragments/4548.feature.rst b/newsfragments/4548.feature.rst
deleted file mode 100644
index 7fcf5f4377..0000000000
--- a/newsfragments/4548.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Changed the type of error raised by ``setuptools.command.easy_install.CommandSpec.from_param`` on unsupported argument from `AttributeError` to `TypeError` -- by :user:`Avasam`
diff --git a/newsfragments/4553.feature.rst b/newsfragments/4553.feature.rst
deleted file mode 100644
index 43ea1eeac9..0000000000
--- a/newsfragments/4553.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Added detection of ARM64 variant of MSVC -- by :user:`saschanaz`
diff --git a/newsfragments/4585.feature.rst b/newsfragments/4585.feature.rst
deleted file mode 100644
index 566bca75f8..0000000000
--- a/newsfragments/4585.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Made ``setuptools.package_index.Credential`` a `typing.NamedTuple` -- by :user:`Avasam`
diff --git a/newsfragments/4592.misc.rst b/newsfragments/4592.misc.rst
deleted file mode 100644
index 79d36a9d82..0000000000
--- a/newsfragments/4592.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-If somehow the ``EXT_SUFFIX`` configuration variable and the ``SETUPTOOLS_EXT_SUFFIX`` environment variable are both missing, ``setuptools.command.build_ext``'s ``get_ext_filename`` will now raise an `OSError` instead of a `TypeError` -- by :user:`Avasam`
diff --git a/newsfragments/4593.feature.rst b/newsfragments/4593.feature.rst
deleted file mode 100644
index 4e3e4ed66b..0000000000
--- a/newsfragments/4593.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Reraise the error from ``setuptools.command.easy_install.auto_chmod`` instead of a nonsensical ``TypeError: 'Exception' object is not subscriptable`` -- by :user:`Avasam`
diff --git a/newsfragments/4598.feature.rst b/newsfragments/4598.feature.rst
deleted file mode 100644
index ee2ea40dfe..0000000000
--- a/newsfragments/4598.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fully typed all collection attributes in ``pkg_resources`` -- by :user:`Avasam`
diff --git a/newsfragments/4600.removal.rst b/newsfragments/4600.removal.rst
deleted file mode 100644
index 9164225381..0000000000
--- a/newsfragments/4600.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Removed the monkeypatching of distutils._msvccompiler. Now all compiler logic is consolidated in distutils.
\ No newline at end of file
diff --git a/newsfragments/4603.feature.rst b/newsfragments/4603.feature.rst
deleted file mode 100644
index 45dde0b5ad..0000000000
--- a/newsfragments/4603.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Automatically exclude ``.tox|.nox|.venv`` directories from ``sdist``.
diff --git a/newsfragments/4606.removal.rst b/newsfragments/4606.removal.rst
deleted file mode 100644
index 682bc193e6..0000000000
--- a/newsfragments/4606.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Synced with pypa/distutils@58fe058e4, including consolidating Visual Studio 2017 support (#4600, pypa/distutils#289), removal of deprecated legacy MSVC compiler modules (pypa/distutils#287), suppressing of errors when the home directory is missing (pypa/distutils#278), removal of wininst binaries (pypa/distutils#282).
diff --git a/pyproject.toml b/pyproject.toml
index fbcd4b48ef..cee3b55fde 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "73.0.1"
+version = "74.0.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From b3a71640fa5fb95e095cd5ec056f355791fe9a59 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 16 Apr 2024 13:41:26 +0100
Subject: [PATCH 1079/1761] Add .pth encoding to compat.py312

---
 setuptools/compat/py312.py | 11 +++++++++++
 1 file changed, 11 insertions(+)
 create mode 100644 setuptools/compat/py312.py

diff --git a/setuptools/compat/py312.py b/setuptools/compat/py312.py
new file mode 100644
index 0000000000..71f372adc4
--- /dev/null
+++ b/setuptools/compat/py312.py
@@ -0,0 +1,11 @@
+import sys
+
+if sys.version_info >= (3, 13):
+    # Python 3.13 should support `.pth` files encoded in UTF-8
+    # See discussion in https://github.com/python/cpython/issues/77102
+    PTH_ENCODING = "utf-8"
+else:
+    from .py39 import LOCALE_ENCODING
+
+    # PTH_ENCODING = "locale"
+    PTH_ENCODING = LOCALE_ENCODING

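The new module selects the ``.pth`` encoding once, at import time, based on
the interpreter version. A minimal, self-contained sketch of how such a
constant is consumed when writing a ``.pth`` file (the file name is
illustrative; the inlined fallback mirrors what ``compat.py39.LOCALE_ENCODING``
resolves to)::

    import sys

    # Same selection as setuptools/compat/py312.py at this point in the
    # history (a later patch moves the cutoff from 3.13 to 3.12.4).
    if sys.version_info >= (3, 13):
        PTH_ENCODING = "utf-8"
    else:
        # "locale" is only a valid encoding name on Python 3.10+;
        # None makes open() use the locale default on older versions.
        PTH_ENCODING = "locale" if sys.version_info >= (3, 10) else None

    with open("demo.pth", "w", encoding=PTH_ENCODING) as f:
        f.write("/path/to/add/to/sys.path\n")
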
From 01bc93bb817d7bfc4608f2398c7858a300034715 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 16 Apr 2024 13:42:33 +0100
Subject: [PATCH 1080/1761] Use compat.py312.PTH_ENCODING instead of locale

---
 setuptools/command/easy_install.py   | 48 ++++++++++++++++++++--------
 setuptools/command/editable_wheel.py |  9 ++++--
 setuptools/namespaces.py             |  7 ++--
 3 files changed, 44 insertions(+), 20 deletions(-)

diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 63bff97fc1..3f7fc17a88 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -63,7 +63,7 @@
 from setuptools.wheel import Wheel
 
 from .._path import ensure_directory
-from ..compat import py39, py311
+from ..compat import py39, py311, py312
 
 from distutils import dir_util, log
 from distutils.command import install
@@ -590,8 +590,9 @@ def check_pth_processing(self):  # noqa: C901
                 os.unlink(ok_file)
             dirname = os.path.dirname(ok_file)
             os.makedirs(dirname, exist_ok=True)
-            f = open(pth_file, 'w', encoding=py39.LOCALE_ENCODING)
-            # ^-- Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
+            f = open(pth_file, 'w', encoding=py312.PTH_ENCODING)
+            # ^-- Python<3.13 requires encoding="locale" instead of "utf-8",
+            #     see python/cpython#77102.
         except OSError:
             self.cant_write_to_target()
         else:
@@ -1282,8 +1283,9 @@ def update_pth(self, dist):  # noqa: C901  # is too complex (11)  # FIXME
         if os.path.islink(filename):
             os.unlink(filename)
 
-        with open(filename, 'wt', encoding=py39.LOCALE_ENCODING) as f:
-            # Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
+        with open(filename, 'wt', encoding=py312.PTH_ENCODING) as f:
+            # ^-- Python<3.13 requires encoding="locale" instead of "utf-8",
+            #     see python/cpython#77102.
             f.write(self.pth_file.make_relative(dist.location) + '\n')
 
     def unpack_progress(self, src, dst):
@@ -1509,9 +1511,8 @@ def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
                 continue
 
             # Read the .pth file
-            with open(os.path.join(dirname, name), encoding=py39.LOCALE_ENCODING) as f:
-                # Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
-                lines = list(yield_lines(f))
+            content = _read_pth(os.path.join(dirname, name))
+            lines = list(yield_lines(content))
 
             # Yield existing non-dupe, non-import directory lines from it
             for line in lines:
@@ -1625,9 +1626,8 @@ def _load_raw(self):
         paths = []
         dirty = saw_import = False
         seen = set(self.sitedirs)
-        f = open(self.filename, 'rt', encoding=py39.LOCALE_ENCODING)
-        # ^-- Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
-        for line in f:
+        content = _read_pth(self.filename)
+        for line in content.splitlines():
             path = line.rstrip()
             # still keep imports and empty/commented lines for formatting
             paths.append(path)
@@ -1646,7 +1646,6 @@ def _load_raw(self):
                 paths.pop()
                 continue
             seen.add(normalized_path)
-        f.close()
         # remove any trailing empty/blank line
         while paths and not paths[-1].strip():
             paths.pop()
@@ -1697,8 +1696,9 @@ def save(self):
             data = '\n'.join(lines) + '\n'
             if os.path.islink(self.filename):
                 os.unlink(self.filename)
-            with open(self.filename, 'wt', encoding=py39.LOCALE_ENCODING) as f:
-                # Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
+            with open(self.filename, 'wt', encoding=py312.PTH_ENCODING) as f:
+                # ^-- Python<3.13 requires encoding="locale" instead of "utf-8",
+                #     see python/cpython#77102.
                 f.write(data)
         elif os.path.exists(self.filename):
             log.debug("Deleting empty %s", self.filename)
@@ -2357,6 +2357,26 @@ def only_strs(values):
     return filter(lambda val: isinstance(val, str), values)
 
 
+def _read_pth(fullname: str) -> str:
+    # Python<3.13 requires encoding="locale" instead of "utf-8", see
+    # python/cpython#77102. Old versions of setuptools may have produced
+    # `.pth` files with other encodings, which would be problematic, so we
+    # fall back to "locale".
+
+    try:
+        with open(fullname, encoding=py312.PTH_ENCODING) as f:
+            return f.read()
+    except UnicodeDecodeError:  # pragma: no cover
+        # This error may only happen for Python >= 3.13
+        # TODO: Possible deprecation warnings to be added in the future:
+        #       ``.pth file {fullname!r} is not UTF-8.``
+        #       Your environment contains {fullname!r}, which cannot be read as UTF-8.
+        #       It is likely to have been produced with an old version of setuptools.
+        #       Please be mindful that this is deprecated and, in the future,
+        #       non-UTF-8 .pth files may cause setuptools to fail.
+        with open(fullname, encoding=py39.LOCALE_ENCODING) as f:
+            return f.read()
+
+
 class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
     _SUMMARY = "easy_install command is deprecated."
     _DETAILS = """
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 4a8b3abb43..2b21eacbad 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -28,7 +28,7 @@
 
 from .. import Command, _normalization, _path, errors, namespaces
 from .._path import StrPath
-from ..compat import py39
+from ..compat import py312
 from ..discovery import find_package_path
 from ..dist import Distribution
 from ..warnings import InformationOnly, SetuptoolsDeprecationWarning, SetuptoolsWarning
@@ -561,7 +561,9 @@ def __exit__(
 
 
 def _encode_pth(content: str) -> bytes:
-    """.pth files are always read with 'locale' encoding, the recommendation
+    """
+    Prior to Python 3.13 (see https://github.com/python/cpython/issues/77102),
+    .pth files were always read with 'locale' encoding; the recommendation
     from the cpython core developers is to write them as ``open(path, "w")``
     and ignore warnings (see python/cpython#77102, pypa/setuptools#3937).
     This function tries to simulate this behaviour without having to create an
@@ -571,7 +573,8 @@ def _encode_pth(content: str) -> bytes:
     or ``locale.getencoding()``).
     """
     with io.BytesIO() as buffer:
-        wrapper = io.TextIOWrapper(buffer, encoding=py39.LOCALE_ENCODING)
+        wrapper = io.TextIOWrapper(buffer, encoding=py312.PTH_ENCODING)
+        # TODO: Python 3.13: replace the whole function with `bytes(content, "utf-8")`
         wrapper.write(content)
         wrapper.flush()
         buffer.seek(0)
diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py
index e82439e3ef..299fdd9479 100644
--- a/setuptools/namespaces.py
+++ b/setuptools/namespaces.py
@@ -1,7 +1,7 @@
 import itertools
 import os
 
-from .compat import py39
+from .compat import py312
 
 from distutils import log
 
@@ -25,8 +25,9 @@ def install_namespaces(self):
             list(lines)
             return
 
-        with open(filename, 'wt', encoding=py39.LOCALE_ENCODING) as f:
-            # Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
+        with open(filename, 'wt', encoding=py312.PTH_ENCODING) as f:
+            # Python<3.13 requires encoding="locale" instead of "utf-8"
+            # See: python/cpython#77102
             f.writelines(lines)
 
     def uninstall_namespaces(self):

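The heart of this patch is the try-UTF-8-then-fall-back-to-locale read in
``_read_pth``. A condensed, self-contained sketch of the same pattern, with
the compat constants inlined for illustration (assumes Python 3.10+, where
the ``"locale"`` encoding name exists)::

    from pathlib import Path

    PTH_ENCODING = "utf-8"      # stand-in for compat.py312.PTH_ENCODING
    LOCALE_ENCODING = "locale"  # stand-in for compat.py39.LOCALE_ENCODING

    def read_pth(fullname: str) -> str:
        try:
            # Preferred path: current setuptools writes .pth files as UTF-8.
            return Path(fullname).read_text(encoding=PTH_ENCODING)
        except UnicodeDecodeError:
            # Files produced by older setuptools may use the locale encoding.
            return Path(fullname).read_text(encoding=LOCALE_ENCODING)
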
From f14509d2ef9998f860564c6a2b0eef7882777c7c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 2 Aug 2024 11:59:06 +0100
Subject: [PATCH 1081/1761] Update setuptools/compat/py312.py

Co-authored-by: Avasam 
---
 setuptools/compat/py312.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/compat/py312.py b/setuptools/compat/py312.py
index 71f372adc4..71bd441353 100644
--- a/setuptools/compat/py312.py
+++ b/setuptools/compat/py312.py
@@ -1,6 +1,6 @@
 import sys
 
-if sys.version_info >= (3, 13):
+if sys.version_info >= (3, 12, 4):
     # Python 3.13 should support `.pth` files encoded in UTF-8
     # See discussion in https://github.com/python/cpython/issues/77102
     PTH_ENCODING = "utf-8"

From 353d0aabca8f655ee369ba41d9072bc0e9292a37 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 12 Aug 2024 17:26:21 +0100
Subject: [PATCH 1082/1761] Fix type check

---
 setuptools/compat/py312.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/setuptools/compat/py312.py b/setuptools/compat/py312.py
index 71bd441353..b20c5f697a 100644
--- a/setuptools/compat/py312.py
+++ b/setuptools/compat/py312.py
@@ -1,9 +1,11 @@
+from __future__ import annotations
+
 import sys
 
 if sys.version_info >= (3, 12, 4):
     # Python 3.13 should support `.pth` files encoded in UTF-8
     # See discussion in https://github.com/python/cpython/issues/77102
-    PTH_ENCODING = "utf-8"
+    PTH_ENCODING: str | None = "utf-8"
 else:
     from .py39 import LOCALE_ENCODING
 

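The ``str | None`` annotation reflects the fallback branch: on Python < 3.10,
``compat.py39.LOCALE_ENCODING`` is ``None``, because the special ``"locale"``
encoding name only exists on 3.10+, and ``encoding=None`` makes ``open()``
use the locale default, which is equivalent. A small sketch of that
equivalence (file name illustrative)::

    import sys

    # What the compat layer resolves to, depending on the interpreter:
    LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None

    # Either value opens the file with the locale's preferred encoding.
    with open("demo.pth", "w", encoding=LOCALE_ENCODING) as f:
        f.write("/illustrative/path\n")
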
From 7ffe2d5fc2bce4bbf9a3c75b6bb091805f33ab55 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 28 Aug 2024 10:52:37 +0100
Subject: [PATCH 1083/1761] Document convention on compat modules

---
 docs/development/developer-guide.rst | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)

diff --git a/docs/development/developer-guide.rst b/docs/development/developer-guide.rst
index 4566fef8d0..00cd3ff2e2 100644
--- a/docs/development/developer-guide.rst
+++ b/docs/development/developer-guide.rst
@@ -132,6 +132,30 @@ To refresh the dependencies, run the following command::
 
     $ tox -e vendor
 
+
+---------------------
+Compatibility Modules
+---------------------
+
+Setuptools organises its own internal polyfills, backports, version-conditional
+imports, and workarounds into a series of separate modules under the
+``setuptools.compat`` package. These modules provide compatibility layers or
+shims that ensure code runs smoothly across different Python versions.
+
+These modules are named to reflect the version of Python that requires the
+legacy behavior. For example, the module ``setuptools.compat.py310`` supports
+compatibility with Python 3.10 and earlier.
+This naming convention is beneficial because it signals when the code
+can be removed. When support for Python 3.10 is dropped (i.e., when
+``requires-python = ">=3.11"`` is added to ``pyproject.toml``),
+imports of the module ``py310`` will be easily identifiable as removable debt.
+
+Please note that these modules are implementation details of ``setuptools`` and
+not part of the public API. The naming convention is documented above as a
+guide for ``setuptools`` developers. There is no guarantee regarding the
+stability of these modules and they should not be used in 3rd-party projects.
+
+
 ----------------
 Type annotations
 ----------------

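As a hypothetical illustration of the convention described above, a
``setuptools.compat.py310`` module might carry a backport that becomes
removable once ``requires-python = ">=3.11"`` lands (the contents below are
invented for the example; only the naming scheme comes from the guide)::

    # setuptools/compat/py310.py  (hypothetical contents)
    import sys

    if sys.version_info >= (3, 11):
        import tomllib  # stdlib from 3.11 onwards
    else:  # pragma: no cover
        # Backport needed only while Python 3.10 is still supported.
        import tomli as tomllib

    __all__ = ["tomllib"]
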
From 3c5731f5836cb42848e0871712b6eaa70d8fda38 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 28 Aug 2024 18:03:51 +0100
Subject: [PATCH 1084/1761] Fix outdated docstring in _msvccompiler

---
 distutils/_msvccompiler.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index 7400fbaa2b..6a89282562 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -3,8 +3,7 @@
 Contains MSVCCompiler, an implementation of the abstract CCompiler class
 for Microsoft Visual Studio 2015.
 
-The module is compatible with VS 2015 and later. You can find legacy support
-for older versions in distutils.msvc9compiler and distutils.msvccompiler.
+Please ensure that you are using VS 2015 or later.
 """
 
 # Written by Perry Stoll

From ff1b10c2b655559b2fd229d6068d615907a7f9a5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 29 Aug 2024 16:02:39 +0100
Subject: [PATCH 1085/1761] Document setuptools uses skeleton and distutils

---
 docs/development/developer-guide.rst | 41 ++++++++++++++--------------
 1 file changed, 20 insertions(+), 21 deletions(-)

diff --git a/docs/development/developer-guide.rst b/docs/development/developer-guide.rst
index 00cd3ff2e2..a9b3ad3bab 100644
--- a/docs/development/developer-guide.rst
+++ b/docs/development/developer-guide.rst
@@ -133,27 +133,26 @@ To refresh the dependencies, run the following command::
     $ tox -e vendor
 
 
----------------------
-Compatibility Modules
----------------------
-
-Setuptools organises its own internal polyfills, backports, version-conditional
-imports, and workarounds into a series of separate modules under the
-``setuptools.compat`` package. These modules provide compatibility layers or
-shims that ensure code runs smoothly across different Python versions.
-
-These modules are named to reflect the version of Python that requires the
-legacy behavior. For example, the module ``setuptools.compat.py310`` supports
-compatibility with Python 3.10 and earlier.
-This naming convention is beneficial because it signals when the code
-can be removed. When support for Python 3.10 is dropped (i.e., when
-``requires-python = ">=3.11"`` is added to ``pyproject.toml``),
-imports of the module ``py310`` will be easily identifiable as removable debt.
-
-Please note that these modules are implementation details of ``setuptools`` and
-not part of the public API. The naming convention is documented above as a
-guide for ``setuptools`` developers. There is no guarantee regarding the
-stability of these modules and they should not be used in 3rd-party projects.
+------------------------------------
+Code conventions and other practices
+------------------------------------
+
+Setuptools utilizes the `skeleton `_
+framework as a foundation for sharing re-usable maintenance tasks
+across different projects in the ecosystem.
+
+This also means that the project adheres to the same coding conventions
+and other practices described in the `skeleton documentation
+`_.
+
+Moreover, changes in the code base should be kept as compatible as possible
+with ``skeleton`` to avoid merge conflicts or accidental regressions during
+the periodic merges.
+
+Finally, the ``setuptools/_distutils`` directory should not be modified
+directly when contributing to the ``setuptools`` project.
+Instead, this directory is maintained as a separate project at
+https://github.com/pypa/distutils, and periodically merged into ``setuptools``.
 
 
 ----------------

From 0c326f3f77b2420163f73d97f8fbd090fa49147d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 29 Aug 2024 13:13:06 -0400
Subject: [PATCH 1086/1761] Add a degenerate nitpick_ignore for downstream
 consumers. Add a 'local' comment to delineate where the skeleton ends and the
 downstream begins.

---
 docs/conf.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/docs/conf.py b/docs/conf.py
index 32150488de..3d956a8c16 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,3 +1,6 @@
+from __future__ import annotations
+
+
 extensions = [
     'sphinx.ext.autodoc',
     'jaraco.packaging.sphinx',
@@ -30,6 +33,7 @@
 
 # Be strict about any broken references
 nitpicky = True
+nitpick_ignore: list[tuple[str, str]] = []
 
 # Include Python intersphinx mapping to prevent failures
 # jaraco/skeleton#51
@@ -40,3 +44,5 @@
 
 # Preserve authored syntax for defaults
 autodoc_preserve_defaults = True
+
+# local

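The empty list is "degenerate" on purpose: a downstream project that merges
the skeleton can extend it below the ``# local`` marker instead of
re-declaring it, keeping merges conflict-free. A hypothetical downstream
``docs/conf.py`` tail::

    # local
    nitpick_ignore += [
        ('py:class', 'SomePrivateClass'),  # hypothetical unresolvable target
    ]
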
From 2beb8b0c9d0f7046370e7c58c4e6baaf35154a16 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 29 Aug 2024 16:26:28 -0400
Subject: [PATCH 1087/1761] Add support for linking usernames.

Closes jaraco/skeleton#144
---
 docs/conf.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/docs/conf.py b/docs/conf.py
index 3d956a8c16..d5745d6298 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -45,4 +45,13 @@
 # Preserve authored syntax for defaults
 autodoc_preserve_defaults = True
 
+# Add support for linking usernames, PyPI projects, Wikipedia pages
+github_url = 'https://github.com/'
+extlinks = {
+    'user': (f'{github_url}%s', '@%s'),
+    'pypi': ('https://pypi.org/project/%s', '%s'),
+    'wiki': ('https://wikipedia.org/wiki/%s', '%s'),
+}
+extensions += ['sphinx.ext.extlinks']
+
 # local

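Each entry maps a role name to a ``(URL template, caption template)`` pair,
with ``%s`` replaced by the role's target. A quick sketch of how, for
example, ``:user:`Avasam``` expands under this configuration::

    github_url = 'https://github.com/'
    extlinks = {
        'user': (f'{github_url}%s', '@%s'),
        'pypi': ('https://pypi.org/project/%s', '%s'),
        'wiki': ('https://wikipedia.org/wiki/%s', '%s'),
    }

    url_template, caption_template = extlinks['user']
    print(url_template % 'Avasam')      # https://github.com/Avasam
    print(caption_template % 'Avasam')  # @Avasam
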
From 790fa6e6feb9a93d39135494819b12e9df8a7bba Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 29 Aug 2024 16:53:52 -0400
Subject: [PATCH 1088/1761] Include the trailing comma in
 disable_error_code(overload-overlap), also required for clean diffs.

Ref jaraco/skeleton#142
---
 mypy.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mypy.ini b/mypy.ini
index 2806c330e7..efcb8cbc20 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -12,4 +12,4 @@ explicit_package_bases = True
 
 disable_error_code =
 	# Disable due to many false positives
-	overload-overlap
+	overload-overlap,

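(With the trailing comma in place, a later hypothetical addition such as
``misc,`` on its own line shows up as a one-line diff, instead of also
rewriting the ``overload-overlap`` line.)
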
From 8eafcbc79c2968c76a3e6ee0b41baf7f41c098bc Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 30 Aug 2024 10:02:15 -0400
Subject: [PATCH 1089/1761] Run Ruff (fix CI)

---
 docs/conf.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docs/conf.py b/docs/conf.py
index 5b1d6cbb10..4ea38e7490 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-
 extensions = [
     'sphinx.ext.autodoc',
     'jaraco.packaging.sphinx',

From 9b23cc8cddc45adcfd33500e9a69f5229a2ecaa0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= 
Date: Thu, 29 Aug 2024 11:45:17 +0200
Subject: [PATCH 1090/1761] Delete unused file

This file was used only in .travis.yml.
---
 tools/ppc64le-patch.py | 28 ----------------------------
 1 file changed, 28 deletions(-)
 delete mode 100644 tools/ppc64le-patch.py

diff --git a/tools/ppc64le-patch.py b/tools/ppc64le-patch.py
deleted file mode 100644
index a0b04ce502..0000000000
--- a/tools/ppc64le-patch.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-Except on bionic, Travis Linux base image for PPC64LE
-platform lacks the proper
-permissions to the directory ~/.cache/pip/wheels that allow
-the user running travis build to install pip packages.
-TODO: is someone tracking this issue? Maybe just move to bionic?
-"""
-
-import collections
-import os
-import subprocess
-
-
-def patch():
-    env = collections.defaultdict(str, os.environ)
-    if env['TRAVIS_CPU_ARCH'] != 'ppc64le':
-        return
-    cmd = [
-        'sudo',
-        'chown',
-        '-Rfv',
-        '{USER}:{GROUP}'.format_map(env),
-        os.path.expanduser('~/.cache/pip/wheels'),
-    ]
-    subprocess.Popen(cmd)
-
-
-__name__ == '__main__' and patch()

From df12ccbcd90c5ea054aebcdf957939898244098f Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 30 Aug 2024 10:39:05 -0400
Subject: [PATCH 1091/1761] Update pyright configs comments

---
 .github/workflows/pyright.yml | 12 +++++++-----
 pyrightconfig.json            | 16 ++++++++--------
 2 files changed, 15 insertions(+), 13 deletions(-)

diff --git a/.github/workflows/pyright.yml b/.github/workflows/pyright.yml
index bb25f1ba82..38fb910d85 100644
--- a/.github/workflows/pyright.yml
+++ b/.github/workflows/pyright.yml
@@ -9,7 +9,7 @@ on:
     - gh-readonly-queue/**
     tags:
     # required if branches-ignore is supplied (jaraco/skeleton#103)
-    - '**'
+    - "**"
   pull_request:
   workflow_dispatch:
 
@@ -24,15 +24,17 @@ env:
   # pin pyright version so a new version doesn't suddenly cause the CI to fail,
   # until types-setuptools is removed from typeshed.
   # For help with static-typing issues, or pyright update, ping @Avasam
+  #
+  # An exact version from https://github.com/microsoft/pyright/releases or "latest"
   PYRIGHT_VERSION: "1.1.377"
 
   # Environment variable to support color support (jaraco/skeleton#66)
   FORCE_COLOR: 1
 
   # Suppress noisy pip warnings
-  PIP_DISABLE_PIP_VERSION_CHECK: 'true'
-  PIP_NO_PYTHON_VERSION_WARNING: 'true'
-  PIP_NO_WARN_SCRIPT_LOCATION: 'true'
+  PIP_DISABLE_PIP_VERSION_CHECK: "true"
+  PIP_NO_PYTHON_VERSION_WARNING: "true"
+  PIP_NO_WARN_SCRIPT_LOCATION: "true"
 
 jobs:
   pyright:
@@ -65,7 +67,7 @@ jobs:
           else
             echo '> pip install pyright==${{ env.PYRIGHT_VERSION }}'
           fi
-          echo 'pyright --threads'
+          echo '> pyright --threads'
         shell: bash
       - name: Run pyright
         uses: jakebailey/pyright-action@v2
diff --git a/pyrightconfig.json b/pyrightconfig.json
index cd04c33371..27a329e169 100644
--- a/pyrightconfig.json
+++ b/pyrightconfig.json
@@ -1,12 +1,14 @@
 {
 	"$schema": "https://raw.githubusercontent.com/microsoft/pyright/main/packages/vscode-pyright/schemas/pyrightconfig.schema.json",
 	"exclude": [
+		// Avoid scanning Python files in generated folders
 		"build",
 		".tox",
 		".eggs",
-		"**/_vendor", // Vendored
-		"setuptools/_distutils", // Vendored
-		"setuptools/config/_validate_pyproject/**", // Auto-generated
+		"setuptools/config/_validate_pyproject/**",
+		// These are vendored
+		"**/_vendor",
+		"setuptools/_distutils",
 	],
 	// Our testing setup doesn't allow passing CLI arguments, so local devs have to set this manually.
 	// "pythonVersion": "3.8",
@@ -15,15 +17,13 @@
 	"typeCheckingMode": "basic",
 	// Too many issues caused by dynamic patching, still worth fixing when we can
 	"reportAttributeAccessIssue": "warning",
-	// Fails on Python 3.12 due to missing distutils and on cygwin CI tests
-	"reportAssignmentType": "warning",
+	// Fails on Python 3.12 due to missing distutils
 	"reportMissingImports": "warning",
-	"reportOptionalCall": "warning",
 	// FIXME: A handful of reportOperatorIssue spread throughout the codebase
 	"reportOperatorIssue": "warning",
 	// Deferred initialization (initialize_options/finalize_options) causes many "potentially None" issues
-	// TODO: Fix with type-guards or by changing how it's initialized
-	"reportArgumentType": "warning", // A lot of these are caused by jaraco.path.build's spec argument not being a Mapping https://github.com/jaraco/jaraco.path/pull/3
+	// TODO: Fix with type-guards, by changing how it's initialized, or by casting initial assignments
+	"reportArgumentType": "warning",
 	"reportCallIssue": "warning",
 	"reportGeneralTypeIssues": "warning",
 	"reportOptionalIterable": "warning",

From 1a6e38c0bfccd18a01deaca1491bcde3e778404c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 31 Aug 2024 05:50:38 -0400
Subject: [PATCH 1092/1761] Remove workaround for sphinx-contrib/sphinx-lint#83

---
 tox.ini | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/tox.ini b/tox.ini
index 01f0975f5c..1424305140 100644
--- a/tox.ini
+++ b/tox.ini
@@ -31,9 +31,7 @@ extras =
 changedir = docs
 commands =
 	python -m sphinx -W --keep-going . {toxinidir}/build/html
-	python -m sphinxlint \
-		# workaround for sphinx-contrib/sphinx-lint#83
-		--jobs 1
+	python -m sphinxlint
 
 [testenv:finalize]
 description = assemble changelog and tag a release

From 9d05e00f4592f591fd57ea9bfa278950c6b60bf5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 08:46:56 -0400
Subject: [PATCH 1093/1761] Import packaging naturally.

---
 distutils/dist.py | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index 6191ae78ab..154301baff 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -14,6 +14,8 @@
 from collections.abc import Iterable
 from email import message_from_file
 
+from packaging.utils import canonicalize_name, canonicalize_version
+
 from ._log import log
 from .debug import DEBUG
 from .errors import (
@@ -25,13 +27,6 @@
 from .fancy_getopt import FancyGetopt, translate_longopt
 from .util import check_environ, rfc822_escape, strtobool
 
-try:
-    from packaging.utils import canonicalize_name, canonicalize_version
-except ImportError as ex:  # pragma: no cover
-    raise ImportError(
-        "Distutils should be distributed alongside setuptools and its dependencies"
-    ) from ex
-
 # Regex to define acceptable Distutils command names.  This is not *quite*
 # the same as a Python NAME -- I don't allow leading underscores.  The fact
 # that they're very similar is no coincidence; the default naming scheme is

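For reference, the two helpers now imported unconditionally normalise
project names and version strings. Their behaviour on a couple of
illustrative inputs::

    from packaging.utils import canonicalize_name, canonicalize_version

    # Lower-cases and collapses runs of '-', '_' and '.' into single dashes.
    print(canonicalize_name("My_Package.Name"))  # my-package-name

    # Normalises the version; trailing zeros are stripped by default.
    print(canonicalize_version("1.4.0"))         # 1.4
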
From 247a1c15b0b19057906d9715ce05f8a569c77ea8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 08:51:22 -0400
Subject: [PATCH 1094/1761] Updated comment and indented the dependency.

---
 pyproject.toml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index c9b358b544..068902265f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,7 +17,10 @@ classifiers = [
 	"Programming Language :: Python :: 3 :: Only",
 ]
 requires-python = ">=3.8"
-dependencies = ["packaging"]  # setuptools dependency
+dependencies = [
+	# Setuptools must require these
+	"packaging",
+]
 dynamic = ["version"]
 
 [project.urls]

From 7ee6a6e42ee6c0fcb1f02adb081c89501c7bed42 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 08:57:06 -0400
Subject: [PATCH 1095/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docs/conf.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docs/conf.py b/docs/conf.py
index 9460b53eda..6c2c7f538f 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-
 extensions = [
     'sphinx.ext.autodoc',
     'jaraco.packaging.sphinx',

From de03b24ba3a522a990dfda352d2d5d8e1fa34650 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 09:06:20 -0400
Subject: [PATCH 1096/1761] Prefer imperative, third-person.

---
 distutils/_msvccompiler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index 6a89282562..0c7aa7af78 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -3,7 +3,7 @@
 Contains MSVCCompiler, an implementation of the abstract CCompiler class
 for Microsoft Visual Studio 2015.
 
-Please ensure that you are using VS 2015 or later.
+This module requires VS 2015 or later.
 """
 
 # Written by Perry Stoll

From 8170b7aa74f5f69a20a52b862f61c62cd1c1b249 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 09:16:46 -0400
Subject: [PATCH 1097/1761] Add news fragment.

---
 newsfragments/4622.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4622.bugfix.rst

diff --git a/newsfragments/4622.bugfix.rst b/newsfragments/4622.bugfix.rst
new file mode 100644
index 0000000000..0128edbcd5
--- /dev/null
+++ b/newsfragments/4622.bugfix.rst
@@ -0,0 +1 @@
+Merge with pypa/distutils@3dcdf8567, removing the duplicate vendored copy of packaging.
\ No newline at end of file

From 60b037ea820383cfb46c539cae85aa55cbeac247 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 14 Aug 2024 14:39:25 +0100
Subject: [PATCH 1098/1761] Add unit test for ext-modules in pyproject.toml

---
 .../tests/config/test_apply_pyprojecttoml.py  | 20 +++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 78959b6454..e86bae4589 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -324,6 +324,26 @@ def test_invalid_module_name(self, tmp_path, monkeypatch, module):
             self.dist(module).py_modules
 
 
+class TestExtModules:
+    def test_pyproject_sets_attribute(self, tmp_path, monkeypatch):
+        monkeypatch.chdir(tmp_path)
+        pyproject = Path("pyproject.toml")
+        toml_config = """
+        [project]
+        name = "test"
+        version = "42.0"
+        [tool.setuptools]
+        ext-modules = [
+          {name = "my.ext", sources = ["hello.c", "world.c"]}
+        ]
+        """
+        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
+        dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
+        assert len(dist.ext_modules) == 1
+        assert dist.ext_modules[0].name == "my.ext"
+        assert set(dist.ext_modules[0].sources) == {"hello.c", "world.c"}
+
+
 class TestDeprecatedFields:
     def test_namespace_packages(self, tmp_path):
         pyproject = tmp_path / "pyproject.toml"

From 933ab6d9ddba10b84466627fde34a2a52cb18b67 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 14 Aug 2024 14:10:08 +0100
Subject: [PATCH 1099/1761] Update JSON schema to allow ext-modules property

---
 setuptools/config/setuptools.schema.json | 81 ++++++++++++++++++++++++
 1 file changed, 81 insertions(+)

diff --git a/setuptools/config/setuptools.schema.json b/setuptools/config/setuptools.schema.json
index 50ee6217ee..ec887b3573 100644
--- a/setuptools/config/setuptools.schema.json
+++ b/setuptools/config/setuptools.schema.json
@@ -158,6 +158,11 @@
       "items": {"type": "string", "format": "python-module-name-relaxed"},
       "$comment": "TODO: clarify the relationship with ``packages``"
     },
+    "ext-modules": {
+      "description": "Extension modules to be compiled by setuptools",
+      "type": "array",
+      "items": {"$ref": "#/definitions/ext-module"}
+    },
     "data-files": {
       "$$description": [
         "``dict``-like structure where each key represents a directory and",
@@ -254,6 +259,82 @@
         {"type": "string", "format": "pep561-stub-name"}
       ]
     },
+    "ext-module": {
+      "$id": "#/definitions/ext-module",
+      "title": "Extension module",
+      "description": "Parameters to construct a :class:`setuptools.Extension` object",
+      "type": "object",
+      "required": ["name", "sources"],
+      "additionalProperties": false,
+      "properties": {
+        "name": {
+          "type": "string",
+          "format": "python-module-name-relaxed"
+        },
+        "sources": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "include-dirs":{
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "define-macros": {
+          "type": "array",
+          "items": {
+            "type": "array",
+            "items": [
+              {"description": "macro name", "type": "string"},
+              {"description": "macro value", "oneOf": [{"type": "string"}, {"type": "null"}]}
+            ],
+            "additionalItems": false
+          }
+        },
+        "undef-macros": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "library-dirs": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "libraries": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "runtime-library-dirs": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "extra-objects": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "extra-compile-args": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "extra-link-args": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "export-symbols": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "swig-opts": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "depends": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "language": {"type": "string"},
+        "optional": {"type": "boolean"},
+        "py-limited-api": {"type": "boolean"}
+      }
+    },
     "file-directive": {
       "$id": "#/definitions/file-directive",
       "title": "'file:' directive",

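Under this schema, a hypothetical ``pyproject.toml`` exercising several of
the ``ext-module`` properties would parse as follows (sketch; the module
name and paths are invented)::

    import tomllib  # Python 3.11+; the tomli backport offers the same API

    TOML = """
    [[tool.setuptools.ext-modules]]
    name = "pkg._speedups"
    sources = ["src/speedups.c"]
    include-dirs = ["src/include"]
    define-macros = [["NDEBUG", "1"]]
    py-limited-api = true
    """

    ext = tomllib.loads(TOML)["tool"]["setuptools"]["ext-modules"][0]
    print(ext["name"], ext["sources"])  # pkg._speedups ['src/speedups.c']
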
From b1b9f0f6df8135829d79b729378c0cd9786be03f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 14 Aug 2024 14:17:08 +0100
Subject: [PATCH 1100/1761] Update generated validation code for pyproject.toml

---
 .../fastjsonschema_validations.py             | 277 ++++++++++++++++--
 1 file changed, 246 insertions(+), 31 deletions(-)

diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
index 7e403af4b7..42e7aa5e33 100644
--- a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
+++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
@@ -31,7 +31,7 @@ def validate(data, custom_formats={}, name_prefix=None):
 
 def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', 
'$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
@@ -86,7 +86,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_bui
             data_keys.remove("tool")
             data__tool = data["tool"]
             if not isinstance(data__tool, (dict)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either 
as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = 
"pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either 
as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should 
be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 
'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
             data__tool_is_dict = isinstance(data__tool, dict)
             if data__tool_is_dict:
                 data__tool_keys = set(data__tool.keys())
@@ -99,12 +99,12 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_bui
                     data__tool__setuptools = data__tool["setuptools"]
                     validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools")
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
     return data
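
[Editor's note, not part of the patch: the generated validators raise fastjsonschema's JsonSchemaValueException on the first violation and return the data unchanged on success. A minimal usage sketch, assuming the generated module is importable and supplying stand-in format callables via ``custom_formats`` (the config keys below are examples; ``"not-a-field"`` is deliberately invalid):]

    try:
        validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(
            {"zip-safe": True, "not-a-field": 1},
            custom_formats={"python-module-name-relaxed": lambda v: True},
        )
    except JsonSchemaValueException as exc:
        # The leftover unknown key trips the additionalProperties check,
        # e.g. "data must not contain {'not-a-field'} properties".
        print(exc.message, exc.rule)  # exc.rule == 'additionalProperties'
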
 
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 
'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 
'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string 
per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 
'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 
'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` 
object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
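
[Editor's note, not part of the patch: the body elided by the hunks below repeats one pattern per property: remove the key from ``data_keys``, validate its value, and finally reject whatever keys remain, which is how ``additionalProperties: False`` is compiled. An illustrative hand-written equivalent of that final check; the function and parameter names here are hypothetical, but the exception kwargs match those used by the generated code:]

    def reject_unknown_keys(data: dict, known: set, name: str) -> None:
        # Anything not consumed as a known property is an unknown key.
        leftover = set(data) - known
        if leftover:
            raise JsonSchemaValueException(
                f"{name} must not contain {leftover} properties",
                value=data, name=name, rule="additionalProperties",
            )
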
@@ -366,6 +366,16 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                     if isinstance(data__pymodules_item, str):
                         if not custom_formats["python-module-name-relaxed"](data__pymodules_item):
                             raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name-relaxed", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
+        if "ext-modules" in data_keys:
+            data_keys.remove("ext-modules")
+            data__extmodules = data["ext-modules"]
+            if not isinstance(data__extmodules, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".ext-modules must be array", value=data__extmodules, name="" + (name_prefix or "data") + ".ext-modules", definition={'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}}, rule='type')
+            data__extmodules_is_list = isinstance(data__extmodules, (list, tuple))
+            if data__extmodules_is_list:
+                data__extmodules_len = len(data__extmodules)
+                for data__extmodules_x, data__extmodules_item in enumerate(data__extmodules):
+                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_ext_module(data__extmodules_item, custom_formats, (name_prefix or "data") + ".ext-modules[{data__extmodules_x}]".format(**locals()))
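
[Editor's note, illustrative and not part of the patch: the new branch above accepts the parsed form of ``[[tool.setuptools.ext-modules]]`` tables. Per the schema in this hunk, ``name`` and ``sources`` are required and the remaining keys mirror the arguments of ``setuptools.Extension``. A value that would pass the per-item validator (the module and file names are made up):]

    ext_modules = [
        {
            "name": "mypkg._speedups",            # must satisfy python-module-name-relaxed
            "sources": ["src/speedups.c"],         # required alongside "name"
            "define-macros": [["NDEBUG", None]],   # [macro name, value-or-null] pairs
            "py-limited-api": True,                # plain boolean per the schema
        }
    ]
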
         if "data-files" in data_keys:
             data_keys.remove("data-files")
             data__datafiles = data["data-files"]
@@ -524,7 +534,7 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                 if data__dynamic_keys:
                     raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 
'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}, rule='additionalProperties')
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': 
"'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might 
not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', 
'(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': 
"'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a 
:class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` 
object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
     return data
 
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_properties_file(data, custom_formats={}, name_prefix=None):
@@ -613,6 +623,211 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
             raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, rule='additionalProperties')
     return data
 
+def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_ext_module(data, custom_formats={}, name_prefix=None):
+    if not isinstance(data, (dict)):
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, rule='type')
+    data_is_dict = isinstance(data, dict)
+    if data_is_dict:
+        data__missing_keys = set(['name', 'sources']) - data.keys()
+        if data__missing_keys:
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, rule='required')
+        data_keys = set(data.keys())
+        if "name" in data_keys:
+            data_keys.remove("name")
+            data__name = data["name"]
+            if not isinstance(data__name, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='type')
+            if isinstance(data__name, str):
+                if not custom_formats["python-module-name-relaxed"](data__name):
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be python-module-name-relaxed", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
+        if "sources" in data_keys:
+            data_keys.remove("sources")
+            data__sources = data["sources"]
+            if not isinstance(data__sources, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".sources must be array", value=data__sources, name="" + (name_prefix or "data") + ".sources", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__sources_is_list = isinstance(data__sources, (list, tuple))
+            if data__sources_is_list:
+                data__sources_len = len(data__sources)
+                for data__sources_x, data__sources_item in enumerate(data__sources):
+                    if not isinstance(data__sources_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".sources[{data__sources_x}]".format(**locals()) + " must be string", value=data__sources_item, name="" + (name_prefix or "data") + ".sources[{data__sources_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "include-dirs" in data_keys:
+            data_keys.remove("include-dirs")
+            data__includedirs = data["include-dirs"]
+            if not isinstance(data__includedirs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-dirs must be array", value=data__includedirs, name="" + (name_prefix or "data") + ".include-dirs", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__includedirs_is_list = isinstance(data__includedirs, (list, tuple))
+            if data__includedirs_is_list:
+                data__includedirs_len = len(data__includedirs)
+                for data__includedirs_x, data__includedirs_item in enumerate(data__includedirs):
+                    if not isinstance(data__includedirs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-dirs[{data__includedirs_x}]".format(**locals()) + " must be string", value=data__includedirs_item, name="" + (name_prefix or "data") + ".include-dirs[{data__includedirs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "define-macros" in data_keys:
+            data_keys.remove("define-macros")
+            data__definemacros = data["define-macros"]
+            if not isinstance(data__definemacros, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros must be array", value=data__definemacros, name="" + (name_prefix or "data") + ".define-macros", definition={'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, rule='type')
+            data__definemacros_is_list = isinstance(data__definemacros, (list, tuple))
+            if data__definemacros_is_list:
+                data__definemacros_len = len(data__definemacros)
+                for data__definemacros_x, data__definemacros_item in enumerate(data__definemacros):
+                    if not isinstance(data__definemacros_item, (list, tuple)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + " must be array", value=data__definemacros_item, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + "", definition={'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}, rule='type')
+                    data__definemacros_item_is_list = isinstance(data__definemacros_item, (list, tuple))
+                    if data__definemacros_item_is_list:
+                        data__definemacros_item_len = len(data__definemacros_item)
+                        if data__definemacros_item_len > 0:
+                            data__definemacros_item__0 = data__definemacros_item[0]
+                            if not isinstance(data__definemacros_item__0, (str)):
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][0]".format(**locals()) + " must be string", value=data__definemacros_item__0, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][0]".format(**locals()) + "", definition={'description': 'macro name', 'type': 'string'}, rule='type')
+                        if data__definemacros_item_len > 1:
+                            data__definemacros_item__1 = data__definemacros_item[1]
+                            data__definemacros_item__1_one_of_count9 = 0
+                            if data__definemacros_item__1_one_of_count9 < 2:
+                                try:
+                                    if not isinstance(data__definemacros_item__1, (str)):
+                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be string", value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+                                    data__definemacros_item__1_one_of_count9 += 1
+                                except JsonSchemaValueException: pass
+                            if data__definemacros_item__1_one_of_count9 < 2:
+                                try:
+                                    if not isinstance(data__definemacros_item__1, (NoneType)):
+                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be null", value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'type': 'null'}, rule='type')
+                                    data__definemacros_item__1_one_of_count9 += 1
+                                except JsonSchemaValueException: pass
+                            if data__definemacros_item__1_one_of_count9 != 1:
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be valid exactly by one definition" + (" (" + str(data__definemacros_item__1_one_of_count9) + " matches found)"), value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}, rule='oneOf')
+                        if data__definemacros_item_len > 2:
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + " must contain only specified items", value=data__definemacros_item, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + "", definition={'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}, rule='items')
+        if "undef-macros" in data_keys:
+            data_keys.remove("undef-macros")
+            data__undefmacros = data["undef-macros"]
+            if not isinstance(data__undefmacros, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".undef-macros must be array", value=data__undefmacros, name="" + (name_prefix or "data") + ".undef-macros", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__undefmacros_is_list = isinstance(data__undefmacros, (list, tuple))
+            if data__undefmacros_is_list:
+                data__undefmacros_len = len(data__undefmacros)
+                for data__undefmacros_x, data__undefmacros_item in enumerate(data__undefmacros):
+                    if not isinstance(data__undefmacros_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".undef-macros[{data__undefmacros_x}]".format(**locals()) + " must be string", value=data__undefmacros_item, name="" + (name_prefix or "data") + ".undef-macros[{data__undefmacros_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "library-dirs" in data_keys:
+            data_keys.remove("library-dirs")
+            data__librarydirs = data["library-dirs"]
+            if not isinstance(data__librarydirs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".library-dirs must be array", value=data__librarydirs, name="" + (name_prefix or "data") + ".library-dirs", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__librarydirs_is_list = isinstance(data__librarydirs, (list, tuple))
+            if data__librarydirs_is_list:
+                data__librarydirs_len = len(data__librarydirs)
+                for data__librarydirs_x, data__librarydirs_item in enumerate(data__librarydirs):
+                    if not isinstance(data__librarydirs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".library-dirs[{data__librarydirs_x}]".format(**locals()) + " must be string", value=data__librarydirs_item, name="" + (name_prefix or "data") + ".library-dirs[{data__librarydirs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "libraries" in data_keys:
+            data_keys.remove("libraries")
+            data__libraries = data["libraries"]
+            if not isinstance(data__libraries, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".libraries must be array", value=data__libraries, name="" + (name_prefix or "data") + ".libraries", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__libraries_is_list = isinstance(data__libraries, (list, tuple))
+            if data__libraries_is_list:
+                data__libraries_len = len(data__libraries)
+                for data__libraries_x, data__libraries_item in enumerate(data__libraries):
+                    if not isinstance(data__libraries_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".libraries[{data__libraries_x}]".format(**locals()) + " must be string", value=data__libraries_item, name="" + (name_prefix or "data") + ".libraries[{data__libraries_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "runtime-library-dirs" in data_keys:
+            data_keys.remove("runtime-library-dirs")
+            data__runtimelibrarydirs = data["runtime-library-dirs"]
+            if not isinstance(data__runtimelibrarydirs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime-library-dirs must be array", value=data__runtimelibrarydirs, name="" + (name_prefix or "data") + ".runtime-library-dirs", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__runtimelibrarydirs_is_list = isinstance(data__runtimelibrarydirs, (list, tuple))
+            if data__runtimelibrarydirs_is_list:
+                data__runtimelibrarydirs_len = len(data__runtimelibrarydirs)
+                for data__runtimelibrarydirs_x, data__runtimelibrarydirs_item in enumerate(data__runtimelibrarydirs):
+                    if not isinstance(data__runtimelibrarydirs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime-library-dirs[{data__runtimelibrarydirs_x}]".format(**locals()) + " must be string", value=data__runtimelibrarydirs_item, name="" + (name_prefix or "data") + ".runtime-library-dirs[{data__runtimelibrarydirs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "extra-objects" in data_keys:
+            data_keys.remove("extra-objects")
+            data__extraobjects = data["extra-objects"]
+            if not isinstance(data__extraobjects, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-objects must be array", value=data__extraobjects, name="" + (name_prefix or "data") + ".extra-objects", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__extraobjects_is_list = isinstance(data__extraobjects, (list, tuple))
+            if data__extraobjects_is_list:
+                data__extraobjects_len = len(data__extraobjects)
+                for data__extraobjects_x, data__extraobjects_item in enumerate(data__extraobjects):
+                    if not isinstance(data__extraobjects_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-objects[{data__extraobjects_x}]".format(**locals()) + " must be string", value=data__extraobjects_item, name="" + (name_prefix or "data") + ".extra-objects[{data__extraobjects_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "extra-compile-args" in data_keys:
+            data_keys.remove("extra-compile-args")
+            data__extracompileargs = data["extra-compile-args"]
+            if not isinstance(data__extracompileargs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-compile-args must be array", value=data__extracompileargs, name="" + (name_prefix or "data") + ".extra-compile-args", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__extracompileargs_is_list = isinstance(data__extracompileargs, (list, tuple))
+            if data__extracompileargs_is_list:
+                data__extracompileargs_len = len(data__extracompileargs)
+                for data__extracompileargs_x, data__extracompileargs_item in enumerate(data__extracompileargs):
+                    if not isinstance(data__extracompileargs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-compile-args[{data__extracompileargs_x}]".format(**locals()) + " must be string", value=data__extracompileargs_item, name="" + (name_prefix or "data") + ".extra-compile-args[{data__extracompileargs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "extra-link-args" in data_keys:
+            data_keys.remove("extra-link-args")
+            data__extralinkargs = data["extra-link-args"]
+            if not isinstance(data__extralinkargs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-link-args must be array", value=data__extralinkargs, name="" + (name_prefix or "data") + ".extra-link-args", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__extralinkargs_is_list = isinstance(data__extralinkargs, (list, tuple))
+            if data__extralinkargs_is_list:
+                data__extralinkargs_len = len(data__extralinkargs)
+                for data__extralinkargs_x, data__extralinkargs_item in enumerate(data__extralinkargs):
+                    if not isinstance(data__extralinkargs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-link-args[{data__extralinkargs_x}]".format(**locals()) + " must be string", value=data__extralinkargs_item, name="" + (name_prefix or "data") + ".extra-link-args[{data__extralinkargs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "export-symbols" in data_keys:
+            data_keys.remove("export-symbols")
+            data__exportsymbols = data["export-symbols"]
+            if not isinstance(data__exportsymbols, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".export-symbols must be array", value=data__exportsymbols, name="" + (name_prefix or "data") + ".export-symbols", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__exportsymbols_is_list = isinstance(data__exportsymbols, (list, tuple))
+            if data__exportsymbols_is_list:
+                data__exportsymbols_len = len(data__exportsymbols)
+                for data__exportsymbols_x, data__exportsymbols_item in enumerate(data__exportsymbols):
+                    if not isinstance(data__exportsymbols_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".export-symbols[{data__exportsymbols_x}]".format(**locals()) + " must be string", value=data__exportsymbols_item, name="" + (name_prefix or "data") + ".export-symbols[{data__exportsymbols_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "swig-opts" in data_keys:
+            data_keys.remove("swig-opts")
+            data__swigopts = data["swig-opts"]
+            if not isinstance(data__swigopts, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".swig-opts must be array", value=data__swigopts, name="" + (name_prefix or "data") + ".swig-opts", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__swigopts_is_list = isinstance(data__swigopts, (list, tuple))
+            if data__swigopts_is_list:
+                data__swigopts_len = len(data__swigopts)
+                for data__swigopts_x, data__swigopts_item in enumerate(data__swigopts):
+                    if not isinstance(data__swigopts_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".swig-opts[{data__swigopts_x}]".format(**locals()) + " must be string", value=data__swigopts_item, name="" + (name_prefix or "data") + ".swig-opts[{data__swigopts_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "depends" in data_keys:
+            data_keys.remove("depends")
+            data__depends = data["depends"]
+            if not isinstance(data__depends, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".depends must be array", value=data__depends, name="" + (name_prefix or "data") + ".depends", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__depends_is_list = isinstance(data__depends, (list, tuple))
+            if data__depends_is_list:
+                data__depends_len = len(data__depends)
+                for data__depends_x, data__depends_item in enumerate(data__depends):
+                    if not isinstance(data__depends_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".depends[{data__depends_x}]".format(**locals()) + " must be string", value=data__depends_item, name="" + (name_prefix or "data") + ".depends[{data__depends_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "language" in data_keys:
+            data_keys.remove("language")
+            data__language = data["language"]
+            if not isinstance(data__language, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".language must be string", value=data__language, name="" + (name_prefix or "data") + ".language", definition={'type': 'string'}, rule='type')
+        if "optional" in data_keys:
+            data_keys.remove("optional")
+            data__optional = data["optional"]
+            if not isinstance(data__optional, (bool)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional must be boolean", value=data__optional, name="" + (name_prefix or "data") + ".optional", definition={'type': 'boolean'}, rule='type')
+        if "py-limited-api" in data_keys:
+            data_keys.remove("py-limited-api")
+            data__pylimitedapi = data["py-limited-api"]
+            if not isinstance(data__pylimitedapi, (bool)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-limited-api must be boolean", value=data__pylimitedapi, name="" + (name_prefix or "data") + ".py-limited-api", definition={'type': 'boolean'}, rule='type')
+        if data_keys:
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, rule='additionalProperties')
+    return data
+
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_find_directive(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type')
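
The remaining hunks in this generated file make no semantic change: they only renumber the ``oneOf``/``anyOf`` occurrence counters (``count9`` becomes ``count10``, and so on). fastjsonschema numbers these counters sequentially across the whole generated module, so inserting the new ``ext_module`` validator above shifts every later counter by one. The counting pattern itself is sketched below; this is a simplified illustration with invented names, not the verbatim generated code.

    # Simplified sketch of the oneOf pattern fastjsonschema generates:
    # try each branch, tally the matches, and require exactly one.
    def one_of(value, branches):
        matches = 0
        for check in branches:
            if matches < 2:        # the generated code stops counting past 2
                try:
                    check(value)   # each branch raises ValueError on failure
                    matches += 1
                except ValueError:
                    pass
        if matches != 1:
            raise ValueError(
                "must be valid exactly by one definition "
                f"({matches} matches found)"
            )
        return value
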
@@ -674,26 +889,26 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_package_name(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (str)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='type')
-    data_any_of_count9 = 0
-    if not data_any_of_count9:
+    data_any_of_count10 = 0
+    if not data_any_of_count10:
         try:
             if not isinstance(data, (str)):
                 raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='type')
             if isinstance(data, str):
                 if not custom_formats["python-module-name-relaxed"](data):
                     raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-module-name-relaxed", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
-            data_any_of_count9 += 1
+            data_any_of_count10 += 1
         except JsonSchemaValueException: pass
-    if not data_any_of_count9:
+    if not data_any_of_count10:
         try:
             if not isinstance(data, (str)):
                 raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'pep561-stub-name'}, rule='type')
             if isinstance(data, str):
                 if not custom_formats["pep561-stub-name"](data):
                     raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep561-stub-name", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'pep561-stub-name'}, rule='format')
-            data_any_of_count9 += 1
+            data_any_of_count10 += 1
         except JsonSchemaValueException: pass
-    if not data_any_of_count9:
+    if not data_any_of_count10:
         raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='anyOf')
     return data
 
@@ -749,19 +964,19 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
         if "readme" in data_keys:
             data_keys.remove("readme")
             data__readme = data["readme"]
-            data__readme_one_of_count10 = 0
-            if data__readme_one_of_count10 < 2:
+            data__readme_one_of_count11 = 0
+            if data__readme_one_of_count11 < 2:
                 try:
                     if not isinstance(data__readme, (str)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type')
-                    data__readme_one_of_count10 += 1
+                    data__readme_one_of_count11 += 1
                 except JsonSchemaValueException: pass
-            if data__readme_one_of_count10 < 2:
+            if data__readme_one_of_count11 < 2:
                 try:
                     if not isinstance(data__readme, (dict)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type')
-                    data__readme_any_of_count11 = 0
-                    if not data__readme_any_of_count11:
+                    data__readme_any_of_count12 = 0
+                    if not data__readme_any_of_count12:
                         try:
                             data__readme_is_dict = isinstance(data__readme, dict)
                             if data__readme_is_dict:
@@ -774,9 +989,9 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                                     data__readme__file = data__readme["file"]
                                     if not isinstance(data__readme__file, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type')
-                            data__readme_any_of_count11 += 1
+                            data__readme_any_of_count12 += 1
                         except JsonSchemaValueException: pass
-                    if not data__readme_any_of_count11:
+                    if not data__readme_any_of_count12:
                         try:
                             data__readme_is_dict = isinstance(data__readme, dict)
                             if data__readme_is_dict:
@@ -789,9 +1004,9 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                                     data__readme__text = data__readme["text"]
                                     if not isinstance(data__readme__text, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type')
-                            data__readme_any_of_count11 += 1
+                            data__readme_any_of_count12 += 1
                         except JsonSchemaValueException: pass
-                    if not data__readme_any_of_count11:
+                    if not data__readme_any_of_count12:
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf')
                     data__readme_is_dict = isinstance(data__readme, dict)
                     if data__readme_is_dict:
@@ -804,10 +1019,10 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                             data__readme__contenttype = data__readme["content-type"]
                             if not isinstance(data__readme__contenttype, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type')
-                    data__readme_one_of_count10 += 1
+                    data__readme_one_of_count11 += 1
                 except JsonSchemaValueException: pass
-            if data__readme_one_of_count10 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count10) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
+            if data__readme_one_of_count11 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count11) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
         if "requires-python" in data_keys:
             data_keys.remove("requires-python")
             data__requirespython = data["requires-python"]
@@ -819,8 +1034,8 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
         if "license" in data_keys:
             data_keys.remove("license")
             data__license = data["license"]
-            data__license_one_of_count12 = 0
-            if data__license_one_of_count12 < 2:
+            data__license_one_of_count13 = 0
+            if data__license_one_of_count13 < 2:
                 try:
                     data__license_is_dict = isinstance(data__license, dict)
                     if data__license_is_dict:
@@ -833,9 +1048,9 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                             data__license__file = data__license["file"]
                             if not isinstance(data__license__file, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type')
-                    data__license_one_of_count12 += 1
+                    data__license_one_of_count13 += 1
                 except JsonSchemaValueException: pass
-            if data__license_one_of_count12 < 2:
+            if data__license_one_of_count13 < 2:
                 try:
                     data__license_is_dict = isinstance(data__license, dict)
                     if data__license_is_dict:
@@ -848,10 +1063,10 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                             data__license__text = data__license["text"]
                             if not isinstance(data__license__text, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}, rule='type')
-                    data__license_one_of_count12 += 1
+                    data__license_one_of_count13 += 1
                 except JsonSchemaValueException: pass
-            if data__license_one_of_count12 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count12) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf')
+            if data__license_one_of_count13 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count13) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf')
         if "authors" in data_keys:
             data_keys.remove("authors")
             data__authors = data["authors"]
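
The regenerated validator above implements JSON Schema ``oneOf`` by counting
matches: each alternative is attempted in its own ``try`` block, a per-field
counter (e.g. ``data__license_one_of_count13``) records successes, and exactly
one success is required; the ``if ... < 2`` guards simply stop trying further
alternatives once a second match makes failure certain. A minimal sketch of the
same pattern (names are illustrative, not the generated code):

    def check_one_of(value, alternatives):
        # Each alternative raises on mismatch; tally the ones that accept.
        matches = 0
        for validate in alternatives:
            try:
                validate(value)
                matches += 1
            except ValueError:
                pass
        if matches != 1:
            raise ValueError(
                f"must be valid exactly by one definition ({matches} matches found)"
            )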

From 3b1051ad371455fb05ac0928af7a2ac1c5bb30a4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 14 Aug 2024 15:31:08 +0100
Subject: [PATCH 1101/1761] Implement ext-modules from pyproject.toml

---
 setuptools/config/_apply_pyprojecttoml.py | 26 +++++++++++++++++++----
 1 file changed, 22 insertions(+), 4 deletions(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 7b9c0b1a59..23179f3548 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -17,10 +17,11 @@
 from inspect import cleandoc
 from itertools import chain
 from types import MappingProxyType
-from typing import TYPE_CHECKING, Any, Callable, Dict, Mapping, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, Mapping, TypeVar, Union
 
 from .._path import StrPath
 from ..errors import RemovedConfigError
+from ..extension import Extension
 from ..warnings import SetuptoolsWarning
 
 if TYPE_CHECKING:
@@ -35,6 +36,7 @@
 _ProjectReadmeValue: TypeAlias = Union[str, Dict[str, str]]
 _CorrespFn: TypeAlias = Callable[["Distribution", Any, StrPath], None]
 _Correspondence: TypeAlias = Union[str, _CorrespFn]
+_T = TypeVar("_T")
 
 _logger = logging.getLogger(__name__)
 
@@ -117,13 +119,14 @@ def json_compatible_key(key: str) -> str:
 
 
 def _set_config(dist: Distribution, field: str, value: Any):
+    val = _PREPROCESS.get(field, _noop)(dist, value)
     setter = getattr(dist.metadata, f"set_{field}", None)
     if setter:
-        setter(value)
+        setter(val)
     elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES:
-        setattr(dist.metadata, field, value)
+        setattr(dist.metadata, field, val)
     else:
-        setattr(dist, field, value)
+        setattr(dist, field, val)
 
 
 _CONTENT_TYPES = {
@@ -218,6 +221,17 @@ def _optional_dependencies(dist: Distribution, val: dict, _root_dir):
     dist.extras_require = {**existing, **val}
 
 
+def _ext_modules(dist: Distribution, val: list[dict]) -> list[Extension]:
+    existing = dist.ext_modules or []
+    args = ({k.replace("-", "_"): v for k, v in x.items()} for x in val)
+    new = [Extension(**kw) for kw in args]
+    return [*existing, *new]
+
+
+def _noop(_dist: Distribution, val: _T) -> _T:
+    return val
+
+
 def _unify_entry_points(project_table: dict):
     project = project_table
     entry_points = project.pop("entry-points", project.pop("entry_points", {}))
@@ -376,6 +390,10 @@ def _acessor(obj):
     "license_files",
 }
 
+_PREPROCESS = {
+    "ext_modules": _ext_modules,
+}
+
 _PREVIOUSLY_DEFINED = {
     "name": _attrgetter("metadata.name"),
     "version": _attrgetter("metadata.version"),

From bf768e0cfb2194e554bb268801e249c3e1af2ca6 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 14 Aug 2024 15:31:43 +0100
Subject: [PATCH 1102/1761] Add experimental warning to ext-modules in
 pyproject.toml

---
 setuptools/config/pyprojecttoml.py                  | 3 +++
 setuptools/tests/config/test_apply_pyprojecttoml.py | 3 ++-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 3449a4bfb7..5d95e18b83 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -130,6 +130,9 @@ def read_configuration(
     asdict["tool"] = tool_table
     tool_table["setuptools"] = setuptools_table
 
+    if "ext-modules" in setuptools_table:
+        _ExperimentalConfiguration.emit(subject="[tool.setuptools.ext-modules]")
+
     with _ignore_errors(ignore_option_errors):
         # Don't complain about unrelated errors (e.g. tools not using the "tool" table)
         subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index e86bae4589..deee6fa47c 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -338,7 +338,8 @@ def test_pyproject_sets_attribute(self, tmp_path, monkeypatch):
         ]
         """
         pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
-        dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
+        with pytest.warns(pyprojecttoml._ExperimentalConfiguration):
+            dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
         assert len(dist.ext_modules) == 1
         assert dist.ext_modules[0].name == "my.ext"
         assert set(dist.ext_modules[0].sources) == {"hello.c", "world.c"}
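
Since ``_ExperimentalConfiguration`` is a warning class, builds that opt in to
the experimental table can manage the notice with the standard ``warnings``
machinery. A hedged sketch (assuming the private name remains importable):

    import warnings

    from setuptools.config.pyprojecttoml import _ExperimentalConfiguration

    # Silence the experimental notice, e.g. in a CI build script.
    warnings.filterwarnings("ignore", category=_ExperimentalConfiguration)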

From 11731e2950342a3c1d0138fe27d23a4e1e652119 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 14 Aug 2024 16:42:08 +0100
Subject: [PATCH 1103/1761] Add docs about ext-modules in pyproject.toml

---
 docs/userguide/ext_modules.rst      | 53 ++++++++++++++++++++++-------
 docs/userguide/pyproject_config.rst |  3 ++
 setuptools/extension.py             |  2 +-
 3 files changed, 45 insertions(+), 13 deletions(-)

diff --git a/docs/userguide/ext_modules.rst b/docs/userguide/ext_modules.rst
index eabc2c0ab3..19954f50e4 100644
--- a/docs/userguide/ext_modules.rst
+++ b/docs/userguide/ext_modules.rst
@@ -27,22 +27,41 @@ and all project metadata configuration in the ``pyproject.toml`` file:
    version = "0.42"
 
 To instruct setuptools to compile the ``foo.c`` file into the extension module
-``mylib.foo``, we need to add a ``setup.py`` file similar to the following:
+``mylib.foo``, we need to define an appropriate configuration in either
+``pyproject.toml`` [#pyproject.toml]_ or a ``setup.py`` file,
+similar to the following:
 
-.. code-block:: python
+.. tab:: pyproject.toml
 
-   from setuptools import Extension, setup
+    .. code-block:: toml
 
-   setup(
-       ext_modules=[
-           Extension(
-               name="mylib.foo",  # as it would be imported
-                                  # may include packages/namespaces separated by `.`
-
-               sources=["foo.c"], # all sources are compiled into a single binary file
-           ),
+       [tool.setuptools]
+       ext-modules = [
+         {name = "mylib.foo", sources = ["foo.c"]}
        ]
-   )
+
+.. tab:: setup.py
+
+    .. code-block:: python
+
+       from setuptools import Extension, setup
+
+       setup(
+           ext_modules=[
+               Extension(
+                   name="mylib.foo",
+                   sources=["foo.c"],
+               ),
+           ]
+       )
+
+The ``name`` value corresponds to how the extension module would be
+imported and may include packages/namespaces separated by ``.``.
+The ``sources`` value is a list of all source files that are compiled
+into a single binary file.
+Optionally, any other parameter of :class:`setuptools.Extension` can be defined
+in the configuration file (but in the case of ``pyproject.toml`` it must be
+written using the :wiki:`kebab-case` convention).
 
 .. seealso::
    You can find more information on the `Python docs about C/C++ extensions`_.
@@ -168,6 +187,16 @@ Extension API Reference
 .. autoclass:: setuptools.Extension
 
 
+----
+
+.. rubric:: Notes
+
+.. [#pyproject.toml]
+   Declarative configuration of extension modules via ``pyproject.toml`` was
+   introduced recently and is still considered experimental.
+   Therefore it might change in future versions of ``setuptools``.
+
+
 .. _Python docs about C/C++ extensions: https://docs.python.org/3/extending/extending.html
 .. _Cython: https://cython.readthedocs.io/en/stable/index.html
 .. _directory options: https://gcc.gnu.org/onlinedocs/gcc/Directory-Options.html
diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 4f60ad9324..e988fec7ac 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -88,6 +88,9 @@ file, and can be set via the ``tool.setuptools`` table:
 Key                       Value Type (TOML)           Notes
 ========================= =========================== =========================
 ``py-modules``            array                       See tip below.
+``ext-modules``           array of                    **Experimental** - Each item corresponds to a
+                          tables/inline-tables        :class:`setuptools.Extension` object and may define
+                                                      the associated parameters in :wiki:`kebab-case`.
 ``packages``              array or ``find`` directive See tip below.
 ``package-dir``           table/inline-table          Used when explicitly/manually listing ``packages``.
 ------------------------- --------------------------- -------------------------
diff --git a/setuptools/extension.py b/setuptools/extension.py
index dcc7709982..cbefc72508 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -127,7 +127,7 @@ class Extension(_Extension):
     :keyword bool py_limited_api:
       opt-in flag for the usage of :doc:`Python's limited API `.
 
-    :raises setuptools.errors.PlatformError: if 'runtime_library_dirs' is
+    :raises setuptools.errors.PlatformError: if ``runtime_library_dirs`` is
       specified on Windows. (since v63)
     """
 

From 592d089d2eb8b1e50fdd45d1939f77fe3832a307 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 14 Aug 2024 16:47:41 +0100
Subject: [PATCH 1104/1761] Add news fragment

---
 newsfragments/4568.feature.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4568.feature.rst

diff --git a/newsfragments/4568.feature.rst b/newsfragments/4568.feature.rst
new file mode 100644
index 0000000000..dadf4f4386
--- /dev/null
+++ b/newsfragments/4568.feature.rst
@@ -0,0 +1,2 @@
+Added support for defining ``ext-modules`` via ``pyproject.toml``
+(**EXPERIMENTAL**, may change in future releases).

From 21e8ab5abf09e8e93f713d2493c22fb2472ac865 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 30 Aug 2024 09:44:07 +0100
Subject: [PATCH 1105/1761] Add warning for universal wheels

---
 setuptools/command/bdist_wheel.py | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 8f06786659..18d6ecc5a2 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -26,6 +26,7 @@
 from wheel.wheelfile import WheelFile
 
 from .. import Command, __version__
+from ..warnings import SetuptoolsDeprecationWarning
 from .egg_info import egg_info as egg_info_cls
 
 from distutils import log
@@ -205,7 +206,7 @@ class bdist_wheel(Command):
             "g",
             "Group name used when creating a tar file [default: current group]",
         ),
-        ("universal", None, "make a universal wheel [default: false]"),
+        ("universal", None, "*DEPRECATED* make a universal wheel [default: false]"),
         (
             "compression=",
             None,
@@ -285,6 +286,19 @@ def finalize_options(self) -> None:
             if val.lower() in ("1", "true", "yes"):
                 self.universal = True
 
+        if self.universal:
+            SetuptoolsDeprecationWarning.emit(
+                "bdist_wheel.universal is deprecated",
+                """
+                With Python 2.7 end-of-life, support for building universal wheels
+                (i.e., wheels that support both Python 2 and Python 3)
+                is being obviated.
+                Please discontinue using this option, or if you still need it,
+                file an issue with pypa/setuptools describing your use case.
+                """,
+                due_date=(2025, 8, 30),  # Introduced in 2024-08-30
+            )
+
         if self.build_number is not None and not self.build_number[:1].isdigit():
             raise ValueError("Build tag (build-number) must start with a digit.")
 

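Because the deprecation goes through ``SetuptoolsDeprecationWarning.emit``,
projects still passing ``universal`` can promote the warning to an error ahead
of the 2025-08-30 due date. A small sketch:

    import warnings

    from setuptools.warnings import SetuptoolsDeprecationWarning

    # Fail fast on deprecated setuptools behavior instead of warning.
    warnings.simplefilter("error", SetuptoolsDeprecationWarning)
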
From 61fe5d3a2477a07dc9d92cbdb420eac3585c1d15 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 30 Aug 2024 09:53:50 +0100
Subject: [PATCH 1106/1761] Adapt tests for deprecation of universal

---
 setuptools/command/bdist_wheel.py    |  2 +-
 setuptools/tests/test_bdist_wheel.py | 39 +++++++++++++++++-----------
 setuptools/tests/test_build_meta.py  |  3 ---
 3 files changed, 25 insertions(+), 19 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 18d6ecc5a2..fa97976fef 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -279,7 +279,7 @@ def finalize_options(self) -> None:
 
         # Support legacy [wheel] section for setting universal
         wheel = self.distribution.get_option_dict("wheel")
-        if "universal" in wheel:
+        if "universal" in wheel:  # pragma: no cover
             # please don't define this in your global configs
             log.warn("The [wheel] section is deprecated. Use [bdist_wheel] instead.")
             val = wheel["universal"][1].strip()
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index a1e3d9a73e..8b64e90f72 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -26,6 +26,7 @@
     remove_readonly_exc,
 )
 from setuptools.dist import Distribution
+from setuptools.warnings import SetuptoolsDeprecationWarning
 
 from distutils.core import run_setup
 
@@ -123,7 +124,6 @@
             )
             """
         ),
-        "setup.cfg": "[bdist_wheel]\nuniversal=1",
         "headersdist.py": "",
         "header.h": "",
     },
@@ -300,8 +300,8 @@ def license_paths(self):
 
 def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
     monkeypatch.chdir(dummy_dist)
-    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
-    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
+    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
         license_files = {
             "dummy_dist-1.0.dist-info/" + fname for fname in DEFAULT_LICENSE_FILES
         }
@@ -314,9 +314,9 @@ def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
     )
     monkeypatch.chdir(dummy_dist)
 
-    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
+    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
 
-    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
         license_files = {"dummy_dist-1.0.dist-info/DUMMYFILE"}
         assert set(wf.namelist()) == DEFAULT_FILES | license_files
 
@@ -337,8 +337,8 @@ def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
 def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, config):
     dummy_dist.joinpath(config_file).write_text(config, encoding="utf-8")
     monkeypatch.chdir(dummy_dist)
-    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
-    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
+    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
         license_files = {
             "dummy_dist-1.0.dist-info/" + fname for fname in {"DUMMYFILE", "LICENSE"}
         }
@@ -350,20 +350,29 @@ def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path):
         "[metadata]\nlicense_files=\n", encoding="utf-8"
     )
     monkeypatch.chdir(dummy_dist)
-    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
-    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
+    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
         assert set(wf.namelist()) == DEFAULT_FILES
 
 
 def test_build_number(dummy_dist, monkeypatch, tmp_path):
     monkeypatch.chdir(dummy_dist)
-    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2", universal=True).run()
-    with ZipFile("dist/dummy_dist-1.0-2-py2.py3-none-any.whl") as wf:
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2").run()
+    with ZipFile("dist/dummy_dist-1.0-2-py3-none-any.whl") as wf:
         filenames = set(wf.namelist())
         assert "dummy_dist-1.0.dist-info/RECORD" in filenames
         assert "dummy_dist-1.0.dist-info/METADATA" in filenames
 
 
+def test_universal_deprecated(dummy_dist, monkeypatch, tmp_path):
+    monkeypatch.chdir(dummy_dist)
+    with pytest.warns(SetuptoolsDeprecationWarning, match=".*universal is deprecated"):
+        bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()
+
+    # For now we still respect the option
+    assert os.path.exists("dist/dummy_dist-1.0-py2.py3-none-any.whl")
+
+
 EXTENSION_EXAMPLE = """\
 #include <Python.h>
 
@@ -431,8 +440,8 @@ def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
 )
 def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type):
     monkeypatch.chdir(dummy_dist)
-    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True, compression=option).run()
-    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), compression=option).run()
+    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
         filenames = set(wf.namelist())
         assert "dummy_dist-1.0.dist-info/RECORD" in filenames
         assert "dummy_dist-1.0.dist-info/METADATA" in filenames
@@ -451,8 +460,8 @@ def test_wheelfile_line_endings(wheel_paths):
 def test_unix_epoch_timestamps(dummy_dist, monkeypatch, tmp_path):
     monkeypatch.setenv("SOURCE_DATE_EPOCH", "0")
     monkeypatch.chdir(dummy_dist)
-    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2a", universal=True).run()
-    with ZipFile("dist/dummy_dist-1.0-2a-py2.py3-none-any.whl") as wf:
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2a").run()
+    with ZipFile("dist/dummy_dist-1.0-2a-py3-none-any.whl") as wf:
         for zinfo in wf.filelist:
             assert zinfo.date_time >= (1980, 1, 1, 0, 0, 0)  # min epoch is used
 
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index cfd4cd453a..53095afb52 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -358,9 +358,6 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
 
                 [tool.distutils.sdist]
                 formats = "gztar"
-
-                [tool.distutils.bdist_wheel]
-                universal = true
                 """
             ),
             "MANIFEST.in": DALS(

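The expected wheel filenames change because dropping ``universal=True``
switches the python tag from ``py2.py3`` to ``py3``. A quick sketch of the
naming scheme the assertions rely on:

    # {name}-{version}(-{build})?-{python tag}-{abi tag}-{platform tag}.whl
    name, version, pytag = "dummy_dist", "1.0", "py3"  # "py2.py3" when universal
    wheel_name = f"{name}-{version}-{pytag}-none-any.whl"
    assert wheel_name == "dummy_dist-1.0-py3-none-any.whl"
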
From 1f5f6be24baf0bc092e792d5065fd7c87ae787af Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 30 Aug 2024 15:22:34 +0100
Subject: [PATCH 1107/1761] Add news fragment

---
 newsfragments/4617.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4617.feature.rst

diff --git a/newsfragments/4617.feature.rst b/newsfragments/4617.feature.rst
new file mode 100644
index 0000000000..905e9fd497
--- /dev/null
+++ b/newsfragments/4617.feature.rst
@@ -0,0 +1 @@
+Deprecated ``bdist_wheel.universal`` configuration.

From 7ecbcb0eb8a67a0408e1f6062f6591ea412f4e06 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 12:40:59 -0400
Subject: [PATCH 1108/1761] Revert "Remove monkeypatching of _msvccompiler."

This reverts commit 4e4e23cfcf7194d9d02ec262cfec33aa6701c28e.

Closes #4625
---
 setuptools/msvc.py | 1750 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 1750 insertions(+)
 create mode 100644 setuptools/msvc.py

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
new file mode 100644
index 0000000000..57f09417ca
--- /dev/null
+++ b/setuptools/msvc.py
@@ -0,0 +1,1750 @@
+"""
+Improved support for Microsoft Visual C++ compilers.
+
+Known supported compilers:
+--------------------------
+Microsoft Visual C++ 14.X:
+    Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
+    Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
+    Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64)
+
+This may also support compilers shipped with compatible Visual Studio versions.
+"""
+
+from __future__ import annotations
+
+import contextlib
+import itertools
+import json
+import platform
+import subprocess
+from os import listdir, pathsep
+from os.path import dirname, isdir, isfile, join
+from subprocess import CalledProcessError
+from typing import TYPE_CHECKING
+
+from more_itertools import unique_everseen
+
+import distutils.errors
+from distutils.util import get_platform
+
+# https://github.com/python/mypy/issues/8166
+if not TYPE_CHECKING and platform.system() == 'Windows':
+    import winreg
+    from os import environ
+else:
+    # Mock winreg and environ so the module can be imported on this platform.
+
+    class winreg:
+        HKEY_USERS = None
+        HKEY_CURRENT_USER = None
+        HKEY_LOCAL_MACHINE = None
+        HKEY_CLASSES_ROOT = None
+
+    environ: dict[str, str] = dict()
+
+
+def _msvc14_find_vc2015():
+    """Python 3.8 "distutils/_msvccompiler.py" backport"""
+    try:
+        key = winreg.OpenKey(
+            winreg.HKEY_LOCAL_MACHINE,
+            r"Software\Microsoft\VisualStudio\SxS\VC7",
+            0,
+            winreg.KEY_READ | winreg.KEY_WOW64_32KEY,
+        )
+    except OSError:
+        return None, None
+
+    best_version = 0
+    best_dir = None
+    with key:
+        for i in itertools.count():
+            try:
+                v, vc_dir, vt = winreg.EnumValue(key, i)
+            except OSError:
+                break
+            if v and vt == winreg.REG_SZ and isdir(vc_dir):
+                try:
+                    version = int(float(v))
+                except (ValueError, TypeError):
+                    continue
+                if version >= 14 and version > best_version:
+                    best_version, best_dir = version, vc_dir
+    return best_version, best_dir
+
+
+def _msvc14_find_vc2017():
+    """Python 3.8 "distutils/_msvccompiler.py" backport
+
+    Returns "15, path" based on the result of invoking vswhere.exe
+    If no install is found, returns "None, None"
+
+    The version is returned to avoid unnecessarily changing the function
+    result. It may be ignored when the path is not None.
+
+    If vswhere.exe is not available, by definition, VS 2017 is not
+    installed.
+    """
+    root = environ.get("ProgramFiles(x86)") or environ.get("ProgramFiles")
+    if not root:
+        return None, None
+
+    variant = 'arm64' if get_platform() == 'win-arm64' else 'x86.x64'
+    suitable_components = (
+        f"Microsoft.VisualStudio.Component.VC.Tools.{variant}",
+        "Microsoft.VisualStudio.Workload.WDExpress",
+    )
+
+    for component in suitable_components:
+        # Workaround for `-requiresAny` (only available on VS 2017 > 15.6)
+        with contextlib.suppress(CalledProcessError, OSError, UnicodeDecodeError):
+            path = (
+                subprocess.check_output([
+                    join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
+                    "-latest",
+                    "-prerelease",
+                    "-requires",
+                    component,
+                    "-property",
+                    "installationPath",
+                    "-products",
+                    "*",
+                ])
+                .decode(encoding="mbcs", errors="strict")
+                .strip()
+            )
+
+            path = join(path, "VC", "Auxiliary", "Build")
+            if isdir(path):
+                return 15, path
+
+    return None, None  # no suitable component found
+
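+# The vswhere probe above is roughly equivalent to running (assuming the
+# default installer location under "Program Files"):
+#
+#     vswhere.exe -latest -prerelease -requires <component> \
+#         -property installationPath -products *
+#
+# where <component> stands for one of the suitable_components entries.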
+
+PLAT_SPEC_TO_RUNTIME = {
+    'x86': 'x86',
+    'x86_amd64': 'x64',
+    'x86_arm': 'arm',
+    'x86_arm64': 'arm64',
+}
+
+
+def _msvc14_find_vcvarsall(plat_spec):
+    """Python 3.8 "distutils/_msvccompiler.py" backport"""
+    _, best_dir = _msvc14_find_vc2017()
+    vcruntime = None
+
+    if plat_spec in PLAT_SPEC_TO_RUNTIME:
+        vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec]
+    else:
+        vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
+
+    if best_dir:
+        vcredist = join(
+            best_dir,
+            "..",
+            "..",
+            "redist",
+            "MSVC",
+            "**",
+            vcruntime_plat,
+            "Microsoft.VC14*.CRT",
+            "vcruntime140.dll",
+        )
+        try:
+            import glob
+
+            vcruntime = glob.glob(vcredist, recursive=True)[-1]
+        except (ImportError, OSError, LookupError):
+            vcruntime = None
+
+    if not best_dir:
+        best_version, best_dir = _msvc14_find_vc2015()
+        if best_version:
+            vcruntime = join(
+                best_dir,
+                'redist',
+                vcruntime_plat,
+                "Microsoft.VC140.CRT",
+                "vcruntime140.dll",
+            )
+
+    if not best_dir:
+        return None, None
+
+    vcvarsall = join(best_dir, "vcvarsall.bat")
+    if not isfile(vcvarsall):
+        return None, None
+
+    if not vcruntime or not isfile(vcruntime):
+        vcruntime = None
+
+    return vcvarsall, vcruntime
+
+
+def _msvc14_get_vc_env(plat_spec):
+    """Python 3.8 "distutils/_msvccompiler.py" backport"""
+    if "DISTUTILS_USE_SDK" in environ:
+        return {key.lower(): value for key, value in environ.items()}
+
+    vcvarsall, vcruntime = _msvc14_find_vcvarsall(plat_spec)
+    if not vcvarsall:
+        raise distutils.errors.DistutilsPlatformError("Unable to find vcvarsall.bat")
+
+    try:
+        out = subprocess.check_output(
+            'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
+            stderr=subprocess.STDOUT,
+        ).decode('utf-16le', errors='replace')
+    except subprocess.CalledProcessError as exc:
+        raise distutils.errors.DistutilsPlatformError(
+            "Error executing {}".format(exc.cmd)
+        ) from exc
+
+    env = {
+        key.lower(): value
+        for key, _, value in (line.partition('=') for line in out.splitlines())
+        if key and value
+    }
+
+    if vcruntime:
+        env['py_vcruntime_redist'] = vcruntime
+    return env
+
+
+def msvc14_get_vc_env(plat_spec):
+    """
+    Patched "distutils._msvccompiler._get_vc_env" to support extra
+    Microsoft Visual C++ 14.X compilers.
+
+    Set environment without use of "vcvarsall.bat".
+
+    Parameters
+    ----------
+    plat_spec: str
+        Target architecture.
+
+    Return
+    ------
+    dict
+        environment
+    """
+
+    # Always use backport from CPython 3.8
+    try:
+        return _msvc14_get_vc_env(plat_spec)
+    except distutils.errors.DistutilsPlatformError as exc:
+        _augment_exception(exc, 14.0)
+        raise
+
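+# A minimal usage sketch (assumes a Windows host with the VC++ Build Tools
+# installed):
+#
+#     env = msvc14_get_vc_env('x86_amd64')  # environment for a 64-bit target
+#     env['path']     # PATH entries including the VC toolchain
+#     env['include']  # C/C++ header search paths
+#
+# Keys are lower-cased by _msvc14_get_vc_env above.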
+
+def _augment_exception(exc, version, arch=''):
+    """
+    Add details to the exception message to help guide the user
+    as to what action will resolve it.
+    """
+    # Error if MSVC++ directory not found or environment not set
+    message = exc.args[0]
+
+    if "vcvarsall" in message.lower() or "visual c" in message.lower():
+        # Special error message if MSVC++ not installed
+        tmpl = 'Microsoft Visual C++ {version:0.1f} or greater is required.'
+        message = tmpl.format(**locals())
+        msdownload = 'www.microsoft.com/download/details.aspx?id=%d'
+        if version == 9.0:
+            if arch.lower().find('ia64') > -1:
+                # For VC++ 9.0, if IA64 support is needed, redirect user
+                # to Windows SDK 7.0.
+                # Note: No download link available from Microsoft.
+                message += ' Get it with "Microsoft Windows SDK 7.0"'
+            else:
+                # For VC++ 9.0, redirect the user to VC++ for Python 2.7:
+                # This redirection link is maintained by Microsoft.
+                # Contact vspython@microsoft.com if it needs updating.
+                message += ' Get it from http://aka.ms/vcpython27'
+        elif version == 10.0:
+            # For VC++ 10.0, redirect user to Windows SDK 7.1
+            message += ' Get it with "Microsoft Windows SDK 7.1": '
+            message += msdownload % 8279
+        elif version >= 14.0:
+            # For VC++ 14.X, redirect user to the latest Visual C++ Build Tools
+            message += (
+                ' Get it with "Microsoft C++ Build Tools": '
+                r'https://visualstudio.microsoft.com'
+                r'/visual-cpp-build-tools/'
+            )
+
+    exc.args = (message,)
+
+
+class PlatformInfo:
+    """
+    Current and Target Architectures information.
+
+    Parameters
+    ----------
+    arch: str
+        Target architecture.
+    """
+
+    current_cpu = environ.get('processor_architecture', '').lower()
+
+    def __init__(self, arch):
+        self.arch = arch.lower().replace('x64', 'amd64')
+
+    @property
+    def target_cpu(self):
+        """
+        Return Target CPU architecture.
+
+        Return
+        ------
+        str
+            Target CPU
+        """
+        return self.arch[self.arch.find('_') + 1 :]
+
+    def target_is_x86(self):
+        """
+        Return True if target CPU is x86 32 bits.
+
+        Return
+        ------
+        bool
+            CPU is x86 32 bits
+        """
+        return self.target_cpu == 'x86'
+
+    def current_is_x86(self):
+        """
+        Return True if current CPU is x86 32 bits.
+
+        Return
+        ------
+        bool
+            CPU is x86 32 bits
+        """
+        return self.current_cpu == 'x86'
+
+    def current_dir(self, hidex86=False, x64=False):
+        """
+        Current platform specific subfolder.
+
+        Parameters
+        ----------
+        hidex86: bool
+            return '' and not '\x86' if architecture is x86.
+        x64: bool
+            return '\x64' and not '\amd64' if architecture is amd64.
+
+        Return
+        ------
+        str
+            subfolder: '\current', or '' (see hidex86 parameter)
+        """
+        return (
+            ''
+            if (self.current_cpu == 'x86' and hidex86)
+            else r'\x64'
+            if (self.current_cpu == 'amd64' and x64)
+            else r'\%s' % self.current_cpu
+        )
+
+    def target_dir(self, hidex86=False, x64=False):
+        r"""
+        Target platform specific subfolder.
+
+        Parameters
+        ----------
+        hidex86: bool
+            return '' and not '\x86' if architecture is x86.
+        x64: bool
+            return '\x64' and not '\amd64' if architecture is amd64.
+
+        Return
+        ------
+        str
+            subfolder: '\target', or '' (see hidex86 parameter)
+        """
+        return (
+            ''
+            if (self.target_cpu == 'x86' and hidex86)
+            else r'\x64'
+            if (self.target_cpu == 'amd64' and x64)
+            else r'\%s' % self.target_cpu
+        )
+
+    def cross_dir(self, forcex86=False):
+        r"""
+        Cross platform specific subfolder.
+
+        Parameters
+        ----------
+        forcex86: bool
+            Use 'x86' as current architecture even if current architecture is
+            not x86.
+
+        Return
+        ------
+        str
+            subfolder: '' if target architecture is current architecture,
+            '\current_target' if not.
+        """
+        current = 'x86' if forcex86 else self.current_cpu
+        return (
+            ''
+            if self.target_cpu == current
+            else self.target_dir().replace('\\', '\\%s_' % current)
+        )
+
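+# A small sketch of PlatformInfo on an x86 host targeting amd64 (assumes
+# current_cpu == 'x86', read from the 'processor_architecture' env var):
+#
+#     pi = PlatformInfo('x64')   # normalized to 'amd64'
+#     pi.target_cpu              # 'amd64'
+#     pi.target_dir()            # r'\amd64'
+#     pi.cross_dir()             # r'\x86_amd64'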
+
+class RegistryInfo:
+    """
+    Microsoft Visual Studio related registry information.
+
+    Parameters
+    ----------
+    platform_info: PlatformInfo
+        "PlatformInfo" instance.
+    """
+
+    HKEYS = (
+        winreg.HKEY_USERS,
+        winreg.HKEY_CURRENT_USER,
+        winreg.HKEY_LOCAL_MACHINE,
+        winreg.HKEY_CLASSES_ROOT,
+    )
+
+    def __init__(self, platform_info):
+        self.pi = platform_info
+
+    @property
+    def visualstudio(self):
+        """
+        Microsoft Visual Studio root registry key.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        return 'VisualStudio'
+
+    @property
+    def sxs(self):
+        """
+        Microsoft Visual Studio SxS registry key.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        return join(self.visualstudio, 'SxS')
+
+    @property
+    def vc(self):
+        """
+        Microsoft Visual C++ VC7 registry key.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        return join(self.sxs, 'VC7')
+
+    @property
+    def vs(self):
+        """
+        Microsoft Visual Studio VS7 registry key.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        return join(self.sxs, 'VS7')
+
+    @property
+    def vc_for_python(self):
+        """
+        Microsoft Visual C++ for Python registry key.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        return r'DevDiv\VCForPython'
+
+    @property
+    def microsoft_sdk(self):
+        """
+        Microsoft SDK registry key.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        return 'Microsoft SDKs'
+
+    @property
+    def windows_sdk(self):
+        """
+        Microsoft Windows/Platform SDK registry key.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        return join(self.microsoft_sdk, 'Windows')
+
+    @property
+    def netfx_sdk(self):
+        """
+        Microsoft .NET Framework SDK registry key.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        return join(self.microsoft_sdk, 'NETFXSDK')
+
+    @property
+    def windows_kits_roots(self):
+        """
+        Microsoft Windows Kits Roots registry key.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        return r'Windows Kits\Installed Roots'
+
+    def microsoft(self, key, x86=False):
+        """
+        Return key in Microsoft software registry.
+
+        Parameters
+        ----------
+        key: str
+            Registry key path where look.
+        x86: str
+            Force x86 software registry.
+
+        Return
+        ------
+        str
+            Registry key
+        """
+        node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node'
+        return join('Software', node64, 'Microsoft', key)
+
+    def lookup(self, key, name):
+        """
+        Look for values in registry in Microsoft software registry.
+
+        Parameters
+        ----------
+        key: str
+            Registry key path where look.
+        name: str
+            Value name to find.
+
+        Return
+        ------
+        str
+            value
+        """
+        key_read = winreg.KEY_READ
+        openkey = winreg.OpenKey
+        closekey = winreg.CloseKey
+        ms = self.microsoft
+        for hkey in self.HKEYS:
+            bkey = None
+            try:
+                bkey = openkey(hkey, ms(key), 0, key_read)
+            except OSError:
+                if not self.pi.current_is_x86():
+                    try:
+                        bkey = openkey(hkey, ms(key, True), 0, key_read)
+                    except OSError:
+                        continue
+                else:
+                    continue
+            try:
+                return winreg.QueryValueEx(bkey, name)[0]
+            except OSError:
+                pass
+            finally:
+                if bkey:
+                    closekey(bkey)
+        return None
+
+
+class SystemInfo:
+    """
+    Microsoft Windows and Visual Studio related system information.
+
+    Parameters
+    ----------
+    registry_info: RegistryInfo
+        "RegistryInfo" instance.
+    vc_ver: float
+        Required Microsoft Visual C++ version.
+    """
+
+    # Variables and properties in this class use the original CamelCase
+    # variable names from Microsoft source files for easier comparison.
+    WinDir = environ.get('WinDir', '')
+    ProgramFiles = environ.get('ProgramFiles', '')
+    ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles)
+
+    def __init__(self, registry_info, vc_ver=None):
+        self.ri = registry_info
+        self.pi = self.ri.pi
+
+        self.known_vs_paths = self.find_programdata_vs_vers()
+
+        # Except for VS15+, VC version is aligned with VS version
+        self.vs_ver = self.vc_ver = vc_ver or self._find_latest_available_vs_ver()
+
+    def _find_latest_available_vs_ver(self):
+        """
+        Find the latest VC version
+
+        Return
+        ------
+        float
+            version
+        """
+        reg_vc_vers = self.find_reg_vs_vers()
+
+        if not (reg_vc_vers or self.known_vs_paths):
+            raise distutils.errors.DistutilsPlatformError(
+                'No Microsoft Visual C++ version found'
+            )
+
+        vc_vers = set(reg_vc_vers)
+        vc_vers.update(self.known_vs_paths)
+        return sorted(vc_vers)[-1]
+
+    def find_reg_vs_vers(self):
+        """
+        Find Microsoft Visual Studio versions available in registry.
+
+        Return
+        ------
+        list of float
+            Versions
+        """
+        ms = self.ri.microsoft
+        vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
+        vs_vers = []
+        for hkey, key in itertools.product(self.ri.HKEYS, vckeys):
+            try:
+                bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
+            except OSError:
+                continue
+            with bkey:
+                subkeys, values, _ = winreg.QueryInfoKey(bkey)
+                for i in range(values):
+                    with contextlib.suppress(ValueError):
+                        ver = float(winreg.EnumValue(bkey, i)[0])
+                        if ver not in vs_vers:
+                            vs_vers.append(ver)
+                for i in range(subkeys):
+                    with contextlib.suppress(ValueError):
+                        ver = float(winreg.EnumKey(bkey, i))
+                        if ver not in vs_vers:
+                            vs_vers.append(ver)
+        return sorted(vs_vers)
+
+    def find_programdata_vs_vers(self):
+        r"""
+        Find Visual Studio 2017+ versions from information in
+        "C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances".
+
+        Return
+        ------
+        dict
+            float version as key, path as value.
+        """
+        vs_versions = {}
+        instances_dir = r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances'
+
+        try:
+            hashed_names = listdir(instances_dir)
+
+        except OSError:
+            # Directory does not exist in all Visual Studio versions
+            return vs_versions
+
+        for name in hashed_names:
+            try:
+                # Get VS installation path from "state.json" file
+                state_path = join(instances_dir, name, 'state.json')
+                with open(state_path, 'rt', encoding='utf-8') as state_file:
+                    state = json.load(state_file)
+                vs_path = state['installationPath']
+
+                # Raises OSError if this VS installation does not contain VC
+                listdir(join(vs_path, r'VC\Tools\MSVC'))
+
+                # Store version and path
+                vs_versions[self._as_float_version(state['installationVersion'])] = (
+                    vs_path
+                )
+
+            except (OSError, KeyError):
+                # Skip if "state.json" file is missing or bad format
+                continue
+
+        return vs_versions
+
+    @staticmethod
+    def _as_float_version(version):
+        """
+        Return a string version as a simplified float version (major.minor)
+
+        Parameters
+        ----------
+        version: str
+            Version.
+
+        Return
+        ------
+        float
+            version
+        """
+        return float('.'.join(version.split('.')[:2]))
+
+    @property
+    def VSInstallDir(self):
+        """
+        Microsoft Visual Studio directory.
+
+        Return
+        ------
+        str
+            path
+        """
+        # Default path
+        default = join(
+            self.ProgramFilesx86, 'Microsoft Visual Studio %0.1f' % self.vs_ver
+        )
+
+        # Try to get path from registry, if fail use default path
+        return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default
+
+    @property
+    def VCInstallDir(self):
+        """
+        Microsoft Visual C++ directory.
+
+        Return
+        ------
+        str
+            path
+        """
+        path = self._guess_vc() or self._guess_vc_legacy()
+
+        if not isdir(path):
+            msg = 'Microsoft Visual C++ directory not found'
+            raise distutils.errors.DistutilsPlatformError(msg)
+
+        return path
+
+    def _guess_vc(self):
+        """
+        Locate Visual C++ for VS2017+.
+
+        Return
+        ------
+        str
+            path
+        """
+        if self.vs_ver <= 14.0:
+            return ''
+
+        try:
+            # First search in known VS paths
+            vs_dir = self.known_vs_paths[self.vs_ver]
+        except KeyError:
+            # Else, search with path from registry
+            vs_dir = self.VSInstallDir
+
+        guess_vc = join(vs_dir, r'VC\Tools\MSVC')
+
+        # Subdir with VC exact version as name
+        try:
+            # Update the VC version with real one instead of VS version
+            vc_ver = listdir(guess_vc)[-1]
+            self.vc_ver = self._as_float_version(vc_ver)
+            return join(guess_vc, vc_ver)
+        except (OSError, IndexError):
+            return ''
+
+    def _guess_vc_legacy(self):
+        """
+        Locate Visual C++ for versions prior to 2017.
+
+        Return
+        ------
+        str
+            path
+        """
+        default = join(
+            self.ProgramFilesx86, r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver
+        )
+
+        # Try to get "VC++ for Python" path from registry as default path
+        reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
+        python_vc = self.ri.lookup(reg_path, 'installdir')
+        default_vc = join(python_vc, 'VC') if python_vc else default
+
+        # Try to get path from registry, if fail use default path
+        return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc
+
+    @property
+    def WindowsSdkVersion(self):
+        """
+        Microsoft Windows SDK versions for specified MSVC++ version.
+
+        Return
+        ------
+        tuple of str
+            versions
+        """
+        if self.vs_ver <= 9.0:
+            return '7.0', '6.1', '6.0a'
+        elif self.vs_ver == 10.0:
+            return '7.1', '7.0a'
+        elif self.vs_ver == 11.0:
+            return '8.0', '8.0a'
+        elif self.vs_ver == 12.0:
+            return '8.1', '8.1a'
+        elif self.vs_ver >= 14.0:
+            return '10.0', '8.1'
+        return None
+
+    @property
+    def WindowsSdkLastVersion(self):
+        """
+        Microsoft Windows SDK last version.
+
+        Return
+        ------
+        str
+            version
+        """
+        return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))
+
+    @property
+    def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
+        """
+        Microsoft Windows SDK directory.
+
+        Return
+        ------
+        str
+            path
+        """
+        sdkdir = ''
+        for ver in self.WindowsSdkVersion:
+            # Try to get it from registry
+            loc = join(self.ri.windows_sdk, 'v%s' % ver)
+            sdkdir = self.ri.lookup(loc, 'installationfolder')
+            if sdkdir:
+                break
+        if not sdkdir or not isdir(sdkdir):
+            # Try to get "VC++ for Python" version from registry
+            path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
+            install_base = self.ri.lookup(path, 'installdir')
+            if install_base:
+                sdkdir = join(install_base, 'WinSDK')
+        if not sdkdir or not isdir(sdkdir):
+            # If fail, use default new path
+            for ver in self.WindowsSdkVersion:
+                intver = ver[: ver.rfind('.')]
+                path = r'Microsoft SDKs\Windows Kits\%s' % intver
+                d = join(self.ProgramFiles, path)
+                if isdir(d):
+                    sdkdir = d
+        if not sdkdir or not isdir(sdkdir):
+            # If fail, use default old path
+            for ver in self.WindowsSdkVersion:
+                path = r'Microsoft SDKs\Windows\v%s' % ver
+                d = join(self.ProgramFiles, path)
+                if isdir(d):
+                    sdkdir = d
+        if not sdkdir:
+            # If fail, use Platform SDK
+            sdkdir = join(self.VCInstallDir, 'PlatformSDK')
+        return sdkdir
+
+    @property
+    def WindowsSDKExecutablePath(self):
+        """
+        Microsoft Windows SDK executable directory.
+
+        Return
+        ------
+        str
+            path
+        """
+        # Find WinSDK NetFx Tools registry dir name
+        if self.vs_ver <= 11.0:
+            netfxver = 35
+            arch = ''
+        else:
+            netfxver = 40
+            hidex86 = True if self.vs_ver <= 12.0 else False
+            arch = self.pi.current_dir(x64=True, hidex86=hidex86)
+        fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-'))
+
+        # list all possibles registry paths
+        regpaths = []
+        if self.vs_ver >= 14.0:
+            for ver in self.NetFxSdkVersion:
+                regpaths += [join(self.ri.netfx_sdk, ver, fx)]
+
+        for ver in self.WindowsSdkVersion:
+            regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
+
+        # Return the installation folder from the most recent path
+        for path in regpaths:
+            execpath = self.ri.lookup(path, 'installationfolder')
+            if execpath:
+                return execpath
+
+        return None
+
+    @property
+    def FSharpInstallDir(self):
+        """
+        Microsoft Visual F# directory.
+
+        Return
+        ------
+        str
+            path
+        """
+        path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
+        return self.ri.lookup(path, 'productdir') or ''
+
+    @property
+    def UniversalCRTSdkDir(self):
+        """
+        Microsoft Universal CRT SDK directory.
+
+        Return
+        ------
+        str
+            path
+        """
+        # Set Kit Roots versions for specified MSVC++ version
+        vers = ('10', '81') if self.vs_ver >= 14.0 else ()
+
+        # Find the path of the most recent Kit
+        for ver in vers:
+            sdkdir = self.ri.lookup(self.ri.windows_kits_roots, 'kitsroot%s' % ver)
+            if sdkdir:
+                return sdkdir or ''
+
+        return None
+
+    @property
+    def UniversalCRTSdkLastVersion(self):
+        """
+        Microsoft Universal C Runtime SDK last version.
+
+        Return
+        ------
+        str
+            version
+        """
+        return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib'))
+
+    @property
+    def NetFxSdkVersion(self):
+        """
+        Microsoft .NET Framework SDK versions.
+
+        Return
+        ------
+        tuple of str
+            versions
+        """
+        # Set FxSdk versions for specified VS version
+        return (
+            ('4.7.2', '4.7.1', '4.7', '4.6.2', '4.6.1', '4.6', '4.5.2', '4.5.1', '4.5')
+            if self.vs_ver >= 14.0
+            else ()
+        )
+
+    @property
+    def NetFxSdkDir(self):
+        """
+        Microsoft .NET Framework SDK directory.
+
+        Return
+        ------
+        str
+            path
+        """
+        sdkdir = ''
+        for ver in self.NetFxSdkVersion:
+            loc = join(self.ri.netfx_sdk, ver)
+            sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder')
+            if sdkdir:
+                break
+        return sdkdir
+
+    @property
+    def FrameworkDir32(self):
+        """
+        Microsoft .NET Framework 32bit directory.
+
+        Return
+        ------
+        str
+            path
+        """
+        # Default path
+        guess_fw = join(self.WinDir, r'Microsoft.NET\Framework')
+
+        # Try to get path from registry, if fail use default path
+        return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw
+
+    @property
+    def FrameworkDir64(self):
+        """
+        Microsoft .NET Framework 64bit directory.
+
+        Return
+        ------
+        str
+            path
+        """
+        # Default path
+        guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64')
+
+        # Try to get path from registry, if fail use default path
+        return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw
+
+    @property
+    def FrameworkVersion32(self):
+        """
+        Microsoft .NET Framework 32bit versions.
+
+        Return
+        ------
+        tuple of str
+            versions
+        """
+        return self._find_dot_net_versions(32)
+
+    @property
+    def FrameworkVersion64(self):
+        """
+        Microsoft .NET Framework 64bit versions.
+
+        Return
+        ------
+        tuple of str
+            versions
+        """
+        return self._find_dot_net_versions(64)
+
+    def _find_dot_net_versions(self, bits):
+        """
+        Find Microsoft .NET Framework versions.
+
+        Parameters
+        ----------
+        bits: int
+            Platform number of bits: 32 or 64.
+
+        Return
+        ------
+        tuple of str
+            versions
+        """
+        # Find actual .NET version in registry
+        reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits)
+        dot_net_dir = getattr(self, 'FrameworkDir%d' % bits)
+        ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or ''
+
+        # Set .NET versions for specified MSVC++ version
+        if self.vs_ver >= 12.0:
+            return ver, 'v4.0'
+        elif self.vs_ver >= 10.0:
+            return 'v4.0.30319' if ver.lower()[:2] != 'v4' else ver, 'v3.5'
+        elif self.vs_ver == 9.0:
+            return 'v3.5', 'v2.0.50727'
+        elif self.vs_ver == 8.0:
+            return 'v3.0', 'v2.0.50727'
+        return None
+
+    @staticmethod
+    def _use_last_dir_name(path, prefix=''):
+        """
+        Return name of the last dir in path or '' if no dir found.
+
+        Parameters
+        ----------
+        path: str
+            Use dirs in this path
+        prefix: str
+            Use only dirs starting by this prefix
+
+        Return
+        ------
+        str
+            name
+        """
+        matching_dirs = (
+            dir_name
+            for dir_name in reversed(listdir(path))
+            if isdir(join(path, dir_name)) and dir_name.startswith(prefix)
+        )
+        return next(matching_dirs, None) or ''
+
+
+class EnvironmentInfo:
+    """
+    Return environment variables for specified Microsoft Visual C++ version
+    and platform: Lib, Include, Path and libpath.
+
+    This class is compatible with Microsoft Visual C++ 9.0 to 14.X.
+
+    Script created by analysing Microsoft environment configuration files like
+    "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ...
+
+    Parameters
+    ----------
+    arch: str
+        Target architecture.
+    vc_ver: float
+        Required Microsoft Visual C++ version. If not set, autodetect the last
+        version.
+    vc_min_ver: float
+        Minimum Microsoft Visual C++ version.
+    """
+
+    # Variables and properties in this class use the original CamelCase
+    # variable names from Microsoft source files for easier comparison.
+
+    def __init__(self, arch, vc_ver=None, vc_min_ver=0):
+        self.pi = PlatformInfo(arch)
+        self.ri = RegistryInfo(self.pi)
+        self.si = SystemInfo(self.ri, vc_ver)
+
+        if self.vc_ver < vc_min_ver:
+            err = 'No suitable Microsoft Visual C++ version found'
+            raise distutils.errors.DistutilsPlatformError(err)
+
+    @property
+    def vs_ver(self):
+        """
+        Microsoft Visual Studio.
+
+        Return
+        ------
+        float
+            version
+        """
+        return self.si.vs_ver
+
+    @property
+    def vc_ver(self):
+        """
+        Microsoft Visual C++ version.
+
+        Return
+        ------
+        float
+            version
+        """
+        return self.si.vc_ver
+
+    @property
+    def VSTools(self):
+        """
+        Microsoft Visual Studio Tools.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        paths = [r'Common7\IDE', r'Common7\Tools']
+
+        if self.vs_ver >= 14.0:
+            arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
+            paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']
+            paths += [r'Team Tools\Performance Tools']
+            paths += [r'Team Tools\Performance Tools%s' % arch_subdir]
+
+        return [join(self.si.VSInstallDir, path) for path in paths]
+
+    @property
+    def VCIncludes(self):
+        """
+        Microsoft Visual C++ & Microsoft Foundation Class Includes.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        return [
+            join(self.si.VCInstallDir, 'Include'),
+            join(self.si.VCInstallDir, r'ATLMFC\Include'),
+        ]
+
+    @property
+    def VCLibraries(self):
+        """
+        Microsoft Visual C++ & Microsoft Foundation Class Libraries.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver >= 15.0:
+            arch_subdir = self.pi.target_dir(x64=True)
+        else:
+            arch_subdir = self.pi.target_dir(hidex86=True)
+        paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir]
+
+        if self.vs_ver >= 14.0:
+            paths += [r'Lib\store%s' % arch_subdir]
+
+        return [join(self.si.VCInstallDir, path) for path in paths]
+
+    @property
+    def VCStoreRefs(self):
+        """
+        Microsoft Visual C++ store references Libraries.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 14.0:
+            return []
+        return [join(self.si.VCInstallDir, r'Lib\store\references')]
+
+    @property
+    def VCTools(self):
+        """
+        Microsoft Visual C++ Tools.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        si = self.si
+        tools = [join(si.VCInstallDir, 'VCPackages')]
+
+        forcex86 = self.vs_ver <= 10.0
+        arch_subdir = self.pi.cross_dir(forcex86)
+        if arch_subdir:
+            tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
+
+        if self.vs_ver == 14.0:
+            path = 'Bin%s' % self.pi.current_dir(hidex86=True)
+            tools += [join(si.VCInstallDir, path)]
+
+        elif self.vs_ver >= 15.0:
+            host_dir = (
+                r'bin\HostX86%s' if self.pi.current_is_x86() else r'bin\HostX64%s'
+            )
+            tools += [join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]
+
+            if self.pi.current_cpu != self.pi.target_cpu:
+                tools += [
+                    join(si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))
+                ]
+
+        else:
+            tools += [join(si.VCInstallDir, 'Bin')]
+
+        return tools
+
+    @property
+    def OSLibraries(self):
+        """
+        Microsoft Windows SDK Libraries.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver <= 10.0:
+            arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
+            return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
+
+        else:
+            arch_subdir = self.pi.target_dir(x64=True)
+            lib = join(self.si.WindowsSdkDir, 'lib')
+            libver = self._sdk_subdir
+            return [join(lib, '%sum%s' % (libver, arch_subdir))]
+
+    @property
+    def OSIncludes(self):
+        """
+        Microsoft Windows SDK Include.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        include = join(self.si.WindowsSdkDir, 'include')
+
+        if self.vs_ver <= 10.0:
+            return [include, join(include, 'gl')]
+
+        else:
+            if self.vs_ver >= 14.0:
+                sdkver = self._sdk_subdir
+            else:
+                sdkver = ''
+            return [
+                join(include, '%sshared' % sdkver),
+                join(include, '%sum' % sdkver),
+                join(include, '%swinrt' % sdkver),
+            ]
+
+    @property
+    def OSLibpath(self):
+        """
+        Microsoft Windows SDK Libraries Paths.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        ref = join(self.si.WindowsSdkDir, 'References')
+        libpath = []
+
+        if self.vs_ver <= 9.0:
+            libpath += self.OSLibraries
+
+        if self.vs_ver >= 11.0:
+            libpath += [join(ref, r'CommonConfiguration\Neutral')]
+
+        if self.vs_ver >= 14.0:
+            libpath += [
+                ref,
+                join(self.si.WindowsSdkDir, 'UnionMetadata'),
+                join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
+                join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
+                join(ref, 'Windows.Networking.Connectivity.WwanContract', '1.0.0.0'),
+                join(
+                    self.si.WindowsSdkDir,
+                    'ExtensionSDKs',
+                    'Microsoft.VCLibs',
+                    '%0.1f' % self.vs_ver,
+                    'References',
+                    'CommonConfiguration',
+                    'neutral',
+                ),
+            ]
+        return libpath
+
+    @property
+    def SdkTools(self):
+        """
+        Microsoft Windows SDK Tools.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        return list(self._sdk_tools())
+
+    def _sdk_tools(self):
+        """
+        Microsoft Windows SDK Tools paths generator.
+
+        Return
+        ------
+        generator of str
+            paths
+        """
+        if self.vs_ver < 15.0:
+            bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86'
+            yield join(self.si.WindowsSdkDir, bin_dir)
+
+        if not self.pi.current_is_x86():
+            arch_subdir = self.pi.current_dir(x64=True)
+            path = 'Bin%s' % arch_subdir
+            yield join(self.si.WindowsSdkDir, path)
+
+        if self.vs_ver in (10.0, 11.0):
+            if self.pi.target_is_x86():
+                arch_subdir = ''
+            else:
+                arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
+            path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
+            yield join(self.si.WindowsSdkDir, path)
+
+        elif self.vs_ver >= 15.0:
+            path = join(self.si.WindowsSdkDir, 'Bin')
+            arch_subdir = self.pi.current_dir(x64=True)
+            sdkver = self.si.WindowsSdkLastVersion
+            yield join(path, '%s%s' % (sdkver, arch_subdir))
+
+        if self.si.WindowsSDKExecutablePath:
+            yield self.si.WindowsSDKExecutablePath
+
+    @property
+    def _sdk_subdir(self):
+        """
+        Microsoft Windows SDK version subdir.
+
+        Return
+        ------
+        str
+            subdir
+        """
+        ucrtver = self.si.WindowsSdkLastVersion
+        return ('%s\\' % ucrtver) if ucrtver else ''
+
+    @property
+    def SdkSetup(self):
+        """
+        Microsoft Windows SDK Setup.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver > 9.0:
+            return []
+
+        return [join(self.si.WindowsSdkDir, 'Setup')]
+
+    @property
+    def FxTools(self):
+        """
+        Microsoft .NET Framework Tools.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        pi = self.pi
+        si = self.si
+
+        if self.vs_ver <= 10.0:
+            include32 = True
+            include64 = not pi.target_is_x86() and not pi.current_is_x86()
+        else:
+            include32 = pi.target_is_x86() or pi.current_is_x86()
+            include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64'
+
+        tools = []
+        if include32:
+            tools += [join(si.FrameworkDir32, ver) for ver in si.FrameworkVersion32]
+        if include64:
+            tools += [join(si.FrameworkDir64, ver) for ver in si.FrameworkVersion64]
+        return tools
+
+    @property
+    def NetFxSDKLibraries(self):
+        """
+        Microsoft .Net Framework SDK Libraries.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
+            return []
+
+        arch_subdir = self.pi.target_dir(x64=True)
+        return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
+
+    @property
+    def NetFxSDKIncludes(self):
+        """
+        Microsoft .Net Framework SDK Includes.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
+            return []
+
+        return [join(self.si.NetFxSdkDir, r'include\um')]
+
+    @property
+    def VsTDb(self):
+        """
+        Microsoft Visual Studio Team System Database.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')]
+
+    @property
+    def MSBuild(self):
+        """
+        Microsoft Build Engine.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 12.0:
+            return []
+        elif self.vs_ver < 15.0:
+            base_path = self.si.ProgramFilesx86
+            arch_subdir = self.pi.current_dir(hidex86=True)
+        else:
+            base_path = self.si.VSInstallDir
+            arch_subdir = ''
+
+        path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir)
+        build = [join(base_path, path)]
+
+        if self.vs_ver >= 15.0:
+            # Add Roslyn C# & Visual Basic Compiler
+            build += [join(base_path, path, 'Roslyn')]
+
+        return build
+
+    @property
+    def HTMLHelpWorkshop(self):
+        """
+        Microsoft HTML Help Workshop.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 11.0:
+            return []
+
+        return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
+
+    @property
+    def UCRTLibraries(self):
+        """
+        Microsoft Universal C Runtime SDK Libraries.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 14.0:
+            return []
+
+        arch_subdir = self.pi.target_dir(x64=True)
+        lib = join(self.si.UniversalCRTSdkDir, 'lib')
+        ucrtver = self._ucrt_subdir
+        return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
+
+    @property
+    def UCRTIncludes(self):
+        """
+        Microsoft Universal C Runtime SDK Include.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 14.0:
+            return []
+
+        include = join(self.si.UniversalCRTSdkDir, 'include')
+        return [join(include, '%sucrt' % self._ucrt_subdir)]
+
+    @property
+    def _ucrt_subdir(self):
+        """
+        Microsoft Universal C Runtime SDK version subdir.
+
+        Return
+        ------
+        str
+            subdir
+        """
+        ucrtver = self.si.UniversalCRTSdkLastVersion
+        return ('%s\\' % ucrtver) if ucrtver else ''
+
+    @property
+    def FSharp(self):
+        """
+        Microsoft Visual F#.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if not 11.0 <= self.vs_ver <= 12.0:
+            return []
+
+        return [self.si.FSharpInstallDir]
+
+    @property
+    def VCRuntimeRedist(self):
+        """
+        Microsoft Visual C++ runtime redistributable dll.
+
+        Return
+        ------
+        str
+            path
+        """
+        vcruntime = 'vcruntime%d0.dll' % self.vc_ver
+        arch_subdir = self.pi.target_dir(x64=True).strip('\\')
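+        # Illustrative final result: a path such as
+        # ...\VC\Redist\MSVC\<version>\x64\Microsoft.VC141.CRT\vcruntime140.dll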
+
+        # Installation prefixes candidates
+        prefixes = []
+        tools_path = self.si.VCInstallDir
+        redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist'))
+        if isdir(redist_path):
+            # Redist version may not be exactly the same as tools
+            redist_path = join(redist_path, listdir(redist_path)[-1])
+            prefixes += [redist_path, join(redist_path, 'onecore')]
+
+        prefixes += [join(tools_path, 'redist')]  # VS14 legacy path
+
+        # CRT directory
+        crt_dirs = (
+            'Microsoft.VC%d.CRT' % (self.vc_ver * 10),
+            # Sometimes stored in a directory named for the VS version instead of VC
+            'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10),
+        )
+
+        # vcruntime path
+        for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
+            path = join(prefix, arch_subdir, crt_dir, vcruntime)
+            if isfile(path):
+                return path
+        return None
+
+    def return_env(self, exists=True):
+        """
+        Return environment dict.
+
+        Parameters
+        ----------
+        exists: bool
+            If True, only return existing paths.
+
+        Return
+        ------
+        dict
+            environment
+        """
+        env = dict(
+            include=self._build_paths(
+                'include',
+                [
+                    self.VCIncludes,
+                    self.OSIncludes,
+                    self.UCRTIncludes,
+                    self.NetFxSDKIncludes,
+                ],
+                exists,
+            ),
+            lib=self._build_paths(
+                'lib',
+                [
+                    self.VCLibraries,
+                    self.OSLibraries,
+                    self.FxTools,
+                    self.UCRTLibraries,
+                    self.NetFxSDKLibraries,
+                ],
+                exists,
+            ),
+            libpath=self._build_paths(
+                'libpath',
+                [self.VCLibraries, self.FxTools, self.VCStoreRefs, self.OSLibpath],
+                exists,
+            ),
+            path=self._build_paths(
+                'path',
+                [
+                    self.VCTools,
+                    self.VSTools,
+                    self.VsTDb,
+                    self.SdkTools,
+                    self.SdkSetup,
+                    self.FxTools,
+                    self.MSBuild,
+                    self.HTMLHelpWorkshop,
+                    self.FSharp,
+                ],
+                exists,
+            ),
+        )
+        if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist):
+            env['py_vcruntime_redist'] = self.VCRuntimeRedist
+        return env
+
+    def _build_paths(self, name, spec_path_lists, exists):
+        """
+        Given an environment variable name and specified paths,
+        return a pathsep-separated string of paths containing
+        unique, extant directories from those paths and from
+        the environment variable. Raise an error if no paths
+        are resolved.
+
+        Parameters
+        ----------
+        name: str
+            Environment variable name
+        spec_path_lists: list of str
+            Paths
+        exists: bool
+            If True, only return existing paths.
+
+        Return
+        ------
+        str
+            Pathsep-separated paths
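+
+        Example (illustrative)
+        ----------------------
+        If both directories exist and no ``lib`` variable is set in the
+        environment, ``_build_paths('lib', [[r'C:\A', r'C:\B']], True)``
+        returns ``r'C:\A;C:\B'``.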
+        """
+        # flatten spec_path_lists
+        spec_paths = itertools.chain.from_iterable(spec_path_lists)
+        env_paths = environ.get(name, '').split(pathsep)
+        paths = itertools.chain(spec_paths, env_paths)
+        extant_paths = list(filter(isdir, paths)) if exists else paths
+        if not extant_paths:
+            msg = "%s environment variable is empty" % name.upper()
+            raise distutils.errors.DistutilsPlatformError(msg)
+        unique_paths = unique_everseen(extant_paths)
+        return pathsep.join(unique_paths)

From 242388806a0ac0f47f49020c67befd685a8e4a52 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 12:49:50 -0400
Subject: [PATCH 1109/1761] Remove only the monkeypatching, leaving
 EnvironmentInfo in place.

---
 setuptools/msvc.py | 248 +--------------------------------------------
 1 file changed, 1 insertion(+), 247 deletions(-)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 57f09417ca..b4e9c9eb36 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1,14 +1,5 @@
 """
-Improved support for Microsoft Visual C++ compilers.
-
-Known supported compilers:
---------------------------
-Microsoft Visual C++ 14.X:
-    Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
-    Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
-    Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64)
-
-This may also support compilers shipped with compatible Visual Studio versions.
+Environment info about Microsoft Compilers.
 """
 
 from __future__ import annotations
@@ -17,16 +8,13 @@
 import itertools
 import json
 import platform
-import subprocess
 from os import listdir, pathsep
 from os.path import dirname, isdir, isfile, join
-from subprocess import CalledProcessError
 from typing import TYPE_CHECKING
 
 from more_itertools import unique_everseen
 
 import distutils.errors
-from distutils.util import get_platform
 
 # https://github.com/python/mypy/issues/8166
 if not TYPE_CHECKING and platform.system() == 'Windows':
@@ -44,240 +32,6 @@ class winreg:
     environ: dict[str, str] = dict()
 
 
-def _msvc14_find_vc2015():
-    """Python 3.8 "distutils/_msvccompiler.py" backport"""
-    try:
-        key = winreg.OpenKey(
-            winreg.HKEY_LOCAL_MACHINE,
-            r"Software\Microsoft\VisualStudio\SxS\VC7",
-            0,
-            winreg.KEY_READ | winreg.KEY_WOW64_32KEY,
-        )
-    except OSError:
-        return None, None
-
-    best_version = 0
-    best_dir = None
-    with key:
-        for i in itertools.count():
-            try:
-                v, vc_dir, vt = winreg.EnumValue(key, i)
-            except OSError:
-                break
-            if v and vt == winreg.REG_SZ and isdir(vc_dir):
-                try:
-                    version = int(float(v))
-                except (ValueError, TypeError):
-                    continue
-                if version >= 14 and version > best_version:
-                    best_version, best_dir = version, vc_dir
-    return best_version, best_dir
-
-
-def _msvc14_find_vc2017():
-    """Python 3.8 "distutils/_msvccompiler.py" backport
-
-    Returns "15, path" based on the result of invoking vswhere.exe
-    If no install is found, returns "None, None"
-
-    The version is returned to avoid unnecessarily changing the function
-    result. It may be ignored when the path is not None.
-
-    If vswhere.exe is not available, by definition, VS 2017 is not
-    installed.
-    """
-    root = environ.get("ProgramFiles(x86)") or environ.get("ProgramFiles")
-    if not root:
-        return None, None
-
-    variant = 'arm64' if get_platform() == 'win-arm64' else 'x86.x64'
-    suitable_components = (
-        f"Microsoft.VisualStudio.Component.VC.Tools.{variant}",
-        "Microsoft.VisualStudio.Workload.WDExpress",
-    )
-
-    for component in suitable_components:
-        # Workaround for `-requiresAny` (only available on VS 2017 > 15.6)
-        with contextlib.suppress(CalledProcessError, OSError, UnicodeDecodeError):
-            path = (
-                subprocess.check_output([
-                    join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
-                    "-latest",
-                    "-prerelease",
-                    "-requires",
-                    component,
-                    "-property",
-                    "installationPath",
-                    "-products",
-                    "*",
-                ])
-                .decode(encoding="mbcs", errors="strict")
-                .strip()
-            )
-
-            path = join(path, "VC", "Auxiliary", "Build")
-            if isdir(path):
-                return 15, path
-
-    return None, None  # no suitable component found
-
-
-PLAT_SPEC_TO_RUNTIME = {
-    'x86': 'x86',
-    'x86_amd64': 'x64',
-    'x86_arm': 'arm',
-    'x86_arm64': 'arm64',
-}
-
-
-def _msvc14_find_vcvarsall(plat_spec):
-    """Python 3.8 "distutils/_msvccompiler.py" backport"""
-    _, best_dir = _msvc14_find_vc2017()
-    vcruntime = None
-
-    if plat_spec in PLAT_SPEC_TO_RUNTIME:
-        vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec]
-    else:
-        vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
-
-    if best_dir:
-        vcredist = join(
-            best_dir,
-            "..",
-            "..",
-            "redist",
-            "MSVC",
-            "**",
-            vcruntime_plat,
-            "Microsoft.VC14*.CRT",
-            "vcruntime140.dll",
-        )
-        try:
-            import glob
-
-            vcruntime = glob.glob(vcredist, recursive=True)[-1]
-        except (ImportError, OSError, LookupError):
-            vcruntime = None
-
-    if not best_dir:
-        best_version, best_dir = _msvc14_find_vc2015()
-        if best_version:
-            vcruntime = join(
-                best_dir,
-                'redist',
-                vcruntime_plat,
-                "Microsoft.VC140.CRT",
-                "vcruntime140.dll",
-            )
-
-    if not best_dir:
-        return None, None
-
-    vcvarsall = join(best_dir, "vcvarsall.bat")
-    if not isfile(vcvarsall):
-        return None, None
-
-    if not vcruntime or not isfile(vcruntime):
-        vcruntime = None
-
-    return vcvarsall, vcruntime
-
-
-def _msvc14_get_vc_env(plat_spec):
-    """Python 3.8 "distutils/_msvccompiler.py" backport"""
-    if "DISTUTILS_USE_SDK" in environ:
-        return {key.lower(): value for key, value in environ.items()}
-
-    vcvarsall, vcruntime = _msvc14_find_vcvarsall(plat_spec)
-    if not vcvarsall:
-        raise distutils.errors.DistutilsPlatformError("Unable to find vcvarsall.bat")
-
-    try:
-        out = subprocess.check_output(
-            'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
-            stderr=subprocess.STDOUT,
-        ).decode('utf-16le', errors='replace')
-    except subprocess.CalledProcessError as exc:
-        raise distutils.errors.DistutilsPlatformError(
-            "Error executing {}".format(exc.cmd)
-        ) from exc
-
-    env = {
-        key.lower(): value
-        for key, _, value in (line.partition('=') for line in out.splitlines())
-        if key and value
-    }
-
-    if vcruntime:
-        env['py_vcruntime_redist'] = vcruntime
-    return env
-
-
-def msvc14_get_vc_env(plat_spec):
-    """
-    Patched "distutils._msvccompiler._get_vc_env" for support extra
-    Microsoft Visual C++ 14.X compilers.
-
-    Set environment without use of "vcvarsall.bat".
-
-    Parameters
-    ----------
-    plat_spec: str
-        Target architecture.
-
-    Return
-    ------
-    dict
-        environment
-    """
-
-    # Always use backport from CPython 3.8
-    try:
-        return _msvc14_get_vc_env(plat_spec)
-    except distutils.errors.DistutilsPlatformError as exc:
-        _augment_exception(exc, 14.0)
-        raise
-
-
-def _augment_exception(exc, version, arch=''):
-    """
-    Add details to the exception message to help guide the user
-    as to what action will resolve it.
-    """
-    # Error if MSVC++ directory not found or environment not set
-    message = exc.args[0]
-
-    if "vcvarsall" in message.lower() or "visual c" in message.lower():
-        # Special error message if MSVC++ not installed
-        tmpl = 'Microsoft Visual C++ {version:0.1f} or greater is required.'
-        message = tmpl.format(**locals())
-        msdownload = 'www.microsoft.com/download/details.aspx?id=%d'
-        if version == 9.0:
-            if arch.lower().find('ia64') > -1:
-                # For VC++ 9.0, if IA64 support is needed, redirect user
-                # to Windows SDK 7.0.
-                # Note: No download link available from Microsoft.
-                message += ' Get it with "Microsoft Windows SDK 7.0"'
-            else:
-                # For VC++ 9.0 redirect user to Vc++ for Python 2.7 :
-                # This redirection link is maintained by Microsoft.
-                # Contact vspython@microsoft.com if it needs updating.
-                message += ' Get it from http://aka.ms/vcpython27'
-        elif version == 10.0:
-            # For VC++ 10.0 Redirect user to Windows SDK 7.1
-            message += ' Get it with "Microsoft Windows SDK 7.1": '
-            message += msdownload % 8279
-        elif version >= 14.0:
-            # For VC++ 14.X Redirect user to latest Visual C++ Build Tools
-            message += (
-                ' Get it with "Microsoft C++ Build Tools": '
-                r'https://visualstudio.microsoft.com'
-                r'/visual-cpp-build-tools/'
-            )
-
-    exc.args = (message,)
-
-
 class PlatformInfo:
     """
     Current and Target Architectures information.

From 616d8735ebe890691b3c78470c40992a7532f11a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 12:51:16 -0400
Subject: [PATCH 1110/1761] Add news fragment.

---
 newsfragments/4625.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4625.bugfix.rst

diff --git a/newsfragments/4625.bugfix.rst b/newsfragments/4625.bugfix.rst
new file mode 100644
index 0000000000..55ac0785d0
--- /dev/null
+++ b/newsfragments/4625.bugfix.rst
@@ -0,0 +1 @@
+Restored ``setuptools.msvc.EnvironmentInfo`` as it is used externally.
\ No newline at end of file

From a16582be2518bb53f413e1a4bf76f7014a55e806 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 13:11:09 -0400
Subject: [PATCH 1111/1761] Add a test for construction of EnvironmentInfo.

---
 conftest.py        | 7 +++++++
 setuptools/msvc.py | 3 +++
 2 files changed, 10 insertions(+)

diff --git a/conftest.py b/conftest.py
index 01ed2fa2e6..0807a8d3d0 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,3 +1,4 @@
+import platform
 import sys
 
 import pytest
@@ -67,3 +68,9 @@ def conditional_skip(request):
             pytest.skip("running integration tests only")
         if not running_integration_tests and is_integration_test:
             pytest.skip("skipping integration tests")
+
+
+@pytest.fixture
+def windows_only():
+    if platform.system() != 'Windows':
+        pytest.skip("Windows only")
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index b4e9c9eb36..4295c10352 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1,5 +1,8 @@
 """
 Environment info about Microsoft Compilers.
+
+>>> getfixture('windows_only')
+>>> ei = EnvironmentInfo('amd64')
 """
 
 from __future__ import annotations

From 1a9d87308dc0d8aabeaae0dce989b35dfb7699f0 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 13:30:25 -0400
Subject: [PATCH 1112/1761] =?UTF-8?q?Bump=20version:=2074.0.0=20=E2=86=92?=
 =?UTF-8?q?=2074.1.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 17 +++++++++++++++++
 newsfragments/4568.feature.rst |  2 --
 newsfragments/4622.bugfix.rst  |  1 -
 newsfragments/4625.bugfix.rst  |  1 -
 pyproject.toml                 |  2 +-
 6 files changed, 19 insertions(+), 6 deletions(-)
 delete mode 100644 newsfragments/4568.feature.rst
 delete mode 100644 newsfragments/4622.bugfix.rst
 delete mode 100644 newsfragments/4625.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 4377121819..459cd01324 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 74.0.0
+current_version = 74.1.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 6bb9eeaf57..fa5f4597b1 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,20 @@
+v74.1.0
+=======
+
+Features
+--------
+
+- Added support for defining ``ext-modules`` via ``pyproject.toml``
+  (**EXPERIMENTAL**, may change in future releases). (#4568)
+
+
+Bugfixes
+--------
+
+- Merge with pypa/distutils@3dcdf8567, removing the duplicate vendored copy of packaging. (#4622)
+- Restored ``setuptools.msvc.EnvironmentInfo`` as it is used externally. (#4625)
+
+
 v74.0.0
 =======
 
diff --git a/newsfragments/4568.feature.rst b/newsfragments/4568.feature.rst
deleted file mode 100644
index dadf4f4386..0000000000
--- a/newsfragments/4568.feature.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Added support for defining ``ext-modules`` via ``pyproject.toml``
-(**EXPERIMENTAL**, may change in future releases).
diff --git a/newsfragments/4622.bugfix.rst b/newsfragments/4622.bugfix.rst
deleted file mode 100644
index 0128edbcd5..0000000000
--- a/newsfragments/4622.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Merge with pypa/distutils@3dcdf8567, removing the duplicate vendored copy of packaging.
\ No newline at end of file
diff --git a/newsfragments/4625.bugfix.rst b/newsfragments/4625.bugfix.rst
deleted file mode 100644
index 55ac0785d0..0000000000
--- a/newsfragments/4625.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restored ``setuptools.msvc.EnvironmentInfo`` as it is used externally.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index cee3b55fde..072a333645 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "74.0.0"
+version = "74.1.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 6864167a0f5cc95c258383314816046a2e877dfa Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 18:21:47 -0400
Subject: [PATCH 1113/1761] Use monkeypatch to monkeypatch.

---
 distutils/tests/test_msvccompiler.py | 17 +++++++----------
 1 file changed, 7 insertions(+), 10 deletions(-)

diff --git a/distutils/tests/test_msvccompiler.py b/distutils/tests/test_msvccompiler.py
index 23b6c732c3..71129cae27 100644
--- a/distutils/tests/test_msvccompiler.py
+++ b/distutils/tests/test_msvccompiler.py
@@ -14,22 +14,19 @@
 
 
 class Testmsvccompiler(support.TempdirManager):
-    def test_no_compiler(self):
+    def test_no_compiler(self, monkeypatch):
         # makes sure _get_vc_env raises
         # a DistutilsPlatformError if the compiler
         # is not found
         def _find_vcvarsall(plat_spec):
             return None, None
 
-        old_find_vcvarsall = _msvccompiler._find_vcvarsall
-        _msvccompiler._find_vcvarsall = _find_vcvarsall
-        try:
-            with pytest.raises(DistutilsPlatformError):
-                _msvccompiler._get_vc_env(
-                    'wont find this version',
-                )
-        finally:
-            _msvccompiler._find_vcvarsall = old_find_vcvarsall
+        monkeypatch.setattr(_msvccompiler, '_find_vcvarsall', _find_vcvarsall)
+
+        with pytest.raises(DistutilsPlatformError):
+            _msvccompiler._get_vc_env(
+                'wont find this version',
+            )
 
     @needs_winreg
     def test_get_vc_env_unicode(self):

From 971074d4a2f6e222cdbc43ce881586412c7ab8a1 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 18:38:49 -0400
Subject: [PATCH 1114/1761] Disable TRY400 so it doesn't cause problems in
 other branches. Disable RUF100 to prevent it from failing in this branch. Ref
 pypa/distutils#292

---
 distutils/_msvccompiler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index e7652218d8..03653929a8 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -159,7 +159,7 @@ def _get_vc_env(plat_spec):
             stderr=subprocess.STDOUT,
         ).decode('utf-16le', errors='replace')
     except subprocess.CalledProcessError as exc:
-        log.error(exc.output)
+        log.error(exc.output)  # noqa: RUF100, TRY400
         raise DistutilsPlatformError(f"Error executing {exc.cmd}")
 
     env = {

From 26292805c3e624fd771ffb7bb61f73c9ac5ce422 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 08:22:39 -0400
Subject: [PATCH 1115/1761] Prefer generator expression in VCRuntimeRedist

---
 setuptools/msvc.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 4295c10352..51afb43857 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1406,11 +1406,11 @@ def VCRuntimeRedist(self):
         )
 
         # vcruntime path
-        for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
-            path = join(prefix, arch_subdir, crt_dir, vcruntime)
-            if isfile(path):
-                return path
-        return None
+        candidate_paths = (
+            join(prefix, arch_subdir, crt_dir, vcruntime)
+            for (prefix, crt_dir) in itertools.product(prefixes, crt_dirs)
+        )
+        return next(filter(isfile, candidate_paths), None)
 
     def return_env(self, exists=True):
         """

From f4adb80536eec233ee4a017303b9298bc253573b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 08:29:33 -0400
Subject: [PATCH 1116/1761] In return_env, avoid checking `isfile`.

Since `isfile` is already called on the paths processed by VCRuntimeRedist, this call is redundant, but it also fails if the result from VCRuntimeRedist is None. Fixes #1902
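
A minimal sketch of the failure mode (illustrative):

    from os.path import isfile
    isfile(None)  # raises TypeError, since None is not a valid path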
---
 setuptools/msvc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 51afb43857..b9fdf6836b 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1469,7 +1469,7 @@ def return_env(self, exists=True):
                 exists,
             ),
         )
-        if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist):
+        if self.vs_ver >= 14 and self.VCRuntimeRedist:
             env['py_vcruntime_redist'] = self.VCRuntimeRedist
         return env
 

From 1a810033bb65790af48ebdd2f1f8944d28400fb2 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 08:29:56 -0400
Subject: [PATCH 1117/1761] Add type annotation for VCRuntimeRedist and update
 the docstring to reflect the behavior.

---
 setuptools/msvc.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index b9fdf6836b..a7fb8d30b6 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1375,14 +1375,11 @@ def FSharp(self):
         return [self.si.FSharpInstallDir]
 
     @property
-    def VCRuntimeRedist(self):
+    def VCRuntimeRedist(self) -> str | None:
         """
         Microsoft Visual C++ runtime redistributable dll.
 
-        Return
-        ------
-        str
-            path
+        Returns the first suitable path found or None.
         """
         vcruntime = 'vcruntime%d0.dll' % self.vc_ver
         arch_subdir = self.pi.target_dir(x64=True).strip('\\')

From 13bd961b52a16c5baba67da0e6603e4f3f0126a5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 08:37:20 -0400
Subject: [PATCH 1118/1761] Add news fragment.

---
 newsfragments/1902.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/1902.bugfix.rst

diff --git a/newsfragments/1902.bugfix.rst b/newsfragments/1902.bugfix.rst
new file mode 100644
index 0000000000..5b2d0b7ba5
--- /dev/null
+++ b/newsfragments/1902.bugfix.rst
@@ -0,0 +1 @@
+Fixed TypeError in ``msvc.EnvironmentInfo.return_env`` when no runtime redistributables are installed.
\ No newline at end of file

From bf5d08c5994e0b4999f1062128e1fe74dd6fffb2 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 08:49:00 -0400
Subject: [PATCH 1119/1761] In msvc, use os.path namespace.

---
 setuptools/msvc.py | 199 ++++++++++++++++++++++++---------------------
 1 file changed, 105 insertions(+), 94 deletions(-)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index a7fb8d30b6..de4b05f928 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -10,9 +10,9 @@
 import contextlib
 import itertools
 import json
+import os
+import os.path
 import platform
-from os import listdir, pathsep
-from os.path import dirname, isdir, isfile, join
 from typing import TYPE_CHECKING
 
 from more_itertools import unique_everseen
@@ -198,7 +198,7 @@ def sxs(self):
         str
             Registry key
         """
-        return join(self.visualstudio, 'SxS')
+        return os.path.join(self.visualstudio, 'SxS')
 
     @property
     def vc(self):
@@ -210,7 +210,7 @@ def vc(self):
         str
             Registry key
         """
-        return join(self.sxs, 'VC7')
+        return os.path.join(self.sxs, 'VC7')
 
     @property
     def vs(self):
@@ -222,7 +222,7 @@ def vs(self):
         str
             Registry key
         """
-        return join(self.sxs, 'VS7')
+        return os.path.join(self.sxs, 'VS7')
 
     @property
     def vc_for_python(self):
@@ -258,7 +258,7 @@ def windows_sdk(self):
         str
             Registry key
         """
-        return join(self.microsoft_sdk, 'Windows')
+        return os.path.join(self.microsoft_sdk, 'Windows')
 
     @property
     def netfx_sdk(self):
@@ -270,7 +270,7 @@ def netfx_sdk(self):
         str
             Registry key
         """
-        return join(self.microsoft_sdk, 'NETFXSDK')
+        return os.path.join(self.microsoft_sdk, 'NETFXSDK')
 
     @property
     def windows_kits_roots(self):
@@ -301,7 +301,7 @@ def microsoft(self, key, x86=False):
             Registry key
         """
         node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node'
-        return join('Software', node64, 'Microsoft', key)
+        return os.path.join('Software', node64, 'Microsoft', key)
 
     def lookup(self, key, name):
         """
@@ -437,7 +437,7 @@ def find_programdata_vs_vers(self):
         instances_dir = r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances'
 
         try:
-            hashed_names = listdir(instances_dir)
+            hashed_names = os.listdir(instances_dir)
 
         except OSError:
             # Directory not exists with all Visual Studio versions
@@ -446,13 +446,13 @@ def find_programdata_vs_vers(self):
         for name in hashed_names:
             try:
                 # Get VS installation path from "state.json" file
-                state_path = join(instances_dir, name, 'state.json')
+                state_path = os.path.join(instances_dir, name, 'state.json')
                 with open(state_path, 'rt', encoding='utf-8') as state_file:
                     state = json.load(state_file)
                 vs_path = state['installationPath']
 
                 # Raises OSError if this VS installation does not contain VC
-                listdir(join(vs_path, r'VC\Tools\MSVC'))
+                os.listdir(os.path.join(vs_path, r'VC\Tools\MSVC'))
 
                 # Store version and path
                 vs_versions[self._as_float_version(state['installationVersion'])] = (
@@ -493,7 +493,7 @@ def VSInstallDir(self):
             path
         """
         # Default path
-        default = join(
+        default = os.path.join(
             self.ProgramFilesx86, 'Microsoft Visual Studio %0.1f' % self.vs_ver
         )
 
@@ -512,7 +512,7 @@ def VCInstallDir(self):
         """
         path = self._guess_vc() or self._guess_vc_legacy()
 
-        if not isdir(path):
+        if not os.path.isdir(path):
             msg = 'Microsoft Visual C++ directory not found'
             raise distutils.errors.DistutilsPlatformError(msg)
 
@@ -537,14 +537,14 @@ def _guess_vc(self):
             # Else, search with path from registry
             vs_dir = self.VSInstallDir
 
-        guess_vc = join(vs_dir, r'VC\Tools\MSVC')
+        guess_vc = os.path.join(vs_dir, r'VC\Tools\MSVC')
 
         # Subdir with VC exact version as name
         try:
             # Update the VC version with real one instead of VS version
-            vc_ver = listdir(guess_vc)[-1]
+            vc_ver = os.listdir(guess_vc)[-1]
             self.vc_ver = self._as_float_version(vc_ver)
-            return join(guess_vc, vc_ver)
+            return os.path.join(guess_vc, vc_ver)
         except (OSError, IndexError):
             return ''
 
@@ -557,14 +557,14 @@ def _guess_vc_legacy(self):
         str
             path
         """
-        default = join(
+        default = os.path.join(
             self.ProgramFilesx86, r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver
         )
 
         # Try to get "VC++ for Python" path from registry as default path
-        reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
+        reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
         python_vc = self.ri.lookup(reg_path, 'installdir')
-        default_vc = join(python_vc, 'VC') if python_vc else default
+        default_vc = os.path.join(python_vc, 'VC') if python_vc else default
 
         # Try to get path from registry, if fail use default path
         return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc
@@ -601,7 +601,7 @@ def WindowsSdkLastVersion(self):
         str
             version
         """
-        return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))
+        return self._use_last_dir_name(os.path.join(self.WindowsSdkDir, 'lib'))
 
     @property
     def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
@@ -616,34 +616,34 @@ def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
         sdkdir = ''
         for ver in self.WindowsSdkVersion:
             # Try to get it from registry
-            loc = join(self.ri.windows_sdk, 'v%s' % ver)
+            loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver)
             sdkdir = self.ri.lookup(loc, 'installationfolder')
             if sdkdir:
                 break
-        if not sdkdir or not isdir(sdkdir):
+        if not sdkdir or not os.path.isdir(sdkdir):
             # Try to get "VC++ for Python" version from registry
-            path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
+            path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
             install_base = self.ri.lookup(path, 'installdir')
             if install_base:
-                sdkdir = join(install_base, 'WinSDK')
-        if not sdkdir or not isdir(sdkdir):
+                sdkdir = os.path.join(install_base, 'WinSDK')
+        if not sdkdir or not os.path.isdir(sdkdir):
             # If fail, use default new path
             for ver in self.WindowsSdkVersion:
                 intver = ver[: ver.rfind('.')]
                 path = r'Microsoft SDKs\Windows Kits\%s' % intver
-                d = join(self.ProgramFiles, path)
-                if isdir(d):
+                d = os.path.join(self.ProgramFiles, path)
+                if os.path.isdir(d):
                     sdkdir = d
-        if not sdkdir or not isdir(sdkdir):
+        if not sdkdir or not os.path.isdir(sdkdir):
             # If fail, use default old path
             for ver in self.WindowsSdkVersion:
                 path = r'Microsoft SDKs\Windows\v%s' % ver
-                d = join(self.ProgramFiles, path)
-                if isdir(d):
+                d = os.path.join(self.ProgramFiles, path)
+                if os.path.isdir(d):
                     sdkdir = d
         if not sdkdir:
             # If fail, use Platform SDK
-            sdkdir = join(self.VCInstallDir, 'PlatformSDK')
+            sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK')
         return sdkdir
 
     @property
@@ -670,10 +670,10 @@ def WindowsSDKExecutablePath(self):
         regpaths = []
         if self.vs_ver >= 14.0:
             for ver in self.NetFxSdkVersion:
-                regpaths += [join(self.ri.netfx_sdk, ver, fx)]
+                regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)]
 
         for ver in self.WindowsSdkVersion:
-            regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
+            regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
 
         # Return installation folder from the more recent path
         for path in regpaths:
@@ -693,7 +693,7 @@ def FSharpInstallDir(self):
         str
             path
         """
-        path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
+        path = os.path.join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
         return self.ri.lookup(path, 'productdir') or ''
 
     @property
@@ -727,7 +727,7 @@ def UniversalCRTSdkLastVersion(self):
         str
             version
         """
-        return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib'))
+        return self._use_last_dir_name(os.path.join(self.UniversalCRTSdkDir, 'lib'))
 
     @property
     def NetFxSdkVersion(self):
@@ -758,7 +758,7 @@ def NetFxSdkDir(self):
         """
         sdkdir = ''
         for ver in self.NetFxSdkVersion:
-            loc = join(self.ri.netfx_sdk, ver)
+            loc = os.path.join(self.ri.netfx_sdk, ver)
             sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder')
             if sdkdir:
                 break
@@ -775,7 +775,7 @@ def FrameworkDir32(self):
             path
         """
         # Default path
-        guess_fw = join(self.WinDir, r'Microsoft.NET\Framework')
+        guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework')
 
         # Try to get path from registry, if fail use default path
         return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw
@@ -791,7 +791,7 @@ def FrameworkDir64(self):
             path
         """
         # Default path
-        guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64')
+        guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64')
 
         # Try to get path from registry, if fail use default path
         return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw
@@ -869,8 +869,9 @@ def _use_last_dir_name(path, prefix=''):
         """
         matching_dirs = (
             dir_name
-            for dir_name in reversed(listdir(path))
-            if isdir(join(path, dir_name)) and dir_name.startswith(prefix)
+            for dir_name in reversed(os.listdir(path))
+            if os.path.isdir(os.path.join(path, dir_name))
+            and dir_name.startswith(prefix)
         )
         return next(matching_dirs, None) or ''
 
@@ -950,7 +951,7 @@ def VSTools(self):
             paths += [r'Team Tools\Performance Tools']
             paths += [r'Team Tools\Performance Tools%s' % arch_subdir]
 
-        return [join(self.si.VSInstallDir, path) for path in paths]
+        return [os.path.join(self.si.VSInstallDir, path) for path in paths]
 
     @property
     def VCIncludes(self):
@@ -963,8 +964,8 @@ def VCIncludes(self):
             paths
         """
         return [
-            join(self.si.VCInstallDir, 'Include'),
-            join(self.si.VCInstallDir, r'ATLMFC\Include'),
+            os.path.join(self.si.VCInstallDir, 'Include'),
+            os.path.join(self.si.VCInstallDir, r'ATLMFC\Include'),
         ]
 
     @property
@@ -986,7 +987,7 @@ def VCLibraries(self):
         if self.vs_ver >= 14.0:
             paths += [r'Lib\store%s' % arch_subdir]
 
-        return [join(self.si.VCInstallDir, path) for path in paths]
+        return [os.path.join(self.si.VCInstallDir, path) for path in paths]
 
     @property
     def VCStoreRefs(self):
@@ -1000,7 +1001,7 @@ def VCStoreRefs(self):
         """
         if self.vs_ver < 14.0:
             return []
-        return [join(self.si.VCInstallDir, r'Lib\store\references')]
+        return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')]
 
     @property
     def VCTools(self):
@@ -1013,30 +1014,34 @@ def VCTools(self):
             paths
         """
         si = self.si
-        tools = [join(si.VCInstallDir, 'VCPackages')]
+        tools = [os.path.join(si.VCInstallDir, 'VCPackages')]
 
         forcex86 = self.vs_ver <= 10.0
         arch_subdir = self.pi.cross_dir(forcex86)
         if arch_subdir:
-            tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
+            tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
 
         if self.vs_ver == 14.0:
             path = 'Bin%s' % self.pi.current_dir(hidex86=True)
-            tools += [join(si.VCInstallDir, path)]
+            tools += [os.path.join(si.VCInstallDir, path)]
 
         elif self.vs_ver >= 15.0:
             host_dir = (
                 r'bin\HostX86%s' if self.pi.current_is_x86() else r'bin\HostX64%s'
             )
-            tools += [join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]
+            tools += [
+                os.path.join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))
+            ]
 
             if self.pi.current_cpu != self.pi.target_cpu:
                 tools += [
-                    join(si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))
+                    os.path.join(
+                        si.VCInstallDir, host_dir % self.pi.current_dir(x64=True)
+                    )
                 ]
 
         else:
-            tools += [join(si.VCInstallDir, 'Bin')]
+            tools += [os.path.join(si.VCInstallDir, 'Bin')]
 
         return tools
 
@@ -1052,13 +1057,13 @@ def OSLibraries(self):
         """
         if self.vs_ver <= 10.0:
             arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
-            return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
+            return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
 
         else:
             arch_subdir = self.pi.target_dir(x64=True)
-            lib = join(self.si.WindowsSdkDir, 'lib')
+            lib = os.path.join(self.si.WindowsSdkDir, 'lib')
             libver = self._sdk_subdir
-            return [join(lib, '%sum%s' % (libver, arch_subdir))]
+            return [os.path.join(lib, '%sum%s' % (libver, arch_subdir))]
 
     @property
     def OSIncludes(self):
@@ -1070,10 +1075,10 @@ def OSIncludes(self):
         list of str
             paths
         """
-        include = join(self.si.WindowsSdkDir, 'include')
+        include = os.path.join(self.si.WindowsSdkDir, 'include')
 
         if self.vs_ver <= 10.0:
-            return [include, join(include, 'gl')]
+            return [include, os.path.join(include, 'gl')]
 
         else:
             if self.vs_ver >= 14.0:
@@ -1081,9 +1086,9 @@ def OSIncludes(self):
             else:
                 sdkver = ''
             return [
-                join(include, '%sshared' % sdkver),
-                join(include, '%sum' % sdkver),
-                join(include, '%swinrt' % sdkver),
+                os.path.join(include, '%sshared' % sdkver),
+                os.path.join(include, '%sum' % sdkver),
+                os.path.join(include, '%swinrt' % sdkver),
             ]
 
     @property
@@ -1096,23 +1101,25 @@ def OSLibpath(self):
         list of str
             paths
         """
-        ref = join(self.si.WindowsSdkDir, 'References')
+        ref = os.path.join(self.si.WindowsSdkDir, 'References')
         libpath = []
 
         if self.vs_ver <= 9.0:
             libpath += self.OSLibraries
 
         if self.vs_ver >= 11.0:
-            libpath += [join(ref, r'CommonConfiguration\Neutral')]
+            libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')]
 
         if self.vs_ver >= 14.0:
             libpath += [
                 ref,
-                join(self.si.WindowsSdkDir, 'UnionMetadata'),
-                join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
-                join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
-                join(ref, 'Windows.Networking.Connectivity.WwanContract', '1.0.0.0'),
-                join(
+                os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'),
+                os.path.join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
+                os.path.join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
+                os.path.join(
+                    ref, 'Windows.Networking.Connectivity.WwanContract', '1.0.0.0'
+                ),
+                os.path.join(
                     self.si.WindowsSdkDir,
                     'ExtensionSDKs',
                     'Microsoft.VCLibs',
@@ -1147,12 +1154,12 @@ def _sdk_tools(self):
         """
         if self.vs_ver < 15.0:
             bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86'
-            yield join(self.si.WindowsSdkDir, bin_dir)
+            yield os.path.join(self.si.WindowsSdkDir, bin_dir)
 
         if not self.pi.current_is_x86():
             arch_subdir = self.pi.current_dir(x64=True)
             path = 'Bin%s' % arch_subdir
-            yield join(self.si.WindowsSdkDir, path)
+            yield os.path.join(self.si.WindowsSdkDir, path)
 
         if self.vs_ver in (10.0, 11.0):
             if self.pi.target_is_x86():
@@ -1160,13 +1167,13 @@ def _sdk_tools(self):
             else:
                 arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
             path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
-            yield join(self.si.WindowsSdkDir, path)
+            yield os.path.join(self.si.WindowsSdkDir, path)
 
         elif self.vs_ver >= 15.0:
-            path = join(self.si.WindowsSdkDir, 'Bin')
+            path = os.path.join(self.si.WindowsSdkDir, 'Bin')
             arch_subdir = self.pi.current_dir(x64=True)
             sdkver = self.si.WindowsSdkLastVersion
-            yield join(path, '%s%s' % (sdkver, arch_subdir))
+            yield os.path.join(path, '%s%s' % (sdkver, arch_subdir))
 
         if self.si.WindowsSDKExecutablePath:
             yield self.si.WindowsSDKExecutablePath
@@ -1197,7 +1204,7 @@ def SdkSetup(self):
         if self.vs_ver > 9.0:
             return []
 
-        return [join(self.si.WindowsSdkDir, 'Setup')]
+        return [os.path.join(self.si.WindowsSdkDir, 'Setup')]
 
     @property
     def FxTools(self):
@@ -1221,9 +1228,13 @@ def FxTools(self):
 
         tools = []
         if include32:
-            tools += [join(si.FrameworkDir32, ver) for ver in si.FrameworkVersion32]
+            tools += [
+                os.path.join(si.FrameworkDir32, ver) for ver in si.FrameworkVersion32
+            ]
         if include64:
-            tools += [join(si.FrameworkDir64, ver) for ver in si.FrameworkVersion64]
+            tools += [
+                os.path.join(si.FrameworkDir64, ver) for ver in si.FrameworkVersion64
+            ]
         return tools
 
     @property
@@ -1240,7 +1251,7 @@ def NetFxSDKLibraries(self):
             return []
 
         arch_subdir = self.pi.target_dir(x64=True)
-        return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
+        return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
 
     @property
     def NetFxSDKIncludes(self):
@@ -1255,7 +1266,7 @@ def NetFxSDKIncludes(self):
         if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
             return []
 
-        return [join(self.si.NetFxSdkDir, r'include\um')]
+        return [os.path.join(self.si.NetFxSdkDir, r'include\um')]
 
     @property
     def VsTDb(self):
@@ -1267,7 +1278,7 @@ def VsTDb(self):
         list of str
             paths
         """
-        return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')]
+        return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')]
 
     @property
     def MSBuild(self):
@@ -1289,11 +1300,11 @@ def MSBuild(self):
             arch_subdir = ''
 
         path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir)
-        build = [join(base_path, path)]
+        build = [os.path.join(base_path, path)]
 
         if self.vs_ver >= 15.0:
             # Add Roslyn C# & Visual Basic Compiler
-            build += [join(base_path, path, 'Roslyn')]
+            build += [os.path.join(base_path, path, 'Roslyn')]
 
         return build
 
@@ -1310,7 +1321,7 @@ def HTMLHelpWorkshop(self):
         if self.vs_ver < 11.0:
             return []
 
-        return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
+        return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
 
     @property
     def UCRTLibraries(self):
@@ -1326,9 +1337,9 @@ def UCRTLibraries(self):
             return []
 
         arch_subdir = self.pi.target_dir(x64=True)
-        lib = join(self.si.UniversalCRTSdkDir, 'lib')
+        lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib')
         ucrtver = self._ucrt_subdir
-        return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
+        return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
 
     @property
     def UCRTIncludes(self):
@@ -1343,8 +1354,8 @@ def UCRTIncludes(self):
         if self.vs_ver < 14.0:
             return []
 
-        include = join(self.si.UniversalCRTSdkDir, 'include')
-        return [join(include, '%sucrt' % self._ucrt_subdir)]
+        include = os.path.join(self.si.UniversalCRTSdkDir, 'include')
+        return [os.path.join(include, '%sucrt' % self._ucrt_subdir)]
 
     @property
     def _ucrt_subdir(self):
@@ -1387,13 +1398,13 @@ def VCRuntimeRedist(self) -> str | None:
         # Installation prefixes candidates
         prefixes = []
         tools_path = self.si.VCInstallDir
-        redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist'))
-        if isdir(redist_path):
+        redist_path = os.path.dirname(tools_path.replace(r'\Tools', r'\Redist'))
+        if os.path.isdir(redist_path):
             # Redist version may not be exactly the same as tools
-            redist_path = join(redist_path, listdir(redist_path)[-1])
-            prefixes += [redist_path, join(redist_path, 'onecore')]
+            redist_path = os.path.join(redist_path, os.listdir(redist_path)[-1])
+            prefixes += [redist_path, os.path.join(redist_path, 'onecore')]
 
-        prefixes += [join(tools_path, 'redist')]  # VS14 legacy path
+        prefixes += [os.path.join(tools_path, 'redist')]  # VS14 legacy path
 
         # CRT directory
         crt_dirs = (
@@ -1404,10 +1415,10 @@ def VCRuntimeRedist(self) -> str | None:
 
         # vcruntime path
         candidate_paths = (
-            join(prefix, arch_subdir, crt_dir, vcruntime)
+            os.path.join(prefix, arch_subdir, crt_dir, vcruntime)
             for (prefix, crt_dir) in itertools.product(prefixes, crt_dirs)
         )
-        return next(filter(isfile, candidate_paths), None)
+        return next(filter(os.path.isfile, candidate_paths), None)
 
     def return_env(self, exists=True):
         """
@@ -1494,11 +1505,11 @@ def _build_paths(self, name, spec_path_lists, exists):
         """
         # flatten spec_path_lists
         spec_paths = itertools.chain.from_iterable(spec_path_lists)
-        env_paths = environ.get(name, '').split(pathsep)
+        env_paths = environ.get(name, '').split(os.pathsep)
         paths = itertools.chain(spec_paths, env_paths)
-        extant_paths = list(filter(isdir, paths)) if exists else paths
+        extant_paths = list(filter(os.path.isdir, paths)) if exists else paths
         if not extant_paths:
             msg = "%s environment variable is empty" % name.upper()
             raise distutils.errors.DistutilsPlatformError(msg)
         unique_paths = unique_everseen(extant_paths)
-        return pathsep.join(unique_paths)
+        return os.pathsep.join(unique_paths)

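The hunks above are part of a mechanical refactor: path helpers previously imported bare (join, isdir, isfile, dirname, listdir, pathsep) become fully qualified os.path and os calls. A minimal before/after sketch of the pattern, with illustrative values:

    import os

    # Before: bare names imported individually.
    # from os.path import join, isdir
    # paths = [join(base, sub) for sub in subdirs if isdir(join(base, sub))]

    # After: the same logic with explicit, qualified names.
    base = r'C:\Program Files (x86)'             # illustrative
    subdirs = ['HTML Help Workshop', 'MSBuild']  # illustrative
    paths = [
        os.path.join(base, sub)
        for sub in subdirs
        if os.path.isdir(os.path.join(base, sub))
    ]
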
From 7ee29bd9ade19515c9521911c2d243817c021ee8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 08:49:09 -0400
Subject: [PATCH 1120/1761] Bump version: 74.1.0 → 74.1.1
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/1902.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/1902.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 459cd01324..a80e6ea38c 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 74.1.0
+current_version = 74.1.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index fa5f4597b1..5629e16a0e 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v74.1.1
+=======
+
+Bugfixes
+--------
+
+- Fixed TypeError in ``msvc.EnvironmentInfo.return_env`` when no runtime redistributables are installed. (#1902)
+
+
 v74.1.0
 =======
 
diff --git a/newsfragments/1902.bugfix.rst b/newsfragments/1902.bugfix.rst
deleted file mode 100644
index 5b2d0b7ba5..0000000000
--- a/newsfragments/1902.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fixed TypeError in ``msvc.EnvironmentInfo.return_env`` when no runtime redistributables are installed.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 072a333645..3c9a9480c2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "74.1.0"
+version = "74.1.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 91bc99ac821731fc8b594d38c0b5500f8da0819f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 09:18:27 -0400
Subject: [PATCH 1121/1761] In sdist.prune_file_list, support build.build_base
 as a pathlib.Path.

Closes pypa/setuptools#4615
---
 distutils/command/sdist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index e8abb73920..eda6afe811 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -391,7 +391,7 @@ def prune_file_list(self):
         build = self.get_finalized_command('build')
         base_dir = self.distribution.get_fullname()
 
-        self.filelist.exclude_pattern(None, prefix=build.build_base)
+        self.filelist.exclude_pattern(None, prefix=os.fspath(build.build_base))
         self.filelist.exclude_pattern(None, prefix=base_dir)
 
         if sys.platform == 'win32':

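For context, os.fspath is what makes this one-line fix sufficient: it normalizes any object implementing __fspath__ (such as pathlib.Path) to str, and passes plain strings through untouched, so exclude_pattern always receives the str prefix it expects. A minimal sketch:

    import os
    import pathlib

    build_base = pathlib.Path('build')
    assert os.fspath(build_base) == 'build'  # Path -> str via __fspath__
    assert os.fspath('build') == 'build'     # str passes through unchanged
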
From d901698dc01e18b4ebdb04e9a65df98883f7108b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 09:11:37 -0400
Subject: [PATCH 1122/1761] Add test capturing missed expectation.

Ref #4615
---
 setuptools/tests/test_sdist.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index f628f3eb4a..d0db9c8d6f 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -4,6 +4,7 @@
 import io
 import logging
 import os
+import pathlib
 import sys
 import tarfile
 import tempfile
@@ -822,6 +823,21 @@ def get_source_files(self):
         manifest = cmd.filelist.files
         assert '.myfile~' in manifest
 
+    @pytest.mark.xfail(reason="4615")
+    def test_build_base_pathlib(self, source_dir):
+        """
+        Ensure if build_base is a pathlib.Path, the build still succeeds.
+        """
+        dist = Distribution({
+            **SETUP_ATTRS,
+            "script_name": "setup.py",
+            "options": {"build": {"build_base": pathlib.Path('build')}},
+        })
+        cmd = sdist(dist)
+        cmd.ensure_finalized()
+        with quiet():
+            cmd.run()
+
 
 def test_default_revctrl():
     """

From 9d4b288a2643df4872036d06d6b14f933db8cebc Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 09:28:44 -0400
Subject: [PATCH 1123/1761] Enable the test

---
 setuptools/tests/test_sdist.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index d0db9c8d6f..be48ce2c7e 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -823,7 +823,6 @@ def get_source_files(self):
         manifest = cmd.filelist.files
         assert '.myfile~' in manifest
 
-    @pytest.mark.xfail(reason="4615")
     def test_build_base_pathlib(self, source_dir):
         """
         Ensure if build_base is a pathlib.Path, the build still succeeds.

From 6bf20d96aaeb3bce0d24a45198be4c9bf286a6b4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 09:30:04 -0400
Subject: [PATCH 1124/1761] Add news fragment.

---
 newsfragments/4615.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4615.bugfix.rst

diff --git a/newsfragments/4615.bugfix.rst b/newsfragments/4615.bugfix.rst
new file mode 100644
index 0000000000..e121f2dbca
--- /dev/null
+++ b/newsfragments/4615.bugfix.rst
@@ -0,0 +1 @@
+Fixed TypeError in sdist filelist processing by adding support for pathlib Paths for the build_base.
\ No newline at end of file

From a07de2b9364d5aa618c78c3ad60312963abfa7ba Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 14:01:34 -0400
Subject: [PATCH 1125/1761] Skip test on stdlib distutils

---
 setuptools/tests/test_sdist.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index be48ce2c7e..30347190db 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -823,6 +823,7 @@ def get_source_files(self):
         manifest = cmd.filelist.files
         assert '.myfile~' in manifest
 
+    @pytest.mark.skipif("os.environ.get('SETUPTOOLS_USE_DISTUTILS') == 'stdlib'")
     def test_build_base_pathlib(self, source_dir):
         """
         Ensure if build_base is a pathlib.Path, the build still succeeds.

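The marker above uses pytest's string-condition form of skipif: the expression is stored as text and evaluated lazily at collection time, in a namespace that includes os, sys, platform, and config, so the test module needs no extra imports. A sketch of the equivalent boolean form (the test name and reason here are illustrative):

    import os
    import pytest

    @pytest.mark.skipif(
        os.environ.get('SETUPTOOLS_USE_DISTUTILS') == 'stdlib',
        reason="stdlib distutils lacks the pathlib build_base support",
    )
    def test_build_base_pathlib_alt():
        ...
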
From c484f9edb936049cf5958a7ce004b18a74158c13 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 3 Sep 2024 19:36:41 -0400
Subject: [PATCH 1126/1761] Bump version: 74.1.1 → 74.1.2
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4615.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4615.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index a80e6ea38c..cbb10b8211 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 74.1.1
+current_version = 74.1.2
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 5629e16a0e..c0838a12eb 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v74.1.2
+=======
+
+Bugfixes
+--------
+
+- Fixed TypeError in sdist filelist processing by adding support for pathlib Paths for the build_base. (#4615)
+
+
 v74.1.1
 =======
 
diff --git a/newsfragments/4615.bugfix.rst b/newsfragments/4615.bugfix.rst
deleted file mode 100644
index e121f2dbca..0000000000
--- a/newsfragments/4615.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fixed TypeError in sdist filelist processing by adding support for pathlib Paths for the build_base.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 3c9a9480c2..d0b709d9be 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "74.1.1"
+version = "74.1.2"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 340137fa4cdd2f7945bcaa7aa0aa1a6600a8d11a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 3 Sep 2024 09:14:15 +0300
Subject: [PATCH 1127/1761] Ignore TRY400 globally

While the suggestion might be valid in some cases, it doesn't fit all
use cases and ends up being a nuisance.
---
 distutils/_msvccompiler.py | 2 +-
 ruff.toml                  | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index 03653929a8..e7652218d8 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -159,7 +159,7 @@ def _get_vc_env(plat_spec):
             stderr=subprocess.STDOUT,
         ).decode('utf-16le', errors='replace')
     except subprocess.CalledProcessError as exc:
-        log.error(exc.output)  # noqa: RUF100, TRY400
+        log.error(exc.output)
         raise DistutilsPlatformError(f"Error executing {exc.cmd}")
 
     env = {
diff --git a/ruff.toml b/ruff.toml
index 664d894a86..370e9cae49 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -39,6 +39,7 @@ ignore = [
 	# local
 	"B028",
 	"B904",
+	"TRY400",
 ]
 
 [format]

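TRY400 flags log.error calls inside an except block, preferring log.exception so the traceback is recorded; here the captured compiler output is the useful signal and a traceback would be noise. A minimal illustration of what the rule distinguishes:

    import logging

    log = logging.getLogger(__name__)

    try:
        raise RuntimeError('vcvarsall failed')     # illustrative failure
    except RuntimeError as exc:
        log.error('output: %s', exc)      # message only -- what TRY400 flags
        log.exception('output: %s', exc)  # message plus the full traceback
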
From aba9eefa8273a86346256dae5b2f565427dc1cab Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 3 Sep 2024 09:15:58 +0300
Subject: [PATCH 1128/1761] Clean up ruff ignore

---
 ruff.toml | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 370e9cae49..730732b197 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -16,11 +16,7 @@ extend-select = [
 	"YTT",
 ]
 ignore = [
-	# local
-	"PERF203",
-	"TRY003",
-
-  # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
+	# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
 	"W191",
 	"E111",
 	"E114",
@@ -39,6 +35,8 @@ ignore = [
 	# local
 	"B028",
 	"B904",
+	"PERF203",
+	"TRY003",
 	"TRY400",
 ]
 

From 3b2ef1e72a704d7a2528408c190b39ed71560341 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 4 Sep 2024 23:54:31 -0400
Subject: [PATCH 1129/1761] Removed test_integration tests (for easy_install).

Closes #4632
---
 NEWS.rst                             |   1 +
 setuptools/tests/test_integration.py | 121 ---------------------------
 2 files changed, 1 insertion(+), 121 deletions(-)
 delete mode 100644 setuptools/tests/test_integration.py

diff --git a/NEWS.rst b/NEWS.rst
index c0838a12eb..7b62a76e0c 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -5,6 +5,7 @@ Bugfixes
 --------
 
 - Fixed TypeError in sdist filelist processing by adding support for pathlib Paths for the build_base. (#4615)
+- Removed degraded and deprecated ``test_integration`` (easy_install) from the test suite. (#4632)
 
 
 v74.1.1
diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py
deleted file mode 100644
index b0e7d67b5e..0000000000
--- a/setuptools/tests/test_integration.py
+++ /dev/null
@@ -1,121 +0,0 @@
-"""Run some integration tests.
-
-Try to install a few packages.
-"""
-
-import glob
-import os
-import sys
-import urllib.request
-
-import pytest
-
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.command.easy_install import easy_install
-from setuptools.dist import Distribution
-
-pytestmark = pytest.mark.skipif(
-    'platform.python_implementation() == "PyPy" and platform.system() == "Windows"',
-    reason="pypa/setuptools#2496",
-)
-
-
-def setup_module(module):
-    packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient'
-    for pkg in packages:
-        try:
-            __import__(pkg)
-            tmpl = "Integration tests cannot run when {pkg} is installed"
-            pytest.skip(tmpl.format(**locals()))
-        except ImportError:
-            pass
-
-    try:
-        urllib.request.urlopen('https://pypi.python.org/pypi')
-    except Exception as exc:
-        pytest.skip(str(exc))
-
-
-@pytest.fixture
-def install_context(request, tmpdir, monkeypatch):
-    """Fixture to set up temporary installation directory."""
-    # Save old values so we can restore them.
-    new_cwd = tmpdir.mkdir('cwd')
-    user_base = tmpdir.mkdir('user_base')
-    user_site = tmpdir.mkdir('user_site')
-    install_dir = tmpdir.mkdir('install_dir')
-
-    def fin():
-        # undo the monkeypatch, particularly needed under
-        # windows because of kept handle on cwd
-        monkeypatch.undo()
-        new_cwd.remove()
-        user_base.remove()
-        user_site.remove()
-        install_dir.remove()
-
-    request.addfinalizer(fin)
-
-    # Change the environment and site settings to control where the
-    # files are installed and ensure we do not overwrite anything.
-    monkeypatch.chdir(new_cwd)
-    monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath)
-    monkeypatch.setattr('site.USER_BASE', user_base.strpath)
-    monkeypatch.setattr('site.USER_SITE', user_site.strpath)
-    monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath])
-    monkeypatch.setenv('PYTHONPATH', str(os.path.pathsep.join(sys.path)))
-
-    # Set up the command for performing the installation.
-    dist = Distribution()
-    cmd = easy_install(dist)
-    cmd.install_dir = install_dir.strpath
-    return cmd
-
-
-def _install_one(requirement, cmd, pkgname, modulename):
-    cmd.args = [requirement]
-    cmd.ensure_finalized()
-    cmd.run()
-    target = cmd.install_dir
-    dest_path = glob.glob(os.path.join(target, pkgname + '*.egg'))
-    assert dest_path
-    assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename))
-
-
-def test_stevedore(install_context):
-    _install_one('stevedore', install_context, 'stevedore', 'extension.py')
-
-
-@pytest.mark.xfail
-def test_virtualenvwrapper(install_context):
-    _install_one(
-        'virtualenvwrapper', install_context, 'virtualenvwrapper', 'hook_loader.py'
-    )
-
-
-def test_pbr(install_context):
-    _install_one('pbr', install_context, 'pbr', 'core.py')
-
-
-@pytest.mark.xfail
-@pytest.mark.filterwarnings("ignore:'encoding' argument not specified")
-# ^-- Dependency chain: `python-novaclient` < `oslo-utils` < `netifaces==0.11.0`
-#     netifaces' setup.py uses `open` without `encoding="utf-8"` which is hijacked by
-#     `setuptools.sandbox._open` and triggers the EncodingWarning.
-#     Can't use EncodingWarning in the filter, as it does not exist on Python < 3.10.
-def test_python_novaclient(install_context):
-    _install_one('python-novaclient', install_context, 'novaclient', 'base.py')
-
-
-def test_pyuri(install_context):
-    """
-    Install the pyuri package (version 0.3.1 at the time of writing).
-
-    This is also a regression test for issue #1016.
-    """
-    _install_one('pyuri', install_context, 'pyuri', 'uri.py')
-
-    pyuri = install_context.installed_projects['pyuri']
-
-    # The package data should be installed.
-    assert os.path.exists(os.path.join(pyuri.location, 'pyuri', 'uri.regex'))

From 8e8f0cbcc4060cbd7668f27cfb23b5d78eabd3d4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 09:51:07 -0400
Subject: [PATCH 1130/1761] Clean up cruft in errors docstring.

---
 distutils/errors.py | 13 +++++--------
 1 file changed, 5 insertions(+), 8 deletions(-)

diff --git a/distutils/errors.py b/distutils/errors.py
index 626254c321..3196a4f097 100644
--- a/distutils/errors.py
+++ b/distutils/errors.py
@@ -1,12 +1,9 @@
-"""distutils.errors
+"""
+Exceptions used by the Distutils modules.
 
-Provides exceptions used by the Distutils modules.  Note that Distutils
-modules may raise standard exceptions; in particular, SystemExit is
-usually raised for errors that are obviously the end-user's fault
-(eg. bad command-line arguments).
-
-This module is safe to use in "from ... import *" mode; it only exports
-symbols whose names start with "Distutils" and end with "Error"."""
+Distutils modules may raise these or standard exceptions,
+including :exc:`SystemExit`.
+"""
 
 
 class DistutilsError(Exception):

From cded66c0ef65488cc1c7b0ccc762945e53f67341 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 10:11:08 -0400
Subject: [PATCH 1131/1761] Rely on dependencies instead of vendoring them.

---
 distutils/_collections.py     | 58 ----------------------------
 distutils/_functools.py       | 73 -----------------------------------
 distutils/_itertools.py       | 52 -------------------------
 distutils/_modified.py        |  3 +-
 distutils/ccompiler.py        |  3 +-
 distutils/command/install.py  |  5 ++-
 distutils/command/register.py |  3 +-
 distutils/command/upload.py   |  3 +-
 distutils/sysconfig.py        |  3 +-
 distutils/util.py             |  3 +-
 pyproject.toml                |  3 ++
 11 files changed, 18 insertions(+), 191 deletions(-)
 delete mode 100644 distutils/_collections.py
 delete mode 100644 distutils/_functools.py
 delete mode 100644 distutils/_itertools.py

diff --git a/distutils/_collections.py b/distutils/_collections.py
deleted file mode 100644
index 863030b3cf..0000000000
--- a/distutils/_collections.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from __future__ import annotations
-
-import collections
-import itertools
-
-
-# from jaraco.collections 3.5.1
-class DictStack(list, collections.abc.Mapping):
-    """
-    A stack of dictionaries that behaves as a view on those dictionaries,
-    giving preference to the last.
-
-    >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)])
-    >>> stack['a']
-    2
-    >>> stack['b']
-    2
-    >>> stack['c']
-    2
-    >>> len(stack)
-    3
-    >>> stack.push(dict(a=3))
-    >>> stack['a']
-    3
-    >>> set(stack.keys()) == set(['a', 'b', 'c'])
-    True
-    >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)])
-    True
-    >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2)
-    True
-    >>> d = stack.pop()
-    >>> stack['a']
-    2
-    >>> d = stack.pop()
-    >>> stack['a']
-    1
-    >>> stack.get('b', None)
-    >>> 'c' in stack
-    True
-    """
-
-    def __iter__(self):
-        dicts = list.__iter__(self)
-        return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts)))
-
-    def __getitem__(self, key):
-        for scope in reversed(tuple(list.__iter__(self))):
-            if key in scope:
-                return scope[key]
-        raise KeyError(key)
-
-    push = list.append
-
-    def __contains__(self, other):
-        return collections.abc.Mapping.__contains__(self, other)
-
-    def __len__(self):
-        return len(list(iter(self)))
diff --git a/distutils/_functools.py b/distutils/_functools.py
deleted file mode 100644
index e03365eafa..0000000000
--- a/distutils/_functools.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import collections.abc
-import functools
-
-
-# from jaraco.functools 3.5
-def pass_none(func):
-    """
-    Wrap func so it's not called if its first param is None
-
-    >>> print_text = pass_none(print)
-    >>> print_text('text')
-    text
-    >>> print_text(None)
-    """
-
-    @functools.wraps(func)
-    def wrapper(param, *args, **kwargs):
-        if param is not None:
-            return func(param, *args, **kwargs)
-
-    return wrapper
-
-
-# from jaraco.functools 4.0
-@functools.singledispatch
-def _splat_inner(args, func):
-    """Splat args to func."""
-    return func(*args)
-
-
-@_splat_inner.register
-def _(args: collections.abc.Mapping, func):
-    """Splat kargs to func as kwargs."""
-    return func(**args)
-
-
-def splat(func):
-    """
-    Wrap func to expect its parameters to be passed positionally in a tuple.
-
-    Has a similar effect to that of ``itertools.starmap`` over
-    simple ``map``.
-
-    >>> import itertools, operator
-    >>> pairs = [(-1, 1), (0, 2)]
-    >>> _ = tuple(itertools.starmap(print, pairs))
-    -1 1
-    0 2
-    >>> _ = tuple(map(splat(print), pairs))
-    -1 1
-    0 2
-
-    The approach generalizes to other iterators that don't have a "star"
-    equivalent, such as a "starfilter".
-
-    >>> list(filter(splat(operator.add), pairs))
-    [(0, 2)]
-
-    Splat also accepts a mapping argument.
-
-    >>> def is_nice(msg, code):
-    ...     return "smile" in msg or code == 0
-    >>> msgs = [
-    ...     dict(msg='smile!', code=20),
-    ...     dict(msg='error :(', code=1),
-    ...     dict(msg='unknown', code=0),
-    ... ]
-    >>> for msg in filter(splat(is_nice), msgs):
-    ...     print(msg)
-    {'msg': 'smile!', 'code': 20}
-    {'msg': 'unknown', 'code': 0}
-    """
-    return functools.wraps(func)(functools.partial(_splat_inner, func=func))
diff --git a/distutils/_itertools.py b/distutils/_itertools.py
deleted file mode 100644
index 85b2951186..0000000000
--- a/distutils/_itertools.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# from more_itertools 10.2
-def always_iterable(obj, base_type=(str, bytes)):
-    """If *obj* is iterable, return an iterator over its items::
-
-        >>> obj = (1, 2, 3)
-        >>> list(always_iterable(obj))
-        [1, 2, 3]
-
-    If *obj* is not iterable, return a one-item iterable containing *obj*::
-
-        >>> obj = 1
-        >>> list(always_iterable(obj))
-        [1]
-
-    If *obj* is ``None``, return an empty iterable:
-
-        >>> obj = None
-        >>> list(always_iterable(None))
-        []
-
-    By default, binary and text strings are not considered iterable::
-
-        >>> obj = 'foo'
-        >>> list(always_iterable(obj))
-        ['foo']
-
-    If *base_type* is set, objects for which ``isinstance(obj, base_type)``
-    returns ``True`` won't be considered iterable.
-
-        >>> obj = {'a': 1}
-        >>> list(always_iterable(obj))  # Iterate over the dict's keys
-        ['a']
-        >>> list(always_iterable(obj, base_type=dict))  # Treat dicts as a unit
-        [{'a': 1}]
-
-    Set *base_type* to ``None`` to avoid any special handling and treat objects
-    Python considers iterable as iterable:
-
-        >>> obj = 'foo'
-        >>> list(always_iterable(obj, base_type=None))
-        ['f', 'o', 'o']
-    """
-    if obj is None:
-        return iter(())
-
-    if (base_type is not None) and isinstance(obj, base_type):
-        return iter((obj,))
-
-    try:
-        return iter(obj)
-    except TypeError:
-        return iter((obj,))
diff --git a/distutils/_modified.py b/distutils/_modified.py
index b7bdaa2943..7cdca9398f 100644
--- a/distutils/_modified.py
+++ b/distutils/_modified.py
@@ -3,7 +3,8 @@
 import functools
 import os.path
 
-from ._functools import splat
+from jaraco.functools import splat
+
 from .compat.py39 import zip_strict
 from .errors import DistutilsFileError
 
diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index bc4743bcbf..5e73e56d02 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -9,7 +9,8 @@
 import types
 import warnings
 
-from ._itertools import always_iterable
+from more_itertools import always_iterable
+
 from ._log import log
 from ._modified import newer_group
 from .dir_util import mkpath
diff --git a/distutils/command/install.py b/distutils/command/install.py
index b83e061e02..ceb453e041 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -10,7 +10,8 @@
 from distutils._log import log
 from site import USER_BASE, USER_SITE
 
-from .. import _collections
+import jaraco.collections
+
 from ..core import Command
 from ..debug import DEBUG
 from ..errors import DistutilsOptionError, DistutilsPlatformError
@@ -428,7 +429,7 @@ def finalize_options(self):  # noqa: C901
             local_vars['userbase'] = self.install_userbase
             local_vars['usersite'] = self.install_usersite
 
-        self.config_vars = _collections.DictStack([
+        self.config_vars = jaraco.collections.DictStack([
             fw.vars(),
             compat_vars,
             sysconfig.get_config_vars(),
diff --git a/distutils/command/register.py b/distutils/command/register.py
index c1acd27b54..1089daf78f 100644
--- a/distutils/command/register.py
+++ b/distutils/command/register.py
@@ -13,7 +13,8 @@
 from distutils._log import log
 from warnings import warn
 
-from .._itertools import always_iterable
+from more_itertools import always_iterable
+
 from ..core import PyPIRCCommand
 
 
diff --git a/distutils/command/upload.py b/distutils/command/upload.py
index a2461e089f..5717eef1fa 100644
--- a/distutils/command/upload.py
+++ b/distutils/command/upload.py
@@ -13,7 +13,8 @@
 from urllib.parse import urlparse
 from urllib.request import HTTPError, Request, urlopen
 
-from .._itertools import always_iterable
+from more_itertools import always_iterable
+
 from ..core import PyPIRCCommand
 from ..errors import DistutilsError, DistutilsOptionError
 from ..spawn import spawn
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 28a7c571dc..da1eecbe7e 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -16,7 +16,8 @@
 import sys
 import sysconfig
 
-from ._functools import pass_none
+from jaraco.functools import pass_none
+
 from .compat import py39
 from .errors import DistutilsPlatformError
 from .util import is_mingw
diff --git a/distutils/util.py b/distutils/util.py
index 4cc6bd283c..609c1a50cd 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -17,7 +17,8 @@
 import sysconfig
 import tempfile
 
-from ._functools import pass_none
+from jaraco.functools import pass_none
+
 from ._log import log
 from ._modified import newer
 from .errors import DistutilsByteCompileError, DistutilsPlatformError
diff --git a/pyproject.toml b/pyproject.toml
index 068902265f..9f528752ab 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,6 +20,9 @@ requires-python = ">=3.8"
 dependencies = [
 	# Setuptools must require these
 	"packaging",
+	"jaraco.functools",
+	"more_itertools",
+	"jaraco.collections",
 ]
 dynamic = ["version"]
 

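The swaps are intended as drop-in replacements: the deleted modules identify themselves as copies of jaraco.functools 3.5/4.0, jaraco.collections 3.5.1, and more_itertools 10.2. A quick sketch of the upstream names in use, assuming those packages are installed:

    from jaraco.collections import DictStack
    from jaraco.functools import pass_none, splat
    from more_itertools import always_iterable

    print(pass_none(str.upper)(None))              # None: func skipped for None
    print(splat(max)((3, 7)))                      # 7: tuple splatted positionally
    print(list(always_iterable('abc')))            # ['abc']: strings stay atomic
    print(DictStack([dict(a=1), dict(a=2)])['a'])  # 2: the last dict wins
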
From 10078fe2b139c424f034f0f817d9695c899cd9f6 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 10:27:16 -0400
Subject: [PATCH 1132/1761] Simply log the directory being created, rather than
 the whole ancestry.

---
 distutils/dir_util.py            | 14 +++++---------
 distutils/tests/test_dir_util.py |  2 +-
 2 files changed, 6 insertions(+), 10 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 724afeff6f..dfe0613429 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -20,7 +20,7 @@ def mkpath(name, mode=0o777, verbose=True, dry_run=False):  # noqa: C901
     means the current directory, which of course exists), then do nothing.
     Raise DistutilsFileError if unable to create some directory along the way
     (eg. some sub-path exists, but is a file rather than a directory).
-    If 'verbose' is true, print a one-line summary of each mkdir to stdout.
+    If 'verbose' is true, log the directory created.
     Return the list of directories actually created.
 
     os.makedirs is not used because:
@@ -36,12 +36,11 @@ def mkpath(name, mode=0o777, verbose=True, dry_run=False):  # noqa: C901
     if not isinstance(name, str):
         raise DistutilsInternalError(f"mkpath: 'name' must be a string (got {name!r})")
 
-    # XXX what's the better way to handle verbosity? print as we create
-    # each directory in the path (the current behaviour), or only announce
-    # the creation of the whole path? (quite easy to do the latter since
-    # we're not using a recursive algorithm)
-
     name = os.path.normpath(name)
+
+    if verbose and not os.path.isdir(name):
+        log.info("creating %s", name)
+
     created_dirs = []
     if os.path.isdir(name) or name == '':
         return created_dirs
@@ -66,9 +65,6 @@ def mkpath(name, mode=0o777, verbose=True, dry_run=False):  # noqa: C901
         if abs_head in _path_created:
             continue
 
-        if verbose >= 1:
-            log.info("creating %s", head)
-
         if not dry_run:
             try:
                 os.mkdir(head, mode)
diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index c00ffbbd81..12e643ab74 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -34,7 +34,7 @@ def test_mkpath_remove_tree_verbosity(self, caplog):
         remove_tree(self.root_target, verbose=False)
 
         mkpath(self.target, verbose=True)
-        wanted = [f'creating {self.root_target}', f'creating {self.target}']
+        wanted = [f'creating {self.target}']
         assert caplog.messages == wanted
         caplog.clear()
 

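The adjusted test expectation captures the behavior change: even though both root_target and target get created, only the requested leaf path is announced. A hedged sketch of the same check in isolation (test name and paths are illustrative):

    import logging

    from distutils.dir_util import mkpath

    def test_mkpath_logs_leaf_only(tmp_path, caplog):
        caplog.set_level(logging.INFO)
        target = tmp_path / 'ab' / 'cd'   # two levels that do not exist yet
        mkpath(str(target), verbose=True)
        # One message for the full path; no entry per intermediate ancestor.
        assert caplog.messages == [f'creating {target}']
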
From 45b1295d9717b7a6498b092f2272d754cd86203a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 13:36:55 -0400
Subject: [PATCH 1133/1761] Updated mkpath to use pathlib.Path.mkdir.

---
 distutils/dir_util.py | 60 ++++++++++++-------------------------------
 1 file changed, 16 insertions(+), 44 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index dfe0613429..a9ee842aac 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -2,8 +2,9 @@
 
 Utility functions for manipulating directories and directory trees."""
 
-import errno
+import itertools
 import os
+import pathlib
 
 from ._log import log
 from .errors import DistutilsFileError, DistutilsInternalError
@@ -13,7 +14,7 @@
 _path_created = set()
 
 
-def mkpath(name, mode=0o777, verbose=True, dry_run=False):  # noqa: C901
+def mkpath(name, mode=0o777, verbose=True, dry_run=False):
     """Create a directory and any missing ancestor directories.
 
     If the directory already exists (or if 'name' is the empty string, which
@@ -22,12 +23,6 @@ def mkpath(name, mode=0o777, verbose=True, dry_run=False):  # noqa: C901
     (eg. some sub-path exists, but is a file rather than a directory).
     If 'verbose' is true, log the directory created.
     Return the list of directories actually created.
-
-    os.makedirs is not used because:
-
-    a) It's new to Python 1.5.2, and
-    b) it blows up if the directory already exists (in which case it should
-       silently succeed).
     """
 
     global _path_created
@@ -36,47 +31,24 @@ def mkpath(name, mode=0o777, verbose=True, dry_run=False):  # noqa: C901
     if not isinstance(name, str):
         raise DistutilsInternalError(f"mkpath: 'name' must be a string (got {name!r})")
 
-    name = os.path.normpath(name)
-
-    if verbose and not os.path.isdir(name):
-        log.info("creating %s", name)
-
-    created_dirs = []
-    if os.path.isdir(name) or name == '':
-        return created_dirs
-    if os.path.abspath(name) in _path_created:
-        return created_dirs
+    name = pathlib.Path(name)
 
-    (head, tail) = os.path.split(name)
-    tails = [tail]  # stack of lone dirs to create
+    if str(name.absolute()) in _path_created:
+        return
 
-    while head and tail and not os.path.isdir(head):
-        (head, tail) = os.path.split(head)
-        tails.insert(0, tail)  # push next higher dir onto stack
+    if verbose and not name.is_dir():
+        log.info("creating %s", name)
 
-    # now 'head' contains the deepest directory that already exists
-    # (that is, the child of 'head' in 'name' is the highest directory
-    # that does *not* exist)
-    for d in tails:
-        # print "head = %s, d = %s: " % (head, d),
-        head = os.path.join(head, d)
-        abs_head = os.path.abspath(head)
+    ancestry = itertools.chain((name,), name.parents)
+    missing = (path for path in ancestry if not path.is_dir())
 
-        if abs_head in _path_created:
-            continue
+    try:
+        dry_run or name.mkdir(mode=mode, parents=True, exist_ok=True)
+        _path_created.add(name.absolute())
+    except OSError as exc:
+        raise DistutilsFileError(f"could not create '{name}': {exc.args[-1]}")
 
-        if not dry_run:
-            try:
-                os.mkdir(head, mode)
-            except OSError as exc:
-                if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
-                    raise DistutilsFileError(
-                        f"could not create '{head}': {exc.args[-1]}"
-                    )
-            created_dirs.append(head)
-
-        _path_created.add(abs_head)
-    return created_dirs
+    return list(map(str, missing))
 
 
 def create_tree(base_dir, files, mode=0o777, verbose=True, dry_run=False):

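Path.mkdir(parents=True, exist_ok=True) collapses the old split/stack walk into a single call: missing parents are created on demand and an already-existing directory is not an error. One subtlety in the version shown: missing is a generator, so its is_dir() checks run only when list() consumes it after mkdir, by which point the directories exist. A sketch of that evaluation-order difference (an observation about the code as shown, not a claim about later revisions):

    import pathlib
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        name = pathlib.Path(tmp, 'a', 'b', 'c')
        ancestry = [name, *name.parents]

        lazy = (p for p in ancestry if not p.is_dir())   # checks deferred
        eager = [p for p in ancestry if not p.is_dir()]  # checks run now

        name.mkdir(parents=True, exist_ok=True)  # creates a, a/b, a/b/c at once

        print(eager)       # the three directories that were missing beforehand
        print(list(lazy))  # []: by consumption time everything already exists
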
From 284279d7eed2466ae933cc61ac1cacc5b9c44527 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 14:20:08 -0400
Subject: [PATCH 1134/1761] Refactored mkpath as a singledispatch function.

---
 distutils/dir_util.py | 23 ++++++++++++++++-------
 1 file changed, 16 insertions(+), 7 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index a9ee842aac..ab848240ea 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -2,6 +2,7 @@
 
 Utility functions for manipulating directories and directory trees."""
 
+import functools
 import itertools
 import os
 import pathlib
@@ -14,7 +15,8 @@
 _path_created = set()
 
 
-def mkpath(name, mode=0o777, verbose=True, dry_run=False):
+@functools.singledispatch
+def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False):
     """Create a directory and any missing ancestor directories.
 
     If the directory already exists (or if 'name' is the empty string, which
@@ -27,12 +29,6 @@ def mkpath(name, mode=0o777, verbose=True, dry_run=False):
 
     global _path_created
 
-    # Detect a common bug -- name is None
-    if not isinstance(name, str):
-        raise DistutilsInternalError(f"mkpath: 'name' must be a string (got {name!r})")
-
-    name = pathlib.Path(name)
-
     if str(name.absolute()) in _path_created:
         return
 
@@ -51,6 +47,19 @@ def mkpath(name, mode=0o777, verbose=True, dry_run=False):
     return list(map(str, missing))
 
 
+@mkpath.register
+def _(name: str, *args, **kwargs):
+    return mkpath(pathlib.Path(name), *args, **kwargs)
+
+
+@mkpath.register
+def _(name: None, *args, **kwargs):
+    """
+    Detect a common bug -- name is None.
+    """
+    raise DistutilsInternalError(f"mkpath: 'name' must be a string (got {name!r})")
+
+
 def create_tree(base_dir, files, mode=0o777, verbose=True, dry_run=False):
     """Create all the empty directories under 'base_dir' needed to put 'files'
     there.

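functools.singledispatch selects an implementation from the runtime type of the first argument, and register infers the dispatch type from the parameter annotation; a bare None annotation is treated as NoneType, which is what lets the patch give the old name-is-None bug its own branch. A minimal sketch of the same shape:

    import functools
    import pathlib

    @functools.singledispatch
    def describe(obj: pathlib.Path):
        return f'path with {len(obj.parts)} parts'

    @describe.register
    def _(obj: str):
        return describe(pathlib.Path(obj))  # normalize, then re-dispatch

    @describe.register
    def _(obj: None):
        raise TypeError('describe: obj must not be None')

    print(describe('a/b/c'))  # -> 'path with 3 parts'
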
From a235e3b2c26be1599b948b83f5b2ed55831fd132 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 14:41:32 -0400
Subject: [PATCH 1135/1761] Declare also the dependencies used by distutils
 (adds jaraco.collections).

---
 newsfragments/+1ac90f4a.feature.rst           |    1 +
 pyproject.toml                                |    6 +
 .../INSTALLER                                 |    1 +
 .../LICENSE                                   |   17 +
 .../METADATA                                  |   85 ++
 .../jaraco.collections-5.1.0.dist-info/RECORD |   10 +
 .../REQUESTED                                 |    0
 .../jaraco.collections-5.1.0.dist-info/WHEEL  |    5 +
 .../top_level.txt                             |    1 +
 .../_vendor/jaraco/collections/__init__.py    | 1091 +++++++++++++++++
 .../_vendor/jaraco/collections/py.typed       |    0
 11 files changed, 1217 insertions(+)
 create mode 100644 newsfragments/+1ac90f4a.feature.rst
 create mode 100644 setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER
 create mode 100644 setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE
 create mode 100644 setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA
 create mode 100644 setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD
 create mode 100644 setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED
 create mode 100644 setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL
 create mode 100644 setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt
 create mode 100644 setuptools/_vendor/jaraco/collections/__init__.py
 create mode 100644 setuptools/_vendor/jaraco/collections/py.typed

diff --git a/newsfragments/+1ac90f4a.feature.rst b/newsfragments/+1ac90f4a.feature.rst
new file mode 100644
index 0000000000..d78e4a7f89
--- /dev/null
+++ b/newsfragments/+1ac90f4a.feature.rst
@@ -0,0 +1 @@
+Declare also the dependencies used by distutils (adds jaraco.collections).
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index d0b709d9be..f623e16445 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -100,6 +100,12 @@ core = [
 
 	# pkg_resources
 	"platformdirs >= 2.6.2",
+
+	# for distutils
+	"jaraco.collections",
+	"jaraco.functools",
+	"packaging",
+	"more_itertools",
 ]
 
 check = [
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA
new file mode 100644
index 0000000000..fe6ca5ad88
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA
@@ -0,0 +1,85 @@
+Metadata-Version: 2.1
+Name: jaraco.collections
+Version: 5.1.0
+Summary: Collection objects similar to those in stdlib by jaraco
+Author-email: "Jason R. Coombs" 
+Project-URL: Source, https://github.com/jaraco/jaraco.collections
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: jaraco.text
+Provides-Extra: check
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'check'
+Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'check'
+Provides-Extra: cover
+Requires-Dist: pytest-cov ; extra == 'cover'
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
+Provides-Extra: enabler
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'enabler'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Provides-Extra: type
+Requires-Dist: pytest-mypy ; extra == 'type'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.collections.svg
+   :target: https://pypi.org/project/jaraco.collections
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.collections.svg
+
+.. image:: https://github.com/jaraco/jaraco.collections/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/jaraco.collections/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/jaracocollections/badge/?version=latest
+   :target: https://jaracocollections.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/jaraco.collections
+   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.collections?utm_source=pypi-jaraco.collections&utm_medium=readme
+
+Models and classes to supplement the stdlib 'collections' module.
+
+See the docs, linked above, for descriptions and usage examples.
+
+Highlights include:
+
+- RangeMap: A mapping that accepts a range of values for keys.
+- Projection: A subset over an existing mapping.
+- KeyTransformingDict: Generalized mapping with keys transformed by a function.
+- FoldedCaseKeyedDict: A dict whose string keys are case-insensitive.
+- BijectiveMap: A map where keys map to values and values back to their keys.
+- ItemsAsAttributes: A mapping mix-in exposing items as attributes.
+- IdentityOverrideMap: A map whose keys map by default to themselves unless overridden.
+- FrozenDict: A hashable, immutable map.
+- Enumeration: An object whose keys are enumerated.
+- Everything: A container that contains all things.
+- Least, Greatest: Objects that are always less than or greater than any other.
+- pop_all: Return all items from the mutable sequence and remove them from that sequence.
+- DictStack: A stack of dicts, great for sharing scopes.
+- WeightedLookup: A specialized RangeMap for selecting an item by weights.
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more `_.
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD
new file mode 100644
index 0000000000..48b957ec88
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.collections-5.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.collections-5.1.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.collections-5.1.0.dist-info/METADATA,sha256=IMUaliNsA5X1Ox9MXUWOagch5R4Wwb_3M7erp29dBtg,3933
+jaraco.collections-5.1.0.dist-info/RECORD,,
+jaraco.collections-5.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jaraco.collections-5.1.0.dist-info/WHEEL,sha256=Mdi9PDNwEZptOjTlUcAth7XJDFtKrHYaQMPulZeBCiQ,91
+jaraco.collections-5.1.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/collections/__init__.py,sha256=Pc1-SqjWm81ad1P0-GttpkwO_LWlnaY6gUq8gcKh2v0,26640
+jaraco/collections/__pycache__/__init__.cpython-312.pyc,,
+jaraco/collections/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL
new file mode 100644
index 0000000000..50e1e84e4a
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (73.0.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..f6205a5f19
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/setuptools/_vendor/jaraco/collections/__init__.py b/setuptools/_vendor/jaraco/collections/__init__.py
new file mode 100644
index 0000000000..0d501cf9e9
--- /dev/null
+++ b/setuptools/_vendor/jaraco/collections/__init__.py
@@ -0,0 +1,1091 @@
+from __future__ import annotations
+
+import collections.abc
+import copy
+import functools
+import itertools
+import operator
+import random
+import re
+from collections.abc import Container, Iterable, Mapping
+from typing import TYPE_CHECKING, Any, Callable, Dict, TypeVar, Union, overload
+
+import jaraco.text
+
+if TYPE_CHECKING:
+    from _operator import _SupportsComparison
+
+    from _typeshed import SupportsKeysAndGetItem
+    from typing_extensions import Self
+
+    _RangeMapKT = TypeVar('_RangeMapKT', bound=_SupportsComparison)
+else:
+    # _SupportsComparison doesn't exist at runtime,
+    # but _RangeMapKT is used in RangeMap's superclass' type parameters
+    _RangeMapKT = TypeVar('_RangeMapKT')
+
+_T = TypeVar('_T')
+_VT = TypeVar('_VT')
+
+_Matchable = Union[Callable, Container, Iterable, re.Pattern]
+
+
+def _dispatch(obj: _Matchable) -> Callable:
+    # can't rely on singledispatch for Union[Container, Iterable]
+    # due to ambiguity
+    # (https://peps.python.org/pep-0443/#abstract-base-classes).
+    if isinstance(obj, re.Pattern):
+        return obj.fullmatch
+    # mypy issue: https://github.com/python/mypy/issues/11071
+    if not isinstance(obj, Callable):  # type: ignore[arg-type]
+        if not isinstance(obj, Container):
+            obj = set(obj)  # type: ignore[arg-type]
+        obj = obj.__contains__
+    return obj  # type: ignore[return-value]
+
+
+class Projection(collections.abc.Mapping):
+    """
+    Project a set of keys over a mapping
+
+    >>> sample = {'a': 1, 'b': 2, 'c': 3}
+    >>> prj = Projection(['a', 'c', 'd'], sample)
+    >>> dict(prj)
+    {'a': 1, 'c': 3}
+
+    Projection also accepts an iterable or callable or pattern.
+
+    >>> iter_prj = Projection(iter('acd'), sample)
+    >>> call_prj = Projection(lambda k: ord(k) in (97, 99, 100), sample)
+    >>> pat_prj = Projection(re.compile(r'[acd]'), sample)
+    >>> prj == iter_prj == call_prj == pat_prj
+    True
+
+    Keys should only appear if they were specified and exist in the space.
+    Order is retained.
+
+    >>> list(prj)
+    ['a', 'c']
+
+    Attempting to access a key not in the projection
+    results in a KeyError.
+
+    >>> prj['b']
+    Traceback (most recent call last):
+    ...
+    KeyError: 'b'
+
+    Use the projection to update another dict.
+
+    >>> target = {'a': 2, 'b': 2}
+    >>> target.update(prj)
+    >>> target
+    {'a': 1, 'b': 2, 'c': 3}
+
+    Projection keeps a reference to the original dict, so
+    modifying the original dict may modify the Projection.
+
+    >>> del sample['a']
+    >>> dict(prj)
+    {'c': 3}
+    """
+
+    def __init__(self, keys: _Matchable, space: Mapping):
+        self._match = _dispatch(keys)
+        self._space = space
+
+    def __getitem__(self, key):
+        if not self._match(key):
+            raise KeyError(key)
+        return self._space[key]
+
+    def _keys_resolved(self):
+        return filter(self._match, self._space)
+
+    def __iter__(self):
+        return self._keys_resolved()
+
+    def __len__(self):
+        return len(tuple(self._keys_resolved()))
+
+
+class Mask(Projection):
+    """
+    The inverse of a :class:`Projection`, masking out keys.
+
+    >>> sample = {'a': 1, 'b': 2, 'c': 3}
+    >>> msk = Mask(['a', 'c', 'd'], sample)
+    >>> dict(msk)
+    {'b': 2}
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        # self._match = compose(operator.not_, self._match)
+        self._match = lambda key, orig=self._match: not orig(key)
+
+
+def dict_map(function, dictionary):
+    """
+    Return a new dict with function applied to values of dictionary.
+
+    >>> dict_map(lambda x: x+1, dict(a=1, b=2))
+    {'a': 2, 'b': 3}
+    """
+    return dict((key, function(value)) for key, value in dictionary.items())
+
+
+class RangeMap(Dict[_RangeMapKT, _VT]):
+    """
+    A dictionary-like object that uses the keys as bounds for a range.
+    Inclusion of the value for that range is determined by the
+    key_match_comparator, which defaults to less-than-or-equal.
+    A value is returned for a key if it is the first key that matches in
+    the sorted list of keys.
+
+    One may supply keyword parameters to be passed to the sort function used
+    to sort keys (i.e. key, reverse) as sort_params.
+
+    Create a map that maps 1-3 -> 'a', 4-6 -> 'b'
+
+    >>> r = RangeMap({3: 'a', 6: 'b'})  # boy, that was easy
+    >>> r[1], r[2], r[3], r[4], r[5], r[6]
+    ('a', 'a', 'a', 'b', 'b', 'b')
+
+    Even float values should work so long as the comparison operator
+    supports it.
+
+    >>> r[4.5]
+    'b'
+
+    Notice that the way rangemap is defined, it must be open-ended
+    on one side.
+
+    >>> r[0]
+    'a'
+    >>> r[-1]
+    'a'
+
+    One can close the open-end of the RangeMap by using undefined_value
+
+    >>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'})
+    >>> r[0]
+    Traceback (most recent call last):
+    ...
+    KeyError: 0
+
+    One can get the first or last elements in the range by using RangeMap.Item
+
+    >>> last_item = RangeMap.Item(-1)
+    >>> r[last_item]
+    'b'
+
+    .last_item is a shortcut for Item(-1)
+
+    >>> r[RangeMap.last_item]
+    'b'
+
+    Sometimes it's useful to find the bounds for a RangeMap
+
+    >>> r.bounds()
+    (0, 6)
+
+    RangeMap supports .get(key, default)
+
+    >>> r.get(0, 'not found')
+    'not found'
+
+    >>> r.get(7, 'not found')
+    'not found'
+
+    One often wishes to define the ranges by their left-most values,
+    which requires use of sort params and a key_match_comparator.
+
+    >>> r = RangeMap({1: 'a', 4: 'b'},
+    ...     sort_params=dict(reverse=True),
+    ...     key_match_comparator=operator.ge)
+    >>> r[1], r[2], r[3], r[4], r[5], r[6]
+    ('a', 'a', 'a', 'b', 'b', 'b')
+
+    That wasn't nearly as easy as before, so an alternate constructor
+    is provided:
+
+    >>> r = RangeMap.left({1: 'a', 4: 'b', 7: RangeMap.undefined_value})
+    >>> r[1], r[2], r[3], r[4], r[5], r[6]
+    ('a', 'a', 'a', 'b', 'b', 'b')
+
+    """
+
+    def __init__(
+        self,
+        source: (
+            SupportsKeysAndGetItem[_RangeMapKT, _VT] | Iterable[tuple[_RangeMapKT, _VT]]
+        ),
+        sort_params: Mapping[str, Any] = {},
+        key_match_comparator: Callable[[_RangeMapKT, _RangeMapKT], bool] = operator.le,
+    ):
+        dict.__init__(self, source)
+        self.sort_params = sort_params
+        self.match = key_match_comparator
+
+    @classmethod
+    def left(
+        cls,
+        source: (
+            SupportsKeysAndGetItem[_RangeMapKT, _VT] | Iterable[tuple[_RangeMapKT, _VT]]
+        ),
+    ) -> Self:
+        return cls(
+            source, sort_params=dict(reverse=True), key_match_comparator=operator.ge
+        )
+
+    def __getitem__(self, item: _RangeMapKT) -> _VT:
+        sorted_keys = sorted(self.keys(), **self.sort_params)
+        if isinstance(item, RangeMap.Item):
+            result = self.__getitem__(sorted_keys[item])
+        else:
+            key = self._find_first_match_(sorted_keys, item)
+            result = dict.__getitem__(self, key)
+            if result is RangeMap.undefined_value:
+                raise KeyError(key)
+        return result
+
+    @overload  # type: ignore[override] # Signature simplified over dict and Mapping
+    def get(self, key: _RangeMapKT, default: _T) -> _VT | _T: ...
+    @overload
+    def get(self, key: _RangeMapKT, default: None = None) -> _VT | None: ...
+    def get(self, key: _RangeMapKT, default: _T | None = None) -> _VT | _T | None:
+        """
+        Return the value for key if key is in the dictionary, else default.
+        If default is not given, it defaults to None, so that this method
+        never raises a KeyError.
+        """
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def _find_first_match_(
+        self, keys: Iterable[_RangeMapKT], item: _RangeMapKT
+    ) -> _RangeMapKT:
+        is_match = functools.partial(self.match, item)
+        matches = filter(is_match, keys)
+        try:
+            return next(matches)
+        except StopIteration:
+            raise KeyError(item) from None
+
+    def bounds(self) -> tuple[_RangeMapKT, _RangeMapKT]:
+        sorted_keys = sorted(self.keys(), **self.sort_params)
+        return (sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item])
+
+    # some special values for the RangeMap
+    undefined_value = type('RangeValueUndefined', (), {})()
+
+    class Item(int):
+        """RangeMap Item"""
+
+    first_item = Item(0)
+    last_item = Item(-1)
+
+
+def __identity(x):
+    return x
+
+
+def sorted_items(d, key=__identity, reverse=False):
+    """
+    Return the items of the dictionary sorted by the keys.
+
+    >>> sample = dict(foo=20, bar=42, baz=10)
+    >>> tuple(sorted_items(sample))
+    (('bar', 42), ('baz', 10), ('foo', 20))
+
+    >>> reverse_string = lambda s: ''.join(reversed(s))
+    >>> tuple(sorted_items(sample, key=reverse_string))
+    (('foo', 20), ('bar', 42), ('baz', 10))
+
+    >>> tuple(sorted_items(sample, reverse=True))
+    (('foo', 20), ('baz', 10), ('bar', 42))
+    """
+
+    # wrap the key func so it operates on the first element of each item
+    def pairkey_key(item):
+        return key(item[0])
+
+    return sorted(d.items(), key=pairkey_key, reverse=reverse)
+
+
+class KeyTransformingDict(dict):
+    """
+    A dict subclass that transforms the keys before they're used.
+    Subclasses may override the default transform_key to customize behavior.
+    """
+
+    @staticmethod
+    def transform_key(key):  # pragma: nocover
+        return key
+
+    def __init__(self, *args, **kargs):
+        super().__init__()
+        # build a dictionary using the default constructs
+        d = dict(*args, **kargs)
+        # build this dictionary using transformed keys.
+        for item in d.items():
+            self.__setitem__(*item)
+
+    def __setitem__(self, key, val):
+        key = self.transform_key(key)
+        super().__setitem__(key, val)
+
+    def __getitem__(self, key):
+        key = self.transform_key(key)
+        return super().__getitem__(key)
+
+    def __contains__(self, key):
+        key = self.transform_key(key)
+        return super().__contains__(key)
+
+    def __delitem__(self, key):
+        key = self.transform_key(key)
+        return super().__delitem__(key)
+
+    def get(self, key, *args, **kwargs):
+        key = self.transform_key(key)
+        return super().get(key, *args, **kwargs)
+
+    def setdefault(self, key, *args, **kwargs):
+        key = self.transform_key(key)
+        return super().setdefault(key, *args, **kwargs)
+
+    def pop(self, key, *args, **kwargs):
+        key = self.transform_key(key)
+        return super().pop(key, *args, **kwargs)
+
+    def matching_key_for(self, key):
+        """
+        Given a key, return the actual key stored in self that matches.
+        Raise KeyError if the key isn't found.
+        """
+        try:
+            return next(e_key for e_key in self.keys() if e_key == key)
+        except StopIteration as err:
+            raise KeyError(key) from err
+
+
+class FoldedCaseKeyedDict(KeyTransformingDict):
+    """
+    A case-insensitive dictionary (keys are compared as insensitive
+    if they are strings).
+
+    >>> d = FoldedCaseKeyedDict()
+    >>> d['heLlo'] = 'world'
+    >>> list(d.keys()) == ['heLlo']
+    True
+    >>> list(d.values()) == ['world']
+    True
+    >>> d['hello'] == 'world'
+    True
+    >>> 'hello' in d
+    True
+    >>> 'HELLO' in d
+    True
+    >>> print(repr(FoldedCaseKeyedDict({'heLlo': 'world'})))
+    {'heLlo': 'world'}
+    >>> d = FoldedCaseKeyedDict({'heLlo': 'world'})
+    >>> print(d['hello'])
+    world
+    >>> print(d['Hello'])
+    world
+    >>> list(d.keys())
+    ['heLlo']
+    >>> d = FoldedCaseKeyedDict({'heLlo': 'world', 'Hello': 'world'})
+    >>> list(d.values())
+    ['world']
+    >>> key, = d.keys()
+    >>> key in ['heLlo', 'Hello']
+    True
+    >>> del d['HELLO']
+    >>> d
+    {}
+
+    get should work
+
+    >>> d['Sumthin'] = 'else'
+    >>> d.get('SUMTHIN')
+    'else'
+    >>> d.get('OTHER', 'thing')
+    'thing'
+    >>> del d['sumthin']
+
+    setdefault should also work
+
+    >>> d['This'] = 'that'
+    >>> print(d.setdefault('this', 'other'))
+    that
+    >>> len(d)
+    1
+    >>> print(d['this'])
+    that
+    >>> print(d.setdefault('That', 'other'))
+    other
+    >>> print(d['THAT'])
+    other
+
+    Make it pop!
+
+    >>> print(d.pop('THAT'))
+    other
+
+    To retrieve the key in its originally-supplied form, use matching_key_for
+
+    >>> print(d.matching_key_for('this'))
+    This
+
+    >>> d.matching_key_for('missing')
+    Traceback (most recent call last):
+    ...
+    KeyError: 'missing'
+    """
+
+    @staticmethod
+    def transform_key(key):
+        return jaraco.text.FoldedCase(key)
+
+
+class DictAdapter:
+    """
+    Provide a getitem interface for attributes of an object.
+
+    Let's say you want to get at the string.ascii_lowercase attribute in a
+    formatted string. It's easy with DictAdapter.
+
+    >>> import string
+    >>> print("lowercase is %(ascii_lowercase)s" % DictAdapter(string))
+    lowercase is abcdefghijklmnopqrstuvwxyz
+    """
+
+    def __init__(self, wrapped_ob):
+        self.object = wrapped_ob
+
+    def __getitem__(self, name):
+        return getattr(self.object, name)
+
+
+class ItemsAsAttributes:
+    """
+    Mix-in class to enable a mapping object to provide items as
+    attributes.
+
+    >>> C = type('C', (dict, ItemsAsAttributes), dict())
+    >>> i = C()
+    >>> i['foo'] = 'bar'
+    >>> i.foo
+    'bar'
+
+    Natural attribute access takes precedence
+
+    >>> i.foo = 'henry'
+    >>> i.foo
+    'henry'
+
+    But as you might expect, the mapping functionality is preserved.
+
+    >>> i['foo']
+    'bar'
+
+    A normal attribute error should be raised if an attribute is
+    requested that doesn't exist.
+
+    >>> i.missing
+    Traceback (most recent call last):
+    ...
+    AttributeError: 'C' object has no attribute 'missing'
+
+    It also works on dicts that customize __getitem__
+
+    >>> missing_func = lambda self, key: 'missing item'
+    >>> C = type(
+    ...     'C',
+    ...     (dict, ItemsAsAttributes),
+    ...     dict(__missing__ = missing_func),
+    ... )
+    >>> i = C()
+    >>> i.missing
+    'missing item'
+    >>> i.foo
+    'missing item'
+    """
+
+    def __getattr__(self, key):
+        try:
+            return getattr(super(), key)
+        except AttributeError as e:
+            # attempt to get the value from the mapping (return self[key])
+            #  but be careful not to lose the original exception context.
+            noval = object()
+
+            def _safe_getitem(cont, key, missing_result):
+                try:
+                    return cont[key]
+                except KeyError:
+                    return missing_result
+
+            result = _safe_getitem(self, key, noval)
+            if result is not noval:
+                return result
+            # raise the original exception, but use the original class
+            #  name, not 'super'.
+            (message,) = e.args
+            message = message.replace('super', self.__class__.__name__, 1)
+            e.args = (message,)
+            raise
+
+
+def invert_map(map):
+    """
+    Given a dictionary, return another dictionary with keys and values
+    switched. If any of the values resolve to the same key, raises
+    a ValueError.
+
+    >>> numbers = dict(a=1, b=2, c=3)
+    >>> letters = invert_map(numbers)
+    >>> letters[1]
+    'a'
+    >>> numbers['d'] = 3
+    >>> invert_map(numbers)
+    Traceback (most recent call last):
+    ...
+    ValueError: Key conflict in inverted mapping
+    """
+    res = dict((v, k) for k, v in map.items())
+    if not len(res) == len(map):
+        raise ValueError('Key conflict in inverted mapping')
+    return res
+
+
+class IdentityOverrideMap(dict):
+    """
+    A dictionary that by default maps each key to itself, but otherwise
+    acts like a normal dictionary.
+
+    >>> d = IdentityOverrideMap()
+    >>> d[42]
+    42
+    >>> d['speed'] = 'speedo'
+    >>> print(d['speed'])
+    speedo
+    """
+
+    def __missing__(self, key):
+        return key
+
+
+class DictStack(list, collections.abc.MutableMapping):
+    """
+    A stack of dictionaries that behaves as a view on those dictionaries,
+    giving preference to the last.
+
+    >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)])
+    >>> stack['a']
+    2
+    >>> stack['b']
+    2
+    >>> stack['c']
+    2
+    >>> len(stack)
+    3
+    >>> stack.push(dict(a=3))
+    >>> stack['a']
+    3
+    >>> stack['a'] = 4
+    >>> set(stack.keys()) == set(['a', 'b', 'c'])
+    True
+    >>> set(stack.items()) == set([('a', 4), ('b', 2), ('c', 2)])
+    True
+    >>> dict(**stack) == dict(stack) == dict(a=4, c=2, b=2)
+    True
+    >>> d = stack.pop()
+    >>> stack['a']
+    2
+    >>> d = stack.pop()
+    >>> stack['a']
+    1
+    >>> stack.get('b', None)
+    >>> 'c' in stack
+    True
+    >>> del stack['c']
+    >>> dict(stack)
+    {'a': 1}
+    """
+
+    def __iter__(self):
+        dicts = list.__iter__(self)
+        return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts)))
+
+    def __getitem__(self, key):
+        for scope in reversed(tuple(list.__iter__(self))):
+            if key in scope:
+                return scope[key]
+        raise KeyError(key)
+
+    push = list.append
+
+    def __contains__(self, other):
+        return collections.abc.Mapping.__contains__(self, other)
+
+    def __len__(self):
+        return len(list(iter(self)))
+
+    def __setitem__(self, key, item):
+        last = list.__getitem__(self, -1)
+        return last.__setitem__(key, item)
+
+    def __delitem__(self, key):
+        last = list.__getitem__(self, -1)
+        return last.__delitem__(key)
+
+    # workaround for mypy confusion
+    def pop(self, *args, **kwargs):
+        return list.pop(self, *args, **kwargs)
+
+
+class BijectiveMap(dict):
+    """
+    A Bijective Map (two-way mapping).
+
+    Implemented as a simple dictionary of 2x the size, mapping values back
+    to keys.
+
+    Note, this implementation may be incomplete. If there's not a test for
+    your use case below, it's likely to fail, so please test and send pull
+    requests or patches for additional functionality needed.
+
+
+    >>> m = BijectiveMap()
+    >>> m['a'] = 'b'
+    >>> m == {'a': 'b', 'b': 'a'}
+    True
+    >>> print(m['b'])
+    a
+
+    >>> m['c'] = 'd'
+    >>> len(m)
+    2
+
+    Some weird things happen if you map an item to itself or overwrite a
+    single key of a pair, so it's disallowed.
+
+    >>> m['e'] = 'e'
+    Traceback (most recent call last):
+    ValueError: Key cannot map to itself
+
+    >>> m['d'] = 'e'
+    Traceback (most recent call last):
+    ValueError: Key/Value pairs may not overlap
+
+    >>> m['e'] = 'd'
+    Traceback (most recent call last):
+    ValueError: Key/Value pairs may not overlap
+
+    >>> print(m.pop('d'))
+    c
+
+    >>> 'c' in m
+    False
+
+    >>> m = BijectiveMap(dict(a='b'))
+    >>> len(m)
+    1
+    >>> print(m['b'])
+    a
+
+    >>> m = BijectiveMap()
+    >>> m.update(a='b')
+    >>> m['b']
+    'a'
+
+    >>> del m['b']
+    >>> len(m)
+    0
+    >>> 'a' in m
+    False
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__()
+        self.update(*args, **kwargs)
+
+    def __setitem__(self, item, value):
+        if item == value:
+            raise ValueError("Key cannot map to itself")
+        overlap = (
+            item in self
+            and self[item] != value
+            or value in self
+            and self[value] != item
+        )
+        if overlap:
+            raise ValueError("Key/Value pairs may not overlap")
+        super().__setitem__(item, value)
+        super().__setitem__(value, item)
+
+    def __delitem__(self, item):
+        self.pop(item)
+
+    def __len__(self):
+        return super().__len__() // 2
+
+    def pop(self, key, *args, **kwargs):
+        mirror = self[key]
+        super().__delitem__(mirror)
+        return super().pop(key, *args, **kwargs)
+
+    def update(self, *args, **kwargs):
+        # build a plain dictionary using the default dict constructor
+        d = dict(*args, **kwargs)
+        # insert each pair via __setitem__ so the mirrored entries stay in sync
+        for item in d.items():
+            self.__setitem__(*item)
+
+
+class FrozenDict(collections.abc.Mapping, collections.abc.Hashable):
+    """
+    An immutable mapping.
+
+    >>> a = FrozenDict(a=1, b=2)
+    >>> b = FrozenDict(a=1, b=2)
+    >>> a == b
+    True
+
+    >>> a == dict(a=1, b=2)
+    True
+    >>> dict(a=1, b=2) == a
+    True
+    >>> 'a' in a
+    True
+    >>> type(hash(a)) is type(0)
+    True
+    >>> set(iter(a)) == {'a', 'b'}
+    True
+    >>> len(a)
+    2
+    >>> a['a'] == a.get('a') == 1
+    True
+
+    >>> a['c'] = 3
+    Traceback (most recent call last):
+    ...
+    TypeError: 'FrozenDict' object does not support item assignment
+
+    >>> a.update(y=3)
+    Traceback (most recent call last):
+    ...
+    AttributeError: 'FrozenDict' object has no attribute 'update'
+
+    Copies should compare equal
+
+    >>> copy.copy(a) == a
+    True
+
+    Copies should be the same type
+
+    >>> isinstance(copy.copy(a), FrozenDict)
+    True
+
+    FrozenDict supplies .copy(), even though
+    collections.abc.Mapping doesn't demand it.
+
+    >>> a.copy() == a
+    True
+    >>> a.copy() is not a
+    True
+    """
+
+    __slots__ = ['__data']
+
+    def __new__(cls, *args, **kwargs):
+        self = super().__new__(cls)
+        self.__data = dict(*args, **kwargs)
+        return self
+
+    # Container
+    def __contains__(self, key):
+        return key in self.__data
+
+    # Hashable
+    def __hash__(self):
+        return hash(tuple(sorted(self.__data.items())))
+
+    # Mapping
+    def __iter__(self):
+        return iter(self.__data)
+
+    def __len__(self):
+        return len(self.__data)
+
+    def __getitem__(self, key):
+        return self.__data[key]
+
+    # override get for efficiency provided by dict
+    def get(self, *args, **kwargs):
+        return self.__data.get(*args, **kwargs)
+
+    # override eq to recognize underlying implementation
+    def __eq__(self, other):
+        if isinstance(other, FrozenDict):
+            other = other.__data
+        return self.__data.__eq__(other)
+
+    def copy(self):
+        "Return a shallow copy of self"
+        return copy.copy(self)
+
+
+class Enumeration(ItemsAsAttributes, BijectiveMap):
+    """
+    A convenient way to provide enumerated values
+
+    >>> e = Enumeration('a b c')
+    >>> e['a']
+    0
+
+    >>> e.a
+    0
+
+    >>> e[1]
+    'b'
+
+    >>> set(e.names) == set('abc')
+    True
+
+    >>> set(e.codes) == set(range(3))
+    True
+
+    >>> e.get('d') is None
+    True
+
+    Codes need not start with 0
+
+    >>> e = Enumeration('a b c', range(1, 4))
+    >>> e['a']
+    1
+
+    >>> e[3]
+    'c'
+    """
+
+    def __init__(self, names, codes=None):
+        if isinstance(names, str):
+            names = names.split()
+        if codes is None:
+            codes = itertools.count()
+        super().__init__(zip(names, codes))
+
+    @property
+    def names(self):
+        return (key for key in self if isinstance(key, str))
+
+    @property
+    def codes(self):
+        return (self[name] for name in self.names)
+
+
+class Everything:
+    """
+    A collection "containing" every possible thing.
+
+    >>> 'foo' in Everything()
+    True
+
+    >>> import random
+    >>> random.randint(1, 999) in Everything()
+    True
+
+    >>> random.choice([None, 'foo', 42, ('a', 'b', 'c')]) in Everything()
+    True
+    """
+
+    def __contains__(self, other):
+        return True
+
+
+class InstrumentedDict(collections.UserDict):
+    """
+    Instrument an existing dictionary with additional
+    functionality, but always reference and mutate
+    the original dictionary.
+
+    >>> orig = {'a': 1, 'b': 2}
+    >>> inst = InstrumentedDict(orig)
+    >>> inst['a']
+    1
+    >>> inst['c'] = 3
+    >>> orig['c']
+    3
+    >>> inst.keys() == orig.keys()
+    True
+    """
+
+    def __init__(self, data):
+        super().__init__()
+        self.data = data
+
+
+class Least:
+    """
+    A value that is always lesser than any other
+
+    >>> least = Least()
+    >>> 3 < least
+    False
+    >>> 3 > least
+    True
+    >>> least < 3
+    True
+    >>> least <= 3
+    True
+    >>> least > 3
+    False
+    >>> 'x' > least
+    True
+    >>> None > least
+    True
+    """
+
+    def __le__(self, other):
+        return True
+
+    __lt__ = __le__
+
+    def __ge__(self, other):
+        return False
+
+    __gt__ = __ge__
+
+
+class Greatest:
+    """
+    A value that is always greater than any other
+
+    >>> greatest = Greatest()
+    >>> 3 < greatest
+    True
+    >>> 3 > greatest
+    False
+    >>> greatest < 3
+    False
+    >>> greatest > 3
+    True
+    >>> greatest >= 3
+    True
+    >>> 'x' > greatest
+    False
+    >>> None > greatest
+    False
+    """
+
+    def __ge__(self, other):
+        return True
+
+    __gt__ = __ge__
+
+    def __le__(self, other):
+        return False
+
+    __lt__ = __le__
+
+
+def pop_all(items):
+    """
+    Clear items in place and return a copy of items.
+
+    >>> items = [1, 2, 3]
+    >>> popped = pop_all(items)
+    >>> popped is items
+    False
+    >>> popped
+    [1, 2, 3]
+    >>> items
+    []
+    """
+    result, items[:] = items[:], []
+    return result
+
+
+class FreezableDefaultDict(collections.defaultdict):
+    """
+    Often it is desirable to prevent the mutation of
+    a default dict after its initial construction, such
+    as to prevent mutation during iteration.
+
+    >>> dd = FreezableDefaultDict(list)
+    >>> dd[0].append('1')
+    >>> dd.freeze()
+    >>> dd[1]
+    []
+    >>> len(dd)
+    1
+    """
+
+    def __missing__(self, key):
+        return getattr(self, '_frozen', super().__missing__)(key)
+
+    def freeze(self):
+        self._frozen = lambda key: self.default_factory()
+
+
+class Accumulator:
+    def __init__(self, initial=0):
+        self.val = initial
+
+    def __call__(self, val):
+        self.val += val
+        return self.val
+
+
+class WeightedLookup(RangeMap):
+    """
+    Given parameters suitable for a dict representing keys
+    and a weighted proportion, return a RangeMap representing
+    spans of values proportional to the weights:
+
+    >>> even = WeightedLookup(a=1, b=1)
+
+    [0, 1) -> a
+    [1, 2) -> b
+
+    >>> lk = WeightedLookup(a=1, b=2)
+
+    [0, 1) -> a
+    [1, 3) -> b
+
+    >>> lk[.5]
+    'a'
+    >>> lk[1.5]
+    'b'
+
+    Adds ``.random()`` to select a random weighted value:
+
+    >>> lk.random() in ['a', 'b']
+    True
+
+    >>> choices = [lk.random() for x in range(1000)]
+
+    Statistically speaking, the ratio of 'a' choices to 'b' choices should be about .5:
+    >>> ratio = choices.count('a') / choices.count('b')
+    >>> .4 < ratio < .6
+    True
+    """
+
+    def __init__(self, *args, **kwargs):
+        raw = dict(*args, **kwargs)
+
+        # allocate keys by weight
+        indexes = map(Accumulator(), raw.values())
+        super().__init__(zip(indexes, raw.keys()), key_match_comparator=operator.lt)
+
+    def random(self):
+        lower, upper = self.bounds()
+        selector = random.random() * upper
+        return self[selector]
diff --git a/setuptools/_vendor/jaraco/collections/py.typed b/setuptools/_vendor/jaraco/collections/py.typed
new file mode 100644
index 0000000000..e69de29bb2
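
As background on the vendored module above, here is a short sketch of how
WeightedLookup behaves. It is illustrative only and assumes the module is
importable as jaraco.collections (the same code is published on PyPI under
that name); it is not part of the patch.

    from collections import Counter

    from jaraco.collections import WeightedLookup

    # Weights 1:2:7 yield spans [0, 1) -> 'red', [1, 3) -> 'green', [3, 10) -> 'blue'.
    lookup = WeightedLookup(red=1, green=2, blue=7)
    assert lookup[0.5] == 'red'
    assert lookup[2.9] == 'green'
    assert lookup[9.99] == 'blue'

    # .random() draws a float in [0, 10) and looks it up, so each key is
    # selected roughly in proportion to its weight (statistically, not exactly).
    counts = Counter(lookup.random() for _ in range(10_000))
    assert counts['blue'] > counts['green'] > counts['red']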

From f2157f50101906b300108e953bd4ccff26be9e2e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 14:31:45 -0400
Subject: [PATCH 1136/1761] Replaced global variable with a custom cache
 wrapper.

---
 distutils/dir_util.py | 48 ++++++++++++++++++++++++++++---------------
 1 file changed, 32 insertions(+), 16 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index ab848240ea..1705cd050d 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -10,12 +10,39 @@
 from ._log import log
 from .errors import DistutilsFileError, DistutilsInternalError
 
-# cache for by mkpath() -- in addition to cheapening redundant calls,
-# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
-_path_created = set()
+
+class SkipRepeatAbsolutePaths(set):
+    """
+    Cache for mkpath.
+
+    In addition to cheapening redundant calls, eliminates redundant
+    "creating /foo/bar/baz" messages in dry-run mode.
+    """
+
+    def __init__(self):
+        SkipRepeatAbsolutePaths.instance = self
+
+    @classmethod
+    def clear(cls):
+        super(cls, cls.instance).clear()
+
+    def wrap(self, func):
+        @functools.wraps(func)
+        def wrapper(path, *args, **kwargs):
+            if path.absolute() in self:
+                return
+            self.add(path.absolute())
+            return func(path, *args, **kwargs)
+
+        return wrapper
+
+
+# Python 3.8 compatibility
+wrapper = SkipRepeatAbsolutePaths().wrap
 
 
 @functools.singledispatch
+@wrapper
 def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False):
     """Create a directory and any missing ancestor directories.
 
@@ -26,12 +53,6 @@ def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False):
     If 'verbose' is true, log the directory created.
     Return the list of directories actually created.
     """
-
-    global _path_created
-
-    if str(name.absolute()) in _path_created:
-        return
-
     if verbose and not name.is_dir():
         log.info("creating %s", name)
 
@@ -40,7 +61,6 @@ def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False):
 
     try:
         dry_run or name.mkdir(mode=mode, parents=True, exist_ok=True)
-        _path_created.add(name.absolute())
     except OSError as exc:
         raise DistutilsFileError(f"could not create '{name}': {exc.args[-1]}")
 
@@ -185,8 +205,6 @@ def remove_tree(directory, verbose=True, dry_run=False):
     Any errors are ignored (apart from being reported to stdout if 'verbose'
     is true).
     """
-    global _path_created
-
     if verbose >= 1:
         log.info("removing '%s' (and everything under it)", directory)
     if dry_run:
@@ -196,10 +214,8 @@ def remove_tree(directory, verbose=True, dry_run=False):
     for cmd in cmdtuples:
         try:
             cmd[0](cmd[1])
-            # remove dir from cache if it's already there
-            abspath = os.path.abspath(cmd[1])
-            if abspath in _path_created:
-                _path_created.remove(abspath)
+            # Clear the cache
+            SkipRepeatAbsolutePaths.clear()
         except OSError as exc:
             log.warning("error removing %s: %s", directory, exc)
 

From e2ab6012da6b0dcf895e95f219327f489db6e04c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 17:41:40 -0400
Subject: [PATCH 1137/1761] Extracted _copy_one function.

---
 distutils/dir_util.py | 112 ++++++++++++++++++++++++------------------
 1 file changed, 65 insertions(+), 47 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 1705cd050d..3b22839d27 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -7,6 +7,7 @@
 import os
 import pathlib
 
+from . import file_util
 from ._log import log
 from .errors import DistutilsFileError, DistutilsInternalError
 
@@ -98,7 +99,7 @@ def create_tree(base_dir, files, mode=0o777, verbose=True, dry_run=False):
         mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
 
 
-def copy_tree(  # noqa: C901
+def copy_tree(
     src,
     dst,
     preserve_mode=True,
@@ -127,8 +128,6 @@ def copy_tree(  # noqa: C901
     (the default), the destination of the symlink will be copied.
     'update' and 'verbose' are the same as for 'copy_file'.
     """
-    from distutils.file_util import copy_file
-
     if not dry_run and not os.path.isdir(src):
         raise DistutilsFileError(f"cannot copy tree '{src}': not a directory")
     try:
@@ -142,50 +141,69 @@ def copy_tree(  # noqa: C901
     if not dry_run:
         mkpath(dst, verbose=verbose)
 
-    outputs = []
-
-    for n in names:
-        src_name = os.path.join(src, n)
-        dst_name = os.path.join(dst, n)
-
-        if n.startswith('.nfs'):
-            # skip NFS rename files
-            continue
-
-        if preserve_symlinks and os.path.islink(src_name):
-            link_dest = os.readlink(src_name)
-            if verbose >= 1:
-                log.info("linking %s -> %s", dst_name, link_dest)
-            if not dry_run:
-                os.symlink(link_dest, dst_name)
-            outputs.append(dst_name)
-
-        elif os.path.isdir(src_name):
-            outputs.extend(
-                copy_tree(
-                    src_name,
-                    dst_name,
-                    preserve_mode,
-                    preserve_times,
-                    preserve_symlinks,
-                    update,
-                    verbose=verbose,
-                    dry_run=dry_run,
-                )
-            )
-        else:
-            copy_file(
-                src_name,
-                dst_name,
-                preserve_mode,
-                preserve_times,
-                update,
-                verbose=verbose,
-                dry_run=dry_run,
-            )
-            outputs.append(dst_name)
-
-    return outputs
+    copy_one = functools.partial(
+        _copy_one,
+        src=src,
+        dst=dst,
+        preserve_symlinks=preserve_symlinks,
+        verbose=verbose,
+        dry_run=dry_run,
+        preserve_mode=preserve_mode,
+        preserve_times=preserve_times,
+        update=update,
+    )
+    return list(itertools.chain.from_iterable(map(copy_one, names)))
+
+
+def _copy_one(
+    name,
+    *,
+    src,
+    dst,
+    preserve_symlinks,
+    verbose,
+    dry_run,
+    preserve_mode,
+    preserve_times,
+    update,
+):
+    src_name = os.path.join(src, name)
+    dst_name = os.path.join(dst, name)
+
+    if name.startswith('.nfs'):
+        # skip NFS rename files
+        return
+
+    if preserve_symlinks and os.path.islink(src_name):
+        link_dest = os.readlink(src_name)
+        if verbose >= 1:
+            log.info("linking %s -> %s", dst_name, link_dest)
+        if not dry_run:
+            os.symlink(link_dest, dst_name)
+        yield dst_name
+
+    elif os.path.isdir(src_name):
+        yield from copy_tree(
+            src_name,
+            dst_name,
+            preserve_mode,
+            preserve_times,
+            preserve_symlinks,
+            update,
+            verbose=verbose,
+            dry_run=dry_run,
+        )
+    else:
+        file_util.copy_file(
+            src_name,
+            dst_name,
+            preserve_mode,
+            preserve_times,
+            update,
+            verbose=verbose,
+            dry_run=dry_run,
+        )
+        yield dst_name
 
 
 def _build_cmdtuple(path, cmdtuples):
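
The shape of this refactor in isolation: each directory entry is handled by a
generator that may yield zero, one, or many outputs, and the per-entry results
are flattened back into a single list. A hypothetical miniature (the names
here are illustrative):

    import functools
    import itertools

    def _handle_one(name, *, skip_prefix):
        # Zero yields for skipped entries, one or more otherwise -- the same
        # contract _copy_one satisfies for '.nfs' files, plain files, and
        # recursive directory copies.
        if name.startswith(skip_prefix):
            return
        yield name.upper()

    handle = functools.partial(_handle_one, skip_prefix='.nfs')
    names = ['.nfs0001', 'README', 'pkg']
    assert list(itertools.chain.from_iterable(map(handle, names))) == ['README', 'PKG']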

From bcba955249ae22b9cadab363a300fb7a828f0be8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 17:58:01 -0400
Subject: [PATCH 1138/1761] Removed support for compress="compress" from
 archive_util.make_tarball.

---
 distutils/archive_util.py            | 26 ++++------------------
 distutils/tests/test_archive_util.py | 33 ----------------------------
 2 files changed, 4 insertions(+), 55 deletions(-)

diff --git a/distutils/archive_util.py b/distutils/archive_util.py
index cc4699b1a3..5bb6df763d 100644
--- a/distutils/archive_util.py
+++ b/distutils/archive_util.py
@@ -4,8 +4,6 @@
 that sort of thing)."""
 
 import os
-import sys
-from warnings import warn
 
 try:
     import zipfile
@@ -67,8 +65,7 @@ def make_tarball(
     """Create a (possibly compressed) tar file from all the files under
     'base_dir'.
 
-    'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or
-    None.  ("compress" will be deprecated in Python 3.2)
+    'compress' must be "gzip" (the default), "bzip2", "xz", or None.
 
     'owner' and 'group' can be used to define an owner and a group for the
     archive that is being built. If not provided, the current owner and group
@@ -84,20 +81,17 @@ def make_tarball(
         'bzip2': 'bz2',
         'xz': 'xz',
         None: '',
-        'compress': '',
     }
-    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', 'compress': '.Z'}
+    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz'}
 
     # flags for compression program, each element of list will be an argument
     if compress is not None and compress not in compress_ext.keys():
         raise ValueError(
-            "bad value for 'compress': must be None, 'gzip', 'bzip2', "
-            "'xz' or 'compress'"
+            "bad value for 'compress': must be None, 'gzip', 'bzip2', 'xz'"
         )
 
     archive_name = base_name + '.tar'
-    if compress != 'compress':
-        archive_name += compress_ext.get(compress, '')
+    archive_name += compress_ext.get(compress, '')
 
     mkpath(os.path.dirname(archive_name), dry_run=dry_run)
 
@@ -125,18 +119,6 @@ def _set_uid_gid(tarinfo):
         finally:
             tar.close()
 
-    # compression using `compress`
-    if compress == 'compress':
-        warn("'compress' is deprecated.", DeprecationWarning)
-        # the option varies depending on the platform
-        compressed_name = archive_name + compress_ext[compress]
-        if sys.platform == 'win32':
-            cmd = [compress, archive_name, compressed_name]
-        else:
-            cmd = [compress, '-f', archive_name]
-        spawn(cmd, dry_run=dry_run)
-        return compressed_name
-
     return archive_name
 
 
diff --git a/distutils/tests/test_archive_util.py b/distutils/tests/test_archive_util.py
index 389eba16e8..3e4ed75a76 100644
--- a/distutils/tests/test_archive_util.py
+++ b/distutils/tests/test_archive_util.py
@@ -6,7 +6,6 @@
 import pathlib
 import sys
 import tarfile
-import warnings
 from distutils import archive_util
 from distutils.archive_util import (
     ARCHIVE_FORMATS,
@@ -23,7 +22,6 @@
 import pytest
 from test.support import patch
 
-from .compat.py38 import check_warnings
 from .unix_compat import UID_0_SUPPORT, grp, pwd, require_uid_0, require_unix_id
 
 
@@ -190,37 +188,6 @@ def test_tarfile_vs_tar(self):
         tarball = base_name + '.tar'
         assert os.path.exists(tarball)
 
-    @pytest.mark.skipif("not shutil.which('compress')")
-    def test_compress_deprecated(self):
-        tmpdir = self._create_files()
-        base_name = os.path.join(self.mkdtemp(), 'archive')
-
-        # using compress and testing the DeprecationWarning
-        old_dir = os.getcwd()
-        os.chdir(tmpdir)
-        try:
-            with check_warnings() as w:
-                warnings.simplefilter("always")
-                make_tarball(base_name, 'dist', compress='compress')
-        finally:
-            os.chdir(old_dir)
-        tarball = base_name + '.tar.Z'
-        assert os.path.exists(tarball)
-        assert len(w.warnings) == 1
-
-        # same test with dry_run
-        os.remove(tarball)
-        old_dir = os.getcwd()
-        os.chdir(tmpdir)
-        try:
-            with check_warnings() as w:
-                warnings.simplefilter("always")
-                make_tarball(base_name, 'dist', compress='compress', dry_run=True)
-        finally:
-            os.chdir(old_dir)
-        assert not os.path.exists(tarball)
-        assert len(w.warnings) == 1
-
     @pytest.mark.usefixtures('needs_zlib')
     def test_make_zipfile(self):
         zipfile = pytest.importorskip('zipfile')
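
A standalone sketch of the simplified naming logic after this patch (the
helper below is hypothetical, not the distutils function itself):

    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz'}

    def tar_name(base_name, compress='gzip'):
        # With the external 'compress' program gone, naming is one table lookup.
        if compress is not None and compress not in compress_ext:
            raise ValueError(
                "bad value for 'compress': must be None, 'gzip', 'bzip2', or 'xz'"
            )
        return base_name + '.tar' + compress_ext.get(compress, '')

    assert tar_name('dist/archive') == 'dist/archive.tar.gz'
    assert tar_name('dist/archive', compress=None) == 'dist/archive.tar'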

From 3d38185e82baf307ea6a1f815d906a29e63fa89e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 18:05:58 -0400
Subject: [PATCH 1139/1761] Removed deprecated 'check_warnings' from 'sdist'
 and 'register' commands.

---
 distutils/command/register.py | 14 --------------
 distutils/command/sdist.py    | 12 ------------
 distutils/tests/test_sdist.py | 10 ----------
 3 files changed, 36 deletions(-)

diff --git a/distutils/command/register.py b/distutils/command/register.py
index 1089daf78f..9645401fd7 100644
--- a/distutils/command/register.py
+++ b/distutils/command/register.py
@@ -11,7 +11,6 @@
 import urllib.parse
 import urllib.request
 from distutils._log import log
-from warnings import warn
 
 from more_itertools import always_iterable
 
@@ -65,19 +64,6 @@ def run(self):
         else:
             self.send_metadata()
 
-    def check_metadata(self):
-        """Deprecated API."""
-        warn(
-            "distutils.command.register.check_metadata is deprecated; "
-            "use the check command instead",
-            DeprecationWarning,
-        )
-        check = self.distribution.get_command_obj('check')
-        check.ensure_finalized()
-        check.strict = self.strict
-        check.restructuredtext = True
-        check.run()
-
     def _set_config(self):
         """Reads the configuration file and set attributes."""
         config = self._read_pypirc()
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index eda6afe811..d723a1c9fb 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -8,7 +8,6 @@
 from distutils._log import log
 from glob import glob
 from itertools import filterfalse
-from warnings import warn
 
 from ..core import Command
 from ..errors import DistutilsOptionError, DistutilsTemplateError
@@ -177,17 +176,6 @@ def run(self):
         # or zipfile, or whatever.
         self.make_distribution()
 
-    def check_metadata(self):
-        """Deprecated API."""
-        warn(
-            "distutils.command.sdist.check_metadata is deprecated, \
-              use the check command instead",
-            PendingDeprecationWarning,
-        )
-        check = self.distribution.get_command_obj('check')
-        check.ensure_finalized()
-        check.run()
-
     def get_file_list(self):
         """Figure out the list of files to include in the source
         distribution, and put it in 'self.filelist'.  This might involve
diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py
index 7daaaa6323..c49a4bfc7a 100644
--- a/distutils/tests/test_sdist.py
+++ b/distutils/tests/test_sdist.py
@@ -4,7 +4,6 @@
 import pathlib
 import shutil  # noqa: F401
 import tarfile
-import warnings
 import zipfile
 from distutils.archive_util import ARCHIVE_FORMATS
 from distutils.command.sdist import sdist, show_formats
@@ -20,7 +19,6 @@
 import pytest
 from more_itertools import ilen
 
-from .compat.py38 import check_warnings
 from .unix_compat import grp, pwd, require_uid_0, require_unix_id
 
 SETUP_PY = """
@@ -275,14 +273,6 @@ def test_metadata_check_option(self, caplog):
         cmd.run()
         assert len(self.warnings(caplog.messages, 'warning: check: ')) == 0
 
-    def test_check_metadata_deprecated(self):
-        # makes sure make_metadata is deprecated
-        dist, cmd = self.get_cmd()
-        with check_warnings() as w:
-            warnings.simplefilter("always")
-            cmd.check_metadata()
-            assert len(w.warnings) == 1
-
     def test_show_formats(self, capsys):
         show_formats()
 

From 05668ed2e1c9013259a62b34247d2b318dd949f5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 18:23:33 -0400
Subject: [PATCH 1140/1761] Enabled TestPyPIRCCommand to run its tests.

---
 distutils/tests/test_config.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/tests/test_config.py b/distutils/tests/test_config.py
index be5ae0a687..ee6d7604de 100644
--- a/distutils/tests/test_config.py
+++ b/distutils/tests/test_config.py
@@ -51,7 +51,7 @@ class BasePyPIRCCommandTestCase(support.TempdirManager):
     pass
 
 
-class PyPIRCCommandTestCase(BasePyPIRCCommandTestCase):
+class TestPyPIRCCommand(BasePyPIRCCommandTestCase):
     def test_server_registration(self):
         # This test makes sure PyPIRCCommand knows how to:
         # 1. handle several sections in .pypirc
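
The rename is what makes these tests run: by default pytest only collects
classes whose names match the Test* convention (configurable via
python_classes). For example:

    class PyPIRCCommandTestCase:   # not collected: name does not match 'Test*'
        def test_anything(self):
            assert True

    class TestPyPIRCCommand:       # collected under pytest's default convention
        def test_anything(self):
            assert True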

From eebb1212effa7a60b5a6cded5072013be43d3801 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 18:34:02 -0400
Subject: [PATCH 1141/1761] Removed dependence of TestSdist on test_config.

---
 distutils/tests/test_sdist.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py
index c49a4bfc7a..9b0930d757 100644
--- a/distutils/tests/test_sdist.py
+++ b/distutils/tests/test_sdist.py
@@ -10,7 +10,6 @@
 from distutils.core import Distribution
 from distutils.errors import DistutilsOptionError
 from distutils.filelist import FileList
-from distutils.tests.test_config import BasePyPIRCCommandTestCase
 from os.path import join
 from textwrap import dedent
 
@@ -19,6 +18,7 @@
 import pytest
 from more_itertools import ilen
 
+from . import support
 from .unix_compat import grp, pwd, require_uid_0, require_unix_id
 
 SETUP_PY = """
@@ -66,7 +66,7 @@ def clean_lines(filepath):
         yield from filter(None, map(str.strip, f))
 
 
-class TestSDist(BasePyPIRCCommandTestCase):
+class TestSDist(support.TempdirManager):
     def get_cmd(self, metadata=None):
         """Returns a cmd"""
         if metadata is None:

From dc7e30936b05f3fb4935c0db5064160b077c6216 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 18:35:44 -0400
Subject: [PATCH 1142/1761] Replaced open/read/close logic with 'read_text' and
 added encoding to address EncodingWarning.

---
 distutils/tests/test_config.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/distutils/tests/test_config.py b/distutils/tests/test_config.py
index ee6d7604de..6e3f5f24dd 100644
--- a/distutils/tests/test_config.py
+++ b/distutils/tests/test_config.py
@@ -1,6 +1,7 @@
 """Tests for distutils.pypirc.pypirc."""
 
 import os
+import pathlib
 from distutils.tests import support
 
 import pytest
@@ -91,12 +92,7 @@ def test_server_empty_registration(self):
         assert not os.path.exists(rc)
         cmd._store_pypirc('tarek', 'xxx')
         assert os.path.exists(rc)
-        f = open(rc)
-        try:
-            content = f.read()
-            assert content == WANTED
-        finally:
-            f.close()
+        assert pathlib.Path(rc).read_text(encoding='utf-8') == WANTED
 
     def test_config_interpolation(self):
         # using the % character in .pypirc should not raise an error (#20120)
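
The same idiom in isolation: pathlib.Path.read_text replaces the
open/try/finally dance, and the explicit encoding avoids EncodingWarning when
Python runs with -X warn_default_encoding.

    import pathlib

    p = pathlib.Path('example.cfg')
    p.write_text('[distutils]\n', encoding='utf-8')
    # One call, the file is closed on return, and the encoding is explicit:
    assert p.read_text(encoding='utf-8') == '[distutils]\n'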

From c2c1f323ac50de697c5274aff5a027e6d2f6db53 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 18:38:08 -0400
Subject: [PATCH 1143/1761] Remove collect_ignore referring to msvc9compiler.

---
 conftest.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/conftest.py b/conftest.py
index 930a595274..9667879d24 100644
--- a/conftest.py
+++ b/conftest.py
@@ -7,14 +7,6 @@
 import path
 import pytest
 
-collect_ignore = []
-
-
-if platform.system() != 'Windows':
-    collect_ignore.extend([
-        'distutils/msvc9compiler.py',
-    ])
-
 
 @pytest.fixture
 def save_env():
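
For reference, collect_ignore is a standard conftest hook variable; the
removed block was the usual way to keep pytest from importing a
platform-specific module. A generic sketch (paths illustrative):

    # conftest.py -- paths listed here are never imported during collection:
    collect_ignore = ['setup.py']
    collect_ignore_glob = ['*_py2.py']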

From ddc15558b168fbb8394df643eab427625863dad5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 18:44:10 -0400
Subject: [PATCH 1144/1761] Flagged register and upload commands as
 deprecated.

---
 distutils/command/register.py | 2 ++
 distutils/command/upload.py   | 2 ++
 pytest.ini                    | 4 ++++
 3 files changed, 8 insertions(+)

diff --git a/distutils/command/register.py b/distutils/command/register.py
index 9645401fd7..b3373a3c21 100644
--- a/distutils/command/register.py
+++ b/distutils/command/register.py
@@ -10,6 +10,7 @@
 import logging
 import urllib.parse
 import urllib.request
+import warnings
 from distutils._log import log
 
 from more_itertools import always_iterable
@@ -50,6 +51,7 @@ def finalize_options(self):
         self.distribution.command_options['check'] = check_options
 
     def run(self):
+        warnings.warn("register command is deprecated. Do not use.")
         self.finalize_options()
         self._set_config()
 
diff --git a/distutils/command/upload.py b/distutils/command/upload.py
index 5717eef1fa..3428a6b613 100644
--- a/distutils/command/upload.py
+++ b/distutils/command/upload.py
@@ -9,6 +9,7 @@
 import io
 import logging
 import os
+import warnings
 from base64 import standard_b64encode
 from urllib.parse import urlparse
 from urllib.request import HTTPError, Request, urlopen
@@ -63,6 +64,7 @@ def finalize_options(self):
             self.password = self.distribution.password
 
     def run(self):
+        warnings.warn("upload command is deprecated. Do not use.")
         if not self.distribution.dist_files:
             msg = (
                 "Must create and upload files in one command "
diff --git a/pytest.ini b/pytest.ini
index dd57c6ef4e..0efd9e4199 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -44,3 +44,7 @@ filterwarnings=
 	# https://sourceforge.net/p/docutils/bugs/490/
 	ignore:'encoding' argument not specified::docutils.io
 	ignore:UTF-8 Mode affects locale.getpreferredencoding()::docutils.io
+
+	# suppress known deprecation
+	ignore:register command is deprecated
+	ignore:upload command is deprecated
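
A miniature of the pairing introduced here: the commands emit a plain
UserWarning (no category is passed to warnings.warn), and the pytest filter
matches on the message prefix to keep the suite quiet. A sketch:

    import warnings

    def run():
        # No category given, so this is a UserWarning; the pytest.ini filter
        # above matches on the message text, not the category.
        warnings.warn("register command is deprecated. Do not use.")

    # pytest.ini:
    # [pytest]
    # filterwarnings =
    #     ignore:register command is deprecated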

From ccaee103f31a1ab77812aa09766af0adb29db6fe Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 18:47:51 -0400
Subject: [PATCH 1145/1761] Move comment back to its relevant line.

---
 setuptools/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index ab373c51d6..3a1cc58aa3 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -111,8 +111,8 @@ def _fetch_build_eggs(dist):
 
 
 def setup(**attrs):
-    # Make sure we have any requirements needed to interpret 'attrs'.
     logging.configure()
+    # Make sure we have any requirements needed to interpret 'attrs'.
     _install_setup_requires(attrs)
     return distutils.core.setup(**attrs)
 

From 990977fb894118432fece5a4095155093378894c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 6 Sep 2024 11:22:33 -0400
Subject: [PATCH 1146/1761] Remove pypi-based commands upload, register,
 upload-docs.

---
 newsfragments/2971.removal.rst    |   1 +
 setuptools/command/register.py    |  22 ---
 setuptools/command/upload.py      |  20 ---
 setuptools/command/upload_docs.py | 221 ------------------------------
 setuptools/tests/test_register.py |  19 ---
 setuptools/tests/test_upload.py   |  19 ---
 6 files changed, 1 insertion(+), 301 deletions(-)
 create mode 100644 newsfragments/2971.removal.rst
 delete mode 100644 setuptools/command/register.py
 delete mode 100644 setuptools/command/upload.py
 delete mode 100644 setuptools/command/upload_docs.py
 delete mode 100644 setuptools/tests/test_register.py
 delete mode 100644 setuptools/tests/test_upload.py

diff --git a/newsfragments/2971.removal.rst b/newsfragments/2971.removal.rst
new file mode 100644
index 0000000000..940453ab0a
--- /dev/null
+++ b/newsfragments/2971.removal.rst
@@ -0,0 +1 @@
+Removed upload_docs command.
\ No newline at end of file
diff --git a/setuptools/command/register.py b/setuptools/command/register.py
deleted file mode 100644
index 93ef04aa0e..0000000000
--- a/setuptools/command/register.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from setuptools.errors import RemovedCommandError
-
-from ..dist import Distribution
-
-import distutils.command.register as orig
-from distutils import log
-
-
-class register(orig.register):
-    """Formerly used to register packages on PyPI."""
-
-    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
-
-    def run(self):
-        msg = (
-            "The register command has been removed, use twine to upload "
-            "instead (https://pypi.org/p/twine)"
-        )
-
-        self.announce("ERROR: " + msg, log.ERROR)
-
-        raise RemovedCommandError(msg)
diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
deleted file mode 100644
index 649b41fa30..0000000000
--- a/setuptools/command/upload.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from setuptools.dist import Distribution
-from setuptools.errors import RemovedCommandError
-
-from distutils import log
-from distutils.command import upload as orig
-
-
-class upload(orig.upload):
-    """Formerly used to upload packages to PyPI."""
-
-    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
-
-    def run(self):
-        msg = (
-            "The upload command has been removed, use twine to upload "
-            "instead (https://pypi.org/p/twine)"
-        )
-
-        self.announce("ERROR: " + msg, log.ERROR)
-        raise RemovedCommandError(msg)
diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py
deleted file mode 100644
index 3c2946cfc8..0000000000
--- a/setuptools/command/upload_docs.py
+++ /dev/null
@@ -1,221 +0,0 @@
-"""upload_docs
-
-Implements a Distutils 'upload_docs' subcommand (upload documentation to
-sites other than PyPi such as devpi).
-"""
-
-import functools
-import http.client
-import itertools
-import os
-import shutil
-import tempfile
-import urllib.parse
-import zipfile
-from base64 import standard_b64encode
-
-from .._importlib import metadata
-from ..warnings import SetuptoolsDeprecationWarning
-from .upload import upload
-
-from distutils import log
-from distutils.errors import DistutilsOptionError
-
-
-def _encode(s):
-    return s.encode('utf-8', 'surrogateescape')
-
-
-class upload_docs(upload):
-    # override the default repository as upload_docs isn't
-    # supported by Warehouse (and won't be).
-    DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'
-
-    description = 'Upload documentation to sites other than PyPi such as devpi'
-
-    user_options = [
-        (
-            'repository=',
-            'r',
-            "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY,
-        ),
-        ('show-response', None, 'display full response text from server'),
-        ('upload-dir=', None, 'directory to upload'),
-    ]
-    boolean_options = upload.boolean_options
-
-    def has_sphinx(self):
-        return bool(
-            self.upload_dir is None
-            and metadata.entry_points(group='distutils.commands', name='build_sphinx')
-        )
-
-    sub_commands = [('build_sphinx', has_sphinx)]
-
-    def initialize_options(self):
-        upload.initialize_options(self)
-        self.upload_dir = None
-        self.target_dir = None
-
-    def finalize_options(self):
-        log.warn(
-            "Upload_docs command is deprecated. Use Read the Docs "
-            "(https://readthedocs.org) instead."
-        )
-        upload.finalize_options(self)
-        if self.upload_dir is None:
-            if self.has_sphinx():
-                build_sphinx = self.get_finalized_command('build_sphinx')
-                self.target_dir = dict(build_sphinx.builder_target_dirs)['html']
-            else:
-                build = self.get_finalized_command('build')
-                self.target_dir = os.path.join(build.build_base, 'docs')
-        else:
-            self.ensure_dirname('upload_dir')
-            self.target_dir = self.upload_dir
-        self.announce('Using upload directory %s' % self.target_dir)
-
-    def create_zipfile(self, filename):
-        zip_file = zipfile.ZipFile(filename, "w")
-        try:
-            self.mkpath(self.target_dir)  # just in case
-            for root, dirs, files in os.walk(self.target_dir):
-                if root == self.target_dir and not files:
-                    tmpl = "no files found in upload directory '%s'"
-                    raise DistutilsOptionError(tmpl % self.target_dir)
-                for name in files:
-                    full = os.path.join(root, name)
-                    relative = root[len(self.target_dir) :].lstrip(os.path.sep)
-                    dest = os.path.join(relative, name)
-                    zip_file.write(full, dest)
-        finally:
-            zip_file.close()
-
-    def run(self):
-        SetuptoolsDeprecationWarning.emit(
-            "Deprecated command",
-            """
-            upload_docs is deprecated and will be removed in a future version.
-            Instead, use tools like devpi and Read the Docs; or lower level tools like
-            httpie and curl to interact directly with your hosting service API.
-            """,
-            due_date=(2023, 9, 26),  # warning introduced in 27 Jul 2022
-        )
-
-        # Run sub commands
-        for cmd_name in self.get_sub_commands():
-            self.run_command(cmd_name)
-
-        tmp_dir = tempfile.mkdtemp()
-        name = self.distribution.metadata.get_name()
-        zip_file = os.path.join(tmp_dir, "%s.zip" % name)
-        try:
-            self.create_zipfile(zip_file)
-            self.upload_file(zip_file)
-        finally:
-            shutil.rmtree(tmp_dir)
-
-    @staticmethod
-    def _build_part(item, sep_boundary):
-        key, values = item
-        title = '\nContent-Disposition: form-data; name="%s"' % key
-        # handle multiple entries for the same name
-        if not isinstance(values, list):
-            values = [values]
-        for value in values:
-            if isinstance(value, tuple):
-                title += '; filename="%s"' % value[0]
-                value = value[1]
-            else:
-                value = _encode(value)
-            yield sep_boundary
-            yield _encode(title)
-            yield b"\n\n"
-            yield value
-            if value and value[-1:] == b'\r':
-                yield b'\n'  # write an extra newline (lurve Macs)
-
-    @classmethod
-    def _build_multipart(cls, data):
-        """
-        Build up the MIME payload for the POST data
-        """
-        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
-        sep_boundary = b'\n--' + boundary.encode('ascii')
-        end_boundary = sep_boundary + b'--'
-        end_items = (
-            end_boundary,
-            b"\n",
-        )
-        builder = functools.partial(
-            cls._build_part,
-            sep_boundary=sep_boundary,
-        )
-        part_groups = map(builder, data.items())
-        parts = itertools.chain.from_iterable(part_groups)
-        body_items = itertools.chain(parts, end_items)
-        content_type = 'multipart/form-data; boundary=%s' % boundary
-        return b''.join(body_items), content_type
-
-    def upload_file(self, filename):
-        with open(filename, 'rb') as f:
-            content = f.read()
-        meta = self.distribution.metadata
-        data = {
-            ':action': 'doc_upload',
-            'name': meta.get_name(),
-            'content': (os.path.basename(filename), content),
-        }
-        # set up the authentication
-        credentials = _encode(self.username + ':' + self.password)
-        credentials = standard_b64encode(credentials).decode('ascii')
-        auth = "Basic " + credentials
-
-        body, ct = self._build_multipart(data)
-
-        msg = "Submitting documentation to %s" % (self.repository)
-        self.announce(msg, log.INFO)
-
-        # build the Request
-        # We can't use urllib2 since we need to send the Basic
-        # auth right with the first request
-        schema, netloc, url, params, query, fragments = urllib.parse.urlparse(
-            self.repository
-        )
-        assert not params and not query and not fragments
-        if schema == 'http':
-            conn = http.client.HTTPConnection(netloc)
-        elif schema == 'https':
-            conn = http.client.HTTPSConnection(netloc)
-        else:
-            raise AssertionError("unsupported schema " + schema)
-
-        data = ''
-        try:
-            conn.connect()
-            conn.putrequest("POST", url)
-            content_type = ct
-            conn.putheader('Content-type', content_type)
-            conn.putheader('Content-length', str(len(body)))
-            conn.putheader('Authorization', auth)
-            conn.endheaders()
-            conn.send(body)
-        except OSError as e:
-            self.announce(str(e), log.ERROR)
-            return
-
-        r = conn.getresponse()
-        if r.status == 200:
-            msg = 'Server response (%s): %s' % (r.status, r.reason)
-            self.announce(msg, log.INFO)
-        elif r.status == 301:
-            location = r.getheader('Location')
-            if location is None:
-                location = 'https://pythonhosted.org/%s/' % meta.get_name()
-            msg = 'Upload successful. Visit %s' % location
-            self.announce(msg, log.INFO)
-        else:
-            msg = 'Upload failed (%s): %s' % (r.status, r.reason)
-            self.announce(msg, log.ERROR)
-        if self.show_response:
-            print('-' * 75, r.read(), '-' * 75)
diff --git a/setuptools/tests/test_register.py b/setuptools/tests/test_register.py
deleted file mode 100644
index 0c7d109d31..0000000000
--- a/setuptools/tests/test_register.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from unittest import mock
-
-import pytest
-
-from setuptools.command.register import register
-from setuptools.dist import Distribution
-from setuptools.errors import RemovedCommandError
-
-
-class TestRegister:
-    def test_register_exception(self):
-        """Ensure that the register command has been properly removed."""
-        dist = Distribution()
-        dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())]
-
-        cmd = register(dist)
-
-        with pytest.raises(RemovedCommandError):
-            cmd.run()
diff --git a/setuptools/tests/test_upload.py b/setuptools/tests/test_upload.py
deleted file mode 100644
index cbcd455c41..0000000000
--- a/setuptools/tests/test_upload.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from unittest import mock
-
-import pytest
-
-from setuptools.command.upload import upload
-from setuptools.dist import Distribution
-from setuptools.errors import RemovedCommandError
-
-
-class TestUpload:
-    def test_upload_exception(self):
-        """Ensure that the register command has been properly removed."""
-        dist = Distribution()
-        dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())]
-
-        cmd = upload(dist)
-
-        with pytest.raises(RemovedCommandError):
-            cmd.run()
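
The deleted tests exercised the "tombstone" pattern: a command that exists
only to raise RemovedCommandError with migration guidance. Sketched standalone
(the real classes subclass the corresponding distutils commands):

    import pytest

    class RemovedCommandError(RuntimeError):
        """Stand-in for setuptools.errors.RemovedCommandError."""

    class upload:
        def run(self):
            raise RemovedCommandError(
                "The upload command has been removed, use twine to upload "
                "instead (https://pypi.org/p/twine)"
            )

    def test_upload_removed():
        with pytest.raises(RemovedCommandError):
            upload().run()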

From 6ce5426cfa104012a47ee2cf94f905f86d647304 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 6 Sep 2024 11:45:39 -0400
Subject: [PATCH 1147/1761] Decouple sdist tests from pypirc fixture.

---
 distutils/tests/test_sdist.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py
index 9b0930d757..5aca43e34f 100644
--- a/distutils/tests/test_sdist.py
+++ b/distutils/tests/test_sdist.py
@@ -45,8 +45,9 @@
 
 
 @pytest.fixture(autouse=True)
-def project_dir(request, pypirc):
+def project_dir(request, distutils_managed_tempdir):
     self = request.instance
+    self.tmp_dir = self.mkdtemp()
     jaraco.path.build(
         {
             'somecode': {

From 2e6f25c51909bd646975326360ebabac7dfaf476 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 6 Sep 2024 11:50:19 -0400
Subject: [PATCH 1148/1761] Removed PyPI commands (register, upload) and
 supporting logic.

---
 conftest.py                      |  23 ---
 distutils/command/__init__.py    |   2 -
 distutils/command/register.py    | 311 ------------------------------
 distutils/command/upload.py      | 211 ---------------------
 distutils/config.py              | 151 ---------------
 distutils/core.py                |   3 +-
 distutils/tests/test_config.py   | 112 -----------
 distutils/tests/test_register.py | 314 -------------------------------
 distutils/tests/test_upload.py   | 215 ---------------------
 9 files changed, 1 insertion(+), 1341 deletions(-)
 delete mode 100644 distutils/command/register.py
 delete mode 100644 distutils/command/upload.py
 delete mode 100644 distutils/config.py
 delete mode 100644 distutils/tests/test_config.py
 delete mode 100644 distutils/tests/test_register.py
 delete mode 100644 distutils/tests/test_upload.py

diff --git a/conftest.py b/conftest.py
index 9667879d24..98f98d41ab 100644
--- a/conftest.py
+++ b/conftest.py
@@ -82,29 +82,6 @@ def temp_cwd(tmp_path):
         yield
 
 
-@pytest.fixture
-def pypirc(request, save_env, distutils_managed_tempdir):
-    from distutils.core import Distribution, PyPIRCCommand
-
-    self = request.instance
-    self.tmp_dir = self.mkdtemp()
-    os.environ['HOME'] = self.tmp_dir
-    os.environ['USERPROFILE'] = self.tmp_dir
-    self.rc = os.path.join(self.tmp_dir, '.pypirc')
-    self.dist = Distribution()
-
-    class command(PyPIRCCommand):
-        def __init__(self, dist):
-            super().__init__(dist)
-
-        def initialize_options(self):
-            pass
-
-        finalize_options = initialize_options
-
-    self._cmd = command
-
-
 # from pytest-dev/pytest#363
 @pytest.fixture(scope="session")
 def monkeysession(request):
diff --git a/distutils/command/__init__.py b/distutils/command/__init__.py
index 1e8fbe60c2..0f8a1692ba 100644
--- a/distutils/command/__init__.py
+++ b/distutils/command/__init__.py
@@ -16,10 +16,8 @@
     'install_scripts',
     'install_data',
     'sdist',
-    'register',
     'bdist',
     'bdist_dumb',
     'bdist_rpm',
     'check',
-    'upload',
 ]
diff --git a/distutils/command/register.py b/distutils/command/register.py
deleted file mode 100644
index b3373a3c21..0000000000
--- a/distutils/command/register.py
+++ /dev/null
@@ -1,311 +0,0 @@
-"""distutils.command.register
-
-Implements the Distutils 'register' command (register with the repository).
-"""
-
-# created 2002/10/21, Richard Jones
-
-import getpass
-import io
-import logging
-import urllib.parse
-import urllib.request
-import warnings
-from distutils._log import log
-
-from more_itertools import always_iterable
-
-from ..core import PyPIRCCommand
-
-
-class register(PyPIRCCommand):
-    description = "register the distribution with the Python package index"
-    user_options = PyPIRCCommand.user_options + [
-        ('list-classifiers', None, 'list the valid Trove classifiers'),
-        (
-            'strict',
-            None,
-            'Will stop the registering if the meta-data are not fully compliant',
-        ),
-    ]
-    boolean_options = PyPIRCCommand.boolean_options + [
-        'verify',
-        'list-classifiers',
-        'strict',
-    ]
-
-    sub_commands = [('check', lambda self: True)]
-
-    def initialize_options(self):
-        PyPIRCCommand.initialize_options(self)
-        self.list_classifiers = False
-        self.strict = False
-
-    def finalize_options(self):
-        PyPIRCCommand.finalize_options(self)
-        # setting options for the `check` subcommand
-        check_options = {
-            'strict': ('register', self.strict),
-            'restructuredtext': ('register', 1),
-        }
-        self.distribution.command_options['check'] = check_options
-
-    def run(self):
-        warnings.warn("register command is deprecated. Do not use.")
-        self.finalize_options()
-        self._set_config()
-
-        # Run sub commands
-        for cmd_name in self.get_sub_commands():
-            self.run_command(cmd_name)
-
-        if self.dry_run:
-            self.verify_metadata()
-        elif self.list_classifiers:
-            self.classifiers()
-        else:
-            self.send_metadata()
-
-    def _set_config(self):
-        """Reads the configuration file and set attributes."""
-        config = self._read_pypirc()
-        if config != {}:
-            self.username = config['username']
-            self.password = config['password']
-            self.repository = config['repository']
-            self.realm = config['realm']
-            self.has_config = True
-        else:
-            if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
-                raise ValueError(f'{self.repository} not found in .pypirc')
-            if self.repository == 'pypi':
-                self.repository = self.DEFAULT_REPOSITORY
-            self.has_config = False
-
-    def classifiers(self):
-        """Fetch the list of classifiers from the server."""
-        url = self.repository + '?:action=list_classifiers'
-        response = urllib.request.urlopen(url)
-        log.info(self._read_pypi_response(response))
-
-    def verify_metadata(self):
-        """Send the metadata to the package index server to be checked."""
-        # send the info to the server and report the result
-        (code, result) = self.post_to_server(self.build_post_data('verify'))
-        log.info('Server response (%s): %s', code, result)
-
-    def send_metadata(self):  # noqa: C901
-        """Send the metadata to the package index server.
-
-        Well, do the following:
-        1. figure who the user is, and then
-        2. send the data as a Basic auth'ed POST.
-
-        First we try to read the username/password from $HOME/.pypirc,
-        which is a ConfigParser-formatted file with a [distutils]
-        section listing the index servers and, for each server, a
-        section containing username and password entries (both in
-        clear text). E.g.:
-
-            [distutils]
-            index-servers =
-                pypi
-
-            [pypi]
-            username: fred
-            password: sekrit
-
-        Otherwise, to figure who the user is, we offer the user three
-        choices:
-
-         1. use existing login,
-         2. register as a new user, or
-         3. set the password to a random string and email the user.
-
-        """
-        # see if we can short-cut and get the username/password from the
-        # config
-        if self.has_config:
-            choice = '1'
-            username = self.username
-            password = self.password
-        else:
-            choice = 'x'
-            username = password = ''
-
-        # get the user's login info
-        choices = '1 2 3 4'.split()
-        while choice not in choices:
-            self.announce(
-                """\
-We need to know who you are, so please choose either:
- 1. use your existing login,
- 2. register as a new user,
- 3. have the server generate a new password for you (and email it to you), or
- 4. quit
-Your selection [default 1]: """,
-                logging.INFO,
-            )
-            choice = input()
-            if not choice:
-                choice = '1'
-            elif choice not in choices:
-                print('Please choose one of the four options!')
-
-        if choice == '1':
-            # get the username and password
-            while not username:
-                username = input('Username: ')
-            while not password:
-                password = getpass.getpass('Password: ')
-
-            # set up the authentication
-            auth = urllib.request.HTTPPasswordMgr()
-            host = urllib.parse.urlparse(self.repository)[1]
-            auth.add_password(self.realm, host, username, password)
-            # send the info to the server and report the result
-            code, result = self.post_to_server(self.build_post_data('submit'), auth)
-            self.announce(f'Server response ({code}): {result}', logging.INFO)
-
-            # possibly save the login
-            if code == 200:
-                if self.has_config:
-                    # sharing the password in the distribution instance
-                    # so the upload command can reuse it
-                    self.distribution.password = password
-                else:
-                    self.announce(
-                        (
-                            'I can store your PyPI login so future '
-                            'submissions will be faster.'
-                        ),
-                        logging.INFO,
-                    )
-                    self.announce(
-                        f'(the login will be stored in {self._get_rc_file()})',
-                        logging.INFO,
-                    )
-                    choice = 'X'
-                    while choice.lower() not in 'yn':
-                        choice = input('Save your login (y/N)?')
-                        if not choice:
-                            choice = 'n'
-                    if choice.lower() == 'y':
-                        self._store_pypirc(username, password)
-
-        elif choice == '2':
-            data = {':action': 'user'}
-            data['name'] = data['password'] = data['email'] = ''
-            data['confirm'] = None
-            while not data['name']:
-                data['name'] = input('Username: ')
-            while data['password'] != data['confirm']:
-                while not data['password']:
-                    data['password'] = getpass.getpass('Password: ')
-                while not data['confirm']:
-                    data['confirm'] = getpass.getpass(' Confirm: ')
-                if data['password'] != data['confirm']:
-                    data['password'] = ''
-                    data['confirm'] = None
-                    print("Password and confirm don't match!")
-            while not data['email']:
-                data['email'] = input('   EMail: ')
-            code, result = self.post_to_server(data)
-            if code != 200:
-                log.info('Server response (%s): %s', code, result)
-            else:
-                log.info('You will receive an email shortly.')
-                log.info('Follow the instructions in it to complete registration.')
-        elif choice == '3':
-            data = {':action': 'password_reset'}
-            data['email'] = ''
-            while not data['email']:
-                data['email'] = input('Your email address: ')
-            code, result = self.post_to_server(data)
-            log.info('Server response (%s): %s', code, result)
-
-    def build_post_data(self, action):
-        # figure the data to send - the metadata plus some additional
-        # information used by the package server
-        meta = self.distribution.metadata
-        data = {
-            ':action': action,
-            'metadata_version': '1.0',
-            'name': meta.get_name(),
-            'version': meta.get_version(),
-            'summary': meta.get_description(),
-            'home_page': meta.get_url(),
-            'author': meta.get_contact(),
-            'author_email': meta.get_contact_email(),
-            'license': meta.get_licence(),
-            'description': meta.get_long_description(),
-            'keywords': meta.get_keywords(),
-            'platform': meta.get_platforms(),
-            'classifiers': meta.get_classifiers(),
-            'download_url': meta.get_download_url(),
-            # PEP 314
-            'provides': meta.get_provides(),
-            'requires': meta.get_requires(),
-            'obsoletes': meta.get_obsoletes(),
-        }
-        if data['provides'] or data['requires'] or data['obsoletes']:
-            data['metadata_version'] = '1.1'
-        return data
-
-    def post_to_server(self, data, auth=None):  # noqa: C901
-        """Post a query to the server, and return a string response."""
-        if 'name' in data:
-            self.announce(
-                'Registering {} to {}'.format(data['name'], self.repository),
-                logging.INFO,
-            )
-        # Build up the MIME payload for the urllib.request POST data
-        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
-        sep_boundary = '\n--' + boundary
-        end_boundary = sep_boundary + '--'
-        body = io.StringIO()
-        for key, values in data.items():
-            for value in map(str, make_iterable(values)):
-                body.write(sep_boundary)
-                body.write(f'\nContent-Disposition: form-data; name="{key}"')
-                body.write("\n\n")
-                body.write(value)
-                if value and value[-1] == '\r':
-                    body.write('\n')  # write an extra newline (lurve Macs)
-        body.write(end_boundary)
-        body.write("\n")
-        body = body.getvalue().encode("utf-8")
-
-        # build the Request
-        headers = {
-            'Content-type': f'multipart/form-data; boundary={boundary}; charset=utf-8',
-            'Content-length': str(len(body)),
-        }
-        req = urllib.request.Request(self.repository, body, headers)
-
-        # handle HTTP and include the Basic Auth handler
-        opener = urllib.request.build_opener(
-            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
-        )
-        data = ''
-        try:
-            result = opener.open(req)
-        except urllib.error.HTTPError as e:
-            if self.show_response:
-                data = e.fp.read()
-            result = e.code, e.msg
-        except urllib.error.URLError as e:
-            result = 500, str(e)
-        else:
-            if self.show_response:
-                data = self._read_pypi_response(result)
-            result = 200, 'OK'
-        if self.show_response:
-            msg = '\n'.join(('-' * 75, data, '-' * 75))
-            self.announce(msg, logging.INFO)
-        return result
-
-
-def make_iterable(values):
-    if values is None:
-        return [None]
-    return always_iterable(values)
diff --git a/distutils/command/upload.py b/distutils/command/upload.py
deleted file mode 100644
index 3428a6b613..0000000000
--- a/distutils/command/upload.py
+++ /dev/null
@@ -1,211 +0,0 @@
-"""
-distutils.command.upload
-
-Implements the Distutils 'upload' subcommand (upload package to a package
-index).
-"""
-
-import hashlib
-import io
-import logging
-import os
-import warnings
-from base64 import standard_b64encode
-from urllib.parse import urlparse
-from urllib.request import HTTPError, Request, urlopen
-
-from more_itertools import always_iterable
-
-from ..core import PyPIRCCommand
-from ..errors import DistutilsError, DistutilsOptionError
-from ..spawn import spawn
-
-# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
-# https://bugs.python.org/issue40698
-_FILE_CONTENT_DIGESTS = {
-    "md5_digest": getattr(hashlib, "md5", None),
-    "sha256_digest": getattr(hashlib, "sha256", None),
-    "blake2_256_digest": getattr(hashlib, "blake2b", None),
-}
-
-
-class upload(PyPIRCCommand):
-    description = "upload binary package to PyPI"
-
-    user_options = PyPIRCCommand.user_options + [
-        ('sign', 's', 'sign files to upload using gpg'),
-        ('identity=', 'i', 'GPG identity used to sign files'),
-    ]
-
-    boolean_options = PyPIRCCommand.boolean_options + ['sign']
-
-    def initialize_options(self):
-        PyPIRCCommand.initialize_options(self)
-        self.username = ''
-        self.password = ''
-        self.show_response = False
-        self.sign = False
-        self.identity = None
-
-    def finalize_options(self):
-        PyPIRCCommand.finalize_options(self)
-        if self.identity and not self.sign:
-            raise DistutilsOptionError("Must use --sign for --identity to have meaning")
-        config = self._read_pypirc()
-        if config != {}:
-            self.username = config['username']
-            self.password = config['password']
-            self.repository = config['repository']
-            self.realm = config['realm']
-
-        # getting the password from the distribution
-        # if previously set by the register command
-        if not self.password and self.distribution.password:
-            self.password = self.distribution.password
-
-    def run(self):
-        warnings.warn("upload command is deprecated. Do not use.")
-        if not self.distribution.dist_files:
-            msg = (
-                "Must create and upload files in one command "
-                "(e.g. setup.py sdist upload)"
-            )
-            raise DistutilsOptionError(msg)
-        for command, pyversion, filename in self.distribution.dist_files:
-            self.upload_file(command, pyversion, filename)
-
-    def upload_file(self, command, pyversion, filename):  # noqa: C901
-        # Makes sure the repository URL is compliant
-        schema, netloc, url, params, query, fragments = urlparse(self.repository)
-        if params or query or fragments:
-            raise AssertionError(f"Incompatible url {self.repository}")
-
-        if schema not in ('http', 'https'):
-            raise AssertionError("unsupported schema " + schema)
-
-        # Sign if requested
-        if self.sign:
-            gpg_args = ["gpg", "--detach-sign", "-a", filename]
-            if self.identity:
-                gpg_args[2:2] = ["--local-user", self.identity]
-            spawn(gpg_args, dry_run=self.dry_run)
-
-        # Fill in the data - send all the meta-data in case we need to
-        # register a new release
-        f = open(filename, 'rb')
-        try:
-            content = f.read()
-        finally:
-            f.close()
-
-        meta = self.distribution.metadata
-        data = {
-            # action
-            ':action': 'file_upload',
-            'protocol_version': '1',
-            # identify release
-            'name': meta.get_name(),
-            'version': meta.get_version(),
-            # file content
-            'content': (os.path.basename(filename), content),
-            'filetype': command,
-            'pyversion': pyversion,
-            # additional meta-data
-            'metadata_version': '1.0',
-            'summary': meta.get_description(),
-            'home_page': meta.get_url(),
-            'author': meta.get_contact(),
-            'author_email': meta.get_contact_email(),
-            'license': meta.get_licence(),
-            'description': meta.get_long_description(),
-            'keywords': meta.get_keywords(),
-            'platform': meta.get_platforms(),
-            'classifiers': meta.get_classifiers(),
-            'download_url': meta.get_download_url(),
-            # PEP 314
-            'provides': meta.get_provides(),
-            'requires': meta.get_requires(),
-            'obsoletes': meta.get_obsoletes(),
-        }
-
-        data['comment'] = ''
-
-        # file content digests
-        for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
-            if digest_cons is None:
-                continue
-            try:
-                data[digest_name] = digest_cons(content).hexdigest()
-            except ValueError:
-                # hash digest not available or blocked by security policy
-                pass
-
-        if self.sign:
-            with open(filename + ".asc", "rb") as f:
-                data['gpg_signature'] = (os.path.basename(filename) + ".asc", f.read())
-
-        # set up the authentication
-        user_pass = (self.username + ":" + self.password).encode('ascii')
-        # The exact encoding of the authentication string is debated.
-        # Anyway PyPI only accepts ascii for both username and password.
-        auth = "Basic " + standard_b64encode(user_pass).decode('ascii')
-
-        # Build up the MIME payload for the POST data
-        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
-        sep_boundary = b'\r\n--' + boundary.encode('ascii')
-        end_boundary = sep_boundary + b'--\r\n'
-        body = io.BytesIO()
-        for key, values in data.items():
-            title = f'\r\nContent-Disposition: form-data; name="{key}"'
-            for value in make_iterable(values):
-                if type(value) is tuple:
-                    title += f'; filename="{value[0]}"'
-                    value = value[1]
-                else:
-                    value = str(value).encode('utf-8')
-                body.write(sep_boundary)
-                body.write(title.encode('utf-8'))
-                body.write(b"\r\n\r\n")
-                body.write(value)
-        body.write(end_boundary)
-        body = body.getvalue()
-
-        msg = f"Submitting {filename} to {self.repository}"
-        self.announce(msg, logging.INFO)
-
-        # build the Request
-        headers = {
-            'Content-type': f'multipart/form-data; boundary={boundary}',
-            'Content-length': str(len(body)),
-            'Authorization': auth,
-        }
-
-        request = Request(self.repository, data=body, headers=headers)
-        # send the data
-        try:
-            result = urlopen(request)
-            status = result.getcode()
-            reason = result.msg
-        except HTTPError as e:
-            status = e.code
-            reason = e.msg
-        except OSError as e:
-            self.announce(str(e), logging.ERROR)
-            raise
-
-        if status == 200:
-            self.announce(f'Server response ({status}): {reason}', logging.INFO)
-            if self.show_response:
-                text = self._read_pypi_response(result)
-                msg = '\n'.join(('-' * 75, text, '-' * 75))
-                self.announce(msg, logging.INFO)
-        else:
-            msg = f'Upload failed ({status}): {reason}'
-            self.announce(msg, logging.ERROR)
-            raise DistutilsError(msg)
-
-
-def make_iterable(values):
-    if values is None:
-        return [None]
-    return always_iterable(values, base_type=(bytes, str, tuple))
diff --git a/distutils/config.py b/distutils/config.py
deleted file mode 100644
index ebd2e11da3..0000000000
--- a/distutils/config.py
+++ /dev/null
@@ -1,151 +0,0 @@
-"""distutils.pypirc
-
-Provides the PyPIRCCommand class, the base class for the command classes
-that use .pypirc in the distutils.command package.
-"""
-
-import email.message
-import os
-from configparser import RawConfigParser
-
-from .cmd import Command
-
-DEFAULT_PYPIRC = """\
-[distutils]
-index-servers =
-    pypi
-
-[pypi]
-username:%s
-password:%s
-"""
-
-
-class PyPIRCCommand(Command):
-    """Base command that knows how to handle the .pypirc file"""
-
-    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
-    DEFAULT_REALM = 'pypi'
-    repository = None
-    realm = None
-
-    user_options = [
-        ('repository=', 'r', f"url of repository [default: {DEFAULT_REPOSITORY}]"),
-        ('show-response', None, 'display full response text from server'),
-    ]
-
-    boolean_options = ['show-response']
-
-    def _get_rc_file(self):
-        """Returns rc file path."""
-        return os.path.join(os.path.expanduser('~'), '.pypirc')
-
-    def _store_pypirc(self, username, password):
-        """Creates a default .pypirc file."""
-        rc = self._get_rc_file()
-        raw = os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600)
-        with os.fdopen(raw, 'w', encoding='utf-8') as f:
-            f.write(DEFAULT_PYPIRC % (username, password))
-
-    def _read_pypirc(self):  # noqa: C901
-        """Reads the .pypirc file."""
-        rc = self._get_rc_file()
-        if os.path.exists(rc):
-            self.announce(f'Using PyPI login from {rc}')
-            repository = self.repository or self.DEFAULT_REPOSITORY
-
-            config = RawConfigParser()
-            config.read(rc, encoding='utf-8')
-            sections = config.sections()
-            if 'distutils' in sections:
-                # let's get the list of servers
-                index_servers = config.get('distutils', 'index-servers')
-                _servers = [
-                    server.strip()
-                    for server in index_servers.split('\n')
-                    if server.strip() != ''
-                ]
-                if _servers == []:
-                    # nothing set, let's try to get the default pypi
-                    if 'pypi' in sections:
-                        _servers = ['pypi']
-                    else:
-                        # the file is not properly defined, returning
-                        # an empty dict
-                        return {}
-                for server in _servers:
-                    current = {'server': server}
-                    current['username'] = config.get(server, 'username')
-
-                    # optional params
-                    for key, default in (
-                        ('repository', self.DEFAULT_REPOSITORY),
-                        ('realm', self.DEFAULT_REALM),
-                        ('password', None),
-                    ):
-                        if config.has_option(server, key):
-                            current[key] = config.get(server, key)
-                        else:
-                            current[key] = default
-
-                    # work around people having "repository" for the "pypi"
-                    # section of their config set to the HTTP (rather than
-                    # HTTPS) URL
-                    if server == 'pypi' and repository in (
-                        self.DEFAULT_REPOSITORY,
-                        'pypi',
-                    ):
-                        current['repository'] = self.DEFAULT_REPOSITORY
-                        return current
-
-                    if (
-                        current['server'] == repository
-                        or current['repository'] == repository
-                    ):
-                        return current
-            elif 'server-login' in sections:
-                # old format
-                server = 'server-login'
-                if config.has_option(server, 'repository'):
-                    repository = config.get(server, 'repository')
-                else:
-                    repository = self.DEFAULT_REPOSITORY
-                return {
-                    'username': config.get(server, 'username'),
-                    'password': config.get(server, 'password'),
-                    'repository': repository,
-                    'server': server,
-                    'realm': self.DEFAULT_REALM,
-                }
-
-        return {}
-
-    def _read_pypi_response(self, response):
-        """Read and decode a PyPI HTTP response."""
-        content_type = response.getheader('content-type', 'text/plain')
-        return response.read().decode(_extract_encoding(content_type))
-
-    def initialize_options(self):
-        """Initialize options."""
-        self.repository = None
-        self.realm = None
-        self.show_response = False
-
-    def finalize_options(self):
-        """Finalizes options."""
-        if self.repository is None:
-            self.repository = self.DEFAULT_REPOSITORY
-        if self.realm is None:
-            self.realm = self.DEFAULT_REALM
-
-
-def _extract_encoding(content_type):
-    """
-    >>> _extract_encoding('text/plain')
-    'ascii'
-    >>> _extract_encoding('text/html; charset="utf8"')
-    'utf8'
-    """
-    msg = email.message.EmailMessage()
-    msg['content-type'] = content_type
-    return msg['content-type'].params.get('charset', 'ascii')
diff --git a/distutils/core.py b/distutils/core.py
index 82113c47c1..bc06091abb 100644
--- a/distutils/core.py
+++ b/distutils/core.py
@@ -11,7 +11,6 @@
 import tokenize
 
 from .cmd import Command
-from .config import PyPIRCCommand
 from .debug import DEBUG
 
 # Mainly import these so setup scripts can "from distutils.core import" them.
@@ -24,7 +23,7 @@
 )
 from .extension import Extension
 
-__all__ = ['Distribution', 'Command', 'PyPIRCCommand', 'Extension', 'setup']
+__all__ = ['Distribution', 'Command', 'Extension', 'setup']
 
 # This is a barebones help message displayed when the user
 # runs the setup script with no arguments at all.  More useful help
diff --git a/distutils/tests/test_config.py b/distutils/tests/test_config.py
deleted file mode 100644
index 6e3f5f24dd..0000000000
--- a/distutils/tests/test_config.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""Tests for distutils.pypirc.pypirc."""
-
-import os
-import pathlib
-from distutils.tests import support
-
-import pytest
-
-PYPIRC = """\
-[distutils]
-
-index-servers =
-    server1
-    server2
-    server3
-
-[server1]
-username:me
-password:secret
-
-[server2]
-username:meagain
-password: secret
-realm:acme
-repository:http://another.pypi/
-
-[server3]
-username:cbiggles
-password:yh^%#rest-of-my-password
-"""
-
-PYPIRC_OLD = """\
-[server-login]
-username:tarek
-password:secret
-"""
-
-WANTED = """\
-[distutils]
-index-servers =
-    pypi
-
-[pypi]
-username:tarek
-password:xxx
-"""
-
-
-@support.combine_markers
-@pytest.mark.usefixtures('pypirc')
-class BasePyPIRCCommandTestCase(support.TempdirManager):
-    pass
-
-
-class TestPyPIRCCommand(BasePyPIRCCommandTestCase):
-    def test_server_registration(self):
-        # This test makes sure PyPIRCCommand knows how to:
-        # 1. handle several sections in .pypirc
-        # 2. handle the old format
-
-        # new format
-        self.write_file(self.rc, PYPIRC)
-        cmd = self._cmd(self.dist)
-        config = cmd._read_pypirc()
-
-        config = list(sorted(config.items()))
-        waited = [
-            ('password', 'secret'),
-            ('realm', 'pypi'),
-            ('repository', 'https://upload.pypi.org/legacy/'),
-            ('server', 'server1'),
-            ('username', 'me'),
-        ]
-        assert config == waited
-
-        # old format
-        self.write_file(self.rc, PYPIRC_OLD)
-        config = cmd._read_pypirc()
-        config = list(sorted(config.items()))
-        waited = [
-            ('password', 'secret'),
-            ('realm', 'pypi'),
-            ('repository', 'https://upload.pypi.org/legacy/'),
-            ('server', 'server-login'),
-            ('username', 'tarek'),
-        ]
-        assert config == waited
-
-    def test_server_empty_registration(self):
-        cmd = self._cmd(self.dist)
-        rc = cmd._get_rc_file()
-        assert not os.path.exists(rc)
-        cmd._store_pypirc('tarek', 'xxx')
-        assert os.path.exists(rc)
-        assert pathlib.Path(rc).read_text(encoding='utf-8') == WANTED
-
-    def test_config_interpolation(self):
-        # using the % character in .pypirc should not raise an error (#20120)
-        self.write_file(self.rc, PYPIRC)
-        cmd = self._cmd(self.dist)
-        cmd.repository = 'server3'
-        config = cmd._read_pypirc()
-
-        config = list(sorted(config.items()))
-        waited = [
-            ('password', 'yh^%#rest-of-my-password'),
-            ('realm', 'pypi'),
-            ('repository', 'https://upload.pypi.org/legacy/'),
-            ('server', 'server3'),
-            ('username', 'cbiggles'),
-        ]
-        assert config == waited
diff --git a/distutils/tests/test_register.py b/distutils/tests/test_register.py
deleted file mode 100644
index 14dfb832c7..0000000000
--- a/distutils/tests/test_register.py
+++ /dev/null
@@ -1,314 +0,0 @@
-"""Tests for distutils.command.register."""
-
-import getpass
-import os
-import pathlib
-import urllib
-from distutils.command import register as register_module
-from distutils.command.register import register
-from distutils.errors import DistutilsSetupError
-from distutils.tests.test_config import BasePyPIRCCommandTestCase
-
-import pytest
-
-try:
-    import docutils
-except ImportError:
-    docutils = None
-
-PYPIRC_NOPASSWORD = """\
-[distutils]
-
-index-servers =
-    server1
-
-[server1]
-username:me
-"""
-
-WANTED_PYPIRC = """\
-[distutils]
-index-servers =
-    pypi
-
-[pypi]
-username:tarek
-password:password
-"""
-
-
-class Inputs:
-    """Fakes user inputs."""
-
-    def __init__(self, *answers):
-        self.answers = answers
-        self.index = 0
-
-    def __call__(self, prompt=''):
-        try:
-            return self.answers[self.index]
-        finally:
-            self.index += 1
-
-
-class FakeOpener:
-    """Fakes a PyPI server"""
-
-    def __init__(self):
-        self.reqs = []
-
-    def __call__(self, *args):
-        return self
-
-    def open(self, req, data=None, timeout=None):
-        self.reqs.append(req)
-        return self
-
-    def read(self):
-        return b'xxx'
-
-    def getheader(self, name, default=None):
-        return {
-            'content-type': 'text/plain; charset=utf-8',
-        }.get(name.lower(), default)
-
-
-@pytest.fixture(autouse=True)
-def autopass(monkeypatch):
-    monkeypatch.setattr(getpass, 'getpass', lambda prompt: 'password')
-
-
-@pytest.fixture(autouse=True)
-def fake_opener(monkeypatch, request):
-    opener = FakeOpener()
-    monkeypatch.setattr(urllib.request, 'build_opener', opener)
-    monkeypatch.setattr(urllib.request, '_opener', None)
-    request.instance.conn = opener
-
-
-class TestRegister(BasePyPIRCCommandTestCase):
-    def _get_cmd(self, metadata=None):
-        if metadata is None:
-            metadata = {
-                'url': 'xxx',
-                'author': 'xxx',
-                'author_email': 'xxx',
-                'name': 'xxx',
-                'version': 'xxx',
-                'long_description': 'xxx',
-            }
-        pkg_info, dist = self.create_dist(**metadata)
-        return register(dist)
-
-    def test_create_pypirc(self):
-        # this test makes sure a .pypirc file
-        # is created when requested.
-
-        # let's create a register instance
-        cmd = self._get_cmd()
-
-        # we shouldn't have a .pypirc file yet
-        assert not os.path.exists(self.rc)
-
-        # patching input and getpass.getpass
-        # so register gets happy
-        #
-        # Here's what we are faking:
-        # use your existing login (choice 1.)
-        # Username : 'tarek'
-        # Password : 'password'
-        # Save your login (y/N)? : 'y'
-        inputs = Inputs('1', 'tarek', 'y')
-        register_module.input = inputs.__call__
-        # let's run the command
-        try:
-            cmd.run()
-        finally:
-            del register_module.input
-
-        # A new .pypirc file should contain WANTED_PYPIRC
-        assert pathlib.Path(self.rc).read_text(encoding='utf-8') == WANTED_PYPIRC
-
-        # now let's make sure the .pypirc file generated
-        # really works: we shouldn't be asked anything
-        # if we run the command again
-        def _no_way(prompt=''):
-            raise AssertionError(prompt)
-
-        register_module.input = _no_way
-
-        cmd.show_response = True
-        cmd.run()
-
-        # let's see what the server received: we should
-        # have 2 similar requests
-        assert len(self.conn.reqs) == 2
-        req1 = dict(self.conn.reqs[0].headers)
-        req2 = dict(self.conn.reqs[1].headers)
-
-        assert req1['Content-length'] == '1358'
-        assert req2['Content-length'] == '1358'
-        assert b'xxx' in self.conn.reqs[1].data
-
-    def test_password_not_in_file(self):
-        self.write_file(self.rc, PYPIRC_NOPASSWORD)
-        cmd = self._get_cmd()
-        cmd._set_config()
-        cmd.finalize_options()
-        cmd.send_metadata()
-
-        # dist.password should be set
-        # and therefore reusable afterwards by other commands
-        assert cmd.distribution.password == 'password'
-
-    def test_registering(self):
-        # this test runs choice 2
-        cmd = self._get_cmd()
-        inputs = Inputs('2', 'tarek', 'tarek@ziade.org')
-        register_module.input = inputs.__call__
-        try:
-            # let's run the command
-            cmd.run()
-        finally:
-            del register_module.input
-
-        # we should have sent a request
-        assert len(self.conn.reqs) == 1
-        req = self.conn.reqs[0]
-        headers = dict(req.headers)
-        assert headers['Content-length'] == '608'
-        assert b'tarek' in req.data
-
-    def test_password_reset(self):
-        # this test runs choice 3
-        cmd = self._get_cmd()
-        inputs = Inputs('3', 'tarek@ziade.org')
-        register_module.input = inputs.__call__
-        try:
-            # let's run the command
-            cmd.run()
-        finally:
-            del register_module.input
-
-        # we should have sent a request
-        assert len(self.conn.reqs) == 1
-        req = self.conn.reqs[0]
-        headers = dict(req.headers)
-        assert headers['Content-length'] == '290'
-        assert b'tarek' in req.data
-
-    def test_strict(self):
-        # testing the strict option
-        # when on, the register command stops if
-        # the metadata is incomplete or if
-        # long_description is not reST compliant
-
-        pytest.importorskip('docutils')
-
-        # empty metadata
-        cmd = self._get_cmd({})
-        cmd.ensure_finalized()
-        cmd.strict = True
-        with pytest.raises(DistutilsSetupError):
-            cmd.run()
-
-        # metadata is OK but long_description is broken
-        metadata = {
-            'url': 'xxx',
-            'author': 'xxx',
-            'author_email': 'éxéxé',
-            'name': 'xxx',
-            'version': 'xxx',
-            'long_description': 'title\n==\n\ntext',
-        }
-
-        cmd = self._get_cmd(metadata)
-        cmd.ensure_finalized()
-        cmd.strict = True
-        with pytest.raises(DistutilsSetupError):
-            cmd.run()
-
-        # now something that works
-        metadata['long_description'] = 'title\n=====\n\ntext'
-        cmd = self._get_cmd(metadata)
-        cmd.ensure_finalized()
-        cmd.strict = True
-        inputs = Inputs('1', 'tarek', 'y')
-        register_module.input = inputs.__call__
-        # let's run the command
-        try:
-            cmd.run()
-        finally:
-            del register_module.input
-
-        # strict is off by default
-        cmd = self._get_cmd()
-        cmd.ensure_finalized()
-        inputs = Inputs('1', 'tarek', 'y')
-        register_module.input = inputs.__call__
-        # let's run the command
-        try:
-            cmd.run()
-        finally:
-            del register_module.input
-
-        # and finally a Unicode test (bug #12114)
-        metadata = {
-            'url': 'xxx',
-            'author': '\u00c9ric',
-            'author_email': 'xxx',
-            'name': 'xxx',
-            'version': 'xxx',
-            'description': 'Something about esszet \u00df',
-            'long_description': 'More things about esszet \u00df',
-        }
-
-        cmd = self._get_cmd(metadata)
-        cmd.ensure_finalized()
-        cmd.strict = True
-        inputs = Inputs('1', 'tarek', 'y')
-        register_module.input = inputs.__call__
-        # let's run the command
-        try:
-            cmd.run()
-        finally:
-            del register_module.input
-
-    def test_register_invalid_long_description(self, monkeypatch):
-        pytest.importorskip('docutils')
-        description = ':funkie:`str`'  # mimic Sphinx-specific markup
-        metadata = {
-            'url': 'xxx',
-            'author': 'xxx',
-            'author_email': 'xxx',
-            'name': 'xxx',
-            'version': 'xxx',
-            'long_description': description,
-        }
-        cmd = self._get_cmd(metadata)
-        cmd.ensure_finalized()
-        cmd.strict = True
-        inputs = Inputs('2', 'tarek', 'tarek@ziade.org')
-        monkeypatch.setattr(register_module, 'input', inputs, raising=False)
-
-        with pytest.raises(DistutilsSetupError):
-            cmd.run()
-
-    def test_list_classifiers(self, caplog):
-        cmd = self._get_cmd()
-        cmd.list_classifiers = True
-        cmd.run()
-        assert caplog.messages == ['running check', 'xxx']
-
-    def test_show_response(self, caplog):
-        # test that the --show-response option returns a well-formatted response
-        cmd = self._get_cmd()
-        inputs = Inputs('1', 'tarek', 'y')
-        register_module.input = inputs.__call__
-        cmd.show_response = True
-        try:
-            cmd.run()
-        finally:
-            del register_module.input
-
-        assert caplog.messages[3] == 75 * '-' + '\nxxx\n' + 75 * '-'
diff --git a/distutils/tests/test_upload.py b/distutils/tests/test_upload.py
deleted file mode 100644
index 56df209c73..0000000000
--- a/distutils/tests/test_upload.py
+++ /dev/null
@@ -1,215 +0,0 @@
-"""Tests for distutils.command.upload."""
-
-import os
-import unittest.mock as mock
-from distutils.command import upload as upload_mod
-from distutils.command.upload import upload
-from distutils.core import Distribution
-from distutils.errors import DistutilsError
-from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase
-from urllib.request import HTTPError
-
-import pytest
-
-PYPIRC_LONG_PASSWORD = """\
-[distutils]
-
-index-servers =
-    server1
-    server2
-
-[server1]
-username:me
-password:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
-
-[server2]
-username:meagain
-password: secret
-realm:acme
-repository:http://another.pypi/
-"""
-
-
-PYPIRC_NOPASSWORD = """\
-[distutils]
-
-index-servers =
-    server1
-
-[server1]
-username:me
-"""
-
-
-class FakeOpen:
-    def __init__(self, url, msg=None, code=None):
-        self.url = url
-        if not isinstance(url, str):
-            self.req = url
-        else:
-            self.req = None
-        self.msg = msg or 'OK'
-        self.code = code or 200
-
-    def getheader(self, name, default=None):
-        return {
-            'content-type': 'text/plain; charset=utf-8',
-        }.get(name.lower(), default)
-
-    def read(self):
-        return b'xyzzy'
-
-    def getcode(self):
-        return self.code
-
-
-@pytest.fixture(autouse=True)
-def urlopen(request, monkeypatch):
-    self = request.instance
-    monkeypatch.setattr(upload_mod, 'urlopen', self._urlopen)
-    self.next_msg = self.next_code = None
-
-
-class TestUpload(BasePyPIRCCommandTestCase):
-    def _urlopen(self, url):
-        self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code)
-        return self.last_open
-
-    def test_finalize_options(self):
-        # new format
-        self.write_file(self.rc, PYPIRC)
-        dist = Distribution()
-        cmd = upload(dist)
-        cmd.finalize_options()
-        for attr, waited in (
-            ('username', 'me'),
-            ('password', 'secret'),
-            ('realm', 'pypi'),
-            ('repository', 'https://upload.pypi.org/legacy/'),
-        ):
-            assert getattr(cmd, attr) == waited
-
-    def test_saved_password(self):
-        # file with no password
-        self.write_file(self.rc, PYPIRC_NOPASSWORD)
-
-        # make sure it passes
-        dist = Distribution()
-        cmd = upload(dist)
-        cmd.finalize_options()
-        assert cmd.password is None
-
-        # make sure we get it as well, if another command
-        # initialized it at the dist level
-        dist.password = 'xxx'
-        cmd = upload(dist)
-        cmd.finalize_options()
-        assert cmd.password == 'xxx'
-
-    def test_upload(self, caplog):
-        tmp = self.mkdtemp()
-        path = os.path.join(tmp, 'xxx')
-        self.write_file(path)
-        command, pyversion, filename = 'xxx', '2.6', path
-        dist_files = [(command, pyversion, filename)]
-        self.write_file(self.rc, PYPIRC_LONG_PASSWORD)
-
-        # let's run it
-        pkg_dir, dist = self.create_dist(dist_files=dist_files)
-        cmd = upload(dist)
-        cmd.show_response = True
-        cmd.ensure_finalized()
-        cmd.run()
-
-        # what did we send?
-        headers = dict(self.last_open.req.headers)
-        assert int(headers['Content-length']) >= 2162
-        content_type = headers['Content-type']
-        assert content_type.startswith('multipart/form-data')
-        assert self.last_open.req.get_method() == 'POST'
-        expected_url = 'https://upload.pypi.org/legacy/'
-        assert self.last_open.req.get_full_url() == expected_url
-        data = self.last_open.req.data
-        assert b'xxx' in data
-        assert b'protocol_version' in data
-        assert b'sha256_digest' in data
-        assert (
-            b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf'
-            b'6860' in data
-        )
-        if b'md5_digest' in data:
-            assert b'f561aaf6ef0bf14d4208bb46a4ccb3ad' in data
-        if b'blake2_256_digest' in data:
-            assert (
-                b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be'
-                b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc'
-                b'ce443f1534330a' in data
-            )
-
-        # The PyPI response body was echoed
-        results = caplog.messages
-        assert results[-1] == 75 * '-' + '\nxyzzy\n' + 75 * '-'
-
-    # bpo-32304: archives whose last byte was b'\r' were corrupted due to
-    # normalization intended for Mac OS 9.
-    def test_upload_correct_cr(self):
-        # content that ends with \r should not be modified.
-        tmp = self.mkdtemp()
-        path = os.path.join(tmp, 'xxx')
-        self.write_file(path, content='yy\r')
-        command, pyversion, filename = 'xxx', '2.6', path
-        dist_files = [(command, pyversion, filename)]
-        self.write_file(self.rc, PYPIRC_LONG_PASSWORD)
-
-        # other fields that ended with \r used to be modified, now are
-        # preserved.
-        pkg_dir, dist = self.create_dist(
-            dist_files=dist_files, description='long description\r'
-        )
-        cmd = upload(dist)
-        cmd.show_response = True
-        cmd.ensure_finalized()
-        cmd.run()
-
-        headers = dict(self.last_open.req.headers)
-        assert int(headers['Content-length']) >= 2172
-        assert b'long description\r' in self.last_open.req.data
-
-    def test_upload_fails(self, caplog):
-        self.next_msg = "Not Found"
-        self.next_code = 404
-        with pytest.raises(DistutilsError):
-            self.test_upload(caplog)
-
-    @pytest.mark.parametrize(
-        'exception,expected,raised_exception',
-        [
-            (OSError('oserror'), 'oserror', OSError),
-            pytest.param(
-                HTTPError('url', 400, 'httperror', {}, None),
-                'Upload failed (400): httperror',
-                DistutilsError,
-                id="HTTP 400",
-            ),
-        ],
-    )
-    def test_wrong_exception_order(self, exception, expected, raised_exception, caplog):
-        tmp = self.mkdtemp()
-        path = os.path.join(tmp, 'xxx')
-        self.write_file(path)
-        dist_files = [('xxx', '2.6', path)]  # command, pyversion, filename
-        self.write_file(self.rc, PYPIRC_LONG_PASSWORD)
-
-        pkg_dir, dist = self.create_dist(dist_files=dist_files)
-
-        with mock.patch(
-            'distutils.command.upload.urlopen',
-            new=mock.Mock(side_effect=exception),
-        ):
-            with pytest.raises(raised_exception):
-                cmd = upload(dist)
-                cmd.ensure_finalized()
-                cmd.run()
-            results = caplog.messages
-            assert expected in results[-1]
-            caplog.clear()

From 962835d068af4d993f2195e7a6ffd4f70548b9f4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 6 Sep 2024 12:20:34 -0400
Subject: [PATCH 1149/1761] Remove reliance on distutils commands for the
 external dependencies check.

---
 setuptools/tests/test_virtualenv.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py
index cdc10f5004..b02949baf9 100644
--- a/setuptools/tests/test_virtualenv.py
+++ b/setuptools/tests/test_virtualenv.py
@@ -110,5 +110,4 @@ def test_no_missing_dependencies(bare_venv, request):
     Quick and dirty test to ensure all external dependencies are vendored.
     """
     setuptools_dir = request.config.rootdir
-    for command in ('upload',):  # sorted(distutils.command.__all__):
-        bare_venv.run(['python', 'setup.py', command, '-h'], cwd=setuptools_dir)
+    bare_venv.run(['python', 'setup.py', '--help'], cwd=setuptools_dir)
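
A rough standalone sketch of the same idea (hypothetical helper names,
not the test itself): running `setup.py --help` in a bare environment
imports setuptools and everything it vendors, so a missing vendored
dependency surfaces as a non-zero exit.

    import subprocess

    def check_vendored_dependencies(python_exe, project_dir):
        # `setup.py --help` imports setuptools; if a dependency was not
        # vendored, the import fails and the interpreter exits non-zero,
        # which check=True turns into CalledProcessError.
        subprocess.run(
            [python_exe, 'setup.py', '--help'],
            cwd=project_dir,
            check=True,
        )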

From bddd2591d35cf59b2a0d54156b195c334ab7dc8a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 6 Sep 2024 12:49:42 -0400
Subject: [PATCH 1150/1761] Remove Borland compiler.

---
 distutils/bcppcompiler.py | 396 --------------------------------------
 1 file changed, 396 deletions(-)
 delete mode 100644 distutils/bcppcompiler.py

diff --git a/distutils/bcppcompiler.py b/distutils/bcppcompiler.py
deleted file mode 100644
index 9157b43328..0000000000
--- a/distutils/bcppcompiler.py
+++ /dev/null
@@ -1,396 +0,0 @@
-"""distutils.bcppcompiler
-
-Contains BCPPCompiler, an implementation of the abstract CCompiler class
-for the Borland C++ compiler.
-"""
-
-# This implementation by Lyle Johnson, based on the original msvccompiler.py
-# module and using the directions originally published by Gordon Williams.
-
-# XXX looks like there's a LOT of overlap between these two classes:
-# someone should sit down and factor out the common code as
-# WindowsCCompiler!  --GPW
-
-import os
-import warnings
-
-from ._log import log
-from ._modified import newer
-from .ccompiler import CCompiler, gen_preprocess_options
-from .errors import (
-    CompileError,
-    DistutilsExecError,
-    LibError,
-    LinkError,
-    UnknownFileError,
-)
-from .file_util import write_file
-
-warnings.warn(
-    "bcppcompiler is deprecated and slated to be removed "
-    "in the future. Please discontinue use or file an issue "
-    "with pypa/distutils describing your use case.",
-    DeprecationWarning,
-)
-
-
-class BCPPCompiler(CCompiler):
-    """Concrete class that implements an interface to the Borland C/C++
-    compiler, as defined by the CCompiler abstract class.
-    """
-
-    compiler_type = 'bcpp'
-
-    # Just set this so CCompiler's constructor doesn't barf.  We currently
-    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
-    # as it really isn't necessary for this sort of single-compiler class.
-    # Would be nice to have a consistent interface with UnixCCompiler,
-    # though, so it's worth thinking about.
-    executables = {}
-
-    # Private class data (need to distinguish C from C++ source for compiler)
-    _c_extensions = ['.c']
-    _cpp_extensions = ['.cc', '.cpp', '.cxx']
-
-    # Needed for the filename generation methods provided by the
-    # base class, CCompiler.
-    src_extensions = _c_extensions + _cpp_extensions
-    obj_extension = '.obj'
-    static_lib_extension = '.lib'
-    shared_lib_extension = '.dll'
-    static_lib_format = shared_lib_format = '%s%s'
-    exe_extension = '.exe'
-
-    def __init__(self, verbose=False, dry_run=False, force=False):
-        super().__init__(verbose, dry_run, force)
-
-        # These executables are assumed to all be in the path.
-        # Borland doesn't seem to use any special registry settings to
-        # indicate their installation locations.
-
-        self.cc = "bcc32.exe"
-        self.linker = "ilink32.exe"
-        self.lib = "tlib.exe"
-
-        self.preprocess_options = None
-        self.compile_options = ['/tWM', '/O2', '/q', '/g0']
-        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']
-
-        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
-        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
-        self.ldflags_static = []
-        self.ldflags_exe = ['/Gn', '/q', '/x']
-        self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r']
-
-    # -- Worker methods ------------------------------------------------
-
-    def compile(
-        self,
-        sources,
-        output_dir=None,
-        macros=None,
-        include_dirs=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        depends=None,
-    ):
-        macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
-            output_dir, macros, include_dirs, sources, depends, extra_postargs
-        )
-        compile_opts = extra_preargs or []
-        compile_opts.append('-c')
-        if debug:
-            compile_opts.extend(self.compile_options_debug)
-        else:
-            compile_opts.extend(self.compile_options)
-
-        for obj in objects:
-            try:
-                src, ext = build[obj]
-            except KeyError:
-                continue
-            # XXX why do the normpath here?
-            src = os.path.normpath(src)
-            obj = os.path.normpath(obj)
-            # XXX _setup_compile() did a mkpath() too but before the normpath.
-            # Is it possible to skip the normpath?
-            self.mkpath(os.path.dirname(obj))
-
-            if ext == '.res':
-                # This is already a binary file -- skip it.
-                continue  # the 'for' loop
-            if ext == '.rc':
-                # This needs to be compiled to a .res file -- do it now.
-                try:
-                    self.spawn(["brcc32", "-fo", obj, src])
-                except DistutilsExecError as msg:
-                    raise CompileError(msg)
-                continue  # the 'for' loop
-
-            # The next two are both for the real compiler.
-            if ext in self._c_extensions:
-                input_opt = ""
-            elif ext in self._cpp_extensions:
-                input_opt = "-P"
-            else:
-                # Unknown file type -- no extra options.  The compiler
-                # will probably fail, but let it just in case this is a
-                # file the compiler recognizes even if we don't.
-                input_opt = ""
-
-            output_opt = "-o" + obj
-
-            # Compiler command line syntax is: "bcc32 [options] file(s)".
-            # Note that the source file names must appear at the end of
-            # the command line.
-            try:
-                self.spawn(
-                    [self.cc]
-                    + compile_opts
-                    + pp_opts
-                    + [input_opt, output_opt]
-                    + extra_postargs
-                    + [src]
-                )
-            except DistutilsExecError as msg:
-                raise CompileError(msg)
-
-        return objects
-
-    # compile ()
-
-    def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
-    ):
-        (objects, output_dir) = self._fix_object_args(objects, output_dir)
-        output_filename = self.library_filename(output_libname, output_dir=output_dir)
-
-        if self._need_link(objects, output_filename):
-            lib_args = [output_filename, '/u'] + objects
-            if debug:
-                pass  # XXX what goes here?
-            try:
-                self.spawn([self.lib] + lib_args)
-            except DistutilsExecError as msg:
-                raise LibError(msg)
-        else:
-            log.debug("skipping %s (up-to-date)", output_filename)
-
-    # create_static_lib ()
-
-    def link(  # noqa: C901
-        self,
-        target_desc,
-        objects,
-        output_filename,
-        output_dir=None,
-        libraries=None,
-        library_dirs=None,
-        runtime_library_dirs=None,
-        export_symbols=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        build_temp=None,
-        target_lang=None,
-    ):
-        # XXX this ignores 'build_temp'!  should follow the lead of
-        # msvccompiler.py
-
-        (objects, output_dir) = self._fix_object_args(objects, output_dir)
-        (libraries, library_dirs, runtime_library_dirs) = self._fix_lib_args(
-            libraries, library_dirs, runtime_library_dirs
-        )
-
-        if runtime_library_dirs:
-            log.warning(
-                "I don't know what to do with 'runtime_library_dirs': %s",
-                str(runtime_library_dirs),
-            )
-
-        if output_dir is not None:
-            output_filename = os.path.join(output_dir, output_filename)
-
-        if self._need_link(objects, output_filename):
-            # Figure out linker args based on type of target.
-            if target_desc == CCompiler.EXECUTABLE:
-                startup_obj = 'c0w32'
-                if debug:
-                    ld_args = self.ldflags_exe_debug[:]
-                else:
-                    ld_args = self.ldflags_exe[:]
-            else:
-                startup_obj = 'c0d32'
-                if debug:
-                    ld_args = self.ldflags_shared_debug[:]
-                else:
-                    ld_args = self.ldflags_shared[:]
-
-            # Create a temporary exports file for use by the linker
-            if export_symbols is None:
-                def_file = ''
-            else:
-                head, tail = os.path.split(output_filename)
-                modname, ext = os.path.splitext(tail)
-                temp_dir = os.path.dirname(objects[0])  # preserve tree structure
-                def_file = os.path.join(temp_dir, f'{modname}.def')
-                contents = ['EXPORTS']
-                contents.extend(f'  {sym}=_{sym}' for sym in export_symbols)
-                self.execute(write_file, (def_file, contents), f"writing {def_file}")
-
-            # Borland C++ has problems with '/' in paths
-            objects2 = map(os.path.normpath, objects)
-            # split objects in .obj and .res files
-            # Borland C++ needs them at different positions in the command line
-            objects = [startup_obj]
-            resources = []
-            for file in objects2:
-                (base, ext) = os.path.splitext(os.path.normcase(file))
-                if ext == '.res':
-                    resources.append(file)
-                else:
-                    objects.append(file)
-
-            for ell in library_dirs:
-                ld_args.append(f"/L{os.path.normpath(ell)}")
-            ld_args.append("/L.")  # we sometimes use relative paths
-
-            # list of object files
-            ld_args.extend(objects)
-
-            # XXX the command-line syntax for Borland C++ is a bit wonky;
-            # certain filenames are jammed together in one big string, but
-            # comma-delimited.  This doesn't mesh too well with the
-            # Unix-centric attitude (with a DOS/Windows quoting hack) of
-            # 'spawn()', so constructing the argument list is a bit
-            # awkward.  Note that doing the obvious thing and jamming all
-            # the filenames and commas into one argument would be wrong,
-            # because 'spawn()' would quote any filenames with spaces in
-            # them.  Arghghh!.  Apparently it works fine as coded...
-
-            # name of dll/exe file
-            ld_args.extend([',', output_filename])
-            # no map file and start libraries
-            ld_args.append(',,')
-
-            for lib in libraries:
-                # see if we find it and if there is a bcpp specific lib
-                # (xxx_bcpp.lib)
-                libfile = self.find_library_file(library_dirs, lib, debug)
-                if libfile is None:
-                    ld_args.append(lib)
-                    # probably a BCPP internal library -- don't warn
-                else:
-                    # full name which prefers bcpp_xxx.lib over xxx.lib
-                    ld_args.append(libfile)
-
-            # some default libraries
-            ld_args.extend(('import32', 'cw32mt'))
-
-            # def file for export symbols
-            ld_args.extend([',', def_file])
-            # add resource files
-            ld_args.append(',')
-            ld_args.extend(resources)
-
-            if extra_preargs:
-                ld_args[:0] = extra_preargs
-            if extra_postargs:
-                ld_args.extend(extra_postargs)
-
-            self.mkpath(os.path.dirname(output_filename))
-            try:
-                self.spawn([self.linker] + ld_args)
-            except DistutilsExecError as msg:
-                raise LinkError(msg)
-
-        else:
-            log.debug("skipping %s (up-to-date)", output_filename)
-
-    # link ()
-
-    # -- Miscellaneous methods -----------------------------------------
-
-    def find_library_file(self, dirs, lib, debug=False):
-        # List of effective library names to try, in order of preference:
-        # xxx_bcpp.lib is better than xxx.lib
-        # and xxx_d.lib is better than xxx.lib if debug is set
-        #
-        # The "_bcpp" suffix is to handle a Python installation for people
-        # with multiple compilers (primarily Distutils hackers, I suspect
-        # ;-).  The idea is they'd have one static library for each
-        # compiler they care about, since (almost?) every Windows compiler
-        # seems to have a different format for static libraries.
-        if debug:
-            dlib = lib + "_d"
-            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
-        else:
-            try_names = (lib + "_bcpp", lib)
-
-        for dir in dirs:
-            for name in try_names:
-                libfile = os.path.join(dir, self.library_filename(name))
-                if os.path.exists(libfile):
-                    return libfile
-        else:
-            # Oops, didn't find it in *any* of 'dirs'
-            return None
-
-    # overwrite the one from CCompiler to support rc and res-files
-    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
-        if output_dir is None:
-            output_dir = ''
-        obj_names = []
-        for src_name in source_filenames:
-            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
-            (base, ext) = os.path.splitext(os.path.normcase(src_name))
-            if ext not in (self.src_extensions + ['.rc', '.res']):
-                raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')")
-            if strip_dir:
-                base = os.path.basename(base)
-            if ext == '.res':
-                # these can go unchanged
-                obj_names.append(os.path.join(output_dir, base + ext))
-            elif ext == '.rc':
-                # these need to be compiled to .res-files
-                obj_names.append(os.path.join(output_dir, base + '.res'))
-            else:
-                obj_names.append(os.path.join(output_dir, base + self.obj_extension))
-        return obj_names
-
-    # object_filenames ()
-
-    def preprocess(
-        self,
-        source,
-        output_file=None,
-        macros=None,
-        include_dirs=None,
-        extra_preargs=None,
-        extra_postargs=None,
-    ):
-        (_, macros, include_dirs) = self._fix_compile_args(None, macros, include_dirs)
-        pp_opts = gen_preprocess_options(macros, include_dirs)
-        pp_args = ['cpp32.exe'] + pp_opts
-        if output_file is not None:
-            pp_args.append('-o' + output_file)
-        if extra_preargs:
-            pp_args[:0] = extra_preargs
-        if extra_postargs:
-            pp_args.extend(extra_postargs)
-        pp_args.append(source)
-
-        # We need to preprocess: either we're being forced to, or the
-        # source file is newer than the target (or the target doesn't
-        # exist).
-        if self.force or output_file is None or newer(source, output_file):
-            if output_file:
-                self.mkpath(os.path.dirname(output_file))
-            try:
-                self.spawn(pp_args)
-            except DistutilsExecError as msg:
-                print(msg)
-                raise CompileError(msg)
-
-    # preprocess()
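
The name-preference logic above can be captured in a tiny standalone
sketch (a hypothetical helper, not distutils API), returning candidate
basenames most-preferred first::

    def bcpp_candidates(lib, debug=False):
        # Compiler-specific names first, debug variants before release
        # ones, the plain name last.
        if debug:
            dlib = lib + "_d"
            return [dlib + "_bcpp", lib + "_bcpp", dlib, lib]
        return [lib + "_bcpp", lib]

    # bcpp_candidates("zlib", debug=True)
    # -> ['zlib_d_bcpp', 'zlib_bcpp', 'zlib_d', 'zlib']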

From 4ecf617a4b4482186570b295acd0db593fa24e10 Mon Sep 17 00:00:00 2001
From: Naveen M K 
Date: Thu, 11 Jul 2024 20:43:06 +0530
Subject: [PATCH 1151/1761] cygwinccompiler: Get the compilers from sysconfig

On the CLANG64 environment of MSYS2, we should use clang
instead of gcc. This patch gets the compilers from sysconfig,
where they were recorded when Python was built.

Without this patch, the build fails while checking whether the
compiler is a Cygwin one.
---
 distutils/cygwinccompiler.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py
index 18b1b3557b..3c67524e6d 100644
--- a/distutils/cygwinccompiler.py
+++ b/distutils/cygwinccompiler.py
@@ -21,6 +21,7 @@
     DistutilsPlatformError,
 )
 from .file_util import write_file
+from .sysconfig import get_config_vars
 from .unixccompiler import UnixCCompiler
 from .version import LooseVersion, suppress_known_deprecation
 
@@ -61,8 +62,12 @@ def __init__(self, verbose=False, dry_run=False, force=False):
                 "Compiling may fail because of undefined preprocessor macros."
             )
 
-        self.cc = os.environ.get('CC', 'gcc')
-        self.cxx = os.environ.get('CXX', 'g++')
+        self.cc, self.cxx = get_config_vars('CC', 'CXX')
+
+        # Override 'CC' and 'CXX' environment variables for
+        # building using MINGW compiler for MSVC python.
+        self.cc = os.environ.get('CC', self.cc or 'gcc')
+        self.cxx = os.environ.get('CXX', self.cxx or 'g++')
 
         self.linker_dll = self.cc
         self.linker_dll_cxx = self.cxx
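
As a minimal standalone sketch of the lookup order this change
establishes (environment variable first, then the interpreter's
build-time configuration, then the historical default)::

    import os
    import sysconfig

    # sysconfig exposes the CC/CXX values recorded when Python was
    # built; on MSVC builds they may be unset, hence the fallbacks.
    cc = os.environ.get('CC', sysconfig.get_config_var('CC') or 'gcc')
    cxx = os.environ.get('CXX', sysconfig.get_config_var('CXX') or 'g++')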

From a675458e1a7d6ae81d0d441338a74dc98ffc5a61 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 7 Sep 2024 10:16:01 -0400
Subject: [PATCH 1152/1761] Allow the workflow to be triggered manually.

---
 .github/workflows/main.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index ac0ff69e22..441b93efab 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -10,6 +10,7 @@ on:
     # required if branches-ignore is supplied (jaraco/skeleton#103)
     - '**'
   pull_request:
+  workflow_dispatch:
 
 permissions:
   contents: read

From 1aaddc1e9ada945cdbe6677fcf9944242b08fca2 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 10 Sep 2024 17:32:56 +0800
Subject: [PATCH 1153/1761] make setuptools inline literal

---
 docs/userguide/datafiles.rst | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 4eca7e4303..48356563ba 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -8,7 +8,7 @@ are placed in a platform-specific location.  However, the most common use case
 for data files distributed with a package is for use *by* the package, usually
 by including the data files **inside the package directory**.
 
-Setuptools focuses on this most common type of data files and offers three ways
+``Setuptools`` focuses on this most common type of data files and offers three ways
 of specifying which files should be included in your packages, as described in
 the following section.
 
@@ -172,7 +172,7 @@ file, nor require to be added by a revision control system plugin.
 
 .. note::
         If your glob patterns use paths, you *must* use a forward slash (``/``) as
-        the path separator, even if you are on Windows.  Setuptools automatically
+        the path separator, even if you are on Windows. ``Setuptools`` automatically
         converts slashes to appropriate platform-specific separators at build time.
 
 .. important::
@@ -450,7 +450,7 @@ With :ref:`package-data`, the configuration might look like this:
             }
         )
 
-In other words, we allow Setuptools to scan for namespace packages in the ``src`` directory,
+In other words, we allow ``Setuptools`` to scan for namespace packages in the ``src`` directory,
 which enables the ``data`` directory to be identified, and then, we separately specify data
 files for the root package ``mypkg``, and the namespace package ``data`` under the package
 ``mypkg``.

From 70b5ec518dddef4a017972fa288b82fb30235095 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 10 Sep 2024 18:50:22 +0800
Subject: [PATCH 1154/1761] add clarification of data files

---
 docs/userguide/datafiles.rst | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 48356563ba..000b0324ee 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -2,11 +2,12 @@
 Data Files Support
 ====================
 
-Old packaging installation methods in the Python ecosystem
-have traditionally allowed installation of "data files", which
-are placed in a platform-specific location.  However, the most common use case
-for data files distributed with a package is for use *by* the package, usually
-by including the data files **inside the package directory**.
+Old packaging installation methods in the Python ecosystem have
+traditionally allowed the inclusion of "data files" (i.e. files beyond
+:ref:`the default set ` ), which are placed in a platform-specific
+location. However, the most common use case for data files distributed
+with a package is for use *by* the package, usually by including the
+data files **inside the package directory**.
 
 ``Setuptools`` focuses on this most common type of data files and offers three ways
 of specifying which files should be included in your packages, as described in

From 0a5667ea33c8182f4582b5015c90ee61665f7bfc Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 10 Sep 2024 19:10:02 +0800
Subject: [PATCH 1155/1761] add index to second level titles

---
 docs/userguide/datafiles.rst | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 000b0324ee..5c9800b42e 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -3,7 +3,7 @@ Data Files Support
 ====================
 
 Old packaging installation methods in the Python ecosystem have
-traditionally allowed the inclusion of "data files" (i.e. files beyond
+traditionally allowed the inclusion of "data files" (files beyond
 :ref:`the default set ` ), which are placed in a platform-specific
 location. However, the most common use case for data files distributed
 with a package is for use *by* the package, usually by including the
@@ -20,8 +20,8 @@ Configuration Options
 
 .. _include-package-data:
 
-include_package_data
---------------------
+1. ``include_package_data``
+---------------------------
 
 First, you can use the ``include_package_data`` keyword.
 For example, if the package tree looks like this::
@@ -102,8 +102,8 @@ your package, provided:
 
 .. _package-data:
 
-package_data
-------------
+2. ``package_data``
+-------------------
 
 By default, ``include_package_data`` considers **all** non ``.py`` files found inside
 the package directory (``src/mypkg`` in this case) as data files, and includes those that
@@ -272,8 +272,8 @@ we specify that ``data1.rst`` from ``mypkg1`` alone should be captured as well.
 
 .. _exclude-package-data:
 
-exclude_package_data
---------------------
+3. ``exclude_package_data``
+---------------------------
 
 Sometimes, the ``include_package_data`` or ``package_data`` options alone
 aren't sufficient to precisely define what files you want included. For example,

From a377af03cf1b32e23e3a4b2120b35a1884b13a27 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 10 Sep 2024 19:16:05 +0800
Subject: [PATCH 1156/1761] fix case sensitivity in toml

---
 docs/userguide/datafiles.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 5c9800b42e..7f8476c1d2 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -94,7 +94,7 @@ your package, provided:
 
 .. note::
    .. versionadded:: v61.0.0
-      The default value for ``tool.setuptools.include-package-data`` is ``True``
+      The default value for ``tool.setuptools.include-package-data`` is ``true``
       when projects are configured via ``pyproject.toml``.
       This behaviour differs from ``setup.cfg`` and ``setup.py``
       (where ``include_package_data=False`` by default), which was not changed
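
The lowercase spelling matters because TOML booleans are case-sensitive;
a quick check with the standard-library parser (Python 3.11+) makes the
difference concrete::

    import tomllib

    tomllib.loads("include-package-data = true")
    # -> {'include-package-data': True}

    # tomllib.loads("include-package-data = True") would raise
    # tomllib.TOMLDecodeError: 'True' is not valid TOML.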

From 1a4a1297d445eba3c0517b7d1523e0ba38696d4e Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 10 Sep 2024 19:31:41 +0800
Subject: [PATCH 1157/1761] adjust section positioning

---
 docs/userguide/datafiles.rst | 42 ++++++++++++++++++++----------------
 1 file changed, 23 insertions(+), 19 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 7f8476c1d2..aa329d53e8 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -24,6 +24,7 @@ Configuration Options
 ---------------------------
 
 First, you can use the ``include_package_data`` keyword.
+
 For example, if the package tree looks like this::
 
     project_root_directory
@@ -36,7 +37,25 @@ For example, if the package tree looks like this::
             ├── data1.txt
             └── data2.txt
 
-and you supply this configuration:
+When at least one of the following conditions is met:
+
+1. These files are included via the :ref:`MANIFEST.in ` file,
+   like so::
+
+        include src/mypkg/*.txt
+        include src/mypkg/*.rst
+
+2. They are being tracked by a revision control system such as Git, Mercurial
+   or SVN, **AND** you have configured an appropriate plugin such as
+   :pypi:`setuptools-scm` or :pypi:`setuptools-svn`.
+   (See the section below on :ref:`Adding Support for Revision
+   Control Systems` for information on how to configure such plugins.)
+
+then all the ``.txt`` and ``.rst`` files will be included into
+the source distribution.
+
+To further include them into the ``wheels``, you can need to use the
+``include_package_data`` keyword:
 
 .. tab:: pyproject.toml
 
@@ -44,8 +63,8 @@ and you supply this configuration:
 
         [tool.setuptools]
         # ...
-        # By default, include-package-data is true in pyproject.toml, so you do
-        # NOT have to specify this line.
+        # By default, include-package-data is true in pyproject.toml,
+        # so you do NOT have to specify this line.
         include-package-data = true
 
         [tool.setuptools.packages.find]
@@ -77,27 +96,12 @@ and you supply this configuration:
         include_package_data=True
     )
 
-then all the ``.txt`` and ``.rst`` files will be automatically installed with
-your package, provided:
-
-1. These files are included via the :ref:`MANIFEST.in ` file,
-   like so::
-
-        include src/mypkg/*.txt
-        include src/mypkg/*.rst
-
-2. OR, they are being tracked by a revision control system such as Git, Mercurial
-   or SVN, and you have configured an appropriate plugin such as
-   :pypi:`setuptools-scm` or :pypi:`setuptools-svn`.
-   (See the section below on :ref:`Adding Support for Revision
-   Control Systems` for information on how to write such plugins.)
-
 .. note::
    .. versionadded:: v61.0.0
       The default value for ``tool.setuptools.include-package-data`` is ``true``
       when projects are configured via ``pyproject.toml``.
       This behaviour differs from ``setup.cfg`` and ``setup.py``
-      (where ``include_package_data=False`` by default), which was not changed
+      (where ``include_package_data`` is ``False`` by default), which was not changed
       to ensure backwards compatibility with existing projects.
 
 .. _package-data:
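
Taken together, a minimal sketch (hypothetical ``src``-layout project)
pairs the ``MANIFEST.in`` selection with the keyword so the ``.txt`` and
``.rst`` files reach both the sdist and the wheel::

    # MANIFEST.in:
    #     include src/mypkg/*.txt
    #     include src/mypkg/*.rst

    from setuptools import find_packages, setup

    setup(
        name="mypkg",
        packages=find_packages(where="src"),
        package_dir={"": "src"},
        # Without this, the MANIFEST.in-selected files stop at the sdist.
        include_package_data=True,
    )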

From d1e4b118269bfd78e6866de81fe7bc73905d3dd6 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 10 Sep 2024 19:43:27 +0800
Subject: [PATCH 1158/1761] fix typo

---
 docs/userguide/datafiles.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index aa329d53e8..99f2e123fa 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -37,7 +37,7 @@ For example, if the package tree looks like this::
             ├── data1.txt
             └── data2.txt
 
-When at least one of the following conditions is met:
+When **at least one** of the following conditions is met:
 
 1. These files are included via the :ref:`MANIFEST.in ` file,
    like so::
@@ -54,7 +54,7 @@ When at least one of the following conditions is met:
 then all the ``.txt`` and ``.rst`` files will be included into
 the source distribution.
 
-To further include them into the ``wheels``, you can need to use the
+To further include them into the ``wheels``, you can use the
 ``include_package_data`` keyword:
 
 .. tab:: pyproject.toml

From a079e8b143f2a687851925b23e4e04af543a8c4e Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 10 Sep 2024 19:44:08 +0800
Subject: [PATCH 1159/1761] revision control system -> version control system

---
 docs/userguide/datafiles.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 99f2e123fa..5ee4bde136 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -45,7 +45,7 @@ When **at least one** of the following conditions is met:
         include src/mypkg/*.txt
         include src/mypkg/*.rst
 
-2. They are being tracked by a revision control system such as Git, Mercurial
+2. They are being tracked by a version control system such as Git, Mercurial
    or SVN, **AND** you have configured an appropriate plugin such as
    :pypi:`setuptools-scm` or :pypi:`setuptools-svn`.
    (See the section below on :ref:`Adding Support for Revision

From 8513d29bc8e291948164736b9ee4381d76628e32 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 10 Sep 2024 14:57:04 +0200
Subject: [PATCH 1160/1761] Fix a couple typos found by codespell

---
 docs/development/developer-guide.rst | 2 +-
 mypy.ini                             | 2 +-
 setuptools/command/easy_install.py   | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/development/developer-guide.rst b/docs/development/developer-guide.rst
index a9b3ad3bab..ae1b58f2f5 100644
--- a/docs/development/developer-guide.rst
+++ b/docs/development/developer-guide.rst
@@ -138,7 +138,7 @@ Code conventions and other practices
 ------------------------------------
 
 Setuptools utilizes the `skeleton `_
-framework as a foundation for sharing re-usable maintenance tasks
+framework as a foundation for sharing reusable maintenance tasks
 across different projects in the ecosystem.
 
 This also means that the project adheres to the same coding conventions
diff --git a/mypy.ini b/mypy.ini
index 6891d0d2d0..7de7e5a508 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -48,7 +48,7 @@ disable_error_code = import-not-found
 #  - jaraco.path: https://github.com/jaraco/jaraco.path/issues/2
 #  - jaraco.test: https://github.com/jaraco/jaraco.test/issues/7
 #  - jaraco.text: https://github.com/jaraco/jaraco.text/issues/17
-#  - wheel: does not intend on exposing a programatic API https://github.com/pypa/wheel/pull/610#issuecomment-2081687671
+#  - wheel: does not intend on exposing a programmatic API https://github.com/pypa/wheel/pull/610#issuecomment-2081687671
 [mypy-distutils.*,jaraco.develop,jaraco.envs,jaraco.packaging.*,jaraco.path,jaraco.test.*,jaraco.text,wheel.*]
 ignore_missing_imports = True
 
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 3f7fc17a88..25a9eee937 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -2022,11 +2022,11 @@ def is_python_script(script_text, filename):
 
 try:
     from os import (
-        chmod as _chmod,  # pyright: ignore[reportAssignmentType] # Loosing type-safety w/ pyright, but that's ok
+        chmod as _chmod,  # pyright: ignore[reportAssignmentType] # Losing type-safety w/ pyright, but that's ok
     )
 except ImportError:
     # Jython compatibility
-    def _chmod(*args: object, **kwargs: object) -> None:  # type: ignore[misc] # Mypy re-uses the imported definition anyway
+    def _chmod(*args: object, **kwargs: object) -> None:  # type: ignore[misc] # Mypy reuses the imported definition anyway
         pass
 
 

From 46fd8db91815234da4f6a90a648f8607f4f08e04 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Wed, 11 Sep 2024 09:51:14 +0800
Subject: [PATCH 1161/1761] revert version control -> revision control

---
 docs/userguide/datafiles.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 5ee4bde136..99f2e123fa 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -45,7 +45,7 @@ When **at least one** of the following conditions is met:
         include src/mypkg/*.txt
         include src/mypkg/*.rst
 
-2. They are being tracked by a version control system such as Git, Mercurial
+2. They are being tracked by a revision control system such as Git, Mercurial
    or SVN, **AND** you have configured an appropriate plugin such as
    :pypi:`setuptools-scm` or :pypi:`setuptools-svn`.
    (See the section below on :ref:`Adding Support for Revision

From 3a8430008f102c9e7e781ec908ccbbf202fcd748 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Wed, 11 Sep 2024 09:54:18 +0800
Subject: [PATCH 1162/1761] clarify usage of setuptools vs Setuptools

---
 docs/userguide/datafiles.rst | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 99f2e123fa..8e2ea0d6d1 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -9,7 +9,7 @@ location. However, the most common use case for data files distributed
 with a package is for use *by* the package, usually by including the
 data files **inside the package directory**.
 
-``Setuptools`` focuses on this most common type of data files and offers three ways
+Setuptools focuses on this most common type of data files and offers three ways
 of specifying which files should be included in your packages, as described in
 the following section.
 
@@ -177,7 +177,7 @@ file, nor require to be added by a revision control system plugin.
 
 .. note::
         If your glob patterns use paths, you *must* use a forward slash (``/``) as
-        the path separator, even if you are on Windows. ``Setuptools`` automatically
+        the path separator, even if you are on Windows. ``setuptools`` automatically
         converts slashes to appropriate platform-specific separators at build time.
 
 .. important::
@@ -455,7 +455,7 @@ With :ref:`package-data`, the configuration might look like this:
             }
         )
 
-In other words, we allow ``Setuptools`` to scan for namespace packages in the ``src`` directory,
+In other words, we allow ``setuptools`` to scan for namespace packages in the ``src`` directory,
 which enables the ``data`` directory to be identified, and then, we separately specify data
 files for the root package ``mypkg``, and the namespace package ``data`` under the package
 ``mypkg``.

From 0a7fb2cde8f586b54b7da49cf3f093165232cd36 Mon Sep 17 00:00:00 2001
From: DWesl <22566757+DWesl@users.noreply.github.com>
Date: Wed, 11 Sep 2024 10:31:38 -0400
Subject: [PATCH 1163/1761] TST: Remove the part of the new test checking old
 behavior.

Not the best solution, but I don't know how to check rpath in a manner that doesn't crash.
---
 distutils/tests/test_build_ext.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index d7296f2c18..27ccc7ab1e 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -96,7 +96,7 @@ class TestBuildExt(TempdirManager):
     def build_ext(self, *args, **kwargs):
         return build_ext(*args, **kwargs)
 
-    @pytest.mark.parametrize("copy_so", [False, True])
+    @pytest.mark.parametrize("copy_so", [False])
     def test_build_ext(self, copy_so):
         missing_compiler_executable()
         copy_xxmodule_c(self.tmp_dir)

From 32138db92eed5f6ce6180acabdc605db547acd24 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 12 Aug 2024 13:50:04 -0400
Subject: [PATCH 1164/1761] Update platformdirs to >= 4.2.2

---
 newsfragments/4560.misc.rst | 1 +
 pyproject.toml              | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 newsfragments/4560.misc.rst

diff --git a/newsfragments/4560.misc.rst b/newsfragments/4560.misc.rst
new file mode 100644
index 0000000000..0878f09abd
--- /dev/null
+++ b/newsfragments/4560.misc.rst
@@ -0,0 +1 @@
+Bumped declared ``platformdirs`` dependency to ``>= 4.2.2`` to help platforms lacking ``ctypes`` support install setuptools seamlessly -- by :user:`Avasam`
diff --git a/pyproject.toml b/pyproject.toml
index f623e16445..79dc7e8466 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -99,7 +99,7 @@ core = [
 	"wheel>=0.43.0",
 
 	# pkg_resources
-	"platformdirs >= 2.6.2",
+	"platformdirs >= 4.2.2", # Made ctypes optional (see #4461)
 
 	# for distutils
 	"jaraco.collections",

From c009de87ec226f9aee9fd1a1f9100874404d3871 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 11 Sep 2024 17:05:14 -0400
Subject: [PATCH 1165/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/compat/py38.py          | 1 +
 distutils/tests/test_build_ext.py | 8 ++++++--
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/distutils/compat/py38.py b/distutils/compat/py38.py
index 2d44211147..03ec73ef0e 100644
--- a/distutils/compat/py38.py
+++ b/distutils/compat/py38.py
@@ -14,6 +14,7 @@ def removeprefix(self, prefix):
             return self[len(prefix) :]
         else:
             return self[:]
+
 else:
 
     def removesuffix(self, suffix):
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index 27ccc7ab1e..f88d216cb2 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -112,7 +112,9 @@ def test_build_ext(self, copy_so):
                     runtime_library_dirs=['/usr/lib'],
                 )
             elif sys.platform == 'linux':
-                libz_so = {os.path.realpath(name) for name in glob.iglob('/usr/lib*/libz.so*')}
+                libz_so = {
+                    os.path.realpath(name) for name in glob.iglob('/usr/lib*/libz.so*')
+                }
                 libz_so = sorted(libz_so, key=lambda lib_path: len(lib_path))
                 shutil.copyfile(libz_so[-1], '/tmp/libxx_z.so')
 
@@ -167,10 +169,12 @@ def _test_xx(copy_so):
                 ["readelf", "-d", xx.__file__], universal_newlines=True
             )
             import pprint
+
             pprint.pprint(so_headers)
             rpaths = [
                 rpath
-                for line in so_headers.split("\n") if "RPATH" in line or "RUNPATH" in line
+                for line in so_headers.split("\n")
+                if "RPATH" in line or "RUNPATH" in line
                 for rpath in line.split()[2][1:-1].split(":")
             ]
             if not copy_so:

From 81b766c06cc83679c4a04c2bfa6d2c8cc559bf33 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 11 Sep 2024 18:14:38 -0400
Subject: [PATCH 1166/1761] Fix an incompatibility (and source of merge
 conflicts) with projects using Ruff/isort.

Remove extra line after imports in conf.py (jaraco/skeleton#147)
---
 docs/conf.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docs/conf.py b/docs/conf.py
index d5745d6298..240329c360 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-
 extensions = [
     'sphinx.ext.autodoc',
     'jaraco.packaging.sphinx',

From 3fe8c5ba792fd58a5a24eef4e8a845f3b5dd6c2c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 11 Sep 2024 18:14:58 -0400
Subject: [PATCH 1167/1761] Add Python 3.13 and 3.14 into the matrix.
 (jaraco/skeleton#146)

---
 .github/workflows/main.yml | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 441b93efab..251b9c1d82 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -36,7 +36,7 @@ jobs:
       matrix:
         python:
         - "3.8"
-        - "3.12"
+        - "3.13"
         platform:
         - ubuntu-latest
         - macos-latest
@@ -48,10 +48,14 @@ jobs:
           platform: ubuntu-latest
         - python: "3.11"
           platform: ubuntu-latest
+        - python: "3.12"
+          platform: ubuntu-latest
+        - python: "3.14"
+          platform: ubuntu-latest
         - python: pypy3.10
           platform: ubuntu-latest
     runs-on: ${{ matrix.platform }}
-    continue-on-error: ${{ matrix.python == '3.13' }}
+    continue-on-error: ${{ matrix.python == '3.14' }}
     steps:
       - uses: actions/checkout@v4
       - name: Setup Python

From a0339c1ef84032b47997fdcd503332a52ffd88bf Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sat, 14 Sep 2024 19:39:30 -0400
Subject: [PATCH 1168/1761] Fix cross-platform compilation using
 `distutils._msvccompiler.MSVCCompiler` Actually use the `plat_name` param in
 `MSVCCompiler.initialize` Added tests

---
 distutils/_msvccompiler.py           |  2 +-
 distutils/tests/test_msvccompiler.py | 24 ++++++++++++++++++++++++
 newsfragments/298.bugfix.rst         |  1 +
 3 files changed, 26 insertions(+), 1 deletion(-)
 create mode 100644 newsfragments/298.bugfix.rst

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index e7652218d8..97b067c686 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -284,7 +284,7 @@ def initialize(self, plat_name=None):
                 f"--plat-name must be one of {tuple(_vcvars_names)}"
             )
 
-        plat_spec = _get_vcvars_spec(get_host_platform(), get_platform())
+        plat_spec = _get_vcvars_spec(get_host_platform(), plat_name)
 
         vc_env = _get_vc_env(plat_spec)
         if not vc_env:
diff --git a/distutils/tests/test_msvccompiler.py b/distutils/tests/test_msvccompiler.py
index 71129cae27..028dcdf5cc 100644
--- a/distutils/tests/test_msvccompiler.py
+++ b/distutils/tests/test_msvccompiler.py
@@ -2,11 +2,13 @@
 
 import os
 import sys
+import sysconfig
 import threading
 import unittest.mock as mock
 from distutils import _msvccompiler
 from distutils.errors import DistutilsPlatformError
 from distutils.tests import support
+from distutils.util import get_platform
 
 import pytest
 
@@ -28,6 +30,28 @@ def _find_vcvarsall(plat_spec):
                 'wont find this version',
             )
 
+    @pytest.mark.skipif(
+        not sysconfig.get_platform().startswith("win"),
+        reason="Only run test for non-mingw Windows platforms",
+    )
+    @pytest.mark.parametrize(
+        "plat_name, expected",
+        [
+            ("win-arm64", "win-arm64"),
+            ("win-amd64", "win-amd64"),
+            (None, get_platform()),
+        ],
+    )
+    def test_cross_platform_compilation_paths(self, monkeypatch, plat_name, expected):
+        compiler = _msvccompiler.MSVCCompiler()
+
+        # makes sure that the right target platform name is used
+        def _get_vcvars_spec(host_platform, platform):
+            assert platform == expected
+
+        monkeypatch.setattr(_msvccompiler, '_get_vcvars_spec', _get_vcvars_spec)
+        compiler.initialize(plat_name)
+
     @needs_winreg
     def test_get_vc_env_unicode(self):
         test_var = 'ṰḖṤṪ┅ṼẨṜ'
diff --git a/newsfragments/298.bugfix.rst b/newsfragments/298.bugfix.rst
new file mode 100644
index 0000000000..eeb10466bf
--- /dev/null
+++ b/newsfragments/298.bugfix.rst
@@ -0,0 +1 @@
+Fix cross-platform compilation using `distutils._msvccompiler.MSVCCompiler` -- by :user:`saschanaz` and :user:`Avasam` 
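
With the fix, the ``plat_name`` argument actually selects the vcvars
target, so cross-compiling for another architecture becomes explicit; a
minimal sketch (assumes Windows with a Visual Studio toolchain)::

    from distutils._msvccompiler import MSVCCompiler

    compiler = MSVCCompiler()
    # Previously get_platform() (the host) was used regardless of this
    # argument; now the requested target, e.g. arm64, is honored.
    compiler.initialize(plat_name="win-arm64")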

From d38d1a3e241f3c9a07dad4d1b70f5e1350b7b64e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 15 Sep 2024 10:26:46 -0400
Subject: [PATCH 1169/1761] Move the comment to the docstring.

---
 distutils/tests/test_msvccompiler.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/distutils/tests/test_msvccompiler.py b/distutils/tests/test_msvccompiler.py
index 028dcdf5cc..ceb15d3a63 100644
--- a/distutils/tests/test_msvccompiler.py
+++ b/distutils/tests/test_msvccompiler.py
@@ -43,9 +43,11 @@ def _find_vcvarsall(plat_spec):
         ],
     )
     def test_cross_platform_compilation_paths(self, monkeypatch, plat_name, expected):
+        """
+        Ensure a specified target platform is passed to _get_vcvars_spec.
+        """
         compiler = _msvccompiler.MSVCCompiler()
 
-        # makes sure that the right target platform name is used
         def _get_vcvars_spec(host_platform, platform):
             assert platform == expected
 

From f15beb8b79a0180c662e832e5517ab42ddb8c946 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 15 Sep 2024 10:27:33 -0400
Subject: [PATCH 1170/1761] Use double-backticks for rst compatibility.

---
 newsfragments/298.bugfix.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/298.bugfix.rst b/newsfragments/298.bugfix.rst
index eeb10466bf..a357d34f25 100644
--- a/newsfragments/298.bugfix.rst
+++ b/newsfragments/298.bugfix.rst
@@ -1 +1 @@
-Fix cross-platform compilation using `distutils._msvccompiler.MSVCCompiler` -- by :user:`saschanaz` and :user:`Avasam` 
+Fix cross-platform compilation using ``distutils._msvccompiler.MSVCCompiler`` -- by :user:`saschanaz` and :user:`Avasam` 

From a56a8f9cb798eec1c023242db15a7b3e27c72911 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sat, 14 Sep 2024 19:39:30 -0400
Subject: [PATCH 1171/1761] Fix cross-platform compilation using
 `distutils._msvccompiler.MSVCCompiler`.

Actually use the `plat_name` param in `MSVCCompiler.initialize`.

Selective cherry-pick of pypa/distutils@a0339c1.

Closes pypa/setuptools#4648.
---
 newsfragments/4648.bugfix.rst          | 1 +
 setuptools/_distutils/_msvccompiler.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 newsfragments/4648.bugfix.rst

diff --git a/newsfragments/4648.bugfix.rst b/newsfragments/4648.bugfix.rst
new file mode 100644
index 0000000000..feb8edcc18
--- /dev/null
+++ b/newsfragments/4648.bugfix.rst
@@ -0,0 +1 @@
+Fix cross-platform compilation using ``distutils._msvccompiler.MSVCCompiler`` -- by :user:`saschanaz` and :user:`Avasam`
\ No newline at end of file
diff --git a/setuptools/_distutils/_msvccompiler.py b/setuptools/_distutils/_msvccompiler.py
index 03653929a8..bf10ae2365 100644
--- a/setuptools/_distutils/_msvccompiler.py
+++ b/setuptools/_distutils/_msvccompiler.py
@@ -284,7 +284,7 @@ def initialize(self, plat_name=None):
                 f"--plat-name must be one of {tuple(_vcvars_names)}"
             )
 
-        plat_spec = _get_vcvars_spec(get_host_platform(), get_platform())
+        plat_spec = _get_vcvars_spec(get_host_platform(), plat_name)
 
         vc_env = _get_vc_env(plat_spec)
         if not vc_env:

From 4c274911c59dd0161303d6cb991ec2a621ce1fb9 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 15 Sep 2024 10:44:04 -0400
Subject: [PATCH 1172/1761] =?UTF-8?q?Bump=20version:=2074.1.2=20=E2=86=92?=
 =?UTF-8?q?=2074.1.3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4648.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4648.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index cbb10b8211..5392694585 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 74.1.2
+current_version = 74.1.3
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 7b62a76e0c..94543403c6 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v74.1.3
+=======
+
+Bugfixes
+--------
+
+- Fix cross-platform compilation using ``distutils._msvccompiler.MSVCCompiler`` -- by :user:`saschanaz` and :user:`Avasam` (#4648)
+
+
 v74.1.2
 =======
 
diff --git a/newsfragments/4648.bugfix.rst b/newsfragments/4648.bugfix.rst
deleted file mode 100644
index feb8edcc18..0000000000
--- a/newsfragments/4648.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix cross-platform compilation using ``distutils._msvccompiler.MSVCCompiler`` -- by :user:`saschanaz` and :user:`Avasam`
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index d0b709d9be..f0848e20cc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "74.1.2"
+version = "74.1.3"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 378984e02edae91d5f49425da8436f8dd9152b8a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 15 Sep 2024 10:48:59 -0400
Subject: [PATCH 1173/1761] Remove news fragments, not useful here.

---
 newsfragments/298.bugfix.rst | 1 -
 1 file changed, 1 deletion(-)
 delete mode 100644 newsfragments/298.bugfix.rst

diff --git a/newsfragments/298.bugfix.rst b/newsfragments/298.bugfix.rst
deleted file mode 100644
index a357d34f25..0000000000
--- a/newsfragments/298.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix cross-platform compilation using ``distutils._msvccompiler.MSVCCompiler`` -- by :user:`saschanaz` and :user:`Avasam` 

From f15861e3ae2fb8a74efc528fa25754c329090611 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 15 Sep 2024 10:56:45 -0400
Subject: [PATCH 1174/1761] Add news fragment.

---
 newsfragments/4649.removal.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4649.removal.rst

diff --git a/newsfragments/4649.removal.rst b/newsfragments/4649.removal.rst
new file mode 100644
index 0000000000..f53388b230
--- /dev/null
+++ b/newsfragments/4649.removal.rst
@@ -0,0 +1 @@
+Merge with pypa/distutils@7283751. Removed the register and upload commands and the config module that backs them (pypa/distutils#294). Removed the Borland compiler. Replaced vendored dependencies with natural dependencies. Cygwin C compiler now gets compilers from sysconfig (pypa/distutils#296).
\ No newline at end of file

From 5e27b2a6e324e70bd82a045aef8f75c84a3d3b28 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 15 Sep 2024 11:34:10 -0400
Subject: [PATCH 1175/1761] =?UTF-8?q?Bump=20version:=2074.1.3=20=E2=86=92?=
 =?UTF-8?q?=2075.0.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg                    |  2 +-
 NEWS.rst                            | 16 ++++++++++++++++
 newsfragments/+1ac90f4a.feature.rst |  1 -
 newsfragments/2971.removal.rst      |  1 -
 newsfragments/4649.removal.rst      |  1 -
 pyproject.toml                      |  2 +-
 6 files changed, 18 insertions(+), 5 deletions(-)
 delete mode 100644 newsfragments/+1ac90f4a.feature.rst
 delete mode 100644 newsfragments/2971.removal.rst
 delete mode 100644 newsfragments/4649.removal.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 5392694585..3b56c2bf16 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 74.1.3
+current_version = 75.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 94543403c6..066180ecb8 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,19 @@
+v75.0.0
+=======
+
+Features
+--------
+
+- Declare also the dependencies used by distutils (adds jaraco.collections).
+
+
+Deprecations and Removals
+-------------------------
+
+- Removed upload_docs command. (#2971)
+- Merge with pypa/distutils@7283751. Removed the register and upload commands and the config module that backs them (pypa/distutils#294). Removed the Borland compiler. Replaced vendored dependencies with natural dependencies. Cygwin C compiler now gets compilers from sysconfig (pypa/distutils#296). (#4649)
+
+
 v74.1.3
 =======
 
diff --git a/newsfragments/+1ac90f4a.feature.rst b/newsfragments/+1ac90f4a.feature.rst
deleted file mode 100644
index d78e4a7f89..0000000000
--- a/newsfragments/+1ac90f4a.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Declare also the dependencies used by distutils (adds jaraco.collections).
\ No newline at end of file
diff --git a/newsfragments/2971.removal.rst b/newsfragments/2971.removal.rst
deleted file mode 100644
index 940453ab0a..0000000000
--- a/newsfragments/2971.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Removed upload_docs command.
\ No newline at end of file
diff --git a/newsfragments/4649.removal.rst b/newsfragments/4649.removal.rst
deleted file mode 100644
index f53388b230..0000000000
--- a/newsfragments/4649.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Merge with pypa/distutils@7283751. Removed the register and upload commands and the config module that backs them (pypa/distutils#294). Removed the Borland compiler. Replaced vendored dependencies with natural dependencies. Cygwin C compiler now gets compilers from sysconfig (pypa/distutils#296).
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 7a8ae98603..60982975ba 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "74.1.3"
+version = "75.0.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 37c3d27f0b1757b7273da4eda837890b8e5dd015 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 16 Sep 2024 08:38:11 -0400
Subject: [PATCH 1176/1761] Removed reference to upload_docs module in entry
 points.

Closes #4650
---
 newsfragments/4650.bugfix.rst | 1 +
 pyproject.toml                | 1 -
 2 files changed, 1 insertion(+), 1 deletion(-)
 create mode 100644 newsfragments/4650.bugfix.rst

diff --git a/newsfragments/4650.bugfix.rst b/newsfragments/4650.bugfix.rst
new file mode 100644
index 0000000000..6ba8ea256a
--- /dev/null
+++ b/newsfragments/4650.bugfix.rst
@@ -0,0 +1 @@
+Removed reference to upload_docs module in entry points.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 60982975ba..da04baae5d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -166,7 +166,6 @@ rotate = "setuptools.command.rotate:rotate"
 saveopts = "setuptools.command.saveopts:saveopts"
 sdist = "setuptools.command.sdist:sdist"
 setopt = "setuptools.command.setopt:setopt"
-upload_docs = "setuptools.command.upload_docs:upload_docs"
 
 [project.entry-points."setuptools.finalize_distribution_options"]
 parent_finalize = "setuptools.dist:_Distribution.finalize_options"
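
Stale entry points such as the removed one only fail at load time;
enumerating the group with the standard library (Python 3.10+
``entry_points(group=...)`` API) makes the registrations visible::

    from importlib.metadata import entry_points

    for ep in entry_points(group="distutils.commands"):
        print(ep.name, "->", ep.value)
    # Loading the stale 'upload_docs' entry would have raised
    # ModuleNotFoundError once setuptools.command.upload_docs was removed.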

From 3106af0512fe67464a8b5e7524c07fddf7717660 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 16 Sep 2024 08:38:27 -0400
Subject: [PATCH 1177/1761] =?UTF-8?q?Bump=20version:=2075.0.0=20=E2=86=92?=
 =?UTF-8?q?=2075.1.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 15 +++++++++++++++
 newsfragments/4617.feature.rst |  1 -
 newsfragments/4650.bugfix.rst  |  1 -
 pyproject.toml                 |  2 +-
 5 files changed, 17 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4617.feature.rst
 delete mode 100644 newsfragments/4650.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 3b56c2bf16..9166a09130 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.0.0
+current_version = 75.1.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 066180ecb8..313d6dfdc1 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,18 @@
+v75.1.0
+=======
+
+Features
+--------
+
+- Deprecated ``bdist_wheel.universal`` configuration. (#4617)
+
+
+Bugfixes
+--------
+
+- Removed reference to upload_docs module in entry points. (#4650)
+
+
 v75.0.0
 =======
 
diff --git a/newsfragments/4617.feature.rst b/newsfragments/4617.feature.rst
deleted file mode 100644
index 905e9fd497..0000000000
--- a/newsfragments/4617.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Deprecated ``bdist_wheel.universal`` configuration.
diff --git a/newsfragments/4650.bugfix.rst b/newsfragments/4650.bugfix.rst
deleted file mode 100644
index 6ba8ea256a..0000000000
--- a/newsfragments/4650.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Removed reference to upload_docs module in entry points.
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index da04baae5d..49206410ce 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.0.0"
+version = "75.1.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From bbd2167d06782ac9cb88b255a919429a7137593d Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 11:07:07 +0800
Subject: [PATCH 1178/1761] apply @abravalheri's suggestion as-is first

Co-authored-by: @abravalheri
---
 docs/userguide/datafiles.rst | 20 ++++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 8e2ea0d6d1..aaa8b9061f 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -2,12 +2,20 @@
 Data Files Support
 ====================
 
-Old packaging installation methods in the Python ecosystem have
-traditionally allowed the inclusion of "data files" (files beyond
-:ref:`the default set ` ), which are placed in a platform-specific
-location. However, the most common use case for data files distributed
-with a package is for use *by* the package, usually by including the
-data files **inside the package directory**.
+In the Python ecosystem, the term "data files" is used in various complex scenarios
+and can have nuanced meanings.
+For the purposes of this documentation, we define "data files" as non-Python files
+that are installed alongside Python modules and packages on the user's machine
+when they install a :term:`distribution ` from PyPI
+or via a ``.whl`` file.
+These files are typically intended for use at runtime by the package itself or
+to influence the behavior of other packages or systems.
+They may also be referred to as "resource files."
+Old packaging installation methods in the Python ecosystem
+have traditionally allowed installation of "data files", which
+are placed in a platform-specific location.  However, the most common use case
+for data files distributed with a package is for use *by* the package, usually
+by including the data files **inside the package directory**.
 
 Setuptools focuses on this most common type of data files and offers three ways
 of specifying which files should be included in your packages, as described in

From b07bde5eeddbc5480effc55a31d20e804018abe5 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 11:13:01 +0800
Subject: [PATCH 1179/1761] remove source files for simplicity

---
 docs/userguide/datafiles.rst | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index aaa8b9061f..57c336bb9d 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -8,9 +8,10 @@ For the purposes of this documentation, we define "data files" as non-Python fil
 that are installed alongside Python modules and packages on the user's machine
 when they install a :term:`distribution ` from PyPI
 or via a ``.whl`` file.
+
 These files are typically intended for use at runtime by the package itself or
 to influence the behavior of other packages or systems.
-They may also be referred to as "resource files."
+
 Old packaging installation methods in the Python ecosystem
 have traditionally allowed installation of "data files", which
 are placed in a platform-specific location.  However, the most common use case

From e19f2d6beb55963ad8d4bb966fbda8ba1df8f1bf Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 11:17:06 +0800
Subject: [PATCH 1180/1761] replace  with sdist or wheel

---
 docs/userguide/datafiles.rst | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 57c336bb9d..503c06b7db 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -3,13 +3,13 @@ Data Files Support
 ====================
 
 In the Python ecosystem, the term "data files" is used in various complex scenarios
-and can have nuanced meanings.
-For the purposes of this documentation, we define "data files" as non-Python files
-that are installed alongside Python modules and packages on the user's machine
-when they install a :term:`distribution ` from PyPI
-or via a ``.whl`` file.
+and can have nuanced meanings. For the purposes of this documentation,
+we define "data files" as non-Python files that are installed alongside Python
+modules and packages on the user's machine when they install a
+:term:`distribution ` from either source distribution
+or via a binary distribution (``.whl`` file for example).
 
-These files are typically intended for use at runtime by the package itself or
+These files are typically intended for use at **runtime** by the package itself or
 to influence the behavior of other packages or systems.
 
 Old packaging installation methods in the Python ecosystem

From cae1e68e09f6b3374df3d87f88ea1f4e6cede1bf Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 12:27:03 +0800
Subject: [PATCH 1181/1761] add sketch

---
 docs/userguide/datafiles.rst | 32 ++++++++++++++++++++++++++++++++
 1 file changed, 32 insertions(+)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 503c06b7db..01d3ce36a7 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -351,6 +351,38 @@ Any files that match these patterns will be *excluded* from installation,
 even if they were listed in ``package_data`` or were included as a result of using
 ``include_package_data``.
 
+.. _interplay_package_data_keywords:
+
+Interplay between these keywords
+--------------------------------
+
+TODO:
+- better formatted logic expression
+- check logic correctness (especially regarding parentheses)
+
+Meanwhile, to further clarify the interplay between these three keywords,
+to include certain data file into the source distribution, the following
+logic condition has two be met::
+
+    (m or p) and not (i or e)  # TODO: why "not i"?
+
+In plain language, the file should be selected by either ``package-data``
+or ``MANIFEST.in``, AND should not be excluded by ``exclude-package-data``.  # TODO: why "not i"?
+
+To include some data file into the ``.whl``::
+
+    (not e) and ((i and m) or p)
+
+In plain language, the file should not be excluded by ``exclude-package-data``
+(highest priority), and should be either: 1. selected by ``package-data``;
+2. selected by ``MANIFEST.in`` AND use ``include-package-data = true``.
+
+Notation::
+
+    i - "include-package-data = true" is set
+    e - file selected by "exclude-package-data"
+    p - file selected by "package-data"
+    m - "MANIFEST.in" exists and includes file
 
 Summary
 -------

From c46d0600415abc183856cd3aea7f93ef9615b2d7 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 14:03:27 +0800
Subject: [PATCH 1182/1761] update sketch

---
 docs/userguide/datafiles.rst | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 01d3ce36a7..174c31016c 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -364,10 +364,9 @@ Meanwhile, to further clarify the interplay between these three keywords,
 to include certain data file into the source distribution, the following
 logic condition has two be met::
 
-    (m or p) and not (i or e)  # TODO: why "not i"?
+    m or (p and not (i or e))  # TODO: why not i?
 
-In plain language, the file should be selected by either ``package-data``
-or ``MANIFEST.in``, AND should not be excluded by ``exclude-package-data``.  # TODO: why "not i"?
+In plain language, TODO:
 
 To include some data file into the ``.whl``::
 
@@ -382,7 +381,7 @@ Notation::
     i - "include-package-data = true" is set
     e - file selected by "exclude-package-data"
     p - file selected by "package-data"
-    m - "MANIFEST.in" exists and includes file
+    m - file included in "MANIFEST.in"
 
 Summary
 -------

From f32b975efbb3d9eb331841b84bfa58deaf2ac0a7 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 14:10:09 +0800
Subject: [PATCH 1183/1761] update legacy with latest setuptools

---
 docs/userguide/datafiles.rst | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 174c31016c..cda73faddb 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -356,24 +356,21 @@ even if they were listed in ``package_data`` or were included as a result of usi
 Interplay between these keywords
 --------------------------------
 
-TODO:
-- better formatted logic expression
-- check logic correctness (especially regarding parentheses)
-
 Meanwhile, to further clarify the interplay between these three keywords,
 to include certain data file into the source distribution, the following
 logic condition has two be met::
 
-    m or (p and not (i or e))  # TODO: why not i?
+    m or (p and not e)
 
-In plain language, TODO:
+In plain language, the file should be either: 1. included in ``MANIFEST.in``;
+or 2. selected by ``package-data`` AND not excluded by ``exclude-package-data``.
 
 To include some data file into the ``.whl``::
 
     (not e) and ((i and m) or p)
 
 In plain language, the file should not be excluded by ``exclude-package-data``
-(highest priority), and should be either: 1. selected by ``package-data``;
+(highest priority), and should be either: 1. selected by ``package-data``; or
 2. selected by ``MANIFEST.in`` AND use ``include-package-data = true``.
 
 Notation::

From 5481166422104ab8217d92d1c1e9b8620af507c2 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 14:16:23 +0800
Subject: [PATCH 1184/1761] fix typo

---
 docs/userguide/datafiles.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index cda73faddb..2b4e66c1dc 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -358,7 +358,7 @@ Interplay between these keywords
 
 Meanwhile, to further clarify the interplay between these three keywords,
 to include certain data file into the source distribution, the following
-logic condition has two be met::
+logic condition has to be met::
 
     m or (p and not e)
 
@@ -373,7 +373,7 @@ In plain language, the file should not be excluded by ``exclude-package-data``
 (highest priority), and should be either: 1. selected by ``package-data``; or
 2. selected by ``MANIFEST.in`` AND use ``include-package-data = true``.
 
-Notation::
+**Notation**::
 
     i - "include-package-data = true" is set
     e - file selected by "exclude-package-data"

From 041e23def94a360b9af2cd650d9896cd1ca39b97 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 14:26:54 +0800
Subject: [PATCH 1185/1761] add note for version difference

---
 docs/userguide/datafiles.rst | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 2b4e66c1dc..ad9b082be4 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -380,6 +380,12 @@ In plain language, the file should not be excluded by ``exclude-package-data``
     p - file selected by "package-data"
     m - file included in "MANIFEST.in"
 
+.. note::
+    Different versions of ``setuptools`` might behave differently. The above
+    description applies to versions after ``58.5.3`` (exclusive). For information
+    on the behavior of earlier versions and more details, please refer to the
+    `GitHub repository `_.
+
 Summary
 -------
 

From e447010119709bba54a52c7a265848d7ee7dd102 Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 22:34:41 +0800
Subject: [PATCH 1186/1761] remove note on bug behaviour < 58.5.3

---
 docs/userguide/datafiles.rst | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index ad9b082be4..2b4e66c1dc 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -380,12 +380,6 @@ In plain language, the file should not be excluded by ``exclude-package-data``
     p - file selected by "package-data"
     m - file included in "MANIFEST.in"
 
-.. note::
-    Different versions of ``setuptools`` might behave differently. The above
-    description applies to versions after ``58.5.3`` (exclusive). For information
-    on the behavior of earlier versions and more details, please refer to the
-    `GitHub repository `_.
-
 Summary
 -------
 

From 5eab47faeaf8d1239139d446886da417a95def8e Mon Sep 17 00:00:00 2001
From: "Haoyu (Daniel)" 
Date: Tue, 17 Sep 2024 22:40:12 +0800
Subject: [PATCH 1187/1761] remove custom notation

---
 docs/userguide/datafiles.rst | 23 +++++++++++------------
 1 file changed, 11 insertions(+), 12 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 2b4e66c1dc..280b9a0a35 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -360,25 +360,24 @@ Meanwhile, to further clarify the interplay between these three keywords,
 to include certain data file into the source distribution, the following
 logic condition has to be met::
 
-    m or (p and not e)
+    MANIFEST.in or (package-data and not exclude-package-data)
 
-In plain language, the file should be either: 1. included in ``MANIFEST.in``;
-or 2. selected by ``package-data`` AND not excluded by ``exclude-package-data``.
+In plain language, the file should be either:
+
+1. included in ``MANIFEST.in``; or
+
+2. selected by ``package-data`` AND not excluded by ``exclude-package-data``.
 
 To include some data file into the ``.whl``::
 
-    (not e) and ((i and m) or p)
+    (not exclude-package-data) and ((include-package-data and MANIFEST.in) or package-data)
 
-In plain language, the file should not be excluded by ``exclude-package-data``
-(highest priority), and should be either: 1. selected by ``package-data``; or
-2. selected by ``MANIFEST.in`` AND use ``include-package-data = true``.
+In other words, the file should not be excluded by ``exclude-package-data``
+(highest priority), AND should be either:
 
-**Notation**::
+1. selected by ``package-data``; or
 
-    i - "include-package-data = true" is set
-    e - file selected by "exclude-package-data"
-    p - file selected by "package-data"
-    m - file included in "MANIFEST.in"
+2. selected by ``MANIFEST.in`` AND use ``include-package-data = true``.
 
 Summary
 -------

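The two formulas above can be checked mechanically. A minimal Python sketch of
the documented rules (the predicate names are illustrative, not setuptools
APIs)::

    # Each flag describes one candidate file: matched by MANIFEST.in,
    # matched by package-data, matched by exclude-package-data, and
    # whether include-package-data = true is set.
    def in_sdist(manifest: bool, package_data: bool, exclude: bool) -> bool:
        return manifest or (package_data and not exclude)

    def in_wheel(include: bool, manifest: bool,
                 package_data: bool, exclude: bool) -> bool:
        return (not exclude) and ((include and manifest) or package_data)

    # A file listed only in MANIFEST.in always reaches the sdist, but
    # reaches the wheel only when include-package-data = true:
    assert in_sdist(manifest=True, package_data=False, exclude=False)
    assert in_wheel(True, manifest=True, package_data=False, exclude=False)
    assert not in_wheel(False, manifest=True, package_data=False, exclude=False)
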
From e74b501029ea3914cb4b8281bba0f93cba9169d3 Mon Sep 17 00:00:00 2001
From: Phil Elson 
Date: Fri, 13 Sep 2024 17:33:33 +0200
Subject: [PATCH 1188/1761] Implement the test for being able to pass dist-info
 dir

---
 setuptools/build_meta.py             |  5 ++++-
 setuptools/tests/test_bdist_wheel.py | 22 ++++++++++++++++++++++
 2 files changed, 26 insertions(+), 1 deletion(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index a6b85afc42..ecf434bbf3 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -417,9 +417,12 @@ def build_wheel(
         config_settings: _ConfigSettings = None,
         metadata_directory: StrPath | None = None,
     ):
+        cmd = ['bdist_wheel']
+        if metadata_directory:
+            cmd.extend(['--dist-info-dir', metadata_directory])
         with suppress_known_deprecation():
             return self._build_with_temp_dir(
-                ['bdist_wheel'],
+                cmd,
                 '.whl',
                 wheel_directory,
                 config_settings,
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 8b64e90f72..a127cc1b2d 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -619,3 +619,25 @@ def _fake_import(name: str, *args, **kwargs):
     monkeypatch.delitem(sys.modules, "setuptools.command.bdist_wheel")
 
     import setuptools.command.bdist_wheel  # noqa: F401
+
+
+def test_dist_info_provided(dummy_dist, monkeypatch, tmp_path):
+    monkeypatch.chdir(dummy_dist)
+    distinfo = tmp_path / "dummy_dist.dist-info"
+
+    distinfo.mkdir()
+    (distinfo / "METADATA").write_text("name: helloworld", encoding="utf-8")
+
+    # We don't control the metadata. According to PEP-517, "The hook MAY also
+    # create other files inside this directory, and a build frontend MUST
+    # preserve".
+    (distinfo / "FOO").write_text("bar", encoding="utf-8")
+
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True, dist_info_dir=str(distinfo)).run()
+    expected = {
+        "dummy_dist-1.0.dist-info/FOO",
+        "dummy_dist-1.0.dist-info/RECORD",
+    }
+    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+        # Check that all expected files are there.
+        assert set(wf.namelist()).intersection(expected) == expected

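For context, the frontend-side flow that this hook change enables looks
roughly like the sketch below. The directory names are placeholders, and the
calls assume the current working directory is a project root containing a
``pyproject.toml``::

    # Generate the metadata once, then hand the resulting .dist-info back
    # so build_wheel can reuse it instead of regenerating it.
    from setuptools import build_meta

    name = build_meta.prepare_metadata_for_build_wheel("build/meta")
    build_meta.build_wheel(
        "dist",
        metadata_directory=f"build/meta/{name}",
    )
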
From bc82d73b12b8260b9b2a3736cf7f3c603e379e93 Mon Sep 17 00:00:00 2001
From: Phil Elson 
Date: Fri, 13 Sep 2024 17:37:56 +0200
Subject: [PATCH 1189/1761] Implement the desired functionality

---
 setuptools/command/bdist_wheel.py | 23 ++++++++++++++++++++---
 1 file changed, 20 insertions(+), 3 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index fa97976fef..59c162dd9e 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -231,6 +231,13 @@ class bdist_wheel(Command):
             None,
             "Python tag (cp32|cp33|cpNN) for abi3 wheel tag [default: false]",
         ),
+        (
+            "dist-info-dir=",
+            None,
+            "directory where a pre-generated dist-info can be found (e.g. as a "
+            "result of calling the PEP517 'prepare_metadata_for_build_wheel' "
+            "method)",
+        ),
     ]
 
     boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
@@ -243,6 +250,7 @@ def initialize_options(self) -> None:
         self.format = "zip"
         self.keep_temp = False
         self.dist_dir: str | None = None
+        self.dist_info_dir = None
         self.egginfo_dir: str | None = None
         self.root_is_pure: bool | None = None
         self.skip_build = False
@@ -261,8 +269,9 @@ def finalize_options(self) -> None:
             bdist_base = self.get_finalized_command("bdist").bdist_base
             self.bdist_dir = os.path.join(bdist_base, "wheel")
 
-        egg_info = cast(egg_info_cls, self.distribution.get_command_obj("egg_info"))
-        egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`
+        if self.dist_info_dir is None:
+            egg_info = self.distribution.get_command_obj("egg_info")
+            egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`
 
         self.data_dir = self.wheel_dist_name + ".data"
         self.plat_name_supplied = bool(self.plat_name)
@@ -447,7 +456,15 @@ def run(self):
             f"{safer_version(self.distribution.get_version())}.dist-info"
         )
         distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
-        self.egg2dist(self.egginfo_dir, distinfo_dir)
+        if self.dist_info_dir:
+            # Use the given dist-info directly.
+            shutil.copytree(self.dist_info_dir, distinfo_dir)
+            # Egg info is still generated, so remove it now to avoid it getting
+            # copied into the wheel.
+            shutil.rmtree(self.egginfo_dir)
+        else:
+            # Convert the generated egg-info into dist-info.
+            self.egg2dist(self.egginfo_dir, distinfo_dir)
 
         self.write_wheelfile(distinfo_dir)
 

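Outside the PEP 517 hooks, the new option can also be driven directly. A
rough sketch, with a hypothetical project name and paths, run from a project
checkout::

    # Build a wheel from a pre-generated .dist-info, exercising the new
    # --dist-info-dir code path instead of the egg-info conversion.
    from setuptools.dist import Distribution
    from setuptools.command.bdist_wheel import bdist_wheel

    dist = Distribution({"name": "dummy_dist", "version": "1.0"})
    cmd = bdist_wheel(dist)
    cmd.dist_info_dir = "build/meta/dummy_dist-1.0.dist-info"  # reused metadata
    cmd.ensure_finalized()
    cmd.run()
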
From 283ce3b9eb8720a50948f4ee938b603d2f1e97d9 Mon Sep 17 00:00:00 2001
From: Phil Elson 
Date: Fri, 13 Sep 2024 17:46:44 +0200
Subject: [PATCH 1190/1761] Add a news fragment about the work

---
 newsfragments/1825.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/1825.bugfix.rst

diff --git a/newsfragments/1825.bugfix.rst b/newsfragments/1825.bugfix.rst
new file mode 100644
index 0000000000..1fd1e95a4f
--- /dev/null
+++ b/newsfragments/1825.bugfix.rst
@@ -0,0 +1 @@
+Enabled passing dist-info-dir to the bdist_wheel, and to the PEP-517 build backend. Therefore, metadata can now be injected prior to wheel building when following PEP-517 -- by :user:`pelson`

From 38ca0b579b841881794e909cd30866e1967d487c Mon Sep 17 00:00:00 2001
From: Phil Elson 
Date: Wed, 25 Sep 2024 12:00:41 +0200
Subject: [PATCH 1191/1761] Remove dead-code from setup.py

---
 setup.py | 29 -----------------------------
 1 file changed, 29 deletions(-)

diff --git a/setup.py b/setup.py
index 1cd9e36c15..7709a46e67 100755
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python
 
 import os
-import sys
 import textwrap
 
 import setuptools
@@ -10,33 +9,6 @@
 here = os.path.dirname(__file__)
 
 
-package_data = {
-    "": ["LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*"],
-    "setuptools": ['script (dev).tmpl', 'script.tmpl', 'site-patch.py'],
-}
-
-force_windows_specific_files = os.environ.get(
-    "SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES", "1"
-).lower() not in ("", "0", "false", "no")
-
-include_windows_files = sys.platform == 'win32' or force_windows_specific_files
-
-if include_windows_files:
-    package_data.setdefault('setuptools', []).extend(['*.exe'])
-    package_data.setdefault('setuptools.command', []).extend(['*.xml'])
-
-
-def pypi_link(pkg_filename):
-    """
-    Given the filename, including md5 fragment, construct the
-    dependency link for PyPI.
-    """
-    root = 'https://files.pythonhosted.org/packages/source'
-    name, sep, rest = pkg_filename.partition('-')
-    parts = root, name[0], name, pkg_filename
-    return '/'.join(parts)
-
-
 class install_with_pth(install):
     """
     Custom install command to install a .pth file for distutils patching.
@@ -84,7 +56,6 @@ def _restore_install_lib(self):
 
 setup_params = dict(
     cmdclass={'install': install_with_pth},
-    package_data=package_data,
 )
 
 if __name__ == '__main__':

From bd615ee524a3c1b68b06347958c7d92f83e8a259 Mon Sep 17 00:00:00 2001
From: Phil Elson 
Date: Mon, 16 Sep 2024 06:02:37 +0200
Subject: [PATCH 1192/1761] Test that there are no egg-info files accidentally
 leaking into the wheel

---
 newsfragments/1825.bugfix.rst        |  2 +-
 setuptools/command/bdist_wheel.py    |  1 -
 setuptools/tests/test_bdist_wheel.py | 11 +++++++----
 3 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/newsfragments/1825.bugfix.rst b/newsfragments/1825.bugfix.rst
index 1fd1e95a4f..99e8fdf22a 100644
--- a/newsfragments/1825.bugfix.rst
+++ b/newsfragments/1825.bugfix.rst
@@ -1 +1 @@
-Enabled passing dist-info-dir to the bdist_wheel, and to the PEP-517 build backend. Therefore, metadata can now be injected prior to wheel building when following PEP-517 -- by :user:`pelson`
+Allow passing dist-info-dir to the bdist_wheel command, as well as to the PEP-517 build backend. Metadata can therefore now be injected prior to wheel building when following PEP-517 -- by :user:`pelson`
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 59c162dd9e..a770de2e54 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -27,7 +27,6 @@
 
 from .. import Command, __version__
 from ..warnings import SetuptoolsDeprecationWarning
-from .egg_info import egg_info as egg_info_cls
 
 from distutils import log
 
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index a127cc1b2d..47200d0a26 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -633,11 +633,14 @@ def test_dist_info_provided(dummy_dist, monkeypatch, tmp_path):
     # preserve".
     (distinfo / "FOO").write_text("bar", encoding="utf-8")
 
-    bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True, dist_info_dir=str(distinfo)).run()
+    bdist_wheel_cmd(bdist_dir=str(tmp_path), dist_info_dir=str(distinfo)).run()
     expected = {
         "dummy_dist-1.0.dist-info/FOO",
         "dummy_dist-1.0.dist-info/RECORD",
     }
-    with ZipFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
-        # Check that all expected files are there.
-        assert set(wf.namelist()).intersection(expected) == expected
+    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
+        files_found = set(wf.namelist())
+    # Check that all expected files are there.
+    assert expected - files_found == set()
+    # Make sure there is no accidental egg-info bleeding into the wheel.
+    assert not [path for path in files_found if 'egg-info' in str(path)]

From eb81747c129f033ede333e97589ff4f8b600b6e0 Mon Sep 17 00:00:00 2001
From: Phil Elson 
Date: Wed, 25 Sep 2024 13:52:22 +0200
Subject: [PATCH 1193/1761] Restore the egg-info-cls casting

---
 setuptools/command/bdist_wheel.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index a770de2e54..83abe6e0c3 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -27,6 +27,7 @@
 
 from .. import Command, __version__
 from ..warnings import SetuptoolsDeprecationWarning
+from .egg_info import egg_info as egg_info_cls
 
 from distutils import log
 
@@ -269,7 +270,7 @@ def finalize_options(self) -> None:
             self.bdist_dir = os.path.join(bdist_base, "wheel")
 
         if self.dist_info_dir is None:
-            egg_info = self.distribution.get_command_obj("egg_info")
+            egg_info = cast(egg_info_cls, self.distribution.get_command_obj("egg_info"))
             egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`
 
         self.data_dir = self.wheel_dist_name + ".data"

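The restored ``cast`` exists purely for the type checker:
``get_command_obj`` is annotated as returning a plain ``Command``, and
``cast`` narrows that annotation with no runtime effect. A minimal,
self-contained illustration (toy classes, not the real distutils ones)::

    from typing import cast

    class Command: ...

    class egg_info(Command):  # stand-in for the real command class
        egg_info = "pkg.egg-info"

    def get_command_obj(name: str) -> Command:  # loosely typed, as in distutils
        return egg_info()

    cmd = cast(egg_info, get_command_obj("egg_info"))  # no runtime check or copy
    print(cmd.egg_info)  # the checker now accepts subclass attributes
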
From 2f3b273e534965d33fce9c5576ad89c4afbd8746 Mon Sep 17 00:00:00 2001
From: Phil Elson 
Date: Thu, 26 Sep 2024 07:55:43 +0200
Subject: [PATCH 1194/1761] Update newsfragments/1825.bugfix.rst

Co-authored-by: Anderson Bravalheri 
---
 newsfragments/1825.bugfix.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/1825.bugfix.rst b/newsfragments/1825.bugfix.rst
index 99e8fdf22a..226df0a936 100644
--- a/newsfragments/1825.bugfix.rst
+++ b/newsfragments/1825.bugfix.rst
@@ -1 +1 @@
-Allow passing dist-info-dir to the bdist_wheel command, as well as to the PEP-517 build backend. Metadata can therefore now be injected prior to wheel building when following PEP-517 -- by :user:`pelson`
+Re-use pre-existing ``.dist-info`` dir when creating wheels via the build backend APIs (PEP 517) and the `metadata_directory` argument is passed -- by :user:`pelson`.

From cf299fcb838f388a210569df7f5419fe762f26c7 Mon Sep 17 00:00:00 2001
From: Phil Elson 
Date: Thu, 26 Sep 2024 13:05:05 +0200
Subject: [PATCH 1195/1761] Update setuptools/command/bdist_wheel.py

Co-authored-by: Anderson Bravalheri 
---
 setuptools/command/bdist_wheel.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 83abe6e0c3..aeade98f6f 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -458,6 +458,7 @@ def run(self):
         distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
         if self.dist_info_dir:
             # Use the given dist-info directly.
+            log.debug(f"reusing {self.dist_info_dir}")
             shutil.copytree(self.dist_info_dir, distinfo_dir)
             # Egg info is still generated, so remove it now to avoid it getting
             # copied into the wheel.

From fc08e7ef01fea2033e7f1c33e51704574f392ca0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 26 Sep 2024 14:28:56 +0100
Subject: [PATCH 1196/1761] Fix RST in newsfragment

---
 newsfragments/1825.bugfix.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/1825.bugfix.rst b/newsfragments/1825.bugfix.rst
index 226df0a936..ff55d18725 100644
--- a/newsfragments/1825.bugfix.rst
+++ b/newsfragments/1825.bugfix.rst
@@ -1 +1 @@
-Re-use pre-existing ``.dist-info`` dir when creating wheels via the build backend APIs (PEP 517) and the `metadata_directory` argument is passed -- by :user:`pelson`.
+Re-use pre-existing ``.dist-info`` dir when creating wheels via the build backend APIs (PEP 517) and the ``metadata_directory`` argument is passed -- by :user:`pelson`.

From 1595318c54a0018cc8200bc3f07f0786da74ef3f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 26 Sep 2024 14:59:12 +0100
Subject: [PATCH 1197/1761] Update docs/userguide/datafiles.rst

---
 docs/userguide/datafiles.rst | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/docs/userguide/datafiles.rst b/docs/userguide/datafiles.rst
index 280b9a0a35..72a658ee9c 100644
--- a/docs/userguide/datafiles.rst
+++ b/docs/userguide/datafiles.rst
@@ -6,8 +6,7 @@ In the Python ecosystem, the term "data files" is used in various complex scenar
 and can have nuanced meanings. For the purposes of this documentation,
 we define "data files" as non-Python files that are installed alongside Python
 modules and packages on the user's machine when they install a
-:term:`distribution ` from either source distribution
-or via a binary distribution (``.whl`` file for example).
+:term:`distribution ` via :term:`wheel `.
 
 These files are typically intended for use at **runtime** by the package itself or
 to influence the behavior of other packages or systems.

From c328b85dcbe4eda92ff1afc6aef2922e94262193 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 26 Sep 2024 15:36:06 +0100
Subject: [PATCH 1198/1761] Remove trailing / from intersphinx_mapping to
 avoid verbose logs

---
 docs/conf.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index 4ea38e7490..20c2a8f099 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -191,7 +191,7 @@
 # Allow linking objects on other Sphinx sites seamlessly:
 intersphinx_mapping.update(
     # python=('https://docs.python.org/3', None),
-    python=('https://docs.python.org/3.11/', None),
+    python=('https://docs.python.org/3.11', None),
     # ^-- Python 3.11 is required because it still contains `distutils`.
     #     Just leaving it as `3` would imply 3.12+, but that causes an
     #     error with the cross references to distutils functions.
@@ -237,9 +237,9 @@
 intersphinx_mapping.update({
     'pip': ('https://pip.pypa.io/en/latest', None),
     'build': ('https://build.pypa.io/en/latest', None),
-    'PyPUG': ('https://packaging.python.org/en/latest/', None),
-    'packaging': ('https://packaging.pypa.io/en/latest/', None),
-    'twine': ('https://twine.readthedocs.io/en/stable/', None),
+    'PyPUG': ('https://packaging.python.org/en/latest', None),
+    'packaging': ('https://packaging.pypa.io/en/latest', None),
+    'twine': ('https://twine.readthedocs.io/en/stable', None),
     'importlib-resources': (
         'https://importlib-resources.readthedocs.io/en/latest',
         None,

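The convention after this change is base URLs without a trailing slash,
since intersphinx derives each ``objects.inv`` location from the base. A
condensed ``conf.py`` sketch of the resulting style::

    intersphinx_mapping = {
        "python": ("https://docs.python.org/3.11", None),
        "pip": ("https://pip.pypa.io/en/latest", None),
        "PyPUG": ("https://packaging.python.org/en/latest", None),
    }
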
From 433c263777c640bd52d8e9d78c092e94b8945257 Mon Sep 17 00:00:00 2001
From: Anders Theet 
Date: Tue, 1 Oct 2024 12:43:36 +0200
Subject: [PATCH 1199/1761] Use minimum requirement for jaraco.functools

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 49206410ce..fc4c213606 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -103,7 +103,7 @@ core = [
 
 	# for distutils
 	"jaraco.collections",
-	"jaraco.functools",
+	"jaraco.functools>=4",
 	"packaging",
 	"more_itertools",
 ]

From 3fc912a390f46f1163bdccdfe1be9b84401b96b4 Mon Sep 17 00:00:00 2001
From: Anders Theet 
Date: Tue, 8 Oct 2024 08:30:18 +0200
Subject: [PATCH 1200/1761] Use minimum requirement for jaraco.functools

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 9f528752ab..ce2c5988c5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,7 +20,7 @@ requires-python = ">=3.8"
 dependencies = [
 	# Setuptools must require these
 	"packaging",
-	"jaraco.functools",
+	"jaraco.functools >= 4",
 	"more_itertools",
 	"jaraco.collections",
 ]

From 258edabeb2dcc5dc3e62e9cf98926390e452340d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 7 Oct 2024 15:40:23 +0100
Subject: [PATCH 1201/1761] Postpone deprecation for now

It seems that there are many projects that haven't adapted yet, so
we should probably avoid disruption.

Evidence: https://github.com/search?q=%2Fauthor-email%7Cmaintainer-email%7Clong-description%7Clicense-file%2F+path%3Asetup.cfg&type=code
---
 setuptools/dist.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 68f877decd..7c516fefb8 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -504,7 +504,7 @@ def warn_dash_deprecation(self, opt, section):
                 versions. Please use the underscore name {underscore_opt!r} instead.
                 """,
                 see_docs="userguide/declarative_config.html",
-                due_date=(2024, 9, 26),
+                due_date=(2025, 3, 3),
                 # Warning initially introduced in 3 Mar 2021
             )
         return underscore_opt
@@ -529,7 +529,7 @@ def make_option_lowercase(self, opt, section):
             future versions. Please use lowercase {lowercase_opt!r} instead.
             """,
             see_docs="userguide/declarative_config.html",
-            due_date=(2024, 9, 26),
+            due_date=(2025, 3, 3),
             # Warning initially introduced in 6 Mar 2021
         )
         return lowercase_opt

From 35d1139126b9a5021e1d7fd819f5dbbec9ce950c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 11:35:26 +0100
Subject: [PATCH 1202/1761] For now avoid problems with pyproject-hooks

---
 pyproject.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 49206410ce..6cb4d9d17c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -56,8 +56,8 @@ test = [
 	"pytest-home >= 0.5",
 	"pytest-subprocess",
 
-	# workaround for pypa/setuptools#4333
-	"pyproject-hooks!=1.1",
+	# workaround for pypa/pyproject-hooks#206
+	"pyproject-hooks!=1.2",  # TODO: fix problem with egg-info, see #4670
 
 	"jaraco.test",
 ]

From a0a0e329161868703e3ef4cec86e72d748e5b9bf Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 11:48:15 +0100
Subject: [PATCH 1203/1761] Fix version specifier in workaround for
 pyproject-hooks

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 6cb4d9d17c..b9b6d441db 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -57,7 +57,7 @@ test = [
 	"pytest-subprocess",
 
 	# workaround for pypa/pyproject-hooks#206
-	"pyproject-hooks!=1.2",  # TODO: fix problem with egg-info, see #4670
+	"pyproject-hooks<1.1",  # TODO: fix problem with egg-info, see #4670
 
 	"jaraco.test",
 ]

From e2f467964ddce1c436652c99d8c6ffe493583edc Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 11:52:19 +0100
Subject: [PATCH 1204/1761] Add missing workaround in github actions

---
 .github/workflows/main.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index f90a4607b1..0c765a6bdd 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -100,7 +100,7 @@ jobs:
         run: |
           rm -rf dist
           # workaround for pypa/setuptools#4333
-          pipx run --pip-args 'pyproject-hooks!=1.1' build
+          pipx run --pip-args 'pyproject-hooks<1.1' build
           echo "PRE_BUILT_SETUPTOOLS_SDIST=$(ls dist/*.tar.gz)" >> $GITHUB_ENV
           echo "PRE_BUILT_SETUPTOOLS_WHEEL=$(ls dist/*.whl)" >> $GITHUB_ENV
           rm -rf setuptools.egg-info  # Avoid interfering with the other tests

From b5e40f31208ad23288172bd2b7c2a556244c858f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 11:46:22 +0100
Subject: [PATCH 1205/1761] Avoid iterating over entry-points while an empty
 .egg-info exists in sys.path

---
 setuptools/command/egg_info.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 280eb5e807..f4d3a2a57e 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -293,13 +293,17 @@ def delete_file(self, filename):
             os.unlink(filename)
 
     def run(self):
+        # Pre-load to avoid iterating over entry-points while an empty .egg-info
+        # exists in sys.path. See pypa/pyproject-hooks#206
+        writers = list(metadata.entry_points(group='egg_info.writers'))
+
         self.mkpath(self.egg_info)
         try:
             os.utime(self.egg_info, None)
         except OSError as e:
             msg = f"Cannot update time stamp of directory '{self.egg_info}'"
             raise distutils.errors.DistutilsFileError(msg) from e
-        for ep in metadata.entry_points(group='egg_info.writers'):
+        for ep in writers:
             writer = ep.load()
             writer(self, ep.name, os.path.join(self.egg_info, ep.name))
 

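The key detail is ordering: ``list()`` forces the entry-point query before
``mkpath`` creates the directory, so no lazy iteration happens while a
still-empty ``.egg-info`` is visible on ``sys.path``. A condensed sketch of
the pattern (Python 3.10+ ``importlib.metadata`` API; the writer call is
elided)::

    import os
    from importlib import metadata

    def run_egg_info(egg_info_dir: str) -> None:
        # Materialize the writers up front; iterating lazily with an empty
        # .egg-info on sys.path is what triggered pypa/pyproject-hooks#206.
        writers = list(metadata.entry_points(group="egg_info.writers"))
        os.makedirs(egg_info_dir, exist_ok=True)
        for ep in writers:
            writer = ep.load()
            target = os.path.join(egg_info_dir, ep.name)
            # ... each writer emits one metadata file at `target` (elided)
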
From fa66840443ae2b3bb0a3721879b55c57816fb7ba Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 11:49:38 +0100
Subject: [PATCH 1206/1761] Remove workaround

---
 .github/workflows/main.yml | 2 +-
 pyproject.toml             | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 0c765a6bdd..f90a4607b1 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -100,7 +100,7 @@ jobs:
         run: |
           rm -rf dist
           # workaround for pypa/setuptools#4333
-          pipx run --pip-args 'pyproject-hooks<1.1' build
+          pipx run --pip-args 'pyproject-hooks!=1.1' build
           echo "PRE_BUILT_SETUPTOOLS_SDIST=$(ls dist/*.tar.gz)" >> $GITHUB_ENV
           echo "PRE_BUILT_SETUPTOOLS_WHEEL=$(ls dist/*.whl)" >> $GITHUB_ENV
           rm -rf setuptools.egg-info  # Avoid interfering with the other tests
diff --git a/pyproject.toml b/pyproject.toml
index b9b6d441db..7c6cf36cbe 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -57,7 +57,7 @@ test = [
 	"pytest-subprocess",
 
 	# workaround for pypa/pyproject-hooks#206
-	"pyproject-hooks<1.1",  # TODO: fix problem with egg-info, see #4670
+	"pyproject-hooks!=1.1",
 
 	"jaraco.test",
 ]

From 28677ae4e4d1d6f5a4267be6df5bdbb024d89d20 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 16:42:40 +0100
Subject: [PATCH 1207/1761] Add news fragment

---
 newsfragments/4680.bugfix.rst | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 newsfragments/4680.bugfix.rst

diff --git a/newsfragments/4680.bugfix.rst b/newsfragments/4680.bugfix.rst
new file mode 100644
index 0000000000..7f5fd0aade
--- /dev/null
+++ b/newsfragments/4680.bugfix.rst
@@ -0,0 +1,4 @@
+Changed ``egg_info`` command to avoid adding an empty ``.egg-info`` while
+iterating over entry-points is available in ``sys.path``.
+This avoids triggering integration problems with ``importlib.metadata``/``importlib_metadata``
+(reference: pypa/pyproject-hooks#206).

From fcb04c3e0ac1a958541cae2ec63fdcd80e88c2ef Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 17:04:36 +0100
Subject: [PATCH 1208/1761] Fix newsfragment phrasing

---
 newsfragments/4680.bugfix.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/newsfragments/4680.bugfix.rst b/newsfragments/4680.bugfix.rst
index 7f5fd0aade..71cb0d4322 100644
--- a/newsfragments/4680.bugfix.rst
+++ b/newsfragments/4680.bugfix.rst
@@ -1,4 +1,4 @@
-Changed ``egg_info`` command to avoid adding an empty ``.egg-info`` while
-iterating over entry-points is available in ``sys.path``.
+Changed ``egg_info`` command to avoid adding an empty ``.egg-info`` directory
+while iterating over entry-points.
 This avoids triggering integration problems with ``importlib.metadata``/``importlib_metadata``
 (reference: pypa/pyproject-hooks#206).

From b828db4242966382f47fe521865ac8b1f12ace4b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 17:04:45 +0100
Subject: [PATCH 1209/1761] =?UTF-8?q?Bump=20version:=2075.1.0=20=E2=86=92?=
 =?UTF-8?q?=2075.1.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              |  2 +-
 NEWS.rst                      | 13 +++++++++++++
 newsfragments/1825.bugfix.rst |  1 -
 newsfragments/4680.bugfix.rst |  4 ----
 pyproject.toml                |  2 +-
 5 files changed, 15 insertions(+), 7 deletions(-)
 delete mode 100644 newsfragments/1825.bugfix.rst
 delete mode 100644 newsfragments/4680.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 9166a09130..5247cca130 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.1.0
+current_version = 75.1.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 313d6dfdc1..75ef319dd5 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,16 @@
+v75.1.1
+=======
+
+Bugfixes
+--------
+
+- Re-use pre-existing ``.dist-info`` dir when creating wheels via the build backend APIs (PEP 517) and the ``metadata_directory`` argument is passed -- by :user:`pelson`. (#1825)
+- Changed ``egg_info`` command to avoid adding an empty ``.egg-info`` directory
+  while iterating over entry-points.
+  This avoids triggering integration problems with ``importlib.metadata``/``importlib_metadata``
+  (reference: pypa/pyproject-hooks#206). (#4680)
+
+
 v75.1.0
 =======
 
diff --git a/newsfragments/1825.bugfix.rst b/newsfragments/1825.bugfix.rst
deleted file mode 100644
index ff55d18725..0000000000
--- a/newsfragments/1825.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Re-use pre-existing ``.dist-info`` dir when creating wheels via the build backend APIs (PEP 517) and the ``metadata_directory`` argument is passed -- by :user:`pelson`.
diff --git a/newsfragments/4680.bugfix.rst b/newsfragments/4680.bugfix.rst
deleted file mode 100644
index 71cb0d4322..0000000000
--- a/newsfragments/4680.bugfix.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Changed ``egg_info`` command to avoid adding an empty ``.egg-info`` directory
-while iterating over entry-points.
-This avoids triggering integration problems with ``importlib.metadata``/``importlib_metadata``
-(reference: pypa/pyproject-hooks#206).
diff --git a/pyproject.toml b/pyproject.toml
index 7c6cf36cbe..c27a988afd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.1.0"
+version = "75.1.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From d457d0e87889aefe2093cd79ab4d1ee35d3101e7 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 20 Aug 2024 12:57:29 -0400
Subject: [PATCH 1210/1761] Type sequence checks in setuptools/dist.py

---
 newsfragments/4578.bugfix.rst  |  1 +
 newsfragments/4578.feature.rst |  1 +
 setuptools/dist.py             | 48 ++++++++++++++++++++++++----------
 setuptools/tests/test_dist.py  |  8 +++---
 4 files changed, 40 insertions(+), 18 deletions(-)
 create mode 100644 newsfragments/4578.bugfix.rst
 create mode 100644 newsfragments/4578.feature.rst

diff --git a/newsfragments/4578.bugfix.rst b/newsfragments/4578.bugfix.rst
new file mode 100644
index 0000000000..e9bde46269
--- /dev/null
+++ b/newsfragments/4578.bugfix.rst
@@ -0,0 +1 @@
+Fix a `TypeError` when a ``Distribution``'s old included attribute was a `tuple` -- by :user:`Avasam`
diff --git a/newsfragments/4578.feature.rst b/newsfragments/4578.feature.rst
new file mode 100644
index 0000000000..48f57edce3
--- /dev/null
+++ b/newsfragments/4578.feature.rst
@@ -0,0 +1 @@
+Made errors when parsing ``Distribution`` data more explicit about the expected type (``tuple[str, ...] | list[str]``) -- by :user:`Avasam`
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 7c516fefb8..bb9a2a9951 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -8,7 +8,7 @@
 import sys
 from glob import iglob
 from pathlib import Path
-from typing import TYPE_CHECKING, MutableMapping
+from typing import TYPE_CHECKING, List, MutableMapping, NoReturn, Tuple, Union, overload
 
 from more_itertools import partition, unique_everseen
 from packaging.markers import InvalidMarker, Marker
@@ -21,6 +21,7 @@
     command as _,  # noqa: F401 # imported for side-effects
 )
 from ._importlib import metadata
+from ._reqs import _StrOrIter
 from .config import pyprojecttoml, setupcfg
 from .discovery import ConfigDiscovery
 from .monkey import get_unpatched
@@ -36,9 +37,22 @@
 from distutils.fancy_getopt import translate_longopt
 from distutils.util import strtobool
 
+if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
 __all__ = ['Distribution']
 
 sequence = tuple, list
+"""
+Supported iterable types that are known to be:
+- ordered (which `set` isn't)
+- not match a str (which `Sequence[str]` does)
+- not imply a nested type (like `dict`)
+for use with `isinstance`.
+"""
+_Sequence: TypeAlias = Union[Tuple[str, ...], List[str]]
+# This is how stringifying _Sequence would look in Python 3.10
+_requence_type_repr = "tuple[str, ...] | list[str]"
 
 
 def check_importable(dist, attr, value):
@@ -51,7 +65,7 @@ def check_importable(dist, attr, value):
         ) from e
 
 
-def assert_string_list(dist, attr, value):
+def assert_string_list(dist, attr: str, value: _Sequence) -> None:
     """Verify that value is a string list"""
     try:
         # verify that value is a list or tuple to exclude unordered
@@ -61,7 +75,7 @@ def assert_string_list(dist, attr, value):
         assert ''.join(value) != value
     except (TypeError, ValueError, AttributeError, AssertionError) as e:
         raise DistutilsSetupError(
-            "%r must be a list of strings (got %r)" % (attr, value)
+            f"{attr!r} must be of type <{_requence_type_repr}> (got {value!r})"
         ) from e
 
 
@@ -138,7 +152,11 @@ def invalid_unless_false(dist, attr, value):
     raise DistutilsSetupError(f"{attr} is invalid.")
 
 
-def check_requirements(dist, attr, value):
+@overload
+def check_requirements(dist, attr: str, value: set | dict) -> NoReturn: ...
+@overload
+def check_requirements(dist, attr: str, value: _StrOrIter) -> None: ...
+def check_requirements(dist, attr: str, value: _StrOrIter) -> None:
     """Verify that install_requires is a valid requirements list"""
     try:
         list(_reqs.parse(value))
@@ -146,10 +164,10 @@ def check_requirements(dist, attr, value):
             raise TypeError("Unordered types are not allowed")
     except (TypeError, ValueError) as error:
         tmpl = (
-            "{attr!r} must be a string or list of strings "
-            "containing valid project/version requirement specifiers; {error}"
+            f"{attr!r} must be a string or iterable of strings "
+            f"containing valid project/version requirement specifiers; {error}"
         )
-        raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error
+        raise DistutilsSetupError(tmpl) from error
 
 
 def check_specifier(dist, attr, value):
@@ -767,11 +785,11 @@ def has_contents_for(self, package):
 
         return False
 
-    def _exclude_misc(self, name, value):
+    def _exclude_misc(self, name: str, value: _Sequence) -> None:
         """Handle 'exclude()' for list/tuple attrs without a special handler"""
         if not isinstance(value, sequence):
             raise DistutilsSetupError(
-                "%s: setting must be a list or tuple (%r)" % (name, value)
+                f"{name}: setting must be of type <{_requence_type_repr}> (got {value!r})"
             )
         try:
             old = getattr(self, name)
@@ -784,11 +802,13 @@ def _exclude_misc(self, name, value):
         elif old:
             setattr(self, name, [item for item in old if item not in value])
 
-    def _include_misc(self, name, value):
+    def _include_misc(self, name: str, value: _Sequence) -> None:
         """Handle 'include()' for list/tuple attrs without a special handler"""
 
         if not isinstance(value, sequence):
-            raise DistutilsSetupError("%s: setting must be a list (%r)" % (name, value))
+            raise DistutilsSetupError(
+                f"{name}: setting must be of type <{_requence_type_repr}> (got {value!r})"
+            )
         try:
             old = getattr(self, name)
         except AttributeError as e:
@@ -801,7 +821,7 @@ def _include_misc(self, name, value):
             )
         else:
             new = [item for item in value if item not in old]
-            setattr(self, name, old + new)
+            setattr(self, name, list(old) + new)
 
     def exclude(self, **attrs):
         """Remove items from distribution that are named in keyword arguments
@@ -826,10 +846,10 @@ def exclude(self, **attrs):
             else:
                 self._exclude_misc(k, v)
 
-    def _exclude_packages(self, packages):
+    def _exclude_packages(self, packages: _Sequence) -> None:
         if not isinstance(packages, sequence):
             raise DistutilsSetupError(
-                "packages: setting must be a list or tuple (%r)" % (packages,)
+                f"packages: setting must be of type <{_requence_type_repr}> (got {packages!r})"
             )
         list(map(self.exclude_package, packages))
 
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 597785b519..fde0de99ac 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -118,8 +118,8 @@ def test_provides_extras_deterministic_order():
             'hello': '*.msg',
         },
         (
-            "\"values of 'package_data' dict\" "
-            "must be a list of strings (got '*.msg')"
+            "\"values of 'package_data' dict\" must be of type "
+            "<tuple[str, ...] | list[str]> (got '*.msg')"
         ),
     ),
     # Invalid value type (generators are single use)
@@ -128,8 +128,8 @@ def test_provides_extras_deterministic_order():
             'hello': (x for x in "generator"),
         },
         (
-            "\"values of 'package_data' dict\" must be a list of strings "
-            "(got <generator object"
+            "\"values of 'package_data' dict\" must be of type "
+            "<tuple[str, ...] | list[str]> (got <generator object"
         ),
     ),

From: Anderson Bravalheri
Date: Tue, 27 Aug 2024 11:32:59 +0100
Subject: [PATCH 1211/1761] Use variable msg instead of tmpl in setuptools/dist

---
 setuptools/dist.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index bb9a2a9951..b5d78aa37d 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -140,8 +140,7 @@ def _check_marker(marker):
 def assert_bool(dist, attr, value):
     """Verify that value is True, False, 0, or 1"""
     if bool(value) != value:
-        tmpl = "{attr!r} must be a boolean value (got {value!r})"
-        raise DistutilsSetupError(tmpl.format(attr=attr, value=value))
+        raise DistutilsSetupError(f"{attr!r} must be a boolean value (got {value!r})")
 
 
 def invalid_unless_false(dist, attr, value):
@@ -163,11 +162,11 @@ def check_requirements(dist, attr: str, value: _StrOrIter) -> None:
         if isinstance(value, (dict, set)):
             raise TypeError("Unordered types are not allowed")
     except (TypeError, ValueError) as error:
-        tmpl = (
+        msg = (
             f"{attr!r} must be a string or iterable of strings "
             f"containing valid project/version requirement specifiers; {error}"
         )
-        raise DistutilsSetupError(tmpl) from error
+        raise DistutilsSetupError(msg) from error
 
 
 def check_specifier(dist, attr, value):
@@ -175,8 +174,8 @@ def check_specifier(dist, attr, value):
     try:
         SpecifierSet(value)
     except (InvalidSpecifier, AttributeError) as error:
-        tmpl = "{attr!r} must be a string containing valid version specifiers; {error}"
-        raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error
+        msg = f"{attr!r} must be a string containing valid version specifiers; {error}"
+        raise DistutilsSetupError(msg) from error
 
 
 def check_entry_points(dist, attr, value):

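The validation idiom being annotated here is compact enough to miss:
``isinstance(value, (tuple, list))`` rejects strings, sets and generators,
while ``''.join(value)`` raises ``TypeError`` unless every element is a
string. A self-contained sketch of the same checks::

    def is_string_sequence(value) -> bool:
        # Mirrors assert_string_list, returning a bool instead of raising.
        try:
            assert isinstance(value, (tuple, list))
            assert ''.join(value) != value  # TypeError if items are not str
            return True
        except (TypeError, ValueError, AttributeError, AssertionError):
            return False

    assert is_string_sequence(["a", "b"])
    assert is_string_sequence(("a",))
    assert not is_string_sequence("ab")             # plain str is rejected
    assert not is_string_sequence({"a", "b"})       # unordered
    assert not is_string_sequence(["a", 1])         # non-str element
    assert not is_string_sequence(x for x in "ab")  # single-use iterable
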
From 000a413e2af9c271166cebe6909ad664907887f1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 27 Aug 2024 11:49:33 +0100
Subject: [PATCH 1212/1761] Deprecate public access to setuptools.dist.sequence

---
 setuptools/dist.py | 38 ++++++++++++++++++++++++++++++--------
 1 file changed, 30 insertions(+), 8 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index b5d78aa37d..f22e3eea54 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -8,7 +8,16 @@
 import sys
 from glob import iglob
 from pathlib import Path
-from typing import TYPE_CHECKING, List, MutableMapping, NoReturn, Tuple, Union, overload
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    List,
+    MutableMapping,
+    NoReturn,
+    Tuple,
+    Union,
+    overload,
+)
 
 from more_itertools import partition, unique_everseen
 from packaging.markers import InvalidMarker, Marker
@@ -42,8 +51,10 @@
 
 __all__ = ['Distribution']
 
-sequence = tuple, list
+_sequence = tuple, list
 """
+:meta private:
+
 Supported iterable types that are known to be:
 - ordered (which `set` isn't)
 - not match a str (which `Sequence[str]` does)
@@ -55,6 +66,17 @@
 _requence_type_repr = "tuple[str, ...] | list[str]"
 
 
+def __getattr__(name: str) -> Any:  # pragma: no cover
+    if name == "sequence":
+        SetuptoolsDeprecationWarning.emit(
+            "`setuptools.dist.sequence` is an internal implementation detail.",
+            "Please define your own `sequence = tuple, list` instead.",
+            due_date=(2025, 8, 28),  # Originally added on 2024-08-27
+        )
+        return _sequence
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+
 def check_importable(dist, attr, value):
     try:
         ep = metadata.EntryPoint(value=value, name=None, group=None)
@@ -70,7 +92,7 @@ def assert_string_list(dist, attr: str, value: _Sequence) -> None:
     try:
         # verify that value is a list or tuple to exclude unordered
         # or single-use iterables
-        assert isinstance(value, sequence)
+        assert isinstance(value, _sequence)
         # verify that elements of value are strings
         assert ''.join(value) != value
     except (TypeError, ValueError, AttributeError, AssertionError) as e:
@@ -786,7 +808,7 @@ def has_contents_for(self, package):
 
     def _exclude_misc(self, name: str, value: _Sequence) -> None:
         """Handle 'exclude()' for list/tuple attrs without a special handler"""
-        if not isinstance(value, sequence):
+        if not isinstance(value, _sequence):
             raise DistutilsSetupError(
                 f"{name}: setting must be of type <{_requence_type_repr}> (got {value!r})"
             )
@@ -794,7 +816,7 @@ def _exclude_misc(self, name: str, value: _Sequence) -> None:
             old = getattr(self, name)
         except AttributeError as e:
             raise DistutilsSetupError("%s: No such distribution setting" % name) from e
-        if old is not None and not isinstance(old, sequence):
+        if old is not None and not isinstance(old, _sequence):
             raise DistutilsSetupError(
                 name + ": this setting cannot be changed via include/exclude"
             )
@@ -804,7 +826,7 @@ def _exclude_misc(self, name: str, value: _Sequence) -> None:
     def _include_misc(self, name: str, value: _Sequence) -> None:
         """Handle 'include()' for list/tuple attrs without a special handler"""
 
-        if not isinstance(value, sequence):
+        if not isinstance(value, _sequence):
             raise DistutilsSetupError(
                 f"{name}: setting must be of type <{_requence_type_repr}> (got {value!r})"
             )
@@ -814,7 +836,7 @@ def _include_misc(self, name: str, value: _Sequence) -> None:
             raise DistutilsSetupError("%s: No such distribution setting" % name) from e
         if old is None:
             setattr(self, name, value)
-        elif not isinstance(old, sequence):
+        elif not isinstance(old, _sequence):
             raise DistutilsSetupError(
                 name + ": this setting cannot be changed via include/exclude"
             )
@@ -846,7 +868,7 @@ def exclude(self, **attrs):
                 self._exclude_misc(k, v)
 
     def _exclude_packages(self, packages: _Sequence) -> None:
-        if not isinstance(packages, sequence):
+        if not isinstance(packages, _sequence):
             raise DistutilsSetupError(
                 f"packages: setting must be of type <{_requence_type_repr}> (got {packages!r})"
             )

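The deprecation relies on module-level ``__getattr__`` (PEP 562): it fires
only when normal attribute lookup fails, which is exactly the situation once
the public name gains a leading underscore. A stripped-down version of the
pattern, using the stdlib warning machinery rather than setuptools' own::

    # mymodule.py -- hypothetical module applying the same technique
    import warnings

    _sequence = tuple, list  # private replacement for the old public name

    def __getattr__(name: str):
        if name == "sequence":  # legacy name: warn, then serve the value
            warnings.warn(
                "`sequence` is an internal detail; define your own "
                "`sequence = tuple, list` instead.",
                DeprecationWarning,
                stacklevel=2,
            )
            return _sequence
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
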
From 96be735ca2e77b7db876133dfda0b4df3ced4ac0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 18:12:02 +0100
Subject: [PATCH 1213/1761] Workaround for bdist_wheel.dist_info_dir problems

---
 setuptools/build_meta.py | 43 ++++++++++++++++++++++++++++++----------
 1 file changed, 32 insertions(+), 11 deletions(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index ecf434bbf3..3231105e69 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -417,17 +417,27 @@ def build_wheel(
         config_settings: _ConfigSettings = None,
         metadata_directory: StrPath | None = None,
     ):
-        cmd = ['bdist_wheel']
-        if metadata_directory:
-            cmd.extend(['--dist-info-dir', metadata_directory])
-        with suppress_known_deprecation():
-            return self._build_with_temp_dir(
-                cmd,
-                '.whl',
-                wheel_directory,
-                config_settings,
-                self._arbitrary_args(config_settings),
-            )
+        def _build(cmd: list[str]):
+            with suppress_known_deprecation():
+                return self._build_with_temp_dir(
+                    cmd,
+                    '.whl',
+                    wheel_directory,
+                    config_settings,
+                    self._arbitrary_args(config_settings),
+                )
+
+        if metadata_directory is None:
+            return _build(['bdist_wheel'])
+
+        try:
+            return _build(['bdist_wheel', '--dist-info-dir', metadata_directory])
+        except SystemExit as ex:
+            # pypa/setuptools#4683
+            if "--dist-info-dir" not in str(ex):
+                raise
+            _IncompatibleBdistWheel.emit()
+            return _build(['bdist_wheel'])
 
     def build_sdist(
         self, sdist_directory: StrPath, config_settings: _ConfigSettings = None
@@ -514,6 +524,17 @@ def run_setup(self, setup_script='setup.py'):
             sys.argv[0] = sys_argv_0
 
 
+class _IncompatibleBdistWheel(SetuptoolsDeprecationWarning):
+    _SUMMARY = "wheel.bdist_wheel is deprecated, please import it from setuptools"
+    _DETAILS = """
+    Ensure that any custom bdist_wheel implementation is a subclass of
+    setuptools.command.bdist_wheel.bdist_wheel.
+    """
+    _DUE_DATE = (2025, 10, 15)
+    # Initially introduced in 2024/10/15, but maybe too disruptive to be enforced?
+    _SEE_URL = "https://github.com/pypa/wheel/pull/631"
+
+
 # The primary backend
 _BACKEND = _BuildMetaBackend()
 

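Schematically, the hook now follows a "try the new flag, fall back on an
unrecognized-option failure" shape; in this sketch ``build`` stands in for
the ``_build`` closure above::

    def build_wheel_compat(build, metadata_directory=None):
        if metadata_directory is None:
            return build(["bdist_wheel"])
        try:
            return build(["bdist_wheel", "--dist-info-dir", str(metadata_directory)])
        except SystemExit as ex:
            # bdist_wheel subclasses that predate the option abort on the
            # unknown flag; detect that failure and rebuild without it.
            if "--dist-info-dir" not in str(ex):
                raise
            return build(["bdist_wheel"])
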
From a663287c9c5f0bfc5e05addfb3a15fea7fc716c3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 20:31:58 +0100
Subject: [PATCH 1214/1761] Add pragma for edge-case code path

---
 setuptools/build_meta.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 3231105e69..19d0e1688e 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -432,7 +432,7 @@ def _build(cmd: list[str]):
 
         try:
             return _build(['bdist_wheel', '--dist-info-dir', metadata_directory])
-        except SystemExit as ex:
+        except SystemExit as ex:  # pragma: nocover
             # pypa/setuptools#4683
             if "--dist-info-dir" not in str(ex):
                 raise

From 50b732a4006f3b84315d4473f7c203e4fe13aed9 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 20:34:44 +0100
Subject: [PATCH 1215/1761] Check for more specific error message

---
 setuptools/build_meta.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 19d0e1688e..e730a27f25 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -434,7 +434,7 @@ def _build(cmd: list[str]):
             return _build(['bdist_wheel', '--dist-info-dir', metadata_directory])
         except SystemExit as ex:  # pragma: nocover
             # pypa/setuptools#4683
-            if "--dist-info-dir" not in str(ex):
+            if "--dist-info-dir not recognized" not in str(ex):
                 raise
             _IncompatibleBdistWheel.emit()
             return _build(['bdist_wheel'])

From 0534fde847e0bd0c2214d6821c042c0eb5c0ffc3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 20:34:54 +0100
Subject: [PATCH 1216/1761] Add news fragment

---
 newsfragments/4684.bugfix.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4684.bugfix.rst

diff --git a/newsfragments/4684.bugfix.rst b/newsfragments/4684.bugfix.rst
new file mode 100644
index 0000000000..40f554cccc
--- /dev/null
+++ b/newsfragments/4684.bugfix.rst
@@ -0,0 +1,2 @@
+Add workaround for ``bdist_wheel --dist-info-dir`` errors
+when customisation does not inherit from setuptools.

From 61a5a03fbf8acc59e6e12144011aa06b85162bda Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 16 Oct 2024 11:04:49 +0100
Subject: [PATCH 1217/1761] =?UTF-8?q?Bump=20version:=2075.1.1=20=E2=86=92?=
 =?UTF-8?q?=2075.2.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 17 +++++++++++++++++
 newsfragments/4578.bugfix.rst  |  1 -
 newsfragments/4578.feature.rst |  1 -
 newsfragments/4684.bugfix.rst  |  2 --
 pyproject.toml                 |  2 +-
 6 files changed, 19 insertions(+), 6 deletions(-)
 delete mode 100644 newsfragments/4578.bugfix.rst
 delete mode 100644 newsfragments/4578.feature.rst
 delete mode 100644 newsfragments/4684.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 5247cca130..91921ce92d 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.1.1
+current_version = 75.2.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 75ef319dd5..e79b45a623 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,20 @@
+v75.2.0
+=======
+
+Features
+--------
+
+- Made errors when parsing ``Distribution`` data more explicit about the expected type (``tuple[str, ...] | list[str]``) -- by :user:`Avasam` (#4578)
+
+
+Bugfixes
+--------
+
+- Fix a `TypeError` when a ``Distribution``'s old included attribute was a `tuple` -- by :user:`Avasam` (#4578)
+- Add workaround for ``bdist_wheel --dist-info-dir`` errors
+  when customisation does not inherit from setuptools. (#4684)
+
+
 v75.1.1
 =======
 
diff --git a/newsfragments/4578.bugfix.rst b/newsfragments/4578.bugfix.rst
deleted file mode 100644
index e9bde46269..0000000000
--- a/newsfragments/4578.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix a `TypeError` when a ``Distribution``'s old included attribute was a `tuple` -- by :user:`Avasam`
diff --git a/newsfragments/4578.feature.rst b/newsfragments/4578.feature.rst
deleted file mode 100644
index 48f57edce3..0000000000
--- a/newsfragments/4578.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Made errors when parsing ``Distribution`` data more explicit about the expected type (``tuple[str, ...] | list[str]``) -- by :user:`Avasam`
diff --git a/newsfragments/4684.bugfix.rst b/newsfragments/4684.bugfix.rst
deleted file mode 100644
index 40f554cccc..0000000000
--- a/newsfragments/4684.bugfix.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Add workaround for ``bdist_wheel --dist-info-dir`` errors
-when customisation does not inherit from setuptools.
diff --git a/pyproject.toml b/pyproject.toml
index c27a988afd..bfa4d154a7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.1.1"
+version = "75.2.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From ecde60bfa50de155aa88c3410bd00b7dbaa0afd4 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 10:54:32 -0400
Subject: [PATCH 1218/1761] Update mypy.ini from skeleton

---
 mypy.ini | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index 7de7e5a508..a9d0fdd7df 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,17 +1,27 @@
 [mypy]
-# CI should test for all versions, local development gets hints for oldest supported
-# But our testing setup doesn't allow passing CLI arguments, so local devs have to set this manually.
-# python_version = 3.8
+## upstream
+
+# Is the project well-typed?
 strict = False
+
+# Early opt-in even when strict = False
 warn_unused_ignores = True
 warn_redundant_casts = True
-# required to support namespace packages: https://github.com/python/mypy/issues/14057
+enable_error_code = ignore-without-code
+
+# Support namespace packages per https://github.com/python/mypy/issues/14057
 explicit_package_bases = True
 
 disable_error_code =
 	# Disable due to many false positives
 	overload-overlap,
 
+## local
+
+# CI should test for all versions, local development gets hints for oldest supported
+# But our testing setup doesn't allow passing CLI arguments, so local devs have to set this manually.
+# python_version = 3.8
+
 exclude = (?x)(
 	# Avoid scanning Python files in generated folders
 	^build/

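``enable_error_code = ignore-without-code`` is the strictness knob the next
commit satisfies: bare ``# type: ignore`` comments become errors, while
code-qualified ones remain accepted. For example::

    x: int = "oops"  # type: ignore              # flagged by ignore-without-code
    y: int = "oops"  # type: ignore[assignment]  # accepted: code is explicit
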
From 3403ffd553a8781f07cadd94ecc1680d8e2003c2 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 11:59:12 -0400
Subject: [PATCH 1219/1761] Re-enable mypy & Resolve all [ignore-without-code]

---
 mypy.ini                                  |  2 +-
 pkg_resources/__init__.py                 |  2 +-
 pyproject.toml                            |  4 ----
 setuptools/build_meta.py                  |  2 +-
 setuptools/config/expand.py               | 13 ++++++++-----
 setuptools/config/pyprojecttoml.py        |  4 ++--
 setuptools/msvc.py                        |  2 +-
 setuptools/tests/test_build_ext.py        |  2 +-
 setuptools/tests/test_editable_install.py | 14 +++++++++-----
 9 files changed, 24 insertions(+), 21 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index a9d0fdd7df..26692755ea 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -64,6 +64,6 @@ ignore_missing_imports = True
 
 # Even when excluding a module, import issues can show up due to following import
 # https://github.com/python/mypy/issues/11936#issuecomment-1466764006
-[mypy-setuptools.config._validate_pyproject.*,setuptools._distutils.*]
+[mypy-setuptools.config._validate_pyproject.*,setuptools._vendor.*,setuptools._distutils.*]
 follow_imports = silent
 # silent => ignore errors when following imports
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 4e9b83d83d..f1f0ef2535 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -2777,7 +2777,7 @@ def load(
         if require:
             # We could pass `env` and `installer` directly,
             # but keeping `*args` and `**kwargs` for backwards compatibility
-            self.require(*args, **kwargs)  # type: ignore
+            self.require(*args, **kwargs)  # type: ignore[arg-type]
         return self.resolve()
 
     def resolve(self) -> _ResolvedEntryPoint:
diff --git a/pyproject.toml b/pyproject.toml
index bfa4d154a7..eb6ec11041 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -216,7 +216,3 @@ formats = "zip"
 
 
 [tool.setuptools_scm]
-
-
-[tool.pytest-enabler.mypy]
-# Disabled due to jaraco/skeleton#143
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index e730a27f25..a3c83c7002 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -431,7 +431,7 @@ def _build(cmd: list[str]):
             return _build(['bdist_wheel'])
 
         try:
-            return _build(['bdist_wheel', '--dist-info-dir', metadata_directory])
+            return _build(['bdist_wheel', '--dist-info-dir', str(metadata_directory)])
         except SystemExit as ex:  # pragma: nocover
             # pypa/setuptools#4683
             if "--dist-info-dir not recognized" not in str(ex):
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index e11bcf9b42..8f2040fefa 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -203,7 +203,8 @@ def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
         return sys.modules[name]
     module = importlib.util.module_from_spec(spec)
     sys.modules[name] = module  # cache (it also ensures `==` works on loaded items)
-    spec.loader.exec_module(module)  # type: ignore
+    assert spec.loader is not None
+    spec.loader.exec_module(module)
     return module
 
 
@@ -285,10 +286,11 @@ def find_packages(
 
     from setuptools.discovery import construct_package_dir
 
-    if namespaces:
-        from setuptools.discovery import PEP420PackageFinder as PackageFinder
+    # check "not namespaces" first due to python/mypy#6232
+    if not namespaces:
+        from setuptools.discovery import PackageFinder
     else:
-        from setuptools.discovery import PackageFinder  # type: ignore
+        from setuptools.discovery import PEP420PackageFinder as PackageFinder
 
     root_dir = root_dir or os.curdir
     where = kwargs.pop('where', ['.'])
@@ -359,7 +361,8 @@ def entry_points(text: str, text_source="entry-points") -> dict[str, dict]:
     entry-point names, and the second level values are references to objects
     (that correspond to the entry-point value).
     """
-    parser = ConfigParser(default_section=None, delimiters=("=",))  # type: ignore
+    # Using undocumented behaviour, see python/typeshed#12700
+    parser = ConfigParser(default_section=None, delimiters=("=",))  # type: ignore[call-overload]
     parser.optionxform = str  # case sensitive
     parser.read_string(text, text_source)
     groups = {k: dict(v.items()) for k, v in parser.items()}
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 5d95e18b83..e0040cefbd 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -44,8 +44,8 @@ def validate(config: dict, filepath: StrPath) -> bool:
 
     trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier")
     if hasattr(trove_classifier, "_disable_download"):
-        # Improve reproducibility by default. See issue 31 for validate-pyproject.
-        trove_classifier._disable_download()  # type: ignore
+        # Improve reproducibility by default. See abravalheri/validate-pyproject#31
+        trove_classifier._disable_download()  # type: ignore[union-attr]
 
     try:
         return validator.validate(config)
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index de4b05f928..7ee685e023 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1418,7 +1418,7 @@ def VCRuntimeRedist(self) -> str | None:
             os.path.join(prefix, arch_subdir, crt_dir, vcruntime)
             for (prefix, crt_dir) in itertools.product(prefixes, crt_dirs)
         )
-        return next(filter(os.path.isfile, candidate_paths), None)
+        return next(filter(os.path.isfile, candidate_paths), None)  # type: ignore[arg-type] #python/mypy#12682
 
     def return_env(self, exists=True):
         """
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index dab8b41cc9..f3e4ccd364 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -183,7 +183,7 @@ def get_build_ext_cmd(self, optional: bool, **opts):
             "eggs.c": "#include missingheader.h\n",
             ".build": {"lib": {}, "tmp": {}},
         }
-        path.build(files)  # type: ignore[arg-type] # jaraco/path#232
+        path.build(files)  # jaraco/path#232
         extension = Extension('spam.eggs', ['eggs.c'], optional=optional)
         dist = Distribution(dict(ext_modules=[extension]))
         dist.script_name = 'setup.py'
diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index 287367ac18..bdbaa3c7e7 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 import platform
 import stat
@@ -8,6 +10,7 @@
 from importlib.machinery import EXTENSION_SUFFIXES
 from pathlib import Path
 from textwrap import dedent
+from typing import Any
 from unittest.mock import Mock
 from uuid import uuid4
 
@@ -840,7 +843,8 @@ class TestOverallBehaviour:
         version = "3.14159"
         """
 
-    FLAT_LAYOUT = {
+    # Any: Would need a TypedDict. Keep it simple for tests
+    FLAT_LAYOUT: dict[str, Any] = {
         "pyproject.toml": dedent(PYPROJECT),
         "MANIFEST.in": EXAMPLE["MANIFEST.in"],
         "otherfile.py": "",
@@ -878,9 +882,9 @@ class TestOverallBehaviour:
             "otherfile.py": "",
             "mypkg": {
                 "__init__.py": "",
-                "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],  # type: ignore
+                "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
             },
-            "other": FLAT_LAYOUT["mypkg"]["subpackage"],  # type: ignore
+            "other": FLAT_LAYOUT["mypkg"]["subpackage"],
         },
         "namespace": {
             "pyproject.toml": dedent(PYPROJECT),
@@ -888,8 +892,8 @@ class TestOverallBehaviour:
             "otherfile.py": "",
             "src": {
                 "mypkg": {
-                    "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],  # type: ignore
-                    "subpackage": FLAT_LAYOUT["mypkg"]["subpackage"],  # type: ignore
+                    "mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
+                    "subpackage": FLAT_LAYOUT["mypkg"]["subpackage"],
                 },
             },
         },
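
The `[ignore-without-code]` check re-enabled by this patch requires every `# type: ignore` to carry an explicit error code, so each suppression documents exactly which diagnostic it silences. A minimal sketch of the difference (hypothetical module, assuming `enable_error_code = ignore-without-code` is set in mypy.ini):

    x: list[int] = []
    x.append("a")  # type: ignore            # flagged: no error code given
    x.append("b")  # type: ignore[arg-type]  # accepted: names the silenced code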

From 1429bf595df92ce2cf901a54d4e17d5d8e44f4f4 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 27 Aug 2024 12:33:34 -0400
Subject: [PATCH 1220/1761] Fix ConfigHandler generic

---
 setuptools/config/setupcfg.py            | 17 ++++++++++-------
 setuptools/tests/config/test_setupcfg.py |  4 ++--
 2 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 54469f74a3..4fee109e26 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -27,7 +27,6 @@
     List,
     Tuple,
     TypeVar,
-    Union,
     cast,
 )
 
@@ -53,7 +52,7 @@
 while the second element of the tuple is the option value itself
 """
 AllCommandOptions = Dict["str", SingleCommandOptions]  # cmd name => its options
-Target = TypeVar("Target", bound=Union["Distribution", "DistributionMetadata"])
+Target = TypeVar("Target", "Distribution", "DistributionMetadata")
 
 
 def read_configuration(
@@ -96,7 +95,7 @@ def _apply(
     filepath: StrPath,
     other_files: Iterable[StrPath] = (),
     ignore_option_errors: bool = False,
-) -> tuple[ConfigHandler, ...]:
+) -> tuple[ConfigMetadataHandler, ConfigOptionsHandler]:
     """Read configuration from ``filepath`` and applies to the ``dist`` object."""
     from setuptools.dist import _Distribution
 
@@ -122,7 +121,7 @@ def _apply(
     return handlers
 
 
-def _get_option(target_obj: Target, key: str):
+def _get_option(target_obj: Distribution | DistributionMetadata, key: str):
     """
     Given a target object and option key, get that option from
     the target object, either through a get_{key} method or
@@ -134,10 +133,14 @@ def _get_option(target_obj: Target, key: str):
     return getter()
 
 
-def configuration_to_dict(handlers: tuple[ConfigHandler, ...]) -> dict:
+def configuration_to_dict(
+    handlers: Iterable[
+        ConfigHandler[Distribution] | ConfigHandler[DistributionMetadata]
+    ],
+) -> dict:
     """Returns configuration data gathered by given handlers as a dict.
 
-    :param list[ConfigHandler] handlers: Handlers list,
+    :param Iterable[ConfigHandler] handlers: Handlers list,
         usually from parse_configuration()
 
     :rtype: dict
@@ -254,7 +257,7 @@ def __init__(
         ensure_discovered: expand.EnsurePackagesDiscovered,
     ):
         self.ignore_option_errors = ignore_option_errors
-        self.target_obj = target_obj
+        self.target_obj: Target = target_obj
         self.sections = dict(self._section_options(options))
         self.set_options: list[str] = []
         self.ensure_discovered = ensure_discovered
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 4f0a7349f5..8d95798123 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -7,7 +7,7 @@
 import pytest
 from packaging.requirements import InvalidRequirement
 
-from setuptools.config.setupcfg import ConfigHandler, read_configuration
+from setuptools.config.setupcfg import ConfigHandler, Target, read_configuration
 from setuptools.dist import Distribution, _Distribution
 from setuptools.warnings import SetuptoolsDeprecationWarning
 
@@ -16,7 +16,7 @@
 from distutils.errors import DistutilsFileError, DistutilsOptionError
 
 
-class ErrConfigHandler(ConfigHandler):
+class ErrConfigHandler(ConfigHandler[Target]):
     """Erroneous handler. Fails to implement required methods."""
 
     section_prefix = "**err**"
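
The switch from `bound=Union[...]` to a value-constrained TypeVar matters for how type checkers solve `Target`: with constraints, each use of `ConfigHandler[Target]` resolves to exactly `Distribution` or exactly `DistributionMetadata`, never the union itself. A rough sketch of the distinction, using stub classes for illustration:

    from typing import TypeVar, Union

    class Distribution: ...
    class DistributionMetadata: ...

    # bound: the variable may be any subtype of the union, including the union
    BoundT = TypeVar("BoundT", bound=Union[Distribution, DistributionMetadata])

    # constrained: the variable is solved to exactly one of the listed types,
    # keeping ConfigHandler[Distribution] and ConfigHandler[DistributionMetadata]
    # distinct parameterizations
    ConstrainedT = TypeVar("ConstrainedT", Distribution, DistributionMetadata)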

From 2072d9876f813dde19ea856751b076265c1d0305 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 17 Oct 2024 12:20:14 -0400
Subject: [PATCH 1221/1761] Use `dict` as an `OrderedDict` and allow `dict` as
 an ordered type in `setuptools.dist.check_requirements`

---
 newsfragments/4575.feature.rst         |  1 +
 setuptools/command/_requirestxt.py     |  3 +--
 setuptools/command/egg_info.py         |  5 +----
 setuptools/dist.py                     | 29 +++++++++++++-------------
 setuptools/tests/test_core_metadata.py |  1 -
 setuptools/tests/test_dist.py          | 10 ++-------
 6 files changed, 20 insertions(+), 29 deletions(-)
 create mode 100644 newsfragments/4575.feature.rst

diff --git a/newsfragments/4575.feature.rst b/newsfragments/4575.feature.rst
new file mode 100644
index 0000000000..64ab49830f
--- /dev/null
+++ b/newsfragments/4575.feature.rst
@@ -0,0 +1 @@
+Allowed using ``dict`` as an ordered type in ``setuptools.dist.check_requirements`` -- by :user:`Avasam`
diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py
index b87476d6f4..d426f5dffb 100644
--- a/setuptools/command/_requirestxt.py
+++ b/setuptools/command/_requirestxt.py
@@ -18,12 +18,11 @@
 from packaging.requirements import Requirement
 
 from .. import _reqs
+from .._reqs import _StrOrIter
 
 # dict can work as an ordered set
 _T = TypeVar("_T")
 _Ordered = Dict[_T, None]
-_ordered = dict
-_StrOrIter = _reqs._StrOrIter
 
 
 def _prepare(
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index f4d3a2a57e..bc6c677878 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -2,7 +2,6 @@
 
 Create a distribution's .egg-info directory and contents"""
 
-import collections
 import functools
 import os
 import re
@@ -211,11 +210,9 @@ def save_version_info(self, filename):
         build tag. Install build keys in a deterministic order
         to avoid arbitrary reordering on subsequent builds.
         """
-        egg_info = collections.OrderedDict()
         # follow the order these keys would have been added
         # when PYTHONHASHSEED=0
-        egg_info['tag_build'] = self.tags()
-        egg_info['tag_date'] = 0
+        egg_info = dict(tag_build=self.tags(), tag_date=0)
         edit_config(filename, dict(egg_info=egg_info))
 
     def finalize_options(self):
diff --git a/setuptools/dist.py b/setuptools/dist.py
index f22e3eea54..d6b8e08214 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -11,12 +11,12 @@
 from typing import (
     TYPE_CHECKING,
     Any,
+    Dict,
     List,
     MutableMapping,
-    NoReturn,
+    Sequence,
     Tuple,
     Union,
-    overload,
 )
 
 from more_itertools import partition, unique_everseen
@@ -30,7 +30,6 @@
     command as _,  # noqa: F401 # imported for side-effects
 )
 from ._importlib import metadata
-from ._reqs import _StrOrIter
 from .config import pyprojecttoml, setupcfg
 from .discovery import ConfigDiscovery
 from .monkey import get_unpatched
@@ -63,7 +62,13 @@
 """
 _Sequence: TypeAlias = Union[Tuple[str, ...], List[str]]
 # This is how stringifying _Sequence would look in Python 3.10
-_requence_type_repr = "tuple[str, ...] | list[str]"
+_sequence_type_repr = "tuple[str, ...] | list[str]"
+_OrderedStrSequence: TypeAlias = Union[str, Dict[str, Any], Sequence[str]]
+"""
+:meta private:
+Avoid single-use iterable. Disallow sets.
+A poor approximation of an OrderedSequence (dict doesn't match a Sequence).
+"""
 
 
 def __getattr__(name: str) -> Any:  # pragma: no cover
@@ -97,7 +102,7 @@ def assert_string_list(dist, attr: str, value: _Sequence) -> None:
         assert ''.join(value) != value
     except (TypeError, ValueError, AttributeError, AssertionError) as e:
         raise DistutilsSetupError(
-            f"{attr!r} must be of type <{_requence_type_repr}> (got {value!r})"
+            f"{attr!r} must be of type <{_sequence_type_repr}> (got {value!r})"
         ) from e
 
 
@@ -173,15 +178,11 @@ def invalid_unless_false(dist, attr, value):
     raise DistutilsSetupError(f"{attr} is invalid.")
 
 
-@overload
-def check_requirements(dist, attr: str, value: set | dict) -> NoReturn: ...
-@overload
-def check_requirements(dist, attr: str, value: _StrOrIter) -> None: ...
-def check_requirements(dist, attr: str, value: _StrOrIter) -> None:
+def check_requirements(dist, attr: str, value: _OrderedStrSequence) -> None:
     """Verify that install_requires is a valid requirements list"""
     try:
         list(_reqs.parse(value))
-        if isinstance(value, (dict, set)):
+        if isinstance(value, set):
             raise TypeError("Unordered types are not allowed")
     except (TypeError, ValueError) as error:
         msg = (
@@ -810,7 +811,7 @@ def _exclude_misc(self, name: str, value: _Sequence) -> None:
         """Handle 'exclude()' for list/tuple attrs without a special handler"""
         if not isinstance(value, _sequence):
             raise DistutilsSetupError(
-                f"{name}: setting must be of type <{_requence_type_repr}> (got {value!r})"
+                f"{name}: setting must be of type <{_sequence_type_repr}> (got {value!r})"
             )
         try:
             old = getattr(self, name)
@@ -828,7 +829,7 @@ def _include_misc(self, name: str, value: _Sequence) -> None:
 
         if not isinstance(value, _sequence):
             raise DistutilsSetupError(
-                f"{name}: setting must be of type <{_requence_type_repr}> (got {value!r})"
+                f"{name}: setting must be of type <{_sequence_type_repr}> (got {value!r})"
             )
         try:
             old = getattr(self, name)
@@ -870,7 +871,7 @@ def exclude(self, **attrs):
     def _exclude_packages(self, packages: _Sequence) -> None:
         if not isinstance(packages, _sequence):
             raise DistutilsSetupError(
-                f"packages: setting must be of type <{_requence_type_repr}> (got {packages!r})"
+                f"packages: setting must be of type <{_sequence_type_repr}> (got {packages!r})"
             )
         list(map(self.exclude_package, packages))
 
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index 34828ac750..51d4a10810 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -310,7 +310,6 @@ def test_parity_with_metadata_from_pypa_wheel(tmp_path):
         python_requires=">=3.8",
         install_requires="""
         packaging==23.2
-        ordered-set==3.1.1
         more-itertools==8.8.0; extra == "other"
         jaraco.text==3.7.0
         importlib-resources==5.10.2; python_version<"3.8"
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index fde0de99ac..1bc4923032 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -1,4 +1,3 @@
-import collections
 import os
 import re
 import urllib.parse
@@ -72,15 +71,10 @@ def sdist_with_index(distname, version):
 
 
 def test_provides_extras_deterministic_order():
-    extras = collections.OrderedDict()
-    extras['a'] = ['foo']
-    extras['b'] = ['bar']
-    attrs = dict(extras_require=extras)
+    attrs = dict(extras_require=dict(a=['foo'], b=['bar']))
     dist = Distribution(attrs)
     assert list(dist.metadata.provides_extras) == ['a', 'b']
-    attrs['extras_require'] = collections.OrderedDict(
-        reversed(list(attrs['extras_require'].items()))
-    )
+    attrs['extras_require'] = dict(reversed(attrs['extras_require'].items()))
     dist = Distribution(attrs)
     assert list(dist.metadata.provides_extras) == ['b', 'a']
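
The rewrites above lean on the fact that, since Python 3.7, insertion order of a plain `dict` is guaranteed by the language, which makes `collections.OrderedDict` redundant here and lets a dict double as an ordered set. A quick sketch of the idiom:

    # dict.fromkeys deduplicates while preserving first-seen order
    ordered = dict.fromkeys(["tag_build", "tag_date", "tag_build"])
    print(list(ordered))                     # ['tag_build', 'tag_date']
    print(dict(a=1, b=2) == dict(b=2, a=1))  # True: equality ignores order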
 

From 206c4b8913b0cb792e1077673b9db7f3c73e8ac5 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 17 Oct 2024 12:57:21 -0400
Subject: [PATCH 1222/1761] Bump mypy to 1.12 and pyright to 1.1.385

---
 .github/workflows/pyright.yml  |  3 ++-
 mypy.ini                       |  3 ++-
 pkg_resources/__init__.py      |  2 +-
 pyproject.toml                 |  2 +-
 setuptools/_path.py            |  3 ++-
 setuptools/command/__init__.py | 10 +++++++++-
 setuptools/command/sdist.py    |  3 ++-
 7 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/pyright.yml b/.github/workflows/pyright.yml
index 38fb910d85..17a1e2dbbe 100644
--- a/.github/workflows/pyright.yml
+++ b/.github/workflows/pyright.yml
@@ -26,7 +26,7 @@ env:
   # For help with static-typing issues, or pyright update, ping @Avasam
   #
   # An exact version from https://github.com/microsoft/pyright/releases or "latest"
-  PYRIGHT_VERSION: "1.1.377"
+  PYRIGHT_VERSION: "1.1.385"
 
   # Environment variable to support color support (jaraco/skeleton#66)
   FORCE_COLOR: 1
@@ -73,4 +73,5 @@ jobs:
         uses: jakebailey/pyright-action@v2
         with:
           version: ${{ env.PYRIGHT_VERSION }}
+          python-version: ${{ matrix.python }}
           extra-args: --threads
diff --git a/mypy.ini b/mypy.ini
index 26692755ea..2dc8aab56f 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -52,6 +52,7 @@ disable_error_code = import-not-found
 #       for setuptools to import `_distutils` directly
 #     - or non-stdlib distutils typings are exposed
 # - The following are not marked as py.typed:
+#  - jaraco: Since mypy 1.12, the root name of the untyped namespace package gets called-out too
 #  - jaraco.develop: https://github.com/jaraco/jaraco.develop/issues/22
 #  - jaraco.envs: https://github.com/jaraco/jaraco.envs/issues/7
 #  - jaraco.packaging: https://github.com/jaraco/jaraco.packaging/issues/20
@@ -59,7 +60,7 @@ disable_error_code = import-not-found
 #  - jaraco.test: https://github.com/jaraco/jaraco.test/issues/7
 #  - jaraco.text: https://github.com/jaraco/jaraco.text/issues/17
 #  - wheel: does not intend on exposing a programmatic API https://github.com/pypa/wheel/pull/610#issuecomment-2081687671
-[mypy-distutils.*,jaraco.develop,jaraco.envs,jaraco.packaging.*,jaraco.path,jaraco.test.*,jaraco.text,wheel.*]
+[mypy-distutils.*,jaraco,jaraco.develop,jaraco.envs,jaraco.packaging.*,jaraco.path,jaraco.test.*,jaraco.text,wheel.*]
 ignore_missing_imports = True
 
 # Even when excluding a module, import issues can show up due to following import
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index f1f0ef2535..47824ab66e 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -2648,7 +2648,7 @@ def _cygwin_patch(filename: StrOrBytesPath):  # pragma: nocover
     would probably better, in Cygwin even more so, except
     that this seems to be by design...
     """
-    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
+    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename  # type: ignore[type-var] # python/mypy#17952
 
 
 if TYPE_CHECKING:
diff --git a/pyproject.toml b/pyproject.toml
index eb6ec11041..72fc6df93a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -136,7 +136,7 @@ type = [
 	# pin mypy version so a new version doesn't suddenly cause the CI to fail,
 	# until types-setuptools is removed from typeshed.
 	# For help with static-typing issues, or mypy update, ping @Avasam 
-	"mypy==1.11.*",
+	"mypy==1.12.*",
 	# Typing fixes in version newer than we require at runtime
 	"importlib_metadata>=7.0.2; python_version < '3.10'",
 	# Imported unconditionally in tools/finalize.py
diff --git a/setuptools/_path.py b/setuptools/_path.py
index dd4a9db8cb..c7bef83365 100644
--- a/setuptools/_path.py
+++ b/setuptools/_path.py
@@ -11,9 +11,10 @@
 
 from more_itertools import unique_everseen
 
-if sys.version_info >= (3, 9):
+if TYPE_CHECKING:
     StrPath: TypeAlias = Union[str, os.PathLike[str]]  #  Same as _typeshed.StrPath
 else:
+    # Python 3.8 support
     StrPath: TypeAlias = Union[str, os.PathLike]
 
 
diff --git a/setuptools/command/__init__.py b/setuptools/command/__init__.py
index bf011e896d..50e6c2f54f 100644
--- a/setuptools/command/__init__.py
+++ b/setuptools/command/__init__.py
@@ -1,12 +1,20 @@
+# mypy: disable_error_code=call-overload
+# pyright: reportCallIssue=false, reportArgumentType=false
+# Can't disable on the exact line because distutils doesn't exist on Python 3.12
+# and type-checkers aren't aware of distutils_hack,
+# causing distutils.command.bdist.bdist.format_commands to be Any.
+
 import sys
 
 from distutils.command.bdist import bdist
 
 if 'egg' not in bdist.format_commands:
     try:
+        # format_commands is a dict in vendored distutils
+        # It used to be a list in older (stdlib) distutils
+        # We support both for backwards compatibility
         bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file")
     except TypeError:
-        # For backward compatibility with older distutils (stdlib)
         bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
         bdist.format_commands.append('egg')
 
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index fa9a2c4d81..65ce735dde 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -4,6 +4,7 @@
 import os
 import re
 from itertools import chain
+from typing import ClassVar
 
 from .._importlib import metadata
 from ..dist import Distribution
@@ -49,7 +50,7 @@ class sdist(orig.sdist):
     ]
 
     distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
-    negative_opt: dict[str, str] = {}
+    negative_opt: ClassVar[dict[str, str]] = {}
 
     README_EXTENSIONS = ['', '.rst', '.txt', '.md']
     READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)
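
The `ClassVar` annotation added to `negative_opt` marks the mapping as shared class-level state, so type checkers will reject accidental per-instance assignment. A minimal sketch of the pattern (the class name is a stand-in, not the real command class):

    from typing import ClassVar

    class Command:  # hypothetical stand-in for the sdist command class
        negative_opt: ClassVar[dict[str, str]] = {}  # shared by all instances

    cmd = Command()
    cmd.negative_opt = {"quiet": "verbose"}  # mypy: cannot assign to ClassVar via instance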

From 22e845ba6049f2417e1f8d14a67bd5a97a9b62c9 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 17 Oct 2024 17:14:33 -0400
Subject: [PATCH 1223/1761] Use a real boolean (False) default for attributes
 generated from display_option_names

---
 distutils/dist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index 154301baff..8e1e6d0b4e 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -139,7 +139,7 @@ def __init__(self, attrs=None):  # noqa: C901
         self.dry_run = False
         self.help = False
         for attr in self.display_option_names:
-            setattr(self, attr, 0)
+            setattr(self, attr, False)
 
         # Store the distribution meta-data (name, version, author, and so
         # forth) in a separate object -- we're getting to have enough

From bc1aa6fa7f8278756f808708e85499008d46d28f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Oct 2024 17:08:10 -0400
Subject: [PATCH 1224/1761] Restore expectation that mkpath returns a list of
 the items it created.

Closes #305
---
 distutils/dir_util.py            |  2 +-
 distutils/tests/test_dir_util.py | 11 +++++++++++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 3b22839d27..3fa4d7f4c3 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -58,7 +58,7 @@ def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False):
         log.info("creating %s", name)
 
     ancestry = itertools.chain((name,), name.parents)
-    missing = (path for path in ancestry if not path.is_dir())
+    missing = list(path for path in ancestry if not path.is_dir())
 
     try:
         dry_run or name.mkdir(mode=mode, parents=True, exist_ok=True)
diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index 12e643ab74..0a6ff431ac 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -52,6 +52,17 @@ def test_mkpath_with_custom_mode(self):
         mkpath(self.target2, 0o555)
         assert stat.S_IMODE(os.stat(self.target2).st_mode) == 0o555 & ~umask
 
+    def test_mkpath_returns_as_described(self, tmp_path):
+        """
+        Per the docstring, mkpath should return the list of directories created.
+
+        pypa/distutils#305 revealed that the return value is always the empty
+        list and no one complained. Consider removing this expectation.
+        """
+        target = tmp_path / 'foodir'
+        res = mkpath(target)
+        assert res == [str(target)]
+
     def test_create_tree_verbosity(self, caplog):
         create_tree(self.root_target, ['one', 'two', 'three'], verbose=False)
         assert caplog.messages == []

From ce4b760e88c9eca60ae21f57d58ec8f1ac88ac50 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Oct 2024 17:13:20 -0400
Subject: [PATCH 1225/1761] Remove support for returning the directories
 created in mkpath, unused.

Closes #306
---
 distutils/dir_util.py            |  8 +-------
 distutils/tests/test_dir_util.py | 11 -----------
 2 files changed, 1 insertion(+), 18 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 3fa4d7f4c3..5696359cc4 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -44,7 +44,7 @@ def wrapper(path, *args, **kwargs):
 
 @functools.singledispatch
 @wrapper
-def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False):
+def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False) -> None:
     """Create a directory and any missing ancestor directories.
 
     If the directory already exists (or if 'name' is the empty string, which
@@ -52,21 +52,15 @@ def mkpath(name: pathlib.Path, mode=0o777, verbose=True, dry_run=False):
     Raise DistutilsFileError if unable to create some directory along the way
     (eg. some sub-path exists, but is a file rather than a directory).
     If 'verbose' is true, log the directory created.
-    Return the list of directories actually created.
     """
     if verbose and not name.is_dir():
         log.info("creating %s", name)
 
-    ancestry = itertools.chain((name,), name.parents)
-    missing = list(path for path in ancestry if not path.is_dir())
-
     try:
         dry_run or name.mkdir(mode=mode, parents=True, exist_ok=True)
     except OSError as exc:
         raise DistutilsFileError(f"could not create '{name}': {exc.args[-1]}")
 
-    return list(map(str, missing))
-
 
 @mkpath.register
 def _(name: str, *args, **kwargs):
diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index 0a6ff431ac..12e643ab74 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -52,17 +52,6 @@ def test_mkpath_with_custom_mode(self):
         mkpath(self.target2, 0o555)
         assert stat.S_IMODE(os.stat(self.target2).st_mode) == 0o555 & ~umask
 
-    def test_mkpath_returns_as_described(self, tmp_path):
-        """
-        Per the docstring, mkpath should return the list of directories created.
-
-        pypa/distutils#305 revealed that the return value is always the empty
-        list and no one complained. Consider removing this expectation.
-        """
-        target = tmp_path / 'foodir'
-        res = mkpath(target)
-        assert res == [str(target)]
-
     def test_create_tree_verbosity(self, caplog):
         create_tree(self.root_target, ['one', 'two', 'three'], verbose=False)
         assert caplog.messages == []
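
For context, `mkpath` is a `functools.singledispatch` function: the `pathlib.Path` implementation above carries the logic, while the `str` overload registered via `@mkpath.register` converts its argument and dispatches again. A simplified sketch of that shape (demo names, not the real signatures):

    import functools
    import pathlib

    @functools.singledispatch
    def mkpath_demo(name: pathlib.Path) -> None:
        print(f"creating {name}")  # stands in for name.mkdir(...)

    @mkpath_demo.register
    def _(name: str) -> None:
        mkpath_demo(pathlib.Path(name))  # normalize str input, dispatch again

    mkpath_demo("build/lib")  # the str overload routes to the Path implementation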

From b1e746c2f25b2380cfa45105aafa2852a47f4f41 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Oct 2024 16:55:22 -0400
Subject: [PATCH 1226/1761] Add test capturing missed expectation.

Ref #304
---
 distutils/tests/test_dir_util.py | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index 12e643ab74..ee76d05316 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -1,6 +1,7 @@
 """Tests for distutils.dir_util."""
 
 import os
+import pathlib
 import stat
 import unittest.mock as mock
 from distutils import dir_util, errors
@@ -110,3 +111,25 @@ def test_copy_tree_exception_in_listdir(self):
         ):
             src = self.tempdirs[-1]
             dir_util.copy_tree(src, None)
+
+    @pytest.mark.xfail(reason="#304")
+    def test_mkpath_exception_uncached(self, monkeypatch, tmp_path):
+        """
+        Caching should not remember failed attempts.
+
+        pypa/distutils#304
+        """
+
+        class FailPath(pathlib.Path):
+            def mkdir(self, *args, **kwargs):
+                raise OSError("Failed to create directory")
+
+        target = tmp_path / 'foodir'
+
+        with pytest.raises(errors.DistutilsFileError):
+            mkpath(FailPath(target))
+
+        assert not target.exists()
+
+        mkpath(target)
+        assert target.exists()

From 5904204aa175d9b4219742a137608eed190cc51b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Oct 2024 16:59:42 -0400
Subject: [PATCH 1227/1761] Correct the order of operations to ensure that
 failed attempts aren't cached and are thus retried on subsequent operations.

Closes #304
---
 distutils/dir_util.py            | 3 ++-
 distutils/tests/test_dir_util.py | 1 -
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/distutils/dir_util.py b/distutils/dir_util.py
index 5696359cc4..d9782602cf 100644
--- a/distutils/dir_util.py
+++ b/distutils/dir_util.py
@@ -32,8 +32,9 @@ def wrap(self, func):
         def wrapper(path, *args, **kwargs):
             if path.absolute() in self:
                 return
+            result = func(path, *args, **kwargs)
             self.add(path.absolute())
-            return func(path, *args, **kwargs)
+            return result
 
         return wrapper
 
diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index ee76d05316..fcc37ac568 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -112,7 +112,6 @@ def test_copy_tree_exception_in_listdir(self):
             src = self.tempdirs[-1]
             dir_util.copy_tree(src, None)
 
-    @pytest.mark.xfail(reason="#304")
     def test_mkpath_exception_uncached(self, monkeypatch, tmp_path):
         """
         Caching should not remember failed attempts.
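
The one-line reorder above implements a general rule for memoized side-effecting functions: record success only after the wrapped call returns, so an exception leaves no poisoned cache entry and the operation is retried next time. A stripped-down sketch of the wrapper (illustrative, not the exact dir_util code):

    import functools
    import pathlib

    def once_per_path(func):
        done = set()

        @functools.wraps(func)
        def wrapper(path, *args, **kwargs):
            key = path.absolute()
            if key in done:
                return None
            result = func(path, *args, **kwargs)  # may raise: nothing cached yet
            done.add(key)                         # cache only after success
            return result

        return wrapper

    @once_per_path
    def mkdir_once(path: pathlib.Path) -> None:
        path.mkdir(parents=True, exist_ok=True)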

From 464735f1f188fd2dd6eaac12d2985e851e4658bd Mon Sep 17 00:00:00 2001
From: Aohan Dang 
Date: Tue, 8 Oct 2024 10:14:57 -0400
Subject: [PATCH 1228/1761] Fix issue with absolute path with Python 3.13 on
 Windows

With Python 3.13 on Windows, `os.path.isabs()` no longer returns `True`
for a path that starts with a slash. Thus, when the argument to
`_make_relative()` is an absolute path, the return value starts with a
slash on Python 3.13 and does not start with a slash on older Python
versions. This causes the extension module build directory to be
calculated incorrectly with Python 3.13 on Windows.

Fix this by ensuring that the return value does not start with a slash.
---
 distutils/ccompiler.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 5e73e56d02..fdbb1ca795 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -989,7 +989,8 @@ def _make_relative(base):
         # Chop off the drive
         no_drive = os.path.splitdrive(base)[1]
         # If abs, chop off leading /
-        return no_drive[os.path.isabs(no_drive) :]
+        is_abs = os.path.isabs(no_drive) or (sys.platform == 'win32' and no_drive.startswith(('/', '\\')))
+        return no_drive[is_abs:]
 
     def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
         assert output_dir is not None
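
The behaviour change described above is easy to reproduce with `ntpath` (the Windows flavour of `os.path`, importable on any platform); a drive-less path with a leading slash stopped counting as absolute in Python 3.13:

    import ntpath

    no_drive = ntpath.splitdrive('C:/pkg/mod')[1]  # '/pkg/mod'
    # Python <= 3.12 Windows semantics: True; Python 3.13: False,
    # since a path without a drive or UNC share is no longer "absolute"
    print(ntpath.isabs(no_drive))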

From e6ba3ad63812c4160a4b0d585316329ceb750387 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 21 Oct 2024 15:11:21 +0100
Subject: [PATCH 1229/1761] Remove unused type ignore

---
 pkg_resources/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 47824ab66e..f1f0ef2535 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -2648,7 +2648,7 @@ def _cygwin_patch(filename: StrOrBytesPath):  # pragma: nocover
     would probably better, in Cygwin even more so, except
     that this seems to be by design...
     """
-    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename  # type: ignore[type-var] # python/mypy#17952
+    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
 
 
 if TYPE_CHECKING:

From 9cefa0af0cbfda14d8852ad8d55ca2ca29a99983 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 12 Aug 2024 13:02:44 -0400
Subject: [PATCH 1230/1761] Disable and revert unpacked-list-comprehension
 (UP027)

---
 ruff.toml                         | 5 +++--
 setuptools/package_index.py       | 5 +----
 setuptools/tests/test_egg_info.py | 4 ++--
 3 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index b55b4e8067..53d644f6a5 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -12,18 +12,19 @@ extend-select = [
 
 	# local
 	"ANN2", # missing-return-type-*
-	"FA", # flake8-future-annotations
 	"F404", # late-future-import
+	"FA", # flake8-future-annotations
 	"I", # isort
 	"PYI", # flake8-pyi
+	"TRY", # tryceratops
 	"UP", # pyupgrade
-	"TRY",
 	"YTT", # flake8-2020
 ]
 ignore = [
 	"TRY003", # raise-vanilla-args, avoid multitude of exception classes
 	"TRY301", # raise-within-try, it's handy
 	"UP015", # redundant-open-modes, explicit is preferred
+	"UP027", # unpacked-list-comprehension, is actually slower for cases relevant to unpacking, set for deprecation: https://github.com/astral-sh/ruff/issues/12754
 	"UP030", # temporarily disabled
 	"UP031", # temporarily disabled
 	"UP032", # temporarily disabled
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 9e01d5e082..9b3769fac9 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -561,10 +561,7 @@ def not_found_in_index(self, requirement):
         if self[requirement.key]:  # we've seen at least one distro
             meth, msg = self.info, "Couldn't retrieve index page for %r"
         else:  # no distros seen for this name, might be misspelled
-            meth, msg = (
-                self.warn,
-                "Couldn't find index page for %r (maybe misspelled?)",
-            )
+            meth, msg = self.warn, "Couldn't find index page for %r (maybe misspelled?)"
         meth(msg, requirement.unsafe_name)
         self.scan_all()
 
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 6e8d0c68c3..12d6b30a8b 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -283,9 +283,9 @@ def parametrize(*test_list, **format_dict):
                 else:
                     install_cmd_kwargs = {}
                 name = name_kwargs[0].strip()
-                setup_py_requires, setup_cfg_requires, expected_requires = (
+                setup_py_requires, setup_cfg_requires, expected_requires = [
                     DALS(a).format(**format_dict) for a in test_params
-                )
+                ]
                 for id_, requires, use_cfg in (
                     (name, setup_py_requires, False),
                     (name + '_in_setup_cfg', setup_cfg_requires, True),
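
UP027 would have rewritten the restored list comprehension into a generator expression, but (as the ruff.toml comment notes) the generator form is the slower one when the result is immediately unpacked, and the rule was slated for deprecation upstream. A quick way to check the claim on any interpreter:

    import timeit

    list_form = "a, b, c = [x * 2 for x in (1, 2, 3)]"
    gen_form = "a, b, c = (x * 2 for x in (1, 2, 3))"
    print("list:", timeit.timeit(list_form, number=1_000_000))
    print("gen: ", timeit.timeit(gen_form, number=1_000_000))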

From 5bc3ebfe66a218261ca2884766a1e6c906ff70c4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 21 Oct 2024 12:53:41 +0100
Subject: [PATCH 1231/1761] Fix possible invalid dynamic behaviour of
 optional-dependencies

---
 setuptools/config/_apply_pyprojecttoml.py     |  6 ++--
 .../config/test_pyprojecttoml_dynamic_deps.py | 32 ++++++++++++-------
 2 files changed, 25 insertions(+), 13 deletions(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 23179f3548..16fe753b58 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -217,8 +217,10 @@ def _dependencies(dist: Distribution, val: list, _root_dir):
 
 
 def _optional_dependencies(dist: Distribution, val: dict, _root_dir):
-    existing = getattr(dist, "extras_require", None) or {}
-    dist.extras_require = {**existing, **val}
+    if getattr(dist, "extras_require", None):
+        msg = "`extras_require` overwritten in `pyproject.toml` (optional-dependencies)"
+        SetuptoolsWarning.emit(msg)
+    dist.extras_require = val
 
 
 def _ext_modules(dist: Distribution, val: list[dict]) -> list[Extension]:
diff --git a/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
index 37e5234a45..e42f28ffaa 100644
--- a/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
+++ b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py
@@ -5,6 +5,7 @@
 
 from setuptools.config.pyprojecttoml import apply_configuration
 from setuptools.dist import Distribution
+from setuptools.warnings import SetuptoolsWarning
 
 
 def test_dynamic_dependencies(tmp_path):
@@ -77,23 +78,32 @@ def test_mixed_dynamic_optional_dependencies(tmp_path):
 
             [tool.setuptools.dynamic.optional-dependencies.images]
             file = ["requirements-images.txt"]
-
-            [build-system]
-            requires = ["setuptools", "wheel"]
-            build-backend = "setuptools.build_meta"
             """
         ),
     }
 
     path.build(files, prefix=tmp_path)
-
-    # Test that the mix-and-match doesn't currently validate.
     pyproject = tmp_path / "pyproject.toml"
     with pytest.raises(ValueError, match="project.optional-dependencies"):
         apply_configuration(Distribution(), pyproject)
 
-    # Explicitly disable the validation and try again, to see that the mix-and-match
-    # result would be correct.
-    dist = Distribution()
-    dist = apply_configuration(dist, pyproject, ignore_option_errors=True)
-    assert dist.extras_require == {"docs": ["sphinx"], "images": ["pillow~=42.0"]}
+
+def test_mixed_extras_require_optional_dependencies(tmp_path):
+    files = {
+        "pyproject.toml": cleandoc(
+            """
+            [project]
+            name = "myproj"
+            version = "1.0"
+            optional-dependencies.docs = ["sphinx"]
+            """
+        ),
+    }
+
+    path.build(files, prefix=tmp_path)
+    pyproject = tmp_path / "pyproject.toml"
+
+    with pytest.warns(SetuptoolsWarning, match=".extras_require. overwritten"):
+        dist = Distribution({"extras_require": {"hello": ["world"]}})
+        dist = apply_configuration(dist, pyproject)
+        assert dist.extras_require == {"docs": ["sphinx"]}
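
The new test pins down the intended semantics: a static `optional-dependencies` table in `pyproject.toml` replaces any `extras_require` passed programmatically, and a `SetuptoolsWarning` flags the clash instead of the previous silent merge. In outline, for a project whose `pyproject.toml` declares `optional-dependencies.docs = ["sphinx"]` (a sketch mirroring the test above):

    from setuptools.config.pyprojecttoml import apply_configuration
    from setuptools.dist import Distribution

    dist = Distribution({"extras_require": {"hello": ["world"]}})
    dist = apply_configuration(dist, "pyproject.toml")  # warns: extras_require overwritten
    print(dist.extras_require)  # {'docs': ['sphinx']} -- replaced, not merged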

From 446e58f7a147015e0946e76083d86db8e30e7ffb Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 21 Oct 2024 11:30:28 +0100
Subject: [PATCH 1232/1761] Adopt modern syntactic sugar in setupcfg.py

---
 setuptools/config/setupcfg.py | 15 ++++++---------
 1 file changed, 6 insertions(+), 9 deletions(-)

diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 4fee109e26..5b4e1e8d95 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -304,7 +304,7 @@ def __setitem__(self, option_name, value) -> None:
             return
 
         simple_setter = functools.partial(target_obj.__setattr__, option_name)
-        setter = getattr(target_obj, 'set_%s' % option_name, simple_setter)
+        setter = getattr(target_obj, f"set_{option_name}", simple_setter)
         setter(parsed)
 
         self.set_options.append(option_name)
@@ -372,8 +372,8 @@ def parser(value):
             exclude_directive = 'file:'
             if value.startswith(exclude_directive):
                 raise ValueError(
-                    'Only strings are accepted for the {0} field, '
-                    'files are not accepted'.format(key)
+                    f'Only strings are accepted for the {key} field, '
+                    'files are not accepted'
                 )
             return value
 
@@ -491,12 +491,12 @@ def parse(self) -> None:
         for section_name, section_options in self.sections.items():
             method_postfix = ''
             if section_name:  # [section.option] variant
-                method_postfix = '_%s' % section_name
+                method_postfix = f"_{section_name}"
 
             section_parser_method: Callable | None = getattr(
                 self,
                 # Dots in section names are translated into dunderscores.
-                ('parse_section%s' % method_postfix).replace('.', '__'),
+                f'parse_section{method_postfix}'.replace('.', '__'),
                 None,
             )
 
@@ -701,10 +701,7 @@ def parse_section_packages__find(self, section_options):
         section_data = self._parse_section_to_dict(section_options, self._parse_list)
 
         valid_keys = ['where', 'include', 'exclude']
-
-        find_kwargs = dict([
-            (k, v) for k, v in section_data.items() if k in valid_keys and v
-        ])
+        find_kwargs = {k: v for k, v in section_data.items() if k in valid_keys and v}
 
         where = find_kwargs.get('where')
         if where is not None:

From 2c99d1e350512c7020c516ccea0d64848399b5d3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 21 Oct 2024 16:17:01 +0100
Subject: [PATCH 1233/1761] Add news fragment

---
 newsfragments/4696.bugfix.rst | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 newsfragments/4696.bugfix.rst

diff --git a/newsfragments/4696.bugfix.rst b/newsfragments/4696.bugfix.rst
new file mode 100644
index 0000000000..77ebf87c48
--- /dev/null
+++ b/newsfragments/4696.bugfix.rst
@@ -0,0 +1,4 @@
+Fix clashes for ``optional-dependencies`` in ``pyproject.toml`` and
+``extras_require`` in ``setup.cfg/setup.py``.
+As per PEP 621, ``optional-dependencies`` has to be honoured and dynamic
+behaviour is not allowed.

From 99c75c945ac7afd3d37ca918125a157492e959da Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 21 Oct 2024 12:24:22 -0400
Subject: [PATCH 1234/1761] Ensured all methods in `setuptools.modified` raise
 a consistent `distutils.errors.DistutilsError` type (#4567)

* Ensured all methods in `setuptools.modified` raise a consistent `distutils.errors.DistutilsError` type

* Update tests to reflect runtime behaviour with SETUPTOOLS_USE_DISTUTILS=stdlib

* Update newsfragments/4567.bugfix.rst

Co-authored-by: Anderson Bravalheri 

* Attempt to fix setuptools/tests/test_distutils_adoption.py

* can't use a setuptools-relative import in the test

* Fix formatting error

---------

Co-authored-by: Anderson Bravalheri 
Co-authored-by: Anderson Bravalheri 
---
 newsfragments/4567.bugfix.rst               |  4 ++
 setuptools/command/build_clib.py            |  9 +---
 setuptools/modified.py                      | 22 +++++++---
 setuptools/tests/test_distutils_adoption.py | 47 +++++++++++++++++++--
 4 files changed, 64 insertions(+), 18 deletions(-)
 create mode 100644 newsfragments/4567.bugfix.rst

diff --git a/newsfragments/4567.bugfix.rst b/newsfragments/4567.bugfix.rst
new file mode 100644
index 0000000000..7d7bb282e1
--- /dev/null
+++ b/newsfragments/4567.bugfix.rst
@@ -0,0 +1,4 @@
+Ensured methods in ``setuptools.modified`` raise a consistent
+``distutils.errors.DistutilsError`` type where possible
+(except in the deprecated use case of ``SETUPTOOLS_USE_DISTUTILS=stdlib``)
+-- by :user:`Avasam`
diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py
index d532762ebe..eab08e70f2 100644
--- a/setuptools/command/build_clib.py
+++ b/setuptools/command/build_clib.py
@@ -1,17 +1,10 @@
 from ..dist import Distribution
+from ..modified import newer_pairwise_group
 
 import distutils.command.build_clib as orig
 from distutils import log
 from distutils.errors import DistutilsSetupError
 
-try:
-    from distutils._modified import (  # pyright: ignore[reportMissingImports]
-        newer_pairwise_group,
-    )
-except ImportError:
-    # fallback for SETUPTOOLS_USE_DISTUTILS=stdlib
-    from .._distutils._modified import newer_pairwise_group
-
 
 class build_clib(orig.build_clib):
     """
diff --git a/setuptools/modified.py b/setuptools/modified.py
index 245a61580b..6ba02fab68 100644
--- a/setuptools/modified.py
+++ b/setuptools/modified.py
@@ -1,8 +1,18 @@
-from ._distutils._modified import (
-    newer,
-    newer_group,
-    newer_pairwise,
-    newer_pairwise_group,
-)
+try:
+    # Ensure a DistutilsError raised by these methods is the same as distutils.errors.DistutilsError
+    from distutils._modified import (
+        newer,
+        newer_group,
+        newer_pairwise,
+        newer_pairwise_group,
+    )
+except ImportError:
+    # fallback for SETUPTOOLS_USE_DISTUTILS=stdlib, because _modified never existed in stdlib
+    from ._distutils._modified import (
+        newer,
+        newer_group,
+        newer_pairwise,
+        newer_pairwise_group,
+    )
 
 __all__ = ['newer', 'newer_pairwise', 'newer_group', 'newer_pairwise_group']
diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py
index 0b020ba9fc..aabfdd283a 100644
--- a/setuptools/tests/test_distutils_adoption.py
+++ b/setuptools/tests/test_distutils_adoption.py
@@ -114,6 +114,7 @@ def test_distutils_has_origin():
 """
 
 
+@pytest.mark.usefixtures("tmpdir_cwd")
 @pytest.mark.parametrize(
     "distutils_version, imported_module",
     [
@@ -125,9 +126,7 @@ def test_distutils_has_origin():
         ("local", "archive_util"),
     ],
 )
-def test_modules_are_not_duplicated_on_import(
-    distutils_version, imported_module, tmpdir_cwd, venv
-):
+def test_modules_are_not_duplicated_on_import(distutils_version, imported_module, venv):
     env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
     script = ENSURE_IMPORTS_ARE_NOT_DUPLICATED.format(imported_module=imported_module)
     cmd = ['python', '-c', script]
@@ -145,6 +144,7 @@ def test_modules_are_not_duplicated_on_import(
 """
 
 
+@pytest.mark.usefixtures("tmpdir_cwd")
 @pytest.mark.parametrize(
     "distutils_version",
     [
@@ -152,8 +152,47 @@ def test_modules_are_not_duplicated_on_import(
         pytest.param("stdlib", marks=skip_without_stdlib_distutils),
     ],
 )
-def test_log_module_is_not_duplicated_on_import(distutils_version, tmpdir_cwd, venv):
+def test_log_module_is_not_duplicated_on_import(distutils_version, venv):
     env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
     cmd = ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED]
     output = venv.run(cmd, env=win_sr(env), **_TEXT_KWARGS).strip()
     assert output == "success"
+
+
+ENSURE_CONSISTENT_ERROR_FROM_MODIFIED_PY = r"""
+from setuptools.modified import newer
+from {imported_module}.errors import DistutilsError
+
+# Can't use pytest.raises in this context
+try:
+    newer("", "")
+except DistutilsError:
+    print("success")
+else:
+    raise AssertionError("Expected to raise")
+"""
+
+
+@pytest.mark.usefixtures("tmpdir_cwd")
+@pytest.mark.parametrize(
+    "distutils_version, imported_module",
+    [
+        ("local", "distutils"),
+        # Unfortunately we still get ._distutils.errors.DistutilsError with SETUPTOOLS_USE_DISTUTILS=stdlib
+        # But that's a deprecated use-case we don't mind not fully supporting in newer code
+        pytest.param(
+            "stdlib", "setuptools._distutils", marks=skip_without_stdlib_distutils
+        ),
+    ],
+)
+def test_consistent_error_from_modified_py(distutils_version, imported_module, venv):
+    env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
+    cmd = [
+        'python',
+        '-c',
+        ENSURE_CONSISTENT_ERROR_FROM_MODIFIED_PY.format(
+            imported_module=imported_module
+        ),
+    ]
+    output = venv.run(cmd, env=win_sr(env), **_TEXT_KWARGS).strip()
+    assert output == "success"
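
The underlying hazard is that setuptools can end up with two copies of distutils (vendored and stdlib), and exception classes from duplicated modules are distinct types, so an `except distutils.errors.DistutilsError:` clause cannot catch the vendored copy's error. A self-contained sketch of the failure mode, with toy classes standing in for the two module copies:

    class StdlibDistutilsError(Exception): ...    # stands in for distutils.errors
    class VendoredDistutilsError(Exception): ...  # stands in for setuptools._distutils.errors

    try:
        raise VendoredDistutilsError("raised by vendored code")
    except StdlibDistutilsError:
        print("caught")  # never reached: same name upstream, different class object
    except VendoredDistutilsError:
        print("distinct types, so the first clause cannot catch it")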

From e5f16a2a990ff18cdef27a22b742f97444867186 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 21 Oct 2024 18:18:34 -0400
Subject: [PATCH 1235/1761] Bump jaraco.test to version 5.5 (py.typed) in the
 test extra (#4651)

---
 mypy.ini       | 10 +++++++---
 pyproject.toml |  2 +-
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index 2dc8aab56f..cfd909ba93 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -51,16 +51,20 @@ disable_error_code = import-not-found
 #     - support for `SETUPTOOLS_USE_DISTUTILS=stdlib` is dropped (#3625)
 #       for setuptools to import `_distutils` directly
 #     - or non-stdlib distutils typings are exposed
+[mypy-distutils.*]
+ignore_missing_imports = True
+
+#  - wheel: does not intend on exposing a programmatic API https://github.com/pypa/wheel/pull/610#issuecomment-2081687671
+[mypy-wheel.*]
+ignore_missing_imports = True
 # - The following are not marked as py.typed:
 #  - jaraco: Since mypy 1.12, the root name of the untyped namespace package gets called-out too
 #  - jaraco.develop: https://github.com/jaraco/jaraco.develop/issues/22
 #  - jaraco.envs: https://github.com/jaraco/jaraco.envs/issues/7
 #  - jaraco.packaging: https://github.com/jaraco/jaraco.packaging/issues/20
 #  - jaraco.path: https://github.com/jaraco/jaraco.path/issues/2
-#  - jaraco.test: https://github.com/jaraco/jaraco.test/issues/7
 #  - jaraco.text: https://github.com/jaraco/jaraco.text/issues/17
-#  - wheel: does not intend on exposing a programmatic API https://github.com/pypa/wheel/pull/610#issuecomment-2081687671
-[mypy-distutils.*,jaraco,jaraco.develop,jaraco.envs,jaraco.packaging.*,jaraco.path,jaraco.test.*,jaraco.text,wheel.*]
+[mypy-jaraco,jaraco.develop,jaraco.envs,jaraco.packaging.*,jaraco.path,jaraco.text]
 ignore_missing_imports = True
 
 # Even when excluding a module, import issues can show up due to following import
diff --git a/pyproject.toml b/pyproject.toml
index 9dca4c67f4..1a4906fb0c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -59,7 +59,7 @@ test = [
 	# workaround for pypa/pyproject-hooks#206
 	"pyproject-hooks!=1.1",
 
-	"jaraco.test",
+	"jaraco.test>=5.5", # py.typed
 ]
 
 doc = [

From a39336ba37c50695f5a57be20d8452e46a4ceb10 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 22 Oct 2024 08:38:04 -0400
Subject: [PATCH 1236/1761] Ruff: enable all pyflakes and perf rules (#4556)

---
 ruff.toml                          | 4 +++-
 setuptools/command/easy_install.py | 2 +-
 setuptools/tests/test_manifest.py  | 8 +-------
 3 files changed, 5 insertions(+), 9 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 53d644f6a5..484246aa1e 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -7,20 +7,22 @@ exclude = [
 [lint]
 extend-select = [
 	"C901",
-	"PERF401",
 	"W",
 
 	# local
 	"ANN2", # missing-return-type-*
+	"F", # Pyflakes
 	"F404", # late-future-import
 	"FA", # flake8-future-annotations
 	"I", # isort
+	"PERF", # Perflint
 	"PYI", # flake8-pyi
 	"TRY", # tryceratops
 	"UP", # pyupgrade
 	"YTT", # flake8-2020
 ]
 ignore = [
+	"PERF203", # try-except-in-loop, micro-optimisation with many false-positive. Worth checking but don't block CI
 	"TRY003", # raise-vanilla-args, avoid multitude of exception classes
 	"TRY301", # raise-within-try, it's handy
 	"UP015", # redundant-open-modes, explicit is preferred
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 25a9eee937..2a59c74ccd 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -2187,7 +2187,7 @@ def get_args(cls, dist, header=None):
         spec = str(dist.as_requirement())
         for type_ in 'console', 'gui':
             group = type_ + '_scripts'
-            for name, ep in dist.get_entry_map(group).items():
+            for name in dist.get_entry_map(group).keys():
                 cls._ensure_safe_name(name)
                 script_text = cls.template % locals()
                 args = cls._get_script_args(type_, name, header, script_text)
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
index ae5572b83c..444c7a3689 100644
--- a/setuptools/tests/test_manifest.py
+++ b/setuptools/tests/test_manifest.py
@@ -485,14 +485,8 @@ def test_process_template_line_invalid(self):
             'prune',
             'blarg',
         ):
-            try:
+            with pytest.raises(DistutilsTemplateError):
                 file_list.process_template_line(action)
-            except DistutilsTemplateError:
-                pass
-            except Exception:
-                assert False, "Incorrect error thrown"
-            else:
-                assert False, "Should have thrown an error"
 
     def test_include(self, caplog):
         caplog.set_level(logging.DEBUG)

From 1f92af5201ebffcf4c7107f9d9dfcc687e57e2cc Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 25 Oct 2024 17:17:41 +0100
Subject: [PATCH 1237/1761] Sync the pre-commit hook with the ruff version that
 is automatically pulled in by tests

---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a88677f50c..04870d16bf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.5.7
+  rev: v0.7.1
   hooks:
   - id: ruff
     args: [--fix, --unsafe-fixes]

From c498d162e7501165fb8415fec4ed3282b2f04af9 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 25 Oct 2024 17:54:35 +0100
Subject: [PATCH 1238/1761] Satisfy ruff linter - automatic fix

---
 setuptools/command/alias.py              |  3 +--
 setuptools/command/bdist_wheel.py        |  6 +++---
 setuptools/command/install.py            |  3 +--
 setuptools/tests/config/test_setupcfg.py |  4 +---
 setuptools/tests/test_build_py.py        | 12 +++++------
 setuptools/tests/test_easy_install.py    | 27 ++++++++++++------------
 6 files changed, 25 insertions(+), 30 deletions(-)

diff --git a/setuptools/command/alias.py b/setuptools/command/alias.py
index 4eed652381..3111d26496 100644
--- a/setuptools/command/alias.py
+++ b/setuptools/command/alias.py
@@ -34,8 +34,7 @@ def finalize_options(self):
         option_base.finalize_options(self)
         if self.remove and len(self.args) != 1:
             raise DistutilsOptionError(
-                "Must specify exactly one argument (the alias name) when "
-                "using --remove"
+                "Must specify exactly one argument (the alias name) when using --remove"
             )
 
     def run(self):
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index aeade98f6f..f377cd5ee6 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -390,9 +390,9 @@ def get_tag(self) -> tuple[str, str, str]:
             supported_tags = [
                 (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
             ]
-            assert (
-                tag in supported_tags
-            ), f"would build wheel with unsupported tag {tag}"
+            assert tag in supported_tags, (
+                f"would build wheel with unsupported tag {tag}"
+            )
         return tag
 
     def run(self):
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index e2ec1abdde..e1dff0aac7 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -70,8 +70,7 @@ def finalize_options(self):
         elif self.single_version_externally_managed:
             if not self.root and not self.record:
                 raise DistutilsArgError(
-                    "You must specify --record or --root when building system"
-                    " packages"
+                    "You must specify --record or --root when building system packages"
                 )
 
     def handle_extra_path(self):
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 8d95798123..9f225416c0 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -415,9 +415,7 @@ def test_not_utf8(self, tmpdir):
         """
         fake_env(
             tmpdir,
-            '# vim: set fileencoding=iso-8859-15 :\n'
-            '[metadata]\n'
-            'description = éàïôñ\n',
+            '# vim: set fileencoding=iso-8859-15 :\n[metadata]\ndescription = éàïôñ\n',
             encoding='iso-8859-15',
         )
         with pytest.raises(UnicodeDecodeError):
diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index b8cd77dc0b..e64cfa2e4b 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -55,9 +55,9 @@ def test_recursive_in_package_data_glob(tmpdir_cwd):
     dist.parse_command_line()
     dist.run_commands()
 
-    assert stat.S_ISREG(
-        os.stat('build/lib/path/subpath/subsubpath/data').st_mode
-    ), "File is not included"
+    assert stat.S_ISREG(os.stat('build/lib/path/subpath/subsubpath/data').st_mode), (
+        "File is not included"
+    )
 
 
 def test_read_only(tmpdir_cwd):
@@ -116,9 +116,9 @@ def test_executable_data(tmpdir_cwd):
     dist.parse_command_line()
     dist.run_commands()
 
-    assert (
-        os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC
-    ), "Script is not executable"
+    assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, (
+        "Script is not executable"
+    )
 
 
 EXAMPLE_WITH_MANIFEST = {
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index de257db80c..933cebb78e 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -327,9 +327,9 @@ def test_many_pth_distributions_merge_together(self, tmpdir):
         pth_path = str(pth_subdir.join("file1.pth"))
         pth1 = PthDistributions(pth_path)
         pth2 = PthDistributions(pth_path)
-        assert (
-            pth1.paths == pth2.paths == []
-        ), "unless there would be some default added at some point"
+        assert pth1.paths == pth2.paths == [], (
+            "unless there would be some default added at some point"
+        )
         # and so putting the src_subdir in a folder distinct from the pth one,
         # so that PthDistributions keeps it absolute
         new_src_path = tmpdir.join("src_subdir")
@@ -337,17 +337,17 @@ def test_many_pth_distributions_merge_together(self, tmpdir):
         new_src_path_str = str(new_src_path)
         pth1.paths.append(new_src_path_str)
         pth1.save()
-        assert (
-            pth1.paths
-        ), "the new_src_path added must still be present/valid in pth1 after save"
+        assert pth1.paths, (
+            "the new_src_path added must still be present/valid in pth1 after save"
+        )
         # now,
-        assert (
-            new_src_path_str not in pth2.paths
-        ), "right before we save the entry should still not be present"
+        assert new_src_path_str not in pth2.paths, (
+            "right before we save the entry should still not be present"
+        )
         pth2.save()
-        assert (
-            new_src_path_str in pth2.paths
-        ), "the new_src_path entry should have been added by pth2 with its save() call"
+        assert new_src_path_str in pth2.paths, (
+            "the new_src_path entry should have been added by pth2 with its save() call"
+        )
         assert pth2.paths[-1] == new_src_path, (
             "and it should match exactly on the last entry actually "
             "given we append to it in save()"
@@ -719,8 +719,7 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
                         run_setup(test_setup_py, ['--name'])
                     except pkg_resources.VersionConflict:
                         self.fail(
-                            'Installing setup.py requirements '
-                            'caused a VersionConflict'
+                            'Installing setup.py requirements caused a VersionConflict'
                         )
 
                 assert 'FAIL' not in stdout.getvalue()

From 1155ca8001fb1619714dc5d6e4f1344803106697 Mon Sep 17 00:00:00 2001
From: Aohan Dang 
Date: Wed, 9 Oct 2024 12:54:25 -0400
Subject: [PATCH 1239/1761] Fix wheel ABI tag for debug Python 3.13 on Windows

---
 newsfragments/4674.bugfix.rst        | 1 +
 setuptools/command/bdist_wheel.py    | 3 +++
 setuptools/tests/test_bdist_wheel.py | 6 ++++++
 3 files changed, 10 insertions(+)
 create mode 100644 newsfragments/4674.bugfix.rst
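
For context: CPython debug builds (compiled with Py_DEBUG) expose sys.gettotalrefcount, which release builds do not, so its presence is what signals the missing "d" ABI flag. A minimal sketch of the detection, assuming a Windows CPython whose SOABI config var looks like "cp313-win_amd64"; the helper name is invented and is not the actual bdist_wheel code:

    import sys
    import sysconfig

    def windows_abi_tag() -> str:
        # Hypothetical helper mirroring the fix in get_abi_tag().
        # On Windows CPython, SOABI is e.g. "cp313-win_amd64"
        # (or "cp313t-win_amd64" on free-threaded builds).
        soabi = sysconfig.get_config_var("SOABI") or ""
        abi = soabi.split("-")[0]
        # Only debug builds define sys.gettotalrefcount; append "d" there.
        if hasattr(sys, "gettotalrefcount"):
            abi += "d"
        return abi

The test change below exercises exactly this by monkeypatching sys.gettotalrefcount in and out.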

diff --git a/newsfragments/4674.bugfix.rst b/newsfragments/4674.bugfix.rst
new file mode 100644
index 0000000000..9a6d2454ab
--- /dev/null
+++ b/newsfragments/4674.bugfix.rst
@@ -0,0 +1 @@
+Fix the ABI tag when building a wheel using the debug build of Python 3.13 on Windows. Previously, the ABI tag was missing the ``"d"`` flag.
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index f377cd5ee6..3b64d6270e 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -129,6 +129,9 @@ def get_abi_tag() -> str | None:
     elif soabi and impl == "cp" and soabi.startswith("cp"):
         # Windows
         abi = soabi.split("-")[0]
+        if hasattr(sys, "gettotalrefcount"):
+            # using debug build; append "d" flag
+            abi += "d"
     elif soabi and impl == "pp":
         # we want something like pypy36-pp73
         abi = "-".join(soabi.split("-")[:2])
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 47200d0a26..141ef716ab 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -470,6 +470,12 @@ def test_get_abi_tag_windows(monkeypatch):
     monkeypatch.setattr(tags, "interpreter_name", lambda: "cp")
     monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "cp313-win_amd64")
     assert get_abi_tag() == "cp313"
+    monkeypatch.setattr(sys, "gettotalrefcount", lambda: 1, False)
+    assert get_abi_tag() == "cp313d"
+    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "cp313t-win_amd64")
+    assert get_abi_tag() == "cp313td"
+    monkeypatch.delattr(sys, "gettotalrefcount")
+    assert get_abi_tag() == "cp313t"
 
 
 def test_get_abi_tag_pypy_old(monkeypatch):

From 0bc324806ce62e93fcbcf9d3f1b018f85176da52 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 25 Oct 2024 13:41:19 -0400
Subject: [PATCH 1240/1761] Merge simple type annotations from typeshed (#4504)

* Merge simple type annotations from typeshed

* Incorrect TypeAlias usage

* Wrap self.always_copy in bool

* Update setuptools/command/editable_wheel.py

* Add jaraco ignore_missing_imports

* Fix type-errors post-merge

* More distutils workarounds

* Remove StrIter

---------

Co-authored-by: Anderson Bravalheri 
---
 mypy.ini                                  | 14 ++++---
 ruff.toml                                 |  2 +
 setuptools/__init__.py                    |  8 ++--
 setuptools/_path.py                       |  6 +--
 setuptools/build_meta.py                  | 35 +++++++++--------
 setuptools/command/bdist_egg.py           | 16 +++++++-
 setuptools/command/bdist_wheel.py         |  6 +--
 setuptools/command/build_ext.py           |  8 ++--
 setuptools/command/build_py.py            | 18 +++++----
 setuptools/command/easy_install.py        | 26 +++++++------
 setuptools/command/editable_wheel.py      | 24 +++++-------
 setuptools/command/egg_info.py            |  6 +--
 setuptools/command/install_lib.py         |  9 +++--
 setuptools/command/install_scripts.py     |  2 +-
 setuptools/config/_apply_pyprojecttoml.py | 22 ++++++-----
 setuptools/config/expand.py               | 16 ++++----
 setuptools/config/pyprojecttoml.py        |  6 +--
 setuptools/config/setupcfg.py             | 15 +++++---
 setuptools/depends.py                     | 16 ++++++--
 setuptools/discovery.py                   | 25 +++++++-----
 setuptools/dist.py                        | 37 ++++++++++--------
 setuptools/errors.py                      | 47 ++++++++++-------------
 setuptools/extension.py                   | 24 +++++++++---
 setuptools/glob.py                        |  4 +-
 setuptools/installer.py                   |  3 +-
 setuptools/logging.py                     |  2 +-
 setuptools/package_index.py               | 28 +++++++-------
 setuptools/sandbox.py                     | 12 +++---
 setuptools/warnings.py                    |  7 +++-
 29 files changed, 253 insertions(+), 191 deletions(-)
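
For context: the recurring rewrite in this patch replaces aliases of the form _X: TypeAlias = distutils.core.X with a conditional import, because mypy cannot use a type[T] alias as a base class (python/mypy#10962), while a re-imported name subclasses fine. A minimal sketch of the pattern, assuming distutils is importable (setuptools provides it on Python 3.12+); the subclass is invented for illustration:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Static checkers see the real class and allow subclassing it;
        # the equivalent TypeAlias assignment trips mypy#10962.
        from distutils.core import Command as _Command
    else:
        # At runtime the class is resolved dynamically (a stand-in for
        # monkey.get_unpatched(distutils.core.Command)).
        import distutils.core

        _Command = distutils.core.Command

    class ExampleCommand(_Command):  # invented subclass showing _Command as a base
        user_options: list = []

The same pattern is applied to Distribution in dist.py and Extension in extension.py below.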

diff --git a/mypy.ini b/mypy.ini
index cfd909ba93..cadfa6be59 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -5,7 +5,7 @@
 strict = False
 
 # Early opt-in even when strict = False
-warn_unused_ignores = True
+# warn_unused_ignores = True # Disabled until we have distutils stubs for Python 3.12+
 warn_redundant_casts = True
 enable_error_code = ignore-without-code
 
@@ -34,12 +34,14 @@ exclude = (?x)(
 	# Duplicate module name
 	| ^pkg_resources/tests/data/my-test-package-source/setup.py$
 	)
-
-# DistributionMetadata.license_files and DistributionMetadata.license_file
-# are dynamically patched in setuptools/_core_metadata.py
-# and no DistributionMetadata subclass exists in setuptools
 [mypy-setuptools.*]
-disable_error_code = attr-defined
+disable_error_code =
+	# DistributionMetadata.license_files and DistributionMetadata.license_file
+	# are dynamically patched in setuptools/_core_metadata.py
+	# and no DistributionMetadata subclass exists in setuptools
+	attr-defined,
+	# See issue described below about distutils across Python versions
+	has-type,
 
 # - pkg_resources tests create modules that won't exist statically before the test is run.
 #   Let's ignore all "import-not-found" since, if an import really wasn't found, then the test would fail.
diff --git a/ruff.toml b/ruff.toml
index 484246aa1e..e154cdf70d 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -56,6 +56,8 @@ ignore = [
 # Only enforcing return type annotations for public modules
 "**/tests/**" = ["ANN2"]
 "tools/**" = ["ANN2"]
+# Temporarily disabling enforced return annotations for the setuptools package while it is progressively typed from typeshed
+"setuptools/**" = ["ANN2"]
 # Suppress nuisance warnings about module-import-not-at-top-of-file (E402) due to workaround for #4476
 "setuptools/__init__.py" = ["E402"]
 "pkg_resources/__init__.py" = ["E402"]
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 3a1cc58aa3..eba86c4f9c 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -89,7 +89,7 @@ def finalize_options(self):
         _fetch_build_eggs(dist)
 
 
-def _fetch_build_eggs(dist):
+def _fetch_build_eggs(dist: Distribution):
     try:
         dist.fetch_build_eggs(dist.setup_requires)
     except Exception as ex:
@@ -120,10 +120,8 @@ def setup(**attrs):
 setup.__doc__ = distutils.core.setup.__doc__
 
 if TYPE_CHECKING:
-    from typing_extensions import TypeAlias
-
     # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962
-    _Command: TypeAlias = distutils.core.Command
+    from distutils.core import Command as _Command
 else:
     _Command = monkey.get_unpatched(distutils.core.Command)
 
@@ -188,7 +186,7 @@ def _ensure_stringlike(self, option, what, default=None):
             )
         return val
 
-    def ensure_string_list(self, option):
+    def ensure_string_list(self, option: str):
         r"""Ensure that 'option' is a list of strings.  If 'option' is
         currently a string, we split it either on /,\s*/ or /\s+/, so
         "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
diff --git a/setuptools/_path.py b/setuptools/_path.py
index c7bef83365..5a2bdbd0d4 100644
--- a/setuptools/_path.py
+++ b/setuptools/_path.py
@@ -5,13 +5,11 @@
 import sys
 from typing import TYPE_CHECKING, Union
 
-if TYPE_CHECKING:
-    from typing_extensions import TypeAlias
-
-
 from more_itertools import unique_everseen
 
 if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
     StrPath: TypeAlias = Union[str, os.PathLike[str]]  #  Same as _typeshed.StrPath
 else:
     # Python 3.8 support
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index a3c83c7002..afe0b57b3d 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -38,7 +38,7 @@
 import tokenize
 import warnings
 from pathlib import Path
-from typing import TYPE_CHECKING, Dict, Iterable, Iterator, List, Union
+from typing import TYPE_CHECKING, Iterable, Iterator, List, Mapping, Union
 
 import setuptools
 
@@ -53,7 +53,6 @@
 if TYPE_CHECKING:
     from typing_extensions import TypeAlias
 
-
 __all__ = [
     'get_requires_for_build_sdist',
     'get_requires_for_build_wheel',
@@ -147,7 +146,7 @@ def suppress_known_deprecation():
         yield
 
 
-_ConfigSettings: TypeAlias = Union[Dict[str, Union[str, List[str], None]], None]
+_ConfigSettings: TypeAlias = Union[Mapping[str, Union[str, List[str], None]], None]
 """
 Currently the user can run::
 
@@ -291,7 +290,9 @@ def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]:
 
 
 class _BuildMetaBackend(_ConfigSettingsTranslator):
-    def _get_build_requires(self, config_settings, requirements):
+    def _get_build_requires(
+        self, config_settings: _ConfigSettings, requirements: list[str]
+    ):
         sys.argv = [
             *sys.argv[:1],
             *self._global_args(config_settings),
@@ -305,7 +306,7 @@ def _get_build_requires(self, config_settings, requirements):
 
         return requirements
 
-    def run_setup(self, setup_script='setup.py'):
+    def run_setup(self, setup_script: str = 'setup.py'):
         # Note that we can reuse our build directory between calls
         # Correctness comes first, then optimization later
         __file__ = os.path.abspath(setup_script)
@@ -328,13 +329,15 @@ def run_setup(self, setup_script='setup.py'):
                 "setup-py-deprecated.html",
             )
 
-    def get_requires_for_build_wheel(self, config_settings=None):
+    def get_requires_for_build_wheel(self, config_settings: _ConfigSettings = None):
         return self._get_build_requires(config_settings, requirements=[])
 
-    def get_requires_for_build_sdist(self, config_settings=None):
+    def get_requires_for_build_sdist(self, config_settings: _ConfigSettings = None):
         return self._get_build_requires(config_settings, requirements=[])
 
-    def _bubble_up_info_directory(self, metadata_directory: str, suffix: str) -> str:
+    def _bubble_up_info_directory(
+        self, metadata_directory: StrPath, suffix: str
+    ) -> str:
         """
         PEP 517 requires that the .dist-info directory be placed in the
         metadata_directory. To comply, we MUST copy the directory to the root.
@@ -347,7 +350,7 @@ def _bubble_up_info_directory(self, metadata_directory: str, suffix: str) -> str
             # PEP 517 allow other files and dirs to exist in metadata_directory
         return info_dir.name
 
-    def _find_info_directory(self, metadata_directory: str, suffix: str) -> Path:
+    def _find_info_directory(self, metadata_directory: StrPath, suffix: str) -> Path:
         for parent, dirs, _ in os.walk(metadata_directory):
             candidates = [f for f in dirs if f.endswith(suffix)]
 
@@ -359,14 +362,14 @@ def _find_info_directory(self, metadata_directory: str, suffix: str) -> Path:
         raise errors.InternalError(msg)
 
     def prepare_metadata_for_build_wheel(
-        self, metadata_directory, config_settings=None
+        self, metadata_directory: StrPath, config_settings: _ConfigSettings = None
     ):
         sys.argv = [
             *sys.argv[:1],
             *self._global_args(config_settings),
             "dist_info",
             "--output-dir",
-            metadata_directory,
+            str(metadata_directory),
             "--keep-egg-info",
         ]
         with no_install_setup_requires():
@@ -462,7 +465,7 @@ def build_editable(
             self,
             wheel_directory: StrPath,
             config_settings: _ConfigSettings = None,
-            metadata_directory: str | None = None,
+            metadata_directory: StrPath | None = None,
         ):
             # XXX can or should we hide our editable_wheel command normally?
             info_dir = self._get_dist_info_dir(metadata_directory)
@@ -473,11 +476,13 @@ def build_editable(
                     cmd, ".whl", wheel_directory, config_settings
                 )
 
-        def get_requires_for_build_editable(self, config_settings=None):
+        def get_requires_for_build_editable(
+            self, config_settings: _ConfigSettings = None
+        ):
             return self.get_requires_for_build_wheel(config_settings)
 
         def prepare_metadata_for_build_editable(
-            self, metadata_directory, config_settings=None
+            self, metadata_directory: StrPath, config_settings: _ConfigSettings = None
         ):
             return self.prepare_metadata_for_build_wheel(
                 metadata_directory, config_settings
@@ -496,7 +501,7 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend):
     and will eventually be removed.
     """
 
-    def run_setup(self, setup_script='setup.py'):
+    def run_setup(self, setup_script: str = 'setup.py'):
         # In order to maintain compatibility with scripts assuming that
         # the setup.py script is in a directory on the PYTHONPATH, inject
         # '' into sys.path. (pypa/setuptools#1642)
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index f3b7150208..c9eee16a5d 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -2,6 +2,8 @@
 
 Build .egg distributions"""
 
+from __future__ import annotations
+
 import marshal
 import os
 import re
@@ -9,6 +11,7 @@
 import textwrap
 from sysconfig import get_path, get_python_version
 from types import CodeType
+from typing import TYPE_CHECKING, Literal
 
 from setuptools import Command
 from setuptools.extension import Library
@@ -18,6 +21,12 @@
 from distutils import log
 from distutils.dir_util import mkpath, remove_tree
 
+if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
+# Same as zipfile._ZipFileMode from typeshed
+_ZipFileMode: TypeAlias = Literal["r", "w", "x", "a"]
+
 
 def _get_purelib():
     return get_path("purelib")
@@ -431,7 +440,12 @@ def can_scan():
 
 
 def make_zipfile(
-    zip_filename, base_dir, verbose=False, dry_run=False, compress=True, mode='w'
+    zip_filename,
+    base_dir,
+    verbose: bool = False,
+    dry_run: bool = False,
+    compress=True,
+    mode: _ZipFileMode = 'w',
 ):
     """Create a zip file from all the files under 'base_dir'.  The output
     zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index f377cd5ee6..81a85e217f 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -257,9 +257,9 @@ def initialize_options(self) -> None:
         self.relative = False
         self.owner = None
         self.group = None
-        self.universal: bool = False
-        self.compression: int | str = "deflated"
-        self.python_tag: str = python_tag()
+        self.universal = False
+        self.compression: str | int = "deflated"
+        self.python_tag = python_tag()
         self.build_number: str | None = None
         self.py_limited_api: str | Literal[False] = False
         self.plat_name_supplied = False
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index e2a88ce218..1b9c313ff5 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -89,8 +89,8 @@ def get_abi3_suffix():
 
 class build_ext(_build_ext):
     distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
-    editable_mode: bool = False
-    inplace: bool = False
+    editable_mode = False
+    inplace = False
 
     def run(self):
         """Build extensions in build directory, then copy if --inplace"""
@@ -410,7 +410,7 @@ def link_shared_object(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=False,
+        debug: bool = False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
@@ -445,7 +445,7 @@ def link_shared_object(
         library_dirs=None,
         runtime_library_dirs=None,
         export_symbols=None,
-        debug=False,
+        debug: bool = False,
         extra_preargs=None,
         extra_postargs=None,
         build_temp=None,
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index 584d2c15ac..628a20b40b 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -12,6 +12,8 @@
 
 from more_itertools import unique_everseen
 
+from setuptools._path import StrPath
+
 from ..dist import Distribution
 from ..warnings import SetuptoolsDeprecationWarning
 
@@ -48,14 +50,14 @@ def finalize_options(self):
             del self.__dict__['data_files']
         self.__updated_files = []
 
-    def copy_file(
+    def copy_file(  # type: ignore[override] # No overload, str support only
         self,
-        infile,
-        outfile,
-        preserve_mode=True,
-        preserve_times=True,
-        link=None,
-        level=1,
+        infile: StrPath,
+        outfile: StrPath,
+        preserve_mode: bool = True,
+        preserve_times: bool = True,
+        link: str | None = None,
+        level: object = 1,
     ):
         # Overwrite base class to allow using links
         if link:
@@ -141,7 +143,7 @@ def find_data_files(self, package, src_dir):
         )
         return self.exclude_data_files(package, src_dir, files)
 
-    def get_outputs(self, include_bytecode=True) -> list[str]:
+    def get_outputs(self, include_bytecode: bool = True) -> list[str]:  # type: ignore[override] # Using a real boolean instead of 0|1
         """See :class:`setuptools.commands.build.SubCommand`"""
         if self.editable_mode:
             return list(self.get_output_mapping().keys())
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 2a59c74ccd..6c835db593 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -425,7 +425,7 @@ def expand_dirs(self):
         ]
         self._expand_attrs(dirs)
 
-    def run(self, show_deprecation=True):
+    def run(self, show_deprecation: bool = True):
         if show_deprecation:
             self.announce(
                 "WARNING: The easy_install command is deprecated "
@@ -674,7 +674,7 @@ def _tmpdir(self):
         finally:
             os.path.exists(tmpdir) and _rmtree(tmpdir)
 
-    def easy_install(self, spec, deps=False):
+    def easy_install(self, spec, deps: bool = False):
         with self._tmpdir() as tmpdir:
             if not isinstance(spec, Requirement):
                 if URL_SCHEME(spec):
@@ -711,9 +711,9 @@ def easy_install(self, spec, deps=False):
             else:
                 return self.install_item(spec, dist.location, tmpdir, deps)
 
-    def install_item(self, spec, download, tmpdir, deps, install_needed=False):
+    def install_item(self, spec, download, tmpdir, deps, install_needed: bool = False):
         # Installation is also needed if file in tmpdir or is not an egg
-        install_needed = install_needed or self.always_copy
+        install_needed = install_needed or bool(self.always_copy)
         install_needed = install_needed or os.path.dirname(download) == tmpdir
         install_needed = install_needed or not download.endswith('.egg')
         install_needed = install_needed or (
@@ -759,7 +759,7 @@ def process_distribution(  # noqa: C901
         self,
         requirement,
         dist,
-        deps=True,
+        deps: bool = True,
         *info,
     ):
         self.update_pth(dist)
@@ -860,7 +860,7 @@ def _load_template(dev_path):
         raw_bytes = resource_string('setuptools', name)
         return raw_bytes.decode('utf-8')
 
-    def write_script(self, script_name, contents, mode="t", blockers=()):
+    def write_script(self, script_name, contents, mode: str = "t", blockers=()):
         """Write an executable file to the scripts directory"""
         self.delete_blockers(  # clean up old .py/.pyw w/o a script
             [os.path.join(self.script_dir, x) for x in blockers]
@@ -1143,7 +1143,7 @@ def install_wheel(self, wheel_path, tmpdir):
         """
     )
 
-    def installation_report(self, req, dist, what="Installed"):
+    def installation_report(self, req, dist, what: str = "Installed"):
         """Helpful installation message for display to package users"""
         msg = "\n%(what)s %(eggloc)s%(extras)s"
         if self.multi_version and not self.no_report:
@@ -2080,7 +2080,7 @@ def from_environment(cls):
         return cls([cls._sys_executable()])
 
     @classmethod
-    def from_string(cls, string):
+    def from_string(cls, string: str):
         """
         Construct a command spec from a simple string representing a command
         line parseable by shlex.split.
@@ -2088,7 +2088,7 @@ def from_string(cls, string):
         items = shlex.split(string, **cls.split_args)
         return cls(items)
 
-    def install_options(self, script_text):
+    def install_options(self, script_text: str):
         self.options = shlex.split(self._extract_options(script_text))
         cmdline = subprocess.list2cmdline(self)
         if not isascii(cmdline):
@@ -2218,7 +2218,11 @@ def _get_script_args(cls, type_, name, header, script_text):
         yield (name, header + script_text)
 
     @classmethod
-    def get_header(cls, script_text="", executable=None):
+    def get_header(
+        cls,
+        script_text: str = "",
+        executable: str | CommandSpec | Iterable[str] | None = None,
+    ):
         """Create a #! line, getting options (if any) from script_text"""
         cmd = cls.command_spec_class.best().from_param(executable)
         cmd.install_options(script_text)
@@ -2340,7 +2344,7 @@ def load_launcher_manifest(name):
     return manifest.decode('utf-8') % vars()
 
 
-def _rmtree(path, ignore_errors=False, onexc=auto_chmod):
+def _rmtree(path, ignore_errors: bool = False, onexc=auto_chmod):
     return py311.shutil_rmtree(path, ignore_errors, onexc)
 
 
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 2b21eacbad..c1641a7267 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -23,7 +23,6 @@
 from itertools import chain, starmap
 from pathlib import Path
 from tempfile import TemporaryDirectory
-from types import TracebackType
 from typing import TYPE_CHECKING, Iterable, Iterator, Mapping, Protocol, TypeVar, cast
 
 from .. import Command, _normalization, _path, errors, namespaces
@@ -138,7 +137,6 @@ def run(self):
             self._create_wheel_file(bdist_wheel)
         except Exception:
             traceback.print_exc()
-            # TODO: Fix false-positive [attr-defined] in typeshed
             project = self.distribution.name or self.distribution.get_name()
             _DebuggingTips.emit(project=project)
             raise
@@ -231,7 +229,6 @@ def _set_editable_mode(self):
         """Set the ``editable_mode`` flag in the build sub-commands"""
         dist = self.distribution
         build = dist.get_command_obj("build")
-        # TODO: Update typeshed distutils stubs to overload non-None return type by default
         for cmd_name in build.get_sub_commands():
             cmd = dist.get_command_obj(cmd_name)
             if hasattr(cmd, "editable_mode"):
@@ -380,16 +377,15 @@ def _select_strategy(
 
 
 class EditableStrategy(Protocol):
-    def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]): ...
-
+    def __call__(
+        self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]
+    ): ...
     def __enter__(self) -> Self: ...
-
     def __exit__(
         self,
-        exc_type: type[BaseException] | None,
-        exc_value: BaseException | None,
-        traceback: TracebackType | None,
-        /,
+        _exc_type: object,
+        _exc_value: object,
+        _traceback: object,
     ) -> object: ...
 
 
@@ -399,7 +395,7 @@ def __init__(self, dist: Distribution, name: str, path_entries: list[Path]):
         self.name = name
         self.path_entries = path_entries
 
-    def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]):
         entries = "\n".join(str(p.resolve()) for p in self.path_entries)
         contents = _encode_pth(f"{entries}\n")
         wheel.writestr(f"__editable__.{self.name}.pth", contents)
@@ -444,7 +440,7 @@ def __init__(
         self._file = dist.get_command_obj("build_py").copy_file
         super().__init__(dist, name, [self.auxiliary_dir])
 
-    def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]):
         self._create_links(files, mapping)
         super().__call__(wheel, files, mapping)
 
@@ -461,7 +457,7 @@ def _create_file(self, relative_output: str, src_file: str, link=None):
             dest.parent.mkdir(parents=True)
         self._file(src_file, dest, link=link)
 
-    def _create_links(self, outputs, output_mapping):
+    def _create_links(self, outputs, output_mapping: Mapping[str, str]):
         self.auxiliary_dir.mkdir(parents=True, exist_ok=True)
         link_type = "sym" if _can_symlink_files(self.auxiliary_dir) else "hard"
         normalised = ((self._normalize_output(k), v) for k, v in output_mapping.items())
@@ -538,7 +534,7 @@ def get_implementation(self) -> Iterator[tuple[str, bytes]]:
         content = _encode_pth(f"import {finder}; {finder}.install()")
         yield (f"__editable__.{self.name}.pth", content)
 
-    def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
+    def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]):
         for file, content in self.get_implementation():
             wheel.writestr(file, content)
 
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index bc6c677878..7b9f8f0b72 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -252,7 +252,7 @@ def _get_egg_basename(self, py_version=PY_MAJOR, platform=None):
         """Compute filename of the output egg. Private API."""
         return _egg_basename(self.egg_name, self.egg_version, py_version, platform)
 
-    def write_or_delete_file(self, what, filename, data, force=False):
+    def write_or_delete_file(self, what, filename, data, force: bool = False):
         """Write `data` to `filename` or delete if empty
 
         If `data` is non-empty, this routine is the same as ``write_file()``.
@@ -324,7 +324,7 @@ def find_sources(self):
 class FileList(_FileList):
     # Implementations of the various MANIFEST.in commands
 
-    def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False):
+    def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir: bool = False):
         super().__init__(warn, debug_print)
         self.ignore_egg_info_dir = ignore_egg_info_dir
 
@@ -690,7 +690,7 @@ def overwrite_arg(cmd, basename, filename):
     write_arg(cmd, basename, filename, True)
 
 
-def write_arg(cmd, basename, filename, force=False):
+def write_arg(cmd, basename, filename, force: bool = False):
     argname = os.path.splitext(basename)[0]
     value = getattr(cmd.distribution, argname, None)
     if value is not None:
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index 292f07ab6e..53b68f6363 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -95,10 +95,11 @@ def copy_tree(
         self,
         infile: StrPath,
         outfile: str,
-        preserve_mode=True,
-        preserve_times=True,
-        preserve_symlinks=False,
-        level=1,
+        # override: Using actual booleans
+        preserve_mode: bool = True,  # type: ignore[override]
+        preserve_times: bool = True,  # type: ignore[override]
+        preserve_symlinks: bool = False,  # type: ignore[override]
+        level: object = 1,
     ) -> list[str]:
         assert preserve_mode and preserve_times and not preserve_symlinks
         exclude = self.get_exclusions()
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index 7b90611d1c..f1ccc2bbf8 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -56,7 +56,7 @@ def _install_ep_scripts(self):
         for args in writer.get_args(dist, cmd.as_header()):
             self.write_script(*args)
 
-    def write_script(self, script_name, contents, mode="t", *ignored):
+    def write_script(self, script_name, contents, mode: str = "t", *ignored):
         """Write an executable file to the scripts directory"""
         from setuptools.command.easy_install import chmod, current_umask
 
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 16fe753b58..2b7fe7bd80 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -30,12 +30,12 @@
     from setuptools._importlib import metadata
     from setuptools.dist import Distribution
 
-    from distutils.dist import _OptionsList
+    from distutils.dist import _OptionsList  # Comes from typeshed
+
 
 EMPTY: Mapping = MappingProxyType({})  # Immutable dict-like
 _ProjectReadmeValue: TypeAlias = Union[str, Dict[str, str]]
-_CorrespFn: TypeAlias = Callable[["Distribution", Any, StrPath], None]
-_Correspondence: TypeAlias = Union[str, _CorrespFn]
+_Correspondence: TypeAlias = Callable[["Distribution", Any, Union[StrPath, None]], None]
 _T = TypeVar("_T")
 
 _logger = logging.getLogger(__name__)
@@ -149,7 +149,9 @@ def _guess_content_type(file: str) -> str | None:
     raise ValueError(f"Undefined content type for {file}, {msg}")
 
 
-def _long_description(dist: Distribution, val: _ProjectReadmeValue, root_dir: StrPath):
+def _long_description(
+    dist: Distribution, val: _ProjectReadmeValue, root_dir: StrPath | None
+):
     from setuptools.config import expand
 
     file: str | tuple[()]
@@ -171,7 +173,7 @@ def _long_description(dist: Distribution, val: _ProjectReadmeValue, root_dir: St
         dist._referenced_files.add(file)
 
 
-def _license(dist: Distribution, val: dict, root_dir: StrPath):
+def _license(dist: Distribution, val: dict, root_dir: StrPath | None):
     from setuptools.config import expand
 
     if "file" in val:
@@ -181,7 +183,7 @@ def _license(dist: Distribution, val: dict, root_dir: StrPath):
         _set_config(dist, "license", val["text"])
 
 
-def _people(dist: Distribution, val: list[dict], _root_dir: StrPath, kind: str):
+def _people(dist: Distribution, val: list[dict], _root_dir: StrPath | None, kind: str):
     field = []
     email_field = []
     for person in val:
@@ -199,24 +201,24 @@ def _people(dist: Distribution, val: list[dict], _root_dir: StrPath, kind: str):
         _set_config(dist, f"{kind}_email", ", ".join(email_field))
 
 
-def _project_urls(dist: Distribution, val: dict, _root_dir):
+def _project_urls(dist: Distribution, val: dict, _root_dir: StrPath | None):
     _set_config(dist, "project_urls", val)
 
 
-def _python_requires(dist: Distribution, val: str, _root_dir):
+def _python_requires(dist: Distribution, val: str, _root_dir: StrPath | None):
     from packaging.specifiers import SpecifierSet
 
     _set_config(dist, "python_requires", SpecifierSet(val))
 
 
-def _dependencies(dist: Distribution, val: list, _root_dir):
+def _dependencies(dist: Distribution, val: list, _root_dir: StrPath | None):
     if getattr(dist, "install_requires", []):
         msg = "`install_requires` overwritten in `pyproject.toml` (dependencies)"
         SetuptoolsWarning.emit(msg)
     dist.install_requires = val
 
 
-def _optional_dependencies(dist: Distribution, val: dict, _root_dir):
+def _optional_dependencies(dist: Distribution, val: dict, _root_dir: StrPath | None):
     if getattr(dist, "extras_require", None):
         msg = "`extras_require` overwritten in `pyproject.toml` (optional-dependencies)"
         SetuptoolsWarning.emit(msg)
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 8f2040fefa..81caf1c35e 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -45,7 +45,7 @@
     from setuptools.dist import Distribution
 
 _K = TypeVar("_K")
-_V = TypeVar("_V", covariant=True)
+_V_co = TypeVar("_V_co", covariant=True)
 
 
 class StaticModule:
@@ -354,7 +354,7 @@ def canonic_data_files(
     ]
 
 
-def entry_points(text: str, text_source="entry-points") -> dict[str, dict]:
+def entry_points(text: str, text_source: str = "entry-points") -> dict[str, dict]:
     """Given the contents of entry-points file,
     process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
     The first level keys are entry-point groups, the second level keys are
@@ -398,7 +398,7 @@ def __exit__(
         exc_type: type[BaseException] | None,
         exc_value: BaseException | None,
         traceback: TracebackType | None,
-    ) -> None:
+    ):
         if self._called:
             self._dist.set_defaults.analyse_name()  # Now we can set a default name
 
@@ -413,7 +413,7 @@ def package_dir(self) -> Mapping[str, str]:
         return LazyMappingProxy(self._get_package_dir)
 
 
-class LazyMappingProxy(Mapping[_K, _V]):
+class LazyMappingProxy(Mapping[_K, _V_co]):
     """Mapping proxy that delays resolving the target object, until really needed.
 
     >>> def obtain_mapping():
@@ -427,16 +427,16 @@ class LazyMappingProxy(Mapping[_K, _V]):
     'other value'
     """
 
-    def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]):
+    def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V_co]]):
         self._obtain = obtain_mapping_value
-        self._value: Mapping[_K, _V] | None = None
+        self._value: Mapping[_K, _V_co] | None = None
 
-    def _target(self) -> Mapping[_K, _V]:
+    def _target(self) -> Mapping[_K, _V_co]:
         if self._value is None:
             self._value = self._obtain()
         return self._value
 
-    def __getitem__(self, key: _K) -> _V:
+    def __getitem__(self, key: _K) -> _V_co:
         return self._target()[key]
 
     def __len__(self) -> int:
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index e0040cefbd..f5bda2ce34 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -63,7 +63,7 @@ def validate(config: dict, filepath: StrPath) -> bool:
 def apply_configuration(
     dist: Distribution,
     filepath: StrPath,
-    ignore_option_errors=False,
+    ignore_option_errors: bool = False,
 ) -> Distribution:
     """Apply the configuration from a ``pyproject.toml`` file into an existing
     distribution object.
@@ -74,8 +74,8 @@ def apply_configuration(
 
 def read_configuration(
     filepath: StrPath,
-    expand=True,
-    ignore_option_errors=False,
+    expand: bool = True,
+    ignore_option_errors: bool = False,
     dist: Distribution | None = None,
 ) -> dict[str, Any]:
     """Read given configuration file and returns options from it as a dict.
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 5b4e1e8d95..35fe4f9aa9 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -41,22 +41,25 @@
 from . import expand
 
 if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
     from setuptools.dist import Distribution
 
     from distutils.dist import DistributionMetadata
 
-SingleCommandOptions = Dict["str", Tuple["str", Any]]
+SingleCommandOptions: TypeAlias = Dict[str, Tuple[str, Any]]
 """Dict that associate the name of the options of a particular command to a
 tuple. The first element of the tuple indicates the origin of the option value
 (e.g. the name of the configuration file where it was read from),
 while the second element of the tuple is the option value itself
 """
-AllCommandOptions = Dict["str", SingleCommandOptions]  # cmd name => its options
+AllCommandOptions: TypeAlias = Dict[str, SingleCommandOptions]
+"""cmd name => its options"""
 Target = TypeVar("Target", "Distribution", "DistributionMetadata")
 
 
 def read_configuration(
-    filepath: StrPath, find_others=False, ignore_option_errors=False
+    filepath: StrPath, find_others: bool = False, ignore_option_errors: bool = False
 ) -> dict:
     """Read given configuration file and returns options from it as a dict.
 
@@ -158,7 +161,7 @@ def configuration_to_dict(
 def parse_configuration(
     distribution: Distribution,
     command_options: AllCommandOptions,
-    ignore_option_errors=False,
+    ignore_option_errors: bool = False,
 ) -> tuple[ConfigMetadataHandler, ConfigOptionsHandler]:
     """Performs additional parsing of configuration options
     for a distribution.
@@ -379,7 +382,7 @@ def parser(value):
 
         return parser
 
-    def _parse_file(self, value, root_dir: StrPath):
+    def _parse_file(self, value, root_dir: StrPath | None):
         """Represents value as a string, allowing including text
         from nearest files using `file:` directive.
 
@@ -547,7 +550,7 @@ def __init__(
         ignore_option_errors: bool,
         ensure_discovered: expand.EnsurePackagesDiscovered,
         package_dir: dict | None = None,
-        root_dir: StrPath = os.curdir,
+        root_dir: StrPath | None = os.curdir,
     ):
         super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
         self.package_dir = package_dir
diff --git a/setuptools/depends.py b/setuptools/depends.py
index 9398b95331..e73f06808e 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import contextlib
 import dis
 import marshal
@@ -15,7 +17,13 @@ class Require:
     """A prerequisite to building or installing a distribution"""
 
     def __init__(
-        self, name, requested_version, module, homepage='', attribute=None, format=None
+        self,
+        name,
+        requested_version,
+        module,
+        homepage: str = '',
+        attribute=None,
+        format=None,
     ):
         if format is None and requested_version is not None:
             format = Version
@@ -43,7 +51,7 @@ def version_ok(self, version):
             and self.format(version) >= self.requested_version
         )
 
-    def get_version(self, paths=None, default="unknown"):
+    def get_version(self, paths=None, default: str = "unknown"):
         """Get version number of installed module, 'None', or 'default'
 
         Search 'paths' for module.  If not found, return 'None'.  If found,
@@ -98,7 +106,7 @@ def empty():
 # XXX it'd be better to test assertions about bytecode instead.
 if not sys.platform.startswith('java') and sys.platform != 'cli':
 
-    def get_module_constant(module, symbol, default=-1, paths=None):
+    def get_module_constant(module, symbol, default: str | int = -1, paths=None):
         """Find 'module' by searching 'paths', and extract 'symbol'
 
         Return 'None' if 'module' does not exist on 'paths', or it does not define
@@ -126,7 +134,7 @@ def get_module_constant(module, symbol, default=-1, paths=None):
 
         return extract_constant(code, symbol, default)
 
-    def extract_constant(code, symbol, default=-1):
+    def extract_constant(code, symbol, default: str | int = -1):
         """Extract the constant value of 'symbol' from 'code'
 
         If the name 'symbol' is bound to a constant value by the Python code
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
index 577be2f16b..09d1e2f474 100644
--- a/setuptools/discovery.py
+++ b/setuptools/discovery.py
@@ -41,10 +41,11 @@
 
 import itertools
 import os
+from collections.abc import Iterator
 from fnmatch import fnmatchcase
 from glob import glob
 from pathlib import Path
-from typing import TYPE_CHECKING, Iterable, Iterator, Mapping
+from typing import TYPE_CHECKING, Iterable, Mapping
 
 import _distutils_hack.override  # noqa: F401
 
@@ -53,13 +54,11 @@
 from distutils import log
 from distutils.util import convert_path
 
-StrIter = Iterator[str]
-
-chain_iter = itertools.chain.from_iterable
-
 if TYPE_CHECKING:
     from setuptools import Distribution
 
+chain_iter = itertools.chain.from_iterable
+
 
 def _valid_name(path: StrPath) -> bool:
     # Ignore invalid names that cannot be imported directly
@@ -124,7 +123,9 @@ def find(
         )
 
     @classmethod
-    def _find_iter(cls, where: StrPath, exclude: _Filter, include: _Filter) -> StrIter:
+    def _find_iter(
+        cls, where: StrPath, exclude: _Filter, include: _Filter
+    ) -> Iterator[str]:
         raise NotImplementedError
 
 
@@ -136,7 +137,9 @@ class PackageFinder(_Finder):
     ALWAYS_EXCLUDE = ("ez_setup", "*__pycache__")
 
     @classmethod
-    def _find_iter(cls, where: StrPath, exclude: _Filter, include: _Filter) -> StrIter:
+    def _find_iter(
+        cls, where: StrPath, exclude: _Filter, include: _Filter
+    ) -> Iterator[str]:
         """
         All the packages found in 'where' that pass the 'include' filter, but
         not the 'exclude' filter.
@@ -185,7 +188,9 @@ class ModuleFinder(_Finder):
     """
 
     @classmethod
-    def _find_iter(cls, where: StrPath, exclude: _Filter, include: _Filter) -> StrIter:
+    def _find_iter(
+        cls, where: StrPath, exclude: _Filter, include: _Filter
+    ) -> Iterator[str]:
         for file in glob(os.path.join(where, "*.py")):
             module, _ext = os.path.splitext(os.path.basename(file))
 
@@ -328,7 +333,9 @@ def _package_dir(self) -> dict[str, str]:
             return {}
         return self.dist.package_dir
 
-    def __call__(self, force=False, name=True, ignore_ext_modules=False):
+    def __call__(
+        self, force: bool = False, name: bool = True, ignore_ext_modules: bool = False
+    ):
         """Automatically discover missing configuration fields
         and modifies the given ``distribution`` object in-place.
 
diff --git a/setuptools/dist.py b/setuptools/dist.py
index d6b8e08214..1348ca61c2 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -6,6 +6,7 @@
 import os
 import re
 import sys
+from collections.abc import Iterable
 from glob import iglob
 from pathlib import Path
 from typing import (
@@ -24,12 +25,15 @@
 from packaging.specifiers import InvalidSpecifier, SpecifierSet
 from packaging.version import Version
 
+from setuptools._path import StrPath
+
 from . import (
     _entry_points,
     _reqs,
     command as _,  # noqa: F401 # imported for side-effects
 )
 from ._importlib import metadata
+from ._reqs import _StrOrIter
 from .config import pyprojecttoml, setupcfg
 from .discovery import ConfigDiscovery
 from .monkey import get_unpatched
@@ -235,10 +239,8 @@ def check_packages(dist, attr, value):
 
 
 if TYPE_CHECKING:
-    from typing_extensions import TypeAlias
-
     # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962
-    _Distribution: TypeAlias = distutils.core.Distribution
+    from distutils.core import Distribution as _Distribution
 else:
     _Distribution = get_unpatched(distutils.core.Distribution)
 
@@ -302,7 +304,8 @@ class Distribution(_Distribution):
     # Used by build_py, editable_wheel and install_lib commands for legacy namespaces
     namespace_packages: list[str]  #: :meta private: DEPRECATED
 
-    def __init__(self, attrs: MutableMapping | None = None) -> None:
+    # Any: Dynamic assignment results in Incompatible types in assignment
+    def __init__(self, attrs: MutableMapping[str, Any] | None = None) -> None:
         have_package_data = hasattr(self, "package_data")
         if not have_package_data:
             self.package_data: dict[str, list[str]] = {}
@@ -311,9 +314,9 @@ def __init__(self, attrs: MutableMapping | None = None) -> None:
         self.include_package_data: bool | None = None
         self.exclude_package_data: dict[str, list[str]] | None = None
         # Filter-out setuptools' specific options.
-        self.src_root = attrs.pop("src_root", None)
-        self.dependency_links = attrs.pop('dependency_links', [])
-        self.setup_requires = attrs.pop('setup_requires', [])
+        self.src_root: str | None = attrs.pop("src_root", None)
+        self.dependency_links: list[str] = attrs.pop('dependency_links', [])
+        self.setup_requires: list[str] = attrs.pop('setup_requires', [])
         for ep in metadata.entry_points(group='distutils.setup_keywords'):
             vars(self).setdefault(ep.name, None)
 
@@ -515,7 +518,7 @@ def _parse_config_files(self, filenames=None):  # noqa: C901
             except ValueError as e:
                 raise DistutilsOptionError(e) from e
 
-    def warn_dash_deprecation(self, opt, section):
+    def warn_dash_deprecation(self, opt: str, section: str):
         if section in (
             'options.extras_require',
             'options.data_files',
@@ -557,7 +560,7 @@ def _setuptools_commands(self):
             # during bootstrapping, distribution doesn't exist
             return []
 
-    def make_option_lowercase(self, opt, section):
+    def make_option_lowercase(self, opt: str, section: str):
         if section != 'metadata' or opt.islower():
             return opt
 
@@ -621,7 +624,7 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
             except ValueError as e:
                 raise DistutilsOptionError(e) from e
 
-    def _get_project_config_files(self, filenames):
+    def _get_project_config_files(self, filenames: Iterable[StrPath] | None):
         """Add default file and split between INI and TOML"""
         tomlfiles = []
         standard_project_metadata = Path(self.src_root or os.curdir, "pyproject.toml")
@@ -633,7 +636,11 @@ def _get_project_config_files(self, filenames):
             tomlfiles = [standard_project_metadata]
         return filenames, tomlfiles
 
-    def parse_config_files(self, filenames=None, ignore_option_errors=False):
+    def parse_config_files(
+        self,
+        filenames: Iterable[StrPath] | None = None,
+        ignore_option_errors: bool = False,
+    ):
         """Parses configuration files from various levels
         and loads configuration.
         """
@@ -650,7 +657,7 @@ def parse_config_files(self, filenames=None, ignore_option_errors=False):
         self._finalize_requires()
         self._finalize_license_files()
 
-    def fetch_build_eggs(self, requires):
+    def fetch_build_eggs(self, requires: _StrOrIter):
         """Resolve pre-setup requirements"""
         from .installer import _fetch_build_eggs
 
@@ -721,7 +728,7 @@ def fetch_build_egg(self, req):
 
         return fetch_build_egg(self, req)
 
-    def get_command_class(self, command):
+    def get_command_class(self, command: str):
         """Pluggable version of get_command_class()"""
         if command in self.cmdclass:
             return self.cmdclass[command]
@@ -775,7 +782,7 @@ def include(self, **attrs):
             else:
                 self._include_misc(k, v)
 
-    def exclude_package(self, package):
+    def exclude_package(self, package: str):
         """Remove packages, modules, and extensions in named package"""
 
         pfx = package + '.'
@@ -796,7 +803,7 @@ def exclude_package(self, package):
                 if p.name != package and not p.name.startswith(pfx)
             ]
 
-    def has_contents_for(self, package):
+    def has_contents_for(self, package: str):
         """Return true if 'exclude_package(package)' would do something"""
 
         pfx = package + '.'
diff --git a/setuptools/errors.py b/setuptools/errors.py
index 90fcf7170e..990ecbf4e2 100644
--- a/setuptools/errors.py
+++ b/setuptools/errors.py
@@ -5,45 +5,40 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
 from distutils import errors as _distutils_errors
 
-if TYPE_CHECKING:
-    from typing_extensions import TypeAlias
-
 # Re-export errors from distutils to facilitate the migration to PEP632
 
-ByteCompileError: TypeAlias = _distutils_errors.DistutilsByteCompileError
-CCompilerError: TypeAlias = _distutils_errors.CCompilerError
-ClassError: TypeAlias = _distutils_errors.DistutilsClassError
-CompileError: TypeAlias = _distutils_errors.CompileError
-ExecError: TypeAlias = _distutils_errors.DistutilsExecError
-FileError: TypeAlias = _distutils_errors.DistutilsFileError
-InternalError: TypeAlias = _distutils_errors.DistutilsInternalError
-LibError: TypeAlias = _distutils_errors.LibError
-LinkError: TypeAlias = _distutils_errors.LinkError
-ModuleError: TypeAlias = _distutils_errors.DistutilsModuleError
-OptionError: TypeAlias = _distutils_errors.DistutilsOptionError
-PlatformError: TypeAlias = _distutils_errors.DistutilsPlatformError
-PreprocessError: TypeAlias = _distutils_errors.PreprocessError
-SetupError: TypeAlias = _distutils_errors.DistutilsSetupError
-TemplateError: TypeAlias = _distutils_errors.DistutilsTemplateError
-UnknownFileError: TypeAlias = _distutils_errors.UnknownFileError
+ByteCompileError = _distutils_errors.DistutilsByteCompileError
+CCompilerError = _distutils_errors.CCompilerError
+ClassError = _distutils_errors.DistutilsClassError
+CompileError = _distutils_errors.CompileError
+ExecError = _distutils_errors.DistutilsExecError
+FileError = _distutils_errors.DistutilsFileError
+InternalError = _distutils_errors.DistutilsInternalError
+LibError = _distutils_errors.LibError
+LinkError = _distutils_errors.LinkError
+ModuleError = _distutils_errors.DistutilsModuleError
+OptionError = _distutils_errors.DistutilsOptionError
+PlatformError = _distutils_errors.DistutilsPlatformError
+PreprocessError = _distutils_errors.PreprocessError
+SetupError = _distutils_errors.DistutilsSetupError
+TemplateError = _distutils_errors.DistutilsTemplateError
+UnknownFileError = _distutils_errors.UnknownFileError
 
 # The root error class in the hierarchy
-BaseError: TypeAlias = _distutils_errors.DistutilsError
+BaseError = _distutils_errors.DistutilsError
 
 
-class InvalidConfigError(OptionError):
+class InvalidConfigError(OptionError):  # type: ignore[valid-type, misc] # distutils imports are `Any` on python 3.12+
     """Error used for invalid configurations."""
 
 
-class RemovedConfigError(OptionError):
+class RemovedConfigError(OptionError):  # type: ignore[valid-type, misc] # distutils imports are `Any` on python 3.12+
     """Error used for configurations that were deprecated and removed."""
 
 
-class RemovedCommandError(BaseError, RuntimeError):
+class RemovedCommandError(BaseError, RuntimeError):  # type: ignore[valid-type, misc] # distutils imports are `Any` on python 3.12+
     """Error used for commands that have been removed in setuptools.
 
     Since ``setuptools`` is built on ``distutils``, simply removing a command
@@ -53,7 +48,7 @@ class RemovedCommandError(BaseError, RuntimeError):
     """
 
 
-class PackageDiscoveryError(BaseError, RuntimeError):
+class PackageDiscoveryError(BaseError, RuntimeError):  # type: ignore[valid-type, misc] # distutils imports are `Any` on python 3.12+
     """Impossible to perform automatic discovery of packages and/or modules.
 
     The current project layout or given discovery options can lead to problems when
diff --git a/setuptools/extension.py b/setuptools/extension.py
index cbefc72508..79bcc203e9 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -4,6 +4,8 @@
 import re
 from typing import TYPE_CHECKING
 
+from setuptools._path import StrPath
+
 from .monkey import get_unpatched
 
 import distutils.core
@@ -27,10 +29,8 @@ def _have_cython():
 # for compatibility
 have_pyrex = _have_cython
 if TYPE_CHECKING:
-    from typing_extensions import TypeAlias
-
     # Work around a mypy issue where type[T] can't be used as a base: https://github.com/python/mypy/issues/10962
-    _Extension: TypeAlias = distutils.core.Extension
+    from distutils.core import Extension as _Extension
 else:
     _Extension = get_unpatched(distutils.core.Extension)
 
@@ -52,7 +52,7 @@ class Extension(_Extension):
       the full name of the extension, including any packages -- ie.
       *not* a filename or pathname, but Python dotted name
 
-    :arg list[str] sources:
+    :arg list[str|os.PathLike[str]] sources:
       list of source filenames, relative to the distribution root
       (where the setup script lives), in Unix form (slash-separated)
       for portability.  Source files may be C, C++, SWIG (.i),
@@ -140,11 +140,23 @@ class Extension(_Extension):
     _needs_stub: bool  #: Private API, internal use only.
     _file_name: str  #: Private API, internal use only.
 
-    def __init__(self, name: str, sources, *args, py_limited_api: bool = False, **kw):
+    def __init__(
+        self,
+        name: str,
+        sources: list[StrPath],
+        *args,
+        py_limited_api: bool = False,
+        **kw,
+    ):
         # The *args is needed for compatibility as calls may use positional
         # arguments. py_limited_api may be set only via keyword.
         self.py_limited_api = py_limited_api
-        super().__init__(name, sources, *args, **kw)
+        super().__init__(
+            name,
+            sources,  # type: ignore[arg-type] # Vendored version of setuptools supports PathLike
+            *args,
+            **kw,
+        )
 
     def _convert_pyx_sources_to_lang(self):
         """
diff --git a/setuptools/glob.py b/setuptools/glob.py
index ffe0ae92cb..97aca44314 100644
--- a/setuptools/glob.py
+++ b/setuptools/glob.py
@@ -13,7 +13,7 @@
 __all__ = ["glob", "iglob", "escape"]
 
 
-def glob(pathname, recursive=False):
+def glob(pathname, recursive: bool = False):
     """Return a list of paths matching a pathname pattern.
 
     The pattern may contain simple shell-style wildcards a la
@@ -27,7 +27,7 @@ def glob(pathname, recursive=False):
     return list(iglob(pathname, recursive=recursive))
 
 
-def iglob(pathname, recursive=False):
+def iglob(pathname, recursive: bool = False):
     """Return an iterator which yields the paths matching a pathname pattern.
 
     The pattern may contain simple shell-style wildcards a la
diff --git a/setuptools/installer.py b/setuptools/installer.py
index ce3559cd93..ba2d808d49 100644
--- a/setuptools/installer.py
+++ b/setuptools/installer.py
@@ -6,6 +6,7 @@
 from functools import partial
 
 from . import _reqs
+from ._reqs import _StrOrIter
 from .warnings import SetuptoolsDeprecationWarning
 from .wheel import Wheel
 
@@ -30,7 +31,7 @@ def fetch_build_egg(dist, req):
     return _fetch_build_egg_no_warn(dist, req)
 
 
-def _fetch_build_eggs(dist, requires):
+def _fetch_build_eggs(dist, requires: _StrOrIter):
     import pkg_resources  # Delay import to avoid unnecessary side-effects
 
     _DeprecatedInstaller.emit(stacklevel=3)
diff --git a/setuptools/logging.py b/setuptools/logging.py
index e9674c5a81..c6d25a6b1e 100644
--- a/setuptools/logging.py
+++ b/setuptools/logging.py
@@ -35,6 +35,6 @@ def configure():
         distutils.dist.log = distutils.log
 
 
-def set_threshold(level):
+def set_threshold(level: int):
     logging.root.setLevel(level * 10)
     return set_threshold.unpatched(level)
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 9b3769fac9..2a05b35c4f 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -303,20 +303,20 @@ class PackageIndex(Environment):
 
     def __init__(
         self,
-        index_url="https://pypi.org/simple/",
+        index_url: str = "https://pypi.org/simple/",
         hosts=('*',),
         ca_bundle=None,
-        verify_ssl=True,
+        verify_ssl: bool = True,
         *args,
         **kw,
     ):
         super().__init__(*args, **kw)
         self.index_url = index_url + "/"[: not index_url.endswith('/')]
-        self.scanned_urls = {}
-        self.fetched_urls = {}
-        self.package_pages = {}
+        self.scanned_urls: dict = {}
+        self.fetched_urls: dict = {}
+        self.package_pages: dict = {}
         self.allows = re.compile('|'.join(map(translate, hosts))).match
-        self.to_scan = []
+        self.to_scan: list = []
         self.opener = urllib.request.urlopen
 
     def add(self, dist):
@@ -328,7 +328,7 @@ def add(self, dist):
         return super().add(dist)
 
     # FIXME: 'PackageIndex.process_url' is too complex (14)
-    def process_url(self, url, retrieve=False):  # noqa: C901
+    def process_url(self, url, retrieve: bool = False):  # noqa: C901
         """Evaluate a URL as a possible download, and maybe retrieve it"""
         if url in self.scanned_urls and not retrieve:
             return
@@ -381,7 +381,7 @@ def process_url(self, url, retrieve=False):  # noqa: C901
         if url.startswith(self.index_url) and getattr(f, 'code', None) != 404:
             page = self.process_index(url, page)
 
-    def process_filename(self, fn, nested=False):
+    def process_filename(self, fn, nested: bool = False):
         # process filenames or directories
         if not os.path.exists(fn):
             self.warn("Not found: %s", fn)
@@ -397,7 +397,7 @@ def process_filename(self, fn, nested=False):
             self.debug("Found: %s", fn)
             list(map(self.add, dists))
 
-    def url_ok(self, url, fatal=False):
+    def url_ok(self, url, fatal: bool = False):
         s = URL_SCHEME(url)
         is_file = s and s.group(1).lower() == 'file'
         if is_file or self.allows(urllib.parse.urlparse(url)[1]):
@@ -603,9 +603,9 @@ def fetch_distribution(  # noqa: C901  # is too complex (14)  # FIXME
         self,
         requirement,
         tmpdir,
-        force_scan=False,
-        source=False,
-        develop_ok=False,
+        force_scan: bool = False,
+        source: bool = False,
+        develop_ok: bool = False,
         local_index=None,
     ):
         """Obtain a distribution suitable for fulfilling `requirement`
@@ -681,7 +681,9 @@ def find(req, env=None):
             self.info("Best match: %s", dist)
             return dist.clone(location=dist.download_location)
 
-    def fetch(self, requirement, tmpdir, force_scan=False, source=False):
+    def fetch(
+        self, requirement, tmpdir, force_scan: bool = False, source: bool = False
+    ):
         """Obtain a file suitable for fulfilling `requirement`
 
         DEPRECATED; use the ``fetch_distribution()`` method now instead.  For
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 9c2c78a32c..ec5e79da05 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -295,10 +295,10 @@ def __enter__(self) -> None:
 
     def __exit__(
         self,
-        exc_type: object,
-        exc_value: object,
-        traceback: object,
-    ) -> None:
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+    ):
         self._active = False
         builtins.open = _open
         self._copy(_os)
@@ -416,7 +416,7 @@ def _remap_pair(self, operation, src, dst, *args, **kw):
 class DirectorySandbox(AbstractSandbox):
     """Restrict operations to a single subdirectory - pseudo-chroot"""
 
-    write_ops = dict.fromkeys([
+    write_ops: dict[str, None] = dict.fromkeys([
         "open",
         "chmod",
         "chown",
@@ -491,7 +491,7 @@ def _remap_pair(self, operation, src, dst, *args, **kw):
             self._violation(operation, src, dst, *args, **kw)
         return (src, dst)
 
-    def open(self, file, flags, mode=0o777, *args, **kw):
+    def open(self, file, flags, mode: int = 0o777, *args, **kw):
         """Called for low-level os.open()"""
         if flags & WRITE_FLAGS and not self._ok(file):
             self._violation("os.open", file, flags, mode, *args, **kw)
diff --git a/setuptools/warnings.py b/setuptools/warnings.py
index 8c94bc96e6..f0ef616725 100644
--- a/setuptools/warnings.py
+++ b/setuptools/warnings.py
@@ -12,9 +12,12 @@
 from datetime import date
 from inspect import cleandoc
 from textwrap import indent
-from typing import Tuple
+from typing import TYPE_CHECKING, Tuple
 
-_DueDate = Tuple[int, int, int]  # time tuple
+if TYPE_CHECKING:
+    from typing_extensions import TypeAlias
+
+_DueDate: TypeAlias = Tuple[int, int, int]  # time tuple
 _INDENT = 8 * " "
 _TEMPLATE = f"""{80 * '*'}\n{{details}}\n{80 * '*'}"""
 

From 62b6678a32087ed3bfc8ff19761764340295834e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sat, 26 Oct 2024 00:12:59 +0100
Subject: [PATCH 1241/1761] Bump pre-commit hook for ruff to avoid clashes with
 pytest-ruff (jaraco/skeleton#150)

---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8ec58e22fa..04870d16bf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.5.6
+  rev: v0.7.1
   hooks:
   - id: ruff
     args: [--fix, --unsafe-fixes]

From db4dfc495552aca8d6f05ed58441fa65fdc2ed9c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 28 Oct 2024 09:11:52 -0700
Subject: [PATCH 1242/1761] Add Python 3.13 and 3.14 into the matrix.
 (jaraco/skeleton#151)


From e61a9df7cdc9c8d1b56c30b7b3f94a7cdac14414 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 28 Oct 2024 12:19:31 -0400
Subject: [PATCH 1243/1761] Include pyproject.toml in ruff.toml.

Closes jaraco/skeleton#119. Workaround for astral-sh/ruff#10299.
---
 ruff.toml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/ruff.toml b/ruff.toml
index 922aa1f198..8b22940a64 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,3 +1,6 @@
+# include pyproject.toml for requires-python (workaround astral-sh/ruff#10299)
+include = "pyproject.toml"
+
 [lint]
 extend-select = [
 	"C901",

From 750a1891ec4a1c0602050e3463e9593a8c13aa14 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 28 Oct 2024 12:22:50 -0400
Subject: [PATCH 1244/1761] Require Python 3.9 or later now that Python 3.8 is
 EOL.

---
 .github/workflows/main.yml | 4 +---
 pyproject.toml             | 2 +-
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 251b9c1d82..9c01fc4d14 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -35,15 +35,13 @@ jobs:
       # https://blog.jaraco.com/efficient-use-of-ci-resources/
       matrix:
         python:
-        - "3.8"
+        - "3.9"
         - "3.13"
         platform:
         - ubuntu-latest
         - macos-latest
         - windows-latest
         include:
-        - python: "3.9"
-          platform: ubuntu-latest
         - python: "3.10"
           platform: ubuntu-latest
         - python: "3.11"
diff --git a/pyproject.toml b/pyproject.toml
index 1d81b1cc4a..328b98cb46 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,7 +16,7 @@ classifiers = [
 	"Programming Language :: Python :: 3",
 	"Programming Language :: Python :: 3 :: Only",
 ]
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 dependencies = [
 ]
 dynamic = ["version"]

From 7f4ab19ce8001a81c7cee37fe7ee390e88263a78 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 28 Oct 2024 18:38:41 -0400
Subject: [PATCH 1245/1761] pkg_resources ANN2 autofixes

---
 pkg_resources/__init__.py | 34 +++++++++++++++++-----------------
 1 file changed, 17 insertions(+), 17 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index f1f0ef2535..e20db4a4fa 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -172,7 +172,7 @@ def _sget_dict(val):
     return val.copy()
 
 
-def _sset_dict(key, ob, state):
+def _sset_dict(key, ob, state) -> None:
     ob.clear()
     ob.update(state)
 
@@ -181,7 +181,7 @@ def _sget_object(val):
     return val.__getstate__()
 
 
-def _sset_object(key, ob, state):
+def _sset_object(key, ob, state) -> None:
     ob.__setstate__(state)
 
 
@@ -1090,7 +1090,7 @@ def subscribe(
         for dist in self:
             callback(dist)
 
-    def _added_new(self, dist):
+    def _added_new(self, dist) -> None:
         for callback in self.callbacks:
             callback(dist)
 
@@ -1463,7 +1463,7 @@ def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()) -> st
         return target_path
 
     @staticmethod
-    def _warn_unsafe_extraction_path(path):
+    def _warn_unsafe_extraction_path(path) -> None:
         """
         If the default extraction path is overridden and set to an insecure
         location, such as /tmp, it opens up an opportunity for an attacker to
@@ -1577,7 +1577,7 @@ def safe_version(version: str) -> str:
         return re.sub('[^A-Za-z0-9.]+', '-', version)
 
 
-def _forgiving_version(version):
+def _forgiving_version(version) -> str:
     """Fallback when ``safe_version`` is not safe enough
     >>> parse_version(_forgiving_version('0.23ubuntu1'))
     <Version('0.23.ubuntu1')>
@@ -1779,7 +1779,7 @@ def _fn(self, base: str | None, resource_name: str):
         return base
 
     @staticmethod
-    def _validate_resource_path(path):
+    def _validate_resource_path(path) -> None:
         """
         Validate the resource paths according to the docs.
         https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access
@@ -1890,7 +1890,7 @@ def _setup_prefix(self):
         egg = next(eggs, None)
         egg and self._set_egg(egg)
 
-    def _set_egg(self, path: str):
+    def _set_egg(self, path: str) -> None:
         self.egg_name = os.path.basename(path)
         self.egg_info = os.path.join(path, 'EGG-INFO')
         self.egg_root = path
@@ -1918,7 +1918,7 @@ def _get(self, path) -> bytes:
             return stream.read()
 
     @classmethod
-    def _register(cls):
+    def _register(cls) -> None:
         loader_names = (
             'SourceFileLoader',
             'SourcelessFileLoader',
@@ -2208,7 +2208,7 @@ def get_metadata(self, name: str) -> str:
         self._warn_on_replacement(metadata)
         return metadata
 
-    def _warn_on_replacement(self, metadata):
+    def _warn_on_replacement(self, metadata) -> None:
         replacement_char = '�'
         if replacement_char in metadata:
             tmpl = "{self.path} could not be properly decoded in UTF-8"
@@ -2505,7 +2505,7 @@ def _handle_ns(packageName, path_item):
     return subpath
 
 
-def _rebuild_mod_path(orig_path, package_name, module: types.ModuleType):
+def _rebuild_mod_path(orig_path, package_name, module: types.ModuleType) -> None:
     """
     Rebuild module.__path__ ensuring that all entries are ordered
     corresponding to their sys.path order
@@ -2624,7 +2624,7 @@ def null_ns_handler(
     path_item: str | None,
     packageName: str | None,
     module: _ModuleLike | None,
-):
+) -> None:
     return None
 
 
@@ -2635,7 +2635,7 @@ def null_ns_handler(
 def normalize_path(filename: StrPath) -> str: ...
 @overload
 def normalize_path(filename: BytesPath) -> bytes: ...
-def normalize_path(filename: StrOrBytesPath):
+def normalize_path(filename: StrOrBytesPath) -> str | bytes:
     """Normalize a file/dir name for comparison purposes"""
     return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
 
@@ -2691,7 +2691,7 @@ def _is_unpacked_egg(path):
     )
 
 
-def _set_parent_ns(packageName):
+def _set_parent_ns(packageName) -> None:
     parts = packageName.split('.')
     name = parts.pop()
     if parts:
@@ -3336,7 +3336,7 @@ def check_version_conflict(self):
                 " to sys.path" % (modname, fn, self.location),
             )
 
-    def has_version(self):
+    def has_version(self) -> bool:
         try:
             self.version
         except ValueError:
@@ -3552,7 +3552,7 @@ def ensure_directory(path: StrOrBytesPath) -> None:
     os.makedirs(dirname, exist_ok=True)
 
 
-def _bypass_ensure_directory(path):
+def _bypass_ensure_directory(path) -> None:
     """Sandbox-bypassing version of ensure_directory()"""
     if not WRITE_SUPPORT:
         raise OSError('"os.mkdir" not supported on this platform.')
@@ -3658,7 +3658,7 @@ def _call_aside(f, *args, **kwargs):
 
 
 @_call_aside
-def _initialize(g=globals()):
+def _initialize(g=globals()) -> None:
     "Set up global resource manager (deliberately not state-saved)"
     manager = ResourceManager()
     g['_manager'] = manager
@@ -3670,7 +3670,7 @@ def _initialize(g=globals()):
 
 
 @_call_aside
-def _initialize_master_working_set():
+def _initialize_master_working_set() -> None:
     """
     Prepare the master working set and make the ``require()``
     API available.

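For context, the annotations above come from ruff's ANN2 rules (flake8-annotations), which flag functions that lack a return annotation; the autofix inserts the inferred type. A minimal sketch of the before/after shape, using a hypothetical helper rather than real pkg_resources code:

    # Before: ANN202 flags the missing return annotation on a private function.
    def _reset(state):
        state.clear()

    # After: the autofix infers that the function only ever returns None.
    def _reset(state) -> None:
        state.clear()
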
From a21c7fab9985c48e6f37e0e375ef1797647c98c0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 29 Oct 2024 09:54:58 +0000
Subject: [PATCH 1246/1761] Fix grammar in news fragment

---
 newsfragments/4696.bugfix.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4696.bugfix.rst b/newsfragments/4696.bugfix.rst
index 77ebf87c48..ab035a308d 100644
--- a/newsfragments/4696.bugfix.rst
+++ b/newsfragments/4696.bugfix.rst
@@ -1,4 +1,4 @@
 Fix clashes for ``optional-dependencies`` in ``pyproject.toml`` and
 ``extra_requires`` in ``setup.cfg/setup.py``.
-As per PEP 621, ``optional-dependencies`` has to be honoured and dynamic
+As per PEP 621, ``optional-dependencies`` have to be honoured and dynamic
 behaviour is not allowed.

From d8da7dfaa0cef4a69ee21d9406391651c4dc5ce4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 29 Oct 2024 09:55:08 +0000
Subject: [PATCH 1247/1761] Bump version: 75.2.0 → 75.3.0
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 29 +++++++++++++++++++++++++++++
 newsfragments/4560.misc.rst    |  1 -
 newsfragments/4567.bugfix.rst  |  4 ----
 newsfragments/4575.feature.rst |  1 -
 newsfragments/4674.bugfix.rst  |  1 -
 newsfragments/4696.bugfix.rst  |  4 ----
 pyproject.toml                 |  2 +-
 8 files changed, 31 insertions(+), 13 deletions(-)
 delete mode 100644 newsfragments/4560.misc.rst
 delete mode 100644 newsfragments/4567.bugfix.rst
 delete mode 100644 newsfragments/4575.feature.rst
 delete mode 100644 newsfragments/4674.bugfix.rst
 delete mode 100644 newsfragments/4696.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 91921ce92d..974699dc24 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.2.0
+current_version = 75.3.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index e79b45a623..39bd36de66 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,32 @@
+v75.3.0
+=======
+
+Features
+--------
+
+- Allowed using `dict` as an ordered type in ``setuptools.dist.check_requirements`` -- by :user:`Avasam` (#4575)
+
+
+Bugfixes
+--------
+
+- Ensured methods in ``setuptools.modified`` preferably raise a consistent
+  ``distutils.errors.DistutilsError`` type
+  (except in the deprecated use case of ``SETUPTOOLS_USE_DISTUTILS=stdlib``)
+  -- by :user:`Avasam` (#4567)
+- Fix the ABI tag when building a wheel using the debug build of Python 3.13 on Windows. Previously, the ABI tag was missing the ``"d"`` flag. (#4674)
+- Fix clashes for ``optional-dependencies`` in ``pyproject.toml`` and
+  ``extra_requires`` in ``setup.cfg/setup.py``.
+  As per PEP 621, ``optional-dependencies`` have to be honoured and dynamic
+  behaviour is not allowed. (#4696)
+
+
+Misc
+----
+
+- #4560
+
+
 v75.2.0
 =======
 
diff --git a/newsfragments/4560.misc.rst b/newsfragments/4560.misc.rst
deleted file mode 100644
index 0878f09abd..0000000000
--- a/newsfragments/4560.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Bumped declared ``platformdirs`` dependency to ``>= 4.2.2`` to help platforms lacking `ctypes` support install setuptools seamlessly -- by :user:`Avasam`
diff --git a/newsfragments/4567.bugfix.rst b/newsfragments/4567.bugfix.rst
deleted file mode 100644
index 7d7bb282e1..0000000000
--- a/newsfragments/4567.bugfix.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Ensured methods in ``setuptools.modified`` preferably raise a consistent
-``distutils.errors.DistutilsError`` type
-(except in the deprecated use case of ``SETUPTOOLS_USE_DISTUTILS=stdlib``)
--- by :user:`Avasam`
diff --git a/newsfragments/4575.feature.rst b/newsfragments/4575.feature.rst
deleted file mode 100644
index 64ab49830f..0000000000
--- a/newsfragments/4575.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Allowed using `dict` as an ordered type in ``setuptools.dist.check_requirements`` -- by :user:`Avasam`
diff --git a/newsfragments/4674.bugfix.rst b/newsfragments/4674.bugfix.rst
deleted file mode 100644
index 9a6d2454ab..0000000000
--- a/newsfragments/4674.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix the ABI tag when building a wheel using the debug build of Python 3.13 on Windows. Previously, the ABI tag was missing the ``"d"`` flag.
diff --git a/newsfragments/4696.bugfix.rst b/newsfragments/4696.bugfix.rst
deleted file mode 100644
index ab035a308d..0000000000
--- a/newsfragments/4696.bugfix.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Fix clashes for ``optional-dependencies`` in ``pyproject.toml`` and
-``extra_requires`` in ``setup.cfg/setup.py``.
-As per PEP 621, ``optional-dependencies`` have to be honoured and dynamic
-behaviour is not allowed.
diff --git a/pyproject.toml b/pyproject.toml
index 1a4906fb0c..423d00701b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.2.0"
+version = "75.3.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 4969c28722acd60c0e3680302f661833f391f5a7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 15:27:41 +0000
Subject: [PATCH 1248/1761] Bump Python version for `pyright` GHA

Since https://github.com/pypa/setuptools/commit/e47994c81ce3e97502f761bdef60b20fe72bf02a, we have been having problems with 3.8.

This PR updates the matrix to cover the oldest and newest stable Python versions that are still supported.
---
 .github/workflows/pyright.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/pyright.yml b/.github/workflows/pyright.yml
index 17a1e2dbbe..42d0164ff1 100644
--- a/.github/workflows/pyright.yml
+++ b/.github/workflows/pyright.yml
@@ -42,8 +42,8 @@ jobs:
       # https://blog.jaraco.com/efficient-use-of-ci-resources/
       matrix:
         python:
-          - "3.8"
-          - "3.12"
+          - "3.9"
+          - "3.13"
         platform:
           - ubuntu-latest
     runs-on: ${{ matrix.platform }}

From b9d56bc9fd7f1a7f3b2eccd1bb8b1d5b02f701ff Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 15:37:18 +0000
Subject: [PATCH 1249/1761] Remove actions on 3.8 for macOS

---
 .github/workflows/main.yml | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 7a47b6e9cc..bb58704edd 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -63,12 +63,6 @@ jobs:
         - platform: ubuntu-latest
           python: "3.10"
           distutils: stdlib
-        # Python 3.8, 3.9 are on macos-13 but not macos-latest (macos-14-arm64)
-        # https://github.com/actions/setup-python/issues/850
-        # https://github.com/actions/setup-python/issues/696#issuecomment-1637587760
-        - {python: "3.8", platform: "macos-13"}
-        exclude:
-        - {python: "3.8", platform: "macos-latest"}
     runs-on: ${{ matrix.platform }}
     continue-on-error: ${{ matrix.python == '3.14' }}
     env:

From 531c497d35504f8c969b60fdc51d7a1df02dfa13 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 15:46:22 +0000
Subject: [PATCH 1250/1761] Fix ruff config

---
 ruff.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ruff.toml b/ruff.toml
index 10b6807f40..438c048962 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,5 +1,5 @@
 # include pyproject.toml for requires-python (workaround astral-sh/ruff#10299)
-include = "pyproject.toml"
+include = ["pyproject.toml"]
 
 exclude = [
 	"**/_vendor",

From db2b2533c4c535d9a6320ae3c7e1558dd174756c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 15:58:06 +0000
Subject: [PATCH 1251/1761] Bump min Python version in comments for typechecker
 configs

---
 mypy.ini           | 2 +-
 pyrightconfig.json | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index cadfa6be59..57e19efa9e 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -20,7 +20,7 @@ disable_error_code =
 
 # CI should test for all versions, local development gets hints for oldest supported
 # But our testing setup doesn't allow passing CLI arguments, so local devs have to set this manually.
-# python_version = 3.8
+# python_version = 3.9
 
 exclude = (?x)(
 	# Avoid scanning Python files in generated folders
diff --git a/pyrightconfig.json b/pyrightconfig.json
index 27a329e169..da3cd978ce 100644
--- a/pyrightconfig.json
+++ b/pyrightconfig.json
@@ -11,7 +11,7 @@
 		"setuptools/_distutils",
 	],
 	// Our testing setup doesn't allow passing CLI arguments, so local devs have to set this manually.
-	// "pythonVersion": "3.8",
+	// "pythonVersion": "3.9",
 	// For now we don't mind if mypy's `type: ignore` comments accidentally suppresses pyright issues
 	"enableTypeIgnoreComments": true,
 	"typeCheckingMode": "basic",

From b9dd033976087e0566660c65e2a05969d8c1f082 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 16:01:38 +0000
Subject: [PATCH 1252/1761] Let test for deprecated codepath xfail on macOS

---
 setuptools/tests/test_easy_install.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 933cebb78e..bfe1b8da90 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -1407,6 +1407,10 @@ def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch):
     assert cmd.config_vars['py_version_nodot'] == '310'
 
 
+@pytest.mark.xfail(
+    sys.platform == "darwin",
+    reason="https://github.com/pypa/setuptools/pull/4716#issuecomment-2447624418",
+)
 def test_editable_user_and_build_isolation(setup_context, monkeypatch, tmp_path):
     """`setup.py develop` should honor `--user` even under build isolation"""
 

From 8116f8ee660f6cc4861437f6a1cd3b7dff9f38c1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 15:22:48 +0000
Subject: [PATCH 1253/1761] Mention that type-stubs need to be contained in a
 package directory.

Closes #4713.
---
 docs/userguide/miscellaneous.rst | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst
index 7d841f6661..7354bd3a86 100644
--- a/docs/userguide/miscellaneous.rst
+++ b/docs/userguide/miscellaneous.rst
@@ -38,7 +38,9 @@ of ``setuptools``. The behavior of older versions might differ.
    .. versionadded:: v69.0.0
       ``setuptools`` will attempt to include type information files
       by default in the distribution
-      (``.pyi`` and ``py.typed``, as specified in :pep:`561`).
+      (``.pyi`` and ``py.typed``, as specified in :pep:`561`),
+      as long as they are contained inside of a package directory
+      (for the time being there is no automatic support for top-level ``.pyi`` files).
 
     *Please note however that this feature is* **EXPERIMENTAL** *and may change in
     the future.*

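To make the constraint above concrete, a hypothetical layout in which the type information files would be picked up automatically (and the top-level stub would not be):

    src/
        mypkg/            # package directory: py.typed and *.pyi are included
            __init__.py
            py.typed
            utils.pyi
    server.pyi            # top-level stub: not included automatically for now
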
From 1183377a10beb9ee65e3d1992a9ffb646e388fde Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 30 Oct 2024 14:18:42 -0400
Subject: [PATCH 1254/1761] Sync with typeshed

---
 setuptools/command/bdist_egg.py      | 2 +-
 setuptools/command/editable_wheel.py | 9 +++++----
 setuptools/command/rotate.py         | 3 ++-
 setuptools/config/expand.py          | 4 +++-
 setuptools/config/pyprojecttoml.py   | 5 +++--
 setuptools/config/setupcfg.py        | 3 ++-
 setuptools/discovery.py              | 6 +++---
 setuptools/sandbox.py                | 4 ++--
 8 files changed, 21 insertions(+), 15 deletions(-)

diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index c9eee16a5d..2881d8a9da 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -322,7 +322,7 @@ def get_ext_outputs(self):
         return all_outputs, ext_outputs
 
 
-NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
+NATIVE_EXTENSIONS: dict[str, None] = dict.fromkeys('.dll .so .dylib .pyd'.split())
 
 
 def walk_egg(egg_dir):
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index c1641a7267..34722e1aac 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -23,6 +23,7 @@
 from itertools import chain, starmap
 from pathlib import Path
 from tempfile import TemporaryDirectory
+from types import TracebackType
 from typing import TYPE_CHECKING, Iterable, Iterator, Mapping, Protocol, TypeVar, cast
 
 from .. import Command, _normalization, _path, errors, namespaces
@@ -379,13 +380,13 @@ def _select_strategy(
 class EditableStrategy(Protocol):
     def __call__(
         self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]
-    ): ...
+    ) -> object: ...
     def __enter__(self) -> Self: ...
     def __exit__(
         self,
-        _exc_type: object,
-        _exc_value: object,
-        _traceback: object,
+        _exc_type: type[BaseException] | None,
+        _exc_value: BaseException | None,
+        _traceback: TracebackType | None,
     ) -> object: ...
 
 
diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py
index dcdfafbcf7..76c7b8612e 100644
--- a/setuptools/command/rotate.py
+++ b/setuptools/command/rotate.py
@@ -2,6 +2,7 @@
 
 import os
 import shutil
+from typing import ClassVar
 
 from setuptools import Command
 
@@ -20,7 +21,7 @@ class rotate(Command):
         ('keep=', 'k', "number of matching distributions to keep"),
     ]
 
-    boolean_options: list[str] = []
+    boolean_options: ClassVar[list[str]] = []
 
     def initialize_options(self):
         self.match = None
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 81caf1c35e..c07314dd7f 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -354,7 +354,9 @@ def canonic_data_files(
     ]
 
 
-def entry_points(text: str, text_source: str = "entry-points") -> dict[str, dict]:
+def entry_points(
+    text: str, text_source: str = "entry-points"
+) -> dict[str, dict[str, str]]:
     """Given the contents of entry-points file,
     process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
     The first level keys are entry-point groups, the second level keys are
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index f5bda2ce34..cacd898264 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -330,7 +330,7 @@ def _obtain_readme(self, dist: Distribution) -> dict[str, str] | None:
 
     def _obtain_entry_points(
         self, dist: Distribution, package_dir: Mapping[str, str]
-    ) -> dict[str, dict] | None:
+    ) -> dict[str, dict[str, Any]] | None:
         fields = ("entry-points", "scripts", "gui-scripts")
         if not any(field in self.dynamic for field in fields):
             return None
@@ -340,7 +340,8 @@ def _obtain_entry_points(
             return None
 
         groups = _expand.entry_points(text)
-        expanded = {"entry-points": groups}
+        # Any is str | dict[str, str], but causes variance issues
+        expanded: dict[str, dict[str, Any]] = {"entry-points": groups}
 
         def _set_scripts(field: str, group: str):
             if group in groups:
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 35fe4f9aa9..7aafcf6680 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -20,6 +20,7 @@
     TYPE_CHECKING,
     Any,
     Callable,
+    ClassVar,
     Dict,
     Generic,
     Iterable,
@@ -245,7 +246,7 @@ class ConfigHandler(Generic[Target]):
 
     """
 
-    aliases: dict[str, str] = {}
+    aliases: ClassVar[dict[str, str]] = {}
     """Options aliases.
     For compatibility with various packages. E.g.: d2to1 and pbr.
     Note: `-` in keys is replaced with `_` by config parser.
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
index 09d1e2f474..9865552151 100644
--- a/setuptools/discovery.py
+++ b/setuptools/discovery.py
@@ -45,7 +45,7 @@
 from fnmatch import fnmatchcase
 from glob import glob
 from pathlib import Path
-from typing import TYPE_CHECKING, Iterable, Mapping
+from typing import TYPE_CHECKING, ClassVar, Iterable, Mapping
 
 import _distutils_hack.override  # noqa: F401
 
@@ -84,8 +84,8 @@ def __contains__(self, item: str) -> bool:
 class _Finder:
     """Base class that exposes functionality for module/package finders"""
 
-    ALWAYS_EXCLUDE: tuple[str, ...] = ()
-    DEFAULT_EXCLUDE: tuple[str, ...] = ()
+    ALWAYS_EXCLUDE: ClassVar[tuple[str, ...]] = ()
+    DEFAULT_EXCLUDE: ClassVar[tuple[str, ...]] = ()
 
     @classmethod
     def find(
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index ec5e79da05..5dedeeb47f 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -12,7 +12,7 @@
 import tempfile
 import textwrap
 from types import TracebackType
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, ClassVar
 
 import pkg_resources
 from pkg_resources import working_set
@@ -416,7 +416,7 @@ def _remap_pair(self, operation, src, dst, *args, **kw):
 class DirectorySandbox(AbstractSandbox):
     """Restrict operations to a single subdirectory - pseudo-chroot"""
 
-    write_ops: dict[str, None] = dict.fromkeys([
+    write_ops: ClassVar[dict[str, None]] = dict.fromkeys([
         "open",
         "chmod",
         "chown",

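The `__exit__` signatures introduced above follow the standard context-manager typing used by typeshed: all three arguments are None when the block exits cleanly, hence the Optional types. A minimal self-contained sketch of the convention (hypothetical class, not setuptools code):

    from __future__ import annotations

    from types import TracebackType

    class Guard:
        def __enter__(self) -> Guard:
            return self

        def __exit__(
            self,
            exc_type: type[BaseException] | None,
            exc_value: BaseException | None,
            traceback: TracebackType | None,
        ) -> None:
            # All three arguments are None when the with-block exits cleanly.
            pass
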
From cd0ef12fc236328693c6e4fb10226b92d3bfcb21 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 30 Oct 2024 15:24:36 -0400
Subject: [PATCH 1255/1761] Consistently use `sys.version_info` as a
 `NamedTuple`

---
 pkg_resources/__init__.py             | 2 +-
 pkg_resources/tests/test_resources.py | 2 +-
 setuptools/command/bdist_wheel.py     | 4 ++--
 setuptools/command/easy_install.py    | 9 ++++-----
 setuptools/command/egg_info.py        | 2 +-
 setuptools/package_index.py           | 5 +----
 setuptools/tests/test_easy_install.py | 4 +++-
 setuptools/tests/test_egg_info.py     | 4 ++--
 8 files changed, 15 insertions(+), 17 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index f1f0ef2535..d27b10d4b6 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -387,7 +387,7 @@ class UnknownExtra(ResolutionError):
 
 _provider_factories: dict[type[_ModuleLike], _ProviderFactoryType] = {}
 
-PY_MAJOR = '{}.{}'.format(*sys.version_info)
+PY_MAJOR = f'{sys.version_info.major}.{sys.version_info.minor}'
 EGG_DIST = 3
 BINARY_DIST = 2
 SOURCE_DIST = 1
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index f5e793fb90..8bd8a1766a 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -119,7 +119,7 @@ def testDistroBasics(self):
         self.checkFooPkg(d)
 
         d = Distribution("/some/path")
-        assert d.py_version == '{}.{}'.format(*sys.version_info)
+        assert d.py_version == f'{sys.version_info.major}.{sys.version_info.minor}'
         assert d.platform is None
 
     def testDistroParse(self):
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index c88753476f..f23caaa09f 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -64,7 +64,7 @@ def _is_32bit_interpreter() -> bool:
 
 
 def python_tag() -> str:
-    return f"py{sys.version_info[0]}"
+    return f"py{sys.version_info.major}"
 
 
 def get_platform(archive_root: str | None) -> str:
@@ -483,7 +483,7 @@ def run(self):
         # Add to 'Distribution.dist_files' so that the "upload" command works
         getattr(self.distribution, "dist_files", []).append((
             "bdist_wheel",
-            "{}.{}".format(*sys.version_info[:2]),  # like 3.7
+            f"{sys.version_info.major}.{sys.version_info.minor}",
             wheel_path,
         ))
 
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 6c835db593..5778020ccb 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -234,10 +234,9 @@ def _render_version():
         """
         Render the Setuptools version and installation details, then exit.
         """
-        ver = '{}.{}'.format(*sys.version_info)
+        ver = f'{sys.version_info.major}.{sys.version_info.minor}'
         dist = get_distribution('setuptools')
-        tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
-        print(tmpl.format(**locals()))
+        print(f'setuptools {dist.version} from {dist.location} (Python {ver})')
         raise SystemExit
 
     def finalize_options(self):  # noqa: C901  # is too complex (25)  # FIXME
@@ -1441,7 +1440,7 @@ def get_site_dirs():
                 os.path.join(
                     prefix,
                     "lib",
-                    "python{}.{}".format(*sys.version_info),
+                    f"python{sys.version_info.major}.{sys.version_info.minor}",
                     "site-packages",
                 ),
                 os.path.join(prefix, "lib", "site-python"),
@@ -1468,7 +1467,7 @@ def get_site_dirs():
             home,
             'Library',
             'Python',
-            '{}.{}'.format(*sys.version_info),
+            f'{sys.version_info.major}.{sys.version_info.minor}',
             'site-packages',
         )
         sitedirs.append(home_sp)
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 7b9f8f0b72..f9b8c6df71 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -31,7 +31,7 @@
 from distutils.filelist import FileList as _FileList
 from distutils.util import convert_path
 
-PY_MAJOR = '{}.{}'.format(*sys.version_info)
+PY_MAJOR = f'{sys.version_info.major}.{sys.version_info.minor}'
 
 
 def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 2a05b35c4f..ef7f41514e 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -64,10 +64,7 @@
 
 _SOCKET_TIMEOUT = 15
 
-_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
-user_agent = _tmpl.format(
-    py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools
-)
+user_agent = f"setuptools/{setuptools.__version__} Python-urllib/{sys.version_info.major}.{sys.version_info.minor}"
 
 
 def parse_requirement_arg(spec):
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index bfe1b8da90..60a31e3bf2 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -859,7 +859,9 @@ def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir):
         )
         dep_2_0_sdist = 'dep-2.0.tar.gz'
         dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist))
-        dep_2_0_python_requires = '!=' + '.'.join(map(str, sys.version_info[:2])) + '.*'
+        dep_2_0_python_requires = (
+            f'!={sys.version_info.major}.{sys.version_info.minor}.*'
+        )
         make_python_requires_sdist(
             str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires
         )
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 12d6b30a8b..f82d931eba 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -261,11 +261,11 @@ def _setup_script_with_requires(self, requires, use_setup_cfg=False):
         })
 
     mismatch_marker = "python_version<'{this_ver}'".format(
-        this_ver=sys.version_info[0],
+        this_ver=sys.version_info.major,
     )
     # Alternate equivalent syntax.
     mismatch_marker_alternate = 'python_version < "{this_ver}"'.format(
-        this_ver=sys.version_info[0],
+        this_ver=sys.version_info.major,
     )
     invalid_marker = "<=>++"
 

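The motivation for the change above: `sys.version_info` is a named tuple, so its components can be read by field name instead of positional unpacking, which reads more clearly in f-strings. A quick sketch:

    import sys

    # sys.version_info supports both positional and named access.
    assert sys.version_info[0] == sys.version_info.major
    assert sys.version_info[:2] == (sys.version_info.major, sys.version_info.minor)

    # Named fields avoid the opaque '{}.{}'.format(*sys.version_info) idiom.
    print(f"{sys.version_info.major}.{sys.version_info.minor}")
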
From 83b081bce5ef279eeee94b956be5fd3548283072 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 31 Oct 2024 12:26:33 +0000
Subject: [PATCH 1256/1761] Move setuptools._path.StrPath out of the
 TYPE_CHECKING check (Python 3.9+)

---
 setuptools/_path.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/setuptools/_path.py b/setuptools/_path.py
index 5a2bdbd0d4..b749c6c6a0 100644
--- a/setuptools/_path.py
+++ b/setuptools/_path.py
@@ -10,10 +10,8 @@
 if TYPE_CHECKING:
     from typing_extensions import TypeAlias
 
-    StrPath: TypeAlias = Union[str, os.PathLike[str]]  #  Same as _typeshed.StrPath
-else:
-    # Python 3.8 support
-    StrPath: TypeAlias = Union[str, os.PathLike]
+
+StrPath: TypeAlias = Union[str, os.PathLike[str]]  #  Same as _typeshed.StrPath
 
 
 def ensure_directory(path):

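This simplification works because `os.PathLike` became subscriptable at runtime in Python 3.9, so the right-hand side of the alias no longer needs a TYPE_CHECKING guard. A small sketch of the resulting pattern (the `as_string` helper is hypothetical):

    from __future__ import annotations

    import os
    from typing import TYPE_CHECKING, Union

    if TYPE_CHECKING:
        from typing_extensions import TypeAlias

    # Evaluates fine at runtime on Python 3.9+; the TypeAlias annotation itself
    # is never evaluated thanks to `from __future__ import annotations`.
    StrPath: TypeAlias = Union[str, os.PathLike[str]]

    def as_string(path: StrPath) -> str:
        # os.fspath() accepts both plain strings and PathLike objects.
        return os.fspath(path)
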
From c7df80e958db7380a82a43f8ed946f6d34e06ecd Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 31 Oct 2024 12:32:33 +0000
Subject: [PATCH 1257/1761] Add comment explaining the origin of the workaround
 for splituser

---
 setuptools/package_index.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 2a05b35c4f..ce9debbce4 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1104,6 +1104,7 @@ def open_with_auth(url, opener=urllib.request.urlopen):
 
 
 # copy of urllib.parse._splituser from Python 3.8
+# See https://github.com/python/cpython/issues/80072.
 def _splituser(host):
     """splituser('user[:passwd]@host[:port]')
     --> 'user[:passwd]', 'host[:port]'."""

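For reference, the docstring above describes behaviour equivalent to this rough sketch (an illustrative re-implementation, not the vendored code verbatim):

    from __future__ import annotations

    def splituser(host: str) -> tuple[str | None, str]:
        # splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
        user, delim, hostport = host.rpartition('@')
        return (user if delim else None), hostport

    assert splituser('alice:secret@example.org:8080') == ('alice:secret', 'example.org:8080')
    assert splituser('example.org') == (None, 'example.org')
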
From 093af8c72b21a2387fe94f28a47ab94d9b550e17 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 31 Oct 2024 10:00:43 -0400
Subject: [PATCH 1258/1761] Disallow blanket and unused suppressions (PGH,
 RUF10)

---
 ruff.toml                                           | 2 ++
 setuptools/_distutils/compat/py38.py                | 2 +-
 setuptools/tests/config/test_apply_pyprojecttoml.py | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 438c048962..f6b11962f1 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -19,7 +19,9 @@ extend-select = [
 	"FA", # flake8-future-annotations
 	"I", # isort
 	"PERF", # Perflint
+	"PGH", # pygrep-hooks (blanket-* rules)
 	"PYI", # flake8-pyi
+	"RUF10", # unused-noqa & redirected-noqa
 	"TRY", # tryceratops
 	"UP", # pyupgrade
 	"YTT", # flake8-2020
diff --git a/setuptools/_distutils/compat/py38.py b/setuptools/_distutils/compat/py38.py
index 03ec73ef0e..afe5345553 100644
--- a/setuptools/_distutils/compat/py38.py
+++ b/setuptools/_distutils/compat/py38.py
@@ -26,7 +26,7 @@ def removeprefix(self, prefix):
 
 def aix_platform(osname, version, release):
     try:
-        import _aix_support  # type: ignore
+        import _aix_support
 
         return _aix_support.aix_platform()
     except ImportError:
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index deee6fa47c..d18ba6e129 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -17,7 +17,7 @@
 from ini2toml.api import LiteTranslator
 from packaging.metadata import Metadata
 
-import setuptools  # noqa ensure monkey patch to metadata
+import setuptools  # noqa: F401 # ensure monkey patch to metadata
 from setuptools.command.egg_info import write_requirements
 from setuptools.config import expand, pyprojecttoml, setupcfg
 from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter

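The net effect of enabling PGH and RUF10 above: suppressions must name the exact rule they silence, and suppressions that silence nothing become errors themselves. A tiny illustration:

    import sys  # noqa: F401  # allowed: names the specific rule

    # Now flagged (shown as comments so the sketch stays lint-clean):
    #   import os  # noqa           <- PGH004: blanket noqa
    #   x = 1  # type: ignore       <- PGH003: blanket type: ignore
    #   y = 2  # noqa: F401         <- RUF100: noqa that suppresses nothing
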
From e3d3e988e1c09834199b4f17a066401e757e58a6 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 28 Oct 2024 15:07:45 -0400
Subject: [PATCH 1259/1761] Add fake __getattribute__ to AbstractSandbox to let
 type-checkers know that its methods are created dynamically

---
 setuptools/sandbox.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 5dedeeb47f..4f8a515b58 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -12,7 +12,7 @@
 import tempfile
 import textwrap
 from types import TracebackType
-from typing import TYPE_CHECKING, ClassVar
+from typing import TYPE_CHECKING, Any, ClassVar
 
 import pkg_resources
 from pkg_resources import working_set
@@ -406,6 +406,11 @@ def _remap_pair(self, operation, src, dst, *args, **kw):
             self._remap_input(operation + '-to', dst, *args, **kw),
         )
 
+    if TYPE_CHECKING:
+        # This is a catch-all for all the dynamically created attributes.
+        # This isn't public API anyway
+        def __getattribute__(self, name: str) -> Any: ...
+
 
 if hasattr(os, 'devnull'):
     _EXCEPTIONS = [os.devnull]

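The trick above is a common pattern for classes whose methods are generated at runtime: a `__getattribute__` stub declared only under TYPE_CHECKING tells mypy and pyright that any attribute access is valid (typed `Any`) while leaving runtime lookup untouched. A minimal standalone sketch with a hypothetical class:

    from typing import TYPE_CHECKING, Any

    class DynamicProxy:
        """Creates wrapper methods at runtime, invisible to type-checkers."""

        def _wrap(self, name: str) -> None:
            def method(*args, **kw):
                return (name, args, kw)

            setattr(self, name, method)

        if TYPE_CHECKING:
            # Never executed; only informs static analysis.
            def __getattribute__(self, name: str) -> Any: ...

    proxy = DynamicProxy()
    proxy._wrap("ping")
    print(proxy.ping(1))  # ('ping', (1,), {})
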
From 04b515ec9aab310ffafe843fb7a84caeb33848ed Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 30 Oct 2024 12:12:08 -0400
Subject: [PATCH 1260/1761] setuptools ANN201 autofixes for fully untyped
 functions

---
 _distutils_hack/__init__.py            |  3 +-
 setuptools/__init__.py                 |  2 +-
 setuptools/archive_util.py             | 10 +++--
 setuptools/command/alias.py            |  4 +-
 setuptools/command/bdist_egg.py        | 14 +++----
 setuptools/command/bdist_rpm.py        |  2 +-
 setuptools/command/build.py            |  6 +--
 setuptools/command/build_clib.py       |  2 +-
 setuptools/command/build_ext.py        | 12 +++---
 setuptools/command/build_py.py         |  8 ++--
 setuptools/command/develop.py          |  6 +--
 setuptools/command/dist_info.py        |  4 +-
 setuptools/command/easy_install.py     | 52 +++++++++++++-------------
 setuptools/command/editable_wheel.py   |  4 +-
 setuptools/command/egg_info.py         | 48 ++++++++++++------------
 setuptools/command/install.py          |  4 +-
 setuptools/command/install_egg_info.py |  4 +-
 setuptools/command/install_lib.py      |  2 +-
 setuptools/command/install_scripts.py  |  4 +-
 setuptools/command/rotate.py           |  4 +-
 setuptools/command/sdist.py            | 14 +++----
 setuptools/command/setopt.py           |  4 +-
 setuptools/config/setupcfg.py          | 12 +++---
 setuptools/discovery.py                |  2 +-
 setuptools/dist.py                     | 14 +++----
 setuptools/launch.py                   |  2 +-
 setuptools/logging.py                  |  2 +-
 setuptools/msvc.py                     | 12 +++---
 setuptools/namespaces.py               |  4 +-
 setuptools/package_index.py            | 34 ++++++++---------
 setuptools/sandbox.py                  |  2 +-
 setuptools/wheel.py                    |  4 +-
 32 files changed, 151 insertions(+), 150 deletions(-)

diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
index 30ac3a7403..6ee497b38f 100644
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -3,8 +3,7 @@
 import sys
 
 report_url = (
-    "https://github.com/pypa/setuptools/issues/new?"
-    "template=distutils-deprecation.yml"
+    "https://github.com/pypa/setuptools/issues/new?template=distutils-deprecation.yml"
 )
 
 
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index eba86c4f9c..28e5ee994c 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -186,7 +186,7 @@ def _ensure_stringlike(self, option, what, default=None):
             )
         return val
 
-    def ensure_string_list(self, option: str):
+    def ensure_string_list(self, option: str) -> None:
         r"""Ensure that 'option' is a list of strings.  If 'option' is
         currently a string, we split it either on /,\s*/ or /\s+/, so
         "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py
index e4acd75f9b..cd9cf9c08f 100644
--- a/setuptools/archive_util.py
+++ b/setuptools/archive_util.py
@@ -31,7 +31,9 @@ def default_filter(src, dst):
     return dst
 
 
-def unpack_archive(filename, extract_dir, progress_filter=default_filter, drivers=None):
+def unpack_archive(
+    filename, extract_dir, progress_filter=default_filter, drivers=None
+) -> None:
     """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
 
     `progress_filter` is a function taking two arguments: a source path
@@ -63,7 +65,7 @@ def unpack_archive(filename, extract_dir, progress_filter=default_filter, driver
         raise UnrecognizedFormat("Not a recognized archive type: %s" % filename)
 
 
-def unpack_directory(filename, extract_dir, progress_filter=default_filter):
+def unpack_directory(filename, extract_dir, progress_filter=default_filter) -> None:
     """ "Unpack" a directory, using the same interface as for archives
 
     Raises ``UnrecognizedFormat`` if `filename` is not a directory
@@ -90,7 +92,7 @@ def unpack_directory(filename, extract_dir, progress_filter=default_filter):
             shutil.copystat(f, target)
 
 
-def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
+def unpack_zipfile(filename, extract_dir, progress_filter=default_filter) -> None:
     """Unpack zip `filename` to `extract_dir`
 
     Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
@@ -185,7 +187,7 @@ def _iter_open_tar(tar_obj, extract_dir, progress_filter):
             yield member, final_dst
 
 
-def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
+def unpack_tarfile(filename, extract_dir, progress_filter=default_filter) -> bool:
     """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
 
     Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
diff --git a/setuptools/command/alias.py b/setuptools/command/alias.py
index 3111d26496..388830d7a6 100644
--- a/setuptools/command/alias.py
+++ b/setuptools/command/alias.py
@@ -30,14 +30,14 @@ def initialize_options(self):
         self.args = None
         self.remove = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         option_base.finalize_options(self)
         if self.remove and len(self.args) != 1:
             raise DistutilsOptionError(
                 "Must specify exactly one argument (the alias name) when using --remove"
             )
 
-    def run(self):
+    def run(self) -> None:
         aliases = self.distribution.get_option_dict('aliases')
 
         if not self.args:
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 2881d8a9da..24e5ff19de 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -50,7 +50,7 @@ def sorted_walk(dir):
         yield base, dirs, files
 
 
-def write_stub(resource, pyfile):
+def write_stub(resource, pyfile) -> None:
     _stub_template = textwrap.dedent(
         """
         def __bootstrap__():
@@ -101,7 +101,7 @@ def initialize_options(self):
         self.egg_output = None
         self.exclude_source_files = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
         self.egg_info = ei_cmd.egg_info
 
@@ -125,7 +125,7 @@ def finalize_options(self):
 
             self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
 
-    def do_install_data(self):
+    def do_install_data(self) -> None:
         # Hack for packages that install data to install's --install-lib
         self.get_finalized_command('install').install_lib = self.bdist_dir
 
@@ -277,10 +277,10 @@ def zip_safe(self):
         log.warn("zip_safe flag not set; analyzing archive contents...")
         return analyze_egg(self.bdist_dir, self.stubs)
 
-    def gen_header(self):
+    def gen_header(self) -> str:
         return 'w'
 
-    def copy_metadata_to(self, target_dir):
+    def copy_metadata_to(self, target_dir) -> None:
         "Copy metadata (egg info) to the target_dir"
         # normalize the path (so that a forward-slash in egg_info will
         # match using startswith below)
@@ -353,7 +353,7 @@ def analyze_egg(egg_dir, stubs):
     return safe
 
 
-def write_safety_flag(egg_dir, safe):
+def write_safety_flag(egg_dir, safe) -> None:
     # Write or remove zip safety flag file(s)
     for flag, fn in safety_flags.items():
         fn = os.path.join(egg_dir, fn)
@@ -421,7 +421,7 @@ def iter_symbols(code):
             yield from iter_symbols(const)
 
 
-def can_scan():
+def can_scan() -> bool:
     if not sys.platform.startswith('java') and sys.platform != 'cli':
         # CPython, PyPy, etc.
         return True
diff --git a/setuptools/command/bdist_rpm.py b/setuptools/command/bdist_rpm.py
index e0d4caf2e9..6dbb27002a 100644
--- a/setuptools/command/bdist_rpm.py
+++ b/setuptools/command/bdist_rpm.py
@@ -15,7 +15,7 @@ class bdist_rpm(orig.bdist_rpm):
 
     distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
 
-    def run(self):
+    def run(self) -> None:
         SetuptoolsDeprecationWarning.emit(
             "Deprecated command",
             """
diff --git a/setuptools/command/build.py b/setuptools/command/build.py
index f60fcbda15..54cbb8d2e7 100644
--- a/setuptools/command/build.py
+++ b/setuptools/command/build.py
@@ -85,15 +85,15 @@ def finalize_options(self):
             ...
     """
 
-    def initialize_options(self):
+    def initialize_options(self) -> None:
         """(Required by the original :class:`setuptools.Command` interface)"""
         ...
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         """(Required by the original :class:`setuptools.Command` interface)"""
         ...
 
-    def run(self):
+    def run(self) -> None:
         """(Required by the original :class:`setuptools.Command` interface)"""
         ...
 
diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py
index eab08e70f2..bee3d58c03 100644
--- a/setuptools/command/build_clib.py
+++ b/setuptools/command/build_clib.py
@@ -24,7 +24,7 @@ class build_clib(orig.build_clib):
 
     distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
 
-    def build_libraries(self, libraries):
+    def build_libraries(self, libraries) -> None:
         for lib_name, build_info in libraries:
             sources = build_info.get('sources')
             if sources is None or not isinstance(sources, (list, tuple)):
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index 1b9c313ff5..d4c831e176 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -110,7 +110,7 @@ def _get_inplace_equivalent(self, build_py, ext: Extension) -> tuple[str, str]:
         regular_file = os.path.join(self.build_lib, filename)
         return (inplace_file, regular_file)
 
-    def copy_extensions_to_source(self):
+    def copy_extensions_to_source(self) -> None:
         build_py = self.get_finalized_command('build_py')
         for ext in self.extensions:
             inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
@@ -191,7 +191,7 @@ def initialize_options(self):
         self.ext_map = {}
         self.editable_mode = False
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         _build_ext.finalize_options(self)
         self.extensions = self.extensions or []
         self.check_extensions_list(self.extensions)
@@ -254,7 +254,7 @@ def get_export_symbols(self, ext):
             return ext.export_symbols
         return _build_ext.get_export_symbols(self, ext)
 
-    def build_extension(self, ext):
+    def build_extension(self, ext) -> None:
         ext._convert_pyx_sources_to_lang()
         _compiler = self.compiler
         try:
@@ -344,7 +344,7 @@ def __get_output_extensions(self):
         if self.get_finalized_command('build_py').optimize:
             yield '.pyo'
 
-    def write_stub(self, output_dir, ext, compile=False):
+    def write_stub(self, output_dir, ext, compile=False) -> None:
         stub_file = os.path.join(output_dir, *ext._full_name.split('.')) + '.py'
         self._write_stub_file(stub_file, ext, compile)
 
@@ -415,7 +415,7 @@ def link_shared_object(
         extra_postargs=None,
         build_temp=None,
         target_lang=None,
-    ):
+    ) -> None:
         self.link(
             self.SHARED_LIBRARY,
             objects,
@@ -450,7 +450,7 @@ def link_shared_object(
         extra_postargs=None,
         build_temp=None,
         target_lang=None,
-    ):
+    ) -> None:
         # XXX we need to either disallow these attrs on Library instances,
         # or warn/abort here if set, or something...
         # libraries=None, library_dirs=None, runtime_library_dirs=None,
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index 628a20b40b..aa4429d1a8 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -24,7 +24,7 @@
 _IMPLICIT_DATA_FILES = ('*.pyi', 'py.typed')
 
 
-def make_writable(target):
+def make_writable(target) -> None:
     os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE)
 
 
@@ -67,7 +67,7 @@ def copy_file(  # type: ignore[override] # No overload, str support only
             infile, outfile, preserve_mode, preserve_times, link, level
         )
 
-    def run(self):
+    def run(self) -> None:
         """Build modules, packages, and copy data files to build directory"""
         if not (self.py_modules or self.packages) or self.editable_mode:
             return
@@ -172,7 +172,7 @@ def _get_package_data_output_mapping(self) -> Iterator[tuple[str, str]]:
                 srcfile = os.path.join(src_dir, filename)
                 yield (target, srcfile)
 
-    def build_package_data(self):
+    def build_package_data(self) -> None:
         """Copy data files into build directory"""
         for target, srcfile in self._get_package_data_output_mapping():
             self.mkpath(os.path.dirname(target))
@@ -239,7 +239,7 @@ def _filter_build_files(self, files: Iterable[str], egg_info: str) -> Iterator[s
             if not os.path.isabs(file) or all(d not in norm_path for d in norm_dirs):
                 yield file
 
-    def get_data_files(self):
+    def get_data_files(self) -> None:
         pass  # Lazily compute data files in _get_data_files() function.
 
     def check_package(self, package, package_dir):
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 4ecbd5a1e8..19908d5d70 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -42,7 +42,7 @@ def initialize_options(self):
         self.setup_path = None
         self.always_copy_from = '.'  # always copy eggs installed in curdir
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         import pkg_resources
 
         ei = self.get_finalized_command("egg_info")
@@ -104,7 +104,7 @@ def _resolve_setup_path(egg_base, install_dir, egg_path):
             )
         return path_to_setup
 
-    def install_for_development(self):
+    def install_for_development(self) -> None:
         self.run_command('egg_info')
 
         # Build extensions in-place
@@ -126,7 +126,7 @@ def install_for_development(self):
         # and handling requirements
         self.process_distribution(None, self.dist, not self.no_deps)
 
-    def uninstall_link(self):
+    def uninstall_link(self) -> None:
         if os.path.exists(self.egg_link):
             log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
 
diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py
index 1db3fbf6bd..3ad27ed708 100644
--- a/setuptools/command/dist_info.py
+++ b/setuptools/command/dist_info.py
@@ -48,7 +48,7 @@ def initialize_options(self):
         self.tag_build = None
         self.keep_egg_info = False
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         dist = self.distribution
         project_dir = dist.src_root or os.curdir
         self.output_dir = Path(self.output_dir or project_dir)
@@ -88,7 +88,7 @@ def _maybe_bkp_dir(self, dir_path: str, requires_bkp: bool):
         else:
             yield
 
-    def run(self):
+    def run(self) -> None:
         self.output_dir.mkdir(parents=True, exist_ok=True)
         self.egg_info.run()
         egg_info_dir = self.egg_info.egg_info
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 5778020ccb..915d3e1c07 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -34,7 +34,7 @@
 from collections.abc import Iterable
 from glob import glob
 from sysconfig import get_path
-from typing import TYPE_CHECKING, Callable, TypeVar
+from typing import TYPE_CHECKING, Callable, NoReturn, TypeVar
 
 from jaraco.text import yield_lines
 
@@ -212,7 +212,7 @@ def initialize_options(self):
             self, self.distribution.get_option_dict('easy_install')
         )
 
-    def delete_blockers(self, blockers):
+    def delete_blockers(self, blockers) -> None:
         extant_blockers = (
             filename
             for filename in blockers
@@ -407,12 +407,12 @@ def _expand_attrs(self, attrs):
                 val = subst_vars(val, self.config_vars)
                 setattr(self, attr, val)
 
-    def expand_basedirs(self):
+    def expand_basedirs(self) -> None:
         """Calls `os.path.expanduser` on install_base, install_platbase and
         root."""
         self._expand_attrs(['install_base', 'install_platbase', 'root'])
 
-    def expand_dirs(self):
+    def expand_dirs(self) -> None:
         """Calls `os.path.expanduser` on install dirs."""
         dirs = [
             'install_purelib',
@@ -424,7 +424,7 @@ def expand_dirs(self):
         ]
         self._expand_attrs(dirs)
 
-    def run(self, show_deprecation: bool = True):
+    def run(self, show_deprecation: bool = True) -> None:
         if show_deprecation:
             self.announce(
                 "WARNING: The easy_install command is deprecated "
@@ -464,10 +464,10 @@ def pseudo_tempname(self):
             pid = random.randint(0, sys.maxsize)
         return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
 
-    def warn_deprecated_options(self):
+    def warn_deprecated_options(self) -> None:
         pass
 
-    def check_site_dir(self):  # is too complex (12)  # FIXME
+    def check_site_dir(self) -> None:  # is too complex (12)  # FIXME
         """Verify that self.install_dir is .pth-capable dir, if needed"""
 
         instdir = normalize_path(self.install_dir)
@@ -554,7 +554,7 @@ def check_site_dir(self):  # is too complex (12)  # FIXME
         """
     ).lstrip()
 
-    def cant_write_to_target(self):
+    def cant_write_to_target(self) -> NoReturn:
         msg = self.__cant_write_msg % (
             sys.exc_info()[1],
             self.install_dir,
@@ -626,7 +626,7 @@ def check_pth_processing(self):  # noqa: C901
             log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
         return False
 
-    def install_egg_scripts(self, dist):
+    def install_egg_scripts(self, dist) -> None:
         """Write all the scripts for `dist`, unless scripts are excluded"""
         if not self.exclude_scripts and dist.metadata_isdir('scripts'):
             for script_name in dist.metadata_listdir('scripts'):
@@ -639,7 +639,7 @@ def install_egg_scripts(self, dist):
                 )
         self.install_wrapper_scripts(dist)
 
-    def add_output(self, path):
+    def add_output(self, path) -> None:
         if os.path.isdir(path):
             for base, dirs, files in os.walk(path):
                 for filename in files:
@@ -647,14 +647,14 @@ def add_output(self, path):
         else:
             self.outputs.append(path)
 
-    def not_editable(self, spec):
+    def not_editable(self, spec) -> None:
         if self.editable:
             raise DistutilsArgError(
                 "Invalid argument %r: you can't use filenames or URLs "
                 "with --editable (except via the --find-links option)." % (spec,)
             )
 
-    def check_editable(self, spec):
+    def check_editable(self, spec) -> None:
         if not self.editable:
             return
 
@@ -760,7 +760,7 @@ def process_distribution(  # noqa: C901
         dist,
         deps: bool = True,
         *info,
-    ):
+    ) -> None:
         self.update_pth(dist)
         self.package_index.add(dist)
         if dist in self.local_index[dist.key]:
@@ -798,7 +798,7 @@ def process_distribution(  # noqa: C901
                     self.easy_install(dist.as_requirement())
         log.info("Finished processing dependencies for %s", requirement)
 
-    def should_unzip(self, dist):
+    def should_unzip(self, dist) -> bool:
         if self.zip_ok is not None:
             return not self.zip_ok
         if dist.has_metadata('not-zip-safe'):
@@ -828,13 +828,13 @@ def maybe_move(self, spec, dist_filename, setup_base):
         shutil.move(setup_base, dst)
         return dst
 
-    def install_wrapper_scripts(self, dist):
+    def install_wrapper_scripts(self, dist) -> None:
         if self.exclude_scripts:
             return
         for args in ScriptWriter.best().get_args(dist):
             self.write_script(*args)
 
-    def install_script(self, dist, script_name, script_text, dev_path=None):
+    def install_script(self, dist, script_name, script_text, dev_path=None) -> None:
         """Generate a legacy script wrapper and install it"""
         spec = str(dist.as_requirement())
         is_script = is_python_script(script_text, script_name)
@@ -859,7 +859,7 @@ def _load_template(dev_path):
         raw_bytes = resource_string('setuptools', name)
         return raw_bytes.decode('utf-8')
 
-    def write_script(self, script_name, contents, mode: str = "t", blockers=()):
+    def write_script(self, script_name, contents, mode: str = "t", blockers=()) -> None:
         """Write an executable file to the scripts directory"""
         self.delete_blockers(  # clean up old .py/.pyw w/o a script
             [os.path.join(self.script_dir, x) for x in blockers]
@@ -1042,7 +1042,7 @@ def install_exe(self, dist_filename, tmpdir):
         return self.install_egg(egg_path, tmpdir)
 
     # FIXME: 'easy_install.exe_to_egg' is too complex (12)
-    def exe_to_egg(self, dist_filename, egg_tmp):  # noqa: C901
+    def exe_to_egg(self, dist_filename, egg_tmp) -> None:  # noqa: C901
         """Extract a bdist_wininst to the directories an egg would use"""
         # Check for .pth file and set up prefix translations
         prefixes = get_exe_prefixes(dist_filename)
@@ -1174,7 +1174,7 @@ def report_editable(self, spec, setup_script):
         python = sys.executable
         return '\n' + self.__editable_msg % locals()
 
-    def run_setup(self, setup_script, setup_base, args):
+    def run_setup(self, setup_script, setup_base, args) -> None:
         sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
         sys.modules.setdefault('distutils.command.egg_info', egg_info)
 
@@ -1243,7 +1243,7 @@ def _set_fetcher_options(self, base):
         cfg_filename = os.path.join(base, 'setup.cfg')
         setopt.edit_config(cfg_filename, settings)
 
-    def update_pth(self, dist):  # noqa: C901  # is too complex (11)  # FIXME
+    def update_pth(self, dist) -> None:  # noqa: C901  # is too complex (11)  # FIXME
         if self.pth_file is None:
             return
 
@@ -1292,7 +1292,7 @@ def unpack_progress(self, src, dst):
         log.debug("Unpacking %s to %s", src, dst)
         return dst  # only unpack-and-compile skips files for dry run
 
-    def unpack_and_compile(self, egg_path, destination):
+    def unpack_and_compile(self, egg_path, destination) -> None:
         to_compile = []
         to_chmod = []
 
@@ -1311,7 +1311,7 @@ def pf(src, dst):
                 mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
                 chmod(f, mode)
 
-    def byte_compile(self, to_compile):
+    def byte_compile(self, to_compile) -> None:
         if sys.dont_write_bytecode:
             return
 
@@ -1366,7 +1366,7 @@ def byte_compile(self, to_compile):
         """
     ).strip()
 
-    def create_home_path(self):
+    def create_home_path(self) -> None:
         """Create directories under ~."""
         if not self.user:
             return
@@ -1656,7 +1656,7 @@ def _load(self):
             return self._load_raw()
         return [], False
 
-    def save(self):
+    def save(self) -> None:
         """Write changed .pth file back to disk"""
         # first reload the file
         last_paths, last_dirty = self._load()
@@ -1710,7 +1710,7 @@ def save(self):
     def _wrap_lines(lines):
         return lines
 
-    def add(self, dist):
+    def add(self, dist) -> None:
         """Add `dist` to the distribution map"""
         new_path = dist.location not in self.paths and (
             dist.location not in self.sitedirs
@@ -1723,7 +1723,7 @@ def add(self, dist):
             self.dirty = True
         super().add(dist)
 
-    def remove(self, dist):
+    def remove(self, dist) -> None:
         """Remove `dist` from the distribution map"""
         while dist.location in self.paths:
             self.paths.remove(dist.location)
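
One hunk above is more than mechanical: `cant_write_to_target` is annotated
`-> NoReturn` (hence the new `typing` import) because it unconditionally
raises. A minimal standalone sketch of that distinction, with illustrative
names that are not from setuptools:

    from typing import NoReturn

    def always_fails(msg: str) -> NoReturn:
        # NoReturn tells type checkers this call never returns normally,
        # so code following a call site can be treated as unreachable
        raise RuntimeError(msg)
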
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 34722e1aac..1aa8c0d76d 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -119,13 +119,13 @@ def initialize_options(self):
         self.project_dir = None
         self.mode = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         dist = self.distribution
         self.project_dir = dist.src_root or os.curdir
         self.package_dir = dist.package_dir or {}
         self.dist_dir = Path(self.dist_dir or os.path.join(self.project_dir, "dist"))
 
-    def run(self):
+    def run(self) -> None:
         try:
             self.dist_dir.mkdir(exist_ok=True)
             self._ensure_dist_info()
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index f9b8c6df71..56ddb100e0 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -195,7 +195,7 @@ def initialize_options(self):
     # allow the 'tag_svn_revision' to be detected and
     # set, supporting sdists built on older Setuptools.
     @property
-    def tag_svn_revision(self):
+    def tag_svn_revision(self) -> None:
         pass
 
     @tag_svn_revision.setter
@@ -204,7 +204,7 @@ def tag_svn_revision(self, value):
 
     ####################################
 
-    def save_version_info(self, filename):
+    def save_version_info(self, filename) -> None:
         """
         Materialize the value of date into the
         build tag. Install build keys in a deterministic order
@@ -215,7 +215,7 @@ def save_version_info(self, filename):
         egg_info = dict(tag_build=self.tags(), tag_date=0)
         edit_config(filename, dict(egg_info=egg_info))
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         # Note: we need to capture the current value returned
         # by `self.tagged_version()`, so we can later update
         # `self.distribution.metadata.version` without
@@ -252,7 +252,7 @@ def _get_egg_basename(self, py_version=PY_MAJOR, platform=None):
         """Compute filename of the output egg. Private API."""
         return _egg_basename(self.egg_name, self.egg_version, py_version, platform)
 
-    def write_or_delete_file(self, what, filename, data, force: bool = False):
+    def write_or_delete_file(self, what, filename, data, force: bool = False) -> None:
         """Write `data` to `filename` or delete if empty
 
         If `data` is non-empty, this routine is the same as ``write_file()``.
@@ -270,7 +270,7 @@ def write_or_delete_file(self, what, filename, data, force: bool = False):
             else:
                 self.delete_file(filename)
 
-    def write_file(self, what, filename, data):
+    def write_file(self, what, filename, data) -> None:
         """Write `data` to `filename` (if not a dry run) after announcing it
 
         `what` is used in a log message to identify what is being written
@@ -283,13 +283,13 @@ def write_file(self, what, filename, data):
             f.write(data)
             f.close()
 
-    def delete_file(self, filename):
+    def delete_file(self, filename) -> None:
         """Delete `filename` (if not a dry run) after announcing it"""
         log.info("deleting %s", filename)
         if not self.dry_run:
             os.unlink(filename)
 
-    def run(self):
+    def run(self) -> None:
         # Pre-load to avoid iterating over entry-points while an empty .egg-info
         # exists in sys.path. See pypa/pyproject-hooks#206
         writers = list(metadata.entry_points(group='egg_info.writers'))
@@ -311,7 +311,7 @@ def run(self):
 
         self.find_sources()
 
-    def find_sources(self):
+    def find_sources(self) -> None:
         """Generate SOURCES.txt manifest file"""
         manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
         mm = manifest_maker(self.distribution)
@@ -472,7 +472,7 @@ def global_exclude(self, pattern):
         match = translate_pattern(os.path.join('**', pattern))
         return self._remove_files(match.match)
 
-    def append(self, item):
+    def append(self, item) -> None:
         if item.endswith('\r'):  # Fix older sdists built on Windows
             item = item[:-1]
         path = convert_path(item)
@@ -480,7 +480,7 @@ def append(self, item):
         if self._safe_path(path):
             self.files.append(path)
 
-    def extend(self, paths):
+    def extend(self, paths) -> None:
         self.files.extend(filter(self._safe_path, paths))
 
     def _repair(self):
@@ -524,17 +524,17 @@ def _safe_path(self, path):
 class manifest_maker(sdist):
     template = "MANIFEST.in"
 
-    def initialize_options(self):
+    def initialize_options(self) -> None:
         self.use_defaults = True
         self.prune = True
         self.manifest_only = True
         self.force_manifest = True
         self.ignore_egg_info_dir = False
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         pass
 
-    def run(self):
+    def run(self) -> None:
         self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir)
         if not os.path.exists(self.manifest):
             self.write_manifest()  # it must exist so it'll get in the list
@@ -552,7 +552,7 @@ def _manifest_normalize(self, path):
         path = unicode_utils.filesys_decode(path)
         return path.replace(os.sep, '/')
 
-    def write_manifest(self):
+    def write_manifest(self) -> None:
         """
         Write the file list in 'self.filelist' to the manifest file
         named by 'self.manifest'.
@@ -564,7 +564,7 @@ def write_manifest(self):
         msg = "writing manifest file '%s'" % self.manifest
         self.execute(write_file, (self.manifest, files), msg)
 
-    def warn(self, msg):
+    def warn(self, msg) -> None:
         if not self._should_suppress_warning(msg):
             sdist.warn(self, msg)
 
@@ -575,7 +575,7 @@ def _should_suppress_warning(msg):
         """
         return re.match(r"standard file .*not found", msg)
 
-    def add_defaults(self):
+    def add_defaults(self) -> None:
         sdist.add_defaults(self)
         self.filelist.append(self.template)
         self.filelist.append(self.manifest)
@@ -593,7 +593,7 @@ def add_defaults(self):
         ei_cmd = self.get_finalized_command('egg_info')
         self.filelist.graft(ei_cmd.egg_info)
 
-    def add_license_files(self):
+    def add_license_files(self) -> None:
         license_files = self.distribution.metadata.license_files or []
         for lf in license_files:
             log.info("adding license file '%s'", lf)
@@ -632,7 +632,7 @@ def _safe_data_files(self, build_py):
         return build_py.get_data_files()
 
 
-def write_file(filename, contents):
+def write_file(filename, contents) -> None:
     """Create a file with the specified name and write 'contents' (a
     sequence of strings without line terminators) to it.
     """
@@ -645,7 +645,7 @@ def write_file(filename, contents):
         f.write(contents)
 
 
-def write_pkg_info(cmd, basename, filename):
+def write_pkg_info(cmd, basename, filename) -> None:
     log.info("writing %s", filename)
     if not cmd.dry_run:
         metadata = cmd.distribution.metadata
@@ -664,7 +664,7 @@ def write_pkg_info(cmd, basename, filename):
         bdist_egg.write_safety_flag(cmd.egg_info, safe)
 
 
-def warn_depends_obsolete(cmd, basename, filename):
+def warn_depends_obsolete(cmd, basename, filename) -> None:
     """
     Unused: left to avoid errors when updating (from source) from <= 67.8.
     Old installations have a .dist-info directory with the entry-point
@@ -679,18 +679,18 @@ def warn_depends_obsolete(cmd, basename, filename):
 write_setup_requirements = _requirestxt.write_setup_requirements
 
 
-def write_toplevel_names(cmd, basename, filename):
+def write_toplevel_names(cmd, basename, filename) -> None:
     pkgs = dict.fromkeys([
         k.split('.', 1)[0] for k in cmd.distribution.iter_distribution_names()
     ])
     cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
 
 
-def overwrite_arg(cmd, basename, filename):
+def overwrite_arg(cmd, basename, filename) -> None:
     write_arg(cmd, basename, filename, True)
 
 
-def write_arg(cmd, basename, filename, force: bool = False):
+def write_arg(cmd, basename, filename, force: bool = False) -> None:
     argname = os.path.splitext(basename)[0]
     value = getattr(cmd.distribution, argname, None)
     if value is not None:
@@ -698,7 +698,7 @@ def write_arg(cmd, basename, filename, force: bool = False):
     cmd.write_or_delete_file(argname, filename, value, force)
 
 
-def write_entries(cmd, basename, filename):
+def write_entries(cmd, basename, filename) -> None:
     eps = _entry_points.load(cmd.distribution.entry_points)
     defn = _entry_points.render(eps)
     cmd.write_or_delete_file('entry points', filename, defn, True)
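
The `run` hunk above loads plugins from the `egg_info.writers` entry-point
group, and the module-level writers annotated here (`write_pkg_info`,
`write_toplevel_names`, `write_entries`, ...) all share the
`(cmd, basename, filename) -> None` shape this patch now spells out. A
hedged sketch of a third-party writer with the same signature (the writer
name and payload are hypothetical):

    def write_notes_txt(cmd, basename, filename) -> None:
        # cmd is the running egg_info command; write_or_delete_file
        # honors --dry-run and deletes the file when the data is empty
        cmd.write_or_delete_file("notes", filename, "generated\n")
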
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index e1dff0aac7..1262da0bc3 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -63,7 +63,7 @@ def initialize_options(self):
         self.old_and_unmanageable = None
         self.single_version_externally_managed = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         super().finalize_options()
         if self.root:
             self.single_version_externally_managed = True
@@ -130,7 +130,7 @@ def _called_from_setup(run_frame):
 
         return False
 
-    def do_egg_install(self):
+    def do_egg_install(self) -> None:
         easy_install = self.distribution.get_command_class('easy_install')
 
         cmd = easy_install(
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
index be47254f00..be4dd7b229 100644
--- a/setuptools/command/install_egg_info.py
+++ b/setuptools/command/install_egg_info.py
@@ -28,7 +28,7 @@ def finalize_options(self):
         self.target = os.path.join(self.install_dir, basename)
         self.outputs = []
 
-    def run(self):
+    def run(self) -> None:
         self.run_command('egg_info')
         if os.path.isdir(self.target) and not os.path.islink(self.target):
             dir_util.remove_tree(self.target, dry_run=self.dry_run)
@@ -42,7 +42,7 @@ def run(self):
     def get_outputs(self):
         return self.outputs
 
-    def copytree(self):
+    def copytree(self) -> None:
         # Copy the .egg-info tree to site-packages
         def skimmer(src, dst):
             # filter out source-control directories; note that 'src' is always
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index 53b68f6363..530a8b51d1 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -15,7 +15,7 @@ class install_lib(orig.install_lib):
 
     distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
 
-    def run(self):
+    def run(self) -> None:
         self.build()
         outfiles = self.install()
         if outfiles is not None:
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index f1ccc2bbf8..4401cf693d 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -15,7 +15,7 @@ class install_scripts(orig.install_scripts):
 
     distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
 
-    def initialize_options(self):
+    def initialize_options(self) -> None:
         orig.install_scripts.initialize_options(self)
         self.no_ep = False
 
@@ -56,7 +56,7 @@ def _install_ep_scripts(self):
         for args in writer.get_args(dist, cmd.as_header()):
             self.write_script(*args)
 
-    def write_script(self, script_name, contents, mode: str = "t", *ignored):
+    def write_script(self, script_name, contents, mode: str = "t", *ignored) -> None:
         """Write an executable file to the scripts directory"""
         from setuptools.command.easy_install import chmod, current_umask
 
diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py
index 76c7b8612e..c10e8d5024 100644
--- a/setuptools/command/rotate.py
+++ b/setuptools/command/rotate.py
@@ -28,7 +28,7 @@ def initialize_options(self):
         self.dist_dir = None
         self.keep = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         if self.match is None:
             raise DistutilsOptionError(
                 "Must specify one or more (comma-separated) match patterns "
@@ -44,7 +44,7 @@ def finalize_options(self):
             self.match = [convert_path(p.strip()) for p in self.match.split(',')]
         self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
 
-    def run(self):
+    def run(self) -> None:
         self.run_command("egg_info")
         from glob import glob
 
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index 65ce735dde..be69b33500 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -55,7 +55,7 @@ class sdist(orig.sdist):
     README_EXTENSIONS = ['', '.rst', '.txt', '.md']
     READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)
 
-    def run(self):
+    def run(self) -> None:
         self.run_command('egg_info')
         ei_cmd = self.get_finalized_command('egg_info')
         self.filelist = ei_cmd.filelist
@@ -74,10 +74,10 @@ def run(self):
             if data not in dist_files:
                 dist_files.append(data)
 
-    def initialize_options(self):
+    def initialize_options(self) -> None:
         orig.sdist.initialize_options(self)
 
-    def make_distribution(self):
+    def make_distribution(self) -> None:
         """
         Workaround for #516
         """
@@ -105,7 +105,7 @@ class NoValue:
             if orig_val is not NoValue:
                 os.link = orig_val
 
-    def add_defaults(self):
+    def add_defaults(self) -> None:
         super().add_defaults()
         self._add_defaults_build_sub_commands()
 
@@ -158,13 +158,13 @@ def _add_defaults_data_files(self):
         except TypeError:
             log.warn("data_files contains unexpected objects")
 
-    def prune_file_list(self):
+    def prune_file_list(self) -> None:
         super().prune_file_list()
         # Prevent accidental inclusion of test-related cache dirs at the project root
         sep = re.escape(os.sep)
         self.filelist.exclude_pattern(r"^(\.tox|\.nox|\.venv)" + sep, is_regex=True)
 
-    def check_readme(self):
+    def check_readme(self) -> None:
         for f in self.READMES:
             if os.path.exists(f):
                 return
@@ -173,7 +173,7 @@ def check_readme(self):
                 "standard file not found: should have one of " + ', '.join(self.READMES)
             )
 
-    def make_release_tree(self, base_dir, files):
+    def make_release_tree(self, base_dir, files) -> None:
         orig.sdist.make_release_tree(self, base_dir, files)
 
         # Save any egg_info command line options used to create this sdist
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index e351af22f0..75393f32f0 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -126,14 +126,14 @@ def initialize_options(self):
         self.set_value = None
         self.remove = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         option_base.finalize_options(self)
         if self.command is None or self.option is None:
             raise DistutilsOptionError("Must specify --command *and* --option")
         if self.set_value is None and not self.remove:
             raise DistutilsOptionError("Must specify --set-value or --remove")
 
-    def run(self):
+    def run(self) -> None:
         edit_config(
             self.filename,
             {self.command: {self.option.replace('-', '_'): self.set_value}},
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 7aafcf6680..9a8f759fc3 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -477,7 +477,7 @@ def _parse_section_to_dict(cls, section_options, values_parser=None):
         parser = (lambda _, v: values_parser(v)) if values_parser else (lambda _, v: v)
         return cls._parse_section_to_dict_with_key(section_options, parser)
 
-    def parse_section(self, section_options):
+    def parse_section(self, section_options) -> None:
         """Parses configuration file section.
 
         :param dict section_options:
@@ -713,7 +713,7 @@ def parse_section_packages__find(self, section_options):
 
         return find_kwargs
 
-    def parse_section_entry_points(self, section_options):
+    def parse_section_entry_points(self, section_options) -> None:
         """Parses `entry_points` configuration file section.
 
         :param dict section_options:
@@ -725,21 +725,21 @@ def _parse_package_data(self, section_options):
         package_data = self._parse_section_to_dict(section_options, self._parse_list)
         return expand.canonic_package_data(package_data)
 
-    def parse_section_package_data(self, section_options):
+    def parse_section_package_data(self, section_options) -> None:
         """Parses `package_data` configuration file section.
 
         :param dict section_options:
         """
         self['package_data'] = self._parse_package_data(section_options)
 
-    def parse_section_exclude_package_data(self, section_options):
+    def parse_section_exclude_package_data(self, section_options) -> None:
         """Parses `exclude_package_data` configuration file section.
 
         :param dict section_options:
         """
         self['exclude_package_data'] = self._parse_package_data(section_options)
 
-    def parse_section_extras_require(self, section_options):
+    def parse_section_extras_require(self, section_options) -> None:
         """Parses `extras_require` configuration file section.
 
         :param dict section_options:
@@ -751,7 +751,7 @@ def parse_section_extras_require(self, section_options):
 
         self['extras_require'] = parsed
 
-    def parse_section_data_files(self, section_options):
+    def parse_section_data_files(self, section_options) -> None:
         """Parses `data_files` configuration file section.
 
         :param dict section_options:
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
index 9865552151..cbe9cfb7f4 100644
--- a/setuptools/discovery.py
+++ b/setuptools/discovery.py
@@ -479,7 +479,7 @@ def _ensure_no_accidental_inclusion(self, detected: list[str], kind: str):
             """
             raise PackageDiscoveryError(cleandoc(msg))
 
-    def analyse_name(self):
+    def analyse_name(self) -> None:
         """The packages/modules are the essential contribution of the author.
         Therefore the name of the distribution can be derived from them.
         """
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 1348ca61c2..d1ff1c0a23 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -640,7 +640,7 @@ def parse_config_files(
         self,
         filenames: Iterable[StrPath] | None = None,
         ignore_option_errors: bool = False,
-    ):
+    ) -> None:
         """Parses configuration files from various levels
         and loads configuration.
         """
@@ -663,7 +663,7 @@ def fetch_build_eggs(self, requires: _StrOrIter):
 
         return _fetch_build_eggs(self, requires)
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         """
         Allow plugins to apply arbitrary operations to the
         distribution. Each hook may optionally define a 'order'
@@ -760,7 +760,7 @@ def get_command_list(self):
                 self.cmdclass[ep.name] = cmdclass
         return _Distribution.get_command_list(self)
 
-    def include(self, **attrs):
+    def include(self, **attrs) -> None:
         """Add items to distribution that are named in keyword arguments
 
         For example, 'dist.include(py_modules=["x"])' would add 'x' to
@@ -782,7 +782,7 @@ def include(self, **attrs):
             else:
                 self._include_misc(k, v)
 
-    def exclude_package(self, package: str):
+    def exclude_package(self, package: str) -> None:
         """Remove packages, modules, and extensions in named package"""
 
         pfx = package + '.'
@@ -803,7 +803,7 @@ def exclude_package(self, package: str):
                 if p.name != package and not p.name.startswith(pfx)
             ]
 
-    def has_contents_for(self, package: str):
+    def has_contents_for(self, package: str) -> bool:
         """Return true if 'exclude_package(package)' would do something"""
 
         pfx = package + '.'
@@ -852,7 +852,7 @@ def _include_misc(self, name: str, value: _Sequence) -> None:
             new = [item for item in value if item not in old]
             setattr(self, name, list(old) + new)
 
-    def exclude(self, **attrs):
+    def exclude(self, **attrs) -> None:
         """Remove items from distribution that are named in keyword arguments
 
         For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
@@ -991,7 +991,7 @@ def handle_display_options(self, option_order):
         finally:
             sys.stdout.reconfigure(encoding=encoding)
 
-    def run_command(self, command):
+    def run_command(self, command) -> None:
         self.set_defaults()
         # Postpone defaults until all explicit configuration is considered
         # (setup() args, config files, command line and plugins)
diff --git a/setuptools/launch.py b/setuptools/launch.py
index 56c7d035f1..0d162647d5 100644
--- a/setuptools/launch.py
+++ b/setuptools/launch.py
@@ -10,7 +10,7 @@
 import tokenize
 
 
-def run():
+def run() -> None:
     """
     Run the script in sys.argv[1] as if it had
     been invoked naturally.
diff --git a/setuptools/logging.py b/setuptools/logging.py
index c6d25a6b1e..71aa8ca5a4 100644
--- a/setuptools/logging.py
+++ b/setuptools/logging.py
@@ -11,7 +11,7 @@ def _not_warning(record):
     return record.levelno < logging.WARNING
 
 
-def configure():
+def configure() -> None:
     """
     Configure logging to emit warning and above to stderr
     and everything else to stdout. This behavior is provided
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 7ee685e023..ad4a2f375f 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -84,7 +84,7 @@ def current_is_x86(self):
         """
         return self.current_cpu == 'x86'
 
-    def current_dir(self, hidex86=False, x64=False):
+    def current_dir(self, hidex86=False, x64=False) -> str:
         """
         Current platform specific subfolder.
 
@@ -108,7 +108,7 @@ def current_dir(self, hidex86=False, x64=False):
             else r'\%s' % self.current_cpu
         )
 
-    def target_dir(self, hidex86=False, x64=False):
+    def target_dir(self, hidex86=False, x64=False) -> str:
         r"""
         Target platform specific subfolder.
 
@@ -177,7 +177,7 @@ def __init__(self, platform_info):
         self.pi = platform_info
 
     @property
-    def visualstudio(self):
+    def visualstudio(self) -> str:
         """
         Microsoft Visual Studio root registry key.
 
@@ -225,7 +225,7 @@ def vs(self):
         return os.path.join(self.sxs, 'VS7')
 
     @property
-    def vc_for_python(self):
+    def vc_for_python(self) -> str:
         """
         Microsoft Visual C++ for Python registry key.
 
@@ -237,7 +237,7 @@ def vc_for_python(self):
         return r'DevDiv\VCForPython'
 
     @property
-    def microsoft_sdk(self):
+    def microsoft_sdk(self) -> str:
         """
         Microsoft SDK registry key.
 
@@ -273,7 +273,7 @@ def netfx_sdk(self):
         return os.path.join(self.microsoft_sdk, 'NETFXSDK')
 
     @property
-    def windows_kits_roots(self):
+    def windows_kits_roots(self) -> str:
         """
         Microsoft Windows Kits Roots registry key.
 
diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py
index 299fdd9479..85ea2ebd65 100644
--- a/setuptools/namespaces.py
+++ b/setuptools/namespaces.py
@@ -11,7 +11,7 @@
 class Installer:
     nspkg_ext = '-nspkg.pth'
 
-    def install_namespaces(self):
+    def install_namespaces(self) -> None:
         nsp = self._get_all_ns_packages()
         if not nsp:
             return
@@ -30,7 +30,7 @@ def install_namespaces(self):
             # See: python/cpython#77102
             f.writelines(lines)
 
-    def uninstall_namespaces(self):
+    def uninstall_namespaces(self) -> None:
         filename = self._get_nspkg_file()
         if not os.path.exists(filename):
             return
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 6b4a256dfc..276f1d45d8 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -306,7 +306,7 @@ def __init__(
         verify_ssl: bool = True,
         *args,
         **kw,
-    ):
+    ) -> None:
         super().__init__(*args, **kw)
         self.index_url = index_url + "/"[: not index_url.endswith('/')]
         self.scanned_urls: dict = {}
@@ -325,7 +325,7 @@ def add(self, dist):
         return super().add(dist)
 
     # FIXME: 'PackageIndex.process_url' is too complex (14)
-    def process_url(self, url, retrieve: bool = False):  # noqa: C901
+    def process_url(self, url, retrieve: bool = False) -> None:  # noqa: C901
         """Evaluate a URL as a possible download, and maybe retrieve it"""
         if url in self.scanned_urls and not retrieve:
             return
@@ -378,7 +378,7 @@ def process_url(self, url, retrieve: bool = False):  # noqa: C901
         if url.startswith(self.index_url) and getattr(f, 'code', None) != 404:
             page = self.process_index(url, page)
 
-    def process_filename(self, fn, nested: bool = False):
+    def process_filename(self, fn, nested: bool = False) -> None:
         # process filenames or directories
         if not os.path.exists(fn):
             self.warn("Not found: %s", fn)
@@ -394,7 +394,7 @@ def process_filename(self, fn, nested: bool = False):
             self.debug("Found: %s", fn)
             list(map(self.add, dists))
 
-    def url_ok(self, url, fatal: bool = False):
+    def url_ok(self, url, fatal: bool = False) -> bool:
         s = URL_SCHEME(url)
         is_file = s and s.group(1).lower() == 'file'
         if is_file or self.allows(urllib.parse.urlparse(url)[1]):
@@ -410,7 +410,7 @@ def url_ok(self, url, fatal: bool = False):
             self.warn(msg, url)
             return False
 
-    def scan_egg_links(self, search_path):
+    def scan_egg_links(self, search_path) -> None:
         dirs = filter(os.path.isdir, search_path)
         egg_links = (
             (path, entry)
@@ -420,7 +420,7 @@ def scan_egg_links(self, search_path):
         )
         list(itertools.starmap(self.scan_egg_link, egg_links))
 
-    def scan_egg_link(self, path, entry):
+    def scan_egg_link(self, path, entry) -> None:
         content = _read_utf8_with_fallback(os.path.join(path, entry))
         # filter non-empty lines
         lines = list(filter(None, map(str.strip, content.splitlines())))
@@ -481,21 +481,21 @@ def process_index(self, url, page):
             lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
         )
 
-    def need_version_info(self, url):
+    def need_version_info(self, url) -> None:
         self.scan_all(
             "Page at %s links to .py file(s) without version info; an index "
             "scan is required.",
             url,
         )
 
-    def scan_all(self, msg=None, *args):
+    def scan_all(self, msg=None, *args) -> None:
         if self.index_url not in self.fetched_urls:
             if msg:
                 self.warn(msg, *args)
             self.info("Scanning index of all packages (this may take a while)")
         self.scan_url(self.index_url)
 
-    def find_packages(self, requirement):
+    def find_packages(self, requirement) -> None:
         self.scan_url(self.index_url + requirement.unsafe_name + '/')
 
         if not self.package_pages.get(requirement.key):
@@ -519,7 +519,7 @@ def obtain(self, requirement, installer=None):
             self.debug("%s does not match %s", requirement, dist)
         return super().obtain(requirement, installer)
 
-    def check_hash(self, checker, filename, tfp):
+    def check_hash(self, checker, filename, tfp) -> None:
         """
         checker is a ContentChecker
         """
@@ -533,7 +533,7 @@ def check_hash(self, checker, filename, tfp):
                 % (checker.hash.name, os.path.basename(filename))
             )
 
-    def add_find_links(self, urls):
+    def add_find_links(self, urls) -> None:
         """Add `urls` to the list that will be prescanned for searches"""
         for url in urls:
             if (
@@ -554,7 +554,7 @@ def prescan(self):
             list(map(self.scan_url, self.to_scan))
         self.to_scan = None  # from now on, go ahead and process immediately
 
-    def not_found_in_index(self, requirement):
+    def not_found_in_index(self, requirement) -> None:
         if self[requirement.key]:  # we've seen at least one distro
             meth, msg = self.info, "Couldn't retrieve index page for %r"
         else:  # no distros seen for this name, might be misspelled
@@ -777,7 +777,7 @@ def _download_to(self, url, filename):
             if fp:
                 fp.close()
 
-    def reporthook(self, url, filename, blocknum, blksize, size):
+    def reporthook(self, url, filename, blocknum, blksize, size) -> None:
         pass  # no-op
 
     # FIXME:
@@ -884,7 +884,7 @@ def _download_other(self, url, filename):
         self.url_ok(url, True)
         return self._attempt_download(url, filename)
 
-    def scan_url(self, url):
+    def scan_url(self, url) -> None:
         self.process_url(url, True)
 
     def _attempt_download(self, url, filename):
@@ -930,13 +930,13 @@ def _vcs_split_rev_from_url(url):
 
         return resolved, rev
 
-    def debug(self, msg, *args):
+    def debug(self, msg, *args) -> None:
         log.debug(msg, *args)
 
-    def info(self, msg, *args):
+    def info(self, msg, *args) -> None:
         log.info(msg, *args)
 
-    def warn(self, msg, *args):
+    def warn(self, msg, *args) -> None:
         log.warn(msg, *args)
 
 
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 5dedeeb47f..5b3c8a1e9d 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -453,7 +453,7 @@ def _open(self, path, mode='r', *args, **kw):
             self._violation("open", path, mode, *args, **kw)
         return _open(path, mode, *args, **kw)
 
-    def tmpnam(self):
+    def tmpnam(self) -> None:
         self._violation("tmpnam")
 
     def _ok(self, path):
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 69a73df244..5759a77f1f 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -39,7 +39,7 @@ def _get_supported_tags():
     return {(t.interpreter, t.abi, t.platform) for t in sys_tags()}
 
 
-def unpack(src_dir, dst_dir):
+def unpack(src_dir, dst_dir) -> None:
     """Move everything under `src_dir` to `dst_dir`, and delete the former."""
     for dirpath, dirnames, filenames in os.walk(src_dir):
         subdir = os.path.relpath(dirpath, src_dir)
@@ -116,7 +116,7 @@ def get_dist_info(self, zf):
                 return dirname
         raise ValueError("unsupported wheel format. .dist-info not found")
 
-    def install_as_egg(self, destination_eggdir):
+    def install_as_egg(self, destination_eggdir) -> None:
         """Install wheel as an egg directory."""
         with zipfile.ZipFile(self.filename) as zf:
             self._install_as_egg(destination_eggdir, zf)

From f160c700762fcf0eef7a75ff4aa58ba262473c60 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 28 Oct 2024 17:59:00 -0400
Subject: [PATCH 1261/1761] Re-enable ANN2 for setuptools

---
 ruff.toml                                     |  5 --
 setuptools/__init__.py                        |  4 +-
 setuptools/_path.py                           |  4 +-
 setuptools/command/bdist_egg.py               |  8 +--
 setuptools/command/build_ext.py               |  4 +-
 setuptools/command/build_py.py                | 13 +++--
 setuptools/command/easy_install.py            | 20 ++++----
 setuptools/command/egg_info.py                |  4 +-
 setuptools/command/install_scripts.py         |  2 +-
 setuptools/depends.py                         | 17 +++++--
 setuptools/dist.py                            | 22 ++++----
 setuptools/glob.py                            | 50 +++++++++++++------
 setuptools/installer.py                       |  6 ++-
 setuptools/logging.py                         |  2 +-
 setuptools/package_index.py                   | 14 +++---
 setuptools/sandbox.py                         |  6 ++-
 setuptools/tests/config/downloads/__init__.py |  8 +--
 setuptools/tests/test_build_ext.py            |  2 +-
 setuptools/tests/test_editable_install.py     |  2 +-
 tools/build_launchers.py                      |  4 +-
 20 files changed, 119 insertions(+), 78 deletions(-)

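For context: ruff's ANN2 codes (ANN201-ANN206, from the flake8-annotations
plugin) flag functions whose return type is left unannotated, which is why
the hunks below add `-> None` and friends throughout. A minimal sketch of
what the rule reports, assuming a ruff configuration that selects the ANN
family (file and function names here are hypothetical):

    # demo.py -- ANN201 fires on the first function only
    def implicit(x: int):  # ANN201: missing return type annotation
        return x * 2

    def explicit(x: int) -> int:
        return x * 2
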
diff --git a/ruff.toml b/ruff.toml
index 438c048962..a1692aa5fd 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -56,11 +56,6 @@ ignore = [
 ]
 
 [lint.per-file-ignores]
-# Only enforcing return type annotations for public modules
-"**/tests/**" = ["ANN2"]
-"tools/**" = ["ANN2"]
-# Temporarily disabling enforced return annotations for the setuptool package to progressively type from Typeshed
-"setuptools/**" = ["ANN2"]
 # Suppress nuisance warnings about module-import-not-at-top-of-file (E402) due to workaround for #4476
 "setuptools/__init__.py" = ["E402"]
 "pkg_resources/__init__.py" = ["E402"]
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index eba86c4f9c..783e5e7591 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -186,7 +186,7 @@ def _ensure_stringlike(self, option, what, default=None):
             )
         return val
 
-    def ensure_string_list(self, option: str):
+    def ensure_string_list(self, option: str) -> None:
         r"""Ensure that 'option' is a list of strings.  If 'option' is
         currently a string, we split it either on /,\s*/ or /\s+/, so
         "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
@@ -226,7 +226,7 @@ def reinitialize_command(
     ) -> _Command:
         cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
         vars(cmd).update(kw)
-        return cmd
+        return cmd  # pyright: ignore[reportReturnType] # pypa/distutils#307
 
     @abstractmethod
     def initialize_options(self) -> None:
diff --git a/setuptools/_path.py b/setuptools/_path.py
index b749c6c6a0..0d99b0f539 100644
--- a/setuptools/_path.py
+++ b/setuptools/_path.py
@@ -3,15 +3,15 @@
 import contextlib
 import os
 import sys
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING, TypeVar, Union
 
 from more_itertools import unique_everseen
 
 if TYPE_CHECKING:
     from typing_extensions import TypeAlias
 
-
 StrPath: TypeAlias = Union[str, os.PathLike[str]]  #  Same as _typeshed.StrPath
+StrPathT = TypeVar("StrPathT", bound=Union[str, os.PathLike[str]])
 
 
 def ensure_directory(path):
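
Note that `StrPathT` is a bound TypeVar rather than another union alias: a
parameter typed `StrPathT` ties a function's return type to the concrete
path type the caller passed, which `make_zipfile` below relies on to hand
back its `zip_filename` argument unchanged. A minimal sketch of the
difference (the `passthrough` function is illustrative only):

    import os
    from typing import TypeVar, Union

    StrPathT = TypeVar("StrPathT", bound=Union[str, os.PathLike[str]])

    def passthrough(p: StrPathT) -> StrPathT:
        # Path in, Path out; str in, str out. Returning the plain union
        # (StrPath) would collapse both callers to str | os.PathLike[str].
        return p
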
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 2881d8a9da..77aea56f45 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -16,7 +16,7 @@
 from setuptools import Command
 from setuptools.extension import Library
 
-from .._path import ensure_directory
+from .._path import StrPathT, ensure_directory
 
 from distutils import log
 from distutils.dir_util import mkpath, remove_tree
@@ -440,13 +440,13 @@ def can_scan():
 
 
 def make_zipfile(
-    zip_filename,
+    zip_filename: StrPathT,
     base_dir,
     verbose: bool = False,
     dry_run: bool = False,
     compress=True,
     mode: _ZipFileMode = 'w',
-):
+) -> StrPathT:
     """Create a zip file from all the files under 'base_dir'.  The output
     zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
     Python module (if available) or the InfoZIP "zip" utility (if installed
@@ -455,7 +455,7 @@ def make_zipfile(
     """
     import zipfile
 
-    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)  # type: ignore[arg-type] # python/mypy#18075
     log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
 
     def visit(z, dirname, names):
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index 1b9c313ff5..3e5e7dfc3a 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -415,7 +415,7 @@ def link_shared_object(
         extra_postargs=None,
         build_temp=None,
         target_lang=None,
-    ):
+    ) -> None:
         self.link(
             self.SHARED_LIBRARY,
             objects,
@@ -450,7 +450,7 @@ def link_shared_object(
         extra_postargs=None,
         build_temp=None,
         target_lang=None,
-    ):
+    ) -> None:
         # XXX we need to either disallow these attrs on Library instances,
         # or warn/abort here if set, or something...
         # libraries=None, library_dirs=None, runtime_library_dirs=None,
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index 628a20b40b..1c3259ae52 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -12,8 +12,7 @@
 
 from more_itertools import unique_everseen
 
-from setuptools._path import StrPath
-
+from .._path import StrPath, StrPathT
 from ..dist import Distribution
 from ..warnings import SetuptoolsDeprecationWarning
 
@@ -50,20 +49,20 @@ def finalize_options(self):
             del self.__dict__['data_files']
         self.__updated_files = []
 
-    def copy_file(  # type: ignore[override] # No overload, str support only
+    def copy_file(  # type: ignore[override] # No overload, no bytes support
         self,
         infile: StrPath,
-        outfile: StrPath,
+        outfile: StrPathT,
         preserve_mode: bool = True,
         preserve_times: bool = True,
         link: str | None = None,
         level: object = 1,
-    ):
+    ) -> tuple[StrPathT | str, bool]:
         # Overwrite base class to allow using links
         if link:
             infile = str(Path(infile).resolve())
-            outfile = str(Path(outfile).resolve())
-        return super().copy_file(
+            outfile = str(Path(outfile).resolve())  # type: ignore[assignment] # Re-assigning a str when outfile is StrPath is ok
+        return super().copy_file(  # pyright: ignore[reportReturnType] # pypa/distutils#309
             infile, outfile, preserve_mode, preserve_times, link, level
         )
 
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 5778020ccb..bffb8212ff 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -424,7 +424,7 @@ def expand_dirs(self):
         ]
         self._expand_attrs(dirs)
 
-    def run(self, show_deprecation: bool = True):
+    def run(self, show_deprecation: bool = True) -> None:
         if show_deprecation:
             self.announce(
                 "WARNING: The easy_install command is deprecated "
@@ -673,7 +673,7 @@ def _tmpdir(self):
         finally:
             os.path.exists(tmpdir) and _rmtree(tmpdir)
 
-    def easy_install(self, spec, deps: bool = False):
+    def easy_install(self, spec, deps: bool = False) -> Distribution | None:
         with self._tmpdir() as tmpdir:
             if not isinstance(spec, Requirement):
                 if URL_SCHEME(spec):
@@ -710,7 +710,9 @@ def easy_install(self, spec, deps: bool = False):
             else:
                 return self.install_item(spec, dist.location, tmpdir, deps)
 
-    def install_item(self, spec, download, tmpdir, deps, install_needed: bool = False):
+    def install_item(
+        self, spec, download, tmpdir, deps, install_needed: bool = False
+    ) -> Distribution | None:
         # Installation is also needed if file in tmpdir or is not an egg
         install_needed = install_needed or bool(self.always_copy)
         install_needed = install_needed or os.path.dirname(download) == tmpdir
@@ -760,7 +762,7 @@ def process_distribution(  # noqa: C901
         dist,
         deps: bool = True,
         *info,
-    ):
+    ) -> None:
         self.update_pth(dist)
         self.package_index.add(dist)
         if dist in self.local_index[dist.key]:
@@ -859,7 +861,7 @@ def _load_template(dev_path):
         raw_bytes = resource_string('setuptools', name)
         return raw_bytes.decode('utf-8')
 
-    def write_script(self, script_name, contents, mode: str = "t", blockers=()):
+    def write_script(self, script_name, contents, mode: str = "t", blockers=()) -> None:
         """Write an executable file to the scripts directory"""
         self.delete_blockers(  # clean up old .py/.pyw w/o a script
             [os.path.join(self.script_dir, x) for x in blockers]
@@ -881,7 +883,7 @@ def write_script(self, script_name, contents, mode: str = "t", blockers=()):
             f.write(contents)
         chmod(target, 0o777 - mask)
 
-    def install_eggs(self, spec, dist_filename, tmpdir):
+    def install_eggs(self, spec, dist_filename, tmpdir) -> list[Distribution]:
         # .egg dirs or files are already built, so just return them
         installer_map = {
             '.egg': self.install_egg,
@@ -1142,7 +1144,7 @@ def install_wheel(self, wheel_path, tmpdir):
         """
     )
 
-    def installation_report(self, req, dist, what: str = "Installed"):
+    def installation_report(self, req, dist, what: str = "Installed") -> str:
         """Helpful installation message for display to package users"""
         msg = "\n%(what)s %(eggloc)s%(extras)s"
         if self.multi_version and not self.no_report:
@@ -2079,7 +2081,7 @@ def from_environment(cls):
         return cls([cls._sys_executable()])
 
     @classmethod
-    def from_string(cls, string: str):
+    def from_string(cls, string: str) -> Self:
         """
         Construct a command spec from a simple string representing a command
         line parseable by shlex.split.
@@ -2221,7 +2223,7 @@ def get_header(
         cls,
         script_text: str = "",
         executable: str | CommandSpec | Iterable[str] | None = None,
-    ):
+    ) -> str:
         """Create a #! line, getting options (if any) from script_text"""
         cmd = cls.command_spec_class.best().from_param(executable)
         cmd.install_options(script_text)
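
Also worth noting in the hunks above: `from_string` is annotated with
`Self` rather than the literal class name, so subclass factories stay
precisely typed. A generic sketch of the pattern (the class below is a
hypothetical stand-in, not setuptools' `CommandSpec`):

    import shlex
    from typing_extensions import Self

    class ArgList(list):
        @classmethod
        def from_string(cls, string: str) -> Self:
            # a subclass calling from_string is typed as that subclass,
            # not as ArgList
            return cls(shlex.split(string))
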
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index f9b8c6df71..ba3bbb0e5a 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -252,7 +252,7 @@ def _get_egg_basename(self, py_version=PY_MAJOR, platform=None):
         """Compute filename of the output egg. Private API."""
         return _egg_basename(self.egg_name, self.egg_version, py_version, platform)
 
-    def write_or_delete_file(self, what, filename, data, force: bool = False):
+    def write_or_delete_file(self, what, filename, data, force: bool = False) -> None:
         """Write `data` to `filename` or delete if empty
 
         If `data` is non-empty, this routine is the same as ``write_file()``.
@@ -690,7 +690,7 @@ def overwrite_arg(cmd, basename, filename):
     write_arg(cmd, basename, filename, True)
 
 
-def write_arg(cmd, basename, filename, force: bool = False):
+def write_arg(cmd, basename, filename, force: bool = False) -> None:
     argname = os.path.splitext(basename)[0]
     value = getattr(cmd.distribution, argname, None)
     if value is not None:
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index f1ccc2bbf8..036d35662d 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -56,7 +56,7 @@ def _install_ep_scripts(self):
         for args in writer.get_args(dist, cmd.as_header()):
             self.write_script(*args)
 
-    def write_script(self, script_name, contents, mode: str = "t", *ignored):
+    def write_script(self, script_name, contents, mode: str = "t", *ignored) -> None:
         """Write an executable file to the scripts directory"""
         from setuptools.command.easy_install import chmod, current_umask
 
diff --git a/setuptools/depends.py b/setuptools/depends.py
index e73f06808e..0011c05da1 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -4,12 +4,16 @@
 import dis
 import marshal
 import sys
+from types import CodeType
+from typing import Any, Literal, TypeVar
 
 from packaging.version import Version
 
 from . import _imp
 from ._imp import PY_COMPILED, PY_FROZEN, PY_SOURCE, find_module
 
+_T = TypeVar("_T")
+
 __all__ = ['Require', 'find_module']
 
 
@@ -51,7 +55,9 @@ def version_ok(self, version):
             and self.format(version) >= self.requested_version
         )
 
-    def get_version(self, paths=None, default: str = "unknown"):
+    def get_version(
+        self, paths=None, default: _T | Literal["unknown"] = "unknown"
+    ) -> _T | Literal["unknown"] | None | Any:
         """Get version number of installed module, 'None', or 'default'
 
         Search 'paths' for module.  If not found, return 'None'.  If found,
@@ -106,7 +112,9 @@ def empty():
 # XXX it'd be better to test assertions about bytecode instead.
 if not sys.platform.startswith('java') and sys.platform != 'cli':
 
-    def get_module_constant(module, symbol, default: str | int = -1, paths=None):
+    def get_module_constant(
+        module, symbol, default: _T | int = -1, paths=None
+    ) -> _T | int | None | Any:
         """Find 'module' by searching 'paths', and extract 'symbol'
 
         Return 'None' if 'module' does not exist on 'paths', or it does not define
@@ -134,7 +142,9 @@ def get_module_constant(module, symbol, default: str | int = -1, paths=None):
 
         return extract_constant(code, symbol, default)
 
-    def extract_constant(code, symbol, default: str | int = -1):
+    def extract_constant(
+        code: CodeType, symbol: str, default: _T | int = -1
+    ) -> _T | int | None | Any:
         """Extract the constant value of 'symbol' from 'code'
 
         If the name 'symbol' is bound to a constant value by the Python code
@@ -163,6 +173,7 @@ def extract_constant(code, symbol, default: str | int = -1):
             arg = byte_code.arg
 
             if op == LOAD_CONST:
+                assert arg is not None
                 const = code.co_consts[arg]
             elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
                 return const
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 1348ca61c2..6c22998211 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -25,14 +25,13 @@
 from packaging.specifiers import InvalidSpecifier, SpecifierSet
 from packaging.version import Version
 
-from setuptools._path import StrPath
-
 from . import (
     _entry_points,
     _reqs,
     command as _,  # noqa: F401 # imported for side-effects
 )
 from ._importlib import metadata
+from ._path import StrPath
 from ._reqs import _StrOrIter
 from .config import pyprojecttoml, setupcfg
 from .discovery import ConfigDiscovery
@@ -52,6 +51,9 @@
 if TYPE_CHECKING:
     from typing_extensions import TypeAlias
 
+    from pkg_resources import Distribution as _pkg_resources_Distribution
+
+
 __all__ = ['Distribution']
 
 _sequence = tuple, list
@@ -518,7 +520,7 @@ def _parse_config_files(self, filenames=None):  # noqa: C901
             except ValueError as e:
                 raise DistutilsOptionError(e) from e
 
-    def warn_dash_deprecation(self, opt: str, section: str):
+    def warn_dash_deprecation(self, opt: str, section: str) -> str:
         if section in (
             'options.extras_require',
             'options.data_files',
@@ -560,7 +562,7 @@ def _setuptools_commands(self):
             # during bootstrapping, distribution doesn't exist
             return []
 
-    def make_option_lowercase(self, opt: str, section: str):
+    def make_option_lowercase(self, opt: str, section: str) -> str:
         if section != 'metadata' or opt.islower():
             return opt
 
@@ -640,7 +642,7 @@ def parse_config_files(
         self,
         filenames: Iterable[StrPath] | None = None,
         ignore_option_errors: bool = False,
-    ):
+    ) -> None:
         """Parses configuration files from various levels
         and loads configuration.
         """
@@ -657,7 +659,9 @@ def parse_config_files(
         self._finalize_requires()
         self._finalize_license_files()
 
-    def fetch_build_eggs(self, requires: _StrOrIter):
+    def fetch_build_eggs(
+        self, requires: _StrOrIter
+    ) -> list[_pkg_resources_Distribution]:
         """Resolve pre-setup requirements"""
         from .installer import _fetch_build_eggs
 
@@ -728,7 +732,7 @@ def fetch_build_egg(self, req):
 
         return fetch_build_egg(self, req)
 
-    def get_command_class(self, command: str):
+    def get_command_class(self, command: str) -> type[distutils.cmd.Command]:  # type: ignore[override] # Not doing complex overrides yet
         """Pluggable version of get_command_class()"""
         if command in self.cmdclass:
             return self.cmdclass[command]
@@ -782,7 +786,7 @@ def include(self, **attrs):
             else:
                 self._include_misc(k, v)
 
-    def exclude_package(self, package: str):
+    def exclude_package(self, package: str) -> None:
         """Remove packages, modules, and extensions in named package"""
 
         pfx = package + '.'
@@ -803,7 +807,7 @@ def exclude_package(self, package: str):
                 if p.name != package and not p.name.startswith(pfx)
             ]
 
-    def has_contents_for(self, package: str):
+    def has_contents_for(self, package: str) -> bool:
         """Return true if 'exclude_package(package)' would do something"""
 
         pfx = package + '.'
diff --git a/setuptools/glob.py b/setuptools/glob.py
index 97aca44314..827e8433fa 100644
--- a/setuptools/glob.py
+++ b/setuptools/glob.py
@@ -6,14 +6,21 @@
  * Hidden files are not ignored.
 """
 
+from __future__ import annotations
+
 import fnmatch
 import os
 import re
+from collections.abc import Iterator
+from typing import TYPE_CHECKING, AnyStr, Iterable, overload
+
+if TYPE_CHECKING:
+    from _typeshed import BytesPath, StrOrBytesPath, StrPath
 
 __all__ = ["glob", "iglob", "escape"]
 
 
-def glob(pathname, recursive: bool = False):
+def glob(pathname: AnyStr, recursive: bool = False) -> list[AnyStr]:
     """Return a list of paths matching a pathname pattern.
 
     The pattern may contain simple shell-style wildcards a la
@@ -27,7 +34,7 @@ def glob(pathname, recursive: bool = False):
     return list(iglob(pathname, recursive=recursive))
 
 
-def iglob(pathname, recursive: bool = False):
+def iglob(pathname: AnyStr, recursive: bool = False) -> Iterator[AnyStr]:
     """Return an iterator which yields the paths matching a pathname pattern.
 
     The pattern may contain simple shell-style wildcards a la
@@ -45,7 +52,7 @@ def iglob(pathname, recursive: bool = False):
     return it
 
 
-def _iglob(pathname, recursive):
+def _iglob(pathname: AnyStr, recursive: bool) -> Iterator[AnyStr]:
     dirname, basename = os.path.split(pathname)
     glob_in_dir = glob2 if recursive and _isrecursive(basename) else glob1
 
@@ -66,7 +73,7 @@ def _iglob(pathname, recursive):
     # drive or UNC path.  Prevent an infinite recursion if a drive or UNC path
     # contains magic characters (i.e. r'\\?\C:').
     if dirname != pathname and has_magic(dirname):
-        dirs = _iglob(dirname, recursive)
+        dirs: Iterable[AnyStr] = _iglob(dirname, recursive)
     else:
         dirs = [dirname]
     if not has_magic(basename):
@@ -81,7 +88,11 @@ def _iglob(pathname, recursive):
 # takes a literal basename (so it only has to check for its existence).
 
 
-def glob1(dirname, pattern):
+@overload
+def glob1(dirname: StrPath, pattern: str) -> list[str]: ...
+@overload
+def glob1(dirname: BytesPath, pattern: bytes) -> list[bytes]: ...
+def glob1(dirname: StrOrBytesPath, pattern: str | bytes) -> list[str] | list[bytes]:
     if not dirname:
         if isinstance(pattern, bytes):
             dirname = os.curdir.encode('ASCII')
@@ -91,7 +102,8 @@ def glob1(dirname, pattern):
         names = os.listdir(dirname)
     except OSError:
         return []
-    return fnmatch.filter(names, pattern)
+    # mypy false-positives: str or bytes type possibility is always kept in sync
+    return fnmatch.filter(names, pattern)  # type: ignore[type-var, return-value]
 
 
 def glob0(dirname, basename):
@@ -110,14 +122,22 @@ def glob0(dirname, basename):
 # directory.
 
 
-def glob2(dirname, pattern):
+@overload
+def glob2(dirname: StrPath, pattern: str) -> Iterator[str]: ...
+@overload
+def glob2(dirname: BytesPath, pattern: bytes) -> Iterator[bytes]: ...
+def glob2(dirname: StrOrBytesPath, pattern: str | bytes) -> Iterator[str | bytes]:
     assert _isrecursive(pattern)
     yield pattern[:0]
     yield from _rlistdir(dirname)
 
 
 # Recursively yields relative pathnames inside a literal directory.
-def _rlistdir(dirname):
+@overload
+def _rlistdir(dirname: StrPath) -> Iterator[str]: ...
+@overload
+def _rlistdir(dirname: BytesPath) -> Iterator[bytes]: ...
+def _rlistdir(dirname: StrOrBytesPath) -> Iterator[str | bytes]:
     if not dirname:
         if isinstance(dirname, bytes):
             dirname = os.curdir.encode('ASCII')
@@ -129,24 +149,24 @@ def _rlistdir(dirname):
         return
     for x in names:
         yield x
-        path = os.path.join(dirname, x) if dirname else x
+        # mypy false-positives: str or bytes type possibility is always kept in sync
+        path = os.path.join(dirname, x) if dirname else x  # type: ignore[arg-type]
         for y in _rlistdir(path):
-            yield os.path.join(x, y)
+            yield os.path.join(x, y)  # type: ignore[arg-type]
 
 
 magic_check = re.compile('([*?[])')
 magic_check_bytes = re.compile(b'([*?[])')
 
 
-def has_magic(s):
+def has_magic(s: str | bytes) -> bool:
     if isinstance(s, bytes):
-        match = magic_check_bytes.search(s)
+        return magic_check_bytes.search(s) is not None
     else:
-        match = magic_check.search(s)
-    return match is not None
+        return magic_check.search(s) is not None
 
 
-def _isrecursive(pattern):
+def _isrecursive(pattern: str | bytes) -> bool:
     if isinstance(pattern, bytes):
         return pattern == b'**'
     else:
diff --git a/setuptools/installer.py b/setuptools/installer.py
index ba2d808d49..64bc2def07 100644
--- a/setuptools/installer.py
+++ b/setuptools/installer.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import glob
 import os
 import subprocess
@@ -5,6 +7,8 @@
 import tempfile
 from functools import partial
 
+from pkg_resources import Distribution
+
 from . import _reqs
 from ._reqs import _StrOrIter
 from .warnings import SetuptoolsDeprecationWarning
@@ -31,7 +35,7 @@ def fetch_build_egg(dist, req):
     return _fetch_build_egg_no_warn(dist, req)
 
 
-def _fetch_build_eggs(dist, requires: _StrOrIter):
+def _fetch_build_eggs(dist, requires: _StrOrIter) -> list[Distribution]:
     import pkg_resources  # Delay import to avoid unnecessary side-effects
 
     _DeprecatedInstaller.emit(stacklevel=3)
diff --git a/setuptools/logging.py b/setuptools/logging.py
index c6d25a6b1e..dd7fdaa7d9 100644
--- a/setuptools/logging.py
+++ b/setuptools/logging.py
@@ -35,6 +35,6 @@ def configure():
         distutils.dist.log = distutils.log
 
 
-def set_threshold(level: int):
+def set_threshold(level: int) -> int:
     logging.root.setLevel(level * 10)
     return set_threshold.unpatched(level)
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 6b4a256dfc..619099bc8d 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -1,5 +1,7 @@
 """PyPI and direct package downloading."""
 
+from __future__ import annotations
+
 import base64
 import configparser
 import hashlib
@@ -325,7 +327,7 @@ def add(self, dist):
         return super().add(dist)
 
     # FIXME: 'PackageIndex.process_url' is too complex (14)
-    def process_url(self, url, retrieve: bool = False):  # noqa: C901
+    def process_url(self, url, retrieve: bool = False) -> None:  # noqa: C901
         """Evaluate a URL as a possible download, and maybe retrieve it"""
         if url in self.scanned_urls and not retrieve:
             return
@@ -378,7 +380,7 @@ def process_url(self, url, retrieve: bool = False):  # noqa: C901
         if url.startswith(self.index_url) and getattr(f, 'code', None) != 404:
             page = self.process_index(url, page)
 
-    def process_filename(self, fn, nested: bool = False):
+    def process_filename(self, fn, nested: bool = False) -> None:
         # process filenames or directories
         if not os.path.exists(fn):
             self.warn("Not found: %s", fn)
@@ -394,7 +396,7 @@ def process_filename(self, fn, nested: bool = False):
             self.debug("Found: %s", fn)
             list(map(self.add, dists))
 
-    def url_ok(self, url, fatal: bool = False):
+    def url_ok(self, url, fatal: bool = False) -> bool:
         s = URL_SCHEME(url)
         is_file = s and s.group(1).lower() == 'file'
         if is_file or self.allows(urllib.parse.urlparse(url)[1]):
@@ -604,7 +606,7 @@ def fetch_distribution(  # noqa: C901  # is too complex (14)  # FIXME
         source: bool = False,
         develop_ok: bool = False,
         local_index=None,
-    ):
+    ) -> Distribution | None:
         """Obtain a distribution suitable for fulfilling `requirement`
 
         `requirement` must be a ``pkg_resources.Requirement`` instance.
@@ -626,7 +628,7 @@ def fetch_distribution(  # noqa: C901  # is too complex (14)  # FIXME
         skipped = set()
         dist = None
 
-        def find(req, env=None):
+        def find(req, env: Environment | None = None):
             if env is None:
                 env = self
             # Find a matching distribution; may be called more than once
@@ -680,7 +682,7 @@ def find(req, env=None):
 
     def fetch(
         self, requirement, tmpdir, force_scan: bool = False, source: bool = False
-    ):
+    ) -> str | None:
         """Obtain a file suitable for fulfilling `requirement`
 
         DEPRECATED; use the ``fetch_distribution()`` method now instead.  For
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index 5dedeeb47f..43da85fa42 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -19,7 +19,9 @@
 
 from distutils.errors import DistutilsError
 
-if sys.platform.startswith('java'):
+if TYPE_CHECKING:
+    import os as _os
+elif sys.platform.startswith('java'):
     import org.python.modules.posix.PosixModule as _os  # pyright: ignore[reportMissingImports]
 else:
     _os = sys.modules[os.name]
@@ -491,7 +493,7 @@ def _remap_pair(self, operation, src, dst, *args, **kw):
             self._violation(operation, src, dst, *args, **kw)
         return (src, dst)
 
-    def open(self, file, flags, mode: int = 0o777, *args, **kw):
+    def open(self, file, flags, mode: int = 0o777, *args, **kw) -> int:
         """Called for low-level os.open()"""
         if flags & WRITE_FLAGS and not self._ok(file):
             self._violation("os.open", file, flags, mode, *args, **kw)
diff --git a/setuptools/tests/config/downloads/__init__.py b/setuptools/tests/config/downloads/__init__.py
index 9fb9b14b02..00a16423f4 100644
--- a/setuptools/tests/config/downloads/__init__.py
+++ b/setuptools/tests/config/downloads/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import re
 import time
 from pathlib import Path
@@ -16,14 +18,14 @@
 # ----------------------------------------------------------------------
 
 
-def output_file(url: str, download_dir: Path = DOWNLOAD_DIR):
+def output_file(url: str, download_dir: Path = DOWNLOAD_DIR) -> Path:
     file_name = url.strip()
     for part in NAME_REMOVE:
         file_name = file_name.replace(part, '').strip().strip('/:').strip()
     return Path(download_dir, re.sub(r"[^\-_\.\w\d]+", "_", file_name))
 
 
-def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5):
+def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5) -> Path:
     path = output_file(url, download_dir)
     if path.exists():
         print(f"Skipping {url} (already exists: {path})")
@@ -38,7 +40,7 @@ def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5):
     return path
 
 
-def urls_from_file(list_file: Path):
+def urls_from_file(list_file: Path) -> list[str]:
     """``list_file`` should be a text file where each line corresponds to a URL to
     download.
     """
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index f3e4ccd364..88318b26c5 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -178,7 +178,7 @@ def C(file):
 
 
 class TestBuildExtInplace:
-    def get_build_ext_cmd(self, optional: bool, **opts):
+    def get_build_ext_cmd(self, optional: bool, **opts) -> build_ext:
         files = {
             "eggs.c": "#include missingheader.h\n",
             ".build": {"lib": {}, "tmp": {}},
diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index bdbaa3c7e7..038dcadf93 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -1275,7 +1275,7 @@ def assert_path(pkg, expected):
             assert str(Path(path).resolve()) == expected
 
 
-def assert_link_to(file: Path, other: Path):
+def assert_link_to(file: Path, other: Path) -> None:
     if file.is_symlink():
         assert str(file.resolve()) == str(other.resolve())
     else:
diff --git a/tools/build_launchers.py b/tools/build_launchers.py
index a8b85c5f55..48609367a0 100644
--- a/tools/build_launchers.py
+++ b/tools/build_launchers.py
@@ -54,13 +54,13 @@
 """
 
 
-def resolve_platform(platform: str):
+def resolve_platform(platform: str) -> str:
     if platform in ["Win32", "x64"]:
         return platform[-2:]
     return platform
 
 
-def get_executable_name(name, platform: str):
+def get_executable_name(name, platform: str) -> str:
     return f"{name}-{resolve_platform(platform)}"
 
 

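A note on the annotation pattern above: the glob.py changes pair typed @overload stubs with a single untyped implementation, so callers get matched str-in/str-out or bytes-in/bytes-out signatures. A minimal self-contained sketch of the same shape (the function here is hypothetical, not from this patch):

    from typing import overload

    @overload
    def first_component(path: str) -> str: ...
    @overload
    def first_component(path: bytes) -> bytes: ...
    def first_component(path):
        # one runtime implementation serves both typed signatures
        sep = "/" if isinstance(path, str) else b"/"
        return path.split(sep)[0]
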
From e5ec6fd17ab04db5cfc90d7b4cb1665f23551036 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 1 Nov 2024 11:15:54 -0400
Subject: [PATCH 1262/1761] fix mypy failure after multiple merges

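The workaround keeps the annotation-only import behind TYPE_CHECKING and narrows the runtime value with cast(), which is erased at runtime. A minimal sketch of the shape (names here are hypothetical, not setuptools API):

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        from some_pkg.legacy import LegacyCommand  # annotation-only import
    else:
        LegacyCommand = None  # placeholder; never touched at runtime

    def make_cmd(factory) -> "LegacyCommand":
        # cast() just returns its argument; it only narrows the type for mypy
        return cast(LegacyCommand, factory())
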
---
 setuptools/command/install.py | 24 ++++++++++++++++++------
 1 file changed, 18 insertions(+), 6 deletions(-)

diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index 1262da0bc3..67d6b7f0c8 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -4,7 +4,7 @@
 import inspect
 import platform
 from collections.abc import Callable
-from typing import Any, ClassVar, cast
+from typing import TYPE_CHECKING, Any, ClassVar, cast
 
 import setuptools
 
@@ -15,6 +15,12 @@
 import distutils.command.install as orig
 from distutils.errors import DistutilsArgError
 
+if TYPE_CHECKING:
+    # This is only used for a type-cast, don't import at runtime or it'll cause deprecation warnings
+    from .easy_install import easy_install as easy_install_cls
+else:
+    easy_install_cls = None
+
 # Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
 # now. See https://github.com/pypa/setuptools/issues/199/
 _install = orig.install
@@ -133,11 +139,17 @@ def _called_from_setup(run_frame):
     def do_egg_install(self) -> None:
         easy_install = self.distribution.get_command_class('easy_install')
 
-        cmd = easy_install(
-            self.distribution,
-            args="x",
-            root=self.root,
-            record=self.record,
+        cmd = cast(
+            # We'd want to cast easy_install as type[easy_install_cls] but a bug in
+            # mypy makes it think easy_install() returns a Command on Python 3.12+
+            # https://github.com/python/mypy/issues/18088
+            easy_install_cls,
+            easy_install(  # type: ignore[call-arg]
+                self.distribution,
+                args="x",
+                root=self.root,
+                record=self.record,
+            ),
         )
         cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
         cmd.always_copy_from = '.'  # make sure local-dir eggs get installed

From eca1d66335211afbafcfbd92878402432efdf174 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 1 Nov 2024 14:17:06 -0400
Subject: [PATCH 1263/1761] Merge TypedDict from typeshed

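A total=False TypedDict makes every key optional, so it can replace the loosely typed dict[str, bool] while still unpacking cleanly into shlex.split(). A small self-contained sketch of the idea:

    import shlex
    from typing import TypedDict

    class SplitArgs(TypedDict, total=False):
        comments: bool
        posix: bool

    args = SplitArgs(posix=False)  # any subset of keys type-checks
    print(shlex.split("a 'b c'", **args))  # quotes are kept in non-POSIX mode
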
---
 setuptools/command/easy_install.py | 11 ++++++++---
 setuptools/msvc.py                 | 17 ++++++++++++++---
 2 files changed, 22 insertions(+), 6 deletions(-)

diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 7ecb071f06..e1cd32cf0d 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -34,7 +34,7 @@
 from collections.abc import Iterable
 from glob import glob
 from sysconfig import get_path
-from typing import TYPE_CHECKING, Callable, NoReturn, TypeVar
+from typing import TYPE_CHECKING, Callable, NoReturn, TypedDict, TypeVar
 
 from jaraco.text import yield_lines
 
@@ -2039,6 +2039,11 @@ def chmod(path, mode):
         log.debug("chmod failed: %s", e)
 
 
+class _SplitArgs(TypedDict, total=False):
+    comments: bool
+    posix: bool
+
+
 class CommandSpec(list):
     """
     A command spec for a #! header, specified as a list of arguments akin to
@@ -2046,7 +2051,7 @@ class CommandSpec(list):
     """
 
     options: list[str] = []
-    split_args: dict[str, bool] = dict()
+    split_args = _SplitArgs()
 
     @classmethod
     def best(cls):
@@ -2129,7 +2134,7 @@ def _render(items):
 
 
 class WindowsCommandSpec(CommandSpec):
-    split_args = dict(posix=False)
+    split_args = _SplitArgs(posix=False)
 
 
 class ScriptWriter:
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index ad4a2f375f..55f5090878 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -13,12 +13,15 @@
 import os
 import os.path
 import platform
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, TypedDict
 
 from more_itertools import unique_everseen
 
 import distutils.errors
 
+if TYPE_CHECKING:
+    from typing_extensions import NotRequired
+
 # https://github.com/python/mypy/issues/8166
 if not TYPE_CHECKING and platform.system() == 'Windows':
     import winreg
@@ -876,6 +879,14 @@ def _use_last_dir_name(path, prefix=''):
         return next(matching_dirs, None) or ''
 
 
+class _EnvironmentDict(TypedDict):
+    include: str
+    lib: str
+    libpath: str
+    path: str
+    py_vcruntime_redist: NotRequired[str | None]
+
+
 class EnvironmentInfo:
     """
     Return environment variables for specified Microsoft Visual C++ version
@@ -1420,7 +1431,7 @@ def VCRuntimeRedist(self) -> str | None:
         )
         return next(filter(os.path.isfile, candidate_paths), None)  # type: ignore[arg-type] #python/mypy#12682
 
-    def return_env(self, exists=True):
+    def return_env(self, exists: bool = True) -> _EnvironmentDict:
         """
         Return environment dict.
 
@@ -1434,7 +1445,7 @@ def return_env(self, exists=True):
         dict
             environment
         """
-        env = dict(
+        env = _EnvironmentDict(
             include=self._build_paths(
                 'include',
                 [

From e8a62dff2fd913bb88f943b05cae527c374d6374 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 31 Oct 2024 17:27:21 +0000
Subject: [PATCH 1264/1761] Automatic updates from Ruff for Python 3.9+

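The mechanical rewrites follow two Python 3.9+ idioms: functools.cache replaces lru_cache(maxsize=None), and PEP 585 builtin generics replace the typing aliases. A short sketch of both (hypothetical function and data):

    import functools

    @functools.cache  # was: @functools.lru_cache(maxsize=None)
    def canonical(name: str) -> str:
        return name.lower().replace("-", "_")

    # was: Dict[str, Tuple[str, ...]] imported from typing
    registry: dict[str, tuple[str, ...]] = {"pkg": ("a", "b")}
    print(canonical("My-Pkg"), registry)
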
---
 pkg_resources/__init__.py                 | 15 +++++----------
 setuptools/_reqs.py                       |  3 ++-
 setuptools/build_meta.py                  |  5 +++--
 setuptools/command/_requirestxt.py        |  5 +++--
 setuptools/command/bdist_wheel.py         |  3 ++-
 setuptools/command/build_ext.py           |  3 ++-
 setuptools/command/build_py.py            |  2 +-
 setuptools/command/editable_wheel.py      |  3 ++-
 setuptools/config/_apply_pyprojecttoml.py |  5 +++--
 setuptools/config/expand.py               |  3 ++-
 setuptools/config/pyprojecttoml.py        |  3 ++-
 setuptools/config/setupcfg.py             | 22 +++++-----------------
 setuptools/discovery.py                   |  4 ++--
 setuptools/dist.py                        | 17 ++++-------------
 setuptools/monkey.py                      |  4 ++--
 setuptools/warnings.py                    |  4 ++--
 setuptools/wheel.py                       |  2 +-
 tools/generate_validation_code.py         |  2 +-
 18 files changed, 44 insertions(+), 61 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 4fedab53fa..aa5d9f3a93 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -50,22 +50,17 @@
 import warnings
 import zipfile
 import zipimport
+from collections.abc import Iterable, Iterator, Mapping, MutableSequence
 from pkgutil import get_importer
 from typing import (
     TYPE_CHECKING,
     Any,
     BinaryIO,
     Callable,
-    Dict,
-    Iterable,
-    Iterator,
     Literal,
-    Mapping,
-    MutableSequence,
     NamedTuple,
     NoReturn,
     Protocol,
-    Tuple,
     TypeVar,
     Union,
     overload,
@@ -424,7 +419,7 @@ def get_provider(moduleOrReq: str | Requirement) -> IResourceProvider | Distribu
     return _find_adapter(_provider_factories, loader)(module)
 
 
-@functools.lru_cache(maxsize=None)
+@functools.cache
 def _macos_vers():
     version = platform.mac_ver()[0]
     # fallback for MacPorts
@@ -1120,7 +1115,7 @@ def __setstate__(self, e_k_b_n_c) -> None:
         self.callbacks = callbacks[:]
 
 
-class _ReqExtras(Dict["Requirement", Tuple[str, ...]]):
+class _ReqExtras(dict["Requirement", tuple[str, ...]]):
     """
     Map each requirement to the extras that demanded it.
     """
@@ -1952,7 +1947,7 @@ def __init__(self) -> None:
 empty_provider = EmptyProvider()
 
 
-class ZipManifests(Dict[str, "MemoizedZipManifests.manifest_mod"]):
+class ZipManifests(dict[str, "MemoizedZipManifests.manifest_mod"]):
     """
     zip manifest builder
     """
@@ -2662,7 +2657,7 @@ def _normalize_cached(filename: StrOrBytesPath) -> str | bytes: ...
 
 else:
 
-    @functools.lru_cache(maxsize=None)
+    @functools.cache
     def _normalize_cached(filename):
         return normalize_path(filename)
 
diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py
index 71ea23dea9..c793be4d6e 100644
--- a/setuptools/_reqs.py
+++ b/setuptools/_reqs.py
@@ -1,7 +1,8 @@
 from __future__ import annotations
 
+from collections.abc import Iterable, Iterator
 from functools import lru_cache
-from typing import TYPE_CHECKING, Callable, Iterable, Iterator, TypeVar, Union, overload
+from typing import TYPE_CHECKING, Callable, TypeVar, Union, overload
 
 import jaraco.text as text
 from packaging.requirements import Requirement
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index afe0b57b3d..3500b2dfd1 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -37,8 +37,9 @@
 import tempfile
 import tokenize
 import warnings
+from collections.abc import Iterable, Iterator, Mapping
 from pathlib import Path
-from typing import TYPE_CHECKING, Iterable, Iterator, List, Mapping, Union
+from typing import TYPE_CHECKING, Union
 
 import setuptools
 
@@ -146,7 +147,7 @@ def suppress_known_deprecation():
         yield
 
 
-_ConfigSettings: TypeAlias = Union[Mapping[str, Union[str, List[str], None]], None]
+_ConfigSettings: TypeAlias = Union[Mapping[str, Union[str, list[str], None]], None]
 """
 Currently the user can run::
 
diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py
index d426f5dffb..171f41b87e 100644
--- a/setuptools/command/_requirestxt.py
+++ b/setuptools/command/_requirestxt.py
@@ -11,8 +11,9 @@
 
 import io
 from collections import defaultdict
+from collections.abc import Mapping
 from itertools import filterfalse
-from typing import Dict, Mapping, TypeVar
+from typing import TypeVar
 
 from jaraco.text import yield_lines
 from packaging.requirements import Requirement
@@ -22,7 +23,7 @@
 
 # dict can work as an ordered set
 _T = TypeVar("_T")
-_Ordered = Dict[_T, None]
+_Ordered = dict[_T, None]
 
 
 def _prepare(
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index f23caaa09f..7e4e556cb0 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -14,11 +14,12 @@
 import sys
 import sysconfig
 import warnings
+from collections.abc import Iterable, Sequence
 from email.generator import BytesGenerator, Generator
 from email.policy import EmailPolicy
 from glob import iglob
 from shutil import rmtree
-from typing import TYPE_CHECKING, Callable, Iterable, Literal, Sequence, cast
+from typing import TYPE_CHECKING, Callable, Literal, cast
 from zipfile import ZIP_DEFLATED, ZIP_STORED
 
 from packaging import tags, version as _packaging_version
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index d4c831e176..00a38ba3c9 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -3,10 +3,11 @@
 import itertools
 import os
 import sys
+from collections.abc import Iterator
 from importlib.machinery import EXTENSION_SUFFIXES
 from importlib.util import cache_from_source as _compiled_file_name
 from pathlib import Path
-from typing import TYPE_CHECKING, Iterator
+from typing import TYPE_CHECKING
 
 from setuptools.dist import Distribution
 from setuptools.errors import BaseError
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index e0b1754b7c..f8c9b11676 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -5,10 +5,10 @@
 import os
 import stat
 import textwrap
+from collections.abc import Iterable, Iterator
 from functools import partial
 from glob import glob
 from pathlib import Path
-from typing import Iterable, Iterator
 
 from more_itertools import unique_everseen
 
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 1aa8c0d76d..cae643618c 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -17,6 +17,7 @@
 import os
 import shutil
 import traceback
+from collections.abc import Iterable, Iterator, Mapping
 from contextlib import suppress
 from enum import Enum
 from inspect import cleandoc
@@ -24,7 +25,7 @@
 from pathlib import Path
 from tempfile import TemporaryDirectory
 from types import TracebackType
-from typing import TYPE_CHECKING, Iterable, Iterator, Mapping, Protocol, TypeVar, cast
+from typing import TYPE_CHECKING, Protocol, TypeVar, cast
 
 from .. import Command, _normalization, _path, errors, namespaces
 from .._path import StrPath
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 2b7fe7bd80..c4bbcff730 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -12,12 +12,13 @@
 
 import logging
 import os
+from collections.abc import Mapping
 from email.headerregistry import Address
 from functools import partial, reduce
 from inspect import cleandoc
 from itertools import chain
 from types import MappingProxyType
-from typing import TYPE_CHECKING, Any, Callable, Dict, Mapping, TypeVar, Union
+from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union
 
 from .._path import StrPath
 from ..errors import RemovedConfigError
@@ -34,7 +35,7 @@
 
 
 EMPTY: Mapping = MappingProxyType({})  # Immutable dict-like
-_ProjectReadmeValue: TypeAlias = Union[str, Dict[str, str]]
+_ProjectReadmeValue: TypeAlias = Union[str, dict[str, str]]
 _Correspondence: TypeAlias = Callable[["Distribution", Any, Union[StrPath, None]], None]
 _T = TypeVar("_T")
 
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index c07314dd7f..2a85363d8d 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -25,13 +25,14 @@
 import os
 import pathlib
 import sys
+from collections.abc import Iterable, Iterator, Mapping
 from configparser import ConfigParser
 from glob import iglob
 from importlib.machinery import ModuleSpec, all_suffixes
 from itertools import chain
 from pathlib import Path
 from types import ModuleType, TracebackType
-from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Mapping, TypeVar
+from typing import TYPE_CHECKING, Any, Callable, TypeVar
 
 from .._path import StrPath, same_path as _same_path
 from ..discovery import find_package_path
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index cacd898264..688040ab50 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -13,10 +13,11 @@
 
 import logging
 import os
+from collections.abc import Mapping
 from contextlib import contextmanager
 from functools import partial
 from types import TracebackType
-from typing import TYPE_CHECKING, Any, Callable, Mapping
+from typing import TYPE_CHECKING, Any, Callable
 
 from .._path import StrPath
 from ..errors import FileError, InvalidConfigError
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 9a8f759fc3..0428a32cc1 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -15,21 +15,9 @@
 import functools
 import os
 from collections import defaultdict
+from collections.abc import Iterable, Iterator
 from functools import partial, wraps
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    ClassVar,
-    Dict,
-    Generic,
-    Iterable,
-    Iterator,
-    List,
-    Tuple,
-    TypeVar,
-    cast,
-)
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, TypeVar, cast
 
 from packaging.markers import default_environment as marker_env
 from packaging.requirements import InvalidRequirement, Requirement
@@ -48,13 +36,13 @@
 
     from distutils.dist import DistributionMetadata
 
-SingleCommandOptions: TypeAlias = Dict[str, Tuple[str, Any]]
+SingleCommandOptions: TypeAlias = dict[str, tuple[str, Any]]
 """Dict that associate the name of the options of a particular command to a
 tuple. The first element of the tuple indicates the origin of the option value
 (e.g. the name of the configuration file where it was read from),
 while the second element of the tuple is the option value itself
 """
-AllCommandOptions: TypeAlias = Dict[str, SingleCommandOptions]
+AllCommandOptions: TypeAlias = dict[str, SingleCommandOptions]
 """cmd name => its options"""
 Target = TypeVar("Target", "Distribution", "DistributionMetadata")
 
@@ -114,7 +102,7 @@ def _apply(
 
     try:
         # TODO: Temporary cast until mypy 1.12 is released with upstream fixes from typeshed
-        _Distribution.parse_config_files(dist, filenames=cast(List[str], filenames))
+        _Distribution.parse_config_files(dist, filenames=cast(list[str], filenames))
         handlers = parse_configuration(
             dist, dist.command_options, ignore_option_errors=ignore_option_errors
         )
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
index cbe9cfb7f4..eb8969146f 100644
--- a/setuptools/discovery.py
+++ b/setuptools/discovery.py
@@ -41,11 +41,11 @@
 
 import itertools
 import os
-from collections.abc import Iterator
+from collections.abc import Iterable, Iterator, Mapping
 from fnmatch import fnmatchcase
 from glob import glob
 from pathlib import Path
-from typing import TYPE_CHECKING, ClassVar, Iterable, Mapping
+from typing import TYPE_CHECKING, ClassVar
 
 import _distutils_hack.override  # noqa: F401
 
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 782ac8a1ad..333bc29290 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -6,19 +6,10 @@
 import os
 import re
 import sys
-from collections.abc import Iterable
+from collections.abc import Iterable, MutableMapping, Sequence
 from glob import iglob
 from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Dict,
-    List,
-    MutableMapping,
-    Sequence,
-    Tuple,
-    Union,
-)
+from typing import TYPE_CHECKING, Any, Union
 
 from more_itertools import partition, unique_everseen
 from packaging.markers import InvalidMarker, Marker
@@ -66,10 +57,10 @@
 - not imply a nested type (like `dict`)
 for use with `isinstance`.
 """
-_Sequence: TypeAlias = Union[Tuple[str, ...], List[str]]
+_Sequence: TypeAlias = Union[tuple[str, ...], list[str]]
 # This is how stringifying _Sequence would look in Python 3.10
 _sequence_type_repr = "tuple[str, ...] | list[str]"
-_OrderedStrSequence: TypeAlias = Union[str, Dict[str, Any], Sequence[str]]
+_OrderedStrSequence: TypeAlias = Union[str, dict[str, Any], Sequence[str]]
 """
 :meta private:
 Avoid single-use iterable. Disallow sets.
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index 3b6eefb4dd..07919722b8 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -8,7 +8,7 @@
 import platform
 import sys
 import types
-from typing import Type, TypeVar, cast, overload
+from typing import TypeVar, cast, overload
 
 import distutils.filelist
 
@@ -58,7 +58,7 @@ def get_unpatched_class(cls: type[_T]) -> type[_T]:
     first.
     """
     external_bases = (
-        cast(Type[_T], cls)
+        cast(type[_T], cls)
         for cls in _get_mro(cls)
         if not cls.__module__.startswith('setuptools')
     )
diff --git a/setuptools/warnings.py b/setuptools/warnings.py
index f0ef616725..96467787c2 100644
--- a/setuptools/warnings.py
+++ b/setuptools/warnings.py
@@ -12,12 +12,12 @@
 from datetime import date
 from inspect import cleandoc
 from textwrap import indent
-from typing import TYPE_CHECKING, Tuple
+from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
     from typing_extensions import TypeAlias
 
-_DueDate: TypeAlias = Tuple[int, int, int]  # time tuple
+_DueDate: TypeAlias = tuple[int, int, int]  # time tuple
 _INDENT = 8 * " "
 _TEMPLATE = f"""{80 * '*'}\n{{details}}\n{80 * '*'}"""
 
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 5759a77f1f..a6b3720033 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -31,7 +31,7 @@
 NAMESPACE_PACKAGE_INIT = "__import__('pkg_resources').declare_namespace(__name__)\n"
 
 
-@functools.lru_cache(maxsize=None)
+@functools.cache
 def _get_supported_tags():
     # We calculate the supported tags only once, otherwise calling
     # this method on thousands of wheels takes seconds instead of
diff --git a/tools/generate_validation_code.py b/tools/generate_validation_code.py
index 82c7f011d9..157693de65 100644
--- a/tools/generate_validation_code.py
+++ b/tools/generate_validation_code.py
@@ -3,8 +3,8 @@
 import itertools
 import subprocess
 import sys
+from collections.abc import Iterable
 from pathlib import Path
-from typing import Iterable
 
 
 def generate_pyproject_validation(dest: Path, schemas: Iterable[Path]) -> bool:

From cd4836f1ea178750ce8340e3414fbf4f57f29f54 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 1 Nov 2024 11:49:15 -0400
Subject: [PATCH 1265/1761] Deprecation warning for setuptools.command._install

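This relies on PEP 562: a module-level __getattr__ runs only when an attribute is not found normally, so the warning fires on first access rather than at import time. A minimal sketch (module and attribute names are hypothetical):

    # mymodule.py
    import warnings

    def __getattr__(name: str):
        if name == "_old_name":
            warnings.warn(
                "'_old_name' is deprecated",
                DeprecationWarning,
                stacklevel=2,
            )
            return object()  # stand-in for the legacy value
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
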
---
 setuptools/command/install.py | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index 67d6b7f0c8..741b140c70 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -21,9 +21,16 @@
 else:
     easy_install_cls = None
 
-# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
-# now. See https://github.com/pypa/setuptools/issues/199/
-_install = orig.install
+
+def __getattr__(name: str):  # pragma: no cover
+    if name == "_install":
+        SetuptoolsDeprecationWarning.emit(
+            "`setuptools.command._install` was an internal implementation detail "
+            + "that was left in for numpy<1.9 support.",
+            due_date=(2025, 5, 2),  # Originally added on 2024-11-01
+        )
+        return orig.install
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
 
 
 class install(orig.install):

From 5c34e69568f23a524af4fa9dad3f5e80f22ec3e6 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 1 Nov 2024 22:35:31 -0400
Subject: [PATCH 1266/1761] Use extend for a proper workaround.

Closes jaraco/skeleton#152
Workaround for astral-sh/ruff#10299
---
 ruff.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 8b22940a64..9379d6e1f6 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,5 +1,5 @@
-# include pyproject.toml for requires-python (workaround astral-sh/ruff#10299)
-include = "pyproject.toml"
+# extend pyproject.toml for requires-python (workaround astral-sh/ruff#10299)
+extend = "pyproject.toml"
 
 [lint]
 extend-select = [

From 956a2e9a2b24f79aaca6881b93fa23fd354afc96 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 4 Nov 2024 13:21:42 +0000
Subject: [PATCH 1267/1761] Fix remaining ruff error

---
 setuptools/glob.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/glob.py b/setuptools/glob.py
index 827e8433fa..1dfff2cd50 100644
--- a/setuptools/glob.py
+++ b/setuptools/glob.py
@@ -11,8 +11,8 @@
 import fnmatch
 import os
 import re
-from collections.abc import Iterator
-from typing import TYPE_CHECKING, AnyStr, Iterable, overload
+from collections.abc import Iterable, Iterator
+from typing import TYPE_CHECKING, AnyStr, overload
 
 if TYPE_CHECKING:
     from _typeshed import BytesPath, StrOrBytesPath, StrPath

From 7a298d9abd05d02d89021f08cd2bb36bcca9952b Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Mon, 4 Nov 2024 14:23:45 +0100
Subject: [PATCH 1268/1761] Fix formatting and imports for Python 3.9

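The reformat groups multiple context managers in parentheses rather than on one long line. The form was standardized in Python 3.10, though CPython's PEG parser (the default since 3.9) already accepts it, which is presumably why the formatter emits it for this codebase. A sketch:

    from tempfile import TemporaryDirectory

    with (
        TemporaryDirectory() as src,
        TemporaryDirectory() as dst,
    ):
        print(src, dst)
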
---
 setuptools/tests/test_wheel.py | 49 ++++++++++++++++++++--------------
 1 file changed, 29 insertions(+), 20 deletions(-)

diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index ac736e2947..5724c6eabc 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -564,14 +564,17 @@ def test_wheel_install(params):
     install_tree = params.get('install_tree')
     file_defs = params.get('file_defs', {})
     setup_kwargs = params.get('setup_kwargs', {})
-    with build_wheel(
-        name=project_name,
-        version=version,
-        install_requires=install_requires,
-        extras_require=extras_require,
-        extra_file_defs=file_defs,
-        **setup_kwargs,
-    ) as filename, tempdir() as install_dir:
+    with (
+        build_wheel(
+            name=project_name,
+            version=version,
+            install_requires=install_requires,
+            extras_require=extras_require,
+            extra_file_defs=file_defs,
+            **setup_kwargs,
+        ) as filename,
+        tempdir() as install_dir,
+    ):
         _check_wheel_install(
             filename, install_dir, install_tree, project_name, version, requires_txt
         )
@@ -580,10 +583,13 @@ def test_wheel_install(params):
 def test_wheel_install_pep_503():
     project_name = 'Foo_Bar'  # PEP 503 canonicalized name is "foo-bar"
     version = '1.0'
-    with build_wheel(
-        name=project_name,
-        version=version,
-    ) as filename, tempdir() as install_dir:
+    with (
+        build_wheel(
+            name=project_name,
+            version=version,
+        ) as filename,
+        tempdir() as install_dir,
+    ):
         new_filename = filename.replace(project_name, canonicalize_name(project_name))
         shutil.move(filename, new_filename)
         _check_wheel_install(
@@ -687,14 +693,17 @@ def build_wheel(extra_file_defs=None, **kwargs):
     file_defs = params.get('file_defs', {})
     setup_kwargs = params.get('setup_kwargs', {})
 
-    with build_wheel(
-        name=project_name,
-        version=version,
-        install_requires=[],
-        extras_require={},
-        extra_file_defs=file_defs,
-        **setup_kwargs,
-    ) as filename, tempdir() as install_dir:
+    with (
+        build_wheel(
+            name=project_name,
+            version=version,
+            install_requires=[],
+            extras_require={},
+            extra_file_defs=file_defs,
+            **setup_kwargs,
+        ) as filename,
+        tempdir() as install_dir,
+    ):
         _check_wheel_install(
             filename, install_dir, install_tree, project_name, version, None
         )

From 442b937af6e86f1b8ff90755f63e422fc8ba0ec5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 16:40:04 +0000
Subject: [PATCH 1269/1761] Bump version check in pkg_resources

---
 pkg_resources/__init__.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index aa5d9f3a93..6418eb2ab0 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -21,8 +21,8 @@
 
 import sys
 
-if sys.version_info < (3, 8):  # noqa: UP036 # Check for unsupported versions
-    raise RuntimeError("Python 3.8 or later is required")
+if sys.version_info < (3, 9):  # noqa: UP036 # Check for unsupported versions
+    raise RuntimeError("Python 3.9 or later is required")
 
 import _imp
 import collections

From 4286e49fc6bb5e882aae3111013f12fdc4cd0b91 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 16:40:58 +0000
Subject: [PATCH 1270/1761] Remove 'm' SOABI flag that no longer exists in 3.9+

See https://docs.python.org/3/library/sys.html#sys.abiflags

> Changed in version 3.8: Default flags became an empty string (m flag for pymalloc has been removed).
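
With the 'm' flag gone, the ABI tag reduces to implementation + version + an optional debug marker (the 'u' wide-unicode flag likewise survives only as an empty placeholder). A rough sketch of the resulting tag using packaging.tags helpers (the debug check is simplified here):

    import sys
    from packaging import tags

    impl = tags.interpreter_name()                       # e.g. "cp"
    d = "d" if hasattr(sys, "gettotalrefcount") else ""  # debug builds only
    print(f"{impl}{tags.interpreter_version()}{d}")      # e.g. "cp312"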
---
 setuptools/command/bdist_wheel.py | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 7e4e556cb0..8cf91538f9 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -111,19 +111,11 @@ def get_abi_tag() -> str | None:
     impl = tags.interpreter_name()
     if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
         d = ""
-        m = ""
         u = ""
         if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
             d = "d"
 
-        if get_flag(
-            "WITH_PYMALLOC",
-            impl == "cp",
-            warn=(impl == "cp" and sys.version_info < (3, 8)),
-        ) and sys.version_info < (3, 8):
-            m = "m"
-
-        abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
+        abi = f"{impl}{tags.interpreter_version()}{d}{u}"
     elif soabi and impl == "cp" and soabi.startswith("cpython"):
         # non-Windows
         abi = "cp" + soabi.split("-")[1]

From 5ed6ad1325d5ba1edd6df3bcc833875a89e5f1cf Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 16:45:02 +0000
Subject: [PATCH 1271/1761] Remove old unnecessary workaround for towncrier

---
 tox.ini | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/tox.ini b/tox.ini
index b767cd4fbc..aae0fe66ad 100644
--- a/tox.ini
+++ b/tox.ini
@@ -54,8 +54,6 @@ extras =
 	doc
 	test
 changedir = docs
-deps =
-	importlib_resources < 6  # twisted/towncrier#528 (waiting for release)
 commands =
 	python -m sphinx -W --keep-going . {toxinidir}/build/html
 	python -m sphinxlint
@@ -67,7 +65,6 @@ deps =
 	towncrier
 	bump2version
 	jaraco.develop >= 7.23
-	importlib_resources < 6  # twisted/towncrier#528 (waiting for release)
 pass_env = *
 commands =
 	python tools/finalize.py

From c6758100614fda93541a5c20a070fa11331b8b56 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 16:47:55 +0000
Subject: [PATCH 1272/1761] Remove conditional requirement on
 importlib_resources in favour of importlib.resources

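On Python 3.9+ the stdlib importlib.resources provides the files() API that the backport was needed for. A quick self-contained sketch against a stdlib package:

    import importlib.resources as resources

    # list the resources bundled with the stdlib 'json' package
    for entry in resources.files("json").iterdir():
        print(entry.name)
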
---
 pyproject.toml           | 1 -
 setuptools/_importlib.py | 5 +----
 2 files changed, 1 insertion(+), 5 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index c80bee509c..300438232a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -93,7 +93,6 @@ core = [
 	"packaging>=24",
 	"more_itertools>=8.8",
 	"jaraco.text>=3.7",
-	"importlib_resources>=5.10.2; python_version < '3.9'",
 	"importlib_metadata>=6; python_version < '3.10'",
 	"tomli>=2.0.1; python_version < '3.11'",
 	"wheel>=0.43.0",
diff --git a/setuptools/_importlib.py b/setuptools/_importlib.py
index 5317be0fa0..ce0fd52653 100644
--- a/setuptools/_importlib.py
+++ b/setuptools/_importlib.py
@@ -6,7 +6,4 @@
     import importlib.metadata as metadata  # noqa: F401
 
 
-if sys.version_info < (3, 9):
-    import importlib_resources as resources  # pragma: no cover
-else:
-    import importlib.resources as resources  # noqa: F401
+import importlib.resources as resources  # noqa: F401

From bec3ff5d004336d93852d74a589847e042cde5b2 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 16:49:07 +0000
Subject: [PATCH 1273/1761] Update dependency on pyproject-hooks so that it
 accepts newer versions

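Unlike the old upper bound, '!=' excludes only the known-bad release while allowing everything newer. A quick check with packaging (a sketch):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet("!=1.1")
    print("1.0" in spec, "1.1" in spec, "1.2" in spec)  # True False True
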
---
 tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index aae0fe66ad..12e156a3fa 100644
--- a/tox.ini
+++ b/tox.ini
@@ -75,7 +75,7 @@ deps =
 	path
 	jaraco.packaging
 	# workaround for pypa/pyproject-hooks#192
-	pyproject-hooks<1.1
+	pyproject-hooks!=1.1
 	uv
 commands =
 	vendor: python -m tools.vendored

From 35dec48c078d262564ab870cea4d540208f440f9 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 16:53:03 +0000
Subject: [PATCH 1274/1761] Removed vendored version of importlib_resources

---
 .../INSTALLER                                 |   1 -
 .../LICENSE                                   | 202 ---------------
 .../METADATA                                  | 100 --------
 .../RECORD                                    |  89 -------
 .../REQUESTED                                 |   0
 .../importlib_resources-6.4.0.dist-info/WHEEL |   5 -
 .../top_level.txt                             |   1 -
 .../_vendor/importlib_resources/__init__.py   |  36 ---
 .../_vendor/importlib_resources/_adapters.py  | 168 ------------
 .../_vendor/importlib_resources/_common.py    | 210 ---------------
 .../_vendor/importlib_resources/_itertools.py |  38 ---
 setuptools/_vendor/importlib_resources/abc.py | 171 -------------
 .../importlib_resources/compat/__init__.py    |   0
 .../importlib_resources/compat/py38.py        |  11 -
 .../importlib_resources/compat/py39.py        |  10 -
 .../_vendor/importlib_resources/functional.py |  81 ------
 .../importlib_resources/future/__init__.py    |   0
 .../importlib_resources/future/adapters.py    |  95 -------
 .../_vendor/importlib_resources/py.typed      |   0
 .../_vendor/importlib_resources/readers.py    | 194 --------------
 .../_vendor/importlib_resources/simple.py     | 106 --------
 .../importlib_resources/tests/__init__.py     |   0
 .../importlib_resources/tests/_path.py        |  56 ----
 .../tests/compat/__init__.py                  |   0
 .../importlib_resources/tests/compat/py312.py |  18 --
 .../importlib_resources/tests/compat/py39.py  |  10 -
 .../tests/data01/__init__.py                  |   0
 .../tests/data01/binary.file                  | Bin 4 -> 0 bytes
 .../tests/data01/subdirectory/__init__.py     |   0
 .../tests/data01/subdirectory/binary.file     |   1 -
 .../tests/data01/utf-16.file                  | Bin 44 -> 0 bytes
 .../tests/data01/utf-8.file                   |   1 -
 .../tests/data02/__init__.py                  |   0
 .../tests/data02/one/__init__.py              |   0
 .../tests/data02/one/resource1.txt            |   1 -
 .../subdirectory/subsubdir/resource.txt       |   1 -
 .../tests/data02/two/__init__.py              |   0
 .../tests/data02/two/resource2.txt            |   1 -
 .../tests/namespacedata01/binary.file         | Bin 4 -> 0 bytes
 .../namespacedata01/subdirectory/binary.file  |   1 -
 .../tests/namespacedata01/utf-16.file         | Bin 44 -> 0 bytes
 .../tests/namespacedata01/utf-8.file          |   1 -
 .../tests/test_compatibilty_files.py          | 104 --------
 .../tests/test_contents.py                    |  43 ----
 .../importlib_resources/tests/test_custom.py  |  47 ----
 .../importlib_resources/tests/test_files.py   | 117 ---------
 .../tests/test_functional.py                  | 242 ------------------
 .../importlib_resources/tests/test_open.py    |  89 -------
 .../importlib_resources/tests/test_path.py    |  65 -----
 .../importlib_resources/tests/test_read.py    |  97 -------
 .../importlib_resources/tests/test_reader.py  | 145 -----------
 .../tests/test_resource.py                    | 241 -----------------
 .../_vendor/importlib_resources/tests/util.py | 164 ------------
 .../_vendor/importlib_resources/tests/zip.py  |  32 ---
 54 files changed, 2995 deletions(-)
 delete mode 100644 setuptools/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
 delete mode 100644 setuptools/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
 delete mode 100644 setuptools/_vendor/importlib_resources-6.4.0.dist-info/METADATA
 delete mode 100644 setuptools/_vendor/importlib_resources-6.4.0.dist-info/RECORD
 delete mode 100644 setuptools/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED
 delete mode 100644 setuptools/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
 delete mode 100644 setuptools/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
 delete mode 100644 setuptools/_vendor/importlib_resources/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/_adapters.py
 delete mode 100644 setuptools/_vendor/importlib_resources/_common.py
 delete mode 100644 setuptools/_vendor/importlib_resources/_itertools.py
 delete mode 100644 setuptools/_vendor/importlib_resources/abc.py
 delete mode 100644 setuptools/_vendor/importlib_resources/compat/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/compat/py38.py
 delete mode 100644 setuptools/_vendor/importlib_resources/compat/py39.py
 delete mode 100644 setuptools/_vendor/importlib_resources/functional.py
 delete mode 100644 setuptools/_vendor/importlib_resources/future/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/future/adapters.py
 delete mode 100644 setuptools/_vendor/importlib_resources/py.typed
 delete mode 100644 setuptools/_vendor/importlib_resources/readers.py
 delete mode 100644 setuptools/_vendor/importlib_resources/simple.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/_path.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/compat/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/compat/py312.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/compat/py39.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data01/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data01/binary.file
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data01/utf-16.file
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data01/utf-8.file
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data02/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/namespacedata01/binary.file
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-16.file
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-8.file
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_contents.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_custom.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_files.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_functional.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_open.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_path.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_read.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_reader.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/test_resource.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/util.py
 delete mode 100644 setuptools/_vendor/importlib_resources/tests/zip.py

diff --git a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/LICENSE b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/METADATA b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/METADATA
deleted file mode 100644
index b088e721d2..0000000000
--- a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/METADATA
+++ /dev/null
@@ -1,100 +0,0 @@
-Metadata-Version: 2.1
-Name: importlib_resources
-Version: 6.4.0
-Summary: Read resources from Python packages
-Home-page: https://github.com/python/importlib_resources
-Author: Barry Warsaw
-Author-email: barry@python.org
-Project-URL: Documentation, https://importlib-resources.readthedocs.io/
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Requires-Python: >=3.8
-License-File: LICENSE
-Requires-Dist: zipp >=3.1.0 ; python_version < "3.10"
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
-Provides-Extra: testing
-Requires-Dist: pytest >=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-cov ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
-Requires-Dist: zipp >=3.17 ; extra == 'testing'
-Requires-Dist: jaraco.test >=5.4 ; extra == 'testing'
-Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
-
-.. image:: https://img.shields.io/pypi/v/importlib_resources.svg
-   :target: https://pypi.org/project/importlib_resources
-
-.. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg
-
-.. image:: https://github.com/python/importlib_resources/actions/workflows/main.yml/badge.svg
-   :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22
-   :alt: tests
-
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
-    :target: https://github.com/astral-sh/ruff
-    :alt: Ruff
-
-.. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest
-   :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest
-
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
-   :target: https://blog.jaraco.com/skeleton
-
-.. image:: https://tidelift.com/badges/package/pypi/importlib-resources
-   :target: https://tidelift.com/subscription/pkg/pypi-importlib-resources?utm_source=pypi-importlib-resources&utm_medium=readme
-
-``importlib_resources`` is a backport of Python standard library
-`importlib.resources
-<https://docs.python.org/3/library/importlib.resources.html>`_
-module for older Pythons.
-
-The key goal of this module is to replace parts of `pkg_resources
-<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a
-solution in Python's stdlib that relies on well-defined APIs.  This makes
-reading resources included in packages easier, with more stable and consistent
-semantics.
-
-Compatibility
-=============
-
-New features are introduced in this third-party library and later merged
-into CPython. The following table indicates which versions of this library
-were contributed to different versions in the standard library:
-
-.. list-table::
-   :header-rows: 1
-
-   * - importlib_resources
-     - stdlib
-   * - 6.0
-     - 3.13
-   * - 5.12
-     - 3.12
-   * - 5.7
-     - 3.11
-   * - 5.0
-     - 3.10
-   * - 1.3
-     - 3.9
-   * - 0.5 (?)
-     - 3.7
-
-For Enterprise
-==============
-
-Available as part of the Tidelift Subscription.
-
-This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
-
-`Learn more <https://tidelift.com/subscription/pkg/pypi-importlib-resources?utm_source=pypi-importlib-resources&utm_medium=referral&utm_campaign=github>`_.
diff --git a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/RECORD b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/RECORD
deleted file mode 100644
index 18888dea71..0000000000
--- a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/RECORD
+++ /dev/null
@@ -1,89 +0,0 @@
-importlib_resources-6.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-importlib_resources-6.4.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-importlib_resources-6.4.0.dist-info/METADATA,sha256=g4eM2LuL0OiZcUVND0qwDJUpE29gOvtO3BSPXTbO9Fk,3944
-importlib_resources-6.4.0.dist-info/RECORD,,
-importlib_resources-6.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources-6.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-importlib_resources-6.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
-importlib_resources/__init__.py,sha256=uyp1kzYR6SawQBsqlyaXXfIxJx4Z2mM8MjmZn8qq2Gk,505
-importlib_resources/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/__pycache__/_adapters.cpython-312.pyc,,
-importlib_resources/__pycache__/_common.cpython-312.pyc,,
-importlib_resources/__pycache__/_itertools.cpython-312.pyc,,
-importlib_resources/__pycache__/abc.cpython-312.pyc,,
-importlib_resources/__pycache__/functional.cpython-312.pyc,,
-importlib_resources/__pycache__/readers.cpython-312.pyc,,
-importlib_resources/__pycache__/simple.cpython-312.pyc,,
-importlib_resources/_adapters.py,sha256=vprJGbUeHbajX6XCuMP6J3lMrqCi-P_MTlziJUR7jfk,4482
-importlib_resources/_common.py,sha256=blt4-ZtHnbUPzQQyPP7jLGgl_86btIW5ZhIsEhclhoA,5571
-importlib_resources/_itertools.py,sha256=eDisV6RqiNZOogLSXf6LOGHOYc79FGgPrKNLzFLmCrU,1277
-importlib_resources/abc.py,sha256=UKNU9ncEDkZRB3txcGb3WLxsL2iju9JbaLTI-dfLE_4,5162
-importlib_resources/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/compat/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/compat/__pycache__/py38.cpython-312.pyc,,
-importlib_resources/compat/__pycache__/py39.cpython-312.pyc,,
-importlib_resources/compat/py38.py,sha256=MWhut3XsAJwBYUaa5Qb2AoCrZNqcQjVThP-P1uBoE_4,230
-importlib_resources/compat/py39.py,sha256=Wfln4uQUShNz1XdCG-toG6_Y0WrlUmO9JzpvtcfQ-Cw,184
-importlib_resources/functional.py,sha256=mLU4DwSlh8_2IXWqwKOfPVxyRqAEpB3B4XTfRxr3X3M,2651
-importlib_resources/future/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/future/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/future/__pycache__/adapters.cpython-312.pyc,,
-importlib_resources/future/adapters.py,sha256=1-MF2VRcCButhcC1OMfZILU9o3kwZ4nXB2lurXpaIAw,2940
-importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/readers.py,sha256=WNKurBHHVu9EVtUhWkOj2fxH50HP7uanNFuupAqH2S8,5863
-importlib_resources/simple.py,sha256=CQ3TiIMFiJs_80o-7xJL1EpbUUVna4-NGDrSTQ3HW2Y,2584
-importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/_path.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_contents.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_custom.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_files.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_functional.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_open.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_path.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_read.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_reader.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/test_resource.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/util.cpython-312.pyc,,
-importlib_resources/tests/__pycache__/zip.cpython-312.pyc,,
-importlib_resources/tests/_path.py,sha256=nkv3ek7D1U898v921rYbldDCtKri2oyYOi3EJqGjEGU,1289
-importlib_resources/tests/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/compat/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/compat/__pycache__/py312.cpython-312.pyc,,
-importlib_resources/tests/compat/__pycache__/py39.cpython-312.pyc,,
-importlib_resources/tests/compat/py312.py,sha256=qcWjpZhQo2oEsdwIlRRQHrsMGDltkFTnETeG7fLdUS8,364
-importlib_resources/tests/compat/py39.py,sha256=lRTk0RWAOEb9RzAgvdRnqJUGCBLc3qoFQwzuJSa_zP4,329
-importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data01/subdirectory/binary.file,sha256=xtRM9Bj2EOP-nh2SlP9D3vgcbNytbLsYIM_0jTqkNV0,4
-importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
-importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
-importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/one/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
-importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt,sha256=jnrBBztxYrtQck7cmVnc4xQVO4-agzAZDGSFkAWtlFw,10
-importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-importlib_resources/tests/data02/two/__pycache__/__init__.cpython-312.pyc,,
-importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
-importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
-importlib_resources/tests/namespacedata01/subdirectory/binary.file,sha256=cbkhEL8TXIVYHIoSj2oZwPasp1KwxskeNXGJnPCbFF0,4
-importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
-importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
-importlib_resources/tests/test_compatibilty_files.py,sha256=95N_R7aik8cvnE6sBJpsxmP0K5plOWRIJDgbalD-Hpw,3314
-importlib_resources/tests/test_contents.py,sha256=70HW3mL_hv05Emv-OgdmwoLhXxjtuVxiWVaUpgRaRWA,930
-importlib_resources/tests/test_custom.py,sha256=QrHZqIWl0e-fsQRfm0ych8stOlKJOsAIU3rK6QOcyN0,1221
-importlib_resources/tests/test_files.py,sha256=OcShYu33kCcyXlDyZSVPkJNE08h-N_4bQOLV2QaSqX0,3472
-importlib_resources/tests/test_functional.py,sha256=ByCVViAwb2PIlKvDNJEqTZ0aLZGpFl5qa7CMCX-7HKM,8591
-importlib_resources/tests/test_open.py,sha256=ccmzbOeEa6zTd4ymZZ8yISrecfuYV0jhon-Vddqysu4,2778
-importlib_resources/tests/test_path.py,sha256=x8r2gJxG3hFM9xCOFNkgmHYXxsMldMLTSW_AZYf1l-A,2009
-importlib_resources/tests/test_read.py,sha256=7tsILQ2NoqVGFQxhHqxBwc5hWcN8b_3idojCsszTNfQ,3112
-importlib_resources/tests/test_reader.py,sha256=IcIUXaiPAtuahGV4_ZT4YXFLMMsJmcM1iOxqdIH2Aa4,5001
-importlib_resources/tests/test_resource.py,sha256=fcF8WgZ6rDCTRFnxtAUbdiaNe4G23yGovT1nb2dc7ls,7823
-importlib_resources/tests/util.py,sha256=vjVzEyX0X2RkTN-wGiQiplayp9sZom4JDjJinTNewos,4745
-importlib_resources/tests/zip.py,sha256=2MKmF8-osXBJSnqcUTuAUek_-tSB3iKmIT9qPhcsOsM,783
diff --git a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/REQUESTED
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/WHEEL b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
deleted file mode 100644
index bab98d6758..0000000000
--- a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/WHEEL
+++ /dev/null
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
diff --git a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt b/setuptools/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
deleted file mode 100644
index 58ad1bd333..0000000000
--- a/setuptools/_vendor/importlib_resources-6.4.0.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-importlib_resources
diff --git a/setuptools/_vendor/importlib_resources/__init__.py b/setuptools/_vendor/importlib_resources/__init__.py
deleted file mode 100644
index 0d029abd63..0000000000
--- a/setuptools/_vendor/importlib_resources/__init__.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""Read resources contained within a package."""
-
-from ._common import (
-    as_file,
-    files,
-    Package,
-    Anchor,
-)
-
-from .functional import (
-    contents,
-    is_resource,
-    open_binary,
-    open_text,
-    path,
-    read_binary,
-    read_text,
-)
-
-from .abc import ResourceReader
-
-
-__all__ = [
-    'Package',
-    'Anchor',
-    'ResourceReader',
-    'as_file',
-    'files',
-    'contents',
-    'is_resource',
-    'open_binary',
-    'open_text',
-    'path',
-    'read_binary',
-    'read_text',
-]
diff --git a/setuptools/_vendor/importlib_resources/_adapters.py b/setuptools/_vendor/importlib_resources/_adapters.py
deleted file mode 100644
index 50688fbb66..0000000000
--- a/setuptools/_vendor/importlib_resources/_adapters.py
+++ /dev/null
@@ -1,168 +0,0 @@
-from contextlib import suppress
-from io import TextIOWrapper
-
-from . import abc
-
-
-class SpecLoaderAdapter:
-    """
-    Wrap a package spec to adapt its underlying loader.
-    """
-
-    def __init__(self, spec, adapter=lambda spec: spec.loader):
-        self.spec = spec
-        self.loader = adapter(spec)
-
-    def __getattr__(self, name):
-        return getattr(self.spec, name)
-
-
-class TraversableResourcesLoader:
-    """
-    Adapt a loader to provide TraversableResources.
-    """
-
-    def __init__(self, spec):
-        self.spec = spec
-
-    def get_resource_reader(self, name):
-        return CompatibilityFiles(self.spec)._native()
-
-
-def _io_wrapper(file, mode='r', *args, **kwargs):
-    if mode == 'r':
-        return TextIOWrapper(file, *args, **kwargs)
-    elif mode == 'rb':
-        return file
-    raise ValueError(f"Invalid mode value '{mode}', only 'r' and 'rb' are supported")
-
-
-class CompatibilityFiles:
-    """
-    Adapter for an existing or non-existent resource reader
-    to provide a compatibility .files().
-    """
-
-    class SpecPath(abc.Traversable):
-        """
-        Path tied to a module spec.
-        Can be read and exposes the resource reader children.
-        """
-
-        def __init__(self, spec, reader):
-            self._spec = spec
-            self._reader = reader
-
-        def iterdir(self):
-            if not self._reader:
-                return iter(())
-            return iter(
-                CompatibilityFiles.ChildPath(self._reader, path)
-                for path in self._reader.contents()
-            )
-
-        def is_file(self):
-            return False
-
-        is_dir = is_file
-
-        def joinpath(self, other):
-            if not self._reader:
-                return CompatibilityFiles.OrphanPath(other)
-            return CompatibilityFiles.ChildPath(self._reader, other)
-
-        @property
-        def name(self):
-            return self._spec.name
-
-        def open(self, mode='r', *args, **kwargs):
-            return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)
-
-    class ChildPath(abc.Traversable):
-        """
-        Path tied to a resource reader child.
-        Can be read but doesn't expose any meaningful children.
-        """
-
-        def __init__(self, reader, name):
-            self._reader = reader
-            self._name = name
-
-        def iterdir(self):
-            return iter(())
-
-        def is_file(self):
-            return self._reader.is_resource(self.name)
-
-        def is_dir(self):
-            return not self.is_file()
-
-        def joinpath(self, other):
-            return CompatibilityFiles.OrphanPath(self.name, other)
-
-        @property
-        def name(self):
-            return self._name
-
-        def open(self, mode='r', *args, **kwargs):
-            return _io_wrapper(
-                self._reader.open_resource(self.name), mode, *args, **kwargs
-            )
-
-    class OrphanPath(abc.Traversable):
-        """
-        Orphan path, not tied to a module spec or resource reader.
-        Can't be read and doesn't expose any meaningful children.
-        """
-
-        def __init__(self, *path_parts):
-            if len(path_parts) < 1:
-                raise ValueError('Need at least one path part to construct a path')
-            self._path = path_parts
-
-        def iterdir(self):
-            return iter(())
-
-        def is_file(self):
-            return False
-
-        is_dir = is_file
-
-        def joinpath(self, other):
-            return CompatibilityFiles.OrphanPath(*self._path, other)
-
-        @property
-        def name(self):
-            return self._path[-1]
-
-        def open(self, mode='r', *args, **kwargs):
-            raise FileNotFoundError("Can't open orphan path")
-
-    def __init__(self, spec):
-        self.spec = spec
-
-    @property
-    def _reader(self):
-        with suppress(AttributeError):
-            return self.spec.loader.get_resource_reader(self.spec.name)
-
-    def _native(self):
-        """
-        Return the native reader if it supports files().
-        """
-        reader = self._reader
-        return reader if hasattr(reader, 'files') else self
-
-    def __getattr__(self, attr):
-        return getattr(self._reader, attr)
-
-    def files(self):
-        return CompatibilityFiles.SpecPath(self.spec, self._reader)
-
-
-def wrap_spec(package):
-    """
-    Construct a package spec with traversable compatibility
-    on the spec/loader/reader.
-    """
-    return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
diff --git a/setuptools/_vendor/importlib_resources/_common.py b/setuptools/_vendor/importlib_resources/_common.py
deleted file mode 100644
index 8df6b39e41..0000000000
--- a/setuptools/_vendor/importlib_resources/_common.py
+++ /dev/null
@@ -1,210 +0,0 @@
-import os
-import pathlib
-import tempfile
-import functools
-import contextlib
-import types
-import importlib
-import inspect
-import warnings
-import itertools
-
-from typing import Union, Optional, cast
-from .abc import ResourceReader, Traversable
-
-Package = Union[types.ModuleType, str]
-Anchor = Package
-
-
-def package_to_anchor(func):
-    """
-    Replace the 'package' parameter with 'anchor' and warn about the change.
-
-    Other errors should fall through.
-
-    >>> files('a', 'b')
-    Traceback (most recent call last):
-    TypeError: files() takes from 0 to 1 positional arguments but 2 were given
-
-    Remove this compatibility in Python 3.14.
-    """
-    undefined = object()
-
-    @functools.wraps(func)
-    def wrapper(anchor=undefined, package=undefined):
-        if package is not undefined:
-            if anchor is not undefined:
-                return func(anchor, package)
-            warnings.warn(
-                "First parameter to files is renamed to 'anchor'",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            return func(package)
-        elif anchor is undefined:
-            return func()
-        return func(anchor)
-
-    return wrapper
-
-
-@package_to_anchor
-def files(anchor: Optional[Anchor] = None) -> Traversable:
-    """
-    Get a Traversable resource for an anchor.
-    """
-    return from_package(resolve(anchor))
-
-
-def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
-    """
-    Return the package's loader if it's a ResourceReader.
-    """
-    # We can't use
-    # an issubclass() check here because apparently abc's __subclasscheck__()
-    # hook wants to create a weak reference to the object, but
-    # zipimport.zipimporter does not support weak references, resulting in a
-    # TypeError.  That seems terrible.
-    spec = package.__spec__
-    reader = getattr(spec.loader, 'get_resource_reader', None)  # type: ignore
-    if reader is None:
-        return None
-    return reader(spec.name)  # type: ignore
-
-
-@functools.singledispatch
-def resolve(cand: Optional[Anchor]) -> types.ModuleType:
-    return cast(types.ModuleType, cand)
-
-
-@resolve.register
-def _(cand: str) -> types.ModuleType:
-    return importlib.import_module(cand)
-
-
-@resolve.register
-def _(cand: None) -> types.ModuleType:
-    return resolve(_infer_caller().f_globals['__name__'])
-
-
-def _infer_caller():
-    """
-    Walk the stack and find the frame of the first caller not in this module.
-    """
-
-    def is_this_file(frame_info):
-        return frame_info.filename == __file__
-
-    def is_wrapper(frame_info):
-        return frame_info.function == 'wrapper'
-
-    not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
-    # also exclude 'wrapper' due to singledispatch in the call stack
-    callers = itertools.filterfalse(is_wrapper, not_this_file)
-    return next(callers).frame
-
-
-def from_package(package: types.ModuleType):
-    """
-    """
-    """
-    # deferred for performance (python/cpython#109829)
-    from .future.adapters import wrap_spec
-
-    spec = wrap_spec(package)
-    reader = spec.loader.get_resource_reader(spec.name)
-    return reader.files()
-
-
-@contextlib.contextmanager
-def _tempfile(
-    reader,
-    suffix='',
-    # gh-93353: Keep a reference to call os.remove() in late Python
-    # finalization.
-    *,
-    _os_remove=os.remove,
-):
-    # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
-    # blocks due to the need to close the temporary file to work on Windows
-    # properly.
-    fd, raw_path = tempfile.mkstemp(suffix=suffix)
-    try:
-        try:
-            os.write(fd, reader())
-        finally:
-            os.close(fd)
-        del reader
-        yield pathlib.Path(raw_path)
-    finally:
-        try:
-            _os_remove(raw_path)
-        except FileNotFoundError:
-            pass
-
-
-def _temp_file(path):
-    return _tempfile(path.read_bytes, suffix=path.name)
-
-
-def _is_present_dir(path: Traversable) -> bool:
-    """
-    Some Traversables implement ``is_dir()`` to raise an
-    exception (i.e. ``FileNotFoundError``) when the
-    directory doesn't exist. This function wraps that call
-    to always return a boolean and only return True
-    if there's a dir and it exists.
-    """
-    with contextlib.suppress(FileNotFoundError):
-        return path.is_dir()
-    return False
-
-
-@functools.singledispatch
-def as_file(path):
-    """
-    Given a Traversable object, return that object as a
-    path on the local file system in a context manager.
-    """
-    return _temp_dir(path) if _is_present_dir(path) else _temp_file(path)
-
-
-@as_file.register(pathlib.Path)
-@contextlib.contextmanager
-def _(path):
-    """
-    Degenerate behavior for pathlib.Path objects.
-    """
-    yield path
-
-
-@contextlib.contextmanager
-def _temp_path(dir: tempfile.TemporaryDirectory):
-    """
-    Wrap tempfile.TemporaryDirectory to return a pathlib object.
-    """
-    with dir as result:
-        yield pathlib.Path(result)
-
-
-@contextlib.contextmanager
-def _temp_dir(path):
-    """
-    Given a traversable dir, recursively replicate the whole tree
-    to the file system in a context manager.
-    """
-    assert path.is_dir()
-    with _temp_path(tempfile.TemporaryDirectory()) as temp_dir:
-        yield _write_contents(temp_dir, path)
-
-
-def _write_contents(target, source):
-    child = target.joinpath(source.name)
-    if source.is_dir():
-        child.mkdir()
-        for item in source.iterdir():
-            _write_contents(child, item)
-    else:
-        child.write_bytes(source.read_bytes())
-    return child
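
For orientation, the _common module deleted above supplied the public
files() and as_file() entry points that the rest of the package builds on.
A minimal usage sketch (the anchor 'mypkg' and resource 'data.txt' are
hypothetical names, and importlib_resources here means the standalone PyPI
distribution rather than this vendored copy):

    import importlib_resources as resources

    # files() resolves an anchor (a module or dotted name) to a Traversable.
    text = resources.files('mypkg').joinpath('data.txt').read_text(encoding='utf-8')

    # as_file() materializes a resource (even one inside a zip archive) as a
    # concrete filesystem path for the duration of the context.
    with resources.as_file(resources.files('mypkg') / 'data.txt') as path:
        data = path.read_bytes()
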
diff --git a/setuptools/_vendor/importlib_resources/_itertools.py b/setuptools/_vendor/importlib_resources/_itertools.py
deleted file mode 100644
index 7b775ef5ae..0000000000
--- a/setuptools/_vendor/importlib_resources/_itertools.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# from more_itertools 9.0
-def only(iterable, default=None, too_long=None):
-    """If *iterable* has only one item, return it.
-    If it has zero items, return *default*.
-    If it has more than one item, raise the exception given by *too_long*,
-    which is ``ValueError`` by default.
-    >>> only([], default='missing')
-    'missing'
-    >>> only([1])
-    1
-    >>> only([1, 2])  # doctest: +IGNORE_EXCEPTION_DETAIL
-    Traceback (most recent call last):
-    ...
-    ValueError: Expected exactly one item in iterable, but got 1, 2,
-     and perhaps more.
-    >>> only([1, 2], too_long=TypeError)  # doctest: +IGNORE_EXCEPTION_DETAIL
-    Traceback (most recent call last):
-    ...
-    TypeError
-    Note that :func:`only` attempts to advance *iterable* twice to ensure there
-    is only one item.  See :func:`spy` or :func:`peekable` to check
-    iterable contents less destructively.
-    """
-    it = iter(iterable)
-    first_value = next(it, default)
-
-    try:
-        second_value = next(it)
-    except StopIteration:
-        pass
-    else:
-        msg = (
-            'Expected exactly one item in iterable, but got {!r}, {!r}, '
-            'and perhaps more.'.format(first_value, second_value)
-        )
-        raise too_long or ValueError(msg)
-
-    return first_value
diff --git a/setuptools/_vendor/importlib_resources/abc.py b/setuptools/_vendor/importlib_resources/abc.py
deleted file mode 100644
index 7a58dd2f96..0000000000
--- a/setuptools/_vendor/importlib_resources/abc.py
+++ /dev/null
@@ -1,171 +0,0 @@
-import abc
-import io
-import itertools
-import pathlib
-from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional
-from typing import runtime_checkable, Protocol
-
-from .compat.py38 import StrPath
-
-
-__all__ = ["ResourceReader", "Traversable", "TraversableResources"]
-
-
-class ResourceReader(metaclass=abc.ABCMeta):
-    """Abstract base class for loaders to provide resource reading support."""
-
-    @abc.abstractmethod
-    def open_resource(self, resource: Text) -> BinaryIO:
-        """Return an opened, file-like object for binary reading.
-
-        The 'resource' argument is expected to represent only a file name.
-        If the resource cannot be found, FileNotFoundError is raised.
-        """
-        # This deliberately raises FileNotFoundError instead of
-        # NotImplementedError so that if this method is accidentally called,
-        # it'll still do the right thing.
-        raise FileNotFoundError
-
-    @abc.abstractmethod
-    def resource_path(self, resource: Text) -> Text:
-        """Return the file system path to the specified resource.
-
-        The 'resource' argument is expected to represent only a file name.
-        If the resource does not exist on the file system, raise
-        FileNotFoundError.
-        """
-        # This deliberately raises FileNotFoundError instead of
-        # NotImplementedError so that if this method is accidentally called,
-        # it'll still do the right thing.
-        raise FileNotFoundError
-
-    @abc.abstractmethod
-    def is_resource(self, path: Text) -> bool:
-        """Return True if the named 'path' is a resource.
-
-        Files are resources, directories are not.
-        """
-        raise FileNotFoundError
-
-    @abc.abstractmethod
-    def contents(self) -> Iterable[str]:
-        """Return an iterable of entries in `package`."""
-        raise FileNotFoundError
-
-
-class TraversalError(Exception):
-    pass
-
-
-@runtime_checkable
-class Traversable(Protocol):
-    """
-    An object with a subset of pathlib.Path methods suitable for
-    traversing directories and opening files.
-
-    Any exceptions that occur when accessing the backing resource
-    may propagate unaltered.
-    """
-
-    @abc.abstractmethod
-    def iterdir(self) -> Iterator["Traversable"]:
-        """
-        Yield Traversable objects in self
-        """
-
-    def read_bytes(self) -> bytes:
-        """
-        Read contents of self as bytes
-        """
-        with self.open('rb') as strm:
-            return strm.read()
-
-    def read_text(self, encoding: Optional[str] = None) -> str:
-        """
-        Read contents of self as text
-        """
-        with self.open(encoding=encoding) as strm:
-            return strm.read()
-
-    @abc.abstractmethod
-    def is_dir(self) -> bool:
-        """
-        Return True if self is a directory
-        """
-
-    @abc.abstractmethod
-    def is_file(self) -> bool:
-        """
-        Return True if self is a file
-        """
-
-    def joinpath(self, *descendants: StrPath) -> "Traversable":
-        """
-        Return Traversable resolved with any descendants applied.
-
-        Each descendant should be a path segment relative to self
-        and each may contain multiple levels separated by
-        ``posixpath.sep`` (``/``).
-        """
-        if not descendants:
-            return self
-        names = itertools.chain.from_iterable(
-            path.parts for path in map(pathlib.PurePosixPath, descendants)
-        )
-        target = next(names)
-        matches = (
-            traversable for traversable in self.iterdir() if traversable.name == target
-        )
-        try:
-            match = next(matches)
-        except StopIteration:
-            raise TraversalError(
-                "Target not found during traversal.", target, list(names)
-            )
-        return match.joinpath(*names)
-
-    def __truediv__(self, child: StrPath) -> "Traversable":
-        """
-        Return Traversable child in self
-        """
-        return self.joinpath(child)
-
-    @abc.abstractmethod
-    def open(self, mode='r', *args, **kwargs):
-        """
-        mode may be 'r' or 'rb' to open as text or binary. Return a handle
-        suitable for reading (same as pathlib.Path.open).
-
-        When opening as text, accepts encoding parameters such as those
-        accepted by io.TextIOWrapper.
-        """
-
-    @property
-    @abc.abstractmethod
-    def name(self) -> str:
-        """
-        The base name of this object without any parent references.
-        """
-
-
-class TraversableResources(ResourceReader):
-    """
-    The required interface for providing traversable
-    resources.
-    """
-
-    @abc.abstractmethod
-    def files(self) -> "Traversable":
-        """Return a Traversable object for the loaded package."""
-
-    def open_resource(self, resource: StrPath) -> io.BufferedReader:
-        return self.files().joinpath(resource).open('rb')
-
-    def resource_path(self, resource: Any) -> NoReturn:
-        raise FileNotFoundError(resource)
-
-    def is_resource(self, path: StrPath) -> bool:
-        return self.files().joinpath(path).is_file()
-
-    def contents(self) -> Iterator[str]:
-        return (item.name for item in self.files().iterdir())
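
The Traversable protocol removed here is the package's core abstraction:
five abstract members (iterdir, is_dir, is_file, open, name), with
read_bytes, read_text, joinpath, and the / operator derived from them. A
toy in-memory implementation, illustration only (the class and sample data
are invented for this sketch):

    import io
    from importlib_resources.abc import Traversable

    class DictTraversable(Traversable):
        # Toy Traversable: dict nodes are directories, str nodes are files.
        def __init__(self, name, node):
            self._name, self._node = name, node

        def iterdir(self):
            if isinstance(self._node, dict):
                return (DictTraversable(k, v) for k, v in self._node.items())
            return iter(())

        def is_dir(self):
            return isinstance(self._node, dict)

        def is_file(self):
            return not self.is_dir()

        def open(self, mode='r', *args, **kwargs):
            stream = io.BytesIO(self._node.encode())
            return stream if 'b' in mode else io.TextIOWrapper(stream, *args, **kwargs)

        @property
        def name(self):
            return self._name

    root = DictTraversable('root', {'pkg': {'data.txt': 'hello'}})
    # joinpath and / come for free from the protocol's default methods.
    assert (root / 'pkg' / 'data.txt').read_text() == 'hello'
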
diff --git a/setuptools/_vendor/importlib_resources/compat/__init__.py b/setuptools/_vendor/importlib_resources/compat/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/compat/py38.py b/setuptools/_vendor/importlib_resources/compat/py38.py
deleted file mode 100644
index 4d548257f8..0000000000
--- a/setuptools/_vendor/importlib_resources/compat/py38.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import os
-import sys
-
-from typing import Union
-
-
-if sys.version_info >= (3, 9):
-    StrPath = Union[str, os.PathLike[str]]
-else:
-    # PathLike is only subscriptable at runtime in 3.9+
-    StrPath = Union[str, "os.PathLike[str]"]
diff --git a/setuptools/_vendor/importlib_resources/compat/py39.py b/setuptools/_vendor/importlib_resources/compat/py39.py
deleted file mode 100644
index ab87b9dc14..0000000000
--- a/setuptools/_vendor/importlib_resources/compat/py39.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import sys
-
-
-__all__ = ['ZipPath']
-
-
-if sys.version_info >= (3, 10):
-    from zipfile import Path as ZipPath  # type: ignore
-else:
-    from zipp import Path as ZipPath  # type: ignore
diff --git a/setuptools/_vendor/importlib_resources/functional.py b/setuptools/_vendor/importlib_resources/functional.py
deleted file mode 100644
index f59416f2dd..0000000000
--- a/setuptools/_vendor/importlib_resources/functional.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""Simplified function-based API for importlib.resources"""
-
-import warnings
-
-from ._common import files, as_file
-
-
-_MISSING = object()
-
-
-def open_binary(anchor, *path_names):
-    """Open for binary reading the *resource* within *package*."""
-    return _get_resource(anchor, path_names).open('rb')
-
-
-def open_text(anchor, *path_names, encoding=_MISSING, errors='strict'):
-    """Open for text reading the *resource* within *package*."""
-    encoding = _get_encoding_arg(path_names, encoding)
-    resource = _get_resource(anchor, path_names)
-    return resource.open('r', encoding=encoding, errors=errors)
-
-
-def read_binary(anchor, *path_names):
-    """Read and return contents of *resource* within *package* as bytes."""
-    return _get_resource(anchor, path_names).read_bytes()
-
-
-def read_text(anchor, *path_names, encoding=_MISSING, errors='strict'):
-    """Read and return contents of *resource* within *package* as str."""
-    encoding = _get_encoding_arg(path_names, encoding)
-    resource = _get_resource(anchor, path_names)
-    return resource.read_text(encoding=encoding, errors=errors)
-
-
-def path(anchor, *path_names):
-    """Return the path to the *resource* as an actual file system path."""
-    return as_file(_get_resource(anchor, path_names))
-
-
-def is_resource(anchor, *path_names):
-    """Return ``True`` if there is a resource named *name* in the package,
-
-    Otherwise returns ``False``.
-    """
-    return _get_resource(anchor, path_names).is_file()
-
-
-def contents(anchor, *path_names):
-    """Return an iterable over the named resources within the package.
-
-    The iterable returns :class:`str` resources (e.g. files).
-    The iterable does not recurse into subdirectories.
-    """
-    warnings.warn(
-        "importlib.resources.contents is deprecated. "
-        "Use files(anchor).iterdir() instead.",
-        DeprecationWarning,
-        stacklevel=1,
-    )
-    return (resource.name for resource in _get_resource(anchor, path_names).iterdir())
-
-
-def _get_encoding_arg(path_names, encoding):
-    # For compatibility with versions where *encoding* was a positional
-    # argument, it needs to be given explicitly when there are multiple
-    # *path_names*.
-    # This limitation can be removed in Python 3.15.
-    if encoding is _MISSING:
-        if len(path_names) > 1:
-            raise TypeError(
-                "'encoding' argument required with multiple path names",
-            )
-        else:
-            return 'utf-8'
-    return encoding
-
-
-def _get_resource(anchor, path_names):
-    if anchor is None:
-        raise TypeError("anchor must be module or string, got None")
-    return files(anchor).joinpath(*path_names)
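
The functional helpers above are thin wrappers over files(), preserved for
compatibility with the older top-level API. Typical calls, using a
hypothetical anchor 'mypkg':

    import importlib_resources as resources

    raw = resources.read_binary('mypkg', 'data.bin')
    text = resources.read_text('mypkg', 'data.txt')       # utf-8 by default
    present = resources.is_resource('mypkg', 'data.txt')  # True only for files
    with resources.path('mypkg', 'data.txt') as p:        # concrete fs path
        print(p)
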
diff --git a/setuptools/_vendor/importlib_resources/future/__init__.py b/setuptools/_vendor/importlib_resources/future/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/future/adapters.py b/setuptools/_vendor/importlib_resources/future/adapters.py
deleted file mode 100644
index 0e9764bae8..0000000000
--- a/setuptools/_vendor/importlib_resources/future/adapters.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import functools
-import pathlib
-from contextlib import suppress
-from types import SimpleNamespace
-
-from .. import readers, _adapters
-
-
-def _block_standard(reader_getter):
-    """
-    Wrap _adapters.TraversableResourcesLoader.get_resource_reader
-    and intercept any standard library readers.
-    """
-
-    @functools.wraps(reader_getter)
-    def wrapper(*args, **kwargs):
-        """
-        If the reader is from the standard library, return None to allow
-        likely newer implementations in this library to take precedence.
-        """
-        try:
-            reader = reader_getter(*args, **kwargs)
-        except NotADirectoryError:
-            # MultiplexedPath may fail on zip subdirectory
-            return
-        # Python 3.10+
-        mod_name = reader.__class__.__module__
-        if mod_name.startswith('importlib.') and mod_name.endswith('readers'):
-            return
-        # Python 3.8, 3.9
-        if isinstance(reader, _adapters.CompatibilityFiles) and (
-            reader.spec.loader.__class__.__module__.startswith('zipimport')
-            or reader.spec.loader.__class__.__module__.startswith(
-                '_frozen_importlib_external'
-            )
-        ):
-            return
-        return reader
-
-    return wrapper
-
-
-def _skip_degenerate(reader):
-    """
-    Mask any degenerate reader. Ref #298.
-    """
-    is_degenerate = (
-        isinstance(reader, _adapters.CompatibilityFiles) and not reader._reader
-    )
-    return reader if not is_degenerate else None
-
-
-class TraversableResourcesLoader(_adapters.TraversableResourcesLoader):
-    """
-    Adapt loaders to provide TraversableResources and other
-    compatibility.
-
-    Ensures the readers from importlib_resources are preferred
-    over stdlib readers.
-    """
-
-    def get_resource_reader(self, name):
-        return (
-            _skip_degenerate(_block_standard(super().get_resource_reader)(name))
-            or self._standard_reader()
-            or super().get_resource_reader(name)
-        )
-
-    def _standard_reader(self):
-        return self._zip_reader() or self._namespace_reader() or self._file_reader()
-
-    def _zip_reader(self):
-        with suppress(AttributeError):
-            return readers.ZipReader(self.spec.loader, self.spec.name)
-
-    def _namespace_reader(self):
-        with suppress(AttributeError, ValueError):
-            return readers.NamespaceReader(self.spec.submodule_search_locations)
-
-    def _file_reader(self):
-        try:
-            path = pathlib.Path(self.spec.origin)
-        except TypeError:
-            return None
-        if path.exists():
-            return readers.FileReader(SimpleNamespace(path=path))
-
-
-def wrap_spec(package):
-    """
-    Override _adapters.wrap_spec to use TraversableResourcesLoader
-    from above. Ensures that future behavior is always available on older
-    Pythons.
-    """
-    return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
diff --git a/setuptools/_vendor/importlib_resources/py.typed b/setuptools/_vendor/importlib_resources/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/readers.py b/setuptools/_vendor/importlib_resources/readers.py
deleted file mode 100644
index 4a80a774aa..0000000000
--- a/setuptools/_vendor/importlib_resources/readers.py
+++ /dev/null
@@ -1,194 +0,0 @@
-import collections
-import contextlib
-import itertools
-import pathlib
-import operator
-import re
-import warnings
-
-from . import abc
-
-from ._itertools import only
-from .compat.py39 import ZipPath
-
-
-def remove_duplicates(items):
-    return iter(collections.OrderedDict.fromkeys(items))
-
-
-class FileReader(abc.TraversableResources):
-    def __init__(self, loader):
-        self.path = pathlib.Path(loader.path).parent
-
-    def resource_path(self, resource):
-        """
-        Return the file system path to prevent
-        `resources.path()` from creating a temporary
-        copy.
-        """
-        return str(self.path.joinpath(resource))
-
-    def files(self):
-        return self.path
-
-
-class ZipReader(abc.TraversableResources):
-    def __init__(self, loader, module):
-        _, _, name = module.rpartition('.')
-        self.prefix = loader.prefix.replace('\\', '/') + name + '/'
-        self.archive = loader.archive
-
-    def open_resource(self, resource):
-        try:
-            return super().open_resource(resource)
-        except KeyError as exc:
-            raise FileNotFoundError(exc.args[0])
-
-    def is_resource(self, path):
-        """
-        Workaround for `zipfile.Path.is_file` returning true
-        for non-existent paths.
-        """
-        target = self.files().joinpath(path)
-        return target.is_file() and target.exists()
-
-    def files(self):
-        return ZipPath(self.archive, self.prefix)
-
-
-class MultiplexedPath(abc.Traversable):
-    """
-    Given a series of Traversable objects, implement a merged
-    version of the interface across all objects. Useful for
-    namespace packages which may be multihomed at a single
-    name.
-    """
-
-    def __init__(self, *paths):
-        self._paths = list(map(_ensure_traversable, remove_duplicates(paths)))
-        if not self._paths:
-            message = 'MultiplexedPath must contain at least one path'
-            raise FileNotFoundError(message)
-        if not all(path.is_dir() for path in self._paths):
-            raise NotADirectoryError('MultiplexedPath only supports directories')
-
-    def iterdir(self):
-        children = (child for path in self._paths for child in path.iterdir())
-        by_name = operator.attrgetter('name')
-        groups = itertools.groupby(sorted(children, key=by_name), key=by_name)
-        return map(self._follow, (locs for name, locs in groups))
-
-    def read_bytes(self):
-        raise FileNotFoundError(f'{self} is not a file')
-
-    def read_text(self, *args, **kwargs):
-        raise FileNotFoundError(f'{self} is not a file')
-
-    def is_dir(self):
-        return True
-
-    def is_file(self):
-        return False
-
-    def joinpath(self, *descendants):
-        try:
-            return super().joinpath(*descendants)
-        except abc.TraversalError:
-            # One of the paths did not resolve (a directory does not exist).
-            # Just return something that will not exist.
-            return self._paths[0].joinpath(*descendants)
-
-    @classmethod
-    def _follow(cls, children):
-        """
-        Construct a MultiplexedPath if needed.
-
-        If children contains a sole element, return it.
-        Otherwise, return a MultiplexedPath of the items, unless one of
-        the items is not a directory, in which case return the first item.
-        """
-        subdirs, one_dir, one_file = itertools.tee(children, 3)
-
-        try:
-            return only(one_dir)
-        except ValueError:
-            try:
-                return cls(*subdirs)
-            except NotADirectoryError:
-                return next(one_file)
-
-    def open(self, *args, **kwargs):
-        raise FileNotFoundError(f'{self} is not a file')
-
-    @property
-    def name(self):
-        return self._paths[0].name
-
-    def __repr__(self):
-        paths = ', '.join(f"'{path}'" for path in self._paths)
-        return f'MultiplexedPath({paths})'
-
-
-class NamespaceReader(abc.TraversableResources):
-    def __init__(self, namespace_path):
-        if 'NamespacePath' not in str(namespace_path):
-            raise ValueError('Invalid path')
-        self.path = MultiplexedPath(*map(self._resolve, namespace_path))
-
-    @classmethod
-    def _resolve(cls, path_str) -> abc.Traversable:
-        r"""
-        Given an item from a namespace path, resolve it to a Traversable.
-
-        path_str might be a directory on the filesystem or a path to a
-        zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or
-        ``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``.
-        """
-        (dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir())
-        return dir
-
-    @classmethod
-    def _candidate_paths(cls, path_str):
-        yield pathlib.Path(path_str)
-        yield from cls._resolve_zip_path(path_str)
-
-    @staticmethod
-    def _resolve_zip_path(path_str):
-        for match in reversed(list(re.finditer(r'[\\/]', path_str))):
-            with contextlib.suppress(
-                FileNotFoundError,
-                IsADirectoryError,
-                NotADirectoryError,
-                PermissionError,
-            ):
-                inner = path_str[match.end() :].replace('\\', '/') + '/'
-                yield ZipPath(path_str[: match.start()], inner.lstrip('/'))
-
-    def resource_path(self, resource):
-        """
-        Return the file system path directly so that
-        `resources.path()` does not need to create a
-        temporary copy.
-        """
-        return str(self.path.joinpath(resource))
-
-    def files(self):
-        return self.path
-
-
-def _ensure_traversable(path):
-    """
-    Convert deprecated string arguments to traversables (pathlib.Path).
-
-    Remove with Python 3.15.
-    """
-    if not isinstance(path, str):
-        return path
-
-    warnings.warn(
-        "String arguments are deprecated. Pass a Traversable instead.",
-        DeprecationWarning,
-        stacklevel=3,
-    )
-
-    return pathlib.Path(path)
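For readers following the diff, here is a minimal sketch of the MultiplexedPath behavior deleted above. It assumes the standalone importlib_resources distribution is installed; the temporary directories exist only for the example.

    import pathlib
    import tempfile

    from importlib_resources.readers import MultiplexedPath

    with tempfile.TemporaryDirectory() as a, tempfile.TemporaryDirectory() as b:
        pathlib.Path(a, 'shared.txt').write_text('from a', encoding='utf-8')
        pathlib.Path(b, 'only-b.txt').write_text('from b', encoding='utf-8')

        # A MultiplexedPath presents several directories as one logical directory.
        combined = MultiplexedPath(pathlib.Path(a), pathlib.Path(b))

        # iterdir() yields the union of children; same-named entries are merged.
        print(sorted(child.name for child in combined.iterdir()))
        # -> ['only-b.txt', 'shared.txt']

        # Directory-only semantics: never a file, and file operations such as
        # read_text() raise FileNotFoundError.
        assert combined.is_dir() and not combined.is_file()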
diff --git a/setuptools/_vendor/importlib_resources/simple.py b/setuptools/_vendor/importlib_resources/simple.py
deleted file mode 100644
index 96f117fec6..0000000000
--- a/setuptools/_vendor/importlib_resources/simple.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""
-Interface adapters for low-level readers.
-"""
-
-import abc
-import io
-import itertools
-from typing import BinaryIO, List
-
-from .abc import Traversable, TraversableResources
-
-
-class SimpleReader(abc.ABC):
-    """
-    The minimum, low-level interface required from a resource
-    provider.
-    """
-
-    @property
-    @abc.abstractmethod
-    def package(self) -> str:
-        """
-        The name of the package for which this reader loads resources.
-        """
-
-    @abc.abstractmethod
-    def children(self) -> List['SimpleReader']:
-        """
-        Obtain an iterable of SimpleReader for available
-        child containers (e.g. directories).
-        """
-
-    @abc.abstractmethod
-    def resources(self) -> List[str]:
-        """
-        Obtain available named resources for this virtual package.
-        """
-
-    @abc.abstractmethod
-    def open_binary(self, resource: str) -> BinaryIO:
-        """
-        Obtain a file-like object for a named resource.
-        """
-
-    @property
-    def name(self):
-        return self.package.split('.')[-1]
-
-
-class ResourceContainer(Traversable):
-    """
-    Traversable container for a package's resources via its reader.
-    """
-
-    def __init__(self, reader: SimpleReader):
-        self.reader = reader
-
-    def is_dir(self):
-        return True
-
-    def is_file(self):
-        return False
-
-    def iterdir(self):
-        files = (ResourceHandle(self, name) for name in self.reader.resources)
-        dirs = map(ResourceContainer, self.reader.children())
-        return itertools.chain(files, dirs)
-
-    def open(self, *args, **kwargs):
-        raise IsADirectoryError()
-
-
-class ResourceHandle(Traversable):
-    """
-    Handle to a named resource in a ResourceReader.
-    """
-
-    def __init__(self, parent: ResourceContainer, name: str):
-        self.parent = parent
-        self.name = name  # type: ignore
-
-    def is_file(self):
-        return True
-
-    def is_dir(self):
-        return False
-
-    def open(self, mode='r', *args, **kwargs):
-        stream = self.parent.reader.open_binary(self.name)
-        if 'b' not in mode:
-            stream = io.TextIOWrapper(stream, *args, **kwargs)
-        return stream
-
-    def joinpath(self, name):
-        raise RuntimeError("Cannot traverse into a resource")
-
-
-class TraversableReader(TraversableResources, SimpleReader):
-    """
-    A TraversableResources based on SimpleReader. Resource providers
-    may derive from this class to provide the TraversableResources
-    interface by supplying the SimpleReader interface.
-    """
-
-    def files(self):
-        return ResourceContainer(self)
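To see how the adapters above fit together, here is a toy provider, a sketch under the assumption that the standalone importlib_resources distribution is installed; InMemoryReader and its data are invented for the example. TraversableReader supplies files(), so only the four SimpleReader members need implementing. Note that ResourceContainer.iterdir() reads reader.resources as an attribute rather than calling it, so the sketch exposes it as a property.

    import io

    from importlib_resources.simple import TraversableReader


    class InMemoryReader(TraversableReader):  # hypothetical example class
        """Serve a fixed mapping of names to bytes as a virtual package."""

        def __init__(self, package, data):
            self._package = package
            self._data = data

        @property
        def package(self):
            return self._package

        def children(self):
            return []  # this toy package has no subdirectories

        @property
        def resources(self):
            # a property, because ResourceContainer.iterdir() iterates
            # the attribute without calling it
            return list(self._data)

        def open_binary(self, resource):
            return io.BytesIO(self._data[resource])


    reader = InMemoryReader('toy', {'greeting.txt': b'hello\n'})
    root = reader.files()  # a ResourceContainer wrapping the reader
    (handle,) = (item for item in root.iterdir() if item.is_file())
    print(handle.open(encoding='utf-8').read())  # -> hello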
diff --git a/setuptools/_vendor/importlib_resources/tests/__init__.py b/setuptools/_vendor/importlib_resources/tests/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/tests/_path.py b/setuptools/_vendor/importlib_resources/tests/_path.py
deleted file mode 100644
index 1f97c96146..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/_path.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import pathlib
-import functools
-
-from typing import Dict, Union
-
-
-####
-# from jaraco.path 3.4.1
-
-FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']]  # type: ignore
-
-
-def build(spec: FilesSpec, prefix=pathlib.Path()):
-    """
-    Build a set of files/directories, as described by the spec.
-
-    Each key represents a pathname, and the value represents
-    the content. Content may be a nested directory.
-
-    >>> spec = {
-    ...     'README.txt': "A README file",
-    ...     "foo": {
-    ...         "__init__.py": "",
-    ...         "bar": {
-    ...             "__init__.py": "",
-    ...         },
-    ...         "baz.py": "# Some code",
-    ...     }
-    ... }
-    >>> target = getfixture('tmp_path')
-    >>> build(spec, target)
-    >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
-    '# Some code'
-    """
-    for name, contents in spec.items():
-        create(contents, pathlib.Path(prefix) / name)
-
-
-@functools.singledispatch
-def create(content: Union[str, bytes, FilesSpec], path):
-    path.mkdir(exist_ok=True)
-    build(content, prefix=path)  # type: ignore
-
-
-@create.register
-def _(content: bytes, path):
-    path.write_bytes(content)
-
-
-@create.register
-def _(content: str, path):
-    path.write_text(content, encoding='utf-8')
-
-
-# end from jaraco.path
-####
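The helpers above dispatch on the type of each spec value: a dict recurses into a directory, str writes text, and bytes writes binary. A quick usage sketch, assuming the module remains importable from the upstream importlib_resources tests (the import path is an assumption):

    import pathlib
    import tempfile

    # assumed import path; the module ships with upstream importlib_resources
    from importlib_resources.tests._path import build

    with tempfile.TemporaryDirectory() as tmp:
        build(
            {
                'data': {
                    'notes.txt': 'plain text',  # str -> write_text()
                    'blob.bin': b'\x00\x01',    # bytes -> write_bytes()
                },
            },
            prefix=pathlib.Path(tmp),
        )
        assert pathlib.Path(tmp, 'data', 'blob.bin').read_bytes() == b'\x00\x01'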
diff --git a/setuptools/_vendor/importlib_resources/tests/compat/__init__.py b/setuptools/_vendor/importlib_resources/tests/compat/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/tests/compat/py312.py b/setuptools/_vendor/importlib_resources/tests/compat/py312.py
deleted file mode 100644
index ea9a58ba2e..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/compat/py312.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import contextlib
-
-from .py39 import import_helper
-
-
-@contextlib.contextmanager
-def isolated_modules():
-    """
-    Save the set of loaded modules on entry and restore it on exit.
-    """
-    (saved,) = import_helper.modules_setup()
-    try:
-        yield
-    finally:
-        import_helper.modules_cleanup(saved)
-
-
-vars(import_helper).setdefault('isolated_modules', isolated_modules)
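A sketch of what the shim above provides to tests, assuming an import path that mirrors the vendored layout deleted here; the module chosen for the demonstration is arbitrary and assumed not to be imported yet.

    import sys

    # assumed import path mirroring the vendored layout
    from importlib_resources.tests.compat.py312 import import_helper

    assert 'colorsys' not in sys.modules  # assumption: not yet imported
    with import_helper.isolated_modules():
        import colorsys  # noqa: F401
        assert 'colorsys' in sys.modules
    # On exit the saved sys.modules snapshot is restored, so the module
    # first imported inside the block is forgotten again.
    assert 'colorsys' not in sys.modules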
diff --git a/setuptools/_vendor/importlib_resources/tests/compat/py39.py b/setuptools/_vendor/importlib_resources/tests/compat/py39.py
deleted file mode 100644
index e158eb85d3..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/compat/py39.py
+++ /dev/null
@@ -1,10 +0,0 @@
-"""
-Backward-compatibility shims to support Python 3.9 and earlier.
-"""
-
-from jaraco.test.cpython import from_test_support, try_import
-
-import_helper = try_import('import_helper') or from_test_support(
-    'modules_setup', 'modules_cleanup', 'DirsOnSysPath'
-)
-os_helper = try_import('os_helper') or from_test_support('temp_dir')
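The pattern here, prefer the dedicated test.support submodule on newer CPythons and otherwise pull the same names off test.support itself, can be rendered in plain stdlib terms without jaraco.test roughly as follows (the helper names are invented for the sketch, and the test package must be present in the Python install):

    import importlib
    import types


    def _try_import(name):
        """Return test.support.<name> if this CPython ships it, else None."""
        try:
            return importlib.import_module(f'test.support.{name}')
        except ImportError:
            return None


    def _from_test_support(*names):
        """Bundle attributes of test.support into a stand-in namespace."""
        import test.support

        return types.SimpleNamespace(**{n: getattr(test.support, n) for n in names})


    os_helper = _try_import('os_helper') or _from_test_support('temp_dir')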
diff --git a/setuptools/_vendor/importlib_resources/tests/data01/__init__.py b/setuptools/_vendor/importlib_resources/tests/data01/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/tests/data01/binary.file b/setuptools/_vendor/importlib_resources/tests/data01/binary.file
deleted file mode 100644
index eaf36c1daccfdf325514461cd1a2ffbc139b5464..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 4
LcmZQzWMT#Y01f~L

diff --git a/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py b/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file b/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file
deleted file mode 100644
index 5bd8bb897b..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/data01/subdirectory/binary.file
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/setuptools/_vendor/importlib_resources/tests/data01/utf-16.file b/setuptools/_vendor/importlib_resources/tests/data01/utf-16.file
deleted file mode 100644
index 2cb772295ef4b480a8d83725bd5006a0236d8f68..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 44
ucmezW&x0YAAqNQa8FUyF7(y9B7~B|i84MZBfV^^`Xc15@g+Y;liva-T)Ce>H

diff --git a/setuptools/_vendor/importlib_resources/tests/data01/utf-8.file b/setuptools/_vendor/importlib_resources/tests/data01/utf-8.file
deleted file mode 100644
index 1c0132ad90..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/data01/utf-8.file
+++ /dev/null
@@ -1 +0,0 @@
-Hello, UTF-8 world!
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/__init__.py b/setuptools/_vendor/importlib_resources/tests/data02/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py b/setuptools/_vendor/importlib_resources/tests/data02/one/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt b/setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt
deleted file mode 100644
index 61a813e401..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/data02/one/resource1.txt
+++ /dev/null
@@ -1 +0,0 @@
-one resource
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt b/setuptools/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
deleted file mode 100644
index 48f587a2d0..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt
+++ /dev/null
@@ -1 +0,0 @@
-a resource
\ No newline at end of file
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py b/setuptools/_vendor/importlib_resources/tests/data02/two/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt b/setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt
deleted file mode 100644
index a80ce46ea3..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/data02/two/resource2.txt
+++ /dev/null
@@ -1 +0,0 @@
-two resource
diff --git a/setuptools/_vendor/importlib_resources/tests/namespacedata01/binary.file b/setuptools/_vendor/importlib_resources/tests/namespacedata01/binary.file
deleted file mode 100644
index eaf36c1daccfdf325514461cd1a2ffbc139b5464..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 4
LcmZQzWMT#Y01f~L

diff --git a/setuptools/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file b/setuptools/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
deleted file mode 100644
index 100f50643d..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/namespacedata01/subdirectory/binary.file
+++ /dev/null
@@ -1 +0,0 @@
-

\ No newline at end of file
diff --git a/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-16.file b/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-16.file
deleted file mode 100644
index 2cb772295ef4b480a8d83725bd5006a0236d8f68..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 44
ucmezW&x0YAAqNQa8FUyF7(y9B7~B|i84MZBfV^^`Xc15@g+Y;liva-T)Ce>H

diff --git a/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-8.file b/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-8.file
deleted file mode 100644
index 1c0132ad90..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/namespacedata01/utf-8.file
+++ /dev/null
@@ -1 +0,0 @@
-Hello, UTF-8 world!
diff --git a/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py b/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py
deleted file mode 100644
index 13ad0dfb21..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_compatibilty_files.py
+++ /dev/null
@@ -1,104 +0,0 @@
-import io
-import unittest
-
-import importlib_resources as resources
-
-from importlib_resources._adapters import (
-    CompatibilityFiles,
-    wrap_spec,
-)
-
-from . import util
-
-
-class CompatibilityFilesTests(unittest.TestCase):
-    @property
-    def package(self):
-        bytes_data = io.BytesIO(b'Hello, world!')
-        return util.create_package(
-            file=bytes_data,
-            path='some_path',
-            contents=('a', 'b', 'c'),
-        )
-
-    @property
-    def files(self):
-        return resources.files(self.package)
-
-    def test_spec_path_iter(self):
-        self.assertEqual(
-            sorted(path.name for path in self.files.iterdir()),
-            ['a', 'b', 'c'],
-        )
-
-    def test_child_path_iter(self):
-        self.assertEqual(list((self.files / 'a').iterdir()), [])
-
-    def test_orphan_path_iter(self):
-        self.assertEqual(list((self.files / 'a' / 'a').iterdir()), [])
-        self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), [])
-
-    def test_spec_path_is(self):
-        self.assertFalse(self.files.is_file())
-        self.assertFalse(self.files.is_dir())
-
-    def test_child_path_is(self):
-        self.assertTrue((self.files / 'a').is_file())
-        self.assertFalse((self.files / 'a').is_dir())
-
-    def test_orphan_path_is(self):
-        self.assertFalse((self.files / 'a' / 'a').is_file())
-        self.assertFalse((self.files / 'a' / 'a').is_dir())
-        self.assertFalse((self.files / 'a' / 'a' / 'a').is_file())
-        self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir())
-
-    def test_spec_path_name(self):
-        self.assertEqual(self.files.name, 'testingpackage')
-
-    def test_child_path_name(self):
-        self.assertEqual((self.files / 'a').name, 'a')
-
-    def test_orphan_path_name(self):
-        self.assertEqual((self.files / 'a' / 'b').name, 'b')
-        self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c')
-
-    def test_spec_path_open(self):
-        self.assertEqual(self.files.read_bytes(), b'Hello, world!')
-        self.assertEqual(self.files.read_text(encoding='utf-8'), 'Hello, world!')
-
-    def test_child_path_open(self):
-        self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!')
-        self.assertEqual(
-            (self.files / 'a').read_text(encoding='utf-8'), 'Hello, world!'
-        )
-
-    def test_orphan_path_open(self):
-        with self.assertRaises(FileNotFoundError):
-            (self.files / 'a' / 'b').read_bytes()
-        with self.assertRaises(FileNotFoundError):
-            (self.files / 'a' / 'b' / 'c').read_bytes()
-
-    def test_open_invalid_mode(self):
-        with self.assertRaises(ValueError):
-            self.files.open('0')
-
-    def test_orphan_path_invalid(self):
-        with self.assertRaises(ValueError):
-            CompatibilityFiles.OrphanPath()
-
-    def test_wrap_spec(self):
-        spec = wrap_spec(self.package)
-        self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles)
-
-
-class CompatibilityFilesNoReaderTests(unittest.TestCase):
-    @property
-    def package(self):
-        return util.create_package_from_loader(None)
-
-    @property
-    def files(self):
-        return resources.files(self.package)
-
-    def test_spec_path_joinpath(self):
-        self.assertIsInstance(self.files / 'a', CompatibilityFiles.OrphanPath)
diff --git a/setuptools/_vendor/importlib_resources/tests/test_contents.py b/setuptools/_vendor/importlib_resources/tests/test_contents.py
deleted file mode 100644
index 7dc3b0a619..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_contents.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import unittest
-import importlib_resources as resources
-
-from . import data01
-from . import util
-
-
-class ContentsTests:
-    expected = {
-        '__init__.py',
-        'binary.file',
-        'subdirectory',
-        'utf-16.file',
-        'utf-8.file',
-    }
-
-    def test_contents(self):
-        contents = {path.name for path in resources.files(self.data).iterdir()}
-        assert self.expected <= contents
-
-
-class ContentsDiskTests(ContentsTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
-
-
-class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
-    pass
-
-
-class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
-    expected = {
-        # no __init__ because of namespace design
-        'binary.file',
-        'subdirectory',
-        'utf-16.file',
-        'utf-8.file',
-    }
-
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
diff --git a/setuptools/_vendor/importlib_resources/tests/test_custom.py b/setuptools/_vendor/importlib_resources/tests/test_custom.py
deleted file mode 100644
index 86c65676f1..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_custom.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import unittest
-import contextlib
-import pathlib
-
-import importlib_resources as resources
-from .. import abc
-from ..abc import TraversableResources, ResourceReader
-from . import util
-from .compat.py39 import os_helper
-
-
-class SimpleLoader:
-    """
-    A simple loader that only implements a resource reader.
-    """
-
-    def __init__(self, reader: ResourceReader):
-        self.reader = reader
-
-    def get_resource_reader(self, package):
-        return self.reader
-
-
-class MagicResources(TraversableResources):
-    """
-    Magically returns the resources at path.
-    """
-
-    def __init__(self, path: pathlib.Path):
-        self.path = path
-
-    def files(self):
-        return self.path
-
-
-class CustomTraversableResourcesTests(unittest.TestCase):
-    def setUp(self):
-        self.fixtures = contextlib.ExitStack()
-        self.addCleanup(self.fixtures.close)
-
-    def test_custom_loader(self):
-        temp_dir = pathlib.Path(self.fixtures.enter_context(os_helper.temp_dir()))
-        loader = SimpleLoader(MagicResources(temp_dir))
-        pkg = util.create_package_from_loader(loader)
-        files = resources.files(pkg)
-        assert isinstance(files, abc.Traversable)
-        assert list(files.iterdir()) == []
diff --git a/setuptools/_vendor/importlib_resources/tests/test_files.py b/setuptools/_vendor/importlib_resources/tests/test_files.py
deleted file mode 100644
index 3e86ec64bc..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_files.py
+++ /dev/null
@@ -1,117 +0,0 @@
-import textwrap
-import unittest
-import warnings
-import importlib
-import contextlib
-
-import importlib_resources as resources
-from ..abc import Traversable
-from . import data01
-from . import util
-from . import _path
-from .compat.py39 import os_helper
-from .compat.py312 import import_helper
-
-
-@contextlib.contextmanager
-def suppress_known_deprecation():
-    with warnings.catch_warnings(record=True) as ctx:
-        warnings.simplefilter('default', category=DeprecationWarning)
-        yield ctx
-
-
-class FilesTests:
-    def test_read_bytes(self):
-        files = resources.files(self.data)
-        actual = files.joinpath('utf-8.file').read_bytes()
-        assert actual == b'Hello, UTF-8 world!\n'
-
-    def test_read_text(self):
-        files = resources.files(self.data)
-        actual = files.joinpath('utf-8.file').read_text(encoding='utf-8')
-        assert actual == 'Hello, UTF-8 world!\n'
-
-    def test_traversable(self):
-        assert isinstance(resources.files(self.data), Traversable)
-
-    def test_joinpath_with_multiple_args(self):
-        files = resources.files(self.data)
-        binfile = files.joinpath('subdirectory', 'binary.file')
-        self.assertTrue(binfile.is_file())
-
-    def test_old_parameter(self):
-        """
-        Files used to take a 'package' parameter. Make sure anyone
-        passing by name is still supported.
-        """
-        with suppress_known_deprecation():
-            resources.files(package=self.data)
-
-
-class OpenDiskTests(FilesTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
-
-
-class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
-    pass
-
-
-class OpenNamespaceTests(FilesTests, unittest.TestCase):
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
-
-
-class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
-    ZIP_MODULE = 'namespacedata01'
-
-
-class SiteDir:
-    def setUp(self):
-        self.fixtures = contextlib.ExitStack()
-        self.addCleanup(self.fixtures.close)
-        self.site_dir = self.fixtures.enter_context(os_helper.temp_dir())
-        self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir))
-        self.fixtures.enter_context(import_helper.isolated_modules())
-
-
-class ModulesFilesTests(SiteDir, unittest.TestCase):
-    def test_module_resources(self):
-        """
-        A module can have resources found adjacent to the module.
-        """
-        spec = {
-            'mod.py': '',
-            'res.txt': 'resources are the best',
-        }
-        _path.build(spec, self.site_dir)
-        import mod
-
-        actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8')
-        assert actual == spec['res.txt']
-
-
-class ImplicitContextFilesTests(SiteDir, unittest.TestCase):
-    def test_implicit_files(self):
-        """
-        Without any parameter, files() will infer the location as the caller.
-        """
-        spec = {
-            'somepkg': {
-                '__init__.py': textwrap.dedent(
-                    """
-                    import importlib_resources as res
-                    val = res.files().joinpath('res.txt').read_text(encoding='utf-8')
-                    """
-                ),
-                'res.txt': 'resources are the best',
-            },
-        }
-        _path.build(spec, self.site_dir)
-        assert importlib.import_module('somepkg').val == 'resources are the best'
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_functional.py b/setuptools/_vendor/importlib_resources/tests/test_functional.py
deleted file mode 100644
index 69706cf7be..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_functional.py
+++ /dev/null
@@ -1,242 +0,0 @@
-import unittest
-import os
-import contextlib
-
-try:
-    from test.support.warnings_helper import ignore_warnings, check_warnings
-except ImportError:
-    # older Python versions
-    from test.support import ignore_warnings, check_warnings
-
-import importlib_resources as resources
-
-# Since the functional API forwards to Traversable, we only test
-# filesystem resources here -- not zip files, namespace packages etc.
-# We do test for two kinds of Anchor, though.
-
-
-class StringAnchorMixin:
-    anchor01 = 'importlib_resources.tests.data01'
-    anchor02 = 'importlib_resources.tests.data02'
-
-
-class ModuleAnchorMixin:
-    from . import data01 as anchor01
-    from . import data02 as anchor02
-
-
-class FunctionalAPIBase:
-    def _gen_resourcetxt_path_parts(self):
-        """Yield various names of a text file in anchor02, each in a subTest"""
-        for path_parts in (
-            ('subdirectory', 'subsubdir', 'resource.txt'),
-            ('subdirectory/subsubdir/resource.txt',),
-            ('subdirectory/subsubdir', 'resource.txt'),
-        ):
-            with self.subTest(path_parts=path_parts):
-                yield path_parts
-
-    def test_read_text(self):
-        self.assertEqual(
-            resources.read_text(self.anchor01, 'utf-8.file'),
-            'Hello, UTF-8 world!\n',
-        )
-        self.assertEqual(
-            resources.read_text(
-                self.anchor02,
-                'subdirectory',
-                'subsubdir',
-                'resource.txt',
-                encoding='utf-8',
-            ),
-            'a resource',
-        )
-        for path_parts in self._gen_resourcetxt_path_parts():
-            self.assertEqual(
-                resources.read_text(
-                    self.anchor02,
-                    *path_parts,
-                    encoding='utf-8',
-                ),
-                'a resource',
-            )
-        # Use generic OSError, since e.g. attempting to read a directory can
-        # fail with PermissionError rather than IsADirectoryError
-        with self.assertRaises(OSError):
-            resources.read_text(self.anchor01)
-        with self.assertRaises(OSError):
-            resources.read_text(self.anchor01, 'no-such-file')
-        with self.assertRaises(UnicodeDecodeError):
-            resources.read_text(self.anchor01, 'utf-16.file')
-        self.assertEqual(
-            resources.read_text(
-                self.anchor01,
-                'binary.file',
-                encoding='latin1',
-            ),
-            '\x00\x01\x02\x03',
-        )
-        self.assertEqual(
-            resources.read_text(
-                self.anchor01,
-                'utf-16.file',
-                errors='backslashreplace',
-            ),
-            'Hello, UTF-16 world!\n'.encode('utf-16').decode(
-                errors='backslashreplace',
-            ),
-        )
-
-    def test_read_binary(self):
-        self.assertEqual(
-            resources.read_binary(self.anchor01, 'utf-8.file'),
-            b'Hello, UTF-8 world!\n',
-        )
-        for path_parts in self._gen_resourcetxt_path_parts():
-            self.assertEqual(
-                resources.read_binary(self.anchor02, *path_parts),
-                b'a resource',
-            )
-
-    def test_open_text(self):
-        with resources.open_text(self.anchor01, 'utf-8.file') as f:
-            self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
-        for path_parts in self._gen_resourcetxt_path_parts():
-            with resources.open_text(
-                self.anchor02,
-                *path_parts,
-                encoding='utf-8',
-            ) as f:
-                self.assertEqual(f.read(), 'a resource')
-        # Use generic OSError, since e.g. attempting to read a directory can
-        # fail with PermissionError rather than IsADirectoryError
-        with self.assertRaises(OSError):
-            resources.open_text(self.anchor01)
-        with self.assertRaises(OSError):
-            resources.open_text(self.anchor01, 'no-such-file')
-        with resources.open_text(self.anchor01, 'utf-16.file') as f:
-            with self.assertRaises(UnicodeDecodeError):
-                f.read()
-        with resources.open_text(
-            self.anchor01,
-            'binary.file',
-            encoding='latin1',
-        ) as f:
-            self.assertEqual(f.read(), '\x00\x01\x02\x03')
-        with resources.open_text(
-            self.anchor01,
-            'utf-16.file',
-            errors='backslashreplace',
-        ) as f:
-            self.assertEqual(
-                f.read(),
-                'Hello, UTF-16 world!\n'.encode('utf-16').decode(
-                    errors='backslashreplace',
-                ),
-            )
-
-    def test_open_binary(self):
-        with resources.open_binary(self.anchor01, 'utf-8.file') as f:
-            self.assertEqual(f.read(), b'Hello, UTF-8 world!\n')
-        for path_parts in self._gen_resourcetxt_path_parts():
-            with resources.open_binary(
-                self.anchor02,
-                *path_parts,
-            ) as f:
-                self.assertEqual(f.read(), b'a resource')
-
-    def test_path(self):
-        with resources.path(self.anchor01, 'utf-8.file') as path:
-            with open(str(path), encoding='utf-8') as f:
-                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
-        with resources.path(self.anchor01) as path:
-            with open(os.path.join(path, 'utf-8.file'), encoding='utf-8') as f:
-                self.assertEqual(f.read(), 'Hello, UTF-8 world!\n')
-
-    def test_is_resource(self):
-        is_resource = resources.is_resource
-        self.assertTrue(is_resource(self.anchor01, 'utf-8.file'))
-        self.assertFalse(is_resource(self.anchor01, 'no_such_file'))
-        self.assertFalse(is_resource(self.anchor01))
-        self.assertFalse(is_resource(self.anchor01, 'subdirectory'))
-        for path_parts in self._gen_resourcetxt_path_parts():
-            self.assertTrue(is_resource(self.anchor02, *path_parts))
-
-    def test_contents(self):
-        with check_warnings((".*contents.*", DeprecationWarning)):
-            c = resources.contents(self.anchor01)
-        self.assertGreaterEqual(
-            set(c),
-            {'utf-8.file', 'utf-16.file', 'binary.file', 'subdirectory'},
-        )
-        with contextlib.ExitStack() as cm:
-            cm.enter_context(self.assertRaises(OSError))
-            cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
-
-            list(resources.contents(self.anchor01, 'utf-8.file'))
-
-        for path_parts in self._gen_resourcetxt_path_parts():
-            with contextlib.ExitStack() as cm:
-                cm.enter_context(self.assertRaises(OSError))
-                cm.enter_context(check_warnings((".*contents.*", DeprecationWarning)))
-
-                list(resources.contents(self.anchor01, *path_parts))
-        with check_warnings((".*contents.*", DeprecationWarning)):
-            c = resources.contents(self.anchor01, 'subdirectory')
-        self.assertGreaterEqual(
-            set(c),
-            {'binary.file'},
-        )
-
-    @ignore_warnings(category=DeprecationWarning)
-    def test_common_errors(self):
-        for func in (
-            resources.read_text,
-            resources.read_binary,
-            resources.open_text,
-            resources.open_binary,
-            resources.path,
-            resources.is_resource,
-            resources.contents,
-        ):
-            with self.subTest(func=func):
-                # Rejecting None anchor
-                with self.assertRaises(TypeError):
-                    func(None)
-                # Rejecting invalid anchor type
-                with self.assertRaises((TypeError, AttributeError)):
-                    func(1234)
-                # Unknown module
-                with self.assertRaises(ModuleNotFoundError):
-                    func('$missing module$')
-
-    def test_text_errors(self):
-        for func in (
-            resources.read_text,
-            resources.open_text,
-        ):
-            with self.subTest(func=func):
-                # Multiple path arguments need explicit encoding argument.
-                with self.assertRaises(TypeError):
-                    func(
-                        self.anchor02,
-                        'subdirectory',
-                        'subsubdir',
-                        'resource.txt',
-                    )
-
-
-class FunctionalAPITest_StringAnchor(
-    unittest.TestCase,
-    FunctionalAPIBase,
-    StringAnchorMixin,
-):
-    pass
-
-
-class FunctionalAPITest_ModuleAnchor(
-    unittest.TestCase,
-    FunctionalAPIBase,
-    ModuleAnchorMixin,
-):
-    pass
diff --git a/setuptools/_vendor/importlib_resources/tests/test_open.py b/setuptools/_vendor/importlib_resources/tests/test_open.py
deleted file mode 100644
index 44f1018af3..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_open.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import unittest
-
-import importlib_resources as resources
-from . import data01
-from . import util
-
-
-class CommonBinaryTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        target = resources.files(package).joinpath(path)
-        with target.open('rb'):
-            pass
-
-
-class CommonTextTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        target = resources.files(package).joinpath(path)
-        with target.open(encoding='utf-8'):
-            pass
-
-
-class OpenTests:
-    def test_open_binary(self):
-        target = resources.files(self.data) / 'binary.file'
-        with target.open('rb') as fp:
-            result = fp.read()
-            self.assertEqual(result, bytes(range(4)))
-
-    def test_open_text_default_encoding(self):
-        target = resources.files(self.data) / 'utf-8.file'
-        with target.open(encoding='utf-8') as fp:
-            result = fp.read()
-            self.assertEqual(result, 'Hello, UTF-8 world!\n')
-
-    def test_open_text_given_encoding(self):
-        target = resources.files(self.data) / 'utf-16.file'
-        with target.open(encoding='utf-16', errors='strict') as fp:
-            result = fp.read()
-        self.assertEqual(result, 'Hello, UTF-16 world!\n')
-
-    def test_open_text_with_errors(self):
-        """
-        Raises UnicodeError without the 'errors' argument.
-        """
-        target = resources.files(self.data) / 'utf-16.file'
-        with target.open(encoding='utf-8', errors='strict') as fp:
-            self.assertRaises(UnicodeError, fp.read)
-        with target.open(encoding='utf-8', errors='ignore') as fp:
-            result = fp.read()
-        self.assertEqual(
-            result,
-            'H\x00e\x00l\x00l\x00o\x00,\x00 '
-            '\x00U\x00T\x00F\x00-\x001\x006\x00 '
-            '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
-        )
-
-    def test_open_binary_FileNotFoundError(self):
-        target = resources.files(self.data) / 'does-not-exist'
-        with self.assertRaises(FileNotFoundError):
-            target.open('rb')
-
-    def test_open_text_FileNotFoundError(self):
-        target = resources.files(self.data) / 'does-not-exist'
-        with self.assertRaises(FileNotFoundError):
-            target.open(encoding='utf-8')
-
-
-class OpenDiskTests(OpenTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
-
-
-class OpenDiskNamespaceTests(OpenTests, unittest.TestCase):
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
-
-
-class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
-    pass
-
-
-class OpenNamespaceZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
-    ZIP_MODULE = 'namespacedata01'
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_path.py b/setuptools/_vendor/importlib_resources/tests/test_path.py
deleted file mode 100644
index c3e1cbb4ed..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_path.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import io
-import pathlib
-import unittest
-
-import importlib_resources as resources
-from . import data01
-from . import util
-
-
-class CommonTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        with resources.as_file(resources.files(package).joinpath(path)):
-            pass
-
-
-class PathTests:
-    def test_reading(self):
-        """
-        Path should be readable and a pathlib.Path instance.
-        """
-        target = resources.files(self.data) / 'utf-8.file'
-        with resources.as_file(target) as path:
-            self.assertIsInstance(path, pathlib.Path)
-            self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
-            self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8'))
-
-
-class PathDiskTests(PathTests, unittest.TestCase):
-    data = data01
-
-    def test_natural_path(self):
-        """
-        Guarantee the internal implementation detail that
-        file-system-backed resources do not get the tempdir
-        treatment.
-        """
-        target = resources.files(self.data) / 'utf-8.file'
-        with resources.as_file(target) as path:
-            assert 'data' in str(path)
-
-
-class PathMemoryTests(PathTests, unittest.TestCase):
-    def setUp(self):
-        file = io.BytesIO(b'Hello, UTF-8 world!\n')
-        self.addCleanup(file.close)
-        self.data = util.create_package(
-            file=file, path=FileNotFoundError("package exists only in memory")
-        )
-        self.data.__spec__.origin = None
-        self.data.__spec__.has_location = False
-
-
-class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
-    def test_remove_in_context_manager(self):
-        """
-        It is not an error if the file that was temporarily stashed on the
-        file system is removed inside the `with` stanza.
-        """
-        target = resources.files(self.data) / 'utf-8.file'
-        with resources.as_file(target) as path:
-            path.unlink()
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_read.py b/setuptools/_vendor/importlib_resources/tests/test_read.py
deleted file mode 100644
index 97d90128cf..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_read.py
+++ /dev/null
@@ -1,97 +0,0 @@
-import unittest
-import importlib_resources as resources
-
-from . import data01
-from . import util
-from importlib import import_module
-
-
-class CommonBinaryTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        resources.files(package).joinpath(path).read_bytes()
-
-
-class CommonTextTests(util.CommonTests, unittest.TestCase):
-    def execute(self, package, path):
-        resources.files(package).joinpath(path).read_text(encoding='utf-8')
-
-
-class ReadTests:
-    def test_read_bytes(self):
-        result = resources.files(self.data).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, bytes(range(4)))
-
-    def test_read_text_default_encoding(self):
-        result = (
-            resources.files(self.data)
-            .joinpath('utf-8.file')
-            .read_text(encoding='utf-8')
-        )
-        self.assertEqual(result, 'Hello, UTF-8 world!\n')
-
-    def test_read_text_given_encoding(self):
-        result = (
-            resources.files(self.data)
-            .joinpath('utf-16.file')
-            .read_text(encoding='utf-16')
-        )
-        self.assertEqual(result, 'Hello, UTF-16 world!\n')
-
-    def test_read_text_with_errors(self):
-        """
-        Raises UnicodeError without the 'errors' argument.
-        """
-        target = resources.files(self.data) / 'utf-16.file'
-        self.assertRaises(UnicodeError, target.read_text, encoding='utf-8')
-        result = target.read_text(encoding='utf-8', errors='ignore')
-        self.assertEqual(
-            result,
-            'H\x00e\x00l\x00l\x00o\x00,\x00 '
-            '\x00U\x00T\x00F\x00-\x001\x006\x00 '
-            '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
-        )
-
-
-class ReadDiskTests(ReadTests, unittest.TestCase):
-    data = data01
-
-
-class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
-    def test_read_submodule_resource(self):
-        submodule = import_module('data01.subdirectory')
-        result = resources.files(submodule).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, bytes(range(4, 8)))
-
-    def test_read_submodule_resource_by_name(self):
-        result = (
-            resources.files('data01.subdirectory').joinpath('binary.file').read_bytes()
-        )
-        self.assertEqual(result, bytes(range(4, 8)))
-
-
-class ReadNamespaceTests(ReadTests, unittest.TestCase):
-    def setUp(self):
-        from . import namespacedata01
-
-        self.data = namespacedata01
-
-
-class ReadNamespaceZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
-    ZIP_MODULE = 'namespacedata01'
-
-    def test_read_submodule_resource(self):
-        submodule = import_module('namespacedata01.subdirectory')
-        result = resources.files(submodule).joinpath('binary.file').read_bytes()
-        self.assertEqual(result, bytes(range(12, 16)))
-
-    def test_read_submodule_resource_by_name(self):
-        result = (
-            resources.files('namespacedata01.subdirectory')
-            .joinpath('binary.file')
-            .read_bytes()
-        )
-        self.assertEqual(result, bytes(range(12, 16)))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_reader.py b/setuptools/_vendor/importlib_resources/tests/test_reader.py
deleted file mode 100644
index 95c2fc85a4..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_reader.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import os.path
-import sys
-import pathlib
-import unittest
-
-from importlib import import_module
-from importlib_resources.readers import MultiplexedPath, NamespaceReader
-
-
-class MultiplexedPathTest(unittest.TestCase):
-    @classmethod
-    def setUpClass(cls):
-        cls.folder = pathlib.Path(__file__).parent / 'namespacedata01'
-
-    def test_init_no_paths(self):
-        with self.assertRaises(FileNotFoundError):
-            MultiplexedPath()
-
-    def test_init_file(self):
-        with self.assertRaises(NotADirectoryError):
-            MultiplexedPath(self.folder / 'binary.file')
-
-    def test_iterdir(self):
-        contents = {path.name for path in MultiplexedPath(self.folder).iterdir()}
-        try:
-            contents.remove('__pycache__')
-        except (KeyError, ValueError):
-            pass
-        self.assertEqual(
-            contents, {'subdirectory', 'binary.file', 'utf-16.file', 'utf-8.file'}
-        )
-
-    def test_iterdir_duplicate(self):
-        data01 = pathlib.Path(__file__).parent.joinpath('data01')
-        contents = {
-            path.name for path in MultiplexedPath(self.folder, data01).iterdir()
-        }
-        for remove in ('__pycache__', '__init__.pyc'):
-            try:
-                contents.remove(remove)
-            except (KeyError, ValueError):
-                pass
-        self.assertEqual(
-            contents,
-            {'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'},
-        )
-
-    def test_is_dir(self):
-        self.assertEqual(MultiplexedPath(self.folder).is_dir(), True)
-
-    def test_is_file(self):
-        self.assertEqual(MultiplexedPath(self.folder).is_file(), False)
-
-    def test_open_file(self):
-        path = MultiplexedPath(self.folder)
-        with self.assertRaises(FileNotFoundError):
-            path.read_bytes()
-        with self.assertRaises(FileNotFoundError):
-            path.read_text()
-        with self.assertRaises(FileNotFoundError):
-            path.open()
-
-    def test_join_path(self):
-        data01 = pathlib.Path(__file__).parent.joinpath('data01')
-        prefix = str(data01.parent)
-        path = MultiplexedPath(self.folder, data01)
-        self.assertEqual(
-            str(path.joinpath('binary.file'))[len(prefix) + 1 :],
-            os.path.join('namespacedata01', 'binary.file'),
-        )
-        sub = path.joinpath('subdirectory')
-        assert isinstance(sub, MultiplexedPath)
-        assert 'namespacedata01' in str(sub)
-        assert 'data01' in str(sub)
-        self.assertEqual(
-            str(path.joinpath('imaginary'))[len(prefix) + 1 :],
-            os.path.join('namespacedata01', 'imaginary'),
-        )
-        self.assertEqual(path.joinpath(), path)
-
-    def test_join_path_compound(self):
-        path = MultiplexedPath(self.folder)
-        assert not path.joinpath('imaginary/foo.py').exists()
-
-    def test_join_path_common_subdir(self):
-        data01 = pathlib.Path(__file__).parent.joinpath('data01')
-        data02 = pathlib.Path(__file__).parent.joinpath('data02')
-        prefix = str(data01.parent)
-        path = MultiplexedPath(data01, data02)
-        self.assertIsInstance(path.joinpath('subdirectory'), MultiplexedPath)
-        self.assertEqual(
-            str(path.joinpath('subdirectory', 'subsubdir'))[len(prefix) + 1 :],
-            os.path.join('data02', 'subdirectory', 'subsubdir'),
-        )
-
-    def test_repr(self):
-        self.assertEqual(
-            repr(MultiplexedPath(self.folder)),
-            f"MultiplexedPath('{self.folder}')",
-        )
-
-    def test_name(self):
-        self.assertEqual(
-            MultiplexedPath(self.folder).name,
-            os.path.basename(self.folder),
-        )
-
-
-class NamespaceReaderTest(unittest.TestCase):
-    site_dir = str(pathlib.Path(__file__).parent)
-
-    @classmethod
-    def setUpClass(cls):
-        sys.path.append(cls.site_dir)
-
-    @classmethod
-    def tearDownClass(cls):
-        sys.path.remove(cls.site_dir)
-
-    def test_init_error(self):
-        with self.assertRaises(ValueError):
-            NamespaceReader(['path1', 'path2'])
-
-    def test_resource_path(self):
-        namespacedata01 = import_module('namespacedata01')
-        reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
-
-        root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
-        self.assertEqual(
-            reader.resource_path('binary.file'), os.path.join(root, 'binary.file')
-        )
-        self.assertEqual(
-            reader.resource_path('imaginary'), os.path.join(root, 'imaginary')
-        )
-
-    def test_files(self):
-        namespacedata01 = import_module('namespacedata01')
-        reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
-        root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
-        self.assertIsInstance(reader.files(), MultiplexedPath)
-        self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')")
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/test_resource.py b/setuptools/_vendor/importlib_resources/tests/test_resource.py
deleted file mode 100644
index dc2a108cde..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/test_resource.py
+++ /dev/null
@@ -1,241 +0,0 @@
-import sys
-import unittest
-import importlib_resources as resources
-import pathlib
-
-from . import data01
-from . import util
-from importlib import import_module
-
-
-class ResourceTests:
-    # Subclasses are expected to set the `data` attribute.
-
-    def test_is_file_exists(self):
-        target = resources.files(self.data) / 'binary.file'
-        self.assertTrue(target.is_file())
-
-    def test_is_file_missing(self):
-        target = resources.files(self.data) / 'not-a-file'
-        self.assertFalse(target.is_file())
-
-    def test_is_dir(self):
-        target = resources.files(self.data) / 'subdirectory'
-        self.assertFalse(target.is_file())
-        self.assertTrue(target.is_dir())
-
-
-class ResourceDiskTests(ResourceTests, unittest.TestCase):
-    def setUp(self):
-        self.data = data01
-
-
-class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase):
-    pass
-
-
-def names(traversable):
-    return {item.name for item in traversable.iterdir()}
-
-
-class ResourceLoaderTests(unittest.TestCase):
-    def test_resource_contents(self):
-        package = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C']
-        )
-        self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'})
-
-    def test_is_file(self):
-        package = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
-        )
-        self.assertTrue(resources.files(package).joinpath('B').is_file())
-
-    def test_is_dir(self):
-        package = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
-        )
-        self.assertTrue(resources.files(package).joinpath('D').is_dir())
-
-    def test_resource_missing(self):
-        package = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
-        )
-        self.assertFalse(resources.files(package).joinpath('Z').is_file())
-
-
-class ResourceCornerCaseTests(unittest.TestCase):
-    def test_package_has_no_reader_fallback(self):
-        """
-        Test oddball packages which:
-        1. do not have a ResourceReader as a loader;
-        2. are not on the file system;
-        3. are not in a zip file.
-        """
-        module = util.create_package(
-            file=data01, path=data01.__file__, contents=['A', 'B', 'C']
-        )
-        # Give the module a dummy loader.
-        module.__loader__ = object()
-        # Give the module a dummy origin.
-        module.__file__ = '/path/which/shall/not/be/named'
-        module.__spec__.loader = module.__loader__
-        module.__spec__.origin = module.__file__
-        self.assertFalse(resources.files(module).joinpath('A').is_file())
-
-
-class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
-    ZIP_MODULE = 'data01'
-
-    def test_is_submodule_resource(self):
-        submodule = import_module('data01.subdirectory')
-        self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())
-
-    def test_read_submodule_resource_by_name(self):
-        self.assertTrue(
-            resources.files('data01.subdirectory').joinpath('binary.file').is_file()
-        )
-
-    def test_submodule_contents(self):
-        submodule = import_module('data01.subdirectory')
-        self.assertEqual(
-            names(resources.files(submodule)), {'__init__.py', 'binary.file'}
-        )
-
-    def test_submodule_contents_by_name(self):
-        self.assertEqual(
-            names(resources.files('data01.subdirectory')),
-            {'__init__.py', 'binary.file'},
-        )
-
-    def test_as_file_directory(self):
-        with resources.as_file(resources.files('data01')) as data:
-            assert data.name == 'data01'
-            assert data.is_dir()
-            assert data.joinpath('subdirectory').is_dir()
-            assert len(list(data.iterdir()))
-        assert not data.parent.exists()
-
-
-class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
-    ZIP_MODULE = 'data02'
-
-    def test_unrelated_contents(self):
-        """
-        Test that a zip with two unrelated subpackages returns
-        distinct resources. Ref python/importlib_resources#44.
-        """
-        self.assertEqual(
-            names(resources.files('data02.one')),
-            {'__init__.py', 'resource1.txt'},
-        )
-        self.assertEqual(
-            names(resources.files('data02.two')),
-            {'__init__.py', 'resource2.txt'},
-        )
-
-
-class DeletingZipsTest(util.ZipSetupBase, unittest.TestCase):
-    """Having accessed resources in a zip file should not keep an open
-    reference to the zip.
-    """
-
-    def test_iterdir_does_not_keep_open(self):
-        [item.name for item in resources.files('data01').iterdir()]
-
-    def test_is_file_does_not_keep_open(self):
-        resources.files('data01').joinpath('binary.file').is_file()
-
-    def test_is_file_failure_does_not_keep_open(self):
-        resources.files('data01').joinpath('not-present').is_file()
-
-    @unittest.skip("Desired but not supported.")
-    def test_as_file_does_not_keep_open(self):  # pragma: no cover
-        resources.as_file(resources.files('data01') / 'binary.file')
-
-    def test_entered_path_does_not_keep_open(self):
-        """
-        Mimic what certifi does on import to make its bundle
-        available for the process duration.
-        """
-        resources.as_file(resources.files('data01') / 'binary.file').__enter__()
-
-    def test_read_binary_does_not_keep_open(self):
-        resources.files('data01').joinpath('binary.file').read_bytes()
-
-    def test_read_text_does_not_keep_open(self):
-        resources.files('data01').joinpath('utf-8.file').read_text(encoding='utf-8')
-
-
-class ResourceFromNamespaceTests:
-    def test_is_submodule_resource(self):
-        self.assertTrue(
-            resources.files(import_module('namespacedata01'))
-            .joinpath('binary.file')
-            .is_file()
-        )
-
-    def test_read_submodule_resource_by_name(self):
-        self.assertTrue(
-            resources.files('namespacedata01').joinpath('binary.file').is_file()
-        )
-
-    def test_submodule_contents(self):
-        contents = names(resources.files(import_module('namespacedata01')))
-        try:
-            contents.remove('__pycache__')
-        except KeyError:
-            pass
-        self.assertEqual(
-            contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
-        )
-
-    def test_submodule_contents_by_name(self):
-        contents = names(resources.files('namespacedata01'))
-        try:
-            contents.remove('__pycache__')
-        except KeyError:
-            pass
-        self.assertEqual(
-            contents, {'subdirectory', 'binary.file', 'utf-8.file', 'utf-16.file'}
-        )
-
-    def test_submodule_sub_contents(self):
-        contents = names(resources.files(import_module('namespacedata01.subdirectory')))
-        try:
-            contents.remove('__pycache__')
-        except KeyError:
-            pass
-        self.assertEqual(contents, {'binary.file'})
-
-    def test_submodule_sub_contents_by_name(self):
-        contents = names(resources.files('namespacedata01.subdirectory'))
-        try:
-            contents.remove('__pycache__')
-        except KeyError:
-            pass
-        self.assertEqual(contents, {'binary.file'})
-
-
-class ResourceFromNamespaceDiskTests(ResourceFromNamespaceTests, unittest.TestCase):
-    site_dir = str(pathlib.Path(__file__).parent)
-
-    @classmethod
-    def setUpClass(cls):
-        sys.path.append(cls.site_dir)
-
-    @classmethod
-    def tearDownClass(cls):
-        sys.path.remove(cls.site_dir)
-
-
-class ResourceFromNamespaceZipTests(
-    util.ZipSetupBase,
-    ResourceFromNamespaceTests,
-    unittest.TestCase,
-):
-    ZIP_MODULE = 'namespacedata01'
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/setuptools/_vendor/importlib_resources/tests/util.py b/setuptools/_vendor/importlib_resources/tests/util.py
deleted file mode 100644
index fb827d2fa0..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/util.py
+++ /dev/null
@@ -1,164 +0,0 @@
-import abc
-import importlib
-import io
-import sys
-import types
-import pathlib
-import contextlib
-
-from . import data01
-from ..abc import ResourceReader
-from .compat.py39 import import_helper, os_helper
-from . import zip as zip_
-
-
-from importlib.machinery import ModuleSpec
-
-
-class Reader(ResourceReader):
-    def __init__(self, **kwargs):
-        vars(self).update(kwargs)
-
-    def get_resource_reader(self, package):
-        return self
-
-    def open_resource(self, path):
-        self._path = path
-        if isinstance(self.file, Exception):
-            raise self.file
-        return self.file
-
-    def resource_path(self, path_):
-        self._path = path_
-        if isinstance(self.path, Exception):
-            raise self.path
-        return self.path
-
-    def is_resource(self, path_):
-        self._path = path_
-        if isinstance(self.path, Exception):
-            raise self.path
-
-        def part(entry):
-            return entry.split('/')
-
-        return any(
-            len(parts) == 1 and parts[0] == path_ for parts in map(part, self._contents)
-        )
-
-    def contents(self):
-        if isinstance(self.path, Exception):
-            raise self.path
-        yield from self._contents
-
-
-def create_package_from_loader(loader, is_package=True):
-    name = 'testingpackage'
-    module = types.ModuleType(name)
-    spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package)
-    module.__spec__ = spec
-    module.__loader__ = loader
-    return module
-
-
-def create_package(file=None, path=None, is_package=True, contents=()):
-    return create_package_from_loader(
-        Reader(file=file, path=path, _contents=contents),
-        is_package,
-    )
-
-
-class CommonTests(metaclass=abc.ABCMeta):
-    """
-    Tests shared by test_open, test_path, and test_read.
-    """
-
-    @abc.abstractmethod
-    def execute(self, package, path):
-        """
-        Call the pertinent legacy API function (e.g. open_text, path)
-        on package and path.
-        """
-
-    def test_package_name(self):
-        """
-        Passing in the package name should succeed.
-        """
-        self.execute(data01.__name__, 'utf-8.file')
-
-    def test_package_object(self):
-        """
-        Passing in the package itself should succeed.
-        """
-        self.execute(data01, 'utf-8.file')
-
-    def test_string_path(self):
-        """
-        Passing in a string for the path should succeed.
-        """
-        path = 'utf-8.file'
-        self.execute(data01, path)
-
-    def test_pathlib_path(self):
-        """
-        Passing in a pathlib.PurePath object for the path should succeed.
-        """
-        path = pathlib.PurePath('utf-8.file')
-        self.execute(data01, path)
-
-    def test_importing_module_as_side_effect(self):
-        """
-        The anchor package can already be imported.
-        """
-        del sys.modules[data01.__name__]
-        self.execute(data01.__name__, 'utf-8.file')
-
-    def test_missing_path(self):
-        """
-        Attempting to open or read or request the path for a
-        non-existent path should succeed if open_resource
-        can return a viable data stream.
-        """
-        bytes_data = io.BytesIO(b'Hello, world!')
-        package = create_package(file=bytes_data, path=FileNotFoundError())
-        self.execute(package, 'utf-8.file')
-        self.assertEqual(package.__loader__._path, 'utf-8.file')
-
-    def test_extant_path(self):
-        # Attempting to open or read or request the path when the
-        # path does exist should still succeed. Does not assert
-        # anything about the result.
-        bytes_data = io.BytesIO(b'Hello, world!')
-        # any path that exists
-        path = __file__
-        package = create_package(file=bytes_data, path=path)
-        self.execute(package, 'utf-8.file')
-        self.assertEqual(package.__loader__._path, 'utf-8.file')
-
-    def test_useless_loader(self):
-        package = create_package(file=FileNotFoundError(), path=FileNotFoundError())
-        with self.assertRaises(FileNotFoundError):
-            self.execute(package, 'utf-8.file')
-
-
-class ZipSetupBase:
-    ZIP_MODULE = 'data01'
-
-    def setUp(self):
-        self.fixtures = contextlib.ExitStack()
-        self.addCleanup(self.fixtures.close)
-
-        self.fixtures.enter_context(import_helper.isolated_modules())
-
-        temp_dir = self.fixtures.enter_context(os_helper.temp_dir())
-        modules = pathlib.Path(temp_dir) / 'zipped modules.zip'
-        src_path = pathlib.Path(__file__).parent.joinpath(self.ZIP_MODULE)
-        self.fixtures.enter_context(
-            import_helper.DirsOnSysPath(str(zip_.make_zip_file(src_path, modules)))
-        )
-
-        self.data = importlib.import_module(self.ZIP_MODULE)
-
-
-class ZipSetup(ZipSetupBase):
-    pass
diff --git a/setuptools/_vendor/importlib_resources/tests/zip.py b/setuptools/_vendor/importlib_resources/tests/zip.py
deleted file mode 100644
index 962195a901..0000000000
--- a/setuptools/_vendor/importlib_resources/tests/zip.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""
-Generate zip test data files.
-"""
-
-import contextlib
-import os
-import pathlib
-import zipfile
-
-import zipp
-
-
-def make_zip_file(src, dst):
-    """
-    Zip the files in src into a new zipfile at dst.
-    """
-    with zipfile.ZipFile(dst, 'w') as zf:
-        for src_path, rel in walk(src):
-            dst_name = src.name / pathlib.PurePosixPath(rel.as_posix())
-            zf.write(src_path, dst_name)
-        zipp.CompleteDirs.inject(zf)
-    return dst
-
-
-def walk(datapath):
-    for dirpath, dirnames, filenames in os.walk(datapath):
-        with contextlib.suppress(ValueError):
-            dirnames.remove('__pycache__')
-        for filename in filenames:
-            res = pathlib.Path(dirpath) / filename
-            rel = res.relative_to(datapath)
-            yield res, rel

From 0fe2e06d91efc25ccddcdbd43ae42b4d94351e95 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 30 Oct 2024 17:34:06 +0000
Subject: [PATCH 1275/1761] Add news fragments

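Document the changes from issue 4718 as towncrier news fragments. For
context, the stdlib API that replaces the vendored importlib_resources
is used along these lines (an illustrative sketch; 'mypkg' and
'data.txt' are hypothetical names, not part of this change):

    from importlib.resources import files  # stdlib since Python 3.9

    # Traverse a package's resources and read a bundled data file.
    text = files('mypkg').joinpath('data.txt').read_text(encoding='utf-8')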
---
 newsfragments/4718.feature.1.rst | 1 +
 newsfragments/4718.feature.2.rst | 4 ++++
 newsfragments/4718.feature.3.rst | 3 +++
 3 files changed, 8 insertions(+)
 create mode 100644 newsfragments/4718.feature.1.rst
 create mode 100644 newsfragments/4718.feature.2.rst
 create mode 100644 newsfragments/4718.feature.3.rst

diff --git a/newsfragments/4718.feature.1.rst b/newsfragments/4718.feature.1.rst
new file mode 100644
index 0000000000..f171cdde84
--- /dev/null
+++ b/newsfragments/4718.feature.1.rst
@@ -0,0 +1 @@
+Require Python 3.9 or later.
diff --git a/newsfragments/4718.feature.2.rst b/newsfragments/4718.feature.2.rst
new file mode 100644
index 0000000000..7c32c13b61
--- /dev/null
+++ b/newsfragments/4718.feature.2.rst
@@ -0,0 +1,4 @@
+Remove dependency on ``importlib_resources``
+and the vendored copy of the library.
+Instead, ``setuptools`` consistently relies on stdlib's ``importlib.resources``
+(available on Python 3.9+).
diff --git a/newsfragments/4718.feature.3.rst b/newsfragments/4718.feature.3.rst
new file mode 100644
index 0000000000..9c80da9064
--- /dev/null
+++ b/newsfragments/4718.feature.3.rst
@@ -0,0 +1,3 @@
+Setuptools' ``bdist_wheel`` implementation no longer produces wheels with
+the ``m`` SOABI flag (pymalloc-related).
+This flag was removed on Python 3.8+ (see :obj:`sys.abiflags`).

From 9fc7bbc8c36b71ef2db07cccb4a2b161006f37e7 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 31 Oct 2024 10:25:17 -0400
Subject: [PATCH 1276/1761] Ruff: lint pytest

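Enable Ruff's flake8-pytest-style ("PT") rules and fix the resulting
violations. A minimal sketch of the style these rules push toward
(hypothetical example, not taken from this diff):

    import pytest

    # PT001 (with the ruff >= 0.7 defaults): no parentheses on bare fixtures.
    @pytest.fixture
    def target(tmp_path):
        return tmp_path / "out"

    # Prefer pytest.raises with a match= pattern over try/except plus a
    # manual assertion on str(exc), as done in test_packageindex below.
    def test_missing(target):
        with pytest.raises(FileNotFoundError, match="out"):
            target.read_text()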
---
 pkg_resources/tests/test_resources.py         |  4 ++--
 pyproject.toml                                |  4 ++--
 ruff.toml                                     |  9 ++++++++
 setuptools/command/install_lib.py             |  4 +++-
 .../tests/config/test_apply_pyprojecttoml.py  |  2 +-
 .../integration/test_pip_install_sdist.py     | 19 ++++++++-------
 setuptools/tests/test_bdist_egg.py            | 10 +++++---
 setuptools/tests/test_easy_install.py         | 15 ++++++------
 setuptools/tests/test_packageindex.py         | 23 +++++--------------
 setuptools/tests/test_setuptools.py           |  5 ++--
 setuptools/tests/test_wheel.py                |  2 +-
 11 files changed, 51 insertions(+), 46 deletions(-)

diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index 8bd8a1766a..a3097a85dc 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -700,7 +700,7 @@ def test_spaces_between_multiple_versions(self):
         (req,) = parse_requirements('foo >= 1.0, < 3')
 
     @pytest.mark.parametrize(
-        ['lower', 'upper'],
+        'lower, upper',
         [
             ('1.2-rc1', '1.2rc1'),
             ('0.4', '0.4.0'),
@@ -724,7 +724,7 @@ def testVersionEquality(self, lower, upper):
         """
 
     @pytest.mark.parametrize(
-        ['lower', 'upper'],
+        'lower, upper',
         [
             ('2.1', '2.1.1'),
             ('2a1', '2b0'),
diff --git a/pyproject.toml b/pyproject.toml
index 300438232a..ca09a084d8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -114,8 +114,8 @@ check = [
 
 	# local
 
-	# workaround for businho/pytest-ruff#28
-	"ruff >= 0.5.2; sys_platform != 'cygwin'",
+	# changed defaults for PT001 and PT023 astral-sh/ruff#13292
+	"ruff >= 0.7.0; sys_platform != 'cygwin'",
 ]
 
 cover = [
diff --git a/ruff.toml b/ruff.toml
index 2746ba7d3f..c6fbc173aa 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -20,6 +20,7 @@ extend-select = [
 	"I", # isort
 	"PERF", # Perflint
 	"PGH", # pygrep-hooks (blanket-* rules)
+	"PT", # flake8-pytest-style
 	"PYI", # flake8-pyi
 	"RUF10", # unused-noqa & redirected-noqa
 	"TRY", # tryceratops
@@ -28,6 +29,11 @@ extend-select = [
 ]
 ignore = [
 	"PERF203", # try-except-in-loop, micro-optimisation with many false-positive. Worth checking but don't block CI
+	"PT004", # deprecated https://github.com/astral-sh/ruff/issues/8796#issuecomment-2057143531
+	"PT005", # deprecated https://github.com/astral-sh/ruff/issues/8796#issuecomment-2057143531
+	"PT007", # temporarily disabled, TODO: configure and standardize to preference
+	"PT011", # temporarily disabled, TODO: tighten expected error 
+	"PT012", # pytest-raises-with-multiple-statements, avoid extra dummy methods for a few lines, sometimes we explicitly assert in case of no error
 	"TRY003", # raise-vanilla-args, avoid multitude of exception classes
 	"TRY301", # raise-within-try, it's handy
 	"UP015", # redundant-open-modes, explicit is preferred
@@ -75,6 +81,9 @@ sections.delayed = ["distutils"]
 [lint.flake8-annotations]
 ignore-fully-untyped = true
 
+[lint.flake8-pytest-style]
+parametrize-names-type = "csv"
+
 [format]
 # Enable preview to get hugged parenthesis unwrapping and other nice surprises
 # See https://github.com/jaraco/skeleton/pull/133#issuecomment-2239538373
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
index 530a8b51d1..adebdbe688 100644
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -101,7 +101,9 @@ def copy_tree(
         preserve_symlinks: bool = False,  # type: ignore[override]
         level: object = 1,
     ) -> list[str]:
-        assert preserve_mode and preserve_times and not preserve_symlinks
+        assert preserve_mode
+        assert preserve_times
+        assert not preserve_symlinks
         exclude = self.get_exclusions()
 
         if not exclude:
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index d18ba6e129..e8b9eaf770 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -209,7 +209,7 @@ def test_no_explicit_content_type_for_missing_extension(tmp_path):
 
 
 @pytest.mark.parametrize(
-    ('pyproject_text', 'expected_maintainers_meta_value'),
+    'pyproject_text, expected_maintainers_meta_value',
     (
         pytest.param(
             PEP621_EXAMPLE,
diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py
index e5203d18f9..2e06f5e3cb 100644
--- a/setuptools/tests/integration/test_pip_install_sdist.py
+++ b/setuptools/tests/integration/test_pip_install_sdist.py
@@ -104,23 +104,22 @@ def venv_python(tmp_path):
 
 
 @pytest.fixture(autouse=True)
-def _prepare(tmp_path, venv_python, monkeypatch, request):
+def _prepare(tmp_path, venv_python, monkeypatch):
     download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path))
     os.makedirs(download_path, exist_ok=True)
 
     # Environment vars used for building some of the packages
     monkeypatch.setenv("USE_MYPYC", "1")
 
-    def _debug_info():
-        # Let's provide the maximum amount of information possible in the case
-        # it is necessary to debug the tests directly from the CI logs.
-        print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
-        print("Temporary directory:")
-        map(print, tmp_path.glob("*"))
-        print("Virtual environment:")
-        run([venv_python, "-m", "pip", "freeze"])
+    yield
 
-    request.addfinalizer(_debug_info)
+    # Provide as much information as possible in case it is necessary
+    # to debug the tests directly from the CI logs.
+    print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
+    print("Temporary directory:")
+    list(map(print, tmp_path.glob("*")))  # consume the lazy map so paths print
+    print("Virtual environment:")
+    run([venv_python, "-m", "pip", "freeze"])
 
 
 @pytest.mark.parametrize('package, version', EXAMPLES)
diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py
index 12ed4d328c..036167dd95 100644
--- a/setuptools/tests/test_bdist_egg.py
+++ b/setuptools/tests/test_bdist_egg.py
@@ -17,7 +17,7 @@
 """
 
 
-@pytest.fixture(scope='function')
+@pytest.fixture
 def setup_context(tmpdir):
     with (tmpdir / 'setup.py').open('w') as f:
         f.write(SETUP_PY)
@@ -28,7 +28,9 @@ def setup_context(tmpdir):
 
 
 class Test:
-    def test_bdist_egg(self, setup_context, user_override):
+    @pytest.mark.usefixtures("user_override")
+    @pytest.mark.usefixtures("setup_context")
+    def test_bdist_egg(self):
         dist = Distribution(
             dict(
                 script_name='setup.py',
@@ -50,7 +52,9 @@ def test_bdist_egg(self, setup_context, user_override):
         os.environ.get('PYTHONDONTWRITEBYTECODE', False),
         reason="Byte code disabled",
     )
-    def test_exclude_source_files(self, setup_context, user_override):
+    @pytest.mark.usefixtures("user_override")
+    @pytest.mark.usefixtures("setup_context")
+    def test_exclude_source_files(self):
         dist = Distribution(
             dict(
                 script_name='setup.py',
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 60a31e3bf2..fdd3cda3a1 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -407,14 +407,14 @@ def test_multiproc_atexit(self):
         logging.basicConfig(level=logging.INFO, stream=sys.stderr)
         log.info('this should not break')
 
-    @pytest.fixture()
+    @pytest.fixture
     def foo_package(self, tmpdir):
         egg_file = tmpdir / 'foo-1.0.egg-info'
         with egg_file.open('w') as f:
             f.write('Name: foo\n')
         return str(tmpdir)
 
-    @pytest.fixture()
+    @pytest.fixture
     def install_target(self, tmpdir):
         target = str(tmpdir)
         with mock.patch('sys.path', sys.path + [target]):
@@ -472,6 +472,12 @@ def distutils_package():
         yield
 
 
+@pytest.mark.usefixtures("distutils_package")
+class TestDistutilsPackage:
+    def test_bdist_egg_available_on_distutils_pkg(self):
+        run_setup('setup.py', ['bdist_egg'])
+
+
 @pytest.fixture
 def mock_index():
     # set up a server which will simulate an alternate package index.
@@ -484,11 +490,6 @@ def mock_index():
     return p_index
 
 
-class TestDistutilsPackage:
-    def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
-        run_setup('setup.py', ['bdist_egg'])
-
-
 class TestInstallRequires:
     def test_setup_install_includes_dependencies(self, tmp_path, mock_index):
         """
diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
index e1f8458674..2a6e5917a8 100644
--- a/setuptools/tests/test_packageindex.py
+++ b/setuptools/tests/test_packageindex.py
@@ -1,4 +1,5 @@
 import http.client
+import re
 import urllib.error
 import urllib.request
 from inspect import cleandoc
@@ -24,11 +25,8 @@ def test_regex(self):
     def test_bad_url_bad_port(self):
         index = setuptools.package_index.PackageIndex()
         url = 'http://127.0.0.1:0/nonesuch/test_package_index'
-        try:
+        with pytest.raises(Exception, match=re.escape(url)):
             v = index.open_url(url)
-        except Exception as exc:
-            assert url in str(exc)
-        else:
             assert isinstance(v, urllib.error.HTTPError)
 
     def test_bad_url_typo(self):
@@ -37,15 +35,10 @@ def test_bad_url_typo(self):
         # in its home URL
         index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
 
-        url = (
-            'url:%20https://svn.plone.org/svn'
-            '/collective/inquant.contentmirror.plone/trunk'
-        )
-        try:
+        url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
+
+        with pytest.raises(Exception, match=re.escape(url)):
             v = index.open_url(url)
-        except Exception as exc:
-            assert url in str(exc)
-        else:
             assert isinstance(v, urllib.error.HTTPError)
 
     def test_bad_url_bad_status_line(self):
@@ -56,12 +49,8 @@ def _urlopen(*args):
 
         index.opener = _urlopen
         url = 'http://example.com'
-        try:
+        with pytest.raises(Exception, match=r'line'):
             index.open_url(url)
-        except Exception as exc:
-            assert 'line' in str(exc)
-        else:
-            raise AssertionError('Should have raise here!')
 
     def test_bad_url_double_scheme(self):
         """
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index 72b8ed47f1..ab027b9d09 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -21,7 +21,7 @@
 
 @pytest.fixture(autouse=True)
 def isolated_dir(tmpdir_cwd):
-    yield
+    return
 
 
 def makeSetup(**args):
@@ -257,7 +257,8 @@ def can_symlink(tmpdir):
     os.remove(link_fn)
 
 
-def test_findall_missing_symlink(tmpdir, can_symlink):
+@pytest.mark.usefixtures("can_symlink")
+def test_findall_missing_symlink(tmpdir):
     with tmpdir.as_cwd():
         os.symlink('foo', 'bar')
         found = list(setuptools.findall())
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index 5724c6eabc..4125e609f9 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -78,7 +78,7 @@
 
 
 @pytest.mark.parametrize(
-    ('filename', 'info'), WHEEL_INFO_TESTS, ids=[t[0] for t in WHEEL_INFO_TESTS]
+    'filename, info', WHEEL_INFO_TESTS, ids=[t[0] for t in WHEEL_INFO_TESTS]
 )
 def test_wheel_info(filename, info):
     if inspect.isclass(info):

From 879ad202a7b02f00a6123aab18ee62dba67c8dc0 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 4 Nov 2024 13:16:13 -0500
Subject: [PATCH 1277/1761] Set Ruff's flake8-pytest-style
 parametrize-names-type to its default (tuple)

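Drop the parametrize-names-type = "csv" override from ruff.toml and
follow Ruff's default for PT006, which wants argnames given as a tuple.
Roughly (illustrative example only):

    import pytest

    # csv style (the previous project preference):
    @pytest.mark.parametrize('lower, upper', [('2.1', '2.1.1')])
    def test_csv(lower, upper):
        assert lower < upper

    # tuple style (the PT006 default, adopted here):
    @pytest.mark.parametrize(('lower', 'upper'), [('2.1', '2.1.1')])
    def test_tuple(lower, upper):
        assert lower < upper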
---
 pkg_resources/tests/test_pkg_resources.py             |  8 ++++----
 pkg_resources/tests/test_resources.py                 |  4 ++--
 ruff.toml                                             |  3 ---
 setuptools/tests/config/test_apply_pyprojecttoml.py   | 11 ++++++-----
 setuptools/tests/config/test_expand.py                |  6 +++---
 setuptools/tests/config/test_pyprojecttoml.py         |  4 ++--
 .../tests/integration/test_pip_install_sdist.py       |  2 +-
 setuptools/tests/test_bdist_wheel.py                  |  6 +++---
 setuptools/tests/test_build_meta.py                   |  2 +-
 setuptools/tests/test_config_discovery.py             |  8 ++++----
 setuptools/tests/test_core_metadata.py                |  6 +++---
 setuptools/tests/test_dist.py                         |  8 ++++----
 setuptools/tests/test_dist_info.py                    |  2 +-
 setuptools/tests/test_distutils_adoption.py           |  4 ++--
 setuptools/tests/test_egg_info.py                     |  6 +++---
 setuptools/tests/test_glob.py                         |  2 +-
 setuptools/tests/test_logging.py                      |  2 +-
 setuptools/tests/test_wheel.py                        |  2 +-
 18 files changed, 42 insertions(+), 44 deletions(-)

diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 18adb3c9d2..2e5526d1aa 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -236,7 +236,7 @@ def make_distribution_no_version(tmpdir, basename):
 
 
 @pytest.mark.parametrize(
-    'suffix, expected_filename, expected_dist_type',
+    ("suffix", "expected_filename", "expected_dist_type"),
     [
         ('egg-info', 'PKG-INFO', EggInfoDistribution),
         ('dist-info', 'METADATA', DistInfoDistribution),
@@ -376,7 +376,7 @@ def test_version_resolved_from_egg_info(self, env):
         assert dist.version == version
 
     @pytest.mark.parametrize(
-        'unnormalized, normalized',
+        ("unnormalized", "normalized"),
         [
             ('foo', 'foo'),
             ('foo/', 'foo'),
@@ -398,7 +398,7 @@ def test_normalize_path_trailing_sep(self, unnormalized, normalized):
         reason='Testing case-insensitive filesystems.',
     )
     @pytest.mark.parametrize(
-        'unnormalized, normalized',
+        ("unnormalized", "normalized"),
         [
             ('MiXeD/CasE', 'mixed/case'),
         ],
@@ -414,7 +414,7 @@ def test_normalize_path_normcase(self, unnormalized, normalized):
         reason='Testing systems using backslashes as path separators.',
     )
     @pytest.mark.parametrize(
-        'unnormalized, expected',
+        ("unnormalized", "expected"),
         [
             ('forward/slash', 'forward\\slash'),
             ('forward/slash/', 'forward\\slash'),
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index a3097a85dc..be1c1213af 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -700,7 +700,7 @@ def test_spaces_between_multiple_versions(self):
         (req,) = parse_requirements('foo >= 1.0, < 3')
 
     @pytest.mark.parametrize(
-        'lower, upper',
+        ("lower", "upper"),
         [
             ('1.2-rc1', '1.2rc1'),
             ('0.4', '0.4.0'),
@@ -724,7 +724,7 @@ def testVersionEquality(self, lower, upper):
         """
 
     @pytest.mark.parametrize(
-        'lower, upper',
+        ("lower", "upper"),
         [
             ('2.1', '2.1.1'),
             ('2a1', '2b0'),
diff --git a/ruff.toml b/ruff.toml
index c6fbc173aa..9f20438943 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -81,9 +81,6 @@ sections.delayed = ["distutils"]
 [lint.flake8-annotations]
 ignore-fully-untyped = true
 
-[lint.flake8-pytest-style]
-parametrize-names-type = "csv"
-
 [format]
 # Enable preview to get hugged parenthesis unwrapping and other nice surprises
 # See https://github.com/jaraco/skeleton/pull/133#issuecomment-2239538373
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index e8b9eaf770..da43bb6a2b 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -183,7 +183,7 @@ def test_pep621_example(tmp_path):
 
 
 @pytest.mark.parametrize(
-    "readme, ctype",
+    ("readme", "ctype"),
     [
         ("Readme.txt", "text/plain"),
         ("readme.md", "text/markdown"),
@@ -209,7 +209,7 @@ def test_no_explicit_content_type_for_missing_extension(tmp_path):
 
 
 @pytest.mark.parametrize(
-    'pyproject_text, expected_maintainers_meta_value',
+    ("pyproject_text", "expected_maintainers_meta_value"),
     (
         pytest.param(
             PEP621_EXAMPLE,
@@ -370,7 +370,7 @@ def pyproject(self, tmp_path, dynamic, extra_content=""):
         return file
 
     @pytest.mark.parametrize(
-        "attr, field, value",
+        ("attr", "field", "value"),
         [
             ("classifiers", "classifiers", ["Private :: Classifier"]),
             ("entry_points", "scripts", {"console_scripts": ["foobar=foobar:main"]}),
@@ -395,7 +395,7 @@ def test_not_listed_in_dynamic(self, tmp_path, attr, field, value):
         assert not dist_value
 
     @pytest.mark.parametrize(
-        "attr, field, value",
+        ("attr", "field", "value"),
         [
             ("install_requires", "dependencies", []),
             ("extras_require", "optional-dependencies", {}),
@@ -442,7 +442,8 @@ def test_optional_dependencies_dont_remove_env_markers(self, tmp_path):
         assert ':python_version < "3.7"' in reqs
 
     @pytest.mark.parametrize(
-        "field,group", [("scripts", "console_scripts"), ("gui-scripts", "gui_scripts")]
+        ("field", "group"),
+        [("scripts", "console_scripts"), ("gui-scripts", "gui_scripts")],
     )
     @pytest.mark.filterwarnings("error")
     def test_scripts_dont_require_dynamic_entry_points(self, tmp_path, field, group):
diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
index f51d2008d0..fa9122b32c 100644
--- a/setuptools/tests/config/test_expand.py
+++ b/setuptools/tests/config/test_expand.py
@@ -141,7 +141,7 @@ def test_import_order(self, tmp_path):
 
 
 @pytest.mark.parametrize(
-    'package_dir, file, module, return_value',
+    ("package_dir", "file", "module", "return_value"),
     [
         ({"": "src"}, "src/pkg/main.py", "pkg.main", 42),
         ({"pkg": "lib"}, "lib/main.py", "pkg.main", 13),
@@ -158,7 +158,7 @@ def test_resolve_class(monkeypatch, tmp_path, package_dir, file, module, return_
 
 
 @pytest.mark.parametrize(
-    'args, pkgs',
+    ("args", "pkgs"),
     [
         ({"where": ["."], "namespaces": False}, {"pkg", "other"}),
         ({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}),
@@ -192,7 +192,7 @@ def test_find_packages(tmp_path, args, pkgs):
 
 
 @pytest.mark.parametrize(
-    "files, where, expected_package_dir",
+    ("files", "where", "expected_package_dir"),
     [
         (["pkg1/__init__.py", "pkg1/other.py"], ["."], {}),
         (["pkg1/__init__.py", "pkg2/__init__.py"], ["."], {}),
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
index bb15ce10de..db40fcd23d 100644
--- a/setuptools/tests/config/test_pyprojecttoml.py
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -149,7 +149,7 @@ def test_read_configuration(tmp_path):
 
 
 @pytest.mark.parametrize(
-    "pkg_root, opts",
+    ("pkg_root", "opts"),
     [
         (".", {}),
         ("src", {}),
@@ -308,7 +308,7 @@ def test_ignore_unrelated_config(tmp_path, example):
 
 
 @pytest.mark.parametrize(
-    "example, error_msg",
+    ("example", "error_msg"),
     [
         (
             """
diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py
index 2e06f5e3cb..b2f1c08003 100644
--- a/setuptools/tests/integration/test_pip_install_sdist.py
+++ b/setuptools/tests/integration/test_pip_install_sdist.py
@@ -122,7 +122,7 @@ def _prepare(tmp_path, venv_python, monkeypatch):
     run([venv_python, "-m", "pip", "freeze"])
 
 
-@pytest.mark.parametrize('package, version', EXAMPLES)
+@pytest.mark.parametrize(("package", "version"), EXAMPLES)
 @pytest.mark.uses_network
 def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel):
     venv_pip = (venv_python, "-m", "pip")
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 141ef716ab..3dfa9c850c 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -322,7 +322,7 @@ def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
 
 
 @pytest.mark.parametrize(
-    "config_file, config",
+    ("config_file", "config"),
     [
         ("setup.cfg", "[metadata]\nlicense_files=licenses/*\n  LICENSE"),
         ("setup.cfg", "[metadata]\nlicense_files=licenses/*, LICENSE"),
@@ -434,7 +434,7 @@ def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
 
 
 @pytest.mark.parametrize(
-    "option, compress_type",
+    ("option", "compress_type"),
     list(bdist_wheel.supported_compressions.items()),
     ids=list(bdist_wheel.supported_compressions),
 )
@@ -589,7 +589,7 @@ def test_data_dir_with_tag_build(monkeypatch, tmp_path):
 
 
 @pytest.mark.parametrize(
-    "reported,expected",
+    ("reported", "expected"),
     [("linux-x86_64", "linux_i686"), ("linux-aarch64", "linux_armv7l")],
 )
 @pytest.mark.skipif(
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 53095afb52..121f409057 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -737,7 +737,7 @@ def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
         self._assert_link_tree(next(Path("build").glob("__editable__.*")))
 
     @pytest.mark.parametrize(
-        'setup_literal, requirements',
+        ("setup_literal", "requirements"),
         [
             ("'foo'", ['foo']),
             ("['foo']", ['foo']),
diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py
index 6b60c7e7f7..b5df8203cd 100644
--- a/setuptools/tests/test_config_discovery.py
+++ b/setuptools/tests/test_config_discovery.py
@@ -161,7 +161,7 @@ def test_project(self, tmp_path, circumstance):
     }
 
     @pytest.mark.parametrize(
-        "config_file, param, circumstance",
+        ("config_file", "param", "circumstance"),
         product(
             ["setup.cfg", "setup.py", "pyproject.toml"],
             ["packages", "py_modules"],
@@ -191,7 +191,7 @@ def test_purposefully_empty(self, tmp_path, config_file, param, circumstance):
         assert getattr(dist, other) is None
 
     @pytest.mark.parametrize(
-        "extra_files, pkgs",
+        ("extra_files", "pkgs"),
         [
             (["venv/bin/simulate_venv"], {"pkg"}),
             (["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}),
@@ -284,7 +284,7 @@ def test_build_with_discovered_name(self, tmp_path):
 
 class TestWithAttrDirective:
     @pytest.mark.parametrize(
-        "folder, opts",
+        ("folder", "opts"),
         [
             ("src", {}),
             ("lib", {"packages": "find:", "packages.find": {"where": "lib"}}),
@@ -446,7 +446,7 @@ def _simulate_package_with_data_files(self, tmp_path, src_root):
     """
 
     @pytest.mark.parametrize(
-        "src_root, files",
+        ("src_root", "files"),
         [
             (".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
             (".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index 51d4a10810..2b585a9c91 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -23,7 +23,7 @@
 
 
 @pytest.mark.parametrize(
-    'content, result',
+    ("content", "result"),
     (
         pytest.param(
             "Just a single line",
@@ -154,7 +154,7 @@ def __read_test_cases():
     ]
 
 
-@pytest.mark.parametrize('name,attrs', __read_test_cases())
+@pytest.mark.parametrize(("name", "attrs"), __read_test_cases())
 def test_read_metadata(name, attrs):
     dist = Distribution(attrs)
     metadata_out = dist.metadata
@@ -263,7 +263,7 @@ def merge_dicts(d1, d2):
     ]
 
 
-@pytest.mark.parametrize('name,attrs', __maintainer_test_cases())
+@pytest.mark.parametrize(("name", "attrs"), __maintainer_test_cases())
 def test_maintainer_author(name, attrs, tmpdir):
     tested_keys = {
         'author': 'Author',
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 1bc4923032..be953079f5 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -129,7 +129,7 @@ def test_provides_extras_deterministic_order():
 )
 
 
-@pytest.mark.parametrize('package_data, expected_message', CHECK_PACKAGE_DATA_TESTS)
+@pytest.mark.parametrize(('package_data', 'expected_message'), CHECK_PACKAGE_DATA_TESTS)
 def test_check_package_data(package_data, expected_message):
     if expected_message is None:
         assert check_package_data(None, 'package_data', package_data) is None
@@ -156,7 +156,7 @@ def test_metadata_name():
 
 
 @pytest.mark.parametrize(
-    "dist_name, py_module",
+    ('dist_name', 'py_module'),
     [
         ("my.pkg", "my_pkg"),
         ("my-pkg", "my_pkg"),
@@ -187,7 +187,7 @@ def test_dist_default_py_modules(tmp_path, dist_name, py_module):
 
 
 @pytest.mark.parametrize(
-    "dist_name, package_dir, package_files, packages",
+    ('dist_name', 'package_dir', 'package_files', 'packages'),
     [
         ("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
         ("my-pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
@@ -241,7 +241,7 @@ def test_dist_default_packages(
 
 
 @pytest.mark.parametrize(
-    "dist_name, package_dir, package_files",
+    ('dist_name', 'package_dir', 'package_files'),
     [
         ("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]),
         ("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]),
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
index 6e109c9db2..31e6e95a68 100644
--- a/setuptools/tests/test_dist_info.py
+++ b/setuptools/tests/test_dist_info.py
@@ -169,7 +169,7 @@ class TestWheelCompatibility:
 
     @pytest.mark.parametrize("name", "my-proj my_proj my.proj My.Proj".split())
     @pytest.mark.parametrize("version", ["0.42.13"])
-    @pytest.mark.parametrize("suffix, cfg", EGG_INFO_OPTS)
+    @pytest.mark.parametrize(("suffix", "cfg"), EGG_INFO_OPTS)
     def test_dist_info_is_the_same_as_in_wheel(
         self, name, version, tmp_path, suffix, cfg
     ):
diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py
index aabfdd283a..f99a588499 100644
--- a/setuptools/tests/test_distutils_adoption.py
+++ b/setuptools/tests/test_distutils_adoption.py
@@ -116,7 +116,7 @@ def test_distutils_has_origin():
 
 @pytest.mark.usefixtures("tmpdir_cwd")
 @pytest.mark.parametrize(
-    "distutils_version, imported_module",
+    ('distutils_version', 'imported_module'),
     [
         pytest.param("stdlib", "dir_util", marks=skip_without_stdlib_distutils),
         pytest.param("stdlib", "file_util", marks=skip_without_stdlib_distutils),
@@ -175,7 +175,7 @@ def test_log_module_is_not_duplicated_on_import(distutils_version, venv):
 
 @pytest.mark.usefixtures("tmpdir_cwd")
 @pytest.mark.parametrize(
-    "distutils_version, imported_module",
+    ('distutils_version', 'imported_module'),
     [
         ("local", "distutils"),
         # Unfortunately we still get ._distutils.errors.DistutilsError with SETUPTOOLS_USE_DISTUTILS=stdlib
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index f82d931eba..7b7433f0ba 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -543,7 +543,7 @@ def test_doesnt_provides_extra(self, tmpdir_cwd, env):
         assert 'Provides-Extra:' not in pkg_info_text
 
     @pytest.mark.parametrize(
-        "files, license_in_sources",
+        ('files', 'license_in_sources'),
         [
             (
                 {
@@ -627,7 +627,7 @@ def test_setup_cfg_license_file(self, tmpdir_cwd, env, files, license_in_sources
             assert 'INVALID_LICENSE' not in sources_text
 
     @pytest.mark.parametrize(
-        "files, incl_licenses, excl_licenses",
+        ('files', 'incl_licenses', 'excl_licenses'),
         [
             (
                 {
@@ -839,7 +839,7 @@ def test_setup_cfg_license_files(
             assert sources_lines.count(lf) == 0
 
     @pytest.mark.parametrize(
-        "files, incl_licenses, excl_licenses",
+        ('files', 'incl_licenses', 'excl_licenses'),
         [
             (
                 {
diff --git a/setuptools/tests/test_glob.py b/setuptools/tests/test_glob.py
index bdccba6c24..8d225a4461 100644
--- a/setuptools/tests/test_glob.py
+++ b/setuptools/tests/test_glob.py
@@ -5,7 +5,7 @@
 
 
 @pytest.mark.parametrize(
-    'tree, pattern, matches',
+    ('tree', 'pattern', 'matches'),
     (
         ('', b'', []),
         ('', '', []),
diff --git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py
index e01df7277c..ea58001e93 100644
--- a/setuptools/tests/test_logging.py
+++ b/setuptools/tests/test_logging.py
@@ -19,7 +19,7 @@
 
 
 @pytest.mark.parametrize(
-    "flag, expected_level", [("--dry-run", "INFO"), ("--verbose", "DEBUG")]
+    ('flag', 'expected_level'), [("--dry-run", "INFO"), ("--verbose", "DEBUG")]
 )
 def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level):
     """Make sure the correct verbosity level is set (issue #3038)"""
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index 4125e609f9..5724c6eabc 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -78,7 +78,7 @@
 
 
 @pytest.mark.parametrize(
-    'filename, info', WHEEL_INFO_TESTS, ids=[t[0] for t in WHEEL_INFO_TESTS]
+    ('filename', 'info'), WHEEL_INFO_TESTS, ids=[t[0] for t in WHEEL_INFO_TESTS]
 )
 def test_wheel_info(filename, info):
     if inspect.isclass(info):

From fcdf55e588af53d0fd1fa0d77cc6c9aeaa34802a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 31 Oct 2024 17:31:25 +0000
Subject: [PATCH 1278/1761] Remove unused vars or mark them explicitly

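Where tuple unpacking binds names that are never read, either drop the
assignment entirely or prefix the name with an underscore so linters
and readers can tell the discard is deliberate. The pattern, in short:

    # Before: 'sep' and 'rest' are bound but never used afterwards.
    name, sep, rest = 'pkg-1.0.tar.gz'.partition('-')

    # After: the underscore prefix marks the values as intentionally unused.
    name, _sep, _rest = 'pkg-1.0.tar.gz'.partition('-')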
---
 pkg_resources/__init__.py                 |  2 +-
 pkg_resources/tests/test_resources.py     |  6 +++---
 setup.py                                  |  2 +-
 setuptools/__init__.py                    |  2 +-
 setuptools/command/bdist_egg.py           |  2 +-
 setuptools/command/build_ext.py           |  2 +-
 setuptools/command/easy_install.py        |  2 +-
 setuptools/command/install_lib.py         |  2 +-
 setuptools/config/setupcfg.py             |  2 +-
 setuptools/depends.py                     |  4 ++--
 setuptools/dist.py                        |  8 ++++----
 setuptools/package_index.py               | 10 +++++-----
 setuptools/sandbox.py                     |  2 +-
 setuptools/tests/config/test_setupcfg.py  | 20 +++++++++-----------
 setuptools/tests/test_easy_install.py     |  8 ++++----
 setuptools/tests/test_egg_info.py         | 16 ++++++++--------
 setuptools/tests/test_manifest.py         |  2 +-
 setuptools/tests/test_setuptools.py       |  2 +-
 setuptools/tests/test_windows_wrappers.py |  6 +++---
 19 files changed, 49 insertions(+), 51 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 6418eb2ab0..74b0465bfa 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -2064,7 +2064,7 @@ def _extract_resource(self, manager: ResourceManager, zip_path) -> str:  # noqa:
             # return the extracted directory name
             return os.path.dirname(last)
 
-        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+        timestamp, _size = self._get_date_and_size(self.zipinfo[zip_path])
 
         if not WRITE_SUPPORT:
             raise OSError(
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index be1c1213af..00fb5b3290 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -693,11 +693,11 @@ def test_requirements_with_markers(self):
         ) != Requirement.parse("name[foo,bar]==1.0;python_version=='3.6'")
 
     def test_local_version(self):
-        (req,) = parse_requirements('foo==1.0+org1')
+        parse_requirements('foo==1.0+org1')
 
     def test_spaces_between_multiple_versions(self):
-        (req,) = parse_requirements('foo>=1.0, <3')
-        (req,) = parse_requirements('foo >= 1.0, < 3')
+        parse_requirements('foo>=1.0, <3')
+        parse_requirements('foo >= 1.0, < 3')
 
     @pytest.mark.parametrize(
         ("lower", "upper"),
diff --git a/setup.py b/setup.py
index 1cd9e36c15..c28a14e722 100755
--- a/setup.py
+++ b/setup.py
@@ -32,7 +32,7 @@ def pypi_link(pkg_filename):
     dependency link for PyPI.
     """
     root = 'https://files.pythonhosted.org/packages/source'
-    name, sep, rest = pkg_filename.partition('-')
+    name, _sep, _rest = pkg_filename.partition('-')
     parts = root, name[0], name, pkg_filename
     return '/'.join(parts)
 
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 783e5e7591..b330e9aa3e 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -70,7 +70,7 @@ def __init__(self, attrs: Mapping[str, object]):
         def _get_project_config_files(self, filenames=None):
             """Ignore ``pyproject.toml``, they are not related to setup_requires"""
             try:
-                cfg, toml = super()._split_standard_project_metadata(filenames)
+                cfg, _toml = super()._split_standard_project_metadata(filenames)
             except Exception:
                 return filenames, ()
             return cfg, ()
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index a623155f22..04d7e945bc 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -181,7 +181,7 @@ def run(self):  # noqa: C901  # is too complex (14)  # FIXME
         self.stubs = []
         to_compile = []
         for p, ext_name in enumerate(ext_outputs):
-            filename, ext = os.path.splitext(ext_name)
+            filename, _ext = os.path.splitext(ext_name)
             pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py')
             self.stubs.append(pyfile)
             log.info("creating stub loader for %s", ext_name)
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index 00a38ba3c9..f098246b9b 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -460,7 +460,7 @@ def link_shared_object(
 
         assert output_dir is None  # distutils build_ext doesn't pass this
         output_dir, filename = os.path.split(output_libname)
-        basename, ext = os.path.splitext(filename)
+        basename, _ext = os.path.splitext(filename)
         if self.library_filename("x").startswith('lib'):
             # strip 'lib' prefix; this is kludgy if some platform uses
             # a different prefix
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index e1cd32cf0d..37f01ed46e 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1547,7 +1547,7 @@ def extract_wininst_cfg(dist_filename):
             return None
         f.seek(prepended - 12)
 
-        tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
+        tag, cfglen, _bmlen = struct.unpack("<iii", f.read(12))
         if tag not in (0x1234567A, 0x1234567B):
             return None  # not a valid tag
         f.seek(prepended - (12 + cfglen))
diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py
--- a/setuptools/command/install_lib.py
+++ b/setuptools/command/install_lib.py
@@ -72,7 +72,7 @@ def _all_packages(pkg_name):
         """
         while pkg_name:
             yield pkg_name
-            pkg_name, sep, child = pkg_name.rpartition('.')
+            pkg_name, _sep, _child = pkg_name.rpartition('.')
 
     def _get_SVEM_NSPs(self):
         """
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -283,7 +283,7 @@ def _section_options(
         cls, options: AllCommandOptions
     ) -> Iterator[tuple[str, SingleCommandOptions]]:
         for full_name, value in options.items():
-            pre, sep, name = full_name.partition(cls.section_prefix)
+            pre, _sep, name = full_name.partition(cls.section_prefix)
             if pre:
                 continue
             yield name.lstrip('.'), value
diff --git a/setuptools/depends.py b/setuptools/depends.py
index 0011c05da1..16a67c0306 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -70,7 +70,7 @@ def get_version(
 
         if self.attribute is None:
             try:
-                f, p, i = find_module(self.module, paths)
+                f, _p, _i = find_module(self.module, paths)
             except ImportError:
                 return None
             if f:
@@ -122,7 +122,7 @@ def get_module_constant(
         constant.  Otherwise, return 'default'."""
 
         try:
-            f, path, (suffix, mode, kind) = info = find_module(module, paths)
+            f, path, (_suffix, _mode, kind) = info = find_module(module, paths)
         except ImportError:
             # Module doesn't exist
             return None
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 333bc29290..6062c4f868 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -113,7 +113,7 @@ def check_nsp(dist, attr, value):
                 "Distribution contains no modules or packages for "
                 + "namespace package %r" % nsp
             )
-        parent, sep, child = nsp.rpartition('.')
+        parent, _sep, _child = nsp.rpartition('.')
         if parent and parent not in ns_packages:
             distutils.log.warn(
                 "WARNING: %r is declared as a package namespace, but %r"
@@ -145,7 +145,7 @@ def check_extras(dist, attr, value):
 
 
 def _check_extra(extra, reqs):
-    name, sep, marker = extra.partition(':')
+    _name, _sep, marker = extra.partition(':')
     try:
         _check_marker(marker)
     except InvalidMarker:
@@ -886,7 +886,7 @@ def _parse_command_opts(self, parser, args):
         command = args[0]
         aliases = self.get_option_dict('aliases')
         while command in aliases:
-            src, alias = aliases[command]
+            _src, alias = aliases[command]
             del aliases[command]  # ensure each alias can expand only once!
             import shlex
 
@@ -951,7 +951,7 @@ def iter_distribution_names(self):
 
         for ext in self.ext_modules or ():
             if isinstance(ext, tuple):
-                name, buildinfo = ext
+                name, _buildinfo = ext
             else:
                 name = ext.name
             if name.endswith('module'):
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index ca1dbf0cb6..ad95d3573a 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -104,7 +104,7 @@ def parse_bdist_wininst(name):
 
 def egg_info_for_url(url):
     parts = urllib.parse.urlparse(url)
-    scheme, server, path, parameters, query, fragment = parts
+    _scheme, server, path, _parameters, _query, fragment = parts
     base = urllib.parse.unquote(path.split('/')[-1])
     if server == 'sourceforge.net' and base == 'download':  # XXX Yuck
         base = urllib.parse.unquote(path.split('/')[-2])
@@ -431,7 +431,7 @@ def scan_egg_link(self, path, entry) -> None:
             # format is not recognized; punt
             return
 
-        egg_path, setup_path = lines
+        egg_path, _setup_path = lines
 
         for dist in find_distributions(os.path.join(path, egg_path)):
             dist.location = os.path.join(path, *lines)
@@ -820,7 +820,7 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
     def _download_url(self, url, tmpdir):
         # Determine download filename
         #
-        name, fragment = egg_info_for_url(url)
+        name, _fragment = egg_info_for_url(url)
         if name:
             while '..' in name:
                 name = name.replace('..', '.').replace('\\', '_')
@@ -848,7 +848,7 @@ def _resolve_vcs(url):
         >>> rvcs('http://foo/bar')
         """
         scheme = urllib.parse.urlsplit(url).scheme
-        pre, sep, post = scheme.partition('+')
+        pre, sep, _post = scheme.partition('+')
         # svn and git have their own protocol; hg does not
         allowed = set(['svn', 'git'] + ['hg'] * bool(sep))
         return next(iter({pre} & allowed), None)
@@ -1121,7 +1121,7 @@ def fix_sf_url(url):
 
 def local_open(url):
     """Read a local path, with special support for directories"""
-    scheme, server, path, param, query, frag = urllib.parse.urlparse(url)
+    _scheme, _server, path, _param, _query, _frag = urllib.parse.urlparse(url)
     filename = urllib.request.url2pathname(path)
     if os.path.isfile(filename):
         return urllib.request.urlopen(url)
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index c774a02f8c..b6c63f03d4 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -150,7 +150,7 @@ def resume(self):
         if '_saved' not in vars(self):
             return
 
-        type, exc = map(pickle.loads, self._saved)
+        _type, exc = map(pickle.loads, self._saved)
         raise exc.with_traceback(self._tb)
 
 
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 9f225416c0..b31118c0fb 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -288,9 +288,7 @@ def test_version(self, tmpdir):
             assert dist.metadata.version == '2016.11.26'
 
     def test_version_file(self, tmpdir):
-        _, config = fake_env(
-            tmpdir, '[metadata]\nversion = file: fake_package/version.txt\n'
-        )
+        fake_env(tmpdir, '[metadata]\nversion = file: fake_package/version.txt\n')
         tmpdir.join('fake_package', 'version.txt').write('1.2.3\n')
 
         with get_dist(tmpdir) as dist:
@@ -302,7 +300,7 @@ def test_version_file(self, tmpdir):
                 dist.metadata.version
 
     def test_version_with_package_dir_simple(self, tmpdir):
-        _, config = fake_env(
+        fake_env(
             tmpdir,
             '[metadata]\n'
             'version = attr: fake_package_simple.VERSION\n'
@@ -316,7 +314,7 @@ def test_version_with_package_dir_simple(self, tmpdir):
             assert dist.metadata.version == '1.2.3'
 
     def test_version_with_package_dir_rename(self, tmpdir):
-        _, config = fake_env(
+        fake_env(
             tmpdir,
             '[metadata]\n'
             'version = attr: fake_package_rename.VERSION\n'
@@ -330,7 +328,7 @@ def test_version_with_package_dir_rename(self, tmpdir):
             assert dist.metadata.version == '1.2.3'
 
     def test_version_with_package_dir_complex(self, tmpdir):
-        _, config = fake_env(
+        fake_env(
             tmpdir,
             '[metadata]\n'
             'version = attr: fake_package_complex.VERSION\n'
@@ -585,8 +583,8 @@ def test_packages(self, tmpdir):
     def test_find_directive(self, tmpdir):
         dir_package, config = fake_env(tmpdir, '[options]\npackages = find:\n')
 
-        dir_sub_one, _ = make_package_dir('sub_one', dir_package)
-        dir_sub_two, _ = make_package_dir('sub_two', dir_package)
+        make_package_dir('sub_one', dir_package)
+        make_package_dir('sub_two', dir_package)
 
         with get_dist(tmpdir) as dist:
             assert set(dist.packages) == set([
@@ -624,8 +622,8 @@ def test_find_namespace_directive(self, tmpdir):
             tmpdir, '[options]\npackages = find_namespace:\n'
         )
 
-        dir_sub_one, _ = make_package_dir('sub_one', dir_package)
-        dir_sub_two, _ = make_package_dir('sub_two', dir_package, ns=True)
+        make_package_dir('sub_one', dir_package)
+        make_package_dir('sub_two', dir_package, ns=True)
 
         with get_dist(tmpdir) as dist:
             assert set(dist.packages) == {
@@ -779,7 +777,7 @@ def test_entry_points(self, tmpdir):
             assert dist.entry_points == expected
 
     def test_case_sensitive_entry_points(self, tmpdir):
-        _, config = fake_env(
+        fake_env(
             tmpdir,
             '[options.entry_points]\n'
             'GROUP1 = point1 = pack.module:func, '
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index fdd3cda3a1..586324be37 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -72,7 +72,7 @@ def test_get_script_args(self):
         header = ei.CommandSpec.best().from_environment().as_header()
         dist = FakeDist()
         args = next(ei.ScriptWriter.get_args(dist))
-        name, script = itertools.islice(args, 2)
+        _name, script = itertools.islice(args, 2)
         assert script.startswith(header)
         assert "'spec'" in script
         assert "'console_scripts'" in script
@@ -651,7 +651,7 @@ def test_setup_requires_overrides_version_conflict(self, use_setup_cfg):
                     temp_dir, use_setup_cfg=use_setup_cfg
                 )
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
-                with contexts.quiet() as (stdout, stderr):
+                with contexts.quiet() as (stdout, _stderr):
                     # Don't even need to install the package, just
                     # running the setup.py at all is sufficient
                     run_setup(test_setup_py, ['--name'])
@@ -713,7 +713,7 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
 
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
 
-                with contexts.quiet() as (stdout, stderr):
+                with contexts.quiet() as (stdout, _stderr):
                     try:
                         # Don't even need to install the package, just
                         # running the setup.py at all is sufficient
@@ -765,7 +765,7 @@ def make_dependency_sdist(dist_path, distname, version):
                     use_setup_cfg=use_setup_cfg + ('version',),
                 )
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
-                with contexts.quiet() as (stdout, stderr):
+                with contexts.quiet() as (stdout, _stderr):
                     run_setup(test_setup_py, ['--version'])
                 lines = stdout.readlines()
                 assert len(lines) > 0
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 7b7433f0ba..a68ecaba4c 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -512,7 +512,7 @@ def test_provides_extra(self, tmpdir_cwd, env):
         environ = os.environ.copy().update(
             HOME=env.paths['home'],
         )
-        code, data = environment.run_setup_py(
+        environment.run_setup_py(
             cmd=['egg_info'],
             pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
             data_stream=1,
@@ -1060,7 +1060,7 @@ def test_license_file_attr_pkg_info(self, tmpdir_cwd, env):
     def test_metadata_version(self, tmpdir_cwd, env):
         """Make sure latest metadata version is used by default."""
         self._setup_script_with_requires("")
-        code, data = environment.run_setup_py(
+        environment.run_setup_py(
             cmd=['egg_info'],
             pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
             data_stream=1,
@@ -1085,7 +1085,7 @@ def test_long_description_content_type(self, tmpdir_cwd, env):
         environ = os.environ.copy().update(
             HOME=env.paths['home'],
         )
-        code, data = environment.run_setup_py(
+        environment.run_setup_py(
             cmd=['egg_info'],
             pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
             data_stream=1,
@@ -1107,7 +1107,7 @@ def test_long_description(self, tmpdir_cwd, env):
             "long_description='This is a long description\\nover multiple lines',"
             "long_description_content_type='text/markdown',"
         )
-        code, data = environment.run_setup_py(
+        environment.run_setup_py(
             cmd=['egg_info'],
             pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
             data_stream=1,
@@ -1138,7 +1138,7 @@ def test_project_urls(self, tmpdir_cwd, env):
         environ = os.environ.copy().update(
             HOME=env.paths['home'],
         )
-        code, data = environment.run_setup_py(
+        environment.run_setup_py(
             cmd=['egg_info'],
             pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
             data_stream=1,
@@ -1156,7 +1156,7 @@ def test_project_urls(self, tmpdir_cwd, env):
     def test_license(self, tmpdir_cwd, env):
         """Test single line license."""
         self._setup_script_with_requires("license='MIT',")
-        code, data = environment.run_setup_py(
+        environment.run_setup_py(
             cmd=['egg_info'],
             pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
             data_stream=1,
@@ -1171,7 +1171,7 @@ def test_license_escape(self, tmpdir_cwd, env):
         self._setup_script_with_requires(
             "license='This is a long license text \\nover multiple lines',"
         )
-        code, data = environment.run_setup_py(
+        environment.run_setup_py(
             cmd=['egg_info'],
             pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
             data_stream=1,
@@ -1189,7 +1189,7 @@ def test_python_requires_egg_info(self, tmpdir_cwd, env):
         environ = os.environ.copy().update(
             HOME=env.paths['home'],
         )
-        code, data = environment.run_setup_py(
+        environment.run_setup_py(
             cmd=['egg_info'],
             pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
             data_stream=1,
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
index 444c7a3689..ad988d2c5f 100644
--- a/setuptools/tests/test_manifest.py
+++ b/setuptools/tests/test_manifest.py
@@ -369,7 +369,7 @@ def assertWarnings(self, caplog):
     def make_files(self, files):
         for file in files:
             file = os.path.join(self.temp_dir, file)
-            dirname, basename = os.path.split(file)
+            dirname, _basename = os.path.split(file)
             os.makedirs(dirname, exist_ok=True)
             touch(file)
 
diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py
index ab027b9d09..1d56e1a8a4 100644
--- a/setuptools/tests/test_setuptools.py
+++ b/setuptools/tests/test_setuptools.py
@@ -74,7 +74,7 @@ def testFindModule(self):
             dep.find_module('no-such.-thing')
         with pytest.raises(ImportError):
             dep.find_module('setuptools.non-existent')
-        f, p, i = dep.find_module('setuptools.tests')
+        f, _p, _i = dep.find_module('setuptools.tests')
         f.close()
 
     @needs_bytecode
diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
index 4a112baf75..e46bb6abc0 100644
--- a/setuptools/tests/test_windows_wrappers.py
+++ b/setuptools/tests/test_windows_wrappers.py
@@ -116,7 +116,7 @@ def test_basic(self, tmpdir):
             text=True,
             encoding="utf-8",
         )
-        stdout, stderr = proc.communicate('hello\nworld\n')
+        stdout, _stderr = proc.communicate('hello\nworld\n')
         actual = stdout.replace('\r\n', '\n')
         expected = textwrap.dedent(
             r"""
@@ -153,7 +153,7 @@ def test_symlink(self, tmpdir):
             text=True,
             encoding="utf-8",
         )
-        stdout, stderr = proc.communicate('hello\nworld\n')
+        stdout, _stderr = proc.communicate('hello\nworld\n')
         actual = stdout.replace('\r\n', '\n')
         expected = textwrap.dedent(
             r"""
@@ -201,7 +201,7 @@ def test_with_options(self, tmpdir):
             text=True,
             encoding="utf-8",
         )
-        stdout, stderr = proc.communicate()
+        stdout, _stderr = proc.communicate()
         actual = stdout.replace('\r\n', '\n')
         expected = textwrap.dedent(
             r"""

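The hunks in this patch all serve the same lint idea: a result that is never read is either not bound at all (the bare environment.run_setup_py(...) calls) or bound to an underscore-prefixed name (_basename, _p, _stderr) so readers and linters both know the value is intentionally discarded. A minimal sketch of the convention, separate from the patch itself:

    import subprocess
    import sys

    proc = subprocess.Popen(
        [sys.executable, '-c', 'print("hello")'],
        stdout=subprocess.PIPE,
        text=True,
    )
    # The underscore prefix marks stderr as intentionally unused.
    stdout, _stderr = proc.communicate()
    assert stdout.strip() == 'hello'

    # When no result is needed at all, skip the assignment entirely.
    subprocess.run([sys.executable, '-c', 'pass'], check=True)
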
From 8c78ed81105cf446cd7e513c350a61852591a58c Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 6 Nov 2024 13:32:46 -0500
Subject: [PATCH 1279/1761] ANN204 (missing return type for special methods)
 autofixes

---
 _distutils_hack/__init__.py             | 2 +-
 pkg_resources/tests/test_resources.py   | 2 +-
 pkg_resources/tests/test_working_set.py | 2 +-
 setuptools/__init__.py                  | 4 ++--
 setuptools/build_meta.py                | 2 +-
 setuptools/command/develop.py           | 2 +-
 setuptools/command/easy_install.py      | 2 +-
 setuptools/command/editable_wheel.py    | 6 +++---
 setuptools/command/egg_info.py          | 4 +++-
 setuptools/config/expand.py             | 6 +++---
 setuptools/config/pyprojecttoml.py      | 4 ++--
 setuptools/config/setupcfg.py           | 6 +++---
 setuptools/depends.py                   | 2 +-
 setuptools/discovery.py                 | 4 ++--
 setuptools/extension.py                 | 2 +-
 setuptools/msvc.py                      | 8 ++++----
 setuptools/package_index.py             | 2 +-
 setuptools/sandbox.py                   | 4 ++--
 setuptools/wheel.py                     | 2 +-
 19 files changed, 34 insertions(+), 32 deletions(-)

diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
index 6ee497b38f..94f71b99ec 100644
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -90,7 +90,7 @@ def do_override():
 
 
 class _TrivialRe:
-    def __init__(self, *patterns):
+    def __init__(self, *patterns) -> None:
         self._patterns = patterns
 
     def match(self, string):
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index 00fb5b3290..70436c0881 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -32,7 +32,7 @@ def pairwise(iterable):
 class Metadata(pkg_resources.EmptyProvider):
     """Mock object to return metadata as if from an on-disk distribution"""
 
-    def __init__(self, *pairs):
+    def __init__(self, *pairs) -> None:
         self.metadata = dict(pairs)
 
     def has_metadata(self, name) -> bool:
diff --git a/pkg_resources/tests/test_working_set.py b/pkg_resources/tests/test_working_set.py
index 0537bb69a4..7bb84952c1 100644
--- a/pkg_resources/tests/test_working_set.py
+++ b/pkg_resources/tests/test_working_set.py
@@ -56,7 +56,7 @@ def parse_distributions(s):
 
 
 class FakeInstaller:
-    def __init__(self, installable_dists):
+    def __init__(self, installable_dists) -> None:
         self._installable_dists = installable_dists
 
     def __call__(self, req):
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index b330e9aa3e..4f5c01708a 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -60,7 +60,7 @@ class MinimalDistribution(distutils.core.Distribution):
         fetch_build_eggs interface.
         """
 
-        def __init__(self, attrs: Mapping[str, object]):
+        def __init__(self, attrs: Mapping[str, object]) -> None:
             _incl = 'dependency_links', 'setup_requires'
             filtered = {k: attrs[k] for k in set(_incl) & set(attrs)}
             super().__init__(filtered)
@@ -167,7 +167,7 @@ class Command(_Command):
     command_consumes_arguments = False
     distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
 
-    def __init__(self, dist: Distribution, **kw):
+    def __init__(self, dist: Distribution, **kw) -> None:
         """
         Construct the command for dist, updating
         vars(self) with any keyword parameters.
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 3500b2dfd1..23471accb6 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -72,7 +72,7 @@
 
 
 class SetupRequirementsError(BaseException):
-    def __init__(self, specifiers):
+    def __init__(self, specifiers) -> None:
         self.specifiers = specifiers
 
 
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 19908d5d70..7eee29d491 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -185,7 +185,7 @@ class VersionlessRequirement:
     'foo'
     """
 
-    def __init__(self, dist):
+    def __init__(self, dist) -> None:
         self.__dist = dist
 
     def __getattr__(self, name: str):
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 37f01ed46e..21e6f008d7 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1612,7 +1612,7 @@ def get_exe_prefixes(exe_filename):
 class PthDistributions(Environment):
     """A .pth file with Distribution paths in it"""
 
-    def __init__(self, filename, sitedirs=()):
+    def __init__(self, filename, sitedirs=()) -> None:
         self.filename = filename
         self.sitedirs = list(map(normalize_path, sitedirs))
         self.basedir = normalize_path(os.path.dirname(self.filename))
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index cae643618c..30570e092a 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -392,7 +392,7 @@ def __exit__(
 
 
 class _StaticPth:
-    def __init__(self, dist: Distribution, name: str, path_entries: list[Path]):
+    def __init__(self, dist: Distribution, name: str, path_entries: list[Path]) -> None:
         self.dist = dist
         self.name = name
         self.path_entries = path_entries
@@ -436,7 +436,7 @@ def __init__(
         name: str,
         auxiliary_dir: StrPath,
         build_lib: StrPath,
-    ):
+    ) -> None:
         self.auxiliary_dir = Path(auxiliary_dir)
         self.build_lib = Path(build_lib).resolve()
         self._file = dist.get_command_obj("build_py").copy_file
@@ -496,7 +496,7 @@ def __exit__(
 
 
 class _TopLevelFinder:
-    def __init__(self, dist: Distribution, name: str):
+    def __init__(self, dist: Distribution, name: str) -> None:
         self.dist = dist
         self.name = name
 
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 56ddb100e0..1411ac3d89 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -324,7 +324,9 @@ def find_sources(self) -> None:
 class FileList(_FileList):
     # Implementations of the various MANIFEST.in commands
 
-    def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir: bool = False):
+    def __init__(
+        self, warn=None, debug_print=None, ignore_egg_info_dir: bool = False
+    ) -> None:
         super().__init__(warn, debug_print)
         self.ignore_egg_info_dir = ignore_egg_info_dir
 
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 2a85363d8d..54c68bed4f 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -52,7 +52,7 @@
 class StaticModule:
     """Proxy to a module object that avoids executing arbitrary code."""
 
-    def __init__(self, name: str, spec: ModuleSpec):
+    def __init__(self, name: str, spec: ModuleSpec) -> None:
         module = ast.parse(pathlib.Path(spec.origin).read_bytes())  # type: ignore[arg-type] # Let it raise an error on None
         vars(self).update(locals())
         del self.self
@@ -383,7 +383,7 @@ class EnsurePackagesDiscovered:
     and those might not have been processed yet.
     """
 
-    def __init__(self, distribution: Distribution):
+    def __init__(self, distribution: Distribution) -> None:
         self._dist = distribution
         self._called = False
 
@@ -430,7 +430,7 @@ class LazyMappingProxy(Mapping[_K, _V_co]):
     'other value'
     """
 
-    def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V_co]]):
+    def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V_co]]) -> None:
         self._obtain = obtain_mapping_value
         self._value: Mapping[_K, _V_co] | None = None
 
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 688040ab50..15b0baa18e 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -176,7 +176,7 @@ def __init__(
         root_dir: StrPath | None = None,
         ignore_option_errors: bool = False,
         dist: Distribution | None = None,
-    ):
+    ) -> None:
         self.config = config
         self.root_dir = root_dir or os.getcwd()
         self.project_cfg = config.get("project", {})
@@ -413,7 +413,7 @@ def _ignore_errors(ignore_option_errors: bool):
 class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
     def __init__(
         self, distribution: Distribution, project_cfg: dict, setuptools_cfg: dict
-    ):
+    ) -> None:
         super().__init__(distribution)
         self._project_cfg = project_cfg
         self._setuptools_cfg = setuptools_cfg
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 1f3a180bbe..b35d0b00cd 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -247,7 +247,7 @@ def __init__(
         options: AllCommandOptions,
         ignore_option_errors,
         ensure_discovered: expand.EnsurePackagesDiscovered,
-    ):
+    ) -> None:
         self.ignore_option_errors = ignore_option_errors
         self.target_obj: Target = target_obj
         self.sections = dict(self._section_options(options))
@@ -540,7 +540,7 @@ def __init__(
         ensure_discovered: expand.EnsurePackagesDiscovered,
         package_dir: dict | None = None,
         root_dir: StrPath | None = os.curdir,
-    ):
+    ) -> None:
         super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
         self.package_dir = package_dir
         self.root_dir = root_dir
@@ -602,7 +602,7 @@ def __init__(
         options: AllCommandOptions,
         ignore_option_errors: bool,
         ensure_discovered: expand.EnsurePackagesDiscovered,
-    ):
+    ) -> None:
         super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
         self.root_dir = target_obj.src_root
         self.package_dir: dict[str, str] = {}  # To be filled by `find_packages`
diff --git a/setuptools/depends.py b/setuptools/depends.py
index 16a67c0306..1be71857a5 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -28,7 +28,7 @@ def __init__(
         homepage: str = '',
         attribute=None,
         format=None,
-    ):
+    ) -> None:
         if format is None and requested_version is not None:
             format = Version
 
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
index eb8969146f..c888399185 100644
--- a/setuptools/discovery.py
+++ b/setuptools/discovery.py
@@ -71,7 +71,7 @@ class _Filter:
     the input matches at least one of the patterns.
     """
 
-    def __init__(self, *patterns: str):
+    def __init__(self, *patterns: str) -> None:
         self._patterns = dict.fromkeys(patterns)
 
     def __call__(self, item: str) -> bool:
@@ -300,7 +300,7 @@ class ConfigDiscovery:
     (from other metadata/options, the file system or conventions)
     """
 
-    def __init__(self, distribution: Distribution):
+    def __init__(self, distribution: Distribution) -> None:
         self.dist = distribution
         self._called = False
         self._disabled = False
diff --git a/setuptools/extension.py b/setuptools/extension.py
index 79bcc203e9..76e03d9d6b 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -147,7 +147,7 @@ def __init__(
         *args,
         py_limited_api: bool = False,
         **kw,
-    ):
+    ) -> None:
         # The *args is needed for compatibility as calls may use positional
         # arguments. py_limited_api may be set only via keyword.
         self.py_limited_api = py_limited_api
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 55f5090878..6492d3be9d 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -50,7 +50,7 @@ class PlatformInfo:
 
     current_cpu = environ.get('processor_architecture', '').lower()
 
-    def __init__(self, arch):
+    def __init__(self, arch) -> None:
         self.arch = arch.lower().replace('x64', 'amd64')
 
     @property
@@ -176,7 +176,7 @@ class RegistryInfo:
         winreg.HKEY_CLASSES_ROOT,
     )
 
-    def __init__(self, platform_info):
+    def __init__(self, platform_info) -> None:
         self.pi = platform_info
 
     @property
@@ -366,7 +366,7 @@ class SystemInfo:
     ProgramFiles = environ.get('ProgramFiles', '')
     ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles)
 
-    def __init__(self, registry_info, vc_ver=None):
+    def __init__(self, registry_info, vc_ver=None) -> None:
         self.ri = registry_info
         self.pi = self.ri.pi
 
@@ -911,7 +911,7 @@ class EnvironmentInfo:
     # Variables and properties in this class use originals CamelCase variables
     # names from Microsoft source files for more easy comparison.
 
-    def __init__(self, arch, vc_ver=None, vc_min_ver=0):
+    def __init__(self, arch, vc_ver=None, vc_min_ver=0) -> None:
         self.pi = PlatformInfo(arch)
         self.ri = RegistryInfo(self.pi)
         self.si = SystemInfo(self.ri, vc_ver)
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index ad95d3573a..97806e8ff8 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -270,7 +270,7 @@ class HashChecker(ContentChecker):
         r'(?P<expected>[a-f0-9]+)'
     )
 
-    def __init__(self, hash_name, expected):
+    def __init__(self, hash_name, expected) -> None:
         self.hash_name = hash_name
         self.hash = hashlib.new(hash_name)
         self.expected = expected
diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py
index b6c63f03d4..2d84242d66 100644
--- a/setuptools/sandbox.py
+++ b/setuptools/sandbox.py
@@ -279,7 +279,7 @@ class AbstractSandbox:
 
     _active = False
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._attrs = [
             name
             for name in dir(_os)
@@ -442,7 +442,7 @@ class DirectorySandbox(AbstractSandbox):
     _exception_patterns: list[str | re.Pattern] = []
     "exempt writing to paths that match the pattern"
 
-    def __init__(self, sandbox, exceptions=_EXCEPTIONS):
+    def __init__(self, sandbox, exceptions=_EXCEPTIONS) -> None:
         self._sandbox = os.path.normcase(os.path.realpath(sandbox))
         self._prefix = os.path.join(self._sandbox, '')
         self._exceptions = [
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index a6b3720033..fb19f1a65a 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -76,7 +76,7 @@ def disable_info_traces():
 
 
 class Wheel:
-    def __init__(self, filename):
+    def __init__(self, filename) -> None:
         match = WHEEL_NAME(os.path.basename(filename))
         if match is None:
             raise ValueError('invalid wheel name: %r' % filename)

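ANN204 is the flake8-annotations/Ruff rule "missing return type annotation for special method"; since __init__ can only return None, the autofix is always safe. The annotation also has a practical effect: mypy skips the body of a completely unannotated function by default, so adding -> None is what brings these constructors under type checking. A small sketch with a hypothetical class (not from the patch):

    class Point:
        # With "-> None" (ANN204's autofix) this method is annotated,
        # so type checkers such as mypy check its body instead of
        # skipping it as an untyped function.
        def __init__(self, x: int, y: int) -> None:
            self.x = x
            self.y = y
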
From 68cb6ba936c788f0dc372e8877aab23d35b83320 Mon Sep 17 00:00:00 2001
From: Agriya Khetarpal <74401230+agriyakhetarpal@users.noreply.github.com>
Date: Thu, 7 Nov 2024 03:11:59 +0530
Subject: [PATCH 1280/1761] Accept an `Iterable` at runtime for `Extension`

---
 distutils/extension.py | 35 ++++++++++++++++++++++++-----------
 1 file changed, 24 insertions(+), 11 deletions(-)

diff --git a/distutils/extension.py b/distutils/extension.py
index 33159079c1..f6e3445bad 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -26,12 +26,14 @@ class Extension:
       name : string
         the full name of the extension, including any packages -- ie.
         *not* a filename or pathname, but Python dotted name
-      sources : [string | os.PathLike]
-        list of source filenames, relative to the distribution root
-        (where the setup script lives), in Unix form (slash-separated)
-        for portability.  Source files may be C, C++, SWIG (.i),
-        platform-specific resource files, or whatever else is recognized
-        by the "build_ext" command as source for a Python extension.
+      sources : Iterable[string | os.PathLike]
+        iterable of source filenames (except strings, which could be misinterpreted
+        as a single filename), relative to the distribution root (where the setup
+        script lives), in Unix form (slash-separated) for portability. Can be any
+        non-string iterable (list, tuple, set, etc.) containing strings or
+        PathLike objects. Source files may be C, C++, SWIG (.i), platform-specific
+        resource files, or whatever else is recognized by the "build_ext" command
+        as source for a Python extension.
       include_dirs : [string]
         list of directories to search for C/C++ header files (in Unix
         form for portability)
@@ -106,12 +108,23 @@ def __init__(
     ):
         if not isinstance(name, str):
             raise AssertionError("'name' must be a string")  # noqa: TRY004
-        if not (
-            isinstance(sources, list)
-            and all(isinstance(v, (str, os.PathLike)) for v in sources)
-        ):
+
+        # we handle the string case first; though strings are iterable, we disallow them
+        if isinstance(sources, str):
+            raise AssertionError(  # noqa: TRY004
+                "'sources' must be an iterable of strings or PathLike objects, not a string"
+            )
+
+        # mow we check if it's iterable and contains valid types
+        try:
+            sources = list(sources)  # convert to list for consistency
+            if not all(isinstance(v, (str, os.PathLike)) for v in sources):
+                raise AssertionError(
+                    "All elements in 'sources' must be strings or PathLike objects"
+                )
+        except TypeError:
             raise AssertionError(
-                "'sources' must be a list of strings or PathLike objects."
+                "'sources' must be an iterable of strings or PathLike objects"
             )
 
         self.name = name

From 115bb678c722286246ad31dc7cc0cc92fe1111d8 Mon Sep 17 00:00:00 2001
From: Agriya Khetarpal <74401230+agriyakhetarpal@users.noreply.github.com>
Date: Thu, 7 Nov 2024 03:14:20 +0530
Subject: [PATCH 1281/1761] Add more tests to cover different iterables

---
 distutils/tests/test_extension.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py
index 41872e04e8..31d1fc890e 100644
--- a/distutils/tests/test_extension.py
+++ b/distutils/tests/test_extension.py
@@ -69,7 +69,7 @@ def test_extension_init(self):
         assert ext.name == 'name'
 
         # the second argument, which is the list of files, must
-        # be a list of strings or PathLike objects
+        # be a list of strings or PathLike objects, and not a string
         with pytest.raises(AssertionError):
             Extension('name', 'file')
         with pytest.raises(AssertionError):
@@ -79,6 +79,16 @@ def test_extension_init(self):
         ext = Extension('name', [pathlib.Path('file1'), pathlib.Path('file2')])
         assert ext.sources == ['file1', 'file2']
 
+        # any non-string iterable of strings or PathLike objects should work
+        ext = Extension('name', ('file1', 'file2'))  # tuple
+        assert ext.sources == ['file1', 'file2']
+        ext = Extension('name', {'file1', 'file2'})  # set
+        assert sorted(ext.sources) == ['file1', 'file2']
+        ext = Extension('name', iter(['file1', 'file2']))  # iterator
+        assert ext.sources == ['file1', 'file2']
+        ext = Extension('name', [pathlib.Path('file1'), 'file2'])  # mixed types
+        assert ext.sources == ['file1', 'file2']
+
         # others arguments have defaults
         for attr in (
             'include_dirs',

From 4a467fac7a98cc5d605afc1ebd951cdd82268f86 Mon Sep 17 00:00:00 2001
From: Agriya Khetarpal <74401230+agriyakhetarpal@users.noreply.github.com>
Date: Thu, 7 Nov 2024 04:09:16 +0530
Subject: [PATCH 1282/1761] Delegate to `os.fspath` for type checking

Co-Authored-By: Avasam 
---
 distutils/extension.py | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/distutils/extension.py b/distutils/extension.py
index f6e3445bad..8d766f674b 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -117,18 +117,13 @@ def __init__(
 
         # mow we check if it's iterable and contains valid types
         try:
-            sources = list(sources)  # convert to list for consistency
-            if not all(isinstance(v, (str, os.PathLike)) for v in sources):
-                raise AssertionError(
-                    "All elements in 'sources' must be strings or PathLike objects"
-                )
+            self.sources = list(map(os.fspath, sources))
         except TypeError:
             raise AssertionError(
                 "'sources' must be an iterable of strings or PathLike objects"
             )
 
         self.name = name
-        self.sources = list(map(os.fspath, sources))
         self.include_dirs = include_dirs or []
         self.define_macros = define_macros or []
         self.undef_macros = undef_macros or []

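os.fspath() already performs the per-element check the deleted isinstance() loop did: it returns str (and bytes) unchanged, calls __fspath__ on os.PathLike objects, and raises TypeError for anything else, which the existing except clause turns into the AssertionError the tests expect. A short demonstration of that behavior, standard library only:

    import os
    import pathlib

    os.fspath('src/mod.c')            # 'src/mod.c' -- str passes through
    os.fspath(pathlib.Path('mod.c'))  # 'mod.c' -- PathLike is converted

    try:
        os.fspath(42)                 # neither str, bytes, nor PathLike
    except TypeError as exc:
        print(exc)

    # A plain string would *not* fail here -- it is an iterable of
    # 1-character strings -- which is why the patch rejects str first.
    list(map(os.fspath, 'abc'))       # ['a', 'b', 'c']
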
From 2930193c0714e4aa016b68c2d510a5a177c95b8a Mon Sep 17 00:00:00 2001
From: Agriya Khetarpal <74401230+agriyakhetarpal@users.noreply.github.com>
Date: Thu, 7 Nov 2024 04:10:50 +0530
Subject: [PATCH 1283/1761] Fix typo

Co-authored-by: Avasam 
---
 distutils/extension.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/extension.py b/distutils/extension.py
index 8d766f674b..0b77614507 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -115,7 +115,7 @@ def __init__(
                 "'sources' must be an iterable of strings or PathLike objects, not a string"
             )
 
-        # mow we check if it's iterable and contains valid types
+        # now we check if it's iterable and contains valid types
         try:
             self.sources = list(map(os.fspath, sources))
         except TypeError:

From 42e06250ce712839cfa437a5460ef4bb7b13ec0c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 6 Nov 2024 20:33:47 -0500
Subject: [PATCH 1284/1761] Add news fragment.

---
 newsfragments/+f0b61194.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/+f0b61194.bugfix.rst

diff --git a/newsfragments/+f0b61194.bugfix.rst b/newsfragments/+f0b61194.bugfix.rst
new file mode 100644
index 0000000000..597165c3a0
--- /dev/null
+++ b/newsfragments/+f0b61194.bugfix.rst
@@ -0,0 +1 @@
+Merge with pypa/distutils@251797602, including fix for dirutil.mkpath handling in pypa/distutils#304.
\ No newline at end of file

From 8e13abf6f5dfe0f7176603e01861801a4c3ed583 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Fri, 8 Nov 2024 14:50:02 +0100
Subject: [PATCH 1285/1761] Update vendored packaging version to 24.2

---
 newsfragments/4740.feature.rst                |   1 +
 pyproject.toml                                |   6 +-
 .../packaging-24.1.dist-info/INSTALLER        |   1 -
 .../_vendor/packaging-24.1.dist-info/RECORD   |  37 -
 .../packaging-24.2.dist-info/INSTALLER        |   1 +
 .../LICENSE                                   |   0
 .../LICENSE.APACHE                            |   0
 .../LICENSE.BSD                               |   0
 .../METADATA                                  |   4 +-
 .../_vendor/packaging-24.2.dist-info/RECORD   |  25 +
 .../REQUESTED                                 |   0
 .../WHEEL                                     |   2 +-
 setuptools/_vendor/packaging/__init__.py      |   4 +-
 setuptools/_vendor/packaging/_elffile.py      |   8 +-
 setuptools/_vendor/packaging/_manylinux.py    |   1 +
 .../_vendor/packaging/licenses/__init__.py    | 145 ++++
 .../_vendor/packaging/licenses/_spdx.py       | 759 ++++++++++++++++++
 setuptools/_vendor/packaging/markers.py       |  24 +-
 setuptools/_vendor/packaging/metadata.py      | 107 ++-
 setuptools/_vendor/packaging/specifiers.py    |  27 +-
 setuptools/_vendor/packaging/tags.py          | 107 ++-
 setuptools/_vendor/packaging/utils.py         |  77 +-
 setuptools/_vendor/packaging/version.py       |  33 +-
 23 files changed, 1198 insertions(+), 171 deletions(-)
 create mode 100644 newsfragments/4740.feature.rst
 delete mode 100644 setuptools/_vendor/packaging-24.1.dist-info/INSTALLER
 delete mode 100644 setuptools/_vendor/packaging-24.1.dist-info/RECORD
 create mode 100644 setuptools/_vendor/packaging-24.2.dist-info/INSTALLER
 rename setuptools/_vendor/{packaging-24.1.dist-info => packaging-24.2.dist-info}/LICENSE (100%)
 rename setuptools/_vendor/{packaging-24.1.dist-info => packaging-24.2.dist-info}/LICENSE.APACHE (100%)
 rename setuptools/_vendor/{packaging-24.1.dist-info => packaging-24.2.dist-info}/LICENSE.BSD (100%)
 rename setuptools/_vendor/{packaging-24.1.dist-info => packaging-24.2.dist-info}/METADATA (98%)
 create mode 100644 setuptools/_vendor/packaging-24.2.dist-info/RECORD
 rename setuptools/_vendor/{packaging-24.1.dist-info => packaging-24.2.dist-info}/REQUESTED (100%)
 rename setuptools/_vendor/{packaging-24.1.dist-info => packaging-24.2.dist-info}/WHEEL (71%)
 create mode 100644 setuptools/_vendor/packaging/licenses/__init__.py
 create mode 100644 setuptools/_vendor/packaging/licenses/_spdx.py

diff --git a/newsfragments/4740.feature.rst b/newsfragments/4740.feature.rst
new file mode 100644
index 0000000000..9dd6db56cf
--- /dev/null
+++ b/newsfragments/4740.feature.rst
@@ -0,0 +1 @@
+Updated vendored packaging version to 24.2.
diff --git a/pyproject.toml b/pyproject.toml
index ca09a084d8..b541d13010 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -41,7 +41,7 @@ test = [
 	"virtualenv>=13.0.0",
 	"wheel>=0.44.0", # Consistent requirement normalisation in METADATA (see #4547)
 	"pip>=19.1", # For proper file:// URLs support.
-	"packaging>=23.2",
+	"packaging>=24.2",
 	"jaraco.envs>=2.2",
 	"pytest-xdist>=3", # Dropped dependency on pytest-fork and py
 	"jaraco.path>=3.2.0",
@@ -90,7 +90,7 @@ doc = [
 ssl = []
 certs = []
 core = [
-	"packaging>=24",
+	"packaging>=24.2",
 	"more_itertools>=8.8",
 	"jaraco.text>=3.7",
 	"importlib_metadata>=6; python_version < '3.10'",
@@ -134,7 +134,7 @@ type = [
 
 	# pin mypy version so a new version doesn't suddenly cause the CI to fail,
 	# until types-setuptools is removed from typeshed.
-	# For help with static-typing issues, or mypy update, ping @Avasam 
+	# For help with static-typing issues, or mypy update, ping @Avasam
 	"mypy==1.12.*",
 	# Typing fixes in version newer than we require at runtime
 	"importlib_metadata>=7.0.2; python_version < '3.10'",
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/INSTALLER b/setuptools/_vendor/packaging-24.1.dist-info/INSTALLER
deleted file mode 100644
index a1b589e38a..0000000000
--- a/setuptools/_vendor/packaging-24.1.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/RECORD b/setuptools/_vendor/packaging-24.1.dist-info/RECORD
deleted file mode 100644
index 2b1e6bd4db..0000000000
--- a/setuptools/_vendor/packaging-24.1.dist-info/RECORD
+++ /dev/null
@@ -1,37 +0,0 @@
-packaging-24.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-packaging-24.1.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
-packaging-24.1.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
-packaging-24.1.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
-packaging-24.1.dist-info/METADATA,sha256=X3ooO3WnCfzNSBrqQjefCD1POAF1M2WSLmsHMgQlFdk,3204
-packaging-24.1.dist-info/RECORD,,
-packaging-24.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging-24.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-packaging/__init__.py,sha256=dtw2bNmWCQ9WnMoK3bk_elL1svSlikXtLpZhCFIB9SE,496
-packaging/__pycache__/__init__.cpython-312.pyc,,
-packaging/__pycache__/_elffile.cpython-312.pyc,,
-packaging/__pycache__/_manylinux.cpython-312.pyc,,
-packaging/__pycache__/_musllinux.cpython-312.pyc,,
-packaging/__pycache__/_parser.cpython-312.pyc,,
-packaging/__pycache__/_structures.cpython-312.pyc,,
-packaging/__pycache__/_tokenizer.cpython-312.pyc,,
-packaging/__pycache__/markers.cpython-312.pyc,,
-packaging/__pycache__/metadata.cpython-312.pyc,,
-packaging/__pycache__/requirements.cpython-312.pyc,,
-packaging/__pycache__/specifiers.cpython-312.pyc,,
-packaging/__pycache__/tags.cpython-312.pyc,,
-packaging/__pycache__/utils.cpython-312.pyc,,
-packaging/__pycache__/version.cpython-312.pyc,,
-packaging/_elffile.py,sha256=_LcJW4YNKywYsl4169B2ukKRqwxjxst_8H0FRVQKlz8,3282
-packaging/_manylinux.py,sha256=Xo4V0PZz8sbuVCbTni0t1CR0AHeir_7ib4lTmV8scD4,9586
-packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
-packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
-packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
-packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
-packaging/markers.py,sha256=dWKSqn5Sp-jDmOG-W3GfLHKjwhf1IsznbT71VlBoB5M,10671
-packaging/metadata.py,sha256=KINuSkJ12u-SyoKNTy_pHNGAfMUtxNvZ53qA1zAKcKI,32349
-packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
-packaging/specifiers.py,sha256=rjpc3hoJuunRIT6DdH7gLTnQ5j5QKSuWjoTC5sdHtHI,39714
-packaging/tags.py,sha256=y8EbheOu9WS7s-MebaXMcHMF-jzsA_C1Lz5XRTiSy4w,18883
-packaging/utils.py,sha256=NAdYUwnlAOpkat_RthavX8a07YuVxgGL_vwrx73GSDM,5287
-packaging/version.py,sha256=V0H3SOj_8werrvorrb6QDLRhlcqSErNTTkCvvfszhDI,16198
diff --git a/setuptools/_vendor/packaging-24.2.dist-info/INSTALLER b/setuptools/_vendor/packaging-24.2.dist-info/INSTALLER
new file mode 100644
index 0000000000..5c69047b2e
--- /dev/null
+++ b/setuptools/_vendor/packaging-24.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+uv
\ No newline at end of file
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/LICENSE b/setuptools/_vendor/packaging-24.2.dist-info/LICENSE
similarity index 100%
rename from setuptools/_vendor/packaging-24.1.dist-info/LICENSE
rename to setuptools/_vendor/packaging-24.2.dist-info/LICENSE
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/LICENSE.APACHE b/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.APACHE
similarity index 100%
rename from setuptools/_vendor/packaging-24.1.dist-info/LICENSE.APACHE
rename to setuptools/_vendor/packaging-24.2.dist-info/LICENSE.APACHE
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/LICENSE.BSD b/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.BSD
similarity index 100%
rename from setuptools/_vendor/packaging-24.1.dist-info/LICENSE.BSD
rename to setuptools/_vendor/packaging-24.2.dist-info/LICENSE.BSD
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/METADATA b/setuptools/_vendor/packaging-24.2.dist-info/METADATA
similarity index 98%
rename from setuptools/_vendor/packaging-24.1.dist-info/METADATA
rename to setuptools/_vendor/packaging-24.2.dist-info/METADATA
index 255dc46e0e..1479c8694b 100644
--- a/setuptools/_vendor/packaging-24.1.dist-info/METADATA
+++ b/setuptools/_vendor/packaging-24.2.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: packaging
-Version: 24.1
+Version: 24.2
 Summary: Core utilities for Python packages
 Author-email: Donald Stufft <donald@stufft.io>
 Requires-Python: >=3.8
diff --git a/setuptools/_vendor/packaging-24.2.dist-info/RECORD b/setuptools/_vendor/packaging-24.2.dist-info/RECORD
new file mode 100644
index 0000000000..678aa5a501
--- /dev/null
+++ b/setuptools/_vendor/packaging-24.2.dist-info/RECORD
@@ -0,0 +1,25 @@
+packaging-24.2.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
+packaging-24.2.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-24.2.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-24.2.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-24.2.dist-info/METADATA,sha256=ohH86s6k5mIfQxY2TS0LcSfADeOFa4BiCC-bxZV-pNs,3204
+packaging-24.2.dist-info/RECORD,,
+packaging-24.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging-24.2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+packaging/__init__.py,sha256=dk4Ta_vmdVJxYHDcfyhvQNw8V3PgSBomKNXqg-D2JDY,494
+packaging/_elffile.py,sha256=cflAQAkE25tzhYmq_aCi72QfbT_tn891tPzfpbeHOwE,3306
+packaging/_manylinux.py,sha256=vl5OCoz4kx80H5rwXKeXWjl9WNISGmr4ZgTpTP9lU9c,9612
+packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
+packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
+packaging/licenses/__init__.py,sha256=1x5M1nEYjcgwEbLt0dXwz2ukjr18DiCzC0sraQqJ-Ww,5715
+packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398
+packaging/markers.py,sha256=c89TNzB7ZdGYhkovm6PYmqGyHxXlYVaLW591PHUNKD8,10561
+packaging/metadata.py,sha256=YJibM7GYe4re8-0a3OlXmGS-XDgTEoO4tlBt2q25Bng,34762
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
+packaging/specifiers.py,sha256=GG1wPNMcL0fMJO68vF53wKMdwnfehDcaI-r9NpTfilA,40074
+packaging/tags.py,sha256=CFqrJzAzc2XNGexerH__T-Y5Iwq7WbsYXsiLERLWxY0,21014
+packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050
+packaging/version.py,sha256=olfyuk_DPbflNkJ4wBWetXQ17c74x3DB501degUv7DY,16676
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/REQUESTED b/setuptools/_vendor/packaging-24.2.dist-info/REQUESTED
similarity index 100%
rename from setuptools/_vendor/packaging-24.1.dist-info/REQUESTED
rename to setuptools/_vendor/packaging-24.2.dist-info/REQUESTED
diff --git a/setuptools/_vendor/packaging-24.1.dist-info/WHEEL b/setuptools/_vendor/packaging-24.2.dist-info/WHEEL
similarity index 71%
rename from setuptools/_vendor/packaging-24.1.dist-info/WHEEL
rename to setuptools/_vendor/packaging-24.2.dist-info/WHEEL
index 3b5e64b5e6..e3c6feefa2 100644
--- a/setuptools/_vendor/packaging-24.1.dist-info/WHEEL
+++ b/setuptools/_vendor/packaging-24.2.dist-info/WHEEL
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: flit 3.9.0
+Generator: flit 3.10.1
 Root-Is-Purelib: true
 Tag: py3-none-any
diff --git a/setuptools/_vendor/packaging/__init__.py b/setuptools/_vendor/packaging/__init__.py
index 9ba41d8357..d79f73c574 100644
--- a/setuptools/_vendor/packaging/__init__.py
+++ b/setuptools/_vendor/packaging/__init__.py
@@ -6,10 +6,10 @@
 __summary__ = "Core utilities for Python packages"
 __uri__ = "https://github.com/pypa/packaging"
 
-__version__ = "24.1"
+__version__ = "24.2"
 
 __author__ = "Donald Stufft and individual contributors"
 __email__ = "donald@stufft.io"
 
 __license__ = "BSD-2-Clause or Apache-2.0"
-__copyright__ = "2014 %s" % __author__
+__copyright__ = f"2014 {__author__}"
diff --git a/setuptools/_vendor/packaging/_elffile.py b/setuptools/_vendor/packaging/_elffile.py
index f7a02180bf..25f4282cc2 100644
--- a/setuptools/_vendor/packaging/_elffile.py
+++ b/setuptools/_vendor/packaging/_elffile.py
@@ -48,8 +48,8 @@ def __init__(self, f: IO[bytes]) -> None:
 
         try:
             ident = self._read("16B")
-        except struct.error:
-            raise ELFInvalid("unable to parse identification")
+        except struct.error as e:
+            raise ELFInvalid("unable to parse identification") from e
         magic = bytes(ident[:4])
         if magic != b"\x7fELF":
             raise ELFInvalid(f"invalid magic: {magic!r}")
@@ -67,11 +67,11 @@ def __init__(self, f: IO[bytes]) -> None:
                 (2, 1): ("HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
             }[(self.capacity, self.encoding)]
-        except KeyError:
+        except KeyError as e:
             raise ELFInvalid(
                 f"unrecognized capacity ({self.capacity}) or "
                 f"encoding ({self.encoding})"
-            )
+            ) from e
 
         try:
             (
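These hunks switch bare raises inside except blocks to `raise ... from e`, the pattern enforced by rules such as flake8-bugbear's B904: chaining records the original exception as __cause__, so tracebacks show the real root failure instead of the misleading "during handling of the above exception, another exception occurred" message. A standalone sketch with hypothetical names (ConfigError and parse_port are illustrations, not part of the patch):

    class ConfigError(Exception):
        pass

    def parse_port(raw: str) -> int:
        try:
            return int(raw)
        except ValueError as e:
            # "from e" preserves the ValueError as __cause__, keeping
            # both tracebacks visible to whoever catches ConfigError.
            raise ConfigError(f'invalid port: {raw!r}') from e

    parse_port('8080')  # 8080; parse_port('ssh') raises ConfigError
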
diff --git a/setuptools/_vendor/packaging/_manylinux.py b/setuptools/_vendor/packaging/_manylinux.py
index 08f651fbd8..61339a6fcc 100644
--- a/setuptools/_vendor/packaging/_manylinux.py
+++ b/setuptools/_vendor/packaging/_manylinux.py
@@ -164,6 +164,7 @@ def _parse_glibc_version(version_str: str) -> tuple[int, int]:
             f"Expected glibc version with 2 components major.minor,"
             f" got: {version_str}",
             RuntimeWarning,
+            stacklevel=2,
         )
         return -1, -1
     return int(m.group("major")), int(m.group("minor"))
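The stacklevel=2 added above makes the warning point at the code that called _parse_glibc_version rather than at the warnings.warn() line itself, which is what makes the warning actionable for users. The general pattern, with a hypothetical deprecated_api() as illustration:

    import warnings

    def deprecated_api() -> None:
        # stacklevel=2 attributes the warning to *our caller's* line,
        # not to this warnings.warn() call.
        warnings.warn('use new_api() instead', DeprecationWarning, stacklevel=2)

    deprecated_api()  # the warning is reported at this call site
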
diff --git a/setuptools/_vendor/packaging/licenses/__init__.py b/setuptools/_vendor/packaging/licenses/__init__.py
new file mode 100644
index 0000000000..569156d6ca
--- /dev/null
+++ b/setuptools/_vendor/packaging/licenses/__init__.py
@@ -0,0 +1,145 @@
+#######################################################################################
+#
+# Adapted from:
+#  https://github.com/pypa/hatch/blob/5352e44/backend/src/hatchling/licenses/parse.py
+#
+# MIT License
+#
+# Copyright (c) 2017-present Ofek Lev 
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy of this
+# software and associated documentation files (the "Software"), to deal in the Software
+# without restriction, including without limitation the rights to use, copy, modify,
+# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be included in all copies
+# or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
+# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+#
+# With additional allowance of arbitrary `LicenseRef-` identifiers, not just
+# `LicenseRef-Public-Domain` and `LicenseRef-Proprietary`.
+#
+#######################################################################################
+from __future__ import annotations
+
+import re
+from typing import NewType, cast
+
+from packaging.licenses._spdx import EXCEPTIONS, LICENSES
+
+__all__ = [
+    "NormalizedLicenseExpression",
+    "InvalidLicenseExpression",
+    "canonicalize_license_expression",
+]
+
+license_ref_allowed = re.compile("^[A-Za-z0-9.-]*$")
+
+NormalizedLicenseExpression = NewType("NormalizedLicenseExpression", str)
+
+
+class InvalidLicenseExpression(ValueError):
+    """Raised when a license-expression string is invalid
+
+    >>> canonicalize_license_expression("invalid")
+    Traceback (most recent call last):
+        ...
+    packaging.licenses.InvalidLicenseExpression: Invalid license expression: 'invalid'
+    """
+
+
+def canonicalize_license_expression(
+    raw_license_expression: str,
+) -> NormalizedLicenseExpression:
+    if not raw_license_expression:
+        message = f"Invalid license expression: {raw_license_expression!r}"
+        raise InvalidLicenseExpression(message)
+
+    # Pad any parentheses so tokenization can be achieved by merely splitting on
+    # whitespace.
+    license_expression = raw_license_expression.replace("(", " ( ").replace(")", " ) ")
+    licenseref_prefix = "LicenseRef-"
+    license_refs = {
+        ref.lower(): "LicenseRef-" + ref[len(licenseref_prefix) :]
+        for ref in license_expression.split()
+        if ref.lower().startswith(licenseref_prefix.lower())
+    }
+
+    # Normalize to lower case so we can look up licenses/exceptions
+    # and so boolean operators are Python-compatible.
+    license_expression = license_expression.lower()
+
+    tokens = license_expression.split()
+
+    # Rather than implementing boolean logic, we create an expression that Python can
+    # parse. Everything that is not involved with the grammar itself is treated as
+    # `False` and the expression should evaluate as such.
+    python_tokens = []
+    for token in tokens:
+        if token not in {"or", "and", "with", "(", ")"}:
+            python_tokens.append("False")
+        elif token == "with":
+            python_tokens.append("or")
+        elif token == "(" and python_tokens and python_tokens[-1] not in {"or", "and"}:
+            message = f"Invalid license expression: {raw_license_expression!r}"
+            raise InvalidLicenseExpression(message)
+        else:
+            python_tokens.append(token)
+
+    python_expression = " ".join(python_tokens)
+    try:
+        invalid = eval(python_expression, globals(), locals())
+    except Exception:
+        invalid = True
+
+    if invalid is not False:
+        message = f"Invalid license expression: {raw_license_expression!r}"
+        raise InvalidLicenseExpression(message) from None
+
+    # Take a final pass to check for unknown licenses/exceptions.
+    normalized_tokens = []
+    for token in tokens:
+        if token in {"or", "and", "with", "(", ")"}:
+            normalized_tokens.append(token.upper())
+            continue
+
+        if normalized_tokens and normalized_tokens[-1] == "WITH":
+            if token not in EXCEPTIONS:
+                message = f"Unknown license exception: {token!r}"
+                raise InvalidLicenseExpression(message)
+
+            normalized_tokens.append(EXCEPTIONS[token]["id"])
+        else:
+            if token.endswith("+"):
+                final_token = token[:-1]
+                suffix = "+"
+            else:
+                final_token = token
+                suffix = ""
+
+            if final_token.startswith("licenseref-"):
+                if not license_ref_allowed.match(final_token):
+                    message = f"Invalid licenseref: {final_token!r}"
+                    raise InvalidLicenseExpression(message)
+                normalized_tokens.append(license_refs[final_token] + suffix)
+            else:
+                if final_token not in LICENSES:
+                    message = f"Unknown license: {final_token!r}"
+                    raise InvalidLicenseExpression(message)
+                normalized_tokens.append(LICENSES[final_token]["id"] + suffix)
+
+    normalized_expression = " ".join(normalized_tokens)
+
+    return cast(
+        NormalizedLicenseExpression,
+        normalized_expression.replace("( ", "(").replace(" )", ")"),
+    )
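For orientation, a sketch of how the new module behaves when exercised directly. It imports the standalone packaging>=24.2 distribution; inside setuptools the same code lives under setuptools/_vendor and, as its own import of packaging.licenses._spdx above shows, expects to be importable as packaging:

    from packaging.licenses import (
        InvalidLicenseExpression,
        canonicalize_license_expression,
    )

    # Operators are uppercased and license ids get their SPDX casing.
    canonicalize_license_expression('mit or apache-2.0')
    # -> 'MIT OR Apache-2.0'

    try:
        canonicalize_license_expression('mit and not-a-license')
    except InvalidLicenseExpression as exc:
        print(exc)  # Unknown license: 'not-a-license'
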
diff --git a/setuptools/_vendor/packaging/licenses/_spdx.py b/setuptools/_vendor/packaging/licenses/_spdx.py
new file mode 100644
index 0000000000..eac22276a3
--- /dev/null
+++ b/setuptools/_vendor/packaging/licenses/_spdx.py
@@ -0,0 +1,759 @@
+
+from __future__ import annotations
+
+from typing import TypedDict
+
+class SPDXLicense(TypedDict):
+    id: str
+    deprecated: bool
+
+class SPDXException(TypedDict):
+    id: str
+    deprecated: bool
+
+
+VERSION = '3.25.0'
+
+LICENSES: dict[str, SPDXLicense] = {
+    '0bsd': {'id': '0BSD', 'deprecated': False},
+    '3d-slicer-1.0': {'id': '3D-Slicer-1.0', 'deprecated': False},
+    'aal': {'id': 'AAL', 'deprecated': False},
+    'abstyles': {'id': 'Abstyles', 'deprecated': False},
+    'adacore-doc': {'id': 'AdaCore-doc', 'deprecated': False},
+    'adobe-2006': {'id': 'Adobe-2006', 'deprecated': False},
+    'adobe-display-postscript': {'id': 'Adobe-Display-PostScript', 'deprecated': False},
+    'adobe-glyph': {'id': 'Adobe-Glyph', 'deprecated': False},
+    'adobe-utopia': {'id': 'Adobe-Utopia', 'deprecated': False},
+    'adsl': {'id': 'ADSL', 'deprecated': False},
+    'afl-1.1': {'id': 'AFL-1.1', 'deprecated': False},
+    'afl-1.2': {'id': 'AFL-1.2', 'deprecated': False},
+    'afl-2.0': {'id': 'AFL-2.0', 'deprecated': False},
+    'afl-2.1': {'id': 'AFL-2.1', 'deprecated': False},
+    'afl-3.0': {'id': 'AFL-3.0', 'deprecated': False},
+    'afmparse': {'id': 'Afmparse', 'deprecated': False},
+    'agpl-1.0': {'id': 'AGPL-1.0', 'deprecated': True},
+    'agpl-1.0-only': {'id': 'AGPL-1.0-only', 'deprecated': False},
+    'agpl-1.0-or-later': {'id': 'AGPL-1.0-or-later', 'deprecated': False},
+    'agpl-3.0': {'id': 'AGPL-3.0', 'deprecated': True},
+    'agpl-3.0-only': {'id': 'AGPL-3.0-only', 'deprecated': False},
+    'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later', 'deprecated': False},
+    'aladdin': {'id': 'Aladdin', 'deprecated': False},
+    'amd-newlib': {'id': 'AMD-newlib', 'deprecated': False},
+    'amdplpa': {'id': 'AMDPLPA', 'deprecated': False},
+    'aml': {'id': 'AML', 'deprecated': False},
+    'aml-glslang': {'id': 'AML-glslang', 'deprecated': False},
+    'ampas': {'id': 'AMPAS', 'deprecated': False},
+    'antlr-pd': {'id': 'ANTLR-PD', 'deprecated': False},
+    'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback', 'deprecated': False},
+    'any-osi': {'id': 'any-OSI', 'deprecated': False},
+    'apache-1.0': {'id': 'Apache-1.0', 'deprecated': False},
+    'apache-1.1': {'id': 'Apache-1.1', 'deprecated': False},
+    'apache-2.0': {'id': 'Apache-2.0', 'deprecated': False},
+    'apafml': {'id': 'APAFML', 'deprecated': False},
+    'apl-1.0': {'id': 'APL-1.0', 'deprecated': False},
+    'app-s2p': {'id': 'App-s2p', 'deprecated': False},
+    'apsl-1.0': {'id': 'APSL-1.0', 'deprecated': False},
+    'apsl-1.1': {'id': 'APSL-1.1', 'deprecated': False},
+    'apsl-1.2': {'id': 'APSL-1.2', 'deprecated': False},
+    'apsl-2.0': {'id': 'APSL-2.0', 'deprecated': False},
+    'arphic-1999': {'id': 'Arphic-1999', 'deprecated': False},
+    'artistic-1.0': {'id': 'Artistic-1.0', 'deprecated': False},
+    'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8', 'deprecated': False},
+    'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl', 'deprecated': False},
+    'artistic-2.0': {'id': 'Artistic-2.0', 'deprecated': False},
+    'aswf-digital-assets-1.0': {'id': 'ASWF-Digital-Assets-1.0', 'deprecated': False},
+    'aswf-digital-assets-1.1': {'id': 'ASWF-Digital-Assets-1.1', 'deprecated': False},
+    'baekmuk': {'id': 'Baekmuk', 'deprecated': False},
+    'bahyph': {'id': 'Bahyph', 'deprecated': False},
+    'barr': {'id': 'Barr', 'deprecated': False},
+    'bcrypt-solar-designer': {'id': 'bcrypt-Solar-Designer', 'deprecated': False},
+    'beerware': {'id': 'Beerware', 'deprecated': False},
+    'bitstream-charter': {'id': 'Bitstream-Charter', 'deprecated': False},
+    'bitstream-vera': {'id': 'Bitstream-Vera', 'deprecated': False},
+    'bittorrent-1.0': {'id': 'BitTorrent-1.0', 'deprecated': False},
+    'bittorrent-1.1': {'id': 'BitTorrent-1.1', 'deprecated': False},
+    'blessing': {'id': 'blessing', 'deprecated': False},
+    'blueoak-1.0.0': {'id': 'BlueOak-1.0.0', 'deprecated': False},
+    'boehm-gc': {'id': 'Boehm-GC', 'deprecated': False},
+    'borceux': {'id': 'Borceux', 'deprecated': False},
+    'brian-gladman-2-clause': {'id': 'Brian-Gladman-2-Clause', 'deprecated': False},
+    'brian-gladman-3-clause': {'id': 'Brian-Gladman-3-Clause', 'deprecated': False},
+    'bsd-1-clause': {'id': 'BSD-1-Clause', 'deprecated': False},
+    'bsd-2-clause': {'id': 'BSD-2-Clause', 'deprecated': False},
+    'bsd-2-clause-darwin': {'id': 'BSD-2-Clause-Darwin', 'deprecated': False},
+    'bsd-2-clause-first-lines': {'id': 'BSD-2-Clause-first-lines', 'deprecated': False},
+    'bsd-2-clause-freebsd': {'id': 'BSD-2-Clause-FreeBSD', 'deprecated': True},
+    'bsd-2-clause-netbsd': {'id': 'BSD-2-Clause-NetBSD', 'deprecated': True},
+    'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent', 'deprecated': False},
+    'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views', 'deprecated': False},
+    'bsd-3-clause': {'id': 'BSD-3-Clause', 'deprecated': False},
+    'bsd-3-clause-acpica': {'id': 'BSD-3-Clause-acpica', 'deprecated': False},
+    'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution', 'deprecated': False},
+    'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear', 'deprecated': False},
+    'bsd-3-clause-flex': {'id': 'BSD-3-Clause-flex', 'deprecated': False},
+    'bsd-3-clause-hp': {'id': 'BSD-3-Clause-HP', 'deprecated': False},
+    'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL', 'deprecated': False},
+    'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification', 'deprecated': False},
+    'bsd-3-clause-no-military-license': {'id': 'BSD-3-Clause-No-Military-License', 'deprecated': False},
+    'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License', 'deprecated': False},
+    'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014', 'deprecated': False},
+    'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty', 'deprecated': False},
+    'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI', 'deprecated': False},
+    'bsd-3-clause-sun': {'id': 'BSD-3-Clause-Sun', 'deprecated': False},
+    'bsd-4-clause': {'id': 'BSD-4-Clause', 'deprecated': False},
+    'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened', 'deprecated': False},
+    'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC', 'deprecated': False},
+    'bsd-4.3reno': {'id': 'BSD-4.3RENO', 'deprecated': False},
+    'bsd-4.3tahoe': {'id': 'BSD-4.3TAHOE', 'deprecated': False},
+    'bsd-advertising-acknowledgement': {'id': 'BSD-Advertising-Acknowledgement', 'deprecated': False},
+    'bsd-attribution-hpnd-disclaimer': {'id': 'BSD-Attribution-HPND-disclaimer', 'deprecated': False},
+    'bsd-inferno-nettverk': {'id': 'BSD-Inferno-Nettverk', 'deprecated': False},
+    'bsd-protection': {'id': 'BSD-Protection', 'deprecated': False},
+    'bsd-source-beginning-file': {'id': 'BSD-Source-beginning-file', 'deprecated': False},
+    'bsd-source-code': {'id': 'BSD-Source-Code', 'deprecated': False},
+    'bsd-systemics': {'id': 'BSD-Systemics', 'deprecated': False},
+    'bsd-systemics-w3works': {'id': 'BSD-Systemics-W3Works', 'deprecated': False},
+    'bsl-1.0': {'id': 'BSL-1.0', 'deprecated': False},
+    'busl-1.1': {'id': 'BUSL-1.1', 'deprecated': False},
+    'bzip2-1.0.5': {'id': 'bzip2-1.0.5', 'deprecated': True},
+    'bzip2-1.0.6': {'id': 'bzip2-1.0.6', 'deprecated': False},
+    'c-uda-1.0': {'id': 'C-UDA-1.0', 'deprecated': False},
+    'cal-1.0': {'id': 'CAL-1.0', 'deprecated': False},
+    'cal-1.0-combined-work-exception': {'id': 'CAL-1.0-Combined-Work-Exception', 'deprecated': False},
+    'caldera': {'id': 'Caldera', 'deprecated': False},
+    'caldera-no-preamble': {'id': 'Caldera-no-preamble', 'deprecated': False},
+    'catharon': {'id': 'Catharon', 'deprecated': False},
+    'catosl-1.1': {'id': 'CATOSL-1.1', 'deprecated': False},
+    'cc-by-1.0': {'id': 'CC-BY-1.0', 'deprecated': False},
+    'cc-by-2.0': {'id': 'CC-BY-2.0', 'deprecated': False},
+    'cc-by-2.5': {'id': 'CC-BY-2.5', 'deprecated': False},
+    'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU', 'deprecated': False},
+    'cc-by-3.0': {'id': 'CC-BY-3.0', 'deprecated': False},
+    'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT', 'deprecated': False},
+    'cc-by-3.0-au': {'id': 'CC-BY-3.0-AU', 'deprecated': False},
+    'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE', 'deprecated': False},
+    'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO', 'deprecated': False},
+    'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL', 'deprecated': False},
+    'cc-by-3.0-us': {'id': 'CC-BY-3.0-US', 'deprecated': False},
+    'cc-by-4.0': {'id': 'CC-BY-4.0', 'deprecated': False},
+    'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0', 'deprecated': False},
+    'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0', 'deprecated': False},
+    'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5', 'deprecated': False},
+    'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0', 'deprecated': False},
+    'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE', 'deprecated': False},
+    'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0', 'deprecated': False},
+    'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0', 'deprecated': False},
+    'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0', 'deprecated': False},
+    'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5', 'deprecated': False},
+    'cc-by-nc-nd-3.0': {'id': 'CC-BY-NC-ND-3.0', 'deprecated': False},
+    'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE', 'deprecated': False},
+    'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO', 'deprecated': False},
+    'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0', 'deprecated': False},
+    'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0', 'deprecated': False},
+    'cc-by-nc-sa-2.0': {'id': 'CC-BY-NC-SA-2.0', 'deprecated': False},
+    'cc-by-nc-sa-2.0-de': {'id': 'CC-BY-NC-SA-2.0-DE', 'deprecated': False},
+    'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR', 'deprecated': False},
+    'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK', 'deprecated': False},
+    'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5', 'deprecated': False},
+    'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0', 'deprecated': False},
+    'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE', 'deprecated': False},
+    'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO', 'deprecated': False},
+    'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0', 'deprecated': False},
+    'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0', 'deprecated': False},
+    'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0', 'deprecated': False},
+    'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5', 'deprecated': False},
+    'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0', 'deprecated': False},
+    'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE', 'deprecated': False},
+    'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0', 'deprecated': False},
+    'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0', 'deprecated': False},
+    'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0', 'deprecated': False},
+    'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK', 'deprecated': False},
+    'cc-by-sa-2.1-jp': {'id': 'CC-BY-SA-2.1-JP', 'deprecated': False},
+    'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5', 'deprecated': False},
+    'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0', 'deprecated': False},
+    'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT', 'deprecated': False},
+    'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE', 'deprecated': False},
+    'cc-by-sa-3.0-igo': {'id': 'CC-BY-SA-3.0-IGO', 'deprecated': False},
+    'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0', 'deprecated': False},
+    'cc-pddc': {'id': 'CC-PDDC', 'deprecated': False},
+    'cc0-1.0': {'id': 'CC0-1.0', 'deprecated': False},
+    'cddl-1.0': {'id': 'CDDL-1.0', 'deprecated': False},
+    'cddl-1.1': {'id': 'CDDL-1.1', 'deprecated': False},
+    'cdl-1.0': {'id': 'CDL-1.0', 'deprecated': False},
+    'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0', 'deprecated': False},
+    'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0', 'deprecated': False},
+    'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0', 'deprecated': False},
+    'cecill-1.0': {'id': 'CECILL-1.0', 'deprecated': False},
+    'cecill-1.1': {'id': 'CECILL-1.1', 'deprecated': False},
+    'cecill-2.0': {'id': 'CECILL-2.0', 'deprecated': False},
+    'cecill-2.1': {'id': 'CECILL-2.1', 'deprecated': False},
+    'cecill-b': {'id': 'CECILL-B', 'deprecated': False},
+    'cecill-c': {'id': 'CECILL-C', 'deprecated': False},
+    'cern-ohl-1.1': {'id': 'CERN-OHL-1.1', 'deprecated': False},
+    'cern-ohl-1.2': {'id': 'CERN-OHL-1.2', 'deprecated': False},
+    'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0', 'deprecated': False},
+    'cern-ohl-s-2.0': {'id': 'CERN-OHL-S-2.0', 'deprecated': False},
+    'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0', 'deprecated': False},
+    'cfitsio': {'id': 'CFITSIO', 'deprecated': False},
+    'check-cvs': {'id': 'check-cvs', 'deprecated': False},
+    'checkmk': {'id': 'checkmk', 'deprecated': False},
+    'clartistic': {'id': 'ClArtistic', 'deprecated': False},
+    'clips': {'id': 'Clips', 'deprecated': False},
+    'cmu-mach': {'id': 'CMU-Mach', 'deprecated': False},
+    'cmu-mach-nodoc': {'id': 'CMU-Mach-nodoc', 'deprecated': False},
+    'cnri-jython': {'id': 'CNRI-Jython', 'deprecated': False},
+    'cnri-python': {'id': 'CNRI-Python', 'deprecated': False},
+    'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible', 'deprecated': False},
+    'coil-1.0': {'id': 'COIL-1.0', 'deprecated': False},
+    'community-spec-1.0': {'id': 'Community-Spec-1.0', 'deprecated': False},
+    'condor-1.1': {'id': 'Condor-1.1', 'deprecated': False},
+    'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0', 'deprecated': False},
+    'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1', 'deprecated': False},
+    'cornell-lossless-jpeg': {'id': 'Cornell-Lossless-JPEG', 'deprecated': False},
+    'cpal-1.0': {'id': 'CPAL-1.0', 'deprecated': False},
+    'cpl-1.0': {'id': 'CPL-1.0', 'deprecated': False},
+    'cpol-1.02': {'id': 'CPOL-1.02', 'deprecated': False},
+    'cronyx': {'id': 'Cronyx', 'deprecated': False},
+    'crossword': {'id': 'Crossword', 'deprecated': False},
+    'crystalstacker': {'id': 'CrystalStacker', 'deprecated': False},
+    'cua-opl-1.0': {'id': 'CUA-OPL-1.0', 'deprecated': False},
+    'cube': {'id': 'Cube', 'deprecated': False},
+    'curl': {'id': 'curl', 'deprecated': False},
+    'cve-tou': {'id': 'cve-tou', 'deprecated': False},
+    'd-fsl-1.0': {'id': 'D-FSL-1.0', 'deprecated': False},
+    'dec-3-clause': {'id': 'DEC-3-Clause', 'deprecated': False},
+    'diffmark': {'id': 'diffmark', 'deprecated': False},
+    'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0', 'deprecated': False},
+    'dl-de-zero-2.0': {'id': 'DL-DE-ZERO-2.0', 'deprecated': False},
+    'doc': {'id': 'DOC', 'deprecated': False},
+    'docbook-schema': {'id': 'DocBook-Schema', 'deprecated': False},
+    'docbook-xml': {'id': 'DocBook-XML', 'deprecated': False},
+    'dotseqn': {'id': 'Dotseqn', 'deprecated': False},
+    'drl-1.0': {'id': 'DRL-1.0', 'deprecated': False},
+    'drl-1.1': {'id': 'DRL-1.1', 'deprecated': False},
+    'dsdp': {'id': 'DSDP', 'deprecated': False},
+    'dtoa': {'id': 'dtoa', 'deprecated': False},
+    'dvipdfm': {'id': 'dvipdfm', 'deprecated': False},
+    'ecl-1.0': {'id': 'ECL-1.0', 'deprecated': False},
+    'ecl-2.0': {'id': 'ECL-2.0', 'deprecated': False},
+    'ecos-2.0': {'id': 'eCos-2.0', 'deprecated': True},
+    'efl-1.0': {'id': 'EFL-1.0', 'deprecated': False},
+    'efl-2.0': {'id': 'EFL-2.0', 'deprecated': False},
+    'egenix': {'id': 'eGenix', 'deprecated': False},
+    'elastic-2.0': {'id': 'Elastic-2.0', 'deprecated': False},
+    'entessa': {'id': 'Entessa', 'deprecated': False},
+    'epics': {'id': 'EPICS', 'deprecated': False},
+    'epl-1.0': {'id': 'EPL-1.0', 'deprecated': False},
+    'epl-2.0': {'id': 'EPL-2.0', 'deprecated': False},
+    'erlpl-1.1': {'id': 'ErlPL-1.1', 'deprecated': False},
+    'etalab-2.0': {'id': 'etalab-2.0', 'deprecated': False},
+    'eudatagrid': {'id': 'EUDatagrid', 'deprecated': False},
+    'eupl-1.0': {'id': 'EUPL-1.0', 'deprecated': False},
+    'eupl-1.1': {'id': 'EUPL-1.1', 'deprecated': False},
+    'eupl-1.2': {'id': 'EUPL-1.2', 'deprecated': False},
+    'eurosym': {'id': 'Eurosym', 'deprecated': False},
+    'fair': {'id': 'Fair', 'deprecated': False},
+    'fbm': {'id': 'FBM', 'deprecated': False},
+    'fdk-aac': {'id': 'FDK-AAC', 'deprecated': False},
+    'ferguson-twofish': {'id': 'Ferguson-Twofish', 'deprecated': False},
+    'frameworx-1.0': {'id': 'Frameworx-1.0', 'deprecated': False},
+    'freebsd-doc': {'id': 'FreeBSD-DOC', 'deprecated': False},
+    'freeimage': {'id': 'FreeImage', 'deprecated': False},
+    'fsfap': {'id': 'FSFAP', 'deprecated': False},
+    'fsfap-no-warranty-disclaimer': {'id': 'FSFAP-no-warranty-disclaimer', 'deprecated': False},
+    'fsful': {'id': 'FSFUL', 'deprecated': False},
+    'fsfullr': {'id': 'FSFULLR', 'deprecated': False},
+    'fsfullrwd': {'id': 'FSFULLRWD', 'deprecated': False},
+    'ftl': {'id': 'FTL', 'deprecated': False},
+    'furuseth': {'id': 'Furuseth', 'deprecated': False},
+    'fwlw': {'id': 'fwlw', 'deprecated': False},
+    'gcr-docs': {'id': 'GCR-docs', 'deprecated': False},
+    'gd': {'id': 'GD', 'deprecated': False},
+    'gfdl-1.1': {'id': 'GFDL-1.1', 'deprecated': True},
+    'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only', 'deprecated': False},
+    'gfdl-1.1-invariants-or-later': {'id': 'GFDL-1.1-invariants-or-later', 'deprecated': False},
+    'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only', 'deprecated': False},
+    'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later', 'deprecated': False},
+    'gfdl-1.1-only': {'id': 'GFDL-1.1-only', 'deprecated': False},
+    'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later', 'deprecated': False},
+    'gfdl-1.2': {'id': 'GFDL-1.2', 'deprecated': True},
+    'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only', 'deprecated': False},
+    'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later', 'deprecated': False},
+    'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only', 'deprecated': False},
+    'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later', 'deprecated': False},
+    'gfdl-1.2-only': {'id': 'GFDL-1.2-only', 'deprecated': False},
+    'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later', 'deprecated': False},
+    'gfdl-1.3': {'id': 'GFDL-1.3', 'deprecated': True},
+    'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only', 'deprecated': False},
+    'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later', 'deprecated': False},
+    'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only', 'deprecated': False},
+    'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later', 'deprecated': False},
+    'gfdl-1.3-only': {'id': 'GFDL-1.3-only', 'deprecated': False},
+    'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later', 'deprecated': False},
+    'giftware': {'id': 'Giftware', 'deprecated': False},
+    'gl2ps': {'id': 'GL2PS', 'deprecated': False},
+    'glide': {'id': 'Glide', 'deprecated': False},
+    'glulxe': {'id': 'Glulxe', 'deprecated': False},
+    'glwtpl': {'id': 'GLWTPL', 'deprecated': False},
+    'gnuplot': {'id': 'gnuplot', 'deprecated': False},
+    'gpl-1.0': {'id': 'GPL-1.0', 'deprecated': True},
+    'gpl-1.0+': {'id': 'GPL-1.0+', 'deprecated': True},
+    'gpl-1.0-only': {'id': 'GPL-1.0-only', 'deprecated': False},
+    'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later', 'deprecated': False},
+    'gpl-2.0': {'id': 'GPL-2.0', 'deprecated': True},
+    'gpl-2.0+': {'id': 'GPL-2.0+', 'deprecated': True},
+    'gpl-2.0-only': {'id': 'GPL-2.0-only', 'deprecated': False},
+    'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later', 'deprecated': False},
+    'gpl-2.0-with-autoconf-exception': {'id': 'GPL-2.0-with-autoconf-exception', 'deprecated': True},
+    'gpl-2.0-with-bison-exception': {'id': 'GPL-2.0-with-bison-exception', 'deprecated': True},
+    'gpl-2.0-with-classpath-exception': {'id': 'GPL-2.0-with-classpath-exception', 'deprecated': True},
+    'gpl-2.0-with-font-exception': {'id': 'GPL-2.0-with-font-exception', 'deprecated': True},
+    'gpl-2.0-with-gcc-exception': {'id': 'GPL-2.0-with-GCC-exception', 'deprecated': True},
+    'gpl-3.0': {'id': 'GPL-3.0', 'deprecated': True},
+    'gpl-3.0+': {'id': 'GPL-3.0+', 'deprecated': True},
+    'gpl-3.0-only': {'id': 'GPL-3.0-only', 'deprecated': False},
+    'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later', 'deprecated': False},
+    'gpl-3.0-with-autoconf-exception': {'id': 'GPL-3.0-with-autoconf-exception', 'deprecated': True},
+    'gpl-3.0-with-gcc-exception': {'id': 'GPL-3.0-with-GCC-exception', 'deprecated': True},
+    'graphics-gems': {'id': 'Graphics-Gems', 'deprecated': False},
+    'gsoap-1.3b': {'id': 'gSOAP-1.3b', 'deprecated': False},
+    'gtkbook': {'id': 'gtkbook', 'deprecated': False},
+    'gutmann': {'id': 'Gutmann', 'deprecated': False},
+    'haskellreport': {'id': 'HaskellReport', 'deprecated': False},
+    'hdparm': {'id': 'hdparm', 'deprecated': False},
+    'hidapi': {'id': 'HIDAPI', 'deprecated': False},
+    'hippocratic-2.1': {'id': 'Hippocratic-2.1', 'deprecated': False},
+    'hp-1986': {'id': 'HP-1986', 'deprecated': False},
+    'hp-1989': {'id': 'HP-1989', 'deprecated': False},
+    'hpnd': {'id': 'HPND', 'deprecated': False},
+    'hpnd-dec': {'id': 'HPND-DEC', 'deprecated': False},
+    'hpnd-doc': {'id': 'HPND-doc', 'deprecated': False},
+    'hpnd-doc-sell': {'id': 'HPND-doc-sell', 'deprecated': False},
+    'hpnd-export-us': {'id': 'HPND-export-US', 'deprecated': False},
+    'hpnd-export-us-acknowledgement': {'id': 'HPND-export-US-acknowledgement', 'deprecated': False},
+    'hpnd-export-us-modify': {'id': 'HPND-export-US-modify', 'deprecated': False},
+    'hpnd-export2-us': {'id': 'HPND-export2-US', 'deprecated': False},
+    'hpnd-fenneberg-livingston': {'id': 'HPND-Fenneberg-Livingston', 'deprecated': False},
+    'hpnd-inria-imag': {'id': 'HPND-INRIA-IMAG', 'deprecated': False},
+    'hpnd-intel': {'id': 'HPND-Intel', 'deprecated': False},
+    'hpnd-kevlin-henney': {'id': 'HPND-Kevlin-Henney', 'deprecated': False},
+    'hpnd-markus-kuhn': {'id': 'HPND-Markus-Kuhn', 'deprecated': False},
+    'hpnd-merchantability-variant': {'id': 'HPND-merchantability-variant', 'deprecated': False},
+    'hpnd-mit-disclaimer': {'id': 'HPND-MIT-disclaimer', 'deprecated': False},
+    'hpnd-netrek': {'id': 'HPND-Netrek', 'deprecated': False},
+    'hpnd-pbmplus': {'id': 'HPND-Pbmplus', 'deprecated': False},
+    'hpnd-sell-mit-disclaimer-xserver': {'id': 'HPND-sell-MIT-disclaimer-xserver', 'deprecated': False},
+    'hpnd-sell-regexpr': {'id': 'HPND-sell-regexpr', 'deprecated': False},
+    'hpnd-sell-variant': {'id': 'HPND-sell-variant', 'deprecated': False},
+    'hpnd-sell-variant-mit-disclaimer': {'id': 'HPND-sell-variant-MIT-disclaimer', 'deprecated': False},
+    'hpnd-sell-variant-mit-disclaimer-rev': {'id': 'HPND-sell-variant-MIT-disclaimer-rev', 'deprecated': False},
+    'hpnd-uc': {'id': 'HPND-UC', 'deprecated': False},
+    'hpnd-uc-export-us': {'id': 'HPND-UC-export-US', 'deprecated': False},
+    'htmltidy': {'id': 'HTMLTIDY', 'deprecated': False},
+    'ibm-pibs': {'id': 'IBM-pibs', 'deprecated': False},
+    'icu': {'id': 'ICU', 'deprecated': False},
+    'iec-code-components-eula': {'id': 'IEC-Code-Components-EULA', 'deprecated': False},
+    'ijg': {'id': 'IJG', 'deprecated': False},
+    'ijg-short': {'id': 'IJG-short', 'deprecated': False},
+    'imagemagick': {'id': 'ImageMagick', 'deprecated': False},
+    'imatix': {'id': 'iMatix', 'deprecated': False},
+    'imlib2': {'id': 'Imlib2', 'deprecated': False},
+    'info-zip': {'id': 'Info-ZIP', 'deprecated': False},
+    'inner-net-2.0': {'id': 'Inner-Net-2.0', 'deprecated': False},
+    'intel': {'id': 'Intel', 'deprecated': False},
+    'intel-acpi': {'id': 'Intel-ACPI', 'deprecated': False},
+    'interbase-1.0': {'id': 'Interbase-1.0', 'deprecated': False},
+    'ipa': {'id': 'IPA', 'deprecated': False},
+    'ipl-1.0': {'id': 'IPL-1.0', 'deprecated': False},
+    'isc': {'id': 'ISC', 'deprecated': False},
+    'isc-veillard': {'id': 'ISC-Veillard', 'deprecated': False},
+    'jam': {'id': 'Jam', 'deprecated': False},
+    'jasper-2.0': {'id': 'JasPer-2.0', 'deprecated': False},
+    'jpl-image': {'id': 'JPL-image', 'deprecated': False},
+    'jpnic': {'id': 'JPNIC', 'deprecated': False},
+    'json': {'id': 'JSON', 'deprecated': False},
+    'kastrup': {'id': 'Kastrup', 'deprecated': False},
+    'kazlib': {'id': 'Kazlib', 'deprecated': False},
+    'knuth-ctan': {'id': 'Knuth-CTAN', 'deprecated': False},
+    'lal-1.2': {'id': 'LAL-1.2', 'deprecated': False},
+    'lal-1.3': {'id': 'LAL-1.3', 'deprecated': False},
+    'latex2e': {'id': 'Latex2e', 'deprecated': False},
+    'latex2e-translated-notice': {'id': 'Latex2e-translated-notice', 'deprecated': False},
+    'leptonica': {'id': 'Leptonica', 'deprecated': False},
+    'lgpl-2.0': {'id': 'LGPL-2.0', 'deprecated': True},
+    'lgpl-2.0+': {'id': 'LGPL-2.0+', 'deprecated': True},
+    'lgpl-2.0-only': {'id': 'LGPL-2.0-only', 'deprecated': False},
+    'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later', 'deprecated': False},
+    'lgpl-2.1': {'id': 'LGPL-2.1', 'deprecated': True},
+    'lgpl-2.1+': {'id': 'LGPL-2.1+', 'deprecated': True},
+    'lgpl-2.1-only': {'id': 'LGPL-2.1-only', 'deprecated': False},
+    'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later', 'deprecated': False},
+    'lgpl-3.0': {'id': 'LGPL-3.0', 'deprecated': True},
+    'lgpl-3.0+': {'id': 'LGPL-3.0+', 'deprecated': True},
+    'lgpl-3.0-only': {'id': 'LGPL-3.0-only', 'deprecated': False},
+    'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later', 'deprecated': False},
+    'lgpllr': {'id': 'LGPLLR', 'deprecated': False},
+    'libpng': {'id': 'Libpng', 'deprecated': False},
+    'libpng-2.0': {'id': 'libpng-2.0', 'deprecated': False},
+    'libselinux-1.0': {'id': 'libselinux-1.0', 'deprecated': False},
+    'libtiff': {'id': 'libtiff', 'deprecated': False},
+    'libutil-david-nugent': {'id': 'libutil-David-Nugent', 'deprecated': False},
+    'liliq-p-1.1': {'id': 'LiLiQ-P-1.1', 'deprecated': False},
+    'liliq-r-1.1': {'id': 'LiLiQ-R-1.1', 'deprecated': False},
+    'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1', 'deprecated': False},
+    'linux-man-pages-1-para': {'id': 'Linux-man-pages-1-para', 'deprecated': False},
+    'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft', 'deprecated': False},
+    'linux-man-pages-copyleft-2-para': {'id': 'Linux-man-pages-copyleft-2-para', 'deprecated': False},
+    'linux-man-pages-copyleft-var': {'id': 'Linux-man-pages-copyleft-var', 'deprecated': False},
+    'linux-openib': {'id': 'Linux-OpenIB', 'deprecated': False},
+    'loop': {'id': 'LOOP', 'deprecated': False},
+    'lpd-document': {'id': 'LPD-document', 'deprecated': False},
+    'lpl-1.0': {'id': 'LPL-1.0', 'deprecated': False},
+    'lpl-1.02': {'id': 'LPL-1.02', 'deprecated': False},
+    'lppl-1.0': {'id': 'LPPL-1.0', 'deprecated': False},
+    'lppl-1.1': {'id': 'LPPL-1.1', 'deprecated': False},
+    'lppl-1.2': {'id': 'LPPL-1.2', 'deprecated': False},
+    'lppl-1.3a': {'id': 'LPPL-1.3a', 'deprecated': False},
+    'lppl-1.3c': {'id': 'LPPL-1.3c', 'deprecated': False},
+    'lsof': {'id': 'lsof', 'deprecated': False},
+    'lucida-bitmap-fonts': {'id': 'Lucida-Bitmap-Fonts', 'deprecated': False},
+    'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20', 'deprecated': False},
+    'lzma-sdk-9.22': {'id': 'LZMA-SDK-9.22', 'deprecated': False},
+    'mackerras-3-clause': {'id': 'Mackerras-3-Clause', 'deprecated': False},
+    'mackerras-3-clause-acknowledgment': {'id': 'Mackerras-3-Clause-acknowledgment', 'deprecated': False},
+    'magaz': {'id': 'magaz', 'deprecated': False},
+    'mailprio': {'id': 'mailprio', 'deprecated': False},
+    'makeindex': {'id': 'MakeIndex', 'deprecated': False},
+    'martin-birgmeier': {'id': 'Martin-Birgmeier', 'deprecated': False},
+    'mcphee-slideshow': {'id': 'McPhee-slideshow', 'deprecated': False},
+    'metamail': {'id': 'metamail', 'deprecated': False},
+    'minpack': {'id': 'Minpack', 'deprecated': False},
+    'miros': {'id': 'MirOS', 'deprecated': False},
+    'mit': {'id': 'MIT', 'deprecated': False},
+    'mit-0': {'id': 'MIT-0', 'deprecated': False},
+    'mit-advertising': {'id': 'MIT-advertising', 'deprecated': False},
+    'mit-cmu': {'id': 'MIT-CMU', 'deprecated': False},
+    'mit-enna': {'id': 'MIT-enna', 'deprecated': False},
+    'mit-feh': {'id': 'MIT-feh', 'deprecated': False},
+    'mit-festival': {'id': 'MIT-Festival', 'deprecated': False},
+    'mit-khronos-old': {'id': 'MIT-Khronos-old', 'deprecated': False},
+    'mit-modern-variant': {'id': 'MIT-Modern-Variant', 'deprecated': False},
+    'mit-open-group': {'id': 'MIT-open-group', 'deprecated': False},
+    'mit-testregex': {'id': 'MIT-testregex', 'deprecated': False},
+    'mit-wu': {'id': 'MIT-Wu', 'deprecated': False},
+    'mitnfa': {'id': 'MITNFA', 'deprecated': False},
+    'mmixware': {'id': 'MMIXware', 'deprecated': False},
+    'motosoto': {'id': 'Motosoto', 'deprecated': False},
+    'mpeg-ssg': {'id': 'MPEG-SSG', 'deprecated': False},
+    'mpi-permissive': {'id': 'mpi-permissive', 'deprecated': False},
+    'mpich2': {'id': 'mpich2', 'deprecated': False},
+    'mpl-1.0': {'id': 'MPL-1.0', 'deprecated': False},
+    'mpl-1.1': {'id': 'MPL-1.1', 'deprecated': False},
+    'mpl-2.0': {'id': 'MPL-2.0', 'deprecated': False},
+    'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception', 'deprecated': False},
+    'mplus': {'id': 'mplus', 'deprecated': False},
+    'ms-lpl': {'id': 'MS-LPL', 'deprecated': False},
+    'ms-pl': {'id': 'MS-PL', 'deprecated': False},
+    'ms-rl': {'id': 'MS-RL', 'deprecated': False},
+    'mtll': {'id': 'MTLL', 'deprecated': False},
+    'mulanpsl-1.0': {'id': 'MulanPSL-1.0', 'deprecated': False},
+    'mulanpsl-2.0': {'id': 'MulanPSL-2.0', 'deprecated': False},
+    'multics': {'id': 'Multics', 'deprecated': False},
+    'mup': {'id': 'Mup', 'deprecated': False},
+    'naist-2003': {'id': 'NAIST-2003', 'deprecated': False},
+    'nasa-1.3': {'id': 'NASA-1.3', 'deprecated': False},
+    'naumen': {'id': 'Naumen', 'deprecated': False},
+    'nbpl-1.0': {'id': 'NBPL-1.0', 'deprecated': False},
+    'ncbi-pd': {'id': 'NCBI-PD', 'deprecated': False},
+    'ncgl-uk-2.0': {'id': 'NCGL-UK-2.0', 'deprecated': False},
+    'ncl': {'id': 'NCL', 'deprecated': False},
+    'ncsa': {'id': 'NCSA', 'deprecated': False},
+    'net-snmp': {'id': 'Net-SNMP', 'deprecated': True},
+    'netcdf': {'id': 'NetCDF', 'deprecated': False},
+    'newsletr': {'id': 'Newsletr', 'deprecated': False},
+    'ngpl': {'id': 'NGPL', 'deprecated': False},
+    'nicta-1.0': {'id': 'NICTA-1.0', 'deprecated': False},
+    'nist-pd': {'id': 'NIST-PD', 'deprecated': False},
+    'nist-pd-fallback': {'id': 'NIST-PD-fallback', 'deprecated': False},
+    'nist-software': {'id': 'NIST-Software', 'deprecated': False},
+    'nlod-1.0': {'id': 'NLOD-1.0', 'deprecated': False},
+    'nlod-2.0': {'id': 'NLOD-2.0', 'deprecated': False},
+    'nlpl': {'id': 'NLPL', 'deprecated': False},
+    'nokia': {'id': 'Nokia', 'deprecated': False},
+    'nosl': {'id': 'NOSL', 'deprecated': False},
+    'noweb': {'id': 'Noweb', 'deprecated': False},
+    'npl-1.0': {'id': 'NPL-1.0', 'deprecated': False},
+    'npl-1.1': {'id': 'NPL-1.1', 'deprecated': False},
+    'nposl-3.0': {'id': 'NPOSL-3.0', 'deprecated': False},
+    'nrl': {'id': 'NRL', 'deprecated': False},
+    'ntp': {'id': 'NTP', 'deprecated': False},
+    'ntp-0': {'id': 'NTP-0', 'deprecated': False},
+    'nunit': {'id': 'Nunit', 'deprecated': True},
+    'o-uda-1.0': {'id': 'O-UDA-1.0', 'deprecated': False},
+    'oar': {'id': 'OAR', 'deprecated': False},
+    'occt-pl': {'id': 'OCCT-PL', 'deprecated': False},
+    'oclc-2.0': {'id': 'OCLC-2.0', 'deprecated': False},
+    'odbl-1.0': {'id': 'ODbL-1.0', 'deprecated': False},
+    'odc-by-1.0': {'id': 'ODC-By-1.0', 'deprecated': False},
+    'offis': {'id': 'OFFIS', 'deprecated': False},
+    'ofl-1.0': {'id': 'OFL-1.0', 'deprecated': False},
+    'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN', 'deprecated': False},
+    'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN', 'deprecated': False},
+    'ofl-1.1': {'id': 'OFL-1.1', 'deprecated': False},
+    'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN', 'deprecated': False},
+    'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN', 'deprecated': False},
+    'ogc-1.0': {'id': 'OGC-1.0', 'deprecated': False},
+    'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0', 'deprecated': False},
+    'ogl-canada-2.0': {'id': 'OGL-Canada-2.0', 'deprecated': False},
+    'ogl-uk-1.0': {'id': 'OGL-UK-1.0', 'deprecated': False},
+    'ogl-uk-2.0': {'id': 'OGL-UK-2.0', 'deprecated': False},
+    'ogl-uk-3.0': {'id': 'OGL-UK-3.0', 'deprecated': False},
+    'ogtsl': {'id': 'OGTSL', 'deprecated': False},
+    'oldap-1.1': {'id': 'OLDAP-1.1', 'deprecated': False},
+    'oldap-1.2': {'id': 'OLDAP-1.2', 'deprecated': False},
+    'oldap-1.3': {'id': 'OLDAP-1.3', 'deprecated': False},
+    'oldap-1.4': {'id': 'OLDAP-1.4', 'deprecated': False},
+    'oldap-2.0': {'id': 'OLDAP-2.0', 'deprecated': False},
+    'oldap-2.0.1': {'id': 'OLDAP-2.0.1', 'deprecated': False},
+    'oldap-2.1': {'id': 'OLDAP-2.1', 'deprecated': False},
+    'oldap-2.2': {'id': 'OLDAP-2.2', 'deprecated': False},
+    'oldap-2.2.1': {'id': 'OLDAP-2.2.1', 'deprecated': False},
+    'oldap-2.2.2': {'id': 'OLDAP-2.2.2', 'deprecated': False},
+    'oldap-2.3': {'id': 'OLDAP-2.3', 'deprecated': False},
+    'oldap-2.4': {'id': 'OLDAP-2.4', 'deprecated': False},
+    'oldap-2.5': {'id': 'OLDAP-2.5', 'deprecated': False},
+    'oldap-2.6': {'id': 'OLDAP-2.6', 'deprecated': False},
+    'oldap-2.7': {'id': 'OLDAP-2.7', 'deprecated': False},
+    'oldap-2.8': {'id': 'OLDAP-2.8', 'deprecated': False},
+    'olfl-1.3': {'id': 'OLFL-1.3', 'deprecated': False},
+    'oml': {'id': 'OML', 'deprecated': False},
+    'openpbs-2.3': {'id': 'OpenPBS-2.3', 'deprecated': False},
+    'openssl': {'id': 'OpenSSL', 'deprecated': False},
+    'openssl-standalone': {'id': 'OpenSSL-standalone', 'deprecated': False},
+    'openvision': {'id': 'OpenVision', 'deprecated': False},
+    'opl-1.0': {'id': 'OPL-1.0', 'deprecated': False},
+    'opl-uk-3.0': {'id': 'OPL-UK-3.0', 'deprecated': False},
+    'opubl-1.0': {'id': 'OPUBL-1.0', 'deprecated': False},
+    'oset-pl-2.1': {'id': 'OSET-PL-2.1', 'deprecated': False},
+    'osl-1.0': {'id': 'OSL-1.0', 'deprecated': False},
+    'osl-1.1': {'id': 'OSL-1.1', 'deprecated': False},
+    'osl-2.0': {'id': 'OSL-2.0', 'deprecated': False},
+    'osl-2.1': {'id': 'OSL-2.1', 'deprecated': False},
+    'osl-3.0': {'id': 'OSL-3.0', 'deprecated': False},
+    'padl': {'id': 'PADL', 'deprecated': False},
+    'parity-6.0.0': {'id': 'Parity-6.0.0', 'deprecated': False},
+    'parity-7.0.0': {'id': 'Parity-7.0.0', 'deprecated': False},
+    'pddl-1.0': {'id': 'PDDL-1.0', 'deprecated': False},
+    'php-3.0': {'id': 'PHP-3.0', 'deprecated': False},
+    'php-3.01': {'id': 'PHP-3.01', 'deprecated': False},
+    'pixar': {'id': 'Pixar', 'deprecated': False},
+    'pkgconf': {'id': 'pkgconf', 'deprecated': False},
+    'plexus': {'id': 'Plexus', 'deprecated': False},
+    'pnmstitch': {'id': 'pnmstitch', 'deprecated': False},
+    'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0', 'deprecated': False},
+    'polyform-small-business-1.0.0': {'id': 'PolyForm-Small-Business-1.0.0', 'deprecated': False},
+    'postgresql': {'id': 'PostgreSQL', 'deprecated': False},
+    'ppl': {'id': 'PPL', 'deprecated': False},
+    'psf-2.0': {'id': 'PSF-2.0', 'deprecated': False},
+    'psfrag': {'id': 'psfrag', 'deprecated': False},
+    'psutils': {'id': 'psutils', 'deprecated': False},
+    'python-2.0': {'id': 'Python-2.0', 'deprecated': False},
+    'python-2.0.1': {'id': 'Python-2.0.1', 'deprecated': False},
+    'python-ldap': {'id': 'python-ldap', 'deprecated': False},
+    'qhull': {'id': 'Qhull', 'deprecated': False},
+    'qpl-1.0': {'id': 'QPL-1.0', 'deprecated': False},
+    'qpl-1.0-inria-2004': {'id': 'QPL-1.0-INRIA-2004', 'deprecated': False},
+    'radvd': {'id': 'radvd', 'deprecated': False},
+    'rdisc': {'id': 'Rdisc', 'deprecated': False},
+    'rhecos-1.1': {'id': 'RHeCos-1.1', 'deprecated': False},
+    'rpl-1.1': {'id': 'RPL-1.1', 'deprecated': False},
+    'rpl-1.5': {'id': 'RPL-1.5', 'deprecated': False},
+    'rpsl-1.0': {'id': 'RPSL-1.0', 'deprecated': False},
+    'rsa-md': {'id': 'RSA-MD', 'deprecated': False},
+    'rscpl': {'id': 'RSCPL', 'deprecated': False},
+    'ruby': {'id': 'Ruby', 'deprecated': False},
+    'ruby-pty': {'id': 'Ruby-pty', 'deprecated': False},
+    'sax-pd': {'id': 'SAX-PD', 'deprecated': False},
+    'sax-pd-2.0': {'id': 'SAX-PD-2.0', 'deprecated': False},
+    'saxpath': {'id': 'Saxpath', 'deprecated': False},
+    'scea': {'id': 'SCEA', 'deprecated': False},
+    'schemereport': {'id': 'SchemeReport', 'deprecated': False},
+    'sendmail': {'id': 'Sendmail', 'deprecated': False},
+    'sendmail-8.23': {'id': 'Sendmail-8.23', 'deprecated': False},
+    'sgi-b-1.0': {'id': 'SGI-B-1.0', 'deprecated': False},
+    'sgi-b-1.1': {'id': 'SGI-B-1.1', 'deprecated': False},
+    'sgi-b-2.0': {'id': 'SGI-B-2.0', 'deprecated': False},
+    'sgi-opengl': {'id': 'SGI-OpenGL', 'deprecated': False},
+    'sgp4': {'id': 'SGP4', 'deprecated': False},
+    'shl-0.5': {'id': 'SHL-0.5', 'deprecated': False},
+    'shl-0.51': {'id': 'SHL-0.51', 'deprecated': False},
+    'simpl-2.0': {'id': 'SimPL-2.0', 'deprecated': False},
+    'sissl': {'id': 'SISSL', 'deprecated': False},
+    'sissl-1.2': {'id': 'SISSL-1.2', 'deprecated': False},
+    'sl': {'id': 'SL', 'deprecated': False},
+    'sleepycat': {'id': 'Sleepycat', 'deprecated': False},
+    'smlnj': {'id': 'SMLNJ', 'deprecated': False},
+    'smppl': {'id': 'SMPPL', 'deprecated': False},
+    'snia': {'id': 'SNIA', 'deprecated': False},
+    'snprintf': {'id': 'snprintf', 'deprecated': False},
+    'softsurfer': {'id': 'softSurfer', 'deprecated': False},
+    'soundex': {'id': 'Soundex', 'deprecated': False},
+    'spencer-86': {'id': 'Spencer-86', 'deprecated': False},
+    'spencer-94': {'id': 'Spencer-94', 'deprecated': False},
+    'spencer-99': {'id': 'Spencer-99', 'deprecated': False},
+    'spl-1.0': {'id': 'SPL-1.0', 'deprecated': False},
+    'ssh-keyscan': {'id': 'ssh-keyscan', 'deprecated': False},
+    'ssh-openssh': {'id': 'SSH-OpenSSH', 'deprecated': False},
+    'ssh-short': {'id': 'SSH-short', 'deprecated': False},
+    'ssleay-standalone': {'id': 'SSLeay-standalone', 'deprecated': False},
+    'sspl-1.0': {'id': 'SSPL-1.0', 'deprecated': False},
+    'standardml-nj': {'id': 'StandardML-NJ', 'deprecated': True},
+    'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3', 'deprecated': False},
+    'sun-ppp': {'id': 'Sun-PPP', 'deprecated': False},
+    'sun-ppp-2000': {'id': 'Sun-PPP-2000', 'deprecated': False},
+    'sunpro': {'id': 'SunPro', 'deprecated': False},
+    'swl': {'id': 'SWL', 'deprecated': False},
+    'swrule': {'id': 'swrule', 'deprecated': False},
+    'symlinks': {'id': 'Symlinks', 'deprecated': False},
+    'tapr-ohl-1.0': {'id': 'TAPR-OHL-1.0', 'deprecated': False},
+    'tcl': {'id': 'TCL', 'deprecated': False},
+    'tcp-wrappers': {'id': 'TCP-wrappers', 'deprecated': False},
+    'termreadkey': {'id': 'TermReadKey', 'deprecated': False},
+    'tgppl-1.0': {'id': 'TGPPL-1.0', 'deprecated': False},
+    'threeparttable': {'id': 'threeparttable', 'deprecated': False},
+    'tmate': {'id': 'TMate', 'deprecated': False},
+    'torque-1.1': {'id': 'TORQUE-1.1', 'deprecated': False},
+    'tosl': {'id': 'TOSL', 'deprecated': False},
+    'tpdl': {'id': 'TPDL', 'deprecated': False},
+    'tpl-1.0': {'id': 'TPL-1.0', 'deprecated': False},
+    'ttwl': {'id': 'TTWL', 'deprecated': False},
+    'ttyp0': {'id': 'TTYP0', 'deprecated': False},
+    'tu-berlin-1.0': {'id': 'TU-Berlin-1.0', 'deprecated': False},
+    'tu-berlin-2.0': {'id': 'TU-Berlin-2.0', 'deprecated': False},
+    'ubuntu-font-1.0': {'id': 'Ubuntu-font-1.0', 'deprecated': False},
+    'ucar': {'id': 'UCAR', 'deprecated': False},
+    'ucl-1.0': {'id': 'UCL-1.0', 'deprecated': False},
+    'ulem': {'id': 'ulem', 'deprecated': False},
+    'umich-merit': {'id': 'UMich-Merit', 'deprecated': False},
+    'unicode-3.0': {'id': 'Unicode-3.0', 'deprecated': False},
+    'unicode-dfs-2015': {'id': 'Unicode-DFS-2015', 'deprecated': False},
+    'unicode-dfs-2016': {'id': 'Unicode-DFS-2016', 'deprecated': False},
+    'unicode-tou': {'id': 'Unicode-TOU', 'deprecated': False},
+    'unixcrypt': {'id': 'UnixCrypt', 'deprecated': False},
+    'unlicense': {'id': 'Unlicense', 'deprecated': False},
+    'upl-1.0': {'id': 'UPL-1.0', 'deprecated': False},
+    'urt-rle': {'id': 'URT-RLE', 'deprecated': False},
+    'vim': {'id': 'Vim', 'deprecated': False},
+    'vostrom': {'id': 'VOSTROM', 'deprecated': False},
+    'vsl-1.0': {'id': 'VSL-1.0', 'deprecated': False},
+    'w3c': {'id': 'W3C', 'deprecated': False},
+    'w3c-19980720': {'id': 'W3C-19980720', 'deprecated': False},
+    'w3c-20150513': {'id': 'W3C-20150513', 'deprecated': False},
+    'w3m': {'id': 'w3m', 'deprecated': False},
+    'watcom-1.0': {'id': 'Watcom-1.0', 'deprecated': False},
+    'widget-workshop': {'id': 'Widget-Workshop', 'deprecated': False},
+    'wsuipa': {'id': 'Wsuipa', 'deprecated': False},
+    'wtfpl': {'id': 'WTFPL', 'deprecated': False},
+    'wxwindows': {'id': 'wxWindows', 'deprecated': True},
+    'x11': {'id': 'X11', 'deprecated': False},
+    'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant', 'deprecated': False},
+    'x11-swapped': {'id': 'X11-swapped', 'deprecated': False},
+    'xdebug-1.03': {'id': 'Xdebug-1.03', 'deprecated': False},
+    'xerox': {'id': 'Xerox', 'deprecated': False},
+    'xfig': {'id': 'Xfig', 'deprecated': False},
+    'xfree86-1.1': {'id': 'XFree86-1.1', 'deprecated': False},
+    'xinetd': {'id': 'xinetd', 'deprecated': False},
+    'xkeyboard-config-zinoviev': {'id': 'xkeyboard-config-Zinoviev', 'deprecated': False},
+    'xlock': {'id': 'xlock', 'deprecated': False},
+    'xnet': {'id': 'Xnet', 'deprecated': False},
+    'xpp': {'id': 'xpp', 'deprecated': False},
+    'xskat': {'id': 'XSkat', 'deprecated': False},
+    'xzoom': {'id': 'xzoom', 'deprecated': False},
+    'ypl-1.0': {'id': 'YPL-1.0', 'deprecated': False},
+    'ypl-1.1': {'id': 'YPL-1.1', 'deprecated': False},
+    'zed': {'id': 'Zed', 'deprecated': False},
+    'zeeff': {'id': 'Zeeff', 'deprecated': False},
+    'zend-2.0': {'id': 'Zend-2.0', 'deprecated': False},
+    'zimbra-1.3': {'id': 'Zimbra-1.3', 'deprecated': False},
+    'zimbra-1.4': {'id': 'Zimbra-1.4', 'deprecated': False},
+    'zlib': {'id': 'Zlib', 'deprecated': False},
+    'zlib-acknowledgement': {'id': 'zlib-acknowledgement', 'deprecated': False},
+    'zpl-1.1': {'id': 'ZPL-1.1', 'deprecated': False},
+    'zpl-2.0': {'id': 'ZPL-2.0', 'deprecated': False},
+    'zpl-2.1': {'id': 'ZPL-2.1', 'deprecated': False},
+}
+
+EXCEPTIONS: dict[str, SPDXException] = {
+    '389-exception': {'id': '389-exception', 'deprecated': False},
+    'asterisk-exception': {'id': 'Asterisk-exception', 'deprecated': False},
+    'asterisk-linking-protocols-exception': {'id': 'Asterisk-linking-protocols-exception', 'deprecated': False},
+    'autoconf-exception-2.0': {'id': 'Autoconf-exception-2.0', 'deprecated': False},
+    'autoconf-exception-3.0': {'id': 'Autoconf-exception-3.0', 'deprecated': False},
+    'autoconf-exception-generic': {'id': 'Autoconf-exception-generic', 'deprecated': False},
+    'autoconf-exception-generic-3.0': {'id': 'Autoconf-exception-generic-3.0', 'deprecated': False},
+    'autoconf-exception-macro': {'id': 'Autoconf-exception-macro', 'deprecated': False},
+    'bison-exception-1.24': {'id': 'Bison-exception-1.24', 'deprecated': False},
+    'bison-exception-2.2': {'id': 'Bison-exception-2.2', 'deprecated': False},
+    'bootloader-exception': {'id': 'Bootloader-exception', 'deprecated': False},
+    'classpath-exception-2.0': {'id': 'Classpath-exception-2.0', 'deprecated': False},
+    'clisp-exception-2.0': {'id': 'CLISP-exception-2.0', 'deprecated': False},
+    'cryptsetup-openssl-exception': {'id': 'cryptsetup-OpenSSL-exception', 'deprecated': False},
+    'digirule-foss-exception': {'id': 'DigiRule-FOSS-exception', 'deprecated': False},
+    'ecos-exception-2.0': {'id': 'eCos-exception-2.0', 'deprecated': False},
+    'erlang-otp-linking-exception': {'id': 'erlang-otp-linking-exception', 'deprecated': False},
+    'fawkes-runtime-exception': {'id': 'Fawkes-Runtime-exception', 'deprecated': False},
+    'fltk-exception': {'id': 'FLTK-exception', 'deprecated': False},
+    'fmt-exception': {'id': 'fmt-exception', 'deprecated': False},
+    'font-exception-2.0': {'id': 'Font-exception-2.0', 'deprecated': False},
+    'freertos-exception-2.0': {'id': 'freertos-exception-2.0', 'deprecated': False},
+    'gcc-exception-2.0': {'id': 'GCC-exception-2.0', 'deprecated': False},
+    'gcc-exception-2.0-note': {'id': 'GCC-exception-2.0-note', 'deprecated': False},
+    'gcc-exception-3.1': {'id': 'GCC-exception-3.1', 'deprecated': False},
+    'gmsh-exception': {'id': 'Gmsh-exception', 'deprecated': False},
+    'gnat-exception': {'id': 'GNAT-exception', 'deprecated': False},
+    'gnome-examples-exception': {'id': 'GNOME-examples-exception', 'deprecated': False},
+    'gnu-compiler-exception': {'id': 'GNU-compiler-exception', 'deprecated': False},
+    'gnu-javamail-exception': {'id': 'gnu-javamail-exception', 'deprecated': False},
+    'gpl-3.0-interface-exception': {'id': 'GPL-3.0-interface-exception', 'deprecated': False},
+    'gpl-3.0-linking-exception': {'id': 'GPL-3.0-linking-exception', 'deprecated': False},
+    'gpl-3.0-linking-source-exception': {'id': 'GPL-3.0-linking-source-exception', 'deprecated': False},
+    'gpl-cc-1.0': {'id': 'GPL-CC-1.0', 'deprecated': False},
+    'gstreamer-exception-2005': {'id': 'GStreamer-exception-2005', 'deprecated': False},
+    'gstreamer-exception-2008': {'id': 'GStreamer-exception-2008', 'deprecated': False},
+    'i2p-gpl-java-exception': {'id': 'i2p-gpl-java-exception', 'deprecated': False},
+    'kicad-libraries-exception': {'id': 'KiCad-libraries-exception', 'deprecated': False},
+    'lgpl-3.0-linking-exception': {'id': 'LGPL-3.0-linking-exception', 'deprecated': False},
+    'libpri-openh323-exception': {'id': 'libpri-OpenH323-exception', 'deprecated': False},
+    'libtool-exception': {'id': 'Libtool-exception', 'deprecated': False},
+    'linux-syscall-note': {'id': 'Linux-syscall-note', 'deprecated': False},
+    'llgpl': {'id': 'LLGPL', 'deprecated': False},
+    'llvm-exception': {'id': 'LLVM-exception', 'deprecated': False},
+    'lzma-exception': {'id': 'LZMA-exception', 'deprecated': False},
+    'mif-exception': {'id': 'mif-exception', 'deprecated': False},
+    'nokia-qt-exception-1.1': {'id': 'Nokia-Qt-exception-1.1', 'deprecated': True},
+    'ocaml-lgpl-linking-exception': {'id': 'OCaml-LGPL-linking-exception', 'deprecated': False},
+    'occt-exception-1.0': {'id': 'OCCT-exception-1.0', 'deprecated': False},
+    'openjdk-assembly-exception-1.0': {'id': 'OpenJDK-assembly-exception-1.0', 'deprecated': False},
+    'openvpn-openssl-exception': {'id': 'openvpn-openssl-exception', 'deprecated': False},
+    'pcre2-exception': {'id': 'PCRE2-exception', 'deprecated': False},
+    'ps-or-pdf-font-exception-20170817': {'id': 'PS-or-PDF-font-exception-20170817', 'deprecated': False},
+    'qpl-1.0-inria-2004-exception': {'id': 'QPL-1.0-INRIA-2004-exception', 'deprecated': False},
+    'qt-gpl-exception-1.0': {'id': 'Qt-GPL-exception-1.0', 'deprecated': False},
+    'qt-lgpl-exception-1.1': {'id': 'Qt-LGPL-exception-1.1', 'deprecated': False},
+    'qwt-exception-1.0': {'id': 'Qwt-exception-1.0', 'deprecated': False},
+    'romic-exception': {'id': 'romic-exception', 'deprecated': False},
+    'rrdtool-floss-exception-2.0': {'id': 'RRDtool-FLOSS-exception-2.0', 'deprecated': False},
+    'sane-exception': {'id': 'SANE-exception', 'deprecated': False},
+    'shl-2.0': {'id': 'SHL-2.0', 'deprecated': False},
+    'shl-2.1': {'id': 'SHL-2.1', 'deprecated': False},
+    'stunnel-exception': {'id': 'stunnel-exception', 'deprecated': False},
+    'swi-exception': {'id': 'SWI-exception', 'deprecated': False},
+    'swift-exception': {'id': 'Swift-exception', 'deprecated': False},
+    'texinfo-exception': {'id': 'Texinfo-exception', 'deprecated': False},
+    'u-boot-exception-2.0': {'id': 'u-boot-exception-2.0', 'deprecated': False},
+    'ubdl-exception': {'id': 'UBDL-exception', 'deprecated': False},
+    'universal-foss-exception-1.0': {'id': 'Universal-FOSS-exception-1.0', 'deprecated': False},
+    'vsftpd-openssl-exception': {'id': 'vsftpd-openssl-exception', 'deprecated': False},
+    'wxwindows-exception-3.1': {'id': 'WxWindows-exception-3.1', 'deprecated': False},
+    'x11vnc-openssl-exception': {'id': 'x11vnc-openssl-exception', 'deprecated': False},
+}
diff --git a/setuptools/_vendor/packaging/markers.py b/setuptools/_vendor/packaging/markers.py
index 7ac7bb69a5..fb7f49cf8c 100644
--- a/setuptools/_vendor/packaging/markers.py
+++ b/setuptools/_vendor/packaging/markers.py
@@ -18,9 +18,9 @@
 
 __all__ = [
     "InvalidMarker",
+    "Marker",
     "UndefinedComparison",
     "UndefinedEnvironmentName",
-    "Marker",
     "default_environment",
 ]
 
@@ -232,7 +232,7 @@ def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool:
 
 
 def format_full_version(info: sys._version_info) -> str:
-    version = "{0.major}.{0.minor}.{0.micro}".format(info)
+    version = f"{info.major}.{info.minor}.{info.micro}"
     kind = info.releaselevel
     if kind != "final":
         version += kind[0] + str(info.serial)
@@ -309,12 +309,6 @@ def evaluate(self, environment: dict[str, str] | None = None) -> bool:
         """
         current_environment = cast("dict[str, str]", default_environment())
         current_environment["extra"] = ""
-        # Work around platform.python_version() returning something that is not PEP 440
-        # compliant for non-tagged Python builds. We preserve default_environment()'s
-        # behavior of returning platform.python_version() verbatim, and leave it to the
-        # caller to provide a syntactically valid version if they want to override it.
-        if current_environment["python_full_version"].endswith("+"):
-            current_environment["python_full_version"] += "local"
         if environment is not None:
             current_environment.update(environment)
             # The API used to allow setting extra to None. We need to handle this
@@ -322,4 +316,16 @@ def evaluate(self, environment: dict[str, str] | None = None) -> bool:
             if current_environment["extra"] is None:
                 current_environment["extra"] = ""
 
-        return _evaluate_markers(self._markers, current_environment)
+        return _evaluate_markers(
+            self._markers, _repair_python_full_version(current_environment)
+        )
+
+
+def _repair_python_full_version(env: dict[str, str]) -> dict[str, str]:
+    """
+    Work around platform.python_version() returning something that is not PEP 440
+    compliant for non-tagged Python builds.
+    """
+    if env["python_full_version"].endswith("+"):
+        env["python_full_version"] += "local"
+    return env
diff --git a/setuptools/_vendor/packaging/metadata.py b/setuptools/_vendor/packaging/metadata.py
index eb8dc844d2..721f411cfc 100644
--- a/setuptools/_vendor/packaging/metadata.py
+++ b/setuptools/_vendor/packaging/metadata.py
@@ -5,6 +5,8 @@
 import email.message
 import email.parser
 import email.policy
+import pathlib
+import sys
 import typing
 from typing import (
     Any,
@@ -15,15 +17,16 @@
     cast,
 )
 
-from . import requirements, specifiers, utils
+from . import licenses, requirements, specifiers, utils
 from . import version as version_module
+from .licenses import NormalizedLicenseExpression
 
 T = typing.TypeVar("T")
 
 
-try:
-    ExceptionGroup
-except NameError:  # pragma: no cover
+if sys.version_info >= (3, 11):  # pragma: no cover
+    ExceptionGroup = ExceptionGroup
+else:  # pragma: no cover
 
     class ExceptionGroup(Exception):
         """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
@@ -42,9 +45,6 @@ def __init__(self, message: str, exceptions: list[Exception]) -> None:
         def __repr__(self) -> str:
             return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
 
-else:  # pragma: no cover
-    ExceptionGroup = ExceptionGroup
-
 
 class InvalidMetadata(ValueError):
     """A metadata field contains invalid data."""
@@ -128,6 +128,10 @@ class RawMetadata(TypedDict, total=False):
     # No new fields were added in PEP 685, just some edge cases were
     # tightened up to provide better interoperability.
 
+    # Metadata 2.4 - PEP 639
+    license_expression: str
+    license_files: list[str]
+
 
 _STRING_FIELDS = {
     "author",
@@ -137,6 +141,7 @@ class RawMetadata(TypedDict, total=False):
     "download_url",
     "home_page",
     "license",
+    "license_expression",
     "maintainer",
     "maintainer_email",
     "metadata_version",
@@ -149,6 +154,7 @@ class RawMetadata(TypedDict, total=False):
 _LIST_FIELDS = {
     "classifiers",
     "dynamic",
+    "license_files",
     "obsoletes",
     "obsoletes_dist",
     "platforms",
@@ -167,7 +173,7 @@ class RawMetadata(TypedDict, total=False):
 
 
 def _parse_keywords(data: str) -> list[str]:
-    """Split a string of comma-separate keyboards into a list of keywords."""
+    """Split a string of comma-separated keywords into a list of keywords."""
     return [k.strip() for k in data.split(",")]
 
 
@@ -216,16 +222,18 @@ def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
     # If our source is a str, then our caller has managed encodings for us,
     # and we don't need to deal with it.
     if isinstance(source, str):
-        payload: str = msg.get_payload()
+        payload = msg.get_payload()
+        assert isinstance(payload, str)
         return payload
     # If our source is a bytes, then we're managing the encoding and we need
     # to deal with it.
     else:
-        bpayload: bytes = msg.get_payload(decode=True)
+        bpayload = msg.get_payload(decode=True)
+        assert isinstance(bpayload, bytes)
         try:
             return bpayload.decode("utf8", "strict")
-        except UnicodeDecodeError:
-            raise ValueError("payload in an invalid encoding")
+        except UnicodeDecodeError as exc:
+            raise ValueError("payload in an invalid encoding") from exc
 
 
 # The various parse_FORMAT functions here are intended to be as lenient as
@@ -251,6 +259,8 @@ def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
     "home-page": "home_page",
     "keywords": "keywords",
     "license": "license",
+    "license-expression": "license_expression",
+    "license-file": "license_files",
     "maintainer": "maintainer",
     "maintainer-email": "maintainer_email",
     "metadata-version": "metadata_version",
@@ -426,7 +436,7 @@ def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
         payload = _get_payload(parsed, data)
     except ValueError:
         unparsed.setdefault("description", []).append(
-            parsed.get_payload(decode=isinstance(data, bytes))
+            parsed.get_payload(decode=isinstance(data, bytes))  # type: ignore[call-overload]
         )
     else:
         if payload:
@@ -453,8 +463,8 @@ def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
 
 
 # Keep the two values in sync.
-_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
-_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
+_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
 
 _REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
 
@@ -535,7 +545,7 @@ def _process_name(self, value: str) -> str:
         except utils.InvalidName as exc:
             raise self._invalid_metadata(
                 f"{value!r} is invalid for {{field}}", cause=exc
-            )
+            ) from exc
         else:
             return value
 
@@ -547,7 +557,7 @@ def _process_version(self, value: str) -> version_module.Version:
         except version_module.InvalidVersion as exc:
             raise self._invalid_metadata(
                 f"{value!r} is invalid for {{field}}", cause=exc
-            )
+            ) from exc
 
     def _process_summary(self, value: str) -> str:
         """Check the field contains no newlines."""
@@ -591,10 +601,12 @@ def _process_dynamic(self, value: list[str]) -> list[str]:
         for dynamic_field in map(str.lower, value):
             if dynamic_field in {"name", "version", "metadata-version"}:
                 raise self._invalid_metadata(
-                    f"{value!r} is not allowed as a dynamic field"
+                    f"{dynamic_field!r} is not allowed as a dynamic field"
                 )
             elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
-                raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
+                raise self._invalid_metadata(
+                    f"{dynamic_field!r} is not a valid dynamic field"
+                )
         return list(map(str.lower, value))
 
     def _process_provides_extra(
@@ -608,7 +620,7 @@ def _process_provides_extra(
         except utils.InvalidName as exc:
             raise self._invalid_metadata(
                 f"{name!r} is invalid for {{field}}", cause=exc
-            )
+            ) from exc
         else:
             return normalized_names
 
@@ -618,7 +630,7 @@ def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
         except specifiers.InvalidSpecifier as exc:
             raise self._invalid_metadata(
                 f"{value!r} is invalid for {{field}}", cause=exc
-            )
+            ) from exc
 
     def _process_requires_dist(
         self,
@@ -629,10 +641,49 @@ def _process_requires_dist(
             for req in value:
                 reqs.append(requirements.Requirement(req))
         except requirements.InvalidRequirement as exc:
-            raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
+            raise self._invalid_metadata(
+                f"{req!r} is invalid for {{field}}", cause=exc
+            ) from exc
         else:
             return reqs
 
+    def _process_license_expression(
+        self, value: str
+    ) -> NormalizedLicenseExpression | None:
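+        # Illustrative (assumes packaging.licenses semantics): "mit OR apache-2.0"
+        # canonicalizes to "MIT OR Apache-2.0"; an invalid expression raises a
+        # ValueError, converted to InvalidMetadata below.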
+        try:
+            return licenses.canonicalize_license_expression(value)
+        except ValueError as exc:
+            raise self._invalid_metadata(
+                f"{value!r} is invalid for {{field}}", cause=exc
+            ) from exc
+
+    def _process_license_files(self, value: list[str]) -> list[str]:
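+        # Illustrative checks (assumed examples, not upstream doctests):
+        # "LICENSES/MIT.txt" passes; "../LICENSE", "LICENSES/*", "/etc/LICENSE",
+        # and a backslash path like "docs\LICENSE" are each rejected below.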
+        paths = []
+        for path in value:
+            if ".." in path:
+                raise self._invalid_metadata(
+                    f"{path!r} is invalid for {{field}}, "
+                    "parent directory indicators are not allowed"
+                )
+            if "*" in path:
+                raise self._invalid_metadata(
+                    f"{path!r} is invalid for {{field}}, paths must be resolved"
+                )
+            if (
+                pathlib.PurePosixPath(path).is_absolute()
+                or pathlib.PureWindowsPath(path).is_absolute()
+            ):
+                raise self._invalid_metadata(
+                    f"{path!r} is invalid for {{field}}, paths must be relative"
+                )
+            if pathlib.PureWindowsPath(path).as_posix() != path:
+                raise self._invalid_metadata(
+                    f"{path!r} is invalid for {{field}}, "
+                    "paths must use '/' delimiter"
+                )
+            paths.append(path)
+        return paths
+
 
 class Metadata:
     """Representation of distribution metadata.
@@ -688,8 +739,8 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
                             field = _RAW_TO_EMAIL_MAPPING[key]
                             exc = InvalidMetadata(
                                 field,
-                                "{field} introduced in metadata version "
-                                "{field_metadata_version}, not {metadata_version}",
+                                f"{field} introduced in metadata version "
+                                f"{field_metadata_version}, not {metadata_version}",
                             )
                             exceptions.append(exc)
                             continue
@@ -733,6 +784,8 @@ def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
     metadata_version: _Validator[_MetadataVersion] = _Validator()
     """:external:ref:`core-metadata-metadata-version`
     (required; validated to be a valid metadata version)"""
+    # `name` is not normalized/typed to NormalizedName so as to provide access to
+    # the original/raw name.
     name: _Validator[str] = _Validator()
     """:external:ref:`core-metadata-name`
     (required; validated using :func:`~packaging.utils.canonicalize_name` and its
@@ -770,6 +823,12 @@ def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
     """:external:ref:`core-metadata-maintainer-email`"""
     license: _Validator[str | None] = _Validator()
     """:external:ref:`core-metadata-license`"""
+    license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator(
+        added="2.4"
+    )
+    """:external:ref:`core-metadata-license-expression`"""
+    license_files: _Validator[list[str] | None] = _Validator(added="2.4")
+    """:external:ref:`core-metadata-license-file`"""
     classifiers: _Validator[list[str] | None] = _Validator(added="1.1")
     """:external:ref:`core-metadata-classifier`"""
     requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(
diff --git a/setuptools/_vendor/packaging/specifiers.py b/setuptools/_vendor/packaging/specifiers.py
index 2fa75f7abb..b30926af8b 100644
--- a/setuptools/_vendor/packaging/specifiers.py
+++ b/setuptools/_vendor/packaging/specifiers.py
@@ -234,7 +234,7 @@ def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:
         """
         match = self._regex.search(spec)
         if not match:
-            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
+            raise InvalidSpecifier(f"Invalid specifier: {spec!r}")
 
         self._spec: tuple[str, str] = (
             match.group("operator").strip(),
@@ -256,7 +256,7 @@ def prereleases(self) -> bool:
         # operators, and if they are if they are including an explicit
         # prerelease.
         operator, version = self._spec
-        if operator in ["==", ">=", "<=", "~=", "==="]:
+        if operator in ["==", ">=", "<=", "~=", "===", ">", "<"]:
             # The == specifier can include a trailing .*, if it does we
             # want to remove before parsing.
             if operator == "==" and version.endswith(".*"):
@@ -694,12 +694,18 @@ class SpecifierSet(BaseSpecifier):
     specifiers (``>=3.0,!=3.1``), or no specifier at all.
     """
 
-    def __init__(self, specifiers: str = "", prereleases: bool | None = None) -> None:
+    def __init__(
+        self,
+        specifiers: str | Iterable[Specifier] = "",
+        prereleases: bool | None = None,
+    ) -> None:
         """Initialize a SpecifierSet instance.
 
         :param specifiers:
             The string representation of a specifier or a comma-separated list of
             specifiers which will be parsed and normalized before use.
+            May also be an iterable of ``Specifier`` instances, which will be used
+            as is.
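+            For example (illustrative), ``SpecifierSet(">=3.0,!=3.1")`` and
+            ``SpecifierSet([Specifier(">=3.0"), Specifier("!=3.1")])`` build
+            equivalent sets.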
         :param prereleases:
             This tells the SpecifierSet if it should accept prerelease versions if
             applicable or not. The default of ``None`` will autodetect it from the
@@ -710,12 +716,17 @@ def __init__(self, specifiers: str = "", prereleases: bool | None = None) -> Non
             raised.
         """
 
-        # Split on `,` to break each individual specifier into it's own item, and
-        # strip each item to remove leading/trailing whitespace.
-        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+        if isinstance(specifiers, str):
+            # Split on `,` to break each individual specifier into its own item, and
+            # strip each item to remove leading/trailing whitespace.
+            split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
 
-        # Make each individual specifier a Specifier and save in a frozen set for later.
-        self._specs = frozenset(map(Specifier, split_specifiers))
+            # Make each individual specifier a Specifier and save in a frozen set
+            # for later.
+            self._specs = frozenset(map(Specifier, split_specifiers))
+        else:
+            # Save the supplied specifiers in a frozen set.
+            self._specs = frozenset(specifiers)
 
         # Store our prereleases value so we can use it later to determine if
         # we accept prereleases or not.
diff --git a/setuptools/_vendor/packaging/tags.py b/setuptools/_vendor/packaging/tags.py
index 6667d29908..f5903402ab 100644
--- a/setuptools/_vendor/packaging/tags.py
+++ b/setuptools/_vendor/packaging/tags.py
@@ -25,7 +25,7 @@
 logger = logging.getLogger(__name__)
 
 PythonVersion = Sequence[int]
-MacVersion = Tuple[int, int]
+AppleVersion = Tuple[int, int]
 
 INTERPRETER_SHORT_NAMES: dict[str, str] = {
     "python": "py",  # Generic.
@@ -47,7 +47,7 @@ class Tag:
     is also supported.
     """
 
-    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
+    __slots__ = ["_abi", "_hash", "_interpreter", "_platform"]
 
     def __init__(self, interpreter: str, abi: str, platform: str) -> None:
         self._interpreter = interpreter.lower()
@@ -235,9 +235,8 @@ def cpython_tags(
     if use_abi3:
         for minor_version in range(python_version[1] - 1, 1, -1):
             for platform_ in platforms:
-                interpreter = "cp{version}".format(
-                    version=_version_nodot((python_version[0], minor_version))
-                )
+                version = _version_nodot((python_version[0], minor_version))
+                interpreter = f"cp{version}"
                 yield Tag(interpreter, "abi3", platform_)
 
 
@@ -363,7 +362,7 @@ def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
     return "i386"
 
 
-def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> list[str]:
+def _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]:
     formats = [cpu_arch]
     if cpu_arch == "x86_64":
         if version < (10, 4):
@@ -396,7 +395,7 @@ def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> list[str]:
 
 
 def mac_platforms(
-    version: MacVersion | None = None, arch: str | None = None
+    version: AppleVersion | None = None, arch: str | None = None
 ) -> Iterator[str]:
     """
     Yields the platform tags for a macOS system.
@@ -408,7 +407,7 @@ def mac_platforms(
     """
     version_str, _, cpu_arch = platform.mac_ver()
     if version is None:
-        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+        version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
         if version == (10, 16):
             # When built against an older macOS SDK, Python will report macOS 10.16
             # instead of the real version.
@@ -424,7 +423,7 @@ def mac_platforms(
                 stdout=subprocess.PIPE,
                 text=True,
             ).stdout
-            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+            version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
     else:
         version = version
     if arch is None:
@@ -435,24 +434,22 @@ def mac_platforms(
     if (10, 0) <= version and version < (11, 0):
         # Prior to Mac OS 11, each yearly release of Mac OS bumped the
         # "minor" version number.  The major version was always 10.
+        major_version = 10
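+        # Illustrative (hypothetical machine): a 10.15/x86_64 host yields
+        # macosx_10_15_* down through macosx_10_0_*, with the concrete binary
+        # formats for each step supplied by _mac_binary_formats().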
         for minor_version in range(version[1], -1, -1):
-            compat_version = 10, minor_version
+            compat_version = major_version, minor_version
             binary_formats = _mac_binary_formats(compat_version, arch)
             for binary_format in binary_formats:
-                yield "macosx_{major}_{minor}_{binary_format}".format(
-                    major=10, minor=minor_version, binary_format=binary_format
-                )
+                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
 
     if version >= (11, 0):
         # Starting with Mac OS 11, each yearly release bumps the major version
         # number.   The minor versions are now the midyear updates.
+        minor_version = 0
         for major_version in range(version[0], 10, -1):
-            compat_version = major_version, 0
+            compat_version = major_version, minor_version
             binary_formats = _mac_binary_formats(compat_version, arch)
             for binary_format in binary_formats:
-                yield "macosx_{major}_{minor}_{binary_format}".format(
-                    major=major_version, minor=0, binary_format=binary_format
-                )
+                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
 
     if version >= (11, 0):
         # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
@@ -462,25 +459,75 @@ def mac_platforms(
         # However, the "universal2" binary format can have a
         # macOS version earlier than 11.0 when the x86_64 part of the binary supports
         # that version of macOS.
+        major_version = 10
         if arch == "x86_64":
             for minor_version in range(16, 3, -1):
-                compat_version = 10, minor_version
+                compat_version = major_version, minor_version
                 binary_formats = _mac_binary_formats(compat_version, arch)
                 for binary_format in binary_formats:
-                    yield "macosx_{major}_{minor}_{binary_format}".format(
-                        major=compat_version[0],
-                        minor=compat_version[1],
-                        binary_format=binary_format,
-                    )
+                    yield f"macosx_{major_version}_{minor_version}_{binary_format}"
         else:
             for minor_version in range(16, 3, -1):
-                compat_version = 10, minor_version
+                compat_version = major_version, minor_version
                 binary_format = "universal2"
-                yield "macosx_{major}_{minor}_{binary_format}".format(
-                    major=compat_version[0],
-                    minor=compat_version[1],
-                    binary_format=binary_format,
-                )
+                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+
+def ios_platforms(
+    version: AppleVersion | None = None, multiarch: str | None = None
+) -> Iterator[str]:
+    """
+    Yields the platform tags for an iOS system.
+
+    :param version: A two-item tuple specifying the iOS version to generate
+        platform tags for. Defaults to the current iOS version.
+    :param multiarch: The CPU architecture+ABI to generate platform tags for
+        (the value used by `sys.implementation._multiarch`, e.g.
+        `arm64_iphoneos` or `x86_64_iphonesimulator`). Defaults to the current
+        multiarch value.
+    """
+    if version is None:
+        # if iOS is the current platform, ios_ver *must* be defined. However,
+        # it won't exist for CPython versions before 3.13, which causes a mypy
+        # error.
+        _, release, _, _ = platform.ios_ver()  # type: ignore[attr-defined, unused-ignore]
+        version = cast("AppleVersion", tuple(map(int, release.split(".")[:2])))
+
+    if multiarch is None:
+        multiarch = sys.implementation._multiarch
+    multiarch = multiarch.replace("-", "_")
+
+    ios_platform_template = "ios_{major}_{minor}_{multiarch}"
+
+    # Consider any iOS major.minor version from the version requested, down to
+    # 12.0. 12.0 is the first iOS version that is known to have enough features
+    # to support CPython. Consider every possible minor release up to X.9. The
+    # highest the minor has ever gone is 8 (14.8 and 15.8), but having some extra
+    # candidates that won't ever match doesn't really hurt, and it saves us from
+    # having to keep an explicit list of known iOS versions in the code. Return
+    # the results in descending order of version number.
+
+    # If the requested major version is less than 12, there won't be any matches.
+    if version[0] < 12:
+        return
+
+    # Consider the actual X.Y version that was requested.
+    yield ios_platform_template.format(
+        major=version[0], minor=version[1], multiarch=multiarch
+    )
+
+    # Consider every minor version from X.0 to the minor version prior to the
+    # version requested by the platform.
+    for minor in range(version[1] - 1, -1, -1):
+        yield ios_platform_template.format(
+            major=version[0], minor=minor, multiarch=multiarch
+        )
+
+    for major in range(version[0] - 1, 11, -1):
+        for minor in range(9, -1, -1):
+            yield ios_platform_template.format(
+                major=major, minor=minor, multiarch=multiarch
+            )
 
 
 def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
@@ -512,6 +559,8 @@ def platform_tags() -> Iterator[str]:
     """
     if platform.system() == "Darwin":
         return mac_platforms()
+    elif platform.system() == "iOS":
+        return ios_platforms()
     elif platform.system() == "Linux":
         return _linux_platforms()
     else:
diff --git a/setuptools/_vendor/packaging/utils.py b/setuptools/_vendor/packaging/utils.py
index d33da5bb8b..23450953df 100644
--- a/setuptools/_vendor/packaging/utils.py
+++ b/setuptools/_vendor/packaging/utils.py
@@ -4,11 +4,12 @@
 
 from __future__ import annotations
 
+import functools
 import re
 from typing import NewType, Tuple, Union, cast
 
 from .tags import Tag, parse_tag
-from .version import InvalidVersion, Version
+from .version import InvalidVersion, Version, _TrimmedRelease
 
 BuildTag = Union[Tuple[()], Tuple[int, str]]
 NormalizedName = NewType("NormalizedName", str)
@@ -54,52 +55,40 @@ def is_normalized_name(name: str) -> bool:
     return _normalized_regex.match(name) is not None
 
 
+@functools.singledispatch
 def canonicalize_version(
     version: Version | str, *, strip_trailing_zero: bool = True
 ) -> str:
     """
-    This is very similar to Version.__str__, but has one subtle difference
-    with the way it handles the release segment.
-    """
-    if isinstance(version, str):
-        try:
-            parsed = Version(version)
-        except InvalidVersion:
-            # Legacy versions cannot be normalized
-            return version
-    else:
-        parsed = version
-
-    parts = []
+    Return a canonical form of a version as a string.
 
-    # Epoch
-    if parsed.epoch != 0:
-        parts.append(f"{parsed.epoch}!")
+    >>> canonicalize_version('1.0.1')
+    '1.0.1'
 
-    # Release segment
-    release_segment = ".".join(str(x) for x in parsed.release)
-    if strip_trailing_zero:
-        # NB: This strips trailing '.0's to normalize
-        release_segment = re.sub(r"(\.0)+$", "", release_segment)
-    parts.append(release_segment)
+    Per PEP 625, versions may have multiple canonical forms, differing
+    only by trailing zeros.
 
-    # Pre-release
-    if parsed.pre is not None:
-        parts.append("".join(str(x) for x in parsed.pre))
+    >>> canonicalize_version('1.0.0')
+    '1'
+    >>> canonicalize_version('1.0.0', strip_trailing_zero=False)
+    '1.0.0'
 
-    # Post-release
-    if parsed.post is not None:
-        parts.append(f".post{parsed.post}")
+    Invalid versions are returned unaltered.
 
-    # Development release
-    if parsed.dev is not None:
-        parts.append(f".dev{parsed.dev}")
+    >>> canonicalize_version('foo bar baz')
+    'foo bar baz'
+    """
+    return str(_TrimmedRelease(str(version)) if strip_trailing_zero else version)
 
-    # Local version segment
-    if parsed.local is not None:
-        parts.append(f"+{parsed.local}")
 
-    return "".join(parts)
+@canonicalize_version.register
+def _(version: str, *, strip_trailing_zero: bool = True) -> str:
+    try:
+        parsed = Version(version)
+    except InvalidVersion:
+        # Legacy versions cannot be normalized
+        return version
+    return canonicalize_version(parsed, strip_trailing_zero=strip_trailing_zero)
 
 
 def parse_wheel_filename(
@@ -107,28 +96,28 @@ def parse_wheel_filename(
 ) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
     if not filename.endswith(".whl"):
         raise InvalidWheelFilename(
-            f"Invalid wheel filename (extension must be '.whl'): {filename}"
+            f"Invalid wheel filename (extension must be '.whl'): {filename!r}"
         )
 
     filename = filename[:-4]
     dashes = filename.count("-")
     if dashes not in (4, 5):
         raise InvalidWheelFilename(
-            f"Invalid wheel filename (wrong number of parts): {filename}"
+            f"Invalid wheel filename (wrong number of parts): {filename!r}"
         )
 
     parts = filename.split("-", dashes - 2)
     name_part = parts[0]
     # See PEP 427 for the rules on escaping the project name.
     if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
-        raise InvalidWheelFilename(f"Invalid project name: {filename}")
+        raise InvalidWheelFilename(f"Invalid project name: {filename!r}")
     name = canonicalize_name(name_part)
 
     try:
         version = Version(parts[1])
     except InvalidVersion as e:
         raise InvalidWheelFilename(
-            f"Invalid wheel filename (invalid version): {filename}"
+            f"Invalid wheel filename (invalid version): {filename!r}"
         ) from e
 
     if dashes == 5:
@@ -136,7 +125,7 @@ def parse_wheel_filename(
         build_match = _build_tag_regex.match(build_part)
         if build_match is None:
             raise InvalidWheelFilename(
-                f"Invalid build number: {build_part} in '{filename}'"
+                f"Invalid build number: {build_part} in {filename!r}"
             )
         build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
     else:
@@ -153,14 +142,14 @@ def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
     else:
         raise InvalidSdistFilename(
             f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
-            f" {filename}"
+            f" {filename!r}"
         )
 
     # We are requiring a PEP 440 version, which cannot contain dashes,
     # so we split on the last dash.
     name_part, sep, version_part = file_stem.rpartition("-")
     if not sep:
-        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+        raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")
 
     name = canonicalize_name(name_part)
 
@@ -168,7 +157,7 @@ def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
         version = Version(version_part)
     except InvalidVersion as e:
         raise InvalidSdistFilename(
-            f"Invalid sdist filename (invalid version): {filename}"
+            f"Invalid sdist filename (invalid version): {filename!r}"
         ) from e
 
     return (name, version)
diff --git a/setuptools/_vendor/packaging/version.py b/setuptools/_vendor/packaging/version.py
index 46bc261308..c9bbda20e4 100644
--- a/setuptools/_vendor/packaging/version.py
+++ b/setuptools/_vendor/packaging/version.py
@@ -15,7 +15,7 @@
 
 from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
 
-__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
+__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"]
 
 LocalType = Tuple[Union[int, str], ...]
 
@@ -199,7 +199,7 @@ def __init__(self, version: str) -> None:
         # Validate the version and parse it into pieces
         match = self._regex.search(version)
         if not match:
-            raise InvalidVersion(f"Invalid version: '{version}'")
+            raise InvalidVersion(f"Invalid version: {version!r}")
 
         # Store the parsed out pieces of the version
         self._version = _Version(
@@ -232,7 +232,7 @@ def __repr__(self) -> str:
         return f"<Version('{self}')>"
 
     def __str__(self) -> str:
-        """A string representation of the version that can be rounded-tripped.
+        """A string representation of the version that can be round-tripped.
 
         >>> str(Version("1.0a5"))
         '1.0a5'
@@ -350,8 +350,8 @@ def public(self) -> str:
         '1.2.3'
         >>> Version("1.2.3+abc").public
         '1.2.3'
-        >>> Version("1.2.3+abc.dev1").public
-        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").public
+        '1!1.2.3.dev1'
         """
         return str(self).split("+", 1)[0]
 
@@ -363,7 +363,7 @@ def base_version(self) -> str:
         '1.2.3'
         >>> Version("1.2.3+abc").base_version
         '1.2.3'
-        >>> Version("1!1.2.3+abc.dev1").base_version
+        >>> Version("1!1.2.3dev1+abc").base_version
         '1!1.2.3'
 
         The "base version" is the public version of the project without any pre or post
@@ -451,6 +451,23 @@ def micro(self) -> int:
         return self.release[2] if len(self.release) >= 3 else 0
 
 
+class _TrimmedRelease(Version):
+    @property
+    def release(self) -> tuple[int, ...]:
+        """
+        Release segment without any trailing zeros.
+
+        >>> _TrimmedRelease('1.0.0').release
+        (1,)
+        >>> _TrimmedRelease('0.0').release
+        (0,)
+        """
+        rel = super().release
+        nonzeros = (index for index, val in enumerate(rel) if val)
+        last_nonzero = max(nonzeros, default=0)
+        return rel[: last_nonzero + 1]
+
+
 def _parse_letter_version(
     letter: str | None, number: str | bytes | SupportsInt | None
 ) -> tuple[str, int] | None:
@@ -476,7 +493,9 @@ def _parse_letter_version(
             letter = "post"
 
         return letter, int(number)
-    if not letter and number:
+
+    assert not letter
+    if number:
         # We assume if we are given a number, but we are not given a letter
         # then this is using the implicit post release syntax (e.g. 1.0-1)
         letter = "post"

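As a minimal sketch of the new vendored helpers (illustration only, assuming
the vendored ``packaging`` is importable as ``packaging``)::

    from packaging.tags import ios_platforms
    from packaging.utils import canonicalize_version

    # ios_platforms walks minors down from the requested version, then earlier
    # majors down to iOS 12.0, the oldest release known to support CPython.
    tags = list(ios_platforms(version=(13, 1), multiarch="arm64-iphoneos"))
    assert tags[:2] == ["ios_13_1_arm64_iphoneos", "ios_13_0_arm64_iphoneos"]
    assert tags[2:] == [f"ios_12_{m}_arm64_iphoneos" for m in range(9, -1, -1)]

    # canonicalize_version now trims trailing zeros via _TrimmedRelease.
    assert canonicalize_version("1.0.0") == "1"
    assert canonicalize_version("1.0.0", strip_trailing_zero=False) == "1.0.0"
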
From 061d93f0f58b0b2273e9c55dc9877d8ee9a24db8 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Fri, 8 Nov 2024 15:24:48 +0100
Subject: [PATCH 1286/1761] Fix test

---
 setuptools/tests/test_dist.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index be953079f5..7b8cb91469 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -144,8 +144,12 @@ def test_check_specifier():
     dist = Distribution(attrs)
     check_specifier(dist, attrs, attrs['python_requires'])
 
-    # invalid specifier value
     attrs = {'name': 'foo', 'python_requires': ['>=3.0', '!=3.1']}
+    dist = Distribution(attrs)
+    check_specifier(dist, attrs, attrs['python_requires'])
+
+    # invalid specifier value
+    attrs = {'name': 'foo', 'python_requires': '>=invalid-version'}
     with pytest.raises(DistutilsSetupError):
         dist = Distribution(attrs)
 

From 913d50a130b3c92636978b1d0e0ace8c60c23a8e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 28 Aug 2024 17:52:16 +0100
Subject: [PATCH 1287/1761] Allow user to skip validation of pyproject.toml via
 env var

---
 setuptools/config/pyprojecttoml.py            | 13 +++++++++++++
 setuptools/tests/config/test_pyprojecttoml.py |  7 +++++++
 2 files changed, 20 insertions(+)

diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 15b0baa18e..fb277f5b7a 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -41,6 +41,19 @@ def load_file(filepath: StrPath) -> dict:
 
 
 def validate(config: dict, filepath: StrPath) -> bool:
+    skip = os.getenv("SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION", "false")
+    if skip.lower() == "true":  # https://github.com/pypa/setuptools/issues/4459
+        SetuptoolsWarning.emit(
+            "Skipping the validation of `pyproject.toml`.",
+            """
+            Please note that some setuptools functionalities rely on the validation of
+            `pyproject.toml` against misconfiguration to ensure proper operation.
+            By skipping the automatic checks, you are taking responsibility for
+            making sure the file is valid. Otherwise, unexpected behaviours may occur.
+            """,
+        )
+        return True
+
     from . import _validate_pyproject as validator
 
     trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier")
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
index db40fcd23d..df27487d51 100644
--- a/setuptools/tests/config/test_pyprojecttoml.py
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -17,6 +17,7 @@
 )
 from setuptools.dist import Distribution
 from setuptools.errors import OptionError
+from setuptools.warnings import SetuptoolsWarning
 
 import distutils.core
 
@@ -394,3 +395,9 @@ def test_warn_tools_typo(tmp_path):
 
     with pytest.warns(_ToolsTypoInMetadata):
         read_configuration(pyproject)
+
+
+def test_warn_skipping_validation(monkeypatch):
+    monkeypatch.setenv("SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION", "true")
+    with pytest.warns(SetuptoolsWarning, match="Skipping the validation"):
+        assert validate({"completely-wrong": "data"}, "pyproject.toml") is True

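A usage sketch mirroring the test above (hypothetical invalid configuration;
the warning is emitted through the standard ``warnings`` machinery)::

    import os
    import warnings

    os.environ["SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION"] = "true"

    from setuptools.config.pyprojecttoml import validate

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # Validation is bypassed entirely, so even invalid data "passes".
        assert validate({"completely-wrong": "data"}, "pyproject.toml") is True
    assert any("Skipping the validation" in str(w.message) for w in caught)
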
From b7f776838708e58a1406656b11fe0e22aa78e065 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 11 Nov 2024 14:30:14 +0000
Subject: [PATCH 1288/1761] Add news fragment

---
 newsfragments/4611.feature.rst | 7 +++++++
 1 file changed, 7 insertions(+)
 create mode 100644 newsfragments/4611.feature.rst

diff --git a/newsfragments/4611.feature.rst b/newsfragments/4611.feature.rst
new file mode 100644
index 0000000000..ee7da9fbad
--- /dev/null
+++ b/newsfragments/4611.feature.rst
@@ -0,0 +1,7 @@
+Added support for the environment variable
+``SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION=true``, allowing users to bypass
+the validation of ``pyproject.toml``.
+This option should be used only as a last resort when resolving dependency
+issues, as it may lead to improper functioning.
+Users who enable this setting are responsible for ensuring that ``pyproject.toml``
+complies with setuptools requirements.

From d9975c6a31b88bd6ae8963e9c981525cc910f82f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 7 Oct 2024 14:12:38 +0100
Subject: [PATCH 1289/1761] Extract convenience wrapper of rmtree to
 setuptools._shutil for reuse

---
 setuptools/_shutil.py              | 41 ++++++++++++++++++++++++++++++
 setuptools/command/easy_install.py | 39 +++-------------------------
 2 files changed, 44 insertions(+), 36 deletions(-)
 create mode 100644 setuptools/_shutil.py

diff --git a/setuptools/_shutil.py b/setuptools/_shutil.py
new file mode 100644
index 0000000000..8abf5faa6b
--- /dev/null
+++ b/setuptools/_shutil.py
@@ -0,0 +1,41 @@
+"""Convenience layer on top of stdlib's shutil and os"""
+
+import os
+import stat
+from typing import Callable, TypeVar
+
+from .compat import py311
+
+from distutils import log
+
+try:
+    from os import chmod
+except ImportError:
+    # Jython compatibility
+    def chmod(*args: object, **kwargs: object) -> None:  # type: ignore[misc] # Mypy reuses the imported definition anyway
+        pass
+
+
+_T = TypeVar("_T")
+
+
+def attempt_chmod_verbose(path, mode):
+    log.debug("changing mode of %s to %o", path, mode)
+    try:
+        chmod(path, mode)
+    except OSError as e:
+        log.debug("chmod failed: %s", e)
+
+
+# Must match shutil._OnExcCallback
+def _auto_chmod(func: Callable[..., _T], arg: str, exc: BaseException) -> _T:
+    """shutils onexc callback to automatically call chmod for certain functions."""
+    # Only retry for scenarios known to have an issue
+    if func in [os.unlink, os.remove] and os.name == 'nt':
+        attempt_chmod_verbose(arg, stat.S_IWRITE)
+        return func(arg)
+    raise exc
+
+
+def rmtree(path, ignore_errors=False, onexc=_auto_chmod):
+    return py311.shutil_rmtree(path, ignore_errors, onexc)
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 21e6f008d7..b40610f8ba 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -34,7 +34,7 @@
 from collections.abc import Iterable
 from glob import glob
 from sysconfig import get_path
-from typing import TYPE_CHECKING, Callable, NoReturn, TypedDict, TypeVar
+from typing import TYPE_CHECKING, NoReturn, TypedDict
 
 from jaraco.text import yield_lines
 
@@ -63,7 +63,8 @@
 from setuptools.wheel import Wheel
 
 from .._path import ensure_directory
-from ..compat import py39, py311, py312
+from .._shutil import attempt_chmod_verbose as chmod, rmtree as _rmtree
+from ..compat import py39, py312
 
 from distutils import dir_util, log
 from distutils.command import install
@@ -89,8 +90,6 @@
     'get_exe_prefixes',
 ]
 
-_T = TypeVar("_T")
-
 
 def is_64bit():
     return struct.calcsize("P") == 8
@@ -1789,16 +1788,6 @@ def _first_line_re():
     return re.compile(first_line_re.pattern.decode())
 
 
-# Must match shutil._OnExcCallback
-def auto_chmod(func: Callable[..., _T], arg: str, exc: BaseException) -> _T:
-    """shutils onexc callback to automatically call chmod for certain functions."""
-    # Only retry for scenarios known to have an issue
-    if func in [os.unlink, os.remove] and os.name == 'nt':
-        chmod(arg, stat.S_IWRITE)
-        return func(arg)
-    raise exc
-
-
 def update_dist_caches(dist_path, fix_zipimporter_caches):
     """
     Fix any globally cached `dist_path` related data
@@ -2021,24 +2010,6 @@ def is_python_script(script_text, filename):
     return False  # Not any Python I can recognize
 
 
-try:
-    from os import (
-        chmod as _chmod,  # pyright: ignore[reportAssignmentType] # Losing type-safety w/ pyright, but that's ok
-    )
-except ImportError:
-    # Jython compatibility
-    def _chmod(*args: object, **kwargs: object) -> None:  # type: ignore[misc] # Mypy reuses the imported definition anyway
-        pass
-
-
-def chmod(path, mode):
-    log.debug("changing mode of %s to %o", path, mode)
-    try:
-        _chmod(path, mode)
-    except OSError as e:
-        log.debug("chmod failed: %s", e)
-
-
 class _SplitArgs(TypedDict, total=False):
     comments: bool
     posix: bool
@@ -2350,10 +2321,6 @@ def load_launcher_manifest(name):
     return manifest.decode('utf-8') % vars()
 
 
-def _rmtree(path, ignore_errors: bool = False, onexc=auto_chmod):
-    return py311.shutil_rmtree(path, ignore_errors, onexc)
-
-
 def current_umask():
     tmp = os.umask(0o022)
     os.umask(tmp)

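For context, a minimal sketch of the ``onexc`` contract that ``_auto_chmod``
implements (Python 3.12+ passes the exception instance directly; older
versions use ``onerror`` with an exc-info tuple, which the ``py311`` shim
bridges)::

    import os
    import shutil
    import stat
    import tempfile

    path = tempfile.mkdtemp()
    locked = os.path.join(path, "file.txt")
    open(locked, "w").close()
    os.chmod(locked, stat.S_IREAD)  # read-only blocks deletion only on Windows

    def onexc(func, arg, exc):
        # Same shape as _auto_chmod: make writable, then retry the failed call.
        os.chmod(arg, stat.S_IWRITE)
        func(arg)

    shutil.rmtree(path, onexc=onexc)  # the onexc= keyword requires Python 3.12+
    assert not os.path.isdir(path)
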
From 1678730e70272129044cb1c47ca7f7f05cd4db46 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 7 Oct 2024 14:35:19 +0100
Subject: [PATCH 1290/1761] Extract common pattern to remove dir if exists to
 setuptools._shutil

---
 setuptools/_shutil.py           | 5 +++++
 setuptools/command/dist_info.py | 6 +-----
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/setuptools/_shutil.py b/setuptools/_shutil.py
index 8abf5faa6b..3d2c11e019 100644
--- a/setuptools/_shutil.py
+++ b/setuptools/_shutil.py
@@ -39,3 +39,8 @@ def _auto_chmod(func: Callable[..., _T], arg: str, exc: BaseException) -> _T:
 
 def rmtree(path, ignore_errors=False, onexc=_auto_chmod):
     return py311.shutil_rmtree(path, ignore_errors, onexc)
+
+
+def rmdir(path, **opts):
+    if os.path.isdir(path):
+        rmtree(path, **opts)
diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py
index 3ad27ed708..0192ebb260 100644
--- a/setuptools/command/dist_info.py
+++ b/setuptools/command/dist_info.py
@@ -10,6 +10,7 @@
 from typing import cast
 
 from .. import _normalization
+from .._shutil import rmdir as _rm
 from .egg_info import egg_info as egg_info_cls
 
 from distutils import log
@@ -100,8 +101,3 @@ def run(self) -> None:
         # TODO: if bdist_wheel if merged into setuptools, just add "keep_egg_info" there
         with self._maybe_bkp_dir(egg_info_dir, self.keep_egg_info):
             bdist_wheel.egg2dist(egg_info_dir, self.dist_info_dir)
-
-
-def _rm(dir_name, **opts):
-    if os.path.isdir(dir_name):
-        shutil.rmtree(dir_name, **opts)

From b9be1442ba86c62d3473e476d0784e808da4af23 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 7 Oct 2024 15:21:17 +0100
Subject: [PATCH 1291/1761] Attempt to solve typechecking problems

---
 setuptools/_shutil.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setuptools/_shutil.py b/setuptools/_shutil.py
index 3d2c11e019..2db65abb63 100644
--- a/setuptools/_shutil.py
+++ b/setuptools/_shutil.py
@@ -9,7 +9,8 @@
 from distutils import log
 
 try:
-    from os import chmod
+    from os import chmod  # pyright: ignore[reportAssignmentType]
+    # Losing type-safety w/ pyright, but that's ok
 except ImportError:
     # Jython compatibility
     def chmod(*args: object, **kwargs: object) -> None:  # type: ignore[misc] # Mypy reuses the imported definition anyway

From 6ddac39a5ee7a0bc25466fc44a24416fd902527f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 7 Oct 2024 16:05:02 +0100
Subject: [PATCH 1292/1761] Ignore some lines for coverage

---
 setuptools/_shutil.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/setuptools/_shutil.py b/setuptools/_shutil.py
index 2db65abb63..ca6397343c 100644
--- a/setuptools/_shutil.py
+++ b/setuptools/_shutil.py
@@ -11,7 +11,7 @@
 try:
     from os import chmod  # pyright: ignore[reportAssignmentType]
     # Losing type-safety w/ pyright, but that's ok
-except ImportError:
+except ImportError:  # pragma: no cover
     # Jython compatibility
     def chmod(*args: object, **kwargs: object) -> None:  # type: ignore[misc] # Mypy reuses the imported definition anyway
         pass
@@ -24,12 +24,14 @@ def attempt_chmod_verbose(path, mode):
     log.debug("changing mode of %s to %o", path, mode)
     try:
         chmod(path, mode)
-    except OSError as e:
+    except OSError as e:  # pragma: no cover
         log.debug("chmod failed: %s", e)
 
 
 # Must match shutil._OnExcCallback
-def _auto_chmod(func: Callable[..., _T], arg: str, exc: BaseException) -> _T:
+def _auto_chmod(
+    func: Callable[..., _T], arg: str, exc: BaseException
+) -> _T:  # pragma: no cover
     """shutils onexc callback to automatically call chmod for certain functions."""
     # Only retry for scenarios known to have an issue
     if func in [os.unlink, os.remove] and os.name == 'nt':

From 8272bc3186fb5991e290c2afd31d1f5fb2d74fb5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 17:20:45 +0100
Subject: [PATCH 1293/1761] Refactor usage of shutil.rmtree in other parts of
 setuptools

---
 setuptools/command/bdist_wheel.py    | 33 +++++-----------------------
 setuptools/command/editable_wheel.py |  4 ++--
 setuptools/command/rotate.py         |  5 ++---
 3 files changed, 9 insertions(+), 33 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 8cf91538f9..5855a8a832 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -9,7 +9,6 @@
 import os
 import re
 import shutil
-import stat
 import struct
 import sys
 import sysconfig
@@ -18,23 +17,19 @@
 from email.generator import BytesGenerator, Generator
 from email.policy import EmailPolicy
 from glob import iglob
-from shutil import rmtree
-from typing import TYPE_CHECKING, Callable, Literal, cast
+from typing import Literal, cast
 from zipfile import ZIP_DEFLATED, ZIP_STORED
 
 from packaging import tags, version as _packaging_version
 from wheel.metadata import pkginfo_to_metadata
 from wheel.wheelfile import WheelFile
 
-from .. import Command, __version__
+from .. import Command, __version__, _shutil
 from ..warnings import SetuptoolsDeprecationWarning
 from .egg_info import egg_info as egg_info_cls
 
 from distutils import log
 
-if TYPE_CHECKING:
-    from _typeshed import ExcInfo
-
 
 def safe_name(name: str) -> str:
     """Convert an arbitrary string to a standard distribution name
@@ -148,21 +143,6 @@ def safer_version(version: str) -> str:
     return safe_version(version).replace("-", "_")
 
 
-def remove_readonly(
-    func: Callable[..., object],
-    path: str,
-    excinfo: ExcInfo,
-) -> None:
-    remove_readonly_exc(func, path, excinfo[1])
-
-
-def remove_readonly_exc(
-    func: Callable[..., object], path: str, exc: BaseException
-) -> None:
-    os.chmod(path, stat.S_IWRITE)
-    func(path)
-
-
 class bdist_wheel(Command):
     description = "create a wheel distribution"
 
@@ -458,7 +438,7 @@ def run(self):
             shutil.copytree(self.dist_info_dir, distinfo_dir)
             # Egg info is still generated, so remove it now to avoid it getting
             # copied into the wheel.
-            shutil.rmtree(self.egginfo_dir)
+            _shutil.rmtree(self.egginfo_dir)
         else:
             # Convert the generated egg-info into dist-info.
             self.egg2dist(self.egginfo_dir, distinfo_dir)
@@ -483,10 +463,7 @@ def run(self):
         if not self.keep_temp:
             log.info(f"removing {self.bdist_dir}")
             if not self.dry_run:
-                if sys.version_info < (3, 12):
-                    rmtree(self.bdist_dir, onerror=remove_readonly)
-                else:
-                    rmtree(self.bdist_dir, onexc=remove_readonly_exc)
+                _shutil.rmtree(self.bdist_dir)
 
     def write_wheelfile(
         self, wheelfile_base: str, generator: str = f"setuptools ({__version__})"
@@ -570,7 +547,7 @@ def egg2dist(self, egginfo_path: str, distinfo_path: str) -> None:
         def adios(p: str) -> None:
             """Appropriately delete directory, file or link."""
             if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
-                shutil.rmtree(p)
+                _shutil.rmtree(p)
             elif os.path.exists(p):
                 os.unlink(p)
 
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 30570e092a..db9a50c3af 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -27,7 +27,7 @@
 from types import TracebackType
 from typing import TYPE_CHECKING, Protocol, TypeVar, cast
 
-from .. import Command, _normalization, _path, errors, namespaces
+from .. import Command, _normalization, _path, _shutil, errors, namespaces
 from .._path import StrPath
 from ..compat import py312
 from ..discovery import find_package_path
@@ -773,7 +773,7 @@ def _is_nested(pkg: str, pkg_path: str, parent: str, parent_path: str) -> bool:
 
 def _empty_dir(dir_: _P) -> _P:
     """Create a directory ensured to be empty. Existing files may be removed."""
-    shutil.rmtree(dir_, ignore_errors=True)
+    _shutil.rmtree(dir_, ignore_errors=True)
     os.makedirs(dir_)
     return dir_
 
diff --git a/setuptools/command/rotate.py b/setuptools/command/rotate.py
index c10e8d5024..acdce07baa 100644
--- a/setuptools/command/rotate.py
+++ b/setuptools/command/rotate.py
@@ -1,10 +1,9 @@
 from __future__ import annotations
 
 import os
-import shutil
 from typing import ClassVar
 
-from setuptools import Command
+from .. import Command, _shutil
 
 from distutils import log
 from distutils.errors import DistutilsOptionError
@@ -61,6 +60,6 @@ def run(self) -> None:
                 log.info("Deleting %s", f)
                 if not self.dry_run:
                     if os.path.isdir(f):
-                        shutil.rmtree(f)
+                        _shutil.rmtree(f)
                     else:
                         os.unlink(f)

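These call sites behave uniformly across interpreters because
``setuptools.compat.py311.shutil_rmtree`` bridges the two callback APIs; a
plausible sketch of such a shim (the real implementation may differ)::

    import shutil
    import sys

    def shutil_rmtree(path, ignore_errors=False, onexc=None):
        if sys.version_info >= (3, 12):
            return shutil.rmtree(path, ignore_errors, onexc=onexc)

        def _handler(func, path, excinfo):
            # onerror receives (func, path, exc_info); adapt to the onexc shape.
            return onexc(func, path, excinfo[1])

        return shutil.rmtree(path, ignore_errors, onerror=_handler if onexc else None)
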
From bb93502e7f7fd7ef68cec7039034bfeaacb50fdb Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 17:22:28 +0100
Subject: [PATCH 1294/1761] Add docstring

---
 setuptools/_shutil.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/setuptools/_shutil.py b/setuptools/_shutil.py
index ca6397343c..6acbb4281f 100644
--- a/setuptools/_shutil.py
+++ b/setuptools/_shutil.py
@@ -41,6 +41,10 @@ def _auto_chmod(
 
 
 def rmtree(path, ignore_errors=False, onexc=_auto_chmod):
+    """
+    Similar to ``shutil.rmtree`` but automatically executes ``chmod``
+    for well-known Windows failure scenarios.
+    """
     return py311.shutil_rmtree(path, ignore_errors, onexc)
 
 

From db2b2065bfc20c9edf1c5d8ea9ad0ae68e64acdd Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 15 Oct 2024 21:37:44 +0100
Subject: [PATCH 1295/1761] Extract test for shutil.rmtree callback to its own
 file

---
 setuptools/tests/test_bdist_wheel.py    | 31 +------------------------
 setuptools/tests/test_shutil_wrapper.py | 23 ++++++++++++++++++
 2 files changed, 24 insertions(+), 30 deletions(-)
 create mode 100644 setuptools/tests/test_shutil_wrapper.py

diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 3dfa9c850c..d51dfbeb6d 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -11,7 +11,6 @@
 import sysconfig
 from contextlib import suppress
 from inspect import cleandoc
-from unittest.mock import Mock
 from zipfile import ZipFile
 
 import jaraco.path
@@ -19,12 +18,7 @@
 from packaging import tags
 
 import setuptools
-from setuptools.command.bdist_wheel import (
-    bdist_wheel,
-    get_abi_tag,
-    remove_readonly,
-    remove_readonly_exc,
-)
+from setuptools.command.bdist_wheel import bdist_wheel, get_abi_tag
 from setuptools.dist import Distribution
 from setuptools.warnings import SetuptoolsDeprecationWarning
 
@@ -510,29 +504,6 @@ def test_platform_with_space(dummy_dist, monkeypatch):
     bdist_wheel_cmd(plat_name="isilon onefs").run()
 
 
-def test_rmtree_readonly(monkeypatch, tmp_path):
-    """Verify onerr works as expected"""
-
-    bdist_dir = tmp_path / "with_readonly"
-    bdist_dir.mkdir()
-    some_file = bdist_dir.joinpath("file.txt")
-    some_file.touch()
-    some_file.chmod(stat.S_IREAD)
-
-    expected_count = 1 if sys.platform.startswith("win") else 0
-
-    if sys.version_info < (3, 12):
-        count_remove_readonly = Mock(side_effect=remove_readonly)
-        shutil.rmtree(bdist_dir, onerror=count_remove_readonly)
-        assert count_remove_readonly.call_count == expected_count
-    else:
-        count_remove_readonly_exc = Mock(side_effect=remove_readonly_exc)
-        shutil.rmtree(bdist_dir, onexc=count_remove_readonly_exc)
-        assert count_remove_readonly_exc.call_count == expected_count
-
-    assert not bdist_dir.is_dir()
-
-
 def test_data_dir_with_tag_build(monkeypatch, tmp_path):
     """
     Setuptools allow authors to set PEP 440's local version segments
diff --git a/setuptools/tests/test_shutil_wrapper.py b/setuptools/tests/test_shutil_wrapper.py
new file mode 100644
index 0000000000..74ff7e9a89
--- /dev/null
+++ b/setuptools/tests/test_shutil_wrapper.py
@@ -0,0 +1,23 @@
+import stat
+import sys
+from unittest.mock import Mock
+
+from setuptools import _shutil
+
+
+def test_rmtree_readonly(monkeypatch, tmp_path):
+    """Verify onerr works as expected"""
+
+    tmp_dir = tmp_path / "with_readonly"
+    tmp_dir.mkdir()
+    some_file = tmp_dir.joinpath("file.txt")
+    some_file.touch()
+    some_file.chmod(stat.S_IREAD)
+
+    expected_count = 1 if sys.platform.startswith("win") else 0
+    chmod_fn = Mock(wraps=_shutil.attempt_chmod_verbose)
+    monkeypatch.setattr(_shutil, "attempt_chmod_verbose", chmod_fn)
+
+    _shutil.rmtree(tmp_dir)
+    assert chmod_fn.call_count == expected_count
+    assert not tmp_dir.is_dir()

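The spy pattern used above, in isolation: ``Mock(wraps=f)`` delegates to ``f``
while recording calls, so behavior is unchanged but countable::

    from unittest.mock import Mock

    spy = Mock(wraps=len)
    assert spy("abc") == 3      # delegates to the wrapped callable
    assert spy.call_count == 1  # while still recording invocations
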
From 49726ca1a6a68832402c9e78b6cdc0b06caa3725 Mon Sep 17 00:00:00 2001
From: Carson Lam <46059+carsonyl@users.noreply.github.com>
Date: Thu, 19 Sep 2024 14:48:56 -0700
Subject: [PATCH 1296/1761] Omit ruff config from distribution.

---
 MANIFEST.in | 1 +
 1 file changed, 1 insertion(+)

diff --git a/MANIFEST.in b/MANIFEST.in
index 0643e7ee2d..fbf0e02a0c 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -19,3 +19,4 @@ include tox.ini
 include setuptools/tests/config/setupcfg_examples.txt
 include setuptools/config/*.schema.json
 global-exclude *.py[cod] __pycache__
+global-exclude ruff.toml

From 68ac812eca265ec1d857755f29889ab0c5ace021 Mon Sep 17 00:00:00 2001
From: Carson Lam <46059+carsonyl@users.noreply.github.com>
Date: Thu, 26 Sep 2024 22:50:40 -0700
Subject: [PATCH 1297/1761] Include ruff.toml in sdist but not wheel.

---
 MANIFEST.in    | 1 -
 pyproject.toml | 3 +++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/MANIFEST.in b/MANIFEST.in
index fbf0e02a0c..0643e7ee2d 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -19,4 +19,3 @@ include tox.ini
 include setuptools/tests/config/setupcfg_examples.txt
 include setuptools/config/*.schema.json
 global-exclude *.py[cod] __pycache__
-global-exclude ruff.toml
diff --git a/pyproject.toml b/pyproject.toml
index 7bba23d01a..6de5c4a399 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -210,6 +210,9 @@ exclude = [
 ]
 namespaces = true
 
+[tool.setuptools.exclude-package-data]
+"*" = ["ruff.toml"]
+
 [tool.distutils.sdist]
 formats = "zip"
 

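A quick sanity check of the intended effect (a sketch, assuming freshly built
artifacts in ``dist/`` and the zip sdist format configured above)::

    import glob
    import zipfile

    def names(pattern):
        (path,) = glob.glob(pattern)
        return zipfile.ZipFile(path).namelist()

    # ruff.toml ships in the sdist but is excluded from the wheel.
    assert any(n.endswith("ruff.toml") for n in names("dist/setuptools-*.zip"))
    assert not any(n.endswith("ruff.toml") for n in names("dist/setuptools-*.whl"))
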
From 748c8513c91087248cf436b9fae94043e310aa9c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 11 Nov 2024 14:46:39 +0000
Subject: [PATCH 1298/1761] Update mypy requirement from ==1.12.* to
 >=1.12,<1.14

Updates the requirements on [mypy](https://github.com/python/mypy) to permit the latest version.
- [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md)
- [Commits](https://github.com/python/mypy/compare/v1.12.0...v1.13.0)

---
updated-dependencies:
- dependency-name: mypy
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] 
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 7bba23d01a..48eda3bc09 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -135,7 +135,7 @@ type = [
 	# pin mypy version so a new version doesn't suddenly cause the CI to fail,
 	# until types-setuptools is removed from typeshed.
 	# For help with static-typing issues, or mypy update, ping @Avasam
-	"mypy==1.12.*",
+	"mypy>=1.12,<1.14",
 	# Typing fixes in version newer than we require at runtime
 	"importlib_metadata>=7.0.2; python_version < '3.10'",
 	# Imported unconditionally in tools/finalize.py

From 8f5559c859309a170c2056e04020ea9679fc7517 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 11 Nov 2024 17:56:34 +0000
Subject: [PATCH 1299/1761] =?UTF-8?q?Bump=20version:=2075.3.0=20=E2=86=92?=
 =?UTF-8?q?=2075.4.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg                   |  2 +-
 NEWS.rst                           | 30 ++++++++++++++++++++++++++++++
 newsfragments/+f0b61194.bugfix.rst |  1 -
 newsfragments/4611.feature.rst     |  7 -------
 newsfragments/4718.feature.1.rst   |  1 -
 newsfragments/4718.feature.2.rst   |  4 ----
 newsfragments/4718.feature.3.rst   |  3 ---
 newsfragments/4740.feature.rst     |  1 -
 pyproject.toml                     |  2 +-
 9 files changed, 32 insertions(+), 19 deletions(-)
 delete mode 100644 newsfragments/+f0b61194.bugfix.rst
 delete mode 100644 newsfragments/4611.feature.rst
 delete mode 100644 newsfragments/4718.feature.1.rst
 delete mode 100644 newsfragments/4718.feature.2.rst
 delete mode 100644 newsfragments/4718.feature.3.rst
 delete mode 100644 newsfragments/4740.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 974699dc24..23aaeed4e4 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.3.0
+current_version = 75.4.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 39bd36de66..04666a9c02 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,33 @@
+v75.4.0
+=======
+
+Features
+--------
+
+- Added support for the environment variable
+  ``SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION=true``, allowing users to bypass
+  the validation of ``pyproject.toml``.
+  This option should be used only as a last resort when resolving dependency
+  issues, as it may lead to improper functioning.
+  Users who enable this setting are responsible for ensuring that ``pyproject.toml``
+  complies with setuptools requirements. (#4611)
+- Require Python 3.9 or later. (#4718)
+- Remove dependency on ``importlib_resources``
+  and the vendored copy of the library.
+  Instead, ``setuptools`` consistently relies on stdlib's ``importlib.resources``
+  (available on Python 3.9+). (#4718)
+- Setuptools' ``bdist_wheel`` implementation no longer produces wheels with
+  the ``m`` SOABI flag (pymalloc-related).
+  This flag was removed on Python 3.8+ (see :obj:`sys.abiflags`). (#4718)
+- Updated vendored packaging version to 24.2. (#4740)
+
+
+Bugfixes
+--------
+
+- Merge with pypa/distutils@251797602, including fix for dirutil.mkpath handling in pypa/distutils#304.
+
+
 v75.3.0
 =======
 
diff --git a/newsfragments/+f0b61194.bugfix.rst b/newsfragments/+f0b61194.bugfix.rst
deleted file mode 100644
index 597165c3a0..0000000000
--- a/newsfragments/+f0b61194.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Merge with pypa/distutils@251797602, including fix for dirutil.mkpath handling in pypa/distutils#304.
\ No newline at end of file
diff --git a/newsfragments/4611.feature.rst b/newsfragments/4611.feature.rst
deleted file mode 100644
index ee7da9fbad..0000000000
--- a/newsfragments/4611.feature.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Added support for the environment variable
-``SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION=true``, allowing users to bypass
-the validation of ``pyproject.toml``.
-This option should be used only as a last resort when resolving dependency
-issues, as it may lead to improper functioning.
-Users who enable this setting are responsible for ensuring that ``pyproject.toml``
-complies with setuptools requirements.
diff --git a/newsfragments/4718.feature.1.rst b/newsfragments/4718.feature.1.rst
deleted file mode 100644
index f171cdde84..0000000000
--- a/newsfragments/4718.feature.1.rst
+++ /dev/null
@@ -1 +0,0 @@
-Require Python 3.9 or later.
diff --git a/newsfragments/4718.feature.2.rst b/newsfragments/4718.feature.2.rst
deleted file mode 100644
index 7c32c13b61..0000000000
--- a/newsfragments/4718.feature.2.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Remove dependency on ``importlib_resources``
-and the vendored copy of the library.
-Instead, ``setuptools`` consistently rely on stdlib's ``importlib.resources``
-(available on Python 3.9+).
diff --git a/newsfragments/4718.feature.3.rst b/newsfragments/4718.feature.3.rst
deleted file mode 100644
index 9c80da9064..0000000000
--- a/newsfragments/4718.feature.3.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Setuptools' ``bdist_wheel`` implementation no longer produces wheels with
-the ``m`` SOABI flag (pymalloc-related).
-This flag was removed on Python 3.8+ (see :obj:`sys.abiflags`).
diff --git a/newsfragments/4740.feature.rst b/newsfragments/4740.feature.rst
deleted file mode 100644
index 9dd6db56cf..0000000000
--- a/newsfragments/4740.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Updated vendored packaging version to 24.2.
diff --git a/pyproject.toml b/pyproject.toml
index 48eda3bc09..c8a739bcad 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.3.0"
+version = "75.4.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From f6c9fdb5489ab49856cb1f4c237f02651c16b05f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 12 Nov 2024 10:17:49 +0000
Subject: [PATCH 1300/1761] Revert "Allow user to skip validation of
 pyproject.toml via env var"

This reverts commit 913d50a130b3c92636978b1d0e0ace8c60c23a8e.
---
 setuptools/config/pyprojecttoml.py            | 13 -------------
 setuptools/tests/config/test_pyprojecttoml.py |  7 -------
 2 files changed, 20 deletions(-)

diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index fb277f5b7a..15b0baa18e 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -41,19 +41,6 @@ def load_file(filepath: StrPath) -> dict:
 
 
 def validate(config: dict, filepath: StrPath) -> bool:
-    skip = os.getenv("SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION", "false")
-    if skip.lower() == "true":  # https://github.com/pypa/setuptools/issues/4459
-        SetuptoolsWarning.emit(
-            "Skipping the validation of `pyproject.toml`.",
-            """
-            Please note that some setuptools functionalities rely on the validation of
-            `pyproject.toml` against misconfiguration to ensure proper operation.
-            By skipping the automatic checks, you are taking responsibility for
-            making sure the file is valid. Otherwise, unexpected behaviours may occur.
-            """,
-        )
-        return True
-
     from . import _validate_pyproject as validator
 
     trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier")
diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py
index df27487d51..db40fcd23d 100644
--- a/setuptools/tests/config/test_pyprojecttoml.py
+++ b/setuptools/tests/config/test_pyprojecttoml.py
@@ -17,7 +17,6 @@
 )
 from setuptools.dist import Distribution
 from setuptools.errors import OptionError
-from setuptools.warnings import SetuptoolsWarning
 
 import distutils.core
 
@@ -395,9 +394,3 @@ def test_warn_tools_typo(tmp_path):
 
     with pytest.warns(_ToolsTypoInMetadata):
         read_configuration(pyproject)
-
-
-def test_warn_skipping_validation(monkeypatch):
-    monkeypatch.setenv("SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION", "true")
-    with pytest.warns(SetuptoolsWarning, match="Skipping the validation"):
-        assert validate({"completely-wrong": "data"}, "pyproject.toml") is True

From 36e945e5a12ed552b33c523ba8ede4fa5a38d553 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 12 Nov 2024 10:22:26 +0000
Subject: [PATCH 1301/1761] Add news fragment

---
 newsfragments/4746.feature.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4746.feature.rst

diff --git a/newsfragments/4746.feature.rst b/newsfragments/4746.feature.rst
new file mode 100644
index 0000000000..a66da9e943
--- /dev/null
+++ b/newsfragments/4746.feature.rst
@@ -0,0 +1,2 @@
+Removed support for ``SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION``, as it
+is deemed prone to errors.

From 01b9f208110da1db8e8aec0debbffd15ea30a38c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 13 Nov 2024 10:57:40 +0000
Subject: [PATCH 1302/1761] Add note about removed variable in NEWS.rst

---
 NEWS.rst | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/NEWS.rst b/NEWS.rst
index 04666a9c02..9508b67aab 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -11,6 +11,10 @@ Features
   issues, as it may lead to improper functioning.
   Users who enable this setting are responsible for ensuring that ``pyproject.toml``
   complies with setuptools requirements. (#4611)
+
+  .. attention::
+     This environment variable was removed in a later version of ``setuptools``.
+
 - Require Python 3.9 or later. (#4718)
 - Remove dependency on ``importlib_resources``
   and the vendored copy of the library.

From 540001561bc2c6766940ba5fd6247735c1a3a290 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 13 Nov 2024 10:59:59 +0000
Subject: [PATCH 1303/1761] =?UTF-8?q?Bump=20version:=2075.4.0=20=E2=86=92?=
 =?UTF-8?q?=2075.5.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 10 ++++++++++
 newsfragments/4746.feature.rst |  2 --
 pyproject.toml                 |  2 +-
 4 files changed, 12 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4746.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 23aaeed4e4..71f8813923 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.4.0
+current_version = 75.5.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 9508b67aab..25dac78e1d 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,13 @@
+v75.5.0
+=======
+
+Features
+--------
+
+- Removed support for ``SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION``, as it
+  is deemed prone to errors. (#4746)
+
+
 v75.4.0
 =======
 
diff --git a/newsfragments/4746.feature.rst b/newsfragments/4746.feature.rst
deleted file mode 100644
index a66da9e943..0000000000
--- a/newsfragments/4746.feature.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Removed support for ``SETUPTOOLS_DANGEROUSLY_SKIP_PYPROJECT_VALIDATION``, as it
-is deemed prone to errors.
diff --git a/pyproject.toml b/pyproject.toml
index c8a739bcad..60ea8661b3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.4.0"
+version = "75.5.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 2b6dfd0864d58b87ad961f57e346ae0c7aab7e80 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 22 Oct 2024 14:00:50 +0100
Subject: [PATCH 1304/1761] Test compatibility between setuptools and wheel
 metadata for real use cases

---
 setuptools/tests/test_core_metadata.py | 209 ++++++++++++++++---------
 1 file changed, 136 insertions(+), 73 deletions(-)

diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index 2b585a9c91..816d79312f 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -1,16 +1,25 @@
+from __future__ import annotations
+
 import functools
 import importlib
 import io
 from email import message_from_string
+from email.message import Message
+from pathlib import Path
+from unittest.mock import Mock
 
 import pytest
 from packaging.metadata import Metadata
+from packaging.requirements import Requirement
 
 from setuptools import _reqs, sic
 from setuptools._core_metadata import rfc822_escape, rfc822_unescape
 from setuptools.command.egg_info import egg_info, write_requirements
+from setuptools.config import expand, setupcfg
 from setuptools.dist import Distribution
 
+from .config.downloads import retrieve_file, urls_from_file
+
 EXAMPLE_BASE_INFO = dict(
     name="package",
     version="0.0.1",
@@ -303,84 +312,138 @@ def test_maintainer_author(name, attrs, tmpdir):
             assert line in pkg_lines_set
 
 
-def test_parity_with_metadata_from_pypa_wheel(tmp_path):
-    attrs = dict(
-        **EXAMPLE_BASE_INFO,
-        # Example with complex requirement definition
-        python_requires=">=3.8",
-        install_requires="""
-        packaging==23.2
-        more-itertools==8.8.0; extra == "other"
-        jaraco.text==3.7.0
-        importlib-resources==5.10.2; python_version<"3.8"
-        importlib-metadata==6.0.0 ; python_version<"3.8"
-        colorama>=0.4.4; sys_platform == "win32"
-        """,
-        extras_require={
-            "testing": """
-                pytest >= 6
-                pytest-checkdocs >= 2.4
-                tomli ; \\
-                        # Using stdlib when possible
-                        python_version < "3.11"
-                ini2toml[lite]>=0.9
-                """,
-            "other": [],
-        },
-    )
-    # Generate a PKG-INFO file using setuptools
-    dist = Distribution(attrs)
-    with io.StringIO() as fp:
-        dist.metadata.write_pkg_file(fp)
-        pkg_info = fp.getvalue()
+class TestParityWithMetadataFromPyPaWheel:
+    def base_example(self):
+        attrs = dict(
+            **EXAMPLE_BASE_INFO,
+            # Example with complex requirement definition
+            python_requires=">=3.8",
+            install_requires="""
+            packaging==23.2
+            more-itertools==8.8.0; extra == "other"
+            jaraco.text==3.7.0
+            importlib-resources==5.10.2; python_version<"3.8"
+            importlib-metadata==6.0.0 ; python_version<"3.8"
+            colorama>=0.4.4; sys_platform == "win32"
+            """,
+            extras_require={
+                "testing": """
+                    pytest >= 6
+                    pytest-checkdocs >= 2.4
+                    tomli ; \\
+                            # Using stdlib when possible
+                            python_version < "3.11"
+                    ini2toml[lite]>=0.9
+                    """,
+                "other": [],
+            },
+        )
+        # Generate a PKG-INFO file using setuptools
+        return Distribution(attrs)
+
+    def test_requires_dist(self, tmp_path):
+        dist = self.base_example()
+        pkg_info = _get_pkginfo(dist)
+        assert _valid_metadata(pkg_info)
+
+        # Ensure Requires-Dist is present
+        expected = [
+            'Metadata-Version:',
+            'Requires-Python: >=3.8',
+            'Provides-Extra: other',
+            'Provides-Extra: testing',
+            'Requires-Dist: tomli; python_version < "3.11" and extra == "testing"',
+            'Requires-Dist: more-itertools==8.8.0; extra == "other"',
+            'Requires-Dist: ini2toml[lite]>=0.9; extra == "testing"',
+        ]
+        for line in expected:
+            assert line in pkg_info
+
+    HERE = Path(__file__).parent
+    EXAMPLES_FILE = HERE / "config/setupcfg_examples.txt"
+
+    @pytest.fixture(params=[None, *urls_from_file(EXAMPLES_FILE)])
+    def dist(self, request, monkeypatch, tmp_path):
+        """Example of distribution with arbitrary configuration"""
+        monkeypatch.chdir(tmp_path)
+        monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.42"))
+        monkeypatch.setattr(expand, "read_files", Mock(return_value="hello world"))
+        if request.param is None:
+            yield self.base_example()
+        else:
+            # Real-world usage
+            config = retrieve_file(request.param)
+            yield setupcfg.apply_configuration(Distribution({}), config)
+
+    def test_equivalent_output(self, tmp_path, dist):
+        """Ensure output from setuptools is equivalent to the one from `pypa/wheel`"""
+        # Generate a METADATA file using pypa/wheel for comparison
+        wheel_metadata = importlib.import_module("wheel.metadata")
+        pkginfo_to_metadata = getattr(wheel_metadata, "pkginfo_to_metadata", None)
+
+        if pkginfo_to_metadata is None:
+            pytest.xfail(
+                "wheel.metadata.pkginfo_to_metadata is undefined, "
+                "(this is likely to be caused by API changes in pypa/wheel"
+            )
+
+        # Generate a simplified "egg-info" dir for pypa/wheel to convert
+        pkg_info = _get_pkginfo(dist)
+        egg_info_dir = tmp_path / "pkg.egg-info"
+        egg_info_dir.mkdir(parents=True)
+        (egg_info_dir / "PKG-INFO").write_text(pkg_info, encoding="utf-8")
+        write_requirements(egg_info(dist), egg_info_dir, egg_info_dir / "requires.txt")
+
+        # Get pypa/wheel generated METADATA but normalize requirements formatting
+        metadata_msg = pkginfo_to_metadata(egg_info_dir, egg_info_dir / "PKG-INFO")
+        metadata_str = _normalize_metadata(metadata_msg)
+        pkg_info_msg = message_from_string(pkg_info)
+        pkg_info_str = _normalize_metadata(pkg_info_msg)
+
+        # Compare setuptools PKG-INFO x pypa/wheel METADATA
+        assert metadata_str == pkg_info_str
+
+
+def _normalize_metadata(msg: Message) -> str:
+    """Allow equivalent metadata to be compared directly"""
+    # The main challenge regards the requirements and extras.
+    # Both setuptools and wheel already apply some level of normalization
+    # but they differ regarding which character is chosen, according to the
+    # following spec it should be "-":
+    # https://packaging.python.org/en/latest/specifications/name-normalization/
+
+    # Related issues:
+    # https://github.com/pypa/packaging/issues/845
+    # https://github.com/pypa/packaging/issues/644#issuecomment-2429813968
+
+    extras = {x.replace("_", "-"): x for x in msg.get_all("Provides-Extra", [])}
+    reqs = [
+        _normalize_req(req, extras)
+        for req in _reqs.parse(msg.get_all("Requires-Dist", []))
+    ]
+    del msg["Requires-Dist"]
+    del msg["Provides-Extra"]
 
-    assert _valid_metadata(pkg_info)
+    for req in sorted(reqs):
+        msg["Requires-Dist"] = req
+    for extra in sorted(extras):
+        msg["Provides-Extra"] = extra
 
-    # Ensure Requires-Dist is present
-    expected = [
-        'Metadata-Version:',
-        'Requires-Python: >=3.8',
-        'Provides-Extra: other',
-        'Provides-Extra: testing',
-        'Requires-Dist: tomli; python_version < "3.11" and extra == "testing"',
-        'Requires-Dist: more-itertools==8.8.0; extra == "other"',
-        'Requires-Dist: ini2toml[lite]>=0.9; extra == "testing"',
-    ]
-    for line in expected:
-        assert line in pkg_info
+    return msg.as_string()
 
-    # Generate a METADATA file using pypa/wheel for comparison
-    wheel_metadata = importlib.import_module("wheel.metadata")
-    pkginfo_to_metadata = getattr(wheel_metadata, "pkginfo_to_metadata", None)
 
-    if pkginfo_to_metadata is None:
-        pytest.xfail(
-            "wheel.metadata.pkginfo_to_metadata is undefined, "
-            "(this is likely to be caused by API changes in pypa/wheel"
-        )
+def _normalize_req(req: Requirement, extras: dict[str, str]) -> str:
+    """Allow equivalent requirement objects to be compared directly"""
+    as_str = str(req).replace(req.name, req.name.replace("_", "-"))
+    for norm, orig in extras.items():
+        as_str = as_str.replace(orig, norm)
+    return as_str
 
-    # Generate an simplified "egg-info" dir for pypa/wheel to convert
-    egg_info_dir = tmp_path / "pkg.egg-info"
-    egg_info_dir.mkdir(parents=True)
-    (egg_info_dir / "PKG-INFO").write_text(pkg_info, encoding="utf-8")
-    write_requirements(egg_info(dist), egg_info_dir, egg_info_dir / "requires.txt")
-
-    # Get pypa/wheel generated METADATA but normalize requirements formatting
-    metadata_msg = pkginfo_to_metadata(egg_info_dir, egg_info_dir / "PKG-INFO")
-    metadata_deps = set(_reqs.parse(metadata_msg.get_all("Requires-Dist")))
-    metadata_extras = set(metadata_msg.get_all("Provides-Extra"))
-    del metadata_msg["Requires-Dist"]
-    del metadata_msg["Provides-Extra"]
-    pkg_info_msg = message_from_string(pkg_info)
-    pkg_info_deps = set(_reqs.parse(pkg_info_msg.get_all("Requires-Dist")))
-    pkg_info_extras = set(pkg_info_msg.get_all("Provides-Extra"))
-    del pkg_info_msg["Requires-Dist"]
-    del pkg_info_msg["Provides-Extra"]
-
-    # Compare setuptools PKG-INFO x pypa/wheel METADATA
-    assert metadata_msg.as_string() == pkg_info_msg.as_string()
-    assert metadata_deps == pkg_info_deps
-    assert metadata_extras == pkg_info_extras
+
+def _get_pkginfo(dist: Distribution):
+    with io.StringIO() as fp:
+        dist.metadata.write_pkg_file(fp)
+        return fp.getvalue()
 
 
 def _valid_metadata(text: str) -> bool:
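
The normalization performed by `_normalize_req` above boils down to swapping
"_" for "-" in the requirement name and in any extras the marker references.
A minimal standalone sketch (the requirement string and extras map below are
hypothetical):

    from packaging.requirements import Requirement

    extras = {"test-suite": "test_suite"}  # normalized form -> original form
    req = Requirement('pytest>=6; extra == "test_suite"')
    as_str = str(req).replace(req.name, req.name.replace("_", "-"))
    for norm, orig in extras.items():
        as_str = as_str.replace(orig, norm)
    assert as_str == 'pytest>=6; extra == "test-suite"'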

From 48fd9ba3856f57c571158cc448c7776ccbe414a0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 23 Oct 2024 11:37:15 +0100
Subject: [PATCH 1305/1761] Test metadata roundtrip when using pypa/wheel
 parsing techniques

---
 setuptools/tests/test_core_metadata.py | 28 ++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)

diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index 816d79312f..0473f67801 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -4,7 +4,10 @@
 import importlib
 import io
 from email import message_from_string
+from email.generator import Generator
 from email.message import Message
+from email.parser import Parser
+from email.policy import EmailPolicy
 from pathlib import Path
 from unittest.mock import Mock
 
@@ -403,6 +406,31 @@ def test_equivalent_output(self, tmp_path, dist):
         # Compare setuptools PKG-INFO x pypa/wheel METADATA
         assert metadata_str == pkg_info_str
 
+        # Make sure it parses/serializes well in pypa/wheel
+        _assert_roundtrip_message(pkg_info)
+
+
+def _assert_roundtrip_message(metadata: str) -> None:
+    """Emulate the way wheel.bdist_wheel parses and regenerates the message,
+    then ensure the metadata generated by setuptools is compatible.
+    """
+    with io.StringIO(metadata) as buffer:
+        msg = Parser().parse(buffer)
+
+    serialization_policy = EmailPolicy(
+        utf8=True,
+        mangle_from_=False,
+        max_line_length=0,
+    )
+    with io.BytesIO() as buffer:
+        out = io.TextIOWrapper(buffer, encoding="utf-8")
+        Generator(out, policy=serialization_policy).flatten(msg)
+        out.flush()
+        regenerated = buffer.getvalue()
+
+    raw_metadata = bytes(metadata, "utf-8")
+    assert regenerated == raw_metadata
+
 
 def _normalize_metadata(msg: Message) -> str:
     """Allow equivalent metadata to be compared directly"""

From fbbfbfb6b7df73bc843543fc7ae9986c3359bf51 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 23 Oct 2024 11:43:24 +0100
Subject: [PATCH 1306/1761] Preserve original PKG-INFO as METADATA in
 bdist_wheel

---
 setuptools/command/bdist_wheel.py | 62 ++++++++++++-------------------
 1 file changed, 24 insertions(+), 38 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 5855a8a832..976a322b14 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -14,14 +14,12 @@
 import sysconfig
 import warnings
 from collections.abc import Iterable, Sequence
-from email.generator import BytesGenerator, Generator
-from email.policy import EmailPolicy
+from email.generator import BytesGenerator
 from glob import iglob
 from typing import Literal, cast
 from zipfile import ZIP_DEFLATED, ZIP_STORED
 
 from packaging import tags, version as _packaging_version
-from wheel.metadata import pkginfo_to_metadata
 from wheel.wheelfile import WheelFile
 
 from .. import Command, __version__, _shutil
@@ -569,42 +567,30 @@ def adios(p: str) -> None:
 
             raise ValueError(err)
 
-        if os.path.isfile(egginfo_path):
-            # .egg-info is a single file
-            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
-            os.mkdir(distinfo_path)
-        else:
-            # .egg-info is a directory
-            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
-            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
-
-            # ignore common egg metadata that is useless to wheel
-            shutil.copytree(
-                egginfo_path,
-                distinfo_path,
-                ignore=lambda x, y: {
-                    "PKG-INFO",
-                    "requires.txt",
-                    "SOURCES.txt",
-                    "not-zip-safe",
-                },
-            )
-
-            # delete dependency_links if it is only whitespace
-            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
-            with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
-                dependency_links = dependency_links_file.read().strip()
-            if not dependency_links:
-                adios(dependency_links_path)
-
-        pkg_info_path = os.path.join(distinfo_path, "METADATA")
-        serialization_policy = EmailPolicy(
-            utf8=True,
-            mangle_from_=False,
-            max_line_length=0,
+        # .egg-info is a directory
+        pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
+
+        # ignore common egg metadata that is useless to wheel
+        shutil.copytree(
+            egginfo_path,
+            distinfo_path,
+            ignore=lambda x, y: {
+                "PKG-INFO",
+                "requires.txt",
+                "SOURCES.txt",
+                "not-zip-safe",
+            },
         )
-        with open(pkg_info_path, "w", encoding="utf-8") as out:
-            Generator(out, policy=serialization_policy).flatten(pkg_info)
+
+        # delete dependency_links if it is only whitespace
+        dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
+        with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
+            dependency_links = dependency_links_file.read().strip()
+        if not dependency_links:
+            adios(dependency_links_path)
+
+        metadata_path = os.path.join(distinfo_path, "METADATA")
+        shutil.copy(pkginfo_path, metadata_path)
 
         for license_path in self.license_paths:
             filename = os.path.basename(license_path)
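
The net effect of this change: instead of rebuilding METADATA from PKG-INFO
via an email parse/serialize cycle, bdist_wheel now ships the PKG-INFO bytes
verbatim. A rough standalone equivalent (paths are hypothetical):

    import os
    import shutil

    egginfo_path = "build/pkg.egg-info"       # produced by the egg_info command
    distinfo_path = "build/pkg-0.1.dist-info"

    # byte-for-byte copy, no re-rendering of the metadata
    shutil.copy(os.path.join(egginfo_path, "PKG-INFO"),
                os.path.join(distinfo_path, "METADATA"))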

From 33f97961c599b7a6a3e1bd2fd97490cc1b657be6 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 23 Oct 2024 13:01:15 +0100
Subject: [PATCH 1307/1761] Avoid newline problems on windows

---
 setuptools/tests/test_core_metadata.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index 0473f67801..70bfcc0090 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -429,6 +429,9 @@ def _assert_roundtrip_message(metadata: str) -> None:
         regenerated = buffer.getvalue()
 
     raw_metadata = bytes(metadata, "utf-8")
+    # Normalise newlines to avoid test errors on Windows:
+    raw_metadata = b"\n".join(raw_metadata.splitlines())
+    regenerated = b"\n".join(regenerated.splitlines())
     assert regenerated == raw_metadata
 
 

From 089aca9754d716b5459ab53a2b0c58adf9dd672d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 23 Oct 2024 19:21:55 +0100
Subject: [PATCH 1308/1761] Ignore coverage in test code

---
 setuptools/tests/test_core_metadata.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index 70bfcc0090..bc2e09ba7e 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -384,7 +384,7 @@ def test_equivalent_output(self, tmp_path, dist):
         wheel_metadata = importlib.import_module("wheel.metadata")
         pkginfo_to_metadata = getattr(wheel_metadata, "pkginfo_to_metadata", None)
 
-        if pkginfo_to_metadata is None:
+        if pkginfo_to_metadata is None:  # pragma: nocover
             pytest.xfail(
                 "wheel.metadata.pkginfo_to_metadata is undefined, "
                 "(this is likely to be caused by API changes in pypa/wheel"

From a4fa01db264b374b13d65cd64e2cd06ecf1ab9b5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 19 Nov 2024 17:25:56 +0000
Subject: [PATCH 1309/1761] Add news fragment

---
 newsfragments/4701.feature.rst | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 newsfragments/4701.feature.rst

diff --git a/newsfragments/4701.feature.rst b/newsfragments/4701.feature.rst
new file mode 100644
index 0000000000..2ee6cb1226
--- /dev/null
+++ b/newsfragments/4701.feature.rst
@@ -0,0 +1,3 @@
+Preserve the original ``PKG-INFO`` as ``METADATA`` when creating a wheel
+(instead of calling ``wheel.metadata.pkginfo_to_metadata``).
+This improves compliance with the flow specified in PEP 517.

From 0b5b4174380d5d2fc8781bc3a459d8d5e2e64859 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 19 Nov 2024 17:30:22 +0000
Subject: [PATCH 1310/1761] Mark tests that may depend on external network

---
 setuptools/tests/test_core_metadata.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index bc2e09ba7e..cf0bb32e9f 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -378,6 +378,7 @@ def dist(self, request, monkeypatch, tmp_path):
             config = retrieve_file(request.param)
             yield setupcfg.apply_configuration(Distribution({}), config)
 
+    @pytest.mark.uses_network
     def test_equivalent_output(self, tmp_path, dist):
         """Ensure output from setuptools is equivalent to the one from `pypa/wheel`"""
         # Generate a METADATA file using pypa/wheel for comparison
@@ -455,6 +456,7 @@ def _normalize_metadata(msg: Message) -> str:
     del msg["Requires-Dist"]
     del msg["Provides-Extra"]
 
+    # Ensure consistent ordering
     for req in sorted(reqs):
         msg["Requires-Dist"] = req
     for extra in sorted(extras):

From 9a4c8d484ad0010b5a0d9a97e645a50b4d657d13 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 19 Nov 2024 19:27:36 -0500
Subject: [PATCH 1311/1761] Runtime typing fixes for typeshed return type merge

---
 setuptools/build_meta.py               |  4 ++--
 setuptools/command/bdist_egg.py        |  2 +-
 setuptools/command/bdist_wheel.py      |  2 +-
 setuptools/command/build_ext.py        |  4 ++--
 setuptools/command/build_py.py         | 23 +++++++++--------------
 setuptools/command/easy_install.py     |  4 ++--
 setuptools/command/editable_wheel.py   |  4 ++--
 setuptools/command/egg_info.py         |  5 +++--
 setuptools/command/install_egg_info.py |  4 ++--
 setuptools/command/saveopts.py         |  4 ++--
 setuptools/command/sdist.py            |  4 ++--
 setuptools/command/setopt.py           |  2 +-
 setuptools/dist.py                     |  5 +++--
 setuptools/monkey.py                   |  6 +++---
 setuptools/msvc.py                     |  8 ++++----
 setuptools/unicode_utils.py            |  6 +++---
 16 files changed, 42 insertions(+), 45 deletions(-)

diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 23471accb6..00fa5e1f70 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -91,11 +91,11 @@ def patch(cls):
         for the duration of this context.
         """
         orig = distutils.core.Distribution
-        distutils.core.Distribution = cls
+        distutils.core.Distribution = cls  # type: ignore[misc] # monkeypatching
         try:
             yield
         finally:
-            distutils.core.Distribution = orig
+            distutils.core.Distribution = orig  # type: ignore[misc] # monkeypatching
 
 
 @contextlib.contextmanager
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 04d7e945bc..ac3e6ef1f9 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -277,7 +277,7 @@ def zip_safe(self):
         log.warn("zip_safe flag not set; analyzing archive contents...")
         return analyze_egg(self.bdist_dir, self.stubs)
 
-    def gen_header(self) -> str:
+    def gen_header(self) -> Literal["w"]:
         return 'w'
 
     def copy_metadata_to(self, target_dir) -> None:
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 976a322b14..234df2a7c7 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -218,7 +218,7 @@ class bdist_wheel(Command):
 
     def initialize_options(self) -> None:
         self.bdist_dir: str | None = None
-        self.data_dir: str | None = None
+        self.data_dir = ""
         self.plat_name: str | None = None
         self.plat_tag: str | None = None
         self.format = "zip"
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index f098246b9b..e5c6b76b38 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -95,7 +95,7 @@ class build_ext(_build_ext):
 
     def run(self):
         """Build extensions in build directory, then copy if --inplace"""
-        old_inplace, self.inplace = self.inplace, 0
+        old_inplace, self.inplace = self.inplace, False
         _build_ext.run(self)
         self.inplace = old_inplace
         if old_inplace:
@@ -248,7 +248,7 @@ def setup_shlib_compiler(self):
             compiler.set_link_objects(self.link_objects)
 
         # hack so distutils' build_extension() builds a library instead
-        compiler.link_shared_object = link_shared_object.__get__(compiler)
+        compiler.link_shared_object = link_shared_object.__get__(compiler)  # type: ignore[method-assign]
 
     def get_export_symbols(self, ext):
         if isinstance(ext, Library):
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index f8c9b11676..e7d60c6440 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -39,7 +39,7 @@ class build_py(orig.build_py):
 
     distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
     editable_mode: bool = False
-    existing_egg_info_dir: str | None = None  #: Private API, internal use only.
+    existing_egg_info_dir: StrPath | None = None  #: Private API, internal use only.
 
     def finalize_options(self):
         orig.build_py.finalize_options(self)
@@ -47,7 +47,6 @@ def finalize_options(self):
         self.exclude_package_data = self.distribution.exclude_package_data or {}
         if 'data_files' in self.__dict__:
             del self.__dict__['data_files']
-        self.__updated_files = []
 
     def copy_file(  # type: ignore[override] # No overload, no bytes support
         self,
@@ -89,12 +88,6 @@ def __getattr__(self, attr: str):
             return self.data_files
         return orig.build_py.__getattr__(self, attr)
 
-    def build_module(self, module, module_file, package):
-        outfile, copied = orig.build_py.build_module(self, module, module_file, package)
-        if copied:
-            self.__updated_files.append(outfile)
-        return outfile, copied
-
     def _get_data_files(self):
         """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
         self.analyze_manifest()
@@ -178,17 +171,17 @@ def build_package_data(self) -> None:
             _outf, _copied = self.copy_file(srcfile, target)
             make_writable(target)
 
-    def analyze_manifest(self):
-        self.manifest_files = mf = {}
+    def analyze_manifest(self) -> None:
+        self.manifest_files: dict[str, list[str]] = {}
         if not self.distribution.include_package_data:
             return
-        src_dirs = {}
+        src_dirs: dict[str, str] = {}
         for package in self.packages or ():
             # Locate package source directory
             src_dirs[assert_relative(self.get_package_dir(package))] = package
 
         if (
-            getattr(self, 'existing_egg_info_dir', None)
+            self.existing_egg_info_dir
             and Path(self.existing_egg_info_dir, "SOURCES.txt").exists()
         ):
             egg_info_dir = self.existing_egg_info_dir
@@ -217,9 +210,11 @@ def analyze_manifest(self):
                     importable = check.importable_subpackage(src_dirs[d], f)
                     if importable:
                         check.warn(importable)
-                mf.setdefault(src_dirs[d], []).append(path)
+                self.manifest_files.setdefault(src_dirs[d], []).append(path)
 
-    def _filter_build_files(self, files: Iterable[str], egg_info: str) -> Iterator[str]:
+    def _filter_build_files(
+        self, files: Iterable[str], egg_info: StrPath
+    ) -> Iterator[str]:
         """
         ``build_meta`` may try to create egg_info outside of the project directory,
         and this can be problematic for certain plugins (reported in issue #3500).
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index b40610f8ba..66fe68f7a9 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -238,7 +238,7 @@ def _render_version():
         print(f'setuptools {dist.version} from {dist.location} (Python {ver})')
         raise SystemExit
 
-    def finalize_options(self):  # noqa: C901  # is too complex (25)  # FIXME
+    def finalize_options(self) -> None:  # noqa: C901  # is too complex (25)  # FIXME
         self.version and self._render_version()
 
         py_version = sys.version.split()[0]
@@ -354,7 +354,7 @@ def finalize_options(self):  # noqa: C901  # is too complex (25)  # FIXME
                 "No urls, filenames, or requirements specified (see --help)"
             )
 
-        self.outputs = []
+        self.outputs: list[str] = []
 
     @staticmethod
     def _process_site_dirs(site_dirs):
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index db9a50c3af..6d23d11fad 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -779,12 +779,12 @@ def _empty_dir(dir_: _P) -> _P:
 
 
 class _NamespaceInstaller(namespaces.Installer):
-    def __init__(self, distribution, installation_dir, editable_name, src_root):
+    def __init__(self, distribution, installation_dir, editable_name, src_root) -> None:
         self.distribution = distribution
         self.src_root = src_root
         self.installation_dir = installation_dir
         self.editable_name = editable_name
-        self.outputs = []
+        self.outputs: list[str] = []
         self.dry_run = False
 
     def _get_nspkg_file(self):
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 1411ac3d89..a300356d33 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -7,6 +7,7 @@
 import re
 import sys
 import time
+from collections.abc import Callable
 
 import packaging
 import packaging.requirements
@@ -330,7 +331,7 @@ def __init__(
         super().__init__(warn, debug_print)
         self.ignore_egg_info_dir = ignore_egg_info_dir
 
-    def process_template_line(self, line):
+    def process_template_line(self, line) -> None:
         # Parse the line: split it up, make sure the right number of words
         # is there, and return the relevant words.  'action' is always
         # defined: it's the first word of the line.  Which of the other
@@ -338,7 +339,7 @@ def process_template_line(self, line):
         # patterns, (dir and patterns), or (dir_pattern).
         (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
 
-        action_map = {
+        action_map: dict[str, Callable] = {
             'include': self.include,
             'exclude': self.exclude,
             'global-include': self.global_include,
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
index be4dd7b229..a6e6ec6446 100644
--- a/setuptools/command/install_egg_info.py
+++ b/setuptools/command/install_egg_info.py
@@ -20,13 +20,13 @@ class install_egg_info(namespaces.Installer, Command):
     def initialize_options(self):
         self.install_dir = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
         ei_cmd = self.get_finalized_command("egg_info")
         basename = f"{ei_cmd._get_egg_basename()}.egg-info"
         self.source = ei_cmd.egg_info
         self.target = os.path.join(self.install_dir, basename)
-        self.outputs = []
+        self.outputs: list[str] = []
 
     def run(self) -> None:
         self.run_command('egg_info')
diff --git a/setuptools/command/saveopts.py b/setuptools/command/saveopts.py
index f175de1015..2a2cbce6e2 100644
--- a/setuptools/command/saveopts.py
+++ b/setuptools/command/saveopts.py
@@ -6,9 +6,9 @@ class saveopts(option_base):
 
     description = "save supplied options to setup.cfg or other config file"
 
-    def run(self):
+    def run(self) -> None:
         dist = self.distribution
-        settings = {}
+        settings: dict[str, dict[str, str]] = {}
 
         for cmd in dist.command_options:
             if cmd == 'saveopts':
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index be69b33500..64e866c96b 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -202,10 +202,10 @@ def read_manifest(self):
         """
         log.info("reading manifest file '%s'", self.manifest)
         manifest = open(self.manifest, 'rb')
-        for line in manifest:
+        for bytes_line in manifest:
             # The manifest must contain UTF-8. See #303.
             try:
-                line = line.decode('UTF-8')
+                line = bytes_line.decode('UTF-8')
             except UnicodeDecodeError:
                 log.warn("%r not UTF-8 decodable -- skipping" % line)
                 continue
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index 75393f32f0..200cdff0f7 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -37,7 +37,7 @@ def edit_config(filename, settings, dry_run=False):
     """
     log.debug("Reading configuration from %s", filename)
     opts = configparser.RawConfigParser()
-    opts.optionxform = lambda x: x
+    opts.optionxform = lambda optionstr: optionstr  # type: ignore[method-assign] # overriding method
     _cfg_read_utf8_with_fallback(opts, filename)
 
     for section, options in settings.items():
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 6062c4f868..5b3175fb5b 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -904,7 +904,7 @@ def _parse_command_opts(self, parser, args):
 
         return nargs
 
-    def get_cmdline_options(self):
+    def get_cmdline_options(self) -> dict[str, dict[str, str | None]]:
         """Return a '{cmd: {opt:val}}' map of all command-line options
 
         Option names are all long, but do not include the leading '--', and
@@ -914,9 +914,10 @@ def get_cmdline_options(self):
         Note that options provided by config files are intentionally excluded.
         """
 
-        d = {}
+        d: dict[str, dict[str, str | None]] = {}
 
         for cmd, opts in self.command_options.items():
+            val: str | None
             for opt, (src, val) in opts.items():
                 if src != "command line":
                     continue
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index 07919722b8..d8e30dbb80 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -73,7 +73,7 @@ def patch_all():
     import setuptools
 
     # we can't patch distutils.cmd, alas
-    distutils.core.Command = setuptools.Command
+    distutils.core.Command = setuptools.Command  # type: ignore[misc,assignment] # monkeypatching
 
     _patch_distribution_metadata()
 
@@ -82,8 +82,8 @@ def patch_all():
         module.Distribution = setuptools.dist.Distribution
 
     # Install the patched Extension
-    distutils.core.Extension = setuptools.extension.Extension
-    distutils.extension.Extension = setuptools.extension.Extension
+    distutils.core.Extension = setuptools.extension.Extension  # type: ignore[misc,assignment] # monkeypatching
+    distutils.extension.Extension = setuptools.extension.Extension  # type: ignore[misc,assignment] # monkeypatching
     if 'distutils.command.build_ext' in sys.modules:
         sys.modules[
             'distutils.command.build_ext'
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 6492d3be9d..94c64871a6 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -426,7 +426,7 @@ def find_reg_vs_vers(self):
                             vs_vers.append(ver)
         return sorted(vs_vers)
 
-    def find_programdata_vs_vers(self):
+    def find_programdata_vs_vers(self) -> dict[float, str]:
         r"""
         Find Visual studio 2017+ versions from information in
         "C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances".
@@ -436,7 +436,7 @@ def find_programdata_vs_vers(self):
         dict
             float version as key, path as value.
         """
-        vs_versions = {}
+        vs_versions: dict[float, str] = {}
         instances_dir = r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances'
 
         try:
@@ -607,7 +607,7 @@ def WindowsSdkLastVersion(self):
         return self._use_last_dir_name(os.path.join(self.WindowsSdkDir, 'lib'))
 
     @property
-    def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
+    def WindowsSdkDir(self) -> str | None:  # noqa: C901  # is too complex (12)  # FIXME
         """
         Microsoft Windows SDK directory.
 
@@ -616,7 +616,7 @@ def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
         str
             path
         """
-        sdkdir = ''
+        sdkdir: str | None = ''
         for ver in self.WindowsSdkVersion:
             # Try to get it from registry
             loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver)
diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
index 862d79e898..a6e33f2e0d 100644
--- a/setuptools/unicode_utils.py
+++ b/setuptools/unicode_utils.py
@@ -1,6 +1,6 @@
 import sys
 import unicodedata
-from configparser import ConfigParser
+from configparser import RawConfigParser
 
 from .compat import py39
 from .warnings import SetuptoolsDeprecationWarning
@@ -65,10 +65,10 @@ def _read_utf8_with_fallback(file: str, fallback_encoding=py39.LOCALE_ENCODING)
 
 
 def _cfg_read_utf8_with_fallback(
-    cfg: ConfigParser, file: str, fallback_encoding=py39.LOCALE_ENCODING
+    cfg: RawConfigParser, file: str, fallback_encoding=py39.LOCALE_ENCODING
 ) -> None:
     """Same idea as :func:`_read_utf8_with_fallback`, but for the
-    :meth:`ConfigParser.read` method.
+    :meth:`RawConfigParser.read` method.
 
     This method may call ``cfg.clear()``.
     """

From 2b471c275ff4e822dd7d41acf4fcaaf1bae41741 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 19 Nov 2024 19:41:20 -0500
Subject: [PATCH 1312/1761] Changed the ``WindowsSdkVersion``,
 ``FrameworkVersion32`` and ``FrameworkVersion64`` properties of
 ``setuptools.msvc.PlatformInfo`` to return an empty `tuple` instead of `None`
 as a fallthrough case

---
 newsfragments/xxx.feature.rst |  1 +
 setuptools/msvc.py            | 14 +++++++-------
 2 files changed, 8 insertions(+), 7 deletions(-)
 create mode 100644 newsfragments/xxx.feature.rst

diff --git a/newsfragments/xxx.feature.rst b/newsfragments/xxx.feature.rst
new file mode 100644
index 0000000000..2a46935eea
--- /dev/null
+++ b/newsfragments/xxx.feature.rst
@@ -0,0 +1 @@
+Changed the ``WindowsSdkVersion``, ``FrameworkVersion32`` and ``FrameworkVersion64`` properties of ``setuptools.msvc.PlatformInfo`` to return an empty `tuple` instead of `None` as a fallthrough case -- by :user:`Avasam`
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 94c64871a6..8d6d2cf084 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -20,7 +20,7 @@
 import distutils.errors
 
 if TYPE_CHECKING:
-    from typing_extensions import NotRequired
+    from typing_extensions import LiteralString, NotRequired
 
 # https://github.com/python/mypy/issues/8166
 if not TYPE_CHECKING and platform.system() == 'Windows':
@@ -573,7 +573,7 @@ def _guess_vc_legacy(self):
         return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc
 
     @property
-    def WindowsSdkVersion(self):
+    def WindowsSdkVersion(self) -> tuple[LiteralString, ...]:
         """
         Microsoft Windows SDK versions for specified MSVC++ version.
 
@@ -592,7 +592,7 @@ def WindowsSdkVersion(self):
             return '8.1', '8.1a'
         elif self.vs_ver >= 14.0:
             return '10.0', '8.1'
-        return None
+        return ()
 
     @property
     def WindowsSdkLastVersion(self):
@@ -800,7 +800,7 @@ def FrameworkDir64(self):
         return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw
 
     @property
-    def FrameworkVersion32(self):
+    def FrameworkVersion32(self) -> tuple[str, ...]:
         """
         Microsoft .NET Framework 32bit versions.
 
@@ -812,7 +812,7 @@ def FrameworkVersion32(self):
         return self._find_dot_net_versions(32)
 
     @property
-    def FrameworkVersion64(self):
+    def FrameworkVersion64(self) -> tuple[str, ...]:
         """
         Microsoft .NET Framework 64bit versions.
 
@@ -823,7 +823,7 @@ def FrameworkVersion64(self):
         """
         return self._find_dot_net_versions(64)
 
-    def _find_dot_net_versions(self, bits):
+    def _find_dot_net_versions(self, bits) -> tuple[str, ...]:
         """
         Find Microsoft .NET Framework versions.
 
@@ -851,7 +851,7 @@ def _find_dot_net_versions(self, bits):
             return 'v3.5', 'v2.0.50727'
         elif self.vs_ver == 8.0:
             return 'v3.0', 'v2.0.50727'
-        return None
+        return ()
 
     @staticmethod
     def _use_last_dir_name(path, prefix=''):
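
The motivation for returning `()` rather than `None` in these fallthrough
branches: callers iterate over the result, and an empty tuple makes the
"nothing found" case a no-op instead of a `TypeError`. A tiny sketch of the
pattern (simplified from the methods above):

    def find_dot_net_versions(vs_ver: float) -> tuple[str, ...]:
        if vs_ver >= 8.0:
            return ('v3.0', 'v2.0.50727')
        return ()  # empty tuple keeps the declared return type consistent

    for ver in find_dot_net_versions(vs_ver=7.1):
        print(ver)  # loop body simply never runs; no None check needed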

From 50d671b1e4ca090affe57d3a95b2ad7fa43c0e27 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 20 Nov 2024 13:41:34 +0000
Subject: [PATCH 1313/1761] Rename news fragment

---
 newsfragments/{xxx.feature.rst => 4754.feature.rst} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename newsfragments/{xxx.feature.rst => 4754.feature.rst} (100%)

diff --git a/newsfragments/xxx.feature.rst b/newsfragments/4754.feature.rst
similarity index 100%
rename from newsfragments/xxx.feature.rst
rename to newsfragments/4754.feature.rst

From bf2ced2a61833915a307c73405da99b6408154c7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 20 Nov 2024 17:50:09 +0000
Subject: [PATCH 1314/1761] =?UTF-8?q?Bump=20version:=2075.5.0=20=E2=86=92?=
 =?UTF-8?q?=2075.6.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 12 ++++++++++++
 newsfragments/4701.feature.rst |  3 ---
 newsfragments/4754.feature.rst |  1 -
 pyproject.toml                 |  2 +-
 5 files changed, 14 insertions(+), 6 deletions(-)
 delete mode 100644 newsfragments/4701.feature.rst
 delete mode 100644 newsfragments/4754.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 71f8813923..e21bc31417 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.5.0
+current_version = 75.6.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 25dac78e1d..326fdac650 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,15 @@
+v75.6.0
+=======
+
+Features
+--------
+
+- Preserve the original ``PKG-INFO`` as ``METADATA`` when creating a wheel
+  (instead of calling ``wheel.metadata.pkginfo_to_metadata``).
+  This improves compliance with the flow specified in PEP 517. (#4701)
+- Changed the ``WindowsSdkVersion``, ``FrameworkVersion32`` and ``FrameworkVersion64`` properties of ``setuptools.msvc.PlatformInfo`` to return an empty `tuple` instead of `None` as a fallthrough case -- by :user:`Avasam` (#4754)
+
+
 v75.5.0
 =======
 
diff --git a/newsfragments/4701.feature.rst b/newsfragments/4701.feature.rst
deleted file mode 100644
index 2ee6cb1226..0000000000
--- a/newsfragments/4701.feature.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Preserve the original ``PKG-INFO`` as ``METADATA`` when creating a wheel
-(instead of calling ``wheel.metadata.pkginfo_to_metadata``).
-This improves compliance with the flow specified in PEP 517.
diff --git a/newsfragments/4754.feature.rst b/newsfragments/4754.feature.rst
deleted file mode 100644
index 2a46935eea..0000000000
--- a/newsfragments/4754.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Changed the ``WindowsSdkVersion``, ``FrameworkVersion32`` and ``FrameworkVersion64`` properties of ``setuptools.msvc.PlatformInfo`` to return an empty `tuple` instead of `None` as a fallthrough case -- by :user:`Avasam`
diff --git a/pyproject.toml b/pyproject.toml
index 60ea8661b3..ff1e730dd7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.5.0"
+version = "75.6.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 79ae47e01ac6e9f5587025523c3a4063043d8d73 Mon Sep 17 00:00:00 2001
From: Alex Waygood 
Date: Fri, 22 Nov 2024 11:15:00 +0000
Subject: [PATCH 1315/1761] Bump Ruff to 0.8.0

---
 .pre-commit-config.yaml | 2 +-
 pyproject.toml          | 4 ++--
 ruff.toml               | 3 ---
 3 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 04870d16bf..aecc11eb22 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.7.1
+  rev: v0.8.0
   hooks:
   - id: ruff
     args: [--fix, --unsafe-fixes]
diff --git a/pyproject.toml b/pyproject.toml
index ff1e730dd7..7114b8a02f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -114,8 +114,8 @@ check = [
 
 	# local
 
-	# changed defaults for PT001 and PT023 astral-sh/ruff#13292
-	"ruff >= 0.7.0; sys_platform != 'cygwin'",
+	# Removal of deprecated UP027, PT004 & PT005 astral-sh/ruff#14383
+	"ruff >= 0.8.0; sys_platform != 'cygwin'",
 ]
 
 cover = [
diff --git a/ruff.toml b/ruff.toml
index 9f20438943..27757bcf4b 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -29,15 +29,12 @@ extend-select = [
 ]
 ignore = [
 	"PERF203", # try-except-in-loop, micro-optimisation with many false-positive. Worth checking but don't block CI
-	"PT004", # deprecated https://github.com/astral-sh/ruff/issues/8796#issuecomment-2057143531
-	"PT005", # deprecated https://github.com/astral-sh/ruff/issues/8796#issuecomment-2057143531
 	"PT007", # temporarily disabled, TODO: configure and standardize to preference
 	"PT011", # temporarily disabled, TODO: tighten expected error 
 	"PT012", # pytest-raises-with-multiple-statements, avoid extra dummy methods for a few lines, sometimes we explicitly assert in case of no error
 	"TRY003", # raise-vanilla-args, avoid multitude of exception classes
 	"TRY301", # raise-within-try, it's handy
 	"UP015", # redundant-open-modes, explicit is preferred
-	"UP027", # unpacked-list-comprehension, is actually slower for cases relevant to unpacking, set for deprecation: https://github.com/astral-sh/ruff/issues/12754
 	"UP030", # temporarily disabled
 	"UP031", # temporarily disabled
 	"UP032", # temporarily disabled

From ff9c6842d2581ce3c6db58b3595edb268e792ff7 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 28 Oct 2024 17:51:31 -0400
Subject: [PATCH 1316/1761] Return real boolean from copy_file

---
 distutils/file_util.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/distutils/file_util.py b/distutils/file_util.py
index 85ee4dafcb..0acc8cb84b 100644
--- a/distutils/file_util.py
+++ b/distutils/file_util.py
@@ -118,7 +118,7 @@ def copy_file(  # noqa: C901
     if update and not newer(src, dst):
         if verbose >= 1:
             log.debug("not copying %s (output up-to-date)", src)
-        return (dst, 0)
+        return (dst, False)
 
     try:
         action = _copy_action[link]
@@ -132,7 +132,7 @@ def copy_file(  # noqa: C901
             log.info("%s %s -> %s", action, src, dst)
 
     if dry_run:
-        return (dst, 1)
+        return (dst, True)
 
     # If linking (hard or symbolic), use the appropriate system call
     # (Unix only, of course, but that's the caller's responsibility)
@@ -146,11 +146,11 @@ def copy_file(  # noqa: C901
                 #  even under Unix, see issue #8876).
                 pass
             else:
-                return (dst, 1)
+                return (dst, True)
     elif link == 'sym':
         if not (os.path.exists(dst) and os.path.samefile(src, dst)):
             os.symlink(src, dst)
-            return (dst, 1)
+            return (dst, True)
 
     # Otherwise (non-Mac, not linking), copy the file contents and
     # (optionally) copy the times and mode.
@@ -165,7 +165,7 @@ def copy_file(  # noqa: C901
         if preserve_mode:
             os.chmod(dst, S_IMODE(st[ST_MODE]))
 
-    return (dst, 1)
+    return (dst, True)
 
 
 # XXX I suspect this is Unix-specific -- need porting help!
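
copy_file's return contract is `(dst, copied)`; the change above only makes
the second element a real `bool` instead of `0`/`1`, so truth-testing callers
are unaffected. For example (paths hypothetical):

    from distutils.file_util import copy_file

    dst, copied = copy_file("src.txt", "build/src.txt", update=True)
    if not copied:
        print(f"{dst} was already up-to-date; nothing was copied")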

From 9c1bec62b3781ad176b4d674034648452c500d67 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 24 Nov 2024 16:06:18 -0500
Subject: [PATCH 1317/1761] Fix test_mkpath_exception_uncached

---
 distutils/tests/test_dir_util.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index fcc37ac568..1d4001af6f 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -123,6 +123,10 @@ class FailPath(pathlib.Path):
             def mkdir(self, *args, **kwargs):
                 raise OSError("Failed to create directory")
 
+            _flavor = (
+                pathlib._windows_flavour if os.name == 'nt' else pathlib._posix_flavour
+            )
+
         target = tmp_path / 'foodir'
 
         with pytest.raises(errors.DistutilsFileError):

From 499dc247ac5989f70b06e4047b0a631c8323bc67 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 24 Nov 2024 16:16:32 -0500
Subject: [PATCH 1318/1761] Add a comment pointing to the motivation.

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index 6de5c4a399..66d39f34fa 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -211,6 +211,7 @@ exclude = [
 namespaces = true
 
 [tool.setuptools.exclude-package-data]
+# Remove ruff.toml when installing vendored packages (#4652)
 "*" = ["ruff.toml"]
 
 [tool.distutils.sdist]

From 89627a77258ea9e333dceac535cab050cfa80adf Mon Sep 17 00:00:00 2001
From: Sam Gross 
Date: Fri, 1 Nov 2024 15:47:29 +0000
Subject: [PATCH 1319/1761] Set `Py_GIL_DISABLED=1` for free threading on
 Windows

When free threaded CPython is installed from the official Windows
installer, it doesn't have the macro `Py_GIL_DISABLED` properly set
because its `pyconfig.h` file is shared with the co-installed default
build.

Define the macro when building free threaded Python extensions on
Windows so that each individual C API extension doesn't have to work
around this limitation.

See https://github.com/pypa/setuptools/issues/4662
---
 distutils/command/build_ext.py | 8 +++++++-
 distutils/util.py              | 4 ++++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index a7e3038be6..8d3dd7688a 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -23,7 +23,7 @@
 )
 from ..extension import Extension
 from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
-from ..util import get_platform, is_mingw
+from ..util import get_platform, is_mingw, is_freethreaded
 
 # An extension name is just a dot-separated list of Python NAMEs (ie.
 # the same as a fully-qualified module name).
@@ -333,6 +333,12 @@ def run(self):  # noqa: C901
         if os.name == 'nt' and self.plat_name != get_platform():
             self.compiler.initialize(self.plat_name)
 
+        # The official Windows free threaded Python installer doesn't set
+        # Py_GIL_DISABLED because its pyconfig.h is shared with the
+        # default build, so we need to define it here.
+        if os.name == 'nt' and is_freethreaded():
+            self.compiler.define_macro('Py_GIL_DISABLED', '1')
+
         # And make sure that any compile/link-related options (which might
         # come from the command-line or from the setup script) are set in
         # that CCompiler object -- that way, they automatically apply to
diff --git a/distutils/util.py b/distutils/util.py
index 609c1a50cd..6ef2c9854a 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -503,3 +503,7 @@ def is_mingw():
     get_platform() starts with 'mingw'.
     """
     return sys.platform == 'win32' and get_platform().startswith('mingw')
+
+def is_freethreaded():
+    """Return True if the Python interpreter is built with free threading support."""
+    return bool(sysconfig.get_config_var('Py_GIL_DISABLED'))
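
Until a release with this change is available, a setup.py could apply the
same workaround by hand. A hedged sketch (the extension name and sources are
placeholders):

    import os
    import sysconfig
    from setuptools import Extension, setup

    define_macros = []
    if os.name == 'nt' and sysconfig.get_config_var('Py_GIL_DISABLED'):
        # mirror what the patched build_ext does for every extension
        define_macros.append(('Py_GIL_DISABLED', '1'))

    setup(ext_modules=[
        Extension('spam', sources=['spam.c'], define_macros=define_macros),
    ])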

From de1e6245eece2b51df15d42a49bf5a406cc71f78 Mon Sep 17 00:00:00 2001
From: Sam Gross 
Date: Fri, 1 Nov 2024 16:18:27 +0000
Subject: [PATCH 1320/1761] Link to setuptools issue

---
 distutils/command/build_ext.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 8d3dd7688a..271378e580 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -335,7 +335,8 @@ def run(self):  # noqa: C901
 
         # The official Windows free threaded Python installer doesn't set
         # Py_GIL_DISABLED because its pyconfig.h is shared with the
-        # default build, so we need to define it here.
+        # default build, so we need to define it here
+        # (see pypa/setuptools#4662).
         if os.name == 'nt' and is_freethreaded():
             self.compiler.define_macro('Py_GIL_DISABLED', '1')
 

From 480a30f0c78dc448d691faecb650c152fa8cdc42 Mon Sep 17 00:00:00 2001
From: Mathieu Dupuy 
Date: Sun, 1 Dec 2024 15:33:09 +0100
Subject: [PATCH 1321/1761] correct 'namespace' term to 'namespace-package' in
 quickstart doc

---
 docs/userguide/quickstart.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst
index b0f1401e0e..d303ab9355 100644
--- a/docs/userguide/quickstart.rst
+++ b/docs/userguide/quickstart.rst
@@ -199,7 +199,7 @@ Package discovery
 -----------------
 For projects that follow a simple directory structure, ``setuptools`` should be
 able to automatically detect all :term:`packages <package>` and
-:term:`namespaces <namespace>`. However, complex projects might include
+:term:`namespaces <namespace package>`. However, complex projects might include
 additional folders and supporting files that should not necessarily be
 distributed (or that can confuse the ``setuptools`` auto-discovery algorithm).
 

From cc43212ffcc2b3e9830fe4f95d465a17f9f04116 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 12 Dec 2024 16:01:30 +0000
Subject: [PATCH 1322/1761] Workaround for actions/setup-python#981 (#4769)

---
 .github/workflows/main.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index bb58704edd..db8c150173 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -63,6 +63,9 @@ jobs:
         - platform: ubuntu-latest
           python: "3.10"
           distutils: stdlib
+        # TODO: Re-evaluate the need for the following workaround
+        exclude:
+        - {python: "3.9", platform: "macos-latest"}  # actions/setup-python#981
     runs-on: ${{ matrix.platform }}
     continue-on-error: ${{ matrix.python == '3.14' }}
     env:

From 83d425d1efe433dccb56ee89a726f50e7e5e9536 Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Mon, 9 Dec 2024 23:56:00 +0000
Subject: [PATCH 1323/1761] Fix wheel file naming

---
 setuptools/_normalization.py         |  8 +++++++-
 setuptools/command/bdist_wheel.py    | 14 ++++++++++----
 setuptools/tests/test_bdist_wheel.py |  4 ++--
 setuptools/tests/test_dist_info.py   |  2 +-
 4 files changed, 20 insertions(+), 8 deletions(-)

diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py
index 467b643d46..9541a55d6c 100644
--- a/setuptools/_normalization.py
+++ b/setuptools/_normalization.py
@@ -134,7 +134,13 @@ def filename_component_broken(value: str) -> str:
 def safer_name(value: str) -> str:
     """Like ``safe_name`` but can be used as filename component for wheel"""
     # See bdist_wheel.safer_name
-    return filename_component(safe_name(value))
+    return (
+        # Per https://packaging.python.org/en/latest/specifications/name-normalization/#name-normalization
+        re.sub(r"[-_.]+", "-", safe_name(value))
+        .lower()
+        # Per https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode
+        .replace("-", "_")
+    )
 
 
 def safer_best_effort_version(value: str) -> str:
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 234df2a7c7..2f129481fa 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -134,7 +134,13 @@ def get_abi_tag() -> str | None:
 
 
 def safer_name(name: str) -> str:
-    return safe_name(name).replace("-", "_")
+    return (
+        # Per https://packaging.python.org/en/latest/specifications/name-normalization/#name-normalization
+        re.sub(r"[-_.]+", "-", safe_name(name))
+        .lower()
+        # Per https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode
+        .replace("-", "_")
+    )
 
 
 def safer_version(version: str) -> str:
@@ -364,9 +370,9 @@ def get_tag(self) -> tuple[str, str, str]:
             supported_tags = [
                 (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
             ]
-            assert tag in supported_tags, (
-                f"would build wheel with unsupported tag {tag}"
-            )
+            assert (
+                tag in supported_tags
+            ), f"would build wheel with unsupported tag {tag}"
         return tag
 
     def run(self):
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index d51dfbeb6d..776d21d729 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -246,9 +246,9 @@ def test_no_scripts(wheel_paths):
 
 
 def test_unicode_record(wheel_paths):
-    path = next(path for path in wheel_paths if "unicode.dist" in path)
+    path = next(path for path in wheel_paths if "unicode_dist" in path)
     with ZipFile(path) as zf:
-        record = zf.read("unicode.dist-0.1.dist-info/RECORD")
+        record = zf.read("unicode_dist-0.1.dist-info/RECORD")
 
     assert "åäö_日本語.py".encode() in record
 
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
index 31e6e95a68..426694e019 100644
--- a/setuptools/tests/test_dist_info.py
+++ b/setuptools/tests/test_dist_info.py
@@ -188,7 +188,7 @@ def test_dist_info_is_the_same_as_in_wheel(
         dist_info = next(tmp_path.glob("dir_dist/*.dist-info"))
 
         assert dist_info.name == wheel_dist_info.name
-        assert dist_info.name.startswith(f"{name.replace('-', '_')}-{version}{suffix}")
+        assert dist_info.name.startswith(f"my_proj-{version}{suffix}")
         for file in "METADATA", "entry_points.txt":
             assert read(dist_info / file) == read(wheel_dist_info / file)
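
The two-step normalization in `safer_name` explains the test changes above
("unicode.dist" now maps to "unicode_dist"). A condensed sketch that leaves
out the legacy `safe_name` pre-pass:

    import re

    def safer_name(name: str) -> str:
        # 1) canonical form per the name-normalization spec
        normalized = re.sub(r"[-_.]+", "-", name).lower()
        # 2) escape for filename components per binary-distribution-format
        return normalized.replace("-", "_")

    assert safer_name("unicode.dist") == "unicode_dist"
    assert safer_name("My-Proj") == "my_proj"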
 

From c1bf7c76ace9eec1de1a6fb807787eb3ab51022a Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Mon, 9 Dec 2024 23:59:43 +0000
Subject: [PATCH 1324/1761] Fix bug in test

---
 setuptools/tests/test_easy_install.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 586324be37..f7a39c8ca6 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -719,7 +719,7 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
                         # running the setup.py at all is sufficient
                         run_setup(test_setup_py, ['--name'])
                     except pkg_resources.VersionConflict:
-                        self.fail(
+                        pytest.fail(
                             'Installing setup.py requirements caused a VersionConflict'
                         )
 

From 25120929af621de277c1d386cf6c60e0249f70a8 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 22 Dec 2024 18:29:31 -0500
Subject: [PATCH 1325/1761] Remove link to jaraco/path#232

---
 setuptools/tests/test_build_ext.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index 88318b26c5..f4bacad8be 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -183,7 +183,7 @@ def get_build_ext_cmd(self, optional: bool, **opts) -> build_ext:
             "eggs.c": "#include missingheader.h\n",
             ".build": {"lib": {}, "tmp": {}},
         }
-        path.build(files)  # jaraco/path#232
+        path.build(files)
         extension = Extension('spam.eggs', ['eggs.c'], optional=optional)
         dist = Distribution(dict(ext_modules=[extension]))
         dist.script_name = 'setup.py'

From 11798dfdf00cb35d4b06d285b5fe48e3ddaa5a26 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 22 Dec 2024 23:28:57 -0500
Subject: [PATCH 1326/1761] Bump mypy to 1.14, jaraco.path to 3.7.2

---
 mypy.ini                               | 6 +++---
 pyproject.toml                         | 4 ++--
 setuptools/tests/test_build_ext.py     | 4 ++--
 setuptools/tests/test_core_metadata.py | 4 ++--
 4 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index 57e19efa9e..c1d01a42c3 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -58,7 +58,7 @@ ignore_missing_imports = True
 
 #  - wheel: does not intend on exposing a programmatic API https://github.com/pypa/wheel/pull/610#issuecomment-2081687671
 [mypy-wheel.*]
-ignore_missing_imports = True
+follow_untyped_imports = True
 # - The following are not marked as py.typed:
 #  - jaraco: Since mypy 1.12, the root name of the untyped namespace package gets called-out too
 #  - jaraco.develop: https://github.com/jaraco/jaraco.develop/issues/22
@@ -66,8 +66,8 @@ ignore_missing_imports = True
 #  - jaraco.packaging: https://github.com/jaraco/jaraco.packaging/issues/20
 #  - jaraco.path: https://github.com/jaraco/jaraco.path/issues/2
 #  - jaraco.text: https://github.com/jaraco/jaraco.text/issues/17
-[mypy-jaraco,jaraco.develop,jaraco.envs,jaraco.packaging.*,jaraco.path,jaraco.text]
-ignore_missing_imports = True
+[mypy-jaraco,jaraco.develop.*,jaraco.envs,jaraco.packaging.*,jaraco.path,jaraco.text]
+follow_untyped_imports = True
 
 # Even when excluding a module, import issues can show up due to following import
 # https://github.com/python/mypy/issues/11936#issuecomment-1466764006
diff --git a/pyproject.toml b/pyproject.toml
index a19d4ac164..48df917af8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -44,7 +44,7 @@ test = [
 	"packaging>=24.2",
 	"jaraco.envs>=2.2",
 	"pytest-xdist>=3", # Dropped dependency on pytest-fork and py
-	"jaraco.path>=3.2.0",
+	"jaraco.path>=3.7.2", # Typing fixes
 	"build[virtualenv]>=1.0.3",
 	"filelock>=3.4.0",
 	"ini2toml[lite]>=0.14",
@@ -135,7 +135,7 @@ type = [
 	# pin mypy version so a new version doesn't suddenly cause the CI to fail,
 	# until types-setuptools is removed from typeshed.
 	# For help with static-typing issues, or mypy update, ping @Avasam
-	"mypy>=1.12,<1.14",
+	"mypy==1.14.*",
 	# Typing fixes in version newer than we require at runtime
 	"importlib_metadata>=7.0.2; python_version < '3.10'",
 	# Imported unconditionally in tools/finalize.py
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index 88318b26c5..d107a272e1 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -179,11 +179,11 @@ def C(file):
 
 class TestBuildExtInplace:
     def get_build_ext_cmd(self, optional: bool, **opts) -> build_ext:
-        files = {
+        files: dict[str, str | dict[str, dict[str, str]]] = {
             "eggs.c": "#include missingheader.h\n",
             ".build": {"lib": {}, "tmp": {}},
         }
-        path.build(files)  # jaraco/path#232
+        path.build(files)
         extension = Extension('spam.eggs', ['eggs.c'], optional=optional)
         dist = Distribution(dict(ext_modules=[extension]))
         dist.script_name = 'setup.py'
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index cf0bb32e9f..b67373bc37 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -5,7 +5,7 @@
 import io
 from email import message_from_string
 from email.generator import Generator
-from email.message import Message
+from email.message import EmailMessage, Message
 from email.parser import Parser
 from email.policy import EmailPolicy
 from pathlib import Path
@@ -416,7 +416,7 @@ def _assert_roundtrip_message(metadata: str) -> None:
     then ensures the metadata generated by setuptools is compatible.
     """
     with io.StringIO(metadata) as buffer:
-        msg = Parser().parse(buffer)
+        msg = Parser(EmailMessage).parse(buffer)
 
     serialization_policy = EmailPolicy(
         utf8=True,

From 52848a0d32ee377a578b8cafd7090446e240eb9e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 11:19:27 -0500
Subject: [PATCH 1327/1761] Trim the comment a bit.

---
 distutils/command/build_ext.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 271378e580..df2524b1ce 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -335,8 +335,7 @@ def run(self):  # noqa: C901
 
         # The official Windows free threaded Python installer doesn't set
         # Py_GIL_DISABLED because its pyconfig.h is shared with the
-        # default build, so we need to define it here
-        # (see pypa/setuptools#4662).
+        # default build, so define it here (pypa/setuptools#4662).
         if os.name == 'nt' and is_freethreaded():
             self.compiler.define_macro('Py_GIL_DISABLED', '1')
 

From 468532edd3ce99dfdbdf88d9a0b70a2b50fccc04 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 12:10:10 -0500
Subject: [PATCH 1328/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/tests/test_dir_util.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index 1d4001af6f..08d71393e5 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -106,8 +106,9 @@ def test_copy_tree_exception_in_listdir(self):
         """
         An exception in listdir should raise a DistutilsFileError
         """
-        with mock.patch("os.listdir", side_effect=OSError()), pytest.raises(
-            errors.DistutilsFileError
+        with (
+            mock.patch("os.listdir", side_effect=OSError()),
+            pytest.raises(errors.DistutilsFileError),
         ):
             src = self.tempdirs[-1]
             dir_util.copy_tree(src, None)

From bbee59bd0f8a671659674df42286051f59ea96ce Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 12:10:34 -0500
Subject: [PATCH 1329/1761] Use alternate spelling for flavor attribute.

---
 distutils/tests/test_dir_util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index 08d71393e5..6cb84e3a38 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -124,7 +124,7 @@ class FailPath(pathlib.Path):
             def mkdir(self, *args, **kwargs):
                 raise OSError("Failed to create directory")
 
-            _flavor = (
+            _flavour = (
                 pathlib._windows_flavour if os.name == 'nt' else pathlib._posix_flavour
             )
 

From a7fdc064b25bd395a126090dd573198d1b933003 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 12:11:02 -0500
Subject: [PATCH 1330/1761] Only apply the workaround on Pythons that require it.

---
 distutils/tests/test_dir_util.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index 6cb84e3a38..65a69d8fd6 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -3,6 +3,7 @@
 import os
 import pathlib
 import stat
+import sys
 import unittest.mock as mock
 from distutils import dir_util, errors
 from distutils.dir_util import (
@@ -124,9 +125,12 @@ class FailPath(pathlib.Path):
             def mkdir(self, *args, **kwargs):
                 raise OSError("Failed to create directory")
 
-            _flavour = (
-                pathlib._windows_flavour if os.name == 'nt' else pathlib._posix_flavour
-            )
+            if sys.version_info < (3, 12):
+                _flavour = (
+                    pathlib._windows_flavour
+                    if os.name == 'nt'
+                    else pathlib._posix_flavour
+                )
 
         target = tmp_path / 'foodir'
 

From 01fbd65a89697b3631bb4c30809a1ca7b7601835 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 12:12:25 -0500
Subject: [PATCH 1331/1761] Let pathlib resolve the flavor.

---
 distutils/tests/test_dir_util.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/distutils/tests/test_dir_util.py b/distutils/tests/test_dir_util.py
index 65a69d8fd6..326cb34614 100644
--- a/distutils/tests/test_dir_util.py
+++ b/distutils/tests/test_dir_util.py
@@ -126,11 +126,7 @@ def mkdir(self, *args, **kwargs):
                 raise OSError("Failed to create directory")
 
             if sys.version_info < (3, 12):
-                _flavour = (
-                    pathlib._windows_flavour
-                    if os.name == 'nt'
-                    else pathlib._posix_flavour
-                )
+                _flavour = pathlib.Path()._flavour
 
         target = tmp_path / 'foodir'
 

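The end state of these three patches deserves a note: before Python 3.12, pathlib.Path subclasses only instantiate correctly if they carry the private _flavour attribute, and borrowing it from a concrete instance sidesteps the _windows_flavour/_posix_flavour internals that 3.12 removed. The resulting pattern, extracted as a standalone sketch:

    import pathlib
    import sys

    class FailPath(pathlib.Path):
        """A Path whose mkdir() always fails, for exercising error paths."""

        if sys.version_info < (3, 12):
            # Pre-3.12 pathlib dispatches on the private _flavour attribute;
            # from 3.12 on, plain subclassing just works.
            _flavour = pathlib.Path()._flavour

        def mkdir(self, *args, **kwargs):
            raise OSError("Failed to create directory")

    try:
        FailPath("foodir").mkdir()
    except OSError as exc:
        print(exc)  # Failed to create directory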
From b24919b17acda9ec262465687e302deb2fc2cb25 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 12:30:08 -0500
Subject: [PATCH 1332/1761] 👹 Feed the hobgoblins (delint).
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/command/build_ext.py | 2 +-
 distutils/util.py              | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index df2524b1ce..82c0e9f5e6 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -23,7 +23,7 @@
 )
 from ..extension import Extension
 from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
-from ..util import get_platform, is_mingw, is_freethreaded
+from ..util import get_platform, is_freethreaded, is_mingw
 
 # An extension name is just a dot-separated list of Python NAMEs (ie.
 # the same as a fully-qualified module name).
diff --git a/distutils/util.py b/distutils/util.py
index 6ef2c9854a..8d8260bc33 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -504,6 +504,7 @@ def is_mingw():
     """
     return sys.platform == 'win32' and get_platform().startswith('mingw')
 
+
 def is_freethreaded():
     """Return True if the Python interpreter is built with free threading support."""
     return bool(sysconfig.get_config_var('Py_GIL_DISABLED'))

From 1bae350d30c5ce556d0595394800c8d35c71c4e2 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 24 Nov 2024 15:33:47 -0500
Subject: [PATCH 1333/1761] Run Ruff 0.8.0

---
 distutils/command/build_clib.py       |  3 +--
 distutils/command/build_ext.py        |  6 ++---
 distutils/command/check.py            |  5 +---
 distutils/command/install_data.py     |  5 ++--
 distutils/fancy_getopt.py             |  6 ++---
 distutils/filelist.py                 |  5 +---
 distutils/spawn.py                    |  2 +-
 distutils/tests/__init__.py           |  2 +-
 distutils/tests/test_file_util.py     | 13 +++++-----
 distutils/tests/test_spawn.py         | 34 +++++++++++++++++----------
 distutils/tests/test_unixccompiler.py | 13 +++++-----
 distutils/tests/test_version.py       | 12 +++++-----
 distutils/version.py                  |  3 +--
 ruff.toml                             |  4 ++++
 14 files changed, 58 insertions(+), 55 deletions(-)

diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
index a600d09373..1305d5bb3d 100644
--- a/distutils/command/build_clib.py
+++ b/distutils/command/build_clib.py
@@ -138,8 +138,7 @@ def check_library_list(self, libraries):
 
             if '/' in name or (os.sep != '/' and os.sep in name):
                 raise DistutilsSetupError(
-                    f"bad library name '{lib[0]}': "
-                    "may not contain directory separators"
+                    f"bad library name '{lib[0]}': may not contain directory separators"
                 )
 
             if not isinstance(build_info, dict):
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 82c0e9f5e6..cf60bd0ad8 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -443,8 +443,7 @@ def check_extensions_list(self, extensions):  # noqa: C901
                 for macro in macros:
                     if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                         raise DistutilsSetupError(
-                            "'macros' element of build info dict "
-                            "must be 1- or 2-tuple"
+                            "'macros' element of build info dict must be 1- or 2-tuple"
                         )
                     if len(macro) == 1:
                         ext.undef_macros.append(macro[0])
@@ -672,8 +671,7 @@ def find_swig(self):
                 return "swig.exe"
         else:
             raise DistutilsPlatformError(
-                "I don't know how to find (much less run) SWIG "
-                f"on platform '{os.name}'"
+                f"I don't know how to find (much less run) SWIG on platform '{os.name}'"
             )
 
     # -- Name generators -----------------------------------------------
diff --git a/distutils/command/check.py b/distutils/command/check.py
index 93d754e73d..1375028e4d 100644
--- a/distutils/command/check.py
+++ b/distutils/command/check.py
@@ -46,10 +46,7 @@ class check(Command):
         (
             'restructuredtext',
             'r',
-            (
-                'Checks if long string meta-data syntax '
-                'are reStructuredText-compliant'
-            ),
+            'Checks if long string meta-data syntax are reStructuredText-compliant',
         ),
         ('strict', 's', 'Will exit with an error if a check fails'),
     ]
diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index a90ec3b4d0..36f5bcc8bf 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -9,7 +9,7 @@
 
 import functools
 import os
-from typing import Iterable
+from collections.abc import Iterable
 
 from ..core import Command
 from ..util import change_root, convert_path
@@ -22,8 +22,7 @@ class install_data(Command):
         (
             'install-dir=',
             'd',
-            "base directory for installing data files "
-            "[default: installation base dir]",
+            "base directory for installing data files [default: installation base dir]",
         ),
         ('root=', None, "install everything relative to this alternate root directory"),
         ('force', 'f', "force installation (overwrite existing files)"),
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index 907cc2b73c..4ea89603fa 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -12,7 +12,8 @@
 import re
 import string
 import sys
-from typing import Any, Sequence
+from collections.abc import Sequence
+from typing import Any
 
 from .errors import DistutilsArgError, DistutilsGetoptError
 
@@ -167,8 +168,7 @@ def _grok_option_table(self):  # noqa: C901
 
             if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
                 raise DistutilsGetoptError(
-                    f"invalid short option '{short}': "
-                    "must a single character or None"
+                    f"invalid short option '{short}': must a single character or None"
                 )
 
             self.repeat[long] = repeat
diff --git a/distutils/filelist.py b/distutils/filelist.py
index 44ae9e67ef..9857b19549 100644
--- a/distutils/filelist.py
+++ b/distutils/filelist.py
@@ -127,10 +127,7 @@ def process_template_line(self, line):  # noqa: C901
             for pattern in patterns:
                 if not self.exclude_pattern(pattern, anchor=True):
                     log.warning(
-                        (
-                            "warning: no previously-included files "
-                            "found matching '%s'"
-                        ),
+                        "warning: no previously-included files found matching '%s'",
                         pattern,
                     )
 
diff --git a/distutils/spawn.py b/distutils/spawn.py
index 107b011397..ba280334d1 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -12,7 +12,7 @@
 import subprocess
 import sys
 import warnings
-from typing import Mapping
+from collections.abc import Mapping
 
 from ._log import log
 from .debug import DEBUG
diff --git a/distutils/tests/__init__.py b/distutils/tests/__init__.py
index 93fbf49074..5a8ab06100 100644
--- a/distutils/tests/__init__.py
+++ b/distutils/tests/__init__.py
@@ -8,7 +8,7 @@
 """
 
 import shutil
-from typing import Sequence
+from collections.abc import Sequence
 
 
 def missing_compiler_executable(cmd_names: Sequence[str] = []):  # pragma: no cover
diff --git a/distutils/tests/test_file_util.py b/distutils/tests/test_file_util.py
index 85ac2136b3..a75d4a0317 100644
--- a/distutils/tests/test_file_util.py
+++ b/distutils/tests/test_file_util.py
@@ -44,18 +44,19 @@ def test_move_file_verbosity(self, caplog):
 
     def test_move_file_exception_unpacking_rename(self):
         # see issue 22182
-        with mock.patch("os.rename", side_effect=OSError("wrong", 1)), pytest.raises(
-            DistutilsFileError
+        with (
+            mock.patch("os.rename", side_effect=OSError("wrong", 1)),
+            pytest.raises(DistutilsFileError),
         ):
             jaraco.path.build({self.source: 'spam eggs'})
             move_file(self.source, self.target, verbose=False)
 
     def test_move_file_exception_unpacking_unlink(self):
         # see issue 22182
-        with mock.patch(
-            "os.rename", side_effect=OSError(errno.EXDEV, "wrong")
-        ), mock.patch("os.unlink", side_effect=OSError("wrong", 1)), pytest.raises(
-            DistutilsFileError
+        with (
+            mock.patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")),
+            mock.patch("os.unlink", side_effect=OSError("wrong", 1)),
+            pytest.raises(DistutilsFileError),
         ):
             jaraco.path.build({self.source: 'spam eggs'})
             move_file(self.source, self.target, verbose=False)
diff --git a/distutils/tests/test_spawn.py b/distutils/tests/test_spawn.py
index fd7b669cbf..fcbc765ef2 100644
--- a/distutils/tests/test_spawn.py
+++ b/distutils/tests/test_spawn.py
@@ -73,9 +73,12 @@ def test_find_executable(self, tmp_path):
         # PATH='': no match, except in the current directory
         with os_helper.EnvironmentVarGuard() as env:
             env['PATH'] = ''
-            with mock.patch(
-                'distutils.spawn.os.confstr', return_value=tmp_dir, create=True
-            ), mock.patch('distutils.spawn.os.defpath', tmp_dir):
+            with (
+                mock.patch(
+                    'distutils.spawn.os.confstr', return_value=tmp_dir, create=True
+                ),
+                mock.patch('distutils.spawn.os.defpath', tmp_dir),
+            ):
                 rv = find_executable(program)
                 assert rv is None
 
@@ -87,9 +90,10 @@ def test_find_executable(self, tmp_path):
         # PATH=':': explicitly looks in the current directory
         with os_helper.EnvironmentVarGuard() as env:
             env['PATH'] = os.pathsep
-            with mock.patch(
-                'distutils.spawn.os.confstr', return_value='', create=True
-            ), mock.patch('distutils.spawn.os.defpath', ''):
+            with (
+                mock.patch('distutils.spawn.os.confstr', return_value='', create=True),
+                mock.patch('distutils.spawn.os.defpath', ''),
+            ):
                 rv = find_executable(program)
                 assert rv is None
 
@@ -103,16 +107,22 @@ def test_find_executable(self, tmp_path):
             env.pop('PATH', None)
 
             # without confstr
-            with mock.patch(
-                'distutils.spawn.os.confstr', side_effect=ValueError, create=True
-            ), mock.patch('distutils.spawn.os.defpath', tmp_dir):
+            with (
+                mock.patch(
+                    'distutils.spawn.os.confstr', side_effect=ValueError, create=True
+                ),
+                mock.patch('distutils.spawn.os.defpath', tmp_dir),
+            ):
                 rv = find_executable(program)
                 assert rv == filename
 
             # with confstr
-            with mock.patch(
-                'distutils.spawn.os.confstr', return_value=tmp_dir, create=True
-            ), mock.patch('distutils.spawn.os.defpath', ''):
+            with (
+                mock.patch(
+                    'distutils.spawn.os.confstr', return_value=tmp_dir, create=True
+                ),
+                mock.patch('distutils.spawn.os.defpath', ''),
+            ):
                 rv = find_executable(program)
                 assert rv == filename
 
diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index 50b66544a8..1695328771 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -272,13 +272,12 @@ def gcvs(*args, _orig=sysconfig.get_config_vars):
 
         sysconfig.get_config_var = gcv
         sysconfig.get_config_vars = gcvs
-        with mock.patch.object(
-            self.cc, 'spawn', return_value=None
-        ) as mock_spawn, mock.patch.object(
-            self.cc, '_need_link', return_value=True
-        ), mock.patch.object(
-            self.cc, 'mkpath', return_value=None
-        ), EnvironmentVarGuard() as env:
+        with (
+            mock.patch.object(self.cc, 'spawn', return_value=None) as mock_spawn,
+            mock.patch.object(self.cc, '_need_link', return_value=True),
+            mock.patch.object(self.cc, 'mkpath', return_value=None),
+            EnvironmentVarGuard() as env,
+        ):
             env['CC'] = 'ccache my_cc'
             env['CXX'] = 'my_cxx'
             del env['LDSHARED']
diff --git a/distutils/tests/test_version.py b/distutils/tests/test_version.py
index 1508e1cc0a..b68f097724 100644
--- a/distutils/tests/test_version.py
+++ b/distutils/tests/test_version.py
@@ -53,9 +53,9 @@ def test_cmp_strict(self):
             res = StrictVersion(v1)._cmp(v2)
             assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
             res = StrictVersion(v1)._cmp(object())
-            assert (
-                res is NotImplemented
-            ), f'cmp({v1}, {v2}) should be NotImplemented, got {res}'
+            assert res is NotImplemented, (
+                f'cmp({v1}, {v2}) should be NotImplemented, got {res}'
+            )
 
     def test_cmp(self):
         versions = (
@@ -75,6 +75,6 @@ def test_cmp(self):
             res = LooseVersion(v1)._cmp(v2)
             assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
             res = LooseVersion(v1)._cmp(object())
-            assert (
-                res is NotImplemented
-            ), f'cmp({v1}, {v2}) should be NotImplemented, got {res}'
+            assert res is NotImplemented, (
+                f'cmp({v1}, {v2}) should be NotImplemented, got {res}'
+            )
diff --git a/distutils/version.py b/distutils/version.py
index 942b56bf94..2223ee9c8c 100644
--- a/distutils/version.py
+++ b/distutils/version.py
@@ -53,8 +53,7 @@ def __init__(self, vstring=None):
         if vstring:
             self.parse(vstring)
         warnings.warn(
-            "distutils Version classes are deprecated. "
-            "Use packaging.version instead.",
+            "distutils Version classes are deprecated. Use packaging.version instead.",
             DeprecationWarning,
             stacklevel=2,
         )
diff --git a/ruff.toml b/ruff.toml
index 9c78018338..0cc5b267d7 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -19,6 +19,10 @@ extend-select = [
 	"YTT",
 ]
 ignore = [
+	# TODO: Fix these new violations in Ruff 0.8.0
+	"UP031",
+	"UP036",
+
 	# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
 	"W191",
 	"E111",

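One theme in this Ruff run worth calling out: Iterable, Sequence, and Mapping now come from collections.abc rather than typing (rule UP035); the typing aliases have been deprecated since Python 3.9. A small self-contained example of the preferred form:

    from __future__ import annotations

    # Deprecated:  from typing import Iterable, Mapping
    from collections.abc import Iterable, Mapping

    def first_match(names: Iterable[str], table: Mapping[str, int]) -> int | None:
        """Return the value for the first name found in table, else None."""
        for name in names:
            if name in table:
                return table[name]
        return None

    print(first_match(["b", "a"], {"a": 1}))  # 1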
From f5b7336316af0e984e4b55a361aeb29225f7065e Mon Sep 17 00:00:00 2001
From: Agriya Khetarpal <74401230+agriyakhetarpal@users.noreply.github.com>
Date: Thu, 26 Dec 2024 23:28:57 +0530
Subject: [PATCH 1334/1761] Add review suggestions around code comments

Co-authored-by: Jason R. Coombs 
Co-authored-by: Avasam 
---
 distutils/extension.py            | 2 +-
 distutils/tests/test_extension.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/distutils/extension.py b/distutils/extension.py
index 0b77614507..fa088ec2f5 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -109,7 +109,7 @@ def __init__(
         if not isinstance(name, str):
             raise AssertionError("'name' must be a string")  # noqa: TRY004
 
-        # we handle the string case first; though strings are iterable, we disallow them
+        # handle the string case first; since strings are iterable, disallow them
         if isinstance(sources, str):
             raise AssertionError(  # noqa: TRY004
                 "'sources' must be an iterable of strings or PathLike objects, not a string"
diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py
index 31d1fc890e..7b4612849e 100644
--- a/distutils/tests/test_extension.py
+++ b/distutils/tests/test_extension.py
@@ -69,7 +69,7 @@ def test_extension_init(self):
         assert ext.name == 'name'
 
         # the second argument, which is the list of files, must
-        # be a list of strings or PathLike objects, and not a string
+        # be an iterable of strings or PathLike objects, and not a string
         with pytest.raises(AssertionError):
             Extension('name', 'file')
         with pytest.raises(AssertionError):

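The reworded comment points at a real pitfall: a str is itself an iterable of one-character strings, so without the explicit guard a lone filename would be accepted and silently shredded. Roughly:

    import os

    # What the guard prevents: a bare filename treated as an iterable
    # of "sources" decays into single characters.
    print(list(map(os.fspath, "file.c")))  # ['f', 'i', 'l', 'e', '.', 'c']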
From efeb97c02684965d63e78eb9319458b0e8074f66 Mon Sep 17 00:00:00 2001
From: Agriya Khetarpal <74401230+agriyakhetarpal@users.noreply.github.com>
Date: Thu, 26 Dec 2024 23:33:03 +0530
Subject: [PATCH 1335/1761] Use `TypeError` instead of `AssertionError`

---
 distutils/extension.py            | 6 +++---
 distutils/tests/test_extension.py | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/distutils/extension.py b/distutils/extension.py
index fa088ec2f5..f925987e84 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -107,11 +107,11 @@ def __init__(
         **kw,  # To catch unknown keywords
     ):
         if not isinstance(name, str):
-            raise AssertionError("'name' must be a string")  # noqa: TRY004
+            raise TypeError("'name' must be a string")  # noqa: TRY004
 
         # handle the string case first; since strings are iterable, disallow them
         if isinstance(sources, str):
-            raise AssertionError(  # noqa: TRY004
+            raise TypeError(
                 "'sources' must be an iterable of strings or PathLike objects, not a string"
             )
 
@@ -119,7 +119,7 @@ def __init__(
         try:
             self.sources = list(map(os.fspath, sources))
         except TypeError:
-            raise AssertionError(
+            raise TypeError(
                 "'sources' must be an iterable of strings or PathLike objects"
             )
 
diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py
index 7b4612849e..dc998ec55b 100644
--- a/distutils/tests/test_extension.py
+++ b/distutils/tests/test_extension.py
@@ -63,16 +63,16 @@ def test_read_setup_file(self):
 
     def test_extension_init(self):
         # the first argument, which is the name, must be a string
-        with pytest.raises(AssertionError):
+        with pytest.raises(TypeError):
             Extension(1, [])
         ext = Extension('name', [])
         assert ext.name == 'name'
 
         # the second argument, which is the list of files, must
         # be an iterable of strings or PathLike objects, and not a string
-        with pytest.raises(AssertionError):
+        with pytest.raises(TypeError):
             Extension('name', 'file')
-        with pytest.raises(AssertionError):
+        with pytest.raises(TypeError):
             Extension('name', ['file', 1])
         ext = Extension('name', ['file1', 'file2'])
         assert ext.sources == ['file1', 'file2']

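TypeError is the conventional exception for argument-type validation (it is also what Ruff's TRY004 rule nudges toward), so callers can now catch something idiomatic. A quick sketch of the new surface, assuming a checkout with this patch applied:

    from distutils.extension import Extension

    try:
        Extension('spam', 'spam.c')  # a lone string, not an iterable of names
    except TypeError as exc:
        print(exc)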
From a88eace7acc39b76aeb8d967777d285dbeb0341f Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 24 Nov 2024 17:08:38 -0500
Subject: [PATCH 1336/1761] UP031 manual fixes for Ruff 0.8.0

---
 distutils/command/build.py            |  3 ++-
 distutils/command/install.py          |  4 ++--
 distutils/command/install_egg_info.py |  8 +++-----
 distutils/command/sdist.py            |  3 +--
 distutils/dist.py                     |  2 +-
 distutils/fancy_getopt.py             | 10 +++++-----
 distutils/sysconfig.py                |  2 +-
 distutils/tests/test_build.py         |  4 +++-
 distutils/tests/test_build_ext.py     |  7 ++++---
 distutils/text_file.py                |  4 ++--
 distutils/util.py                     |  2 +-
 11 files changed, 25 insertions(+), 24 deletions(-)

diff --git a/distutils/command/build.py b/distutils/command/build.py
index caf55073af..ccd2c706a3 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -113,7 +113,8 @@ def finalize_options(self):  # noqa: C901
             self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier)
         if self.build_scripts is None:
             self.build_scripts = os.path.join(
-                self.build_base, 'scripts-%d.%d' % sys.version_info[:2]
+                self.build_base,
+                f'scripts-{sys.version_info.major}.{sys.version_info.minor}',
             )
 
         if self.executable is None and sys.executable:
diff --git a/distutils/command/install.py b/distutils/command/install.py
index ceb453e041..9400995024 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -407,8 +407,8 @@ def finalize_options(self):  # noqa: C901
             'dist_version': self.distribution.get_version(),
             'dist_fullname': self.distribution.get_fullname(),
             'py_version': py_version,
-            'py_version_short': '%d.%d' % sys.version_info[:2],
-            'py_version_nodot': '%d%d' % sys.version_info[:2],
+            'py_version_short': f'{sys.version_info.major}.{sys.version_info.minor}',
+            'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}',
             'sys_prefix': prefix,
             'prefix': prefix,
             'sys_exec_prefix': exec_prefix,
diff --git a/distutils/command/install_egg_info.py b/distutils/command/install_egg_info.py
index 4fbb3440ab..0baeee7bb4 100644
--- a/distutils/command/install_egg_info.py
+++ b/distutils/command/install_egg_info.py
@@ -31,11 +31,9 @@ def basename(self):
         Allow basename to be overridden by child class.
         Ref pypa/distutils#2.
         """
-        return "%s-%s-py%d.%d.egg-info" % (
-            to_filename(safe_name(self.distribution.get_name())),
-            to_filename(safe_version(self.distribution.get_version())),
-            *sys.version_info[:2],
-        )
+        name = to_filename(safe_name(self.distribution.get_name()))
+        version = to_filename(safe_version(self.distribution.get_version()))
+        return f"{name}-{version}-py{sys.version_info.major}.{sys.version_info.minor}.egg-info"
 
     def finalize_options(self):
         self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index d723a1c9fb..003e0bf875 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -362,8 +362,7 @@ def read_template(self):
                 # convert_path function
                 except (DistutilsTemplateError, ValueError) as msg:
                     self.warn(
-                        "%s, line %d: %s"
-                        % (template.filename, template.current_line, msg)
+                        f"{template.filename}, line {int(template.current_line)}: {msg}"
                     )
         finally:
             template.close()
diff --git a/distutils/dist.py b/distutils/dist.py
index 8e1e6d0b4e..f58159add9 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -722,7 +722,7 @@ def print_command_list(self, commands, header, max_length):
             except AttributeError:
                 description = "(no description available)"
 
-            print("  %-*s  %s" % (max_length, cmd, description))
+            print(f"  {cmd:<{max_length}}  {description}")
 
     def print_commands(self):
         """Print out a help message listing all available commands with a
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index 4ea89603fa..6f507ad9ea 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -351,18 +351,18 @@ def generate_help(self, header=None):  # noqa: C901
             # Case 1: no short option at all (makes life easy)
             if short is None:
                 if text:
-                    lines.append("  --%-*s  %s" % (max_opt, long, text[0]))
+                    lines.append(f"  --{long:<{max_opt}}  {text[0]}")
                 else:
-                    lines.append("  --%-*s  " % (max_opt, long))
+                    lines.append(f"  --{long:<{max_opt}}")
 
             # Case 2: we have a short option, so we have to include it
             # just after the long option
             else:
                 opt_names = f"{long} (-{short})"
                 if text:
-                    lines.append("  --%-*s  %s" % (max_opt, opt_names, text[0]))
+                    lines.append(f"  --{opt_names:<{max_opt}}  {text[0]}")
                 else:
-                    lines.append("  --%-*s" % opt_names)
+                    lines.append(f"  --{opt_names:<{max_opt}}")
 
             for ell in text[1:]:
                 lines.append(big_indent + ell)
@@ -464,6 +464,6 @@ def __init__(self, options: Sequence[Any] = []):
 say, "How should I know?"].)"""
 
     for w in (10, 20, 30, 40):
-        print("width: %d" % w)
+        print(f"width: {w}")
         print("\n".join(wrap_text(text, w)))
         print()
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index da1eecbe7e..fc0ea78721 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -107,7 +107,7 @@ def get_python_version():
     leaving off the patchlevel.  Sample return values could be '1.5'
     or '2.2'.
     """
-    return '%d.%d' % sys.version_info[:2]
+    return f'{sys.version_info.major}.{sys.version_info.minor}'
 
 
 def get_python_inc(plat_specific=False, prefix=None):
diff --git a/distutils/tests/test_build.py b/distutils/tests/test_build.py
index d379aca0bb..f7fe69acd1 100644
--- a/distutils/tests/test_build.py
+++ b/distutils/tests/test_build.py
@@ -40,7 +40,9 @@ def test_finalize_options(self):
         assert cmd.build_temp == wanted
 
         # build_scripts is build/scripts-x.x
-        wanted = os.path.join(cmd.build_base, 'scripts-%d.%d' % sys.version_info[:2])
+        wanted = os.path.join(
+            cmd.build_base, f'scripts-{sys.version_info.major}.{sys.version_info.minor}'
+        )
         assert cmd.build_scripts == wanted
 
         # executable is os.path.normpath(sys.executable)
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index 8bd3cef855..3e73d5bf3a 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -522,14 +522,15 @@ def _try_compile_deployment_target(self, operator, target):  # pragma: no cover
         # at least one value we test with will not exist yet.
         if target[:2] < (10, 10):
             # for 10.1 through 10.9.x -> "10n0"
-            target = '%02d%01d0' % target
+            tmpl = '{:02}{:01}0'
         else:
             # for 10.10 and beyond -> "10nn00"
             if len(target) >= 2:
-                target = '%02d%02d00' % target
+                tmpl = '{:02}{:02}00'
             else:
                 # 11 and later can have no minor version (11 instead of 11.0)
-                target = '%02d0000' % target
+                tmpl = '{:02}0000'
+        target = tmpl.format(*target)
         deptarget_ext = Extension(
             'deptarget',
             [self.tmp_path / 'deptargetmodule.c'],
diff --git a/distutils/text_file.py b/distutils/text_file.py
index fec29c73b0..89d9048d59 100644
--- a/distutils/text_file.py
+++ b/distutils/text_file.py
@@ -133,9 +133,9 @@ def gen_error(self, msg, line=None):
             line = self.current_line
         outmsg.append(self.filename + ", ")
         if isinstance(line, (list, tuple)):
-            outmsg.append("lines %d-%d: " % tuple(line))
+            outmsg.append("lines {}-{}: ".format(*line))
         else:
-            outmsg.append("line %d: " % line)
+            outmsg.append(f"line {int(line)}: ")
         outmsg.append(str(msg))
         return "".join(outmsg)
 
diff --git a/distutils/util.py b/distutils/util.py
index 8d8260bc33..fdc7ba9839 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -288,7 +288,7 @@ def split_quoted(s):
             elif s[end] == '"':  # slurp doubly-quoted string
                 m = _dquote_re.match(s, end)
             else:
-                raise RuntimeError("this can't happen (bad char '%c')" % s[end])
+                raise RuntimeError(f"this can't happen (bad char '{s[end]}')")
 
             if m is None:
                 raise ValueError(f"bad string (mismatched {s[end]} quotes?)")

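Several of the fixes above rely on a less-known f-string feature: replacement fields can nest inside the format spec, so a width computed at runtime replaces the old '%-*s' idiom. In miniature:

    commands = [
        ("build", "build everything needed to install"),
        ("sdist", "create a source distribution"),
    ]
    max_length = max(len(cmd) for cmd, _ in commands)

    for cmd, description in commands:
        # {max_length} is evaluated first and spliced into the spec,
        # mirroring the old '  %-*s  %s' % (max_length, cmd, description).
        print(f"  {cmd:<{max_length}}  {description}")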
From 2017969f03d94d325b1d1aa3f5c2bcad807bff18 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 28 Oct 2024 14:24:39 -0400
Subject: [PATCH 1337/1761] Make reinitialize_command's return type generic
 when the "command" argument is a Command

---
 distutils/cmd.py  | 17 ++++++++++++++++-
 distutils/dist.py | 20 +++++++++++++++++++-
 2 files changed, 35 insertions(+), 2 deletions(-)

diff --git a/distutils/cmd.py b/distutils/cmd.py
index 2bb97956ab..6ffe7bd4b3 100644
--- a/distutils/cmd.py
+++ b/distutils/cmd.py
@@ -4,15 +4,20 @@
 in the distutils.command package.
 """
 
+from __future__ import annotations
+
 import logging
 import os
 import re
 import sys
+from typing import TypeVar, overload
 
 from . import _modified, archive_util, dir_util, file_util, util
 from ._log import log
 from .errors import DistutilsOptionError
 
+_CommandT = TypeVar("_CommandT", bound="Command")
+
 
 class Command:
     """Abstract base class for defining command classes, the "worker bees"
@@ -305,7 +310,17 @@ def get_finalized_command(self, command, create=True):
 
     # XXX rename to 'get_reinitialized_command()'? (should do the
     # same in dist.py, if so)
-    def reinitialize_command(self, command, reinit_subcommands=False):
+    @overload
+    def reinitialize_command(
+        self, command: str, reinit_subcommands: bool = False
+    ) -> Command: ...
+    @overload
+    def reinitialize_command(
+        self, command: _CommandT, reinit_subcommands: bool = False
+    ) -> _CommandT: ...
+    def reinitialize_command(
+        self, command: str | Command, reinit_subcommands=False
+    ) -> Command:
         return self.distribution.reinitialize_command(command, reinit_subcommands)
 
     def run_command(self, command):
diff --git a/distutils/dist.py b/distutils/dist.py
index 8e1e6d0b4e..a47945984a 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -4,6 +4,8 @@
 being built/installed/distributed.
 """
 
+from __future__ import annotations
+
 import contextlib
 import logging
 import os
@@ -13,6 +15,7 @@
 import warnings
 from collections.abc import Iterable
 from email import message_from_file
+from typing import TYPE_CHECKING, TypeVar, overload
 
 from packaging.utils import canonicalize_name, canonicalize_version
 
@@ -27,6 +30,11 @@
 from .fancy_getopt import FancyGetopt, translate_longopt
 from .util import check_environ, rfc822_escape, strtobool
 
+if TYPE_CHECKING:
+    from .cmd import Command
+
+_CommandT = TypeVar("_CommandT", bound="Command")
+
 # Regex to define acceptable Distutils command names.  This is not *quite*
 # the same as a Python NAME -- I don't allow leading underscores.  The fact
 # that they're very similar is no coincidence; the default naming scheme is
@@ -900,7 +908,17 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
             except ValueError as msg:
                 raise DistutilsOptionError(msg)
 
-    def reinitialize_command(self, command, reinit_subcommands=False):
+    @overload
+    def reinitialize_command(
+        self, command: str, reinit_subcommands: bool = False
+    ) -> Command: ...
+    @overload
+    def reinitialize_command(
+        self, command: _CommandT, reinit_subcommands: bool = False
+    ) -> _CommandT: ...
+    def reinitialize_command(
+        self, command: str | Command, reinit_subcommands=False
+    ) -> Command:
         """Reinitializes a command to the state it was in when first
         returned by 'get_command_obj()': ie., initialized but not yet
         finalized.  This provides the opportunity to sneak option

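The overload pair encodes "string in, some Command out; Command instance in, the same type back out" while leaving runtime behavior untouched. A reduced sketch of the pattern outside distutils:

    from __future__ import annotations

    from typing import TypeVar, overload

    class Command: ...
    class build_py(Command): ...

    _CommandT = TypeVar("_CommandT", bound=Command)

    @overload
    def reinitialize(command: str) -> Command: ...
    @overload
    def reinitialize(command: _CommandT) -> _CommandT: ...
    def reinitialize(command: str | Command) -> Command:
        # The overloads only narrow the static return type; at runtime
        # a name is resolved (here: freshly constructed) either way.
        return command if isinstance(command, Command) else Command()

    cmd = reinitialize(build_py())  # type checkers infer build_py, not Command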
From 2a01f314e1f4e0091e4bab2ddb498b4e7c789045 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 25 Nov 2024 17:29:14 -0500
Subject: [PATCH 1338/1761] Coerce Distribution.script_args to list

---
 distutils/core.py            | 5 ++++-
 distutils/dist.py            | 4 +++-
 distutils/fancy_getopt.py    | 6 ++++--
 distutils/tests/test_dist.py | 6 ++++++
 4 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/distutils/core.py b/distutils/core.py
index bc06091abb..bd62546bdd 100644
--- a/distutils/core.py
+++ b/distutils/core.py
@@ -6,9 +6,12 @@
 really defined in distutils.dist and distutils.cmd.
 """
 
+from __future__ import annotations
+
 import os
 import sys
 import tokenize
+from collections.abc import Iterable
 
 from .cmd import Command
 from .debug import DEBUG
@@ -215,7 +218,7 @@ def run_commands(dist):
     return dist
 
 
-def run_setup(script_name, script_args=None, stop_after="run"):
+def run_setup(script_name, script_args: Iterable[str] | None = None, stop_after="run"):
     """Run a setup script in a somewhat controlled environment, and
     return the Distribution instance that drives things.  This is useful
     if you need to find out the distribution meta-data (passed as
diff --git a/distutils/dist.py b/distutils/dist.py
index 8e1e6d0b4e..b633a62236 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -169,7 +169,7 @@ def __init__(self, attrs=None):  # noqa: C901
         # and sys.argv[1:], but they can be overridden when the caller is
         # not necessarily a setup script run from the command-line.
         self.script_name = None
-        self.script_args = None
+        self.script_args: list[str] | None = None
 
         # 'command_options' is where we store command options between
         # parsing them (from config files, the command-line, etc.) and when
@@ -269,6 +269,8 @@ def __init__(self, attrs=None):  # noqa: C901
         self.want_user_cfg = True
 
         if self.script_args is not None:
+            # Coerce any possible iterable from attrs into a list
+            self.script_args = list(self.script_args)
             for arg in self.script_args:
                 if not arg.startswith('-'):
                     break
diff --git a/distutils/fancy_getopt.py b/distutils/fancy_getopt.py
index 4ea89603fa..c4aeaf2348 100644
--- a/distutils/fancy_getopt.py
+++ b/distutils/fancy_getopt.py
@@ -8,6 +8,8 @@
   * options set attributes of a passed-in object
 """
 
+from __future__ import annotations
+
 import getopt
 import re
 import string
@@ -219,7 +221,7 @@ def _grok_option_table(self):  # noqa: C901
                 self.short_opts.append(short)
                 self.short2long[short[0]] = long
 
-    def getopt(self, args=None, object=None):  # noqa: C901
+    def getopt(self, args: Sequence[str] | None = None, object=None):  # noqa: C901
         """Parse command-line options in args. Store as attributes on object.
 
         If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
@@ -375,7 +377,7 @@ def print_help(self, header=None, file=None):
             file.write(line + "\n")
 
 
-def fancy_getopt(options, negative_opt, object, args):
+def fancy_getopt(options, negative_opt, object, args: Sequence[str] | None):
     parser = FancyGetopt(options)
     parser.set_negative_aliases(negative_opt)
     return parser.getopt(args, object)
diff --git a/distutils/tests/test_dist.py b/distutils/tests/test_dist.py
index 4d78a19803..7f44777eac 100644
--- a/distutils/tests/test_dist.py
+++ b/distutils/tests/test_dist.py
@@ -246,6 +246,12 @@ def test_find_config_files_disable(self, temp_home):
         # make sure --no-user-cfg disables the user cfg file
         assert len(all_files) - 1 == len(files)
 
+    def test_script_args_list_coercion(self):
+        d = Distribution(attrs={'script_args': ('build', '--no-user-cfg')})
+
+        # make sure script_args is a list even if it started as a different iterable
+        assert d.script_args == ['build', '--no-user-cfg']
+
     @pytest.mark.skipif(
         'platform.system() == "Windows"',
         reason='Windows does not honor chmod 000',

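With the coercion in place, a caller can hand Distribution any iterable, including a one-shot generator, and the later option scanning still sees a real list. Roughly, with this patch applied:

    from distutils.dist import Distribution

    # Tuples (as in the new test) and even generators are accepted now:
    args = (arg for arg in ['build', '--no-user-cfg'])
    dist = Distribution(attrs={'script_args': args})
    assert dist.script_args == ['build', '--no-user-cfg']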
From ac548562ccc1633ff69b721a1c0ef084ffb011ac Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sun, 24 Nov 2024 15:48:15 -0500
Subject: [PATCH 1339/1761] Remove py38 compat modules

---
 conftest.py                           |  2 +-
 distutils/compat/__init__.py          |  4 +--
 distutils/compat/py38.py              | 34 ------------------
 distutils/tests/compat/py38.py        | 50 ---------------------------
 distutils/tests/compat/py39.py        | 22 ++++++++++++
 distutils/tests/test_bdist_rpm.py     |  3 +-
 distutils/tests/test_build_ext.py     |  8 ++---
 distutils/tests/test_extension.py     |  3 +-
 distutils/tests/test_filelist.py      |  2 +-
 distutils/tests/test_spawn.py         |  2 +-
 distutils/tests/test_unixccompiler.py |  2 +-
 distutils/util.py                     | 12 ++-----
 ruff.toml                             |  4 +++
 13 files changed, 37 insertions(+), 111 deletions(-)
 delete mode 100644 distutils/compat/py38.py
 delete mode 100644 distutils/tests/compat/py38.py
 create mode 100644 distutils/tests/compat/py39.py

diff --git a/conftest.py b/conftest.py
index 98f98d41ab..3b9444f78c 100644
--- a/conftest.py
+++ b/conftest.py
@@ -48,7 +48,7 @@ def _save_cwd():
 
 @pytest.fixture
 def distutils_managed_tempdir(request):
-    from distutils.tests.compat import py38 as os_helper
+    from distutils.tests.compat import py39 as os_helper
 
     self = request.instance
     self.tempdirs = []
diff --git a/distutils/compat/__init__.py b/distutils/compat/__init__.py
index e12534a32c..c715ee9cc5 100644
--- a/distutils/compat/__init__.py
+++ b/distutils/compat/__init__.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-from .py38 import removeprefix
-
 
 def consolidate_linker_args(args: list[str]) -> list[str] | str:
     """
@@ -12,4 +10,4 @@ def consolidate_linker_args(args: list[str]) -> list[str] | str:
 
     if not all(arg.startswith('-Wl,') for arg in args):
         return args
-    return '-Wl,' + ','.join(removeprefix(arg, '-Wl,') for arg in args)
+    return '-Wl,' + ','.join(arg.removeprefix('-Wl,') for arg in args)
diff --git a/distutils/compat/py38.py b/distutils/compat/py38.py
deleted file mode 100644
index 03ec73ef0e..0000000000
--- a/distutils/compat/py38.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import sys
-
-if sys.version_info < (3, 9):
-
-    def removesuffix(self, suffix):
-        # suffix='' should not call self[:-0].
-        if suffix and self.endswith(suffix):
-            return self[: -len(suffix)]
-        else:
-            return self[:]
-
-    def removeprefix(self, prefix):
-        if self.startswith(prefix):
-            return self[len(prefix) :]
-        else:
-            return self[:]
-
-else:
-
-    def removesuffix(self, suffix):
-        return self.removesuffix(suffix)
-
-    def removeprefix(self, prefix):
-        return self.removeprefix(prefix)
-
-
-def aix_platform(osname, version, release):
-    try:
-        import _aix_support  # type: ignore
-
-        return _aix_support.aix_platform()
-    except ImportError:
-        pass
-    return f"{osname}-{version}.{release}"
diff --git a/distutils/tests/compat/py38.py b/distutils/tests/compat/py38.py
deleted file mode 100644
index 211d3a6c50..0000000000
--- a/distutils/tests/compat/py38.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# flake8: noqa
-
-import contextlib
-import builtins
-import sys
-
-from test.support import requires_zlib
-import test.support
-
-
-ModuleNotFoundError = getattr(builtins, 'ModuleNotFoundError', ImportError)
-
-try:
-    from test.support.warnings_helper import check_warnings
-except (ModuleNotFoundError, ImportError):
-    from test.support import check_warnings
-
-
-try:
-    from test.support.os_helper import (
-        rmtree,
-        EnvironmentVarGuard,
-        unlink,
-        skip_unless_symlink,
-        temp_dir,
-    )
-except (ModuleNotFoundError, ImportError):
-    from test.support import (
-        rmtree,
-        EnvironmentVarGuard,
-        unlink,
-        skip_unless_symlink,
-        temp_dir,
-    )
-
-
-try:
-    from test.support.import_helper import (
-        DirsOnSysPath,
-        CleanImport,
-    )
-except (ModuleNotFoundError, ImportError):
-    from test.support import (
-        DirsOnSysPath,
-        CleanImport,
-    )
-
-
-if sys.version_info < (3, 9):
-    requires_zlib = lambda: test.support.requires_zlib
diff --git a/distutils/tests/compat/py39.py b/distutils/tests/compat/py39.py
new file mode 100644
index 0000000000..8246883695
--- /dev/null
+++ b/distutils/tests/compat/py39.py
@@ -0,0 +1,22 @@
+import sys
+
+if sys.version_info >= (3, 10):
+    from test.support.import_helper import (
+        CleanImport as CleanImport,
+        DirsOnSysPath as DirsOnSysPath,
+    )
+    from test.support.os_helper import (
+        EnvironmentVarGuard as EnvironmentVarGuard,
+        rmtree as rmtree,
+        skip_unless_symlink as skip_unless_symlink,
+        unlink as unlink,
+    )
+else:
+    from test.support import (
+        CleanImport as CleanImport,
+        DirsOnSysPath as DirsOnSysPath,
+        EnvironmentVarGuard as EnvironmentVarGuard,
+        rmtree as rmtree,
+        skip_unless_symlink as skip_unless_symlink,
+        unlink as unlink,
+    )
diff --git a/distutils/tests/test_bdist_rpm.py b/distutils/tests/test_bdist_rpm.py
index 1109fdf117..75051430e2 100644
--- a/distutils/tests/test_bdist_rpm.py
+++ b/distutils/tests/test_bdist_rpm.py
@@ -8,8 +8,7 @@
 from distutils.tests import support
 
 import pytest
-
-from .compat.py38 import requires_zlib
+from test.support import requires_zlib
 
 SETUP_PY = """\
 from distutils.core import setup
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index 8bd3cef855..8477c9da1d 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -19,11 +19,7 @@
 )
 from distutils.extension import Extension
 from distutils.tests import missing_compiler_executable
-from distutils.tests.support import (
-    TempdirManager,
-    copy_xxmodule_c,
-    fixup_build_ext,
-)
+from distutils.tests.support import TempdirManager, copy_xxmodule_c, fixup_build_ext
 from io import StringIO
 
 import jaraco.path
@@ -31,7 +27,7 @@
 import pytest
 from test import support
 
-from .compat import py38 as import_helper
+from .compat import py39 as import_helper
 
 
 @pytest.fixture()
diff --git a/distutils/tests/test_extension.py b/distutils/tests/test_extension.py
index 41872e04e8..e51c1cd8e7 100644
--- a/distutils/tests/test_extension.py
+++ b/distutils/tests/test_extension.py
@@ -6,8 +6,7 @@
 from distutils.extension import Extension, read_setup_file
 
 import pytest
-
-from .compat.py38 import check_warnings
+from test.support.warnings_helper import check_warnings
 
 
 class TestExtension:
diff --git a/distutils/tests/test_filelist.py b/distutils/tests/test_filelist.py
index ec7e5cf363..130e6fb53b 100644
--- a/distutils/tests/test_filelist.py
+++ b/distutils/tests/test_filelist.py
@@ -10,7 +10,7 @@
 import jaraco.path
 import pytest
 
-from .compat import py38 as os_helper
+from .compat import py39 as os_helper
 
 MANIFEST_IN = """\
 include ok
diff --git a/distutils/tests/test_spawn.py b/distutils/tests/test_spawn.py
index fcbc765ef2..3b9fc926f6 100644
--- a/distutils/tests/test_spawn.py
+++ b/distutils/tests/test_spawn.py
@@ -12,7 +12,7 @@
 import pytest
 from test.support import unix_shell
 
-from .compat import py38 as os_helper
+from .compat import py39 as os_helper
 
 
 class TestSpawn(support.TempdirManager):
diff --git a/distutils/tests/test_unixccompiler.py b/distutils/tests/test_unixccompiler.py
index 1695328771..2c2f4aaec2 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/tests/test_unixccompiler.py
@@ -12,7 +12,7 @@
 import pytest
 
 from . import support
-from .compat.py38 import EnvironmentVarGuard
+from .compat.py39 import EnvironmentVarGuard
 
 
 @pytest.fixture(autouse=True)
diff --git a/distutils/util.py b/distutils/util.py
index 8d8260bc33..1334e2f799 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -25,7 +25,7 @@
 from .spawn import spawn
 
 
-def get_host_platform():
+def get_host_platform() -> str:
     """
     Return a string that identifies the current platform. Use this
     function to distinguish platform-specific build directories and
@@ -34,15 +34,7 @@ def get_host_platform():
 
     # This function initially exposed platforms as defined in Python 3.9
     # even with older Python versions when distutils was split out.
-    # Now it delegates to stdlib sysconfig, but maintains compatibility.
-
-    if sys.version_info < (3, 9):
-        if os.name == "posix" and hasattr(os, 'uname'):
-            osname, host, release, version, machine = os.uname()
-            if osname[:3] == "aix":
-                from .compat.py38 import aix_platform
-
-                return aix_platform(osname, version, release)
+    # Now it delegates to stdlib sysconfig.
 
     return sysconfig.get_platform()
 
diff --git a/ruff.toml b/ruff.toml
index 0cc5b267d7..b09308276e 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -47,6 +47,10 @@ ignore = [
 	"TRY400",
 ]
 
+[lint.isort]
+combine-as-imports = true
+split-on-trailing-comma = false
+
 [format]
 # Enable preview to get hugged parenthesis unwrapping and other nice surprises
 # See https://github.com/jaraco/skeleton/pull/133#issuecomment-2239538373

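The redundant as-aliases in the new py39.py are deliberate: "from m import name as name" is the PEP 484 re-export convention, telling type checkers the name is public and keeping unused-import lint quiet. The idiom in miniature (using os.path purely for illustration):

    # compat_shim.py
    # "import ... as <same name>" marks an intentional re-export.
    from os.path import exists as exists
    from os.path import join as join

    print(exists(join(".", "setup.py")))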
From 4e6e8fc954fad20d0d869524594d30b12a5aba34 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 20:52:27 -0500
Subject: [PATCH 1340/1761] Remove UP036 exclusion.

---
 ruff.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ruff.toml b/ruff.toml
index b09308276e..da3e3f8dab 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -21,7 +21,6 @@ extend-select = [
 ignore = [
 	# TODO: Fix these new violations in Ruff 0.8.0
 	"UP031",
-	"UP036",
 
 	# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
 	"W191",

From fc15d4575aec0c4adeec367c777bac3b642bdc8a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 20:54:55 -0500
Subject: [PATCH 1341/1761] Prefer the standard format for imports, even though
 it's unnecessarily repetitive.

---
 distutils/tests/compat/py39.py | 18 ++++++++++++++++++
 ruff.toml                      |  4 ----
 2 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/distutils/tests/compat/py39.py b/distutils/tests/compat/py39.py
index 8246883695..aca3939a0c 100644
--- a/distutils/tests/compat/py39.py
+++ b/distutils/tests/compat/py39.py
@@ -3,20 +3,38 @@
 if sys.version_info >= (3, 10):
     from test.support.import_helper import (
         CleanImport as CleanImport,
+    )
+    from test.support.import_helper import (
         DirsOnSysPath as DirsOnSysPath,
     )
     from test.support.os_helper import (
         EnvironmentVarGuard as EnvironmentVarGuard,
+    )
+    from test.support.os_helper import (
         rmtree as rmtree,
+    )
+    from test.support.os_helper import (
         skip_unless_symlink as skip_unless_symlink,
+    )
+    from test.support.os_helper import (
         unlink as unlink,
     )
 else:
     from test.support import (
         CleanImport as CleanImport,
+    )
+    from test.support import (
         DirsOnSysPath as DirsOnSysPath,
+    )
+    from test.support import (
         EnvironmentVarGuard as EnvironmentVarGuard,
+    )
+    from test.support import (
         rmtree as rmtree,
+    )
+    from test.support import (
         skip_unless_symlink as skip_unless_symlink,
+    )
+    from test.support import (
         unlink as unlink,
     )
diff --git a/ruff.toml b/ruff.toml
index da3e3f8dab..0d8179b35e 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -46,10 +46,6 @@ ignore = [
 	"TRY400",
 ]
 
-[lint.isort]
-combine-as-imports = true
-split-on-trailing-comma = false
-
 [format]
 # Enable preview to get hugged parenthesis unwrapping and other nice surprises
 # See https://github.com/jaraco/skeleton/pull/133#issuecomment-2239538373

From deb1d5a9f4b8c1b8c722e4ab844863469b882387 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 25 Oct 2024 13:17:22 -0400
Subject: [PATCH 1342/1761] type `Distribution.get_command_obj` to not return
 `None` with `create=True`

---
 distutils/dist.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/distutils/dist.py b/distutils/dist.py
index ef4f4e0241..e8c5236be8 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -15,7 +15,7 @@
 import warnings
 from collections.abc import Iterable
 from email import message_from_file
-from typing import TYPE_CHECKING, TypeVar, overload
+from typing import TYPE_CHECKING, Literal, TypeVar, overload
 
 from packaging.utils import canonicalize_name, canonicalize_version
 
@@ -31,6 +31,7 @@
 from .util import check_environ, rfc822_escape, strtobool
 
 if TYPE_CHECKING:
+    # type-only import because of mutual dependence between these modules
     from .cmd import Command
 
 _CommandT = TypeVar("_CommandT", bound="Command")
@@ -837,7 +838,15 @@ def get_command_class(self, command):
 
         raise DistutilsModuleError(f"invalid command '{command}'")
 
-    def get_command_obj(self, command, create=True):
+    @overload
+    def get_command_obj(
+        self, command: str, create: Literal[True] = True
+    ) -> Command: ...
+    @overload
+    def get_command_obj(
+        self, command: str, create: Literal[False]
+    ) -> Command | None: ...
+    def get_command_obj(self, command: str, create: bool = True) -> Command | None:
         """Return the command object for 'command'.  Normally this object
         is cached on a previous call to 'get_command_obj()'; if no command
         object for 'command' is in the cache, then we either create and

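Literal lets these overloads key the return type off the argument's value rather than its type: with the default create=True the result can never be None, so callers skip a pointless None-check. A reduced sketch:

    from __future__ import annotations

    from typing import Literal, overload

    _registry: dict[str, object] = {}

    @overload
    def get_obj(name: str, create: Literal[True] = True) -> object: ...
    @overload
    def get_obj(name: str, create: Literal[False]) -> object | None: ...
    def get_obj(name: str, create: bool = True) -> object | None:
        # create=True materializes missing entries, so the result is never
        # None; create=False is a plain lookup that may come back empty.
        if name not in _registry and create:
            _registry[name] = object()
        return _registry.get(name)

    obj = get_obj("build")           # checker type: object
    maybe = get_obj("sdist", False)  # checker type: object | None
    print(obj is not None, maybe)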
From 9d9887db963e8f3e8e6758e1a3d3d2238a7d1f23 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Thu, 26 Dec 2024 19:49:58 -0500
Subject: [PATCH 1343/1761] Mark mutable and tuple class attributes as
 ClassVar, per typeshed

---
 distutils/cmd.py                      | 12 ++++++++++--
 distutils/command/bdist.py            |  5 +++--
 distutils/command/build_clib.py       |  3 ++-
 distutils/command/build_scripts.py    |  3 ++-
 distutils/command/check.py            |  3 ++-
 distutils/command/command_template    |  8 ++++----
 distutils/command/install_egg_info.py |  3 ++-
 distutils/command/install_headers.py  |  4 +++-
 distutils/command/sdist.py            |  3 ++-
 distutils/tests/test_dist.py          |  3 ++-
 10 files changed, 32 insertions(+), 15 deletions(-)

diff --git a/distutils/cmd.py b/distutils/cmd.py
index 6ffe7bd4b3..9c6fa6566c 100644
--- a/distutils/cmd.py
+++ b/distutils/cmd.py
@@ -10,7 +10,8 @@
 import os
 import re
 import sys
-from typing import TypeVar, overload
+from collections.abc import Callable
+from typing import Any, ClassVar, TypeVar, overload
 
 from . import _modified, archive_util, dir_util, file_util, util
 from ._log import log
@@ -49,7 +50,14 @@ class Command:
     # 'sub_commands' is usually defined at the *end* of a class, because
     # predicates can be unbound methods, so they must already have been
     # defined.  The canonical example is the "install" command.
-    sub_commands = []
+    sub_commands: ClassVar[  # Any to work around variance issues
+        list[tuple[str, Callable[[Any], bool] | None]]
+    ] = []
+
+    user_options: ClassVar[
+        # Specifying both because list is invariant. Avoids mypy override assignment issues
+        list[tuple[str, str, str]] | list[tuple[str, str | None, str]]
+    ] = []
 
     # -- Creation/initialization methods -------------------------------
 
diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index f334075159..1ec3c35f40 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -5,6 +5,7 @@
 
 import os
 import warnings
+from typing import ClassVar
 
 from ..core import Command
 from ..errors import DistutilsOptionError, DistutilsPlatformError
@@ -23,7 +24,7 @@ def show_formats():
     pretty_printer.print_help("List of available distribution formats:")
 
 
-class ListCompat(dict):
+class ListCompat(dict[str, tuple[str, str]]):
     # adapter to allow for Setuptools compatibility in format_commands
     def append(self, item):
         warnings.warn(
@@ -70,7 +71,7 @@ class bdist(Command):
     ]
 
     # The following commands do not take a format option from bdist
-    no_format_option = ('bdist_rpm',)
+    no_format_option: ClassVar[tuple[str, ...]] = ('bdist_rpm',)
 
     # This won't do in reality: will need to distinguish RPM-ish Linux,
     # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
index 1305d5bb3d..3e1832768b 100644
--- a/distutils/command/build_clib.py
+++ b/distutils/command/build_clib.py
@@ -16,6 +16,7 @@
 
 import os
 from distutils._log import log
+from typing import ClassVar
 
 from ..core import Command
 from ..errors import DistutilsSetupError
@@ -31,7 +32,7 @@ def show_compilers():
 class build_clib(Command):
     description = "build C/C++ libraries used by Python extensions"
 
-    user_options = [
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
         ('build-clib=', 'b', "directory to build C/C++ libraries to"),
         ('build-temp=', 't', "directory to put temporary build by-products"),
         ('debug', 'g', "compile with debugging information"),
diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index 9e5963c243..1c6fd3caff 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -8,6 +8,7 @@
 from distutils import sysconfig
 from distutils._log import log
 from stat import ST_MODE
+from typing import ClassVar
 
 from .._modified import newer
 from ..core import Command
@@ -25,7 +26,7 @@
 class build_scripts(Command):
     description = "\"build\" scripts (copy and fixup #! line)"
 
-    user_options = [
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
         ('build-dir=', 'd', "directory to \"build\" (copy) to"),
         ('force', 'f', "forcibly build everything (ignore file timestamps"),
         ('executable=', 'e', "specify final destination interpreter path"),
diff --git a/distutils/command/check.py b/distutils/command/check.py
index 1375028e4d..078c1ce87e 100644
--- a/distutils/command/check.py
+++ b/distutils/command/check.py
@@ -4,6 +4,7 @@
 """
 
 import contextlib
+from typing import ClassVar
 
 from ..core import Command
 from ..errors import DistutilsSetupError
@@ -41,7 +42,7 @@ class check(Command):
     """This command checks the meta-data of the package."""
 
     description = "perform some checks on the package"
-    user_options = [
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
         ('metadata', 'm', 'Verify meta-data'),
         (
             'restructuredtext',
diff --git a/distutils/command/command_template b/distutils/command/command_template
index 6106819db8..a4a751ad3c 100644
--- a/distutils/command/command_template
+++ b/distutils/command/command_template
@@ -8,18 +8,18 @@ Implements the Distutils 'x' command.
 __revision__ = "$Id$"
 
 from distutils.core import Command
+from typing import ClassVar
 
 
 class x(Command):
-
     # Brief (40-50 characters) description of the command
     description = ""
 
     # List of option tuples: long name, short name (None if no short
     # name), and help string.
-    user_options = [('', '',
-                     ""),
-                   ]
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
+        ('', '', ""),
+    ]
 
     def initialize_options(self):
         self. = None
diff --git a/distutils/command/install_egg_info.py b/distutils/command/install_egg_info.py
index 0baeee7bb4..230e94ab46 100644
--- a/distutils/command/install_egg_info.py
+++ b/distutils/command/install_egg_info.py
@@ -8,6 +8,7 @@
 import os
 import re
 import sys
+from typing import ClassVar
 
 from .. import dir_util
 from .._log import log
@@ -18,7 +19,7 @@ class install_egg_info(Command):
     """Install an .egg-info file for the package"""
 
     description = "Install package's PKG-INFO metadata as an .egg-info file"
-    user_options = [
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
         ('install-dir=', 'd', "directory to install to"),
     ]
 
diff --git a/distutils/command/install_headers.py b/distutils/command/install_headers.py
index fbb3b242ea..586121e089 100644
--- a/distutils/command/install_headers.py
+++ b/distutils/command/install_headers.py
@@ -3,6 +3,8 @@
 Implements the Distutils 'install_headers' command, to install C/C++ header
 files to the Python include directory."""
 
+from typing import ClassVar
+
 from ..core import Command
 
 
@@ -10,7 +12,7 @@
 class install_headers(Command):
     description = "install C/C++ header files"
 
-    user_options = [
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
         ('install-dir=', 'd', "directory to install header files to"),
         ('force', 'f', "force installation (overwrite existing files)"),
     ]
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index 003e0bf875..acb3a41650 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -8,6 +8,7 @@
 from distutils._log import log
 from glob import glob
 from itertools import filterfalse
+from typing import ClassVar
 
 from ..core import Command
 from ..errors import DistutilsOptionError, DistutilsTemplateError
@@ -114,7 +115,7 @@ def checking_metadata(self):
 
     sub_commands = [('check', checking_metadata)]
 
-    READMES = ('README', 'README.txt', 'README.rst')
+    READMES: ClassVar[tuple[str, ...]] = ('README', 'README.txt', 'README.rst')
 
     def initialize_options(self):
         # 'template' and 'manifest' are, respectively, the names of
diff --git a/distutils/tests/test_dist.py b/distutils/tests/test_dist.py
index 4d78a19803..cd07bcd048 100644
--- a/distutils/tests/test_dist.py
+++ b/distutils/tests/test_dist.py
@@ -13,6 +13,7 @@
 from distutils.cmd import Command
 from distutils.dist import Distribution, fix_help_options
 from distutils.tests import support
+from typing import ClassVar
 
 import jaraco.path
 import pytest
@@ -23,7 +24,7 @@
 class test_dist(Command):
     """Sample distutils extension command."""
 
-    user_options = [
+    user_options: ClassVar[list[tuple[str, str, str]]] = [
         ("sample-option=", "S", "help text"),
     ]
 

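A note on the `user_options` union above: `list` is invariant in its element
type, so `list[tuple[str, str | None, str]]` is not a subtype of
`list[tuple[str, str, str]]`, and a subclass narrowing the middle field would
trip mypy's override check against a single-shape declaration. A minimal
sketch of the pattern (hypothetical classes on Python 3.10+, not the real
distutils hierarchy):

    from typing import ClassVar


    class Command:
        # Declaring both shapes lets subclasses assign either one; list is
        # invariant, so neither shape is assignable to the other.
        user_options: ClassVar[
            list[tuple[str, str, str]] | list[tuple[str, str | None, str]]
        ] = []


    class install(Command):
        # Accepted: matches the second member of the union
        # (no short option name, hence None).
        user_options: ClassVar[list[tuple[str, str | None, str]]] = [
            ('install-dir=', None, "directory to install to"),
        ]


    print(install.user_options)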
From cebba7f925b40fdab5fd47f8ec92aa158c989a3c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 21:17:00 -0500
Subject: [PATCH 1344/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/extension.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/extension.py b/distutils/extension.py
index f925987e84..e053273436 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -107,7 +107,7 @@ def __init__(
         **kw,  # To catch unknown keywords
     ):
         if not isinstance(name, str):
-            raise TypeError("'name' must be a string")  # noqa: TRY004
+            raise TypeError("'name' must be a string")
 
         # handle the string case first; since strings are iterable, disallow them
         if isinstance(sources, str):

From edfd6d2159374575cdcc16834712243ed366c64f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 21:18:30 -0500
Subject: [PATCH 1345/1761] Collapse startswith operations

Co-authored-by: Avasam 
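For reference, `str.startswith` accepts a tuple of prefixes and returns True
if any of them matches, which is what makes the collapsed form below
equivalent to the two `or`-ed calls; a tiny sketch:

    # startswith with a tuple checks each prefix in turn.
    assert '\\x64'.startswith(('/', '\\'))
    assert '/usr/lib'.startswith(('/', '\\'))
    assert not 'relative/path'.startswith(('/', '\\'))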
---
 distutils/ccompiler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index fdbb1ca795..6979d160eb 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -989,7 +989,7 @@ def _make_relative(base):
         # Chop off the drive
         no_drive = os.path.splitdrive(base)[1]
         # If abs, chop off leading /
-        is_abs = os.path.isabs(no_drive) or sys.platform == 'win32' and (no_drive.startswith('/') or no_drive.startswith('\\'))
+        is_abs = os.path.isabs(no_drive) or sys.platform == 'win32' and no_drive.startswith(('/', "\\"))
         return no_drive[is_abs:]
 
     def shared_object_filename(self, basename, strip_dir=False, output_dir=''):

From af7fcbb0d56ae14753db53acd8792eddb4d8f814 Mon Sep 17 00:00:00 2001
From: Sam James 
Date: Sun, 22 Dec 2024 01:44:16 +0000
Subject: [PATCH 1346/1761] Use CFLAGS if set as-is, match CXXFLAGS behavior

Since 2c937116cc0dcd9b26b6070e89a3dc5dcbedc2ae, CXXFLAGS is used
as-is if set in the environment rather than clobbered by whatever
CPython happened to be built with.

Do the same for CFLAGS: use it as-is if set in the environment, don't
prepend CPython's saved flags.

Fixes: https://github.com/pypa/distutils/issues/299
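A sketch of the resulting behavior (simplified; the real logic lives in
distutils.sysconfig.customize_compiler):

    import os

    def effective_cflags(saved: str) -> str:
        # CFLAGS from the environment now replaces the flags CPython was
        # built with, instead of being appended after them.
        return os.environ.get('CFLAGS', saved)

    os.environ['CFLAGS'] = '-O3 -march=native'
    assert effective_cflags('-O2 -fwrapv') == '-O3 -march=native'

    os.environ.pop('CFLAGS')
    assert effective_cflags('-O2 -fwrapv') == '-O2 -fwrapv'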
---
 distutils/sysconfig.py            | 1 +
 distutils/tests/test_sysconfig.py | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index fc0ea78721..358d1079dc 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -340,6 +340,7 @@ def customize_compiler(compiler):
 
         ldshared = _add_flags(ldshared, 'LD')
         ldcxxshared = _add_flags(ldcxxshared, 'LD')
+        cflags = os.environ.get('CFLAGS', cflags)
         cflags = _add_flags(cflags, 'C')
         ldshared = _add_flags(ldshared, 'C')
         cxxflags = os.environ.get('CXXFLAGS', cxxflags)
diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index 49274a36ae..3191e7717b 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -130,9 +130,9 @@ def test_customize_compiler(self):
         comp = self.customize_compiler()
         assert comp.exes['archiver'] == 'env_ar --env-arflags'
         assert comp.exes['preprocessor'] == 'env_cpp --env-cppflags'
-        assert comp.exes['compiler'] == 'env_cc --sc-cflags --env-cflags --env-cppflags'
+        assert comp.exes['compiler'] == 'env_cc --env-cflags --env-cflags --env-cppflags'
         assert comp.exes['compiler_so'] == (
-            'env_cc --sc-cflags --env-cflags --env-cppflags --sc-ccshared'
+            'env_cc --env-cflags --env-cflags --env-cppflags --sc-ccshared'
         )
         assert (
             comp.exes['compiler_cxx']

From 630551a88b9e7394b2996728a0b6a50500b8e45b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 26 Dec 2024 21:38:53 -0500
Subject: [PATCH 1347/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/tests/test_sysconfig.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index 3191e7717b..867e7dcb39 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -130,7 +130,9 @@ def test_customize_compiler(self):
         comp = self.customize_compiler()
         assert comp.exes['archiver'] == 'env_ar --env-arflags'
         assert comp.exes['preprocessor'] == 'env_cpp --env-cppflags'
-        assert comp.exes['compiler'] == 'env_cc --env-cflags --env-cflags --env-cppflags'
+        assert (
+            comp.exes['compiler'] == 'env_cc --env-cflags --env-cflags --env-cppflags'
+        )
         assert comp.exes['compiler_so'] == (
             'env_cc --env-cflags --env-cflags --env-cppflags --sc-ccshared'
         )

From 2296e9fabe70cbd765f8cbf992c7020a3e54a5b8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 27 Dec 2024 03:01:25 -0500
Subject: [PATCH 1348/1761] Add news fragment.

---
 newsfragments/4478.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4478.feature.rst

diff --git a/newsfragments/4478.feature.rst b/newsfragments/4478.feature.rst
new file mode 100644
index 0000000000..bd53339464
--- /dev/null
+++ b/newsfragments/4478.feature.rst
@@ -0,0 +1 @@
+Synced with pypa/distutils@c97a3db2f including better support for free threaded Python on Windows (pypa/distutils#310), improved typing support, and linter accommodations.

From 0ebc351ea53236c542dc3f081546e3e0917f38b7 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 27 Dec 2024 03:20:57 -0500
Subject: [PATCH 1349/1761] Fix term reference in quickstart.

Closes #4779
---
 docs/userguide/quickstart.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst
index d303ab9355..606654f86c 100644
--- a/docs/userguide/quickstart.rst
+++ b/docs/userguide/quickstart.rst
@@ -199,7 +199,7 @@ Package discovery
 -----------------
 For projects that follow a simple directory structure, ``setuptools`` should be
 able to automatically detect all :term:`packages ` and
-:term:`namespaces `. However, complex projects might include
+:term:`namespaces `. However, complex projects might include
 additional folders and supporting files that not necessarily should be
 distributed (or that can confuse ``setuptools`` auto discovery algorithm).
 

From bba52647d2e90368e7662d5573bb9a63a9184318 Mon Sep 17 00:00:00 2001
From: Sam James 
Date: Sat, 28 Dec 2024 16:45:50 +0000
Subject: [PATCH 1350/1761] Don't duplicate CFLAGS

Followup to af7fcbb0d56ae14753db53acd8792eddb4d8f814. I accidentally
left that in when trying two approaches.

Reported at https://github.com/pypa/distutils/pull/322#discussion_r1898349462.
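Roughly what the duplication looked like (sketch; `_add_flags` approximated
from how it is used above):

    import os

    def _add_flags(value: str, kind: str) -> str:
        # Approximation: append ${kind}FLAGS from the environment, if set.
        extra = os.environ.get(f'{kind}FLAGS', '')
        return f'{value} {extra}' if extra else value

    os.environ['CFLAGS'] = '--env-cflags'
    cflags = os.environ.get('CFLAGS', '--sc-cflags')  # already '--env-cflags'
    cflags = _add_flags(cflags, 'C')                  # ...then appended again
    assert cflags == '--env-cflags --env-cflags'      # the duplicated flags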
---
 distutils/sysconfig.py            | 1 -
 distutils/tests/test_sysconfig.py | 6 ++----
 2 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 358d1079dc..ef3def83eb 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -341,7 +341,6 @@ def customize_compiler(compiler):
         ldshared = _add_flags(ldshared, 'LD')
         ldcxxshared = _add_flags(ldcxxshared, 'LD')
         cflags = os.environ.get('CFLAGS', cflags)
-        cflags = _add_flags(cflags, 'C')
         ldshared = _add_flags(ldshared, 'C')
         cxxflags = os.environ.get('CXXFLAGS', cxxflags)
         ldcxxshared = _add_flags(ldcxxshared, 'CXX')
diff --git a/distutils/tests/test_sysconfig.py b/distutils/tests/test_sysconfig.py
index 867e7dcb39..43d77c23fa 100644
--- a/distutils/tests/test_sysconfig.py
+++ b/distutils/tests/test_sysconfig.py
@@ -130,11 +130,9 @@ def test_customize_compiler(self):
         comp = self.customize_compiler()
         assert comp.exes['archiver'] == 'env_ar --env-arflags'
         assert comp.exes['preprocessor'] == 'env_cpp --env-cppflags'
-        assert (
-            comp.exes['compiler'] == 'env_cc --env-cflags --env-cflags --env-cppflags'
-        )
+        assert comp.exes['compiler'] == 'env_cc --env-cflags --env-cppflags'
         assert comp.exes['compiler_so'] == (
-            'env_cc --env-cflags --env-cflags --env-cppflags --sc-ccshared'
+            'env_cc --env-cflags --env-cppflags --sc-ccshared'
         )
         assert (
             comp.exes['compiler_cxx']

From b75af806383b075032531bb33d1caef485b03ece Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 30 Dec 2024 18:33:21 -0500
Subject: [PATCH 1351/1761] Fix test_build_ext.py on Python 3.9

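Presumably the module gained PEP 604 union annotations, which are evaluated
eagerly on Python 3.9 and only work there when deferred to strings by the
future import. A minimal illustration (hypothetical function, not the actual
test code):

    from __future__ import annotations

    # Without the future import, 'int | None' below would raise TypeError
    # at definition time on Python 3.9; with it, annotations are stored
    # as strings and never evaluated.
    def first_or_none(items: list[int]) -> int | None:
        return items[0] if items else None

    assert first_or_none([3, 1]) == 3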
---
 setuptools/tests/test_build_ext.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index d107a272e1..5ce96a66f7 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import os
 import sys
 from importlib.util import cache_from_source as _compiled_file_name

From fc9f889ccf13ecc4667857d43731d2a09cc2b498 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 10 Sep 2024 12:25:40 +0200
Subject: [PATCH 1352/1761] Enforce ruff/pyupgrade rule UP030

UP030 Use implicit references for positional format fields
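In short: when the arguments are consumed left to right, explicit indices add
nothing. A sketch using the wheel-name case below:

    project, version = 'nodistinfo', '1.0'

    # UP030: explicit positional indices...
    old = '{0}-{1}-py2.py3-none-any.whl'.format(project, version)
    # ...are redundant when the fields are used in order.
    new = '{}-{}-py2.py3-none-any.whl'.format(project, version)
    assert old == new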
---
 ruff.toml                      | 1 -
 setuptools/command/sdist.py    | 2 +-
 setuptools/tests/test_wheel.py | 2 +-
 3 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 27757bcf4b..4830b9674e 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -35,7 +35,6 @@ ignore = [
 	"TRY003", # raise-vanilla-args, avoid multitude of exception classes
 	"TRY301", # raise-within-try, it's handy
 	"UP015", # redundant-open-modes, explicit is preferred
-	"UP030", # temporarily disabled
 	"UP031", # temporarily disabled
 	"UP032", # temporarily disabled
 	"UP038", # Using `X | Y` in `isinstance` call is slower and more verbose https://github.com/astral-sh/ruff/issues/7871
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index 64e866c96b..bf085cb56b 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -53,7 +53,7 @@ class sdist(orig.sdist):
     negative_opt: ClassVar[dict[str, str]] = {}
 
     README_EXTENSIONS = ['', '.rst', '.txt', '.md']
-    READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)
+    READMES = tuple('README{}'.format(ext) for ext in README_EXTENSIONS)
 
     def run(self) -> None:
         self.run_command('egg_info')
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index 5724c6eabc..5c6bda0454 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -605,7 +605,7 @@ def test_wheel_install_pep_503():
 def test_wheel_no_dist_dir():
     project_name = 'nodistinfo'
     version = '1.0'
-    wheel_name = '{0}-{1}-py2.py3-none-any.whl'.format(project_name, version)
+    wheel_name = '{}-{}-py2.py3-none-any.whl'.format(project_name, version)
     with tempdir() as source_dir:
         wheel_path = os.path.join(source_dir, wheel_name)
         # create an empty zip file

From 050eafb6d82472c1b1f11ee9ba0a35911fcd26ef Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 10 Sep 2024 12:26:25 +0200
Subject: [PATCH 1353/1761] Enforce ruff/pyupgrade rule UP031

UP031 Use format specifiers instead of percent format
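The mechanical mapping applied throughout this patch, in one sketch (covering
the plain `%s`, the `%r` to `{!r}`, and the `%0.1f` to `{:0.1f}` cases that
appear below):

    name, vs_ver = 'pkg', 14.0

    assert '%s-%s' % (name, vs_ver) == '{}-{}'.format(name, vs_ver)
    assert 'got %r' % (name,) == 'got {!r}'.format(name)
    assert 'VS %0.1f' % vs_ver == 'VS {:0.1f}'.format(vs_ver)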
---
 pkg_resources/__init__.py                | 36 ++++++------
 ruff.toml                                |  1 -
 setuptools/__init__.py                   |  4 +-
 setuptools/_core_metadata.py             |  6 +-
 setuptools/_imp.py                       |  6 +-
 setuptools/archive_util.py               |  8 +--
 setuptools/command/alias.py              |  4 +-
 setuptools/command/bdist_egg.py          |  2 +-
 setuptools/command/build_clib.py         | 16 +++---
 setuptools/command/build_ext.py          |  2 +-
 setuptools/command/build_py.py           |  4 +-
 setuptools/command/easy_install.py       | 35 ++++++------
 setuptools/command/egg_info.py           | 13 ++---
 setuptools/command/install_egg_info.py   |  2 +-
 setuptools/command/sdist.py              |  2 +-
 setuptools/command/setopt.py             |  2 +-
 setuptools/config/expand.py              |  2 +-
 setuptools/config/setupcfg.py            |  2 +-
 setuptools/depends.py                    |  2 +-
 setuptools/dist.py                       | 15 +++--
 setuptools/monkey.py                     |  2 +-
 setuptools/msvc.py                       | 72 ++++++++++++------------
 setuptools/package_index.py              | 34 ++++++-----
 setuptools/tests/config/test_setupcfg.py |  8 +--
 setuptools/tests/server.py               |  6 +-
 setuptools/tests/test_build_ext.py       |  2 +-
 setuptools/tests/test_core_metadata.py   |  2 +-
 setuptools/tests/test_dist.py            |  2 +-
 setuptools/tests/test_easy_install.py    | 34 ++++++-----
 setuptools/tests/test_egg_info.py        |  5 +-
 setuptools/tests/test_install_scripts.py |  8 +--
 setuptools/tests/test_manifest.py        |  5 +-
 setuptools/tests/test_sandbox.py         |  2 +-
 setuptools/tests/test_sdist.py           |  5 +-
 setuptools/tests/test_wheel.py           | 14 ++---
 setuptools/wheel.py                      |  8 +--
 36 files changed, 179 insertions(+), 194 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 74b0465bfa..94dde1f218 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -200,7 +200,7 @@ def get_supported_platform():
     m = macosVersionString.match(plat)
     if m is not None and sys.platform == "darwin":
         try:
-            plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
+            plat = 'macosx-{}-{}'.format('.'.join(_macos_vers()[:2]), m.group(3))
         except ValueError:
             # not macOS
             pass
@@ -492,7 +492,7 @@ def compatible_platforms(provided: str | None, required: str | None) -> bool:
             provDarwin = darwinVersionString.match(provided)
             if provDarwin:
                 dversion = int(provDarwin.group(1))
-                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+                macosversion = "{}.{}".format(reqMac.group(1), reqMac.group(2))
                 if (
                     dversion == 7
                     and macosversion >= "10.3"
@@ -1316,7 +1316,7 @@ def __iadd__(self, other: Distribution | Environment) -> Self:
                 for dist in other[project]:
                     self.add(dist)
         else:
-            raise TypeError("Can't add %r to environment" % (other,))
+            raise TypeError("Can't add {!r} to environment".format(other))
         return self
 
     def __add__(self, other: Distribution | Environment) -> Self:
@@ -2018,7 +2018,7 @@ def _zipinfo_name(self, fspath):
             return ''
         if fspath.startswith(self.zip_pre):
             return fspath[len(self.zip_pre) :]
-        raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))
+        raise AssertionError("{} is not a subpath of {}".format(fspath, self.zip_pre))
 
     def _parts(self, zip_path):
         # Convert a zipfile subpath into an egg-relative path part list.
@@ -2026,7 +2026,7 @@ def _parts(self, zip_path):
         fspath = self.zip_pre + zip_path
         if fspath.startswith(self.egg_root + os.sep):
             return fspath[len(self.egg_root) + 1 :].split(os.sep)
-        raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))
+        raise AssertionError("{} is not a subpath of {}".format(fspath, self.egg_root))
 
     @property
     def zipinfo(self):
@@ -2729,15 +2729,15 @@ def __init__(
         self.dist = dist
 
     def __str__(self) -> str:
-        s = "%s = %s" % (self.name, self.module_name)
+        s = "{} = {}".format(self.name, self.module_name)
         if self.attrs:
             s += ':' + '.'.join(self.attrs)
         if self.extras:
-            s += ' [%s]' % ','.join(self.extras)
+            s += ' [{}]'.format(','.join(self.extras))
         return s
 
     def __repr__(self) -> str:
-        return "EntryPoint.parse(%r)" % str(self)
+        return "EntryPoint.parse({!r})".format(str(self))
 
     @overload
     def load(
@@ -3108,7 +3108,7 @@ def requires(self, extras: Iterable[str] = ()) -> list[Requirement]:
                 deps.extend(dm[safe_extra(ext)])
             except KeyError as e:
                 raise UnknownExtra(
-                    "%s has no such extra feature %r" % (self, ext)
+                    "{} has no such extra feature {!r}".format(self, ext)
                 ) from e
         return deps
 
@@ -3150,7 +3150,7 @@ def activate(self, path: list[str] | None = None, replace: bool = False) -> None
 
     def egg_name(self):
         """Return what this distribution's standard .egg filename should be"""
-        filename = "%s-%s-py%s" % (
+        filename = "{}-{}-py{}".format(
             to_filename(self.project_name),
             to_filename(self.version),
             self.py_version or PY_MAJOR,
@@ -3162,7 +3162,7 @@ def egg_name(self):
 
     def __repr__(self) -> str:
         if self.location:
-            return "%s (%s)" % (self, self.location)
+            return "{} ({})".format(self, self.location)
         else:
             return str(self)
 
@@ -3172,7 +3172,7 @@ def __str__(self) -> str:
         except ValueError:
             version = None
         version = version or "[unknown version]"
-        return "%s %s" % (self.project_name, version)
+        return "{} {}".format(self.project_name, version)
 
     def __getattr__(self, attr: str):
         """Delegate all unrecognized public attributes to .metadata provider"""
@@ -3200,9 +3200,9 @@ def from_filename(
     def as_requirement(self):
         """Return a ``Requirement`` that matches this distribution exactly"""
         if isinstance(self.parsed_version, packaging.version.Version):
-            spec = "%s==%s" % (self.project_name, self.parsed_version)
+            spec = "{}=={}".format(self.project_name, self.parsed_version)
         else:
-            spec = "%s===%s" % (self.project_name, self.parsed_version)
+            spec = "{}==={}".format(self.project_name, self.parsed_version)
 
         return Requirement.parse(spec)
 
@@ -3210,7 +3210,7 @@ def load_entry_point(self, group: str, name: str) -> _ResolvedEntryPoint:
         """Return the `name` entry point of `group` or raise ImportError"""
         ep = self.get_entry_info(group, name)
         if ep is None:
-            raise ImportError("Entry point %r not found" % ((group, name),))
+            raise ImportError("Entry point {!r} not found".format((group, name)))
         return ep.load()
 
     @overload
@@ -3327,8 +3327,8 @@ def check_version_conflict(self):
             ):
                 continue
             issue_warning(
-                "Module %s was already imported from %s, but %s is being added"
-                " to sys.path" % (modname, fn, self.location),
+                "Module {} was already imported from {}, but {} is being added"
+                " to sys.path".format(modname, fn, self.location),
             )
 
     def has_version(self) -> bool:
@@ -3512,7 +3512,7 @@ def __hash__(self) -> int:
         return self.__hash
 
     def __repr__(self) -> str:
-        return "Requirement.parse(%r)" % str(self)
+        return "Requirement.parse({!r})".format(str(self))
 
     @staticmethod
     def parse(s: str | Iterable[str]) -> Requirement:
diff --git a/ruff.toml b/ruff.toml
index 4830b9674e..e7005e27d9 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -35,7 +35,6 @@ ignore = [
 	"TRY003", # raise-vanilla-args, avoid multitude of exception classes
 	"TRY301", # raise-within-try, it's handy
 	"UP015", # redundant-open-modes, explicit is preferred
-	"UP031", # temporarily disabled
 	"UP032", # temporarily disabled
 	"UP038", # Using `X | Y` in `isinstance` call is slower and more verbose https://github.com/astral-sh/ruff/issues/7871
 	# Only enforcing return type annotations for public functions
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 4f5c01708a..ab3be9581c 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -182,7 +182,7 @@ def _ensure_stringlike(self, option, what, default=None):
             return default
         elif not isinstance(val, str):
             raise DistutilsOptionError(
-                "'%s' must be a %s (got `%s`)" % (option, what, val)
+                "'{}' must be a {} (got `{}`)".format(option, what, val)
             )
         return val
 
@@ -210,7 +210,7 @@ def ensure_string_list(self, option: str) -> None:
                 ok = False
             if not ok:
                 raise DistutilsOptionError(
-                    "'%s' must be a list of strings (got %r)" % (option, val)
+                    "'{}' must be a list of strings (got {!r})".format(option, val)
                 )
 
     @overload
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 2e9c48a77b..72b745dc93 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -150,7 +150,7 @@ def write_pkg_file(self, file):  # noqa: C901  # is too complex (14)  # FIXME
     version = self.get_metadata_version()
 
     def write_field(key, value):
-        file.write("%s: %s\n" % (key, value))
+        file.write("{}: {}\n".format(key, value))
 
     write_field('Metadata-Version', str(version))
     write_field('Name', self.get_name())
@@ -179,7 +179,7 @@ def write_field(key, value):
         write_field('License', rfc822_escape(license))
 
     for project_url in self.project_urls.items():
-        write_field('Project-URL', '%s, %s' % project_url)
+        write_field('Project-URL', '{}, {}'.format(*project_url))
 
     keywords = ','.join(self.get_keywords())
     if keywords:
@@ -209,7 +209,7 @@ def write_field(key, value):
 
     long_description = self.get_long_description()
     if long_description:
-        file.write("\n%s" % long_description)
+        file.write("\n{}".format(long_description))
         if not long_description.endswith("\n"):
             file.write("\n")
 
diff --git a/setuptools/_imp.py b/setuptools/_imp.py
index bddbf6a683..f0fba01e50 100644
--- a/setuptools/_imp.py
+++ b/setuptools/_imp.py
@@ -29,7 +29,7 @@ def find_module(module, paths=None):
     """Just like 'imp.find_module()', but with package support"""
     spec = find_spec(module, paths)
     if spec is None:
-        raise ImportError("Can't find %s" % module)
+        raise ImportError("Can't find {}".format(module))
     if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
         spec = importlib.util.spec_from_loader('__init__.py', spec.loader)
 
@@ -76,12 +76,12 @@ def find_module(module, paths=None):
 def get_frozen_object(module, paths=None):
     spec = find_spec(module, paths)
     if not spec:
-        raise ImportError("Can't find %s" % module)
+        raise ImportError("Can't find {}".format(module))
     return spec.loader.get_code(module)
 
 
 def get_module(module, paths, info):
     spec = find_spec(module, paths)
     if not spec:
-        raise ImportError("Can't find %s" % module)
+        raise ImportError("Can't find {}".format(module))
     return module_from_spec(spec)
diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py
index cd9cf9c08f..7ef5294b55 100644
--- a/setuptools/archive_util.py
+++ b/setuptools/archive_util.py
@@ -62,7 +62,7 @@ def unpack_archive(
         else:
             return
     else:
-        raise UnrecognizedFormat("Not a recognized archive type: %s" % filename)
+        raise UnrecognizedFormat("Not a recognized archive type: {}".format(filename))
 
 
 def unpack_directory(filename, extract_dir, progress_filter=default_filter) -> None:
@@ -71,7 +71,7 @@ def unpack_directory(filename, extract_dir, progress_filter=default_filter) -> N
     Raises ``UnrecognizedFormat`` if `filename` is not a directory
     """
     if not os.path.isdir(filename):
-        raise UnrecognizedFormat("%s is not a directory" % filename)
+        raise UnrecognizedFormat("{} is not a directory".format(filename))
 
     paths = {
         filename: ('', extract_dir),
@@ -101,7 +101,7 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter) -> Non
     """
 
     if not zipfile.is_zipfile(filename):
-        raise UnrecognizedFormat("%s is not a zip file" % (filename,))
+        raise UnrecognizedFormat("{} is not a zip file".format(filename))
 
     with zipfile.ZipFile(filename) as z:
         _unpack_zipfile_obj(z, extract_dir, progress_filter)
@@ -198,7 +198,7 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter) -> boo
         tarobj = tarfile.open(filename)
     except tarfile.TarError as e:
         raise UnrecognizedFormat(
-            "%s is not a compressed or uncompressed tar file" % (filename,)
+            "{} is not a compressed or uncompressed tar file".format(filename)
         ) from e
 
     for member, final_dst in _iter_open_tar(
diff --git a/setuptools/command/alias.py b/setuptools/command/alias.py
index 388830d7a6..9903a23503 100644
--- a/setuptools/command/alias.py
+++ b/setuptools/command/alias.py
@@ -55,7 +55,7 @@ def run(self) -> None:
                 print("setup.py alias", format_alias(alias, aliases))
                 return
             else:
-                print("No alias definition found for %r" % alias)
+                print("No alias definition found for {!r}".format(alias))
                 return
         else:
             alias = self.args[0]
@@ -73,5 +73,5 @@ def format_alias(name, aliases):
     elif source == config_file('local'):
         source = ''
     else:
-        source = '--filename=%r' % source
+        source = '--filename={!r}'.format(source)
     return source + name + ' ' + command
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index ac3e6ef1f9..d991049b5b 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -263,7 +263,7 @@ def zap_pyfiles(self):
                     pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                     m = re.match(pattern, name)
                     path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
-                    log.info("Renaming file from [%s] to [%s]" % (path_old, path_new))
+                    log.info("Renaming file from [{}] to [{}]".format(path_old, path_new))
                     try:
                         os.remove(path_new)
                     except OSError:
diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py
index bee3d58c03..f2705cecdc 100644
--- a/setuptools/command/build_clib.py
+++ b/setuptools/command/build_clib.py
@@ -29,9 +29,9 @@ def build_libraries(self, libraries) -> None:
             sources = build_info.get('sources')
             if sources is None or not isinstance(sources, (list, tuple)):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '%s'), "
+                    "in 'libraries' option (library '{}'), "
                     "'sources' must be present and must be "
-                    "a list of source filenames" % lib_name
+                    "a list of source filenames".format(lib_name)
                 )
             sources = sorted(list(sources))
 
@@ -43,9 +43,9 @@ def build_libraries(self, libraries) -> None:
             obj_deps = build_info.get('obj_deps', dict())
             if not isinstance(obj_deps, dict):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '%s'), "
+                    "in 'libraries' option (library '{}'), "
                     "'obj_deps' must be a dictionary of "
-                    "type 'source: list'" % lib_name
+                    "type 'source: list'".format(lib_name)
                 )
             dependencies = []
 
@@ -54,9 +54,9 @@ def build_libraries(self, libraries) -> None:
             global_deps = obj_deps.get('', list())
             if not isinstance(global_deps, (list, tuple)):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '%s'), "
+                    "in 'libraries' option (library '{}'), "
                     "'obj_deps' must be a dictionary of "
-                    "type 'source: list'" % lib_name
+                    "type 'source: list'".format(lib_name)
                 )
 
             # Build the list to be used by newer_pairwise_group
@@ -67,9 +67,9 @@ def build_libraries(self, libraries) -> None:
                 extra_deps = obj_deps.get(source, list())
                 if not isinstance(extra_deps, (list, tuple)):
                     raise DistutilsSetupError(
-                        "in 'libraries' option (library '%s'), "
+                        "in 'libraries' option (library '{}'), "
                         "'obj_deps' must be a dictionary of "
-                        "type 'source: list'" % lib_name
+                        "type 'source: list'".format(lib_name)
                     )
                 src_deps.extend(extra_deps)
                 dependencies.append(src_deps)
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index e5c6b76b38..b8a395bafa 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -360,7 +360,7 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
                     "   global __bootstrap__, __file__, __loader__",
                     "   import sys, os, pkg_resources, importlib.util" + if_dl(", dl"),
                     "   __file__ = pkg_resources.resource_filename"
-                    "(__name__,%r)" % os.path.basename(ext._file_name),
+                    "(__name__,{!r})".format(os.path.basename(ext._file_name)),
                     "   del __bootstrap__",
                     "   if '__loader__' in globals():",
                     "       del __loader__",
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index e7d60c6440..f17cd66fe9 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -259,10 +259,10 @@ def check_package(self, package, package_dir):
             contents = f.read()
         if b'declare_namespace' not in contents:
             raise distutils.errors.DistutilsError(
-                "Namespace package problem: %s is a namespace package, but "
+                "Namespace package problem: {} is a namespace package, but "
                 "its\n__init__.py does not call declare_namespace()! Please "
                 'fix it.\n(See the setuptools manual under '
-                '"Namespace Packages" for details.)\n"' % (package,)
+                '"Namespace Packages" for details.)\n"'.format(package)
             )
         return init_py
 
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 66fe68f7a9..88d39c043a 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -148,7 +148,7 @@ class easy_install(Command):
             None,
             "Don't load find-links defined in packages being installed",
         ),
-        ('user', None, "install in user site-package '%s'" % site.USER_SITE),
+        ('user', None, "install in user site-package '{}'".format(site.USER_SITE)),
     ]
     boolean_options = [
         'zip-ok',
@@ -446,7 +446,7 @@ def run(self, show_deprecation: bool = True) -> None:
                 self.execute(
                     file_util.write_file,
                     (self.record, outputs),
-                    "writing list of installed files to '%s'" % self.record,
+                    "writing list of installed files to '{}'".format(self.record),
                 )
             self.warn_deprecated_options()
         finally:
@@ -461,7 +461,7 @@ def pseudo_tempname(self):
             pid = os.getpid()
         except Exception:
             pid = random.randint(0, sys.maxsize)
-        return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
+        return os.path.join(self.install_dir, "test-easy-install-{}".format(pid))
 
     def warn_deprecated_options(self) -> None:
         pass
@@ -649,8 +649,8 @@ def add_output(self, path) -> None:
     def not_editable(self, spec) -> None:
         if self.editable:
             raise DistutilsArgError(
-                "Invalid argument %r: you can't use filenames or URLs "
-                "with --editable (except via the --find-links option)." % (spec,)
+                "Invalid argument {!r}: you can't use filenames or URLs "
+                "with --editable (except via the --find-links option).".format(spec)
             )
 
     def check_editable(self, spec) -> None:
@@ -659,8 +659,7 @@ def check_editable(self, spec) -> None:
 
         if os.path.exists(os.path.join(self.build_directory, spec.key)):
             raise DistutilsArgError(
-                "%r already exists in %s; can't do a checkout there"
-                % (spec.key, self.build_directory)
+                "{!r} already exists in {}; can't do a checkout there".format(spec.key, self.build_directory)
             )
 
     @contextlib.contextmanager
@@ -698,7 +697,7 @@ def easy_install(self, spec, deps: bool = False) -> Distribution | None:
                 self.local_index,
             )
             if dist is None:
-                msg = "Could not find suitable distribution for %r" % spec
+                msg = "Could not find suitable distribution for {!r}".format(spec)
                 if self.always_copy:
                     msg += " (--always-copy skips system and development eggs)"
                 raise DistutilsError(msg)
@@ -917,12 +916,11 @@ def install_eggs(self, spec, dist_filename, tmpdir) -> list[Distribution]:
             setups = glob(os.path.join(setup_base, '*', 'setup.py'))
             if not setups:
                 raise DistutilsError(
-                    "Couldn't find a setup script in %s"
-                    % os.path.abspath(dist_filename)
+                    "Couldn't find a setup script in {}".format(os.path.abspath(dist_filename))
                 )
             if len(setups) > 1:
                 raise DistutilsError(
-                    "Multiple setup scripts in %s" % os.path.abspath(dist_filename)
+                    "Multiple setup scripts in {}".format(os.path.abspath(dist_filename))
                 )
             setup_script = setups[0]
 
@@ -1000,7 +998,7 @@ def install_exe(self, dist_filename, tmpdir):
         cfg = extract_wininst_cfg(dist_filename)
         if cfg is None:
             raise DistutilsError(
-                "%s is not a valid distutils Windows .exe" % dist_filename
+                "{} is not a valid distutils Windows .exe".format(dist_filename)
             )
         # Create a dummy distribution object until we build the real distro
         dist = Distribution(
@@ -1026,7 +1024,7 @@ def install_exe(self, dist_filename, tmpdir):
                 f.write('Metadata-Version: 1.0\n')
                 for k, v in cfg.items('metadata'):
                     if k != 'target_version':
-                        f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
+                        f.write('{}: {}\n'.format(k.replace('_', '-').title(), v))
         script_dir = os.path.join(_egg_info, 'scripts')
         # delete entry-point scripts to avoid duping
         self.delete_blockers([
@@ -1114,8 +1112,7 @@ def install_wheel(self, wheel_path, tmpdir):
             self.execute(
                 wheel.install_as_egg,
                 (destination,),
-                ("Installing %s to %s")
-                % (os.path.basename(wheel_path), os.path.dirname(destination)),
+                ("Installing {} to {}").format(os.path.basename(wheel_path), os.path.dirname(destination)),
             )
         finally:
             update_dist_caches(destination, fix_zipimporter_caches=False)
@@ -1191,7 +1188,7 @@ def run_setup(self, setup_script, setup_base, args) -> None:
         try:
             run_setup(setup_script, args)
         except SystemExit as v:
-            raise DistutilsError("Setup script exited with %s" % (v.args[0],)) from v
+            raise DistutilsError("Setup script exited with {}".format(v.args[0])) from v
 
     def build_and_install(self, setup_script, setup_base):
         args = ['bdist_egg', '--dist-dir']
@@ -1374,7 +1371,7 @@ def create_home_path(self) -> None:
         home = convert_path(os.path.expanduser("~"))
         for path in only_strs(self.config_vars.values()):
             if path.startswith(home) and not os.path.isdir(path):
-                self.debug_print("os.makedirs('%s', 0o700)" % path)
+                self.debug_print("os.makedirs('{}', 0o700)".format(path))
                 os.makedirs(path, 0o700)
 
     INSTALL_SCHEMES = dict(
@@ -1599,7 +1596,7 @@ def get_exe_prefixes(exe_filename):
                 for pth in yield_lines(contents):
                     pth = pth.strip().replace('\\', '/')
                     if not pth.startswith('import'):
-                        prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
+                        prefixes.append((('{}/{}/'.format(parts[0], pth)), ''))
     finally:
         z.close()
     prefixes = [(x.lower(), y) for x, y in prefixes]
@@ -2305,7 +2302,7 @@ def get_win_launcher(type):
 
     Returns the executable as a byte string.
     """
-    launcher_fn = '%s.exe' % type
+    launcher_fn = '{}.exe'.format(type)
     if is_64bit():
         if get_platform() == "win-arm64":
             launcher_fn = launcher_fn.replace(".", "-arm64.")
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index a300356d33..9a9ea63322 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -48,7 +48,7 @@ def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
     chunks = glob.split(os.path.sep)
 
     sep = re.escape(os.sep)
-    valid_char = '[^%s]' % (sep,)
+    valid_char = '[^{}]'.format(sep)
 
     for c, chunk in enumerate(chunks):
         last_chunk = c == len(chunks) - 1
@@ -60,7 +60,7 @@ def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
                 pat += '.*'
             else:
                 # Match '(name/)*'
-                pat += '(?:%s+%s)*' % (valid_char, sep)
+                pat += '(?:{}+{})*'.format(valid_char, sep)
             continue  # Break here as the whole path component has been handled
 
         # Find any special characters in the remainder
@@ -102,7 +102,7 @@ def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
                         inner = inner[1:]
 
                     char_class += re.escape(inner)
-                    pat += '[%s]' % (char_class,)
+                    pat += '[{}]'.format(char_class)
 
                     # Skip to the end ]
                     i = inner_i
@@ -231,8 +231,7 @@ def finalize_options(self) -> None:
             packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version))
         except ValueError as e:
             raise distutils.errors.DistutilsOptionError(
-                "Invalid distribution name or version syntax: %s-%s"
-                % (self.egg_name, self.egg_version)
+                "Invalid distribution name or version syntax: {}-{}".format(self.egg_name, self.egg_version)
             ) from e
 
         if self.egg_base is None:
@@ -502,7 +501,7 @@ def _safe_path(self, path):
         # To avoid accidental trans-codings errors, first to unicode
         u_path = unicode_utils.filesys_decode(path)
         if u_path is None:
-            log.warn("'%s' in unexpected encoding -- skipping" % path)
+            log.warn("'{}' in unexpected encoding -- skipping".format(path))
             return False
 
         # Must ensure utf-8 encodability
@@ -564,7 +563,7 @@ def write_manifest(self) -> None:
 
         # Now _repairs should encodability, but not unicode
         files = [self._manifest_normalize(f) for f in self.filelist.files]
-        msg = "writing manifest file '%s'" % self.manifest
+        msg = "writing manifest file '{}'".format(self.manifest)
         self.execute(write_file, (self.manifest, files), msg)
 
     def warn(self, msg) -> None:
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
index a6e6ec6446..991531caa0 100644
--- a/setuptools/command/install_egg_info.py
+++ b/setuptools/command/install_egg_info.py
@@ -36,7 +36,7 @@ def run(self) -> None:
             self.execute(os.unlink, (self.target,), "Removing " + self.target)
         if not self.dry_run:
             ensure_directory(self.target)
-        self.execute(self.copytree, (), "Copying %s to %s" % (self.source, self.target))
+        self.execute(self.copytree, (), "Copying {} to {}".format(self.source, self.target))
         self.install_namespaces()
 
     def get_outputs(self):
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index bf085cb56b..03a70ce9c9 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -207,7 +207,7 @@ def read_manifest(self):
             try:
                 line = bytes_line.decode('UTF-8')
             except UnicodeDecodeError:
-                log.warn("%r not UTF-8 decodable -- skipping" % line)
+                log.warn("{!r} not UTF-8 decodable -- skipping".format(line))
                 continue
             # ignore comments and blank lines
             line = line.strip()
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index 200cdff0f7..ee220f9454 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -23,7 +23,7 @@ def config_file(kind="local"):
         return os.path.join(os.path.dirname(distutils.__file__), 'distutils.cfg')
     if kind == 'user':
         dot = os.name == 'posix' and '.' or ''
-        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
+        return os.path.expanduser(convert_path("~/{}pydistutils.cfg".format(dot)))
     raise ValueError("config_file() type must be 'local', 'global', or 'user'", kind)
 
 
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 54c68bed4f..8469e979a1 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -329,7 +329,7 @@ def version(value: Callable | Iterable[str | int] | str) -> str:
         return _value
     if hasattr(_value, '__iter__'):
         return '.'.join(map(str, _value))
-    return '%s' % _value
+    return '{}'.format(_value)
 
 
 def canonic_package_data(package_data: dict) -> dict:
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index b35d0b00cd..0f9167be77 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -272,7 +272,7 @@ def _section_options(
     def parsers(self):
         """Metadata item name to parser function mapping."""
         raise NotImplementedError(
-            '%s must provide .parsers property' % self.__class__.__name__
+            '{} must provide .parsers property'.format(self.__class__.__name__)
         )
 
     def __setitem__(self, option_name, value) -> None:
diff --git a/setuptools/depends.py b/setuptools/depends.py
index 1be71857a5..09eff3855f 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -43,7 +43,7 @@ def __init__(
     def full_name(self):
         """Return full package/distribution name, w/version"""
         if self.requested_version is not None:
-            return '%s-%s' % (self.name, self.requested_version)
+            return '{}-{}'.format(self.name, self.requested_version)
         return self.name
 
     def version_ok(self, version):
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 5b3175fb5b..182a03b36a 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -85,7 +85,7 @@ def check_importable(dist, attr, value):
         assert not ep.extras
     except (TypeError, ValueError, AttributeError, AssertionError) as e:
         raise DistutilsSetupError(
-            "%r must be importable 'module:attrs' string (got %r)" % (attr, value)
+            "{!r} must be importable 'module:attrs' string (got {!r})".format(attr, value)
         ) from e
 
 
@@ -111,7 +111,7 @@ def check_nsp(dist, attr, value):
         if not dist.has_contents_for(nsp):
             raise DistutilsSetupError(
                 "Distribution contains no modules or packages for "
-                + "namespace package %r" % nsp
+                + "namespace package {!r}".format(nsp)
             )
         parent, _sep, _child = nsp.rpartition('.')
         if parent and parent not in ns_packages:
@@ -588,10 +588,10 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
             option_dict = self.get_option_dict(command_name)
 
         if DEBUG:
-            self.announce("  setting options for '%s' command:" % command_name)
+            self.announce("  setting options for '{}' command:".format(command_name))
         for option, (source, value) in option_dict.items():
             if DEBUG:
-                self.announce("    %s = %s (from %s)" % (option, value, source))
+                self.announce("    {} = {} (from {})".format(option, value, source))
             try:
                 bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
             except AttributeError:
@@ -611,8 +611,7 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
                     setattr(command_obj, option, value)
                 else:
                     raise DistutilsOptionError(
-                        "error in %s: command '%s' has no such option '%s'"
-                        % (source, command_name, option)
+                        "error in {}: command '{}' has no such option '{}'".format(source, command_name, option)
                     )
             except ValueError as e:
                 raise DistutilsOptionError(e) from e
@@ -818,7 +817,7 @@ def _exclude_misc(self, name: str, value: _Sequence) -> None:
         try:
             old = getattr(self, name)
         except AttributeError as e:
-            raise DistutilsSetupError("%s: No such distribution setting" % name) from e
+            raise DistutilsSetupError(f"{name}: No such distribution setting") from e
         if old is not None and not isinstance(old, _sequence):
             raise DistutilsSetupError(
                 name + ": this setting cannot be changed via include/exclude"
@@ -836,7 +835,7 @@ def _include_misc(self, name: str, value: _Sequence) -> None:
         try:
             old = getattr(self, name)
         except AttributeError as e:
-            raise DistutilsSetupError("%s: No such distribution setting" % name) from e
+            raise DistutilsSetupError("{}: No such distribution setting".format(name)) from e
         if old is None:
             setattr(self, name, value)
         elif not isinstance(old, _sequence):
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index d8e30dbb80..ec5d8cdae1 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -64,7 +64,7 @@ def get_unpatched_class(cls: type[_T]) -> type[_T]:
     )
     base = next(external_bases)
     if not base.__module__.startswith('distutils'):
-        msg = "distutils has already been patched by %r" % cls
+        msg = "distutils has already been patched by {!r}".format(cls)
         raise AssertionError(msg)
     return base
 
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 8d6d2cf084..55e5cd21fd 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -108,7 +108,7 @@ def current_dir(self, hidex86=False, x64=False) -> str:
             if (self.current_cpu == 'x86' and hidex86)
             else r'\x64'
             if (self.current_cpu == 'amd64' and x64)
-            else r'\%s' % self.current_cpu
+            else r'\{}'.format(self.current_cpu)
         )
 
     def target_dir(self, hidex86=False, x64=False) -> str:
@@ -132,7 +132,7 @@ def target_dir(self, hidex86=False, x64=False) -> str:
             if (self.target_cpu == 'x86' and hidex86)
             else r'\x64'
             if (self.target_cpu == 'amd64' and x64)
-            else r'\%s' % self.target_cpu
+            else r'\{}'.format(self.target_cpu)
         )
 
     def cross_dir(self, forcex86=False):
@@ -155,7 +155,7 @@ def cross_dir(self, forcex86=False):
         return (
             ''
             if self.target_cpu == current
-            else self.target_dir().replace('\\', '\\%s_' % current)
+            else self.target_dir().replace('\\', '\\{}_'.format(current))
         )
 
 
@@ -497,11 +497,11 @@ def VSInstallDir(self):
         """
         # Default path
         default = os.path.join(
-            self.ProgramFilesx86, 'Microsoft Visual Studio %0.1f' % self.vs_ver
+            self.ProgramFilesx86, 'Microsoft Visual Studio {:0.1f}'.format(self.vs_ver)
         )
 
         # Try to get path from registry, if fail use default path
-        return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default
+        return self.ri.lookup(self.ri.vs, '{:0.1f}'.format(self.vs_ver)) or default
 
     @property
     def VCInstallDir(self):
@@ -561,16 +561,16 @@ def _guess_vc_legacy(self):
             path
         """
         default = os.path.join(
-            self.ProgramFilesx86, r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver
+            self.ProgramFilesx86, r'Microsoft Visual Studio {:0.1f}\VC'.format(self.vs_ver)
         )
 
         # Try to get "VC++ for Python" path from registry as default path
-        reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
+        reg_path = os.path.join(self.ri.vc_for_python, '{:0.1f}'.format(self.vs_ver))
         python_vc = self.ri.lookup(reg_path, 'installdir')
         default_vc = os.path.join(python_vc, 'VC') if python_vc else default
 
         # Try to get path from registry, if fail use default path
-        return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc
+        return self.ri.lookup(self.ri.vc, '{:0.1f}'.format(self.vs_ver)) or default_vc
 
     @property
     def WindowsSdkVersion(self) -> tuple[LiteralString, ...]:
@@ -619,13 +619,13 @@ def WindowsSdkDir(self) -> str | None:  # noqa: C901  # is too complex (12)  # F
         sdkdir: str | None = ''
         for ver in self.WindowsSdkVersion:
             # Try to get it from registry
-            loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver)
+            loc = os.path.join(self.ri.windows_sdk, 'v{}'.format(ver))
             sdkdir = self.ri.lookup(loc, 'installationfolder')
             if sdkdir:
                 break
         if not sdkdir or not os.path.isdir(sdkdir):
             # Try to get "VC++ for Python" version from registry
-            path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
+            path = os.path.join(self.ri.vc_for_python, '{:0.1f}'.format(self.vc_ver))
             install_base = self.ri.lookup(path, 'installdir')
             if install_base:
                 sdkdir = os.path.join(install_base, 'WinSDK')
@@ -633,14 +633,14 @@ def WindowsSdkDir(self) -> str | None:  # noqa: C901  # is too complex (12)  # F
             # If fail, use default new path
             for ver in self.WindowsSdkVersion:
                 intver = ver[: ver.rfind('.')]
-                path = r'Microsoft SDKs\Windows Kits\%s' % intver
+                path = r'Microsoft SDKs\Windows Kits\{}'.format(intver)
                 d = os.path.join(self.ProgramFiles, path)
                 if os.path.isdir(d):
                     sdkdir = d
         if not sdkdir or not os.path.isdir(sdkdir):
             # If fail, use default old path
             for ver in self.WindowsSdkVersion:
-                path = r'Microsoft SDKs\Windows\v%s' % ver
+                path = r'Microsoft SDKs\Windows\v{}'.format(ver)
                 d = os.path.join(self.ProgramFiles, path)
                 if os.path.isdir(d):
                     sdkdir = d
@@ -676,7 +676,7 @@ def WindowsSDKExecutablePath(self):
                 regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)]
 
         for ver in self.WindowsSdkVersion:
-            regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
+            regpaths += [os.path.join(self.ri.windows_sdk, 'v{}A'.format(ver), fx)]
 
         # Return installation folder from the more recent path
         for path in regpaths:
@@ -696,7 +696,7 @@ def FSharpInstallDir(self):
         str
             path
         """
-        path = os.path.join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
+        path = os.path.join(self.ri.visualstudio, r'{:0.1f}\Setup\F#'.format(self.vs_ver))
         return self.ri.lookup(path, 'productdir') or ''
 
     @property
@@ -714,7 +714,7 @@ def UniversalCRTSdkDir(self):
 
         # Find path of the more recent Kit
         for ver in vers:
-            sdkdir = self.ri.lookup(self.ri.windows_kits_roots, 'kitsroot%s' % ver)
+            sdkdir = self.ri.lookup(self.ri.windows_kits_roots, 'kitsroot{}'.format(ver))
             if sdkdir:
                 return sdkdir or ''
 
@@ -960,7 +960,7 @@ def VSTools(self):
             arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
             paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']
             paths += [r'Team Tools\Performance Tools']
-            paths += [r'Team Tools\Performance Tools%s' % arch_subdir]
+            paths += [r'Team Tools\Performance Tools{}'.format(arch_subdir)]
 
         return [os.path.join(self.si.VSInstallDir, path) for path in paths]
 
@@ -993,10 +993,10 @@ def VCLibraries(self):
             arch_subdir = self.pi.target_dir(x64=True)
         else:
             arch_subdir = self.pi.target_dir(hidex86=True)
-        paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir]
+        paths = ['Lib{}'.format(arch_subdir), r'ATLMFC\Lib{}'.format(arch_subdir)]
 
         if self.vs_ver >= 14.0:
-            paths += [r'Lib\store%s' % arch_subdir]
+            paths += [r'Lib\store{}'.format(arch_subdir)]
 
         return [os.path.join(self.si.VCInstallDir, path) for path in paths]
 
@@ -1030,10 +1030,10 @@ def VCTools(self):
         forcex86 = True if self.vs_ver <= 10.0 else False
         arch_subdir = self.pi.cross_dir(forcex86)
         if arch_subdir:
-            tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
+            tools += [os.path.join(si.VCInstallDir, 'Bin{}'.format(arch_subdir))]
 
         if self.vs_ver == 14.0:
-            path = 'Bin%s' % self.pi.current_dir(hidex86=True)
+            path = 'Bin{}'.format(self.pi.current_dir(hidex86=True))
             tools += [os.path.join(si.VCInstallDir, path)]
 
         elif self.vs_ver >= 15.0:
@@ -1068,13 +1068,13 @@ def OSLibraries(self):
         """
         if self.vs_ver <= 10.0:
             arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
-            return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
+            return [os.path.join(self.si.WindowsSdkDir, 'Lib{}'.format(arch_subdir))]
 
         else:
             arch_subdir = self.pi.target_dir(x64=True)
             lib = os.path.join(self.si.WindowsSdkDir, 'lib')
             libver = self._sdk_subdir
-            return [os.path.join(lib, '%sum%s' % (libver, arch_subdir))]
+            return [os.path.join(lib, '{}um{}'.format(libver, arch_subdir))]
 
     @property
     def OSIncludes(self):
@@ -1097,9 +1097,9 @@ def OSIncludes(self):
             else:
                 sdkver = ''
             return [
-                os.path.join(include, '%sshared' % sdkver),
-                os.path.join(include, '%sum' % sdkver),
-                os.path.join(include, '%swinrt' % sdkver),
+                os.path.join(include, '{}shared'.format(sdkver)),
+                os.path.join(include, '{}um'.format(sdkver)),
+                os.path.join(include, '{}winrt'.format(sdkver)),
             ]
 
     @property
@@ -1134,7 +1134,7 @@ def OSLibpath(self):
                     self.si.WindowsSdkDir,
                     'ExtensionSDKs',
                     'Microsoft.VCLibs',
-                    '%0.1f' % self.vs_ver,
+                    '{:0.1f}'.format(self.vs_ver),
                     'References',
                     'CommonConfiguration',
                     'neutral',
@@ -1169,7 +1169,7 @@ def _sdk_tools(self):
 
         if not self.pi.current_is_x86():
             arch_subdir = self.pi.current_dir(x64=True)
-            path = 'Bin%s' % arch_subdir
+            path = 'Bin{}'.format(arch_subdir)
             yield os.path.join(self.si.WindowsSdkDir, path)
 
         if self.vs_ver in (10.0, 11.0):
@@ -1177,14 +1177,14 @@ def _sdk_tools(self):
                 arch_subdir = ''
             else:
                 arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
-            path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
+            path = r'Bin\NETFX 4.0 Tools{}'.format(arch_subdir)
             yield os.path.join(self.si.WindowsSdkDir, path)
 
         elif self.vs_ver >= 15.0:
             path = os.path.join(self.si.WindowsSdkDir, 'Bin')
             arch_subdir = self.pi.current_dir(x64=True)
             sdkver = self.si.WindowsSdkLastVersion
-            yield os.path.join(path, '%s%s' % (sdkver, arch_subdir))
+            yield os.path.join(path, '{}{}'.format(sdkver, arch_subdir))
 
         if self.si.WindowsSDKExecutablePath:
             yield self.si.WindowsSDKExecutablePath
@@ -1200,7 +1200,7 @@ def _sdk_subdir(self):
             subdir
         """
         ucrtver = self.si.WindowsSdkLastVersion
-        return ('%s\\' % ucrtver) if ucrtver else ''
+        return ('{}\\'.format(ucrtver)) if ucrtver else ''
 
     @property
     def SdkSetup(self):
@@ -1262,7 +1262,7 @@ def NetFxSDKLibraries(self):
             return []
 
         arch_subdir = self.pi.target_dir(x64=True)
-        return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
+        return [os.path.join(self.si.NetFxSdkDir, r'lib\um{}'.format(arch_subdir))]
 
     @property
     def NetFxSDKIncludes(self):
@@ -1310,7 +1310,7 @@ def MSBuild(self):
             base_path = self.si.VSInstallDir
             arch_subdir = ''
 
-        path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir)
+        path = r'MSBuild\{:0.1f}\bin{}'.format(self.vs_ver, arch_subdir)
         build = [os.path.join(base_path, path)]
 
         if self.vs_ver >= 15.0:
@@ -1350,7 +1350,7 @@ def UCRTLibraries(self):
         arch_subdir = self.pi.target_dir(x64=True)
         lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib')
         ucrtver = self._ucrt_subdir
-        return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
+        return [os.path.join(lib, '{}ucrt{}'.format(ucrtver, arch_subdir))]
 
     @property
     def UCRTIncludes(self):
@@ -1366,7 +1366,7 @@ def UCRTIncludes(self):
             return []
 
         include = os.path.join(self.si.UniversalCRTSdkDir, 'include')
-        return [os.path.join(include, '%sucrt' % self._ucrt_subdir)]
+        return [os.path.join(include, '{}ucrt'.format(self._ucrt_subdir))]
 
     @property
     def _ucrt_subdir(self):
@@ -1379,7 +1379,7 @@ def _ucrt_subdir(self):
             subdir
         """
         ucrtver = self.si.UniversalCRTSdkLastVersion
-        return ('%s\\' % ucrtver) if ucrtver else ''
+        return ('{}\\'.format(ucrtver)) if ucrtver else ''
 
     @property
     def FSharp(self):
@@ -1520,7 +1520,7 @@ def _build_paths(self, name, spec_path_lists, exists):
         paths = itertools.chain(spec_paths, env_paths)
         extant_paths = list(filter(os.path.isdir, paths)) if exists else paths
         if not extant_paths:
-            msg = "%s environment variable is empty" % name.upper()
+            msg = "{} environment variable is empty".format(name.upper())
             raise distutils.errors.DistutilsPlatformError(msg)
         unique_paths = unique_everseen(extant_paths)
         return os.pathsep.join(unique_paths)
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 97806e8ff8..24fae988a2 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -74,7 +74,7 @@ def parse_requirement_arg(spec):
         return Requirement.parse(spec)
     except ValueError as e:
         raise DistutilsError(
-            "Not a URL, existing file, or requirement spec: %r" % (spec,)
+            "Not a URL, existing file, or requirement spec: {!r}".format(spec)
         ) from e
 
 
@@ -357,7 +357,7 @@ def process_url(self, url, retrieve: bool = False) -> None:  # noqa: C901
         if f is None:
             return
         if isinstance(f, urllib.error.HTTPError) and f.code == 401:
-            self.info("Authentication error: %s" % f.msg)
+            self.info("Authentication error: {}".format(f.msg))
         self.fetched_urls[f.url] = True
         if 'html' not in f.headers.get('content-type', '').lower():
             f.close()  # not html, we can't process it
@@ -474,13 +474,13 @@ def process_index(self, url, page):
             base, frag = egg_info_for_url(new_url)
             if base.endswith('.py') and not frag:
                 if ver:
-                    new_url += '#egg=%s-%s' % (pkg, ver)
+                    new_url += '#egg={}-{}'.format(pkg, ver)
                 else:
                     self.need_version_info(url)
             self.scan_url(new_url)
 
         return PYPI_MD5.sub(
-            lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
+            lambda m: '<a href="{}#md5={}">{}</a>'.format(*m.group(1, 3, 2)), page
         )
 
     def need_version_info(self, url) -> None:
@@ -525,14 +525,13 @@ def check_hash(self, checker, filename, tfp) -> None:
         """
         checker is a ContentChecker
         """
-        checker.report(self.debug, "Validating %%s checksum for %s" % filename)
+        checker.report(self.debug, "Validating %s checksum for {}".format(filename))
         if not checker.is_valid():
             tfp.close()
             os.unlink(filename)
             raise DistutilsError(
-                "%s validation failed for %s; "
-                "possible download problem?"
-                % (checker.hash.name, os.path.basename(filename))
+                "{} validation failed for {}; "
+                "possible download problem?".format(checker.hash.name, os.path.basename(filename))
             )
 
     def add_find_links(self, urls) -> None:
@@ -720,8 +719,7 @@ def gen_setup(self, filename, fragment, tmpdir):
             with open(os.path.join(tmpdir, 'setup.py'), 'w', encoding="utf-8") as file:
                 file.write(
                     "from setuptools import setup\n"
-                    "setup(name=%r, version=%r, py_modules=[%r])\n"
-                    % (
+                    "setup(name={!r}, version={!r}, py_modules=[{!r}])\n".format(
                         dists[0].project_name,
                         dists[0].version,
                         os.path.splitext(basename)[0],
@@ -731,9 +729,9 @@ def gen_setup(self, filename, fragment, tmpdir):
 
         elif match:
             raise DistutilsError(
-                "Can't unambiguously interpret project/version identifier %r; "
+                "Can't unambiguously interpret project/version identifier {!r}; "
                 "any dashes in the name or version should be escaped using "
-                "underscores. %r" % (fragment, dists)
+                "underscores. {!r}".format(fragment, dists)
             )
         else:
             raise DistutilsError(
@@ -752,7 +750,7 @@ def _download_to(self, url, filename):
             fp = self.open_url(url)
             if isinstance(fp, urllib.error.HTTPError):
                 raise DistutilsError(
-                    "Can't download %s: %s %s" % (url, fp.code, fp.msg)
+                    "Can't download {}: {} {}".format(url, fp.code, fp.msg)
                 )
             headers = fp.info()
             blocknum = 0
@@ -793,7 +791,7 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
             if warning:
                 self.warn(warning, msg)
             else:
-                raise DistutilsError('%s %s' % (url, msg)) from v
+                raise DistutilsError('{} {}'.format(url, msg)) from v
         except urllib.error.HTTPError as v:
             return v
         except urllib.error.URLError as v:
@@ -801,21 +799,21 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
                 self.warn(warning, v.reason)
             else:
                 raise DistutilsError(
-                    "Download error for %s: %s" % (url, v.reason)
+                    "Download error for {}: {}".format(url, v.reason)
                 ) from v
         except http.client.BadStatusLine as v:
             if warning:
                 self.warn(warning, v.line)
             else:
                 raise DistutilsError(
-                    '%s returned a bad status line. The server might be '
-                    'down, %s' % (url, v.line)
+                    '{} returned a bad status line. The server might be '
+                    'down, {}'.format(url, v.line)
                 ) from v
         except (http.client.HTTPException, OSError) as v:
             if warning:
                 self.warn(warning, v)
             else:
-                raise DistutilsError("Download error for %s: %s" % (url, v)) from v
+                raise DistutilsError("Download error for {}: {}".format(url, v)) from v
 
     def _download_url(self, url, tmpdir):
         # Determine download filename
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index b31118c0fb..399c9dd743 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -87,14 +87,14 @@ def test_basic(self, tmpdir):
             '[options]\n'
             'scripts = bin/a.py, bin/b.py\n',
         )
-        config_dict = read_configuration('%s' % config)
+        config_dict = read_configuration('{}'.format(config))
         assert config_dict['metadata']['version'] == '10.1.1'
         assert config_dict['metadata']['keywords'] == ['one', 'two']
         assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']
 
     def test_no_config(self, tmpdir):
         with pytest.raises(DistutilsFileError):
-            read_configuration('%s' % tmpdir.join('setup.cfg'))
+            read_configuration('{}'.format(tmpdir.join('setup.cfg')))
 
     def test_ignore_errors(self, tmpdir):
         _, config = fake_env(
@@ -102,9 +102,9 @@ def test_ignore_errors(self, tmpdir):
             '[metadata]\nversion = attr: none.VERSION\nkeywords = one, two\n',
         )
         with pytest.raises(ImportError):
-            read_configuration('%s' % config)
+            read_configuration('{}'.format(config))
 
-        config_dict = read_configuration('%s' % config, ignore_option_errors=True)
+        config_dict = read_configuration('{}'.format(config), ignore_option_errors=True)
 
         assert config_dict['metadata']['keywords'] == ['one', 'two']
         assert 'version' not in config_dict['metadata']
diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py
index 15bbc3b1f0..0a0fc2b668 100644
--- a/setuptools/tests/server.py
+++ b/setuptools/tests/server.py
@@ -44,7 +44,7 @@ def stop(self):
 
     def base_url(self):
         port = self.server_port
-        return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
+        return 'http://127.0.0.1:{}/setuptools/tests/indexes/'.format(port)
 
 
 class RequestRecorder(http.server.BaseHTTPRequestHandler):
@@ -70,11 +70,11 @@ def run(self):
 
     @property
     def netloc(self):
-        return 'localhost:%s' % self.server_port
+        return 'localhost:{}'.format(self.server_port)
 
     @property
     def url(self):
-        return 'http://%s/' % self.netloc
+        return 'http://{}/'.format(self.netloc)
 
 
 def path_to_url(path, authority=None):
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index 5ce96a66f7..be7b7cc01c 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -290,4 +290,4 @@ def test_build_ext_config_handling(tmpdir_cwd):
         cmd=['build'],
         data_stream=(0, 2),
     )
-    assert code == 0, '\nSTDOUT:\n%s\nSTDERR:\n%s' % output
+    assert code == 0, '\nSTDOUT:\n{}\nSTDERR:\n{}'.format(*output)
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index b67373bc37..89b2ea3054 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -311,7 +311,7 @@ def test_maintainer_author(name, attrs, tmpdir):
             for line in pkg_lines:
                 assert not line.startswith(fkey + ':')
         else:
-            line = '%s: %s' % (fkey, val)
+            line = '{}: {}'.format(fkey, val)
             assert line in pkg_lines_set
 
 
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 7b8cb91469..e1d7c1e043 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -24,7 +24,7 @@ def test_dist_fetch_build_egg(tmpdir):
 
     def sdist_with_index(distname, version):
         dist_dir = index.mkdir(distname)
-        dist_sdist = '%s-%s.tar.gz' % (distname, version)
+        dist_sdist = '{}-{}.tar.gz'.format(distname, version)
         make_nspkg_sdist(str(dist_dir.join(dist_sdist)), distname, version)
         with dist_dir.join('index.html').open('w') as fp:
             fp.write(
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 586324be37..a34f9bd525 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -814,7 +814,7 @@ def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch):
                     # Ignored (overridden by setup_attrs)
                     'python-xlib',
                     '0.19',
-                    setup_attrs=dict(setup_requires='dependency @ %s' % dep_url),
+                    setup_attrs=dict(setup_requires='dependency @ {}'.format(dep_url)),
                 )
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
                 run_setup(test_setup_py, ['--version'])
@@ -1103,11 +1103,10 @@ def make_trivial_sdist(dist_path, distname, version):
                     """\
              import setuptools
              setuptools.setup(
-                 name=%r,
-                 version=%r
+                 name={!r},
+                 version={!r}
              )
-         """
-                    % (distname, version)
+         """.format(distname, version)
                 ),
             ),
             ('setup.cfg', ''),
@@ -1131,13 +1130,12 @@ def make_nspkg_sdist(dist_path, distname, version):
         """\
         import setuptools
         setuptools.setup(
-            name=%r,
-            version=%r,
-            packages=%r,
-            namespace_packages=[%r]
+            name={!r},
+            version={!r},
+            packages={!r},
+            namespace_packages=[{!r}]
         )
-    """
-        % (distname, version, packages, nspackage)
+    """.format(distname, version, packages, nspackage)
     )
 
     init = "__import__('pkg_resources').declare_namespace(__name__)"
@@ -1212,7 +1210,7 @@ def create_setup_requires_package(
     test_setup_attrs = {
         'name': 'test_pkg',
         'version': '0.0',
-        'setup_requires': ['%s==%s' % (distname, version)],
+        'setup_requires': ['{}=={}'.format(distname, version)],
         'dependency_links': [os.path.abspath(path)],
     }
     if setup_attrs:
@@ -1233,7 +1231,7 @@ def create_setup_requires_package(
                 section = options
             if isinstance(value, (tuple, list)):
                 value = ';'.join(value)
-            section.append('%s: %s' % (name, value))
+            section.append('{}: {}'.format(name, value))
         test_setup_cfg_contents = DALS(
             """
             [metadata]
@@ -1261,7 +1259,7 @@ def create_setup_requires_package(
     with open(os.path.join(test_pkg, 'setup.py'), 'w', encoding="utf-8") as f:
         f.write(setup_py_template % test_setup_attrs)
 
-    foobar_path = os.path.join(path, '%s-%s.tar.gz' % (distname, version))
+    foobar_path = os.path.join(path, '{}-{}.tar.gz'.format(distname, version))
     make_package(foobar_path, distname, version)
 
     return test_pkg
@@ -1276,12 +1274,12 @@ class TestScriptHeader:
     exe_with_spaces = r'C:\Program Files\Python36\python.exe'
 
     def test_get_script_header(self):
-        expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable))
+        expected = '#!{}\n'.format(ei.nt_quote_arg(os.path.normpath(sys.executable)))
         actual = ei.ScriptWriter.get_header('#!/usr/local/bin/python')
         assert actual == expected
 
     def test_get_script_header_args(self):
-        expected = '#!%s -x\n' % ei.nt_quote_arg(os.path.normpath(sys.executable))
+        expected = '#!{} -x\n'.format(ei.nt_quote_arg(os.path.normpath(sys.executable)))
         actual = ei.ScriptWriter.get_header('#!/usr/bin/python -x')
         assert actual == expected
 
@@ -1289,14 +1287,14 @@ def test_get_script_header_non_ascii_exe(self):
         actual = ei.ScriptWriter.get_header(
             '#!/usr/bin/python', executable=self.non_ascii_exe
         )
-        expected = '#!%s -x\n' % self.non_ascii_exe
+        expected = '#!{} -x\n'.format(self.non_ascii_exe)
         assert actual == expected
 
     def test_get_script_header_exe_with_spaces(self):
         actual = ei.ScriptWriter.get_header(
             '#!/usr/bin/python', executable='"' + self.exe_with_spaces + '"'
         )
-        expected = '#!"%s"\n' % self.exe_with_spaces
+        expected = '#!"{}"\n'.format(self.exe_with_spaces)
         assert actual == expected
 
 
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index a68ecaba4c..24e73be44f 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -38,9 +38,8 @@ def env():
                 '.pydistutils.cfg': DALS(
                     """
                 [egg_info]
-                egg-base = %(egg-base)s
-                """
-                    % env.paths
+                egg-base = {egg-base}
+                """.format(**env.paths)
                 )
             }
         })
diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py
index 2ae5496525..f496fe5917 100644
--- a/setuptools/tests/test_install_scripts.py
+++ b/setuptools/tests/test_install_scripts.py
@@ -38,7 +38,7 @@ def test_sys_executable_escaping_unix(self, tmpdir, monkeypatch):
         Ensure that shebang is not quoted on Unix when getting the Python exe
         from sys.executable.
         """
-        expected = '#!%s\n' % self.unix_exe
+        expected = '#!{}\n'.format(self.unix_exe)
         monkeypatch.setattr('sys.executable', self.unix_exe)
         with tmpdir.as_cwd():
             self._run_install_scripts(str(tmpdir))
@@ -52,7 +52,7 @@ def test_sys_executable_escaping_win32(self, tmpdir, monkeypatch):
         Ensure that shebang is quoted on Windows when getting the Python exe
         from sys.executable and it contains a space.
         """
-        expected = '#!"%s"\n' % self.win32_exe
+        expected = '#!"{}"\n'.format(self.win32_exe)
         monkeypatch.setattr('sys.executable', self.win32_exe)
         with tmpdir.as_cwd():
             self._run_install_scripts(str(tmpdir))
@@ -67,7 +67,7 @@ def test_executable_with_spaces_escaping_unix(self, tmpdir):
         a value with spaces
         is specified using --executable.
         """
-        expected = '#!%s\n' % self.unix_spaces_exe
+        expected = '#!{}\n'.format(self.unix_spaces_exe)
         with tmpdir.as_cwd():
             self._run_install_scripts(str(tmpdir), self.unix_spaces_exe)
             with open(str(tmpdir.join('foo')), 'r', encoding="utf-8") as f:
@@ -81,7 +81,7 @@ def test_executable_arg_escaping_win32(self, tmpdir):
         getting a path with spaces
         from --executable, that is itself properly quoted.
         """
-        expected = '#!"%s"\n' % self.win32_exe
+        expected = '#!"{}"\n'.format(self.win32_exe)
         with tmpdir.as_cwd():
             self._run_install_scripts(str(tmpdir), '"' + self.win32_exe + '"')
             with open(str(tmpdir.join('foo-script.py')), 'r', encoding="utf-8") as f:
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
index ad988d2c5f..333644051f 100644
--- a/setuptools/tests/test_manifest.py
+++ b/setuptools/tests/test_manifest.py
@@ -38,9 +38,8 @@ def make_local_path(s):
     """\
 from setuptools import setup
 
-setup(**%r)
-"""
-    % SETUP_ATTRS
+setup(**{!r})
+""".format(SETUP_ATTRS)
 )
 
 
diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py
index 20db6baaa6..5531ea8660 100644
--- a/setuptools/tests/test_sandbox.py
+++ b/setuptools/tests/test_sandbox.py
@@ -76,7 +76,7 @@ class CantPickleThis(Exception):
             "This Exception is unpickleable because it's not in globals"
 
             def __repr__(self) -> str:
-                return 'CantPickleThis%r' % (self.args,)
+                return 'CantPickleThis{!r}'.format(self.args)
 
         with setuptools.sandbox.ExceptionSaver() as saved_exc:
             raise CantPickleThis('detail')
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 30347190db..3ad1f070f6 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -41,9 +41,8 @@
     """\
 from setuptools import setup
 
-setup(**%r)
-"""
-    % SETUP_ATTRS
+setup(**{!r})
+""".format(SETUP_ATTRS)
 )
 
 EXTENSION = Extension(
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index 5c6bda0454..7e700a14d2 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -176,7 +176,7 @@ def __init__(self, id, **kwargs):
         self._fields = kwargs
 
     def __repr__(self) -> str:
-        return '%s(**%r)' % (self._id, self._fields)
+        return '{}(**{!r})'.format(self._id, self._fields)
 
 
 # Using Any to avoid possible type union issues later in test
@@ -369,9 +369,8 @@ def __repr__(self) -> str:
         id='requires2',
         install_requires="""
         bar
-        foo<=2.0; %r in sys_platform
-        """
-        % sys.platform,
+        foo<=2.0; {!r} in sys_platform
+        """.format(sys.platform),
         requires_txt=DALS(
             """
             bar
@@ -382,9 +381,8 @@ def __repr__(self) -> str:
     dict(
         id='requires3',
         install_requires="""
-        bar; %r != sys_platform
-        """
-        % sys.platform,
+        bar; {!r} != sys_platform
+        """.format(sys.platform),
     ),
     dict(
         id='requires4',
@@ -406,7 +404,7 @@ def __repr__(self) -> str:
     dict(
         id='requires5',
         extras_require={
-            'extra': 'foobar; %r != sys_platform' % sys.platform,
+            'extra': 'foobar; {!r} != sys_platform'.format(sys.platform),
         },
         requires_txt=DALS(
             """
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index fb19f1a65a..08821b7c7a 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -79,7 +79,7 @@ class Wheel:
     def __init__(self, filename) -> None:
         match = WHEEL_NAME(os.path.basename(filename))
         if match is None:
-            raise ValueError('invalid wheel name: %r' % filename)
+            raise ValueError('invalid wheel name: {!r}'.format(filename))
         self.filename = filename
         for k, v in match.groupdict().items():
             setattr(self, k, v)
@@ -122,9 +122,9 @@ def install_as_egg(self, destination_eggdir) -> None:
             self._install_as_egg(destination_eggdir, zf)
 
     def _install_as_egg(self, destination_eggdir, zf):
-        dist_basename = '%s-%s' % (self.project_name, self.version)
+        dist_basename = '{}-{}'.format(self.project_name, self.version)
         dist_info = self.get_dist_info(zf)
-        dist_data = '%s.data' % dist_basename
+        dist_data = '{}.data'.format(dist_basename)
         egg_info = os.path.join(destination_eggdir, 'EGG-INFO')
 
         self._convert_metadata(zf, destination_eggdir, dist_info, egg_info)
@@ -145,7 +145,7 @@ def get_metadata(name):
         wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
         wheel_v1 = parse_version('1.0') <= wheel_version < parse_version('2.0dev0')
         if not wheel_v1:
-            raise ValueError('unsupported wheel format version: %s' % wheel_version)
+            raise ValueError('unsupported wheel format version: {}'.format(wheel_version))
         # Extract to target directory.
         _unpack_zipfile_obj(zf, destination_eggdir)
         # Convert metadata.

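The hunks above apply a single mechanical rewrite across the tree: printf-style
`%` interpolation is replaced by an equivalent `str.format` call. A minimal
runnable sketch of the mapping, with illustrative values not taken from the diff:

    url, reason = 'https://example.com/pkg', 'timed out'   # illustrative values
    old = "Download error for %s: %s" % (url, reason)      # before
    new = "Download error for {}: {}".format(url, reason)  # after
    assert old == new
    # Conversion specifiers carry over as format specs, e.g. %0.1f -> {:0.1f}:
    assert '%0.1f' % 14.0 == '{:0.1f}'.format(14.0) == '14.0'
    # %r maps to the !r conversion:
    assert 'invalid wheel name: %r' % 'x' == 'invalid wheel name: {!r}'.format('x')

Behavior is unchanged throughout; only the formatting API differs.
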
From f28308aba4a62b1602e0dfc750b3016e22fad89a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 10 Sep 2024 12:26:55 +0200
Subject: [PATCH 1354/1761] A round of `ruff format` after `ruff check --fix`

---
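Note: the hunks in this patch change layout only. `ruff format` splits any call
whose line exceeds the configured length limit, moving the arguments onto their
own indented lines. A minimal sketch of the before/after shape, reusing a
message from the diff with an illustrative filename:

    import os

    dist_filename = 'dist/example.tar.gz'  # illustrative value
    # Before: the whole call sat on one over-long line.
    one_line = "Couldn't find a setup script in {}".format(os.path.abspath(dist_filename))
    # After: ruff format wraps the argument list; behavior is identical.
    wrapped = "Couldn't find a setup script in {}".format(
        os.path.abspath(dist_filename)
    )
    assert one_line == wrapped
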
 setuptools/command/bdist_egg.py        |  4 +++-
 setuptools/command/easy_install.py     | 16 ++++++++++++----
 setuptools/command/egg_info.py         |  4 +++-
 setuptools/command/install_egg_info.py |  4 +++-
 setuptools/dist.py                     | 12 +++++++++---
 setuptools/msvc.py                     | 11 ++++++++---
 setuptools/package_index.py            |  5 +++--
 setuptools/tests/test_manifest.py      |  4 +---
 setuptools/tests/test_sdist.py         |  4 +---
 setuptools/wheel.py                    |  4 +++-
 10 files changed, 46 insertions(+), 22 deletions(-)

diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index d991049b5b..44ccdfd240 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -263,7 +263,9 @@ def zap_pyfiles(self):
                     pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                     m = re.match(pattern, name)
                     path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
-                    log.info("Renaming file from [{}] to [{}]".format(path_old, path_new))
+                    log.info(
+                        "Renaming file from [{}] to [{}]".format(path_old, path_new)
+                    )
                     try:
                         os.remove(path_new)
                     except OSError:
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 88d39c043a..53f96aef2b 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -659,7 +659,9 @@ def check_editable(self, spec) -> None:
 
         if os.path.exists(os.path.join(self.build_directory, spec.key)):
             raise DistutilsArgError(
-                "{!r} already exists in {}; can't do a checkout there".format(spec.key, self.build_directory)
+                "{!r} already exists in {}; can't do a checkout there".format(
+                    spec.key, self.build_directory
+                )
             )
 
     @contextlib.contextmanager
@@ -916,11 +918,15 @@ def install_eggs(self, spec, dist_filename, tmpdir) -> list[Distribution]:
             setups = glob(os.path.join(setup_base, '*', 'setup.py'))
             if not setups:
                 raise DistutilsError(
-                    "Couldn't find a setup script in {}".format(os.path.abspath(dist_filename))
+                    "Couldn't find a setup script in {}".format(
+                        os.path.abspath(dist_filename)
+                    )
                 )
             if len(setups) > 1:
                 raise DistutilsError(
-                    "Multiple setup scripts in {}".format(os.path.abspath(dist_filename))
+                    "Multiple setup scripts in {}".format(
+                        os.path.abspath(dist_filename)
+                    )
                 )
             setup_script = setups[0]
 
@@ -1112,7 +1118,9 @@ def install_wheel(self, wheel_path, tmpdir):
             self.execute(
                 wheel.install_as_egg,
                 (destination,),
-                ("Installing {} to {}").format(os.path.basename(wheel_path), os.path.dirname(destination)),
+                ("Installing {} to {}").format(
+                    os.path.basename(wheel_path), os.path.dirname(destination)
+                ),
             )
         finally:
             update_dist_caches(destination, fix_zipimporter_caches=False)
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 9a9ea63322..335eeb4efd 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -231,7 +231,9 @@ def finalize_options(self) -> None:
             packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version))
         except ValueError as e:
             raise distutils.errors.DistutilsOptionError(
-                "Invalid distribution name or version syntax: {}-{}".format(self.egg_name, self.egg_version)
+                "Invalid distribution name or version syntax: {}-{}".format(
+                    self.egg_name, self.egg_version
+                )
             ) from e
 
         if self.egg_base is None:
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
index 991531caa0..42cfe4e64d 100644
--- a/setuptools/command/install_egg_info.py
+++ b/setuptools/command/install_egg_info.py
@@ -36,7 +36,9 @@ def run(self) -> None:
             self.execute(os.unlink, (self.target,), "Removing " + self.target)
         if not self.dry_run:
             ensure_directory(self.target)
-        self.execute(self.copytree, (), "Copying {} to {}".format(self.source, self.target))
+        self.execute(
+            self.copytree, (), "Copying {} to {}".format(self.source, self.target)
+        )
         self.install_namespaces()
 
     def get_outputs(self):
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 182a03b36a..13a0b43448 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -85,7 +85,9 @@ def check_importable(dist, attr, value):
         assert not ep.extras
     except (TypeError, ValueError, AttributeError, AssertionError) as e:
         raise DistutilsSetupError(
-            "{!r} must be importable 'module:attrs' string (got {!r})".format(attr, value)
+            "{!r} must be importable 'module:attrs' string (got {!r})".format(
+                attr, value
+            )
         ) from e
 
 
@@ -611,7 +613,9 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
                     setattr(command_obj, option, value)
                 else:
                     raise DistutilsOptionError(
-                        "error in {}: command '{}' has no such option '{}'".format(source, command_name, option)
+                        "error in {}: command '{}' has no such option '{}'".format(
+                            source, command_name, option
+                        )
                     )
             except ValueError as e:
                 raise DistutilsOptionError(e) from e
@@ -835,7 +839,9 @@ def _include_misc(self, name: str, value: _Sequence) -> None:
         try:
             old = getattr(self, name)
         except AttributeError as e:
-            raise DistutilsSetupError("{}: No such distribution setting".format(name)) from e
+            raise DistutilsSetupError(
+                "{}: No such distribution setting".format(name)
+            ) from e
         if old is None:
             setattr(self, name, value)
         elif not isinstance(old, _sequence):
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 55e5cd21fd..727897d1d3 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -561,7 +561,8 @@ def _guess_vc_legacy(self):
             path
         """
         default = os.path.join(
-            self.ProgramFilesx86, r'Microsoft Visual Studio {:0.1f}\VC'.format(self.vs_ver)
+            self.ProgramFilesx86,
+            r'Microsoft Visual Studio {:0.1f}\VC'.format(self.vs_ver),
         )
 
         # Try to get "VC++ for Python" path from registry as default path
@@ -696,7 +697,9 @@ def FSharpInstallDir(self):
         str
             path
         """
-        path = os.path.join(self.ri.visualstudio, r'{:0.1f}\Setup\F#'.format(self.vs_ver))
+        path = os.path.join(
+            self.ri.visualstudio, r'{:0.1f}\Setup\F#'.format(self.vs_ver)
+        )
         return self.ri.lookup(path, 'productdir') or ''
 
     @property
@@ -714,7 +717,9 @@ def UniversalCRTSdkDir(self):
 
         # Find path of the more recent Kit
         for ver in vers:
-            sdkdir = self.ri.lookup(self.ri.windows_kits_roots, 'kitsroot{}'.format(ver))
+            sdkdir = self.ri.lookup(
+                self.ri.windows_kits_roots, 'kitsroot{}'.format(ver)
+            )
             if sdkdir:
                 return sdkdir or ''
 
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 24fae988a2..373c047b93 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -530,8 +530,9 @@ def check_hash(self, checker, filename, tfp) -> None:
             tfp.close()
             os.unlink(filename)
             raise DistutilsError(
-                "{} validation failed for {}; "
-                "possible download problem?".format(checker.hash.name, os.path.basename(filename))
+                "{} validation failed for {}; " "possible download problem?".format(
+                    checker.hash.name, os.path.basename(filename)
+                )
             )
 
     def add_find_links(self, urls) -> None:
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
index 333644051f..a821cf778b 100644
--- a/setuptools/tests/test_manifest.py
+++ b/setuptools/tests/test_manifest.py
@@ -34,13 +34,11 @@ def make_local_path(s):
     'packages': ['app'],
 }
 
-SETUP_PY = (
-    """\
+SETUP_PY = """\
 from setuptools import setup
 
 setup(**{!r})
 """.format(SETUP_ATTRS)
-)
 
 
 @contextlib.contextmanager
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 3ad1f070f6..8f5e620f0d 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -37,13 +37,11 @@
     'data_files': [("data", [os.path.join("d", "e.dat")])],
 }
 
-SETUP_PY = (
-    """\
+SETUP_PY = """\
 from setuptools import setup
 
 setup(**{!r})
 """.format(SETUP_ATTRS)
-)
 
 EXTENSION = Extension(
     name="sdist_test.f",
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 08821b7c7a..9ccd9e7ede 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -145,7 +145,9 @@ def get_metadata(name):
         wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
         wheel_v1 = parse_version('1.0') <= wheel_version < parse_version('2.0dev0')
         if not wheel_v1:
-            raise ValueError('unsupported wheel format version: {}'.format(wheel_version))
+            raise ValueError(
+                'unsupported wheel format version: {}'.format(wheel_version)
+            )
         # Extract to target directory.
         _unpack_zipfile_obj(zf, destination_eggdir)
         # Convert metadata.

From c0c1b8b60e0f4af2444f47054f93ef4af7bbf0ea Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 10 Sep 2024 12:27:29 +0200
Subject: [PATCH 1355/1761] Enforce ruff/pyupgrade rule UP032

UP032 Use f-string instead of `format` call
---
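Note: UP032 rewrites `str.format` calls whose arguments are plain expressions
into f-strings. A minimal runnable sketch with illustrative values:

    import sys

    name, version = 'example', '1.0'
    assert "{}-{}".format(name, version) == f"{name}-{version}"
    # Conversions and format specs carry over unchanged:
    assert "{!r}".format(sys.platform) == f"{sys.platform!r}"
    assert "{:0.1f}".format(14.0) == f"{14.0:0.1f}"
    # Calls that unpack starred arguments, e.g. '{}'.format(*parts), are
    # generally left for manual conversion, since the arguments cannot be
    # inlined into an f-string directly.
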
 pkg_resources/__init__.py                 | 42 ++++++-------
 pkg_resources/tests/test_pkg_resources.py | 10 ++--
 ruff.toml                                 |  1 -
 setuptools/__init__.py                    |  4 +-
 setuptools/_core_metadata.py              |  4 +-
 setuptools/_imp.py                        |  6 +-
 setuptools/archive_util.py                |  8 +--
 setuptools/command/alias.py               |  4 +-
 setuptools/command/bdist_egg.py           |  2 +-
 setuptools/command/build_clib.py          | 16 ++---
 setuptools/command/build_ext.py           |  2 +-
 setuptools/command/build_py.py            |  4 +-
 setuptools/command/dist_info.py           |  2 +-
 setuptools/command/easy_install.py        | 38 +++++-------
 setuptools/command/egg_info.py            | 14 ++---
 setuptools/command/install_egg_info.py    |  2 +-
 setuptools/command/sdist.py               |  4 +-
 setuptools/command/setopt.py              |  2 +-
 setuptools/config/expand.py               |  2 +-
 setuptools/config/setupcfg.py             | 14 +----
 setuptools/depends.py                     |  2 +-
 setuptools/dist.py                        | 24 ++++----
 setuptools/monkey.py                      |  2 +-
 setuptools/msvc.py                        | 72 +++++++++++------------
 setuptools/package_index.py               | 36 +++++-------
 setuptools/tests/config/test_setupcfg.py  |  6 +-
 setuptools/tests/server.py                |  6 +-
 setuptools/tests/test_core_metadata.py    |  2 +-
 setuptools/tests/test_dist.py             |  2 +-
 setuptools/tests/test_easy_install.py     | 44 +++++++-------
 setuptools/tests/test_egg_info.py         |  8 +--
 setuptools/tests/test_install_scripts.py  |  8 +--
 setuptools/tests/test_manifest.py         |  6 +-
 setuptools/tests/test_sandbox.py          |  2 +-
 setuptools/tests/test_sdist.py            |  6 +-
 setuptools/tests/test_wheel.py            | 18 +++---
 setuptools/tests/test_windows_wrappers.py |  4 +-
 setuptools/wheel.py                       |  8 +--
 38 files changed, 196 insertions(+), 241 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 94dde1f218..044c1a04ef 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -492,7 +492,7 @@ def compatible_platforms(provided: str | None, required: str | None) -> bool:
             provDarwin = darwinVersionString.match(provided)
             if provDarwin:
                 dversion = int(provDarwin.group(1))
-                macosversion = "{}.{}".format(reqMac.group(1), reqMac.group(2))
+                macosversion = f"{reqMac.group(1)}.{reqMac.group(2)}"
                 if (
                     dversion == 7
                     and macosversion >= "10.3"
@@ -1316,7 +1316,7 @@ def __iadd__(self, other: Distribution | Environment) -> Self:
                 for dist in other[project]:
                     self.add(dist)
         else:
-            raise TypeError("Can't add {!r} to environment".format(other))
+            raise TypeError(f"Can't add {other!r} to environment")
         return self
 
     def __add__(self, other: Distribution | Environment) -> Self:
@@ -1699,7 +1699,7 @@ def get_metadata(self, name: str) -> str:
         except UnicodeDecodeError as exc:
             # Include the path in the error message to simplify
             # troubleshooting, and without changing the exception type.
-            exc.reason += ' in {} file at path: {}'.format(name, path)
+            exc.reason += f' in {name} file at path: {path}'
             raise
 
     def get_metadata_lines(self, name: str) -> Iterator[str]:
@@ -2018,7 +2018,7 @@ def _zipinfo_name(self, fspath):
             return ''
         if fspath.startswith(self.zip_pre):
             return fspath[len(self.zip_pre) :]
-        raise AssertionError("{} is not a subpath of {}".format(fspath, self.zip_pre))
+        raise AssertionError(f"{fspath} is not a subpath of {self.zip_pre}")
 
     def _parts(self, zip_path):
         # Convert a zipfile subpath into an egg-relative path part list.
@@ -2026,7 +2026,7 @@ def _parts(self, zip_path):
         fspath = self.zip_pre + zip_path
         if fspath.startswith(self.egg_root + os.sep):
             return fspath[len(self.egg_root) + 1 :].split(os.sep)
-        raise AssertionError("{} is not a subpath of {}".format(fspath, self.egg_root))
+        raise AssertionError(f"{fspath} is not a subpath of {self.egg_root}")
 
     @property
     def zipinfo(self):
@@ -2729,7 +2729,7 @@ def __init__(
         self.dist = dist
 
     def __str__(self) -> str:
-        s = "{} = {}".format(self.name, self.module_name)
+        s = f"{self.name} = {self.module_name}"
         if self.attrs:
             s += ':' + '.'.join(self.attrs)
         if self.extras:
@@ -2737,7 +2737,7 @@ def __str__(self) -> str:
         return s
 
     def __repr__(self) -> str:
-        return "EntryPoint.parse({!r})".format(str(self))
+        return f"EntryPoint.parse({str(self)!r})"
 
     @overload
     def load(
@@ -3049,9 +3049,7 @@ def version(self):
             version = self._get_version()
             if version is None:
                 path = self._get_metadata_path_for_display(self.PKG_INFO)
-                msg = ("Missing 'Version:' header and/or {} file at path: {}").format(
-                    self.PKG_INFO, path
-                )
+                msg = (f"Missing 'Version:' header and/or {self.PKG_INFO} file at path: {path}")
                 raise ValueError(msg, self) from e
 
             return version
@@ -3108,7 +3106,7 @@ def requires(self, extras: Iterable[str] = ()) -> list[Requirement]:
                 deps.extend(dm[safe_extra(ext)])
             except KeyError as e:
                 raise UnknownExtra(
-                    "{} has no such extra feature {!r}".format(self, ext)
+                    f"{self} has no such extra feature {ext!r}"
                 ) from e
         return deps
 
@@ -3150,11 +3148,7 @@ def activate(self, path: list[str] | None = None, replace: bool = False) -> None
 
     def egg_name(self):
         """Return what this distribution's standard .egg filename should be"""
-        filename = "{}-{}-py{}".format(
-            to_filename(self.project_name),
-            to_filename(self.version),
-            self.py_version or PY_MAJOR,
-        )
+        filename = f"{to_filename(self.project_name)}-{to_filename(self.version)}-py{self.py_version or PY_MAJOR}"
 
         if self.platform:
             filename += '-' + self.platform
@@ -3162,7 +3156,7 @@ def egg_name(self):
 
     def __repr__(self) -> str:
         if self.location:
-            return "{} ({})".format(self, self.location)
+            return f"{self} ({self.location})"
         else:
             return str(self)
 
@@ -3172,7 +3166,7 @@ def __str__(self) -> str:
         except ValueError:
             version = None
         version = version or "[unknown version]"
-        return "{} {}".format(self.project_name, version)
+        return f"{self.project_name} {version}"
 
     def __getattr__(self, attr: str):
         """Delegate all unrecognized public attributes to .metadata provider"""
@@ -3200,9 +3194,9 @@ def from_filename(
     def as_requirement(self):
         """Return a ``Requirement`` that matches this distribution exactly"""
         if isinstance(self.parsed_version, packaging.version.Version):
-            spec = "{}=={}".format(self.project_name, self.parsed_version)
+            spec = f"{self.project_name}=={self.parsed_version}"
         else:
-            spec = "{}==={}".format(self.project_name, self.parsed_version)
+            spec = f"{self.project_name}==={self.parsed_version}"
 
         return Requirement.parse(spec)
 
@@ -3210,7 +3204,7 @@ def load_entry_point(self, group: str, name: str) -> _ResolvedEntryPoint:
         """Return the `name` entry point of `group` or raise ImportError"""
         ep = self.get_entry_info(group, name)
         if ep is None:
-            raise ImportError("Entry point {!r} not found".format((group, name)))
+            raise ImportError(f"Entry point {(group, name)!r} not found")
         return ep.load()
 
     @overload
@@ -3327,8 +3321,8 @@ def check_version_conflict(self):
             ):
                 continue
             issue_warning(
-                "Module {} was already imported from {}, but {} is being added"
-                " to sys.path".format(modname, fn, self.location),
+                f"Module {modname} was already imported from {fn}, but {self.location} is being added"
+                " to sys.path",
             )
 
     def has_version(self) -> bool:
@@ -3512,7 +3506,7 @@ def __hash__(self) -> int:
         return self.__hash
 
     def __repr__(self) -> str:
-        return "Requirement.parse({!r})".format(str(self))
+        return f"Requirement.parse({str(self)!r})"
 
     @staticmethod
     def parse(s: str | Iterable[str]) -> Requirement:
diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 2e5526d1aa..27ddaab42f 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -214,8 +214,8 @@ def test_get_metadata__bad_utf8(tmpdir):
         "codec can't decode byte 0xe9 in position 1: "
         'invalid continuation byte in METADATA file at path: '
     )
-    assert expected in actual, 'actual: {}'.format(actual)
-    assert actual.endswith(metadata_path), 'actual: {}'.format(actual)
+    assert expected in actual, f'actual: {actual}'
+    assert actual.endswith(metadata_path), f'actual: {actual}'
 
 
 def make_distribution_no_version(tmpdir, basename):
@@ -252,12 +252,10 @@ def test_distribution_version_missing(
     """
     Test Distribution.version when the "Version" header is missing.
     """
-    basename = 'foo.{}'.format(suffix)
+    basename = f'foo.{suffix}'
     dist, dist_dir = make_distribution_no_version(tmpdir, basename)
 
-    expected_text = ("Missing 'Version:' header and/or {} file at path: ").format(
-        expected_filename
-    )
+    expected_text = (f"Missing 'Version:' header and/or {expected_filename} file at path: ")
     metadata_path = os.path.join(dist_dir, expected_filename)
 
     # Now check the exception raised when the "version" attribute is accessed.
diff --git a/ruff.toml b/ruff.toml
index e7005e27d9..b9c4a8f569 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -35,7 +35,6 @@ ignore = [
 	"TRY003", # raise-vanilla-args, avoid multitude of exception classes
 	"TRY301", # raise-within-try, it's handy
 	"UP015", # redundant-open-modes, explicit is preferred
-	"UP032", # temporarily disabled
 	"UP038", # Using `X | Y` in `isinstance` call is slower and more verbose https://github.com/astral-sh/ruff/issues/7871
 	# Only enforcing return type annotations for public functions
 	"ANN202", # missing-return-type-private-function
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index ab3be9581c..8b83a5271d 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -182,7 +182,7 @@ def _ensure_stringlike(self, option, what, default=None):
             return default
         elif not isinstance(val, str):
             raise DistutilsOptionError(
-                "'{}' must be a {} (got `{}`)".format(option, what, val)
+                f"'{option}' must be a {what} (got `{val}`)"
             )
         return val
 
@@ -210,7 +210,7 @@ def ensure_string_list(self, option: str) -> None:
                 ok = False
             if not ok:
                 raise DistutilsOptionError(
-                    "'{}' must be a list of strings (got {!r})".format(option, val)
+                    f"'{option}' must be a list of strings (got {val!r})"
                 )
 
     @overload
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 72b745dc93..a5e18a4006 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -150,7 +150,7 @@ def write_pkg_file(self, file):  # noqa: C901  # is too complex (14)  # FIXME
     version = self.get_metadata_version()
 
     def write_field(key, value):
-        file.write("{}: {}\n".format(key, value))
+        file.write(f"{key}: {value}\n")
 
     write_field('Metadata-Version', str(version))
     write_field('Name', self.get_name())
@@ -209,7 +209,7 @@ def write_field(key, value):
 
     long_description = self.get_long_description()
     if long_description:
-        file.write("\n{}".format(long_description))
+        file.write(f"\n{long_description}")
         if not long_description.endswith("\n"):
             file.write("\n")
 
diff --git a/setuptools/_imp.py b/setuptools/_imp.py
index f0fba01e50..f1d9f29218 100644
--- a/setuptools/_imp.py
+++ b/setuptools/_imp.py
@@ -29,7 +29,7 @@ def find_module(module, paths=None):
     """Just like 'imp.find_module()', but with package support"""
     spec = find_spec(module, paths)
     if spec is None:
-        raise ImportError("Can't find {}".format(module))
+        raise ImportError(f"Can't find {module}")
     if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
         spec = importlib.util.spec_from_loader('__init__.py', spec.loader)
 
@@ -76,12 +76,12 @@ def find_module(module, paths=None):
 def get_frozen_object(module, paths=None):
     spec = find_spec(module, paths)
     if not spec:
-        raise ImportError("Can't find {}".format(module))
+        raise ImportError(f"Can't find {module}")
     return spec.loader.get_code(module)
 
 
 def get_module(module, paths, info):
     spec = find_spec(module, paths)
     if not spec:
-        raise ImportError("Can't find {}".format(module))
+        raise ImportError(f"Can't find {module}")
     return module_from_spec(spec)
diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py
index 7ef5294b55..1a02010bb2 100644
--- a/setuptools/archive_util.py
+++ b/setuptools/archive_util.py
@@ -62,7 +62,7 @@ def unpack_archive(
         else:
             return
     else:
-        raise UnrecognizedFormat("Not a recognized archive type: {}".format(filename))
+        raise UnrecognizedFormat(f"Not a recognized archive type: {filename}")
 
 
 def unpack_directory(filename, extract_dir, progress_filter=default_filter) -> None:
@@ -71,7 +71,7 @@ def unpack_directory(filename, extract_dir, progress_filter=default_filter) -> N
     Raises ``UnrecognizedFormat`` if `filename` is not a directory
     """
     if not os.path.isdir(filename):
-        raise UnrecognizedFormat("{} is not a directory".format(filename))
+        raise UnrecognizedFormat(f"{filename} is not a directory")
 
     paths = {
         filename: ('', extract_dir),
@@ -101,7 +101,7 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter) -> Non
     """
 
     if not zipfile.is_zipfile(filename):
-        raise UnrecognizedFormat("{} is not a zip file".format(filename))
+        raise UnrecognizedFormat(f"{filename} is not a zip file")
 
     with zipfile.ZipFile(filename) as z:
         _unpack_zipfile_obj(z, extract_dir, progress_filter)
@@ -198,7 +198,7 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter) -> boo
         tarobj = tarfile.open(filename)
     except tarfile.TarError as e:
         raise UnrecognizedFormat(
-            "{} is not a compressed or uncompressed tar file".format(filename)
+            f"{filename} is not a compressed or uncompressed tar file"
         ) from e
 
     for member, final_dst in _iter_open_tar(
diff --git a/setuptools/command/alias.py b/setuptools/command/alias.py
index 9903a23503..b8d74af71d 100644
--- a/setuptools/command/alias.py
+++ b/setuptools/command/alias.py
@@ -55,7 +55,7 @@ def run(self) -> None:
                 print("setup.py alias", format_alias(alias, aliases))
                 return
             else:
-                print("No alias definition found for {!r}".format(alias))
+                print(f"No alias definition found for {alias!r}")
                 return
         else:
             alias = self.args[0]
@@ -73,5 +73,5 @@ def format_alias(name, aliases):
     elif source == config_file('local'):
         source = ''
     else:
-        source = '--filename={!r}'.format(source)
+        source = f'--filename={source!r}'
     return source + name + ' ' + command
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 44ccdfd240..21e3e8f14d 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -264,7 +264,7 @@ def zap_pyfiles(self):
                     m = re.match(pattern, name)
                     path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
                     log.info(
-                        "Renaming file from [{}] to [{}]".format(path_old, path_new)
+                        f"Renaming file from [{path_old}] to [{path_new}]"
                     )
                     try:
                         os.remove(path_new)
diff --git a/setuptools/command/build_clib.py b/setuptools/command/build_clib.py
index f2705cecdc..f376f4ce4d 100644
--- a/setuptools/command/build_clib.py
+++ b/setuptools/command/build_clib.py
@@ -29,9 +29,9 @@ def build_libraries(self, libraries) -> None:
             sources = build_info.get('sources')
             if sources is None or not isinstance(sources, (list, tuple)):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '{}'), "
+                    f"in 'libraries' option (library '{lib_name}'), "
                     "'sources' must be present and must be "
-                    "a list of source filenames".format(lib_name)
+                    "a list of source filenames"
                 )
             sources = sorted(list(sources))
 
@@ -43,9 +43,9 @@ def build_libraries(self, libraries) -> None:
             obj_deps = build_info.get('obj_deps', dict())
             if not isinstance(obj_deps, dict):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '{}'), "
+                    f"in 'libraries' option (library '{lib_name}'), "
                     "'obj_deps' must be a dictionary of "
-                    "type 'source: list'".format(lib_name)
+                    "type 'source: list'"
                 )
             dependencies = []
 
@@ -54,9 +54,9 @@ def build_libraries(self, libraries) -> None:
             global_deps = obj_deps.get('', list())
             if not isinstance(global_deps, (list, tuple)):
                 raise DistutilsSetupError(
-                    "in 'libraries' option (library '{}'), "
+                    f"in 'libraries' option (library '{lib_name}'), "
                     "'obj_deps' must be a dictionary of "
-                    "type 'source: list'".format(lib_name)
+                    "type 'source: list'"
                 )
 
             # Build the list to be used by newer_pairwise_group
@@ -67,9 +67,9 @@ def build_libraries(self, libraries) -> None:
                 extra_deps = obj_deps.get(source, list())
                 if not isinstance(extra_deps, (list, tuple)):
                     raise DistutilsSetupError(
-                        "in 'libraries' option (library '{}'), "
+                        f"in 'libraries' option (library '{lib_name}'), "
                         "'obj_deps' must be a dictionary of "
-                        "type 'source: list'".format(lib_name)
+                        "type 'source: list'"
                     )
                 src_deps.extend(extra_deps)
                 dependencies.append(src_deps)
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index b8a395bafa..6e56ae4ca2 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -360,7 +360,7 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
                     "   global __bootstrap__, __file__, __loader__",
                     "   import sys, os, pkg_resources, importlib.util" + if_dl(", dl"),
                     "   __file__ = pkg_resources.resource_filename"
-                    "(__name__,{!r})".format(os.path.basename(ext._file_name)),
+                    f"(__name__,{os.path.basename(ext._file_name)!r})",
                     "   del __bootstrap__",
                     "   if '__loader__' in globals():",
                     "       del __loader__",
diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py
index f17cd66fe9..2f6fcb7cdc 100644
--- a/setuptools/command/build_py.py
+++ b/setuptools/command/build_py.py
@@ -259,10 +259,10 @@ def check_package(self, package, package_dir):
             contents = f.read()
         if b'declare_namespace' not in contents:
             raise distutils.errors.DistutilsError(
-                "Namespace package problem: {} is a namespace package, but "
+                f"Namespace package problem: {package} is a namespace package, but "
                 "its\n__init__.py does not call declare_namespace()! Please "
                 'fix it.\n(See the setuptools manual under '
-                '"Namespace Packages" for details.)\n"'.format(package)
+                '"Namespace Packages" for details.)\n"'
             )
         return init_py
 
diff --git a/setuptools/command/dist_info.py b/setuptools/command/dist_info.py
index 0192ebb260..dca01ff0ce 100644
--- a/setuptools/command/dist_info.py
+++ b/setuptools/command/dist_info.py
@@ -95,7 +95,7 @@ def run(self) -> None:
         egg_info_dir = self.egg_info.egg_info
         assert os.path.isdir(egg_info_dir), ".egg-info dir should have been created"
 
-        log.info("creating '{}'".format(os.path.abspath(self.dist_info_dir)))
+        log.info(f"creating '{os.path.abspath(self.dist_info_dir)}'")
         bdist_wheel = self.get_finalized_command('bdist_wheel')
 
         # TODO: if bdist_wheel if merged into setuptools, just add "keep_egg_info" there
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 53f96aef2b..379beede14 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -148,7 +148,7 @@ class easy_install(Command):
             None,
             "Don't load find-links defined in packages being installed",
         ),
-        ('user', None, "install in user site-package '{}'".format(site.USER_SITE)),
+        ('user', None, f"install in user site-package '{site.USER_SITE}'"),
     ]
     boolean_options = [
         'zip-ok',
@@ -446,7 +446,7 @@ def run(self, show_deprecation: bool = True) -> None:
                 self.execute(
                     file_util.write_file,
                     (self.record, outputs),
-                    "writing list of installed files to '{}'".format(self.record),
+                    f"writing list of installed files to '{self.record}'",
                 )
             self.warn_deprecated_options()
         finally:
@@ -461,7 +461,7 @@ def pseudo_tempname(self):
             pid = os.getpid()
         except Exception:
             pid = random.randint(0, sys.maxsize)
-        return os.path.join(self.install_dir, "test-easy-install-{}".format(pid))
+        return os.path.join(self.install_dir, f"test-easy-install-{pid}")
 
     def warn_deprecated_options(self) -> None:
         pass
@@ -649,8 +649,8 @@ def add_output(self, path) -> None:
     def not_editable(self, spec) -> None:
         if self.editable:
             raise DistutilsArgError(
-                "Invalid argument {!r}: you can't use filenames or URLs "
-                "with --editable (except via the --find-links option).".format(spec)
+                f"Invalid argument {spec!r}: you can't use filenames or URLs "
+                "with --editable (except via the --find-links option)."
             )
 
     def check_editable(self, spec) -> None:
@@ -659,9 +659,7 @@ def check_editable(self, spec) -> None:
 
         if os.path.exists(os.path.join(self.build_directory, spec.key)):
             raise DistutilsArgError(
-                "{!r} already exists in {}; can't do a checkout there".format(
-                    spec.key, self.build_directory
-                )
+                f"{spec.key!r} already exists in {self.build_directory}; can't do a checkout there"
             )
 
     @contextlib.contextmanager
@@ -699,7 +697,7 @@ def easy_install(self, spec, deps: bool = False) -> Distribution | None:
                 self.local_index,
             )
             if dist is None:
-                msg = "Could not find suitable distribution for {!r}".format(spec)
+                msg = f"Could not find suitable distribution for {spec!r}"
                 if self.always_copy:
                     msg += " (--always-copy skips system and development eggs)"
                 raise DistutilsError(msg)
@@ -918,15 +916,11 @@ def install_eggs(self, spec, dist_filename, tmpdir) -> list[Distribution]:
             setups = glob(os.path.join(setup_base, '*', 'setup.py'))
             if not setups:
                 raise DistutilsError(
-                    "Couldn't find a setup script in {}".format(
-                        os.path.abspath(dist_filename)
-                    )
+                    f"Couldn't find a setup script in {os.path.abspath(dist_filename)}"
                 )
             if len(setups) > 1:
                 raise DistutilsError(
-                    "Multiple setup scripts in {}".format(
-                        os.path.abspath(dist_filename)
-                    )
+                    f"Multiple setup scripts in {os.path.abspath(dist_filename)}"
                 )
             setup_script = setups[0]
 
@@ -1004,7 +998,7 @@ def install_exe(self, dist_filename, tmpdir):
         cfg = extract_wininst_cfg(dist_filename)
         if cfg is None:
             raise DistutilsError(
-                "{} is not a valid distutils Windows .exe".format(dist_filename)
+                f"{dist_filename} is not a valid distutils Windows .exe"
             )
         # Create a dummy distribution object until we build the real distro
         dist = Distribution(
@@ -1118,9 +1112,7 @@ def install_wheel(self, wheel_path, tmpdir):
             self.execute(
                 wheel.install_as_egg,
                 (destination,),
-                ("Installing {} to {}").format(
-                    os.path.basename(wheel_path), os.path.dirname(destination)
-                ),
+                (f"Installing {os.path.basename(wheel_path)} to {os.path.dirname(destination)}"),
             )
         finally:
             update_dist_caches(destination, fix_zipimporter_caches=False)
@@ -1196,7 +1188,7 @@ def run_setup(self, setup_script, setup_base, args) -> None:
         try:
             run_setup(setup_script, args)
         except SystemExit as v:
-            raise DistutilsError("Setup script exited with {}".format(v.args[0])) from v
+            raise DistutilsError(f"Setup script exited with {v.args[0]}") from v
 
     def build_and_install(self, setup_script, setup_base):
         args = ['bdist_egg', '--dist-dir']
@@ -1379,7 +1371,7 @@ def create_home_path(self) -> None:
         home = convert_path(os.path.expanduser("~"))
         for path in only_strs(self.config_vars.values()):
             if path.startswith(home) and not os.path.isdir(path):
-                self.debug_print("os.makedirs('{}', 0o700)".format(path))
+                self.debug_print(f"os.makedirs('{path}', 0o700)")
                 os.makedirs(path, 0o700)
 
     INSTALL_SCHEMES = dict(
@@ -1604,7 +1596,7 @@ def get_exe_prefixes(exe_filename):
                 for pth in yield_lines(contents):
                     pth = pth.strip().replace('\\', '/')
                     if not pth.startswith('import'):
-                        prefixes.append((('{}/{}/'.format(parts[0], pth)), ''))
+                        prefixes.append(((f'{parts[0]}/{pth}/'), ''))
     finally:
         z.close()
     prefixes = [(x.lower(), y) for x, y in prefixes]
@@ -2310,7 +2302,7 @@ def get_win_launcher(type):
 
     Returns the executable as a byte string.
     """
-    launcher_fn = '{}.exe'.format(type)
+    launcher_fn = f'{type}.exe'
     if is_64bit():
         if get_platform() == "win-arm64":
             launcher_fn = launcher_fn.replace(".", "-arm64.")
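
Where a message spans several source lines, only the pieces that
interpolate values need the f prefix; adjacent string literals are still
concatenated at compile time, so f-strings and plain strings mix freely,
as in the not_editable hunk above. A sketch:

    spec = "example-dist"  # hypothetical requirement spec

    # Adjacent literals concatenate at compile time; only the first piece
    # interpolates, so only it carries the 'f' prefix.
    msg = (
        f"Invalid argument {spec!r}: you can't use filenames or URLs "
        "with --editable (except via the --find-links option)."
    )
    assert msg.startswith("Invalid argument 'example-dist'")
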
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 335eeb4efd..f77631168f 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -48,7 +48,7 @@ def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
     chunks = glob.split(os.path.sep)
 
     sep = re.escape(os.sep)
-    valid_char = '[^{}]'.format(sep)
+    valid_char = f'[^{sep}]'
 
     for c, chunk in enumerate(chunks):
         last_chunk = c == len(chunks) - 1
@@ -60,7 +60,7 @@ def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
                 pat += '.*'
             else:
                 # Match '(name/)*'
-                pat += '(?:{}+{})*'.format(valid_char, sep)
+                pat += f'(?:{valid_char}+{sep})*'
             continue  # Break here as the whole path component has been handled
 
         # Find any special characters in the remainder
@@ -102,7 +102,7 @@ def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
                         inner = inner[1:]
 
                     char_class += re.escape(inner)
-                    pat += '[{}]'.format(char_class)
+                    pat += f'[{char_class}]'
 
                     # Skip to the end ]
                     i = inner_i
@@ -231,9 +231,7 @@ def finalize_options(self) -> None:
             packaging.requirements.Requirement(spec % (self.egg_name, self.egg_version))
         except ValueError as e:
             raise distutils.errors.DistutilsOptionError(
-                "Invalid distribution name or version syntax: {}-{}".format(
-                    self.egg_name, self.egg_version
-                )
+                f"Invalid distribution name or version syntax: {self.egg_name}-{self.egg_version}"
             ) from e
 
         if self.egg_base is None:
@@ -503,7 +501,7 @@ def _safe_path(self, path):
         # To avoid accidental trans-codings errors, first to unicode
         u_path = unicode_utils.filesys_decode(path)
         if u_path is None:
-            log.warn("'{}' in unexpected encoding -- skipping".format(path))
+            log.warn(f"'{path}' in unexpected encoding -- skipping")
             return False
 
         # Must ensure utf-8 encodability
@@ -565,7 +563,7 @@ def write_manifest(self) -> None:
 
         # Now _repairs should encodability, but not unicode
         files = [self._manifest_normalize(f) for f in self.filelist.files]
-        msg = "writing manifest file '{}'".format(self.manifest)
+        msg = f"writing manifest file '{self.manifest}'"
         self.execute(write_file, (self.manifest, files), msg)
 
     def warn(self, msg) -> None:
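
In translate_pattern the f-strings assemble a regular expression, so the
interpolated pieces must already be regex-safe; note that sep is passed
through re.escape() before being embedded. A reduced sketch of the same
idea:

    import os
    import re

    # Escape the separator first, then embed it, as translate_pattern does.
    sep = re.escape(os.sep)
    valid_char = f'[^{sep}]'               # one non-separator character
    name_run = f'(?:{valid_char}+{sep})*'  # match '(name/)*'

    # On POSIX this matches zero or more 'dir/' components.
    assert re.fullmatch(name_run + '.*', 'a/b/c.txt')
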
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
index 42cfe4e64d..cbf125c764 100644
--- a/setuptools/command/install_egg_info.py
+++ b/setuptools/command/install_egg_info.py
@@ -37,7 +37,7 @@ def run(self) -> None:
         if not self.dry_run:
             ensure_directory(self.target)
         self.execute(
-            self.copytree, (), "Copying {} to {}".format(self.source, self.target)
+            self.copytree, (), f"Copying {self.source} to {self.target}"
         )
         self.install_namespaces()
 
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index 03a70ce9c9..9631cf3114 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -53,7 +53,7 @@ class sdist(orig.sdist):
     negative_opt: ClassVar[dict[str, str]] = {}
 
     README_EXTENSIONS = ['', '.rst', '.txt', '.md']
-    READMES = tuple('README{}'.format(ext) for ext in README_EXTENSIONS)
+    READMES = tuple(f'README{ext}' for ext in README_EXTENSIONS)
 
     def run(self) -> None:
         self.run_command('egg_info')
@@ -207,7 +207,7 @@ def read_manifest(self):
             try:
                 line = bytes_line.decode('UTF-8')
             except UnicodeDecodeError:
-                log.warn("{!r} not UTF-8 decodable -- skipping".format(line))
+                log.warn(f"{line!r} not UTF-8 decodable -- skipping")
                 continue
             # ignore comments and blank lines
             line = line.strip()
diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py
index ee220f9454..678a0593d6 100644
--- a/setuptools/command/setopt.py
+++ b/setuptools/command/setopt.py
@@ -23,7 +23,7 @@ def config_file(kind="local"):
         return os.path.join(os.path.dirname(distutils.__file__), 'distutils.cfg')
     if kind == 'user':
         dot = os.name == 'posix' and '.' or ''
-        return os.path.expanduser(convert_path("~/{}pydistutils.cfg".format(dot)))
+        return os.path.expanduser(convert_path(f"~/{dot}pydistutils.cfg"))
     raise ValueError("config_file() type must be 'local', 'global', or 'user'", kind)
 
 
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 8469e979a1..ccb5d63cd2 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -329,7 +329,7 @@ def version(value: Callable | Iterable[str | int] | str) -> str:
         return _value
     if hasattr(_value, '__iter__'):
         return '.'.join(map(str, _value))
-    return '{}'.format(_value)
+    return f'{_value}'
 
 
 def canonic_package_data(package_data: dict) -> dict:
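
The expand.py hunk reduces to wrapping an arbitrary value in an f-string.
For objects whose __format__ falls back to __str__ (the default),
f'{value}' is equivalent to str(value); the f-string form presumably just
keeps the change within the mechanical rewrite. A quick check:

    # f'{x}' calls format(x, ''), which by default delegates to str().
    for value in (3, 3.5, "3.5", (3, 5)):
        assert f'{value}' == str(value)

    # Types can override __format__, so the equivalence is not universal:
    import datetime
    d = datetime.date(2024, 1, 2)
    assert f'{d}' == str(d)     # empty spec still matches str()
    assert f'{d:%Y}' != str(d)  # a non-empty spec changes the result
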
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 0f9167be77..f2eb833b22 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -272,7 +272,7 @@ def _section_options(
     def parsers(self):
         """Metadata item name to parser function mapping."""
         raise NotImplementedError(
-            '{} must provide .parsers property'.format(self.__class__.__name__)
+            f'{self.__class__.__name__} must provide .parsers property'
         )
 
     def __setitem__(self, option_name, value) -> None:
@@ -483,11 +483,11 @@ def parse(self) -> None:
         for section_name, section_options in self.sections.items():
             method_postfix = ''
             if section_name:  # [section.option] variant
-                method_postfix = '_{}'.format(section_name)
+                method_postfix = f"_{section_name}"
 
             section_parser_method: Callable | None = getattr(
                 self,
                 # Dots in section names are translated into dunderscores.
-                ('parse_section{}'.format(method_postfix)).replace('.', '__'),
+                f'parse_section{method_postfix}'.replace('.', '__'),
                 None,
             )
 
diff --git a/setuptools/depends.py b/setuptools/depends.py
index 09eff3855f..e5223b7956 100644
--- a/setuptools/depends.py
+++ b/setuptools/depends.py
@@ -43,7 +43,7 @@ def __init__(
     def full_name(self):
         """Return full package/distribution name, w/version"""
         if self.requested_version is not None:
-            return '{}-{}'.format(self.name, self.requested_version)
+            return f'{self.name}-{self.requested_version}'
         return self.name
 
     def version_ok(self, version):
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 13a0b43448..639a435ad4 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -85,9 +85,7 @@ def check_importable(dist, attr, value):
         assert not ep.extras
     except (TypeError, ValueError, AttributeError, AssertionError) as e:
         raise DistutilsSetupError(
-            "{!r} must be importable 'module:attrs' string (got {!r})".format(
-                attr, value
-            )
+            f"{attr!r} must be importable 'module:attrs' string (got {value!r})"
         ) from e
 
 
@@ -113,7 +111,7 @@ def check_nsp(dist, attr, value):
         if not dist.has_contents_for(nsp):
             raise DistutilsSetupError(
                 "Distribution contains no modules or packages for "
-                + "namespace package {!r}".format(nsp)
+                + f"namespace package {nsp!r}"
             )
         parent, _sep, _child = nsp.rpartition('.')
         if parent and parent not in ns_packages:
@@ -212,15 +210,15 @@ def check_package_data(dist, attr, value):
     """Verify that value is a dictionary of package names to glob lists"""
     if not isinstance(value, dict):
         raise DistutilsSetupError(
-            "{!r} must be a dictionary mapping package names to lists of "
-            "string wildcard patterns".format(attr)
+            f"{attr!r} must be a dictionary mapping package names to lists of "
+            "string wildcard patterns"
         )
     for k, v in value.items():
         if not isinstance(k, str):
             raise DistutilsSetupError(
-                "keys of {!r} dict must be strings (got {!r})".format(attr, k)
+                f"keys of {attr!r} dict must be strings (got {k!r})"
             )
-        assert_string_list(dist, 'values of {!r} dict'.format(attr), v)
+        assert_string_list(dist, f'values of {attr!r} dict', v)
 
 
 def check_packages(dist, attr, value):
@@ -590,10 +588,10 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
             option_dict = self.get_option_dict(command_name)
 
         if DEBUG:
-            self.announce("  setting options for '{}' command:".format(command_name))
+            self.announce(f"  setting options for '{command_name}' command:")
         for option, (source, value) in option_dict.items():
             if DEBUG:
-                self.announce("    {} = {} (from {})".format(option, value, source))
+                self.announce(f"    {option} = {value} (from {source})")
             try:
                 bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
             except AttributeError:
@@ -613,9 +611,7 @@ def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
                     setattr(command_obj, option, value)
                 else:
                     raise DistutilsOptionError(
-                        "error in {}: command '{}' has no such option '{}'".format(
-                            source, command_name, option
-                        )
+                        f"error in {source}: command '{command_name}' has no such option '{option}'"
                     )
             except ValueError as e:
                 raise DistutilsOptionError(e) from e
@@ -840,7 +836,7 @@ def _include_misc(self, name: str, value: _Sequence) -> None:
             old = getattr(self, name)
         except AttributeError as e:
             raise DistutilsSetupError(
-                "{}: No such distribution setting".format(name)
+                f"{name}: No such distribution setting"
             ) from e
         if old is None:
             setattr(self, name, value)
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index ec5d8cdae1..6ad1abac29 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -64,7 +64,7 @@ def get_unpatched_class(cls: type[_T]) -> type[_T]:
     )
     base = next(external_bases)
     if not base.__module__.startswith('distutils'):
-        msg = "distutils has already been patched by {!r}".format(cls)
+        msg = f"distutils has already been patched by {cls!r}"
         raise AssertionError(msg)
     return base
 
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 727897d1d3..4ed97c75a1 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -108,7 +108,7 @@ def current_dir(self, hidex86=False, x64=False) -> str:
             if (self.current_cpu == 'x86' and hidex86)
             else r'\x64'
             if (self.current_cpu == 'amd64' and x64)
-            else r'\{}'.format(self.current_cpu)
+            else rf'\{self.current_cpu}'
         )
 
     def target_dir(self, hidex86=False, x64=False) -> str:
@@ -132,7 +132,7 @@ def target_dir(self, hidex86=False, x64=False) -> str:
             if (self.target_cpu == 'x86' and hidex86)
             else r'\x64'
             if (self.target_cpu == 'amd64' and x64)
-            else r'\{}'.format(self.target_cpu)
+            else rf'\{self.target_cpu}'
         )
 
     def cross_dir(self, forcex86=False):
@@ -155,7 +155,7 @@ def cross_dir(self, forcex86=False):
         return (
             ''
             if self.target_cpu == current
-            else self.target_dir().replace('\\', '\\{}_'.format(current))
+            else self.target_dir().replace('\\', f'\\{current}_')
         )
 
 
@@ -497,11 +497,11 @@ def VSInstallDir(self):
         """
         # Default path
         default = os.path.join(
-            self.ProgramFilesx86, 'Microsoft Visual Studio {:0.1f}'.format(self.vs_ver)
+            self.ProgramFilesx86, f'Microsoft Visual Studio {self.vs_ver:0.1f}'
         )
 
         # Try to get path from registry, if fail use default path
-        return self.ri.lookup(self.ri.vs, '{:0.1f}'.format(self.vs_ver)) or default
+        return self.ri.lookup(self.ri.vs, f'{self.vs_ver:0.1f}') or default
 
     @property
     def VCInstallDir(self):
@@ -562,16 +562,16 @@ def _guess_vc_legacy(self):
         """
         default = os.path.join(
             self.ProgramFilesx86,
-            r'Microsoft Visual Studio {:0.1f}\VC'.format(self.vs_ver),
+            rf'Microsoft Visual Studio {self.vs_ver:0.1f}\VC',
         )
 
         # Try to get "VC++ for Python" path from registry as default path
-        reg_path = os.path.join(self.ri.vc_for_python, '{:0.1f}'.format(self.vs_ver))
+        reg_path = os.path.join(self.ri.vc_for_python, f'{self.vs_ver:0.1f}')
         python_vc = self.ri.lookup(reg_path, 'installdir')
         default_vc = os.path.join(python_vc, 'VC') if python_vc else default
 
         # Try to get path from registry, if fail use default path
-        return self.ri.lookup(self.ri.vc, '{:0.1f}'.format(self.vs_ver)) or default_vc
+        return self.ri.lookup(self.ri.vc, f'{self.vs_ver:0.1f}') or default_vc
 
     @property
     def WindowsSdkVersion(self) -> tuple[LiteralString, ...]:
@@ -620,13 +620,13 @@ def WindowsSdkDir(self) -> str | None:  # noqa: C901  # is too complex (12)  # F
         sdkdir: str | None = ''
         for ver in self.WindowsSdkVersion:
             # Try to get it from registry
-            loc = os.path.join(self.ri.windows_sdk, 'v{}'.format(ver))
+            loc = os.path.join(self.ri.windows_sdk, f'v{ver}')
             sdkdir = self.ri.lookup(loc, 'installationfolder')
             if sdkdir:
                 break
         if not sdkdir or not os.path.isdir(sdkdir):
             # Try to get "VC++ for Python" version from registry
-            path = os.path.join(self.ri.vc_for_python, '{:0.1f}'.format(self.vc_ver))
+            path = os.path.join(self.ri.vc_for_python, f'{self.vc_ver:0.1f}')
             install_base = self.ri.lookup(path, 'installdir')
             if install_base:
                 sdkdir = os.path.join(install_base, 'WinSDK')
@@ -634,14 +634,14 @@ def WindowsSdkDir(self) -> str | None:  # noqa: C901  # is too complex (12)  # F
             # If fail, use default new path
             for ver in self.WindowsSdkVersion:
                 intver = ver[: ver.rfind('.')]
-                path = r'Microsoft SDKs\Windows Kits\{}'.format(intver)
+                path = rf'Microsoft SDKs\Windows Kits\{intver}'
                 d = os.path.join(self.ProgramFiles, path)
                 if os.path.isdir(d):
                     sdkdir = d
         if not sdkdir or not os.path.isdir(sdkdir):
             # If fail, use default old path
             for ver in self.WindowsSdkVersion:
-                path = r'Microsoft SDKs\Windows\v{}'.format(ver)
+                path = rf'Microsoft SDKs\Windows\v{ver}'
                 d = os.path.join(self.ProgramFiles, path)
                 if os.path.isdir(d):
                     sdkdir = d
@@ -677,7 +677,7 @@ def WindowsSDKExecutablePath(self):
                 regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)]
 
         for ver in self.WindowsSdkVersion:
-            regpaths += [os.path.join(self.ri.windows_sdk, 'v{}A'.format(ver), fx)]
+            regpaths += [os.path.join(self.ri.windows_sdk, f'v{ver}A', fx)]
 
         # Return installation folder from the more recent path
         for path in regpaths:
@@ -698,7 +698,7 @@ def FSharpInstallDir(self):
             path
         """
         path = os.path.join(
-            self.ri.visualstudio, r'{:0.1f}\Setup\F#'.format(self.vs_ver)
+            self.ri.visualstudio, rf'{self.vs_ver:0.1f}\Setup\F#'
         )
         return self.ri.lookup(path, 'productdir') or ''
 
@@ -718,7 +718,7 @@ def UniversalCRTSdkDir(self):
         # Find path of the more recent Kit
         for ver in vers:
             sdkdir = self.ri.lookup(
-                self.ri.windows_kits_roots, 'kitsroot{}'.format(ver)
+                self.ri.windows_kits_roots, f'kitsroot{ver}'
             )
             if sdkdir:
                 return sdkdir or ''
@@ -965,7 +965,7 @@ def VSTools(self):
             arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
             paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']
             paths += [r'Team Tools\Performance Tools']
-            paths += [r'Team Tools\Performance Tools{}'.format(arch_subdir)]
+            paths += [rf'Team Tools\Performance Tools{arch_subdir}']
 
         return [os.path.join(self.si.VSInstallDir, path) for path in paths]
 
@@ -998,10 +998,10 @@ def VCLibraries(self):
             arch_subdir = self.pi.target_dir(x64=True)
         else:
             arch_subdir = self.pi.target_dir(hidex86=True)
-        paths = ['Lib{}'.format(arch_subdir), r'ATLMFC\Lib{}'.format(arch_subdir)]
+        paths = [f'Lib{arch_subdir}', rf'ATLMFC\Lib{arch_subdir}']
 
         if self.vs_ver >= 14.0:
-            paths += [r'Lib\store{}'.format(arch_subdir)]
+            paths += [rf'Lib\store{arch_subdir}']
 
         return [os.path.join(self.si.VCInstallDir, path) for path in paths]
 
@@ -1035,10 +1035,10 @@ def VCTools(self):
         forcex86 = True if self.vs_ver <= 10.0 else False
         arch_subdir = self.pi.cross_dir(forcex86)
         if arch_subdir:
-            tools += [os.path.join(si.VCInstallDir, 'Bin{}'.format(arch_subdir))]
+            tools += [os.path.join(si.VCInstallDir, f'Bin{arch_subdir}')]
 
         if self.vs_ver == 14.0:
-            path = 'Bin{}'.format(self.pi.current_dir(hidex86=True))
+            path = f'Bin{self.pi.current_dir(hidex86=True)}'
             tools += [os.path.join(si.VCInstallDir, path)]
 
         elif self.vs_ver >= 15.0:
@@ -1073,13 +1073,13 @@ def OSLibraries(self):
         """
         if self.vs_ver <= 10.0:
             arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
-            return [os.path.join(self.si.WindowsSdkDir, 'Lib{}'.format(arch_subdir))]
+            return [os.path.join(self.si.WindowsSdkDir, f'Lib{arch_subdir}')]
 
         else:
             arch_subdir = self.pi.target_dir(x64=True)
             lib = os.path.join(self.si.WindowsSdkDir, 'lib')
             libver = self._sdk_subdir
-            return [os.path.join(lib, '{}um{}'.format(libver, arch_subdir))]
+            return [os.path.join(lib, f'{libver}um{arch_subdir}')]
 
     @property
     def OSIncludes(self):
@@ -1102,9 +1102,9 @@ def OSIncludes(self):
             else:
                 sdkver = ''
             return [
-                os.path.join(include, '{}shared'.format(sdkver)),
-                os.path.join(include, '{}um'.format(sdkver)),
-                os.path.join(include, '{}winrt'.format(sdkver)),
+                os.path.join(include, f'{sdkver}shared'),
+                os.path.join(include, f'{sdkver}um'),
+                os.path.join(include, f'{sdkver}winrt'),
             ]
 
     @property
@@ -1139,7 +1139,7 @@ def OSLibpath(self):
                     self.si.WindowsSdkDir,
                     'ExtensionSDKs',
                     'Microsoft.VCLibs',
-                    '{:0.1f}'.format(self.vs_ver),
+                    f'{self.vs_ver:0.1f}',
                     'References',
                     'CommonConfiguration',
                     'neutral',
@@ -1174,7 +1174,7 @@ def _sdk_tools(self):
 
         if not self.pi.current_is_x86():
             arch_subdir = self.pi.current_dir(x64=True)
-            path = 'Bin{}'.format(arch_subdir)
+            path = f'Bin{arch_subdir}'
             yield os.path.join(self.si.WindowsSdkDir, path)
 
         if self.vs_ver in (10.0, 11.0):
@@ -1182,14 +1182,14 @@ def _sdk_tools(self):
                 arch_subdir = ''
             else:
                 arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
-            path = r'Bin\NETFX 4.0 Tools{}'.format(arch_subdir)
+            path = rf'Bin\NETFX 4.0 Tools{arch_subdir}'
             yield os.path.join(self.si.WindowsSdkDir, path)
 
         elif self.vs_ver >= 15.0:
             path = os.path.join(self.si.WindowsSdkDir, 'Bin')
             arch_subdir = self.pi.current_dir(x64=True)
             sdkver = self.si.WindowsSdkLastVersion
-            yield os.path.join(path, '{}{}'.format(sdkver, arch_subdir))
+            yield os.path.join(path, f'{sdkver}{arch_subdir}')
 
         if self.si.WindowsSDKExecutablePath:
             yield self.si.WindowsSDKExecutablePath
@@ -1205,7 +1205,7 @@ def _sdk_subdir(self):
             subdir
         """
         ucrtver = self.si.WindowsSdkLastVersion
-        return ('{}\\'.format(ucrtver)) if ucrtver else ''
+        return (f'{ucrtver}\\') if ucrtver else ''
 
     @property
     def SdkSetup(self):
@@ -1267,7 +1267,7 @@ def NetFxSDKLibraries(self):
             return []
 
         arch_subdir = self.pi.target_dir(x64=True)
-        return [os.path.join(self.si.NetFxSdkDir, r'lib\um{}'.format(arch_subdir))]
+        return [os.path.join(self.si.NetFxSdkDir, rf'lib\um{arch_subdir}')]
 
     @property
     def NetFxSDKIncludes(self):
@@ -1315,7 +1315,7 @@ def MSBuild(self):
             base_path = self.si.VSInstallDir
             arch_subdir = ''
 
-        path = r'MSBuild\{:0.1f}\bin{}'.format(self.vs_ver, arch_subdir)
+        path = rf'MSBuild\{self.vs_ver:0.1f}\bin{arch_subdir}'
         build = [os.path.join(base_path, path)]
 
         if self.vs_ver >= 15.0:
@@ -1355,7 +1355,7 @@ def UCRTLibraries(self):
         arch_subdir = self.pi.target_dir(x64=True)
         lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib')
         ucrtver = self._ucrt_subdir
-        return [os.path.join(lib, '{}ucrt{}'.format(ucrtver, arch_subdir))]
+        return [os.path.join(lib, f'{ucrtver}ucrt{arch_subdir}')]
 
     @property
     def UCRTIncludes(self):
@@ -1371,7 +1371,7 @@ def UCRTIncludes(self):
             return []
 
         include = os.path.join(self.si.UniversalCRTSdkDir, 'include')
-        return [os.path.join(include, '{}ucrt'.format(self._ucrt_subdir))]
+        return [os.path.join(include, f'{self._ucrt_subdir}ucrt')]
 
     @property
     def _ucrt_subdir(self):
@@ -1384,7 +1384,7 @@ def _ucrt_subdir(self):
             subdir
         """
         ucrtver = self.si.UniversalCRTSdkLastVersion
-        return ('{}\\'.format(ucrtver)) if ucrtver else ''
+        return (f'{ucrtver}\\') if ucrtver else ''
 
     @property
     def FSharp(self):
@@ -1525,7 +1525,7 @@ def _build_paths(self, name, spec_path_lists, exists):
         paths = itertools.chain(spec_paths, env_paths)
         extant_paths = list(filter(os.path.isdir, paths)) if exists else paths
         if not extant_paths:
-            msg = "{} environment variable is empty".format(name.upper())
+            msg = f"{name.upper()} environment variable is empty"
             raise distutils.errors.DistutilsPlatformError(msg)
         unique_paths = unique_everseen(extant_paths)
         return os.pathsep.join(unique_paths)
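
Two details in the msvc.py hunks are easy to miss: the r and f string
prefixes combine (rf'...'), so backslash-heavy Windows paths stay raw
while still interpolating, and format specs such as :0.1f move inside the
braces unchanged. A small sketch with made-up values:

    vs_ver = 14.0            # stand-in for self.vs_ver
    arch_subdir = r'\amd64'  # stand-in for a computed arch subdirectory

    # rf keeps backslashes literal while interpolating values, and the
    # :0.1f spec renders the version with one decimal place.
    path = rf'MSBuild\{vs_ver:0.1f}\bin{arch_subdir}'
    assert path == r'MSBuild\14.0\bin\amd64'
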
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 373c047b93..25ca98b334 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -74,7 +74,7 @@ def parse_requirement_arg(spec):
         return Requirement.parse(spec)
     except ValueError as e:
         raise DistutilsError(
-            "Not a URL, existing file, or requirement spec: {!r}".format(spec)
+            f"Not a URL, existing file, or requirement spec: {spec!r}"
         ) from e
 
 
@@ -357,7 +357,7 @@ def process_url(self, url, retrieve: bool = False) -> None:  # noqa: C901
         if f is None:
             return
         if isinstance(f, urllib.error.HTTPError) and f.code == 401:
-            self.info("Authentication error: {}".format(f.msg))
+            self.info(f"Authentication error: {f.msg}")
         self.fetched_urls[f.url] = True
         if 'html' not in f.headers.get('content-type', '').lower():
             f.close()  # not html, we can't process it
@@ -474,7 +474,7 @@ def process_index(self, url, page):
             base, frag = egg_info_for_url(new_url)
             if base.endswith('.py') and not frag:
                 if ver:
-                    new_url += '#egg={}-{}'.format(pkg, ver)
+                    new_url += f'#egg={pkg}-{ver}'
                 else:
                     self.need_version_info(url)
             self.scan_url(new_url)
@@ -525,14 +525,12 @@ def check_hash(self, checker, filename, tfp) -> None:
         """
         checker is a ContentChecker
         """
-        checker.report(self.debug, "Validating %s checksum for {}".format(filename))
+        checker.report(self.debug, f"Validating %s checksum for {filename}")
         if not checker.is_valid():
             tfp.close()
             os.unlink(filename)
             raise DistutilsError(
-                "{} validation failed for {}; " "possible download problem?".format(
-                    checker.hash.name, os.path.basename(filename)
-                )
+                f"{checker.hash.name} validation failed for {os.path.basename(filename)}; " "possible download problem?"
             )
 
     def add_find_links(self, urls) -> None:
@@ -720,19 +718,15 @@ def gen_setup(self, filename, fragment, tmpdir):
             with open(os.path.join(tmpdir, 'setup.py'), 'w', encoding="utf-8") as file:
                 file.write(
                     "from setuptools import setup\n"
-                    "setup(name={!r}, version={!r}, py_modules=[{!r}])\n".format(
-                        dists[0].project_name,
-                        dists[0].version,
-                        os.path.splitext(basename)[0],
-                    )
+                    f"setup(name={dists[0].project_name!r}, version={dists[0].version!r}, py_modules=[{os.path.splitext(basename)[0]!r}])\n"
                 )
             return filename
 
         elif match:
             raise DistutilsError(
-                "Can't unambiguously interpret project/version identifier {!r}; "
+                f"Can't unambiguously interpret project/version identifier {fragment!r}; "
                 "any dashes in the name or version should be escaped using "
-                "underscores. {!r}".format(fragment, dists)
+                f"underscores. {dists!r}"
             )
         else:
             raise DistutilsError(
@@ -751,7 +745,7 @@ def _download_to(self, url, filename):
             fp = self.open_url(url)
             if isinstance(fp, urllib.error.HTTPError):
                 raise DistutilsError(
-                    "Can't download {}: {} {}".format(url, fp.code, fp.msg)
+                    f"Can't download {url}: {fp.code} {fp.msg}"
                 )
             headers = fp.info()
             blocknum = 0
@@ -792,7 +786,7 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
             if warning:
                 self.warn(warning, msg)
             else:
-                raise DistutilsError('{} {}'.format(url, msg)) from v
+                raise DistutilsError(f'{url} {msg}') from v
         except urllib.error.HTTPError as v:
             return v
         except urllib.error.URLError as v:
@@ -800,21 +794,21 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
                 self.warn(warning, v.reason)
             else:
                 raise DistutilsError(
-                    "Download error for {}: {}".format(url, v.reason)
+                    f"Download error for {url}: {v.reason}"
                 ) from v
         except http.client.BadStatusLine as v:
             if warning:
                 self.warn(warning, v.line)
             else:
                 raise DistutilsError(
-                    '{} returned a bad status line. The server might be '
-                    'down, {}'.format(url, v.line)
+                    f'{url} returned a bad status line. The server might be '
+                    f'down, {v.line}'
                 ) from v
         except (http.client.HTTPException, OSError) as v:
             if warning:
                 self.warn(warning, v)
             else:
-                raise DistutilsError("Download error for {}: {}".format(url, v)) from v
+                raise DistutilsError(f"Download error for {url}: {v}") from v
 
     def _download_url(self, url, tmpdir):
         # Determine download filename
@@ -1133,7 +1127,7 @@ def local_open(url):
                 break
             elif os.path.isdir(filepath):
                 f += '/'
-            files.append('{name}'.format(name=f))
+            files.append(f'{f}')
         else:
             tmpl = "{url}{files}"
             body = tmpl.format(url=url, files='\n'.join(files))
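
One hunk above deserves a second look: "Validating %s checksum for
{}".format(filename) becomes f"Validating %s checksum for {filename}".
The %s is deliberate; from context, checker.report() fills it in later
with %-formatting, and f-strings leave % sequences untouched, so the
template survives the rewrite intact. A sketch of the two-stage
formatting, with stand-in values:

    filename = "example-1.0.tar.gz"  # hypothetical download
    hash_name = "sha256"             # hypothetical checker.hash.name

    # Stage 1: the f-string interpolates filename but leaves %s alone.
    template = f"Validating %s checksum for {filename}"
    assert template == "Validating %s checksum for example-1.0.tar.gz"

    # Stage 2: %-formatting later supplies the hash name.
    assert template % hash_name == (
        "Validating sha256 checksum for example-1.0.tar.gz"
    )
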
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 399c9dd743..407419b2ea 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -87,7 +87,7 @@ def test_basic(self, tmpdir):
             '[options]\n'
             'scripts = bin/a.py, bin/b.py\n',
         )
-        config_dict = read_configuration('{}'.format(config))
+        config_dict = read_configuration(f'{config}')
         assert config_dict['metadata']['version'] == '10.1.1'
         assert config_dict['metadata']['keywords'] == ['one', 'two']
         assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']
@@ -102,9 +102,9 @@ def test_ignore_errors(self, tmpdir):
             '[metadata]\nversion = attr: none.VERSION\nkeywords = one, two\n',
         )
         with pytest.raises(ImportError):
-            read_configuration('{}'.format(config))
+            read_configuration(f'{config}')
 
-        config_dict = read_configuration('{}'.format(config), ignore_option_errors=True)
+        config_dict = read_configuration(f'{config}', ignore_option_errors=True)
 
         assert config_dict['metadata']['keywords'] == ['one', 'two']
         assert 'version' not in config_dict['metadata']
diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py
index 0a0fc2b668..623a49a550 100644
--- a/setuptools/tests/server.py
+++ b/setuptools/tests/server.py
@@ -44,7 +44,7 @@ def stop(self):
 
     def base_url(self):
         port = self.server_port
-        return 'http://127.0.0.1:{}/setuptools/tests/indexes/'.format(port)
+        return f'http://127.0.0.1:{port}/setuptools/tests/indexes/'
 
 
 class RequestRecorder(http.server.BaseHTTPRequestHandler):
@@ -70,11 +70,11 @@ def run(self):
 
     @property
     def netloc(self):
-        return 'localhost:{}'.format(self.server_port)
+        return f'localhost:{self.server_port}'
 
     @property
     def url(self):
-        return 'http://{}/'.format(self.netloc)
+        return f'http://{self.netloc}/'
 
 
 def path_to_url(path, authority=None):
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index 89b2ea3054..c34b9eb831 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -311,7 +311,7 @@ def test_maintainer_author(name, attrs, tmpdir):
             for line in pkg_lines:
                 assert not line.startswith(fkey + ':')
         else:
-            line = '{}: {}'.format(fkey, val)
+            line = f'{fkey}: {val}'
             assert line in pkg_lines_set
 
 
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index e1d7c1e043..533eb9f45e 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -24,7 +24,7 @@ def test_dist_fetch_build_egg(tmpdir):
 
     def sdist_with_index(distname, version):
         dist_dir = index.mkdir(distname)
-        dist_sdist = '{}-{}.tar.gz'.format(distname, version)
+        dist_sdist = f'{distname}-{version}.tar.gz'
         make_nspkg_sdist(str(dist_dir.join(dist_sdist)), distname, version)
         with dist_dir.join('index.html').open('w') as fp:
             fp.write(
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index a34f9bd525..e9b96027ce 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -735,14 +735,14 @@ def make_dependency_sdist(dist_path, distname, version):
                 (
                     'setup.py',
                     DALS(
-                        """
+                        f"""
                     import setuptools
                     setuptools.setup(
-                        name={name!r},
+                        name={distname!r},
                         version={version!r},
-                        py_modules=[{name!r}],
+                        py_modules=[{distname!r}],
                     )
-                    """.format(name=distname, version=version)
+                    """
                     ),
                 ),
                 (
@@ -814,7 +814,7 @@ def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch):
                     # Ignored (overridden by setup_attrs)
                     'python-xlib',
                     '0.19',
-                    setup_attrs=dict(setup_requires='dependency @ {}'.format(dep_url)),
+                    setup_attrs=dict(setup_requires=f'dependency @ {dep_url}'),
                 )
                 test_setup_py = os.path.join(test_pkg, 'setup.py')
                 run_setup(test_setup_py, ['--version'])
@@ -1100,13 +1100,13 @@ def make_trivial_sdist(dist_path, distname, version):
             (
                 'setup.py',
                 DALS(
-                    """\
+                    f"""\
              import setuptools
              setuptools.setup(
-                 name={!r},
-                 version={!r}
+                 name={distname!r},
+                 version={version!r}
              )
-         """.format(distname, version)
+         """
                 ),
             ),
             ('setup.cfg', ''),
@@ -1127,15 +1127,15 @@ def make_nspkg_sdist(dist_path, distname, version):
     packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)]
 
     setup_py = DALS(
-        """\
+        f"""\
         import setuptools
         setuptools.setup(
-            name={!r},
-            version={!r},
-            packages={!r},
-            namespace_packages=[{!r}]
+            name={distname!r},
+            version={version!r},
+            packages={packages!r},
+            namespace_packages=[{nspackage!r}]
         )
-    """.format(distname, version, packages, nspackage)
+    """
     )
 
     init = "__import__('pkg_resources').declare_namespace(__name__)"
@@ -1210,7 +1210,7 @@ def create_setup_requires_package(
     test_setup_attrs = {
         'name': 'test_pkg',
         'version': '0.0',
-        'setup_requires': ['{}=={}'.format(distname, version)],
+        'setup_requires': [f'{distname}=={version}'],
         'dependency_links': [os.path.abspath(path)],
     }
     if setup_attrs:
@@ -1231,7 +1231,7 @@ def create_setup_requires_package(
                 section = options
             if isinstance(value, (tuple, list)):
                 value = ';'.join(value)
-            section.append('{}: {}'.format(name, value))
+            section.append(f'{name}: {value}')
         test_setup_cfg_contents = DALS(
             """
             [metadata]
@@ -1259,7 +1259,7 @@ def create_setup_requires_package(
     with open(os.path.join(test_pkg, 'setup.py'), 'w', encoding="utf-8") as f:
         f.write(setup_py_template % test_setup_attrs)
 
-    foobar_path = os.path.join(path, '{}-{}.tar.gz'.format(distname, version))
+    foobar_path = os.path.join(path, f'{distname}-{version}.tar.gz')
     make_package(foobar_path, distname, version)
 
     return test_pkg
@@ -1274,12 +1274,12 @@ class TestScriptHeader:
     exe_with_spaces = r'C:\Program Files\Python36\python.exe'
 
     def test_get_script_header(self):
-        expected = '#!{}\n'.format(ei.nt_quote_arg(os.path.normpath(sys.executable)))
+        expected = f'#!{ei.nt_quote_arg(os.path.normpath(sys.executable))}\n'
         actual = ei.ScriptWriter.get_header('#!/usr/local/bin/python')
         assert actual == expected
 
     def test_get_script_header_args(self):
-        expected = '#!{} -x\n'.format(ei.nt_quote_arg(os.path.normpath(sys.executable)))
+        expected = f'#!{ei.nt_quote_arg(os.path.normpath(sys.executable))} -x\n'
         actual = ei.ScriptWriter.get_header('#!/usr/bin/python -x')
         assert actual == expected
 
@@ -1287,14 +1287,14 @@ def test_get_script_header_non_ascii_exe(self):
         actual = ei.ScriptWriter.get_header(
             '#!/usr/bin/python', executable=self.non_ascii_exe
         )
-        expected = '#!{} -x\n'.format(self.non_ascii_exe)
+        expected = f'#!{self.non_ascii_exe} -x\n'
         assert actual == expected
 
     def test_get_script_header_exe_with_spaces(self):
         actual = ei.ScriptWriter.get_header(
             '#!/usr/bin/python', executable='"' + self.exe_with_spaces + '"'
         )
-        expected = '#!"{}"\n'.format(self.exe_with_spaces)
+        expected = f'#!"{self.exe_with_spaces}"\n'
         assert actual == expected
 
 
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 24e73be44f..9924f9cbbd 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -259,13 +259,9 @@ def _setup_script_with_requires(self, requires, use_setup_cfg=False):
             'setup.cfg': setup_config,
         })
 
-    mismatch_marker = "python_version<'{this_ver}'".format(
-        this_ver=sys.version_info.major,
-    )
+    mismatch_marker = f"python_version<'{sys.version_info[0]}'"
     # Alternate equivalent syntax.
-    mismatch_marker_alternate = 'python_version < "{this_ver}"'.format(
-        this_ver=sys.version_info.major,
-    )
+    mismatch_marker_alternate = f'python_version < "{sys.version_info[0]}"'
     invalid_marker = "<=>++"
 
     class RequiresTestHelper:
diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py
index f496fe5917..e62a6b7f31 100644
--- a/setuptools/tests/test_install_scripts.py
+++ b/setuptools/tests/test_install_scripts.py
@@ -38,7 +38,7 @@ def test_sys_executable_escaping_unix(self, tmpdir, monkeypatch):
         Ensure that shebang is not quoted on Unix when getting the Python exe
         from sys.executable.
         """
-        expected = '#!{}\n'.format(self.unix_exe)
+        expected = f'#!{self.unix_exe}\n'
         monkeypatch.setattr('sys.executable', self.unix_exe)
         with tmpdir.as_cwd():
             self._run_install_scripts(str(tmpdir))
@@ -52,7 +52,7 @@ def test_sys_executable_escaping_win32(self, tmpdir, monkeypatch):
         Ensure that shebang is quoted on Windows when getting the Python exe
         from sys.executable and it contains a space.
         """
-        expected = '#!"{}"\n'.format(self.win32_exe)
+        expected = f'#!"{self.win32_exe}"\n'
         monkeypatch.setattr('sys.executable', self.win32_exe)
         with tmpdir.as_cwd():
             self._run_install_scripts(str(tmpdir))
@@ -67,7 +67,7 @@ def test_executable_with_spaces_escaping_unix(self, tmpdir):
         a value with spaces
         is specified using --executable.
         """
-        expected = '#!{}\n'.format(self.unix_spaces_exe)
+        expected = f'#!{self.unix_spaces_exe}\n'
         with tmpdir.as_cwd():
             self._run_install_scripts(str(tmpdir), self.unix_spaces_exe)
             with open(str(tmpdir.join('foo')), 'r', encoding="utf-8") as f:
@@ -81,7 +81,7 @@ def test_executable_arg_escaping_win32(self, tmpdir):
         getting a path with spaces
         from --executable, that is itself properly quoted.
         """
-        expected = '#!"{}"\n'.format(self.win32_exe)
+        expected = f'#!"{self.win32_exe}"\n'
         with tmpdir.as_cwd():
             self._run_install_scripts(str(tmpdir), '"' + self.win32_exe + '"')
             with open(str(tmpdir.join('foo-script.py')), 'r', encoding="utf-8") as f:
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
index a821cf778b..903a528db0 100644
--- a/setuptools/tests/test_manifest.py
+++ b/setuptools/tests/test_manifest.py
@@ -34,11 +34,11 @@ def make_local_path(s):
     'packages': ['app'],
 }
 
-SETUP_PY = """\
+SETUP_PY = f"""\
 from setuptools import setup
 
-setup(**{!r})
-""".format(SETUP_ATTRS)
+setup(**{SETUP_ATTRS!r})
+"""
 
 
 @contextlib.contextmanager
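
The test fixtures move whole triple-quoted templates to f-strings;
SETUP_PY above embeds {SETUP_ATTRS!r}, the repr of a dict. That works
because the braces in the output come from the interpolated repr, not
from the literal; braces typed directly inside an f-string would need
doubling. A sketch:

    SETUP_ATTRS = {'name': 'app', 'packages': ['app']}  # hypothetical

    # Braces produced by the interpolated repr need no escaping...
    src = f"setup(**{SETUP_ATTRS!r})"
    assert src == "setup(**{'name': 'app', 'packages': ['app']})"

    # ...but literal braces inside the f-string itself must be doubled.
    literal = f"a dict literal: {{'name': {SETUP_ATTRS['name']!r}}}"
    assert literal == "a dict literal: {'name': 'app'}"
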
diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py
index 5531ea8660..a476b7c93d 100644
--- a/setuptools/tests/test_sandbox.py
+++ b/setuptools/tests/test_sandbox.py
@@ -76,7 +76,7 @@ class CantPickleThis(Exception):
             "This Exception is unpickleable because it's not in globals"
 
             def __repr__(self) -> str:
-                return 'CantPickleThis{!r}'.format(self.args)
+                return f'CantPickleThis{self.args!r}'
 
         with setuptools.sandbox.ExceptionSaver() as saved_exc:
             raise CantPickleThis('detail')
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 8f5e620f0d..3ee0511b1c 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -37,11 +37,11 @@
     'data_files': [("data", [os.path.join("d", "e.dat")])],
 }
 
-SETUP_PY = """\
+SETUP_PY = f"""\
 from setuptools import setup
 
-setup(**{!r})
-""".format(SETUP_ATTRS)
+setup(**{SETUP_ATTRS!r})
+"""
 
 EXTENSION = Extension(
     name="sdist_test.f",
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index 7e700a14d2..70165c608b 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -176,7 +176,7 @@ def __init__(self, id, **kwargs):
         self._fields = kwargs
 
     def __repr__(self) -> str:
-        return '{}(**{!r})'.format(self._id, self._fields)
+        return f'{self._id}(**{self._fields!r})'
 
 
 # Using Any to avoid possible type union issues later in test
@@ -367,10 +367,10 @@ def __repr__(self) -> str:
     ),
     dict(
         id='requires2',
-        install_requires="""
+        install_requires=f"""
         bar
-        foo<=2.0; {!r} in sys_platform
-        """.format(sys.platform),
+        foo<=2.0; {sys.platform!r} in sys_platform
+        """,
         requires_txt=DALS(
             """
             bar
@@ -380,9 +380,9 @@ def __repr__(self) -> str:
     ),
     dict(
         id='requires3',
-        install_requires="""
-        bar; {!r} != sys_platform
-        """.format(sys.platform),
+        install_requires=f"""
+        bar; {sys.platform!r} != sys_platform
+        """,
     ),
     dict(
         id='requires4',
@@ -404,7 +404,7 @@ def __repr__(self) -> str:
     dict(
         id='requires5',
         extras_require={
-            'extra': 'foobar; {!r} != sys_platform'.format(sys.platform),
+            'extra': f'foobar; {sys.platform!r} != sys_platform',
         },
         requires_txt=DALS(
             """
@@ -603,7 +603,7 @@ def test_wheel_install_pep_503():
 def test_wheel_no_dist_dir():
     project_name = 'nodistinfo'
     version = '1.0'
-    wheel_name = '{}-{}-py2.py3-none-any.whl'.format(project_name, version)
+    wheel_name = f'{project_name}-{version}-py2.py3-none-any.whl'
     with tempdir() as source_dir:
         wheel_path = os.path.join(source_dir, wheel_name)
         # create an empty zip file
diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
index e46bb6abc0..f895485387 100644
--- a/setuptools/tests/test_windows_wrappers.py
+++ b/setuptools/tests/test_windows_wrappers.py
@@ -57,9 +57,9 @@ def win_launcher_exe(prefix):
     """A simple routine to select launcher script based on platform."""
     assert prefix in ('cli', 'gui')
     if platform.machine() == "ARM64":
-        return "{}-arm64.exe".format(prefix)
+        return f"{prefix}-arm64.exe"
     else:
-        return "{}-32.exe".format(prefix)
+        return f"{prefix}-32.exe"
 
 
 class TestCLI(WrapperTester):
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 9ccd9e7ede..0ac8fc49c1 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -79,7 +79,7 @@ class Wheel:
     def __init__(self, filename) -> None:
         match = WHEEL_NAME(os.path.basename(filename))
         if match is None:
-            raise ValueError('invalid wheel name: {!r}'.format(filename))
+            raise ValueError(f'invalid wheel name: {filename!r}')
         self.filename = filename
         for k, v in match.groupdict().items():
             setattr(self, k, v)
@@ -122,9 +122,9 @@ def install_as_egg(self, destination_eggdir) -> None:
             self._install_as_egg(destination_eggdir, zf)
 
     def _install_as_egg(self, destination_eggdir, zf):
-        dist_basename = '{}-{}'.format(self.project_name, self.version)
+        dist_basename = f'{self.project_name}-{self.version}'
         dist_info = self.get_dist_info(zf)
-        dist_data = '{}.data'.format(dist_basename)
+        dist_data = f'{dist_basename}.data'
         egg_info = os.path.join(destination_eggdir, 'EGG-INFO')
 
         self._convert_metadata(zf, destination_eggdir, dist_info, egg_info)
@@ -146,7 +146,7 @@ def get_metadata(name):
         wheel_v1 = parse_version('1.0') <= wheel_version < parse_version('2.0dev0')
         if not wheel_v1:
             raise ValueError(
-                'unsupported wheel format version: {}'.format(wheel_version)
+                f'unsupported wheel format version: {wheel_version}'
             )
         # Extract to target directory.
         _unpack_zipfile_obj(zf, destination_eggdir)

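An equivalence sketch for the `.format()` -> f-string conversions above: the
`!r` conversion renders identically in both spellings, so these rewrites are
behavior-preserving.

    # Illustrative only: str.format with !r and an f-string with !r agree.
    name = 'foo bar'
    assert '{!r}'.format(name) == f'{name!r}' == "'foo bar'"
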
From 88861f9b9e31af8d4e54ed3fedb4120efff98536 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 10 Sep 2024 12:27:49 +0200
Subject: [PATCH 1356/1761] A round of `ruff format` after `ruff check --fix`

---
 pkg_resources/__init__.py                 |  6 ++----
 pkg_resources/tests/test_pkg_resources.py |  4 +++-
 setuptools/__init__.py                    |  4 +---
 setuptools/command/bdist_egg.py           |  4 +---
 setuptools/command/easy_install.py        |  4 +++-
 setuptools/command/install_egg_info.py    |  4 +---
 setuptools/dist.py                        |  4 +---
 setuptools/msvc.py                        |  8 ++------
 setuptools/package_index.py               | 11 ++++-------
 setuptools/wheel.py                       |  4 +---
 10 files changed, 19 insertions(+), 34 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 044c1a04ef..3a387d08e5 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -3049,7 +3049,7 @@ def version(self):
             version = self._get_version()
             if version is None:
                 path = self._get_metadata_path_for_display(self.PKG_INFO)
-                msg = (f"Missing 'Version:' header and/or {self.PKG_INFO} file at path: {path}")
+                msg = f"Missing 'Version:' header and/or {self.PKG_INFO} file at path: {path}"
                 raise ValueError(msg, self) from e
 
             return version
@@ -3105,9 +3105,7 @@ def requires(self, extras: Iterable[str] = ()) -> list[Requirement]:
             try:
                 deps.extend(dm[safe_extra(ext)])
             except KeyError as e:
-                raise UnknownExtra(
-                    f"{self} has no such extra feature {ext!r}"
-                ) from e
+                raise UnknownExtra(f"{self} has no such extra feature {ext!r}") from e
         return deps
 
     def _get_metadata_path_for_display(self, name):
diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 27ddaab42f..0f696e8502 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -255,7 +255,9 @@ def test_distribution_version_missing(
     basename = f'foo.{suffix}'
     dist, dist_dir = make_distribution_no_version(tmpdir, basename)
 
-    expected_text = (f"Missing 'Version:' header and/or {expected_filename} file at path: ")
+    expected_text = (
+        f"Missing 'Version:' header and/or {expected_filename} file at path: "
+    )
     metadata_path = os.path.join(dist_dir, expected_filename)
 
     # Now check the exception raised when the "version" attribute is accessed.
diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 8b83a5271d..64464dfaa3 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
@@ -181,9 +181,7 @@ def _ensure_stringlike(self, option, what, default=None):
             setattr(self, option, default)
             return default
         elif not isinstance(val, str):
-            raise DistutilsOptionError(
-                f"'{option}' must be a {what} (got `{val}`)"
-            )
+            raise DistutilsOptionError(f"'{option}' must be a {what} (got `{val}`)")
         return val
 
     def ensure_string_list(self, option: str) -> None:
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 21e3e8f14d..9a08b36585 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -263,9 +263,7 @@ def zap_pyfiles(self):
                     pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                     m = re.match(pattern, name)
                     path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
-                    log.info(
-                        f"Renaming file from [{path_old}] to [{path_new}]"
-                    )
+                    log.info(f"Renaming file from [{path_old}] to [{path_new}]")
                     try:
                         os.remove(path_new)
                     except OSError:
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 379beede14..4a93ff077e 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1112,7 +1112,9 @@ def install_wheel(self, wheel_path, tmpdir):
             self.execute(
                 wheel.install_as_egg,
                 (destination,),
-                (f"Installing {os.path.basename(wheel_path)} to {os.path.dirname(destination)}"),
+                (
+                    f"Installing {os.path.basename(wheel_path)} to {os.path.dirname(destination)}"
+                ),
             )
         finally:
             update_dist_caches(destination, fix_zipimporter_caches=False)
diff --git a/setuptools/command/install_egg_info.py b/setuptools/command/install_egg_info.py
index cbf125c764..44f22ccf51 100644
--- a/setuptools/command/install_egg_info.py
+++ b/setuptools/command/install_egg_info.py
@@ -36,9 +36,7 @@ def run(self) -> None:
             self.execute(os.unlink, (self.target,), "Removing " + self.target)
         if not self.dry_run:
             ensure_directory(self.target)
-        self.execute(
-            self.copytree, (), f"Copying {self.source} to {self.target}"
-        )
+        self.execute(self.copytree, (), f"Copying {self.source} to {self.target}")
         self.install_namespaces()
 
     def get_outputs(self):
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 639a435ad4..18ba1883f2 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -835,9 +835,7 @@ def _include_misc(self, name: str, value: _Sequence) -> None:
         try:
             old = getattr(self, name)
         except AttributeError as e:
-            raise DistutilsSetupError(
-                f"{name}: No such distribution setting"
-            ) from e
+            raise DistutilsSetupError(f"{name}: No such distribution setting") from e
         if old is None:
             setattr(self, name, value)
         elif not isinstance(old, _sequence):
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 4ed97c75a1..7f006c3c1a 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -697,9 +697,7 @@ def FSharpInstallDir(self):
         str
             path
         """
-        path = os.path.join(
-            self.ri.visualstudio, rf'{self.vs_ver:0.1f}\Setup\F#'
-        )
+        path = os.path.join(self.ri.visualstudio, rf'{self.vs_ver:0.1f}\Setup\F#')
         return self.ri.lookup(path, 'productdir') or ''
 
     @property
@@ -717,9 +715,7 @@ def UniversalCRTSdkDir(self):
 
         # Find path of the more recent Kit
         for ver in vers:
-            sdkdir = self.ri.lookup(
-                self.ri.windows_kits_roots, f'kitsroot{ver}'
-            )
+            sdkdir = self.ri.lookup(self.ri.windows_kits_roots, f'kitsroot{ver}')
             if sdkdir:
                 return sdkdir or ''
 
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 25ca98b334..1a6abebcda 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -530,7 +530,8 @@ def check_hash(self, checker, filename, tfp) -> None:
             tfp.close()
             os.unlink(filename)
             raise DistutilsError(
-                f"{checker.hash.name} validation failed for {os.path.basename(filename)}; " "possible download problem?"
+                f"{checker.hash.name} validation failed for {os.path.basename(filename)}; "
+                "possible download problem?"
             )
 
     def add_find_links(self, urls) -> None:
@@ -744,9 +745,7 @@ def _download_to(self, url, filename):
             checker = HashChecker.from_url(url)
             fp = self.open_url(url)
             if isinstance(fp, urllib.error.HTTPError):
-                raise DistutilsError(
-                    f"Can't download {url}: {fp.code} {fp.msg}"
-                )
+                raise DistutilsError(f"Can't download {url}: {fp.code} {fp.msg}")
             headers = fp.info()
             blocknum = 0
             bs = self.dl_blocksize
@@ -793,9 +792,7 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
             if warning:
                 self.warn(warning, v.reason)
             else:
-                raise DistutilsError(
-                    f"Download error for {url}: {v.reason}"
-                ) from v
+                raise DistutilsError(f"Download error for {url}: {v.reason}") from v
         except http.client.BadStatusLine as v:
             if warning:
                 self.warn(warning, v.line)
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 0ac8fc49c1..c7ca43b5cf 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -145,9 +145,7 @@ def get_metadata(name):
         wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
         wheel_v1 = parse_version('1.0') <= wheel_version < parse_version('2.0dev0')
         if not wheel_v1:
-            raise ValueError(
-                f'unsupported wheel format version: {wheel_version}'
-            )
+            raise ValueError(f'unsupported wheel format version: {wheel_version}')
         # Extract to target directory.
         _unpack_zipfile_obj(zf, destination_eggdir)
         # Convert metadata.

From 578d630f935dbb3a0a3c001c77b49a369f1b1b58 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 10 Sep 2024 12:35:07 +0200
Subject: [PATCH 1357/1761] Manual fixes after `ruff check --fix` runs

---
 pkg_resources/__init__.py                | 11 +++++++----
 setuptools/_core_metadata.py             |  4 ++--
 setuptools/command/bdist_egg.py          |  2 +-
 setuptools/command/build_ext.py          |  2 +-
 setuptools/command/easy_install.py       |  7 ++++---
 setuptools/dist.py                       |  3 +--
 setuptools/tests/config/test_setupcfg.py |  8 ++++----
 setuptools/tests/test_build_ext.py       |  4 ++--
 8 files changed, 22 insertions(+), 19 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 3a387d08e5..871f7344e4 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -200,7 +200,9 @@ def get_supported_platform():
     m = macosVersionString.match(plat)
     if m is not None and sys.platform == "darwin":
         try:
-            plat = 'macosx-{}-{}'.format('.'.join(_macos_vers()[:2]), m.group(3))
+            major_minor = '.'.join(_macos_vers()[:2])
+            build = m.group(3)
+            plat = f'macosx-{major_minor}-{build}'
         except ValueError:
             # not macOS
             pass
@@ -2733,7 +2735,8 @@ def __str__(self) -> str:
         if self.attrs:
             s += ':' + '.'.join(self.attrs)
         if self.extras:
-            s += ' [{}]'.format(','.join(self.extras))
+            extras = ','.join(self.extras)
+            s += f' [{extras}]'
         return s
 
     def __repr__(self) -> str:
@@ -3319,8 +3322,8 @@ def check_version_conflict(self):
             ):
                 continue
             issue_warning(
-                f"Module {modname} was already imported from {fn}, but {self.location} is being added"
-                " to sys.path",
+                f"Module {modname} was already imported from {fn}, "
+                f"but {self.location} is being added to sys.path",
             )
 
     def has_version(self) -> bool:
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index a5e18a4006..642b80df31 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -178,8 +178,8 @@ def write_field(key, value):
     if license:
         write_field('License', rfc822_escape(license))
 
-    for project_url in self.project_urls.items():
-        write_field('Project-URL', '{}, {}'.format(*project_url))
+    for label, url in self.project_urls.items():
+        write_field('Project-URL', f'{label}, {url}')
 
     keywords = ','.join(self.get_keywords())
     if keywords:
diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py
index 9a08b36585..7f66c3ba6a 100644
--- a/setuptools/command/bdist_egg.py
+++ b/setuptools/command/bdist_egg.py
@@ -69,7 +69,7 @@ def __bootstrap__():
 
 
 class bdist_egg(Command):
-    description = "create an \"egg\" distribution"
+    description = 'create an "egg" distribution'
 
     user_options = [
         ('bdist-dir=', 'b', "temporary directory for creating the distribution"),
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py
index 6e56ae4ca2..be833a379c 100644
--- a/setuptools/command/build_ext.py
+++ b/setuptools/command/build_ext.py
@@ -168,7 +168,7 @@ def get_ext_filename(self, fullname: str) -> str:
             if not isinstance(ext_suffix, str):
                 raise OSError(
                     "Configuration variable EXT_SUFFIX not found for this platform "
-                    + "and environment variable SETUPTOOLS_EXT_SUFFIX is missing"
+                    "and environment variable SETUPTOOLS_EXT_SUFFIX is missing"
                 )
             so_ext = ext_suffix
 
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 4a93ff077e..eb1b4c1fcc 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -132,8 +132,8 @@ class easy_install(Command):
         (
             'optimize=',
             'O',
-            "also compile with optimization: -O1 for \"python -O\", "
-            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
+            'also compile with optimization: -O1 for "python -O", '
+            '-O2 for "python -OO", and -O0 to disable [default: -O0]',
         ),
         ('record=', None, "filename in which to record list of installed files"),
         ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
@@ -1024,7 +1024,8 @@ def install_exe(self, dist_filename, tmpdir):
                 f.write('Metadata-Version: 1.0\n')
                 for k, v in cfg.items('metadata'):
                     if k != 'target_version':
-                        f.write('{}: {}\n'.format(k.replace('_', '-').title(), v))
+                        k = k.replace('_', '-').title()
+                        f.write(f'{k}: {v}\n')
         script_dir = os.path.join(_egg_info, 'scripts')
         # delete entry-point scripts to avoid duping
         self.delete_blockers([
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 18ba1883f2..ba45504aa8 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -110,8 +110,7 @@ def check_nsp(dist, attr, value):
     for nsp in ns_packages:
         if not dist.has_contents_for(nsp):
             raise DistutilsSetupError(
-                "Distribution contains no modules or packages for "
-                + f"namespace package {nsp!r}"
+                f"Distribution contains no modules or packages for namespace package {nsp!r}"
             )
         parent, _sep, _child = nsp.rpartition('.')
         if parent and parent not in ns_packages:
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 407419b2ea..adadc02da3 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -87,14 +87,14 @@ def test_basic(self, tmpdir):
             '[options]\n'
             'scripts = bin/a.py, bin/b.py\n',
         )
-        config_dict = read_configuration(f'{config}')
+        config_dict = read_configuration(str(config))
         assert config_dict['metadata']['version'] == '10.1.1'
         assert config_dict['metadata']['keywords'] == ['one', 'two']
         assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']
 
     def test_no_config(self, tmpdir):
         with pytest.raises(DistutilsFileError):
-            read_configuration('{}'.format(tmpdir.join('setup.cfg')))
+            read_configuration(str(tmpdir.join('setup.cfg')))
 
     def test_ignore_errors(self, tmpdir):
         _, config = fake_env(
@@ -102,9 +102,9 @@ def test_ignore_errors(self, tmpdir):
             '[metadata]\nversion = attr: none.VERSION\nkeywords = one, two\n',
         )
         with pytest.raises(ImportError):
-            read_configuration(f'{config}')
+            read_configuration(str(config))
 
-        config_dict = read_configuration(f'{config}', ignore_option_errors=True)
+        config_dict = read_configuration(str(config), ignore_option_errors=True)
 
         assert config_dict['metadata']['keywords'] == ['one', 'two']
         assert 'version' not in config_dict['metadata']
diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py
index be7b7cc01c..c7b60ac32f 100644
--- a/setuptools/tests/test_build_ext.py
+++ b/setuptools/tests/test_build_ext.py
@@ -286,8 +286,8 @@ def test_build_ext_config_handling(tmpdir_cwd):
         ),
     }
     path.build(files)
-    code, output = environment.run_setup_py(
+    code, (stdout, stderr) = environment.run_setup_py(
         cmd=['build'],
         data_stream=(0, 2),
     )
-    assert code == 0, '\nSTDOUT:\n{}\nSTDERR:\n{}'.format(*output)
+    assert code == 0, f'\nSTDOUT:\n{stdout}\nSTDERR:\n{stderr}'

From 35365c6cd523941b94b6d7f763505c2d820a9bae Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 10 Sep 2024 12:42:35 +0200
Subject: [PATCH 1358/1761] Manually remove more .format() calls

---
 setuptools/command/bdist_wheel.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 234df2a7c7..994b4b4167 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -184,9 +184,7 @@ class bdist_wheel(Command):
         (
             "compression=",
             None,
-            "zipfile compression (one of: {}) [default: 'deflated']".format(
-                ", ".join(supported_compressions)
-            ),
+            f"zipfile compression (one of: {', '.join(supported_compressions)}) [default: 'deflated']",
         ),
         (
             "python-tag=",

From f65ea5b380efdd828f3c3d76e682144177bcdc1a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 10 Sep 2024 12:42:35 +0200
Subject: [PATCH 1359/1761] Manually remove more %-formatting

---
 docs/userguide/extension.rst |  2 +-
 pkg_resources/__init__.py    |  8 ++------
 setuptools/msvc.py           | 14 +++++++-------
 3 files changed, 10 insertions(+), 14 deletions(-)

diff --git a/docs/userguide/extension.rst b/docs/userguide/extension.rst
index e1e37b5db1..ef5e33f3a8 100644
--- a/docs/userguide/extension.rst
+++ b/docs/userguide/extension.rst
@@ -122,7 +122,7 @@ a non-``None`` value.  Here's an example validation function::
         """Verify that value is True, False, 0, or 1"""
         if bool(value) != value:
             raise SetupError(
-                "%r must be a boolean value (got %r)" % (attr,value)
+                f"{attr!r} must be a boolean value (got {value!r}"
             )
 
 Your function should accept three arguments: the ``Distribution`` object,
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 871f7344e4..87cfa75218 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -451,12 +451,8 @@ def get_build_platform():
     if sys.platform == "darwin" and not plat.startswith('macosx-'):
         try:
             version = _macos_vers()
-            machine = os.uname()[4].replace(" ", "_")
-            return "macosx-%d.%d-%s" % (
-                int(version[0]),
-                int(version[1]),
-                _macos_arch(machine),
-            )
+            machine = _macos_arch(os.uname()[4].replace(" ", "_"))
+            return f"macosx-{version[0]}.{version[1]}-{machine}"
         except ValueError:
             # if someone is running a non-Mac darwin system, this will fall
             # through to the default implementation
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 7f006c3c1a..9c9a63568e 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -667,8 +667,8 @@ def WindowsSDKExecutablePath(self):
         else:
             netfxver = 40
             hidex86 = True if self.vs_ver <= 12.0 else False
-            arch = self.pi.current_dir(x64=True, hidex86=hidex86)
-        fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-'))
+            arch = self.pi.current_dir(x64=True, hidex86=hidex86).replace('\\', '-')
+        fx = f'WinSDK-NetFx{netfxver}Tools{arch}'
 
         # list all possibles registry paths
         regpaths = []
@@ -839,8 +839,8 @@ def _find_dot_net_versions(self, bits) -> tuple[str, ...]:
             versions
         """
         # Find actual .NET version in registry
-        reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits)
-        dot_net_dir = getattr(self, 'FrameworkDir%d' % bits)
+        reg_ver = self.ri.lookup(self.ri.vc, f'frameworkver{bits}')
+        dot_net_dir = getattr(self, f'FrameworkDir{bits}')
         ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or ''
 
         # Set .NET versions for specified MSVC++ version
@@ -1404,7 +1404,7 @@ def VCRuntimeRedist(self) -> str | None:
 
         Returns the first suitable path found or None.
         """
-        vcruntime = 'vcruntime%d0.dll' % self.vc_ver
+        vcruntime = f'vcruntime{self.vc_ver}0.dll'
         arch_subdir = self.pi.target_dir(x64=True).strip('\\')
 
         # Installation prefixes candidates
@@ -1420,9 +1420,9 @@ def VCRuntimeRedist(self) -> str | None:
 
         # CRT directory
         crt_dirs = (
-            'Microsoft.VC%d.CRT' % (self.vc_ver * 10),
+            f'Microsoft.VC{self.vc_ver * 10}.CRT',
             # Sometime store in directory with VS version instead of VC
-            'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10),
+            f'Microsoft.VC{int(self.vs_ver) * 10}.CRT',
         )
 
         # vcruntime path

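For context on the `extension.rst` hunk above: a setup-keyword validation
function receives the `Distribution` object, the attribute name, and the
value. A minimal self-contained sketch of such a validator (the
`assert_bool` name and `DistutilsSetupError` are illustrative stand-ins for
the doc excerpt's `SetupError`):

    from distutils.errors import DistutilsSetupError

    def assert_bool(dist, attr, value):
        """Verify that value is True, False, 0, or 1."""
        if bool(value) != value:
            raise DistutilsSetupError(
                f"{attr!r} must be a boolean value (got {value!r})"
            )
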
From 3acab2caf956f16befee832223e63fecb28562d4 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Fri, 3 Jan 2025 02:11:44 -0500
Subject: [PATCH 1360/1761] Simplified typed assignments

---
 pkg_resources/__init__.py            |  4 +-
 setuptools/command/_requirestxt.py   |  4 +-
 setuptools/command/bdist_wheel.py    |  2 +-
 setuptools/command/editable_wheel.py |  2 +-
 setuptools/config/pyprojecttoml.py   |  2 +-
 setuptools/config/setupcfg.py        |  2 +-
 setuptools/dist.py                   |  4 +-
 setuptools/msvc.py                   | 87 ++++++++++++++--------------
 setuptools/tests/test_wheel.py       | 20 +++----
 9 files changed, 62 insertions(+), 65 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 74b0465bfa..d175100f2a 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -875,9 +875,7 @@ def resolve(
 
         # Mapping of requirement to set of distributions that required it;
         # useful for reporting info about conflicts.
-        required_by: collections.defaultdict[Requirement, set[str]] = (
-            collections.defaultdict(set)
-        )
+        required_by = collections.defaultdict[Requirement, set[str]](set)
 
         while requirements:
             # process dependencies breadth-first
diff --git a/setuptools/command/_requirestxt.py b/setuptools/command/_requirestxt.py
index 171f41b87e..9029b12514 100644
--- a/setuptools/command/_requirestxt.py
+++ b/setuptools/command/_requirestxt.py
@@ -38,13 +38,13 @@ def _prepare(
 
 def _convert_extras_requirements(
     extras_require: Mapping[str, _StrOrIter],
-) -> Mapping[str, _Ordered[Requirement]]:
+) -> defaultdict[str, _Ordered[Requirement]]:
     """
     Convert requirements in `extras_require` of the form
     `"extra": ["barbazquux; {marker}"]` to
     `"extra:{marker}": ["barbazquux"]`.
     """
-    output: Mapping[str, _Ordered[Requirement]] = defaultdict(dict)
+    output = defaultdict[str, _Ordered[Requirement]](dict)
     for section, v in extras_require.items():
         # Do not strip empty sections.
         output[section]
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 234df2a7c7..32ed1378c9 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -500,7 +500,7 @@ def license_paths(self) -> Iterable[str]:
             # Setuptools has resolved any patterns to actual file names
             return self.distribution.metadata.license_files or ()
 
-        files: set[str] = set()
+        files = set[str]()
         metadata = self.distribution.get_option_dict("metadata")
         if setuptools_major_version >= 42:
             # Setuptools recognizes the license_files option but does not do globbing
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 6d23d11fad..b03e677757 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -506,7 +506,7 @@ def template_vars(self) -> tuple[str, str, dict[str, str], dict[str, list[str]]]
         package_dir = self.dist.package_dir or {}
         roots = _find_package_roots(top_level, package_dir, src_root)
 
-        namespaces_: dict[str, list[str]] = dict(
+        namespaces_ = dict(
             chain(
                 _find_namespaces(self.dist.packages or [], roots),
                 ((ns, []) for ns in _find_virtual_namespaces(roots)),
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 15b0baa18e..fd6c5968c8 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -185,7 +185,7 @@ def __init__(
         self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {})
         self.ignore_option_errors = ignore_option_errors
         self._dist = dist
-        self._referenced_files: set[str] = set()
+        self._referenced_files = set[str]()
 
     def _ensure_dist(self) -> Distribution:
         from setuptools.dist import Distribution
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index b35d0b00cd..0d0d73c77c 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -253,7 +253,7 @@ def __init__(
         self.sections = dict(self._section_options(options))
         self.set_options: list[str] = []
         self.ensure_discovered = ensure_discovered
-        self._referenced_files: set[str] = set()
+        self._referenced_files = set[str]()
         """After parsing configurations, this property will enumerate
         all files referenced by the "file:" directive. Private API for setuptools only.
         """
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 5b3175fb5b..5c7e0da4e7 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -321,7 +321,7 @@ def __init__(self, attrs: MutableMapping[str, Any] | None = None) -> None:
         # Private API (setuptools-use only, not restricted to Distribution)
         # Stores files that are referenced by the configuration and need to be in the
         # sdist (e.g. `version = file: VERSION.txt`)
-        self._referenced_files: set[str] = set()
+        self._referenced_files = set[str]()
 
         self.set_defaults = ConfigDiscovery(self)
 
@@ -400,7 +400,7 @@ def _normalize_requires(self):
     def _finalize_license_files(self) -> None:
         """Compute names of all license files which should be included."""
         license_files: list[str] | None = self.metadata.license_files
-        patterns: list[str] = license_files if license_files else []
+        patterns = license_files or []
 
         license_file: str | None = self.metadata.license_file
         if license_file and license_file not in patterns:
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 8d6d2cf084..9c9a63568e 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -108,7 +108,7 @@ def current_dir(self, hidex86=False, x64=False) -> str:
             if (self.current_cpu == 'x86' and hidex86)
             else r'\x64'
             if (self.current_cpu == 'amd64' and x64)
-            else r'\%s' % self.current_cpu
+            else rf'\{self.current_cpu}'
         )
 
     def target_dir(self, hidex86=False, x64=False) -> str:
@@ -132,7 +132,7 @@ def target_dir(self, hidex86=False, x64=False) -> str:
             if (self.target_cpu == 'x86' and hidex86)
             else r'\x64'
             if (self.target_cpu == 'amd64' and x64)
-            else r'\%s' % self.target_cpu
+            else rf'\{self.target_cpu}'
         )
 
     def cross_dir(self, forcex86=False):
@@ -155,7 +155,7 @@ def cross_dir(self, forcex86=False):
         return (
             ''
             if self.target_cpu == current
-            else self.target_dir().replace('\\', '\\%s_' % current)
+            else self.target_dir().replace('\\', f'\\{current}_')
         )
 
 
@@ -497,11 +497,11 @@ def VSInstallDir(self):
         """
         # Default path
         default = os.path.join(
-            self.ProgramFilesx86, 'Microsoft Visual Studio %0.1f' % self.vs_ver
+            self.ProgramFilesx86, f'Microsoft Visual Studio {self.vs_ver:0.1f}'
         )
 
         # Try to get path from registry, if fail use default path
-        return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default
+        return self.ri.lookup(self.ri.vs, f'{self.vs_ver:0.1f}') or default
 
     @property
     def VCInstallDir(self):
@@ -561,16 +561,17 @@ def _guess_vc_legacy(self):
             path
         """
         default = os.path.join(
-            self.ProgramFilesx86, r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver
+            self.ProgramFilesx86,
+            rf'Microsoft Visual Studio {self.vs_ver:0.1f}\VC',
         )
 
         # Try to get "VC++ for Python" path from registry as default path
-        reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
+        reg_path = os.path.join(self.ri.vc_for_python, f'{self.vs_ver:0.1f}')
         python_vc = self.ri.lookup(reg_path, 'installdir')
         default_vc = os.path.join(python_vc, 'VC') if python_vc else default
 
         # Try to get path from registry, if fail use default path
-        return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc
+        return self.ri.lookup(self.ri.vc, f'{self.vs_ver:0.1f}') or default_vc
 
     @property
     def WindowsSdkVersion(self) -> tuple[LiteralString, ...]:
@@ -619,13 +620,13 @@ def WindowsSdkDir(self) -> str | None:  # noqa: C901  # is too complex (12)  # F
         sdkdir: str | None = ''
         for ver in self.WindowsSdkVersion:
             # Try to get it from registry
-            loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver)
+            loc = os.path.join(self.ri.windows_sdk, f'v{ver}')
             sdkdir = self.ri.lookup(loc, 'installationfolder')
             if sdkdir:
                 break
         if not sdkdir or not os.path.isdir(sdkdir):
             # Try to get "VC++ for Python" version from registry
-            path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
+            path = os.path.join(self.ri.vc_for_python, f'{self.vc_ver:0.1f}')
             install_base = self.ri.lookup(path, 'installdir')
             if install_base:
                 sdkdir = os.path.join(install_base, 'WinSDK')
@@ -633,14 +634,14 @@ def WindowsSdkDir(self) -> str | None:  # noqa: C901  # is too complex (12)  # F
             # If fail, use default new path
             for ver in self.WindowsSdkVersion:
                 intver = ver[: ver.rfind('.')]
-                path = r'Microsoft SDKs\Windows Kits\%s' % intver
+                path = rf'Microsoft SDKs\Windows Kits\{intver}'
                 d = os.path.join(self.ProgramFiles, path)
                 if os.path.isdir(d):
                     sdkdir = d
         if not sdkdir or not os.path.isdir(sdkdir):
             # If fail, use default old path
             for ver in self.WindowsSdkVersion:
-                path = r'Microsoft SDKs\Windows\v%s' % ver
+                path = rf'Microsoft SDKs\Windows\v{ver}'
                 d = os.path.join(self.ProgramFiles, path)
                 if os.path.isdir(d):
                     sdkdir = d
@@ -666,8 +667,8 @@ def WindowsSDKExecutablePath(self):
         else:
             netfxver = 40
             hidex86 = True if self.vs_ver <= 12.0 else False
-            arch = self.pi.current_dir(x64=True, hidex86=hidex86)
-        fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-'))
+            arch = self.pi.current_dir(x64=True, hidex86=hidex86).replace('\\', '-')
+        fx = f'WinSDK-NetFx{netfxver}Tools{arch}'
 
         # list all possibles registry paths
         regpaths = []
@@ -676,7 +677,7 @@ def WindowsSDKExecutablePath(self):
                 regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)]
 
         for ver in self.WindowsSdkVersion:
-            regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
+            regpaths += [os.path.join(self.ri.windows_sdk, f'v{ver}A', fx)]
 
         # Return installation folder from the more recent path
         for path in regpaths:
@@ -696,7 +697,7 @@ def FSharpInstallDir(self):
         str
             path
         """
-        path = os.path.join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
+        path = os.path.join(self.ri.visualstudio, rf'{self.vs_ver:0.1f}\Setup\F#')
         return self.ri.lookup(path, 'productdir') or ''
 
     @property
@@ -714,7 +715,7 @@ def UniversalCRTSdkDir(self):
 
         # Find path of the more recent Kit
         for ver in vers:
-            sdkdir = self.ri.lookup(self.ri.windows_kits_roots, 'kitsroot%s' % ver)
+            sdkdir = self.ri.lookup(self.ri.windows_kits_roots, f'kitsroot{ver}')
             if sdkdir:
                 return sdkdir or ''
 
@@ -838,8 +839,8 @@ def _find_dot_net_versions(self, bits) -> tuple[str, ...]:
             versions
         """
         # Find actual .NET version in registry
-        reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits)
-        dot_net_dir = getattr(self, 'FrameworkDir%d' % bits)
+        reg_ver = self.ri.lookup(self.ri.vc, f'frameworkver{bits}')
+        dot_net_dir = getattr(self, f'FrameworkDir{bits}')
         ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or ''
 
         # Set .NET versions for specified MSVC++ version
@@ -960,7 +961,7 @@ def VSTools(self):
             arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
             paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']
             paths += [r'Team Tools\Performance Tools']
-            paths += [r'Team Tools\Performance Tools%s' % arch_subdir]
+            paths += [rf'Team Tools\Performance Tools{arch_subdir}']
 
         return [os.path.join(self.si.VSInstallDir, path) for path in paths]
 
@@ -993,10 +994,10 @@ def VCLibraries(self):
             arch_subdir = self.pi.target_dir(x64=True)
         else:
             arch_subdir = self.pi.target_dir(hidex86=True)
-        paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir]
+        paths = [f'Lib{arch_subdir}', rf'ATLMFC\Lib{arch_subdir}']
 
         if self.vs_ver >= 14.0:
-            paths += [r'Lib\store%s' % arch_subdir]
+            paths += [rf'Lib\store{arch_subdir}']
 
         return [os.path.join(self.si.VCInstallDir, path) for path in paths]
 
@@ -1030,10 +1031,10 @@ def VCTools(self):
         forcex86 = True if self.vs_ver <= 10.0 else False
         arch_subdir = self.pi.cross_dir(forcex86)
         if arch_subdir:
-            tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
+            tools += [os.path.join(si.VCInstallDir, f'Bin{arch_subdir}')]
 
         if self.vs_ver == 14.0:
-            path = 'Bin%s' % self.pi.current_dir(hidex86=True)
+            path = f'Bin{self.pi.current_dir(hidex86=True)}'
             tools += [os.path.join(si.VCInstallDir, path)]
 
         elif self.vs_ver >= 15.0:
@@ -1068,13 +1069,13 @@ def OSLibraries(self):
         """
         if self.vs_ver <= 10.0:
             arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
-            return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
+            return [os.path.join(self.si.WindowsSdkDir, f'Lib{arch_subdir}')]
 
         else:
             arch_subdir = self.pi.target_dir(x64=True)
             lib = os.path.join(self.si.WindowsSdkDir, 'lib')
             libver = self._sdk_subdir
-            return [os.path.join(lib, '%sum%s' % (libver, arch_subdir))]
+            return [os.path.join(lib, f'{libver}um{arch_subdir}')]
 
     @property
     def OSIncludes(self):
@@ -1097,9 +1098,9 @@ def OSIncludes(self):
             else:
                 sdkver = ''
             return [
-                os.path.join(include, '%sshared' % sdkver),
-                os.path.join(include, '%sum' % sdkver),
-                os.path.join(include, '%swinrt' % sdkver),
+                os.path.join(include, f'{sdkver}shared'),
+                os.path.join(include, f'{sdkver}um'),
+                os.path.join(include, f'{sdkver}winrt'),
             ]
 
     @property
@@ -1134,7 +1135,7 @@ def OSLibpath(self):
                     self.si.WindowsSdkDir,
                     'ExtensionSDKs',
                     'Microsoft.VCLibs',
-                    '%0.1f' % self.vs_ver,
+                    f'{self.vs_ver:0.1f}',
                     'References',
                     'CommonConfiguration',
                     'neutral',
@@ -1169,7 +1170,7 @@ def _sdk_tools(self):
 
         if not self.pi.current_is_x86():
             arch_subdir = self.pi.current_dir(x64=True)
-            path = 'Bin%s' % arch_subdir
+            path = f'Bin{arch_subdir}'
             yield os.path.join(self.si.WindowsSdkDir, path)
 
         if self.vs_ver in (10.0, 11.0):
@@ -1177,14 +1178,14 @@ def _sdk_tools(self):
                 arch_subdir = ''
             else:
                 arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
-            path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
+            path = rf'Bin\NETFX 4.0 Tools{arch_subdir}'
             yield os.path.join(self.si.WindowsSdkDir, path)
 
         elif self.vs_ver >= 15.0:
             path = os.path.join(self.si.WindowsSdkDir, 'Bin')
             arch_subdir = self.pi.current_dir(x64=True)
             sdkver = self.si.WindowsSdkLastVersion
-            yield os.path.join(path, '%s%s' % (sdkver, arch_subdir))
+            yield os.path.join(path, f'{sdkver}{arch_subdir}')
 
         if self.si.WindowsSDKExecutablePath:
             yield self.si.WindowsSDKExecutablePath
@@ -1200,7 +1201,7 @@ def _sdk_subdir(self):
             subdir
         """
         ucrtver = self.si.WindowsSdkLastVersion
-        return ('%s\\' % ucrtver) if ucrtver else ''
+        return (f'{ucrtver}\\') if ucrtver else ''
 
     @property
     def SdkSetup(self):
@@ -1262,7 +1263,7 @@ def NetFxSDKLibraries(self):
             return []
 
         arch_subdir = self.pi.target_dir(x64=True)
-        return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
+        return [os.path.join(self.si.NetFxSdkDir, rf'lib\um{arch_subdir}')]
 
     @property
     def NetFxSDKIncludes(self):
@@ -1310,7 +1311,7 @@ def MSBuild(self):
             base_path = self.si.VSInstallDir
             arch_subdir = ''
 
-        path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir)
+        path = rf'MSBuild\{self.vs_ver:0.1f}\bin{arch_subdir}'
         build = [os.path.join(base_path, path)]
 
         if self.vs_ver >= 15.0:
@@ -1350,7 +1351,7 @@ def UCRTLibraries(self):
         arch_subdir = self.pi.target_dir(x64=True)
         lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib')
         ucrtver = self._ucrt_subdir
-        return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
+        return [os.path.join(lib, f'{ucrtver}ucrt{arch_subdir}')]
 
     @property
     def UCRTIncludes(self):
@@ -1366,7 +1367,7 @@ def UCRTIncludes(self):
             return []
 
         include = os.path.join(self.si.UniversalCRTSdkDir, 'include')
-        return [os.path.join(include, '%sucrt' % self._ucrt_subdir)]
+        return [os.path.join(include, f'{self._ucrt_subdir}ucrt')]
 
     @property
     def _ucrt_subdir(self):
@@ -1379,7 +1380,7 @@ def _ucrt_subdir(self):
             subdir
         """
         ucrtver = self.si.UniversalCRTSdkLastVersion
-        return ('%s\\' % ucrtver) if ucrtver else ''
+        return (f'{ucrtver}\\') if ucrtver else ''
 
     @property
     def FSharp(self):
@@ -1403,7 +1404,7 @@ def VCRuntimeRedist(self) -> str | None:
 
         Returns the first suitable path found or None.
         """
-        vcruntime = 'vcruntime%d0.dll' % self.vc_ver
+        vcruntime = f'vcruntime{self.vc_ver}0.dll'
         arch_subdir = self.pi.target_dir(x64=True).strip('\\')
 
         # Installation prefixes candidates
@@ -1419,9 +1420,9 @@ def VCRuntimeRedist(self) -> str | None:
 
         # CRT directory
         crt_dirs = (
-            'Microsoft.VC%d.CRT' % (self.vc_ver * 10),
+            f'Microsoft.VC{self.vc_ver * 10}.CRT',
             # Sometime store in directory with VS version instead of VC
-            'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10),
+            f'Microsoft.VC{int(self.vs_ver) * 10}.CRT',
         )
 
         # vcruntime path
@@ -1520,7 +1521,7 @@ def _build_paths(self, name, spec_path_lists, exists):
         paths = itertools.chain(spec_paths, env_paths)
         extant_paths = list(filter(os.path.isdir, paths)) if exists else paths
         if not extant_paths:
-            msg = "%s environment variable is empty" % name.upper()
+            msg = f"{name.upper()} environment variable is empty"
             raise distutils.errors.DistutilsPlatformError(msg)
         unique_paths = unique_everseen(extant_paths)
         return os.pathsep.join(unique_paths)
diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index 5724c6eabc..70165c608b 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -176,7 +176,7 @@ def __init__(self, id, **kwargs):
         self._fields = kwargs
 
     def __repr__(self) -> str:
-        return '%s(**%r)' % (self._id, self._fields)
+        return f'{self._id}(**{self._fields!r})'
 
 
 # Using Any to avoid possible type union issues later in test
@@ -367,11 +367,10 @@ def __repr__(self) -> str:
     ),
     dict(
         id='requires2',
-        install_requires="""
+        install_requires=f"""
         bar
-        foo<=2.0; %r in sys_platform
-        """
-        % sys.platform,
+        foo<=2.0; {sys.platform!r} in sys_platform
+        """,
         requires_txt=DALS(
             """
             bar
@@ -381,10 +380,9 @@ def __repr__(self) -> str:
     ),
     dict(
         id='requires3',
-        install_requires="""
-        bar; %r != sys_platform
-        """
-        % sys.platform,
+        install_requires=f"""
+        bar; {sys.platform!r} != sys_platform
+        """,
     ),
     dict(
         id='requires4',
@@ -406,7 +404,7 @@ def __repr__(self) -> str:
     dict(
         id='requires5',
         extras_require={
-            'extra': 'foobar; %r != sys_platform' % sys.platform,
+            'extra': f'foobar; {sys.platform!r} != sys_platform',
         },
         requires_txt=DALS(
             """
@@ -605,7 +603,7 @@ def test_wheel_install_pep_503():
 def test_wheel_no_dist_dir():
     project_name = 'nodistinfo'
     version = '1.0'
-    wheel_name = '{0}-{1}-py2.py3-none-any.whl'.format(project_name, version)
+    wheel_name = f'{project_name}-{version}-py2.py3-none-any.whl'
     with tempdir() as source_dir:
         wheel_path = os.path.join(source_dir, wheel_name)
         # create an empty zip file

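The "simplified typed assignments" above rely on subscripted builtin
generics (PEP 585) being ordinary callable objects at runtime; a small
sketch:

    import collections

    # set[str]() constructs a plain set; the subscript only informs type checkers.
    files = set[str]()
    files.add('LICENSE')

    # Likewise the subscripted defaultdict alias accepts the usual factory.
    required_by = collections.defaultdict[str, set[str]](set)
    required_by['pkg'].add('dep')
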
From 39a607d25def76ef760334a494554847da8c8f0f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 3 Jan 2025 10:23:13 -0500
Subject: [PATCH 1361/1761] Bump badge for 2025.

---
 README.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.rst b/README.rst
index efabeee4f9..4d3cabee9d 100644
--- a/README.rst
+++ b/README.rst
@@ -14,5 +14,5 @@
 .. .. image:: https://readthedocs.org/projects/PROJECT_RTD/badge/?version=latest
 ..    :target: https://PROJECT_RTD.readthedocs.io/en/latest/?badge=latest
 
-.. image:: https://img.shields.io/badge/skeleton-2024-informational
+.. image:: https://img.shields.io/badge/skeleton-2025-informational
    :target: https://blog.jaraco.com/skeleton

From d13d5a7d9210114447aae0ba1a814ae6af8aeffe Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 4 Jan 2025 04:39:08 -0500
Subject: [PATCH 1362/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/ccompiler.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 6979d160eb..fbf1f7a4cf 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -989,7 +989,11 @@ def _make_relative(base):
         # Chop off the drive
         no_drive = os.path.splitdrive(base)[1]
         # If abs, chop off leading /
-        is_abs = os.path.isabs(no_drive) or sys.platform == 'win32' and no_drive.startswith(('/', "\\"))
+        is_abs = (
+            os.path.isabs(no_drive)
+            or sys.platform == 'win32'
+            and no_drive.startswith(('/', "\\"))
+        )
         return no_drive[is_abs:]
 
     def shared_object_filename(self, basename, strip_dir=False, output_dir=''):

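The reflow above is purely cosmetic: `and` binds tighter than `or`, so with
or without the added parentheses the expression parses as
`isabs(...) or (win32 and startswith(...))`. A tiny demonstration:

    # Both spellings group the same way; the third shows the other grouping.
    print(True or False and False)    # True == True or (False and False)
    print(True or (False and False))  # True
    print((True or False) and False)  # False
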
From 1400152e10663f51109869b9d387c50820a95767 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 4 Jan 2025 04:56:38 -0500
Subject: [PATCH 1363/1761] Extract a classmethod _make_out_path_exts suitable
 for isolated testing.

---
 distutils/ccompiler.py | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index fbf1f7a4cf..6e303c34e6 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -969,10 +969,16 @@ def out_extensions(self):
         return dict.fromkeys(self.src_extensions, self.obj_extension)
 
     def _make_out_path(self, output_dir, strip_dir, src_name):
+        return self._make_out_path_exts(
+            output_dir, strip_dir, src_name, self.out_extensions
+        )
+
+    @classmethod
+    def _make_out_path_exts(cls, output_dir, strip_dir, src_name, extensions):
         base, ext = os.path.splitext(src_name)
-        base = self._make_relative(base)
+        base = cls._make_relative(base)
         try:
-            new_ext = self.out_extensions[ext]
+            new_ext = extensions[ext]
         except LookupError:
             raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')")
         if strip_dir:

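A sketch of the kind of isolated test the extraction enables (hypothetical
test, not part of the patch; it assumes `distutils.ccompiler` is
importable):

    import os
    from distutils.ccompiler import CCompiler

    def test_make_out_path_exts_isolated():
        # No compiler instance needed: the extension map is passed explicitly.
        exts = {'.c': '.o'}
        src = os.path.join('src', 'foo.c')
        out = CCompiler._make_out_path_exts('build', False, src, exts)
        assert out == os.path.join('build', 'src', 'foo.o')
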
From b16c1de88e33bea41ec5c74abdb225e9f8704a3a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 4 Jan 2025 05:12:54 -0500
Subject: [PATCH 1364/1761] Re-write _make_relative to rely on pathlib for
 cross-platform and cross-python compatibility.

---
 distutils/ccompiler.py | 24 +++++++-----------------
 1 file changed, 7 insertions(+), 17 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 6e303c34e6..9b637f8c98 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -4,6 +4,7 @@
 for the Distutils compiler abstraction model."""
 
 import os
+import pathlib
 import re
 import sys
 import types
@@ -976,31 +977,20 @@ def _make_out_path(self, output_dir, strip_dir, src_name):
     @classmethod
     def _make_out_path_exts(cls, output_dir, strip_dir, src_name, extensions):
         base, ext = os.path.splitext(src_name)
+        base = pathlib.PurePath(base)
+        # Ensure base is relative to honor output_dir (python/cpython#37775).
         base = cls._make_relative(base)
         try:
             new_ext = extensions[ext]
         except LookupError:
             raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')")
         if strip_dir:
-            base = os.path.basename(base)
-        return os.path.join(output_dir, base + new_ext)
+            base = base.name
+        return os.path.join(output_dir, base.with_suffix(new_ext))
 
     @staticmethod
-    def _make_relative(base):
-        """
-        In order to ensure that a filename always honors the
-        indicated output_dir, make sure it's relative.
-        Ref python/cpython#37775.
-        """
-        # Chop off the drive
-        no_drive = os.path.splitdrive(base)[1]
-        # If abs, chop off leading /
-        is_abs = (
-            os.path.isabs(no_drive)
-            or sys.platform == 'win32'
-            and no_drive.startswith(('/', "\\"))
-        )
-        return no_drive[is_abs:]
+    def _make_relative(base: pathlib.Path):
+        return base.relative_to(base.anchor)
 
     def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
         assert output_dir is not None

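The pathlib-based `_make_relative` strips the drive and root in one step via
`relative_to(anchor)`; a sketch of the behavior on both path flavors:

    import pathlib

    # anchor is drive + root; relative_to(anchor) leaves a relative path.
    p = pathlib.PureWindowsPath(r'C:\foo\bar')
    print(p.relative_to(p.anchor))  # foo\bar

    q = pathlib.PurePosixPath('/foo/bar')
    print(q.relative_to(q.anchor))  # foo/bar
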
From 57152cf9766590e205346752aa632842b5e8a741 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 4 Jan 2025 05:19:35 -0500
Subject: [PATCH 1365/1761] Move suppress_path_mangle to test_ccompiler to
 limit the scope.

---
 conftest.py                       | 2 +-
 distutils/tests/test_ccompiler.py | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/conftest.py b/conftest.py
index 3b9444f78c..578b7aac59 100644
--- a/conftest.py
+++ b/conftest.py
@@ -92,7 +92,7 @@ def monkeysession(request):
     mpatch.undo()
 
 
-@pytest.fixture(autouse=True, scope="session")
+@pytest.fixture(scope="module")
 def suppress_path_mangle(monkeysession):
     """
     Disable the path mangling in CCompiler. Workaround for #169.
diff --git a/distutils/tests/test_ccompiler.py b/distutils/tests/test_ccompiler.py
index d23b907cad..7ebfed56be 100644
--- a/distutils/tests/test_ccompiler.py
+++ b/distutils/tests/test_ccompiler.py
@@ -7,6 +7,8 @@
 
 import pytest
 
+pytestmark = pytest.mark.usefixtures('suppress_path_mangle')
+
 
 def _make_strs(paths):
     """

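The module-level `pytestmark` above applies the fixture to every test in the
file without each test naming it; a minimal standalone sketch of the pattern
(the fixture body here is illustrative):

    import pytest

    @pytest.fixture(scope="module")
    def quiet_environment():
        # hypothetical shared setup/teardown for the whole module
        yield

    pytestmark = pytest.mark.usefixtures('quiet_environment')

    def test_runs_with_fixture():
        # executes with quiet_environment active despite not requesting it
        assert True
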
From ede1af29d210311f5f033f7226da58a19f197183 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 4 Jan 2025 05:23:51 -0500
Subject: [PATCH 1366/1761] Add tests and fix failure identified in the tests.

---
 distutils/ccompiler.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 9b637f8c98..7b5baaf9ae 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -976,6 +976,13 @@ def _make_out_path(self, output_dir, strip_dir, src_name):
 
     @classmethod
     def _make_out_path_exts(cls, output_dir, strip_dir, src_name, extensions):
+        r"""
+        >>> exts = {'.c': '.o'}
+        >>> CCompiler._make_out_path_exts('.', False, '/foo/bar.c', exts).replace('\\', '/')
+        './foo/bar.o'
+        >>> CCompiler._make_out_path_exts('.', True, '/foo/bar.c', exts).replace('\\', '/')
+        './bar.o'
+        """
         base, ext = os.path.splitext(src_name)
         base = pathlib.PurePath(base)
         # Ensure base is relative to honor output_dir (python/cpython#37775).
@@ -985,7 +992,7 @@ def _make_out_path_exts(cls, output_dir, strip_dir, src_name, extensions):
         except LookupError:
             raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')")
         if strip_dir:
-            base = base.name
+            base = pathlib.PurePath(base.name)
         return os.path.join(output_dir, base.with_suffix(new_ext))
 
     @staticmethod

From 36ce8b329524088cfa53b9a4bffcce3a8d233539 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 4 Jan 2025 05:38:01 -0500
Subject: [PATCH 1367/1761] Refactor for simplicity.

---
 distutils/ccompiler.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 7b5baaf9ae..714f13d8d3 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -983,14 +983,13 @@ def _make_out_path_exts(cls, output_dir, strip_dir, src_name, extensions):
         >>> CCompiler._make_out_path_exts('.', True, '/foo/bar.c', exts).replace('\\', '/')
         './bar.o'
         """
-        base, ext = os.path.splitext(src_name)
-        base = pathlib.PurePath(base)
+        src = pathlib.PurePath(src_name)
         # Ensure base is relative to honor output_dir (python/cpython#37775).
-        base = cls._make_relative(base)
+        base = cls._make_relative(src)
         try:
-            new_ext = extensions[ext]
+            new_ext = extensions[src.suffix]
         except LookupError:
-            raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')")
+            raise UnknownFileError(f"unknown file type '{src.suffix}' (from '{src}')")
         if strip_dir:
             base = pathlib.PurePath(base.name)
         return os.path.join(output_dir, base.with_suffix(new_ext))

From 5ed9d93e77aa3e2c70d8cea1bfeb15549932169f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 5 Jan 2025 10:25:29 -0500
Subject: [PATCH 1368/1761] Add news fragment.

---
 newsfragments/4790.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4790.feature.rst

diff --git a/newsfragments/4790.feature.rst b/newsfragments/4790.feature.rst
new file mode 100644
index 0000000000..c667e36258
--- /dev/null
+++ b/newsfragments/4790.feature.rst
@@ -0,0 +1 @@
+Synced with pypa/distutils@ff11eed0c including bugfix for duplicate CFLAGS, adaption to support Python 3.13 is_abs in the C compiler (#4669), and setting of ``Py_GIL_DISABLED=1`` for free threaded Python on Windows (#4662).

From c384f184d20e8232a1ce73f88d151b9808b66949 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 5 Jan 2025 10:37:08 -0500
Subject: [PATCH 1369/1761] Py_GIL_DISABLED was handled previously.

---
 newsfragments/4790.feature.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4790.feature.rst b/newsfragments/4790.feature.rst
index c667e36258..21139708b3 100644
--- a/newsfragments/4790.feature.rst
+++ b/newsfragments/4790.feature.rst
@@ -1 +1 @@
-Synced with pypa/distutils@ff11eed0c including a bugfix for duplicate CFLAGS, an adaptation to support Python 3.13 is_abs in the C compiler (#4669), and setting of ``Py_GIL_DISABLED=1`` for free threaded Python on Windows (#4662).
+Synced with pypa/distutils@ff11eed0c including a bugfix for duplicate CFLAGS and an adaptation to support Python 3.13 is_abs in the C compiler (#4669).

From 4e82e8b75c8bd8cba1232a107dc171b4fd2c588c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 5 Jan 2025 11:12:17 -0500
Subject: [PATCH 1370/1761] =?UTF-8?q?Bump=20version:=2075.6.0=20=E2=86=92?=
 =?UTF-8?q?=2075.7.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 10 ++++++++++
 newsfragments/4478.feature.rst |  1 -
 newsfragments/4790.feature.rst |  1 -
 pyproject.toml                 |  2 +-
 5 files changed, 12 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4478.feature.rst
 delete mode 100644 newsfragments/4790.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index e21bc31417..4578bade30 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.6.0
+current_version = 75.7.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 326fdac650..b83fe6e156 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,13 @@
+v75.7.0
+=======
+
+Features
+--------
+
+- Synced with pypa/distutils@c97a3db2f including better support for free threaded Python on Windows (pypa/distutils#310), improved typing support, and linter accommodations. (#4478)
+- Synced with pypa/distutils@ff11eed0c including a bugfix for duplicate CFLAGS and an adaptation to support Python 3.13 is_abs in the C compiler (#4669). (#4790)
+
+
 v75.6.0
 =======
 
diff --git a/newsfragments/4478.feature.rst b/newsfragments/4478.feature.rst
deleted file mode 100644
index bd53339464..0000000000
--- a/newsfragments/4478.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Synced with pypa/distutils@c97a3db2f including better support for free threaded Python on Windows (pypa/distutils#310), improved typing support, and linter accommodations.
diff --git a/newsfragments/4790.feature.rst b/newsfragments/4790.feature.rst
deleted file mode 100644
index 21139708b3..0000000000
--- a/newsfragments/4790.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Synced with pypa/distutils@ff11eed0c including a bugfix for duplicate CFLAGS and an adaptation to support Python 3.13 is_abs in the C compiler (#4669).
diff --git a/pyproject.toml b/pyproject.toml
index 61a0c91a67..5560a11043 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.6.0"
+version = "75.7.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From e1d81b87858678672adcee63469b59b6944bfec7 Mon Sep 17 00:00:00 2001
From: Scott Talbert 
Date: Sun, 5 Jan 2025 13:18:27 -0500
Subject: [PATCH 1371/1761] setuptools.msvc: set host_dir correctly on ARM64
 hosts

---
 setuptools/msvc.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 9c9a63568e..7c162d3070 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1038,9 +1038,11 @@ def VCTools(self):
             tools += [os.path.join(si.VCInstallDir, path)]
 
         elif self.vs_ver >= 15.0:
-            host_dir = (
-                r'bin\HostX86%s' if self.pi.current_is_x86() else r'bin\HostX64%s'
-            )
+            if self.pi.current_cpu in ('x86', 'arm64'):
+                host_id = self.pi.current_cpu.upper()
+            else:
+                host_id = 'X64'
+            host_dir = os.path.join('bin', f'Host{host_id}%s')
             tools += [
                 os.path.join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))
             ]
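
The selection logic above can be exercised standalone. A minimal sketch, assuming `current_cpu` takes the architecture strings reported by `setuptools.msvc.PlatformInfo` ('x86', 'amd64', 'arm64') and a plain target folder name; the real code formats the target via `pi.target_dir()`, so `host_tools_dir` is an illustrative helper, not setuptools API.

import os


def host_tools_dir(current_cpu: str, target_dir: str) -> str:
    # x86 and arm64 build machines get their own Host* folder;
    # every other host falls back to the 64-bit x86 tools (HostX64).
    if current_cpu in ('x86', 'arm64'):
        host_id = current_cpu.upper()
    else:
        host_id = 'X64'
    return os.path.join('bin', f'Host{host_id}', target_dir)


print(host_tools_dir('arm64', 'arm64'))  # bin/HostARM64/arm64 (separator per platform)
print(host_tools_dir('amd64', 'x64'))    # bin/HostX64/x64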

From df1c1f7d20bb5c168162abbc877b7b99a95ce170 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 8 Jan 2025 14:33:08 +0000
Subject: [PATCH 1372/1761] Explicitly pass the Python path to pipx in CI step

---
 .github/workflows/main.yml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index db8c150173..16153ad1e9 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -95,7 +95,10 @@ jobs:
         run: |
           rm -rf dist
           # workaround for pypa/setuptools#4333
-          pipx run --pip-args 'pyproject-hooks!=1.1' build
+          pipx run \
+            --python ${{ steps.python-install.outputs.python-path }} \
+            --pip-args 'pyproject-hooks!=1.1' \
+            build
           echo "PRE_BUILT_SETUPTOOLS_SDIST=$(ls dist/*.tar.gz)" >> $GITHUB_ENV
           echo "PRE_BUILT_SETUPTOOLS_WHEEL=$(ls dist/*.whl)" >> $GITHUB_ENV
           rm -rf setuptools.egg-info  # Avoid interfering with the other tests

From 059d5a6a2e4a96afd425d0c10c7880958a43add1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 8 Jan 2025 14:46:38 +0000
Subject: [PATCH 1373/1761] Use environment variable for pipx

---
 .github/workflows/main.yml | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 16153ad1e9..c9de26d330 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -90,15 +90,16 @@ jobs:
         if: steps.cache.outputs.cache-hit != 'true'
         working-directory: setuptools/tests/config
         run: python -m downloads.preload setupcfg_examples.txt
+      - name: Adjust env vars
+        shell: bash
+        run: |
+          echo 'PIPX_DEFAULT_PYTHON=${{ steps.python-install.outputs.python-path }}' >> $GITHUB_ENV
       - name: Pre-build distributions for test
         shell: bash
         run: |
           rm -rf dist
           # workaround for pypa/setuptools#4333
-          pipx run \
-            --python ${{ steps.python-install.outputs.python-path }} \
-            --pip-args 'pyproject-hooks!=1.1' \
-            build
+          pipx run --pip-args 'pyproject-hooks!=1.1' build
           echo "PRE_BUILT_SETUPTOOLS_SDIST=$(ls dist/*.tar.gz)" >> $GITHUB_ENV
           echo "PRE_BUILT_SETUPTOOLS_WHEEL=$(ls dist/*.whl)" >> $GITHUB_ENV
           rm -rf setuptools.egg-info  # Avoid interfering with the other tests

From aad7d3ddb43746d2c5736ee0abdd5b919c8be307 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 6 Sep 2024 10:17:08 +0100
Subject: [PATCH 1374/1761] Prepare test for PEP 643 by removing checks on
 Metadata-Version and Dynamic

---
 setuptools/tests/config/test_apply_pyprojecttoml.py | 2 ++
 setuptools/tests/test_core_metadata.py              | 3 +++
 2 files changed, 5 insertions(+)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index da43bb6a2b..f2de080774 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -502,6 +502,8 @@ def core_metadata(dist) -> str:
     skip_prefixes += ("Description-Content-Type:",)
     # Remove empty lines
     skip_lines.add("")
+    # TODO: Mark static values coming from pyproject.toml with `_static.*`
+    skip_prefixes += ("Dynamic:",)
 
     result = []
     for line in pkg_file_txt.splitlines():
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index c34b9eb831..e910e4fd14 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -462,6 +462,9 @@ def _normalize_metadata(msg: Message) -> str:
     for extra in sorted(extras):
         msg["Provides-Extra"] = extra
 
+    # TODO: Handle lack of PEP 643 implementation in pypa/wheel?
+    del msg["Metadata-Version"]
+
     return msg.as_string()
 
 

From 22b3bf31de957bac44d4dbcbec8c95a2eb2d535f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 6 Sep 2024 10:23:50 +0100
Subject: [PATCH 1375/1761] First draft implementation of '_static' in
 preparation for PEP 643

---
 setuptools/_core_metadata.py      | 37 +++++++++++++-
 setuptools/_static.py             | 83 +++++++++++++++++++++++++++++++
 setuptools/config/expand.py       |  4 +-
 setuptools/config/setupcfg.py     | 43 ++++++++--------
 setuptools/dist.py                | 14 ++++--
 setuptools/monkey.py              | 13 +++++
 setuptools/tests/test_egg_info.py |  2 +-
 7 files changed, 166 insertions(+), 30 deletions(-)
 create mode 100644 setuptools/_static.py

diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 642b80df31..4e526cbb44 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -18,7 +18,7 @@
 from packaging.utils import canonicalize_name, canonicalize_version
 from packaging.version import Version
 
-from . import _normalization, _reqs
+from . import _normalization, _reqs, _static
 from .warnings import SetuptoolsDeprecationWarning
 
 from distutils.util import rfc822_escape
@@ -27,7 +27,7 @@
 def get_metadata_version(self):
     mv = getattr(self, 'metadata_version', None)
     if mv is None:
-        mv = Version('2.1')
+        mv = Version('2.2')
         self.metadata_version = mv
     return mv
 
@@ -207,6 +207,10 @@ def write_field(key, value):
     self._write_list(file, 'License-File', self.license_files or [])
     _write_requirements(self, file)
 
+    for field, attr in _POSSIBLE_DYNAMIC_FIELDS.items():
+        if hasattr(self, attr) and not isinstance(getattr(self, attr), _static.Static):
+            write_field('Dynamic', field)
+
     long_description = self.get_long_description()
     if long_description:
         file.write(f"\n{long_description}")
@@ -284,3 +288,32 @@ def _distribution_fullname(name: str, version: str) -> str:
         canonicalize_name(name).replace('-', '_'),
         canonicalize_version(version, strip_trailing_zero=False),
     )
+
+
+_POSSIBLE_DYNAMIC_FIELDS = {
+    "author": "author",
+    "author-email": "author_email",
+    "classifier": "classifiers",
+    "description": "long_description",
+    "description-content-type": "long_description_content_type",
+    "download-url": "download_url",
+    "home-page": "url",
+    "keywords": "keywords",
+    "license": "license",
+    # "license-file": "license_files", # PEP 639 allows backfilling without dynamic ??
+    "maintainer": "maintainer",
+    "maintainer-email": "maintainer_email",
+    "obsoletes": "obsoletes",
+    # "obsoletes-dist": "obsoletes_dist",  # NOT USED
+    "platform": "platforms",
+    "project-url": "project_urls",
+    "provides": "provides",
+    # "provides-dist": "provides_dist",  # NOT USED
+    "provides-extra": "extras_require",
+    "requires": "requires",
+    "requires-dist": "install_requires",
+    # "requires-external": "requires_external",  # NOT USED
+    "requires-python": "python_requires",
+    "summary": "description",
+    # "supported-platform": "supported_platforms",  # NOT USED
+}
diff --git a/setuptools/_static.py b/setuptools/_static.py
new file mode 100644
index 0000000000..6a3ae1bc10
--- /dev/null
+++ b/setuptools/_static.py
@@ -0,0 +1,83 @@
+from collections import abc
+from functools import singledispatch
+
+import packaging.specifiers
+
+
+class Static:
+    """
+    Wrapper for butil-in object types that allow setuptools to identify
+    static core metadata (in contrast to ``Dynamic``, as defined in :pep:`643`).
+
+    The trick is to mark values with :class:`Static` when they come from
+    ``pyproject.toml`` or ``setup.cfg``, so if any plugin overwrites the value
+    with a built-in, setuptools will be able to recognise the change.
+
+    We inherit from built-in classes, so that we don't need to change the existing
+    code base to deal with the new types.
+    We also prefer "immutable-ish" objects to avoid changes after the initial parsing.
+    """
+
+
+class Str(str, Static):
+    pass
+
+
+class Tuple(tuple, Static):
+    pass
+
+
+class Mapping(dict, Static):
+    pass
+
+
+def _do_not_modify(*_, **__):
+    raise NotImplementedError("Direct modification disallowed (statically defined)")
+
+
+# Make `Mapping` immutable-ish (we cannot inherit from types.MappingProxyType):
+for _method in (
+    '__delitem__',
+    '__ior__',
+    '__setitem__',
+    'clear',
+    'pop',
+    'popitem',
+    'setdefault',
+    'update',
+):
+    setattr(Mapping, _method, _do_not_modify)
+
+
+class SpeficierSet(packaging.specifiers.SpecifierSet, Static):
+    """Not exactly a builtin type but useful for ``requires-python``"""
+
+
+@singledispatch
+def convert(value):
+    return value
+
+
+@convert.register
+def _(value: str) -> Str:
+    return Str(value)
+
+
+@convert.register
+def _(value: str) -> Str:
+    return Str(value)
+
+
+@convert.register
+def _(value: tuple) -> Tuple:
+    return Tuple(value)
+
+
+@convert.register
+def _(value: list) -> Tuple:
+    return Tuple(value)
+
+
+@convert.register
+def _(value: abc.Mapping) -> Mapping:
+    return Mapping(value)
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index ccb5d63cd2..553f948ea1 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -34,6 +34,7 @@
 from types import ModuleType, TracebackType
 from typing import TYPE_CHECKING, Any, Callable, TypeVar
 
+from .. import _static
 from .._path import StrPath, same_path as _same_path
 from ..discovery import find_package_path
 from ..warnings import SetuptoolsWarning
@@ -181,7 +182,8 @@ def read_attr(
     spec = _find_spec(module_name, path)
 
     try:
-        return getattr(StaticModule(module_name, spec), attr_name)
+        value = getattr(StaticModule(module_name, spec), attr_name)
+        return _static.convert(value)
     except Exception:
         # fallback to evaluate module
         module = _load_spec(spec, module_name)
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index 4615815b6b..ef00fe9ff8 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -21,9 +21,9 @@
 
 from packaging.markers import default_environment as marker_env
 from packaging.requirements import InvalidRequirement, Requirement
-from packaging.specifiers import SpecifierSet
 from packaging.version import InvalidVersion, Version
 
+from .. import _static
 from .._path import StrPath
 from ..errors import FileError, OptionError
 from ..warnings import SetuptoolsDeprecationWarning
@@ -309,7 +309,7 @@ def _parse_list(cls, value, separator=','):
 
         :param value:
         :param separator: List items separator character.
-        :rtype: list
+        :rtype: tuple
         """
         if isinstance(value, list):  # _get_parser_compound case
             return value
@@ -367,7 +367,7 @@ def parser(value):
                     f'Only strings are accepted for the {key} field, '
                     'files are not accepted'
                 )
-            return value
+            return _static.Str(value)
 
         return parser
 
@@ -387,15 +387,15 @@ def _parse_file(self, value, root_dir: StrPath | None):
         include_directive = 'file:'
 
         if not isinstance(value, str):
-            return value
+            return _static.convert(value)
 
         if not value.startswith(include_directive):
-            return value
+            return _static.Str(value)
 
         spec = value[len(include_directive) :]
         filepaths = [path.strip() for path in spec.split(',')]
         self._referenced_files.update(filepaths)
-        return expand.read_files(filepaths, root_dir)
+        return _static.Str(expand.read_files(filepaths, root_dir))  # Too optimistic?
 
     def _parse_attr(self, value, package_dir, root_dir: StrPath):
         """Represents value as a module attribute.
@@ -409,7 +409,7 @@ def _parse_attr(self, value, package_dir, root_dir: StrPath):
         """
         attr_directive = 'attr:'
         if not value.startswith(attr_directive):
-            return value
+            return _static.Str(value)
 
         attr_desc = value.replace(attr_directive, '')
 
@@ -473,7 +473,7 @@ def parse_section(self, section_options) -> None:
         for name, (_, value) in section_options.items():
             with contextlib.suppress(KeyError):
                 # Keep silent for a new option may appear anytime.
-                self[name] = value
+                self[name] = _static.convert(value)
 
     def parse(self) -> None:
         """Parses configuration file items from one
@@ -548,23 +548,23 @@ def __init__(
     @property
     def parsers(self):
         """Metadata item name to parser function mapping."""
-        parse_list = self._parse_list
+        parse_tuple_static = self._get_parser_compound(self._parse_list, _static.Tuple)
+        parse_dict_static = self._get_parser_compound(self._parse_dict, _static.Mapping)
         parse_file = partial(self._parse_file, root_dir=self.root_dir)
-        parse_dict = self._parse_dict
         exclude_files_parser = self._exclude_files_parser
 
         return {
-            'platforms': parse_list,
-            'keywords': parse_list,
-            'provides': parse_list,
-            'obsoletes': parse_list,
-            'classifiers': self._get_parser_compound(parse_file, parse_list),
+            'platforms': parse_tuple_static,
+            'keywords': parse_tuple_static,
+            'provides': parse_tuple_static,
+            'obsoletes': parse_tuple_static,
+            'classifiers': self._get_parser_compound(parse_file, parse_tuple_static),
             'license': exclude_files_parser('license'),
-            'license_files': parse_list,
+            'license_files': parse_tuple_static,
             'description': parse_file,
             'long_description': parse_file,
             'version': self._parse_version,
-            'project_urls': parse_dict,
+            'project_urls': parse_dict_static,
         }
 
     def _parse_version(self, value):
@@ -620,20 +620,19 @@ def _parse_requirements_list(self, label: str, value: str):
         _warn_accidental_env_marker_misconfig(label, value, parsed)
         # Filter it to only include lines that are not comments. `parse_list`
         # will have stripped each line and filtered out empties.
-        return [line for line in parsed if not line.startswith("#")]
+        return _static.Tuple(line for line in parsed if not line.startswith("#"))
 
     @property
     def parsers(self):
         """Metadata item name to parser function mapping."""
         parse_list = self._parse_list
         parse_bool = self._parse_bool
-        parse_dict = self._parse_dict
         parse_cmdclass = self._parse_cmdclass
 
         return {
             'zip_safe': parse_bool,
             'include_package_data': parse_bool,
-            'package_dir': parse_dict,
+            'package_dir': self._parse_dict,
             'scripts': parse_list,
             'eager_resources': parse_list,
             'dependency_links': parse_list,
@@ -650,7 +649,7 @@ def parsers(self):
             'packages': self._parse_packages,
             'entry_points': self._parse_file_in_root,
             'py_modules': parse_list,
-            'python_requires': SpecifierSet,
+            'python_requires': _static.SpeficierSet,
             'cmdclass': parse_cmdclass,
         }
 
@@ -737,7 +736,7 @@ def parse_section_extras_require(self, section_options) -> None:
             lambda k, v: self._parse_requirements_list(f"extras_require[{k}]", v),
         )
 
-        self['extras_require'] = parsed
+        self['extras_require'] = _static.Mapping(parsed)
 
     def parse_section_data_files(self, section_options) -> None:
         """Parses `data_files` configuration file section.
diff --git a/setuptools/dist.py b/setuptools/dist.py
index f878b2fa45..539c0c3315 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -19,6 +19,7 @@
 from . import (
     _entry_points,
     _reqs,
+    _static,
     command as _,  # noqa: F401 # imported for side-effects
 )
 from ._importlib import metadata
@@ -391,10 +392,15 @@ def _normalize_requires(self):
         """Make sure requirement-related attributes exist and are normalized"""
         install_requires = getattr(self, "install_requires", None) or []
         extras_require = getattr(self, "extras_require", None) or {}
-        self.install_requires = list(map(str, _reqs.parse(install_requires)))
-        self.extras_require = {
-            k: list(map(str, _reqs.parse(v or []))) for k, v in extras_require.items()
-        }
+
+        # Preserve the "static"-ness of values parsed from config files
+        seq = _static.Tuple if isinstance(install_requires, _static.Static) else list
+        self.install_requires = seq(map(str, _reqs.parse(install_requires)))
+
+        mapp = _static.Mapping if isinstance(extras_require, _static.Static) else dict
+        self.extras_require = mapp(
+            (k, list(map(str, _reqs.parse(v or [])))) for k, v in extras_require.items()
+        )
 
     def _finalize_license_files(self) -> None:
         """Compute names of all license files which should be included."""
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index 6ad1abac29..a2ff1de396 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -89,6 +89,19 @@ def patch_all():
             'distutils.command.build_ext'
         ].Extension = setuptools.extension.Extension
 
+    if hasattr(distutils.dist, '_ensure_list'):
+        from . import _static
+
+        ensure_list = distutils.dist._ensure_list
+
+        def _ensure_list_accept_static(value, fieldname):
+            if isinstance(value, _static.Static):
+                return value
+
+            return ensure_list(value, fieldname)
+
+        patch_func(_ensure_list_accept_static, distutils.dist, '_ensure_list')
+
 
 def _patch_distribution_metadata():
     from . import _core_metadata
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 9924f9cbbd..b30f8633b7 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -517,7 +517,7 @@ def test_provides_extra(self, tmpdir_cwd, env):
         with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp:
             pkg_info_lines = fp.read().split('\n')
         assert 'Provides-Extra: foobar' in pkg_info_lines
-        assert 'Metadata-Version: 2.1' in pkg_info_lines
+        assert 'Metadata-Version: 2.2' in pkg_info_lines
 
     def test_doesnt_provides_extra(self, tmpdir_cwd, env):
         self._setup_script_with_requires(
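
The core idea of the new `_static` module can be condensed as follows. This is a minimal sketch of the marker-class pattern, not the module itself: the patch wires the conversion through `functools.singledispatch`, covers more types, and adds a `SpecifierSet` subclass for ``requires-python``.

class Static:
    """Marker mix-in: carries no behaviour, only signals provenance."""


class Str(str, Static):
    pass


class List(list, Static):
    pass


def convert(value):
    """Wrap supported built-ins in their Static twin; pass others through."""
    conversions = {str: Str, list: List}
    return conversions.get(type(value), lambda v: v)(value)


version = convert("1.2.3")          # e.g. parsed from setup.cfg
assert isinstance(version, str)     # still an ordinary str everywhere
assert isinstance(version, Static)  # ...but identifiable as statically configured
assert not isinstance(convert(object()), Static)  # unknown values stay "dynamic"

Because the wrappers subclass their built-in type, existing code that expects a `str` or `list` keeps working unchanged; only the PEP 643 machinery needs the `isinstance` check.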

From 5124e8cc238ae1cc00a0748f1f33b802d02e8763 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 6 Sep 2024 17:50:46 +0100
Subject: [PATCH 1376/1761] Modify Metadata-Version expectation in
 test_egg_info

---
 setuptools/tests/test_egg_info.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index b30f8633b7..8233c9b884 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -1064,7 +1064,7 @@ def test_metadata_version(self, tmpdir_cwd, env):
         with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp:
             pkg_info_lines = fp.read().split('\n')
         # Update metadata version if changed
-        assert self._extract_mv_version(pkg_info_lines) == (2, 1)
+        assert self._extract_mv_version(pkg_info_lines) == (2, 2)
 
     def test_long_description_content_type(self, tmpdir_cwd, env):
         # Test that specifying a `long_description_content_type` keyword arg to
@@ -1091,7 +1091,7 @@ def test_long_description_content_type(self, tmpdir_cwd, env):
             pkg_info_lines = fp.read().split('\n')
         expected_line = 'Description-Content-Type: text/markdown'
         assert expected_line in pkg_info_lines
-        assert 'Metadata-Version: 2.1' in pkg_info_lines
+        assert 'Metadata-Version: 2.2' in pkg_info_lines
 
     def test_long_description(self, tmpdir_cwd, env):
         # Test that specifying `long_description` and `long_description_content_type`
@@ -1110,7 +1110,7 @@ def test_long_description(self, tmpdir_cwd, env):
         egg_info_dir = os.path.join('.', 'foo.egg-info')
         with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp:
             pkg_info_lines = fp.read().split('\n')
-        assert 'Metadata-Version: 2.1' in pkg_info_lines
+        assert 'Metadata-Version: 2.2' in pkg_info_lines
         assert '' == pkg_info_lines[-1]  # last line should be empty
         long_desc_lines = pkg_info_lines[pkg_info_lines.index('') :]
         assert 'This is a long description' in long_desc_lines

From fad8675879ded276c282341762bc2b434ef81769 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 26 Sep 2024 15:30:14 +0100
Subject: [PATCH 1377/1761] Use _static.{List,Dict} and an attribute to track
 modifications instead of _static.{Tuple,Mapping} for better compatibility

---
 setuptools/_core_metadata.py  |   8 +-
 setuptools/_static.py         | 161 +++++++++++++++++++++++++++-------
 setuptools/config/expand.py   |   3 +-
 setuptools/config/setupcfg.py |  45 ++++++----
 setuptools/dist.py            |   8 +-
 setuptools/monkey.py          |  13 ---
 6 files changed, 168 insertions(+), 70 deletions(-)

diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 4e526cbb44..850cc409f7 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -18,7 +18,8 @@
 from packaging.utils import canonicalize_name, canonicalize_version
 from packaging.version import Version
 
-from . import _normalization, _reqs, _static
+from . import _normalization, _reqs
+from ._static import is_static
 from .warnings import SetuptoolsDeprecationWarning
 
 from distutils.util import rfc822_escape
@@ -208,7 +209,7 @@ def write_field(key, value):
     _write_requirements(self, file)
 
     for field, attr in _POSSIBLE_DYNAMIC_FIELDS.items():
-        if hasattr(self, attr) and not isinstance(getattr(self, attr), _static.Static):
+        if (val := getattr(self, attr, None)) and not is_static(val):
             write_field('Dynamic', field)
 
     long_description = self.get_long_description()
@@ -291,6 +292,7 @@ def _distribution_fullname(name: str, version: str) -> str:
 
 
 _POSSIBLE_DYNAMIC_FIELDS = {
+    # Core Metadata Field x related Distribution attribute
     "author": "author",
     "author-email": "author_email",
     "classifier": "classifiers",
@@ -300,7 +302,7 @@ def _distribution_fullname(name: str, version: str) -> str:
     "home-page": "url",
     "keywords": "keywords",
     "license": "license",
-    # "license-file": "license_files", # PEP 639 allows backfilling without dynamic ??
+    # "license-file": "license_files", # XXX: does PEP 639 exempt Dynamic ??
     "maintainer": "maintainer",
     "maintainer-email": "maintainer_email",
     "obsoletes": "obsoletes",
diff --git a/setuptools/_static.py b/setuptools/_static.py
index 6a3ae1bc10..0735ca685c 100644
--- a/setuptools/_static.py
+++ b/setuptools/_static.py
@@ -1,12 +1,14 @@
-from collections import abc
-from functools import singledispatch
+from functools import wraps
+from typing import Any, TypeVar
 
 import packaging.specifiers
 
+from .warnings import SetuptoolsDeprecationWarning
+
 
 class Static:
     """
-    Wrapper for butil-in object types that allow setuptools to identify
+    Wrapper for built-in object types that allow setuptools to identify
     static core metadata (in contrast to ``Dynamic``, as defined in :pep:`643`).
 
     The trick is to mark values with :class:`Static` when they come from
@@ -15,8 +17,43 @@ class Static:
 
     We inherit from built-in classes, so that we don't need to change the existing
     code base to deal with the new types.
-    We also prefer "immutable-ish" objects to avoid changes after the initial parsing.
+    We should also strive for immutable objects to avoid changes after the
+    initial parsing.
+    """
+
+    _mutated_: bool = False  # TODO: Remove after deprecation warning is solved
+
+
+def _prevent_modification(target: type, method: str, copying: str):
+    """
+    Because setuptools is very flexible, we cannot fully prevent
+    plugins and user customisations from modifying static values that were
+    parsed from config files.
+    But we can attempt to block "in-place" mutations and identify when they
+    were done.
     """
+    fn = getattr(target, method)
+
+    @wraps(fn)
+    def _replacement(self: Static, *args, **kwargs):
+        # TODO: After the deprecation period, raise NotImplementedError instead of
+        #       warning; that obviates the `_mutated_` attribute and its checks.
+        self._mutated_ = True
+        SetuptoolsDeprecationWarning.emit(
+            "Direct modification of value will be disallowed",
+            f"""
+            In an effort to implement PEP 643, direct/in-place changes of static values
+            that come from configuration files are deprecated.
+            If you need to modify this value, please first create a copy with {copying}
+            and make sure to conform to all relevant standards when overriding setuptools
+            functionality (https://packaging.python.org/en/latest/specifications/).
+            """,
+            due_date=(2025, 10, 10),  # Initially introduced in 2024-09-06
+        )
+        return fn(self, *args, **kwargs)
+
+    _replacement.__doc__ = ""  # otherwise doctest may fail.
+    setattr(target, method, _replacement)
 
 
 class Str(str, Static):
@@ -27,15 +64,69 @@ class Tuple(tuple, Static):
     pass
 
 
-class Mapping(dict, Static):
-    pass
+class List(list, Static):
+    """
+    :meta private:
+    >>> x = List([1, 2, 3])
+    >>> is_static(x)
+    True
+    >>> x += [0]  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    SetuptoolsDeprecationWarning: Direct modification ...
+    >>> is_static(x)  # no longer static after modification
+    False
+    >>> y = list(x)
+    >>> y.clear()
+    >>> y
+    []
+    >>> y == x
+    False
+    >>> is_static(List(y))
+    True
+    """
 
 
-def _do_not_modify(*_, **__):
-    raise NotImplementedError("Direct modification disallowed (statically defined)")
+# Make `List` immutable-ish
+# (certain places in setuptools/distutils issue a warning if we use tuple instead of list)
+for _method in (
+    '__delitem__',
+    '__iadd__',
+    '__setitem__',
+    'append',
+    'clear',
+    'extend',
+    'insert',
+    'remove',
+    'reverse',
+    'pop',
+):
+    _prevent_modification(List, _method, "`list(value)`")
 
 
-# Make `Mapping` immutable-ish (we cannot inherit from types.MappingProxyType):
+class Dict(dict, Static):
+    """
+    :meta private:
+    >>> x = Dict({'a': 1, 'b': 2})
+    >>> is_static(x)
+    True
+    >>> x['c'] = 0  # doctest: +IGNORE_EXCEPTION_DETAIL
+    Traceback (most recent call last):
+    SetuptoolsDeprecationWarning: Direct modification ...
+    >>> x._mutated_
+    True
+    >>> is_static(x)  # no longer static after modification
+    False
+    >>> y = dict(x)
+    >>> y.popitem()
+    ('b', 2)
+    >>> y == x
+    False
+    >>> is_static(Dict(y))
+    True
+    """
+
+
+# Make `Dict` immutable-ish (we cannot inherit from types.MappingProxyType):
 for _method in (
     '__delitem__',
     '__ior__',
@@ -46,38 +137,46 @@ def _do_not_modify(*_, **__):
     'setdefault',
     'update',
 ):
-    setattr(Mapping, _method, _do_not_modify)
+    _prevent_modification(Dict, _method, "`dict(value)`")
 
 
 class SpeficierSet(packaging.specifiers.SpecifierSet, Static):
-    """Not exactly a builtin type but useful for ``requires-python``"""
-
-
-@singledispatch
-def convert(value):
-    return value
+    """Not exactly a built-in type but useful for ``requires-python``"""
 
 
-@convert.register
-def _(value: str) -> Str:
-    return Str(value)
+T = TypeVar("T")
 
 
-@convert.register
-def _(value: str) -> Str:
-    return Str(value)
+def noop(value: T) -> T:
+    """
+    >>> noop(42)
+    42
+    """
+    return value
 
 
-@convert.register
-def _(value: tuple) -> Tuple:
-    return Tuple(value)
+_CONVERSIONS = {str: Str, tuple: Tuple, list: List, dict: Dict}
 
 
-@convert.register
-def _(value: list) -> Tuple:
-    return Tuple(value)
+def attempt_conversion(value: T) -> T:
+    """
+    >>> is_static(attempt_conversion("hello"))
+    True
+    >>> is_static(object())
+    False
+    """
+    return _CONVERSIONS.get(type(value), noop)(value)  # type: ignore[call-overload]
 
 
-@convert.register
-def _(value: abc.Mapping) -> Mapping:
-    return Mapping(value)
+def is_static(value: Any) -> bool:
+    """
+    >>> is_static(a := Dict({'a': 1}))
+    True
+    >>> is_static(dict(a))
+    False
+    >>> is_static(b := List([1, 2, 3]))
+    True
+    >>> is_static(list(b))
+    False
+    """
+    return isinstance(value, Static) and not value._mutated_
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
index 553f948ea1..531f965013 100644
--- a/setuptools/config/expand.py
+++ b/setuptools/config/expand.py
@@ -183,7 +183,8 @@ def read_attr(
 
     try:
         value = getattr(StaticModule(module_name, spec), attr_name)
-        return _static.convert(value)
+        # XXX: Is marking contents coming from modules as static too optimistic?
+        return _static.attempt_conversion(value)
     except Exception:
         # fallback to evaluate module
         module = _load_spec(spec, module_name)
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index ef00fe9ff8..c9e6975538 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -309,7 +309,7 @@ def _parse_list(cls, value, separator=','):
 
         :param value:
         :param separator: List items separator character.
-        :rtype: tuple
+        :rtype: list
         """
         if isinstance(value, list):  # _get_parser_compound case
             return value
@@ -387,7 +387,7 @@ def _parse_file(self, value, root_dir: StrPath | None):
         include_directive = 'file:'
 
         if not isinstance(value, str):
-            return _static.convert(value)
+            return value
 
         if not value.startswith(include_directive):
             return _static.Str(value)
@@ -395,7 +395,8 @@ def _parse_file(self, value, root_dir: StrPath | None):
         spec = value[len(include_directive) :]
         filepaths = [path.strip() for path in spec.split(',')]
         self._referenced_files.update(filepaths)
-        return _static.Str(expand.read_files(filepaths, root_dir))  # Too optimistic?
+        # XXX: Is marking contents coming from files as static too optimistic?
+        return _static.Str(expand.read_files(filepaths, root_dir))
 
     def _parse_attr(self, value, package_dir, root_dir: StrPath):
         """Represents value as a module attribute.
@@ -473,7 +474,7 @@ def parse_section(self, section_options) -> None:
         for name, (_, value) in section_options.items():
             with contextlib.suppress(KeyError):
                 # Keep silent for a new option may appear anytime.
-                self[name] = _static.convert(value)
+                self[name] = value
 
     def parse(self) -> None:
         """Parses configuration file items from one
@@ -548,22 +549,28 @@ def __init__(
     @property
     def parsers(self):
         """Metadata item name to parser function mapping."""
-        parse_tuple_static = self._get_parser_compound(self._parse_list, _static.Tuple)
-        parse_dict_static = self._get_parser_compound(self._parse_dict, _static.Mapping)
+        parse_list_static = self._get_parser_compound(self._parse_list, _static.List)
+        parse_dict_static = self._get_parser_compound(self._parse_dict, _static.Dict)
         parse_file = partial(self._parse_file, root_dir=self.root_dir)
         exclude_files_parser = self._exclude_files_parser
 
         return {
-            'platforms': parse_tuple_static,
-            'keywords': parse_tuple_static,
-            'provides': parse_tuple_static,
-            'obsoletes': parse_tuple_static,
-            'classifiers': self._get_parser_compound(parse_file, parse_tuple_static),
+            'author': _static.Str,
+            'author_email': _static.Str,
+            'maintainer': _static.Str,
+            'maintainer_email': _static.Str,
+            'platforms': parse_list_static,
+            'keywords': parse_list_static,
+            'provides': parse_list_static,
+            'obsoletes': parse_list_static,
+            'classifiers': self._get_parser_compound(parse_file, parse_list_static),
             'license': exclude_files_parser('license'),
-            'license_files': parse_tuple_static,
+            'license_files': parse_list_static,
             'description': parse_file,
             'long_description': parse_file,
-            'version': self._parse_version,
+            'long_description_content_type': _static.Str,
+            'version': self._parse_version,  # Cannot be marked as dynamic
+            'url': _static.Str,
             'project_urls': parse_dict_static,
         }
 
@@ -620,7 +627,8 @@ def _parse_requirements_list(self, label: str, value: str):
         _warn_accidental_env_marker_misconfig(label, value, parsed)
         # Filter it to only include lines that are not comments. `parse_list`
         # will have stripped each line and filtered out empties.
-        return _static.Tuple(line for line in parsed if not line.startswith("#"))
+        return _static.List(line for line in parsed if not line.startswith("#"))
+        # ^-- Use `_static.List` to mark a non-`Dynamic` Core Metadata field
 
     @property
     def parsers(self):
@@ -642,14 +650,14 @@ def parsers(self):
                 "consider using implicit namespaces instead (PEP 420).",
                 # TODO: define due date, see setuptools.dist:check_nsp.
             ),
-            'install_requires': partial(
+            'install_requires': partial(  # Core Metadata
                 self._parse_requirements_list, "install_requires"
             ),
             'setup_requires': self._parse_list_semicolon,
             'packages': self._parse_packages,
             'entry_points': self._parse_file_in_root,
             'py_modules': parse_list,
-            'python_requires': _static.SpeficierSet,
+            'python_requires': _static.SpeficierSet,  # Core Metadata
             'cmdclass': parse_cmdclass,
         }
 
@@ -726,7 +734,7 @@ def parse_section_exclude_package_data(self, section_options) -> None:
         """
         self['exclude_package_data'] = self._parse_package_data(section_options)
 
-    def parse_section_extras_require(self, section_options) -> None:
+    def parse_section_extras_require(self, section_options) -> None:  # Core Metadata
         """Parses `extras_require` configuration file section.
 
         :param dict section_options:
@@ -736,7 +744,8 @@ def parse_section_extras_require(self, section_options) -> None:
             lambda k, v: self._parse_requirements_list(f"extras_require[{k}]", v),
         )
 
-        self['extras_require'] = _static.Mapping(parsed)
+        self['extras_require'] = _static.Dict(parsed)
+        # ^-- Use `_static.Dict` to mark a non-`Dynamic` Core Metadata field
 
     def parse_section_data_files(self, section_options) -> None:
         """Parses `data_files` configuration file section.
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 539c0c3315..0249651267 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -394,11 +394,11 @@ def _normalize_requires(self):
         extras_require = getattr(self, "extras_require", None) or {}
 
         # Preserve the "static"-ness of values parsed from config files
-        seq = _static.Tuple if isinstance(install_requires, _static.Static) else list
-        self.install_requires = seq(map(str, _reqs.parse(install_requires)))
+        list_ = _static.List if _static.is_static(install_requires) else list
+        self.install_requires = list_(map(str, _reqs.parse(install_requires)))
 
-        mapp = _static.Mapping if isinstance(extras_require, _static.Static) else dict
-        self.extras_require = mapp(
+        dict_ = _static.Dict if _static.is_static(extras_require) else dict
+        self.extras_require = dict_(
             (k, list(map(str, _reqs.parse(v or [])))) for k, v in extras_require.items()
         )
 
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index a2ff1de396..6ad1abac29 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -89,19 +89,6 @@ def patch_all():
             'distutils.command.build_ext'
         ].Extension = setuptools.extension.Extension
 
-    if hasattr(distutils.dist, '_ensure_list'):
-        from . import _static
-
-        ensure_list = distutils.dist._ensure_list
-
-        def _ensure_list_accept_static(value, fieldname):
-            if isinstance(value, _static.Static):
-                return value
-
-            return ensure_list(value, fieldname)
-
-        patch_func(_ensure_list_accept_static, distutils.dist, '_ensure_list')
-
 
 def _patch_distribution_metadata():
     from . import _core_metadata
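
The `_prevent_modification` pattern above can be tried outside setuptools. A condensed sketch, using a plain `UserWarning` in place of `SetuptoolsDeprecationWarning.emit` (which additionally tracks a due date); the guard list below is shortened for illustration.

import warnings
from functools import wraps


class Static:
    _mutated_: bool = False


class List(list, Static):
    pass


def _prevent_modification(target: type, method: str, copying: str) -> None:
    fn = getattr(target, method)

    @wraps(fn)
    def _replacement(self, *args, **kwargs):
        self._mutated_ = True  # the value can no longer be trusted as static
        warnings.warn(f"static value modified in place; copy with {copying} first")
        return fn(self, *args, **kwargs)  # still perform the mutation

    setattr(target, method, _replacement)


for _method in ('append', 'extend', '__setitem__'):
    _prevent_modification(List, _method, '`list(value)`')

x = List([1, 2])
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    x.append(3)  # works, but warns and flags the value as mutated
assert x == [1, 2, 3] and x._mutated_ and len(caught) == 1

The in-place mutation still succeeds; the point is that `is_static()` subsequently returns False, so the field is reported as `Dynamic` rather than silently claimed as static.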

From 0adaa839b8e34a88975a1c89d9a1bfefd46e2af4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 26 Sep 2024 18:54:10 +0100
Subject: [PATCH 1378/1761] Add tests for Dynamic core metadata for setup.cfg

---
 setuptools/tests/test_core_metadata.py | 71 ++++++++++++++++++++++++++
 1 file changed, 71 insertions(+)

diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index e910e4fd14..a4ade2ee1d 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -8,6 +8,7 @@
 from email.message import EmailMessage, Message
 from email.parser import Parser
 from email.policy import EmailPolicy
+from inspect import cleandoc
 from pathlib import Path
 from unittest.mock import Mock
 
@@ -411,6 +412,72 @@ def test_equivalent_output(self, tmp_path, dist):
         _assert_roundtrip_message(pkg_info)
 
 
+class TestPEP643:
+    STATIC_CONFIG = {
+        "setup.cfg": cleandoc(
+            """
+            [metadata]
+            name = package
+            version = 0.0.1
+            author = Foo Bar
+            author_email = foo@bar.net
+            long_description = Long
+                               description
+            description = Short description
+            keywords = one, two
+            [options]
+            install_requires = requests
+            """
+        ),
+    }
+
+    @pytest.mark.parametrize("file", STATIC_CONFIG.keys())
+    def test_static_config_has_no_dynamic(self, file, tmpdir_cwd):
+        Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
+        metadata = _get_metadata()
+        assert metadata.get_all("Dynamic") is None
+        assert metadata.get_all("dynamic") is None
+
+    @pytest.mark.parametrize("file", STATIC_CONFIG.keys())
+    @pytest.mark.parametrize(
+        "fields",
+        [
+            # Single dynamic field
+            {"requires-python": ("python_requires", ">=3.12")},
+            {"author-email": ("author_email", "snoopy@peanuts.com")},
+            {"keywords": ("keywords", ["hello", "world"])},
+            # Multiple dynamic fields
+            {
+                "summary": ("description", "hello world"),
+                "description": ("long_description", "bla bla bla bla"),
+                "requires-dist": ("install_requires", ["hello-world"]),
+            },
+        ],
+    )
+    def test_modified_fields_marked_as_dynamic(self, file, fields, tmpdir_cwd):
+        # We start with a static config
+        Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
+        dist = _makedist()
+
+        # ... but then we simulate the effects of a plugin modifying the distribution
+        for attr, value in fields.values():
+            # `dist` and `dist.metadata` are complicated...
+            # Some attributes work when set on `dist`, others on `dist.metadata`...
+            # Here we set it in both, just in case (this also avoids calling `_finalize_*`)
+            setattr(dist, attr, value)
+            setattr(dist.metadata, attr, value)
+
+        # Then we should be able to list the modified fields as Dynamic
+        metadata = _get_metadata(dist)
+        assert set(metadata.get_all("Dynamic")) == set(fields)
+
+
+def _makedist(**attrs):
+    dist = Distribution(attrs)
+    dist.parse_config_files()
+    return dist
+
+
 def _assert_roundtrip_message(metadata: str) -> None:
     """Emulate the way wheel.bdist_wheel parses and regenerates the message,
     then ensures the metadata generated by setuptools is compatible.
@@ -482,6 +549,10 @@ def _get_pkginfo(dist: Distribution):
         return fp.getvalue()
 
 
+def _get_metadata(dist: Distribution | None = None):
+    return message_from_string(_get_pkginfo(dist or _makedist()))
+
+
 def _valid_metadata(text: str) -> bool:
     metadata = Metadata.from_email(text, validate=True)  # can raise exceptions
     return metadata is not None
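
What the new assertions check can be seen on a toy PKG-INFO. The metadata text below is hand-written for illustration (not generated by setuptools), parsed with the same stdlib parser the tests use: a fully static build carries no `Dynamic:` headers, while attributes overwritten at runtime are listed one per header.

from email import message_from_string

static_pkg_info = """\
Metadata-Version: 2.2
Name: package
Version: 0.0.1
"""

patched_pkg_info = """\
Metadata-Version: 2.2
Name: package
Version: 0.0.1
Dynamic: author-email
Dynamic: requires-python
"""

assert message_from_string(static_pkg_info).get_all("Dynamic") is None
assert message_from_string(patched_pkg_info).get_all("Dynamic") == [
    "author-email",
    "requires-python",
]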

From cf576e29f6872f11ce5a26d4362bed48d6535c84 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 26 Sep 2024 19:02:30 +0100
Subject: [PATCH 1379/1761] Add tests for Dynamic core metadata for
 pyproject.toml

---
 setuptools/tests/test_core_metadata.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index a4ade2ee1d..eba48718d6 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -429,6 +429,20 @@ class TestPEP643:
             install_requires = requests
             """
         ),
+        "pyproject.toml": cleandoc(
+            """
+            [project]
+            name = "package"
+            version = "0.0.1"
+            authors = [
+              {name = "Foo Bar", email = "foo@bar.net"}
+            ]
+            description = "Short description"
+            readme = {text = "Long\\ndescription", content-type = "text/plain"}
+            keywords = ["one", "two"]
+            dependencies = ["requests"]
+            """
+        ),
     }
 
     @pytest.mark.parametrize("file", STATIC_CONFIG.keys())

From 8b4c8a3c95f43d771d0fa6e4ebceea3436bc70f7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 4 Oct 2024 10:24:27 +0100
Subject: [PATCH 1380/1761] Add tests for static 'attr' directive

---
 setuptools/tests/config/test_expand.py | 30 ++++++++++++++++++++++++--
 1 file changed, 28 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py
index fa9122b32c..c5710ec63d 100644
--- a/setuptools/tests/config/test_expand.py
+++ b/setuptools/tests/config/test_expand.py
@@ -4,6 +4,7 @@
 
 import pytest
 
+from setuptools._static import is_static
 from setuptools.config import expand
 from setuptools.discovery import find_package_path
 
@@ -93,11 +94,15 @@ def test_read_attr(self, tmp_path, monkeypatch):
         with monkeypatch.context() as m:
             m.chdir(tmp_path)
             # Make sure it can read the attr statically without evaluating the module
-            assert expand.read_attr('pkg.sub.VERSION') == '0.1.1'
+            version = expand.read_attr('pkg.sub.VERSION')
             values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'})
 
+        assert version == '0.1.1'
+        assert is_static(values)
+
         assert values['a'] == 0
         assert values['b'] == {42}
+        assert is_static(values)
 
         # Make sure the same APIs work outside cwd
         assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
@@ -118,7 +123,28 @@ def test_read_annotated_attr(self, tmp_path, example):
         }
         write_files(files, tmp_path)
         # Make sure this attribute can be read statically
-        assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
+        version = expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path)
+        assert version == '0.1.1'
+        assert is_static(version)
+
+    @pytest.mark.parametrize(
+        "example",
+        [
+            "VERSION = (lambda: '0.1.1')()\n",
+            "def fn(): return '0.1.1'\nVERSION = fn()\n",
+            "VERSION: str = (lambda: '0.1.1')()\n",
+        ],
+    )
+    def test_read_dynamic_attr(self, tmp_path, monkeypatch, example):
+        files = {
+            "pkg/__init__.py": "",
+            "pkg/sub/__init__.py": example,
+        }
+        write_files(files, tmp_path)
+        monkeypatch.chdir(tmp_path)
+        version = expand.read_attr('pkg.sub.VERSION')
+        assert version == '0.1.1'
+        assert not is_static(version)
 
     def test_import_order(self, tmp_path):
         """

From f699fd842e3ddedbe937ee33b0bd6ad28e735664 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 21 Oct 2024 14:56:13 +0100
Subject: [PATCH 1381/1761] Fix spelling error

---
 setuptools/_static.py         | 2 +-
 setuptools/config/setupcfg.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/_static.py b/setuptools/_static.py
index 0735ca685c..97536b898b 100644
--- a/setuptools/_static.py
+++ b/setuptools/_static.py
@@ -140,7 +140,7 @@ class Dict(dict, Static):
     _prevent_modification(Dict, _method, "`dict(value)`")
 
 
-class SpeficierSet(packaging.specifiers.SpecifierSet, Static):
+class SpecifierSet(packaging.specifiers.SpecifierSet, Static):
     """Not exactly a built-in type but useful for ``requires-python``"""
 
 
diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py
index c9e6975538..633aa9d45d 100644
--- a/setuptools/config/setupcfg.py
+++ b/setuptools/config/setupcfg.py
@@ -657,7 +657,7 @@ def parsers(self):
             'packages': self._parse_packages,
             'entry_points': self._parse_file_in_root,
             'py_modules': parse_list,
-            'python_requires': _static.SpeficierSet,  # Core Metadata
+            'python_requires': _static.SpecifierSet,  # Core Metadata
             'cmdclass': parse_cmdclass,
         }
 

From 8b22d73be5e23a9611398d81aedc5164115940ce Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 21 Oct 2024 14:56:52 +0100
Subject: [PATCH 1382/1761] Mark values from pyproject.toml as static

---
 setuptools/_static.py                         |  4 +
 setuptools/config/_apply_pyprojecttoml.py     | 76 +++++++++++++------
 .../tests/config/test_apply_pyprojecttoml.py  | 27 +++++++
 3 files changed, 82 insertions(+), 25 deletions(-)

diff --git a/setuptools/_static.py b/setuptools/_static.py
index 97536b898b..901e72c1e6 100644
--- a/setuptools/_static.py
+++ b/setuptools/_static.py
@@ -180,3 +180,7 @@ def is_static(value: Any) -> bool:
     False
     """
     return isinstance(value, Static) and not value._mutated_
+
+
+EMPTY_LIST = List()
+EMPTY_DICT = Dict()
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index c4bbcff730..331596bdd7 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -20,6 +20,7 @@
 from types import MappingProxyType
 from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union
 
+from .. import _static
 from .._path import StrPath
 from ..errors import RemovedConfigError
 from ..extension import Extension
@@ -65,10 +66,11 @@ def apply(dist: Distribution, config: dict, filename: StrPath) -> Distribution:
 
 
 def _apply_project_table(dist: Distribution, config: dict, root_dir: StrPath):
-    project_table = config.get("project", {}).copy()
-    if not project_table:
+    orig_config = config.get("project", {})
+    if not orig_config:
         return  # short-circuit
 
+    project_table = {k: _static.attempt_conversion(v) for k, v in orig_config.items()}
     _handle_missing_dynamic(dist, project_table)
     _unify_entry_points(project_table)
 
@@ -98,7 +100,11 @@ def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
             raise RemovedConfigError("\n".join([cleandoc(msg), suggestion]))
 
         norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
-        _set_config(dist, norm_key, value)
+        corresp = TOOL_TABLE_CORRESPONDENCE.get(norm_key, norm_key)
+        if callable(corresp):
+            corresp(dist, value)
+        else:
+            _set_config(dist, corresp, value)
 
     _copy_command_options(config, dist, filename)
 
@@ -143,7 +149,7 @@ def _guess_content_type(file: str) -> str | None:
         return None
 
     if ext in _CONTENT_TYPES:
-        return _CONTENT_TYPES[ext]
+        return _static.Str(_CONTENT_TYPES[ext])
 
     valid = ", ".join(f"{k} ({v})" for k, v in _CONTENT_TYPES.items())
     msg = f"only the following file extensions are recognized: {valid}."
@@ -165,10 +171,11 @@ def _long_description(
         text = val.get("text") or expand.read_files(file, root_dir)
         ctype = val["content-type"]
 
-    _set_config(dist, "long_description", text)
+    # XXX: Is it completely safe to assume static?
+    _set_config(dist, "long_description", _static.Str(text))
 
     if ctype:
-        _set_config(dist, "long_description_content_type", ctype)
+        _set_config(dist, "long_description_content_type", _static.Str(ctype))
 
     if file:
         dist._referenced_files.add(file)
@@ -178,10 +185,12 @@ def _license(dist: Distribution, val: dict, root_dir: StrPath | None):
     from setuptools.config import expand
 
     if "file" in val:
-        _set_config(dist, "license", expand.read_files([val["file"]], root_dir))
+        # XXX: Is it completely safe to assume static?
+        value = expand.read_files([val["file"]], root_dir)
+        _set_config(dist, "license", _static.Str(value))
         dist._referenced_files.add(val["file"])
     else:
-        _set_config(dist, "license", val["text"])
+        _set_config(dist, "license", _static.Str(val["text"]))
 
 
 def _people(dist: Distribution, val: list[dict], _root_dir: StrPath | None, kind: str):
@@ -197,9 +206,9 @@ def _people(dist: Distribution, val: list[dict], _root_dir: StrPath | None, kind
             email_field.append(str(addr))
 
     if field:
-        _set_config(dist, kind, ", ".join(field))
+        _set_config(dist, kind, _static.Str(", ".join(field)))
     if email_field:
-        _set_config(dist, f"{kind}_email", ", ".join(email_field))
+        _set_config(dist, f"{kind}_email", _static.Str(", ".join(email_field)))
 
 
 def _project_urls(dist: Distribution, val: dict, _root_dir: StrPath | None):
@@ -207,9 +216,7 @@ def _project_urls(dist: Distribution, val: dict, _root_dir: StrPath | None):
 
 
 def _python_requires(dist: Distribution, val: str, _root_dir: StrPath | None):
-    from packaging.specifiers import SpecifierSet
-
-    _set_config(dist, "python_requires", SpecifierSet(val))
+    _set_config(dist, "python_requires", _static.SpecifierSet(val))
 
 
 def _dependencies(dist: Distribution, val: list, _root_dir: StrPath | None):
@@ -237,9 +244,14 @@ def _noop(_dist: Distribution, val: _T) -> _T:
     return val
 
 
+def _identity(val: _T) -> _T:
+    return val
+
+
 def _unify_entry_points(project_table: dict):
     project = project_table
-    entry_points = project.pop("entry-points", project.pop("entry_points", {}))
+    given = project.pop("entry-points", project.pop("entry_points", {}))
+    entry_points = dict(given)  # Mutable copy (the value may be a "static" Dict)
     renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"}
     for key, value in list(project.items()):  # eager to allow modifications
         norm_key = json_compatible_key(key)
@@ -333,6 +345,14 @@ def _get_previous_gui_scripts(dist: Distribution) -> list | None:
     return value.get("gui_scripts")
 
 
+def _set_static_list_metadata(attr: str, dist: Distribution, val: list) -> None:
+    """Apply distutils metadata validation but preserve "static" behaviour"""
+    meta = dist.metadata
+    setter, getter = getattr(meta, f"set_{attr}"), getattr(meta, f"get_{attr}")
+    setter(val)
+    setattr(meta, attr, _static.List(getter()))
+
+
 def _attrgetter(attr):
     """
     Similar to ``operator.attrgetter`` but returns None if ``attr`` is not found
@@ -386,6 +406,12 @@ def _acessor(obj):
         See https://packaging.python.org/en/latest/guides/packaging-namespace-packages/.
         """,
 }
+TOOL_TABLE_CORRESPONDENCE = {
+    # Fields with corresponding core metadata need to be marked as static:
+    "obsoletes": partial(_set_static_list_metadata, "obsoletes"),
+    "provides": partial(_set_static_list_metadata, "provides"),
+    "platforms": partial(_set_static_list_metadata, "platforms"),
+}
 
 SETUPTOOLS_PATCHES = {
     "long_description_content_type",
@@ -422,17 +448,17 @@ def _acessor(obj):
 _RESET_PREVIOUSLY_DEFINED: dict = {
     # Fix improper setting: given in `setup.py`, but not listed in `dynamic`
     # dict: pyproject name => value to which reset
-    "license": {},
-    "authors": [],
-    "maintainers": [],
-    "keywords": [],
-    "classifiers": [],
-    "urls": {},
-    "entry-points": {},
-    "scripts": {},
-    "gui-scripts": {},
-    "dependencies": [],
-    "optional-dependencies": {},
+    "license": _static.EMPTY_DICT,
+    "authors": _static.EMPTY_LIST,
+    "maintainers": _static.EMPTY_LIST,
+    "keywords": _static.EMPTY_LIST,
+    "classifiers": _static.EMPTY_LIST,
+    "urls": _static.EMPTY_DICT,
+    "entry-points": _static.EMPTY_DICT,
+    "scripts": _static.EMPTY_DICT,
+    "gui-scripts": _static.EMPTY_DICT,
+    "dependencies": _static.EMPTY_LIST,
+    "optional-dependencies": _static.EMPTY_DICT,
 }
 
 
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index f2de080774..b9526433ea 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -18,6 +18,7 @@
 from packaging.metadata import Metadata
 
 import setuptools  # noqa: F401 # ensure monkey patch to metadata
+from setuptools._static import is_static
 from setuptools.command.egg_info import write_requirements
 from setuptools.config import expand, pyprojecttoml, setupcfg
 from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
@@ -480,6 +481,32 @@ def test_version(self, tmp_path, monkeypatch, capsys):
         assert "42.0" in captured.out
 
 
+class TestStaticConfig:
+    def test_mark_static_fields(self, tmp_path, monkeypatch):
+        monkeypatch.chdir(tmp_path)
+        toml_config = """
+        [project]
+        name = "test"
+        version = "42.0"
+        dependencies = ["hello"]
+        keywords = ["world"]
+        classifiers = ["private :: hello world"]
+        [tool.setuptools]
+        obsoletes = ["abcd"]
+        provides = ["abcd"]
+        platforms = ["abcd"]
+        """
+        pyproject = Path(tmp_path, "pyproject.toml")
+        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
+        dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
+        assert is_static(dist.install_requires)
+        assert is_static(dist.metadata.keywords)
+        assert is_static(dist.metadata.classifiers)
+        assert is_static(dist.metadata.obsoletes)
+        assert is_static(dist.metadata.provides)
+        assert is_static(dist.metadata.platforms)
+
+
 # --- Auxiliary Functions ---
 
 

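The patch above wraps every value read from ``pyproject.toml`` in a marker type from ``setuptools._static``, so later stages can tell statically configured values from dynamically computed ones (the distinction PEP 643's ``Dynamic`` field relies on). A minimal sketch of the marker-type idea, with simplified names; the real module also guards against in-place mutation and covers more types:

    # Sketch only: subclasses of builtins behave exactly like the
    # originals, so existing code keeps working unchanged.
    class Str(str):
        pass

    class List(list):
        pass

    class Dict(dict):
        pass

    EMPTY_LIST, EMPTY_DICT = List(), Dict()

    def is_static(value: object) -> bool:
        # Only values wrapped by the config reader count as static.
        return isinstance(value, (Str, List, Dict))

    assert is_static(Str("MIT"))
    assert not is_static("MIT")
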
From 770b4fc8f6248d862629028f5ee4218975f9516b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 21 Oct 2024 18:00:55 +0100
Subject: [PATCH 1383/1761] Remove test workaround for unmarked static values
 from pyproject.toml

---
 setuptools/tests/config/test_apply_pyprojecttoml.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index b9526433ea..20146b4a89 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -529,8 +529,6 @@ def core_metadata(dist) -> str:
     skip_prefixes += ("Description-Content-Type:",)
     # Remove empty lines
     skip_lines.add("")
-    # TODO: Mark static values coming from pyproject.toml with `_static.*`
-    skip_prefixes += ("Dynamic:",)
 
     result = []
     for line in pkg_file_txt.splitlines():

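For context, PEP 643 (Metadata-Version 2.2) lets an sdist's PKG-INFO declare which core-metadata fields cannot be trusted statically. A hand-written illustration of the field the tests now exercise, using only the standard library:

    from email.parser import Parser

    # Core metadata is RFC 822-style; 'Dynamic' may repeat, once per field.
    PKG_INFO = (
        "Metadata-Version: 2.2\n"
        "Name: test\n"
        "Version: 42.0\n"
        "Dynamic: requires-python\n"
    )

    fields = Parser().parsestr(PKG_INFO)
    assert fields.get_all("Dynamic") == ["requires-python"]
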
From b055895fa337a6e03a29c2ea6493b6b778d2ba46 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 21 Oct 2024 19:14:41 +0100
Subject: [PATCH 1384/1761] Add extra tests for static/dynamic metadata

---
 setuptools/tests/test_core_metadata.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index eba48718d6..b1edb79b40 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -425,6 +425,7 @@ class TestPEP643:
                                description
             description = Short description
             keywords = one, two
+            platforms = abcd
             [options]
             install_requires = requests
             """
@@ -441,6 +442,9 @@ class TestPEP643:
             readme = {text = "Long\\ndescription", content-type = "text/plain"}
             keywords = ["one", "two"]
             dependencies = ["requests"]
+            [tool.setuptools]
+            provides = ["abcd"]
+            obsoletes = ["abcd"]
             """
         ),
     }
@@ -460,6 +464,7 @@ def test_static_config_has_no_dynamic(self, file, tmpdir_cwd):
             {"requires-python": ("python_requires", ">=3.12")},
             {"author-email": ("author_email", "snoopy@peanuts.com")},
             {"keywords": ("keywords", ["hello", "world"])},
+            {"platform": ("platforms", ["abcd"])},
             # Multiple dynamic fields
             {
                 "summary": ("description", "hello world"),

From a50f6e2e1e8b4610adde709079bec17ad0944197 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 21 Oct 2024 22:46:08 +0100
Subject: [PATCH 1385/1761] Fix _static.Dict.__ior__ for Python 3.8

---
 setuptools/_static.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/setuptools/_static.py b/setuptools/_static.py
index 901e72c1e6..4ddac2c08e 100644
--- a/setuptools/_static.py
+++ b/setuptools/_static.py
@@ -24,7 +24,7 @@ class Static:
     _mutated_: bool = False  # TODO: Remove after deprecation warning is solved
 
 
-def _prevent_modification(target: type, method: str, copying: str):
+def _prevent_modification(target: type, method: str, copying: str) -> None:
     """
     Because setuptools is very flexible we cannot fully prevent
     plugins and user customisations from modifying static values that were
@@ -32,7 +32,9 @@ def _prevent_modification(target: type, method: str, copying: str):
     But we can attempt to block "in-place" mutations and identify when they
     were done.
     """
-    fn = getattr(target, method)
+    fn = getattr(target, method, None)
+    if fn is None:
+        return
 
     @wraps(fn)
     def _replacement(self: Static, *args, **kwargs):

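The guard matters because some dunder methods simply do not exist on older interpreters: ``dict.__ior__`` only arrived in Python 3.9, so looking it up on 3.8 must not raise. A sketch of the wrapping pattern under that assumption (the real module also emits a warning that names the ``copying`` idiom):

    from functools import wraps

    def _prevent_modification(target: type, method: str, copying: str) -> None:
        fn = getattr(target, method, None)
        if fn is None:
            return  # e.g. dict.__ior__ on Python 3.8: nothing to wrap

        @wraps(fn)
        def _replacement(self, *args, **kwargs):
            # Record that an in-place mutation happened; `copying` names
            # the idiom users should prefer (e.g. "dict(value)").
            self._mutated_ = True
            return fn(self, *args, **kwargs)

        setattr(target, method, _replacement)
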
From 1c61d4799438677c7cfaaccf281312bfb1aee9b3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 8 Jan 2025 17:13:27 +0000
Subject: [PATCH 1386/1761] Add news fragments for PEP 643

---
 newsfragments/4698.feature.rst | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 newsfragments/4698.feature.rst

diff --git a/newsfragments/4698.feature.rst b/newsfragments/4698.feature.rst
new file mode 100644
index 0000000000..955624ed4c
--- /dev/null
+++ b/newsfragments/4698.feature.rst
@@ -0,0 +1,3 @@
+Implemented ``Dynamic`` field for core metadata (as introduced in PEP 643).
+The existing implementation is currently experimental and the exact approach
+may change in future releases.

From 72c422261b40f2b95a8be6605cc7dd93cec81794 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 8 Jan 2025 17:15:33 +0000
Subject: [PATCH 1387/1761] Avoid using Any in function

---
 setuptools/_static.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/_static.py b/setuptools/_static.py
index 4ddac2c08e..075a0bcddf 100644
--- a/setuptools/_static.py
+++ b/setuptools/_static.py
@@ -1,5 +1,5 @@
 from functools import wraps
-from typing import Any, TypeVar
+from typing import TypeVar
 
 import packaging.specifiers
 
@@ -170,7 +170,7 @@ def attempt_conversion(value: T) -> T:
     return _CONVERSIONS.get(type(value), noop)(value)  # type: ignore[call-overload]
 
 
-def is_static(value: Any) -> bool:
+def is_static(value: object) -> bool:
     """
     >>> is_static(a := Dict({'a': 1}))
     True

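Swapping ``Any`` for ``object`` in the parameter keeps type checkers honest: ``Any`` silences every operation on the value, while ``object`` allows only what is valid for all values, which is all a predicate like ``is_static`` needs. A tiny contrast, purely for illustration:

    from typing import Any

    def loose(value: Any) -> bool:
        return value.undefined  # checker is silent, fails at runtime

    def strict(value: object) -> bool:
        return isinstance(value, str)  # narrowing is still permitted
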
From 5c9d9809dec1b20e2a9da6b4a06355fd6f87a190 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 8 Jan 2025 17:45:19 +0000
Subject: [PATCH 1388/1761] Bump version: 75.7.0 → 75.8.0

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 11 +++++++++++
 newsfragments/4698.feature.rst |  3 ---
 pyproject.toml                 |  2 +-
 4 files changed, 13 insertions(+), 5 deletions(-)
 delete mode 100644 newsfragments/4698.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 4578bade30..384a18455d 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.7.0
+current_version = 75.8.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index b83fe6e156..e9e795005a 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,14 @@
+v75.8.0
+=======
+
+Features
+--------
+
+- Implemented ``Dynamic`` field for core metadata (as introduced in PEP 643).
+  The existing implementation is currently experimental and the exact approach
+  may change in future releases. (#4698)
+
+
 v75.7.0
 =======
 
diff --git a/newsfragments/4698.feature.rst b/newsfragments/4698.feature.rst
deleted file mode 100644
index 955624ed4c..0000000000
--- a/newsfragments/4698.feature.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Implemented ``Dynamic`` field for core metadata (as introduced in PEP 643).
-The existing implementation is currently experimental and the exact approach
-may change in future releases.
diff --git a/pyproject.toml b/pyproject.toml
index 5560a11043..a9febdbe8c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.7.0"
+version = "75.8.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 2226d6df6b62ad7c7b667b86308e1ca422d3f9ed Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Thu, 9 Jan 2025 19:03:45 +0100
Subject: [PATCH 1389/1761] Fix ruff issues

---
 pkg_resources/tests/test_working_set.py | 10 +++++++---
 setuptools/tests/test_egg_info.py       |  7 ++++++-
 2 files changed, 13 insertions(+), 4 deletions(-)

diff --git a/pkg_resources/tests/test_working_set.py b/pkg_resources/tests/test_working_set.py
index 7bb84952c1..ed20c59dd3 100644
--- a/pkg_resources/tests/test_working_set.py
+++ b/pkg_resources/tests/test_working_set.py
@@ -104,9 +104,13 @@ def parametrize_test_working_set_resolve(*test_list):
                 )
             )
     return pytest.mark.parametrize(
-        'installed_dists,installable_dists,'
-        'requirements,replace_conflicting,'
-        'resolved_dists_or_exception',
+        (
+            "installed_dists",
+            "installable_dists",
+            "requirements",
+            "replace_conflicting",
+            "resolved_dists_or_exception",
+        ),
         argvalues,
         ids=idlist,
     )
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index a68ecaba4c..6076ae416a 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -305,7 +305,12 @@ def parametrize(*test_list, **format_dict):
                         )
                     )
             return pytest.mark.parametrize(
-                'requires,use_setup_cfg,expected_requires,install_cmd_kwargs',
+                (
+                    "requires",
+                    "use_setup_cfg",
+                    "expected_requires",
+                    "install_cmd_kwargs",
+                ),
                 argvalues,
                 ids=idlist,
             )

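The ruff fixes above replace a single comma-joined string of argument names in ``pytest.mark.parametrize`` with an explicit tuple; pytest accepts either spelling, as this small standalone example shows:

    import pytest

    @pytest.mark.parametrize(
        ("left", "right", "expected"),  # tuple of names, one per parameter
        [(1, 2, 3), (2, 3, 5)],
    )
    def test_add(left, right, expected):
        assert left + right == expected
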
From 3aa2d57bc7e4afd4e665aa47c8aea073b779a240 Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Thu, 9 Jan 2025 21:44:19 +0000
Subject: [PATCH 1390/1761] Fix pesky test

---
 setuptools/tests/test_easy_install.py | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 126efc7060..10c2b4f08a 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -670,11 +670,11 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
 
         with contexts.save_pkg_resources_state():
             with contexts.tempdir() as temp_dir:
-                foobar_1_archive = os.path.join(temp_dir, 'foo.bar-0.1.tar.gz')
-                make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1')
+                foobar_1_archive = os.path.join(temp_dir, 'foo_bar-0.1.tar.gz')
+                make_nspkg_sdist(foobar_1_archive, 'foo_bar', '0.1')
                 # Now actually go ahead and extract to the temp dir and add the
                 # extracted path to sys.path so foo.bar v0.1 is importable
-                foobar_1_dir = os.path.join(temp_dir, 'foo.bar-0.1')
+                foobar_1_dir = os.path.join(temp_dir, 'foo_bar-0.1')
                 os.mkdir(foobar_1_dir)
                 with tarfile.open(foobar_1_archive) as tf:
                     tf.extraction_filter = lambda member, path: member
@@ -697,14 +697,14 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
                             len(foo.__path__) == 2):
                         print('FAIL')
 
-                    if 'foo.bar-0.2' not in foo.__path__[0]:
+                    if 'foo_bar-0.2' not in foo.__path__[0]:
                         print('FAIL')
                 """
                 )
 
                 test_pkg = create_setup_requires_package(
                     temp_dir,
-                    'foo.bar',
+                    'foo_bar',
                     '0.2',
                     make_nspkg_sdist,
                     template,
@@ -1120,8 +1120,10 @@ def make_nspkg_sdist(dist_path, distname, version):
     package with the same name as distname.  The top-level package is
     designated a namespace package).
     """
-
-    parts = distname.split('.')
+    # The normalized project name may use dashes or underscores instead of
+    # periods; map them back to periods before building the namespace packages.
+    namespace = distname.replace('-', '.').replace('_', '.')
+    parts = namespace.split('.')
     nspackage = parts[0]
 
     packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)]

From eb3ad58fd19c54a34abea70c7786b7b1f2dffe15 Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Thu, 9 Jan 2025 22:00:36 +0000
Subject: [PATCH 1391/1761] Fix linting

---
 setuptools/command/bdist_wheel.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index d47f63c4d8..8e66e439d1 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -368,9 +368,9 @@ def get_tag(self) -> tuple[str, str, str]:
             supported_tags = [
                 (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
             ]
-            assert (
-                tag in supported_tags
-            ), f"would build wheel with unsupported tag {tag}"
+            assert tag in supported_tags, (
+                f"would build wheel with unsupported tag {tag}"
+            )
         return tag
 
     def run(self):

From 12109dfed0b665d8bc5d86bedd564022728faaf9 Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Thu, 9 Jan 2025 22:03:25 +0000
Subject: [PATCH 1392/1761] Add newsfragment

---
 newsfragments/4766.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4766.bugfix.rst

diff --git a/newsfragments/4766.bugfix.rst b/newsfragments/4766.bugfix.rst
new file mode 100644
index 0000000000..fcd54785d8
--- /dev/null
+++ b/newsfragments/4766.bugfix.rst
@@ -0,0 +1 @@
+Fix wheel file naming to follow binary distribution specification -- by :user:`di`

From 2234be899ec1761c4a5bd4ae870d38b291ecf1df Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Thu, 9 Jan 2025 22:21:10 +0000
Subject: [PATCH 1393/1761] Ignore missing coverage in test

---
 setuptools/tests/test_easy_install.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 10c2b4f08a..692d8dbed9 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -718,7 +718,7 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
                         # Don't even need to install the package, just
                         # running the setup.py at all is sufficient
                         run_setup(test_setup_py, ['--name'])
-                    except pkg_resources.VersionConflict:
+                    except pkg_resources.VersionConflict:  # pragma: nocover
                         pytest.fail(
                             'Installing setup.py requirements caused a VersionConflict'
                         )

From 7c1716fb4551816612b543b611ce428836b50eef Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 20 Jan 2025 12:16:41 +0000
Subject: [PATCH 1394/1761] Fix new mandatory configuration for RTD

---
 .readthedocs.yaml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index dc8516ac20..724370638f 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -5,6 +5,9 @@ python:
     extra_requirements:
       - doc
 
+sphinx:
+  configuration: docs/conf.py
+
 # required boilerplate readthedocs/readthedocs.org#10401
 build:
   os: ubuntu-lts-latest

From 4b73bfaf36b40ad94e816f102a5a77676a703b88 Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum 
Date: Fri, 17 Jan 2025 12:09:22 -0700
Subject: [PATCH 1395/1761] Fix crash formatting error message

---
 setuptools/command/bdist_wheel.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index d9e1eb974f..fd3e845c47 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -291,10 +291,15 @@ def _validate_py_limited_api(self) -> None:
             raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
 
         if sysconfig.get_config_var("Py_GIL_DISABLED"):
+            # sys.abiflags is only defined on POSIX
+            if getattr(sys, "abiflags", ""):
+                abinote = f" ({sys.abiflags=!r}). "
+            else:
+                abinote = ". "
             raise ValueError(
                 f"`py_limited_api={self.py_limited_api!r}` not supported. "
                 "`Py_LIMITED_API` is currently incompatible with "
-                f"`Py_GIL_DISABLED` ({sys.abiflags=!r}). "
+                f"`Py_GIL_DISABLED`{abinote}"
                 "See https://github.com/python/cpython/issues/111506."
             )
 

From 8db1f5e67c18d5ba99750fe9be7a02778ed81bca Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum 
Date: Fri, 17 Jan 2025 12:15:06 -0700
Subject: [PATCH 1396/1761] add release note entry

---
 newsfragments/4809.bugfix.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4809.bugfix.rst

diff --git a/newsfragments/4809.bugfix.rst b/newsfragments/4809.bugfix.rst
new file mode 100644
index 0000000000..288e3f686a
--- /dev/null
+++ b/newsfragments/4809.bugfix.rst
@@ -0,0 +1,2 @@
+Fixed a crash while generating the error message printed when building wheels
+for the free-threaded build with the limited API. -- by :user:`ngoldbaum`

From f049c9a5423248b0e1230c4a1cf1cb1b133d7ce5 Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum 
Date: Sun, 19 Jan 2025 08:26:31 -0700
Subject: [PATCH 1397/1761] Update bdist_wheel.py

Co-authored-by: Avasam 
---
 setuptools/command/bdist_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index fd3e845c47..cc48077dbc 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -292,7 +292,7 @@ def _validate_py_limited_api(self) -> None:
 
         if sysconfig.get_config_var("Py_GIL_DISABLED"):
             # sys.abiflags is only defined on POSIX
-            if getattr(sys, "abiflags", ""):
+            if hasattr(sys, "abiflags"):
                 abinote = f" ({sys.abiflags=!r}). "
             else:
                 abinote = ". "

From 3bde6efc7e0ad2da92e27bce76aa41805f7240ab Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum 
Date: Mon, 20 Jan 2025 08:49:11 -0700
Subject: [PATCH 1398/1761] Update setuptools/command/bdist_wheel.py

Co-authored-by: Anderson Bravalheri 
---
 setuptools/command/bdist_wheel.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index cc48077dbc..aafff9191e 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -291,11 +291,6 @@ def _validate_py_limited_api(self) -> None:
             raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
 
         if sysconfig.get_config_var("Py_GIL_DISABLED"):
-            # sys.abiflags is only defined on POSIX
-            if hasattr(sys, "abiflags"):
-                abinote = f" ({sys.abiflags=!r}). "
-            else:
-                abinote = ". "
             raise ValueError(
                 f"`py_limited_api={self.py_limited_api!r}` not supported. "
                 "`Py_LIMITED_API` is currently incompatible with "

From d92a59556223bb1a9a86fb94b10d59a8c018b0f7 Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum 
Date: Mon, 20 Jan 2025 08:49:17 -0700
Subject: [PATCH 1399/1761] Update setuptools/command/bdist_wheel.py

Co-authored-by: Anderson Bravalheri 
---
 setuptools/command/bdist_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index aafff9191e..bcd176f98e 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -294,7 +294,7 @@ def _validate_py_limited_api(self) -> None:
             raise ValueError(
                 f"`py_limited_api={self.py_limited_api!r}` not supported. "
                 "`Py_LIMITED_API` is currently incompatible with "
-                f"`Py_GIL_DISABLED`{abinote}"
+                "`Py_GIL_DISABLED`. "
                 "See https://github.com/python/cpython/issues/111506."
             )
 

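With both reverts applied, the check simply refuses the combination. A runnable sketch of the final behaviour, assuming the same ``sysconfig`` variable:

    import sysconfig

    def check_limited_api(py_limited_api: str) -> None:
        # Py_GIL_DISABLED marks free-threaded CPython builds, which do
        # not currently support the limited API / stable ABI.
        if sysconfig.get_config_var("Py_GIL_DISABLED"):
            raise ValueError(
                f"`py_limited_api={py_limited_api!r}` not supported. "
                "`Py_LIMITED_API` is currently incompatible with "
                "`Py_GIL_DISABLED`. "
                "See https://github.com/python/cpython/issues/111506."
            )
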
From 8795306772ed9fac82826529eb8b334945c6205c Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Mon, 27 Jan 2025 15:07:28 +0000
Subject: [PATCH 1400/1761] Call make_nspkg_sdist with 'foo.bar' distname

---
 setuptools/tests/test_easy_install.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 692d8dbed9..19fffacadb 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -671,7 +671,7 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
         with contexts.save_pkg_resources_state():
             with contexts.tempdir() as temp_dir:
                 foobar_1_archive = os.path.join(temp_dir, 'foo_bar-0.1.tar.gz')
-                make_nspkg_sdist(foobar_1_archive, 'foo_bar', '0.1')
+                make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1')
                 # Now actually go ahead and extract to the temp dir and add the
                 # extracted path to sys.path so foo.bar v0.1 is importable
                 foobar_1_dir = os.path.join(temp_dir, 'foo_bar-0.1')
@@ -704,7 +704,7 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
 
                 test_pkg = create_setup_requires_package(
                     temp_dir,
-                    'foo_bar',
+                    'foo.bar',
                     '0.2',
                     make_nspkg_sdist,
                     template,

From ae5df974d552eb0566b79a434d8a9fd7bdfd1081 Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Mon, 27 Jan 2025 15:47:51 +0000
Subject: [PATCH 1401/1761] Don't normalize the distname in `make_nspkg_sdist`

---
 setuptools/tests/test_easy_install.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 19fffacadb..c554d03f83 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -1120,10 +1120,7 @@ def make_nspkg_sdist(dist_path, distname, version):
     package with the same name as distname.  The top-level package is
     designated a namespace package).
     """
-    # The normalized project name may use dashes or underscores instead of
-    # periods; map them back to periods before building the namespace packages.
-    namespace = distname.replace('-', '.').replace('_', '.')
-    parts = namespace.split('.')
+    parts = distname.split('.')
     nspackage = parts[0]
 
     packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)]

From 1ec0c91ba05772a14f7c18747350b6b2ca7f81ab Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Mon, 27 Jan 2025 15:49:53 +0000
Subject: [PATCH 1402/1761] Require that `distname` contain at least one period

---
 setuptools/tests/test_easy_install.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index c554d03f83..e6e6383c6f 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -1120,6 +1120,9 @@ def make_nspkg_sdist(dist_path, distname, version):
     package with the same name as distname.  The top-level package is
     designated a namespace package).
     """
+    # Assert that the distname contains at least one period
+    assert '.' in distname
+
     parts = distname.split('.')
     nspackage = parts[0]
 

From 1836bfbdc1f1e16deb640dc143029e38ba3f9d5d Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Mon, 27 Jan 2025 15:51:30 +0000
Subject: [PATCH 1403/1761] Normalize distname in
 `create_setup_requires_package`

---
 setuptools/tests/test_easy_install.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index e6e6383c6f..b58b0b6666 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -26,6 +26,7 @@
 import setuptools.command.easy_install as ei
 from pkg_resources import Distribution as PRDistribution, normalize_path, working_set
 from setuptools import sandbox
+from setuptools._normalization import safer_name
 from setuptools.command.easy_install import PthDistributions
 from setuptools.dist import Distribution
 from setuptools.sandbox import run_setup
@@ -1209,10 +1210,11 @@ def create_setup_requires_package(
     package itself is just 'test_pkg'.
     """
 
+    normalized_distname = safer_name(distname)
     test_setup_attrs = {
         'name': 'test_pkg',
         'version': '0.0',
-        'setup_requires': [f'{distname}=={version}'],
+        'setup_requires': [f'{normalized_distname}=={version}'],
         'dependency_links': [os.path.abspath(path)],
     }
     if setup_attrs:
@@ -1261,7 +1263,7 @@ def create_setup_requires_package(
     with open(os.path.join(test_pkg, 'setup.py'), 'w', encoding="utf-8") as f:
         f.write(setup_py_template % test_setup_attrs)
 
-    foobar_path = os.path.join(path, f'{distname}-{version}.tar.gz')
+    foobar_path = os.path.join(path, f'{normalized_distname}-{version}.tar.gz')
     make_package(foobar_path, distname, version)
 
     return test_pkg

From 9f764e8a6df19248cabd7225ed35bc342affbd95 Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Mon, 27 Jan 2025 15:55:30 +0000
Subject: [PATCH 1404/1761] Consolidate `safe_name`/`safer_name` logic

---
 setuptools/command/bdist_wheel.py | 18 +-----------------
 1 file changed, 1 insertion(+), 17 deletions(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 8e66e439d1..b452097426 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -23,19 +23,13 @@
 from wheel.wheelfile import WheelFile
 
 from .. import Command, __version__, _shutil
+from .._normalization import safer_name
 from ..warnings import SetuptoolsDeprecationWarning
 from .egg_info import egg_info as egg_info_cls
 
 from distutils import log
 
 
-def safe_name(name: str) -> str:
-    """Convert an arbitrary string to a standard distribution name
-    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
-    """
-    return re.sub("[^A-Za-z0-9.]+", "-", name)
-
-
 def safe_version(version: str) -> str:
     """
     Convert an arbitrary string to a standard version string
@@ -133,16 +127,6 @@ def get_abi_tag() -> str | None:
     return abi
 
 
-def safer_name(name: str) -> str:
-    return (
-        # Per https://packaging.python.org/en/latest/specifications/name-normalization/#name-normalization
-        re.sub(r"[-_.]+", "-", safe_name(name))
-        .lower()
-        # Per https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode
-        .replace("-", "_")
-    )
-
-
 def safer_version(version: str) -> str:
     return safe_version(version).replace("-", "_")
 

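With the duplicate removed, a single ``safer_name`` in ``setuptools._normalization`` serves both ``bdist_wheel`` and the tests. A simplified sketch consistent with the deleted copy above (the shared helper may differ in detail):

    import re

    def safer_name(name: str) -> str:
        # PEP 503 normalization: collapse runs of "-", "_" and "." into a
        # single "-" and lowercase; then escape "-" as "_" per the wheel
        # (binary distribution) filename rules.
        return re.sub(r"[-_.]+", "-", name).lower().replace("-", "_")

    assert safer_name("foo.bar") == "foo_bar"
    assert safer_name("Foo--Bar") == "foo_bar"
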
From 27cc9091c718071f2b41a4c9921b1baa2c876ef3 Mon Sep 17 00:00:00 2001
From: Dustin Ingram 
Date: Mon, 27 Jan 2025 16:22:17 +0000
Subject: [PATCH 1405/1761] Remove unnecessary use of `make_nspkg_sdist`

---
 setuptools/tests/test_dist.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 533eb9f45e..e65ab310e7 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -8,7 +8,7 @@
 from setuptools import Distribution
 from setuptools.dist import check_package_data, check_specifier
 
-from .test_easy_install import make_nspkg_sdist
+from .test_easy_install import make_trivial_sdist
 from .test_find_packages import ensure_files
 from .textwrap import DALS
 
@@ -25,7 +25,7 @@ def test_dist_fetch_build_egg(tmpdir):
     def sdist_with_index(distname, version):
         dist_dir = index.mkdir(distname)
         dist_sdist = f'{distname}-{version}.tar.gz'
-        make_nspkg_sdist(str(dist_dir.join(dist_sdist)), distname, version)
+        make_trivial_sdist(str(dist_dir.join(dist_sdist)), distname, version)
         with dist_dir.join('index.html').open('w') as fp:
             fp.write(
                 DALS(

From ef9b8e5c5eec50853c4cd2ceeccbf5f963172560 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Sun, 3 Nov 2024 16:42:22 +0100
Subject: [PATCH 1406/1761] Store license-files in licenses subfolder

---
 newsfragments/4728.feature.rst       |  1 +
 setuptools/command/bdist_wheel.py    |  6 ++--
 setuptools/dist.py                   |  7 +++--
 setuptools/tests/test_bdist_wheel.py | 46 ++++++++++++++++++++++++++--
 setuptools/tests/test_build_meta.py  |  7 +++--
 setuptools/tests/test_egg_info.py    | 20 ++++++++++++
 6 files changed, 78 insertions(+), 9 deletions(-)
 create mode 100644 newsfragments/4728.feature.rst

diff --git a/newsfragments/4728.feature.rst b/newsfragments/4728.feature.rst
new file mode 100644
index 0000000000..61906656c0
--- /dev/null
+++ b/newsfragments/4728.feature.rst
@@ -0,0 +1 @@
+Store ``License-File``s in the ``.dist-info/licenses`` subfolder and add support for recursive globs in ``license_files`` (`PEP 639 <https://peps.python.org/pep-0639/>`_). -- by :user:`cdce8p`
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index bcd176f98e..73a028269e 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -590,9 +590,11 @@ def adios(p: str) -> None:
         metadata_path = os.path.join(distinfo_path, "METADATA")
         shutil.copy(pkginfo_path, metadata_path)
 
+        licenses_folder_path = os.path.join(distinfo_path, "licenses")
         for license_path in self.license_paths:
-            filename = os.path.basename(license_path)
-            shutil.copy(license_path, os.path.join(distinfo_path, filename))
+            dist_info_license_path = os.path.join(licenses_folder_path, license_path)
+            os.makedirs(os.path.dirname(dist_info_license_path), exist_ok=True)
+            shutil.copy(license_path, dist_info_license_path)
 
         adios(egginfo_path)
 
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 0249651267..962da7c34b 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -418,7 +418,10 @@ def _finalize_license_files(self) -> None:
             patterns = ['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']
 
         self.metadata.license_files = list(
-            unique_everseen(self._expand_patterns(patterns))
+            map(
+                lambda path: path.replace("\\", "/"),
+                unique_everseen(self._expand_patterns(patterns)),
+            )
         )
 
     @staticmethod
@@ -432,7 +435,7 @@ def _expand_patterns(patterns):
         return (
             path
             for pattern in patterns
-            for path in sorted(iglob(pattern))
+            for path in sorted(iglob(pattern, recursive=True))
             if not path.endswith('~') and os.path.isfile(path)
         )
 
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index d51dfbeb6d..0f2e6ce136 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -172,6 +172,20 @@
         ),
         "README.rst": "UTF-8 描述 説明",
     },
+    "licenses-dist": {
+        "setup.cfg": cleandoc(
+            """
+            [metadata]
+            name = licenses-dist
+            version = 1.0
+            license_files = **/LICENSE
+            """
+        ),
+        "LICENSE": "",
+        "src": {
+            "vendor": {"LICENSE": ""},
+        },
+    },
 }
 
 
@@ -238,6 +252,11 @@ def dummy_dist(tmp_path_factory):
     return mkexample(tmp_path_factory, "dummy-dist")
 
 
+@pytest.fixture
+def licenses_dist(tmp_path_factory):
+    return mkexample(tmp_path_factory, "licenses-dist")
+
+
 def test_no_scripts(wheel_paths):
     """Make sure entry point scripts are not generated."""
     path = next(path for path in wheel_paths if "complex_dist" in path)
@@ -297,7 +316,8 @@ def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
     bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
     with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
         license_files = {
-            "dummy_dist-1.0.dist-info/" + fname for fname in DEFAULT_LICENSE_FILES
+            "dummy_dist-1.0.dist-info/licenses/" + fname
+            for fname in DEFAULT_LICENSE_FILES
         }
         assert set(wf.namelist()) == DEFAULT_FILES | license_files
 
@@ -311,7 +331,7 @@ def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
     bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
 
     with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
-        license_files = {"dummy_dist-1.0.dist-info/DUMMYFILE"}
+        license_files = {"dummy_dist-1.0.dist-info/licenses/licenses/DUMMYFILE"}
         assert set(wf.namelist()) == DEFAULT_FILES | license_files
 
 
@@ -334,9 +354,29 @@ def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, confi
     bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
     with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
         license_files = {
-            "dummy_dist-1.0.dist-info/" + fname for fname in {"DUMMYFILE", "LICENSE"}
+            "dummy_dist-1.0.dist-info/licenses/" + fname
+            for fname in {"licenses/DUMMYFILE", "LICENSE"}
         }
         assert set(wf.namelist()) == DEFAULT_FILES | license_files
+        metadata = wf.read("dummy_dist-1.0.dist-info/METADATA").decode("utf8")
+        assert "License-File: licenses/DUMMYFILE" in metadata
+        assert "License-File: LICENSE" in metadata
+
+
+def test_licenses_preserve_folder_structure(licenses_dist, monkeypatch, tmp_path):
+    monkeypatch.chdir(licenses_dist)
+    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
+    print(os.listdir("dist"))
+    with ZipFile("dist/licenses_dist-1.0-py3-none-any.whl") as wf:
+        default_files = {name.replace("dummy_", "licenses_") for name in DEFAULT_FILES}
+        license_files = {
+            "licenses_dist-1.0.dist-info/licenses/LICENSE",
+            "licenses_dist-1.0.dist-info/licenses/src/vendor/LICENSE",
+        }
+        assert set(wf.namelist()) == default_files | license_files
+        metadata = wf.read("licenses_dist-1.0.dist-info/METADATA").decode("utf8")
+        assert "License-File: src/vendor/LICENSE" in metadata
+        assert "License-File: LICENSE" in metadata
 
 
 def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path):
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 121f409057..b26fd2f5b0 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -393,7 +393,9 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
         with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
             wheel_contents = set(zipfile.namelist())
             metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
-            license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
+            license = str(
+                zipfile.read("foo-0.1.dist-info/licenses/LICENSE.txt"), "utf-8"
+            )
             epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
 
         assert sdist_contents - {"foo-0.1/setup.py"} == {
@@ -426,7 +428,7 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
             "foo/cli.py",
             "foo/data.txt",  # include_package_data defaults to True
             "foo/py.typed",  # include type information by default
-            "foo-0.1.dist-info/LICENSE.txt",
+            "foo-0.1.dist-info/licenses/LICENSE.txt",
             "foo-0.1.dist-info/METADATA",
             "foo-0.1.dist-info/WHEEL",
             "foo-0.1.dist-info/entry_points.txt",
@@ -438,6 +440,7 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
         for line in (
             "Summary: This is a Python package",
             "License: MIT",
+            "License-File: LICENSE.txt",
             "Classifier: Intended Audience :: Developers",
             "Requires-Dist: appdirs",
             "Requires-Dist: " + str(Requirement('tomli>=1 ; extra == "all"')),
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 8879ec58ce..9756d7c519 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -815,6 +815,22 @@ def test_setup_cfg_license_file(self, tmpdir_cwd, env, files, license_in_sources
                 [],
                 id="files_only_added_once",
             ),
+            pytest.param(
+                {
+                    'setup.cfg': DALS(
+                        """
+                              [metadata]
+                              license_files = **/LICENSE
+                              """
+                    ),
+                    'LICENSE': "ABC license",
+                    'LICENSE-OTHER': "Don't include",
+                    'vendor': {'LICENSE': "Vendor license"},
+                },
+                ['LICENSE', 'vendor/LICENSE'],
+                ['LICENSE-OTHER'],
+                id="recursive_glob",
+            ),
         ],
     )
     def test_setup_cfg_license_files(
@@ -1032,12 +1048,14 @@ def test_license_file_attr_pkg_info(self, tmpdir_cwd, env):
                               license_files =
                                   NOTICE*
                                   LICENSE*
+                                  **/LICENSE
                               """
             ),
             "LICENSE-ABC": "ABC license",
             "LICENSE-XYZ": "XYZ license",
             "NOTICE": "included",
             "IGNORE": "not include",
+            "vendor": {'LICENSE': "Vendor license"},
         })
 
         environment.run_setup_py(
@@ -1053,9 +1071,11 @@ def test_license_file_attr_pkg_info(self, tmpdir_cwd, env):
 
         # Only 'NOTICE', 'LICENSE-ABC', and 'LICENSE-XYZ' should have been matched
         # Also assert that the order from license_files is kept
+        assert len(license_file_lines) == 4
         assert "License-File: NOTICE" == license_file_lines[0]
         assert "License-File: LICENSE-ABC" in license_file_lines[1:]
         assert "License-File: LICENSE-XYZ" in license_file_lines[1:]
+        assert "License-File: vendor/LICENSE" in license_file_lines[3]
 
     def test_metadata_version(self, tmpdir_cwd, env):
         """Make sure latest metadata version is used by default."""

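The ``dist.py`` hunk is the crux of the patch above: ``iglob(..., recursive=True)`` lets ``**/LICENSE`` match nested files, and the separator rewrite keeps the recorded paths portable. A self-contained sketch of that expansion:

    import os
    from glob import iglob

    def expand_license_patterns(patterns):
        # "**/LICENSE" now reaches e.g. src/vendor/LICENSE; editor backups
        # ("~") and directories are filtered out, and backslashes become
        # forward slashes so METADATA lists POSIX-style paths.
        return [
            path.replace("\\", "/")
            for pattern in patterns
            for path in sorted(iglob(pattern, recursive=True))
            if not path.endswith("~") and os.path.isfile(path)
        ]
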
From 02e1062cf6bcdffdae7e945fa5e5d04e9b6d2974 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Wed, 6 Nov 2024 12:09:27 +0100
Subject: [PATCH 1407/1761] Update validate-pyproject to 0.23.0

---
 newsfragments/4734.misc.rst                   |   1 +
 setuptools/config/_validate_pyproject/NOTICE  |   2 +-
 .../_validate_pyproject/extra_validations.py  |  32 +-
 .../fastjsonschema_validations.py             | 351 +++++++++++-------
 .../config/_validate_pyproject/formats.py     |  33 +-
 tox.ini                                       |   2 +-
 6 files changed, 286 insertions(+), 135 deletions(-)
 create mode 100644 newsfragments/4734.misc.rst

diff --git a/newsfragments/4734.misc.rst b/newsfragments/4734.misc.rst
new file mode 100644
index 0000000000..3b3f2c94f3
--- /dev/null
+++ b/newsfragments/4734.misc.rst
@@ -0,0 +1 @@
+Updated ``pyproject.toml`` validation via ``validate-pyproject`` v0.23.0.
diff --git a/setuptools/config/_validate_pyproject/NOTICE b/setuptools/config/_validate_pyproject/NOTICE
index 74e8821fc8..ac5464d88c 100644
--- a/setuptools/config/_validate_pyproject/NOTICE
+++ b/setuptools/config/_validate_pyproject/NOTICE
@@ -1,7 +1,7 @@
 The code contained in this directory was automatically generated using the
 following command:
 
-    python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose -t distutils=setuptools/config/distutils.schema.json -t setuptools=setuptools/config/setuptools.schema.json
+    python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose -t setuptools=setuptools/config/setuptools.schema.json -t distutils=setuptools/config/distutils.schema.json
 
 Please avoid changing it manually.
 
diff --git a/setuptools/config/_validate_pyproject/extra_validations.py b/setuptools/config/_validate_pyproject/extra_validations.py
index c4ffe651dd..789411d0ff 100644
--- a/setuptools/config/_validate_pyproject/extra_validations.py
+++ b/setuptools/config/_validate_pyproject/extra_validations.py
@@ -24,6 +24,13 @@ class RedefiningStaticFieldAsDynamic(ValidationError):
     )
 
 
+class IncludedDependencyGroupMustExist(ValidationError):
+    _DESC = """An included dependency group must exist and must not be cyclic.
+    """
+    __doc__ = _DESC
+    _URL = "https://peps.python.org/pep-0735/"
+
+
 def validate_project_dynamic(pyproject: T) -> T:
     project_table = pyproject.get("project", {})
     dynamic = project_table.get("dynamic", [])
@@ -49,4 +56,27 @@ def validate_project_dynamic(pyproject: T) -> T:
     return pyproject
 
 
-EXTRA_VALIDATIONS = (validate_project_dynamic,)
+def validate_include_depenency(pyproject: T) -> T:
+    dependency_groups = pyproject.get("dependency-groups", {})
+    for key, value in dependency_groups.items():
+        for each in value:
+            if (
+                isinstance(each, dict)
+                and (include_group := each.get("include-group"))
+                and include_group not in dependency_groups
+            ):
+                raise IncludedDependencyGroupMustExist(
+                    message=f"The included dependency group {include_group} doesn't exist",
+                    value=each,
+                    name=f"data.dependency_groups.{key}",
+                    definition={
+                        "description": cleandoc(IncludedDependencyGroupMustExist._DESC),
+                        "see": IncludedDependencyGroupMustExist._URL,
+                    },
+                    rule="PEP 735",
+                )
+    # TODO: check for `include-group` cycles (can be conditional to graphlib)
+    return pyproject
+
+
+EXTRA_VALIDATIONS = (validate_project_dynamic, validate_include_depenency)
diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
index 42e7aa5e33..c69368a83f 100644
--- a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
+++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
@@ -17,6 +17,7 @@
 
 
 REGEX_PATTERNS = {
+    '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$': re.compile('^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])\\Z'),
     '^.*$': re.compile('^.*$'),
     '.+': re.compile('.+'),
     '^.+$': re.compile('^.+$'),
@@ -31,7 +32,7 @@ def validate(data, custom_formats={}, name_prefix=None):
 
 def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', 
'$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
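A note on the pattern above: each generated ``raise`` embeds the fully resolved subschema as ``definition=`` and the failed JSON Schema keyword as ``rule=``, so the exception alone carries enough context to produce a precise report without re-reading the schema files. A minimal sketch of consuming that metadata (the import path is an assumption based on the vendored layout under ``setuptools/config/_validate_pyproject/``; the attributes are standard fastjsonschema ones):

# Hypothetical helper, not part of the patch.
from setuptools.config._validate_pyproject.fastjsonschema_exceptions import (
    JsonSchemaValueException,
)

def explain(exc: JsonSchemaValueException) -> str:
    # ``name`` is the dotted path to the offending value, ``rule`` the JSON
    # Schema keyword that failed; ``rule_definition`` is definition[rule].
    return f"{exc.name}: violates {exc.rule!r} = {exc.rule_definition!r}"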
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, {'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'license-files': {'description': 'Paths or globs to paths of license files', 'type': 'array', 'items': {'type': 'string'}}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 
'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'allOf': [{'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}}, {'if': {'required': ['license-files']}, 'then': {'properties': {'license': {'type': 'string'}}}}], 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', 
'``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 
'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 
'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, 'dependency-groups': {'type': 'object', 'description': 'Dependency groups following PEP 735', 'additionalProperties': False, 'patternProperties': {'^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$': {'type': 'array', 'items': {'oneOf': [{'type': 'string', 'description': 'Python package specifiers following PEP 508', 'format': 'pep508'}, {'type': 'object', 'additionalProperties': False, 'properties': {'include-group': {'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}}}]}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, {'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'license-files': {'description': 'Paths or globs to paths of license files', 'type': 'array', 'items': {'type': 'string'}}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 
'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'allOf': [{'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}}, {'if': {'required': ['license-files']}, 'then': {'properties': {'license': {'type': 'string'}}}}], 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
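The added ``raise`` above is the top-level counterpart: before any per-property handling, the generated entry point rejects a document whose top-level value is not a ``dict``. A minimal sketch, assuming the module exposes the ``validate(data, custom_formats={}, name_prefix=None)`` wrapper that fastjsonschema generates for the root schema:

from setuptools.config._validate_pyproject.fastjsonschema_validations import (
    JsonSchemaValueException,
    validate,
)

try:
    # The type check fires before any format lookup, so no custom formats
    # are needed to reproduce this particular error.
    validate(["not", "a", "toml", "table"])
except JsonSchemaValueException as exc:
    print(exc)  # "data must be object"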
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
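The ``data_keys = set(data.keys())`` bookkeeping seen here is how the generator implements ``additionalProperties: False``: every recognized property removes its key from the set as it is validated, and whatever survives to the final ``if data_keys:`` check is reported as an unexpected property. A standalone sketch of the same pattern (hypothetical names, not generated code):

def check_no_extra_keys(data, known, name="data"):
    # Keys not claimed by any property handler are rejected wholesale.
    remaining = set(data) - set(known)
    if remaining:
        raise ValueError(f"{name} must not contain {remaining} properties")

check_no_extra_keys(
    {"build-system": {}, "tool": {}, "typo": 1},
    known=("build-system", "project", "tool", "dependency-groups"),
)  # -> ValueError: data must not contain {'typo'} properties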
@@ -98,8 +99,59 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_bui
                     data__tool_keys.remove("setuptools")
                     data__tool__setuptools = data__tool["setuptools"]
                     validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools")
+        if "dependency-groups" in data_keys:
+            data_keys.remove("dependency-groups")
+            data__dependencygroups = data["dependency-groups"]
+            if not isinstance(data__dependencygroups, (dict)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups must be object", value=data__dependencygroups, name="" + (name_prefix or "data") + ".dependency-groups", definition={'type': 'object', 'description': 'Dependency groups following PEP 735', 'additionalProperties': False, 'patternProperties': {'^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$': {'type': 'array', 'items': {'oneOf': [{'type': 'string', 'description': 'Python package specifiers following PEP 508', 'format': 'pep508'}, {'type': 'object', 'additionalProperties': False, 'properties': {'include-group': {'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}}}]}}}}, rule='type')
+            data__dependencygroups_is_dict = isinstance(data__dependencygroups, dict)
+            if data__dependencygroups_is_dict:
+                data__dependencygroups_keys = set(data__dependencygroups.keys())
+                for data__dependencygroups_key, data__dependencygroups_val in data__dependencygroups.items():
+                    if REGEX_PATTERNS['^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'].search(data__dependencygroups_key):
+                        if data__dependencygroups_key in data__dependencygroups_keys:
+                            data__dependencygroups_keys.remove(data__dependencygroups_key)
+                        if not isinstance(data__dependencygroups_val, (list, tuple)):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}".format(**locals()) + " must be array", value=data__dependencygroups_val, name="" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'oneOf': [{'type': 'string', 'description': 'Python package specifiers following PEP 508', 'format': 'pep508'}, {'type': 'object', 'additionalProperties': False, 'properties': {'include-group': {'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}}}]}}, rule='type')
+                        data__dependencygroups_val_is_list = isinstance(data__dependencygroups_val, (list, tuple))
+                        if data__dependencygroups_val_is_list:
+                            data__dependencygroups_val_len = len(data__dependencygroups_val)
+                            for data__dependencygroups_val_x, data__dependencygroups_val_item in enumerate(data__dependencygroups_val):
+                                data__dependencygroups_val_item_one_of_count1 = 0
+                                if data__dependencygroups_val_item_one_of_count1 < 2:
+                                    try:
+                                        if not isinstance(data__dependencygroups_val_item, (str)):
+                                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + " must be string", value=data__dependencygroups_val_item, name="" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + "", definition={'type': 'string', 'description': 'Python package specifiers following PEP 508', 'format': 'pep508'}, rule='type')
+                                        if isinstance(data__dependencygroups_val_item, str):
+                                            if not custom_formats["pep508"](data__dependencygroups_val_item):
+                                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + " must be pep508", value=data__dependencygroups_val_item, name="" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + "", definition={'type': 'string', 'description': 'Python package specifiers following PEP 508', 'format': 'pep508'}, rule='format')
+                                        data__dependencygroups_val_item_one_of_count1 += 1
+                                    except JsonSchemaValueException: pass
+                                if data__dependencygroups_val_item_one_of_count1 < 2:
+                                    try:
+                                        if not isinstance(data__dependencygroups_val_item, (dict)):
+                                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + " must be object", value=data__dependencygroups_val_item, name="" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + "", definition={'type': 'object', 'additionalProperties': False, 'properties': {'include-group': {'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}}}, rule='type')
+                                        data__dependencygroups_val_item_is_dict = isinstance(data__dependencygroups_val_item, dict)
+                                        if data__dependencygroups_val_item_is_dict:
+                                            data__dependencygroups_val_item_keys = set(data__dependencygroups_val_item.keys())
+                                            if "include-group" in data__dependencygroups_val_item_keys:
+                                                data__dependencygroups_val_item_keys.remove("include-group")
+                                                data__dependencygroups_val_item__includegroup = data__dependencygroups_val_item["include-group"]
+                                                if not isinstance(data__dependencygroups_val_item__includegroup, (str)):
+                                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}].include-group".format(**locals()) + " must be string", value=data__dependencygroups_val_item__includegroup, name="" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}].include-group".format(**locals()) + "", definition={'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}, rule='type')
+                                                if isinstance(data__dependencygroups_val_item__includegroup, str):
+                                                    if not REGEX_PATTERNS['^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'].search(data__dependencygroups_val_item__includegroup):
+                                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}].include-group".format(**locals()) + " must match pattern ^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$", value=data__dependencygroups_val_item__includegroup, name="" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}].include-group".format(**locals()) + "", definition={'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}, rule='pattern')
+                                            if data__dependencygroups_val_item_keys:
+                                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + " must not contain "+str(data__dependencygroups_val_item_keys)+" properties", value=data__dependencygroups_val_item, name="" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + "", definition={'type': 'object', 'additionalProperties': False, 'properties': {'include-group': {'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}}}, rule='additionalProperties')
+                                        data__dependencygroups_val_item_one_of_count1 += 1
+                                    except JsonSchemaValueException: pass
+                                if data__dependencygroups_val_item_one_of_count1 != 1:
+                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + " must be valid exactly by one definition" + (" (" + str(data__dependencygroups_val_item_one_of_count1) + " matches found)"), value=data__dependencygroups_val_item, name="" + (name_prefix or "data") + ".dependency-groups.{data__dependencygroups_key}[{data__dependencygroups_val_x}]".format(**locals()) + "", definition={'oneOf': [{'type': 'string', 'description': 'Python package specifiers following PEP 508', 'format': 'pep508'}, {'type': 'object', 'additionalProperties': False, 'properties': {'include-group': {'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}}}]}, rule='oneOf')
+                if data__dependencygroups_keys:
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependency-groups must not contain "+str(data__dependencygroups_keys)+" properties", value=data__dependencygroups, name="" + (name_prefix or "data") + ".dependency-groups", definition={'type': 'object', 'description': 'Dependency groups following PEP 735', 'additionalProperties': False, 'patternProperties': {'^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$': {'type': 'array', 'items': {'oneOf': [{'type': 'string', 'description': 'Python package specifiers following PEP 508', 'format': 'pep508'}, {'type': 'object', 'additionalProperties': False, 'properties': {'include-group': {'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}}}]}}}}, rule='additionalProperties')
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, {'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'license-files': {'description': 'Paths or globs to paths of license files', 'type': 'array', 'items': {'type': 'string'}}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'allOf': [{'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}}, {'if': {'required': ['license-files']}, 'then': {'properties': {'license': {'type': 'string'}}}}], 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, 'dependency-groups': {'type': 'object', 'description': 'Dependency groups following PEP 735', 'additionalProperties': False, 'patternProperties': {'^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$': {'type': 'array', 'items': {'oneOf': [{'type': 'string', 'description': 'Python package specifiers following PEP 508', 'format': 'pep508'}, {'type': 'object', 'additionalProperties': False, 'properties': {'include-group': {'description': 'Another dependency group to include in this one', 'type': 'string', 'pattern': '^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9])$'}}}]}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, {'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'license-files': {'description': 'Paths or globs to paths of license files', 'type': 'array', 'items': {'type': 'string'}}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'allOf': [{'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}}, {'if': {'required': ['license-files']}, 'then': {'properties': {'license': {'type': 'string'}}}}], 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
     return data
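
The renumbered counters in the hunks below all serve the same generated matching idiom: each oneOf/anyOf branch is attempted inside a try/except JsonSchemaValueException, a module-unique counter records how many branches accepted the value, and for oneOf exactly one match is required. A minimal hand-written sketch of that idiom, with illustrative names that are not part of the generated module:

    # Sketch of the generated ``oneOf`` idiom: try each branch, count the
    # matches, and require exactly one. Names here are illustrative only.
    class JsonSchemaValueException(ValueError):
        pass

    def validate_str_or_str_list(data):
        one_of_count = 0
        if one_of_count < 2:  # stop probing once two branches have matched
            try:
                if not isinstance(data, str):
                    raise JsonSchemaValueException("must be string")
                one_of_count += 1
            except JsonSchemaValueException:
                pass
        if one_of_count < 2:
            try:
                if not isinstance(data, (list, tuple)):
                    raise JsonSchemaValueException("must be array")
                if not all(isinstance(item, str) for item in data):
                    raise JsonSchemaValueException("items must be strings")
                one_of_count += 1
            except JsonSchemaValueException:
                pass
        if one_of_count != 1:
            raise JsonSchemaValueException(
                "must be valid exactly by one definition"
                " (%d matches found)" % one_of_count
            )
        return data

The renames in this patch (count1 -> count2, and so on) only shift those unique suffixes: inserting a new branch earlier in the generated module renumbers every later counter without changing the logic.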
 
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data, custom_formats={}, name_prefix=None):
@@ -177,8 +229,8 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
         if "packages" in data_keys:
             data_keys.remove("packages")
             data__packages = data["packages"]
-            data__packages_one_of_count1 = 0
-            if data__packages_one_of_count1 < 2:
+            data__packages_one_of_count2 = 0
+            if data__packages_one_of_count2 < 2:
                 try:
                     if not isinstance(data__packages, (list, tuple)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, rule='type')
@@ -187,15 +239,15 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                         data__packages_len = len(data__packages)
                         for data__packages_x, data__packages_item in enumerate(data__packages):
                             validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_package_name(data__packages_item, custom_formats, (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()))
-                    data__packages_one_of_count1 += 1
+                    data__packages_one_of_count2 += 1
                 except JsonSchemaValueException: pass
-            if data__packages_one_of_count1 < 2:
+            if data__packages_one_of_count2 < 2:
                 try:
                     validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_find_directive(data__packages, custom_formats, (name_prefix or "data") + ".packages")
-                    data__packages_one_of_count1 += 1
+                    data__packages_one_of_count2 += 1
                 except JsonSchemaValueException: pass
-            if data__packages_one_of_count1 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf')
+            if data__packages_one_of_count2 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count2) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf')
         if "package-dir" in data_keys:
             data_keys.remove("package-dir")
             data__packagedir = data["package-dir"]
@@ -217,19 +269,19 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                     data__packagedir_property_names = True
                     for data__packagedir_key in data__packagedir:
                         try:
-                            data__packagedir_key_any_of_count2 = 0
-                            if not data__packagedir_key_any_of_count2:
+                            data__packagedir_key_any_of_count3 = 0
+                            if not data__packagedir_key_any_of_count3:
                                 try:
                                     if data__packagedir_key != "":
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be same as const definition: ", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'const': ''}, rule='const')
-                                    data__packagedir_key_any_of_count2 += 1
+                                    data__packagedir_key_any_of_count3 += 1
                                 except JsonSchemaValueException: pass
-                            if not data__packagedir_key_any_of_count2:
+                            if not data__packagedir_key_any_of_count3:
                                 try:
                                     validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_package_name(data__packagedir_key, custom_formats, (name_prefix or "data") + ".package-dir")
-                                    data__packagedir_key_any_of_count2 += 1
+                                    data__packagedir_key_any_of_count3 += 1
                                 except JsonSchemaValueException: pass
-                            if not data__packagedir_key_any_of_count2:
+                            if not data__packagedir_key_any_of_count3:
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir cannot be validated by any definition", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, rule='anyOf')
                         except JsonSchemaValueException:
                             data__packagedir_property_names = False
@@ -262,23 +314,23 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                     data__packagedata_property_names = True
                     for data__packagedata_key in data__packagedata:
                         try:
-                            data__packagedata_key_any_of_count3 = 0
-                            if not data__packagedata_key_any_of_count3:
+                            data__packagedata_key_any_of_count4 = 0
+                            if not data__packagedata_key_any_of_count4:
                                 try:
                                     if not isinstance(data__packagedata_key, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be string", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
                                     if isinstance(data__packagedata_key, str):
                                         if not custom_formats["python-module-name"](data__packagedata_key):
                                             raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be python-module-name", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
-                                    data__packagedata_key_any_of_count3 += 1
+                                    data__packagedata_key_any_of_count4 += 1
                                 except JsonSchemaValueException: pass
-                            if not data__packagedata_key_any_of_count3:
+                            if not data__packagedata_key_any_of_count4:
                                 try:
                                     if data__packagedata_key != "*":
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be same as const definition: *", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'const': '*'}, rule='const')
-                                    data__packagedata_key_any_of_count3 += 1
+                                    data__packagedata_key_any_of_count4 += 1
                                 except JsonSchemaValueException: pass
-                            if not data__packagedata_key_any_of_count3:
+                            if not data__packagedata_key_any_of_count4:
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data cannot be validated by any definition", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, rule='anyOf')
                         except JsonSchemaValueException:
                             data__packagedata_property_names = False
@@ -316,23 +368,23 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                     data__excludepackagedata_property_names = True
                     for data__excludepackagedata_key in data__excludepackagedata:
                         try:
-                            data__excludepackagedata_key_any_of_count4 = 0
-                            if not data__excludepackagedata_key_any_of_count4:
+                            data__excludepackagedata_key_any_of_count5 = 0
+                            if not data__excludepackagedata_key_any_of_count5:
                                 try:
                                     if not isinstance(data__excludepackagedata_key, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be string", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
                                     if isinstance(data__excludepackagedata_key, str):
                                         if not custom_formats["python-module-name"](data__excludepackagedata_key):
                                             raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
-                                    data__excludepackagedata_key_any_of_count4 += 1
+                                    data__excludepackagedata_key_any_of_count5 += 1
                                 except JsonSchemaValueException: pass
-                            if not data__excludepackagedata_key_any_of_count4:
+                            if not data__excludepackagedata_key_any_of_count5:
                                 try:
                                     if data__excludepackagedata_key != "*":
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be same as const definition: *", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'const': '*'}, rule='const')
-                                    data__excludepackagedata_key_any_of_count4 += 1
+                                    data__excludepackagedata_key_any_of_count5 += 1
                                 except JsonSchemaValueException: pass
-                            if not data__excludepackagedata_key_any_of_count4:
+                            if not data__excludepackagedata_key_any_of_count5:
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data cannot be validated by any definition", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, rule='anyOf')
                         except JsonSchemaValueException:
                             data__excludepackagedata_property_names = False
@@ -435,19 +487,19 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                 if "version" in data__dynamic_keys:
                     data__dynamic_keys.remove("version")
                     data__dynamic__version = data__dynamic["version"]
-                    data__dynamic__version_one_of_count5 = 0
-                    if data__dynamic__version_one_of_count5 < 2:
+                    data__dynamic__version_one_of_count6 = 0
+                    if data__dynamic__version_one_of_count6 < 2:
                         try:
                             validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_attr_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
-                            data__dynamic__version_one_of_count5 += 1
+                            data__dynamic__version_one_of_count6 += 1
                         except JsonSchemaValueException: pass
-                    if data__dynamic__version_one_of_count5 < 2:
+                    if data__dynamic__version_one_of_count6 < 2:
                         try:
                             validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
-                            data__dynamic__version_one_of_count5 += 1
+                            data__dynamic__version_one_of_count6 += 1
                         except JsonSchemaValueException: pass
-                    if data__dynamic__version_one_of_count5 != 1:
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count5) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf')
+                    if data__dynamic__version_one_of_count6 != 1:
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count6) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf')
                 if "classifiers" in data__dynamic_keys:
                     data__dynamic_keys.remove("classifiers")
                     data__dynamic__classifiers = data__dynamic["classifiers"]
@@ -498,13 +550,13 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                     data__dynamic__readme = data__dynamic["readme"]
                     if not isinstance(data__dynamic__readme, (dict)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must be object", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}, rule='type')
-                    data__dynamic__readme_any_of_count6 = 0
-                    if not data__dynamic__readme_any_of_count6:
+                    data__dynamic__readme_any_of_count7 = 0
+                    if not data__dynamic__readme_any_of_count7:
                         try:
                             validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive(data__dynamic__readme, custom_formats, (name_prefix or "data") + ".dynamic.readme")
-                            data__dynamic__readme_any_of_count6 += 1
+                            data__dynamic__readme_any_of_count7 += 1
                         except JsonSchemaValueException: pass
-                    if not data__dynamic__readme_any_of_count6:
+                    if not data__dynamic__readme_any_of_count7:
                         try:
                             if not isinstance(data__dynamic__readme, (dict)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must be object", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}, rule='type')
@@ -522,9 +574,9 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                                     validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_properties_file(data__dynamic__readme__file, custom_formats, (name_prefix or "data") + ".dynamic.readme.file")
                                 if data__dynamic__readme_keys:
                                     raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must not contain "+str(data__dynamic__readme_keys)+" properties", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}, rule='additionalProperties')
-                            data__dynamic__readme_any_of_count6 += 1
+                            data__dynamic__readme_any_of_count7 += 1
                         except JsonSchemaValueException: pass
-                    if not data__dynamic__readme_any_of_count6:
+                    if not data__dynamic__readme_any_of_count7:
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme cannot be validated by any definition", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}, rule='anyOf')
                     data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict)
                     if data__dynamic__readme_is_dict:
@@ -538,14 +590,14 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
     return data
 
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_properties_file(data, custom_formats={}, name_prefix=None):
-    data_one_of_count7 = 0
-    if data_one_of_count7 < 2:
+    data_one_of_count8 = 0
+    if data_one_of_count8 < 2:
         try:
             if not isinstance(data, (str)):
                 raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string'}, rule='type')
-            data_one_of_count7 += 1
+            data_one_of_count8 += 1
         except JsonSchemaValueException: pass
-    if data_one_of_count7 < 2:
+    if data_one_of_count8 < 2:
         try:
             if not isinstance(data, (list, tuple)):
                 raise JsonSchemaValueException("" + (name_prefix or "data") + " must be array", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
@@ -555,10 +607,10 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                 for data_x, data_item in enumerate(data):
                     if not isinstance(data_item, (str)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + "[{data_x}]".format(**locals()) + " must be string", value=data_item, name="" + (name_prefix or "data") + "[{data_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
-            data_one_of_count7 += 1
+            data_one_of_count8 += 1
         except JsonSchemaValueException: pass
-    if data_one_of_count7 != 1:
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count7) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
+    if data_one_of_count8 != 1:
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count8) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
     return data
 
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_for_dependencies(data, custom_formats={}, name_prefix=None):
@@ -577,14 +629,14 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
         if "file" in data_keys:
             data_keys.remove("file")
             data__file = data["file"]
-            data__file_one_of_count8 = 0
-            if data__file_one_of_count8 < 2:
+            data__file_one_of_count9 = 0
+            if data__file_one_of_count9 < 2:
                 try:
                     if not isinstance(data__file, (str)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be string", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'string'}, rule='type')
-                    data__file_one_of_count8 += 1
+                    data__file_one_of_count9 += 1
                 except JsonSchemaValueException: pass
-            if data__file_one_of_count8 < 2:
+            if data__file_one_of_count9 < 2:
                 try:
                     if not isinstance(data__file, (list, tuple)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be array", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
@@ -594,10 +646,10 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                         for data__file_x, data__file_item in enumerate(data__file):
                             if not isinstance(data__file_item, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + " must be string", value=data__file_item, name="" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
-                    data__file_one_of_count8 += 1
+                    data__file_one_of_count9 += 1
                 except JsonSchemaValueException: pass
-            if data__file_one_of_count8 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count8) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
+            if data__file_one_of_count9 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count9) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
         if data_keys:
             raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='additionalProperties')
     return data
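
Both file-directive validators follow the counting pattern above and are ordinarily reached through a single package-level entry point. A usage sketch, assuming the package re-exports validate and JsonSchemaValueException the way setuptools exposes its vendored copy (the exact import path is an assumption, not something this diff guarantees):

    # Usage sketch; the import path below is an assumption about the layout.
    from setuptools.config._validate_pyproject import (
        JsonSchemaValueException,
        validate,
    )

    pyproject = {
        "project": {"name": "mypkg", "version": "0.1.0"},
        "tool": {"setuptools": {"packages": ["mypkg"]}},
    }

    try:
        validate(pyproject)
    except JsonSchemaValueException as exc:
        print("invalid pyproject.toml:", exc)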
@@ -682,21 +734,21 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][0]".format(**locals()) + " must be string", value=data__definemacros_item__0, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][0]".format(**locals()) + "", definition={'description': 'macro name', 'type': 'string'}, rule='type')
                         if data__definemacros_item_len > 1:
                             data__definemacros_item__1 = data__definemacros_item[1]
-                            data__definemacros_item__1_one_of_count9 = 0
-                            if data__definemacros_item__1_one_of_count9 < 2:
+                            data__definemacros_item__1_one_of_count10 = 0
+                            if data__definemacros_item__1_one_of_count10 < 2:
                                 try:
                                     if not isinstance(data__definemacros_item__1, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be string", value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
-                                    data__definemacros_item__1_one_of_count9 += 1
+                                    data__definemacros_item__1_one_of_count10 += 1
                                 except JsonSchemaValueException: pass
-                            if data__definemacros_item__1_one_of_count9 < 2:
+                            if data__definemacros_item__1_one_of_count10 < 2:
                                 try:
                                     if not isinstance(data__definemacros_item__1, (NoneType)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be null", value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'type': 'null'}, rule='type')
-                                    data__definemacros_item__1_one_of_count9 += 1
+                                    data__definemacros_item__1_one_of_count10 += 1
                                 except JsonSchemaValueException: pass
-                            if data__definemacros_item__1_one_of_count9 != 1:
-                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be valid exactly by one definition" + (" (" + str(data__definemacros_item__1_one_of_count9) + " matches found)"), value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}, rule='oneOf')
+                            if data__definemacros_item__1_one_of_count10 != 1:
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be valid exactly by one definition" + (" (" + str(data__definemacros_item__1_one_of_count10) + " matches found)"), value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}, rule='oneOf')
                         if data__definemacros_item_len > 2:
                             raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + " must contain only specified items", value=data__definemacros_item, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + "", definition={'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}, rule='items')
         if "undef-macros" in data_keys:
@@ -889,26 +941,26 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_package_name(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (str)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='type')
-    data_any_of_count10 = 0
-    if not data_any_of_count10:
+    data_any_of_count11 = 0
+    if not data_any_of_count11:
         try:
             if not isinstance(data, (str)):
                 raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='type')
             if isinstance(data, str):
                 if not custom_formats["python-module-name-relaxed"](data):
                     raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-module-name-relaxed", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
-            data_any_of_count10 += 1
+            data_any_of_count11 += 1
         except JsonSchemaValueException: pass
-    if not data_any_of_count10:
+    if not data_any_of_count11:
         try:
             if not isinstance(data, (str)):
                 raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'pep561-stub-name'}, rule='type')
             if isinstance(data, str):
                 if not custom_formats["pep561-stub-name"](data):
                     raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep561-stub-name", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'pep561-stub-name'}, rule='format')
-            data_any_of_count10 += 1
+            data_any_of_count11 += 1
         except JsonSchemaValueException: pass
-    if not data_any_of_count10:
+    if not data_any_of_count11:
         raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='anyOf')
     return data
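
The package-name validator above shows the companion anyOf form of the counting idiom: branches are probed the same way, but the guards are ``if not ..._any_of_count`` and success needs only at least one match. A reduced hand-written sketch (ValueError stands in for JsonSchemaValueException; the predicates are illustrative, supplied by the caller):

    # Sketch of the generated ``anyOf`` idiom: at least one branch must match.
    def validate_relaxed_or_stub_name(data, is_relaxed, is_stub):
        any_of_count = 0
        if not any_of_count:
            try:
                if not (isinstance(data, str) and is_relaxed(data)):
                    raise ValueError("must be python-module-name-relaxed")
                any_of_count += 1
            except ValueError:
                pass
        if not any_of_count:
            try:
                if not (isinstance(data, str) and is_stub(data)):
                    raise ValueError("must be pep561-stub-name")
                any_of_count += 1
            except ValueError:
                pass
        if not any_of_count:
            raise ValueError("cannot be validated by any definition")
        return data

    # e.g. validate_relaxed_or_stub_name("mypkg", str.isidentifier,
    #                                    lambda s: s.endswith("-stubs"))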
 
@@ -933,12 +985,63 @@ def validate_https___setuptools_pypa_io_en_latest_deprecated_distutils_configfil
 
 def validate_https___packaging_python_org_en_latest_specifications_pyproject_toml(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, {'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'license-files': {'description': 'Paths or globs to paths of license files', 'type': 'array', 'items': {'type': 'string'}}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'allOf': [{'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}}, {'if': {'required': ['license-files']}, 'then': {'properties': {'license': {'type': 'string'}}}}], 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type')
+    try:
+        try:
+            data_is_dict = isinstance(data, dict)
+            if data_is_dict:
+                data__missing_keys = set(['dynamic']) - data.keys()
+                if data__missing_keys:
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, rule='required')
+                data_keys = set(data.keys())
+                if "dynamic" in data_keys:
+                    data_keys.remove("dynamic")
+                    data__dynamic = data["dynamic"]
+                    data__dynamic_is_list = isinstance(data__dynamic, (list, tuple))
+                    if data__dynamic_is_list:
+                        data__dynamic_contains = False
+                        for data__dynamic_key in data__dynamic:
+                            try:
+                                if data__dynamic_key != "version":
+                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be same as const definition: version", value=data__dynamic_key, name="" + (name_prefix or "data") + ".dynamic", definition={'const': 'version'}, rule='const')
+                                data__dynamic_contains = True
+                                break
+                            except JsonSchemaValueException: pass
+                        if not data__dynamic_contains:
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must contain one of contains definition", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}, rule='contains')
+        except JsonSchemaValueException: pass
+        else:
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must NOT match a disallowed definition", value=data, name="" + (name_prefix or "data") + "", definition={'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, rule='not')
+    except JsonSchemaValueException:
+        pass
+    else:
+        data_is_dict = isinstance(data, dict)
+        if data_is_dict:
+            data__missing_keys = set(['version']) - data.keys()
+            if data__missing_keys:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, rule='required')
+    try:
+        data_is_dict = isinstance(data, dict)
+        if data_is_dict:
+            data__missing_keys = set(['license-files']) - data.keys()
+            if data__missing_keys:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'required': ['license-files']}, rule='required')
+    except JsonSchemaValueException:
+        pass
+    else:
+        data_is_dict = isinstance(data, dict)
+        if data_is_dict:
+            data_keys = set(data.keys())
+            if "license" in data_keys:
+                data_keys.remove("license")
+                data__license = data["license"]
+                if not isinstance(data__license, (str)):
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be string", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'type': 'string'}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data__missing_keys = set(['name']) - data.keys()
         if data__missing_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, {'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'license-files': {'description': 'Paths or globs to paths of license files', 'type': 'array', 'items': {'type': 'string'}}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'allOf': [{'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}}, {'if': {'required': ['license-files']}, 'then': {'properties': {'license': {'type': 'string'}}}}], 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required')
         data_keys = set(data.keys())
         if "name" in data_keys:
             data_keys.remove("name")
@@ -964,19 +1067,19 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
         if "readme" in data_keys:
             data_keys.remove("readme")
             data__readme = data["readme"]
-            data__readme_one_of_count11 = 0
-            if data__readme_one_of_count11 < 2:
+            data__readme_one_of_count12 = 0
+            if data__readme_one_of_count12 < 2:
                 try:
                     if not isinstance(data__readme, (str)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type')
-                    data__readme_one_of_count11 += 1
+                    data__readme_one_of_count12 += 1
                 except JsonSchemaValueException: pass
-            if data__readme_one_of_count11 < 2:
+            if data__readme_one_of_count12 < 2:
                 try:
                     if not isinstance(data__readme, (dict)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type')
-                    data__readme_any_of_count12 = 0
-                    if not data__readme_any_of_count12:
+                    data__readme_any_of_count13 = 0
+                    if not data__readme_any_of_count13:
                         try:
                             data__readme_is_dict = isinstance(data__readme, dict)
                             if data__readme_is_dict:
@@ -989,9 +1092,9 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                                     data__readme__file = data__readme["file"]
                                     if not isinstance(data__readme__file, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type')
-                            data__readme_any_of_count12 += 1
+                            data__readme_any_of_count13 += 1
                         except JsonSchemaValueException: pass
-                    if not data__readme_any_of_count12:
+                    if not data__readme_any_of_count13:
                         try:
                             data__readme_is_dict = isinstance(data__readme, dict)
                             if data__readme_is_dict:
@@ -1004,9 +1107,9 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                                     data__readme__text = data__readme["text"]
                                     if not isinstance(data__readme__text, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type')
-                            data__readme_any_of_count12 += 1
+                            data__readme_any_of_count13 += 1
                         except JsonSchemaValueException: pass
-                    if not data__readme_any_of_count12:
+                    if not data__readme_any_of_count13:
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf')
                     data__readme_is_dict = isinstance(data__readme, dict)
                     if data__readme_is_dict:
@@ -1019,10 +1122,10 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                             data__readme__contenttype = data__readme["content-type"]
                             if not isinstance(data__readme__contenttype, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type')
-                    data__readme_one_of_count11 += 1
+                    data__readme_one_of_count12 += 1
                 except JsonSchemaValueException: pass
-            if data__readme_one_of_count11 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count11) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
+            if data__readme_one_of_count12 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count12) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
         if "requires-python" in data_keys:
             data_keys.remove("requires-python")
             data__requirespython = data["requires-python"]
@@ -1034,39 +1137,63 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
         if "license" in data_keys:
             data_keys.remove("license")
             data__license = data["license"]
-            data__license_one_of_count13 = 0
-            if data__license_one_of_count13 < 2:
+            data__license_one_of_count14 = 0
+            if data__license_one_of_count14 < 2:
+                try:
+                    if not isinstance(data__license, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be string", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, rule='type')
+                    if isinstance(data__license, str):
+                        if not custom_formats["SPDX"](data__license):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be SPDX", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, rule='format')
+                    data__license_one_of_count14 += 1
+                except JsonSchemaValueException: pass
+            if data__license_one_of_count14 < 2:
                 try:
+                    if not isinstance(data__license, (dict)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be object", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='type')
                     data__license_is_dict = isinstance(data__license, dict)
                     if data__license_is_dict:
                         data__license__missing_keys = set(['file']) - data__license.keys()
                         if data__license__missing_keys:
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain " + (str(sorted(data__license__missing_keys)) + " properties"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required')
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain " + (str(sorted(data__license__missing_keys)) + " properties"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required')
                         data__license_keys = set(data__license.keys())
                         if "file" in data__license_keys:
                             data__license_keys.remove("file")
                             data__license__file = data__license["file"]
                             if not isinstance(data__license__file, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type')
-                    data__license_one_of_count13 += 1
+                    data__license_one_of_count14 += 1
                 except JsonSchemaValueException: pass
-            if data__license_one_of_count13 < 2:
+            if data__license_one_of_count14 < 2:
                 try:
+                    if not isinstance(data__license, (dict)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be object", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}, rule='type')
                     data__license_is_dict = isinstance(data__license, dict)
                     if data__license_is_dict:
                         data__license__missing_keys = set(['text']) - data__license.keys()
                         if data__license__missing_keys:
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain " + (str(sorted(data__license__missing_keys)) + " properties"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}, rule='required')
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain " + (str(sorted(data__license__missing_keys)) + " properties"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}, rule='required')
                         data__license_keys = set(data__license.keys())
                         if "text" in data__license_keys:
                             data__license_keys.remove("text")
                             data__license__text = data__license["text"]
                             if not isinstance(data__license__text, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}, rule='type')
-                    data__license_one_of_count13 += 1
+                    data__license_one_of_count14 += 1
                 except JsonSchemaValueException: pass
-            if data__license_one_of_count13 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count13) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf')
+            if data__license_one_of_count14 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count14) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, {'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf')
+        if "license-files" in data_keys:
+            data_keys.remove("license-files")
+            data__licensefiles = data["license-files"]
+            if not isinstance(data__licensefiles, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files must be array", value=data__licensefiles, name="" + (name_prefix or "data") + ".license-files", definition={'description': 'Paths or globs to paths of license files', 'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__licensefiles_is_list = isinstance(data__licensefiles, (list, tuple))
+            if data__licensefiles_is_list:
+                data__licensefiles_len = len(data__licensefiles)
+                for data__licensefiles_x, data__licensefiles_item in enumerate(data__licensefiles):
+                    if not isinstance(data__licensefiles_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + " must be string", value=data__licensefiles_item, name="" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
         if "authors" in data_keys:
             data_keys.remove("authors")
             data__authors = data["authors"]
@@ -1211,49 +1338,15 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
             data_keys.remove("dynamic")
             data__dynamic = data["dynamic"]
             if not isinstance(data__dynamic, (list, tuple)):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be array", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be array", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}, rule='type')
             data__dynamic_is_list = isinstance(data__dynamic, (list, tuple))
             if data__dynamic_is_list:
                 data__dynamic_len = len(data__dynamic)
                 for data__dynamic_x, data__dynamic_item in enumerate(data__dynamic):
-                    if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']:
-                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + " must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name="" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + "", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum')
+                    if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']:
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + " must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name="" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + "", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum')
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties')
-    try:
-        try:
-            data_is_dict = isinstance(data, dict)
-            if data_is_dict:
-                data__missing_keys = set(['dynamic']) - data.keys()
-                if data__missing_keys:
-                    raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, rule='required')
-                data_keys = set(data.keys())
-                if "dynamic" in data_keys:
-                    data_keys.remove("dynamic")
-                    data__dynamic = data["dynamic"]
-                    data__dynamic_is_list = isinstance(data__dynamic, (list, tuple))
-                    if data__dynamic_is_list:
-                        data__dynamic_contains = False
-                        for data__dynamic_key in data__dynamic:
-                            try:
-                                if data__dynamic_key != "version":
-                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be same as const definition: version", value=data__dynamic_key, name="" + (name_prefix or "data") + ".dynamic", definition={'const': 'version'}, rule='const')
-                                data__dynamic_contains = True
-                                break
-                            except JsonSchemaValueException: pass
-                        if not data__dynamic_contains:
-                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must contain one of contains definition", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}, rule='contains')
-        except JsonSchemaValueException: pass
-        else:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must NOT match a disallowed definition", value=data, name="" + (name_prefix or "data") + "", definition={'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, rule='not')
-    except JsonSchemaValueException:
-        pass
-    else:
-        data_is_dict = isinstance(data, dict)
-        if data_is_dict:
-            data__missing_keys = set(['version']) - data.keys()
-            if data__missing_keys:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, rule='required')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'type': 'string', 'description': 'An SPDX license identifier', 'format': 'SPDX'}, {'type': 'object', 'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'type': 'object', 'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'license-files': {'description': 'Paths or globs to paths of license files', 'type': 'array', 'items': {'type': 'string'}}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'allOf': [{'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}}, {'if': {'required': ['license-files']}, 'then': {'properties': {'license': {'type': 'string'}}}}], 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties')
     return data
 
 def validate_https___packaging_python_org_en_latest_specifications_pyproject_toml___definitions_dependency(data, custom_formats={}, name_prefix=None):
diff --git a/setuptools/config/_validate_pyproject/formats.py b/setuptools/config/_validate_pyproject/formats.py
index 153b1f0b27..1cf4a465ef 100644
--- a/setuptools/config/_validate_pyproject/formats.py
+++ b/setuptools/config/_validate_pyproject/formats.py
@@ -164,12 +164,15 @@ class _TroveClassifier:
     """
 
     downloaded: typing.Union[None, "Literal[False]", typing.Set[str]]
+    """
+    None => not cached yet
+    False => unavailable
+    set => cached values
+    """
 
     def __init__(self) -> None:
         self.downloaded = None
         self._skip_download = False
-        # None => not cached yet
-        # False => cache not available
         self.__name__ = "trove_classifier"  # Emulate a public function
 
     def _disable_download(self) -> None:
@@ -351,7 +354,7 @@ def python_entrypoint_reference(value: str) -> bool:
         obj = rest
 
     module_parts = module.split(".")
-    identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
+    identifiers = _chain(module_parts, obj.split(".")) if rest else iter(module_parts)
     return all(python_identifier(i.strip()) for i in identifiers)
 
 
@@ -373,3 +376,27 @@ def uint(value: builtins.int) -> bool:
 def int(value: builtins.int) -> bool:
     r"""Signed 64-bit integer (:math:`-2^{63} \leq x < 2^{63}`)"""
     return -(2**63) <= value < 2**63
+
+
+try:
+    from packaging import licenses as _licenses
+
+    def SPDX(value: str) -> bool:
+        """See :ref:`PyPA's License-Expression specification
+        ` (added in :pep:`639`).
+        """
+        try:
+            _licenses.canonicalize_license_expression(value)
+            return True
+        except _licenses.InvalidLicenseExpression:
+            return False
+
+except ImportError:  # pragma: no cover
+    _logger.warning(
+        "Could not find an up-to-date installation of `packaging`. "
+        "License expressions might not be validated. "
+        "To enforce validation, please install `packaging>=24.2`."
+    )
+
+    def SPDX(value: str) -> bool:
+        return True
diff --git a/tox.ini b/tox.ini
index 12e156a3fa..942e2b9835 100644
--- a/tox.ini
+++ b/tox.ini
@@ -83,7 +83,7 @@ commands =
 [testenv:generate-validation-code]
 skip_install = True
 deps =
-	validate-pyproject[all]==0.19
+	validate-pyproject[all]==0.23
 commands =
 	python -m tools.generate_validation_code
 

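The docstring added to `_TroveClassifier.downloaded` above documents a tri-state cache sentinel: `None` (not fetched yet), `False` (download failed, validation unavailable), or a `set` of known classifiers. A minimal sketch of a lookup that follows the same convention; the helper name is hypothetical and not part of setuptools:

    from typing import Literal, Set, Union

    Downloaded = Union[None, Literal[False], Set[str]]

    def is_known_classifier(cache: Downloaded, value: str) -> bool:
        # Hypothetical helper illustrating the tri-state convention above.
        if cache is None:
            raise RuntimeError("classifiers not downloaded yet")  # not cached yet
        if cache is False:
            return True  # cache unavailable: accept anything (best effort)
        return value in cache  # cached set of valid classifiers
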
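The new `SPDX` format validator follows an optional-dependency pattern: use `packaging.licenses` (available in `packaging>=24.2`) when importable, and otherwise degrade to a permissive no-op instead of failing. A condensed sketch of the same pattern, with a renamed helper to keep it self-contained:

    try:
        from packaging import licenses as _licenses

        def is_valid_spdx(value: str) -> bool:
            # Canonicalization doubles as validation: it raises on bad input.
            try:
                _licenses.canonicalize_license_expression(value)
                return True
            except _licenses.InvalidLicenseExpression:
                return False

    except ImportError:

        def is_valid_spdx(value: str) -> bool:
            # Without packaging>=24.2, accept everything rather than fail.
            return True
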
From 3869b1098aede38982f17f70a68569718af9cb96 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 3 Feb 2025 15:16:28 +0000
Subject: [PATCH 1408/1761] Add workaround for GHA failure in 'macos-latest'
 with 3.9

---
 .github/workflows/main.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index c9de26d330..ef175cb8d7 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -63,9 +63,6 @@ jobs:
         - platform: ubuntu-latest
           python: "3.10"
           distutils: stdlib
-        # TODO: Re-evaluate the need for the following workaround
-        exclude:
-        - {python: "3.9", platform: "macos-latest"}  # actions/setup-python#981
     runs-on: ${{ matrix.platform }}
     continue-on-error: ${{ matrix.python == '3.14' }}
     env:
@@ -79,6 +76,9 @@ jobs:
         with:
           python-version: ${{ matrix.python }}
           allow-prereleases: true
+        env:
+          # Workaround for actions/setup-python#981 (env var only modified for this specific step)
+          SETUPTOOLS_USE_DISTUTILS: ${{ matrix.platform == 'macos-latest' && matrix.python == '3.9' && 'stdlib' || matrix.distutils || 'local' }}
       - uses: actions/cache@v4
         id: cache
         with:

From aee344d781920bba42ddbee4b4b44af29d7bab6e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 12 Feb 2025 10:44:24 -0500
Subject: [PATCH 1409/1761] Removing dependabot config. Closes
 jaraco/skeleton#156

---
 .github/dependabot.yml | 8 --------
 1 file changed, 8 deletions(-)
 delete mode 100644 .github/dependabot.yml

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
deleted file mode 100644
index 89ff33961b..0000000000
--- a/.github/dependabot.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-version: 2
-updates:
-  - package-ecosystem: "pip"
-    directory: "/"
-    schedule:
-      interval: "daily"
-    allow:
-      - dependency-type: "all"

From fe63c2fd7c4dd91954cac0f59338d5e37816a98b Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Sun, 16 Feb 2025 20:54:15 +0100
Subject: [PATCH 1410/1761] Adjust test example

---
 setuptools/tests/test_bdist_wheel.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 0f2e6ce136..0cc1ddfd99 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -59,7 +59,7 @@
 EXAMPLES = {
     "dummy-dist": {
         "setup.py": SETUPPY_EXAMPLE,
-        "licenses": {"DUMMYFILE": ""},
+        "licenses_dir": {"DUMMYFILE": ""},
         **dict.fromkeys(DEFAULT_LICENSE_FILES | OTHER_IGNORED_FILES, ""),
     },
     "simple-dist": {
@@ -324,26 +324,26 @@ def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
 
 def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
     dummy_dist.joinpath("setup.cfg").write_text(
-        "[metadata]\nlicense_file=licenses/DUMMYFILE", encoding="utf-8"
+        "[metadata]\nlicense_file=licenses_dir/DUMMYFILE", encoding="utf-8"
     )
     monkeypatch.chdir(dummy_dist)
 
     bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
 
     with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
-        license_files = {"dummy_dist-1.0.dist-info/licenses/licenses/DUMMYFILE"}
+        license_files = {"dummy_dist-1.0.dist-info/licenses/licenses_dir/DUMMYFILE"}
         assert set(wf.namelist()) == DEFAULT_FILES | license_files
 
 
 @pytest.mark.parametrize(
     ("config_file", "config"),
     [
-        ("setup.cfg", "[metadata]\nlicense_files=licenses/*\n  LICENSE"),
-        ("setup.cfg", "[metadata]\nlicense_files=licenses/*, LICENSE"),
+        ("setup.cfg", "[metadata]\nlicense_files=licenses_dir/*\n  LICENSE"),
+        ("setup.cfg", "[metadata]\nlicense_files=licenses_dir/*, LICENSE"),
         (
             "setup.py",
             SETUPPY_EXAMPLE.replace(
-                ")", "  license_files=['licenses/DUMMYFILE', 'LICENSE'])"
+                ")", "  license_files=['licenses_dir/DUMMYFILE', 'LICENSE'])"
             ),
         ),
     ],
@@ -355,11 +355,11 @@ def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, confi
     with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
         license_files = {
             "dummy_dist-1.0.dist-info/licenses/" + fname
-            for fname in {"licenses/DUMMYFILE", "LICENSE"}
+            for fname in {"licenses_dir/DUMMYFILE", "LICENSE"}
         }
         assert set(wf.namelist()) == DEFAULT_FILES | license_files
         metadata = wf.read("dummy_dist-1.0.dist-info/METADATA").decode("utf8")
-        assert "License-File: licenses/DUMMYFILE" in metadata
+        assert "License-File: licenses_dir/DUMMYFILE" in metadata
         assert "License-File: LICENSE" in metadata
 
 

From 3e9b9c7fbe4b753bbc80ee8be01c9a8a9233970d Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Mon, 17 Feb 2025 13:02:59 +0100
Subject: [PATCH 1411/1761] Use os.sep for replace

---
 setuptools/dist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 962da7c34b..c6a3468123 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -419,7 +419,7 @@ def _finalize_license_files(self) -> None:
 
         self.metadata.license_files = list(
             map(
-                lambda path: path.replace("\\", "/"),
+                lambda path: path.replace(os.sep, "/"),
                 unique_everseen(self._expand_patterns(patterns)),
             )
         )

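The one-line change above matters because the paths produced by `_expand_patterns` are OS-native: hard-coding `"\\"` only normalizes on Windows, while `os.sep` is correct on every platform (and a no-op where the separator is already `/`). A minimal sketch of the behavior:

    import os

    def normalize_metadata_path(path: str) -> str:
        # Core metadata expects '/' as the separator; convert the
        # platform's native separator instead of assuming backslashes.
        return path.replace(os.sep, "/")

    # On Windows (os.sep == "\\"): "licenses\\LICENSE.txt" -> "licenses/LICENSE.txt"
    # On POSIX (os.sep == "/"): the path is returned unchanged.
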
From 285d681810d38c5745283e1c5385b6eb0345dece Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Mon, 28 Oct 2024 06:01:58 +0100
Subject: [PATCH 1412/1761] Add initial support for License-Expression (PEP
 639)

---
 docs/userguide/pyproject_config.rst           |  2 +-
 newsfragments/4706.feature.rst                |  1 +
 pyproject.toml                                |  2 +-
 setuptools/_core_metadata.py                  | 10 ++-
 setuptools/config/_apply_pyprojecttoml.py     | 17 ++---
 setuptools/dist.py                            |  1 +
 .../tests/config/test_apply_pyprojecttoml.py  | 63 +++++++++++++++++++
 7 files changed, 84 insertions(+), 12 deletions(-)
 create mode 100644 newsfragments/4706.feature.rst

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index e988fec7ac..efc68603a9 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -49,7 +49,7 @@ The ``project`` table contains metadata fields as described by the
    readme = "README.rst"
    requires-python = ">=3.8"
    keywords = ["one", "two"]
-   license = {text = "BSD-3-Clause"}
+   license = "BSD-3-Clause"
    classifiers = [
        "Framework :: Django",
        "Programming Language :: Python :: 3",
diff --git a/newsfragments/4706.feature.rst b/newsfragments/4706.feature.rst
new file mode 100644
index 0000000000..38b09276c0
--- /dev/null
+++ b/newsfragments/4706.feature.rst
@@ -0,0 +1 @@
+Added initial support for ``License-Expression`` (`PEP 639 `_). -- by :user:`cdce8p`
diff --git a/pyproject.toml b/pyproject.toml
index a9febdbe8c..bd22d79294 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -11,10 +11,10 @@ authors = [
 ]
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 readme = "README.rst"
+license = "MIT"
 classifiers = [
 	"Development Status :: 5 - Production/Stable",
 	"Intended Audience :: Developers",
-	"License :: OSI Approved :: MIT License",
 	"Programming Language :: Python :: 3",
 	"Programming Language :: Python :: 3 :: Only",
 	"Topic :: Software Development :: Libraries :: Python Modules",
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 850cc409f7..a407f6915b 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -88,6 +88,7 @@ def read_pkg_file(self, file):
     self.url = _read_field_from_msg(msg, 'home-page')
     self.download_url = _read_field_from_msg(msg, 'download-url')
     self.license = _read_field_unescaped_from_msg(msg, 'license')
+    self.license_expression = _read_field_unescaped_from_msg(msg, 'license_expression')
 
     self.long_description = _read_field_unescaped_from_msg(msg, 'description')
     if self.long_description is None and self.metadata_version >= Version('2.1'):
@@ -175,9 +176,12 @@ def write_field(key, value):
         if attr_val is not None:
             write_field(field, attr_val)
 
-    license = self.get_license()
-    if license:
-        write_field('License', rfc822_escape(license))
+    if self.license_expression:
+        write_field('License-Expression', rfc822_escape(self.license_expression))
+    else:
+        license = self.get_license()
+        if license:
+            write_field('License', rfc822_escape(license))
 
     for label, url in self.project_urls.items():
         write_field('Project-URL', f'{label}, {url}')
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 331596bdd7..6664c6158a 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -181,16 +181,19 @@ def _long_description(
         dist._referenced_files.add(file)
 
 
-def _license(dist: Distribution, val: dict, root_dir: StrPath | None):
+def _license(dist: Distribution, val: str | dict, root_dir: StrPath | None):
     from setuptools.config import expand
 
-    if "file" in val:
-        # XXX: Is it completely safe to assume static?
-        value = expand.read_files([val["file"]], root_dir)
-        _set_config(dist, "license", _static.Str(value))
-        dist._referenced_files.add(val["file"])
+    if isinstance(val, str):
+        _set_config(dist, "license_expression", _static.Str(val))
     else:
-        _set_config(dist, "license", _static.Str(val["text"]))
+        if "file" in val:
+            # XXX: Is it completely safe to assume static?
+            value = expand.read_files([val["file"]], root_dir)
+            _set_config(dist, "license", _static.Str(value))
+            dist._referenced_files.add(val["file"])
+        else:
+            _set_config(dist, "license", _static.Str(val["text"]))
 
 
 def _people(dist: Distribution, val: list[dict], _root_dir: StrPath | None, kind: str):
diff --git a/setuptools/dist.py b/setuptools/dist.py
index c6a3468123..55175f5e57 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -288,6 +288,7 @@ class Distribution(_Distribution):
         'long_description_content_type': lambda: None,
         'project_urls': dict,
         'provides_extras': dict,  # behaves like an ordered set
+        'license_expression': lambda: None,
         'license_file': lambda: None,
         'license_files': lambda: None,
         'install_requires': list,
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 20146b4a89..a528a33f0d 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -156,6 +156,28 @@ def main_gui(): pass
 def main_tomatoes(): pass
 """
 
+PEP639_LICENSE_TEXT = """\
+[project]
+name = "spam"
+version = "2020.0.0"
+authors = [
+  {email = "hi@pradyunsg.me"},
+  {name = "Tzu-Ping Chung"}
+]
+license = {text = "MIT"}
+"""
+
+PEP639_LICENSE_EXPRESSION = """\
+[project]
+name = "spam"
+version = "2020.0.0"
+authors = [
+  {email = "hi@pradyunsg.me"},
+  {name = "Tzu-Ping Chung"}
+]
+license = "MIT"
+"""
+
 
 def _pep621_example_project(
     tmp_path,
@@ -251,6 +273,47 @@ def test_utf8_maintainer_in_metadata(  # issue-3663
     assert f"Maintainer-email: {expected_maintainers_meta_value}" in content
 
 
+@pytest.mark.parametrize(
+    ('pyproject_text', 'license', 'license_expression', 'content_str'),
+    (
+        pytest.param(
+            PEP639_LICENSE_TEXT,
+            'MIT',
+            None,
+            'License: MIT',
+            id='license-text',
+        ),
+        pytest.param(
+            PEP639_LICENSE_EXPRESSION,
+            None,
+            'MIT',
+            'License-Expression: MIT',
+            id='license-expression',
+        ),
+    ),
+)
+def test_license_in_metadata(
+    license,
+    license_expression,
+    content_str,
+    pyproject_text,
+    tmp_path,
+):
+    pyproject = _pep621_example_project(
+        tmp_path,
+        "README",
+        pyproject_text=pyproject_text,
+    )
+    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+    assert dist.metadata.license == license
+    assert dist.metadata.license_expression == license_expression
+    pkg_file = tmp_path / "PKG-FILE"
+    with open(pkg_file, "w", encoding="utf-8") as fh:
+        dist.metadata.write_pkg_file(fh)
+    content = pkg_file.read_text(encoding="utf-8")
+    assert content_str in content
+
+
 class TestLicenseFiles:
     # TODO: After PEP 639 is accepted, we have to move the license-files
     #       to the `project` table instead of `tool.setuptools`

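The updated `_license` helper dispatches on the value's type: PEP 621 only allowed tables (`{file = ...}` or `{text = ...}`), while PEP 639 adds plain strings interpreted as SPDX license expressions. A simplified sketch of that dispatch, where `read_file` stands in for `expand.read_files` and error handling is omitted:

    def apply_license(dist, val):
        # PEP 639: a bare string is a license expression, e.g. "MIT OR Apache-2.0"
        if isinstance(val, str):
            dist.metadata.license_expression = val
        # PEP 621: a table pointing at a license file
        elif "file" in val:
            dist.metadata.license = read_file(val["file"])  # hypothetical helper
        # PEP 621: a table with inline license text
        else:
            dist.metadata.license = val["text"]
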
From 420766cdd02008b80d019368c37880b8565b5a7c Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Mon, 25 Nov 2024 01:05:56 +0100
Subject: [PATCH 1413/1761] Additional test case

---
 .../tests/config/test_apply_pyprojecttoml.py  | 33 +++++++++++++++++--
 1 file changed, 30 insertions(+), 3 deletions(-)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index a528a33f0d..03f950ecd1 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -315,9 +315,6 @@ def test_license_in_metadata(
 
 
 class TestLicenseFiles:
-    # TODO: After PEP 639 is accepted, we have to move the license-files
-    #       to the `project` table instead of `tool.setuptools`
-
     def base_pyproject(self, tmp_path, additional_text):
         pyproject = _pep621_example_project(tmp_path, "README")
         text = pyproject.read_text(encoding="utf-8")
@@ -330,6 +327,24 @@ def base_pyproject(self, tmp_path, additional_text):
         pyproject.write_text(text, encoding="utf-8")
         return pyproject
 
+    def base_pyproject_license_pep639(self, tmp_path):
+        pyproject = _pep621_example_project(tmp_path, "README")
+        text = pyproject.read_text(encoding="utf-8")
+
+        # Sanity-check
+        assert 'license = {file = "LICENSE.txt"}' in text
+        assert 'license-files' not in text
+        assert "[tool.setuptools]" not in text
+
+        text = re.sub(
+            r"(license = {file = \"LICENSE.txt\"})\n",
+            ("license = \"licenseref-Proprietary\"\nlicense-files = [\"_FILE*\"]\n"),
+            text,
+            count=1,
+        )
+        pyproject.write_text(text, encoding="utf-8")
+        return pyproject
+
     def test_both_license_and_license_files_defined(self, tmp_path):
         setuptools_config = '[tool.setuptools]\nlicense-files = ["_FILE*"]'
         pyproject = self.base_pyproject(tmp_path, setuptools_config)
@@ -346,6 +361,18 @@ def test_both_license_and_license_files_defined(self, tmp_path):
         assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
         assert dist.metadata.license == "LicenseRef-Proprietary\n"
 
+    def test_both_license_and_license_files_defined_pep639(self, tmp_path):
+        # Set license and license-files
+        pyproject = self.base_pyproject_license_pep639(tmp_path)
+
+        (tmp_path / "_FILE.txt").touch()
+        (tmp_path / "_FILE.rst").touch()
+
+        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+        assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
+        assert dist.metadata.license is None
+        assert dist.metadata.license_expression == "LicenseRef-Proprietary"
+
     def test_default_patterns(self, tmp_path):
         setuptools_config = '[tool.setuptools]\nzip-safe = false'
         # ^ used just to trigger section validation

From 346bf17e0cc8fc6e8b0ea3e6dafa3af91009da6d Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Mon, 25 Nov 2024 01:05:13 +0100
Subject: [PATCH 1414/1761] Normalize license expression

---
 setuptools/config/_apply_pyprojecttoml.py     |  1 +
 setuptools/dist.py                            | 20 ++++++++++++++++
 .../tests/config/test_apply_pyprojecttoml.py  | 23 +++++++++++++++----
 3 files changed, 40 insertions(+), 4 deletions(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 6664c6158a..6bb2bea514 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -58,6 +58,7 @@ def apply(dist: Distribution, config: dict, filename: StrPath) -> Distribution:
     os.chdir(root_dir)
     try:
         dist._finalize_requires()
+        dist._finalize_license_expression()
         dist._finalize_license_files()
     finally:
         os.chdir(current_directory)
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 55175f5e57..7a13ea6a04 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -12,6 +12,7 @@
 from typing import TYPE_CHECKING, Any, Union
 
 from more_itertools import partition, unique_everseen
+from packaging.licenses import canonicalize_license_expression
 from packaging.markers import InvalidMarker, Marker
 from packaging.specifiers import InvalidSpecifier, SpecifierSet
 from packaging.version import Version
@@ -27,6 +28,7 @@
 from ._reqs import _StrOrIter
 from .config import pyprojecttoml, setupcfg
 from .discovery import ConfigDiscovery
+from .errors import InvalidConfigError
 from .monkey import get_unpatched
 from .warnings import InformationOnly, SetuptoolsDeprecationWarning
 
@@ -403,6 +405,23 @@ def _normalize_requires(self):
             (k, list(map(str, _reqs.parse(v or [])))) for k, v in extras_require.items()
         )
 
+    def _finalize_license_expression(self) -> None:
+        """Normalize license and license_expression."""
+        license_expr = self.metadata.license_expression
+        if license_expr:
+            normalized = canonicalize_license_expression(license_expr)
+            if license_expr != normalized:
+                InformationOnly.emit(f"Normalizing '{license_expr}' to '{normalized}'")
+                self.metadata.license_expression = normalized
+
+            for cl in self.metadata.get_classifiers():
+                if not cl.startswith("License :: "):
+                    continue
+                raise InvalidConfigError(
+                    "License classifier are deprecated in favor of the license expression. "
+                    f"Remove the '{cl}' classifier."
+                )
+
     def _finalize_license_files(self) -> None:
         """Compute names of all license files which should be included."""
         license_files: list[str] | None = self.metadata.license_files
@@ -656,6 +675,7 @@ def parse_config_files(
             pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)
 
         self._finalize_requires()
+        self._finalize_license_expression()
         self._finalize_license_files()
 
     def fetch_build_eggs(
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 03f950ecd1..91883b4618 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -23,7 +23,7 @@
 from setuptools.config import expand, pyprojecttoml, setupcfg
 from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
 from setuptools.dist import Distribution
-from setuptools.errors import RemovedConfigError
+from setuptools.errors import InvalidConfigError, RemovedConfigError
 
 from .downloads import retrieve_file, urls_from_file
 
@@ -175,7 +175,10 @@ def main_tomatoes(): pass
   {email = "hi@pradyunsg.me"},
   {name = "Tzu-Ping Chung"}
 ]
-license = "MIT"
+license = "mit or apache-2.0"  # should be normalized in metadata
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+]
 """
 
 
@@ -286,8 +289,8 @@ def test_utf8_maintainer_in_metadata(  # issue-3663
         pytest.param(
             PEP639_LICENSE_EXPRESSION,
             None,
-            'MIT',
-            'License-Expression: MIT',
+            'MIT OR Apache-2.0',
+            'License-Expression: MIT OR Apache-2.0',
             id='license-expression',
         ),
     ),
@@ -314,6 +317,18 @@ def test_license_in_metadata(
     assert content_str in content
 
 
+def test_license_expression_with_bad_classifier(tmp_path):
+    text = PEP639_LICENSE_EXPRESSION.rsplit("\n", 2)[0]
+    pyproject = _pep621_example_project(
+        tmp_path,
+        "README",
+        f"{text}\n    \"License :: OSI Approved :: MIT License\"\n]",
+    )
+    msg = "License classifier are deprecated.*'License :: OSI Approved :: MIT License'"
+    with pytest.raises(InvalidConfigError, match=msg):
+        pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
+
 class TestLicenseFiles:
     def base_pyproject(self, tmp_path, additional_text):
         pyproject = _pep621_example_project(tmp_path, "README")

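Normalization relies on `packaging.licenses.canonicalize_license_expression`, which both validates an expression and rewrites identifiers to their canonical SPDX casing. A small usage sketch, assuming `packaging>=24.2` is installed:

    from packaging.licenses import canonicalize_license_expression

    expr = "mit or apache-2.0"
    normalized = canonicalize_license_expression(expr)
    assert normalized == "MIT OR Apache-2.0"
    if normalized != expr:
        print(f"Normalizing {expr!r} to {normalized!r}")
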
From 31d83409f26018b79ee5459445ce7e9b87752e97 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Mon, 25 Nov 2024 01:07:59 +0100
Subject: [PATCH 1415/1761] Remove License-Expression field

---
 newsfragments/4706.feature.rst                      | 2 +-
 setuptools/_core_metadata.py                        | 9 +++------
 setuptools/tests/config/test_apply_pyprojecttoml.py | 2 +-
 3 files changed, 5 insertions(+), 8 deletions(-)

diff --git a/newsfragments/4706.feature.rst b/newsfragments/4706.feature.rst
index 38b09276c0..1d34f5f476 100644
--- a/newsfragments/4706.feature.rst
+++ b/newsfragments/4706.feature.rst
@@ -1 +1 @@
-Added initial support for ``License-Expression`` (`PEP 639 `_). -- by :user:`cdce8p`
+Added initial support for license expression (`PEP 639 `_). -- by :user:`cdce8p`
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index a407f6915b..25937f913c 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -176,12 +176,9 @@ def write_field(key, value):
         if attr_val is not None:
             write_field(field, attr_val)
 
-    if self.license_expression:
-        write_field('License-Expression', rfc822_escape(self.license_expression))
-    else:
-        license = self.get_license()
-        if license:
-            write_field('License', rfc822_escape(license))
+    license = self.license_expression or self.get_license()
+    if license:
+        write_field('License', rfc822_escape(license))
 
     for label, url in self.project_urls.items():
         write_field('Project-URL', f'{label}, {url}')
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 91883b4618..c9ab1f5718 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -290,7 +290,7 @@ def test_utf8_maintainer_in_metadata(  # issue-3663
             PEP639_LICENSE_EXPRESSION,
             None,
             'MIT OR Apache-2.0',
-            'License-Expression: MIT OR Apache-2.0',
+            'License: MIT OR Apache-2.0',  # TODO Metadata version '2.4'
             id='license-expression',
         ),
     ),

From 3744994a6b53aeaa8e0c7ed92df6bf86d01df8a7 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Sun, 16 Feb 2025 21:48:04 +0100
Subject: [PATCH 1416/1761] Review

---
 pyproject.toml               | 2 +-
 setuptools/_core_metadata.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index bd22d79294..a9febdbe8c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -11,10 +11,10 @@ authors = [
 ]
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 readme = "README.rst"
-license = "MIT"
 classifiers = [
 	"Development Status :: 5 - Production/Stable",
 	"Intended Audience :: Developers",
+	"License :: OSI Approved :: MIT License",
 	"Programming Language :: Python :: 3",
 	"Programming Language :: Python :: 3 :: Only",
 	"Topic :: Software Development :: Libraries :: Python Modules",
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 25937f913c..5342186c0e 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -88,7 +88,7 @@ def read_pkg_file(self, file):
     self.url = _read_field_from_msg(msg, 'home-page')
     self.download_url = _read_field_from_msg(msg, 'download-url')
     self.license = _read_field_unescaped_from_msg(msg, 'license')
-    self.license_expression = _read_field_unescaped_from_msg(msg, 'license_expression')
+    self.license_expression = _read_field_unescaped_from_msg(msg, 'license-expression')
 
     self.long_description = _read_field_unescaped_from_msg(msg, 'description')
     if self.long_description is None and self.metadata_version >= Version('2.1'):

From 0d8f1f2d12470efe3830e05f70996eb177287e26 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Mon, 17 Feb 2025 14:09:35 +0100
Subject: [PATCH 1417/1761] Replace error with warning and remove license
 classifier

---
 setuptools/dist.py                             | 15 +++++++++++----
 .../tests/config/test_apply_pyprojecttoml.py   | 18 +++++++++++++++---
 2 files changed, 26 insertions(+), 7 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 7a13ea6a04..3cbe0fdc11 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -28,7 +28,6 @@
 from ._reqs import _StrOrIter
 from .config import pyprojecttoml, setupcfg
 from .discovery import ConfigDiscovery
-from .errors import InvalidConfigError
 from .monkey import get_unpatched
 from .warnings import InformationOnly, SetuptoolsDeprecationWarning
 
@@ -414,13 +413,21 @@ def _finalize_license_expression(self) -> None:
                 InformationOnly.emit(f"Normalizing '{license_expr}' to '{normalized}'")
                 self.metadata.license_expression = normalized
 
+            classifiers = []
+            license_classifiers_found = False
             for cl in self.metadata.get_classifiers():
                 if not cl.startswith("License :: "):
+                    classifiers.append(cl)
                     continue
-                raise InvalidConfigError(
-                    "License classifier are deprecated in favor of the license expression. "
-                    f"Remove the '{cl}' classifier."
+                license_classifiers_found = True
+                SetuptoolsDeprecationWarning.emit(
+                    "License classifier are deprecated in favor of the license expression.",
+                    f"Please remove the '{cl}' classifier.",
+                    see_url="https://peps.python.org/pep-0639/",
+                    due_date=(2027, 2, 17),  # Introduced 2025-02-17
                 )
+            if license_classifiers_found:
+                self.metadata.set_classifiers(classifiers)
 
     def _finalize_license_files(self) -> None:
         """Compute names of all license files which should be included."""
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index c9ab1f5718..089b6ae30e 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -9,6 +9,7 @@
 import io
 import re
 import tarfile
+import warnings
 from inspect import cleandoc
 from pathlib import Path
 from unittest.mock import Mock
@@ -23,7 +24,8 @@
 from setuptools.config import expand, pyprojecttoml, setupcfg
 from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
 from setuptools.dist import Distribution
-from setuptools.errors import InvalidConfigError, RemovedConfigError
+from setuptools.errors import RemovedConfigError
+from setuptools.warnings import SetuptoolsDeprecationWarning
 
 from .downloads import retrieve_file, urls_from_file
 
@@ -178,6 +180,7 @@ def main_tomatoes(): pass
 license = "mit or apache-2.0"  # should be normalized in metadata
 classifiers = [
     "Development Status :: 5 - Production/Stable",
+    "Programming Language :: Python",
 ]
 """
 
@@ -324,10 +327,19 @@ def test_license_expression_with_bad_classifier(tmp_path):
         "README",
         f"{text}\n    \"License :: OSI Approved :: MIT License\"\n]",
     )
-    msg = "License classifier are deprecated.*'License :: OSI Approved :: MIT License'"
-    with pytest.raises(InvalidConfigError, match=msg):
+    msg = "License classifier are deprecated(?:.|\n)*'License :: OSI Approved :: MIT License'"
+    with pytest.raises(SetuptoolsDeprecationWarning, match=msg):
         pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
 
+    with warnings.catch_warnings():
+        warnings.simplefilter("ignore", SetuptoolsDeprecationWarning)
+        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+        # Check 'License :: OSI Approved :: MIT License' is removed
+        assert dist.metadata.get_classifiers() == [
+            "Development Status :: 5 - Production/Stable",
+            "Programming Language :: Python",
+        ]
+
 
 class TestLicenseFiles:
     def base_pyproject(self, tmp_path, additional_text):

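The reworked test exercises both sides of the new behavior: `pytest.raises(SetuptoolsDeprecationWarning, ...)` works because the suite escalates warnings to errors, and the second block uses `warnings.catch_warnings()` with `simplefilter("ignore", ...)` so the configuration can still be applied despite the deprecation. A minimal sketch of the suppression pattern, assuming a hypothetical `configure()` callable that emits the warning:

    import warnings

    from setuptools.warnings import SetuptoolsDeprecationWarning

    with warnings.catch_warnings():
        # Temporarily silence just this category; other warnings still propagate.
        warnings.simplefilter("ignore", SetuptoolsDeprecationWarning)
        dist = configure()  # hypothetical callable that triggers the warning
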
From c02bb2fa43cb034c610e67a36fa28a06dcbd7004 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 13:39:10 +0000
Subject: [PATCH 1418/1761] Export `PyInit_pkg` for `pkg.__init__` instead of
 `PyInit___init__`

---
 distutils/command/build_ext.py    | 11 ++++++++++-
 distutils/tests/test_build_ext.py | 13 +++++++++++++
 2 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index cf60bd0ad8..3ec3663dcc 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -729,7 +729,7 @@ def get_export_symbols(self, ext):
         provided, "PyInit_" + module_name.  Only relevant on Windows, where
         the .pyd file (DLL) must export the module "PyInit_" function.
         """
-        name = ext.name.split('.')[-1]
+        name = self._get_module_name_for_symbol(ext)
         try:
             # Unicode module name support as defined in PEP-489
             # https://peps.python.org/pep-0489/#export-hook-name
@@ -744,6 +744,15 @@ def get_export_symbols(self, ext):
             ext.export_symbols.append(initfunc_name)
         return ext.export_symbols
 
+    def _get_module_name_for_symbol(self, ext):
+        # Package name should be used for `__init__` modules
+        # https://github.com/python/cpython/issues/80074
+        # https://github.com/pypa/setuptools/issues/4826
+        parts = ext.name.split(".")
+        if parts[-1] == "__init__" and len(parts) >= 2:
+            return parts[-2]
+        return parts[-1]
+
     def get_libraries(self, ext):  # noqa: C901
         """Return the list of libraries to link against when building a
         shared extension.  On most platforms, this is just 'ext.libraries';
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index beeba4850c..4274890a6f 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -345,6 +345,19 @@ def test_unicode_module_names(self):
         assert cmd.get_export_symbols(modules[0]) == ['PyInit_foo']
         assert cmd.get_export_symbols(modules[1]) == ['PyInitU_f_1gaa']
 
+    def test_export_symbols__init__(self):
+        # https://github.com/python/cpython/issues/80074
+        # https://github.com/pypa/setuptools/issues/4826
+        modules = [
+            Extension('foo.__init__', ['aaa']),
+            Extension('föö.__init__', ['uuu']),
+        ]
+        dist = Distribution({'name': 'xx', 'ext_modules': modules})
+        cmd = self.build_ext(dist)
+        cmd.ensure_finalized()
+        assert cmd.get_export_symbols(modules[0]) == ['PyInit_foo']
+        assert cmd.get_export_symbols(modules[1]) == ['PyInitU_f_1gaa']
+
     def test_compiler_option(self):
         # cmd.compiler is an option and
         # should not be overridden by a compiler instance

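`get_export_symbols` now derives the init-function name from the package rather than the literal `__init__` segment, combined with the existing PEP 489 rule for non-ASCII names (`PyInitU_` plus punycode, with `-` mapped to `_`). A condensed sketch of the combined naming logic; the helper name is illustrative:

    def export_symbol(ext_name: str) -> str:
        parts = ext_name.split(".")
        # 'pkg.__init__' must export PyInit_pkg, not PyInit___init__
        name = parts[-2] if parts[-1] == "__init__" and len(parts) >= 2 else parts[-1]
        try:
            name.encode("ascii")
        except UnicodeEncodeError:
            # PEP 489 export hook name for non-ASCII module names
            return "PyInitU_" + name.encode("punycode").decode("ascii").replace("-", "_")
        return "PyInit_" + name

    assert export_symbol("foo.__init__") == "PyInit_foo"
    assert export_symbol("föö.__init__") == "PyInitU_f_1gaa"
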
From 6f809a0741386fad5146d63c656049d1473cc5c7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 14:09:45 +0000
Subject: [PATCH 1419/1761] Attempt to solve problems with cygwin in the CI

---
 .github/workflows/main.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index ef175cb8d7..7dfb5a507b 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -196,8 +196,8 @@ jobs:
       - name: Run tests
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
         run: |
-          git config --global --add safe.directory "$(cygpath -u "$GITHUB_WORKSPACE")" # workaround for #3408
-          tox
+          git config --global --add safe.directory "$(cygpath -u "${{ github.workspace }}")" # workaround for #3408
+          python -m tox
       - name: Create coverage report
         if: hashFiles('.coverage') != ''  # Rudimentary `file.exists()`
         run: |

From 94a84c2f90f2c36b9f8ec3631ed7cba80b6fad7c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 14:19:22 +0000
Subject: [PATCH 1420/1761] Add command for debugging purposes

---
 .github/workflows/main.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 7dfb5a507b..f16d866f90 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -197,6 +197,7 @@ jobs:
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
         run: |
           git config --global --add safe.directory "$(cygpath -u "${{ github.workspace }}")" # workaround for #3408
+          echo "$(cygpath -u "${{ github.workspace }}")" # for debugging purposes
           python -m tox
       - name: Create coverage report
         if: hashFiles('.coverage') != ''  # Rudimentary `file.exists()`

From 32332e4728f46e9e37b1c775bbcc6c4ade282ed0 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 14:27:17 +0000
Subject: [PATCH 1421/1761] Attempt to install missing dependencies

---
 .github/workflows/main.yml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index f16d866f90..16eefcdc93 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -182,6 +182,7 @@ jobs:
           packages: >-
             python${{ matrix.python }},
             python${{ matrix.python }}-devel,
+            python${{ matrix.python }}-pip,
             python${{ matrix.python }}-tox,
             gcc-core,
             git,
@@ -193,6 +194,11 @@ jobs:
         run: |
           python -c 'import platform; print("python-version=" + platform.python_version())' >> ${GITHUB_OUTPUT}
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
+      - name: Install missing dependencies
+        shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
+        run: |
+          # Workaround for https://github.com/pypa/setuptools/issues/4831
+          python -m pip check | sed -rn 's/.*requires ([^,]+),.*/\1/p' | xargs python -m pip install
       - name: Run tests
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
         run: |

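The shell pipeline above scrapes `python -m pip check` output (lines like `tox 4.x requires packaging, which is not installed.`) for the missing requirement names and feeds them back into `pip install`. A rough Python equivalent of the same workaround:

    import re
    import subprocess
    import sys

    result = subprocess.run(
        [sys.executable, "-m", "pip", "check"], capture_output=True, text=True
    )
    # Mirror the sed expression: capture everything between 'requires ' and ','
    missing = re.findall(r"requires ([^,]+),", result.stdout)
    if missing:
        subprocess.run([sys.executable, "-m", "pip", "install", *missing], check=True)
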
From 9d6ade8c85fa799700bdcdf4572cbc2755a1732e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 14:47:37 +0000
Subject: [PATCH 1422/1761] Try to install tox using 'pip' on cygwin

---
 .github/workflows/main.yml | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 16eefcdc93..b77ba8f70c 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -183,7 +183,6 @@ jobs:
             python${{ matrix.python }},
             python${{ matrix.python }}-devel,
             python${{ matrix.python }}-pip,
-            python${{ matrix.python }}-tox,
             gcc-core,
             git,
       - name: Record the currently selected Python version
@@ -194,11 +193,11 @@ jobs:
         run: |
           python -c 'import platform; print("python-version=" + platform.python_version())' >> ${GITHUB_OUTPUT}
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
-      - name: Install missing dependencies
+      - name: Install tox using pip
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
         run: |
           # Workaround for https://github.com/pypa/setuptools/issues/4831
-          python -m pip check | sed -rn 's/.*requires ([^,]+),.*/\1/p' | xargs python -m pip install
+          python -m pip install tox
       - name: Run tests
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
         run: |

From 28baa9b6d0ca7f5f316724881bebc9fe156fd1fc Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Mon, 17 Feb 2025 16:42:34 +0100
Subject: [PATCH 1423/1761] Revert removing the license classifier

---
 setuptools/dist.py                                  | 6 ------
 setuptools/tests/config/test_apply_pyprojecttoml.py | 3 ++-
 2 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 3cbe0fdc11..27e8095709 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -413,21 +413,15 @@ def _finalize_license_expression(self) -> None:
                 InformationOnly.emit(f"Normalizing '{license_expr}' to '{normalized}'")
                 self.metadata.license_expression = normalized
 
-            classifiers = []
-            license_classifiers_found = False
             for cl in self.metadata.get_classifiers():
                 if not cl.startswith("License :: "):
-                    classifiers.append(cl)
                     continue
-                license_classifiers_found = True
                 SetuptoolsDeprecationWarning.emit(
                     "License classifier are deprecated in favor of the license expression.",
                     f"Please remove the '{cl}' classifier.",
                     see_url="https://peps.python.org/pep-0639/",
                     due_date=(2027, 2, 17),  # Introduced 2025-02-17
                 )
-            if license_classifiers_found:
-                self.metadata.set_classifiers(classifiers)
 
     def _finalize_license_files(self) -> None:
         """Compute names of all license files which should be included."""
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 089b6ae30e..468a1ba01d 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -334,10 +334,11 @@ def test_license_expression_with_bad_classifier(tmp_path):
     with warnings.catch_warnings():
         warnings.simplefilter("ignore", SetuptoolsDeprecationWarning)
         dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
-        # Check 'License :: OSI Approved :: MIT License' is removed
+        # Check license classifier is still included
         assert dist.metadata.get_classifiers() == [
             "Development Status :: 5 - Production/Stable",
             "Programming Language :: Python",
+            "License :: OSI Approved :: MIT License",
         ]
 
 

From 9bf4bb9a2822a1c1a488935613b5adb349a1c593 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Mon, 17 Feb 2025 14:35:19 +0100
Subject: [PATCH 1424/1761] Bump core metadata version to 2.4

---
 newsfragments/4830.feature.rst                    |  1 +
 setuptools/_core_metadata.py                      |  7 ++++---
 .../tests/config/test_apply_pyprojecttoml.py      | 15 +++++++++++++--
 setuptools/tests/test_egg_info.py                 |  8 ++++----
 4 files changed, 22 insertions(+), 9 deletions(-)
 create mode 100644 newsfragments/4830.feature.rst

diff --git a/newsfragments/4830.feature.rst b/newsfragments/4830.feature.rst
new file mode 100644
index 0000000000..f21d17515a
--- /dev/null
+++ b/newsfragments/4830.feature.rst
@@ -0,0 +1 @@
+Bump core metadata version to ``2.4``. -- by :user:`cdce8p`
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 5342186c0e..60b47b375e 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -28,7 +28,7 @@
 def get_metadata_version(self):
     mv = getattr(self, 'metadata_version', None)
     if mv is None:
-        mv = Version('2.2')
+        mv = Version('2.4')
         self.metadata_version = mv
     return mv
 
@@ -176,8 +176,9 @@ def write_field(key, value):
         if attr_val is not None:
             write_field(field, attr_val)
 
-    license = self.license_expression or self.get_license()
-    if license:
+    if license_expression := self.license_expression:
+        write_field('License-Expression', license_expression)
+    elif license := self.get_license():
         write_field('License', rfc822_escape(license))
 
     for label, url in self.project_urls.items():
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 468a1ba01d..c437988702 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -280,20 +280,28 @@ def test_utf8_maintainer_in_metadata(  # issue-3663
 
 
 @pytest.mark.parametrize(
-    ('pyproject_text', 'license', 'license_expression', 'content_str'),
+    (
+        'pyproject_text',
+        'license',
+        'license_expression',
+        'content_str',
+        'not_content_str',
+    ),
     (
         pytest.param(
             PEP639_LICENSE_TEXT,
             'MIT',
             None,
             'License: MIT',
+            'License-Expression: ',
             id='license-text',
         ),
         pytest.param(
             PEP639_LICENSE_EXPRESSION,
             None,
             'MIT OR Apache-2.0',
-            'License: MIT OR Apache-2.0',  # TODO Metadata version '2.4'
+            'License-Expression: MIT OR Apache-2.0',
+            'License: ',
             id='license-expression',
         ),
     ),
@@ -302,6 +310,7 @@ def test_license_in_metadata(
     license,
     license_expression,
     content_str,
+    not_content_str,
     pyproject_text,
     tmp_path,
 ):
@@ -317,7 +326,9 @@ def test_license_in_metadata(
     with open(pkg_file, "w", encoding="utf-8") as fh:
         dist.metadata.write_pkg_file(fh)
     content = pkg_file.read_text(encoding="utf-8")
+    assert "Metadata-Version: 2.4" in content
     assert content_str in content
+    assert not_content_str not in content
 
 
 def test_license_expression_with_bad_classifier(tmp_path):
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 9756d7c519..528e2c13d8 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -522,7 +522,7 @@ def test_provides_extra(self, tmpdir_cwd, env):
         with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp:
             pkg_info_lines = fp.read().split('\n')
         assert 'Provides-Extra: foobar' in pkg_info_lines
-        assert 'Metadata-Version: 2.2' in pkg_info_lines
+        assert 'Metadata-Version: 2.4' in pkg_info_lines
 
     def test_doesnt_provides_extra(self, tmpdir_cwd, env):
         self._setup_script_with_requires(
@@ -1089,7 +1089,7 @@ def test_metadata_version(self, tmpdir_cwd, env):
         with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp:
             pkg_info_lines = fp.read().split('\n')
         # Update metadata version if changed
-        assert self._extract_mv_version(pkg_info_lines) == (2, 2)
+        assert self._extract_mv_version(pkg_info_lines) == (2, 4)
 
     def test_long_description_content_type(self, tmpdir_cwd, env):
         # Test that specifying a `long_description_content_type` keyword arg to
@@ -1116,7 +1116,7 @@ def test_long_description_content_type(self, tmpdir_cwd, env):
             pkg_info_lines = fp.read().split('\n')
         expected_line = 'Description-Content-Type: text/markdown'
         assert expected_line in pkg_info_lines
-        assert 'Metadata-Version: 2.2' in pkg_info_lines
+        assert 'Metadata-Version: 2.4' in pkg_info_lines
 
     def test_long_description(self, tmpdir_cwd, env):
         # Test that specifying `long_description` and `long_description_content_type`
@@ -1135,7 +1135,7 @@ def test_long_description(self, tmpdir_cwd, env):
         egg_info_dir = os.path.join('.', 'foo.egg-info')
         with open(os.path.join(egg_info_dir, 'PKG-INFO'), encoding="utf-8") as fp:
             pkg_info_lines = fp.read().split('\n')
-        assert 'Metadata-Version: 2.2' in pkg_info_lines
+        assert 'Metadata-Version: 2.4' in pkg_info_lines
         assert '' == pkg_info_lines[-1]  # last line should be empty
         long_desc_lines = pkg_info_lines[pkg_info_lines.index('') :]
         assert 'This is a long description' in long_desc_lines

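The walrus-operator change above makes the two license fields mutually exclusive in PKG-INFO: `License-Expression` (new in core metadata 2.4, per PEP 639) is written when an expression is configured, and the legacy `License` field is only a fallback. A minimal sketch of that precedence, with `write_field` standing in for the real PKG-INFO writer and `Meta` a hypothetical metadata object:

    def write_license_fields(metadata, write_field):
        # Metadata 2.4: prefer the PEP 639 field; fall back to legacy License.
        if expr := metadata.license_expression:
            write_field("License-Expression", expr)
        elif metadata.license:
            write_field("License", metadata.license)

    class Meta:
        license_expression = "MIT OR Apache-2.0"
        license = "MIT"

    write_license_fields(Meta, lambda k, v: print(f"{k}: {v}"))
    # prints only: License-Expression: MIT OR Apache-2.0
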
From 6410380863bc2a643c984c1ef282cf3d9086bd41 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 16:28:24 +0000
Subject: [PATCH 1425/1761] Prevent deprecated license classifiers from being
 written to core metadata

---
 setuptools/dist.py                            | 25 +++++++++++--------
 .../tests/config/test_apply_pyprojecttoml.py  |  8 +++---
 2 files changed, 19 insertions(+), 14 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 27e8095709..fcedf679e7 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -406,22 +406,27 @@ def _normalize_requires(self):
 
     def _finalize_license_expression(self) -> None:
         """Normalize license and license_expression."""
+        classifiers = self.metadata.get_classifiers()
+        license_classifiers = {cl for cl in classifiers if cl.startswith("License :: ")}
+
+        if license_classifiers:
+            SetuptoolsDeprecationWarning.emit(
+                "License classifier are deprecated in favor of the license expression.",
+                "Please remove the classifiers:\n\n" + "\n".join(license_classifiers),
+                see_url="https://peps.python.org/pep-0639/",
+                due_date=(2027, 2, 17),  # Introduced 2025-02-17
+            )
+
         license_expr = self.metadata.license_expression
         if license_expr:
             normalized = canonicalize_license_expression(license_expr)
             if license_expr != normalized:
                 InformationOnly.emit(f"Normalizing '{license_expr}' to '{normalized}'")
                 self.metadata.license_expression = normalized
-
-            for cl in self.metadata.get_classifiers():
-                if not cl.startswith("License :: "):
-                    continue
-                SetuptoolsDeprecationWarning.emit(
-                    "License classifier are deprecated in favor of the license expression.",
-                    f"Please remove the '{cl}' classifier.",
-                    see_url="https://peps.python.org/pep-0639/",
-                    due_date=(2027, 2, 17),  # Introduced 2025-02-17
-                )
+            if license_classifiers:
+                # Filter classifiers but preserve "static-ness" of metadata
+                filtered = [cl for cl in classifiers if cl not in license_classifiers]
+                self.metadata.set_classifiers(classifiers.__class__(filtered))
 
     def _finalize_license_files(self) -> None:
         """Compute names of all license files which should be included."""
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 468a1ba01d..eb1ca671fd 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -327,18 +327,18 @@ def test_license_expression_with_bad_classifier(tmp_path):
         "README",
         f"{text}\n    \"License :: OSI Approved :: MIT License\"\n]",
     )
-    msg = "License classifier are deprecated(?:.|\n)*'License :: OSI Approved :: MIT License'"
-    with pytest.raises(SetuptoolsDeprecationWarning, match=msg):
+    msg = "License classifier are deprecated"
+    with pytest.raises(SetuptoolsDeprecationWarning, match=msg) as exc:
         pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+        assert "License :: OSI Approved :: MIT License" in str(exc.value)
 
     with warnings.catch_warnings():
         warnings.simplefilter("ignore", SetuptoolsDeprecationWarning)
         dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
-        # Check license classifier is still included
+        # Check 'License :: OSI Approved :: MIT License' is removed
         assert dist.metadata.get_classifiers() == [
             "Development Status :: 5 - Production/Stable",
             "Programming Language :: Python",
-            "License :: OSI Approved :: MIT License",
         ]
 
 

From 778e679b4d95e9bed8c41052883052e39e44e881 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 16:36:39 +0000
Subject: [PATCH 1426/1761] Improve message in warning

---
 setuptools/dist.py                                  | 2 +-
 setuptools/tests/config/test_apply_pyprojecttoml.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index fcedf679e7..dc301a6369 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -411,7 +411,7 @@ def _finalize_license_expression(self) -> None:
 
         if license_classifiers:
             SetuptoolsDeprecationWarning.emit(
-                "License classifier are deprecated in favor of the license expression.",
+                "License classifiers are deprecated in favor of the license expression.",
                 "Please remove the classifiers:\n\n" + "\n".join(license_classifiers),
                 see_url="https://peps.python.org/pep-0639/",
                 due_date=(2027, 2, 17),  # Introduced 2025-02-17
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index eb1ca671fd..d858236278 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -327,7 +327,7 @@ def test_license_expression_with_bad_classifier(tmp_path):
         "README",
         f"{text}\n    \"License :: OSI Approved :: MIT License\"\n]",
     )
-    msg = "License classifier are deprecated"
+    msg = "License classifiers are deprecated"
     with pytest.raises(SetuptoolsDeprecationWarning, match=msg) as exc:
         pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
         assert "License :: OSI Approved :: MIT License" in str(exc.value)

From ee51110a3781937c09cb03c6af373f0245c3bbe1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 16:41:31 +0000
Subject: [PATCH 1427/1761] Use a more explicit method for preserving
 static-ness of classifiers

---
 setuptools/dist.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index dc301a6369..cc627f8583 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -425,8 +425,9 @@ def _finalize_license_expression(self) -> None:
                 self.metadata.license_expression = normalized
             if license_classifiers:
                 # Filter classifiers but preserve "static-ness" of metadata
-                filtered = [cl for cl in classifiers if cl not in license_classifiers]
-                self.metadata.set_classifiers(classifiers.__class__(filtered))
+                list_ = _static.List if _static.is_static(classifiers) else list
+                filtered = (cl for cl in classifiers if cl not in license_classifiers)
+                self.metadata.set_classifiers(list_(filtered))
 
     def _finalize_license_files(self) -> None:
         """Compute names of all license files which should be included."""

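`_static.List` and `_static.is_static` are setuptools internals that tag configuration values as statically known for PEP 643 "Dynamic" tracking; rebuilding the classifier list as a plain `list` would silently lose that tag. A self-contained sketch of the pattern, with stand-in definitions for those internals (assumption: the real helpers behave like a marker subclass plus an isinstance check):

    class StaticList(list):
        """Stand-in for _static.List: contents came from static config."""

    def is_static(value):
        """Stand-in for _static.is_static."""
        return isinstance(value, StaticList)

    def drop_license_classifiers(classifiers):
        license_cls = {c for c in classifiers if c.startswith("License :: ")}
        # Rebuild with the same "static-ness" as the input.
        list_ = StaticList if is_static(classifiers) else list
        return list_(c for c in classifiers if c not in license_cls)

    static = StaticList(["Programming Language :: Python",
                         "License :: OSI Approved :: MIT License"])
    filtered = drop_license_classifiers(static)
    assert is_static(filtered) and filtered == ["Programming Language :: Python"]
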
From 9bdad9f8fa39705abc2664a6cd090250c1772006 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 17:00:44 +0000
Subject: [PATCH 1428/1761] Add news fragment

---
 newsfragments/4833.feature.rst | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 newsfragments/4833.feature.rst

diff --git a/newsfragments/4833.feature.rst b/newsfragments/4833.feature.rst
new file mode 100644
index 0000000000..6a61c5ca05
--- /dev/null
+++ b/newsfragments/4833.feature.rst
@@ -0,0 +1,3 @@
+Added deprecation warning for license classifiers,
+according to `PEP 639
+<https://peps.python.org/pep-0639/>`_.

From ea4095d0d2311fb4266b5ae9aa00f3e5be08c9b5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 18:04:14 +0000
Subject: [PATCH 1429/1761] Keep warning about license classifiers but raise an
 error if license expression is used

---
 setuptools/dist.py                            | 30 +++++++++++--------
 .../tests/config/test_apply_pyprojecttoml.py  | 29 +++++++++++-------
 2 files changed, 36 insertions(+), 23 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index cc627f8583..9d7118a5d2 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -28,6 +28,7 @@
 from ._reqs import _StrOrIter
 from .config import pyprojecttoml, setupcfg
 from .discovery import ConfigDiscovery
+from .errors import InvalidConfigError
 from .monkey import get_unpatched
 from .warnings import InformationOnly, SetuptoolsDeprecationWarning
 
@@ -407,15 +408,7 @@ def _normalize_requires(self):
     def _finalize_license_expression(self) -> None:
         """Normalize license and license_expression."""
         classifiers = self.metadata.get_classifiers()
-        license_classifiers = {cl for cl in classifiers if cl.startswith("License :: ")}
-
-        if license_classifiers:
-            SetuptoolsDeprecationWarning.emit(
-                "License classifiers are deprecated in favor of the license expression.",
-                "Please remove the classifiers:\n\n" + "\n".join(license_classifiers),
-                see_url="https://peps.python.org/pep-0639/",
-                due_date=(2027, 2, 17),  # Introduced 2025-02-17
-            )
+        license_classifiers = [cl for cl in classifiers if cl.startswith("License :: ")]
 
         license_expr = self.metadata.license_expression
         if license_expr:
@@ -424,10 +417,21 @@ def _finalize_license_expression(self) -> None:
                 InformationOnly.emit(f"Normalizing '{license_expr}' to '{normalized}'")
                 self.metadata.license_expression = normalized
             if license_classifiers:
-                # Filter classifiers but preserve "static-ness" of metadata
-                list_ = _static.List if _static.is_static(classifiers) else list
-                filtered = (cl for cl in classifiers if cl not in license_classifiers)
-                self.metadata.set_classifiers(list_(filtered))
+                raise InvalidConfigError(
+                    "License classifiers have been superseded by license expressions "
+                    "(see https://peps.python.org/pep-0639/). Please remove:\n\n"
+                    + "\n".join(license_classifiers),
+                )
+        elif license_classifiers:
+            SetuptoolsDeprecationWarning.emit(
+                "License classifiers are deprecated.",
+                "Please consider removing the following classifiers in favor of a "
+                "SPDX license expression:\n\n" + "\n".join(license_classifiers),
+                see_url="https://peps.python.org/pep-0639/",
+                # Warning introduced on 2025-02-17
+                # TODO: Should we add a due date? It may affect old/unmaintained
+                #       packages in the ecosystem and cause problems...
+            )
 
     def _finalize_license_files(self) -> None:
         """Compute names of all license files which should be included."""
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index d858236278..62baf7f827 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -9,7 +9,6 @@
 import io
 import re
 import tarfile
-import warnings
 from inspect import cleandoc
 from pathlib import Path
 from unittest.mock import Mock
@@ -24,7 +23,7 @@
 from setuptools.config import expand, pyprojecttoml, setupcfg
 from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
 from setuptools.dist import Distribution
-from setuptools.errors import RemovedConfigError
+from setuptools.errors import InvalidConfigError, RemovedConfigError
 from setuptools.warnings import SetuptoolsDeprecationWarning
 
 from .downloads import retrieve_file, urls_from_file
@@ -320,25 +319,35 @@ def test_license_in_metadata(
     assert content_str in content
 
 
-def test_license_expression_with_bad_classifier(tmp_path):
+def test_license_classifier_with_license_expression(tmp_path):
     text = PEP639_LICENSE_EXPRESSION.rsplit("\n", 2)[0]
     pyproject = _pep621_example_project(
         tmp_path,
         "README",
         f"{text}\n    \"License :: OSI Approved :: MIT License\"\n]",
     )
-    msg = "License classifiers are deprecated"
-    with pytest.raises(SetuptoolsDeprecationWarning, match=msg) as exc:
+    msg = "License classifiers have been superseded by license expressions"
+    with pytest.raises(InvalidConfigError, match=msg) as exc:
         pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
         assert "License :: OSI Approved :: MIT License" in str(exc.value)
 
-    with warnings.catch_warnings():
-        warnings.simplefilter("ignore", SetuptoolsDeprecationWarning)
+
+def test_license_classifier_without_license_expression(tmp_path):
+    text = """\
+    [project]
+    name = "spam"
+    version = "2020.0.0"
+    license = {text = "mit or apache-2.0"}
+    classifiers = ["License :: OSI Approved :: MIT License"]
+    """
+    pyproject = _pep621_example_project(tmp_path, "README", text)
+
+    msg = "License classifiers are deprecated(?:.|\n)*MIT License"
+    with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
         dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
-        # Check 'License :: OSI Approved :: MIT License' is removed
+        # Check license classifier is still included
         assert dist.metadata.get_classifiers() == [
-            "Development Status :: 5 - Production/Stable",
-            "Programming Language :: Python",
+            "License :: OSI Approved :: MIT License"
         ]
 
 

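The net behavior after this patch: a license expression combined with license classifiers is a hard configuration error, while classifiers alone merely warn. A condensed sketch of that decision table (the exception class and warning are stand-ins for the setuptools types used above):

    import warnings

    class InvalidConfigError(Exception):
        """Stand-in for setuptools.errors.InvalidConfigError."""

    def check_license_metadata(license_expression, classifiers):
        license_cls = [c for c in classifiers if c.startswith("License :: ")]
        if license_expression and license_cls:
            raise InvalidConfigError(
                "License classifiers have been superseded by license "
                "expressions. Please remove:\n" + "\n".join(license_cls)
            )
        if license_cls:
            # No expression configured: still allowed, but deprecated.
            warnings.warn("License classifiers are deprecated.", DeprecationWarning)

    check_license_metadata("MIT", ["License :: OSI Approved :: MIT License"])
    # raises InvalidConfigError
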
From 3af67b8183f808e91bab8648132b4d6a91704e3e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 18:06:20 +0000
Subject: [PATCH 1430/1761] Update newsfragment

---
 newsfragments/4833.feature.rst | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/newsfragments/4833.feature.rst b/newsfragments/4833.feature.rst
index 6a61c5ca05..d8801becf7 100644
--- a/newsfragments/4833.feature.rst
+++ b/newsfragments/4833.feature.rst
@@ -1,3 +1,2 @@
-Added deprecation warning for license classifiers,
-according to `PEP 639
-<https://peps.python.org/pep-0639/>`_.
+Added exception (or warning) when deprecated license classifiers are used,
+according to `PEP 639 <https://peps.python.org/pep-0639/>`_.

From 3b71b5f9f4a277f8ffd95f60aee1fc10f7e0e011 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 18:18:29 +0000
Subject: [PATCH 1431/1761] Use a better docs URL for warning

---
 setuptools/dist.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 9d7118a5d2..44e600df5c 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -423,11 +423,12 @@ def _finalize_license_expression(self) -> None:
                     + "\n".join(license_classifiers),
                 )
         elif license_classifiers:
+            pypa_guides = "guides/licensing-examples-and-user-scenarios/"
             SetuptoolsDeprecationWarning.emit(
                 "License classifiers are deprecated.",
                 "Please consider removing the following classifiers in favor of a "
                 "SPDX license expression:\n\n" + "\n".join(license_classifiers),
-                see_url="https://peps.python.org/pep-0639/",
+                see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
                 # Warning introduced on 2025-02-17
                 # TODO: Should we add a due date? It may affect old/unmaintained
                 #       packages in the ecosystem and cause problems...

From 0587478f1e7fdac262ee5d034bbdb292c5e8ae90 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 18:27:00 +0000
Subject: [PATCH 1432/1761] Ensure _apply_pyproject sets field on dist.metadata
 object, not on dist

---
 setuptools/config/_apply_pyprojecttoml.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 6bb2bea514..bcb3c03894 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -423,6 +423,7 @@ def _acessor(obj):
     "provides_extras",
     "license_file",
     "license_files",
+    "license_expression",
 }
 
 _PREPROCESS = {

From 29302de33aa57e4df01edcafcd331362e09ca8cd Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 10:31:37 +0000
Subject: [PATCH 1433/1761] Update URL for warning

---
 setuptools/dist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 44e600df5c..d202dbf504 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -423,7 +423,7 @@ def _finalize_license_expression(self) -> None:
                     + "\n".join(license_classifiers),
                 )
         elif license_classifiers:
-            pypa_guides = "guides/licensing-examples-and-user-scenarios/"
+            pypa_guides = "guides/writing-pyproject-toml/#license"
             SetuptoolsDeprecationWarning.emit(
                 "License classifiers are deprecated.",
                 "Please consider removing the following classifiers in favor of a "

From a20512e7f513b9a0471845777170560b64cedd39 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 10:43:30 +0000
Subject: [PATCH 1434/1761] Fix bypassed assertion in tests

---
 setuptools/tests/config/test_apply_pyprojecttoml.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 62baf7f827..5b7ca0f40d 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -329,7 +329,8 @@ def test_license_classifier_with_license_expression(tmp_path):
     msg = "License classifiers have been superseded by license expressions"
     with pytest.raises(InvalidConfigError, match=msg) as exc:
         pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
-        assert "License :: OSI Approved :: MIT License" in str(exc.value)
+
+    assert "License :: OSI Approved :: MIT License" in str(exc.value)
 
 
 def test_license_classifier_without_license_expression(tmp_path):
@@ -345,10 +346,9 @@ def test_license_classifier_without_license_expression(tmp_path):
     msg = "License classifiers are deprecated(?:.|\n)*MIT License"
     with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
         dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
-        # Check license classifier is still included
-        assert dist.metadata.get_classifiers() == [
-            "License :: OSI Approved :: MIT License"
-        ]
+
+    # Check license classifier is still included
+    assert dist.metadata.get_classifiers() == ["License :: OSI Approved :: MIT License"]
 
 
 class TestLicenseFiles:

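The bug fixed here is easy to reintroduce: any statement placed after the raising call inside a `pytest.raises` block is dead code, because the exception aborts the block before the assertion runs. Moving the assertion outside the context manager makes it actually execute. A standalone illustration:

    import pytest

    def boom():
        raise ValueError("bad license classifier")

    def test_assert_placement():
        with pytest.raises(ValueError) as exc:
            boom()
            assert False  # dead code: the raise above already exited the block

        # Correct placement: runs after the exception was captured.
        assert "license classifier" in str(exc.value)
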
From ab277d305add64e07a62ecf16d2f555ef2145c39 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Tue, 18 Feb 2025 12:41:16 +0100
Subject: [PATCH 1435/1761] Deprecate tools.setuptools.license-files

---
 docs/userguide/miscellaneous.rst              |  2 +-
 docs/userguide/pyproject_config.rst           |  2 +-
 newsfragments/4837.feature.rst                |  3 +++
 setuptools/config/_apply_pyprojecttoml.py     | 21 +++++++++++++++++--
 .../tests/config/test_apply_pyprojecttoml.py  | 19 ++++++++++++++---
 setuptools/tests/test_build_meta.py           | 10 +++++++--
 6 files changed, 48 insertions(+), 9 deletions(-)
 create mode 100644 newsfragments/4837.feature.rst

diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst
index 7354bd3a86..1b493fba3b 100644
--- a/docs/userguide/miscellaneous.rst
+++ b/docs/userguide/miscellaneous.rst
@@ -24,7 +24,7 @@ files are included in a source distribution by default:
   in ``pyproject.toml`` and/or equivalent in ``setup.cfg``/``setup.py``;
   note that if you don't explicitly set this parameter, ``setuptools``
   will include any files that match the following glob patterns:
-  ``LICENSE*``, ``LICENCE*``, ``COPYING*``, ``NOTICE*``, ``AUTHORS**``;
+  ``LICEN[CS]E*``, ``COPYING*``, ``NOTICE*``, ``AUTHORS**``;
 - ``pyproject.toml``;
 - ``setup.cfg``;
 - ``setup.py``;
diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index efc68603a9..4730bdddbe 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -99,7 +99,7 @@ Key                       Value Type (TOML)           Notes
                                                       See :doc:`/userguide/datafiles`.
 ``exclude-package-data``  table/inline-table          Empty by default. See :doc:`/userguide/datafiles`.
 ------------------------- --------------------------- -------------------------
-``license-files``         array of glob patterns      **Provisional** - likely to change with :pep:`639`
+``license-files``         array of glob patterns      **Deprecated** - use ``project.license-files`` instead. See :doc:`PyPUG:guides/writing-pyproject-toml/#license-files`
                                                       (by default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``)
 ``data-files``            table/inline-table          **Discouraged** - check :doc:`/userguide/datafiles`.
                                                       Whenever possible, consider using data files inside the package directories.
diff --git a/newsfragments/4837.feature.rst b/newsfragments/4837.feature.rst
new file mode 100644
index 0000000000..4ad97b9513
--- /dev/null
+++ b/newsfragments/4837.feature.rst
@@ -0,0 +1,3 @@
+Deprecated ``tools.setuptools.license-files`` in favor of ``project.license-files``
+and added exception if ``project.license-files`` and ``tools.setuptools.license-files``
+are used together. -- by :user:`cdce8p`
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index bcb3c03894..ccd8f7215d 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -22,9 +22,9 @@
 
 from .. import _static
 from .._path import StrPath
-from ..errors import RemovedConfigError
+from ..errors import InvalidConfigError, RemovedConfigError
 from ..extension import Extension
-from ..warnings import SetuptoolsWarning
+from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
 
 if TYPE_CHECKING:
     from typing_extensions import TypeAlias
@@ -100,6 +100,23 @@ def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
             """
             raise RemovedConfigError("\n".join([cleandoc(msg), suggestion]))
 
+        if norm_key == "license_files":
+            if dist.metadata.license_files:
+                raise InvalidConfigError(
+                    "'project.license-files' is defined already. "
+                    "Remove 'tool.setuptools.license-files'."
+                )
+
+            pypa_guides = "guides/writing-pyproject-toml/#license-files"
+            SetuptoolsDeprecationWarning.emit(
+                "'tool.setuptools.license-files' is deprecated in favor of "
+                "'project.license-files'",
+                see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
+            )
+            # Warning introduced on 2025-02-18
+            # TODO: Should we add a due date? It may affect old/unmaintained
+            #       packages in the ecosystem and cause problems...
+
         norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
         corresp = TOOL_TABLE_CORRESPONDENCE.get(norm_key, norm_key)
         if callable(corresp):
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index c437988702..29f6ffaf34 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -24,7 +24,7 @@
 from setuptools.config import expand, pyprojecttoml, setupcfg
 from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
 from setuptools.dist import Distribution
-from setuptools.errors import RemovedConfigError
+from setuptools.errors import InvalidConfigError, RemovedConfigError
 from setuptools.warnings import SetuptoolsDeprecationWarning
 
 from .downloads import retrieve_file, urls_from_file
@@ -366,7 +366,7 @@ def base_pyproject(self, tmp_path, additional_text):
         pyproject.write_text(text, encoding="utf-8")
         return pyproject
 
-    def base_pyproject_license_pep639(self, tmp_path):
+    def base_pyproject_license_pep639(self, tmp_path, additional_text=""):
         pyproject = _pep621_example_project(tmp_path, "README")
         text = pyproject.read_text(encoding="utf-8")
 
@@ -381,6 +381,8 @@ def base_pyproject_license_pep639(self, tmp_path):
             text,
             count=1,
         )
+        if additional_text:
+            text = f"{text}\n{additional_text}\n"
         pyproject.write_text(text, encoding="utf-8")
         return pyproject
 
@@ -396,7 +398,9 @@ def test_both_license_and_license_files_defined(self, tmp_path):
         license = tmp_path / "LICENSE.txt"
         license.write_text("LicenseRef-Proprietary\n", encoding="utf-8")
 
-        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+        msg = "'tool.setuptools.license-files' is deprecated in favor of 'project.license-files'"
+        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+            dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
         assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
         assert dist.metadata.license == "LicenseRef-Proprietary\n"
 
@@ -412,6 +416,15 @@ def test_both_license_and_license_files_defined_pep639(self, tmp_path):
         assert dist.metadata.license is None
         assert dist.metadata.license_expression == "LicenseRef-Proprietary"
 
+    def test_license_files_defined_twice(self, tmp_path):
+        # Set project.license-files and tools.setuptools.license-files
+        setuptools_config = '[tool.setuptools]\nlicense-files = ["_FILE*"]'
+        pyproject = self.base_pyproject_license_pep639(tmp_path, setuptools_config)
+
+        msg = "'project.license-files' is defined already. Remove 'tool.setuptools.license-files'"
+        with pytest.raises(InvalidConfigError, match=msg):
+            pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
     def test_default_patterns(self, tmp_path):
         setuptools_config = '[tool.setuptools]\nzip-safe = false'
         # ^ used just to trigger section validation
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index b26fd2f5b0..8598578475 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -6,6 +6,7 @@
 import signal
 import sys
 import tarfile
+import warnings
 from concurrent import futures
 from pathlib import Path
 from typing import Any, Callable
@@ -15,6 +16,8 @@
 from jaraco import path
 from packaging.requirements import Requirement
 
+from setuptools.warnings import SetuptoolsDeprecationWarning
+
 from .textwrap import DALS
 
 SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
@@ -384,8 +387,11 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
         build_backend = self.get_build_backend()
         with tmpdir.as_cwd():
             path.build(files)
-            sdist_path = build_backend.build_sdist("temp")
-            wheel_file = build_backend.build_wheel("temp")
+            with warnings.catch_warnings():
+                msg = "'tool.setuptools.license-files' is deprecated in favor of 'project.license-files'"
+                warnings.filterwarnings("ignore", msg, SetuptoolsDeprecationWarning)
+                sdist_path = build_backend.build_sdist("temp")
+                wheel_file = build_backend.build_wheel("temp")
 
         with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
             sdist_contents = set(tar.getnames())

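The `warnings.filterwarnings("ignore", msg, Category)` call used in the test treats its second argument as a regular expression matched against the start of the warning message, which is how it can silence just the new deprecation while leaving every other warning visible. A minimal standalone illustration, with a stand-in warning class:

    import warnings

    class FakeDeprecationWarning(UserWarning):
        """Stand-in for SetuptoolsDeprecationWarning."""

    with warnings.catch_warnings():
        warnings.simplefilter("error")  # any other warning would raise
        warnings.filterwarnings(
            "ignore",
            "'tool.setuptools.license-files' is deprecated",
            FakeDeprecationWarning,
        )
        warnings.warn(
            "'tool.setuptools.license-files' is deprecated in favor of "
            "'project.license-files'",
            FakeDeprecationWarning,
        )  # matched by the filter above, so it is silently dropped
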
From b44b4f19eda2a083d3b3bcd94939bb3b217431d0 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Tue, 18 Feb 2025 13:28:11 +0100
Subject: [PATCH 1436/1761] Suggestions

---
 setuptools/config/_apply_pyprojecttoml.py | 32 +++++++++++------------
 1 file changed, 15 insertions(+), 17 deletions(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index ccd8f7215d..9c60196e54 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -89,6 +89,21 @@ def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
     if not tool_table:
         return  # short-circuit
 
+    if "license-files" in tool_table:
+        if dist.metadata.license_files:
+            raise InvalidConfigError(
+                "'project.license-files' is defined already. "
+                "Remove 'tool.setuptools.license-files'."
+            )
+
+        pypa_guides = "guides/writing-pyproject-toml/#license-files"
+        SetuptoolsDeprecationWarning.emit(
+            "'tool.setuptools.license-files' is deprecated in favor of "
+            "'project.license-files'",
+            see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
+            due_date=(2026, 2, 18),  # Warning introduced on 2025-02-18
+        )
+
     for field, value in tool_table.items():
         norm_key = json_compatible_key(field)
 
@@ -100,23 +115,6 @@ def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
             """
             raise RemovedConfigError("\n".join([cleandoc(msg), suggestion]))
 
-        if norm_key == "license_files":
-            if dist.metadata.license_files:
-                raise InvalidConfigError(
-                    "'project.license-files' is defined already. "
-                    "Remove 'tool.setuptools.license-files'."
-                )
-
-            pypa_guides = "guides/writing-pyproject-toml/#license-files"
-            SetuptoolsDeprecationWarning.emit(
-                "'tool.setuptools.license-files' is deprecated in favor of "
-                "'project.license-files'",
-                see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
-            )
-            # Warning introduced on 2025-02-18
-            # TODO: Should we add a due date? It may affect old/unmaintained
-            #       packages in the ecosystem and cause problems...
-
         norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
         corresp = TOOL_TABLE_CORRESPONDENCE.get(norm_key, norm_key)
         if callable(corresp):

From 7e50cb1a0812aac9d65c5cc0a0d212805c93aa7b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 13:20:49 +0000
Subject: [PATCH 1437/1761] Attempt to fix sphinx warnings

---
 newsfragments/4706.feature.rst | 2 +-
 newsfragments/4728.feature.rst | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/newsfragments/4706.feature.rst b/newsfragments/4706.feature.rst
index 1d34f5f476..1c06206078 100644
--- a/newsfragments/4706.feature.rst
+++ b/newsfragments/4706.feature.rst
@@ -1 +1 @@
-Added initial support for license expression (`PEP 639 <https://peps.python.org/pep-0639/#add-license-expression-field>`_). -- by :user:`cdce8p`
+Added initial support for license expression (:pep:`639#add-license-expression-field`). -- by :user:`cdce8p`
diff --git a/newsfragments/4728.feature.rst b/newsfragments/4728.feature.rst
index 61906656c0..5eb4fa6a40 100644
--- a/newsfragments/4728.feature.rst
+++ b/newsfragments/4728.feature.rst
@@ -1 +1 @@
-Store ``License-File``s in ``.dist-info/licenses`` subfolder and added support for recursive globs for ``license_files`` (`PEP 639 <https://peps.python.org/pep-0639/#add-license-expression-field>`_). -- by :user:`cdce8p`
+Store ``License-File``s in ``.dist-info/licenses`` subfolder and added support for recursive globs for ``license_files`` (:pep:`639#add-license-expression-field`). -- by :user:`cdce8p`

From 7f442369f457889bd7028de56f3a6549d1a180a1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 13:32:10 +0000
Subject: [PATCH 1438/1761] Attempt to improve display of pep links

---
 newsfragments/4706.feature.rst | 2 +-
 newsfragments/4728.feature.rst | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/newsfragments/4706.feature.rst b/newsfragments/4706.feature.rst
index 1c06206078..be8aea6456 100644
--- a/newsfragments/4706.feature.rst
+++ b/newsfragments/4706.feature.rst
@@ -1 +1 @@
-Added initial support for license expression (:pep:`639#add-license-expression-field`). -- by :user:`cdce8p`
+Added initial support for license expression (:pep:`PEP 639 <639#add-license-expression-field>`). -- by :user:`cdce8p`
diff --git a/newsfragments/4728.feature.rst b/newsfragments/4728.feature.rst
index 5eb4fa6a40..ea19b31a36 100644
--- a/newsfragments/4728.feature.rst
+++ b/newsfragments/4728.feature.rst
@@ -1 +1 @@
-Store ``License-File``s in ``.dist-info/licenses`` subfolder and added support for recursive globs for ``license_files`` (:pep:`639#add-license-expression-field`). -- by :user:`cdce8p`
+Store ``License-File``s in ``.dist-info/licenses`` subfolder and added support for recursive globs for ``license_files`` (:pep:`PEP 639 <639#add-license-expression-field>`). -- by :user:`cdce8p`

From d6abdced01b80617c89a5878775439821264b28e Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Tue, 18 Feb 2025 15:00:30 +0100
Subject: [PATCH 1439/1761] Fix reference

---
 docs/userguide/pyproject_config.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 4730bdddbe..3bf99bed1a 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -99,7 +99,7 @@ Key                       Value Type (TOML)           Notes
                                                       See :doc:`/userguide/datafiles`.
 ``exclude-package-data``  table/inline-table          Empty by default. See :doc:`/userguide/datafiles`.
 ------------------------- --------------------------- -------------------------
-``license-files``         array of glob patterns      **Deprecated** - use ``project.license-files`` instead. See :doc:`PyPUG:guides/writing-pyproject-toml/#license-files`
+``license-files``         array of glob patterns      **Deprecated** - use ``project.license-files`` instead. See :doc:`PyPUG:guides/writing-pyproject-toml`
                                                       (by default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``)
 ``data-files``            table/inline-table          **Discouraged** - check :doc:`/userguide/datafiles`.
                                                       Whenever possible, consider using data files inside the package directories.

From 62bd9444e652407961b283d8212f9d9d21077a82 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 18:17:10 +0000
Subject: [PATCH 1440/1761] Add deprecation warning for project.license as a
 table in pyproject.toml

---
 setuptools/config/_apply_pyprojecttoml.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 9c60196e54..f23d0d2de3 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -203,6 +203,14 @@ def _license(dist: Distribution, val: str | dict, root_dir: StrPath | None):
     if isinstance(val, str):
         _set_config(dist, "license_expression", _static.Str(val))
     else:
+        pypa_guides = "guides/writing-pyproject-toml/#license"
+        SetuptoolsDeprecationWarning.emit(
+            "`project.license` as a TOML table is deprecated",
+            "Please use a simple string containing a SPDX expression for "
+            "`project.license`. You can also use `project.license-files`.",
+            see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
+            due_date=(2026, 2, 18),  # Introduced on 2025-02-18
+        )
         if "file" in val:
             # XXX: Is it completely safe to assume static?
             value = expand.read_files([val["file"]], root_dir)

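Under PEP 639 the `project.license` key is either a plain SPDX string (the new form) or a table with a `file`/`text` subkey (now deprecated); the `isinstance(val, str)` dispatch above keys off that type difference. A sketch of the distinction as it appears at parse time, using stdlib tomllib (Python 3.11+):

    import tomllib

    NEW = '[project]\nname = "spam"\nlicense = "MIT OR Apache-2.0"\n'
    OLD = '[project]\nname = "spam"\nlicense = {file = "LICENSE.txt"}\n'

    for doc in (NEW, OLD):
        val = tomllib.loads(doc)["project"]["license"]
        if isinstance(val, str):
            print("SPDX expression (preferred):", val)
        else:
            print("deprecated table form:", val)  # {'file': ...} or {'text': ...}
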
From e58f51414ce074628014c435eaa1f71df4dfa191 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 18:18:24 +0000
Subject: [PATCH 1441/1761] Adapt tests to the new warning

---
 .../tests/config/test_apply_pyprojecttoml.py  | 86 +++++++++++++------
 setuptools/tests/test_build_meta.py           |  8 +-
 setuptools/tests/test_sdist.py                | 14 ++-
 3 files changed, 77 insertions(+), 31 deletions(-)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index c489c99bd6..848f44745f 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -93,7 +93,7 @@ def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
 description = "Lovely Spam! Wonderful Spam!"
 readme = "README.rst"
 requires-python = ">=3.8"
-license = {file = "LICENSE.txt"}
+license-files = ["LICENSE.txt"]  # Updated to be PEP 639 compliant
 keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"]
 authors = [
   {email = "hi@pradyunsg.me"},
@@ -206,7 +206,6 @@ def test_pep621_example(tmp_path):
     """Make sure the example in PEP 621 works"""
     pyproject = _pep621_example_project(tmp_path)
     dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
-    assert dist.metadata.license == "--- LICENSE stub ---"
     assert set(dist.metadata.license_files) == {"LICENSE.txt"}
 
 
@@ -294,6 +293,11 @@ def test_utf8_maintainer_in_metadata(  # issue-3663
             'License: MIT',
             'License-Expression: ',
             id='license-text',
+            marks=[
+                pytest.mark.filterwarnings(
+                    "ignore:.project.license. as a TOML table is deprecated",
+                )
+            ],
         ),
         pytest.param(
             PEP639_LICENSE_EXPRESSION,
@@ -354,8 +358,12 @@ def test_license_classifier_without_license_expression(tmp_path):
     """
     pyproject = _pep621_example_project(tmp_path, "README", text)
 
-    msg = "License classifiers are deprecated(?:.|\n)*MIT License"
-    with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+    msg1 = "License classifiers are deprecated(?:.|\n)*MIT License"
+    msg2 = ".project.license. as a TOML table is deprecated"
+    with (
+        pytest.warns(SetuptoolsDeprecationWarning, match=msg1),
+        pytest.warns(SetuptoolsDeprecationWarning, match=msg2),
+    ):
         dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
 
     # Check license classifier is still included
@@ -363,38 +371,38 @@ def test_license_classifier_without_license_expression(tmp_path):
 
 
 class TestLicenseFiles:
-    def base_pyproject(self, tmp_path, additional_text):
-        pyproject = _pep621_example_project(tmp_path, "README")
-        text = pyproject.read_text(encoding="utf-8")
-
-        # Sanity-check
-        assert 'license = {file = "LICENSE.txt"}' in text
-        assert "[tool.setuptools]" not in text
-
-        text = f"{text}\n{additional_text}\n"
-        pyproject.write_text(text, encoding="utf-8")
-        return pyproject
-
-    def base_pyproject_license_pep639(self, tmp_path, additional_text=""):
-        pyproject = _pep621_example_project(tmp_path, "README")
-        text = pyproject.read_text(encoding="utf-8")
+    def base_pyproject(
+        self,
+        tmp_path,
+        additional_text="",
+        license_toml='license = {file = "LICENSE.txt"}\n',
+    ):
+        text = PEP639_LICENSE_EXPRESSION
 
         # Sanity-check
-        assert 'license = {file = "LICENSE.txt"}' in text
+        assert 'license = "mit or apache-2.0"' in text
         assert 'license-files' not in text
         assert "[tool.setuptools]" not in text
 
         text = re.sub(
-            r"(license = {file = \"LICENSE.txt\"})\n",
-            ("license = \"licenseref-Proprietary\"\nlicense-files = [\"_FILE*\"]\n"),
+            r"(license = .*)\n",
+            license_toml,
             text,
             count=1,
         )
-        if additional_text:
-            text = f"{text}\n{additional_text}\n"
-        pyproject.write_text(text, encoding="utf-8")
+        assert license_toml in text  # sanity check
+        text = f"{text}\n{additional_text}\n"
+        pyproject = _pep621_example_project(tmp_path, "README", pyproject_text=text)
         return pyproject
 
+    def base_pyproject_license_pep639(self, tmp_path, additional_text=""):
+        return self.base_pyproject(
+            tmp_path,
+            additional_text=additional_text,
+            license_toml='license = "licenseref-Proprietary"'
+            '\nlicense-files = ["_FILE*"]\n',
+        )
+
     def test_both_license_and_license_files_defined(self, tmp_path):
         setuptools_config = '[tool.setuptools]\nlicense-files = ["_FILE*"]'
         pyproject = self.base_pyproject(tmp_path, setuptools_config)
@@ -407,8 +415,12 @@ def test_both_license_and_license_files_defined(self, tmp_path):
         license = tmp_path / "LICENSE.txt"
         license.write_text("LicenseRef-Proprietary\n", encoding="utf-8")
 
-        msg = "'tool.setuptools.license-files' is deprecated in favor of 'project.license-files'"
-        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+        msg1 = "'tool.setuptools.license-files' is deprecated in favor of 'project.license-files'"
+        msg2 = ".project.license. as a TOML table is deprecated"
+        with (
+            pytest.warns(SetuptoolsDeprecationWarning, match=msg1),
+            pytest.warns(SetuptoolsDeprecationWarning, match=msg2),
+        ):
             dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
         assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
         assert dist.metadata.license == "LicenseRef-Proprietary\n"
@@ -437,7 +449,7 @@ def test_license_files_defined_twice(self, tmp_path):
     def test_default_patterns(self, tmp_path):
         setuptools_config = '[tool.setuptools]\nzip-safe = false'
         # ^ used just to trigger section validation
-        pyproject = self.base_pyproject(tmp_path, setuptools_config)
+        pyproject = self.base_pyproject(tmp_path, setuptools_config, license_toml="")
 
         license_files = "LICENCE-a.html COPYING-abc.txt AUTHORS-xyz NOTICE,def".split()
 
@@ -445,9 +457,27 @@ def test_default_patterns(self, tmp_path):
             (tmp_path / fname).write_text(f"{fname}\n", encoding="utf-8")
 
         dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
         assert (tmp_path / "LICENSE.txt").exists()  # from base example
         assert set(dist.metadata.license_files) == {*license_files, "LICENSE.txt"}
 
+    def test_deprecated_file_expands_to_text(self, tmp_path):
+        """Make sure the old example with ``license = {text = ...}`` works"""
+
+        assert 'license-files = ["LICENSE.txt"]' in PEP621_EXAMPLE  # sanity check
+        text = PEP621_EXAMPLE.replace(
+            'license-files = ["LICENSE.txt"]',
+            'license = {file = "LICENSE.txt"}',
+        )
+        pyproject = _pep621_example_project(tmp_path, pyproject_text=text)
+
+        msg = ".project.license. as a TOML table is deprecated"
+        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+            dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
+        assert dist.metadata.license == "--- LICENSE stub ---"
+        assert set(dist.metadata.license_files) == {"LICENSE.txt"}  # auto-filled
+
 
 class TestPyModules:
     # https://github.com/pypa/setuptools/issues/4316
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 8598578475..624bba862e 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -387,9 +387,13 @@ def test_build_with_pyproject_config(self, tmpdir, setup_script):
         build_backend = self.get_build_backend()
         with tmpdir.as_cwd():
             path.build(files)
+            msgs = [
+                "'tool.setuptools.license-files' is deprecated in favor of 'project.license-files'",
+                "`project.license` as a TOML table is deprecated",
+            ]
             with warnings.catch_warnings():
-                msg = "'tool.setuptools.license-files' is deprecated in favor of 'project.license-files'"
-                warnings.filterwarnings("ignore", msg, SetuptoolsDeprecationWarning)
+                for msg in msgs:
+                    warnings.filterwarnings("ignore", msg, SetuptoolsDeprecationWarning)
                 sdist_path = build_backend.build_sdist("temp")
                 wheel_file = build_backend.build_wheel("temp")
 
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 3ee0511b1c..19d8ddf6da 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -708,12 +708,21 @@ def test_sdist_with_latin1_encoded_filename(self):
             [project]
             name = "testing"
             readme = "USAGE.rst"
-            license = {file = "DOWHATYOUWANT"}
+            license-files = ["DOWHATYOUWANT"]
             dynamic = ["version"]
             [tool.setuptools.dynamic]
             version = {file = ["src/VERSION.txt"]}
             """,
         "pyproject.toml - directive with str instead of list": """
+            [project]
+            name = "testing"
+            readme = "USAGE.rst"
+            license-files = ["DOWHATYOUWANT"]
+            dynamic = ["version"]
+            [tool.setuptools.dynamic]
+            version = {file = "src/VERSION.txt"}
+            """,
+        "pyproject.toml - deprecated license table with file entry": """
             [project]
             name = "testing"
             readme = "USAGE.rst"
@@ -725,6 +734,9 @@ def test_sdist_with_latin1_encoded_filename(self):
     }
 
     @pytest.mark.parametrize("config", _EXAMPLE_DIRECTIVES.keys())
+    @pytest.mark.filterwarnings(
+        "ignore:.project.license. as a TOML table is deprecated"
+    )
     def test_add_files_referenced_by_config_directives(self, source_dir, config):
         config_file, _, _ = config.partition(" - ")
         config_text = self._EXAMPLE_DIRECTIVES[config]

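The nested `pytest.warns` contexts introduced above assert two different warnings from a single call; this leans on pytest 8.0+ behavior where a checker re-emits warnings it captured on exit, so the outer context still sees what the inner one recorded. A standalone sketch under that assumption:

    import warnings
    import pytest

    def configure():
        warnings.warn("License classifiers are deprecated.", DeprecationWarning)
        warnings.warn("`project.license` as a TOML table is deprecated",
                      DeprecationWarning)

    def test_both_warnings():
        with (
            pytest.warns(DeprecationWarning, match="classifiers are deprecated"),
            pytest.warns(DeprecationWarning, match="TOML table is deprecated"),
        ):
            configure()
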
From 68397dc45fe2daa80921fcf58a34f9b96c953984 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 18:34:06 +0000
Subject: [PATCH 1442/1761] Add news fragment

---
 newsfragments/4840.feature.rst | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 newsfragments/4840.feature.rst

diff --git a/newsfragments/4840.feature.rst b/newsfragments/4840.feature.rst
new file mode 100644
index 0000000000..a033fd2afb
--- /dev/null
+++ b/newsfragments/4840.feature.rst
@@ -0,0 +1,5 @@
+Deprecated ``project.license`` as a TOML table in
+``pyproject.toml``. Users are expected to move towards using
+``project.license-files`` and/or SPDX expressions (as strings) in
+``project.license``.
+See :pep:`PEP 639 <639#deprecate-license-key-table-subkeys>`.

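The migration is easiest to see side by side. Below is a minimal, self-contained sketch (not setuptools code) contrasting the deprecated table form with the PEP 639 string form; it relies only on the stdlib TOML parser available since Python 3.11:

    import tomllib

    DEPRECATED = '''
    [project]
    name = "pkg"
    version = "1.0"
    license = {text = "MIT"}          # TOML table: deprecated by this change
    '''

    PEP639 = '''
    [project]
    name = "pkg"
    version = "1.0"
    license = "MIT"                   # SPDX expression as a plain string
    license-files = ["LICENSE*"]      # glob patterns for the license files
    '''

    for text in (DEPRECATED, PEP639):
        value = tomllib.loads(text)["project"]["license"]
        # The table form parses as a dict, the PEP 639 form as a str; the
        # parsed type is what a config reader can branch on to warn users.
        print(type(value).__name__)   # -> dict, then str
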
From e0a6de58a053a73d6800463487bd12a26e2af963 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Feb 2025 10:30:03 +0000
Subject: [PATCH 1443/1761] Ensure _finalize_license_expression preserves
 "static-ness"

---
 setuptools/dist.py | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index d202dbf504..15b1967be3 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -406,13 +406,24 @@ def _normalize_requires(self):
         )
 
     def _finalize_license_expression(self) -> None:
-        """Normalize license and license_expression."""
+        """
+        Normalize license and license_expression.
+        >>> dist = Distribution({"license_expression": _static.Str("mit aNd  gpl-3.0-OR-later")})
+        >>> _static.is_static(dist.metadata.license_expression)
+        True
+        >>> dist._finalize_license_expression()
+        >>> _static.is_static(dist.metadata.license_expression)  # preserve static-ness"
+        True
+        >>> print(dist.metadata.license_expression)
+        MIT AND GPL-3.0-or-later
+        """
         classifiers = self.metadata.get_classifiers()
         license_classifiers = [cl for cl in classifiers if cl.startswith("License :: ")]
 
         license_expr = self.metadata.license_expression
         if license_expr:
-            normalized = canonicalize_license_expression(license_expr)
+            str_ = _static.Str if _static.is_static(license_expr) else str
+            normalized = str_(canonicalize_license_expression(license_expr))
             if license_expr != normalized:
                 InformationOnly.emit(f"Normalizing '{license_expr}' to '{normalized}'")
                 self.metadata.license_expression = normalized

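The trick above is worth spelling out: values parsed from configuration files are wrapped in a `str` subclass, and re-wrapping the normalized result in the same type preserves the marker. A minimal sketch, assuming `_static.Str` behaves as a plain `str` subclass (as the doctest suggests), with a toy stand-in for `canonicalize_license_expression`:

    class Str(str):
        """Marker type: a value that came verbatim from a config file."""

    def is_static(value) -> bool:
        return isinstance(value, Str)

    def canonicalize(expr: str) -> str:
        return expr.replace(" aNd ", " AND ")  # toy normalization only

    def finalize(expr: str) -> str:
        # A plain str(...) would drop the subclass, so pick the constructor
        # based on the input -- the `str_ = _static.Str if ... else str` trick.
        str_ = Str if is_static(expr) else str
        return str_(canonicalize(expr))

    expr = Str("MIT aNd Apache-2.0")
    normalized = finalize(expr)
    print(normalized, is_static(normalized))  # MIT AND Apache-2.0 True
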
From 9ba666dbfc9fed66e846af66a2d052efaae79613 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Feb 2025 10:33:40 +0000
Subject: [PATCH 1444/1761] Ensure PEP 639 implementation plays nicely with PEP
 643

---
 setuptools/_core_metadata.py              |  7 +++-
 setuptools/config/_apply_pyprojecttoml.py |  3 +-
 setuptools/tests/test_core_metadata.py    | 42 +++++++++++++++++++++++
 3 files changed, 50 insertions(+), 2 deletions(-)

diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 60b47b375e..975e9ceaa1 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -304,7 +304,12 @@ def _distribution_fullname(name: str, version: str) -> str:
     "home-page": "url",
     "keywords": "keywords",
     "license": "license",
-    # "license-file": "license_files", # XXX: does PEP 639 exempt Dynamic ??
+    # XXX: License-File is complicated because the user gives globs that are expanded
+    #      during the build. Without special handling it is likely always
+    #      marked as Dynamic, which is an acceptable outcome according to:
+    #      https://github.com/pypa/setuptools/issues/4629#issuecomment-2331233677
+    "license-file": "license_files",
+    "license-expression": "license_expression",  # PEP 639
     "maintainer": "maintainer",
     "maintainer-email": "maintainer_email",
     "obsoletes": "obsoletes",
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index f23d0d2de3..12c8b23108 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -459,7 +459,8 @@ def _acessor(obj):
     "description": _attrgetter("metadata.description"),
     "readme": _attrgetter("metadata.long_description"),
     "requires-python": _some_attrgetter("python_requires", "metadata.python_requires"),
-    "license": _attrgetter("metadata.license"),
+    "license": _some_attrgetter("metadata.license_expression", "metadata.license"),
+    # XXX: Should we wait until someone requires `license_files`?
     "authors": _some_attrgetter("metadata.author", "metadata.author_email"),
     "maintainers": _some_attrgetter("metadata.maintainer", "metadata.maintainer_email"),
     "keywords": _attrgetter("metadata.keywords"),
diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index b1edb79b40..548cb869f7 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -12,6 +12,7 @@
 from pathlib import Path
 from unittest.mock import Mock
 
+import jaraco.path
 import pytest
 from packaging.metadata import Metadata
 from packaging.requirements import Requirement
@@ -442,6 +443,7 @@ class TestPEP643:
             readme = {text = "Long\\ndescription", content-type = "text/plain"}
             keywords = ["one", "two"]
             dependencies = ["requests"]
+            license = "AGPL-3.0-or-later"
             [tool.setuptools]
             provides = ["abcd"]
             obsoletes = ["abcd"]
@@ -490,6 +492,46 @@ def test_modified_fields_marked_as_dynamic(self, file, fields, tmpdir_cwd):
         metadata = _get_metadata(dist)
         assert set(metadata.get_all("Dynamic")) == set(fields)
 
+    @pytest.mark.parametrize(
+        "extra_toml",
+        [
+            "# Let setuptools autofill license-files",
+            "license-files = ['LICENSE*', 'AUTHORS*', 'NOTICE']",
+        ],
+    )
+    def test_license_files_dynamic(self, extra_toml, tmpdir_cwd):
+        # For simplicity (and for the time being) setuptools does not make
+        # any special effort to guarantee `License-File` is considered static.
+        # Instead we rely on the fact that, although suboptimal, it is OK to have
+        # it as dynamic, as per:
+        # https://github.com/pypa/setuptools/issues/4629#issuecomment-2331233677
+        files = {
+            "pyproject.toml": self.STATIC_CONFIG["pyproject.toml"].replace(
+                'license = "AGPL-3.0-or-later"',
+                f"dynamic = ['license']\n{extra_toml}",
+            ),
+            "LICENSE.md": "--- mock license ---",
+            "NOTICE": "--- mock notice ---",
+            "AUTHORS.txt": "--- me ---",
+        }
+        # Sanity checks:
+        assert extra_toml in files["pyproject.toml"]
+        assert 'license = "AGPL-3.0-or-later"' not in extra_toml
+
+        jaraco.path.build(files)
+        dist = _makedist(license_expression="AGPL-3.0-or-later")
+        metadata = _get_metadata(dist)
+        assert set(metadata.get_all("Dynamic")) == {
+            'license-file',
+            'license-expression',
+        }
+        assert metadata.get("License-Expression") == "AGPL-3.0-or-later"
+        assert set(metadata.get_all("License-File")) == {
+            "NOTICE",
+            "AUTHORS.txt",
+            "LICENSE.md",
+        }
+
 
 def _makedist(**attrs):
     dist = Distribution(attrs)

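The field-to-attribute mapping above feeds the PEP 643 logic: a core-metadata field is emitted under `Dynamic:` when its backing attribute was not statically read from `pyproject.toml`. A rough standalone sketch of that idea (the names and the `Static` marker are illustrative, not the setuptools internals):

    class Static(str):
        """Marker for values read verbatim from pyproject.toml."""

    FIELD_TO_ATTR = {
        "license-expression": "license_expression",
        "license-file": "license_files",
        "summary": "description",
    }

    class Meta:
        description = Static("demo package")    # statically configured
        license_expression = "MIT"              # computed later -> dynamic
        license_files = ["LICENSE"]             # expanded from globs -> dynamic

    def dynamic_fields(meta) -> list[str]:
        return [
            field
            for field, attr in FIELD_TO_ATTR.items()
            if not isinstance(getattr(meta, attr), Static)
        ]

    print(dynamic_fields(Meta()))  # ['license-expression', 'license-file']
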
From 17dc3dfe4c24c2cf022c3d45e241a691045fee3a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Feb 2025 13:18:56 +0000
Subject: [PATCH 1445/1761] Improve license/license_expression relationship
 with 'dynamic' in pyproject.toml

---
 setuptools/config/_apply_pyprojecttoml.py           | 6 +++++-
 setuptools/tests/config/test_apply_pyprojecttoml.py | 6 ++++++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 12c8b23108..7f4c3d8b7e 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -201,6 +201,9 @@ def _license(dist: Distribution, val: str | dict, root_dir: StrPath | None):
     from setuptools.config import expand
 
     if isinstance(val, str):
+        if getattr(dist.metadata, "license", None):
+            SetuptoolsWarning.emit("`license` overwritten by `pyproject.toml`")
+            dist.metadata.license = None
         _set_config(dist, "license_expression", _static.Str(val))
     else:
         pypa_guides = "guides/writing-pyproject-toml/#license"
@@ -476,8 +479,9 @@ def _acessor(obj):
 
 _RESET_PREVIOUSLY_DEFINED: dict = {
     # Fix improper setting: given in `setup.py`, but not listed in `dynamic`
+    # Use "immutable" data structures to avoid in-place modification
     # dict: pyproject name => value to which reset
-    "license": _static.EMPTY_DICT,
+    "license": "",
     "authors": _static.EMPTY_LIST,
     "maintainers": _static.EMPTY_LIST,
     "keywords": _static.EMPTY_LIST,
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 848f44745f..bf8775ee36 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -554,6 +554,11 @@ def pyproject(self, tmp_path, dynamic, extra_content=""):
     @pytest.mark.parametrize(
         ("attr", "field", "value"),
         [
+            ("license_expression", "license", "MIT"),
+            pytest.param(
+                *("license", "license", "Not SPDX"),
+                marks=[pytest.mark.filterwarnings("ignore:.*license. overwritten")],
+            ),
             ("classifiers", "classifiers", ["Private :: Classifier"]),
             ("entry_points", "scripts", {"console_scripts": ["foobar=foobar:main"]}),
             ("entry_points", "gui-scripts", {"gui_scripts": ["bazquux=bazquux:main"]}),
@@ -579,6 +584,7 @@ def test_not_listed_in_dynamic(self, tmp_path, attr, field, value):
     @pytest.mark.parametrize(
         ("attr", "field", "value"),
         [
+            ("license_expression", "license", "MIT"),
             ("install_requires", "dependencies", []),
             ("extras_require", "optional-dependencies", {}),
             ("install_requires", "dependencies", ["six"]),

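`_some_attrgetter` (already used for `authors` and `maintainers`) returns the first populated attribute in a chain, so after this patch a statically set SPDX expression also counts as the `license` field being defined. A small sketch of that fallback pattern, not the actual helper:

    from functools import reduce

    def some_attrgetter(*paths):
        def getter(obj):
            for path in paths:
                # Follow a dotted path like "metadata.license_expression".
                value = reduce(getattr, path.split("."), obj)
                if value is not None and value != "":
                    return value
            return None
        return getter

    class Metadata:
        license_expression = "MIT"
        license = None  # reset by _RESET_PREVIOUSLY_DEFINED if not dynamic

    class Dist:
        metadata = Metadata()

    get_license = some_attrgetter("metadata.license_expression", "metadata.license")
    print(get_license(Dist()))  # -> 'MIT'
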
From 01396dbd1f79f5d41fa015783163b8a2cfe44d07 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Feb 2025 13:41:32 +0000
Subject: [PATCH 1446/1761] Add comments and test about dynamic x license_files

---
 setuptools/config/_apply_pyprojecttoml.py     |  4 +++-
 .../tests/config/test_apply_pyprojecttoml.py  | 20 +++++++++++++++++++
 2 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 7f4c3d8b7e..06c9e6413f 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -479,9 +479,11 @@ def _acessor(obj):
 
 _RESET_PREVIOUSLY_DEFINED: dict = {
     # Fix improper setting: given in `setup.py`, but not listed in `dynamic`
-    # Use "immutable" data structures to avoid in-place modification
+    # Use "immutable" data structures to avoid in-place modification.
     # dict: pyproject name => value to which reset
     "license": "",
+    # XXX: `license-file` is currently not considered in the context of `dynamic`.
+    #      See TestPresetField.test_license_files_exempt_from_dynamic
     "authors": _static.EMPTY_LIST,
     "maintainers": _static.EMPTY_LIST,
     "keywords": _static.EMPTY_LIST,
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index bf8775ee36..bcea9a8847 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -598,6 +598,26 @@ def test_listed_in_dynamic(self, tmp_path, attr, field, value):
         dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
         assert dist_value == value
 
+    def test_license_files_exempt_from_dynamic(self, monkeypatch, tmp_path):
+        """
+        license-file is currently not considered in the context of dynamic.
+        As of 2025-02-19, https://packaging.python.org/en/latest/specifications/pyproject-toml/#license-files
+        allows setuptools to fill in `license-files` the way it sees fit:
+
+        > If the license-files key is not defined, tools can decide how to handle license files.
+        > For example they can choose not to include any files or use their own
+        > logic to discover the appropriate files in the distribution.
+
+        Using license_files from setup.py to fill in the value is in accordance
+        with this rule.
+        """
+        monkeypatch.chdir(tmp_path)
+        pyproject = self.pyproject(tmp_path, [])
+        dist = makedist(tmp_path, license_files=["LIC*"])
+        (tmp_path / "LIC1").write_text("42", encoding="utf-8")
+        dist = pyprojecttoml.apply_configuration(dist, pyproject)
+        assert dist.metadata.license_files == ["LIC1"]
+
     def test_warning_overwritten_dependencies(self, tmp_path):
         src = "[project]\nname='pkg'\nversion='0.1'\ndependencies=['click']\n"
         pyproject = tmp_path / "pyproject.toml"

From 282177c5df9c236a9824b3e2fb4561c66a979973 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Feb 2025 14:00:10 +0000
Subject: [PATCH 1447/1761] Apply suggestions from code review

---
 setuptools/config/_apply_pyprojecttoml.py | 3 ++-
 setuptools/dist.py                        | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 06c9e6413f..ffa3fc3c49 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -463,7 +463,8 @@ def _acessor(obj):
     "readme": _attrgetter("metadata.long_description"),
     "requires-python": _some_attrgetter("python_requires", "metadata.python_requires"),
     "license": _some_attrgetter("metadata.license_expression", "metadata.license"),
-    # XXX: Should we wait until someone requires `license_files`?
+    # XXX: `license-file` is currently not considered in the context of `dynamic`.
+    #      See TestPresetField.test_license_files_exempt_from_dynamic
     "authors": _some_attrgetter("metadata.author", "metadata.author_email"),
     "maintainers": _some_attrgetter("metadata.maintainer", "metadata.maintainer_email"),
     "keywords": _attrgetter("metadata.keywords"),
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 15b1967be3..133948eb08 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -412,7 +412,7 @@ def _finalize_license_expression(self) -> None:
         >>> _static.is_static(dist.metadata.license_expression)
         True
         >>> dist._finalize_license_expression()
-        >>> _static.is_static(dist.metadata.license_expression)  # preserve static-ness"
+        >>> _static.is_static(dist.metadata.license_expression)  # preserve "static-ness"
         True
         >>> print(dist.metadata.license_expression)
         MIT AND GPL-3.0-or-later

From 0bb59c250dbf20b49b20dd3fd9badcfaa0da6aaf Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Thu, 20 Feb 2025 11:58:13 +0100
Subject: [PATCH 1448/1761] Update link in userguide

---
 docs/userguide/pyproject_config.rst | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 3bf99bed1a..e4cee52aa3 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -99,7 +99,8 @@ Key                       Value Type (TOML)           Notes
                                                       See :doc:`/userguide/datafiles`.
 ``exclude-package-data``  table/inline-table          Empty by default. See :doc:`/userguide/datafiles`.
 ------------------------- --------------------------- -------------------------
-``license-files``         array of glob patterns      **Deprecated** - use ``project.license-files`` instead. See :doc:`PyPUG:guides/writing-pyproject-toml`
+``license-files``         array of glob patterns      **Deprecated** - use ``project.license-files`` instead. See
+                                                      :external+PyPUG:ref:`Writing your pyproject.toml <writing-pyproject-toml>`
+                                                      (by default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``)
 ``data-files``            table/inline-table          **Discouraged** - check :doc:`/userguide/datafiles`.
                                                       Whenever possible, consider using data files inside the package directories.

From 732ec1afb8456809828c2205cc3d6ced2ea8ae61 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 12:30:36 +0000
Subject: [PATCH 1449/1761] Add minimal validation to license glob patterns

---
 setuptools/dist.py | 42 +++++++++++++++++++++++++++++++++---------
 1 file changed, 33 insertions(+), 9 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 133948eb08..5c49f89633 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -6,7 +6,7 @@
 import os
 import re
 import sys
-from collections.abc import Iterable, MutableMapping, Sequence
+from collections.abc import Iterable, Iterator, MutableMapping, Sequence
 from glob import iglob
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Union
@@ -461,14 +461,24 @@ def _finalize_license_files(self) -> None:
             patterns = ['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']
 
         self.metadata.license_files = list(
-            map(
-                lambda path: path.replace(os.sep, "/"),
-                unique_everseen(self._expand_patterns(patterns)),
-            )
+            unique_everseen(self._expand_patterns(patterns)),
         )
 
-    @staticmethod
-    def _expand_patterns(patterns):
+        if license_files and not self.metadata.license_files:
+            # Pattern explicitly given but no file found
+            if not self.metadata.license_files:
+                SetuptoolsDeprecationWarning.emit(
+                    "Cannot find any license files for the given patterns.",
+                    f"The glob patterns {patterns!r} do not match any file.",
+                    due_date=(2026, 2, 20),
+                    # Warning introduced on 2025/02/18
+                    # PEP 639 requires us to error, but as a transition period
+                    # we will only issue a warning to give people time to prepare.
+                    # After the transition, this should raise an InvalidConfigError.
+                )
+
+    @classmethod
+    def _expand_patterns(cls, patterns: list[str]) -> Iterator[str]:
         """
         >>> list(Distribution._expand_patterns(['LICENSE']))
         ['LICENSE']
@@ -476,12 +486,26 @@ def _expand_patterns(patterns):
         ['pyproject.toml', 'LICENSE']
         """
         return (
-            path
+            path.replace(os.sep, "/")
             for pattern in patterns
-            for path in sorted(iglob(pattern, recursive=True))
+            for path in sorted(cls._find_pattern(pattern))
             if not path.endswith('~') and os.path.isfile(path)
         )
 
+    @staticmethod
+    def _find_pattern(pattern: str) -> Iterator[str]:
+        """
+        >>> list(Distribution._find_pattern("setuptools/**/pyprojecttoml.py"))
+        ['setuptools/config/pyprojecttoml.py']
+        >>> list(Distribution._find_pattern("../LICENSE"))
+        Traceback (most recent call last):
+        ...
+        setuptools.errors.InvalidConfigError: Pattern '../LICENSE' cannot contain '..'
+        """
+        if ".." in pattern:  # XXX: Any other invalid character?
+            raise InvalidConfigError(f"Pattern {pattern!r} cannot contain '..'")
+        return iglob(pattern, recursive=True)
+
     # FIXME: 'Distribution._parse_config_files' is too complex (14)
     def _parse_config_files(self, filenames=None):  # noqa: C901
         """

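Condensed into a standalone function, the expansion plus the new `'..'` guard behaves roughly like the sketch below (raising `ValueError` where setuptools raises `InvalidConfigError`):

    import os
    from glob import iglob

    def expand_license_patterns(patterns):
        for pattern in patterns:
            if ".." in pattern:
                raise ValueError(f"Pattern {pattern!r} cannot contain '..'")
            for path in sorted(iglob(pattern, recursive=True)):
                # Skip editor backups and anything that is not a regular file.
                if not path.endswith("~") and os.path.isfile(path):
                    yield path.replace(os.sep, "/")

    print(list(expand_license_patterns(["LICEN[CS]E*", "COPYING*"])))
    try:
        list(expand_license_patterns(["../LICENSE"]))
    except ValueError as exc:
        print(exc)  # Pattern '../LICENSE' cannot contain '..'
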
From d0a55a0e72727aa11fd47b76ae1610be05f9a1ea Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 12:47:50 +0000
Subject: [PATCH 1450/1761] Skip _finalize_license_files in test without source
 tree

---
 setuptools/tests/test_core_metadata.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/setuptools/tests/test_core_metadata.py b/setuptools/tests/test_core_metadata.py
index 548cb869f7..0d925111fa 100644
--- a/setuptools/tests/test_core_metadata.py
+++ b/setuptools/tests/test_core_metadata.py
@@ -373,6 +373,9 @@ def dist(self, request, monkeypatch, tmp_path):
         monkeypatch.chdir(tmp_path)
         monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.42"))
         monkeypatch.setattr(expand, "read_files", Mock(return_value="hello world"))
+        monkeypatch.setattr(
+            Distribution, "_finalize_license_files", Mock(return_value=None)
+        )
         if request.param is None:
             yield self.base_example()
         else:

From 868c50edc8d1a44fb36328f0bd074453ba4b3879 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 12:58:44 +0000
Subject: [PATCH 1451/1761] Add test for unmatched license-files pattern

---
 setuptools/tests/config/test_apply_pyprojecttoml.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index bcea9a8847..733c2e9bdc 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -461,6 +461,14 @@ def test_default_patterns(self, tmp_path):
         assert (tmp_path / "LICENSE.txt").exists()  # from base example
         assert set(dist.metadata.license_files) == {*license_files, "LICENSE.txt"}
 
+    def test_missing_patterns(self, tmp_path):
+        pyproject = self.base_pyproject_license_pep639(tmp_path)
+        assert list(tmp_path.glob("_FILE*")) == []  # sanity check
+
+        msg = "Cannot find any license files for the given patterns."
+        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+            pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
     def test_deprecated_file_expands_to_text(self, tmp_path):
         """Make sure the old example with ``license = {text = ...}`` works"""
 

From edbd1cfcb6a75958412b66e2d5b7428aba76583f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 13:08:49 +0000
Subject: [PATCH 1452/1761] Add news fragment

---
 newsfragments/4838.feature.rst | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 newsfragments/4838.feature.rst

diff --git a/newsfragments/4838.feature.rst b/newsfragments/4838.feature.rst
new file mode 100644
index 0000000000..15e000c56c
--- /dev/null
+++ b/newsfragments/4838.feature.rst
@@ -0,0 +1,3 @@
+Added simple validation for given glob patterns in ``license-files``:
+a warning will be generated if no file is matched.
+Invalid glob patterns can raise an exception.

From d05bcc2e386bcaee2ccee44f5e5135a87dcaff49 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 18 Feb 2025 13:34:29 +0000
Subject: [PATCH 1453/1761] Avoid path separator problem in doctest on windows

---
 setuptools/dist.py | 14 ++++++--------
 1 file changed, 6 insertions(+), 8 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 5c49f89633..45f2fa4f73 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -484,6 +484,12 @@ def _expand_patterns(cls, patterns: list[str]) -> Iterator[str]:
         ['LICENSE']
         >>> list(Distribution._expand_patterns(['pyproject.toml', 'LIC*']))
         ['pyproject.toml', 'LICENSE']
+        >>> list(Distribution._expand_patterns(['setuptools/**/pyprojecttoml.py']))
+        ['setuptools/config/pyprojecttoml.py']
+        >>> list(Distribution._expand_patterns(['../LICENSE']))
+        Traceback (most recent call last):
+        ...
+        setuptools.errors.InvalidConfigError: Pattern '../LICENSE' cannot contain '..'
         """
         return (
             path.replace(os.sep, "/")
@@ -494,14 +500,6 @@ def _expand_patterns(cls, patterns: list[str]) -> Iterator[str]:
 
     @staticmethod
     def _find_pattern(pattern: str) -> Iterator[str]:
-        """
-        >>> list(Distribution._find_pattern("setuptools/**/pyprojecttoml.py"))
-        ['setuptools/config/pyprojecttoml.py']
-        >>> list(Distribution._find_pattern("../LICENSE"))
-        Traceback (most recent call last):
-        ...
-        setuptools.errors.InvalidConfigError: Pattern '../LICENSE' cannot contain '..'
-        """
         if ".." in pattern:  # XXX: Any other invalid character?
             raise InvalidConfigError(f"Pattern {pattern!r} cannot contain '..'")
         return iglob(pattern, recursive=True)

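The motivation for the move: `iglob` yields paths using the native `os.sep`, so a doctest expecting forward slashes only matches after the normalization that happens in `_expand_patterns`. For example:

    import os

    native = os.path.join("setuptools", "config", "pyprojecttoml.py")
    # On Windows `native` contains backslashes, which would not match the
    # doctest output; normalizing on os.sep keeps the example portable.
    print(native.replace(os.sep, "/"))  # setuptools/config/pyprojecttoml.py
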
From c31ebdc4749f1439972451a7b9b28734281d3830 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Feb 2025 13:20:51 +0000
Subject: [PATCH 1454/1761] Validate license-files glob patterns individually

Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
---
 setuptools/dist.py                            | 80 +++++++++++++------
 .../tests/config/test_apply_pyprojecttoml.py  |  2 +-
 2 files changed, 55 insertions(+), 27 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 45f2fa4f73..ce17217f69 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -7,7 +7,7 @@
 import re
 import sys
 from collections.abc import Iterable, Iterator, MutableMapping, Sequence
-from glob import iglob
+from glob import glob
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Union
 
@@ -459,26 +459,16 @@ def _finalize_license_files(self) -> None:
             # See https://wheel.readthedocs.io/en/stable/user_guide.html
             # -> 'Including license files in the generated wheel file'
             patterns = ['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']
+            files = self._expand_patterns(patterns, enforce_match=False)
+        else:  # Patterns explicitly given by the user
+            files = self._expand_patterns(patterns, enforce_match=True)
 
-        self.metadata.license_files = list(
-            unique_everseen(self._expand_patterns(patterns)),
-        )
-
-        if license_files and not self.metadata.license_files:
-            # Pattern explicitly given but no file found
-            if not self.metadata.license_files:
-                SetuptoolsDeprecationWarning.emit(
-                    "Cannot find any license files for the given patterns.",
-                    f"The glob patterns {patterns!r} do not match any file.",
-                    due_date=(2026, 2, 20),
-                    # Warning introduced on 2025/02/18
-                    # PEP 639 requires us to error, but as a transition period
-                    # we will only issue a warning to give people time to prepare.
-                    # After the transition, this should raise an InvalidConfigError.
-                )
+        self.metadata.license_files = list(unique_everseen(files))
 
     @classmethod
-    def _expand_patterns(cls, patterns: list[str]) -> Iterator[str]:
+    def _expand_patterns(
+        cls, patterns: list[str], enforce_match: bool = True
+    ) -> Iterator[str]:
         """
         >>> list(Distribution._expand_patterns(['LICENSE']))
         ['LICENSE']
@@ -486,23 +476,61 @@ def _expand_patterns(cls, patterns: list[str]) -> Iterator[str]:
         ['pyproject.toml', 'LICENSE']
         >>> list(Distribution._expand_patterns(['setuptools/**/pyprojecttoml.py']))
         ['setuptools/config/pyprojecttoml.py']
-        >>> list(Distribution._expand_patterns(['../LICENSE']))
-        Traceback (most recent call last):
-        ...
-        setuptools.errors.InvalidConfigError: Pattern '../LICENSE' cannot contain '..'
         """
         return (
             path.replace(os.sep, "/")
             for pattern in patterns
-            for path in sorted(cls._find_pattern(pattern))
+            for path in sorted(cls._find_pattern(pattern, enforce_match))
             if not path.endswith('~') and os.path.isfile(path)
         )
 
     @staticmethod
-    def _find_pattern(pattern: str) -> Iterator[str]:
-        if ".." in pattern:  # XXX: Any other invalid character?
+    def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
+        r"""
+        >>> Distribution._find_pattern("LICENSE")
+        ['LICENSE']
+        >>> Distribution._find_pattern("/LICENSE.MIT")
+        Traceback (most recent call last):
+        ...
+        setuptools.errors.InvalidConfigError: Pattern '/LICENSE.MIT' should be relative...
+        >>> Distribution._find_pattern("../LICENSE.MIT")
+        Traceback (most recent call last):
+        ...
+        setuptools.errors.InvalidConfigError: ...Pattern '../LICENSE.MIT' cannot contain '..'
+        >>> Distribution._find_pattern("LICEN{CSE*")
+        Traceback (most recent call last):
+        ...
+        setuptools.warnings.SetuptoolsDeprecationWarning: ...Pattern 'LICEN{CSE*' contains invalid characters...
+        """
+        if ".." in pattern:
             raise InvalidConfigError(f"Pattern {pattern!r} cannot contain '..'")
-        return iglob(pattern, recursive=True)
+        if pattern.startswith((os.sep, "/")) or ":\\" in pattern:
+            raise InvalidConfigError(
+                f"Pattern {pattern!r} should be relative and must not start with '/'"
+            )
+        if re.match(r'^[\w\-\.\/\*\?\[\]]+$', pattern) is None:
+            pypa_guides = "specifications/pyproject-toml/#license-files"
+            SetuptoolsDeprecationWarning.emit(
+                "Please provide a valid glob pattern.",
+                "Pattern {pattern!r} contains invalid characters.",
+                pattern=pattern,
+                see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
+                due_date=(2026, 2, 20),  # Introduced in 2025-02-20
+            )
+
+        found = glob(pattern, recursive=True)
+
+        if enforce_match and not found:
+            SetuptoolsDeprecationWarning.emit(
+                "Cannot find any files for the given pattern.",
+                "Pattern {pattern!r} did not match any files.",
+                pattern=pattern,
+                due_date=(2026, 2, 20),  # Introduced in 2025-02-20
+                # PEP 639 requires us to error, but as a transition period
+                # we will only issue a warning to give people time to prepare.
+                # After the transition, this should raise an InvalidConfigError.
+            )
+        return found
 
     # FIXME: 'Distribution._parse_config_files' is too complex (14)
     def _parse_config_files(self, filenames=None):  # noqa: C901
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 733c2e9bdc..f1a536bb4a 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -465,7 +465,7 @@ def test_missing_patterns(self, tmp_path):
         pyproject = self.base_pyproject_license_pep639(tmp_path)
         assert list(tmp_path.glob("_FILE*")) == []  # sanity check
 
-        msg = "Cannot find any license files for the given patterns."
+        msg = "Cannot find any files for the given pattern.*"
         with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
             pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
 

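The character whitelist can be exercised on its own: the regex accepts word characters plus `- . /` and the glob metacharacters `* ? [ ]`, and rejects anything else (the `..` and absolute-path cases are handled by separate checks). A quick demonstration using the same expression as the patch:

    import re

    VALID = re.compile(r'^[\w\-\.\/\*\?\[\]]+$')

    for pattern in ["LICEN[CS]E*", "licenses/*.txt", "LICEN{CSE*", "LICENSE 2"]:
        status = "ok" if VALID.match(pattern) else "invalid characters"
        print(f"{pattern!r}: {status}")
    # 'LICEN[CS]E*': ok
    # 'licenses/*.txt': ok
    # 'LICEN{CSE*': invalid characters
    # 'LICENSE 2': invalid characters
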
From 5b5b2ab55c9454f756a962ec9584aee549f10414 Mon Sep 17 00:00:00 2001
From: Sam James 
Date: Fri, 21 Feb 2025 22:06:05 +0000
Subject: [PATCH 1455/1761] Update CFLAGS docs to reflect distutils change

Update the docs to reflect the distutils change for https://github.com/pypa/distutils/issues/299
in af7fcbb0d56ae14753db53acd8792eddb4d8f814.

Closes: https://github.com/pypa/setuptools/issues/4836
---
 docs/userguide/ext_modules.rst | 3 +--
 newsfragments/4836.bugfix.rst  | 1 +
 2 files changed, 2 insertions(+), 2 deletions(-)
 create mode 100644 newsfragments/4836.bugfix.rst

diff --git a/docs/userguide/ext_modules.rst b/docs/userguide/ext_modules.rst
index 19954f50e4..4abc3dcd04 100644
--- a/docs/userguide/ext_modules.rst
+++ b/docs/userguide/ext_modules.rst
@@ -110,8 +110,7 @@ The compiler options appear in the command line in the following order:
 
 .. Reference: "compiler_so" and distutils.ccompiler.gen_preprocess_options, CCompiler.compile, UnixCCompiler._compile
 
-* first, the options provided by the ``sysconfig`` variable ``CFLAGS``,
-* then, the options provided by the environment variables ``CFLAGS`` and ``CPPFLAGS``,
+* first, the options provided by the environment variables ``CFLAGS`` and ``CPPFLAGS``,
 * then, the options provided by the ``sysconfig`` variable ``CCSHARED``,
 * then, a ``-I`` option for each element of ``Extension.include_dirs``,
 * finally, the options provided by ``Extension.extra_compile_args``.
diff --git a/newsfragments/4836.bugfix.rst b/newsfragments/4836.bugfix.rst
new file mode 100644
index 0000000000..bb8adf0872
--- /dev/null
+++ b/newsfragments/4836.bugfix.rst
@@ -0,0 +1 @@
+Fix documentation for recent CFLAGS distutils change. -- by :user:`thesamesam`

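For reference, the documented ordering can be sketched as a toy helper that assembles the options the way the updated list describes; this is an illustration of the documented behavior, not the distutils implementation:

    import os
    import shlex
    import sysconfig

    def compile_options(include_dirs, extra_compile_args):
        opts = []
        # 1. environment CFLAGS/CPPFLAGS (sysconfig CFLAGS no longer comes first)
        opts += shlex.split(os.environ.get("CFLAGS", ""))
        opts += shlex.split(os.environ.get("CPPFLAGS", ""))
        # 2. the sysconfig variable CCSHARED (may be unset on some platforms)
        opts += shlex.split(sysconfig.get_config_var("CCSHARED") or "")
        # 3. a -I option per entry of Extension.include_dirs
        opts += [f"-I{d}" for d in include_dirs]
        # 4. Extension.extra_compile_args last
        opts += extra_compile_args
        return opts

    print(compile_options(["include"], ["-O3"]))
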
From c1f2daa56dc3baf206bf78886fb34d8dd70d4dc7 Mon Sep 17 00:00:00 2001
From: shenxianpeng 
Date: Sat, 22 Feb 2025 13:34:19 +0000
Subject: [PATCH 1456/1761] Fix TODO comments and typos

---
 pkg_resources/__init__.py                   | 3 ---
 setuptools/_static.py                       | 2 +-
 setuptools/_vendor/autocommand/autoparse.py | 2 +-
 setuptools/_vendor/packaging/metadata.py    | 8 ++++----
 setuptools/command/editable_wheel.py        | 6 +++---
 setuptools/command/install.py               | 2 +-
 setuptools/tests/test_build_py.py           | 2 +-
 setuptools/unicode_utils.py                 | 2 +-
 8 files changed, 12 insertions(+), 15 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 68feeb0593..86caf45177 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -3326,9 +3326,6 @@ def has_version(self) -> bool:
         except ValueError:
             issue_warning("Unbuilt egg for " + repr(self))
             return False
-        except SystemError:
-            # TODO: remove this except clause when python/cpython#103632 is fixed.
-            return False
         return True
 
     def clone(self, **kw: str | int | IResourceProvider | None) -> Self:
diff --git a/setuptools/_static.py b/setuptools/_static.py
index 075a0bcddf..af35862cf8 100644
--- a/setuptools/_static.py
+++ b/setuptools/_static.py
@@ -27,7 +27,7 @@ class Static:
 def _prevent_modification(target: type, method: str, copying: str) -> None:
     """
     Because setuptools is very flexible we cannot fully prevent
-    plugins and user customisations from modifying static values that were
+    plugins and user customizations from modifying static values that were
     parsed from config files.
     But we can attempt to block "in-place" mutations and identify when they
     were done.
diff --git a/setuptools/_vendor/autocommand/autoparse.py b/setuptools/_vendor/autocommand/autoparse.py
index 0276a3fae1..3ed80ed8a4 100644
--- a/setuptools/_vendor/autocommand/autoparse.py
+++ b/setuptools/_vendor/autocommand/autoparse.py
@@ -137,7 +137,7 @@ def _add_arguments(param, parser, used_char_args, add_nos):
             arg_spec['type'] = str
 
         # TODO: special case for list type.
-        #   - How to specificy type of list members?
+        #   - How to specify type of list members?
         #       - param: [int]
         #       - param: int =[]
         #   - action='append' vs nargs='*'
diff --git a/setuptools/_vendor/packaging/metadata.py b/setuptools/_vendor/packaging/metadata.py
index 721f411cfc..7bbfee156d 100644
--- a/setuptools/_vendor/packaging/metadata.py
+++ b/setuptools/_vendor/packaging/metadata.py
@@ -193,10 +193,10 @@ def _parse_project_urls(data: list[str]) -> dict[str, str]:
         # be the missing value, then they'd have multiple '' values that
         # overwrite each other in a accumulating dict.
         #
-        # The other potentional issue is that it's possible to have the
+        # The other potential issue is that it's possible to have the
         # same label multiple times in the metadata, with no solid "right"
         # answer with what to do in that case. As such, we'll do the only
-        # thing we can, which is treat the field as unparseable and add it
+        # thing we can, which is treat the field as unparsable and add it
         # to our list of unparsed fields.
         parts = [p.strip() for p in pair.split(",", 1)]
         parts.extend([""] * (max(0, 2 - len(parts))))  # Ensure 2 items
@@ -209,8 +209,8 @@ def _parse_project_urls(data: list[str]) -> dict[str, str]:
         label, url = parts
         if label in urls:
             # The label already exists in our set of urls, so this field
-            # is unparseable, and we can just add the whole thing to our
-            # unparseable data and stop processing it.
+            # is unparsable, and we can just add the whole thing to our
+            # unparsable data and stop processing it.
             raise KeyError("duplicate labels in project urls")
         urls[label] = url
 
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index b03e677757..1a544ec258 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -278,7 +278,7 @@ def _run_build_subcommands(self) -> None:
         This method implements a temporary workaround to support the ecosystem
         while the implementations catch up.
         """
-        # TODO: Once plugins/customisations had the chance to catch up, replace
+        # TODO: Once plugins/customizations had the chance to catch up, replace
         #       `self._run_build_subcommands()` with `self.run_command("build")`.
         #       Also remove _safely_run, TestCustomBuildPy. Suggested date: Aug/2023.
         build = self.get_finalized_command("build")
@@ -309,7 +309,7 @@ def _safely_run(self, cmd_name: str):
                 https://setuptools.pypa.io/en/latest/userguide/extension.html.
 
                 For the time being `setuptools` will silence this error and ignore
-                the faulty command, but this behaviour will change in future versions.
+                the faulty command, but this behavior will change in future versions.
                 """,
                 # TODO: define due_date
                 # There is a series of shortcomings with the available editable install
@@ -564,7 +564,7 @@ def _encode_pth(content: str) -> bytes:
     .pth files are always read with 'locale' encoding, the recommendation
     from the cpython core developers is to write them as ``open(path, "w")``
     and ignore warnings (see python/cpython#77102, pypa/setuptools#3937).
-    This function tries to simulate this behaviour without having to create an
+    This function tries to simulate this behavior without having to create an
     actual file, in a way that supports a range of active Python versions.
     (There seems to be some variety in the way different version of Python handle
     ``encoding=None``, not all of them use ``locale.getpreferredencoding(False)``
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index 741b140c70..15ef364688 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -68,7 +68,7 @@ def initialize_options(self):
             """,
             see_url="https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html",
             # TODO: Document how to bootstrap setuptools without install
-            #       (e.g. by unziping the wheel file)
+            #       (e.g. by unzipping the wheel file)
             #       and then add a due_date to this warning.
         )
 
diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py
index e64cfa2e4b..1e3a660833 100644
--- a/setuptools/tests/test_build_py.py
+++ b/setuptools/tests/test_build_py.py
@@ -168,7 +168,7 @@ def test_excluded_subpackages(tmpdir_cwd):
     with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
         # TODO: To fix #3260 we need some transition period to deprecate the
         # existing behavior of `include_package_data`. After the transition, we
-        # should remove the warning and fix the behaviour.
+        # should remove the warning and fix the behavior.
 
         if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib":
             # pytest.warns reset the warning filter temporarily
diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py
index a6e33f2e0d..f502f5b089 100644
--- a/setuptools/unicode_utils.py
+++ b/setuptools/unicode_utils.py
@@ -86,7 +86,7 @@ class _Utf8EncodingNeeded(SetuptoolsDeprecationWarning):
     """
 
     _DETAILS = """
-    Fallback behaviour for UTF-8 is considered **deprecated** and future versions of
+    Fallback behavior for UTF-8 is considered **deprecated** and future versions of
     `setuptools` may not implement it.
 
     Please encode {file!r} with "utf-8" to ensure future builds will succeed.

From f2febf68696b459c58ad13abde913efc10d8b297 Mon Sep 17 00:00:00 2001
From: shenxianpeng 
Date: Sun, 23 Feb 2025 15:18:23 +0000
Subject: [PATCH 1457/1761] Fix more typos

---
 NEWS.rst                                                | 4 ++--
 setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA | 4 ++--
 setuptools/_vendor/autocommand/autoasync.py             | 8 ++++----
 setuptools/_vendor/autocommand/autoparse.py             | 6 +++---
 setuptools/_vendor/inflect/__init__.py                  | 2 +-
 setuptools/_vendor/packaging/metadata.py                | 4 ++--
 6 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/NEWS.rst b/NEWS.rst
index e9e795005a..bf098df0a5 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -620,7 +620,7 @@ Bugfixes
 --------
 
 - In tests, rely on pytest-home for reusable fixture. (#4072)
-- Explicitely marked as ``Protocol`` and fixed missing ``self`` argument in interfaces ``pkg_resources.IMetadataProvider`` and ``pkg_resources.IResourceProvider`` -- by :user:`Avasam` (#4144)
+- Explicitly marked as ``Protocol`` and fixed missing ``self`` argument in interfaces ``pkg_resources.IMetadataProvider`` and ``pkg_resources.IResourceProvider`` -- by :user:`Avasam` (#4144)
 - Restored expectation that egg-link files would be named with dash separators for compatibility with pip prior to version 24. (#4167)
 
 
@@ -7151,7 +7151,7 @@ setuptools
 
 * Fixed invalid URL error catching. Old Setuptools #20.
 
-* Fixed invalid bootstraping with easy_install installation (Distribute #40).
+* Fixed invalid bootstrapping with easy_install installation (Distribute #40).
   Thanks to Florian Schulze for the help.
 
 * Removed buildout/bootstrap.py. A new repository will create a specific
diff --git a/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA b/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
index 32214fb440..e153f86374 100644
--- a/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
+++ b/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
@@ -320,7 +320,7 @@ optional arguments:
 STOP and STEP default to 1
 ```
 
-Even though autocommand is being applied to the `wrapper` returned by `print_yielded`, it still retreives the signature of the underlying `seq` function to create the argument parsing.
+Even though autocommand is being applied to the `wrapper` returned by `print_yielded`, it still retrieves the signature of the underlying `seq` function to create the argument parsing.
 
 ### Custom Parser
 
@@ -331,7 +331,7 @@ from argparse import ArgumentParser
 from autocommand import autocommand
 
 parser = ArgumentParser()
-# autocommand can't do optional positonal parameters
+# autocommand can't do optional positional parameters
 parser.add_argument('arg', nargs='?')
 # or mutually exclusive options
 group = parser.add_mutually_exclusive_group()
diff --git a/setuptools/_vendor/autocommand/autoasync.py b/setuptools/_vendor/autocommand/autoasync.py
index 688f7e0554..0d4825d761 100644
--- a/setuptools/_vendor/autocommand/autoasync.py
+++ b/setuptools/_vendor/autocommand/autoasync.py
@@ -54,14 +54,14 @@ async def _run_forever_coro(coro, args, kwargs, loop):
 def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
     '''
     Convert an asyncio coroutine into a function which, when called, is
-    evaluted in an event loop, and the return value returned. This is intented
+    evaluated in an event loop, and the return value returned. This is intended
     to make it easy to write entry points into asyncio coroutines, which
-    otherwise need to be explictly evaluted with an event loop's
+    otherwise need to be explicitly evaluated with an event loop's
     run_until_complete.
 
     If `loop` is given, it is used as the event loop to run the coro in. If it
-    is None (the default), the loop is retreived using asyncio.get_event_loop.
-    This call is defered until the decorated function is called, so that
+    is None (the default), the loop is retrieved using asyncio.get_event_loop.
+    This call is deferred until the decorated function is called, so that
     callers can install custom event loops or event loop policies after
     @autoasync is applied.
 
diff --git a/setuptools/_vendor/autocommand/autoparse.py b/setuptools/_vendor/autocommand/autoparse.py
index 3ed80ed8a4..b50fad83e7 100644
--- a/setuptools/_vendor/autocommand/autoparse.py
+++ b/setuptools/_vendor/autocommand/autoparse.py
@@ -34,7 +34,7 @@ class AnnotationError(AutocommandError):
 
 class PositionalArgError(AutocommandError):
     '''
-    Postional Arg Error: autocommand can't handle postional-only parameters
+    Positional Arg Error: autocommand can't handle positional-only parameters
     '''
 
 
@@ -197,7 +197,7 @@ def make_parser(func_sig, description, epilog, add_nos):
     used_char_args = {'h'}
 
     # Arange the params so that single-character arguments are first. This
-    # esnures they don't have to get --long versions. sorted is stable, so the
+    # ensures they don't have to get --long versions. sorted is stable, so the
     # parameters will otherwise still be in relative order.
     params = sorted(
         func_sig.parameters.values(),
@@ -249,7 +249,7 @@ def autoparse(
     while parameters *with* defaults become --options. Use annotations to set
     the type of the parameter.
 
-    The `desctiption` and `epilog` parameters corrospond to the same respective
+    The `description` and `epilog` parameters correspond to the same respective
     argparse parameters. If no description is given, it defaults to the
     decorated functions's docstring, if present.
 
diff --git a/setuptools/_vendor/inflect/__init__.py b/setuptools/_vendor/inflect/__init__.py
index 3eec27f4c6..d9a94b4c99 100644
--- a/setuptools/_vendor/inflect/__init__.py
+++ b/setuptools/_vendor/inflect/__init__.py
@@ -1837,7 +1837,7 @@ def get_si_pron(thecase, word, gender) -> str:
     re.VERBOSE,
 )
 
-# THIS PATTERN CODES THE BEGINNINGS OF ALL ENGLISH WORDS BEGINING WITH A
+# THIS PATTERN CODES THE BEGINNINGS OF ALL ENGLISH WORDS BEGINNING WITH A
 # 'y' FOLLOWED BY A CONSONANT. ANY OTHER Y-CONSONANT PREFIX THEREFORE
 # IMPLIES AN ABBREVIATION.
 
diff --git a/setuptools/_vendor/packaging/metadata.py b/setuptools/_vendor/packaging/metadata.py
index 7bbfee156d..6a651deb44 100644
--- a/setuptools/_vendor/packaging/metadata.py
+++ b/setuptools/_vendor/packaging/metadata.py
@@ -424,7 +424,7 @@ def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
             except KeyError:
                 unparsed[name] = value
         # Nothing that we've done has managed to parse this, so it'll just
-        # throw it in our unparseable data and move on.
+        # throw it in our unparsable data and move on.
         else:
             unparsed[name] = value
 
@@ -441,7 +441,7 @@ def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
     else:
         if payload:
             # Check to see if we've already got a description, if so then both
-            # it, and this body move to unparseable.
+            # it, and this body move to unparsable.
             if "description" in raw:
                 description_header = cast(str, raw.pop("description"))
                 unparsed.setdefault("description", []).extend(

From e470e9e97da8621dd991e06057ef97db981e897d Mon Sep 17 00:00:00 2001
From: shenxianpeng 
Date: Sun, 23 Feb 2025 15:21:07 +0000
Subject: [PATCH 1458/1761] Revert removal of TODO comment code

---
 pkg_resources/__init__.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 86caf45177..68feeb0593 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -3326,6 +3326,9 @@ def has_version(self) -> bool:
         except ValueError:
             issue_warning("Unbuilt egg for " + repr(self))
             return False
+        except SystemError:
+            # TODO: remove this except clause when python/cpython#103632 is fixed.
+            return False
         return True
 
     def clone(self, **kw: str | int | IResourceProvider | None) -> Self:

From c97b282ffe4fd3de258906726a2dc36eda295e0c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 17 Feb 2025 18:40:53 +0000
Subject: [PATCH 1459/1761] [CI] Address problems with `cygwin`
 (pypa/setuptools#4832, pypa/distutils#328)

---
 .github/workflows/main.yml | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index e608f6dbab..b5a9aef0cb 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -112,14 +112,19 @@ jobs:
             python${{ matrix.python }},
             python${{ matrix.python }}-devel,
             python${{ matrix.python }}-pytest,
-            python${{ matrix.python }}-tox,
+            python${{ matrix.python }}-pip,
             gcc-core,
             gcc-g++,
             ncompress
             git
+      - name: Install tox using pip
+        shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
+        run: |
+          # Workaround for https://github.com/pypa/setuptools/issues/4831
+          python -m pip install tox
       - name: Run tests
         shell: C:\cygwin\bin\env.exe CYGWIN_NOWINPATH=1 CHERE_INVOKING=1 C:\cygwin\bin\bash.exe -leo pipefail -o igncr {0}
-        run: tox
+        run: python -m tox
 
   test_msys2_mingw:
     strategy:

From 42b1c4c6a412b9a5f58c941d678c782f848fd6bf Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 17:19:35 -0400
Subject: [PATCH 1460/1761] Move compiler implementations into their own
 package.

---
 distutils/{ccompiler.py => compilers/C/base.py}         | 0
 distutils/{cygwinccompiler.py => compilers/C/cygwin.py} | 0
 distutils/{_msvccompiler.py => compilers/C/msvc.py}     | 0
 distutils/{unixccompiler.py => compilers/C/unix.py}     | 0
 distutils/{zosccompiler.py => compilers/C/zos.py}       | 0
 5 files changed, 0 insertions(+), 0 deletions(-)
 rename distutils/{ccompiler.py => compilers/C/base.py} (100%)
 rename distutils/{cygwinccompiler.py => compilers/C/cygwin.py} (100%)
 rename distutils/{_msvccompiler.py => compilers/C/msvc.py} (100%)
 rename distutils/{unixccompiler.py => compilers/C/unix.py} (100%)
 rename distutils/{zosccompiler.py => compilers/C/zos.py} (100%)

diff --git a/distutils/ccompiler.py b/distutils/compilers/C/base.py
similarity index 100%
rename from distutils/ccompiler.py
rename to distutils/compilers/C/base.py
diff --git a/distutils/cygwinccompiler.py b/distutils/compilers/C/cygwin.py
similarity index 100%
rename from distutils/cygwinccompiler.py
rename to distutils/compilers/C/cygwin.py
diff --git a/distutils/_msvccompiler.py b/distutils/compilers/C/msvc.py
similarity index 100%
rename from distutils/_msvccompiler.py
rename to distutils/compilers/C/msvc.py
diff --git a/distutils/unixccompiler.py b/distutils/compilers/C/unix.py
similarity index 100%
rename from distutils/unixccompiler.py
rename to distutils/compilers/C/unix.py
diff --git a/distutils/zosccompiler.py b/distutils/compilers/C/zos.py
similarity index 100%
rename from distutils/zosccompiler.py
rename to distutils/compilers/C/zos.py

From 52dad748002d1923d98207bca2af498ff9ddadf1 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 2 Sep 2024 18:07:58 -0400
Subject: [PATCH 1461/1761] Rename classes and add compatibility shims.

---
 distutils/_msvccompiler.py           |  3 +++
 distutils/ccompiler.py               | 21 +++++++++++++++++++
 distutils/compilers/C/base.py        | 28 ++++++++++++-------------
 distutils/compilers/C/cygwin.py      | 17 +++++++--------
 distutils/compilers/C/msvc.py        | 29 +++++++++++++-------------
 distutils/compilers/C/unix.py        | 19 +++++++++--------
 distutils/compilers/C/zos.py         |  8 +++----
 distutils/cygwinccompiler.py         | 31 ++++++++++++++++++++++++++++
 distutils/tests/test_msvccompiler.py |  5 +++--
 distutils/unixccompiler.py           |  3 +++
 distutils/zosccompiler.py            |  3 +++
 11 files changed, 115 insertions(+), 52 deletions(-)
 create mode 100644 distutils/_msvccompiler.py
 create mode 100644 distutils/ccompiler.py
 create mode 100644 distutils/cygwinccompiler.py
 create mode 100644 distutils/unixccompiler.py
 create mode 100644 distutils/zosccompiler.py

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
new file mode 100644
index 0000000000..34d9735b06
--- /dev/null
+++ b/distutils/_msvccompiler.py
@@ -0,0 +1,3 @@
+from .compilers.C import msvc
+
+MSVCCompiler = msvc.Compiler
diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
new file mode 100644
index 0000000000..1f788c50ae
--- /dev/null
+++ b/distutils/ccompiler.py
@@ -0,0 +1,21 @@
+from .compilers.C import base
+from .compilers.C.base import (
+    CompileError,
+    gen_lib_options,
+    gen_preprocess_options,
+    get_default_compiler,
+    new_compiler,
+    show_compilers,
+)
+
+__all__ = [
+    'CompileError',
+    'gen_lib_options',
+    'gen_preprocess_options',
+    'get_default_compiler',
+    'new_compiler',
+    'show_compilers',
+]
+
+
+CCompiler = base.Compiler
diff --git a/distutils/compilers/C/base.py b/distutils/compilers/C/base.py
index 714f13d8d3..de1dfa6827 100644
--- a/distutils/compilers/C/base.py
+++ b/distutils/compilers/C/base.py
@@ -1,6 +1,6 @@
 """distutils.ccompiler
 
-Contains CCompiler, an abstract base class that defines the interface
+Contains Compiler, an abstract base class that defines the interface
 for the Distutils compiler abstraction model."""
 
 import os
@@ -12,22 +12,22 @@
 
 from more_itertools import always_iterable
 
-from ._log import log
-from ._modified import newer_group
-from .dir_util import mkpath
-from .errors import (
+from ..._log import log
+from ..._modified import newer_group
+from ...dir_util import mkpath
+from ...errors import (
     CompileError,
     DistutilsModuleError,
     DistutilsPlatformError,
     LinkError,
     UnknownFileError,
 )
-from .file_util import move_file
-from .spawn import spawn
-from .util import execute, is_mingw, split_quoted
+from ...file_util import move_file
+from ...spawn import spawn
+from ...util import execute, is_mingw, split_quoted
 
 
-class CCompiler:
+class Compiler:
     """Abstract base class to define the interface that must be implemented
     by real compiler classes.  Also has some utility methods used by
     several compiler classes.
@@ -726,7 +726,7 @@ def link_shared_lib(
         target_lang=None,
     ):
         self.link(
-            CCompiler.SHARED_LIBRARY,
+            Compiler.SHARED_LIBRARY,
             objects,
             self.library_filename(output_libname, lib_type='shared'),
             output_dir,
@@ -757,7 +757,7 @@ def link_shared_object(
         target_lang=None,
     ):
         self.link(
-            CCompiler.SHARED_OBJECT,
+            Compiler.SHARED_OBJECT,
             objects,
             output_filename,
             output_dir,
@@ -786,7 +786,7 @@ def link_executable(
         target_lang=None,
     ):
         self.link(
-            CCompiler.EXECUTABLE,
+            Compiler.EXECUTABLE,
             objects,
             self.executable_filename(output_progname),
             output_dir,
@@ -978,9 +978,9 @@ def _make_out_path(self, output_dir, strip_dir, src_name):
     def _make_out_path_exts(cls, output_dir, strip_dir, src_name, extensions):
         r"""
         >>> exts = {'.c': '.o'}
-        >>> CCompiler._make_out_path_exts('.', False, '/foo/bar.c', exts).replace('\\', '/')
+        >>> Compiler._make_out_path_exts('.', False, '/foo/bar.c', exts).replace('\\', '/')
         './foo/bar.o'
-        >>> CCompiler._make_out_path_exts('.', True, '/foo/bar.c', exts).replace('\\', '/')
+        >>> Compiler._make_out_path_exts('.', True, '/foo/bar.c', exts).replace('\\', '/')
         './bar.o'
         """
         src = pathlib.PurePath(src_name)
diff --git a/distutils/compilers/C/cygwin.py b/distutils/compilers/C/cygwin.py
index 3c67524e6d..29edd0ea03 100644
--- a/distutils/compilers/C/cygwin.py
+++ b/distutils/compilers/C/cygwin.py
@@ -14,16 +14,16 @@
 import warnings
 from subprocess import check_output
 
-from .errors import (
+from ...errors import (
     CCompilerError,
     CompileError,
     DistutilsExecError,
     DistutilsPlatformError,
 )
-from .file_util import write_file
-from .sysconfig import get_config_vars
-from .unixccompiler import UnixCCompiler
-from .version import LooseVersion, suppress_known_deprecation
+from ...file_util import write_file
+from ...sysconfig import get_config_vars
+from ...version import LooseVersion, suppress_known_deprecation
+from . import unix
 
 
 def get_msvcr():
@@ -37,7 +37,7 @@ def get_msvcr():
 )
 
 
-class CygwinCCompiler(UnixCCompiler):
+class Compiler(unix.Compiler):
     """Handles the Cygwin port of the GNU C compiler to Windows."""
 
     compiler_type = 'cygwin'
@@ -197,8 +197,7 @@ def link(
         if not debug:
             extra_preargs.append("-s")
 
-        UnixCCompiler.link(
-            self,
+        super().link(
             target_desc,
             objects,
             output_filename,
@@ -240,7 +239,7 @@ def out_extensions(self):
 
 
 # the same as cygwin plus some additional parameters
-class Mingw32CCompiler(CygwinCCompiler):
+class MinGW32Compiler(Compiler):
     """Handles the Mingw32 port of the GNU C compiler to Windows."""
 
     compiler_type = 'mingw32'
diff --git a/distutils/compilers/C/msvc.py b/distutils/compilers/C/msvc.py
index 97b067c686..023ef39589 100644
--- a/distutils/compilers/C/msvc.py
+++ b/distutils/compilers/C/msvc.py
@@ -23,16 +23,17 @@
 
 from itertools import count
 
-from ._log import log
-from .ccompiler import CCompiler, gen_lib_options
-from .errors import (
+from ..._log import log
+from ...errors import (
     CompileError,
     DistutilsExecError,
     DistutilsPlatformError,
     LibError,
     LinkError,
 )
-from .util import get_host_platform, get_platform
+from ...util import get_host_platform, get_platform
+from . import base
+from .base import gen_lib_options
 
 
 def _find_vc2015():
@@ -226,7 +227,7 @@ def _get_vcvars_spec(host_platform, platform):
     return vc_hp if vc_hp == vc_plat else f'{vc_hp}_{vc_plat}'
 
 
-class MSVCCompiler(CCompiler):
+class Compiler(base.Compiler):
     """Concrete class that implements an interface to Microsoft Visual C++,
     as defined by the CCompiler abstract class."""
 
@@ -339,15 +340,15 @@ def initialize(self, plat_name=None):
         self.ldflags_static_debug = [*ldflags_debug]
 
         self._ldflags = {
-            (CCompiler.EXECUTABLE, None): self.ldflags_exe,
-            (CCompiler.EXECUTABLE, False): self.ldflags_exe,
-            (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug,
-            (CCompiler.SHARED_OBJECT, None): self.ldflags_shared,
-            (CCompiler.SHARED_OBJECT, False): self.ldflags_shared,
-            (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug,
-            (CCompiler.SHARED_LIBRARY, None): self.ldflags_static,
-            (CCompiler.SHARED_LIBRARY, False): self.ldflags_static,
-            (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug,
+            (base.Compiler.EXECUTABLE, None): self.ldflags_exe,
+            (base.Compiler.EXECUTABLE, False): self.ldflags_exe,
+            (base.Compiler.EXECUTABLE, True): self.ldflags_exe_debug,
+            (base.Compiler.SHARED_OBJECT, None): self.ldflags_shared,
+            (base.Compiler.SHARED_OBJECT, False): self.ldflags_shared,
+            (base.Compiler.SHARED_OBJECT, True): self.ldflags_shared_debug,
+            (base.Compiler.SHARED_LIBRARY, None): self.ldflags_static,
+            (base.Compiler.SHARED_LIBRARY, False): self.ldflags_static,
+            (base.Compiler.SHARED_LIBRARY, True): self.ldflags_static_debug,
         }
 
         self.initialized = True
diff --git a/distutils/compilers/C/unix.py b/distutils/compilers/C/unix.py
index 6c1116ae8f..fc97241f0a 100644
--- a/distutils/compilers/C/unix.py
+++ b/distutils/compilers/C/unix.py
@@ -21,13 +21,14 @@
 import shlex
 import sys
 
-from . import sysconfig
-from ._log import log
-from ._macos_compat import compiler_fixup
-from ._modified import newer
-from .ccompiler import CCompiler, gen_lib_options, gen_preprocess_options
-from .compat import consolidate_linker_args
-from .errors import CompileError, DistutilsExecError, LibError, LinkError
+from ... import sysconfig
+from ..._log import log
+from ..._macos_compat import compiler_fixup
+from ..._modified import newer
+from ...compat import consolidate_linker_args
+from ...errors import CompileError, DistutilsExecError, LibError, LinkError
+from . import base
+from .base import gen_lib_options, gen_preprocess_options
 
 # XXX Things not currently handled:
 #   * optimization/debug/warning flags; we just use whatever's in Python's
@@ -105,7 +106,7 @@ def _linker_params(linker_cmd, compiler_cmd):
     return linker_cmd[pivot:]
 
 
-class UnixCCompiler(CCompiler):
+class Compiler(base.Compiler):
     compiler_type = 'unix'
 
     # These are used by CCompiler in two places: the constructor sets
@@ -264,7 +265,7 @@ def link(
                 # Select a linker based on context: linker_exe when
                 # building an executable or linker_so (with shared options)
                 # when building a shared library.
-                building_exe = target_desc == CCompiler.EXECUTABLE
+                building_exe = target_desc == base.Compiler.EXECUTABLE
                 linker = (
                     self.linker_exe
                     if building_exe
diff --git a/distutils/compilers/C/zos.py b/distutils/compilers/C/zos.py
index af1e7fa5cc..5de91e49e5 100644
--- a/distutils/compilers/C/zos.py
+++ b/distutils/compilers/C/zos.py
@@ -13,9 +13,9 @@
 
 import os
 
-from . import sysconfig
-from .errors import CompileError, DistutilsExecError
-from .unixccompiler import UnixCCompiler
+from ... import sysconfig
+from ...errors import CompileError, DistutilsExecError
+from . import unix
 
 _cc_args = {
     'ibm-openxl': [
@@ -101,7 +101,7 @@
 # Python on z/OS is built with no compiler-specific options in its CFLAGS.
 # But each compiler requires its own specific options to build successfully,
 # though some of the options are common between them
-class zOSCCompiler(UnixCCompiler):
+class Compiler(unix.Compiler):
     src_extensions = ['.c', '.C', '.cc', '.cxx', '.cpp', '.m', '.s']
     _cpp_extensions = ['.cc', '.cpp', '.cxx', '.C']
     _asm_extensions = ['.s']
diff --git a/distutils/cygwinccompiler.py b/distutils/cygwinccompiler.py
new file mode 100644
index 0000000000..de89e3cd84
--- /dev/null
+++ b/distutils/cygwinccompiler.py
@@ -0,0 +1,31 @@
+from .compilers.C import cygwin
+from .compilers.C.cygwin import (
+    CONFIG_H_NOTOK,
+    CONFIG_H_OK,
+    CONFIG_H_UNCERTAIN,
+    check_config_h,
+    get_msvcr,
+    is_cygwincc,
+)
+
+__all__ = [
+    'CONFIG_H_NOTOK',
+    'CONFIG_H_OK',
+    'CONFIG_H_UNCERTAIN',
+    'CygwinCCompiler',
+    'Mingw32CCompiler',
+    'check_config_h',
+    'get_msvcr',
+    'is_cygwincc',
+]
+
+
+CygwinCCompiler = cygwin.Compiler
+Mingw32CCompiler = cygwin.MinGW32Compiler
+
+
+get_versions = None
+"""
+A stand-in for the previous get_versions() function to prevent failures
+when monkeypatched. See pypa/setuptools#2969.
+"""
diff --git a/distutils/tests/test_msvccompiler.py b/distutils/tests/test_msvccompiler.py
index ceb15d3a63..21bd4cb3d9 100644
--- a/distutils/tests/test_msvccompiler.py
+++ b/distutils/tests/test_msvccompiler.py
@@ -6,6 +6,7 @@
 import threading
 import unittest.mock as mock
 from distutils import _msvccompiler
+from distutils.compilers.C import msvc
 from distutils.errors import DistutilsPlatformError
 from distutils.tests import support
 from distutils.util import get_platform
@@ -23,10 +24,10 @@ def test_no_compiler(self, monkeypatch):
         def _find_vcvarsall(plat_spec):
             return None, None
 
-        monkeypatch.setattr(_msvccompiler, '_find_vcvarsall', _find_vcvarsall)
+        monkeypatch.setattr(msvc, '_find_vcvarsall', _find_vcvarsall)
 
         with pytest.raises(DistutilsPlatformError):
-            _msvccompiler._get_vc_env(
+            msvc._get_vc_env(
                 'wont find this version',
             )
 
diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
new file mode 100644
index 0000000000..9cd30ad9a6
--- /dev/null
+++ b/distutils/unixccompiler.py
@@ -0,0 +1,3 @@
+from .compilers.C import unix
+
+UnixCCompiler = unix.Compiler
diff --git a/distutils/zosccompiler.py b/distutils/zosccompiler.py
new file mode 100644
index 0000000000..e49630ac6e
--- /dev/null
+++ b/distutils/zosccompiler.py
@@ -0,0 +1,3 @@
+from .compilers.C import zos
+
+zOSCCompiler = zos.Compiler

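Note on the shims above: a minimal sketch (illustrative only, not part of the
patch series) confirming that the legacy module names keep resolving to the
relocated classes, so existing imports are unaffected:

    # sketch only; assumes the shim modules introduced in the patch above
    from distutils.ccompiler import CCompiler
    from distutils.unixccompiler import UnixCCompiler
    from distutils.compilers.C import base, unix

    assert CCompiler is base.Compiler
    assert UnixCCompiler is unix.Compiler

Because the shims alias the classes rather than copying them, isinstance
checks and subclasses keyed to the old names continue to work.
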
From 3fe42b9d937adca5056a936238c8d084036a557e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 09:38:29 -0400
Subject: [PATCH 1462/1761] Move compiler exceptions to their package.

---
 distutils/compilers/C/errors.py | 24 +++++++++++++++++++
 distutils/errors.py             | 41 +++++++++++----------------------
 2 files changed, 38 insertions(+), 27 deletions(-)
 create mode 100644 distutils/compilers/C/errors.py

diff --git a/distutils/compilers/C/errors.py b/distutils/compilers/C/errors.py
new file mode 100644
index 0000000000..01328592b2
--- /dev/null
+++ b/distutils/compilers/C/errors.py
@@ -0,0 +1,24 @@
+class Error(Exception):
+    """Some compile/link operation failed."""
+
+
+class PreprocessError(Error):
+    """Failure to preprocess one or more C/C++ files."""
+
+
+class CompileError(Error):
+    """Failure to compile one or more C/C++ source files."""
+
+
+class LibError(Error):
+    """Failure to create a static library from one or more C/C++ object
+    files."""
+
+
+class LinkError(Error):
+    """Failure to link one or more C/C++ object files into an executable
+    or shared library file."""
+
+
+class UnknownFileType(Error):
+    """Attempt to process an unknown file type."""
diff --git a/distutils/errors.py b/distutils/errors.py
index 3196a4f097..7c6ee258e3 100644
--- a/distutils/errors.py
+++ b/distutils/errors.py
@@ -5,6 +5,20 @@
 including :exc:`SystemExit`.
 """
 
+# compiler exceptions aliased for compatibility
+from .compilers.C.errors import (
+    CompileError,  # noqa: F401
+    LibError,  # noqa: F401
+    LinkError,  # noqa: F401
+    PreprocessError,  # noqa: F401
+)
+from .compilers.C.errors import (
+    Error as CCompilerError,  # noqa: F401
+)
+from .compilers.C.errors import (
+    UnknownFileType as UnknownFileError,  # noqa: F401
+)
+
 
 class DistutilsError(Exception):
     """The root of all Distutils evil."""
@@ -95,30 +109,3 @@ class DistutilsTemplateError(DistutilsError):
 
 class DistutilsByteCompileError(DistutilsError):
     """Byte compile error."""
-
-
-# Exception classes used by the CCompiler implementation classes
-class CCompilerError(Exception):
-    """Some compile/link operation failed."""
-
-
-class PreprocessError(CCompilerError):
-    """Failure to preprocess one or more C/C++ files."""
-
-
-class CompileError(CCompilerError):
-    """Failure to compile one or more C/C++ source files."""
-
-
-class LibError(CCompilerError):
-    """Failure to create a static library from one or more C/C++ object
-    files."""
-
-
-class LinkError(CCompilerError):
-    """Failure to link one or more C/C++ object files into an executable
-    or shared library file."""
-
-
-class UnknownFileError(CCompilerError):
-    """Attempt to process an unknown file type."""

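A minimal sketch (illustrative only, not part of the patch) of the aliasing
introduced in distutils/errors.py above:

    # sketch only; the old names are aliases, not copies
    from distutils import errors
    from distutils.compilers.C import errors as c_errors

    assert errors.CCompilerError is c_errors.Error
    assert errors.UnknownFileError is c_errors.UnknownFileType
    assert errors.CompileError is c_errors.CompileError

Since the names refer to the same class objects, exception handling written
against either module behaves identically.
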
From 91fe40001af7d14c9fcff2d73290ab415512118e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 5 Sep 2024 09:46:33 -0400
Subject: [PATCH 1463/1761] In compiler package, rely on compiler errors.

---
 distutils/compilers/C/base.py   | 10 ++++++----
 distutils/compilers/C/cygwin.py |  8 +++++---
 distutils/compilers/C/msvc.py   |  8 +++++---
 distutils/compilers/C/unix.py   |  7 ++++++-
 distutils/compilers/C/zos.py    |  3 ++-
 5 files changed, 24 insertions(+), 12 deletions(-)

diff --git a/distutils/compilers/C/base.py b/distutils/compilers/C/base.py
index de1dfa6827..84a19993b0 100644
--- a/distutils/compilers/C/base.py
+++ b/distutils/compilers/C/base.py
@@ -16,15 +16,17 @@
 from ..._modified import newer_group
 from ...dir_util import mkpath
 from ...errors import (
-    CompileError,
     DistutilsModuleError,
     DistutilsPlatformError,
-    LinkError,
-    UnknownFileError,
 )
 from ...file_util import move_file
 from ...spawn import spawn
 from ...util import execute, is_mingw, split_quoted
+from .errors import (
+    CompileError,
+    LinkError,
+    UnknownFileType,
+)
 
 
 class Compiler:
@@ -989,7 +991,7 @@ def _make_out_path_exts(cls, output_dir, strip_dir, src_name, extensions):
         try:
             new_ext = extensions[src.suffix]
         except LookupError:
-            raise UnknownFileError(f"unknown file type '{src.suffix}' (from '{src}')")
+            raise UnknownFileType(f"unknown file type '{src.suffix}' (from '{src}')")
         if strip_dir:
             base = pathlib.PurePath(base.name)
         return os.path.join(output_dir, base.with_suffix(new_ext))
diff --git a/distutils/compilers/C/cygwin.py b/distutils/compilers/C/cygwin.py
index 29edd0ea03..bfabbb306e 100644
--- a/distutils/compilers/C/cygwin.py
+++ b/distutils/compilers/C/cygwin.py
@@ -15,8 +15,6 @@
 from subprocess import check_output
 
 from ...errors import (
-    CCompilerError,
-    CompileError,
     DistutilsExecError,
     DistutilsPlatformError,
 )
@@ -24,6 +22,10 @@
 from ...sysconfig import get_config_vars
 from ...version import LooseVersion, suppress_known_deprecation
 from . import unix
+from .errors import (
+    CompileError,
+    Error,
+)
 
 
 def get_msvcr():
@@ -250,7 +252,7 @@ def __init__(self, verbose=False, dry_run=False, force=False):
         shared_option = "-shared"
 
         if is_cygwincc(self.cc):
-            raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32')
+            raise Error('Cygwin gcc cannot be used with --compiler=mingw32')
 
         self.set_executables(
             compiler=f'{self.cc} -O -Wall',
diff --git a/distutils/compilers/C/msvc.py b/distutils/compilers/C/msvc.py
index 023ef39589..2bdc6576e4 100644
--- a/distutils/compilers/C/msvc.py
+++ b/distutils/compilers/C/msvc.py
@@ -25,15 +25,17 @@
 
 from ..._log import log
 from ...errors import (
-    CompileError,
     DistutilsExecError,
     DistutilsPlatformError,
-    LibError,
-    LinkError,
 )
 from ...util import get_host_platform, get_platform
 from . import base
 from .base import gen_lib_options
+from .errors import (
+    CompileError,
+    LibError,
+    LinkError,
+)
 
 
 def _find_vc2015():
diff --git a/distutils/compilers/C/unix.py b/distutils/compilers/C/unix.py
index fc97241f0a..1ba93e6a96 100644
--- a/distutils/compilers/C/unix.py
+++ b/distutils/compilers/C/unix.py
@@ -26,9 +26,14 @@
 from ..._macos_compat import compiler_fixup
 from ..._modified import newer
 from ...compat import consolidate_linker_args
-from ...errors import CompileError, DistutilsExecError, LibError, LinkError
+from ...errors import DistutilsExecError
 from . import base
 from .base import gen_lib_options, gen_preprocess_options
+from .errors import (
+    CompileError,
+    LibError,
+    LinkError,
+)
 
 # XXX Things not currently handled:
 #   * optimization/debug/warning flags; we just use whatever's in Python's
diff --git a/distutils/compilers/C/zos.py b/distutils/compilers/C/zos.py
index 5de91e49e5..82d017fc90 100644
--- a/distutils/compilers/C/zos.py
+++ b/distutils/compilers/C/zos.py
@@ -14,8 +14,9 @@
 import os
 
 from ... import sysconfig
-from ...errors import CompileError, DistutilsExecError
+from ...errors import DistutilsExecError
 from . import unix
+from .errors import CompileError
 
 _cc_args = {
     'ibm-openxl': [

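With the compiler modules now raising the package-local errors, legacy
handlers still work because of the aliases added in the previous patch. A
minimal sketch (illustrative only):

    # sketch only; CompileError subclasses Error, which distutils.errors
    # re-exports under the legacy name CCompilerError
    from distutils.errors import CCompilerError
    from distutils.compilers.C.errors import CompileError

    try:
        raise CompileError("demo failure")
    except CCompilerError:
        pass  # legacy except clauses keep catching the new exceptions
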
From f5e5969ad78a64823d39c99a250094e58092e4c3 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 6 Sep 2024 13:06:48 -0400
Subject: [PATCH 1464/1761] Move compiler tests to the compilers package.

---
 .../test_ccompiler.py => compilers/C/tests/test_base.py}     | 0
 .../C/tests/test_cygwin.py}                                  | 0
 .../C/tests/test_mingw.py}                                   | 0
 .../test_msvccompiler.py => compilers/C/tests/test_msvc.py}  | 0
 .../test_unixccompiler.py => compilers/C/tests/test_unix.py} | 5 ++---
 5 files changed, 2 insertions(+), 3 deletions(-)
 rename distutils/{tests/test_ccompiler.py => compilers/C/tests/test_base.py} (100%)
 rename distutils/{tests/test_cygwinccompiler.py => compilers/C/tests/test_cygwin.py} (100%)
 rename distutils/{tests/test_mingwccompiler.py => compilers/C/tests/test_mingw.py} (100%)
 rename distutils/{tests/test_msvccompiler.py => compilers/C/tests/test_msvc.py} (100%)
 rename distutils/{tests/test_unixccompiler.py => compilers/C/tests/test_unix.py} (99%)

diff --git a/distutils/tests/test_ccompiler.py b/distutils/compilers/C/tests/test_base.py
similarity index 100%
rename from distutils/tests/test_ccompiler.py
rename to distutils/compilers/C/tests/test_base.py
diff --git a/distutils/tests/test_cygwinccompiler.py b/distutils/compilers/C/tests/test_cygwin.py
similarity index 100%
rename from distutils/tests/test_cygwinccompiler.py
rename to distutils/compilers/C/tests/test_cygwin.py
diff --git a/distutils/tests/test_mingwccompiler.py b/distutils/compilers/C/tests/test_mingw.py
similarity index 100%
rename from distutils/tests/test_mingwccompiler.py
rename to distutils/compilers/C/tests/test_mingw.py
diff --git a/distutils/tests/test_msvccompiler.py b/distutils/compilers/C/tests/test_msvc.py
similarity index 100%
rename from distutils/tests/test_msvccompiler.py
rename to distutils/compilers/C/tests/test_msvc.py
diff --git a/distutils/tests/test_unixccompiler.py b/distutils/compilers/C/tests/test_unix.py
similarity index 99%
rename from distutils/tests/test_unixccompiler.py
rename to distutils/compilers/C/tests/test_unix.py
index 2c2f4aaec2..cb42e6c571 100644
--- a/distutils/tests/test_unixccompiler.py
+++ b/distutils/compilers/C/tests/test_unix.py
@@ -6,14 +6,13 @@
 from distutils import sysconfig
 from distutils.compat import consolidate_linker_args
 from distutils.errors import DistutilsPlatformError
+from distutils.tests import support
+from distutils.tests.compat.py39 import EnvironmentVarGuard
 from distutils.unixccompiler import UnixCCompiler
 from distutils.util import _clear_cached_macosx_ver
 
 import pytest
 
-from . import support
-from .compat.py39 import EnvironmentVarGuard
-
 
 @pytest.fixture(autouse=True)
 def save_values(monkeypatch):

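With the tests relocated, they can be selected by their new paths; for
example (assuming a source checkout with pytest available):

    python -m pytest distutils/compilers/C/tests
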
From cdd4c28904fdc81f14941ac874127207a6966e04 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 6 Sep 2024 13:24:14 -0400
Subject: [PATCH 1465/1761] Direct tests to the new names

---
 distutils/compilers/C/tests/test_base.py   |  9 +++---
 distutils/compilers/C/tests/test_cygwin.py | 19 +++++--------
 distutils/compilers/C/tests/test_mingw.py  | 32 ++++++++--------------
 distutils/compilers/C/tests/test_msvc.py   | 20 ++++++--------
 distutils/compilers/C/tests/test_unix.py   |  7 +++--
 5 files changed, 37 insertions(+), 50 deletions(-)

diff --git a/distutils/compilers/C/tests/test_base.py b/distutils/compilers/C/tests/test_base.py
index 7ebfed56be..b73ec4e46d 100644
--- a/distutils/compilers/C/tests/test_base.py
+++ b/distutils/compilers/C/tests/test_base.py
@@ -3,10 +3,11 @@
 import sys
 import sysconfig
 import textwrap
-from distutils import ccompiler
 
 import pytest
 
+from .. import base
+
 pytestmark = pytest.mark.usefixtures('suppress_path_mangle')
 
 
@@ -46,7 +47,7 @@ def test_set_include_dirs(c_file):
     Extensions should build even if set_include_dirs is invoked.
     In particular, compiler-specific paths should not be overridden.
     """
-    compiler = ccompiler.new_compiler()
+    compiler = base.new_compiler()
     python = sysconfig.get_paths()['include']
     compiler.set_include_dirs([python])
     compiler.compile(_make_strs([c_file]))
@@ -60,7 +61,7 @@ def test_has_function_prototype():
     # Issue https://github.com/pypa/setuptools/issues/3648
     # Test prototype-generating behavior.
 
-    compiler = ccompiler.new_compiler()
+    compiler = base.new_compiler()
 
     # Every C implementation should have these.
     assert compiler.has_function('abort')
@@ -84,7 +85,7 @@ def test_include_dirs_after_multiple_compile_calls(c_file):
     Calling compile multiple times should not change the include dirs
     (regression test for setuptools issue #3591).
     """
-    compiler = ccompiler.new_compiler()
+    compiler = base.new_compiler()
     python = sysconfig.get_paths()['include']
     compiler.set_include_dirs([python])
     compiler.compile(_make_strs([c_file]))
diff --git a/distutils/compilers/C/tests/test_cygwin.py b/distutils/compilers/C/tests/test_cygwin.py
index 677bc0ac99..9adf6b8ebf 100644
--- a/distutils/compilers/C/tests/test_cygwin.py
+++ b/distutils/compilers/C/tests/test_cygwin.py
@@ -3,17 +3,12 @@
 import os
 import sys
 from distutils import sysconfig
-from distutils.cygwinccompiler import (
-    CONFIG_H_NOTOK,
-    CONFIG_H_OK,
-    CONFIG_H_UNCERTAIN,
-    check_config_h,
-    get_msvcr,
-)
 from distutils.tests import support
 
 import pytest
 
+from .. import cygwin
+
 
 @pytest.fixture(autouse=True)
 def stuff(request, monkeypatch, distutils_managed_tempdir):
@@ -54,24 +49,24 @@ def test_check_config_h(self):
             '4.0.1 (Apple Computer, Inc. build 5370)]'
         )
 
-        assert check_config_h()[0] == CONFIG_H_OK
+        assert cygwin.check_config_h()[0] == cygwin.CONFIG_H_OK
 
         # then it tries to see if it can find "__GNUC__" in pyconfig.h
         sys.version = 'something without the *CC word'
 
         # if the file doesn't exist it returns  CONFIG_H_UNCERTAIN
-        assert check_config_h()[0] == CONFIG_H_UNCERTAIN
+        assert cygwin.check_config_h()[0] == cygwin.CONFIG_H_UNCERTAIN
 
         # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
         self.write_file(self.python_h, 'xxx')
-        assert check_config_h()[0] == CONFIG_H_NOTOK
+        assert cygwin.check_config_h()[0] == cygwin.CONFIG_H_NOTOK
 
         # and CONFIG_H_OK if __GNUC__ is found
         self.write_file(self.python_h, 'xxx __GNUC__ xxx')
-        assert check_config_h()[0] == CONFIG_H_OK
+        assert cygwin.check_config_h()[0] == cygwin.CONFIG_H_OK
 
     def test_get_msvcr(self):
-        assert get_msvcr() == []
+        assert cygwin.get_msvcr() == []
 
     @pytest.mark.skipif('sys.platform != "cygwin"')
     def test_dll_libraries_not_none(self):
diff --git a/distutils/compilers/C/tests/test_mingw.py b/distutils/compilers/C/tests/test_mingw.py
index 3e3ad5058c..dc45687a91 100644
--- a/distutils/compilers/C/tests/test_mingw.py
+++ b/distutils/compilers/C/tests/test_mingw.py
@@ -1,26 +1,24 @@
 from distutils import sysconfig
-from distutils.errors import CCompilerError, DistutilsPlatformError
+from distutils.errors import DistutilsPlatformError
 from distutils.util import is_mingw, split_quoted
 
 import pytest
 
+from .. import cygwin, errors
 
-class TestMingw32CCompiler:
+
+class TestMinGW32Compiler:
     @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
     def test_compiler_type(self):
-        from distutils.cygwinccompiler import Mingw32CCompiler
-
-        compiler = Mingw32CCompiler()
+        compiler = cygwin.MinGW32Compiler()
         assert compiler.compiler_type == 'mingw32'
 
     @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
     def test_set_executables(self, monkeypatch):
-        from distutils.cygwinccompiler import Mingw32CCompiler
-
         monkeypatch.setenv('CC', 'cc')
         monkeypatch.setenv('CXX', 'c++')
 
-        compiler = Mingw32CCompiler()
+        compiler = cygwin.MinGW32Compiler()
 
         assert compiler.compiler == split_quoted('cc -O -Wall')
         assert compiler.compiler_so == split_quoted('cc -shared -O -Wall')
@@ -30,27 +28,21 @@ def test_set_executables(self, monkeypatch):
 
     @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
     def test_runtime_library_dir_option(self):
-        from distutils.cygwinccompiler import Mingw32CCompiler
-
-        compiler = Mingw32CCompiler()
+        compiler = cygwin.MinGW32Compiler()
         with pytest.raises(DistutilsPlatformError):
             compiler.runtime_library_dir_option('/usr/lib')
 
     @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
     def test_cygwincc_error(self, monkeypatch):
-        import distutils.cygwinccompiler
-
-        monkeypatch.setattr(distutils.cygwinccompiler, 'is_cygwincc', lambda _: True)
+        monkeypatch.setattr(cygwin, 'is_cygwincc', lambda _: True)
 
-        with pytest.raises(CCompilerError):
-            distutils.cygwinccompiler.Mingw32CCompiler()
+        with pytest.raises(errors.Error):
+            cygwin.MinGW32Compiler()
 
     @pytest.mark.skipif('sys.platform == "cygwin"')
     def test_customize_compiler_with_msvc_python(self):
-        from distutils.cygwinccompiler import Mingw32CCompiler
-
         # In case we have an MSVC Python build, but still want to use
-        # Mingw32CCompiler, then customize_compiler() shouldn't fail at least.
+        # MinGW32Compiler, then customize_compiler() shouldn't fail at least.
         # https://github.com/pypa/setuptools/issues/4456
-        compiler = Mingw32CCompiler()
+        compiler = cygwin.MinGW32Compiler()
         sysconfig.customize_compiler(compiler)
diff --git a/distutils/compilers/C/tests/test_msvc.py b/distutils/compilers/C/tests/test_msvc.py
index 21bd4cb3d9..eca831996a 100644
--- a/distutils/compilers/C/tests/test_msvc.py
+++ b/distutils/compilers/C/tests/test_msvc.py
@@ -1,19 +1,17 @@
-"""Tests for distutils._msvccompiler."""
-
 import os
 import sys
 import sysconfig
 import threading
 import unittest.mock as mock
-from distutils import _msvccompiler
-from distutils.compilers.C import msvc
 from distutils.errors import DistutilsPlatformError
 from distutils.tests import support
 from distutils.util import get_platform
 
 import pytest
 
-needs_winreg = pytest.mark.skipif('not hasattr(_msvccompiler, "winreg")')
+from .. import msvc
+
+needs_winreg = pytest.mark.skipif('not hasattr(msvc, "winreg")')
 
 
 class Testmsvccompiler(support.TempdirManager):
@@ -47,12 +45,12 @@ def test_cross_platform_compilation_paths(self, monkeypatch, plat_name, expected
         """
         Ensure a specified target platform is passed to _get_vcvars_spec.
         """
-        compiler = _msvccompiler.MSVCCompiler()
+        compiler = msvc.Compiler()
 
         def _get_vcvars_spec(host_platform, platform):
             assert platform == expected
 
-        monkeypatch.setattr(_msvccompiler, '_get_vcvars_spec', _get_vcvars_spec)
+        monkeypatch.setattr(msvc, '_get_vcvars_spec', _get_vcvars_spec)
         compiler.initialize(plat_name)
 
     @needs_winreg
@@ -64,7 +62,7 @@ def test_get_vc_env_unicode(self):
         old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None)
         os.environ[test_var] = test_value
         try:
-            env = _msvccompiler._get_vc_env('x86')
+            env = msvc._get_vc_env('x86')
             assert test_var.lower() in env
             assert test_value == env[test_var.lower()]
         finally:
@@ -77,7 +75,7 @@ def test_get_vc_env_unicode(self):
     def test_get_vc(self, ver):
         # This function cannot be mocked, so pass if VC is found
         # and skip otherwise.
-        lookup = getattr(_msvccompiler, f'_find_vc{ver}')
+        lookup = getattr(msvc, f'_find_vc{ver}')
         expected_version = {2015: 14, 2017: 15}[ver]
         version, path = lookup()
         if not version:
@@ -104,7 +102,7 @@ def test_concurrent_safe(self):
         """
         Concurrent calls to spawn should have consistent results.
         """
-        compiler = _msvccompiler.MSVCCompiler()
+        compiler = msvc.Compiler()
         compiler._paths = "expected"
         inner_cmd = 'import os; assert os.environ["PATH"] == "expected"'
         command = [sys.executable, '-c', inner_cmd]
@@ -125,7 +123,7 @@ def test_concurrent_safe_fallback(self):
         """
         from distutils import ccompiler
 
-        compiler = _msvccompiler.MSVCCompiler()
+        compiler = msvc.Compiler()
         compiler._paths = "expected"
 
         def CCompiler_spawn(self, cmd):
diff --git a/distutils/compilers/C/tests/test_unix.py b/distutils/compilers/C/tests/test_unix.py
index cb42e6c571..f4e2898458 100644
--- a/distutils/compilers/C/tests/test_unix.py
+++ b/distutils/compilers/C/tests/test_unix.py
@@ -8,11 +8,12 @@
 from distutils.errors import DistutilsPlatformError
 from distutils.tests import support
 from distutils.tests.compat.py39 import EnvironmentVarGuard
-from distutils.unixccompiler import UnixCCompiler
 from distutils.util import _clear_cached_macosx_ver
 
 import pytest
 
+from .. import unix
+
 
 @pytest.fixture(autouse=True)
 def save_values(monkeypatch):
@@ -23,7 +24,7 @@ def save_values(monkeypatch):
 
 @pytest.fixture(autouse=True)
 def compiler_wrapper(request):
-    class CompilerWrapper(UnixCCompiler):
+    class CompilerWrapper(unix.Compiler):
         def rpath_foo(self):
             return self.runtime_library_dir_option('/foo')
 
@@ -319,7 +320,7 @@ def test_has_function(self):
         self.cc.has_function('abort')
 
     def test_find_library_file(self, monkeypatch):
-        compiler = UnixCCompiler()
+        compiler = unix.Compiler()
         compiler._library_root = lambda dir: dir
         monkeypatch.setattr(os.path, 'exists', lambda d: 'existing' in d)
 

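The redirects above matter because the legacy shim modules now expose only
the renamed classes; module-level helpers live solely in the implementation
modules. A minimal sketch (illustrative only, not part of the patch):

    # sketch only; the shim re-exports the class but not the helpers,
    # so tests must monkeypatch distutils.compilers.C.msvc directly
    import distutils._msvccompiler as shim
    from distutils.compilers.C import msvc

    assert shim.MSVCCompiler is msvc.Compiler
    assert not hasattr(shim, '_find_vcvarsall')
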
From d930eade1c190634d3d4c86bc08a06af3ca86bc0 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 23 Feb 2025 16:49:59 -0500
Subject: [PATCH 1466/1761] Add news fragment.

---
 newsfragments/4852.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4852.feature.rst

diff --git a/newsfragments/4852.feature.rst b/newsfragments/4852.feature.rst
new file mode 100644
index 0000000000..adabdbc33a
--- /dev/null
+++ b/newsfragments/4852.feature.rst
@@ -0,0 +1 @@
+Synced with pypa/distutils@91f75bb98 including exporting of PyInit_pkg (pypa/distutils#327) and a refactoring of the compiler classes into distutils.compilers (pypa/distutils#295).

From 75ce9aba3ed9f4002fa01db0287dfdb1600fb635 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 23 Feb 2025 18:57:40 -0500
Subject: [PATCH 1467/1761] Add support for building lxml on pre-release
 Pythons.

Closes jaraco/skeleton#161
---
 .github/workflows/main.yml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 9c01fc4d14..5841cc37b7 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -56,6 +56,13 @@ jobs:
     continue-on-error: ${{ matrix.python == '3.14' }}
     steps:
       - uses: actions/checkout@v4
+      - name: Install build dependencies
+        # Install dependencies for building packages on pre-release Pythons
+        # jaraco/skeleton#161
+        if: matrix.python == '3.14' && matrix.platform == 'ubuntu-latest'
+        run: |
+          sudo apt update
+          sudo apt install -y libxml2-dev libxslt-dev
       - name: Setup Python
         uses: actions/setup-python@v4
         with:

From 1c9467fdec1cc1456772cd71c7e740f048ce86fc Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 24 Feb 2025 22:00:11 +0000
Subject: [PATCH 1468/1761] Fix new mandatory configuration field for RTD
 (jaraco/skeleton#159)

This field is now required and prevents the build from running if
absent.

Details in
https://about.readthedocs.com/blog/2024/12/deprecate-config-files-without-sphinx-or-mkdocs-config/
---
 .readthedocs.yaml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index dc8516ac20..724370638f 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -5,6 +5,9 @@ python:
     extra_requirements:
       - doc
 
+sphinx:
+  configuration: docs/conf.py
+
 # required boilerplate readthedocs/readthedocs.org#10401
 build:
   os: ubuntu-lts-latest

From 19e95750f3465db016708f36f9ee51d155631994 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 01:31:32 -0500
Subject: [PATCH 1469/1761] Merge typeshed's `setuptools._distutils`
 annotations

---
 distutils/_modified.py               |  28 +-
 distutils/archive_util.py            |  42 +--
 distutils/cmd.py                     | 154 ++++++++---
 distutils/command/bdist.py           |  25 +-
 distutils/command/bdist_dumb.py      |   3 +-
 distutils/command/bdist_rpm.py       |  11 +-
 distutils/command/build.py           |  10 +-
 distutils/command/build_clib.py      |  13 +-
 distutils/command/build_ext.py       |  32 ++-
 distutils/command/build_py.py        |  19 +-
 distutils/command/build_scripts.py   |   2 +-
 distutils/command/check.py           |   2 +-
 distutils/command/clean.py           |   3 +-
 distutils/command/install.py         |  39 +--
 distutils/command/install_data.py    |   7 +-
 distutils/command/install_headers.py |   2 +-
 distutils/command/install_lib.py     |  16 +-
 distutils/command/install_scripts.py |   7 +-
 distutils/command/sdist.py           |  36 +--
 distutils/compat/__init__.py         |   7 +-
 distutils/compilers/C/base.py        | 388 ++++++++++++++++++---------
 distutils/compilers/C/msvc.py        |  46 ++--
 distutils/compilers/C/unix.py        |  25 +-
 distutils/dist.py                    | 230 ++++++++++------
 distutils/errors.py                  |  21 +-
 distutils/extension.py               |  33 +--
 distutils/filelist.py                |  93 +++++--
 distutils/spawn.py                   |  24 +-
 distutils/sysconfig.py               |  37 ++-
 distutils/tests/test_sdist.py        |   2 +-
 distutils/util.py                    |  61 +++--
 31 files changed, 936 insertions(+), 482 deletions(-)

diff --git a/distutils/_modified.py b/distutils/_modified.py
index 7cdca9398f..5793189723 100644
--- a/distutils/_modified.py
+++ b/distutils/_modified.py
@@ -2,12 +2,21 @@
 
 import functools
 import os.path
+from collections.abc import Callable, Iterable
+from typing import Literal, TypeVar
 
 from jaraco.functools import splat
 
 from .compat.py39 import zip_strict
 from .errors import DistutilsFileError
 
+_SourcesT = TypeVar(
+    "_SourcesT", bound=str | bytes | os.PathLike[str] | os.PathLike[bytes]
+)
+_TargetsT = TypeVar(
+    "_TargetsT", bound=str | bytes | os.PathLike[str] | os.PathLike[bytes]
+)
+
 
 def _newer(source, target):
     return not os.path.exists(target) or (
@@ -15,7 +24,10 @@ def _newer(source, target):
     )
 
 
-def newer(source, target):
+def newer(
+    source: str | bytes | os.PathLike[str] | os.PathLike[bytes],
+    target: str | bytes | os.PathLike[str] | os.PathLike[bytes],
+) -> bool:
     """
     Is source modified more recently than target.
 
@@ -25,12 +37,16 @@ def newer(source, target):
     Raises DistutilsFileError if 'source' does not exist.
     """
     if not os.path.exists(source):
-        raise DistutilsFileError(f"file '{os.path.abspath(source)}' does not exist")
+        raise DistutilsFileError(f"file {os.path.abspath(source)!r} does not exist")
 
     return _newer(source, target)
 
 
-def newer_pairwise(sources, targets, newer=newer):
+def newer_pairwise(
+    sources: Iterable[_SourcesT],
+    targets: Iterable[_TargetsT],
+    newer: Callable[[_SourcesT, _TargetsT], bool] = newer,
+) -> tuple[list[_SourcesT], list[_TargetsT]]:
     """
     Filter filenames where sources are newer than targets.
 
@@ -43,7 +59,11 @@ def newer_pairwise(sources, targets, newer=newer):
     return tuple(map(list, zip(*newer_pairs))) or ([], [])
 
 
-def newer_group(sources, target, missing='error'):
+def newer_group(
+    sources: Iterable[str | bytes | os.PathLike[str] | os.PathLike[bytes]],
+    target: str | bytes | os.PathLike[str] | os.PathLike[bytes],
+    missing: Literal["error", "ignore", "newer"] = "error",
+) -> bool:
     """
     Is target out-of-date with respect to any file in sources.
 
diff --git a/distutils/archive_util.py b/distutils/archive_util.py
index 5bb6df763d..167a5c2e29 100644
--- a/distutils/archive_util.py
+++ b/distutils/archive_util.py
@@ -4,6 +4,7 @@
 that sort of thing)."""
 
 import os
+from typing import Literal
 
 try:
     import zipfile
@@ -54,14 +55,14 @@ def _get_uid(name):
 
 
 def make_tarball(
-    base_name,
-    base_dir,
-    compress="gzip",
-    verbose=False,
-    dry_run=False,
-    owner=None,
-    group=None,
-):
+    base_name: str,
+    base_dir: str | os.PathLike[str],
+    compress: Literal["gzip", "bzip2", "xz"] | None = "gzip",
+    verbose: bool = False,
+    dry_run: bool = False,
+    owner: str | None = None,
+    group: str | None = None,
+) -> str:
     """Create a (possibly compressed) tar file from all the files under
     'base_dir'.
 
@@ -122,7 +123,12 @@ def _set_uid_gid(tarinfo):
     return archive_name
 
 
-def make_zipfile(base_name, base_dir, verbose=False, dry_run=False):  # noqa: C901
+def make_zipfile(
+    base_name: str,
+    base_dir: str | os.PathLike[str],
+    verbose: bool = False,
+    dry_run: bool = False,
+) -> str:  # noqa: C901
     """Create a zip file from all the files under 'base_dir'.
 
     The output zip file will be named 'base_name' + ".zip".  Uses either the
@@ -205,15 +211,15 @@ def check_archive_formats(formats):
 
 
 def make_archive(
-    base_name,
-    format,
-    root_dir=None,
-    base_dir=None,
-    verbose=False,
-    dry_run=False,
-    owner=None,
-    group=None,
-):
+    base_name: str,
+    format: str,
+    root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes] | None = None,
+    base_dir: str | None = None,
+    verbose: bool = False,
+    dry_run: bool = False,
+    owner: str | None = None,
+    group: str | None = None,
+) -> str:
     """Create an archive file (eg. zip or tar).
 
     'base_name' is the name of the file to create, minus any format-specific
diff --git a/distutils/cmd.py b/distutils/cmd.py
index 9c6fa6566c..db34673090 100644
--- a/distutils/cmd.py
+++ b/distutils/cmd.py
@@ -10,13 +10,24 @@
 import os
 import re
 import sys
-from collections.abc import Callable
-from typing import Any, ClassVar, TypeVar, overload
+from abc import abstractmethod
+from collections.abc import Callable, MutableSequence
+from typing import TYPE_CHECKING, Any, ClassVar, TypeVar, overload
 
 from . import _modified, archive_util, dir_util, file_util, util
 from ._log import log
 from .errors import DistutilsOptionError
 
+if TYPE_CHECKING:
+    # type-only import because of mutual dependence between these classes
+    from distutils.dist import Distribution
+
+    from typing_extensions import TypeVarTuple, Unpack
+
+    _Ts = TypeVarTuple("_Ts")
+
+_StrPathT = TypeVar("_StrPathT", bound="str | os.PathLike[str]")
+_BytesPathT = TypeVar("_BytesPathT", bound="bytes | os.PathLike[bytes]")
 _CommandT = TypeVar("_CommandT", bound="Command")
 
 
@@ -61,7 +72,7 @@ class Command:
 
     # -- Creation/initialization methods -------------------------------
 
-    def __init__(self, dist):
+    def __init__(self, dist: Distribution) -> None:
         """Create and initialize a new Command object.  Most importantly,
         invokes the 'initialize_options()' method, which is the real
         initializer and depends on the actual command being
@@ -119,7 +130,7 @@ def __getattr__(self, attr):
         else:
             raise AttributeError(attr)
 
-    def ensure_finalized(self):
+    def ensure_finalized(self) -> None:
         if not self.finalized:
             self.finalize_options()
         self.finalized = True
@@ -137,7 +148,8 @@ def ensure_finalized(self):
     #     run the command: do whatever it is we're here to do,
     #     controlled by the command's various option values
 
-    def initialize_options(self):
+    @abstractmethod
+    def initialize_options(self) -> None:
         """Set default values for all the options that this command
         supports.  Note that these defaults may be overridden by other
         commands, by the setup script, by config files, or by the
@@ -151,7 +163,8 @@ def initialize_options(self):
             f"abstract method -- subclass {self.__class__} must override"
         )
 
-    def finalize_options(self):
+    @abstractmethod
+    def finalize_options(self) -> None:
         """Set final values for all the options that this command supports.
         This is always called as late as possible, ie.  after any option
         assignments from the command-line or from other commands have been
@@ -180,7 +193,8 @@ def dump_options(self, header=None, indent=""):
             value = getattr(self, option)
             self.announce(indent + f"{option} = {value}", level=logging.INFO)
 
-    def run(self):
+    @abstractmethod
+    def run(self) -> None:
         """A command's raison d'etre: carry out the action it exists to
         perform, controlled by the options initialized in
         'initialize_options()', customized by other commands, the setup
@@ -194,10 +208,10 @@ def run(self):
             f"abstract method -- subclass {self.__class__} must override"
         )
 
-    def announce(self, msg, level=logging.DEBUG):
+    def announce(self, msg: object, level: int = logging.DEBUG) -> None:
         log.log(level, msg)
 
-    def debug_print(self, msg):
+    def debug_print(self, msg: object) -> None:
         """Print 'msg' to stdout if the global DEBUG (taken from the
         DISTUTILS_DEBUG environment variable) flag is true.
         """
@@ -229,13 +243,13 @@ def _ensure_stringlike(self, option, what, default=None):
             raise DistutilsOptionError(f"'{option}' must be a {what} (got `{val}`)")
         return val
 
-    def ensure_string(self, option, default=None):
+    def ensure_string(self, option: str, default: str | None = None) -> None:
         """Ensure that 'option' is a string; if not defined, set it to
         'default'.
         """
         self._ensure_stringlike(option, "string", default)
 
-    def ensure_string_list(self, option):
+    def ensure_string_list(self, option: str) -> None:
         r"""Ensure that 'option' is a list of strings.  If 'option' is
         currently a string, we split it either on /,\s*/ or /\s+/, so
         "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
@@ -263,13 +277,13 @@ def _ensure_tested_string(self, option, tester, what, error_fmt, default=None):
                 ("error in '%s' option: " + error_fmt) % (option, val)
             )
 
-    def ensure_filename(self, option):
+    def ensure_filename(self, option: str) -> None:
         """Ensure that 'option' is the name of an existing file."""
         self._ensure_tested_string(
             option, os.path.isfile, "filename", "'%s' does not exist or is not a file"
         )
 
-    def ensure_dirname(self, option):
+    def ensure_dirname(self, option: str) -> None:
         self._ensure_tested_string(
             option,
             os.path.isdir,
@@ -279,13 +293,15 @@ def ensure_dirname(self, option):
 
     # -- Convenience methods for commands ------------------------------
 
-    def get_command_name(self):
+    def get_command_name(self) -> str:
         if hasattr(self, 'command_name'):
             return self.command_name
         else:
             return self.__class__.__name__
 
-    def set_undefined_options(self, src_cmd, *option_pairs):
+    def set_undefined_options(
+        self, src_cmd: str, *option_pairs: tuple[str, str]
+    ) -> None:
         """Set the values of any "undefined" options from corresponding
         option values in some other command object.  "Undefined" here means
         "is None", which is the convention used to indicate that an option
@@ -306,7 +322,9 @@ def set_undefined_options(self, src_cmd, *option_pairs):
             if getattr(self, dst_option) is None:
                 setattr(self, dst_option, getattr(src_cmd_obj, src_option))
 
-    def get_finalized_command(self, command, create=True):
+    # NOTE: Because distutils is a private setuptools implementation and we don't need
+    # to re-expose all commands here, we're not overloading each and every command possibility.
+    def get_finalized_command(self, command: str, create: bool = True) -> Command:
         """Wrapper around Distribution's 'get_command_obj()' method: find
         (create if necessary and 'create' is true) the command object for
         'command', call its 'ensure_finalized()' method, and return the
@@ -331,14 +349,14 @@ def reinitialize_command(
     ) -> Command:
         return self.distribution.reinitialize_command(command, reinit_subcommands)
 
-    def run_command(self, command):
+    def run_command(self, command: str) -> None:
         """Run some other command: uses the 'run_command()' method of
         Distribution, which creates and finalizes the command object if
         necessary and then invokes its 'run()' method.
         """
         self.distribution.run_command(command)
 
-    def get_sub_commands(self):
+    def get_sub_commands(self) -> list[str]:
         """Determine the sub-commands that are relevant in the current
         distribution (ie., that need to be run).  This is based on the
         'sub_commands' class attribute: each tuple in that list may include
@@ -353,24 +371,50 @@ def get_sub_commands(self):
 
     # -- External world manipulation -----------------------------------
 
-    def warn(self, msg):
+    def warn(self, msg: object) -> None:
         log.warning("warning: %s: %s\n", self.get_command_name(), msg)
 
-    def execute(self, func, args, msg=None, level=1):
+    def execute(
+        self,
+        func: Callable[[Unpack[_Ts]], object],
+        args: tuple[Unpack[_Ts]],
+        msg: object = None,
+        level: int = 1,
+    ) -> None:
         util.execute(func, args, msg, dry_run=self.dry_run)
 
-    def mkpath(self, name, mode=0o777):
+    def mkpath(self, name: str, mode: int = 0o777) -> None:
         dir_util.mkpath(name, mode, dry_run=self.dry_run)
 
+    @overload
+    def copy_file(
+        self,
+        infile: str | os.PathLike[str],
+        outfile: _StrPathT,
+        preserve_mode: bool = True,
+        preserve_times: bool = True,
+        link: str | None = None,
+        level: int = 1,
+    ) -> tuple[_StrPathT | str, bool]: ...
+    @overload
     def copy_file(
         self,
-        infile,
-        outfile,
-        preserve_mode=True,
-        preserve_times=True,
-        link=None,
-        level=1,
-    ):
+        infile: bytes | os.PathLike[bytes],
+        outfile: _BytesPathT,
+        preserve_mode: bool = True,
+        preserve_times: bool = True,
+        link: str | None = None,
+        level: int = 1,
+    ) -> tuple[_BytesPathT | bytes, bool]: ...
+    def copy_file(
+        self,
+        infile: str | os.PathLike[str] | bytes | os.PathLike[bytes],
+        outfile: str | os.PathLike[str] | bytes | os.PathLike[bytes],
+        preserve_mode: bool = True,
+        preserve_times: bool = True,
+        link: str | None = None,
+        level: int = 1,
+    ) -> tuple[str | os.PathLike[str] | bytes | os.PathLike[bytes], bool]:
         """Copy a file respecting verbose, dry-run and force flags.  (The
         former two default to whatever is in the Distribution object, and
         the latter defaults to false for commands that don't define it.)"""
@@ -386,13 +430,13 @@ def copy_file(
 
     def copy_tree(
         self,
-        infile,
-        outfile,
-        preserve_mode=True,
-        preserve_times=True,
-        preserve_symlinks=False,
-        level=1,
-    ):
+        infile: str | os.PathLike[str],
+        outfile: str,
+        preserve_mode: bool = True,
+        preserve_times: bool = True,
+        preserve_symlinks: bool = False,
+        level: int = 1,
+    ) -> list[str]:
         """Copy an entire directory tree respecting verbose, dry-run,
         and force flags.
         """
@@ -406,19 +450,40 @@ def copy_tree(
             dry_run=self.dry_run,
         )
 
-    def move_file(self, src, dst, level=1):
+    @overload
+    def move_file(
+        self, src: str | os.PathLike[str], dst: _StrPathT, level: int = 1
+    ) -> _StrPathT | str: ...
+    @overload
+    def move_file(
+        self, src: bytes | os.PathLike[bytes], dst: _BytesPathT, level: int = 1
+    ) -> _BytesPathT | bytes: ...
+    def move_file(
+        self,
+        src: str | os.PathLike[str] | bytes | os.PathLike[bytes],
+        dst: str | os.PathLike[str] | bytes | os.PathLike[bytes],
+        level: int = 1,
+    ) -> str | os.PathLike[str] | bytes | os.PathLike[bytes]:
         """Move a file respecting dry-run flag."""
         return file_util.move_file(src, dst, dry_run=self.dry_run)
 
-    def spawn(self, cmd, search_path=True, level=1):
+    def spawn(
+        self, cmd: MutableSequence[str], search_path: bool = True, level: int = 1
+    ) -> None:
         """Spawn an external command respecting dry-run flag."""
         from distutils.spawn import spawn
 
         spawn(cmd, search_path, dry_run=self.dry_run)
 
     def make_archive(
-        self, base_name, format, root_dir=None, base_dir=None, owner=None, group=None
-    ):
+        self,
+        base_name: str,
+        format: str,
+        root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes] | None = None,
+        base_dir: str | None = None,
+        owner: str | None = None,
+        group: str | None = None,
+    ) -> str:
         return archive_util.make_archive(
             base_name,
             format,
@@ -430,8 +495,15 @@ def make_archive(
         )
 
     def make_file(
-        self, infiles, outfile, func, args, exec_msg=None, skip_msg=None, level=1
-    ):
+        self,
+        infiles: str | list[str] | tuple[str, ...],
+        outfile: str | os.PathLike[str] | bytes | os.PathLike[bytes],
+        func: Callable[[Unpack[_Ts]], object],
+        args: tuple[Unpack[_Ts]],
+        exec_msg: object = None,
+        skip_msg: object = None,
+        level: int = 1,
+    ) -> None:
         """Special case of 'execute()' for operations that process one or
         more input files and generate one output file.  Works just like
         'execute()', except the operation is skipped and a different
diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index 1ec3c35f40..230bdeaf90 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -5,12 +5,20 @@
 
 import os
 import warnings
-from typing import ClassVar
+from collections.abc import Callable
+from typing import TYPE_CHECKING, ClassVar
 
 from ..core import Command
 from ..errors import DistutilsOptionError, DistutilsPlatformError
 from ..util import get_platform
 
+if TYPE_CHECKING:
+    from typing_extensions import deprecated
+else:
+
+    def deprecated(fn):
+        return fn
+
 
 def show_formats():
     """Print list of available formats (arguments to "--format" option)."""
@@ -26,9 +34,10 @@ def show_formats():
 
 class ListCompat(dict[str, tuple[str, str]]):
     # adapter to allow for Setuptools compatibility in format_commands
-    def append(self, item):
+    @deprecated("format_commands is now a dict. append is deprecated.")
+    def append(self, item: object) -> None:
         warnings.warn(
-            """format_commands is now a dict. append is deprecated.""",
+            "format_commands is now a dict. append is deprecated.",
             DeprecationWarning,
             stacklevel=2,
         )
@@ -64,9 +73,9 @@ class bdist(Command):
         ),
     ]
 
-    boolean_options = ['skip-build']
+    boolean_options: ClassVar[list[str]] = ['skip-build']
 
-    help_options = [
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
         ('help-formats', None, "lists available distribution formats", show_formats),
     ]
 
@@ -75,7 +84,7 @@ class bdist(Command):
 
     # This won't do in reality: will need to distinguish RPM-ish Linux,
     # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
-    default_format = {'posix': 'gztar', 'nt': 'zip'}
+    default_format: ClassVar[dict[str, str]] = {'posix': 'gztar', 'nt': 'zip'}
 
     # Define commands in preferred order for the --help-formats option
     format_commands = ListCompat({
@@ -100,7 +109,7 @@ def initialize_options(self):
         self.group = None
         self.owner = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         # have to finalize 'plat_name' before 'bdist_base'
         if self.plat_name is None:
             if self.skip_build:
@@ -128,7 +137,7 @@ def finalize_options(self):
         if self.dist_dir is None:
             self.dist_dir = "dist"
 
-    def run(self):
+    def run(self) -> None:
         # Figure out which sub-commands we need to run.
         commands = []
         for format in self.formats:
diff --git a/distutils/command/bdist_dumb.py b/distutils/command/bdist_dumb.py
index 67b0c8cce9..ccad66f431 100644
--- a/distutils/command/bdist_dumb.py
+++ b/distutils/command/bdist_dumb.py
@@ -6,6 +6,7 @@
 
 import os
 from distutils._log import log
+from typing import ClassVar
 
 from ..core import Command
 from ..dir_util import ensure_relative, remove_tree
@@ -54,7 +55,7 @@ class bdist_dumb(Command):
         ),
     ]
 
-    boolean_options = ['keep-temp', 'skip-build', 'relative']
+    boolean_options: ClassVar[list[str]] = ['keep-temp', 'skip-build', 'relative']
 
     default_format = {'posix': 'gztar', 'nt': 'zip'}
 
diff --git a/distutils/command/bdist_rpm.py b/distutils/command/bdist_rpm.py
index d443eb09b5..357b4e861e 100644
--- a/distutils/command/bdist_rpm.py
+++ b/distutils/command/bdist_rpm.py
@@ -7,6 +7,7 @@
 import subprocess
 import sys
 from distutils._log import log
+from typing import ClassVar
 
 from ..core import Command
 from ..debug import DEBUG
@@ -136,7 +137,7 @@ class bdist_rpm(Command):
         ('quiet', 'q', "Run the INSTALL phase of RPM building in quiet mode"),
     ]
 
-    boolean_options = [
+    boolean_options: ClassVar[list[str]] = [
         'keep-temp',
         'use-rpm-opt-flags',
         'rpm3-mode',
@@ -144,7 +145,7 @@ class bdist_rpm(Command):
         'quiet',
     ]
 
-    negative_opt = {
+    negative_opt: ClassVar[dict[str, str]] = {
         'no-keep-temp': 'keep-temp',
         'no-rpm-opt-flags': 'use-rpm-opt-flags',
         'rpm2-mode': 'rpm3-mode',
@@ -195,7 +196,7 @@ def initialize_options(self):
         self.force_arch = None
         self.quiet = False
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
         if self.rpm_base is None:
             if not self.rpm3_mode:
@@ -228,7 +229,7 @@ def finalize_options(self):
         self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
         self.finalize_package_data()
 
-    def finalize_package_data(self):
+    def finalize_package_data(self) -> None:
         self.ensure_string('group', "Development/Libraries")
         self.ensure_string(
             'vendor',
@@ -274,7 +275,7 @@ def finalize_package_data(self):
 
         self.ensure_string('force_arch')
 
-    def run(self):  # noqa: C901
+    def run(self) -> None:  # noqa: C901
         if DEBUG:
             print("before _get_package_data():")
             print("vendor =", self.vendor)
diff --git a/distutils/command/build.py b/distutils/command/build.py
index ccd2c706a3..9493cefee9 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -5,6 +5,8 @@
 import os
 import sys
 import sysconfig
+from collections.abc import Callable
+from typing import ClassVar
 
 from ..core import Command
 from ..errors import DistutilsOptionError
@@ -43,9 +45,9 @@ class build(Command):
         ('executable=', 'e', "specify final destination interpreter path (build.py)"),
     ]
 
-    boolean_options = ['debug', 'force']
+    boolean_options: ClassVar[list[str]] = ['debug', 'force']
 
-    help_options = [
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
         ('help-compiler', None, "list available compilers", show_compilers),
     ]
 
@@ -65,7 +67,7 @@ def initialize_options(self):
         self.executable = None
         self.parallel = None
 
-    def finalize_options(self):  # noqa: C901
+    def finalize_options(self) -> None:  # noqa: C901
         if self.plat_name is None:
             self.plat_name = get_platform()
         else:
@@ -126,7 +128,7 @@ def finalize_options(self):  # noqa: C901
             except ValueError:
                 raise DistutilsOptionError("parallel should be an integer")
 
-    def run(self):
+    def run(self) -> None:
         # Run all relevant sub-commands.  This will be some subset of:
         #  - build_py      - pure Python modules
         #  - build_clib    - standalone C libraries
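The widened `help_options` annotation spells out the 4-tuple shape distutils
expects for these entries: long option name, optional one-letter short
option, help text, and a zero-argument callback. A hedged sketch of a
conforming entry, with a hypothetical `list_compilers` standing in for
`show_compilers`:

    from __future__ import annotations

    from collections.abc import Callable
    from typing import ClassVar

    def list_compilers() -> None:
        # Stand-in callback; the real one pretty-prints the compiler table.
        print("unix, msvc, mingw32, ...")

    class my_build:
        # (long option, short option or None, help text, callback)
        help_options: ClassVar[
            list[tuple[str, str | None, str, Callable[[], object]]]
        ] = [('help-compiler', None, "list available compilers", list_compilers)]
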
diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
index 3e1832768b..2a1643d634 100644
--- a/distutils/command/build_clib.py
+++ b/distutils/command/build_clib.py
@@ -15,6 +15,7 @@
 # cut 'n paste.  Sigh.
 
 import os
+from collections.abc import Callable
 from distutils._log import log
 from typing import ClassVar
 
@@ -40,9 +41,9 @@ class build_clib(Command):
         ('compiler=', 'c', "specify the compiler type"),
     ]
 
-    boolean_options = ['debug', 'force']
+    boolean_options: ClassVar[list[str]] = ['debug', 'force']
 
-    help_options = [
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
         ('help-compiler', None, "list available compilers", show_compilers),
     ]
 
@@ -61,7 +62,7 @@ def initialize_options(self):
         self.force = False
         self.compiler = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         # This might be confusing: both build-clib and build-temp default
         # to build-temp as defined by the "build" command.  This is because
         # I think that C libraries are really just temporary build
@@ -88,7 +89,7 @@ def finalize_options(self):
         # XXX same as for build_ext -- what about 'self.define' and
         # 'self.undef' ?
 
-    def run(self):
+    def run(self) -> None:
         if not self.libraries:
             return
 
@@ -112,7 +113,7 @@ def run(self):
 
         self.build_libraries(self.libraries)
 
-    def check_library_list(self, libraries):
+    def check_library_list(self, libraries) -> None:
         """Ensure that the list of libraries is valid.
 
         `library` is presumably provided as a command option 'libraries'.
@@ -174,7 +175,7 @@ def get_source_files(self):
             filenames.extend(sources)
         return filenames
 
-    def build_libraries(self, libraries):
+    def build_libraries(self, libraries) -> None:
         for lib_name, build_info in libraries:
             sources = build_info.get('sources')
             if sources is None or not isinstance(sources, (list, tuple)):
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 3ec3663dcc..c25352f603 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -8,8 +8,10 @@
 import os
 import re
 import sys
+from collections.abc import Callable
 from distutils._log import log
 from site import USER_BASE
+from typing import ClassVar
 
 from .._modified import newer_group
 from ..core import Command
@@ -98,9 +100,15 @@ class build_ext(Command):
         ('user', None, "add user include, library and rpath"),
     ]
 
-    boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
+    boolean_options: ClassVar[list[str]] = [
+        'inplace',
+        'debug',
+        'force',
+        'swig-cpp',
+        'user',
+    ]
 
-    help_options = [
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
         ('help-compiler', None, "list available compilers", show_compilers),
     ]
 
@@ -153,7 +161,7 @@ def _python_lib_dir(sysconfig):
             # building third party extensions
             yield sysconfig.get_config_var('LIBDIR')
 
-    def finalize_options(self):  # noqa: C901
+    def finalize_options(self) -> None:  # noqa: C901
         from distutils import sysconfig
 
         self.set_undefined_options(
@@ -292,7 +300,7 @@ def finalize_options(self):  # noqa: C901
             except ValueError:
                 raise DistutilsOptionError("parallel should be an integer")
 
-    def run(self):  # noqa: C901
+    def run(self) -> None:  # noqa: C901
         from ..ccompiler import new_compiler
 
         # 'self.extensions', as supplied by setup.py, is a list of
@@ -364,7 +372,7 @@ def run(self):  # noqa: C901
         # Now actually compile and link everything.
         self.build_extensions()
 
-    def check_extensions_list(self, extensions):  # noqa: C901
+    def check_extensions_list(self, extensions) -> None:  # noqa: C901
         """Ensure that the list of extensions (presumably provided as a
         command option 'extensions') is valid, i.e. it is a list of
         Extension objects.  We also support the old-style list of 2-tuples,
@@ -472,7 +480,7 @@ def get_outputs(self):
         # "build" tree.
         return [self.get_ext_fullpath(ext.name) for ext in self.extensions]
 
-    def build_extensions(self):
+    def build_extensions(self) -> None:
         # First, sanity-check the 'extensions' list
         self.check_extensions_list(self.extensions)
         if self.parallel:
@@ -515,7 +523,7 @@ def _filter_build_errors(self, ext):
                 raise
             self.warn(f'building extension "{ext.name}" failed: {e}')
 
-    def build_extension(self, ext):
+    def build_extension(self, ext) -> None:
         sources = ext.sources
         if sources is None or not isinstance(sources, (list, tuple)):
             raise DistutilsSetupError(
@@ -676,7 +684,7 @@ def find_swig(self):
 
     # -- Name generators -----------------------------------------------
     # (extension names, filenames, whatever)
-    def get_ext_fullpath(self, ext_name):
+    def get_ext_fullpath(self, ext_name: str) -> str:
         """Returns the path of the filename for a given extension.
 
         The file is located in `build_lib` or directly in the package
@@ -703,7 +711,7 @@ def get_ext_fullpath(self, ext_name):
         #   package_dir/filename
         return os.path.join(package_dir, filename)
 
-    def get_ext_fullname(self, ext_name):
+    def get_ext_fullname(self, ext_name: str) -> str:
         """Returns the fullname of a given extension name.
 
         Adds the `package.` prefix"""
@@ -712,7 +720,7 @@ def get_ext_fullname(self, ext_name):
         else:
             return self.package + '.' + ext_name
 
-    def get_ext_filename(self, ext_name):
+    def get_ext_filename(self, ext_name: str) -> str:
         r"""Convert the name of an extension (eg. "foo.bar") into the name
         of the file from which it will be loaded (eg. "foo/bar.so", or
         "foo\bar.pyd").
@@ -723,7 +731,7 @@ def get_ext_filename(self, ext_name):
         ext_suffix = get_config_var('EXT_SUFFIX')
         return os.path.join(*ext_path) + ext_suffix
 
-    def get_export_symbols(self, ext):
+    def get_export_symbols(self, ext: Extension) -> list[str]:
         """Return the list of symbols that a shared extension has to
         export.  This either uses 'ext.export_symbols' or, if it's not
         provided, "PyInit_" + module_name.  Only relevant on Windows, where
@@ -753,7 +761,7 @@ def _get_module_name_for_symbol(self, ext):
             return parts[-2]
         return parts[-1]
 
-    def get_libraries(self, ext):  # noqa: C901
+    def get_libraries(self, ext: Extension) -> list[str]:  # noqa: C901
         """Return the list of libraries to link against when building a
         shared extension.  On most platforms, this is just 'ext.libraries';
         on Windows, we add the Python library (eg. python20.dll).
diff --git a/distutils/command/build_py.py b/distutils/command/build_py.py
index 49d710346e..a20b076fe7 100644
--- a/distutils/command/build_py.py
+++ b/distutils/command/build_py.py
@@ -7,6 +7,7 @@
 import os
 import sys
 from distutils._log import log
+from typing import ClassVar
 
 from ..core import Command
 from ..errors import DistutilsFileError, DistutilsOptionError
@@ -29,8 +30,8 @@ class build_py(Command):
         ('force', 'f', "forcibly build everything (ignore file timestamps)"),
     ]
 
-    boolean_options = ['compile', 'force']
-    negative_opt = {'no-compile': 'compile'}
+    boolean_options: ClassVar[list[str]] = ['compile', 'force']
+    negative_opt: ClassVar[dict[str, str]] = {'no-compile': 'compile'}
 
     def initialize_options(self):
         self.build_lib = None
@@ -42,7 +43,7 @@ def initialize_options(self):
         self.optimize = 0
         self.force = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         self.set_undefined_options(
             'build', ('build_lib', 'build_lib'), ('force', 'force')
         )
@@ -67,7 +68,7 @@ def finalize_options(self):
             except (ValueError, AssertionError):
                 raise DistutilsOptionError("optimize must be 0, 1, or 2")
 
-    def run(self):
+    def run(self) -> None:
         # XXX copy_file by default preserves atime and mtime.  IMHO this is
         # the right thing to do, but perhaps it should be an option -- in
         # particular, a site administrator might want installed files to
@@ -134,7 +135,7 @@ def find_data_files(self, package, src_dir):
             ])
         return files
 
-    def build_package_data(self):
+    def build_package_data(self) -> None:
         """Copy data files into build directory"""
         for _package, src_dir, build_dir, filenames in self.data_files:
             for filename in filenames:
@@ -306,7 +307,7 @@ def get_module_outfile(self, build_dir, package, module):
         outfile_path = [build_dir] + list(package) + [module + ".py"]
         return os.path.join(*outfile_path)
 
-    def get_outputs(self, include_bytecode=True):
+    def get_outputs(self, include_bytecode: bool = True) -> list[str]:
         modules = self.find_all_modules()
         outputs = []
         for package, module, _module_file in modules:
@@ -349,7 +350,7 @@ def build_module(self, module, module_file, package):
         self.mkpath(dir)
         return self.copy_file(module_file, outfile, preserve_mode=False)
 
-    def build_modules(self):
+    def build_modules(self) -> None:
         modules = self.find_modules()
         for package, module, module_file in modules:
             # Now "build" the module -- ie. copy the source file to
@@ -358,7 +359,7 @@ def build_modules(self):
             # under self.build_lib.)
             self.build_module(module, module_file, package)
 
-    def build_packages(self):
+    def build_packages(self) -> None:
         for package in self.packages:
             # Get list of (package, module, module_file) tuples based on
             # scanning the package directory.  'package' is only included
@@ -378,7 +379,7 @@ def build_packages(self):
                 assert package == package_
                 self.build_module(module, module_file, package)
 
-    def byte_compile(self, files):
+    def byte_compile(self, files) -> None:
         if sys.dont_write_bytecode:
             self.warn('byte-compiling is disabled, skipping.')
             return
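`negative_opt` pairs an "off" switch with the option it clears, so passing
--no-compile on the command line stores a false value under `compile`. A
rough sketch of the idea (simplified; the real parsing lives in distutils'
FancyGetopt):

    from __future__ import annotations

    negative_opt = {'no-compile': 'compile'}

    def apply_option(opt: str, values: dict[str, bool]) -> None:
        # Negative aliases flip their target off; plain flags switch on.
        if opt in negative_opt:
            values[negative_opt[opt]] = False
        else:
            values[opt] = True

    values: dict[str, bool] = {}
    apply_option('compile', values)
    apply_option('no-compile', values)
    print(values)  # {'compile': False}
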
diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index 1c6fd3caff..3f7aae0a66 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -32,7 +32,7 @@ class build_scripts(Command):
         ('executable=', 'e', "specify final destination interpreter path"),
     ]
 
-    boolean_options = ['force']
+    boolean_options: ClassVar[list[str]] = ['force']
 
     def initialize_options(self):
         self.build_dir = None
diff --git a/distutils/command/check.py b/distutils/command/check.py
index 078c1ce87e..b678967191 100644
--- a/distutils/command/check.py
+++ b/distutils/command/check.py
@@ -52,7 +52,7 @@ class check(Command):
         ('strict', 's', 'Will exit with an error if a check fails'),
     ]
 
-    boolean_options = ['metadata', 'restructuredtext', 'strict']
+    boolean_options: ClassVar[list[str]] = ['metadata', 'restructuredtext', 'strict']
 
     def initialize_options(self):
         """Sets default values for options."""
diff --git a/distutils/command/clean.py b/distutils/command/clean.py
index fb54a60ed4..23427aba21 100644
--- a/distutils/command/clean.py
+++ b/distutils/command/clean.py
@@ -6,6 +6,7 @@
 
 import os
 from distutils._log import log
+from typing import ClassVar
 
 from ..core import Command
 from ..dir_util import remove_tree
@@ -30,7 +31,7 @@ class clean(Command):
         ('all', 'a', "remove all build output, not just temporary by-products"),
     ]
 
-    boolean_options = ['all']
+    boolean_options: ClassVar[list[str]] = ['all']
 
     def initialize_options(self):
         self.build_base = None
diff --git a/distutils/command/install.py b/distutils/command/install.py
index 9400995024..12f9d1fe39 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -9,6 +9,7 @@
 import sysconfig
 from distutils._log import log
 from site import USER_BASE, USER_SITE
+from typing import ClassVar
 
 import jaraco.collections
 
@@ -238,7 +239,7 @@ class install(Command):
         ('record=', None, "filename in which to record list of installed files"),
     ]
 
-    boolean_options = ['compile', 'force', 'skip-build']
+    boolean_options: ClassVar[list[str]] = ['compile', 'force', 'skip-build']
 
     if HAS_USER_SITE:
         user_options.append((
@@ -248,15 +249,15 @@ class install(Command):
         ))
         boolean_options.append('user')
 
-    negative_opt = {'no-compile': 'compile'}
+    negative_opt: ClassVar[dict[str, str]] = {'no-compile': 'compile'}
 
-    def initialize_options(self):
+    def initialize_options(self) -> None:
         """Initializes options."""
         # High-level options: these select both an installation base
         # and scheme.
-        self.prefix = None
-        self.exec_prefix = None
-        self.home = None
+        self.prefix: str | None = None
+        self.exec_prefix: str | None = None
+        self.home: str | None = None
         self.user = False
 
         # These select only the installation base; it's up to the user to
@@ -264,7 +265,7 @@ def initialize_options(self):
         # the --install-{platlib,purelib,scripts,data} options).
         self.install_base = None
         self.install_platbase = None
-        self.root = None
+        self.root: str | None = None
 
         # These options are the actual installation directories; if not
         # supplied by the user, they are filled in using the installation
@@ -273,7 +274,7 @@ def initialize_options(self):
         self.install_purelib = None  # for pure module distributions
         self.install_platlib = None  # non-pure (dists w/ extensions)
         self.install_headers = None  # for C/C++ headers
-        self.install_lib = None  # set to either purelib or platlib
+        self.install_lib: str | None = None  # set to either purelib or platlib
         self.install_scripts = None
         self.install_data = None
         self.install_userbase = USER_BASE
@@ -327,7 +328,7 @@ def initialize_options(self):
     # party Python modules on various platforms given a wide
     # array of user input is decided.  Yes, it's quite complex!)
 
-    def finalize_options(self):  # noqa: C901
+    def finalize_options(self) -> None:  # noqa: C901
         """Finalizes options."""
         # This method (and its helpers, like 'finalize_unix()',
         # 'finalize_other()', and 'select_scheme()') is where the default
@@ -510,7 +511,7 @@ def finalize_options(self):  # noqa: C901
         # Punt on doc directories for now -- after all, we're punting on
         # documentation completely!
 
-    def dump_dirs(self, msg):
+    def dump_dirs(self, msg) -> None:
         """Dumps the list of user options."""
         if not DEBUG:
             return
@@ -530,7 +531,7 @@ def dump_dirs(self, msg):
                 val = getattr(self, opt_name)
             log.debug("  %s: %s", opt_name, val)
 
-    def finalize_unix(self):
+    def finalize_unix(self) -> None:
         """Finalizes options for posix platforms."""
         if self.install_base is not None or self.install_platbase is not None:
             incomplete_scheme = (
@@ -579,7 +580,7 @@ def finalize_unix(self):
             self.install_platbase = self.exec_prefix
             self.select_scheme("posix_prefix")
 
-    def finalize_other(self):
+    def finalize_other(self) -> None:
         """Finalizes options for non-posix platforms"""
         if self.user:
             if self.install_userbase is None:
@@ -601,7 +602,7 @@ def finalize_other(self):
                     f"I don't know how to install stuff on '{os.name}'"
                 )
 
-    def select_scheme(self, name):
+    def select_scheme(self, name) -> None:
         _select_scheme(self, name)
 
     def _expand_attrs(self, attrs):
@@ -613,12 +614,12 @@ def _expand_attrs(self, attrs):
                 val = subst_vars(val, self.config_vars)
                 setattr(self, attr, val)
 
-    def expand_basedirs(self):
+    def expand_basedirs(self) -> None:
         """Calls `os.path.expanduser` on install_base, install_platbase and
         root."""
         self._expand_attrs(['install_base', 'install_platbase', 'root'])
 
-    def expand_dirs(self):
+    def expand_dirs(self) -> None:
         """Calls `os.path.expanduser` on install dirs."""
         self._expand_attrs([
             'install_purelib',
@@ -629,13 +630,13 @@ def expand_dirs(self):
             'install_data',
         ])
 
-    def convert_paths(self, *names):
+    def convert_paths(self, *names) -> None:
         """Call `convert_path` over `names`."""
         for name in names:
             attr = "install_" + name
             setattr(self, attr, convert_path(getattr(self, attr)))
 
-    def handle_extra_path(self):
+    def handle_extra_path(self) -> None:
         """Set `path_file` and `extra_dirs` using `extra_path`."""
         if self.extra_path is None:
             self.extra_path = self.distribution.extra_path
@@ -670,13 +671,13 @@ def handle_extra_path(self):
         self.path_file = path_file
         self.extra_dirs = extra_dirs
 
-    def change_roots(self, *names):
+    def change_roots(self, *names) -> None:
         """Change the install directories pointed by name using root."""
         for name in names:
             attr = "install_" + name
             setattr(self, attr, change_root(self.root, getattr(self, attr)))
 
-    def create_home_path(self):
+    def create_home_path(self) -> None:
         """Create directories under ~."""
         if not self.user:
             return
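Annotating the first assignment (`self.prefix: str | None = None`) matters
because checkers infer an attribute's type from its first binding; left
bare, `self.prefix = None` would pin the attribute to `None`, and the later
assignment of a real path would be rejected. A minimal sketch:

    from __future__ import annotations

    class Example:
        def initialize(self) -> None:
            self.prefix: str | None = None  # first binding fixes the type

        def finalize(self) -> None:
            if self.prefix is None:
                self.prefix = '/usr/local'  # accepted: str is in the union
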
diff --git a/distutils/command/install_data.py b/distutils/command/install_data.py
index 36f5bcc8bf..4ad186e8ec 100644
--- a/distutils/command/install_data.py
+++ b/distutils/command/install_data.py
@@ -10,6 +10,7 @@
 import functools
 import os
 from collections.abc import Iterable
+from typing import ClassVar
 
 from ..core import Command
 from ..util import change_root, convert_path
@@ -28,7 +29,7 @@ class install_data(Command):
         ('force', 'f', "force installation (overwrite existing files)"),
     ]
 
-    boolean_options = ['force']
+    boolean_options: ClassVar[list[str]] = ['force']
 
     def initialize_options(self):
         self.install_dir = None
@@ -38,7 +39,7 @@ def initialize_options(self):
         self.data_files = self.distribution.data_files
         self.warn_dir = True
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         self.set_undefined_options(
             'install',
             ('install_data', 'install_dir'),
@@ -46,7 +47,7 @@ def finalize_options(self):
             ('force', 'force'),
         )
 
-    def run(self):
+    def run(self) -> None:
         self.mkpath(self.install_dir)
         for f in self.data_files:
             self._copy(f)
diff --git a/distutils/command/install_headers.py b/distutils/command/install_headers.py
index 586121e089..97af1371ef 100644
--- a/distutils/command/install_headers.py
+++ b/distutils/command/install_headers.py
@@ -17,7 +17,7 @@ class install_headers(Command):
         ('force', 'f', "force installation (overwrite existing files)"),
     ]
 
-    boolean_options = ['force']
+    boolean_options: ClassVar[list[str]] = ['force']
 
     def initialize_options(self):
         self.install_dir = None
diff --git a/distutils/command/install_lib.py b/distutils/command/install_lib.py
index 4c1230a286..318a9db803 100644
--- a/distutils/command/install_lib.py
+++ b/distutils/command/install_lib.py
@@ -6,6 +6,7 @@
 import importlib.util
 import os
 import sys
+from typing import Any, ClassVar
 
 from ..core import Command
 from ..errors import DistutilsOptionError
@@ -47,8 +48,8 @@ class install_lib(Command):
         ('skip-build', None, "skip the build steps"),
     ]
 
-    boolean_options = ['force', 'compile', 'skip-build']
-    negative_opt = {'no-compile': 'compile'}
+    boolean_options: ClassVar[list[str]] = ['force', 'compile', 'skip-build']
+    negative_opt: ClassVar[dict[str, str]] = {'no-compile': 'compile'}
 
     def initialize_options(self):
         # let the 'install' command dictate our installation directory
@@ -59,7 +60,7 @@ def initialize_options(self):
         self.optimize = None
         self.skip_build = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         # Get all the information we need to install pure Python modules
         # from the umbrella 'install' command -- build (source) directory,
         # install (target) directory, and whether to compile .py files.
@@ -86,7 +87,7 @@ def finalize_options(self):
             if self.optimize not in (0, 1, 2):
                 raise DistutilsOptionError("optimize must be 0, 1, or 2")
 
-    def run(self):
+    def run(self) -> None:
         # Make sure we have built everything we need first
         self.build()
 
@@ -102,14 +103,15 @@ def run(self):
     # -- Top-level worker functions ------------------------------------
     # (called from 'run()')
 
-    def build(self):
+    def build(self) -> None:
         if not self.skip_build:
             if self.distribution.has_pure_modules():
                 self.run_command('build_py')
             if self.distribution.has_ext_modules():
                 self.run_command('build_ext')
 
-    def install(self):
+    # Any: https://typing.readthedocs.io/en/latest/guides/writing_stubs.html#the-any-trick
+    def install(self) -> list[str] | Any:
         if os.path.isdir(self.build_dir):
             outfiles = self.copy_tree(self.build_dir, self.install_dir)
         else:
@@ -119,7 +121,7 @@ def install(self):
             return
         return outfiles
 
-    def byte_compile(self, files):
+    def byte_compile(self, files) -> None:
         if sys.dont_write_bytecode:
             self.warn('byte-compiling is disabled, skipping.')
             return
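The linked "Any trick": unioning the real return type with `Any` keeps
strict call sites working even though the function sometimes falls off a
warn-and-bail path (implicitly returning None). A hedged illustration with
a hypothetical `install` stand-in, not the command's actual body:

    from __future__ import annotations

    from typing import Any

    def install(ready: bool) -> list[str] | Any:
        if not ready:
            return None  # mirrors the warn-and-return-nothing branch
        return ['a.py', 'b.py']

    files = install(True)
    print(len(files))  # usable as a list; the Any arm absorbs the None case
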
diff --git a/distutils/command/install_scripts.py b/distutils/command/install_scripts.py
index bb43387fb8..92e8694111 100644
--- a/distutils/command/install_scripts.py
+++ b/distutils/command/install_scripts.py
@@ -8,6 +8,7 @@
 import os
 from distutils._log import log
 from stat import ST_MODE
+from typing import ClassVar
 
 from ..core import Command
 
@@ -22,7 +23,7 @@ class install_scripts(Command):
         ('skip-build', None, "skip the build steps"),
     ]
 
-    boolean_options = ['force', 'skip-build']
+    boolean_options: ClassVar[list[str]] = ['force', 'skip-build']
 
     def initialize_options(self):
         self.install_dir = None
@@ -30,7 +31,7 @@ def initialize_options(self):
         self.build_dir = None
         self.skip_build = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         self.set_undefined_options('build', ('build_scripts', 'build_dir'))
         self.set_undefined_options(
             'install',
@@ -39,7 +40,7 @@ def finalize_options(self):
             ('skip_build', 'skip_build'),
         )
 
-    def run(self):
+    def run(self) -> None:
         if not self.skip_build:
             self.run_command('build_scripts')
         self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index acb3a41650..abb8dba5b8 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -4,6 +4,7 @@
 
 import os
 import sys
+from collections.abc import Callable
 from distutils import archive_util, dir_util, file_util
 from distutils._log import log
 from glob import glob
@@ -34,7 +35,7 @@ def show_formats():
 class sdist(Command):
     description = "create a source distribution (tarball, zip file, etc.)"
 
-    def checking_metadata(self):
+    def checking_metadata(self) -> bool:
         """Callable used for the check sub-command.
 
         Placed here so user_options can view it"""
@@ -98,7 +99,7 @@ def checking_metadata(self):
         ),
     ]
 
-    boolean_options = [
+    boolean_options: ClassVar[list[str]] = [
         'use-defaults',
         'prune',
         'manifest-only',
@@ -107,11 +108,14 @@ def checking_metadata(self):
         'metadata-check',
     ]
 
-    help_options = [
+    help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
         ('help-formats', None, "list available distribution formats", show_formats),
     ]
 
-    negative_opt = {'no-defaults': 'use-defaults', 'no-prune': 'prune'}
+    negative_opt: ClassVar[dict[str, str]] = {
+        'no-defaults': 'use-defaults',
+        'no-prune': 'prune',
+    }
 
     sub_commands = [('check', checking_metadata)]
 
@@ -136,11 +140,11 @@ def initialize_options(self):
         self.dist_dir = None
 
         self.archive_files = None
-        self.metadata_check = 1
+        self.metadata_check = True
         self.owner = None
         self.group = None
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         if self.manifest is None:
             self.manifest = "MANIFEST"
         if self.template is None:
@@ -155,7 +159,7 @@ def finalize_options(self):
         if self.dist_dir is None:
             self.dist_dir = "dist"
 
-    def run(self):
+    def run(self) -> None:
         # 'filelist' contains the list of files that will make up the
         # manifest
         self.filelist = FileList()
@@ -177,7 +181,7 @@ def run(self):
         # or zipfile, or whatever.
         self.make_distribution()
 
-    def get_file_list(self):
+    def get_file_list(self) -> None:
         """Figure out the list of files to include in the source
         distribution, and put it in 'self.filelist'.  This might involve
         reading the manifest template (and writing the manifest), or just
@@ -218,7 +222,7 @@ def get_file_list(self):
         self.filelist.remove_duplicates()
         self.write_manifest()
 
-    def add_defaults(self):
+    def add_defaults(self) -> None:
         """Add all the default files to self.filelist:
           - README or README.txt
           - setup.py
@@ -333,7 +337,7 @@ def _add_defaults_scripts(self):
             build_scripts = self.get_finalized_command('build_scripts')
             self.filelist.extend(build_scripts.get_source_files())
 
-    def read_template(self):
+    def read_template(self) -> None:
         """Read and parse manifest template file named by self.template.
 
         (usually "MANIFEST.in") The parsing and processing is done by
@@ -368,7 +372,7 @@ def read_template(self):
         finally:
             template.close()
 
-    def prune_file_list(self):
+    def prune_file_list(self) -> None:
         """Prune off branches that might slip into the file list as created
         by 'read_template()', but really don't belong there:
           * the build tree (typically "build")
@@ -391,7 +395,7 @@ def prune_file_list(self):
         vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps)
         self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)
 
-    def write_manifest(self):
+    def write_manifest(self) -> None:
         """Write the file list in 'self.filelist' (presumably as filled in
         by 'add_defaults()' and 'read_template()') to the manifest file
         named by 'self.manifest'.
@@ -419,7 +423,7 @@ def _manifest_is_not_generated(self):
             first_line = next(fp)
         return first_line != '# file GENERATED by distutils, do NOT edit\n'
 
-    def read_manifest(self):
+    def read_manifest(self) -> None:
         """Read the manifest file (named by 'self.manifest') and use it to
         fill in 'self.filelist', the list of files to include in the source
         distribution.
@@ -431,7 +435,7 @@ def read_manifest(self):
                 filter(None, filterfalse(is_comment, map(str.strip, lines)))
             )
 
-    def make_release_tree(self, base_dir, files):
+    def make_release_tree(self, base_dir, files) -> None:
         """Create the directory tree that will become the source
         distribution archive.  All directories implied by the filenames in
         'files' are created under 'base_dir', and then we hard link or copy
@@ -473,7 +477,7 @@ def make_release_tree(self, base_dir, files):
 
         self.distribution.metadata.write_pkg_info(base_dir)
 
-    def make_distribution(self):
+    def make_distribution(self) -> None:
         """Create the source distribution(s).  First, we create the release
         tree with 'make_release_tree()'; then, we create all required
         archive files (according to 'self.formats') from the release tree.
@@ -511,5 +515,5 @@ def get_archive_files(self):
         return self.archive_files
 
 
-def is_comment(line):
+def is_comment(line: str) -> bool:
     return line.startswith('#')
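`checking_metadata` gains `-> bool` because it acts as the predicate in
`sub_commands`: a sub-command runs only when its predicate (or None) allows
it. A simplified sketch of that dispatch, not the verbatim distutils source:

    from __future__ import annotations

    from collections.abc import Callable

    def get_sub_commands(
        cmd: object,
        sub_commands: list[tuple[str, Callable[[object], bool] | None]],
    ) -> list[str]:
        # Run a sub-command when it has no predicate, or when the
        # predicate approves this command instance.
        return [name for name, pred in sub_commands if pred is None or pred(cmd)]

    print(get_sub_commands(object(), [('check', None)]))  # ['check']
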
diff --git a/distutils/compat/__init__.py b/distutils/compat/__init__.py
index c715ee9cc5..2c43729b09 100644
--- a/distutils/compat/__init__.py
+++ b/distutils/compat/__init__.py
@@ -1,7 +1,12 @@
 from __future__ import annotations
 
+from collections.abc import Iterable
+from typing import TypeVar
 
-def consolidate_linker_args(args: list[str]) -> list[str] | str:
+_IterableT = TypeVar("_IterableT", bound="Iterable[str]")
+
+
+def consolidate_linker_args(args: _IterableT) -> _IterableT | str:
     """
     Ensure the return value is a string for backward compatibility.
 
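Binding the `TypeVar` to `Iterable[str]` lets the signature promise that
callers get their own container type back when no consolidation happens,
rather than collapsing every input to the bound. A small sketch of the idea
with a hypothetical pass-through helper:

    from __future__ import annotations

    from collections.abc import Iterable
    from typing import TypeVar

    _IterableT = TypeVar("_IterableT", bound="Iterable[str]")

    def keep_or_join(args: _IterableT, join: bool) -> _IterableT | str:
        # Either hand back the caller's own container, or a joined str.
        return ",".join(args) if join else args

    words = ["a", "b"]
    result = keep_or_join(words, False)  # inferred as list[str] | str
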
diff --git a/distutils/compilers/C/base.py b/distutils/compilers/C/base.py
index 84a19993b0..c6ec90550c 100644
--- a/distutils/compilers/C/base.py
+++ b/distutils/compilers/C/base.py
@@ -3,12 +3,24 @@
 Contains Compiler, an abstract base class that defines the interface
 for the Distutils compiler abstraction model."""
 
+from __future__ import annotations
+
 import os
 import pathlib
 import re
 import sys
-import types
 import warnings
+from collections.abc import Callable, Iterable, MutableSequence, Sequence
+from typing import (
+    TYPE_CHECKING,
+    ClassVar,
+    Literal,
+    Tuple,
+    TypeAlias,
+    TypeVar,
+    Union,
+    overload,
+)
 
 from more_itertools import always_iterable
 
@@ -28,6 +40,15 @@
     UnknownFileType,
 )
 
+if TYPE_CHECKING:
+    from typing_extensions import TypeVarTuple, Unpack
+
+    _Ts = TypeVarTuple("_Ts")
+
+_Macro: TypeAlias = Union[Tuple[str], Tuple[str, str | None]]
+_StrPathT = TypeVar("_StrPathT", bound=str | os.PathLike[str])
+_BytesPathT = TypeVar("_BytesPathT", bound=bytes | os.PathLike[bytes])
+
 
 class Compiler:
     """Abstract base class to define the interface that must be implemented
@@ -51,7 +72,7 @@ class Compiler:
     # dictionary (see below -- used by the 'new_compiler()' factory
     # function) -- authors of new compiler interface classes are
     # responsible for updating 'compiler_class'!
-    compiler_type = None
+    compiler_type: ClassVar[str] = None  # type: ignore[assignment]
 
     # XXX things not handled by this compiler abstraction model:
     #   * client can't provide additional options for a compiler,
@@ -73,16 +94,18 @@ class Compiler:
     #     think this is useless without the ability to null out the
     #     library search path anyways.
 
+    executables: ClassVar[dict]
+
     # Subclasses that rely on the standard filename generation methods
     # implemented below should override these; see the comment near
     # those methods ('object_filenames()' et. al.) for details:
-    src_extensions = None  # list of strings
-    obj_extension = None  # string
-    static_lib_extension = None
-    shared_lib_extension = None  # string
-    static_lib_format = None  # format string
-    shared_lib_format = None  # prob. same as static_lib_format
-    exe_extension = None  # string
+    src_extensions: ClassVar[list[str] | None] = None
+    obj_extension: ClassVar[str | None] = None
+    static_lib_extension: ClassVar[str | None] = None
+    shared_lib_extension: ClassVar[str | None] = None
+    static_lib_format: ClassVar[str | None] = None  # format string
+    shared_lib_format: ClassVar[str | None] = None  # prob. same as static_lib_format
+    exe_extension: ClassVar[str | None] = None
 
     # Default language settings. language_map is used to detect a source
     # file or Extension target language, checking source filenames.
@@ -90,14 +113,14 @@ class Compiler:
     # what language to use when mixing source types. For example, if some
     # extension has two files with ".c" extension, and one with ".cpp", it
     # is still linked as c++.
-    language_map = {
+    language_map: ClassVar[dict[str, str]] = {
         ".c": "c",
         ".cc": "c++",
         ".cpp": "c++",
         ".cxx": "c++",
         ".m": "objc",
     }
-    language_order = ["c++", "objc", "c"]
+    language_order: ClassVar[list[str]] = ["c++", "objc", "c"]
 
     include_dirs = []
     """
@@ -109,43 +132,45 @@ class Compiler:
     library dirs specific to this compiler class
     """
 
-    def __init__(self, verbose=False, dry_run=False, force=False):
+    def __init__(
+        self, verbose: bool = False, dry_run: bool = False, force: bool = False
+    ) -> None:
         self.dry_run = dry_run
         self.force = force
         self.verbose = verbose
 
         # 'output_dir': a common output directory for object, library,
         # shared object, and shared library files
-        self.output_dir = None
+        self.output_dir: str | None = None
 
         # 'macros': a list of macro definitions (or undefinitions).  A
         # macro definition is a 2-tuple (name, value), where the value is
         # either a string or None (no explicit value).  A macro
         # undefinition is a 1-tuple (name,).
-        self.macros = []
+        self.macros: list[_Macro] = []
 
         # 'include_dirs': a list of directories to search for include files
-        self.include_dirs = []
+        self.include_dirs: list[str] = []
 
         # 'libraries': a list of libraries to include in any link
         # (library names, not filenames: eg. "foo" not "libfoo.a")
-        self.libraries = []
+        self.libraries: list[str] = []
 
         # 'library_dirs': a list of directories to search for libraries
-        self.library_dirs = []
+        self.library_dirs: list[str] = []
 
         # 'runtime_library_dirs': a list of directories to search for
         # shared libraries/objects at runtime
-        self.runtime_library_dirs = []
+        self.runtime_library_dirs: list[str] = []
 
         # 'objects': a list of object files (or similar, such as explicitly
         # named library files) to include on any link
-        self.objects = []
+        self.objects: list[str] = []
 
         for key in self.executables.keys():
             self.set_executable(key, self.executables[key])
 
-    def set_executables(self, **kwargs):
+    def set_executables(self, **kwargs: str) -> None:
         """Define the executables (and options for them) that will be run
         to perform the various stages of compilation.  The exact set of
         executables that may be specified here depends on the compiler
@@ -214,11 +239,11 @@ def _is_valid_macro(name, value=None):
         """
         A valid macro is a ``name : str`` and a ``value : str | None``.
         """
-        return isinstance(name, str) and isinstance(value, (str, types.NoneType))
+        return isinstance(name, str) and isinstance(value, (str, type(None)))
 
     # -- Bookkeeping methods -------------------------------------------
 
-    def define_macro(self, name, value=None):
+    def define_macro(self, name: str, value: str | None = None) -> None:
         """Define a preprocessor macro for all compilations driven by this
         compiler object.  The optional parameter 'value' should be a
         string; if it is not supplied, then the macro will be defined
@@ -233,7 +258,7 @@ def define_macro(self, name, value=None):
 
         self.macros.append((name, value))
 
-    def undefine_macro(self, name):
+    def undefine_macro(self, name: str) -> None:
         """Undefine a preprocessor macro for all compilations driven by
         this compiler object.  If the same macro is defined by
         'define_macro()' and undefined by 'undefine_macro()' the last call
@@ -251,7 +276,7 @@ def undefine_macro(self, name):
         undefn = (name,)
         self.macros.append(undefn)
 
-    def add_include_dir(self, dir):
+    def add_include_dir(self, dir: str) -> None:
         """Add 'dir' to the list of directories that will be searched for
         header files.  The compiler is instructed to search directories in
         the order in which they are supplied by successive calls to
@@ -259,7 +284,7 @@ def add_include_dir(self, dir):
         """
         self.include_dirs.append(dir)
 
-    def set_include_dirs(self, dirs):
+    def set_include_dirs(self, dirs: list[str]) -> None:
         """Set the list of directories that will be searched to 'dirs' (a
         list of strings).  Overrides any preceding calls to
         'add_include_dir()'; subsequent calls to 'add_include_dir()' add
@@ -269,7 +294,7 @@ def set_include_dirs(self, dirs):
         """
         self.include_dirs = dirs[:]
 
-    def add_library(self, libname):
+    def add_library(self, libname: str) -> None:
         """Add 'libname' to the list of libraries that will be included in
         all links driven by this compiler object.  Note that 'libname'
         should *not* be the name of a file containing a library, but the
@@ -285,7 +310,7 @@ def add_library(self, libname):
         """
         self.libraries.append(libname)
 
-    def set_libraries(self, libnames):
+    def set_libraries(self, libnames: list[str]) -> None:
         """Set the list of libraries to be included in all links driven by
         this compiler object to 'libnames' (a list of strings).  This does
         not affect any standard system libraries that the linker may
@@ -293,7 +318,7 @@ def set_libraries(self, libnames):
         """
         self.libraries = libnames[:]
 
-    def add_library_dir(self, dir):
+    def add_library_dir(self, dir: str) -> None:
         """Add 'dir' to the list of directories that will be searched for
         libraries specified to 'add_library()' and 'set_libraries()'.  The
         linker will be instructed to search for libraries in the order they
@@ -301,20 +326,20 @@ def add_library_dir(self, dir):
         """
         self.library_dirs.append(dir)
 
-    def set_library_dirs(self, dirs):
+    def set_library_dirs(self, dirs: list[str]) -> None:
         """Set the list of library search directories to 'dirs' (a list of
         strings).  This does not affect any standard library search path
         that the linker may search by default.
         """
         self.library_dirs = dirs[:]
 
-    def add_runtime_library_dir(self, dir):
+    def add_runtime_library_dir(self, dir: str) -> None:
         """Add 'dir' to the list of directories that will be searched for
         shared libraries at runtime.
         """
         self.runtime_library_dirs.append(dir)
 
-    def set_runtime_library_dirs(self, dirs):
+    def set_runtime_library_dirs(self, dirs: list[str]) -> None:
         """Set the list of directories to search for shared libraries at
         runtime to 'dirs' (a list of strings).  This does not affect any
         standard search path that the runtime linker may search by
@@ -322,7 +347,7 @@ def set_runtime_library_dirs(self, dirs):
         """
         self.runtime_library_dirs = dirs[:]
 
-    def add_link_object(self, object):
+    def add_link_object(self, object: str) -> None:
         """Add 'object' to the list of object files (or analogues, such as
         explicitly named library files or the output of "resource
         compilers") to be included in every link driven by this compiler
@@ -330,7 +355,7 @@ def add_link_object(self, object):
         """
         self.objects.append(object)
 
-    def set_link_objects(self, objects):
+    def set_link_objects(self, objects: list[str]) -> None:
         """Set the list of object files (or analogues) to be included in
         every link to 'objects'.  This does not affect any standard object
         files that the linker may include by default (such as system
@@ -343,7 +368,15 @@ def set_link_objects(self, objects):
 
     # Helper method to prep compiler in subclass compile() methods
 
-    def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra):
+    def _setup_compile(
+        self,
+        outdir: str | None,
+        macros: list[_Macro] | None,
+        incdirs: list[str] | tuple[str, ...] | None,
+        sources,
+        depends,
+        extra,
+    ):
         """Process arguments and decide which source files to compile."""
         outdir, macros, incdirs = self._fix_compile_args(outdir, macros, incdirs)
 
@@ -375,7 +408,12 @@ def _get_cc_args(self, pp_opts, debug, before):
             cc_args[:0] = before
         return cc_args
 
-    def _fix_compile_args(self, output_dir, macros, include_dirs):
+    def _fix_compile_args(
+        self,
+        output_dir: str | None,
+        macros: list[_Macro] | None,
+        include_dirs: list[str] | tuple[str, ...] | None,
+    ) -> tuple[str, list[_Macro], list[str]]:
         """Typecheck and fix-up some of the arguments to the 'compile()'
         method, and return fixed-up values.  Specifically: if 'output_dir'
         is None, replaces it with 'self.output_dir'; ensures that 'macros'
@@ -425,7 +463,9 @@ def _prep_compile(self, sources, output_dir, depends=None):
         # return value to preserve API compatibility.
         return objects, {}
 
-    def _fix_object_args(self, objects, output_dir):
+    def _fix_object_args(
+        self, objects: list[str] | tuple[str, ...], output_dir: str | None
+    ) -> tuple[list[str], str]:
         """Typecheck and fix up some arguments supplied to various methods.
         Specifically: ensure that 'objects' is a list; if output_dir is
         None, replace with self.output_dir.  Return fixed versions of
@@ -442,7 +482,12 @@ def _fix_object_args(self, objects, output_dir):
 
         return (objects, output_dir)
 
-    def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+    def _fix_lib_args(
+        self,
+        libraries: list[str] | tuple[str, ...] | None,
+        library_dirs: list[str] | tuple[str, ...] | None,
+        runtime_library_dirs: list[str] | tuple[str, ...] | None,
+    ) -> tuple[list[str], list[str], list[str]]:
         """Typecheck and fix up some of the arguments supplied to the
         'link_*' methods.  Specifically: ensure that all arguments are
         lists, and augment them with their permanent versions
@@ -492,7 +537,7 @@ def _need_link(self, objects, output_file):
                 newer = newer_group(objects, output_file)
             return newer
 
-    def detect_language(self, sources):
+    def detect_language(self, sources: str | list[str]) -> str | None:
         """Detect the language of a given file, or list of files. Uses
         language_map, and language_order to do the job.
         """
@@ -517,12 +562,12 @@ def detect_language(self, sources):
 
     def preprocess(
         self,
-        source,
-        output_file=None,
-        macros=None,
-        include_dirs=None,
-        extra_preargs=None,
-        extra_postargs=None,
+        source: str | os.PathLike[str],
+        output_file: str | os.PathLike[str] | None = None,
+        macros: list[_Macro] | None = None,
+        include_dirs: list[str] | tuple[str, ...] | None = None,
+        extra_preargs: list[str] | None = None,
+        extra_postargs: Iterable[str] | None = None,
     ):
         """Preprocess a single C/C++ source file, named in 'source'.
         Output will be written to file named 'output_file', or stdout if
@@ -537,15 +582,15 @@ def preprocess(
 
     def compile(
         self,
-        sources,
-        output_dir=None,
-        macros=None,
-        include_dirs=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        depends=None,
-    ):
+        sources: Sequence[str | os.PathLike[str]],
+        output_dir: str | None = None,
+        macros: list[_Macro] | None = None,
+        include_dirs: list[str] | tuple[str, ...] | None = None,
+        debug: bool = False,
+        extra_preargs: list[str] | None = None,
+        extra_postargs: list[str] | None = None,
+        depends: list[str] | tuple[str, ...] | None = None,
+    ) -> list[str]:
         """Compile one or more source files.
 
         'sources' must be a list of filenames, most likely C/C++
@@ -618,8 +663,13 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
         pass
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
-    ):
+        self,
+        objects: list[str] | tuple[str, ...],
+        output_libname: str,
+        output_dir: str | None = None,
+        debug: bool = False,
+        target_lang: str | None = None,
+    ) -> None:
         """Link a bunch of stuff together to create a static library file.
         The "bunch of stuff" consists of the list of object files supplied
         as 'objects', the extra object files supplied to
@@ -651,19 +701,19 @@ def create_static_lib(
 
     def link(
         self,
-        target_desc,
-        objects,
-        output_filename,
-        output_dir=None,
-        libraries=None,
-        library_dirs=None,
-        runtime_library_dirs=None,
-        export_symbols=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        build_temp=None,
-        target_lang=None,
+        target_desc: str,
+        objects: list[str] | tuple[str, ...],
+        output_filename: str,
+        output_dir: str | None = None,
+        libraries: list[str] | tuple[str, ...] | None = None,
+        library_dirs: list[str] | tuple[str, ...] | None = None,
+        runtime_library_dirs: list[str] | tuple[str, ...] | None = None,
+        export_symbols: Iterable[str] | None = None,
+        debug: bool = False,
+        extra_preargs: list[str] | None = None,
+        extra_postargs: list[str] | None = None,
+        build_temp: str | os.PathLike[str] | None = None,
+        target_lang: str | None = None,
     ):
         """Link a bunch of stuff together to create an executable or
         shared library file.
@@ -714,18 +764,18 @@ def link(
 
     def link_shared_lib(
         self,
-        objects,
-        output_libname,
-        output_dir=None,
-        libraries=None,
-        library_dirs=None,
-        runtime_library_dirs=None,
-        export_symbols=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        build_temp=None,
-        target_lang=None,
+        objects: list[str] | tuple[str, ...],
+        output_libname: str,
+        output_dir: str | None = None,
+        libraries: list[str] | tuple[str, ...] | None = None,
+        library_dirs: list[str] | tuple[str, ...] | None = None,
+        runtime_library_dirs: list[str] | tuple[str, ...] | None = None,
+        export_symbols: Iterable[str] | None = None,
+        debug: bool = False,
+        extra_preargs: list[str] | None = None,
+        extra_postargs: list[str] | None = None,
+        build_temp: str | os.PathLike[str] | None = None,
+        target_lang: str | None = None,
     ):
         self.link(
             Compiler.SHARED_LIBRARY,
@@ -745,18 +795,18 @@ def link_shared_lib(
 
     def link_shared_object(
         self,
-        objects,
-        output_filename,
-        output_dir=None,
-        libraries=None,
-        library_dirs=None,
-        runtime_library_dirs=None,
-        export_symbols=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        build_temp=None,
-        target_lang=None,
+        objects: list[str] | tuple[str, ...],
+        output_filename: str,
+        output_dir: str | None = None,
+        libraries: list[str] | tuple[str, ...] | None = None,
+        library_dirs: list[str] | tuple[str, ...] | None = None,
+        runtime_library_dirs: list[str] | tuple[str, ...] | None = None,
+        export_symbols: Iterable[str] | None = None,
+        debug: bool = False,
+        extra_preargs: list[str] | None = None,
+        extra_postargs: list[str] | None = None,
+        build_temp: str | os.PathLike[str] | None = None,
+        target_lang: str | None = None,
     ):
         self.link(
             Compiler.SHARED_OBJECT,
@@ -776,16 +826,16 @@ def link_shared_object(
 
     def link_executable(
         self,
-        objects,
-        output_progname,
-        output_dir=None,
-        libraries=None,
-        library_dirs=None,
-        runtime_library_dirs=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        target_lang=None,
+        objects: list[str] | tuple[str, ...],
+        output_progname: str,
+        output_dir: str | None = None,
+        libraries: list[str] | tuple[str, ...] | None = None,
+        library_dirs: list[str] | tuple[str, ...] | None = None,
+        runtime_library_dirs: list[str] | tuple[str, ...] | None = None,
+        debug: bool = False,
+        extra_preargs: list[str] | None = None,
+        extra_postargs: list[str] | None = None,
+        target_lang: str | None = None,
     ):
         self.link(
             Compiler.EXECUTABLE,
@@ -808,19 +858,19 @@ def link_executable(
     # no appropriate default implementation so subclasses should
     # implement all of these.
 
-    def library_dir_option(self, dir):
+    def library_dir_option(self, dir: str) -> str:
         """Return the compiler option to add 'dir' to the list of
         directories searched for libraries.
         """
         raise NotImplementedError
 
-    def runtime_library_dir_option(self, dir):
+    def runtime_library_dir_option(self, dir: str) -> str:
         """Return the compiler option to add 'dir' to the list of
         directories searched for runtime libraries.
         """
         raise NotImplementedError
 
-    def library_option(self, lib):
+    def library_option(self, lib: str) -> str:
         """Return the compiler option to add 'lib' to the list of libraries
         linked into the shared library or executable.
         """
@@ -828,12 +878,12 @@ def library_option(self, lib):
 
     def has_function(  # noqa: C901
         self,
-        funcname,
-        includes=None,
-        include_dirs=None,
-        libraries=None,
-        library_dirs=None,
-    ):
+        funcname: str,
+        includes: Iterable[str] | None = None,
+        include_dirs: list[str] | tuple[str, ...] | None = None,
+        libraries: list[str] | None = None,
+        library_dirs: list[str] | tuple[str, ...] | None = None,
+    ) -> bool:
         """Return a boolean indicating whether funcname is provided as
         a symbol on the current platform.  The optional arguments can
         be used to augment the compilation environment.
@@ -916,7 +966,9 @@ def has_function(  # noqa: C901
                 os.remove(fn)
         return True
 
-    def find_library_file(self, dirs, lib, debug=False):
+    def find_library_file(
+        self, dirs: Iterable[str], lib: str, debug: bool = False
+    ) -> str | None:
         """Search the specified list of directories for a static or shared
         library file 'lib' and return the full path to that file.  If
         'debug' is true, look for a debugging version (if that makes sense on
@@ -959,7 +1011,12 @@ def find_library_file(self, dirs, lib, debug=False):
     #   * exe_extension -
     #     extension for executable files, eg. '' or '.exe'
 
-    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
+    def object_filenames(
+        self,
+        source_filenames: Iterable[str | os.PathLike[str]],
+        strip_dir: bool = False,
+        output_dir: str | os.PathLike[str] | None = '',
+    ) -> list[str]:
         if output_dir is None:
             output_dir = ''
         return list(
@@ -1000,13 +1057,51 @@ def _make_out_path_exts(cls, output_dir, strip_dir, src_name, extensions):
     def _make_relative(base: pathlib.Path):
         return base.relative_to(base.anchor)
 
-    def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
+    @overload
+    def shared_object_filename(
+        self,
+        basename: str,
+        strip_dir: Literal[False] = False,
+        output_dir: str | os.PathLike[str] = "",
+    ) -> str: ...
+    @overload
+    def shared_object_filename(
+        self,
+        basename: str | os.PathLike[str],
+        strip_dir: Literal[True],
+        output_dir: str | os.PathLike[str] = "",
+    ) -> str: ...
+    def shared_object_filename(
+        self,
+        basename: str | os.PathLike[str],
+        strip_dir: bool = False,
+        output_dir: str | os.PathLike[str] = '',
+    ) -> str:
         assert output_dir is not None
         if strip_dir:
             basename = os.path.basename(basename)
         return os.path.join(output_dir, basename + self.shared_lib_extension)
 
-    def executable_filename(self, basename, strip_dir=False, output_dir=''):
+    @overload
+    def executable_filename(
+        self,
+        basename: str,
+        strip_dir: Literal[False] = False,
+        output_dir: str | os.PathLike[str] = "",
+    ) -> str: ...
+    @overload
+    def executable_filename(
+        self,
+        basename: str | os.PathLike[str],
+        strip_dir: Literal[True],
+        output_dir: str | os.PathLike[str] = "",
+    ) -> str: ...
+    def executable_filename(
+        self,
+        basename: str | os.PathLike[str],
+        strip_dir: bool = False,
+        output_dir: str | os.PathLike[str] = '',
+    ) -> str:
         assert output_dir is not None
         if strip_dir:
             basename = os.path.basename(basename)
@@ -1014,10 +1109,10 @@ def executable_filename(self, basename, strip_dir=False, output_dir=''):
 
     def library_filename(
         self,
-        libname,
-        lib_type='static',
-        strip_dir=False,
-        output_dir='',  # or 'shared'
+        libname: str,
+        lib_type: str = "static",
+        strip_dir: bool = False,
+        output_dir: str | os.PathLike[str] = "",  # or 'shared'
     ):
         assert output_dir is not None
         expected = '"static", "shared", "dylib", "xcode_stub"'
@@ -1035,25 +1130,45 @@ def library_filename(
 
     # -- Utility methods -----------------------------------------------
 
-    def announce(self, msg, level=1):
+    def announce(self, msg: object, level: int = 1) -> None:
         log.debug(msg)
 
-    def debug_print(self, msg):
+    def debug_print(self, msg: object) -> None:
         from distutils.debug import DEBUG
 
         if DEBUG:
             print(msg)
 
-    def warn(self, msg):
+    def warn(self, msg: object) -> None:
         sys.stderr.write(f"warning: {msg}\n")
 
-    def execute(self, func, args, msg=None, level=1):
+    def execute(
+        self,
+        func: Callable[[Unpack[_Ts]], object],
+        args: tuple[Unpack[_Ts]],
+        msg: object = None,
+        level: int = 1,
+    ) -> None:
         execute(func, args, msg, self.dry_run)
 
-    def spawn(self, cmd, **kwargs):
+    def spawn(
+        self, cmd: MutableSequence[bytes | str | os.PathLike[str]], **kwargs
+    ) -> None:
         spawn(cmd, dry_run=self.dry_run, **kwargs)
 
-    def move_file(self, src, dst):
+    @overload
+    def move_file(
+        self, src: str | os.PathLike[str], dst: _StrPathT
+    ) -> _StrPathT | str: ...
+    @overload
+    def move_file(
+        self, src: bytes | os.PathLike[bytes], dst: _BytesPathT
+    ) -> _BytesPathT | bytes: ...
+    def move_file(
+        self,
+        src: str | os.PathLike[str] | bytes | os.PathLike[bytes],
+        dst: str | os.PathLike[str] | bytes | os.PathLike[bytes],
+    ) -> str | os.PathLike[str] | bytes | os.PathLike[bytes]:
         return move_file(src, dst, dry_run=self.dry_run)
 
     def mkpath(self, name, mode=0o777):
@@ -1076,7 +1191,7 @@ def mkpath(self, name, mode=0o777):
 )
 
 
-def get_default_compiler(osname=None, platform=None):
+def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str:
     """Determine the default compiler to use for the given platform.
 
     osname should be one of the standard Python OS names (i.e. the
@@ -1125,7 +1240,7 @@ def get_default_compiler(osname=None, platform=None):
 }
 
 
-def show_compilers():
+def show_compilers() -> None:
     """Print list of available compilers (used by the "--help-compiler"
     options to "build", "build_ext", "build_clib").
     """
@@ -1142,7 +1257,13 @@ def show_compilers():
     pretty_printer.print_help("List of available compilers:")
 
 
-def new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=False):
+def new_compiler(
+    plat: str | None = None,
+    compiler: str | None = None,
+    verbose: bool = False,
+    dry_run: bool = False,
+    force: bool = False,
+) -> Compiler:
     """Generate an instance of some CCompiler subclass for the supplied
     platform/compiler combination.  'plat' defaults to 'os.name'
     (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
@@ -1188,7 +1309,9 @@ def new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=F
     return klass(None, dry_run, force)
 
 
-def gen_preprocess_options(macros, include_dirs):
+def gen_preprocess_options(
+    macros: Iterable[_Macro], include_dirs: Iterable[str]
+) -> list[str]:
     """Generate C pre-processor options (-D, -U, -I) as used by at least
     two types of compilers: the typical Unix compiler and Visual C++.
     'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
@@ -1232,7 +1355,12 @@ def gen_preprocess_options(macros, include_dirs):
     return pp_opts
 
 
-def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
+def gen_lib_options(
+    compiler: Compiler,
+    library_dirs: Iterable[str],
+    runtime_library_dirs: Iterable[str],
+    libraries: Iterable[str],
+) -> list[str]:
     """Generate linker options for searching library directories and
     linking with specific libraries.  'libraries' and 'library_dirs' are,
     respectively, lists of library names (not filenames!) and search
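
A note on the Literal-typed overloads above: they encode a value-dependent contract. With the default strip_dir=False the basename is concatenated directly, so only a plain str is accepted; strip_dir=True routes the argument through os.path.basename(), which also reduces a PathLike to str, so the wider type is allowed there. A minimal, self-contained sketch of the same pattern (the Widget class and object_filename name are hypothetical, not distutils API):

    from __future__ import annotations

    import os
    from typing import Literal, overload

    class Widget:
        extension = ".o"

        @overload
        def object_filename(self, basename: str, strip_dir: Literal[False] = False) -> str: ...
        @overload
        def object_filename(self, basename: str | os.PathLike[str], strip_dir: Literal[True]) -> str: ...
        def object_filename(self, basename: str | os.PathLike[str], strip_dir: bool = False) -> str:
            if strip_dir:
                # os.path.basename() accepts a PathLike[str] and returns str
                basename = os.path.basename(basename)
            return os.fspath(basename) + self.extension

Under this sketch a checker accepts Widget().object_filename(pathlib.Path("x"), strip_dir=True) but rejects the same call without strip_dir, which is the behavior the shared_object_filename overloads aim for.
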
diff --git a/distutils/compilers/C/msvc.py b/distutils/compilers/C/msvc.py
index 2bdc6576e4..ea1a342dfe 100644
--- a/distutils/compilers/C/msvc.py
+++ b/distutils/compilers/C/msvc.py
@@ -17,6 +17,7 @@
 import subprocess
 import unittest.mock as mock
 import warnings
+from collections.abc import Iterable
 
 with contextlib.suppress(ImportError):
     import winreg
@@ -255,10 +256,10 @@ class Compiler(base.Compiler):
     obj_extension = '.obj'
     static_lib_extension = '.lib'
     shared_lib_extension = '.dll'
-    static_lib_format = shared_lib_format = '%s%s'
+    static_lib_format = shared_lib_format = '%s%s'
     exe_extension = '.exe'
 
-    def __init__(self, verbose=False, dry_run=False, force=False):
+    def __init__(self, verbose=False, dry_run=False, force=False) -> None:
         super().__init__(verbose, dry_run, force)
         # target platform (.plat_name is consistent with 'bdist')
         self.plat_name = None
@@ -276,7 +277,7 @@ def _configure(cls, vc_env):
     def _parse_path(val):
         return [dir.rstrip(os.sep) for dir in val.split(os.pathsep) if dir]
 
-    def initialize(self, plat_name=None):
+    def initialize(self, plat_name: str | None = None) -> None:
         # multi-init means we would need to check platform same each time...
         assert not self.initialized, "don't init multiple times"
         if plat_name is None:
@@ -358,7 +359,7 @@ def initialize(self, plat_name=None):
     # -- Worker methods ------------------------------------------------
 
     @property
-    def out_extensions(self):
+    def out_extensions(self) -> dict[str, str]:
         return {
             **super().out_extensions,
             **{
@@ -462,8 +463,13 @@ def compile(  # noqa: C901
         return objects
 
     def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
-    ):
+        self,
+        objects: list[str] | tuple[str, ...],
+        output_libname: str,
+        output_dir: str | None = None,
+        debug: bool = False,
+        target_lang: str | None = None,
+    ) -> None:
         if not self.initialized:
             self.initialize()
         objects, output_dir = self._fix_object_args(objects, output_dir)
@@ -483,20 +489,20 @@ def create_static_lib(
 
     def link(
         self,
-        target_desc,
-        objects,
-        output_filename,
-        output_dir=None,
-        libraries=None,
-        library_dirs=None,
-        runtime_library_dirs=None,
-        export_symbols=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        build_temp=None,
-        target_lang=None,
-    ):
+        target_desc: str,
+        objects: list[str] | tuple[str, ...],
+        output_filename: str,
+        output_dir: str | None = None,
+        libraries: list[str] | tuple[str, ...] | None = None,
+        library_dirs: list[str] | tuple[str, ...] | None = None,
+        runtime_library_dirs: list[str] | tuple[str, ...] | None = None,
+        export_symbols: Iterable[str] | None = None,
+        debug: bool = False,
+        extra_preargs: list[str] | None = None,
+        extra_postargs: Iterable[str] | None = None,
+        build_temp: str | os.PathLike[str] | None = None,
+        target_lang: str | None = None,
+    ) -> None:
         if not self.initialized:
             self.initialize()
         objects, output_dir = self._fix_object_args(objects, output_dir)
diff --git a/distutils/compilers/C/unix.py b/distutils/compilers/C/unix.py
index 1ba93e6a96..4bf709cb28 100644
--- a/distutils/compilers/C/unix.py
+++ b/distutils/compilers/C/unix.py
@@ -20,6 +20,7 @@
 import re
 import shlex
 import sys
+from collections.abc import Iterable
 
 from ... import sysconfig
 from ..._log import log
@@ -28,7 +29,7 @@
 from ...compat import consolidate_linker_args
 from ...errors import DistutilsExecError
 from . import base
-from .base import gen_lib_options, gen_preprocess_options
+from .base import _Macro, gen_lib_options, gen_preprocess_options
 from .errors import (
     CompileError,
     LibError,
@@ -159,12 +160,12 @@ class Compiler(base.Compiler):
 
     def preprocess(
         self,
-        source,
-        output_file=None,
-        macros=None,
-        include_dirs=None,
-        extra_preargs=None,
-        extra_postargs=None,
+        source: str | os.PathLike[str],
+        output_file: str | os.PathLike[str] | None = None,
+        macros: list[_Macro] | None = None,
+        include_dirs: list[str] | tuple[str, ...] | None = None,
+        extra_preargs: list[str] | None = None,
+        extra_postargs: Iterable[str] | None = None,
     ):
         fixed_args = self._fix_compile_args(None, macros, include_dirs)
         ignore, macros, include_dirs = fixed_args
@@ -234,12 +235,12 @@ def create_static_lib(
     def link(
         self,
         target_desc,
-        objects,
+        objects: list[str] | tuple[str, ...],
         output_filename,
-        output_dir=None,
-        libraries=None,
-        library_dirs=None,
-        runtime_library_dirs=None,
+        output_dir: str | None = None,
+        libraries: list[str] | tuple[str, ...] | None = None,
+        library_dirs: list[str] | tuple[str, ...] | None = None,
+        runtime_library_dirs: list[str] | tuple[str, ...] | None = None,
         export_symbols=None,
         debug=False,
         extra_preargs=None,
diff --git a/distutils/dist.py b/distutils/dist.py
index 33ed8ebd7a..72bd13ce30 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -13,9 +13,21 @@
 import re
 import sys
 import warnings
-from collections.abc import Iterable
+from collections.abc import Iterable, MutableMapping
 from email import message_from_file
-from typing import TYPE_CHECKING, Literal, TypeVar, overload
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    ClassVar,
+    List,
+    Literal,
+    Tuple,
+    TypeAlias,
+    TypeVar,
+    Union,
+    overload,
+)
 
 from packaging.utils import canonicalize_name, canonicalize_version
 
@@ -31,10 +43,16 @@
 from .util import check_environ, rfc822_escape, strtobool
 
 if TYPE_CHECKING:
+    from _typeshed import SupportsWrite
+
     # type-only import because of mutual dependence between these modules
     from .cmd import Command
 
 _CommandT = TypeVar("_CommandT", bound="Command")
+_OptionsList: TypeAlias = List[
+    Tuple[str, Union[str, None], str, int] | Tuple[str, Union[str, None], str]
+]
+
 
 # Regex to define acceptable Distutils command names.  This is not *quite*
 # the same as a Python NAME -- I don't allow leading underscores.  The fact
@@ -43,7 +61,7 @@
 command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
 
 
-def _ensure_list(value, fieldname):
+def _ensure_list(value: str | Iterable[str], fieldname) -> str | list[str]:
     if isinstance(value, str):
         # a string containing comma separated values is okay.  It will
         # be converted to a list by Distribution.finalize_options().
@@ -80,7 +98,7 @@ class Distribution:
     # don't want to pollute the commands with too many options that they
     # have minimal control over.
     # The fourth entry for verbose means that it can be repeated.
-    global_options = [
+    global_options: ClassVar[_OptionsList] = [
         ('verbose', 'v', "run verbosely (default)", 1),
         ('quiet', 'q', "run quietly (turns verbosity off)"),
         ('dry-run', 'n', "don't actually do anything"),
@@ -90,7 +108,7 @@ class Distribution:
 
     # 'common_usage' is a short (2-3 line) string describing the common
     # usage of the setup script.
-    common_usage = """\
+    common_usage: ClassVar[str] = """\
 Common commands: (see '--help-commands' for more)
 
   setup.py build      will build the package underneath 'build/'
@@ -98,7 +116,7 @@ class Distribution:
 """
 
     # options that are not propagated to the commands
-    display_options = [
+    display_options: ClassVar[_OptionsList] = [
         ('help-commands', None, "list all available commands"),
         ('name', None, "print package name"),
         ('version', 'V', "print package version"),
@@ -125,14 +143,17 @@ class Distribution:
         ('requires', None, "print the list of packages/modules required"),
         ('obsoletes', None, "print the list of packages/modules made obsolete"),
     ]
-    display_option_names = [translate_longopt(x[0]) for x in display_options]
+    display_option_names: ClassVar[list[str]] = [
+        translate_longopt(x[0]) for x in display_options
+    ]
 
     # negative options are options that exclude other options
-    negative_opt = {'quiet': 'verbose'}
+    negative_opt: ClassVar[dict[str, str]] = {'quiet': 'verbose'}
 
     # -- Creation/initialization methods -------------------------------
 
-    def __init__(self, attrs=None):  # noqa: C901
+    # Can't Unpack a TypedDict with optional properties, so using Any instead
+    def __init__(self, attrs: MutableMapping[str, Any] | None = None) -> None:  # noqa: C901
         """Construct a new Distribution instance: initialize all the
         attributes of a Distribution, and then use 'attrs' (a dictionary
         mapping attribute names to values) to assign some of those
@@ -172,12 +193,12 @@ def __init__(self, attrs=None):  # noqa: C901
         # named here.  This list is searched from the left; an error
         # is raised if no named package provides the command being
         # searched for.  (Always access using get_command_packages().)
-        self.command_packages = None
+        self.command_packages: str | list[str] | None = None
 
         # 'script_name' and 'script_args' are usually set to sys.argv[0]
         # and sys.argv[1:], but they can be overridden when the caller is
         # not necessarily a setup script run from the command-line.
-        self.script_name = None
+        self.script_name: str | os.PathLike[str] | None = None
         self.script_args: list[str] | None = None
 
         # 'command_options' is where we store command options between
@@ -185,7 +206,7 @@ def __init__(self, attrs=None):  # noqa: C901
         # they are actually needed -- ie. when the command in question is
         # instantiated.  It is a dictionary of dictionaries of 2-tuples:
         #   command_options = { command_name : { option : (source, value) } }
-        self.command_options = {}
+        self.command_options: dict[str, dict[str, tuple[str, str]]] = {}
 
         # 'dist_files' is the list of (command, pyversion, file) that
         # have been created by any dist commands run so far. This is
@@ -196,13 +217,13 @@ def __init__(self, attrs=None):  # noqa: C901
         # file. pyversion should not be used to specify minimum or
         # maximum required Python versions; use the metainfo for that
         # instead.
-        self.dist_files = []
+        self.dist_files: list[tuple[str, str, str]] = []
 
         # These options are really the business of various commands, rather
         # than of the Distribution itself.  We provide aliases for them in
         # Distribution as a convenience to the developer.
         self.packages = None
-        self.package_data = {}
+        self.package_data: dict[str, list[str]] = {}
         self.package_dir = None
         self.py_modules = None
         self.libraries = None
@@ -219,7 +240,7 @@ def __init__(self, attrs=None):  # noqa: C901
         # the caller at all.  'command_obj' maps command names to
         # Command instances -- that's how we enforce that every command
         # class is a singleton.
-        self.command_obj = {}
+        self.command_obj: dict[str, Command] = {}
 
         # 'have_run' maps command names to boolean values; it keeps track
         # of whether we have actually run a particular command, to make it
@@ -231,7 +252,7 @@ def __init__(self, attrs=None):  # noqa: C901
         # command object is created, and replaced with a true value when
         # the command is successfully run.  Thus it's probably best to use
         # '.get()' rather than a straight lookup.
-        self.have_run = {}
+        self.have_run: dict[str, bool] = {}
 
         # Now we'll use the attrs dictionary (ultimately, keyword args from
         # the setup script) to possibly override any or all of these
@@ -300,7 +321,7 @@ def get_option_dict(self, command):
             dict = self.command_options[command] = {}
         return dict
 
-    def dump_option_dicts(self, header=None, commands=None, indent=""):
+    def dump_option_dicts(self, header=None, commands=None, indent: str = "") -> None:
         from pprint import pformat
 
         if commands is None:  # dump all command option dicts
@@ -615,7 +636,7 @@ def _parse_command_opts(self, parser, args):  # noqa: C901
 
         return args
 
-    def finalize_options(self):
+    def finalize_options(self) -> None:
         """Set final values for all the options on the Distribution
         instance, analogous to the .finalize_options() method of Command
         objects.
@@ -718,7 +739,7 @@ def handle_display_options(self, option_order):
 
         return any_display_options
 
-    def print_command_list(self, commands, header, max_length):
+    def print_command_list(self, commands, header, max_length) -> None:
         """Print a subset of the list of all commands -- used by
         'print_commands()'.
         """
@@ -735,7 +756,7 @@ def print_command_list(self, commands, header, max_length):
 
             print(f"  {cmd:<{max_length}}  {description}")
 
-    def print_commands(self):
+    def print_commands(self) -> None:
         """Print out a help message listing all available commands with a
         description of each.  The list is divided into "standard commands"
         (listed in distutils.command.__all__) and "extra commands"
@@ -802,7 +823,7 @@ def get_command_packages(self):
             self.command_packages = pkgs
         return pkgs
 
-    def get_command_class(self, command):
+    def get_command_class(self, command: str) -> type[Command]:
         """Return the class that implements the Distutils command named by
         'command'.  First we check the 'cmdclass' dictionary; if the
         command is mentioned there, we fetch the class object from the
@@ -971,10 +992,10 @@ def reinitialize_command(
 
     # -- Methods that operate on the Distribution ----------------------
 
-    def announce(self, msg, level=logging.INFO):
+    def announce(self, msg, level: int = logging.INFO) -> None:
         log.log(level, msg)
 
-    def run_commands(self):
+    def run_commands(self) -> None:
         """Run each command that was seen on the setup script command line.
         Uses the list of commands found and cache of command objects
         created by 'get_command_obj()'.
@@ -984,7 +1005,7 @@ def run_commands(self):
 
     # -- Methods that operate on its Commands --------------------------
 
-    def run_command(self, command):
+    def run_command(self, command: str) -> None:
         """Do whatever it takes to run a command (including nothing at all,
         if the command has already been run).  Specifically: if we have
         already created and run the command named by 'command', return
@@ -1004,28 +1025,28 @@ def run_command(self, command):
 
     # -- Distribution query methods ------------------------------------
 
-    def has_pure_modules(self):
+    def has_pure_modules(self) -> bool:
         return len(self.packages or self.py_modules or []) > 0
 
-    def has_ext_modules(self):
+    def has_ext_modules(self) -> bool:
         return self.ext_modules and len(self.ext_modules) > 0
 
-    def has_c_libraries(self):
+    def has_c_libraries(self) -> bool:
         return self.libraries and len(self.libraries) > 0
 
-    def has_modules(self):
+    def has_modules(self) -> bool:
         return self.has_pure_modules() or self.has_ext_modules()
 
-    def has_headers(self):
+    def has_headers(self) -> bool:
         return self.headers and len(self.headers) > 0
 
-    def has_scripts(self):
+    def has_scripts(self) -> bool:
         return self.scripts and len(self.scripts) > 0
 
-    def has_data_files(self):
+    def has_data_files(self) -> bool:
         return self.data_files and len(self.data_files) > 0
 
-    def is_pure(self):
+    def is_pure(self) -> bool:
         return (
             self.has_pure_modules()
             and not self.has_ext_modules()
@@ -1038,6 +1059,53 @@ def is_pure(self):
     # they are defined in a sneaky way: the constructor binds self.get_XXX
     # to self.metadata.get_XXX.  The actual code is in the
     # DistributionMetadata class, below.
+    if TYPE_CHECKING:
+        # Unfortunately this means we need to specify them manually, or not expose them statically
+        def _(self) -> None:
+            self.get_name = self.metadata.get_name
+            self.get_version = self.metadata.get_version
+            self.get_fullname = self.metadata.get_fullname
+            self.get_author = self.metadata.get_author
+            self.get_author_email = self.metadata.get_author_email
+            self.get_maintainer = self.metadata.get_maintainer
+            self.get_maintainer_email = self.metadata.get_maintainer_email
+            self.get_contact = self.metadata.get_contact
+            self.get_contact_email = self.metadata.get_contact_email
+            self.get_url = self.metadata.get_url
+            self.get_license = self.metadata.get_license
+            self.get_licence = self.metadata.get_licence
+            self.get_description = self.metadata.get_description
+            self.get_long_description = self.metadata.get_long_description
+            self.get_keywords = self.metadata.get_keywords
+            self.get_platforms = self.metadata.get_platforms
+            self.get_classifiers = self.metadata.get_classifiers
+            self.get_download_url = self.metadata.get_download_url
+            self.get_requires = self.metadata.get_requires
+            self.get_provides = self.metadata.get_provides
+            self.get_obsoletes = self.metadata.get_obsoletes
+
+        # Default attributes generated in __init__ from self.display_option_names
+        help_commands: bool
+        name: str | Literal[False]
+        version: str | Literal[False]
+        fullname: str | Literal[False]
+        author: str | Literal[False]
+        author_email: str | Literal[False]
+        maintainer: str | Literal[False]
+        maintainer_email: str | Literal[False]
+        contact: str | Literal[False]
+        contact_email: str | Literal[False]
+        url: str | Literal[False]
+        license: str | Literal[False]
+        licence: str | Literal[False]
+        description: str | Literal[False]
+        long_description: str | Literal[False]
+        platforms: str | list[str] | Literal[False]
+        classifiers: str | list[str] | Literal[False]
+        keywords: str | list[str] | Literal[False]
+        provides: list[str] | Literal[False]
+        requires: list[str] | Literal[False]
+        obsoletes: list[str] | Literal[False]
 
 
 class DistributionMetadata:
@@ -1069,34 +1137,36 @@ class DistributionMetadata:
         "obsoletes",
     )
 
-    def __init__(self, path=None):
+    def __init__(
+        self, path: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None
+    ) -> None:
         if path is not None:
             self.read_pkg_file(open(path))
         else:
-            self.name = None
-            self.version = None
-            self.author = None
-            self.author_email = None
-            self.maintainer = None
-            self.maintainer_email = None
-            self.url = None
-            self.license = None
-            self.description = None
-            self.long_description = None
-            self.keywords = None
-            self.platforms = None
-            self.classifiers = None
-            self.download_url = None
+            self.name: str | None = None
+            self.version: str | None = None
+            self.author: str | None = None
+            self.author_email: str | None = None
+            self.maintainer: str | None = None
+            self.maintainer_email: str | None = None
+            self.url: str | None = None
+            self.license: str | None = None
+            self.description: str | None = None
+            self.long_description: str | None = None
+            self.keywords: str | list[str] | None = None
+            self.platforms: str | list[str] | None = None
+            self.classifiers: str | list[str] | None = None
+            self.download_url: str | None = None
             # PEP 314
-            self.provides = None
-            self.requires = None
-            self.obsoletes = None
+            self.provides: str | list[str] | None = None
+            self.requires: str | list[str] | None = None
+            self.obsoletes: str | list[str] | None = None
 
-    def read_pkg_file(self, file):
+    def read_pkg_file(self, file: IO[str]) -> None:
         """Reads the metadata values from a file object."""
         msg = message_from_file(file)
 
-        def _read_field(name):
+        def _read_field(name: str) -> str | None:
             value = msg[name]
             if value and value != "UNKNOWN":
                 return value
@@ -1143,14 +1213,14 @@ def _read_list(name):
             self.provides = None
             self.obsoletes = None
 
-    def write_pkg_info(self, base_dir):
+    def write_pkg_info(self, base_dir: str | os.PathLike[str]) -> None:
         """Write the PKG-INFO file into the release tree."""
         with open(
             os.path.join(base_dir, 'PKG-INFO'), 'w', encoding='UTF-8'
         ) as pkg_info:
             self.write_pkg_file(pkg_info)
 
-    def write_pkg_file(self, file):
+    def write_pkg_file(self, file: SupportsWrite[str]) -> None:
         """Write the PKG-INFO format data to a file object."""
         version = '1.0'
         if (
@@ -1196,13 +1266,13 @@ def _write_list(self, file, name, values):
 
     # -- Metadata query methods ----------------------------------------
 
-    def get_name(self):
+    def get_name(self) -> str:
         return self.name or "UNKNOWN"
 
-    def get_version(self):
+    def get_version(self) -> str:
         return self.version or "0.0.0"
 
-    def get_fullname(self):
+    def get_fullname(self) -> str:
         return self._fullname(self.get_name(), self.get_version())
 
     @staticmethod
@@ -1224,74 +1294,74 @@ def _fullname(name: str, version: str) -> str:
             canonicalize_version(version, strip_trailing_zero=False),
         )
 
-    def get_author(self):
+    def get_author(self) -> str | None:
         return self.author
 
-    def get_author_email(self):
+    def get_author_email(self) -> str | None:
         return self.author_email
 
-    def get_maintainer(self):
+    def get_maintainer(self) -> str | None:
         return self.maintainer
 
-    def get_maintainer_email(self):
+    def get_maintainer_email(self) -> str | None:
         return self.maintainer_email
 
-    def get_contact(self):
+    def get_contact(self) -> str | None:
         return self.maintainer or self.author
 
-    def get_contact_email(self):
+    def get_contact_email(self) -> str | None:
         return self.maintainer_email or self.author_email
 
-    def get_url(self):
+    def get_url(self) -> str | None:
         return self.url
 
-    def get_license(self):
+    def get_license(self) -> str | None:
         return self.license
 
     get_licence = get_license
 
-    def get_description(self):
+    def get_description(self) -> str | None:
         return self.description
 
-    def get_long_description(self):
+    def get_long_description(self) -> str | None:
         return self.long_description
 
-    def get_keywords(self):
+    def get_keywords(self) -> str | list[str]:
         return self.keywords or []
 
-    def set_keywords(self, value):
+    def set_keywords(self, value: str | Iterable[str]) -> None:
         self.keywords = _ensure_list(value, 'keywords')
 
-    def get_platforms(self):
+    def get_platforms(self) -> str | list[str] | None:
         return self.platforms
 
-    def set_platforms(self, value):
+    def set_platforms(self, value: str | Iterable[str]) -> None:
         self.platforms = _ensure_list(value, 'platforms')
 
-    def get_classifiers(self):
+    def get_classifiers(self) -> str | list[str]:
         return self.classifiers or []
 
-    def set_classifiers(self, value):
+    def set_classifiers(self, value: str | Iterable[str]) -> None:
         self.classifiers = _ensure_list(value, 'classifiers')
 
-    def get_download_url(self):
+    def get_download_url(self) -> str | None:
         return self.download_url
 
     # PEP 314
-    def get_requires(self):
+    def get_requires(self) -> str | list[str]:
         return self.requires or []
 
-    def set_requires(self, value):
+    def set_requires(self, value: Iterable[str]) -> None:
         import distutils.versionpredicate
 
         for v in value:
             distutils.versionpredicate.VersionPredicate(v)
         self.requires = list(value)
 
-    def get_provides(self):
+    def get_provides(self) -> str | list[str]:
         return self.provides or []
 
-    def set_provides(self, value):
+    def set_provides(self, value: Iterable[str]) -> None:
         value = [v.strip() for v in value]
         for v in value:
             import distutils.versionpredicate
@@ -1299,10 +1369,10 @@ def set_provides(self, value):
             distutils.versionpredicate.split_provision(v)
         self.provides = value
 
-    def get_obsoletes(self):
+    def get_obsoletes(self) -> str | list[str]:
         return self.obsoletes or []
 
-    def set_obsoletes(self, value):
+    def set_obsoletes(self, value: Iterable[str]) -> None:
         import distutils.versionpredicate
 
         for v in value:
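
The `if TYPE_CHECKING: def _(self)` block in Distribution deserves a comment: the get_XXX methods are really bound in __init__ by copying attributes off self.metadata, which no static checker can follow, so a method that never runs performs the same assignments purely to give the checker attribute types. A reduced sketch of the trick, with hypothetical Meta/Dist classes:

    from typing import TYPE_CHECKING

    class Meta:
        def get_name(self) -> str:
            return "example"

    class Dist:
        def __init__(self) -> None:
            self.metadata = Meta()
            # dynamic binding that static analysis cannot see:
            setattr(self, "get_name", self.metadata.get_name)

        if TYPE_CHECKING:
            # never executed; exists only so Dist().get_name() checks as () -> str
            def _(self) -> None:
                self.get_name = self.metadata.get_name
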
diff --git a/distutils/errors.py b/distutils/errors.py
index 7c6ee258e3..409d21faa2 100644
--- a/distutils/errors.py
+++ b/distutils/errors.py
@@ -6,18 +6,15 @@
 """
 
 # compiler exceptions aliased for compatibility
-from .compilers.C.errors import (
-    CompileError,  # noqa: F401
-    LibError,  # noqa: F401
-    LinkError,  # noqa: F401
-    PreprocessError,  # noqa: F401
-)
-from .compilers.C.errors import (
-    Error as CCompilerError,  # noqa: F401
-)
-from .compilers.C.errors import (
-    UnknownFileType as UnknownFileError,  # noqa: F401
-)
+from .compilers.C.errors import CompileError as CompileError
+from .compilers.C.errors import Error as _Error
+from .compilers.C.errors import LibError as LibError
+from .compilers.C.errors import LinkError as LinkError
+from .compilers.C.errors import PreprocessError as PreprocessError
+from .compilers.C.errors import UnknownFileType as _UnknownFileType
+
+CCompilerError = _Error
+UnknownFileError = _UnknownFileType
 
 
 class DistutilsError(Exception):
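
The rewritten errors.py relies on the redundant-alias convention: `from m import X as X` is the spelling that PEP 484 and strict re-export modes (e.g. mypy's --no-implicit-reexport) treat as a deliberate public re-export, which is why the `# noqa: F401` comments can be dropped, while `as _Name` keeps a name private and the compatibility aliases become plain assignments. The same shape against a stdlib module, with PoolBase as a hypothetical legacy name:

    # `Name as Name` marks a deliberate re-export; `as _Name` stays private.
    from concurrent.futures import ThreadPoolExecutor as ThreadPoolExecutor
    from concurrent.futures import Executor as _Executor

    # legacy public name kept as a simple alias, like CCompilerError above
    PoolBase = _Executor
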
diff --git a/distutils/extension.py b/distutils/extension.py
index e053273436..0885e33be3 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -5,6 +5,7 @@
 
 import os
 import warnings
+from collections.abc import Iterable
 
 # This class is really only used by the "build_ext" command, so it might
 # make sense to put it in distutils.command.build_ext.  However, that
@@ -88,22 +89,22 @@ class Extension:
     # setup_keywords in core.py.
     def __init__(
         self,
-        name,
-        sources,
-        include_dirs=None,
-        define_macros=None,
-        undef_macros=None,
-        library_dirs=None,
-        libraries=None,
-        runtime_library_dirs=None,
-        extra_objects=None,
-        extra_compile_args=None,
-        extra_link_args=None,
-        export_symbols=None,
-        swig_opts=None,
-        depends=None,
-        language=None,
-        optional=None,
+        name: str,
+        sources: Iterable[str | os.PathLike[str]],
+        include_dirs: list[str] | None = None,
+        define_macros: list[tuple[str, str | None]] | None = None,
+        undef_macros: list[str] | None = None,
+        library_dirs: list[str] | None = None,
+        libraries: list[str] | None = None,
+        runtime_library_dirs: list[str] | None = None,
+        extra_objects: list[str] | None = None,
+        extra_compile_args: list[str] | None = None,
+        extra_link_args: list[str] | None = None,
+        export_symbols: list[str] | None = None,
+        swig_opts: list[str] | None = None,
+        depends: list[str] | None = None,
+        language: str | None = None,
+        optional: bool | None = None,
         **kw,  # To catch unknown keywords
     ):
         if not isinstance(name, str):
diff --git a/distutils/filelist.py b/distutils/filelist.py
index 9857b19549..3b8f8e6970 100644
--- a/distutils/filelist.py
+++ b/distutils/filelist.py
@@ -8,6 +8,8 @@
 import functools
 import os
 import re
+from collections.abc import Iterable
+from typing import Literal, overload
 
 from ._log import log
 from .errors import DistutilsInternalError, DistutilsTemplateError
@@ -29,19 +31,19 @@ class FileList:
         filtering applied)
     """
 
-    def __init__(self, warn=None, debug_print=None):
+    def __init__(self, warn: object = None, debug_print: object = None) -> None:
         # ignore arguments to FileList, but keep them for backwards
         # compatibility
-        self.allfiles = None
-        self.files = []
+        self.allfiles: Iterable[str] | None = None
+        self.files: list[str] = []
 
-    def set_allfiles(self, allfiles):
+    def set_allfiles(self, allfiles: Iterable[str]) -> None:
         self.allfiles = allfiles
 
-    def findall(self, dir=os.curdir):
+    def findall(self, dir: str | os.PathLike[str] = os.curdir) -> None:
         self.allfiles = findall(dir)
 
-    def debug_print(self, msg):
+    def debug_print(self, msg: object) -> None:
         """Print 'msg' to stdout if the global DEBUG (taken from the
         DISTUTILS_DEBUG environment variable) flag is true.
         """
@@ -52,13 +54,13 @@ def debug_print(self, msg):
 
     # Collection methods
 
-    def append(self, item):
+    def append(self, item: str) -> None:
         self.files.append(item)
 
-    def extend(self, items):
+    def extend(self, items: Iterable[str]) -> None:
         self.files.extend(items)
 
-    def sort(self):
+    def sort(self) -> None:
         # Not a strict lexical sort!
         sortable_files = sorted(map(os.path.split, self.files))
         self.files = []
@@ -67,7 +69,7 @@ def sort(self):
 
     # Other miscellaneous utility methods
 
-    def remove_duplicates(self):
+    def remove_duplicates(self) -> None:
         # Assumes list has been sorted!
         for i in range(len(self.files) - 1, 0, -1):
             if self.files[i] == self.files[i - 1]:
@@ -105,7 +107,7 @@ def _parse_template_line(self, line):
 
         return (action, patterns, dir, dir_pattern)
 
-    def process_template_line(self, line):  # noqa: C901
+    def process_template_line(self, line: str) -> None:  # noqa: C901
         # Parse the line: split it up, make sure the right number of words
         # is there, and return the relevant words.  'action' is always
         # defined: it's the first word of the line.  Which of the other
@@ -193,8 +195,38 @@ def process_template_line(self, line):  # noqa: C901
             )
 
     # Filtering/selection methods
-
-    def include_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
+    @overload
+    def include_pattern(
+        self,
+        pattern: str,
+        anchor: bool = True,
+        prefix: str | None = None,
+        is_regex: Literal[False] = False,
+    ) -> bool: ...
+    @overload
+    def include_pattern(
+        self,
+        pattern: str | re.Pattern[str],
+        anchor: bool = True,
+        prefix: str | None = None,
+        *,
+        is_regex: Literal[True],
+    ) -> bool: ...
+    @overload
+    def include_pattern(
+        self,
+        pattern: str | re.Pattern[str],
+        anchor: bool,
+        prefix: str | None,
+        is_regex: Literal[True],
+    ) -> bool: ...
+    def include_pattern(
+        self,
+        pattern: str | re.Pattern,
+        anchor: bool = True,
+        prefix: str | None = None,
+        is_regex: bool = False,
+    ) -> bool:
         """Select strings (presumably filenames) from 'self.files' that
         match 'pattern', a Unix-style wildcard (glob) pattern.  Patterns
         are not quite the same as implemented by the 'fnmatch' module: '*'
@@ -235,7 +267,38 @@ def include_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
                 files_found = True
         return files_found
 
-    def exclude_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
+    @overload
+    def exclude_pattern(
+        self,
+        pattern: str,
+        anchor: bool = True,
+        prefix: str | None = None,
+        is_regex: Literal[False] = False,
+    ) -> bool: ...
+    @overload
+    def exclude_pattern(
+        self,
+        pattern: str | re.Pattern[str],
+        anchor: bool = True,
+        prefix: str | None = None,
+        *,
+        is_regex: Literal[True],
+    ) -> bool: ...
+    @overload
+    def exclude_pattern(
+        self,
+        pattern: str | re.Pattern[str],
+        anchor: bool,
+        prefix: str | None,
+        is_regex: Literal[True],
+    ) -> bool: ...
+    def exclude_pattern(
+        self,
+        pattern: str | re.Pattern,
+        anchor: bool = True,
+        prefix: str | None = None,
+        is_regex: bool = False,
+    ) -> bool:
         """Remove strings (presumably filenames) from 'files' that match
         'pattern'.  Other parameters are the same as for
         'include_pattern()', above.
@@ -294,7 +357,7 @@ def filter(cls, items):
         return filter(cls(), items)
 
 
-def findall(dir=os.curdir):
+def findall(dir: str | os.PathLike[str] = os.curdir):
     """
     Find all files under 'dir' and return the list of full filenames.
     Unless dir is '.', return full filenames with dir prepended.
diff --git a/distutils/spawn.py b/distutils/spawn.py
index ba280334d1..fc66281765 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -12,12 +12,16 @@
 import subprocess
 import sys
 import warnings
-from collections.abc import Mapping
+from collections.abc import Mapping, MutableSequence
+from subprocess import _ENV
+from typing import TypeVar, overload
 
 from ._log import log
 from .debug import DEBUG
 from .errors import DistutilsExecError
 
+_MappingT = TypeVar("_MappingT", bound=Mapping)
+
 
 def _debug(cmd):
     """
@@ -26,7 +30,7 @@ def _debug(cmd):
     return cmd if DEBUG else cmd[0]
 
 
-def _inject_macos_ver(env: Mapping[str:str] | None) -> Mapping[str:str] | None:
+def _inject_macos_ver(env: _MappingT | None) -> _MappingT | dict[str, str | int] | None:
     if platform.system() != 'Darwin':
         return env
 
@@ -37,11 +41,21 @@ def _inject_macos_ver(env: Mapping[str:str] | None) -> Mapping[str:str] | None:
     return {**_resolve(env), **update}
 
 
-def _resolve(env: Mapping[str:str] | None) -> Mapping[str:str]:
+@overload
+def _resolve(env: None) -> os._Environ[str]: ...
+@overload
+def _resolve(env: _MappingT) -> _MappingT: ...
+def _resolve(env: _MappingT | None) -> _MappingT | os._Environ[str]:
     return os.environ if env is None else env
 
 
-def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
+def spawn(
+    cmd: MutableSequence[bytes | str | os.PathLike[str]],
+    search_path: bool = True,
+    verbose: bool = False,
+    dry_run: bool = False,
+    env: _ENV | None = None,
+) -> None:
     """Run another program, specified as a command list 'cmd', in a new process.
 
     'cmd' is just the argument list for the new process, ie.
@@ -78,7 +92,7 @@ def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
         ) from err
 
 
-def find_executable(executable, path=None):
+def find_executable(executable: str, path: str | None = None) -> str | None:
     """Tries to find 'executable' in the directories listed in 'path'.
 
     A string listing directories separated by 'os.pathsep'; defaults to
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index ef3def83eb..c27e7799cd 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -15,13 +15,23 @@
 import re
 import sys
 import sysconfig
+from typing import TYPE_CHECKING, Literal, overload
 
 from jaraco.functools import pass_none
 
+from .ccompiler import CCompiler
 from .compat import py39
 from .errors import DistutilsPlatformError
 from .util import is_mingw
 
+if TYPE_CHECKING:
+    from typing_extensions import deprecated
+else:
+
+    def deprecated(fn):
+        return fn
+
+
 IS_PYPY = '__pypy__' in sys.builtin_module_names
 
 # These are needed in a couple of spots, so just compute them once.
@@ -110,7 +120,7 @@ def get_python_version():
     return f'{sys.version_info.major}.{sys.version_info.minor}'
 
 
-def get_python_inc(plat_specific=False, prefix=None):
+def get_python_inc(plat_specific: bool = False, prefix: str | None = None) -> str:
     """Return the directory containing installed Python header files.
 
     If 'plat_specific' is false (the default), this is the path to the
@@ -217,7 +227,9 @@ def _posix_lib(standard_lib, libpython, early_prefix, prefix):
         return os.path.join(libpython, "site-packages")
 
 
-def get_python_lib(plat_specific=False, standard_lib=False, prefix=None):
+def get_python_lib(
+    plat_specific: bool = False, standard_lib: bool = False, prefix: str | None = None
+) -> str:
     """Return the directory containing the Python library (standard or
     site additions).
 
@@ -288,7 +300,7 @@ def _customize_macos():
     )
 
 
-def customize_compiler(compiler):
+def customize_compiler(compiler: CCompiler) -> None:
     """Do any platform-specific customization of a CCompiler instance.
 
     Mainly needed on Unix, so we can plug in the information that
@@ -375,12 +387,12 @@ def customize_compiler(compiler):
         compiler.shared_lib_extension = shlib_suffix
 
 
-def get_config_h_filename():
+def get_config_h_filename() -> str:
     """Return full pathname of installed pyconfig.h file."""
     return sysconfig.get_config_h_filename()
 
 
-def get_makefile_filename():
+def get_makefile_filename() -> str:
     """Return full pathname of installed Makefile from the Python build."""
     return sysconfig.get_makefile_filename()
 
@@ -542,7 +554,11 @@ def expand_makefile_vars(s, vars):
 _config_vars = None
 
 
-def get_config_vars(*args):
+@overload
+def get_config_vars() -> dict[str, str | int]: ...
+@overload
+def get_config_vars(arg: str, /, *args: str) -> list[str | int]: ...
+def get_config_vars(*args: str) -> list[str | int] | dict[str, str | int]:
     """With no arguments, return a dictionary of all configuration
     variables relevant for the current platform.  Generally this includes
     everything needed to build extensions and install both pure modules and
@@ -560,7 +576,14 @@ def get_config_vars(*args):
     return [_config_vars.get(name) for name in args] if args else _config_vars
 
 
-def get_config_var(name):
+@overload
+@deprecated(
+    "SO is deprecated, use EXT_SUFFIX. Support will be removed when this module is synchronized with stdlib Python 3.11"
+)
+def get_config_var(name: Literal["SO"]) -> int | str | None: ...
+@overload
+def get_config_var(name: str) -> int | str | None: ...
+def get_config_var(name: str) -> int | str | None:
     """Return the value of a single variable using the dictionary
     returned by 'get_config_vars()'.  Equivalent to
     get_config_vars().get(name)
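
The get_config_vars() split gives the zero-argument call a dict return and any call with names a list return; the positional-only first parameter (`arg: str, /`) is what guarantees the second overload only matches when at least one name is passed. Stacking @deprecated on an extra Literal["SO"] overload then lets PEP 702-aware checkers flag just that key. Compressed into a runnable sketch (the _VARS table is made up):

    from __future__ import annotations

    from typing import overload

    _VARS: dict[str, str | int] = {"EXT_SUFFIX": ".so", "SIZEOF_VOID_P": 8}

    @overload
    def config_vars() -> dict[str, str | int]: ...
    @overload
    def config_vars(arg: str, /, *args: str) -> list[str | int | None]: ...
    def config_vars(*args: str) -> list[str | int | None] | dict[str, str | int]:
        # no names: hand back the whole table; otherwise look each one up
        return [_VARS.get(name) for name in args] if args else _VARS
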
diff --git a/distutils/util.py b/distutils/util.py
index 83ad39e958..4d32fb2ea9 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -16,6 +16,8 @@
 import sys
 import sysconfig
 import tempfile
+from collections.abc import Callable, Iterable, Mapping
+from typing import TYPE_CHECKING, AnyStr
 
 from jaraco.functools import pass_none
 
@@ -24,6 +26,11 @@
 from .errors import DistutilsByteCompileError, DistutilsPlatformError
 from .spawn import spawn
 
+if TYPE_CHECKING:
+    from typing_extensions import TypeVarTuple, Unpack
+
+    _Ts = TypeVarTuple("_Ts")
+
 
 def get_host_platform() -> str:
     """
@@ -39,7 +46,7 @@ def get_host_platform() -> str:
     return sysconfig.get_platform()
 
 
-def get_platform():
+def get_platform() -> str:
     if os.name == 'nt':
         TARGET_TO_PLAT = {
             'x86': 'win32',
@@ -108,13 +115,13 @@ def get_macosx_target_ver():
     return syscfg_ver
 
 
-def split_version(s):
+def split_version(s: str) -> list[int]:
     """Convert a dot-separated string into a list of numbers for comparisons"""
     return [int(n) for n in s.split('.')]
 
 
 @pass_none
-def convert_path(pathname: str | os.PathLike) -> str:
+def convert_path(pathname: str | os.PathLike[str]) -> str:
     r"""
     Allow for pathlib.Path inputs, coax to a native path string.
 
@@ -132,7 +139,9 @@ def convert_path(pathname: str | os.PathLike) -> str:
     return os.fspath(pathlib.PurePath(pathname))
 
 
-def change_root(new_root, pathname):
+def change_root(
+    new_root: AnyStr | os.PathLike[AnyStr], pathname: AnyStr | os.PathLike[AnyStr]
+) -> AnyStr:
     """Return 'pathname' with 'new_root' prepended.  If 'pathname' is
     relative, this is equivalent to "os.path.join(new_root,pathname)".
     Otherwise, it requires making 'pathname' relative and then joining the
@@ -154,7 +163,7 @@ def change_root(new_root, pathname):
 
 
 @functools.lru_cache
-def check_environ():
+def check_environ() -> None:
     """Ensure that 'os.environ' has all the environment variables we
     guarantee that users can use in config files, command-line options,
     etc.  Currently this includes:
@@ -176,7 +185,7 @@ def check_environ():
         os.environ['PLAT'] = get_platform()
 
 
-def subst_vars(s, local_vars):
+def subst_vars(s, local_vars: Mapping[str, object]) -> str:
     """
     Perform variable substitution on 'string'.
     Variables are indicated by format-style braces ("{var}").
@@ -215,7 +224,7 @@ def _subst(match):
     return repl
 
 
-def grok_environment_error(exc, prefix="error: "):
+def grok_environment_error(exc: object, prefix: str = "error: ") -> str:
     # Function kept for backward compatibility.
     # Used to try clever things with EnvironmentErrors,
     # but nowadays str(exception) produces good messages.
@@ -233,7 +242,7 @@ def _init_regex():
     _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
 
 
-def split_quoted(s):
+def split_quoted(s: str) -> list[str]:
     """Split a string up according to Unix shell-like rules for quotes and
     backslashes.  In short: words are delimited by spaces, as long as those
     spaces are not escaped by a backslash, or inside a quoted string.
@@ -299,7 +308,13 @@ def split_quoted(s):
 # split_quoted ()
 
 
-def execute(func, args, msg=None, verbose=False, dry_run=False):
+def execute(
+    func: Callable[[Unpack[_Ts]], object],
+    args: tuple[Unpack[_Ts]],
+    msg: object = None,
+    verbose: bool = False,
+    dry_run: bool = False,
+) -> None:
     """Perform some action that affects the outside world (eg.  by
     writing to the filesystem).  Such actions are special because they
     are disabled by the 'dry_run' flag.  This method takes care of all
@@ -318,7 +333,7 @@ def execute(func, args, msg=None, verbose=False, dry_run=False):
         func(*args)
 
 
-def strtobool(val):
+def strtobool(val: str) -> bool:
     """Convert a string representation of truth to true (1) or false (0).
 
     True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
@@ -327,23 +342,23 @@ def strtobool(val):
     """
     val = val.lower()
     if val in ('y', 'yes', 't', 'true', 'on', '1'):
-        return 1
+        return True
     elif val in ('n', 'no', 'f', 'false', 'off', '0'):
-        return 0
+        return False
     else:
         raise ValueError(f"invalid truth value {val!r}")
 
 
 def byte_compile(  # noqa: C901
-    py_files,
-    optimize=0,
-    force=False,
-    prefix=None,
-    base_dir=None,
-    verbose=True,
-    dry_run=False,
-    direct=None,
-):
+    py_files: Iterable[str],
+    optimize: int = 0,
+    force: bool = False,
+    prefix: str | None = None,
+    base_dir: str | None = None,
+    verbose: bool = True,
+    dry_run: bool = False,
+    direct: bool | None = None,
+) -> None:
     """Byte-compile a collection of Python source files to .pyc
     files in a __pycache__ subdirectory.  'py_files' is a list
     of files to compile; any files that don't end in ".py" are silently
@@ -473,7 +488,7 @@ def byte_compile(  # noqa: C901
                     log.debug("skipping byte-compilation of %s to %s", file, cfile_base)
 
 
-def rfc822_escape(header):
+def rfc822_escape(header: str) -> str:
     """Return a version of the string escaped for inclusion in an
     RFC-822 header, by ensuring there are 8 spaces after each newline.
     """
@@ -488,7 +503,7 @@ def rfc822_escape(header):
     return indent.join(lines) + suffix
 
 
-def is_mingw():
+def is_mingw() -> bool:
     """Returns True if the current platform is mingw.
 
     Python compiled with Mingw-w64 has sys.platform == 'win32' and

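The typed execute() above (mirrored by Compiler.execute in compilers/C/base.py) is the most involved signature in this patch: TypeVarTuple pairs func's parameter list with the args tuple element by element, and because the variadic names exist only under TYPE_CHECKING, the module needs `from __future__ import annotations` (or stringified annotations) to stay importable. The mechanism in isolation:

    from __future__ import annotations

    from collections.abc import Callable
    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from typing_extensions import TypeVarTuple, Unpack

        _Ts = TypeVarTuple("_Ts")

    def run(func: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]]) -> None:
        func(*args)

    run(print, ("hello", 42))   # fine: print() accepts these arguments
    # run(len, ("a", "b"))      # a checker rejects this: len() takes one argument
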
From a40b4906e66ae3d2707f02dc2417e2342eb5161b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 18:34:03 -0500
Subject: [PATCH 1470/1761] Make sdist.metadata_check an actual boolean

---
 distutils/command/sdist.py    | 4 ++--
 distutils/tests/test_sdist.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index acb3a41650..441ccaa6e4 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -34,7 +34,7 @@ def show_formats():
 class sdist(Command):
     description = "create a source distribution (tarball, zip file, etc.)"
 
-    def checking_metadata(self):
+    def checking_metadata(self) -> bool:
         """Callable used for the check sub-command.
 
         Placed here so user_options can view it"""
@@ -136,7 +136,7 @@ def initialize_options(self):
         self.dist_dir = None
 
         self.archive_files = None
-        self.metadata_check = 1
+        self.metadata_check = True
         self.owner = None
         self.group = None
 
diff --git a/distutils/tests/test_sdist.py b/distutils/tests/test_sdist.py
index 5aca43e34f..6b1a376b26 100644
--- a/distutils/tests/test_sdist.py
+++ b/distutils/tests/test_sdist.py
@@ -270,7 +270,7 @@ def test_metadata_check_option(self, caplog):
         caplog.clear()
         dist, cmd = self.get_cmd()
         cmd.ensure_finalized()
-        cmd.metadata_check = 0
+        cmd.metadata_check = False
         cmd.run()
         assert len(self.warnings(caplog.messages, 'warning: check: ')) == 0
 

From c26fedb4f32f2167f3c8306864d4c06c5b0f3036 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 18:43:16 -0500
Subject: [PATCH 1471/1761] Add make_archive overloads

---
 distutils/archive_util.py | 26 ++++++++++++++++++++++++--
 distutils/cmd.py          | 20 ++++++++++++++++++++
 2 files changed, 44 insertions(+), 2 deletions(-)

diff --git a/distutils/archive_util.py b/distutils/archive_util.py
index 167a5c2e29..9285666062 100644
--- a/distutils/archive_util.py
+++ b/distutils/archive_util.py
@@ -4,7 +4,7 @@
 that sort of thing)."""
 
 import os
-from typing import Literal
+from typing import Literal, overload
 
 try:
     import zipfile
@@ -128,7 +128,7 @@ def make_zipfile(
     base_dir: str | os.PathLike[str],
     verbose: bool = False,
     dry_run: bool = False,
-) -> str:  # noqa: C901
+) -> str:
     """Create a zip file from all the files under 'base_dir'.
 
     The output zip file will be named 'base_name' + ".zip".  Uses either the
@@ -210,6 +210,7 @@ def check_archive_formats(formats):
     return None
 
 
+@overload
 def make_archive(
     base_name: str,
     format: str,
@@ -219,6 +220,27 @@ def make_archive(
     dry_run: bool = False,
     owner: str | None = None,
     group: str | None = None,
+) -> str: ...
+@overload
+def make_archive(
+    base_name: str | os.PathLike[str],
+    format: str,
+    root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes],
+    base_dir: str | None = None,
+    verbose: bool = False,
+    dry_run: bool = False,
+    owner: str | None = None,
+    group: str | None = None,
+) -> str: ...
+def make_archive(
+    base_name: str | os.PathLike[str],
+    format: str,
+    root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes] | None = None,
+    base_dir: str | None = None,
+    verbose: bool = False,
+    dry_run: bool = False,
+    owner: str | None = None,
+    group: str | None = None,
 ) -> str:
     """Create an archive file (eg. zip or tar).
 
diff --git a/distutils/cmd.py b/distutils/cmd.py
index db34673090..5d4af6a48a 100644
--- a/distutils/cmd.py
+++ b/distutils/cmd.py
@@ -475,6 +475,7 @@ def spawn(
 
         spawn(cmd, search_path, dry_run=self.dry_run)
 
+    @overload
     def make_archive(
         self,
         base_name: str,
@@ -483,6 +484,25 @@ def make_archive(
         base_dir: str | None = None,
         owner: str | None = None,
         group: str | None = None,
+    ) -> str: ...
+    @overload
+    def make_archive(
+        self,
+        base_name: str | os.PathLike[str],
+        format: str,
+        root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes],
+        base_dir: str | None = None,
+        owner: str | None = None,
+        group: str | None = None,
+    ) -> str: ...
+    def make_archive(
+        self,
+        base_name: str | os.PathLike[str],
+        format: str,
+        root_dir: str | os.PathLike[str] | bytes | os.PathLike[bytes] | None = None,
+        base_dir: str | None = None,
+        owner: str | None = None,
+        group: str | None = None,
     ) -> str:
         return archive_util.make_archive(
             base_name,

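The overload split here matches typeshed's signature for shutil.make_archive: a PathLike base_name is only accepted when root_dir is passed explicitly (note the non-optional root_dir in the second overload); with root_dir omitted, base_name must stay a plain str. A rough sketch of the same contract on a hypothetical pass-through wrapper:

    from __future__ import annotations

    import os
    import shutil
    from typing import overload

    @overload
    def pack(base_name: str, fmt: str, root_dir: str | os.PathLike[str] | None = None) -> str: ...
    @overload
    def pack(base_name: str | os.PathLike[str], fmt: str, root_dir: str | os.PathLike[str]) -> str: ...
    def pack(base_name, fmt, root_dir=None):
        # thin pass-through; the overloads carry the whole contract
        return shutil.make_archive(os.fspath(base_name), fmt, root_dir)
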
From 21c981cf506ffaf5d040d09e97bd5e0d98d1acfc Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 18:45:52 -0500
Subject: [PATCH 1472/1761] Fix impossible _Env import

---
 distutils/spawn.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/distutils/spawn.py b/distutils/spawn.py
index fc66281765..973668f268 100644
--- a/distutils/spawn.py
+++ b/distutils/spawn.py
@@ -13,13 +13,16 @@
 import sys
 import warnings
 from collections.abc import Mapping, MutableSequence
-from subprocess import _ENV
-from typing import TypeVar, overload
+from typing import TYPE_CHECKING, TypeVar, overload
 
 from ._log import log
 from .debug import DEBUG
 from .errors import DistutilsExecError
 
+if TYPE_CHECKING:
+    from subprocess import _ENV
+
+
 _MappingT = TypeVar("_MappingT", bound=Mapping)
 
 

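The underlying problem: subprocess._ENV exists only in typeshed's stubs, never in the runtime subprocess module, so the unconditional import raised ImportError as soon as distutils.spawn was imported. Guarded by TYPE_CHECKING, the name is available to checkers while the annotations that mention it are never evaluated at runtime. The pattern in isolation:

    from __future__ import annotations  # keeps `_ENV` out of runtime evaluation

    import subprocess
    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from subprocess import _ENV  # defined only in the typeshed stubs

    def run_quiet(cmd: list[str], env: _ENV | None = None) -> int:
        return subprocess.call(cmd, env=env)
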
From f1a6ec96f4fbd4f8820622910f64d1441f79e694 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 18:48:29 -0500
Subject: [PATCH 1473/1761] Fix runtime bound typevars

---
 distutils/_modified.py        | 6 ++++--
 distutils/compilers/C/base.py | 4 ++--
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/distutils/_modified.py b/distutils/_modified.py
index 5793189723..f64cab7d61 100644
--- a/distutils/_modified.py
+++ b/distutils/_modified.py
@@ -1,5 +1,7 @@
 """Timestamp comparison of files and groups of files."""
 
+from __future__ import annotations
+
 import functools
 import os.path
 from collections.abc import Callable, Iterable
@@ -11,10 +13,10 @@
 from .errors import DistutilsFileError
 
 _SourcesT = TypeVar(
-    "_SourcesT", bound=str | bytes | os.PathLike[str] | os.PathLike[bytes]
+    "_SourcesT", bound="str | bytes | os.PathLike[str] | os.PathLike[bytes]"
 )
 _TargetsT = TypeVar(
-    "_TargetsT", bound=str | bytes | os.PathLike[str] | os.PathLike[bytes]
+    "_TargetsT", bound="str | bytes | os.PathLike[str] | os.PathLike[bytes]"
 )
 
 
diff --git a/distutils/compilers/C/base.py b/distutils/compilers/C/base.py
index c6ec90550c..421f7abb8d 100644
--- a/distutils/compilers/C/base.py
+++ b/distutils/compilers/C/base.py
@@ -46,8 +46,8 @@
     _Ts = TypeVarTuple("_Ts")
 
 _Macro: TypeAlias = Union[Tuple[str], Tuple[str, str | None]]
-_StrPathT = TypeVar("_StrPathT", bound=str | os.PathLike[str])
-_BytesPathT = TypeVar("_BytesPathT", bound=bytes | os.PathLike[bytes])
+_StrPathT = TypeVar("_StrPathT", bound="str | os.PathLike[str]")
+_BytesPathT = TypeVar("_BytesPathT", bound="bytes | os.PathLike[bytes]")
 
 
 class Compiler:

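The subtlety being fixed: `from __future__ import annotations` defers evaluation of annotations only. The bound= argument to TypeVar() is an ordinary expression executed at import time, so an unquoted PEP 604 union there still raises TypeError on Python 3.9; quoting the bound makes it lazy for type checkers while keeping the module importable. In miniature:

    from __future__ import annotations  # does not help with bound= below

    import os
    from typing import TypeVar

    # unquoted, `str | os.PathLike[str]` would be evaluated eagerly and fail
    # on Python 3.9; the string form is read only by type checkers
    _PathT = TypeVar("_PathT", bound="str | os.PathLike[str]")

    def identity(path: _PathT) -> _PathT:
        # hypothetical helper standing in for the _modified.py functions
        return path
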
From 02b856ab08e87668f5ad7eb7b580fdfce6f39ef3 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 18:52:12 -0500
Subject: [PATCH 1474/1761] Fix deprecated

---
 distutils/command/bdist.py    | 4 ++--
 distutils/compilers/C/base.py | 3 +--
 distutils/dist.py             | 6 ++----
 distutils/sysconfig.py        | 4 ++--
 4 files changed, 7 insertions(+), 10 deletions(-)

diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index 230bdeaf90..a34129f79e 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -16,8 +16,8 @@
     from typing_extensions import deprecated
 else:
 
-    def deprecated(fn):
-        return fn
+    def deprecated(message):
+        return lambda fn: fn
 
 
 def show_formats():
diff --git a/distutils/compilers/C/base.py b/distutils/compilers/C/base.py
index 421f7abb8d..1828b993e6 100644
--- a/distutils/compilers/C/base.py
+++ b/distutils/compilers/C/base.py
@@ -15,7 +15,6 @@
     TYPE_CHECKING,
     ClassVar,
     Literal,
-    Tuple,
     TypeAlias,
     TypeVar,
     Union,
@@ -45,7 +44,7 @@
 
     _Ts = TypeVarTuple("_Ts")
 
-_Macro: TypeAlias = Union[Tuple[str], Tuple[str, str | None]]
+_Macro: TypeAlias = Union[tuple[str], tuple[str, Union[str, None]]]
 _StrPathT = TypeVar("_StrPathT", bound="str | os.PathLike[str]")
 _BytesPathT = TypeVar("_BytesPathT", bound="bytes | os.PathLike[bytes]")
 
diff --git a/distutils/dist.py b/distutils/dist.py
index 72bd13ce30..b1630b4c05 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -20,9 +20,7 @@
     TYPE_CHECKING,
     Any,
     ClassVar,
-    List,
     Literal,
-    Tuple,
     TypeAlias,
     TypeVar,
     Union,
@@ -49,8 +47,8 @@
     from .cmd import Command
 
 _CommandT = TypeVar("_CommandT", bound="Command")
-_OptionsList: TypeAlias = List[
-    Tuple[str, Union[str, None], str, int] | Tuple[str, Union[str, None], str]
+_OptionsList: TypeAlias = list[
+    Union[tuple[str, Union[str, None], str, int], tuple[str, Union[str, None], str]]
 ]
 
 
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index c27e7799cd..08b7d045e4 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -28,8 +28,8 @@
     from typing_extensions import deprecated
 else:
 
-    def deprecated(fn):
-        return fn
+    def deprecated(message):
+        return lambda fn: fn
 
 
 IS_PYPY = '__pypy__' in sys.builtin_module_names

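Both `deprecated` fallbacks above had the wrong shape: `typing_extensions.deprecated` takes a message and returns a decorator, so the old one-argument stub would have received the message where it expected a function, and `@deprecated("...")` would have decorated nothing. A sketch of the corrected pattern, mirroring the hunks:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from typing_extensions import deprecated
    else:
        # Runtime no-op with the same two-step signature:
        # deprecated(message) returns a decorator, which returns fn.
        def deprecated(message):
            return lambda fn: fn


    @deprecated("this command is deprecated; illustrative usage only")
    def example():
        ...
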
From 509245fe5f5f1caf613a1e7cfa0c5ef16c6a0dc6 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 18:54:37 -0500
Subject: [PATCH 1475/1761] Fix TypeAlias import

---
 distutils/compilers/C/base.py | 3 +--
 distutils/dist.py             | 2 +-
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/distutils/compilers/C/base.py b/distutils/compilers/C/base.py
index 1828b993e6..4767b7f332 100644
--- a/distutils/compilers/C/base.py
+++ b/distutils/compilers/C/base.py
@@ -15,7 +15,6 @@
     TYPE_CHECKING,
     ClassVar,
     Literal,
-    TypeAlias,
     TypeVar,
     Union,
     overload,
@@ -40,7 +39,7 @@
 )
 
 if TYPE_CHECKING:
-    from typing_extensions import TypeVarTuple, Unpack
+    from typing_extensions import TypeAlias, TypeVarTuple, Unpack
 
     _Ts = TypeVarTuple("_Ts")
 
diff --git a/distutils/dist.py b/distutils/dist.py
index b1630b4c05..69d42016a1 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -21,7 +21,6 @@
     Any,
     ClassVar,
     Literal,
-    TypeAlias,
     TypeVar,
     Union,
     overload,
@@ -42,6 +41,7 @@
 
 if TYPE_CHECKING:
     from _typeshed import SupportsWrite
+    from typing_extensions import TypeAlias
 
     # type-only import because of mutual dependence between these modules
     from .cmd import Command

From c6768e6181919f673fd67c67a99ade1f51bc1b85 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 18:57:52 -0500
Subject: [PATCH 1476/1761] Add a few missing `from __future__ import
 annotations` imports

---
 distutils/archive_util.py       | 2 ++
 distutils/command/bdist.py      | 2 ++
 distutils/command/build.py      | 2 ++
 distutils/command/build_clib.py | 2 +-
 distutils/command/build_ext.py  | 2 ++
 distutils/command/install.py    | 2 ++
 distutils/command/sdist.py      | 2 ++
 distutils/compilers/C/msvc.py   | 1 +
 distutils/extension.py          | 2 ++
 distutils/filelist.py           | 2 ++
 distutils/sysconfig.py          | 2 ++
 11 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/distutils/archive_util.py b/distutils/archive_util.py
index 9285666062..aa2d81ff41 100644
--- a/distutils/archive_util.py
+++ b/distutils/archive_util.py
@@ -3,6 +3,8 @@
 Utility functions for creating archive files (tarballs, zip files,
 that sort of thing)."""
 
+from __future__ import annotations
+
 import os
 from typing import Literal, overload
 
diff --git a/distutils/command/bdist.py b/distutils/command/bdist.py
index a34129f79e..07811aab27 100644
--- a/distutils/command/bdist.py
+++ b/distutils/command/bdist.py
@@ -3,6 +3,8 @@
 Implements the Distutils 'bdist' command (create a built [binary]
 distribution)."""
 
+from __future__ import annotations
+
 import os
 import warnings
 from collections.abc import Callable
diff --git a/distutils/command/build.py b/distutils/command/build.py
index 9493cefee9..61f2431a5e 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -2,6 +2,8 @@
 
 Implements the Distutils 'build' command."""
 
+from __future__ import annotations
+
 import os
 import sys
 import sysconfig
diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
index 2a1643d634..0d6d1c8a2f 100644
--- a/distutils/command/build_clib.py
+++ b/distutils/command/build_clib.py
@@ -4,7 +4,6 @@
 that is included in the module distribution and needed by an extension
 module."""
 
-
 # XXX this module has *lots* of code ripped-off quite transparently from
 # build_ext.py -- not surprisingly really, as the work required to build
 # a static library from a collection of C source files is not really all
@@ -13,6 +12,7 @@
 # necessary refactoring to account for the overlap in code between the
 # two modules, mainly because a number of subtle details changed in the
 # cut 'n paste.  Sigh.
+from __future__ import annotations
 
 import os
 from collections.abc import Callable
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index c25352f603..55dec90e21 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -4,6 +4,8 @@
 modules (currently limited to C extensions, should accommodate C++
 extensions ASAP)."""
 
+from __future__ import annotations
+
 import contextlib
 import os
 import re
diff --git a/distutils/command/install.py b/distutils/command/install.py
index 12f9d1fe39..b09048cf5b 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -2,6 +2,8 @@
 
 Implements the Distutils 'install' command."""
 
+from __future__ import annotations
+
 import contextlib
 import itertools
 import os
diff --git a/distutils/command/sdist.py b/distutils/command/sdist.py
index abb8dba5b8..b3bf0c326a 100644
--- a/distutils/command/sdist.py
+++ b/distutils/command/sdist.py
@@ -2,6 +2,8 @@
 
 Implements the Distutils 'sdist' command (create a source distribution)."""
 
+from __future__ import annotations
+
 import os
 import sys
 from collections.abc import Callable
diff --git a/distutils/compilers/C/msvc.py b/distutils/compilers/C/msvc.py
index ea1a342dfe..ebf2568bd0 100644
--- a/distutils/compilers/C/msvc.py
+++ b/distutils/compilers/C/msvc.py
@@ -11,6 +11,7 @@
 #   finding DevStudio (through the registry)
 # ported to VS 2005 and VS 2008 by Christian Heimes
 # ported to VS 2015 by Steve Dower
+from __future__ import annotations
 
 import contextlib
 import os
diff --git a/distutils/extension.py b/distutils/extension.py
index 0885e33be3..f51411266e 100644
--- a/distutils/extension.py
+++ b/distutils/extension.py
@@ -3,6 +3,8 @@
 Provides the Extension class, used to describe C/C++ extension
 modules in setup scripts."""
 
+from __future__ import annotations
+
 import os
 import warnings
 from collections.abc import Iterable
diff --git a/distutils/filelist.py b/distutils/filelist.py
index 3b8f8e6970..70dc0fdebc 100644
--- a/distutils/filelist.py
+++ b/distutils/filelist.py
@@ -4,6 +4,8 @@
 and building lists of files.
 """
 
+from __future__ import annotations
+
 import fnmatch
 import functools
 import os
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index 08b7d045e4..e5facaecd3 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -9,6 +9,8 @@
 Email:        
 """
 
+from __future__ import annotations
+
 import functools
 import os
 import pathlib

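All eleven files gain the same line for the same reason: with `from __future__ import annotations` (PEP 563), every annotation in the module is stored as a string and never evaluated at runtime, so PEP 604/585 syntax such as `str | os.PathLike[str]` is safe on Python 3.8 and 3.9. A minimal illustration (signature trimmed from `archive_util` for brevity):

    from __future__ import annotations

    import os


    # Without the future import, evaluating this annotation would raise
    # TypeError at import time on Python < 3.10; with it, the annotation
    # is kept as a plain string.
    def make_archive(base_name: str | os.PathLike[str]) -> str:
        return os.fspath(base_name)
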
From d1071cbe4a9610648ff092acf2fb40de522f91de Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 19:05:39 -0500
Subject: [PATCH 1477/1761] Add another missing `from __future__ import annotations`

---
 distutils/command/install_lib.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/distutils/command/install_lib.py b/distutils/command/install_lib.py
index 318a9db803..2aababf800 100644
--- a/distutils/command/install_lib.py
+++ b/distutils/command/install_lib.py
@@ -3,6 +3,8 @@
 Implements the Distutils 'install_lib' command
 (install all Python modules)."""
 
+from __future__ import annotations
+
 import importlib.util
 import os
 import sys

From 7f00d9555c2859f9c48c60d6720097b25732915a Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 24 Feb 2025 19:12:22 -0500
Subject: [PATCH 1478/1761] Add missing noqa

---
 distutils/archive_util.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/archive_util.py b/distutils/archive_util.py
index aa2d81ff41..d860f55272 100644
--- a/distutils/archive_util.py
+++ b/distutils/archive_util.py
@@ -125,7 +125,7 @@ def _set_uid_gid(tarinfo):
     return archive_name
 
 
-def make_zipfile(
+def make_zipfile(  # noqa: C901
     base_name: str,
     base_dir: str | os.PathLike[str],
     verbose: bool = False,

From df2cca69cd86f32775027ca93252a6ecd8ecd719 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 25 Feb 2025 13:42:01 +0000
Subject: [PATCH 1479/1761] Small adjustments and fixes to make tests work

---
 setuptools/dist.py                            |  4 +--
 .../tests/config/test_apply_pyprojecttoml.py  | 26 ++++++++++++++++---
 2 files changed, 24 insertions(+), 6 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index ce17217f69..d457d5ebe7 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -515,7 +515,7 @@ def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
                 "Pattern {pattern!r} contains invalid characters.",
                 pattern=pattern,
                 see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
-                due_date=(2025, 2, 20),  # Introduced in 2024-02-20
+                due_date=(2026, 2, 20),  # Introduced in 2025-02-20
             )
 
         found = glob(pattern, recursive=True)
@@ -525,7 +525,7 @@ def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
                 "Cannot find any files for the given pattern.",
                 "Pattern {pattern!r} did not match any files.",
                 pattern=pattern,
-                due_date=(2025, 2, 20),  # Introduced in 2024-02-20
+                due_date=(2026, 2, 20),  # Introduced in 2025-02-20
                 # PEP 639 requires us to error, but as a transition period
                 # we will only issue a warning to give people time to prepare.
                 # After the transition, this should raise an InvalidConfigError.
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index f1a536bb4a..489fd98e26 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -24,7 +24,7 @@
 from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
 from setuptools.dist import Distribution
 from setuptools.errors import InvalidConfigError, RemovedConfigError
-from setuptools.warnings import SetuptoolsDeprecationWarning
+from setuptools.warnings import InformationOnly, SetuptoolsDeprecationWarning
 
 from .downloads import retrieve_file, urls_from_file
 
@@ -36,11 +36,22 @@ def makedist(path, **attrs):
     return Distribution({"src_root": path, **attrs})
 
 
+def _mock_expand_patterns(patterns, *_, **__):
+    """
+    Allow comparing the given patterns for 2 dist objects.
+    We need to strip special chars to avoid errors when validating.
+    """
+    return [re.sub("[^a-z0-9]+", "", p, flags=re.I) or "empty" for p in patterns]
+
+
 @pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE))
 @pytest.mark.filterwarnings("ignore")
 @pytest.mark.uses_network
 def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
     monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1"))
+    monkeypatch.setattr(
+        Distribution, "_expand_patterns", Mock(side_effect=_mock_expand_patterns)
+    )
     setupcfg_example = retrieve_file(url)
     pyproject_example = Path(tmp_path, "pyproject.toml")
     setupcfg_text = setupcfg_example.read_text(encoding="utf-8")
@@ -432,7 +443,10 @@ def test_both_license_and_license_files_defined_pep639(self, tmp_path):
         (tmp_path / "_FILE.txt").touch()
         (tmp_path / "_FILE.rst").touch()
 
-        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+        msg = "Normalizing.*LicenseRef"
+        with pytest.warns(InformationOnly, match=msg):
+            dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
+
         assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
         assert dist.metadata.license is None
         assert dist.metadata.license_expression == "LicenseRef-Proprietary"
@@ -465,8 +479,12 @@ def test_missing_patterns(self, tmp_path):
         pyproject = self.base_pyproject_license_pep639(tmp_path)
         assert list(tmp_path.glob("_FILE*")) == []  # sanity check
 
-        msg = "Cannot find any files for the given pattern.*"
-        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+        msg1 = "Cannot find any files for the given pattern.*"
+        msg2 = "Normalizing 'licenseref-Proprietary' to 'LicenseRef-Proprietary'"
+        with (
+            pytest.warns(SetuptoolsDeprecationWarning, match=msg1),
+            pytest.warns(InformationOnly, match=msg2),
+        ):
             pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
 
     def test_deprecated_file_expands_to_text(self, tmp_path):

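The reworked test expects two different warnings from one call. Stacking `pytest.warns` blocks works here because, since pytest 8, a `pytest.warns` context re-emits any warnings it did not match when it exits, so the outer block still sees them; the parenthesized `with` form needs Python 3.10+. A self-contained sketch, with stdlib warning categories standing in for the setuptools ones:

    import warnings

    import pytest


    def apply_configuration():
        # Stand-in for the real call; emits both expected warnings.
        warnings.warn("Cannot find any files for the given pattern", DeprecationWarning)
        warnings.warn("Normalizing 'licenseref-Proprietary'", UserWarning)


    def test_emits_both_warnings():
        with (
            pytest.warns(DeprecationWarning, match="Cannot find any files"),
            pytest.warns(UserWarning, match="Normalizing"),
        ):
            apply_configuration()
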
From 56c055b653f080544f490e198605974c71b6fe35 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 25 Feb 2025 14:04:22 +0000
Subject: [PATCH 1480/1761] =?UTF-8?q?Bump=20version:=2075.8.0=20=E2=86=92?=
 =?UTF-8?q?=2075.8.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              |  2 +-
 NEWS.rst                      | 12 ++++++++++++
 newsfragments/4766.bugfix.rst |  1 -
 newsfragments/4809.bugfix.rst |  2 --
 newsfragments/4836.bugfix.rst |  1 -
 pyproject.toml                |  2 +-
 6 files changed, 14 insertions(+), 6 deletions(-)
 delete mode 100644 newsfragments/4766.bugfix.rst
 delete mode 100644 newsfragments/4809.bugfix.rst
 delete mode 100644 newsfragments/4836.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 384a18455d..4f9f7eac69 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.8.0
+current_version = 75.8.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index e9e795005a..ac61b70ee0 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,15 @@
+v75.8.1
+=======
+
+Bugfixes
+--------
+
+- Fix wheel file naming to follow binary distribution specification -- by :user:`di` (#4766)
+- Fixed crash generating error message printed when building wheels for the
+  free-threaded build using the limited API. -- by :user:`ngoldbaum` (#4809)
+- Fix documentation for recent CFLAGS distutils change. -- by :user:`thesamesam` (#4836)
+
+
 v75.8.0
 =======
 
diff --git a/newsfragments/4766.bugfix.rst b/newsfragments/4766.bugfix.rst
deleted file mode 100644
index fcd54785d8..0000000000
--- a/newsfragments/4766.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix wheel file naming to follow binary distribution specification -- by :user:`di`
diff --git a/newsfragments/4809.bugfix.rst b/newsfragments/4809.bugfix.rst
deleted file mode 100644
index 288e3f686a..0000000000
--- a/newsfragments/4809.bugfix.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fixed crash generating error message printed when building wheels for the
-free-threaded build using the limited API. -- by :user:`ngoldbaum`
diff --git a/newsfragments/4836.bugfix.rst b/newsfragments/4836.bugfix.rst
deleted file mode 100644
index bb8adf0872..0000000000
--- a/newsfragments/4836.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix documentation for recent CFLAGS distutils change. -- by :user:`thesamesam`
diff --git a/pyproject.toml b/pyproject.toml
index a9febdbe8c..ce6afa23b8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.8.0"
+version = "75.8.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 7b3366d209a041b749d5c98ff7bae6e6d0f05aef Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Tue, 25 Feb 2025 14:18:06 +0000
Subject: [PATCH 1481/1761] Add thanks note to newsfragment

---
 newsfragments/4838.feature.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/newsfragments/4838.feature.rst b/newsfragments/4838.feature.rst
index 15e000c56c..31aa8ba43b 100644
--- a/newsfragments/4838.feature.rst
+++ b/newsfragments/4838.feature.rst
@@ -1,3 +1,4 @@
 Added simple validation for given glob patterns in ``license-files``:
 a warning will be generated if no file is matched.
 Invalid glob patterns can raise an exception.
+-- thanks :user:`cdce8p` for contributions.

From 1a2f93053d789f041d88c97c5da4eea9e949bdfe Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 25 Feb 2025 13:21:13 -0500
Subject: [PATCH 1482/1761] Select Ruff rules for modern type annotations
 (jaraco/skeleton#160)

* Select Ruff rules for modern type annotations

Ensure modern type annotation syntax and best practices
Not including those covered by type-checkers or exclusive to Python 3.11+

Not including rules currently in preview either.

These are the same set of rules I have in pywin32 as of https://github.com/mhammond/pywin32/pull/2458

setuptools has all the same rules enabled (except it also includes the `UP` group directly)

* Add PYI011 ignore and #local section

* Update ruff.toml

Co-authored-by: Jason R. Coombs 

* Add 	# upstream

---------

Co-authored-by: Jason R. Coombs 
---
 ruff.toml | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/ruff.toml b/ruff.toml
index 9379d6e1f6..1d65c7c225 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -3,11 +3,32 @@ extend = "pyproject.toml"
 
 [lint]
 extend-select = [
+	# upstream
+ 
 	"C901",
 	"PERF401",
 	"W",
+ 
+ 	# Ensure modern type annotation syntax and best practices
+	# Not including those covered by type-checkers or exclusive to Python 3.11+
+	"FA", # flake8-future-annotations
+	"F404", # late-future-import
+	"PYI", # flake8-pyi
+	"UP006", # non-pep585-annotation
+	"UP007", # non-pep604-annotation
+	"UP010", # unnecessary-future-import
+	"UP035", # deprecated-import
+	"UP037", # quoted-annotation
+	"UP043", # unnecessary-default-type-args
+
+	# local
 ]
 ignore = [
+	# upstream
+ 
+	# Typeshed rejects complex or non-literal defaults for maintenance and testing reasons,
+	# irrelevant to this project.
+	"PYI011", # typed-argument-default-in-stub
 	# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
 	"W191",
 	"E111",
@@ -23,6 +44,8 @@ ignore = [
 	"COM819",
 	"ISC001",
 	"ISC002",
+
+ 	# local
 ]
 
 [format]

From 9b0d98cce8577593eb6c87931ea6b91f360739db Mon Sep 17 00:00:00 2001
From: shenxianpeng 
Date: Wed, 26 Feb 2025 03:28:12 +0000
Subject: [PATCH 1483/1761] Revert change files under _vendor

---
 .../_vendor/autocommand-2.2.2.dist-info/METADATA     |  4 ++--
 setuptools/_vendor/autocommand/autoasync.py          |  8 ++++----
 setuptools/_vendor/autocommand/autoparse.py          |  8 ++++----
 setuptools/_vendor/inflect/__init__.py               |  2 +-
 setuptools/_vendor/packaging/metadata.py             | 12 ++++++------
 5 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA b/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
index e153f86374..32214fb440 100644
--- a/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
+++ b/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA
@@ -320,7 +320,7 @@ optional arguments:
 STOP and STEP default to 1
 ```
 
-Even though autocommand is being applied to the `wrapper` returned by `print_yielded`, it still retrieves the signature of the underlying `seq` function to create the argument parsing.
+Even though autocommand is being applied to the `wrapper` returned by `print_yielded`, it still retreives the signature of the underlying `seq` function to create the argument parsing.
 
 ### Custom Parser
 
@@ -331,7 +331,7 @@ from argparse import ArgumentParser
 from autocommand import autocommand
 
 parser = ArgumentParser()
-# autocommand can't do optional positional parameters
+# autocommand can't do optional positonal parameters
 parser.add_argument('arg', nargs='?')
 # or mutually exclusive options
 group = parser.add_mutually_exclusive_group()
diff --git a/setuptools/_vendor/autocommand/autoasync.py b/setuptools/_vendor/autocommand/autoasync.py
index 0d4825d761..688f7e0554 100644
--- a/setuptools/_vendor/autocommand/autoasync.py
+++ b/setuptools/_vendor/autocommand/autoasync.py
@@ -54,14 +54,14 @@ async def _run_forever_coro(coro, args, kwargs, loop):
 def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
     '''
     Convert an asyncio coroutine into a function which, when called, is
-    evaluated in an event loop, and the return value returned. This is intended
+    evaluted in an event loop, and the return value returned. This is intented
     to make it easy to write entry points into asyncio coroutines, which
-    otherwise need to be explicitly evaluated with an event loop's
+    otherwise need to be explictly evaluted with an event loop's
     run_until_complete.
 
     If `loop` is given, it is used as the event loop to run the coro in. If it
-    is None (the default), the loop is retrieved using asyncio.get_event_loop.
-    This call is deferred until the decorated function is called, so that
+    is None (the default), the loop is retreived using asyncio.get_event_loop.
+    This call is defered until the decorated function is called, so that
     callers can install custom event loops or event loop policies after
     @autoasync is applied.
 
diff --git a/setuptools/_vendor/autocommand/autoparse.py b/setuptools/_vendor/autocommand/autoparse.py
index b50fad83e7..0276a3fae1 100644
--- a/setuptools/_vendor/autocommand/autoparse.py
+++ b/setuptools/_vendor/autocommand/autoparse.py
@@ -34,7 +34,7 @@ class AnnotationError(AutocommandError):
 
 class PositionalArgError(AutocommandError):
     '''
-    Positional Arg Error: autocommand can't handle positional-only parameters
+    Postional Arg Error: autocommand can't handle postional-only parameters
     '''
 
 
@@ -137,7 +137,7 @@ def _add_arguments(param, parser, used_char_args, add_nos):
             arg_spec['type'] = str
 
         # TODO: special case for list type.
-        #   - How to specify type of list members?
+        #   - How to specificy type of list members?
         #       - param: [int]
         #       - param: int =[]
         #   - action='append' vs nargs='*'
@@ -197,7 +197,7 @@ def make_parser(func_sig, description, epilog, add_nos):
     used_char_args = {'h'}
 
     # Arange the params so that single-character arguments are first. This
-    # ensures they don't have to get --long versions. sorted is stable, so the
+    # esnures they don't have to get --long versions. sorted is stable, so the
     # parameters will otherwise still be in relative order.
     params = sorted(
         func_sig.parameters.values(),
@@ -249,7 +249,7 @@ def autoparse(
     while parameters *with* defaults become --options. Use annotations to set
     the type of the parameter.
 
-    The `description` and `epilog` parameters correspond to the same respective
+    The `desctiption` and `epilog` parameters corrospond to the same respective
     argparse parameters. If no description is given, it defaults to the
     decorated functions's docstring, if present.
 
diff --git a/setuptools/_vendor/inflect/__init__.py b/setuptools/_vendor/inflect/__init__.py
index d9a94b4c99..3eec27f4c6 100644
--- a/setuptools/_vendor/inflect/__init__.py
+++ b/setuptools/_vendor/inflect/__init__.py
@@ -1837,7 +1837,7 @@ def get_si_pron(thecase, word, gender) -> str:
     re.VERBOSE,
 )
 
-# THIS PATTERN CODES THE BEGINNINGS OF ALL ENGLISH WORDS BEGINNING WITH A
+# THIS PATTERN CODES THE BEGINNINGS OF ALL ENGLISH WORDS BEGINING WITH A
 # 'y' FOLLOWED BY A CONSONANT. ANY OTHER Y-CONSONANT PREFIX THEREFORE
 # IMPLIES AN ABBREVIATION.
 
diff --git a/setuptools/_vendor/packaging/metadata.py b/setuptools/_vendor/packaging/metadata.py
index 6a651deb44..721f411cfc 100644
--- a/setuptools/_vendor/packaging/metadata.py
+++ b/setuptools/_vendor/packaging/metadata.py
@@ -193,10 +193,10 @@ def _parse_project_urls(data: list[str]) -> dict[str, str]:
         # be the missing value, then they'd have multiple '' values that
         # overwrite each other in a accumulating dict.
         #
-        # The other potential issue is that it's possible to have the
+        # The other potentional issue is that it's possible to have the
         # same label multiple times in the metadata, with no solid "right"
         # answer with what to do in that case. As such, we'll do the only
-        # thing we can, which is treat the field as unparsable and add it
+        # thing we can, which is treat the field as unparseable and add it
         # to our list of unparsed fields.
         parts = [p.strip() for p in pair.split(",", 1)]
         parts.extend([""] * (max(0, 2 - len(parts))))  # Ensure 2 items
@@ -209,8 +209,8 @@ def _parse_project_urls(data: list[str]) -> dict[str, str]:
         label, url = parts
         if label in urls:
             # The label already exists in our set of urls, so this field
-            # is unparsable, and we can just add the whole thing to our
-            # unparsable data and stop processing it.
+            # is unparseable, and we can just add the whole thing to our
+            # unparseable data and stop processing it.
             raise KeyError("duplicate labels in project urls")
         urls[label] = url
 
@@ -424,7 +424,7 @@ def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
             except KeyError:
                 unparsed[name] = value
         # Nothing that we've done has managed to parse this, so it'll just
-        # throw it in our unparsable data and move on.
+        # throw it in our unparseable data and move on.
         else:
             unparsed[name] = value
 
@@ -441,7 +441,7 @@ def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
     else:
         if payload:
             # Check to see if we've already got a description, if so then both
-            # it, and this body move to unparsable.
+            # it, and this body move to unparseable.
             if "description" in raw:
                 description_header = cast(str, raw.pop("description"))
                 unparsed.setdefault("description", []).extend(

From 79d6e46d4949a77238ca8884ed8137d5d6175d31 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 26 Feb 2025 16:14:11 +0000
Subject: [PATCH 1484/1761] Add regression test for issue 4853

---
 pkg_resources/tests/test_pkg_resources.py | 54 +++++++++++++++++++++++
 1 file changed, 54 insertions(+)

diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 0f696e8502..73a9a652a0 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -2,6 +2,7 @@
 
 import builtins
 import datetime
+import inspect
 import os
 import plistlib
 import stat
@@ -425,3 +426,56 @@ def test_normalize_path_backslash_sep(self, unnormalized, expected):
         """Ensure path seps are cleaned on backslash path sep systems."""
         result = pkg_resources.normalize_path(unnormalized)
         assert result.endswith(expected)
+
+
+class TestWorkdirRequire:
+    def fake_site_packages(self, tmp_path, monkeypatch, dist_files):
+        site_packages = tmp_path / "site-packages"
+        site_packages.mkdir()
+        for file, content in self.FILES.items():
+            path = site_packages / file
+            path.parent.mkdir(exist_ok=True, parents=True)
+            path.write_text(inspect.cleandoc(content), encoding="utf-8")
+
+        monkeypatch.setattr(sys, "path", [site_packages])
+        return os.fspath(site_packages)
+
+    FILES = {
+        "pkg1_mod-1.2.3.dist-info/METADATA": """
+            Metadata-Version: 2.4
+            Name: pkg1.mod
+            Version: 1.2.3
+            """,
+        "pkg2.mod-0.42.dist-info/METADATA": """
+            Metadata-Version: 2.1
+            Name: pkg2.mod
+            Version: 0.42
+            """,
+        "pkg3_mod.egg-info/PKG-INFO": """
+            Name: pkg3.mod
+            Version: 1.2.3
+            """,
+        "pkg4.mod.egg-info/PKG-INFO": """
+            Name: pkg4.mod
+            Version: 0.42
+            """,
+    }
+
+    @pytest.mark.parametrize(
+        ("name", "version", "req"),
+        [
+            ("pkg1.mod", "1.2.3", "pkg1.mod>=1"),
+            ("pkg2.mod", "0.42", "pkg2.mod>=0.4"),
+            ("pkg3.mod", "1.2.3", "pkg3.mod<=2"),
+            ("pkg4.mod", "0.42", "pkg4.mod>0.2,<1"),
+        ],
+    )
+    def test_require_normalised_name(self, tmp_path, monkeypatch, name, version, req):
+        # https://github.com/pypa/setuptools/issues/4853
+        site_packages = self.fake_site_packages(tmp_path, monkeypatch, self.FILES)
+        ws = pkg_resources.WorkingSet([site_packages])
+
+        [dist] = ws.require(req)
+        assert dist.version == version
+        assert dist.project_name == name
+        assert os.path.commonpath([dist.location, site_packages]) == site_packages

From 2c242238f536c4b942812632ba9dd0b1c48b4b85 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 26 Feb 2025 16:46:33 +0000
Subject: [PATCH 1485/1761] Update WorkingSet.find to consider standardised
 dist-info names

---
 pkg_resources/__init__.py                 | 17 +++++++++--------
 pkg_resources/tests/test_pkg_resources.py |  1 -
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 68feeb0593..6d90e7906d 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -708,14 +708,15 @@ def find(self, req: Requirement) -> Distribution | None:
         If there is no active distribution for the requested project, ``None``
         is returned.
         """
-        dist = self.by_key.get(req.key)
-
-        if dist is None:
-            canonical_key = self.normalized_to_canonical_keys.get(req.key)
-
-            if canonical_key is not None:
-                req.key = canonical_key
-                dist = self.by_key.get(canonical_key)
+        for candidate in (
+            req.key,
+            self.normalized_to_canonical_keys.get(req.key),
+            safe_name(req.key).replace(".", "-"),
+        ):
+            dist = self.by_key.get(candidate)
+            if dist:
+                req.key = candidate
+                break
 
         if dist is not None and dist not in req:
             # XXX add more info
diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 73a9a652a0..c749128449 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -477,5 +477,4 @@ def test_require_normalised_name(self, tmp_path, monkeypatch, name, version, req
 
         [dist] = ws.require(req)
         assert dist.version == version
-        assert dist.project_name == name
         assert os.path.commonpath([dist.location, site_packages]) == site_packages

From 23b73aaef2cb95650a997f80ea74c8d51bc5f01c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 26 Feb 2025 17:00:44 +0000
Subject: [PATCH 1486/1761] Fix mypy errors

---
 pkg_resources/__init__.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 6d90e7906d..8a2fbfa412 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -708,11 +708,15 @@ def find(self, req: Requirement) -> Distribution | None:
         If there is no active distribution for the requested project, ``None``
         is returned.
         """
-        for candidate in (
+        dist: Distribution | None = None
+
+        candidates = (
             req.key,
             self.normalized_to_canonical_keys.get(req.key),
             safe_name(req.key).replace(".", "-"),
-        ):
+        )
+
+        for candidate in filter(None, candidates):
             dist = self.by_key.get(candidate)
             if dist:
                 req.key = candidate

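The final shape of the lookup tries each plausible spelling of the requirement key in order and stops at the first hit; `filter(None, ...)` drops the `None` that `normalized_to_canonical_keys.get()` can return, so `by_key.get(None)` is never attempted. A standalone sketch of the same idea (dictionary contents invented, and `safe_name` simplified away):

    by_key = {"pkg1-mod": "<Distribution pkg1.mod 1.2.3>"}
    normalized_to_canonical_keys = {}


    def find(req_key):
        candidates = (
            req_key,                                    # historical exact key
            normalized_to_canonical_keys.get(req_key),  # may be None
            req_key.replace(".", "-"),                  # dist-info normalisation
        )
        for candidate in filter(None, candidates):
            dist = by_key.get(candidate)
            if dist:
                return candidate, dist
        return req_key, None


    print(find("pkg1.mod"))  # ('pkg1-mod', '<Distribution pkg1.mod 1.2.3>')
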
From a3718c8099235fb3b40d013f97530d5aeb5ba0ce Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 26 Feb 2025 17:07:20 +0000
Subject: [PATCH 1487/1761] Slightly change test to make sure the correct
 distribution is found

---
 pkg_resources/tests/test_pkg_resources.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index c749128449..2d54cfb5ce 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -453,11 +453,11 @@ def fake_site_packages(self, tmp_path, monkeypatch, dist_files):
             """,
         "pkg3_mod.egg-info/PKG-INFO": """
             Name: pkg3.mod
-            Version: 1.2.3
+            Version: 1.2.3.4
             """,
         "pkg4.mod.egg-info/PKG-INFO": """
             Name: pkg4.mod
-            Version: 0.42
+            Version: 0.42.1
             """,
     }
 
@@ -466,8 +466,8 @@ def fake_site_packages(self, tmp_path, monkeypatch, dist_files):
         [
             ("pkg1.mod", "1.2.3", "pkg1.mod>=1"),
             ("pkg2.mod", "0.42", "pkg2.mod>=0.4"),
-            ("pkg3.mod", "1.2.3", "pkg3.mod<=2"),
-            ("pkg4.mod", "0.42", "pkg4.mod>0.2,<1"),
+            ("pkg3.mod", "1.2.3.4", "pkg3.mod<=2"),
+            ("pkg4.mod", "0.42.1", "pkg4.mod>0.2,<1"),
         ],
     )
     def test_require_normalised_name(self, tmp_path, monkeypatch, name, version, req):

From 8280e2c4fc2f32a5da1ec3ba322c534e2f5369a3 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 26 Feb 2025 18:27:06 +0000
Subject: [PATCH 1488/1761] Attempt to solve path normalisation issue in
 Windows tests

---
 pkg_resources/tests/test_pkg_resources.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index 2d54cfb5ce..d378079f94 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -477,4 +477,6 @@ def test_require_normalised_name(self, tmp_path, monkeypatch, name, version, req
 
         [dist] = ws.require(req)
         assert dist.version == version
-        assert os.path.commonpath([dist.location, site_packages]) == site_packages
+        assert os.path.samefile(
+            os.path.commonpath([dist.location, site_packages]), site_packages
+        )

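The switch from string equality to `os.path.samefile` matters on Windows, where pytest's `tmp_path` can surface in 8.3 short form or with different casing, making two spellings of the same directory compare unequal as strings. A runnable sketch:

    import os
    import tempfile

    site_packages = tempfile.mkdtemp()
    dist_location = os.path.join(site_packages, "pkg1_mod-1.2.3.dist-info")
    os.makedirs(dist_location)

    common = os.path.commonpath([dist_location, site_packages])
    # String comparison can fail when the two spellings differ only in
    # case or short/long form; samefile checks the directory identity.
    assert os.path.samefile(common, site_packages)
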
From 22355fcb3337317d4f6ca675aa60947692c9af3a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 26 Feb 2025 19:56:25 +0000
Subject: [PATCH 1489/1761] Also consider '-' separator in tests

---
 pkg_resources/tests/test_pkg_resources.py | 25 +++++++++++++----------
 1 file changed, 14 insertions(+), 11 deletions(-)

diff --git a/pkg_resources/tests/test_pkg_resources.py b/pkg_resources/tests/test_pkg_resources.py
index d378079f94..cfc9b16c0f 100644
--- a/pkg_resources/tests/test_pkg_resources.py
+++ b/pkg_resources/tests/test_pkg_resources.py
@@ -462,21 +462,24 @@ def fake_site_packages(self, tmp_path, monkeypatch, dist_files):
     }
 
     @pytest.mark.parametrize(
-        ("name", "version", "req"),
+        ("version", "requirement"),
         [
-            ("pkg1.mod", "1.2.3", "pkg1.mod>=1"),
-            ("pkg2.mod", "0.42", "pkg2.mod>=0.4"),
-            ("pkg3.mod", "1.2.3.4", "pkg3.mod<=2"),
-            ("pkg4.mod", "0.42.1", "pkg4.mod>0.2,<1"),
+            ("1.2.3", "pkg1.mod>=1"),
+            ("0.42", "pkg2.mod>=0.4"),
+            ("1.2.3.4", "pkg3.mod<=2"),
+            ("0.42.1", "pkg4.mod>0.2,<1"),
         ],
     )
-    def test_require_normalised_name(self, tmp_path, monkeypatch, name, version, req):
+    def test_require_non_normalised_name(
+        self, tmp_path, monkeypatch, version, requirement
+    ):
         # https://github.com/pypa/setuptools/issues/4853
         site_packages = self.fake_site_packages(tmp_path, monkeypatch, self.FILES)
         ws = pkg_resources.WorkingSet([site_packages])
 
-        [dist] = ws.require(req)
-        assert dist.version == version
-        assert os.path.samefile(
-            os.path.commonpath([dist.location, site_packages]), site_packages
-        )
+        for req in [requirement, requirement.replace(".", "-")]:
+            [dist] = ws.require(req)
+            assert dist.version == version
+            assert os.path.samefile(
+                os.path.commonpath([dist.location, site_packages]), site_packages
+            )

From edca1811df4daa15d18eb06d0dd5da11eda8b3af Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 26 Feb 2025 20:04:33 +0000
Subject: [PATCH 1490/1761] Add news fragment

---
 newsfragments/4856.bugfix.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4856.bugfix.rst

diff --git a/newsfragments/4856.bugfix.rst b/newsfragments/4856.bugfix.rst
new file mode 100644
index 0000000000..ad0087ea00
--- /dev/null
+++ b/newsfragments/4856.bugfix.rst
@@ -0,0 +1,2 @@
+Fixed ``pkg_resources.require(...)`` to also consider standardised
+``dist-info`` directories.

From 9aa3771f57f070bd13b9a3e83bd469413afc98c7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 26 Feb 2025 20:20:16 +0000
Subject: [PATCH 1491/1761] =?UTF-8?q?Bump=20version:=2075.8.1=20=E2=86=92?=
 =?UTF-8?q?=2075.8.2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              |  2 +-
 NEWS.rst                      | 10 ++++++++++
 newsfragments/4856.bugfix.rst |  2 --
 pyproject.toml                |  2 +-
 4 files changed, 12 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4856.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 4f9f7eac69..b2d1a307f4 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.8.1
+current_version = 75.8.2
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index ac61b70ee0..ed40816c32 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,13 @@
+v75.8.2
+=======
+
+Bugfixes
+--------
+
+- Fixed ``pkg_resources.require(...)`` to also consider standardised
+  ``dist-info`` directories. (#4856)
+
+
 v75.8.1
 =======
 
diff --git a/newsfragments/4856.bugfix.rst b/newsfragments/4856.bugfix.rst
deleted file mode 100644
index ad0087ea00..0000000000
--- a/newsfragments/4856.bugfix.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fixed ``pkg_resources.require(...)`` to also consider standardised
-``dist-info`` directories.
diff --git a/pyproject.toml b/pyproject.toml
index ce6afa23b8..e18e517e46 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.8.1"
+version = "75.8.2"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From aa891069099398fe2eb294ac4b781460d8c0a39b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 26 Feb 2025 17:56:42 -0500
Subject: [PATCH 1492/1761] Consistent import sorting (isort)
 (jaraco/skeleton#157)

---
 ruff.toml | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 1d65c7c225..b52a6d7c80 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -5,9 +5,10 @@ extend = "pyproject.toml"
 extend-select = [
 	# upstream
  
-	"C901",
-	"PERF401",
-	"W",
+	"C901", # complex-structure
+	"I", # isort
+	"PERF401", # manual-list-comprehension
+	"W", # pycodestyle Warning
  
  	# Ensure modern type annotation syntax and best practices
 	# Not including those covered by type-checkers or exclusive to Python 3.11+

From aaa445be356a889362163a18bdcde831f5716f8a Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 27 Feb 2025 15:44:15 +0000
Subject: [PATCH 1493/1761] Enable 'edit this page' button in the docs

---
 docs/conf.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/docs/conf.py b/docs/conf.py
index 20c2a8f099..d38fad28a0 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -136,6 +136,9 @@
         "color-brand-primary": "#E5B62F",  # "yellow"
         "color-brand-content": "#E5B62F",
     },
+    "source_repository": "https://github.com/pypa/setuptools/",
+    "source_branch": "main",
+    "source_directory": "docs/",
 }
 
 # Redirect old docs so links and references in the ecosystem don't break

From 8f42595ca65133aeb4b75f38183233c27b2e6247 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Feb 2025 00:19:07 +0100
Subject: [PATCH 1494/1761] Enable ruff rules ISC001/ISC002
 (jaraco/skeleton#158)

Starting with ruff 0.9.1, they are compatible with the ruff formatter
when used together.
---
 ruff.toml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index b52a6d7c80..2b67926716 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -43,8 +43,6 @@ ignore = [
 	"Q003",
 	"COM812",
 	"COM819",
-	"ISC001",
-	"ISC002",
 
  	# local
 ]

From ec6af1f326bda492d40b3c8bd3c85a5326e54b69 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 3 Mar 2025 19:01:55 -0500
Subject: [PATCH 1495/1761] Fix failing test

---
 distutils/compilers/C/tests/test_base.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/compilers/C/tests/test_base.py b/distutils/compilers/C/tests/test_base.py
index b73ec4e46d..301fc1ec58 100644
--- a/distutils/compilers/C/tests/test_base.py
+++ b/distutils/compilers/C/tests/test_base.py
@@ -15,7 +15,7 @@ def _make_strs(paths):
     """
     Convert paths to strings for legacy compatibility.
     """
-    if sys.version_info > (3, 8) and platform.system() != "Windows":
+    if sys.version_info >= (3, 8) and platform.system() != "Windows":
         return paths
     return list(map(os.fspath, paths))
 

From 6a64635b65c575e7abe034c8e08b7177dabd0901 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 3 Mar 2025 19:28:58 -0500
Subject: [PATCH 1496/1761] Fix condition post-merge

---
 distutils/compilers/C/tests/test_base.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/compilers/C/tests/test_base.py b/distutils/compilers/C/tests/test_base.py
index b73ec4e46d..301fc1ec58 100644
--- a/distutils/compilers/C/tests/test_base.py
+++ b/distutils/compilers/C/tests/test_base.py
@@ -15,7 +15,7 @@ def _make_strs(paths):
     """
     Convert paths to strings for legacy compatibility.
     """
-    if sys.version_info > (3, 8) and platform.system() != "Windows":
+    if sys.version_info >= (3, 8) and platform.system() != "Windows":
         return paths
     return list(map(os.fspath, paths))
 

From b7d4b6ee00804bef36a8c398676e207813540c3b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 4 Mar 2025 03:24:14 -0500
Subject: [PATCH 1497/1761] remove extra spaces in ruff.toml
 (jaraco/skeleton#164)

---
 ruff.toml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 2b67926716..1e9528466b 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -4,13 +4,13 @@ extend = "pyproject.toml"
 [lint]
 extend-select = [
 	# upstream
- 
+
 	"C901", # complex-structure
 	"I", # isort
 	"PERF401", # manual-list-comprehension
 	"W", # pycodestyle Warning
- 
- 	# Ensure modern type annotation syntax and best practices
+
+	# Ensure modern type annotation syntax and best practices
 	# Not including those covered by type-checkers or exclusive to Python 3.11+
 	"FA", # flake8-future-annotations
 	"F404", # late-future-import
@@ -26,7 +26,7 @@ extend-select = [
 ]
 ignore = [
 	# upstream
- 
+
 	# Typeshed rejects complex or non-literal defaults for maintenance and testing reasons,
 	# irrelevant to this project.
 	"PYI011", # typed-argument-default-in-stub
@@ -44,7 +44,7 @@ ignore = [
 	"COM812",
 	"COM819",
 
- 	# local
+	# local
 ]
 
 [format]

From b00e9dd730423a399c1d3c3d5621687adff0c5a5 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Fri, 7 Mar 2025 09:05:55 -0500
Subject: [PATCH 1498/1761] Remove pycodestyle warnings, no longer meaningful
 when using ruff formatter.

Ref https://github.com/jaraco/skeleton/commit/d1c5444126aeacefee3949b30136446ab99979d8#commitcomment-153409678
---
 ruff.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ruff.toml b/ruff.toml
index 1e9528466b..267a1ba1f1 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -8,7 +8,6 @@ extend-select = [
 	"C901", # complex-structure
 	"I", # isort
 	"PERF401", # manual-list-comprehension
-	"W", # pycodestyle Warning
 
 	# Ensure modern type annotation syntax and best practices
 	# Not including those covered by type-checkers or exclusive to Python 3.11+

From d587ff737ee89778cf6f4bbd249e770c965fee06 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos
 <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 7 Mar 2025 15:08:11 +0100
Subject: [PATCH 1499/1761] Update to the latest ruff version
 (jaraco/skeleton#166)

---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 04870d16bf..633e3648e9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.7.1
+  rev: v0.9.9
   hooks:
   - id: ruff
     args: [--fix, --unsafe-fixes]

From 55908954e6c8bc0b246e39ee936172edd5359b58 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 17:01:27 -0500
Subject: [PATCH 1500/1761] Mark failing tests as xfail. Ref #4864.

---
 setuptools/tests/config/test_setupcfg.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index adadc02da3..d356d2b77c 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -420,6 +420,7 @@ def test_not_utf8(self, tmpdir):
             with get_dist(tmpdir):
                 pass
 
+    @pytest.mark.xfail(reason="#4864")
     def test_warn_dash_deprecation(self, tmpdir):
         # warn_dash_deprecation() is a method in setuptools.dist
         # remove this test and the method when no longer needed
@@ -437,6 +438,7 @@ def test_warn_dash_deprecation(self, tmpdir):
         assert metadata.author_email == 'test@test.com'
         assert metadata.maintainer_email == 'foo@foo.com'
 
+    @pytest.mark.xfail(reason="#4864")
     def test_make_option_lowercase(self, tmpdir):
         # remove this test and the method make_option_lowercase() in setuptools.dist
         # when no longer needed

From 618a98b1de1a62ad5835763dd3d27b8fb90a9e37 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 17:32:41 -0500
Subject: [PATCH 1501/1761] Remove Python version gate in _make_strs.

---
 distutils/compilers/C/tests/test_base.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/distutils/compilers/C/tests/test_base.py b/distutils/compilers/C/tests/test_base.py
index 301fc1ec58..b1a852207d 100644
--- a/distutils/compilers/C/tests/test_base.py
+++ b/distutils/compilers/C/tests/test_base.py
@@ -1,6 +1,5 @@
 import os
 import platform
-import sys
 import sysconfig
 import textwrap
 
@@ -13,9 +12,9 @@
 
 def _make_strs(paths):
     """
-    Convert paths to strings for legacy compatibility.
+    Convert paths to strings for platform compatibility.
     """
-    if sys.version_info >= (3, 8) and platform.system() != "Windows":
+    if platform.system() != "Windows":
         return paths
     return list(map(os.fspath, paths))
 

From 6b5fd8c1bee9ac4ce61b268f8d41e0dd3bda587a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 18:23:09 -0500
Subject: [PATCH 1502/1761] Remove _make_strs and update msvc to accept
 WindowsPath.

---
 distutils/compilers/C/msvc.py            |  4 ++--
 distutils/compilers/C/tests/test_base.py | 18 ++++--------------
 2 files changed, 6 insertions(+), 16 deletions(-)

diff --git a/distutils/compilers/C/msvc.py b/distutils/compilers/C/msvc.py
index 2bdc6576e4..2abd7445a3 100644
--- a/distutils/compilers/C/msvc.py
+++ b/distutils/compilers/C/msvc.py
@@ -406,9 +406,9 @@ def compile(  # noqa: C901
                 src = os.path.abspath(src)
 
             if ext in self._c_extensions:
-                input_opt = "/Tc" + src
+                input_opt = f"/Tc{src}"
             elif ext in self._cpp_extensions:
-                input_opt = "/Tp" + src
+                input_opt = f"/Tp{src}"
                 add_cpp_opts = True
             elif ext in self._rc_extensions:
                 # compile .RC to .RES file
diff --git a/distutils/compilers/C/tests/test_base.py b/distutils/compilers/C/tests/test_base.py
index b1a852207d..a762e2b649 100644
--- a/distutils/compilers/C/tests/test_base.py
+++ b/distutils/compilers/C/tests/test_base.py
@@ -1,4 +1,3 @@
-import os
 import platform
 import sysconfig
 import textwrap
@@ -10,15 +9,6 @@
 pytestmark = pytest.mark.usefixtures('suppress_path_mangle')
 
 
-def _make_strs(paths):
-    """
-    Convert paths to strings for platform compatibility.
-    """
-    if platform.system() != "Windows":
-        return paths
-    return list(map(os.fspath, paths))
-
-
 @pytest.fixture
 def c_file(tmp_path):
     c_file = tmp_path / 'foo.c'
@@ -49,11 +39,11 @@ def test_set_include_dirs(c_file):
     compiler = base.new_compiler()
     python = sysconfig.get_paths()['include']
     compiler.set_include_dirs([python])
-    compiler.compile(_make_strs([c_file]))
+    compiler.compile([c_file])
 
     # do it again, setting include dirs after any initialization
     compiler.set_include_dirs([python])
-    compiler.compile(_make_strs([c_file]))
+    compiler.compile([c_file])
 
 
 def test_has_function_prototype():
@@ -87,7 +77,7 @@ def test_include_dirs_after_multiple_compile_calls(c_file):
     compiler = base.new_compiler()
     python = sysconfig.get_paths()['include']
     compiler.set_include_dirs([python])
-    compiler.compile(_make_strs([c_file]))
+    compiler.compile([c_file])
     assert compiler.include_dirs == [python]
-    compiler.compile(_make_strs([c_file]))
+    compiler.compile([c_file])
     assert compiler.include_dirs == [python]

From c71266345c64fd662b5f95bbbc6e4536172f496d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 17:14:40 -0500
Subject: [PATCH 1503/1761] Always rewrite a Python shebang to #!python.

Closes pypa/setuptools#4863
---
 distutils/command/build_scripts.py | 13 +------------
 1 file changed, 1 insertion(+), 12 deletions(-)

diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index 1c6fd3caff..da18da1a40 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -5,7 +5,6 @@
 import os
 import re
 import tokenize
-from distutils import sysconfig
 from distutils._log import log
 from stat import ST_MODE
 from typing import ClassVar
@@ -106,18 +105,8 @@ def _copy_script(self, script, outfiles, updated_files):  # noqa: C901
         if shebang_match:
             log.info("copying and adjusting %s -> %s", script, self.build_dir)
             if not self.dry_run:
-                if not sysconfig.python_build:
-                    executable = self.executable
-                else:
-                    executable = os.path.join(
-                        sysconfig.get_config_var("BINDIR"),
-                        "python{}{}".format(
-                            sysconfig.get_config_var("VERSION"),
-                            sysconfig.get_config_var("EXE"),
-                        ),
-                    )
                 post_interp = shebang_match.group(1) or ''
-                shebang = "#!" + executable + post_interp + "\n"
+                shebang = f"#!python{post_interp}\n"
                 self._validate_shebang(shebang, f.encoding)
                 with open(outfile, "w", encoding=f.encoding) as outf:
                     outf.write(shebang)

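After this change the build step stops guessing an interpreter path entirely: any recognized shebang is collapsed to the `#!python` placeholder (plus whatever interpreter options followed), and substituting a concrete executable is left to the installer. A sketch of the rewrite; the regex is close to the `first_line_re` the command uses, but treat it as illustrative:

    import re

    # Shebang whose interpreter ends in "python" plus an optional
    # version, capturing any trailing options such as "-u".
    first_line_re = re.compile(rb"^#!.*python[0-9.]*([ \t].*)?$")

    match = first_line_re.match(b"#!/usr/bin/env python3 -u\n")
    if match:
        post_interp = (match.group(1) or b"").decode()
        shebang = f"#!python{post_interp}\n"
        print(repr(shebang))  # '#!python -u\n'
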
From eecd653a8c1d556888808fe83644dd8d0133bed4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 17:25:00 -0500
Subject: [PATCH 1504/1761] =?UTF-8?q?=F0=9F=91=B9=20Feed=20the=20hobgoblin?=
 =?UTF-8?q?s=20(delint).?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 distutils/command/build_scripts.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py
index da18da1a40..7f9198c5e7 100644
--- a/distutils/command/build_scripts.py
+++ b/distutils/command/build_scripts.py
@@ -75,7 +75,7 @@ def copy_scripts(self):
 
         return outfiles, updated_files
 
-    def _copy_script(self, script, outfiles, updated_files):  # noqa: C901
+    def _copy_script(self, script, outfiles, updated_files):
         shebang_match = None
         script = convert_path(script)
         outfile = os.path.join(self.build_dir, os.path.basename(script))

From 3817dedb78f114d887499a3f7fb684acfcddc136 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 19:20:32 -0500
Subject: [PATCH 1505/1761] Reword note.

---
 distutils/cmd.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/distutils/cmd.py b/distutils/cmd.py
index 5d4af6a48a..241621bd51 100644
--- a/distutils/cmd.py
+++ b/distutils/cmd.py
@@ -322,8 +322,8 @@ def set_undefined_options(
             if getattr(self, dst_option) is None:
                 setattr(self, dst_option, getattr(src_cmd_obj, src_option))
 
-    # NOTE: Because distutils is private setuptools implementation and we don't need to re-expose all commands here,
-    # we're not overloading each and every command possibility.
+    # NOTE: Because distutils is private to Setuptools and not all commands are exposed here,
+    # not every possible command is enumerated in the signature.
     def get_finalized_command(self, command: str, create: bool = True) -> Command:
         """Wrapper around Distribution's 'get_command_obj()' method: find
         (create if necessary and 'create' is true) the command object for

From 0cffd6186ecc7b76d8544a099473cc262b9335f3 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 19:39:53 -0500
Subject: [PATCH 1506/1761] =?UTF-8?q?Bump=20version:=2075.8.2=20=E2=86=92?=
 =?UTF-8?q?=2075.9.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 NEWS.rst                       | 9 +++++++++
 newsfragments/4852.feature.rst | 1 -
 pyproject.toml                 | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4852.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index b2d1a307f4..7966da8876 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.8.2
+current_version = 75.9.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index c0138f3428..de626ed09e 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v75.9.0
+=======
+
+Features
+--------
+
+- Synced with pypa/distutils@91f75bb98 including exporting of PyInit_pkg (pypa/distutils#327) and a refactoring of the compiler classes into distutils.compilers (pypa/distutils#295). (#4852)
+
+
 v75.8.2
 =======
 
diff --git a/newsfragments/4852.feature.rst b/newsfragments/4852.feature.rst
deleted file mode 100644
index adabdbc33a..0000000000
--- a/newsfragments/4852.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Synced with pypa/distutils@91f75bb98 including exporting of PyInit_pkg (pypa/distutils#327) and a refactoring of the compiler classes into distutils.compilers (pypa/distutils#295).
diff --git a/pyproject.toml b/pyproject.toml
index e18e517e46..1df95f02ed 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.8.2"
+version = "75.9.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 5cc292799ced115f6404b65d59eed0e629741bd7 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 20:49:42 -0500
Subject: [PATCH 1507/1761] Add news fragment.

---
 newsfragments/4865.removal.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4865.removal.rst

diff --git a/newsfragments/4865.removal.rst b/newsfragments/4865.removal.rst
new file mode 100644
index 0000000000..2673f0b456
--- /dev/null
+++ b/newsfragments/4865.removal.rst
@@ -0,0 +1 @@
+Synced with pypa/distutils@5589d7527 including a simplified shebang generation when building scripts (#4863).

From 6d7cc0ff91349ce0a9d3b5902a93302d0073d7b0 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 21:15:01 -0500
Subject: [PATCH 1508/1761] In config command, move to eager imports. Restore
 LinkError to the ccompiler module.

Closes pypa/setuptools#4866
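
For illustration, the import this restores (a sketch, assuming an
interpreter running a version affected by #4866):

    # Raised ImportError after the compilers refactor; works again now
    # that LinkError is re-exported from distutils.ccompiler eagerly.
    from distutils.ccompiler import LinkError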
---
 distutils/ccompiler.py      | 2 ++
 distutils/command/config.py | 9 +--------
 2 files changed, 3 insertions(+), 8 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 1f788c50ae..a4f5e1cc33 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -1,6 +1,7 @@
 from .compilers.C import base
 from .compilers.C.base import (
     CompileError,
+    LinkError,
     gen_lib_options,
     gen_preprocess_options,
     get_default_compiler,
@@ -10,6 +11,7 @@
 
 __all__ = [
     'CompileError',
+    'LinkError',
     'gen_lib_options',
     'gen_preprocess_options',
     'get_default_compiler',
diff --git a/distutils/command/config.py b/distutils/command/config.py
index fe83c2924d..e087edd607 100644
--- a/distutils/command/config.py
+++ b/distutils/command/config.py
@@ -17,6 +17,7 @@
 from collections.abc import Sequence
 from distutils._log import log
 
+from ..ccompiler import CCompiler, CompileError, LinkError, new_compiler
 from ..core import Command
 from ..errors import DistutilsExecError
 from ..sysconfig import customize_compiler
@@ -90,8 +91,6 @@ def _check_compiler(self):
         """
         # We do this late, and only on-demand, because this is an expensive
         # import.
-        from ..ccompiler import CCompiler, new_compiler
-
         if not isinstance(self.compiler, CCompiler):
             self.compiler = new_compiler(
                 compiler=self.compiler, dry_run=self.dry_run, force=True
@@ -177,8 +176,6 @@ def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
         preprocessor succeeded, false if there were any errors.
         ('body' probably isn't of much use, but what the heck.)
         """
-        from ..ccompiler import CompileError
-
         self._check_compiler()
         ok = True
         try:
@@ -213,8 +210,6 @@ def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
         """Try to compile a source file built from 'body' and 'headers'.
         Return true on success, false otherwise.
         """
-        from ..ccompiler import CompileError
-
         self._check_compiler()
         try:
             self._compile(body, headers, include_dirs, lang)
@@ -239,8 +234,6 @@ def try_link(
         'headers', to executable form.  Return true on success, false
         otherwise.
         """
-        from ..ccompiler import CompileError, LinkError
-
         self._check_compiler()
         try:
             self._link(body, headers, include_dirs, libraries, library_dirs, lang)

From 7cff740272fcb99ef1f7c17cc074029acaa9ca42 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 21:49:40 -0500
Subject: [PATCH 1509/1761] Add news fragment.

---
 newsfragments/4866.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4866.bugfix.rst

diff --git a/newsfragments/4866.bugfix.rst b/newsfragments/4866.bugfix.rst
new file mode 100644
index 0000000000..94366bd4f8
--- /dev/null
+++ b/newsfragments/4866.bugfix.rst
@@ -0,0 +1 @@
+Fix ImportError in distutils when configuring for linking.

From 0e968cfbc72296e8aa13a650c2adbef76f4b6464 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 21:56:42 -0500
Subject: [PATCH 1510/1761] Remove more lazy imports.

No idea why they were there.
---
 distutils/command/build.py      |  7 +------
 distutils/command/build_clib.py | 10 +---------
 distutils/command/build_ext.py  |  9 +--------
 distutils/command/config.py     |  2 --
 4 files changed, 3 insertions(+), 25 deletions(-)

diff --git a/distutils/command/build.py b/distutils/command/build.py
index 61f2431a5e..6a8303a954 100644
--- a/distutils/command/build.py
+++ b/distutils/command/build.py
@@ -10,17 +10,12 @@
 from collections.abc import Callable
 from typing import ClassVar
 
+from ..ccompiler import show_compilers
 from ..core import Command
 from ..errors import DistutilsOptionError
 from ..util import get_platform
 
 
-def show_compilers():
-    from ..ccompiler import show_compilers
-
-    show_compilers()
-
-
 class build(Command):
     description = "build everything needed to install"
 
diff --git a/distutils/command/build_clib.py b/distutils/command/build_clib.py
index 0d6d1c8a2f..8b65b3d8ec 100644
--- a/distutils/command/build_clib.py
+++ b/distutils/command/build_clib.py
@@ -19,17 +19,12 @@
 from distutils._log import log
 from typing import ClassVar
 
+from ..ccompiler import new_compiler, show_compilers
 from ..core import Command
 from ..errors import DistutilsSetupError
 from ..sysconfig import customize_compiler
 
 
-def show_compilers():
-    from ..ccompiler import show_compilers
-
-    show_compilers()
-
-
 class build_clib(Command):
     description = "build C/C++ libraries used by Python extensions"
 
@@ -93,9 +88,6 @@ def run(self) -> None:
         if not self.libraries:
             return
 
-        # Yech -- this is cut 'n pasted from build_ext.py!
-        from ..ccompiler import new_compiler
-
         self.compiler = new_compiler(
             compiler=self.compiler, dry_run=self.dry_run, force=self.force
         )
diff --git a/distutils/command/build_ext.py b/distutils/command/build_ext.py
index 55dec90e21..ec45b4403e 100644
--- a/distutils/command/build_ext.py
+++ b/distutils/command/build_ext.py
@@ -16,6 +16,7 @@
 from typing import ClassVar
 
 from .._modified import newer_group
+from ..ccompiler import new_compiler, show_compilers
 from ..core import Command
 from ..errors import (
     CCompilerError,
@@ -34,12 +35,6 @@
 extension_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
 
 
-def show_compilers():
-    from ..ccompiler import show_compilers
-
-    show_compilers()
-
-
 class build_ext(Command):
     description = "build C/C++ extensions (compile/link to build directory)"
 
@@ -303,8 +298,6 @@ def finalize_options(self) -> None:  # noqa: C901
                 raise DistutilsOptionError("parallel should be an integer")
 
     def run(self) -> None:  # noqa: C901
-        from ..ccompiler import new_compiler
-
         # 'self.extensions', as supplied by setup.py, is a list of
         # Extension instances.  See the documentation for Extension (in
         # distutils.extension) for details.
diff --git a/distutils/command/config.py b/distutils/command/config.py
index e087edd607..44df823388 100644
--- a/distutils/command/config.py
+++ b/distutils/command/config.py
@@ -258,8 +258,6 @@ def try_run(
         built from 'body' and 'headers'.  Return true on success, false
         otherwise.
         """
-        from ..ccompiler import CompileError, LinkError
-
         self._check_compiler()
         try:
             src, obj, exe = self._link(

From 7530d69a8decc3fac377e361f17d00c0878cd6ea Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 21:57:37 -0500
Subject: [PATCH 1511/1761] =?UTF-8?q?Bump=20version:=2075.9.0=20=E2=86=92?=
 =?UTF-8?q?=2075.9.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4866.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4866.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 7966da8876..e9e282c630 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.9.0
+current_version = 75.9.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index de626ed09e..fe9001c040 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v75.9.1
+=======
+
+Bugfixes
+--------
+
+- Fix ImportError in distutils when configuring for linking. (#4866)
+
+
 v75.9.0
 =======
 
diff --git a/newsfragments/4866.bugfix.rst b/newsfragments/4866.bugfix.rst
deleted file mode 100644
index 94366bd4f8..0000000000
--- a/newsfragments/4866.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix ImportError in distutils when configuring for linking.
diff --git a/pyproject.toml b/pyproject.toml
index 1df95f02ed..209186dd32 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.9.0"
+version = "75.9.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From c11a4940deba04a6f8f8e1410686116f587f6f70 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 9 Mar 2025 09:40:49 -0400
Subject: [PATCH 1512/1761] =?UTF-8?q?Bump=20version:=2075.9.1=20=E2=86=92?=
 =?UTF-8?q?=2076.0.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 NEWS.rst                       | 9 +++++++++
 newsfragments/4865.removal.rst | 1 -
 pyproject.toml                 | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4865.removal.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index e9e282c630..e459d38278 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.9.1
+current_version = 76.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index fe9001c040..21a6b11cd3 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v76.0.0
+=======
+
+Deprecations and Removals
+-------------------------
+
+- Synced with pypa/distutils@5589d7527 including a simplified shebang generation when building scripts (#4863). (#4865)
+
+
 v75.9.1
 =======
 
diff --git a/newsfragments/4865.removal.rst b/newsfragments/4865.removal.rst
deleted file mode 100644
index 2673f0b456..0000000000
--- a/newsfragments/4865.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Synced with pypa/distutils@5589d7527 including a simplified shebang generation when building scripts (#4863).
diff --git a/pyproject.toml b/pyproject.toml
index 209186dd32..479eef5bf4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.9.1"
+version = "76.0.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From b53777a2755582c7a24c373eb617fff8b991eb9c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 9 Mar 2025 12:27:59 -0400
Subject: [PATCH 1513/1761] Clean up docstring.

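The docstring being reworded belongs to distutils.util.execute; a typical
call looks like this (a sketch, not part of the change):

    from distutils.util import execute

    # With dry_run=True the message is logged but the function is skipped.
    execute(print, ('compiling',), msg="emitting status", dry_run=True)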
---
 distutils/util.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/distutils/util.py b/distutils/util.py
index 4d32fb2ea9..6dbe049f42 100644
--- a/distutils/util.py
+++ b/distutils/util.py
@@ -315,13 +315,14 @@ def execute(
     verbose: bool = False,
     dry_run: bool = False,
 ) -> None:
-    """Perform some action that affects the outside world (eg.  by
-    writing to the filesystem).  Such actions are special because they
-    are disabled by the 'dry_run' flag.  This method takes care of all
-    that bureaucracy for you; all you have to do is supply the
+    """
+    Perform some action that affects the outside world (e.g. by
+    writing to the filesystem). Such actions are special because they
+    are disabled by the 'dry_run' flag. This method handles that
+    complication; simply supply the
     function to call and an argument tuple for it (to embody the
-    "external action" being performed), and an optional message to
-    print.
+    "external action" being performed) and an optional message to
+    emit.
     """
     if msg is None:
         msg = f"{func.__name__}{args!r}"

From 45375cdd20e27f0fe48a29312eec50e310729907 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 26 Feb 2025 23:06:13 -0500
Subject: [PATCH 1514/1761] Reduce Ruff configs that duplicate upstream after
 skeleton merge

---
 ruff.toml | 16 ++++++----------
 1 file changed, 6 insertions(+), 10 deletions(-)

diff --git a/ruff.toml b/ruff.toml
index 1d5dff5eb7..ce0b99e1dc 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -10,13 +10,13 @@ exclude = [
 [lint]
 extend-select = [
 	# upstream
- 
+
 	"C901", # complex-structure
 	"I", # isort
 	"PERF401", # manual-list-comprehension
 	"W", # pycodestyle Warning
- 
- 	# Ensure modern type annotation syntax and best practices
+
+	# Ensure modern type annotation syntax and best practices
 	# Not including those covered by type-checkers or exclusive to Python 3.11+
 	"FA", # flake8-future-annotations
 	"F404", # late-future-import
@@ -30,13 +30,9 @@ extend-select = [
 
 	# local
 	"ANN2", # missing-return-type-*
-	"F", # Pyflakes
-	"F404", # late-future-import
-	"FA", # flake8-future-annotations
 	"PERF", # Perflint
 	"PGH", # pygrep-hooks (blanket-* rules)
 	"PT", # flake8-pytest-style
-	"PYI", # flake8-pyi
 	"RUF10", # unused-noqa & redirected-noqa
 	"TRY", # tryceratops
 	"UP", # pyupgrade
@@ -44,7 +40,7 @@ extend-select = [
 ]
 ignore = [
 	# upstream
- 
+
 	# Typeshed rejects complex or non-literal defaults for maintenance and testing reasons,
 	# irrelevant to this project.
 	"PYI011", # typed-argument-default-in-stub
@@ -64,8 +60,8 @@ ignore = [
 	"ISC001",
 	"ISC002",
 
- 	# local
- 	"PERF203", # try-except-in-loop, micro-optimisation with many false-positive. Worth checking but don't block CI
+	# local
+	"PERF203", # try-except-in-loop, micro-optimisation with many false-positive. Worth checking but don't block CI
 	"PT007", # temporarily disabled, TODO: configure and standardize to preference
 	"PT011", # temporarily disabled, TODO: tighten expected error 
 	"PT012", # pytest-raises-with-multiple-statements, avoid extra dummy methods for a few lines, sometimes we explicitly assert in case of no error

From 9559193e1eb12aee0a9eb6cc754e519e6e09ecb4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 2 Feb 2025 13:59:22 -0500
Subject: [PATCH 1515/1761] Merge pull request #4766 from di/fix/3777

Fix "Wheel naming is not following PEP 491 convention"
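
For illustration, the updated safer_name normalization behaves roughly
like this (a sketch; the inputs are made-up examples):

    from setuptools._normalization import safer_name

    # Name normalization per the packaging spec, then the '-' -> '_'
    # escaping required for wheel filename components.
    safer_name("Foo.Bar--baz")  # -> 'foo_bar_baz'
    safer_name("foo.bar")       # -> 'foo_bar'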
---
 newsfragments/4766.bugfix.rst         |  1 +
 setuptools/_normalization.py          |  8 +++++++-
 setuptools/command/bdist_wheel.py     | 12 +-----------
 setuptools/tests/test_bdist_wheel.py  |  4 ++--
 setuptools/tests/test_dist.py         |  4 ++--
 setuptools/tests/test_dist_info.py    |  2 +-
 setuptools/tests/test_easy_install.py | 18 +++++++++++-------
 7 files changed, 25 insertions(+), 24 deletions(-)
 create mode 100644 newsfragments/4766.bugfix.rst

diff --git a/newsfragments/4766.bugfix.rst b/newsfragments/4766.bugfix.rst
new file mode 100644
index 0000000000..fcd54785d8
--- /dev/null
+++ b/newsfragments/4766.bugfix.rst
@@ -0,0 +1 @@
+Fix wheel file naming to follow binary distribution specification -- by :user:`di`
diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py
index 467b643d46..9541a55d6c 100644
--- a/setuptools/_normalization.py
+++ b/setuptools/_normalization.py
@@ -134,7 +134,13 @@ def filename_component_broken(value: str) -> str:
 def safer_name(value: str) -> str:
     """Like ``safe_name`` but can be used as filename component for wheel"""
     # See bdist_wheel.safer_name
-    return filename_component(safe_name(value))
+    return (
+        # Per https://packaging.python.org/en/latest/specifications/name-normalization/#name-normalization
+        re.sub(r"[-_.]+", "-", safe_name(value))
+        .lower()
+        # Per https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode
+        .replace("-", "_")
+    )
 
 
 def safer_best_effort_version(value: str) -> str:
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index c88753476f..2584050a59 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -26,6 +26,7 @@
 from wheel.wheelfile import WheelFile
 
 from .. import Command, __version__
+from .._normalization import safer_name
 from ..warnings import SetuptoolsDeprecationWarning
 from .egg_info import egg_info as egg_info_cls
 
@@ -35,13 +36,6 @@
     from _typeshed import ExcInfo
 
 
-def safe_name(name: str) -> str:
-    """Convert an arbitrary string to a standard distribution name
-    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
-    """
-    return re.sub("[^A-Za-z0-9.]+", "-", name)
-
-
 def safe_version(version: str) -> str:
     """
     Convert an arbitrary string to a standard version string
@@ -147,10 +141,6 @@ def get_abi_tag() -> str | None:
     return abi
 
 
-def safer_name(name: str) -> str:
-    return safe_name(name).replace("-", "_")
-
-
 def safer_version(version: str) -> str:
     return safe_version(version).replace("-", "_")
 
diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index 141ef716ab..4b6302238f 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -252,9 +252,9 @@ def test_no_scripts(wheel_paths):
 
 
 def test_unicode_record(wheel_paths):
-    path = next(path for path in wheel_paths if "unicode.dist" in path)
+    path = next(path for path in wheel_paths if "unicode_dist" in path)
     with ZipFile(path) as zf:
-        record = zf.read("unicode.dist-0.1.dist-info/RECORD")
+        record = zf.read("unicode_dist-0.1.dist-info/RECORD")
 
     assert "åäö_日本語.py".encode() in record
 
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 1bc4923032..7216aaf506 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -8,7 +8,7 @@
 from setuptools import Distribution
 from setuptools.dist import check_package_data, check_specifier
 
-from .test_easy_install import make_nspkg_sdist
+from .test_easy_install import make_trivial_sdist
 from .test_find_packages import ensure_files
 from .textwrap import DALS
 
@@ -25,7 +25,7 @@ def test_dist_fetch_build_egg(tmpdir):
     def sdist_with_index(distname, version):
         dist_dir = index.mkdir(distname)
         dist_sdist = '%s-%s.tar.gz' % (distname, version)
-        make_nspkg_sdist(str(dist_dir.join(dist_sdist)), distname, version)
+        make_trivial_sdist(str(dist_dir.join(dist_sdist)), distname, version)
         with dist_dir.join('index.html').open('w') as fp:
             fp.write(
                 DALS(
diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py
index 6e109c9db2..47b9b7e19c 100644
--- a/setuptools/tests/test_dist_info.py
+++ b/setuptools/tests/test_dist_info.py
@@ -188,7 +188,7 @@ def test_dist_info_is_the_same_as_in_wheel(
         dist_info = next(tmp_path.glob("dir_dist/*.dist-info"))
 
         assert dist_info.name == wheel_dist_info.name
-        assert dist_info.name.startswith(f"{name.replace('-', '_')}-{version}{suffix}")
+        assert dist_info.name.startswith(f"my_proj-{version}{suffix}")
         for file in "METADATA", "entry_points.txt":
             assert read(dist_info / file) == read(wheel_dist_info / file)
 
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 933cebb78e..0f186356fe 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -26,6 +26,7 @@
 import setuptools.command.easy_install as ei
 from pkg_resources import Distribution as PRDistribution, normalize_path, working_set
 from setuptools import sandbox
+from setuptools._normalization import safer_name
 from setuptools.command.easy_install import PthDistributions
 from setuptools.dist import Distribution
 from setuptools.sandbox import run_setup
@@ -669,11 +670,11 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
 
         with contexts.save_pkg_resources_state():
             with contexts.tempdir() as temp_dir:
-                foobar_1_archive = os.path.join(temp_dir, 'foo.bar-0.1.tar.gz')
+                foobar_1_archive = os.path.join(temp_dir, 'foo_bar-0.1.tar.gz')
                 make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1')
                 # Now actually go ahead an extract to the temp dir and add the
                 # extracted path to sys.path so foo.bar v0.1 is importable
-                foobar_1_dir = os.path.join(temp_dir, 'foo.bar-0.1')
+                foobar_1_dir = os.path.join(temp_dir, 'foo_bar-0.1')
                 os.mkdir(foobar_1_dir)
                 with tarfile.open(foobar_1_archive) as tf:
                     tf.extraction_filter = lambda member, path: member
@@ -696,7 +697,7 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
                             len(foo.__path__) == 2):
                         print('FAIL')
 
-                    if 'foo.bar-0.2' not in foo.__path__[0]:
+                    if 'foo_bar-0.2' not in foo.__path__[0]:
                         print('FAIL')
                 """
                 )
@@ -717,8 +718,8 @@ def test_setup_requires_override_nspkg(self, use_setup_cfg):
                         # Don't even need to install the package, just
                         # running the setup.py at all is sufficient
                         run_setup(test_setup_py, ['--name'])
-                    except pkg_resources.VersionConflict:
-                        self.fail(
+                    except pkg_resources.VersionConflict:  # pragma: nocover
+                        pytest.fail(
                             'Installing setup.py requirements caused a VersionConflict'
                         )
 
@@ -1118,6 +1119,8 @@ def make_nspkg_sdist(dist_path, distname, version):
     package with the same name as distname.  The top-level package is
     designated a namespace package).
     """
+    # Assert that the distname contains at least one period
+    assert '.' in distname
 
     parts = distname.split('.')
     nspackage = parts[0]
@@ -1206,10 +1209,11 @@ def create_setup_requires_package(
     package itself is just 'test_pkg'.
     """
 
+    normalized_distname = safer_name(distname)
     test_setup_attrs = {
         'name': 'test_pkg',
         'version': '0.0',
-        'setup_requires': ['%s==%s' % (distname, version)],
+        'setup_requires': [f'{normalized_distname}=={version}'],
         'dependency_links': [os.path.abspath(path)],
     }
     if setup_attrs:
@@ -1258,7 +1262,7 @@ def create_setup_requires_package(
     with open(os.path.join(test_pkg, 'setup.py'), 'w', encoding="utf-8") as f:
         f.write(setup_py_template % test_setup_attrs)
 
-    foobar_path = os.path.join(path, '%s-%s.tar.gz' % (distname, version))
+    foobar_path = os.path.join(path, f'{normalized_distname}-{version}.tar.gz')
     make_package(foobar_path, distname, version)
 
     return test_pkg

From 1740976b2495a381f0ae13ecba1bb8676334cbf1 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 11 Mar 2025 18:59:16 -0400
Subject: [PATCH 1516/1761] Repoint the news fragment.

---
 newsfragments/{4766.bugfix.rst => 4877.bugfix.rst} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename newsfragments/{4766.bugfix.rst => 4877.bugfix.rst} (100%)

diff --git a/newsfragments/4766.bugfix.rst b/newsfragments/4877.bugfix.rst
similarity index 100%
rename from newsfragments/4766.bugfix.rst
rename to newsfragments/4877.bugfix.rst

From 23b7b529c943fd1d1f7f70d269cbe7bb4b74b292 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 11 Mar 2025 18:59:23 -0400
Subject: [PATCH 1517/1761] =?UTF-8?q?Bump=20version:=2075.3.0=20=E2=86=92?=
 =?UTF-8?q?=2075.3.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4877.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4877.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 974699dc24..7165091278 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.3.0
+current_version = 75.3.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 39bd36de66..f613830bd4 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v76.0.1
+=======
+
+Bugfixes
+--------
+
+- Fix wheel file naming to follow binary distribution specification -- by :user:`di` (#4877)
+
+
 v75.3.0
 =======
 
diff --git a/newsfragments/4877.bugfix.rst b/newsfragments/4877.bugfix.rst
deleted file mode 100644
index fcd54785d8..0000000000
--- a/newsfragments/4877.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix wheel file naming to follow binary distribution specification -- by :user:`di`
diff --git a/pyproject.toml b/pyproject.toml
index 423d00701b..b9a12ee6e2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.3.0"
+version = "75.3.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 5cac330442ca2dab2bf75aa7b76dc23883ea93a0 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 11 Mar 2025 19:18:44 -0400
Subject: [PATCH 1518/1761] Pin ruff at 2024-10-29 to avoid emergent failures
 from later releases.

---
 pyproject.toml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index b9a12ee6e2..533d64aff4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -60,6 +60,9 @@ test = [
 	"pyproject-hooks!=1.1",
 
 	"jaraco.test>=5.5", # py.typed
+
+	# temporarily pin ruff for backport bugfix (#4879)
+	"ruff <= 0.7.1",
 ]
 
 doc = [

From be965883cfa5b210dd009e9d9f56d785b6b51d6a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 8 Mar 2025 17:01:27 -0500
Subject: [PATCH 1519/1761] Mark failing tests as xfail. Ref #4864.

---
 setuptools/tests/config/test_setupcfg.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index 9f225416c0..ef5e67270d 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -422,6 +422,7 @@ def test_not_utf8(self, tmpdir):
             with get_dist(tmpdir):
                 pass
 
+    @pytest.mark.xfail(reason="#4864")
     def test_warn_dash_deprecation(self, tmpdir):
         # warn_dash_deprecation() is a method in setuptools.dist
         # remove this test and the method when no longer needed
@@ -439,6 +440,7 @@ def test_warn_dash_deprecation(self, tmpdir):
         assert metadata.author_email == 'test@test.com'
         assert metadata.maintainer_email == 'foo@foo.com'
 
+    @pytest.mark.xfail(reason="#4864")
     def test_make_option_lowercase(self, tmpdir):
         # remove this test and the method make_option_lowercase() in setuptools.dist
         # when no longer needed

From 51a09c9f295a82a7853600be5c50216fdbe60433 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 11 Mar 2025 19:22:32 -0400
Subject: [PATCH 1520/1761] Mark failing test as xfail.

Ref #4745
---
 setuptools/tests/test_dist.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 7216aaf506..4247fbc604 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -138,6 +138,7 @@ def test_check_package_data(package_data, expected_message):
             check_package_data(None, 'package_data', package_data)
 
 
+@pytest.mark.xfail(reason="#4745")
 def test_check_specifier():
     # valid specifier value
     attrs = {'name': 'foo', 'python_requires': '>=3.0, !=3.1'}

From 29f042741d844ae3a9bb57c042f845d1debb834b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 11 Mar 2025 19:59:48 -0400
Subject: [PATCH 1521/1761] Add a news entry.

---
 NEWS.rst | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/NEWS.rst b/NEWS.rst
index f613830bd4..112deedc39 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,4 +1,9 @@
-v76.0.1
+v75.3.2
+=======
+
+- Fixed version error in changelog.
+
+v75.3.1
 =======
 
 Bugfixes

From d5234bd825eae911f85c85e22d09c1e94565e69d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 11 Mar 2025 20:01:57 -0400
Subject: [PATCH 1522/1761] =?UTF-8?q?Bump=20version:=2075.3.1=20=E2=86=92?=
 =?UTF-8?q?=2075.3.2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg | 2 +-
 pyproject.toml   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 7165091278..a122da564d 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 75.3.1
+current_version = 75.3.2
 commit = True
 tag = True
 
diff --git a/pyproject.toml b/pyproject.toml
index 533d64aff4..f6f6f075f2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "75.3.1"
+version = "75.3.2"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 90bc1cf22f2790cada59404f5f6dedb9e9656b20 Mon Sep 17 00:00:00 2001
From: Thanos <111999343+Sachaa-Thanasius@users.noreply.github.com>
Date: Wed, 12 Mar 2025 14:50:31 -0400
Subject: [PATCH 1523/1761] Make sure type(None) is used instead of None for
 the type argument of isinstance.

Using None, whether alone or in a tuple, as the second argument raises a TypeError.
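
A minimal illustration of the difference:

    isinstance(None, (str, type(None)))  # -> True
    isinstance(None, (str, None))        # raises TypeError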
---
 distutils/compilers/C/base.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/compilers/C/base.py b/distutils/compilers/C/base.py
index 4767b7f332..b03455a6c6 100644
--- a/distutils/compilers/C/base.py
+++ b/distutils/compilers/C/base.py
@@ -237,7 +237,7 @@ def _is_valid_macro(name, value=None):
         """
         A valid macro is a ``name : str`` and a ``value : str | None``.
         """
-        return isinstance(name, str) and isinstance(value, (str, None))
+        return isinstance(name, str) and isinstance(value, (str, type(None)))
 
     # -- Bookkeeping methods -------------------------------------------
 

From cf726b7fa0f43164aa5cb5cc9876e99d0ea96064 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Sat, 15 Mar 2025 14:54:21 -0400
Subject: [PATCH 1524/1761] Explicit re-exports of submodules in
 `distutils.command.__all__`

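With the explicit imports, each command module is reachable as an
attribute of the package (a sketch):

    import distutils.command

    # Previously this required importing the submodule explicitly.
    print(distutils.command.build_ext)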
---
 distutils/command/__init__.py | 35 +++++++++++++++++++++++++++--------
 1 file changed, 27 insertions(+), 8 deletions(-)

diff --git a/distutils/command/__init__.py b/distutils/command/__init__.py
index 0f8a1692ba..ceabe967b1 100644
--- a/distutils/command/__init__.py
+++ b/distutils/command/__init__.py
@@ -3,21 +3,40 @@
 Package containing implementation of all the standard Distutils
 commands."""
 
+from . import (
+    bdist,
+    bdist_dumb,
+    bdist_rpm,
+    build,
+    build_clib,
+    build_ext,
+    build_py,
+    build_scripts,
+    check,
+    clean,
+    install,
+    install_data,
+    install_headers,
+    install_lib,
+    install_scripts,
+    sdist,
+)
+
 __all__ = [
+    'bdist',
+    'bdist_dumb',
+    'bdist_rpm',
     'build',
-    'build_py',
-    'build_ext',
     'build_clib',
+    'build_ext',
+    'build_py',
     'build_scripts',
+    'check',
     'clean',
     'install',
-    'install_lib',
+    'install_data',
     'install_headers',
+    'install_lib',
     'install_scripts',
-    'install_data',
     'sdist',
-    'bdist',
-    'bdist_dumb',
-    'bdist_rpm',
-    'check',
 ]

From 0a4f350b49aa3e86bbe28c545904d075d81cd58a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 16 Mar 2025 13:27:04 -0400
Subject: [PATCH 1525/1761] Add test capturing missed expectation.

Ref #4786
---
 setuptools/msvc.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 9c9a63568e..b56455eefc 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1024,6 +1024,16 @@ def VCTools(self):
         ------
         list of str
             paths
+
+        When host CPU is ARM, the tools should be found for ARM.
+
+        >>> getfixture('windows_only')
+        >>> mp = getfixture('monkeypatch')
+        >>> mp.setattr(PlatformInfo, 'current_cpu', 'arm64')
+        >>> ei = EnvironmentInfo(arch='irrelevant')
+        >>> paths = ei.VCTools
+        >>> any('HostARM64' in path for path in paths)
+        True
         """
         si = self.si
         tools = [os.path.join(si.VCInstallDir, 'VCPackages')]

From 3218d666e49245b0878db8b19ba20d8cd72d6424 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 16 Mar 2025 17:56:20 -0400
Subject: [PATCH 1526/1761] Calculate host_id in one expression.

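Worked examples of the consolidated expression (a sketch):

    # current_cpu -> host_id
    'amd64'.replace('amd64', 'x64').upper()  # -> 'X64'
    'x86'.replace('amd64', 'x64').upper()    # -> 'X86'
    'arm64'.replace('amd64', 'x64').upper()  # -> 'ARM64'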
---
 setuptools/msvc.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 37bef09ab3..313a781ae0 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1048,10 +1048,7 @@ def VCTools(self):
             tools += [os.path.join(si.VCInstallDir, path)]
 
         elif self.vs_ver >= 15.0:
-            if self.pi.current_cpu in ('x86', 'arm64'):
-                host_id = self.pi.current_cpu.upper()
-            else:
-                host_id = 'X64'
+            host_id = self.pi.current_cpu.replace('amd64', 'x64').upper()
             host_dir = os.path.join('bin', f'Host{host_id}%s')
             tools += [
                 os.path.join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))

From 5b66977169135a986f87d96a7df49a2378192203 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 16 Mar 2025 17:57:24 -0400
Subject: [PATCH 1527/1761] Add news fragment.

---
 newsfragments/4786.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4786.feature.rst

diff --git a/newsfragments/4786.feature.rst b/newsfragments/4786.feature.rst
new file mode 100644
index 0000000000..2e042259e6
--- /dev/null
+++ b/newsfragments/4786.feature.rst
@@ -0,0 +1 @@
+In setuptools.msvc.EnvironmentInfo, now honor the correct paths when on an ARM host.

From b6a539a7620880b9ea41102983e1ab6a2ca2ef29 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 16 Mar 2025 19:07:06 -0400
Subject: [PATCH 1528/1761] Restore implicit expectation that importing
 unixccompiler makes distutils.ccompiler available.

Closes pypa/setuptools#4871
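
A quick check of the restored expectation (a sketch):

    import sys
    import distutils.unixccompiler

    # Importing unixccompiler now also materializes distutils.ccompiler.
    assert 'distutils.ccompiler' in sys.modules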
---
 distutils/unixccompiler.py    | 6 ++++++
 newsfragments/4871.bugfix.rst | 1 +
 2 files changed, 7 insertions(+)
 create mode 100644 newsfragments/4871.bugfix.rst

diff --git a/distutils/unixccompiler.py b/distutils/unixccompiler.py
index 9cd30ad9a6..20b8ce6b9b 100644
--- a/distutils/unixccompiler.py
+++ b/distutils/unixccompiler.py
@@ -1,3 +1,9 @@
+import importlib
+
 from .compilers.C import unix
 
 UnixCCompiler = unix.Compiler
+
+# ensure import of unixccompiler implies ccompiler imported
+# (pypa/setuptools#4871)
+importlib.import_module('distutils.ccompiler')
diff --git a/newsfragments/4871.bugfix.rst b/newsfragments/4871.bugfix.rst
new file mode 100644
index 0000000000..a4d8b8bab1
--- /dev/null
+++ b/newsfragments/4871.bugfix.rst
@@ -0,0 +1 @@
+Restored implicit distutils.ccompiler import for g-ir-scanner.

From 408274b81b15e3e10a99a3067d3dc00753a06124 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Mon, 10 Mar 2025 11:41:04 -0400
Subject: [PATCH 1529/1761] Restore missing public symbols after compilers move

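A quick check of the restored names (a sketch):

    # Importable again after the move to distutils.compilers:
    from distutils._msvccompiler import MSVCCompiler
    from distutils.ccompiler import compiler_class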
---
 distutils/_msvccompiler.py    | 2 ++
 distutils/ccompiler.py        | 5 +++--
 newsfragments/4876.bugfix.rst | 1 +
 3 files changed, 6 insertions(+), 2 deletions(-)
 create mode 100644 newsfragments/4876.bugfix.rst

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index 34d9735b06..8471ccab28 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -1,3 +1,5 @@
 from .compilers.C import msvc
 
+__all__ = ["MSVCCompiler"]
+
 MSVCCompiler = msvc.Compiler
diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index a4f5e1cc33..e39cd8aac1 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -1,17 +1,18 @@
 from .compilers.C import base
 from .compilers.C.base import (
-    CompileError,
-    LinkError,
+    compiler_class,
     gen_lib_options,
     gen_preprocess_options,
     get_default_compiler,
     new_compiler,
     show_compilers,
 )
+from .compilers.C.errors import CompileError, LinkError
 
 __all__ = [
     'CompileError',
     'LinkError',
+    'compiler_class',
     'gen_lib_options',
     'gen_preprocess_options',
     'get_default_compiler',
diff --git a/newsfragments/4876.bugfix.rst b/newsfragments/4876.bugfix.rst
new file mode 100644
index 0000000000..c9cf58f8c1
--- /dev/null
+++ b/newsfragments/4876.bugfix.rst
@@ -0,0 +1 @@
+Restore `distutils.ccompiler.compiler_class` -- by :user:`Avasam`

From 88bee158e43a8534aac08cadf3b45f393eadb462 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 9 Mar 2025 16:14:10 +0000
Subject: [PATCH 1530/1761] Disallow deprecated dash-separated and uppercase
 options in setup.cfg

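For example, a setup.cfg like the following is now rejected with an
InvalidConfigError instead of emitting a deprecation warning:

    [metadata]
    Name = foo
    author-email = test@test.com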
---
 setuptools/dist.py                       | 96 +++++++++++-------------
 setuptools/tests/config/test_setupcfg.py | 62 +++++++--------
 2 files changed, 76 insertions(+), 82 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 0249651267..bcfe9c23f2 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import functools
 import io
 import itertools
 import numbers
@@ -27,6 +28,7 @@
 from ._reqs import _StrOrIter
 from .config import pyprojecttoml, setupcfg
 from .discovery import ConfigDiscovery
+from .errors import InvalidConfigError
 from .monkey import get_unpatched
 from .warnings import InformationOnly, SetuptoolsDeprecationWarning
 
@@ -490,8 +492,8 @@ def _parse_config_files(self, filenames=None):  # noqa: C901
                         continue
 
                     val = parser.get(section, opt)
-                    opt = self.warn_dash_deprecation(opt, section)
-                    opt = self.make_option_lowercase(opt, section)
+                    opt = self._enforce_underscore(opt, section)
+                    opt = self._enforce_option_lowercase(opt, section)
                     opt_dict[opt] = (filename, val)
 
             # Make the ConfigParser forget everything (so we retain
@@ -516,64 +518,42 @@ def _parse_config_files(self, filenames=None):  # noqa: C901
             except ValueError as e:
                 raise DistutilsOptionError(e) from e
 
-    def warn_dash_deprecation(self, opt: str, section: str) -> str:
-        if section in (
-            'options.extras_require',
-            'options.data_files',
-        ):
+    def _enforce_underscore(self, opt: str, section: str) -> str:
+        if "-" not in opt or not self._config_requires_normalization(section):
             return opt
 
-        underscore_opt = opt.replace('-', '_')
-        commands = list(
-            itertools.chain(
-                distutils.command.__all__,
-                self._setuptools_commands(),
-            )
+        raise InvalidConfigError(
+            f"Invalid dash-separated key {opt!r} in {section!r} (setup.cfg), "
+            f"please use the underscore name {opt.replace('-', '_')!r} instead."
+            # Warning initially introduced in 3 Mar 2021
         )
-        if (
-            not section.startswith('options')
-            and section != 'metadata'
-            and section not in commands
-        ):
-            return underscore_opt
-
-        if '-' in opt:
-            SetuptoolsDeprecationWarning.emit(
-                "Invalid dash-separated options",
-                f"""
-                Usage of dash-separated {opt!r} will not be supported in future
-                versions. Please use the underscore name {underscore_opt!r} instead.
-                """,
-                see_docs="userguide/declarative_config.html",
-                due_date=(2025, 3, 3),
-                # Warning initially introduced in 3 Mar 2021
-            )
-        return underscore_opt
 
-    def _setuptools_commands(self):
-        try:
-            entry_points = metadata.distribution('setuptools').entry_points
-            return {ep.name for ep in entry_points}  # Avoid newer API for compatibility
-        except metadata.PackageNotFoundError:
-            # during bootstrapping, distribution doesn't exist
-            return []
-
-    def make_option_lowercase(self, opt: str, section: str) -> str:
-        if section != 'metadata' or opt.islower():
+    def _enforce_option_lowercase(self, opt: str, section: str) -> str:
+        if opt.islower() or not self._config_requires_normalization(section):
             return opt
 
-        lowercase_opt = opt.lower()
-        SetuptoolsDeprecationWarning.emit(
-            "Invalid uppercase configuration",
-            f"""
-            Usage of uppercase key {opt!r} in {section!r} will not be supported in
-            future versions. Please use lowercase {lowercase_opt!r} instead.
-            """,
-            see_docs="userguide/declarative_config.html",
-            due_date=(2025, 3, 3),
+        raise InvalidConfigError(
+            f"Invalid uppercase key {opt!r} in {section!r} (setup.cfg), "
+            f"please use lowercase {opt.lower()!r} instead."
             # Warning initially introduced in 6 Mar 2021
         )
-        return lowercase_opt
+
+    def _config_requires_normalization(self, section: str) -> bool:
+        skip = (
+            'options.extras_require',
+            'options.data_files',
+            'options.entry_points',
+            'options.package_data',
+            'options.exclude_package_data',
+        )
+        return section not in skip and self._is_setuptools_section(section)
+
+    def _is_setuptools_section(self, section: str) -> bool:
+        return (
+            section == "metadata"
+            or section.startswith("option")
+            or section in _setuptools_commands()
+        )
 
     # FIXME: 'Distribution._set_command_options' is too complex (14)
     def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
@@ -999,6 +979,18 @@ def run_command(self, command) -> None:
         super().run_command(command)
 
 
+@functools.cache
+def _setuptools_commands() -> set[str]:
+    try:
+        # Use older API for importlib.metadata compatibility
+        entry_points = metadata.distribution('setuptools').entry_points
+        eps = (ep.name for ep in entry_points)
+    except metadata.PackageNotFoundError:
+        # during bootstrapping, distribution doesn't exist
+        return set(distutils.command.__all__)
+    return {*distutils.command.__all__, *eps}
+
+
 class DistDeprecationWarning(SetuptoolsDeprecationWarning):
     """Class for warning about deprecations in dist in
     setuptools. Not ignored by default, unlike DeprecationWarning."""
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index d356d2b77c..a199871ffd 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -1,6 +1,7 @@
 import configparser
 import contextlib
 import inspect
+import re
 from pathlib import Path
 from unittest.mock import Mock, patch
 
@@ -9,6 +10,7 @@
 
 from setuptools.config.setupcfg import ConfigHandler, Target, read_configuration
 from setuptools.dist import Distribution, _Distribution
+from setuptools.errors import InvalidConfigError
 from setuptools.warnings import SetuptoolsDeprecationWarning
 
 from ..textwrap import DALS
@@ -420,36 +422,36 @@ def test_not_utf8(self, tmpdir):
             with get_dist(tmpdir):
                 pass
 
-    @pytest.mark.xfail(reason="#4864")
-    def test_warn_dash_deprecation(self, tmpdir):
-        # warn_dash_deprecation() is a method in setuptools.dist
-        # remove this test and the method when no longer needed
-        fake_env(
-            tmpdir,
-            '[metadata]\n'
-            'author-email = test@test.com\n'
-            'maintainer_email = foo@foo.com\n',
-        )
-        msg = "Usage of dash-separated 'author-email' will not be supported"
-        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
-            with get_dist(tmpdir) as dist:
-                metadata = dist.metadata
-
-        assert metadata.author_email == 'test@test.com'
-        assert metadata.maintainer_email == 'foo@foo.com'
-
-    @pytest.mark.xfail(reason="#4864")
-    def test_make_option_lowercase(self, tmpdir):
-        # remove this test and the method make_option_lowercase() in setuptools.dist
-        # when no longer needed
-        fake_env(tmpdir, '[metadata]\nName = foo\ndescription = Some description\n')
-        msg = "Usage of uppercase key 'Name' in 'metadata' will not be supported"
-        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
-            with get_dist(tmpdir) as dist:
-                metadata = dist.metadata
-
-        assert metadata.name == 'foo'
-        assert metadata.description == 'Some description'
+    @pytest.mark.parametrize(
+        ("error_msg", "config"),
+        [
+            (
+                "Invalid dash-separated key 'author-email' in 'metadata' (setup.cfg)",
+                DALS(
+                    """
+                    [metadata]
+                    author-email = test@test.com
+                    maintainer_email = foo@foo.com
+                    """
+                ),
+            ),
+            (
+                "Invalid uppercase key 'Name' in 'metadata' (setup.cfg)",
+                DALS(
+                    """
+                    [metadata]
+                    Name = foo
+                    description = Some description
+                    """
+                ),
+            ),
+        ],
+    )
+    def test_invalid_options_previously_deprecated(self, tmpdir, error_msg, config):
+        # this test and related methods can be removed when no longer needed
+        fake_env(tmpdir, config)
+        with pytest.raises(InvalidConfigError, match=re.escape(error_msg)):
+            get_dist(tmpdir).__enter__()
 
 
 class TestOptions:

From 3a0596f0c4f9d26d2a307333796681f0b54b3fd5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 9 Mar 2025 17:45:20 +0000
Subject: [PATCH 1531/1761] Add news fragment

---
 newsfragments/4870.removal.rst | 10 ++++++++++
 1 file changed, 10 insertions(+)
 create mode 100644 newsfragments/4870.removal.rst

diff --git a/newsfragments/4870.removal.rst b/newsfragments/4870.removal.rst
new file mode 100644
index 0000000000..dd21a13c22
--- /dev/null
+++ b/newsfragments/4870.removal.rst
@@ -0,0 +1,10 @@
+Setuptools no longer accepts options containing uppercase or dash characters in ``setup.cfg``.
+Please make sure to write the options in ``setup.cfg`` using the :wiki:`lower_snake_case ` convention
+(e.g. ``Name => name``, ``install-requires => install_requires``).
+This is a follow-up on deprecations introduced in
+`v54.1.0 `_ (see #1608) and
+`v54.1.1 `_ (see #2592).
+
+.. note::
+   This change *does not affect configurations in* ``pyproject.toml``
+   (which uses the :wiki:`lower-kebab-case ` convention following the precedent in :pep:`517`).

From a67f998998ff3f19bf4c9dceb60cbc07a47c7abe Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 9 Mar 2025 17:47:23 +0000
Subject: [PATCH 1532/1761] Avoid duplication in setuptools.dist

---
 setuptools/dist.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index bcfe9c23f2..e140c86851 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -984,10 +984,10 @@ def _setuptools_commands() -> set[str]:
     try:
         # Use older API for importlib.metadata compatibility
         entry_points = metadata.distribution('setuptools').entry_points
-        eps = (ep.name for ep in entry_points)
+        eps: Iterable[str] = (ep.name for ep in entry_points)
     except metadata.PackageNotFoundError:
         # during bootstrapping, distribution doesn't exist
-        return set(distutils.command.__all__)
+        eps = []
     return {*distutils.command.__all__, *eps}
 
 

From 5a9b4b53e26ea174001f83c590cd37ed9da1f221 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 10 Mar 2025 09:09:57 +0000
Subject: [PATCH 1533/1761] Update mentions to PEP in newsfragment

---
 newsfragments/4870.removal.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4870.removal.rst b/newsfragments/4870.removal.rst
index dd21a13c22..5b713032d0 100644
--- a/newsfragments/4870.removal.rst
+++ b/newsfragments/4870.removal.rst
@@ -7,4 +7,4 @@ This is a follow-up on deprecations introduced in
 
 .. note::
    This change *does not affect configurations in* ``pyproject.toml``
-   (which uses the :wiki:`lower-kebab-case ` convention following the precedent in :pep:`517`).
+   (which uses the :wiki:`lower-kebab-case ` convention following the precedent set in :pep:`517`/:pep:`518`).

From 6b71893d27de2fc3da3ddd1092269bb0a3085f80 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 10 Mar 2025 09:19:02 +0000
Subject: [PATCH 1534/1761] Simplify negative conditions by applying De Morgan's
 theorem

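Applied here: not (A and B) == (not A) or (not B), with A being
"section not in skip" and B being "_is_setuptools_section(section)",
which yields "section in skip or not _is_setuptools_section(section)".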
---
 setuptools/dist.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index e140c86851..e8af957aff 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -519,7 +519,7 @@ def _parse_config_files(self, filenames=None):  # noqa: C901
                 raise DistutilsOptionError(e) from e
 
     def _enforce_underscore(self, opt: str, section: str) -> str:
-        if "-" not in opt or not self._config_requires_normalization(section):
+        if "-" not in opt or self._skip_setupcfg_normalization(section):
             return opt
 
         raise InvalidConfigError(
@@ -529,7 +529,7 @@ def _enforce_underscore(self, opt: str, section: str) -> str:
         )
 
     def _enforce_option_lowercase(self, opt: str, section: str) -> str:
-        if opt.islower() or not self._config_requires_normalization(section):
+        if opt.islower() or self._skip_setupcfg_normalization(section):
             return opt
 
         raise InvalidConfigError(
@@ -538,7 +538,7 @@ def _enforce_option_lowercase(self, opt: str, section: str) -> str:
             # Warning initially introduced in 6 Mar 2021
         )
 
-    def _config_requires_normalization(self, section: str) -> bool:
+    def _skip_setupcfg_normalization(self, section: str) -> bool:
         skip = (
             'options.extras_require',
             'options.data_files',
@@ -546,7 +546,7 @@ def _config_requires_normalization(self, section: str) -> bool:
             'options.package_data',
             'options.exclude_package_data',
         )
-        return section not in skip and self._is_setuptools_section(section)
+        return section in skip or not self._is_setuptools_section(section)
 
     def _is_setuptools_section(self, section: str) -> bool:
         return (

From d3640575a483d8c2bf00d45d4249738a4968fb08 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 13 Mar 2025 11:53:26 +0000
Subject: [PATCH 1535/1761] Fix error in `setuptools/dist.py`

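Without this fix, startswith("option") would also claim any section whose
name merely begins with "option" (e.g. a hypothetical [optional] section);
startswith("options") matches only the intended [options] and [options.*]
sections.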
---
 setuptools/dist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index e8af957aff..8663077bb3 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -551,7 +551,7 @@ def _skip_setupcfg_normalization(self, section: str) -> bool:
     def _is_setuptools_section(self, section: str) -> bool:
         return (
             section == "metadata"
-            or section.startswith("option")
+            or section.startswith("options")
             or section in _setuptools_commands()
         )
 

From 3018692baac909355c46f9e8724845c72f6ca7e2 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 17 Mar 2025 21:21:22 -0400
Subject: [PATCH 1536/1761] Fix reference in changelog.

---
 newsfragments/4876.bugfix.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/newsfragments/4876.bugfix.rst b/newsfragments/4876.bugfix.rst
index c9cf58f8c1..db7382c4c8 100644
--- a/newsfragments/4876.bugfix.rst
+++ b/newsfragments/4876.bugfix.rst
@@ -1 +1 @@
-Restore `distutils.ccompiler.compiler_class` -- by :user:`Avasam`
+Restore ``distutils.ccompiler.compiler_class`` -- by :user:`Avasam`

From 07e96b107adb799a754122e49701c73dc34fbabe Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Mon, 17 Mar 2025 21:21:27 -0400
Subject: [PATCH 1537/1761] =?UTF-8?q?Bump=20version:=2076.0.0=20=E2=86=92?=
 =?UTF-8?q?=2076.1.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 16 ++++++++++++++++
 newsfragments/4786.feature.rst |  1 -
 newsfragments/4871.bugfix.rst  |  1 -
 newsfragments/4876.bugfix.rst  |  1 -
 pyproject.toml                 |  2 +-
 6 files changed, 18 insertions(+), 5 deletions(-)
 delete mode 100644 newsfragments/4786.feature.rst
 delete mode 100644 newsfragments/4871.bugfix.rst
 delete mode 100644 newsfragments/4876.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index e459d38278..91f2c2aed5 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 76.0.0
+current_version = 76.1.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index a9a79c779e..61f4e4d776 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,19 @@
+v76.1.0
+=======
+
+Features
+--------
+
+- In setuptools.msvc.EnvironmentInfo, now honor the correct paths when on an ARM host. (#4786)
+
+
+Bugfixes
+--------
+
+- Restored implicit distutils.ccompiler import for g-ir-scanner. (#4871)
+- Restore ``distutils.ccompiler.compiler_class`` -- by :user:`Avasam` (#4876)
+
+
 v75.3.2
 =======
 
diff --git a/newsfragments/4786.feature.rst b/newsfragments/4786.feature.rst
deleted file mode 100644
index 2e042259e6..0000000000
--- a/newsfragments/4786.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-In setuptools.msvc.EnvironmentInfo, now honor the correct paths when on an ARM host.
diff --git a/newsfragments/4871.bugfix.rst b/newsfragments/4871.bugfix.rst
deleted file mode 100644
index a4d8b8bab1..0000000000
--- a/newsfragments/4871.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restored implicit distutils.ccompiler import for g-ir-scanner.
diff --git a/newsfragments/4876.bugfix.rst b/newsfragments/4876.bugfix.rst
deleted file mode 100644
index db7382c4c8..0000000000
--- a/newsfragments/4876.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restore ``distutils.ccompiler.compiler_class`` -- by :user:`Avasam`
diff --git a/pyproject.toml b/pyproject.toml
index 479eef5bf4..f70a6350cc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "76.0.0"
+version = "76.1.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 2dd10915bc6ba12f30a9ce44cd1e678c63a0891e Mon Sep 17 00:00:00 2001
From: ManiacDC 
Date: Tue, 18 Mar 2025 10:19:38 -0400
Subject: [PATCH 1538/1761] Restore _default_compilers to ccompiler.py for
 backwards compatibility

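A quick check of the restored attribute (a sketch):

    # Importable again for backwards compatibility; the canonical home
    # is now distutils.compilers.C.base.
    from distutils.ccompiler import _default_compilers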
---
 distutils/ccompiler.py        | 18 ++++++++++--------
 newsfragments/4877.bugfix.rst |  1 +
 2 files changed, 11 insertions(+), 8 deletions(-)
 create mode 100644 newsfragments/4877.bugfix.rst

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index e39cd8aac1..9cf0f4ce54 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -6,18 +6,20 @@
     get_default_compiler,
     new_compiler,
     show_compilers,
+    _default_compilers,
 )
 from .compilers.C.errors import CompileError, LinkError
 
 __all__ = [
-    'CompileError',
-    'LinkError',
-    'compiler_class',
-    'gen_lib_options',
-    'gen_preprocess_options',
-    'get_default_compiler',
-    'new_compiler',
-    'show_compilers',
+    "CompileError",
+    "LinkError",
+    "compiler_class",
+    "gen_lib_options",
+    "gen_preprocess_options",
+    "get_default_compiler",
+    "new_compiler",
+    "show_compilers",
+    "_default_compilers",
 ]
 
 
diff --git a/newsfragments/4877.bugfix.rst b/newsfragments/4877.bugfix.rst
new file mode 100644
index 0000000000..8350c5a736
--- /dev/null
+++ b/newsfragments/4877.bugfix.rst
@@ -0,0 +1 @@
+Restore ``distutils.ccompiler._default_compilers`` -- by :user:`ManiacDC`

From bc319747b1d77c771b52124ab44cbdc04aac8078 Mon Sep 17 00:00:00 2001
From: ManiacDC 
Date: Tue, 18 Mar 2025 10:37:06 -0400
Subject: [PATCH 1539/1761] fix newsfragments

---
 newsfragments/{4877.bugfix.rst => 4876.bugfix-2.rst} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename newsfragments/{4877.bugfix.rst => 4876.bugfix-2.rst} (100%)

diff --git a/newsfragments/4877.bugfix.rst b/newsfragments/4876.bugfix-2.rst
similarity index 100%
rename from newsfragments/4877.bugfix.rst
rename to newsfragments/4876.bugfix-2.rst

From 8653b91b2b3bdf8a6b77a26d0aa4dd28ffda9bc5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Sun, 9 Mar 2025 12:31:30 +0000
Subject: [PATCH 1540/1761] Update vendored copy of wheel

---
 .../_vendor/wheel-0.43.0.dist-info/RECORD     |  63 --
 .../INSTALLER                                 |   0
 .../LICENSE.txt                               |   0
 .../METADATA                                  |  27 +-
 .../_vendor/wheel-0.45.1.dist-info/RECORD     |  68 ++
 .../REQUESTED                                 |   0
 .../WHEEL                                     |   2 +-
 .../entry_points.txt                          |   0
 setuptools/_vendor/wheel/__init__.py          |   2 +-
 setuptools/_vendor/wheel/_bdist_wheel.py      | 613 +++++++++++++++++
 .../_vendor/wheel/_setuptools_logging.py      |   4 +-
 setuptools/_vendor/wheel/bdist_wheel.py       | 619 +-----------------
 setuptools/_vendor/wheel/cli/__init__.py      |  12 +-
 setuptools/_vendor/wheel/cli/convert.py       | 505 +++++++-------
 setuptools/_vendor/wheel/macosx_libfile.py    |  31 +-
 setuptools/_vendor/wheel/metadata.py          |  29 +-
 setuptools/_vendor/wheel/util.py              |   9 -
 .../_vendor/wheel/vendored/packaging/LICENSE  |   3 +
 .../wheel/vendored/packaging/LICENSE.APACHE   | 177 +++++
 .../wheel/vendored/packaging/LICENSE.BSD      |  23 +
 setuptools/_vendor/wheel/wheelfile.py         |  49 +-
 21 files changed, 1295 insertions(+), 941 deletions(-)
 delete mode 100644 setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
 rename setuptools/_vendor/{wheel-0.43.0.dist-info => wheel-0.45.1.dist-info}/INSTALLER (100%)
 rename setuptools/_vendor/{wheel-0.43.0.dist-info => wheel-0.45.1.dist-info}/LICENSE.txt (100%)
 rename setuptools/_vendor/{wheel-0.43.0.dist-info => wheel-0.45.1.dist-info}/METADATA (73%)
 create mode 100644 setuptools/_vendor/wheel-0.45.1.dist-info/RECORD
 rename setuptools/_vendor/{wheel-0.43.0.dist-info => wheel-0.45.1.dist-info}/REQUESTED (100%)
 rename setuptools/_vendor/{wheel-0.43.0.dist-info => wheel-0.45.1.dist-info}/WHEEL (71%)
 rename setuptools/_vendor/{wheel-0.43.0.dist-info => wheel-0.45.1.dist-info}/entry_points.txt (100%)
 create mode 100644 setuptools/_vendor/wheel/_bdist_wheel.py
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/LICENSE
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/LICENSE.APACHE
 create mode 100644 setuptools/_vendor/wheel/vendored/packaging/LICENSE.BSD

diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD b/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
deleted file mode 100644
index a3c6c3ea2f..0000000000
--- a/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD
+++ /dev/null
@@ -1,63 +0,0 @@
-../../bin/wheel,sha256=cT2EHbrv-J-UyUXu26cDY-0I7RgcruysJeHFanT1Xfo,249
-wheel-0.43.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-wheel-0.43.0.dist-info/LICENSE.txt,sha256=MMI2GGeRCPPo6h0qZYx8pBe9_IkcmO8aifpP8MmChlQ,1107
-wheel-0.43.0.dist-info/METADATA,sha256=WbrCKwClnT5WCKVrjPjvxDgxo2tyeS7kOJyc1GaceEE,2153
-wheel-0.43.0.dist-info/RECORD,,
-wheel-0.43.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-wheel-0.43.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-wheel-0.43.0.dist-info/entry_points.txt,sha256=rTY1BbkPHhkGMm4Q3F0pIzJBzW2kMxoG1oriffvGdA0,104
-wheel/__init__.py,sha256=D6jhH00eMzbgrXGAeOwVfD5i-lCAMMycuG1L0useDlo,59
-wheel/__main__.py,sha256=NkMUnuTCGcOkgY0IBLgBCVC_BGGcWORx2K8jYGS12UE,455
-wheel/__pycache__/__init__.cpython-312.pyc,,
-wheel/__pycache__/__main__.cpython-312.pyc,,
-wheel/__pycache__/_setuptools_logging.cpython-312.pyc,,
-wheel/__pycache__/bdist_wheel.cpython-312.pyc,,
-wheel/__pycache__/macosx_libfile.cpython-312.pyc,,
-wheel/__pycache__/metadata.cpython-312.pyc,,
-wheel/__pycache__/util.cpython-312.pyc,,
-wheel/__pycache__/wheelfile.cpython-312.pyc,,
-wheel/_setuptools_logging.py,sha256=NoCnjJ4DFEZ45Eo-2BdXLsWJCwGkait1tp_17paleVw,746
-wheel/bdist_wheel.py,sha256=OKJyp9E831zJrxoRfmM9AgOjByG1CB-pzF5kXQFmaKk,20938
-wheel/cli/__init__.py,sha256=eBNhnPwWTtdKAJHy77lvz7gOQ5Eu3GavGugXxhSsn-U,4264
-wheel/cli/__pycache__/__init__.cpython-312.pyc,,
-wheel/cli/__pycache__/convert.cpython-312.pyc,,
-wheel/cli/__pycache__/pack.cpython-312.pyc,,
-wheel/cli/__pycache__/tags.cpython-312.pyc,,
-wheel/cli/__pycache__/unpack.cpython-312.pyc,,
-wheel/cli/convert.py,sha256=qJcpYGKqdfw1P6BelgN1Hn_suNgM6bvyEWFlZeuSWx0,9439
-wheel/cli/pack.py,sha256=CAFcHdBVulvsHYJlndKVO7KMI9JqBTZz5ii0PKxxCOs,3103
-wheel/cli/tags.py,sha256=lHw-LaWrkS5Jy_qWcw-6pSjeNM6yAjDnqKI3E5JTTCU,4760
-wheel/cli/unpack.py,sha256=Y_J7ynxPSoFFTT7H0fMgbBlVErwyDGcObgme5MBuz58,1021
-wheel/macosx_libfile.py,sha256=HnW6OPdN993psStvwl49xtx2kw7hoVbe6nvwmf8WsKI,16103
-wheel/metadata.py,sha256=q-xCCqSAK7HzyZxK9A6_HAWmhqS1oB4BFw1-rHQxBiQ,5884
-wheel/util.py,sha256=e0jpnsbbM9QhaaMSyap-_ZgUxcxwpyLDk6RHcrduPLg,621
-wheel/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-wheel/vendored/__pycache__/__init__.cpython-312.pyc,,
-wheel/vendored/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-wheel/vendored/packaging/__pycache__/__init__.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_elffile.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_manylinux.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_musllinux.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_parser.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_structures.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/_tokenizer.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/markers.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/requirements.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/specifiers.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/tags.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/utils.cpython-312.pyc,,
-wheel/vendored/packaging/__pycache__/version.cpython-312.pyc,,
-wheel/vendored/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
-wheel/vendored/packaging/_manylinux.py,sha256=P7sdR5_7XBY09LVYYPhHmydMJIIwPXWsh4olk74Uuj4,9588
-wheel/vendored/packaging/_musllinux.py,sha256=z1s8To2hQ0vpn_d-O2i5qxGwEK8WmGlLt3d_26V7NeY,2674
-wheel/vendored/packaging/_parser.py,sha256=4tT4emSl2qTaU7VTQE1Xa9o1jMPCsBezsYBxyNMUN-s,10347
-wheel/vendored/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
-wheel/vendored/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
-wheel/vendored/packaging/markers.py,sha256=_TSPI1BhJYO7Bp9AzTmHQxIqHEVXaTjmDh9G-w8qzPA,8232
-wheel/vendored/packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
-wheel/vendored/packaging/specifiers.py,sha256=IWSt0SrLSP72heWhAC8UL0eGvas7XIQHjqiViVfmPKE,39778
-wheel/vendored/packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
-wheel/vendored/packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
-wheel/vendored/packaging/version.py,sha256=PFJaYZDxBgyxkfYhH3SQw4qfE9ICCWrTmitvq14y3bs,16234
-wheel/vendored/vendor.txt,sha256=Z2ENjB1i5prfez8CdM1Sdr3c6Zxv2rRRolMpLmBncAE,16
-wheel/wheelfile.py,sha256=DtJDWoZMvnBh4leNMDPGOprQU9d_dp6q-MmV0U--4xc,7694
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/INSTALLER b/setuptools/_vendor/wheel-0.45.1.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_vendor/wheel-0.43.0.dist-info/INSTALLER
rename to setuptools/_vendor/wheel-0.45.1.dist-info/INSTALLER
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt b/setuptools/_vendor/wheel-0.45.1.dist-info/LICENSE.txt
similarity index 100%
rename from setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt
rename to setuptools/_vendor/wheel-0.45.1.dist-info/LICENSE.txt
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/METADATA b/setuptools/_vendor/wheel-0.45.1.dist-info/METADATA
similarity index 73%
rename from setuptools/_vendor/wheel-0.43.0.dist-info/METADATA
rename to setuptools/_vendor/wheel-0.45.1.dist-info/METADATA
index e3722c00b9..f645dcb673 100644
--- a/setuptools/_vendor/wheel-0.43.0.dist-info/METADATA
+++ b/setuptools/_vendor/wheel-0.45.1.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: wheel
-Version: 0.43.0
+Version: 0.45.1
 Summary: A built-package format for Python
 Keywords: wheel,packaging
 Author-email: Daniel Holth 
@@ -29,19 +29,24 @@ Provides-Extra: test
 wheel
 =====
 
-This library is the reference implementation of the Python wheel packaging
-standard, as defined in `PEP 427`_.
+This is a command line tool for manipulating Python wheel files, as defined in
+`PEP 427`_. It contains the following functionality:
 
-It has two different roles:
+* Convert ``.egg`` archives into ``.whl``
+* Unpack wheel archives
+* Repack wheel archives
+* Add or remove tags in existing wheel archives
 
-#. A setuptools_ extension for building wheels that provides the
-   ``bdist_wheel`` setuptools command
-#. A command line tool for working with wheel files
+.. _PEP 427: https://www.python.org/dev/peps/pep-0427/
+
+Historical note
+---------------
 
-It should be noted that wheel is **not** intended to be used as a library, and
-as such there is no stable, public API.
+This project used to contain the implementation of the setuptools_ ``bdist_wheel``
+command, but as of setuptools v70.1, it no longer needs ``wheel`` installed for that to
+work. Thus, you should install this **only** if you intend to use the ``wheel`` command
+line tool!
 
-.. _PEP 427: https://www.python.org/dev/peps/pep-0427/
 .. _setuptools: https://pypi.org/project/setuptools/
 
 Documentation
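
The functionality list in the rewritten METADATA maps onto the CLI modules
vendored alongside it (cli/convert.py, cli/pack.py, cli/tags.py,
cli/unpack.py). Assuming a standalone install of wheel, they are reachable
through the module entry point, e.g.:

    python -m wheel convert legacy_dist.egg        # .egg -> .whl
    python -m wheel unpack mypkg-1.0-py3-none-any.whl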
diff --git a/setuptools/_vendor/wheel-0.45.1.dist-info/RECORD b/setuptools/_vendor/wheel-0.45.1.dist-info/RECORD
new file mode 100644
index 0000000000..c1535b697f
--- /dev/null
+++ b/setuptools/_vendor/wheel-0.45.1.dist-info/RECORD
@@ -0,0 +1,68 @@
+../../bin/wheel,sha256=pBhV19bQIgjS-r541fG3kLU6QtcyKaKdQ2RE9YIzeiU,249
+wheel-0.45.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+wheel-0.45.1.dist-info/LICENSE.txt,sha256=MMI2GGeRCPPo6h0qZYx8pBe9_IkcmO8aifpP8MmChlQ,1107
+wheel-0.45.1.dist-info/METADATA,sha256=mKz84H7m7jsxJyzeIcTVORiTb0NPMV39KvOIYhGgmjA,2313
+wheel-0.45.1.dist-info/RECORD,,
+wheel-0.45.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel-0.45.1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+wheel-0.45.1.dist-info/entry_points.txt,sha256=rTY1BbkPHhkGMm4Q3F0pIzJBzW2kMxoG1oriffvGdA0,104
+wheel/__init__.py,sha256=mrxMnvdXACur_LWegbUfh5g5ysWZrd63UJn890wvGNk,59
+wheel/__main__.py,sha256=NkMUnuTCGcOkgY0IBLgBCVC_BGGcWORx2K8jYGS12UE,455
+wheel/__pycache__/__init__.cpython-311.pyc,,
+wheel/__pycache__/__main__.cpython-311.pyc,,
+wheel/__pycache__/_bdist_wheel.cpython-311.pyc,,
+wheel/__pycache__/_setuptools_logging.cpython-311.pyc,,
+wheel/__pycache__/bdist_wheel.cpython-311.pyc,,
+wheel/__pycache__/macosx_libfile.cpython-311.pyc,,
+wheel/__pycache__/metadata.cpython-311.pyc,,
+wheel/__pycache__/util.cpython-311.pyc,,
+wheel/__pycache__/wheelfile.cpython-311.pyc,,
+wheel/_bdist_wheel.py,sha256=UghCQjSH_pVfcZh6oRjzSw_TQhcf3anSx1OkiLSL82M,21694
+wheel/_setuptools_logging.py,sha256=-5KC-lne0ilOUWIDfOkqapUWGMFZhuKYDIavIZiB5kM,781
+wheel/bdist_wheel.py,sha256=tpf9WufiSO1RuEMg5oPhIfSG8DMziCZ_4muCKF69Cqo,1107
+wheel/cli/__init__.py,sha256=Npq6_jKi03dhIcRnmbuFhwviVJxwO0tYEnEhWMv9cJo,4402
+wheel/cli/__pycache__/__init__.cpython-311.pyc,,
+wheel/cli/__pycache__/convert.cpython-311.pyc,,
+wheel/cli/__pycache__/pack.cpython-311.pyc,,
+wheel/cli/__pycache__/tags.cpython-311.pyc,,
+wheel/cli/__pycache__/unpack.cpython-311.pyc,,
+wheel/cli/convert.py,sha256=Bi0ntEXb9nTllCxWeTRQ4j-nPs3szWSEKipG_GgnMkQ,12634
+wheel/cli/pack.py,sha256=CAFcHdBVulvsHYJlndKVO7KMI9JqBTZz5ii0PKxxCOs,3103
+wheel/cli/tags.py,sha256=lHw-LaWrkS5Jy_qWcw-6pSjeNM6yAjDnqKI3E5JTTCU,4760
+wheel/cli/unpack.py,sha256=Y_J7ynxPSoFFTT7H0fMgbBlVErwyDGcObgme5MBuz58,1021
+wheel/macosx_libfile.py,sha256=k1x7CE3LPtOVGqj6NXQ1nTGYVPaeRrhVzUG_KPq3zDs,16572
+wheel/metadata.py,sha256=JC4p7jlQZu2bUTAQ2fevkqLjg_X6gnNyRhLn6OUO1tc,6171
+wheel/util.py,sha256=aL7aibHwYUgfc8WlolL5tXdkV4DatbJxZHb1kwHFJAU,423
+wheel/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel/vendored/__pycache__/__init__.cpython-311.pyc,,
+wheel/vendored/packaging/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+wheel/vendored/packaging/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+wheel/vendored/packaging/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+wheel/vendored/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+wheel/vendored/packaging/__pycache__/__init__.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/_elffile.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/_manylinux.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/_musllinux.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/_parser.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/_structures.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/_tokenizer.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/markers.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/requirements.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/specifiers.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/tags.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/utils.cpython-311.pyc,,
+wheel/vendored/packaging/__pycache__/version.cpython-311.pyc,,
+wheel/vendored/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
+wheel/vendored/packaging/_manylinux.py,sha256=P7sdR5_7XBY09LVYYPhHmydMJIIwPXWsh4olk74Uuj4,9588
+wheel/vendored/packaging/_musllinux.py,sha256=z1s8To2hQ0vpn_d-O2i5qxGwEK8WmGlLt3d_26V7NeY,2674
+wheel/vendored/packaging/_parser.py,sha256=4tT4emSl2qTaU7VTQE1Xa9o1jMPCsBezsYBxyNMUN-s,10347
+wheel/vendored/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+wheel/vendored/packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292
+wheel/vendored/packaging/markers.py,sha256=_TSPI1BhJYO7Bp9AzTmHQxIqHEVXaTjmDh9G-w8qzPA,8232
+wheel/vendored/packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933
+wheel/vendored/packaging/specifiers.py,sha256=IWSt0SrLSP72heWhAC8UL0eGvas7XIQHjqiViVfmPKE,39778
+wheel/vendored/packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950
+wheel/vendored/packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268
+wheel/vendored/packaging/version.py,sha256=PFJaYZDxBgyxkfYhH3SQw4qfE9ICCWrTmitvq14y3bs,16234
+wheel/vendored/vendor.txt,sha256=Z2ENjB1i5prfez8CdM1Sdr3c6Zxv2rRRolMpLmBncAE,16
+wheel/wheelfile.py,sha256=USCttNlJwafxt51YYFFKG7jnxz8dfhbyqAZL6jMTA9s,8411
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/REQUESTED b/setuptools/_vendor/wheel-0.45.1.dist-info/REQUESTED
similarity index 100%
rename from setuptools/_vendor/wheel-0.43.0.dist-info/REQUESTED
rename to setuptools/_vendor/wheel-0.45.1.dist-info/REQUESTED
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL b/setuptools/_vendor/wheel-0.45.1.dist-info/WHEEL
similarity index 71%
rename from setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL
rename to setuptools/_vendor/wheel-0.45.1.dist-info/WHEEL
index 3b5e64b5e6..e3c6feefa2 100644
--- a/setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL
+++ b/setuptools/_vendor/wheel-0.45.1.dist-info/WHEEL
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: flit 3.9.0
+Generator: flit 3.10.1
 Root-Is-Purelib: true
 Tag: py3-none-any
diff --git a/setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt b/setuptools/_vendor/wheel-0.45.1.dist-info/entry_points.txt
similarity index 100%
rename from setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt
rename to setuptools/_vendor/wheel-0.45.1.dist-info/entry_points.txt
diff --git a/setuptools/_vendor/wheel/__init__.py b/setuptools/_vendor/wheel/__init__.py
index a773bbbcd7..3ab8f72d8b 100644
--- a/setuptools/_vendor/wheel/__init__.py
+++ b/setuptools/_vendor/wheel/__init__.py
@@ -1,3 +1,3 @@
 from __future__ import annotations
 
-__version__ = "0.43.0"
+__version__ = "0.45.1"
diff --git a/setuptools/_vendor/wheel/_bdist_wheel.py b/setuptools/_vendor/wheel/_bdist_wheel.py
new file mode 100644
index 0000000000..88973ebfb8
--- /dev/null
+++ b/setuptools/_vendor/wheel/_bdist_wheel.py
@@ -0,0 +1,613 @@
+"""
+Create a wheel (.whl) distribution.
+
+A wheel is a built archive format.
+"""
+
+from __future__ import annotations
+
+import os
+import re
+import shutil
+import stat
+import struct
+import sys
+import sysconfig
+import warnings
+from email.generator import BytesGenerator, Generator
+from email.policy import EmailPolicy
+from glob import iglob
+from shutil import rmtree
+from typing import TYPE_CHECKING, Callable, Iterable, Literal, Sequence, cast
+from zipfile import ZIP_DEFLATED, ZIP_STORED
+
+import setuptools
+from setuptools import Command
+
+from . import __version__ as wheel_version
+from .metadata import pkginfo_to_metadata
+from .util import log
+from .vendored.packaging import tags
+from .vendored.packaging import version as _packaging_version
+from .wheelfile import WheelFile
+
+if TYPE_CHECKING:
+    import types
+
+# ensure Python logging is configured
+try:
+    __import__("setuptools.logging")
+except ImportError:
+    # setuptools < ??
+    from . import _setuptools_logging
+
+    _setuptools_logging.configure()
+
+
+def safe_name(name: str) -> str:
+    """Convert an arbitrary string to a standard distribution name
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub("[^A-Za-z0-9.]+", "-", name)
+
+
+def safe_version(version: str) -> str:
+    """
+    Convert an arbitrary string to a standard version string
+    """
+    try:
+        # normalize the version
+        return str(_packaging_version.Version(version))
+    except _packaging_version.InvalidVersion:
+        version = version.replace(" ", ".")
+        return re.sub("[^A-Za-z0-9.]+", "-", version)
+
+
+setuptools_major_version = int(setuptools.__version__.split(".")[0])
+
+PY_LIMITED_API_PATTERN = r"cp3\d"
+
+
+def _is_32bit_interpreter() -> bool:
+    return struct.calcsize("P") == 4
+
+
+def python_tag() -> str:
+    return f"py{sys.version_info[0]}"
+
+
+def get_platform(archive_root: str | None) -> str:
+    """Return our platform name 'win32', 'linux_x86_64'"""
+    result = sysconfig.get_platform()
+    if result.startswith("macosx") and archive_root is not None:
+        from .macosx_libfile import calculate_macosx_platform_tag
+
+        result = calculate_macosx_platform_tag(archive_root, result)
+    elif _is_32bit_interpreter():
+        if result == "linux-x86_64":
+            # pip pull request #3497
+            result = "linux-i686"
+        elif result == "linux-aarch64":
+            # packaging pull request #234
+            # TODO armv8l, packaging pull request #690 => this did not land
+            # in pip/packaging yet
+            result = "linux-armv7l"
+
+    return result.replace("-", "_")
+
+
+def get_flag(
+    var: str, fallback: bool, expected: bool = True, warn: bool = True
+) -> bool:
+    """Use a fallback value for determining SOABI flags if the needed config
+    var is unset or unavailable."""
+    val = sysconfig.get_config_var(var)
+    if val is None:
+        if warn:
+            warnings.warn(
+                f"Config variable '{var}' is unset, Python ABI tag may be incorrect",
+                RuntimeWarning,
+                stacklevel=2,
+            )
+        return fallback
+    return val == expected
+
+
+def get_abi_tag() -> str | None:
+    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
+    soabi: str = sysconfig.get_config_var("SOABI")
+    impl = tags.interpreter_name()
+    if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
+        d = ""
+        m = ""
+        u = ""
+        if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
+            d = "d"
+
+        if get_flag(
+            "WITH_PYMALLOC",
+            impl == "cp",
+            warn=(impl == "cp" and sys.version_info < (3, 8)),
+        ) and sys.version_info < (3, 8):
+            m = "m"
+
+        abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
+    elif soabi and impl == "cp" and soabi.startswith("cpython"):
+        # non-Windows
+        abi = "cp" + soabi.split("-")[1]
+    elif soabi and impl == "cp" and soabi.startswith("cp"):
+        # Windows
+        abi = soabi.split("-")[0]
+    elif soabi and impl == "pp":
+        # we want something like pypy36-pp73
+        abi = "-".join(soabi.split("-")[:2])
+        abi = abi.replace(".", "_").replace("-", "_")
+    elif soabi and impl == "graalpy":
+        abi = "-".join(soabi.split("-")[:3])
+        abi = abi.replace(".", "_").replace("-", "_")
+    elif soabi:
+        abi = soabi.replace(".", "_").replace("-", "_")
+    else:
+        abi = None
+
+    return abi
+
+
+def safer_name(name: str) -> str:
+    return safe_name(name).replace("-", "_")
+
+
+def safer_version(version: str) -> str:
+    return safe_version(version).replace("-", "_")
+
+
+def remove_readonly(
+    func: Callable[..., object],
+    path: str,
+    excinfo: tuple[type[Exception], Exception, types.TracebackType],
+) -> None:
+    remove_readonly_exc(func, path, excinfo[1])
+
+
+def remove_readonly_exc(func: Callable[..., object], path: str, exc: Exception) -> None:
+    os.chmod(path, stat.S_IWRITE)
+    func(path)
+
+
+class bdist_wheel(Command):
+    description = "create a wheel distribution"
+
+    supported_compressions = {
+        "stored": ZIP_STORED,
+        "deflated": ZIP_DEFLATED,
+    }
+
+    user_options = [
+        ("bdist-dir=", "b", "temporary directory for creating the distribution"),
+        (
+            "plat-name=",
+            "p",
+            "platform name to embed in generated filenames "
+            f"(default: {get_platform(None)})",
+        ),
+        (
+            "keep-temp",
+            "k",
+            "keep the pseudo-installation tree around after "
+            "creating the distribution archive",
+        ),
+        ("dist-dir=", "d", "directory to put final built distributions in"),
+        ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
+        (
+            "relative",
+            None,
+            "build the archive using relative paths (default: false)",
+        ),
+        (
+            "owner=",
+            "u",
+            "Owner name used when creating a tar file [default: current user]",
+        ),
+        (
+            "group=",
+            "g",
+            "Group name used when creating a tar file [default: current group]",
+        ),
+        ("universal", None, "make a universal wheel (default: false)"),
+        (
+            "compression=",
+            None,
+            "zipfile compression (one of: {}) (default: 'deflated')".format(
+                ", ".join(supported_compressions)
+            ),
+        ),
+        (
+            "python-tag=",
+            None,
+            f"Python implementation compatibility tag (default: '{python_tag()}')",
+        ),
+        (
+            "build-number=",
+            None,
+            "Build number for this particular version. "
+            "As specified in PEP-0427, this must start with a digit. "
+            "[default: None]",
+        ),
+        (
+            "py-limited-api=",
+            None,
+            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag (default: false)",
+        ),
+    ]
+
+    boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
+
+    def initialize_options(self):
+        self.bdist_dir: str = None
+        self.data_dir = None
+        self.plat_name: str | None = None
+        self.plat_tag = None
+        self.format = "zip"
+        self.keep_temp = False
+        self.dist_dir: str | None = None
+        self.egginfo_dir = None
+        self.root_is_pure: bool | None = None
+        self.skip_build = None
+        self.relative = False
+        self.owner = None
+        self.group = None
+        self.universal: bool = False
+        self.compression: str | int = "deflated"
+        self.python_tag: str = python_tag()
+        self.build_number: str | None = None
+        self.py_limited_api: str | Literal[False] = False
+        self.plat_name_supplied = False
+
+    def finalize_options(self):
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command("bdist").bdist_base
+            self.bdist_dir = os.path.join(bdist_base, "wheel")
+
+        egg_info = self.distribution.get_command_obj("egg_info")
+        egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`
+
+        self.data_dir = self.wheel_dist_name + ".data"
+        self.plat_name_supplied = self.plat_name is not None
+
+        try:
+            self.compression = self.supported_compressions[self.compression]
+        except KeyError:
+            raise ValueError(f"Unsupported compression: {self.compression}") from None
+
+        need_options = ("dist_dir", "plat_name", "skip_build")
+
+        self.set_undefined_options("bdist", *zip(need_options, need_options))
+
+        self.root_is_pure = not (
+            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
+        )
+
+        if self.py_limited_api and not re.match(
+            PY_LIMITED_API_PATTERN, self.py_limited_api
+        ):
+            raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
+
+        # Support legacy [wheel] section for setting universal
+        wheel = self.distribution.get_option_dict("wheel")
+        if "universal" in wheel:
+            # please don't define this in your global configs
+            log.warning(
+                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
+            )
+            val = wheel["universal"][1].strip()
+            if val.lower() in ("1", "true", "yes"):
+                self.universal = True
+
+        if self.build_number is not None and not self.build_number[:1].isdigit():
+            raise ValueError("Build tag (build-number) must start with a digit.")
+
+    @property
+    def wheel_dist_name(self):
+        """Return distribution full name with - replaced with _"""
+        components = (
+            safer_name(self.distribution.get_name()),
+            safer_version(self.distribution.get_version()),
+        )
+        if self.build_number:
+            components += (self.build_number,)
+        return "-".join(components)
+
+    def get_tag(self) -> tuple[str, str, str]:
+        # bdist sets self.plat_name if unset, we should only use it for purepy
+        # wheels if the user supplied it.
+        if self.plat_name_supplied:
+            plat_name = cast(str, self.plat_name)
+        elif self.root_is_pure:
+            plat_name = "any"
+        else:
+            # macosx contains system version in platform name so need special handle
+            if self.plat_name and not self.plat_name.startswith("macosx"):
+                plat_name = self.plat_name
+            else:
+                # on macosx always limit the platform name to comply with any
+                # c-extension modules in bdist_dir, since the user can specify
+                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake
+
+                # on other platforms, and on macosx if there are no c-extension
+                # modules, use the default platform name.
+                plat_name = get_platform(self.bdist_dir)
+
+            if _is_32bit_interpreter():
+                if plat_name in ("linux-x86_64", "linux_x86_64"):
+                    plat_name = "linux_i686"
+                if plat_name in ("linux-aarch64", "linux_aarch64"):
+                    # TODO armv8l, packaging pull request #690 => this did not land
+                    # in pip/packaging yet
+                    plat_name = "linux_armv7l"
+
+        plat_name = (
+            plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
+        )
+
+        if self.root_is_pure:
+            if self.universal:
+                impl = "py2.py3"
+            else:
+                impl = self.python_tag
+            tag = (impl, "none", plat_name)
+        else:
+            impl_name = tags.interpreter_name()
+            impl_ver = tags.interpreter_version()
+            impl = impl_name + impl_ver
+            # We don't work on CPython 3.1, 3.0.
+            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
+                impl = self.py_limited_api
+                abi_tag = "abi3"
+            else:
+                abi_tag = str(get_abi_tag()).lower()
+            tag = (impl, abi_tag, plat_name)
+            # issue gh-374: allow overriding plat_name
+            supported_tags = [
+                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
+            ]
+            assert (
+                tag in supported_tags
+            ), f"would build wheel with unsupported tag {tag}"
+        return tag
+
+    def run(self):
+        build_scripts = self.reinitialize_command("build_scripts")
+        build_scripts.executable = "python"
+        build_scripts.force = True
+
+        build_ext = self.reinitialize_command("build_ext")
+        build_ext.inplace = False
+
+        if not self.skip_build:
+            self.run_command("build")
+
+        install = self.reinitialize_command("install", reinit_subcommands=True)
+        install.root = self.bdist_dir
+        install.compile = False
+        install.skip_build = self.skip_build
+        install.warn_dir = False
+
+        # A wheel without setuptools scripts is more cross-platform.
+        # Use the (undocumented) `no_ep` option to setuptools'
+        # install_scripts command to avoid creating entry point scripts.
+        install_scripts = self.reinitialize_command("install_scripts")
+        install_scripts.no_ep = True
+
+        # Use a custom scheme for the archive, because we have to decide
+        # at installation time which scheme to use.
+        for key in ("headers", "scripts", "data", "purelib", "platlib"):
+            setattr(install, "install_" + key, os.path.join(self.data_dir, key))
+
+        basedir_observed = ""
+
+        if os.name == "nt":
+            # win32 barfs if any of these are ''; could be '.'?
+            # (distutils.command.install:change_roots bug)
+            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
+            self.install_libbase = self.install_lib = basedir_observed
+
+        setattr(
+            install,
+            "install_purelib" if self.root_is_pure else "install_platlib",
+            basedir_observed,
+        )
+
+        log.info(f"installing to {self.bdist_dir}")
+
+        self.run_command("install")
+
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            archive_root = os.path.join(
+                self.bdist_dir, self._ensure_relative(install.install_base)
+            )
+
+        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
+        distinfo_dirname = (
+            f"{safer_name(self.distribution.get_name())}-"
+            f"{safer_version(self.distribution.get_version())}.dist-info"
+        )
+        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
+        self.egg2dist(self.egginfo_dir, distinfo_dir)
+
+        self.write_wheelfile(distinfo_dir)
+
+        # Make the archive
+        if not os.path.exists(self.dist_dir):
+            os.makedirs(self.dist_dir)
+
+        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
+        with WheelFile(wheel_path, "w", self.compression) as wf:
+            wf.write_files(archive_root)
+
+        # Add to 'Distribution.dist_files' so that the "upload" command works
+        getattr(self.distribution, "dist_files", []).append(
+            (
+                "bdist_wheel",
+                "{}.{}".format(*sys.version_info[:2]),  # like 3.7
+                wheel_path,
+            )
+        )
+
+        if not self.keep_temp:
+            log.info(f"removing {self.bdist_dir}")
+            if not self.dry_run:
+                if sys.version_info < (3, 12):
+                    rmtree(self.bdist_dir, onerror=remove_readonly)
+                else:
+                    rmtree(self.bdist_dir, onexc=remove_readonly_exc)
+
+    def write_wheelfile(
+        self, wheelfile_base: str, generator: str = f"bdist_wheel ({wheel_version})"
+    ):
+        from email.message import Message
+
+        msg = Message()
+        msg["Wheel-Version"] = "1.0"  # of the spec
+        msg["Generator"] = generator
+        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
+        if self.build_number is not None:
+            msg["Build"] = self.build_number
+
+        # Doesn't work for bdist_wininst
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        for impl in impl_tag.split("."):
+            for abi in abi_tag.split("."):
+                for plat in plat_tag.split("."):
+                    msg["Tag"] = "-".join((impl, abi, plat))
+
+        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
+        log.info(f"creating {wheelfile_path}")
+        with open(wheelfile_path, "wb") as f:
+            BytesGenerator(f, maxheaderlen=0).flatten(msg)
+
+    def _ensure_relative(self, path: str) -> str:
+        # copied from dir_util, deleted
+        drive, path = os.path.splitdrive(path)
+        if path[0:1] == os.sep:
+            path = drive + path[1:]
+        return path
+
+    @property
+    def license_paths(self) -> Iterable[str]:
+        if setuptools_major_version >= 57:
+            # Setuptools has resolved any patterns to actual file names
+            return self.distribution.metadata.license_files or ()
+
+        files: set[str] = set()
+        metadata = self.distribution.get_option_dict("metadata")
+        if setuptools_major_version >= 42:
+            # Setuptools recognizes the license_files option but does not do globbing
+            patterns = cast(Sequence[str], self.distribution.metadata.license_files)
+        else:
+            # Prior to those, wheel is entirely responsible for handling license files
+            if "license_files" in metadata:
+                patterns = metadata["license_files"][1].split()
+            else:
+                patterns = ()
+
+        if "license_file" in metadata:
+            warnings.warn(
+                'The "license_file" option is deprecated. Use "license_files" instead.',
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            files.add(metadata["license_file"][1])
+
+        if not files and not patterns and not isinstance(patterns, list):
+            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")
+
+        for pattern in patterns:
+            for path in iglob(pattern):
+                if path.endswith("~"):
+                    log.debug(
+                        f'ignoring license file "{path}" as it looks like a backup'
+                    )
+                    continue
+
+                if path not in files and os.path.isfile(path):
+                    log.info(
+                        f'adding license file "{path}" (matched pattern "{pattern}")'
+                    )
+                    files.add(path)
+
+        return files
+
+    def egg2dist(self, egginfo_path: str, distinfo_path: str):
+        """Convert an .egg-info directory into a .dist-info directory"""
+
+        def adios(p: str) -> None:
+            """Appropriately delete directory, file or link."""
+            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
+                shutil.rmtree(p)
+            elif os.path.exists(p):
+                os.unlink(p)
+
+        adios(distinfo_path)
+
+        if not os.path.exists(egginfo_path):
+            # There is no egg-info. This is probably because the egg-info
+            # file/directory is not named matching the distribution name used
+            # to name the archive file. Check for this case and report
+            # accordingly.
+            import glob
+
+            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
+            possible = glob.glob(pat)
+            err = f"Egg metadata expected at {egginfo_path} but not found"
+            if possible:
+                alt = os.path.basename(possible[0])
+                err += f" ({alt} found - possible misnamed archive file?)"
+
+            raise ValueError(err)
+
+        if os.path.isfile(egginfo_path):
+            # .egg-info is a single file
+            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
+            os.mkdir(distinfo_path)
+        else:
+            # .egg-info is a directory
+            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
+            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
+
+            # ignore common egg metadata that is useless to wheel
+            shutil.copytree(
+                egginfo_path,
+                distinfo_path,
+                ignore=lambda x, y: {
+                    "PKG-INFO",
+                    "requires.txt",
+                    "SOURCES.txt",
+                    "not-zip-safe",
+                },
+            )
+
+            # delete dependency_links if it is only whitespace
+            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
+            with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
+                dependency_links = dependency_links_file.read().strip()
+            if not dependency_links:
+                adios(dependency_links_path)
+
+        pkg_info_path = os.path.join(distinfo_path, "METADATA")
+        serialization_policy = EmailPolicy(
+            utf8=True,
+            mangle_from_=False,
+            max_line_length=0,
+        )
+        with open(pkg_info_path, "w", encoding="utf-8") as out:
+            Generator(out, policy=serialization_policy).flatten(pkg_info)
+
+        for license_path in self.license_paths:
+            filename = os.path.basename(license_path)
+            shutil.copy(license_path, os.path.join(distinfo_path, filename))
+
+        adios(egginfo_path)
diff --git a/setuptools/_vendor/wheel/_setuptools_logging.py b/setuptools/_vendor/wheel/_setuptools_logging.py
index 006c098523..a1a2482ba2 100644
--- a/setuptools/_vendor/wheel/_setuptools_logging.py
+++ b/setuptools/_vendor/wheel/_setuptools_logging.py
@@ -5,11 +5,11 @@
 import sys
 
 
-def _not_warning(record):
+def _not_warning(record: logging.LogRecord) -> bool:
     return record.levelno < logging.WARNING
 
 
-def configure():
+def configure() -> None:
     """
     Configure logging to emit warning and above to stderr
     and everything else to stdout. This behavior is provided
diff --git a/setuptools/_vendor/wheel/bdist_wheel.py b/setuptools/_vendor/wheel/bdist_wheel.py
index 6b811ee3df..dd7b8629e5 100644
--- a/setuptools/_vendor/wheel/bdist_wheel.py
+++ b/setuptools/_vendor/wheel/bdist_wheel.py
@@ -1,595 +1,26 @@
-"""
-Create a wheel (.whl) distribution.
-
-A wheel is a built archive format.
-"""
-
-from __future__ import annotations
-
-import os
-import re
-import shutil
-import stat
-import struct
-import sys
-import sysconfig
-import warnings
-from email.generator import BytesGenerator, Generator
-from email.policy import EmailPolicy
-from glob import iglob
-from shutil import rmtree
-from zipfile import ZIP_DEFLATED, ZIP_STORED
-
-import setuptools
-from setuptools import Command
-
-from . import __version__ as wheel_version
-from .macosx_libfile import calculate_macosx_platform_tag
-from .metadata import pkginfo_to_metadata
-from .util import log
-from .vendored.packaging import tags
-from .vendored.packaging import version as _packaging_version
-from .wheelfile import WheelFile
-
-
-def safe_name(name):
-    """Convert an arbitrary string to a standard distribution name
-    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
-    """
-    return re.sub("[^A-Za-z0-9.]+", "-", name)
-
-
-def safe_version(version):
-    """
-    Convert an arbitrary string to a standard version string
-    """
+from typing import TYPE_CHECKING
+from warnings import warn
+
+warn(
+    "The 'wheel' package is no longer the canonical location of the 'bdist_wheel' "
+    "command, and will be removed in a future release. Please update to setuptools "
+    "v70.1 or later which contains an integrated version of this command.",
+    DeprecationWarning,
+    stacklevel=1,
+)
+
+if TYPE_CHECKING:
+    from ._bdist_wheel import bdist_wheel as bdist_wheel
+else:
     try:
-        # normalize the version
-        return str(_packaging_version.Version(version))
-    except _packaging_version.InvalidVersion:
-        version = version.replace(" ", ".")
-        return re.sub("[^A-Za-z0-9.]+", "-", version)
-
-
-setuptools_major_version = int(setuptools.__version__.split(".")[0])
-
-PY_LIMITED_API_PATTERN = r"cp3\d"
-
-
-def _is_32bit_interpreter():
-    return struct.calcsize("P") == 4
-
-
-def python_tag():
-    return f"py{sys.version_info[0]}"
-
-
-def get_platform(archive_root):
-    """Return our platform name 'win32', 'linux_x86_64'"""
-    result = sysconfig.get_platform()
-    if result.startswith("macosx") and archive_root is not None:
-        result = calculate_macosx_platform_tag(archive_root, result)
-    elif _is_32bit_interpreter():
-        if result == "linux-x86_64":
-            # pip pull request #3497
-            result = "linux-i686"
-        elif result == "linux-aarch64":
-            # packaging pull request #234
-            # TODO armv8l, packaging pull request #690 => this did not land
-            # in pip/packaging yet
-            result = "linux-armv7l"
-
-    return result.replace("-", "_")
-
-
-def get_flag(var, fallback, expected=True, warn=True):
-    """Use a fallback value for determining SOABI flags if the needed config
-    var is unset or unavailable."""
-    val = sysconfig.get_config_var(var)
-    if val is None:
-        if warn:
-            warnings.warn(
-                f"Config variable '{var}' is unset, Python ABI tag may " "be incorrect",
-                RuntimeWarning,
-                stacklevel=2,
-            )
-        return fallback
-    return val == expected
-
-
-def get_abi_tag():
-    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
-    soabi = sysconfig.get_config_var("SOABI")
-    impl = tags.interpreter_name()
-    if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
-        d = ""
-        m = ""
-        u = ""
-        if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
-            d = "d"
-
-        if get_flag(
-            "WITH_PYMALLOC",
-            impl == "cp",
-            warn=(impl == "cp" and sys.version_info < (3, 8)),
-        ) and sys.version_info < (3, 8):
-            m = "m"
-
-        abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
-    elif soabi and impl == "cp" and soabi.startswith("cpython"):
-        # non-Windows
-        abi = "cp" + soabi.split("-")[1]
-    elif soabi and impl == "cp" and soabi.startswith("cp"):
-        # Windows
-        abi = soabi.split("-")[0]
-    elif soabi and impl == "pp":
-        # we want something like pypy36-pp73
-        abi = "-".join(soabi.split("-")[:2])
-        abi = abi.replace(".", "_").replace("-", "_")
-    elif soabi and impl == "graalpy":
-        abi = "-".join(soabi.split("-")[:3])
-        abi = abi.replace(".", "_").replace("-", "_")
-    elif soabi:
-        abi = soabi.replace(".", "_").replace("-", "_")
-    else:
-        abi = None
-
-    return abi
-
-
-def safer_name(name):
-    return safe_name(name).replace("-", "_")
-
-
-def safer_version(version):
-    return safe_version(version).replace("-", "_")
-
-
-def remove_readonly(func, path, excinfo):
-    remove_readonly_exc(func, path, excinfo[1])
-
-
-def remove_readonly_exc(func, path, exc):
-    os.chmod(path, stat.S_IWRITE)
-    func(path)
-
-
-class bdist_wheel(Command):
-    description = "create a wheel distribution"
-
-    supported_compressions = {
-        "stored": ZIP_STORED,
-        "deflated": ZIP_DEFLATED,
-    }
-
-    user_options = [
-        ("bdist-dir=", "b", "temporary directory for creating the distribution"),
-        (
-            "plat-name=",
-            "p",
-            "platform name to embed in generated filenames "
-            "(default: %s)" % get_platform(None),
-        ),
-        (
-            "keep-temp",
-            "k",
-            "keep the pseudo-installation tree around after "
-            "creating the distribution archive",
-        ),
-        ("dist-dir=", "d", "directory to put final built distributions in"),
-        ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
-        (
-            "relative",
-            None,
-            "build the archive using relative paths " "(default: false)",
-        ),
-        (
-            "owner=",
-            "u",
-            "Owner name used when creating a tar file" " [default: current user]",
-        ),
-        (
-            "group=",
-            "g",
-            "Group name used when creating a tar file" " [default: current group]",
-        ),
-        ("universal", None, "make a universal wheel" " (default: false)"),
-        (
-            "compression=",
-            None,
-            "zipfile compression (one of: {})" " (default: 'deflated')".format(
-                ", ".join(supported_compressions)
-            ),
-        ),
-        (
-            "python-tag=",
-            None,
-            "Python implementation compatibility tag"
-            " (default: '%s')" % (python_tag()),
-        ),
-        (
-            "build-number=",
-            None,
-            "Build number for this particular version. "
-            "As specified in PEP-0427, this must start with a digit. "
-            "[default: None]",
-        ),
-        (
-            "py-limited-api=",
-            None,
-            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" " (default: false)",
-        ),
-    ]
-
-    boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
-
-    def initialize_options(self):
-        self.bdist_dir = None
-        self.data_dir = None
-        self.plat_name = None
-        self.plat_tag = None
-        self.format = "zip"
-        self.keep_temp = False
-        self.dist_dir = None
-        self.egginfo_dir = None
-        self.root_is_pure = None
-        self.skip_build = None
-        self.relative = False
-        self.owner = None
-        self.group = None
-        self.universal = False
-        self.compression = "deflated"
-        self.python_tag = python_tag()
-        self.build_number = None
-        self.py_limited_api = False
-        self.plat_name_supplied = False
-
-    def finalize_options(self):
-        if self.bdist_dir is None:
-            bdist_base = self.get_finalized_command("bdist").bdist_base
-            self.bdist_dir = os.path.join(bdist_base, "wheel")
-
-        egg_info = self.distribution.get_command_obj("egg_info")
-        egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`
-
-        self.data_dir = self.wheel_dist_name + ".data"
-        self.plat_name_supplied = self.plat_name is not None
-
-        try:
-            self.compression = self.supported_compressions[self.compression]
-        except KeyError:
-            raise ValueError(f"Unsupported compression: {self.compression}") from None
-
-        need_options = ("dist_dir", "plat_name", "skip_build")
-
-        self.set_undefined_options("bdist", *zip(need_options, need_options))
-
-        self.root_is_pure = not (
-            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
-        )
-
-        if self.py_limited_api and not re.match(
-            PY_LIMITED_API_PATTERN, self.py_limited_api
-        ):
-            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)
-
-        # Support legacy [wheel] section for setting universal
-        wheel = self.distribution.get_option_dict("wheel")
-        if "universal" in wheel:
-            # please don't define this in your global configs
-            log.warning(
-                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
-            )
-            val = wheel["universal"][1].strip()
-            if val.lower() in ("1", "true", "yes"):
-                self.universal = True
-
-        if self.build_number is not None and not self.build_number[:1].isdigit():
-            raise ValueError("Build tag (build-number) must start with a digit.")
-
-    @property
-    def wheel_dist_name(self):
-        """Return distribution full name with - replaced with _"""
-        components = (
-            safer_name(self.distribution.get_name()),
-            safer_version(self.distribution.get_version()),
-        )
-        if self.build_number:
-            components += (self.build_number,)
-        return "-".join(components)
-
-    def get_tag(self):
-        # bdist sets self.plat_name if unset, we should only use it for purepy
-        # wheels if the user supplied it.
-        if self.plat_name_supplied:
-            plat_name = self.plat_name
-        elif self.root_is_pure:
-            plat_name = "any"
-        else:
-            # macosx contains system version in platform name so need special handle
-            if self.plat_name and not self.plat_name.startswith("macosx"):
-                plat_name = self.plat_name
-            else:
-                # on macosx always limit the platform name to comply with any
-                # c-extension modules in bdist_dir, since the user can specify
-                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake
-
-                # on other platforms, and on macosx if there are no c-extension
-                # modules, use the default platform name.
-                plat_name = get_platform(self.bdist_dir)
-
-            if _is_32bit_interpreter():
-                if plat_name in ("linux-x86_64", "linux_x86_64"):
-                    plat_name = "linux_i686"
-                if plat_name in ("linux-aarch64", "linux_aarch64"):
-                    # TODO armv8l, packaging pull request #690 => this did not land
-                    # in pip/packaging yet
-                    plat_name = "linux_armv7l"
-
-        plat_name = (
-            plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
-        )
-
-        if self.root_is_pure:
-            if self.universal:
-                impl = "py2.py3"
-            else:
-                impl = self.python_tag
-            tag = (impl, "none", plat_name)
-        else:
-            impl_name = tags.interpreter_name()
-            impl_ver = tags.interpreter_version()
-            impl = impl_name + impl_ver
-            # We don't work on CPython 3.1, 3.0.
-            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
-                impl = self.py_limited_api
-                abi_tag = "abi3"
-            else:
-                abi_tag = str(get_abi_tag()).lower()
-            tag = (impl, abi_tag, plat_name)
-            # issue gh-374: allow overriding plat_name
-            supported_tags = [
-                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
-            ]
-            assert (
-                tag in supported_tags
-            ), f"would build wheel with unsupported tag {tag}"
-        return tag
-
-    def run(self):
-        build_scripts = self.reinitialize_command("build_scripts")
-        build_scripts.executable = "python"
-        build_scripts.force = True
-
-        build_ext = self.reinitialize_command("build_ext")
-        build_ext.inplace = False
-
-        if not self.skip_build:
-            self.run_command("build")
-
-        install = self.reinitialize_command("install", reinit_subcommands=True)
-        install.root = self.bdist_dir
-        install.compile = False
-        install.skip_build = self.skip_build
-        install.warn_dir = False
-
-        # A wheel without setuptools scripts is more cross-platform.
-        # Use the (undocumented) `no_ep` option to setuptools'
-        # install_scripts command to avoid creating entry point scripts.
-        install_scripts = self.reinitialize_command("install_scripts")
-        install_scripts.no_ep = True
-
-        # Use a custom scheme for the archive, because we have to decide
-        # at installation time which scheme to use.
-        for key in ("headers", "scripts", "data", "purelib", "platlib"):
-            setattr(install, "install_" + key, os.path.join(self.data_dir, key))
-
-        basedir_observed = ""
-
-        if os.name == "nt":
-            # win32 barfs if any of these are ''; could be '.'?
-            # (distutils.command.install:change_roots bug)
-            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
-            self.install_libbase = self.install_lib = basedir_observed
-
-        setattr(
-            install,
-            "install_purelib" if self.root_is_pure else "install_platlib",
-            basedir_observed,
-        )
-
-        log.info(f"installing to {self.bdist_dir}")
-
-        self.run_command("install")
-
-        impl_tag, abi_tag, plat_tag = self.get_tag()
-        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
-        if not self.relative:
-            archive_root = self.bdist_dir
-        else:
-            archive_root = os.path.join(
-                self.bdist_dir, self._ensure_relative(install.install_base)
-            )
-
-        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
-        distinfo_dirname = (
-            f"{safer_name(self.distribution.get_name())}-"
-            f"{safer_version(self.distribution.get_version())}.dist-info"
-        )
-        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
-        self.egg2dist(self.egginfo_dir, distinfo_dir)
-
-        self.write_wheelfile(distinfo_dir)
-
-        # Make the archive
-        if not os.path.exists(self.dist_dir):
-            os.makedirs(self.dist_dir)
-
-        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
-        with WheelFile(wheel_path, "w", self.compression) as wf:
-            wf.write_files(archive_root)
-
-        # Add to 'Distribution.dist_files' so that the "upload" command works
-        getattr(self.distribution, "dist_files", []).append(
-            (
-                "bdist_wheel",
-                "{}.{}".format(*sys.version_info[:2]),  # like 3.7
-                wheel_path,
-            )
-        )
-
-        if not self.keep_temp:
-            log.info(f"removing {self.bdist_dir}")
-            if not self.dry_run:
-                if sys.version_info < (3, 12):
-                    rmtree(self.bdist_dir, onerror=remove_readonly)
-                else:
-                    rmtree(self.bdist_dir, onexc=remove_readonly_exc)
-
-    def write_wheelfile(
-        self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")"
-    ):
-        from email.message import Message
-
-        msg = Message()
-        msg["Wheel-Version"] = "1.0"  # of the spec
-        msg["Generator"] = generator
-        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
-        if self.build_number is not None:
-            msg["Build"] = self.build_number
-
-        # Doesn't work for bdist_wininst
-        impl_tag, abi_tag, plat_tag = self.get_tag()
-        for impl in impl_tag.split("."):
-            for abi in abi_tag.split("."):
-                for plat in plat_tag.split("."):
-                    msg["Tag"] = "-".join((impl, abi, plat))
-
-        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
-        log.info(f"creating {wheelfile_path}")
-        with open(wheelfile_path, "wb") as f:
-            BytesGenerator(f, maxheaderlen=0).flatten(msg)
-
-    def _ensure_relative(self, path):
-        # copied from dir_util, deleted
-        drive, path = os.path.splitdrive(path)
-        if path[0:1] == os.sep:
-            path = drive + path[1:]
-        return path
-
-    @property
-    def license_paths(self):
-        if setuptools_major_version >= 57:
-            # Setuptools has resolved any patterns to actual file names
-            return self.distribution.metadata.license_files or ()
-
-        files = set()
-        metadata = self.distribution.get_option_dict("metadata")
-        if setuptools_major_version >= 42:
-            # Setuptools recognizes the license_files option but does not do globbing
-            patterns = self.distribution.metadata.license_files
-        else:
-            # Prior to those, wheel is entirely responsible for handling license files
-            if "license_files" in metadata:
-                patterns = metadata["license_files"][1].split()
-            else:
-                patterns = ()
-
-        if "license_file" in metadata:
-            warnings.warn(
-                'The "license_file" option is deprecated. Use "license_files" instead.',
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            files.add(metadata["license_file"][1])
-
-        if not files and not patterns and not isinstance(patterns, list):
-            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")
-
-        for pattern in patterns:
-            for path in iglob(pattern):
-                if path.endswith("~"):
-                    log.debug(
-                        f'ignoring license file "{path}" as it looks like a backup'
-                    )
-                    continue
-
-                if path not in files and os.path.isfile(path):
-                    log.info(
-                        f'adding license file "{path}" (matched pattern "{pattern}")'
-                    )
-                    files.add(path)
-
-        return files
-
-    def egg2dist(self, egginfo_path, distinfo_path):
-        """Convert an .egg-info directory into a .dist-info directory"""
-
-        def adios(p):
-            """Appropriately delete directory, file or link."""
-            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
-                shutil.rmtree(p)
-            elif os.path.exists(p):
-                os.unlink(p)
-
-        adios(distinfo_path)
-
-        if not os.path.exists(egginfo_path):
-            # There is no egg-info. This is probably because the egg-info
-            # file/directory is not named matching the distribution name used
-            # to name the archive file. Check for this case and report
-            # accordingly.
-            import glob
-
-            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
-            possible = glob.glob(pat)
-            err = f"Egg metadata expected at {egginfo_path} but not found"
-            if possible:
-                alt = os.path.basename(possible[0])
-                err += f" ({alt} found - possible misnamed archive file?)"
-
-            raise ValueError(err)
-
-        if os.path.isfile(egginfo_path):
-            # .egg-info is a single file
-            pkginfo_path = egginfo_path
-            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
-            os.mkdir(distinfo_path)
-        else:
-            # .egg-info is a directory
-            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
-            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
-
-            # ignore common egg metadata that is useless to wheel
-            shutil.copytree(
-                egginfo_path,
-                distinfo_path,
-                ignore=lambda x, y: {
-                    "PKG-INFO",
-                    "requires.txt",
-                    "SOURCES.txt",
-                    "not-zip-safe",
-                },
-            )
-
-            # delete dependency_links if it is only whitespace
-            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
-            with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
-                dependency_links = dependency_links_file.read().strip()
-            if not dependency_links:
-                adios(dependency_links_path)
-
-        pkg_info_path = os.path.join(distinfo_path, "METADATA")
-        serialization_policy = EmailPolicy(
-            utf8=True,
-            mangle_from_=False,
-            max_line_length=0,
-        )
-        with open(pkg_info_path, "w", encoding="utf-8") as out:
-            Generator(out, policy=serialization_policy).flatten(pkg_info)
-
-        for license_path in self.license_paths:
-            filename = os.path.basename(license_path)
-            shutil.copy(license_path, os.path.join(distinfo_path, filename))
-
-        adios(egginfo_path)
+        # Better integration/compatibility with setuptools:
+        # when new fixes or PEPs are implemented in setuptools,
+        # there is no need to backport them to the deprecated code base.
+        # This is useful for old packages in the ecosystem
+        # that are still used but receive little maintenance.
+        from setuptools.command.bdist_wheel import bdist_wheel
+    except ImportError:
+        # Only used with old setuptools versions.
+        # Users who want the latest fixes/PEPs
+        # are encouraged to address the deprecation warning.
+        from ._bdist_wheel import bdist_wheel as bdist_wheel
diff --git a/setuptools/_vendor/wheel/cli/__init__.py b/setuptools/_vendor/wheel/cli/__init__.py
index a38860f5a6..6ba1217f5b 100644
--- a/setuptools/_vendor/wheel/cli/__init__.py
+++ b/setuptools/_vendor/wheel/cli/__init__.py
@@ -14,25 +14,25 @@ class WheelError(Exception):
     pass
 
 
-def unpack_f(args):
+def unpack_f(args: argparse.Namespace) -> None:
     from .unpack import unpack
 
     unpack(args.wheelfile, args.dest)
 
 
-def pack_f(args):
+def pack_f(args: argparse.Namespace) -> None:
     from .pack import pack
 
     pack(args.directory, args.dest_dir, args.build_number)
 
 
-def convert_f(args):
+def convert_f(args: argparse.Namespace) -> None:
     from .convert import convert
 
     convert(args.files, args.dest_dir, args.verbose)
 
 
-def tags_f(args):
+def tags_f(args: argparse.Namespace) -> None:
     from .tags import tags
 
     names = (
@@ -51,10 +51,10 @@ def tags_f(args):
         print(name)
 
 
-def version_f(args):
+def version_f(args: argparse.Namespace) -> None:
     from .. import __version__
 
-    print("wheel %s" % __version__)
+    print(f"wheel {__version__}")
 
 
 def parse_build_tag(build_tag: str) -> str:
diff --git a/setuptools/_vendor/wheel/cli/convert.py b/setuptools/_vendor/wheel/cli/convert.py
index 291534046a..61d4775c58 100644
--- a/setuptools/_vendor/wheel/cli/convert.py
+++ b/setuptools/_vendor/wheel/cli/convert.py
@@ -2,21 +2,23 @@
 
 import os.path
 import re
-import shutil
-import tempfile
-import zipfile
+from abc import ABCMeta, abstractmethod
+from collections import defaultdict
+from collections.abc import Iterator
+from email.message import Message
+from email.parser import Parser
+from email.policy import EmailPolicy
 from glob import iglob
+from pathlib import Path
+from textwrap import dedent
+from zipfile import ZipFile
 
-from ..bdist_wheel import bdist_wheel
+from .. import __version__
+from ..metadata import generate_requirements
+from ..vendored.packaging.tags import parse_tag
 from ..wheelfile import WheelFile
-from . import WheelError
 
-try:
-    from setuptools import Distribution
-except ImportError:
-    from distutils.dist import Distribution
-
-egg_info_re = re.compile(
+egg_filename_re = re.compile(
     r"""
     (?P.+?)-(?P.+?)
     (-(?Ppy\d\.\d+)
@@ -24,84 +26,168 @@
     )?.egg$""",
     re.VERBOSE,
 )
-
-
-class _bdist_wheel_tag(bdist_wheel):
-    # allow the client to override the default generated wheel tag
-    # The default bdist_wheel implementation uses python and abi tags
-    # of the running python process. This is not suitable for
-    # generating/repackaging prebuild binaries.
-
-    full_tag_supplied = False
-    full_tag = None  # None or a (pytag, soabitag, plattag) triple
-
-    def get_tag(self):
-        if self.full_tag_supplied and self.full_tag is not None:
-            return self.full_tag
+egg_info_re = re.compile(
+    r"""
+    ^(?P<name>.+?)-(?P<ver>.+?)
+    (-(?P<pyver>py\d\.\d+)
+    )?.egg-info/""",
+    re.VERBOSE,
+)
+wininst_re = re.compile(
+    r"\.(?Pwin32|win-amd64)(?:-(?Ppy\d\.\d))?\.exe$"
+)
+pyd_re = re.compile(r"\.(?P<abi>[a-z0-9]+)-(?P<platform>win32|win_amd64)\.pyd$")
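+# Email policy used to serialize METADATA: UTF-8, no From-mangling, and an
+# unlimited line length so long header values are never folded.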
+serialization_policy = EmailPolicy(
+    utf8=True,
+    mangle_from_=False,
+    max_line_length=0,
+)
+GENERATOR = f"wheel {__version__}"
+
+
+def convert_requires(requires: str, metadata: Message) -> None:
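+    # An egg's requires.txt lists one requirement per line, optionally grouped
+    # under "[extra]" section headers; lines before the first header apply
+    # unconditionally and are tracked under the None key.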
+    extra: str | None = None
+    requirements: dict[str | None, list[str]] = defaultdict(list)
+    for line in requires.splitlines():
+        line = line.strip()
+        if not line:
+            continue
+
+        if line.startswith("[") and line.endswith("]"):
+            extra = line[1:-1]
+            continue
+
+        requirements[extra].append(line)
+
+    for key, value in generate_requirements(requirements):
+        metadata.add_header(key, value)
+
+
+def convert_pkg_info(pkginfo: str, metadata: Message) -> None:
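+    # Re-emit the legacy PKG-INFO headers as modern core metadata, skipping
+    # placeholder "UNKNOWN" values and mapping the legacy Home-page and
+    # Download-URL headers onto Project-URL entries.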
+    parsed_message = Parser().parsestr(pkginfo)
+    for key, value in parsed_message.items():
+        key_lower = key.lower()
+        if value == "UNKNOWN":
+            continue
+
+        if key_lower == "description":
+            description_lines = value.splitlines()
+            value = "\n".join(
+                (
+                    description_lines[0].lstrip(),
+                    dedent("\n".join(description_lines[1:])),
+                    "\n",
+                )
+            )
+            metadata.set_payload(value)
+        elif key_lower == "home-page":
+            metadata.add_header("Project-URL", f"Homepage, {value}")
+        elif key_lower == "download-url":
+            metadata.add_header("Project-URL", f"Download, {value}")
         else:
-            return bdist_wheel.get_tag(self)
-
-
-def egg2wheel(egg_path: str, dest_dir: str) -> None:
-    filename = os.path.basename(egg_path)
-    match = egg_info_re.match(filename)
-    if not match:
-        raise WheelError(f"Invalid egg file name: {filename}")
-
-    egg_info = match.groupdict()
-    dir = tempfile.mkdtemp(suffix="_e2w")
-    if os.path.isfile(egg_path):
-        # assume we have a bdist_egg otherwise
-        with zipfile.ZipFile(egg_path) as egg:
-            egg.extractall(dir)
-    else:
-        # support buildout-style installed eggs directories
-        for pth in os.listdir(egg_path):
-            src = os.path.join(egg_path, pth)
-            if os.path.isfile(src):
-                shutil.copy2(src, dir)
-            else:
-                shutil.copytree(src, os.path.join(dir, pth))
-
-    pyver = egg_info["pyver"]
-    if pyver:
-        pyver = egg_info["pyver"] = pyver.replace(".", "")
-
-    arch = (egg_info["arch"] or "any").replace(".", "_").replace("-", "_")
-
-    # assume all binary eggs are for CPython
-    abi = "cp" + pyver[2:] if arch != "any" else "none"
-
-    root_is_purelib = egg_info["arch"] is None
-    if root_is_purelib:
-        bw = bdist_wheel(Distribution())
-    else:
-        bw = _bdist_wheel_tag(Distribution())
-
-    bw.root_is_pure = root_is_purelib
-    bw.python_tag = pyver
-    bw.plat_name_supplied = True
-    bw.plat_name = egg_info["arch"] or "any"
-    if not root_is_purelib:
-        bw.full_tag_supplied = True
-        bw.full_tag = (pyver, abi, arch)
-
-    dist_info_dir = os.path.join(dir, "{name}-{ver}.dist-info".format(**egg_info))
-    bw.egg2dist(os.path.join(dir, "EGG-INFO"), dist_info_dir)
-    bw.write_wheelfile(dist_info_dir, generator="egg2wheel")
-    wheel_name = "{name}-{ver}-{pyver}-{}-{}.whl".format(abi, arch, **egg_info)
-    with WheelFile(os.path.join(dest_dir, wheel_name), "w") as wf:
-        wf.write_files(dir)
-
-    shutil.rmtree(dir)
-
-
-def parse_wininst_info(wininfo_name, egginfo_name):
-    """Extract metadata from filenames.
-
-    Extracts the 4 metadataitems needed (name, version, pyversion, arch) from
-    the installer filename and the name of the egg-info directory embedded in
-    the zipfile (if any).
+            metadata.add_header(key, value)
+
+    metadata.replace_header("Metadata-Version", "2.4")
+
+
+def normalize(name: str) -> str:
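+    # Normalize the project name as in PEP 503, then replace hyphens with
+    # underscores so the result is safe to embed in wheel file name fields.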
+    return re.sub(r"[-_.]+", "-", name).lower().replace("-", "_")
+
+
+class ConvertSource(metaclass=ABCMeta):
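+    # The defaults describe a universal pure-Python wheel; subclasses override
+    # them when the source archive pins an interpreter, ABI or platform.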
+    name: str
+    version: str
+    pyver: str = "py2.py3"
+    abi: str = "none"
+    platform: str = "any"
+    metadata: Message
+
+    @property
+    def dist_info_dir(self) -> str:
+        return f"{self.name}-{self.version}.dist-info"
+
+    @abstractmethod
+    def generate_contents(self) -> Iterator[tuple[str, bytes]]:
+        pass
+
+
+class EggFileSource(ConvertSource):
+    def __init__(self, path: Path):
+        if not (match := egg_filename_re.match(path.name)):
+            raise ValueError(f"Invalid egg file name: {path.name}")
+
+        # Binary wheels are assumed to be for CPython
+        self.path = path
+        self.name = normalize(match.group("name"))
+        self.version = match.group("ver")
+        if pyver := match.group("pyver"):
+            self.pyver = pyver.replace(".", "")
+            if arch := match.group("arch"):
+                self.abi = self.pyver.replace("py", "cp")
+                self.platform = normalize(arch)
+
+        self.metadata = Message()
+
+    def generate_contents(self) -> Iterator[tuple[str, bytes]]:
+        with ZipFile(self.path, "r") as zip_file:
+            for filename in sorted(zip_file.namelist()):
+                # Skip pure directory entries
+                if filename.endswith("/"):
+                    continue
+
+                # Handle files in the egg-info directory specially, selectively moving
+                # them to the dist-info directory while converting as needed
+                if filename.startswith("EGG-INFO/"):
+                    if filename == "EGG-INFO/requires.txt":
+                        requires = zip_file.read(filename).decode("utf-8")
+                        convert_requires(requires, self.metadata)
+                    elif filename == "EGG-INFO/PKG-INFO":
+                        pkginfo = zip_file.read(filename).decode("utf-8")
+                        convert_pkg_info(pkginfo, self.metadata)
+                    elif filename == "EGG-INFO/entry_points.txt":
+                        yield (
+                            f"{self.dist_info_dir}/entry_points.txt",
+                            zip_file.read(filename),
+                        )
+
+                    continue
+
+                # For any other file, just pass it through
+                yield filename, zip_file.read(filename)
+
+
+class EggDirectorySource(EggFileSource):
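+    # Variant of EggFileSource for unpacked (e.g. buildout-style installed)
+    # .egg directories, walking the tree instead of reading a zip archive.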
+    def generate_contents(self) -> Iterator[tuple[str, bytes]]:
+        for dirpath, _, filenames in os.walk(self.path):
+            for filename in sorted(filenames):
+                path = Path(dirpath, filename)
+                if path.parent.name == "EGG-INFO":
+                    if path.name == "requires.txt":
+                        requires = path.read_text("utf-8")
+                        convert_requires(requires, self.metadata)
+                    elif path.name == "PKG-INFO":
+                        pkginfo = path.read_text("utf-8")
+                        convert_pkg_info(pkginfo, self.metadata)
+                        if name := self.metadata.get("Name"):
+                            self.name = normalize(name)
+
+                        if version := self.metadata.get("Version"):
+                            self.version = version
+                    elif path.name == "entry_points.txt":
+                        yield (
+                            f"{self.dist_info_dir}/entry_points.txt",
+                            path.read_bytes(),
+                        )
+
+                    continue
+
+                # For any other file, just pass it through
+                yield str(path.relative_to(self.path)), path.read_bytes()
+
+
+class WininstFileSource(ConvertSource):
+    """
+    Handles distributions created with ``bdist_wininst``.
 
     The egginfo filename has the format::
 
@@ -129,145 +215,118 @@ def parse_wininst_info(wininfo_name, egginfo_name):
        should therefore ignore the architecture if the content is pure-python.
     """
 
-    egginfo = None
-    if egginfo_name:
-        egginfo = egg_info_re.search(egginfo_name)
-        if not egginfo:
-            raise ValueError(f"Egg info filename {egginfo_name} is not valid")
-
-    # Parse the wininst filename
-    # 1. Distribution name (up to the first '-')
-    w_name, sep, rest = wininfo_name.partition("-")
-    if not sep:
-        raise ValueError(f"Installer filename {wininfo_name} is not valid")
-
-    # Strip '.exe'
-    rest = rest[:-4]
-    # 2. Python version (from the last '-', must start with 'py')
-    rest2, sep, w_pyver = rest.rpartition("-")
-    if sep and w_pyver.startswith("py"):
-        rest = rest2
-        w_pyver = w_pyver.replace(".", "")
-    else:
-        # Not version specific - use py2.py3. While it is possible that
-        # pure-Python code is not compatible with both Python 2 and 3, there
-        # is no way of knowing from the wininst format, so we assume the best
-        # here (the user can always manually rename the wheel to be more
-        # restrictive if needed).
-        w_pyver = "py2.py3"
-    # 3. Version and architecture
-    w_ver, sep, w_arch = rest.rpartition(".")
-    if not sep:
-        raise ValueError(f"Installer filename {wininfo_name} is not valid")
-
-    if egginfo:
-        w_name = egginfo.group("name")
-        w_ver = egginfo.group("ver")
-
-    return {"name": w_name, "ver": w_ver, "arch": w_arch, "pyver": w_pyver}
-
-
-def wininst2wheel(path, dest_dir):
-    with zipfile.ZipFile(path) as bdw:
-        # Search for egg-info in the archive
-        egginfo_name = None
-        for filename in bdw.namelist():
-            if ".egg-info" in filename:
-                egginfo_name = filename
-                break
-
-        info = parse_wininst_info(os.path.basename(path), egginfo_name)
-
-        root_is_purelib = True
-        for zipinfo in bdw.infolist():
-            if zipinfo.filename.startswith("PLATLIB"):
-                root_is_purelib = False
-                break
-        if root_is_purelib:
-            paths = {"purelib": ""}
-        else:
-            paths = {"platlib": ""}
-
-        dist_info = "{name}-{ver}".format(**info)
-        datadir = "%s.data/" % dist_info
-
-        # rewrite paths to trick ZipFile into extracting an egg
-        # XXX grab wininst .ini - between .exe, padding, and first zip file.
-        members = []
-        egginfo_name = ""
-        for zipinfo in bdw.infolist():
-            key, basename = zipinfo.filename.split("/", 1)
-            key = key.lower()
-            basepath = paths.get(key, None)
-            if basepath is None:
-                basepath = datadir + key.lower() + "/"
-            oldname = zipinfo.filename
-            newname = basepath + basename
-            zipinfo.filename = newname
-            del bdw.NameToInfo[oldname]
-            bdw.NameToInfo[newname] = zipinfo
-            # Collect member names, but omit '' (from an entry like "PLATLIB/"
-            if newname:
-                members.append(newname)
-            # Remember egg-info name for the egg2dist call below
-            if not egginfo_name:
-                if newname.endswith(".egg-info"):
-                    egginfo_name = newname
-                elif ".egg-info/" in newname:
-                    egginfo_name, sep, _ = newname.rpartition("/")
-        dir = tempfile.mkdtemp(suffix="_b2w")
-        bdw.extractall(dir, members)
-
-    # egg2wheel
-    abi = "none"
-    pyver = info["pyver"]
-    arch = (info["arch"] or "any").replace(".", "_").replace("-", "_")
-    # Wininst installers always have arch even if they are not
-    # architecture-specific (because the format itself is).
-    # So, assume the content is architecture-neutral if root is purelib.
-    if root_is_purelib:
-        arch = "any"
-    # If the installer is architecture-specific, it's almost certainly also
-    # CPython-specific.
-    if arch != "any":
-        pyver = pyver.replace("py", "cp")
-    wheel_name = "-".join((dist_info, pyver, abi, arch))
-    if root_is_purelib:
-        bw = bdist_wheel(Distribution())
-    else:
-        bw = _bdist_wheel_tag(Distribution())
-
-    bw.root_is_pure = root_is_purelib
-    bw.python_tag = pyver
-    bw.plat_name_supplied = True
-    bw.plat_name = info["arch"] or "any"
-
-    if not root_is_purelib:
-        bw.full_tag_supplied = True
-        bw.full_tag = (pyver, abi, arch)
-
-    dist_info_dir = os.path.join(dir, "%s.dist-info" % dist_info)
-    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
-    bw.write_wheelfile(dist_info_dir, generator="wininst2wheel")
-
-    wheel_path = os.path.join(dest_dir, wheel_name)
-    with WheelFile(wheel_path, "w") as wf:
-        wf.write_files(dir)
-
-    shutil.rmtree(dir)
-
-
-def convert(files, dest_dir, verbose):
+    def __init__(self, path: Path):
+        self.path = path
+        self.metadata = Message()
+
+        # Determine the initial architecture and Python version from the file name
+        # (if possible)
+        if match := wininst_re.search(path.name):
+            self.platform = normalize(match.group("platform"))
+            if pyver := match.group("pyver"):
+                self.pyver = pyver.replace(".", "")
+
+        # Look for an .egg-info directory and any .pyd files for more precise info
+        egg_info_found = pyd_found = False
+        with ZipFile(self.path) as zip_file:
+            for filename in zip_file.namelist():
+                prefix, filename = filename.split("/", 1)
+                if not egg_info_found and (match := egg_info_re.match(filename)):
+                    egg_info_found = True
+                    self.name = normalize(match.group("name"))
+                    self.version = match.group("ver")
+                    if pyver := match.group("pyver"):
+                        self.pyver = pyver.replace(".", "")
+                elif not pyd_found and (match := pyd_re.search(filename)):
+                    pyd_found = True
+                    self.abi = match.group("abi")
+                    self.platform = match.group("platform")
+
+                if egg_info_found and pyd_found:
+                    break
+
+    def generate_contents(self) -> Iterator[tuple[str, bytes]]:
+        dist_info_dir = f"{self.name}-{self.version}.dist-info"
+        data_dir = f"{self.name}-{self.version}.data"
+        with ZipFile(self.path, "r") as zip_file:
+            for filename in sorted(zip_file.namelist()):
+                # Skip pure directory entries
+                if filename.endswith("/"):
+                    continue
+
+                # Handle files in the egg-info directory specially, selectively moving
+                # them to the dist-info directory while converting as needed
+                prefix, target_filename = filename.split("/", 1)
+                if egg_info_re.search(target_filename):
+                    basename = target_filename.rsplit("/", 1)[-1]
+                    if basename == "requires.txt":
+                        requires = zip_file.read(filename).decode("utf-8")
+                        convert_requires(requires, self.metadata)
+                    elif basename == "PKG-INFO":
+                        pkginfo = zip_file.read(filename).decode("utf-8")
+                        convert_pkg_info(pkginfo, self.metadata)
+                    elif basename == "entry_points.txt":
+                        yield (
+                            f"{dist_info_dir}/entry_points.txt",
+                            zip_file.read(filename),
+                        )
+
+                    continue
+                elif prefix == "SCRIPTS":
+                    target_filename = f"{data_dir}/scripts/{target_filename}"
+
+                # For any other file, just pass it through
+                yield target_filename, zip_file.read(filename)
+
+
+def convert(files: list[str], dest_dir: str, verbose: bool) -> None:
     for pat in files:
-        for installer in iglob(pat):
-            if os.path.splitext(installer)[1] == ".egg":
-                conv = egg2wheel
+        for archive in iglob(pat):
+            path = Path(archive)
+            if path.suffix == ".egg":
+                if path.is_dir():
+                    source: ConvertSource = EggDirectorySource(path)
+                else:
+                    source = EggFileSource(path)
             else:
-                conv = wininst2wheel
+                source = WininstFileSource(path)
 
             if verbose:
-                print(f"{installer}... ", flush=True)
+                print(f"{archive}...", flush=True, end="")
+
+            dest_path = Path(dest_dir) / (
+                f"{source.name}-{source.version}-{source.pyver}-{source.abi}"
+                f"-{source.platform}.whl"
+            )
+            with WheelFile(dest_path, "w") as wheelfile:
+                for name_or_zinfo, contents in source.generate_contents():
+                    wheelfile.writestr(name_or_zinfo, contents)
+
+                # Write the METADATA file
+                wheelfile.writestr(
+                    f"{source.dist_info_dir}/METADATA",
+                    source.metadata.as_string(policy=serialization_policy).encode(
+                        "utf-8"
+                    ),
+                )
+
+                # Write the WHEEL file
+                wheel_message = Message()
+                wheel_message.add_header("Wheel-Version", "1.0")
+                wheel_message.add_header("Generator", GENERATOR)
+                wheel_message.add_header(
+                    "Root-Is-Purelib", str(source.platform == "any").lower()
+                )
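+                # parse_tag expands a compressed tag set such as
+                # "py2.py3-none-any" into its individual tags, which are
+                # written as one "Tag" header apiece.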
+                tags = parse_tag(f"{source.pyver}-{source.abi}-{source.platform}")
+                for tag in sorted(tags, key=lambda tag: tag.interpreter):
+                    wheel_message.add_header("Tag", str(tag))
+
+                wheelfile.writestr(
+                    f"{source.dist_info_dir}/WHEEL",
+                    wheel_message.as_string(policy=serialization_policy).encode(
+                        "utf-8"
+                    ),
+                )
 
-            conv(installer, dest_dir)
             if verbose:
                 print("OK")
diff --git a/setuptools/_vendor/wheel/macosx_libfile.py b/setuptools/_vendor/wheel/macosx_libfile.py
index 8953c3f805..abdfc9eda1 100644
--- a/setuptools/_vendor/wheel/macosx_libfile.py
+++ b/setuptools/_vendor/wheel/macosx_libfile.py
@@ -43,6 +43,13 @@
 import ctypes
 import os
 import sys
+from io import BufferedIOBase
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Union
+
+    StrPath = Union[str, os.PathLike[str]]
 
 """here the needed const and struct from mach-o header files"""
 
@@ -238,7 +245,7 @@
 """
 
 
-def swap32(x):
+def swap32(x: int) -> int:
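+    # Byte-swap a 32-bit value; Mach-O headers may be big- or little-endian.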
     return (
         ((x << 24) & 0xFF000000)
         | ((x << 8) & 0x00FF0000)
@@ -247,7 +254,10 @@ def swap32(x):
     )
 
 
-def get_base_class_and_magic_number(lib_file, seek=None):
+def get_base_class_and_magic_number(
+    lib_file: BufferedIOBase,
+    seek: int | None = None,
+) -> tuple[type[ctypes.Structure], int]:
     if seek is None:
         seek = lib_file.tell()
     else:
@@ -271,11 +281,11 @@ def get_base_class_and_magic_number(lib_file, seek=None):
     return BaseClass, magic_number
 
 
-def read_data(struct_class, lib_file):
+def read_data(struct_class: type[ctypes.Structure], lib_file: BufferedIOBase):
     return struct_class.from_buffer_copy(lib_file.read(ctypes.sizeof(struct_class)))
 
 
-def extract_macosx_min_system_version(path_to_lib):
+def extract_macosx_min_system_version(path_to_lib: str):
     with open(path_to_lib, "rb") as lib_file:
         BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
         if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
@@ -301,7 +311,7 @@ class FatArch(BaseClass):
                 read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
             ]
 
-            versions_list = []
+            versions_list: list[tuple[int, int, int]] = []
             for el in fat_arch_list:
                 try:
                     version = read_mach_header(lib_file, el.offset)
@@ -333,7 +343,10 @@ class FatArch(BaseClass):
                 return None
 
 
-def read_mach_header(lib_file, seek=None):
+def read_mach_header(
+    lib_file: BufferedIOBase,
+    seek: int | None = None,
+) -> tuple[int, int, int] | None:
     """
     This function parses a Mach-O header and extracts
     information about the minimal macOS version.
@@ -380,14 +393,14 @@ class VersionBuild(base_class):
             continue
 
 
-def parse_version(version):
+def parse_version(version: int) -> tuple[int, int, int]:
     x = (version & 0xFFFF0000) >> 16
     y = (version & 0x0000FF00) >> 8
     z = version & 0x000000FF
     return x, y, z
 
 
-def calculate_macosx_platform_tag(archive_root, platform_tag):
+def calculate_macosx_platform_tag(archive_root: StrPath, platform_tag: str) -> str:
     """
     Calculate proper macosx platform tag basing on files which are included to wheel
 
@@ -420,7 +433,7 @@ def calculate_macosx_platform_tag(archive_root, platform_tag):
 
     assert len(base_version) == 2
     start_version = base_version
-    versions_dict = {}
+    versions_dict: dict[str, tuple[int, int]] = {}
     for dirpath, _dirnames, filenames in os.walk(archive_root):
         for filename in filenames:
             if filename.endswith(".dylib") or filename.endswith(".so"):
diff --git a/setuptools/_vendor/wheel/metadata.py b/setuptools/_vendor/wheel/metadata.py
index 6aa4362808..b8098fa859 100644
--- a/setuptools/_vendor/wheel/metadata.py
+++ b/setuptools/_vendor/wheel/metadata.py
@@ -11,17 +11,17 @@
 import textwrap
 from email.message import Message
 from email.parser import Parser
-from typing import Iterator
+from typing import Generator, Iterable, Iterator, Literal
 
 from .vendored.packaging.requirements import Requirement
 
 
-def _nonblank(str):
+def _nonblank(str: str) -> bool | Literal[""]:
     return str and not str.startswith("#")
 
 
 @functools.singledispatch
-def yield_lines(iterable):
+def yield_lines(iterable: Iterable[str]) -> Iterator[str]:
     r"""
     Yield valid lines of a string or iterable.
     >>> list(yield_lines(''))
@@ -39,11 +39,13 @@ def yield_lines(iterable):
 
 
 @yield_lines.register(str)
-def _(text):
+def _(text: str) -> Iterator[str]:
     return filter(_nonblank, map(str.strip, text.splitlines()))
 
 
-def split_sections(s):
+def split_sections(
+    s: str | Iterator[str],
+) -> Generator[tuple[str | None, list[str]], None, None]:
     """Split a string or iterable thereof into (section, content) pairs
     Each ``section`` is a stripped version of the section header ("[section]")
     and each ``content`` is a list of stripped lines excluding blank lines and
@@ -51,7 +53,7 @@ def split_sections(s):
     header, they're returned in a first ``section`` of ``None``.
     """
     section = None
-    content = []
+    content: list[str] = []
     for line in yield_lines(s):
         if line.startswith("["):
             if line.endswith("]"):
@@ -68,7 +70,7 @@ def split_sections(s):
     yield section, content
 
 
-def safe_extra(extra):
+def safe_extra(extra: str) -> str:
     """Convert an arbitrary string to a standard 'extra' name
     Any runs of non-alphanumeric characters are replaced with a single '_',
     and the result is always lowercased.
@@ -76,7 +78,7 @@ def safe_extra(extra):
     return re.sub("[^A-Za-z0-9.-]+", "_", extra).lower()
 
 
-def safe_name(name):
+def safe_name(name: str) -> str:
     """Convert an arbitrary string to a standard distribution name
     Any runs of non-alphanumeric/. characters are replaced with a single '-'.
     """
@@ -85,10 +87,10 @@ def safe_name(name):
 
 def requires_to_requires_dist(requirement: Requirement) -> str:
     """Return the version specifier for a requirement in PEP 345/566 fashion."""
-    if getattr(requirement, "url", None):
+    if requirement.url:
         return " @ " + requirement.url
 
-    requires_dist = []
+    requires_dist: list[str] = []
     for spec in requirement.specifier:
         requires_dist.append(spec.operator + spec.version)
 
@@ -111,7 +113,7 @@ def convert_requirements(requirements: list[str]) -> Iterator[str]:
 
 
 def generate_requirements(
-    extras_require: dict[str, list[str]],
+    extras_require: dict[str | None, list[str]],
 ) -> Iterator[tuple[str, str]]:
     """
     Convert requirements from a setup()-style dictionary to
@@ -131,13 +133,14 @@ def generate_requirements(
             yield "Provides-Extra", extra
             if condition:
                 condition = "(" + condition + ") and "
-            condition += "extra == '%s'" % extra
+            condition += f"extra == '{extra}'"
 
         if condition:
             condition = " ; " + condition
 
         for new_req in convert_requirements(depends):
-            yield "Requires-Dist", new_req + condition
+            canonical_req = str(Requirement(new_req + condition))
+            yield "Requires-Dist", canonical_req
 
 
 def pkginfo_to_metadata(egg_info_path: str, pkginfo_path: str) -> Message:
diff --git a/setuptools/_vendor/wheel/util.py b/setuptools/_vendor/wheel/util.py
index d98d98cb52..c928aa403b 100644
--- a/setuptools/_vendor/wheel/util.py
+++ b/setuptools/_vendor/wheel/util.py
@@ -5,15 +5,6 @@
 
 log = logging.getLogger("wheel")
 
-# ensure Python logging is configured
-try:
-    __import__("setuptools.logging")
-except ImportError:
-    # setuptools < ??
-    from . import _setuptools_logging
-
-    _setuptools_logging.configure()
-
 
 def urlsafe_b64encode(data: bytes) -> bytes:
     """urlsafe_b64encode without padding"""
diff --git a/setuptools/_vendor/wheel/vendored/packaging/LICENSE b/setuptools/_vendor/wheel/vendored/packaging/LICENSE
new file mode 100644
index 0000000000..6f62d44e4e
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/LICENSE
@@ -0,0 +1,3 @@
+This software is made available under the terms of *either* of the licenses
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
+under the terms of *both* these licenses.
diff --git a/setuptools/_vendor/wheel/vendored/packaging/LICENSE.APACHE b/setuptools/_vendor/wheel/vendored/packaging/LICENSE.APACHE
new file mode 100644
index 0000000000..f433b1a53f
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/LICENSE.APACHE
@@ -0,0 +1,177 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
diff --git a/setuptools/_vendor/wheel/vendored/packaging/LICENSE.BSD b/setuptools/_vendor/wheel/vendored/packaging/LICENSE.BSD
new file mode 100644
index 0000000000..42ce7b75c9
--- /dev/null
+++ b/setuptools/_vendor/wheel/vendored/packaging/LICENSE.BSD
@@ -0,0 +1,23 @@
+Copyright (c) Donald Stufft and individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+    1. Redistributions of source code must retain the above copyright notice,
+       this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+       notice, this list of conditions and the following disclaimer in the
+       documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/setuptools/_vendor/wheel/wheelfile.py b/setuptools/_vendor/wheel/wheelfile.py
index 6440e90ade..0a0f4596c5 100644
--- a/setuptools/_vendor/wheel/wheelfile.py
+++ b/setuptools/_vendor/wheel/wheelfile.py
@@ -7,11 +7,22 @@
 import stat
 import time
 from io import StringIO, TextIOWrapper
+from typing import IO, TYPE_CHECKING, Literal
 from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
 
 from wheel.cli import WheelError
 from wheel.util import log, urlsafe_b64decode, urlsafe_b64encode
 
+if TYPE_CHECKING:
+    from typing import Protocol, Sized, Union
+
+    from typing_extensions import Buffer
+
+    StrPath = Union[str, os.PathLike[str]]
+
+    class SizedBuffer(Sized, Buffer, Protocol): ...
+
+
 # Non-greedy matching of an optional build number may be too clever (more
 # invalid wheel filenames will match). Separate regex for .dist-info?
 WHEEL_INFO_RE = re.compile(
@@ -22,7 +33,7 @@
 MINIMUM_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC
 
 
-def get_zipinfo_datetime(timestamp=None):
+def get_zipinfo_datetime(timestamp: float | None = None):
     # Some applications need reproducible .whl files, but they can't do this without
     # forcing the timestamp of the individual ZipInfo objects. See issue #143.
     timestamp = int(os.environ.get("SOURCE_DATE_EPOCH", timestamp or time.time()))
@@ -37,7 +48,12 @@ class WheelFile(ZipFile):
 
     _default_algorithm = hashlib.sha256
 
-    def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
+    def __init__(
+        self,
+        file: StrPath,
+        mode: Literal["r", "w", "x", "a"] = "r",
+        compression: int = ZIP_DEFLATED,
+    ):
         basename = os.path.basename(file)
         self.parsed_filename = WHEEL_INFO_RE.match(basename)
         if not basename.endswith(".whl") or self.parsed_filename is None:
@@ -49,7 +65,7 @@ def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
             self.parsed_filename.group("namever")
         )
         self.record_path = self.dist_info_path + "/RECORD"
-        self._file_hashes = {}
+        self._file_hashes: dict[str, tuple[None, None] | tuple[int, bytes]] = {}
         self._file_sizes = {}
         if mode == "r":
             # Ignore RECORD and any embedded wheel signatures
@@ -90,8 +106,13 @@ def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
                         urlsafe_b64decode(hash_sum.encode("ascii")),
                     )
 
-    def open(self, name_or_info, mode="r", pwd=None):
-        def _update_crc(newdata):
+    def open(
+        self,
+        name_or_info: str | ZipInfo,
+        mode: Literal["r", "w"] = "r",
+        pwd: bytes | None = None,
+    ) -> IO[bytes]:
+        def _update_crc(newdata: bytes) -> None:
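+            # Mirror every decompressed chunk into a running hash so the
+            # entry's RECORD digest can be verified as it is read.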
             eof = ef._eof
             update_crc_orig(newdata)
             running_hash.update(newdata)
@@ -119,9 +140,9 @@ def _update_crc(newdata):
 
         return ef
 
-    def write_files(self, base_dir):
+    def write_files(self, base_dir: str):
         log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
-        deferred = []
+        deferred: list[tuple[str, str]] = []
         for root, dirnames, filenames in os.walk(base_dir):
             # Sort the directory names so that `os.walk` will walk them in a
             # defined order on the next iteration.
@@ -141,7 +162,12 @@ def write_files(self, base_dir):
         for path, arcname in deferred:
             self.write(path, arcname)
 
-    def write(self, filename, arcname=None, compress_type=None):
+    def write(
+        self,
+        filename: str,
+        arcname: str | None = None,
+        compress_type: int | None = None,
+    ) -> None:
         with open(filename, "rb") as f:
             st = os.fstat(f.fileno())
             data = f.read()
@@ -153,7 +179,12 @@ def write(self, filename, arcname=None, compress_type=None):
         zinfo.compress_type = compress_type or self.compression
         self.writestr(zinfo, data, compress_type)
 
-    def writestr(self, zinfo_or_arcname, data, compress_type=None):
+    def writestr(
+        self,
+        zinfo_or_arcname: str | ZipInfo,
+        data: SizedBuffer | str,
+        compress_type: int | None = None,
+    ):
         if isinstance(zinfo_or_arcname, str):
             zinfo_or_arcname = ZipInfo(
                 zinfo_or_arcname, date_time=get_zipinfo_datetime()

From 5585c1c1f603e04ad778fc72066fad45c7366233 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Mar 2025 18:03:18 +0000
Subject: [PATCH 1541/1761] Add news fragment

---
 newsfragments/4869.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4869.feature.rst

diff --git a/newsfragments/4869.feature.rst b/newsfragments/4869.feature.rst
new file mode 100644
index 0000000000..12b28ed41a
--- /dev/null
+++ b/newsfragments/4869.feature.rst
@@ -0,0 +1 @@
+Updated vendored copy of ``wheel`` to ``v0.45.1``.

From dee0a5e7e41b0eedc2bd4f754098140044748482 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Mar 2025 18:12:30 +0000
Subject: [PATCH 1542/1761] Add news fragment for PEP 639 marking as 'breaking'

---
 newsfragments/4829.removal.rst | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100644 newsfragments/4829.removal.rst

diff --git a/newsfragments/4829.removal.rst b/newsfragments/4829.removal.rst
new file mode 100644
index 0000000000..3d31a6da3f
--- /dev/null
+++ b/newsfragments/4829.removal.rst
@@ -0,0 +1,6 @@
+Added initial implementation of :pep:`639`.
+Users relying on pre-:pep:`639` implementation details
+(like precise license file paths inside ``dist-info`` directory)
+may need to adjust their code base to avoid problems.
+Deprecations and stronger validation were also introduced
+(see details in the **Features** section).

From f49d58939a249bf3571992e18a7c237eea27d5f7 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Mar 2025 18:17:10 +0000
Subject: [PATCH 1543/1761] Update URL in warning

---
 setuptools/dist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index d457d5ebe7..320abb7f80 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -509,7 +509,7 @@ def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
                 f"Pattern {pattern!r} should be relative and must not start with '/'"
             )
         if re.match(r'^[\w\-\.\/\*\?\[\]]+$', pattern) is None:
-            pypa_guides = "specifications/pyproject-toml/#license-files"
+            pypa_guides = "specifications/glob-patterns/"
             SetuptoolsDeprecationWarning.emit(
                 "Please provide a valid glob pattern.",
                 "Pattern {pattern!r} contains invalid characters.",

From 5d58b454a5f720f9afa09b47fe15913d0bef8cc4 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Mar 2025 18:18:09 +0000
Subject: [PATCH 1544/1761] =?UTF-8?q?Bump=20version:=2076.1.0=20=E2=86=92?=
 =?UTF-8?q?=2077.0.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 43 ++++++++++++++++++++++++++++++++++
 newsfragments/4706.feature.rst |  1 -
 newsfragments/4728.feature.rst |  1 -
 newsfragments/4734.misc.rst    |  1 -
 newsfragments/4829.removal.rst |  6 -----
 newsfragments/4830.feature.rst |  1 -
 newsfragments/4833.feature.rst |  2 --
 newsfragments/4837.feature.rst |  3 ---
 newsfragments/4838.feature.rst |  4 ----
 newsfragments/4840.feature.rst |  5 ----
 newsfragments/4869.feature.rst |  1 -
 pyproject.toml                 |  2 +-
 13 files changed, 45 insertions(+), 27 deletions(-)
 delete mode 100644 newsfragments/4706.feature.rst
 delete mode 100644 newsfragments/4728.feature.rst
 delete mode 100644 newsfragments/4734.misc.rst
 delete mode 100644 newsfragments/4829.removal.rst
 delete mode 100644 newsfragments/4830.feature.rst
 delete mode 100644 newsfragments/4833.feature.rst
 delete mode 100644 newsfragments/4837.feature.rst
 delete mode 100644 newsfragments/4838.feature.rst
 delete mode 100644 newsfragments/4840.feature.rst
 delete mode 100644 newsfragments/4869.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 91f2c2aed5..b0f3f1ea63 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 76.1.0
+current_version = 77.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 61f4e4d776..393dc48efe 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,46 @@
+v77.0.0
+=======
+
+Features
+--------
+
+- Added initial support for license expression (:pep:`PEP 639 <639#add-license-expression-field>`). -- by :user:`cdce8p` (#4706)
+- Store ``License-File``s in ``.dist-info/licenses`` subfolder and added support for recursive globs for ``license_files`` (:pep:`PEP 639 <639#add-license-expression-field>`). -- by :user:`cdce8p` (#4728)
+- Bump core metadata version to ``2.4``. -- by :user:`cdce8p` (#4830)
+- Added exception (or warning) when deprecated license classifiers are used,
+  according to `PEP 639 <https://peps.python.org/pep-0639/>`_. (#4833)
+- Deprecated ``tools.setuptools.license-files`` in favor of ``project.license-files``
+  and added exception if ``project.license-files`` and ``tools.setuptools.license-files``
+  are used together. -- by :user:`cdce8p` (#4837)
+- Added simple validation for given glob patterns in ``license-files``:
+  a warning will be generated if no file is matched.
+  Invalid glob patterns can raise an exception.
+  -- thanks :user:`cdce8p` for contributions. (#4838)
+- Deprecated ``project.license`` as a TOML table in
+  ``pyproject.toml``. Users are expected to move towards using
+  ``project.license-files`` and/or SPDX expressions (as strings) in
+  ``pyproject.license``.
+  See :pep:`PEP 639 <639#deprecate-license-key-table-subkeys>`. (#4840)
+- Updated vendored copy of ``wheel`` to ``v0.45.1``. (#4869)
+
+
+Deprecations and Removals
+-------------------------
+
+- Added initial implementation of :pep:`639`.
+  Users relying on pre-:pep:`639` implementation details
+  (like precise license file paths inside ``dist-info`` directory)
+  may need to adjust their code base to avoid problems.
+  Deprecations and stronger validation were also introduced
+  (see details in the **Features** section). (#4829)
+
+
+Misc
+----
+
+- #4734
+
+
 v76.1.0
 =======
 
diff --git a/newsfragments/4706.feature.rst b/newsfragments/4706.feature.rst
deleted file mode 100644
index be8aea6456..0000000000
--- a/newsfragments/4706.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Added initial support for license expression (:pep:`PEP 639 <639#add-license-expression-field>`). -- by :user:`cdce8p`
diff --git a/newsfragments/4728.feature.rst b/newsfragments/4728.feature.rst
deleted file mode 100644
index ea19b31a36..0000000000
--- a/newsfragments/4728.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Store ``License-File``s in ``.dist-info/licenses`` subfolder and added support for recursive globs for ``license_files`` (:pep:`PEP 639 <639#add-license-expression-field>`). -- by :user:`cdce8p`
diff --git a/newsfragments/4734.misc.rst b/newsfragments/4734.misc.rst
deleted file mode 100644
index 3b3f2c94f3..0000000000
--- a/newsfragments/4734.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Updated ``pyproject.toml`` validation via ``validate-pyproject`` v0.23.0.
diff --git a/newsfragments/4829.removal.rst b/newsfragments/4829.removal.rst
deleted file mode 100644
index 3d31a6da3f..0000000000
--- a/newsfragments/4829.removal.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Added initial implementation of :pep:`639`.
-Users relying on pre-:pep:`639` implementation details
-(like precise license file paths inside ``dist-info`` directory)
-may need to adjust their code base to avoid problems.
-Deprecations and stronger validation were also introduced
-(see details in the **Features** section).
diff --git a/newsfragments/4830.feature.rst b/newsfragments/4830.feature.rst
deleted file mode 100644
index f21d17515a..0000000000
--- a/newsfragments/4830.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Bump core metadata version to ``2.4``. -- by :user:`cdce8p`
diff --git a/newsfragments/4833.feature.rst b/newsfragments/4833.feature.rst
deleted file mode 100644
index d8801becf7..0000000000
--- a/newsfragments/4833.feature.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Added exception (or warning) when deprecated license classifiers are used,
-according to `PEP 639 `_.
diff --git a/newsfragments/4837.feature.rst b/newsfragments/4837.feature.rst
deleted file mode 100644
index 4ad97b9513..0000000000
--- a/newsfragments/4837.feature.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Deprecated ``tools.setuptools.license-files`` in favor of ``project.license-files``
-and added exception if ``project.license-files`` and ``tools.setuptools.license-files``
-are used together. -- by :user:`cdce8p`
diff --git a/newsfragments/4838.feature.rst b/newsfragments/4838.feature.rst
deleted file mode 100644
index 31aa8ba43b..0000000000
--- a/newsfragments/4838.feature.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Added simple validation for given glob patterns in ``license-files``:
-a warning will be generated if no file is matched.
-Invalid glob patterns can raise an exception.
--- thanks :user:`cdce8p` for contributions.
diff --git a/newsfragments/4840.feature.rst b/newsfragments/4840.feature.rst
deleted file mode 100644
index a033fd2afb..0000000000
--- a/newsfragments/4840.feature.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Deprecated ``project.license`` as a TOML table in
-``pyproject.toml``. Users are expected to move towards using
-``project.license-files`` and/or SPDX expressions (as strings) in
-``pyproject.license``.
-See :pep:`PEP 639 <639#deprecate-license-key-table-subkeys>`.
diff --git a/newsfragments/4869.feature.rst b/newsfragments/4869.feature.rst
deleted file mode 100644
index 12b28ed41a..0000000000
--- a/newsfragments/4869.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Updated vendored copy of ``wheel`` to ``v0.45.1``.
diff --git a/pyproject.toml b/pyproject.toml
index f70a6350cc..7f2ed85cd1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "76.1.0"
+version = "77.0.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 7db26a1a5148ef57c8471a410f57258cad11b336 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Mar 2025 18:39:21 +0000
Subject: [PATCH 1545/1761] Manually fix news fragment entries

- Fix weird `../NEWS (links).rst:9: WARNING: Inline literal start-string without end-string.` error in v77.0.0 CI.
- Reorder changelog entries for v77.0.0.
  See rationale in https://github.com/jaraco/skeleton/issues/169
---
 NEWS.rst | 43 +++++++++++++++++++++----------------------
 1 file changed, 21 insertions(+), 22 deletions(-)

diff --git a/NEWS.rst b/NEWS.rst
index 393dc48efe..d69768c1c5 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -4,35 +4,34 @@ v77.0.0
 Features
 --------
 
-- Added initial support for license expression (:pep:`PEP 639 <639#add-license-expression-field>`). -- by :user:`cdce8p` (#4706)
-- Store ``License-File``s in ``.dist-info/licenses`` subfolder and added support for recursive globs for ``license_files`` (:pep:`PEP 639 <639#add-license-expression-field>`). -- by :user:`cdce8p` (#4728)
-- Bump core metadata version to ``2.4``. -- by :user:`cdce8p` (#4830)
-- Added exception (or warning) when deprecated license classifiers are used,
-  according to `PEP 639 `_. (#4833)
-- Deprecated ``tools.setuptools.license-files`` in favor of ``project.license-files``
-  and added exception if ``project.license-files`` and ``tools.setuptools.license-files``
-  are used together. -- by :user:`cdce8p` (#4837)
-- Added simple validation for given glob patterns in ``license-files``:
-  a warning will be generated if no file is matched.
-  Invalid glob patterns can raise an exception.
-  -- thanks :user:`cdce8p` for contributions. (#4838)
-- Deprecated ``project.license`` as a TOML table in
-  ``pyproject.toml``. Users are expected to move towards using
-  ``project.license-files`` and/or SPDX expressions (as strings) in
-  ``pyproject.license``.
-  See :pep:`PEP 639 <639#deprecate-license-key-table-subkeys>`. (#4840)
-- Updated vendored copy of ``wheel`` to ``v0.45.1``. (#4869)
+- Added initial support for license expression (PEP :pep:`639 <639#add-license-expression-field>`). -- by :user:`cdce8p` (#4706)
+- Store ``License-File``\s in ``.dist-info/licenses`` subfolder and added support for recursive globs for ``license_files`` (PEP :pep:`639 <639#add-license-expression-field>`). -- by :user:`cdce8p` (#4728)
+- Bump core metadata version to ``2.4``\. -- by :user:`cdce8p` (#4830)
+- Updated vendored copy of ``wheel`` to ``v0.45.1``\. (#4869)
 
 
 Deprecations and Removals
 -------------------------
 
 - Added initial implementation of :pep:`639`.
-  Users relying on pre-:pep:`639` implementation details
+  Users relying on pre- :pep:`639` implementation details
   (like precise license file paths inside ``dist-info`` directory)
   may need to adjust their code base to avoid problems.
-  Deprecations and stronger validation were also introduced
-  (see details in the **Features** section). (#4829)
+  Deprecations and stronger validation were also introduced (#4829).
+- Added exception (or warning) when deprecated license classifiers are used,
+  according to PEP :pep:`639 <639#deprecate-license-classifiers>`. (#4833)
+- Deprecated ``tools.setuptools.license-files`` in favor of ``project.license-files``
+  and added exception if ``project.license-files`` and ``tools.setuptools.license-files``
+  are used together. -- by :user:`cdce8p` (#4837)
+- Deprecated ``project.license`` as a TOML table in
+  ``pyproject.toml``\. Users are expected to move towards using
+  ``project.license-files`` and/or SPDX expressions (as strings) in
+  ``pyproject.license``\.
+  See PEP :pep:`639 <639#deprecate-license-key-table-subkeys>`. (#4840)
+- Added simple validation for given glob patterns in ``license-files``\:
+  a warning will be generated if no file is matched.
+  Invalid glob patterns can raise an exception.
+  -- thanks :user:`cdce8p` for contributions. (#4838)
 
 
 Misc
@@ -1918,7 +1917,7 @@ Changes
 
   .. warning::
      Please note that future releases of setuptools will halt the build process
-     if a ``pyproject.toml`` file that does not match doc:`the PyPA Specification
+     if a ``pyproject.toml`` file that does not match :doc:`the PyPA Specification
      ` is given.
 * #3215: Updated ``pyproject.toml`` validation, as generated by ``validate-pyproject==0.6.1``.
 * #3218: Prevented builds from erroring if the project specifies metadata via

From 76531493d84c27461b73589e6a4e54c56d0cd6fb Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Mar 2025 19:17:56 +0000
Subject: [PATCH 1546/1761] Add news fragments

---
 newsfragments/4891.doc.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4891.doc.rst

diff --git a/newsfragments/4891.doc.rst b/newsfragments/4891.doc.rst
new file mode 100644
index 0000000000..b3c1c1ee65
--- /dev/null
+++ b/newsfragments/4891.doc.rst
@@ -0,0 +1 @@
+Manually fix news fragment entries causing CI to crash when building docs.

From ce2e283acfef4d87eee63b5f745f81fc0904342c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Mar 2025 19:28:25 +0000
Subject: [PATCH 1547/1761] Change news fragment name to imply patch version
 bump

---
 newsfragments/{4891.doc.rst => 4891.bugfix.rst} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename newsfragments/{4891.doc.rst => 4891.bugfix.rst} (100%)

diff --git a/newsfragments/4891.doc.rst b/newsfragments/4891.bugfix.rst
similarity index 100%
rename from newsfragments/4891.doc.rst
rename to newsfragments/4891.bugfix.rst

From f57746186b1476fac7701490f3e8c23bd0eea491 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Wed, 19 Mar 2025 19:30:17 +0000
Subject: [PATCH 1548/1761] =?UTF-8?q?Bump=20version:=2077.0.0=20=E2=86=92?=
 =?UTF-8?q?=2077.0.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4891.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4891.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index b0f3f1ea63..5354c2d385 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 77.0.0
+current_version = 77.0.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index d69768c1c5..08fa321d2e 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v77.1.1
+=======
+
+Bugfixes
+--------
+
+- Manually fix news fragment entries causing CI to crash when building docs. (#4891)
+
+
 v77.0.0
 =======
 
diff --git a/newsfragments/4891.bugfix.rst b/newsfragments/4891.bugfix.rst
deleted file mode 100644
index b3c1c1ee65..0000000000
--- a/newsfragments/4891.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Manually fix news fragment entries causing CI to crash when building docs.
diff --git a/pyproject.toml b/pyproject.toml
index 7f2ed85cd1..3de9e8e1e5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "77.0.0"
+version = "77.0.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From be45d76938696a860ee81b06f357bdd4f92a0e7b Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Tue, 11 Mar 2025 19:38:41 -0400
Subject: [PATCH 1549/1761] Bugfix: MSVC compiler no longer sets
 shared_lib_format

---
 distutils/compilers/C/msvc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/compilers/C/msvc.py b/distutils/compilers/C/msvc.py
index 795a47fdbe..6db062a9e7 100644
--- a/distutils/compilers/C/msvc.py
+++ b/distutils/compilers/C/msvc.py
@@ -257,7 +257,7 @@ class Compiler(base.Compiler):
     obj_extension = '.obj'
     static_lib_extension = '.lib'
     shared_lib_extension = '.dll'
-    static_lib_format = static_lib_format = '%s%s'
+    static_lib_format = shared_lib_format = '%s%s'
     exe_extension = '.exe'
 
     def __init__(self, verbose=False, dry_run=False, force=False) -> None:

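Note: the fixed line is a chained assignment. In Python, `a = b = value` binds every name on the left to the same value, so the typo `static_lib_format = static_lib_format = '%s%s'` bound one name twice and left `shared_lib_format` to be inherited from the base class (a None placeholder in the classic distutils base compiler, stated here as an assumption). A minimal reproduction:

class Base:
    shared_lib_format = None  # base-class placeholder

class Msvc(Base):
    # the typo: both assignment targets are the same name
    static_lib_format = static_lib_format = '%s%s'

print(Msvc.static_lib_format)  # '%s%s'
print(Msvc.shared_lib_format)  # None -- never set, the bug being fixed
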
From 62d4512f50158542eb605f84cd296643869433de Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Mar 2025 18:52:33 -0400
Subject: [PATCH 1550/1761] Add news fragment.

---
 newsfragments/4885.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4885.bugfix.rst

diff --git a/newsfragments/4885.bugfix.rst b/newsfragments/4885.bugfix.rst
new file mode 100644
index 0000000000..bbde0423ed
--- /dev/null
+++ b/newsfragments/4885.bugfix.rst
@@ -0,0 +1 @@
+Fixed copy pasta in msvc.shared_lib_format.

From 89a42e319b81a0326306c7d3928ae4d100862156 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Mar 2025 18:53:07 -0400
Subject: [PATCH 1551/1761] Remove news fragments, no longer needed.

---
 newsfragments/4871.bugfix.rst | 1 -
 newsfragments/4876.bugfix.rst | 1 -
 2 files changed, 2 deletions(-)
 delete mode 100644 newsfragments/4871.bugfix.rst
 delete mode 100644 newsfragments/4876.bugfix.rst

diff --git a/newsfragments/4871.bugfix.rst b/newsfragments/4871.bugfix.rst
deleted file mode 100644
index a4d8b8bab1..0000000000
--- a/newsfragments/4871.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restored implicit distutils.ccompiler import for g-ir-scanner.
diff --git a/newsfragments/4876.bugfix.rst b/newsfragments/4876.bugfix.rst
deleted file mode 100644
index c9cf58f8c1..0000000000
--- a/newsfragments/4876.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restore `distutils.ccompiler.compiler_class` -- by :user:`Avasam`

From 742f566b53f0ed48027defc0e42ef569b9824679 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Mar 2025 18:57:02 -0400
Subject: [PATCH 1552/1761] Fixed __all__.

---
 distutils/ccompiler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 9cf0f4ce54..33efc25183 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -19,7 +19,7 @@
     "get_default_compiler",
     "new_compiler",
     "show_compilers",
-    _default_compilers,
+    "_default_compilers",
 ]
 
 

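Note: `__all__` must hold the *names* of public objects as strings. The unquoted `_default_compilers` placed the object itself in the list, which breaks star imports. Reduced illustration:

# mod.py
value = 42
__all__ = [value]  # bug: the object itself, not the string "value"

# consumer:
#   from mod import *
# raises roughly: TypeError: Item in mod.__all__ must be str, not int
# (exact wording varies across Python versions)
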
From 55ccc6a11f1a3aa11b292bacee14abc6f13d3330 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Mar 2025 19:04:07 -0400
Subject: [PATCH 1553/1761] Add a regression test.

---
 distutils/compilers/C/base.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/distutils/compilers/C/base.py b/distutils/compilers/C/base.py
index b03455a6c6..d46410186a 100644
--- a/distutils/compilers/C/base.py
+++ b/distutils/compilers/C/base.py
@@ -236,6 +236,9 @@ def _check_macro_definition(self, defn):
     def _is_valid_macro(name, value=None):
         """
         A valid macro is a ``name : str`` and a ``value : str | None``.
+
+        >>> Compiler._is_valid_macro('foo', None)
+        True
         """
         return isinstance(name, str) and isinstance(value, (str, type(None)))
 

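Note: placing the example in the docstring makes it a regression test wherever doctests are collected (e.g. pytest with --doctest-modules). The stdlib runner can exercise it directly; a sketch, assuming the module path from the diff above is importable:

import doctest
from distutils.compilers.C import base

# fails loudly if Compiler._is_valid_macro('foo', None) stops returning True
print(doctest.testmod(base))
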
From 8c659145d699bcca201eb0f308471da199f3d616 Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 19 Mar 2025 19:52:59 -0400
Subject: [PATCH 1554/1761] Remove Python 3.7 code paths

---
 distutils/command/install.py | 12 ++----------
 distutils/sysconfig.py       | 10 ----------
 2 files changed, 2 insertions(+), 20 deletions(-)

diff --git a/distutils/command/install.py b/distutils/command/install.py
index b09048cf5b..54bbdf6ecd 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -145,7 +145,7 @@ def _resolve_scheme(name):
     try:
         resolved = sysconfig.get_preferred_scheme(key)
     except Exception:
-        resolved = fw.scheme(_pypy_hack(name))
+        resolved = fw.scheme(name)
     return resolved
 
 
@@ -162,7 +162,7 @@ def _inject_headers(name, scheme):
     """
     # Bypass the preferred scheme, which may not
     # have defined headers.
-    fallback = _load_scheme(_pypy_hack(name))
+    fallback = _load_scheme(name)
     scheme.setdefault('headers', fallback['headers'])
     return scheme
 
@@ -172,14 +172,6 @@ def _scheme_attrs(scheme):
     return {f'install_{key}': scheme[key] for key in SCHEME_KEYS}
 
 
-def _pypy_hack(name):
-    PY37 = sys.version_info < (3, 8)
-    old_pypy = hasattr(sys, 'pypy_version_info') and PY37
-    prefix = not name.endswith(('_user', '_home'))
-    pypy_name = 'pypy' + '_nt' * (os.name == 'nt')
-    return pypy_name if old_pypy and prefix else name
-
-
 class install(Command):
     description = "install everything from build directory"
 
diff --git a/distutils/sysconfig.py b/distutils/sysconfig.py
index e5facaecd3..7ddc869ab5 100644
--- a/distutils/sysconfig.py
+++ b/distutils/sysconfig.py
@@ -156,8 +156,6 @@ def _extant(path):
 
 
 def _get_python_inc_posix(prefix, spec_prefix, plat_specific):
-    if IS_PYPY and sys.version_info < (3, 8):
-        return os.path.join(prefix, 'include')
     return (
         _get_python_inc_posix_python(plat_specific)
         or _extant(_get_python_inc_from_config(plat_specific, spec_prefix))
@@ -246,14 +244,6 @@ def get_python_lib(
     sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
     """
 
-    if IS_PYPY and sys.version_info < (3, 8):
-        # PyPy-specific schema
-        if prefix is None:
-            prefix = PREFIX
-        if standard_lib:
-            return os.path.join(prefix, "lib-python", sys.version_info.major)
-        return os.path.join(prefix, 'site-packages')
-
     early_prefix = prefix
 
     if prefix is None:

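Note: every removed branch was gated on `sys.version_info < (3, 8)`, which is False on any interpreter this codebase still supports, so `_pypy_hack` always returned `name` unchanged and the PyPy branches in sysconfig were unreachable. Quick check:

import sys

old_pypy = hasattr(sys, 'pypy_version_info') and sys.version_info < (3, 8)
print(old_pypy)  # False on any supported interpreter: the branch was dead code
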
From 205a3ecc960f709d0811aa08f097b9537d36c107 Mon Sep 17 00:00:00 2001
From: maniacdc 
Date: Wed, 19 Mar 2025 19:54:41 -0400
Subject: [PATCH 1555/1761] * fixed import order * added documentation to
 the imports explaining why they are there, and when they can be removed.

---
 distutils/ccompiler.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 33efc25183..6f5bbde643 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -1,25 +1,28 @@
 from .compilers.C import base
 from .compilers.C.base import (
+    # `_default_compilers` is needed by numpy.distutils, which is supported until
+    #  Python 3.11 is deprecated. This import & export can be removed when
+    #  Python 3.11 is no longer supported by distutils.
+    _default_compilers,
     compiler_class,
     gen_lib_options,
     gen_preprocess_options,
     get_default_compiler,
     new_compiler,
     show_compilers,
-    _default_compilers,
 )
 from .compilers.C.errors import CompileError, LinkError
 
 __all__ = [
     "CompileError",
     "LinkError",
+    "_default_compilers",
     "compiler_class",
     "gen_lib_options",
     "gen_preprocess_options",
     "get_default_compiler",
     "new_compiler",
     "show_compilers",
-    "_default_compilers",
 ]
 
 

From 3a5038b7f4c98f118e6a67b458b40c26326ce1af Mon Sep 17 00:00:00 2001
From: Avasam 
Date: Wed, 19 Mar 2025 20:19:29 -0400
Subject: [PATCH 1556/1761] Re-enable mypy with simple non-runtime fixes

---
 distutils/compilers/C/base.py     |  8 +++---
 distutils/compilers/C/unix.py     |  2 +-
 distutils/dist.py                 |  3 ++-
 distutils/tests/test_build_ext.py |  2 +-
 mypy.ini                          | 41 +++++++++++++++++++++++++++++++
 pyproject.toml                    |  6 +----
 6 files changed, 50 insertions(+), 12 deletions(-)

diff --git a/distutils/compilers/C/base.py b/distutils/compilers/C/base.py
index d46410186a..5efd2a39d6 100644
--- a/distutils/compilers/C/base.py
+++ b/distutils/compilers/C/base.py
@@ -120,12 +120,12 @@ class Compiler:
     }
     language_order: ClassVar[list[str]] = ["c++", "objc", "c"]
 
-    include_dirs = []
+    include_dirs: list[str] = []
     """
     include dirs specific to this compiler class
     """
 
-    library_dirs = []
+    library_dirs: list[str] = []
     """
     library dirs specific to this compiler class
     """
@@ -148,14 +148,14 @@ def __init__(
         self.macros: list[_Macro] = []
 
         # 'include_dirs': a list of directories to search for include files
-        self.include_dirs: list[str] = []
+        self.include_dirs = []
 
         # 'libraries': a list of libraries to include in any link
         # (library names, not filenames: eg. "foo" not "libfoo.a")
         self.libraries: list[str] = []
 
         # 'library_dirs': a list of directories to search for libraries
-        self.library_dirs: list[str] = []
+        self.library_dirs = []
 
         # 'runtime_library_dirs': a list of directories to search for
         # shared libraries/objects at runtime
diff --git a/distutils/compilers/C/unix.py b/distutils/compilers/C/unix.py
index 77ff4edc42..e8a53d452f 100644
--- a/distutils/compilers/C/unix.py
+++ b/distutils/compilers/C/unix.py
@@ -323,7 +323,7 @@ def _is_gcc(self):
         compiler = os.path.basename(shlex.split(cc_var)[0])
         return "gcc" in compiler or "g++" in compiler
 
-    def runtime_library_dir_option(self, dir: str) -> str | list[str]:
+    def runtime_library_dir_option(self, dir: str) -> str | list[str]:  # type: ignore[override] # Fixed in pypa/distutils#339
         # XXX Hackish, at the very least.  See Python bug #445902:
         # https://bugs.python.org/issue445902
         # Linkers on different platforms need different options to
diff --git a/distutils/dist.py b/distutils/dist.py
index 69d42016a1..37b788df92 100644
--- a/distutils/dist.py
+++ b/distutils/dist.py
@@ -183,7 +183,7 @@ def __init__(self, attrs: MutableMapping[str, Any] | None = None) -> None:  # no
         # can 1) quickly figure out which class to instantiate when
         # we need to create a new command object, and 2) have a way
         # for the setup script to override command classes
-        self.cmdclass = {}
+        self.cmdclass: dict[str, type[Command]] = {}
 
         # 'command_packages' is a list of packages in which commands
         # are searched for.  The factory for command 'foo' is expected
@@ -1168,6 +1168,7 @@ def _read_field(name: str) -> str | None:
             value = msg[name]
             if value and value != "UNKNOWN":
                 return value
+            return None
 
         def _read_list(name):
             values = msg.get_all(name, None)
diff --git a/distutils/tests/test_build_ext.py b/distutils/tests/test_build_ext.py
index 24f7125b61..dab0507f3d 100644
--- a/distutils/tests/test_build_ext.py
+++ b/distutils/tests/test_build_ext.py
@@ -146,7 +146,7 @@ def test_build_ext(self, copy_so):
 
     @staticmethod
     def _test_xx(copy_so):
-        import xx
+        import xx  # type: ignore[import-not-found] # Module generated for tests
 
         for attr in ('error', 'foo', 'new', 'roj'):
             assert hasattr(xx, attr)
diff --git a/mypy.ini b/mypy.ini
index efcb8cbc20..2a8826b3df 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -13,3 +13,44 @@ explicit_package_bases = True
 disable_error_code =
 	# Disable due to many false positives
 	overload-overlap,
+
+# local
+
+	# TODO: Resolve and re-enable these gradually
+	operator,
+	attr-defined,
+	arg-type,
+	assignment,
+	call-overload,
+	return-value,
+	index,
+	type-var,
+	func-returns-value,
+	union-attr,
+	str-bytes-safe,
+	misc,
+	has-type,
+
+# Is only imported in a test and doesn't seem relevant.
+# TODO: Should we add types-pygments or remove from the test?
+[mypy-pygments.*]
+ignore_missing_imports = True
+
+# stdlib's test module is not typed on typeshed
+[mypy-test.*]
+ignore_missing_imports = True
+
+# https://github.com/jaraco/jaraco.envs/issues/7
+# https://github.com/jaraco/jaraco.envs/pull/8
+[mypy-jaraco.envs.*]
+ignore_missing_imports = True
+
+# https://github.com/jaraco/jaraco.path/issues/2
+# https://github.com/jaraco/jaraco.path/pull/7
+[mypy-jaraco.path.*]
+ignore_missing_imports = True
+
+# https://github.com/jaraco/jaraco.text/issues/17
+# https://github.com/jaraco/jaraco.text/pull/23
+[mypy-jaraco.text.*]
+ignore_missing_imports = True
diff --git a/pyproject.toml b/pyproject.toml
index 9cbb3590b0..cd1ac6fea3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -77,12 +77,8 @@ type = [
 	"pytest-mypy",
 
 	# local
+	"types-docutils",
 ]
 
 
 [tool.setuptools_scm]
-
-
-[tool.pytest-enabler.mypy]
-# Disabled due to jaraco/skeleton#143
-# Disabled as distutils isn't ready yet

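Note: in the compilers/C/base.py hunk above, the annotations move from the __init__ assignments to the class attributes. Declaring the type once at class level lets mypy infer the type of the later instance assignment, so the empty-list defaults need no repeated annotation. The shape of the pattern:

class Compiler:
    include_dirs: list[str] = []  # class-level declaration carries the type

    def __init__(self) -> None:
        self.include_dirs = []  # inferred as list[str] from the class attribute
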
From 1c29b0e45d7353499ab1f2af2c2fa92aeba08235 Mon Sep 17 00:00:00 2001
From: Thanos <111999343+Sachaa-Thanasius@users.noreply.github.com>
Date: Wed, 19 Mar 2025 20:30:33 -0400
Subject: [PATCH 1557/1761] Replace usage of `jaraco.collections.DictStack`
 with `collections.ChainMap`

---
 distutils/command/install.py | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/distutils/command/install.py b/distutils/command/install.py
index b09048cf5b..36d99dce8c 100644
--- a/distutils/command/install.py
+++ b/distutils/command/install.py
@@ -4,6 +4,7 @@
 
 from __future__ import annotations
 
+import collections
 import contextlib
 import itertools
 import os
@@ -13,8 +14,6 @@
 from site import USER_BASE, USER_SITE
 from typing import ClassVar
 
-import jaraco.collections
-
 from ..core import Command
 from ..debug import DEBUG
 from ..errors import DistutilsOptionError, DistutilsPlatformError
@@ -432,12 +431,12 @@ def finalize_options(self) -> None:  # noqa: C901
             local_vars['userbase'] = self.install_userbase
             local_vars['usersite'] = self.install_usersite
 
-        self.config_vars = jaraco.collections.DictStack([
-            fw.vars(),
-            compat_vars,
-            sysconfig.get_config_vars(),
+        self.config_vars = collections.ChainMap(
             local_vars,
-        ])
+            sysconfig.get_config_vars(),
+            compat_vars,
+            fw.vars(),
+        )
 
         self.expand_basedirs()
 

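Note: the two containers resolve lookups in opposite directions, which is why the diff reverses the list. collections.ChainMap returns the first hit scanning left to right, whereas jaraco.collections.DictStack favours the most recently pushed (last) mapping; in both orderings, local_vars wins. Illustration (dict contents invented for the example):

from collections import ChainMap

local_vars = {'prefix': '/custom'}
config_vars = {'prefix': '/usr', 'py_version': '3.13'}

merged = ChainMap(local_vars, config_vars)
print(merged['prefix'])      # '/custom' -- first mapping wins
print(merged['py_version'])  # '3.13'    -- falls through to later mappings
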
From 74d1137f6418053dc87c24f2b27a07fa16c1cc31 Mon Sep 17 00:00:00 2001
From: Thanos <111999343+Sachaa-Thanasius@users.noreply.github.com>
Date: Wed, 19 Mar 2025 20:30:57 -0400
Subject: [PATCH 1558/1761] Remove `jaraco.collections` from the dependencies

---
 pyproject.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 9cbb3590b0..dd0eb01d0e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -22,7 +22,6 @@ dependencies = [
 	"packaging",
 	"jaraco.functools >= 4",
 	"more_itertools",
-	"jaraco.collections",
 ]
 dynamic = ["version"]
 

From e2a30b5f0576483ef51dcb40d6a6f67bafec4572 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Mar 2025 21:37:12 -0400
Subject: [PATCH 1559/1761] Restore quotes.

---
 distutils/ccompiler.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 6f5bbde643..3aeacdb9a9 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -14,15 +14,15 @@
 from .compilers.C.errors import CompileError, LinkError
 
 __all__ = [
-    "CompileError",
-    "LinkError",
-    "_default_compilers",
-    "compiler_class",
-    "gen_lib_options",
-    "gen_preprocess_options",
-    "get_default_compiler",
-    "new_compiler",
-    "show_compilers",
+    'CompileError',
+    'LinkError',
+    '_default_compilers',
+    'compiler_class',
+    'gen_lib_options',
+    'gen_preprocess_options',
+    'get_default_compiler',
+    'new_compiler',
+    'show_compilers',
 ]
 
 

From 731a65edc6f053cfb8cc3a25402f41dff7e4a624 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Mar 2025 21:37:45 -0400
Subject: [PATCH 1560/1761] Rename newsfragment.

---
 newsfragments/{4876.bugfix-2.rst => 4876.bugfix.2.rst} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename newsfragments/{4876.bugfix-2.rst => 4876.bugfix.2.rst} (100%)

diff --git a/newsfragments/4876.bugfix-2.rst b/newsfragments/4876.bugfix.2.rst
similarity index 100%
rename from newsfragments/4876.bugfix-2.rst
rename to newsfragments/4876.bugfix.2.rst

From b48a3b8bda04a392603362e1beb7c0b08f935fbd Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Mar 2025 21:45:24 -0400
Subject: [PATCH 1561/1761] Move numpy compatibility imports to the compat
 package. Remove them from __all__, as that's not needed for compatibility.

---
 distutils/ccompiler.py    | 11 ++++-------
 distutils/compat/numpy.py |  2 ++
 2 files changed, 6 insertions(+), 7 deletions(-)
 create mode 100644 distutils/compat/numpy.py

diff --git a/distutils/ccompiler.py b/distutils/ccompiler.py
index 3aeacdb9a9..58bc6a55e2 100644
--- a/distutils/ccompiler.py
+++ b/distutils/ccompiler.py
@@ -1,10 +1,9 @@
-from .compilers.C import base
-from .compilers.C.base import (
-    # `_default_compilers` is needed by numpy.distutils, which is supported until
-    #  Python 3.11 is deprecated. This import & export can be removed when
-    #  Python 3.11 is no longer supported by distutils.
+from .compat.numpy import (  # noqa: F401
     _default_compilers,
     compiler_class,
+)
+from .compilers.C import base
+from .compilers.C.base import (
     gen_lib_options,
     gen_preprocess_options,
     get_default_compiler,
@@ -16,8 +15,6 @@
 __all__ = [
     'CompileError',
     'LinkError',
-    '_default_compilers',
-    'compiler_class',
     'gen_lib_options',
     'gen_preprocess_options',
     'get_default_compiler',
diff --git a/distutils/compat/numpy.py b/distutils/compat/numpy.py
new file mode 100644
index 0000000000..73eca7acb1
--- /dev/null
+++ b/distutils/compat/numpy.py
@@ -0,0 +1,2 @@
+# required for older numpy versions on Pythons prior to 3.12; see pypa/setuptools#4876
+from ..compilers.C.base import _default_compilers, compiler_class  # noqa: F401

From 0921b0361e60a8380dc7dcb163d8d4b325677fb6 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 19 Mar 2025 21:54:47 -0400
Subject: [PATCH 1562/1761] =?UTF-8?q?Bump=20version:=2077.0.1=20=E2=86=92?=
 =?UTF-8?q?=2077.0.2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg                |  2 +-
 NEWS.rst                        | 10 ++++++++++
 newsfragments/4876.bugfix.2.rst |  1 -
 newsfragments/4885.bugfix.rst   |  1 -
 pyproject.toml                  |  2 +-
 5 files changed, 12 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4876.bugfix.2.rst
 delete mode 100644 newsfragments/4885.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 5354c2d385..dd09ada207 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 77.0.1
+current_version = 77.0.2
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 08fa321d2e..aee4383de2 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,13 @@
+v77.0.2
+=======
+
+Bugfixes
+--------
+
+- Restore `distutils.ccompiler._default_compilers` -- by :user:`ManiacDC` (#4876)
+- Fixed copy pasta in msvc.shared_lib_format. (#4885)
+
+
 v77.1.1
 =======
 
diff --git a/newsfragments/4876.bugfix.2.rst b/newsfragments/4876.bugfix.2.rst
deleted file mode 100644
index 8350c5a736..0000000000
--- a/newsfragments/4876.bugfix.2.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restore `distutils.ccompiler._default_compilers` -- by :user:`ManiacDC`
diff --git a/newsfragments/4885.bugfix.rst b/newsfragments/4885.bugfix.rst
deleted file mode 100644
index bbde0423ed..0000000000
--- a/newsfragments/4885.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fixed copy pasta in msvc.shared_lib_format.
diff --git a/pyproject.toml b/pyproject.toml
index 1a605f8586..20e6ee69c9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "77.0.1"
+version = "77.0.2"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From c3a84dcc31f1ad4d770e2dd2c0404beaa3b7cf2d Mon Sep 17 00:00:00 2001
From: Hang Lei 
Date: Thu, 20 Mar 2025 15:39:23 +0800
Subject: [PATCH 1563/1761] Fix typo

---
 NEWS.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEWS.rst b/NEWS.rst
index aee4383de2..e6ce4362bf 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -8,7 +8,7 @@ Bugfixes
 - Fixed copy pasta in msvc.shared_lib_format. (#4885)
 
 
-v77.1.1
+v77.0.1
 =======
 
 Bugfixes

From dd0c99467a96506f31d25ce98cba16db084bfa57 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 11:30:28 +0000
Subject: [PATCH 1564/1761] Convert error for license files into deprecation
 warning

---
 setuptools/dist.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 320abb7f80..c912776f22 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -496,20 +496,30 @@ def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
         >>> Distribution._find_pattern("../LICENSE.MIT")
         Traceback (most recent call last):
         ...
-        setuptools.errors.InvalidConfigError: ...Pattern '../LICENSE.MIT' cannot contain '..'
+        setuptools.warnings.SetuptoolsDeprecationWarning: ...Pattern '../LICENSE.MIT' cannot contain '..'
         >>> Distribution._find_pattern("LICEN{CSE*")
         Traceback (most recent call last):
         ...
         setuptools.warnings.SetuptoolsDeprecationWarning: ...Pattern 'LICEN{CSE*' contains invalid characters...
         """
+        pypa_guides = "specifications/glob-patterns/"
         if ".." in pattern:
-            raise InvalidConfigError(f"Pattern {pattern!r} cannot contain '..'")
+            SetuptoolsDeprecationWarning.emit(
+                f"Pattern {pattern!r} cannot contain '..'",
+                """
+                According to the new PyPA standards, this glob pattern is invalid.
+                Please ensure the files specified are contained by the root
+                of the Python package (normally marked by `pyproject.toml`).
+                """,
+                see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
+                due_date=(2026, 2, 20),  # Introduced in 2025-03-20
+                # Replace with InvalidConfigError after deprecation
+            )
         if pattern.startswith((os.sep, "/")) or ":\\" in pattern:
             raise InvalidConfigError(
                 f"Pattern {pattern!r} should be relative and must not start with '/'"
             )
         if re.match(r'^[\w\-\.\/\*\?\[\]]+$', pattern) is None:
-            pypa_guides = "specifications/glob-patterns/"
             SetuptoolsDeprecationWarning.emit(
                 "Please provide a valid glob pattern.",
                 "Pattern {pattern!r} contains invalid characters.",

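Note: the emit(..., due_date=...) call implements a time-boxed deprecation: warn during a grace period, then hard-fail once the date passes (the in-diff comment notes the planned switch to InvalidConfigError). A generic sketch of the idea using only the stdlib, with setuptools' internal helper approximated:

import datetime
import warnings

DUE_DATE = datetime.date(2026, 2, 20)

def check_pattern(pattern: str) -> None:
    if ".." not in pattern:
        return
    if datetime.date.today() >= DUE_DATE:
        # after the grace period this becomes a hard error
        raise ValueError(f"Pattern {pattern!r} cannot contain '..'")
    warnings.warn(
        f"Pattern {pattern!r} cannot contain '..' "
        f"(becomes an error after {DUE_DATE})",
        DeprecationWarning,
        stacklevel=2,
    )
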
From 4929334be0e50ab5f18b3836ad1f7bf8063f2d1f Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 11:31:06 +0000
Subject: [PATCH 1565/1761] Avoid '../' when writing license paths to
 wheel/metadata

---
 setuptools/_core_metadata.py      | 11 ++++++++++-
 setuptools/command/bdist_wheel.py |  4 +++-
 2 files changed, 13 insertions(+), 2 deletions(-)

diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 975e9ceaa1..814aaab0d3 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -207,7 +207,8 @@ def write_field(key, value):
     if self.long_description_content_type:
         write_field('Description-Content-Type', self.long_description_content_type)
 
-    self._write_list(file, 'License-File', self.license_files or [])
+    safe_license_files = map(_safe_license_file, self.license_files or [])
+    self._write_list(file, 'License-File', safe_license_files)
     _write_requirements(self, file)
 
     for field, attr in _POSSIBLE_DYNAMIC_FIELDS.items():
@@ -293,6 +294,14 @@ def _distribution_fullname(name: str, version: str) -> str:
     )
 
 
+def _safe_license_file(file):
+    # XXX: Do we need this after the deprecation discussed in #4892??
+    normalized = os.path.normpath(file).replace(os.sep, "/")
+    if "../" in normalized:
+        return os.path.basename(normalized)  # Temporarily restore pre PEP639 behaviour
+    return normalized
+
+
 _POSSIBLE_DYNAMIC_FIELDS = {
     # Core Metadata Field x related Distribution attribute
     "author": "author",
diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 86360b731c..1e3f637bcc 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -23,6 +23,7 @@
 from wheel.wheelfile import WheelFile
 
 from .. import Command, __version__, _shutil
+from .._core_metadata import _safe_license_file
 from .._normalization import safer_name
 from ..warnings import SetuptoolsDeprecationWarning
 from .egg_info import egg_info as egg_info_cls
@@ -582,7 +583,8 @@ def adios(p: str) -> None:
 
         licenses_folder_path = os.path.join(distinfo_path, "licenses")
         for license_path in self.license_paths:
-            dist_info_license_path = os.path.join(licenses_folder_path, license_path)
+            safe_path = _safe_license_file(license_path)
+            dist_info_license_path = os.path.join(licenses_folder_path, safe_path)
             os.makedirs(os.path.dirname(dist_info_license_path), exist_ok=True)
             shutil.copy(license_path, dist_info_license_path)
 

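Note: the helper keeps in-tree relative paths intact but collapses anything that escapes the project root to its basename, restoring the pre-PEP 639 layout for those files. Replayed standalone:

import os

def _safe_license_file(file):
    normalized = os.path.normpath(file).replace(os.sep, "/")
    if "../" in normalized:
        return os.path.basename(normalized)  # pre-PEP 639 behaviour
    return normalized

print(_safe_license_file("licenses/./LICENSE.txt"))  # 'licenses/LICENSE.txt'
print(_safe_license_file("../NOTICE.txt"))           # 'NOTICE.txt'
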
From 4818104e4f69261086d29ebfe8c6d3d8bdbd91c5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 11:33:02 +0000
Subject: [PATCH 1566/1761] Add tests for deprecated license paths

---
 setuptools/tests/test_bdist_wheel.py | 45 ++++++++++++++++++++++++++++
 1 file changed, 45 insertions(+)

diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py
index fefadf143c..2ab4e9cfc6 100644
--- a/setuptools/tests/test_bdist_wheel.py
+++ b/setuptools/tests/test_bdist_wheel.py
@@ -661,3 +661,48 @@ def test_dist_info_provided(dummy_dist, monkeypatch, tmp_path):
     assert expected - files_found == set()
     # Make sure there is no accidental egg-info bleeding into the wheel.
     assert not [path for path in files_found if 'egg-info' in str(path)]
+
+
+def test_allow_grace_period_parent_directory_license(monkeypatch, tmp_path):
+    # Motivation: https://github.com/pypa/setuptools/issues/4892
+    # TODO: Remove this test after deprecation period is over
+    files = {
+        "LICENSE.txt": "parent license",  # <---- the license files are outside
+        "NOTICE.txt": "parent notice",
+        "python": {
+            "pyproject.toml": cleandoc(
+                """
+                [project]
+                name = "test-proj"
+                dynamic = ["version"]      # <---- testing dynamic will not break
+                [tool.setuptools.dynamic]
+                version.file = "VERSION"
+                """
+            ),
+            "setup.cfg": cleandoc(
+                """
+                [metadata]
+                license_files =
+                  ../LICENSE.txt
+                  ../NOTICE.txt
+                """
+            ),
+            "VERSION": "42",
+        },
+    }
+    jaraco.path.build(files, prefix=str(tmp_path))
+    monkeypatch.chdir(tmp_path / "python")
+    msg = "Pattern '../.*.txt' cannot contain '..'"
+    with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
+        bdist_wheel_cmd().run()
+    with ZipFile("dist/test_proj-42-py3-none-any.whl") as wf:
+        files_found = set(wf.namelist())
+        expected_files = {
+            "test_proj-42.dist-info/licenses/LICENSE.txt",
+            "test_proj-42.dist-info/licenses/NOTICE.txt",
+        }
+        assert expected_files <= files_found
+
+        metadata = wf.read("test_proj-42.dist-info/METADATA").decode("utf8")
+        assert "License-File: LICENSE.txt" in metadata
+        assert "License-File: NOTICE.txt" in metadata

From 4669c792c5c399fc5f08c8dc18b09bba40195348 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 11:38:23 +0000
Subject: [PATCH 1567/1761] Add news fragment

---
 newsfragments/4896.bugfix.rst | 2 ++
 setuptools/_core_metadata.py  | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)
 create mode 100644 newsfragments/4896.bugfix.rst

diff --git a/newsfragments/4896.bugfix.rst b/newsfragments/4896.bugfix.rst
new file mode 100644
index 0000000000..3932fbb0ff
--- /dev/null
+++ b/newsfragments/4896.bugfix.rst
@@ -0,0 +1,2 @@
+Temporarily convert error for license glob patterns containing ``../`` into a deprecation warning
+to allow an accommodation period.
diff --git a/setuptools/_core_metadata.py b/setuptools/_core_metadata.py
index 814aaab0d3..a52d5cf755 100644
--- a/setuptools/_core_metadata.py
+++ b/setuptools/_core_metadata.py
@@ -295,7 +295,7 @@ def _distribution_fullname(name: str, version: str) -> str:
 
 
 def _safe_license_file(file):
-    # XXX: Do we need this after the deprecation discussed in #4892??
+    # XXX: Do we need this after the deprecation discussed in #4892, #4896??
     normalized = os.path.normpath(file).replace(os.sep, "/")
     if "../" in normalized:
         return os.path.basename(normalized)  # Temporarily restore pre PEP639 behaviour

From a1f35ae145b821a5f7d053ca7dfa13fc571b9311 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 11:42:58 +0000
Subject: [PATCH 1568/1761] Fix doctest problem

---
 setuptools/dist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index c912776f22..696ea6db9c 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -496,7 +496,7 @@ def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
         >>> Distribution._find_pattern("../LICENSE.MIT")
         Traceback (most recent call last):
         ...
-        setuptools.warnings.SetuptoolsDeprecationWarning: ...Pattern '../LICENSE.MIT' cannot contain '..'
+        setuptools.warnings.SetuptoolsDeprecationWarning: ...Pattern '../LICENSE.MIT' cannot contain '..'...
         >>> Distribution._find_pattern("LICEN{CSE*")
         Traceback (most recent call last):
         ...

From 685778d2fba56a2b7f24d8375adbf797861f3a91 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 11:43:49 +0000
Subject: [PATCH 1569/1761] Extend deprecation period so that users have 1
 entire year

---
 setuptools/dist.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 696ea6db9c..ec29ea0b34 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -507,12 +507,11 @@ def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
             SetuptoolsDeprecationWarning.emit(
                 f"Pattern {pattern!r} cannot contain '..'",
                 """
-                According to the new PyPA standards, this glob pattern is invalid.
                 Please ensure the files specified are contained by the root
                 of the Python package (normally marked by `pyproject.toml`).
                 """,
                 see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
-                due_date=(2026, 2, 20),  # Introduced in 2025-03-20
+                due_date=(2026, 3, 20),  # Introduced in 2025-03-20
                 # Replace with InvalidConfigError after deprecation
             )
         if pattern.startswith((os.sep, "/")) or ":\\" in pattern:
@@ -525,7 +524,7 @@ def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
                 "Pattern {pattern!r} contains invalid characters.",
                 pattern=pattern,
                 see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
-                due_date=(2026, 2, 20),  # Introduced in 2025-02-20
+                due_date=(2026, 3, 20),  # Introduced in 2025-02-20
             )
 
         found = glob(pattern, recursive=True)
@@ -535,7 +534,7 @@ def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
                 "Cannot find any files for the given pattern.",
                 "Pattern {pattern!r} did not match any files.",
                 pattern=pattern,
-                due_date=(2026, 2, 20),  # Introduced in 2025-02-20
+                due_date=(2026, 3, 20),  # Introduced in 2025-02-20
                 # PEP 639 requires us to error, but as a transition period
                 # we will only issue a warning to give people time to prepare.
                 # After the transition, this should raise an InvalidConfigError.

From 7bfad8a40dfe71f1a7a8f8a33d6d3a225056ff5e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 12:42:27 +0000
Subject: [PATCH 1570/1761] Avoid raising exception when license-files is
 defined outside of pyproject.toml

---
 setuptools/config/_apply_pyprojecttoml.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index ffa3fc3c49..f6fd1e3d1e 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -90,7 +90,8 @@ def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
         return  # short-circuit
 
     if "license-files" in tool_table:
-        if dist.metadata.license_files:
+        if "license-files" in config.get("project", {}):
+            # https://github.com/pypa/setuptools/pull/4837#discussion_r2004983349
             raise InvalidConfigError(
                 "'project.license-files' is defined already. "
                 "Remove 'tool.setuptools.license-files'."

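Note: the old check looked at dist.metadata.license_files, which is also populated when license_files comes from setup.cfg, so legitimate setup.cfg + tool-table combinations raised too. The fix consults the raw pyproject.toml document instead, so only a genuine double definition inside pyproject.toml is rejected. The essential shape:

def check_conflict(config: dict) -> None:
    # config: the parsed pyproject.toml document
    tool_table = config.get("tool", {}).get("setuptools", {})
    if "license-files" in tool_table and "license-files" in config.get("project", {}):
        raise ValueError(
            "'project.license-files' is defined already. "
            "Remove 'tool.setuptools.license-files'."
        )
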
From f9ad3e103aaaf86119e6d93c509ffaef3184e676 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 12:45:45 +0000
Subject: [PATCH 1571/1761] Add newsfragment

---
 newsfragments/4899.bugfix.rst | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 newsfragments/4899.bugfix.rst

diff --git a/newsfragments/4899.bugfix.rst b/newsfragments/4899.bugfix.rst
new file mode 100644
index 0000000000..7205010ed3
--- /dev/null
+++ b/newsfragments/4899.bugfix.rst
@@ -0,0 +1,3 @@
+Avoided eagerly raising an exception when ``license-files`` is defined
+simultaneously inside and outside of ``pyproject.toml``.
+Instead we rely on the existing deprecation error.

From 932ec38924e92eda3e395d7af8fe11c949d30b6e Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 12:56:54 +0000
Subject: [PATCH 1572/1761] Manually fix docs preventing CI jobs from passing

---
 NEWS.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/NEWS.rst b/NEWS.rst
index e6ce4362bf..f5396595a7 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -4,8 +4,8 @@ v77.0.2
 Bugfixes
 --------
 
-- Restore `distutils.ccompiler._default_compilers` -- by :user:`ManiacDC` (#4876)
-- Fixed copy pasta in msvc.shared_lib_format. (#4885)
+- Restore ``distutils.ccompiler._default_compilers`` -- by :user:`ManiacDC` (#4876)
+- Fixed copy pasta in ``msvc.shared_lib_format``\. (#4885)
 
 
 v77.0.1

From f123312f6cb27f9813491349323ed276b0bc167c Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 12:12:25 +0000
Subject: [PATCH 1573/1761] Defer import error for packaging.licenses in
 environments with packaging<24.2

---
 setuptools/_normalization.py | 18 ++++++++++++++++++
 setuptools/dist.py           |  4 ++--
 2 files changed, 20 insertions(+), 2 deletions(-)

diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py
index 9541a55d6c..6aa9487b0c 100644
--- a/setuptools/_normalization.py
+++ b/setuptools/_normalization.py
@@ -148,3 +148,21 @@ def safer_best_effort_version(value: str) -> str:
     # See bdist_wheel.safer_verion
     # TODO: Replace with only safe_version in the future (no need for best effort)
     return filename_component(best_effort_version(value))
+
+
+try:
+    from packaging.licenses import (
+        canonicalize_license_expression as _canonicalize_license_expression,
+    )
+except ImportError:
+
+    def _canonicalize_license_expression(expression: str) -> str:
+        # Defer import error to affect only users that actually use it
+        # https://github.com/pypa/setuptools/issues/4894
+        raise ImportError(
+            "Cannot import `packaging.licenses`."
+            """
+            Setuptools>=77.0.0 requires "packaging>=24.2" to work properly.
+            Please make sure you have a suitable version installed.
+            """
+        )
diff --git a/setuptools/dist.py b/setuptools/dist.py
index 320abb7f80..ebaa126212 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -12,7 +12,6 @@
 from typing import TYPE_CHECKING, Any, Union
 
 from more_itertools import partition, unique_everseen
-from packaging.licenses import canonicalize_license_expression
 from packaging.markers import InvalidMarker, Marker
 from packaging.specifiers import InvalidSpecifier, SpecifierSet
 from packaging.version import Version
@@ -24,6 +23,7 @@
     command as _,  # noqa: F401 # imported for side-effects
 )
 from ._importlib import metadata
+from ._normalization import _canonicalize_license_expression
 from ._path import StrPath
 from ._reqs import _StrOrIter
 from .config import pyprojecttoml, setupcfg
@@ -423,7 +423,7 @@ def _finalize_license_expression(self) -> None:
         license_expr = self.metadata.license_expression
         if license_expr:
             str_ = _static.Str if _static.is_static(license_expr) else str
-            normalized = str_(canonicalize_license_expression(license_expr))
+            normalized = str_(_canonicalize_license_expression(license_expr))
             if license_expr != normalized:
                 InformationOnly.emit(f"Normalizing '{license_expr}' to '{normalized}'")
                 self.metadata.license_expression = normalized

From 4622b5aaf7f607c61de40cdfdca769e48b32f731 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 12:16:18 +0000
Subject: [PATCH 1574/1761] Add news fragment

---
 newsfragments/4898.bugfix.rst | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 newsfragments/4898.bugfix.rst

diff --git a/newsfragments/4898.bugfix.rst b/newsfragments/4898.bugfix.rst
new file mode 100644
index 0000000000..c94d90198b
--- /dev/null
+++ b/newsfragments/4898.bugfix.rst
@@ -0,0 +1,4 @@
+Better error messages for ``packaging.licenses`` import errors in environments with ``packaging<24.2``\.
+The import statement was also deferred to spare users that are not using
+license expressions.
+

From 627a869d19828c17390e7c6c0a7bdd43922723a5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 12:21:01 +0000
Subject: [PATCH 1575/1761] Attempt to avoid typing error

---
 setuptools/_normalization.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py
index 6aa9487b0c..4ba4f5e05e 100644
--- a/setuptools/_normalization.py
+++ b/setuptools/_normalization.py
@@ -156,7 +156,7 @@ def safer_best_effort_version(value: str) -> str:
     )
 except ImportError:
 
-    def _canonicalize_license_expression(expression: str) -> str:
+    def _canonicalize_license_expression(expression: str) -> str:  # type: ignore[misc]  # pyright: ignore[reportAssignmentType]
         # Defer import error to affect only users that actually use it
         # https://github.com/pypa/setuptools/issues/4894
         raise ImportError(

From 64612bf280b97e51b9014a7c55a393b686227518 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 13:00:44 +0000
Subject: [PATCH 1576/1761] Remove duplicated dependency

---
 pyproject.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 20e6ee69c9..ed1eb1d6df 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -102,7 +102,6 @@ core = [
 
 	# for distutils
 	"jaraco.functools >= 4",
-	"packaging",
 	"more_itertools",
 ]
 

From 53fc322820862c83687d5fee9a56c6a14c8e8d84 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 13:05:47 +0000
Subject: [PATCH 1577/1761] Silence typechecking in complicated statement

---
 setuptools/_normalization.py | 25 ++++++++++++++-----------
 1 file changed, 14 insertions(+), 11 deletions(-)

diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py
index 4ba4f5e05e..96e0a937ef 100644
--- a/setuptools/_normalization.py
+++ b/setuptools/_normalization.py
@@ -4,6 +4,7 @@
 """
 
 import re
+from typing import TYPE_CHECKING
 
 import packaging
 
@@ -155,14 +156,16 @@ def safer_best_effort_version(value: str) -> str:
         canonicalize_license_expression as _canonicalize_license_expression,
     )
 except ImportError:
-
-    def _canonicalize_license_expression(expression: str) -> str:  # type: ignore[misc]  # pyright: ignore[reportAssignmentType]
-        # Defer import error to affect only users that actually use it
-        # https://github.com/pypa/setuptools/issues/4894
-        raise ImportError(
-            "Cannot import `packaging.licenses`."
-            """
-            Setuptools>=77.0.0 requires "packaging>=24.2" to work properly.
-            Please make sure you have a suitable version installed.
-            """
-        )
+    if not TYPE_CHECKING:
+        # XXX: pyright is still upset even with # pyright: ignore[reportAssignmentType]
+
+        def _canonicalize_license_expression(expression: str) -> str:
+            # Defer import error to affect only users that actually use it
+            # https://github.com/pypa/setuptools/issues/4894
+            raise ImportError(
+                "Cannot import `packaging.licenses`."
+                """
+                Setuptools>=77.0.0 requires "packaging>=24.2" to work properly.
+                Please make sure you have a suitable version installed.
+                """
+            )

From 676362d62a9e7b2bb57e0332ec9b4b0f8539a727 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 13:12:44 +0000
Subject: [PATCH 1578/1761] Refactor fallback for packaging.licenses

---
 setuptools/_normalization.py | 32 ++++++++++++++++++++------------
 1 file changed, 20 insertions(+), 12 deletions(-)

diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py
index 96e0a937ef..0937a4faf8 100644
--- a/setuptools/_normalization.py
+++ b/setuptools/_normalization.py
@@ -151,21 +151,29 @@ def safer_best_effort_version(value: str) -> str:
     return filename_component(best_effort_version(value))
 
 
+def _missing_canonicalize_license_expression(expression: str) -> str:
+    """
+    Defer import error to affect only users that actually use it
+    https://github.com/pypa/setuptools/issues/4894
+    >>> _missing_canonicalize_license_expression("a OR b")
+    Traceback (most recent call last):
+    ...
+    ImportError: ...Cannot import `packaging.licenses`...
+    """
+    raise ImportError(
+        "Cannot import `packaging.licenses`."
+        """
+        Setuptools>=77.0.0 requires "packaging>=24.2" to work properly.
+        Please make sure you have a suitable version installed.
+        """
+    )
+
+
 try:
     from packaging.licenses import (
         canonicalize_license_expression as _canonicalize_license_expression,
     )
-except ImportError:
+except ImportError:  # pragma: nocover
     if not TYPE_CHECKING:
         # XXX: pyright is still upset even with # pyright: ignore[reportAssignmentType]
-
-        def _canonicalize_license_expression(expression: str) -> str:
-            # Defer import error to affect only users that actually use it
-            # https://github.com/pypa/setuptools/issues/4894
-            raise ImportError(
-                "Cannot import `packaging.licenses`."
-                """
-                Setuptools>=77.0.0 requires "packaging>=24.2" to work properly.
-                Please make sure you have a suitable version installed.
-                """
-            )
+        _canonicalize_license_expression = _missing_canonicalize_license_expression

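Note: this is an optional-dependency fallback: the real function is bound at import time when available, and otherwise a stub with the same signature is bound that raises a helpful ImportError only when actually called. The shape, with the setuptools-specific names simplified:

try:
    from packaging.licenses import canonicalize_license_expression
except ImportError:  # packaging < 24.2
    def canonicalize_license_expression(expression: str) -> str:
        # static type checkers may flag the redefinition; the
        # TYPE_CHECKING guard in the series above is one workaround
        raise ImportError(
            "Cannot import `packaging.licenses`; "
            "setuptools>=77.0.0 needs packaging>=24.2 for license expressions."
        )
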
From 7c859e017368360ba66c8cc591279d8964c031bc Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 14:06:39 +0000
Subject: [PATCH 1579/1761] =?UTF-8?q?Bump=20version:=2077.0.2=20=E2=86=92?=
 =?UTF-8?q?=2077.0.3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              |  2 +-
 NEWS.rst                      | 16 ++++++++++++++++
 newsfragments/4896.bugfix.rst |  2 --
 newsfragments/4898.bugfix.rst |  4 ----
 newsfragments/4899.bugfix.rst |  3 ---
 pyproject.toml                |  2 +-
 6 files changed, 18 insertions(+), 11 deletions(-)
 delete mode 100644 newsfragments/4896.bugfix.rst
 delete mode 100644 newsfragments/4898.bugfix.rst
 delete mode 100644 newsfragments/4899.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index dd09ada207..8c3e9c7f1b 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 77.0.2
+current_version = 77.0.3
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index f5396595a7..778af01e44 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,19 @@
+v77.0.3
+=======
+
+Bugfixes
+--------
+
+- Temporarily convert error for license glob patterns containing ``../`` into a deprecation warning
+  to allow an accommodation period. (#4896)
+- Better error messages for ``packaging.licenses`` import errors in environments with ``packaging<24.2``\.
+  The import statement was also deferred to spare users that are not using
+  license expressions. (#4898)
+- Avoided eagerly raising an exception when ``license-files`` is defined
+  simultaneously inside and outside of ``pyproject.toml``.
+  Instead we rely on the existing deprecation error. (#4899)
+
+
 v77.0.2
 =======
 
diff --git a/newsfragments/4896.bugfix.rst b/newsfragments/4896.bugfix.rst
deleted file mode 100644
index 3932fbb0ff..0000000000
--- a/newsfragments/4896.bugfix.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Temporarily convert error for license glob patterns containing ``../`` into a deprecation warning
-to allow an accommodation period.
diff --git a/newsfragments/4898.bugfix.rst b/newsfragments/4898.bugfix.rst
deleted file mode 100644
index c94d90198b..0000000000
--- a/newsfragments/4898.bugfix.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Better error messages for ``packaging.licenses`` import errors in environments with ``packaging<24.2``\.
-The import statement was also deferred to spare users that are not using
-license expressions.
-
diff --git a/newsfragments/4899.bugfix.rst b/newsfragments/4899.bugfix.rst
deleted file mode 100644
index 7205010ed3..0000000000
--- a/newsfragments/4899.bugfix.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Avoided eagerly raising an exception when ``license-files`` is defined
-simultaneously inside and outside of ``pyproject.toml``.
-Instead we rely on the existing deprecation error.
diff --git a/pyproject.toml b/pyproject.toml
index ed1eb1d6df..b5e17f874a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "77.0.2"
+version = "77.0.3"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From a75611aec4e492a9760dc9dbf02388506f49afa7 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 20 Mar 2025 17:01:56 -0400
Subject: [PATCH 1580/1761] Remove news fragments; merged downstream.

---
 newsfragments/4876.bugfix.2.rst | 1 -
 1 file changed, 1 deletion(-)
 delete mode 100644 newsfragments/4876.bugfix.2.rst

diff --git a/newsfragments/4876.bugfix.2.rst b/newsfragments/4876.bugfix.2.rst
deleted file mode 100644
index 8350c5a736..0000000000
--- a/newsfragments/4876.bugfix.2.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restore `distutils.ccompiler._default_compilers` -- by :user:`ManiacDC`

From 24d96057c5de0b47574c23d148d633fdae389c12 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 22:48:13 +0000
Subject: [PATCH 1581/1761] Document version of setuptools introducing support
 for PEP 639

---
 docs/userguide/pyproject_config.rst       | 8 ++++++++
 setuptools/config/_apply_pyprojecttoml.py | 5 +++--
 2 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index e4cee52aa3..5bae5487d1 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -70,6 +70,14 @@ The ``project`` table contains metadata fields as described by the
    # ... other project metadata fields as listed in:
    #     https://packaging.python.org/en/latest/guides/writing-pyproject-toml/
 
+.. important::
+   Support for
+   :doc:`project.license-files `
+   and SPDX license expressions in
+   :doc:`project.license ` (:pep:`639`)
+   were introduced in version 71.0.0.
+
+
 .. _setuptools-table:
 
 Setuptools-specific configuration
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index f6fd1e3d1e..d99327976f 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -100,7 +100,7 @@ def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
         pypa_guides = "guides/writing-pyproject-toml/#license-files"
         SetuptoolsDeprecationWarning.emit(
             "'tool.setuptools.license-files' is deprecated in favor of "
-            "'project.license-files'",
+            "'project.license-files' (available on setuptools>=71.0.0).",
             see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
             due_date=(2026, 2, 18),  # Warning introduced on 2025-02-18
         )
@@ -211,7 +211,8 @@ def _license(dist: Distribution, val: str | dict, root_dir: StrPath | None):
         SetuptoolsDeprecationWarning.emit(
             "`project.license` as a TOML table is deprecated",
             "Please use a simple string containing a SPDX expression for "
-            "`project.license`. You can also use `project.license-files`.",
+            "`project.license`. You can also use `project.license-files`. "
+            "(Both options available on setuptools>=71.0.0).",
             see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
             due_date=(2026, 2, 18),  # Introduced on 2025-02-18
         )

From b03d3006659c7c62647e92de5fcbce440fb1c713 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Thu, 20 Mar 2025 22:54:28 +0000
Subject: [PATCH 1582/1761] Fix misspelling

---
 docs/userguide/pyproject_config.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 5bae5487d1..7a81da0665 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -72,9 +72,9 @@ The ``project`` table contains metadata fields as described by the
 
 .. important::
    Support for
-   :doc:`project.license-files `
+   :doc:`project.license-files `
    and SPDX license expressions in
-   :doc:`project.license ` (:pep:`639`)
+   :doc:`project.license ` (:pep:`639`)
    was introduced in version 71.0.0.
 
 

From ad84110008b826efd6e39bcc39b9998b4f1cc767 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 21 Mar 2025 00:14:38 +0000
Subject: [PATCH 1583/1761] Remove deprecated license classifier (PEP 639)
 (jaraco/skeleton#170)

---
 pyproject.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 328b98cb46..71b1a7dabc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,7 +12,6 @@ readme = "README.rst"
 classifiers = [
 	"Development Status :: 5 - Production/Stable",
 	"Intended Audience :: Developers",
-	"License :: OSI Approved :: MIT License",
 	"Programming Language :: Python :: 3",
 	"Programming Language :: Python :: 3 :: Only",
 ]

From ce0da3ce728777b40da1d84313ab1f70cdaf99cb Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 21 Mar 2025 11:24:34 +0000
Subject: [PATCH 1584/1761] Fixes for version and doc warning

---
 docs/userguide/pyproject_config.rst       | 6 +++---
 setuptools/config/_apply_pyprojecttoml.py | 4 ++--
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 7a81da0665..c5a3e7bb2f 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -72,10 +72,10 @@ The ``project`` table contains metadata fields as described by the
 
 .. important::
    Support for
-   :doc:`project.license-files `
+   :ref:`project.license-files `
    and SPDX license expressions in
-   :doc:`project.license ` (:pep:`639`)
-   was introduced in version 71.0.0.
+   :doc:`project.license ` (:pep:`639`)
+   was introduced in version 77.0.0.
 
 
 .. _setuptools-table:
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index d99327976f..9088bc1383 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -100,7 +100,7 @@ def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
         pypa_guides = "guides/writing-pyproject-toml/#license-files"
         SetuptoolsDeprecationWarning.emit(
             "'tool.setuptools.license-files' is deprecated in favor of "
-            "'project.license-files' (available on setuptools>=71.0.0).",
+            "'project.license-files' (available on setuptools>=77.0.0).",
             see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
             due_date=(2026, 2, 18),  # Warning introduced on 2025-02-18
         )
@@ -212,7 +212,7 @@ def _license(dist: Distribution, val: str | dict, root_dir: StrPath | None):
             "`project.license` as a TOML table is deprecated",
             "Please use a simple string containing a SPDX expression for "
             "`project.license`. You can also use `project.license-files`. "
-            "(Both options available on setuptools>=71.0.0).",
+            "(Both options available on setuptools>=77.0.0).",
             see_url=f"https://packaging.python.org/en/latest/{pypa_guides}",
             due_date=(2026, 2, 18),  # Introduced on 2025-02-18
         )

From d1d873e49ad18fec3e165879058a5cd001dcef1b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 21 Mar 2025 12:03:40 +0000
Subject: [PATCH 1585/1761] Fix external sphinx ref

---
 docs/userguide/pyproject_config.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index c5a3e7bb2f..863222d037 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -72,7 +72,7 @@ The ``project`` table contains metadata fields as described by the
 
 .. important::
    Support for
-   :ref:`project.license-files `
+   :external+PyPUG:ref:`project.license-files `
    and SPDX license expressions in
    :doc:`project.license ` (:pep:`639`)
    was introduced in version 77.0.0.

From 2354282cd0f9f1f5a534194e12a066cc96b8112d Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 21 Mar 2025 14:55:06 +0000
Subject: [PATCH 1586/1761] Add news fragment

---
 newsfragments/4904.misc.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4904.misc.rst

diff --git a/newsfragments/4904.misc.rst b/newsfragments/4904.misc.rst
new file mode 100644
index 0000000000..0da5929c6e
--- /dev/null
+++ b/newsfragments/4904.misc.rst
@@ -0,0 +1 @@
+Add statement of which version is the first to implement :pep:`639` support.

From 31531d0ee11d7f4a0b16ec13473505eb044ea611 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 21 Mar 2025 15:51:56 +0000
Subject: [PATCH 1587/1761] Apply suggestions from code review

Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
---
 docs/userguide/pyproject_config.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 863222d037..396a941467 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -72,9 +72,9 @@ The ``project`` table contains metadata fields as described by the
 
 .. important::
    Support for
-   :external+PyPUG:ref:`project.license-files `
+   :external+PyPUG:ref:`project.license-files `
    and SPDX license expressions in
-   :doc:`project.license ` (:pep:`639`)
+   :external+PyPUG:ref:`project.license ` (:pep:`639`)
    was introduced in version 77.0.0.
 
 

From d0415055922b4e9b955676f0184e4dbf4c41fea1 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Fri, 21 Mar 2025 16:09:31 +0000
Subject: [PATCH 1588/1761] Fix reference.

---
 docs/userguide/pyproject_config.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 396a941467..7c6f3d4d6e 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -72,7 +72,7 @@ The ``project`` table contains metadata fields as described by the
 
 .. important::
    Support for
-   :external+PyPUG:ref:`project.license-files `
+   :external+PyPUG:ref:`project.license-files `
    and SPDX license expressions in
    :external+PyPUG:ref:`project.license ` (:pep:`639`)
    was introduced in version 77.0.0.

From 1ebb559a507f97ece7342d7f1532a49188cade33 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Thu, 20 Mar 2025 20:56:31 -0400
Subject: [PATCH 1589/1761] Remove workaround and update badge.

Closes jaraco/skeleton#155
---
 README.rst | 2 +-
 ruff.toml  | 3 ---
 2 files changed, 1 insertion(+), 4 deletions(-)

diff --git a/README.rst b/README.rst
index 4d3cabee9d..3000f5ab21 100644
--- a/README.rst
+++ b/README.rst
@@ -7,7 +7,7 @@
    :target: https://github.com/PROJECT_PATH/actions?query=workflow%3A%22tests%22
    :alt: tests
 
-.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json
     :target: https://github.com/astral-sh/ruff
     :alt: Ruff
 
diff --git a/ruff.toml b/ruff.toml
index 267a1ba1f1..63c0825f6b 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,6 +1,3 @@
-# extend pyproject.toml for requires-python (workaround astral-sh/ruff#10299)
-extend = "pyproject.toml"
-
 [lint]
 extend-select = [
 	# upstream

From 979e626055ab60095b37be04555a01a40f62e470 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 22 Mar 2025 05:33:58 -0400
Subject: [PATCH 1590/1761] Remove PIP_NO_PYTHON_VERSION_WARNING.

Ref pypa/pip#13154
---
 .github/workflows/main.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 5841cc37b7..928acf2ca6 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -21,7 +21,6 @@ env:
 
   # Suppress noisy pip warnings
   PIP_DISABLE_PIP_VERSION_CHECK: 'true'
-  PIP_NO_PYTHON_VERSION_WARNING: 'true'
   PIP_NO_WARN_SCRIPT_LOCATION: 'true'
 
   # Ensure tests can sense settings about the environment

From d2cf96a3437b830499c88865eb6546feeff198d8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 23 Mar 2025 16:19:01 -0400
Subject: [PATCH 1591/1761] Revert "Merge pull request pypa/distutils#345 from
 Avasam/Explicit-re-exports-of-submodules-in-`distutils.command.__all__`"

This reverts commit 1d120d919bd6d5cb8ba1b179c2099bd947d2c466, reversing
changes made to 89a42e319b81a0326306c7d3928ae4d100862156.

Fix #351
Fix pypa/setuptools#4902
Fix pypa/setuptools#4906
Fix pypa/setuptools#4908
---
 distutils/command/__init__.py | 35 ++++++++---------------------------
 1 file changed, 8 insertions(+), 27 deletions(-)

diff --git a/distutils/command/__init__.py b/distutils/command/__init__.py
index ceabe967b1..0f8a1692ba 100644
--- a/distutils/command/__init__.py
+++ b/distutils/command/__init__.py
@@ -3,40 +3,21 @@
 Package containing implementation of all the standard Distutils
 commands."""
 
-from . import (
-    bdist,
-    bdist_dumb,
-    bdist_rpm,
-    build,
-    build_clib,
-    build_ext,
-    build_py,
-    build_scripts,
-    check,
-    clean,
-    install,
-    install_data,
-    install_headers,
-    install_lib,
-    install_scripts,
-    sdist,
-)
-
 __all__ = [
-    'bdist',
-    'bdist_dumb',
-    'bdist_rpm',
     'build',
-    'build_clib',
-    'build_ext',
     'build_py',
+    'build_ext',
+    'build_clib',
     'build_scripts',
-    'check',
     'clean',
     'install',
-    'install_data',
-    'install_headers',
     'install_lib',
+    'install_headers',
     'install_scripts',
+    'install_data',
     'sdist',
+    'bdist',
+    'bdist_dumb',
+    'bdist_rpm',
+    'check',
 ]
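
The revert above matters because setuptools monkey-patches distutils command
classes after import: eagerly importing every submodule from the package
``__init__`` let consumers bind the original classes before any patch landed.
A self-contained toy of that ordering hazard, using a synthetic module rather
than distutils itself (all names here are illustrative)::

    import sys
    import types

    # Simulate a package whose __init__ eagerly imports a submodule.
    build = types.ModuleType("cmds.build")
    build.build = type("build", (), {})  # the "original" command class
    sys.modules["cmds.build"] = build

    # A consumer that ran during the eager import holds the original:
    held = build.build

    # A monkey patch applied later rebinds the module attribute...
    build.build = type("patched_build", (), {})

    # ...but the earlier reference still points at the unpatched class.
    assert held is not build.build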

From 063dbcc621cad73ed1e669f23906eae21e12bc55 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 23 Mar 2025 16:22:09 -0400
Subject: [PATCH 1592/1761] Add news fragment.

---
 newsfragments/4902.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4902.bugfix.rst

diff --git a/newsfragments/4902.bugfix.rst b/newsfragments/4902.bugfix.rst
new file mode 100644
index 0000000000..6388d72292
--- /dev/null
+++ b/newsfragments/4902.bugfix.rst
@@ -0,0 +1 @@
+Reverted distutils changes that broke the monkey patching of command classes.

From 6da75919dfe5796acf9c3cb7813c6b08a31d51ad Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 23 Mar 2025 16:22:21 -0400
Subject: [PATCH 1593/1761] =?UTF-8?q?Bump=20version:=2077.0.3=20=E2=86=92?=
 =?UTF-8?q?=2078.0.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 30 ++++++++++++++++++++++++++++++
 newsfragments/4870.removal.rst | 10 ----------
 newsfragments/4902.bugfix.rst  |  1 -
 newsfragments/4904.misc.rst    |  1 -
 pyproject.toml                 |  2 +-
 6 files changed, 32 insertions(+), 14 deletions(-)
 delete mode 100644 newsfragments/4870.removal.rst
 delete mode 100644 newsfragments/4902.bugfix.rst
 delete mode 100644 newsfragments/4904.misc.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 8c3e9c7f1b..db3afe7d8f 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 77.0.3
+current_version = 78.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 778af01e44..aa4a0ffcb1 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,33 @@
+v78.0.0
+=======
+
+Bugfixes
+--------
+
+- Reverted distutils changes that broke the monkey patching of command classes. (#4902)
+
+
+Deprecations and Removals
+-------------------------
+
+- Setuptools no longer accepts options containing uppercase or dash characters in ``setup.cfg``.
+  Please ensure to write the options in ``setup.cfg`` using the :wiki:`lower_snake_case ` convention
+  (e.g. ``Name => name``, ``install-requires => install_requires``).
+  This is a follow-up on deprecations introduced in
+  `v54.1.0 `_ (see #1608) and
+  `v54.1.1 `_ (see #2592).
+
+  .. note::
+     This change *does not affect configurations in* ``pyproject.toml``
+     (which uses the :wiki:`lower-kebab-case ` convention following the precedent set in :pep:`517`/:pep:`518`). (#4870)
+
+
+Misc
+----
+
+- #4904
+
+
 v77.0.3
 =======
 
diff --git a/newsfragments/4870.removal.rst b/newsfragments/4870.removal.rst
deleted file mode 100644
index 5b713032d0..0000000000
--- a/newsfragments/4870.removal.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-Setuptools no longer accepts options containing uppercase or dash characters in ``setup.cfg``.
-Please ensure to write the options in ``setup.cfg`` using the :wiki:`lower_snake_case ` convention
-(e.g. ``Name => name``, ``install-requires => install_requires``).
-This is a follow-up on deprecations introduced in
-`v54.1.0 `_ (see #1608) and
-`v54.1.1 `_ (see #2592).
-
-.. note::
-   This change *does not affect configurations in* ``pyproject.toml``
-   (which uses the :wiki:`lower-kebab-case ` convention following the precedent set in :pep:`517`/:pep:`518`).
diff --git a/newsfragments/4902.bugfix.rst b/newsfragments/4902.bugfix.rst
deleted file mode 100644
index 6388d72292..0000000000
--- a/newsfragments/4902.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Reverted distutils changes that broke the monkey patching of command classes.
diff --git a/newsfragments/4904.misc.rst b/newsfragments/4904.misc.rst
deleted file mode 100644
index 0da5929c6e..0000000000
--- a/newsfragments/4904.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Add statement of which version is the first to implement :pep:`639` support.
diff --git a/pyproject.toml b/pyproject.toml
index b5e17f874a..89526a1f76 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "77.0.3"
+version = "78.0.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From e771c64c613f148b9b46f45acf00955093dce4c2 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 24 Mar 2025 12:01:03 +0000
Subject: [PATCH 1594/1761] Temporarily remove 'requests' from integration
 tests

---
 setuptools/tests/integration/test_pip_install_sdist.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py
index b2f1c08003..4e84f21832 100644
--- a/setuptools/tests/integration/test_pip_install_sdist.py
+++ b/setuptools/tests/integration/test_pip_install_sdist.py
@@ -54,7 +54,7 @@
     ("pyyaml", LATEST),  # cython + custom build_ext + custom distclass
     ("charset-normalizer", LATEST),  # uses mypyc, used by aiohttp
     ("protobuf", LATEST),
-    ("requests", LATEST),
+    # ("requests", LATEST),  # XXX: https://github.com/psf/requests/pull/6920
     ("celery", LATEST),
     # When adding packages to this list, make sure they expose a `__version__`
     # attribute, or modify the tests below

From 54b4c79bf40099bac3f5005352215acdbcd12e17 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 24 Mar 2025 12:08:25 +0000
Subject: [PATCH 1595/1761] Add news fragment

---
 newsfragments/4909.misc.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4909.misc.rst

diff --git a/newsfragments/4909.misc.rst b/newsfragments/4909.misc.rst
new file mode 100644
index 0000000000..5d0a88c88d
--- /dev/null
+++ b/newsfragments/4909.misc.rst
@@ -0,0 +1,2 @@
+Temporarily remove ``requests`` from integration tests
+due to invalid ``setup.cfg``.

From 5450f57f1cefa44c961fb50fc18e9826c95a5d59 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 24 Mar 2025 14:17:31 +0000
Subject: [PATCH 1596/1761] =?UTF-8?q?Bump=20version:=2078.0.0=20=E2=86=92?=
 =?UTF-8?q?=2078.0.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg            | 2 +-
 NEWS.rst                    | 9 +++++++++
 newsfragments/4909.misc.rst | 2 --
 pyproject.toml              | 2 +-
 4 files changed, 11 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4909.misc.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index db3afe7d8f..f506444c07 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 78.0.0
+current_version = 78.0.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index aa4a0ffcb1..e3e415279c 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v78.0.1
+=======
+
+Misc
+----
+
+- #4909
+
+
 v78.0.0
 =======
 
diff --git a/newsfragments/4909.misc.rst b/newsfragments/4909.misc.rst
deleted file mode 100644
index 5d0a88c88d..0000000000
--- a/newsfragments/4909.misc.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Temporarily remove ``requests`` from integration tests
-due to invalid ``setup.cfg``.
diff --git a/pyproject.toml b/pyproject.toml
index 89526a1f76..ded145dca1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "78.0.0"
+version = "78.0.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 4e9b8caec323aba5b2b3764ef97018a8b1596a4b Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 24 Mar 2025 17:25:02 +0000
Subject: [PATCH 1597/1761] Revert removals introduced in v78.0.0

---
 setuptools/dist.py                       | 28 ++++++++++++++++++++----
 setuptools/tests/config/test_setupcfg.py | 21 +++++++++++++-----
 2 files changed, 39 insertions(+), 10 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index fa734c651f..8d972cc49b 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -626,21 +626,41 @@ def _enforce_underscore(self, opt: str, section: str) -> str:
         if "-" not in opt or self._skip_setupcfg_normalization(section):
             return opt
 
-        raise InvalidConfigError(
+        underscore_opt = opt.replace('-', '_')
+        affected = f"(Affected: {self.metadata.name})." if self.metadata.name else ""
+        SetuptoolsDeprecationWarning.emit(
             f"Invalid dash-separated key {opt!r} in {section!r} (setup.cfg), "
-            f"please use the underscore name {opt.replace('-', '_')!r} instead."
+            f"please use the underscore name {underscore_opt!r} instead.",
+            f"""
+            Usage of dash-separated {opt!r} will not be supported in future
+            versions. Please use the underscore name {underscore_opt!r} instead.
+            {affected}
+            """,
+            see_docs="userguide/declarative_config.html",
+            due_date=(2026, 3, 3),
             # Warning initially introduced in 3 Mar 2021
         )
+        return underscore_opt
 
     def _enforce_option_lowercase(self, opt: str, section: str) -> str:
         if opt.islower() or self._skip_setupcfg_normalization(section):
             return opt
 
-        raise InvalidConfigError(
+        lowercase_opt = opt.lower()
+        affected = f"(Affected: {self.metadata.name})." if self.metadata.name else ""
+        SetuptoolsDeprecationWarning.emit(
             f"Invalid uppercase key {opt!r} in {section!r} (setup.cfg), "
-            f"please use lowercase {opt.lower()!r} instead."
+            f"please use lowercase {lowercase_opt!r} instead.",
+            f"""
+            Usage of uppercase key {opt!r} in {section!r} will not be supported in
+            future versions. Please use lowercase {lowercase_opt!r} instead.
+            {affected}
+            """,
+            see_docs="userguide/declarative_config.html",
+            due_date=(2026, 3, 3),
             # Warning initially introduced in 6 Mar 2021
         )
+        return lowercase_opt
 
     def _skip_setupcfg_normalization(self, section: str) -> bool:
         skip = (
diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index a199871ffd..cf3e63779a 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -10,7 +10,6 @@
 
 from setuptools.config.setupcfg import ConfigHandler, Target, read_configuration
 from setuptools.dist import Distribution, _Distribution
-from setuptools.errors import InvalidConfigError
 from setuptools.warnings import SetuptoolsDeprecationWarning
 
 from ..textwrap import DALS
@@ -423,7 +422,7 @@ def test_not_utf8(self, tmpdir):
                 pass
 
     @pytest.mark.parametrize(
-        ("error_msg", "config"),
+        ("error_msg", "config", "invalid"),
         [
             (
                 "Invalid dash-separated key 'author-email' in 'metadata' (setup.cfg)",
@@ -434,6 +433,7 @@ def test_not_utf8(self, tmpdir):
                     maintainer_email = foo@foo.com
                     """
                 ),
+                {"author-email": "test@test.com"},
             ),
             (
                 "Invalid uppercase key 'Name' in 'metadata' (setup.cfg)",
@@ -444,14 +444,23 @@ def test_not_utf8(self, tmpdir):
                     description = Some description
                     """
                 ),
+                {"Name": "foo"},
             ),
         ],
     )
-    def test_invalid_options_previously_deprecated(self, tmpdir, error_msg, config):
-        # this test and related methods can be removed when no longer needed
+    def test_invalid_options_previously_deprecated(
+        self, tmpdir, error_msg, config, invalid
+    ):
+        # This test and related methods can be removed when no longer needed.
+        # Deprecation postponed due to push-back from the community in
+        # https://github.com/pypa/setuptools/issues/4910
         fake_env(tmpdir, config)
-        with pytest.raises(InvalidConfigError, match=re.escape(error_msg)):
-            get_dist(tmpdir).__enter__()
+        with pytest.warns(SetuptoolsDeprecationWarning, match=re.escape(error_msg)):
+            dist = get_dist(tmpdir).__enter__()
+
+        for field, value in invalid.items():
+            attr = field.replace("-", "_").lower()
+            assert getattr(dist.metadata, attr) == value
 
 
 class TestOptions:
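
The restored behavior warns and normalizes the offending key instead of
raising; the assertions above check exactly that mapping. The normalization in
isolation (the helper name is illustrative, not setuptools API)::

    def normalize_setupcfg_key(opt: str) -> str:
        # 'install-requires' -> 'install_requires'; 'Name' -> 'name'
        return opt.replace("-", "_").lower()

    assert normalize_setupcfg_key("author-email") == "author_email"
    assert normalize_setupcfg_key("Name") == "name"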

From caa48ab040420be2885e9d2a14ce6615bd17f992 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 24 Mar 2025 17:32:33 +0000
Subject: [PATCH 1598/1761] Add news fragment

---
 newsfragments/4911.bugfix.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 newsfragments/4911.bugfix.rst

diff --git a/newsfragments/4911.bugfix.rst b/newsfragments/4911.bugfix.rst
new file mode 100644
index 0000000000..49db1a49ed
--- /dev/null
+++ b/newsfragments/4911.bugfix.rst
@@ -0,0 +1,2 @@
+Postponed removals of deprecated dash-separated and uppercase fields in ``setup.cfg``.
+All packages with deprecated configurations are advised to migrate before 2026.

From 92e22dd5765b54e1ce7c9f25c419371e666b15dc Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 24 Mar 2025 18:33:04 +0000
Subject: [PATCH 1599/1761] Manually remove file to try to stop flaky tests on
 PyPy

---
 setuptools/tests/config/test_setupcfg.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/setuptools/tests/config/test_setupcfg.py b/setuptools/tests/config/test_setupcfg.py
index cf3e63779a..61af990447 100644
--- a/setuptools/tests/config/test_setupcfg.py
+++ b/setuptools/tests/config/test_setupcfg.py
@@ -458,6 +458,8 @@ def test_invalid_options_previously_deprecated(
         with pytest.warns(SetuptoolsDeprecationWarning, match=re.escape(error_msg)):
             dist = get_dist(tmpdir).__enter__()
 
+        tmpdir.join('setup.cfg').remove()
+
         for field, value in invalid.items():
             attr = field.replace("-", "_").lower()
             assert getattr(dist.metadata, attr) == value

From d4326dd6896caa87dbf928610fa160a536b69323 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 24 Mar 2025 18:57:50 +0000
Subject: [PATCH 1600/1761] Allow PyPy to fail on CI.

Failures are apparently unrelated but need more investigation.
---
 .github/workflows/main.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 6e5892f2ed..14f3151d34 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -64,7 +64,8 @@ jobs:
           python: "3.10"
           distutils: stdlib
     runs-on: ${{ matrix.platform }}
-    continue-on-error: ${{ matrix.python == '3.14' }}
+    continue-on-error: ${{ matrix.python == '3.14' || matrix.python == 'pypy3.10' }}
+    # XXX: pypy seems to be flaky with unrelated tests in #6345
     env:
       SETUPTOOLS_USE_DISTUTILS: ${{ matrix.distutils || 'local' }}
     timeout-minutes: 75

From 3c88de1c62420c1e0161f48e34af6424ac009aa5 Mon Sep 17 00:00:00 2001
From: Anderson Bravalheri 
Date: Mon, 24 Mar 2025 19:26:25 +0000
Subject: [PATCH 1601/1761] =?UTF-8?q?Bump=20version:=2078.0.1=20=E2=86=92?=
 =?UTF-8?q?=2078.0.2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              |  2 +-
 NEWS.rst                      | 10 ++++++++++
 newsfragments/4911.bugfix.rst |  2 --
 pyproject.toml                |  2 +-
 4 files changed, 12 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4911.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index f506444c07..afe1c33839 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 78.0.1
+current_version = 78.0.2
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index e3e415279c..b81de16a2f 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,13 @@
+v78.0.2
+=======
+
+Bugfixes
+--------
+
+- Postponed removals of deprecated dash-separated and uppercase fields in ``setup.cfg``.
+  All packages with deprecated configurations are advised to migrate before 2026. (#4911)
+
+
 v78.0.1
 =======
 
diff --git a/newsfragments/4911.bugfix.rst b/newsfragments/4911.bugfix.rst
deleted file mode 100644
index 49db1a49ed..0000000000
--- a/newsfragments/4911.bugfix.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Postponed removals of deprecated dash-separated and uppercase fields in ``setup.cfg``.
-All packages with deprecated configurations are advised to migrate before 2026.
diff --git a/pyproject.toml b/pyproject.toml
index ded145dca1..10abdf43c3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "78.0.1"
+version = "78.0.2"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From efa2eb231c82f6630468ad358cfe4b65a013b690 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 25 Mar 2025 17:42:15 -0400
Subject: [PATCH 1602/1761] Restore access to _get_vc_env with a warning.

Fixes pypa/setuptools#4874
---
 distutils/_msvccompiler.py     | 11 +++++++++++
 newsfragments/4874.feature.rst |  1 +
 2 files changed, 12 insertions(+)
 create mode 100644 newsfragments/4874.feature.rst

diff --git a/distutils/_msvccompiler.py b/distutils/_msvccompiler.py
index 8471ccab28..d07c86ef8e 100644
--- a/distutils/_msvccompiler.py
+++ b/distutils/_msvccompiler.py
@@ -1,5 +1,16 @@
+import warnings
+
 from .compilers.C import msvc
 
 __all__ = ["MSVCCompiler"]
 
 MSVCCompiler = msvc.Compiler
+
+
+def __getattr__(name):
+    if name == '_get_vc_env':
+        warnings.warn(
+            "_get_vc_env is private; find an alternative (pypa/distutils#340)"
+        )
+        return msvc._get_vc_env
+    raise AttributeError(name)
diff --git a/newsfragments/4874.feature.rst b/newsfragments/4874.feature.rst
new file mode 100644
index 0000000000..a82f699fe9
--- /dev/null
+++ b/newsfragments/4874.feature.rst
@@ -0,0 +1 @@
+Restore access to _get_vc_env with a warning.
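
The diff above relies on a module-level ``__getattr__`` (:pep:`562`): regular
attributes resolve at full speed, and only lookups that would otherwise fail
reach the warning path. The same shim pattern in a generic module (names are
placeholders)::

    # legacy_shim.py
    import warnings

    def _old_private_helper():
        """Implementation kept alive for stragglers."""

    def __getattr__(name):
        # Called only when normal module attribute lookup fails.
        if name == "old_private_helper":
            warnings.warn(
                "old_private_helper is private; switch to a supported API",
                stacklevel=2,
            )
            return _old_private_helper
        raise AttributeError(name)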

From 6ead555c5fb29bc57fe6105b1bffc163f56fd558 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 25 Mar 2025 18:29:17 -0400
Subject: [PATCH 1603/1761] =?UTF-8?q?Bump=20version:=2078.0.2=20=E2=86=92?=
 =?UTF-8?q?=2078.1.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               | 2 +-
 NEWS.rst                       | 9 +++++++++
 newsfragments/4874.feature.rst | 1 -
 pyproject.toml                 | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4874.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index afe1c33839..16058d4c24 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 78.0.2
+current_version = 78.1.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index b81de16a2f..554caf867c 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v78.1.0
+=======
+
+Features
+--------
+
+- Restore access to _get_vc_env with a warning. (#4874)
+
+
 v78.0.2
 =======
 
diff --git a/newsfragments/4874.feature.rst b/newsfragments/4874.feature.rst
deleted file mode 100644
index a82f699fe9..0000000000
--- a/newsfragments/4874.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restore access to _get_vc_env with a warning.
diff --git a/pyproject.toml b/pyproject.toml
index 10abdf43c3..e4d0441317 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "78.0.2"
+version = "78.1.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From d751068fd2627d6d8f1729e39cbcd8119049998f Mon Sep 17 00:00:00 2001
From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
Date: Mon, 31 Mar 2025 22:42:14 +0300
Subject: [PATCH 1604/1761] Fix typo: pyproject.license -> project.license

---
 NEWS.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEWS.rst b/NEWS.rst
index 554caf867c..304263eecd 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -119,7 +119,7 @@ Deprecations and Removals
 - Deprecated ``project.license`` as a TOML table in
   ``pyproject.toml``\. Users are expected to move towards using
   ``project.license-files`` and/or SPDX expressions (as strings) in
-  ``pyproject.license``\.
+  ``project.license``\.
   See PEP :pep:`639 <639#deprecate-license-key-table-subkeys>`. (#4840)
 - Added simple validation for given glob patterns in ``license-files``\:
   a warning will be generated if no file is matched.

From 2f093b54305e508eb4239e8c9fd94d4b02da9620 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 13 Apr 2025 16:21:27 -0400
Subject: [PATCH 1605/1761] Remove latent comment.

---
 distutils/command/config.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/distutils/command/config.py b/distutils/command/config.py
index e087edd607..cb5c2a028a 100644
--- a/distutils/command/config.py
+++ b/distutils/command/config.py
@@ -89,8 +89,6 @@ def _check_compiler(self):
         """Check that 'self.compiler' really is a CCompiler object;
         if not, make it one.
         """
-        # We do this late, and only on-demand, because this is an expensive
-        # import.
         if not isinstance(self.compiler, CCompiler):
             self.compiler = new_compiler(
                 compiler=self.compiler, dry_run=self.dry_run, force=True

From 333514c28cc4fd507762ce388e4334b404c9dfd6 Mon Sep 17 00:00:00 2001
From: Christopher Head 
Date: Mon, 14 Apr 2025 18:27:21 -0700
Subject: [PATCH 1606/1761] Fix broken link

---
 NEWS.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEWS.rst b/NEWS.rst
index 304263eecd..2275dffcbe 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -501,7 +501,7 @@ v72.2.0
 Features
 --------
 
-- Merged with pypa/distutils@b7ee725f3 including: Support for Pathlike objects in data files and extensions (pypa/distutils#272, pypa/distutils#237), native support for C++ compilers (pypa/distuils#228) and removed unused get_msvcr() (pypa/distutils#274). (#4538)
+- Merged with pypa/distutils@b7ee725f3 including: Support for Pathlike objects in data files and extensions (pypa/distutils#272, pypa/distutils#237), native support for C++ compilers (pypa/distutils#228) and removed unused get_msvcr() (pypa/distutils#274). (#4538)
 
 
 v72.1.0

From 222190724924be34eaa45a20b56895a2bb090d4a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 15 Apr 2025 18:49:48 -0400
Subject: [PATCH 1607/1761] Remove test attempting to capture that a pure
 distutils program can be built.

This expectation is discouraged, if not deprecated, and shouldn't be captured here.
---
 setuptools/tests/test_easy_install.py | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index 59becce991..ee824dbc26 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -27,7 +27,6 @@
 from setuptools.dist import Distribution
 from setuptools.tests.server import MockServer
 
-from . import contexts
 from .textwrap import DALS
 
 import distutils.errors
@@ -384,24 +383,6 @@ def test_local_index(self, foo_package, install_target):
         assert actual == expected
 
 
-@pytest.fixture
-def distutils_package():
-    distutils_setup_py = SETUP_PY.replace(
-        'from setuptools import setup',
-        'from distutils.core import setup',
-    )
-    with contexts.tempdir(cd=os.chdir):
-        with open('setup.py', 'w', encoding="utf-8") as f:
-            f.write(distutils_setup_py)
-        yield
-
-
-@pytest.mark.usefixtures("distutils_package")
-class TestDistutilsPackage:
-    def test_bdist_egg_available_on_distutils_pkg(self):
-        subprocess.check_call([sys.executable, 'setup.py', 'bdist_egg'])
-
-
 @pytest.fixture
 def mock_index():
     # set up a server which will simulate an alternate package index.

From a088da48addf506975642e2b0f660f905aa963cc Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Tue, 15 Apr 2025 19:20:02 -0400
Subject: [PATCH 1608/1761] Add news fragment.

---
 newsfragments/2908.removal.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/2908.removal.rst

diff --git a/newsfragments/2908.removal.rst b/newsfragments/2908.removal.rst
new file mode 100644
index 0000000000..67c9a40c83
--- /dev/null
+++ b/newsfragments/2908.removal.rst
@@ -0,0 +1 @@
+Removed support for the easy_install command including the sandbox module.

From f7a9d33c465f7ca9eee0629656ea1f053b120fc3 Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum 
Date: Thu, 17 Apr 2025 14:01:34 -0600
Subject: [PATCH 1609/1761] Fix error message string formatting

---
 setuptools/command/bdist_wheel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/bdist_wheel.py b/setuptools/command/bdist_wheel.py
index 1e3f637bcc..91ed00170e 100644
--- a/setuptools/command/bdist_wheel.py
+++ b/setuptools/command/bdist_wheel.py
@@ -285,7 +285,7 @@ def _validate_py_limited_api(self) -> None:
             raise ValueError(
                 f"`py_limited_api={self.py_limited_api!r}` not supported. "
                 "`Py_LIMITED_API` is currently incompatible with "
-                "`Py_GIL_DISABLED`."
+                "`Py_GIL_DISABLED`. "
                 "See https://github.com/python/cpython/issues/111506."
             )
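
The one-character fix above works because adjacent Python string literals
concatenate with no separator, so a missing trailing space silently fuses the
two sentences. A quick demonstration::

    msg = (
        "`Py_LIMITED_API` is currently incompatible with "
        "`Py_GIL_DISABLED`."  # no trailing space here...
        "See https://github.com/python/cpython/issues/111506."
    )
    assert "`Py_GIL_DISABLED`.See" in msg  # ...so the sentences run together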
 

From d8390feaa99091d1ba9626bec0e4ba7072fc507a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Apr 2025 12:49:55 -0400
Subject: [PATCH 1610/1761] Extract _resolve_download_filename with test.

---
 setuptools/package_index.py | 20 ++++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index 1a6abebcda..b317735097 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -807,9 +807,16 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
             else:
                 raise DistutilsError(f"Download error for {url}: {v}") from v
 
-    def _download_url(self, url, tmpdir):
-        # Determine download filename
-        #
+    @staticmethod
+    def _resolve_download_filename(url, tmpdir):
+        """
+        >>> du = PackageIndex._resolve_download_filename
+        >>> root = getfixture('tmp_path')
+        >>> url = 'https://files.pythonhosted.org/packages/a9/5a/0db.../setuptools-78.1.0.tar.gz'
+        >>> import pathlib
+        >>> str(pathlib.Path(du(url, root)).relative_to(root))
+        'setuptools-78.1.0.tar.gz'
+        """
         name, _fragment = egg_info_for_url(url)
         if name:
             while '..' in name:
@@ -820,8 +827,13 @@ def _download_url(self, url, tmpdir):
         if name.endswith('.egg.zip'):
             name = name[:-4]  # strip the extra .zip before download
 
-        filename = os.path.join(tmpdir, name)
+        return os.path.join(tmpdir, name)
 
+    def _download_url(self, url, tmpdir):
+        """
+        Determine the download filename.
+        """
+        filename = self._resolve_download_filename(url, tmpdir)
         return self._download_vcs(url, filename) or self._download_other(url, filename)
 
     @staticmethod

From 250a6d17978f9f6ac3ac887091f2d32886fbbb0b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Apr 2025 13:03:47 -0400
Subject: [PATCH 1611/1761] Add a check to ensure the name resolves relative to
 the tmpdir.

Closes #4946
---
 setuptools/package_index.py | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index b317735097..a8f868e22b 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -810,12 +810,20 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
     @staticmethod
     def _resolve_download_filename(url, tmpdir):
         """
+        >>> import pathlib
         >>> du = PackageIndex._resolve_download_filename
         >>> root = getfixture('tmp_path')
         >>> url = 'https://files.pythonhosted.org/packages/a9/5a/0db.../setuptools-78.1.0.tar.gz'
-        >>> import pathlib
         >>> str(pathlib.Path(du(url, root)).relative_to(root))
         'setuptools-78.1.0.tar.gz'
+
+        Ensures the target is always in tmpdir.
+
+        >>> url = 'https://anyhost/%2fhome%2fuser%2f.ssh%2fauthorized_keys'
+        >>> du(url, root)
+        Traceback (most recent call last):
+        ...
+        ValueError: Invalid filename...
         """
         name, _fragment = egg_info_for_url(url)
         if name:
@@ -827,7 +835,13 @@ def _resolve_download_filename(url, tmpdir):
         if name.endswith('.egg.zip'):
             name = name[:-4]  # strip the extra .zip before download
 
-        return os.path.join(tmpdir, name)
+        filename = os.path.join(tmpdir, name)
+
+        # ensure path resolves within the tmpdir
+        if not filename.startswith(str(tmpdir)):
+            raise ValueError(f"Invalid filename {filename}")
+
+        return filename
 
     def _download_url(self, url, tmpdir):
         """

From e409e8002932f2b86aae7b1abc8f8c2ebf96df2c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Apr 2025 13:35:30 -0400
Subject: [PATCH 1612/1761] Extract _sanitize method for sanitizing the
 filename.

---
 setuptools/package_index.py | 58 +++++++++++++++++++++++--------------
 1 file changed, 37 insertions(+), 21 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index a8f868e22b..fdd0c825bf 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -808,7 +808,36 @@ def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
                 raise DistutilsError(f"Download error for {url}: {v}") from v
 
     @staticmethod
-    def _resolve_download_filename(url, tmpdir):
+    def _sanitize(name):
+        r"""
+        Replace unsafe path directives with underscores.
+
+        >>> san = PackageIndex._sanitize
+        >>> san('/home/user/.ssh/authorized_keys')
+        '_home_user_.ssh_authorized_keys'
+        >>> san('..\\foo\\bing')
+        '__foo_bing'
+        >>> san('D:bar')
+        'D_bar'
+        >>> san('C:\\bar')
+        'C__bar'
+        >>> san('foo..bar')
+        'foo..bar'
+        >>> san('D:../foo')
+        'D___foo'
+        """
+        pattern = '|'.join((
+            # drive letters
+            r':',
+            # path separators
+            r'[/\\]',
+            # parent dirs
+            r'(?:(?<=([/\\]|:))\.\.(?=[/\\]|$))|(?:^\.\.(?=[/\\]|$))',
+        ))
+        return re.sub(pattern, r'_', name)
+
+    @classmethod
+    def _resolve_download_filename(cls, url, tmpdir):
         """
         >>> import pathlib
         >>> du = PackageIndex._resolve_download_filename
@@ -816,32 +845,19 @@ def _resolve_download_filename(url, tmpdir):
         >>> url = 'https://files.pythonhosted.org/packages/a9/5a/0db.../setuptools-78.1.0.tar.gz'
         >>> str(pathlib.Path(du(url, root)).relative_to(root))
         'setuptools-78.1.0.tar.gz'
-
-        Ensures the target is always in tmpdir.
-
-        >>> url = 'https://anyhost/%2fhome%2fuser%2f.ssh%2fauthorized_keys'
-        >>> du(url, root)
-        Traceback (most recent call last):
-        ...
-        ValueError: Invalid filename...
         """
         name, _fragment = egg_info_for_url(url)
-        if name:
-            while '..' in name:
-                name = name.replace('..', '.').replace('\\', '_')
-        else:
-            name = "__downloaded__"  # default if URL has no path contents
+        name = cls._sanitize(
+            name
+            or
+            # default if URL has no path contents
+            '__downloaded__'
+        )
 
         if name.endswith('.egg.zip'):
             name = name[:-4]  # strip the extra .zip before download
 
-        filename = os.path.join(tmpdir, name)
-
-        # ensure path resolves within the tmpdir
-        if not filename.startswith(str(tmpdir)):
-            raise ValueError(f"Invalid filename {filename}")
-
-        return filename
+        return os.path.join(tmpdir, name)
 
     def _download_url(self, url, tmpdir):
         """

From 2ca4a9fe4758fcd39d771d3d3a5b4840aacebdf7 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Apr 2025 13:39:54 -0400
Subject: [PATCH 1613/1761] Rely on re.sub to perform the conditional replacement in one
 expression.

---
 setuptools/package_index.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/package_index.py b/setuptools/package_index.py
index fdd0c825bf..3500c2d86f 100644
--- a/setuptools/package_index.py
+++ b/setuptools/package_index.py
@@ -854,8 +854,8 @@ def _resolve_download_filename(cls, url, tmpdir):
             '__downloaded__'
         )
 
-        if name.endswith('.egg.zip'):
-            name = name[:-4]  # strip the extra .zip before download
+        # strip any extra .zip before download
+        name = re.sub(r'\.egg\.zip$', '.egg', name)
 
         return os.path.join(tmpdir, name)
 

From 8faf1d7e0ca309983252e4f21837b73ee12e960f Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Apr 2025 13:41:15 -0400
Subject: [PATCH 1614/1761] Add news fragment.

---
 newsfragments/4946.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/4946.bugfix.rst

diff --git a/newsfragments/4946.bugfix.rst b/newsfragments/4946.bugfix.rst
new file mode 100644
index 0000000000..b9100dc313
--- /dev/null
+++ b/newsfragments/4946.bugfix.rst
@@ -0,0 +1 @@
+More fully sanitized the filename in PackageIndex._download.

From 8e4868a036b7fae3208d16cb4e5fe6d63c3752df Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Apr 2025 14:02:36 -0400
Subject: [PATCH 1615/1761] =?UTF-8?q?Bump=20version:=2078.1.0=20=E2=86=92?=
 =?UTF-8?q?=2078.1.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4946.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4946.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 16058d4c24..e949526899 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 78.1.0
+current_version = 78.1.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 304263eecd..c3718af537 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v78.1.1
+=======
+
+Bugfixes
+--------
+
+- More fully sanitized the filename in PackageIndex._download. (#4946)
+
+
 v78.1.0
 =======
 
diff --git a/newsfragments/4946.bugfix.rst b/newsfragments/4946.bugfix.rst
deleted file mode 100644
index b9100dc313..0000000000
--- a/newsfragments/4946.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-More fully sanitized the filename in PackageIndex._download.
diff --git a/pyproject.toml b/pyproject.toml
index 791d7013e8..08da18bbd7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "78.1.0"
+version = "78.1.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From d157b0d5d23b076187942194ffe08fcf2c11c750 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Apr 2025 22:45:48 -0400
Subject: [PATCH 1616/1761] Remove support for egg-based 'install' and
 'easy_install' distutils commands.

---
 setuptools/command/install.py         | 38 +-----------
 setuptools/tests/test_easy_install.py | 84 ---------------------------
 2 files changed, 2 insertions(+), 120 deletions(-)

diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index 15ef364688..ba667c5d16 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -1,16 +1,12 @@
 from __future__ import annotations
 
-import glob
 import inspect
 import platform
 from collections.abc import Callable
-from typing import TYPE_CHECKING, Any, ClassVar, cast
-
-import setuptools
+from typing import TYPE_CHECKING, Any, ClassVar
 
 from ..dist import Distribution
 from ..warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
-from .bdist_egg import bdist_egg as bdist_egg_cls
 
 import distutils.command.install as orig
 from distutils.errors import DistutilsArgError
@@ -144,37 +140,7 @@ def _called_from_setup(run_frame):
         return False
 
     def do_egg_install(self) -> None:
-        easy_install = self.distribution.get_command_class('easy_install')
-
-        cmd = cast(
-            # We'd want to cast easy_install as type[easy_install_cls] but a bug in
-            # mypy makes it think easy_install() returns a Command on Python 3.12+
-            # https://github.com/python/mypy/issues/18088
-            easy_install_cls,
-            easy_install(  # type: ignore[call-arg]
-                self.distribution,
-                args="x",
-                root=self.root,
-                record=self.record,
-            ),
-        )
-        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
-        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed
-
-        # pick up setup-dir .egg files only: no .egg-info
-        cmd.package_index.scan(glob.glob('*.egg'))
-
-        self.run_command('bdist_egg')
-        bdist_egg = cast(bdist_egg_cls, self.distribution.get_command_obj('bdist_egg'))
-        args = [bdist_egg.egg_output]
-
-        if setuptools.bootstrap_install_from:
-            # Bootstrap self-installation of setuptools
-            args.insert(0, setuptools.bootstrap_install_from)
-
-        cmd.args = args
-        cmd.run(show_deprecation=False)
-        setuptools.bootstrap_install_from = None
+        raise NotImplementedError("Support for egg-based install has been removed.")
 
 
 # XXX Python 3.1 doesn't see _nc if this is inside the class
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index ee824dbc26..cd81aafdb7 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -25,7 +25,6 @@
 from setuptools._normalization import safer_name
 from setuptools.command.easy_install import PthDistributions
 from setuptools.dist import Distribution
-from setuptools.tests.server import MockServer
 
 from .textwrap import DALS
 
@@ -383,89 +382,6 @@ def test_local_index(self, foo_package, install_target):
         assert actual == expected
 
 
-@pytest.fixture
-def mock_index():
-    # set up a server which will simulate an alternate package index.
-    p_index = MockServer()
-    if p_index.server_port == 0:
-        # Some platforms (Jython) don't find a port to which to bind,
-        # so skip test for them.
-        pytest.skip("could not find a valid port")
-    p_index.start()
-    return p_index
-
-
-class TestInstallRequires:
-    def test_setup_install_includes_dependencies(self, tmp_path, mock_index):
-        """
-        When ``python setup.py install`` is called directly, it will use easy_install
-        to fetch dependencies.
-        """
-        # TODO: Remove these tests once `setup.py install` is completely removed
-        project_root = tmp_path / "project"
-        project_root.mkdir(exist_ok=True)
-        install_root = tmp_path / "install"
-        install_root.mkdir(exist_ok=True)
-
-        self.create_project(project_root)
-        cmd = [
-            sys.executable,
-            '-c',
-            '__import__("setuptools").setup()',
-            'install',
-            '--install-base',
-            str(install_root),
-            '--install-lib',
-            str(install_root),
-            '--install-headers',
-            str(install_root),
-            '--install-scripts',
-            str(install_root),
-            '--install-data',
-            str(install_root),
-            '--install-purelib',
-            str(install_root),
-            '--install-platlib',
-            str(install_root),
-        ]
-        env = {**os.environ, "__EASYINSTALL_INDEX": mock_index.url}
-        cp = subprocess.run(
-            cmd,
-            cwd=str(project_root),
-            env=env,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT,
-            text=True,
-            encoding="utf-8",
-        )
-        assert cp.returncode != 0
-        try:
-            assert '/does-not-exist/' in {r.path for r in mock_index.requests}
-            assert next(
-                line
-                for line in cp.stdout.splitlines()
-                if "not find suitable distribution for" in line
-                and "does-not-exist" in line
-            )
-        except Exception:
-            if "failed to get random numbers" in cp.stdout:
-                pytest.xfail(f"{sys.platform} failure - {cp.stdout}")
-            raise
-
-    def create_project(self, root):
-        config = """
-        [metadata]
-        name = project
-        version = 42
-
-        [options]
-        install_requires = does-not-exist
-        py_modules = mod
-        """
-        (root / 'setup.cfg').write_text(DALS(config), encoding="utf-8")
-        (root / 'mod.py').touch()
-
-
 def make_trivial_sdist(dist_path, distname, version):
     """
     Create a simple sdist tarball at dist_path, containing just a simple

From e365bc232fa724e6896a3be75b8cce013029f68e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Apr 2025 22:49:09 -0400
Subject: [PATCH 1617/1761] Remove tests.server module, no longer used.

---
 setuptools/tests/server.py | 86 --------------------------------------
 1 file changed, 86 deletions(-)
 delete mode 100644 setuptools/tests/server.py

diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py
deleted file mode 100644
index 623a49a550..0000000000
--- a/setuptools/tests/server.py
+++ /dev/null
@@ -1,86 +0,0 @@
-"""Basic http server for tests to simulate PyPI or custom indexes"""
-
-import http.server
-import os
-import threading
-import time
-import urllib.parse
-import urllib.request
-
-
-class IndexServer(http.server.HTTPServer):
-    """Basic single-threaded http server simulating a package index
-
-    You can use this server in unittest like this::
-        s = IndexServer()
-        s.start()
-        index_url = s.base_url() + 'mytestindex'
-        # do some test requests to the index
-        # The index files should be located in setuptools/tests/indexes
-        s.stop()
-    """
-
-    def __init__(
-        self,
-        server_address=('', 0),
-        RequestHandlerClass=http.server.SimpleHTTPRequestHandler,
-    ):
-        http.server.HTTPServer.__init__(self, server_address, RequestHandlerClass)
-        self._run = True
-
-    def start(self):
-        self.thread = threading.Thread(target=self.serve_forever)
-        self.thread.start()
-
-    def stop(self):
-        "Stop the server"
-
-        # Let the server finish the last request and wait for a new one.
-        time.sleep(0.1)
-
-        self.shutdown()
-        self.thread.join()
-        self.socket.close()
-
-    def base_url(self):
-        port = self.server_port
-        return f'http://127.0.0.1:{port}/setuptools/tests/indexes/'
-
-
-class RequestRecorder(http.server.BaseHTTPRequestHandler):
-    def do_GET(self):
-        requests = vars(self.server).setdefault('requests', [])
-        requests.append(self)
-        self.send_response(200, 'OK')
-
-
-class MockServer(http.server.HTTPServer, threading.Thread):
-    """
-    A simple HTTP Server that records the requests made to it.
-    """
-
-    def __init__(self, server_address=('', 0), RequestHandlerClass=RequestRecorder):
-        http.server.HTTPServer.__init__(self, server_address, RequestHandlerClass)
-        threading.Thread.__init__(self)
-        self.daemon = True
-        self.requests = []
-
-    def run(self):
-        self.serve_forever()
-
-    @property
-    def netloc(self):
-        return f'localhost:{self.server_port}'
-
-    @property
-    def url(self):
-        return f'http://{self.netloc}/'
-
-
-def path_to_url(path, authority=None):
-    """Convert a path to a file: URL."""
-    path = os.path.normpath(os.path.abspath(path))
-    base = 'file:'
-    if authority is not None:
-        base += '//' + authority
-    return urllib.parse.urljoin(base, urllib.request.pathname2url(path))

From f89e652a79ecd4afbb71eabaf04a6709e11a4d5a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 20 Apr 2025 09:30:09 -0400
Subject: [PATCH 1618/1761] Removed support for the 'legacy-editable' feature.

According to [the docs](https://github.com/pypa/setuptools/blob/8e4868a036b7fae3208d16cb4e5fe6d63c3752df/docs/userguide/development_mode.rst#legacy-behavior), pip no longer relies on this feature, and it has been available for over two years.
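
With the flag gone, the PEP 660 hooks are exposed unconditionally. A minimal
sketch of a frontend driving them (run from a project root; the "dist" target
directory is illustrative):

  from setuptools import build_meta

  # Hook names match the exports made unconditional below.
  reqs = build_meta.get_requires_for_build_editable()
  wheel_name = build_meta.build_editable("dist")  # writes an editable .whl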
---
 docs/userguide/development_mode.rst |  7 ----
 newsfragments/917.removal.rst       |  1 +
 setuptools/build_meta.py            | 62 ++++++++++++-----------------
 setuptools/tests/test_build_meta.py | 24 -----------
 4 files changed, 26 insertions(+), 68 deletions(-)
 create mode 100644 newsfragments/917.removal.rst

diff --git a/docs/userguide/development_mode.rst b/docs/userguide/development_mode.rst
index 9a79b08a93..3eabe87fcb 100644
--- a/docs/userguide/development_mode.rst
+++ b/docs/userguide/development_mode.rst
@@ -197,13 +197,6 @@ works (still within the context of :pep:`660`).
    Users are encouraged to try out the new editable installation techniques
    and make the necessary adaptations.
 
-.. note::
-   Newer versions of ``pip`` no longer run the fallback command
-   ``python setup.py develop`` when the ``pyproject.toml`` file is present.
-   This means that setting the environment variable
-   ``SETUPTOOLS_ENABLE_FEATURES="legacy-editable"``
-   will have no effect when installing a package with ``pip``.
-
 
 How editable installations work
 -------------------------------
diff --git a/newsfragments/917.removal.rst b/newsfragments/917.removal.rst
new file mode 100644
index 0000000000..debf9bdc3a
--- /dev/null
+++ b/newsfragments/917.removal.rst
@@ -0,0 +1 @@
+Removed support for 'legacy-editable' installs.
diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py
index 00fa5e1f70..8f2e930c73 100644
--- a/setuptools/build_meta.py
+++ b/setuptools/build_meta.py
@@ -67,9 +67,6 @@
     'SetupRequirementsError',
 ]
 
-SETUPTOOLS_ENABLE_FEATURES = os.getenv("SETUPTOOLS_ENABLE_FEATURES", "").lower()
-LEGACY_EDITABLE = "legacy-editable" in SETUPTOOLS_ENABLE_FEATURES.replace("_", "-")
-
 
 class SetupRequirementsError(BaseException):
     def __init__(self, specifiers) -> None:
@@ -457,37 +454,30 @@ def _get_dist_info_dir(self, metadata_directory: StrPath | None) -> str | None:
         assert len(dist_info_candidates) <= 1
         return str(dist_info_candidates[0]) if dist_info_candidates else None
 
-    if not LEGACY_EDITABLE:
-        # PEP660 hooks:
-        # build_editable
-        # get_requires_for_build_editable
-        # prepare_metadata_for_build_editable
-        def build_editable(
-            self,
-            wheel_directory: StrPath,
-            config_settings: _ConfigSettings = None,
-            metadata_directory: StrPath | None = None,
-        ):
-            # XXX can or should we hide our editable_wheel command normally?
-            info_dir = self._get_dist_info_dir(metadata_directory)
-            opts = ["--dist-info-dir", info_dir] if info_dir else []
-            cmd = ["editable_wheel", *opts, *self._editable_args(config_settings)]
-            with suppress_known_deprecation():
-                return self._build_with_temp_dir(
-                    cmd, ".whl", wheel_directory, config_settings
-                )
+    def build_editable(
+        self,
+        wheel_directory: StrPath,
+        config_settings: _ConfigSettings = None,
+        metadata_directory: StrPath | None = None,
+    ):
+        # XXX can or should we hide our editable_wheel command normally?
+        info_dir = self._get_dist_info_dir(metadata_directory)
+        opts = ["--dist-info-dir", info_dir] if info_dir else []
+        cmd = ["editable_wheel", *opts, *self._editable_args(config_settings)]
+        with suppress_known_deprecation():
+            return self._build_with_temp_dir(
+                cmd, ".whl", wheel_directory, config_settings
+            )
 
-        def get_requires_for_build_editable(
-            self, config_settings: _ConfigSettings = None
-        ):
-            return self.get_requires_for_build_wheel(config_settings)
+    def get_requires_for_build_editable(self, config_settings: _ConfigSettings = None):
+        return self.get_requires_for_build_wheel(config_settings)
 
-        def prepare_metadata_for_build_editable(
-            self, metadata_directory: StrPath, config_settings: _ConfigSettings = None
-        ):
-            return self.prepare_metadata_for_build_wheel(
-                metadata_directory, config_settings
-            )
+    def prepare_metadata_for_build_editable(
+        self, metadata_directory: StrPath, config_settings: _ConfigSettings = None
+    ):
+        return self.prepare_metadata_for_build_wheel(
+            metadata_directory, config_settings
+        )
 
 
 class _BuildMetaLegacyBackend(_BuildMetaBackend):
@@ -549,11 +539,9 @@ class _IncompatibleBdistWheel(SetuptoolsDeprecationWarning):
 prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel
 build_wheel = _BACKEND.build_wheel
 build_sdist = _BACKEND.build_sdist
-
-if not LEGACY_EDITABLE:
-    get_requires_for_build_editable = _BACKEND.get_requires_for_build_editable
-    prepare_metadata_for_build_editable = _BACKEND.prepare_metadata_for_build_editable
-    build_editable = _BACKEND.build_editable
+get_requires_for_build_editable = _BACKEND.get_requires_for_build_editable
+prepare_metadata_for_build_editable = _BACKEND.prepare_metadata_for_build_editable
+build_editable = _BACKEND.build_editable
 
 
 # The legacy backend
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py
index 624bba862e..57162fd6af 100644
--- a/setuptools/tests/test_build_meta.py
+++ b/setuptools/tests/test_build_meta.py
@@ -936,30 +936,6 @@ def test_sys_argv_passthrough(self, tmpdir_cwd):
         build_backend.build_sdist("temp")
 
 
-def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
-    pyproject = """
-    [build-system]
-    requires = ["setuptools"]
-    build-backend = "setuptools.build_meta"
-    [project]
-    name = "myproj"
-    version = "42"
-    """
-    path.build({"pyproject.toml": DALS(pyproject), "mymod.py": ""})
-
-    # First: sanity check
-    cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
-    output = venv.run(cmd, cwd=tmpdir).lower()
-    assert "running setup.py develop for myproj" not in output
-    assert "created wheel for myproj" in output
-
-    # Then: real test
-    env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
-    cmd = ["pip", "install", "--no-build-isolation", "-e", "."]
-    output = venv.run(cmd, cwd=tmpdir, env=env).lower()
-    assert "running setup.py develop for myproj" in output
-
-
 @pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
 def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path):
     """Setuptools should be resilient to setup.py with ``sys.exit(0)`` (#3973)."""

From 56962ec38bb53e1681de00dc5dc5b2e96b1b02b8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 20 Apr 2025 11:28:20 -0400
Subject: [PATCH 1619/1761] Bump version: 78.1.1 → 79.0.0

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/917.removal.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/917.removal.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index e949526899..ce3d207697 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 78.1.1
+current_version = 79.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index c3718af537..e7f33786a3 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v79.0.0
+=======
+
+Deprecations and Removals
+-------------------------
+
+- Removed support for 'legacy-editable' installs. (#917)
+
+
 v78.1.1
 =======
 
diff --git a/newsfragments/917.removal.rst b/newsfragments/917.removal.rst
deleted file mode 100644
index debf9bdc3a..0000000000
--- a/newsfragments/917.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Removed support for 'legacy-editable' installs.
diff --git a/pyproject.toml b/pyproject.toml
index 08da18bbd7..3ba37aa59d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "78.1.1"
+version = "79.0.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 54b9f786afdde209de6e16bfa0c60fd54d4ff6bb Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 19 Apr 2025 19:39:37 -0400
Subject: [PATCH 1620/1761] Restore return type for run_setup.
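
For context, ``typing.NoReturn`` tells type checkers the stub can only raise;
a minimal sketch of the annotated shape:

  from typing import NoReturn

  def run_setup(setup_script, setup_base, args) -> NoReturn:
      raise NotImplementedError("easy_install support has been removed")

  # Checkers now flag any caller that expects a value back from run_setup.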

Co-authored-by: Avasam 
---
 setuptools/command/easy_install.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 4a88077670..32bc197121 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1174,7 +1174,7 @@ def report_editable(self, spec, setup_script):
         python = sys.executable
         return '\n' + self.__editable_msg % locals()
 
-    def run_setup(self, setup_script, setup_base, args):
+    def run_setup(self, setup_script, setup_base, args) -> NoReturn:
         raise NotImplementedError("easy_install support has been removed")
 
     def build_and_install(self, setup_script, setup_base):

From 98e6b4cac625c6c13b718eeccea42d00d75f2577 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 20 Apr 2025 17:22:23 -0400
Subject: [PATCH 1621/1761] Replace develop command with redirection to pip.
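
The rewritten command is a thin shim over pip, roughly equivalent to:

  $ python -m pip install -e . --use-pep517 --target <install-dir>

(the --target pair is appended only when --install-dir is given).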

---
 setuptools/command/develop.py    | 196 +++----------------------------
 setuptools/tests/test_develop.py |  61 ----------
 2 files changed, 14 insertions(+), 243 deletions(-)

diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 7eee29d491..14346815fb 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -1,195 +1,27 @@
-import glob
-import os
+import subprocess
+import sys
 
-import setuptools
-from setuptools import _normalization, _path, namespaces
-from setuptools.command.easy_install import easy_install
+from setuptools import Command
 
-from ..unicode_utils import _read_utf8_with_fallback
 
-from distutils import log
-from distutils.errors import DistutilsOptionError
-from distutils.util import convert_path
-
-
-class develop(namespaces.DevelopInstaller, easy_install):
+class develop(Command):
     """Set up package for development"""
 
-    description = "install package in 'development mode'"
-
-    user_options = easy_install.user_options + [
-        ("uninstall", "u", "Uninstall this source package"),
-        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
+    user_options = [
+        ("install-dir=", "d", "install package to DIR"),
     ]
 
-    boolean_options = easy_install.boolean_options + ['uninstall']
-
-    command_consumes_arguments = False  # override base
+    install_dir = None
 
     def run(self):
-        if self.uninstall:
-            self.multi_version = True
-            self.uninstall_link()
-            self.uninstall_namespaces()
-        else:
-            self.install_for_development()
-        self.warn_deprecated_options()
+        cmd = [sys.executable, '-m', 'pip', 'install', '-e', '.', '--use-pep517'] + [
+            '--target',
+            self.install_dir,
+        ] * bool(self.install_dir)
+        subprocess.check_call(cmd)
 
     def initialize_options(self):
-        self.uninstall = None
-        self.egg_path = None
-        easy_install.initialize_options(self)
-        self.setup_path = None
-        self.always_copy_from = '.'  # always copy eggs installed in curdir
+        pass
 
     def finalize_options(self) -> None:
-        import pkg_resources
-
-        ei = self.get_finalized_command("egg_info")
-        self.args = [ei.egg_name]
-
-        easy_install.finalize_options(self)
-        self.expand_basedirs()
-        self.expand_dirs()
-        # pick up setup-dir .egg files only: no .egg-info
-        self.package_index.scan(glob.glob('*.egg'))
-
-        egg_link_fn = (
-            _normalization.filename_component_broken(ei.egg_name) + '.egg-link'
-        )
-        self.egg_link = os.path.join(self.install_dir, egg_link_fn)
-        self.egg_base = ei.egg_base
-        if self.egg_path is None:
-            self.egg_path = os.path.abspath(ei.egg_base)
-
-        target = _path.normpath(self.egg_base)
-        egg_path = _path.normpath(os.path.join(self.install_dir, self.egg_path))
-        if egg_path != target:
-            raise DistutilsOptionError(
-                "--egg-path must be a relative path from the install"
-                " directory to " + target
-            )
-
-        # Make a distribution for the package's source
-        self.dist = pkg_resources.Distribution(
-            target,
-            pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
-            project_name=ei.egg_name,
-        )
-
-        self.setup_path = self._resolve_setup_path(
-            self.egg_base,
-            self.install_dir,
-            self.egg_path,
-        )
-
-    @staticmethod
-    def _resolve_setup_path(egg_base, install_dir, egg_path):
-        """
-        Generate a path from egg_base back to '.' where the
-        setup script resides and ensure that path points to the
-        setup path from $install_dir/$egg_path.
-        """
-        path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
-        if path_to_setup != os.curdir:
-            path_to_setup = '../' * (path_to_setup.count('/') + 1)
-        resolved = _path.normpath(os.path.join(install_dir, egg_path, path_to_setup))
-        curdir = _path.normpath(os.curdir)
-        if resolved != curdir:
-            raise DistutilsOptionError(
-                "Can't get a consistent path to setup script from"
-                " installation directory",
-                resolved,
-                curdir,
-            )
-        return path_to_setup
-
-    def install_for_development(self) -> None:
-        self.run_command('egg_info')
-
-        # Build extensions in-place
-        self.reinitialize_command('build_ext', inplace=True)
-        self.run_command('build_ext')
-
-        if setuptools.bootstrap_install_from:
-            self.easy_install(setuptools.bootstrap_install_from)
-            setuptools.bootstrap_install_from = None
-
-        self.install_namespaces()
-
-        # create an .egg-link in the installation dir, pointing to our egg
-        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
-        if not self.dry_run:
-            with open(self.egg_link, "w", encoding="utf-8") as f:
-                f.write(self.egg_path + "\n" + self.setup_path)
-        # postprocess the installed distro, fixing up .pth, installing scripts,
-        # and handling requirements
-        self.process_distribution(None, self.dist, not self.no_deps)
-
-    def uninstall_link(self) -> None:
-        if os.path.exists(self.egg_link):
-            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
-
-            contents = [
-                line.rstrip()
-                for line in _read_utf8_with_fallback(self.egg_link).splitlines()
-            ]
-
-            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
-                log.warn("Link points to %s: uninstall aborted", contents)
-                return
-            if not self.dry_run:
-                os.unlink(self.egg_link)
-        if not self.dry_run:
-            self.update_pth(self.dist)  # remove any .pth link to us
-        if self.distribution.scripts:
-            # XXX should also check for entry point scripts!
-            log.warn("Note: you must uninstall or replace scripts manually!")
-
-    def install_egg_scripts(self, dist):
-        if dist is not self.dist:
-            # Installing a dependency, so fall back to normal behavior
-            return easy_install.install_egg_scripts(self, dist)
-
-        # create wrapper scripts in the script dir, pointing to dist.scripts
-
-        # new-style...
-        self.install_wrapper_scripts(dist)
-
-        # ...and old-style
-        for script_name in self.distribution.scripts or []:
-            script_path = os.path.abspath(convert_path(script_name))
-            script_name = os.path.basename(script_path)
-            script_text = _read_utf8_with_fallback(script_path)
-            self.install_script(dist, script_name, script_text, script_path)
-
-        return None
-
-    def install_wrapper_scripts(self, dist):
-        dist = VersionlessRequirement(dist)
-        return easy_install.install_wrapper_scripts(self, dist)
-
-
-class VersionlessRequirement:
-    """
-    Adapt a pkg_resources.Distribution to simply return the project
-    name as the 'requirement' so that scripts will work across
-    multiple versions.
-
-    >>> from pkg_resources import Distribution
-    >>> dist = Distribution(project_name='foo', version='1.0')
-    >>> str(dist.as_requirement())
-    'foo==1.0'
-    >>> adapted_dist = VersionlessRequirement(dist)
-    >>> str(adapted_dist.as_requirement())
-    'foo'
-    """
-
-    def __init__(self, dist) -> None:
-        self.__dist = dist
-
-    def __getattr__(self, name: str):
-        return getattr(self.__dist, name)
-
-    def as_requirement(self):
-        return self.project_name
+        pass
diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py
index 929fa9c285..6f1d27aa9f 100644
--- a/setuptools/tests/test_develop.py
+++ b/setuptools/tests/test_develop.py
@@ -51,67 +51,6 @@ def test_env(tmpdir, temp_user):
         yield target
 
 
-class TestDevelop:
-    in_virtualenv = hasattr(sys, 'real_prefix')
-    in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix
-
-    def test_console_scripts(self, tmpdir):
-        """
-        Test that console scripts are installed and that they reference
-        only the project by name and not the current version.
-        """
-        pytest.skip(
-            "TODO: needs a fixture to cause 'develop' "
-            "to be invoked without mutating environment."
-        )
-        settings = dict(
-            name='foo',
-            packages=['foo'],
-            version='0.0',
-            entry_points={
-                'console_scripts': [
-                    'foocmd = foo:foo',
-                ],
-            },
-        )
-        dist = Distribution(settings)
-        dist.script_name = 'setup.py'
-        cmd = develop(dist)
-        cmd.ensure_finalized()
-        cmd.install_dir = tmpdir
-        cmd.run()
-        # assert '0.0' not in foocmd_text
-
-    @pytest.mark.xfail(reason="legacy behavior retained for compatibility #4167")
-    def test_egg_link_filename(self):
-        settings = dict(
-            name='Foo $$$ Bar_baz-bing',
-        )
-        dist = Distribution(settings)
-        cmd = develop(dist)
-        cmd.ensure_finalized()
-        link = pathlib.Path(cmd.egg_link)
-        assert link.suffix == '.egg-link'
-        assert link.stem == 'Foo_Bar_baz_bing'
-
-
-class TestResolver:
-    """
-    TODO: These tests were written with a minimal understanding
-    of what _resolve_setup_path is intending to do. Come up with
-    more meaningful cases that look like real-world scenarios.
-    """
-
-    def test_resolve_setup_path_cwd(self):
-        assert develop._resolve_setup_path('.', '.', '.') == '.'
-
-    def test_resolve_setup_path_one_dir(self):
-        assert develop._resolve_setup_path('pkgs', '.', 'pkgs') == '../'
-
-    def test_resolve_setup_path_one_dir_trailing_slash(self):
-        assert develop._resolve_setup_path('pkgs/', '.', 'pkgs') == '../'
-
-
 class TestNamespaces:
     @staticmethod
     def install_develop(src_dir, target):

From fb1d5a6522a12d7307e9e5ce2e595fb40654f380 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 20 Apr 2025 17:29:03 -0400
Subject: [PATCH 1622/1761] Ensure the deprecation warning is retained.
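
A minimal sketch of how the warning class added below fires (``emit`` is the
classmethod provided by ``setuptools.warnings``):

  from setuptools.warnings import SetuptoolsDeprecationWarning

  class DevelopDeprecationWarning(SetuptoolsDeprecationWarning):
      _SUMMARY = "develop command is deprecated."

  # Composes _SUMMARY (plus _DETAILS/_SEE_URL when present) into one warning.
  DevelopDeprecationWarning.emit()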

---
 pytest.ini                    |  1 +
 setuptools/command/develop.py | 13 ++++++++++++-
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/pytest.ini b/pytest.ini
index 292b65864c..b6efa6f918 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -52,6 +52,7 @@ filterwarnings=
 	# https://github.com/pypa/setuptools/issues/917
 	ignore:setup.py install is deprecated.
 	ignore:easy_install command is deprecated.
+	ignore:develop command is deprecated.
 
 	# https://github.com/pypa/setuptools/issues/2497
 	ignore:.* is an invalid version and will not be supported::pkg_resources
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 14346815fb..19cb87fdac 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -2,6 +2,7 @@
 import sys
 
 from setuptools import Command
+from setuptools.warnings import SetuptoolsDeprecationWarning
 
 
 class develop(Command):
@@ -21,7 +22,17 @@ def run(self):
         subprocess.check_call(cmd)
 
     def initialize_options(self):
-        pass
+        DevelopDeprecationWarning.emit()
 
     def finalize_options(self) -> None:
         pass
+
+
+class DevelopDeprecationWarning(SetuptoolsDeprecationWarning):
+    _SUMMARY = "develop command is deprecated."
+    _DETAILS = """
+    Please avoid running ``setup.py`` and ``develop``.
+    Instead, use standards-based tools like pip or uv.
+    """
+    _SEE_URL = "https://github.com/pypa/setuptools/issues/917"
+    # _DUE_DATE = (TBD)

From a463376e5c233aa7b641b4e6c4815ff01f5c1fb1 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 20 Apr 2025 17:36:28 -0400
Subject: [PATCH 1623/1761] Bring back support for no-deps.
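
The change reuses the conditional-flag idiom already in ``run``: multiplying
a single-item list by a bool yields either the flag or nothing:

  >>> ['--no-deps'] * True
  ['--no-deps']
  >>> ['--no-deps'] * False
  []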

---
 setuptools/command/develop.py | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 19cb87fdac..5e3765cbe6 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -10,15 +10,25 @@ class develop(Command):
 
     user_options = [
         ("install-dir=", "d", "install package to DIR"),
+        ('no-deps', 'N', "don't install dependencies"),
+    ]
+    boolean_options = [
+        'no-deps',
     ]
 
     install_dir = None
+    no_deps = False
 
     def run(self):
-        cmd = [sys.executable, '-m', 'pip', 'install', '-e', '.', '--use-pep517'] + [
-            '--target',
-            self.install_dir,
-        ] * bool(self.install_dir)
+        cmd = (
+            [sys.executable, '-m', 'pip', 'install', '-e', '.', '--use-pep517']
+            + [
+                '--target',
+                self.install_dir,
+            ]
+            * bool(self.install_dir)
+            + ['--no-deps'] * self.no_deps
+        )
         subprocess.check_call(cmd)
 
     def initialize_options(self):

From b7c2e03a3bd08778a864364825c48c4f1a06f2a4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 20 Apr 2025 17:54:07 -0400
Subject: [PATCH 1624/1761] Prevent the easy_install command from running.

Tests still pass :D
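
Any invocation now hits the guard immediately, e.g.:

  $ python setup.py easy_install some-dist
  ...
  RuntimeError: easy_install command is disabled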
---
 setuptools/command/easy_install.py | 28 +---------------------------
 1 file changed, 1 insertion(+), 27 deletions(-)

diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 32bc197121..5b96201dca 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -423,33 +423,7 @@ def expand_dirs(self) -> None:
         self._expand_attrs(dirs)
 
     def run(self, show_deprecation: bool = True) -> None:
-        if show_deprecation:
-            self.announce(
-                "WARNING: The easy_install command is deprecated "
-                "and will be removed in a future version.",
-                log.WARN,
-            )
-        if self.verbose != self.distribution.verbose:
-            log.set_verbosity(self.verbose)
-        try:
-            for spec in self.args:
-                self.easy_install(spec, not self.no_deps)
-            if self.record:
-                outputs = self.outputs
-                if self.root:  # strip any package prefix
-                    root_len = len(self.root)
-                    for counter in range(len(outputs)):
-                        outputs[counter] = outputs[counter][root_len:]
-                from distutils import file_util
-
-                self.execute(
-                    file_util.write_file,
-                    (self.record, outputs),
-                    f"writing list of installed files to '{self.record}'",
-                )
-            self.warn_deprecated_options()
-        finally:
-            log.set_verbosity(self.distribution.verbose)
+        raise RuntimeError("easy_install command is disabled")
 
     def pseudo_tempname(self):
         """Return a pseudo-tempname base in the install directory.

From b8c06fffe4b118f3c549fc23d196c2f3d41aa17c Mon Sep 17 00:00:00 2001
From: Vincent Fazio 
Date: Fri, 18 Apr 2025 14:55:03 -0500
Subject: [PATCH 1625/1761] Respect CXX when parsing linker parameters for UNIX
 c++ targets

Previously, when parsing linker parameters for C++ targets, the CC
variable was used to identify the "prefix" of the command and thereby
determine the linker arguments.

If the value of LDCXXSHARED did not match CC, the first argument would
be dropped as it was assumed to be the linker command.

However, if the command was a wrapper, such as ccache, it could lead to
compile problems as the generated command would be incorrect.

In the following scenario:
  LDCXXSHARED="ccache g++ -shared -Wl,--enable-new-dtags"
  CC="ccache gcc"
  CXX="ccache g++"

The command would be incorrectly parsed to:
  ccache g++ g++ -shared -Wl,--enable-new-dtags

Now, the CXX value is used to improve the chances of parsing the linker
arguments correctly to generate:
  ccache g++ -shared -Wl,--enable-new-dtags

LDCXXSHARED and CXX still need to be in sync either in the environment
or within the sysconfig variables in the CPython build for parsing to
work correctly.

The CXX value is now also respected when linking executable binaries.
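
A hedged sketch of the prefix matching this relies on (helper name and
signature are illustrative, not the actual distutils internals):

  def linker_params(linker_cmd, compiler_cmd):
      # Drop the tokens that spell the compiler driver ("ccache g++");
      # everything after the driver is treated as linker arguments.
      return linker_cmd[len(compiler_cmd):]

  ldcxxshared = 'ccache g++ -shared -Wl,--enable-new-dtags'.split()
  cxx = 'ccache g++'.split()
  print(cxx + linker_params(ldcxxshared, cxx))
  # ['ccache', 'g++', '-shared', '-Wl,--enable-new-dtags']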

Signed-off-by: Vincent Fazio 
---
 distutils/compilers/C/unix.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/distutils/compilers/C/unix.py b/distutils/compilers/C/unix.py
index e8a53d452f..1231b32d20 100644
--- a/distutils/compilers/C/unix.py
+++ b/distutils/compilers/C/unix.py
@@ -286,19 +286,18 @@ def link(
                 # building an executable or linker_so (with shared options)
                 # when building a shared library.
                 building_exe = target_desc == base.Compiler.EXECUTABLE
+                target_cxx = target_lang == "c++"
                 linker = (
-                    self.linker_exe
+                    (self.linker_exe_cxx if target_cxx else self.linker_exe)
                     if building_exe
-                    else (
-                        self.linker_so_cxx if target_lang == "c++" else self.linker_so
-                    )
+                    else (self.linker_so_cxx if target_cxx else self.linker_so)
                 )[:]
 
-                if target_lang == "c++" and self.compiler_cxx:
+                if target_cxx and self.compiler_cxx:
                     env, linker_ne = _split_env(linker)
                     aix, linker_na = _split_aix(linker_ne)
                     _, compiler_cxx_ne = _split_env(self.compiler_cxx)
-                    _, linker_exe_ne = _split_env(self.linker_exe)
+                    _, linker_exe_ne = _split_env(self.linker_exe_cxx)
 
                     params = _linker_params(linker_na, linker_exe_ne)
                     linker = env + aix + compiler_cxx_ne + params

From c0d6d7158fcf68b4f748d751b3175e6b79c6ae5c Mon Sep 17 00:00:00 2001
From: Vincent Fazio 
Date: Fri, 18 Apr 2025 15:31:26 -0500
Subject: [PATCH 1626/1761] Add test for argument parsing for CXX targets on
 UNIX

Signed-off-by: Vincent Fazio 
---
 distutils/compilers/C/tests/test_unix.py | 54 ++++++++++++++++++++++++
 1 file changed, 54 insertions(+)

diff --git a/distutils/compilers/C/tests/test_unix.py b/distutils/compilers/C/tests/test_unix.py
index f4e2898458..1eec2891b7 100644
--- a/distutils/compilers/C/tests/test_unix.py
+++ b/distutils/compilers/C/tests/test_unix.py
@@ -244,6 +244,60 @@ def gcvs(*args, _orig=sysconfig.get_config_vars):
             sysconfig.customize_compiler(self.cc)
         assert self.cc.linker_so[0] == 'my_cc'
 
+    @pytest.mark.skipif('platform.system == "Windows"')
+    def test_cxx_commands_used_are_correct(self):
+        def gcv(v):
+            if v == 'LDSHARED':
+                return 'ccache gcc-4.2 -bundle -undefined dynamic_lookup'
+            elif v == 'LDCXXSHARED':
+                return 'ccache g++-4.2 -bundle -undefined dynamic_lookup'
+            elif v == 'CXX':
+                return 'ccache g++-4.2'
+            elif v == 'CC':
+                return 'ccache gcc-4.2'
+            return ''
+
+        def gcvs(*args, _orig=sysconfig.get_config_vars):
+            if args:
+                return list(map(sysconfig.get_config_var, args))
+            return _orig()
+
+        sysconfig.get_config_var = gcv
+        sysconfig.get_config_vars = gcvs
+        with (
+            mock.patch.object(self.cc, 'spawn', return_value=None) as mock_spawn,
+            mock.patch.object(self.cc, '_need_link', return_value=True),
+            mock.patch.object(self.cc, 'mkpath', return_value=None),
+            EnvironmentVarGuard() as env,
+        ):
+            sysconfig.customize_compiler(self.cc)
+            assert self.cc.linker_so_cxx[0:2] == ['ccache', 'g++-4.2']
+            assert self.cc.linker_exe_cxx[0:2] == ['ccache', 'g++-4.2']
+            self.cc.link(None, [], 'a.out', target_lang='c++')
+            call_args = mock_spawn.call_args[0][0]
+            expected = ['ccache', 'g++-4.2', '-bundle', '-undefined', 'dynamic_lookup']
+            assert call_args[:5] == expected
+
+            self.cc.link_executable([], 'a.out', target_lang='c++')
+            call_args = mock_spawn.call_args[0][0]
+            expected = ['ccache', 'g++-4.2', '-o', 'a.out']
+            assert call_args[:4] == expected
+
+            env['LDCXXSHARED'] = 'wrapper g++-4.2 -bundle -undefined dynamic_lookup'
+            env['CXX'] = 'wrapper g++-4.2'
+            sysconfig.customize_compiler(self.cc)
+            assert self.cc.linker_so_cxx[0:2] == ['wrapper', 'g++-4.2']
+            assert self.cc.linker_exe_cxx[0:2] == ['wrapper', 'g++-4.2']
+            self.cc.link(None, [], 'a.out', target_lang='c++')
+            call_args = mock_spawn.call_args[0][0]
+            expected = ['wrapper', 'g++-4.2', '-bundle', '-undefined', 'dynamic_lookup']
+            assert call_args[:5] == expected
+
+            self.cc.link_executable([], 'a.out', target_lang='c++')
+            call_args = mock_spawn.call_args[0][0]
+            expected = ['wrapper', 'g++-4.2', '-o', 'a.out']
+            assert call_args[:4] == expected
+
     @pytest.mark.skipif('platform.system == "Windows"')
     @pytest.mark.usefixtures('disable_macos_customization')
     def test_cc_overrides_ldshared_for_cxx_correctly(self):

From 55f9116bc92aa6f01de97dd1939728f51df6aa2a Mon Sep 17 00:00:00 2001
From: Vincent Fazio 
Date: Mon, 21 Apr 2025 15:35:35 -0500
Subject: [PATCH 1627/1761] Fix new test case

Address the following:

  * use the compiler's executable extension in asserts
  * remove any environment variables that may have been injected by CI
  * add a pragma to ignore a line without coverage

Signed-off-by: Vincent Fazio 
---
 distutils/compilers/C/tests/test_unix.py | 15 ++++++++++++---
 1 file changed, 12 insertions(+), 3 deletions(-)

diff --git a/distutils/compilers/C/tests/test_unix.py b/distutils/compilers/C/tests/test_unix.py
index 1eec2891b7..35b6b0e050 100644
--- a/distutils/compilers/C/tests/test_unix.py
+++ b/distutils/compilers/C/tests/test_unix.py
@@ -260,7 +260,7 @@ def gcv(v):
         def gcvs(*args, _orig=sysconfig.get_config_vars):
             if args:
                 return list(map(sysconfig.get_config_var, args))
-            return _orig()
+            return _orig()  # pragma: no cover
 
         sysconfig.get_config_var = gcv
         sysconfig.get_config_vars = gcvs
@@ -270,6 +270,10 @@ def gcvs(*args, _orig=sysconfig.get_config_vars):
             mock.patch.object(self.cc, 'mkpath', return_value=None),
             EnvironmentVarGuard() as env,
         ):
+            # remove environment overrides in case they were set by CI
+            del env['CXX']
+            del env['LDCXXSHARED']
+
             sysconfig.customize_compiler(self.cc)
             assert self.cc.linker_so_cxx[0:2] == ['ccache', 'g++-4.2']
             assert self.cc.linker_exe_cxx[0:2] == ['ccache', 'g++-4.2']
@@ -280,7 +284,7 @@ def gcvs(*args, _orig=sysconfig.get_config_vars):
 
             self.cc.link_executable([], 'a.out', target_lang='c++')
             call_args = mock_spawn.call_args[0][0]
-            expected = ['ccache', 'g++-4.2', '-o', 'a.out']
+            expected = ['ccache', 'g++-4.2', '-o', self.cc.executable_filename('a.out')]
             assert call_args[:4] == expected
 
             env['LDCXXSHARED'] = 'wrapper g++-4.2 -bundle -undefined dynamic_lookup'
@@ -295,7 +299,12 @@ def gcvs(*args, _orig=sysconfig.get_config_vars):
 
             self.cc.link_executable([], 'a.out', target_lang='c++')
             call_args = mock_spawn.call_args[0][0]
-            expected = ['wrapper', 'g++-4.2', '-o', 'a.out']
+            expected = [
+                'wrapper',
+                'g++-4.2',
+                '-o',
+                self.cc.executable_filename('a.out'),
+            ]
             assert call_args[:4] == expected
 
     @pytest.mark.skipif('platform.system == "Windows"')

From 3bbd90c2b3a12edceb73423f21dc1f1dc3c6877b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 23 Apr 2025 17:22:29 -0400
Subject: [PATCH 1628/1761] Add news fragment.

---
 newsfragments/+67050cd5.bugfix.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/+67050cd5.bugfix.rst

diff --git a/newsfragments/+67050cd5.bugfix.rst b/newsfragments/+67050cd5.bugfix.rst
new file mode 100644
index 0000000000..3583cabf23
--- /dev/null
+++ b/newsfragments/+67050cd5.bugfix.rst
@@ -0,0 +1 @@
+Merge with pypa/distutils@24bd3179b including fix for pypa/distutils#355.

From 607f6be776db97d83c9cc54e0eaa578567dcc44c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Wed, 23 Apr 2025 17:59:42 -0400
Subject: [PATCH 1629/1761] Bump version: 79.0.0 → 79.0.1

---
 .bumpversion.cfg                   | 2 +-
 NEWS.rst                           | 9 +++++++++
 newsfragments/+67050cd5.bugfix.rst | 1 -
 pyproject.toml                     | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/+67050cd5.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index ce3d207697..bf558280a3 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 79.0.0
+current_version = 79.0.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index e7f33786a3..6300d29d49 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v79.0.1
+=======
+
+Bugfixes
+--------
+
+- Merge with pypa/distutils@24bd3179b including fix for pypa/distutils#355.
+
+
 v79.0.0
 =======
 
diff --git a/newsfragments/+67050cd5.bugfix.rst b/newsfragments/+67050cd5.bugfix.rst
deleted file mode 100644
index 3583cabf23..0000000000
--- a/newsfragments/+67050cd5.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Merge with pypa/distutils@24bd3179b including fix for pypa/distutils#355.
diff --git a/pyproject.toml b/pyproject.toml
index 3ba37aa59d..02f0e9cdbd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "79.0.0"
+version = "79.0.1"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 51ed5d70783fe2d091a815dc9d70e0cd55f4cfc9 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 26 Apr 2025 20:23:22 -0400
Subject: [PATCH 1630/1761] 👹 Feed the hobgoblins (delint).

---
 setuptools/tests/test_develop.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py
index 6f1d27aa9f..67495b89c8 100644
--- a/setuptools/tests/test_develop.py
+++ b/setuptools/tests/test_develop.py
@@ -1,7 +1,6 @@
 """develop tests"""
 
 import os
-import pathlib
 import platform
 import subprocess
 import sys
@@ -9,8 +8,6 @@
 import pytest
 
 from setuptools._path import paths_on_pythonpath
-from setuptools.command.develop import develop
-from setuptools.dist import Distribution
 
 from . import contexts, namespaces
 

From 86f11f0e161540c358e8aaea06e0a37e18c20c30 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 26 Apr 2025 20:29:41 -0400
Subject: [PATCH 1631/1761] Add news fragment.

---
 newsfragments/917.removal.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/917.removal.rst

diff --git a/newsfragments/917.removal.rst b/newsfragments/917.removal.rst
new file mode 100644
index 0000000000..37eec3c5d6
--- /dev/null
+++ b/newsfragments/917.removal.rst
@@ -0,0 +1 @@
+Develop command no longer uses easy_install, but instead defers execution to pip (which will then re-invoke Setuptools via PEP 517 to build the editable wheel). Most of the options to develop are dropped. This is the final warning before the command is dropped completely in a few months. Use cases relying on 'setup.py develop' should pin to an older Setuptools version or migrate to modern build tooling.

From a5f02fe88d46e963bc470a60a9f8613d7f889d49 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 26 Apr 2025 20:32:21 -0400
Subject: [PATCH 1632/1761] Remove another test relying on setup.py develop.

---
 setuptools/tests/test_easy_install.py | 65 ---------------------------
 1 file changed, 65 deletions(-)

diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index b58b0b6666..f7be2a945a 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -15,7 +15,6 @@
 import time
 import warnings
 import zipfile
-from pathlib import Path
 from typing import NamedTuple
 from unittest import mock
 
@@ -1410,67 +1409,3 @@ def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch):
     assert cmd.config_vars['py_version'] == '3.10.1'
     assert cmd.config_vars['py_version_short'] == '3.10'
     assert cmd.config_vars['py_version_nodot'] == '310'
-
-
-@pytest.mark.xfail(
-    sys.platform == "darwin",
-    reason="https://github.com/pypa/setuptools/pull/4716#issuecomment-2447624418",
-)
-def test_editable_user_and_build_isolation(setup_context, monkeypatch, tmp_path):
-    """`setup.py develop` should honor `--user` even under build isolation"""
-
-    # == Arrange ==
-    # Pretend that build isolation was enabled
-    # e.g pip sets the environment variable PYTHONNOUSERSITE=1
-    monkeypatch.setattr('site.ENABLE_USER_SITE', False)
-
-    # Patching $HOME for 2 reasons:
-    # 1. setuptools/command/easy_install.py:create_home_path
-    #    tries creating directories in $HOME.
-    #    Given::
-    #        self.config_vars['DESTDIRS'] = (
-    #            "/home/user/.pyenv/versions/3.9.10 "
-    #            "/home/user/.pyenv/versions/3.9.10/lib "
-    #            "/home/user/.pyenv/versions/3.9.10/lib/python3.9 "
-    #            "/home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload")
-    #    `create_home_path` will::
-    #        makedirs(
-    #            "/home/user/.pyenv/versions/3.9.10 "
-    #            "/home/user/.pyenv/versions/3.9.10/lib "
-    #            "/home/user/.pyenv/versions/3.9.10/lib/python3.9 "
-    #            "/home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload")
-    #
-    # 2. We are going to force `site` to update site.USER_BASE and site.USER_SITE
-    #    To point inside our new home
-    monkeypatch.setenv('HOME', str(tmp_path / '.home'))
-    monkeypatch.setenv('USERPROFILE', str(tmp_path / '.home'))
-    monkeypatch.setenv('APPDATA', str(tmp_path / '.home'))
-    monkeypatch.setattr('site.USER_BASE', None)
-    monkeypatch.setattr('site.USER_SITE', None)
-    user_site = Path(site.getusersitepackages())
-    user_site.mkdir(parents=True, exist_ok=True)
-
-    sys_prefix = tmp_path / '.sys_prefix'
-    sys_prefix.mkdir(parents=True, exist_ok=True)
-    monkeypatch.setattr('sys.prefix', str(sys_prefix))
-
-    setup_script = (
-        "__import__('setuptools').setup(name='aproj', version=42, packages=[])\n"
-    )
-    (tmp_path / "setup.py").write_text(setup_script, encoding="utf-8")
-
-    # == Sanity check ==
-    assert list(sys_prefix.glob("*")) == []
-    assert list(user_site.glob("*")) == []
-
-    # == Act ==
-    run_setup('setup.py', ['develop', '--user'])
-
-    # == Assert ==
-    # Should not install to sys.prefix
-    assert list(sys_prefix.glob("*")) == []
-    # Should install to user site
-    installed = {f.name for f in user_site.glob("*")}
-    # sometimes easy-install.pth is created and sometimes not
-    installed = installed - {"easy-install.pth"}
-    assert installed == {'aproj.egg-link'}

From 018a20cb130e9357f39c176b59c83738a09d7daa Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 26 Apr 2025 20:42:25 -0400
Subject: [PATCH 1633/1761] Restore a few of the options to develop.
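
The restored options translate directly into pip flags; an illustrative
invocation:

  $ python setup.py develop --user --no-deps
  # roughly: python -m pip install -e . --use-pep517 --user --no-deps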

---
 setuptools/command/develop.py | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 5e3765cbe6..8d7a382ae2 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -1,3 +1,4 @@
+import site
 import subprocess
 import sys
 
@@ -11,23 +12,29 @@ class develop(Command):
     user_options = [
         ("install-dir=", "d", "install package to DIR"),
         ('no-deps', 'N', "don't install dependencies"),
+        ('user', None, f"install in user site-package '{site.USER_SITE}'"),
+        ('prefix=', None, "installation prefix"),
+        ("index-url=", "i", "base URL of Python Package Index"),
     ]
     boolean_options = [
         'no-deps',
+        'user',
     ]
 
     install_dir = None
     no_deps = False
+    user = False
+    prefix = None
+    index_url = None
 
     def run(self):
         cmd = (
             [sys.executable, '-m', 'pip', 'install', '-e', '.', '--use-pep517']
-            + [
-                '--target',
-                self.install_dir,
-            ]
-            * bool(self.install_dir)
+            + ['--target', self.install_dir] * bool(self.install_dir)
             + ['--no-deps'] * self.no_deps
+            + ['--user'] * self.user
+            + ['--prefix', self.prefix] * bool(self.prefix)
+            + ['--index-url', self.index_url] * bool(self.index_url)
         )
         subprocess.check_call(cmd)
 

From a7603da5d3c709f6f01c8df8031ba7a7ae7959a0 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 26 Apr 2025 21:46:21 -0400
Subject: [PATCH 1634/1761] Rename news fragment to reference the pull request
 for more precise locality.

---
 newsfragments/{917.removal.rst => 4955.removal.rst} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename newsfragments/{917.removal.rst => 4955.removal.rst} (100%)

diff --git a/newsfragments/917.removal.rst b/newsfragments/4955.removal.rst
similarity index 100%
rename from newsfragments/917.removal.rst
rename to newsfragments/4955.removal.rst

From da119e7e996b00b6e26f79995bec55684a3fabbe Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sat, 26 Apr 2025 21:51:02 -0400
Subject: [PATCH 1635/1761] Set a due date 6 months in advance.

---
 setuptools/command/develop.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 8d7a382ae2..42df9a10ff 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -52,4 +52,4 @@ class DevelopDeprecationWarning(SetuptoolsDeprecationWarning):
     Instead, use standards-based tools like pip or uv.
     """
     _SEE_URL = "https://github.com/pypa/setuptools/issues/917"
-    # _DUE_DATE = (TBD)
+    _DUE_DATE = 2025, 10, 31

From 82c588aedd8142e7615031358e2d2640213a351d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 27 Apr 2025 12:50:23 -0400
Subject: [PATCH 1636/1761] Update test to honor new behavior in
 importlib_metadata 8.7

Fixes #4961
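
A hedged sketch of the change the test now tolerates (per the docstring added
below, newer releases validate entry points on construction, so the exact
raise site and exception type vary by version):

  from importlib_metadata import EntryPoint

  ep = EntryPoint('foo', 'invalid-identifier:foo', 'foo')
  ep.extras  # older releases fail here; 8.7 may raise ValueError on construction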
---
 newsfragments/4961.bugfix.rst     | 1 +
 setuptools/_entry_points.py       | 4 ++++
 setuptools/tests/test_egg_info.py | 5 +++--
 3 files changed, 8 insertions(+), 2 deletions(-)
 create mode 100644 newsfragments/4961.bugfix.rst

diff --git a/newsfragments/4961.bugfix.rst b/newsfragments/4961.bugfix.rst
new file mode 100644
index 0000000000..bddcee807d
--- /dev/null
+++ b/newsfragments/4961.bugfix.rst
@@ -0,0 +1 @@
+Update test to honor new behavior in importlib_metadata 8.7.
diff --git a/setuptools/_entry_points.py b/setuptools/_entry_points.py
index e785fc7df8..cd5dd2c8ac 100644
--- a/setuptools/_entry_points.py
+++ b/setuptools/_entry_points.py
@@ -15,6 +15,10 @@ def ensure_valid(ep):
     """
     Exercise one of the dynamic properties to trigger
     the pattern match.
+
+    This function is deprecated in favor of importlib_metadata 8.7 and
+    Python 3.14 importlib.metadata, which validates entry points on
+    construction.
     """
     try:
         ep.extras
diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py
index 528e2c13d8..3653be096f 100644
--- a/setuptools/tests/test_egg_info.py
+++ b/setuptools/tests/test_egg_info.py
@@ -1287,10 +1287,11 @@ def test_invalid_entry_point(self, tmpdir_cwd, env):
         dist = Distribution({"name": "foo", "version": "0.0.1"})
         dist.entry_points = {"foo": "foo = invalid-identifier:foo"}
         cmd = dist.get_command_obj("egg_info")
-        expected_msg = r"Problems to parse .*invalid-identifier.*"
-        with pytest.raises(errors.OptionError, match=expected_msg) as ex:
+        expected_msg = r"(Invalid object reference|Problems to parse)"
+        with pytest.raises((errors.OptionError, ValueError), match=expected_msg) as ex:
             write_entries(cmd, "entry_points", "entry_points.txt")
             assert "ensure entry-point follows the spec" in ex.value.args[0]
+            assert "invalid-identifier" in str(ex.value)
 
     def test_valid_entry_point(self, tmpdir_cwd, env):
         dist = Distribution({"name": "foo", "version": "0.0.1"})

From aeea79266d82f99dbe556126b90b64215a663a2c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 27 Apr 2025 12:59:36 -0400
Subject: [PATCH 1637/1761] Bump version: 79.0.1 → 80.0.0

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 16 ++++++++++++++++
 newsfragments/2908.removal.rst |  1 -
 newsfragments/4955.removal.rst |  1 -
 newsfragments/4961.bugfix.rst  |  1 -
 pyproject.toml                 |  2 +-
 6 files changed, 18 insertions(+), 5 deletions(-)
 delete mode 100644 newsfragments/2908.removal.rst
 delete mode 100644 newsfragments/4955.removal.rst
 delete mode 100644 newsfragments/4961.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index bf558280a3..5177b8c4d0 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 79.0.1
+current_version = 80.0.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 6300d29d49..ebcca10197 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,19 @@
+v80.0.0
+=======
+
+Bugfixes
+--------
+
+- Update test to honor new behavior in importlib_metadata 8.7. (#4961)
+
+
+Deprecations and Removals
+-------------------------
+
+- Removed support for the easy_install command including the sandbox module. (#2908)
+- Develop command no longer uses easy_install, but instead defers execution to pip (which will then re-invoke Setuptools via PEP 517 to build the editable wheel). Most of the options to develop are dropped. This is the final warning before the command is dropped completely in a few months. Use cases relying on 'setup.py develop' should pin to an older Setuptools version or migrate to modern build tooling. (#4955)
+
+
 v79.0.1
 =======
 
diff --git a/newsfragments/2908.removal.rst b/newsfragments/2908.removal.rst
deleted file mode 100644
index 67c9a40c83..0000000000
--- a/newsfragments/2908.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Removed support for the easy_install command including the sandbox module.
diff --git a/newsfragments/4955.removal.rst b/newsfragments/4955.removal.rst
deleted file mode 100644
index 37eec3c5d6..0000000000
--- a/newsfragments/4955.removal.rst
+++ /dev/null
@@ -1 +0,0 @@
-Develop command no longer uses easy_install, but instead defers execution to pip (which will then re-invoke Setuptools via PEP 517 to build the editable wheel). Most of the options to develop are dropped. This is the final warning before the command is dropped completely in a few months. Use cases relying on 'setup.py develop' should pin to an older Setuptools version or migrate to modern build tooling.
diff --git a/newsfragments/4961.bugfix.rst b/newsfragments/4961.bugfix.rst
deleted file mode 100644
index bddcee807d..0000000000
--- a/newsfragments/4961.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Update test to honor new behavior in importlib_metadata 8.7.
diff --git a/pyproject.toml b/pyproject.toml
index 02f0e9cdbd..2ef6e19256 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "79.0.1"
+version = "80.0.0"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 62c2b9f57f72071bc8b7c45f48c3ac159ede3ead Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 27 Apr 2025 20:11:59 -0400
Subject: [PATCH 1638/1761] Remove reference in test_windows_wrappers to
 easy_install.
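
``subprocess.list2cmdline`` provides comparable quoting straight from the
standard library:

  >>> import subprocess
  >>> subprocess.list2cmdline([r'C:\Program Files\Python\python.exe'])
  '"C:\\Program Files\\Python\\python.exe"'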

---
 setuptools/tests/test_windows_wrappers.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py
index f895485387..f14338404d 100644
--- a/setuptools/tests/test_windows_wrappers.py
+++ b/setuptools/tests/test_windows_wrappers.py
@@ -21,7 +21,6 @@
 import pytest
 
 import pkg_resources
-from setuptools.command.easy_install import nt_quote_arg
 
 pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only")
 
@@ -29,7 +28,7 @@
 class WrapperTester:
     @classmethod
     def prep_script(cls, template):
-        python_exe = nt_quote_arg(sys.executable)
+        python_exe = subprocess.list2cmdline([sys.executable])
         return template % locals()
 
     @classmethod

From 618afde03c5a30548fb72d5c2251377365127cda Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 27 Apr 2025 20:21:02 -0400
Subject: [PATCH 1639/1761] Moved some fixtures out of test_easy_install.

---
 setuptools/tests/fixtures.py          | 186 ++++++++++++++++++++++++++
 setuptools/tests/test_dist.py         |   2 +-
 setuptools/tests/test_easy_install.py | 185 +------------------------
 3 files changed, 188 insertions(+), 185 deletions(-)

diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index a5472984b5..ac9ce74047 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -1,13 +1,19 @@
 import contextlib
+import io
 import os
 import subprocess
 import sys
+import tarfile
+import time
 from pathlib import Path
 
 import path
 import pytest
 
+from setuptools._normalization import safer_name
+
 from . import contexts, environment
+from .textwrap import DALS
 
 
 @pytest.fixture
@@ -155,3 +161,183 @@ def bare_venv(tmp_path):
     env.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed']
     env.ensure_env()
     return env
+
+
+def make_sdist(dist_path, files):
+    """
+    Create a simple sdist tarball at dist_path, containing the files
+    listed in ``files`` as ``(filename, content)`` tuples.
+    """
+
+    # Distributions with only one file don't play well with pip.
+    assert len(files) > 1
+    with tarfile.open(dist_path, 'w:gz') as dist:
+        for filename, content in files:
+            file_bytes = io.BytesIO(content.encode('utf-8'))
+            file_info = tarfile.TarInfo(name=filename)
+            file_info.size = len(file_bytes.getvalue())
+            file_info.mtime = int(time.time())
+            dist.addfile(file_info, fileobj=file_bytes)
+
+
+def make_trivial_sdist(dist_path, distname, version):
+    """
+    Create a simple sdist tarball at dist_path, containing just a simple
+    setup.py.
+    """
+
+    make_sdist(
+        dist_path,
+        [
+            (
+                'setup.py',
+                DALS(
+                    f"""\
+             import setuptools
+             setuptools.setup(
+                 name={distname!r},
+                 version={version!r}
+             )
+         """
+                ),
+            ),
+            ('setup.cfg', ''),
+        ],
+    )
+
+
+def make_nspkg_sdist(dist_path, distname, version):
+    """
+    Make an sdist tarball with distname and version which also contains one
+    package with the same name as distname.  The top-level package is
+    designated a namespace package.
+    """
+    # Assert that the distname contains at least one period
+    assert '.' in distname
+
+    parts = distname.split('.')
+    nspackage = parts[0]
+
+    packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)]
+
+    setup_py = DALS(
+        f"""\
+        import setuptools
+        setuptools.setup(
+            name={distname!r},
+            version={version!r},
+            packages={packages!r},
+            namespace_packages=[{nspackage!r}]
+        )
+    """
+    )
+
+    init = "__import__('pkg_resources').declare_namespace(__name__)"
+
+    files = [('setup.py', setup_py), (os.path.join(nspackage, '__init__.py'), init)]
+    for package in packages[1:]:
+        filename = os.path.join(*(package.split('.') + ['__init__.py']))
+        files.append((filename, ''))
+
+    make_sdist(dist_path, files)
+
+
+def make_python_requires_sdist(dist_path, distname, version, python_requires):
+    make_sdist(
+        dist_path,
+        [
+            (
+                'setup.py',
+                DALS(
+                    """\
+                import setuptools
+                setuptools.setup(
+                  name={name!r},
+                  version={version!r},
+                  python_requires={python_requires!r},
+                )
+                """
+                ).format(
+                    name=distname, version=version, python_requires=python_requires
+                ),
+            ),
+            ('setup.cfg', ''),
+        ],
+    )
+
+
+def create_setup_requires_package(
+    path,
+    distname='foobar',
+    version='0.1',
+    make_package=make_trivial_sdist,
+    setup_py_template=None,
+    setup_attrs=None,
+    use_setup_cfg=(),
+):
+    """Creates a source tree under path for a trivial test package that has a
+    single requirement in setup_requires--a tarball for that requirement is
+    also created and added to the dependency_links argument.
+
+    ``distname`` and ``version`` refer to the name/version of the package that
+    the test package requires via ``setup_requires``.  The name of the test
+    package itself is just 'test_pkg'.
+    """
+
+    normalized_distname = safer_name(distname)
+    test_setup_attrs = {
+        'name': 'test_pkg',
+        'version': '0.0',
+        'setup_requires': [f'{normalized_distname}=={version}'],
+        'dependency_links': [os.path.abspath(path)],
+    }
+    if setup_attrs:
+        test_setup_attrs.update(setup_attrs)
+
+    test_pkg = os.path.join(path, 'test_pkg')
+    os.mkdir(test_pkg)
+
+    # setup.cfg
+    if use_setup_cfg:
+        options = []
+        metadata = []
+        for name in use_setup_cfg:
+            value = test_setup_attrs.pop(name)
+            if name in 'name version'.split():
+                section = metadata
+            else:
+                section = options
+            if isinstance(value, (tuple, list)):
+                value = ';'.join(value)
+            section.append(f'{name}: {value}')
+        test_setup_cfg_contents = DALS(
+            """
+            [metadata]
+            {metadata}
+            [options]
+            {options}
+            """
+        ).format(
+            options='\n'.join(options),
+            metadata='\n'.join(metadata),
+        )
+    else:
+        test_setup_cfg_contents = ''
+    with open(os.path.join(test_pkg, 'setup.cfg'), 'w', encoding="utf-8") as f:
+        f.write(test_setup_cfg_contents)
+
+    # setup.py
+    if setup_py_template is None:
+        setup_py_template = DALS(
+            """\
+            import setuptools
+            setuptools.setup(**%r)
+        """
+        )
+    with open(os.path.join(test_pkg, 'setup.py'), 'w', encoding="utf-8") as f:
+        f.write(setup_py_template % test_setup_attrs)
+
+    foobar_path = os.path.join(path, f'{normalized_distname}-{version}.tar.gz')
+    make_package(foobar_path, distname, version)
+
+    return test_pkg
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index e65ab310e7..011351f57e 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -8,7 +8,7 @@
 from setuptools import Distribution
 from setuptools.dist import check_package_data, check_specifier
 
-from .test_easy_install import make_trivial_sdist
+from .fixtures import make_trivial_sdist
 from .test_find_packages import ensure_files
 from .textwrap import DALS
 
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index e5fb3276f5..af66b8d66e 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -1,15 +1,12 @@
 """Easy install Tests"""
 
-import io
 import itertools
 import logging
 import os
 import re
 import site
 import sys
-import tarfile
 import tempfile
-import time
 import warnings
 import zipfile
 from typing import NamedTuple
@@ -20,10 +17,10 @@
 import pkg_resources
 import setuptools.command.easy_install as ei
 from pkg_resources import Distribution as PRDistribution, normalize_path
-from setuptools._normalization import safer_name
 from setuptools.command.easy_install import PthDistributions
 from setuptools.dist import Distribution
 
+from .fixtures import make_sdist
 from .textwrap import DALS
 
 import distutils.errors
@@ -380,186 +377,6 @@ def test_local_index(self, foo_package, install_target):
         assert actual == expected
 
 
-def make_trivial_sdist(dist_path, distname, version):
-    """
-    Create a simple sdist tarball at dist_path, containing just a simple
-    setup.py.
-    """
-
-    make_sdist(
-        dist_path,
-        [
-            (
-                'setup.py',
-                DALS(
-                    f"""\
-             import setuptools
-             setuptools.setup(
-                 name={distname!r},
-                 version={version!r}
-             )
-         """
-                ),
-            ),
-            ('setup.cfg', ''),
-        ],
-    )
-
-
-def make_nspkg_sdist(dist_path, distname, version):
-    """
-    Make an sdist tarball with distname and version which also contains one
-    package with the same name as distname.  The top-level package is
-    designated a namespace package).
-    """
-    # Assert that the distname contains at least one period
-    assert '.' in distname
-
-    parts = distname.split('.')
-    nspackage = parts[0]
-
-    packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)]
-
-    setup_py = DALS(
-        f"""\
-        import setuptools
-        setuptools.setup(
-            name={distname!r},
-            version={version!r},
-            packages={packages!r},
-            namespace_packages=[{nspackage!r}]
-        )
-    """
-    )
-
-    init = "__import__('pkg_resources').declare_namespace(__name__)"
-
-    files = [('setup.py', setup_py), (os.path.join(nspackage, '__init__.py'), init)]
-    for package in packages[1:]:
-        filename = os.path.join(*(package.split('.') + ['__init__.py']))
-        files.append((filename, ''))
-
-    make_sdist(dist_path, files)
-
-
-def make_python_requires_sdist(dist_path, distname, version, python_requires):
-    make_sdist(
-        dist_path,
-        [
-            (
-                'setup.py',
-                DALS(
-                    """\
-                import setuptools
-                setuptools.setup(
-                  name={name!r},
-                  version={version!r},
-                  python_requires={python_requires!r},
-                )
-                """
-                ).format(
-                    name=distname, version=version, python_requires=python_requires
-                ),
-            ),
-            ('setup.cfg', ''),
-        ],
-    )
-
-
-def make_sdist(dist_path, files):
-    """
-    Create a simple sdist tarball at dist_path, containing the files
-    listed in ``files`` as ``(filename, content)`` tuples.
-    """
-
-    # Distributions with only one file don't play well with pip.
-    assert len(files) > 1
-    with tarfile.open(dist_path, 'w:gz') as dist:
-        for filename, content in files:
-            file_bytes = io.BytesIO(content.encode('utf-8'))
-            file_info = tarfile.TarInfo(name=filename)
-            file_info.size = len(file_bytes.getvalue())
-            file_info.mtime = int(time.time())
-            dist.addfile(file_info, fileobj=file_bytes)
-
-
-def create_setup_requires_package(
-    path,
-    distname='foobar',
-    version='0.1',
-    make_package=make_trivial_sdist,
-    setup_py_template=None,
-    setup_attrs=None,
-    use_setup_cfg=(),
-):
-    """Creates a source tree under path for a trivial test package that has a
-    single requirement in setup_requires--a tarball for that requirement is
-    also created and added to the dependency_links argument.
-
-    ``distname`` and ``version`` refer to the name/version of the package that
-    the test package requires via ``setup_requires``.  The name of the test
-    package itself is just 'test_pkg'.
-    """
-
-    normalized_distname = safer_name(distname)
-    test_setup_attrs = {
-        'name': 'test_pkg',
-        'version': '0.0',
-        'setup_requires': [f'{normalized_distname}=={version}'],
-        'dependency_links': [os.path.abspath(path)],
-    }
-    if setup_attrs:
-        test_setup_attrs.update(setup_attrs)
-
-    test_pkg = os.path.join(path, 'test_pkg')
-    os.mkdir(test_pkg)
-
-    # setup.cfg
-    if use_setup_cfg:
-        options = []
-        metadata = []
-        for name in use_setup_cfg:
-            value = test_setup_attrs.pop(name)
-            if name in 'name version'.split():
-                section = metadata
-            else:
-                section = options
-            if isinstance(value, (tuple, list)):
-                value = ';'.join(value)
-            section.append(f'{name}: {value}')
-        test_setup_cfg_contents = DALS(
-            """
-            [metadata]
-            {metadata}
-            [options]
-            {options}
-            """
-        ).format(
-            options='\n'.join(options),
-            metadata='\n'.join(metadata),
-        )
-    else:
-        test_setup_cfg_contents = ''
-    with open(os.path.join(test_pkg, 'setup.cfg'), 'w', encoding="utf-8") as f:
-        f.write(test_setup_cfg_contents)
-
-    # setup.py
-    if setup_py_template is None:
-        setup_py_template = DALS(
-            """\
-            import setuptools
-            setuptools.setup(**%r)
-        """
-        )
-    with open(os.path.join(test_pkg, 'setup.py'), 'w', encoding="utf-8") as f:
-        f.write(setup_py_template % test_setup_attrs)
-
-    foobar_path = os.path.join(path, f'{normalized_distname}-{version}.tar.gz')
-    make_package(foobar_path, distname, version)
-
-    return test_pkg
-
-
 @pytest.mark.skipif(
     sys.platform.startswith('java') and ei.is_sh(sys.executable),
     reason="Test cannot run under java when executable is sh",

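With patch 1639, test modules import the sdist helpers from the shared setuptools/tests/fixtures.py module instead of from test_easy_install. A sketch of a consuming test (tmp_path is pytest's built-in fixture; the distribution name is illustrative):

from setuptools.tests.fixtures import make_trivial_sdist

def test_trivial_sdist(tmp_path):
    dist_path = tmp_path / 'demo-1.0.tar.gz'
    # make_trivial_sdist writes a gzipped tarball containing a setup.py
    # and an empty setup.cfg.
    make_trivial_sdist(str(dist_path), 'demo', '1.0')
    assert dist_path.exists()
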
From 5e907629baa69e0387b241e8ef7767edf4d0ede8 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 27 Apr 2025 20:51:15 -0400
Subject: [PATCH 1640/1761] Moved scripts functionality into its own module.

---
 setuptools/_scripts.py                | 353 ++++++++++++++++++++++++++
 setuptools/command/easy_install.py    | 331 +-----------------------
 setuptools/command/install_scripts.py |   4 +-
 setuptools/tests/test_easy_install.py |  11 -
 setuptools/tests/test_scripts.py      |  12 +
 5 files changed, 370 insertions(+), 341 deletions(-)
 create mode 100644 setuptools/_scripts.py
 create mode 100644 setuptools/tests/test_scripts.py

diff --git a/setuptools/_scripts.py b/setuptools/_scripts.py
new file mode 100644
index 0000000000..e3e8a191d4
--- /dev/null
+++ b/setuptools/_scripts.py
@@ -0,0 +1,353 @@
+from __future__ import annotations
+
+import os
+import re
+import shlex
+import shutil
+import struct
+import subprocess
+import sys
+import textwrap
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, TypedDict
+
+import pkg_resources
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+from .warnings import SetuptoolsWarning
+
+from distutils.command.build_scripts import first_line_re
+from distutils.util import get_platform
+
+
+class _SplitArgs(TypedDict, total=False):
+    comments: bool
+    posix: bool
+
+
+class CommandSpec(list):
+    """
+    A command spec for a #! header, specified as a list of arguments akin to
+    those passed to Popen.
+    """
+
+    options: list[str] = []
+    split_args = _SplitArgs()
+
+    @classmethod
+    def best(cls):
+        """
+        Choose the best CommandSpec class based on environmental conditions.
+        """
+        return cls
+
+    @classmethod
+    def _sys_executable(cls):
+        _default = os.path.normpath(sys.executable)
+        return os.environ.get('__PYVENV_LAUNCHER__', _default)
+
+    @classmethod
+    def from_param(cls, param: Self | str | Iterable[str] | None) -> Self:
+        """
+        Construct a CommandSpec from a parameter to build_scripts, which may
+        be None.
+        """
+        if isinstance(param, cls):
+            return param
+        if isinstance(param, str):
+            return cls.from_string(param)
+        if isinstance(param, Iterable):
+            return cls(param)
+        if param is None:
+            return cls.from_environment()
+        raise TypeError(f"Argument has an unsupported type {type(param)}")
+
+    @classmethod
+    def from_environment(cls):
+        return cls([cls._sys_executable()])
+
+    @classmethod
+    def from_string(cls, string: str) -> Self:
+        """
+        Construct a command spec from a simple string representing a command
+        line parseable by shlex.split.
+        """
+        items = shlex.split(string, **cls.split_args)
+        return cls(items)
+
+    def install_options(self, script_text: str):
+        self.options = shlex.split(self._extract_options(script_text))
+        cmdline = subprocess.list2cmdline(self)
+        if not isascii(cmdline):
+            self.options[:0] = ['-x']
+
+    @staticmethod
+    def _extract_options(orig_script):
+        """
+        Extract any options from the first line of the script.
+        """
+        first = (orig_script + '\n').splitlines()[0]
+        match = _first_line_re().match(first)
+        options = match.group(1) or '' if match else ''
+        return options.strip()
+
+    def as_header(self):
+        return self._render(self + list(self.options))
+
+    @staticmethod
+    def _strip_quotes(item):
+        _QUOTES = '"\''
+        for q in _QUOTES:
+            if item.startswith(q) and item.endswith(q):
+                return item[1:-1]
+        return item
+
+    @staticmethod
+    def _render(items):
+        cmdline = subprocess.list2cmdline(
+            CommandSpec._strip_quotes(item.strip()) for item in items
+        )
+        return '#!' + cmdline + '\n'
+
+
+class WindowsCommandSpec(CommandSpec):
+    split_args = _SplitArgs(posix=False)
+
+
+class ScriptWriter:
+    """
+    Encapsulates behavior around writing entry point scripts for console and
+    gui apps.
+    """
+
+    template = textwrap.dedent(
+        r"""
+        # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
+        import re
+        import sys
+
+        # for compatibility with easy_install; see #2198
+        __requires__ = %(spec)r
+
+        try:
+            from importlib.metadata import distribution
+        except ImportError:
+            try:
+                from importlib_metadata import distribution
+            except ImportError:
+                from pkg_resources import load_entry_point
+
+
+        def importlib_load_entry_point(spec, group, name):
+            dist_name, _, _ = spec.partition('==')
+            matches = (
+                entry_point
+                for entry_point in distribution(dist_name).entry_points
+                if entry_point.group == group and entry_point.name == name
+            )
+            return next(matches).load()
+
+
+        globals().setdefault('load_entry_point', importlib_load_entry_point)
+
+
+        if __name__ == '__main__':
+            sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
+            sys.exit(load_entry_point(%(spec)r, %(group)r, %(name)r)())
+        """
+    ).lstrip()
+
+    command_spec_class = CommandSpec
+
+    @classmethod
+    def get_args(cls, dist, header=None):
+        """
+        Yield write_script() argument tuples for a distribution's
+        console_scripts and gui_scripts entry points.
+        """
+        if header is None:
+            header = cls.get_header()
+        spec = str(dist.as_requirement())
+        for type_ in 'console', 'gui':
+            group = type_ + '_scripts'
+            for name in dist.get_entry_map(group).keys():
+                cls._ensure_safe_name(name)
+                script_text = cls.template % locals()
+                args = cls._get_script_args(type_, name, header, script_text)
+                yield from args
+
+    @staticmethod
+    def _ensure_safe_name(name):
+        """
+        Prevent paths in *_scripts entry point names.
+        """
+        has_path_sep = re.search(r'[\\/]', name)
+        if has_path_sep:
+            raise ValueError("Path separators not allowed in script names")
+
+    @classmethod
+    def best(cls):
+        """
+        Select the best ScriptWriter for this environment.
+        """
+        if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
+            return WindowsScriptWriter.best()
+        else:
+            return cls
+
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        # Simply write the stub with no extension.
+        yield (name, header + script_text)
+
+    @classmethod
+    def get_header(
+        cls,
+        script_text: str = "",
+        executable: str | CommandSpec | Iterable[str] | None = None,
+    ) -> str:
+        """Create a #! line, getting options (if any) from script_text"""
+        cmd = cls.command_spec_class.best().from_param(executable)
+        cmd.install_options(script_text)
+        return cmd.as_header()
+
+
+class WindowsScriptWriter(ScriptWriter):
+    command_spec_class = WindowsCommandSpec
+
+    @classmethod
+    def best(cls):
+        """
+        Select the best ScriptWriter suitable for Windows
+        """
+        writer_lookup = dict(
+            executable=WindowsExecutableLauncherWriter,
+            natural=cls,
+        )
+        # for compatibility, use the executable launcher by default
+        launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
+        return writer_lookup[launcher]
+
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        "For Windows, add a .py extension"
+        ext = dict(console='.pya', gui='.pyw')[type_]
+        if ext not in os.environ['PATHEXT'].lower().split(';'):
+            msg = (
+                "{ext} not listed in PATHEXT; scripts will not be "
+                "recognized as executables."
+            ).format(**locals())
+            SetuptoolsWarning.emit(msg)
+        old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
+        old.remove(ext)
+        header = cls._adjust_header(type_, header)
+        blockers = [name + x for x in old]
+        yield name + ext, header + script_text, 't', blockers
+
+    @classmethod
+    def _adjust_header(cls, type_, orig_header):
+        """
+        Make sure 'pythonw' is used for gui and 'python' is used for
+        console (regardless of what sys.executable is).
+        """
+        pattern = 'pythonw.exe'
+        repl = 'python.exe'
+        if type_ == 'gui':
+            pattern, repl = repl, pattern
+        pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
+        new_header = pattern_ob.sub(string=orig_header, repl=repl)
+        return new_header if cls._use_header(new_header) else orig_header
+
+    @staticmethod
+    def _use_header(new_header):
+        """
+        Should _adjust_header use the replaced header?
+
+        On non-windows systems, always use. On
+        Windows systems, only use the replaced header if it resolves
+        to an executable on the system.
+        """
+        clean_header = new_header[2:-1].strip('"')
+        return sys.platform != 'win32' or shutil.which(clean_header)
+
+
+class WindowsExecutableLauncherWriter(WindowsScriptWriter):
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        """
+        For Windows, add a .py extension and an .exe launcher
+        """
+        if type_ == 'gui':
+            launcher_type = 'gui'
+            ext = '-script.pyw'
+            old = ['.pyw']
+        else:
+            launcher_type = 'cli'
+            ext = '-script.py'
+            old = ['.py', '.pyc', '.pyo']
+        hdr = cls._adjust_header(type_, header)
+        blockers = [name + x for x in old]
+        yield (name + ext, hdr + script_text, 't', blockers)
+        yield (
+            name + '.exe',
+            get_win_launcher(launcher_type),
+            'b',  # write in binary mode
+        )
+        if not is_64bit():
+            # install a manifest for the launcher to prevent Windows
+            # from detecting it as an installer (which it will for
+            #  launchers like easy_install.exe). Consider only
+            #  adding a manifest for launchers detected as installers.
+            #  See Distribute #143 for details.
+            m_name = name + '.exe.manifest'
+            yield (m_name, load_launcher_manifest(name), 't')
+
+
+def get_win_launcher(type):
+    """
+    Load the Windows launcher (executable) suitable for launching a script.
+
+    `type` should be either 'cli' or 'gui'
+
+    Returns the executable as a byte string.
+    """
+    launcher_fn = f'{type}.exe'
+    if is_64bit():
+        if get_platform() == "win-arm64":
+            launcher_fn = launcher_fn.replace(".", "-arm64.")
+        else:
+            launcher_fn = launcher_fn.replace(".", "-64.")
+    else:
+        launcher_fn = launcher_fn.replace(".", "-32.")
+    return pkg_resources.resource_string('setuptools', launcher_fn)
+
+
+def load_launcher_manifest(name):
+    manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
+    return manifest.decode('utf-8') % vars()
+
+
+def _first_line_re():
+    """
+    Return a regular expression based on first_line_re suitable for matching
+    strings.
+    """
+    if isinstance(first_line_re.pattern, str):
+        return first_line_re
+
+    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
+    return re.compile(first_line_re.pattern.decode())
+
+
+def is_64bit():
+    return struct.calcsize("P") == 8
+
+
+def isascii(s):
+    try:
+        s.encode('ascii')
+    except UnicodeError:
+        return False
+    return True
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 5b96201dca..33d06b6a2b 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -17,8 +17,6 @@
 import io
 import os
 import random
-import re
-import shlex
 import shutil
 import site
 import stat
@@ -31,10 +29,9 @@
 import warnings
 import zipfile
 import zipimport
-from collections.abc import Iterable
 from glob import glob
 from sysconfig import get_path
-from typing import TYPE_CHECKING, NoReturn, TypedDict
+from typing import NoReturn
 
 from jaraco.text import yield_lines
 
@@ -58,16 +55,16 @@
 from setuptools.archive_util import unpack_archive
 from setuptools.command import bdist_egg, setopt
 from setuptools.package_index import URL_SCHEME, PackageIndex, parse_requirement_arg
-from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
+from setuptools.warnings import SetuptoolsDeprecationWarning
 from setuptools.wheel import Wheel
 
 from .._path import ensure_directory
+from .._scripts import CommandSpec, ScriptWriter
 from .._shutil import attempt_chmod_verbose as chmod, rmtree as _rmtree
 from ..compat import py39, py312
 
 from distutils import dir_util, log
 from distutils.command import install
-from distutils.command.build_scripts import first_line_re
 from distutils.errors import (
     DistutilsArgError,
     DistutilsError,
@@ -76,9 +73,6 @@
 )
 from distutils.util import convert_path, get_platform, subst_vars
 
-if TYPE_CHECKING:
-    from typing_extensions import Self
-
 # Turn on PEP440Warnings
 warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
 
@@ -1734,18 +1728,6 @@ def _wrap_lines(cls, lines):
     PthDistributions = RewritePthDistributions  # type: ignore[misc]  # Overwriting type
 
 
-def _first_line_re():
-    """
-    Return a regular expression based on first_line_re suitable for matching
-    strings.
-    """
-    if isinstance(first_line_re.pattern, str):
-        return first_line_re
-
-    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
-    return re.compile(first_line_re.pattern.decode())
-
-
 def update_dist_caches(dist_path, fix_zipimporter_caches):
     """
     Fix any globally cached `dist_path` related data
@@ -1968,317 +1950,10 @@ def is_python_script(script_text, filename):
     return False  # Not any Python I can recognize
 
 
-class _SplitArgs(TypedDict, total=False):
-    comments: bool
-    posix: bool
-
-
-class CommandSpec(list):
-    """
-    A command spec for a #! header, specified as a list of arguments akin to
-    those passed to Popen.
-    """
-
-    options: list[str] = []
-    split_args = _SplitArgs()
-
-    @classmethod
-    def best(cls):
-        """
-        Choose the best CommandSpec class based on environmental conditions.
-        """
-        return cls
-
-    @classmethod
-    def _sys_executable(cls):
-        _default = os.path.normpath(sys.executable)
-        return os.environ.get('__PYVENV_LAUNCHER__', _default)
-
-    @classmethod
-    def from_param(cls, param: Self | str | Iterable[str] | None) -> Self:
-        """
-        Construct a CommandSpec from a parameter to build_scripts, which may
-        be None.
-        """
-        if isinstance(param, cls):
-            return param
-        if isinstance(param, str):
-            return cls.from_string(param)
-        if isinstance(param, Iterable):
-            return cls(param)
-        if param is None:
-            return cls.from_environment()
-        raise TypeError(f"Argument has an unsupported type {type(param)}")
-
-    @classmethod
-    def from_environment(cls):
-        return cls([cls._sys_executable()])
-
-    @classmethod
-    def from_string(cls, string: str) -> Self:
-        """
-        Construct a command spec from a simple string representing a command
-        line parseable by shlex.split.
-        """
-        items = shlex.split(string, **cls.split_args)
-        return cls(items)
-
-    def install_options(self, script_text: str):
-        self.options = shlex.split(self._extract_options(script_text))
-        cmdline = subprocess.list2cmdline(self)
-        if not isascii(cmdline):
-            self.options[:0] = ['-x']
-
-    @staticmethod
-    def _extract_options(orig_script):
-        """
-        Extract any options from the first line of the script.
-        """
-        first = (orig_script + '\n').splitlines()[0]
-        match = _first_line_re().match(first)
-        options = match.group(1) or '' if match else ''
-        return options.strip()
-
-    def as_header(self):
-        return self._render(self + list(self.options))
-
-    @staticmethod
-    def _strip_quotes(item):
-        _QUOTES = '"\''
-        for q in _QUOTES:
-            if item.startswith(q) and item.endswith(q):
-                return item[1:-1]
-        return item
-
-    @staticmethod
-    def _render(items):
-        cmdline = subprocess.list2cmdline(
-            CommandSpec._strip_quotes(item.strip()) for item in items
-        )
-        return '#!' + cmdline + '\n'
-
-
 # For pbr compat; will be removed in a future version.
 sys_executable = CommandSpec._sys_executable()
 
 
-class WindowsCommandSpec(CommandSpec):
-    split_args = _SplitArgs(posix=False)
-
-
-class ScriptWriter:
-    """
-    Encapsulates behavior around writing entry point scripts for console and
-    gui apps.
-    """
-
-    template = textwrap.dedent(
-        r"""
-        # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
-        import re
-        import sys
-
-        # for compatibility with easy_install; see #2198
-        __requires__ = %(spec)r
-
-        try:
-            from importlib.metadata import distribution
-        except ImportError:
-            try:
-                from importlib_metadata import distribution
-            except ImportError:
-                from pkg_resources import load_entry_point
-
-
-        def importlib_load_entry_point(spec, group, name):
-            dist_name, _, _ = spec.partition('==')
-            matches = (
-                entry_point
-                for entry_point in distribution(dist_name).entry_points
-                if entry_point.group == group and entry_point.name == name
-            )
-            return next(matches).load()
-
-
-        globals().setdefault('load_entry_point', importlib_load_entry_point)
-
-
-        if __name__ == '__main__':
-            sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
-            sys.exit(load_entry_point(%(spec)r, %(group)r, %(name)r)())
-        """
-    ).lstrip()
-
-    command_spec_class = CommandSpec
-
-    @classmethod
-    def get_args(cls, dist, header=None):
-        """
-        Yield write_script() argument tuples for a distribution's
-        console_scripts and gui_scripts entry points.
-        """
-        if header is None:
-            header = cls.get_header()
-        spec = str(dist.as_requirement())
-        for type_ in 'console', 'gui':
-            group = type_ + '_scripts'
-            for name in dist.get_entry_map(group).keys():
-                cls._ensure_safe_name(name)
-                script_text = cls.template % locals()
-                args = cls._get_script_args(type_, name, header, script_text)
-                yield from args
-
-    @staticmethod
-    def _ensure_safe_name(name):
-        """
-        Prevent paths in *_scripts entry point names.
-        """
-        has_path_sep = re.search(r'[\\/]', name)
-        if has_path_sep:
-            raise ValueError("Path separators not allowed in script names")
-
-    @classmethod
-    def best(cls):
-        """
-        Select the best ScriptWriter for this environment.
-        """
-        if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
-            return WindowsScriptWriter.best()
-        else:
-            return cls
-
-    @classmethod
-    def _get_script_args(cls, type_, name, header, script_text):
-        # Simply write the stub with no extension.
-        yield (name, header + script_text)
-
-    @classmethod
-    def get_header(
-        cls,
-        script_text: str = "",
-        executable: str | CommandSpec | Iterable[str] | None = None,
-    ) -> str:
-        """Create a #! line, getting options (if any) from script_text"""
-        cmd = cls.command_spec_class.best().from_param(executable)
-        cmd.install_options(script_text)
-        return cmd.as_header()
-
-
-class WindowsScriptWriter(ScriptWriter):
-    command_spec_class = WindowsCommandSpec
-
-    @classmethod
-    def best(cls):
-        """
-        Select the best ScriptWriter suitable for Windows
-        """
-        writer_lookup = dict(
-            executable=WindowsExecutableLauncherWriter,
-            natural=cls,
-        )
-        # for compatibility, use the executable launcher by default
-        launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
-        return writer_lookup[launcher]
-
-    @classmethod
-    def _get_script_args(cls, type_, name, header, script_text):
-        "For Windows, add a .py extension"
-        ext = dict(console='.pya', gui='.pyw')[type_]
-        if ext not in os.environ['PATHEXT'].lower().split(';'):
-            msg = (
-                "{ext} not listed in PATHEXT; scripts will not be "
-                "recognized as executables."
-            ).format(**locals())
-            SetuptoolsWarning.emit(msg)
-        old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
-        old.remove(ext)
-        header = cls._adjust_header(type_, header)
-        blockers = [name + x for x in old]
-        yield name + ext, header + script_text, 't', blockers
-
-    @classmethod
-    def _adjust_header(cls, type_, orig_header):
-        """
-        Make sure 'pythonw' is used for gui and 'python' is used for
-        console (regardless of what sys.executable is).
-        """
-        pattern = 'pythonw.exe'
-        repl = 'python.exe'
-        if type_ == 'gui':
-            pattern, repl = repl, pattern
-        pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
-        new_header = pattern_ob.sub(string=orig_header, repl=repl)
-        return new_header if cls._use_header(new_header) else orig_header
-
-    @staticmethod
-    def _use_header(new_header):
-        """
-        Should _adjust_header use the replaced header?
-
-        On non-windows systems, always use. On
-        Windows systems, only use the replaced header if it resolves
-        to an executable on the system.
-        """
-        clean_header = new_header[2:-1].strip('"')
-        return sys.platform != 'win32' or shutil.which(clean_header)
-
-
-class WindowsExecutableLauncherWriter(WindowsScriptWriter):
-    @classmethod
-    def _get_script_args(cls, type_, name, header, script_text):
-        """
-        For Windows, add a .py extension and an .exe launcher
-        """
-        if type_ == 'gui':
-            launcher_type = 'gui'
-            ext = '-script.pyw'
-            old = ['.pyw']
-        else:
-            launcher_type = 'cli'
-            ext = '-script.py'
-            old = ['.py', '.pyc', '.pyo']
-        hdr = cls._adjust_header(type_, header)
-        blockers = [name + x for x in old]
-        yield (name + ext, hdr + script_text, 't', blockers)
-        yield (
-            name + '.exe',
-            get_win_launcher(launcher_type),
-            'b',  # write in binary mode
-        )
-        if not is_64bit():
-            # install a manifest for the launcher to prevent Windows
-            # from detecting it as an installer (which it will for
-            #  launchers like easy_install.exe). Consider only
-            #  adding a manifest for launchers detected as installers.
-            #  See Distribute #143 for details.
-            m_name = name + '.exe.manifest'
-            yield (m_name, load_launcher_manifest(name), 't')
-
-
-def get_win_launcher(type):
-    """
-    Load the Windows launcher (executable) suitable for launching a script.
-
-    `type` should be either 'cli' or 'gui'
-
-    Returns the executable as a byte string.
-    """
-    launcher_fn = f'{type}.exe'
-    if is_64bit():
-        if get_platform() == "win-arm64":
-            launcher_fn = launcher_fn.replace(".", "-arm64.")
-        else:
-            launcher_fn = launcher_fn.replace(".", "-64.")
-    else:
-        launcher_fn = launcher_fn.replace(".", "-32.")
-    return resource_string('setuptools', launcher_fn)
-
-
-def load_launcher_manifest(name):
-    manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
-    return manifest.decode('utf-8') % vars()
-
-
 def current_umask():
     tmp = os.umask(0o022)
     os.umask(tmp)
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index 4401cf693d..59caac59c4 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -34,7 +34,7 @@ def _install_ep_scripts(self):
         # Delay import side-effects
         from pkg_resources import Distribution, PathMetadata
 
-        from . import easy_install as ei
+        from .. import _scripts
 
         ei_cmd = self.get_finalized_command("egg_info")
         dist = Distribution(
@@ -45,7 +45,7 @@ def _install_ep_scripts(self):
         )
         bs_cmd = self.get_finalized_command('build_scripts')
         exec_param = getattr(bs_cmd, 'executable', None)
-        writer = ei.ScriptWriter
+        writer = _scripts.ScriptWriter
         if exec_param == sys.executable:
             # In case the path to the Python executable contains a space, wrap
             # it so it's not split up.
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index af66b8d66e..9d6571c17e 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -455,17 +455,6 @@ def test_from_param_raises_expected_error(self) -> None:
         ), exc_info.value
 
 
-class TestWindowsScriptWriter:
-    def test_header(self):
-        hdr = ei.WindowsScriptWriter.get_header('')
-        assert hdr.startswith('#!')
-        assert hdr.endswith('\n')
-        hdr = hdr.lstrip('#!')
-        hdr = hdr.rstrip('\n')
-        # header should not start with an escaped quote
-        assert not hdr.startswith('\\"')
-
-
 class VersionStub(NamedTuple):
     major: int
     minor: int
diff --git a/setuptools/tests/test_scripts.py b/setuptools/tests/test_scripts.py
new file mode 100644
index 0000000000..8641f7b639
--- /dev/null
+++ b/setuptools/tests/test_scripts.py
@@ -0,0 +1,12 @@
+from setuptools import _scripts
+
+
+class TestWindowsScriptWriter:
+    def test_header(self):
+        hdr = _scripts.WindowsScriptWriter.get_header('')
+        assert hdr.startswith('#!')
+        assert hdr.endswith('\n')
+        hdr = hdr.lstrip('#!')
+        hdr = hdr.rstrip('\n')
+        # header should not start with an escaped quote
+        assert not hdr.startswith('\\"')

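Patch 1640 moves the script-writing machinery into setuptools/_scripts; easy_install re-imports CommandSpec and ScriptWriter from the new module to keep its names working. A small sketch of building a '#!' header with the relocated CommandSpec (the interpreter path is illustrative):

from setuptools._scripts import CommandSpec

# from_string() splits the command line with shlex; as_header() renders
# it back through subprocess.list2cmdline behind a '#!' prefix.
spec = CommandSpec.from_string('/usr/bin/python3 -E')
assert spec.as_header() == '#!/usr/bin/python3 -E\n'
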
From 7d7be035fec60f172c060012edec6990d4fa7d89 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 27 Apr 2025 20:59:51 -0400
Subject: [PATCH 1641/1761] Reference utility functions from _shutil.

---
 setuptools/_shutil.py                 | 6 ++++++
 setuptools/command/easy_install.py    | 8 +-------
 setuptools/command/install_scripts.py | 2 +-
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/setuptools/_shutil.py b/setuptools/_shutil.py
index 6acbb4281f..660459a110 100644
--- a/setuptools/_shutil.py
+++ b/setuptools/_shutil.py
@@ -51,3 +51,9 @@ def rmtree(path, ignore_errors=False, onexc=_auto_chmod):
 def rmdir(path, **opts):
     if os.path.isdir(path):
         rmtree(path, **opts)
+
+
+def current_umask():
+    tmp = os.umask(0o022)
+    os.umask(tmp)
+    return tmp
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 33d06b6a2b..d530af3154 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -60,7 +60,7 @@
 
 from .._path import ensure_directory
 from .._scripts import CommandSpec, ScriptWriter
-from .._shutil import attempt_chmod_verbose as chmod, rmtree as _rmtree
+from .._shutil import attempt_chmod_verbose as chmod, current_umask, rmtree as _rmtree
 from ..compat import py39, py312
 
 from distutils import dir_util, log
@@ -1954,12 +1954,6 @@ def is_python_script(script_text, filename):
 sys_executable = CommandSpec._sys_executable()
 
 
-def current_umask():
-    tmp = os.umask(0o022)
-    os.umask(tmp)
-    return tmp
-
-
 def only_strs(values):
     """
     Exclude non-str values. Ref #3063.
diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py
index 59caac59c4..02a73e818b 100644
--- a/setuptools/command/install_scripts.py
+++ b/setuptools/command/install_scripts.py
@@ -58,7 +58,7 @@ def _install_ep_scripts(self):
 
     def write_script(self, script_name, contents, mode: str = "t", *ignored) -> None:
         """Write an executable file to the scripts directory"""
-        from setuptools.command.easy_install import chmod, current_umask
+        from .._shutil import attempt_chmod_verbose as chmod, current_umask
 
         log.info("Installing %s script to %s", script_name, self.install_dir)
         target = os.path.join(self.install_dir, script_name)

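Patch 1641 relocates current_umask to setuptools/_shutil alongside the other filesystem helpers. A short sketch of the read-by-writing trick and the kind of mode arithmetic write_script performs with it (the final line mirrors, but is not verbatim from, the command):

from setuptools._shutil import current_umask

# os.umask can only be read by setting it, so the helper installs a
# throwaway mask, captures the previous value, and restores it.
mask = current_umask()
mode = 0o777 - mask  # executable permissions for a freshly written script
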
From 8940538d4703c71d39ca596ae3bb8ae1a76eba9e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs" 
Date: Sun, 27 Apr 2025 20:09:49 -0400
Subject: [PATCH 1642/1761] Remove easy_install and package_index.

---
 setuptools/command/easy_install.py    | 1989 +------------------------
 setuptools/package_index.py           | 1179 ---------------
 setuptools/tests/test_easy_install.py |  509 -------
 setuptools/tests/test_packageindex.py |  267 ----
 4 files changed, 1 insertion(+), 3943 deletions(-)
 delete mode 100644 setuptools/package_index.py
 delete mode 100644 setuptools/tests/test_easy_install.py
 delete mode 100644 setuptools/tests/test_packageindex.py

diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index d530af3154..c4bc88d589 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1,1992 +1,5 @@
-"""
-Easy Install
-------------
-
-A tool for doing automatic download/extract/build of distutils-based Python
-packages.  For detailed documentation, see the accompanying EasyInstall.txt
-file, or visit the `EasyInstall home page`__.
-
-__ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
-
-"""
-
-from __future__ import annotations
-
-import configparser
-import contextlib
-import io
-import os
-import random
-import shutil
-import site
-import stat
-import struct
-import subprocess
-import sys
-import sysconfig
-import tempfile
-import textwrap
-import warnings
-import zipfile
-import zipimport
-from glob import glob
-from sysconfig import get_path
-from typing import NoReturn
-
-from jaraco.text import yield_lines
-
-import pkg_resources
-from pkg_resources import (
-    DEVELOP_DIST,
-    Distribution,
-    DistributionNotFound,
-    EggMetadata,
-    Environment,
-    PathMetadata,
-    Requirement,
-    VersionConflict,
-    WorkingSet,
-    find_distributions,
-    get_distribution,
-    normalize_path,
-    resource_string,
-)
 from setuptools import Command
-from setuptools.archive_util import unpack_archive
-from setuptools.command import bdist_egg, setopt
-from setuptools.package_index import URL_SCHEME, PackageIndex, parse_requirement_arg
-from setuptools.warnings import SetuptoolsDeprecationWarning
-from setuptools.wheel import Wheel
-
-from .._path import ensure_directory
-from .._scripts import CommandSpec, ScriptWriter
-from .._shutil import attempt_chmod_verbose as chmod, current_umask, rmtree as _rmtree
-from ..compat import py39, py312
-
-from distutils import dir_util, log
-from distutils.command import install
-from distutils.errors import (
-    DistutilsArgError,
-    DistutilsError,
-    DistutilsOptionError,
-    DistutilsPlatformError,
-)
-from distutils.util import convert_path, get_platform, subst_vars
-
-# Turn on PEP440Warnings
-warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
-
-__all__ = [
-    'easy_install',
-    'PthDistributions',
-    'extract_wininst_cfg',
-    'get_exe_prefixes',
-]
-
-
-def is_64bit():
-    return struct.calcsize("P") == 8
-
-
-def _to_bytes(s):
-    return s.encode('utf8')
-
-
-def isascii(s):
-    try:
-        s.encode('ascii')
-    except UnicodeError:
-        return False
-    return True
-
-
-def _one_liner(text):
-    return textwrap.dedent(text).strip().replace('\n', '; ')
 
 
 class easy_install(Command):
-    """Manage a download/build/install process"""
-
-    description = "Find/get/install Python packages"
-    command_consumes_arguments = True
-
-    user_options = [
-        ('prefix=', None, "installation prefix"),
-        ("zip-ok", "z", "install package as a zipfile"),
-        ("multi-version", "m", "make apps have to require() a version"),
-        ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
-        ("install-dir=", "d", "install package to DIR"),
-        ("script-dir=", "s", "install scripts to DIR"),
-        ("exclude-scripts", "x", "Don't install scripts"),
-        ("always-copy", "a", "Copy all needed packages to install dir"),
-        ("index-url=", "i", "base URL of Python Package Index"),
-        ("find-links=", "f", "additional URL(s) to search for packages"),
-        ("build-directory=", "b", "download/extract/build in DIR; keep the results"),
-        (
-            'optimize=',
-            'O',
-            'also compile with optimization: -O1 for "python -O", '
-            '-O2 for "python -OO", and -O0 to disable [default: -O0]',
-        ),
-        ('record=', None, "filename in which to record list of installed files"),
-        ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
-        ('site-dirs=', 'S', "list of directories where .pth files work"),
-        ('editable', 'e', "Install specified packages in editable form"),
-        ('no-deps', 'N', "don't install dependencies"),
-        ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
-        ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"),
-        ('version', None, "print version information and exit"),
-        (
-            'no-find-links',
-            None,
-            "Don't load find-links defined in packages being installed",
-        ),
-        ('user', None, f"install in user site-package '{site.USER_SITE}'"),
-    ]
-    boolean_options = [
-        'zip-ok',
-        'multi-version',
-        'exclude-scripts',
-        'upgrade',
-        'always-copy',
-        'editable',
-        'no-deps',
-        'local-snapshots-ok',
-        'version',
-        'user',
-    ]
-
-    negative_opt = {'always-unzip': 'zip-ok'}
-    create_index = PackageIndex
-
-    def initialize_options(self):
-        EasyInstallDeprecationWarning.emit()
-
-        # the --user option seems to be an opt-in one,
-        # so the default should be False.
-        self.user = False
-        self.zip_ok = self.local_snapshots_ok = None
-        self.install_dir = self.script_dir = self.exclude_scripts = None
-        self.index_url = None
-        self.find_links = None
-        self.build_directory = None
-        self.args = None
-        self.optimize = self.record = None
-        self.upgrade = self.always_copy = self.multi_version = None
-        self.editable = self.no_deps = self.allow_hosts = None
-        self.root = self.prefix = self.no_report = None
-        self.version = None
-        self.install_purelib = None  # for pure module distributions
-        self.install_platlib = None  # non-pure (dists w/ extensions)
-        self.install_headers = None  # for C/C++ headers
-        self.install_lib = None  # set to either purelib or platlib
-        self.install_scripts = None
-        self.install_data = None
-        self.install_base = None
-        self.install_platbase = None
-        self.install_userbase = site.USER_BASE
-        self.install_usersite = site.USER_SITE
-        self.no_find_links = None
-
-        # Options not specifiable via command line
-        self.package_index = None
-        self.pth_file = self.always_copy_from = None
-        self.site_dirs = None
-        self.installed_projects = {}
-        # Always read easy_install options, even if we are subclassed, or have
-        # an independent instance created.  This ensures that defaults will
-        # always come from the standard configuration file(s)' "easy_install"
-        # section, even if this is a "develop" or "install" command, or some
-        # other embedding.
-        self._dry_run = None
-        self.verbose = self.distribution.verbose
-        self.distribution._set_command_options(
-            self, self.distribution.get_option_dict('easy_install')
-        )
-
-    def delete_blockers(self, blockers) -> None:
-        extant_blockers = (
-            filename
-            for filename in blockers
-            if os.path.exists(filename) or os.path.islink(filename)
-        )
-        list(map(self._delete_path, extant_blockers))
-
-    def _delete_path(self, path):
-        log.info("Deleting %s", path)
-        if self.dry_run:
-            return
-
-        is_tree = os.path.isdir(path) and not os.path.islink(path)
-        remover = _rmtree if is_tree else os.unlink
-        remover(path)
-
-    @staticmethod
-    def _render_version():
-        """
-        Render the Setuptools version and installation details, then exit.
-        """
-        ver = f'{sys.version_info.major}.{sys.version_info.minor}'
-        dist = get_distribution('setuptools')
-        print(f'setuptools {dist.version} from {dist.location} (Python {ver})')
-        raise SystemExit
-
-    def finalize_options(self) -> None:  # noqa: C901  # is too complex (25)  # FIXME
-        self.version and self._render_version()
-
-        py_version = sys.version.split()[0]
-
-        self.config_vars = dict(sysconfig.get_config_vars())
-
-        self.config_vars.update({
-            'dist_name': self.distribution.get_name(),
-            'dist_version': self.distribution.get_version(),
-            'dist_fullname': self.distribution.get_fullname(),
-            'py_version': py_version,
-            'py_version_short': f'{sys.version_info.major}.{sys.version_info.minor}',
-            'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}',
-            'sys_prefix': self.config_vars['prefix'],
-            'sys_exec_prefix': self.config_vars['exec_prefix'],
-            # Only POSIX systems have abiflags
-            'abiflags': getattr(sys, 'abiflags', ''),
-            # Only python 3.9+ has platlibdir
-            'platlibdir': getattr(sys, 'platlibdir', 'lib'),
-        })
-        with contextlib.suppress(AttributeError):
-            # only for distutils outside stdlib
-            self.config_vars.update({
-                'implementation_lower': install._get_implementation().lower(),
-                'implementation': install._get_implementation(),
-            })
-
-        # pypa/distutils#113 Python 3.9 compat
-        self.config_vars.setdefault(
-            'py_version_nodot_plat',
-            getattr(sys, 'windir', '').replace('.', ''),
-        )
-
-        self.config_vars['userbase'] = self.install_userbase
-        self.config_vars['usersite'] = self.install_usersite
-        if self.user and not site.ENABLE_USER_SITE:
-            log.warn("WARNING: The user site-packages directory is disabled.")
-
-        self._fix_install_dir_for_user_site()
-
-        self.expand_basedirs()
-        self.expand_dirs()
-
-        self._expand(
-            'install_dir',
-            'script_dir',
-            'build_directory',
-            'site_dirs',
-        )
-        # If a non-default installation directory was specified, default the
-        # script directory to match it.
-        if self.script_dir is None:
-            self.script_dir = self.install_dir
-
-        if self.no_find_links is None:
-            self.no_find_links = False
-
-        # Let install_dir get set by install_lib command, which in turn
-        # gets its info from the install command, and takes into account
-        # --prefix and --home and all that other crud.
-        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
-        # Likewise, set default script_dir from 'install_scripts.install_dir'
-        self.set_undefined_options('install_scripts', ('install_dir', 'script_dir'))
-
-        if self.user and self.install_purelib:
-            self.install_dir = self.install_purelib
-            self.script_dir = self.install_scripts
-        # default --record from the install command
-        self.set_undefined_options('install', ('record', 'record'))
-        self.all_site_dirs = get_site_dirs()
-        self.all_site_dirs.extend(self._process_site_dirs(self.site_dirs))
-
-        if not self.editable:
-            self.check_site_dir()
-        default_index = os.getenv("__EASYINSTALL_INDEX", "https://pypi.org/simple/")
-        # ^ Private API for testing purposes only
-        self.index_url = self.index_url or default_index
-        self.shadow_path = self.all_site_dirs[:]
-        for path_item in self.install_dir, normalize_path(self.script_dir):
-            if path_item not in self.shadow_path:
-                self.shadow_path.insert(0, path_item)
-
-        if self.allow_hosts is not None:
-            hosts = [s.strip() for s in self.allow_hosts.split(',')]
-        else:
-            hosts = ['*']
-        if self.package_index is None:
-            self.package_index = self.create_index(
-                self.index_url,
-                search_path=self.shadow_path,
-                hosts=hosts,
-            )
-        self.local_index = Environment(self.shadow_path + sys.path)
-
-        if self.find_links is not None:
-            if isinstance(self.find_links, str):
-                self.find_links = self.find_links.split()
-        else:
-            self.find_links = []
-        if self.local_snapshots_ok:
-            self.package_index.scan_egg_links(self.shadow_path + sys.path)
-        if not self.no_find_links:
-            self.package_index.add_find_links(self.find_links)
-        self.set_undefined_options('install_lib', ('optimize', 'optimize'))
-        self.optimize = self._validate_optimize(self.optimize)
-
-        if self.editable and not self.build_directory:
-            raise DistutilsArgError(
-                "Must specify a build directory (-b) when using --editable"
-            )
-        if not self.args:
-            raise DistutilsArgError(
-                "No urls, filenames, or requirements specified (see --help)"
-            )
-
-        self.outputs: list[str] = []
-
-    @staticmethod
-    def _process_site_dirs(site_dirs):
-        if site_dirs is None:
-            return
-
-        normpath = map(normalize_path, sys.path)
-        site_dirs = [os.path.expanduser(s.strip()) for s in site_dirs.split(',')]
-        for d in site_dirs:
-            if not os.path.isdir(d):
-                log.warn("%s (in --site-dirs) does not exist", d)
-            elif normalize_path(d) not in normpath:
-                raise DistutilsOptionError(d + " (in --site-dirs) is not on sys.path")
-            else:
-                yield normalize_path(d)
-
-    @staticmethod
-    def _validate_optimize(value):
-        try:
-            value = int(value)
-            if value not in range(3):
-                raise ValueError
-        except ValueError as e:
-            raise DistutilsOptionError("--optimize must be 0, 1, or 2") from e
-
-        return value
-
-    def _fix_install_dir_for_user_site(self):
-        """
-        Fix the install_dir if "--user" was used.
-        """
-        if not self.user:
-            return
-
-        self.create_home_path()
-        if self.install_userbase is None:
-            msg = "User base directory is not specified"
-            raise DistutilsPlatformError(msg)
-        self.install_base = self.install_platbase = self.install_userbase
-        scheme_name = f'{os.name}_user'
-        self.select_scheme(scheme_name)
-
-    def _expand_attrs(self, attrs):
-        for attr in attrs:
-            val = getattr(self, attr)
-            if val is not None:
-                if os.name in ('posix', 'nt'):
-                    val = os.path.expanduser(val)
-                val = subst_vars(val, self.config_vars)
-                setattr(self, attr, val)
-
-    def expand_basedirs(self) -> None:
-        """Calls `os.path.expanduser` on install_base, install_platbase and
-        root."""
-        self._expand_attrs(['install_base', 'install_platbase', 'root'])
-
-    def expand_dirs(self) -> None:
-        """Calls `os.path.expanduser` on install dirs."""
-        dirs = [
-            'install_purelib',
-            'install_platlib',
-            'install_lib',
-            'install_headers',
-            'install_scripts',
-            'install_data',
-        ]
-        self._expand_attrs(dirs)
-
-    def run(self, show_deprecation: bool = True) -> None:
-        raise RuntimeError("easy_install command is disabled")
-
-    def pseudo_tempname(self):
-        """Return a pseudo-tempname base in the install directory.
-        This code is intentionally naive; if a malicious party can write to
-        the target directory you're already in deep doodoo.
-        """
-        try:
-            pid = os.getpid()
-        except Exception:
-            pid = random.randint(0, sys.maxsize)
-        return os.path.join(self.install_dir, f"test-easy-install-{pid}")
-
-    def warn_deprecated_options(self) -> None:
-        pass
-
-    def check_site_dir(self) -> None:  # is too complex (12)  # FIXME
-        """Verify that self.install_dir is .pth-capable dir, if needed"""
-
-        instdir = normalize_path(self.install_dir)
-        pth_file = os.path.join(instdir, 'easy-install.pth')
-
-        if not os.path.exists(instdir):
-            try:
-                os.makedirs(instdir)
-            except OSError:
-                self.cant_write_to_target()
-
-        # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
-        is_site_dir = instdir in self.all_site_dirs
-
-        if not is_site_dir and not self.multi_version:
-            # No?  Then directly test whether it does .pth file processing
-            is_site_dir = self.check_pth_processing()
-        else:
-            # make sure we can write to target dir
-            testfile = self.pseudo_tempname() + '.write-test'
-            test_exists = os.path.exists(testfile)
-            try:
-                if test_exists:
-                    os.unlink(testfile)
-                open(testfile, 'wb').close()
-                os.unlink(testfile)
-            except OSError:
-                self.cant_write_to_target()
-
-        if not is_site_dir and not self.multi_version:
-            # Can't install non-multi to non-site dir with easy_install
-            pythonpath = os.environ.get('PYTHONPATH', '')
-            log.warn(self.__no_default_msg, self.install_dir, pythonpath)
-
-        if is_site_dir:
-            if self.pth_file is None:
-                self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
-        else:
-            self.pth_file = None
-
-        if self.multi_version and not os.path.exists(pth_file):
-            self.pth_file = None  # don't create a .pth file
-        self.install_dir = instdir
-
-    __cant_write_msg = textwrap.dedent(
-        """
-        can't create or remove files in install directory
-
-        The following error occurred while trying to add or remove files in the
-        installation directory:
-
-            %s
-
-        The installation directory you specified (via --install-dir, --prefix, or
-        the distutils default setting) was:
-
-            %s
-        """
-    ).lstrip()
-
-    __not_exists_id = textwrap.dedent(
-        """
-        This directory does not currently exist.  Please create it and try again, or
-        choose a different installation directory (using the -d or --install-dir
-        option).
-        """
-    ).lstrip()
-
-    __access_msg = textwrap.dedent(
-        """
-        Perhaps your account does not have write access to this directory?  If the
-        installation directory is a system-owned directory, you may need to sign in
-        as the administrator or "root" account.  If you do not have administrative
-        access to this machine, you may wish to choose a different installation
-        directory, preferably one that is listed in your PYTHONPATH environment
-        variable.
-
-        For information on other options, you may wish to consult the
-        documentation at:
-
-          https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
-
-        Please make the appropriate changes for your system and try again.
-        """
-    ).lstrip()
-
-    def cant_write_to_target(self) -> NoReturn:
-        msg = self.__cant_write_msg % (
-            sys.exc_info()[1],
-            self.install_dir,
-        )
-
-        if not os.path.exists(self.install_dir):
-            msg += '\n' + self.__not_exists_id
-        else:
-            msg += '\n' + self.__access_msg
-        raise DistutilsError(msg)
-
-    def check_pth_processing(self):  # noqa: C901
-        """Empirically verify whether .pth files are supported in inst. dir"""
-        instdir = self.install_dir
-        log.info("Checking .pth file support in %s", instdir)
-        pth_file = self.pseudo_tempname() + ".pth"
-        ok_file = pth_file + '.ok'
-        ok_exists = os.path.exists(ok_file)
-        tmpl = (
-            _one_liner(
-                """
-            import os
-            f = open({ok_file!r}, 'w', encoding="utf-8")
-            f.write('OK')
-            f.close()
-            """
-            )
-            + '\n'
-        )
-        try:
-            if ok_exists:
-                os.unlink(ok_file)
-            dirname = os.path.dirname(ok_file)
-            os.makedirs(dirname, exist_ok=True)
-            f = open(pth_file, 'w', encoding=py312.PTH_ENCODING)
-            # ^-- Python<3.13 requires encoding="locale" instead of "utf-8",
-            #     see python/cpython#77102.
-        except OSError:
-            self.cant_write_to_target()
-        else:
-            try:
-                f.write(tmpl.format(**locals()))
-                f.close()
-                f = None
-                executable = sys.executable
-                if os.name == 'nt':
-                    dirname, basename = os.path.split(executable)
-                    alt = os.path.join(dirname, 'pythonw.exe')
-                    use_alt = basename.lower() == 'python.exe' and os.path.exists(alt)
-                    if use_alt:
-                        # use pythonw.exe to avoid opening a console window
-                        executable = alt
-
-                from distutils.spawn import spawn
-
-                spawn([executable, '-E', '-c', 'pass'], 0)
-
-                if os.path.exists(ok_file):
-                    log.info("TEST PASSED: %s appears to support .pth files", instdir)
-                    return True
-            finally:
-                if f:
-                    f.close()
-                if os.path.exists(ok_file):
-                    os.unlink(ok_file)
-                if os.path.exists(pth_file):
-                    os.unlink(pth_file)
-        if not self.multi_version:
-            log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
-        return False
-
-    def install_egg_scripts(self, dist) -> None:
-        """Write all the scripts for `dist`, unless scripts are excluded"""
-        if not self.exclude_scripts and dist.metadata_isdir('scripts'):
-            for script_name in dist.metadata_listdir('scripts'):
-                if dist.metadata_isdir('scripts/' + script_name):
-                    # The "script" is a directory, likely a Python 3
-                    # __pycache__ directory, so skip it.
-                    continue
-                self.install_script(
-                    dist, script_name, dist.get_metadata('scripts/' + script_name)
-                )
-        self.install_wrapper_scripts(dist)
-
-    def add_output(self, path) -> None:
-        if os.path.isdir(path):
-            for base, dirs, files in os.walk(path):
-                for filename in files:
-                    self.outputs.append(os.path.join(base, filename))
-        else:
-            self.outputs.append(path)
-
-    def not_editable(self, spec) -> None:
-        if self.editable:
-            raise DistutilsArgError(
-                f"Invalid argument {spec!r}: you can't use filenames or URLs "
-                "with --editable (except via the --find-links option)."
-            )
-
-    def check_editable(self, spec) -> None:
-        if not self.editable:
-            return
-
-        if os.path.exists(os.path.join(self.build_directory, spec.key)):
-            raise DistutilsArgError(
-                f"{spec.key!r} already exists in {self.build_directory}; can't do a checkout there"
-            )
-
-    @contextlib.contextmanager
-    def _tmpdir(self):
-        tmpdir = tempfile.mkdtemp(prefix="easy_install-")
-        try:
-            # cast to str as workaround for #709 and #710 and #712
-            yield str(tmpdir)
-        finally:
-            if os.path.exists(tmpdir):
-                _rmtree(tmpdir)
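
A modern near-equivalent of _tmpdir, for comparison (tempfile.TemporaryDirectory
bundles the mkdtemp/_rmtree pairing; the str() cast above worked around the
pkg_resources path issues referenced in #709/#710/#712):

    import tempfile

    # The directory and its contents are removed when the context exits.
    with tempfile.TemporaryDirectory(prefix="easy_install-") as tmpdir:
        ...  # tmpdir is already a str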
-
-    def easy_install(self, spec, deps: bool = False) -> Distribution | None:
-        with self._tmpdir() as tmpdir:
-            if not isinstance(spec, Requirement):
-                if URL_SCHEME(spec):
-                    # It's a url, download it to tmpdir and process
-                    self.not_editable(spec)
-                    dl = self.package_index.download(spec, tmpdir)
-                    return self.install_item(None, dl, tmpdir, deps, True)
-
-                elif os.path.exists(spec):
-                    # Existing file or directory, just process it directly
-                    self.not_editable(spec)
-                    return self.install_item(None, spec, tmpdir, deps, True)
-                else:
-                    spec = parse_requirement_arg(spec)
-
-            self.check_editable(spec)
-            dist = self.package_index.fetch_distribution(
-                spec,
-                tmpdir,
-                self.upgrade,
-                self.editable,
-                not self.always_copy,
-                self.local_index,
-            )
-            if dist is None:
-                msg = f"Could not find suitable distribution for {spec!r}"
-                if self.always_copy:
-                    msg += " (--always-copy skips system and development eggs)"
-                raise DistutilsError(msg)
-            elif dist.precedence == DEVELOP_DIST:
-                # .egg-info dists don't need installing, just process deps
-                self.process_distribution(spec, dist, deps, "Using")
-                return dist
-            else:
-                return self.install_item(spec, dist.location, tmpdir, deps)
-
-    def install_item(
-        self, spec, download, tmpdir, deps, install_needed: bool = False
-    ) -> Distribution | None:
-        # Installation is also needed if file in tmpdir or is not an egg
-        install_needed = install_needed or bool(self.always_copy)
-        install_needed = install_needed or os.path.dirname(download) == tmpdir
-        install_needed = install_needed or not download.endswith('.egg')
-        install_needed = install_needed or (
-            self.always_copy_from is not None
-            and os.path.dirname(normalize_path(download))
-            == normalize_path(self.always_copy_from)
-        )
-
-        if spec and not install_needed:
-            # at this point, we know it's a local .egg, we just don't know if
-            # it's already installed.
-            for dist in self.local_index[spec.project_name]:
-                if dist.location == download:
-                    break
-            else:
-                install_needed = True  # it's not in the local index
-
-        log.info("Processing %s", os.path.basename(download))
-
-        if install_needed:
-            dists = self.install_eggs(spec, download, tmpdir)
-            for dist in dists:
-                self.process_distribution(spec, dist, deps)
-        else:
-            dists = [self.egg_distribution(download)]
-            self.process_distribution(spec, dists[0], deps, "Using")
-
-        if spec is not None:
-            for dist in dists:
-                if dist in spec:
-                    return dist
-        return None
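
The install_needed cascade above reads more easily as a single predicate; a
condensed, illustrative rewrite (the helper name is ours, not setuptools'):

    import os

    def needs_install(download, tmpdir, always_copy, always_copy_from=None):
        """True when `download` cannot simply be activated where it is."""
        return (
            bool(always_copy)
            or os.path.dirname(download) == tmpdir  # lives in our temp dir
            or not download.endswith(".egg")        # not a built egg yet
            or (
                always_copy_from is not None
                and os.path.dirname(os.path.realpath(download))
                == os.path.realpath(always_copy_from)
            )
        )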
-
-    def select_scheme(self, name):
-        try:
-            install._select_scheme(self, name)
-        except AttributeError:
-            # stdlib distutils
-            install.install.select_scheme(self, name.replace('posix', 'unix'))
-
-    # FIXME: 'easy_install.process_distribution' is too complex (12)
-    def process_distribution(  # noqa: C901
-        self,
-        requirement,
-        dist,
-        deps: bool = True,
-        *info,
-    ) -> None:
-        self.update_pth(dist)
-        self.package_index.add(dist)
-        if dist in self.local_index[dist.key]:
-            self.local_index.remove(dist)
-        self.local_index.add(dist)
-        self.install_egg_scripts(dist)
-        self.installed_projects[dist.key] = dist
-        log.info(self.installation_report(requirement, dist, *info))
-        if dist.has_metadata('dependency_links.txt') and not self.no_find_links:
-            self.package_index.add_find_links(
-                dist.get_metadata_lines('dependency_links.txt')
-            )
-        if not deps and not self.always_copy:
-            return
-        elif requirement is not None and dist.key != requirement.key:
-            log.warn("Skipping dependencies for %s", dist)
-            return  # XXX this is not the distribution we were looking for
-        elif requirement is None or dist not in requirement:
-            # if we wound up with a different version, resolve what we've got
-            distreq = dist.as_requirement()
-            requirement = Requirement(str(distreq))
-        log.info("Processing dependencies for %s", requirement)
-        try:
-            distros = WorkingSet([]).resolve(
-                [requirement], self.local_index, self.easy_install
-            )
-        except DistributionNotFound as e:
-            raise DistutilsError(str(e)) from e
-        except VersionConflict as e:
-            raise DistutilsError(e.report()) from e
-        if self.always_copy or self.always_copy_from:
-            # Force all the relevant distros to be copied or activated
-            for dist in distros:
-                if dist.key not in self.installed_projects:
-                    self.easy_install(dist.as_requirement())
-        log.info("Finished processing dependencies for %s", requirement)
-
-    def should_unzip(self, dist) -> bool:
-        if self.zip_ok is not None:
-            return not self.zip_ok
-        if dist.has_metadata('not-zip-safe'):
-            return True
-        if not dist.has_metadata('zip-safe'):
-            return True
-        return False
-
-    def maybe_move(self, spec, dist_filename, setup_base):
-        dst = os.path.join(self.build_directory, spec.key)
-        if os.path.exists(dst):
-            msg = "%r already exists in %s; build directory %s will not be kept"
-            log.warn(msg, spec.key, self.build_directory, setup_base)
-            return setup_base
-        if os.path.isdir(dist_filename):
-            setup_base = dist_filename
-        else:
-            if os.path.dirname(dist_filename) == setup_base:
-                os.unlink(dist_filename)  # get it out of the tmp dir
-            contents = os.listdir(setup_base)
-            if len(contents) == 1:
-                dist_filename = os.path.join(setup_base, contents[0])
-                if os.path.isdir(dist_filename):
-                    # if the only thing there is a directory, move it instead
-                    setup_base = dist_filename
-        ensure_directory(dst)
-        shutil.move(setup_base, dst)
-        return dst
-
-    def install_wrapper_scripts(self, dist) -> None:
-        if self.exclude_scripts:
-            return
-        for args in ScriptWriter.best().get_args(dist):
-            self.write_script(*args)
-
-    def install_script(self, dist, script_name, script_text, dev_path=None) -> None:
-        """Generate a legacy script wrapper and install it"""
-        spec = str(dist.as_requirement())
-        is_script = is_python_script(script_text, script_name)
-
-        if is_script:
-            body = self._load_template(dev_path) % locals()
-            script_text = ScriptWriter.get_header(script_text) + body
-        self.write_script(script_name, _to_bytes(script_text), 'b')
-
-    @staticmethod
-    def _load_template(dev_path):
-        """
-        There are a couple of template scripts in the package. This
-        function loads one of them and prepares it for use.
-        """
-        # See https://github.com/pypa/setuptools/issues/134 for info
-        # on script file naming and downstream issues with SVR4
-        name = 'script.tmpl'
-        if dev_path:
-            name = name.replace('.tmpl', ' (dev).tmpl')
-
-        raw_bytes = resource_string('setuptools', name)
-        return raw_bytes.decode('utf-8')
-
-    def write_script(self, script_name, contents, mode: str = "t", blockers=()) -> None:
-        """Write an executable file to the scripts directory"""
-        self.delete_blockers(  # clean up old .py/.pyw w/o a script
-            [os.path.join(self.script_dir, x) for x in blockers]
-        )
-        log.info("Installing %s script to %s", script_name, self.script_dir)
-        target = os.path.join(self.script_dir, script_name)
-        self.add_output(target)
-
-        if self.dry_run:
-            return
-
-        mask = current_umask()
-        ensure_directory(target)
-        if os.path.exists(target):
-            os.unlink(target)
-
-        encoding = None if "b" in mode else "utf-8"
-        with open(target, "w" + mode, encoding=encoding) as f:
-            f.write(contents)
-        chmod(target, 0o777 - mask)
-
-    def install_eggs(self, spec, dist_filename, tmpdir) -> list[Distribution]:
-        # .egg dirs or files are already built, so just return them
-        installer_map = {
-            '.egg': self.install_egg,
-            '.exe': self.install_exe,
-            '.whl': self.install_wheel,
-        }
-        try:
-            install_dist = installer_map[dist_filename.lower()[-4:]]
-        except KeyError:
-            pass
-        else:
-            return [install_dist(dist_filename, tmpdir)]
-
-        # Anything else, try to extract and build
-        setup_base = tmpdir
-        if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
-            unpack_archive(dist_filename, tmpdir, self.unpack_progress)
-        elif os.path.isdir(dist_filename):
-            setup_base = os.path.abspath(dist_filename)
-
-        if (
-            setup_base.startswith(tmpdir)  # something we downloaded
-            and self.build_directory
-            and spec is not None
-        ):
-            setup_base = self.maybe_move(spec, dist_filename, setup_base)
-
-        # Find the setup.py file
-        setup_script = os.path.join(setup_base, 'setup.py')
-
-        if not os.path.exists(setup_script):
-            setups = glob(os.path.join(setup_base, '*', 'setup.py'))
-            if not setups:
-                raise DistutilsError(
-                    f"Couldn't find a setup script in {os.path.abspath(dist_filename)}"
-                )
-            if len(setups) > 1:
-                raise DistutilsError(
-                    f"Multiple setup scripts in {os.path.abspath(dist_filename)}"
-                )
-            setup_script = setups[0]
-
-        # Now run it, and return the result
-        if self.editable:
-            log.info(self.report_editable(spec, setup_script))
-            return []
-        else:
-            return self.build_and_install(setup_script, setup_base)
-
-    def egg_distribution(self, egg_path):
-        if os.path.isdir(egg_path):
-            metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
-        else:
-            metadata = EggMetadata(zipimport.zipimporter(egg_path))
-        return Distribution.from_filename(egg_path, metadata=metadata)
-
-    # FIXME: 'easy_install.install_egg' is too complex (11)
-    def install_egg(self, egg_path, tmpdir):
-        destination = os.path.join(
-            self.install_dir,
-            os.path.basename(egg_path),
-        )
-        destination = os.path.abspath(destination)
-        if not self.dry_run:
-            ensure_directory(destination)
-
-        dist = self.egg_distribution(egg_path)
-        if not (
-            os.path.exists(destination) and os.path.samefile(egg_path, destination)
-        ):
-            if os.path.isdir(destination) and not os.path.islink(destination):
-                dir_util.remove_tree(destination, dry_run=self.dry_run)
-            elif os.path.exists(destination):
-                self.execute(
-                    os.unlink,
-                    (destination,),
-                    "Removing " + destination,
-                )
-            try:
-                new_dist_is_zipped = False
-                if os.path.isdir(egg_path):
-                    if egg_path.startswith(tmpdir):
-                        f, m = shutil.move, "Moving"
-                    else:
-                        f, m = shutil.copytree, "Copying"
-                elif self.should_unzip(dist):
-                    self.mkpath(destination)
-                    f, m = self.unpack_and_compile, "Extracting"
-                else:
-                    new_dist_is_zipped = True
-                    if egg_path.startswith(tmpdir):
-                        f, m = shutil.move, "Moving"
-                    else:
-                        f, m = shutil.copy2, "Copying"
-                self.execute(
-                    f,
-                    (egg_path, destination),
-                    (m + " %s to %s")
-                    % (os.path.basename(egg_path), os.path.dirname(destination)),
-                )
-                update_dist_caches(
-                    destination,
-                    fix_zipimporter_caches=new_dist_is_zipped,
-                )
-            except Exception:
-                update_dist_caches(destination, fix_zipimporter_caches=False)
-                raise
-
-        self.add_output(destination)
-        return self.egg_distribution(destination)
-
-    def install_exe(self, dist_filename, tmpdir):
-        # See if it's valid, get data
-        cfg = extract_wininst_cfg(dist_filename)
-        if cfg is None:
-            raise DistutilsError(
-                f"{dist_filename} is not a valid distutils Windows .exe"
-            )
-        # Create a dummy distribution object until we build the real distro
-        dist = Distribution(
-            None,
-            project_name=cfg.get('metadata', 'name'),
-            version=cfg.get('metadata', 'version'),
-            platform=get_platform(),
-        )
-
-        # Convert the .exe to an unpacked egg
-        egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg')
-        dist.location = egg_path
-        egg_tmp = egg_path + '.tmp'
-        _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
-        pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
-        ensure_directory(pkg_inf)  # make sure EGG-INFO dir exists
-        dist._provider = PathMetadata(egg_tmp, _egg_info)  # XXX
-        self.exe_to_egg(dist_filename, egg_tmp)
-
-        # Write EGG-INFO/PKG-INFO
-        if not os.path.exists(pkg_inf):
-            with open(pkg_inf, 'w', encoding="utf-8") as f:
-                f.write('Metadata-Version: 1.0\n')
-                for k, v in cfg.items('metadata'):
-                    if k != 'target_version':
-                        k = k.replace('_', '-').title()
-                        f.write(f'{k}: {v}\n')
-        script_dir = os.path.join(_egg_info, 'scripts')
-        # delete entry-point scripts to avoid duping
-        self.delete_blockers([
-            os.path.join(script_dir, args[0]) for args in ScriptWriter.get_args(dist)
-        ])
-        # Build .egg file from tmpdir
-        bdist_egg.make_zipfile(
-            egg_path,
-            egg_tmp,
-            verbose=self.verbose,
-            dry_run=self.dry_run,
-        )
-        # install the .egg
-        return self.install_egg(egg_path, tmpdir)
-
-    # FIXME: 'easy_install.exe_to_egg' is too complex (12)
-    def exe_to_egg(self, dist_filename, egg_tmp) -> None:  # noqa: C901
-        """Extract a bdist_wininst to the directories an egg would use"""
-        # Check for .pth file and set up prefix translations
-        prefixes = get_exe_prefixes(dist_filename)
-        to_compile = []
-        native_libs = []
-        top_level = set()
-
-        def process(src, dst):
-            s = src.lower()
-            for old, new in prefixes:
-                if s.startswith(old):
-                    src = new + src[len(old) :]
-                    parts = src.split('/')
-                    dst = os.path.join(egg_tmp, *parts)
-                    dl = dst.lower()
-                    if dl.endswith('.pyd') or dl.endswith('.dll'):
-                        parts[-1] = bdist_egg.strip_module(parts[-1])
-                        # add the bare name: a list argument would raise
-                        # TypeError (unhashable) from set.add
-                        top_level.add(os.path.splitext(parts[0])[0])
-                        native_libs.append(src)
-                    elif dl.endswith('.py') and old != 'SCRIPTS/':
-                        top_level.add(os.path.splitext(parts[0])[0])
-                        to_compile.append(dst)
-                    return dst
-            if not src.endswith('.pth'):
-                log.warn("WARNING: can't process %s", src)
-            return None
-
-        # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
-        unpack_archive(dist_filename, egg_tmp, process)
-        stubs = []
-        for res in native_libs:
-            if res.lower().endswith('.pyd'):  # create stubs for .pyd's
-                parts = res.split('/')
-                resource = parts[-1]
-                parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
-                pyfile = os.path.join(egg_tmp, *parts)
-                to_compile.append(pyfile)
-                stubs.append(pyfile)
-                bdist_egg.write_stub(resource, pyfile)
-        self.byte_compile(to_compile)  # compile .py's
-        bdist_egg.write_safety_flag(
-            os.path.join(egg_tmp, 'EGG-INFO'), bdist_egg.analyze_egg(egg_tmp, stubs)
-        )  # write zip-safety flag
-
-        for name in 'top_level', 'native_libs':
-            if locals()[name]:
-                txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
-                if not os.path.exists(txt):
-                    with open(txt, 'w', encoding="utf-8") as f:
-                        f.write('\n'.join(locals()[name]) + '\n')
-
-    def install_wheel(self, wheel_path, tmpdir):
-        wheel = Wheel(wheel_path)
-        assert wheel.is_compatible()
-        destination = os.path.join(self.install_dir, wheel.egg_name())
-        destination = os.path.abspath(destination)
-        if not self.dry_run:
-            ensure_directory(destination)
-        if os.path.isdir(destination) and not os.path.islink(destination):
-            dir_util.remove_tree(destination, dry_run=self.dry_run)
-        elif os.path.exists(destination):
-            self.execute(
-                os.unlink,
-                (destination,),
-                "Removing " + destination,
-            )
-        try:
-            self.execute(
-                wheel.install_as_egg,
-                (destination,),
-                (
-                    f"Installing {os.path.basename(wheel_path)} to {os.path.dirname(destination)}"
-                ),
-            )
-        finally:
-            update_dist_caches(destination, fix_zipimporter_caches=False)
-        self.add_output(destination)
-        return self.egg_distribution(destination)
-
-    __mv_warning = textwrap.dedent(
-        """
-        Because this distribution was installed --multi-version, before you can
-        import modules from this package in an application, you will need to
-        'import pkg_resources' and then use a 'require()' call similar to one of
-        these examples, in order to select the desired version:
-
-            pkg_resources.require("%(name)s")  # latest installed version
-            pkg_resources.require("%(name)s==%(version)s")  # this exact version
-            pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
-        """
-    ).lstrip()
-
-    __id_warning = textwrap.dedent(
-        """
-        Note also that the installation directory must be on sys.path at runtime for
-        this to work.  (e.g. by being the application's script directory, by being on
-        PYTHONPATH, or by being added to sys.path by your code.)
-        """
-    )
-
-    def installation_report(self, req, dist, what: str = "Installed") -> str:
-        """Helpful installation message for display to package users"""
-        msg = "\n%(what)s %(eggloc)s%(extras)s"
-        if self.multi_version and not self.no_report:
-            msg += '\n' + self.__mv_warning
-            if self.install_dir not in map(normalize_path, sys.path):
-                msg += '\n' + self.__id_warning
-
-        eggloc = dist.location
-        name = dist.project_name
-        version = dist.version
-        extras = ''  # TODO: self.report_extras(req, dist)
-        return msg % locals()
-
-    __editable_msg = textwrap.dedent(
-        """
-        Extracted editable version of %(spec)s to %(dirname)s
-
-        If it uses setuptools in its setup script, you can activate it in
-        "development" mode by going to that directory and running::
-
-            %(python)s setup.py develop
-
-        See the setuptools documentation for the "develop" command for more info.
-        """
-    ).lstrip()
-
-    def report_editable(self, spec, setup_script):
-        dirname = os.path.dirname(setup_script)
-        python = sys.executable
-        return '\n' + self.__editable_msg % locals()
-
-    def run_setup(self, setup_script, setup_base, args) -> NoReturn:
-        raise NotImplementedError("easy_install support has been removed")
-
-    def build_and_install(self, setup_script, setup_base):
-        args = ['bdist_egg', '--dist-dir']
-
-        dist_dir = tempfile.mkdtemp(
-            prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
-        )
-        try:
-            self._set_fetcher_options(os.path.dirname(setup_script))
-            args.append(dist_dir)
-
-            self.run_setup(setup_script, setup_base, args)
-            all_eggs = Environment([dist_dir])
-            eggs = [
-                self.install_egg(dist.location, setup_base)
-                for key in all_eggs
-                for dist in all_eggs[key]
-            ]
-            if not eggs and not self.dry_run:
-                log.warn("No eggs found in %s (setup script problem?)", dist_dir)
-            return eggs
-        finally:
-            _rmtree(dist_dir)
-            log.set_verbosity(self.verbose)  # restore our log verbosity
-
-    def _set_fetcher_options(self, base):
-        """
-        When easy_install is about to run bdist_egg on a source dist, that
-        source dist might have 'setup_requires' directives, requiring
-        additional fetching. Ensure the fetcher options given to easy_install
-        are available to that command as well.
-        """
-        # find the fetch options from easy_install and write them out
-        # to the setup.cfg file.
-        ei_opts = self.distribution.get_option_dict('easy_install').copy()
-        fetch_directives = (
-            'find_links',
-            'site_dirs',
-            'index_url',
-            'optimize',
-            'allow_hosts',
-        )
-        fetch_options = {}
-        for key, val in ei_opts.items():
-            if key not in fetch_directives:
-                continue
-            fetch_options[key] = val[1]
-        # create a settings dictionary suitable for `edit_config`
-        settings = dict(easy_install=fetch_options)
-        cfg_filename = os.path.join(base, 'setup.cfg')
-        setopt.edit_config(cfg_filename, settings)
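
setopt.edit_config is setuptools' own helper; its effect here is roughly this
configparser sketch (option values arrive as (source, value) pairs from
get_option_dict and are already unwrapped above):

    import configparser
    import os

    def write_fetch_options(base, fetch_options):
        """Merge e.g. {'index_url': '...'} into base/setup.cfg [easy_install]."""
        cfg_path = os.path.join(base, "setup.cfg")
        parser = configparser.ConfigParser()
        parser.read(cfg_path)  # keep whatever the sdist already ships
        if not parser.has_section("easy_install"):
            parser.add_section("easy_install")
        for key, value in fetch_options.items():
            parser.set("easy_install", key, str(value))
        with open(cfg_path, "w", encoding="utf-8") as f:
            parser.write(f)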
-
-    def update_pth(self, dist) -> None:  # noqa: C901  # is too complex (11)  # FIXME
-        if self.pth_file is None:
-            return
-
-        for d in self.pth_file[dist.key]:  # drop old entries
-            if not self.multi_version and d.location == dist.location:
-                continue
-
-            log.info("Removing %s from easy-install.pth file", d)
-            self.pth_file.remove(d)
-            if d.location in self.shadow_path:
-                self.shadow_path.remove(d.location)
-
-        if not self.multi_version:
-            if dist.location in self.pth_file.paths:
-                log.info(
-                    "%s is already the active version in easy-install.pth",
-                    dist,
-                )
-            else:
-                log.info("Adding %s to easy-install.pth file", dist)
-                self.pth_file.add(dist)  # add new entry
-                if dist.location not in self.shadow_path:
-                    self.shadow_path.append(dist.location)
-
-        if self.dry_run:
-            return
-
-        self.pth_file.save()
-
-        if dist.key != 'setuptools':
-            return
-
-        # Ensure that setuptools itself never becomes unavailable!
-        # XXX should this check for latest version?
-        filename = os.path.join(self.install_dir, 'setuptools.pth')
-        if os.path.islink(filename):
-            os.unlink(filename)
-
-        with open(filename, 'wt', encoding=py312.PTH_ENCODING) as f:
-            # ^-- Python<3.13 requires encoding="locale" instead of "utf-8",
-            #     see python/cpython#77102.
-            f.write(self.pth_file.make_relative(dist.location) + '\n')
-
-    def unpack_progress(self, src, dst):
-        # Progress filter for unpacking
-        log.debug("Unpacking %s to %s", src, dst)
-        return dst  # only unpack-and-compile skips files for dry run
-
-    def unpack_and_compile(self, egg_path, destination) -> None:
-        to_compile = []
-        to_chmod = []
-
-        def pf(src, dst):
-            if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
-                to_compile.append(dst)
-            elif dst.endswith('.dll') or dst.endswith('.so'):
-                to_chmod.append(dst)
-            self.unpack_progress(src, dst)
-            return dst if not self.dry_run else None
-
-        unpack_archive(egg_path, destination, pf)
-        self.byte_compile(to_compile)
-        if not self.dry_run:
-            for f in to_chmod:
-                mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
-                chmod(f, mode)
-
-    def byte_compile(self, to_compile) -> None:
-        if sys.dont_write_bytecode:
-            return
-
-        from distutils.util import byte_compile
-
-        try:
-            # try to make the byte compile messages quieter
-            log.set_verbosity(self.verbose - 1)
-
-            byte_compile(to_compile, optimize=0, force=True, dry_run=self.dry_run)
-            if self.optimize:
-                byte_compile(
-                    to_compile,
-                    optimize=self.optimize,
-                    force=True,
-                    dry_run=self.dry_run,
-                )
-        finally:
-            log.set_verbosity(self.verbose)  # restore original verbosity
-
-    __no_default_msg = textwrap.dedent(
-        """
-        bad install directory or PYTHONPATH
-
-        You are attempting to install a package to a directory that is not
-        on PYTHONPATH and which Python does not read ".pth" files from.  The
-        installation directory you specified (via --install-dir, --prefix, or
-        the distutils default setting) was:
-
-            %s
-
-        and your PYTHONPATH environment variable currently contains:
-
-            %r
-
-        Here are some of your options for correcting the problem:
-
-        * You can choose a different installation directory, i.e., one that is
-          on PYTHONPATH or supports .pth files
-
-        * You can add the installation directory to the PYTHONPATH environment
-          variable.  (It must then also be on PYTHONPATH whenever you run
-          Python and want to use the package(s) you are installing.)
-
-        * You can set up the installation directory to support ".pth" files by
-          using one of the approaches described here:
-
-          https://setuptools.pypa.io/en/latest/deprecated/easy_install.html#custom-installation-locations
-
-
-        Please make the appropriate changes for your system and try again.
-        """
-    ).strip()
-
-    def create_home_path(self) -> None:
-        """Create directories under ~."""
-        if not self.user:
-            return
-        home = convert_path(os.path.expanduser("~"))
-        for path in only_strs(self.config_vars.values()):
-            if path.startswith(home) and not os.path.isdir(path):
-                self.debug_print(f"os.makedirs('{path}', 0o700)")
-                os.makedirs(path, 0o700)
-
-    INSTALL_SCHEMES = dict(
-        posix=dict(
-            install_dir='$base/lib/python$py_version_short/site-packages',
-            script_dir='$base/bin',
-        ),
-    )
-
-    DEFAULT_SCHEME = dict(
-        install_dir='$base/Lib/site-packages',
-        script_dir='$base/Scripts',
-    )
-
-    def _expand(self, *attrs):
-        config_vars = self.get_finalized_command('install').config_vars
-
-        if self.prefix:
-            # Set default install_dir/scripts from --prefix
-            config_vars = dict(config_vars)
-            config_vars['base'] = self.prefix
-            scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
-            for attr, val in scheme.items():
-                if getattr(self, attr, None) is None:
-                    setattr(self, attr, val)
-
-        from distutils.util import subst_vars
-
-        for attr in attrs:
-            val = getattr(self, attr)
-            if val is not None:
-                val = subst_vars(val, config_vars)
-                if os.name == 'posix':
-                    val = os.path.expanduser(val)
-                setattr(self, attr, val)
-
-
-def _pythonpath():
-    items = os.environ.get('PYTHONPATH', '').split(os.pathsep)
-    return filter(None, items)
-
-
-def get_site_dirs():
-    """
-    Return a list of 'site' dirs
-    """
-
-    sitedirs = []
-
-    # start with PYTHONPATH
-    sitedirs.extend(_pythonpath())
-
-    prefixes = [sys.prefix]
-    if sys.exec_prefix != sys.prefix:
-        prefixes.append(sys.exec_prefix)
-    for prefix in prefixes:
-        if not prefix:
-            continue
-
-        if sys.platform in ('os2emx', 'riscos'):
-            sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
-        elif os.sep == '/':
-            sitedirs.extend([
-                os.path.join(
-                    prefix,
-                    "lib",
-                    f"python{sys.version_info.major}.{sys.version_info.minor}",
-                    "site-packages",
-                ),
-                os.path.join(prefix, "lib", "site-python"),
-            ])
-        else:
-            sitedirs.extend([
-                prefix,
-                os.path.join(prefix, "lib", "site-packages"),
-            ])
-        if sys.platform != 'darwin':
-            continue
-
-        # for framework builds *only* we add the standard Apple
-        # locations. Currently only per-user, but /Library and
-        # /Network/Library could be added too
-        if 'Python.framework' not in prefix:
-            continue
-
-        home = os.environ.get('HOME')
-        if not home:
-            continue
-
-        home_sp = os.path.join(
-            home,
-            'Library',
-            'Python',
-            f'{sys.version_info.major}.{sys.version_info.minor}',
-            'site-packages',
-        )
-        sitedirs.append(home_sp)
-    lib_paths = get_path('purelib'), get_path('platlib')
-
-    sitedirs.extend(s for s in lib_paths if s not in sitedirs)
-
-    if site.ENABLE_USER_SITE:
-        sitedirs.append(site.USER_SITE)
-
-    with contextlib.suppress(AttributeError):
-        sitedirs.extend(site.getsitepackages())
-
-    return list(map(normalize_path, sitedirs))
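
For contrast, most of the per-platform walking above collapses into a few
stdlib calls on modern Pythons. A rough approximation only; ordering and the
legacy platforms differ:

    import contextlib
    import os
    import site
    import sysconfig

    def approximate_site_dirs():
        dirs = [p for p in os.environ.get("PYTHONPATH", "").split(os.pathsep) if p]
        dirs += [sysconfig.get_path("purelib"), sysconfig.get_path("platlib")]
        if site.ENABLE_USER_SITE:
            dirs.append(site.USER_SITE)
        with contextlib.suppress(AttributeError):
            dirs += site.getsitepackages()
        # normalize and de-duplicate, preserving order
        norm = (os.path.normcase(os.path.realpath(d)) for d in dirs)
        return list(dict.fromkeys(norm))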
-
-
-def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
-    """Yield sys.path directories that might contain "old-style" packages"""
-
-    seen = set()
-
-    for dirname in inputs:
-        dirname = normalize_path(dirname)
-        if dirname in seen:
-            continue
-
-        seen.add(dirname)
-        if not os.path.isdir(dirname):
-            continue
-
-        files = os.listdir(dirname)
-        yield dirname, files
-
-        for name in files:
-            if not name.endswith('.pth'):
-                # We only care about the .pth files
-                continue
-            if name in ('easy-install.pth', 'setuptools.pth'):
-                # Ignore .pth files that we control
-                continue
-
-            # Read the .pth file
-            content = _read_pth(os.path.join(dirname, name))
-            lines = list(yield_lines(content))
-
-            # Yield existing non-dupe, non-import directory lines from it
-            for line in lines:
-                if line.startswith("import"):
-                    continue
-
-                line = normalize_path(line.rstrip())
-                if line in seen:
-                    continue
-
-                seen.add(line)
-                if not os.path.isdir(line):
-                    continue
-
-                yield line, os.listdir(line)
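
The .pth-scanning half in isolation: read every foreign .pth file in a
directory and yield the extra directories it splices onto sys.path. A
simplified sketch (the real code defers encoding detection to _read_pth):

    import os

    def pth_referenced_dirs(dirname):
        for name in os.listdir(dirname):
            if not name.endswith(".pth"):
                continue
            if name in ("easy-install.pth", "setuptools.pth"):
                continue  # skip the files easy_install itself controls
            with open(os.path.join(dirname, name), encoding="utf-8") as f:
                for line in f:
                    line = line.rstrip()
                    if not line or line.startswith(("import", "#")):
                        continue
                    candidate = os.path.normpath(os.path.join(dirname, line))
                    if os.path.isdir(candidate):
                        yield candidate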
-
-
-def extract_wininst_cfg(dist_filename):
-    """Extract configuration data from a bdist_wininst .exe
-
-    Returns a configparser.RawConfigParser, or None
-    """
-    f = open(dist_filename, 'rb')
-    try:
-        endrec = zipfile._EndRecData(f)
-        if endrec is None:
-            return None
-
-        prepended = (endrec[9] - endrec[5]) - endrec[6]
-        if prepended < 12:  # no wininst data here
-            return None
-        f.seek(prepended - 12)
-
-        tag, cfglen, _bmlen = struct.unpack("egg path translations for a given .exe file"""
-
-    prefixes = [
-        ('PURELIB/', ''),
-        ('PLATLIB/pywin32_system32', ''),
-        ('PLATLIB/', ''),
-        ('SCRIPTS/', 'EGG-INFO/scripts/'),
-        ('DATA/lib/site-packages', ''),
-    ]
-    z = zipfile.ZipFile(exe_filename)
-    try:
-        for info in z.infolist():
-            name = info.filename
-            parts = name.split('/')
-            if len(parts) == 3 and parts[2] == 'PKG-INFO':
-                if parts[1].endswith('.egg-info'):
-                    prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
-                    break
-            if len(parts) != 2 or not name.endswith('.pth'):
-                continue
-            if name.endswith('-nspkg.pth'):
-                continue
-            if parts[0].upper() in ('PURELIB', 'PLATLIB'):
-                contents = z.read(name).decode()
-                for pth in yield_lines(contents):
-                    pth = pth.strip().replace('\\', '/')
-                    if not pth.startswith('import'):
-                        prefixes.append(((f'{parts[0]}/{pth}/'), ''))
-    finally:
-        z.close()
-    prefixes = [(x.lower(), y) for x, y in prefixes]
-    prefixes.sort()
-    prefixes.reverse()
-    return prefixes
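
Applying the returned table mirrors the process() callback in exe_to_egg
above; a small illustrative driver (the table below is hand-written, not one
read from a real .exe):

    def translate(member, prefixes):
        """Map an archive member path through the (old, new) prefix table."""
        lower = member.lower()
        for old, new in prefixes:
            if lower.startswith(old):
                return new + member[len(old):]
        return None  # not covered by any prefix

    table = [("scripts/", "EGG-INFO/scripts/"), ("purelib/", "")]
    assert translate("PURELIB/pkg/__init__.py", table) == "pkg/__init__.py"
    assert translate("SCRIPTS/tool.py", table) == "EGG-INFO/scripts/tool.py"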
-
-
-class PthDistributions(Environment):
-    """A .pth file with Distribution paths in it"""
-
-    def __init__(self, filename, sitedirs=()) -> None:
-        self.filename = filename
-        self.sitedirs = list(map(normalize_path, sitedirs))
-        self.basedir = normalize_path(os.path.dirname(self.filename))
-        self.paths, self.dirty = self._load()
-        # keep a copy in case someone manually updates the paths attribute on the instance
-        self._init_paths = self.paths[:]
-        super().__init__([], None, None)
-        for path in yield_lines(self.paths):
-            list(map(self.add, find_distributions(path, True)))
-
-    def _load_raw(self):
-        paths = []
-        dirty = saw_import = False
-        seen = set(self.sitedirs)
-        content = _read_pth(self.filename)
-        for line in content.splitlines():
-            path = line.rstrip()
-            # still keep imports and empty/commented lines for formatting
-            paths.append(path)
-            if line.startswith(('import ', 'from ')):
-                saw_import = True
-                continue
-            stripped_path = path.strip()
-            if not stripped_path or stripped_path.startswith('#'):
-                continue
-            # skip non-existent paths, in case somebody deleted a package
-            # manually, and duplicate paths as well
-            normalized_path = normalize_path(os.path.join(self.basedir, path))
-            if normalized_path in seen or not os.path.exists(normalized_path):
-                log.debug("cleaned up dirty or duplicated %r", path)
-                dirty = True
-                paths.pop()
-                continue
-            seen.add(normalized_path)
-        # remove any trailing empty/blank line
-        while paths and not paths[-1].strip():
-            paths.pop()
-            dirty = True
-        return paths, dirty or (paths and saw_import)
-
-    def _load(self):
-        if os.path.isfile(self.filename):
-            return self._load_raw()
-        return [], False
-
-    def save(self) -> None:
-        """Write changed .pth file back to disk"""
-        # first reload the file
-        last_paths, last_dirty = self._load()
-        # and check that there are no differences from what we have.
-        # there can be differences if someone else has written to the file
-        # since we first loaded it.
-        # we don't want to lose any new paths added since then.
-        for path in last_paths[:]:
-            if path not in self.paths:
-                self.paths.append(path)
-                log.info("detected new path %r", path)
-                last_dirty = True
-            else:
-                last_paths.remove(path)
-        # also, re-check that all paths are still valid before saving them
-        for path in self.paths[:]:
-            if path not in last_paths and not path.startswith((
-                'import ',
-                'from ',
-                '#',
-            )):
-                absolute_path = os.path.join(self.basedir, path)
-                if not os.path.exists(absolute_path):
-                    self.paths.remove(path)
-                    log.info("removing now non-existent path %r", path)
-                    last_dirty = True
-
-        self.dirty |= last_dirty or self.paths != self._init_paths
-        if not self.dirty:
-            return
-
-        rel_paths = list(map(self.make_relative, self.paths))
-        if rel_paths:
-            log.debug("Saving %s", self.filename)
-            lines = self._wrap_lines(rel_paths)
-            data = '\n'.join(lines) + '\n'
-            if os.path.islink(self.filename):
-                os.unlink(self.filename)
-            with open(self.filename, 'wt', encoding=py312.PTH_ENCODING) as f:
-                # ^-- Python<3.13 requires encoding="locale" instead of "utf-8",
-                #     see python/cpython#77102.
-                f.write(data)
-        elif os.path.exists(self.filename):
-            log.debug("Deleting empty %s", self.filename)
-            os.unlink(self.filename)
-
-        self.dirty = False
-        self._init_paths[:] = self.paths[:]
-
-    @staticmethod
-    def _wrap_lines(lines):
-        return lines
-
-    def add(self, dist) -> None:
-        """Add `dist` to the distribution map"""
-        new_path = dist.location not in self.paths and (
-            dist.location not in self.sitedirs
-            or
-            # account for '.' being in PYTHONPATH
-            dist.location == os.getcwd()
-        )
-        if new_path:
-            self.paths.append(dist.location)
-            self.dirty = True
-        super().add(dist)
-
-    def remove(self, dist) -> None:
-        """Remove `dist` from the distribution map"""
-        while dist.location in self.paths:
-            self.paths.remove(dist.location)
-            self.dirty = True
-        super().remove(dist)
-
-    def make_relative(self, path):
-        npath, last = os.path.split(normalize_path(path))
-        baselen = len(self.basedir)
-        parts = [last]
-        sep = '/' if os.altsep == '/' else os.sep
-        while len(npath) >= baselen:
-            if npath == self.basedir:
-                parts.append(os.curdir)
-                parts.reverse()
-                return sep.join(parts)
-            npath, last = os.path.split(npath)
-            parts.append(last)
-        # the loop has no break, so a while/else clause here would be
-        # redundant: falling out of the loop means path is outside basedir
-        return path
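
make_relative walks path components by hand; a rough modern equivalent built
on os.path.relpath (drive handling and separator choice simplified):

    import os

    def make_relative_approx(path, basedir):
        try:
            rel = os.path.relpath(path, basedir)
        except ValueError:  # e.g. a different drive on Windows
            return path
        if rel.split(os.sep)[0] == os.pardir:
            return path     # outside basedir: keep the path as given
        return os.path.join(os.curdir, rel)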
-
-
-class RewritePthDistributions(PthDistributions):
-    @classmethod
-    def _wrap_lines(cls, lines):
-        yield cls.prelude
-        yield from lines
-        yield cls.postlude
-
-    prelude = _one_liner(
-        """
-        import sys
-        sys.__plen = len(sys.path)
-        """
-    )
-    postlude = _one_liner(
-        """
-        import sys
-        new = sys.path[sys.__plen:]
-        del sys.path[sys.__plen:]
-        p = getattr(sys, '__egginsert', 0)
-        sys.path[p:p] = new
-        sys.__egginsert = p + len(new)
-        """
-    )
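
What the prelude/postlude buy: entries appended by easy-install.pth are moved
to the front of sys.path (at the shared __egginsert offset) rather than being
left at the end. A runnable mock of the effect on a plain list (values are
illustrative):

    path = ["site-packages", "stdlib"]

    plen = len(path)            # prelude: remember the current length
    path += ["egg-a", "egg-b"]  # body: the .pth entries are appended here
    new = path[plen:]           # postlude: cut the appended entries...
    del path[plen:]
    p = 0                       # ...and splice them in at __egginsert
    path[p:p] = new

    assert path == ["egg-a", "egg-b", "site-packages", "stdlib"]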
-
-
-if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite':
-    PthDistributions = RewritePthDistributions  # type: ignore[misc]  # Overwriting type
-
-
-def update_dist_caches(dist_path, fix_zipimporter_caches):
-    """
-    Fix any globally cached `dist_path` related data
-
-    `dist_path` should be a path of a newly installed egg distribution (zipped
-    or unzipped).
-
-    sys.path_importer_cache contains finder objects that have been cached when
-    importing data from the original distribution. Any such finders need to be
-    cleared since the replacement distribution might be packaged differently,
-    e.g. a zipped egg distribution might get replaced with an unzipped egg
-    folder or vice versa. Having the old finders cached may then cause Python
-    to attempt loading modules from the replacement distribution using an
-    incorrect loader.
-
-    zipimport.zipimporter objects are Python loaders charged with importing
-    data packaged inside zip archives. If stale loaders referencing the
-    original distribution are left behind, they can fail to load modules from
-    the replacement distribution. E.g. if an old zipimport.zipimporter instance
-    is used to load data from a new zipped egg archive, it may cause the
-    operation to attempt to locate the requested data in the wrong location -
-    one indicated by the original distribution's zip archive directory
-    information. Such an operation may then fail outright, e.g. report having
-    read a 'bad local file header', or even worse, it may fail silently &
-    return invalid data.
-
-    zipimport._zip_directory_cache contains cached zip archive directory
-    information for all existing zipimport.zipimporter instances and all such
-    instances connected to the same archive share the same cached directory
-    information.
-
-    If asked, and the underlying Python implementation allows it, we can fix
-    all existing zipimport.zipimporter instances instead of having to track
-    them down and remove them one by one, by updating their shared cached zip
-    archive directory information. This, of course, assumes that the
-    replacement distribution is packaged as a zipped egg.
-
-    If not asked to fix existing zipimport.zipimporter instances, we still do
-    our best to clear any remaining zipimport.zipimporter related cached data
-    that might somehow later get used when attempting to load data from the new
-    distribution and thus cause such load operations to fail. Note that when
-    tracking down such remaining stale data, we can not catch every conceivable
-    usage from here, and we clear only those that we know of and have found to
-    cause problems if left alive. Any remaining caches should be updated by
-    whomever is in charge of maintaining them, i.e. they should be ready to
-    handle us replacing their zip archives with new distributions at runtime.
-
-    """
-    # There are several other known sources of stale zipimport.zipimporter
-    # instances that we do not clear here, but might if ever given a reason to
-    # do so:
-    # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
-    #   set') may contain distributions which may in turn contain their
-    #   zipimport.zipimporter loaders.
-    # * Several zipimport.zipimporter loaders held by local variables further
-    #   up the function call stack when running the setuptools installation.
-    # * Already loaded modules may have their __loader__ attribute set to the
-    #   exact loader instance used when importing them. Python 3.4 docs state
-    #   that this information is intended mostly for introspection and so is
-    #   not expected to cause us problems.
-    normalized_path = normalize_path(dist_path)
-    _uncache(normalized_path, sys.path_importer_cache)
-    if fix_zipimporter_caches:
-        _replace_zip_directory_cache_data(normalized_path)
-    else:
-        # Here, even though we do not want to fix existing and now stale
-        # zipimporter cache information, we still want to remove it. Related to
-        # Python's zip archive directory information cache, we clear each of
-        # its stale entries in two phases:
-        #   1. Clear the entry so attempting to access zip archive information
-        #      via any existing stale zipimport.zipimporter instances fails.
-        #   2. Remove the entry from the cache so any newly constructed
-        #      zipimport.zipimporter instances do not end up using old stale
-        #      zip archive directory information.
-        # This whole stale data removal step does not seem strictly necessary,
-        # but has been left in because it was done before we started replacing
-        # the zip archive directory information cache content if possible, and
-        # there are no relevant unit tests that we can depend on to tell us if
-        # this is really needed.
-        _remove_and_clear_zip_directory_cache_data(normalized_path)
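
The sys.path_importer_cache half of this cleanup is straightforward; a minimal
standalone sketch (the zipimporter directory fixups are the subtle part and
are handled by the helpers below):

    import os
    import sys

    def uncache_finders(dist_path):
        """Drop cached finders for dist_path and anything beneath it."""
        norm = os.path.normcase(os.path.realpath(dist_path))
        for key in list(sys.path_importer_cache):
            k = os.path.normcase(os.path.realpath(key))
            if k == norm or k.startswith(norm + os.sep):
                del sys.path_importer_cache[key]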
-
-
-def _collect_zipimporter_cache_entries(normalized_path, cache):
-    """
-    Return zipimporter cache entry keys related to a given normalized path.
-
-    Alternative path spellings (e.g. those using different character case or
-    those using alternative path separators) related to the same path are
-    included. Any sub-path entries are included as well, i.e. those
-    corresponding to zip archives embedded in other zip archives.
-
-    """
-    result = []
-    prefix_len = len(normalized_path)
-    for p in cache:
-        np = normalize_path(p)
-        if np.startswith(normalized_path) and np[prefix_len : prefix_len + 1] in (
-            os.sep,
-            '',
-        ):
-            result.append(p)
-    return result
-
-
-def _update_zipimporter_cache(normalized_path, cache, updater=None):
-    """
-    Update zipimporter cache data for a given normalized path.
-
-    Any sub-path entries are processed as well, i.e. those corresponding to zip
-    archives embedded in other zip archives.
-
-    The given updater is a callable taking a cache entry key and the original
-    entry (already removed from the cache), and is expected to update the
-    entry and possibly return a new one to be inserted in its place.
-    Returning None indicates that the entry should not be replaced with a new
-    one. If no updater is given, the cache entries are simply removed without
-    any additional processing, the same as if the updater simply returned None.
-
-    """
-    for p in _collect_zipimporter_cache_entries(normalized_path, cache):
-        # N.B. pypy's custom zipimport._zip_directory_cache implementation does
-        # not support the complete dict interface:
-        #  * Does not support item assignment, so on PyPy this function can
-        #    only be used to remove existing cache entries, not replace them.
-        #  * Does not support the dict.pop() method, forcing us to use the
-        #    get/del patterns instead. For more detailed information see the
-        #    following links:
-        #      https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
-        #      https://foss.heptapod.net/pypy/pypy/-/blob/144c4e65cb6accb8e592f3a7584ea38265d1873c/pypy/module/zipimport/interp_zipimport.py
-        old_entry = cache[p]
-        del cache[p]
-        new_entry = updater and updater(p, old_entry)
-        if new_entry is not None:
-            cache[p] = new_entry
-
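The updater contract documented above can be sketched in isolation; the cache contents and updater below are invented for illustration and are not part of the removed module:

    # Entries are always deleted first; the updater then decides whether a
    # replacement is re-inserted under the same key.
    cache = {'/proj/deps.zip': {'SomeModule.py': b'...'}}

    def updater(path, old_entry):
        old_entry.clear()  # invalidate data held by stale references
        return old_entry   # non-None: re-insert under the same key

    for path in list(cache):
        old_entry = cache[path]
        del cache[path]
        new_entry = updater(path, old_entry)
        if new_entry is not None:
            cache[path] = new_entry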
-
-def _uncache(normalized_path, cache):
-    _update_zipimporter_cache(normalized_path, cache)
-
-
-def _remove_and_clear_zip_directory_cache_data(normalized_path):
-    def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
-        old_entry.clear()
-
-    _update_zipimporter_cache(
-        normalized_path,
-        zipimport._zip_directory_cache,
-        updater=clear_and_remove_cached_zip_archive_directory_data,
-    )
-
-
-# PyPy Python implementation does not allow directly writing to the
-# zipimport._zip_directory_cache and so prevents us from attempting to correct
-# its content. The best we can do there is clear the problematic cache content
-# and have PyPy repopulate it as needed. The downside is that if there are any
-# stale zipimport.zipimporter instances lying around, attempting to use them
-# will fail because their zip archive directory information is no longer
-# available, instead of being automatically corrected to use the new, correct
-# zip archive directory information.
-if '__pypy__' in sys.builtin_module_names:
-    _replace_zip_directory_cache_data = _remove_and_clear_zip_directory_cache_data
-else:
-
-    def _replace_zip_directory_cache_data(normalized_path):
-        def replace_cached_zip_archive_directory_data(path, old_entry):
-            # N.B. In theory, we could load the zip directory information just
-            # once for all updated path spellings, and then copy it locally and
-            # update its contained path strings to contain the correct
-            # spelling, but that seems like a way too invasive move (this cache
-            # structure is not officially documented anywhere and could in
-            # theory change with new Python releases) for no significant
-            # benefit.
-            old_entry.clear()
-            zipimport.zipimporter(path)
-            old_entry.update(zipimport._zip_directory_cache[path])
-            return old_entry
-
-        _update_zipimporter_cache(
-            normalized_path,
-            zipimport._zip_directory_cache,
-            updater=replace_cached_zip_archive_directory_data,
-        )
-
-
-def is_python(text, filename='<string>'):
-    "Is this string a valid Python script?"
-    try:
-        compile(text, filename, 'exec')
-    except (SyntaxError, TypeError):
-        return False
-    else:
-        return True
-
-
-def is_sh(executable):
-    """Determine if the specified executable is a .sh (contains a #! line)"""
-    try:
-        with open(executable, encoding='latin-1') as fp:
-            magic = fp.read(2)
-    except OSError:
-        return executable
-    return magic == '#!'
-
-
-def nt_quote_arg(arg):
-    """Quote a command line argument according to Windows parsing rules"""
-    return subprocess.list2cmdline([arg])
-
-
-def is_python_script(script_text, filename):
-    """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc."""
-    if filename.endswith('.py') or filename.endswith('.pyw'):
-        return True  # extension says it's Python
-    if is_python(script_text, filename):
-        return True  # it's syntactically valid Python
-    if script_text.startswith('#!'):
-        # It begins with a '#!' line, so check if 'python' is in it somewhere
-        return 'python' in script_text.splitlines()[0].lower()
-
-    return False  # Not any Python I can recognize
-
-
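A sketch of how the heuristic above classifies scripts, assuming a checkout that still ships this helper (the sample snippets are invented):

    from setuptools.command.easy_install import is_python_script

    assert is_python_script("print('hi')\n", 'tool')       # valid Python syntax
    assert is_python_script('', 'tool.py')                  # extension wins
    assert is_python_script('#!/usr/bin/env python3\nif x', 'tool')  # shebang names python
    assert not is_python_script('#!/bin/sh\necho hi', 'tool')        # shell script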
-# For pbr compat; will be removed in a future version.
-sys_executable = CommandSpec._sys_executable()
-
-
-def only_strs(values):
-    """
-    Exclude non-str values. Ref #3063.
-    """
-    return filter(lambda val: isinstance(val, str), values)
-
-
-def _read_pth(fullname: str) -> str:
-    # Python<3.13 requires encoding="locale" instead of "utf-8", see python/cpython#77102.
-    # Old versions of setuptools may also have produced `.pth` files with
-    # other, potentially problematic encodings, so we fall back to "locale".
-
-    try:
-        with open(fullname, encoding=py312.PTH_ENCODING) as f:
-            return f.read()
-    except UnicodeDecodeError:  # pragma: no cover
-        # This error may only happen for Python >= 3.13.
-        # TODO: Possible deprecation warnings to be added in the future:
-        #       ``.pth file {fullname!r} is not UTF-8.``
-        #       Your environment contains {fullname!r}, which cannot be read as
-        #       UTF-8. It was likely produced by an old version of setuptools.
-        #       Please be mindful that this is deprecated; in the future,
-        #       non-UTF-8 .pth files may cause setuptools to fail.
-        with open(fullname, encoding=py39.LOCALE_ENCODING) as f:
-            return f.read()
-
-
-class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
-    _SUMMARY = "easy_install command is deprecated."
-    _DETAILS = """
-    Please avoid running ``setup.py`` and ``easy_install``.
-    Instead, use pypa/build, pypa/installer or other
-    standards-based tools.
-    """
-    _SEE_URL = "https://github.com/pypa/setuptools/issues/917"
-    # _DUE_DATE not defined yet
+    """Stubbed command for temporary pbr compatibility."""
diff --git a/setuptools/package_index.py b/setuptools/package_index.py
deleted file mode 100644
index 3500c2d86f..0000000000
--- a/setuptools/package_index.py
+++ /dev/null
@@ -1,1179 +0,0 @@
-"""PyPI and direct package downloading."""
-
-from __future__ import annotations
-
-import base64
-import configparser
-import hashlib
-import html
-import http.client
-import io
-import itertools
-import os
-import re
-import shutil
-import socket
-import subprocess
-import sys
-import urllib.error
-import urllib.parse
-import urllib.request
-from fnmatch import translate
-from functools import wraps
-from typing import NamedTuple
-
-from more_itertools import unique_everseen
-
-import setuptools
-from pkg_resources import (
-    BINARY_DIST,
-    CHECKOUT_DIST,
-    DEVELOP_DIST,
-    EGG_DIST,
-    SOURCE_DIST,
-    Distribution,
-    Environment,
-    Requirement,
-    find_distributions,
-    normalize_path,
-    parse_version,
-    safe_name,
-    safe_version,
-    to_filename,
-)
-from setuptools.wheel import Wheel
-
-from .unicode_utils import _cfg_read_utf8_with_fallback, _read_utf8_with_fallback
-
-from distutils import log
-from distutils.errors import DistutilsError
-
-EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
-HREF = re.compile(r"""href\s*=\s*['"]?([^'"> ]+)""", re.I)
-PYPI_MD5 = re.compile(
-    r'<a href="([^"#]+)#md5=([0-9a-f]{32})">([^<]+)</a>\n\s+\(md5\)'
-)
-URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
-EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
-
-__all__ = [
-    'PackageIndex',
-    'distros_for_url',
-    'parse_bdist_wininst',
-    'interpret_distro_name',
-]
-
-_SOCKET_TIMEOUT = 15
-
-user_agent = f"setuptools/{setuptools.__version__} Python-urllib/{sys.version_info.major}.{sys.version_info.minor}"
-
-
-def parse_requirement_arg(spec):
-    try:
-        return Requirement.parse(spec)
-    except ValueError as e:
-        raise DistutilsError(
-            f"Not a URL, existing file, or requirement spec: {spec!r}"
-        ) from e
-
-
-def parse_bdist_wininst(name):
-    """Return (base,pyversion) or (None,None) for possible .exe name"""
-
-    lower = name.lower()
-    base, py_ver, plat = None, None, None
-
-    if lower.endswith('.exe'):
-        if lower.endswith('.win32.exe'):
-            base = name[:-10]
-            plat = 'win32'
-        elif lower.startswith('.win32-py', -16):
-            py_ver = name[-7:-4]
-            base = name[:-16]
-            plat = 'win32'
-        elif lower.endswith('.win-amd64.exe'):
-            base = name[:-14]
-            plat = 'win-amd64'
-        elif lower.startswith('.win-amd64-py', -20):
-            py_ver = name[-7:-4]
-            base = name[:-20]
-            plat = 'win-amd64'
-    return base, py_ver, plat
-
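Expected results of the parser above, assuming a pre-removal checkout (the filenames are invented):

    from setuptools.package_index import parse_bdist_wininst

    # plain installer: strip the 10-character '.win32.exe' suffix
    assert parse_bdist_wininst('foo-1.0.win32.exe') == ('foo-1.0', None, 'win32')
    # version-tagged installer: also recover the Python version
    assert parse_bdist_wininst('foo-1.0.win32-py2.7.exe') == ('foo-1.0', '2.7', 'win32')
    # anything that is not a recognised .exe installer yields all Nones
    assert parse_bdist_wininst('foo-1.0.tar.gz') == (None, None, None)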
-
-def egg_info_for_url(url):
-    parts = urllib.parse.urlparse(url)
-    _scheme, server, path, _parameters, _query, fragment = parts
-    base = urllib.parse.unquote(path.split('/')[-1])
-    if server == 'sourceforge.net' and base == 'download':  # XXX Yuck
-        base = urllib.parse.unquote(path.split('/')[-2])
-    if '#' in base:
-        base, fragment = base.split('#', 1)
-    return base, fragment
-
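What the helper above returns for a typical download link, assuming a pre-removal checkout (the URL is invented):

    from setuptools.package_index import egg_info_for_url

    # the basename and fragment are split apart; %XX escapes are unquoted
    url = 'https://example.com/dist/foo%2D1.0.tar.gz#egg=foo-1.0'
    assert egg_info_for_url(url) == ('foo-1.0.tar.gz', 'egg=foo-1.0')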
-
-def distros_for_url(url, metadata=None):
-    """Yield egg or source distribution objects that might be found at a URL"""
-    base, fragment = egg_info_for_url(url)
-    yield from distros_for_location(url, base, metadata)
-    if fragment:
-        match = EGG_FRAGMENT.match(fragment)
-        if match:
-            yield from interpret_distro_name(
-                url, match.group(1), metadata, precedence=CHECKOUT_DIST
-            )
-
-
-def distros_for_location(location, basename, metadata=None):
-    """Yield egg or source distribution objects based on basename"""
-    if basename.endswith('.egg.zip'):
-        basename = basename[:-4]  # strip the .zip
-    if basename.endswith('.egg') and '-' in basename:
-        # only one, unambiguous interpretation
-        return [Distribution.from_location(location, basename, metadata)]
-    if basename.endswith('.whl') and '-' in basename:
-        wheel = Wheel(basename)
-        if not wheel.is_compatible():
-            return []
-        return [
-            Distribution(
-                location=location,
-                project_name=wheel.project_name,
-                version=wheel.version,
-                # Increase priority over eggs.
-                precedence=EGG_DIST + 1,
-            )
-        ]
-    if basename.endswith('.exe'):
-        win_base, py_ver, platform = parse_bdist_wininst(basename)
-        if win_base is not None:
-            return interpret_distro_name(
-                location, win_base, metadata, py_ver, BINARY_DIST, platform
-            )
-    # Try source distro extensions (.zip, .tgz, etc.)
-    #
-    for ext in EXTENSIONS:
-        if basename.endswith(ext):
-            basename = basename[: -len(ext)]
-            return interpret_distro_name(location, basename, metadata)
-    return []  # no extension matched
-
-
-def distros_for_filename(filename, metadata=None):
-    """Yield possible egg or source distribution objects based on a filename"""
-    return distros_for_location(
-        normalize_path(filename), os.path.basename(filename), metadata
-    )
-
-
-def interpret_distro_name(
-    location, basename, metadata, py_version=None, precedence=SOURCE_DIST, platform=None
-):
-    """Generate the interpretation of a source distro name
-
-    Note: if `location` is a filesystem filename, you should call
-    ``pkg_resources.normalize_path()`` on it before passing it to this
-    routine!
-    """
-
-    parts = basename.split('-')
-    if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]):
-        # it is a bdist_dumb, not an sdist -- bail out
-        return
-
-    # find the pivot (p) that splits the name from the version.
-    # infer the version as the first item that has a digit.
-    for p in range(len(parts)):
-        if parts[p][:1].isdigit():
-            break
-    else:
-        p = len(parts)
-
-    yield Distribution(
-        location,
-        metadata,
-        '-'.join(parts[:p]),
-        '-'.join(parts[p:]),
-        py_version=py_version,
-        precedence=precedence,
-        platform=platform,
-    )
-
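The pivot logic above in action, assuming a pre-removal checkout (the name is invented): the version starts at the first dash-separated part whose first character is a digit.

    from setuptools.package_index import interpret_distro_name

    (dist,) = interpret_distro_name('/tmp', 'foo-bar-1.2', None)
    assert (dist.project_name, dist.version) == ('foo-bar', '1.2')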
-
-def unique_values(func):
-    """
-    Wrap a function returning an iterable such that the resulting iterable
-    only ever yields unique items.
-    """
-
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        return unique_everseen(func(*args, **kwargs))
-
-    return wrapper
-
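A minimal sketch of the decorator above (the generator is invented):

    from setuptools.package_index import unique_values

    @unique_values
    def letters():
        yield from 'abracadabra'

    assert list(letters()) == ['a', 'b', 'r', 'c', 'd']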
-
-REL = re.compile(r"""<([^>]*\srel\s{0,10}=\s{0,10}['"]?([^'" >]+)[^>]*)>""", re.I)
-"""
-Regex for an HTML tag with 'rel="val"' attributes.
-"""
-
-
-@unique_values
-def find_external_links(url, page):
-    """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
-
-    for match in REL.finditer(page):
-        tag, rel = match.groups()
-        rels = set(map(str.strip, rel.lower().split(',')))
-        if 'homepage' in rels or 'download' in rels:
-            for match in HREF.finditer(tag):
-                yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
-
-    for tag in ("Home Page", "Download URL"):
-        pos = page.find(tag)
-        if pos != -1:
-            match = HREF.search(page, pos)
-            if match:
-                yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
-
-
-class ContentChecker:
-    """
-    A null content checker that defines the interface for checking content
-    """
-
-    def feed(self, block):
-        """
-        Feed a block of data to the hash.
-        """
-        return
-
-    def is_valid(self):
-        """
-        Check the hash. Return False if validation fails.
-        """
-        return True
-
-    def report(self, reporter, template):
-        """
-        Call reporter with information about the checker (hash name)
-        substituted into the template.
-        """
-        return
-
-
-class HashChecker(ContentChecker):
-    pattern = re.compile(
-        r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
-        r'(?P<expected>[a-f0-9]+)'
-    )
-
-    def __init__(self, hash_name, expected) -> None:
-        self.hash_name = hash_name
-        self.hash = hashlib.new(hash_name)
-        self.expected = expected
-
-    @classmethod
-    def from_url(cls, url):
-        "Construct a (possibly null) ContentChecker from a URL"
-        fragment = urllib.parse.urlparse(url)[-1]
-        if not fragment:
-            return ContentChecker()
-        match = cls.pattern.search(fragment)
-        if not match:
-            return ContentChecker()
-        return cls(**match.groupdict())
-
-    def feed(self, block):
-        self.hash.update(block)
-
-    def is_valid(self):
-        return self.hash.hexdigest() == self.expected
-
-    def report(self, reporter, template):
-        msg = template % self.hash_name
-        return reporter(msg)
-
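How the two checkers above cooperate, assuming a pre-removal checkout. Note that from_url relies on the named groups hash_name and expected in the class pattern; the URL is invented and the digest is md5(b'foo'):

    from setuptools.package_index import ContentChecker, HashChecker

    checker = HashChecker.from_url(
        'https://example.com/foo-1.0.tar.gz#md5=acbd18db4cc2f85cedef654fccc4a4d8'
    )
    checker.feed(b'foo')
    assert checker.is_valid()

    # URLs without a recognised hash fragment fall back to the no-op checker
    assert type(HashChecker.from_url('https://example.com/foo')) is ContentChecker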
-
-class PackageIndex(Environment):
-    """A distribution index that scans web pages for download URLs"""
-
-    def __init__(
-        self,
-        index_url: str = "https://pypi.org/simple/",
-        hosts=('*',),
-        ca_bundle=None,
-        verify_ssl: bool = True,
-        *args,
-        **kw,
-    ) -> None:
-        super().__init__(*args, **kw)
-        self.index_url = index_url + "/"[: not index_url.endswith('/')]
-        self.scanned_urls: dict = {}
-        self.fetched_urls: dict = {}
-        self.package_pages: dict = {}
-        self.allows = re.compile('|'.join(map(translate, hosts))).match
-        self.to_scan: list = []
-        self.opener = urllib.request.urlopen
-
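One subtlety in the constructor above: `index_url + "/"[: not index_url.endswith('/')]` appends a slash only when one is missing, because `not ...` evaluates to 0 or 1, and `"/"[:0]` is `''` while `"/"[:1]` is `'/'`. A sketch:

    # both spellings normalise to the same trailing-slash form
    for url in ('https://pypi.org/simple', 'https://pypi.org/simple/'):
        assert url + "/"[: not url.endswith('/')] == 'https://pypi.org/simple/'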
-    def add(self, dist):
-        # ignore invalid versions
-        try:
-            parse_version(dist.version)
-        except Exception:
-            return None
-        return super().add(dist)
-
-    # FIXME: 'PackageIndex.process_url' is too complex (14)
-    def process_url(self, url, retrieve: bool = False) -> None:  # noqa: C901
-        """Evaluate a URL as a possible download, and maybe retrieve it"""
-        if url in self.scanned_urls and not retrieve:
-            return
-        self.scanned_urls[url] = True
-        if not URL_SCHEME(url):
-            self.process_filename(url)
-            return
-        else:
-            dists = list(distros_for_url(url))
-            if dists:
-                if not self.url_ok(url):
-                    return
-                self.debug("Found link: %s", url)
-
-        if dists or not retrieve or url in self.fetched_urls:
-            list(map(self.add, dists))
-            return  # don't need the actual page
-
-        if not self.url_ok(url):
-            self.fetched_urls[url] = True
-            return
-
-        self.info("Reading %s", url)
-        self.fetched_urls[url] = True  # prevent multiple fetch attempts
-        tmpl = "Download error on %s: %%s -- Some packages may not be found!"
-        f = self.open_url(url, tmpl % url)
-        if f is None:
-            return
-        if isinstance(f, urllib.error.HTTPError) and f.code == 401:
-            self.info(f"Authentication error: {f.msg}")
-        self.fetched_urls[f.url] = True
-        if 'html' not in f.headers.get('content-type', '').lower():
-            f.close()  # not html, we can't process it
-            return
-
-        base = f.url  # handle redirects
-        page = f.read()
-        if not isinstance(page, str):
-            # We got bytes but want str; decode using the response charset.
-            if isinstance(f, urllib.error.HTTPError):
-                # Errors have no charset, assume latin1:
-                charset = 'latin-1'
-            else:
-                charset = f.headers.get_param('charset') or 'latin-1'
-            page = page.decode(charset, "ignore")
-        f.close()
-        for match in HREF.finditer(page):
-            link = urllib.parse.urljoin(base, htmldecode(match.group(1)))
-            self.process_url(link)
-        if url.startswith(self.index_url) and getattr(f, 'code', None) != 404:
-            page = self.process_index(url, page)
-
-    def process_filename(self, fn, nested: bool = False) -> None:
-        # process filenames or directories
-        if not os.path.exists(fn):
-            self.warn("Not found: %s", fn)
-            return
-
-        if os.path.isdir(fn) and not nested:
-            path = os.path.realpath(fn)
-            for item in os.listdir(path):
-                self.process_filename(os.path.join(path, item), True)
-
-        dists = distros_for_filename(fn)
-        if dists:
-            self.debug("Found: %s", fn)
-            list(map(self.add, dists))
-
-    def url_ok(self, url, fatal: bool = False) -> bool:
-        s = URL_SCHEME(url)
-        is_file = s and s.group(1).lower() == 'file'
-        if is_file or self.allows(urllib.parse.urlparse(url)[1]):
-            return True
-        msg = (
-            "\nNote: Bypassing %s (disallowed host; see "
-            "https://setuptools.pypa.io/en/latest/deprecated/"
-            "easy_install.html#restricting-downloads-with-allow-hosts for details).\n"
-        )
-        if fatal:
-            raise DistutilsError(msg % url)
-        else:
-            self.warn(msg, url)
-            return False
-
-    def scan_egg_links(self, search_path) -> None:
-        dirs = filter(os.path.isdir, search_path)
-        egg_links = (
-            (path, entry)
-            for path in dirs
-            for entry in os.listdir(path)
-            if entry.endswith('.egg-link')
-        )
-        list(itertools.starmap(self.scan_egg_link, egg_links))
-
-    def scan_egg_link(self, path, entry) -> None:
-        content = _read_utf8_with_fallback(os.path.join(path, entry))
-        # keep only non-empty lines
-        lines = list(filter(None, map(str.strip, content.splitlines())))
-
-        if len(lines) != 2:
-            # format is not recognized; punt
-            return
-
-        egg_path, _setup_path = lines
-
-        for dist in find_distributions(os.path.join(path, egg_path)):
-            dist.location = os.path.join(path, *lines)
-            dist.precedence = SOURCE_DIST
-            self.add(dist)
-
-    def _scan(self, link):
-        # Process a URL to see if it's for a package page
-        NO_MATCH_SENTINEL = None, None
-        if not link.startswith(self.index_url):
-            return NO_MATCH_SENTINEL
-
-        parts = list(map(urllib.parse.unquote, link[len(self.index_url) :].split('/')))
-        if len(parts) != 2 or '#' in parts[1]:
-            return NO_MATCH_SENTINEL
-
-        # it's a package page, sanitize and index it
-        pkg = safe_name(parts[0])
-        ver = safe_version(parts[1])
-        self.package_pages.setdefault(pkg.lower(), {})[link] = True
-        return to_filename(pkg), to_filename(ver)
-
-    def process_index(self, url, page):
-        """Process the contents of a PyPI page"""
-
-        # process an index page into the package-page index
-        for match in HREF.finditer(page):
-            try:
-                self._scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
-            except ValueError:
-                pass
-
-        pkg, ver = self._scan(url)  # ensure this page is in the page index
-        if not pkg:
-            return ""  # no sense double-scanning non-package pages
-
-        # process individual package page
-        for new_url in find_external_links(url, page):
-            # Process the found URL
-            base, frag = egg_info_for_url(new_url)
-            if base.endswith('.py') and not frag:
-                if ver:
-                    new_url += f'#egg={pkg}-{ver}'
-                else:
-                    self.need_version_info(url)
-            self.scan_url(new_url)
-
-        return PYPI_MD5.sub(
-            lambda m: '<a href="{}#md5={}">{}</a>'.format(*m.group(1, 3, 2)), page
-        )
-
-    def need_version_info(self, url) -> None:
-        self.scan_all(
-            "Page at %s links to .py file(s) without version info; an index "
-            "scan is required.",
-            url,
-        )
-
-    def scan_all(self, msg=None, *args) -> None:
-        if self.index_url not in self.fetched_urls:
-            if msg:
-                self.warn(msg, *args)
-            self.info("Scanning index of all packages (this may take a while)")
-        self.scan_url(self.index_url)
-
-    def find_packages(self, requirement) -> None:
-        self.scan_url(self.index_url + requirement.unsafe_name + '/')
-
-        if not self.package_pages.get(requirement.key):
-            # Fall back to safe version of the name
-            self.scan_url(self.index_url + requirement.project_name + '/')
-
-        if not self.package_pages.get(requirement.key):
-            # We couldn't find the target package, so search the index page too
-            self.not_found_in_index(requirement)
-
-        for url in list(self.package_pages.get(requirement.key, ())):
-            # scan each page that might be related to the desired package
-            self.scan_url(url)
-
-    def obtain(self, requirement, installer=None):
-        self.prescan()
-        self.find_packages(requirement)
-        for dist in self[requirement.key]:
-            if dist in requirement:
-                return dist
-            self.debug("%s does not match %s", requirement, dist)
-        return super().obtain(requirement, installer)
-
-    def check_hash(self, checker, filename, tfp) -> None:
-        """
-        checker is a ContentChecker
-        """
-        checker.report(self.debug, f"Validating %s checksum for {filename}")
-        if not checker.is_valid():
-            tfp.close()
-            os.unlink(filename)
-            raise DistutilsError(
-                f"{checker.hash.name} validation failed for {os.path.basename(filename)}; "
-                "possible download problem?"
-            )
-
-    def add_find_links(self, urls) -> None:
-        """Add `urls` to the list that will be prescanned for searches"""
-        for url in urls:
-            if (
-                self.to_scan is None  # if we have already "gone online"
-                or not URL_SCHEME(url)  # or it's a local file/directory
-                or url.startswith('file:')
-                or list(distros_for_url(url))  # or a direct package link
-            ):
-                # then go ahead and process it now
-                self.scan_url(url)
-            else:
-                # otherwise, defer retrieval till later
-                self.to_scan.append(url)
-
-    def prescan(self):
-        """Scan urls scheduled for prescanning (e.g. --find-links)"""
-        if self.to_scan:
-            list(map(self.scan_url, self.to_scan))
-        self.to_scan = None  # from now on, go ahead and process immediately
-
-    def not_found_in_index(self, requirement) -> None:
-        if self[requirement.key]:  # we've seen at least one distro
-            meth, msg = self.info, "Couldn't retrieve index page for %r"
-        else:  # no distros seen for this name, might be misspelled
-            meth, msg = self.warn, "Couldn't find index page for %r (maybe misspelled?)"
-        meth(msg, requirement.unsafe_name)
-        self.scan_all()
-
-    def download(self, spec, tmpdir):
-        """Locate and/or download `spec` to `tmpdir`, returning a local path
-
-        `spec` may be a ``Requirement`` object, or a string containing a URL,
-        an existing local filename, or a project/version requirement spec
-        (i.e. the string form of a ``Requirement`` object).  If it is the URL
-        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
-        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
-        automatically created alongside the downloaded file.
-
-        If `spec` is a ``Requirement`` object or a string containing a
-        project/version requirement spec, this method returns the location of
-        a matching distribution (possibly after downloading it to `tmpdir`).
-        If `spec` is a locally existing file or directory name, it is simply
-        returned unchanged.  If `spec` is a URL, it is downloaded to a subpath
-        of `tmpdir`, and the local filename is returned.  Various errors may be
-        raised if a problem occurs during downloading.
-        """
-        if not isinstance(spec, Requirement):
-            scheme = URL_SCHEME(spec)
-            if scheme:
-                # It's a url, download it to tmpdir
-                found = self._download_url(spec, tmpdir)
-                base, fragment = egg_info_for_url(spec)
-                if base.endswith('.py'):
-                    found = self.gen_setup(found, fragment, tmpdir)
-                return found
-            elif os.path.exists(spec):
-                # Existing file or directory, just return it
-                return spec
-            else:
-                spec = parse_requirement_arg(spec)
-        return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
-
-    def fetch_distribution(  # noqa: C901  # is too complex (14)  # FIXME
-        self,
-        requirement,
-        tmpdir,
-        force_scan: bool = False,
-        source: bool = False,
-        develop_ok: bool = False,
-        local_index=None,
-    ) -> Distribution | None:
-        """Obtain a distribution suitable for fulfilling `requirement`
-
-        `requirement` must be a ``pkg_resources.Requirement`` instance.
-        If necessary, or if the `force_scan` flag is set, the requirement is
-        searched for in the (online) package index as well as the locally
-        installed packages.  If a distribution matching `requirement` is found,
-        the returned distribution's ``location`` is the value you would have
-        gotten from calling the ``download()`` method with the matching
-        distribution's URL or filename.  If no matching distribution is found,
-        ``None`` is returned.
-
-        If the `source` flag is set, only source distributions and source
-        checkout links will be considered.  Unless the `develop_ok` flag is
-        set, development and system eggs (i.e., those using the ``.egg-info``
-        format) will be ignored.
-        """
-        # process a Requirement
-        self.info("Searching for %s", requirement)
-        skipped = set()
-        dist = None
-
-        def find(req, env: Environment | None = None):
-            if env is None:
-                env = self
-            # Find a matching distribution; may be called more than once
-
-            for dist in env[req.key]:
-                if dist.precedence == DEVELOP_DIST and not develop_ok:
-                    if dist not in skipped:
-                        self.warn(
-                            "Skipping development or system egg: %s",
-                            dist,
-                        )
-                        skipped.add(dist)
-                    continue
-
-                test = dist in req and (dist.precedence <= SOURCE_DIST or not source)
-                if test:
-                    loc = self.download(dist.location, tmpdir)
-                    dist.download_location = loc
-                    if os.path.exists(dist.download_location):
-                        return dist
-
-            return None
-
-        if force_scan:
-            self.prescan()
-            self.find_packages(requirement)
-            dist = find(requirement)
-
-        if not dist and local_index is not None:
-            dist = find(requirement, local_index)
-
-        if dist is None:
-            if self.to_scan is not None:
-                self.prescan()
-            dist = find(requirement)
-
-        if dist is None and not force_scan:
-            self.find_packages(requirement)
-            dist = find(requirement)
-
-        if dist is None:
-            self.warn(
-                "No local packages or working download links found for %s%s",
-                (source and "a source distribution of " or ""),
-                requirement,
-            )
-            return None
-        else:
-            self.info("Best match: %s", dist)
-            return dist.clone(location=dist.download_location)
-
-    def fetch(
-        self, requirement, tmpdir, force_scan: bool = False, source: bool = False
-    ) -> str | None:
-        """Obtain a file suitable for fulfilling `requirement`
-
-        DEPRECATED; use the ``fetch_distribution()`` method now instead.  For
-        backward compatibility, this routine is identical but returns the
-        ``location`` of the downloaded distribution instead of a distribution
-        object.
-        """
-        dist = self.fetch_distribution(requirement, tmpdir, force_scan, source)
-        if dist is not None:
-            return dist.location
-        return None
-
-    def gen_setup(self, filename, fragment, tmpdir):
-        match = EGG_FRAGMENT.match(fragment)
-        dists = (
-            match
-            and [
-                d
-                for d in interpret_distro_name(filename, match.group(1), None)
-                if d.version
-            ]
-            or []
-        )
-
-        if len(dists) == 1:  # unambiguous ``#egg`` fragment
-            basename = os.path.basename(filename)
-
-            # Make sure the file has been downloaded to the temp dir.
-            if os.path.dirname(filename) != tmpdir:
-                dst = os.path.join(tmpdir, basename)
-                if not (os.path.exists(dst) and os.path.samefile(filename, dst)):
-                    shutil.copy2(filename, dst)
-                    filename = dst
-
-            with open(os.path.join(tmpdir, 'setup.py'), 'w', encoding="utf-8") as file:
-                file.write(
-                    "from setuptools import setup\n"
-                    f"setup(name={dists[0].project_name!r}, version={dists[0].version!r}, py_modules=[{os.path.splitext(basename)[0]!r}])\n"
-                )
-            return filename
-
-        elif match:
-            raise DistutilsError(
-                f"Can't unambiguously interpret project/version identifier {fragment!r}; "
-                "any dashes in the name or version should be escaped using "
-                f"underscores. {dists!r}"
-            )
-        else:
-            raise DistutilsError(
-                "Can't process plain .py files without an '#egg=name-version'"
-                " suffix to enable automatic setup script generation."
-            )
-
-    dl_blocksize = 8192
-
-    def _download_to(self, url, filename):
-        self.info("Downloading %s", url)
-        # Download the file
-        fp = None
-        try:
-            checker = HashChecker.from_url(url)
-            fp = self.open_url(url)
-            if isinstance(fp, urllib.error.HTTPError):
-                raise DistutilsError(f"Can't download {url}: {fp.code} {fp.msg}")
-            headers = fp.info()
-            blocknum = 0
-            bs = self.dl_blocksize
-            size = -1
-            if "content-length" in headers:
-                # Some servers return multiple Content-Length headers :(
-                sizes = headers.get_all('Content-Length')
-                size = max(map(int, sizes))
-                self.reporthook(url, filename, blocknum, bs, size)
-            with open(filename, 'wb') as tfp:
-                while True:
-                    block = fp.read(bs)
-                    if block:
-                        checker.feed(block)
-                        tfp.write(block)
-                        blocknum += 1
-                        self.reporthook(url, filename, blocknum, bs, size)
-                    else:
-                        break
-                self.check_hash(checker, filename, tfp)
-            return headers
-        finally:
-            if fp:
-                fp.close()
-
-    def reporthook(self, url, filename, blocknum, blksize, size) -> None:
-        pass  # no-op
-
-    # FIXME:
-    def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
-        if url.startswith('file:'):
-            return local_open(url)
-        try:
-            return open_with_auth(url, self.opener)
-        except (ValueError, http.client.InvalidURL) as v:
-            msg = ' '.join([str(arg) for arg in v.args])
-            if warning:
-                self.warn(warning, msg)
-            else:
-                raise DistutilsError(f'{url} {msg}') from v
-        except urllib.error.HTTPError as v:
-            return v
-        except urllib.error.URLError as v:
-            if warning:
-                self.warn(warning, v.reason)
-            else:
-                raise DistutilsError(f"Download error for {url}: {v.reason}") from v
-        except http.client.BadStatusLine as v:
-            if warning:
-                self.warn(warning, v.line)
-            else:
-                raise DistutilsError(
-                    f'{url} returned a bad status line. The server might be '
-                    f'down, {v.line}'
-                ) from v
-        except (http.client.HTTPException, OSError) as v:
-            if warning:
-                self.warn(warning, v)
-            else:
-                raise DistutilsError(f"Download error for {url}: {v}") from v
-
-    @staticmethod
-    def _sanitize(name):
-        r"""
-        Replace unsafe path directives with underscores.
-
-        >>> san = PackageIndex._sanitize
-        >>> san('/home/user/.ssh/authorized_keys')
-        '_home_user_.ssh_authorized_keys'
-        >>> san('..\\foo\\bing')
-        '__foo_bing'
-        >>> san('D:bar')
-        'D_bar'
-        >>> san('C:\\bar')
-        'C__bar'
-        >>> san('foo..bar')
-        'foo..bar'
-        >>> san('D:../foo')
-        'D___foo'
-        """
-        pattern = '|'.join((
-            # drive letters
-            r':',
-            # path separators
-            r'[/\\]',
-            # parent dirs
-            r'(?:(?<=([/\\]|:))\.\.(?=[/\\]|$))|(?:^\.\.(?=[/\\]|$))',
-        ))
-        return re.sub(pattern, r'_', name)
-
-    @classmethod
-    def _resolve_download_filename(cls, url, tmpdir):
-        """
-        >>> import pathlib
-        >>> du = PackageIndex._resolve_download_filename
-        >>> root = getfixture('tmp_path')
-        >>> url = 'https://files.pythonhosted.org/packages/a9/5a/0db.../setuptools-78.1.0.tar.gz'
-        >>> str(pathlib.Path(du(url, root)).relative_to(root))
-        'setuptools-78.1.0.tar.gz'
-        """
-        name, _fragment = egg_info_for_url(url)
-        name = cls._sanitize(
-            name
-            or
-            # default if URL has no path contents
-            '__downloaded__'
-        )
-
-        # strip any extra .zip before download
-        name = re.sub(r'\.egg\.zip$', '.egg', name)
-
-        return os.path.join(tmpdir, name)
-
-    def _download_url(self, url, tmpdir):
-        """
-        Determine the download filename.
-        """
-        filename = self._resolve_download_filename(url, tmpdir)
-        return self._download_vcs(url, filename) or self._download_other(url, filename)
-
-    @staticmethod
-    def _resolve_vcs(url):
-        """
-        >>> rvcs = PackageIndex._resolve_vcs
-        >>> rvcs('git+http://foo/bar')
-        'git'
-        >>> rvcs('hg+https://foo/bar')
-        'hg'
-        >>> rvcs('git:myhost')
-        'git'
-        >>> rvcs('hg:myhost')
-        >>> rvcs('http://foo/bar')
-        """
-        scheme = urllib.parse.urlsplit(url).scheme
-        pre, sep, _post = scheme.partition('+')
-        # svn and git have their own protocol; hg does not
-        allowed = set(['svn', 'git'] + ['hg'] * bool(sep))
-        return next(iter({pre} & allowed), None)
-
-    def _download_vcs(self, url, spec_filename):
-        vcs = self._resolve_vcs(url)
-        if not vcs:
-            return None
-        if vcs == 'svn':
-            raise DistutilsError(
-                f"Invalid config, SVN download is not supported: {url}"
-            )
-
-        filename, _, _ = spec_filename.partition('#')
-        url, rev = self._vcs_split_rev_from_url(url)
-
-        self.info(f"Doing {vcs} clone from {url} to {filename}")
-        subprocess.check_call([vcs, 'clone', '--quiet', url, filename])
-
-        co_commands = dict(
-            git=[vcs, '-C', filename, 'checkout', '--quiet', rev],
-            hg=[vcs, '--cwd', filename, 'up', '-C', '-r', rev, '-q'],
-        )
-        if rev is not None:
-            self.info(f"Checking out {rev}")
-            subprocess.check_call(co_commands[vcs])
-
-        return filename
-
-    def _download_other(self, url, filename):
-        scheme = urllib.parse.urlsplit(url).scheme
-        if scheme == 'file':  # pragma: no cover
-            return urllib.request.url2pathname(urllib.parse.urlparse(url).path)
-        # raise error if not allowed
-        self.url_ok(url, True)
-        return self._attempt_download(url, filename)
-
-    def scan_url(self, url) -> None:
-        self.process_url(url, True)
-
-    def _attempt_download(self, url, filename):
-        headers = self._download_to(url, filename)
-        if 'html' in headers.get('content-type', '').lower():
-            return self._invalid_download_html(url, headers, filename)
-        else:
-            return filename
-
-    def _invalid_download_html(self, url, headers, filename):
-        os.unlink(filename)
-        raise DistutilsError(f"Unexpected HTML page found at {url}")
-
-    @staticmethod
-    def _vcs_split_rev_from_url(url):
-        """
-        Given a possible VCS URL, return a clean URL and resolved revision if any.
-
-        >>> vsrfu = PackageIndex._vcs_split_rev_from_url
-        >>> vsrfu('git+https://github.com/pypa/setuptools@v69.0.0#egg-info=setuptools')
-        ('https://github.com/pypa/setuptools', 'v69.0.0')
-        >>> vsrfu('git+https://github.com/pypa/setuptools#egg-info=setuptools')
-        ('https://github.com/pypa/setuptools', None)
-        >>> vsrfu('http://foo/bar')
-        ('http://foo/bar', None)
-        """
-        parts = urllib.parse.urlsplit(url)
-
-        clean_scheme = parts.scheme.split('+', 1)[-1]
-
-        # Fragment identification fails for some URLs, so strip any fragment manually.
-        no_fragment_path, _, _ = parts.path.partition('#')
-
-        pre, sep, post = no_fragment_path.rpartition('@')
-        clean_path, rev = (pre, post) if sep else (post, None)
-
-        resolved = parts._replace(
-            scheme=clean_scheme,
-            path=clean_path,
-            # discard the fragment
-            fragment='',
-        ).geturl()
-
-        return resolved, rev
-
-    def debug(self, msg, *args) -> None:
-        log.debug(msg, *args)
-
-    def info(self, msg, *args) -> None:
-        log.info(msg, *args)
-
-    def warn(self, msg, *args) -> None:
-        log.warn(msg, *args)
-
-
-# This pattern matches a character entity reference (a decimal numeric
-# reference, a hexadecimal numeric reference, or a named reference).
-entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
-
-
-def decode_entity(match):
-    what = match.group(0)
-    return html.unescape(what)
-
-
-def htmldecode(text):
-    """
-    Decode HTML entities in the given text.
-
-    >>> htmldecode(
-    ...     'https://../package_name-0.1.2.tar.gz'
-    ...     '?tokena=A&amp;tokenb=B">package_name-0.1.2.tar.gz')
-    'https://../package_name-0.1.2.tar.gz?tokena=A&tokenb=B">package_name-0.1.2.tar.gz'
-    """
-    return entity_sub(decode_entity, text)
-
-
-def socket_timeout(timeout=15):
-    def _socket_timeout(func):
-        def _socket_timeout(*args, **kwargs):
-            old_timeout = socket.getdefaulttimeout()
-            socket.setdefaulttimeout(timeout)
-            try:
-                return func(*args, **kwargs)
-            finally:
-                socket.setdefaulttimeout(old_timeout)
-
-        return _socket_timeout
-
-    return _socket_timeout
-
-
-def _encode_auth(auth):
-    """
-    Encode auth from a URL suitable for an HTTP header.
-    >>> str(_encode_auth('username%3Apassword'))
-    'dXNlcm5hbWU6cGFzc3dvcmQ='
-
-    Long auth strings should not cause a newline to be inserted.
-    >>> long_auth = 'username:' + 'password'*10
-    >>> chr(10) in str(_encode_auth(long_auth))
-    False
-    """
-    auth_s = urllib.parse.unquote(auth)
-    # convert to bytes
-    auth_bytes = auth_s.encode()
-    encoded_bytes = base64.b64encode(auth_bytes)
-    # convert back to a string
-    encoded = encoded_bytes.decode()
-    # strip the trailing carriage return
-    return encoded.replace('\n', '')
-
-
-class Credential(NamedTuple):
-    """
-    A username/password pair.
-
-    Displayed separated by `:`.
-    >>> str(Credential('username', 'password'))
-    'username:password'
-    """
-
-    username: str
-    password: str
-
-    def __str__(self) -> str:
-        return f'{self.username}:{self.password}'
-
-
-class PyPIConfig(configparser.RawConfigParser):
-    def __init__(self):
-        """
-        Load from ~/.pypirc
-        """
-        defaults = dict.fromkeys(['username', 'password', 'repository'], '')
-        super().__init__(defaults)
-
-        rc = os.path.join(os.path.expanduser('~'), '.pypirc')
-        if os.path.exists(rc):
-            _cfg_read_utf8_with_fallback(self, rc)
-
-    @property
-    def creds_by_repository(self):
-        sections_with_repositories = [
-            section
-            for section in self.sections()
-            if self.get(section, 'repository').strip()
-        ]
-
-        return dict(map(self._get_repo_cred, sections_with_repositories))
-
-    def _get_repo_cred(self, section):
-        repo = self.get(section, 'repository').strip()
-        return repo, Credential(
-            self.get(section, 'username').strip(),
-            self.get(section, 'password').strip(),
-        )
-
-    def find_credential(self, url):
-        """
-        If the URL indicated appears to be a repository defined in this
-        config, return the credential for that repository.
-        """
-        for repository, cred in self.creds_by_repository.items():
-            if url.startswith(repository):
-                return cred
-        return None
-
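A sketch of the credential lookup above using a throwaway .pypirc, assuming a pre-removal checkout and a POSIX platform where os.path.expanduser honours $HOME (the section name and credentials are invented):

    import pathlib
    import tempfile
    from unittest import mock

    from setuptools.package_index import PyPIConfig

    ini = '\n'.join([
        '[acme]',
        'repository: https://pypi.example.com/',
        'username: alice',
        'password: s3cret',
    ])
    with tempfile.TemporaryDirectory() as home:
        pathlib.Path(home, '.pypirc').write_text(ini, encoding='utf-8')
        with mock.patch.dict('os.environ', HOME=home):
            cred = PyPIConfig().find_credential('https://pypi.example.com/simple/')
    assert str(cred) == 'alice:s3cret'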
-
-def open_with_auth(url, opener=urllib.request.urlopen):
-    """Open a urllib2 request, handling HTTP authentication"""
-
-    parsed = urllib.parse.urlparse(url)
-    scheme, netloc, path, params, query, frag = parsed
-
-    # Double scheme does not raise on macOS as revealed by a
-    # failing test. We would expect "nonnumeric port". Refs #20.
-    if netloc.endswith(':'):
-        raise http.client.InvalidURL("nonnumeric port: ''")
-
-    if scheme in ('http', 'https'):
-        auth, address = _splituser(netloc)
-    else:
-        auth, address = (None, None)
-
-    if not auth:
-        cred = PyPIConfig().find_credential(url)
-        if cred:
-            auth = str(cred)
-            info = cred.username, url
-            log.info('Authenticating as %s for %s (from .pypirc)', *info)
-
-    if auth:
-        auth = "Basic " + _encode_auth(auth)
-        parts = scheme, address, path, params, query, frag
-        new_url = urllib.parse.urlunparse(parts)
-        request = urllib.request.Request(new_url)
-        request.add_header("Authorization", auth)
-    else:
-        request = urllib.request.Request(url)
-
-    request.add_header('User-Agent', user_agent)
-    fp = opener(request)
-
-    if auth:
-        # Put authentication info back into request URL if same host,
-        # so that links found on the page will work
-        s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)
-        if s2 == scheme and h2 == address:
-            parts = s2, netloc, path2, param2, query2, frag2
-            fp.url = urllib.parse.urlunparse(parts)
-
-    return fp
-
-
-# copy of urllib.parse._splituser from Python 3.8
-# See https://github.com/python/cpython/issues/80072.
-def _splituser(host):
-    """splituser('user[:passwd]@host[:port]')
-    --> 'user[:passwd]', 'host[:port]'."""
-    user, delim, host = host.rpartition('@')
-    return (user if delim else None), host
-
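Behaviour of the backport above (the host names are invented):

    from setuptools.package_index import _splituser

    assert _splituser('alice:s3cret@pypi.example.com:443') == (
        'alice:s3cret',
        'pypi.example.com:443',
    )
    assert _splituser('pypi.example.com') == (None, 'pypi.example.com')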
-
-# adding a timeout to avoid freezing package_index
-open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
-
-
-def fix_sf_url(url):
-    return url  # backward compatibility
-
-
-def local_open(url):
-    """Read a local path, with special support for directories"""
-    _scheme, _server, path, _param, _query, _frag = urllib.parse.urlparse(url)
-    filename = urllib.request.url2pathname(path)
-    if os.path.isfile(filename):
-        return urllib.request.urlopen(url)
-    elif path.endswith('/') and os.path.isdir(filename):
-        files = []
-        for f in os.listdir(filename):
-            filepath = os.path.join(filename, f)
-            if f == 'index.html':
-                body = _read_utf8_with_fallback(filepath)
-                break
-            elif os.path.isdir(filepath):
-                f += '/'
-            files.append(f'<a href="{f}">{f}</a>')
-        else:
-            tmpl = "{url}{files}"
-            body = tmpl.format(url=url, files='\n'.join(files))
-        status, message = 200, "OK"
-    else:
-        status, message, body = 404, "Path not found", "Not found"
-
-    headers = {'content-type': 'text/html'}
-    body_stream = io.StringIO(body)
-    return urllib.error.HTTPError(url, status, message, headers, body_stream)
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
deleted file mode 100644
index 9d6571c17e..0000000000
--- a/setuptools/tests/test_easy_install.py
+++ /dev/null
@@ -1,509 +0,0 @@
-"""Easy install Tests"""
-
-import itertools
-import logging
-import os
-import re
-import site
-import sys
-import tempfile
-import warnings
-import zipfile
-from typing import NamedTuple
-from unittest import mock
-
-import pytest
-
-import pkg_resources
-import setuptools.command.easy_install as ei
-from pkg_resources import Distribution as PRDistribution, normalize_path
-from setuptools.command.easy_install import PthDistributions
-from setuptools.dist import Distribution
-
-from .fixtures import make_sdist
-from .textwrap import DALS
-
-import distutils.errors
-
-
-@pytest.fixture(autouse=True)
-def pip_disable_index(monkeypatch):
-    """
-    Important: Disable the default index for pip to avoid
-    querying packages in the index and potentially resolving
-    and installing packages there.
-    """
-    monkeypatch.setenv('PIP_NO_INDEX', 'true')
-
-
-class FakeDist:
-    def get_entry_map(self, group):
-        if group != 'console_scripts':
-            return {}
-        return {'name': 'ep'}
-
-    def as_requirement(self):
-        return 'spec'
-
-
-SETUP_PY = DALS(
-    """
-    from setuptools import setup
-
-    setup()
-    """
-)
-
-
-class TestEasyInstallTest:
-    def test_get_script_args(self):
-        header = ei.CommandSpec.best().from_environment().as_header()
-        dist = FakeDist()
-        args = next(ei.ScriptWriter.get_args(dist))
-        _name, script = itertools.islice(args, 2)
-        assert script.startswith(header)
-        assert "'spec'" in script
-        assert "'console_scripts'" in script
-        assert "'name'" in script
-        assert re.search('^# EASY-INSTALL-ENTRY-SCRIPT', script, flags=re.MULTILINE)
-
-    def test_no_find_links(self):
-        # new option '--no-find-links', that blocks find-links added at
-        # the project level
-        dist = Distribution()
-        cmd = ei.easy_install(dist)
-        cmd.check_pth_processing = lambda: True
-        cmd.no_find_links = True
-        cmd.find_links = ['link1', 'link2']
-        cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
-        cmd.args = ['ok']
-        cmd.ensure_finalized()
-        assert cmd.package_index.scanned_urls == {}
-
-        # let's try without it (default behavior)
-        cmd = ei.easy_install(dist)
-        cmd.check_pth_processing = lambda: True
-        cmd.find_links = ['link1', 'link2']
-        cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
-        cmd.args = ['ok']
-        cmd.ensure_finalized()
-        keys = sorted(cmd.package_index.scanned_urls.keys())
-        assert keys == ['link1', 'link2']
-
-    def test_write_exception(self):
-        """
-        Test that `cant_write_to_target` is rendered as a DistutilsError.
-        """
-        dist = Distribution()
-        cmd = ei.easy_install(dist)
-        cmd.install_dir = os.getcwd()
-        with pytest.raises(distutils.errors.DistutilsError):
-            cmd.cant_write_to_target()
-
-    def test_all_site_dirs(self, monkeypatch):
-        """
-        get_site_dirs should always return site dirs reported by
-        site.getsitepackages.
-        """
-        path = normalize_path('/setuptools/test/site-packages')
-
-        def mock_gsp():
-            return [path]
-
-        monkeypatch.setattr(site, 'getsitepackages', mock_gsp, raising=False)
-        assert path in ei.get_site_dirs()
-
-    def test_all_site_dirs_works_without_getsitepackages(self, monkeypatch):
-        monkeypatch.delattr(site, 'getsitepackages', raising=False)
-        assert ei.get_site_dirs()
-
-    @pytest.fixture
-    def sdist_unicode(self, tmpdir):
-        files = [
-            (
-                'setup.py',
-                DALS(
-                    """
-                    import setuptools
-                    setuptools.setup(
-                        name="setuptools-test-unicode",
-                        version="1.0",
-                        packages=["mypkg"],
-                        include_package_data=True,
-                    )
-                    """
-                ),
-            ),
-            (
-                'mypkg/__init__.py',
-                "",
-            ),
-            (
-                'mypkg/☃.txt',
-                "",
-            ),
-        ]
-        sdist_name = 'setuptools-test-unicode-1.0.zip'
-        sdist = tmpdir / sdist_name
-        # can't use make_sdist, because the issue only occurs
-        #  with zip sdists.
-        sdist_zip = zipfile.ZipFile(str(sdist), 'w')
-        for filename, content in files:
-            sdist_zip.writestr(filename, content)
-        sdist_zip.close()
-        return str(sdist)
-
-    @pytest.fixture
-    def sdist_unicode_in_script(self, tmpdir):
-        files = [
-            (
-                "setup.py",
-                DALS(
-                    """
-                    import setuptools
-                    setuptools.setup(
-                        name="setuptools-test-unicode",
-                        version="1.0",
-                        packages=["mypkg"],
-                        include_package_data=True,
-                        scripts=['mypkg/unicode_in_script'],
-                    )
-                    """
-                ),
-            ),
-            ("mypkg/__init__.py", ""),
-            (
-                "mypkg/unicode_in_script",
-                DALS(
-                    """
-                    #!/bin/sh
-                    # á
-
-                    non_python_fn() {
-                    }
-                """
-                ),
-            ),
-        ]
-        sdist_name = "setuptools-test-unicode-script-1.0.zip"
-        sdist = tmpdir / sdist_name
-        # can't use make_sdist, because the issue only occurs
-        #  with zip sdists.
-        sdist_zip = zipfile.ZipFile(str(sdist), "w")
-        for filename, content in files:
-            sdist_zip.writestr(filename, content.encode('utf-8'))
-        sdist_zip.close()
-        return str(sdist)
-
-    @pytest.fixture
-    def sdist_script(self, tmpdir):
-        files = [
-            (
-                'setup.py',
-                DALS(
-                    """
-                    import setuptools
-                    setuptools.setup(
-                        name="setuptools-test-script",
-                        version="1.0",
-                        scripts=["mypkg_script"],
-                    )
-                    """
-                ),
-            ),
-            (
-                'mypkg_script',
-                DALS(
-                    """
-                     #/usr/bin/python
-                     print('mypkg_script')
-                     """
-                ),
-            ),
-        ]
-        sdist_name = 'setuptools-test-script-1.0.zip'
-        sdist = str(tmpdir / sdist_name)
-        make_sdist(sdist, files)
-        return sdist
-
-
-@pytest.mark.filterwarnings('ignore:Unbuilt egg')
-class TestPTHFileWriter:
-    def test_add_from_cwd_site_sets_dirty(self):
-        """a pth file manager should set dirty
-        if a distribution is in site but also the cwd
-        """
-        pth = PthDistributions('does-not_exist', [os.getcwd()])
-        assert not pth.dirty
-        pth.add(PRDistribution(os.getcwd()))
-        assert pth.dirty
-
-    def test_add_from_site_is_ignored(self):
-        location = '/test/location/does-not-have-to-exist'
-        # PthDistributions expects all locations to be normalized
-        location = pkg_resources.normalize_path(location)
-        pth = PthDistributions(
-            'does-not_exist',
-            [
-                location,
-            ],
-        )
-        assert not pth.dirty
-        pth.add(PRDistribution(location))
-        assert not pth.dirty
-
-    def test_many_pth_distributions_merge_together(self, tmpdir):
-        """
-        If the pth file is modified under the hood, then PthDistributions
-        will refresh its content before saving, merging contents when
-        necessary.
-        """
-        # putting the pth file in a dedicated sub-folder,
-        pth_subdir = tmpdir.join("pth_subdir")
-        pth_subdir.mkdir()
-        pth_path = str(pth_subdir.join("file1.pth"))
-        pth1 = PthDistributions(pth_path)
-        pth2 = PthDistributions(pth_path)
-        assert pth1.paths == pth2.paths == [], (
-            "unless there would be some default added at some point"
-        )
-        # and so putting the src_subdir in a folder distinct from the pth one,
-        # so that PthDistributions keeps it absolute
-        new_src_path = tmpdir.join("src_subdir")
-        new_src_path.mkdir()  # must exist to be accounted
-        new_src_path_str = str(new_src_path)
-        pth1.paths.append(new_src_path_str)
-        pth1.save()
-        assert pth1.paths, (
-            "the new_src_path added must still be present/valid in pth1 after save"
-        )
-        # now,
-        assert new_src_path_str not in pth2.paths, (
-            "right before we save, the entry should still not be present"
-        )
-        pth2.save()
-        assert new_src_path_str in pth2.paths, (
-            "the new_src_path entry should have been added by pth2 with its save() call"
-        )
-        assert pth2.paths[-1] == new_src_path, (
-            "and it should match exactly on the last entry actually "
-            "given we append to it in save()"
-        )
-        # finally,
-        assert PthDistributions(pth_path).paths == pth2.paths, (
-            "and we should have the exact same list at the end "
-            "with a fresh PthDistributions instance"
-        )
-
-
-@pytest.fixture
-def setup_context(tmpdir):
-    with (tmpdir / 'setup.py').open('w', encoding="utf-8") as f:
-        f.write(SETUP_PY)
-    with tmpdir.as_cwd():
-        yield tmpdir
-
-
-@pytest.mark.usefixtures("user_override")
-@pytest.mark.usefixtures("setup_context")
-class TestUserInstallTest:
-    # prevent check that site-packages is writable. easy_install
-    # shouldn't be writing to system site-packages during finalize
-    # options, but while it does, bypass the behavior.
-    prev_sp_write = mock.patch(
-        'setuptools.command.easy_install.easy_install.check_site_dir',
-        mock.Mock(),
-    )
-
-    # simulate setuptools installed in user site packages
-    @mock.patch('setuptools.command.easy_install.__file__', site.USER_SITE)
-    @mock.patch('site.ENABLE_USER_SITE', True)
-    @prev_sp_write
-    def test_user_install_not_implied_user_site_enabled(self):
-        self.assert_not_user_site()
-
-    @mock.patch('site.ENABLE_USER_SITE', False)
-    @prev_sp_write
-    def test_user_install_not_implied_user_site_disabled(self):
-        self.assert_not_user_site()
-
-    @staticmethod
-    def assert_not_user_site():
-        # create a finalized easy_install command
-        dist = Distribution()
-        dist.script_name = 'setup.py'
-        cmd = ei.easy_install(dist)
-        cmd.args = ['py']
-        cmd.ensure_finalized()
-        assert not cmd.user, 'user should not be implied'
-
-    def test_multiproc_atexit(self):
-        pytest.importorskip('multiprocessing')
-
-        log = logging.getLogger('test_easy_install')
-        logging.basicConfig(level=logging.INFO, stream=sys.stderr)
-        log.info('this should not break')
-
-    @pytest.fixture
-    def foo_package(self, tmpdir):
-        egg_file = tmpdir / 'foo-1.0.egg-info'
-        with egg_file.open('w') as f:
-            f.write('Name: foo\n')
-        return str(tmpdir)
-
-    @pytest.fixture
-    def install_target(self, tmpdir):
-        target = str(tmpdir)
-        with mock.patch('sys.path', sys.path + [target]):
-            python_path = os.path.pathsep.join(sys.path)
-            with mock.patch.dict(os.environ, PYTHONPATH=python_path):
-                yield target
-
-    def test_local_index(self, foo_package, install_target):
-        """
-        The local index must be used when easy_install locates installed
-        packages.
-        """
-        dist = Distribution()
-        dist.script_name = 'setup.py'
-        cmd = ei.easy_install(dist)
-        cmd.install_dir = install_target
-        cmd.args = ['foo']
-        cmd.ensure_finalized()
-        cmd.local_index.scan([foo_package])
-        res = cmd.easy_install('foo')
-        actual = os.path.normcase(os.path.realpath(res.location))
-        expected = os.path.normcase(os.path.realpath(foo_package))
-        assert actual == expected
-
-
-@pytest.mark.skipif(
-    sys.platform.startswith('java') and ei.is_sh(sys.executable),
-    reason="Test cannot run under java when executable is sh",
-)
-class TestScriptHeader:
-    non_ascii_exe = '/Users/José/bin/python'
-    exe_with_spaces = r'C:\Program Files\Python36\python.exe'
-
-    def test_get_script_header(self):
-        expected = f'#!{ei.nt_quote_arg(os.path.normpath(sys.executable))}\n'
-        actual = ei.ScriptWriter.get_header('#!/usr/local/bin/python')
-        assert actual == expected
-
-    def test_get_script_header_args(self):
-        expected = f'#!{ei.nt_quote_arg(os.path.normpath(sys.executable))} -x\n'
-        actual = ei.ScriptWriter.get_header('#!/usr/bin/python -x')
-        assert actual == expected
-
-    def test_get_script_header_non_ascii_exe(self):
-        actual = ei.ScriptWriter.get_header(
-            '#!/usr/bin/python', executable=self.non_ascii_exe
-        )
-        expected = f'#!{self.non_ascii_exe} -x\n'
-        assert actual == expected
-
-    def test_get_script_header_exe_with_spaces(self):
-        actual = ei.ScriptWriter.get_header(
-            '#!/usr/bin/python', executable='"' + self.exe_with_spaces + '"'
-        )
-        expected = f'#!"{self.exe_with_spaces}"\n'
-        assert actual == expected
-
-
-class TestCommandSpec:
-    def test_custom_launch_command(self):
-        """
-        Show how a custom CommandSpec could be used to specify a #! executable
-        which takes parameters.
-        """
-        cmd = ei.CommandSpec(['/usr/bin/env', 'python3'])
-        assert cmd.as_header() == '#!/usr/bin/env python3\n'
-
-    def test_from_param_for_CommandSpec_is_passthrough(self):
-        """
-        from_param should return an instance of a CommandSpec
-        """
-        cmd = ei.CommandSpec(['python'])
-        cmd_new = ei.CommandSpec.from_param(cmd)
-        assert cmd is cmd_new
-
-    @mock.patch('sys.executable', TestScriptHeader.exe_with_spaces)
-    @mock.patch.dict(os.environ)
-    def test_from_environment_with_spaces_in_executable(self):
-        os.environ.pop('__PYVENV_LAUNCHER__', None)
-        cmd = ei.CommandSpec.from_environment()
-        assert len(cmd) == 1
-        assert cmd.as_header().startswith('#!"')
-
-    def test_from_simple_string_uses_shlex(self):
-        """
-        In order to support `executable = /usr/bin/env my-python`, make sure
-        from_param invokes shlex on that input.
-        """
-        cmd = ei.CommandSpec.from_param('/usr/bin/env my-python')
-        assert len(cmd) == 2
-        assert '"' not in cmd.as_header()
-
-    def test_from_param_raises_expected_error(self) -> None:
-        """
-        from_param should raise its own TypeError when the argument's type is unsupported
-        """
-        with pytest.raises(TypeError) as exc_info:
-            ei.CommandSpec.from_param(object())  # type: ignore[arg-type] # We want a type error here
-        assert (
-            str(exc_info.value) == "Argument has an unsupported type "
-        ), exc_info.value
-
-
-class VersionStub(NamedTuple):
-    major: int
-    minor: int
-    micro: int
-    releaselevel: str
-    serial: int
-
-
-def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch):
-    # In issue #3001, easy_install wrongly uses the `python3.1` directory
-    # when the interpreter is `python3.10` and the `--user` option is given.
-    # See pypa/setuptools#3001.
-    dist = Distribution()
-    cmd = dist.get_command_obj('easy_install')
-    cmd.args = ['ok']
-    cmd.optimize = 0
-    cmd.user = True
-    cmd.install_userbase = str(tmpdir)
-    cmd.install_usersite = None
-    install_cmd = dist.get_command_obj('install')
-    install_cmd.install_userbase = str(tmpdir)
-    install_cmd.install_usersite = None
-
-    with monkeypatch.context() as patch, warnings.catch_warnings():
-        warnings.simplefilter("ignore")
-        version = '3.10.1 (main, Dec 21 2021, 09:17:12) [GCC 10.2.1 20210110]'
-        info = VersionStub(3, 10, 1, "final", 0)
-        patch.setattr('site.ENABLE_USER_SITE', True)
-        patch.setattr('sys.version', version)
-        patch.setattr('sys.version_info', info)
-        patch.setattr(cmd, 'create_home_path', mock.Mock())
-        cmd.finalize_options()
-
-    name = "pypy" if hasattr(sys, 'pypy_version_info') else "python"
-    install_dir = cmd.install_dir.lower()
-
-    # In some platforms (e.g. Windows), install_dir is mostly determined
-    # via `sysconfig`, which define constants eagerly at module creation.
-    # This means that monkeypatching `sys.version` to emulate 3.10 for testing
-    # may have no effect.
-    # The safest test here is to rely on the fact that 3.1 is no longer
-    # supported/tested, and make sure that if 'python3.1' ever appears in the string
-    # it is followed by another digit (e.g. 'python3.10').
-    if re.search(name + r'3\.?1', install_dir):
-        assert re.search(name + r'3\.?1\d', install_dir)
-
-    # The following "variables" are used for interpolation in distutils
-    # installation schemes, so it should be fair to treat them as "semi-public",
-    # or at least public enough so we can have a test to make sure they are correct
-    assert cmd.config_vars['py_version'] == '3.10.1'
-    assert cmd.config_vars['py_version_short'] == '3.10'
-    assert cmd.config_vars['py_version_nodot'] == '310'
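
Aside on the config_vars assertions above: these variables are "semi-public"
because distutils interpolates them into its installation schemes. A rough
sketch of that substitution, with an illustrative scheme string and values
that are not taken from this patch:

    from distutils.util import subst_vars

    config_vars = {
        'base': '/usr/local',
        'py_version': '3.10.1',
        'py_version_short': '3.10',
        'py_version_nodot': '310',
    }
    # distutils schemes use $-placeholders resolved against these vars
    scheme = '$base/lib/python$py_version_short/site-packages'
    print(subst_vars(scheme, config_vars))  # /usr/local/lib/python3.10/site-packages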
diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py
deleted file mode 100644
index 2a6e5917a8..0000000000
--- a/setuptools/tests/test_packageindex.py
+++ /dev/null
@@ -1,267 +0,0 @@
-import http.client
-import re
-import urllib.error
-import urllib.request
-from inspect import cleandoc
-
-import pytest
-
-import setuptools.package_index
-
-import distutils.errors
-
-
-class TestPackageIndex:
-    def test_regex(self):
-        hash_url = 'http://other_url?:action=show_md5&'
-        hash_url += 'digest=0123456789abcdef0123456789abcdef'
-        doc = """
-            Name
-            (md5)
-        """.lstrip().format(**locals())
-        assert setuptools.package_index.PYPI_MD5.match(doc)
-
-    def test_bad_url_bad_port(self):
-        index = setuptools.package_index.PackageIndex()
-        url = 'http://127.0.0.1:0/nonesuch/test_package_index'
-        with pytest.raises(Exception, match=re.escape(url)):
-            v = index.open_url(url)
-            assert isinstance(v, urllib.error.HTTPError)
-
-    def test_bad_url_typo(self):
-        # issue 16
-        # easy_install inquant.contentmirror.plone breaks because of a typo
-        # in its home URL
-        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
-
-        url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
-
-        with pytest.raises(Exception, match=re.escape(url)):
-            v = index.open_url(url)
-            assert isinstance(v, urllib.error.HTTPError)
-
-    def test_bad_url_bad_status_line(self):
-        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
-
-        def _urlopen(*args):
-            raise http.client.BadStatusLine('line')
-
-        index.opener = _urlopen
-        url = 'http://example.com'
-        with pytest.raises(Exception, match=r'line'):
-            index.open_url(url)
-
-    def test_bad_url_double_scheme(self):
-        """
-        A bad URL with a double scheme should raise a DistutilsError.
-        """
-        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
-
-        # issue 20
-        url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
-        try:
-            index.open_url(url)
-        except distutils.errors.DistutilsError as error:
-            msg = str(error)
-            assert (
-                'nonnumeric port' in msg
-                or 'getaddrinfo failed' in msg
-                or 'Name or service not known' in msg
-            )
-            return
-        raise RuntimeError("Did not raise")
-
-    def test_url_ok(self):
-        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
-        url = 'file:///tmp/test_package_index'
-        assert index.url_ok(url, True)
-
-    def test_parse_bdist_wininst(self):
-        parse = setuptools.package_index.parse_bdist_wininst
-
-        actual = parse('reportlab-2.5.win32-py2.4.exe')
-        expected = 'reportlab-2.5', '2.4', 'win32'
-        assert actual == expected
-
-        actual = parse('reportlab-2.5.win32.exe')
-        expected = 'reportlab-2.5', None, 'win32'
-        assert actual == expected
-
-        actual = parse('reportlab-2.5.win-amd64-py2.7.exe')
-        expected = 'reportlab-2.5', '2.7', 'win-amd64'
-        assert actual == expected
-
-        actual = parse('reportlab-2.5.win-amd64.exe')
-        expected = 'reportlab-2.5', None, 'win-amd64'
-        assert actual == expected
-
-    def test__vcs_split_rev_from_url(self):
-        """
-        Test the basic usage of _vcs_split_rev_from_url
-        """
-        vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
-        url, rev = vsrfu('https://example.com/bar@2995')
-        assert url == 'https://example.com/bar'
-        assert rev == '2995'
-
-    def test_local_index(self, tmpdir):
-        """
-        local_open should be able to read an index from the file system.
-        """
-        index_file = tmpdir / 'index.html'
-        with index_file.open('w') as f:
-            f.write('
content
') - url = 'file:' + urllib.request.pathname2url(str(tmpdir)) + '/' - res = setuptools.package_index.local_open(url) - assert 'content' in res.read() - - def test_egg_fragment(self): - """ - EGG fragments must comply to PEP 440 - """ - epoch = [ - '', - '1!', - ] - releases = [ - '0', - '0.0', - '0.0.0', - ] - pre = [ - 'a0', - 'b0', - 'rc0', - ] - post = ['.post0'] - dev = [ - '.dev0', - ] - local = [ - ('', ''), - ('+ubuntu.0', '+ubuntu.0'), - ('+ubuntu-0', '+ubuntu.0'), - ('+ubuntu_0', '+ubuntu.0'), - ] - versions = [ - [''.join([e, r, p, loc]) for loc in locs] - for e in epoch - for r in releases - for p in sum([pre, post, dev], ['']) - for locs in local - ] - for v, vc in versions: - dists = list( - setuptools.package_index.distros_for_url( - 'http://example.com/example-foo.zip#egg=example-foo-' + v - ) - ) - assert dists[0].version == '' - assert dists[1].version == vc - - def test_download_git_with_rev(self, tmp_path, fp): - url = 'git+https://github.example/group/project@master#egg=foo' - index = setuptools.package_index.PackageIndex() - - expected_dir = tmp_path / 'project@master' - fp.register([ - 'git', - 'clone', - '--quiet', - 'https://github.example/group/project', - expected_dir, - ]) - fp.register(['git', '-C', expected_dir, 'checkout', '--quiet', 'master']) - - result = index.download(url, tmp_path) - - assert result == str(expected_dir) - assert len(fp.calls) == 2 - - def test_download_git_no_rev(self, tmp_path, fp): - url = 'git+https://github.example/group/project#egg=foo' - index = setuptools.package_index.PackageIndex() - - expected_dir = tmp_path / 'project' - fp.register([ - 'git', - 'clone', - '--quiet', - 'https://github.example/group/project', - expected_dir, - ]) - index.download(url, tmp_path) - - def test_download_svn(self, tmp_path): - url = 'svn+https://svn.example/project#egg=foo' - index = setuptools.package_index.PackageIndex() - - msg = r".*SVN download is not supported.*" - with pytest.raises(distutils.errors.DistutilsError, match=msg): - index.download(url, tmp_path) - - -class TestContentCheckers: - def test_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478' - ) - checker.feed('You should probably not be using MD5'.encode('ascii')) - assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478' - assert checker.is_valid() - - def test_other_fragment(self): - "Content checks should succeed silently if no hash is present" - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#something%20completely%20different' - ) - checker.feed('anything'.encode('ascii')) - assert checker.is_valid() - - def test_blank_md5(self): - "Content checks should succeed if a hash is empty" - checker = setuptools.package_index.HashChecker.from_url('http://foo/bar#md5=') - checker.feed('anything'.encode('ascii')) - assert checker.is_valid() - - def test_get_hash_name_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478' - ) - assert checker.hash_name == 'md5' - - def test_report(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478' - ) - rep = checker.report(lambda x: x, 'My message about %s') - assert rep == 'My message about md5' - - -class TestPyPIConfig: - def test_percent_in_password(self, tmp_home_dir): - pypirc = tmp_home_dir / '.pypirc' - pypirc.write_text( - cleandoc( - """ - [pypi] - repository=https://pypi.org - username=jaraco 
-                password=pity%
-                """
-            ),
-            encoding="utf-8",
-        )
-        cfg = setuptools.package_index.PyPIConfig()
-        cred = cfg.creds_by_repository['https://pypi.org']
-        assert cred.username == 'jaraco'
-        assert cred.password == 'pity%'
-
-
-@pytest.mark.timeout(1)
-def test_REL_DoS():
-    """
-    REL should not hang on a contrived attack string.
-    """
-    setuptools.package_index.REL.search('< rel=' + ' ' * 2**12)

From 7245a99c2b42eff78c3eab406d27b7d259075d48 Mon Sep 17 00:00:00 2001
From: Bradley Reynolds
Date: Mon, 28 Apr 2025 14:23:32 -0500
Subject: [PATCH 1643/1761] Tiny nit fix: `master`->`main` in PR template

---
 .github/pull_request_template.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index c4458c9825..9c88511a5e 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -13,6 +13,6 @@ Closes
 
 _(See [documentation][PR docs] for details)_
 
-[`newsfragments/`]: https://github.com/pypa/setuptools/tree/master/newsfragments
+[`newsfragments/`]: https://github.com/pypa/setuptools/tree/main/newsfragments
 [PR docs]: https://setuptools.pypa.io/en/latest/development/developer-guide.html#making-a-pull-request

From 92ff129754ece790b1129e756bbf2baacff65e1b Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Tue, 29 Apr 2025 17:44:12 -0400
Subject: [PATCH 1644/1761] Fixed index_url logic in develop compatibility
 shim.

Closes #4966
---
 newsfragments/4966.bugfix.rst | 1 +
 setuptools/command/develop.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 newsfragments/4966.bugfix.rst

diff --git a/newsfragments/4966.bugfix.rst b/newsfragments/4966.bugfix.rst
new file mode 100644
index 0000000000..c436d8bf04
--- /dev/null
+++ b/newsfragments/4966.bugfix.rst
@@ -0,0 +1 @@
+Fixed index_url logic in develop compatibility shim.
diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 42df9a10ff..0401b4d266 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -34,7 +34,7 @@ def run(self):
             + ['--no-deps'] * self.no_deps
             + ['--user'] * self.user
             + ['--prefix', self.prefix] * bool(self.prefix)
-            + ['--index-url', self.index_url] * bool(self.prefix)
+            + ['--index-url', self.index_url] * bool(self.self.index_url)
         )
         subprocess.check_call(cmd)

From 6c748caaaf128b7a64a5e4629787f06780b6d68d Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Tue, 29 Apr 2025 17:44:23 -0400
Subject: [PATCH 1645/1761] =?UTF-8?q?Bump=20version:=2080.0.0=20=E2=86=92?=
 =?UTF-8?q?=2080.0.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg              | 2 +-
 NEWS.rst                      | 9 +++++++++
 newsfragments/4966.bugfix.rst | 1 -
 pyproject.toml                | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)
 delete mode 100644 newsfragments/4966.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 5177b8c4d0..ce67b06d12 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 80.0.0
+current_version = 80.0.1
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index ebcca10197..1d36be2840 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,12 @@
+v80.0.1
+=======
+
+Bugfixes
+--------
+
+- Fixed index_url logic in develop compatibility shim. (#4966)
+
+
 v80.0.0
 =======
 
diff --git a/newsfragments/4966.bugfix.rst b/newsfragments/4966.bugfix.rst
deleted file mode 100644
index c436d8bf04..0000000000
--- a/newsfragments/4966.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fixed index_url logic in develop compatibility shim.
diff --git a/pyproject.toml b/pyproject.toml
index 2ef6e19256..ae468c4993 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "80.0.0"
+version = "80.0.1"
 authors = [
     { name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 76b041dadc1d8fcbe78e7037ed2c6cba24325600 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Tue, 29 Apr 2025 21:20:52 -0400
Subject: [PATCH 1646/1761] Fixup

---
 setuptools/command/develop.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py
index 0401b4d266..1f704fcee8 100644
--- a/setuptools/command/develop.py
+++ b/setuptools/command/develop.py
@@ -34,7 +34,7 @@ def run(self):
             + ['--no-deps'] * self.no_deps
             + ['--user'] * self.user
             + ['--prefix', self.prefix] * bool(self.prefix)
-            + ['--index-url', self.index_url] * bool(self.self.index_url)
+            + ['--index-url', self.index_url] * bool(self.index_url)
         )
         subprocess.check_call(cmd)

From 0dc924ad325edcc4478532eb5ec58ad7518f0b5c Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Tue, 29 Apr 2025 21:44:58 -0400
Subject: [PATCH 1647/1761] Fall back to distutils install rather than
 failing.

Closes #3143
---
 newsfragments/3143.bugfix.rst | 1 +
 setuptools/command/install.py | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
 create mode 100644 newsfragments/3143.bugfix.rst

diff --git a/newsfragments/3143.bugfix.rst b/newsfragments/3143.bugfix.rst
new file mode 100644
index 0000000000..1df31bb91a
--- /dev/null
+++ b/newsfragments/3143.bugfix.rst
@@ -0,0 +1 @@
+With ``setup.py install --prefix=...``, fall back to distutils install rather than failing. Note that running ``setup.py install`` is deprecated.
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index ba667c5d16..83b4a27970 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -102,7 +102,7 @@ def run(self):
             # Run in backward-compatibility mode to support bdist_* commands.
             super().run()
         else:
-            self.do_egg_install()
+            super().run()
 
         return None

From 2b0d1739783955c5b018ac63850a70f06bee49b3 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Tue, 29 Apr 2025 21:47:01 -0400
Subject: [PATCH 1648/1761] Unify the behavior around the return type when
 calling super(install).

---
 setuptools/command/install.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index 83b4a27970..a0a4070b57 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -100,9 +100,9 @@ def run(self):
 
         if not self._called_from_setup(inspect.currentframe()):
             # Run in backward-compatibility mode to support bdist_* commands.
-            super().run()
+            return super().run()
         else:
-            super().run()
+            return super().run()
 
         return None

From a1ecac4f96d56938a7bb45f840923719efac2369 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Tue, 29 Apr 2025 21:47:44 -0400
Subject: [PATCH 1649/1761] Remove run override as it now unconditionally
 calls super().
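
Note: after the two preceding commits, both branches of the overridden
run() ended in the same call, so the method had collapsed to the
equivalent of:

    def run(self):
        return super().run()

which inheritance already provides, making the override safe to delete.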
---
 setuptools/command/install.py | 13 -------------
 1 file changed, 13 deletions(-)

diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index a0a4070b57..0e1eed2ea5 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -93,19 +93,6 @@ def handle_extra_path(self):
         self.extra_dirs = ''
         return None
 
-    def run(self):
-        # Explicit request for old-style install? Just do it
-        if self.old_and_unmanageable or self.single_version_externally_managed:
-            return super().run()
-
-        if not self._called_from_setup(inspect.currentframe()):
-            # Run in backward-compatibility mode to support bdist_* commands.
-            return super().run()
-        else:
-            return super().run()
-
-        return None
-
     @staticmethod
     def _called_from_setup(run_frame):
         """

From d8071d6625e41281726fb5aa866b8a40fa2d9da0 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Tue, 29 Apr 2025 21:48:13 -0400
Subject: [PATCH 1650/1761] Remove do_egg_install (unused).

---
 setuptools/command/install.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index 0e1eed2ea5..f01ded8e61 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -126,9 +126,6 @@ def _called_from_setup(run_frame):
 
         return False
 
-    def do_egg_install(self) -> None:
-        raise NotImplementedError("Support for egg-based install has been removed.")
-
 
 # XXX Python 3.1 doesn't see _nc if this is inside the class
 install.sub_commands = [

From 7fc5e05df9e239cf2938396dcb1ca93d00621e2a Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Tue, 29 Apr 2025 21:49:18 -0400
Subject: [PATCH 1651/1761] Add a due date on the deprecation.

---
 newsfragments/+99b1cfb7.feature.rst | 1 +
 setuptools/command/install.py       | 4 +---
 2 files changed, 2 insertions(+), 3 deletions(-)
 create mode 100644 newsfragments/+99b1cfb7.feature.rst

diff --git a/newsfragments/+99b1cfb7.feature.rst b/newsfragments/+99b1cfb7.feature.rst
new file mode 100644
index 0000000000..0070e3a5c3
--- /dev/null
+++ b/newsfragments/+99b1cfb7.feature.rst
@@ -0,0 +1 @@
+Added a deadline of Oct 31 to the setup.py install deprecation.
diff --git a/setuptools/command/install.py b/setuptools/command/install.py
index f01ded8e61..fd73816543 100644
--- a/setuptools/command/install.py
+++ b/setuptools/command/install.py
@@ -63,9 +63,7 @@ def initialize_options(self):
             standards-based tools.
             """,
             see_url="https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html",
-            # TODO: Document how to bootstrap setuptools without install
-            # (e.g. by unzipping the wheel file)
-            # and then add a due_date to this warning.
+            due_date=(2025, 10, 31),
         )
 
         super().initialize_options()

From 6f7b6ddf095c35db92b5a0724ebbc179d897adb4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Wed, 30 Apr 2025 13:20:39 -0400
Subject: [PATCH 1652/1761] =?UTF-8?q?Bump=20version:=2080.0.1=20=E2=86=92?=
 =?UTF-8?q?=2080.1.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg                    |  2 +-
 NEWS.rst                            | 15 +++++++++++++++
 newsfragments/+99b1cfb7.feature.rst |  1 -
 newsfragments/3143.bugfix.rst       |  1 -
 pyproject.toml                      |  2 +-
 5 files changed, 17 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/+99b1cfb7.feature.rst
 delete mode 100644 newsfragments/3143.bugfix.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index ce67b06d12..4e43db0c77 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 80.0.1
+current_version = 80.1.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 1d36be2840..b29a6b4bdd 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,18 @@
+v80.1.0
+=======
+
+Features
+--------
+
+- Added a deadline of Oct 31 to the setup.py install deprecation.
+
+
+Bugfixes
+--------
+
+- With ``setup.py install --prefix=...``, fall back to distutils install rather than failing. Note that running ``setup.py install`` is deprecated. (#3143)
+
+
 v80.0.1
 =======
 
diff --git a/newsfragments/+99b1cfb7.feature.rst b/newsfragments/+99b1cfb7.feature.rst
deleted file mode 100644
index 0070e3a5c3..0000000000
--- a/newsfragments/+99b1cfb7.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Added a deadline of Oct 31 to the setup.py install deprecation.
diff --git a/newsfragments/3143.bugfix.rst b/newsfragments/3143.bugfix.rst
deleted file mode 100644
index 1df31bb91a..0000000000
--- a/newsfragments/3143.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-With ``setup.py install --prefix=...``, fall back to distutils install rather than failing. Note that running ``setup.py install`` is deprecated.
diff --git a/pyproject.toml b/pyproject.toml
index ae468c4993..3c16269ece 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "80.0.1"
+version = "80.1.0"
 authors = [
     { name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From 3f94782c5ede0689cfc216693ddb9a79087d6c91 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Fri, 2 May 2025 20:01:23 -0400
Subject: [PATCH 1653/1761] Revert "Merge pull request pypa/distutils#332 from
 pypa/debt/unify-shebang"

This reverts commit 5589d7527044a75ff681ceb4e1e97641578a0c87, reversing
changes made to 250c300096abbf4147be62a428bd25a98abc487e.

Closes pypa/setuptools#4934
Closes pypa/setuptools#4934 --- distutils/command/build_scripts.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py index 127c51d8dc..3f7aae0a66 100644 --- a/distutils/command/build_scripts.py +++ b/distutils/command/build_scripts.py @@ -5,6 +5,7 @@ import os import re import tokenize +from distutils import sysconfig from distutils._log import log from stat import ST_MODE from typing import ClassVar @@ -75,7 +76,7 @@ def copy_scripts(self): return outfiles, updated_files - def _copy_script(self, script, outfiles, updated_files): + def _copy_script(self, script, outfiles, updated_files): # noqa: C901 shebang_match = None script = convert_path(script) outfile = os.path.join(self.build_dir, os.path.basename(script)) @@ -105,8 +106,18 @@ def _copy_script(self, script, outfiles, updated_files): if shebang_match: log.info("copying and adjusting %s -> %s", script, self.build_dir) if not self.dry_run: + if not sysconfig.python_build: + executable = self.executable + else: + executable = os.path.join( + sysconfig.get_config_var("BINDIR"), + "python{}{}".format( + sysconfig.get_config_var("VERSION"), + sysconfig.get_config_var("EXE"), + ), + ) post_interp = shebang_match.group(1) or '' - shebang = f"#!python{post_interp}\n" + shebang = "#!" + executable + post_interp + "\n" self._validate_shebang(shebang, f.encoding) with open(outfile, "w", encoding=f.encoding) as outf: outf.write(shebang) From 575445c672d78fcce22df1e459b7baf0304a38b9 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 2 May 2025 20:07:13 -0400 Subject: [PATCH 1654/1761] Remove support for special executable under a Python build. As far as I can tell, no one has complained about loss of this functionality. --- distutils/command/build_scripts.py | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/distutils/command/build_scripts.py b/distutils/command/build_scripts.py index 3f7aae0a66..b86ee6e6ba 100644 --- a/distutils/command/build_scripts.py +++ b/distutils/command/build_scripts.py @@ -5,7 +5,6 @@ import os import re import tokenize -from distutils import sysconfig from distutils._log import log from stat import ST_MODE from typing import ClassVar @@ -76,7 +75,7 @@ def copy_scripts(self): return outfiles, updated_files - def _copy_script(self, script, outfiles, updated_files): # noqa: C901 + def _copy_script(self, script, outfiles, updated_files): shebang_match = None script = convert_path(script) outfile = os.path.join(self.build_dir, os.path.basename(script)) @@ -106,18 +105,8 @@ def _copy_script(self, script, outfiles, updated_files): # noqa: C901 if shebang_match: log.info("copying and adjusting %s -> %s", script, self.build_dir) if not self.dry_run: - if not sysconfig.python_build: - executable = self.executable - else: - executable = os.path.join( - sysconfig.get_config_var("BINDIR"), - "python{}{}".format( - sysconfig.get_config_var("VERSION"), - sysconfig.get_config_var("EXE"), - ), - ) post_interp = shebang_match.group(1) or '' - shebang = "#!" + executable + post_interp + "\n" + shebang = "#!" + self.executable + post_interp + "\n" self._validate_shebang(shebang, f.encoding) with open(outfile, "w", encoding=f.encoding) as outf: outf.write(shebang) From a8b19fc5ad2f8e43dc6d196374811e97855d7bf0 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Fri, 2 May 2025 20:20:02 -0400 Subject: [PATCH 1655/1761] In build_editable, ensure that 'executable' is hard-coded to #!python for portability. Replacement implementation for pypa/setuptools#4863 following pypa/setuptools#4934. --- setuptools/command/editable_wheel.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 1a544ec258..09ecdf2e6f 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -36,6 +36,7 @@ from .build import build as build_cls from .build_py import build_py as build_py_cls from .dist_info import dist_info as dist_info_cls +from .distutils.commands import build_scripts as build_scripts_cls from .egg_info import egg_info as egg_info_cls from .install import install as install_cls from .install_scripts import install_scripts as install_scripts_cls @@ -211,6 +212,11 @@ def _configure_build( install.install_headers = headers install.install_data = data + # For portability, ensure scripts are built with #!python shebang + # pypa/setuptools#4863 + build_scripts = cast(build_scripts_cls, dist.get_command_obj("build_scripts")) + build_scripts.executable = 'python' + install_scripts = cast( install_scripts_cls, dist.get_command_obj("install_scripts") ) From 6ae673ab0efe2b14b2e53922b6089c7747aad026 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 2 May 2025 20:30:37 -0400 Subject: [PATCH 1656/1761] Add news fragment. --- newsfragments/4934.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4934.feature.rst diff --git a/newsfragments/4934.feature.rst b/newsfragments/4934.feature.rst new file mode 100644 index 0000000000..96abb169a1 --- /dev/null +++ b/newsfragments/4934.feature.rst @@ -0,0 +1 @@ +Restored support for install_scripts --executable (and classic behavior for the executable for those invocations). Instead, build_editable provides the portable form of the executables for downstream installers to rewrite. From cddc53ba311351b0e2b7b77d3c94646c0ada9b99 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 2 May 2025 22:19:37 -0400 Subject: [PATCH 1657/1761] Fix import. --- setuptools/command/editable_wheel.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 09ecdf2e6f..6f24effedd 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -36,11 +36,12 @@ from .build import build as build_cls from .build_py import build_py as build_py_cls from .dist_info import dist_info as dist_info_cls -from .distutils.commands import build_scripts as build_scripts_cls from .egg_info import egg_info as egg_info_cls from .install import install as install_cls from .install_scripts import install_scripts as install_scripts_cls +from distutils.command.build_scripts import build_scripts as build_scripts_cls + if TYPE_CHECKING: from typing_extensions import Self From 21e38050e14a79598934f22347e8519b6ccd4ddd Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Fri, 2 May 2025 22:31:03 -0400 Subject: [PATCH 1658/1761] Cast is unnecessary, apparently --- setuptools/command/editable_wheel.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 6f24effedd..917d5f149b 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -40,8 +40,6 @@ from .install import install as install_cls from .install_scripts import install_scripts as install_scripts_cls -from distutils.command.build_scripts import build_scripts as build_scripts_cls - if TYPE_CHECKING: from typing_extensions import Self @@ -215,7 +213,7 @@ def _configure_build( # For portability, ensure scripts are built with #!python shebang # pypa/setuptools#4863 - build_scripts = cast(build_scripts_cls, dist.get_command_obj("build_scripts")) + build_scripts = dist.get_command_obj("build_scripts") build_scripts.executable = 'python' install_scripts = cast( From 22f087ce5678e166bfba58b058908d9bb478d930 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 2 May 2025 22:38:16 -0400 Subject: [PATCH 1659/1761] =?UTF-8?q?Bump=20version:=2080.1.0=20=E2=86=92?= =?UTF-8?q?=2080.2.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 9 +++++++++ newsfragments/4934.feature.rst | 1 - pyproject.toml | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 newsfragments/4934.feature.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 4e43db0c77..66eb0512b1 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 80.1.0 +current_version = 80.2.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index b29a6b4bdd..03bcf8c1c6 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,12 @@ +v80.2.0 +======= + +Features +-------- + +- Restored support for install_scripts --executable (and classic behavior for the executable for those invocations). Instead, build_editable provides the portable form of the executables for downstream installers to rewrite. (#4934) + + v80.1.0 ======= diff --git a/newsfragments/4934.feature.rst b/newsfragments/4934.feature.rst deleted file mode 100644 index 96abb169a1..0000000000 --- a/newsfragments/4934.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Restored support for install_scripts --executable (and classic behavior for the executable for those invocations). Instead, build_editable provides the portable form of the executables for downstream installers to rewrite. diff --git a/pyproject.toml b/pyproject.toml index 3c16269ece..0b8238e86a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "80.1.0" +version = "80.2.0" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From 9a81db3c77bc106017dcd4b0853a5a94f43ae33c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 3 May 2025 03:57:47 -0400 Subject: [PATCH 1660/1761] Replace copy of license with an SPDX identifier. 
(jaraco/skeleton#171) --- LICENSE | 17 ----------------- pyproject.toml | 1 + 2 files changed, 1 insertion(+), 17 deletions(-) delete mode 100644 LICENSE diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 1bb5a44356..0000000000 --- a/LICENSE +++ /dev/null @@ -1,17 +0,0 @@ -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/pyproject.toml b/pyproject.toml index 71b1a7dabc..fa0c801fba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,7 @@ classifiers = [ "Programming Language :: Python :: 3 :: Only", ] requires-python = ">=3.9" +license = "MIT" dependencies = [ ] dynamic = ["version"] From 63588b6f19be4a7769423a1c14865f192931f829 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 3 May 2025 04:00:35 -0400 Subject: [PATCH 1661/1761] Add news fragment. --- newsfragments/4956.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4956.feature.rst diff --git a/newsfragments/4956.feature.rst b/newsfragments/4956.feature.rst new file mode 100644 index 0000000000..e42aec415d --- /dev/null +++ b/newsfragments/4956.feature.rst @@ -0,0 +1 @@ +Restored license declaration in package metadata. See jaraco/skeleton#171. From 2af90ac872ddbdfd229244377a95b8be1dde035f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 3 May 2025 04:28:04 -0400 Subject: [PATCH 1662/1761] Update tests in setuptools/dist not to rely on Setuptools having a license file. 
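
Note: the getfixture(...) calls added to the doctests below rely on
pytest's doctest integration, which exposes a getfixture helper inside
doctest namespaces, e.g.:

    >>> target = getfixture('tmp_path')  # only available under pytest

so the examples can depend on the sample_project_cwd fixture instead of
files shipped with Setuptools itself.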
---
 setuptools/dist.py           | 16 +++++++++-------
 setuptools/tests/fixtures.py |  6 ++++++
 2 files changed, 15 insertions(+), 7 deletions(-)

diff --git a/setuptools/dist.py b/setuptools/dist.py
index 8d972cc49b..57aeb2579d 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -471,12 +471,13 @@ def _expand_patterns(
         cls, patterns: list[str], enforce_match: bool = True
     ) -> Iterator[str]:
         """
-        >>> list(Distribution._expand_patterns(['LICENSE']))
-        ['LICENSE']
+        >>> getfixture('sample_project_cwd')
+        >>> list(Distribution._expand_patterns(['LICENSE.txt']))
+        ['LICENSE.txt']
         >>> list(Distribution._expand_patterns(['pyproject.toml', 'LIC*']))
-        ['pyproject.toml', 'LICENSE']
-        >>> list(Distribution._expand_patterns(['setuptools/**/pyprojecttoml.py']))
-        ['setuptools/config/pyprojecttoml.py']
+        ['pyproject.toml', 'LICENSE.txt']
+        >>> list(Distribution._expand_patterns(['src/**/*.dat']))
+        ['src/sample/package_data.dat']
         """
         return (
             path.replace(os.sep, "/")
@@ -488,8 +489,9 @@ def _expand_patterns(
     @staticmethod
     def _find_pattern(pattern: str, enforce_match: bool = True) -> list[str]:
         r"""
-        >>> Distribution._find_pattern("LICENSE")
-        ['LICENSE']
+        >>> getfixture('sample_project_cwd')
+        >>> Distribution._find_pattern("LICENSE.txt")
+        ['LICENSE.txt']
         >>> Distribution._find_pattern("/LICENSE.MIT")
         Traceback (most recent call last):
         ...
diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index a5472984b5..542e4a62b3 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -60,6 +60,12 @@ def sample_project(tmp_path):
     return tmp_path / 'sampleproject'
 
 
+@pytest.fixture
+def sample_project_cwd(sample_project):
+    with contextlib.chdir(sample_project):
+        yield
+
+
 # sdist and wheel artifacts should be stable across a round of tests
 # so we can build them once per session and use the files as "readonly"

From 553dd342e42ab2093a36c13c30700aa4ccc4f226 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sat, 3 May 2025 04:00:35 -0400
Subject: [PATCH 1663/1761] Rely on path.Path for directory context.

`contextlib.chdir` isn't available until Python 3.11.
---
 setuptools/tests/fixtures.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index 542e4a62b3..6ca42648bd 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -62,7 +62,7 @@ def sample_project(tmp_path):
 
 @pytest.fixture
 def sample_project_cwd(sample_project):
-    with contextlib.chdir(sample_project):
+    with path.Path(sample_project):
         yield
 

From 1ef1ee1076053de2501110deaddbae7313d0f107 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sat, 3 May 2025 04:49:39 -0400
Subject: [PATCH 1664/1761] Add news fragment.

---
 newsfragments/917.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/917.feature.rst

diff --git a/newsfragments/917.feature.rst b/newsfragments/917.feature.rst
new file mode 100644
index 0000000000..8b1ba1b794
--- /dev/null
+++ b/newsfragments/917.feature.rst
@@ -0,0 +1 @@
+Removed easy_install and package_index modules.

From af8b3228487554d93ed15ec69cfe45f7c086e9b4 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sat, 3 May 2025 04:56:41 -0400
Subject: [PATCH 1665/1761] =?UTF-8?q?Bump=20version:=2080.2.0=20=E2=86=92?=
 =?UTF-8?q?=2080.3.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .bumpversion.cfg               |  2 +-
 NEWS.rst                       | 10 ++++++++++
 newsfragments/4956.feature.rst |  1 -
 newsfragments/917.feature.rst  |  1 -
 pyproject.toml                 |  2 +-
 5 files changed, 12 insertions(+), 4 deletions(-)
 delete mode 100644 newsfragments/4956.feature.rst
 delete mode 100644 newsfragments/917.feature.rst

diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 66eb0512b1..49a31480ad 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 80.2.0
+current_version = 80.3.0
 commit = True
 tag = True
 
diff --git a/NEWS.rst b/NEWS.rst
index 03bcf8c1c6..3978958837 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,13 @@
+v80.3.0
+=======
+
+Features
+--------
+
+- Removed easy_install and package_index modules. (#917)
+- Restored license declaration in package metadata. See jaraco/skeleton#171. (#4956)
+
+
 v80.2.0
 =======
 
diff --git a/newsfragments/4956.feature.rst b/newsfragments/4956.feature.rst
deleted file mode 100644
index e42aec415d..0000000000
--- a/newsfragments/4956.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Restored license declaration in package metadata. See jaraco/skeleton#171.
diff --git a/newsfragments/917.feature.rst b/newsfragments/917.feature.rst
deleted file mode 100644
index 8b1ba1b794..0000000000
--- a/newsfragments/917.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Removed easy_install and package_index modules.
diff --git a/pyproject.toml b/pyproject.toml
index 507cdbaa0f..51061e6c58 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]
 
 [project]
 name = "setuptools"
-version = "80.2.0"
+version = "80.3.0"
 authors = [
     { name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]

From e7d7e721956e61701e2e701984b5fb0f55296fec Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sun, 4 May 2025 11:08:17 -0400
Subject: [PATCH 1666/1761] Add documentation describing which interfaces are
 supported.

Closes #4978
---
 docs/userguide/index.rst      |  1 +
 docs/userguide/interfaces.rst | 14 ++++++++++++++
 2 files changed, 15 insertions(+)
 create mode 100644 docs/userguide/interfaces.rst

diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst
index d631c5d8ac..c7794ee8bf 100644
--- a/docs/userguide/index.rst
+++ b/docs/userguide/index.rst
@@ -24,6 +24,7 @@ Contents
 .. toctree::
     :maxdepth: 1
 
+    interfaces
     quickstart
     package_discovery
     dependency_management
diff --git a/docs/userguide/interfaces.rst b/docs/userguide/interfaces.rst
new file mode 100644
index 0000000000..b2fddd1763
--- /dev/null
+++ b/docs/userguide/interfaces.rst
@@ -0,0 +1,14 @@
+Supported Interfaces
+====================
+
+Setuptools is a complicated library with many interface surfaces and challenges. In addition to its primary purpose as a packaging build backend, Setuptools also has historically served as a standalone builder, installer, uploader, metadata provider, and more. Additionally, because it's implemented as a Python library, its entire functionality is incidentally exposed as a library.
+
+In addition to operating as a library, because newer versions of Setuptools are often used to build older (sometimes decades-old) packages, it has a high burden of stability.
+
+In order to have the ability to make sensible changes to the project, downstream developers and consumers should avoid depending on internal implementation details of the library and should rely only on the supported interfaces:
+
+- *Documented APIs* are expected to be extremely stable and have deprecation notices and periods prior to backward incompatible changes or removals.
+- *Functional and Integration tests* that capture specific behaviors and expectations about how the library and system is intended to work for outside users.
+- *Code comments and docstrings* (including in tests) may provide specific protections to limit the changes to behaviors on which a downstream consumer can rely.
+
+Depending on other behaviors is risky and subject to future breakage. If a project wishes to consider using interfaces that aren't covered above, consider requesting those interfaces to be added prior to depending on them (perhaps through a pull request implementing the change).

From 9dccfa41c351672697df031ce9a30bb4af44c573 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sun, 4 May 2025 12:24:24 -0400
Subject: [PATCH 1667/1761] Moved pbr setup into a fixture.

---
 setuptools/tests/fixtures.py              | 43 +++++++++++++++++++
 setuptools/tests/integration/test_pbr.py  |  0
 setuptools/tests/test_editable_install.py | 50 ++++++-----------------
 3 files changed, 55 insertions(+), 38 deletions(-)
 create mode 100644 setuptools/tests/integration/test_pbr.py

diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py
index 0736f14c16..27a169896f 100644
--- a/setuptools/tests/fixtures.py
+++ b/setuptools/tests/fixtures.py
@@ -7,6 +7,7 @@ import time
 from pathlib import Path
 
+import jaraco.path
 import path
 import pytest
 
@@ -347,3 +348,45 @@ def create_setup_requires_package(
     make_package(foobar_path, distname, version)
 
     return test_pkg
+
+
+@pytest.fixture
+def pbr_package(tmp_path, monkeypatch, venv):
+    files = {
+        "pyproject.toml": DALS(
+            """
+            [build-system]
+            requires = ["setuptools"]
+            build-backend = "setuptools.build_meta"
+            """
+        ),
+        "setup.py": DALS(
+            """
+            __import__('setuptools').setup(
+                pbr=True,
+                setup_requires=["pbr"],
+            )
+            """
+        ),
+        "setup.cfg": DALS(
+            """
+            [metadata]
+            name = mypkg
+
+            [files]
+            packages =
+                mypkg
+            """
+        ),
+        "mypkg": {
+            "__init__.py": "",
+            "hello.py": "print('Hello world!')",
+        },
+        "other": {"test.txt": "Another file in here."},
+    }
+    venv.run(["python", "-m", "pip", "install", "pbr"])
+    prefix = tmp_path / 'mypkg'
+    prefix.mkdir()
+    jaraco.path.build(files, prefix=prefix)
+    monkeypatch.setenv('PBR_VERSION', "0.42")
+    return prefix
diff --git a/setuptools/tests/integration/test_pbr.py b/setuptools/tests/integration/test_pbr.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py
index 038dcadf93..5d85f2fb6e 100644
--- a/setuptools/tests/test_editable_install.py
+++ b/setuptools/tests/test_editable_install.py
@@ -1068,45 +1068,19 @@ def test_compat_install(tmp_path, venv):
     assert "cannot import name 'subpackage'" in out
 
 
-def test_pbr_integration(tmp_path, venv, editable_opts):
+def test_pbr_integration(pbr_package, venv, editable_opts):
     """Ensure editable installs work with pbr, issue #3500"""
-    files = {
-        "pyproject.toml": dedent(
-            """\
-            [build-system]
-            requires = ["setuptools"]
-            build-backend = "setuptools.build_meta"
-            """
-        ),
-        "setup.py": dedent(
-            """\
-            __import__('setuptools').setup(
-                pbr=True,
-                setup_requires=["pbr"],
-            )
-            """
-        ),
-        "setup.cfg": dedent(
-            """\
-            [metadata]
-            name = mypkg
-
-            [files]
-            packages =
-                mypkg
-            """
-        ),
-        "mypkg": {
-            "__init__.py": "",
-            "hello.py": "print('Hello world!')",
-        },
-        "other": {"test.txt": "Another file in here."},
-    }
-    venv.run(["python", "-m", "pip", "install", "pbr"])
-
-    with contexts.environment(PBR_VERSION="0.42"):
-        install_project("mypkg", venv, tmp_path, files, *editable_opts)
-
+    cmd = [
+        'python',
+        '-m',
+        'pip',
+        '-v',
+        'install',
+        '--editable',
+        pbr_package,
+        *editable_opts,
+    ]
+    venv.run(cmd, stderr=subprocess.STDOUT)
     out = venv.run(["python", "-c", "import mypkg.hello"])
     assert "Hello world!" in out

From 88bd892e78c8b5a6855eac06383726a84a91a45e Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sun, 4 May 2025 12:34:11 -0400
Subject: [PATCH 1668/1761] Add a failing integration test.

Ref #4976
---
 setuptools/tests/integration/test_pbr.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/setuptools/tests/integration/test_pbr.py b/setuptools/tests/integration/test_pbr.py
index e69de29bb2..6666e2d82f 100644
--- a/setuptools/tests/integration/test_pbr.py
+++ b/setuptools/tests/integration/test_pbr.py
@@ -0,0 +1,20 @@
+import subprocess
+
+import pytest
+
+
+@pytest.mark.xfail(reason="#4976")
+def test_pbr_integration(pbr_package, venv):
+    """Ensure pbr packages install."""
+    cmd = [
+        'python',
+        '-m',
+        'pip',
+        '-v',
+        'install',
+        '--no-build-isolation',
+        pbr_package,
+    ]
+    venv.run(cmd, stderr=subprocess.STDOUT)
+    out = venv.run(["python", "-c", "import mypkg.hello"])
+    assert "Hello world!" in out

From d6229353cd459aea9ccb70a4b76dfae1261a1270 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sun, 4 May 2025 12:38:02 -0400
Subject: [PATCH 1669/1761] Restore ScriptWriter and sys_executable
 properties.

Fixes #4976
---
 setuptools/command/easy_install.py       | 9 +++++++++
 setuptools/tests/integration/test_pbr.py | 3 ---
 2 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index c4bc88d589..de6c871c9a 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -1,5 +1,14 @@
+import os
+import sys
+
 from setuptools import Command
 
+from .. import _scripts
+
 
 class easy_install(Command):
     """Stubbed command for temporary pbr compatibility."""
+
+
+ScriptWriter = _scripts.ScriptWriter
+sys_executable = os.environ.get("__PYVENV_LAUNCHER__", os.path.normpath(sys.executable))
diff --git a/setuptools/tests/integration/test_pbr.py b/setuptools/tests/integration/test_pbr.py
index 6666e2d82f..a3e4e095c8 100644
--- a/setuptools/tests/integration/test_pbr.py
+++ b/setuptools/tests/integration/test_pbr.py
@@ -1,9 +1,6 @@
 import subprocess
 
-import pytest
-
-
-@pytest.mark.xfail(reason="#4976")
 def test_pbr_integration(pbr_package, venv):
     """Ensure pbr packages install."""
     cmd = [

From 30c00380093b1a7ff5693f98d06ab4fa4f8923cf Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sun, 4 May 2025 12:41:44 -0400
Subject: [PATCH 1670/1761] Render the attributes dynamically.
--- setuptools/command/easy_install.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index de6c871c9a..c96fed1bb2 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -1,5 +1,6 @@ import os import sys +import types from setuptools import Command @@ -10,5 +11,14 @@ class easy_install(Command): """Stubbed command for temporary pbr compatibility.""" -ScriptWriter = _scripts.ScriptWriter -sys_executable = os.environ.get("__PYVENV_LAUNCHER__", os.path.normpath(sys.executable)) +def __getattr__(name): + attr = getattr( + types.SimpleNamespace( + ScriptWriter=_scripts.ScriptWriter, + sys_executable=os.environ.get( + "__PYVENV_LAUNCHER__", os.path.normpath(sys.executable) + ), + ), + name, + ) + return attr From 5b39e4e50510e62902260fd4a437143cbf42c7f8 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 4 May 2025 12:55:13 -0400 Subject: [PATCH 1671/1761] Add the deprecation warning to attribute access. --- setuptools/command/easy_install.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index c96fed1bb2..8765793d4c 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -4,7 +4,7 @@ from setuptools import Command -from .. import _scripts +from .. import _scripts, warnings class easy_install(Command): @@ -21,4 +21,10 @@ def __getattr__(name): ), name, ) + warnings.SetuptoolsDeprecationWarning.emit( + summary="easy_install module is deprecated", + details="Avoid accessing attributes of setuptools.command.easy_install.", + due_date=(2025, 10, 31), + see_url="https://github.com/pypa/setuptools/issues/4976", + ) return attr From 05cf544d23b8bbe5f914d198c2620abced8b7477 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 4 May 2025 12:57:52 -0400 Subject: [PATCH 1672/1761] Add news fragment. --- newsfragments/4976.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4976.bugfix.rst diff --git a/newsfragments/4976.bugfix.rst b/newsfragments/4976.bugfix.rst new file mode 100644 index 0000000000..27a988a5fd --- /dev/null +++ b/newsfragments/4976.bugfix.rst @@ -0,0 +1 @@ +Restored select attributes in easy_install for temporary pbr compatibility. From f37845bce6bb06ec25c24cf30210a485e945d21e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 4 May 2025 14:28:23 -0400 Subject: [PATCH 1673/1761] =?UTF-8?q?Bump=20version:=2080.3.0=20=E2=86=92?= =?UTF-8?q?=2080.3.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 9 +++++++++ newsfragments/4976.bugfix.rst | 1 - pyproject.toml | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 newsfragments/4976.bugfix.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 49a31480ad..90aedc3ae7 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 80.3.0 +current_version = 80.3.1 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 3978958837..c6e7f938d5 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,12 @@ +v80.3.1 +======= + +Bugfixes +-------- + +- Restored select attributes in easy_install for temporary pbr compatibility. 
(#4976) + + v80.3.0 ======= diff --git a/newsfragments/4976.bugfix.rst b/newsfragments/4976.bugfix.rst deleted file mode 100644 index 27a988a5fd..0000000000 --- a/newsfragments/4976.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Restored select attributes in easy_install for temporary pbr compatibility. diff --git a/pyproject.toml b/pyproject.toml index 51061e6c58..8a9c4f82ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "80.3.0" +version = "80.3.1" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From 867396152fcb99055795120750dfda53f85bb414 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 4 May 2025 22:06:52 +0200 Subject: [PATCH 1674/1761] Python 3 is the default nowadays (jaraco/skeleton#173) --- .github/workflows/main.yml | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 928acf2ca6..8029497018 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -63,7 +63,7 @@ jobs: sudo apt update sudo apt install -y libxml2-dev libxslt-dev - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} allow-prereleases: true @@ -85,9 +85,7 @@ jobs: with: fetch-depth: 0 - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: 3.x + uses: actions/setup-python@v5 - name: Install tox run: python -m pip install tox - name: Eval ${{ matrix.job }} @@ -119,9 +117,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: 3.x + uses: actions/setup-python@v5 - name: Install tox run: python -m pip install tox - name: Run From d713048198301c43169812f0e5f1416af2bb4204 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Mon, 5 May 2025 07:04:59 +0200 Subject: [PATCH 1675/1761] Mark new tests as `uses_network` Mark a few new tests that try to install stuff from PyPI via `pip` as `uses_network`. 
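
For context: ``uses_network`` is a custom pytest marker. Markers like this are declared in a ``conftest.py`` (or an ini file) and can then be used to deselect tests on machines without connectivity. The registration below is only an illustrative sketch; the exact wiring in setuptools' test suite is not shown in this patch:

    # conftest.py -- illustrative only
    def pytest_configure(config):
        config.addinivalue_line(
            "markers", "uses_network: test requires access to the Internet"
        )

With that in place, offline runs can skip the marked tests wholesale via ``pytest -m "not uses_network"``.
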
--- setuptools/tests/integration/test_pbr.py | 3 +++ setuptools/tests/test_develop.py | 1 + setuptools/tests/test_editable_install.py | 1 + 3 files changed, 5 insertions(+) diff --git a/setuptools/tests/integration/test_pbr.py b/setuptools/tests/integration/test_pbr.py index a3e4e095c8..f89e5b8b21 100644 --- a/setuptools/tests/integration/test_pbr.py +++ b/setuptools/tests/integration/test_pbr.py @@ -1,6 +1,9 @@ import subprocess +import pytest + +@pytest.mark.uses_network def test_pbr_integration(pbr_package, venv): """Ensure pbr packages install.""" cmd = [ diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py index 67495b89c8..354c51fc3c 100644 --- a/setuptools/tests/test_develop.py +++ b/setuptools/tests/test_develop.py @@ -70,6 +70,7 @@ def install_develop(src_dir, target): platform.python_implementation() == 'PyPy', reason="https://github.com/pypa/setuptools/issues/1202", ) + @pytest.mark.uses_network def test_namespace_package_importable(self, tmpdir): """ Installing two packages sharing the same namespace, one installed diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index 5d85f2fb6e..031f363846 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -1068,6 +1068,7 @@ def test_compat_install(tmp_path, venv): assert "cannot import name 'subpackage'" in out +@pytest.mark.uses_network def test_pbr_integration(pbr_package, venv, editable_opts): """Ensure editable installs work with pbr, issue #3500""" cmd = [ From 8cbafe3017f243b249c7711a14519be3396a84f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Mon, 5 May 2025 07:08:46 +0200 Subject: [PATCH 1676/1761] Add a news fragment --- newsfragments/4982.misc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4982.misc.rst diff --git a/newsfragments/4982.misc.rst b/newsfragments/4982.misc.rst new file mode 100644 index 0000000000..8e40754306 --- /dev/null +++ b/newsfragments/4982.misc.rst @@ -0,0 +1 @@ +Mark more tests failing without Internet access as ``uses_network``. From aa911c6db2c10c5ac022afaa8d6da1e6c5688524 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 6 May 2025 16:53:49 -0400 Subject: [PATCH 1677/1761] By default, provide a much more concise error message. Fixes #4984 --- setuptools/command/editable_wheel.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index 917d5f149b..98fdbbdd8d 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -138,9 +138,12 @@ def run(self) -> None: self._create_wheel_file(bdist_wheel) except Exception: - traceback.print_exc() project = self.distribution.name or self.distribution.get_name() - _DebuggingTips.emit(project=project) + if os.environ.get('SETUPTOOLS_INTERNAL_DEBUG'): + traceback.print_exc() + _DebuggingTips.emit(project=project) + else: + print("An error occurred building editable wheel for", project) raise def _ensure_dist_info(self): From bcc23a221e4d811bd37f5fe73d08f4013890cfb0 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 9 May 2025 16:16:58 -0400 Subject: [PATCH 1678/1761] Implement the editable debugging tips as a reference to the docs. 
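
This patch replaces the verbose warning with a note attached directly to the raised exception. The underlying mechanism is PEP 678 exception notes; here is a small sketch, including a backport equivalent to the ``py310.add_note`` helper introduced below (the error being raised is invented for illustration):

    def add_note(ex, note):
        # Python 3.11+ provides BaseException.add_note(); older versions
        # can simply append to the __notes__ list that traceback
        # rendering consults.
        if hasattr(ex, "add_note"):
            ex.add_note(note)
        else:
            vars(ex).setdefault("__notes__", []).append(note)

    try:
        raise ValueError("simulated editable-wheel failure")  # invented error
    except ValueError as ex:
        add_note(ex, "See debugging tips in the docs")
        print(getattr(ex, "__notes__", None))  # ['See debugging tips in the docs']
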
---
 docs/userguide/development_mode.rst       | 19 ++++++++++
 setuptools/command/editable_wheel.py      | 43 +++++------------------
 setuptools/compat/py310.py                | 11 ++++++
 setuptools/tests/test_editable_install.py |  5 ++-
 4 files changed, 41 insertions(+), 37 deletions(-)

diff --git a/docs/userguide/development_mode.rst b/docs/userguide/development_mode.rst
index 3eabe87fcb..881bc3a920 100644
--- a/docs/userguide/development_mode.rst
+++ b/docs/userguide/development_mode.rst
@@ -229,6 +229,25 @@ More information is available on the text of :pep:`PEP 660 <660#what-to-put-in-t
 used.
 
+
+Debugging Tips
+--------------
+
+If encountering problems installing a project in editable mode,
+follow these recommended steps to help debug:
+
+- Try to install the project normally, without using the editable mode.
+  Does the error still persist?
+  (If it does, try fixing the problem before attempting the editable mode).
+- When using binary extensions, make sure all OS-level
+  dependencies are installed (e.g. compilers, toolchains, binary libraries, ...).
+- Try the latest version of setuptools (maybe the error was already fixed).
+- When the project or its dependencies are using any setuptools extension
+  or customization, make sure they support the editable mode.
+
+After following the steps above, if the problem still persists and
+you think this is related to how setuptools handles editable installations,
+please submit a `reproducible example <https://stackoverflow.com/help/minimal-reproducible-example>`_ at `the bug tracker <https://github.com/pypa/setuptools/issues>`_.
+
 ----
 
 .. rubric:: Notes
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 98fdbbdd8d..c772570817 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -29,10 +29,10 @@ from .. import Command, _normalization, _path, _shutil, errors, namespaces
 from .._path import StrPath
-from ..compat import py312
+from ..compat import py310, py312
 from ..discovery import find_package_path
 from ..dist import Distribution
-from ..warnings import InformationOnly, SetuptoolsDeprecationWarning, SetuptoolsWarning
+from ..warnings import InformationOnly, SetuptoolsDeprecationWarning
 from .build import build as build_cls
 from .build_py import build_py as build_py_cls
 from .dist_info import dist_info as dist_info_cls
@@ -137,13 +137,14 @@ def run(self) -> None:
             bdist_wheel.write_wheelfile(self.dist_info_dir)
 
             self._create_wheel_file(bdist_wheel)
-        except Exception:
+        except Exception as ex:
             project = self.distribution.name or self.distribution.get_name()
-            if os.environ.get('SETUPTOOLS_INTERNAL_DEBUG'):
-                traceback.print_exc()
-                _DebuggingTips.emit(project=project)
-            else:
-                print("An error occurred building editable wheel for", project)
+            py310.add_note(
+                ex,
+                f"An error occurred when building editable wheel for {project}.\n"
+                "See debugging tips in: "
+                "https://setuptools.pypa.io/en/latest/userguide/development_mode.html#debugging-tips",
+            )
             raise
 
     def _ensure_dist_info(self):
@@ -905,29 +906,3 @@ def _finder_template(
 
 class LinksNotSupported(errors.FileError):
     """File system does not seem to support either symlinks or hard links."""
-
-
-class _DebuggingTips(SetuptoolsWarning):
-    _SUMMARY = "Problem in editable installation."
-    _DETAILS = """
-    An error happened while installing `{project}` in editable mode.
-
-    The following steps are recommended to help debug this problem:
-
-    - Try to install the project normally, without using the editable mode.
- - If you are using binary extensions, make sure you have all OS-level - dependencies installed (e.g. compilers, toolchains, binary libraries, ...). - - Try the latest version of setuptools (maybe the error was already fixed). - - If you (or your project dependencies) are using any setuptools extension - or customization, make sure they support the editable mode. - - After following the steps above, if the problem still persists and - you think this is related to how setuptools handles editable installations, - please submit a reproducible example - (see https://stackoverflow.com/help/minimal-reproducible-example) to: - - https://github.com/pypa/setuptools/issues - """ - _SEE_DOCS = "userguide/development_mode.html" diff --git a/setuptools/compat/py310.py b/setuptools/compat/py310.py index b3912f8e02..58a4d9f366 100644 --- a/setuptools/compat/py310.py +++ b/setuptools/compat/py310.py @@ -7,3 +7,14 @@ import tomllib else: # pragma: no cover import tomli as tomllib + + +if sys.version_info >= (3, 11): + + def add_note(ex, note): + ex.add_note(note) + +else: # pragma: no cover + + def add_note(ex, note): + vars(ex).setdefault('__notes__', []).append(note) diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py index 5d85f2fb6e..41b5887a22 100644 --- a/setuptools/tests/test_editable_install.py +++ b/setuptools/tests/test_editable_install.py @@ -21,7 +21,6 @@ from setuptools._importlib import resources as importlib_resources from setuptools.command.editable_wheel import ( - _DebuggingTips, _encode_pth, _find_namespaces, _find_package_roots, @@ -1201,9 +1200,9 @@ def test_debugging_tips(tmpdir_cwd, monkeypatch): simulated_failure = Mock(side_effect=SimulatedErr()) monkeypatch.setattr(cmd, "get_finalized_command", simulated_failure) - expected_msg = "following steps are recommended to help debug" - with pytest.raises(SimulatedErr), pytest.warns(_DebuggingTips, match=expected_msg): + with pytest.raises(SimulatedErr) as ctx: cmd.run() + assert any('debugging-tips' in note for note in ctx.value.__notes__) @pytest.mark.filterwarnings("error") From af2f2baf5b4ee81ed45a003070d68badf3f10b11 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 9 May 2025 16:23:15 -0400 Subject: [PATCH 1679/1761] Add news fragment. --- newsfragments/4984.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4984.feature.rst diff --git a/newsfragments/4984.feature.rst b/newsfragments/4984.feature.rst new file mode 100644 index 0000000000..b9f51c6525 --- /dev/null +++ b/newsfragments/4984.feature.rst @@ -0,0 +1 @@ +Simplified the error reporting in editable installs. From a82f96dc43cbfb9968b100256cb50702becd614e Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Fri, 9 May 2025 16:23:59 -0400 Subject: [PATCH 1680/1761] =?UTF-8?q?Bump=20version:=2080.3.1=20=E2=86=92?= =?UTF-8?q?=2080.4.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 9 +++++++++ newsfragments/4984.feature.rst | 1 - pyproject.toml | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 newsfragments/4984.feature.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 90aedc3ae7..f6554824fd 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 80.3.1 +current_version = 80.4.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index c6e7f938d5..2de478188f 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,12 @@ +v80.4.0 +======= + +Features +-------- + +- Simplified the error reporting in editable installs. (#4984) + + v80.3.1 ======= diff --git a/newsfragments/4984.feature.rst b/newsfragments/4984.feature.rst deleted file mode 100644 index b9f51c6525..0000000000 --- a/newsfragments/4984.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Simplified the error reporting in editable installs. diff --git a/pyproject.toml b/pyproject.toml index 8a9c4f82ed..b61a77e4cc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "80.3.1" +version = "80.4.0" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From d2b8d7750f78e870def98c4e04053af4acc86e29 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sat, 10 May 2025 12:32:22 -0400 Subject: [PATCH 1681/1761] Add coherent.licensed plugin to inject license texts into the build. Closes jaraco/skeleton#174 --- pyproject.toml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index fa0c801fba..bda001a438 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,10 @@ [build-system] -requires = ["setuptools>=61.2", "setuptools_scm[toml]>=3.4.1"] +requires = [ + "setuptools>=61.2", + "setuptools_scm[toml]>=3.4.1", + # jaraco/skeleton#174 + "coherent.licensed", +] build-backend = "setuptools.build_meta" [project] From b535e75e95389eb8a16e34b238e2483f498593c8 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sat, 10 May 2025 18:47:43 +0200 Subject: [PATCH 1682/1761] Revert "Python 3 is the default nowadays (jaraco/skeleton#173)" (jaraco/skeleton#175) This reverts commit 867396152fcb99055795120750dfda53f85bb414. Removing `python-version` falls back on the Python bundled with the runner, making actions/setup-python a no-op. Here, the maintainer prefers using the latest release of Python 3. This is what `3.x` means: use the latest release of Python 3. 
--- .github/workflows/main.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8029497018..53513eee9b 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -86,6 +86,8 @@ jobs: fetch-depth: 0 - name: Setup Python uses: actions/setup-python@v5 + with: + python-version: 3.x - name: Install tox run: python -m pip install tox - name: Eval ${{ matrix.job }} @@ -118,6 +120,8 @@ jobs: - uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v5 + with: + python-version: 3.x - name: Install tox run: python -m pip install tox - name: Run From 71183cbee6afd73a02da3c34bf2ef78d4b67e6f1 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 6 May 2025 20:32:04 +0100 Subject: [PATCH 1683/1761] Suggestions for documentation about public interfaces --- docs/conf.py | 1 + docs/userguide/interfaces.rst | 170 +++++++++++++++++++++++++++++++++- 2 files changed, 167 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d38fad28a0..3ddba1b505 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -241,6 +241,7 @@ 'pip': ('https://pip.pypa.io/en/latest', None), 'build': ('https://build.pypa.io/en/latest', None), 'PyPUG': ('https://packaging.python.org/en/latest', None), + 'pytest': ('https://docs.pytest.org/en/stable', None), 'packaging': ('https://packaging.pypa.io/en/latest', None), 'twine': ('https://twine.readthedocs.io/en/stable', None), 'importlib-resources': ( diff --git a/docs/userguide/interfaces.rst b/docs/userguide/interfaces.rst index b2fddd1763..fff1e1d5a9 100644 --- a/docs/userguide/interfaces.rst +++ b/docs/userguide/interfaces.rst @@ -7,8 +7,170 @@ In addition to operating as a library, because newer versions of Setuptools are In order to have the ability to make sensible changes to the project, downstream developers and consumers should avoid depending on internal implementation details of the library and should rely only on the supported interfaces: -- *Documented APIs* are expected to be extremely stable and have deprecation notices and periods prior to backward incompatible changes or removals. -- *Functional and Integration tests* that capture specific behaviors and expectations about how the library and system is intended to work for outside users. -- *Code comments and docstrings* (including in tests) may provide specific protections to limit the changes to behaviors on which a downstream consumer can rely. +- *Tier 1*: APIs required by modern PyPA packaging standards (:pep:`517`, :pep:`660`) and Documented APIs for customising build behavior or creating plugins (:doc:`/userguide/extension`, :doc:`references/keywords`): -Depending on other behaviors is risky and subject to future breakage. If a project wishes to consider using interfaces that aren't covered above, consider requesting those interfaces to be added prior to depending on them (perhaps through a pull request implementing the change). + These APIs are expected to be extremely stable and have deprecation notices and periods prior to backward incompatible changes or removals. + + Please note that *functional and integration tests* capture specific behaviors and expectations about how the library and system is intended to work for outside users; + and *code comments and docstrings* (including in tests) may provide specific protections to limit the changes to behaviors on which a downstream consumer can rely. 
+
+- *Tier 2*: Documented ``distutils`` APIs:
+
+  ``setuptools`` strives to honor the interfaces provided by ``distutils`` and
+  will coordinate with the ``pypa/distutils`` repository so that the
+  appropriate deprecation notices are issued.
+
+  In principle, these are documented in :doc:`/deprecated/distutils/apiref`.
+  Please note however that when a suitable replacement is available or advised,
+  the existing ``distutils`` API is considered deprecated and should not be used
+  (see :pep:`632#migration-advice` and :doc:`/deprecated/distutils-legacy`).
+
+Depending on other behaviors is risky and subject to future breakage. If a project wishes to consider using interfaces that aren't covered above, consider requesting those interfaces to be added prior to depending on them (perhaps through a pull request implementing the change and relevant regression tests).
+
+Please check further information about deprecated and unsupported behaviors in :doc:`/deprecated/index`.
+
+
+Support Policy Exceptions
+-------------------------
+
+Behaviors and interfaces explicitly documented/advertised as deprecated,
+or that :obj:`issue deprecation warnings `
+will be supported up to the end of the announced deprecation period.
+
+However, there are a few circumstances in which the Setuptools maintainers
+reserve the right to speed up the deprecation cycle and shorten deprecation periods:
+
+1. When security vulnerabilities are identified in specific code paths and
+   reworking the existing APIs is not viable.
+2. When standards in the Python packaging ecosystem externally drive non-backward
+   compatible changes in the code base.
+3. When changes in behavior are externally driven by 3rd-party dependencies
+   and code maintained outside of the ``pypa/setuptools`` repository.
+
+Note that these are exceptional circumstances and that the project will
+carefully attempt to find alternatives before resorting to unscheduled removals.
+
+
+What to do when deprecation periods are undefined?
+--------------------------------------------------
+
+In some cases it is difficult to define how long Setuptools will take
+to remove certain features, behaviors or APIs.
+For example, it may be complicated to assess how widespread the usage
+of a certain feature is in the ecosystem.
+
+Therefore, Setuptools may start to issue deprecation warnings without a clear due date.
+This occurs because we want to notify consumers about upcoming breaking
+changes as soon as possible so that they can start working on migration plans.
+
+This does not mean that users should treat this deprecation as low priority or
+interpret the lack of a due date as a signal that a breaking change will never happen.
+
+The advised course of action is for users to create a migration plan
+as soon as they identify that they are subject to a Setuptools deprecation.
+
+Setuptools may introduce relatively short deprecation periods (e.g. 6 months)
+when a deprecation warning has already been issued for a long period without an
+explicit due date.
+
+
+How to stay on top of upcoming deprecations?
+--------------------------------------------
+
+It is a good idea to employ an automated test suite with relatively good
+coverage in your project and keep an eye on the logs.
+You can also automate this process by forwarding the standard output/error
+streams to a log file and using heuristics to identify deprecations
+(e.g. by searching for the word ``deprecation`` or ``deprecated``).
+You may need to increase the level of verbosity of your output as
+some tools may hide log messages by default (e.g. via ``pip -vv install ...``).
+
+Additionally, if you are supporting a project that depends on Setuptools,
+you can implement a CI workflow that leverages
+:external+python:ref:`Python warning filters `
+to improve the visibility of warnings.
+
+This workflow can consist, for example, of 3 iterative steps that require
+developers to acknowledge the deprecation warnings:
+
+1. Leverage the Python warnings filter to transform warnings into exceptions during automated tests.
+2. Devise a migration plan:
+
+   - It is a good idea to track deprecations as if they were issues,
+     and apply project management techniques to monitor the progress in handling them.
+   - Determine which parts of your code are affected and understand
+     the changes required to eliminate the warnings.
+
+3. Modify the warnings filter you are using in the CI to not fail
+   with the newly identified exceptions (e.g. by using the ``default`` action
+   with a specific category or regular expression for the warning message).
+   This can be done globally for the whole test suite or locally on a
+   test-by-test basis.
+
+Test tools like :pypi:`pytest` offer CLI and configuration options
+to facilitate controlling the warnings filter (see :external+pytest:doc:`how-to/capture-warnings`).
+
+Note that there are many ways to incorporate such a workflow in your CI.
+For example, if you have enough deployment resources and consider
+deprecation warning management to be a day-to-day development task,
+you can set the warnings filter directly in your main CI loop.
+On the other hand, if you have critical timelines and cannot afford CI jobs
+occasionally failing just to flag maintenance work, you can consider scheduling a
+periodic CI run separate from your main/mission-critical workflow.
+
+
+What does "support" mean?
+-------------------------
+
+Setuptools is a non-profit community-driven open source project and as such
+the word "support" is used in a best-effort manner and with limited scope.
+For example, it is not always possible to quickly provide fixes for bugs.
+
+We appreciate the patience of the community and incentivise users
+impacted by bugs to contribute to fixes in the form of
+:doc:`PR submissions `, to speed up the process.
+
+When we say "a certain feature is supported" we mean that we will do our best
+to ensure this feature keeps working as documented.
+Note however that, as in any system, unintended breakages may happen.
+We appreciate the community's understanding and `considerate feedback`_.
+
+.. _considerate feedback: https://opensource.how/etiquette/
+
+
+What to do after the deprecation period ends?
+---------------------------------------------
+
+If you have limited development resources and are not able to
+devise a migration plan before Setuptools removes a deprecated feature,
+you can still resort to restricting the version of Setuptools to be installed.
+This usually includes modifying ``[build-system] requires`` in ``pyproject.toml``
+and/or specifying ``pip`` :external+pip:ref:`Constraints Files` via
+the ``PIP_CONSTRAINT`` environment variable (or passing |build-constraint-uv|_).
+Please avoid, however, pre-emptively adding version constraints when not necessary
+(you can read more about this in https://iscinumpy.dev/post/bound-version-constraints/).
+
+.. |build-constraint-uv| replace:: ``--build-constraint`` to ``uv``
+.. _build-constraint-uv: https://docs.astral.sh/uv/concepts/projects/build/#build-constraints
+
+
+A note on "Public Names"
+------------------------
+
+Python devs may be used to the convention that private members are prefixed
+with an ``_`` (underscore) character and that any member not marked this
+way is public. Due to the history and legacy of Setuptools this is not necessarily
+the case [#private]_.
+
+In this project, "public interfaces" are defined as interfaces explicitly
+documented for 3rd party consumption.
+
+When accessing a member in the ``setuptools`` package, please make sure it is
+documented for external usage. Also note that names imported from different
+modules/submodules are considered internal implementation details unless
+explicitly listed in ``__all__``. The fact that they are accessible in the
+namespace of the ``import``-er module is a mere side effect of the way Python works.
+
+.. [#private]
+   While names prefixed by ``_`` are always considered private,
+   the absence of the prefix does not necessarily signal a public member.

From 14870dd284386999fd232c382b5dcc0d20fb6290 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sun, 11 May 2025 09:13:12 -0400
Subject: [PATCH 1684/1761] Make the quickstart primary.

---
 docs/userguide/index.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst
index c7794ee8bf..fd9947ee79 100644
--- a/docs/userguide/index.rst
+++ b/docs/userguide/index.rst
@@ -24,8 +24,8 @@ Contents
 .. toctree::
     :maxdepth: 1
 
-    interfaces
     quickstart
+    interfaces
     package_discovery
     dependency_management
     development_mode

From 0d222675d19ca577d0b6a9027f7e030af6f73015 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sun, 11 May 2025 14:27:01 -0400
Subject: [PATCH 1685/1761] Remove comment referring to pkg_resources.

This comment doesn't mean anything to me, so I doubt it's meaningful to
others. I suspect the relationship to older pkg_resources is irrelevant.

---
 setuptools/command/egg_info.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index f77631168f..7e00ae2cea 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -655,8 +655,6 @@ def write_pkg_info(cmd, basename, filename) -> None:
 
     metadata.name, oldname = cmd.egg_name, metadata.name
     try:
-        # write unescaped data to PKG-INFO, so older pkg_resources
-        # can still parse it
         metadata.write_pkg_info(cmd.egg_info)
     finally:
         metadata.name, metadata.version = oldname, oldver

From 07e32ac2b10cadfa833cc88883535216f603a7bb Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Sun, 11 May 2025 15:05:25 -0400
Subject: [PATCH 1686/1761] In bdist_egg, remove reliance on pkg_resources.

---
 pkg_resources/__init__.py       |  6 +-----
 setuptools/command/bdist_egg.py | 20 +++++++++-----------
 2 files changed, 10 insertions(+), 16 deletions(-)

diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 8a2fbfa412..630aaa9a42 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -440,11 +440,7 @@ def _macos_arch(machine):
 
 
 def get_build_platform():
-    """Return this platform's string for platform-specific distributions
-
-    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
-    needs some hacks for Linux and macOS.
- """ + """Return this platform's string for platform-specific distributions""" from sysconfig import get_platform plat = get_platform() diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index 7f66c3ba6a..b66020c863 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -9,7 +9,7 @@ import re import sys import textwrap -from sysconfig import get_path, get_python_version +from sysconfig import get_path, get_platform, get_python_version from types import CodeType from typing import TYPE_CHECKING, Literal @@ -55,12 +55,12 @@ def write_stub(resource, pyfile) -> None: """ def __bootstrap__(): global __bootstrap__, __loader__, __file__ - import sys, pkg_resources, importlib.util - __file__ = pkg_resources.resource_filename(__name__, %r) - __loader__ = None; del __bootstrap__, __loader__ - spec = importlib.util.spec_from_file_location(__name__,__file__) - mod = importlib.util.module_from_spec(spec) - spec.loader.exec_module(mod) + import sys, importlib.resources as irs, importlib.util + with irs.as_file(irs.files(__name__).joinpath(%r)) as __file__: + __loader__ = None; del __bootstrap__, __loader__ + spec = importlib.util.spec_from_file_location(__name__,__file__) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) __bootstrap__() """ ).lstrip() @@ -77,7 +77,7 @@ class bdist_egg(Command): 'plat-name=', 'p', "platform name to embed in generated filenames " - "(by default uses `pkg_resources.get_build_platform()`)", + "(by default uses `sysconfig.get_platform()`)", ), ('exclude-source-files', None, "remove all .py files from the generated egg"), ( @@ -110,9 +110,7 @@ def finalize_options(self) -> None: self.bdist_dir = os.path.join(bdist_base, 'egg') if self.plat_name is None: - from pkg_resources import get_build_platform - - self.plat_name = get_build_platform() + self.plat_name = get_platform() self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) From a8ad1ef28df961a673c8ecafeefa921fc7b57982 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 15:28:26 -0400 Subject: [PATCH 1687/1761] Convert stub into a single string for easier human parsing. --- setuptools/command/build_ext.py | 57 +++++++++++++++++---------------- 1 file changed, 29 insertions(+), 28 deletions(-) diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index be833a379c..bb8e9fda32 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -3,6 +3,7 @@ import itertools import os import sys +import textwrap from collections.abc import Iterator from importlib.machinery import EXTENSION_SUFFIXES from importlib.util import cache_from_source as _compiled_file_name @@ -74,10 +75,6 @@ def _customize_compiler_for_shlib(compiler): pass -def if_dl(s): - return s if have_rtld else '' - - def get_abi3_suffix(): """Return the file extension for an abi3-compliant Extension()""" for suffix in EXTENSION_SUFFIXES: @@ -355,30 +352,34 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False): raise BaseError(stub_file + " already exists! 
Please delete.") if not self.dry_run: with open(stub_file, 'w', encoding="utf-8") as f: - content = '\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, importlib.util" + if_dl(", dl"), - " __file__ = pkg_resources.resource_filename" - f"(__name__,{os.path.basename(ext._file_name)!r})", - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " spec = importlib.util.spec_from_file_location(", - " __name__, __file__)", - " mod = importlib.util.module_from_spec(spec)", - " spec.loader.exec_module(mod)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "", # terminal \n - ]) + content = ( + textwrap.dedent(f""" + def __bootstrap__(): + global __bootstrap__, __file__, __loader__ + import sys, os, pkg_resources, importlib.util + #rtld import dl + __file__ = pkg_resources.resource_filename(__name__, + {os.path.basename(ext._file_name)!r}) + del __bootstrap__ + if '__loader__' in globals(): + del __loader__ + #rtld old_flags = sys.getdlopenflags() + old_dir = os.getcwd() + try: + os.chdir(os.path.dirname(__file__)) + #rtld sys.setdlopenflags(dl.RTLD_NOW) + spec = importlib.util.spec_from_file_location( + __name__, __file__) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + finally: + #rtld sys.setdlopenflags(old_flags) + os.chdir(old_dir) + __bootstrap__() + """) + .lstrip() + .replace('#rtld', '#rtld' * (not have_rtld)) + ) f.write(content) if compile: self._compile_and_remove_stub(stub_file) From 1bc752c84bc7b489d311feca2e89b2a902b90b87 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 15:31:42 -0400 Subject: [PATCH 1688/1761] In build_ext, remove reliance on pkg_resources. 
--- setuptools/command/build_ext.py | 36 ++++++++++++++++----------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index bb8e9fda32..af73fff7a5 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -356,25 +356,25 @@ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False): textwrap.dedent(f""" def __bootstrap__(): global __bootstrap__, __file__, __loader__ - import sys, os, pkg_resources, importlib.util + import sys, os, importlib.resources as irs, importlib.util #rtld import dl - __file__ = pkg_resources.resource_filename(__name__, - {os.path.basename(ext._file_name)!r}) - del __bootstrap__ - if '__loader__' in globals(): - del __loader__ - #rtld old_flags = sys.getdlopenflags() - old_dir = os.getcwd() - try: - os.chdir(os.path.dirname(__file__)) - #rtld sys.setdlopenflags(dl.RTLD_NOW) - spec = importlib.util.spec_from_file_location( - __name__, __file__) - mod = importlib.util.module_from_spec(spec) - spec.loader.exec_module(mod) - finally: - #rtld sys.setdlopenflags(old_flags) - os.chdir(old_dir) + with irs.files(__name__).joinpath( + {os.path.basename(ext._file_name)!r}) as __file__: + del __bootstrap__ + if '__loader__' in globals(): + del __loader__ + #rtld old_flags = sys.getdlopenflags() + old_dir = os.getcwd() + try: + os.chdir(os.path.dirname(__file__)) + #rtld sys.setdlopenflags(dl.RTLD_NOW) + spec = importlib.util.spec_from_file_location( + __name__, __file__) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + finally: + #rtld sys.setdlopenflags(old_flags) + os.chdir(old_dir) __bootstrap__() """) .lstrip() From 956e3d72922083b3003ae87da9f81fcc95cbefb0 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 15:54:26 -0400 Subject: [PATCH 1689/1761] Converted install_scripts and _scripts to prefer importlib.metadata over pkg_resources. --- setuptools/_scripts.py | 11 ++++++----- setuptools/command/install_scripts.py | 10 ++-------- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/setuptools/_scripts.py b/setuptools/_scripts.py index e3e8a191d4..4c5d4ee9fd 100644 --- a/setuptools/_scripts.py +++ b/setuptools/_scripts.py @@ -169,13 +169,14 @@ def get_args(cls, dist, header=None): """ if header is None: header = cls.get_header() - spec = str(dist.as_requirement()) + spec = f'{dist.name}=={dist.version}' for type_ in 'console', 'gui': - group = type_ + '_scripts' - for name in dist.get_entry_map(group).keys(): - cls._ensure_safe_name(name) + group = f'{type_}_scripts' + for ep in dist.entry_points.select(group=group): + name = ep.name + cls._ensure_safe_name(ep.name) script_text = cls.template % locals() - args = cls._get_script_args(type_, name, header, script_text) + args = cls._get_script_args(type_, ep.name, header, script_text) yield from args @staticmethod diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py index 02a73e818b..537181e321 100644 --- a/setuptools/command/install_scripts.py +++ b/setuptools/command/install_scripts.py @@ -32,17 +32,11 @@ def run(self) -> None: def _install_ep_scripts(self): # Delay import side-effects - from pkg_resources import Distribution, PathMetadata - from .. 
import _scripts + from .._importlib import metadata ei_cmd = self.get_finalized_command("egg_info") - dist = Distribution( - ei_cmd.egg_base, - PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), - ei_cmd.egg_name, - ei_cmd.egg_version, - ) + dist = metadata.Distribution.at(path=ei_cmd.egg_info) bs_cmd = self.get_finalized_command('build_scripts') exec_param = getattr(bs_cmd, 'executable', None) writer = _scripts.ScriptWriter From 8d10cd10836b929498d209d277b3c3050d24cc21 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 16:06:11 -0400 Subject: [PATCH 1690/1761] Add a compatibility shim and warning for legacy use on ScriptWriter.get_args --- setuptools/_scripts.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/setuptools/_scripts.py b/setuptools/_scripts.py index 4c5d4ee9fd..f9365c66b3 100644 --- a/setuptools/_scripts.py +++ b/setuptools/_scripts.py @@ -13,6 +13,8 @@ import pkg_resources +from ._importlib import metadata + if TYPE_CHECKING: from typing_extensions import Self @@ -167,6 +169,13 @@ def get_args(cls, dist, header=None): Yield write_script() argument tuples for a distribution's console_scripts and gui_scripts entry points. """ + + # If distribution is not an importlib.metadata.Distribution, assume + # it's a pkg_resources.Distribution and transform it. + if not hasattr(dist, 'entry_points'): + SetuptoolsWarning.emit("Unsupported distribution encountered.") + dist = metadata.Distribution.at(dist.egg_info) + if header is None: header = cls.get_header() spec = f'{dist.name}=={dist.version}' From 1a8fdb5fc43e237904c601174bd60da594f28172 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 16:10:57 -0400 Subject: [PATCH 1691/1761] Prefer importlib.resources in _scripts. --- setuptools/_scripts.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/setuptools/_scripts.py b/setuptools/_scripts.py index f9365c66b3..88bf02f927 100644 --- a/setuptools/_scripts.py +++ b/setuptools/_scripts.py @@ -11,9 +11,7 @@ from collections.abc import Iterable from typing import TYPE_CHECKING, TypedDict -import pkg_resources - -from ._importlib import metadata +from ._importlib import metadata, resources if TYPE_CHECKING: from typing_extensions import Self @@ -331,12 +329,12 @@ def get_win_launcher(type): launcher_fn = launcher_fn.replace(".", "-64.") else: launcher_fn = launcher_fn.replace(".", "-32.") - return pkg_resources.resource_string('setuptools', launcher_fn) + return resources.files('setuptools').joinpath(launcher_fn).read_bytes() def load_launcher_manifest(name): - manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') - return manifest.decode('utf-8') % vars() + res = resources.files(__name__).joinpath('launcher manifest.xml') + return res.read_text(encoding='utf-8') % vars() def _first_line_re(): From 81c6dfd749be75085902dc2caf28d56a6a5d92f9 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 16:11:52 -0400 Subject: [PATCH 1692/1761] Remove unused context manager. 
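
Recapping the ``_scripts`` rework in the patches just above: entry points are now enumerated through ``importlib.metadata`` instead of ``pkg_resources``. Roughly, the calls that ``ScriptWriter.get_args()`` builds on look like this on Python 3.10+ (older interpreters go through setuptools' ``_importlib`` shim; ``pip`` below is just a stand-in for any installed distribution):

    from importlib import metadata

    dist = metadata.distribution("pip")  # any installed project will do
    spec = f"{dist.name}=={dist.version}"  # replaces str(dist.as_requirement())
    for ep in dist.entry_points.select(group="console_scripts"):
        print(ep.name, "->", ep.value, "from", spec)
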
--- setuptools/tests/contexts.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py index 97cceea0e7..3c931bbd4f 100644 --- a/setuptools/tests/contexts.py +++ b/setuptools/tests/contexts.py @@ -74,20 +74,6 @@ def save_user_site_setting(): site.ENABLE_USER_SITE = saved -@contextlib.contextmanager -def save_pkg_resources_state(): - import pkg_resources - - pr_state = pkg_resources.__getstate__() - # also save sys.path - sys_path = sys.path[:] - try: - yield pr_state, sys_path - finally: - sys.path[:] = sys_path - pkg_resources.__setstate__(pr_state) - - @contextlib.contextmanager def suppress_exceptions(*excs): try: From 2db38b4de3a920e4a81b2dea10f593cc4e305c67 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 16:24:03 -0400 Subject: [PATCH 1693/1761] Adapt test_distinfo to test_version and migrate to use importlib.metadata. The previous reliance on pkg_resources no longer applies, so the relevant test and fixture has been removed. --- setuptools/tests/test_dist_info.py | 25 ++++--------------------- 1 file changed, 4 insertions(+), 21 deletions(-) diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py index 426694e019..7a597c1468 100644 --- a/setuptools/tests/test_dist_info.py +++ b/setuptools/tests/test_dist_info.py @@ -10,6 +10,7 @@ import pytest import pkg_resources +from setuptools._importlib import metadata as md from setuptools.archive_util import unpack_archive from .textwrap import DALS @@ -34,15 +35,6 @@ def build_metadata(cls, **kwargs): @pytest.fixture def metadata(self, tmpdir): - dist_info_name = 'VersionedDistribution-2.718.dist-info' - versioned = tmpdir / dist_info_name - versioned.mkdir() - filename = versioned / 'METADATA' - content = self.build_metadata( - Name='VersionedDistribution', - ) - filename.write_text(content, encoding='utf-8') - dist_info_name = 'UnversionedDistribution.dist-info' unversioned = tmpdir / dist_info_name unversioned.mkdir() @@ -55,18 +47,9 @@ def metadata(self, tmpdir): return str(tmpdir) - def test_distinfo(self, metadata): - dists = dict( - (d.project_name, d) for d in pkg_resources.find_distributions(metadata) - ) - - assert len(dists) == 2, dists - - unversioned = dists['UnversionedDistribution'] - versioned = dists['VersionedDistribution'] - - assert versioned.version == '2.718' # from filename - assert unversioned.version == '0.3' # from METADATA + def test_version(self, metadata): + (dist,) = md.Distribution.discover(path=[metadata]) + assert dist.version == '0.3' def test_conditional_dependencies(self, metadata): specs = 'splort==4', 'quux>=1.1' From 68223ff521b2479b6fe210cd50977bf08c68f994 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 16:28:12 -0400 Subject: [PATCH 1694/1761] Removed test_version and test_conditional_dependencies, as these tests were only exercising expectations about pkg_resources and not dist_info. 
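
The tests removed below had briefly exercised ``importlib.metadata``; for reference, ``Distribution.at()`` reads a metadata directory straight from disk, taking the version from the ``METADATA`` file rather than from the directory name. A self-contained sketch (the path and project name are invented):

    from importlib import metadata
    from pathlib import Path

    di = Path("demo.dist-info")  # invented location
    di.mkdir(exist_ok=True)
    (di / "METADATA").write_text(
        "Metadata-Version: 2.1\nName: demo\nVersion: 0.3\n", encoding="utf-8"
    )
    dist = metadata.Distribution.at(di)
    assert dist.version == "0.3"
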
--- setuptools/tests/test_dist_info.py | 46 ------------------------------ 1 file changed, 46 deletions(-) diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py index 7a597c1468..010018d1e6 100644 --- a/setuptools/tests/test_dist_info.py +++ b/setuptools/tests/test_dist_info.py @@ -9,8 +9,6 @@ import pytest -import pkg_resources -from setuptools._importlib import metadata as md from setuptools.archive_util import unpack_archive from .textwrap import DALS @@ -19,50 +17,6 @@ class TestDistInfo: - metadata_base = DALS( - """ - Metadata-Version: 1.2 - Requires-Dist: splort (==4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """ - ) - - @classmethod - def build_metadata(cls, **kwargs): - lines = ('{key}: {value}\n'.format(**locals()) for key, value in kwargs.items()) - return cls.metadata_base + ''.join(lines) - - @pytest.fixture - def metadata(self, tmpdir): - dist_info_name = 'UnversionedDistribution.dist-info' - unversioned = tmpdir / dist_info_name - unversioned.mkdir() - filename = unversioned / 'METADATA' - content = self.build_metadata( - Name='UnversionedDistribution', - Version='0.3', - ) - filename.write_text(content, encoding='utf-8') - - return str(tmpdir) - - def test_version(self, metadata): - (dist,) = md.Distribution.discover(path=[metadata]) - assert dist.version == '0.3' - - def test_conditional_dependencies(self, metadata): - specs = 'splort==4', 'quux>=1.1' - requires = list(map(pkg_resources.Requirement.parse, specs)) - - for d in pkg_resources.find_distributions(metadata): - assert d.requires() == requires[:1] - assert d.requires(extras=('baz',)) == [ - requires[0], - pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'), - ] - assert d.extras == ['baz'] - def test_invalid_version(self, tmp_path): """ Supplying an invalid version crashes dist_info. From 0c045cca84ed9db7c0606d31a7091df90681ba52 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 16:30:29 -0400 Subject: [PATCH 1695/1761] Prefer importlib.resources in test_windows_wrappers. --- setuptools/tests/test_windows_wrappers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py index f14338404d..00895b1245 100644 --- a/setuptools/tests/test_windows_wrappers.py +++ b/setuptools/tests/test_windows_wrappers.py @@ -20,7 +20,7 @@ import pytest -import pkg_resources +from setuptools._importlib import resources pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only") @@ -48,7 +48,7 @@ def create_script(cls, tmpdir): # also copy cli.exe to the sample directory with (tmpdir / cls.wrapper_name).open('wb') as f: - w = pkg_resources.resource_string('setuptools', cls.wrapper_source) + w = resources.files('setuptools').join_path(cls.wrapper_source).read_bytes() f.write(w) From ef7c43f01259bd2d3d85c2853453553fc65cd344 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 16:32:33 -0400 Subject: [PATCH 1696/1761] Add news fragment. --- newsfragments/3085.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/3085.feature.rst diff --git a/newsfragments/3085.feature.rst b/newsfragments/3085.feature.rst new file mode 100644 index 0000000000..3ac7ca570d --- /dev/null +++ b/newsfragments/3085.feature.rst @@ -0,0 +1 @@ +Replaced more references to pkg_resources with importlib equivalents. From 6d9280d1aa87b7694b385f1de6692935d07f0d64 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Sun, 11 May 2025 16:50:15 -0400 Subject: [PATCH 1697/1761] Fixup docs build --- docs/userguide/interfaces.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/userguide/interfaces.rst b/docs/userguide/interfaces.rst index fff1e1d5a9..791f7a1818 100644 --- a/docs/userguide/interfaces.rst +++ b/docs/userguide/interfaces.rst @@ -7,7 +7,7 @@ In addition to operating as a library, because newer versions of Setuptools are In order to have the ability to make sensible changes to the project, downstream developers and consumers should avoid depending on internal implementation details of the library and should rely only on the supported interfaces: -- *Tier 1*: APIs required by modern PyPA packaging standards (:pep:`517`, :pep:`660`) and Documented APIs for customising build behavior or creating plugins (:doc:`/userguide/extension`, :doc:`references/keywords`): +- *Tier 1*: APIs required by modern PyPA packaging standards (:pep:`517`, :pep:`660`) and Documented APIs for customising build behavior or creating plugins (:doc:`/userguide/extension`, :doc:`/references/keywords`): These APIs are expected to be extremely stable and have deprecation notices and periods prior to backward incompatible changes or removals. From 18bfe51deb8479bfca40e326697a4f9c1b94521f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 16:50:53 -0400 Subject: [PATCH 1698/1761] =?UTF-8?q?Bump=20version:=2080.4.0=20=E2=86=92?= =?UTF-8?q?=2080.5.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 15 +++++++++++++++ newsfragments/3085.feature.rst | 1 - newsfragments/4982.misc.rst | 1 - pyproject.toml | 2 +- 5 files changed, 17 insertions(+), 4 deletions(-) delete mode 100644 newsfragments/3085.feature.rst delete mode 100644 newsfragments/4982.misc.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index f6554824fd..60492017e2 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 80.4.0 +current_version = 80.5.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 2de478188f..e937ce4419 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,18 @@ +v80.5.0 +======= + +Features +-------- + +- Replaced more references to pkg_resources with importlib equivalents. (#3085) + + +Misc +---- + +- #4982 + + v80.4.0 ======= diff --git a/newsfragments/3085.feature.rst b/newsfragments/3085.feature.rst deleted file mode 100644 index 3ac7ca570d..0000000000 --- a/newsfragments/3085.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Replaced more references to pkg_resources with importlib equivalents. diff --git a/newsfragments/4982.misc.rst b/newsfragments/4982.misc.rst deleted file mode 100644 index 8e40754306..0000000000 --- a/newsfragments/4982.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Mark more tests failing without Internet access as ``uses_network``. diff --git a/pyproject.toml b/pyproject.toml index b61a77e4cc..6a6d499d8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "80.4.0" +version = "80.5.0" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From 5a6c1532c206871bc2913349d97dda06e01b9963 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 23:20:37 -0400 Subject: [PATCH 1699/1761] Bump to setuptools 77 or later. 
Closes jaraco/skeleton#176 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bda001a438..ce6c17090d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [build-system] requires = [ - "setuptools>=61.2", + "setuptools>=77", "setuptools_scm[toml]>=3.4.1", # jaraco/skeleton#174 "coherent.licensed", From cd506f091c26addf1e5330e0695f6b92541275b2 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 12 May 2025 17:36:14 +0100 Subject: [PATCH 1700/1761] Fix error in windows test --- setuptools/tests/test_windows_wrappers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py index 00895b1245..4f990eb1c3 100644 --- a/setuptools/tests/test_windows_wrappers.py +++ b/setuptools/tests/test_windows_wrappers.py @@ -48,7 +48,7 @@ def create_script(cls, tmpdir): # also copy cli.exe to the sample directory with (tmpdir / cls.wrapper_name).open('wb') as f: - w = resources.files('setuptools').join_path(cls.wrapper_source).read_bytes() + w = resources.files('setuptools').joinpath(cls.wrapper_source).read_bytes() f.write(w) From 3dfc763795bb81fbfe0143314633b8551481459b Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 12 May 2025 17:38:25 +0100 Subject: [PATCH 1701/1761] Add newsfragment --- newsfragments/4995.misc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4995.misc.rst diff --git a/newsfragments/4995.misc.rst b/newsfragments/4995.misc.rst new file mode 100644 index 0000000000..d04460c512 --- /dev/null +++ b/newsfragments/4995.misc.rst @@ -0,0 +1 @@ +Fixed ``no attribute 'join_path'`` error in Windows test From d5db652b004189d29d3a85d00a42ea6d16264682 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 12 May 2025 20:50:25 -0400 Subject: [PATCH 1702/1761] Remove comments referring to pkg_resources and inline a compatibility behavior with its explanation. --- setuptools/_normalization.py | 2 -- setuptools/_path.py | 15 ++++++++++++--- setuptools/_reqs.py | 2 +- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index 0937a4faf8..fb89323c9d 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -36,7 +36,6 @@ def safe_name(component: str) -> str: >>> safe_name("hello_world") 'hello_world' """ - # See pkg_resources.safe_name return _UNSAFE_NAME_CHARS.sub("-", component) @@ -81,7 +80,6 @@ def best_effort_version(version: str) -> str: >>> best_effort_version("42.+?1") '42.dev0+sanitized.1' """ - # See pkg_resources._forgiving_version try: return safe_version(version) except packaging.version.InvalidVersion: diff --git a/setuptools/_path.py b/setuptools/_path.py index 0d99b0f539..2b78022934 100644 --- a/setuptools/_path.py +++ b/setuptools/_path.py @@ -39,11 +39,20 @@ def same_path(p1: StrPath, p2: StrPath) -> bool: return normpath(p1) == normpath(p2) +def _cygwin_patch(filename: StrPath): # pragma: nocover + """ + Contrary to POSIX 2008, on Cygwin, getcwd (3) contains + symlink components. Using + os.path.abspath() works around this limitation. A fix in os.getcwd() + would probably better, in Cygwin even more so, except + that this seems to be by design... 
+ """ + return os.path.abspath(filename) if sys.platform == 'cygwin' else filename + + def normpath(filename: StrPath) -> str: """Normalize a file/dir name for comparison purposes.""" - # See pkg_resources.normalize_path for notes about cygwin - file = os.path.abspath(filename) if sys.platform == 'cygwin' else filename - return os.path.normcase(os.path.realpath(os.path.normpath(file))) + return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename)))) @contextlib.contextmanager diff --git a/setuptools/_reqs.py b/setuptools/_reqs.py index c793be4d6e..7be56cbf35 100644 --- a/setuptools/_reqs.py +++ b/setuptools/_reqs.py @@ -37,6 +37,6 @@ def parse(strs: _StrOrIter) -> Iterator[Requirement]: ... def parse(strs: _StrOrIter, parser: Callable[[str], _T]) -> Iterator[_T]: ... def parse(strs: _StrOrIter, parser: Callable[[str], _T] = parse_req) -> Iterator[_T]: # type: ignore[assignment] """ - Replacement for ``pkg_resources.parse_requirements`` that uses ``packaging``. + Parse requirements. """ return map(parser, parse_strings(strs)) From 510fce142b6a9b4cebc4f55d1940baf78ed3943e Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 12 May 2025 20:53:31 -0400 Subject: [PATCH 1703/1761] Set the due date for installer removal (same as easy_install and package_index). --- setuptools/installer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/installer.py b/setuptools/installer.py index 64bc2def07..f72bdfc687 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -147,4 +147,4 @@ class _DeprecatedInstaller(SetuptoolsDeprecationWarning): Requirements should be satisfied by a PEP 517 installer. If you are using pip, you can try `pip install --use-pep517`. """ - # _DUE_DATE not decided yet + _DUE_DATE = 2025, 10, 31 From 468a9a9007f8955dca1001571cfe02e9912e903b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 12 May 2025 20:57:55 -0400 Subject: [PATCH 1704/1761] Rely on importlib.metadata to detect presence of wheel. --- setuptools/installer.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/setuptools/installer.py b/setuptools/installer.py index f72bdfc687..339c73c112 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -10,6 +10,7 @@ from pkg_resources import Distribution from . import _reqs +from ._importlib import metadata from ._reqs import _StrOrIter from .warnings import SetuptoolsDeprecationWarning from .wheel import Wheel @@ -133,11 +134,9 @@ def strip_marker(req): def _warn_wheel_not_available(dist): - import pkg_resources # Delay import to avoid unnecessary side-effects - try: - pkg_resources.get_distribution('wheel') - except pkg_resources.DistributionNotFound: + metadata.distribution('wheel') + except metadata.PackageNotFoundError: dist.announce('WARNING: The wheel package is not available.', log.WARN) From 8ceaa6c433884a1c45afd64c298e6fdd4047bed6 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 12 May 2025 21:06:36 -0400 Subject: [PATCH 1705/1761] Replace pkg_resources with packaging.requirements. --- setuptools/installer.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/setuptools/installer.py b/setuptools/installer.py index 339c73c112..1170254a8e 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -7,6 +7,9 @@ import tempfile from functools import partial +import packaging.requirements +import packaging.utils + from pkg_resources import Distribution from . 
import _reqs @@ -52,6 +55,14 @@ def _fetch_build_eggs(dist, requires: _StrOrIter) -> list[Distribution]: return resolved_dists +def _dist_matches_req(egg_dist, req): + return ( + packaging.utils.canonicalize_name(egg_dist.project_name) + == packaging.utils.canonicalize_name(req.name) + and egg_dist.version in req.specifier + ) + + def _fetch_build_egg_no_warn(dist, req): # noqa: C901 # is too complex (16) # FIXME import pkg_resources # Delay import to avoid unnecessary side-effects @@ -81,7 +92,7 @@ def _fetch_build_egg_no_warn(dist, req): # noqa: C901 # is too complex (16) # eggs_dir = os.path.realpath(dist.get_egg_cache_dir()) environment = pkg_resources.Environment() for egg_dist in pkg_resources.find_distributions(eggs_dir): - if egg_dist in req and environment.can_add(egg_dist): + if _dist_matches_req(egg_dist, req) and environment.can_add(egg_dist): return egg_dist with tempfile.TemporaryDirectory() as tmpdir: cmd = [ @@ -125,10 +136,8 @@ def strip_marker(req): calling pip with something like `babel; extra == "i18n"`, which would always be ignored. """ - import pkg_resources # Delay import to avoid unnecessary side-effects - # create a copy to avoid mutating the input - req = pkg_resources.Requirement.parse(str(req)) + req = packaging.requirements.Requirement(str(req)) req.marker = None return req From 2d2d3b6969568958da45450bcbbcdcb0e7dd50cb Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 12 May 2025 17:00:19 +0100 Subject: [PATCH 1706/1761] Restore delayed import of pkg_resources --- setuptools/installer.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/setuptools/installer.py b/setuptools/installer.py index 64bc2def07..151b45e967 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -6,8 +6,7 @@ import sys import tempfile from functools import partial - -from pkg_resources import Distribution +from typing import TYPE_CHECKING from . import _reqs from ._reqs import _StrOrIter @@ -17,6 +16,9 @@ from distutils import log from distutils.errors import DistutilsError +if TYPE_CHECKING: + from pkg_resources import Distribution + def _fixup_find_links(find_links): """Ensure find-links option end-up being a list of strings.""" From 1409c02e241a0ab9825920bd87f6252a1d814960 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 12 May 2025 18:06:44 +0100 Subject: [PATCH 1707/1761] Remove methods already defined in Distutils --- setuptools/__init__.py | 38 -------------------------------------- 1 file changed, 38 deletions(-) diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 64464dfaa3..f1b9bfe9b8 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -9,7 +9,6 @@ import functools import os -import re import sys from abc import abstractmethod from collections.abc import Mapping @@ -30,7 +29,6 @@ from .warnings import SetuptoolsDeprecationWarning import distutils.core -from distutils.errors import DistutilsOptionError __all__ = [ 'setup', @@ -175,42 +173,6 @@ def __init__(self, dist: Distribution, **kw) -> None: super().__init__(dist) vars(self).update(kw) - def _ensure_stringlike(self, option, what, default=None): - val = getattr(self, option) - if val is None: - setattr(self, option, default) - return default - elif not isinstance(val, str): - raise DistutilsOptionError(f"'{option}' must be a {what} (got `{val}`)") - return val - - def ensure_string_list(self, option: str) -> None: - r"""Ensure that 'option' is a list of strings. 
If 'option' is - currently a string, we split it either on /,\s*/ or /\s+/, so - "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become - ["foo", "bar", "baz"]. - - .. - TODO: This method seems to be similar to the one in ``distutils.cmd`` - Probably it is just here for backward compatibility with old Python versions? - - :meta private: - """ - val = getattr(self, option) - if val is None: - return - elif isinstance(val, str): - setattr(self, option, re.split(r',\s*|\s+', val)) - else: - if isinstance(val, list): - ok = all(isinstance(v, str) for v in val) - else: - ok = False - if not ok: - raise DistutilsOptionError( - f"'{option}' must be a list of strings (got {val!r})" - ) - @overload def reinitialize_command( self, command: str, reinit_subcommands: bool = False, **kw From 7ad574de5fe69f3032b0ecfbc7011c595f381e60 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 12 May 2025 18:33:57 +0100 Subject: [PATCH 1708/1761] Add newsfragment --- newsfragments/4996.misc.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 newsfragments/4996.misc.rst diff --git a/newsfragments/4996.misc.rst b/newsfragments/4996.misc.rst new file mode 100644 index 0000000000..d4e6911f15 --- /dev/null +++ b/newsfragments/4996.misc.rst @@ -0,0 +1,2 @@ +Remove code duplication for ``_ensure_stringlike`` and ``ensure_string_list`` +in ``setuptools/__init__.py`` (already exists in ``distutils/cmd.py``). From 7379eaa957aaf4f2a01438066afb1674a64545f4 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 12 May 2025 22:27:05 -0400 Subject: [PATCH 1709/1761] Migrated installer to avoid pkg_resources. --- setuptools/dist.py | 6 +----- setuptools/installer.py | 40 ++++++++++++++--------------------- setuptools/tests/test_dist.py | 2 +- 3 files changed, 18 insertions(+), 30 deletions(-) diff --git a/setuptools/dist.py b/setuptools/dist.py index 57aeb2579d..f06298c868 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -46,8 +46,6 @@ if TYPE_CHECKING: from typing_extensions import TypeAlias - from pkg_resources import Distribution as _pkg_resources_Distribution - __all__ = ['Distribution'] @@ -761,9 +759,7 @@ def parse_config_files( self._finalize_license_expression() self._finalize_license_files() - def fetch_build_eggs( - self, requires: _StrOrIter - ) -> list[_pkg_resources_Distribution]: + def fetch_build_eggs(self, requires: _StrOrIter) -> list[metadata.Distribution]: """Resolve pre-setup requirements""" from .installer import _fetch_build_eggs diff --git a/setuptools/installer.py b/setuptools/installer.py index 1170254a8e..fb61d86868 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -5,14 +5,10 @@ import subprocess import sys import tempfile -from functools import partial import packaging.requirements import packaging.utils -from pkg_resources import Distribution - -from . 
import _reqs
 from ._importlib import metadata
 from ._reqs import _StrOrIter
 from .warnings import SetuptoolsDeprecationWarning
@@ -39,33 +35,32 @@ def fetch_build_egg(dist, req):
         return _fetch_build_egg_no_warn(dist, req)
 
 
-def _fetch_build_eggs(dist, requires: _StrOrIter) -> list[Distribution]:
-    import pkg_resources  # Delay import to avoid unnecessary side-effects
-
+def _fetch_build_eggs(dist, requires: _StrOrIter) -> list[metadata.Distribution]:
     _DeprecatedInstaller.emit(stacklevel=3)
     _warn_wheel_not_available(dist)
 
-    resolved_dists = pkg_resources.working_set.resolve(
-        _reqs.parse(requires, pkg_resources.Requirement),  # required for compatibility
-        installer=partial(_fetch_build_egg_no_warn, dist),  # avoid warning twice
-        replace_conflicting=True,
+    from . import _reqs
+
+    needed_reqs = (
+        req for req in _reqs.parse(requires) if not req.marker or req.marker.evaluate()
     )
+    resolved_dists = [_fetch_build_egg_no_warn(dist, req) for req in needed_reqs]
     for dist in resolved_dists:
-        pkg_resources.working_set.add(dist, replace=True)
+        # dist.locate_file('') is the directory containing EGG-INFO, where the importable
+        # contents can be found.
+        sys.path.insert(0, str(dist.locate_file('')))
     return resolved_dists
 
 
 def _dist_matches_req(egg_dist, req):
     return (
-        packaging.utils.canonicalize_name(egg_dist.project_name)
+        packaging.utils.canonicalize_name(egg_dist.name)
         == packaging.utils.canonicalize_name(req.name)
         and egg_dist.version in req.specifier
     )
 
 
 def _fetch_build_egg_no_warn(dist, req):  # noqa: C901  # is too complex (16)  # FIXME
-    import pkg_resources  # Delay import to avoid unnecessary side-effects
-
     # Ignore environment markers; if supplied, it is required.
     req = strip_marker(req)
     # Take easy_install options into account, but do not override relevant
@@ -90,9 +85,11 @@ def _fetch_build_egg_no_warn(dist, req):  # noqa: C901  # is too complex (16)  #
     if dist.dependency_links:
         find_links.extend(dist.dependency_links)
     eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
-    environment = pkg_resources.Environment()
-    for egg_dist in pkg_resources.find_distributions(eggs_dir):
-        if _dist_matches_req(egg_dist, req) and environment.can_add(egg_dist):
+    cached_dists = metadata.Distribution.discover(
+        path=glob.glob(f'{eggs_dir}/*.egg/EGG-INFO')
+    )
+    for egg_dist in cached_dists:
+        if _dist_matches_req(egg_dist, req):
             return egg_dist
     with tempfile.TemporaryDirectory() as tmpdir:
         cmd = [
@@ -122,12 +119,7 @@ def _fetch_build_egg_no_warn(dist, req):  # noqa: C901  # is too complex (16)  #
         wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
         dist_location = os.path.join(eggs_dir, wheel.egg_name())
         wheel.install_as_egg(dist_location)
-        dist_metadata = pkg_resources.PathMetadata(
-            dist_location, os.path.join(dist_location, 'EGG-INFO')
-        )
-        return pkg_resources.Distribution.from_filename(
-            dist_location, metadata=dist_metadata
-        )
+        return metadata.Distribution.at(dist_location + '/EGG-INFO')
 
 
 def strip_marker(req):
diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py
index 011351f57e..552ee2d2ea 100644
--- a/setuptools/tests/test_dist.py
+++ b/setuptools/tests/test_dist.py
@@ -56,7 +56,7 @@ def sdist_with_index(distname, version):
     dist = Distribution()
     dist.parse_config_files()
     resolved_dists = [dist.fetch_build_egg(r) for r in reqs]
-    assert [dist.key for dist in resolved_dists if dist] == reqs
+    assert [dist.name for dist in resolved_dists if dist] == reqs
 
 
 EXAMPLE_BASE_INFO = dict(
From e9feabf8b52c1b5281cd7ddb9c8d98e079e3f375 Mon Sep 17 00:00:00 2001
From: "Jason R. 
Coombs" Date: Tue, 13 May 2025 17:33:41 -0400 Subject: [PATCH 1710/1761] Add news fragment. --- newsfragments/4997.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4997.feature.rst diff --git a/newsfragments/4997.feature.rst b/newsfragments/4997.feature.rst new file mode 100644 index 0000000000..861d599ce6 --- /dev/null +++ b/newsfragments/4997.feature.rst @@ -0,0 +1 @@ +Removed usage of pkg_resources from installer. Set an official deadline on the installer deprecation to 2025-10-31. From 7cb4c76468735ae69450a3693bed56217afe902c Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 13 May 2025 19:06:59 -0400 Subject: [PATCH 1711/1761] Normalize imports --- setuptools/installer.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/setuptools/installer.py b/setuptools/installer.py index fb61d86868..3b19f4dd5f 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -9,8 +9,8 @@ import packaging.requirements import packaging.utils +from . import _reqs from ._importlib import metadata -from ._reqs import _StrOrIter from .warnings import SetuptoolsDeprecationWarning from .wheel import Wheel @@ -35,12 +35,10 @@ def fetch_build_egg(dist, req): return _fetch_build_egg_no_warn(dist, req) -def _fetch_build_eggs(dist, requires: _StrOrIter) -> list[metadata.Distribution]: +def _fetch_build_eggs(dist, requires: _reqs._StrOrIter) -> list[metadata.Distribution]: _DeprecatedInstaller.emit(stacklevel=3) _warn_wheel_not_available(dist) - from . import _reqs - needed_reqs = ( req for req in _reqs.parse(requires) if not req.marker or req.marker.evaluate() ) From 89d3aef7fa7ff5e68cd5c2c29bc986bbd4afa782 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 11 May 2025 17:33:37 -0400 Subject: [PATCH 1712/1761] Add news fragment. --- newsfragments/4981.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/4981.feature.rst diff --git a/newsfragments/4981.feature.rst b/newsfragments/4981.feature.rst new file mode 100644 index 0000000000..d6163f7480 --- /dev/null +++ b/newsfragments/4981.feature.rst @@ -0,0 +1 @@ +Added a build dependency on coherent.licensed to inject the declared license text at build time. From 00c16df484fa3ec67223037846eff4a280dccb80 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 14 May 2025 15:23:05 -0400 Subject: [PATCH 1713/1761] =?UTF-8?q?Bump=20version:=2080.5.0=20=E2=86=92?= =?UTF-8?q?=2080.6.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 15 +++++++++++++++ newsfragments/4981.feature.rst | 1 - newsfragments/4995.misc.rst | 1 - pyproject.toml | 2 +- 5 files changed, 17 insertions(+), 4 deletions(-) delete mode 100644 newsfragments/4981.feature.rst delete mode 100644 newsfragments/4995.misc.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 60492017e2..0e941d188f 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 80.5.0 +current_version = 80.6.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index e937ce4419..31ccfc3d32 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,18 @@ +v80.6.0 +======= + +Features +-------- + +- Added a build dependency on coherent.licensed to inject the declared license text at build time. 
(#4981) + + +Misc +---- + +- #4995 + + v80.5.0 ======= diff --git a/newsfragments/4981.feature.rst b/newsfragments/4981.feature.rst deleted file mode 100644 index d6163f7480..0000000000 --- a/newsfragments/4981.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added a build dependency on coherent.licensed to inject the declared license text at build time. diff --git a/newsfragments/4995.misc.rst b/newsfragments/4995.misc.rst deleted file mode 100644 index d04460c512..0000000000 --- a/newsfragments/4995.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``no attribute 'join_path'`` error in Windows test diff --git a/pyproject.toml b/pyproject.toml index 5a74b7a0ba..37a29ed162 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "80.5.0" +version = "80.6.0" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From 486081e9eff38c6ed19aa24eab8200d9bba4cdce Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 14 May 2025 15:50:38 -0400 Subject: [PATCH 1714/1761] =?UTF-8?q?Bump=20version:=2080.6.0=20=E2=86=92?= =?UTF-8?q?=2080.7.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 15 +++++++++++++++ newsfragments/4996.misc.rst | 2 -- newsfragments/4997.feature.rst | 1 - pyproject.toml | 2 +- 5 files changed, 17 insertions(+), 5 deletions(-) delete mode 100644 newsfragments/4996.misc.rst delete mode 100644 newsfragments/4997.feature.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0e941d188f..21e04a6990 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 80.6.0 +current_version = 80.7.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 31ccfc3d32..675e45e17f 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,18 @@ +v80.7.0 +======= + +Features +-------- + +- Removed usage of pkg_resources from installer. Set an official deadline on the installer deprecation to 2025-10-31. (#4997) + + +Misc +---- + +- #4996 + + v80.6.0 ======= diff --git a/newsfragments/4996.misc.rst b/newsfragments/4996.misc.rst deleted file mode 100644 index d4e6911f15..0000000000 --- a/newsfragments/4996.misc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Remove code duplication for ``_ensure_stringlike`` and ``ensure_string_list`` -in ``setuptools/__init__.py`` (already exists in ``distutils/cmd.py``). diff --git a/newsfragments/4997.feature.rst b/newsfragments/4997.feature.rst deleted file mode 100644 index 861d599ce6..0000000000 --- a/newsfragments/4997.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Removed usage of pkg_resources from installer. Set an official deadline on the installer deprecation to 2025-10-31. diff --git a/pyproject.toml b/pyproject.toml index 37a29ed162..0bd98e4163 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "80.6.0" +version = "80.7.0" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From 76d2923db3e60261e7e9f26a37287b27ad3933a1 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 14 May 2025 21:45:47 -0400 Subject: [PATCH 1715/1761] Only attempt to fetch eggs for unsatisfied requirements. 
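
The satisfaction check combines `_present` and `_dist_matches_req` (see the
diff below). As a rough standalone sketch of the same logic -- the
`is_satisfied` name is illustrative, not part of the patch:

    from importlib import metadata
    from packaging.requirements import Requirement
    from packaging.utils import canonicalize_name

    def is_satisfied(spec: str) -> bool:
        """True if an installed distribution matches name and specifier."""
        req = Requirement(spec)
        return any(
            canonicalize_name(dist.metadata['Name'] or '')
            == canonicalize_name(req.name)
            and dist.version in req.specifier
            for dist in metadata.distributions()
        )

    # e.g. is_satisfied('wheel>=0.40') consults every installed distribution.
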
Ref #4998 --- newsfragments/4998.bugfix.rst | 1 + setuptools/installer.py | 11 ++++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 newsfragments/4998.bugfix.rst diff --git a/newsfragments/4998.bugfix.rst b/newsfragments/4998.bugfix.rst new file mode 100644 index 0000000000..8a23d407de --- /dev/null +++ b/newsfragments/4998.bugfix.rst @@ -0,0 +1 @@ +Only attempt to fetch eggs for unsatisfied requirements. diff --git a/setuptools/installer.py b/setuptools/installer.py index 3b19f4dd5f..977debcce9 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -1,6 +1,7 @@ from __future__ import annotations import glob +import itertools import os import subprocess import sys @@ -35,12 +36,20 @@ def fetch_build_egg(dist, req): return _fetch_build_egg_no_warn(dist, req) +def _present(req): + return any(_dist_matches_req(dist, req) for dist in metadata.distributions()) + + def _fetch_build_eggs(dist, requires: _reqs._StrOrIter) -> list[metadata.Distribution]: _DeprecatedInstaller.emit(stacklevel=3) _warn_wheel_not_available(dist) + parsed_reqs = _reqs.parse(requires) + + missing_reqs = itertools.filterfalse(_present, parsed_reqs) + needed_reqs = ( - req for req in _reqs.parse(requires) if not req.marker or req.marker.evaluate() + req for req in missing_reqs if not req.marker or req.marker.evaluate() ) resolved_dists = [_fetch_build_egg_no_warn(dist, req) for req in needed_reqs] for dist in resolved_dists: From e6f9ee9411566b276e52e828016b9d27007228bc Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 14 May 2025 22:09:36 -0400 Subject: [PATCH 1716/1761] In installer, when discovering egg dists, let metadata discovery search each egg. Closes #4998 --- newsfragments/4998.bugfix.1.rst | 1 + setuptools/installer.py | 4 +--- 2 files changed, 2 insertions(+), 3 deletions(-) create mode 100644 newsfragments/4998.bugfix.1.rst diff --git a/newsfragments/4998.bugfix.1.rst b/newsfragments/4998.bugfix.1.rst new file mode 100644 index 0000000000..143e5a42bd --- /dev/null +++ b/newsfragments/4998.bugfix.1.rst @@ -0,0 +1 @@ +In installer, when discovering egg dists, let metadata discovery search each egg. diff --git a/setuptools/installer.py b/setuptools/installer.py index 977debcce9..2c26e3a1f4 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -92,9 +92,7 @@ def _fetch_build_egg_no_warn(dist, req): # noqa: C901 # is too complex (16) # if dist.dependency_links: find_links.extend(dist.dependency_links) eggs_dir = os.path.realpath(dist.get_egg_cache_dir()) - cached_dists = metadata.Distribution.discover( - path=glob.glob(f'{eggs_dir}/*.egg/EGG-INFO') - ) + cached_dists = metadata.Distribution.discover(path=glob.glob(f'{eggs_dir}/*.egg')) for egg_dist in cached_dists: if _dist_matches_req(egg_dist, req): return egg_dist From 12ca0186ba7d9bf387d65400bb05205d0bcf9e56 Mon Sep 17 00:00:00 2001 From: "Jason R. 
Coombs" Date: Wed, 14 May 2025 22:21:13 -0400 Subject: [PATCH 1717/1761] =?UTF-8?q?Bump=20version:=2080.7.0=20=E2=86=92?= =?UTF-8?q?=2080.7.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 10 ++++++++++ newsfragments/4998.bugfix.1.rst | 1 - newsfragments/4998.bugfix.rst | 1 - pyproject.toml | 2 +- 5 files changed, 12 insertions(+), 4 deletions(-) delete mode 100644 newsfragments/4998.bugfix.1.rst delete mode 100644 newsfragments/4998.bugfix.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 21e04a6990..eaaf3665a8 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 80.7.0 +current_version = 80.7.1 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 675e45e17f..93fdc47f73 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,13 @@ +v80.7.1 +======= + +Bugfixes +-------- + +- Only attempt to fetch eggs for unsatisfied requirements. (#4998) +- In installer, when discovering egg dists, let metadata discovery search each egg. (#4998) + + v80.7.0 ======= diff --git a/newsfragments/4998.bugfix.1.rst b/newsfragments/4998.bugfix.1.rst deleted file mode 100644 index 143e5a42bd..0000000000 --- a/newsfragments/4998.bugfix.1.rst +++ /dev/null @@ -1 +0,0 @@ -In installer, when discovering egg dists, let metadata discovery search each egg. diff --git a/newsfragments/4998.bugfix.rst b/newsfragments/4998.bugfix.rst deleted file mode 100644 index 8a23d407de..0000000000 --- a/newsfragments/4998.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Only attempt to fetch eggs for unsatisfied requirements. diff --git a/pyproject.toml b/pyproject.toml index 0bd98e4163..9715dcf6b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "80.7.0" +version = "80.7.1" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From ae480ff7c2b40442dc97fff712312549a9ed76e6 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Fri, 16 May 2025 14:46:25 -0400 Subject: [PATCH 1718/1761] Restore explicit LICENSE file Partially reverts commit 9a81db3c77bc106017dcd4b0853a5a94f43ae33c. Ref #5001 --- LICENSE | 17 +++++++++++++++++ newsfragments/5001.feature.rst | 1 + 2 files changed, 18 insertions(+) create mode 100644 LICENSE create mode 100644 newsfragments/5001.feature.rst diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..1bb5a44356 --- /dev/null +++ b/LICENSE @@ -0,0 +1,17 @@ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/newsfragments/5001.feature.rst b/newsfragments/5001.feature.rst new file mode 100644 index 0000000000..0bd2ea4a41 --- /dev/null +++ b/newsfragments/5001.feature.rst @@ -0,0 +1 @@ +Restore explicit LICENSE file. From 62e47935abc3ede4ca5860d0775d70147e6c5635 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 19 May 2025 12:47:13 +0100 Subject: [PATCH 1719/1761] Comment out unused build dependency --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9715dcf6b5..48b25b7669 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = [ # "setuptools>=77", # "setuptools_scm[toml]>=3.4.1", # jaraco/skeleton#174 - "coherent.licensed", + # "coherent.licensed", ] build-backend = "setuptools.build_meta" backend-path = ["."] From 57d6fcd5854cec806b00ee966b62887d3d13922b Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Mon, 19 May 2025 12:49:17 +0100 Subject: [PATCH 1720/1761] Add news fragment --- newsfragments/5003.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/5003.feature.rst diff --git a/newsfragments/5003.feature.rst b/newsfragments/5003.feature.rst new file mode 100644 index 0000000000..a67e314d3c --- /dev/null +++ b/newsfragments/5003.feature.rst @@ -0,0 +1 @@ +Removed no longer used build dependency on ``coherent.licensed``. From 95145dd0afd94006c158d668bffb1d6ec7f4cacb Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 19 May 2025 16:01:16 -0400 Subject: [PATCH 1721/1761] Extract a method for converting requires. --- setuptools/wheel.py | 46 +++++++++++++++++++++++++-------------------- 1 file changed, 26 insertions(+), 20 deletions(-) diff --git a/setuptools/wheel.py b/setuptools/wheel.py index c7ca43b5cf..562beb7b35 100644 --- a/setuptools/wheel.py +++ b/setuptools/wheel.py @@ -133,8 +133,6 @@ def _install_as_egg(self, destination_eggdir, zf): @staticmethod def _convert_metadata(zf, destination_eggdir, dist_info, egg_info): - import pkg_resources - def get_metadata(name): with zf.open(posixpath.join(dist_info, name)) as fp: value = fp.read().decode('utf-8') @@ -148,8 +146,32 @@ def get_metadata(name): raise ValueError(f'unsupported wheel format version: {wheel_version}') # Extract to target directory. _unpack_zipfile_obj(zf, destination_eggdir) - # Convert metadata. 
dist_info = os.path.join(destination_eggdir, dist_info) + install_requires, extras_require = Wheel._convert_requires( + destination_eggdir, dist_info + ) + os.rename(dist_info, egg_info) + os.rename( + os.path.join(egg_info, 'METADATA'), + os.path.join(egg_info, 'PKG-INFO'), + ) + setup_dist = setuptools.Distribution( + attrs=dict( + install_requires=install_requires, + extras_require=extras_require, + ), + ) + with disable_info_traces(): + write_requirements( + setup_dist.get_command_obj('egg_info'), + None, + os.path.join(egg_info, 'requires.txt'), + ) + + @staticmethod + def _convert_requires(destination_eggdir, dist_info): + import pkg_resources + dist = pkg_resources.Distribution.from_location( destination_eggdir, dist_info, @@ -172,23 +194,7 @@ def raw_req(req): ] for extra in dist.extras } - os.rename(dist_info, egg_info) - os.rename( - os.path.join(egg_info, 'METADATA'), - os.path.join(egg_info, 'PKG-INFO'), - ) - setup_dist = setuptools.Distribution( - attrs=dict( - install_requires=install_requires, - extras_require=extras_require, - ), - ) - with disable_info_traces(): - write_requirements( - setup_dist.get_command_obj('egg_info'), - None, - os.path.join(egg_info, 'requires.txt'), - ) + return install_requires, extras_require @staticmethod def _move_data_entries(destination_eggdir, dist_data): From 0e19b82062f168f428128d51f7bd12034daebd2b Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 19 May 2025 17:39:36 -0400 Subject: [PATCH 1722/1761] Replace pkg_resources with importlib.metadata and packaging.requirements. --- setuptools/_discovery.py | 33 +++++++++++++++++++++++++++++ setuptools/wheel.py | 45 ++++++++++++++++++++++++++++------------ 2 files changed, 65 insertions(+), 13 deletions(-) create mode 100644 setuptools/_discovery.py diff --git a/setuptools/_discovery.py b/setuptools/_discovery.py new file mode 100644 index 0000000000..d1b4a0ee03 --- /dev/null +++ b/setuptools/_discovery.py @@ -0,0 +1,33 @@ +import functools +import operator + +import packaging.requirements + + +# from coherent.build.discovery +def extras_from_dep(dep): + try: + markers = packaging.requirements.Requirement(dep).marker._markers + except AttributeError: + markers = () + return set( + marker[2].value + for marker in markers + if isinstance(marker, tuple) and marker[0].value == 'extra' + ) + + +def extras_from_deps(deps): + """ + >>> extras_from_deps(['requests']) + set() + >>> extras_from_deps(['pytest; extra == "test"']) + {'test'} + >>> sorted(extras_from_deps([ + ... 'requests', + ... 'pytest; extra == "test"', + ... 'pytest-cov; extra == "test"', + ... 
'sphinx; extra=="doc"']))
+    ['doc', 'test']
+    """
+    return functools.reduce(operator.or_, map(extras_from_dep, deps), set())
diff --git a/setuptools/wheel.py b/setuptools/wheel.py
index 562beb7b35..124e01ad2f 100644
--- a/setuptools/wheel.py
+++ b/setuptools/wheel.py
@@ -9,6 +9,7 @@
 import re
 import zipfile
 
+from packaging.requirements import Requirement
 from packaging.tags import sys_tags
 from packaging.utils import canonicalize_name
 from packaging.version import Version as parse_version
@@ -17,6 +18,8 @@
 from setuptools.archive_util import _unpack_zipfile_obj
 from setuptools.command.egg_info import _egg_basename, write_requirements
 
+from ._discovery import extras_from_deps
+from ._importlib import metadata
 from .unicode_utils import _read_utf8_with_fallback
 
 from distutils.util import get_platform
@@ -170,29 +173,45 @@ def get_metadata(name):
 
     @staticmethod
     def _convert_requires(destination_eggdir, dist_info):
-        import pkg_resources
+        md = metadata.Distribution.at(dist_info).metadata
+        deps = md.get_all('Requires-Dist') or []
+        reqs = list(map(Requirement, deps))
 
-        dist = pkg_resources.Distribution.from_location(
-            destination_eggdir,
-            dist_info,
-            metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info),
-        )
+        extras = extras_from_deps(deps)
 
         # Note: Evaluate and strip markers now,
         # as it's difficult to convert back from the syntax:
         # foobar; "linux" in sys_platform and extra == 'test'
         def raw_req(req):
+            req = Requirement(str(req))
             req.marker = None
             return str(req)
 
-        install_requires = list(map(raw_req, dist.requires()))
+        def eval(req, **env):
+            return not req.marker or req.marker.evaluate(env)
+
+        def for_extra(req):
+            try:
+                markers = req.marker._markers
+            except AttributeError:
+                markers = ()
+            return set(
+                marker[2].value
+                for marker in markers
+                if isinstance(marker, tuple) and marker[0].value == 'extra'
+            )
+
+        install_requires = list(
+            map(raw_req, filter(eval, itertools.filterfalse(for_extra, reqs)))
+        )
         extras_require = {
-            extra: [
-                req
-                for req in map(raw_req, dist.requires((extra,)))
-                if req not in install_requires
-            ]
-            for extra in dist.extras
+            extra: list(
+                map(
+                    raw_req,
+                    (req for req in reqs if for_extra(req) and eval(req, extra=extra)),
+                )
+            )
+            for extra in extras
         }
 
         return install_requires, extras_require
From 1bfd8db2d6b4f98cfe181d5f0854f19f1aa27c22 Mon Sep 17 00:00:00 2001
From: "Jason R. Coombs"
Date: Mon, 19 May 2025 21:21:01 -0400
Subject: [PATCH 1723/1761] Add news fragment.

---
 newsfragments/3085.feature.rst | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 newsfragments/3085.feature.rst

diff --git a/newsfragments/3085.feature.rst b/newsfragments/3085.feature.rst
new file mode 100644
index 0000000000..24b5659761
--- /dev/null
+++ b/newsfragments/3085.feature.rst
@@ -0,0 +1 @@
+Replaced more references to pkg_resources with importlib equivalents in wheel module.
From b3786cd9b59576907e671d8b22b66c73cfed5dc6 Mon Sep 17 00:00:00 2001
From: "Jason R. 
Coombs" Date: Tue, 20 May 2025 09:44:03 -0400 Subject: [PATCH 1724/1761] =?UTF-8?q?Bump=20version:=2080.7.1=20=E2=86=92?= =?UTF-8?q?=2080.8.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 11 +++++++++++ newsfragments/3085.feature.rst | 1 - newsfragments/5001.feature.rst | 1 - newsfragments/5003.feature.rst | 1 - pyproject.toml | 2 +- 6 files changed, 13 insertions(+), 5 deletions(-) delete mode 100644 newsfragments/3085.feature.rst delete mode 100644 newsfragments/5001.feature.rst delete mode 100644 newsfragments/5003.feature.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index eaaf3665a8..03e462f5a4 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 80.7.1 +current_version = 80.8.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index 93fdc47f73..6eb9cacdc0 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,14 @@ +v80.8.0 +======= + +Features +-------- + +- Replaced more references to pkg_resources with importlib equivalents in wheel odule. (#3085) +- Restore explicit LICENSE file. (#5001) +- Removed no longer used build dependency on ``coherent.licensed``. (#5003) + + v80.7.1 ======= diff --git a/newsfragments/3085.feature.rst b/newsfragments/3085.feature.rst deleted file mode 100644 index 24b5659761..0000000000 --- a/newsfragments/3085.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Replaced more references to pkg_resources with importlib equivalents in wheel odule. diff --git a/newsfragments/5001.feature.rst b/newsfragments/5001.feature.rst deleted file mode 100644 index 0bd2ea4a41..0000000000 --- a/newsfragments/5001.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Restore explicit LICENSE file. diff --git a/newsfragments/5003.feature.rst b/newsfragments/5003.feature.rst deleted file mode 100644 index a67e314d3c..0000000000 --- a/newsfragments/5003.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Removed no longer used build dependency on ``coherent.licensed``. diff --git a/pyproject.toml b/pyproject.toml index 48b25b7669..bb06f90f5d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "80.7.1" +version = "80.8.0" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From 0cb446ca40a87227a7c3aadd0bb8369119d75fd2 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 20 May 2025 09:53:36 -0400 Subject: [PATCH 1725/1761] Remove reference to pkg_resources. 
Ref #3085 --- setuptools/tests/test_wheel.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index 70165c608b..ab68ee3106 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -11,6 +11,7 @@ import stat import subprocess import sys +import sysconfig import zipfile from typing import Any @@ -19,7 +20,7 @@ from packaging.tags import parse_tag from packaging.utils import canonicalize_name -from pkg_resources import PY_MAJOR, Distribution, PathMetadata +from pkg_resources import Distribution, PathMetadata from setuptools.wheel import Wheel from .contexts import tempdir @@ -140,7 +141,7 @@ def flatten_tree(tree): def format_install_tree(tree): return { x.format( - py_version=PY_MAJOR, + py_version=sysconfig.get_python_version(), platform=get_platform(), shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO'), ) From 0efe364a4f93b406669764c118795c9492bacd3d Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 20 May 2025 10:26:11 -0400 Subject: [PATCH 1726/1761] Remove test_wheel_install_pep_503. Closes #5005 --- setuptools/tests/test_wheel.py | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index ab68ee3106..1b276bf8a1 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -7,7 +7,6 @@ import inspect import os import pathlib -import shutil import stat import subprocess import sys @@ -18,7 +17,6 @@ import pytest from jaraco import path from packaging.tags import parse_tag -from packaging.utils import canonicalize_name from pkg_resources import Distribution, PathMetadata from setuptools.wheel import Wheel @@ -579,28 +577,6 @@ def test_wheel_install(params): ) -def test_wheel_install_pep_503(): - project_name = 'Foo_Bar' # PEP 503 canonicalized name is "foo-bar" - version = '1.0' - with ( - build_wheel( - name=project_name, - version=version, - ) as filename, - tempdir() as install_dir, - ): - new_filename = filename.replace(project_name, canonicalize_name(project_name)) - shutil.move(filename, new_filename) - _check_wheel_install( - new_filename, - install_dir, - None, - canonicalize_name(project_name), - version, - None, - ) - - def test_wheel_no_dist_dir(): project_name = 'nodistinfo' version = '1.0' From 2aae83b94a4e4b00542233393d85baaffa6043e8 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 20 May 2025 10:12:43 -0400 Subject: [PATCH 1727/1761] Remove reliance on pkg_resources in test_wheel. 
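
importlib.metadata can read an installed egg directly when handed the egg
directory as a path entry, replacing the PathMetadata/Distribution pair.
Roughly, as a sketch (the path below is illustrative):

    from importlib import metadata

    # egg_path is a '*.egg' directory containing an EGG-INFO subdirectory.
    egg_path = '/tmp/target/mypkg-1.0-py3.12.egg'
    (dist,) = metadata.Distribution.discover(path=[egg_path])
    print(dist.metadata['Name'], dist.metadata['Version'])
    print(dist.read_text('requires.txt'))  # None when the file is absent
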
--- setuptools/tests/test_wheel.py | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index 1b276bf8a1..62287501d0 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -18,7 +18,7 @@ from jaraco import path from packaging.tags import parse_tag -from pkg_resources import Distribution, PathMetadata +from setuptools._importlib import metadata from setuptools.wheel import Wheel from .contexts import tempdir @@ -158,15 +158,11 @@ def _check_wheel_install( exp = tree_set(install_dir) assert install_tree.issubset(exp), install_tree - exp - metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - assert dist.project_name == project_name - assert dist.version == version - if requires_txt is None: - assert not dist.has_metadata('requires.txt') - else: - # Order must match to ensure reproducibility. - assert requires_txt == dist.get_metadata('requires.txt').lstrip() + (dist,) = metadata.Distribution.discover(path=[egg_path]) + + assert dist.metadata['Name'] == project_name + assert dist.metadata['Version'] == version + assert dist.read_text('requires.txt') == requires_txt class Record: @@ -405,7 +401,8 @@ def __repr__(self) -> str: extras_require={ 'extra': f'foobar; {sys.platform!r} != sys_platform', }, - requires_txt=DALS( + requires_txt='\n' + + DALS( """ [extra] """ From 0bf2663a19f2d947697997d125c3c880df2011b7 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 20 May 2025 10:29:13 -0400 Subject: [PATCH 1728/1761] Add news fragment. --- newsfragments/3085.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 newsfragments/3085.feature.rst diff --git a/newsfragments/3085.feature.rst b/newsfragments/3085.feature.rst new file mode 100644 index 0000000000..57648f05e2 --- /dev/null +++ b/newsfragments/3085.feature.rst @@ -0,0 +1 @@ +Removed reliance on pkg_resources in test_wheel. From 134e587c0ba0b59e1661f08a45e6d6d1ecd24329 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Tue, 20 May 2025 11:08:53 -0400 Subject: [PATCH 1729/1761] Suppress nitpicky typecheck in pyright. 
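
pyright flags the assertions because the metadata mapping's __getitem__ may
return None for a missing key. The stricter alternative to an inline ignore
would be explicit narrowing, e.g. (a sketch, assuming some installed
distribution such as pip):

    from importlib import metadata

    dist = metadata.distribution('pip')
    name = dist.metadata['Name']
    if name is not None:  # narrows str | None to str for the type checker
        assert name == 'pip'

An inline ignore keeps the test helper terser.
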
--- setuptools/tests/test_wheel.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index 62287501d0..f91465084a 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -160,8 +160,10 @@ def _check_wheel_install( (dist,) = metadata.Distribution.discover(path=[egg_path]) - assert dist.metadata['Name'] == project_name - assert dist.metadata['Version'] == version + # pyright is nitpicky; fine to assume dist.metadata.__getitem__ will fail or return None + # (https://github.com/pypa/setuptools/pull/5006#issuecomment-2894774288) + assert dist.metadata['Name'] == project_name # pyright: ignore # noqa: PGH003 + assert dist.metadata['Version'] == version # pyright: ignore # noqa: PGH003 assert dist.read_text('requires.txt') == requires_txt From 6fef3eda62cac0fca2cb3544e86963165479dcd8 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 28 Jun 2024 19:00:10 +0200 Subject: [PATCH 1730/1761] Enforce ruff/refurb rule FURB118 FURB118 Use `operator.itemgetter(0)` instead of defining a lambda --- setuptools/command/build_ext.py | 3 ++- setuptools/command/build_py.py | 3 ++- setuptools/command/editable_wheel.py | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index af73fff7a5..e944683aaa 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -1,6 +1,7 @@ from __future__ import annotations import itertools +import operator import os import sys import textwrap @@ -323,7 +324,7 @@ def get_outputs(self) -> list[str]: def get_output_mapping(self) -> dict[str, str]: """See :class:`setuptools.commands.build.SubCommand`""" mapping = self._get_output_mapping() - return dict(sorted(mapping, key=lambda x: x[0])) + return dict(sorted(mapping, key=operator.itemgetter(0))) def __get_stubs_outputs(self): # assemble the base name for each extension that needs a stub diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 2f6fcb7cdc..e4d35305e1 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -2,6 +2,7 @@ import fnmatch import itertools +import operator import os import stat import textwrap @@ -147,7 +148,7 @@ def get_output_mapping(self) -> dict[str, str]: self._get_package_data_output_mapping(), self._get_module_mapping(), ) - return dict(sorted(mapping, key=lambda x: x[0])) + return dict(sorted(mapping, key=operator.itemgetter(0))) def _get_module_mapping(self) -> Iterator[tuple[str, str]]: """Iterate over all modules producing (dest, src) pairs.""" diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index c772570817..fa0823cddc 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -14,6 +14,7 @@ import io import logging +import operator import os import shutil import traceback @@ -900,7 +901,7 @@ def _finder_template( """Create a string containing the code for the``MetaPathFinder`` and ``PathEntryFinder``. 
""" - mapping = dict(sorted(mapping.items(), key=lambda p: p[0])) + mapping = dict(sorted(mapping.items(), key=operator.itemgetter(0))) return _FINDER_TEMPLATE.format(name=name, mapping=mapping, namespaces=namespaces) From 690c5e403129848d2ce901a7d9b117ef6fa1cd38 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 28 Jun 2024 19:01:02 +0200 Subject: [PATCH 1731/1761] Enforce ruff/refurb rule FURB142 FURB142 Use of `set.add()` in a for loop --- setuptools/tests/test_wheel.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index f91465084a..50a650d1c1 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -116,8 +116,7 @@ def build_wheel(extra_file_defs=None, **kwargs): def tree_set(root): contents = set() for dirpath, dirnames, filenames in os.walk(root): - for filename in filenames: - contents.add(os.path.join(os.path.relpath(dirpath, root), filename)) + contents.update(os.path.join(os.path.relpath(dirpath, root), filename) for filename in filenames) return contents From 0cc2edff1f3a6f43229394eecb6299e8e0615c9e Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 28 Jun 2024 19:03:12 +0200 Subject: [PATCH 1732/1761] A round of `ruff format` after `ruff check --fix` --- setuptools/tests/test_wheel.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index 50a650d1c1..2f3e8ba5a0 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -116,7 +116,10 @@ def build_wheel(extra_file_defs=None, **kwargs): def tree_set(root): contents = set() for dirpath, dirnames, filenames in os.walk(root): - contents.update(os.path.join(os.path.relpath(dirpath, root), filename) for filename in filenames) + contents.update( + os.path.join(os.path.relpath(dirpath, root), filename) + for filename in filenames + ) return contents From d05ff4c850c53b44d2960571a11174bd0c8e1d97 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 28 Jun 2024 20:03:33 +0200 Subject: [PATCH 1733/1761] Manual improvment --- setuptools/tests/test_wheel.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index 2f3e8ba5a0..caa3ac5aa1 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -114,13 +114,11 @@ def build_wheel(extra_file_defs=None, **kwargs): def tree_set(root): - contents = set() - for dirpath, dirnames, filenames in os.walk(root): - contents.update( - os.path.join(os.path.relpath(dirpath, root), filename) - for filename in filenames - ) - return contents + return set( + os.path.join(os.path.relpath(dirpath, root), filename) + for dirpath, dirnames, filenames in os.walk(root) + for filename in filenames + ) def flatten_tree(tree): From f428f1201b9add97133e5032f090621d8bf13dda Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 28 Jun 2024 19:06:38 +0200 Subject: [PATCH 1734/1761] Enforce ruff/refurb rule FURB167 FURB167 Use of regular expression alias `re.I` FURB167 Use of regular expression alias `re.M` FURB167 Use of regular expression alias `re.S` FURB167 Use of regular expression alias `re.X` --- pkg_resources/__init__.py | 2 
+- setuptools/_normalization.py | 8 ++++---- setuptools/tests/config/test_apply_pyprojecttoml.py | 4 ++-- setuptools/tests/config/test_pyprojecttoml.py | 4 ++-- setuptools/tests/test_dist_info.py | 2 +- tools/finalize.py | 2 +- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 630aaa9a42..b0c7896f74 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -128,7 +128,7 @@ class _ZipLoaderModule(Protocol): __loader__: zipimport.zipimporter -_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I) +_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.IGNORECASE) class PEP440Warning(RuntimeWarning): diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index fb89323c9d..ce119d0fdc 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -9,10 +9,10 @@ import packaging # https://packaging.python.org/en/latest/specifications/core-metadata/#name -_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) -_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9._-]+", re.I) -_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I) -_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I) +_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE) +_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9._-]+", re.IGNORECASE) +_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.IGNORECASE) +_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.IGNORECASE) def safe_identifier(name: str) -> str: diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index 489fd98e26..caeaf48c38 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -41,7 +41,7 @@ def _mock_expand_patterns(patterns, *_, **__): Allow comparing the given patterns for 2 dist objects. We need to strip special chars to avoid errors when validating. 
""" - return [re.sub("[^a-z0-9]+", "", p, flags=re.I) or "empty" for p in patterns] + return [re.sub("[^a-z0-9]+", "", p, flags=re.IGNORECASE) or "empty" for p in patterns] @pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE)) @@ -600,7 +600,7 @@ def test_not_listed_in_dynamic(self, tmp_path, attr, field, value): """Setuptools cannot set a field if not listed in ``dynamic``""" pyproject = self.pyproject(tmp_path, []) dist = makedist(tmp_path, **{attr: value}) - msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S) + msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.DOTALL) with pytest.warns(_MissingDynamic, match=msg): dist = pyprojecttoml.apply_configuration(dist, pyproject) diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py index db40fcd23d..fa63580603 100644 --- a/setuptools/tests/config/test_pyprojecttoml.py +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -207,7 +207,7 @@ def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic): dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic} msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}" - with pytest.raises(OptionError, match=re.compile(msg, re.S)): + with pytest.raises(OptionError, match=re.compile(msg, re.DOTALL)): expand_configuration(self.pyproject(dynamic), tmp_path) @@ -325,7 +325,7 @@ def test_invalid_example(tmp_path, example, error_msg): pyproject = tmp_path / "pyproject.toml" pyproject.write_text(cleandoc(example), encoding="utf-8") - pattern = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.M | re.S) + pattern = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.MULTILINE | re.DOTALL) with pytest.raises(ValueError, match=pattern): read_configuration(pyproject) diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py index 010018d1e6..f65d0afbe4 100644 --- a/setuptools/tests/test_dist_info.py +++ b/setuptools/tests/test_dist_info.py @@ -23,7 +23,7 @@ def test_invalid_version(self, tmp_path): """ config = "[metadata]\nname=proj\nversion=42\n[egg_info]\ntag_build=invalid!!!\n" (tmp_path / "setup.cfg").write_text(config, encoding="utf-8") - msg = re.compile("invalid version", re.M | re.I) + msg = re.compile("invalid version", re.MULTILINE | re.IGNORECASE) proc = run_command_inner("dist_info", cwd=tmp_path, check=False) assert proc.returncode assert msg.search(proc.stdout) diff --git a/tools/finalize.py b/tools/finalize.py index d646e67cd0..27471c7e25 100644 --- a/tools/finalize.py +++ b/tools/finalize.py @@ -37,7 +37,7 @@ def _repair_changelog(): """ changelog_fn = pathlib.Path('NEWS.rst') changelog = changelog_fn.read_text(encoding='utf-8') - fixed = re.sub(r'^(v[0-9.]+)v[0-9.]+$', r'\1', changelog, flags=re.M) + fixed = re.sub(r'^(v[0-9.]+)v[0-9.]+$', r'\1', changelog, flags=re.MULTILINE) changelog_fn.write_text(fixed, encoding='utf-8') subprocess.check_output(['git', 'add', changelog_fn]) From 608cf49373ad5324b9d7dab50da6db3bfa04a064 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 28 Jun 2024 19:07:13 +0200 Subject: [PATCH 1735/1761] A round of `ruff format` after `ruff check --fix` --- pkg_resources/__init__.py | 4 +++- setuptools/_normalization.py | 4 +++- setuptools/tests/config/test_apply_pyprojecttoml.py | 4 +++- setuptools/tests/config/test_pyprojecttoml.py | 4 +++- 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/pkg_resources/__init__.py 
b/pkg_resources/__init__.py index b0c7896f74..1718e2aba6 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -128,7 +128,9 @@ class _ZipLoaderModule(Protocol): __loader__: zipimport.zipimporter -_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.IGNORECASE) +_PEP440_FALLBACK = re.compile( + r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.IGNORECASE +) class PEP440Warning(RuntimeWarning): diff --git a/setuptools/_normalization.py b/setuptools/_normalization.py index ce119d0fdc..9268d21d84 100644 --- a/setuptools/_normalization.py +++ b/setuptools/_normalization.py @@ -12,7 +12,9 @@ _VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE) _UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9._-]+", re.IGNORECASE) _NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.IGNORECASE) -_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.IGNORECASE) +_PEP440_FALLBACK = re.compile( + r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.IGNORECASE +) def safe_identifier(name: str) -> str: diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py index caeaf48c38..71408734cc 100644 --- a/setuptools/tests/config/test_apply_pyprojecttoml.py +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -41,7 +41,9 @@ def _mock_expand_patterns(patterns, *_, **__): Allow comparing the given patterns for 2 dist objects. We need to strip special chars to avoid errors when validating. """ - return [re.sub("[^a-z0-9]+", "", p, flags=re.IGNORECASE) or "empty" for p in patterns] + return [ + re.sub("[^a-z0-9]+", "", p, flags=re.IGNORECASE) or "empty" for p in patterns + ] @pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE)) diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py index fa63580603..6d995d23af 100644 --- a/setuptools/tests/config/test_pyprojecttoml.py +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -325,7 +325,9 @@ def test_invalid_example(tmp_path, example, error_msg): pyproject = tmp_path / "pyproject.toml" pyproject.write_text(cleandoc(example), encoding="utf-8") - pattern = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.MULTILINE | re.DOTALL) + pattern = re.compile( + f"invalid pyproject.toml.*{error_msg}.*", re.MULTILINE | re.DOTALL + ) with pytest.raises(ValueError, match=pattern): read_configuration(pyproject) From f2c7c6379c49ee92cebadeb51d3d086d8a242f25 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Tue, 31 Dec 2024 12:05:18 +0100 Subject: [PATCH 1736/1761] Apply ruff/refurb rule FURB188 FURB188 Prefer `removesuffix` over conditionally replacing with slice. --- setuptools/command/bdist_egg.py | 3 +-- setuptools/command/editable_wheel.py | 2 +- setuptools/command/egg_info.py | 3 +-- setuptools/dist.py | 3 +-- 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index b66020c863..957b88f3a3 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -35,8 +35,7 @@ def _get_purelib(): def strip_module(filename): if '.' 
in filename:
         filename = os.path.splitext(filename)[0]
-    if filename.endswith('module'):
-        filename = filename[:-6]
+    filename = filename.removesuffix('module')
     return filename
 
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index fa0823cddc..c04861c616 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -660,7 +660,7 @@ def _parent_path(pkg, pkg_path):
     >>> _parent_path("b", "src/c")
     'src/c'
     """
-    parent = pkg_path[: -len(pkg)] if pkg_path.endswith(pkg) else pkg_path
+    parent = pkg_path.removesuffix(pkg)
     return parent.rstrip("/" + os.sep)
 
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 7e00ae2cea..e8a26cd3a7 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -475,8 +475,7 @@ def global_exclude(self, pattern):
         return self._remove_files(match.match)
 
     def append(self, item) -> None:
-        if item.endswith('\r'):  # Fix older sdists built on Windows
-            item = item[:-1]
+        item = item.removesuffix('\r')
         path = convert_path(item)
 
         if self._safe_path(path):
diff --git a/setuptools/dist.py b/setuptools/dist.py
index f06298c868..5eab94e333 100644
--- a/setuptools/dist.py
+++ b/setuptools/dist.py
@@ -1062,8 +1062,7 @@ def iter_distribution_names(self):
                 name, _buildinfo = ext
             else:
                 name = ext.name
-            if name.endswith('module'):
-                name = name[:-6]
+            name = name.removesuffix('module')
             yield name
 
     def handle_display_options(self, option_order):
From 43b5ffbc20f272063f275b0939ea3ebf43ff6915 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 28 Jun 2024 19:07:44 +0200
Subject: [PATCH 1737/1761] Enforce ruff/refurb rule FURB192

FURB192 Prefer `max` over `sorted()` to compute the maximum value in a sequence

---
 setuptools/msvc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 313a781ae0..e9b7d1b2af 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -393,7 +393,7 @@ def _find_latest_available_vs_ver(self):
 
         vc_vers = set(reg_vc_vers)
         vc_vers.update(self.known_vs_paths)
-        return sorted(vc_vers)[-1]
+        return max(vc_vers)
 
     def find_reg_vs_vers(self):
         """
From ec52299314b37316fb31c6bcab06ee50cadccac5 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 1 Jan 2025 13:47:04 +0100
Subject: [PATCH 1738/1761] Manual improvement

Co-authored-by: Avasam

---
 setuptools/tests/test_wheel.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py
index caa3ac5aa1..a7a71c8fc3 100644
--- a/setuptools/tests/test_wheel.py
+++ b/setuptools/tests/test_wheel.py
@@ -114,11 +114,11 @@ def build_wheel(extra_file_defs=None, **kwargs):
 
 def tree_set(root):
-    return set(
+    return {
         os.path.join(os.path.relpath(dirpath, root), filename)
         for dirpath, dirnames, filenames in os.walk(root)
-    )
+    }
 

From bf7a50a7ff09303ac44ccc183899be3af3bde4f7 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 3 Jan 2025 20:17:11 +0100
Subject: [PATCH 1739/1761] Enforce ruff/refurb rules (FURB)

---
 ruff.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ruff.toml b/ruff.toml
index 349d22cebd..2d69c33894 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -26,6 +26,7 @@ extend-select = [
 
 	# local
 	"ANN2", # missing-return-type-*
+	
"FURB", # refurb "PERF", # Perflint "PGH", # pygrep-hooks (blanket-* rules) "PT", # flake8-pytest-style From 5fce380bfb58544c15b5e93ffb77ab70b7b0b115 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Fri, 23 May 2025 18:03:46 +0100 Subject: [PATCH 1740/1761] Use URL with detailed clarifications in deprecation warning --- setuptools/dist.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/setuptools/dist.py b/setuptools/dist.py index f06298c868..6474f54122 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -635,8 +635,11 @@ def _enforce_underscore(self, opt: str, section: str) -> str: Usage of dash-separated {opt!r} will not be supported in future versions. Please use the underscore name {underscore_opt!r} instead. {affected} + + Available configuration options are listed in: + https://setuptools.pypa.io/en/latest/userguide/declarative_config.html """, - see_docs="userguide/declarative_config.html", + see_url="https://github.com/pypa/setuptools/discussions/5011", due_date=(2026, 3, 3), # Warning initially introduced in 3 Mar 2021 ) @@ -655,8 +658,11 @@ def _enforce_option_lowercase(self, opt: str, section: str) -> str: Usage of uppercase key {opt!r} in {section!r} will not be supported in future versions. Please use lowercase {lowercase_opt!r} instead. {affected} + + Available configuration options are listed in: + https://setuptools.pypa.io/en/latest/userguide/declarative_config.html """, - see_docs="userguide/declarative_config.html", + see_url="https://github.com/pypa/setuptools/discussions/5011", due_date=(2026, 3, 3), # Warning initially introduced in 6 Mar 2021 ) From bea8c1c05a1ed9210f1ad995e40aa26df54795ca Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 25 May 2025 10:49:35 +0200 Subject: [PATCH 1741/1761] Add comment back. 
Co-authored-by: Avasam --- setuptools/command/egg_info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index e8a26cd3a7..99171f373d 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -475,7 +475,7 @@ def global_exclude(self, pattern): return self._remove_files(match.match) def append(self, item) -> None: - item = item.removesuffix('\r') + item = item.removesuffix('\r') # Fix older sdists built on Windows path = convert_path(item) if self._safe_path(path): From 64bf9d0ce88a09748f702bd7736d9ec2879aa6ef Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 25 May 2025 10:44:30 +0200 Subject: [PATCH 1742/1761] Enforce ruff/flake8-implicit-str-concat rules (ISC) --- ruff.toml | 1 + setuptools/command/install.py | 2 +- setuptools/command/sdist.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/ruff.toml b/ruff.toml index 349d22cebd..83a6f173a3 100644 --- a/ruff.toml +++ b/ruff.toml @@ -26,6 +26,7 @@ extend-select = [ # local "ANN2", # missing-return-type-* + "ISC", # flake8-implicit-str-concat "PERF", # Perflint "PGH", # pygrep-hooks (blanket-* rules) "PT", # flake8-pytest-style diff --git a/setuptools/command/install.py b/setuptools/command/install.py index fd73816543..19ca601458 100644 --- a/setuptools/command/install.py +++ b/setuptools/command/install.py @@ -22,7 +22,7 @@ def __getattr__(name: str): # pragma: no cover if name == "_install": SetuptoolsDeprecationWarning.emit( "`setuptools.command._install` was an internal implementation detail " - + "that was left in for numpy<1.9 support.", + "that was left in for numpy<1.9 support.", due_date=(2025, 5, 2), # Originally added on 2024-11-01 ) return orig.install diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 9631cf3114..1aed1d5e4e 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -30,7 +30,7 @@ class sdist(orig.sdist): ( 'keep-temp', 'k', - "keep the distribution tree around after creating " + "archive file(s)", + "keep the distribution tree around after creating archive file(s)", ), ( 'dist-dir=', From 9c28cdffd423f83e43dbfd39fc793c251da48585 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 25 May 2025 10:20:55 -0400 Subject: [PATCH 1743/1761] Set a deadline for the removal of pkg_resources later this year (December). Ref #3085 --- newsfragments/3085.feature.1.rst | 1 + pkg_resources/__init__.py | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 newsfragments/3085.feature.1.rst diff --git a/newsfragments/3085.feature.1.rst b/newsfragments/3085.feature.1.rst new file mode 100644 index 0000000000..eb7f5051c3 --- /dev/null +++ b/newsfragments/3085.feature.1.rst @@ -0,0 +1 @@ +Set a deadline for the removal of pkg_resources later this year (December). diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index 630aaa9a42..926765b887 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -97,8 +97,11 @@ warnings.warn( "pkg_resources is deprecated as an API. " - "See https://setuptools.pypa.io/en/latest/pkg_resources.html", - DeprecationWarning, + "See https://setuptools.pypa.io/en/latest/pkg_resources.html. " + "The pkg_resources package is slated for removal as early as " + "2025-11-30. 
Refrain from using this package or pin to " + "Setuptools<81.", + UserWarning, stacklevel=2, ) From 3b0bf5bd43034c448a10e7102788fe710b4bb496 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 26 May 2025 20:14:38 -0400 Subject: [PATCH 1744/1761] Adjust ignore --- pytest.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytest.ini b/pytest.ini index b6efa6f918..e315bd296c 100644 --- a/pytest.ini +++ b/pytest.ini @@ -84,8 +84,8 @@ filterwarnings= # Avoid errors when testing pkg_resources.declare_namespace ignore:.*pkg_resources\.declare_namespace.*:DeprecationWarning - # suppress known deprecation - ignore:pkg_resources is deprecated:DeprecationWarning + # suppress known deprecation pypa/setuptools#3085 + ignore:pkg_resources is deprecated:UserWarning # Dependencies might not have been updated yet default:onerror argument is deprecated, use onexc instead From 9c4d383631d3951fcae0afd73b5d08ff5a262976 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Mon, 26 May 2025 20:38:39 -0400 Subject: [PATCH 1745/1761] =?UTF-8?q?Bump=20version:=2080.8.0=20=E2=86=92?= =?UTF-8?q?=2080.9.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- NEWS.rst | 10 ++++++++++ newsfragments/3085.feature.1.rst | 1 - newsfragments/3085.feature.rst | 1 - pyproject.toml | 2 +- 5 files changed, 12 insertions(+), 4 deletions(-) delete mode 100644 newsfragments/3085.feature.1.rst delete mode 100644 newsfragments/3085.feature.rst diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 03e462f5a4..0cb5e7c880 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 80.8.0 +current_version = 80.9.0 commit = True tag = True diff --git a/NEWS.rst b/NEWS.rst index abfa30feab..ccdd3c1971 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -1,3 +1,13 @@ +v80.9.0 +======= + +Features +-------- + +- Set a deadline for the removal of pkg_resources later this year (December). (#3085) +- Removed reliance on pkg_resources in test_wheel. (#3085) + + v80.8.0 ======= diff --git a/newsfragments/3085.feature.1.rst b/newsfragments/3085.feature.1.rst deleted file mode 100644 index eb7f5051c3..0000000000 --- a/newsfragments/3085.feature.1.rst +++ /dev/null @@ -1 +0,0 @@ -Set a deadline for the removal of pkg_resources later this year (December). diff --git a/newsfragments/3085.feature.rst b/newsfragments/3085.feature.rst deleted file mode 100644 index 57648f05e2..0000000000 --- a/newsfragments/3085.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Removed reliance on pkg_resources in test_wheel. 
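The two patches above recategorize the pkg_resources deprecation notice from ``DeprecationWarning`` to ``UserWarning``, so downstream test suites that filtered the old category need a matching update, as in the pytest.ini hunk. A minimal sketch of the equivalent programmatic filter, assuming a hypothetical downstream ``conftest.py``::

    import warnings

    # The notice is a UserWarning as of Setuptools 80.9.0; the message
    # argument is a regex matched against the start of the warning text.
    warnings.filterwarnings(
        "ignore",
        message="pkg_resources is deprecated",
        category=UserWarning,
    )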
diff --git a/pyproject.toml b/pyproject.toml index bb06f90f5d..5988fed8cd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ backend-path = ["."] [project] name = "setuptools" -version = "80.8.0" +version = "80.9.0" authors = [ { name = "Python Packaging Authority", email = "distutils-sig@python.org" }, ] From 0510646d94597e098be6f59b953e284396ccbfc6 Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 May 2025 11:17:55 -0400 Subject: [PATCH 1746/1761] Type `setuptools/msvc.py` dir methods and properties --- setuptools/msvc.py | 74 ++++++++++++++++++++++++++++------------------ 1 file changed, 45 insertions(+), 29 deletions(-) diff --git a/setuptools/msvc.py b/setuptools/msvc.py index 313a781ae0..4fc529596d 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -17,6 +17,10 @@ from more_itertools import unique_everseen +from setuptools.compat import py310 + +from ._path import StrPath + import distutils.errors if TYPE_CHECKING: @@ -135,7 +139,7 @@ def target_dir(self, hidex86=False, x64=False) -> str: else rf'\{self.target_cpu}' ) - def cross_dir(self, forcex86=False): + def cross_dir(self, forcex86=False) -> str: r""" Cross platform specific subfolder. @@ -306,7 +310,7 @@ def microsoft(self, key, x86=False): node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node' return os.path.join('Software', node64, 'Microsoft', key) - def lookup(self, key, name): + def lookup(self, key: str, name: str) -> str | None: """ Look for values in registry in Microsoft software registry. @@ -319,7 +323,7 @@ def lookup(self, key, name): Return ------ - str + str | None value """ key_read = winreg.KEY_READ @@ -366,7 +370,7 @@ class SystemInfo: ProgramFiles = environ.get('ProgramFiles', '') ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles) - def __init__(self, registry_info, vc_ver=None) -> None: + def __init__(self, registry_info: RegistryInfo, vc_ver=None) -> None: self.ri = registry_info self.pi = self.ri.pi @@ -486,7 +490,7 @@ def _as_float_version(version): return float('.'.join(version.split('.')[:2])) @property - def VSInstallDir(self): + def VSInstallDir(self) -> str: """ Microsoft Visual Studio directory. @@ -504,7 +508,7 @@ def VSInstallDir(self): return self.ri.lookup(self.ri.vs, f'{self.vs_ver:0.1f}') or default @property - def VCInstallDir(self): + def VCInstallDir(self) -> str: """ Microsoft Visual C++ directory. @@ -608,7 +612,7 @@ def WindowsSdkLastVersion(self): return self._use_last_dir_name(os.path.join(self.WindowsSdkDir, 'lib')) @property - def WindowsSdkDir(self) -> str | None: # noqa: C901 # is too complex (12) # FIXME + def WindowsSdkDir(self) -> str: # noqa: C901 # is too complex (12) # FIXME """ Microsoft Windows SDK directory. @@ -651,13 +655,13 @@ def WindowsSdkDir(self) -> str | None: # noqa: C901 # is too complex (12) # F return sdkdir @property - def WindowsSDKExecutablePath(self): + def WindowsSDKExecutablePath(self) -> str | None: """ Microsoft Windows SDK executable directory. Return ------ - str + str | None path """ # Find WinSDK NetFx Tools registry dir name @@ -688,7 +692,7 @@ def WindowsSDKExecutablePath(self): return None @property - def FSharpInstallDir(self): + def FSharpInstallDir(self) -> str: """ Microsoft Visual F# directory. @@ -701,13 +705,13 @@ def FSharpInstallDir(self): return self.ri.lookup(path, 'productdir') or '' @property - def UniversalCRTSdkDir(self): + def UniversalCRTSdkDir(self) -> str | None: """ Microsoft Universal CRT SDK directory. 
Return ------ - str + str | None path """ # Set Kit Roots versions for specified MSVC++ version @@ -717,12 +721,12 @@ def UniversalCRTSdkDir(self): for ver in vers: sdkdir = self.ri.lookup(self.ri.windows_kits_roots, f'kitsroot{ver}') if sdkdir: - return sdkdir or '' + return sdkdir return None @property - def UniversalCRTSdkLastVersion(self): + def UniversalCRTSdkLastVersion(self) -> str: """ Microsoft Universal C Runtime SDK last version. @@ -731,7 +735,11 @@ def UniversalCRTSdkLastVersion(self): str version """ - return self._use_last_dir_name(os.path.join(self.UniversalCRTSdkDir, 'lib')) + try: + return self._use_last_dir_name(os.path.join(self.UniversalCRTSdkDir, 'lib')) # type: ignore[arg-type] # Expected TypeError + except TypeError as ex: + py310.add_note(ex, "Cannot find UniversalCRTSdkDir") + raise @property def NetFxSdkVersion(self): @@ -751,16 +759,16 @@ def NetFxSdkVersion(self): ) @property - def NetFxSdkDir(self): + def NetFxSdkDir(self) -> str | None: """ Microsoft .NET Framework SDK directory. Return ------ - str + str | None path """ - sdkdir = '' + sdkdir: str | None = '' for ver in self.NetFxSdkVersion: loc = os.path.join(self.ri.netfx_sdk, ver) sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') @@ -769,7 +777,7 @@ def NetFxSdkDir(self): return sdkdir @property - def FrameworkDir32(self): + def FrameworkDir32(self) -> str: """ Microsoft .NET Framework 32bit directory. @@ -785,7 +793,7 @@ def FrameworkDir32(self): return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw @property - def FrameworkDir64(self): + def FrameworkDir64(self) -> str: """ Microsoft .NET Framework 64bit directory. @@ -855,13 +863,13 @@ def _find_dot_net_versions(self, bits) -> tuple[str, ...]: return () @staticmethod - def _use_last_dir_name(path, prefix=''): + def _use_last_dir_name(path: StrPath, prefix: str = '') -> str: """ Return name of the last dir in path or '' if no dir found. Parameters ---------- - path: str + path: StrPath Use dirs in this path prefix: str Use only dirs starting by this prefix @@ -877,7 +885,7 @@ def _use_last_dir_name(path, prefix=''): if os.path.isdir(os.path.join(path, dir_name)) and dir_name.startswith(prefix) ) - return next(matching_dirs, None) or '' + return next(matching_dirs, '') class _EnvironmentDict(TypedDict): @@ -1200,7 +1208,7 @@ def _sdk_tools(self): yield self.si.WindowsSDKExecutablePath @property - def _sdk_subdir(self): + def _sdk_subdir(self) -> str: """ Microsoft Windows SDK version subdir. @@ -1345,7 +1353,7 @@ def HTMLHelpWorkshop(self): return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')] @property - def UCRTLibraries(self): + def UCRTLibraries(self) -> list[str]: """ Microsoft Universal C Runtime SDK Libraries. @@ -1358,12 +1366,16 @@ def UCRTLibraries(self): return [] arch_subdir = self.pi.target_dir(x64=True) - lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib') + try: + lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib') # type: ignore[arg-type] # Expected TypeError + except TypeError as ex: + py310.add_note(ex, "Cannot find UniversalCRTSdkDir") + raise ucrtver = self._ucrt_subdir return [os.path.join(lib, f'{ucrtver}ucrt{arch_subdir}')] @property - def UCRTIncludes(self): + def UCRTIncludes(self) -> list[str]: """ Microsoft Universal C Runtime SDK Include. 
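The ``py310.add_note`` calls in this patch attach a hint to the ``TypeError`` that ``os.path.join`` raises when ``UniversalCRTSdkDir`` resolves to ``None``. A minimal sketch of that pattern, assuming a shim that degrades gracefully before Python 3.11::

    import os

    def join_sdk_path(sdkdir, part):
        try:
            return os.path.join(sdkdir, part)  # TypeError if sdkdir is None
        except TypeError as ex:
            if hasattr(ex, "add_note"):  # BaseException.add_note, Python 3.11+
                ex.add_note("Cannot find UniversalCRTSdkDir")
            raise

``join_sdk_path`` is a hypothetical helper; the actual patch inlines the try/except at each call site.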
@@ -1375,11 +1387,15 @@ def UCRTIncludes(self): if self.vs_ver < 14.0: return [] - include = os.path.join(self.si.UniversalCRTSdkDir, 'include') + try: + include = os.path.join(self.si.UniversalCRTSdkDir, 'include') # type: ignore[arg-type] # Expected TypeError + except TypeError as ex: + py310.add_note(ex, "Cannot find UniversalCRTSdkDir") + raise return [os.path.join(include, f'{self._ucrt_subdir}ucrt')] @property - def _ucrt_subdir(self): + def _ucrt_subdir(self) -> str: """ Microsoft Universal C Runtime SDK version subdir. From 3ef799712e58a023ac24ae76aa95539d9730f00c Mon Sep 17 00:00:00 2001 From: Avasam Date: Tue, 27 May 2025 12:34:11 -0400 Subject: [PATCH 1747/1761] Use relative paths Co-authored-by: Anderson Bravalheri --- setuptools/msvc.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setuptools/msvc.py b/setuptools/msvc.py index 4fc529596d..698b23d7af 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -17,9 +17,8 @@ from more_itertools import unique_everseen -from setuptools.compat import py310 - from ._path import StrPath +from .compat import py310 import distutils.errors From 5a26893add9de0bcc9d6ba4a220ebc64165b0c3d Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 28 May 2025 12:08:35 -0400 Subject: [PATCH 1748/1761] Merge typeshed return annotations --- setuptools/__init__.py | 9 ++++--- setuptools/build_meta.py | 32 +++++++++++++--------- setuptools/command/bdist_egg.py | 22 +++++++++------ setuptools/command/build_ext.py | 4 +-- setuptools/command/build_py.py | 8 +++--- setuptools/command/develop.py | 11 +++++--- setuptools/command/egg_info.py | 6 +++-- setuptools/command/sdist.py | 5 ++-- setuptools/command/setopt.py | 4 +-- setuptools/command/test.py | 8 +++--- setuptools/config/expand.py | 4 +-- setuptools/config/setupcfg.py | 8 +++--- setuptools/dist.py | 4 +-- setuptools/extension.py | 10 ++++--- setuptools/installer.py | 4 +-- setuptools/monkey.py | 4 +-- setuptools/msvc.py | 48 ++++++++++++++++++--------------- setuptools/wheel.py | 3 ++- 18 files changed, 113 insertions(+), 81 deletions(-) diff --git a/setuptools/__init__.py b/setuptools/__init__.py index f1b9bfe9b8..3c2297e569 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -108,11 +108,12 @@ def _fetch_build_eggs(dist: Distribution): raise -def setup(**attrs): +def setup(**attrs) -> Distribution: logging.configure() # Make sure we have any requirements needed to interpret 'attrs'. _install_setup_requires(attrs) - return distutils.core.setup(**attrs) + # Override return type of distutils.core.Distribution with setuptools.dist.Distribution + return distutils.core.setup(**attrs) # type: ignore[return-value] setup.__doc__ = distutils.core.setup.__doc__ @@ -176,14 +177,14 @@ def __init__(self, dist: Distribution, **kw) -> None: @overload def reinitialize_command( self, command: str, reinit_subcommands: bool = False, **kw - ) -> _Command: ... + ) -> Command: ... # override distutils.cmd.Command with setuptools.Command @overload def reinitialize_command( self, command: _CommandT, reinit_subcommands: bool = False, **kw ) -> _CommandT: ... 
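# A minimal sketch of the @overload pattern shown above, with hypothetical
# names: a str key resolves to the common base type, while a concrete
# instance round-trips as its own type.
#
#     from typing import TypeVar, overload
#
#     _CmdT = TypeVar("_CmdT", bound=Command)
#
#     @overload
#     def resolve(cmd: str) -> Command: ...
#     @overload
#     def resolve(cmd: _CmdT) -> _CmdT: ...
#     def resolve(cmd):
#         return registry[cmd] if isinstance(cmd, str) else cmd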
def reinitialize_command( self, command: str | _Command, reinit_subcommands: bool = False, **kw - ) -> _Command: + ) -> Command | _Command: cmd = _Command.reinitialize_command(self, command, reinit_subcommands) vars(cmd).update(kw) return cmd # pyright: ignore[reportReturnType] # pypa/distutils#307 diff --git a/setuptools/build_meta.py b/setuptools/build_meta.py index 8f2e930c73..0dc04f6cbb 100644 --- a/setuptools/build_meta.py +++ b/setuptools/build_meta.py @@ -39,7 +39,7 @@ import warnings from collections.abc import Iterable, Iterator, Mapping from pathlib import Path -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING, NoReturn, Union import setuptools @@ -74,14 +74,14 @@ def __init__(self, specifiers) -> None: class Distribution(setuptools.dist.Distribution): - def fetch_build_eggs(self, specifiers): + def fetch_build_eggs(self, specifiers) -> NoReturn: specifier_list = list(parse_strings(specifiers)) raise SetupRequirementsError(specifier_list) @classmethod @contextlib.contextmanager - def patch(cls): + def patch(cls) -> Iterator[None]: """ Replace distutils.dist.Distribution with this class @@ -304,7 +304,7 @@ def _get_build_requires( return requirements - def run_setup(self, setup_script: str = 'setup.py'): + def run_setup(self, setup_script: str = 'setup.py') -> None: # Note that we can reuse our build directory between calls # Correctness comes first, then optimization later __file__ = os.path.abspath(setup_script) @@ -327,10 +327,14 @@ def run_setup(self, setup_script: str = 'setup.py'): "setup-py-deprecated.html", ) - def get_requires_for_build_wheel(self, config_settings: _ConfigSettings = None): + def get_requires_for_build_wheel( + self, config_settings: _ConfigSettings = None + ) -> list[str]: return self._get_build_requires(config_settings, requirements=[]) - def get_requires_for_build_sdist(self, config_settings: _ConfigSettings = None): + def get_requires_for_build_sdist( + self, config_settings: _ConfigSettings = None + ) -> list[str]: return self._get_build_requires(config_settings, requirements=[]) def _bubble_up_info_directory( @@ -361,7 +365,7 @@ def _find_info_directory(self, metadata_directory: StrPath, suffix: str) -> Path def prepare_metadata_for_build_wheel( self, metadata_directory: StrPath, config_settings: _ConfigSettings = None - ): + ) -> str: sys.argv = [ *sys.argv[:1], *self._global_args(config_settings), @@ -417,7 +421,7 @@ def build_wheel( wheel_directory: StrPath, config_settings: _ConfigSettings = None, metadata_directory: StrPath | None = None, - ): + ) -> str: def _build(cmd: list[str]): with suppress_known_deprecation(): return self._build_with_temp_dir( @@ -442,7 +446,7 @@ def _build(cmd: list[str]): def build_sdist( self, sdist_directory: StrPath, config_settings: _ConfigSettings = None - ): + ) -> str: return self._build_with_temp_dir( ['sdist', '--formats', 'gztar'], '.tar.gz', sdist_directory, config_settings ) @@ -459,7 +463,7 @@ def build_editable( wheel_directory: StrPath, config_settings: _ConfigSettings = None, metadata_directory: StrPath | None = None, - ): + ) -> str: # XXX can or should we hide our editable_wheel command normally? 
info_dir = self._get_dist_info_dir(metadata_directory) opts = ["--dist-info-dir", info_dir] if info_dir else [] @@ -469,12 +473,14 @@ def build_editable( cmd, ".whl", wheel_directory, config_settings ) - def get_requires_for_build_editable(self, config_settings: _ConfigSettings = None): + def get_requires_for_build_editable( + self, config_settings: _ConfigSettings = None + ) -> list[str]: return self.get_requires_for_build_wheel(config_settings) def prepare_metadata_for_build_editable( self, metadata_directory: StrPath, config_settings: _ConfigSettings = None - ): + ) -> str: return self.prepare_metadata_for_build_wheel( metadata_directory, config_settings ) @@ -492,7 +498,7 @@ class _BuildMetaLegacyBackend(_BuildMetaBackend): and will eventually be removed. """ - def run_setup(self, setup_script: str = 'setup.py'): + def run_setup(self, setup_script: str = 'setup.py') -> None: # In order to maintain compatibility with scripts assuming that # the setup.py script is in a directory on the PYTHONPATH, inject # '' into sys.path. (pypa/setuptools#1642) diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index b66020c863..40aa0d4e87 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -9,19 +9,21 @@ import re import sys import textwrap +from collections.abc import Iterator from sysconfig import get_path, get_platform, get_python_version from types import CodeType -from typing import TYPE_CHECKING, Literal +from typing import TYPE_CHECKING, AnyStr, Literal from setuptools import Command from setuptools.extension import Library -from .._path import StrPathT, ensure_directory +from .._path import StrPath, StrPathT, ensure_directory from distutils import log from distutils.dir_util import mkpath, remove_tree if TYPE_CHECKING: + from _typeshed import GenericPath from typing_extensions import TypeAlias # Same as zipfile._ZipFileMode from typeshed @@ -40,7 +42,9 @@ def strip_module(filename): return filename -def sorted_walk(dir): +def sorted_walk( + dir: GenericPath[AnyStr], +) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: """Do os.walk in a reproducible way, independent of indeterministic filesystem readdir order """ @@ -161,7 +165,7 @@ def call_command(self, cmdname, **kw): self.run_command(cmdname) return cmd - def run(self): # noqa: C901 # is too complex (14) # FIXME + def run(self) -> None: # noqa: C901 # is too complex (14) # FIXME # Generate metadata first self.run_command("egg_info") # We run install_lib before install_data, because some data hacks @@ -232,7 +236,7 @@ def run(self): # noqa: C901 # is too complex (14) # FIXME self.egg_output, archive_root, verbose=self.verbose, - dry_run=self.dry_run, + dry_run=self.dry_run, # type: ignore[arg-type] # Is an actual boolean in vendored _distutils mode=self.gen_header(), ) if not self.keep_temp: @@ -245,7 +249,7 @@ def run(self): # noqa: C901 # is too complex (14) # FIXME self.egg_output, )) - def zap_pyfiles(self): + def zap_pyfiles(self) -> None: log.info("Removing .py files from temporary directory") for base, dirs, files in walk_egg(self.bdist_dir): for name in files: @@ -260,6 +264,8 @@ def zap_pyfiles(self): pattern = r'(?P.+)\.(?P[^.]+)\.pyc' m = re.match(pattern, name) + # We shouldn't find any non-pyc files in __pycache__ + assert m is not None path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc') log.info(f"Renaming file from [{path_old}] to [{path_new}]") try: @@ -323,7 +329,7 @@ def get_ext_outputs(self): NATIVE_EXTENSIONS: dict[str, None] = 
dict.fromkeys('.dll .so .dylib .pyd'.split()) -def walk_egg(egg_dir): +def walk_egg(egg_dir: StrPath) -> Iterator[tuple[str, list[str], list[str]]]: """Walk an unpacked egg's contents, skipping the metadata directory""" walker = sorted_walk(egg_dir) base, dirs, files = next(walker) @@ -409,7 +415,7 @@ def scan_module(egg_dir, base, name, stubs): return safe -def iter_symbols(code): +def iter_symbols(code: CodeType) -> Iterator[str]: """Yield names and strings used by `code` and its nested code objects""" yield from code.co_names for const in code.co_consts: diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index af73fff7a5..f2f38e9f72 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -90,7 +90,7 @@ class build_ext(_build_ext): editable_mode = False inplace = False - def run(self): + def run(self) -> None: """Build extensions in build directory, then copy if --inplace""" old_inplace, self.inplace = self.inplace, False _build_ext.run(self) @@ -220,7 +220,7 @@ def finalize_options(self) -> None: if self.editable_mode: self.inplace = True - def setup_shlib_compiler(self): + def setup_shlib_compiler(self) -> None: compiler = self.shlib_compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 2f6fcb7cdc..339699dbbc 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -41,7 +41,7 @@ class build_py(orig.build_py): editable_mode: bool = False existing_egg_info_dir: StrPath | None = None #: Private API, internal use only. - def finalize_options(self): + def finalize_options(self) -> None: orig.build_py.finalize_options(self) self.package_data = self.distribution.package_data self.exclude_package_data = self.distribution.exclude_package_data or {} @@ -93,7 +93,7 @@ def _get_data_files(self): self.analyze_manifest() return list(map(self._get_pkg_data_files, self.packages or ())) - def get_data_files_without_manifest(self): + def get_data_files_without_manifest(self) -> list[tuple[str, str, str, list[str]]]: """ Generate list of ``(package,src_dir,build_dir,filenames)`` tuples, but without triggering any attempt to analyze or build the manifest. 
@@ -103,7 +103,7 @@ def get_data_files_without_manifest(self): self.__dict__.setdefault('manifest_files', {}) return list(map(self._get_pkg_data_files, self.packages or ())) - def _get_pkg_data_files(self, package): + def _get_pkg_data_files(self, package: str) -> tuple[str, str, str, list[str]]: # Locate package source directory src_dir = self.get_package_dir(package) @@ -272,7 +272,7 @@ def initialize_options(self): self.editable_mode = False self.existing_egg_info_dir = None - def get_package_dir(self, package): + def get_package_dir(self, package: str) -> str: res = orig.build_py.get_package_dir(self, package) if self.distribution.src_root is not None: return os.path.join(self.distribution.src_root, res) diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py index 1f704fcee8..8bf785826b 100644 --- a/setuptools/command/develop.py +++ b/setuptools/command/develop.py @@ -1,6 +1,7 @@ import site import subprocess import sys +from typing import cast from setuptools import Command from setuptools.warnings import SetuptoolsDeprecationWarning @@ -27,18 +28,20 @@ class develop(Command): prefix = None index_url = None - def run(self): - cmd = ( + def run(self) -> None: # type: ignore[override] # Not including easy_install's show_deprecation argument + # Casting because mypy doesn't understand bool mult conditionals + cmd = cast( + list[str], [sys.executable, '-m', 'pip', 'install', '-e', '.', '--use-pep517'] + ['--target', self.install_dir] * bool(self.install_dir) + ['--no-deps'] * self.no_deps + ['--user'] * self.user + ['--prefix', self.prefix] * bool(self.prefix) - + ['--index-url', self.index_url] * bool(self.index_url) + + ['--index-url', self.index_url] * bool(self.index_url), ) subprocess.check_call(cmd) - def initialize_options(self): + def initialize_options(self) -> None: DevelopDeprecationWarning.emit() def finalize_options(self) -> None: diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index 7e00ae2cea..2e40fcf06c 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -2,6 +2,8 @@ Create a distribution's .egg-info directory and contents""" +from __future__ import annotations + import functools import os import re @@ -196,11 +198,11 @@ def initialize_options(self): # allow the 'tag_svn_revision' to be detected and # set, supporting sdists built on older Setuptools. 
@property - def tag_svn_revision(self) -> None: + def tag_svn_revision(self) -> int | None: pass @tag_svn_revision.setter - def tag_svn_revision(self, value): + def tag_svn_revision(self, value) -> None: pass #################################### diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 1aed1d5e4e..17279ac421 100644 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -3,6 +3,7 @@ import contextlib import os import re +from collections.abc import Iterator from itertools import chain from typing import ClassVar @@ -16,7 +17,7 @@ _default_revctrl = list -def walk_revctrl(dirname=''): +def walk_revctrl(dirname='') -> Iterator: """Find all files under revision control""" for ep in metadata.entry_points(group='setuptools.file_finders'): yield from ep.load()(dirname) @@ -195,7 +196,7 @@ def _manifest_is_not_generated(self): first_line = fp.readline() return first_line != b'# file GENERATED by distutils, do NOT edit\n' - def read_manifest(self): + def read_manifest(self) -> None: """Read the manifest file (named by 'self.manifest') and use it to fill in 'self.filelist', the list of files to include in the source distribution. diff --git a/setuptools/command/setopt.py b/setuptools/command/setopt.py index 678a0593d6..43cb593999 100644 --- a/setuptools/command/setopt.py +++ b/setuptools/command/setopt.py @@ -27,7 +27,7 @@ def config_file(kind="local"): raise ValueError("config_file() type must be 'local', 'global', or 'user'", kind) -def edit_config(filename, settings, dry_run=False): +def edit_config(filename, settings, dry_run=False) -> None: """Edit a configuration file to include `settings` `settings` is a dictionary of dictionaries or ``None`` values, keyed by @@ -88,7 +88,7 @@ def initialize_options(self): self.user_config = None self.filename = None - def finalize_options(self): + def finalize_options(self) -> None: filenames = [] if self.global_config: filenames.append(config_file('global')) diff --git a/setuptools/command/test.py b/setuptools/command/test.py index 341b11a20e..5d03c91102 100644 --- a/setuptools/command/test.py +++ b/setuptools/command/test.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import NoReturn + from setuptools import Command from setuptools.warnings import SetuptoolsDeprecationWarning @@ -35,11 +37,11 @@ class _test(Command): ('test-runner=', 'r', "Test runner to use"), ] - def initialize_options(self): + def initialize_options(self) -> None: pass - def finalize_options(self): + def finalize_options(self) -> None: pass - def run(self): + def run(self) -> NoReturn: raise RuntimeError("Support for the test command was removed in Setuptools 72") diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index 531f965013..dc066d9427 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -390,7 +390,7 @@ def __init__(self, distribution: Distribution) -> None: self._dist = distribution self._called = False - def __call__(self): + def __call__(self) -> None: """Trigger the automatic package discovery, if it is still necessary.""" if not self._called: self._called = True @@ -404,7 +404,7 @@ def __exit__( exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, - ): + ) -> None: if self._called: self._dist.set_defaults.analyse_name() # Now we can set a default name diff --git a/setuptools/config/setupcfg.py b/setuptools/config/setupcfg.py index 633aa9d45d..121a0febda 100644 --- a/setuptools/config/setupcfg.py +++ 
b/setuptools/config/setupcfg.py @@ -14,6 +14,7 @@ import contextlib import functools import os +from abc import abstractmethod from collections import defaultdict from collections.abc import Iterable, Iterator from functools import partial, wraps @@ -269,7 +270,8 @@ def _section_options( yield name.lstrip('.'), value @property - def parsers(self): + @abstractmethod + def parsers(self) -> dict[str, Callable]: """Metadata item name to parser function mapping.""" raise NotImplementedError( f'{self.__class__.__name__} must provide .parsers property' @@ -547,7 +549,7 @@ def __init__( self.root_dir = root_dir @property - def parsers(self): + def parsers(self) -> dict[str, Callable]: """Metadata item name to parser function mapping.""" parse_list_static = self._get_parser_compound(self._parse_list, _static.List) parse_dict_static = self._get_parser_compound(self._parse_dict, _static.Dict) @@ -631,7 +633,7 @@ def _parse_requirements_list(self, label: str, value: str): # ^-- Use `_static.List` to mark a non-`Dynamic` Core Metadata @property - def parsers(self): + def parsers(self) -> dict[str, Callable]: """Metadata item name to parser function mapping.""" parse_list = self._parse_list parse_bool = self._parse_bool diff --git a/setuptools/dist.py b/setuptools/dist.py index 6474f54122..b0ae193256 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -809,7 +809,7 @@ def _finalize_setup_keywords(self): if value is not None: ep.load()(self, ep.name, value) - def get_egg_cache_dir(self): + def get_egg_cache_dir(self) -> str: from . import windows_support egg_cache_dir = os.path.join(os.curdir, '.eggs') @@ -1056,7 +1056,7 @@ def get_cmdline_options(self) -> dict[str, dict[str, str | None]]: return d - def iter_distribution_names(self): + def iter_distribution_names(self) -> Iterator[str]: """Yield all packages, modules, and extension names in distribution""" yield from self.packages or () diff --git a/setuptools/extension.py b/setuptools/extension.py index 76e03d9d6b..3e63cbe12a 100644 --- a/setuptools/extension.py +++ b/setuptools/extension.py @@ -2,6 +2,7 @@ import functools import re +from collections.abc import Iterable from typing import TYPE_CHECKING from setuptools._path import StrPath @@ -13,7 +14,7 @@ import distutils.extension -def _have_cython(): +def _have_cython() -> bool: """ Return True if Cython can be imported. """ @@ -52,8 +53,9 @@ class Extension(_Extension): the full name of the extension, including any packages -- ie. *not* a filename or pathname, but Python dotted name - :arg list[str|os.PathLike[str]] sources: - list of source filenames, relative to the distribution root + :arg Iterable[str | os.PathLike[str]] sources: + iterable of source filenames, (except strings, which could be misinterpreted + as a single filename), relative to the distribution root (where the setup script lives), in Unix form (slash-separated) for portability. 
Source files may be C, C++, SWIG (.i), platform-specific resource files, or whatever else is recognized @@ -143,7 +145,7 @@ class Extension(_Extension): def __init__( self, name: str, - sources: list[StrPath], + sources: Iterable[StrPath], *args, py_limited_api: bool = False, **kw, diff --git a/setuptools/installer.py b/setuptools/installer.py index 2c26e3a1f4..36a8b09227 100644 --- a/setuptools/installer.py +++ b/setuptools/installer.py @@ -27,7 +27,7 @@ def _fixup_find_links(find_links): return find_links -def fetch_build_egg(dist, req): +def fetch_build_egg(dist, req) -> metadata.Distribution | metadata.PathDistribution: """Fetch an egg needed for building. Use pip/wheel to fetch/build a wheel.""" @@ -127,7 +127,7 @@ def _fetch_build_egg_no_warn(dist, req): # noqa: C901 # is too complex (16) # return metadata.Distribution.at(dist_location + '/EGG-INFO') -def strip_marker(req): +def strip_marker(req) -> packaging.requirements.Requirement: """ Return a new requirement without the environment marker to avoid calling pip with something like `babel; extra == "i18n"`, which diff --git a/setuptools/monkey.py b/setuptools/monkey.py index 6ad1abac29..24bb8180f9 100644 --- a/setuptools/monkey.py +++ b/setuptools/monkey.py @@ -69,7 +69,7 @@ def get_unpatched_class(cls: type[_T]) -> type[_T]: return base -def patch_all(): +def patch_all() -> None: import setuptools # we can't patch distutils.cmd, alas @@ -105,7 +105,7 @@ def _patch_distribution_metadata(): setattr(distutils.dist.DistributionMetadata, attr, new_val) -def patch_func(replacement, target_mod, func_name): +def patch_func(replacement, target_mod, func_name) -> None: """ Patch func_name in target_mod with replacement diff --git a/setuptools/msvc.py b/setuptools/msvc.py index 698b23d7af..8790c92442 100644 --- a/setuptools/msvc.py +++ b/setuptools/msvc.py @@ -13,7 +13,7 @@ import os import os.path import platform -from typing import TYPE_CHECKING, TypedDict +from typing import TYPE_CHECKING, TypedDict, overload from more_itertools import unique_everseen @@ -53,11 +53,11 @@ class PlatformInfo: current_cpu = environ.get('processor_architecture', '').lower() - def __init__(self, arch) -> None: + def __init__(self, arch: str) -> None: self.arch = arch.lower().replace('x64', 'amd64') @property - def target_cpu(self): + def target_cpu(self) -> str: """ Return Target CPU architecture. @@ -68,7 +68,7 @@ def target_cpu(self): """ return self.arch[self.arch.find('_') + 1 :] - def target_is_x86(self): + def target_is_x86(self) -> bool: """ Return True if target CPU is x86 32 bits.. @@ -79,7 +79,7 @@ def target_is_x86(self): """ return self.target_cpu == 'x86' - def current_is_x86(self): + def current_is_x86(self) -> bool: """ Return True if current CPU is x86 32 bits.. @@ -179,11 +179,11 @@ class RegistryInfo: winreg.HKEY_CLASSES_ROOT, ) - def __init__(self, platform_info) -> None: + def __init__(self, platform_info: PlatformInfo) -> None: self.pi = platform_info @property - def visualstudio(self) -> str: + def visualstudio(self) -> LiteralString: """ Microsoft Visual Studio root registry key. @@ -195,7 +195,7 @@ def visualstudio(self) -> str: return 'VisualStudio' @property - def sxs(self): + def sxs(self) -> LiteralString: """ Microsoft Visual Studio SxS registry key. @@ -207,7 +207,7 @@ def sxs(self): return os.path.join(self.visualstudio, 'SxS') @property - def vc(self): + def vc(self) -> LiteralString: """ Microsoft Visual C++ VC7 registry key. 
@@ -219,7 +219,7 @@ def vc(self): return os.path.join(self.sxs, 'VC7') @property - def vs(self): + def vs(self) -> LiteralString: """ Microsoft Visual Studio VS7 registry key. @@ -231,7 +231,7 @@ def vs(self): return os.path.join(self.sxs, 'VS7') @property - def vc_for_python(self) -> str: + def vc_for_python(self) -> LiteralString: """ Microsoft Visual C++ for Python registry key. @@ -243,7 +243,7 @@ def vc_for_python(self) -> str: return r'DevDiv\VCForPython' @property - def microsoft_sdk(self) -> str: + def microsoft_sdk(self) -> LiteralString: """ Microsoft SDK registry key. @@ -255,7 +255,7 @@ def microsoft_sdk(self) -> str: return 'Microsoft SDKs' @property - def windows_sdk(self): + def windows_sdk(self) -> LiteralString: """ Microsoft Windows/Platform SDK registry key. @@ -267,7 +267,7 @@ def windows_sdk(self): return os.path.join(self.microsoft_sdk, 'Windows') @property - def netfx_sdk(self): + def netfx_sdk(self) -> LiteralString: """ Microsoft .NET Framework SDK registry key. @@ -279,7 +279,7 @@ def netfx_sdk(self): return os.path.join(self.microsoft_sdk, 'NETFXSDK') @property - def windows_kits_roots(self) -> str: + def windows_kits_roots(self) -> LiteralString: """ Microsoft Windows Kits Roots registry key. @@ -290,7 +290,11 @@ def windows_kits_roots(self) -> str: """ return r'Windows Kits\Installed Roots' - def microsoft(self, key, x86=False): + @overload + def microsoft(self, key: LiteralString, x86: bool = False) -> LiteralString: ... + @overload + def microsoft(self, key: str, x86: bool = False) -> str: ... # type: ignore[misc] + def microsoft(self, key: str, x86: bool = False) -> str: """ Return key in Microsoft software registry. @@ -298,7 +302,7 @@ def microsoft(self, key, x86=False): ---------- key: str Registry key path where look. - x86: str + x86: bool Force x86 software registry. Return @@ -369,7 +373,9 @@ class SystemInfo: ProgramFiles = environ.get('ProgramFiles', '') ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles) - def __init__(self, registry_info: RegistryInfo, vc_ver=None) -> None: + def __init__( + self, registry_info: RegistryInfo, vc_ver: float | None = None + ) -> None: self.ri = registry_info self.pi = self.ri.pi @@ -398,7 +404,7 @@ def _find_latest_available_vs_ver(self): vc_vers.update(self.known_vs_paths) return sorted(vc_vers)[-1] - def find_reg_vs_vers(self): + def find_reg_vs_vers(self) -> list[float]: """ Find Microsoft Visual Studio versions available in registry. @@ -599,7 +605,7 @@ def WindowsSdkVersion(self) -> tuple[LiteralString, ...]: return () @property - def WindowsSdkLastVersion(self): + def WindowsSdkLastVersion(self) -> str: """ Microsoft Windows SDK last version. @@ -741,7 +747,7 @@ def UniversalCRTSdkLastVersion(self) -> str: raise @property - def NetFxSdkVersion(self): + def NetFxSdkVersion(self) -> tuple[LiteralString, ...]: """ Microsoft .NET Framework SDK versions. diff --git a/setuptools/wheel.py b/setuptools/wheel.py index 124e01ad2f..9366303154 100644 --- a/setuptools/wheel.py +++ b/setuptools/wheel.py @@ -8,6 +8,7 @@ import posixpath import re import zipfile +from collections.abc import Iterator from packaging.requirements import Requirement from packaging.tags import sys_tags @@ -65,7 +66,7 @@ def unpack(src_dir, dst_dir) -> None: @contextlib.contextmanager -def disable_info_traces(): +def disable_info_traces() -> Iterator[None]: """ Temporarily disable info traces. 
""" From dcf7ede3e78ff0ff9178ba6524f20dd9f1ff3e75 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 28 May 2025 16:10:00 -0400 Subject: [PATCH 1749/1761] Remove unused `# type: ignore[override]` --- setuptools/command/develop.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py index 8bf785826b..2d468845e5 100644 --- a/setuptools/command/develop.py +++ b/setuptools/command/develop.py @@ -28,7 +28,7 @@ class develop(Command): prefix = None index_url = None - def run(self) -> None: # type: ignore[override] # Not including easy_install's show_deprecation argument + def run(self) -> None: # Casting because mypy doesn't understand bool mult conditionals cmd = cast( list[str], From 3f7d6c2932a399766ea2d3c6b0cea5f86ad342c8 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 28 May 2025 19:21:27 -0400 Subject: [PATCH 1750/1761] Apply suggested comment Co-authored-by: Anderson Bravalheri --- setuptools/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 3c2297e569..b3e78edab6 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -113,6 +113,7 @@ def setup(**attrs) -> Distribution: # Make sure we have any requirements needed to interpret 'attrs'. _install_setup_requires(attrs) # Override return type of distutils.core.Distribution with setuptools.dist.Distribution + # (implicitly implemented via `setuptools.monkey.patch_all`). return distutils.core.setup(**attrs) # type: ignore[return-value] From 77514d3e9feefecde32419eeac8ab8625c431179 Mon Sep 17 00:00:00 2001 From: Avasam Date: Wed, 28 May 2025 16:14:06 -0400 Subject: [PATCH 1751/1761] Ran autofixes for `missing-return-type-special-method (ANN204)` with `ignore-fully-untyped = false` --- ruff.toml | 3 +-- setuptools/command/build_py.py | 8 +++++--- setuptools/command/editable_wheel.py | 12 +++++++++--- setuptools/config/expand.py | 2 +- setuptools/discovery.py | 2 +- setuptools/tests/integration/helpers.py | 11 +++++++---- setuptools/tests/test_bdist_wheel.py | 2 +- setuptools/tests/test_build_meta.py | 8 ++++---- setuptools/tests/test_wheel.py | 2 +- 9 files changed, 30 insertions(+), 20 deletions(-) diff --git a/ruff.toml b/ruff.toml index 83a6f173a3..8d7b5e71f7 100644 --- a/ruff.toml +++ b/ruff.toml @@ -66,13 +66,12 @@ ignore = [ "UP038", # Using `X | Y` in `isinstance` call is slower and more verbose https://github.com/astral-sh/ruff/issues/7871 # Only enforcing return type annotations for public functions "ANN202", # missing-return-type-private-function - "ANN204", # missing-return-type-special-method ] [lint.per-file-ignores] # Suppress nuisance warnings about module-import-not-at-top-of-file (E402) due to workaround for #4476 "setuptools/__init__.py" = ["E402"] -"pkg_resources/__init__.py" = ["E402"] +"pkg_resources/__init__.py" = ["E402", "ANN204"] [lint.isort] combine-as-imports = true diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 339699dbbc..59a9785308 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -9,6 +9,7 @@ from functools import partial from glob import glob from pathlib import Path +from typing import Any from more_itertools import unique_everseen @@ -81,7 +82,8 @@ def run(self) -> None: # output files are. 
self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=False)) - def __getattr__(self, attr: str): + # Should return "list[tuple[str, str, str, list[str]]] | Any" but can't do without typed distutils on Python 3.12+ + def __getattr__(self, attr: str) -> Any: "lazily compute data files" if attr == 'data_files': self.data_files = self._get_data_files() @@ -381,8 +383,8 @@ class _Warning(SetuptoolsDeprecationWarning): # _DUE_DATE: still not defined as this is particularly controversial. # Warning initially introduced in May 2022. See issue #3340 for discussion. - def __init__(self): - self._already_warned = set() + def __init__(self) -> None: + self._already_warned = set[str]() def is_module(self, file): return file.endswith(".py") and file[: -len(".py")].isidentifier() diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py index c772570817..48bd12ac40 100644 --- a/setuptools/command/editable_wheel.py +++ b/setuptools/command/editable_wheel.py @@ -406,7 +406,9 @@ def __init__(self, dist: Distribution, name: str, path_entries: list[Path]) -> N self.name = name self.path_entries = path_entries - def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]): + def __call__( + self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str] + ) -> None: entries = "\n".join(str(p.resolve()) for p in self.path_entries) contents = _encode_pth(f"{entries}\n") wheel.writestr(f"__editable__.{self.name}.pth", contents) @@ -451,7 +453,9 @@ def __init__( self._file = dist.get_command_obj("build_py").copy_file super().__init__(dist, name, [self.auxiliary_dir]) - def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]): + def __call__( + self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str] + ) -> None: self._create_links(files, mapping) super().__call__(wheel, files, mapping) @@ -545,7 +549,9 @@ def get_implementation(self) -> Iterator[tuple[str, bytes]]: content = _encode_pth(f"import {finder}; {finder}.install()") yield (f"__editable__.{self.name}.pth", content) - def __call__(self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str]): + def __call__( + self, wheel: WheelFile, files: list[str], mapping: Mapping[str, str] + ) -> None: for file, content in self.get_implementation(): wheel.writestr(file, content) diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py index dc066d9427..d9a2ded430 100644 --- a/setuptools/config/expand.py +++ b/setuptools/config/expand.py @@ -65,7 +65,7 @@ def _find_assignments(self) -> Iterator[tuple[ast.AST, ast.AST]]: elif isinstance(statement, ast.AnnAssign) and statement.value: yield (statement.target, statement.value) - def __getattr__(self, attr: str): + def __getattr__(self, attr: str) -> Any: """Attempt to load an attribute "statically", via :func:`ast.literal_eval`.""" try: return next( diff --git a/setuptools/discovery.py b/setuptools/discovery.py index c888399185..606796c388 100644 --- a/setuptools/discovery.py +++ b/setuptools/discovery.py @@ -335,7 +335,7 @@ def _package_dir(self) -> dict[str, str]: def __call__( self, force: bool = False, name: bool = True, ignore_ext_modules: bool = False - ): + ) -> None: """Automatically discover missing configuration fields and modifies the given ``distribution`` object in-place. 
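Patch 1751 adds explicit return annotations to special methods (ruff's ANN204 rule). A short sketch of the convention, using hypothetical names: ``__init__`` is annotated ``-> None`` even though it configures the instance, and ``__call__`` declares an explicit ``Any`` rather than an implicit one::

    from typing import Any

    class Registry:
        def __init__(self) -> None:  # constructors always return None
            self._data: dict[str, Any] = {}

        def __call__(self, key: str) -> Any:  # explicit beats implicit Any
            return self._data.get(key)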
diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py index 77b196e029..16b1302291 100644 --- a/setuptools/tests/integration/helpers.py +++ b/setuptools/tests/integration/helpers.py @@ -5,11 +5,14 @@ facilitate debugging. """ +from __future__ import annotations + import os import subprocess import tarfile +from collections.abc import Iterator from pathlib import Path -from zipfile import ZipFile +from zipfile import ZipFile, ZipInfo def run(cmd, env=None): @@ -35,16 +38,16 @@ def run(cmd, env=None): class Archive: """Compatibility layer for ZipFile/Info and TarFile/Info""" - def __init__(self, filename): + def __init__(self, filename) -> None: self._filename = filename if filename.endswith("tar.gz"): - self._obj = tarfile.open(filename, "r:gz") + self._obj: tarfile.TarFile | ZipFile = tarfile.open(filename, "r:gz") elif filename.endswith("zip"): self._obj = ZipFile(filename) else: raise ValueError(f"{filename} doesn't seem to be a zip or tar.gz") - def __iter__(self): + def __iter__(self) -> Iterator[ZipInfo] | Iterator[tarfile.TarInfo]: if hasattr(self._obj, "infolist"): return iter(self._obj.infolist()) return iter(self._obj) diff --git a/setuptools/tests/test_bdist_wheel.py b/setuptools/tests/test_bdist_wheel.py index 2ab4e9cfc6..68cc0c4d36 100644 --- a/setuptools/tests/test_bdist_wheel.py +++ b/setuptools/tests/test_bdist_wheel.py @@ -295,7 +295,7 @@ def test_preserve_unicode_metadata(monkeypatch, tmp_path): class simpler_bdist_wheel(bdist_wheel): """Avoid messing with setuptools/distutils internals""" - def __init__(self): + def __init__(self) -> None: pass @property diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 57162fd6af..2cd0a0a8ed 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -35,7 +35,7 @@ class BuildBackendBase: - def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'): + def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta') -> None: self.cwd = cwd self.env = env or {} self.backend_name = backend_name @@ -44,7 +44,7 @@ def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'): class BuildBackend(BuildBackendBase): """PEP 517 Build Backend""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) self.pool = futures.ProcessPoolExecutor(max_workers=1) @@ -77,12 +77,12 @@ def _kill(self, pid): class BuildBackendCaller(BuildBackendBase): - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) (self.backend_name, _, self.backend_obj) = self.backend_name.partition(':') - def __call__(self, name, *args, **kw): + def __call__(self, name, *args, **kw) -> Any: """Handles arbitrary function invocations on the build backend.""" os.chdir(self.cwd) os.environ.update(self.env) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index f91465084a..c6e3ec7bca 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -168,7 +168,7 @@ def _check_wheel_install( class Record: - def __init__(self, id, **kwargs): + def __init__(self, id, **kwargs) -> None: self._id = id self._fields = kwargs From 001f424323df2ac711aca6a9556ec17c53a683fb Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Wed, 31 Jul 2024 13:47:52 -0400 Subject: [PATCH 1752/1761] Remove post-release tags on setuptools' own build. 
Ref #4530 --- newsfragments/4530.feature.rst | 1 + setup.cfg | 3 --- tox.ini | 2 -- 3 files changed, 1 insertion(+), 5 deletions(-) create mode 100644 newsfragments/4530.feature.rst delete mode 100644 setup.cfg

diff --git a/newsfragments/4530.feature.rst b/newsfragments/4530.feature.rst new file mode 100644 index 0000000000..f321dab10d --- /dev/null +++ b/newsfragments/4530.feature.rst @@ -0,0 +1 @@ +Remove post-release tags on setuptools' own build. diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 38922089ad..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -[egg_info] -tag_build = .post -tag_date = 1 diff --git a/tox.ini b/tox.ini index f457ff1fee..3bd33bdf27 100644 --- a/tox.ini +++ b/tox.ini @@ -103,8 +103,6 @@ setenv = TWINE_USERNAME = {env:TWINE_USERNAME:__token__} commands = python -c "import shutil; shutil.rmtree('dist', ignore_errors=True)" - # unset tag_build and tag_date pypa/setuptools#2500 - python setup.py egg_info -Db "" saveopts python -m build python -m twine upload dist/* python -m jaraco.develop.create-github-release

From 2e3c6a8158bc9f9900ec4ab6a46e331ee762f780 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 3 Jun 2025 18:49:47 +0100 Subject: [PATCH 1753/1761] Add note about setuptools not defining the introduction of warnings as a breaking change

--- docs/userguide/interfaces.rst | 10 ++++++++++ 1 file changed, 10 insertions(+)

diff --git a/docs/userguide/interfaces.rst b/docs/userguide/interfaces.rst index 791f7a1818..767b6b08eb 100644 --- a/docs/userguide/interfaces.rst +++ b/docs/userguide/interfaces.rst @@ -50,6 +50,16 @@ reserve the right of speeding up the deprecation cycle and shortening deprecatio Note that these are exceptional circumstances and that the project will carefully attempt to find alternatives before resorting to unscheduled removals.

+.. important:: + In the context of ``setuptools``, the introduction of :py:mod:`warnings` + (including deprecation warnings) is not considered a breaking change *per se*. + Instead it is considered a backwards compatible *communication action* that + precedes an upcoming breaking change. This is because code + containing warnings typically does not fail and can successfully terminate + execution, unless users explicitly opt into transforming those warnings + into errors (e.g., via Python's :external+python:ref:`-W option or + PYTHONWARNINGS environment variable `). + What to do when deprecation periods are undefined? --------------------------------------------------

From 522c189a3bae75c14b2c1be222348ebe649f2164 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sat, 14 Jun 2025 17:15:24 +0200 Subject: [PATCH 1754/1761] interfaces.rst: Fix typo discovered by codespell

Just a test to understand why tests are failing on: * #5033 --- docs/userguide/interfaces.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/userguide/interfaces.rst b/docs/userguide/interfaces.rst index 791f7a1818..3d451e966a 100644 --- a/docs/userguide/interfaces.rst +++ b/docs/userguide/interfaces.rst @@ -147,7 +147,7 @@ you can still resort to restricting the version of Setuptools to be installed. This usually includes modifying ``[build-system] requires`` in ``pyproject.toml`` and/or specifying ``pip`` :external+pip:ref:`Constraints Files` via the ``PIP_CONSTRAINT`` environment variable (or passing |build-constraint-uv|_).
-Please avoid however to pre-emptively add version constraints if not necessary, +Please avoid however to preemptively add version constraints if not necessary, (you can read more about this in https://iscinumpy.dev/post/bound-version-constraints/).

.. |build-constraint-uv| replace:: ``--build-constraint`` to ``uv``

From 181d4809a8fddb2882931b81e112274ec0f31e9f Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Mon, 16 Jun 2025 16:23:24 +0300 Subject: [PATCH 1755/1761] CI: Require Python >= 3.13.5

--- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c6078bcd10..824daa6fc2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -42,7 +42,7 @@ jobs: matrix: python: - "3.9" - - "3.13" + - ">=3.13.5" platform: - ubuntu-latest - macos-latest

From 81f213369bfdf9bdb0a1d5dd8ab8e1e558be2d28 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Tue, 17 Jun 2025 17:11:02 +0300 Subject: [PATCH 1756/1761] Note about temporary divergence from jaraco/skeleton

Co-authored-by: Anderson Bravalheri --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 824daa6fc2..fdd673d7c5 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -42,7 +42,7 @@ jobs: matrix: python: - "3.9" - - ">=3.13.5" + - ">=3.13.5" # temporary bound until it becomes the default, python/cpython#135151 platform: - ubuntu-latest - macos-latest

From b28b10691eb1f2d9a3fc95449e27654dc1658ea7 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 17 Jun 2025 17:31:08 +0100 Subject: [PATCH 1757/1761] Adapt to newly introduced Ruff checks

--- .pre-commit-config.yaml | 2 +- ruff.toml | 1 + setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py | 6 ++++-- setuptools/tests/test_build_py.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 633e3648e9..fa5592412f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.9 + rev: v0.12.0 hooks: - id: ruff args: [--fix, --unsafe-fixes] diff --git a/ruff.toml b/ruff.toml index a01a649662..f6afbc9fca 100644 --- a/ruff.toml +++ b/ruff.toml @@ -73,6 +73,7 @@ ignore = [ # Suppress nuisance warnings about module-import-not-at-top-of-file (E402) due to workaround for #4476 "setuptools/__init__.py" = ["E402"] "pkg_resources/__init__.py" = ["E402", "ANN204"] +"pkg_resources/tests/test_resources.py" = ["PT031"]

[lint.isort] combine-as-imports = true diff --git a/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py index e42f28ffaa..9fc8050743 100644 --- a/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py +++ b/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py @@ -103,7 +103,9 @@ def test_mixed_extras_require_optional_dependencies(tmp_path): path.build(files, prefix=tmp_path)

pyproject = tmp_path / "pyproject.toml" + dist = Distribution({"extras_require": {"hello": ["world"]}}) + with pytest.warns(SetuptoolsWarning, match=".extras_require. 
overwritten"): - dist = Distribution({"extras_require": {"hello": ["world"]}}) dist = apply_configuration(dist, pyproject) - assert dist.extras_require == {"docs": ["sphinx"]} + + assert dist.extras_require == {"docs": ["sphinx"]} diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index 1e3a660833..78848f7182 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -165,7 +165,7 @@ def test_excluded_subpackages(tmpdir_cwd): build_py = dist.get_command_obj("build_py") msg = r"Python recognizes 'mypkg\.tests' as an importable package" - with pytest.warns(SetuptoolsDeprecationWarning, match=msg): + with pytest.warns(SetuptoolsDeprecationWarning, match=msg): # noqa: PT031 # TODO: To fix #3260 we need some transition period to deprecate the # existing behavior of `include_package_data`. After the transition, we # should remove the warning and fix the behavior. From 7d6bc3ef11959b010f15d347977fc178bc169500 Mon Sep 17 00:00:00 2001 From: Anderson Bravalheri Date: Tue, 17 Jun 2025 18:11:26 +0100 Subject: [PATCH 1758/1761] Attempt to ignore coverage warnings preventing CI to run on cygwin --- pytest.ini | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pytest.ini b/pytest.ini index e315bd296c..02fbd39c50 100644 --- a/pytest.ini +++ b/pytest.ini @@ -95,3 +95,7 @@ filterwarnings= # Ignore warnings about consider_namespace_packages (jaraco/skeleton@6ff02e0eefcd) ignore:Unknown config option. consider_namespace_packages:pytest.PytestConfigWarning + + # Ignore warnings we cannot do anything about: + # https://github.com/pypa/setuptools/pull/5042#issuecomment-2981138461 + ignore:Couldn't import C tracer:coverage.exceptions.CoverageWarning From a3ac2264a687100789dcc49999b01c15a59b6778 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sat, 14 Jun 2025 16:05:47 +0200 Subject: [PATCH 1759/1761] Add ruff rules PIE --- newsfragments/5033.misc.rst | 1 + ruff.toml | 1 + setuptools/command/bdist_egg.py | 4 ++-- setuptools/command/egg_info.py | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 newsfragments/5033.misc.rst diff --git a/newsfragments/5033.misc.rst b/newsfragments/5033.misc.rst new file mode 100644 index 0000000000..590911b1a0 --- /dev/null +++ b/newsfragments/5033.misc.rst @@ -0,0 +1 @@ +Avoid repeated calls to ``str.startswith`` and ``str.endswith``. 
diff --git a/ruff.toml b/ruff.toml index f6afbc9fca..20c06adaec 100644 --- a/ruff.toml +++ b/ruff.toml @@ -29,6 +29,7 @@ extend-select = [ "ISC", # flake8-implicit-str-concat "FURB", # refurb "PERF", # Perflint + "PIE", # flake8-pie "PGH", # pygrep-hooks (blanket-* rules) "PT", # flake8-pytest-style "RUF10", # unused-noqa & redirected-noqa diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index be0080fa2f..ab452680f1 100644 --- a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -348,9 +348,9 @@ def analyze_egg(egg_dir, stubs): safe = True for base, dirs, files in walk_egg(egg_dir): for name in files: - if name.endswith('.py') or name.endswith('.pyw'): + if name.endswith(('.py', '.pyw')): continue - elif name.endswith('.pyc') or name.endswith('.pyo'): + elif name.endswith(('.pyc', '.pyo')): # always scan, even if we already know we're not safe safe = scan_module(egg_dir, base, name, stubs) and safe return safe diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index bb18738ea5..d9de297ecf 100644 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -146,7 +146,7 @@ def _maybe_tag(self, version): def _already_tagged(self, version: str) -> bool: # Depending on their format, tags may change with version normalization. # So in addition the regular tags, we have to search for the normalized ones. - return version.endswith(self.vtags) or version.endswith(self._safe_tags()) + return version.endswith((self.vtags, self._safe_tags())) def _safe_tags(self) -> str: # To implement this we can rely on `safe_version` pretending to be version 0 From 1a6cfbd00e99ed9d763a5af36a0830c392d9630f Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 20 Jul 2025 12:55:03 -0400 Subject: [PATCH 1760/1761] Revert "Restore default for blank issues." Partially reverts commit 05bd74c933c1471525e50b1e21d4d4ed1f492343. --- .github/ISSUE_TEMPLATE/config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index ebc2d3399e..eae74f0ddf 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,3 +1,5 @@ +# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser +blank_issues_enabled: false # default: true contact_links: - name: 🤔 Have questions or need support? url: https://github.com/pypa/setuptools/discussions From 9cc2f5c05c333cd4cecd2c0d9e7c5e208f2a3148 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 20 Jul 2025 13:02:56 -0400 Subject: [PATCH 1761/1761] Reapply "Restore default for blank issues." This reverts commit 1a6cfbd00e99ed9d763a5af36a0830c392d9630f. Closes #5052 --- .github/ISSUE_TEMPLATE/config.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index eae74f0ddf..ebc2d3399e 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,5 +1,3 @@ -# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser -blank_issues_enabled: false # default: true contact_links: - name: 🤔 Have questions or need support? url: https://github.com/pypa/setuptools/discussions